"""Scrape a web page and print every URL found in its anchor tags."""
import requests
from bs4 import BeautifulSoup
def get_all_urls_from_website(url):
    """Fetch *url* and return every href found in its ``<a>`` tags.

    Parameters
    ----------
    url : str
        Address of the page to fetch.

    Returns
    -------
    list[str]
        All non-None ``href`` attribute values in document order, or an
        empty list when the request fails (the error is printed).
    """
    try:
        # timeout= keeps a dead/unreachable host from hanging forever.
        response = requests.get(url, timeout=10)
        # 4xx/5xx responses are not link pages; surface them as
        # RequestException subclasses so they hit the handler below.
        response.raise_for_status()
    except requests.exceptions.RequestException as e:
        print("Error: ", e)
        return []
    soup = BeautifulSoup(response.content, 'html.parser')
    # Loop variable renamed from `url` — the original shadowed the
    # function parameter inside the loop.
    return [href
            for link in soup.find_all('a')
            if (href := link.get('href')) is not None]
# Example usage — guarded so importing this module does not trigger a
# network request as a side effect.
if __name__ == "__main__":
    website_url = "https://www.example.com"
    # Distinct loop name avoids shadowing/confusion with the function's
    # `url` parameter.
    for found_url in get_all_urls_from_website(website_url):
        print(found_url)