import asyncio

from bs4 import BeautifulSoup as bs
from playwright.async_api import async_playwright


async def main():
    """Scrape and print image URLs from an Instagram hashtag page.

    Loads the page with Playwright using a saved login session
    (``auth.json``), waits for the dynamically rendered content,
    then extracts every ``<img src=...>`` with BeautifulSoup.

    Returns:
        list[str | None]: the ``src`` attribute of each ``<img>`` tag
        found on the page (``None`` for tags without a ``src``).
    """
    async with async_playwright() as p:
        browser = await p.chromium.launch(headless=False)
        # Reuse a previously saved authenticated session so Instagram
        # does not redirect to the login page.
        page = await browser.new_page(storage_state='auth.json')
        try:
            await page.goto('https://www.instagram.com/explore/tags/alanzoka/')

            # Instagram renders images client-side; give it time to load.
            # Use asyncio.sleep, NOT time.sleep — a blocking sleep would
            # stall the event loop Playwright itself runs on.
            await asyncio.sleep(6)

            html = await page.content()
            soup = bs(html, 'html.parser')

            urls = [img.get('src') for img in soup.find_all('img')]
            for url in urls:
                print(url)

            # TODO: organizar links com pandas
            return urls
        finally:
            # Always release the browser, even if scraping fails.
            await browser.close()


if __name__ == '__main__':
    asyncio.run(main())