Merge pull request #3 from Trivernis/develop

New Code Improvement
Trivernis authored 6 years ago, committed by GitHub
commit 4bae373f2b

@@ -20,7 +20,7 @@ hdr: Dict[str, str] = {
     'Connection': 'keep-alive'}
 
 
-async def get_img_as(url):
+async def request_soup(url):
     req = urlreq.Request(url, headers=hdr)
     html = None
     for x in range(0, 10):
@@ -31,6 +31,11 @@ async def get_img_as(url):
             print('[-]', e)
             await asyncio.sleep(1)
     soup = BeautifulSoup(html, "lxml")
+    return soup
+
+
+async def get_img_as(url):
+    soup = await request_soup(url)
     ret = []
     for t in soup.find_all(has_source):
         if 'redditmedia' not in t['src']:
@@ -42,17 +47,8 @@ async def get_img_as(url):
 async def get_next(url):
-    req = urlreq.Request(url, headers=hdr)
-    html = None
-    for x in range(0, 10):
-        try:
-            html = urlreq.urlopen(req).read()
-            break
-        except Exception as e:
-            print('[-]', e)
-            await asyncio.sleep(1)
-    soup = BeautifulSoup(html, "lxml")
     ids = []
+    soup = await request_soup(url)
     for t in soup.find_all(has_source):
         if 'redditmedia' not in t['src']:
             try:
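
For context, here is a minimal sketch of how the touched coroutines fit together after this change, reconstructed from the hunks above. The imports, the contents of the hdr dict, the has_source helper, and the loop bodies that the diff elides (the append calls) are assumptions, not part of this commit.

# Sketch only: reconstructed from the diff above; imports, hdr contents,
# has_source, and the elided loop bodies are assumptions.
import asyncio
import urllib.request as urlreq
from typing import Dict

from bs4 import BeautifulSoup

hdr: Dict[str, str] = {
    'User-Agent': 'Mozilla/5.0',   # assumed entry; the real dict lives earlier in the file
    'Connection': 'keep-alive'}


def has_source(tag):
    # Assumed helper: match tags that carry a 'src' attribute.
    return tag.has_attr('src')


async def request_soup(url):
    # New shared helper introduced by this commit: retry the request up to
    # ten times, sleeping one second after each failure, then parse with lxml.
    req = urlreq.Request(url, headers=hdr)
    html = None
    for x in range(0, 10):
        try:
            html = urlreq.urlopen(req).read()
            break
        except Exception as e:
            print('[-]', e)
            await asyncio.sleep(1)
    soup = BeautifulSoup(html, "lxml")
    return soup


async def get_img_as(url):
    # Now delegates fetching and parsing to request_soup.
    soup = await request_soup(url)
    ret = []
    for t in soup.find_all(has_source):
        if 'redditmedia' not in t['src']:
            ret.append(t['src'])   # assumed body; the diff elides this line
    return ret


async def get_next(url):
    # The duplicated fetch/retry block removed by this commit lived here;
    # it is replaced by a single call to request_soup.
    ids = []
    soup = await request_soup(url)
    for t in soup.find_all(has_source):
        if 'redditmedia' not in t['src']:
            try:
                ids.append(t['data-fullname'])   # assumed body; the diff elides it
            except KeyError:
                pass
    return ids

Both coroutines would be driven from an asyncio event loop, e.g. asyncio.run(get_img_as(url)); the actual call sites are outside this diff.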
