import asyncio
import os
import random
import string
import urllib.request

import aiohttp
import async_timeout
from bs4 import BeautifulSoup
class Timer:
    """Fire-once timer: awaits `callback` after `timeout` seconds.

    The countdown starts immediately on construction; `cancel()` stops it
    before the callback runs.
    """

    def __init__(self, timeout, callback):
        self._timeout = timeout
        self._callback = callback
        # Scheduling begins the moment the timer is built.
        self._task = asyncio.ensure_future(self._countdown())

    async def _countdown(self):
        # Sleep out the configured delay, then hand control to the callback.
        await asyncio.sleep(self._timeout)
        await self._callback()

    def cancel(self):
        """Abort the pending countdown; the callback will not fire."""
        self._task.cancel()
async def timeout_callback():
    """Timer callback: fetch a random prnt.sc page and save its screenshot.

    Picks a random 6-character code, downloads the page, extracts the
    `screenshot-image` element's src, and retrieves that image to a local
    file. Prints 'echo!' on entry and 'DONE!' on success.
    """
    await asyncio.sleep(0.1)
    print('echo!')

    def get_html():
        # Random prnt.sc code: 6 chars drawn from lowercase letters + digits.
        url_base = 'https://prnt.sc/'
        code = ''.join(random.choice(string.digits + string.ascii_lowercase)
                       for _ in range(6))
        return urllib.request.urlopen(url_base + code).read()

    # prnt.sc rejects urllib's default User-Agent, so present a browser one.
    opener = urllib.request.build_opener()
    opener.addheaders = [('User-Agent', 'Mozilla/5.0')]
    urllib.request.install_opener(opener)

    soup = BeautifulSoup(get_html(), 'html.parser')
    img = soup.find(id='screenshot-image')
    if img is None or not img.get('src'):
        # Random codes frequently hit removed/missing screenshots; the old
        # code crashed here with a TypeError on None['src'].
        print('no screenshot found, skipping')
        return
    picture_url = img['src']
    # BUG FIX: urlretrieve is a plain synchronous function returning a tuple;
    # the original `await urllib.request.urlretrieve(...)` raised
    # "TypeError: object tuple can't be used in 'await' expression".
    # NOTE(review): this still blocks the event loop while downloading;
    # aiohttp (imported at file level) would be the non-blocking alternative.
    # picture_url[40:] presumably strips the URL prefix to get a filename —
    # TODO confirm against the actual src format.
    urllib.request.urlretrieve(picture_url, picture_url[40:])
    print("DONE!")
async def main():
    """Run the two Timer demos: one that fires, one that gets cancelled."""
    print('\nfirst example:')
    demo = Timer(2, timeout_callback)
    await asyncio.sleep(5)

    print('\nsecond example:')
    demo = Timer(2, timeout_callback)
    await asyncio.sleep(1)
    # Stop the second timer before its 2-second delay elapses.
    demo.cancel()
    await asyncio.sleep(1.5)
# Guarded entry point: the original ran the event loop at import time,
# which is a side effect any importer of this module would trigger.
if __name__ == "__main__":
    # Own the loop explicitly so async generators can be finalized
    # before the loop is closed.
    loop = asyncio.new_event_loop()
    asyncio.set_event_loop(loop)
    try:
        loop.run_until_complete(main())
    finally:
        loop.run_until_complete(loop.shutdown_asyncgens())
        loop.close()