# 5.py — random prnt.sc screenshot downloader with an asyncio Timer demo
import asyncio
import os
import random
import string
import urllib.request

import aiohttp
import async_timeout
from bs4 import BeautifulSoup
  10. class Timer:
  11. def __init__(self, timeout, callback):
  12. self._timeout = timeout
  13. self._callback = callback
  14. self._task = asyncio.ensure_future(self._job())
  15. async def _job(self):
  16. await asyncio.sleep(self._timeout)
  17. await self._callback()
  18. def cancel(self):
  19. self._task.cancel()
  20. async def timeout_callback():
  21. await asyncio.sleep(0.1)
  22. print('echo!')
  23. def get_html():
  24. url_base = 'https://prnt.sc/'
  25. url_last3 = str(''.join(random.choice(string.digits + string.ascii_lowercase) for _ in range(6)))
  26. req = str(url_base + url_last3)
  27. html = urllib.request.urlopen(req).read()
  28. return html
  29. opener = urllib.request.build_opener()
  30. opener.addheaders = [('User-Agent', 'Mozilla/5.0')]
  31. urllib.request.install_opener(opener)
  32. html = get_html()
  33. soup = BeautifulSoup(html, 'html.parser')
  34. picture_url = soup.find(id='screenshot-image')['src']
  35. await urllib.request.urlretrieve(picture_url, picture_url[40:])
  36. print ("DONE!")
  37. async def main():
  38. print('\nfirst example:')
  39. timer = Timer(2, timeout_callback)
  40. await asyncio.sleep(5)
  41. print('\nsecond example:')
  42. timer = Timer(2, timeout_callback)
  43. await asyncio.sleep(1)
  44. timer.cancel() # cancel it
  45. await asyncio.sleep(1.5)
  46. loop = asyncio.new_event_loop()
  47. asyncio.set_event_loop(loop)
  48. try:
  49. loop.run_until_complete(main())
  50. finally:
  51. loop.run_until_complete(loop.shutdown_asyncgens())
  52. loop.close()