ChatAiGpt.py

from __future__ import annotations

import re

from aiohttp import ClientSession

from ...typing import AsyncResult, Messages
from ..base_provider import AsyncGeneratorProvider
from ..helper import format_prompt


class ChatAiGpt(AsyncGeneratorProvider):
    # Provider for chataigpt.org, a WordPress-based chat frontend: the prompt
    # is posted to the site's wpaicg admin-ajax endpoint and the reply is
    # streamed back as plain text chunks.
    url = "https://chataigpt.org"
    supports_gpt_35_turbo = True
    _nonce = None
    _post_id = None

    @classmethod
    async def create_async_generator(
        cls,
        model: str,
        messages: Messages,
        proxy: str = None,
        **kwargs
    ) -> AsyncResult:
        headers = {
            "User-Agent": "Mozilla/5.0 (X11; Ubuntu; Linux x86_64; rv:109.0) Gecko/20100101 Firefox/118.0",
            "Accept": "*/*",
            "Accept-Language": "de,en-US;q=0.7,en;q=0.3",
            "Accept-Encoding": "gzip, deflate, br",
            "Origin": cls.url,
            "Alt-Used": cls.url,
            "Connection": "keep-alive",
            "Referer": cls.url,
            "Pragma": "no-cache",
            "Cache-Control": "no-cache",
            "TE": "trailers",
            "Sec-Fetch-Dest": "empty",
            "Sec-Fetch-Mode": "cors",
            "Sec-Fetch-Site": "same-origin",
        }
        async with ClientSession(headers=headers) as session:
            # Scrape the WordPress nonce and post id from the landing page on
            # first use; they are cached on the class for subsequent calls.
            if not cls._nonce:
                async with session.get(f"{cls.url}/", proxy=proxy) as response:
                    response.raise_for_status()
                    response = await response.text()

                    result = re.search(
                        r'data-nonce=(.*?) data-post-id=([0-9]+)', response
                    )
                    if result:
                        cls._nonce, cls._post_id = result.group(1), result.group(2)
                    else:
                        raise RuntimeError("No nonce found")

            prompt = format_prompt(messages)
            data = {
                "_wpnonce": cls._nonce,
                "post_id": cls._post_id,
                "url": cls.url,
                "action": "wpaicg_chat_shortcode_message",
                "message": prompt,
                "bot_id": 0
            }
            # Post the formatted conversation to the plugin's AJAX endpoint and
            # yield the response body as it arrives, chunk by chunk.
            async with session.post(f"{cls.url}/wp-admin/admin-ajax.php", data=data, proxy=proxy) as response:
                response.raise_for_status()
                async for chunk in response.content:
                    if chunk:
                        yield chunk.decode()
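

# --- Usage sketch (not part of the original provider code) -----------------
# A minimal example of how this async generator could be consumed, assuming
# the module is importable as part of a g4f-style package (the relative
# imports above require a package context) and that chataigpt.org is
# reachable. The model argument is accepted for interface compatibility but
# not used by this provider.
#
#   import asyncio
#
#   async def demo():
#       messages = [{"role": "user", "content": "Hello!"}]
#       async for chunk in ChatAiGpt.create_async_generator("gpt-3.5-turbo", messages):
#           print(chunk, end="", flush=True)
#
#   asyncio.run(demo())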