Opchatgpts.py

from __future__ import annotations

import json

from aiohttp import ClientSession

from ...typing import Messages, AsyncResult
from ..base_provider import AsyncGeneratorProvider
from ..helper import get_random_string


class Opchatgpts(AsyncGeneratorProvider):
    url                       = "https://opchatgpts.net"
    working                   = False
    supports_message_history  = True
    supports_gpt_35_turbo     = True

    @classmethod
    async def create_async_generator(
        cls,
        model: str,
        messages: Messages,
        proxy: str = None,
        **kwargs
    ) -> AsyncResult:
        # Browser-like headers so the request matches the site's own frontend.
        headers = {
            "User-Agent"      : "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/116.0.0.0 Safari/537.36",
            "Accept"          : "*/*",
            "Accept-Language" : "de,en-US;q=0.7,en;q=0.3",
            "Origin"          : cls.url,
            "Alt-Used"        : "opchatgpts.net",
            "Referer"         : f"{cls.url}/chatgpt-free-use/",
            "Sec-Fetch-Dest"  : "empty",
            "Sec-Fetch-Mode"  : "cors",
            "Sec-Fetch-Site"  : "same-origin",
        }
        async with ClientSession(headers=headers) as session:
            # Payload expected by the site's WordPress "mwai-ui" chat endpoint.
            data = {
                "botId": "default",
                "chatId": get_random_string(),
                "contextId": 28,
                "customId": None,
                "messages": messages,
                "newMessage": messages[-1]["content"],
                "session": "N/A",
                "stream": True
            }
            async with session.post(f"{cls.url}/wp-json/mwai-ui/v1/chats/submit", json=data, proxy=proxy) as response:
                response.raise_for_status()
                # The endpoint streams server-sent events; each payload line starts with "data: ".
                async for line in response.content:
                    if line.startswith(b"data: "):
                        try:
                            chunk = json.loads(line[6:])
                            assert "type" in chunk
                        except (json.JSONDecodeError, AssertionError) as e:
                            raise RuntimeError(f"Broken line: {line.decode()}") from e
                        if chunk["type"] == "live":
                            yield chunk["data"]
                        elif chunk["type"] == "end":
                            break
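
# Usage sketch (illustrative, not part of the original file): assuming this module
# sits inside the g4f provider package so the relative imports resolve, the async
# generator could be consumed roughly like this:
#
#   import asyncio
#
#   async def demo():
#       messages = [{"role": "user", "content": "Hello"}]
#       async for chunk in Opchatgpts.create_async_generator("gpt-3.5-turbo", messages):
#           print(chunk, end="", flush=True)
#
#   asyncio.run(demo())
#
# Note that `working = False` marks the provider as inactive, so a live request may
# fail; the sketch only shows the intended call pattern.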