FreeChatgpt.py

from __future__ import annotations

import json, random

from aiohttp import ClientSession

from ..typing import AsyncResult, Messages
from .base_provider import AsyncGeneratorProvider

# Map friendly model aliases to the names expected by the upstream API.
models = {
    "claude-v2": "claude-2.0",
    "claude-v2.1": "claude-2.1",
    "gemini-pro": "google-gemini-pro",
}
# Mirrors of the same backend; one is picked at random per request.
urls = [
    "https://free.chatgpt.org.uk",
    "https://ai.chatgpt.org.uk",
]


class FreeChatgpt(AsyncGeneratorProvider):
    url = "https://free.chatgpt.org.uk"
    working = True
    supports_gpt_35_turbo = True
    supports_gpt_4 = True
    supports_message_history = True

    @classmethod
    async def create_async_generator(
        cls,
        model: str,
        messages: Messages,
        proxy: str = None,
        **kwargs
    ) -> AsyncResult:
        # Resolve model aliases; fall back to gpt-3.5-turbo when no model is given.
        if model in models:
            model = models[model]
        elif not model:
            model = "gpt-3.5-turbo"
        url = random.choice(urls)
        headers = {
            "Accept": "application/json, text/event-stream",
            "Content-Type": "application/json",
            "Accept-Encoding": "gzip, deflate, br",
            "Accept-Language": "en-US,en;q=0.5",
            "Host": "free.chatgpt.org.uk",
            "Referer": f"{cls.url}/",
            "Origin": f"{cls.url}",
            "Sec-Fetch-Dest": "empty",
            "Sec-Fetch-Mode": "cors",
            "Sec-Fetch-Site": "same-origin",
            "User-Agent": "Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/120.0.0.0 Safari/537.36",
        }
        async with ClientSession(headers=headers) as session:
            data = {
                "messages": messages,
                "stream": True,
                "model": model,
                "temperature": 0.5,
                "presence_penalty": 0,
                "frequency_penalty": 0,
                "top_p": 1,
                **kwargs
            }
            async with session.post(f"{url}/api/openai/v1/chat/completions", json=data, proxy=proxy) as response:
                response.raise_for_status()
                started = False
                # The endpoint streams OpenAI-style server-sent events:
                # each chunk is a "data: {...}" line, terminated by "data: [DONE]".
                async for line in response.content:
                    if line.startswith(b"data: [DONE]"):
                        break
                    elif line.startswith(b"data: "):
                        line = json.loads(line[6:])
                        if not line["choices"]:
                            continue
                        chunk = line["choices"][0]["delta"].get("content")
                        if chunk:
                            started = True
                            yield chunk
                if not started:
                    raise RuntimeError("Empty response")
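A minimal usage sketch (not part of the original file): it drives the streaming generator with asyncio and prints chunks as they arrive. The g4f.Provider import path and the message format are assumptions; adjust them to the package this module actually lives in.

# Usage sketch -- assumes this module is importable from a g4f-style package.
import asyncio

from g4f.Provider.FreeChatgpt import FreeChatgpt  # assumed import path

async def main() -> None:
    messages = [{"role": "user", "content": "Say hello in one sentence."}]
    # create_async_generator yields response text incrementally as it streams.
    async for chunk in FreeChatgpt.create_async_generator("gpt-3.5-turbo", messages):
        print(chunk, end="", flush=True)
    print()

asyncio.run(main())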