# GeminiProChat.py
  1. from __future__ import annotations
  2. import time
  3. from hashlib import sha256
  4. from aiohttp import ClientSession
  5. from ..typing import AsyncResult, Messages
  6. from .base_provider import AsyncGeneratorProvider
  7. class GeminiProChat(AsyncGeneratorProvider):
  8. url = "https://gemini-chatbot-sigma.vercel.app"
  9. working = True
  10. @classmethod
  11. async def create_async_generator(
  12. cls,
  13. model: str,
  14. messages: Messages,
  15. proxy: str = None,
  16. **kwargs
  17. ) -> AsyncResult:
  18. headers = {
  19. "User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64; rv:122.0) Gecko/20100101 Firefox/122.0",
  20. "Accept": "*/*",
  21. "Accept-Language": "en-US,en;q=0.5",
  22. "Accept-Encoding": "gzip, deflate, br",
  23. "Content-Type": "text/plain;charset=UTF-8",
  24. "Referer": "https://gemini-chatbot-sigma.vercel.app/",
  25. "Origin": "https://gemini-chatbot-sigma.vercel.app",
  26. "Sec-Fetch-Dest": "empty",
  27. "Sec-Fetch-Mode": "cors",
  28. "Sec-Fetch-Site": "same-origin",
  29. "Connection": "keep-alive",
  30. "TE": "trailers",
  31. }
  32. async with ClientSession(headers=headers) as session:
  33. timestamp = int(time.time() * 1e3)
  34. data = {
  35. "messages":[{
  36. "role": "model" if message["role"] == "assistant" else "user",
  37. "parts": [{"text": message["content"]}]
  38. } for message in messages],
  39. "time": timestamp,
  40. "pass": None,
  41. "sign": generate_signature(timestamp, messages[-1]["content"]),
  42. }
  43. async with session.post(f"{cls.url}/api/generate", json=data, proxy=proxy) as response:
  44. response.raise_for_status()
  45. async for chunk in response.content.iter_any():
  46. yield chunk.decode()
  47. def generate_signature(time: int, text: str, secret: str = ""):
  48. message = f'{time}:{text}:{secret}';
  49. return sha256(message.encode()).hexdigest()