# CablyAI.py — CablyAI provider (OpenAI-compatible template subclass)
from __future__ import annotations

# NOTE(review): imported but unused in the visible portion of this file —
# confirm a later chunk raises it before removing.
from ...errors import ModelNotSupportedError
from ..template import OpenaiTemplate
  4. class CablyAI(OpenaiTemplate):
  5. url = "https://cablyai.com/chat"
  6. login_url = "https://cablyai.com"
  7. api_base = "https://cablyai.com/v1"
  8. working = True
  9. needs_auth = False
  10. supports_stream = True
  11. supports_system_message = True
  12. supports_message_history = True
  13. @classmethod
  14. def create_async_generator(
  15. cls,
  16. model: str,
  17. messages: Messages,
  18. api_key: str = None,
  19. stream: bool = False,
  20. **kwargs
  21. ) -> AsyncResult:
  22. headers = {
  23. "Accept": "*/*",
  24. "Accept-Language": "en-US,en;q=0.9",
  25. "Authorization": f"Bearer {api_key}",
  26. "Content-Type": "application/json",
  27. "Origin": cls.url,
  28. "Referer": f"{cls.url}/chat",
  29. "User-Agent": "Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/131.0.0.0 Safari/537.36"
  30. }
  31. return super().create_async_generator(
  32. model=model,
  33. messages=messages,
  34. api_key=api_key,
  35. stream=stream,
  36. headers=headers,
  37. **kwargs
  38. )