Glider.py

from __future__ import annotations

import json
from aiohttp import ClientSession

from ..typing import AsyncResult, Messages
from .base_provider import AsyncGeneratorProvider, ProviderModelMixin
from ..requests.raise_for_status import raise_for_status
from ..providers.response import FinishReason, Reasoning
from .helper import format_prompt
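

# Async provider for the glider.so chat API; responses stream back as server-sent events.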
class Glider(AsyncGeneratorProvider, ProviderModelMixin):
    label = "Glider"
    url = "https://glider.so"
    api_endpoint = "https://glider.so/api/chat"

    working = True
    needs_auth = False
    supports_stream = True
    supports_system_message = True
    supports_message_history = True

    default_model = 'chat-llama-3-1-70b'
    models = [
        'chat-llama-3-1-70b',
        'chat-llama-3-1-8b',
        'chat-llama-3-2-3b',
        'deepseek-ai/DeepSeek-R1'
    ]

    model_aliases = {
        "llama-3.1-70b": "chat-llama-3-1-70b",
        "llama-3.1-8b": "chat-llama-3-1-8b",
        "llama-3.2-3b": "chat-llama-3-2-3b",
        "deepseek-r1": "deepseek-ai/DeepSeek-R1",
    }

    @classmethod
    async def create_async_generator(
        cls,
        model: str,
        messages: Messages,
        proxy: str = None,
        **kwargs
    ) -> AsyncResult:
        model = cls.get_model(model)
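
        # Browser-like headers sent with the API request.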
        headers = {
            "accept": "*/*",
            "accept-language": "en-US,en;q=0.9",
            "content-type": "application/json",
            "origin": cls.url,
            "referer": f"{cls.url}/",
            "user-agent": "Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/125.0.0.0 Safari/537.36"
        }

        async with ClientSession(headers=headers) as session:
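            # The whole conversation is flattened into a single user message via format_prompt.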
            data = {
                "messages": [{
                    "role": "user",
                    "content": format_prompt(messages),
                    "id": "",
                    "chatId": "",
                    "createdOn": "",
                    "model": None
                }],
                "model": model
            }

            async with session.post(cls.api_endpoint, json=data, proxy=proxy) as response:
                await raise_for_status(response)
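
                # The response streams server-sent events: "data: {json}" lines, ending with "data: [DONE]".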
                async for chunk in response.content:
                    if not chunk:
                        continue

                    text = chunk.decode(errors="ignore")
                    if not text.startswith("data: "):
                        continue

                    if "[DONE]" in text:
                        yield FinishReason("stop")
                        return

                    try:
                        json_data = json.loads(text[6:])
                        content = json_data["choices"][0].get("delta", {}).get("content", "")
                        if content:
                            yield content
                    except json.JSONDecodeError:
                        continue
                    except Exception:
                        yield FinishReason("error")
                        return
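

if __name__ == "__main__":
    # Illustrative usage sketch only (not part of the original provider):
    # iterate the generator directly and print the streamed text deltas.
    # The relative imports above mean this file must be run as part of its
    # package (e.g. via `python -m`), not as a standalone script.
    import asyncio

    async def _demo() -> None:
        messages = [{"role": "user", "content": "Hello!"}]
        async for chunk in Glider.create_async_generator("llama-3.1-8b", messages):
            # Plain strings are content deltas; FinishReason objects mark the end of the stream.
            if isinstance(chunk, str):
                print(chunk, end="", flush=True)
        print()

    asyncio.run(_demo())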