Lockchat.py

from __future__ import annotations

import json

import requests

from ...typing import Any, CreateResult
from ..base_provider import AbstractProvider


class Lockchat(AbstractProvider):
    url: str              = "http://supertest.lockchat.app"
    supports_stream       = True
    supports_gpt_35_turbo = True
    supports_gpt_4        = True

    @staticmethod
    def create_completion(
        model: str,
        messages: list[dict[str, str]],
        stream: bool, **kwargs: Any) -> CreateResult:

        # Pop the temperature so it is not passed twice on a retry.
        temperature = float(kwargs.pop("temperature", 0.7))

        payload = {
            "temperature": temperature,
            "messages"   : messages,
            "model"      : model,
            "stream"     : True,
        }
        headers = {
            "user-agent": "ChatX/39 CFNetwork/1408.0.4 Darwin/22.5.0",
        }

        response = requests.post(f"{Lockchat.url}/v1/chat/completions",
                                 json=payload, headers=headers, stream=True)
        response.raise_for_status()

        for token in response.iter_lines():
            # The upstream API sometimes rejects gpt-4; retry the request and
            # forward the retried stream instead of discarding its result.
            if b"The model: `gpt-4` does not exist" in token:
                print("error, retrying...")
                yield from Lockchat.create_completion(
                    model       = model,
                    messages    = messages,
                    stream      = stream,
                    temperature = temperature,
                    **kwargs)
                return

            # Stream chunks arrive as "data: {...}" SSE lines; extract the delta text.
            if b"content" in token:
                token = json.loads(token.decode("utf-8").split("data: ")[1])
                token = token["choices"][0]["delta"].get("content")
                if token:
                    yield token
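

# Usage sketch (illustrative only; the model name is an example, and this module
# must be imported through its parent package, since the relative imports above
# cannot resolve when the file is run standalone):
#
#     messages = [{"role": "user", "content": "Hello"}]
#     for chunk in Lockchat.create_completion("gpt-3.5-turbo", messages, stream=True):
#         print(chunk, end="", flush=True)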