# PerplexityLabs.py
  1. from __future__ import annotations
  2. import random
  3. import json
  4. from aiohttp import ClientSession, BaseConnector
  5. from ..typing import AsyncResult, Messages
  6. from .base_provider import AsyncGeneratorProvider, ProviderModelMixin
  7. from .helper import get_connector
  8. API_URL = "https://labs-api.perplexity.ai/socket.io/"
  9. WS_URL = "wss://labs-api.perplexity.ai/socket.io/"
  10. class PerplexityLabs(AsyncGeneratorProvider, ProviderModelMixin):
  11. url = "https://labs.perplexity.ai"
  12. working = True
  13. default_model = 'pplx-70b-online'
  14. models = [
  15. 'pplx-7b-online', 'pplx-70b-online', 'pplx-7b-chat', 'pplx-70b-chat', 'mistral-7b-instruct',
  16. 'codellama-34b-instruct', 'llama-2-70b-chat', 'llava-7b-chat', 'mixtral-8x7b-instruct',
  17. 'mistral-medium', 'related'
  18. ]
  19. model_aliases = {
  20. "mistralai/Mistral-7B-Instruct-v0.1": "mistral-7b-instruct",
  21. "meta-llama/Llama-2-70b-chat-hf": "llama-2-70b-chat",
  22. "mistralai/Mixtral-8x7B-Instruct-v0.1": "mixtral-8x7b-instruct",
  23. "codellama/CodeLlama-34b-Instruct-hf": "codellama-34b-instruct"
  24. }
  25. @classmethod
  26. async def create_async_generator(
  27. cls,
  28. model: str,
  29. messages: Messages,
  30. proxy: str = None,
  31. connector: BaseConnector = None,
  32. **kwargs
  33. ) -> AsyncResult:
  34. headers = {
  35. "User-Agent": "Mozilla/5.0 (X11; Ubuntu; Linux x86_64; rv:121.0) Gecko/20100101 Firefox/121.0",
  36. "Accept": "*/*",
  37. "Accept-Language": "de,en-US;q=0.7,en;q=0.3",
  38. "Accept-Encoding": "gzip, deflate, br",
  39. "Origin": cls.url,
  40. "Connection": "keep-alive",
  41. "Referer": f"{cls.url}/",
  42. "Sec-Fetch-Dest": "empty",
  43. "Sec-Fetch-Mode": "cors",
  44. "Sec-Fetch-Site": "same-site",
  45. "TE": "trailers",
  46. }
  47. async with ClientSession(headers=headers, connector=get_connector(connector, proxy)) as session:
  48. t = format(random.getrandbits(32), '08x')
  49. async with session.get(
  50. f"{API_URL}?EIO=4&transport=polling&t={t}"
  51. ) as response:
  52. text = await response.text()
  53. sid = json.loads(text[1:])['sid']
  54. post_data = '40{"jwt":"anonymous-ask-user"}'
  55. async with session.post(
  56. f'{API_URL}?EIO=4&transport=polling&t={t}&sid={sid}',
  57. data=post_data
  58. ) as response:
  59. assert await response.text() == 'OK'
  60. async with session.ws_connect(f'{WS_URL}?EIO=4&transport=websocket&sid={sid}', autoping=False) as ws:
  61. await ws.send_str('2probe')
  62. assert(await ws.receive_str() == '3probe')
  63. await ws.send_str('5')
  64. assert(await ws.receive_str())
  65. assert(await ws.receive_str() == '6')
  66. message_data = {
  67. 'version': '2.2',
  68. 'source': 'default',
  69. 'model': cls.get_model(model),
  70. 'messages': messages
  71. }
  72. await ws.send_str('42' + json.dumps(['perplexity_labs', message_data]))
  73. last_message = 0
  74. while True:
  75. message = await ws.receive_str()
  76. if message == '2':
  77. await ws.send_str('3')
  78. continue
  79. try:
  80. data = json.loads(message[2:])[1]
  81. yield data["output"][last_message:]
  82. last_message = len(data["output"])
  83. if data["final"]:
  84. break
  85. except:
  86. raise RuntimeError(f"Message: {message}")