from __future__ import annotations

import uuid

from aiohttp import ClientSession, BaseConnector

from ..typing import AsyncResult, Messages
from ..requests import raise_for_status
from .base_provider import AsyncGeneratorProvider, ProviderModelMixin
from .helper import get_connector
  8. models = {
  9. "gpt-4o-mini-free": {
  10. "id": "gpt-4o-mini-free",
  11. "name": "GPT-4o-Mini-Free",
  12. "model": "ChatGPT",
  13. "provider": "OpenAI",
  14. "maxLength": 31200,
  15. "tokenLimit": 7800,
  16. "context": "8K",
  17. },
  18. "gpt-4o-mini": {
  19. "id": "gpt-4o-mini",
  20. "name": "GPT-4o-Mini",
  21. "model": "ChatGPT",
  22. "provider": "OpenAI",
  23. "maxLength": 260000,
  24. "tokenLimit": 126000,
  25. "context": "128K",
  26. },
  27. "gpt-4o-free": {
  28. "id": "gpt-4o-free",
  29. "name": "GPT-4o-free",
  30. "model": "ChatGPT",
  31. "provider": "OpenAI",
  32. "maxLength": 31200,
  33. "tokenLimit": 7800,
  34. "context": "8K",
  35. },
  36. "gpt-4o-2024-08-06": {
  37. "id": "gpt-4o-2024-08-06",
  38. "name": "GPT-4o",
  39. "model": "ChatGPT",
  40. "provider": "OpenAI",
  41. "maxLength": 260000,
  42. "tokenLimit": 126000,
  43. "context": "128K",
  44. },
  45. "gpt-4-turbo-2024-04-09": {
  46. "id": "gpt-4-turbo-2024-04-09",
  47. "name": "GPT-4-Turbo",
  48. "model": "ChatGPT",
  49. "provider": "OpenAI",
  50. "maxLength": 260000,
  51. "tokenLimit": 126000,
  52. "context": "128K",
  53. },
  54. "grok-beta": {
  55. "id": "grok-beta",
  56. "name": "Grok-Beta",
  57. "model": "Grok",
  58. "provider": "x.ai",
  59. "maxLength": 400000,
  60. "tokenLimit": 100000,
  61. "context": "100K",
  62. },
  63. "grok-2": {
  64. "id": "grok-2",
  65. "name": "Grok-2",
  66. "model": "Grok",
  67. "provider": "x.ai",
  68. "maxLength": 400000,
  69. "tokenLimit": 100000,
  70. "context": "100K",
  71. },
  72. "grok-2-mini": {
  73. "id": "grok-2-mini",
  74. "name": "Grok-2-mini",
  75. "model": "Grok",
  76. "provider": "x.ai",
  77. "maxLength": 400000,
  78. "tokenLimit": 100000,
  79. "context": "100K",
  80. },
  81. "claude-3-opus-20240229": {
  82. "id": "claude-3-opus-20240229",
  83. "name": "Claude-3-Opus",
  84. "model": "Claude",
  85. "provider": "Anthropic",
  86. "maxLength": 800000,
  87. "tokenLimit": 200000,
  88. "context": "200K",
  89. },
  90. "claude-3-opus-20240229-aws": {
  91. "id": "claude-3-opus-20240229-aws",
  92. "name": "Claude-3-Opus-Aws",
  93. "model": "Claude",
  94. "provider": "Anthropic",
  95. "maxLength": 800000,
  96. "tokenLimit": 200000,
  97. "context": "200K",
  98. },
  99. "claude-3-5-sonnet-20240620": {
  100. "id": "claude-3-5-sonnet-20240620",
  101. "name": "Claude-3.5-Sonnet",
  102. "model": "Claude",
  103. "provider": "Anthropic",
  104. "maxLength": 800000,
  105. "tokenLimit": 200000,
  106. "context": "200K",
  107. },
  108. "claude-3-5-sonnet-20241022": {
  109. "id": "claude-3-5-sonnet-20241022",
  110. "name": "Claude-3.5-Sonnet-V2",
  111. "model": "Claude",
  112. "provider": "Anthropic",
  113. "maxLength": 800000,
  114. "tokenLimit": 200000,
  115. "context": "200K",
  116. },
  117. "claude-3-sonnet-20240229": {
  118. "id": "claude-3-sonnet-20240229",
  119. "name": "Claude-3-Sonnet",
  120. "model": "Claude",
  121. "provider": "Anthropic",
  122. "maxLength": 800000,
  123. "tokenLimit": 200000,
  124. "context": "200K",
  125. },
  126. "claude-3-haiku-20240307": {
  127. "id": "claude-3-haiku-20240307",
  128. "name": "Claude-3-Haiku",
  129. "model": "Claude",
  130. "provider": "Anthropic",
  131. "maxLength": 800000,
  132. "tokenLimit": 200000,
  133. "context": "200K",
  134. },
  135. "claude-2.1": {
  136. "id": "claude-2.1",
  137. "name": "Claude-2.1-200k",
  138. "model": "Claude",
  139. "provider": "Anthropic",
  140. "maxLength": 800000,
  141. "tokenLimit": 200000,
  142. "context": "200K",
  143. },
  144. "gemini-1.5-flash-002": {
  145. "id": "gemini-1.5-flash-002",
  146. "name": "Gemini-1.5-Flash-1M",
  147. "model": "Gemini",
  148. "provider": "Google",
  149. "maxLength": 4000000,
  150. "tokenLimit": 1000000,
  151. "context": "1024K",
  152. },
  153. "gemini-1.5-pro-002": {
  154. "id": "gemini-1.5-pro-002",
  155. "name": "Gemini-1.5-Pro-1M",
  156. "model": "Gemini",
  157. "provider": "Google",
  158. "maxLength": 4000000,
  159. "tokenLimit": 1000000,
  160. "context": "1024K",
  161. },
  162. }
  163. class Liaobots(AsyncGeneratorProvider, ProviderModelMixin):
  164. url = "https://liaobots.site"
  165. working = True
  166. supports_message_history = True
  167. supports_system_message = True
  168. default_model = "gpt-4o-2024-08-06"
  169. models = list(models.keys())
  170. model_aliases = {
  171. "gpt-4o-mini": "gpt-4o-mini-free",
  172. "gpt-4o": "gpt-4o-free",
  173. "gpt-4o": "gpt-4o-2024-08-06",
  174. "gpt-4-turbo": "gpt-4-turbo-2024-04-09",
  175. "gpt-4": "gpt-4o-mini-free",
  176. "claude-3-opus": "claude-3-opus-20240229",
  177. "claude-3-opus": "claude-3-opus-20240229-aws",
  178. "claude-3-sonnet": "claude-3-sonnet-20240229",
  179. "claude-3.5-sonnet": "claude-3-5-sonnet-20240620",
  180. "claude-3.5-sonnet": "claude-3-5-sonnet-20241022",
  181. "claude-3-haiku": "claude-3-haiku-20240307",
  182. "claude-2.1": "claude-2.1",
  183. "gemini-flash": "gemini-1.5-flash-002",
  184. "gemini-pro": "gemini-1.5-pro-002",
  185. }
  186. _auth_code = ""
  187. _cookie_jar = None
  188. @classmethod
  189. def get_model(cls, model: str) -> str:
  190. """
  191. Retrieve the internal model identifier based on the provided model name or alias.
  192. """
  193. if model in cls.model_aliases:
  194. model = cls.model_aliases[model]
  195. if model not in models:
  196. raise ValueError(f"Model '{model}' is not supported.")
  197. return model
  198. @classmethod
  199. def is_supported(cls, model: str) -> bool:
  200. """
  201. Check if the given model is supported.
  202. """
  203. return model in models or model in cls.model_aliases
  204. @classmethod
  205. async def create_async_generator(
  206. cls,
  207. model: str,
  208. messages: Messages,
  209. auth: str = None,
  210. proxy: str = None,
  211. connector: BaseConnector = None,
  212. **kwargs
  213. ) -> AsyncResult:
  214. model = cls.get_model(model)
  215. headers = {
  216. "authority": "liaobots.com",
  217. "content-type": "application/json",
  218. "origin": cls.url,
  219. "referer": f"{cls.url}/",
  220. "user-agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/112.0.0.0 Safari/537.36",
  221. }
  222. async with ClientSession(
  223. headers=headers,
  224. cookie_jar=cls._cookie_jar,
  225. connector=get_connector(connector, proxy, True)
  226. ) as session:
  227. data = {
  228. "conversationId": str(uuid.uuid4()),
  229. "model": models[model],
  230. "messages": messages,
  231. "key": "",
  232. "prompt": kwargs.get("system_message", "You are a helpful assistant."),
  233. }
  234. if not cls._auth_code:
  235. async with session.post(
  236. "https://liaobots.work/recaptcha/api/login",
  237. data={"token": "abcdefghijklmnopqrst"},
  238. verify_ssl=False
  239. ) as response:
  240. await raise_for_status(response)
  241. try:
  242. async with session.post(
  243. "https://liaobots.work/api/user",
  244. json={"authcode": cls._auth_code},
  245. verify_ssl=False
  246. ) as response:
  247. await raise_for_status(response)
  248. cls._auth_code = (await response.json(content_type=None))["authCode"]
  249. if not cls._auth_code:
  250. raise RuntimeError("Empty auth code")
  251. cls._cookie_jar = session.cookie_jar
  252. async with session.post(
  253. "https://liaobots.work/api/chat",
  254. json=data,
  255. headers={"x-auth-code": cls._auth_code},
  256. verify_ssl=False
  257. ) as response:
  258. await raise_for_status(response)
  259. async for chunk in response.content.iter_any():
  260. if b"<html coupert-item=" in chunk:
  261. raise RuntimeError("Invalid session")
  262. if chunk:
  263. yield chunk.decode(errors="ignore")
  264. except:
  265. async with session.post(
  266. "https://liaobots.work/api/user",
  267. json={"authcode": "pTIQr4FTnVRfr"},
  268. verify_ssl=False
  269. ) as response:
  270. await raise_for_status(response)
  271. cls._auth_code = (await response.json(content_type=None))["authCode"]
  272. if not cls._auth_code:
  273. raise RuntimeError("Empty auth code")
  274. cls._cookie_jar = session.cookie_jar
  275. async with session.post(
  276. "https://liaobots.work/api/chat",
  277. json=data,
  278. headers={"x-auth-code": cls._auth_code},
  279. verify_ssl=False
  280. ) as response:
  281. await raise_for_status(response)
  282. async for chunk in response.content.iter_any():
  283. if b"<html coupert-item=" in chunk:
  284. raise RuntimeError("Invalid session")
  285. if chunk:
  286. yield chunk.decode(errors="ignore")
  287. @classmethod
  288. async def initialize_auth_code(cls, session: ClientSession) -> None:
  289. """
  290. Initialize the auth code by making the necessary login requests.
  291. """
  292. async with session.post(
  293. "https://liaobots.work/api/user",
  294. json={"authcode": "pTIQr4FTnVRfr"},
  295. verify_ssl=False
  296. ) as response:
  297. await raise_for_status(response)
  298. cls._auth_code = (await response.json(content_type=None))["authCode"]
  299. if not cls._auth_code:
  300. raise RuntimeError("Empty auth code")
  301. cls._cookie_jar = session.cookie_jar
  302. @classmethod
  303. async def ensure_auth_code(cls, session: ClientSession) -> None:
  304. """
  305. Ensure the auth code is initialized, and if not, perform the initialization.
  306. """
  307. if not cls._auth_code:
  308. await cls.initialize_auth_code(session)