# BlackboxAPI.py
  1. from __future__ import annotations
  2. from aiohttp import ClientSession
  3. from ..typing import AsyncResult, Messages
  4. from .base_provider import AsyncGeneratorProvider, ProviderModelMixin
  5. from ..requests.raise_for_status import raise_for_status
  6. class BlackboxAPI(AsyncGeneratorProvider, ProviderModelMixin):
  7. label = "Blackbox AI API"
  8. url = "https://api.blackbox.ai"
  9. api_endpoint = "https://api.blackbox.ai/api/chat"
  10. working = True
  11. needs_auth = False
  12. supports_stream = False
  13. supports_system_message = True
  14. supports_message_history = True
  15. default_model = 'deepseek-ai/DeepSeek-V3'
  16. models = [
  17. default_model,
  18. 'deepseek-ai/DeepSeek-R1',
  19. 'mistralai/Mistral-Small-24B-Instruct-2501',
  20. 'deepseek-ai/deepseek-llm-67b-chat',
  21. 'databricks/dbrx-instruct',
  22. 'Qwen/QwQ-32B-Preview',
  23. 'NousResearch/Nous-Hermes-2-Mixtral-8x7B-DPO'
  24. ]
  25. model_aliases = {
  26. "deepseek-v3": "deepseek-ai/DeepSeek-V3",
  27. "deepseek-r1": "deepseek-ai/DeepSeek-R1",
  28. "deepseek-chat": "deepseek-ai/deepseek-llm-67b-chat",
  29. "mixtral-small-28b": "mistralai/Mistral-Small-24B-Instruct-2501",
  30. "dbrx-instruct": "databricks/dbrx-instruct",
  31. "qwq-32b": "Qwen/QwQ-32B-Preview",
  32. "hermes-2-dpo": "NousResearch/Nous-Hermes-2-Mixtral-8x7B-DPO",
  33. }
  34. @classmethod
  35. async def create_async_generator(
  36. cls,
  37. model: str,
  38. messages: Messages,
  39. proxy: str = None,
  40. max_tokens: str = None,
  41. **kwargs
  42. ) -> AsyncResult:
  43. model = cls.get_model(model)
  44. headers = {
  45. "Content-Type": "application/json",
  46. }
  47. async with ClientSession(headers=headers) as session:
  48. data = {
  49. "messages": messages,
  50. "model": model,
  51. "max_tokens": max_tokens
  52. }
  53. async with session.post(cls.api_endpoint, json=data, proxy=proxy) as response:
  54. await raise_for_status(response)
  55. async for chunk in response.content:
  56. if not chunk:
  57. continue
  58. text = chunk.decode(errors='ignore')
  59. try:
  60. if text:
  61. yield text
  62. except Exception as e:
  63. return