Groq.py

from __future__ import annotations

from .OpenaiAPI import OpenaiAPI
from ...typing import AsyncResult, Messages


class Groq(OpenaiAPI):
    """OpenAI-compatible provider that routes requests to Groq's API."""
    label = "Groq"
    url = "https://console.groq.com/playground"
    working = True
    default_model = "mixtral-8x7b-32768"
    models = [
        "distil-whisper-large-v3-en",
        "gemma2-9b-it",
        "gemma-7b-it",
        "llama3-groq-70b-8192-tool-use-preview",
        "llama3-groq-8b-8192-tool-use-preview",
        "llama-3.1-70b-versatile",
        "llama-3.1-8b-instant",
        "llama-3.2-1b-preview",
        "llama-3.2-3b-preview",
        "llama-3.2-11b-vision-preview",
        "llama-3.2-90b-vision-preview",
        "llama-guard-3-8b",
        "llava-v1.5-7b-4096-preview",
        "llama3-70b-8192",
        "llama3-8b-8192",
        "mixtral-8x7b-32768",
        "whisper-large-v3",
        "whisper-large-v3-turbo",
    ]
    # Short aliases mapped to the full model identifiers expected by the API.
    model_aliases = {"mixtral-8x7b": "mixtral-8x7b-32768", "llama2-70b": "llama2-70b-4096"}

    @classmethod
    def create_async_generator(
        cls,
        model: str,
        messages: Messages,
        api_base: str = "https://api.groq.com/openai/v1",
        **kwargs
    ) -> AsyncResult:
        # Delegate to the OpenAI-compatible base class, pointing it at Groq's endpoint.
        return super().create_async_generator(
            model, messages, api_base=api_base, **kwargs
        )
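
Below is a minimal usage sketch, separate from the file above. It assumes the provider is importable as g4f.Provider.Groq and that the OpenaiAPI base forwards an api_key keyword to the underlying OpenAI-compatible request; adjust the import path and credentials to your installation.

import asyncio

from g4f.Provider import Groq  # assumed import path for this provider


async def main() -> None:
    messages = [{"role": "user", "content": "Say hello in one sentence."}]
    # create_async_generator returns an async generator that yields
    # response chunks as they stream back from Groq's endpoint.
    async for chunk in Groq.create_async_generator(
        "mixtral-8x7b-32768",
        messages,
        api_key="YOUR_GROQ_API_KEY",  # assumption: passed through **kwargs to the base class
    ):
        print(chunk, end="", flush=True)


asyncio.run(main())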