Coverage for mindsdb / interfaces / agents / provider_utils.py: 9%
19 statements
« prev ^ index » next coverage.py v7.13.1, created at 2026-01-21 00:36 +0000
1"""Utilities for working with agent providers.
3These helpers are intentionally free of heavy optional dependencies so they can
4be imported in lightweight builds where LangChain is not installed.
5"""
7from typing import Dict
9from mindsdb.interfaces.agents.constants import (
10 ANTHROPIC_CHAT_MODELS,
11 GOOGLE_GEMINI_CHAT_MODELS,
12 NVIDIA_NIM_CHAT_MODELS,
13 OLLAMA_CHAT_MODELS,
14 OPEN_AI_CHAT_MODELS,
15 WRITER_CHAT_MODELS,
16)
def get_llm_provider(args: Dict) -> str:
    """Infer the LLM provider from the supplied arguments.

    An explicitly supplied ``provider`` key always takes precedence; otherwise
    the provider is inferred by looking the ``model_name`` up in each known
    model catalogue, first match winning.

    Raises:
        ValueError: if no provider is given and the model name is not found
            in any supported catalogue.
    """
    # EAFP: an explicit provider wins, whatever its value (even None/"").
    try:
        return args["provider"]
    except KeyError:
        pass

    model_name = args.get("model_name")

    # Each known model catalogue paired with its provider label.
    catalogues = (
        (ANTHROPIC_CHAT_MODELS, "anthropic"),
        (OPEN_AI_CHAT_MODELS, "openai"),
        (OLLAMA_CHAT_MODELS, "ollama"),
        (NVIDIA_NIM_CHAT_MODELS, "nvidia_nim"),
        (GOOGLE_GEMINI_CHAT_MODELS, "google"),
        (WRITER_CHAT_MODELS, "writer"),
    )
    for models, provider in catalogues:
        if model_name in models:
            return provider

    raise ValueError("Invalid model name. Please define a supported llm provider")