Merge pull request #788 from SoulSniper-V2/feat/add-deepseek-docs
docs(llm): add DeepSeek models support documentation and examples
This commit is contained in:
@@ -331,7 +331,7 @@ No. Aden is built from the ground up with no dependencies on LangChain, CrewAI,

 **Q: What LLM providers does Aden support?**

-Aden supports 100+ LLM providers through LiteLLM integration, including OpenAI (GPT-4, GPT-4o), Anthropic (Claude models), Google Gemini, Mistral, Groq, and many more. Simply set the appropriate API key environment variable and specify the model name.
+Aden supports 100+ LLM providers through LiteLLM integration, including OpenAI (GPT-4, GPT-4o), Anthropic (Claude models), Google Gemini, DeepSeek, Mistral, Groq, and many more. Simply set the appropriate API key environment variable and specify the model name.

 **Q: Can I use Aden with local AI models like Ollama?**
@@ -27,6 +27,7 @@ class LiteLLMProvider(LLMProvider):

     - OpenAI: gpt-4o, gpt-4o-mini, gpt-4-turbo, gpt-3.5-turbo
     - Anthropic: claude-3-opus, claude-3-sonnet, claude-3-haiku
     - Google: gemini-pro, gemini-1.5-pro, gemini-1.5-flash
+    - DeepSeek: deepseek-chat, deepseek-coder, deepseek-reasoner
     - Mistral: mistral-large, mistral-medium, mistral-small
     - Groq: llama3-70b, mixtral-8x7b
     - Local: ollama/llama3, ollama/mistral
@@ -42,6 +43,9 @@ class LiteLLMProvider(LLMProvider):

         # Google Gemini
         provider = LiteLLMProvider(model="gemini/gemini-1.5-flash")

+        # DeepSeek
+        provider = LiteLLMProvider(model="deepseek/deepseek-chat")
+
         # Local Ollama
         provider = LiteLLMProvider(model="ollama/llama3")
@@ -34,6 +34,12 @@ class TestLiteLLMProviderInit:

         provider = LiteLLMProvider(model="claude-3-haiku-20240307")
         assert provider.model == "claude-3-haiku-20240307"

+    def test_init_deepseek_model(self):
+        """Test initialization with DeepSeek model."""
+        with patch.dict(os.environ, {"DEEPSEEK_API_KEY": "test-key"}):
+            provider = LiteLLMProvider(model="deepseek/deepseek-chat")
+            assert provider.model == "deepseek/deepseek-chat"
+
     def test_init_with_api_key(self):
         """Test initialization with explicit API key."""
         provider = LiteLLMProvider(model="gpt-4o-mini", api_key="my-api-key")
Reference in New Issue
Block a user