diff --git a/backend/app/rag/llms/provider.py b/backend/app/rag/llms/provider.py
index d061c08b..b003c90a 100644
--- a/backend/app/rag/llms/provider.py
+++ b/backend/app/rag/llms/provider.py
@@ -119,7 +119,7 @@ class LLMProviderOption(BaseModel):
     ),
     LLMProviderOption(
         provider=LLMProvider.ANTHROPIC_VERTEX,
-        provider_display_name="Anthropic Vertex AI",
+        provider_display_name="Anthropic Vertex AI (Deprecated)",
         provider_description="Anthropic's Claude models are now generally available through Vertex AI.",
         provider_url="https://docs.anthropic.com/en/api/claude-on-vertex-ai",
         default_llm_model="claude-3-5-sonnet@20241022",
diff --git a/backend/app/utils/dspy.py b/backend/app/utils/dspy.py
index 4946c92c..81031ab7 100644
--- a/backend/app/utils/dspy.py
+++ b/backend/app/utils/dspy.py
@@ -75,7 +75,10 @@ def get_dspy_lm_by_llama_llm(llama_llm: BaseLLM) -> dspy.LM:
             "Bedrock model " + llama_llm.model + " is not supported by dspy."
         )
     elif type(llama_llm) is Vertex:
-        raise ValueError("Vertex is not supported by dspy.")
+        return dspy.GoogleVertexAI(
+            model=llama_llm.model,
+            max_output_tokens=llama_llm.max_tokens or 8192,
+        )
     elif type(llama_llm) is Ollama:
         return DspyOllamaLocal(
             model=llama_llm.model,
diff --git a/frontend/app/src/pages/docs/llm.mdx b/frontend/app/src/pages/docs/llm.mdx
index 2d87fd1b..cefcdfaa 100644
--- a/frontend/app/src/pages/docs/llm.mdx
+++ b/frontend/app/src/pages/docs/llm.mdx
@@ -34,13 +34,9 @@
 To learn more about OpenAI, please visit [OpenAI](https://platform.openai.com/).
 
 To learn more about Google Gemini, please visit [Google Gemini](https://gemini.google.com/).
 
-### Anthropic Vertex AI
-
-To learn more about Anthropic Vertex AI, please visit [Anthropic Vertex AI](https://cloud.google.com/vertex-ai/generative-ai/docs/partner-models/use-claude)
-
 ### Vertex AI
 
-To learn more about Vertex AI, please visit [Vertex AI](https://cloud.google.com/vertex-ai)
+To learn more about Vertex AI, please visit [Vertex AI](https://cloud.google.com/vertex-ai).
 
 ### Amazon Bedrock
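
With this change, `get_dspy_lm_by_llama_llm` maps a llama-index `Vertex` LLM to a `dspy.GoogleVertexAI` wrapper instead of raising `ValueError`. Below is a minimal sketch of how that branch would be exercised. It assumes the helper is importable as `app.utils.dspy` per the diff's file path, that the `llama-index-llms-vertex` integration is installed, and it uses placeholder model/project values; it is an illustration, not part of the PR.

```python
# Minimal sketch, not part of the PR. Assumes the backend package layout
# shown in the diff (app.utils.dspy) and the llama-index Vertex integration
# (llama-index-llms-vertex) are installed; model/project values are placeholders.
import dspy
from llama_index.llms.vertex import Vertex

from app.utils.dspy import get_dspy_lm_by_llama_llm

# Build a llama-index Vertex LLM (hypothetical project id and model name).
llama_llm = Vertex(
    model="gemini-1.5-pro",
    project="my-gcp-project",
    max_tokens=4096,
)

# Previously this raised ValueError("Vertex is not supported by dspy.");
# with this change the Vertex branch returns a dspy.GoogleVertexAI wrapper
# for the same model, capping output tokens at max_tokens (or 8192).
dspy_lm = get_dspy_lm_by_llama_llm(llama_llm)

# The returned LM can then be configured as dspy's default language model.
dspy.settings.configure(lm=dspy_lm)
```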