
Commit

Merge pull request #721 from arc53/feature/anthropic
anthropic LLM
pabik authored Oct 28, 2023
2 parents 1bee088 + 04b4001 commit 71fdff1
Showing 6 changed files with 105 additions and 1 deletion.
2 changes: 2 additions & 0 deletions application/api/answer/routes.py
@@ -29,6 +29,8 @@

if settings.LLM_NAME == "gpt4":
    gpt_model = 'gpt-4'
elif settings.LLM_NAME == "anthropic":
    gpt_model = 'claude-2'
else:
    gpt_model = 'gpt-3.5-turbo'

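The branch above picks a default model per backend: LLM_NAME=anthropic maps to claude-2, gpt4 to gpt-4, and anything else falls back to gpt-3.5-turbo. A minimal sketch of the same mapping as a lookup table (the dict and helper names here are illustrative, not part of the patch):

# Illustrative only: mirrors the if/elif/else above.
_DEFAULT_MODELS = {
    "gpt4": "gpt-4",
    "anthropic": "claude-2",
}

def default_model_for(llm_name: str) -> str:
    # Any other LLM_NAME falls back to gpt-3.5-turbo, matching the else branch.
    return _DEFAULT_MODELS.get(llm_name, "gpt-3.5-turbo")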
40 changes: 40 additions & 0 deletions application/llm/anthropic.py
@@ -0,0 +1,40 @@
from application.llm.base import BaseLLM
from application.core.settings import settings

class AnthropicLLM(BaseLLM):

    def __init__(self, api_key=None):
        from anthropic import Anthropic, HUMAN_PROMPT, AI_PROMPT
        self.api_key = api_key or settings.ANTHROPIC_API_KEY  # If not provided, use a default from settings
        self.anthropic = Anthropic(api_key=self.api_key)
        self.HUMAN_PROMPT = HUMAN_PROMPT
        self.AI_PROMPT = AI_PROMPT

    def gen(self, model, messages, engine=None, max_tokens=300, stream=False, **kwargs):
        context = messages[0]['content']
        user_question = messages[-1]['content']
        prompt = f"### Context \n {context} \n ### Question \n {user_question}"
        if stream:
            # gen_stream derives its own prompt from the messages list
            return self.gen_stream(model, messages, max_tokens=max_tokens, **kwargs)

        completion = self.anthropic.completions.create(
            model=model,
            max_tokens_to_sample=max_tokens,
            stream=stream,
            prompt=f"{self.HUMAN_PROMPT} {prompt}{self.AI_PROMPT}",
        )
        return completion.completion

    def gen_stream(self, model, messages, engine=None, max_tokens=300, **kwargs):
        context = messages[0]['content']
        user_question = messages[-1]['content']
        prompt = f"### Context \n {context} \n ### Question \n {user_question}"
        stream_response = self.anthropic.completions.create(
            model=model,
            prompt=f"{self.HUMAN_PROMPT} {prompt}{self.AI_PROMPT}",
            max_tokens_to_sample=max_tokens,
            stream=True,
        )

        for completion in stream_response:
            yield completion.completion
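As written, the class expects a messages list whose first entry holds the retrieved context and whose last entry holds the user question; gen returns the full completion, while gen_stream yields chunks. A minimal usage sketch, assuming a valid Anthropic key (the key value and the claude-2 model name are placeholders; claude-2 matches the default chosen in routes.py above):

from application.llm.anthropic import AnthropicLLM

llm = AnthropicLLM(api_key="sk-ant-...")  # omit to fall back to settings.ANTHROPIC_API_KEY

messages = [
    {"content": "Paris is the capital of France."},  # retrieved context
    {"content": "What is the capital of France?"},   # user question
]

# Blocking call: returns the completion text in one piece.
answer = llm.gen("claude-2", messages)

# Streaming call: yields completion chunks as they arrive.
for chunk in llm.gen_stream("claude-2", messages):
    print(chunk, end="", flush=True)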
4 changes: 3 additions & 1 deletion application/llm/llm_creator.py
@@ -2,6 +2,7 @@
from application.llm.sagemaker import SagemakerAPILLM
from application.llm.huggingface import HuggingFaceLLM
from application.llm.llama_cpp import LlamaCpp
from application.llm.anthropic import AnthropicLLM



@@ -11,7 +12,8 @@ class LLMCreator:
        'azure_openai': AzureOpenAILLM,
        'sagemaker': SagemakerAPILLM,
        'huggingface': HuggingFaceLLM,
        'llama.cpp': LlamaCpp,
        'anthropic': AnthropicLLM
    }

    @classmethod
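With the registry entry in place, the 'anthropic' key resolves to AnthropicLLM. The factory classmethod itself sits outside this hunk, so the snippet below is a hypothetical sketch of how the registry might be consumed; the create_llm name and its arguments are assumptions, not taken from the diff:

from application.core.settings import settings
from application.llm.llm_creator import LLMCreator

# Hypothetical call: assumes a classmethod that looks LLM_NAME up in the
# registry shown above and instantiates the matching class.
llm = LLMCreator.create_llm(settings.LLM_NAME, api_key=settings.API_KEY)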
1 change: 1 addition & 0 deletions application/requirements.txt
@@ -4,6 +4,7 @@ aiohttp-retry==2.8.3
aiosignal==1.3.1
aleph-alpha-client==2.16.1
amqp==5.1.1
anthropic==0.5.0
async-timeout==4.0.2
attrs==22.2.0
billiard==3.6.4.0
2 changes: 2 additions & 0 deletions docker-compose.yaml
@@ -16,6 +16,7 @@ services:
    environment:
      - API_KEY=$API_KEY
      - EMBEDDINGS_KEY=$API_KEY
      - LLM_NAME=$LLM_NAME
      - CELERY_BROKER_URL=redis://redis:6379/0
      - CELERY_RESULT_BACKEND=redis://redis:6379/1
      - MONGO_URI=mongodb://mongo:27017/docsgpt
@@ -35,6 +36,7 @@ services:
    environment:
      - API_KEY=$API_KEY
      - EMBEDDINGS_KEY=$API_KEY
      - LLM_NAME=$LLM_NAME
      - CELERY_BROKER_URL=redis://redis:6379/0
      - CELERY_RESULT_BACKEND=redis://redis:6379/1
      - MONGO_URI=mongodb://mongo:27017/docsgpt
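Both services now pass LLM_NAME through from the host environment, alongside the existing API_KEY and EMBEDDINGS_KEY values. How the application picks it up is not shown in this commit; a minimal sketch of the idea, assuming a settings module that reads plain environment variables:

import os

# Sketch only: the real application.core.settings is not part of this diff.
# Setting LLM_NAME=anthropic (plus an ANTHROPIC_API_KEY) in .env routes
# requests through AnthropicLLM via the registry above.
LLM_NAME = os.environ.get("LLM_NAME", "openai")          # assumed default
ANTHROPIC_API_KEY = os.environ.get("ANTHROPIC_API_KEY")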
57 changes: 57 additions & 0 deletions tests/llm/test_anthropic.py
@@ -0,0 +1,57 @@
import unittest
from unittest.mock import patch, Mock
from application.llm.anthropic import AnthropicLLM

class TestAnthropicLLM(unittest.TestCase):

    def setUp(self):
        self.api_key = "TEST_API_KEY"
        self.llm = AnthropicLLM(api_key=self.api_key)

    @patch("application.llm.anthropic.settings")
    def test_init_default_api_key(self, mock_settings):
        mock_settings.ANTHROPIC_API_KEY = "DEFAULT_API_KEY"
        llm = AnthropicLLM()
        self.assertEqual(llm.api_key, "DEFAULT_API_KEY")

    def test_gen(self):
        messages = [
            {"content": "context"},
            {"content": "question"}
        ]
        mock_response = Mock()
        mock_response.completion = "test completion"

        with patch.object(self.llm.anthropic.completions, "create", return_value=mock_response) as mock_create:
            response = self.llm.gen("test_model", messages)
            self.assertEqual(response, "test completion")

            prompt_expected = "### Context \n context \n ### Question \n question"
            mock_create.assert_called_with(
                model="test_model",
                max_tokens_to_sample=300,
                stream=False,
                prompt=f"{self.llm.HUMAN_PROMPT} {prompt_expected}{self.llm.AI_PROMPT}"
            )

    def test_gen_stream(self):
        messages = [
            {"content": "context"},
            {"content": "question"}
        ]
        mock_responses = [Mock(completion="response_1"), Mock(completion="response_2")]

        with patch.object(self.llm.anthropic.completions, "create", return_value=iter(mock_responses)) as mock_create:
            responses = list(self.llm.gen_stream("test_model", messages))
            self.assertListEqual(responses, ["response_1", "response_2"])

            prompt_expected = "### Context \n context \n ### Question \n question"
            mock_create.assert_called_with(
                model="test_model",
                prompt=f"{self.llm.HUMAN_PROMPT} {prompt_expected}{self.llm.AI_PROMPT}",
                max_tokens_to_sample=300,
                stream=True
            )

if __name__ == "__main__":
    unittest.main()

2 comments on commit 71fdff1

@vercel vercel bot commented on 71fdff1 Oct 28, 2023

Successfully deployed to the following URLs:

nextra-docsgpt – ./docs

docs.docsgpt.co.uk
nextra-docsgpt-git-main-arc53.vercel.app
nextra-docsgpt-arc53.vercel.app
nextra-docsgpt.vercel.app

@vercel vercel bot commented on 71fdff1 Oct 28, 2023

Successfully deployed to the following URLs:

docs-gpt – ./frontend

docs-gpt-git-main-arc53.vercel.app
docs-gpt-arc53.vercel.app
docs-gpt-brown.vercel.app
