From c46b7ee144a7a5a4d008dd1f7253704ab30bfbf7 Mon Sep 17 00:00:00 2001
From: Phillip Kruger
Date: Thu, 12 Sep 2024 11:17:40 +1000
Subject: [PATCH] Make sure other OpenAI compatible servers can also work

---
 pom.xml                                        |  2 +-
 .../java/org/chappiebot/ChappieService.java    | 45 +++++++++++++------
 2 files changed, 33 insertions(+), 14 deletions(-)

diff --git a/pom.xml b/pom.xml
index 3b418a6..903c9b2 100644
--- a/pom.xml
+++ b/pom.xml
@@ -50,7 +50,7 @@
         <project.reporting.outputEncoding>UTF-8</project.reporting.outputEncoding>
         <quarkus.platform.artifact-id>quarkus-bom</quarkus.platform.artifact-id>
         <quarkus.platform.group-id>io.quarkus.platform</quarkus.platform.group-id>
-        <quarkus.platform.version>3.14.2</quarkus.platform.version>
+        <quarkus.platform.version>3.14.3</quarkus.platform.version>
         <skipITs>true</skipITs>
         <surefire-plugin.version>3.2.5</surefire-plugin.version>
     </properties>
diff --git a/src/main/java/org/chappiebot/ChappieService.java b/src/main/java/org/chappiebot/ChappieService.java
index ed2366b..f82dc2c 100644
--- a/src/main/java/org/chappiebot/ChappieService.java
+++ b/src/main/java/org/chappiebot/ChappieService.java
@@ -33,28 +33,31 @@ public class ChappieService {
     @ConfigProperty(name = "chappie.log.response", defaultValue = "false")
     boolean logResponse;
 
+    @ConfigProperty(name = "chappie.timeout")
+    Optional<Duration> timeout;
+
     // OpenAI
     @ConfigProperty(name = "chappie.openai.api-key")
     Optional<String> openaiKey;
 
+    @ConfigProperty(name = "chappie.openai.base-url")
+    Optional<String> openaiBaseUrl;
+
     @ConfigProperty(name = "chappie.openai.model-name", defaultValue = "gpt-4o-mini")
     String openAiModelName;
 
     // Ollama
     @ConfigProperty(name = "chappie.ollama.base-url", defaultValue = "http://localhost:11434")
-    String baseUrl;
+    String ollamaBaseUrl;
 
     @ConfigProperty(name = "chappie.ollama.model-name", defaultValue = "codellama")
     String ollamaModelName;
 
-    @ConfigProperty(name = "chappie.ollama.timeout", defaultValue = "PT60S")
-    String timeout;
-
     @PostConstruct
     public void init(){
-        if(openaiKey.isPresent()){
+        if(openaiKey.isPresent() || openaiBaseUrl.isPresent()){
             loadOpenAiModel();
         }else{
             loadOllamaModel();
@@ -62,24 +65,43 @@ public void init(){
     }
 
     private void loadOpenAiModel(){
-        Log.info("Using OpenAI " + openAiModelName);
+
+        openaiBaseUrl.ifPresentOrElse(
+            burl -> Log.info("Using OpenAI (" + burl + ") "),
+            () -> Log.info("Using OpenAI " + openAiModelName)
+        );
 
         OpenAiChatModel.OpenAiChatModelBuilder builder = OpenAiChatModel.builder();
         builder = builder.logRequests(logRequest).logResponses(logResponse);
-        builder = builder.apiKey(openaiKey.get());
+        if(openaiKey.isPresent()){
+            builder = builder.apiKey(openaiKey.get());
+        }else{
+            builder = builder.apiKey("demo");
+        }
+        if(openaiBaseUrl.isPresent()){
+            builder = builder.baseUrl(openaiBaseUrl.get());
+        }
+
         builder = builder.modelName(openAiModelName);
+
+        if(timeout.isPresent()){
+            builder = builder.timeout(timeout.get());
+        }
+
         // TODO: Tune the other setting ?
         this.chatLanguageModel = builder.build();
     }
 
     private void loadOllamaModel(){
-        Log.info("Using Ollama (" + baseUrl + ") " + ollamaModelName);
+        Log.info("Using Ollama (" + ollamaBaseUrl + ") " + ollamaModelName);
 
         OllamaChatModel.OllamaChatModelBuilder builder = OllamaChatModel.builder();
         builder = builder.logRequests(logRequest).logResponses(logResponse);
-        builder = builder.baseUrl(baseUrl);
+        builder = builder.baseUrl(ollamaBaseUrl);
         builder = builder.modelName(ollamaModelName);
-        builder = builder.timeout(Duration.parse(timeout));
+        if(timeout.isPresent()){
+            builder = builder.timeout(timeout.get());
+        }
         // TODO: Tune the other setting ?
         this.chatLanguageModel = builder.build();
     }
@@ -103,7 +125,4 @@ public ExplainAssistant getExplainAssistant(){
     public TestAssistant getTestAssistant(){
         return AiServices.create(TestAssistant.class, chatLanguageModel);
     }
-
-
-
 }
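
Usage note (editor's addition, not part of the patch): with this change the OpenAI path
is taken whenever either chappie.openai.api-key or chappie.openai.base-url is set, the
key falls back to "demo" for servers that do not require one, and the shared
chappie.timeout now applies to both the OpenAI and the Ollama provider. Below is a
minimal sketch of how the resulting LangChain4j wiring can be exercised against an
OpenAI-compatible server; the class name, base URL, model name, timeout value and
prompt are placeholders, assuming a local server that exposes an OpenAI-compatible
/v1 endpoint (for example Ollama).

import java.time.Duration;

import dev.langchain4j.model.chat.ChatLanguageModel;
import dev.langchain4j.model.openai.OpenAiChatModel;

public class OpenAiCompatibleSmokeTest {

    public static void main(String[] args) {
        // Mirrors what ChappieService builds when chappie.openai.base-url is set
        // but chappie.openai.api-key is not: a custom base URL plus the "demo" key.
        ChatLanguageModel model = OpenAiChatModel.builder()
                .baseUrl("http://localhost:11434/v1") // hypothetical OpenAI-compatible endpoint
                .apiKey("demo")                       // placeholder key, as in the patch's fallback
                .modelName("codellama")               // hypothetical model served by that endpoint
                .timeout(Duration.ofSeconds(60))      // example value for chappie.timeout
                .logRequests(true)
                .logResponses(true)
                .build();

        System.out.println(model.generate("Say hello in one short sentence."));
    }
}

Run with the langchain4j-open-ai dependency on the classpath; swapping the base URL to
any other OpenAI-compatible server is the scenario this patch is meant to cover.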