
Commit

Merge pull request #15 from phillip-kruger/podman
Make sure other OpenAI compatible servers can also work
phillip-kruger authored Sep 12, 2024
2 parents 1d8099b + c46b7ee commit cd549f8
Showing 2 changed files with 33 additions and 14 deletions.
pom.xml (2 changes: 1 addition & 1 deletion)
@@ -50,7 +50,7 @@
<project.reporting.outputEncoding>UTF-8</project.reporting.outputEncoding>
<quarkus.platform.artifact-id>quarkus-bom</quarkus.platform.artifact-id>
<quarkus.platform.group-id>io.quarkus.platform</quarkus.platform.group-id>
-<quarkus.platform.version>3.14.2</quarkus.platform.version>
+<quarkus.platform.version>3.14.3</quarkus.platform.version>
<skipITs>true</skipITs>
<surefire-plugin.version>3.2.5</surefire-plugin.version>

src/main/java/org/chappiebot/ChappieService.java (45 changes: 32 additions & 13 deletions)
@@ -33,53 +33,75 @@ public class ChappieService {
@ConfigProperty(name = "chappie.log.response", defaultValue = "false")
boolean logResponse;

+@ConfigProperty(name = "chappie.timeout")
+Optional<Duration> timeout;
+
+// OpenAI
+
@ConfigProperty(name = "chappie.openai.api-key")
Optional<String> openaiKey;

+@ConfigProperty(name = "chappie.openai.base-url")
+Optional<String> openaiBaseUrl;

@ConfigProperty(name = "chappie.openai.model-name", defaultValue = "gpt-4o-mini")
String openAiModelName;

+// Ollama

@ConfigProperty(name = "chappie.ollama.base-url", defaultValue = "http://localhost:11434")
-String baseUrl;
+String ollamaBaseUrl;

@ConfigProperty(name = "chappie.ollama.model-name", defaultValue = "codellama")
String ollamaModelName;

-@ConfigProperty(name = "chappie.ollama.timeout", defaultValue = "PT60S")
-String timeout;

@PostConstruct
public void init(){
-if(openaiKey.isPresent()){
+if(openaiKey.isPresent() || openaiBaseUrl.isPresent()){
loadOpenAiModel();
}else{
loadOllamaModel();
}
}

private void loadOpenAiModel(){
Log.info("Using OpenAI " + openAiModelName);

+openaiBaseUrl.ifPresentOrElse(
+burl -> Log.info("Using OpenAI (" + burl + ") "),
+() -> Log.info("Using OpenAI " + openAiModelName)
+);

OpenAiChatModel.OpenAiChatModelBuilder builder = OpenAiChatModel.builder();
builder = builder.logRequests(logRequest).logResponses(logResponse);
-builder = builder.apiKey(openaiKey.get());
+if(openaiKey.isPresent()){
+builder = builder.apiKey(openaiKey.get());
+}else{
+builder = builder.apiKey("demo");
+}
+if(openaiBaseUrl.isPresent()){
+builder = builder.baseUrl(openaiBaseUrl.get());
+}

builder = builder.modelName(openAiModelName);

+if(timeout.isPresent()){
+builder = builder.timeout(timeout.get());
+}

// TODO: Tune the other setting ?
this.chatLanguageModel = builder.build();

}

private void loadOllamaModel(){
Log.info("Using Ollama (" + baseUrl + ") " + ollamaModelName);
Log.info("Using Ollama (" + ollamaBaseUrl + ") " + ollamaModelName);
OllamaChatModel.OllamaChatModelBuilder builder = OllamaChatModel.builder();
builder = builder.logRequests(logRequest).logResponses(logResponse);
-builder = builder.baseUrl(baseUrl);
+builder = builder.baseUrl(ollamaBaseUrl);
builder = builder.modelName(ollamaModelName);
-builder = builder.timeout(Duration.parse(timeout));
+if(timeout.isPresent()){
+builder = builder.timeout(timeout.get());
+}
// TODO: Tune the other setting ?
this.chatLanguageModel = builder.build();
}
@@ -103,7 +125,4 @@ public ExplainAssistant getExplainAssistant(){
public TestAssistant getTestAssistant(){
return AiServices.create(TestAssistant.class, chatLanguageModel);
}
-
-
-
}
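
As a usage sketch for the change above: with the new properties, an OpenAI-compatible server can be targeted purely through configuration (for example in the Quarkus application.properties file, or any other MicroProfile Config source). The property names below come from the @ConfigProperty annotations in the diff; the endpoint URL and model name are placeholder values, and the api-key entry is optional because loadOpenAiModel() now falls back to the "demo" key when no key is configured:

    # Hypothetical values for a local OpenAI-compatible endpoint (placeholders)
    chappie.openai.base-url=http://localhost:8000/v1
    chappie.openai.model-name=granite-7b-lab
    # chappie.openai.api-key is optional; the builder falls back to the "demo" key
    chappie.timeout=PT120S

If neither chappie.openai.api-key nor chappie.openai.base-url is set, init() still falls back to loadOllamaModel() and the chappie.ollama.* defaults.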
