Make endpoint url for Ollama configurable (#355)
senovr authored Jul 2, 2024
1 parent fef25a2 commit 18f5277
Showing 5 changed files with 89 additions and 4 deletions.
7 changes: 7 additions & 0 deletions README.md
@@ -70,6 +70,13 @@ You can also run it with local model through ollama:
git add <files...>
OCO_AI_PROVIDER='ollama' opencommit
```
If you have Ollama set up in Docker or on another machine with GPUs (not locally), you can change the default endpoint URL.
You can do so by setting the `OCO_OLLAMA_API_URL` environment variable as follows:

```sh
OCO_OLLAMA_API_URL='http://192.168.1.10:11434/api/chat' opencommit
```
where `192.168.1.10` is an example address of the machine where Ollama is set up. Note that the URL includes the full `/api/chat` path; the engine uses the configured value as-is when making requests.

### Flags

34 changes: 33 additions & 1 deletion out/cli.cjs
@@ -30629,6 +30629,26 @@ function getI18nLocal(value) {
}

// src/commands/config.ts
var CONFIG_KEYS = /* @__PURE__ */ ((CONFIG_KEYS2) => {
CONFIG_KEYS2["OCO_OPENAI_API_KEY"] = "OCO_OPENAI_API_KEY";
CONFIG_KEYS2["OCO_ANTHROPIC_API_KEY"] = "OCO_ANTHROPIC_API_KEY";
CONFIG_KEYS2["OCO_AZURE_API_KEY"] = "OCO_AZURE_API_KEY";
CONFIG_KEYS2["OCO_TOKENS_MAX_INPUT"] = "OCO_TOKENS_MAX_INPUT";
CONFIG_KEYS2["OCO_TOKENS_MAX_OUTPUT"] = "OCO_TOKENS_MAX_OUTPUT";
CONFIG_KEYS2["OCO_OPENAI_BASE_PATH"] = "OCO_OPENAI_BASE_PATH";
CONFIG_KEYS2["OCO_DESCRIPTION"] = "OCO_DESCRIPTION";
CONFIG_KEYS2["OCO_EMOJI"] = "OCO_EMOJI";
CONFIG_KEYS2["OCO_MODEL"] = "OCO_MODEL";
CONFIG_KEYS2["OCO_LANGUAGE"] = "OCO_LANGUAGE";
CONFIG_KEYS2["OCO_MESSAGE_TEMPLATE_PLACEHOLDER"] = "OCO_MESSAGE_TEMPLATE_PLACEHOLDER";
CONFIG_KEYS2["OCO_PROMPT_MODULE"] = "OCO_PROMPT_MODULE";
CONFIG_KEYS2["OCO_AI_PROVIDER"] = "OCO_AI_PROVIDER";
CONFIG_KEYS2["OCO_GITPUSH"] = "OCO_GITPUSH";
CONFIG_KEYS2["OCO_ONE_LINE_COMMIT"] = "OCO_ONE_LINE_COMMIT";
CONFIG_KEYS2["OCO_AZURE_ENDPOINT"] = "OCO_AZURE_ENDPOINT";
CONFIG_KEYS2["OCO_OLLAMA_API_URL"] = "OCO_API_URL";
return CONFIG_KEYS2;
})(CONFIG_KEYS || {});
var MODEL_LIST = {
openai: [
"gpt-3.5-turbo",
@@ -30825,6 +30845,14 @@ var configValidators = {
'Must be in format "https://<resource name>.openai.azure.com/"'
);
return value;
},
["OCO_API_URL" /* OCO_OLLAMA_API_URL */](value) {
validateConfig(
CONFIG_KEYS.OCO_API_URL,
typeof value === "string" && value.startsWith("http"),
`${value} is not a valid URL`
);
return value;
}
};
var defaultConfigPath = (0, import_path.join)((0, import_os.homedir)(), ".opencommit");
@@ -34151,12 +34179,16 @@ var api = new OpenAi();
var config4 = getConfig();
var OllamaAi = class {
model = "mistral";
url = "http://localhost:11434/api/chat";
setModel(model) {
this.model = model ?? config4?.OCO_MODEL ?? "mistral";
}
setUrl(url2) {
this.url = url2 ?? config4?.OCO_OLLAMA_API_URL ?? "http://localhost:11434/api/chat";
}
async generateCommitMessage(messages) {
const model = this.model;
const url2 = "http://localhost:11434/api/chat";
const url2 = this.url;
const p4 = {
model,
messages,
34 changes: 33 additions & 1 deletion out/github-action.cjs
@@ -49444,6 +49444,26 @@ function getI18nLocal(value) {
}

// src/commands/config.ts
var CONFIG_KEYS = /* @__PURE__ */ ((CONFIG_KEYS2) => {
CONFIG_KEYS2["OCO_OPENAI_API_KEY"] = "OCO_OPENAI_API_KEY";
CONFIG_KEYS2["OCO_ANTHROPIC_API_KEY"] = "OCO_ANTHROPIC_API_KEY";
CONFIG_KEYS2["OCO_AZURE_API_KEY"] = "OCO_AZURE_API_KEY";
CONFIG_KEYS2["OCO_TOKENS_MAX_INPUT"] = "OCO_TOKENS_MAX_INPUT";
CONFIG_KEYS2["OCO_TOKENS_MAX_OUTPUT"] = "OCO_TOKENS_MAX_OUTPUT";
CONFIG_KEYS2["OCO_OPENAI_BASE_PATH"] = "OCO_OPENAI_BASE_PATH";
CONFIG_KEYS2["OCO_DESCRIPTION"] = "OCO_DESCRIPTION";
CONFIG_KEYS2["OCO_EMOJI"] = "OCO_EMOJI";
CONFIG_KEYS2["OCO_MODEL"] = "OCO_MODEL";
CONFIG_KEYS2["OCO_LANGUAGE"] = "OCO_LANGUAGE";
CONFIG_KEYS2["OCO_MESSAGE_TEMPLATE_PLACEHOLDER"] = "OCO_MESSAGE_TEMPLATE_PLACEHOLDER";
CONFIG_KEYS2["OCO_PROMPT_MODULE"] = "OCO_PROMPT_MODULE";
CONFIG_KEYS2["OCO_AI_PROVIDER"] = "OCO_AI_PROVIDER";
CONFIG_KEYS2["OCO_GITPUSH"] = "OCO_GITPUSH";
CONFIG_KEYS2["OCO_ONE_LINE_COMMIT"] = "OCO_ONE_LINE_COMMIT";
CONFIG_KEYS2["OCO_AZURE_ENDPOINT"] = "OCO_AZURE_ENDPOINT";
CONFIG_KEYS2["OCO_OLLAMA_API_URL"] = "OCO_API_URL";
return CONFIG_KEYS2;
})(CONFIG_KEYS || {});
var MODEL_LIST = {
openai: [
"gpt-3.5-turbo",
@@ -49640,6 +49660,14 @@ var configValidators = {
'Must be in format "https://<resource name>.openai.azure.com/"'
);
return value;
},
["OCO_API_URL" /* OCO_OLLAMA_API_URL */](value) {
validateConfig(
CONFIG_KEYS.OCO_API_URL,
typeof value === "string" && value.startsWith("http"),
`${value} is not a valid URL`
);
return value;
}
};
var defaultConfigPath = (0, import_path.join)((0, import_os.homedir)(), ".opencommit");
@@ -52966,12 +52994,16 @@ var api = new OpenAi();
var config4 = getConfig();
var OllamaAi = class {
model = "mistral";
url = "http://localhost:11434/api/chat";
setModel(model) {
this.model = model ?? config4?.OCO_MODEL ?? "mistral";
}
setUrl(url2) {
this.url = url2 ?? config4?.OCO_OLLAMA_API_URL ?? "http://localhost:11434/api/chat";
}
async generateCommitMessage(messages) {
const model = this.model;
const url2 = "http://localhost:11434/api/chat";
const url2 = this.url;
const p3 = {
model,
messages,
11 changes: 10 additions & 1 deletion src/commands/config.ts
@@ -27,7 +27,8 @@ export enum CONFIG_KEYS {
OCO_AI_PROVIDER = 'OCO_AI_PROVIDER',
OCO_GITPUSH = 'OCO_GITPUSH',
OCO_ONE_LINE_COMMIT = 'OCO_ONE_LINE_COMMIT',
OCO_AZURE_ENDPOINT = 'OCO_AZURE_ENDPOINT'
OCO_AZURE_ENDPOINT = 'OCO_AZURE_ENDPOINT',
OCO_OLLAMA_API_URL = 'OCO_OLLAMA_API_URL',
}

export enum CONFIG_MODES {
@@ -270,6 +271,14 @@ export const configValidators = {

return value;
},
[CONFIG_KEYS.OCO_OLLAMA_API_URL](value: any) { // simple URL validator for the Ollama endpoint
validateConfig(
CONFIG_KEYS.OCO_OLLAMA_API_URL,
typeof value === 'string' && value.startsWith('http'),
`${value} is not a valid URL`
);
return value;
},
};

export type ConfigType = {
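For reference, here is a minimal standalone sketch of how the new validator behaves. It assumes a simplified `validateConfig` that throws on failure; the real helper lives in `src/commands/config.ts` and is not shown in this diff:

```ts
// Simplified stand-in for the validateConfig helper in src/commands/config.ts
// (an assumption for this sketch); it is taken to throw on a failed check.
function validateConfig(key: string, condition: boolean, error: string): void {
  if (!condition) {
    throw new Error(`Unsupported config key ${key}: ${error}`);
  }
}

// Mirrors the validator added in this commit.
function validateOllamaApiUrl(value: any): string {
  validateConfig(
    'OCO_OLLAMA_API_URL',
    typeof value === 'string' && value.startsWith('http'),
    `${value} is not a valid URL`
  );
  return value;
}

validateOllamaApiUrl('http://192.168.1.10:11434/api/chat'); // accepted
validateOllamaApiUrl('localhost:11434/api/chat'); // throws: no http(s) scheme
```

The check is intentionally loose: any string beginning with `http` passes, so a typo in the host or path only surfaces when the engine makes its first request.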
7 changes: 6 additions & 1 deletion src/engine/ollama.ts
@@ -10,10 +10,15 @@ const config = getConfig();

export class OllamaAi implements AiEngine {
private model = "mistral"; // default Ollama model
private url = "http://localhost:11434/api/chat"; // default URL of Ollama API

setModel(model: string) {
this.model = model ?? config?.OCO_MODEL ?? 'mistral';
}

setUrl(url: string) {
this.url = url ?? config?.OCO_OLLAMA_API_URL ?? 'http://localhost:11434/api/chat';
}
async generateCommitMessage(
messages: Array<ChatCompletionRequestMessage>
): Promise<string | undefined> {
@@ -22,7 +27,7 @@ export class OllamaAi implements AiEngine {
//console.log(messages);
//process.exit()

const url = 'http://localhost:11434/api/chat';
const url = this.url;
const p = {
model,
messages,
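Taken together, the new setter and the configurable URL let a caller point the engine at a remote Ollama instance. A minimal usage sketch follows; the actual call site (e.g. an engine factory) is not part of this diff, and the model, URL, and message values are illustrative:

```ts
import { OllamaAi } from './engine/ollama';

const ollama = new OllamaAi();

// In opencommit these values would come from getConfig();
// they are hard-coded here for illustration.
ollama.setModel('mistral');
// The ?? chain in setUrl falls back to OCO_OLLAMA_API_URL, then localhost.
ollama.setUrl('http://192.168.1.10:11434/api/chat');

const message = await ollama.generateCommitMessage([
  { role: 'system', content: 'Generate a conventional commit message for the diff.' },
  { role: 'user', content: 'diff --git a/src/index.ts b/src/index.ts ...' }
]);

console.log(message);
```

Keeping `http://localhost:11434/api/chat` as the final fallback preserves the previous behavior for local installs, so the change is backward compatible.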
