diff --git a/README.md b/README.md
index 4cb4b2bd..b97d974a 100644
--- a/README.md
+++ b/README.md
@@ -70,6 +70,13 @@ You can also run it with local model through ollama:
 git add <files...>
 OCO_AI_PROVIDER='ollama' opencommit
 ```
+If you have Ollama set up in Docker or on another machine with GPUs (not locally), you can change the default endpoint URL.
+You can do so by setting the `OCO_OLLAMA_API_URL` environment variable as follows:
+
+```sh
+OCO_OLLAMA_API_URL='http://192.168.1.10:11434/api/chat' opencommit
+```
+where `192.168.1.10` is an example of the host where Ollama is set up.
 
 ### Flags
diff --git a/out/cli.cjs b/out/cli.cjs
index 5eaf2d72..65707468 100755
--- a/out/cli.cjs
+++ b/out/cli.cjs
@@ -30629,6 +30629,26 @@ function getI18nLocal(value) {
 }
 
 // src/commands/config.ts
+var CONFIG_KEYS = /* @__PURE__ */ ((CONFIG_KEYS2) => {
+  CONFIG_KEYS2["OCO_OPENAI_API_KEY"] = "OCO_OPENAI_API_KEY";
+  CONFIG_KEYS2["OCO_ANTHROPIC_API_KEY"] = "OCO_ANTHROPIC_API_KEY";
+  CONFIG_KEYS2["OCO_AZURE_API_KEY"] = "OCO_AZURE_API_KEY";
+  CONFIG_KEYS2["OCO_TOKENS_MAX_INPUT"] = "OCO_TOKENS_MAX_INPUT";
+  CONFIG_KEYS2["OCO_TOKENS_MAX_OUTPUT"] = "OCO_TOKENS_MAX_OUTPUT";
+  CONFIG_KEYS2["OCO_OPENAI_BASE_PATH"] = "OCO_OPENAI_BASE_PATH";
+  CONFIG_KEYS2["OCO_DESCRIPTION"] = "OCO_DESCRIPTION";
+  CONFIG_KEYS2["OCO_EMOJI"] = "OCO_EMOJI";
+  CONFIG_KEYS2["OCO_MODEL"] = "OCO_MODEL";
+  CONFIG_KEYS2["OCO_LANGUAGE"] = "OCO_LANGUAGE";
+  CONFIG_KEYS2["OCO_MESSAGE_TEMPLATE_PLACEHOLDER"] = "OCO_MESSAGE_TEMPLATE_PLACEHOLDER";
+  CONFIG_KEYS2["OCO_PROMPT_MODULE"] = "OCO_PROMPT_MODULE";
+  CONFIG_KEYS2["OCO_AI_PROVIDER"] = "OCO_AI_PROVIDER";
+  CONFIG_KEYS2["OCO_GITPUSH"] = "OCO_GITPUSH";
+  CONFIG_KEYS2["OCO_ONE_LINE_COMMIT"] = "OCO_ONE_LINE_COMMIT";
+  CONFIG_KEYS2["OCO_AZURE_ENDPOINT"] = "OCO_AZURE_ENDPOINT";
+  CONFIG_KEYS2["OCO_OLLAMA_API_URL"] = "OCO_OLLAMA_API_URL";
+  return CONFIG_KEYS2;
+})(CONFIG_KEYS || {});
 var MODEL_LIST = {
   openai: [
     "gpt-3.5-turbo",
@@ -30825,6 +30845,14 @@ var configValidators = {
       'Must be in format "https://<resource name>.openai.azure.com/"'
     );
     return value;
+  },
+  ["OCO_OLLAMA_API_URL" /* OCO_OLLAMA_API_URL */](value) {
+    validateConfig(
+      CONFIG_KEYS.OCO_OLLAMA_API_URL,
+      typeof value === "string" && value.startsWith("http"),
+      `${value} is not a valid URL`
+    );
+    return value;
+  }
 };
 var defaultConfigPath = (0, import_path.join)((0, import_os.homedir)(), ".opencommit");
@@ -34151,12 +34179,16 @@ var api = new OpenAi();
 var config4 = getConfig();
 var OllamaAi = class {
   model = "mistral";
+  url = "http://localhost:11434/api/chat";
   setModel(model) {
     this.model = model ?? config4?.OCO_MODEL ?? "mistral";
   }
+  setUrl(url2) {
+    this.url = url2 ?? config4?.OCO_OLLAMA_API_URL ?? "http://localhost:11434/api/chat";
+  }
   async generateCommitMessage(messages) {
     const model = this.model;
-    const url2 = "http://localhost:11434/api/chat";
+    const url2 = this.url;
     const p4 = {
       model,
       messages,
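A note on the validator added above: it only checks that the value is a string beginning with `http`, so a value like `httpnonsense` would pass. A stricter check could lean on the standard WHATWG `URL` constructor. A minimal sketch of that alternative, for illustration only (the `isValidHttpUrl` helper is hypothetical, not part of this diff):

```ts
// Hypothetical stricter predicate for the OCO_OLLAMA_API_URL validator.
const isValidHttpUrl = (value: unknown): boolean => {
  if (typeof value !== 'string') return false;
  try {
    // new URL() throws on malformed input, so a bare prefix check is avoided.
    const parsed = new URL(value);
    return parsed.protocol === 'http:' || parsed.protocol === 'https:';
  } catch {
    return false;
  }
};
```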
"http://localhost:11434/api/chat"; + } async generateCommitMessage(messages) { const model = this.model; - const url2 = "http://localhost:11434/api/chat"; + const url2 = this.url; const p4 = { model, messages, diff --git a/out/github-action.cjs b/out/github-action.cjs index f0b4ad43..e310be2e 100644 --- a/out/github-action.cjs +++ b/out/github-action.cjs @@ -49444,6 +49444,26 @@ function getI18nLocal(value) { } // src/commands/config.ts +var CONFIG_KEYS = /* @__PURE__ */ ((CONFIG_KEYS2) => { + CONFIG_KEYS2["OCO_OPENAI_API_KEY"] = "OCO_OPENAI_API_KEY"; + CONFIG_KEYS2["OCO_ANTHROPIC_API_KEY"] = "OCO_ANTHROPIC_API_KEY"; + CONFIG_KEYS2["OCO_AZURE_API_KEY"] = "OCO_AZURE_API_KEY"; + CONFIG_KEYS2["OCO_TOKENS_MAX_INPUT"] = "OCO_TOKENS_MAX_INPUT"; + CONFIG_KEYS2["OCO_TOKENS_MAX_OUTPUT"] = "OCO_TOKENS_MAX_OUTPUT"; + CONFIG_KEYS2["OCO_OPENAI_BASE_PATH"] = "OCO_OPENAI_BASE_PATH"; + CONFIG_KEYS2["OCO_DESCRIPTION"] = "OCO_DESCRIPTION"; + CONFIG_KEYS2["OCO_EMOJI"] = "OCO_EMOJI"; + CONFIG_KEYS2["OCO_MODEL"] = "OCO_MODEL"; + CONFIG_KEYS2["OCO_LANGUAGE"] = "OCO_LANGUAGE"; + CONFIG_KEYS2["OCO_MESSAGE_TEMPLATE_PLACEHOLDER"] = "OCO_MESSAGE_TEMPLATE_PLACEHOLDER"; + CONFIG_KEYS2["OCO_PROMPT_MODULE"] = "OCO_PROMPT_MODULE"; + CONFIG_KEYS2["OCO_AI_PROVIDER"] = "OCO_AI_PROVIDER"; + CONFIG_KEYS2["OCO_GITPUSH"] = "OCO_GITPUSH"; + CONFIG_KEYS2["OCO_ONE_LINE_COMMIT"] = "OCO_ONE_LINE_COMMIT"; + CONFIG_KEYS2["OCO_AZURE_ENDPOINT"] = "OCO_AZURE_ENDPOINT"; + CONFIG_KEYS2["OCO_OLLAMA_API_URL"] = "OCO_API_URL"; + return CONFIG_KEYS2; +})(CONFIG_KEYS || {}); var MODEL_LIST = { openai: [ "gpt-3.5-turbo", @@ -49640,6 +49660,14 @@ var configValidators = { 'Must be in format "https://.openai.azure.com/"' ); return value; + }, + ["OCO_API_URL" /* OCO_OLLAMA_API_URL */](value) { + validateConfig( + CONFIG_KEYS.OCO_API_URL, + typeof value === "string" && value.startsWith("http"), + `${value} is not a valid URL` + ); + return value; } }; var defaultConfigPath = (0, import_path.join)((0, import_os.homedir)(), ".opencommit"); @@ -52966,12 +52994,16 @@ var api = new OpenAi(); var config4 = getConfig(); var OllamaAi = class { model = "mistral"; + url = "http://localhost:11434/api/chat"; setModel(model) { this.model = model ?? config4?.OCO_MODEL ?? "mistral"; } + setUrl(url2) { + this.url = url2 ?? config4?.OCO_OLLAMA_API_URL ?? 
"http://localhost:11434/api/chat"; + } async generateCommitMessage(messages) { const model = this.model; - const url2 = "http://localhost:11434/api/chat"; + const url2 = this.url; const p3 = { model, messages, diff --git a/src/commands/config.ts b/src/commands/config.ts index 3d5122cf..a08ab749 100644 --- a/src/commands/config.ts +++ b/src/commands/config.ts @@ -27,7 +27,8 @@ export enum CONFIG_KEYS { OCO_AI_PROVIDER = 'OCO_AI_PROVIDER', OCO_GITPUSH = 'OCO_GITPUSH', OCO_ONE_LINE_COMMIT = 'OCO_ONE_LINE_COMMIT', - OCO_AZURE_ENDPOINT = 'OCO_AZURE_ENDPOINT' + OCO_AZURE_ENDPOINT = 'OCO_AZURE_ENDPOINT', + OCO_OLLAMA_API_URL = 'OCO_API_URL', } export enum CONFIG_MODES { @@ -270,6 +271,14 @@ export const configValidators = { return value; }, + [CONFIG_KEYS.OCO_OLLAMA_API_URL](value: any) { // add simple api validator + validateConfig( + CONFIG_KEYS.OCO_API_URL, + typeof value === 'string' && value.startsWith('http'), + `${value} is not a valid URL` + ); + return value; + }, }; export type ConfigType = { diff --git a/src/engine/ollama.ts b/src/engine/ollama.ts index 1cf80224..5b187f0d 100644 --- a/src/engine/ollama.ts +++ b/src/engine/ollama.ts @@ -10,10 +10,15 @@ const config = getConfig(); export class OllamaAi implements AiEngine { private model = "mistral"; // as default model of Ollama + private url = "http://localhost:11434/api/chat"; // default URL of Ollama API setModel(model: string) { this.model = model ?? config?.OCO_MODEL ?? 'mistral'; } + + setUrl(url: string) { + this.url = url ?? config?.OCO_OLLAMA_API_URL ?? 'http://localhost:11434/api/chat'; + } async generateCommitMessage( messages: Array ): Promise { @@ -22,7 +27,7 @@ export class OllamaAi implements AiEngine { //console.log(messages); //process.exit() - const url = 'http://localhost:11434/api/chat'; + const url = this.url; const p = { model, messages,