Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
13 changes: 12 additions & 1 deletion src/commands/config.ts
Original file line number Diff line number Diff line change
Expand Up @@ -29,7 +29,8 @@ export enum CONFIG_KEYS {
OCO_API_CUSTOM_HEADERS = 'OCO_API_CUSTOM_HEADERS',
OCO_OMIT_SCOPE = 'OCO_OMIT_SCOPE',
OCO_GITPUSH = 'OCO_GITPUSH', // todo: deprecate
OCO_HOOK_AUTO_UNCOMMENT = 'OCO_HOOK_AUTO_UNCOMMENT'
OCO_HOOK_AUTO_UNCOMMENT = 'OCO_HOOK_AUTO_UNCOMMENT',
OCO_OLLAMA_THINK = 'OCO_OLLAMA_THINK'
}

export enum CONFIG_MODES {
Expand Down Expand Up @@ -838,6 +839,15 @@ export const configValidators = {
typeof value === 'boolean',
'Must be true or false'
);
},

[CONFIG_KEYS.OCO_OLLAMA_THINK](value: any) {
validateConfig(
CONFIG_KEYS.OCO_OLLAMA_THINK,
typeof value === 'boolean',
'Must be true or false'
);
return value;
}
};

Expand Down Expand Up @@ -905,6 +915,7 @@ export type ConfigType = {
[CONFIG_KEYS.OCO_OMIT_SCOPE]: boolean;
[CONFIG_KEYS.OCO_TEST_MOCK_TYPE]: string;
[CONFIG_KEYS.OCO_HOOK_AUTO_UNCOMMENT]: boolean;
[CONFIG_KEYS.OCO_OLLAMA_THINK]?: boolean;
};

export const defaultConfigPath = pathJoin(homedir(), '.opencommit');
Expand Down
1 change: 1 addition & 0 deletions src/engine/Engine.ts
Original file line number Diff line number Diff line change
Expand Up @@ -13,6 +13,7 @@ export interface AiEngineConfig {
baseURL?: string;
proxy?: string;
customHeaders?: Record<string, string>;
ollamaThink?: boolean;
}

type Client =
Expand Down
9 changes: 7 additions & 2 deletions src/engine/ollama.ts
Original file line number Diff line number Diff line change
Expand Up @@ -4,7 +4,9 @@ import { normalizeEngineError } from '../utils/engineErrorHandler';
import { removeContentTags } from '../utils/removeContentTags';
import { AiEngine, AiEngineConfig } from './Engine';

interface OllamaConfig extends AiEngineConfig {}
interface OllamaConfig extends AiEngineConfig {
ollamaThink?: boolean;
}

const DEFAULT_OLLAMA_URL = 'http://localhost:11434';
const OLLAMA_CHAT_PATH = '/api/chat';
Expand Down Expand Up @@ -32,12 +34,15 @@ export class OllamaEngine implements AiEngine {
async generateCommitMessage(
messages: Array<OpenAI.Chat.Completions.ChatCompletionMessageParam>
): Promise<string | undefined> {
const params = {
const params: Record<string, any> = {
model: this.config.model ?? 'mistral',
messages,
options: { temperature: 0, top_p: 0.1 },
stream: false
};
if (typeof this.config.ollamaThink === 'boolean') {
params.think = this.config.ollamaThink;
}
try {
const response = await this.client.post(this.chatUrl, params);

Expand Down
5 changes: 4 additions & 1 deletion src/utils/engine.ts
Original file line number Diff line number Diff line change
Expand Up @@ -54,7 +54,10 @@ export function getEngine(): AiEngine {

switch (provider) {
case OCO_AI_PROVIDER_ENUM.OLLAMA:
return new OllamaEngine(DEFAULT_CONFIG);
return new OllamaEngine({
...DEFAULT_CONFIG,
ollamaThink: config.OCO_OLLAMA_THINK
});

case OCO_AI_PROVIDER_ENUM.ANTHROPIC:
return new AnthropicEngine(DEFAULT_CONFIG);
Expand Down
64 changes: 64 additions & 0 deletions test/unit/ollama.test.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,64 @@
import { OllamaEngine } from '../../src/engine/ollama';

describe('OllamaEngine', () => {
  /**
   * Builds an OllamaEngine with its HTTP client replaced by a jest mock,
   * so tests can inspect the exact request payload sent to /api/chat.
   * Pass `ollamaThink` to exercise the think-flag path; omit it to test
   * the default (no `think` key in the payload).
   */
  const setup = (ollamaThink?: boolean) => {
    const engine = new OllamaEngine({
      apiKey: 'ollama',
      model: 'qwen3.5:2b',
      maxTokensOutput: 500,
      maxTokensInput: 4096,
      // Spread conditionally so the "not configured" case truly omits the
      // key instead of passing `ollamaThink: undefined`.
      ...(ollamaThink === undefined ? {} : { ollamaThink })
    });

    const post = jest.fn().mockResolvedValue({
      data: {
        message: {
          content: 'feat: add support for ollama think config'
        }
      }
    });

    engine.client = { post } as any;
    return { engine, post };
  };

  const MESSAGES = [
    { role: 'user' as const, content: 'diff --git a/file b/file' }
  ];

  it('sends think=false when configured', async () => {
    const { engine, post } = setup(false);

    await engine.generateCommitMessage(MESSAGES);

    expect(post).toHaveBeenCalledWith(
      'http://localhost:11434/api/chat',
      expect.objectContaining({ think: false })
    );
  });

  it('omits think when not configured', async () => {
    const { engine, post } = setup();

    await engine.generateCommitMessage(MESSAGES);

    // `not.objectContaining({ think: expect.anything() })` would still pass
    // if the payload contained `think: undefined`, because expect.anything()
    // never matches undefined. Asserting on property absence is stricter.
    expect(post).toHaveBeenCalledTimes(1);
    const [, params] = post.mock.calls[0];
    expect(params).not.toHaveProperty('think');
  });
});
Loading