Skip to content

Commit bad1fc9

Browse files
committed
✨ feat(engine): add DeepSeekEngine to support DeepSeek API
♻️ refactor(engine): create a new DeepSeekEngine class to handle DeepSeek API requests. The DeepSeekEngine class inherits from OpenAiEngine, pins the DeepSeek base URL, and overrides the generateCommitMessage method to use the DeepSeek API; OllamaEngine is also updated to strip `<think>…</think>` reasoning blocks from model responses. This change improves code organization and maintainability.
1 parent 0356046 commit bad1fc9

File tree

3 files changed

+109
-14
lines changed

3 files changed

+109
-14
lines changed

out/cli.cjs

Lines changed: 29 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -49153,6 +49153,10 @@ var MODEL_LIST = {
4915349153
"mistral-embed",
4915449154
"mistral-moderation-2411",
4915549155
"mistral-moderation-latest"
49156+
],
49157+
deepseek: [
49158+
"deepseek-chat",
49159+
"deepseek-reasoner"
4915649160
]
4915749161
};
4915849162
var getDefaultModel = (provider) => {
@@ -49169,6 +49173,8 @@ var getDefaultModel = (provider) => {
4916949173
return MODEL_LIST.groq[0];
4917049174
case "mistral":
4917149175
return MODEL_LIST.mistral[0];
49176+
case "deepseek":
49177+
return MODEL_LIST.deepseek[0];
4917249178
default:
4917349179
return MODEL_LIST.openai[0];
4917449180
}
@@ -49194,7 +49200,7 @@ var configValidators = {
4919449200
validateConfig(
4919549201
"OCO_API_KEY",
4919649202
value,
49197-
'You need to provide the OCO_API_KEY when OCO_AI_PROVIDER set to "openai" (default) or "ollama" or "mlx" or "azure" or "gemini" or "flowise" or "anthropic". Run `oco config set OCO_API_KEY=your_key OCO_AI_PROVIDER=openai`'
49203+
'You need to provide the OCO_API_KEY when OCO_AI_PROVIDER set to "openai" (default) or "ollama" or "mlx" or "azure" or "gemini" or "flowise" or "anthropic" or "deepseek". Run `oco config set OCO_API_KEY=your_key OCO_AI_PROVIDER=openai`'
4919849204
);
4919949205
return value;
4920049206
},
@@ -49300,9 +49306,10 @@ var configValidators = {
4930049306
"azure",
4930149307
"test",
4930249308
"flowise",
49303-
"groq"
49309+
"groq",
49310+
"deepseek"
4930449311
].includes(value) || value.startsWith("ollama"),
49305-
`${value} is not supported yet, use 'ollama', 'mlx', 'anthropic', 'azure', 'gemini', 'flowise', 'mistral' or 'openai' (default)`
49312+
`${value} is not supported yet, use 'ollama', 'mlx', 'anthropic', 'azure', 'gemini', 'flowise', 'mistral', 'deepseek' or 'openai' (default)`
4930649313
);
4930749314
return value;
4930849315
},
@@ -49344,6 +49351,7 @@ var OCO_AI_PROVIDER_ENUM = /* @__PURE__ */ ((OCO_AI_PROVIDER_ENUM2) => {
4934449351
OCO_AI_PROVIDER_ENUM2["GROQ"] = "groq";
4934549352
OCO_AI_PROVIDER_ENUM2["MISTRAL"] = "mistral";
4934649353
OCO_AI_PROVIDER_ENUM2["MLX"] = "mlx";
49354+
OCO_AI_PROVIDER_ENUM2["DEEPSEEK"] = "deepseek";
4934749355
return OCO_AI_PROVIDER_ENUM2;
4934849356
})(OCO_AI_PROVIDER_ENUM || {});
4934949357
var defaultConfigPath = (0, import_path.join)((0, import_os.homedir)(), ".opencommit");
@@ -59438,8 +59446,12 @@ var OllamaEngine = class {
5943859446
this.client.getUri(this.config),
5943959447
params
5944059448
);
59441-
const message = response.data.message;
59442-
return message?.content;
59449+
const { message } = response.data;
59450+
let content = message?.content;
59451+
if (content && content.includes("<think>")) {
59452+
return content.replace(/<think>[\s\S]*?<\/think>/g, "").trim();
59453+
}
59454+
return content;
5944359455
} catch (err) {
5944459456
const message = err.response?.data?.error ?? err.message;
5944559457
throw new Error(`Ollama provider error: ${message}`);
@@ -63830,6 +63842,16 @@ var MLXEngine = class {
6383063842
}
6383163843
};
6383263844

63845+
// src/engine/deepseek.ts
63846+
var DeepSeekEngine = class extends OpenAiEngine {
63847+
constructor(config7) {
63848+
super({
63849+
...config7,
63850+
baseURL: "https://api.deepseek.com/v1"
63851+
});
63852+
}
63853+
};
63854+
6383363855
// src/utils/engine.ts
6383463856
function getEngine() {
6383563857
const config7 = getConfig();
@@ -63860,6 +63882,8 @@ function getEngine() {
6386063882
return new MistralAiEngine(DEFAULT_CONFIG2);
6386163883
case "mlx" /* MLX */:
6386263884
return new MLXEngine(DEFAULT_CONFIG2);
63885+
case "deepseek" /* DEEPSEEK */:
63886+
return new DeepSeekEngine(DEFAULT_CONFIG2);
6386363887
default:
6386463888
return new OpenAiEngine(DEFAULT_CONFIG2);
6386563889
}

out/github-action.cjs

Lines changed: 28 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -67961,6 +67961,10 @@ var MODEL_LIST = {
6796167961
"mistral-embed",
6796267962
"mistral-moderation-2411",
6796367963
"mistral-moderation-latest"
67964+
],
67965+
deepseek: [
67966+
"deepseek-chat",
67967+
"deepseek-reasoner"
6796467968
]
6796567969
};
6796667970
var getDefaultModel = (provider) => {
@@ -67977,6 +67981,8 @@ var getDefaultModel = (provider) => {
6797767981
return MODEL_LIST.groq[0];
6797867982
case "mistral":
6797967983
return MODEL_LIST.mistral[0];
67984+
case "deepseek":
67985+
return MODEL_LIST.deepseek[0];
6798067986
default:
6798167987
return MODEL_LIST.openai[0];
6798267988
}
@@ -68002,7 +68008,7 @@ var configValidators = {
6800268008
validateConfig(
6800368009
"OCO_API_KEY",
6800468010
value,
68005-
'You need to provide the OCO_API_KEY when OCO_AI_PROVIDER set to "openai" (default) or "ollama" or "mlx" or "azure" or "gemini" or "flowise" or "anthropic". Run `oco config set OCO_API_KEY=your_key OCO_AI_PROVIDER=openai`'
68011+
'You need to provide the OCO_API_KEY when OCO_AI_PROVIDER set to "openai" (default) or "ollama" or "mlx" or "azure" or "gemini" or "flowise" or "anthropic" or "deepseek". Run `oco config set OCO_API_KEY=your_key OCO_AI_PROVIDER=openai`'
6800668012
);
6800768013
return value;
6800868014
},
@@ -68108,9 +68114,10 @@ var configValidators = {
6810868114
"azure",
6810968115
"test",
6811068116
"flowise",
68111-
"groq"
68117+
"groq",
68118+
"deepseek"
6811268119
].includes(value) || value.startsWith("ollama"),
68113-
`${value} is not supported yet, use 'ollama', 'mlx', 'anthropic', 'azure', 'gemini', 'flowise', 'mistral' or 'openai' (default)`
68120+
`${value} is not supported yet, use 'ollama', 'mlx', 'anthropic', 'azure', 'gemini', 'flowise', 'mistral', 'deepseek' or 'openai' (default)`
6811468121
);
6811568122
return value;
6811668123
},
@@ -78233,8 +78240,12 @@ var OllamaEngine = class {
7823378240
this.client.getUri(this.config),
7823478241
params
7823578242
);
78236-
const message = response.data.message;
78237-
return message?.content;
78243+
const { message } = response.data;
78244+
let content = message?.content;
78245+
if (content && content.includes("<think>")) {
78246+
return content.replace(/<think>[\s\S]*?<\/think>/g, "").trim();
78247+
}
78248+
return content;
7823878249
} catch (err) {
7823978250
const message = err.response?.data?.error ?? err.message;
7824078251
throw new Error(`Ollama provider error: ${message}`);
@@ -82625,6 +82636,16 @@ var MLXEngine = class {
8262582636
}
8262682637
};
8262782638

82639+
// src/engine/deepseek.ts
82640+
var DeepSeekEngine = class extends OpenAiEngine {
82641+
constructor(config6) {
82642+
super({
82643+
...config6,
82644+
baseURL: "https://api.deepseek.com/v1"
82645+
});
82646+
}
82647+
};
82648+
8262882649
// src/utils/engine.ts
8262982650
function getEngine() {
8263082651
const config6 = getConfig();
@@ -82655,6 +82676,8 @@ function getEngine() {
8265582676
return new MistralAiEngine(DEFAULT_CONFIG2);
8265682677
case "mlx" /* MLX */:
8265782678
return new MLXEngine(DEFAULT_CONFIG2);
82679+
case "deepseek" /* DEEPSEEK */:
82680+
return new DeepSeekEngine(DEFAULT_CONFIG2);
8265882681
default:
8265982682
return new OpenAiEngine(DEFAULT_CONFIG2);
8266082683
}

src/engine/deepseek.ts

Lines changed: 52 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -1,12 +1,60 @@
1-
import { OpenAiEngine, OpenAiConfig } from './openAi';
1+
import axios from 'axios';
2+
import { OpenAI } from 'openai';
3+
import { GenerateCommitMessageErrorEnum } from '../generateCommitMessageFromGitDiff';
4+
import { tokenCount } from '../utils/tokenCount';
5+
import { OpenAiEngine, OpenAiConfig } from './openAI';
26

3-
export interface DeepSeekConfig extends OpenAiConfig {}
7+
export interface DeepseekConfig extends OpenAiConfig {}
48

5-
export class DeepSeekEngine extends OpenAiEngine {
6-
constructor(config: DeepSeekConfig) {
9+
export class DeepseekEngine extends OpenAiEngine {
10+
constructor(config: DeepseekConfig) {
11+
// Call OpenAIEngine constructor with forced Deepseek baseURL
712
super({
813
...config,
914
baseURL: 'https://api.deepseek.com/v1'
1015
});
1116
}
17+
18+
// Identical method from OpenAiEngine, re-implemented here
19+
public generateCommitMessage = async (
20+
messages: Array<OpenAI.Chat.Completions.ChatCompletionMessageParam>
21+
): Promise<string | null> => {
22+
const params = {
23+
model: this.config.model,
24+
messages,
25+
temperature: 0,
26+
top_p: 0.1,
27+
max_tokens: this.config.maxTokensOutput
28+
};
29+
30+
try {
31+
const REQUEST_TOKENS = messages
32+
.map((msg) => tokenCount(msg.content as string) + 4)
33+
.reduce((a, b) => a + b, 0);
34+
35+
if (
36+
REQUEST_TOKENS >
37+
this.config.maxTokensInput - this.config.maxTokensOutput
38+
)
39+
throw new Error(GenerateCommitMessageErrorEnum.tooMuchTokens);
40+
41+
const completion = await this.client.chat.completions.create(params);
42+
43+
const message = completion.choices[0].message;
44+
45+
return message?.content;
46+
} catch (error) {
47+
const err = error as Error;
48+
if (
49+
axios.isAxiosError<{ error?: { message: string } }>(error) &&
50+
error.response?.status === 401
51+
) {
52+
const openAiError = error.response.data.error;
53+
54+
if (openAiError) throw new Error(openAiError.message);
55+
}
56+
57+
throw err;
58+
}
59+
};
1260
}

0 commit comments

Comments
 (0)