From c5ce50aaa3b291781a5e035e0d800a40430d9fd4 Mon Sep 17 00:00:00 2001
From: D1m7asis
Date: Fri, 1 Aug 2025 13:18:05 +0200
Subject: [PATCH 1/2] feat: add AIML API provider support

Introduces AIMLAPI as a supported AI provider, including model list,
config validation, and engine implementation. Updates README and engine
selection logic to integrate AIMLAPI for chat completions.

Refactor AimlApiEngine response handling

Removed dependency on removeContentTags and simplified message content
extraction. Minor header formatting fix for HTTP-Referer. This
streamlines the response handling and reduces unnecessary processing.
---
 README.md              |   2 +-
 src/commands/config.ts | 117 +++++++++++++++++++++++++++++++++++++++--
 src/engine/aimlapi.ts  |  47 +++++++++++++++++
 src/utils/engine.ts    |   4 ++
 4 files changed, 165 insertions(+), 5 deletions(-)
 create mode 100644 src/engine/aimlapi.ts

diff --git a/README.md b/README.md
index 74c42296..fed321ca 100644
--- a/README.md
+++ b/README.md
@@ -106,7 +106,7 @@ Create a `.env` file and add OpenCommit config variables there like this:
 
 ```env
 ...
-OCO_AI_PROVIDER=
+OCO_AI_PROVIDER=
 OCO_API_KEY= // or other LLM provider API token
 OCO_API_URL=
 OCO_TOKENS_MAX_INPUT=
diff --git a/src/commands/config.ts b/src/commands/config.ts
index 169ee3cd..d9b65791 100644
--- a/src/commands/config.ts
+++ b/src/commands/config.ts
@@ -130,7 +130,112 @@ export const MODEL_LIST = {
     'mistral-moderation-2411',
     'mistral-moderation-latest'
   ],
-  deepseek: ['deepseek-chat', 'deepseek-reasoner']
+  deepseek: ['deepseek-chat', 'deepseek-reasoner'],
+
+  // AI/ML API available chat-completion models
+  // https://api.aimlapi.com/v1/models
+  aimlapi: [
+    'openai/gpt-4o',
+    'gpt-4o-2024-08-06',
+    'gpt-4o-2024-05-13',
+    'gpt-4o-mini',
+    'gpt-4o-mini-2024-07-18',
+    'chatgpt-4o-latest',
+    'gpt-4-turbo',
+    'gpt-4-turbo-2024-04-09',
+    'gpt-4',
+    'gpt-4-0125-preview',
+    'gpt-4-1106-preview',
+    'gpt-3.5-turbo',
+    'gpt-3.5-turbo-0125',
+    'gpt-3.5-turbo-1106',
+    'o1-preview',
+    'o1-preview-2024-09-12',
+    'o1-mini',
+    'o1-mini-2024-09-12',
+    'o3-mini',
+    'gpt-4o-audio-preview',
+    'gpt-4o-mini-audio-preview',
+    'gpt-4o-search-preview',
+    'gpt-4o-mini-search-preview',
+    'openai/gpt-4.1-2025-04-14',
+    'openai/gpt-4.1-mini-2025-04-14',
+    'openai/gpt-4.1-nano-2025-04-14',
+    'openai/o4-mini-2025-04-16',
+    'openai/o3-2025-04-16',
+    'o1',
+    'openai/o3-pro',
+    'meta-llama/Llama-3.2-90B-Vision-Instruct-Turbo',
+    'google/gemma-2-27b-it',
+    'meta-llama/Llama-Vision-Free',
+    'Qwen/Qwen2-72B-Instruct',
+    'mistralai/Mixtral-8x7B-Instruct-v0.1',
+    'nvidia/Llama-3.1-Nemotron-70B-Instruct-HF',
+    'NousResearch/Nous-Hermes-2-Mixtral-8x7B-DPO',
+    'meta-llama/Llama-3.3-70B-Instruct-Turbo',
+    'meta-llama/Llama-3.2-3B-Instruct-Turbo',
+    'meta-llama/Llama-3.2-11B-Vision-Instruct-Turbo',
+    'meta-llama/Llama-Guard-3-11B-Vision-Turbo',
+    'Qwen/Qwen2.5-7B-Instruct-Turbo',
+    'Qwen/Qwen2.5-Coder-32B-Instruct',
+    'meta-llama/Meta-Llama-3-8B-Instruct-Lite',
+    'meta-llama/Llama-3-8b-chat-hf',
+    'meta-llama/Llama-3-70b-chat-hf',
+    'Qwen/Qwen2.5-72B-Instruct-Turbo',
+    'Qwen/QwQ-32B',
+    'meta-llama/Meta-Llama-3.1-405B-Instruct-Turbo',
+    'meta-llama/Meta-Llama-3.1-8B-Instruct-Turbo',
+    'meta-llama/Meta-Llama-3.1-70B-Instruct-Turbo',
+    'mistralai/Mistral-7B-Instruct-v0.2',
+    'meta-llama/LlamaGuard-2-8b',
+    'mistralai/Mistral-7B-Instruct-v0.1',
+    'mistralai/Mistral-7B-Instruct-v0.3',
+    'meta-llama/Meta-Llama-Guard-3-8B',
+    'meta-llama/llama-4-scout',
+    'meta-llama/llama-4-maverick',
+    'Qwen/Qwen3-235B-A22B-fp8-tput',
+    'claude-3-opus-20240229',
+    'claude-3-haiku-20240307',
+    'claude-3-5-sonnet-20240620',
+    'claude-3-5-sonnet-20241022',
+    'claude-3-5-haiku-20241022',
+    'claude-3-7-sonnet-20250219',
+    'claude-sonnet-4-20250514',
+    'claude-opus-4-20250514',
+    'google/gemini-2.0-flash-exp',
+    'google/gemini-2.0-flash',
+    'google/gemini-2.5-pro',
+    'google/gemini-2.5-flash',
+    'deepseek-chat',
+    'deepseek-reasoner',
+    'qwen-max',
+    'qwen-plus',
+    'qwen-turbo',
+    'qwen-max-2025-01-25',
+    'mistralai/mistral-tiny',
+    'mistralai/mistral-nemo',
+    'anthracite-org/magnum-v4-72b',
+    'nvidia/llama-3.1-nemotron-70b-instruct',
+    'cohere/command-r-plus',
+    'mistralai/codestral-2501',
+    'google/gemma-3-4b-it',
+    'google/gemma-3-12b-it',
+    'google/gemma-3-27b-it',
+    'google/gemini-2.5-flash-lite-preview',
+    'deepseek/deepseek-prover-v2',
+    'google/gemma-3n-e4b-it',
+    'cohere/command-a',
+    'MiniMax-Text-01',
+    'abab6.5s-chat',
+    'minimax/m1',
+    'bagoodex/bagoodex-search-v1',
+    'moonshot/kimi-k2-preview',
+    'perplexity/sonar',
+    'perplexity/sonar-pro',
+    'x-ai/grok-4-07-09',
+    'x-ai/grok-3-beta',
+    'x-ai/grok-3-mini-beta',
+  ],
 };
 
 const getDefaultModel = (provider: string | undefined): string => {
@@ -149,6 +254,8 @@ const getDefaultModel = (provider: string | undefined): string => {
       return MODEL_LIST.mistral[0];
     case 'deepseek':
       return MODEL_LIST.deepseek[0];
+    case 'aimlapi':
+      return MODEL_LIST.aimlapi[0];
     default:
       return MODEL_LIST.openai[0];
   }
@@ -322,9 +429,10 @@ export const configValidators = {
         'test',
         'flowise',
         'groq',
-        'deepseek'
+        'deepseek',
+        'aimlapi',
       ].includes(value) || value.startsWith('ollama'),
-      `${value} is not supported yet, use 'ollama', 'mlx', 'anthropic', 'azure', 'gemini', 'flowise', 'mistral', 'deepseek' or 'openai' (default)`
+      `${value} is not supported yet, use 'ollama', 'mlx', 'anthropic', 'azure', 'gemini', 'flowise', 'mistral', 'deepseek', 'aimlapi' or 'openai' (default)`
     );
 
     return value;
@@ -372,7 +480,8 @@ export enum OCO_AI_PROVIDER_ENUM {
   GROQ = 'groq',
   MISTRAL = 'mistral',
   MLX = 'mlx',
-  DEEPSEEK = 'deepseek'
+  DEEPSEEK = 'deepseek',
+  AIMLAPI = 'aimlapi',
 }
 
 export type ConfigType = {
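
For illustration, here is a minimal sketch of how the exported `MODEL_LIST.aimlapi` list and the new `OCO_AI_PROVIDER_ENUM.AIMLAPI` member added above could be used downstream. The import path and the `pickAimlApiModel` helper are assumptions made for this sketch, not code from the patch.

```ts
import { MODEL_LIST, OCO_AI_PROVIDER_ENUM } from './src/commands/config';

// Keep a requested model only if it is in the AIML API list; otherwise fall
// back to the first entry ('openai/gpt-4o'), which is also the value
// getDefaultModel('aimlapi') returns in the hunk above.
const pickAimlApiModel = (requested?: string): string =>
  requested && MODEL_LIST.aimlapi.includes(requested)
    ? requested
    : MODEL_LIST.aimlapi[0];

console.log(OCO_AI_PROVIDER_ENUM.AIMLAPI);                   // 'aimlapi'
console.log(pickAimlApiModel('claude-3-5-sonnet-20241022')); // kept as requested
console.log(pickAimlApiModel('not-a-real-model'));           // 'openai/gpt-4o'
```
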
diff --git a/src/engine/aimlapi.ts b/src/engine/aimlapi.ts
new file mode 100644
index 00000000..e4af8409
--- /dev/null
+++ b/src/engine/aimlapi.ts
@@ -0,0 +1,47 @@
+import OpenAI from 'openai';
+import axios, { AxiosInstance } from 'axios';
+import { AiEngine, AiEngineConfig } from './Engine';
+
+interface AimlApiConfig extends AiEngineConfig {}
+
+export class AimlApiEngine implements AiEngine {
+  client: AxiosInstance;
+
+  constructor(public config: AimlApiConfig) {
+    this.client = axios.create({
+      baseURL: config.baseURL || 'https://api.aimlapi.com/v1/chat/completions',
+      headers: {
+        Authorization: `Bearer ${config.apiKey}`,
+        'HTTP-Referer': 'https://github.com/di-sukharev/opencommit',
+        'X-Title': 'opencommit',
+        'Content-Type': 'application/json',
+        ...config.customHeaders
+      }
+    });
+  }
+
+  public generateCommitMessage = async (
+    messages: Array<OpenAI.Chat.Completions.ChatCompletionMessageParam>
+  ): Promise<string | null> => {
+    try {
+      const response = await this.client.post('', {
+        model: this.config.model,
+        messages
+      });
+
+      const message = response.data.choices?.[0]?.message;
+      return message?.content ?? null;
+    } catch (error) {
+      const err = error as Error;
+      if (
+        axios.isAxiosError<{ error?: { message: string } }>(error) &&
+        error.response?.status === 401
+      ) {
+        const apiError = error.response.data.error;
+        if (apiError) throw new Error(apiError.message);
+      }
+
+      throw err;
+    }
+  };
+}
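
To see the new engine in isolation, here is a minimal usage sketch under stated assumptions: the config literal only fills the fields the class actually reads (`apiKey`, `baseURL`, `model`, `customHeaders`), the values are placeholders, and the cast covers any extra fields `AiEngineConfig` may require that this diff does not show. In normal operation the engine is constructed by `getEngine()` (next hunk) rather than by hand.

```ts
import { AimlApiEngine } from './src/engine/aimlapi';

// Placeholder configuration; only the fields read by the constructor and by
// generateCommitMessage are filled in here.
const engine = new AimlApiEngine({
  apiKey: process.env.OCO_API_KEY ?? '',
  model: 'openai/gpt-4o',
  baseURL: 'https://api.aimlapi.com/v1/chat/completions',
  customHeaders: {}
} as any); // AiEngineConfig may declare more fields than this sketch shows

// The messages follow the OpenAI chat-completion shape expected by the engine.
const commitMessage = await engine.generateCommitMessage([
  { role: 'system', content: 'Generate a conventional commit message.' },
  { role: 'user', content: 'diff --git a/foo.ts b/foo.ts\n+console.log("hi");' }
]);

console.log(commitMessage ?? 'no message returned');
```
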
diff --git a/src/utils/engine.ts b/src/utils/engine.ts
index 3137a05f..18c64c70 100644
--- a/src/utils/engine.ts
+++ b/src/utils/engine.ts
@@ -11,6 +11,7 @@ import { TestAi, TestMockType } from '../engine/testAi';
 import { GroqEngine } from '../engine/groq';
 import { MLXEngine } from '../engine/mlx';
 import { DeepseekEngine } from '../engine/deepseek';
+import { AimlApiEngine } from '../engine/aimlapi';
 
 export function getEngine(): AiEngine {
   const config = getConfig();
@@ -55,6 +56,9 @@ export function getEngine(): AiEngine {
     case OCO_AI_PROVIDER_ENUM.DEEPSEEK:
       return new DeepseekEngine(DEFAULT_CONFIG);
 
+    case OCO_AI_PROVIDER_ENUM.AIMLAPI:
+      return new AimlApiEngine(DEFAULT_CONFIG);
+
     default:
       return new OpenAiEngine(DEFAULT_CONFIG);
   }

From 8ae2f7ddf1ce65815be7db169703ade9afbe6537 Mon Sep 17 00:00:00 2001
From: di-sukharev
Date: Fri, 1 Aug 2025 16:05:20 +0300
Subject: [PATCH 2/2] 3.2.10

---
 package-lock.json | 4 ++--
 package.json      | 2 +-
 2 files changed, 3 insertions(+), 3 deletions(-)

diff --git a/package-lock.json b/package-lock.json
index ee93607f..49c66fb5 100644
--- a/package-lock.json
+++ b/package-lock.json
@@ -1,12 +1,12 @@
 {
   "name": "opencommit",
-  "version": "3.2.9",
+  "version": "3.2.10",
   "lockfileVersion": 3,
   "requires": true,
   "packages": {
     "": {
       "name": "opencommit",
-      "version": "3.2.9",
+      "version": "3.2.10",
       "license": "MIT",
       "dependencies": {
         "@actions/core": "^1.10.0",
diff --git a/package.json b/package.json
index 3a12332f..095b67ba 100644
--- a/package.json
+++ b/package.json
@@ -1,6 +1,6 @@
 {
   "name": "opencommit",
-  "version": "3.2.9",
+  "version": "3.2.10",
   "description": "Auto-generate impressive commits in 1 second. Killing lame commits with AI 🤯🔫",
   "keywords": [
     "git",
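
With both patches applied, the provider should be selectable like any other, e.g. `oco config set OCO_AI_PROVIDER=aimlapi`, `oco config set OCO_API_KEY=<your AIML API key>`, and optionally `oco config set OCO_MODEL=openai/gpt-4o`. The `oco config set` syntax follows the existing OpenCommit README; whether `OCO_API_URL` is forwarded to the engine's `baseURL` is an assumption about the surrounding config plumbing, since this diff does not show that wiring.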