Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
87 changes: 19 additions & 68 deletions src/background/index.mjs
Original file line number Diff line number Diff line change
Expand Up @@ -5,18 +5,10 @@ import {
sendMessageFeedback,
} from '../services/apis/chatgpt-web'
import { generateAnswersWithBingWebApi } from '../services/apis/bing-web.mjs'
import {
generateAnswersWithChatgptApi,
generateAnswersWithGptCompletionApi,
} from '../services/apis/openai-api'
import { generateAnswersWithCustomApi } from '../services/apis/custom-api.mjs'
import { generateAnswersWithOllamaApi } from '../services/apis/ollama-api.mjs'
import { generateAnswersWithOpenAICompatibleApi } from '../services/apis/openai-api'
import { generateAnswersWithAzureOpenaiApi } from '../services/apis/azure-openai-api.mjs'
Comment on lines +8 to 9
Copy link
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

⚠️ Potential issue | 🟠 Major

🧩 Analysis chain

🏁 Script executed:

#!/bin/bash
# Review-bot verification script: checks whether the extensionless
# '../services/apis/openai-api' import can resolve at runtime by inspecting
# the repository layout, other import sites, and module-resolution config.
# Every probe is best-effort: stderr is discarded and a fallback message is
# printed when a tool or match is missing.
# First, check what files exist in the openai-api path
echo "=== Checking openai-api related files ==="
fd -a 'openai-api' src/services/apis 2>/dev/null || echo "No matches with exact name"
fd 'openai.*api' src/services/apis 2>/dev/null || echo "No matches with pattern"

# How do other files in src import this module (with or without .mjs)?
echo ""
echo "=== Checking all import statements for openai-api in src ==="
rg -n "openai-api" src 2>/dev/null || echo "No matches found"

# A bundler could rewrite extensionless specifiers; list any config present.
echo ""
echo "=== Checking bundler/build config files ==="
fd -a 'vite.config.*|webpack.config.*|rollup.config.*|esbuild.*|parcel.*' . 2>/dev/null | head -20

# package.json "type"/"exports"/"main" govern Node ESM resolution rules.
echo ""
echo "=== Checking package.json for module resolution settings ==="
if [ -f "package.json" ]; then
  cat package.json | jq '.type // .exports // .main // empty' 2>/dev/null || cat package.json | grep -A5 '"type"\|"exports"\|"main"' 2>/dev/null || echo "No explicit module resolution found"
fi

# Final ground truth: the actual files present in the apis directory.
echo ""
echo "=== Listing files in src/services/apis directory ==="
ls -la src/services/apis/ 2>/dev/null || echo "Directory not found"

Repository: ChatGPTBox-dev/chatGPTBox

Length of output: 2909


Add .mjs extension to openai-api import for consistency.

The file src/services/apis/openai-api.mjs exists, and every other import of it throughout the codebase uses the explicit .mjs extension (e.g., in ollama-api.mjs, openrouter-api.mjs). Line 9 of the same file already correctly imports azure-openai-api.mjs with the extension. Without explicit file extensions, the import will fail at runtime.

Fix
-import { generateAnswersWithOpenAICompatibleApi } from '../services/apis/openai-api'
+import { generateAnswersWithOpenAICompatibleApi } from '../services/apis/openai-api.mjs'
📝 Committable suggestion

‼️ IMPORTANT
Carefully review the code before committing. Ensure that it accurately replaces the highlighted code, contains no missing lines, and has no issues with indentation. Thoroughly test & benchmark the code to ensure it meets the requirements.

Suggested change
import { generateAnswersWithOpenAICompatibleApi } from '../services/apis/openai-api'
import { generateAnswersWithAzureOpenaiApi } from '../services/apis/azure-openai-api.mjs'
import { generateAnswersWithOpenAICompatibleApi } from '../services/apis/openai-api.mjs'
import { generateAnswersWithAzureOpenaiApi } from '../services/apis/azure-openai-api.mjs'
🤖 Prompt for AI Agents
Verify each finding against the current code and only fix it if needed.

In `@src/background/index.mjs` around lines 8 - 9, Update the import for
generateAnswersWithOpenAICompatibleApi to include the explicit .mjs extension
(match the style used for generateAnswersWithAzureOpenaiApi); change the import
source '../services/apis/openai-api' to '../services/apis/openai-api.mjs' so the
module loader can resolve the file at runtime and remain consistent with other
imports (reference symbol: generateAnswersWithOpenAICompatibleApi).

import { generateAnswersWithClaudeApi } from '../services/apis/claude-api.mjs'
import { generateAnswersWithChatGLMApi } from '../services/apis/chatglm-api.mjs'
import { generateAnswersWithWaylaidwandererApi } from '../services/apis/waylaidwanderer-api.mjs'
import { generateAnswersWithOpenRouterApi } from '../services/apis/openrouter-api.mjs'
import { generateAnswersWithAimlApi } from '../services/apis/aiml-api.mjs'
import {
defaultConfig,
getUserConfig,
Expand Down Expand Up @@ -52,10 +44,8 @@ import { refreshMenu } from './menus.mjs'
import { registerCommands } from './commands.mjs'
import { generateAnswersWithBardWebApi } from '../services/apis/bard-web.mjs'
import { generateAnswersWithClaudeWebApi } from '../services/apis/claude-web.mjs'
import { generateAnswersWithMoonshotCompletionApi } from '../services/apis/moonshot-api.mjs'
import { generateAnswersWithMoonshotWebApi } from '../services/apis/moonshot-web.mjs'
import { isUsingModelName } from '../utils/model-name-convert.mjs'
import { generateAnswersWithDeepSeekApi } from '../services/apis/deepseek-api.mjs'

const RECONNECT_CONFIG = {
MAX_ATTEMPTS: 5,
Expand Down Expand Up @@ -419,6 +409,20 @@ function setPortProxy(port, proxyTabId) {
}
}

/**
 * Reports whether the session targets a provider that is served through the
 * shared OpenAI-compatible chat-completions handler.
 * @param {object} session - active chat session descriptor
 * @returns {boolean} true when any OpenAI-compatible predicate matches
 */
function isUsingOpenAICompatibleApiSession(session) {
  // Table of provider predicates; `.some` short-circuits on the first match,
  // mirroring the original `||` chain.
  const compatibleChecks = [
    isUsingCustomModel,
    isUsingChatgptApiModel,
    isUsingMoonshotApiModel,
    isUsingChatGLMApiModel,
    isUsingDeepSeekApiModel,
    isUsingOllamaApiModel,
    isUsingOpenRouterApiModel,
    isUsingAimlApiModel,
    isUsingGptCompletionApiModel,
  ]
  return compatibleChecks.some((check) => check(session))
}

async function executeApi(session, port, config) {
console.log(
`[background] executeApi called for model: ${session.modelName}, apiMode: ${session.apiMode}`,
Expand All @@ -434,29 +438,7 @@ async function executeApi(session, port, config) {
)
}
try {
if (isUsingCustomModel(session)) {
console.debug('[background] Using Custom Model API')
if (!session.apiMode)
await generateAnswersWithCustomApi(
port,
session.question,
session,
config.customModelApiUrl.trim() || 'http://localhost:8000/v1/chat/completions',
config.customApiKey,
config.customModelName,
)
else
await generateAnswersWithCustomApi(
port,
session.question,
session,
session.apiMode.customUrl?.trim() ||
config.customModelApiUrl.trim() ||
'http://localhost:8000/v1/chat/completions',
session.apiMode.apiKey?.trim() || config.customApiKey,
session.apiMode.customName,
)
} else if (isUsingChatgptWebModel(session)) {
if (isUsingChatgptWebModel(session)) {
console.debug('[background] Using ChatGPT Web Model')
let tabId
if (
Expand Down Expand Up @@ -581,46 +563,15 @@ async function executeApi(session, port, config) {
console.debug('[background] Using Gemini Web Model')
const cookies = await getBardCookies()
await generateAnswersWithBardWebApi(port, session.question, session, cookies)
} else if (isUsingChatgptApiModel(session)) {
console.debug('[background] Using ChatGPT API Model')
await generateAnswersWithChatgptApi(port, session.question, session, config.apiKey)
} else if (isUsingOpenAICompatibleApiSession(session)) {
console.debug('[background] Using OpenAI-compatible API provider')
await generateAnswersWithOpenAICompatibleApi(port, session.question, session, config)
} else if (isUsingClaudeApiModel(session)) {
console.debug('[background] Using Claude API Model')
await generateAnswersWithClaudeApi(port, session.question, session)
} else if (isUsingMoonshotApiModel(session)) {
console.debug('[background] Using Moonshot API Model')
await generateAnswersWithMoonshotCompletionApi(
port,
session.question,
session,
config.moonshotApiKey,
)
} else if (isUsingChatGLMApiModel(session)) {
console.debug('[background] Using ChatGLM API Model')
await generateAnswersWithChatGLMApi(port, session.question, session)
} else if (isUsingDeepSeekApiModel(session)) {
console.debug('[background] Using DeepSeek API Model')
await generateAnswersWithDeepSeekApi(port, session.question, session, config.deepSeekApiKey)
} else if (isUsingOllamaApiModel(session)) {
console.debug('[background] Using Ollama API Model')
await generateAnswersWithOllamaApi(port, session.question, session)
} else if (isUsingOpenRouterApiModel(session)) {
console.debug('[background] Using OpenRouter API Model')
await generateAnswersWithOpenRouterApi(
port,
session.question,
session,
config.openRouterApiKey,
)
} else if (isUsingAimlApiModel(session)) {
console.debug('[background] Using AIML API Model')
await generateAnswersWithAimlApi(port, session.question, session, config.aimlApiKey)
} else if (isUsingAzureOpenAiApiModel(session)) {
console.debug('[background] Using Azure OpenAI API Model')
await generateAnswersWithAzureOpenaiApi(port, session.question, session)
} else if (isUsingGptCompletionApiModel(session)) {
console.debug('[background] Using GPT Completion API Model')
await generateAnswersWithGptCompletionApi(port, session.question, session, config.apiKey)
} else if (isUsingGithubThirdPartyApiModel(session)) {
console.debug('[background] Using Github Third Party API Model')
await generateAnswersWithWaylaidwandererApi(port, session.question, session)
Expand Down
Loading
Loading