fix(code-tools): support custom provider & fix Windows Terminal issue (#12504)

* fix(code-tools): support custom Anthropic and OpenAI-Response providers

* refactor: use --config option in codex

* fix(codex): fix OpenAI Codex config parameters

* fix(security): redact sensitive env vars in CodeToolsService logs

* fix(code-tools): increase temp file cleanup delay for Windows Terminal
George·Dong 2026-01-22 11:25:56 +08:00 committed by GitHub
parent ed54bf8810
commit c03475f5db
4 changed files with 65 additions and 35 deletions

View File

@@ -23,6 +23,22 @@ import { promisify } from 'util'
 const execAsync = promisify(require('child_process').exec)
 const logger = loggerService.withContext('CodeToolsService')
+// Sensitive environment variable keys to redact in logs
+const SENSITIVE_ENV_KEYS = ['API_KEY', 'APIKEY', 'AUTHORIZATION', 'TOKEN', 'SECRET', 'PASSWORD']
+/**
+ * Sanitize environment variables for safe logging
+ * Redacts values of sensitive keys to prevent credential leakage
+ */
+function sanitizeEnvForLogging(env: Record<string, string>): Record<string, string> {
+  const sanitized: Record<string, string> = {}
+  for (const [key, value] of Object.entries(env)) {
+    const isSensitive = SENSITIVE_ENV_KEYS.some((k) => key.toUpperCase().includes(k))
+    sanitized[key] = isSensitive ? '<redacted>' : value
+  }
+  return sanitized
+}
 interface VersionInfo {
   installed: string | null
   latest: string | null
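As a quick illustration of the new helper (the values below are hypothetical, not taken from this change):

sanitizeEnvForLogging({ OPENAI_API_KEY: 'sk-xxxx', OPENAI_BASE_URL: 'https://api.example.com/v1' })
// => { OPENAI_API_KEY: '<redacted>', OPENAI_BASE_URL: 'https://api.example.com/v1' }
// 'OPENAI_API_KEY' matches the 'API_KEY' substring and is redacted; the base URL passes through unchanged.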
@@ -617,7 +633,7 @@ class CodeToolsService {
     }
     logger.info('Setting environment variables:', Object.keys(env))
-    logger.info('Environment variable values:', env)
+    logger.debug('Environment variable values:', sanitizeEnvForLogging(env))
     if (isWindows) {
       // Windows uses set command
@@ -640,8 +656,7 @@
         .map(([key, value]) => {
           const sanitizedValue = String(value).replace(/\\/g, '\\\\').replace(/"/g, '\\"')
           const exportCmd = `export ${key}="${sanitizedValue}"`
-          logger.info(`Setting env var: ${key}="${sanitizedValue}"`)
-          logger.info(`Export command: ${exportCmd}`)
+          logger.debug(`Setting env var: ${key}=<redacted>`)
           return exportCmd
         })
         .join(' && ')
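The joined result is a single chained command of the form below (keys and values here are hypothetical; at runtime they come from env):

export OPENAI_API_KEY="..." && export OPENAI_BASE_URL="https://api.example.com/v1" && export OPENAI_MODEL="my-model"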
@@ -657,19 +672,20 @@
       baseCommand = `${uvPath} tool run ${packageName}`
     }
-    // Add configuration parameters for OpenAI Codex
-    if (cliTool === codeTools.openaiCodex && env.OPENAI_MODEL_PROVIDER && env.OPENAI_MODEL_PROVIDER != 'openai') {
-      const provider = env.OPENAI_MODEL_PROVIDER
-      const model = env.OPENAI_MODEL
-      // delete the latest /
-      const baseUrl = env.OPENAI_BASE_URL.replace(/\/$/, '')
+    // Add configuration parameters for OpenAI Codex using command line args
+    if (cliTool === codeTools.openaiCodex && env.OPENAI_MODEL_PROVIDER) {
+      const providerId = env.OPENAI_MODEL_PROVIDER
+      const providerName = env.OPENAI_MODEL_PROVIDER_NAME || providerId
+      const normalizedBaseUrl = env.OPENAI_BASE_URL.replace(/\/$/, '')
+      const model = _model
       const configParams = [
-        `--config model_provider="${provider}"`,
-        `--config model="${model}"`,
-        `--config model_providers.${provider}.name="${provider}"`,
-        `--config model_providers.${provider}.base_url="${baseUrl}"`,
-        `--config model_providers.${provider}.env_key="OPENAI_API_KEY"`
+        `--config model_provider="${providerId}"`,
+        `--config model_providers.${providerId}.name="${providerName}"`,
+        `--config model_providers.${providerId}.base_url="${normalizedBaseUrl}"`,
+        `--config model_providers.${providerId}.env_key="OPENAI_API_KEY"`,
+        `--config model_providers.${providerId}.wire_api="responses"`,
+        `--config model="${model}"`
       ].join(' ')
       baseCommand = `${baseCommand} ${configParams}`
     }
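For reference, when a custom provider is selected, the joined configParams yield a launch command roughly like the one below. This is a sketch only: the provider id my-provider, display name, base URL, and model are hypothetical, and the leading codex binary stands in for whatever baseCommand resolved to above:

codex --config model_provider="my-provider" --config model_providers.my-provider.name="My Provider" \
  --config model_providers.my-provider.base_url="https://api.example.com/v1" \
  --config model_providers.my-provider.env_key="OPENAI_API_KEY" \
  --config model_providers.my-provider.wire_api="responses" --config model="my-model"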
@@ -791,14 +807,15 @@ class CodeToolsService {
         terminalArgs = args
       }
-      // Set cleanup task (delete temp file after 5 minutes)
+      // Set cleanup task (delete temp file after 60 seconds)
+      // Windows Terminal (UWP app) may take longer to initialize and read the file
       setTimeout(() => {
         try {
           fs.existsSync(batFilePath) && fs.unlinkSync(batFilePath)
         } catch (error) {
           logger.warn(`Failed to cleanup temp bat file: ${error}`)
         }
-      }, 10 * 1000) // Delete temp file after 10 seconds
+      }, 60 * 1000) // Delete temp file after 60 seconds
       break
     }

View File

@@ -86,6 +86,11 @@ const CodeToolsPage: FC = () => {
       if (m.provider === 'silicon') {
         return isSiliconAnthropicCompatibleModel(m.id)
       }
+      // Check if model belongs to an anthropic type provider (including custom providers)
+      const anthropicProvider = providers.find((p) => p.id === m.provider)
+      if (anthropicProvider?.type === 'anthropic') {
+        return true
+      }
       return m.id.includes('claude') || CLAUDE_OFFICIAL_SUPPORTED_PROVIDERS.includes(m.provider)
     }
@@ -102,6 +107,11 @@ const CodeToolsPage: FC = () => {
           m.supported_endpoint_types?.includes(type as EndpointType)
         )
       }
+      // Check if model belongs to an openai-response type provider (including custom providers)
+      const openaiProvider = providers.find((p) => p.id === m.provider)
+      if (openaiProvider?.type === 'openai-response') {
+        return true
+      }
       return m.id.includes('openai') || OPENAI_CODEX_SUPPORTED_PROVIDERS.includes(m.provider)
     }
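Both checks accept custom providers by type rather than by a hard-coded id list. For example, hypothetical provider entries such as the following (only the id and type fields are consulted by these checks) would now make their models selectable:

const myClaudeProxy = { id: 'my-claude', type: 'anthropic' }            // passes the Claude check above
const myCodexEndpoint = { id: 'my-endpoint', type: 'openai-response' }  // passes the Codex check above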
}
@@ -120,7 +130,7 @@ const CodeToolsPage: FC = () => {
       return true
     },
-    [selectedCliTool]
+    [selectedCliTool, providers]
   )
   const availableProviders = useMemo(() => {
@@ -215,10 +225,12 @@ const CodeToolsPage: FC = () => {
   }
   // Prepare the launch environment
-  const prepareLaunchEnvironment = async (): Promise<Record<string, string> | null> => {
+  const prepareLaunchEnvironment = async (): Promise<{
+    env: Record<string, string>
+  } | null> => {
     if (selectedCliTool === codeTools.githubCopilotCli) {
       const userEnv = parseEnvironmentVariables(environmentVariables)
-      return userEnv
+      return { env: userEnv }
     }
     if (!selectedModel) return null
@@ -229,7 +241,7 @@ const CodeToolsPage: FC = () => {
     const apiKey = aiProvider.getApiKey()
     // Generate tool-specific environment variables
-    const toolEnv = generateToolEnvironment({
+    const { env: toolEnv } = generateToolEnvironment({
       tool: selectedCliTool,
       model: selectedModel,
       modelProvider,
@@ -240,7 +252,7 @@ const CodeToolsPage: FC = () => {
     // Merge user-defined environment variables
     const userEnv = parseEnvironmentVariables(environmentVariables)
-    return { ...toolEnv, ...userEnv }
+    return { env: { ...toolEnv, ...userEnv } }
   }
   // Execute the launch
@@ -291,13 +303,13 @@ const CodeToolsPage: FC = () => {
     setIsLaunching(true)
     try {
-      const env = await prepareLaunchEnvironment()
-      if (!env) {
+      const result = await prepareLaunchEnvironment()
+      if (!result) {
         window.toast.error(t('code.model_required'))
         return
       }
-      await executeLaunch(env)
+      await executeLaunch(result.env)
     } catch (error) {
       logger.error('start code tools failed:', error as Error)
       window.toast.error(t('code.launch.error'))

View File

@@ -106,7 +106,7 @@ describe('generateToolEnvironment', () => {
     const model = createMockModel('qwen-turbo', 'dashscope')
     const provider = createMockProvider('dashscope', 'https://dashscope.aliyuncs.com/compatible-mode')
-    const env = generateToolEnvironment({
+    const { env } = generateToolEnvironment({
       tool: codeTools.qwenCode,
       model,
       modelProvider: provider,
@@ -122,7 +122,7 @@ describe('generateToolEnvironment', () => {
     const model = createMockModel('qwen-turbo', 'dashscope')
     const provider = createMockProvider('dashscope', 'https://dashscope.aliyuncs.com/compatible-mode/v1')
-    const env = generateToolEnvironment({
+    const { env } = generateToolEnvironment({
       tool: codeTools.qwenCode,
       model,
       modelProvider: provider,
@@ -138,7 +138,7 @@ describe('generateToolEnvironment', () => {
     const model = createMockModel('qwen-turbo', 'dashscope')
     const provider = createMockProvider('dashscope', '')
-    const env = generateToolEnvironment({
+    const { env } = generateToolEnvironment({
       tool: codeTools.qwenCode,
       model,
       modelProvider: provider,
@@ -154,7 +154,7 @@ describe('generateToolEnvironment', () => {
     const model = createMockModel('qwen-plus', 'dashscope')
     const provider = createMockProvider('dashscope', 'https://dashscope.aliyuncs.com/v2')
-    const env = generateToolEnvironment({
+    const { env } = generateToolEnvironment({
       tool: codeTools.qwenCode,
       model,
       modelProvider: provider,
@@ -170,7 +170,7 @@ describe('generateToolEnvironment', () => {
     const model = createMockModel('gpt-4', 'openai')
     const provider = createMockProvider('openai', 'https://api.openai.com')
-    const env = generateToolEnvironment({
+    const { env } = generateToolEnvironment({
       tool: codeTools.openaiCodex,
       model,
       modelProvider: provider,
@@ -186,7 +186,7 @@ describe('generateToolEnvironment', () => {
     const model = createMockModel('gpt-4', 'iflow')
     const provider = createMockProvider('iflow', 'https://api.iflow.cn')
-    const env = generateToolEnvironment({
+    const { env } = generateToolEnvironment({
       tool: codeTools.iFlowCli,
       model,
       modelProvider: provider,
@@ -202,7 +202,7 @@ describe('generateToolEnvironment', () => {
     const model = createMockModel('qwen-turbo', 'dashscope')
     const provider = createMockProvider('dashscope', 'https://dashscope.aliyuncs.com/compatible-mode/')
-    const env = generateToolEnvironment({
+    const { env } = generateToolEnvironment({
       tool: codeTools.qwenCode,
       model,
       modelProvider: provider,
@@ -218,7 +218,7 @@ describe('generateToolEnvironment', () => {
     const model = createMockModel('qwen-plus', 'dashscope')
     const provider = createMockProvider('dashscope', 'https://dashscope.aliyuncs.com/v2beta')
-    const env = generateToolEnvironment({
+    const { env } = generateToolEnvironment({
       tool: codeTools.qwenCode,
       model,
       modelProvider: provider,

View File

@@ -58,7 +58,7 @@ export const CLI_TOOL_PROVIDER_MAP: Record<string, (providers: Provider[]) => Pr
     providers.filter((p) => p.type === 'gemini' || GEMINI_SUPPORTED_PROVIDERS.includes(p.id)),
   [codeTools.qwenCode]: (providers) => providers.filter((p) => p.type.includes('openai')),
   [codeTools.openaiCodex]: (providers) =>
-    providers.filter((p) => p.id === 'openai' || OPENAI_CODEX_SUPPORTED_PROVIDERS.includes(p.id)),
+    providers.filter((p) => p.type === 'openai-response' || OPENAI_CODEX_SUPPORTED_PROVIDERS.includes(p.id)),
   [codeTools.iFlowCli]: (providers) => providers.filter((p) => p.type.includes('openai')),
   [codeTools.githubCopilotCli]: () => [],
   [codeTools.kimiCli]: (providers) => providers.filter((p) => p.type.includes('openai'))
@@ -146,7 +146,7 @@ export const generateToolEnvironment = ({
   modelProvider: Provider
   apiKey: string
   baseUrl: string
-}): Record<string, string> => {
+}): { env: Record<string, string> } => {
   const env: Record<string, string> = {}
   const formattedBaseUrl = formatApiHost(baseUrl)
@@ -181,6 +181,7 @@ export const generateToolEnvironment = ({
       env.OPENAI_BASE_URL = formattedBaseUrl
       env.OPENAI_MODEL = model.id
       env.OPENAI_MODEL_PROVIDER = modelProvider.id
+      env.OPENAI_MODEL_PROVIDER_NAME = modelProvider.name
       break
     case codeTools.iFlowCli:
@@ -200,7 +201,7 @@ export const generateToolEnvironment = ({
       break
   }
-  return env
+  return { env }
 }
 export { default } from './CodeToolsPage'
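Taken together, for the Codex branch generateToolEnvironment now returns an object shaped roughly like the one below (values are illustrative, and variables set outside the hunks shown here, such as the API key, are omitted):

{
  env: {
    OPENAI_BASE_URL: 'https://api.example.com/v1',
    OPENAI_MODEL: 'my-model',
    OPENAI_MODEL_PROVIDER: 'my-provider',
    OPENAI_MODEL_PROVIDER_NAME: 'My Provider'
  }
}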