refactor: update package dependencies to use patch versions for @ai-sdk/google, @ai-sdk/openai, and @ai-sdk/openai-compatible

suyao 2025-12-29 21:36:01 +08:00
parent 42b4a8f7d1
commit 5bb19b3e30
No known key found for this signature in database
11 changed files with 419 additions and 70 deletions

View File

@@ -0,0 +1,37 @@
diff --git a/dist/index.js b/dist/index.js
index 8ba8ec4fe78573068683801b75b92cdbe7132b01..cea5346f36296e38ac4fbdf7cf46b330c26fa4f9 100644
--- a/dist/index.js
+++ b/dist/index.js
@@ -536,7 +536,7 @@ function convertToGoogleGenerativeAIMessages(prompt, options) {
// src/get-model-path.ts
function getModelPath(modelId) {
- return modelId.includes("/") ? modelId : `models/${modelId}`;
+ return modelId.includes("models/") ? modelId : `models/${modelId}`;
}
// src/google-generative-ai-options.ts
diff --git a/dist/index.mjs b/dist/index.mjs
index 0e96cc445379e63fb3c08a3a6238f1c55878d8b5..7e76a0ed624844b5aa8cfc25b61b901572b48a83 100644
--- a/dist/index.mjs
+++ b/dist/index.mjs
@@ -542,7 +542,7 @@ function convertToGoogleGenerativeAIMessages(prompt, options) {
// src/get-model-path.ts
function getModelPath(modelId) {
- return modelId.includes("/") ? modelId : `models/${modelId}`;
+ return modelId.includes("models/") ? modelId : `models/${modelId}`;
}
// src/google-generative-ai-options.ts
@@ -1998,8 +1998,7 @@ function createGoogleGenerativeAI(options = {}) {
}
var google = createGoogleGenerativeAI();
export {
- VERSION,
createGoogleGenerativeAI,
- google
+ google, VERSION
};
//# sourceMappingURL=index.mjs.map
\ No newline at end of file
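
Note on the getModelPath patch above: only ids that already contain "models/" are treated as full paths; any other id, including ids with a different kind of slash in them, now receives the "models/" prefix. A minimal illustration of the new behaviour (the ids below are examples, not taken from this commit):

function getModelPath(modelId: string): string {
  return modelId.includes("models/") ? modelId : `models/${modelId}`;
}

getModelPath("gemini-2.0-flash");        // "models/gemini-2.0-flash"
getModelPath("models/gemini-2.0-flash"); // unchanged
getModelPath("vendor/custom-model");     // "models/vendor/custom-model" (before the patch this id was returned unchanged)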

View File

@@ -0,0 +1,261 @@
diff --git a/dist/index.d.mts b/dist/index.d.mts
index 332ebc005dbfca852aa18970aebda3127142a8a0..fc0f7b56991c4b9e3fbd250f8fd526ae38890320 100644
--- a/dist/index.d.mts
+++ b/dist/index.d.mts
@@ -22,6 +22,7 @@ declare const openaiCompatibleProviderOptions: z.ZodObject<{
user: z.ZodOptional<z.ZodString>;
reasoningEffort: z.ZodOptional<z.ZodString>;
textVerbosity: z.ZodOptional<z.ZodString>;
+ sendReasoning: z.ZodOptional<z.ZodBoolean>;
}, z.core.$strip>;
type OpenAICompatibleProviderOptions = z.infer<typeof openaiCompatibleProviderOptions>;
diff --git a/dist/index.d.ts b/dist/index.d.ts
index 332ebc005dbfca852aa18970aebda3127142a8a0..fc0f7b56991c4b9e3fbd250f8fd526ae38890320 100644
--- a/dist/index.d.ts
+++ b/dist/index.d.ts
@@ -22,6 +22,7 @@ declare const openaiCompatibleProviderOptions: z.ZodObject<{
user: z.ZodOptional<z.ZodString>;
reasoningEffort: z.ZodOptional<z.ZodString>;
textVerbosity: z.ZodOptional<z.ZodString>;
+ sendReasoning: z.ZodOptional<z.ZodBoolean>;
}, z.core.$strip>;
type OpenAICompatibleProviderOptions = z.infer<typeof openaiCompatibleProviderOptions>;
diff --git a/dist/index.js b/dist/index.js
index 045e2e85625fe8a9e8557778a6e6fc0a07135198..dee8b6f2e65deda9fb16d83fc049c5dae2fb2bd0 100644
--- a/dist/index.js
+++ b/dist/index.js
@@ -98,7 +98,7 @@ function getOpenAIMetadata(message) {
var _a, _b;
return (_b = (_a = message == null ? void 0 : message.providerOptions) == null ? void 0 : _a.openaiCompatible) != null ? _b : {};
}
-function convertToOpenAICompatibleChatMessages(prompt) {
+function convertToOpenAICompatibleChatMessages({prompt, options}) {
var _a;
const messages = [];
for (const { role, content, ...message } of prompt) {
@@ -149,6 +149,7 @@ function convertToOpenAICompatibleChatMessages(prompt) {
}
case "assistant": {
let text = "";
+ let reasoning_text = "";
const toolCalls = [];
for (const part of content) {
const partMetadata = getOpenAIMetadata(part);
@@ -157,6 +158,12 @@ function convertToOpenAICompatibleChatMessages(prompt) {
text += part.text;
break;
}
+ case "reasoning": {
+ if (options.sendReasoning) {
+ reasoning_text += part.text;
+ }
+ break;
+ }
case "tool-call": {
toolCalls.push({
id: part.toolCallId,
@@ -174,6 +181,7 @@ function convertToOpenAICompatibleChatMessages(prompt) {
messages.push({
role: "assistant",
content: text,
+ reasoning_content: reasoning_text || void 0,
tool_calls: toolCalls.length > 0 ? toolCalls : void 0,
...metadata
});
@@ -264,7 +272,8 @@ var openaiCompatibleProviderOptions = import_v42.z.object({
/**
* Controls the verbosity of the generated text. Defaults to `medium`.
*/
- textVerbosity: import_v42.z.string().optional()
+ textVerbosity: import_v42.z.string().optional(),
+ sendReasoning: import_v42.z.boolean().optional()
});
// src/chat/openai-compatible-prepare-tools.ts
@@ -428,7 +437,7 @@ var OpenAICompatibleChatLanguageModel = class {
reasoning_effort: compatibleOptions.reasoningEffort,
verbosity: compatibleOptions.textVerbosity,
// messages:
- messages: convertToOpenAICompatibleChatMessages(prompt),
+ messages: convertToOpenAICompatibleChatMessages({prompt, options: compatibleOptions}),
// tools:
tools: openaiTools,
tool_choice: openaiToolChoice
@@ -464,6 +473,15 @@ var OpenAICompatibleChatLanguageModel = class {
if (text != null && text.length > 0) {
content.push({ type: "text", text });
}
+ if (choice.message.images) {
+ for (const image of choice.message.images) {
+ const match1 = image.image_url.url.match(/^data:([^;]+)/);
+ const match2 = image.image_url.url.match(/^data:[^;]*;base64,(.+)$/);
+ content.push({
+ type: 'file',
+ mediaType: match1 ? (match1[1] ?? 'image/jpeg') : 'image/jpeg',
+ data: match2 ? match2[1] : image.image_url.url,
+ });
+ }
+ }
const reasoning = (_a = choice.message.reasoning_content) != null ? _a : choice.message.reasoning;
if (reasoning != null && reasoning.length > 0) {
content.push({
@@ -618,6 +636,17 @@ var OpenAICompatibleChatLanguageModel = class {
delta: delta.content
});
}
+ if (delta.images) {
+ for (const image of delta.images) {
+ const match1 = image.image_url.url.match(/^data:([^;]+)/)
+ const match2 = image.image_url.url.match(/^data:[^;]*;base64,(.+)$/);
+ controller.enqueue({
+ type: 'file',
+ mediaType: match1 ? (match1[1] ?? 'image/jpeg') : 'image/jpeg',
+ data: match2 ? match2[1] : image.image_url.url,
+ });
+ }
+ }
if (delta.tool_calls != null) {
for (const toolCallDelta of delta.tool_calls) {
const index = toolCallDelta.index;
@@ -779,6 +808,14 @@ var OpenAICompatibleChatResponseSchema = import_v43.z.object({
arguments: import_v43.z.string()
})
})
+ ).nullish(),
+ images: import_v43.z.array(
+ import_v43.z.object({
+ type: import_v43.z.literal('image_url'),
+ image_url: import_v43.z.object({
+ url: import_v43.z.string(),
+ })
+ })
).nullish()
}),
finish_reason: import_v43.z.string().nullish()
@@ -808,7 +845,15 @@ var chunkBaseSchema = import_v43.z.object({
arguments: import_v43.z.string().nullish()
})
})
- ).nullish()
+ ).nullish(),
+ images: import_v43.z.array(
+ import_v43.z.object({
+ type: import_v43.z.literal('image_url'),
+ image_url: import_v43.z.object({
+ url: import_v43.z.string(),
+ })
+ })
+ ).nullish()
}).nullish(),
finish_reason: import_v43.z.string().nullish()
})
diff --git a/dist/index.mjs b/dist/index.mjs
index e03379447a27c5801526e327a0cc8bf95767aafb..b70f276bda7e595f13bcaa99d3a2b026ac847990 100644
--- a/dist/index.mjs
+++ b/dist/index.mjs
@@ -80,7 +80,7 @@ function getOpenAIMetadata(message) {
var _a, _b;
return (_b = (_a = message == null ? void 0 : message.providerOptions) == null ? void 0 : _a.openaiCompatible) != null ? _b : {};
}
-function convertToOpenAICompatibleChatMessages(prompt) {
+function convertToOpenAICompatibleChatMessages({prompt, options}) {
var _a;
const messages = [];
for (const { role, content, ...message } of prompt) {
@@ -131,6 +131,7 @@ function convertToOpenAICompatibleChatMessages(prompt) {
}
case "assistant": {
let text = "";
+ let reasoning_text = "";
const toolCalls = [];
for (const part of content) {
const partMetadata = getOpenAIMetadata(part);
@@ -139,6 +140,12 @@ function convertToOpenAICompatibleChatMessages(prompt) {
text += part.text;
break;
}
+ case "reasoning": {
+ if (options.sendReasoning) {
+ reasoning_text += part.text;
+ }
+ break;
+ }
case "tool-call": {
toolCalls.push({
id: part.toolCallId,
@@ -156,6 +163,7 @@ function convertToOpenAICompatibleChatMessages(prompt) {
messages.push({
role: "assistant",
content: text,
+ reasoning_content: reasoning_text || undefined,
tool_calls: toolCalls.length > 0 ? toolCalls : void 0,
...metadata
});
@@ -246,7 +254,8 @@ var openaiCompatibleProviderOptions = z2.object({
/**
* Controls the verbosity of the generated text. Defaults to `medium`.
*/
- textVerbosity: z2.string().optional()
+ textVerbosity: z2.string().optional(),
+ sendReasoning: z2.boolean().optional()
});
// src/chat/openai-compatible-prepare-tools.ts
@@ -412,7 +421,7 @@ var OpenAICompatibleChatLanguageModel = class {
reasoning_effort: compatibleOptions.reasoningEffort,
verbosity: compatibleOptions.textVerbosity,
// messages:
- messages: convertToOpenAICompatibleChatMessages(prompt),
+ messages: convertToOpenAICompatibleChatMessages({prompt, options: compatibleOptions}),
// tools:
tools: openaiTools,
tool_choice: openaiToolChoice
@@ -455,6 +464,15 @@ var OpenAICompatibleChatLanguageModel = class {
text: reasoning
});
}
+ if (choice.message.images) {
+ for (const image of choice.message.images) {
+ const match1 = image.image_url.url.match(/^data:([^;]+)/);
+ const match2 = image.image_url.url.match(/^data:[^;]*;base64,(.+)$/);
+ content.push({
+ type: 'file',
+ mediaType: match1 ? (match1[1] ?? 'image/jpeg') : 'image/jpeg',
+ data: match2 ? match2[1] : image.image_url.url,
+ });
+ }
+ }
if (choice.message.tool_calls != null) {
for (const toolCall of choice.message.tool_calls) {
content.push({
@@ -602,6 +620,17 @@ var OpenAICompatibleChatLanguageModel = class {
delta: delta.content
});
}
+ if (delta.images) {
+ for (const image of delta.images) {
+ const match1 = image.image_url.url.match(/^data:([^;]+)/)
+ const match2 = image.image_url.url.match(/^data:[^;]*;base64,(.+)$/);
+ controller.enqueue({
+ type: 'file',
+ mediaType: match1 ? (match1[1] ?? 'image/jpeg') : 'image/jpeg',
+ data: match2 ? match2[1] : image.image_url.url,
+ });
+ }
+ }
if (delta.tool_calls != null) {
for (const toolCallDelta of delta.tool_calls) {
const index = toolCallDelta.index;
@@ -763,6 +792,14 @@ var OpenAICompatibleChatResponseSchema = z3.object({
arguments: z3.string()
})
})
+ ).nullish(),
+ images: z3.array(
+ z3.object({
+ type: z3.literal('image_url'),
+ image_url: z3.object({
+ url: z3.string(),
+ })
+ })
).nullish()
}),
finish_reason: z3.string().nullish()
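
Note on the openai-compatible patch above: it adds a sendReasoning flag to openaiCompatibleProviderOptions. When enabled, assistant "reasoning" parts from earlier turns are concatenated and sent back to the server as reasoning_content on the assistant message; the patch also maps images returned by the server (choice.message.images / delta.images data URLs) to file content parts. A hedged usage sketch for the flag, assuming the AI SDK v5 call shape and that these options are read under the 'openai-compatible' provider-options key (provider name, base URL, and model id below are placeholders):

import { createOpenAICompatible } from '@ai-sdk/openai-compatible';
import { generateText } from 'ai';

const provider = createOpenAICompatible({
  name: 'my-provider',               // placeholder provider name
  baseURL: 'https://example.com/v1', // placeholder endpoint
});

const result = await generateText({
  model: provider('my-model'),       // placeholder model id
  messages: [{ role: 'user', content: 'hello' }],
  providerOptions: {
    // send prior assistant reasoning back to the server as reasoning_content
    'openai-compatible': { sendReasoning: true },
  },
});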

View File

@@ -0,0 +1,72 @@
diff --git a/dist/index.js b/dist/index.js
index 5038836c1407e819e83df75195bf320cc1891203..91de28a330aa72f90ca185af5001481a90cf8561 100644
--- a/dist/index.js
+++ b/dist/index.js
@@ -999,6 +999,21 @@ var OpenAIChatLanguageModel = class {
return;
}
const delta = choice.delta;
+ const reasoningContent = delta.reasoning_content;
+ if (reasoningContent) {
+ if (!isActiveReasoning) {
+ controller.enqueue({
+ type: 'reasoning-start',
+ id: 'reasoning-0',
+ });
+ isActiveReasoning = true;
+ }
+ controller.enqueue({
+ type: 'reasoning-delta',
+ id: 'reasoning-0',
+ delta: reasoningContent,
+ });
+ }
if (delta.content != null) {
if (!isActiveText) {
controller.enqueue({ type: "text-start", id: "0" });
@@ -1111,6 +1126,9 @@ var OpenAIChatLanguageModel = class {
}
},
flush(controller) {
+ if (isActiveReasoning) {
+ controller.enqueue({ type: 'reasoning-end', id: 'reasoning-0' });
+ }
if (isActiveText) {
controller.enqueue({ type: "text-end", id: "0" });
}
diff --git a/dist/index.mjs b/dist/index.mjs
index 2a58125b0f80f9b121c7f1a615737c5a266e822b..0aa2adbc87030e649355eb4dca25a8d0bca1e25e 100644
--- a/dist/index.mjs
+++ b/dist/index.mjs
@@ -990,6 +990,21 @@ var OpenAIChatLanguageModel = class {
return;
}
const delta = choice.delta;
+ const reasoningContent = delta.reasoning_content;
+ if (reasoningContent) {
+ if (!isActiveReasoning) {
+ controller.enqueue({
+ type: 'reasoning-start',
+ id: 'reasoning-0',
+ });
+ isActiveReasoning = true;
+ }
+ controller.enqueue({
+ type: 'reasoning-delta',
+ id: 'reasoning-0',
+ delta: reasoningContent,
+ });
+ }
if (delta.content != null) {
if (!isActiveText) {
controller.enqueue({ type: "text-start", id: "0" });
@@ -1102,6 +1117,9 @@ var OpenAIChatLanguageModel = class {
}
},
flush(controller) {
+ if (isActiveReasoning) {
+ controller.enqueue({ type: 'reasoning-end', id: 'reasoning-0' });
+ }
if (isActiveText) {
controller.enqueue({ type: "text-end", id: "0" });
}
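
Note on the openai patch above: streamed delta.reasoning_content is surfaced with the same start/delta/end bookkeeping the model already uses for text: an isActiveReasoning flag opens a single 'reasoning-start' part, each non-empty chunk becomes a 'reasoning-delta', and flush() closes the block with 'reasoning-end'. A self-contained sketch of that accounting pattern (part shapes copied from the patch; the input chunks are illustrative):

type ReasoningPart =
  | { type: 'reasoning-start'; id: string }
  | { type: 'reasoning-delta'; id: string; delta: string }
  | { type: 'reasoning-end'; id: string };

// Collapse a sequence of chat-completion chunks into reasoning stream parts.
function reasoningParts(chunks: Array<{ reasoning_content?: string }>): ReasoningPart[] {
  const out: ReasoningPart[] = [];
  let isActiveReasoning = false;
  for (const chunk of chunks) {
    const reasoningContent = chunk.reasoning_content;
    if (reasoningContent) {
      if (!isActiveReasoning) {
        out.push({ type: 'reasoning-start', id: 'reasoning-0' });
        isActiveReasoning = true;
      }
      out.push({ type: 'reasoning-delta', id: 'reasoning-0', delta: reasoningContent });
    }
  }
  // In the patch this final part is emitted in flush(), once the upstream stream ends.
  if (isActiveReasoning) {
    out.push({ type: 'reasoning-end', id: 'reasoning-0' });
  }
  return out;
}

reasoningParts([{ reasoning_content: 'thinking' }, {}, { reasoning_content: 'done' }]);
// [reasoning-start, reasoning-delta("thinking"), reasoning-delta("done"), reasoning-end]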

View File

@@ -114,11 +114,11 @@
"@ai-sdk/anthropic": "^3.0.0",
"@ai-sdk/cerebras": "^2.0.0",
"@ai-sdk/gateway": "^3.0.0",
"@ai-sdk/google": "^3.0.0",
"@ai-sdk/google": "patch:@ai-sdk/google@npm%3A3.0.0#~/.yarn/patches/@ai-sdk-google-npm-3.0.0-ef668576ff.patch",
"@ai-sdk/google-vertex": "^4.0.0",
"@ai-sdk/huggingface": "^1.0.0",
"@ai-sdk/mistral": "^3.0.0",
"@ai-sdk/openai": "^3.0.0",
"@ai-sdk/openai": "patch:@ai-sdk/openai@npm%3A3.0.0#~/.yarn/patches/@ai-sdk-openai-npm-3.0.0-0b1bba0aab.patch",
"@ai-sdk/perplexity": "^3.0.0",
"@ai-sdk/test-server": "^1.0.0",
"@ant-design/v5-patch-for-react-19": "^1.0.3",
@@ -414,12 +414,15 @@
"@langchain/openai@npm:>=0.1.0 <0.6.0": "patch:@langchain/openai@npm%3A1.0.0#~/.yarn/patches/@langchain-openai-npm-1.0.0-474d0ad9d4.patch",
"@langchain/openai@npm:^0.3.16": "patch:@langchain/openai@npm%3A1.0.0#~/.yarn/patches/@langchain-openai-npm-1.0.0-474d0ad9d4.patch",
"@langchain/openai@npm:>=0.2.0 <0.7.0": "patch:@langchain/openai@npm%3A1.0.0#~/.yarn/patches/@langchain-openai-npm-1.0.0-474d0ad9d4.patch",
"@ai-sdk/openai@npm:^2.0.42": "patch:@ai-sdk/openai@npm%3A2.0.85#~/.yarn/patches/@ai-sdk-openai-npm-2.0.85-27483d1d6a.patch",
"@ai-sdk/openai@npm:^2.0.42": "patch:@ai-sdk/openai@npm%3A3.0.0#~/.yarn/patches/@ai-sdk-openai-npm-3.0.0-0b1bba0aab.patch",
"@ai-sdk/google@npm:^2.0.40": "patch:@ai-sdk/google@npm%3A2.0.40#~/.yarn/patches/@ai-sdk-google-npm-2.0.40-47e0eeee83.patch",
"@ai-sdk/openai-compatible@npm:^1.0.27": "patch:@ai-sdk/openai-compatible@npm%3A1.0.27#~/.yarn/patches/@ai-sdk-openai-compatible-npm-1.0.27-06f74278cf.patch",
"@ai-sdk/google@npm:2.0.49": "patch:@ai-sdk/google@npm%3A2.0.49#~/.yarn/patches/@ai-sdk-google-npm-2.0.49-84720f41bd.patch",
"@ai-sdk/openai-compatible@npm:1.0.27": "patch:@ai-sdk/openai-compatible@npm%3A1.0.28#~/.yarn/patches/@ai-sdk-openai-compatible-npm-1.0.28-5705188855.patch",
"@ai-sdk/openai-compatible@npm:^1.0.19": "patch:@ai-sdk/openai-compatible@npm%3A1.0.28#~/.yarn/patches/@ai-sdk-openai-compatible-npm-1.0.28-5705188855.patch"
"@ai-sdk/openai-compatible@npm:^1.0.19": "patch:@ai-sdk/openai-compatible@npm%3A1.0.28#~/.yarn/patches/@ai-sdk-openai-compatible-npm-1.0.28-5705188855.patch",
"@ai-sdk/openai-compatible@npm:2.0.0": "patch:@ai-sdk/openai-compatible@npm%3A2.0.0#~/.yarn/patches/@ai-sdk-openai-compatible-npm-2.0.0-d8d5f27c45.patch",
"@ai-sdk/openai@npm:3.0.0": "patch:@ai-sdk/openai@npm%3A3.0.0#~/.yarn/patches/@ai-sdk-openai-npm-3.0.0-0b1bba0aab.patch",
"@ai-sdk/google@npm:3.0.0": "patch:@ai-sdk/google@npm%3A3.0.0#~/.yarn/patches/@ai-sdk-google-npm-3.0.0-ef668576ff.patch"
},
"packageManager": "yarn@4.9.1",
"lint-staged": {

View File

@@ -41,7 +41,7 @@
"ai": "^5.0.26"
},
"dependencies": {
"@ai-sdk/openai-compatible": "^2.0.0",
"@ai-sdk/openai-compatible": "patch:@ai-sdk/openai-compatible@npm%3A2.0.0#~/.yarn/patches/@ai-sdk-openai-compatible-npm-2.0.0-d8d5f27c45.patch",
"@ai-sdk/provider": "^3.0.0",
"@ai-sdk/provider-utils": "^4.0.0"
},

View File

@@ -43,7 +43,7 @@
"@ai-sdk/anthropic": "^3.0.0",
"@ai-sdk/azure": "^3.0.0",
"@ai-sdk/deepseek": "^2.0.0",
"@ai-sdk/openai-compatible": "^2.0.0",
"@ai-sdk/openai-compatible": "patch:@ai-sdk/openai-compatible@npm%3A2.0.0#~/.yarn/patches/@ai-sdk-openai-compatible-npm-2.0.0-d8d5f27c45.patch",
"@ai-sdk/provider": "^3.0.0",
"@ai-sdk/provider-utils": "^4.0.0",
"@ai-sdk/xai": "^3.0.0",

View File

@@ -190,7 +190,7 @@ export function createMockProviderV3(overrides?: {
* })
* ```
*/
- export function createMockMiddleware(_options?: { name?: string }): LanguageModelV3Middleware {
+ export function createMockMiddleware(): LanguageModelV3Middleware {
return {
specificationVersion: 'v3',
wrapGenerate: vi.fn((doGenerate) => doGenerate),

View File

@@ -182,7 +182,7 @@ describe('ModelResolver', () => {
describe('Middleware Application', () => {
it('should apply middlewares to resolved model', async () => {
- const mockMiddleware = createMockMiddleware({ name: 'test-middleware' })
+ const mockMiddleware = createMockMiddleware()
const result = await resolver.resolveLanguageModel('gpt-4', 'openai', undefined, [mockMiddleware])
@@ -190,8 +190,8 @@
})
it('should apply multiple middlewares in order', async () => {
- const middleware1 = createMockMiddleware({ name: 'middleware-1' })
- const middleware2 = createMockMiddleware({ name: 'middleware-2' })
+ const middleware1 = createMockMiddleware()
+ const middleware2 = createMockMiddleware()
const result = await resolver.resolveLanguageModel('gpt-4', 'openai', undefined, [middleware1, middleware2])

View File

@@ -207,29 +207,6 @@ describe('promptToolUsePlugin', () => {
expect(result.system).toBe('Custom prompt with 1 tools and user prompt: User prompt')
})
- it('should use custom createSystemMessage when provided', async () => {
- const customCreateSystemMessage = vi.fn(() => {
- return `Modified system message`
- })
- const plugin = createPromptToolUsePlugin({
- createSystemMessage: customCreateSystemMessage
- })
- const context = createMockContext()
- const params = createMockStreamParams({
- system: 'Original',
- tools: {
- test: createMockTool('test')
- }
- })
- const result = await Promise.resolve(plugin.transformParams!(params, context))
- expect(customCreateSystemMessage).toHaveBeenCalled()
- expect(result.system).toContain('Modified')
- })
it('should save originalParams to context', async () => {
const plugin = createPromptToolUsePlugin()
const context = createMockContext()

View File

@@ -269,7 +269,7 @@ describe('RuntimeExecutor - Model Resolution', () => {
describe('Middleware Application', () => {
it('should apply middlewares to string modelId', async () => {
- const testMiddleware = createMockMiddleware({ name: 'test-middleware' })
+ const testMiddleware = createMockMiddleware()
await executor.generateText(
{
@@ -285,8 +285,8 @@
})
it('should apply multiple middlewares in order', async () => {
- const middleware1 = createMockMiddleware({ name: 'middleware-1' })
- const middleware2 = createMockMiddleware({ name: 'middleware-2' })
+ const middleware1 = createMockMiddleware()
+ const middleware2 = createMockMiddleware()
await executor.generateText(
{
@@ -303,7 +303,7 @@
})
it('should pass middlewares to model resolver for string modelIds', async () => {
- const testMiddleware = createMockMiddleware({ name: 'test-middleware' })
+ const testMiddleware = createMockMiddleware()
await executor.generateText(
{
@@ -346,7 +346,7 @@
})
it('should work with middlewares in streamText', async () => {
- const middleware = createMockMiddleware({ name: 'stream-middleware' })
+ const middleware = createMockMiddleware()
await executor.streamText(
{

View File

@@ -168,7 +168,7 @@ __metadata:
languageName: node
linkType: hard
"@ai-sdk/google@npm:3.0.0, @ai-sdk/google@npm:^3.0.0":
"@ai-sdk/google@npm:3.0.0":
version: 3.0.0
resolution: "@ai-sdk/google@npm:3.0.0"
dependencies:
@@ -180,6 +180,18 @@
languageName: node
linkType: hard
"@ai-sdk/google@patch:@ai-sdk/google@npm%3A3.0.0#~/.yarn/patches/@ai-sdk-google-npm-3.0.0-ef668576ff.patch":
version: 3.0.0
resolution: "@ai-sdk/google@patch:@ai-sdk/google@npm%3A3.0.0#~/.yarn/patches/@ai-sdk-google-npm-3.0.0-ef668576ff.patch::version=3.0.0&hash=296a79"
dependencies:
"@ai-sdk/provider": "npm:3.0.0"
"@ai-sdk/provider-utils": "npm:4.0.0"
peerDependencies:
zod: ^3.25.76 || ^4.1.8
checksum: 10c0/19dc30dbd25fce5dd52d3d03fa3cc7c0c043f5b14ca32e968d8c0406f4661251bc7333c29da4324fca067e426339c297879377c7105c5c2bb0b623ba0b14dcb8
languageName: node
linkType: hard
"@ai-sdk/huggingface@npm:^1.0.0":
version: 1.0.0
resolution: "@ai-sdk/huggingface@npm:1.0.0"
@@ -217,7 +229,7 @@
languageName: node
linkType: hard
"@ai-sdk/openai-compatible@npm:2.0.0, @ai-sdk/openai-compatible@npm:^2.0.0":
"@ai-sdk/openai-compatible@npm:2.0.0":
version: 2.0.0
resolution: "@ai-sdk/openai-compatible@npm:2.0.0"
dependencies:
@@ -241,19 +253,19 @@
languageName: node
linkType: hard
"@ai-sdk/openai@npm:2.0.85":
version: 2.0.85
resolution: "@ai-sdk/openai@npm:2.0.85"
"@ai-sdk/openai-compatible@patch:@ai-sdk/openai-compatible@npm%3A2.0.0#~/.yarn/patches/@ai-sdk-openai-compatible-npm-2.0.0-d8d5f27c45.patch":
version: 2.0.0
resolution: "@ai-sdk/openai-compatible@patch:@ai-sdk/openai-compatible@npm%3A2.0.0#~/.yarn/patches/@ai-sdk-openai-compatible-npm-2.0.0-d8d5f27c45.patch::version=2.0.0&hash=0d8c46"
dependencies:
"@ai-sdk/provider": "npm:2.0.0"
"@ai-sdk/provider-utils": "npm:3.0.19"
"@ai-sdk/provider": "npm:3.0.0"
"@ai-sdk/provider-utils": "npm:4.0.0"
peerDependencies:
zod: ^3.25.76 || ^4.1.8
- checksum: 10c0/c8e50de443d939d7a5d7444e1a2ff35357d05dd3add0fca8226b578b199f4ca53c8a9e22c376e88006466b86e39c88d7ceca790a6a866300e3964ad24756d580
+ checksum: 10c0/754c647f70aef954b1258f8978ec7c13bcfa6c52062ed0685cb09793bfb1650ee65394868c2ec3974ef0d8950c1a19c857de968be5d1cccefd04d94997275cd9
languageName: node
linkType: hard
"@ai-sdk/openai@npm:3.0.0, @ai-sdk/openai@npm:^3.0.0":
"@ai-sdk/openai@npm:3.0.0":
version: 3.0.0
resolution: "@ai-sdk/openai@npm:3.0.0"
dependencies:
@@ -265,15 +277,15 @@
languageName: node
linkType: hard
"@ai-sdk/openai@patch:@ai-sdk/openai@npm%3A2.0.85#~/.yarn/patches/@ai-sdk-openai-npm-2.0.85-27483d1d6a.patch":
version: 2.0.85
resolution: "@ai-sdk/openai@patch:@ai-sdk/openai@npm%3A2.0.85#~/.yarn/patches/@ai-sdk-openai-npm-2.0.85-27483d1d6a.patch::version=2.0.85&hash=81ee54"
"@ai-sdk/openai@patch:@ai-sdk/openai@npm%3A3.0.0#~/.yarn/patches/@ai-sdk-openai-npm-3.0.0-0b1bba0aab.patch":
version: 3.0.0
resolution: "@ai-sdk/openai@patch:@ai-sdk/openai@npm%3A3.0.0#~/.yarn/patches/@ai-sdk-openai-npm-3.0.0-0b1bba0aab.patch::version=3.0.0&hash=f40a70"
dependencies:
"@ai-sdk/provider": "npm:2.0.0"
"@ai-sdk/provider-utils": "npm:3.0.19"
"@ai-sdk/provider": "npm:3.0.0"
"@ai-sdk/provider-utils": "npm:4.0.0"
peerDependencies:
zod: ^3.25.76 || ^4.1.8
- checksum: 10c0/8fd0e4e63840b0ceb3fbf61b567e3318edfd5c3177b502076fb04b340ef8ea0a6b4cb95e4c6f7634b3bd8661ef0b69828a22b5434542c8e7d3488bff291e99c1
+ checksum: 10c0/70cd0d032a54f2a3312c91cb9a87c68e61a9e903b875827bfc4332a552017a5c0a6af2cd5b6cac63f96c89638235cb2496667c7d7397e42d8d04b4d38ba5f0d4
languageName: node
linkType: hard
@@ -302,19 +314,6 @@
languageName: node
linkType: hard
"@ai-sdk/provider-utils@npm:3.0.19":
version: 3.0.19
resolution: "@ai-sdk/provider-utils@npm:3.0.19"
dependencies:
"@ai-sdk/provider": "npm:2.0.0"
"@standard-schema/spec": "npm:^1.0.0"
eventsource-parser: "npm:^3.0.6"
peerDependencies:
zod: ^3.25.76 || ^4.1.8
checksum: 10c0/e4decb19264067fa1b1642e07d515d25d1509a1a9143f59ccc051e3ca413c9fb1d708e1052a70eaf329ca39ddf6152520cd833dbf8c95d9bf02bbeffae8ea363
languageName: node
linkType: hard
"@ai-sdk/provider-utils@npm:4.0.0, @ai-sdk/provider-utils@npm:^4.0.0":
version: 4.0.0
resolution: "@ai-sdk/provider-utils@npm:4.0.0"
@@ -1933,7 +1932,7 @@
"@ai-sdk/anthropic": "npm:^3.0.0"
"@ai-sdk/azure": "npm:^3.0.0"
"@ai-sdk/deepseek": "npm:^2.0.0"
"@ai-sdk/openai-compatible": "npm:^2.0.0"
"@ai-sdk/openai-compatible": "patch:@ai-sdk/openai-compatible@npm%3A2.0.0#~/.yarn/patches/@ai-sdk-openai-compatible-npm-2.0.0-d8d5f27c45.patch"
"@ai-sdk/provider": "npm:^3.0.0"
"@ai-sdk/provider-utils": "npm:^4.0.0"
"@ai-sdk/xai": "npm:^3.0.0"
@@ -1953,7 +1952,7 @@
version: 0.0.0-use.local
resolution: "@cherrystudio/ai-sdk-provider@workspace:packages/ai-sdk-provider"
dependencies:
"@ai-sdk/openai-compatible": "npm:^2.0.0"
"@ai-sdk/openai-compatible": "patch:@ai-sdk/openai-compatible@npm%3A2.0.0#~/.yarn/patches/@ai-sdk-openai-compatible-npm-2.0.0-d8d5f27c45.patch"
"@ai-sdk/provider": "npm:^3.0.0"
"@ai-sdk/provider-utils": "npm:^4.0.0"
tsdown: "npm:^0.13.3"
@@ -10135,11 +10134,11 @@
"@ai-sdk/anthropic": "npm:^3.0.0"
"@ai-sdk/cerebras": "npm:^2.0.0"
"@ai-sdk/gateway": "npm:^3.0.0"
"@ai-sdk/google": "npm:^3.0.0"
"@ai-sdk/google": "patch:@ai-sdk/google@npm%3A3.0.0#~/.yarn/patches/@ai-sdk-google-npm-3.0.0-ef668576ff.patch"
"@ai-sdk/google-vertex": "npm:^4.0.0"
"@ai-sdk/huggingface": "npm:^1.0.0"
"@ai-sdk/mistral": "npm:^3.0.0"
"@ai-sdk/openai": "npm:^3.0.0"
"@ai-sdk/openai": "patch:@ai-sdk/openai@npm%3A3.0.0#~/.yarn/patches/@ai-sdk-openai-npm-3.0.0-0b1bba0aab.patch"
"@ai-sdk/perplexity": "npm:^3.0.0"
"@ai-sdk/test-server": "npm:^1.0.0"
"@ant-design/v5-patch-for-react-19": "npm:^1.0.3"