Compare commits

...

4 Commits

Author SHA1 Message Date
continue[bot]
f0ec57bbcb chore: trigger CI
Generated with [Continue](https://continue.dev)

Co-authored-by: nate <nate@continue.dev>
Co-Authored-By: Continue <noreply@continue.dev>
2025-12-10 23:37:58 +00:00
continue[bot]
2d3fcc8cff fix: upgrade @ai-sdk/openai and @ai-sdk/anthropic to v2 for AI SDK v5 compatibility
- Update @ai-sdk/openai from ^1.0.10 to ^2.0.80
- Update @ai-sdk/anthropic from ^1.0.10 to ^2.0.54
- These v2 packages provide v2 specification models required by AI SDK v5.0.108
- Previous v1 packages only provided v1 models which are not supported by AI SDK v5
- This resolves AI_UnsupportedModelVersionError in Vercel SDK integration tests

Generated with [Continue](https://continue.dev)

Co-authored-by: nate <nate@continue.dev>
Co-Authored-By: Continue <noreply@continue.dev>
2025-12-10 23:34:29 +00:00
continue[bot]
0b30bac7ca fix(openai-adapters): update Vercel AI SDK v5 API usage
- Change maxTokens to maxOutputTokens in generateText and streamText calls
- Change tool call .args to .input property
- Change usage.promptTokens/completionTokens to inputTokens/outputTokens
- Update promptTokensDetails to inputTokensDetails

These breaking changes align with Vercel AI SDK v5.0.0+ API.

Generated with [Continue](https://continue.dev)

Co-Authored-By: Continue <noreply@continue.dev>
Co-authored-by: nate <nate@continue.dev>
2025-12-10 23:12:49 +00:00
dependabot[bot]
705203b76e chore(deps): bump jsondiffpatch and ai in /packages/openai-adapters
Removes [jsondiffpatch](https://github.com/benjamine/jsondiffpatch). It's no longer used after updating ancestor dependency [ai](https://github.com/vercel/ai). These dependencies need to be updated together.


Removes `jsondiffpatch`

Updates `ai` from 4.3.19 to 5.0.108
- [Release notes](https://github.com/vercel/ai/releases)
- [Changelog](https://github.com/vercel/ai/blob/main/CHANGELOG.md)
- [Commits](https://github.com/vercel/ai/compare/ai@4.3.19...ai@5.0.108)

---
updated-dependencies:
- dependency-name: jsondiffpatch
  dependency-version: 
  dependency-type: indirect
- dependency-name: ai
  dependency-version: 5.0.108
  dependency-type: direct:production
...

Signed-off-by: dependabot[bot] <support@github.com>
2025-12-10 23:07:09 +00:00
4 changed files with 414 additions and 260 deletions

File diff suppressed because it is too large. (Load Diff)

View File

@@ -12,8 +12,8 @@
   "author": "Nate Sesti and Ty Dunn",
   "license": "Apache-2.0",
   "dependencies": {
-    "@ai-sdk/anthropic": "^1.0.10",
-    "@ai-sdk/openai": "^1.0.10",
+    "@ai-sdk/anthropic": "^2.0.54",
+    "@ai-sdk/openai": "^2.0.80",
     "@anthropic-ai/sdk": "^0.67.0",
     "@aws-sdk/client-bedrock-runtime": "^3.931.0",
     "@aws-sdk/credential-providers": "^3.931.0",
@@ -21,7 +21,7 @@
     "@continuedev/config-yaml": "^1.36.0",
     "@continuedev/fetch": "^1.6.0",
     "@google/genai": "^1.30.0",
-    "ai": "^4.0.33",
+    "ai": "^5.0.108",
     "dotenv": "^16.5.0",
     "google-auth-library": "^10.4.1",
     "json-schema": "^0.4.0",

View File

@@ -426,7 +426,7 @@ export class AnthropicApi implements BaseLlmApi {
       (msg) => msg.role !== "system",
     );
 
-    const model = this.anthropicProvider(body.model);
+    const model = this.anthropicProvider.chat(body.model);
 
     // Convert OpenAI tools to Vercel AI SDK format
     const vercelTools = await convertToolsToVercelFormat(body.tools);
@@ -436,7 +436,7 @@ export class AnthropicApi implements BaseLlmApi {
       system: systemText,
       messages: nonSystemMessages as any,
       temperature: body.temperature ?? undefined,
-      maxTokens: body.max_tokens ?? undefined,
+      maxOutputTokens: body.max_tokens ?? undefined,
       topP: body.top_p ?? undefined,
       stopSequences: body.stop
         ? Array.isArray(body.stop)
@@ -454,7 +454,7 @@ export class AnthropicApi implements BaseLlmApi {
         type: "function" as const,
         function: {
           name: tc.toolName,
-          arguments: JSON.stringify(tc.args),
+          arguments: JSON.stringify(tc.input),
         },
       }));
 
@@ -478,14 +478,14 @@ export class AnthropicApi implements BaseLlmApi {
         },
       ],
       usage: {
-        prompt_tokens: result.usage.promptTokens,
-        completion_tokens: result.usage.completionTokens,
-        total_tokens: result.usage.totalTokens,
+        prompt_tokens: result.usage.inputTokens ?? 0,
+        completion_tokens: result.usage.outputTokens ?? 0,
+        total_tokens: result.usage.totalTokens ?? 0,
         prompt_tokens_details: {
           cached_tokens:
-            (result.usage as any).promptTokensDetails?.cachedTokens ?? 0,
+            (result.usage as any).inputTokensDetails?.cachedTokens ?? 0,
           cache_read_tokens:
-            (result.usage as any).promptTokensDetails?.cachedTokens ?? 0,
+            (result.usage as any).inputTokensDetails?.cachedTokens ?? 0,
           cache_write_tokens: 0,
         } as any,
       },
@@ -642,7 +642,7 @@ export class AnthropicApi implements BaseLlmApi {
       (msg) => msg.role !== "system",
     );
 
-    const model = this.anthropicProvider(body.model);
+    const model = this.anthropicProvider.chat(body.model);
 
     // Convert OpenAI tools to Vercel AI SDK format
     const vercelTools = await convertToolsToVercelFormat(body.tools);
@@ -652,7 +652,7 @@ export class AnthropicApi implements BaseLlmApi {
       system: systemText,
       messages: nonSystemMessages as any,
       temperature: body.temperature ?? undefined,
-      maxTokens: body.max_tokens ?? undefined,
+      maxOutputTokens: body.max_tokens ?? undefined,
       topP: body.top_p ?? undefined,
       stopSequences: body.stop
         ? Array.isArray(body.stop)

View File

@@ -187,7 +187,7 @@ export class OpenAIApi implements BaseLlmApi {
     );
     const modifiedBody = this.modifyChatBody({ ...body });
 
-    const model = this.openaiProvider(modifiedBody.model);
+    const model = this.openaiProvider.chat(modifiedBody.model);
 
     // Convert OpenAI tools to Vercel AI SDK format
     const vercelTools = await convertToolsToVercelFormat(modifiedBody.tools);
@@ -196,7 +196,7 @@ export class OpenAIApi implements BaseLlmApi {
       model,
       messages: modifiedBody.messages as any,
       temperature: modifiedBody.temperature ?? undefined,
-      maxTokens:
+      maxOutputTokens:
         modifiedBody.max_completion_tokens ??
         modifiedBody.max_tokens ??
         undefined,
@@ -219,7 +219,7 @@ export class OpenAIApi implements BaseLlmApi {
         type: "function" as const,
         function: {
           name: tc.toolName,
-          arguments: JSON.stringify(tc.args),
+          arguments: JSON.stringify(tc.input),
         },
       }));
 
@@ -243,9 +243,9 @@ export class OpenAIApi implements BaseLlmApi {
         },
       ],
       usage: {
-        prompt_tokens: result.usage.promptTokens,
-        completion_tokens: result.usage.completionTokens,
-        total_tokens: result.usage.totalTokens,
+        prompt_tokens: result.usage.inputTokens ?? 0,
+        completion_tokens: result.usage.outputTokens ?? 0,
+        total_tokens: result.usage.totalTokens ?? 0,
       },
     };
   }
@@ -307,7 +307,7 @@ export class OpenAIApi implements BaseLlmApi {
     );
     const modifiedBody = this.modifyChatBody({ ...body });
 
-    const model = this.openaiProvider(modifiedBody.model);
+    const model = this.openaiProvider.chat(modifiedBody.model);
 
     // Convert OpenAI tools to Vercel AI SDK format
     const vercelTools = await convertToolsToVercelFormat(modifiedBody.tools);
@@ -316,7 +316,7 @@ export class OpenAIApi implements BaseLlmApi {
       model,
      messages: modifiedBody.messages as any,
       temperature: modifiedBody.temperature ?? undefined,
-      maxTokens:
+      maxOutputTokens:
        modifiedBody.max_completion_tokens ??
        modifiedBody.max_tokens ??
        undefined,