Compare commits
4 Commits
@continued ... pr-9115
| Author | SHA1 | Date |
|---|---|---|
|  | f0ec57bbcb |  |
|  | 2d3fcc8cff |  |
|  | 0b30bac7ca |  |
|  | 705203b76e |  |
packages/openai-adapters/package-lock.json (generated; 632 lines changed)
File diff suppressed because it is too large.
packages/openai-adapters/package.json:

@@ -12,8 +12,8 @@
   "author": "Nate Sesti and Ty Dunn",
   "license": "Apache-2.0",
   "dependencies": {
-    "@ai-sdk/anthropic": "^1.0.10",
-    "@ai-sdk/openai": "^1.0.10",
+    "@ai-sdk/anthropic": "^2.0.54",
+    "@ai-sdk/openai": "^2.0.80",
     "@anthropic-ai/sdk": "^0.67.0",
     "@aws-sdk/client-bedrock-runtime": "^3.931.0",
     "@aws-sdk/credential-providers": "^3.931.0",
@@ -21,7 +21,7 @@
     "@continuedev/config-yaml": "^1.36.0",
     "@continuedev/fetch": "^1.6.0",
     "@google/genai": "^1.30.0",
-    "ai": "^4.0.33",
+    "ai": "^5.0.108",
     "dotenv": "^16.5.0",
     "google-auth-library": "^10.4.1",
     "json-schema": "^0.4.0",
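These bumps move the adapters from Vercel AI SDK v4 to v5: `ai` ^4.0.33 → ^5.0.108, plus matching major bumps of the `@ai-sdk/anthropic` and `@ai-sdk/openai` provider packages. A minimal sketch of the entry points the later hunks rely on; the API key variable and model id are placeholders, not the adapters' actual configuration:

```typescript
// Entry points from the bumped packages (these import names exist in both v4 and v5).
import { generateText } from "ai";
import { createAnthropic } from "@ai-sdk/anthropic";

// Provider construction is unchanged across the bump; the breaking changes are in how
// a model is resolved and in several option/field renames, which the hunks below apply.
const anthropic = createAnthropic({ apiKey: process.env.ANTHROPIC_API_KEY });

// Hypothetical smoke test with a placeholder model id.
async function ping() {
  const result = await generateText({
    model: anthropic("claude-sonnet-4-5"),
    messages: [{ role: "user", content: "ping" }],
    maxOutputTokens: 16, // v5 name; v4 called this maxTokens
  });
  return result.text;
}
```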
AnthropicApi adapter:

@@ -426,7 +426,7 @@ export class AnthropicApi implements BaseLlmApi {
       (msg) => msg.role !== "system",
     );

-    const model = this.anthropicProvider(body.model);
+    const model = this.anthropicProvider.chat(body.model);

     // Convert OpenAI tools to Vercel AI SDK format
     const vercelTools = await convertToolsToVercelFormat(body.tools);
@@ -436,7 +436,7 @@ export class AnthropicApi implements BaseLlmApi {
       system: systemText,
       messages: nonSystemMessages as any,
       temperature: body.temperature ?? undefined,
-      maxTokens: body.max_tokens ?? undefined,
+      maxOutputTokens: body.max_tokens ?? undefined,
       topP: body.top_p ?? undefined,
       stopSequences: body.stop
         ? Array.isArray(body.stop)
@@ -454,7 +454,7 @@ export class AnthropicApi implements BaseLlmApi {
       type: "function" as const,
       function: {
         name: tc.toolName,
-        arguments: JSON.stringify(tc.args),
+        arguments: JSON.stringify(tc.input),
       },
     }));

@@ -478,14 +478,14 @@ export class AnthropicApi implements BaseLlmApi {
       },
     ],
     usage: {
-      prompt_tokens: result.usage.promptTokens,
-      completion_tokens: result.usage.completionTokens,
-      total_tokens: result.usage.totalTokens,
+      prompt_tokens: result.usage.inputTokens ?? 0,
+      completion_tokens: result.usage.outputTokens ?? 0,
+      total_tokens: result.usage.totalTokens ?? 0,
       prompt_tokens_details: {
         cached_tokens:
-          (result.usage as any).promptTokensDetails?.cachedTokens ?? 0,
+          (result.usage as any).inputTokensDetails?.cachedTokens ?? 0,
         cache_read_tokens:
-          (result.usage as any).promptTokensDetails?.cachedTokens ?? 0,
+          (result.usage as any).inputTokensDetails?.cachedTokens ?? 0,
         cache_write_tokens: 0,
       } as any,
     },
@@ -642,7 +642,7 @@ export class AnthropicApi implements BaseLlmApi {
       (msg) => msg.role !== "system",
     );

-    const model = this.anthropicProvider(body.model);
+    const model = this.anthropicProvider.chat(body.model);

     // Convert OpenAI tools to Vercel AI SDK format
     const vercelTools = await convertToolsToVercelFormat(body.tools);
@@ -652,7 +652,7 @@ export class AnthropicApi implements BaseLlmApi {
       system: systemText,
       messages: nonSystemMessages as any,
       temperature: body.temperature ?? undefined,
-      maxTokens: body.max_tokens ?? undefined,
+      maxOutputTokens: body.max_tokens ?? undefined,
       topP: body.top_p ?? undefined,
       stopSequences: body.stop
         ? Array.isArray(body.stop)
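Both AnthropicApi methods pick up the same set of v5 renames: the model is resolved via `.chat(...)`, `maxTokens` becomes `maxOutputTokens`, tool-call arguments move from `args` to `input`, and the usage counters move from `promptTokens`/`completionTokens` to the now-optional `inputTokens`/`outputTokens` (hence the `?? 0` fallbacks). A simplified sketch of the result-to-OpenAI-shape mapping these hunks perform, using local stand-in types rather than the package's own:

```typescript
// Stand-in shapes for illustration only; the real types come from the "ai" package.
interface V5ToolCall {
  toolCallId: string;
  toolName: string;
  input: unknown; // v5 renamed `args` to `input`
}
interface V5Usage {
  inputTokens?: number; // v4: promptTokens (required)
  outputTokens?: number; // v4: completionTokens (required)
  totalTokens?: number;
}

// Mirrors the mapping in the hunks above: Vercel AI SDK v5 result fields
// converted into OpenAI-style tool_calls and usage.
function toOpenAIShape(toolCalls: V5ToolCall[], usage: V5Usage) {
  return {
    tool_calls: toolCalls.map((tc) => ({
      id: tc.toolCallId,
      type: "function" as const,
      function: { name: tc.toolName, arguments: JSON.stringify(tc.input) },
    })),
    usage: {
      prompt_tokens: usage.inputTokens ?? 0,
      completion_tokens: usage.outputTokens ?? 0,
      total_tokens: usage.totalTokens ?? 0,
    },
  };
}
```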
OpenAIApi adapter:

@@ -187,7 +187,7 @@ export class OpenAIApi implements BaseLlmApi {
     );

     const modifiedBody = this.modifyChatBody({ ...body });
-    const model = this.openaiProvider(modifiedBody.model);
+    const model = this.openaiProvider.chat(modifiedBody.model);

     // Convert OpenAI tools to Vercel AI SDK format
     const vercelTools = await convertToolsToVercelFormat(modifiedBody.tools);
@@ -196,7 +196,7 @@ export class OpenAIApi implements BaseLlmApi {
       model,
       messages: modifiedBody.messages as any,
       temperature: modifiedBody.temperature ?? undefined,
-      maxTokens:
+      maxOutputTokens:
         modifiedBody.max_completion_tokens ??
         modifiedBody.max_tokens ??
         undefined,
@@ -219,7 +219,7 @@ export class OpenAIApi implements BaseLlmApi {
       type: "function" as const,
       function: {
         name: tc.toolName,
-        arguments: JSON.stringify(tc.args),
+        arguments: JSON.stringify(tc.input),
       },
     }));

@@ -243,9 +243,9 @@ export class OpenAIApi implements BaseLlmApi {
       },
     ],
     usage: {
-      prompt_tokens: result.usage.promptTokens,
-      completion_tokens: result.usage.completionTokens,
-      total_tokens: result.usage.totalTokens,
+      prompt_tokens: result.usage.inputTokens ?? 0,
+      completion_tokens: result.usage.outputTokens ?? 0,
+      total_tokens: result.usage.totalTokens ?? 0,
     },
   };
 }
@@ -307,7 +307,7 @@ export class OpenAIApi implements BaseLlmApi {
     );

     const modifiedBody = this.modifyChatBody({ ...body });
-    const model = this.openaiProvider(modifiedBody.model);
+    const model = this.openaiProvider.chat(modifiedBody.model);

     // Convert OpenAI tools to Vercel AI SDK format
     const vercelTools = await convertToolsToVercelFormat(modifiedBody.tools);
@@ -316,7 +316,7 @@ export class OpenAIApi implements BaseLlmApi {
       model,
       messages: modifiedBody.messages as any,
       temperature: modifiedBody.temperature ?? undefined,
-      maxTokens:
+      maxOutputTokens:
         modifiedBody.max_completion_tokens ??
         modifiedBody.max_tokens ??
         undefined,
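The OpenAIApi hunks apply the same renames, with one extra detail: `maxOutputTokens` is filled from `max_completion_tokens` first and falls back to the legacy `max_tokens`. A hedged sketch of that call shape, assuming a provider created with `createOpenAI`; the model id and request values are placeholders, not the adapter's actual inputs:

```typescript
import { generateText } from "ai";
import { createOpenAI } from "@ai-sdk/openai";

// Placeholder provider and request body; the adapter derives these from the
// incoming OpenAI-style chat completion request.
const openaiProvider = createOpenAI({ apiKey: process.env.OPENAI_API_KEY });
const body: {
  model: string;
  max_completion_tokens?: number;
  max_tokens?: number;
  temperature?: number;
} = { model: "gpt-4o-mini", max_tokens: 512, temperature: 0.2 };

async function chatOnce() {
  const result = await generateText({
    // v5: resolve the model via the chat factory instead of calling the provider directly
    model: openaiProvider.chat(body.model),
    messages: [{ role: "user", content: "Hello" }],
    temperature: body.temperature ?? undefined,
    // v5 rename, preserving the diff's fallback chain
    maxOutputTokens: body.max_completion_tokens ?? body.max_tokens ?? undefined,
  });
  return result.text;
}
```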