
Commit 0d059a4

fix: Support Vercel v5 AI SDK token usage (#926)
1 parent 9d4e225 commit 0d059a4

File tree

5 files changed: +14 −2 lines changed

packages/sdk/server-ai/src/LDAIConfigMapper.ts

Lines changed: 1 addition & 0 deletions

@@ -52,6 +52,7 @@ export class LDAIConfigMapper {
       model,
       messages,
       maxTokens: this._findParameter('max_tokens', 'maxTokens'),
+      maxOutputTokens: this._findParameter('max_tokens', 'maxTokens'),
       temperature: this._findParameter('temperature'),
       topP: this._findParameter('top_p', 'topP'),
       topK: this._findParameter('top_k', 'topK'),
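With this change the mapper emits the token limit under both names: maxTokens (read by Vercel AI SDK v4) and maxOutputTokens (read by v5), both derived from the same max_tokens / maxTokens model parameter. A minimal sketch of the resulting shape follows; the values and the model stand-in are assumptions for illustration, not SDK output.

// Illustrative only: roughly what the mapper produces for an AI Config whose
// model parameters include max_tokens: 1024.
declare const providerModel: unknown; // stand-in for a provider model, e.g. from @ai-sdk/openai

const mapped = {
  model: providerModel,
  messages: [{ role: 'user' as const, content: 'Hello' }],
  maxTokens: 1024,       // consumed by Vercel AI SDK v4
  maxOutputTokens: 1024, // consumed by Vercel AI SDK v5
  temperature: 0.7,
  topP: 1,
  topK: 40,
};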

packages/sdk/server-ai/src/LDAIConfigTrackerImpl.ts

Lines changed: 4 additions & 0 deletions

@@ -138,7 +138,9 @@ export class LDAIConfigTrackerImpl implements LDAIConfigTracker {
     TRes extends {
       usage?: {
         totalTokens?: number;
+        inputTokens?: number;
         promptTokens?: number;
+        outputTokens?: number;
         completionTokens?: number;
       };
     },
@@ -161,7 +163,9 @@ export class LDAIConfigTrackerImpl implements LDAIConfigTracker {
       finishReason?: Promise<string>;
       usage?: Promise<{
         totalTokens?: number;
+        inputTokens?: number;
         promptTokens?: number;
+        outputTokens?: number;
         completionTokens?: number;
       }>;
     },
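The widened constraint means usage objects from either SDK generation type-check against the tracker's metrics helpers. A small sketch using a local type that mirrors the constraint above (the alias and the sample numbers are for illustration only, not an SDK export):

// Mirrors the usage shape in the constraint above.
type VercelUsage = {
  totalTokens?: number;
  inputTokens?: number;
  promptTokens?: number;
  outputTokens?: number;
  completionTokens?: number;
};

// AI SDK v4 responses report prompt/completion tokens...
const v4Usage: VercelUsage = { totalTokens: 30, promptTokens: 12, completionTokens: 18 };

// ...while AI SDK v5 responses report input/output tokens; both now satisfy TRes.
const v5Usage: VercelUsage = { totalTokens: 30, inputTokens: 12, outputTokens: 18 };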

packages/sdk/server-ai/src/api/config/LDAIConfigTracker.ts

Lines changed: 4 additions & 0 deletions

@@ -149,7 +149,9 @@ export interface LDAIConfigTracker {
     TRes extends {
       usage?: {
         totalTokens?: number;
+        inputTokens?: number;
         promptTokens?: number;
+        outputTokens?: number;
         completionTokens?: number;
       };
     },
@@ -174,7 +176,9 @@ export interface LDAIConfigTracker {
       finishReason?: Promise<string>;
       usage?: Promise<{
         totalTokens?: number;
+        inputTokens?: number;
         promptTokens?: number;
+        outputTokens?: number;
         completionTokens?: number;
       }>;
     },
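The second hunk covers the streaming path, where usage resolves asynchronously. A hedged sketch of a v5-style stream result object that now satisfies that constraint (the literal below is a stand-in; a real result would come from the Vercel AI SDK's streamText):

// Only the fields relevant to the tracker constraint are shown.
const streamResult = {
  finishReason: Promise.resolve('stop'),
  usage: Promise.resolve({
    totalTokens: 30,
    inputTokens: 12,  // v5 field name
    outputTokens: 18, // v5 field name
  }),
};

// Once resolved, the usage object can be normalized via
// createVercelAISDKTokenUsage (see the last file in this commit).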

packages/sdk/server-ai/src/api/config/VercelAISDK.ts

Lines changed: 1 addition & 0 deletions

@@ -10,6 +10,7 @@ export interface VercelAISDKConfig<TMod> {
   model: TMod;
   messages?: LDMessage[] | undefined;
   maxTokens?: number | undefined;
+  maxOutputTokens?: number | undefined;
   temperature?: number | undefined;
   topP?: number | undefined;
   topK?: number | undefined;
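Because the interface now carries both field names, one config object can be passed to generateText on either major version of the Vercel AI SDK. The sketch below is an assumption-laden illustration: the config literal stands in for one produced by the LaunchDarkly mapper, and it assumes the installed SDK version reads the field it understands and tolerates the one meant for the other version.

import { generateText } from 'ai';          // available in AI SDK v4 and v5
import { openai } from '@ai-sdk/openai';

// Stand-in for a config produced by the LaunchDarkly AI Config mapping above.
const aiSdkConfig = {
  model: openai('gpt-4o-mini'),
  messages: [{ role: 'user' as const, content: 'Summarize our release notes.' }],
  maxTokens: 1024,       // honored by v4
  maxOutputTokens: 1024, // honored by v5
};

async function main() {
  const result = await generateText(aiSdkConfig);
  console.log(result.text);
}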

packages/sdk/server-ai/src/api/metrics/VercelAISDKTokenUsage.ts

Lines changed: 4 additions & 2 deletions

@@ -2,12 +2,14 @@ import { LDTokenUsage } from './LDTokenUsage';
 
 export function createVercelAISDKTokenUsage(data: {
   totalTokens?: number;
+  inputTokens?: number;
   promptTokens?: number;
+  outputTokens?: number;
   completionTokens?: number;
 }): LDTokenUsage {
   return {
     total: data.totalTokens ?? 0,
-    input: data.promptTokens ?? 0,
-    output: data.completionTokens ?? 0,
+    input: data.inputTokens ?? data.promptTokens ?? 0,
+    output: data.outputTokens ?? data.completionTokens ?? 0,
   };
 }
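The new fallback order prefers the v5 field names and only then falls back to the v4 names, so both shapes normalize to the same LDTokenUsage. For example (token counts are illustrative):

// v5-style usage (inputTokens/outputTokens)
createVercelAISDKTokenUsage({ totalTokens: 30, inputTokens: 12, outputTokens: 18 });
// -> { total: 30, input: 12, output: 18 }

// v4-style usage (promptTokens/completionTokens)
createVercelAISDKTokenUsage({ totalTokens: 30, promptTokens: 12, completionTokens: 18 });
// -> { total: 30, input: 12, output: 18 }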
