Skip to content

Commit a57c238

Browse files
committed
feature gpt-5-codex responses api
1 parent 4fe3f4f commit a57c238

File tree

11 files changed

+2115
-4
lines changed

11 files changed

+2115
-4
lines changed

src/routes/messages/handler.ts

Lines changed: 134 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -6,11 +6,24 @@ import { streamSSE } from "hono/streaming"
66
import { awaitApproval } from "~/lib/approval"
77
import { checkRateLimit } from "~/lib/rate-limit"
88
import { state } from "~/lib/state"
9+
import {
10+
createResponsesStreamState,
11+
translateResponsesStreamEvent,
12+
} from "~/routes/messages/responses-stream-translation"
13+
import {
14+
translateAnthropicMessagesToResponsesPayload,
15+
translateResponsesResultToAnthropic,
16+
} from "~/routes/messages/responses-translation"
17+
import { getResponsesRequestOptions } from "~/routes/responses/utils"
918
import {
1019
createChatCompletions,
1120
type ChatCompletionChunk,
1221
type ChatCompletionResponse,
1322
} from "~/services/copilot/create-chat-completions"
23+
import {
24+
createResponses,
25+
type ResponsesResult,
26+
} from "~/services/copilot/create-responses"
1427

1528
import {
1629
type AnthropicMessagesPayload,
@@ -28,16 +41,31 @@ export async function handleCompletion(c: Context) {
2841
const anthropicPayload = await c.req.json<AnthropicMessagesPayload>()
2942
consola.debug("Anthropic request payload:", JSON.stringify(anthropicPayload))
3043

44+
const useResponsesApi = shouldUseResponsesApi(anthropicPayload.model)
45+
46+
if (state.manualApprove) {
47+
await awaitApproval()
48+
}
49+
50+
if (useResponsesApi) {
51+
return await handleWithResponsesApi(c, anthropicPayload)
52+
}
53+
54+
return await handleWithChatCompletions(c, anthropicPayload)
55+
}
56+
57+
// Endpoint id that a model advertises in `supported_endpoints` when it
// accepts the Responses API.
const RESPONSES_ENDPOINT = "/responses"
58+
59+
const handleWithChatCompletions = async (
60+
c: Context,
61+
anthropicPayload: AnthropicMessagesPayload,
62+
) => {
3163
const openAIPayload = translateToOpenAI(anthropicPayload)
3264
consola.debug(
3365
"Translated OpenAI request payload:",
3466
JSON.stringify(openAIPayload),
3567
)
3668

37-
if (state.manualApprove) {
38-
await awaitApproval()
39-
}
40-
4169
const response = await createChatCompletions(openAIPayload)
4270

4371
if (isNonStreaming(response)) {
@@ -86,6 +114,108 @@ export async function handleCompletion(c: Context) {
86114
})
87115
}
88116

117+
// Serve an Anthropic /messages request by proxying to the Copilot Responses
// API: translate the payload, forward it, and translate the (streaming or
// non-streaming) result back into Anthropic shapes.
const handleWithResponsesApi = async (
  c: Context,
  anthropicPayload: AnthropicMessagesPayload,
) => {
  const responsesPayload =
    translateAnthropicMessagesToResponsesPayload(anthropicPayload)
  consola.debug(
    "Translated Responses payload:",
    JSON.stringify(responsesPayload),
  )

  // Per-request options (vision, initiator) derived from the translated payload.
  const { vision, initiator } = getResponsesRequestOptions(responsesPayload)
  const response = await createResponses(responsesPayload, {
    vision,
    initiator,
  })

  // Streaming path: only taken when the caller asked for a stream AND the
  // upstream actually returned an async-iterable body.
  if (responsesPayload.stream && isAsyncIterable(response)) {
    consola.debug("Streaming response from Copilot (Responses API)")
    return streamSSE(c, async (stream) => {
      // Mutable translation state shared across all chunks of this stream.
      const streamState = createResponsesStreamState()

      for await (const chunk of response) {
        consola.debug("Responses raw stream event:", JSON.stringify(chunk))

        // Forward keep-alive pings as-is; they carry no data payload.
        const eventName = (chunk as { event?: string }).event
        if (eventName === "ping") {
          await stream.writeSSE({ event: "ping", data: "" })
          continue
        }

        const data = (chunk as { data?: string }).data
        if (!data) {
          continue
        }

        // Upstream end-of-stream sentinel.
        if (data === "[DONE]") {
          break
        }

        // Malformed chunks are logged (inside safeJsonParse) and skipped.
        const parsed = safeJsonParse(data)
        if (!parsed) {
          continue
        }

        // One upstream event may translate into zero or more Anthropic events.
        const events = translateResponsesStreamEvent(parsed, streamState)
        for (const event of events) {
          consola.debug("Translated Anthropic event:", JSON.stringify(event))
          await stream.writeSSE({
            event: event.type,
            data: JSON.stringify(event),
          })
        }
      }

      // Guarantee clients always receive a terminal message_stop, even when
      // the upstream stream ended without a completion event.
      if (!streamState.messageCompleted) {
        consola.warn(
          "Responses stream ended without completion; sending fallback message_stop",
        )
        const fallback = { type: "message_stop" as const }
        await stream.writeSSE({
          event: fallback.type,
          data: JSON.stringify(fallback),
        })
      }
    })
  }

  // Non-streaming path: translate the complete result in one shot.
  // (Only the tail of the result is logged to keep debug output bounded.)
  consola.debug(
    "Non-streaming Responses result:",
    JSON.stringify(response).slice(-400),
  )
  const anthropicResponse = translateResponsesResultToAnthropic(
    response as ResponsesResult,
  )
  consola.debug(
    "Translated Anthropic response:",
    JSON.stringify(anthropicResponse),
  )
  return c.json(anthropicResponse)
}
198+
199+
const shouldUseResponsesApi = (modelId: string): boolean => {
200+
const selectedModel = state.models?.data.find((model) => model.id === modelId)
201+
return (
202+
selectedModel?.supported_endpoints?.includes(RESPONSES_ENDPOINT) ?? false
203+
)
204+
}
205+
89206
const isNonStreaming = (
90207
response: Awaited<ReturnType<typeof createChatCompletions>>,
91208
): response is ChatCompletionResponse => Object.hasOwn(response, "choices")
209+
210+
const isAsyncIterable = <T>(value: unknown): value is AsyncIterable<T> =>
211+
Boolean(value)
212+
&& typeof (value as AsyncIterable<T>)[Symbol.asyncIterator] === "function"
213+
214+
const safeJsonParse = (value: string): Record<string, unknown> | undefined => {
215+
try {
216+
return JSON.parse(value) as Record<string, unknown>
217+
} catch (error) {
218+
consola.warn("Failed to parse Responses stream chunk:", value, error)
219+
return undefined
220+
}
221+
}

0 commit comments

Comments
 (0)