@@ -6,11 +6,24 @@ import { streamSSE } from "hono/streaming"
 import { awaitApproval } from "~/lib/approval"
 import { checkRateLimit } from "~/lib/rate-limit"
 import { state } from "~/lib/state"
+import {
+  createResponsesStreamState,
+  translateResponsesStreamEvent,
+} from "~/routes/messages/responses-stream-translation"
+import {
+  translateAnthropicMessagesToResponsesPayload,
+  translateResponsesResultToAnthropic,
+} from "~/routes/messages/responses-translation"
+import { getResponsesRequestOptions } from "~/routes/responses/utils"
 import {
   createChatCompletions,
   type ChatCompletionChunk,
   type ChatCompletionResponse,
 } from "~/services/copilot/create-chat-completions"
+import {
+  createResponses,
+  type ResponsesResult,
+} from "~/services/copilot/create-responses"

 import {
   type AnthropicMessagesPayload,
@@ -28,16 +41,31 @@ export async function handleCompletion(c: Context)
   const anthropicPayload = await c.req.json<AnthropicMessagesPayload>()
   consola.debug("Anthropic request payload:", JSON.stringify(anthropicPayload))

+  const useResponsesApi = shouldUseResponsesApi(anthropicPayload.model)
+
+  if (state.manualApprove) {
+    await awaitApproval()
+  }
+
+  if (useResponsesApi) {
+    return await handleWithResponsesApi(c, anthropicPayload)
+  }
+
+  return await handleWithChatCompletions(c, anthropicPayload)
+}
+
+const RESPONSES_ENDPOINT = "/responses"
+
+const handleWithChatCompletions = async (
+  c: Context,
+  anthropicPayload: AnthropicMessagesPayload,
+) => {
   const openAIPayload = translateToOpenAI(anthropicPayload)
   consola.debug(
     "Translated OpenAI request payload:",
     JSON.stringify(openAIPayload),
   )

-  if (state.manualApprove) {
-    await awaitApproval()
-  }
-
   const response = await createChatCompletions(openAIPayload)

   if (isNonStreaming(response)) {
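The new fork keys the backend choice off the model's advertised endpoints: shouldUseResponsesApi (defined in the next hunk) looks the requested model up in state.models and checks whether its supported_endpoints lists RESPONSES_ENDPOINT. A minimal standalone sketch of that decision, assuming model entries shaped roughly like the hypothetical ModelEntry below (the real type comes from ~/lib/state), with illustrative model ids:

// Sketch only: ModelEntry and the inline models list are assumptions for
// illustration; the diff reads the same fields from state.models?.data.
interface ModelEntry {
  id: string
  supported_endpoints?: Array<string>
}

const RESPONSES_ENDPOINT = "/responses"

const supportsResponses = (
  models: Array<ModelEntry>,
  modelId: string,
): boolean => {
  const selected = models.find((model) => model.id === modelId)
  return selected?.supported_endpoints?.includes(RESPONSES_ENDPOINT) ?? false
}

// Example: only the first (hypothetical) model would take the Responses path.
const models: Array<ModelEntry> = [
  { id: "model-a", supported_endpoints: ["/chat/completions", "/responses"] },
  { id: "model-b", supported_endpoints: ["/chat/completions"] },
]
console.log(supportsResponses(models, "model-a")) // true
console.log(supportsResponses(models, "model-b")) // false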
@@ -86,6 +114,108 @@ export async function handleCompletion(c: Context)
   })
 }

+const handleWithResponsesApi = async (
+  c: Context,
+  anthropicPayload: AnthropicMessagesPayload,
+) => {
+  const responsesPayload =
+    translateAnthropicMessagesToResponsesPayload(anthropicPayload)
+  consola.debug(
+    "Translated Responses payload:",
+    JSON.stringify(responsesPayload),
+  )
+
+  const { vision, initiator } = getResponsesRequestOptions(responsesPayload)
+  const response = await createResponses(responsesPayload, {
+    vision,
+    initiator,
+  })
+
+  if (responsesPayload.stream && isAsyncIterable(response)) {
+    consola.debug("Streaming response from Copilot (Responses API)")
+    return streamSSE(c, async (stream) => {
+      const streamState = createResponsesStreamState()
+
+      for await (const chunk of response) {
+        consola.debug("Responses raw stream event:", JSON.stringify(chunk))
+
+        const eventName = (chunk as { event?: string }).event
+        if (eventName === "ping") {
+          await stream.writeSSE({ event: "ping", data: "" })
+          continue
+        }
+
+        const data = (chunk as { data?: string }).data
+        if (!data) {
+          continue
+        }
+
+        if (data === "[DONE]") {
+          break
+        }
+
+        const parsed = safeJsonParse(data)
+        if (!parsed) {
+          continue
+        }
+
+        const events = translateResponsesStreamEvent(parsed, streamState)
+        for (const event of events) {
+          consola.debug("Translated Anthropic event:", JSON.stringify(event))
+          await stream.writeSSE({
+            event: event.type,
+            data: JSON.stringify(event),
+          })
+        }
+      }
+
+      if (!streamState.messageCompleted) {
+        consola.warn(
+          "Responses stream ended without completion; sending fallback message_stop",
+        )
+        const fallback = { type: "message_stop" as const }
+        await stream.writeSSE({
+          event: fallback.type,
+          data: JSON.stringify(fallback),
+        })
+      }
+    })
+  }
+
+  consola.debug(
+    "Non-streaming Responses result:",
+    JSON.stringify(response).slice(-400),
+  )
+  const anthropicResponse = translateResponsesResultToAnthropic(
+    response as ResponsesResult,
+  )
+  consola.debug(
+    "Translated Anthropic response:",
+    JSON.stringify(anthropicResponse),
+  )
+  return c.json(anthropicResponse)
+}
+
+const shouldUseResponsesApi = (modelId: string): boolean => {
+  const selectedModel = state.models?.data.find((model) => model.id === modelId)
+  return (
+    selectedModel?.supported_endpoints?.includes(RESPONSES_ENDPOINT) ?? false
+  )
+}
+
 const isNonStreaming = (
   response: Awaited<ReturnType<typeof createChatCompletions>>,
 ): response is ChatCompletionResponse => Object.hasOwn(response, "choices")
+
+const isAsyncIterable = <T>(value: unknown): value is AsyncIterable<T> =>
+  Boolean(value)
+  && typeof (value as AsyncIterable<T>)[Symbol.asyncIterator] === "function"
+
+const safeJsonParse = (value: string): Record<string, unknown> | undefined => {
+  try {
+    return JSON.parse(value) as Record<string, unknown>
+  } catch (error) {
+    consola.warn("Failed to parse Responses stream chunk:", value, error)
+    return undefined
+  }
+}
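The streaming branch assumes raw Copilot chunks shaped like { event?: string, data?: string }, forwards bare ping events, stops on the "[DONE]" sentinel, and silently drops empty or unparsable payloads before handing the parsed object to translateResponsesStreamEvent. A standalone sketch of that guard order, with classifyChunk, RawSseChunk, and ChunkAction as hypothetical names introduced only for illustration (the diff inlines these checks in the for-await loop):

// Sketch only: mirrors the order of the checks in the streaming loop above.
type RawSseChunk = { event?: string; data?: string }

type ChunkAction =
  | { kind: "ping" } // forward a bare ping event to the client
  | { kind: "skip" } // no data, or data that failed to parse
  | { kind: "done" } // "[DONE]" sentinel ends the stream
  | { kind: "translate"; payload: Record<string, unknown> }

const classifyChunk = (chunk: RawSseChunk): ChunkAction => {
  if (chunk.event === "ping") return { kind: "ping" }
  if (!chunk.data) return { kind: "skip" }
  if (chunk.data === "[DONE]") return { kind: "done" }
  try {
    return {
      kind: "translate",
      payload: JSON.parse(chunk.data) as Record<string, unknown>,
    }
  } catch {
    return { kind: "skip" }
  }
}

// Example: classifyChunk({ data: "[DONE]" }) yields { kind: "done" }.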