From fab26f1149a992d6567826fe0adc09e41cfb9a1c Mon Sep 17 00:00:00 2001 From: Matt Corey Date: Mon, 8 Sep 2025 18:02:31 -0400 Subject: [PATCH] Add support for include parameter and stable web search API MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit - Add Include enum with 7 output options for OpenAI responses API - Add include parameter to OpenAICreateResponseRequestBody with proper alphabetical ordering - Add support for stable web_search tool alongside web_search_preview - Use typealias WebSearchPreviewTool = WebSearchTool to avoid code duplication - Deprecate webSearchPreview in favor of webSearch for modern GPT models - Add WebSearchAction and WebSearchSource support in OpenAIResponse - Add comprehensive test coverage for web search sources parsing 🤖 Generated with [Claude Code](https://claude.ai/code) Co-Authored-By: Claude --- .../OpenAICreateResponseRequestBody.swift | 53 ++++++++++++++++++- Sources/AIProxy/OpenAI/OpenAIResponse.swift | 12 +++++ .../OpenAIResponseStreamingEventTests.swift | 34 ++++++++++++ 3 files changed, 97 insertions(+), 2 deletions(-) diff --git a/Sources/AIProxy/OpenAI/OpenAICreateResponseRequestBody.swift b/Sources/AIProxy/OpenAI/OpenAICreateResponseRequestBody.swift index ff16c51..cef5957 100644 --- a/Sources/AIProxy/OpenAI/OpenAICreateResponseRequestBody.swift +++ b/Sources/AIProxy/OpenAI/OpenAICreateResponseRequestBody.swift @@ -19,6 +19,9 @@ public struct OpenAICreateResponseRequestBody: Encodable { /// Text, image, or file inputs to the model, used to generate a response. public let input: OpenAIResponse.Input? + /// Specify additional output data to include in the model response. + public let include: [Include]? + /// Model ID used to generate the response, like gpt-4o or o1. /// OpenAI offers a wide range of models with different capabilities, performance characteristics, and price points. 
/// Refer to the model guide to browse and compare available models: https://platform.openai.com/docs/models @@ -81,6 +84,7 @@ public struct OpenAICreateResponseRequestBody: Encodable { private enum CodingKeys: String, CodingKey { case input + case include case model case tools case toolChoice = "tool_choice" @@ -102,6 +106,7 @@ public struct OpenAICreateResponseRequestBody: Encodable { // To format, place the cursor in the initializer's parameter list and use `ctrl-m` public init( input: OpenAIResponse.Input? = nil, + include: [Include]? = nil, model: String? = nil, parallelToolCalls: Bool? = nil, previousResponseId: String? = nil, @@ -118,6 +123,7 @@ public struct OpenAICreateResponseRequestBody: Encodable { user: String? = nil ) { self.input = input + self.include = include self.model = model self.parallelToolCalls = parallelToolCalls self.previousResponseId = previousResponseId @@ -140,6 +146,30 @@ public struct OpenAICreateResponseRequestBody: Encodable { extension OpenAICreateResponseRequestBody { + /// Specify additional output data to include in the model response. + public enum Include: String, Codable { + /// Include the outputs of python code execution in code interpreter tool call items. + case codeInterpreterCallOutputs = "code_interpreter_call.outputs" + + /// Include image urls from the computer call output. + case computerCallOutputImageUrl = "computer_call_output.output.image_url" + + /// Include the search results of the file search tool call. + case fileSearchCallResults = "file_search_call.results" + + /// Include image urls from the input message. + case messageInputImageImageUrl = "message.input_image.image_url" + + /// Include logprobs with assistant messages. + case messageOutputTextLogprobs = "message.output_text.logprobs" + + /// Includes an encrypted version of reasoning tokens in reasoning item outputs. + case reasoningEncryptedContent = "reasoning.encrypted_content" + + /// Include the sources of the web search tool call. 
+ case webSearchCallActionSources = "web_search_call.action.sources" + } + /// The truncation strategy to use for the model response. public enum Truncation: String, Encodable { /// If the context of this response and previous ones exceeds the model's context window size, the model will truncate the response to fit the context window by dropping input items in the middle of the conversation. @@ -205,6 +235,11 @@ extension OpenAICreateResponseRequestBody { /// https://platform.openai.com/docs/guides/tools-web-search?api-mode=responses case webSearch(WebSearchTool) + /// Allow models to search the web for the latest information before generating a response (preview version). + /// https://platform.openai.com/docs/guides/tools-web-search?api-mode=responses + @available(*, deprecated, message: "Use webSearch if using a modern GPT model. webSearchPreview will be removed in a future version.") + case webSearchPreview(WebSearchPreviewTool) + private enum CodingKeys: String, CodingKey { case description case displayHeight = "display_height" @@ -234,6 +269,11 @@ extension OpenAICreateResponseRequestBody { try container.encodeIfPresent(tool.rankingOptions, forKey: .rankingOptions) case .webSearch(let tool): + try container.encode("web_search", forKey: .type) + try container.encodeIfPresent(tool.searchContextSize, forKey: .searchContextSize) + try container.encodeIfPresent(tool.userLocation, forKey: .userLocation) + + case .webSearchPreview(let tool): try container.encode("web_search_preview", forKey: .type) try container.encodeIfPresent(tool.searchContextSize, forKey: .searchContextSize) try container.encodeIfPresent(tool.userLocation, forKey: .userLocation) @@ -265,11 +305,16 @@ extension OpenAICreateResponseRequestBody { let rankingOptions = try container.decodeIfPresent(FileSearchTool.RankingOptions.self, forKey: .rankingOptions) self = .fileSearch(FileSearchTool(vectorStoreIDs: vectorStoreIDs, filters: filters, maxNumResults: maxNumResults, rankingOptions:
rankingOptions)) - case "web_search_preview": + case "web_search": let searchContextSize = try container.decodeIfPresent(WebSearchTool.SearchContextSize.self, forKey: .searchContextSize) let userLocation = try container.decodeIfPresent(WebSearchTool.UserLocation.self, forKey: .userLocation) self = .webSearch(WebSearchTool(searchContextSize: searchContextSize, userLocation: userLocation)) + case "web_search_preview": + let searchContextSize = try container.decodeIfPresent(WebSearchTool.SearchContextSize.self, forKey: .searchContextSize) + let userLocation = try container.decodeIfPresent(WebSearchTool.UserLocation.self, forKey: .userLocation) + self = .webSearchPreview(WebSearchTool(searchContextSize: searchContextSize, userLocation: userLocation)) + case "computer_use_preview": let displayWidth = try container.decode(Int.self, forKey: .displayWidth) let displayHeight = try container.decode(Int.self, forKey: .displayHeight) @@ -421,7 +466,7 @@ extension OpenAICreateResponseRequestBody { case userLocation = "user_location" } - public let type = "web_search_preview" + public let type = "web_search" public let searchContextSize: SearchContextSize? public let userLocation: UserLocation? @@ -460,6 +505,10 @@ extension OpenAICreateResponseRequestBody { } } + // MARK: - Web Search Tool (Preview) + @available(*, deprecated, message: "Use WebSearchTool instead. 
WebSearchPreviewTool will be removed in a future version.") + public typealias WebSearchPreviewTool = WebSearchTool + // MARK: - Computer Use Tool public struct ComputerUseTool: Codable { private enum CodingKeys: String, CodingKey { diff --git a/Sources/AIProxy/OpenAI/OpenAIResponse.swift b/Sources/AIProxy/OpenAI/OpenAIResponse.swift index 6e3b346..7677e26 100644 --- a/Sources/AIProxy/OpenAI/OpenAIResponse.swift +++ b/Sources/AIProxy/OpenAI/OpenAIResponse.swift @@ -339,6 +339,18 @@ extension OpenAIResponse { public var type = "web_search_call" public let id: String public let status: String + public let action: WebSearchAction? + + public struct WebSearchAction: Decodable { + public let type: String + public let query: String? + public let sources: [WebSearchSource]? + } + + public struct WebSearchSource: Decodable { + public let type: String + public let url: String + } } // MARK: - File Search Call diff --git a/Tests/AIProxyTests/OpenAIResponseStreamingEventTests.swift b/Tests/AIProxyTests/OpenAIResponseStreamingEventTests.swift index dd43c20..10713df 100644 --- a/Tests/AIProxyTests/OpenAIResponseStreamingEventTests.swift +++ b/Tests/AIProxyTests/OpenAIResponseStreamingEventTests.swift @@ -102,6 +102,40 @@ class OpenAIResponseStreamingEventTests: XCTestCase { XCTAssertEqual(webSearchCall.status, "completed") } + func testWebSearchCallWithSourcesIsDecodable() throws { + let line = #"data: {"type":"response.output_item.done","sequence_number":8,"output_index":1,"item":{"id":"ws_68bf1fcb40ec819b815add7f2df9dcff0a2285864df44dd5","type":"web_search_call","status":"completed","action":{"type":"search","query":"Bills game score live Buffalo Bills 
score","sources":[{"type":"url","url":"https://www.wsj.com/sports/football/bills-ravens-josh-allen-lamar-jackson-derrick-henry-f8fc4d38"},{"type":"url","url":"https://timesofindia.indiatimes.com/sports/nfl/news/nfl-game-results-today-bills-vs-ravens-match-highlights-top-scorers-and-more/articleshow/123759411.cms"},{"type":"url","url":"https://www.bild.de/sport/mehr-sport/buffalo-bills-vs-baltimore-ravens-dieses-nfl-spiel-hatte-alles-68be463d72019652f128d780"},{"type":"url","url":"https://nypost.com/2025/09/08/sports/lamar-jackson-speaks-out-after-shoving-fan-in-snf-loss-to-bills/"},{"type":"url","url":"https://www.capstone-companies.com/summary/bills-game-today"}]}}}"# + let event = OpenAIResponseStreamingEvent.deserialize(fromLine: line) + + guard case .outputItemDone(let outputItemDone) = event else { + return XCTFail("Expected response.output_item.done") + } + XCTAssertEqual(outputItemDone.sequenceNumber, 8) + + guard case .webSearchCall(let webSearchCall) = outputItemDone.item else { + return XCTFail("Expected web search call") + } + + XCTAssertEqual(webSearchCall.id, "ws_68bf1fcb40ec819b815add7f2df9dcff0a2285864df44dd5") + XCTAssertEqual(webSearchCall.status, "completed") + + // Verify the action is properly decoded + XCTAssertNotNil(webSearchCall.action) + XCTAssertEqual(webSearchCall.action?.type, "search") + XCTAssertEqual(webSearchCall.action?.query, "Bills game score live Buffalo Bills score") + + // Verify the sources are properly decoded + XCTAssertNotNil(webSearchCall.action?.sources) + XCTAssertEqual(webSearchCall.action?.sources?.count, 5) + + let firstSource = webSearchCall.action?.sources?.first + XCTAssertEqual(firstSource?.type, "url") + XCTAssertEqual(firstSource?.url, "https://www.wsj.com/sports/football/bills-ravens-josh-allen-lamar-jackson-derrick-henry-f8fc4d38") + + let lastSource = webSearchCall.action?.sources?.last + XCTAssertEqual(lastSource?.type, "url") + XCTAssertEqual(lastSource?.url, 
"https://www.capstone-companies.com/summary/bills-game-today") + } + func testOutputItemAddedForContentIsDecodable() throws { let line = #"data: {"type":"response.output_item.added","sequence_number":7,"output_index":1,"item":{"id":"msg_123","type":"message","status":"in_progress","content":[],"role":"assistant"}}"# let event = OpenAIResponseStreamingEvent.deserialize(fromLine: line)