diff --git a/OpenAIKit.podspec b/OpenAIKit.podspec
index 29e9f61..36c4fe7 100644
--- a/OpenAIKit.podspec
+++ b/OpenAIKit.podspec
@@ -8,7 +8,7 @@ Pod::Spec.new do |s|
   s.name         = 'OpenAIKit'
-  s.version      = '1.4.0'
+  s.version      = '1.5.0'
   s.summary      = 'OpenAI is a community-maintained repository containing Swift implementation over OpenAI public API.'
   s.description  = <<-DESC
diff --git a/Sources/OpenAIKit/Helpers/NetworkRoutes.swift b/Sources/OpenAIKit/Helpers/NetworkRoutes.swift
index 2834545..3605a9e 100644
--- a/Sources/OpenAIKit/Helpers/NetworkRoutes.swift
+++ b/Sources/OpenAIKit/Helpers/NetworkRoutes.swift
@@ -30,12 +30,10 @@ protocol Endpoint {
 enum OpenAIEndpoint {
     case completions
-    case chatCompletions
-    case edits
-    case dalleImage
+    case dalleImageEdit
 }
@@ -49,6 +47,8 @@ extension OpenAIEndpoint: Endpoint {
             return "/v1/edits"
         case .dalleImage:
             return "/v1/images/generations"
+        case .dalleImageEdit:
+            return "/v1/images/edits"
         }
     }
diff --git a/Sources/OpenAIKit/Models/ImagesModels.swift b/Sources/OpenAIKit/Models/ImagesModels.swift
index e7a4f71..8ac6974 100644
--- a/Sources/OpenAIKit/Models/ImagesModels.swift
+++ b/Sources/OpenAIKit/Models/ImagesModels.swift
@@ -34,3 +34,30 @@ public struct ImagesResponse: Codable {
     public let created: TimeInterval
     public var data: [AIImage]
 }
+
+public struct ImageEditRequest: Codable {
+    /// The image to edit. Must be a valid PNG file, less than 4MB, and square. If mask is not provided, image must have transparency, which will be used as the mask.
+    public var image: String
+    /// An additional image whose fully transparent areas (e.g. where alpha is zero) indicate where image should be edited. Must be a valid PNG file, less than 4MB, and have the same dimensions as image.
+    public var mask: String?
+    /// A text description of the desired image(s). The maximum length is 1000 characters.
+    public var prompt: String
+    /// The number of images to generate. Must be between 1 and 10.
+    public var n: Int? = nil
+    /// The size of the generated images. Must be one of `size256`, `size512`, or `size1024`.
+    public var size: AIImageSize = .size1024
+    /// The format in which the generated images are returned. Must be one of `url` or `b64_json`.
+    /// DEFAULT: url
+    public var responseFormat: String?
+    /// A unique identifier representing your end-user, which can help OpenAI to monitor and detect abuse.
+    public var user: String? = nil
+}
+
+public struct ImageEditResponse: Codable {
+    public struct AIImage: Codable {
+        public let url: String
+    }
+
+    public let created: TimeInterval
+    public var data: [AIImage]
+}
diff --git a/Sources/OpenAIKit/OpenAIKit.swift b/Sources/OpenAIKit/OpenAIKit.swift
index c0b42cf..1f41c53 100644
--- a/Sources/OpenAIKit/OpenAIKit.swift
+++ b/Sources/OpenAIKit/OpenAIKit.swift
@@ -48,4 +48,18 @@ extension OpenAIKit {
         return headers
     }
+
+    var baseMultipartHeaders: OpenAIHeaders {
+        var headers: OpenAIHeaders = [:]
+
+        headers["Authorization"] = "Bearer \(apiToken)"
+
+        if let organization {
+            headers["OpenAI-Organization"] = organization
+        }
+
+        headers["content-type"] = "multipart/form-data"
+
+        return headers
+    }
 }
diff --git a/Sources/OpenAIKit/OpenAIKitRequests/Images.swift b/Sources/OpenAIKit/OpenAIKitRequests/Images.swift
index b677d33..73ecc7a 100644
--- a/Sources/OpenAIKit/OpenAIKitRequests/Images.swift
+++ b/Sources/OpenAIKit/OpenAIKitRequests/Images.swift
@@ -10,6 +10,7 @@ import Foundation
 @available(swift 5.5)
 @available(macOS 10.15, iOS 13, watchOS 6, tvOS 13, *)
 public extension OpenAIKit {
+    // MARK: - Create image
     /// Given a prompt and/or an input image, the model will generate a new image.
     ///
     /// - Parameters:
@@ -47,4 +48,53 @@ public extension OpenAIKit {
             }
         }
     }
+
+    // MARK: - Create image edit
+    /// Given a prompt and an input image, the model will generate a modified version of the input image.
+    ///
+    /// - Parameters:
+    ///   - image: The image to edit. Must be a valid PNG file, less than 4MB, and square. If mask is not provided, image must have transparency, which will be used as the mask.
+    ///   - prompt: A text description of the desired image(s). The maximum length is 1000 characters.
+    ///   - size: The size of the generated images. Must be one of `size256`, `size512`, or `size1024`.
+    ///   - responseFormat: The format in which the generated images are returned. Must be one of url or b64_json.
+    ///   - mask: An additional image whose fully transparent areas (e.g. where alpha is zero) indicate where image should be edited. Must be a valid PNG file, less than 4MB, and have the same dimensions as image.
+    ///   - user: A unique identifier representing your end-user, which can help OpenAI to monitor and detect abuse.
+    ///   - n: The number of images to generate. Must be between 1 and 10.
+    func sendImageEditRequest(image: String,
+                              prompt: String,
+                              size: AIImageSize = .size1024,
+                              responseFormat: String? = nil,
+                              mask: String? = nil,
+                              user: String? = nil,
+                              n: Int? = nil,
+                              completion: @escaping (Result<ImageEditResponse, Error>) -> Void)
+    {
+        let endpoint = OpenAIEndpoint.dalleImageEdit
+
+        let requestBody = ImageEditRequest(image: image, mask: mask, prompt: prompt, n: n, size: size, responseFormat: responseFormat, user: user)
+
+        let requestData = try? jsonEncoder.encode(requestBody)
+
+        let headers = baseMultipartHeaders
+
+        network.request(endpoint.method, url: endpoint.urlPath, body: requestData, headers: headers, completion: completion)
+    }
+
+    @available(swift 5.5)
+    @available(macOS 10.15, iOS 13, watchOS 6, tvOS 13, *)
+    func sendImageEditRequest(image: String,
+                              prompt: String,
+                              size: AIImageSize = .size1024,
+                              responseFormat: String? = nil,
+                              mask: String? = nil,
+                              user: String? = nil,
+                              n: Int? = nil) async -> Result<ImageEditResponse, Error>
+    {
+        return await withCheckedContinuation { continuation in
+            sendImageEditRequest(image: image, prompt: prompt, size: size, responseFormat: responseFormat, mask: mask, user: user, n: n) { result in
+                continuation.resume(returning: result)
+            }
+        }
+    }
 }
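
Below is a minimal usage sketch of the new image-edit API introduced by this diff, showing the call site a consumer would write. It is illustrative only: the `OpenAIKit(apiToken:organization:)` initializer shape and the idea that `image` is passed as a base64-encoded PNG string are assumptions for the sake of the example, not something this diff confirms.

import OpenAIKit

// Hypothetical client setup; the initializer shape is an assumption for this sketch.
let openAI = OpenAIKit(apiToken: "<OPENAI_API_TOKEN>", organization: nil)

// Stand-in for the PNG payload expected by the `image` parameter
// (assumed here to be a base64 string; must be a square PNG under 4MB with transparency).
let squarePNG = "<base64-encoded PNG data>"

openAI.sendImageEditRequest(image: squarePNG,
                            prompt: "Add a red hat to the person in the photo",
                            size: .size512,
                            n: 1) { result in
    switch result {
    case .success(let response):
        // With the default response format, each returned item carries a URL to an edited image.
        print(response.data.map(\.url))
    case .failure(let error):
        print("Image edit failed: \(error)")
    }
}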