Forced function calling #124

Merged 6 commits on Apr 3, 2024
@@ -13,6 +13,7 @@
// limitations under the License.

import SwiftUI

public struct InputField<Label>: View where Label: View {
@Binding
private var text: String
2 changes: 1 addition & 1 deletion Mintfile
@@ -1 +1 @@
nicklockwood/SwiftFormat@0.52.10
nicklockwood/SwiftFormat@0.53.5
45 changes: 45 additions & 0 deletions Sources/GoogleAI/FunctionCalling.swift
@@ -177,6 +177,51 @@ public struct Tool: Encodable {
}
}

/// Configuration for specifying function calling behavior.
public struct FunctionCallingConfig: Encodable {
/// Defines the execution behavior for function calling by specifying the
/// execution mode.
public enum Mode: String, Encodable {
/// The default behavior for function calling. The model calls functions to answer queries at
/// its discretion.
case auto = "AUTO"

/// The model always predicts a provided function call to answer every query.
case any = "ANY"

/// The model will never predict a function call to answer a query. This can also be achieved by
/// not passing any tools to the model.
case none = "NONE"
}

/// Specifies the mode in which function calling should execute. If
/// unspecified, the default value will be set to AUTO.
let mode: Mode?

/// A set of function names that, when provided, limits the functions the model
/// will call.
///
/// This should only be set when the Mode is ANY. Function names
/// should match [FunctionDeclaration.name]. With mode set to ANY, the model
/// will predict a function call from the set of function names provided.
let allowedFunctionNames: [String]?

public init(mode: FunctionCallingConfig.Mode? = nil, allowedFunctionNames: [String]? = nil) {
self.mode = mode
self.allowedFunctionNames = allowedFunctionNames
}
}

/// Tool configuration for any `Tool` specified in the request.
@available(iOS 15.0, macOS 11.0, macCatalyst 15.0, *)
public struct ToolConfig: Encodable {
let functionCallingConfig: FunctionCallingConfig?

public init(functionCallingConfig: FunctionCallingConfig? = nil) {
self.functionCallingConfig = functionCallingConfig
}
}

/// Result output from a ``FunctionCall``.
///
/// Contains a string representing the `FunctionDeclaration.name` and a structured JSON object
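Taken together, the two new types above are all that is needed to request forced function calling. A minimal construction sketch, using only the initializers added in this file; the function name "get_weather" is a placeholder for illustration and is not part of this diff:

import GoogleGenerativeAI

// Force the model to predict a call to one of the allowed functions on every request.
// "get_weather" is a hypothetical function name used for illustration.
let functionCallingConfig = FunctionCallingConfig(
    mode: .any,
    allowedFunctionNames: ["get_weather"]
)

// ToolConfig applies to every `Tool` passed in the same request.
let toolConfig = ToolConfig(functionCallingConfig: functionCallingConfig)
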
2 changes: 2 additions & 0 deletions Sources/GoogleAI/GenerateContentRequest.swift
@@ -22,6 +22,7 @@ struct GenerateContentRequest {
let generationConfig: GenerationConfig?
let safetySettings: [SafetySetting]?
let tools: [Tool]?
let toolConfig: ToolConfig?
let isStreaming: Bool
let options: RequestOptions
}
@@ -33,6 +34,7 @@ extension GenerateContentRequest: Encodable {
case generationConfig
case safetySettings
case tools
case toolConfig
}
}

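Because both new types are `Encodable` with synthesized coding keys, the `toolConfig` field is serialized alongside `tools` in the request body. A rough sketch of the resulting JSON, assuming the package's `GoogleGenerativeAI` module and Foundation's default `JSONEncoder` (key order may vary):

import Foundation
import GoogleGenerativeAI

let toolConfig = ToolConfig(
    functionCallingConfig: FunctionCallingConfig(mode: .any,
                                                 allowedFunctionNames: ["get_weather"])
)
// Encode to inspect the wire format sent to the backend.
let body = try JSONEncoder().encode(toolConfig)
print(String(decoding: body, as: UTF8.self))
// Roughly: {"functionCallingConfig":{"mode":"ANY","allowedFunctionNames":["get_weather"]}}
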
10 changes: 10 additions & 0 deletions Sources/GoogleAI/GenerativeModel.swift
@@ -36,6 +36,9 @@ public final class GenerativeModel {
/// A list of tools the model may use to generate the next response.
let tools: [Tool]?

/// Tool configuration for any `Tool` specified in the request.
let toolConfig: ToolConfig?

/// Configuration parameters for sending requests to the backend.
let requestOptions: RequestOptions

@@ -48,19 +51,22 @@
/// - generationConfig: The content generation parameters your model should use.
/// - safetySettings: A value describing what types of harmful content your model should allow.
/// - tools: A list of ``Tool`` objects that the model may use to generate the next response.
/// - toolConfig: Tool configuration for any `Tool` specified in the request.
/// - requestOptions: Configuration parameters for sending requests to the backend.
public convenience init(name: String,
apiKey: String,
generationConfig: GenerationConfig? = nil,
safetySettings: [SafetySetting]? = nil,
tools: [Tool]? = nil,
toolConfig: ToolConfig? = nil,
requestOptions: RequestOptions = RequestOptions()) {
self.init(
name: name,
apiKey: apiKey,
generationConfig: generationConfig,
safetySettings: safetySettings,
tools: tools,
toolConfig: toolConfig,
requestOptions: requestOptions,
urlSession: .shared
)
@@ -72,13 +78,15 @@
generationConfig: GenerationConfig? = nil,
safetySettings: [SafetySetting]? = nil,
tools: [Tool]? = nil,
toolConfig: ToolConfig? = nil,
requestOptions: RequestOptions = RequestOptions(),
urlSession: URLSession) {
modelResourceName = GenerativeModel.modelResourceName(name: name)
generativeAIService = GenerativeAIService(apiKey: apiKey, urlSession: urlSession)
self.generationConfig = generationConfig
self.safetySettings = safetySettings
self.tools = tools
self.toolConfig = toolConfig
self.requestOptions = requestOptions

Logging.default.info("""
@@ -125,6 +133,7 @@
generationConfig: generationConfig,
safetySettings: safetySettings,
tools: tools,
toolConfig: toolConfig,
isStreaming: false,
options: requestOptions)
response = try await generativeAIService.loadRequest(request: generateContentRequest)
@@ -197,6 +206,7 @@
generationConfig: generationConfig,
safetySettings: safetySettings,
tools: tools,
toolConfig: toolConfig,
isStreaming: true,
options: requestOptions)

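Putting it together, a hedged end-to-end sketch of the updated initializer. The `Tool`, `FunctionDeclaration`, and `Schema` calls below are assumed from the existing SDK surface rather than from this diff, and the model name, API key, and "get_weather" function are placeholders:

import GoogleGenerativeAI

// Declare a callable function (existing API, assumed signature; not part of this diff).
let getWeather = FunctionDeclaration(
    name: "get_weather",
    description: "Returns the current weather for a city.",
    parameters: ["city": Schema(type: .string)]
)

// Force the model to call one of the allowed functions instead of replying with text.
let model = GenerativeModel(
    name: "gemini-1.0-pro",   // placeholder model name
    apiKey: "YOUR_API_KEY",   // placeholder key
    tools: [Tool(functionDeclarations: [getWeather])],
    toolConfig: ToolConfig(
        functionCallingConfig: FunctionCallingConfig(mode: .any,
                                                     allowedFunctionNames: ["get_weather"])
    )
)

// With mode ANY, the response is expected to contain a FunctionCall part rather than text.
let response = try await model.generateContent("What is the weather in Paris?")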