Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Create Contributions #143

Open
wants to merge 3 commits into
base: main
Choose a base branch
from
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
135 changes: 135 additions & 0 deletions Sources/OpenAI/Dante_Contributions/Agents/InjectorAgent.swift
Original file line number Diff line number Diff line change
@@ -0,0 +1,135 @@
//
// InjectorAgent.swift
// OpenAIPackageContributions
//
// Created by Dante Ausonio on 12/14/23.
//

import Foundation

class InjectorAgent {

    private let openAI = OpenAI(apiToken: APIKeys().OpenAIKey)
    /// The chat system whose phases and base behavior this agent manages.
    let system: ChatSystem
    /// Strategy used to build the long-term memory portion of the system prompt.
    let memoryStrategy: MemoryStrategyInterface
    /// Strategy that tracks the high-level scope/summary of the conversation.
    var scope: MemoryStrategyInterface = CMS_Scope()
    /// A check-in is performed roughly every `checkInRate` messages (see `shouldCheckIn(on:)`).
    let checkInRate: Int
    /// Index into `system.phases` for the phase the conversation is currently in.
    var currentPhase = 0

    // Not yet implemented — placeholders for future knowledge-base / tool support.
    let knowledgeBase = ""
    let tools = ""


    init(process: ChatSystem, checkInRate: Int = 3, memoryStrategy: MemoryStrategyInterface = CMS_UserContext()) {
        self.system = process
        self.checkInRate = checkInRate
        self.memoryStrategy = memoryStrategy
    }


    /// Builds the initial system prompt from the base behavior plus the first phase's prompt.
    func initChat() -> String {
        // Guard against a system configured with no phases — the original
        // `system.phases[0]` would have crashed in that case.
        guard let firstPhase = system.phases.first else { return system.baseBehaviorPrompt }
        return "\(system.baseBehaviorPrompt)\n\(firstPhase.prompt)"
    }


    /// Refreshes scope and memory and re-evaluates the phase (all three model
    /// calls run concurrently), then returns the formatted system prompt to
    /// inject into the chat. Updates `currentPhase` as a side effect.
    func performCheckIn(on chat: Conversation) async throws -> String {
        print("\n\n----------------------- Performing Check In -----------------------\n")
        do {
            // Child tasks: scope update, memory update, and phase selection.
            async let scopeString = try scope.setAndGetMemoryString(for: chat)
            async let memoryString = try memoryStrategy.setAndGetMemoryString(for: chat)
            async let phaseNumber = try self.getPhaseNumber(chat)

            self.currentPhase = try await phaseNumber
            let systemPromptInjection = try await self.formatInjectionPrompt(self.currentPhase, scopeString, memoryString)

            return systemPromptInjection
        } catch {
            print("perform check in failed ", error)
            throw error
        }
    }


    /// Asks the model which phase the conversation should be in. The result is
    /// clamped to the valid index range of `system.phases`; on any parse
    /// failure this falls back to phase 0.
    private func getPhaseNumber(_ chat: Conversation) async throws -> Int {
        let agentMessage = chat.messages + [Chat(role: .system, content: self.phaseProgressionPrompt(for: chat))]
        let query = ChatQuery(model: .gpt3_5Turbo_1106, messages: agentMessage, n: 1)
        let result = try await openAI.chats(query: query)
        guard let response = result.choices.first?.message.content else { print("JSON FAILED"); return 0 }
        // The model sometimes pads its numeric answer with whitespace/newlines.
        guard let phaseNumber = Int(response.trimmingCharacters(in: .whitespacesAndNewlines)) else { return 0 }
        // Clamp: an out-of-range index would crash `formatInjectionPrompt`
        // when it subscripts `system.phases`.
        guard !system.phases.isEmpty else { return 0 }
        return min(max(phaseNumber, 0), system.phases.count - 1)
    }



    /// Assembles the injected system prompt from the base behavior, the
    /// conversation scope, the memory contents, and the selected phase's prompt.
    /// `phaseNumber` must be a valid index into `system.phases`.
    private func formatInjectionPrompt(_ phaseNumber: Int, _ scopeStr: String, _ memoryStr: String) -> String {
        let phasePrompt = system.phases[phaseNumber].prompt
        let systemPrompt = "\(system.baseBehaviorPrompt) You rely on the 3 following pieces of information to guide your interactions. Conversation Scope, Memory, and Instructions.\n\nConversation Scope:\n\(scopeStr)\n\nMemory:\n\(self.memoryStrategy.memoryDescription)\n\(memoryStr)\n\nInstructions:\n\(phasePrompt)"
        print("\n\n---------------------------Formatted System Prompt:----------------------------\n\(systemPrompt)")
        return systemPrompt
    }


}






// MARK: Helper Functions
extension InjectorAgent {
    /// True when the chat length has reached a multiple of `checkInRate`
    /// (or one past it, so a check-in is not missed when both the user and
    /// the assistant post between polls). Always false for the first exchange.
    func shouldCheckIn(on chat: Conversation) -> Bool {
        // `% 0` traps at runtime; treat a non-positive rate as "never check in".
        guard checkInRate > 0 else { return false }
        let chatCount = chat.chatHistory().count
        guard chatCount > 1 else { return false }
        return chatCount % checkInRate == 0 || (chatCount - 1) % checkInRate == 0
    }
}








// MARK: Prompts
extension InjectorAgent {


    /// Builds the prompt that asks the manager model which phase the
    /// conversation should be in. A custom instruction on the system, when
    /// present, takes precedence over the built-in linear/nonlinear prompts.
    private func phaseProgressionPrompt(for chat: Conversation) -> String {
        // Custom instructions short-circuit; no need to build phase descriptions.
        if let progressionInstructions = system.phaseProgressionInstruction {
            return progressionInstructions
        }

        var phaseDescriptions = ""
        for (index, phase) in system.phases.enumerated() {
            phaseDescriptions.append("\(phase.name):\nIndex Number: \(index)\nPhase Description:\(phase.description)\n--------------------------------\n")
        }

        // Valid phase indices run 0...count-1. The original prompt overstated
        // the upper bound as `count`, inviting an out-of-range answer.
        let maxIndex = system.phases.count - 1
        switch system.progressionStyle {
        case .linear:
            return "You are a conversational manager. You manage the conversation between a Conversational AI Assistant and a User. There is a set of possible conversational phases the AI can enter. Each phase defines the behavior of the AI Assistant. The phases must progress in order from one to the next. It is your job to determine when to progress to the next phase. The AI is currently in phase number \(self.currentPhase). Decide if the conversation should progress to the next phase based on the scope of the conversation, the conversation so far, and the phase descriptions. Return an integer for the index number associated with the appropriate conversational phase the AI should enter. Return only the index number, and nothing else. In other words, your response should be a number between 0 and \(maxIndex), and that number should be the index number of the phase the AI Assistant should enter. \n\n\nConversation Scope:\n \(self.scope.memoryString) \n\n\n Phase Descriptions:\(phaseDescriptions)"
        case .nonlinear:
            return "You are a conversational manager. You manage the conversation between a Conversational AI Assistant and a User. There is a set of possible conversational phases the AI can enter. Each phase defines the behavior of the AI Assistant. The phases can be entered in any order and do NOT have to come one after the other. It is your job to determine which phase is most useful at this point in the conversation. The AI is currently in phase number \(self.currentPhase). Decide which phase the assistant should enter based on the scope of the conversation, the conversation so far, and the phase descriptions. Return an integer for the index number associated with the appropriate conversational phase the AI should enter. Return only the index number, and nothing else. In other words, your response should be a number between 0 and \(maxIndex), and that number should be the index number of the phase the AI Assistant should enter. \n\n\nConversation Scope:\n \(self.scope.memoryString) \n\n\n Phase Descriptions:\(phaseDescriptions)"
        }

    }
}





// TODO: Figure out how to create a memory system using the Strategy Design Pattern.
Original file line number Diff line number Diff line change
@@ -0,0 +1,17 @@
import Foundation
import NaturalLanguage

extension String {
    /// Number of word-level tokens in the string, as segmented by `NLTokenizer`.
    var tokenCount: Int {
        let tokenizer = NLTokenizer(unit: .word)
        tokenizer.string = self
        var count = 0
        // Count every token; returning true keeps the enumeration going.
        tokenizer.enumerateTokens(in: startIndex..<endIndex) { _, _ in
            count += 1
            return true
        }
        return count
    }
}


126 changes: 126 additions & 0 deletions Sources/OpenAI/Dante_Contributions/Model/AIMemoryModels.swift
Original file line number Diff line number Diff line change
@@ -0,0 +1,126 @@
//
// AIMemoryModel.swift
// OpenAIPackageContributions
//
// Created by Dante Ausonio on 12/28/23.
//

import Foundation

/// Strategy interface for pluggable conversation-memory systems (Strategy pattern).
/// Implementations summarize a `Conversation` into a string that is injected
/// into the conversational AI's system prompt.
protocol MemoryStrategyInterface {
var memoryDescription: String { get set } /// Goes in the system prompt next to "Memory:" to describe how this memory system works to the conversational AI.
var memoryString: String { get set } /// The current memory contents injected into the system prompt.

/// Refreshes the memory from the chat, then returns the updated `memoryString`.
func setAndGetMemoryString(for chat: Conversation) async throws -> String
/// Refreshes the memory from the chat, storing the result in `memoryString`.
func setMemoryString(for chat: Conversation) async throws
/// Returns memory relevant to `query`. NOTE(review): current implementations ignore the query and return everything.
func queryMemory(query: String) -> String

}


// CMS = Concrete Memory Strategy

//MARK: Scope Memory
/// Memory strategy that maintains a high-level "scope" description
/// (topic/summary) of the conversation, refreshed via a model call.
class CMS_Scope: MemoryStrategyInterface {

    private let openAI = OpenAI(apiToken: APIKeys().OpenAIKey)
    /// Custom update prompt supplied at init; when nil the computed default is used.
    private let customUpdateScopePrompt: String?
    /// Latest scope description produced by `setMemoryString(for:)`.
    var memoryString: String = ""
    var memoryDescription: String

    /// Prompt actually sent to the model. Computed at call time so the
    /// interpolated `memoryString` is current — the original assigned this
    /// once in `init`, freezing the "current Scope Description" at "".
    private var updateScopePrompt: String {
        return customUpdateScopePrompt ?? defaultUpdateScopePrompt
    }

    private var defaultUpdateScopePrompt: String {
        return "You are a conversational manager. You manage the conversation between a User and a conversational AI Assistant. Your task is to update the Scope Description for the conversation in order to give the AI conversational memory. This memory is used to determine which conversational paths should be chosen. The Scope Description tells the AI Assistant what the topic of conversation is, what the conversation is about, what has been discussed, what should be discussed and serves as an overall summary for the conversation thus far. The scope should be abstract enough to capture the essence of the conversation without getting bogged down in the details. It is a high level overview of the conversation, meant for keeping the AI on track. If there is no new relevant information, simply return the original context description. Don't generate repetitive information. Keep everything concise and to the point. \nHere is the current Scope Description: \(self.memoryString)\nReturn json containing the updated Scope Description in the form {'description': string value}."
    }
    private var defaultMemoryDescription: String = "Your memory describes the scope of the conversation"

    init(customUpdateScopePrompt: String? = nil, customMemoryDescription: String? = nil) {
        self.customUpdateScopePrompt = customUpdateScopePrompt
        self.memoryDescription = customMemoryDescription ?? "Your memory describes the scope of the conversation"
    }

    /// Refreshes the scope from the chat and returns the updated string.
    func setAndGetMemoryString(for chat: Conversation) async throws -> String {
        do {
            try await setMemoryString(for: chat)
            return self.memoryString
        } catch {
            print("CMS_Scope.setAndGetMemoryString Failed! \(error)")
            throw error
        }
    }

    /// Asks the model for an updated scope description (as JSON) and stores
    /// it in `memoryString`.
    func setMemoryString(for chat: Conversation) async throws {
        do {
            let agentMessage = chat.messages + [Chat(role: .system, content: self.updateScopePrompt)]
            let query = ChatQuery(model: .gpt3_5Turbo_1106, messages: agentMessage, responseFormat: .jsonObject, n: 1)

            let result = try await openAI.chats(query: query)
            // Best-effort: a missing response leaves the previous scope in place.
            guard let json = result.choices.first?.message.content?.data(using: .utf8) else { return }
            let decodedJSON = try JSONDecoder().decode([String: String].self, from: json)
            self.memoryString = decodedJSON["description"] ?? ""
        } catch {
            print("CMS_Scope.setMemoryString Failed! \(error)")
            throw error
        }
    }

    /// Returns the full scope; the query parameter is currently unused.
    func queryMemory(query: String = "") -> String {
        return memoryString
    }
}



// MARK: User Context Memory
/// Memory strategy that maintains a bullet-point list of facts learned about
/// the user, refreshed via a model call.
class CMS_UserContext: MemoryStrategyInterface {
    private let openAI = OpenAI(apiToken: APIKeys().OpenAIKey)
    /// Custom update prompt supplied at init; when nil the computed default is used.
    private let customUpdateMemoryPrompt: String?
    var memoryDescription: String
    var memoryString: String = ""

    var defaultMemoryDescription: String = "Your memory provides a list of facts known about the user."
    /// Default instruction sent to the manager model. Computed so it always
    /// interpolates the current `memoryString` — the original assigned the
    /// prompt once in `init`, freezing the "current user context memory" at "".
    var defautUpdateMemoryPrompt: String {"You are a conversational manager. You manage a conversation held between a conversational AI assistant and a user. Your task is to update the user context memory of the conversational AI using the conversation so far. The User Context Memory is a bullet point list of facts learned about the user during the conversation that contain key details and information about the user. It should contain specific details not general statements. If there is no new relevant information, simply return the original context description. Don't generate repetitive information. Keep everything concise and to the point. \nHere is the current user context memory: \(self.memoryString)\n Return JSON containing the updated User Context Memory in the form {'description': context} where context is the fact list formatted as a string."
    }

    /// Prompt actually sent to the model.
    private var updateMemoryPrompt: String { customUpdateMemoryPrompt ?? defautUpdateMemoryPrompt }

    init(customUpdateScopePrompt: String? = nil, customMemoryDescription: String? = nil) {
        self.customUpdateMemoryPrompt = customUpdateScopePrompt
        self.memoryDescription = customMemoryDescription ?? "Your memory provides a list of facts known about the user."
    }

    /// Refreshes the user-context memory from the chat and returns the updated string.
    func setAndGetMemoryString(for chat: Conversation) async throws -> String {
        try await setMemoryString(for: chat)
        return self.memoryString
    }

    /// Asks the model for an updated fact list (as JSON) and stores it in `memoryString`.
    func setMemoryString(for chat: Conversation) async throws {
        do {
            let agentMessage = chat.messages + [Chat(role: .system, content: self.updateMemoryPrompt)]
            let query = ChatQuery(model: .gpt3_5Turbo_1106, messages: agentMessage, responseFormat: .jsonObject, n: 1)

            let result = try await openAI.chats(query: query)
            // Best-effort: a missing response leaves the previous memory in place.
            guard let json = result.choices.first?.message.content?.data(using: .utf8) else { return }
            let decodedJSON = try JSONDecoder().decode([String: String].self, from: json)
            self.memoryString = decodedJSON["description"] ?? ""
            print("\nCMS_UserContext memory Decoded and Formatted Result:\n", decodedJSON)
        } catch {
            print("CMS_UserContext.setMemoryString(for:) Failed! \(error)")
            throw error
        }
    }

    /// Returns the full fact list; the query parameter is currently unused.
    func queryMemory(query: String = "") -> String {
        return memoryString
    }
}



// MARK: More Memory Strategies...







46 changes: 46 additions & 0 deletions Sources/OpenAI/Dante_Contributions/Model/ChatSystems.swift
Original file line number Diff line number Diff line change
@@ -0,0 +1,46 @@
//
// ProcessDescriptions.swift
// OpenAIPackageContributions
//
// Created by Dante Ausonio on 12/24/23.
//

import Foundation

/// Declarative description of a conversational system: its phases, how they
/// progress, and the assistant's base behavior.
struct ChatSystem {
    /// Answers what the system does or is used for. It allows the observer to know why the process exists and why it might be chosen.
    let description: String

    /// Each phase adjusts the behavior of the LLM allowing for an evolving conversation.
    let phases: [Phase]

    /// How the Observer should progress through each phase. Linear = progress through each phase in order; nonlinear = jump to whichever phase seems fit.
    let progressionStyle: phaseProgression

    /// Sets the assistant's behavior and is used in all system prompts. Sets the role, identity and overall personality of the LLM; each phase then adjusts how this entity acts.
    let baseBehaviorPrompt: String

    /// Instructs the Observer AI on how the phases should progress.
    /// NOTE: this was `let ... = nil`, which both excluded it from the
    /// memberwise initializer and made it permanently nil, so custom
    /// progression instructions could never be supplied. `var` with a
    /// default keeps the memberwise init backward compatible while making
    /// the property settable.
    var phaseProgressionInstruction: String? = nil


    /// Not yet fully implemented.
    /// NOTE(review): type name should be UpperCamelCase (`PhaseProgression`),
    /// but renaming would break external references; left as-is.
    enum phaseProgression {
        case linear, nonlinear

        var prompt: String {
            switch self {
            case .linear: /// Each phase comes one after the other in order of placement in the phases array
                return ""
            case .nonlinear: /// No order to phases. Any phase can come at any time
                return ""
            }
        }

    }
}


/// Each phase defines a particular kind of behavior that the conversational AI can take on in a chat system
struct Phase {
var name: String /// Name used to identify or reference the phase

var description: String /// Describes the phase to the Observer AI so it knows when and why to choose it

var prompt: String /// The system prompt to be injected into the conversational AI that defines its behavior for this conversational phase
}
Loading