From aadab39a60e79184cad9e005dbf28a05fb3ff468 Mon Sep 17 00:00:00 2001 From: Joshua Mo Date: Tue, 4 Feb 2025 01:01:10 +0000 Subject: [PATCH 1/3] feat: agent context --- rig-core/src/completion.rs | 46 ++++++++++++++++++++++++++++++++++++++ 1 file changed, 46 insertions(+) diff --git a/rig-core/src/completion.rs b/rig-core/src/completion.rs index 48ce8f8b..304f3f89 100644 --- a/rig-core/src/completion.rs +++ b/rig-core/src/completion.rs @@ -246,6 +246,52 @@ pub trait CompletionModel: Clone + Send + Sync { } } +/// A way to get the final prompt of a completion request. +/// +/// Example usage: +/// ```rust +/// use rig::{ +/// providers::openai::{Client, self}, +/// completion::CompletionRequestBuilder, +/// }; +/// +/// let openai = Client::new("your-openai-api-key"); +/// let model = openai.completion_model(openai::GPT_4O).build(); +/// +/// // Create the completion request and execute it separately +/// let request = CompletionRequestBuilder::new(model, "Who are you?".to_string()) +/// .preamble("You are Marvin from the Hitchhiker's Guide to the Galaxy.".to_string()) +/// .temperature(0.5) +/// .build(); +/// +/// let prompt_data = PromptData::from_completion_request(&request); +/// println!("Preamble: {:?}\n\nPrompt: {:?}", prompt_data.preamble(), prompt_data.prompt()) +/// +/// // .. the rest of your code here +/// ``` +pub struct PromptData { + preamble: Option<String>, + prompt: String, +} + +impl PromptData { + pub fn from_completion_request(req: &CompletionRequest) -> Self { + let preamble = req.preamble.to_owned(); + // Add context documents to chat history + let prompt = req.prompt_with_context(); + + Self { preamble, prompt } + } + + pub fn preamble<'a>(&'a self) -> &'a Option<String> { + &self.preamble + } + + pub fn prompt<'a>(&'a self) -> &'a str { + &self.prompt + } +} + /// Struct representing a general completion request that can be sent to a completion model provider. 
pub struct CompletionRequest { /// The prompt to be sent to the completion model provider From f3038081be951b3130265dcc8c82bca73c18c8bf Mon Sep 17 00:00:00 2001 From: Joshua Mo Date: Tue, 4 Feb 2025 01:08:33 +0000 Subject: [PATCH 2/3] feat: add prompt op with tracing --- rig-core/src/pipeline/agent_ops.rs | 40 ++++++++++++++++++++++++++++++ 1 file changed, 40 insertions(+) diff --git a/rig-core/src/pipeline/agent_ops.rs b/rig-core/src/pipeline/agent_ops.rs index 320eed74..52528c3c 100644 --- a/rig-core/src/pipeline/agent_ops.rs +++ b/rig-core/src/pipeline/agent_ops.rs @@ -102,6 +102,46 @@ where Prompt::new(model) } +pub struct TracePrompt<P, In> { + prompt: P, + _in: std::marker::PhantomData<In>, +} + +impl<P, In> TracePrompt<P, In> { + pub(crate) fn new(prompt: P) -> Self { + Self { + prompt, + _in: std::marker::PhantomData, + } + } +} + +/// Create a new prompt operation. +/// +/// The op will prompt the `model` with the input and return the response. +pub fn prompt_with_tracing<P, In>(model: P) -> TracePrompt<P, In> +where + P: completion::Prompt, + In: Into<String> + Send + Sync, +{ + TracePrompt::new(model) +} + +impl<P, In> Op for TracePrompt<P, In> +where + P: completion::Prompt, + In: Into<String> + Send + Sync, +{ + type Input = In; + type Output = Result<String, completion::PromptError>; + + async fn call(&self, input: Self::Input) -> Self::Output { + let prompt: String = input.into(); + tracing::info!("Sending prompt: {prompt}"); + self.prompt.prompt(&prompt).await + } +} + pub struct Extract<M, Input, Output> where M: CompletionModel, From 4b0a48ddc7410bb5c9537e68b657e352889a4200 Mon Sep 17 00:00:00 2001 From: Joshua Mo Date: Tue, 4 Feb 2025 01:14:21 +0000 Subject: [PATCH 3/3] chore: clippy --- rig-core/src/completion.rs | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/rig-core/src/completion.rs b/rig-core/src/completion.rs index 304f3f89..f06ecf72 100644 --- a/rig-core/src/completion.rs +++ b/rig-core/src/completion.rs @@ -283,11 +283,11 @@ impl PromptData { Self { preamble, prompt } } - pub fn preamble<'a>(&'a self) -> &'a Option<String> { - 
&self.preamble + pub fn preamble(&self) -> Option<String> { + self.preamble.to_owned() } - pub fn prompt<'a>(&'a self) -> &'a str { + pub fn prompt(&self) -> &str { &self.prompt } }