feat: View static agent context #267

Open · wants to merge 3 commits into main
46 changes: 46 additions & 0 deletions rig-core/src/completion.rs
@@ -246,6 +246,52 @@ pub trait CompletionModel: Clone + Send + Sync {
}
}

/// A snapshot of the final preamble and prompt of a completion request,
/// useful for inspecting the static context that will be sent to the model.
///
/// Example usage:
/// ```rust
/// use rig::{
///     providers::openai::{self, Client},
///     completion::{CompletionRequestBuilder, PromptData},
/// };
///
/// let openai = Client::new("your-openai-api-key");
/// let model = openai.completion_model(openai::GPT_4O).build();
///
/// // Create the completion request and execute it separately
/// let request = CompletionRequestBuilder::new(model, "Who are you?".to_string())
///     .preamble("You are Marvin from the Hitchhiker's Guide to the Galaxy.".to_string())
///     .temperature(0.5)
///     .build();
///
/// let prompt_data = PromptData::from_completion_request(&request);
/// println!("Preamble: {:?}\n\nPrompt: {:?}", prompt_data.preamble(), prompt_data.prompt());
///
/// // .. the rest of your code here
/// ```
pub struct PromptData {
    preamble: Option<String>,
    prompt: String,
}

impl PromptData {
    pub fn from_completion_request(req: &CompletionRequest) -> Self {
        let preamble = req.preamble.to_owned();
        // Fold any static context documents into the final prompt
        let prompt = req.prompt_with_context();

        Self { preamble, prompt }
    }

    pub fn preamble(&self) -> Option<String> {
        self.preamble.to_owned()
    }

    pub fn prompt(&self) -> &str {
        &self.prompt
    }
}

/// Struct representing a general completion request that can be sent to a completion model provider.
pub struct CompletionRequest {
/// The prompt to be sent to the completion model provider
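
Not part of the diff, but a minimal sketch of how `PromptData` could also surface static context documents, mirroring the builder calls from the doc comment above; the `Document` fields and the `.documents(...)` builder method are assumed to match rig-core's existing `CompletionRequestBuilder` API, and the API key and document contents are placeholders.

```rust
use rig::{
    completion::{CompletionRequestBuilder, Document, PromptData},
    providers::openai::{self, Client},
};

fn main() {
    let openai = Client::new("your-openai-api-key");
    let model = openai.completion_model(openai::GPT_4O).build();

    // Attach a static context document; `prompt_with_context()` folds it
    // into the final prompt that `PromptData` exposes.
    let request = CompletionRequestBuilder::new(model, "Who are you?".to_string())
        .preamble("You are Marvin from the Hitchhiker's Guide to the Galaxy.".to_string())
        .documents(vec![Document {
            // Hypothetical document, for illustration only
            id: "persona".to_string(),
            text: "Marvin is a chronically depressed robot with a brain the size of a planet.".to_string(),
            additional_props: Default::default(),
        }])
        .build();

    let prompt_data = PromptData::from_completion_request(&request);
    println!("Preamble: {:?}", prompt_data.preamble());
    println!("Prompt: {}", prompt_data.prompt());
}
```

Because `from_completion_request` calls `prompt_with_context()`, the document text appears inline in the returned prompt string.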
40 changes: 40 additions & 0 deletions rig-core/src/pipeline/agent_ops.rs
@@ -102,6 +102,46 @@ where
Prompt::new(model)
}

pub struct TracePrompt<P, In> {
    prompt: P,
    _in: std::marker::PhantomData<In>,
}

impl<P, In> TracePrompt<P, In> {
    pub(crate) fn new(prompt: P) -> Self {
        Self {
            prompt,
            _in: std::marker::PhantomData,
        }
    }
}

/// Create a new prompt operation that also traces the prompt.
///
/// The op logs the final prompt via `tracing` at `info` level, prompts the
/// `model` with the input, and returns the response.
pub fn prompt_with_tracing<P, In>(model: P) -> TracePrompt<P, In>
where
    P: completion::Prompt,
    In: Into<String> + Send + Sync,
{
    TracePrompt::new(model)
}

impl<P, In> Op for TracePrompt<P, In>
where
    P: completion::Prompt,
    In: Into<String> + Send + Sync,
{
    type Input = In;
    type Output = Result<String, completion::PromptError>;

    async fn call(&self, input: Self::Input) -> Self::Output {
        let prompt: String = input.into();
        tracing::info!("Sending prompt: {prompt}");
        self.prompt.prompt(&prompt).await
    }
}

pub struct Extract<M, Input, Output>
where
M: CompletionModel,
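
A minimal usage sketch for `prompt_with_tracing`, assuming it is exposed from `rig::pipeline::agent_ops` as added in this diff and that the `Op` trait is in scope; the agent setup, API key, and the `tokio`/`anyhow`/`tracing_subscriber` dependencies are assumptions, not part of the PR.

```rust
use rig::{
    pipeline::{agent_ops::prompt_with_tracing, Op},
    providers::openai,
};

#[tokio::main]
async fn main() -> anyhow::Result<()> {
    // Print `tracing::info!` events, including the traced prompt, to stdout.
    tracing_subscriber::fmt().init();

    // Any type implementing `completion::Prompt` works here; an agent is one example.
    let openai = openai::Client::new("your-openai-api-key");
    let agent = openai
        .agent(openai::GPT_4O)
        .preamble("You are a helpful assistant.")
        .build();

    // Wrap the agent so the final prompt is logged before it is sent.
    let op = prompt_with_tracing(agent);
    let response: String = op.call("Who are you?".to_string()).await?;
    println!("{response}");
    Ok(())
}
```

With a subscriber installed, the `tracing::info!("Sending prompt: {prompt}")` line in `TracePrompt::call` makes the final prompt visible without changing the op's output type.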