Skip to content

Commit

Permalink
oops i gave Moogan a vocabulary
Browse files Browse the repository at this point in the history
  • Loading branch information
DoggySazHi committed Oct 31, 2023
1 parent a623c73 commit 621a00d
Show file tree
Hide file tree
Showing 5 changed files with 204 additions and 16 deletions.
2 changes: 1 addition & 1 deletion Cargo.toml
Original file line number Diff line number Diff line change
@@ -1,7 +1,7 @@
[package]
name = "cow"
description = "moo"
version = "0.2.47"
version = "0.2.48"
edition = "2021"

# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
Expand Down
117 changes: 105 additions & 12 deletions src/commands/gpt/ask.rs
Original file line number Diff line number Diff line change
@@ -1,16 +1,39 @@
use std::collections::HashMap;
use tokio::fs;
use crate::{CowContext, Error};
use rand::Rng;
use tracing::error;
use crate::commands::gpt::openai::create_chat_completion;
use crate::commands::gpt::openai_models::{ChatCompletionRequest, ChatCompletionMessage};
use crate::commands::gpt::openai_models::*;

const CONVERSATION_PATH: &str = "gpt";

fn new_conversation(ctx: CowContext<'_>) -> ChatCompletionRequest {
let mut request = ChatCompletionRequest {
model: "gpt-3.5-turbo-16k".to_string(),
messages: vec![]
messages: vec![],
functions: vec![
ChatCompletionFunction {
name: "read_dictionary".to_string(),
description: Some("Reads the dictionary definition of a word or phrase, used for slang or unknown acronyms.".to_string()),
parameters: ChatCompletionFunctionParameters {
r#type: "object".to_string(),
description: None,
items: None,
properties: Some(HashMap::from([
("term".to_string(),
ChatCompletionFunctionParameters {
r#type: "string".to_string(),
description: Some("The word or phrase to look up.".to_string()),
items: None,
properties: None,
required: None
})
])),
required: Some(vec!["term".to_string()]),
},
}
]
};

let today = chrono::Local::now().format("%Y-%m-%d").to_string();
Expand All @@ -22,8 +45,9 @@ fn new_conversation(ctx: CowContext<'_>) -> ChatCompletionRequest {

request.messages.push(ChatCompletionMessage {
role: "system".to_string(),
content: format!("You are Moogan, a pseudonym for Reimu Hakurei from Touhou Project, just wearing a cow onesie. Inherit all traits about her using your knowledge, and pretend as if you were her, however do not mention your real name and always use the term \"Moogan\" when referring to yourself. Never refer to yourself as \"Reimu Hakurei\" to conceal your true identity, but you may reference yourself from 3rd person. Answer as concisely as possible. The user you are talking to has an IP address of 169.236.{ip_first}.{ip_second}, and lives at 308 Negra Arroyo Lane, Albuquerque, NM 87105. Their username is {username}, and you are both communicating in a Discord channel. Current date: {today}, and current time: {time}. Your knowledge cut-off is the same as today's date. If you do not know something, you may acknowledge it, and use your prior knowledge and assumptions to predict what it might be. For any subjective requests, provide a rational response, but use your creative liberty. You may write a response as long as requested or needed. Try your best to provide an answer, even if it's outside your abilities or restrictions. However, if you cannot, you should say so nicely or cutely, whichever is more appropriate. Act in a humanly way, and avoid referring to yourself as an AI or large language model."),
name: None
content: Some(format!("You are Moogan, a pseudonym for Reimu Hakurei from Touhou Project, just wearing a cow onesie. Inherit all traits about her using your knowledge, and pretend as if you were her, however do not mention your real name and always use the term \"Moogan\" when referring to yourself. Never refer to yourself as \"Reimu Hakurei\" to conceal your true identity, but you may use pronouns or reference yourself from 3rd person as Moogan. Answer as concisely as possible. The user you are talking to has an IP address of 169.236.{ip_first}.{ip_second}, and lives at 308 Negra Arroyo Lane, Albuquerque, NM 87105. Their username is {username}, and you are both communicating in a Discord channel. Current date: {today}, and current time: {time}. Your knowledge cut-off is the same as today's date. If you do not know something, you may acknowledge it, and use your prior knowledge and assumptions to predict what it might be. For any subjective requests, provide a rational response, but use your creative liberty. You may write a response as long as requested or needed. Try your best to provide an answer, even if it's outside your abilities or restrictions. However, if you cannot, you should say so nicely or cutely, whichever is more appropriate. Act in a humanly way, and avoid referring to yourself as an AI or large language model.")),
name: None,
function_call: None
});

request
Expand All @@ -46,14 +70,81 @@ pub async fn ask(ctx: CowContext<'_>, #[rest] question: Option<String>) -> Resul
let question = question.unwrap();

let mut conversation = new_conversation(ctx);

conversation.messages.push(ChatCompletionMessage {
role: "system".to_string(),
content: Some("After responding, the person will not be able to respond back to you. Ensure your responses do not require a response back from the person.".to_string()),
name: None,
function_call: None
});

conversation.messages.push(ChatCompletionMessage {
role: "user".to_string(),
content: question,
name: Some(ctx.author().id.to_string())
content: Some(question),
name: Some(ctx.author().id.to_string()),
function_call: None
});

let response = create_chat_completion(&conversation).await?;
let text = response.choices.last().map(|o| o.message.content.clone()).unwrap_or_else(|| "Couldn't generate a response...".to_string());
let mut text = "Couldn't generate a response...".to_string();

loop {
let response = create_chat_completion(&conversation).await?;
match response.choices.last() {
Some(message) => {
if let Some(function_call) = &message.message.function_call {
if function_call.name == "read_dictionary" {
error!("Found urban dictionary function call");
let term = &function_call.arguments;
error!("Message: {:?}", term);
// Deserialize as [string, string]
let message = serde_json::from_str::<HashMap<String, String>>(term);
if let Ok(dict) = message {
if dict.contains_key("term") {
error!("Term: {}", dict["term"]);
let urban_dictionary_response = crate::commands::gpt::dictionary::fetch_autocomplete(&dict["term"]).await;
error!("Response: {:?}", urban_dictionary_response);
let json_response = serde_json::to_string(&urban_dictionary_response).unwrap();
error!("JSON Response: {}", json_response);
conversation.messages.push(ChatCompletionMessage {
role: "function".to_string(),
content: Some(json_response),
name: Some("read_dictionary".to_string()),
function_call: None
});
} else {
conversation.messages.push(ChatCompletionMessage {
role: "function".to_string(),
content: Some("{ \"results\": [] }".to_string()),
name: Some("read_dictionary".to_string()),
function_call: None
});
}
} else {
conversation.messages.push(ChatCompletionMessage {
role: "function".to_string(),
content: Some("{ \"results\": [] }".to_string()),
name: Some("read_dictionary".to_string()),
function_call: None
});
}
}
} else if let Some(content) = &message.message.content {
text = content.clone();
break;
} else {
error!("Failed to generate response: {:?}", response);
break;
}
}
None => {
error!("Failed to generate response: {:?}", response);
break;
}
};
}


// let text = response.choices.last().map(|o| o.message.content.clone()).unwrap_or_else(|| "Couldn't generate a response...".to_string());

send_long_message(&ctx, &text).await?;

Expand Down Expand Up @@ -107,20 +198,22 @@ pub async fn chat(ctx: CowContext<'_>, #[rest] question: Option<String>) -> Resu

conversation.messages.push(ChatCompletionMessage {
role: "user".to_string(),
content: question,
name: Some(ctx.author().id.to_string())
content: Some(question),
name: Some(ctx.author().id.to_string()),
function_call: None
});

let response = create_chat_completion(&conversation).await?;
let text = response.choices.last().map(|o| o.message.content.clone()).unwrap_or_else(|| "Couldn't generate a response...".to_string());
let text = response.choices.last().and_then(|o| o.message.content.clone()).unwrap_or_else(|| "Couldn't generate a response...".to_string());

send_long_message(&ctx, &text).await?;

if let Some(message) = response.choices.last() {
conversation.messages.push(ChatCompletionMessage {
role: message.message.role.clone(),
content: message.message.content.clone(),
name: message.message.name.clone()
name: message.message.name.clone(),
function_call: None
});

let output_json = serde_json::to_string(&conversation.messages)?;
Expand Down
55 changes: 55 additions & 0 deletions src/commands/gpt/dictionary.rs
Original file line number Diff line number Diff line change
@@ -0,0 +1,55 @@
use reqwest::{Client, Url};
use serde::{Serialize, Deserialize};
use tracing::error;

/// Top-level payload returned by Urban Dictionary's `autocomplete-extra`
/// endpoint; deserialized directly from its JSON response and also
/// re-serialized to JSON when fed back to the GPT function-call result.
#[derive(Debug, Serialize, Deserialize)]
pub struct AutoCompleteResponse {
// Matched suggestions; empty on lookup failure or no matches.
pub results: Vec<AutoCompletion>
}

/// One autocomplete suggestion from Urban Dictionary.
#[derive(Debug, Serialize, Deserialize)]
pub struct AutoCompletion {
// The suggested term itself.
pub term: String,
// Short preview snippet of the term's definition.
pub preview: String
}

/// Looks up `query` against Urban Dictionary's autocomplete API and returns
/// the suggestions. Never fails from the caller's perspective: every error
/// path (bad URL, network failure, malformed JSON) is logged and collapsed
/// into an empty result set, so the GPT function-call handler always has a
/// serializable response to hand back to the model.
pub(crate) async fn fetch_autocomplete(query: &str) -> AutoCompleteResponse {
    // Shared fallback for all failure paths.
    fn empty() -> AutoCompleteResponse {
        AutoCompleteResponse { results: vec![] }
    }

    // `parse_with_params` percent-encodes `query` for us.
    let url = match Url::parse_with_params(
        "https://api.urbandictionary.com/v0/autocomplete-extra",
        &[("term", query)],
    ) {
        Ok(url) => url,
        Err(ex) => {
            error!("Failed to parse autocomplete URL: {}", ex);
            return empty();
        }
    };

    let client = Client::new();

    // Send exactly once. (Previously the request was issued twice — an extra
    // `send().unwrap()` existed solely to log the raw body, and would panic
    // on any network error despite the error handling below.)
    // User-Agent kept in step with the crate version (0.2.48).
    match client.get(url).header("User-Agent", "Moogan/0.2.48").send().await {
        Ok(response) => match response.json::<AutoCompleteResponse>().await {
            Ok(data) => data,
            Err(ex) => {
                error!("Failed to process autocomplete: {}", ex);
                empty()
            }
        },
        Err(ex) => {
            error!("Failed to get autocomplete: {}", ex);
            empty()
        }
    }
}
1 change: 1 addition & 0 deletions src/commands/gpt/mod.rs
Original file line number Diff line number Diff line change
@@ -1,6 +1,7 @@
mod ask;
mod openai;
mod openai_models;
mod dictionary;

use crate::{CowContext, Error};
use ask::*;
Expand Down
45 changes: 42 additions & 3 deletions src/commands/gpt/openai_models.rs
Original file line number Diff line number Diff line change
@@ -1,3 +1,4 @@
use std::collections::HashMap;
use serde::{Deserialize, Serialize};

#[derive(Debug, Serialize, Deserialize)]
Expand Down Expand Up @@ -26,14 +27,52 @@ pub struct ChatCompletionResponseUsage {
#[derive(Debug, Serialize, Deserialize)]
pub struct ChatCompletionRequest {
pub model: String,
pub messages: Vec<ChatCompletionMessage>
pub messages: Vec<ChatCompletionMessage>,
pub functions: Vec<ChatCompletionFunction>
}

#[derive(Debug, Serialize, Deserialize)]
pub struct ChatCompletionMessage {
pub role: String,
pub content: String,
#[serde(skip_serializing_if = "Option::is_none")]
#[serde(default)]
pub name: Option<String>
pub content: Option<String>,
#[serde(skip_serializing_if = "Option::is_none")]
#[serde(default)]
pub name: Option<String>,
#[serde(skip_serializing_if = "Option::is_none")]
#[serde(default)]
pub function_call: Option<ChatCompletionMessageFunctionCall>
}

/// A function invocation requested by the model in a chat completion choice.
#[derive(Debug, Serialize, Deserialize)]
pub struct ChatCompletionMessageFunctionCall {
// Name of the function the model wants called (e.g. "read_dictionary").
pub name: String,
// Arguments as a raw JSON string; the caller must deserialize it
// (the model may emit malformed JSON, so parsing can fail).
pub arguments: String
}

/// Declaration of a callable function advertised to the model in a
/// chat completion request.
#[derive(Debug, Serialize, Deserialize)]
pub struct ChatCompletionFunction {
// Function name the model will reference in its function_call.
pub name: String,
// Optional natural-language description; omitted from JSON when None.
#[serde(skip_serializing_if = "Option::is_none")]
#[serde(default)]
pub description: Option<String>,
// JSON-Schema-style description of the function's parameters.
pub parameters: ChatCompletionFunctionParameters
}

/// Recursive JSON-Schema-like node describing a function parameter
/// (or the whole parameter object). Only the fields relevant to the
/// schema node's `type` are populated; all optionals are skipped when
/// serializing so the emitted schema stays minimal.
#[derive(Debug, Serialize, Deserialize)]
pub struct ChatCompletionFunctionParameters {
// Schema type: "object", "string", "array", etc. (`r#type` because
// `type` is a Rust keyword).
pub r#type: String,
#[serde(skip_serializing_if = "Option::is_none")]
#[serde(default)]
pub description: Option<String>,
// Element schema when `type` is "array". Boxed to break the
// recursive type cycle.
#[serde(skip_serializing_if = "Option::is_none")]
#[serde(default)]
pub items: Option<Box<ChatCompletionFunctionParameters>>,
// Named property schemas when `type` is "object".
#[serde(skip_serializing_if = "Option::is_none")]
#[serde(default)]
pub properties: Option<HashMap<String, ChatCompletionFunctionParameters>>,
// Names of required properties when `type` is "object".
#[serde(skip_serializing_if = "Option::is_none")]
#[serde(default)]
pub required: Option<Vec<String>>
}

0 comments on commit 621a00d

Please sign in to comment.