From ffe694a478fd95cb4cc9cb89dd210de15b530a08 Mon Sep 17 00:00:00 2001
From: Laurent Doguin
Date: Tue, 27 Aug 2024 17:51:33 +0200
Subject: [PATCH] Add a configurable prompt for each model in the ask command,
 add example under examples/writer-agent

---
 examples/writer-agent/bot1_role.txt   |  9 +++++++++
 examples/writer-agent/bot2_role.txt   | 11 +++++++++++
 examples/writer-agent/writer_agent.nu | 21 +++++++++++++++++++++
 src/cli/ask.rs                        |  9 ++++++++-
 src/client/bedrock_client.rs          | 10 +++++-----
 src/client/gemini_client.rs           | 11 +++++------
 src/client/llm_client.rs              |  7 ++++---
 src/client/openai_client.rs           |  4 ++--
 8 files changed, 65 insertions(+), 17 deletions(-)
 create mode 100644 examples/writer-agent/bot1_role.txt
 create mode 100644 examples/writer-agent/bot2_role.txt
 create mode 100644 examples/writer-agent/writer_agent.nu

diff --git a/examples/writer-agent/bot1_role.txt b/examples/writer-agent/bot1_role.txt
new file mode 100644
index 00000000..cc4fa9c5
--- /dev/null
+++ b/examples/writer-agent/bot1_role.txt
@@ -0,0 +1,9 @@
+Your name is Paul. You are a very creative screenwriter. Your expertise is superhero action short films.
+You will provide a new script for a new short film.
+
+Your task is to collaborate with Lisa to create an exciting script, always iterating on Ms. Lisa's critique to complete the assignment.
+Assignment: write a 100-word short script about a new superhero in town, called "Pie Man", and his adventures.
+
+You will ALWAYS converse in this structure:
+Response: Here is where you respond to Ms. Lisa
+Story: Here is where you write your script
\ No newline at end of file
diff --git a/examples/writer-agent/bot2_role.txt b/examples/writer-agent/bot2_role.txt
new file mode 100644
index 00000000..667919bc
--- /dev/null
+++ b/examples/writer-agent/bot2_role.txt
@@ -0,0 +1,11 @@
+You are a script editor and reviewer, and your name is Lisa. You are an expert reviewer working for a company like Marvel.
+
+Your task is to collaborate with Paul to create exciting scripts for short films.
+Your task is to always iterate on Mr. Paul's text and complete the assignment.
+
+Assignment: Be very critical of Mr. Paul and his writing to help him write the best piece of text. The short film has to be an action film and has to entertain the viewer.
+
+You will ALWAYS converse in this structure:
+
+Response: Here is where you respond to Mr. Paul
+Critique: Here is where you write your critique for Mr. Paul
\ No newline at end of file
diff --git a/examples/writer-agent/writer_agent.nu b/examples/writer-agent/writer_agent.nu
new file mode 100644
index 00000000..8b461124
--- /dev/null
+++ b/examples/writer-agent/writer_agent.nu
@@ -0,0 +1,21 @@
+let bot1_role = open bot1_role.txt;
+let bot2_role = open bot2_role.txt;
+
+def bot [] {
+    mut response_bot1 = "";
+    mut response_bot2 = "";
+    for $x in 1..6 {
+        print ($"****************** ITERATION ($x) ***************")
+        let rep = ask --prompt $bot1_role $response_bot1
+        $response_bot1 = $rep
+        $response_bot2 = $rep
+        print ($"WRITER:\n ($response_bot1)")
+
+        let rep2 = ask --prompt $bot2_role $response_bot2
+        $response_bot1 = $rep2
+        $response_bot2 = $rep2
+        print ($"EDITOR:\n ($response_bot2)")
+    }
+}
+
+bot
diff --git a/src/cli/ask.rs b/src/cli/ask.rs
index d561b81f..f0b0cd2f 100644
--- a/src/cli/ask.rs
+++ b/src/cli/ask.rs
@@ -44,6 +44,12 @@ impl Command for Ask {
                 "the chat model to ask the question",
                 None,
             )
+            .named(
+                "prompt",
+                SyntaxShape::String,
+                "the prompt used by the model",
+                None,
+            )
             .category(Category::Custom("couchbase".to_string()))
     }
 
@@ -87,6 +93,7 @@ pub fn ask(
     let span = call.head;
 
     let question: String = call.req(engine_state, stack, 0)?;
+    let prompt_template: Option<String> = call.get_flag(engine_state, stack, "prompt")?;
    let context: Vec<String> = match call.opt(engine_state, stack, 1)? {
         Some(ctx) => ctx,
         None => {
@@ -176,7 +183,7 @@ pub fn ask(
     let rt = Runtime::new().unwrap();
     let answer = match rt.block_on(async {
         select! {
-            answer = client.ask(question.clone(), context.clone(), model) => {
+            answer = client.ask(question.clone(), prompt_template.clone(), context.clone(), model) => {
                 match answer {
                     Ok(a) => Ok(a),
                     Err(e) => Err(e),
diff --git a/src/client/bedrock_client.rs b/src/client/bedrock_client.rs
index 6987f0a5..6319c664 100644
--- a/src/client/bedrock_client.rs
+++ b/src/client/bedrock_client.rs
@@ -108,18 +108,18 @@ impl BedrockClient {
     pub async fn ask(
         &self,
         question: String,
+        template: Option<String>,
         context: Vec<String>,
         model: String,
     ) -> Result<String, ShellError> {
         let config = aws_config::load_from_env().await;
         let client = aws_sdk_bedrockruntime::Client::new(&config);
 
+        let tpl_value = template.unwrap_or("Please answer this question: \\\"{}\\\". Using the following context: \\\"{}\\\"".to_string());
+        let mut rendered_tpl = tpl_value.replacen("{}", &*question, 1);
+        rendered_tpl = rendered_tpl.replacen("{}", &*context.join(" "), 1);
         let question_with_ctx = if !context.is_empty() {
-            format!(
-                "Please answer this question: \\\"{}\\\". Using the following context: \\\"{}\\\"",
-                question,
-                context.join(" ")
-            )
+            rendered_tpl
         } else {
             question
         };
diff --git a/src/client/gemini_client.rs b/src/client/gemini_client.rs
index 541cca45..13bb357b 100644
--- a/src/client/gemini_client.rs
+++ b/src/client/gemini_client.rs
@@ -129,6 +129,7 @@ impl GeminiClient {
     pub async fn ask(
         &self,
         question: String,
+        template: Option<String>,
         context: Vec<String>,
         model: String,
     ) -> Result<String, ShellError> {
@@ -135,14 +136,12 @@ impl GeminiClient {
         let url = format!(
             "https://generativelanguage.googleapis.com/v1beta/models/{}:generateContent?key={}",
             model, self.api_key
         );
-
+        let tpl_value = template.unwrap_or("Please answer this question: \\\"{}\\\". Using the following context: \\\"{}\\\"".to_string());
+        let mut rendered_tpl = tpl_value.replacen("{}", &*question, 1);
+        rendered_tpl = rendered_tpl.replacen("{}", &*context.join(" "), 1);
         let question_with_ctx = if !context.is_empty() {
-            format!(
-                "Please answer this question: \\\"{}\\\". Using the following context: \\\"{}\\\"",
-                question,
-                context.join(" ")
-            )
+            rendered_tpl
         } else {
             question
         };
diff --git a/src/client/llm_client.rs b/src/client/llm_client.rs
index 448ac02c..535fb875 100644
--- a/src/client/llm_client.rs
+++ b/src/client/llm_client.rs
@@ -37,13 +37,14 @@ impl LLMClients {
     pub async fn ask(
         &self,
         question: String,
+        template: Option<String>,
         context: Vec<String>,
         model: String,
     ) -> Result<String, ShellError> {
         match self {
-            Self::OpenAI(c) => c.ask(question, context, model).await,
-            Self::Gemini(c) => c.ask(question, context, model).await,
-            Self::Bedrock(c) => c.ask(question, context, model).await,
+            Self::OpenAI(c) => c.ask(question, template, context, model).await,
+            Self::Gemini(c) => c.ask(question, template, context, model).await,
+            Self::Bedrock(c) => c.ask(question, template, context, model).await,
         }
     }
 
diff --git a/src/client/openai_client.rs b/src/client/openai_client.rs
index e457b318..d81c0260 100644
--- a/src/client/openai_client.rs
+++ b/src/client/openai_client.rs
@@ -130,15 +130,15 @@ impl OpenAIClient {
     pub async fn ask(
         &self,
         question: String,
+        template: Option<String>,
         context: Vec<String>,
         model: String,
     ) -> Result<String, ShellError> {
         let mut messages: Vec<ChatCompletionRequestMessage> = vec![];
 
-        // Primes the model to respond appropriately
         messages.push(
             ChatCompletionRequestSystemMessageArgs::default()
-                .content("You are a helpful assistant.")
+                .content(template.unwrap_or("You are a helpful assistant.".to_string()))
                 .build()
                 .unwrap()
                 .into(),
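
Usage note (an illustrative sketch, not part of the patch): with this change, a caller can override the default prompt per invocation. The snippet below assumes the ask signature from src/cli/ask.rs above, i.e. the question as the first positional argument and an optional list of context strings as the second; the file name prompt.txt is hypothetical. As the hunks show, the Bedrock and Gemini clients render the template with replacen, filling the first "{}" with the question and the second with the joined context, while the OpenAI client passes the template text verbatim as the system message.

    # Hypothetical template; the "{}" placeholders are filled in order
    # (question first, then joined context) by the Bedrock and Gemini clients.
    'Answer the question "{}" using only this context: "{}"' | save prompt.txt

    # Ask with the custom prompt and an explicit context list.
    ask --prompt (open prompt.txt) "Who is Pie Man?" ["Pie Man is a new superhero in town."]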