diff --git a/config.example.yaml b/config.example.yaml index c27861f..360182f 100644 --- a/config.example.yaml +++ b/config.example.yaml @@ -82,7 +82,7 @@ save_session: null # Controls the persistence of the session. If t compression_threshold: 4000 # Compress the session when the token count reaches or exceeds this threshold summarization_prompt: > # The text prompt used for creating a concise summary of session message 'Summarize the discussion briefly in 200 words or less to use as a prompt for future context.' -summary_prompt: > # The text prompt used for including the summary of the entire session as context to the model +summary_context_prompt: > # The text prompt used for including the summary of the entire session as context to the model 'This is a summary of the chat history as a recap: ' # ---- RAG ---- diff --git a/src/config/mod.rs b/src/config/mod.rs index e549458..ef0f3fe 100644 --- a/src/config/mod.rs +++ b/src/config/mod.rs @@ -86,7 +86,7 @@ const SYNC_MODELS_URL: &str = const SUMMARIZATION_PROMPT: &str = "Summarize the discussion briefly in 200 words or less to use as a prompt for future context."; -const SUMMARY_PROMPT: &str = "This is a summary of the chat history as a recap: "; +const SUMMARY_CONTEXT_PROMPT: &str = "This is a summary of the chat history as a recap: "; const RAG_TEMPLATE: &str = r#"Answer the query based on the context while respecting the rules. 
(user query, some textual context and rules, all inside xml tags) @@ -146,7 +146,7 @@ pub struct Config { pub save_session: Option<bool>, pub compression_threshold: usize, pub summarization_prompt: Option<String>, - pub summary_prompt: Option<String>, + pub summary_context_prompt: Option<String>, pub rag_embedding_model: Option<String>, pub rag_reranker_model: Option<String>, @@ -232,7 +232,7 @@ impl Default for Config { save_session: None, compression_threshold: 4000, summarization_prompt: None, - summary_prompt: None, + summary_context_prompt: None, rag_embedding_model: None, rag_reranker_model: None, @@ -1566,13 +1566,13 @@ impl Config { .unwrap_or_else(|| SUMMARIZATION_PROMPT.into()); let input = Input::from_str(config, &prompt, None); let summary = input.fetch_chat_text().await?; - let summary_prompt = config + let summary_context_prompt = config .read() - .summary_prompt + .summary_context_prompt .clone() - .unwrap_or_else(|| SUMMARY_PROMPT.into()); + .unwrap_or_else(|| SUMMARY_CONTEXT_PROMPT.into()); if let Some(session) = config.write().session.as_mut() { - session.compress(format!("{summary_prompt}{summary}")); + session.compress(format!("{summary_context_prompt}{summary}")); } config.write().discontinuous_last_message(); Ok(()) @@ -2782,8 +2782,8 @@ impl Config { if let Some(v) = read_env_value::<String>(&get_env_name("summarization_prompt")) { self.summarization_prompt = v; } - if let Some(v) = read_env_value::<String>(&get_env_name("summary_prompt")) { - self.summary_prompt = v; + if let Some(v) = read_env_value::<String>(&get_env_name("summary_context_prompt")) { + self.summary_context_prompt = v; } if let Some(v) = read_env_value::<String>(&get_env_name("rag_embedding_model")) {