Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Feature/vf mounts prompts #780

Merged
merged 12 commits into from
Jan 15, 2025
Original file line number Diff line number Diff line change
Expand Up @@ -16,6 +16,8 @@ use crate::network::agent_payments_manager::my_agent_offerings_manager::MyAgentO
use crate::utils::environment::{fetch_node_environment, NodeEnvironment};
use async_trait::async_trait;
use shinkai_embedding::embedding_generator::RemoteEmbeddingGenerator;
use shinkai_fs::shinkai_file_manager::ShinkaiFileManager;
use shinkai_fs::shinkai_fs_error::ShinkaiFsError;
use shinkai_message_primitives::schemas::inbox_name::InboxName;
use shinkai_message_primitives::schemas::job::{Job, JobLike};
use shinkai_message_primitives::schemas::llm_providers::common_agent_llm_provider::ProviderOrAgent;
Expand Down Expand Up @@ -175,8 +177,8 @@ impl GenericInferenceChain {
|| !job_filenames.is_empty()
{
let ret = JobManager::search_for_chunks_in_resources(
merged_fs_files_paths,
merged_fs_folder_paths,
merged_fs_files_paths.clone(),
merged_fs_folder_paths.clone(),
job_filenames.clone(),
full_job.job_id.clone(),
full_job.scope(),
Expand Down Expand Up @@ -344,7 +346,16 @@ impl GenericInferenceChain {
}
});

let additional_files = Self::get_additional_files(
&db,
&full_job,
job_filenames.clone(),
merged_fs_files_paths.clone(),
merged_fs_folder_paths.clone(),
)?;

let mut filled_prompt = JobPromptGenerator::generic_inference_prompt(
db.clone(),
custom_system_prompt.clone(),
custom_prompt.clone(),
user_message.clone(),
Expand All @@ -355,7 +366,9 @@ impl GenericInferenceChain {
tools.clone(),
None,
full_job.job_id.clone(),
additional_files.clone(),
node_env.clone(),
db.clone(),
);

let mut iteration_count = 0;
Expand Down Expand Up @@ -451,15 +464,18 @@ impl GenericInferenceChain {
Ok(response) => response,
Err(e) => {
match &e {
LLMProviderError::ToolRouterError(ref error_msg) if error_msg.contains("Invalid function arguments") => {
LLMProviderError::ToolRouterError(ref error_msg)
if error_msg.contains("Invalid function arguments") =>
{
// For invalid arguments, we'll retry with the LLM by including the error message
// in the next prompt to help it fix the parameters
let mut function_call_with_error = function_call.clone();
function_call_with_error.response = Some(error_msg.clone());
tool_calls_history.push(function_call_with_error);

// Update prompt with error information for retry
filled_prompt = JobPromptGenerator::generic_inference_prompt(
db.clone(),
custom_system_prompt.clone(),
custom_prompt.clone(),
user_message.clone(),
Expand All @@ -473,20 +489,24 @@ impl GenericInferenceChain {
response: error_msg.clone(),
}),
full_job.job_id.clone(),
additional_files.clone(),
node_env.clone(),
db.clone(),
);

// Set flag to retry and break out of the function calls loop
iteration_count += 1;
should_retry = true;
break;
},
LLMProviderError::ToolRouterError(ref error_msg) if error_msg.contains("MissingConfigError") => {
}
LLMProviderError::ToolRouterError(ref error_msg)
if error_msg.contains("MissingConfigError") =>
{
// For missing config, we'll pass through the error directly
// This will show up in the UI prompting the user to update their config
eprintln!("Missing config error: {:?}", error_msg);
return Err(e);
},
}
_ => {
eprintln!("Error calling function: {:?}", e);
return Err(e);
Expand Down Expand Up @@ -514,13 +534,22 @@ impl GenericInferenceChain {
last_function_response = Some(function_response);
}

let additional_files = Self::get_additional_files(
&db,
&full_job,
job_filenames.clone(),
merged_fs_files_paths.clone(),
merged_fs_folder_paths.clone(),
)?;

// If we need to retry, continue the outer loop
if should_retry {
continue;
}

// 7) Call LLM again with the response (for formatting)
filled_prompt = JobPromptGenerator::generic_inference_prompt(
db.clone(),
custom_system_prompt.clone(),
custom_prompt.clone(),
user_message.clone(),
Expand All @@ -531,7 +560,9 @@ impl GenericInferenceChain {
tools.clone(),
last_function_response,
full_job.job_id.clone(),
additional_files,
node_env.clone(),
db.clone(),
);
} else {
// No more function calls required, return the final response
Expand Down Expand Up @@ -610,4 +641,32 @@ impl GenericInferenceChain {
}
}
}

/// Collects the absolute paths of all files that should accompany the
/// inference prompt: explicitly merged context files/folders plus any
/// files that live in the job's own folder.
///
/// # Errors
/// Returns a `ShinkaiFsError` if resolving the merged context paths fails.
/// A missing job folder is not an error — the job-file step is skipped.
pub fn get_additional_files(
    db: &SqliteManager,
    full_job: &Job,
    job_filenames: Vec<String>,
    merged_fs_files_paths: Vec<ShinkaiPath>,
    merged_fs_folder_paths: Vec<ShinkaiPath>,
) -> Result<Vec<String>, ShinkaiFsError> {
    // Agent/context files: resolve the explicit file and folder paths.
    // The vectors are owned by this function, so no clones are needed.
    let mut additional_files = ShinkaiFileManager::get_absolute_path_for_additional_files(
        merged_fs_files_paths,
        merged_fs_folder_paths,
    )?;

    // Job files: if the job has a dedicated folder in the DB, resolve the
    // job filenames against it. Best-effort — a lookup failure is ignored.
    if let Ok(folder_path) = db.get_job_folder_name(&full_job.job_id) {
        additional_files.extend(ShinkaiFileManager::get_absolute_paths_with_folder(
            job_filenames,
            folder_path.path,
        ));
    }

    Ok(additional_files)
}
}
Original file line number Diff line number Diff line change
@@ -1,26 +1,27 @@
use serde_json::json;
use std::collections::HashMap;
use shinkai_fs::shinkai_file_manager::ShinkaiFileManager;
use shinkai_sqlite::SqliteManager;
use std::{collections::HashMap, fs};

use crate::llm_provider::execution::prompts::general_prompts::JobPromptGenerator;
use crate::managers::tool_router::ToolCallFunctionResponse;

use crate::network::v2_api::api_v2_commands_app_files::get_app_folder_path;
use crate::network::Node;
use crate::tools::tool_implementation::native_tools::sql_processor::get_current_tables;
use crate::utils::environment::NodeEnvironment;
use shinkai_message_primitives::schemas::prompts::Prompt;
use shinkai_message_primitives::schemas::shinkai_fs::ShinkaiFileChunkCollection;
use shinkai_message_primitives::schemas::subprompts::SubPromptType;
use shinkai_message_primitives::shinkai_message::shinkai_message::ShinkaiMessage;
use shinkai_message_primitives::{schemas::prompts::Prompt, shinkai_utils::job_scope::MinimalJobScope};
use shinkai_tools_primitives::tools::shinkai_tool::ShinkaiTool;
use std::sync::mpsc;
use std::sync::{mpsc, Arc};
use tokio::runtime::Runtime;

impl JobPromptGenerator {
/// A basic generic prompt generator
/// summary_text is the content generated by an LLM on parsing (if exist)
#[allow(clippy::too_many_arguments)]
pub fn generic_inference_prompt(
db: Arc<SqliteManager>,
custom_system_prompt: Option<String>,
custom_user_prompt: Option<String>,
user_message: String,
Expand All @@ -31,7 +32,9 @@ impl JobPromptGenerator {
tools: Vec<ShinkaiTool>,
function_call: Option<ToolCallFunctionResponse>,
job_id: String,
node_env: NodeEnvironment,
additional_files: Vec<String>,
_node_env: NodeEnvironment,
_db: Arc<SqliteManager>,
) -> Prompt {
let mut prompt = Prompt::new();

Expand Down Expand Up @@ -69,14 +72,16 @@ impl JobPromptGenerator {
// Wait for the result
let current_tables = rx.recv().unwrap();
if let Ok(current_tables) = current_tables {
prompt.add_content(
format!(
"<current_tables>\n{}\n</current_tables>\n",
current_tables.join("; \n")
),
SubPromptType::ExtraContext,
97,
);
if !current_tables.is_empty() {
prompt.add_content(
format!(
"<current_tables>\n{}\n</current_tables>\n",
current_tables.join("; \n")
),
SubPromptType::ExtraContext,
97,
);
}
}
}
}
Expand All @@ -89,16 +94,21 @@ impl JobPromptGenerator {
priority = priority.saturating_sub(1);
}
}
let folder = get_app_folder_path(node_env, job_id.clone());
let current_files = Node::v2_api_list_app_files_internal(folder.clone(), true);
if let Ok(current_files) = current_files {
if !current_files.is_empty() {
prompt.add_content(
format!("<current_files>\n{}\n</current_files>\n", current_files.join("\n")),
SubPromptType::ExtraContext,
97,
);
}
let mut all_files = vec![];
// Add job scope files
let job_scope = ShinkaiFileManager::get_absolute_path_for_job_scope(&db, &job_id);
if let Ok(job_scope) = job_scope {
all_files.extend(job_scope);
}
// Add fs files and Agent files
all_files.extend(additional_files);

if !all_files.is_empty() {
prompt.add_content(
format!("<current_files>\n{}\n</current_files>\n", all_files.join("\n")),
SubPromptType::ExtraContext,
97,
);
}
}

Expand All @@ -108,7 +118,7 @@ impl JobPromptGenerator {
if has_ret_nodes && !user_message.is_empty() {
prompt.add_content("--- start --- \n".to_string(), SubPromptType::ExtraContext, 97);
}

prompt.add_ret_node_content(ret_nodes, SubPromptType::ExtraContext, 96);

if has_ret_nodes && !user_message.is_empty() {
Expand Down
Loading