From e8004ca05dd7a6fd73823d3358ca62461622c4de Mon Sep 17 00:00:00 2001
From: Tyler Hardin
Date: Wed, 13 Aug 2025 16:02:44 -0400
Subject: [PATCH] feat: add a multi-crate mode

When no argument is passed on the command line, the server runs in a
secondary mode that allows the LLM to pass the crate name it wants to
ask about with each request.
---
 src/embeddings.rs |   3 +-
 src/main.rs       | 108 +++++++---
 src/server.rs     | 519 ++++++++++++++++++++++++++++++++++++++++++++--
 3 files changed, 580 insertions(+), 50 deletions(-)

diff --git a/src/embeddings.rs b/src/embeddings.rs
index 4080a61..a6db98a 100644
--- a/src/embeddings.rs
+++ b/src/embeddings.rs
@@ -52,12 +52,11 @@ pub async fn generate_embeddings(
     const CONCURRENCY_LIMIT: usize = 8; // Number of concurrent requests
     const TOKEN_LIMIT: usize = 8000; // Keep a buffer below the 8192 limit

-    let results = stream::iter(documents.iter().enumerate())
+    let results = stream::iter(documents.iter().enumerate().map(|(i, d)| (i, d.clone())).collect::<Vec<_>>())
         .map(|(index, doc)| {
             // Clone client, model, doc, and Arc for the async block
             let client = client.clone();
             let model = model.to_string();
-            let doc = doc.clone();
             let bpe = Arc::clone(&bpe); // Clone the Arc pointer

             async move {
diff --git a/src/main.rs b/src/main.rs
index cfd2cf1..4fb7053 100644
--- a/src/main.rs
+++ b/src/main.rs
@@ -9,7 +9,6 @@ use crate::{
     doc_loader::Document,
     embeddings::{generate_embeddings, CachedDocumentEmbedding, OPENAI_CLIENT},
     error::ServerError,
-    server::RustDocsServer, // Import the updated RustDocsServer
 };
 use async_openai::{Client as OpenAIClient, config::OpenAIConfig};
 use bincode::config;
@@ -38,10 +37,12 @@ use xdg::BaseDirectories;
 #[command(author, version, about, long_about = None)]
 struct Cli {
     /// The package ID specification (e.g., "serde@^1.0", "tokio").
+    /// If not provided, the server runs in "any-crate mode", allowing queries for any crate.
     #[arg()] // Positional argument
-    package_spec: String,
+    package_spec: Option<String>,

     /// Optional features to enable for the crate when generating documentation.
+    /// Only used in single-crate mode.
     #[arg(short = 'F', long, value_delimiter = ',', num_args = 0..)] // Allow multiple comma-separated values
     features: Option<Vec<String>>,
 }
@@ -67,9 +68,39 @@ async fn main() -> Result<(), ServerError> {
     // --- Parse CLI Arguments ---
     let cli = Cli::parse();

-    let specid_str = cli.package_spec.trim().to_string(); // Trim whitespace
-    let features = cli.features.map(|f| {
-        f.into_iter().map(|s| s.trim().to_string()).collect() // Trim each feature
+
+    // Initialize OpenAI Client early (needed for both modes)
+    let openai_client = if let Ok(api_base) = env::var("OPENAI_API_BASE") {
+        let config = OpenAIConfig::new().with_api_base(api_base);
+        OpenAIClient::with_config(config)
+    } else {
+        OpenAIClient::new()
+    };
+    OPENAI_CLIENT
+        .set(openai_client.clone())
+        .expect("Failed to set OpenAI client");
+
+    // Determine which mode to run in
+    match cli.package_spec {
+        Some(package_spec) => {
+            // Single-crate mode
+            run_single_crate_mode(package_spec, cli.features).await
+        }
+        None => {
+            // Any-crate mode
+            run_any_crate_mode().await
+        }
+    }
+}
+
+/// Run the server in single-crate mode with pre-loaded documentation
+async fn run_single_crate_mode(
+    package_spec: String,
+    features: Option<Vec<String>>,
+) -> Result<(), ServerError> {
+    let specid_str = package_spec.trim().to_string();
+    let features = features.map(|f| {
+        f.into_iter().map(|s| s.trim().to_string()).collect()
     });

     // Parse the specid string
@@ -87,23 +118,19 @@ async fn main() -> Result<(), ServerError> {
         .unwrap_or_else(|| "*".to_string());

     eprintln!(
-        "Target Spec: {}, Parsed Name: {}, Version Req: {}, Features: {:?}",
+        "Single-crate mode - Target Spec: {}, Parsed Name: {}, Version Req: {}, Features: {:?}",
         specid_str, crate_name, crate_version_req, features
     );

     // --- Determine Paths (incorporating features) ---
-
-    // Sanitize the version requirement string
     let sanitized_version_req = crate_version_req
         .replace(|c: char| !c.is_alphanumeric() && c != '.' && c != '-', "_");
-    // Generate a stable hash for the features to use in the path
     let features_hash = hash_features(&features);
-    // Construct the relative path component including features hash
     let embeddings_relative_path = PathBuf::from(&crate_name)
         .join(&sanitized_version_req)
-        .join(&features_hash) // Add features hash as a directory level
+        .join(&features_hash)
         .join("embeddings.bin");

     #[cfg(not(target_os = "windows"))]
@@ -121,7 +148,6 @@ async fn main() -> Result<(), ServerError> {
             ServerError::Config("Could not determine cache directory on Windows".to_string())
         })?;
         let app_cache_dir = cache_dir.join("rustdocs-mcp-server");
-        // Ensure the base app cache directory exists
         fs::create_dir_all(&app_cache_dir).map_err(ServerError::Io)?;
         app_cache_dir.join(embeddings_relative_path)
     };
@@ -181,16 +207,7 @@ async fn main() -> Result<(), ServerError> {
     let mut generation_cost: Option<f64> = None;
     let mut documents_for_server: Vec<Document> = loaded_documents_from_cache.unwrap_or_default();

-    // --- Initialize OpenAI Client (needed for question embedding even if cache hit) ---
-    let openai_client = if let Ok(api_base) = env::var("OPENAI_API_BASE") {
-        let config = OpenAIConfig::new().with_api_base(api_base);
-        OpenAIClient::with_config(config)
-    } else {
-        OpenAIClient::new()
-    };
-    OPENAI_CLIENT
-        .set(openai_client.clone()) // Clone the client for the OnceCell
-        .expect("Failed to set OpenAI client");
+    let openai_client = OPENAI_CLIENT.get().unwrap();

     let final_embeddings = match loaded_embeddings {
         Some(embeddings) => {
@@ -207,9 +224,8 @@ async fn main() -> Result<(), ServerError> {
                 "Loading documents for crate: {} (Version Req: {}, Features: {:?})",
                 crate_name, crate_version_req, features
             );
-            // Pass features to load_documents
             let loaded_documents =
-                doc_loader::load_documents(&crate_name, &crate_version_req, features.as_ref())?; // Pass features here
+                doc_loader::load_documents(&crate_name, &crate_version_req, features.as_ref())?;

             eprintln!("Loaded {} documents.", loaded_documents.len());
             documents_for_server = loaded_documents.clone();
@@ -312,29 +328,57 @@ async fn main() -> Result<(), ServerError> {
         )
     };

-    // Create the service instance using the updated ::new()
-    let service = RustDocsServer::new(
-        crate_name.clone(), // Pass crate_name directly
+    // Create the service instance for single-crate mode
+    let service = server::RustDocsSingleCrateServer::new(
+        crate_name.clone(),
         documents_for_server,
         final_embeddings,
         startup_message,
     )?;

     // --- Use standard stdio transport and ServiceExt ---
-    eprintln!("Rust Docs MCP server starting via stdio...");
+    eprintln!("Rust Docs MCP server starting via stdio (single-crate mode)...");

-    // Serve the server using the ServiceExt trait and standard stdio transport
     let server_handle = service.serve(stdio()).await.map_err(|e| {
         eprintln!("Failed to start server: {:?}", e);
-        ServerError::McpRuntime(e.to_string()) // Use the new McpRuntime variant
+        ServerError::McpRuntime(e.to_string())
     })?;

     eprintln!("{} Docs MCP server running...", &crate_name);

-    // Wait for the server to complete (e.g., stdin closed)
     server_handle.waiting().await.map_err(|e| {
         eprintln!("Server encountered an error while running: {:?}", e);
-        ServerError::McpRuntime(e.to_string()) // Use the new McpRuntime variant
+        ServerError::McpRuntime(e.to_string())
+    })?;
+
+    eprintln!("Rust Docs MCP server stopped.");
+    Ok(())
+}
+
+/// Run the server in any-crate mode where documentation is loaded on-demand
+async fn run_any_crate_mode() -> Result<(), ServerError> {
+    eprintln!("Any-crate mode - Server will load documentation on demand for any requested crate");
+
+    // Verify OpenAI API key is available
+    let _openai_api_key = env::var("OPENAI_API_KEY")
+        .map_err(|_| ServerError::MissingEnvVar("OPENAI_API_KEY".to_string()))?;
+
+    // Create the service instance for any-crate mode
+    let service = server::RustDocsAnyCrateServer::new()?;
+
+    // --- Use standard stdio transport and ServiceExt ---
+    eprintln!("Rust Docs MCP server starting via stdio (any-crate mode)...");
+
+    let server_handle = service.serve(stdio()).await.map_err(|e| {
+        eprintln!("Failed to start server: {:?}", e);
+        ServerError::McpRuntime(e.to_string())
+    })?;
+
+    eprintln!("Rust Docs MCP server running (any-crate mode)...");
+
+    server_handle.waiting().await.map_err(|e| {
+        eprintln!("Server encountered an error while running: {:?}", e);
+        ServerError::McpRuntime(e.to_string())
     })?;

     eprintln!("Rust Docs MCP server stopped.");
     Ok(())
 }
diff --git a/src/server.rs b/src/server.rs
index 9e886ca..91631d7 100644
--- a/src/server.rs
+++ b/src/server.rs
@@ -1,8 +1,19 @@
 use crate::{
-    doc_loader::Document,
-    embeddings::{OPENAI_CLIENT, cosine_similarity},
+    doc_loader::{self, Document},
+    embeddings::{self, OPENAI_CLIENT, cosine_similarity, CachedDocumentEmbedding},
     error::ServerError, // Keep ServerError for ::new()
 };
+use bincode::config;
+use cargo::core::PackageIdSpec;
+use std::{
+    collections::{HashMap, hash_map::DefaultHasher},
+    fs::{self, File},
+    hash::{Hash, Hasher},
+    io::BufReader,
+    path::PathBuf,
+};
+#[cfg(not(target_os = "windows"))]
+use xdg::BaseDirectories;
 use async_openai::{
     types::{
         ChatCompletionRequestSystemMessageArgs, ChatCompletionRequestUserMessageArgs,
@@ -51,30 +62,39 @@ use serde_json::json;
 use std::{/* borrow::Cow, */ env, sync::Arc}; // Removed borrow::Cow
 use tokio::sync::Mutex;

-// --- Argument Struct for the Tool ---
+// --- Argument Structs for Tools ---

+// For single-crate mode (no package_spec needed)
 #[derive(Debug, Deserialize, JsonSchema)]
 struct QueryRustDocsArgs {
     #[schemars(description = "The specific question about the crate's API or usage.")]
     question: String,
-    // Removed crate_name field as it's implicit to the server instance
 }

-// --- Main Server Struct ---
+// For any-crate mode (needs package_spec)
+#[derive(Debug, Deserialize, JsonSchema)]
+struct QueryAnyCrateDocsArgs {
+    #[schemars(description = "The package ID specification (e.g., 'serde@^1.0', 'tokio').")]
+    package_spec: String,
+    #[schemars(description = "The specific question about the crate's API or usage.")]
+    question: String,
+    #[schemars(description = "Optional features to enable for the crate when generating documentation.")]
+    features: Option<Vec<String>>,
+}
+
+// --- Main Server Struct for Single Crate Mode ---

-// No longer needs ServerState, holds data directly
 #[derive(Clone)] // Add Clone for tool macro requirements
-pub struct RustDocsServer {
+pub struct RustDocsSingleCrateServer {
     crate_name: Arc<String>, // Use Arc for cheap cloning
     documents: Arc<Vec<Document>>,
     embeddings: Arc<Vec<(String, Array1<f32>)>>,
     peer: Arc<Mutex<Option<Peer<RoleServer>>>>, // Uses tokio::sync::Mutex
     startup_message: Arc<Mutex<Option<String>>>, // Keep the message itself
     startup_message_sent: Arc<Mutex<bool>>, // Flag to track if sent (using tokio::sync::Mutex)
-    // tool_name and info are handled by ServerHandler/macros now
 }

-impl RustDocsServer {
+impl RustDocsSingleCrateServer {
     // Updated constructor
     pub fn new(
         crate_name: String,
@@ -125,11 +145,10 @@ impl RustDocsServer {
     }
 }

-// --- Tool Implementation ---
+// --- Tool Implementation for Single Crate Mode ---

-#[tool(tool_box)] // Add tool_box here as well, mirroring the example
-// Tool methods go in a regular impl block
-impl RustDocsServer {
+#[tool(tool_box)]
+impl RustDocsSingleCrateServer {
     // Define the tool using the tool macro
     // Name removed; will be handled dynamically by overriding list_tools/get_tool
     #[tool(
@@ -227,6 +246,7 @@ impl RustDocsServer {
             let llm_model: String = env::var("LLM_MODEL")
                 .unwrap_or_else(|_| "gpt-4o-mini-2024-07-18".to_string());
+
             let chat_request = CreateChatCompletionRequestArgs::default()
                 .model(llm_model)
                 .messages(vec![
@@ -283,10 +303,10 @@ impl RustDocsServer {
     }
 }

-// --- ServerHandler Implementation ---
+// --- ServerHandler Implementation for Single Crate Mode ---

-#[tool(tool_box)] // Use imported tool macro directly
-impl ServerHandler for RustDocsServer {
+#[tool(tool_box)]
+impl ServerHandler for RustDocsSingleCrateServer {
     fn get_info(&self) -> ServerInfo {
         // Define capabilities using the builder
         let capabilities = ServerCapabilities::builder()
@@ -384,3 +404,470 @@ impl ServerHandler for RustDocsServer {
         })
     }
 }
+
+// --- Any-Crate Mode Server ---
+
+#[derive(Clone)]
+pub struct RustDocsAnyCrateServer {
+    // Cache for loaded crate documentation
+    cache: Arc<Mutex<HashMap<String, CrateCache>>>,
+    peer: Arc<Mutex<Option<Peer<RoleServer>>>>,
+}
+
+struct CrateCache {
+    crate_name: String,
+    documents: Vec<Document>,
+    embeddings: Vec<(String, Array1<f32>)>,
+}
+
+impl RustDocsAnyCrateServer {
+    pub fn new() -> Result<Self, ServerError> {
+        Ok(Self {
+            cache: Arc::new(Mutex::new(HashMap::new())),
+            peer: Arc::new(Mutex::new(None)),
+        })
+    }
+
+    // Helper function to send log messages
+    pub fn send_log(&self, level: LoggingLevel, message: String) {
+        let peer_arc = Arc::clone(&self.peer);
+        tokio::spawn(async move {
+            let mut peer_guard = peer_arc.lock().await;
+            if let Some(peer) = peer_guard.as_mut() {
+                let params = LoggingMessageNotificationParam {
+                    level,
+                    logger: None,
+                    data: serde_json::Value::String(message),
+                };
+                let log_notification: LoggingMessageNotification = Notification {
+                    method: LoggingMessageNotificationMethod,
+                    params,
+                };
+                let server_notification =
+                    ServerNotification::LoggingMessageNotification(log_notification);
+                if let Err(e) = peer.send_notification(server_notification).await {
+                    eprintln!("Failed to send MCP log notification: {}", e);
+                }
+            } else {
+                eprintln!("Log task ran but MCP peer was not connected.");
+            }
+        });
+    }
+
+    // Helper to hash features
+    fn hash_features(features: &Option<Vec<String>>) -> String {
+        features
+            .as_ref()
+            .map(|f| {
+                let mut sorted_features = f.clone();
+                sorted_features.sort_unstable();
+                let mut hasher = DefaultHasher::new();
+                sorted_features.hash(&mut hasher);
+                format!("{:x}", hasher.finish())
+            })
+            .unwrap_or_else(|| "no_features".to_string())
+    }
+
+    // Helper to load or get cached crate documentation
+    async fn get_or_load_crate(
+        &self,
+        package_spec: &str,
+        features: Option<Vec<String>>,
+    ) -> Result<(String, Vec<Document>, Vec<(String, Array1<f32>)>), McpError> {
+        // Parse the package spec
+        let spec = PackageIdSpec::parse(package_spec).map_err(|e| {
+            McpError::invalid_params(
+                format!("Failed to parse package ID spec '{}': {}", package_spec, e),
+                None,
+            )
+        })?;
+
+        let crate_name = spec.name().to_string();
+        let crate_version_req = spec
+            .version()
+            .map(|v| v.to_string())
+            .unwrap_or_else(|| "*".to_string());
+
+        // Create cache key
+        let features_hash = Self::hash_features(&features);
+        let cache_key = format!("{}_{}_{}", crate_name, crate_version_req, features_hash);
+
+        // Check if already in cache
+        {
+            let cache_guard = self.cache.lock().await;
+            if let Some(cached) = cache_guard.get(&cache_key) {
+                self.send_log(
+                    LoggingLevel::Info,
+                    format!("Using cached documentation for crate '{}'", crate_name),
+                );
+                return Ok((
+                    cached.crate_name.clone(),
+                    cached.documents.clone(),
+                    cached.embeddings.clone(),
+                ));
+            }
+        }
+
+        self.send_log(
+            LoggingLevel::Info,
+            format!("Loading documentation for crate '{}' (version: {}, features: {:?})",
+                crate_name, crate_version_req, features),
+        );
+
+        // Determine cache file path
+        let sanitized_version_req = crate_version_req
+            .replace(|c: char| !c.is_alphanumeric() && c != '.' && c != '-', "_");
+
+        let embeddings_relative_path = PathBuf::from(&crate_name)
+            .join(&sanitized_version_req)
+            .join(&features_hash)
+            .join("embeddings.bin");
+
+        #[cfg(not(target_os = "windows"))]
+        let embeddings_file_path = {
+            let xdg_dirs = BaseDirectories::with_prefix("rustdocs-mcp-server")
+                .map_err(|e| McpError::internal_error(format!("Failed to get XDG directories: {}", e), None))?;
+            xdg_dirs
+                .place_data_file(embeddings_relative_path)
+                .map_err(|e| McpError::internal_error(format!("IO error: {}", e), None))?
+        };
+
+        #[cfg(target_os = "windows")]
+        let embeddings_file_path = {
+            let cache_dir = dirs::cache_dir().ok_or_else(|| {
+                McpError::internal_error("Could not determine cache directory on Windows", None)
+            })?;
+            let app_cache_dir = cache_dir.join("rustdocs-mcp-server");
+            fs::create_dir_all(&app_cache_dir)
+                .map_err(|e| McpError::internal_error(format!("IO error: {}", e), None))?;
+            app_cache_dir.join(embeddings_relative_path)
+        };
+
+        // Try to load from disk cache
+        let mut loaded_embeddings: Option<Vec<(String, Array1<f32>)>> = None;
+        let mut loaded_documents: Option<Vec<Document>> = None;
+
+        if embeddings_file_path.exists() {
+            match File::open(&embeddings_file_path) {
+                Ok(file) => {
+                    let reader = BufReader::new(file);
+                    match bincode::decode_from_reader::<Vec<CachedDocumentEmbedding>, _, _>(
+                        reader,
+                        config::standard(),
+                    ) {
+                        Ok(cached_data) => {
+                            let count = cached_data.len();
+                            let mut embeddings = Vec::with_capacity(count);
+                            let mut documents = Vec::with_capacity(count);
+                            for item in cached_data {
+                                embeddings.push((item.path.clone(), Array1::from(item.vector)));
+                                documents.push(Document {
+                                    path: item.path,
+                                    content: item.content,
+                                });
+                            }
+                            loaded_embeddings = Some(embeddings);
+                            loaded_documents = Some(documents);
+                            self.send_log(
+                                LoggingLevel::Info,
+                                format!("Loaded {} cached embeddings for crate '{}'", count, crate_name),
+                            );
+                        }
+                        Err(e) => {
+                            eprintln!("Failed to decode cache file: {}. Will regenerate.", e);
+                        }
+                    }
+                }
+                Err(e) => {
+                    eprintln!("Failed to open cache file: {}. Will regenerate.", e);
+                }
+            }
+        }
+
+        // Generate embeddings if not cached
+        let (final_documents, final_embeddings) = if let (Some(docs), Some(embeds)) = (loaded_documents, loaded_embeddings) {
+            (docs, embeds)
+        } else {
+            // Load documents
+            let documents = doc_loader::load_documents(&crate_name, &crate_version_req, features.as_ref())
+                .map_err(|e| McpError::internal_error(format!("Failed to load documents: {}", e), None))?;
+
+            // Generate embeddings
+            let openai_client = OPENAI_CLIENT
+                .get()
+                .ok_or_else(|| McpError::internal_error("OpenAI client not initialized", None))?;
+
+            let embedding_model: String = env::var("EMBEDDING_MODEL")
+                .unwrap_or_else(|_| "text-embedding-3-small".to_string());
+
+            let (embeddings, _total_tokens) = embeddings::generate_embeddings(openai_client, &documents, &embedding_model)
+                .await
+                .map_err(|e| McpError::internal_error(format!("Failed to generate embeddings: {}", e), None))?;
+
+            // Save to disk cache
+            let mut combined_cache_data: Vec<CachedDocumentEmbedding> = Vec::new();
+            let embedding_map: HashMap<String, Array1<f32>> = embeddings.clone().into_iter().collect();
+
+            for doc in &documents {
+                if let Some(embedding_array) = embedding_map.get(&doc.path) {
+                    combined_cache_data.push(CachedDocumentEmbedding {
+                        path: doc.path.clone(),
+                        content: doc.content.clone(),
+                        vector: embedding_array.to_vec(),
+                    });
+                }
+            }
+
+            match bincode::encode_to_vec(&combined_cache_data, config::standard()) {
+                Ok(encoded_bytes) => {
+                    if let Some(parent_dir) = embeddings_file_path.parent() {
+                        if !parent_dir.exists() {
+                            let _ = fs::create_dir_all(parent_dir);
+                        }
+                    }
+                    if let Err(e) = fs::write(&embeddings_file_path, encoded_bytes) {
+                        eprintln!("Warning: Failed to write cache file: {}", e);
+                    }
+                }
+                Err(e) => {
+                    eprintln!("Warning: Failed to encode data for cache: {}", e);
+                }
+            }
+
+            (documents, embeddings)
+        };
+
+        // Store in memory cache
+        {
+            let mut cache_guard = self.cache.lock().await;
+            cache_guard.insert(
+                cache_key,
+                CrateCache {
+                    crate_name: crate_name.clone(),
+                    documents: final_documents.clone(),
+                    embeddings: final_embeddings.clone(),
+                },
+            );
+        }
+
+        Ok((crate_name, final_documents, final_embeddings))
+    }
+}
+
+// --- Tool Implementation for Any-Crate Mode ---
+
+#[tool(tool_box)]
+impl RustDocsAnyCrateServer {
+    #[tool(
+        description = "Query documentation for any Rust crate using semantic search and LLM summarization."
+    )]
+    async fn query_any_crate_docs(
+        &self,
+        #[tool(aggr)]
+        args: QueryAnyCrateDocsArgs,
+    ) -> Result<CallToolResult, McpError> {
+        let (crate_name, documents, embeddings) = self
+            .get_or_load_crate(&args.package_spec, args.features)
+            .await?;
+
+        self.send_log(
+            LoggingLevel::Info,
+            format!("Received query for crate '{}': {}", crate_name, args.question),
+        );
+
+        // --- Embedding Generation for Question ---
+        let client = OPENAI_CLIENT
+            .get()
+            .ok_or_else(|| McpError::internal_error("OpenAI client not initialized", None))?;
+
+        let embedding_model: String =
+            env::var("EMBEDDING_MODEL").unwrap_or_else(|_| "text-embedding-3-small".to_string());
+
+        let question_embedding_request = CreateEmbeddingRequestArgs::default()
+            .model(embedding_model)
+            .input(args.question.to_string())
+            .build()
+            .map_err(|e| {
+                McpError::internal_error(format!("Failed to build embedding request: {}", e), None)
+            })?;
+
+        let question_embedding_response = client
+            .embeddings()
+            .create(question_embedding_request)
+            .await
+            .map_err(|e| McpError::internal_error(format!("OpenAI API error: {}", e), None))?;
+
+        let question_embedding = question_embedding_response.data.first().ok_or_else(|| {
+            McpError::internal_error("Failed to get embedding for question", None)
+        })?;
+
+        let question_vector = Array1::from(question_embedding.embedding.clone());
+
+        // --- Find Best Matching Document ---
+        let mut best_match: Option<(&str, f32)> = None;
+        for (path, doc_embedding) in embeddings.iter() {
+            let score = cosine_similarity(question_vector.view(), doc_embedding.view());
+            if best_match.is_none() || score > best_match.unwrap().1 {
+                best_match = Some((path, score));
+            }
+        }
+
+        // --- Generate Response using LLM ---
+        let response_text = match best_match {
+            Some((best_path, _score)) => {
+                eprintln!("Best match found: {}", best_path);
+                let context_doc = documents.iter().find(|doc| doc.path == best_path);
+
+                if let Some(doc) = context_doc {
+                    let system_prompt = format!(
+                        "You are an expert technical assistant for the Rust crate '{}'. \
+                         Answer the user's question based *only* on the provided context. \
+                         If the context does not contain the answer, say so. \
+                         Do not make up information. Be clear, concise, and comprehensive providing example usage code when possible.",
+                        crate_name
+                    );
+                    let user_prompt = format!(
+                        "Context:\n---\n{}\n---\n\nQuestion: {}",
+                        doc.content, args.question
+                    );
+
+                    let llm_model: String = env::var("LLM_MODEL")
+                        .unwrap_or_else(|_| "gpt-4o-mini-2024-07-18".to_string());
+
+                    let chat_request = CreateChatCompletionRequestArgs::default()
+                        .model(llm_model)
+                        .messages(vec![
+                            ChatCompletionRequestSystemMessageArgs::default()
+                                .content(system_prompt)
+                                .build()
+                                .map_err(|e| {
+                                    McpError::internal_error(
+                                        format!("Failed to build system message: {}", e),
+                                        None,
+                                    )
+                                })?
+                                .into(),
+                            ChatCompletionRequestUserMessageArgs::default()
+                                .content(user_prompt)
+                                .build()
+                                .map_err(|e| {
+                                    McpError::internal_error(
+                                        format!("Failed to build user message: {}", e),
+                                        None,
+                                    )
+                                })?
+                                .into(),
+                        ])
+                        .build()
+                        .map_err(|e| {
+                            McpError::internal_error(
+                                format!("Failed to build chat request: {}", e),
+                                None,
+                            )
+                        })?;
+
+                    let chat_response = client.chat().create(chat_request).await.map_err(|e| {
+                        McpError::internal_error(format!("OpenAI chat API error: {}", e), None)
+                    })?;
+
+                    chat_response
+                        .choices
+                        .first()
+                        .and_then(|choice| choice.message.content.clone())
+                        .unwrap_or_else(|| "Error: No response from LLM.".to_string())
+                } else {
+                    "Error: Could not find content for best matching document.".to_string()
+                }
+            }
+            None => "Could not find any relevant document context.".to_string(),
+        };
+
+        // --- Format and Return Result ---
+        Ok(CallToolResult::success(vec![Content::text(format!(
+            "From {} docs: {}",
+            crate_name, response_text
+        ))]))
+    }
+}
+
+// --- ServerHandler Implementation for Any-Crate Mode ---
+
+#[tool(tool_box)]
+impl ServerHandler for RustDocsAnyCrateServer {
+    fn get_info(&self) -> ServerInfo {
+        let capabilities = ServerCapabilities::builder()
+            .enable_tools()
+            .enable_logging()
+            .build();
+
+        ServerInfo {
+            protocol_version: ProtocolVersion::V_2024_11_05,
+            capabilities,
+            server_info: Implementation {
+                name: "rust-docs-mcp-server".to_string(),
+                version: env!("CARGO_PKG_VERSION").to_string(),
+            },
+            instructions: Some(
+                "This server provides tools to query documentation for any Rust crate. \
+                 Use the 'query_any_crate_docs' tool with a package specification and question \
+                 to get information about any crate's API, usage, and examples."
+                    .to_string(),
+            ),
+        }
+    }
+
+    async fn list_resources(
+        &self,
+        _request: PaginatedRequestParam,
+        _context: RequestContext<RoleServer>,
+    ) -> Result<ListResourcesResult, McpError> {
+        Ok(ListResourcesResult {
+            resources: vec![],
+            next_cursor: None,
+        })
+    }
+
+    async fn read_resource(
+        &self,
+        request: ReadResourceRequestParam,
+        _context: RequestContext<RoleServer>,
+    ) -> Result<ReadResourceResult, McpError> {
+        Err(McpError::resource_not_found(
+            format!("Resource URI not found: {}", request.uri),
+            Some(json!({ "uri": request.uri })),
+        ))
+    }
+
+    async fn list_prompts(
+        &self,
+        _request: PaginatedRequestParam,
+        _context: RequestContext<RoleServer>,
+    ) -> Result<ListPromptsResult, McpError> {
+        Ok(ListPromptsResult {
+            next_cursor: None,
+            prompts: Vec::new(),
+        })
+    }
+
+    async fn get_prompt(
+        &self,
+        request: GetPromptRequestParam,
+        _context: RequestContext<RoleServer>,
+    ) -> Result<GetPromptResult, McpError> {
+        Err(McpError::invalid_params(
+            format!("Prompt not found: {}", request.name),
+            None,
+        ))
+    }
+
+    async fn list_resource_templates(
+        &self,
+        _request: PaginatedRequestParam,
+        _context: RequestContext<RoleServer>,
+    ) -> Result<ListResourceTemplatesResult, McpError> {
+        Ok(ListResourceTemplatesResult {
+            next_cursor: None,
+            resource_templates: Vec::new(),
+        })
+    }
+}
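
Usage note (illustrative, not part of the patch): started with a package spec
argument (e.g. `serde@^1.0`), the server behaves as before and serves that one
crate; started with no argument, it runs in the new any-crate mode and the
client names the crate on every call. The sketch below shows the shape of the
arguments a client might send to the new `query_any_crate_docs` tool; the
crate, question, and feature values are placeholders, not fixed by this patch.

    use serde_json::json;

    fn main() {
        // Example arguments for one `query_any_crate_docs` call; these
        // deserialize into the `QueryAnyCrateDocsArgs` struct added above.
        let args = json!({
            "package_spec": "tokio@1",            // which crate to ask about
            "question": "How do I spawn a task?", // the question for that crate
            "features": ["full"]                  // optional; may be omitted
        });
        println!("{args}");
    }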