From 14036cac845ef3ad1794094d362cf0f55e25a8d2 Mon Sep 17 00:00:00 2001
From: jokemanfire
Date: Thu, 10 Apr 2025 16:18:11 +0800
Subject: [PATCH] docs: add a simple chat client for example

1. optimize the readme in root
2. add example

Signed-off-by: jokemanfire
---
 README.md                                     |  38 ++--
 examples/README.md                            |   1 +
 examples/simple-chat-client/Cargo.toml        |  20 +++
 examples/simple-chat-client/README.md         |  15 ++
 .../simple-chat-client/src/bin/simple_chat.rs |  84 +++++++++
 examples/simple-chat-client/src/chat.rs       | 170 ++++++++++++++++++
 examples/simple-chat-client/src/client.rs     |  68 +++++++
 examples/simple-chat-client/src/config.rs     |  89 +++++++++
 examples/simple-chat-client/src/config.toml   |  10 ++
 examples/simple-chat-client/src/error.rs      |  24 +++
 examples/simple-chat-client/src/lib.rs        |   6 +
 examples/simple-chat-client/src/model.rs      |  90 ++++++++++
 examples/simple-chat-client/src/tool.rs       | 130 ++++++++++++++
 13 files changed, 733 insertions(+), 12 deletions(-)
 create mode 100644 examples/simple-chat-client/Cargo.toml
 create mode 100644 examples/simple-chat-client/README.md
 create mode 100644 examples/simple-chat-client/src/bin/simple_chat.rs
 create mode 100644 examples/simple-chat-client/src/chat.rs
 create mode 100644 examples/simple-chat-client/src/client.rs
 create mode 100644 examples/simple-chat-client/src/config.rs
 create mode 100644 examples/simple-chat-client/src/config.toml
 create mode 100644 examples/simple-chat-client/src/error.rs
 create mode 100644 examples/simple-chat-client/src/lib.rs
 create mode 100644 examples/simple-chat-client/src/model.rs
 create mode 100644 examples/simple-chat-client/src/tool.rs

diff --git a/README.md b/README.md
index 0e9b717d..5e139608 100644
--- a/README.md
+++ b/README.md
@@ -24,7 +24,7 @@ rmcp = { git = "https://github.com/modelcontextprotocol/rust-sdk", branch = "mai
 
 Start a client in one line:
 
-```rust
+```rust, ignore
 use rmcp::{ServiceExt, transport::TokioChildProcess};
 use tokio::process::Command;
 
@@ -37,7 +37,8 @@ async fn main() -> Result<(), Box<dyn std::error::Error>> {
 }
 ```
 
-#### 1. Build a transport
+<details>
+<summary>1. Build a transport</summary>
 
 ```rust, ignore
 use tokio::io::{stdin, stdout};
 
 let transport = (stdin(), stdout());
 ```
@@ -58,48 +59,59 @@ For server, the sink item is [`ServerJsonRpcMessage`](crate::model::ServerJsonRp
 4. A tuple of [`tokio::io::AsyncRead`] `R `and [`tokio::io::AsyncWrite`] `W`: `(R, W)`.
 
 For example, you can see how we build a transport through TCP stream or http upgrade so easily. [examples](examples/README.md)
+</details>
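As an illustration of case 4 above, here is a minimal sketch of a transport built from a TCP stream (the address is a placeholder; `tokio::net::TcpStream::into_split()` yields an `AsyncRead`/`AsyncWrite` pair, so the tuple can be handed to `serve` in the later steps):

```rust, ignore
use tokio::net::TcpStream;

// Connect to a peer and split the stream into a read half and a write half.
let stream = TcpStream::connect("127.0.0.1:8001").await?;
let (reader, writer) = stream.into_split();
let transport = (reader, writer);
```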
 
-#### 2. Build a service
+<details>
+<summary>2. Build a service</summary>
 
 You can easily build a service by using [`ServerHandler`](crates/rmcp/src/handler/server.rs) or [`ClientHandler`](crates/rmcp/src/handler/client.rs).
 
 ```rust, ignore
 let service = common::counter::Counter::new();
 ```
+</details>
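On the client side, note that the unit type `()` already acts as a minimal `ClientHandler`; this is exactly what `config.rs` later in this patch relies on when it spawns MCP servers. A short sketch:

```rust, ignore
// Minimal client-side sketch: () as a no-op handler, as used in config.rs below.
let client = ().serve(transport).await?;
let peer = client.peer().clone(); // handle used to list and call the server's tools
```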
 
-#### 3. Serve them together
+<details>
+<summary>3. Serve them together</summary>
 
 ```rust, ignore
 // this call will finish the initialization process
 let server = service.serve(transport).await?;
 ```
+</details>
 
-#### 4. Interact with the server
+<details>
+<summary>4. Interact with the server</summary>
 
 Once the server is initialized, you can send requests or notifications:
 
 ```rust, ignore
-// request 
+// request
 let roots = server.list_roots().await?;
 // or send notification
 server.notify_cancelled(...).await?;
 ```
+</details>
 
-#### 5. Waiting for service shutdown
+<details>
+<summary>5. Waiting for service shutdown</summary>
 
 ```rust, ignore
 let quit_reason = server.waiting().await?;
 // or cancel it
 let quit_reason = server.cancel().await?;
 ```
+</details>
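Putting steps 1–5 together for the counter service shown above gives roughly the following sketch (error handling elided, as in the other snippets):

```rust, ignore
let service = common::counter::Counter::new();              // 2. build a service
let transport = (tokio::io::stdin(), tokio::io::stdout());  // 1. build a transport
let server = service.serve(transport).await?;               // 3. initialization
let roots = server.list_roots().await?;                     // 4. send a request
let quit_reason = server.waiting().await?;                  // 5. wait for shutdown
```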
 
 ### Use macros to declaring tool
 
 Use `toolbox` and `tool` macros to create tool quickly.
 
-Check this [file](examples/servers/src/common/calculator.rs).
+<details>
+<summary>Example: Calculator Tool</summary>
+Check this [file](examples/servers/src/common/calculator.rs).
 
 ```rust, ignore
 use rmcp::{ServerHandler, model::ServerInfo, schemars, tool};
 
@@ -150,19 +162,19 @@ impl ServerHandler for Calculator {
     }
 }
-  ```
+```
+</details>
 
 The only thing you should do is to make the function's return type implement `IntoCallToolResult`.
 
 And you can just implement `IntoContents`, and the return value will be marked as success automatically.
 
 If you return a type of `Result` where `T` and `E` both implemented `IntoContents`, it's also OK.
+
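For the client side of such a tool, a call by name might look like the sketch below. `CallToolRequestParam` is the same type used by this patch's `tool.rs`; the `sum` tool with `a`/`b` arguments is assumed from the linked calculator example:

```rust, ignore
use rmcp::model::CallToolRequestParam;

// `server` is a client-side handle such as `client.peer()` from the earlier steps.
let result = server
    .call_tool(CallToolRequestParam {
        name: "sum".into(),
        arguments: serde_json::json!({ "a": 1, "b": 2 }).as_object().cloned(),
    })
    .await?;
println!("{result:?}");
```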
 
 ### Manage Multi Services
 
 For many cases you need to manage several service in a collection, you can call `into_dyn` to convert services into the same type.
-
 ```rust, ignore
 let service = service.into_dyn();
 ```
@@ -177,7 +189,7 @@ See [examples](examples/README.md)
 - `server`: use server side sdk
 - `macros`: macros default
 
-#### Transports
+### Transports
 
 - `transport-io`: Server stdio transport
 - `transport-sse-server`: Server SSE transport
@@ -189,6 +201,8 @@ See [examples](examples/README.md)
 - [MCP Specification](https://spec.modelcontextprotocol.io/specification/2024-11-05/)
 - [Schema](https://github.com/modelcontextprotocol/specification/blob/main/schema/2024-11-05/schema.ts)
 
-## Development with Dev Container
+## Related Projects
+- [containerd-mcp-server](https://github.com/modelcontextprotocol/containerd-mcp-server) - A containerd-based MCP server implementation
 
+## Development with Dev Container
 See [docs/DEVCONTAINER.md](docs/DEVCONTAINER.md) for instructions on using Dev Container for development.
diff --git a/examples/README.md b/examples/README.md
index 7498243f..906e9fc5 100644
--- a/examples/README.md
+++ b/examples/README.md
@@ -75,6 +75,7 @@
 # Integration
 - [Rig](examples/rig-integration) A stream chatbot with rig
+- [Simple Chat Client](examples/simple-chat-client) A simple chat client implementation using the Model Context Protocol (MCP) SDK.
 
 # WASI
diff --git a/examples/simple-chat-client/Cargo.toml b/examples/simple-chat-client/Cargo.toml
new file mode 100644
index 00000000..24bcb78b
--- /dev/null
+++ b/examples/simple-chat-client/Cargo.toml
@@ -0,0 +1,20 @@
+[package]
+name = "simple-chat-client"
+version = "0.1.0"
+edition = "2021"
+
+[dependencies]
+tokio = { version = "1", features = ["full"] }
+serde = { version = "1.0", features = ["derive"] }
+serde_json = "1.0"
+reqwest = { version = "0.11", features = ["json"] }
+anyhow = "1.0"
+thiserror = "1.0"
+async-trait = "0.1"
+futures = "0.3"
+toml = "0.8"
+rmcp = { git = "https://github.com/modelcontextprotocol/rust-sdk", features = [
+    "client",
+    "transport-child-process",
+    "transport-sse",
+], default-features = false }
\ No newline at end of file
diff --git a/examples/simple-chat-client/README.md b/examples/simple-chat-client/README.md
new file mode 100644
index 00000000..2105f9f6
--- /dev/null
+++ b/examples/simple-chat-client/README.md
@@ -0,0 +1,15 @@
+# Simple Chat Client
+
+A simple chat client implementation using the Model Context Protocol (MCP) SDK. It is just an example to help developers understand how to use the MCP SDK. It starts an MCP server in the simplest possible way and calls tools directly, with no embeddings and no extra third-party libraries or function calling (some models cannot use function calling); the available tools are listed in the system prompt, and the client invokes them automatically.
+
+
+## Config
+The config file is `src/config.toml`; adjust it to your own settings. Move it to `/etc/simple-chat-client/config.toml` for system-wide configuration.
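For reference, a filled-in configuration in the shape of this example's `src/config.toml` (every value below is a placeholder):

```toml
openai_key = "sk-..."            # optional; the OPENAI_API_KEY env var is used as a fallback
chat_url = "https://api.openai.com/v1/chat/completions"
model_name = "gpt-4o-mini"

[mcp]
[[mcp.server]]
name = "my-server"
protocol = "stdio"               # or "sse" with a `url` instead of `command`/`args`
command = "/path/to/mcp-server"
args = []
```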
+
+## Usage
+
+After configuring the config file, you can run the example:
+```bash
+cargo run --bin simple-chat
+```
+
diff --git a/examples/simple-chat-client/src/bin/simple_chat.rs b/examples/simple-chat-client/src/bin/simple_chat.rs
new file mode 100644
index 00000000..48ff1725
--- /dev/null
+++ b/examples/simple-chat-client/src/bin/simple_chat.rs
@@ -0,0 +1,84 @@
+use std::sync::Arc;
+
+use anyhow::Result;
+use simple_chat_client::{
+    chat::ChatSession,
+    client::OpenAIClient,
+    config::Config,
+    tool::{Tool, ToolSet, get_mcp_tools},
+};
+
+// default config path
+const DEFAULT_CONFIG_PATH: &str = "/etc/simple-chat-client/config.toml";
+
+#[tokio::main]
+async fn main() -> Result<()> {
+    // load config
+    let config = Config::load(DEFAULT_CONFIG_PATH).await?;
+
+    // create openai client
+    let api_key = config
+        .openai_key
+        .clone()
+        .unwrap_or_else(|| std::env::var("OPENAI_API_KEY").expect("OPENAI_API_KEY is not set"));
+    let url = config.chat_url.clone();
+    println!("url is {:?}", url);
+    let openai_client = Arc::new(OpenAIClient::new(api_key, url));
+
+    // create tool set
+    let mut tool_set = ToolSet::default();
+
+    // load mcp
+    if config.mcp.is_some() {
+        let mcp_clients = config.create_mcp_clients().await?;
+
+        for (name, client) in mcp_clients {
+            println!("loading mcp tools: {}", name);
+            let server = client.peer().clone();
+            let tools = get_mcp_tools(server).await?;
+
+            for tool in tools {
+                println!("adding tool: {}", tool.name());
+                tool_set.add_tool(tool);
+            }
+        }
+    }
+
+    // create chat session
+    let mut session = ChatSession::new(
+        openai_client,
+        tool_set,
+        config
+            .model_name
+            .unwrap_or_else(|| "gpt-4o-mini".to_string()),
+    );
+
+    // build system prompt with tool info
+    let mut system_prompt =
+        "You are an assistant who can help the user complete various tasks. 
you have the following tools to use:\n".to_string(); + + // add tool info to system prompt + for tool in session.get_tools() { + system_prompt.push_str(&format!( + "\ntool name: {}\ndescription: {}\nparameters: {}\n", + tool.name(), + tool.description(), + serde_json::to_string_pretty(&tool.parameters()).unwrap_or_default() + )); + } + + // add tool call format guidance + system_prompt.push_str( + "\nif you need to call tool, please use the following format:\n\ + Tool: \n\ + Inputs: \n", + ); + + // add system prompt + session.add_system_prompt(system_prompt); + + // start chat + session.chat().await?; + + Ok(()) +} diff --git a/examples/simple-chat-client/src/chat.rs b/examples/simple-chat-client/src/chat.rs new file mode 100644 index 00000000..34386cdc --- /dev/null +++ b/examples/simple-chat-client/src/chat.rs @@ -0,0 +1,170 @@ +use std::{ + io::{self, Write}, + sync::Arc, +}; + +use anyhow::Result; +use serde_json::Value; + +use crate::{ + client::ChatClient, + model::{CompletionRequest, Message, Tool as ModelTool}, + tool::{Tool as ToolTrait, ToolSet}, +}; + +pub struct ChatSession { + client: Arc, + tool_set: ToolSet, + model: String, + messages: Vec, +} + +impl ChatSession { + pub fn new(client: Arc, tool_set: ToolSet, model: String) -> Self { + Self { + client, + tool_set, + model, + messages: Vec::new(), + } + } + + pub fn add_system_prompt(&mut self, prompt: impl ToString) { + self.messages.push(Message::system(prompt)); + } + + pub fn get_tools(&self) -> Vec> { + self.tool_set.tools() + } + + pub async fn chat(&mut self) -> Result<()> { + println!("welcome to use simple chat client, use 'exit' to quit"); + + loop { + print!("> "); + io::stdout().flush()?; + + let mut input = String::new(); + io::stdin().read_line(&mut input)?; + input = input.trim().to_string(); + + if input.is_empty() { + continue; + } + + if input == "exit" { + break; + } + + self.messages.push(Message::user(&input)); + + // prepare tool list + let tools = self.tool_set.tools(); + let tool_definitions = if !tools.is_empty() { + Some( + tools + .iter() + .map(|tool| crate::model::Tool { + name: tool.name(), + description: tool.description(), + parameters: tool.parameters(), + }) + .collect(), + ) + } else { + None + }; + + // create request + let request = CompletionRequest { + model: self.model.clone(), + messages: self.messages.clone(), + temperature: Some(0.7), + tools: tool_definitions, + }; + + // send request + let response = self.client.complete(request).await?; + + if let Some(choice) = response.choices.first() { + println!("AI: {}", choice.message.content); + self.messages.push(choice.message.clone()); + + // check if message contains tool call + if choice.message.content.contains("Tool:") { + let lines: Vec<&str> = choice.message.content.split('\n').collect(); + + // simple parse tool call + let mut tool_name = None; + let mut args_text = Vec::new(); + let mut parsing_args = false; + + for line in lines { + if line.starts_with("Tool:") { + tool_name = line.strip_prefix("Tool:").map(|s| s.trim().to_string()); + parsing_args = false; + } else if line.starts_with("Inputs:") { + parsing_args = true; + } else if parsing_args { + args_text.push(line.trim()); + } + } + + if let Some(name) = tool_name { + if let Some(tool) = self.tool_set.get_tool(&name) { + println!("calling tool: {}", name); + + // simple handle args + let args_str = args_text.join("\n"); + let args = match serde_json::from_str(&args_str) { + Ok(v) => v, + Err(_) => { + // try to handle args as string + serde_json::Value::String(args_str) + 
} + }; + + // call tool + match tool.call(args).await { + Ok(result) => { + println!("tool result: {}", result); + + // add tool result to dialog + self.messages.push(Message::user(result)); + } + Err(e) => { + println!("tool call failed: {}", e); + self.messages + .push(Message::user(format!("tool call failed: {}", e))); + } + } + } else { + println!("tool not found: {}", name); + } + } + } + } + } + + Ok(()) + } +} + +#[async_trait::async_trait] +impl ToolTrait for ModelTool { + fn name(&self) -> String { + self.name.clone() + } + + fn description(&self) -> String { + self.description.clone() + } + + fn parameters(&self) -> Value { + self.parameters.clone() + } + + async fn call(&self, _args: Value) -> Result { + unimplemented!("ModelTool can't be called directly, only for tool definition") + } +} diff --git a/examples/simple-chat-client/src/client.rs b/examples/simple-chat-client/src/client.rs new file mode 100644 index 00000000..c2c29220 --- /dev/null +++ b/examples/simple-chat-client/src/client.rs @@ -0,0 +1,68 @@ +use anyhow::Result; +use async_trait::async_trait; +use reqwest::Client as HttpClient; + +use crate::model::{CompletionRequest, CompletionResponse}; + +#[async_trait] +pub trait ChatClient: Send + Sync { + async fn complete(&self, request: CompletionRequest) -> Result; +} + +pub struct OpenAIClient { + api_key: String, + client: HttpClient, + base_url: String, +} + +impl OpenAIClient { + pub fn new(api_key: String, url: Option) -> Self { + let base_url = url.unwrap_or("https://api.openai.com/v1/chat/completions".to_string()); + + // create http client without proxy + let client = HttpClient::builder() + .no_proxy() + .build() + .unwrap_or_else(|_| HttpClient::new()); + + Self { + api_key, + client, + base_url, + } + } + + pub fn with_base_url(mut self, base_url: impl Into) -> Self { + self.base_url = base_url.into(); + self + } +} + +#[async_trait] +impl ChatClient for OpenAIClient { + async fn complete(&self, request: CompletionRequest) -> Result { + println!("sending request to {}", self.base_url); + println!("using api key: {}", self.api_key); + let request_json = serde_json::to_string(&request)?; + println!("request content: {}", request_json); + // no proxy + + let response = self + .client + .post(&self.base_url) + .header("Authorization", format!("Bearer {}", self.api_key)) + .header("Content-Type", "application/json") + .json(&request) + .send() + .await?; + + if !response.status().is_success() { + let error_text = response.text().await?; + println!("API error: {}", error_text); + return Err(anyhow::anyhow!("API Error: {}", error_text)); + } + + let completion: CompletionResponse = response.json().await?; + Ok(completion) + } +} diff --git a/examples/simple-chat-client/src/config.rs b/examples/simple-chat-client/src/config.rs new file mode 100644 index 00000000..9431e7b8 --- /dev/null +++ b/examples/simple-chat-client/src/config.rs @@ -0,0 +1,89 @@ +use std::{collections::HashMap, path::Path, process::Stdio}; + +use anyhow::Result; +use rmcp::{RoleClient, ServiceExt, service::RunningService}; +use serde::{Deserialize, Serialize}; + +#[derive(Debug, Serialize, Deserialize)] +pub struct Config { + pub openai_key: Option, + pub chat_url: Option, + pub mcp: Option, + pub model_name: Option, +} + +#[derive(Debug, Serialize, Deserialize)] +pub struct McpConfig { + pub server: Vec, +} + +#[derive(Debug, Serialize, Deserialize, Clone)] +pub struct McpServerConfig { + pub name: String, + #[serde(flatten)] + pub transport: McpServerTransportConfig, +} + +#[derive(Debug, 
Serialize, Deserialize, Clone)] +#[serde(tag = "protocol", rename_all = "lowercase")] +pub enum McpServerTransportConfig { + Sse { + url: String, + }, + Stdio { + command: String, + #[serde(default)] + args: Vec, + #[serde(default)] + envs: HashMap, + }, +} + +impl McpServerTransportConfig { + pub async fn start(&self) -> Result> { + let client = match self { + McpServerTransportConfig::Sse { url } => { + let transport = rmcp::transport::sse::SseTransport::start(url).await?; + ().serve(transport).await? + } + McpServerTransportConfig::Stdio { + command, + args, + envs, + } => { + let transport = rmcp::transport::child_process::TokioChildProcess::new( + tokio::process::Command::new(command) + .args(args) + .envs(envs) + .stderr(Stdio::inherit()) + .stdout(Stdio::inherit()), + )?; + ().serve(transport).await? + } + }; + Ok(client) + } +} + +impl Config { + pub async fn load(path: impl AsRef) -> Result { + let content = tokio::fs::read_to_string(path).await?; + let config: Self = toml::from_str(&content)?; + Ok(config) + } + + pub async fn create_mcp_clients( + &self, + ) -> Result>> { + let mut clients = HashMap::new(); + + if let Some(mcp_config) = &self.mcp { + for server in &mcp_config.server { + let client = server.transport.start().await?; + clients.insert(server.name.clone(), client); + } + } + + Ok(clients) + } +} diff --git a/examples/simple-chat-client/src/config.toml b/examples/simple-chat-client/src/config.toml new file mode 100644 index 00000000..42e59f98 --- /dev/null +++ b/examples/simple-chat-client/src/config.toml @@ -0,0 +1,10 @@ +openai_key = "key" +chat_url = "url" +model_name = "model_name" + +[mcp] +[[mcp.server]] +name = "MCP server name" +protocol = "stdio" +command = "MCP server path" +args = [" "] \ No newline at end of file diff --git a/examples/simple-chat-client/src/error.rs b/examples/simple-chat-client/src/error.rs new file mode 100644 index 00000000..92c86643 --- /dev/null +++ b/examples/simple-chat-client/src/error.rs @@ -0,0 +1,24 @@ +use std::fmt; + +use serde::Serialize; + +#[derive(Debug, Serialize)] +pub struct McpError { + pub message: String, +} + +impl fmt::Display for McpError { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + write!(f, "{}", self.message) + } +} + +impl std::error::Error for McpError {} + +impl McpError { + pub fn new(message: impl ToString) -> Self { + Self { + message: message.to_string(), + } + } +} diff --git a/examples/simple-chat-client/src/lib.rs b/examples/simple-chat-client/src/lib.rs new file mode 100644 index 00000000..6b2bd9d3 --- /dev/null +++ b/examples/simple-chat-client/src/lib.rs @@ -0,0 +1,6 @@ +pub mod chat; +pub mod client; +pub mod config; +pub mod error; +pub mod model; +pub mod tool; diff --git a/examples/simple-chat-client/src/model.rs b/examples/simple-chat-client/src/model.rs new file mode 100644 index 00000000..4e9aeb67 --- /dev/null +++ b/examples/simple-chat-client/src/model.rs @@ -0,0 +1,90 @@ +use serde::{Deserialize, Serialize}; + +#[derive(Debug, Serialize, Deserialize, Clone)] +pub struct Message { + pub role: String, + pub content: String, +} + +impl Message { + pub fn system(content: impl ToString) -> Self { + Self { + role: "system".to_string(), + content: content.to_string(), + } + } + + pub fn user(content: impl ToString) -> Self { + Self { + role: "user".to_string(), + content: content.to_string(), + } + } + + pub fn assistant(content: impl ToString) -> Self { + Self { + role: "assistant".to_string(), + content: content.to_string(), + } + } +} + +#[derive(Debug, Serialize, 
Deserialize)] +pub struct CompletionRequest { + pub model: String, + pub messages: Vec, + #[serde(skip_serializing_if = "Option::is_none")] + pub temperature: Option, + #[serde(skip_serializing_if = "Option::is_none")] + pub tools: Option>, +} + +#[derive(Debug, Serialize, Deserialize)] +pub struct Tool { + pub name: String, + pub description: String, + pub parameters: serde_json::Value, +} + +#[derive(Debug, Serialize, Deserialize)] +pub struct CompletionResponse { + pub id: String, + pub object: String, + pub created: u64, + pub model: String, + pub choices: Vec, +} + +#[derive(Debug, Serialize, Deserialize)] +pub struct Choice { + pub index: u32, + pub message: Message, + pub finish_reason: String, +} + +#[derive(Debug, Serialize, Deserialize)] +pub struct ToolCall { + pub name: String, + pub arguments: serde_json::Value, +} + +#[derive(Debug, Serialize, Deserialize)] +pub struct ToolResult { + pub success: bool, + pub contents: Vec, +} + +#[derive(Debug, Serialize, Deserialize)] +pub struct Content { + pub content_type: String, + pub body: String, +} + +impl Content { + pub fn text(content: impl ToString) -> Self { + Self { + content_type: "text/plain".to_string(), + body: content.to_string(), + } + } +} diff --git a/examples/simple-chat-client/src/tool.rs b/examples/simple-chat-client/src/tool.rs new file mode 100644 index 00000000..6f999366 --- /dev/null +++ b/examples/simple-chat-client/src/tool.rs @@ -0,0 +1,130 @@ +use std::{collections::HashMap, sync::Arc}; + +use anyhow::Result; +use async_trait::async_trait; +use rmcp::{ + model::{CallToolRequestParam, Tool as McpTool}, + service::ServerSink, +}; +use serde_json::Value; + +use crate::{ + error::McpError, + model::{Content, ToolResult}, +}; + +#[async_trait] +pub trait Tool: Send + Sync { + fn name(&self) -> String; + fn description(&self) -> String; + fn parameters(&self) -> Value; + async fn call(&self, args: Value) -> Result; +} + +pub struct McpToolAdapter { + tool: McpTool, + server: ServerSink, +} + +impl McpToolAdapter { + pub fn new(tool: McpTool, server: ServerSink) -> Self { + Self { tool, server } + } +} + +#[async_trait] +impl Tool for McpToolAdapter { + fn name(&self) -> String { + self.tool.name.clone().to_string() + } + + fn description(&self) -> String { + self.tool + .description + .clone() + .unwrap_or_default() + .to_string() + } + + fn parameters(&self) -> Value { + serde_json::to_value(&self.tool.input_schema).unwrap_or(serde_json::json!({})) + } + + async fn call(&self, args: Value) -> Result { + let arguments = match args { + Value::Object(map) => Some(map), + _ => None, + }; + + let call_result = self + .server + .call_tool(CallToolRequestParam { + name: self.tool.name.clone(), + arguments, + }) + .await?; + let result = serde_json::to_string(&call_result).unwrap(); + + Ok(result) + } +} +#[derive(Default)] +pub struct ToolSet { + tools: HashMap>, +} + +impl ToolSet { + pub fn add_tool(&mut self, tool: T) { + self.tools.insert(tool.name(), Arc::new(tool)); + } + + pub fn get_tool(&self, name: &str) -> Option> { + self.tools.get(name).cloned() + } + + pub fn tools(&self) -> Vec> { + self.tools.values().cloned().collect() + } +} + +pub async fn get_mcp_tools(server: ServerSink) -> Result> { + let tools = server.list_all_tools().await?; + Ok(tools + .into_iter() + .map(|tool| McpToolAdapter::new(tool, server.clone())) + .collect()) +} + +pub trait IntoCallToolResult { + fn into_call_tool_result(self) -> Result; +} + +impl IntoCallToolResult for Result +where + T: serde::Serialize, +{ + fn 
into_call_tool_result(self) -> Result { + match self { + Ok(response) => { + let content = Content { + content_type: "application/json".to_string(), + body: serde_json::to_string(&response).unwrap_or_default(), + }; + Ok(ToolResult { + success: true, + contents: vec![content], + }) + } + Err(error) => { + let content = Content { + content_type: "application/json".to_string(), + body: serde_json::to_string(&error).unwrap_or_default(), + }; + Ok(ToolResult { + success: false, + contents: vec![content], + }) + } + } + } +}
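For context, the plain-text tool-call convention that `simple_chat.rs` puts into the system prompt and `chat.rs` parses back out looks like this in a model reply (the tool name and arguments are purely illustrative):

```text
Tool: get_weather
Inputs: {"city": "Berlin"}
```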