Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
3 changes: 2 additions & 1 deletion examples/simple-chat-client/Cargo.toml
Original file line number Diff line number Diff line change
Expand Up @@ -17,4 +17,5 @@ rmcp = { git = "https://github.com/modelcontextprotocol/rust-sdk", features = [
"client",
"transport-child-process",
"transport-sse",
], no-default-features = true }
], no-default-features = true }
clap = { version = "4.0", features = ["derive"] }
10 changes: 5 additions & 5 deletions examples/simple-chat-client/README.md
Original file line number Diff line number Diff line change
Expand Up @@ -3,13 +3,13 @@
A simple chat client implementation using the Model Context Protocol (MCP) SDK. It is just an example to help developers understand how to use the MCP SDK. This example uses the easiest way to start an MCP server and calls the tools directly. No embeddings, complex third-party libraries, or function calling are needed (because some models don't support function calling). Tools are simply described in the system prompt, and the client will call them automatically.


## Config
The config file is located at `src/config.toml`; you can change the settings to fit your setup. Move the config file to `/etc/simple-chat-client/config.toml` for system-wide configuration.

## Usage

After preparing the config file, you can run the example:
```bash
cargo run --bin simple_chat
```
./simple_chat --help # show help info
./simple_chat config > config.toml # output default config to file
./simple_chat chat --config my_config.toml # start chat with specified config
./simple_chat chat --config my_config.toml --model gpt-4o-mini # start chat with specified model
```

173 changes: 109 additions & 64 deletions examples/simple-chat-client/src/bin/simple_chat.rs
Original file line number Diff line number Diff line change
@@ -1,84 +1,129 @@
use std::sync::Arc;
use std::{process::exit, sync::Arc};

use anyhow::Result;
use clap::{Parser, Subcommand};
use simple_chat_client::{
chat::ChatSession,
client::OpenAIClient,
config::Config,
tool::{Tool, ToolSet, get_mcp_tools},
};

//default config path
const DEFAULT_CONFIG_PATH: &str = "/etc/simple-chat-client/config.toml";
#[derive(Parser)]
#[command(author, version, about = "Simple Chat Client")]
struct Cli {
/// Config file path
#[arg(short, long, value_name = "FILE")]
config: Option<String>,

#[command(subcommand)]
command: Commands,
}

#[derive(Subcommand)]
enum Commands {
/// Output default config template
Config,

/// Start chat
Chat {
/// Specify the model name
#[arg(short, long)]
model: Option<String>,
},
}

#[tokio::main]
async fn main() -> Result<()> {
// load config
let config = Config::load(DEFAULT_CONFIG_PATH).await?;

// create openai client
let api_key = config
.openai_key
.clone()
.unwrap_or_else(|| std::env::var("OPENAI_API_KEY").expect("need set api key"));
let url = config.chat_url.clone();
println!("url is {:?}", url);
let openai_client = Arc::new(OpenAIClient::new(api_key, url, config.proxy));

// create tool set
let mut tool_set = ToolSet::default();

// load mcp
if config.mcp.is_some() {
let mcp_clients = config.create_mcp_clients().await?;

for (name, client) in mcp_clients {
println!("loading mcp tools: {}", name);
let server = client.peer().clone();
let tools = get_mcp_tools(server).await?;

for tool in tools {
println!("adding tool: {}", tool.name());
tool_set.add_tool(tool);
}
let cli = Cli::parse();

match cli.command {
Commands::Config => {
println!("{}", include_str!("../config.toml"));
return Ok(());
}
}
Commands::Chat { model } => {
// load config
let config_path = cli.config;
let mut config = match config_path {
Some(path) => Config::load(&path).await?,
None => {
println!("No config file provided, using default config");
exit(-1);
}
};

// create chat session
let mut session = ChatSession::new(
openai_client,
tool_set,
config
.model_name
.unwrap_or_else(|| "gpt-4o-mini".to_string()),
);

// build system prompt with tool info
let mut system_prompt =
"you are a assistant, you can help user to complete various tasks. you have the following tools to use:\n".to_string();

// add tool info to system prompt
for tool in session.get_tools() {
system_prompt.push_str(&format!(
"\ntool name: {}\ndescription: {}\nparameters: {}\n",
tool.name(),
tool.description(),
serde_json::to_string_pretty(&tool.parameters()).unwrap_or_default()
));
}
// if command line specify model, override config file setting
if let Some(model_name) = model {
config.model_name = Some(model_name);
}

// create openai client
let api_key = config
.openai_key
.clone()
.unwrap_or_else(|| std::env::var("OPENAI_API_KEY").expect("need set api key"));
let url = config.chat_url.clone();
println!("use api address: {:?}", url);
let openai_client = Arc::new(OpenAIClient::new(api_key, url, config.proxy));

// add tool call format guidance
system_prompt.push_str(
"\nif you need to call tool, please use the following format:\n\
Tool: <tool name>\n\
Inputs: <inputs>\n",
);
// create tool set
let mut tool_set = ToolSet::default();

// add system prompt
session.add_system_prompt(system_prompt);
// load MCP
if config.mcp.is_some() {
let mcp_clients = config.create_mcp_clients().await?;

// start chat
session.chat().await?;
for (name, client) in mcp_clients {
println!("load MCP tool: {}", name);
let server = client.peer().clone();
let tools = get_mcp_tools(server).await?;

for tool in tools {
println!("add tool: {}", tool.name());
tool_set.add_tool(tool);
}
}
}

// create chat session
let mut session = ChatSession::new(
openai_client,
tool_set,
config
.model_name
.unwrap_or_else(|| "gpt-4o-mini".to_string()),
);

// build system prompt
let mut system_prompt =
"you are a assistant, you can help user to complete various tasks. you have the following tools to use:\n".to_string();

// add tool info to system prompt
for tool in session.get_tools() {
system_prompt.push_str(&format!(
"\ntool name: {}\ndescription: {}\nparameters: {}\n",
tool.name(),
tool.description(),
serde_json::to_string_pretty(&tool.parameters())
.expect("failed to serialize tool parameters")
));
}

// add tool call format guidance
system_prompt.push_str(
"\nif you need to call tool, please use the following format:\n\
Tool: <tool name>\n\
Inputs: <inputs>\n",
);

// add system prompt
session.add_system_prompt(system_prompt);

// start chat
session.chat().await?;
}
}

Ok(())
}
Loading