From bc57b5385e201f58543dd83586c730652f3875a6 Mon Sep 17 00:00:00 2001 From: z Date: Tue, 15 Apr 2025 17:57:09 +1200 Subject: [PATCH 01/15] added support for sse urls in sdk for mcp endpoints --- src/llm.rs | 14 +++++++++++++- 1 file changed, 13 insertions(+), 1 deletion(-) diff --git a/src/llm.rs b/src/llm.rs index 9c9b12a..afd7920 100644 --- a/src/llm.rs +++ b/src/llm.rs @@ -43,6 +43,7 @@ pub enum SupportedModels { Gemma22BInstruct(Option), Gemma27BInstruct(Option), Gemma29BInstruct(Option), + Custom(String), } impl FromStr for SupportedModels { @@ -99,7 +100,7 @@ impl FromStr for SupportedModels { SupportedModels::Gemma29BInstruct(Some("q4f16_1".to_string())), ), - _ => Err(format!("Unsupported model: {}", s)), + _ => Ok(SupportedModels::Custom(s.to_string())), } } } @@ -114,6 +115,7 @@ impl std::fmt::Display for SupportedModels { SupportedModels::Gemma22BInstruct(_) => write!(f, "gemma-2-2b-it"), SupportedModels::Gemma27BInstruct(_) => write!(f, "gemma-2-27b-it"), SupportedModels::Gemma29BInstruct(_) => write!(f, "gemma-2-9b-it"), + SupportedModels::Custom(s) => write!(f, "{}", s), } } } @@ -130,6 +132,7 @@ pub struct BlocklessLlm { #[derive(Debug, Clone, Default, PartialEq)] pub struct LlmOptions { pub system_message: String, + pub tools_sse_urls: Vec, // pub max_tokens: u32, pub temperature: Option, pub top_p: Option, @@ -144,6 +147,9 @@ impl LlmOptions { pub fn dump(&self) -> Vec { let mut json = JsonValue::new_object(); json["system_message"] = self.system_message.clone().into(); + if !self.tools_sse_urls.is_empty() { + json["tools_sse_urls"] = self.tools_sse_urls.clone().into(); + } if let Some(temperature) = self.temperature { json["temperature"] = temperature.into(); } @@ -180,8 +186,14 @@ impl TryFrom> for LlmOptions { .ok_or(LlmErrorKind::ModelOptionsNotSet)? .to_string(); + let tools_sse_urls = json["tools_sse_urls"] + .as_str() + .map(|s| s.split(',').map(|s| s.trim().to_string()).collect()) + .unwrap_or_default(); + Ok(LlmOptions { system_message, + tools_sse_urls, temperature: json["temperature"].as_f32(), top_p: json["top_p"].as_f32(), }) From 7835c9cd8c10c689f643a84585f56d830134a0f7 Mon Sep 17 00:00:00 2001 From: z Date: Tue, 15 Apr 2025 17:57:21 +1200 Subject: [PATCH 02/15] added mcp example --- examples/llm-mcp.rs | 32 ++++++++++++++++++++++++++++++++ 1 file changed, 32 insertions(+) create mode 100644 examples/llm-mcp.rs diff --git a/examples/llm-mcp.rs b/examples/llm-mcp.rs new file mode 100644 index 0000000..2402e85 --- /dev/null +++ b/examples/llm-mcp.rs @@ -0,0 +1,32 @@ +use blockless_sdk::*; + +/// This example demonstrates how to use the Blockless SDK to interact with two different LLM models +/// and use MCP to call the tools. + +fn main() { + // large model + let mut llm = BlocklessLlm::new(SupportedModels::Custom( + "Llama-3.1-8B-Instruct-q4f16_1-MLC".to_string(), + )) + .unwrap(); + + // Assume we have two tools running on different ports + // 1. http://localhost:3001/sse - add + // 2. 
http://localhost:3002/sse - multiply + llm.set_options(LlmOptions { + tools_sse_urls: vec![ + "http://localhost:3001/sse".to_string(), + "http://localhost:3002/sse".to_string(), + ], + ..Default::default() + }) + .unwrap(); + + let response = llm + .chat_request("Add the following numbers: 1215, 2213") + .unwrap(); + println!("llm Response: {}", response); + + let response = llm.chat_request("Multiply 1215 by 2213").unwrap(); + println!("llm Response: {}", response); +} From 6ec91fff68cdc95e06089cec610366f45a722f59 Mon Sep 17 00:00:00 2001 From: z Date: Tue, 15 Apr 2025 17:59:25 +1200 Subject: [PATCH 03/15] readme upd --- README.md | 14 +++++++++++++- 1 file changed, 13 insertions(+), 1 deletion(-) diff --git a/README.md b/README.md index e009264..90eb183 100644 --- a/README.md +++ b/README.md @@ -68,10 +68,22 @@ cargo build --release --target wasm32-wasip1 --example httpbin ~/.bls/runtime/bls-runtime target/wasm32-wasip1/release/examples/httpbin.wasm --permission http://httpbin.org/anything ``` +### [LLM-MCP](./examples/llm-mcp.rs) + +```sh +# Build example +cargo build --release --target wasm32-wasip1 --example llm-mcp + +# Run example with blockless runtime and tool servers running +# Make sure you have the tool servers running on ports 3001 and 3002 +~/.bls/runtime/bls-runtime target/wasm32-wasip1/release/examples/llm-mcp.wasm +``` + ## Examples list | Example | Description | [Browser runtime](https://github.com/blocklessnetwork/b7s-browser) support | [Native runtime](https://github.com/blessnetwork/bls-runtime) support | | ------- | ----------- | --------------- | --------------- | | [coingecko_oracle](./examples/coingecko_oracle.rs) | Coingecko Oracle to query price of bitcoin from coingecko | ✅ | ✅ | | [httpbin](./examples/httpbin.rs) | HTTP to query anything from httpbin | ✅ | ✅ | -| [llm](./examples/llm.rs) | LLM to chat with `Llama-3.1-8B-Instruct-q4f32_1-MLC` and `SmolLM2-1.7B-Instruct-q4f16_1-MLC` models | ✅ | ❌ | +| [llm](./examples/llm.rs) | LLM to chat with `Llama-3.1-8B-Instruct-q4f32_1-MLC` and `SmolLM2-1.7B-Instruct-q4f16_1-MLC` models | ✅ | ✅ | +| [llm-mcp](./examples/llm-mcp.rs) | LLM with MCP (Model Control Protocol) demonstrating tool integration using SSE endpoints | ✅ | ❌ | From 1bc8dba0b585575af5d55d934a32d87c55260ab9 Mon Sep 17 00:00:00 2001 From: z Date: Tue, 15 Apr 2025 17:59:55 +1200 Subject: [PATCH 04/15] cargo.toml version bump 0.1.8 -> 0.1.9 --- Cargo.toml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Cargo.toml b/Cargo.toml index 3050b9b..c4401dc 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "blockless-sdk" -version = "0.1.8" +version = "0.1.9" authors = ["Join.G", "Zeeshan.S"] description = "blockless runtime sdk" keywords = ["blockless", "sdk"] From 3fe067816f7b58bbd8116f519efb3efbb12151fa Mon Sep 17 00:00:00 2001 From: z Date: Tue, 15 Apr 2025 19:02:25 +1200 Subject: [PATCH 05/15] fixed json serializing and deserializing --- src/llm.rs | 48 +++++++++++++++++++++++++++++++----------------- 1 file changed, 31 insertions(+), 17 deletions(-) diff --git a/src/llm.rs b/src/llm.rs index afd7920..46ffeed 100644 --- a/src/llm.rs +++ b/src/llm.rs @@ -131,31 +131,46 @@ pub struct BlocklessLlm { #[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))] #[derive(Debug, Clone, Default, PartialEq)] pub struct LlmOptions { - pub system_message: String, - pub tools_sse_urls: Vec, - // pub max_tokens: u32, + pub system_message: Option, + pub tools_sse_urls: Option>, pub temperature: Option, pub top_p: 
Option, - // pub frequency_penalty: f32, - // pub presence_penalty: f32, } impl LlmOptions { - pub fn new() -> Self { - Self::default() + pub fn with_system_message(mut self, system_message: String) -> Self { + self.system_message = Some(system_message); + self } - pub fn dump(&self) -> Vec { + + pub fn with_tools_sse_urls(mut self, tools_sse_urls: Vec) -> Self { + self.tools_sse_urls = Some(tools_sse_urls); + self + } + + fn dump(&self) -> Vec { let mut json = JsonValue::new_object(); - json["system_message"] = self.system_message.clone().into(); - if !self.tools_sse_urls.is_empty() { - json["tools_sse_urls"] = self.tools_sse_urls.clone().into(); + + if let Some(system_message) = &self.system_message { + json["system_message"] = system_message.clone().into(); + } + + if let Some(tools_sse_urls) = &self.tools_sse_urls { + json["tools_sse_urls"] = tools_sse_urls.clone().into(); } + if let Some(temperature) = self.temperature { json["temperature"] = temperature.into(); } if let Some(top_p) = self.top_p { json["top_p"] = top_p.into(); } + + // If json is empty, return an empty JSON object + if json.entries().count() == 0 { + return "{}".as_bytes().to_vec(); + } + json.dump().into_bytes() } } @@ -183,17 +198,16 @@ impl TryFrom> for LlmOptions { // Extract system_message let system_message = json["system_message"] .as_str() - .ok_or(LlmErrorKind::ModelOptionsNotSet)? - .to_string(); + .map(|s| s.to_string()); + // Extract tools_sse_urls let tools_sse_urls = json["tools_sse_urls"] .as_str() - .map(|s| s.split(',').map(|s| s.trim().to_string()).collect()) - .unwrap_or_default(); + .map(|s| s.split(',').map(|s| s.trim().to_string()).collect()); Ok(LlmOptions { - system_message, - tools_sse_urls, + system_message: system_message, + tools_sse_urls: tools_sse_urls, temperature: json["temperature"].as_f32(), top_p: json["top_p"].as_f32(), }) From 20b1b1192ba0c9e9a7c4ef1fa4f2331bd7a079e7 Mon Sep 17 00:00:00 2001 From: z Date: Tue, 15 Apr 2025 19:02:38 +1200 Subject: [PATCH 06/15] simplified llm example --- examples/llm.rs | 15 ++------------- 1 file changed, 2 insertions(+), 13 deletions(-) diff --git a/examples/llm.rs b/examples/llm.rs index 5a71e73..4ac824b 100644 --- a/examples/llm.rs +++ b/examples/llm.rs @@ -18,12 +18,7 @@ fn main() { First time I ask, you name will be lucy. Second time I ask, you name will be bob. "#; - llm.set_options(LlmOptions { - system_message: prompt.to_string(), - top_p: Some(0.5), - ..Default::default() - }) - .unwrap(); + llm.set_options(LlmOptions::default().with_system_message(prompt.to_string())).unwrap(); let response = llm.chat_request("What is your name?").unwrap(); println!("llm Response: {}", response); @@ -33,13 +28,7 @@ fn main() { First time I ask, you name will be daisy. Second time I ask, you name will be hector. 
"#; - llm_small - .set_options(LlmOptions { - system_message: prompt_smol.to_string(), - top_p: Some(0.5), - ..Default::default() - }) - .unwrap(); + llm_small.set_options(LlmOptions::default().with_system_message(prompt_smol.to_string())).unwrap(); let response = llm_small.chat_request("What is your name?").unwrap(); println!("llm_small Response: {}", response); From 2f889391b77a26f9a6a776c196d85ee7e63867dd Mon Sep 17 00:00:00 2001 From: z Date: Tue, 15 Apr 2025 19:03:07 +1200 Subject: [PATCH 07/15] simplified llm-mcp example --- examples/llm-mcp.rs | 11 ++++------- 1 file changed, 4 insertions(+), 7 deletions(-) diff --git a/examples/llm-mcp.rs b/examples/llm-mcp.rs index 2402e85..d013764 100644 --- a/examples/llm-mcp.rs +++ b/examples/llm-mcp.rs @@ -13,13 +13,10 @@ fn main() { // Assume we have two tools running on different ports // 1. http://localhost:3001/sse - add // 2. http://localhost:3002/sse - multiply - llm.set_options(LlmOptions { - tools_sse_urls: vec![ - "http://localhost:3001/sse".to_string(), - "http://localhost:3002/sse".to_string(), - ], - ..Default::default() - }) + llm.set_options(LlmOptions::default().with_tools_sse_urls(vec![ + "http://localhost:3001/sse".to_string(), + "http://localhost:3002/sse".to_string(), + ])) .unwrap(); let response = llm From 07126a4431ece3c031bb44f57dedc563e479aafb Mon Sep 17 00:00:00 2001 From: z Date: Tue, 15 Apr 2025 19:18:53 +1200 Subject: [PATCH 08/15] cargo fmt --all --- examples/llm.rs | 7 +++++-- src/llm.rs | 4 +--- 2 files changed, 6 insertions(+), 5 deletions(-) diff --git a/examples/llm.rs b/examples/llm.rs index 4ac824b..f44521e 100644 --- a/examples/llm.rs +++ b/examples/llm.rs @@ -18,7 +18,8 @@ fn main() { First time I ask, you name will be lucy. Second time I ask, you name will be bob. "#; - llm.set_options(LlmOptions::default().with_system_message(prompt.to_string())).unwrap(); + llm.set_options(LlmOptions::default().with_system_message(prompt.to_string())) + .unwrap(); let response = llm.chat_request("What is your name?").unwrap(); println!("llm Response: {}", response); @@ -28,7 +29,9 @@ fn main() { First time I ask, you name will be daisy. Second time I ask, you name will be hector. 
"#; - llm_small.set_options(LlmOptions::default().with_system_message(prompt_smol.to_string())).unwrap(); + llm_small + .set_options(LlmOptions::default().with_system_message(prompt_smol.to_string())) + .unwrap(); let response = llm_small.chat_request("What is your name?").unwrap(); println!("llm_small Response: {}", response); diff --git a/src/llm.rs b/src/llm.rs index 46ffeed..6b5aba5 100644 --- a/src/llm.rs +++ b/src/llm.rs @@ -196,9 +196,7 @@ impl TryFrom> for LlmOptions { let json = json::parse(&json_str).map_err(|_| LlmErrorKind::ModelOptionsNotSet)?; // Extract system_message - let system_message = json["system_message"] - .as_str() - .map(|s| s.to_string()); + let system_message = json["system_message"].as_str().map(|s| s.to_string()); // Extract tools_sse_urls let tools_sse_urls = json["tools_sse_urls"] From ff04b07538fa28e26bf1bd1c0e9ce4071e8525e3 Mon Sep 17 00:00:00 2001 From: z Date: Tue, 29 Apr 2025 17:11:48 +1200 Subject: [PATCH 09/15] SupportedModels -> Models --- examples/llm-mcp.rs | 3 +-- examples/llm.rs | 4 +-- src/llm.rs | 59 ++++++++++++++++++++++----------------------- 3 files changed, 32 insertions(+), 34 deletions(-) diff --git a/examples/llm-mcp.rs b/examples/llm-mcp.rs index d013764..bf503de 100644 --- a/examples/llm-mcp.rs +++ b/examples/llm-mcp.rs @@ -5,8 +5,7 @@ use blockless_sdk::*; fn main() { // large model - let mut llm = BlocklessLlm::new(SupportedModels::Custom( - "Llama-3.1-8B-Instruct-q4f16_1-MLC".to_string(), + let mut llm = BlocklessLlm::new(Models::Custom( )) .unwrap(); diff --git a/examples/llm.rs b/examples/llm.rs index f44521e..8be94d2 100644 --- a/examples/llm.rs +++ b/examples/llm.rs @@ -8,10 +8,10 @@ use blockless_sdk::*; /// demonstrating how the same instance maintains state between requests. fn main() { // large model - let mut llm = BlocklessLlm::new(SupportedModels::Mistral7BInstructV03(None)).unwrap(); + let mut llm = BlocklessLlm::new(Models::Mistral7BInstructV03(None)).unwrap(); // small model - let mut llm_small = BlocklessLlm::new(SupportedModels::Llama321BInstruct(None)).unwrap(); + let mut llm_small = BlocklessLlm::new(Models::Llama321BInstruct(None)).unwrap(); let prompt = r#" You are a helpful assistant. 
diff --git a/src/llm.rs b/src/llm.rs index 6b5aba5..bf65ec5 100644 --- a/src/llm.rs +++ b/src/llm.rs @@ -35,7 +35,7 @@ extern "C" { } #[derive(Debug, Clone)] -pub enum SupportedModels { +pub enum Models { Llama321BInstruct(Option), Llama323BInstruct(Option), Mistral7BInstructV03(Option), @@ -46,76 +46,75 @@ pub enum SupportedModels { Custom(String), } -impl FromStr for SupportedModels { +impl FromStr for Models { type Err = String; fn from_str(s: &str) -> Result { match s { // Llama 3.2 1B - "Llama-3.2-1B-Instruct" => Ok(SupportedModels::Llama321BInstruct(None)), + "Llama-3.2-1B-Instruct" => Ok(Models::Llama321BInstruct(None)), "Llama-3.2-1B-Instruct-Q6_K" | "Llama-3.2-1B-Instruct_Q6_K" | "Llama-3.2-1B-Instruct.Q6_K" => { - Ok(SupportedModels::Llama321BInstruct(Some("Q6_K".to_string()))) + Ok(Models::Llama321BInstruct(Some("Q6_K".to_string()))) } "Llama-3.2-1B-Instruct-q4f16_1" | "Llama-3.2-1B-Instruct.q4f16_1" => Ok( - SupportedModels::Llama321BInstruct(Some("q4f16_1".to_string())), + Models::Llama321BInstruct(Some("q4f16_1".to_string())), ), // Llama 3.2 3B - "Llama-3.2-3B-Instruct" => Ok(SupportedModels::Llama323BInstruct(None)), + "Llama-3.2-3B-Instruct" => Ok(Models::Llama323BInstruct(None)), "Llama-3.2-3B-Instruct-Q6_K" | "Llama-3.2-3B-Instruct_Q6_K" | "Llama-3.2-3B-Instruct.Q6_K" => { - Ok(SupportedModels::Llama323BInstruct(Some("Q6_K".to_string()))) + Ok(Models::Llama323BInstruct(Some("Q6_K".to_string()))) } "Llama-3.2-3B-Instruct-q4f16_1" | "Llama-3.2-3B-Instruct.q4f16_1" => Ok( - SupportedModels::Llama323BInstruct(Some("q4f16_1".to_string())), + Models::Llama323BInstruct(Some("q4f16_1".to_string())), ), // Mistral 7B - "Mistral-7B-Instruct-v0.3" => Ok(SupportedModels::Mistral7BInstructV03(None)), + "Mistral-7B-Instruct-v0.3" => Ok(Models::Mistral7BInstructV03(None)), "Mistral-7B-Instruct-v0.3-q4f16_1" | "Mistral-7B-Instruct-v0.3.q4f16_1" => Ok( - SupportedModels::Mistral7BInstructV03(Some("q4f16_1".to_string())), + Models::Mistral7BInstructV03(Some("q4f16_1".to_string())), ), // Mixtral 8x7B - "Mixtral-8x7B-Instruct-v0.1" => Ok(SupportedModels::Mixtral8x7BInstructV01(None)), + "Mixtral-8x7B-Instruct-v0.1" => Ok(Models::Mixtral8x7BInstructV01(None)), "Mixtral-8x7B-Instruct-v0.1-q4f16_1" | "Mixtral-8x7B-Instruct-v0.1.q4f16_1" => Ok( - SupportedModels::Mixtral8x7BInstructV01(Some("q4f16_1".to_string())), + Models::Mixtral8x7BInstructV01(Some("q4f16_1".to_string())), ), // Gemma models - "gemma-2-2b-it" => Ok(SupportedModels::Gemma22BInstruct(None)), + "gemma-2-2b-it" => Ok(Models::Gemma22BInstruct(None)), "gemma-2-2b-it-q4f16_1" | "gemma-2-2b-it.q4f16_1" => Ok( - SupportedModels::Gemma22BInstruct(Some("q4f16_1".to_string())), + Models::Gemma22BInstruct(Some("q4f16_1".to_string())), ), - "gemma-2-27b-it" => Ok(SupportedModels::Gemma27BInstruct(None)), + "gemma-2-27b-it" => Ok(Models::Gemma27BInstruct(None)), "gemma-2-27b-it-q4f16_1" | "gemma-2-27b-it.q4f16_1" => Ok( - SupportedModels::Gemma27BInstruct(Some("q4f16_1".to_string())), + Models::Gemma27BInstruct(Some("q4f16_1".to_string())), ), - "gemma-2-9b-it" => Ok(SupportedModels::Gemma29BInstruct(None)), + "gemma-2-9b-it" => Ok(Models::Gemma29BInstruct(None)), "gemma-2-9b-it-q4f16_1" | "gemma-2-9b-it.q4f16_1" => Ok( - SupportedModels::Gemma29BInstruct(Some("q4f16_1".to_string())), + Models::Gemma29BInstruct(Some("q4f16_1".to_string())), ), - - _ => Ok(SupportedModels::Custom(s.to_string())), + _ => Ok(Models::Custom(s.to_string())), } } } -impl std::fmt::Display for SupportedModels { +impl std::fmt::Display for Models { fn fmt(&self, 
f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { match self { - SupportedModels::Llama321BInstruct(_) => write!(f, "Llama-3.2-1B-Instruct"), - SupportedModels::Llama323BInstruct(_) => write!(f, "Llama-3.2-3B-Instruct"), - SupportedModels::Mistral7BInstructV03(_) => write!(f, "Mistral-7B-Instruct-v0.3"), - SupportedModels::Mixtral8x7BInstructV01(_) => write!(f, "Mixtral-8x7B-Instruct-v0.1"), - SupportedModels::Gemma22BInstruct(_) => write!(f, "gemma-2-2b-it"), - SupportedModels::Gemma27BInstruct(_) => write!(f, "gemma-2-27b-it"), - SupportedModels::Gemma29BInstruct(_) => write!(f, "gemma-2-9b-it"), - SupportedModels::Custom(s) => write!(f, "{}", s), + Models::Llama321BInstruct(_) => write!(f, "Llama-3.2-1B-Instruct"), + Models::Llama323BInstruct(_) => write!(f, "Llama-3.2-3B-Instruct"), + Models::Mistral7BInstructV03(_) => write!(f, "Mistral-7B-Instruct-v0.3"), + Models::Mixtral8x7BInstructV01(_) => write!(f, "Mixtral-8x7B-Instruct-v0.1"), + Models::Gemma22BInstruct(_) => write!(f, "gemma-2-2b-it"), + Models::Gemma27BInstruct(_) => write!(f, "gemma-2-27b-it"), + Models::Gemma29BInstruct(_) => write!(f, "gemma-2-9b-it"), + Models::Custom(s) => write!(f, "{}", s), } } } @@ -213,7 +212,7 @@ impl TryFrom> for LlmOptions { } impl BlocklessLlm { - pub fn new(model: SupportedModels) -> Result { + pub fn new(model: Models) -> Result { let model_name = model.to_string(); let mut llm: BlocklessLlm = Default::default(); llm.set_model(&model_name)?; From 65e20593e0a704dd91128175645337530447b297 Mon Sep 17 00:00:00 2001 From: z Date: Tue, 29 Apr 2025 17:12:28 +1200 Subject: [PATCH 10/15] added mcp function call error --- src/llm.rs | 3 +++ 1 file changed, 3 insertions(+) diff --git a/src/llm.rs b/src/llm.rs index bf65ec5..5948d40 100644 --- a/src/llm.rs +++ b/src/llm.rs @@ -342,6 +342,7 @@ pub enum LlmErrorKind { ModelShutdownFailed, // 6 Utf8Error, // 7 RuntimeError, // 8 + MCPFunctionCallError, // 9 } impl From for LlmErrorKind { @@ -354,6 +355,8 @@ impl From for LlmErrorKind { 5 => LlmErrorKind::ModelOptionsNotSet, 6 => LlmErrorKind::ModelShutdownFailed, 7 => LlmErrorKind::Utf8Error, + // 8 => LlmErrorKind::RuntimeError, + 9 => LlmErrorKind::MCPFunctionCallError, _ => LlmErrorKind::RuntimeError, } } From ebe73a1dbe381d695d475dd1a5be051fad8225e2 Mon Sep 17 00:00:00 2001 From: z Date: Tue, 29 Apr 2025 17:12:50 +1200 Subject: [PATCH 11/15] graceful handling of tools_sse_urls --- src/llm.rs | 19 +++++++++++++++---- 1 file changed, 15 insertions(+), 4 deletions(-) diff --git a/src/llm.rs b/src/llm.rs index 5948d40..73e4a43 100644 --- a/src/llm.rs +++ b/src/llm.rs @@ -197,10 +197,21 @@ impl TryFrom> for LlmOptions { // Extract system_message let system_message = json["system_message"].as_str().map(|s| s.to_string()); - // Extract tools_sse_urls - let tools_sse_urls = json["tools_sse_urls"] - .as_str() - .map(|s| s.split(',').map(|s| s.trim().to_string()).collect()); + // Extract tools_sse_urls - can be an array or a comma-separated string + let tools_sse_urls = if json["tools_sse_urls"].is_array() { + // Handle array format - native runtime + Some( + json["tools_sse_urls"] + .members() + .filter_map(|v| v.as_str().map(|s| s.to_string())) + .collect(), + ) + } else if let Some(s) = json["tools_sse_urls"].as_str() { + // Handle comma-separated string format - Browser runtime + Some(s.split(',').map(|s| s.trim().to_string()).collect()) + } else { + None + }; Ok(LlmOptions { system_message: system_message, From a592849070a0aa05c3aabfe162e4f94e25082b11 Mon Sep 17 00:00:00 2001 From: z Date: Tue, 29 Apr 
2025 17:14:05 +1200 Subject: [PATCH 12/15] cargo fmt --all --- src/llm.rs | 42 +++++++++++++++++++++--------------------- 1 file changed, 21 insertions(+), 21 deletions(-) diff --git a/src/llm.rs b/src/llm.rs index 73e4a43..c6692c1 100644 --- a/src/llm.rs +++ b/src/llm.rs @@ -57,9 +57,9 @@ impl FromStr for Models { | "Llama-3.2-1B-Instruct.Q6_K" => { Ok(Models::Llama321BInstruct(Some("Q6_K".to_string()))) } - "Llama-3.2-1B-Instruct-q4f16_1" | "Llama-3.2-1B-Instruct.q4f16_1" => Ok( - Models::Llama321BInstruct(Some("q4f16_1".to_string())), - ), + "Llama-3.2-1B-Instruct-q4f16_1" | "Llama-3.2-1B-Instruct.q4f16_1" => { + Ok(Models::Llama321BInstruct(Some("q4f16_1".to_string()))) + } // Llama 3.2 3B "Llama-3.2-3B-Instruct" => Ok(Models::Llama323BInstruct(None)), @@ -68,37 +68,37 @@ impl FromStr for Models { | "Llama-3.2-3B-Instruct.Q6_K" => { Ok(Models::Llama323BInstruct(Some("Q6_K".to_string()))) } - "Llama-3.2-3B-Instruct-q4f16_1" | "Llama-3.2-3B-Instruct.q4f16_1" => Ok( - Models::Llama323BInstruct(Some("q4f16_1".to_string())), - ), + "Llama-3.2-3B-Instruct-q4f16_1" | "Llama-3.2-3B-Instruct.q4f16_1" => { + Ok(Models::Llama323BInstruct(Some("q4f16_1".to_string()))) + } // Mistral 7B "Mistral-7B-Instruct-v0.3" => Ok(Models::Mistral7BInstructV03(None)), - "Mistral-7B-Instruct-v0.3-q4f16_1" | "Mistral-7B-Instruct-v0.3.q4f16_1" => Ok( - Models::Mistral7BInstructV03(Some("q4f16_1".to_string())), - ), + "Mistral-7B-Instruct-v0.3-q4f16_1" | "Mistral-7B-Instruct-v0.3.q4f16_1" => { + Ok(Models::Mistral7BInstructV03(Some("q4f16_1".to_string()))) + } // Mixtral 8x7B "Mixtral-8x7B-Instruct-v0.1" => Ok(Models::Mixtral8x7BInstructV01(None)), - "Mixtral-8x7B-Instruct-v0.1-q4f16_1" | "Mixtral-8x7B-Instruct-v0.1.q4f16_1" => Ok( - Models::Mixtral8x7BInstructV01(Some("q4f16_1".to_string())), - ), + "Mixtral-8x7B-Instruct-v0.1-q4f16_1" | "Mixtral-8x7B-Instruct-v0.1.q4f16_1" => { + Ok(Models::Mixtral8x7BInstructV01(Some("q4f16_1".to_string()))) + } // Gemma models "gemma-2-2b-it" => Ok(Models::Gemma22BInstruct(None)), - "gemma-2-2b-it-q4f16_1" | "gemma-2-2b-it.q4f16_1" => Ok( - Models::Gemma22BInstruct(Some("q4f16_1".to_string())), - ), + "gemma-2-2b-it-q4f16_1" | "gemma-2-2b-it.q4f16_1" => { + Ok(Models::Gemma22BInstruct(Some("q4f16_1".to_string()))) + } "gemma-2-27b-it" => Ok(Models::Gemma27BInstruct(None)), - "gemma-2-27b-it-q4f16_1" | "gemma-2-27b-it.q4f16_1" => Ok( - Models::Gemma27BInstruct(Some("q4f16_1".to_string())), - ), + "gemma-2-27b-it-q4f16_1" | "gemma-2-27b-it.q4f16_1" => { + Ok(Models::Gemma27BInstruct(Some("q4f16_1".to_string()))) + } "gemma-2-9b-it" => Ok(Models::Gemma29BInstruct(None)), - "gemma-2-9b-it-q4f16_1" | "gemma-2-9b-it.q4f16_1" => Ok( - Models::Gemma29BInstruct(Some("q4f16_1".to_string())), - ), + "gemma-2-9b-it-q4f16_1" | "gemma-2-9b-it.q4f16_1" => { + Ok(Models::Gemma29BInstruct(Some("q4f16_1".to_string()))) + } _ => Ok(Models::Custom(s.to_string())), } } From 6f2d5a2923026e3a5c23dc7c7df1b4065c2f429d Mon Sep 17 00:00:00 2001 From: z Date: Tue, 29 Apr 2025 17:31:14 +1200 Subject: [PATCH 13/15] fixed cargo clippy error --- src/llm.rs | 9 +++------ 1 file changed, 3 insertions(+), 6 deletions(-) diff --git a/src/llm.rs b/src/llm.rs index c6692c1..5d6910c 100644 --- a/src/llm.rs +++ b/src/llm.rs @@ -206,16 +206,13 @@ impl TryFrom> for LlmOptions { .filter_map(|v| v.as_str().map(|s| s.to_string())) .collect(), ) - } else if let Some(s) = json["tools_sse_urls"].as_str() { - // Handle comma-separated string format - Browser runtime - Some(s.split(',').map(|s| s.trim().to_string()).collect()) 
} else { - None + json["tools_sse_urls"].as_str().map(|s| s.split(',').map(|s| s.trim().to_string()).collect()) }; Ok(LlmOptions { - system_message: system_message, - tools_sse_urls: tools_sse_urls, + system_message, + tools_sse_urls, temperature: json["temperature"].as_f32(), top_p: json["top_p"].as_f32(), }) From 6bd191dff6dc4d988b484eb7a3e37784c312888b Mon Sep 17 00:00:00 2001 From: z Date: Tue, 29 Apr 2025 17:32:38 +1200 Subject: [PATCH 14/15] fixed cargo fmt error --- src/llm.rs | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/src/llm.rs b/src/llm.rs index 5d6910c..952104f 100644 --- a/src/llm.rs +++ b/src/llm.rs @@ -207,7 +207,9 @@ impl TryFrom> for LlmOptions { .collect(), ) } else { - json["tools_sse_urls"].as_str().map(|s| s.split(',').map(|s| s.trim().to_string()).collect()) + json["tools_sse_urls"] + .as_str() + .map(|s| s.split(',').map(|s| s.trim().to_string()).collect()) }; Ok(LlmOptions { From beca10931d1e6a72b1c32f2cd04d5be3979fa31f Mon Sep 17 00:00:00 2001 From: z Date: Tue, 29 Apr 2025 17:34:39 +1200 Subject: [PATCH 15/15] fixed mcp example --- examples/llm-mcp.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/examples/llm-mcp.rs b/examples/llm-mcp.rs index bf503de..74d56f7 100644 --- a/examples/llm-mcp.rs +++ b/examples/llm-mcp.rs @@ -2,10 +2,10 @@ use blockless_sdk::*; /// This example demonstrates how to use the Blockless SDK to interact with two different LLM models /// and use MCP to call the tools. - fn main() { // large model let mut llm = BlocklessLlm::new(Models::Custom( + "Llama-3.1-8B-Instruct-q4f16_1-MLC".to_string(), )) .unwrap();
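
Taken together, the series replaces the closed `SupportedModels` enum with `Models` (adding a `Custom` variant for arbitrary model names), switches `LlmOptions` to optional, builder-style fields, and plumbs MCP tool servers through `tools_sse_urls`. The snippet below is a minimal end-to-end sketch of that final API, not part of the patches themselves; the model name and the two localhost SSE endpoints are the placeholder values from `examples/llm-mcp.rs` and assume compatible tool servers are already running.

```rust
use blockless_sdk::*;

fn main() {
    // Unknown model names no longer error: they resolve to Models::Custom.
    let mut llm = BlocklessLlm::new(Models::Custom(
        "Llama-3.1-8B-Instruct-q4f16_1-MLC".to_string(),
    ))
    .unwrap();

    // Builder-style options introduced in this series: an optional system
    // message plus optional MCP tool endpoints exposed over SSE. The localhost
    // URLs below are assumed to point at running tool servers.
    llm.set_options(
        LlmOptions::default()
            .with_system_message("You are a helpful assistant.".to_string())
            .with_tools_sse_urls(vec![
                "http://localhost:3001/sse".to_string(),
                "http://localhost:3002/sse".to_string(),
            ]),
    )
    .unwrap();

    // A failed tool invocation can surface as the new
    // LlmErrorKind::MCPFunctionCallError (host error code 9).
    let response = llm
        .chat_request("Add the following numbers: 1215, 2213")
        .unwrap();
    println!("llm Response: {}", response);
}
```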