diff --git a/samples/README.md b/samples/README.md
index 93f3bd57..8f79ca89 100644
--- a/samples/README.md
+++ b/samples/README.md
@@ -8,7 +8,7 @@ Explore complete working examples that demonstrate how to use Foundry Local —
| Language | Samples | Description |
|----------|---------|-------------|
-| [**C#**](cs/) | 12 | .NET SDK samples including native chat, audio transcription, tool calling, model management, web server, and tutorials. Uses WinML on Windows for hardware acceleration. |
-| [**JavaScript**](js/) | 12 | Node.js SDK samples including native chat, audio transcription, Electron desktop app, Copilot SDK integration, LangChain, tool calling, web server, and tutorials. |
+| [**C#**](cs/) | 13 | .NET SDK samples including native chat, audio transcription, tool calling, model management, Learn MCP Server, web server, and tutorials. Uses WinML on Windows for hardware acceleration. |
+| [**JavaScript**](js/) | 13 | Node.js SDK samples including native chat, audio transcription, Electron desktop app, Copilot SDK integration, LangChain, Learn MCP Server, tool calling, web server, and tutorials. |
| [**Python**](python/) | 9 | Python samples using the OpenAI-compatible API, including chat, audio transcription, LangChain integration, tool calling, web server, and tutorials. |
| [**Rust**](rust/) | 8 | Rust SDK samples including native chat, audio transcription, tool calling, web server, and tutorials. |
diff --git a/samples/cs/README.md b/samples/cs/README.md
index 367c432e..9dc1e696 100644
--- a/samples/cs/README.md
+++ b/samples/cs/README.md
@@ -20,6 +20,7 @@ Both packages provide the same APIs, so the same source code works on all platfo
| [tutorial-chat-assistant](tutorial-chat-assistant/) | Build an interactive chat assistant (tutorial). |
| [tutorial-document-summarizer](tutorial-document-summarizer/) | Summarize documents with AI (tutorial). |
| [tutorial-tool-calling](tutorial-tool-calling/) | Create a tool-calling assistant (tutorial). |
+| [learn-mcp-tool-calling](learn-mcp-tool-calling/) | Create an assistant that calls [Learn MCP Server](https://learn.microsoft.com/training/support/mcp) to ground answers. |
| [tutorial-voice-to-text](tutorial-voice-to-text/) | Transcribe and summarize audio (tutorial). |
diff --git a/samples/cs/learn-mcp-tool-calling/LearnMcpToolCalling.csproj b/samples/cs/learn-mcp-tool-calling/LearnMcpToolCalling.csproj
new file mode 100644
index 00000000..a3533047
--- /dev/null
+++ b/samples/cs/learn-mcp-tool-calling/LearnMcpToolCalling.csproj
@@ -0,0 +1,50 @@
+
+
+
+ Exe
+ enable
+ enable
+
+
+
+
+ net9.0-windows10.0.26100
+ false
+ ARM64;x64
+ None
+ false
+
+
+
+
+ net9.0
+
+
+
+ $(NETCoreSdkRuntimeIdentifier)
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
diff --git a/samples/cs/learn-mcp-tool-calling/LearnMcpToolCalling.sln b/samples/cs/learn-mcp-tool-calling/LearnMcpToolCalling.sln
new file mode 100644
index 00000000..2a4a0ade
--- /dev/null
+++ b/samples/cs/learn-mcp-tool-calling/LearnMcpToolCalling.sln
@@ -0,0 +1,19 @@
+
+Microsoft Visual Studio Solution File, Format Version 12.00
+# Visual Studio Version 17
+VisualStudioVersion = 17.0.31903.59
+MinimumVisualStudioVersion = 10.0.40219.1
+Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "LearnMcpToolCalling", "LearnMcpToolCalling.csproj", "{00000000-0000-0000-0000-000000000001}"
+EndProject
+Global
+ GlobalSection(SolutionConfigurationPlatforms) = preSolution
+ Debug|Any CPU = Debug|Any CPU
+ Release|Any CPU = Release|Any CPU
+ EndGlobalSection
+ GlobalSection(ProjectConfigurationPlatforms) = postSolution
+ {00000000-0000-0000-0000-000000000001}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
+ {00000000-0000-0000-0000-000000000001}.Debug|Any CPU.Build.0 = Debug|Any CPU
+ {00000000-0000-0000-0000-000000000001}.Release|Any CPU.ActiveCfg = Release|Any CPU
+ {00000000-0000-0000-0000-000000000001}.Release|Any CPU.Build.0 = Release|Any CPU
+ EndGlobalSection
+EndGlobal
diff --git a/samples/cs/learn-mcp-tool-calling/Program.cs b/samples/cs/learn-mcp-tool-calling/Program.cs
new file mode 100644
index 00000000..10487ea5
--- /dev/null
+++ b/samples/cs/learn-mcp-tool-calling/Program.cs
@@ -0,0 +1,317 @@
+// Foundry Local + Learn MCP Server: Local AI Doc Assistant
+// Uses Foundry Local for on-device inference and Learn MCP Server for doc retrieval.
+
+using System.Text.Json;
+using Microsoft.AI.Foundry.Local;
+using Betalgo.Ranul.OpenAI.ObjectModels.RequestModels;
+using Betalgo.Ranul.OpenAI.ObjectModels.ResponseModels;
+using Betalgo.Ranul.OpenAI.ObjectModels.SharedModels;
+using Microsoft.Extensions.Logging;
+
+CancellationToken ct = CancellationToken.None;
+
+// --- MCP endpoint ---
+const string McpEndpoint = "https://learn.microsoft.com/api/mcp";
+
+// --- Tool definitions ---
+List<ToolDefinition> tools =
+[
+    new ToolDefinition
+    {
+        Type = "function",
+        Function = new FunctionDefinition()
+        {
+            Name = "search_docs",
+            Description = "Search Microsoft Learn documentation for a given query. Returns relevant documentation content with titles and URLs. Use this tool whenever the user asks about a Microsoft product, service, SDK, API, or technology.",
+            Parameters = new PropertyDefinition()
+            {
+                Type = "object",
+                Properties = new Dictionary<string, PropertyDefinition>()
+                {
+                    { "query", new PropertyDefinition() { Type = "string", Description = "The search query about a Microsoft product or technology" } }
+                },
+                Required = ["query"]
+            }
+        }
+    }
+];
+
+// --- Tool implementation: call Learn MCP Server ---
+var httpClient = new HttpClient();
+
+async Task<string> SearchDocsAsync(string query)
+{
+    Console.WriteLine($" [Searching Learn MCP Server for: \"{query}\"]");
+
+    // MCP uses JSON-RPC over streamable HTTP
+    var requestBody = JsonSerializer.Serialize(new
+    {
+        jsonrpc = "2.0",
+        id = 1,
+        method = "tools/call",
+        @params = new
+        {
+            name = "microsoft_docs_search",
+            arguments = new { query }
+        }
+    });
+
+    var request = new HttpRequestMessage(HttpMethod.Post, McpEndpoint)
+    {
+        Content = new StringContent(requestBody, System.Text.Encoding.UTF8, "application/json")
+    };
+    request.Headers.Accept.ParseAdd("application/json");
+    request.Headers.Accept.ParseAdd("text/event-stream");
+
+    var response = await httpClient.SendAsync(request, ct);
+    if (!response.IsSuccessStatusCode)
+    {
+        return JsonSerializer.Serialize(new { error = $"MCP request failed: {(int)response.StatusCode} {response.ReasonPhrase}" });
+    }
+
+    var contentType = response.Content.Headers.ContentType?.MediaType ?? "";
+    var body = await response.Content.ReadAsStringAsync(ct);
+
+    // Handle SSE/streaming response
+    if (contentType.Contains("text/event-stream"))
+    {
+        foreach (var line in body.Split('\n'))
+        {
+            if (line.StartsWith("data: "))
+            {
+                try
+                {
+                    using var doc = JsonDocument.Parse(line[6..]);
+                    if (doc.RootElement.TryGetProperty("result", out var result))
+                    {
+                        return FormatSearchResults(result);
+                    }
+                }
+                catch { /* skip non-JSON lines */ }
+            }
+        }
+        return JsonSerializer.Serialize(new { error = "No result found in SSE response" });
+    }
+
+    // Handle direct JSON response
+    using var jsonDoc = JsonDocument.Parse(body);
+    if (jsonDoc.RootElement.TryGetProperty("result", out var directResult))
+    {
+        return FormatSearchResults(directResult);
+    }
+    return JsonSerializer.Serialize(new { error = "Unexpected response format" });
+}
+
+string FormatSearchResults(JsonElement result)
+{
+    // MCP tool results come as content arrays
+    var results = new List<string>();
+
+    if (result.TryGetProperty("content", out var content)
+        && content.ValueKind == JsonValueKind.Array)
+    {
+        foreach (var item in content.EnumerateArray())
+        {
+            if (!item.TryGetProperty("type", out var typeEl)
+                || typeEl.GetString() != "text"
+                || !item.TryGetProperty("text", out var textEl))
+            {
+                continue;
+            }
+
+            var text = textEl.GetString() ?? "";
+            try
+            {
+                using var parsed = JsonDocument.Parse(text);
+                if (parsed.RootElement.TryGetProperty("results", out var searchResults)
+                    && searchResults.ValueKind == JsonValueKind.Array)
+                {
+                    var count = 0;
+                    foreach (var r in searchResults.EnumerateArray())
+                    {
+                        if (count++ >= 3) break;
+                        var title = r.TryGetProperty("title", out var titleEl)
+                            ? titleEl.GetString() ?? "" : "";
+                        var entry = $"## {title}";
+                        if (r.TryGetProperty("contentUrl", out var url))
+                            entry += $"\nSource: {url.GetString()}";
+                        if (r.TryGetProperty("content", out var contentEl))
+                            entry += $"\n{contentEl.GetString()}";
+                        results.Add(entry);
+                    }
+                    continue;
+                }
+            }
+            catch { /* not JSON, use as-is */ }
+            results.Add(text);
+        }
+    }
+
+    if (results.Count == 0)
+    {
+        return JsonSerializer.Serialize(new { message = "No documentation found for this query." });
+    }
+
+    // Truncate to ~2000 chars to fit in model context window
+    var combined = string.Join("\n\n---\n\n", results);
+    if (combined.Length > 2000)
+    {
+        combined = combined[..2000] + "\n\n[Truncated]";
+    }
+
+    return JsonSerializer.Serialize(new
+    {
+        documentation = combined,
+        source = "Microsoft Learn (learn.microsoft.com)"
+    });
+}
+
+async Task<string> ExecuteToolAsync(string functionName, JsonElement arguments)
+{
+    switch (functionName)
+    {
+        case "search_docs":
+            var query = arguments.GetProperty("query").GetString() ?? "";
+            return await SearchDocsAsync(query);
+
+        default:
+            return JsonSerializer.Serialize(new
+            {
+                error = $"Unknown function: {functionName}"
+            });
+    }
+}
+
+// --- Main application ---
+var config = new Configuration
+{
+    AppName = "learn_doc_assistant",
+    LogLevel = Microsoft.AI.Foundry.Local.LogLevel.Information
+};
+
+using var loggerFactory = LoggerFactory.Create(builder =>
+{
+    builder.SetMinimumLevel(
+        Microsoft.Extensions.Logging.LogLevel.Information
+    );
+});
+var logger = loggerFactory.CreateLogger<Program>();
+
+await FoundryLocalManager.CreateAsync(config, logger);
+var mgr = FoundryLocalManager.Instance;
+
+// Download and register all execution providers.
+var currentEp = "";
+await mgr.DownloadAndRegisterEpsAsync((epName, percent) =>
+{
+    if (epName != currentEp)
+    {
+        if (currentEp != "") Console.WriteLine();
+        currentEp = epName;
+    }
+    Console.Write($"\r {epName.PadRight(30)} {percent,6:F1}%");
+});
+if (currentEp != "") Console.WriteLine();
+
+var catalog = await mgr.GetCatalogAsync();
+var model = await catalog.GetModelAsync("phi-4-mini")
+    ?? throw new Exception("Model not found");
+
+await model.DownloadAsync(progress =>
+{
+    Console.Write($"\rDownloading model: {progress:F2}%");
+    if (progress >= 100f) Console.WriteLine();
+});
+
+await model.LoadAsync();
+Console.WriteLine("Model loaded and ready.");
+
+var chatClient = await model.GetChatClientAsync();
+chatClient.Settings.ToolChoice = ToolChoice.Required;
+
+var messages = new List<ChatMessage>
+{
+    new ChatMessage
+    {
+        Role = "system",
+        Content = "You are a Microsoft Learn documentation assistant. " +
+            "You MUST ALWAYS call the search_docs tool before answering ANY question. " +
+            "NEVER answer from your own knowledge. " +
+            "If the user asks about any Microsoft product, service, or technology, call search_docs first. " +
+            "Base your answer ONLY on the documentation returned by the tool. " +
+            "Include source URLs when available."
+    }
+};
+
+Console.WriteLine("\nLearn Doc Assistant ready! Ask about any Microsoft product or technology.");
+Console.WriteLine("Type 'quit' to exit.\n");
+
+while (true)
+{
+    Console.Write("You: ");
+    var userInput = Console.ReadLine();
+    if (string.IsNullOrWhiteSpace(userInput) ||
+        userInput.Equals("quit", StringComparison.OrdinalIgnoreCase) ||
+        userInput.Equals("exit", StringComparison.OrdinalIgnoreCase))
+    {
+        break;
+    }
+
+    messages.Add(new ChatMessage
+    {
+        Role = "user",
+        Content = userInput
+    });
+
+    var response = await chatClient.CompleteChatAsync(
+        messages, tools, ct
+    );
+
+    var choice = response.Choices[0].Message;
+
+    // Tool-calling loop: keep processing until the model produces a final answer
+    while (choice.ToolCalls is { Count: > 0 })
+    {
+        messages.Add(choice);
+
+        foreach (var toolCall in choice.ToolCalls)
+        {
+            var funcCall = toolCall.FunctionCall;
+            var toolArgs = JsonDocument.Parse(
+                funcCall?.Arguments ?? "{}"
+            ).RootElement;
+            var funcName = funcCall?.Name ?? "unknown";
+            Console.WriteLine(
+                $" Tool call: {funcName}({toolArgs})"
+            );
+
+            var result = await ExecuteToolAsync(funcName, toolArgs);
+            messages.Add(new ChatMessage
+            {
+                Role = "tool",
+                ToolCallId = toolCall.Id,
+                Content = result
+            });
+        }
+
+        // Let model answer naturally on follow-up (don't force tool_choice)
+        var savedToolChoice = chatClient.Settings.ToolChoice;
+        chatClient.Settings.ToolChoice = ToolChoice.Auto;
+        response = await chatClient.CompleteChatAsync(
+            messages, tools, ct
+        );
+        chatClient.Settings.ToolChoice = savedToolChoice;
+        choice = response.Choices[0].Message;
+    }
+
+    var answer = choice.Content ?? "";
+    messages.Add(new ChatMessage
+    {
+        Role = "assistant",
+        Content = answer
+    });
+    Console.WriteLine($"\nAssistant: {answer}\n");
+}
+
+await model.UnloadAsync();
+Console.WriteLine("Model unloaded. Goodbye!");
diff --git a/samples/js/README.md b/samples/js/README.md
index 28f1e7e7..16faa6af 100644
--- a/samples/js/README.md
+++ b/samples/js/README.md
@@ -17,6 +17,7 @@ These samples demonstrate how to use the Foundry Local JavaScript SDK (`foundry-
| [copilot-sdk-foundry-local](copilot-sdk-foundry-local/) | GitHub Copilot SDK integration with Foundry Local for agentic AI workflows. |
| [langchain-integration-example](langchain-integration-example/) | LangChain.js integration for building text generation chains. |
| [tool-calling-foundry-local](tool-calling-foundry-local/) | Tool calling with custom function definitions and streaming responses. |
+| [learn-mcp-tool-calling](learn-mcp-tool-calling/) | Create an assistant that calls [Learn MCP Server](https://learn.microsoft.com/training/support/mcp) to ground answers. |
| [web-server-example](web-server-example/) | Start a local OpenAI-compatible web server and call it with the OpenAI SDK. |
| [tutorial-chat-assistant](tutorial-chat-assistant/) | Build an interactive multi-turn chat assistant (tutorial). |
| [tutorial-document-summarizer](tutorial-document-summarizer/) | Summarize documents with AI (tutorial). |
diff --git a/samples/js/learn-mcp-tool-calling/app.js b/samples/js/learn-mcp-tool-calling/app.js
new file mode 100644
index 00000000..c58e31f8
--- /dev/null
+++ b/samples/js/learn-mcp-tool-calling/app.js
@@ -0,0 +1,244 @@
+// Foundry Local + Learn MCP Server: Local AI Doc Assistant
+// Uses Foundry Local for on-device inference and Learn MCP Server for doc retrieval.
+
+import { FoundryLocalManager } from 'foundry-local-sdk';
+import * as readline from 'readline';
+
+// --- MCP endpoint ---
+const MCP_ENDPOINT = 'https://learn.microsoft.com/api/mcp';
+
+// --- Tool definitions (OpenAI function-calling schema) ---
+const tools = [ // OpenAI-style function schema: one tool with a single required string param
+  {
+    type: 'function',
+    function: {
+      name: 'search_docs',
+      description: 'Search Microsoft Learn documentation for a given query. Returns relevant documentation content with titles and URLs. Use this tool whenever the user asks about a Microsoft product, service, SDK, API, or technology.',
+      parameters: {
+        type: 'object',
+        properties: {
+          query: {
+            type: 'string',
+            description: 'The search query about a Microsoft product or technology'
+          }
+        },
+        required: ['query']
+      }
+    }
+  }
+];
+
+// --- Tool implementation: call Learn MCP Server ---
+async function searchDocs(query) { // returns a plain object: {documentation, source}, {message}, or {error}
+  console.log(` [Searching Learn MCP Server for: "${query}"]`);
+
+  // MCP uses JSON-RPC over streamable HTTP
+  const response = await fetch(MCP_ENDPOINT, {
+    method: 'POST',
+    headers: {
+      'Content-Type': 'application/json',
+      'Accept': 'application/json, text/event-stream'
+    },
+    body: JSON.stringify({
+      jsonrpc: '2.0',
+      id: 1,
+      method: 'tools/call',
+      params: {
+        name: 'microsoft_docs_search', // the Learn MCP Server's documentation-search tool
+        arguments: { query }
+      }
+    })
+  });
+
+  if (!response.ok) {
+    return { error: `MCP request failed: ${response.status} ${response.statusText}` };
+  }
+
+  const contentType = response.headers.get('content-type') || '';
+
+  // Handle SSE/streaming response
+  if (contentType.includes('text/event-stream')) {
+    const text = await response.text();
+    const lines = text.split('\n');
+    for (const line of lines) {
+      if (line.startsWith('data: ')) {
+        try {
+          const data = JSON.parse(line.slice(6)); // strip the "data: " prefix
+          if (data.result) {
+            return formatSearchResults(data.result);
+          }
+        } catch { /* skip non-JSON lines */ }
+      }
+    }
+    return { error: 'No result found in SSE response' };
+  }
+
+  // Handle direct JSON response
+  const data = await response.json();
+  if (data.result) {
+    return formatSearchResults(data.result);
+  }
+  return { error: 'Unexpected response format', raw: JSON.stringify(data).slice(0, 500) };
+}
+
+function formatSearchResults(result) { // flattens an MCP content array into {documentation, source}
+  // MCP tool results come as content arrays
+  const content = result.content || [];
+  const results = [];
+
+  for (const item of content) {
+    if (item.type === 'text') {
+      // The text may be a JSON string containing search results
+      try {
+        const parsed = JSON.parse(item.text);
+        if (parsed.results && Array.isArray(parsed.results)) {
+          for (const r of parsed.results.slice(0, 3)) { // keep at most the top 3 hits
+            let entry = `## ${r.title}`;
+            if (r.contentUrl) entry += `\nSource: ${r.contentUrl}`;
+            entry += `\n${r.content}`; // NOTE(review): assumes r.content is always present — confirm schema
+            results.push(entry);
+          }
+          continue;
+        }
+      } catch { /* not JSON, use as-is */ }
+      results.push(item.text);
+    }
+  }
+
+  if (results.length === 0) {
+    return { message: 'No documentation found for this query.' };
+  }
+
+  // Truncate to ~2000 chars to fit in model context window
+  let combined = results.join('\n\n---\n\n');
+  if (combined.length > 2000) {
+    combined = combined.slice(0, 2000) + '\n\n[Truncated]';
+  }
+
+  return {
+    documentation: combined,
+    source: 'Microsoft Learn (learn.microsoft.com)'
+  };
+}
+
+const toolFunctions = { // dispatch table: tool name -> async implementation
+  search_docs: async (args) => searchDocs(args.query)
+};
+
+// --- Tool-calling loop ---
+async function processToolCalls(messages, response, chatClient) { // drives the tool-call loop; returns the final answer text
+  let choice = response.choices[0]?.message;
+
+  while (choice?.tool_calls?.length > 0) {
+    messages.push(choice); // echo the assistant's tool-call message back into history
+
+    for (const toolCall of choice.tool_calls) {
+      const functionName = toolCall.function.name;
+      const args = JSON.parse(toolCall.function.arguments);
+      console.log(` Tool call: ${functionName}(${JSON.stringify(args)})`);
+
+      const fn = toolFunctions[functionName];
+      if (!fn) {
+        messages.push({
+          role: 'tool',
+          tool_call_id: toolCall.id,
+          content: JSON.stringify({ error: `Unknown tool: ${functionName}` })
+        });
+        continue;
+      }
+
+      const result = await fn(args);
+      messages.push({
+        role: 'tool',
+        tool_call_id: toolCall.id,
+        content: JSON.stringify(result)
+      });
+    }
+
+    // Let model answer naturally on follow-up (don't force tool_choice)
+    const savedToolChoice = chatClient.settings.toolChoice;
+    chatClient.settings.toolChoice = undefined; // undefined = SDK default — presumably "auto"; verify
+    response = await chatClient.completeChat(messages, tools);
+    chatClient.settings.toolChoice = savedToolChoice;
+    choice = response.choices[0]?.message;
+  }
+
+  return choice?.content ?? '';
+}
+
+// --- Main application ---
+const manager = FoundryLocalManager.create({ // NOTE(review): not awaited, unlike the C# sample's CreateAsync — confirm create() is synchronous in this SDK
+  appName: 'learn_doc_assistant',
+  logLevel: 'info'
+});
+
+let currentEp = '';
+await manager.downloadAndRegisterEps((epName, percent) => { // progress callback: one output line per execution provider
+  if (epName !== currentEp) {
+    if (currentEp !== '') process.stdout.write('\n');
+    currentEp = epName;
+  }
+  process.stdout.write(`\r ${epName.padEnd(30)} ${percent.toFixed(1).padStart(5)}%`);
+});
+if (currentEp !== '') process.stdout.write('\n');
+
+const model = await manager.catalog.getModel('phi-4-mini');
+
+await model.download((progress) => {
+  process.stdout.write(`\rDownloading model: ${progress.toFixed(2)}%`);
+});
+console.log('\nModel downloaded.');
+
+await model.load();
+console.log('Model loaded and ready.');
+
+const chatClient = model.createChatClient();
+chatClient.settings.toolChoice = { type: 'required' }; // force a search_docs call on the first turn
+
+const messages = [
+  {
+    role: 'system',
+    content:
+      'You are a Microsoft Learn documentation assistant. ' +
+      'You MUST ALWAYS call the search_docs tool before answering ANY question. ' +
+      'NEVER answer from your own knowledge. ' +
+      'If the user asks about any Microsoft product, service, or technology, call search_docs first. ' +
+      'Base your answer ONLY on the documentation returned by the tool. ' +
+      'Include source URLs when available.'
+  }
+];
+
+const rl = readline.createInterface({
+  input: process.stdin,
+  output: process.stdout
+});
+
+const askQuestion = (prompt) =>
+  new Promise((resolve) => rl.question(prompt, resolve));
+
+console.log(
+  '\nLearn Doc Assistant ready! Ask about any Microsoft product or technology.'
+);
+console.log('Type \'quit\' to exit.\n');
+
+while (true) {
+  const userInput = await askQuestion('You: ');
+  if (
+    userInput.trim().toLowerCase() === 'quit' ||
+    userInput.trim().toLowerCase() === 'exit'
+  ) {
+    break;
+  }
+
+  messages.push({ role: 'user', content: userInput });
+
+  const response = await chatClient.completeChat(messages, tools);
+  const answer = await processToolCalls(messages, response, chatClient);
+
+  messages.push({ role: 'assistant', content: answer });
+  console.log(`\nAssistant: ${answer}\n`);
+}
+
+await model.unload();
+console.log('Model unloaded. Goodbye!');
+rl.close();
diff --git a/samples/js/learn-mcp-tool-calling/package.json b/samples/js/learn-mcp-tool-calling/package.json
new file mode 100644
index 00000000..b58458cf
--- /dev/null
+++ b/samples/js/learn-mcp-tool-calling/package.json
@@ -0,0 +1,15 @@
+{
+ "name": "learn-mcp-tool-calling",
+ "version": "1.0.0",
+ "type": "module",
+ "main": "app.js",
+ "scripts": {
+ "start": "node app.js"
+ },
+ "dependencies": {
+ "foundry-local-sdk": "latest"
+ },
+ "optionalDependencies": {
+ "foundry-local-sdk-winml": "latest"
+ }
+}