Skip to content
Draft
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
8 changes: 6 additions & 2 deletions examples/client/package.json
Original file line number Diff line number Diff line change
Expand Up @@ -29,20 +29,24 @@
"lint": "eslint src/ && prettier --ignore-path ../../.prettierignore --check .",
"lint:fix": "eslint src/ --fix && prettier --ignore-path ../../.prettierignore --write .",
"check": "npm run typecheck && npm run lint",
"test": "vitest",
"start": "npm run server",
"server": "tsx watch --clear-screen=false scripts/cli.ts server",
"client": "tsx scripts/cli.ts client"
},
"dependencies": {
"@modelcontextprotocol/client": "workspace:^",
"ajv": "catalog:runtimeShared",
"dotenv": "^17.2.3",
"zod": "catalog:runtimeShared"
},
"devDependencies": {
"@modelcontextprotocol/eslint-config": "workspace:^",
"@modelcontextprotocol/examples-shared": "workspace:^",
"@modelcontextprotocol/server": "workspace:^",
"@modelcontextprotocol/tsconfig": "workspace:^",
"@modelcontextprotocol/eslint-config": "workspace:^",
"@modelcontextprotocol/vitest-config": "workspace:^",
"tsdown": "catalog:devTools"
"tsdown": "catalog:devTools",
"vitest": "catalog:devTools"
}
}
76 changes: 76 additions & 0 deletions examples/client/src/simple-chatbot/ChatSession.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,76 @@
import type { LLMClient } from './LLMClient.js';
import type { Server } from './Server.js';
import type { Tool } from './Tool.js';
import { buildSystemPrompt } from './prompts.js';

/** Orchestrates the interaction between user, LLM, and tools. */
export class ChatSession {
  // Tools aggregated from every connected server; populated by start().
  public availableTools: Tool[] = [];

  constructor(
    public readonly servers: Server[],
    public readonly llmClient: LLMClient
  ) {}

  /**
   * Clean up all servers properly, last-initialized first.
   * Failures are logged, never thrown, so one bad server cannot block
   * teardown of the others (mirrors the Python reference implementation).
   */
  async cleanupServers(): Promise<void> {
    for (const server of [...this.servers].reverse()) {
      try {
        await server.cleanup();
      } catch (e) {
        console.warn(`Warning during final cleanup: ${e}`);
      }
    }
  }

  /**
   * Process the LLM response and execute tools if needed.
   *
   * If the response parses as a JSON object of the form
   * `{"tool": ..., "arguments": ...}` it is routed to the first server that
   * advertises that tool; otherwise the response is returned unchanged
   * (a plain natural-language answer).
   */
  async processLlmResponse(llmResponse: string): Promise<string> {
    let parsed: unknown;
    try {
      parsed = JSON.parse(llmResponse);
    } catch {
      // Not JSON — pass the plain answer straight through.
      return llmResponse;
    }
    if (
      typeof parsed !== 'object' ||
      parsed === null ||
      !('tool' in parsed) ||
      !('arguments' in parsed)
    ) {
      return llmResponse;
    }
    const toolCall = parsed as { tool: string; arguments: Record<string, unknown> };
    for (const server of this.servers) {
      const tools = await server.listTools();
      if (tools.some((t) => t.name === toolCall.tool)) {
        try {
          const result = await server.executeTool(toolCall.tool, toolCall.arguments);
          return `Tool execution result: ${JSON.stringify(result)}`;
        } catch (e) {
          return `Error executing tool: ${e}`;
        }
      }
    }
    return `No server found with tool: ${toolCall.tool}`;
  }

  /** Main chat session handler. */
  async start(): Promise<void> {
    // 1. Initialize all servers sequentially; abort (with cleanup) on failure.
    for (const server of this.servers) {
      try {
        await server.initialize();
      } catch (e) {
        console.error(`Failed to initialize server: ${e}`);
        await this.cleanupServers();
        return;
      }
    }

    // 2. Aggregate the tools exposed by every server.
    const allTools: Tool[] = [];
    for (const server of this.servers) {
      const tools = await server.listTools();
      if (Array.isArray(tools) && tools.length) {
        allTools.push(...tools);
      }
    }
    this.availableTools = allTools;

    // 3. Build the system prompt that describes every available tool.
    const toolsDescription = allTools.map((tool) => tool.formatForLlm()).join("\n");
    const systemMessage = buildSystemPrompt(toolsDescription);
    console.log("system message", systemMessage);

    // TODO: enter the chat loop (read user input -> llmClient.getResponse ->
    // processLlmResponse), wrapped in try/finally so cleanupServers() always
    // runs — see the Python reference this port follows.
  }
}
82 changes: 82 additions & 0 deletions examples/client/src/simple-chatbot/Configuration.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,82 @@
import * as fs from 'node:fs';

import { config } from 'dotenv';
import { z } from 'zod';

// Zod schema for the chatbot's server configuration file: a map from
// server name to the stdio command used to spawn that MCP server.
// NOTE(review): single-argument z.record assumes zod v3 semantics
// (string keys) — TODO confirm against the installed zod version.
const McpServersConfigSchema = z.object({
mcpServers: z.record(z.object({
command: z.string(), // executable to launch
args: z.array(z.string()), // argv passed to the executable
env: z.record(z.string()).optional(), // extra environment variables, if any
})),
});

// Static type inferred from the schema; shared with Server.ts.
export type McpServersConfig = z.infer<typeof McpServersConfigSchema>;

/** Manages configuration and environment variables for the MCP client. */
export class Configuration {
  // LLM API key captured from the environment at construction time.
  private readonly apiKey?: string;

  constructor() {
    Configuration.loadEnv();
    this.apiKey = process.env["LLM_API_KEY"];
  }

  /** Load environment variables from a .env file into process.env. */
  static loadEnv(): void {
    config();
  }

  /**
   * Load and validate server configuration from a JSON file.
   *
   * @param filePath - Path to the JSON configuration file.
   * @returns The parsed, schema-validated configuration.
   * @throws If the file cannot be read, is invalid JSON, or fails schema validation.
   */
  static loadConfig(filePath: string): McpServersConfig {
    const parsed: unknown = JSON.parse(fs.readFileSync(filePath, 'utf-8'));
    return McpServersConfigSchema.parse(parsed);
  }

  /**
   * The LLM API key.
   *
   * @throws Error if LLM_API_KEY was not set in the environment.
   */
  get llmApiKey(): string {
    if (!this.apiKey) throw new Error("LLM_API_KEY not found in environment variables");
    return this.apiKey;
  }
}


// class Configuration:
// """Manages configuration and environment variables for the MCP client."""

// def __init__(self) -> None:
// """Initialize configuration with environment variables."""
// self.load_env()
// self.api_key = os.getenv("LLM_API_KEY")

// @staticmethod
// def load_env() -> None:
// """Load environment variables from .env file."""
// load_dotenv()

// @staticmethod
// def load_config(file_path: str) -> dict[str, Any]:
// """Load server configuration from JSON file.

// Args:
// file_path: Path to the JSON configuration file.

// Returns:
// Dict containing server configuration.

// Raises:
// FileNotFoundError: If configuration file doesn't exist.
// JSONDecodeError: If configuration file is invalid JSON.
// """
// with open(file_path, "r") as f:
// return json.load(f)

// @property
// def llm_api_key(self) -> str:
// """Get the LLM API key.

// Returns:
// The API key as a string.

// Raises:
// ValueError: If the API key is not found in environment variables.
// """
// if not self.api_key:
// raise ValueError("LLM_API_KEY not found in environment variables")
// return self.api_key
84 changes: 84 additions & 0 deletions examples/client/src/simple-chatbot/LLMClient.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,84 @@
// Minimal structural type for fetch responses so this file compiles
// without pulling in DOM lib typings.
interface ResponseLike {
  ok: boolean;
  status: number;
  statusText: string;
  json: () => Promise<unknown>;
  text: () => Promise<string>;
}

type FetchLike = (input: string, init?: unknown) => Promise<ResponseLike>;
declare const fetch: FetchLike;

type ChatRole = 'system' | 'user' | 'assistant';

export interface ChatMessage {
  role: ChatRole;
  content: string;
}

interface CompletionChoice {
  message: { role: ChatRole; content: string };
}

interface CompletionResponse {
  choices: CompletionChoice[];
}

/** Optional overrides for the LLM endpoint and sampling parameters. */
export interface LLMClientOptions {
  baseUrl?: string;
  model?: string;
  temperature?: number;
  maxTokens?: number;
  topP?: number;
  stream?: boolean;
}

/**
 * Thin client for an OpenAI-compatible chat-completions endpoint
 * (Groq by default). Errors are reported back to the caller as a
 * user-readable string rather than thrown, so the chat loop keeps running.
 */
export class LLMClient {
  private readonly apiKey: string;
  private readonly baseUrl: string;
  private readonly model: string;
  private readonly temperature: number;
  private readonly maxTokens: number;
  private readonly topP: number;
  private readonly stream: boolean;

  /**
   * @param apiKey - Bearer token for the endpoint.
   * @param options - Optional endpoint/model/sampling overrides; the
   *   defaults reproduce the previous hard-coded Groq configuration.
   */
  constructor(apiKey: string, options: LLMClientOptions = {}) {
    this.apiKey = apiKey;
    this.baseUrl = options.baseUrl ?? 'https://api.groq.com/openai/v1/chat/completions';
    this.model = options.model ?? 'meta-llama/llama-4-scout-17b-16e-instruct';
    this.temperature = options.temperature ?? 0.7;
    this.maxTokens = options.maxTokens ?? 4096;
    this.topP = options.topP ?? 1;
    this.stream = options.stream ?? false;
  }

  /**
   * Send the conversation to the LLM and return the assistant's reply.
   * On any failure (network, non-2xx, malformed body) logs the error and
   * returns an apologetic string instead of throwing.
   */
  async getResponse(messages: ChatMessage[]): Promise<string> {
    const payload = {
      messages,
      model: this.model,
      temperature: this.temperature,
      max_tokens: this.maxTokens,
      top_p: this.topP,
      stream: this.stream,
      stop: null,
    };

    try {
      const res = await fetch(this.baseUrl, {
        method: 'POST',
        headers: {
          'Content-Type': 'application/json',
          Authorization: `Bearer ${this.apiKey}`,
        },
        body: JSON.stringify(payload),
      });

      if (!res.ok) {
        // Include the response body for easier diagnosis of API errors.
        const body = await res.text();
        throw new Error(`LLM request failed: ${res.status} ${res.statusText} ${body}`);
      }

      const data = (await res.json()) as CompletionResponse;
      const content = data?.choices?.[0]?.message?.content;
      if (!content) {
        throw new Error('LLM response missing content');
      }
      return content;
    } catch (e: unknown) {
      const message = e instanceof Error ? e.message : String(e);
      console.error(`Error getting LLM response: ${message}`);
      return `I encountered an error: ${message}. Please try again or rephrase your request.`;
    }
  }
}
120 changes: 120 additions & 0 deletions examples/client/src/simple-chatbot/Server.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,120 @@
import { Client, StdioClientTransport } from '@modelcontextprotocol/client';

import type { McpServersConfig } from './Configuration.js';
import { Tool } from './Tool.js';

export type ServerConfigEntry = McpServersConfig['mcpServers'][string];

/**
 * Owns one MCP server subprocess: spawns it over stdio, lists its tools,
 * executes tool calls with retries, and tears everything down on cleanup.
 */
export class Server {
  public readonly name: string;
  private config: ServerConfigEntry;
  public client: Client | null = null;
  public childPid: number | null = null;
  public transport: StdioClientTransport | null = null;
  // Serializes teardown to prevent concurrent cleanup races
  private cleanupChain: Promise<void> = Promise.resolve();

  constructor(name: string, config: ServerConfigEntry) {
    this.name = name;
    this.config = config;
  }

  /**
   * Spawn the server process and connect an MCP client to it.
   * If the connect handshake fails, the transport is closed before
   * rethrowing — otherwise the already-spawned child process would leak.
   */
  async initialize(): Promise<void> {
    const transport = new StdioClientTransport({
      command: this.config.command,
      args: this.config.args ?? [],
      env: this.config.env,
    });

    const client = new Client({
      name: `multi-server-chatbot-${this.name}`,
      version: '1.0.0',
    });

    try {
      await client.connect(transport);
    } catch (e) {
      // Best-effort kill of the spawned child; surface the original error.
      try {
        await transport.close();
      } catch {
        // ignore secondary close failure
      }
      throw e;
    }

    this.transport = transport;
    this.client = client;
    this.childPid = transport.pid;
  }

  /**
   * List the tools exposed by this server, wrapped in our Tool class.
   * @throws if the server is not initialized or returns a malformed response.
   */
  async listTools(): Promise<Tool[]> {
    if (!this.client) {
      throw new Error(`Server ${this.name} not initialized`);
    }
    const toolsResponse = await this.client.listTools();
    if (!toolsResponse || !toolsResponse.tools || !Array.isArray(toolsResponse.tools)) {
      throw new Error(`Unexpected tools response from ${this.name}`);
    }
    return toolsResponse.tools.map((tool) =>
      new Tool({
        name: tool.name,
        description: tool.description ?? '',
        inputSchema: tool.inputSchema ?? {},
        title: tool.title ?? null,
        execution: tool.execution ?? null,
      })
    );
  }

  /**
   * Execute a tool on this server, retrying on failure.
   *
   * @param toolName - Name of the tool to call.
   * @param args - Arguments forwarded to the tool.
   * @param retries - Maximum number of attempts (default 2).
   * @param delay - Milliseconds to wait between attempts (default 1000).
   * @returns The tool-call result from the MCP client.
   * @throws The last error once all attempts are exhausted, or if the
   *   server is not initialized.
   */
  async executeTool(
    toolName: string,
    args: Record<string, unknown>,
    retries = 2,
    delay = 1000
  ) {
    if (!this.client) {
      throw new Error(`Server ${this.name} not initialized`);
    }
    let attempt = 0;
    while (attempt < retries) {
      console.info(`Server ${this.name}: executing tool ${toolName}, attempt ${attempt + 1} of ${retries}`);
      try {
        return await this.client.callTool({
          name: toolName,
          arguments: args
        });
      } catch (err) {
        attempt++;
        if (attempt >= retries) {
          throw err;
        }
        await new Promise((resolve) => setTimeout(resolve, delay));
      }
    }
    // Only reachable when retries <= 0, i.e. no attempt was ever made.
    throw new Error(`Failed to execute tool ${toolName}. Attempt ${attempt} of ${retries}`);
  }

  /**
   * Close the client and transport, releasing the child process.
   * Safe to call multiple times and from concurrent callers: calls are
   * chained so teardown never races with itself. Errors are logged,
   * not thrown.
   */
  async cleanup(): Promise<void> {
    this.cleanupChain = this.cleanupChain.then(async () => {
      let error: unknown;
      if (!(this.client || this.transport)) return;

      if (this.client) {
        try {
          await this.client.close();
        } catch (e) {
          // Keep only the first failure; later ones are less interesting.
          error ??= e;
        }
        this.client = null;
      }

      if (this.transport) {
        try {
          await this.transport.close();
        } catch (e) {
          error ??= e;
        }
        this.transport = null;
      }

      this.childPid = null;

      if (error) {
        console.error(`Error during cleanup of server ${this.name}:`, error);
      }
    });

    return this.cleanupChain;
  }
}
Loading
Loading