From 630fe5f21a49cc50a4095a14d1bd7f6713559e86 Mon Sep 17 00:00:00 2001
From: Ola Hungerford
Date: Sun, 15 Jun 2025 23:28:54 -0700
Subject: [PATCH] Add line limits and env variable configs

---
 src/filesystem/README.md | 16 ++++++++++++++++
 src/filesystem/index.ts  | 27 ++++++++++++++++++++-------
 2 files changed, 36 insertions(+), 7 deletions(-)

diff --git a/src/filesystem/README.md b/src/filesystem/README.md
index d1621d1ef3..183b59dabd 100644
--- a/src/filesystem/README.md
+++ b/src/filesystem/README.md
@@ -12,6 +12,22 @@ Node.js server implementing Model Context Protocol (MCP) for filesystem operatio
 
 **Note**: The server will only allow operations within directories specified via `args`.
 
+## Environment Variables
+
+- `MCP_FILESYSTEM_MAX_LINES`: Maximum number of lines for head/tail operations (default: 5000)
+- `MCP_FILESYSTEM_CHUNK_SIZE`: Chunk size in bytes for file reading operations (default: 1024)
+
+### Example Usage with Environment Variables
+
+```bash
+# Set custom limits
+export MCP_FILESYSTEM_MAX_LINES=10000
+export MCP_FILESYSTEM_CHUNK_SIZE=2048
+
+# Run the server
+npx @modelcontextprotocol/server-filesystem /path/to/directory
+```
+
 ## API
 
 ### Resources
diff --git a/src/filesystem/index.ts b/src/filesystem/index.ts
index 00b8782f11..88c9d70b86 100644
--- a/src/filesystem/index.ts
+++ b/src/filesystem/index.ts
@@ -12,9 +12,19 @@ import path from "path";
 import os from 'os';
 import { z } from "zod";
 import { zodToJsonSchema } from "zod-to-json-schema";
-import { diffLines, createTwoFilesPatch } from 'diff';
+import { createTwoFilesPatch } from 'diff';
 import { minimatch } from 'minimatch';
 
+// Constants and config
+const DEFAULT_MAX_LINES = 5000;
+const DEFAULT_CHUNK_SIZE = 1024;
+const config = {
+  maxLines: process.env.MCP_FILESYSTEM_MAX_LINES ?
+    parseInt(process.env.MCP_FILESYSTEM_MAX_LINES, 10) : DEFAULT_MAX_LINES,
+  chunkSize: process.env.MCP_FILESYSTEM_CHUNK_SIZE ?
+    parseInt(process.env.MCP_FILESYSTEM_CHUNK_SIZE, 10) : DEFAULT_CHUNK_SIZE
+};
+
 // Command line argument parsing
 const args = process.argv.slice(2);
 if (args.length === 0) {
@@ -97,8 +107,12 @@ async function validatePath(requestedPath: string): Promise<string> {
 // Schema definitions
 const ReadFileArgsSchema = z.object({
   path: z.string(),
-  tail: z.number().optional().describe('If provided, returns only the last N lines of the file'),
-  head: z.number().optional().describe('If provided, returns only the first N lines of the file')
+  tail: z.number().optional()
+    .describe('If provided, returns only the last N lines of the file')
+    .refine(n => n === undefined || n <= config.maxLines, `Maximum of ${config.maxLines} lines allowed`),
+  head: z.number().optional()
+    .describe('If provided, returns only the first N lines of the file')
+    .refine(n => n === undefined || n <= config.maxLines, `Maximum of ${config.maxLines} lines allowed`)
 });
 
 const ReadMultipleFilesArgsSchema = z.object({
@@ -350,7 +364,6 @@ function formatSize(bytes: number): string {
 
 // Memory-efficient implementation to get the last N lines of a file
 async function tailFile(filePath: string, numLines: number): Promise<string> {
-  const CHUNK_SIZE = 1024; // Read 1KB at a time
   const stats = await fs.stat(filePath);
   const fileSize = stats.size;
 
@@ -361,13 +374,13 @@ async function tailFile(filePath: string, numLines: number): Promise<string> {
   try {
     const lines: string[] = [];
     let position = fileSize;
-    let chunk = Buffer.alloc(CHUNK_SIZE);
+    let chunk = Buffer.alloc(config.chunkSize);
     let linesFound = 0;
     let remainingText = '';
 
     // Read chunks from the end of the file until we have enough lines
     while (position > 0 && linesFound < numLines) {
-      const size = Math.min(CHUNK_SIZE, position);
+      const size = Math.min(config.chunkSize, position);
       position -= size;
 
       const { bytesRead } = await fileHandle.read(chunk, 0, size, position);
@@ -407,7 +420,7 @@ async function headFile(filePath: string, numLines: number): Promise<string> {
     const lines: string[] = [];
     let buffer = '';
     let bytesRead = 0;
-    const chunk = Buffer.alloc(1024); // 1KB buffer
+    const chunk = Buffer.alloc(config.chunkSize);
 
     // Read chunks and count lines until we have enough or reach EOF
     while (lines.length < numLines) {
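For reference, a minimal sketch (outside the patch) of how the environment-variable fallback above behaves. `readIntEnv` is a hypothetical helper, not code from this change; it adds a `Number.isFinite` guard because `parseInt` returns `NaN` for non-numeric input, which the plain ternary in the patched `config` would otherwise store.

```ts
// Illustrative sketch only — readIntEnv is a hypothetical helper, not part of the patch.
// It applies the same default fallback as the patched config object, plus a guard
// against non-numeric or non-positive values (parseInt("abc", 10) === NaN).
function readIntEnv(name: string, fallback: number): number {
  const raw = process.env[name];
  const parsed = raw !== undefined ? parseInt(raw, 10) : NaN;
  return Number.isFinite(parsed) && parsed > 0 ? parsed : fallback;
}

const config = {
  maxLines: readIntEnv("MCP_FILESYSTEM_MAX_LINES", 5000),
  chunkSize: readIntEnv("MCP_FILESYSTEM_CHUNK_SIZE", 1024),
};

// Example: MCP_FILESYSTEM_MAX_LINES=10000 node sketch.js
// -> { maxLines: 10000, chunkSize: 1024 }
console.log(config);
```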