diff --git a/src/filesystem/README.md b/src/filesystem/README.md
index 6a329004f7..15060d781c 100644
--- a/src/filesystem/README.md
+++ b/src/filesystem/README.md
@@ -62,6 +62,22 @@ The server's directory access control follows this flow:
 
+## Environment Variables
+
+- `MCP_FILESYSTEM_MAX_LINES`: Maximum number of lines for head/tail operations (default: 5000)
+- `MCP_FILESYSTEM_CHUNK_SIZE`: Chunk size in bytes for file reading operations (default: 1024)
+
+### Example Usage with Environment Variables
+
+```bash
+# Set custom limits
+export MCP_FILESYSTEM_MAX_LINES=10000
+export MCP_FILESYSTEM_CHUNK_SIZE=2048
+
+# Run the server
+npx @modelcontextprotocol/server-filesystem /path/to/directory
+```
+
 ## API
 
 ### Resources
diff --git a/src/filesystem/index.ts b/src/filesystem/index.ts
index 6723f43600..1d753d47cb 100644
--- a/src/filesystem/index.ts
+++ b/src/filesystem/index.ts
@@ -16,11 +16,21 @@ import os from 'os';
 import { randomBytes } from 'crypto';
 import { z } from "zod";
 import { zodToJsonSchema } from "zod-to-json-schema";
-import { diffLines, createTwoFilesPatch } from 'diff';
+import { createTwoFilesPatch } from 'diff';
 import { minimatch } from 'minimatch';
 import { isPathWithinAllowedDirectories } from './path-validation.js';
 import { getValidRootDirectories } from './roots-utils.js';
 
+// Constants and config
+const DEFAULT_MAX_LINES = 5000;
+const DEFAULT_CHUNK_SIZE = 1024;
+const config = {
+  maxLines: process.env.MCP_FILESYSTEM_MAX_LINES ?
+    parseInt(process.env.MCP_FILESYSTEM_MAX_LINES, 10) : DEFAULT_MAX_LINES,
+  chunkSize: process.env.MCP_FILESYSTEM_CHUNK_SIZE ?
+    parseInt(process.env.MCP_FILESYSTEM_CHUNK_SIZE, 10) : DEFAULT_CHUNK_SIZE
+};
+
 // Command line argument parsing
 const args = process.argv.slice(2);
 if (args.length === 0) {
@@ -119,8 +129,12 @@ async function validatePath(requestedPath: string): Promise<string> {
 // Schema definitions
 const ReadTextFileArgsSchema = z.object({
   path: z.string(),
-  tail: z.number().optional().describe('If provided, returns only the last N lines of the file'),
-  head: z.number().optional().describe('If provided, returns only the first N lines of the file')
+  tail: z.number().optional()
+    .describe('If provided, returns only the last N lines of the file')
+    .refine(n => n === undefined || n <= config.maxLines, `Maximum of ${config.maxLines} lines allowed`),
+  head: z.number().optional()
+    .describe('If provided, returns only the first N lines of the file')
+    .refine(n => n === undefined || n <= config.maxLines, `Maximum of ${config.maxLines} lines allowed`)
 });
 
 const ReadMediaFileArgsSchema = z.object({
@@ -388,7 +402,6 @@ function formatSize(bytes: number): string {
 
 // Memory-efficient implementation to get the last N lines of a file
 async function tailFile(filePath: string, numLines: number): Promise<string> {
-  const CHUNK_SIZE = 1024; // Read 1KB at a time
   const stats = await fs.stat(filePath);
   const fileSize = stats.size;
 
@@ -399,13 +412,13 @@ async function tailFile(filePath: string, numLines: number): Promise<string> {
   try {
     const lines: string[] = [];
     let position = fileSize;
-    let chunk = Buffer.alloc(CHUNK_SIZE);
+    let chunk = Buffer.alloc(config.chunkSize);
    let linesFound = 0;
     let remainingText = '';
 
     // Read chunks from the end of the file until we have enough lines
     while (position > 0 && linesFound < numLines) {
-      const size = Math.min(CHUNK_SIZE, position);
+      const size = Math.min(config.chunkSize, position);
       position -= size;
 
       const { bytesRead } = await fileHandle.read(chunk, 0, size, position);
@@ -445,8 +458,8 @@ async function headFile(filePath: string, numLines: number): Promise<string> {
     const lines: string[] = [];
     let buffer = '';
     let bytesRead = 0;
-    const chunk = Buffer.alloc(1024); // 1KB buffer
-
+    const chunk = Buffer.alloc(config.chunkSize);
+
     // Read chunks and count lines until we have enough or reach EOF
     while (lines.length < numLines) {
       const result = await fileHandle.read(chunk, 0, chunk.length, bytesRead);
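
One note on the config block introduced above: `parseInt` returns `NaN` for non-numeric values, and `NaN` or a non-positive number would then flow into `Buffer.alloc` and the `refine` checks unchanged. A minimal sketch of a stricter variant, assuming a hypothetical `envInt` helper and the same defaults; this is not part of the patch:

```typescript
// Hypothetical helper (not in this patch): read a positive integer from an
// environment variable, falling back to the default when the variable is
// unset or does not parse to a positive integer (e.g. parseInt yields NaN).
function envInt(name: string, fallback: number): number {
  const raw = process.env[name];
  if (raw === undefined) return fallback;
  const parsed = parseInt(raw, 10);
  return Number.isInteger(parsed) && parsed > 0 ? parsed : fallback;
}

const config = {
  maxLines: envInt('MCP_FILESYSTEM_MAX_LINES', 5000),   // DEFAULT_MAX_LINES
  chunkSize: envInt('MCP_FILESYSTEM_CHUNK_SIZE', 1024), // DEFAULT_CHUNK_SIZE
};
```

Whether that extra strictness is worth it is a judgment call; the ternary-plus-`parseInt` form in the patch behaves identically for well-formed numeric values.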
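
For reference, a small sketch of how the refined schema behaves for callers, assuming the `config` and `ReadTextFileArgsSchema` from this patch are in scope (again, not part of the patch):

```typescript
// Requesting more than config.maxLines lines is rejected at validation time.
const result = ReadTextFileArgsSchema.safeParse({
  path: '/tmp/example.txt',
  head: config.maxLines + 1, // one over the configured limit
});

if (!result.success) {
  // Message comes from the .refine(...) added above:
  // `Maximum of ${config.maxLines} lines allowed`
  console.error(result.error.issues[0].message);
}
```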