Merge pull request #263 from bknd-io/fix/cf-vite-writer-plugin-large-payloads

fix: implement chunked request handling for cloudflare vite dev-fs plugin
This commit is contained in:
dswbx
2025-09-18 10:00:19 +02:00
committed by GitHub

View File

@@ -25,46 +25,156 @@ export function devFsVitePlugin({
}, },
configureServer(server) { configureServer(server) {
if (!isDev) { if (!isDev) {
verbose && console.debug("[dev-fs-plugin] Not in dev mode, skipping");
return; return;
} }
// Track active chunked requests
const activeRequests = new Map<
string,
{
totalChunks: number;
filename: string;
chunks: string[];
receivedChunks: number;
}
>();
// Intercept stdout to watch for our write requests // Intercept stdout to watch for our write requests
const originalStdoutWrite = process.stdout.write; const originalStdoutWrite = process.stdout.write;
process.stdout.write = function (chunk: any, encoding?: any, callback?: any) { process.stdout.write = function (chunk: any, encoding?: any, callback?: any) {
const output = chunk.toString(); const output = chunk.toString();
// Check if this output contains our special write request // Skip our own debug output
if (output.includes("{{DEV_FS_WRITE_REQUEST}}")) { if (output.includes("[dev-fs-plugin]") || output.includes("[dev-fs-polyfill]")) {
try { // @ts-ignore
// Extract the JSON from the log line // biome-ignore lint/style/noArguments: <explanation>
const match = output.match(/{{DEV_FS_WRITE_REQUEST}} ({.*})/); return originalStdoutWrite.apply(process.stdout, arguments);
if (match) { }
const writeRequest = JSON.parse(match[1]);
if (writeRequest.type === "DEV_FS_WRITE_REQUEST") {
if (verbose) {
console.debug("[dev-fs-plugin] Intercepted write request via stdout");
}
// Process the write request immediately // Track if we process any protocol messages (to suppress output)
(async () => { let processedProtocolMessage = false;
try {
const fullPath = resolve(projectRoot, writeRequest.filename);
await nodeWriteFile(fullPath, writeRequest.data);
if (verbose) {
console.debug("[dev-fs-plugin] File written successfully!");
}
} catch (error) {
console.error("[dev-fs-plugin] Error writing file:", error);
}
})();
// Don't output the raw write request to console // Process all start markers in this output
return true; if (output.includes("{{DEV_FS_START}}")) {
const startMatches = [
...output.matchAll(/{{DEV_FS_START}} ([a-z0-9]+) (\d+) (.+)/g),
];
for (const startMatch of startMatches) {
const requestId = startMatch[1];
const totalChunks = Number.parseInt(startMatch[2]);
const filename = startMatch[3];
activeRequests.set(requestId, {
totalChunks,
filename,
chunks: new Array(totalChunks),
receivedChunks: 0,
});
verbose &&
console.debug(
`[dev-fs-plugin] Started request ${requestId} for ${filename} (${totalChunks} chunks)`,
);
}
processedProtocolMessage = true;
}
// Process all chunk data in this output
if (output.includes("{{DEV_FS_CHUNK}}")) {
const chunkMatches = [
...output.matchAll(/{{DEV_FS_CHUNK}} ([a-z0-9]+) (\d+) ([A-Za-z0-9+/=]+)/g),
];
for (const chunkMatch of chunkMatches) {
const requestId = chunkMatch[1];
const chunkIndex = Number.parseInt(chunkMatch[2]);
const chunkData = chunkMatch[3];
const request = activeRequests.get(requestId);
if (request) {
request.chunks[chunkIndex] = chunkData;
request.receivedChunks++;
verbose &&
console.debug(
`[dev-fs-plugin] Received chunk ${chunkIndex}/${request.totalChunks - 1} for ${request.filename} (length: ${chunkData.length})`,
);
// Validate base64 chunk
if (chunkData.length < 1000 && chunkIndex < request.totalChunks - 1) {
verbose &&
console.warn(
`[dev-fs-plugin] WARNING: Chunk ${chunkIndex} seems truncated (length: ${chunkData.length})`,
);
} }
} }
} catch (error) {
// Not a valid write request, continue with normal output
} }
processedProtocolMessage = true;
}
// Process all end markers in this output
if (output.includes("{{DEV_FS_END}}")) {
const endMatches = [...output.matchAll(/{{DEV_FS_END}} ([a-z0-9]+)/g)];
for (const endMatch of endMatches) {
const requestId = endMatch[1];
const request = activeRequests.get(requestId);
if (request && request.receivedChunks === request.totalChunks) {
try {
// Reconstruct the base64 string
const fullBase64 = request.chunks.join("");
verbose &&
console.debug(
`[dev-fs-plugin] Reconstructed ${request.filename} - base64 length: ${fullBase64.length}`,
);
// Decode and parse
const decodedJson = atob(fullBase64);
const writeRequest = JSON.parse(decodedJson);
if (writeRequest.type === "DEV_FS_WRITE_REQUEST") {
verbose &&
console.debug(
`[dev-fs-plugin] Processing write request for ${writeRequest.filename}`,
);
// Process the write request
(async () => {
try {
const fullPath = resolve(projectRoot, writeRequest.filename);
verbose &&
console.debug(`[dev-fs-plugin] Writing to: ${fullPath}`);
await nodeWriteFile(fullPath, writeRequest.data);
verbose &&
console.debug("[dev-fs-plugin] File written successfully!");
} catch (error) {
console.error("[dev-fs-plugin] Error writing file:", error);
}
})();
// Clean up
activeRequests.delete(requestId);
return true;
}
} catch (error) {
console.error(
"[dev-fs-plugin] Error processing chunked request:",
String(error),
);
activeRequests.delete(requestId);
}
} else if (request) {
verbose &&
console.debug(
`[dev-fs-plugin] Request ${requestId} incomplete: ${request.receivedChunks}/${request.totalChunks} chunks`,
);
}
}
processedProtocolMessage = true;
}
// If we processed any protocol messages, suppress output
if (processedProtocolMessage) {
return callback ? callback() : true;
} }
// @ts-ignore // @ts-ignore
@@ -97,7 +207,7 @@ export function devFsVitePlugin({
if (typeof globalThis !== 'undefined') { if (typeof globalThis !== 'undefined') {
globalThis.__devFsPolyfill = { globalThis.__devFsPolyfill = {
writeFile: async (filename, data) => { writeFile: async (filename, data) => {
${verbose ? "console.debug('dev-fs polyfill: Intercepting write request for', filename);" : ""} ${verbose ? "console.debug('[dev-fs-polyfill] Intercepting write request for', filename);" : ""}
// Use console logging as a communication channel // Use console logging as a communication channel
// The main process will watch for this specific log pattern // The main process will watch for this specific log pattern
@@ -108,16 +218,38 @@ if (typeof globalThis !== 'undefined') {
timestamp: Date.now() timestamp: Date.now()
}; };
// Output as a specially formatted console message // Output as a specially formatted console message with end delimiter
console.log('{{DEV_FS_WRITE_REQUEST}}', JSON.stringify(writeRequest)); // Base64 encode the JSON to avoid any control character issues
${verbose ? "console.debug('dev-fs polyfill: Write request sent via console');" : ""} const jsonString = JSON.stringify(writeRequest);
const encodedJson = btoa(jsonString);
// Split into reasonable chunks that balance performance vs reliability
const chunkSize = 2000; // 2KB chunks - safe for most environments
const chunks = [];
for (let i = 0; i < encodedJson.length; i += chunkSize) {
chunks.push(encodedJson.slice(i, i + chunkSize));
}
const requestId = Date.now().toString(36) + Math.random().toString(36).substr(2, 5);
// Send start marker (use stdout.write to avoid console display)
process.stdout.write('{{DEV_FS_START}} ' + requestId + ' ' + chunks.length + ' ' + filename + '\\n');
// Send each chunk
chunks.forEach((chunk, index) => {
process.stdout.write('{{DEV_FS_CHUNK}} ' + requestId + ' ' + index + ' ' + chunk + '\\n');
});
// Send end marker
process.stdout.write('{{DEV_FS_END}} ' + requestId + '\\n');
return Promise.resolve(); return Promise.resolve();
} }
}; };
} }`;
`;
return polyfill + code; return polyfill + code;
} else {
verbose && console.debug("[dev-fs-plugin] Not transforming", id);
} }
}, },
} satisfies Plugin; } satisfies Plugin;