Beyond Basic File Operations
The fs module is one of the most powerful built-in modules in Node.js. While basic read/write
operations get you started, production applications need streaming for large files, recursive directory
traversal, file watching, and proper use of the promises API.
📁 fs Module API Styles
Callback API — the original async style with error-first callbacks
Sync API — blocking operations with the Sync suffix (e.g. readFileSync)
Promise API — modern async/await via fs/promises
The fs/promises API (Recommended)
Always prefer the promises API for modern Node.js code:
import { appendFile, mkdir, readFile, rename, unlink, writeFile } from 'fs/promises';
import { existsSync } from 'fs';
/**
 * Demonstrates the core fs/promises operations: write, read, append,
 * rename/move, and delete.
 * @returns {Promise<void>}
 */
async function fileOperations() {
  // Write a file (creates it, or truncates an existing one)
  await writeFile('data.json', JSON.stringify({ users: [] }, null, 2));
  // Read it back
  const content = await readFile('data.json', 'utf8');
  const data = JSON.parse(content);
  // Append to a log file (creates the file if it does not exist)
  await appendFile('app.log', `[${new Date().toISOString()}] Server started\n`);
  // Rename / move a file — the destination directory must already exist,
  // so create it first (rename throws ENOENT otherwise)
  await mkdir('backup', { recursive: true });
  await rename('data.json', 'backup/data.json');
  // Delete a file
  await unlink('backup/data.json');
  // Check existence (sync is fine for this)
  // NOTE: exists-then-read is a check-then-use race; for critical paths,
  // prefer readFile in a try/catch that handles ENOENT.
  if (existsSync('config.json')) {
    const config = JSON.parse(await readFile('config.json', 'utf8'));
    console.log('Config loaded:', config);
  }
}
Streaming Large Files
Never load large files entirely into memory. Use streams instead:
import { createReadStream, createWriteStream } from 'fs';
import { pipeline } from 'stream/promises';
import { createGzip, createGunzip } from 'zlib';
import { Transform } from 'stream';
// Stream a large file line by line
/**
 * Streams a CSV file and invokes onRow with each line's columns, without
 * ever loading the whole file into memory.
 * @param {string} filePath - path to the CSV file
 * @param {(columns: string[]) => void} [onRow] - called once per parsed row
 * @returns {Promise<void>}
 */
async function processLargeCSV(filePath, onRow = () => {}) {
  const stream = createReadStream(filePath, { encoding: 'utf8' });
  let remainder = '';
  for await (const chunk of stream) {
    const lines = (remainder + chunk).split('\n');
    remainder = lines.pop(); // last element may be a partial line
    for (const line of lines) {
      onRow(line.split(','));
    }
  }
  // BUG FIX: a file that does not end with '\n' leaves its final line in
  // `remainder` — process it instead of silently dropping it.
  if (remainder !== '') {
    onRow(remainder.split(','));
  }
}
// Compress a large file with streaming
/**
 * Gzip-compresses `input` into `output` using backpressure-aware streaming,
 * so memory use stays constant regardless of file size.
 * @param {string} input - source file path
 * @param {string} output - destination (.gz) file path
 * @returns {Promise<void>}
 */
async function compressFile(input, output) {
  const source = createReadStream(input);
  const gzip = createGzip();
  const destination = createWriteStream(output);
  await pipeline(source, gzip, destination);
  console.log('Compression complete');
}
// Copy file with progress tracking
/**
 * Copies src to dest while printing a running MB counter to stdout.
 * @param {string} src - source file path
 * @param {string} dest - destination file path
 * @returns {Promise<void>} resolves when the copy completes, rejects on error
 */
function copyWithProgress(src, dest) {
  let bytesCopied = 0;
  const source = createReadStream(src);
  source.on('data', (buf) => {
    bytesCopied += buf.length;
    process.stdout.write(`\rCopied: ${(bytesCopied / 1024 / 1024).toFixed(2)} MB`);
  });
  return pipeline(source, createWriteStream(dest));
}
Recursive Directory Operations
import { readdir, stat, mkdir, rm, cp } from 'fs/promises';
import { join } from 'path';
// List all files recursively (Node 18+)
/**
 * Returns the path of every regular file under dir, walking recursively.
 * @param {string} dir - root directory to walk
 * @returns {Promise<string[]>} file paths (relative if dir is relative)
 */
async function listAllFiles(dir) {
  const dirents = await readdir(dir, { withFileTypes: true, recursive: true });
  const files = [];
  for (const dirent of dirents) {
    if (!dirent.isFile()) continue;
    // parentPath is the modern field; path is its deprecated predecessor
    files.push(join(dirent.parentPath || dirent.path, dirent.name));
  }
  return files;
}
// Create nested directories; recursive: true creates every missing
// intermediate directory and does not throw if they already exist
await mkdir('data/backups/2024/january', { recursive: true });
// Remove a directory tree; force: true suppresses the error if it is missing
await rm('temp', { recursive: true, force: true });
// Copy entire directory tree (Node 16.7+)
await cp('source-dir', 'dest-dir', { recursive: true });
// Get directory size
/**
 * Computes the total size in bytes of every regular file under dir,
 * walking recursively.
 * @param {string} dir - root directory
 * @returns {Promise<number>} total size in bytes
 */
async function getDirSize(dir) {
  const entries = await readdir(dir, { withFileTypes: true, recursive: true });
  // The stat calls are independent, so issue them concurrently instead of
  // awaiting each one serially inside the loop.
  const statResults = await Promise.all(
    entries
      .filter((entry) => entry.isFile())
      // parentPath is the modern field; path is its deprecated predecessor
      .map((entry) => stat(join(entry.parentPath || entry.path, entry.name)))
  );
  return statResults.reduce((total, stats) => total + stats.size, 0);
}
// Example: print the total size of ./src in kilobytes (uses top-level await)
console.log(`Directory size: ${(await getDirSize('./src')) / 1024} KB`);
Watching Files and Directories
import { watch } from 'fs/promises';
import { watchFile, unwatchFile } from 'fs';
// Modern async iterator watch (Node 18+)
/**
 * Watches dir (recursively) and logs every filesystem event. The async
 * iterator never ends on its own, so this function does not return until
 * the watcher is aborted or the process exits.
 * @param {string} dir - directory to watch
 */
async function watchDirectory(dir) {
  const watcher = watch(dir, { recursive: true });
  console.log(`Watching ${dir} for changes...`);
  for await (const { eventType, filename } of watcher) {
    console.log(`${eventType}: ${filename}`);
    if (eventType === 'change') {
      // Trigger rebuild, reload config, etc.
    }
  }
}
// Watch a specific file for changes (polling-based, per the interval option)
watchFile('config.json', { interval: 1000 }, (curr, prev) => {
// curr and prev are stat snapshots; compare mtimes to detect a real change
if (curr.mtime > prev.mtime) {
console.log('Config file was modified, reloading...');
// Reload configuration
}
});
// Stop watching
// NOTE(review): calling unwatchFile immediately after registering cancels the
// watcher above — in a real app this belongs in shutdown/cleanup code.
unwatchFile('config.json');
Working with File Metadata and the Path Module
import { stat, chmod, utimes } from 'fs/promises';
import { join, resolve, basename, dirname, extname, parse, relative } from 'path';
// File metadata: stat returns an fs.Stats object describing the file
const stats = await stat('package.json');
console.log({
size: stats.size, // Size in bytes
isFile: stats.isFile(),
isDirectory: stats.isDirectory(),
created: stats.birthtime, // creation time
modified: stats.mtime, // last modification time
permissions: stats.mode.toString(8) // mode rendered as an octal string
});
// Path module essentials
const filePath = '/Users/dev/projects/app/src/index.ts';
console.log(basename(filePath)); // 'index.ts'
console.log(dirname(filePath)); // '/Users/dev/projects/app/src'
console.log(extname(filePath)); // '.ts'
console.log(parse(filePath)); // { root, dir, base, ext, name }
// Build cross-platform paths
// BUG FIX: __dirname is not defined in ES modules (this code uses import
// syntax) — derive it from import.meta.url instead.
const { fileURLToPath } = await import('url');
const __dirname = dirname(fileURLToPath(import.meta.url));
const configPath = join(__dirname, '..', 'config', 'default.json');
const absolutePath = resolve('src', 'utils', 'helpers.js');
const rel = relative('/Users/dev/project', '/Users/dev/project/src/app.js');
console.log(rel); // 'src/app.js'
Temporary Files and Atomic Writes
import { mkdtemp, writeFile, readFile, rename, rm } from 'fs/promises';
import { tmpdir } from 'os';
import { join } from 'path';
// Create a temp directory; mkdtemp appends six random characters to the
// prefix, so concurrent callers never collide
const tempDir = await mkdtemp(join(tmpdir(), 'myapp-'));
console.log('Temp dir:', tempDir);
// e.g., /tmp/myapp-aB3xYz
// Atomic write: write to temp file, then rename
// Prevents data corruption if process crashes mid-write
/**
 * Atomically replaces filePath with data: the payload is staged in a
 * sibling temp file, then swapped into place with a single rename, so
 * readers never observe a half-written file.
 * @param {string} filePath - final destination path
 * @param {string|Buffer} data - contents to write
 * @throws re-throws the original error after removing the staging file
 */
async function atomicWrite(filePath, data) {
  const stagingPath = `${filePath}.${process.pid}.tmp`;
  try {
    await writeFile(stagingPath, data);
    await rename(stagingPath, filePath);
  } catch (err) {
    // Best-effort cleanup; force: true ignores a missing staging file
    await rm(stagingPath, { force: true });
    throw err;
  }
}
// NOTE(review): assumes a `config` object defined earlier in your app —
// this snippet does not declare it.
await atomicWrite('config.json', JSON.stringify(config, null, 2));
// Clean up temp directory
await rm(tempDir, { recursive: true });
💡 Key Takeaways
- Use fs/promises for all async file operations
- Stream large files instead of loading them into memory
- Use recursive: true for nested directory operations
- Use atomic writes to prevent data corruption
- Always use the path module for cross-platform compatibility