Code Examples

Real-world examples to help you get started with FluxUpload

Beginner · Express.js

Basic Express Setup

A minimal setup to handle file uploads with Express.js and store files locally.

server.js
const express = require('express');
const { FluxUpload } = require('fluxupload');

const app = express();

// Initialize FluxUpload with local storage
const uploader = new FluxUpload({
  storage: {
    type: 'local',
    options: {
      directory: './uploads',
      createDirectory: true
    }
  },
  limits: {
    maxFileSize: 10 * 1024 * 1024, // 10MB
    maxFiles: 5
  },
  allowedMimeTypes: ['image/*', 'application/pdf']
});

// Upload endpoint
app.post('/upload', async (req, res) => {
  try {
    const results = await uploader.handleRequest(req);
    res.json({
      success: true,
      files: results.map(r => ({
        filename: r.filename,
        size: r.size,
        url: `/files/${r.filename}`
      }))
    });
  } catch (error) {
    res.status(error.statusCode || 500).json({
      success: false,
      error: error.message
    });
  }
});

// Serve uploaded files
app.use('/files', express.static('./uploads'));

app.listen(3000, () => {
  console.log('Server running on http://localhost:3000');
});
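
To exercise the endpoint from a browser, a sketch like the following works (the multipart field name "file" is an assumption — the handler above consumes the whole multipart body):

const input = document.querySelector('input[type=file]');
const form = new FormData();
form.append('file', input.files[0]);

const res = await fetch('/upload', { method: 'POST', body: form });
console.log(await res.json()); // { success: true, files: [...] }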

Intermediate · AWS S3

S3 Storage with Signed URLs

Upload files directly to S3 with time-limited signed URLs for secure access.

s3-upload.js
const express = require('express');
const { FluxUpload, S3Storage, SignedUrls } = require('fluxupload');

const app = express();

// Initialize S3 storage (no aws-sdk needed!)
const s3Storage = new S3Storage({
  bucket: process.env.S3_BUCKET,
  region: process.env.AWS_REGION,
  accessKeyId: process.env.AWS_ACCESS_KEY_ID,
  secretAccessKey: process.env.AWS_SECRET_ACCESS_KEY,
  // Optional: Custom endpoint for S3-compatible storage
  // endpoint: 'https://s3.us-west-2.amazonaws.com'
});

// Initialize signed URL generator
const signedUrls = new SignedUrls({
  secret: process.env.URL_SECRET,
  defaultExpiry: 3600 // 1 hour
});

const uploader = new FluxUpload({
  storage: s3Storage,
  limits: {
    maxFileSize: 100 * 1024 * 1024 // 100MB
  }
});

// Generate upload URL (assumes auth middleware has populated req.user)
app.get('/get-upload-url', (req, res) => {
  const url = signedUrls.sign('https://api.example.com/upload', {
    maxFileSize: 50 * 1024 * 1024,
    allowedTypes: ['image/jpeg', 'image/png'],
    userId: req.user.id,
    expiresIn: 900 // 15 minutes
  });

  res.json({ uploadUrl: url });
});

// Handle upload with signed URL validation
app.post('/upload', async (req, res) => {
  // Validate signed URL
  const validation = signedUrls.validate(req.url);
  if (!validation.valid) {
    return res.status(403).json({ error: validation.error });
  }

  const results = await uploader.handleRequest(req, {
    // Apply constraints from signed URL
    maxFileSize: validation.constraints.maxFileSize,
    allowedTypes: validation.constraints.allowedTypes
  });

  res.json({ success: true, files: results });
});

Advanced · Image Processing

Image Processing Pipeline

Build a custom pipeline to resize images, generate thumbnails, and extract metadata.

image-pipeline.js
const express = require('express');
const { FluxUpload, Plugin } = require('fluxupload');

const app = express();

// Custom image resizer plugin
class ImageResizer extends Plugin {
  constructor(options = {}) {
    super('ImageResizer');
    this.maxWidth = options.maxWidth || 1920;
    this.maxHeight = options.maxHeight || 1080;
    this.quality = options.quality || 80;
  }

  async process(context) {
    const { stream, fileInfo, metadata } = context;

    // Only process images
    if (!fileInfo.mimeType.startsWith('image/')) {
      return context;
    }

    // Get dimensions from ImageDimensionProbe
    const { width, height } = metadata.dimensions || {};

    if (width > this.maxWidth || height > this.maxHeight) {
      // Create resize transform stream
      const resizeStream = this.createResizeStream({
        width: this.maxWidth,
        height: this.maxHeight,
        fit: 'inside'
      });

      stream.pipe(resizeStream);
      context.stream = resizeStream;
      context.metadata.resized = true;
    }

    return context;
  }
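
  // createResizeStream isn't part of FluxUpload — a minimal sketch of one
  // possible implementation using sharp (an assumed dependency; any
  // streaming image library works):
  createResizeStream({ width, height, fit }) {
    const sharp = require('sharp');
    // sharp() with no input acts as a transform stream; add an explicit
    // output format (e.g. .jpeg({ quality: this.quality })) to re-encode
    return sharp().resize({ width, height, fit, withoutEnlargement: true });
  }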
}

// Custom thumbnail generator plugin
class ThumbnailGenerator extends Plugin {
  constructor(options = {}) {
    super('ThumbnailGenerator');
    this.sizes = options.sizes || [150, 300, 600];
  }

  async process(context) {
    if (!context.fileInfo.mimeType.startsWith('image/')) {
      return context;
    }

    // Record thumbnail targets; actual generation can run after the main
    // upload completes (e.g. in a background worker)
    context.metadata.thumbnails = this.sizes.map(size => ({
      size,
      path: `thumbnails/${size}/${context.fileInfo.filename}`
    }));

    return context;
  }
}

// Build the pipeline
const uploader = new FluxUpload({
  storage: { type: 'local', options: { directory: './uploads' } },
  validators: [
    'MagicByteDetector',  // Verify file type
    'ImageDimensionProbe' // Extract dimensions
  ],
  transformers: [
    new ImageResizer({ maxWidth: 1920, quality: 85 }),
    new ThumbnailGenerator({ sizes: [150, 300] }),
    'StreamHasher' // Generate content hash
  ]
});

app.post('/upload/image', async (req, res) => {
  const results = await uploader.handleRequest(req);

  res.json({
    success: true,
    files: results.map(r => ({
      original: r.path,
      thumbnails: r.metadata.thumbnails,
      dimensions: r.metadata.dimensions,
      hash: r.metadata.hash,
      resized: r.metadata.resized || false
    }))
  });
});

Advanced · Multi-tenant

Multi-tenant Upload with Quotas

Implement per-user storage quotas with plan-based limits and usage tracking.

multi-tenant.js
const express = require('express');
const { FluxUpload, QuotaLimiter } = require('fluxupload');

const app = express();

// `db` and `authenticate` below stand in for your own data layer and auth
// middleware — swap in whatever your application uses

// User quotas (could be from database)
const userQuotas = {
  free: { maxStorage: 100 * 1024 * 1024, maxFileSize: 5 * 1024 * 1024 },
  pro: { maxStorage: 10 * 1024 * 1024 * 1024, maxFileSize: 100 * 1024 * 1024 },
  enterprise: { maxStorage: Infinity, maxFileSize: 500 * 1024 * 1024 }
};

// Quota checker that integrates with your user system
const quotaLimiter = new QuotaLimiter({
  getQuota: async (context) => {
    const user = context.request.user;
    const plan = user.plan || 'free';
    const quota = userQuotas[plan];

    // Get current usage from database
    const currentUsage = await db.getUserStorageUsage(user.id);

    return {
      maxFileSize: quota.maxFileSize,
      maxStorage: quota.maxStorage,
      remainingStorage: quota.maxStorage - currentUsage,
      currentUsage
    };
  },
  onQuotaExceeded: (context, quota) => {
    const error = new Error(`Storage quota exceeded. Used: ${quota.currentUsage}, Limit: ${quota.maxStorage}`);
    error.code = 'QUOTA_EXCEEDED';
    error.statusCode = 413;
    throw error;
  }
});

const uploader = new FluxUpload({
  storage: {
    type: 'local',
    options: {
      directory: './uploads',
      // Organize by user ID (sanitize fileInfo.filename before using it in
      // a path in production)
      generatePath: (fileInfo, context) => {
        const userId = context.request.user.id;
        return `${userId}/${Date.now()}-${fileInfo.filename}`;
      }
    }
  },
  validators: [quotaLimiter]
});

// Middleware to attach user to request
app.use('/upload', authenticate);

app.post('/upload', async (req, res) => {
  try {
    const results = await uploader.handleRequest(req);

    // Update user's storage usage
    const totalSize = results.reduce((sum, r) => sum + r.size, 0);
    await db.incrementUserStorage(req.user.id, totalSize);

    res.json({
      success: true,
      files: results,
      usage: await db.getUserStorageUsage(req.user.id)
    });
  } catch (error) {
    if (error.code === 'QUOTA_EXCEEDED') {
      return res.status(413).json({
        error: 'Storage quota exceeded',
        upgrade: '/pricing'
      });
    }
    // Don't rethrow from an async handler (Express 4 won't catch it)
    res.status(error.statusCode || 500).json({ error: error.message });
  }
});
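
Note that the check-then-increment pattern above can race: two concurrent uploads from the same user may both pass the quota check before either increments usage. One mitigation is to reserve the bytes atomically up front — a sketch, where db.reserveStorage is a hypothetical helper (not part of FluxUpload) that increments usage only if the quota still allows it:

// e.g. backed by a conditional UPDATE:
//   UPDATE users SET used = used + $bytes
//   WHERE id = $id AND used + $bytes <= quota
const bytes = Number(req.headers['content-length'] || 0); // upper bound incl. multipart overhead
const ok = await db.reserveStorage(req.user.id, bytes);
if (!ok) return res.status(413).json({ error: 'Storage quota exceeded' });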

Advanced · Large Files

Chunked Upload for Large Files

Handle large file uploads with resumable chunked uploads and progress tracking.

server.js
const express = require('express');
const { ChunkedUploadManager } = require('fluxupload');

const app = express();
app.use(express.json()); // parse the JSON body used by /upload/init

const chunkedManager = new ChunkedUploadManager({
  chunkSize: 5 * 1024 * 1024, // 5MB chunks
  tempDirectory: './temp-chunks',
  storage: s3Storage, // an S3Storage instance, configured as in the S3 example above
  // Auto-cleanup incomplete uploads after 24h
  cleanupAfter: 24 * 60 * 60 * 1000
});

// Initialize upload session
app.post('/upload/init', async (req, res) => {
  const { filename, fileSize, mimeType } = req.body;

  const session = await chunkedManager.initUpload({
    filename,
    fileSize,
    mimeType,
    userId: req.user.id
  });

  res.json({
    uploadId: session.id,
    chunkSize: session.chunkSize,
    totalChunks: session.totalChunks
  });
});

// Upload chunk
app.put('/upload/:uploadId/chunk/:index', async (req, res) => {
  const { uploadId, index } = req.params;

  const result = await chunkedManager.uploadChunk(
    uploadId,
    parseInt(index, 10),
    req
  );

  res.json({
    received: result.received,
    remaining: result.remaining,
    progress: result.progress
  });
});

// Complete upload
app.post('/upload/:uploadId/complete', async (req, res) => {
  const { uploadId } = req.params;

  const result = await chunkedManager.completeUpload(uploadId);

  res.json({
    success: true,
    file: result
  });
});

client.js
async function uploadLargeFile(file, onProgress) {
  // Initialize upload
  const init = await fetch('/upload/init', {
    method: 'POST',
    headers: { 'Content-Type': 'application/json' },
    body: JSON.stringify({
      filename: file.name,
      fileSize: file.size,
      mimeType: file.type
    })
  }).then(r => r.json());

  const { uploadId, chunkSize, totalChunks } = init;

  // Upload chunks
  for (let i = 0; i < totalChunks; i++) {
    const start = i * chunkSize;
    const end = Math.min(start + chunkSize, file.size);
    const chunk = file.slice(start, end);

    const res = await fetch(`/upload/${uploadId}/chunk/${i}`, {
      method: 'PUT',
      body: chunk
    });
    if (!res.ok) throw new Error(`Chunk ${i} failed with status ${res.status}`);

    onProgress?.((i + 1) / totalChunks * 100);
  }

  // Complete upload
  const result = await fetch(`/upload/${uploadId}/complete`, {
    method: 'POST'
  }).then(r => r.json());

  return result.file;
}

// Usage
const file = document.querySelector('input[type=file]').files[0];
uploadLargeFile(file, progress => {
  console.log(`Upload progress: ${progress.toFixed(1)}%`);
}).then(uploaded => console.log('Uploaded:', uploaded));
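
The loop above always starts from chunk 0, so an interrupted upload re-sends everything. For true resumability the client needs to ask the server which chunks already arrived. A hedged sketch, assuming a hypothetical status endpoint and a hypothetical chunkedManager.getSession() lookup (neither is shown in the server code above):

// server.js — hypothetical status endpoint
app.get('/upload/:uploadId/status', async (req, res) => {
  const session = await chunkedManager.getSession(req.params.uploadId);
  res.json({ receivedChunks: session.receivedChunks }); // e.g. [0, 1, 4]
});

// client.js — skip chunks the server already has
const { receivedChunks } = await fetch(`/upload/${uploadId}/status`)
  .then(r => r.json());
const have = new Set(receivedChunks);

for (let i = 0; i < totalChunks; i++) {
  if (have.has(i)) continue; // uploaded in a previous session
  // ...slice and PUT the chunk as in uploadLargeFile above
}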

Intermediate · Security

Virus Scanning Integration

Scan uploaded files for malware using ClamAV or cloud-based scanning services.

virus-scan-plugin.js
const net = require('net');
const { Readable } = require('stream');
const { Plugin } = require('fluxupload');

class VirusScanPlugin extends Plugin {
  constructor(options = {}) {
    super('VirusScanner');
    this.scannerHost = options.host || 'localhost';
    this.scannerPort = options.port || 3310;
    this.maxScanSize = options.maxScanSize || 100 * 1024 * 1024;
  }

  async process(context) {
    const { stream, fileInfo, metadata } = context;

    // Skip large files (they should be scanned async)
    if (metadata.size > this.maxScanSize) {
      context.metadata.virusScan = { skipped: true, reason: 'file_too_large' };
      return context;
    }

    // Buffer the file for scanning (bounded by the maxScanSize check above)
    const chunks = [];
    for await (const chunk of stream) {
      chunks.push(chunk);
    }

    const fileBuffer = Buffer.concat(chunks);

    // Scan with ClamAV
    const scanResult = await this.scanWithClamAV(fileBuffer);

    if (scanResult.infected) {
      const error = new Error(`Virus detected: ${scanResult.virus}`);
      error.code = 'VIRUS_DETECTED';
      error.statusCode = 422;
      throw error;
    }

    context.metadata.virusScan = {
      scanned: true,
      clean: true,
      engine: 'ClamAV',
      timestamp: new Date().toISOString()
    };

    // Recreate a readable stream from the scanned buffer for downstream stages
    context.stream = Readable.from(fileBuffer);

    return context;
  }

  async scanWithClamAV(buffer) {
    return new Promise((resolve, reject) => {
      const socket = net.connect(this.scannerPort, this.scannerHost);

      socket.write('zINSTREAM\0');

      // Send file in chunks
      const chunkSize = 2048;
      for (let i = 0; i < buffer.length; i += chunkSize) {
        const chunk = buffer.slice(i, i + chunkSize);
        const size = Buffer.alloc(4);
        size.writeUInt32BE(chunk.length);
        socket.write(size);
        socket.write(chunk);
      }

      // End stream
      socket.write(Buffer.alloc(4));

      let response = '';
      socket.on('data', data => response += data.toString());
      socket.on('end', () => {
        // ClamAV replies e.g. "stream: OK" or "stream: Eicar-Signature FOUND"
        const infected = !response.includes('OK');
        resolve({
          infected,
          virus: infected ? response.split(':')[1]?.replace(/FOUND|\0/g, '').trim() : null
        });
      });
      socket.on('error', reject);
    });
  }
}

module.exports = VirusScanPlugin;
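
Wiring the plugin into an uploader follows the pipeline example above. Since the scanner replaces context.stream, the transformer stage is a natural fit (a sketch; host/port values are illustrative, and the plugin could equally sit in validators since it rejects infected files):

const { FluxUpload } = require('fluxupload');
const VirusScanPlugin = require('./virus-scan-plugin');

const uploader = new FluxUpload({
  storage: { type: 'local', options: { directory: './uploads' } },
  transformers: [
    new VirusScanPlugin({ host: '127.0.0.1', port: 3310 })
  ]
});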

Quick Reference

Built-in Validators

  • MagicByteDetector - File type verification
  • ImageDimensionProbe - Image size limits
  • QuotaLimiter - Storage quotas
  • CsrfProtection - CSRF tokens

Built-in Transformers

  • StreamHasher - SHA256/MD5 hashing
  • CompressionTransformer - Gzip/Brotli
  • EncryptionTransformer - AES encryption

Storage Options

  • LocalStorage - File system
  • S3Storage - AWS S3 / compatible
  • MemoryStorage - In-memory (testing)
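
The built-ins above can be mixed freely with custom plugins in one configuration, passed by name or as instances, as in the examples earlier on this page — a sketch (option values are illustrative):

const uploader = new FluxUpload({
  storage: { type: 'local', options: { directory: './uploads' } },
  validators: ['MagicByteDetector', 'CsrfProtection'],
  transformers: ['StreamHasher', 'CompressionTransformer']
});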