Deployment

Master advanced deployment strategies for your documentation site, from simple static hosting to complex multi-environment setups.

Deployment Options

Static Site Hosting

Cloudflare Pages

The default deployment target for this project:

# Install Wrangler CLI
npm install -g wrangler

# Login to Cloudflare
wrangler login

# Deploy to Cloudflare Pages
pnpm build && pnpm deploy:pages

Configuration in wrangler.toml:

name = "your-docs-site"
compatibility_date = "2024-01-01"
pages_build_output_dir = "dist"

[env.production]
vars = { NODE_ENV = "production" }

[env.staging]
vars = { NODE_ENV = "staging" }
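
The deploy:pages script used above is assumed to live in package.json; a minimal sketch of what it might wrap, using the Wrangler CLI (the script path and project name are placeholders):

// scripts/deploy-pages.js (hypothetical helper behind `pnpm deploy:pages`)
import { execSync } from 'node:child_process';

// The project name is an assumption; replace it with your Cloudflare Pages project.
const project = process.env.CF_PAGES_PROJECT || 'your-docs-site';

// `wrangler pages deploy` uploads the static build output to Cloudflare Pages.
execSync(`wrangler pages deploy dist --project-name ${project}`, { stdio: 'inherit' });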

GitHub Pages

Alternative deployment option:

# .github/workflows/deploy.yml
name: Deploy to GitHub Pages

on:
  push:
    branches: [main]

jobs:
  deploy:
    runs-on: ubuntu-latest
    permissions:
      contents: write
    steps:
      - uses: actions/checkout@v3
      
      - name: Setup pnpm
        uses: pnpm/action-setup@v2
        with:
          version: 8

      - name: Setup Node.js
        uses: actions/setup-node@v3
        with:
          node-version: '18'
          cache: 'pnpm'
      
      - name: Install dependencies
        run: pnpm install
      
      - name: Build
        run: pnpm build
      
      - name: Deploy to GitHub Pages
        uses: peaceiris/actions-gh-pages@v3
        with:
          github_token: ${{ secrets.GITHUB_TOKEN }}
          publish_dir: ./dist
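
When publishing under a repository subpath on GitHub Pages, set site and base in the Astro config so links and assets resolve correctly; a sketch, assuming the site is served at https://your-user.github.io/your-repo (both names are placeholders):

// astro.config.mjs (GitHub Pages subpath example)
import { defineConfig } from 'astro/config';

export default defineConfig({
  // The origin GitHub Pages serves the site from.
  site: 'https://your-user.github.io',
  // The repository subpath; omit base for a user or organization site.
  base: '/your-repo'
});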

Netlify

Deploy with drag-and-drop or Git integration:

# netlify.toml
[build]
  publish = "dist"
  command = "pnpm build"

[build.environment]
  NODE_VERSION = "18"

[[redirects]]
  from = "/*"
  to = "/index.html"
  status = 200
  force = false

Vercel

Vercel deploys with zero configuration by default; to pin the static build output explicitly, add a vercel.json:

{
  "version": 2,
  "builds": [
    {
      "src": "package.json",
      "use": "@vercel/static-build",
      "config": {
        "distDir": "dist"
      }
    }
  ]
}

Server-Side Rendering

Astro SSR with Node.js

// astro.config.mjs
import { defineConfig } from 'astro/config';
import node from '@astrojs/node';

export default defineConfig({
  output: 'server',
  adapter: node({
    mode: 'standalone'
  })
});

Deploy to any Node.js hosting platform:

# Dockerfile
FROM node:18-alpine

WORKDIR /app
COPY package*.json ./
RUN npm ci --omit=dev

COPY dist ./dist

ENV HOST=0.0.0.0
ENV PORT=4321
EXPOSE 4321
CMD ["node", "./dist/server/entry.mjs"]

Astro SSR with Cloudflare Workers

// astro.config.mjs
import { defineConfig } from 'astro/config';
import cloudflare from '@astrojs/cloudflare';

export default defineConfig({
  output: 'server',
  adapter: cloudflare()
});
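
With the Cloudflare adapter, server endpoints can read environment variables and bindings from the runtime. A minimal sketch, assuming an API_SECRET variable is configured in the Cloudflare dashboard (the endpoint path is illustrative, and the exact runtime shape can vary between adapter versions):

// src/pages/api/health.js (illustrative SSR endpoint)
export async function GET({ locals }) {
  // locals.runtime.env exposes Cloudflare environment variables and bindings.
  const hasSecret = Boolean(locals.runtime?.env?.API_SECRET);

  return new Response(JSON.stringify({ ok: true, hasSecret }), {
    headers: { 'Content-Type': 'application/json' }
  });
}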

Multi-Environment Setup

Environment Configuration

// config/environments.js
const environments = {
  development: {
    baseUrl: '',
    apiUrl: 'http://localhost:3000',
    analytics: false,
    debug: true
  },
  
  staging: {
    baseUrl: '/staging',
    apiUrl: 'https://staging-api.example.com',
    analytics: false,
    debug: true
  },
  
  production: {
    baseUrl: '/docs',
    apiUrl: 'https://api.example.com',
    analytics: true,
    debug: false
  }
};

export default environments[process.env.NODE_ENV || 'development'];
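
Consumers import the resolved config rather than reading process.env directly; for example, a small fetch helper (the file name is illustrative):

// utils/api.js (illustrative consumer of config/environments.js)
import env from '../config/environments.js';

export async function fetchDocsData(path) {
  const url = `${env.apiUrl}${path}`;
  if (env.debug) console.log('[api] fetching', url);

  const response = await fetch(url);
  if (!response.ok) throw new Error(`Request failed: ${response.status}`);
  return response.json();
}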

Environment-Specific Builds

# Development build
NODE_ENV=development pnpm build

# Staging build
NODE_ENV=staging pnpm build

# Production build
NODE_ENV=production pnpm build

Branch-Based Deployment

# .github/workflows/deploy-branches.yml
name: Deploy by Branch

on:
  push:
    branches: [main, staging, develop]

jobs:
  deploy:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v3

      - name: Setup pnpm
        uses: pnpm/action-setup@v2
        with:
          version: 8

      - name: Setup Node.js
        uses: actions/setup-node@v3
        with:
          node-version: '18'
          cache: 'pnpm'

      - name: Install dependencies
        run: pnpm install

      - name: Determine environment
        id: env
        run: |
          if [[ "${{ github.ref }}" == "refs/heads/main" ]]; then
            echo "env=production" >> $GITHUB_OUTPUT
          elif [[ "${{ github.ref }}" == "refs/heads/staging" ]]; then
            echo "env=staging" >> $GITHUB_OUTPUT
          else
            echo "env=development" >> $GITHUB_OUTPUT
          fi

      - name: Build
        run: NODE_ENV=${{ steps.env.outputs.env }} pnpm build
      
      - name: Deploy
        env:
          CLOUDFLARE_API_TOKEN: ${{ secrets.CLOUDFLARE_API_TOKEN }}
          CLOUDFLARE_ACCOUNT_ID: ${{ secrets.CLOUDFLARE_ACCOUNT_ID }}
        run: |
          if [[ "${{ steps.env.outputs.env }}" == "production" ]]; then
            npx wrangler pages deploy dist --project-name docs-prod
          elif [[ "${{ steps.env.outputs.env }}" == "staging" ]]; then
            npx wrangler pages deploy dist --project-name docs-staging
          fi

Performance Optimization

Build Optimization

// scripts/optimize-build.js
import { execSync } from 'child_process';
import { writeFileSync } from 'fs';
import { glob } from 'glob';

function optimizeBuild() {
  console.log('🚀 Optimizing build...');
  
  // Compress assets
  execSync("npx imagemin-cli 'dist/**/*.{jpg,png,gif}' --out-dir=dist");
  
  // Generate service worker
  generateServiceWorker();
  
  // Create manifest
  createWebManifest();
  
  console.log('✅ Build optimization complete');
}

function generateServiceWorker() {
  const assets = glob.sync('dist/**/*', { nodir: true }).map(file => 
    file.replace('dist', '')
  );
  
  const swContent = `
    const CACHE_NAME = 'docs-v${Date.now()}';
    const urlsToCache = ${JSON.stringify(assets)};
    
    self.addEventListener('install', event => {
      event.waitUntil(
        caches.open(CACHE_NAME)
          .then(cache => cache.addAll(urlsToCache))
      );
    });
    
    self.addEventListener('fetch', event => {
      event.respondWith(
        caches.match(event.request)
          .then(response => response || fetch(event.request))
      );
    });
  `;
  
  writeFileSync('dist/sw.js', swContent);
}

function createWebManifest() {
  const manifest = {
    name: 'Documentation',
    short_name: 'Docs',
    description: 'Project documentation',
    start_url: '/',
    display: 'standalone',
    background_color: '#ffffff',
    theme_color: '#000000',
    icons: [
      {
        src: '/icon-192.png',
        sizes: '192x192',
        type: 'image/png'
      }
    ]
  };
  
  writeFileSync('dist/manifest.json', JSON.stringify(manifest, null, 2));
}

optimizeBuild();

CDN Integration

// config/cdn.js
const CDN_CONFIG = {
  production: {
    baseUrl: 'https://cdn.example.com/docs',
    images: 'https://images.example.com',
    assets: 'https://assets.example.com'
  },
  
  staging: {
    baseUrl: 'https://staging-cdn.example.com/docs',
    images: 'https://staging-images.example.com',
    assets: 'https://staging-assets.example.com'
  }
};

export function getAssetUrl(path, type = 'assets') {
  const config = CDN_CONFIG[process.env.NODE_ENV] || {};
  const baseUrl = config[type] || '';
  return `${baseUrl}${path}`;
}
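
Usage then stays the same across environments; for example (paths are illustrative):

// Example usage of getAssetUrl
import { getAssetUrl } from './config/cdn.js';

const logoUrl = getAssetUrl('/logo.svg', 'images'); // https://images.example.com/logo.svg in production
const scriptUrl = getAssetUrl('/main.js');          // falls back to the local path outside production/staging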

Asset Versioning

// scripts/version-assets.js
import { createHash } from 'crypto';
import { readFileSync, writeFileSync, renameSync } from 'fs';
import { glob } from 'glob';

function versionAssets() {
  const assets = glob.sync('dist/**/*.{js,css,png,jpg,gif}');
  const manifest = {};
  
  assets.forEach(asset => {
    const content = readFileSync(asset);
    const hash = createHash('md5').update(content).digest('hex').slice(0, 8);
    
    const ext = asset.split('.').pop();
    const baseName = asset.replace(`.${ext}`, '');
    const versionedName = `${baseName}.${hash}.${ext}`;
    
    renameSync(asset, versionedName);
    manifest[asset.replace('dist/', '')] = versionedName.replace('dist/', '');
  });
  
  writeFileSync('dist/asset-manifest.json', JSON.stringify(manifest, null, 2));
}

versionAssets();
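
Renaming files on its own breaks existing references, so a follow-up pass should rewrite URLs in the built HTML using the generated manifest; a minimal sketch (not part of the script above):

// scripts/rewrite-asset-refs.js (hypothetical follow-up to version-assets.js)
import { readFileSync, writeFileSync } from 'fs';
import { glob } from 'glob';

const manifest = JSON.parse(readFileSync('dist/asset-manifest.json', 'utf-8'));

for (const htmlFile of glob.sync('dist/**/*.html')) {
  let html = readFileSync(htmlFile, 'utf-8');
  // Swap each original asset path for its hashed equivalent.
  for (const [original, versioned] of Object.entries(manifest)) {
    html = html.replaceAll(`/${original}`, `/${versioned}`);
  }
  writeFileSync(htmlFile, html);
}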

Security Considerations

Content Security Policy

// middleware/security.js
export function addSecurityHeaders(response) {
  response.headers.set('Content-Security-Policy', [
    "default-src 'self'",
    "script-src 'self' 'unsafe-inline' https://analytics.google.com",
    "style-src 'self' 'unsafe-inline' https://fonts.googleapis.com",
    "font-src 'self' https://fonts.gstatic.com",
    "img-src 'self' data: https:",
    "connect-src 'self' https://api.example.com"
  ].join('; '));
  
  response.headers.set('X-Frame-Options', 'DENY');
  response.headers.set('X-Content-Type-Options', 'nosniff');
  response.headers.set('Referrer-Policy', 'strict-origin-when-cross-origin');
  
  return response;
}
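
In an Astro project this helper can be applied to every response through project middleware; a sketch, assuming the helper above lives in middleware/security.js at the project root:

// src/middleware.js (applies the security headers to every response)
import { addSecurityHeaders } from '../middleware/security.js';

export async function onRequest(context, next) {
  const response = await next();
  return addSecurityHeaders(response);
}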

Environment Variables

# .env.production
NODE_ENV=production
SITE_URL=https://docs.example.com
API_URL=https://api.example.com
ANALYTICS_ID=G-XXXXXXXXXX

# Sensitive variables: keep these in CI secrets or an untracked .env file, never in version control
DATABASE_URL=postgresql://user:pass@host:port/db
API_SECRET=your-secret-key
DEPLOY_TOKEN=your-deploy-token
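
A small startup check catches missing variables before a broken build ships; a sketch (the required list is an assumption for this project):

// scripts/check-env.js (fails fast when required variables are missing)
const required = ['SITE_URL', 'API_URL'];

const missing = required.filter(name => !process.env[name]);
if (missing.length > 0) {
  console.error(`Missing required environment variables: ${missing.join(', ')}`);
  process.exit(1);
}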

Access Control

// middleware/auth.js
export async function requireAuth(request) {
  const authHeader = request.headers.get('Authorization');
  
  if (!authHeader || !authHeader.startsWith('Bearer ')) {
    return new Response('Unauthorized', { status: 401 });
  }
  
  const token = authHeader.slice(7);
  const isValid = await validateToken(token);
  
  if (!isValid) {
    return new Response('Invalid token', { status: 401 });
  }
  
  return null; // Allow request to continue
}
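
validateToken is left to your platform; a hedged sketch that compares against a deploy-time secret (a real deployment would verify a signed JWT or call an identity provider):

// Hypothetical validateToken; replace with JWT verification or an identity-provider call
async function validateToken(token) {
  // Illustrative only: compares against DOCS_ACCESS_TOKEN, an assumed environment variable.
  return Boolean(process.env.DOCS_ACCESS_TOKEN) && token === process.env.DOCS_ACCESS_TOKEN;
}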

Monitoring and Analytics

Performance Monitoring

// utils/performance.js
export function trackPerformance() {
  if (typeof window === 'undefined') return;
  
  // Core Web Vitals
  new PerformanceObserver(list => {
    for (const entry of list.getEntries()) {
      if (entry.entryType === 'largest-contentful-paint') {
        gtag('event', 'LCP', { value: Math.round(entry.startTime) });
      }
      
      if (entry.entryType === 'first-input') {
        gtag('event', 'FID', { value: Math.round(entry.processingStart - entry.startTime) });
      }
      
      if (entry.entryType === 'layout-shift') {
        if (!entry.hadRecentInput) {
          gtag('event', 'CLS', { value: Math.round(entry.value * 1000) });
        }
      }
    }
  }).observe({ entryTypes: ['largest-contentful-paint', 'first-input', 'layout-shift'] });
}

Error Tracking

// utils/error-tracking.js
export function setupErrorTracking() {
  window.addEventListener('error', event => {
    gtag('event', 'exception', {
      description: event.error?.message || 'Unknown error',
      fatal: false
    });
  });
  
  window.addEventListener('unhandledrejection', event => {
    gtag('event', 'exception', {
      description: event.reason?.message || 'Unhandled promise rejection',
      fatal: false
    });
  });
}
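
Both helpers assume a global gtag function from the analytics snippet, so guard for it and initialize them together from a small client-side entry point (the file name and import paths are illustrative):

// src/scripts/monitoring.js (client-side initialization)
import { trackPerformance } from '../utils/performance.js';
import { setupErrorTracking } from '../utils/error-tracking.js';

// Only run in the browser and once the analytics snippet has loaded.
if (typeof window !== 'undefined' && typeof window.gtag === 'function') {
  trackPerformance();
  setupErrorTracking();
}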

Deployment Notifications

// scripts/notify-deployment.js
export async function notifyDeployment(environment, version, status) {
  const webhookUrl = process.env.SLACK_WEBHOOK_URL;
  
  if (!webhookUrl) return;
  
  const message = {
    text: `📚 Documentation deployment ${status}`,
    attachments: [{
      color: status === 'success' ? 'good' : 'danger',
      fields: [
        { title: 'Environment', value: environment, short: true },
        { title: 'Version', value: version, short: true },
        { title: 'Status', value: status, short: true }
      ]
    }]
  };
  
  await fetch(webhookUrl, {
    method: 'POST',
    headers: { 'Content-Type': 'application/json' },
    body: JSON.stringify(message)
  });
}
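
A thin CLI wrapper makes the helper callable from a deploy pipeline, for example node scripts/notify-cli.js production 1.4.2 success (the wrapper name and argument order are assumptions):

// scripts/notify-cli.js (hypothetical CLI wrapper)
import { notifyDeployment } from './notify-deployment.js';

const [environment, version, status] = process.argv.slice(2);
await notifyDeployment(environment, version, status);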

Troubleshooting

Common Deploy Issues

Build failures:

# Clear cache and rebuild
rm -rf node_modules/.cache
rm -rf dist
pnpm install
pnpm build

Memory issues:

# Increase Node.js memory limit
NODE_OPTIONS="--max-old-space-size=4096" pnpm build

Path issues:

// Check base URL configuration
const baseUrl = import.meta.env.BASE_URL || '/';
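
If links or assets 404 only on the deployed site, the base is usually missing from hand-built paths; a small helper avoids that (the helper name is illustrative):

// utils/with-base.js (prefixes paths with the configured base URL)
const base = import.meta.env.BASE_URL || '/';

export function withBase(path) {
  // Avoid double slashes when joining the base and the path.
  return `${base.replace(/\/$/, '')}/${path.replace(/^\//, '')}`;
}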

Debug Deployment

// scripts/debug-deploy.js
import fs from 'fs';

function debugDeployment() {
  console.log('🔍 Deployment Debug Information');
  console.log('================================');
  
  // Environment
  console.log('Environment:', process.env.NODE_ENV);
  console.log('Base URL:', process.env.BASE_URL || 'not set');
  
  // Build output
  const distExists = fs.existsSync('dist');
  console.log('Dist directory exists:', distExists);
  
  if (distExists) {
    const files = fs.readdirSync('dist');
    console.log('Dist files:', files.length);
    console.log('Index.html exists:', files.includes('index.html'));
  }
  
  // Asset manifest
  const manifestPath = 'dist/asset-manifest.json';
  if (fs.existsSync(manifestPath)) {
    const manifest = JSON.parse(fs.readFileSync(manifestPath, 'utf-8'));
    console.log('Asset manifest entries:', Object.keys(manifest).length);
  }
  
  // Package info
  const pkg = JSON.parse(fs.readFileSync('package.json', 'utf-8'));
  console.log('Package version:', pkg.version);
  console.log('Node version:', process.version);
}

debugDeployment();

Next Steps

Ready to deploy your documentation? Start with the hosting option that matches your infrastructure, then layer in the multi-environment, monitoring, and security practices above as your site grows.