diff --git a/.claude/settings.local.json b/.claude/settings.local.json new file mode 100644 index 0000000..815b38b --- /dev/null +++ b/.claude/settings.local.json @@ -0,0 +1,62 @@ +{ + "permissions": { + "allow": [ + "Bash(docker network:*)", + "Bash(docker-compose build:*)", + "Bash(docker compose build:*)", + "Bash(docker compose:*)", + "Bash(docker logs:*)", + "Bash(docker inspect:*)", + "Bash(curl:*)", + "Read(//tmp/**)", + "WebSearch", + "WebFetch(domain:www.npmjs.com)", + "Bash(python3:*)", + "Bash(docker exec:*)", + "Bash(docker cp:*)", + "Bash(docker restart:*)", + "Bash(./scripts/version.sh:*)", + "Bash(docker login:*)", + "Bash(git checkout:*)", + "Bash(git commit:*)", + "Bash(git config:*)", + "Bash(git push:*)", + "Bash(npm run build:*)", + "WebFetch(domain:github.com)", + "WebFetch(domain:raw.githubusercontent.com)", + "Bash(pip3 index:*)", + "Bash(GIT_ASKPASS=/tmp/git-askpass.sh git push:*)", + "Bash(npm run prisma:migrate:*)", + "Bash(npx prisma migrate dev:*)", + "Bash(npx prisma migrate:*)", + "Bash(git add:*)", + "Bash(git remote set-url:*)", + "Bash(ssh-keygen:*)", + "Read(//root/.ssh/**)", + "Bash(GIT_SSH_COMMAND=\"ssh -o StrictHostKeyChecking=accept-new -p 2222\" git push:*)", + "Bash(wget:*)", + "Bash(sudo mv:*)", + "Bash(sudo chmod:*)", + "Bash(npm run dev)", + "Bash(npx tsx:*)", + "Bash(cat:*)", + "Bash(npm test)", + "Bash(docker-compose down:*)", + "Bash(gh pr create:*)", + "Bash(git pull:*)", + "Bash(docker-compose ps:*)", + "Bash(npm test:*)", + "Bash(npm run test:*)", + "Bash(npm install:*)", + "Bash(chmod:*)", + "Bash(docker-compose logs:*)", + "Bash(ACCESS_TOKEN=\"eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJ1c2VySWQiOiJjbWlkdWFtOW4wMDAwaXE1ZHZ4NDkwbGdkIiwiZW1haWwiOiJ0ZXN0QGV4YW1wbGUuY29tIiwicm9sZSI6IlVTRVIiLCJpYXQiOjE3NjQwMzEyNTMsImV4cCI6MTc2NDAzMjE1M30.-meSwJ-PfWX1OEpgnOgLCnz94hMtz_iUgemFDCFdl34\" curl -s http://localhost:3001/api/auth/me -H \"Authorization: Bearer $ACCESS_TOKEN\")", + "Bash(/tmp/login.json)", + "Bash(/tmp/token.txt)", + 
"Bash(TOKEN=\"eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJ1c2VySWQiOiJjbWlkdWFtOW4wMDAwaXE1ZHZ4NDkwbGdkIiwiZW1haWwiOiJ0ZXN0QGV4YW1wbGUuY29tIiwicm9sZSI6IlVTRVIiLCJpYXQiOjE3NjQwNDU2NjUsImV4cCI6MTc2NDA0NjU2NX0.oeV_sunfHAUNfwbm1V_b1rlTPeK_NtfxiBeVo6kIH6M\")", + "Bash(docker ps:*)" + ], + "deny": [], + "ask": [] + } +} diff --git a/.env.deploy.example b/.env.deploy.example new file mode 100644 index 0000000..a0c284a --- /dev/null +++ b/.env.deploy.example @@ -0,0 +1,11 @@ +# Deployment Configuration for Basil +# Copy this file to .env.deploy and fill in your values + +# Docker Registry Configuration +DOCKER_USERNAME=your-docker-username +DOCKER_REGISTRY=docker.io +IMAGE_TAG=latest + +# Webhook Configuration (if using webhook deployment) +WEBHOOK_PORT=9000 +WEBHOOK_SECRET=changeme-to-random-secret diff --git a/.gitea/workflows/ci-cd.yml b/.gitea/workflows/ci-cd.yml new file mode 100644 index 0000000..32dd0c8 --- /dev/null +++ b/.gitea/workflows/ci-cd.yml @@ -0,0 +1,195 @@ +name: CI/CD Pipeline + +on: + push: + branches: + - main + - develop + pull_request: + branches: + - main + - develop + +env: + DOCKER_REGISTRY: docker.io + IMAGE_NAME: basil + +jobs: + test: + name: Run Tests + runs-on: ubuntu-latest + + services: + postgres: + image: postgres:16-alpine + env: + POSTGRES_USER: basil + POSTGRES_PASSWORD: basil + POSTGRES_DB: basil_test + ports: + - 5432:5432 + options: >- + --health-cmd pg_isready + --health-interval 10s + --health-timeout 5s + --health-retries 5 + + steps: + - name: Checkout code + uses: actions/checkout@v4 + + - name: Setup Node.js + uses: actions/setup-node@v4 + with: + node-version: '20' + cache: 'npm' + + - name: Install dependencies + run: npm install + + - name: Build shared package + run: | + cd packages/shared + npm run build + + - name: Generate Prisma Client + run: | + cd packages/api + npm run prisma:generate + + - name: Run database migrations + run: | + cd packages/api + npm run prisma:migrate + env: + DATABASE_URL: 
postgresql://basil:basil@localhost:5432/basil_test?schema=public + + - name: Run unit tests - API + run: | + cd packages/api + npm run test + env: + DATABASE_URL: postgresql://basil:basil@localhost:5432/basil_test?schema=public + NODE_ENV: test + + - name: Run unit tests - Web + run: | + cd packages/web + npm run test + + - name: Run unit tests - Shared + run: | + cd packages/shared + npm run test + + - name: Install Playwright browsers + run: npx playwright install --with-deps + + - name: Build application for E2E tests + run: npm run build + + - name: Run E2E tests + run: npm run test:e2e + env: + DATABASE_URL: postgresql://basil:basil@localhost:5432/basil_test?schema=public + + - name: Upload test results + if: always() + uses: actions/upload-artifact@v4 + with: + name: test-results + path: | + packages/*/coverage/ + playwright-report/ + retention-days: 30 + + build-and-push: + name: Build and Push Docker Images + runs-on: ubuntu-latest + needs: test + if: github.event_name == 'push' && github.ref == 'refs/heads/main' + + steps: + - name: Checkout code + uses: actions/checkout@v4 + + - name: Set up Docker Buildx + uses: docker/setup-buildx-action@v3 + + - name: Log in to Docker Registry + uses: docker/login-action@v3 + with: + registry: ${{ env.DOCKER_REGISTRY }} + username: ${{ secrets.DOCKER_USERNAME }} + password: ${{ secrets.DOCKER_PASSWORD }} + + - name: Extract metadata for API + id: meta-api + uses: docker/metadata-action@v5 + with: + images: ${{ env.DOCKER_REGISTRY }}/${{ secrets.DOCKER_USERNAME }}/${{ env.IMAGE_NAME }}-api + tags: | + type=sha,prefix={{branch}}- + type=raw,value=latest,enable={{is_default_branch}} + type=semver,pattern={{version}} + type=semver,pattern={{major}}.{{minor}} + + - name: Extract metadata for Web + id: meta-web + uses: docker/metadata-action@v5 + with: + images: ${{ env.DOCKER_REGISTRY }}/${{ secrets.DOCKER_USERNAME }}/${{ env.IMAGE_NAME }}-web + tags: | + type=sha,prefix={{branch}}- + 
type=raw,value=latest,enable={{is_default_branch}} + type=semver,pattern={{version}} + type=semver,pattern={{major}}.{{minor}} + + - name: Build and push API image + uses: docker/build-push-action@v5 + with: + context: . + file: packages/api/Dockerfile + push: true + tags: ${{ steps.meta-api.outputs.tags }} + labels: ${{ steps.meta-api.outputs.labels }} + cache-from: type=gha + cache-to: type=gha,mode=max + + - name: Build and push Web image + uses: docker/build-push-action@v5 + with: + context: . + file: packages/web/Dockerfile + push: true + tags: ${{ steps.meta-web.outputs.tags }} + labels: ${{ steps.meta-web.outputs.labels }} + cache-from: type=gha + cache-to: type=gha,mode=max + + - name: Trigger deployment webhook + if: success() + run: | + # The webhook receiver validates X-Webhook-Secret and returns 403 without it. + # Commit message is deliberately NOT interpolated here: untrusted text inside + # a quoted shell string is a script-injection risk. + curl -X POST ${{ secrets.DEPLOY_WEBHOOK_URL }} \ + -H "Content-Type: application/json" \ + -H "X-Webhook-Secret: ${{ secrets.WEBHOOK_SECRET }}" \ + -d '{"branch": "main", "commit": "${{ github.sha }}"}' + + lint: + name: Code Quality + runs-on: ubuntu-latest + + steps: + - name: Checkout code + uses: actions/checkout@v4 + + - name: Setup Node.js + uses: actions/setup-node@v4 + with: + node-version: '20' + cache: 'npm' + + - name: Install dependencies + run: npm install + + - name: Run linter + run: npm run lint diff --git a/.gitignore b/.gitignore index 4dc06ea..0cc06ff 100644 --- a/.gitignore +++ b/.gitignore @@ -21,6 +21,7 @@ dist/ .env.development.local .env.test.local .env.production.local +.env.deploy # Logs logs/ @@ -28,6 +29,9 @@ logs/ npm-debug.log* yarn-debug.log* yarn-error.log* +deploy.log +webhook.log +webhook-error.log # OS .DS_Store @@ -50,6 +54,9 @@ uploads/ public/uploads/ test-uploads/ +# Backups +backups/ + # Docker .docker/ docker-compose.override.yml diff --git a/CLAUDE.md b/CLAUDE.md index 4eeea06..a3692eb 100644 --- a/CLAUDE.md +++ b/CLAUDE.md @@ -47,6 +47,11 @@ npm run build # Lint all packages npm run lint +# Testing +npm test # Run all unit tests +npm run test:e2e # Run E2E tests with Playwright +npm 
run test:e2e:ui # Run E2E tests with Playwright UI + # Docker commands npm run docker:up # Start all services (PostgreSQL, API, web) npm run docker:down # Stop all services @@ -93,6 +98,7 @@ NODE_ENV=development DATABASE_URL=postgresql://basil:basil@localhost:5432/basil?schema=public STORAGE_TYPE=local # or 's3' LOCAL_STORAGE_PATH=./uploads +BACKUP_PATH=./backups CORS_ORIGIN=http://localhost:5173 ``` @@ -104,6 +110,11 @@ S3_ACCESS_KEY_ID=your-key S3_SECRET_ACCESS_KEY=your-secret ``` +For remote PostgreSQL database, update: +``` +DATABASE_URL=postgresql://username:password@remote-host:5432/basil?schema=public +``` + ## Key Features ### Recipe Import from URL @@ -125,6 +136,13 @@ S3_SECRET_ACCESS_KEY=your-secret - S3 storage: Placeholder for AWS SDK implementation - Easy to extend for other storage providers +### Backup & Restore +- Complete data backup to single ZIP file including database and uploaded files +- Backup service in `packages/api/src/services/backup.service.ts` +- REST API for creating, listing, downloading, and restoring backups +- Automatic backup of all recipes, cookbooks, tags, and relationships +- Configurable backup storage location via `BACKUP_PATH` environment variable + ## Adding New Features ### Adding a New API Endpoint @@ -161,6 +179,22 @@ This starts: Persistent volumes: - `postgres_data` - Database storage - `uploads_data` - Uploaded images +- `backups_data` - Backup files + +### Using a Remote Database + +To use a remote PostgreSQL database instead of the local Docker container: + +1. Set the `DATABASE_URL` environment variable to point to your remote database +2. Update `docker-compose.yml` to pass the environment variable or create a `.env` file in the root +3. 
Optionally, remove or comment out the `postgres` service and its dependency in `docker-compose.yml` + +Example `.env` file in project root: +``` +DATABASE_URL=postgresql://username:password@remote-host:5432/basil?schema=public +``` + +The docker-compose.yml is configured to use `${DATABASE_URL:-default}` which will use the environment variable if set, or fall back to the local postgres container. ## API Reference @@ -178,6 +212,13 @@ Persistent volumes: - `search` - Search in title/description - `cuisine`, `category` - Filter by cuisine or category +**Backups:** +- `POST /api/backup` - Create a new backup (returns backup metadata) +- `GET /api/backup` - List all available backups +- `GET /api/backup/:filename` - Download a specific backup file +- `POST /api/backup/restore` - Restore from backup (accepts file upload or existing filename) +- `DELETE /api/backup/:filename` - Delete a backup file + ## Important Implementation Details ### Prisma Relations @@ -200,3 +241,32 @@ Persistent volumes: - Root `package.json` defines npm workspaces - Packages can reference each other (e.g., `@basil/shared`) - Must rebuild shared package when types change for other packages to see updates + +## CI/CD and Deployment + +Basil includes a complete CI/CD pipeline with Gitea Actions for automated testing, building, and deployment. + +**Quick Start:** +- See [CI/CD Setup Guide](docs/CI-CD-SETUP.md) for full documentation +- See [Deployment Quick Start](docs/DEPLOYMENT-QUICK-START.md) for quick reference + +**Pipeline Overview:** +1. **Test Stage**: Runs unit tests (Vitest) and E2E tests (Playwright) +2. **Build Stage**: Builds Docker images for API and Web (main branch only) +3. 
**Deploy Stage**: Pushes images to registry and triggers webhook deployment + +**Deployment Options:** +- **Automatic**: Push to main branch triggers full CI/CD pipeline +- **Manual**: Run `./scripts/manual-deploy.sh` for interactive deployment +- **Webhook**: Systemd service listens for deployment triggers + +**Key Files:** +- `.gitea/workflows/ci-cd.yml` - Main CI/CD workflow +- `scripts/deploy.sh` - Deployment script +- `scripts/webhook-receiver.sh` - Webhook server +- `.env.deploy.example` - Deployment configuration template + +**Required Secrets (Gitea):** +- `DOCKER_USERNAME` - Docker Hub username +- `DOCKER_PASSWORD` - Docker Hub access token +- `DEPLOY_WEBHOOK_URL` - Webhook endpoint for deployments diff --git a/docker-compose.yml b/docker-compose.yml index 84a2b30..a36bc86 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -31,12 +31,15 @@ services: environment: NODE_ENV: production PORT: 3001 - DATABASE_URL: postgresql://basil:basil@postgres:5432/basil?schema=public + # Use DATABASE_URL from .env if set, otherwise default to local postgres + DATABASE_URL: ${DATABASE_URL:-postgresql://basil:basil@postgres:5432/basil?schema=public} STORAGE_TYPE: local LOCAL_STORAGE_PATH: /app/uploads + BACKUP_PATH: /app/backups CORS_ORIGIN: https://basil.pkartchner.com volumes: - uploads_data:/app/uploads + - backups_data:/app/backups networks: - internal - traefik @@ -62,6 +65,7 @@ services: volumes: postgres_data: uploads_data: + backups_data: networks: traefik: diff --git a/docs/BACKUP.md b/docs/BACKUP.md new file mode 100644 index 0000000..5b2042d --- /dev/null +++ b/docs/BACKUP.md @@ -0,0 +1,290 @@ +# Backup & Restore Guide + +This document explains how to use Basil's backup and restore features. 
+ +## Overview + +Basil includes a comprehensive backup system that creates complete snapshots of your recipe data, including: +- All recipes with ingredients, instructions, and metadata +- Recipe images and uploaded files +- Cookbooks and their organization +- Tags and categorization +- All relationships between entities + +Backups are stored as ZIP archives containing: +- `database.json` - Complete database export in JSON format +- `uploads/` - All uploaded images and files + +## Configuration + +### Environment Variables + +Configure the backup location in `packages/api/.env`: + +```bash +BACKUP_PATH=./backups +``` + +In Docker deployments, backups are stored in the `backups_data` volume by default at `/app/backups`. + +## API Endpoints + +### Create Backup + +Creates a new backup of all data and files. + +```bash +POST /api/backup +``` + +**Response:** +```json +{ + "success": true, + "message": "Backup created successfully", + "backup": { + "name": "basil-backup-2025-11-10T12-30-45-123Z.zip", + "path": "/app/backups/basil-backup-2025-11-10T12-30-45-123Z.zip", + "size": 1048576, + "created": "2025-11-10T12:30:45.123Z" + } +} +``` + +### List Backups + +Lists all available backups in the backup directory. + +```bash +GET /api/backup +``` + +**Response:** +```json +{ + "success": true, + "backups": [ + { + "name": "basil-backup-2025-11-10T12-30-45-123Z.zip", + "path": "/app/backups/basil-backup-2025-11-10T12-30-45-123Z.zip", + "size": 1048576, + "created": "2025-11-10T12:30:45.123Z" + } + ] +} +``` + +### Download Backup + +Downloads a specific backup file. + +```bash +GET /api/backup/:filename +``` + +**Example:** +```bash +curl -O http://localhost:3001/api/backup/basil-backup-2025-11-10T12-30-45-123Z.zip +``` + +### Restore Backup + +Restores data from a backup file. **Warning: This will delete all existing data!** + +You can restore in two ways: + +#### 1. 
Upload a backup file + +```bash +POST /api/backup/restore +Content-Type: multipart/form-data + +backup: +``` + +**Example:** +```bash +curl -X POST \ + -F "backup=@basil-backup-2025-11-10T12-30-45-123Z.zip" \ + http://localhost:3001/api/backup/restore +``` + +#### 2. Restore from existing backup in backup directory + +```bash +POST /api/backup/restore +Content-Type: application/json + +{ + "filename": "basil-backup-2025-11-10T12-30-45-123Z.zip" +} +``` + +**Response:** +```json +{ + "success": true, + "message": "Backup restored successfully", + "metadata": { + "version": "1.0", + "timestamp": "2025-11-10T12:30:45.123Z", + "recipeCount": 42, + "cookbookCount": 3, + "tagCount": 15 + } +} +``` + +### Delete Backup + +Deletes a backup file. + +```bash +DELETE /api/backup/:filename +``` + +## Usage Examples + +### Manual Backup via curl + +```bash +# Create a backup +curl -X POST http://localhost:3001/api/backup + +# List available backups +curl http://localhost:3001/api/backup + +# Download a backup +curl -O http://localhost:3001/api/backup/basil-backup-2025-11-10T12-30-45-123Z.zip + +# Restore from uploaded file +curl -X POST \ + -F "backup=@basil-backup-2025-11-10T12-30-45-123Z.zip" \ + http://localhost:3001/api/backup/restore + +# Restore from existing backup +curl -X POST \ + -H "Content-Type: application/json" \ + -d '{"filename": "basil-backup-2025-11-10T12-30-45-123Z.zip"}' \ + http://localhost:3001/api/backup/restore + +# Delete a backup +curl -X DELETE http://localhost:3001/api/backup/basil-backup-2025-11-10T12-30-45-123Z.zip +``` + +### Automated Backups + +You can set up automated backups using cron: + +```bash +# Add to crontab (daily backup at 2 AM) +0 2 * * * curl -X POST http://localhost:3001/api/backup +``` + +For Docker deployments: + +```bash +# Add to host crontab +0 2 * * * docker exec basil-api curl -X POST http://localhost:3001/api/backup +``` + +## Backup Storage + +### Local Development + +Backups are stored in `packages/api/backups/` by 
default. + +### Docker Production + +Backups are stored in the `backups_data` Docker volume, which persists across container restarts. + +To access backups from the host: + +```bash +# Copy backup from container to host +docker cp basil-api:/app/backups/basil-backup-2025-11-10T12-30-45-123Z.zip ./ + +# List backups in container +docker exec basil-api ls -lh /app/backups/ +``` + +### External Storage + +For additional safety, you should copy backups to external storage: + +```bash +# Example: Copy to external drive +docker cp basil-api:/app/backups/ /mnt/external-backup/basil/ + +# Example: Upload to S3 +aws s3 sync /path/to/backups/ s3://my-bucket/basil-backups/ + +# Example: Upload to rsync server +rsync -avz /path/to/backups/ user@backup-server:/backups/basil/ +``` + +## Best Practices + +1. **Regular Backups**: Schedule automatic backups daily or weekly +2. **External Storage**: Copy backups to external storage regularly +3. **Test Restores**: Periodically test backup restoration to ensure backups are valid +4. **Backup Before Updates**: Always create a backup before updating Basil or making major changes +5. 
**Retention Policy**: Keep multiple backup versions (e.g., daily for 7 days, weekly for 4 weeks, monthly for 12 months) + +## Troubleshooting + +### Backup Creation Fails + +**Error: Out of disk space** +- Check available disk space: `df -h` +- Clean up old backups: `DELETE /api/backup/:filename` +- Increase Docker volume size if using Docker + +**Error: Permission denied** +- Ensure the API has write permissions to the backup directory +- In Docker: Check volume permissions + +### Restore Fails + +**Error: Invalid backup file** +- Ensure the backup file is not corrupted +- Try downloading the backup again +- Verify the backup was created with a compatible version + +**Error: Database connection lost** +- Ensure the database is running and accessible +- Check `DATABASE_URL` environment variable +- Verify network connectivity to remote database if applicable + +### Large Backups + +If you have many recipes with large images: +- Backups may take several minutes to create +- Increase request timeout if using a reverse proxy +- Consider using external storage (S3) for images to reduce backup size + +## Security Considerations + +1. **Access Control**: Backup endpoints are not authenticated by default. Consider adding authentication middleware in production. +2. **Sensitive Data**: Backups contain all recipe data. Store backup files securely. +3. **Download URLs**: Backup download endpoints validate file paths to prevent directory traversal attacks. +4. **File Size Limits**: Restore endpoint limits upload size to 1GB by default. + +## Migration Between Environments + +Backups can be used to migrate data between environments: + +```bash +# 1. Create backup on source environment +curl -X POST http://source-server:3001/api/backup + +# 2. Download backup +curl -O http://source-server:3001/api/backup/basil-backup-2025-11-10T12-30-45-123Z.zip + +# 3. 
Upload to target environment +curl -X POST \ + -F "backup=@basil-backup-2025-11-10T12-30-45-123Z.zip" \ + http://target-server:3001/api/backup/restore +``` + +**Note:** When migrating, ensure both environments use compatible versions of Basil. diff --git a/docs/CI-CD-SETUP.md b/docs/CI-CD-SETUP.md new file mode 100644 index 0000000..5f5e6bb --- /dev/null +++ b/docs/CI-CD-SETUP.md @@ -0,0 +1,503 @@ +# CI/CD Setup Guide for Basil + +This document describes the complete CI/CD pipeline for the Basil recipe manager, including Gitea Actions workflows, Docker image building, and automated deployments. + +## Table of Contents + +1. [Overview](#overview) +2. [Prerequisites](#prerequisites) +3. [Gitea Actions Workflow](#gitea-actions-workflow) +4. [Docker Registry Setup](#docker-registry-setup) +5. [Deployment Methods](#deployment-methods) +6. [Configuration](#configuration) +7. [Troubleshooting](#troubleshooting) + +## Overview + +The CI/CD pipeline consists of three main stages: + +1. **Testing**: Runs unit tests (Vitest) and E2E tests (Playwright) +2. **Build & Push**: Builds Docker images and pushes to registry (on main branch only) +3. 
**Deploy**: Pulls new images and restarts containers on the production server + +``` +┌─────────────┐ ┌──────────────┐ ┌────────────────┐ +│ Git Push │────▶│ Gitea Actions│────▶│ Docker Registry│ +│ (main) │ │ - Test │ │ - API image │ +└─────────────┘ │ - Build │ │ - Web image │ + │ - Push │ └────────────────┘ + └──────────────┘ │ + │ │ + ▼ ▼ + ┌──────────────┐ ┌────────────────┐ + │ Webhook │────▶│ Production │ + │ Trigger │ │ Server │ + └──────────────┘ └────────────────┘ +``` + +## Prerequisites + +### For CI/CD (Gitea) + +- Gitea instance with Actions enabled +- Docker Hub account (or other registry) +- Node.js 20+ for testing + +### For Deployment Server + +- Docker and Docker Compose installed +- Bash shell +- `webhook` package (for automatic deployments) +- Network access to pull from Docker registry + +## Gitea Actions Workflow + +The workflow is defined in `.gitea/workflows/ci-cd.yml` and runs on: + +- Push to `main` or `develop` branches +- Pull requests targeting `main` or `develop` + +### Jobs + +#### 1. Test Job + +Runs all tests with a PostgreSQL service container: + +- **Unit Tests**: API, Web, and Shared packages using Vitest +- **E2E Tests**: Full application tests using Playwright +- **Database**: Temporary PostgreSQL instance for testing + +**Test Commands:** +```bash +# Run all tests locally +npm test + +# Run E2E tests +npm run test:e2e + +# Run with coverage +npm run test:coverage +``` + +#### 2. Lint Job + +Runs ESLint on all packages to ensure code quality: + +```bash +npm run lint +``` + +#### 3. Build and Push Job + +Only runs on push to `main` branch: + +1. Builds Docker images for API and Web +2. Tags with multiple tags (latest, SHA, semver) +3. Pushes to Docker registry +4. Triggers deployment webhook + +**Image Names:** +- API: `{registry}/{username}/basil-api:{tag}` +- Web: `{registry}/{username}/basil-web:{tag}` + +## Docker Registry Setup + +### 1. Create Docker Hub Account + +If using Docker Hub: +1. 
Sign up at https://hub.docker.com +2. Create an access token in Account Settings → Security + +### 2. Configure Gitea Secrets + +Add the following secrets to your Gitea repository: + +**Settings → Secrets → Actions** + +| Secret Name | Description | Example | +|-------------|-------------|---------| +| `DOCKER_USERNAME` | Docker Hub username | `myusername` | +| `DOCKER_PASSWORD` | Docker Hub access token | `dckr_pat_xxxxx...` | +| `DEPLOY_WEBHOOK_URL` | Webhook endpoint URL | `http://server.com:9000/hooks/basil-deploy` | + +### 3. Alternative Registries + +To use a different registry (e.g., GitHub Container Registry, GitLab): + +1. Update `DOCKER_REGISTRY` in `.gitea/workflows/ci-cd.yml`: + ```yaml + env: + DOCKER_REGISTRY: ghcr.io # or registry.gitlab.com + ``` + +2. Update login credentials accordingly + +## Deployment Methods + +### Method 1: Automatic Webhook Deployment (Recommended) + +Uses a webhook server to automatically deploy when images are pushed. + +#### Setup Steps + +1. **Copy environment template:** + ```bash + cp .env.deploy.example .env.deploy + ``` + +2. **Edit `.env.deploy`:** + ```bash + DOCKER_USERNAME=your-docker-username + DOCKER_REGISTRY=docker.io + IMAGE_TAG=latest + WEBHOOK_PORT=9000 + WEBHOOK_SECRET=your-random-secret-here + ``` + +3. **Install webhook package:** + ```bash + # Ubuntu/Debian + sudo apt-get install webhook + + # RHEL/CentOS + sudo yum install webhook + ``` + +4. **Install systemd service:** + ```bash + # Copy service file + sudo cp scripts/basil-webhook.service /etc/systemd/system/ + + # Edit service file with your settings + sudo nano /etc/systemd/system/basil-webhook.service + + # Enable and start service + sudo systemctl enable basil-webhook + sudo systemctl start basil-webhook + + # Check status + sudo systemctl status basil-webhook + ``` + +5. **Configure firewall (if needed):** + ```bash + sudo ufw allow 9000/tcp + ``` + +6. 
**Add webhook URL to Gitea secrets:** + ``` + DEPLOY_WEBHOOK_URL=http://your-server.com:9000/hooks/basil-deploy + ``` + + Add this header when calling the webhook: + ``` + X-Webhook-Secret: your-random-secret-here + ``` + +#### Manual Webhook Trigger + +Test webhook manually: +```bash +curl -X POST http://localhost:9000/hooks/basil-deploy \ + -H "Content-Type: application/json" \ + -H "X-Webhook-Secret: your-secret" \ + -d '{"branch": "main", "commit": "abc123"}' +``` + +### Method 2: Manual Deployment + +For manual deployments without webhooks: + +```bash +# Interactive deployment +./scripts/manual-deploy.sh + +# Or with environment variables +DOCKER_USERNAME=myuser \ +DOCKER_REGISTRY=docker.io \ +IMAGE_TAG=latest \ +./scripts/deploy.sh +``` + +The deployment script will: +1. Check Docker is running +2. Create a pre-deployment backup +3. Pull latest images from registry +4. Update docker-compose configuration +5. Restart containers +6. Perform health checks +7. Clean up old images + +### Method 3: Cron-based Deployment + +Set up a cron job for scheduled deployments: + +```bash +# Edit crontab +crontab -e + +# Add line to deploy every night at 2 AM +0 2 * * * cd /srv/docker-compose/basil && DOCKER_USERNAME=myuser ./scripts/deploy.sh >> /var/log/basil-deploy.log 2>&1 +``` + +## Configuration + +### Environment Variables + +**For Deployment Scripts:** + +| Variable | Required | Default | Description | +|----------|----------|---------|-------------| +| `DOCKER_USERNAME` | Yes | - | Docker registry username | +| `DOCKER_REGISTRY` | No | `docker.io` | Docker registry URL | +| `IMAGE_TAG` | No | `latest` | Image tag to pull | +| `WEBHOOK_PORT` | No | `9000` | Port for webhook server | +| `WEBHOOK_SECRET` | No | `changeme` | Secret for webhook authentication | + +**For Application:** + +See `packages/api/.env.example` for application configuration. 
+ +### Docker Compose Override + +The deployment script automatically creates `docker-compose.override.yml` to use registry images instead of building from source: + +```yaml +services: + api: + image: docker.io/username/basil-api:latest + web: + image: docker.io/username/basil-web:latest +``` + +This file is in `.gitignore` and is regenerated on each deployment. + +## Monitoring and Logs + +### View Deployment Logs + +```bash +# Deployment log +tail -f deploy.log + +# Webhook log +tail -f webhook.log + +# Container logs +docker-compose logs -f api +docker-compose logs -f web +``` + +### Check Deployment Status + +```bash +# Check running containers +docker-compose ps + +# Check API health +curl http://localhost:3001/health + +# View recent deployments +grep "Deployment completed" deploy.log +``` + +### Systemd Service Logs + +```bash +# View webhook service logs +sudo journalctl -u basil-webhook -f + +# View recent errors +sudo journalctl -u basil-webhook --since "1 hour ago" -p err +``` + +## Backup and Rollback + +### Automatic Backups + +The deployment script automatically creates a backup before deploying: + +```bash +backups/pre-deploy-YYYYMMDD-HHMMSS.zip +``` + +### Manual Backup + +```bash +# Via API (returns JSON metadata; download the ZIP afterwards via GET /api/backup/:filename) +curl -X POST http://localhost:3001/api/backup + +# Via Docker +docker exec basil-api npm run backup +``` + +### Rollback to Previous Version + +```bash +# Pull specific tag +DOCKER_USERNAME=myuser IMAGE_TAG=main-abc123 ./scripts/deploy.sh + +# Or restore from backup +curl -X POST http://localhost:3001/api/backup/restore \ + -F "backup=@backups/pre-deploy-20250101-020000.zip" +``` + +## Troubleshooting + +### Tests Failing in CI + +**Check test logs in Gitea:** +1. Go to Actions tab in repository +2. Click on failed workflow run +3. 
Expand failed job to see detailed logs + +**Common issues:** +- Database connection: Ensure PostgreSQL service is healthy +- Missing dependencies: Check `npm install` step +- Environment variables: Verify test environment configuration + +### Images Not Pushing + +**Check Docker credentials:** +```bash +# Test Docker login +docker login -u $DOCKER_USERNAME -p $DOCKER_PASSWORD + +# Verify secrets in Gitea +# Settings → Secrets → Actions +``` + +**Check registry permissions:** +- Ensure token has write permissions +- Verify repository exists on Docker Hub + +### Webhook Not Triggering + +**Check webhook service:** +```bash +# Service status +sudo systemctl status basil-webhook + +# Check if port is listening +sudo netstat -tlnp | grep 9000 + +# Test webhook endpoint +curl -I http://localhost:9000/hooks/basil-deploy +``` + +**Check firewall:** +```bash +# Ubuntu/Debian +sudo ufw status + +# RHEL/CentOS +sudo firewall-cmd --list-all +``` + +**Verify secret header:** +```bash +# Wrong secret returns 403 +curl -X POST http://localhost:9000/hooks/basil-deploy +# Should return 403 Forbidden + +# Correct secret triggers deployment +curl -X POST http://localhost:9000/hooks/basil-deploy \ + -H "X-Webhook-Secret: your-secret" +# Should return "Deployment triggered successfully" +``` + +### Deployment Fails + +**Check Docker:** +```bash +# Docker running? +docker info + +# Disk space? +df -h + +# View deployment log +tail -100 deploy.log +``` + +**Check images:** +```bash +# Can we pull images? 
+docker pull $DOCKER_REGISTRY/$DOCKER_USERNAME/basil-api:latest + +# Check image tags +docker images | grep basil +``` + +**Health check failures:** +```bash +# Check API logs +docker-compose logs api + +# Check database connection +docker-compose exec api npx prisma studio + +# Test API manually +curl http://localhost:3001/health +``` + +### Container Won't Start + +**Check logs:** +```bash +docker-compose logs api +docker-compose logs web +``` + +**Common issues:** +- Database migrations: Check Prisma migration logs +- Environment variables: Verify `.env` files +- Port conflicts: Check if ports 3001/5173 are available +- Volume permissions: Check uploads/backups directory permissions + +### Rollback Failed + +**Manual rollback:** +```bash +# Stop containers +docker-compose down + +# Remove override file +rm docker-compose.override.yml + +# Restore from backup +unzip backups/pre-deploy-YYYYMMDD-HHMMSS.zip -d restore-temp/ + +# Manually restore database and files +# (See backup documentation) + +# Start containers +docker-compose up -d +``` + +## Security Considerations + +1. **Webhook Secret**: Use a strong, random secret (32+ characters) +2. **Firewall**: Restrict webhook port to known IPs if possible +3. **HTTPS**: Use HTTPS for webhook endpoint in production +4. **Secrets**: Never commit secrets to git +5. **Backups**: Store backups securely with encryption +6. **Docker Registry**: Use private registries for sensitive applications + +## Best Practices + +1. **Test Locally**: Always test changes locally before pushing +2. **Review PRs**: Use pull requests for code review +3. **Monitor Logs**: Regularly check deployment and application logs +4. **Backup First**: Always backup before major deployments +5. **Tag Releases**: Use semantic versioning for releases +6. **Health Checks**: Monitor application health after deployment +7. 
**Rollback Plan**: Know how to rollback quickly if needed + +## Additional Resources + +- [Gitea Actions Documentation](https://docs.gitea.io/en-us/actions/) +- [Docker Documentation](https://docs.docker.com/) +- [Webhook Documentation](https://github.com/adnanh/webhook) +- [Basil Project Documentation](../CLAUDE.md) diff --git a/docs/DEPLOYMENT-QUICK-START.md b/docs/DEPLOYMENT-QUICK-START.md new file mode 100644 index 0000000..84209b3 --- /dev/null +++ b/docs/DEPLOYMENT-QUICK-START.md @@ -0,0 +1,200 @@ +# Basil Deployment Quick Start + +Quick reference for deploying Basil with CI/CD. + +## Initial Setup (One-time) + +### 1. Gitea Secrets Configuration + +Add these secrets in Gitea → Settings → Secrets → Actions: + +``` +DOCKER_USERNAME=your-dockerhub-username +DOCKER_PASSWORD=dckr_pat_xxxxxxxxxxxxx +DEPLOY_WEBHOOK_URL=http://your-server.com:9000/hooks/basil-deploy +``` + +### 2. Server Setup + +```bash +# 1. Clone repository +cd /srv/docker-compose +git clone https://your-gitea.com/user/basil.git +cd basil + +# 2. Create deployment configuration +cp .env.deploy.example .env.deploy +nano .env.deploy # Edit with your values + +# 3. Install webhook (Ubuntu/Debian) +sudo apt-get install webhook + +# 4. Install systemd service +sudo cp scripts/basil-webhook.service /etc/systemd/system/ +sudo nano /etc/systemd/system/basil-webhook.service # Edit paths and env vars +sudo systemctl enable basil-webhook +sudo systemctl start basil-webhook + +# 5. Configure firewall +sudo ufw allow 9000/tcp +``` + +## Daily Usage + +### Automatic Deployment (Recommended) + +Just push to main branch: + +```bash +git add . +git commit -m "feat: add new feature" +git push origin main +``` + +The CI/CD pipeline will: +1. ✓ Run all tests +2. ✓ Build Docker images +3. ✓ Push to registry +4. ✓ Trigger webhook +5. 
✓ Deploy automatically + +### Manual Deployment + +```bash +cd /srv/docker-compose/basil +./scripts/manual-deploy.sh +``` + +## Quick Commands + +```bash +# View deployment logs +tail -f deploy.log + +# View container logs +docker-compose logs -f api +docker-compose logs -f web + +# Check deployment status +docker-compose ps + +# Restart services +docker-compose restart + +# Pull latest code and rebuild (without registry) +git pull +docker-compose up -d --build + +# Create manual backup +curl -X POST http://localhost:3001/api/backup -o backup.zip + +# Check webhook status +sudo systemctl status basil-webhook + +# Test webhook manually +curl -X POST http://localhost:9000/hooks/basil-deploy \ + -H "X-Webhook-Secret: your-secret" \ + -H "Content-Type: application/json" \ + -d '{"branch": "main"}' +``` + +## Rollback + +```bash +# Deploy specific version +DOCKER_USERNAME=myuser IMAGE_TAG=main-abc123 ./scripts/deploy.sh + +# Or restore from backup +cd backups +ls -lt # Find backup file +curl -X POST http://localhost:3001/api/backup/restore \ + -F "file=@pre-deploy-20250101-020000.zip" +``` + +## Troubleshooting One-Liners + +```bash +# Deployment failed? Check logs +tail -50 deploy.log + +# Webhook not working? Check service +sudo journalctl -u basil-webhook -n 50 + +# Containers not starting? Check Docker logs +docker-compose logs --tail=50 + +# Out of disk space? +docker system prune -a +df -h + +# Database issues? +docker-compose exec api npx prisma studio + +# Pull images manually +docker pull docker.io/$DOCKER_USERNAME/basil-api:latest +docker pull docker.io/$DOCKER_USERNAME/basil-web:latest +``` + +## Workflow Diagram + +``` +┌──────────────┐ +│ Developer │ +│ git push │ +└──────┬───────┘ + │ + ▼ +┌──────────────────────────────┐ +│ Gitea Actions │ +│ 1. Run tests (unit + E2E) │ +│ 2. Build Docker images │ +│ 3. Push to Docker Hub │ +│ 4. Call webhook │ +└──────┬───────────────────────┘ + │ + ▼ +┌──────────────────────────────┐ +│ Production Server │ +│ 1. 
Webhook receives call │ +│ 2. Run deploy.sh script │ +│ - Create backup │ +│ - Pull new images │ +│ - Restart containers │ +│ - Health check │ +└──────────────────────────────┘ +``` + +## File Locations + +``` +/srv/docker-compose/basil/ +├── .gitea/workflows/ci-cd.yml # CI/CD workflow +├── scripts/ +│ ├── deploy.sh # Main deployment script +│ ├── manual-deploy.sh # Interactive deployment +│ ├── webhook-receiver.sh # Webhook server +│ └── basil-webhook.service # Systemd service file +├── .env.deploy # Deployment config (gitignored) +├── deploy.log # Deployment logs +├── webhook.log # Webhook logs +└── backups/ # Automatic backups + └── pre-deploy-*.zip +``` + +## Environment Variables + +**Required:** +- `DOCKER_USERNAME` - Your Docker Hub username + +**Optional:** +- `DOCKER_REGISTRY` - Default: `docker.io` +- `IMAGE_TAG` - Default: `latest` +- `WEBHOOK_PORT` - Default: `9000` +- `WEBHOOK_SECRET` - Default: `changeme` (change this!) + +## Support + +For detailed documentation, see: +- [Full CI/CD Setup Guide](./CI-CD-SETUP.md) +- [Project Documentation](../CLAUDE.md) +- [Gitea Issues](https://your-gitea.com/user/basil/issues) diff --git a/docs/REMOTE_DATABASE.md b/docs/REMOTE_DATABASE.md new file mode 100644 index 0000000..829999c --- /dev/null +++ b/docs/REMOTE_DATABASE.md @@ -0,0 +1,388 @@ +# Remote Database Configuration + +This guide explains how to configure Basil to use a remote PostgreSQL database instead of the local Docker container. + +## Overview + +By default, Basil uses a local PostgreSQL database running in Docker. However, you can configure it to use a remote database service such as: +- AWS RDS (Relational Database Service) +- Google Cloud SQL +- Azure Database for PostgreSQL +- Digital Ocean Managed Databases +- Heroku Postgres +- Self-hosted PostgreSQL server + +## Configuration + +### 1. 
Update Environment Variables
+
+Edit `packages/api/.env` to point to your remote database:
+
+```bash
+DATABASE_URL="postgresql://username:password@remote-host:5432/basil?schema=public"
+```
+
+### Connection String Format
+
+```
+postgresql://[username]:[password]@[host]:[port]/[database]?[options]
+```
+
+**Components:**
+- `username` - Database user
+- `password` - Database password
+- `host` - Database hostname or IP
+- `port` - Database port (default: 5432)
+- `database` - Database name (usually "basil")
+- `options` - Additional connection options (e.g., `sslmode=require`)
+
+### 2. Provider-Specific Examples
+
+#### AWS RDS
+
+```bash
+DATABASE_URL="postgresql://basiladmin:yourpassword@basil-db.abc123.us-east-1.rds.amazonaws.com:5432/basil?schema=public"
+```
+
+Additional SSL configuration may be required:
+```bash
+DATABASE_URL="postgresql://basiladmin:yourpassword@basil-db.abc123.us-east-1.rds.amazonaws.com:5432/basil?schema=public&sslmode=require"
+```
+
+#### Google Cloud SQL
+
+```bash
+DATABASE_URL="postgresql://postgres:yourpassword@127.0.0.1:5432/basil?host=/cloudsql/project:region:instance"
+```
+
+Or with public IP:
+```bash
+DATABASE_URL="postgresql://postgres:yourpassword@35.123.45.67:5432/basil?schema=public&sslmode=require"
+```
+
+#### Azure Database for PostgreSQL
+
+```bash
+DATABASE_URL="postgresql://basiladmin%40servername:yourpassword@servername.postgres.database.azure.com:5432/basil?schema=public&sslmode=require"
+```
+
+Note: Azure usernames take the form `user@servername`; the `@` inside the username must be URL-encoded as `%40` so it is not mistaken for the host separator.
+
+#### Digital Ocean Managed Database
+
+```bash
+DATABASE_URL="postgresql://doadmin:yourpassword@basil-db-do-user-123456-0.b.db.ondigitalocean.com:25060/basil?sslmode=require"
+```
+
+#### Heroku Postgres
+
+Heroku provides a `DATABASE_URL` automatically:
+```bash
+DATABASE_URL="postgres://user:password@ec2-123-45-67-89.compute-1.amazonaws.com:5432/d1234abcd5678ef"
+```
+
+Note: Heroku uses `postgres://` instead of `postgresql://`, but both work with Prisma.
+ +#### Self-Hosted PostgreSQL + +```bash +DATABASE_URL="postgresql://basil:password@192.168.1.100:5432/basil?schema=public" +``` + +For SSL connections: +```bash +DATABASE_URL="postgresql://basil:password@postgres.example.com:5432/basil?schema=public&sslmode=require" +``` + +### 3. Docker Configuration + +When using Docker with a remote database, you need to update the configuration: + +#### Option A: Use Environment Variable (Recommended) + +Create a `.env` file in the project root: + +```bash +DATABASE_URL=postgresql://username:password@remote-host:5432/basil?schema=public +``` + +The `docker-compose.yml` is already configured to use this: +```yaml +environment: + DATABASE_URL: ${DATABASE_URL:-postgresql://basil:basil@postgres:5432/basil?schema=public} +``` + +#### Option B: Edit docker-compose.yml Directly + +Edit the `api` service in `docker-compose.yml`: + +```yaml +api: + environment: + DATABASE_URL: postgresql://username:password@remote-host:5432/basil?schema=public +``` + +#### Option C: Disable Local PostgreSQL + +If using only a remote database, you can disable the local postgres service: + +1. Comment out or remove the `postgres` service in `docker-compose.yml` +2. Remove the `depends_on` condition from the `api` service +3. Remove the unused `postgres_data` volume + +```yaml +services: + # postgres: + # image: postgres:16-alpine + # ... (commented out) + + api: + # Remove depends_on if not using local postgres + # depends_on: + # postgres: + # condition: service_healthy +``` + +### 4. Initialize Remote Database + +Before first use, you need to initialize the database schema: + +```bash +# From your development machine +cd packages/api +npm run prisma:migrate + +# Or from Docker +docker exec basil-api npx prisma migrate deploy +``` + +This will create all necessary tables and relationships. + +## SSL/TLS Configuration + +### Enabling SSL + +Most managed database services require SSL connections. 
Add `sslmode` to your connection string: + +```bash +# Require SSL but don't verify certificate +DATABASE_URL="postgresql://user:password@host:5432/basil?sslmode=require" + +# Require SSL and verify certificate +DATABASE_URL="postgresql://user:password@host:5432/basil?sslmode=verify-full" + +# Disable SSL (only for development/testing) +DATABASE_URL="postgresql://user:password@host:5432/basil?sslmode=disable" +``` + +### SSL Modes + +- `disable` - No SSL (not recommended for production) +- `allow` - Try SSL, fall back to non-SSL +- `prefer` - Try SSL first (default) +- `require` - Require SSL, don't verify certificate +- `verify-ca` - Require SSL, verify certificate authority +- `verify-full` - Require SSL, verify certificate and hostname + +### Custom SSL Certificates + +For custom CA certificates, you may need to configure additional options: + +```bash +DATABASE_URL="postgresql://user:password@host:5432/basil?sslmode=require&sslcert=/path/to/client-cert.pem&sslkey=/path/to/client-key.pem&sslrootcert=/path/to/ca-cert.pem" +``` + +## Connection Pooling + +For production deployments with high traffic, consider using connection pooling. + +### PgBouncer + +Example configuration with PgBouncer: + +```bash +DATABASE_URL="postgresql://user:password@pgbouncer-host:6432/basil?schema=public" +``` + +### Prisma Data Platform (Prisma Accelerate) + +For advanced connection pooling and caching: + +```bash +DATABASE_URL="prisma://accelerate.prisma-data.net/?api_key=your-api-key" +``` + +## Security Best Practices + +1. **Use Environment Variables**: Never commit credentials to git +2. **Strong Passwords**: Use generated passwords with high entropy +3. **Restrict Access**: Configure database firewall rules to only allow your application servers +4. **SSL/TLS**: Always use SSL in production +5. **Read-Only Users**: Consider using read-only database users for analytics +6. **Regular Backups**: Configure automated backups on your database service +7. 
**Rotate Credentials**: Periodically rotate database passwords
+
+## Performance Considerations
+
+### Network Latency
+
+Remote databases add network latency. Consider:
+- Choose a database region close to your application servers
+- Use connection pooling to reduce connection overhead
+- Enable query optimization in Prisma
+
+### Connection Limits
+
+Managed databases often have connection limits:
+- Check your plan's connection limit
+- Configure appropriate connection pool size
+- Use PgBouncer for connection multiplexing
+
+### Prisma Configuration
+
+Optimize Prisma connection settings in `packages/api/prisma/schema.prisma`:
+
+```prisma
+datasource db {
+  provider = "postgresql"
+  url      = env("DATABASE_URL")
+
+  // Optional: Configure connection pool
+  // relationMode = "prisma"
+}
+```
+
+## Monitoring
+
+### Connection Status
+
+Check database connectivity:
+
+```bash
+# Health check endpoint
+curl http://localhost:3001/health
+
+# Test database connection with Prisma (read-only; does not modify the schema)
+docker exec basil-api npx prisma migrate status
+```
+
+### Query Performance
+
+Monitor slow queries:
+- Enable PostgreSQL query logging
+- Use database monitoring tools (CloudWatch, Datadog, etc.)
+- Analyze with `EXPLAIN ANALYZE` for slow queries
+
+## Troubleshooting
+
+### Connection Refused
+
+**Error: `Connection refused`**
+
+Possible causes:
+- Incorrect host or port
+- Database server not running
+- Firewall blocking connection
+- Security group not allowing your IP
+
+Solutions:
+- Verify database hostname and port
+- Check database server status
+- Update firewall rules to allow your application's IP
+- Verify security group rules (AWS, Azure, GCP)
+
+### Authentication Failed
+
+**Error: `Password authentication failed`**
+
+Solutions:
+- Double-check username and password
+- Ensure special characters in password are URL-encoded
+- Verify user has permission to access the database
+- Check if IP allowlist includes your server
+
+### SSL Required
+
+**Error: `SSL required` or `no pg_hba.conf entry`**
+
+Solutions:
+- Add `sslmode=require` to connection string
+- Configure database to accept non-SSL connections (dev only)
+- Install required SSL certificates
+
+### Connection Timeout
+
+**Error: `Connection timeout`**
+
+Solutions:
+- Verify network connectivity
+- Check if database server is accepting connections
+- Increase timeout in Prisma configuration
+- Verify DNS resolution of database hostname
+
+### Too Many Connections
+
+**Error: `Too many connections`**
+
+Solutions:
+- Reduce connection pool size
+- Use PgBouncer for connection pooling
+- Upgrade database plan for more connections
+- Check for connection leaks in application code
+
+## Migration from Local to Remote
+
+To migrate from local Docker database to remote:
+
+1. **Create a backup of local data:**
+   ```bash
+   curl -X POST http://localhost:3001/api/backup
+   ```
+
+2. **Initialize remote database:**
+   ```bash
+   # Update DATABASE_URL to point to remote
+   cd packages/api
+   npx prisma migrate deploy
+   ```
+
+3. **Restore backup to remote database:**
+   ```bash
+   curl -X POST \
+     -F "backup=@basil-backup-2025-11-10.zip" \
+     http://localhost:3001/api/backup/restore
+   ```
+
+4.
**Verify data migration:** + - Check recipe count: `curl http://localhost:3001/api/recipes` + - Test recipe access and functionality + +5. **Update production configuration:** + - Update environment variables in production + - Restart application services + +## Cost Optimization + +### Managed Database Pricing + +Compare costs across providers: +- **AWS RDS**: Pay per instance hour + storage +- **Google Cloud SQL**: Pay per vCPU hour + storage +- **Azure Database**: Pay per vCore + storage +- **Digital Ocean**: Fixed monthly pricing by plan +- **Heroku**: Free tier available, then fixed monthly pricing + +### Tips to Reduce Costs + +1. **Right-size your instance**: Start small, scale as needed +2. **Use reserved instances**: AWS/Azure offer discounts for 1-3 year commitments +3. **Enable auto-scaling**: Scale down during low traffic periods +4. **Optimize storage**: Use standard storage instead of provisioned IOPS if possible +5. **Regular backups**: Use built-in backup services (cheaper than manual snapshots) + +## Support + +For database-specific configuration issues, consult: +- [AWS RDS Documentation](https://docs.aws.amazon.com/rds/) +- [Google Cloud SQL Documentation](https://cloud.google.com/sql/docs) +- [Azure Database Documentation](https://docs.microsoft.com/en-us/azure/postgresql/) +- [Digital Ocean Documentation](https://docs.digitalocean.com/products/databases/) +- [Prisma Database Connectors](https://www.prisma.io/docs/concepts/database-connectors/postgresql) diff --git a/packages/api/src/routes/backup.routes.ts b/packages/api/src/routes/backup.routes.ts new file mode 100644 index 0000000..3614bd5 --- /dev/null +++ b/packages/api/src/routes/backup.routes.ts @@ -0,0 +1,258 @@ +import express, { Request, Response } from 'express'; +import path from 'path'; +import fs from 'fs/promises'; +import { createBackup, restoreBackup, listBackups, deleteBackup } from '../services/backup.service'; +import multer from 'multer'; + +const router = express.Router(); + 
+// Configure multer for backup file uploads +const upload = multer({ + dest: '/tmp/basil-restore/', + limits: { + fileSize: 1024 * 1024 * 1024, // 1GB max + }, +}); + +// Get backup directory from env or use default +const getBackupDir = (): string => { + return process.env.BACKUP_PATH || path.join(__dirname, '../../../backups'); +}; + +/** + * POST /api/backup + * Creates a new backup of all data and files + */ +router.post('/', async (req: Request, res: Response) => { + try { + const backupDir = getBackupDir(); + await fs.mkdir(backupDir, { recursive: true }); + + const backupPath = await createBackup(backupDir); + const fileName = path.basename(backupPath); + const stats = await fs.stat(backupPath); + + res.json({ + success: true, + message: 'Backup created successfully', + backup: { + name: fileName, + path: backupPath, + size: stats.size, + created: stats.birthtime, + }, + }); + } catch (error) { + console.error('Backup creation error:', error); + res.status(500).json({ + success: false, + error: 'Failed to create backup', + message: error instanceof Error ? error.message : 'Unknown error', + }); + } +}); + +/** + * GET /api/backup + * Lists all available backups + */ +router.get('/', async (req: Request, res: Response) => { + try { + const backupDir = getBackupDir(); + const backups = await listBackups(backupDir); + + res.json({ + success: true, + backups, + }); + } catch (error) { + console.error('Error listing backups:', error); + res.status(500).json({ + success: false, + error: 'Failed to list backups', + message: error instanceof Error ? 
error.message : 'Unknown error', + }); + } +}); + +/** + * GET /api/backup/:filename + * Downloads a specific backup file + */ +router.get('/:filename', async (req: Request, res: Response) => { + try { + const { filename } = req.params; + const backupDir = getBackupDir(); + const backupPath = path.join(backupDir, filename); + + // Security check: ensure the file is within the backup directory + const resolvedPath = path.resolve(backupPath); + const resolvedBackupDir = path.resolve(backupDir); + if (!resolvedPath.startsWith(resolvedBackupDir)) { + return res.status(403).json({ + success: false, + error: 'Access denied', + }); + } + + // Check if file exists + try { + await fs.access(backupPath); + } catch { + return res.status(404).json({ + success: false, + error: 'Backup file not found', + }); + } + + // Send file + res.download(backupPath, filename, (err) => { + if (err) { + console.error('Error downloading backup:', err); + if (!res.headersSent) { + res.status(500).json({ + success: false, + error: 'Failed to download backup', + }); + } + } + }); + } catch (error) { + console.error('Error downloading backup:', error); + res.status(500).json({ + success: false, + error: 'Failed to download backup', + message: error instanceof Error ? 
error.message : 'Unknown error', + }); + } +}); + +/** + * POST /api/backup/restore + * Restores data from a backup file + * Accepts either: + * - multipart/form-data with 'backup' file field + * - JSON with 'filename' field (for existing backup in backup directory) + */ +router.post('/restore', upload.single('backup'), async (req: Request, res: Response) => { + let backupPath: string | null = null; + let isTemporaryFile = false; + + try { + const backupDir = getBackupDir(); + + // Check if file was uploaded or filename provided + if (req.file) { + backupPath = req.file.path; + isTemporaryFile = true; + } else if (req.body.filename) { + backupPath = path.join(backupDir, req.body.filename); + + // Security check + const resolvedPath = path.resolve(backupPath); + const resolvedBackupDir = path.resolve(backupDir); + if (!resolvedPath.startsWith(resolvedBackupDir)) { + return res.status(403).json({ + success: false, + error: 'Access denied', + }); + } + + // Check if file exists + try { + await fs.access(backupPath); + } catch { + return res.status(404).json({ + success: false, + error: 'Backup file not found', + }); + } + } else { + return res.status(400).json({ + success: false, + error: 'No backup file provided. Either upload a file or specify a filename.', + }); + } + + // Perform restore + const metadata = await restoreBackup(backupPath, backupDir); + + // Clean up temporary file if it was uploaded + if (isTemporaryFile && backupPath) { + try { + await fs.unlink(backupPath); + } catch (err) { + console.warn('Failed to clean up temporary file:', err); + } + } + + res.json({ + success: true, + message: 'Backup restored successfully', + metadata, + }); + } catch (error) { + console.error('Restore error:', error); + + // Clean up temporary file on error + if (isTemporaryFile && backupPath) { + try { + await fs.unlink(backupPath); + } catch {} + } + + res.status(500).json({ + success: false, + error: 'Failed to restore backup', + message: error instanceof Error ? 
error.message : 'Unknown error', + }); + } +}); + +/** + * DELETE /api/backup/:filename + * Deletes a backup file + */ +router.delete('/:filename', async (req: Request, res: Response) => { + try { + const { filename } = req.params; + const backupDir = getBackupDir(); + const backupPath = path.join(backupDir, filename); + + // Security check + const resolvedPath = path.resolve(backupPath); + const resolvedBackupDir = path.resolve(backupDir); + if (!resolvedPath.startsWith(resolvedBackupDir)) { + return res.status(403).json({ + success: false, + error: 'Access denied', + }); + } + + // Check if file exists + try { + await fs.access(backupPath); + } catch { + return res.status(404).json({ + success: false, + error: 'Backup file not found', + }); + } + + await deleteBackup(backupPath); + + res.json({ + success: true, + message: 'Backup deleted successfully', + }); + } catch (error) { + console.error('Error deleting backup:', error); + res.status(500).json({ + success: false, + error: 'Failed to delete backup', + message: error instanceof Error ? 
error.message : 'Unknown error', + }); + } +}); + +export default router; diff --git a/packages/api/src/services/backup.service.ts b/packages/api/src/services/backup.service.ts new file mode 100644 index 0000000..c9683c9 --- /dev/null +++ b/packages/api/src/services/backup.service.ts @@ -0,0 +1,437 @@ +import { PrismaClient } from '@prisma/client'; +import fs from 'fs/promises'; +import path from 'path'; +import archiver from 'archiver'; +import { createWriteStream, createReadStream } from 'fs'; +import extract from 'extract-zip'; + +const prisma = new PrismaClient(); + +export interface BackupMetadata { + version: string; + timestamp: string; + recipeCount: number; + cookbookCount: number; + tagCount: number; +} + +export interface BackupData { + metadata: BackupMetadata; + recipes: any[]; + cookbooks: any[]; + tags: any[]; + recipeTags: any[]; + cookbookRecipes: any[]; +} + +/** + * Creates a complete backup of all database data and uploaded files + * Returns the path to the backup file + */ +export async function createBackup(backupDir: string): Promise { + const timestamp = new Date().toISOString().replace(/[:.]/g, '-'); + const backupName = `basil-backup-${timestamp}`; + const tempDir = path.join(backupDir, 'temp', backupName); + const backupFilePath = path.join(backupDir, `${backupName}.zip`); + + try { + // Create temp directory for backup assembly + await fs.mkdir(tempDir, { recursive: true }); + + // Export all database data + const backupData = await exportDatabaseData(); + + // Write database backup to JSON file + const dbBackupPath = path.join(tempDir, 'database.json'); + await fs.writeFile(dbBackupPath, JSON.stringify(backupData, null, 2)); + + // Copy uploaded files + const uploadsPath = process.env.LOCAL_STORAGE_PATH || path.join(__dirname, '../../../uploads'); + const backupUploadsPath = path.join(tempDir, 'uploads'); + + try { + await fs.access(uploadsPath); + await copyDirectory(uploadsPath, backupUploadsPath); + } catch (error) { + 
console.warn('No uploads directory found, skipping file backup'); + } + + // Create ZIP archive + await createZipArchive(tempDir, backupFilePath); + + // Clean up temp directory + await fs.rm(tempDir, { recursive: true, force: true }); + + return backupFilePath; + } catch (error) { + // Clean up on error + try { + await fs.rm(tempDir, { recursive: true, force: true }); + } catch {} + throw error; + } +} + +/** + * Exports all database data to a structured object + */ +async function exportDatabaseData(): Promise { + // Fetch all data with relations + const recipes = await prisma.recipe.findMany({ + include: { + sections: true, + ingredients: { + include: { + instructions: true, + }, + }, + instructions: { + include: { + ingredients: true, + }, + }, + images: true, + tags: true, + cookbooks: true, + }, + }); + + const cookbooks = await prisma.cookbook.findMany({ + include: { + recipes: true, + }, + }); + + const tags = await prisma.tag.findMany({ + include: { + recipes: true, + }, + }); + + const recipeTags = await prisma.recipeTag.findMany(); + const cookbookRecipes = await prisma.cookbookRecipe.findMany(); + + const metadata: BackupMetadata = { + version: '1.0', + timestamp: new Date().toISOString(), + recipeCount: recipes.length, + cookbookCount: cookbooks.length, + tagCount: tags.length, + }; + + return { + metadata, + recipes, + cookbooks, + tags, + recipeTags, + cookbookRecipes, + }; +} + +/** + * Restores database and files from a backup file + */ +export async function restoreBackup(backupFilePath: string, backupDir: string): Promise { + const timestamp = new Date().toISOString().replace(/[:.]/g, '-'); + const tempDir = path.join(backupDir, 'temp', `restore-${timestamp}`); + + try { + // Extract backup archive + await fs.mkdir(tempDir, { recursive: true }); + await extract(backupFilePath, { dir: tempDir }); + + // Read and parse database backup + const dbBackupPath = path.join(tempDir, 'database.json'); + const backupData: BackupData = JSON.parse(await 
fs.readFile(dbBackupPath, 'utf-8')); + + // Clear existing data (in reverse order of dependencies) + await clearDatabase(); + + // Restore data (in order of dependencies) + await restoreDatabaseData(backupData); + + // Restore uploaded files + const backupUploadsPath = path.join(tempDir, 'uploads'); + const uploadsPath = process.env.LOCAL_STORAGE_PATH || path.join(__dirname, '../../../uploads'); + + try { + await fs.access(backupUploadsPath); + // Clear existing uploads + try { + await fs.rm(uploadsPath, { recursive: true, force: true }); + } catch {} + await fs.mkdir(uploadsPath, { recursive: true }); + // Restore uploads + await copyDirectory(backupUploadsPath, uploadsPath); + } catch (error) { + console.warn('No uploads in backup, skipping file restore'); + } + + // Clean up temp directory + await fs.rm(tempDir, { recursive: true, force: true }); + + return backupData.metadata; + } catch (error) { + // Clean up on error + try { + await fs.rm(tempDir, { recursive: true, force: true }); + } catch {} + throw error; + } +} + +/** + * Clears all data from the database + */ +async function clearDatabase(): Promise { + // Delete in order to respect foreign key constraints + await prisma.cookbookRecipe.deleteMany(); + await prisma.recipeTag.deleteMany(); + await prisma.ingredientInstructionMapping.deleteMany(); + await prisma.recipeImage.deleteMany(); + await prisma.instruction.deleteMany(); + await prisma.ingredient.deleteMany(); + await prisma.recipeSection.deleteMany(); + await prisma.recipe.deleteMany(); + await prisma.cookbook.deleteMany(); + await prisma.tag.deleteMany(); +} + +/** + * Restores database data from backup + */ +async function restoreDatabaseData(backupData: BackupData): Promise { + // Restore tags first (no dependencies) + for (const tag of backupData.tags) { + await prisma.tag.create({ + data: { + id: tag.id, + name: tag.name, + }, + }); + } + + // Restore cookbooks (no dependencies) + for (const cookbook of backupData.cookbooks) { + await 
prisma.cookbook.create({ + data: { + id: cookbook.id, + name: cookbook.name, + description: cookbook.description, + coverImageUrl: cookbook.coverImageUrl, + autoFilterCategories: cookbook.autoFilterCategories, + autoFilterTags: cookbook.autoFilterTags, + createdAt: new Date(cookbook.createdAt), + updatedAt: new Date(cookbook.updatedAt), + }, + }); + } + + // Restore recipes with all nested relations + for (const recipe of backupData.recipes) { + await prisma.recipe.create({ + data: { + id: recipe.id, + title: recipe.title, + description: recipe.description, + prepTime: recipe.prepTime, + cookTime: recipe.cookTime, + totalTime: recipe.totalTime, + servings: recipe.servings, + imageUrl: recipe.imageUrl, + sourceUrl: recipe.sourceUrl, + author: recipe.author, + cuisine: recipe.cuisine, + categories: recipe.categories, + rating: recipe.rating, + createdAt: new Date(recipe.createdAt), + updatedAt: new Date(recipe.updatedAt), + sections: { + create: recipe.sections?.map((section: any) => ({ + id: section.id, + name: section.name, + order: section.order, + timing: section.timing, + })) || [], + }, + ingredients: { + create: recipe.ingredients + ?.filter((ing: any) => !ing.sectionId) + .map((ing: any) => ({ + id: ing.id, + name: ing.name, + amount: ing.amount, + unit: ing.unit, + notes: ing.notes, + order: ing.order, + })) || [], + }, + instructions: { + create: recipe.instructions + ?.filter((inst: any) => !inst.sectionId) + .map((inst: any) => ({ + id: inst.id, + step: inst.step, + text: inst.text, + imageUrl: inst.imageUrl, + timing: inst.timing, + })) || [], + }, + images: { + create: recipe.images?.map((img: any) => ({ + id: img.id, + url: img.url, + order: img.order, + })) || [], + }, + }, + }); + + // Restore section ingredients and instructions + for (const section of recipe.sections || []) { + const sectionIngredients = recipe.ingredients?.filter((ing: any) => ing.sectionId === section.id) || []; + const sectionInstructions = recipe.instructions?.filter((inst: 
any) => inst.sectionId === section.id) || []; + + for (const ing of sectionIngredients) { + await prisma.ingredient.create({ + data: { + id: ing.id, + recipeId: recipe.id, + sectionId: section.id, + name: ing.name, + amount: ing.amount, + unit: ing.unit, + notes: ing.notes, + order: ing.order, + }, + }); + } + + for (const inst of sectionInstructions) { + await prisma.instruction.create({ + data: { + id: inst.id, + recipeId: recipe.id, + sectionId: section.id, + step: inst.step, + text: inst.text, + imageUrl: inst.imageUrl, + timing: inst.timing, + }, + }); + } + } + } + + // Restore ingredient-instruction mappings + for (const recipe of backupData.recipes) { + for (const instruction of recipe.instructions || []) { + for (const mapping of instruction.ingredients || []) { + await prisma.ingredientInstructionMapping.create({ + data: { + id: mapping.id, + ingredientId: mapping.ingredientId, + instructionId: mapping.instructionId, + order: mapping.order, + }, + }); + } + } + } + + // Restore recipe tags + for (const recipeTag of backupData.recipeTags) { + await prisma.recipeTag.create({ + data: { + recipeId: recipeTag.recipeId, + tagId: recipeTag.tagId, + }, + }); + } + + // Restore cookbook recipes + for (const cookbookRecipe of backupData.cookbookRecipes) { + await prisma.cookbookRecipe.create({ + data: { + id: cookbookRecipe.id, + cookbookId: cookbookRecipe.cookbookId, + recipeId: cookbookRecipe.recipeId, + addedAt: new Date(cookbookRecipe.addedAt), + }, + }); + } +} + +/** + * Creates a ZIP archive from a directory + */ +async function createZipArchive(sourceDir: string, outputPath: string): Promise { + return new Promise((resolve, reject) => { + const output = createWriteStream(outputPath); + const archive = archiver('zip', { zlib: { level: 9 } }); + + output.on('close', () => resolve()); + archive.on('error', (err) => reject(err)); + + archive.pipe(output); + archive.directory(sourceDir, false); + archive.finalize(); + }); +} + +/** + * Recursively copies a 
directory + */ +async function copyDirectory(source: string, destination: string): Promise { + await fs.mkdir(destination, { recursive: true }); + const entries = await fs.readdir(source, { withFileTypes: true }); + + for (const entry of entries) { + const srcPath = path.join(source, entry.name); + const destPath = path.join(destination, entry.name); + + if (entry.isDirectory()) { + await copyDirectory(srcPath, destPath); + } else { + await fs.copyFile(srcPath, destPath); + } + } +} + +/** + * Lists all available backups in the backup directory + */ +export async function listBackups(backupDir: string): Promise> { + try { + await fs.mkdir(backupDir, { recursive: true }); + const files = await fs.readdir(backupDir); + const backups = []; + + for (const file of files) { + if (file.startsWith('basil-backup-') && file.endsWith('.zip')) { + const filePath = path.join(backupDir, file); + const stats = await fs.stat(filePath); + backups.push({ + name: file, + path: filePath, + size: stats.size, + created: stats.birthtime, + }); + } + } + + // Sort by creation date, newest first + return backups.sort((a, b) => b.created.getTime() - a.created.getTime()); + } catch (error) { + console.error('Error listing backups:', error); + return []; + } +} + +/** + * Deletes a backup file + */ +export async function deleteBackup(backupFilePath: string): Promise { + await fs.unlink(backupFilePath); +} diff --git a/packages/web/package.json b/packages/web/package.json index 5544bd6..fc0f1ad 100644 --- a/packages/web/package.json +++ b/packages/web/package.json @@ -13,32 +13,36 @@ "test:coverage": "vitest run --coverage", "lint": "eslint . 
--ext ts,tsx" }, - "keywords": ["basil", "web"], + "keywords": [ + "basil", + "web" + ], "license": "MIT", "dependencies": { "@basil/shared": "^1.0.0", + "@hello-pangea/dnd": "^18.0.1", + "axios": "^1.6.5", "react": "^18.2.0", "react-dom": "^18.2.0", - "react-router-dom": "^6.21.1", - "axios": "^1.6.5" + "react-router-dom": "^6.21.1" }, "devDependencies": { + "@testing-library/jest-dom": "^6.2.0", + "@testing-library/react": "^14.1.2", + "@testing-library/user-event": "^14.5.2", "@types/react": "^18.2.47", "@types/react-dom": "^18.2.18", "@typescript-eslint/eslint-plugin": "^6.17.0", "@typescript-eslint/parser": "^6.17.0", "@vitejs/plugin-react": "^4.2.1", + "@vitest/coverage-v8": "^1.2.0", + "@vitest/ui": "^1.2.0", "eslint": "^8.56.0", "eslint-plugin-react-hooks": "^4.6.0", "eslint-plugin-react-refresh": "^0.4.5", + "jsdom": "^23.2.0", "typescript": "^5.3.3", "vite": "^5.0.10", - "vitest": "^1.2.0", - "@vitest/ui": "^1.2.0", - "@vitest/coverage-v8": "^1.2.0", - "@testing-library/react": "^14.1.2", - "@testing-library/jest-dom": "^6.2.0", - "@testing-library/user-event": "^14.5.2", - "jsdom": "^23.2.0" + "vitest": "^1.2.0" } } diff --git a/packages/web/src/pages/CookingMode.tsx b/packages/web/src/pages/CookingMode.tsx index 4a6ff09..3d4cee3 100644 --- a/packages/web/src/pages/CookingMode.tsx +++ b/packages/web/src/pages/CookingMode.tsx @@ -122,6 +122,13 @@ function CookingMode() { } }; + const scaleServings = (multiplier: number) => { + if (recipe?.servings) { + const newServings = Math.round(recipe.servings * multiplier); + setCurrentServings(newServings > 0 ? newServings : 1); + } + }; + const getScaledIngredientText = (ingredient: Ingredient): string => { let ingredientStr = ''; if (ingredient.amount && ingredient.unit) { @@ -226,13 +233,29 @@ function CookingMode() {
{recipe.servings && currentServings !== null && (
- - Servings: {currentServings} - +
+ + Servings: {currentServings} + +
+
+ + + + +
)} diff --git a/packages/web/src/pages/RecipeDetail.tsx b/packages/web/src/pages/RecipeDetail.tsx index b5e92f4..2dce410 100644 --- a/packages/web/src/pages/RecipeDetail.tsx +++ b/packages/web/src/pages/RecipeDetail.tsx @@ -53,6 +53,13 @@ function RecipeDetail() { setCurrentServings(recipe?.servings || null); }; + const scaleServings = (multiplier: number) => { + if (recipe?.servings) { + const newServings = Math.round(recipe.servings * multiplier); + setCurrentServings(newServings > 0 ? newServings : 1); + } + }; + const handleDelete = async () => { if (!id || !confirm('Are you sure you want to delete this recipe?')) { return; @@ -140,18 +147,34 @@ function RecipeDetail() { {recipe.totalTime && Total: {recipe.totalTime} min} {recipe.servings && currentServings !== null && (
- - Servings: {currentServings} - - {currentServings !== recipe.servings && ( - - )} + Servings: {currentServings} + + {currentServings !== recipe.servings && ( + + )} +
+
+ + + + +
)} diff --git a/packages/web/src/pages/RecipeForm.tsx b/packages/web/src/pages/RecipeForm.tsx index 36c0bb3..88f6a0f 100644 --- a/packages/web/src/pages/RecipeForm.tsx +++ b/packages/web/src/pages/RecipeForm.tsx @@ -1,6 +1,7 @@ import { useState } from 'react'; import { Recipe, RecipeSection, Ingredient, Instruction } from '@basil/shared'; import { recipesApi } from '../services/api'; +import { DragDropContext, Droppable, Draggable, DropResult } from '@hello-pangea/dnd'; interface RecipeFormProps { initialRecipe?: Partial; @@ -147,6 +148,20 @@ function RecipeForm({ initialRecipe, onSubmit, onCancel }: RecipeFormProps) { setSections(newSections); }; + const reorderSectionInstructions = (sectionIndex: number, result: DropResult) => { + if (!result.destination) return; + + const newSections = [...sections]; + const items = Array.from(newSections[sectionIndex].instructions); + const [reorderedItem] = items.splice(result.source.index, 1); + items.splice(result.destination.index, 0, reorderedItem); + + // Update step numbers + const updatedItems = items.map((item, index) => ({ ...item, step: index + 1 })); + newSections[sectionIndex].instructions = updatedItems; + setSections(newSections); + }; + // Simple mode ingredient management const addIngredient = () => { setIngredients([ @@ -185,6 +200,18 @@ function RecipeForm({ initialRecipe, onSubmit, onCancel }: RecipeFormProps) { setInstructions(newInstructions); }; + const reorderInstructions = (result: DropResult) => { + if (!result.destination) return; + + const items = Array.from(instructions); + const [reorderedItem] = items.splice(result.source.index, 1); + items.splice(result.destination.index, 0, reorderedItem); + + // Update step numbers + const updatedItems = items.map((item, index) => ({ ...item, step: index + 1 })); + setInstructions(updatedItems); + }; + const handleImageUpload = async (e: React.ChangeEvent) => { const file = e.target.files?.[0]; if (!file || !initialRecipe?.id) return; @@ -573,49 +600,77 @@ 
function RecipeForm({ initialRecipe, onSubmit, onCancel }: RecipeFormProps) { {/* Section Instructions */}
Instructions
- {section.instructions.map((instruction, instructionIndex) => ( -
-
{instruction.step}
-
- - updateSectionInstruction( - sectionIndex, - instructionIndex, - 'timing', - e.target.value - ) - } - placeholder="Timing (optional, e.g., 8:00am)" - className="instruction-timing-input" - /> -