Merge pull request 'feat: add CI/CD pipeline, backup system, and deployment automation' (#7) from feature/ci-cd-backup-deployment into main
Some checks failed
CI/CD Pipeline / Run Tests (push) Has been cancelled
CI/CD Pipeline / Build and Push Docker Images (push) Has been cancelled
CI/CD Pipeline / Code Quality (push) Has been cancelled
CI Pipeline / Lint Code (push) Has been cancelled
CI Pipeline / Test API Package (push) Has been cancelled
CI Pipeline / Test Web Package (push) Has been cancelled
CI Pipeline / Test Shared Package (push) Has been cancelled
CI Pipeline / Build All Packages (push) Has been cancelled
CI Pipeline / Generate Coverage Report (push) Has been cancelled
Docker Build & Deploy / Build Docker Images (push) Has been cancelled
Docker Build & Deploy / Push Docker Images (push) Has been cancelled
Docker Build & Deploy / Deploy to Staging (push) Has been cancelled
Docker Build & Deploy / Deploy to Production (push) Has been cancelled
E2E Tests / End-to-End Tests (push) Has been cancelled
E2E Tests / E2E Tests (Mobile) (push) Has been cancelled
Security Scanning / NPM Audit (push) Has been cancelled
Security Scanning / Dependency License Check (push) Has been cancelled
Security Scanning / Code Quality Scan (push) Has been cancelled
Security Scanning / Docker Image Security (push) Has been cancelled
Security Scanning / Security Summary (push) Has been cancelled
Some checks failed
CI/CD Pipeline / Run Tests (push) Has been cancelled
CI/CD Pipeline / Build and Push Docker Images (push) Has been cancelled
CI/CD Pipeline / Code Quality (push) Has been cancelled
CI Pipeline / Lint Code (push) Has been cancelled
CI Pipeline / Test API Package (push) Has been cancelled
CI Pipeline / Test Web Package (push) Has been cancelled
CI Pipeline / Test Shared Package (push) Has been cancelled
CI Pipeline / Build All Packages (push) Has been cancelled
CI Pipeline / Generate Coverage Report (push) Has been cancelled
Docker Build & Deploy / Build Docker Images (push) Has been cancelled
Docker Build & Deploy / Push Docker Images (push) Has been cancelled
Docker Build & Deploy / Deploy to Staging (push) Has been cancelled
Docker Build & Deploy / Deploy to Production (push) Has been cancelled
E2E Tests / End-to-End Tests (push) Has been cancelled
E2E Tests / E2E Tests (Mobile) (push) Has been cancelled
Security Scanning / NPM Audit (push) Has been cancelled
Security Scanning / Dependency License Check (push) Has been cancelled
Security Scanning / Code Quality Scan (push) Has been cancelled
Security Scanning / Docker Image Security (push) Has been cancelled
Security Scanning / Security Summary (push) Has been cancelled
Reviewed-on: #7
This commit was merged in pull request #7.
This commit is contained in:
62
.claude/settings.local.json
Normal file
62
.claude/settings.local.json
Normal file
@@ -0,0 +1,62 @@
|
||||
{
|
||||
"permissions": {
|
||||
"allow": [
|
||||
"Bash(docker network:*)",
|
||||
"Bash(docker-compose build:*)",
|
||||
"Bash(docker compose build:*)",
|
||||
"Bash(docker compose:*)",
|
||||
"Bash(docker logs:*)",
|
||||
"Bash(docker inspect:*)",
|
||||
"Bash(curl:*)",
|
||||
"Read(//tmp/**)",
|
||||
"WebSearch",
|
||||
"WebFetch(domain:www.npmjs.com)",
|
||||
"Bash(python3:*)",
|
||||
"Bash(docker exec:*)",
|
||||
"Bash(docker cp:*)",
|
||||
"Bash(docker restart:*)",
|
||||
"Bash(./scripts/version.sh:*)",
|
||||
"Bash(docker login:*)",
|
||||
"Bash(git checkout:*)",
|
||||
"Bash(git commit:*)",
|
||||
"Bash(git config:*)",
|
||||
"Bash(git push:*)",
|
||||
"Bash(npm run build:*)",
|
||||
"WebFetch(domain:github.com)",
|
||||
"WebFetch(domain:raw.githubusercontent.com)",
|
||||
"Bash(pip3 index:*)",
|
||||
"Bash(GIT_ASKPASS=/tmp/git-askpass.sh git push:*)",
|
||||
"Bash(npm run prisma:migrate:*)",
|
||||
"Bash(npx prisma migrate dev:*)",
|
||||
"Bash(npx prisma migrate:*)",
|
||||
"Bash(git add:*)",
|
||||
"Bash(git remote set-url:*)",
|
||||
"Bash(ssh-keygen:*)",
|
||||
"Read(//root/.ssh/**)",
|
||||
"Bash(GIT_SSH_COMMAND=\"ssh -o StrictHostKeyChecking=accept-new -p 2222\" git push:*)",
|
||||
"Bash(wget:*)",
|
||||
"Bash(sudo mv:*)",
|
||||
"Bash(sudo chmod:*)",
|
||||
"Bash(npm run dev)",
|
||||
"Bash(npx tsx:*)",
|
||||
"Bash(cat:*)",
|
||||
"Bash(npm test)",
|
||||
"Bash(docker-compose down:*)",
|
||||
"Bash(gh pr create:*)",
|
||||
"Bash(git pull:*)",
|
||||
"Bash(docker-compose ps:*)",
|
||||
"Bash(npm test:*)",
|
||||
"Bash(npm run test:*)",
|
||||
"Bash(npm install:*)",
|
||||
"Bash(chmod:*)",
|
||||
"Bash(docker-compose logs:*)",
|
||||
"Bash(ACCESS_TOKEN=\"eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJ1c2VySWQiOiJjbWlkdWFtOW4wMDAwaXE1ZHZ4NDkwbGdkIiwiZW1haWwiOiJ0ZXN0QGV4YW1wbGUuY29tIiwicm9sZSI6IlVTRVIiLCJpYXQiOjE3NjQwMzEyNTMsImV4cCI6MTc2NDAzMjE1M30.-meSwJ-PfWX1OEpgnOgLCnz94hMtz_iUgemFDCFdl34\" curl -s http://localhost:3001/api/auth/me -H \"Authorization: Bearer $ACCESS_TOKEN\")",
|
||||
"Bash(/tmp/login.json)",
|
||||
"Bash(/tmp/token.txt)",
|
||||
"Bash(TOKEN=\"eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJ1c2VySWQiOiJjbWlkdWFtOW4wMDAwaXE1ZHZ4NDkwbGdkIiwiZW1haWwiOiJ0ZXN0QGV4YW1wbGUuY29tIiwicm9sZSI6IlVTRVIiLCJpYXQiOjE3NjQwNDU2NjUsImV4cCI6MTc2NDA0NjU2NX0.oeV_sunfHAUNfwbm1V_b1rlTPeK_NtfxiBeVo6kIH6M\")",
|
||||
"Bash(docker ps:*)"
|
||||
],
|
||||
"deny": [],
|
||||
"ask": []
|
||||
}
|
||||
}
|
||||
11
.env.deploy.example
Normal file
11
.env.deploy.example
Normal file
@@ -0,0 +1,11 @@
|
||||
# Deployment Configuration for Basil
|
||||
# Copy this file to .env.deploy and fill in your values
|
||||
|
||||
# Docker Registry Configuration
|
||||
DOCKER_USERNAME=your-docker-username
|
||||
DOCKER_REGISTRY=docker.io
|
||||
IMAGE_TAG=latest
|
||||
|
||||
# Webhook Configuration (if using webhook deployment)
|
||||
WEBHOOK_PORT=9000
|
||||
WEBHOOK_SECRET=changeme-to-random-secret
|
||||
195
.gitea/workflows/ci-cd.yml
Normal file
195
.gitea/workflows/ci-cd.yml
Normal file
@@ -0,0 +1,195 @@
|
||||
name: CI/CD Pipeline
|
||||
|
||||
on:
|
||||
push:
|
||||
branches:
|
||||
- main
|
||||
- develop
|
||||
pull_request:
|
||||
branches:
|
||||
- main
|
||||
- develop
|
||||
|
||||
env:
|
||||
DOCKER_REGISTRY: docker.io
|
||||
IMAGE_NAME: basil
|
||||
|
||||
jobs:
|
||||
test:
|
||||
name: Run Tests
|
||||
runs-on: ubuntu-latest
|
||||
|
||||
services:
|
||||
postgres:
|
||||
image: postgres:16-alpine
|
||||
env:
|
||||
POSTGRES_USER: basil
|
||||
POSTGRES_PASSWORD: basil
|
||||
POSTGRES_DB: basil_test
|
||||
ports:
|
||||
- 5432:5432
|
||||
options: >-
|
||||
--health-cmd pg_isready
|
||||
--health-interval 10s
|
||||
--health-timeout 5s
|
||||
--health-retries 5
|
||||
|
||||
steps:
|
||||
- name: Checkout code
|
||||
uses: actions/checkout@v4
|
||||
|
||||
- name: Setup Node.js
|
||||
uses: actions/setup-node@v4
|
||||
with:
|
||||
node-version: '20'
|
||||
cache: 'npm'
|
||||
|
||||
- name: Install dependencies
|
||||
run: npm install
|
||||
|
||||
- name: Build shared package
|
||||
run: |
|
||||
cd packages/shared
|
||||
npm run build
|
||||
|
||||
- name: Generate Prisma Client
|
||||
run: |
|
||||
cd packages/api
|
||||
npm run prisma:generate
|
||||
|
||||
- name: Run database migrations
|
||||
run: |
|
||||
cd packages/api
|
||||
npm run prisma:migrate
|
||||
env:
|
||||
DATABASE_URL: postgresql://basil:basil@localhost:5432/basil_test?schema=public
|
||||
|
||||
- name: Run unit tests - API
|
||||
run: |
|
||||
cd packages/api
|
||||
npm run test
|
||||
env:
|
||||
DATABASE_URL: postgresql://basil:basil@localhost:5432/basil_test?schema=public
|
||||
NODE_ENV: test
|
||||
|
||||
- name: Run unit tests - Web
|
||||
run: |
|
||||
cd packages/web
|
||||
npm run test
|
||||
|
||||
- name: Run unit tests - Shared
|
||||
run: |
|
||||
cd packages/shared
|
||||
npm run test
|
||||
|
||||
- name: Install Playwright browsers
|
||||
run: npx playwright install --with-deps
|
||||
|
||||
- name: Build application for E2E tests
|
||||
run: npm run build
|
||||
|
||||
- name: Run E2E tests
|
||||
run: npm run test:e2e
|
||||
env:
|
||||
DATABASE_URL: postgresql://basil:basil@localhost:5432/basil_test?schema=public
|
||||
|
||||
- name: Upload test results
|
||||
if: always()
|
||||
uses: actions/upload-artifact@v4
|
||||
with:
|
||||
name: test-results
|
||||
path: |
|
||||
packages/*/coverage/
|
||||
playwright-report/
|
||||
retention-days: 30
|
||||
|
||||
build-and-push:
|
||||
name: Build and Push Docker Images
|
||||
runs-on: ubuntu-latest
|
||||
needs: test
|
||||
if: github.event_name == 'push' && github.ref == 'refs/heads/main'
|
||||
|
||||
steps:
|
||||
- name: Checkout code
|
||||
uses: actions/checkout@v4
|
||||
|
||||
- name: Set up Docker Buildx
|
||||
uses: docker/setup-buildx-action@v3
|
||||
|
||||
- name: Log in to Docker Registry
|
||||
uses: docker/login-action@v3
|
||||
with:
|
||||
registry: ${{ env.DOCKER_REGISTRY }}
|
||||
username: ${{ secrets.DOCKER_USERNAME }}
|
||||
password: ${{ secrets.DOCKER_PASSWORD }}
|
||||
|
||||
- name: Extract metadata for API
|
||||
id: meta-api
|
||||
uses: docker/metadata-action@v5
|
||||
with:
|
||||
images: ${{ env.DOCKER_REGISTRY }}/${{ secrets.DOCKER_USERNAME }}/${{ env.IMAGE_NAME }}-api
|
||||
tags: |
|
||||
type=sha,prefix={{branch}}-
|
||||
type=raw,value=latest,enable={{is_default_branch}}
|
||||
type=semver,pattern={{version}}
|
||||
type=semver,pattern={{major}}.{{minor}}
|
||||
|
||||
- name: Extract metadata for Web
|
||||
id: meta-web
|
||||
uses: docker/metadata-action@v5
|
||||
with:
|
||||
images: ${{ env.DOCKER_REGISTRY }}/${{ secrets.DOCKER_USERNAME }}/${{ env.IMAGE_NAME }}-web
|
||||
tags: |
|
||||
type=sha,prefix={{branch}}-
|
||||
type=raw,value=latest,enable={{is_default_branch}}
|
||||
type=semver,pattern={{version}}
|
||||
type=semver,pattern={{major}}.{{minor}}
|
||||
|
||||
- name: Build and push API image
|
||||
uses: docker/build-push-action@v5
|
||||
with:
|
||||
context: .
|
||||
file: packages/api/Dockerfile
|
||||
push: true
|
||||
tags: ${{ steps.meta-api.outputs.tags }}
|
||||
labels: ${{ steps.meta-api.outputs.labels }}
|
||||
cache-from: type=gha
|
||||
cache-to: type=gha,mode=max
|
||||
|
||||
- name: Build and push Web image
|
||||
uses: docker/build-push-action@v5
|
||||
with:
|
||||
context: .
|
||||
file: packages/web/Dockerfile
|
||||
push: true
|
||||
tags: ${{ steps.meta-web.outputs.tags }}
|
||||
labels: ${{ steps.meta-web.outputs.labels }}
|
||||
cache-from: type=gha
|
||||
cache-to: type=gha,mode=max
|
||||
|
||||
- name: Trigger deployment webhook
|
||||
if: success()
|
||||
run: |
|
||||
curl -X POST ${{ secrets.DEPLOY_WEBHOOK_URL }} \
|
||||
-H "Content-Type: application/json" \
|
||||
-d '{"branch": "main", "commit": "${{ github.sha }}", "message": "${{ github.event.head_commit.message }}"}'
|
||||
|
||||
lint:
|
||||
name: Code Quality
|
||||
runs-on: ubuntu-latest
|
||||
|
||||
steps:
|
||||
- name: Checkout code
|
||||
uses: actions/checkout@v4
|
||||
|
||||
- name: Setup Node.js
|
||||
uses: actions/setup-node@v4
|
||||
with:
|
||||
node-version: '20'
|
||||
cache: 'npm'
|
||||
|
||||
- name: Install dependencies
|
||||
run: npm install
|
||||
|
||||
- name: Run linter
|
||||
run: npm run lint
|
||||
7
.gitignore
vendored
7
.gitignore
vendored
@@ -21,6 +21,7 @@ dist/
|
||||
.env.development.local
|
||||
.env.test.local
|
||||
.env.production.local
|
||||
.env.deploy
|
||||
|
||||
# Logs
|
||||
logs/
|
||||
@@ -28,6 +29,9 @@ logs/
|
||||
npm-debug.log*
|
||||
yarn-debug.log*
|
||||
yarn-error.log*
|
||||
deploy.log
|
||||
webhook.log
|
||||
webhook-error.log
|
||||
|
||||
# OS
|
||||
.DS_Store
|
||||
@@ -50,6 +54,9 @@ uploads/
|
||||
public/uploads/
|
||||
test-uploads/
|
||||
|
||||
# Backups
|
||||
backups/
|
||||
|
||||
# Docker
|
||||
.docker/
|
||||
docker-compose.override.yml
|
||||
|
||||
70
CLAUDE.md
70
CLAUDE.md
@@ -47,6 +47,11 @@ npm run build
|
||||
# Lint all packages
|
||||
npm run lint
|
||||
|
||||
# Testing
|
||||
npm test # Run all unit tests
|
||||
npm run test:e2e # Run E2E tests with Playwright
|
||||
npm run test:e2e:ui # Run E2E tests with Playwright UI
|
||||
|
||||
# Docker commands
|
||||
npm run docker:up # Start all services (PostgreSQL, API, web)
|
||||
npm run docker:down # Stop all services
|
||||
@@ -93,6 +98,7 @@ NODE_ENV=development
|
||||
DATABASE_URL=postgresql://basil:basil@localhost:5432/basil?schema=public
|
||||
STORAGE_TYPE=local # or 's3'
|
||||
LOCAL_STORAGE_PATH=./uploads
|
||||
BACKUP_PATH=./backups
|
||||
CORS_ORIGIN=http://localhost:5173
|
||||
```
|
||||
|
||||
@@ -104,6 +110,11 @@ S3_ACCESS_KEY_ID=your-key
|
||||
S3_SECRET_ACCESS_KEY=your-secret
|
||||
```
|
||||
|
||||
For remote PostgreSQL database, update:
|
||||
```
|
||||
DATABASE_URL=postgresql://username:password@remote-host:5432/basil?schema=public
|
||||
```
|
||||
|
||||
## Key Features
|
||||
|
||||
### Recipe Import from URL
|
||||
@@ -125,6 +136,13 @@ S3_SECRET_ACCESS_KEY=your-secret
|
||||
- S3 storage: Placeholder for AWS SDK implementation
|
||||
- Easy to extend for other storage providers
|
||||
|
||||
### Backup & Restore
|
||||
- Complete data backup to single ZIP file including database and uploaded files
|
||||
- Backup service in `packages/api/src/services/backup.service.ts`
|
||||
- REST API for creating, listing, downloading, and restoring backups
|
||||
- Automatic backup of all recipes, cookbooks, tags, and relationships
|
||||
- Configurable backup storage location via `BACKUP_PATH` environment variable
|
||||
|
||||
## Adding New Features
|
||||
|
||||
### Adding a New API Endpoint
|
||||
@@ -161,6 +179,22 @@ This starts:
|
||||
Persistent volumes:
|
||||
- `postgres_data` - Database storage
|
||||
- `uploads_data` - Uploaded images
|
||||
- `backups_data` - Backup files
|
||||
|
||||
### Using a Remote Database
|
||||
|
||||
To use a remote PostgreSQL database instead of the local Docker container:
|
||||
|
||||
1. Set the `DATABASE_URL` environment variable to point to your remote database
|
||||
2. Update `docker-compose.yml` to pass the environment variable or create a `.env` file in the root
|
||||
3. Optionally, remove or comment out the `postgres` service and its dependency in `docker-compose.yml`
|
||||
|
||||
Example `.env` file in project root:
|
||||
```
|
||||
DATABASE_URL=postgresql://username:password@remote-host:5432/basil?schema=public
|
||||
```
|
||||
|
||||
The docker-compose.yml is configured to use `${DATABASE_URL:-default}` which will use the environment variable if set, or fall back to the local postgres container.
|
||||
|
||||
## API Reference
|
||||
|
||||
@@ -178,6 +212,13 @@ Persistent volumes:
|
||||
- `search` - Search in title/description
|
||||
- `cuisine`, `category` - Filter by cuisine or category
|
||||
|
||||
**Backups:**
|
||||
- `POST /api/backup` - Create a new backup (returns backup metadata)
|
||||
- `GET /api/backup` - List all available backups
|
||||
- `GET /api/backup/:filename` - Download a specific backup file
|
||||
- `POST /api/backup/restore` - Restore from backup (accepts file upload or existing filename)
|
||||
- `DELETE /api/backup/:filename` - Delete a backup file
|
||||
|
||||
## Important Implementation Details
|
||||
|
||||
### Prisma Relations
|
||||
@@ -200,3 +241,32 @@ Persistent volumes:
|
||||
- Root `package.json` defines npm workspaces
|
||||
- Packages can reference each other (e.g., `@basil/shared`)
|
||||
- Must rebuild shared package when types change for other packages to see updates
|
||||
|
||||
## CI/CD and Deployment
|
||||
|
||||
Basil includes a complete CI/CD pipeline with Gitea Actions for automated testing, building, and deployment.
|
||||
|
||||
**Quick Start:**
|
||||
- See [CI/CD Setup Guide](docs/CI-CD-SETUP.md) for full documentation
|
||||
- See [Deployment Quick Start](docs/DEPLOYMENT-QUICK-START.md) for quick reference
|
||||
|
||||
**Pipeline Overview:**
|
||||
1. **Test Stage**: Runs unit tests (Vitest) and E2E tests (Playwright)
|
||||
2. **Build Stage**: Builds Docker images for API and Web (main branch only)
|
||||
3. **Deploy Stage**: Pushes images to registry and triggers webhook deployment
|
||||
|
||||
**Deployment Options:**
|
||||
- **Automatic**: Push to main branch triggers full CI/CD pipeline
|
||||
- **Manual**: Run `./scripts/manual-deploy.sh` for interactive deployment
|
||||
- **Webhook**: Systemd service listens for deployment triggers
|
||||
|
||||
**Key Files:**
|
||||
- `.gitea/workflows/ci-cd.yml` - Main CI/CD workflow
|
||||
- `scripts/deploy.sh` - Deployment script
|
||||
- `scripts/webhook-receiver.sh` - Webhook server
|
||||
- `.env.deploy.example` - Deployment configuration template
|
||||
|
||||
**Required Secrets (Gitea):**
|
||||
- `DOCKER_USERNAME` - Docker Hub username
|
||||
- `DOCKER_PASSWORD` - Docker Hub access token
|
||||
- `DEPLOY_WEBHOOK_URL` - Webhook endpoint for deployments
|
||||
|
||||
@@ -31,12 +31,15 @@ services:
|
||||
environment:
|
||||
NODE_ENV: production
|
||||
PORT: 3001
|
||||
DATABASE_URL: postgresql://basil:basil@postgres:5432/basil?schema=public
|
||||
# Use DATABASE_URL from .env if set, otherwise default to local postgres
|
||||
DATABASE_URL: ${DATABASE_URL:-postgresql://basil:basil@postgres:5432/basil?schema=public}
|
||||
STORAGE_TYPE: local
|
||||
LOCAL_STORAGE_PATH: /app/uploads
|
||||
BACKUP_PATH: /app/backups
|
||||
CORS_ORIGIN: https://basil.pkartchner.com
|
||||
volumes:
|
||||
- uploads_data:/app/uploads
|
||||
- backups_data:/app/backups
|
||||
networks:
|
||||
- internal
|
||||
- traefik
|
||||
@@ -62,6 +65,7 @@ services:
|
||||
volumes:
|
||||
postgres_data:
|
||||
uploads_data:
|
||||
backups_data:
|
||||
|
||||
networks:
|
||||
traefik:
|
||||
|
||||
290
docs/BACKUP.md
Normal file
290
docs/BACKUP.md
Normal file
@@ -0,0 +1,290 @@
|
||||
# Backup & Restore Guide
|
||||
|
||||
This document explains how to use Basil's backup and restore features.
|
||||
|
||||
## Overview
|
||||
|
||||
Basil includes a comprehensive backup system that creates complete snapshots of your recipe data, including:
|
||||
- All recipes with ingredients, instructions, and metadata
|
||||
- Recipe images and uploaded files
|
||||
- Cookbooks and their organization
|
||||
- Tags and categorization
|
||||
- All relationships between entities
|
||||
|
||||
Backups are stored as ZIP archives containing:
|
||||
- `database.json` - Complete database export in JSON format
|
||||
- `uploads/` - All uploaded images and files
|
||||
|
||||
## Configuration
|
||||
|
||||
### Environment Variables
|
||||
|
||||
Configure the backup location in `packages/api/.env`:
|
||||
|
||||
```bash
|
||||
BACKUP_PATH=./backups
|
||||
```
|
||||
|
||||
In Docker deployments, backups are stored in the `backups_data` volume by default at `/app/backups`.
|
||||
|
||||
## API Endpoints
|
||||
|
||||
### Create Backup
|
||||
|
||||
Creates a new backup of all data and files.
|
||||
|
||||
```bash
|
||||
POST /api/backup
|
||||
```
|
||||
|
||||
**Response:**
|
||||
```json
|
||||
{
|
||||
"success": true,
|
||||
"message": "Backup created successfully",
|
||||
"backup": {
|
||||
"name": "basil-backup-2025-11-10T12-30-45-123Z.zip",
|
||||
"path": "/app/backups/basil-backup-2025-11-10T12-30-45-123Z.zip",
|
||||
"size": 1048576,
|
||||
"created": "2025-11-10T12:30:45.123Z"
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
### List Backups
|
||||
|
||||
Lists all available backups in the backup directory.
|
||||
|
||||
```bash
|
||||
GET /api/backup
|
||||
```
|
||||
|
||||
**Response:**
|
||||
```json
|
||||
{
|
||||
"success": true,
|
||||
"backups": [
|
||||
{
|
||||
"name": "basil-backup-2025-11-10T12-30-45-123Z.zip",
|
||||
"path": "/app/backups/basil-backup-2025-11-10T12-30-45-123Z.zip",
|
||||
"size": 1048576,
|
||||
"created": "2025-11-10T12:30:45.123Z"
|
||||
}
|
||||
]
|
||||
}
|
||||
```
|
||||
|
||||
### Download Backup
|
||||
|
||||
Downloads a specific backup file.
|
||||
|
||||
```bash
|
||||
GET /api/backup/:filename
|
||||
```
|
||||
|
||||
**Example:**
|
||||
```bash
|
||||
curl -O http://localhost:3001/api/backup/basil-backup-2025-11-10T12-30-45-123Z.zip
|
||||
```
|
||||
|
||||
### Restore Backup
|
||||
|
||||
Restores data from a backup file. **Warning: This will delete all existing data!**
|
||||
|
||||
You can restore in two ways:
|
||||
|
||||
#### 1. Upload a backup file
|
||||
|
||||
```bash
|
||||
POST /api/backup/restore
|
||||
Content-Type: multipart/form-data
|
||||
|
||||
backup: <file>
|
||||
```
|
||||
|
||||
**Example:**
|
||||
```bash
|
||||
curl -X POST \
|
||||
-F "backup=@basil-backup-2025-11-10T12-30-45-123Z.zip" \
|
||||
http://localhost:3001/api/backup/restore
|
||||
```
|
||||
|
||||
#### 2. Restore from existing backup in backup directory
|
||||
|
||||
```bash
|
||||
POST /api/backup/restore
|
||||
Content-Type: application/json
|
||||
|
||||
{
|
||||
"filename": "basil-backup-2025-11-10T12-30-45-123Z.zip"
|
||||
}
|
||||
```
|
||||
|
||||
**Response:**
|
||||
```json
|
||||
{
|
||||
"success": true,
|
||||
"message": "Backup restored successfully",
|
||||
"metadata": {
|
||||
"version": "1.0",
|
||||
"timestamp": "2025-11-10T12:30:45.123Z",
|
||||
"recipeCount": 42,
|
||||
"cookbookCount": 3,
|
||||
"tagCount": 15
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
### Delete Backup
|
||||
|
||||
Deletes a backup file.
|
||||
|
||||
```bash
|
||||
DELETE /api/backup/:filename
|
||||
```
|
||||
|
||||
## Usage Examples
|
||||
|
||||
### Manual Backup via curl
|
||||
|
||||
```bash
|
||||
# Create a backup
|
||||
curl -X POST http://localhost:3001/api/backup
|
||||
|
||||
# List available backups
|
||||
curl http://localhost:3001/api/backup
|
||||
|
||||
# Download a backup
|
||||
curl -O http://localhost:3001/api/backup/basil-backup-2025-11-10T12-30-45-123Z.zip
|
||||
|
||||
# Restore from uploaded file
|
||||
curl -X POST \
|
||||
-F "backup=@basil-backup-2025-11-10T12-30-45-123Z.zip" \
|
||||
http://localhost:3001/api/backup/restore
|
||||
|
||||
# Restore from existing backup
|
||||
curl -X POST \
|
||||
-H "Content-Type: application/json" \
|
||||
-d '{"filename": "basil-backup-2025-11-10T12-30-45-123Z.zip"}' \
|
||||
http://localhost:3001/api/backup/restore
|
||||
|
||||
# Delete a backup
|
||||
curl -X DELETE http://localhost:3001/api/backup/basil-backup-2025-11-10T12-30-45-123Z.zip
|
||||
```
|
||||
|
||||
### Automated Backups
|
||||
|
||||
You can set up automated backups using cron:
|
||||
|
||||
```bash
|
||||
# Add to crontab (daily backup at 2 AM)
|
||||
0 2 * * * curl -X POST http://localhost:3001/api/backup
|
||||
```
|
||||
|
||||
For Docker deployments:
|
||||
|
||||
```bash
|
||||
# Add to host crontab
|
||||
0 2 * * * docker exec basil-api curl -X POST http://localhost:3001/api/backup
|
||||
```
|
||||
|
||||
## Backup Storage
|
||||
|
||||
### Local Development
|
||||
|
||||
Backups are stored in `packages/api/backups/` by default.
|
||||
|
||||
### Docker Production
|
||||
|
||||
Backups are stored in the `backups_data` Docker volume, which persists across container restarts.
|
||||
|
||||
To access backups from the host:
|
||||
|
||||
```bash
|
||||
# Copy backup from container to host
|
||||
docker cp basil-api:/app/backups/basil-backup-2025-11-10T12-30-45-123Z.zip ./
|
||||
|
||||
# List backups in container
|
||||
docker exec basil-api ls -lh /app/backups/
|
||||
```
|
||||
|
||||
### External Storage
|
||||
|
||||
For additional safety, you should copy backups to external storage:
|
||||
|
||||
```bash
|
||||
# Example: Copy to external drive
|
||||
docker cp basil-api:/app/backups/ /mnt/external-backup/basil/
|
||||
|
||||
# Example: Upload to S3
|
||||
aws s3 sync /path/to/backups/ s3://my-bucket/basil-backups/
|
||||
|
||||
# Example: Upload to rsync server
|
||||
rsync -avz /path/to/backups/ user@backup-server:/backups/basil/
|
||||
```
|
||||
|
||||
## Best Practices
|
||||
|
||||
1. **Regular Backups**: Schedule automatic backups daily or weekly
|
||||
2. **External Storage**: Copy backups to external storage regularly
|
||||
3. **Test Restores**: Periodically test backup restoration to ensure backups are valid
|
||||
4. **Backup Before Updates**: Always create a backup before updating Basil or making major changes
|
||||
5. **Retention Policy**: Keep multiple backup versions (e.g., daily for 7 days, weekly for 4 weeks, monthly for 12 months)
|
||||
|
||||
## Troubleshooting
|
||||
|
||||
### Backup Creation Fails
|
||||
|
||||
**Error: Out of disk space**
|
||||
- Check available disk space: `df -h`
|
||||
- Clean up old backups: `DELETE /api/backup/:filename`
|
||||
- Increase Docker volume size if using Docker
|
||||
|
||||
**Error: Permission denied**
|
||||
- Ensure the API has write permissions to the backup directory
|
||||
- In Docker: Check volume permissions
|
||||
|
||||
### Restore Fails
|
||||
|
||||
**Error: Invalid backup file**
|
||||
- Ensure the backup file is not corrupted
|
||||
- Try downloading the backup again
|
||||
- Verify the backup was created with a compatible version
|
||||
|
||||
**Error: Database connection lost**
|
||||
- Ensure the database is running and accessible
|
||||
- Check `DATABASE_URL` environment variable
|
||||
- Verify network connectivity to remote database if applicable
|
||||
|
||||
### Large Backups
|
||||
|
||||
If you have many recipes with large images:
|
||||
- Backups may take several minutes to create
|
||||
- Increase request timeout if using a reverse proxy
|
||||
- Consider using external storage (S3) for images to reduce backup size
|
||||
|
||||
## Security Considerations
|
||||
|
||||
1. **Access Control**: Backup endpoints are not authenticated by default. Consider adding authentication middleware in production.
|
||||
2. **Sensitive Data**: Backups contain all recipe data. Store backup files securely.
|
||||
3. **Download URLs**: Backup download endpoints validate file paths to prevent directory traversal attacks.
|
||||
4. **File Size Limits**: Restore endpoint limits upload size to 1GB by default.
|
||||
|
||||
## Migration Between Environments
|
||||
|
||||
Backups can be used to migrate data between environments:
|
||||
|
||||
```bash
|
||||
# 1. Create backup on source environment
|
||||
curl -X POST http://source-server:3001/api/backup
|
||||
|
||||
# 2. Download backup
|
||||
curl -O http://source-server:3001/api/backup/basil-backup-2025-11-10T12-30-45-123Z.zip
|
||||
|
||||
# 3. Upload to target environment
|
||||
curl -X POST \
|
||||
-F "backup=@basil-backup-2025-11-10T12-30-45-123Z.zip" \
|
||||
http://target-server:3001/api/backup/restore
|
||||
```
|
||||
|
||||
**Note:** When migrating, ensure both environments use compatible versions of Basil.
|
||||
503
docs/CI-CD-SETUP.md
Normal file
503
docs/CI-CD-SETUP.md
Normal file
@@ -0,0 +1,503 @@
|
||||
# CI/CD Setup Guide for Basil
|
||||
|
||||
This document describes the complete CI/CD pipeline for the Basil recipe manager, including Gitea Actions workflows, Docker image building, and automated deployments.
|
||||
|
||||
## Table of Contents
|
||||
|
||||
1. [Overview](#overview)
|
||||
2. [Prerequisites](#prerequisites)
|
||||
3. [Gitea Actions Workflow](#gitea-actions-workflow)
|
||||
4. [Docker Registry Setup](#docker-registry-setup)
|
||||
5. [Deployment Methods](#deployment-methods)
|
||||
6. [Configuration](#configuration)
|
||||
7. [Troubleshooting](#troubleshooting)
|
||||
|
||||
## Overview
|
||||
|
||||
The CI/CD pipeline consists of three main stages:
|
||||
|
||||
1. **Testing**: Runs unit tests (Vitest) and E2E tests (Playwright)
|
||||
2. **Build & Push**: Builds Docker images and pushes to registry (on main branch only)
|
||||
3. **Deploy**: Pulls new images and restarts containers on the production server
|
||||
|
||||
```
|
||||
┌─────────────┐ ┌──────────────┐ ┌────────────────┐
|
||||
│ Git Push │────▶│ Gitea Actions│────▶│ Docker Registry│
|
||||
│ (main) │ │ - Test │ │ - API image │
|
||||
└─────────────┘ │ - Build │ │ - Web image │
|
||||
│ - Push │ └────────────────┘
|
||||
└──────────────┘ │
|
||||
│ │
|
||||
▼ ▼
|
||||
┌──────────────┐ ┌────────────────┐
|
||||
│ Webhook │────▶│ Production │
|
||||
│ Trigger │ │ Server │
|
||||
└──────────────┘ └────────────────┘
|
||||
```
|
||||
|
||||
## Prerequisites
|
||||
|
||||
### For CI/CD (Gitea)
|
||||
|
||||
- Gitea instance with Actions enabled
|
||||
- Docker Hub account (or other registry)
|
||||
- Node.js 20+ for testing
|
||||
|
||||
### For Deployment Server
|
||||
|
||||
- Docker and Docker Compose installed
|
||||
- Bash shell
|
||||
- `webhook` package (for automatic deployments)
|
||||
- Network access to pull from Docker registry
|
||||
|
||||
## Gitea Actions Workflow
|
||||
|
||||
The workflow is defined in `.gitea/workflows/ci-cd.yml` and runs on:
|
||||
|
||||
- Push to `main` or `develop` branches
|
||||
- Pull requests targeting `main` or `develop`
|
||||
|
||||
### Jobs
|
||||
|
||||
#### 1. Test Job
|
||||
|
||||
Runs all tests with a PostgreSQL service container:
|
||||
|
||||
- **Unit Tests**: API, Web, and Shared packages using Vitest
|
||||
- **E2E Tests**: Full application tests using Playwright
|
||||
- **Database**: Temporary PostgreSQL instance for testing
|
||||
|
||||
**Test Commands:**
|
||||
```bash
|
||||
# Run all tests locally
|
||||
npm test
|
||||
|
||||
# Run E2E tests
|
||||
npm run test:e2e
|
||||
|
||||
# Run with coverage
|
||||
npm run test:coverage
|
||||
```
|
||||
|
||||
#### 2. Lint Job
|
||||
|
||||
Runs ESLint on all packages to ensure code quality:
|
||||
|
||||
```bash
|
||||
npm run lint
|
||||
```
|
||||
|
||||
#### 3. Build and Push Job
|
||||
|
||||
Only runs on push to `main` branch:
|
||||
|
||||
1. Builds Docker images for API and Web
|
||||
2. Tags with multiple tags (latest, SHA, semver)
|
||||
3. Pushes to Docker registry
|
||||
4. Triggers deployment webhook
|
||||
|
||||
**Image Names:**
|
||||
- API: `{registry}/{username}/basil-api:{tag}`
|
||||
- Web: `{registry}/{username}/basil-web:{tag}`
|
||||
|
||||
## Docker Registry Setup
|
||||
|
||||
### 1. Create Docker Hub Account
|
||||
|
||||
If using Docker Hub:
|
||||
1. Sign up at https://hub.docker.com
|
||||
2. Create an access token in Account Settings → Security
|
||||
|
||||
### 2. Configure Gitea Secrets
|
||||
|
||||
Add the following secrets to your Gitea repository:
|
||||
|
||||
**Settings → Secrets → Actions**
|
||||
|
||||
| Secret Name | Description | Example |
|
||||
|-------------|-------------|---------|
|
||||
| `DOCKER_USERNAME` | Docker Hub username | `myusername` |
|
||||
| `DOCKER_PASSWORD` | Docker Hub access token | `dckr_pat_xxxxx...` |
|
||||
| `DEPLOY_WEBHOOK_URL` | Webhook endpoint URL | `http://server.com:9000/hooks/basil-deploy` |
|
||||
|
||||
### 3. Alternative Registries
|
||||
|
||||
To use a different registry (e.g., GitHub Container Registry, GitLab):
|
||||
|
||||
1. Update `DOCKER_REGISTRY` in `.gitea/workflows/ci-cd.yml`:
|
||||
```yaml
|
||||
env:
|
||||
DOCKER_REGISTRY: ghcr.io # or registry.gitlab.com
|
||||
```
|
||||
|
||||
2. Update login credentials accordingly
|
||||
|
||||
## Deployment Methods
|
||||
|
||||
### Method 1: Automatic Webhook Deployment (Recommended)
|
||||
|
||||
Uses a webhook server to automatically deploy when images are pushed.
|
||||
|
||||
#### Setup Steps
|
||||
|
||||
1. **Copy environment template:**
|
||||
```bash
|
||||
cp .env.deploy.example .env.deploy
|
||||
```
|
||||
|
||||
2. **Edit `.env.deploy`:**
|
||||
```bash
|
||||
DOCKER_USERNAME=your-docker-username
|
||||
DOCKER_REGISTRY=docker.io
|
||||
IMAGE_TAG=latest
|
||||
WEBHOOK_PORT=9000
|
||||
WEBHOOK_SECRET=your-random-secret-here
|
||||
```
|
||||
|
||||
3. **Install webhook package:**
|
||||
```bash
|
||||
# Ubuntu/Debian
|
||||
sudo apt-get install webhook
|
||||
|
||||
# RHEL/CentOS
|
||||
sudo yum install webhook
|
||||
```
|
||||
|
||||
4. **Install systemd service:**
|
||||
```bash
|
||||
# Copy service file
|
||||
sudo cp scripts/basil-webhook.service /etc/systemd/system/
|
||||
|
||||
# Edit service file with your settings
|
||||
sudo nano /etc/systemd/system/basil-webhook.service
|
||||
|
||||
# Enable and start service
|
||||
sudo systemctl enable basil-webhook
|
||||
sudo systemctl start basil-webhook
|
||||
|
||||
# Check status
|
||||
sudo systemctl status basil-webhook
|
||||
```
|
||||
|
||||
5. **Configure firewall (if needed):**
|
||||
```bash
|
||||
sudo ufw allow 9000/tcp
|
||||
```
|
||||
|
||||
6. **Add webhook URL to Gitea secrets:**
|
||||
```
|
||||
DEPLOY_WEBHOOK_URL=http://your-server.com:9000/hooks/basil-deploy
|
||||
```
|
||||
|
||||
Add this header when calling the webhook:
|
||||
```
|
||||
X-Webhook-Secret: your-random-secret-here
|
||||
```
|
||||
|
||||
#### Manual Webhook Trigger
|
||||
|
||||
Test webhook manually:
|
||||
```bash
|
||||
curl -X POST http://localhost:9000/hooks/basil-deploy \
|
||||
-H "Content-Type: application/json" \
|
||||
-H "X-Webhook-Secret: your-secret" \
|
||||
-d '{"branch": "main", "commit": "abc123"}'
|
||||
```
|
||||
|
||||
### Method 2: Manual Deployment
|
||||
|
||||
For manual deployments without webhooks:
|
||||
|
||||
```bash
|
||||
# Interactive deployment
|
||||
./scripts/manual-deploy.sh
|
||||
|
||||
# Or with environment variables
|
||||
DOCKER_USERNAME=myuser \
|
||||
DOCKER_REGISTRY=docker.io \
|
||||
IMAGE_TAG=latest \
|
||||
./scripts/deploy.sh
|
||||
```
|
||||
|
||||
The deployment script will:
|
||||
1. Check Docker is running
|
||||
2. Create a pre-deployment backup
|
||||
3. Pull latest images from registry
|
||||
4. Update docker-compose configuration
|
||||
5. Restart containers
|
||||
6. Perform health checks
|
||||
7. Clean up old images
|
||||
|
||||
### Method 3: Cron-based Deployment
|
||||
|
||||
Set up a cron job for scheduled deployments:
|
||||
|
||||
```bash
|
||||
# Edit crontab
|
||||
crontab -e
|
||||
|
||||
# Add line to deploy every night at 2 AM
|
||||
0 2 * * * cd /srv/docker-compose/basil && DOCKER_USERNAME=myuser ./scripts/deploy.sh >> /var/log/basil-deploy.log 2>&1
|
||||
```
|
||||
|
||||
## Configuration
|
||||
|
||||
### Environment Variables
|
||||
|
||||
**For Deployment Scripts:**
|
||||
|
||||
| Variable | Required | Default | Description |
|
||||
|----------|----------|---------|-------------|
|
||||
| `DOCKER_USERNAME` | Yes | - | Docker registry username |
|
||||
| `DOCKER_REGISTRY` | No | `docker.io` | Docker registry URL |
|
||||
| `IMAGE_TAG` | No | `latest` | Image tag to pull |
|
||||
| `WEBHOOK_PORT` | No | `9000` | Port for webhook server |
|
||||
| `WEBHOOK_SECRET` | No | `changeme` | Secret for webhook authentication |
|
||||
|
||||
**For Application:**
|
||||
|
||||
See `packages/api/.env.example` for application configuration.
|
||||
|
||||
### Docker Compose Override
|
||||
|
||||
The deployment script automatically creates `docker-compose.override.yml` to use registry images instead of building from source:
|
||||
|
||||
```yaml
|
||||
services:
|
||||
api:
|
||||
image: docker.io/username/basil-api:latest
|
||||
web:
|
||||
image: docker.io/username/basil-web:latest
|
||||
```
|
||||
|
||||
This file is in `.gitignore` and is regenerated on each deployment.
|
||||
|
||||
## Monitoring and Logs
|
||||
|
||||
### View Deployment Logs
|
||||
|
||||
```bash
|
||||
# Deployment log
|
||||
tail -f deploy.log
|
||||
|
||||
# Webhook log
|
||||
tail -f webhook.log
|
||||
|
||||
# Container logs
|
||||
docker-compose logs -f api
|
||||
docker-compose logs -f web
|
||||
```
|
||||
|
||||
### Check Deployment Status
|
||||
|
||||
```bash
|
||||
# Check running containers
|
||||
docker-compose ps
|
||||
|
||||
# Check API health
|
||||
curl http://localhost:3001/health
|
||||
|
||||
# View recent deployments
|
||||
grep "Deployment completed" deploy.log
|
||||
```
|
||||
|
||||
### Systemd Service Logs
|
||||
|
||||
```bash
|
||||
# View webhook service logs
|
||||
sudo journalctl -u basil-webhook -f
|
||||
|
||||
# View recent errors
|
||||
sudo journalctl -u basil-webhook --since "1 hour ago" -p err
|
||||
```
|
||||
|
||||
## Backup and Rollback
|
||||
|
||||
### Automatic Backups
|
||||
|
||||
The deployment script automatically creates a backup before deploying:
|
||||
|
||||
```bash
|
||||
backups/pre-deploy-YYYYMMDD-HHMMSS.zip
|
||||
```
|
||||
|
||||
### Manual Backup
|
||||
|
||||
```bash
|
||||
# Via API
|
||||
curl -X POST http://localhost:3001/api/backup \
|
||||
-o backup-$(date +%Y%m%d).zip
|
||||
|
||||
# Via Docker
|
||||
docker exec basil-api npm run backup
|
||||
```
|
||||
|
||||
### Rollback to Previous Version
|
||||
|
||||
```bash
|
||||
# Pull specific tag
|
||||
DOCKER_USERNAME=myuser IMAGE_TAG=main-abc123 ./scripts/deploy.sh
|
||||
|
||||
# Or restore from backup
|
||||
curl -X POST http://localhost:3001/api/backup/restore \
|
||||
  -F "backup=@backups/pre-deploy-20250101-020000.zip"
|
||||
```
|
||||
|
||||
## Troubleshooting
|
||||
|
||||
### Tests Failing in CI
|
||||
|
||||
**Check test logs in Gitea:**
|
||||
1. Go to Actions tab in repository
|
||||
2. Click on failed workflow run
|
||||
3. Expand failed job to see detailed logs
|
||||
|
||||
**Common issues:**
|
||||
- Database connection: Ensure PostgreSQL service is healthy
|
||||
- Missing dependencies: Check `npm install` step
|
||||
- Environment variables: Verify test environment configuration
|
||||
|
||||
### Images Not Pushing
|
||||
|
||||
**Check Docker credentials:**
|
||||
```bash
|
||||
# Test Docker login
|
||||
docker login -u $DOCKER_USERNAME -p $DOCKER_PASSWORD
|
||||
|
||||
# Verify secrets in Gitea
|
||||
# Settings → Secrets → Actions
|
||||
```
|
||||
|
||||
**Check registry permissions:**
|
||||
- Ensure token has write permissions
|
||||
- Verify repository exists on Docker Hub
|
||||
|
||||
### Webhook Not Triggering
|
||||
|
||||
**Check webhook service:**
|
||||
```bash
|
||||
# Service status
|
||||
sudo systemctl status basil-webhook
|
||||
|
||||
# Check if port is listening
|
||||
sudo netstat -tlnp | grep 9000
|
||||
|
||||
# Test webhook endpoint
|
||||
curl -I http://localhost:9000/hooks/basil-deploy
|
||||
```
|
||||
|
||||
**Check firewall:**
|
||||
```bash
|
||||
# Ubuntu/Debian
|
||||
sudo ufw status
|
||||
|
||||
# RHEL/CentOS
|
||||
sudo firewall-cmd --list-all
|
||||
```
|
||||
|
||||
**Verify secret header:**
|
||||
```bash
|
||||
# Missing or wrong secret returns 403
|
||||
curl -X POST http://localhost:9000/hooks/basil-deploy
|
||||
# Should return 403 Forbidden
|
||||
|
||||
# Correct secret triggers deployment
|
||||
curl -X POST http://localhost:9000/hooks/basil-deploy \
|
||||
-H "X-Webhook-Secret: your-secret"
|
||||
# Should return "Deployment triggered successfully"
|
||||
```
|
||||
|
||||
### Deployment Fails
|
||||
|
||||
**Check Docker:**
|
||||
```bash
|
||||
# Docker running?
|
||||
docker info
|
||||
|
||||
# Disk space?
|
||||
df -h
|
||||
|
||||
# View deployment log
|
||||
tail -100 deploy.log
|
||||
```
|
||||
|
||||
**Check images:**
|
||||
```bash
|
||||
# Can we pull images?
|
||||
docker pull $DOCKER_REGISTRY/$DOCKER_USERNAME/basil-api:latest
|
||||
|
||||
# Check image tags
|
||||
docker images | grep basil
|
||||
```
|
||||
|
||||
**Health check failures:**
|
||||
```bash
|
||||
# Check API logs
|
||||
docker-compose logs api
|
||||
|
||||
# Check database connection
|
||||
docker-compose exec api npx prisma studio
|
||||
|
||||
# Test API manually
|
||||
curl http://localhost:3001/health
|
||||
```
|
||||
|
||||
### Container Won't Start
|
||||
|
||||
**Check logs:**
|
||||
```bash
|
||||
docker-compose logs api
|
||||
docker-compose logs web
|
||||
```
|
||||
|
||||
**Common issues:**
|
||||
- Database migrations: Check Prisma migration logs
|
||||
- Environment variables: Verify `.env` files
|
||||
- Port conflicts: Check if ports 3001/5173 are available
|
||||
- Volume permissions: Check uploads/backups directory permissions
|
||||
|
||||
### Rollback Failed
|
||||
|
||||
**Manual rollback:**
|
||||
```bash
|
||||
# Stop containers
|
||||
docker-compose down
|
||||
|
||||
# Remove override file
|
||||
rm docker-compose.override.yml
|
||||
|
||||
# Restore from backup
|
||||
unzip backups/pre-deploy-YYYYMMDD-HHMMSS.zip -d restore-temp/
|
||||
|
||||
# Manually restore database and files
|
||||
# (See backup documentation)
|
||||
|
||||
# Start containers
|
||||
docker-compose up -d
|
||||
```
|
||||
|
||||
## Security Considerations
|
||||
|
||||
1. **Webhook Secret**: Use a strong, random secret (32+ characters)
|
||||
2. **Firewall**: Restrict webhook port to known IPs if possible
|
||||
3. **HTTPS**: Use HTTPS for webhook endpoint in production
|
||||
4. **Secrets**: Never commit secrets to git
|
||||
5. **Backups**: Store backups securely with encryption
|
||||
6. **Docker Registry**: Use private registries for sensitive applications
|
||||
|
||||
## Best Practices
|
||||
|
||||
1. **Test Locally**: Always test changes locally before pushing
|
||||
2. **Review PRs**: Use pull requests for code review
|
||||
3. **Monitor Logs**: Regularly check deployment and application logs
|
||||
4. **Backup First**: Always backup before major deployments
|
||||
5. **Tag Releases**: Use semantic versioning for releases
|
||||
6. **Health Checks**: Monitor application health after deployment
|
||||
7. **Rollback Plan**: Know how to rollback quickly if needed
|
||||
|
||||
## Additional Resources
|
||||
|
||||
- [Gitea Actions Documentation](https://docs.gitea.io/en-us/actions/)
|
||||
- [Docker Documentation](https://docs.docker.com/)
|
||||
- [Webhook Documentation](https://github.com/adnanh/webhook)
|
||||
- [Basil Project Documentation](../CLAUDE.md)
|
||||
200
docs/DEPLOYMENT-QUICK-START.md
Normal file
200
docs/DEPLOYMENT-QUICK-START.md
Normal file
@@ -0,0 +1,200 @@
|
||||
# Basil Deployment Quick Start
|
||||
|
||||
Quick reference for deploying Basil with CI/CD.
|
||||
|
||||
## Initial Setup (One-time)
|
||||
|
||||
### 1. Gitea Secrets Configuration
|
||||
|
||||
Add these secrets in Gitea → Settings → Secrets → Actions:
|
||||
|
||||
```
|
||||
DOCKER_USERNAME=your-dockerhub-username
|
||||
DOCKER_PASSWORD=dckr_pat_xxxxxxxxxxxxx
|
||||
DEPLOY_WEBHOOK_URL=http://your-server.com:9000/hooks/basil-deploy
|
||||
```
|
||||
|
||||
### 2. Server Setup
|
||||
|
||||
```bash
|
||||
# 1. Clone repository
|
||||
cd /srv/docker-compose
|
||||
git clone https://your-gitea.com/user/basil.git
|
||||
cd basil
|
||||
|
||||
# 2. Create deployment configuration
|
||||
cp .env.deploy.example .env.deploy
|
||||
nano .env.deploy # Edit with your values
|
||||
|
||||
# 3. Install webhook (Ubuntu/Debian)
|
||||
sudo apt-get install webhook
|
||||
|
||||
# 4. Install systemd service
|
||||
sudo cp scripts/basil-webhook.service /etc/systemd/system/
|
||||
sudo nano /etc/systemd/system/basil-webhook.service # Edit paths and env vars
|
||||
sudo systemctl enable basil-webhook
|
||||
sudo systemctl start basil-webhook
|
||||
|
||||
# 5. Configure firewall
|
||||
sudo ufw allow 9000/tcp
|
||||
```
|
||||
|
||||
## Daily Usage
|
||||
|
||||
### Automatic Deployment (Recommended)
|
||||
|
||||
Just push to main branch:
|
||||
|
||||
```bash
|
||||
git add .
|
||||
git commit -m "feat: add new feature"
|
||||
git push origin main
|
||||
```
|
||||
|
||||
The CI/CD pipeline will:
|
||||
1. ✓ Run all tests
|
||||
2. ✓ Build Docker images
|
||||
3. ✓ Push to registry
|
||||
4. ✓ Trigger webhook
|
||||
5. ✓ Deploy automatically
|
||||
|
||||
### Manual Deployment
|
||||
|
||||
```bash
|
||||
cd /srv/docker-compose/basil
|
||||
./scripts/manual-deploy.sh
|
||||
```
|
||||
|
||||
## Quick Commands
|
||||
|
||||
```bash
|
||||
# View deployment logs
|
||||
tail -f deploy.log
|
||||
|
||||
# View container logs
|
||||
docker-compose logs -f api
|
||||
docker-compose logs -f web
|
||||
|
||||
# Check deployment status
|
||||
docker-compose ps
|
||||
|
||||
# Restart services
|
||||
docker-compose restart
|
||||
|
||||
# Pull latest code and rebuild (without registry)
|
||||
git pull
|
||||
docker-compose up -d --build
|
||||
|
||||
# Create manual backup
|
||||
curl -X POST http://localhost:3001/api/backup -o backup.zip
|
||||
|
||||
# Check webhook status
|
||||
sudo systemctl status basil-webhook
|
||||
|
||||
# Test webhook manually
|
||||
curl -X POST http://localhost:9000/hooks/basil-deploy \
|
||||
-H "X-Webhook-Secret: your-secret" \
|
||||
-H "Content-Type: application/json" \
|
||||
-d '{"branch": "main"}'
|
||||
```
|
||||
|
||||
## Rollback
|
||||
|
||||
```bash
|
||||
# Deploy specific version
|
||||
DOCKER_USERNAME=myuser IMAGE_TAG=main-abc123 ./scripts/deploy.sh
|
||||
|
||||
# Or restore from backup
|
||||
cd backups
|
||||
ls -lt # Find backup file
|
||||
curl -X POST http://localhost:3001/api/backup/restore \
|
||||
  -F "backup=@pre-deploy-20250101-020000.zip"
|
||||
```
|
||||
|
||||
## Troubleshooting One-Liners
|
||||
|
||||
```bash
|
||||
# Deployment failed? Check logs
|
||||
tail -50 deploy.log
|
||||
|
||||
# Webhook not working? Check service
|
||||
sudo journalctl -u basil-webhook -n 50
|
||||
|
||||
# Containers not starting? Check Docker logs
|
||||
docker-compose logs --tail=50
|
||||
|
||||
# Out of disk space?
|
||||
docker system prune -a
|
||||
df -h
|
||||
|
||||
# Database issues?
|
||||
docker-compose exec api npx prisma studio
|
||||
|
||||
# Pull images manually
|
||||
docker pull docker.io/$DOCKER_USERNAME/basil-api:latest
|
||||
docker pull docker.io/$DOCKER_USERNAME/basil-web:latest
|
||||
```
|
||||
|
||||
## Workflow Diagram
|
||||
|
||||
```
|
||||
┌──────────────┐
|
||||
│ Developer │
|
||||
│ git push │
|
||||
└──────┬───────┘
|
||||
│
|
||||
▼
|
||||
┌──────────────────────────────┐
|
||||
│ Gitea Actions │
|
||||
│ 1. Run tests (unit + E2E) │
|
||||
│ 2. Build Docker images │
|
||||
│ 3. Push to Docker Hub │
|
||||
│ 4. Call webhook │
|
||||
└──────┬───────────────────────┘
|
||||
│
|
||||
▼
|
||||
┌──────────────────────────────┐
|
||||
│ Production Server │
|
||||
│ 1. Webhook receives call │
|
||||
│ 2. Run deploy.sh script │
|
||||
│ - Create backup │
|
||||
│ - Pull new images │
|
||||
│ - Restart containers │
|
||||
│ - Health check │
|
||||
└──────────────────────────────┘
|
||||
```
|
||||
|
||||
## File Locations
|
||||
|
||||
```
|
||||
/srv/docker-compose/basil/
|
||||
├── .gitea/workflows/ci-cd.yml # CI/CD workflow
|
||||
├── scripts/
|
||||
│ ├── deploy.sh # Main deployment script
|
||||
│ ├── manual-deploy.sh # Interactive deployment
|
||||
│ ├── webhook-receiver.sh # Webhook server
|
||||
│ └── basil-webhook.service # Systemd service file
|
||||
├── .env.deploy # Deployment config (gitignored)
|
||||
├── deploy.log # Deployment logs
|
||||
├── webhook.log # Webhook logs
|
||||
└── backups/ # Automatic backups
|
||||
└── pre-deploy-*.zip
|
||||
```
|
||||
|
||||
## Environment Variables
|
||||
|
||||
**Required:**
|
||||
- `DOCKER_USERNAME` - Your Docker Hub username
|
||||
|
||||
**Optional:**
|
||||
- `DOCKER_REGISTRY` - Default: `docker.io`
|
||||
- `IMAGE_TAG` - Default: `latest`
|
||||
- `WEBHOOK_PORT` - Default: `9000`
|
||||
- `WEBHOOK_SECRET` - Default: `changeme` (change this!)
|
||||
|
||||
## Support
|
||||
|
||||
For detailed documentation, see:
|
||||
- [Full CI/CD Setup Guide](./CI-CD-SETUP.md)
|
||||
- [Project Documentation](../CLAUDE.md)
|
||||
- [Gitea Issues](https://your-gitea.com/user/basil/issues)
|
||||
388
docs/REMOTE_DATABASE.md
Normal file
388
docs/REMOTE_DATABASE.md
Normal file
@@ -0,0 +1,388 @@
|
||||
# Remote Database Configuration
|
||||
|
||||
This guide explains how to configure Basil to use a remote PostgreSQL database instead of the local Docker container.
|
||||
|
||||
## Overview
|
||||
|
||||
By default, Basil uses a local PostgreSQL database running in Docker. However, you can configure it to use a remote database service such as:
|
||||
- AWS RDS (Relational Database Service)
|
||||
- Google Cloud SQL
|
||||
- Azure Database for PostgreSQL
|
||||
- Digital Ocean Managed Databases
|
||||
- Heroku Postgres
|
||||
- Self-hosted PostgreSQL server
|
||||
|
||||
## Configuration
|
||||
|
||||
### 1. Update Environment Variables
|
||||
|
||||
Edit `packages/api/.env` to point to your remote database:
|
||||
|
||||
```bash
|
||||
DATABASE_URL="postgresql://username:password@remote-host:5432/basil?schema=public"
|
||||
```
|
||||
|
||||
### Connection String Format
|
||||
|
||||
```
|
||||
postgresql://[username]:[password]@[host]:[port]/[database]?[options]
|
||||
```
|
||||
|
||||
**Components:**
|
||||
- `username` - Database user
|
||||
- `password` - Database password
|
||||
- `host` - Database hostname or IP
|
||||
- `port` - Database port (default: 5432)
|
||||
- `database` - Database name (usually "basil")
|
||||
- `options` - Additional connection options (e.g., `sslmode=require`)
|
||||
|
||||
### 2. Provider-Specific Examples
|
||||
|
||||
#### AWS RDS
|
||||
|
||||
```bash
|
||||
DATABASE_URL="postgresql://basiladmin:yourpassword@basil-db.abc123.us-east-1.rds.amazonaws.com:5432/basil?schema=public"
|
||||
```
|
||||
|
||||
Additional SSL configuration may be required:
|
||||
```bash
|
||||
DATABASE_URL="postgresql://basiladmin:yourpassword@basil-db.abc123.us-east-1.rds.amazonaws.com:5432/basil?schema=public&sslmode=require"
|
||||
```
|
||||
|
||||
#### Google Cloud SQL
|
||||
|
||||
```bash
|
||||
DATABASE_URL="postgresql://postgres:yourpassword@127.0.0.1:5432/basil?host=/cloudsql/project:region:instance"
|
||||
```
|
||||
|
||||
Or with public IP:
|
||||
```bash
|
||||
DATABASE_URL="postgresql://postgres:yourpassword@35.123.45.67:5432/basil?schema=public&sslmode=require"
|
||||
```
|
||||
|
||||
#### Azure Database for PostgreSQL
|
||||
|
||||
```bash
|
||||
DATABASE_URL="postgresql://basiladmin@servername:yourpassword@servername.postgres.database.azure.com:5432/basil?schema=public&sslmode=require"
|
||||
```
|
||||
|
||||
#### Digital Ocean Managed Database
|
||||
|
||||
```bash
|
||||
DATABASE_URL="postgresql://doadmin:yourpassword@basil-db-do-user-123456-0.b.db.ondigitalocean.com:25060/basil?sslmode=require"
|
||||
```
|
||||
|
||||
#### Heroku Postgres
|
||||
|
||||
Heroku provides a `DATABASE_URL` automatically:
|
||||
```bash
|
||||
DATABASE_URL="postgres://user:password@ec2-123-45-67-89.compute-1.amazonaws.com:5432/d1234abcd5678ef"
|
||||
```
|
||||
|
||||
Note: Heroku uses `postgres://` instead of `postgresql://`, but both work with Prisma.
|
||||
|
||||
#### Self-Hosted PostgreSQL
|
||||
|
||||
```bash
|
||||
DATABASE_URL="postgresql://basil:password@192.168.1.100:5432/basil?schema=public"
|
||||
```
|
||||
|
||||
For SSL connections:
|
||||
```bash
|
||||
DATABASE_URL="postgresql://basil:password@postgres.example.com:5432/basil?schema=public&sslmode=require"
|
||||
```
|
||||
|
||||
### 3. Docker Configuration
|
||||
|
||||
When using Docker with a remote database, you need to update the configuration:
|
||||
|
||||
#### Option A: Use Environment Variable (Recommended)
|
||||
|
||||
Create a `.env` file in the project root:
|
||||
|
||||
```bash
|
||||
DATABASE_URL=postgresql://username:password@remote-host:5432/basil?schema=public
|
||||
```
|
||||
|
||||
The `docker-compose.yml` is already configured to use this:
|
||||
```yaml
|
||||
environment:
|
||||
DATABASE_URL: ${DATABASE_URL:-postgresql://basil:basil@postgres:5432/basil?schema=public}
|
||||
```
|
||||
|
||||
#### Option B: Edit docker-compose.yml Directly
|
||||
|
||||
Edit the `api` service in `docker-compose.yml`:
|
||||
|
||||
```yaml
|
||||
api:
|
||||
environment:
|
||||
DATABASE_URL: postgresql://username:password@remote-host:5432/basil?schema=public
|
||||
```
|
||||
|
||||
#### Option C: Disable Local PostgreSQL
|
||||
|
||||
If using only a remote database, you can disable the local postgres service:
|
||||
|
||||
1. Comment out or remove the `postgres` service in `docker-compose.yml`
|
||||
2. Remove the `depends_on` condition from the `api` service
|
||||
3. Remove the unused `postgres_data` volume
|
||||
|
||||
```yaml
|
||||
services:
|
||||
# postgres:
|
||||
# image: postgres:16-alpine
|
||||
# ... (commented out)
|
||||
|
||||
api:
|
||||
# Remove depends_on if not using local postgres
|
||||
# depends_on:
|
||||
# postgres:
|
||||
# condition: service_healthy
|
||||
```
|
||||
|
||||
### 4. Initialize Remote Database
|
||||
|
||||
Before first use, you need to initialize the database schema:
|
||||
|
||||
```bash
|
||||
# From your development machine
|
||||
cd packages/api
|
||||
npm run prisma:migrate
|
||||
|
||||
# Or from Docker
|
||||
docker exec basil-api npx prisma migrate deploy
|
||||
```
|
||||
|
||||
This will create all necessary tables and relationships.
|
||||
|
||||
## SSL/TLS Configuration
|
||||
|
||||
### Enabling SSL
|
||||
|
||||
Most managed database services require SSL connections. Add `sslmode` to your connection string:
|
||||
|
||||
```bash
|
||||
# Require SSL but don't verify certificate
|
||||
DATABASE_URL="postgresql://user:password@host:5432/basil?sslmode=require"
|
||||
|
||||
# Require SSL and verify certificate
|
||||
DATABASE_URL="postgresql://user:password@host:5432/basil?sslmode=verify-full"
|
||||
|
||||
# Disable SSL (only for development/testing)
|
||||
DATABASE_URL="postgresql://user:password@host:5432/basil?sslmode=disable"
|
||||
```
|
||||
|
||||
### SSL Modes
|
||||
|
||||
- `disable` - No SSL (not recommended for production)
|
||||
- `allow` - Try SSL, fall back to non-SSL
|
||||
- `prefer` - Try SSL first (default)
|
||||
- `require` - Require SSL, don't verify certificate
|
||||
- `verify-ca` - Require SSL, verify certificate authority
|
||||
- `verify-full` - Require SSL, verify certificate and hostname
|
||||
|
||||
### Custom SSL Certificates
|
||||
|
||||
For custom CA certificates, you may need to configure additional options:
|
||||
|
||||
```bash
|
||||
DATABASE_URL="postgresql://user:password@host:5432/basil?sslmode=require&sslcert=/path/to/client-cert.pem&sslkey=/path/to/client-key.pem&sslrootcert=/path/to/ca-cert.pem"
|
||||
```
|
||||
|
||||
## Connection Pooling
|
||||
|
||||
For production deployments with high traffic, consider using connection pooling.
|
||||
|
||||
### PgBouncer
|
||||
|
||||
Example configuration with PgBouncer:
|
||||
|
||||
```bash
|
||||
DATABASE_URL="postgresql://user:password@pgbouncer-host:6432/basil?schema=public"
|
||||
```
|
||||
|
||||
### Prisma Data Platform (Prisma Accelerate)
|
||||
|
||||
For advanced connection pooling and caching:
|
||||
|
||||
```bash
|
||||
DATABASE_URL="prisma://accelerate.prisma-data.net/?api_key=your-api-key"
|
||||
```
|
||||
|
||||
## Security Best Practices
|
||||
|
||||
1. **Use Environment Variables**: Never commit credentials to git
|
||||
2. **Strong Passwords**: Use generated passwords with high entropy
|
||||
3. **Restrict Access**: Configure database firewall rules to only allow your application servers
|
||||
4. **SSL/TLS**: Always use SSL in production
|
||||
5. **Read-Only Users**: Consider using read-only database users for analytics
|
||||
6. **Regular Backups**: Configure automated backups on your database service
|
||||
7. **Rotate Credentials**: Periodically rotate database passwords
|
||||
|
||||
## Performance Considerations
|
||||
|
||||
### Network Latency
|
||||
|
||||
Remote databases add network latency. Consider:
|
||||
- Choose a database region close to your application servers
|
||||
- Use connection pooling to reduce connection overhead
|
||||
- Enable query optimization in Prisma
|
||||
|
||||
### Connection Limits
|
||||
|
||||
Managed databases often have connection limits:
|
||||
- Check your plan's connection limit
|
||||
- Configure appropriate connection pool size
|
||||
- Use PgBouncer for connection multiplexing
|
||||
|
||||
### Prisma Configuration
|
||||
|
||||
Optimize Prisma connection settings in `packages/api/prisma/schema.prisma`:
|
||||
|
||||
```prisma
|
||||
datasource db {
|
||||
provider = "postgresql"
|
||||
url = env("DATABASE_URL")
|
||||
|
||||
// Optional: Configure connection pool
|
||||
// relationMode = "prisma"
|
||||
}
|
||||
```
|
||||
|
||||
## Monitoring
|
||||
|
||||
### Connection Status
|
||||
|
||||
Check database connectivity:
|
||||
|
||||
```bash
|
||||
# Health check endpoint
|
||||
curl http://localhost:3001/health
|
||||
|
||||
# Test database connection with Prisma
|
||||
docker exec basil-api npx prisma migrate status
|
||||
```
|
||||
|
||||
### Query Performance
|
||||
|
||||
Monitor slow queries:
|
||||
- Enable PostgreSQL query logging
|
||||
- Use database monitoring tools (CloudWatch, Datadog, etc.)
|
||||
- Analyze with `EXPLAIN ANALYZE` for slow queries
|
||||
|
||||
## Troubleshooting
|
||||
|
||||
### Connection Refused
|
||||
|
||||
**Error: `Connection refused`**
|
||||
|
||||
Possible causes:
|
||||
- Incorrect host or port
|
||||
- Database server not running
|
||||
- Firewall blocking connection
|
||||
- Security group not allowing your IP
|
||||
|
||||
Solutions:
|
||||
- Verify database hostname and port
|
||||
- Check database server status
|
||||
- Update firewall rules to allow your application's IP
|
||||
- Verify security group rules (AWS, Azure, GCP)
|
||||
|
||||
### Authentication Failed
|
||||
|
||||
**Error: `Password authentication failed`**
|
||||
|
||||
Solutions:
|
||||
- Double-check username and password
|
||||
- Ensure special characters in password are URL-encoded
|
||||
- Verify user has permission to access the database
|
||||
- Check if IP allowlist includes your server
|
||||
|
||||
### SSL Required
|
||||
|
||||
**Error: `SSL required` or `no pg_hba.conf entry`**
|
||||
|
||||
Solutions:
|
||||
- Add `sslmode=require` to connection string
|
||||
- Configure database to accept non-SSL connections (dev only)
|
||||
- Install required SSL certificates
|
||||
|
||||
### Connection Timeout
|
||||
|
||||
**Error: `Connection timeout`**
|
||||
|
||||
Solutions:
|
||||
- Verify network connectivity
|
||||
- Check if database server is accepting connections
|
||||
- Increase timeout in Prisma configuration
|
||||
- Verify DNS resolution of database hostname
|
||||
|
||||
### Too Many Connections
|
||||
|
||||
**Error: `Too many connections`**
|
||||
|
||||
Solutions:
|
||||
- Reduce connection pool size
|
||||
- Use PgBouncer for connection pooling
|
||||
- Upgrade database plan for more connections
|
||||
- Check for connection leaks in application code
|
||||
|
||||
## Migration from Local to Remote
|
||||
|
||||
To migrate from local Docker database to remote:
|
||||
|
||||
1. **Create a backup of local data:**
|
||||
```bash
|
||||
curl -X POST http://localhost:3001/api/backup
|
||||
```
|
||||
|
||||
2. **Initialize remote database:**
|
||||
```bash
|
||||
# Update DATABASE_URL to point to remote
|
||||
cd packages/api
|
||||
npx prisma migrate deploy
|
||||
```
|
||||
|
||||
3. **Restore backup to remote database:**
|
||||
```bash
|
||||
curl -X POST \
|
||||
-F "backup=@basil-backup-2025-11-10.zip" \
|
||||
http://localhost:3001/api/backup/restore
|
||||
```
|
||||
|
||||
4. **Verify data migration:**
|
||||
- Check recipe count: `curl http://localhost:3001/api/recipes`
|
||||
- Test recipe access and functionality
|
||||
|
||||
5. **Update production configuration:**
|
||||
- Update environment variables in production
|
||||
- Restart application services
|
||||
|
||||
## Cost Optimization
|
||||
|
||||
### Managed Database Pricing
|
||||
|
||||
Compare costs across providers:
|
||||
- **AWS RDS**: Pay per instance hour + storage
|
||||
- **Google Cloud SQL**: Pay per vCPU hour + storage
|
||||
- **Azure Database**: Pay per vCore + storage
|
||||
- **Digital Ocean**: Fixed monthly pricing by plan
|
||||
- **Heroku**: Free tier available, then fixed monthly pricing
|
||||
|
||||
### Tips to Reduce Costs
|
||||
|
||||
1. **Right-size your instance**: Start small, scale as needed
|
||||
2. **Use reserved instances**: AWS/Azure offer discounts for 1-3 year commitments
|
||||
3. **Enable auto-scaling**: Scale down during low traffic periods
|
||||
4. **Optimize storage**: Use standard storage instead of provisioned IOPS if possible
|
||||
5. **Regular backups**: Use built-in backup services (cheaper than manual snapshots)
|
||||
|
||||
## Support
|
||||
|
||||
For database-specific configuration issues, consult:
|
||||
- [AWS RDS Documentation](https://docs.aws.amazon.com/rds/)
|
||||
- [Google Cloud SQL Documentation](https://cloud.google.com/sql/docs)
|
||||
- [Azure Database Documentation](https://docs.microsoft.com/en-us/azure/postgresql/)
|
||||
- [Digital Ocean Documentation](https://docs.digitalocean.com/products/databases/)
|
||||
- [Prisma Database Connectors](https://www.prisma.io/docs/concepts/database-connectors/postgresql)
|
||||
258
packages/api/src/routes/backup.routes.ts
Normal file
258
packages/api/src/routes/backup.routes.ts
Normal file
@@ -0,0 +1,258 @@
|
||||
import express, { Request, Response } from 'express';
|
||||
import path from 'path';
|
||||
import fs from 'fs/promises';
|
||||
import { createBackup, restoreBackup, listBackups, deleteBackup } from '../services/backup.service';
|
||||
import multer from 'multer';
|
||||
|
||||
const router = express.Router();
|
||||
|
||||
// Configure multer for backup file uploads
|
||||
const upload = multer({
|
||||
dest: '/tmp/basil-restore/',
|
||||
limits: {
|
||||
fileSize: 1024 * 1024 * 1024, // 1GB max
|
||||
},
|
||||
});
|
||||
|
||||
// Get backup directory from env or use default
|
||||
const getBackupDir = (): string => {
|
||||
return process.env.BACKUP_PATH || path.join(__dirname, '../../../backups');
|
||||
};
|
||||
|
||||
/**
|
||||
* POST /api/backup
|
||||
* Creates a new backup of all data and files
|
||||
*/
|
||||
router.post('/', async (req: Request, res: Response) => {
|
||||
try {
|
||||
const backupDir = getBackupDir();
|
||||
await fs.mkdir(backupDir, { recursive: true });
|
||||
|
||||
const backupPath = await createBackup(backupDir);
|
||||
const fileName = path.basename(backupPath);
|
||||
const stats = await fs.stat(backupPath);
|
||||
|
||||
res.json({
|
||||
success: true,
|
||||
message: 'Backup created successfully',
|
||||
backup: {
|
||||
name: fileName,
|
||||
path: backupPath,
|
||||
size: stats.size,
|
||||
created: stats.birthtime,
|
||||
},
|
||||
});
|
||||
} catch (error) {
|
||||
console.error('Backup creation error:', error);
|
||||
res.status(500).json({
|
||||
success: false,
|
||||
error: 'Failed to create backup',
|
||||
message: error instanceof Error ? error.message : 'Unknown error',
|
||||
});
|
||||
}
|
||||
});
|
||||
|
||||
/**
|
||||
* GET /api/backup
|
||||
* Lists all available backups
|
||||
*/
|
||||
router.get('/', async (req: Request, res: Response) => {
|
||||
try {
|
||||
const backupDir = getBackupDir();
|
||||
const backups = await listBackups(backupDir);
|
||||
|
||||
res.json({
|
||||
success: true,
|
||||
backups,
|
||||
});
|
||||
} catch (error) {
|
||||
console.error('Error listing backups:', error);
|
||||
res.status(500).json({
|
||||
success: false,
|
||||
error: 'Failed to list backups',
|
||||
message: error instanceof Error ? error.message : 'Unknown error',
|
||||
});
|
||||
}
|
||||
});
|
||||
|
||||
/**
|
||||
* GET /api/backup/:filename
|
||||
* Downloads a specific backup file
|
||||
*/
|
||||
router.get('/:filename', async (req: Request, res: Response) => {
|
||||
try {
|
||||
const { filename } = req.params;
|
||||
const backupDir = getBackupDir();
|
||||
const backupPath = path.join(backupDir, filename);
|
||||
|
||||
// Security check: ensure the file is within the backup directory
|
||||
const resolvedPath = path.resolve(backupPath);
|
||||
const resolvedBackupDir = path.resolve(backupDir);
|
||||
if (!resolvedPath.startsWith(resolvedBackupDir)) {
|
||||
return res.status(403).json({
|
||||
success: false,
|
||||
error: 'Access denied',
|
||||
});
|
||||
}
|
||||
|
||||
// Check if file exists
|
||||
try {
|
||||
await fs.access(backupPath);
|
||||
} catch {
|
||||
return res.status(404).json({
|
||||
success: false,
|
||||
error: 'Backup file not found',
|
||||
});
|
||||
}
|
||||
|
||||
// Send file
|
||||
res.download(backupPath, filename, (err) => {
|
||||
if (err) {
|
||||
console.error('Error downloading backup:', err);
|
||||
if (!res.headersSent) {
|
||||
res.status(500).json({
|
||||
success: false,
|
||||
error: 'Failed to download backup',
|
||||
});
|
||||
}
|
||||
}
|
||||
});
|
||||
} catch (error) {
|
||||
console.error('Error downloading backup:', error);
|
||||
res.status(500).json({
|
||||
success: false,
|
||||
error: 'Failed to download backup',
|
||||
message: error instanceof Error ? error.message : 'Unknown error',
|
||||
});
|
||||
}
|
||||
});
|
||||
|
||||
/**
|
||||
* POST /api/backup/restore
|
||||
* Restores data from a backup file
|
||||
* Accepts either:
|
||||
* - multipart/form-data with 'backup' file field
|
||||
* - JSON with 'filename' field (for existing backup in backup directory)
|
||||
*/
|
||||
router.post('/restore', upload.single('backup'), async (req: Request, res: Response) => {
|
||||
let backupPath: string | null = null;
|
||||
let isTemporaryFile = false;
|
||||
|
||||
try {
|
||||
const backupDir = getBackupDir();
|
||||
|
||||
// Check if file was uploaded or filename provided
|
||||
if (req.file) {
|
||||
backupPath = req.file.path;
|
||||
isTemporaryFile = true;
|
||||
} else if (req.body.filename) {
|
||||
backupPath = path.join(backupDir, req.body.filename);
|
||||
|
||||
// Security check
|
||||
const resolvedPath = path.resolve(backupPath);
|
||||
const resolvedBackupDir = path.resolve(backupDir);
|
||||
if (!resolvedPath.startsWith(resolvedBackupDir)) {
|
||||
return res.status(403).json({
|
||||
success: false,
|
||||
error: 'Access denied',
|
||||
});
|
||||
}
|
||||
|
||||
// Check if file exists
|
||||
try {
|
||||
await fs.access(backupPath);
|
||||
} catch {
|
||||
return res.status(404).json({
|
||||
success: false,
|
||||
error: 'Backup file not found',
|
||||
});
|
||||
}
|
||||
} else {
|
||||
return res.status(400).json({
|
||||
success: false,
|
||||
error: 'No backup file provided. Either upload a file or specify a filename.',
|
||||
});
|
||||
}
|
||||
|
||||
// Perform restore
|
||||
const metadata = await restoreBackup(backupPath, backupDir);
|
||||
|
||||
// Clean up temporary file if it was uploaded
|
||||
if (isTemporaryFile && backupPath) {
|
||||
try {
|
||||
await fs.unlink(backupPath);
|
||||
} catch (err) {
|
||||
console.warn('Failed to clean up temporary file:', err);
|
||||
}
|
||||
}
|
||||
|
||||
res.json({
|
||||
success: true,
|
||||
message: 'Backup restored successfully',
|
||||
metadata,
|
||||
});
|
||||
} catch (error) {
|
||||
console.error('Restore error:', error);
|
||||
|
||||
// Clean up temporary file on error
|
||||
if (isTemporaryFile && backupPath) {
|
||||
try {
|
||||
await fs.unlink(backupPath);
|
||||
} catch {}
|
||||
}
|
||||
|
||||
res.status(500).json({
|
||||
success: false,
|
||||
error: 'Failed to restore backup',
|
||||
message: error instanceof Error ? error.message : 'Unknown error',
|
||||
});
|
||||
}
|
||||
});
|
||||
|
||||
/**
|
||||
* DELETE /api/backup/:filename
|
||||
* Deletes a backup file
|
||||
*/
|
||||
router.delete('/:filename', async (req: Request, res: Response) => {
|
||||
try {
|
||||
const { filename } = req.params;
|
||||
const backupDir = getBackupDir();
|
||||
const backupPath = path.join(backupDir, filename);
|
||||
|
||||
// Security check
|
||||
const resolvedPath = path.resolve(backupPath);
|
||||
const resolvedBackupDir = path.resolve(backupDir);
|
||||
if (!resolvedPath.startsWith(resolvedBackupDir)) {
|
||||
return res.status(403).json({
|
||||
success: false,
|
||||
error: 'Access denied',
|
||||
});
|
||||
}
|
||||
|
||||
// Check if file exists
|
||||
try {
|
||||
await fs.access(backupPath);
|
||||
} catch {
|
||||
return res.status(404).json({
|
||||
success: false,
|
||||
error: 'Backup file not found',
|
||||
});
|
||||
}
|
||||
|
||||
await deleteBackup(backupPath);
|
||||
|
||||
res.json({
|
||||
success: true,
|
||||
message: 'Backup deleted successfully',
|
||||
});
|
||||
} catch (error) {
|
||||
console.error('Error deleting backup:', error);
|
||||
res.status(500).json({
|
||||
success: false,
|
||||
error: 'Failed to delete backup',
|
||||
message: error instanceof Error ? error.message : 'Unknown error',
|
||||
});
|
||||
}
|
||||
});
|
||||
|
||||
export default router;
|
||||
437
packages/api/src/services/backup.service.ts
Normal file
437
packages/api/src/services/backup.service.ts
Normal file
@@ -0,0 +1,437 @@
|
||||
import { PrismaClient } from '@prisma/client';
|
||||
import fs from 'fs/promises';
|
||||
import path from 'path';
|
||||
import archiver from 'archiver';
|
||||
import { createWriteStream, createReadStream } from 'fs';
|
||||
import extract from 'extract-zip';
|
||||
|
||||
const prisma = new PrismaClient();
|
||||
|
||||
export interface BackupMetadata {
|
||||
version: string;
|
||||
timestamp: string;
|
||||
recipeCount: number;
|
||||
cookbookCount: number;
|
||||
tagCount: number;
|
||||
}
|
||||
|
||||
export interface BackupData {
|
||||
metadata: BackupMetadata;
|
||||
recipes: any[];
|
||||
cookbooks: any[];
|
||||
tags: any[];
|
||||
recipeTags: any[];
|
||||
cookbookRecipes: any[];
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates a complete backup of all database data and uploaded files
|
||||
* Returns the path to the backup file
|
||||
*/
|
||||
export async function createBackup(backupDir: string): Promise<string> {
|
||||
const timestamp = new Date().toISOString().replace(/[:.]/g, '-');
|
||||
const backupName = `basil-backup-${timestamp}`;
|
||||
const tempDir = path.join(backupDir, 'temp', backupName);
|
||||
const backupFilePath = path.join(backupDir, `${backupName}.zip`);
|
||||
|
||||
try {
|
||||
// Create temp directory for backup assembly
|
||||
await fs.mkdir(tempDir, { recursive: true });
|
||||
|
||||
// Export all database data
|
||||
const backupData = await exportDatabaseData();
|
||||
|
||||
// Write database backup to JSON file
|
||||
const dbBackupPath = path.join(tempDir, 'database.json');
|
||||
await fs.writeFile(dbBackupPath, JSON.stringify(backupData, null, 2));
|
||||
|
||||
// Copy uploaded files
|
||||
const uploadsPath = process.env.LOCAL_STORAGE_PATH || path.join(__dirname, '../../../uploads');
|
||||
const backupUploadsPath = path.join(tempDir, 'uploads');
|
||||
|
||||
try {
|
||||
await fs.access(uploadsPath);
|
||||
await copyDirectory(uploadsPath, backupUploadsPath);
|
||||
} catch (error) {
|
||||
console.warn('No uploads directory found, skipping file backup');
|
||||
}
|
||||
|
||||
// Create ZIP archive
|
||||
await createZipArchive(tempDir, backupFilePath);
|
||||
|
||||
// Clean up temp directory
|
||||
await fs.rm(tempDir, { recursive: true, force: true });
|
||||
|
||||
return backupFilePath;
|
||||
} catch (error) {
|
||||
// Clean up on error
|
||||
try {
|
||||
await fs.rm(tempDir, { recursive: true, force: true });
|
||||
} catch {}
|
||||
throw error;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Exports all database data to a structured object
|
||||
*/
|
||||
async function exportDatabaseData(): Promise<BackupData> {
|
||||
// Fetch all data with relations
|
||||
const recipes = await prisma.recipe.findMany({
|
||||
include: {
|
||||
sections: true,
|
||||
ingredients: {
|
||||
include: {
|
||||
instructions: true,
|
||||
},
|
||||
},
|
||||
instructions: {
|
||||
include: {
|
||||
ingredients: true,
|
||||
},
|
||||
},
|
||||
images: true,
|
||||
tags: true,
|
||||
cookbooks: true,
|
||||
},
|
||||
});
|
||||
|
||||
const cookbooks = await prisma.cookbook.findMany({
|
||||
include: {
|
||||
recipes: true,
|
||||
},
|
||||
});
|
||||
|
||||
const tags = await prisma.tag.findMany({
|
||||
include: {
|
||||
recipes: true,
|
||||
},
|
||||
});
|
||||
|
||||
const recipeTags = await prisma.recipeTag.findMany();
|
||||
const cookbookRecipes = await prisma.cookbookRecipe.findMany();
|
||||
|
||||
const metadata: BackupMetadata = {
|
||||
version: '1.0',
|
||||
timestamp: new Date().toISOString(),
|
||||
recipeCount: recipes.length,
|
||||
cookbookCount: cookbooks.length,
|
||||
tagCount: tags.length,
|
||||
};
|
||||
|
||||
return {
|
||||
metadata,
|
||||
recipes,
|
||||
cookbooks,
|
||||
tags,
|
||||
recipeTags,
|
||||
cookbookRecipes,
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* Restores database and files from a backup file
|
||||
*/
|
||||
export async function restoreBackup(backupFilePath: string, backupDir: string): Promise<BackupMetadata> {
|
||||
const timestamp = new Date().toISOString().replace(/[:.]/g, '-');
|
||||
const tempDir = path.join(backupDir, 'temp', `restore-${timestamp}`);
|
||||
|
||||
try {
|
||||
// Extract backup archive
|
||||
await fs.mkdir(tempDir, { recursive: true });
|
||||
await extract(backupFilePath, { dir: tempDir });
|
||||
|
||||
// Read and parse database backup
|
||||
const dbBackupPath = path.join(tempDir, 'database.json');
|
||||
const backupData: BackupData = JSON.parse(await fs.readFile(dbBackupPath, 'utf-8'));
|
||||
|
||||
// Clear existing data (in reverse order of dependencies)
|
||||
await clearDatabase();
|
||||
|
||||
// Restore data (in order of dependencies)
|
||||
await restoreDatabaseData(backupData);
|
||||
|
||||
// Restore uploaded files
|
||||
const backupUploadsPath = path.join(tempDir, 'uploads');
|
||||
const uploadsPath = process.env.LOCAL_STORAGE_PATH || path.join(__dirname, '../../../uploads');
|
||||
|
||||
try {
|
||||
await fs.access(backupUploadsPath);
|
||||
// Clear existing uploads
|
||||
try {
|
||||
await fs.rm(uploadsPath, { recursive: true, force: true });
|
||||
} catch {}
|
||||
await fs.mkdir(uploadsPath, { recursive: true });
|
||||
// Restore uploads
|
||||
await copyDirectory(backupUploadsPath, uploadsPath);
|
||||
} catch (error) {
|
||||
console.warn('No uploads in backup, skipping file restore');
|
||||
}
|
||||
|
||||
// Clean up temp directory
|
||||
await fs.rm(tempDir, { recursive: true, force: true });
|
||||
|
||||
return backupData.metadata;
|
||||
} catch (error) {
|
||||
// Clean up on error
|
||||
try {
|
||||
await fs.rm(tempDir, { recursive: true, force: true });
|
||||
} catch {}
|
||||
throw error;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Clears all data from the database
|
||||
*/
|
||||
async function clearDatabase(): Promise<void> {
|
||||
// Delete in order to respect foreign key constraints
|
||||
await prisma.cookbookRecipe.deleteMany();
|
||||
await prisma.recipeTag.deleteMany();
|
||||
await prisma.ingredientInstructionMapping.deleteMany();
|
||||
await prisma.recipeImage.deleteMany();
|
||||
await prisma.instruction.deleteMany();
|
||||
await prisma.ingredient.deleteMany();
|
||||
await prisma.recipeSection.deleteMany();
|
||||
await prisma.recipe.deleteMany();
|
||||
await prisma.cookbook.deleteMany();
|
||||
await prisma.tag.deleteMany();
|
||||
}
|
||||
|
||||
/**
|
||||
* Restores database data from backup
|
||||
*/
|
||||
async function restoreDatabaseData(backupData: BackupData): Promise<void> {
|
||||
// Restore tags first (no dependencies)
|
||||
for (const tag of backupData.tags) {
|
||||
await prisma.tag.create({
|
||||
data: {
|
||||
id: tag.id,
|
||||
name: tag.name,
|
||||
},
|
||||
});
|
||||
}
|
||||
|
||||
// Restore cookbooks (no dependencies)
|
||||
for (const cookbook of backupData.cookbooks) {
|
||||
await prisma.cookbook.create({
|
||||
data: {
|
||||
id: cookbook.id,
|
||||
name: cookbook.name,
|
||||
description: cookbook.description,
|
||||
coverImageUrl: cookbook.coverImageUrl,
|
||||
autoFilterCategories: cookbook.autoFilterCategories,
|
||||
autoFilterTags: cookbook.autoFilterTags,
|
||||
createdAt: new Date(cookbook.createdAt),
|
||||
updatedAt: new Date(cookbook.updatedAt),
|
||||
},
|
||||
});
|
||||
}
|
||||
|
||||
// Restore recipes with all nested relations
|
||||
for (const recipe of backupData.recipes) {
|
||||
await prisma.recipe.create({
|
||||
data: {
|
||||
id: recipe.id,
|
||||
title: recipe.title,
|
||||
description: recipe.description,
|
||||
prepTime: recipe.prepTime,
|
||||
cookTime: recipe.cookTime,
|
||||
totalTime: recipe.totalTime,
|
||||
servings: recipe.servings,
|
||||
imageUrl: recipe.imageUrl,
|
||||
sourceUrl: recipe.sourceUrl,
|
||||
author: recipe.author,
|
||||
cuisine: recipe.cuisine,
|
||||
categories: recipe.categories,
|
||||
rating: recipe.rating,
|
||||
createdAt: new Date(recipe.createdAt),
|
||||
updatedAt: new Date(recipe.updatedAt),
|
||||
sections: {
|
||||
create: recipe.sections?.map((section: any) => ({
|
||||
id: section.id,
|
||||
name: section.name,
|
||||
order: section.order,
|
||||
timing: section.timing,
|
||||
})) || [],
|
||||
},
|
||||
ingredients: {
|
||||
create: recipe.ingredients
|
||||
?.filter((ing: any) => !ing.sectionId)
|
||||
.map((ing: any) => ({
|
||||
id: ing.id,
|
||||
name: ing.name,
|
||||
amount: ing.amount,
|
||||
unit: ing.unit,
|
||||
notes: ing.notes,
|
||||
order: ing.order,
|
||||
})) || [],
|
||||
},
|
||||
instructions: {
|
||||
create: recipe.instructions
|
||||
?.filter((inst: any) => !inst.sectionId)
|
||||
.map((inst: any) => ({
|
||||
id: inst.id,
|
||||
step: inst.step,
|
||||
text: inst.text,
|
||||
imageUrl: inst.imageUrl,
|
||||
timing: inst.timing,
|
||||
})) || [],
|
||||
},
|
||||
images: {
|
||||
create: recipe.images?.map((img: any) => ({
|
||||
id: img.id,
|
||||
url: img.url,
|
||||
order: img.order,
|
||||
})) || [],
|
||||
},
|
||||
},
|
||||
});
|
||||
|
||||
// Restore section ingredients and instructions
|
||||
for (const section of recipe.sections || []) {
|
||||
const sectionIngredients = recipe.ingredients?.filter((ing: any) => ing.sectionId === section.id) || [];
|
||||
const sectionInstructions = recipe.instructions?.filter((inst: any) => inst.sectionId === section.id) || [];
|
||||
|
||||
for (const ing of sectionIngredients) {
|
||||
await prisma.ingredient.create({
|
||||
data: {
|
||||
id: ing.id,
|
||||
recipeId: recipe.id,
|
||||
sectionId: section.id,
|
||||
name: ing.name,
|
||||
amount: ing.amount,
|
||||
unit: ing.unit,
|
||||
notes: ing.notes,
|
||||
order: ing.order,
|
||||
},
|
||||
});
|
||||
}
|
||||
|
||||
for (const inst of sectionInstructions) {
|
||||
await prisma.instruction.create({
|
||||
data: {
|
||||
id: inst.id,
|
||||
recipeId: recipe.id,
|
||||
sectionId: section.id,
|
||||
step: inst.step,
|
||||
text: inst.text,
|
||||
imageUrl: inst.imageUrl,
|
||||
timing: inst.timing,
|
||||
},
|
||||
});
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Restore ingredient-instruction mappings
|
||||
for (const recipe of backupData.recipes) {
|
||||
for (const instruction of recipe.instructions || []) {
|
||||
for (const mapping of instruction.ingredients || []) {
|
||||
await prisma.ingredientInstructionMapping.create({
|
||||
data: {
|
||||
id: mapping.id,
|
||||
ingredientId: mapping.ingredientId,
|
||||
instructionId: mapping.instructionId,
|
||||
order: mapping.order,
|
||||
},
|
||||
});
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Restore recipe tags
|
||||
for (const recipeTag of backupData.recipeTags) {
|
||||
await prisma.recipeTag.create({
|
||||
data: {
|
||||
recipeId: recipeTag.recipeId,
|
||||
tagId: recipeTag.tagId,
|
||||
},
|
||||
});
|
||||
}
|
||||
|
||||
// Restore cookbook recipes
|
||||
for (const cookbookRecipe of backupData.cookbookRecipes) {
|
||||
await prisma.cookbookRecipe.create({
|
||||
data: {
|
||||
id: cookbookRecipe.id,
|
||||
cookbookId: cookbookRecipe.cookbookId,
|
||||
recipeId: cookbookRecipe.recipeId,
|
||||
addedAt: new Date(cookbookRecipe.addedAt),
|
||||
},
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates a ZIP archive from a directory
|
||||
*/
|
||||
async function createZipArchive(sourceDir: string, outputPath: string): Promise<void> {
|
||||
return new Promise((resolve, reject) => {
|
||||
const output = createWriteStream(outputPath);
|
||||
const archive = archiver('zip', { zlib: { level: 9 } });
|
||||
|
||||
output.on('close', () => resolve());
|
||||
archive.on('error', (err) => reject(err));
|
||||
|
||||
archive.pipe(output);
|
||||
archive.directory(sourceDir, false);
|
||||
archive.finalize();
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* Recursively copies a directory
|
||||
*/
|
||||
async function copyDirectory(source: string, destination: string): Promise<void> {
|
||||
await fs.mkdir(destination, { recursive: true });
|
||||
const entries = await fs.readdir(source, { withFileTypes: true });
|
||||
|
||||
for (const entry of entries) {
|
||||
const srcPath = path.join(source, entry.name);
|
||||
const destPath = path.join(destination, entry.name);
|
||||
|
||||
if (entry.isDirectory()) {
|
||||
await copyDirectory(srcPath, destPath);
|
||||
} else {
|
||||
await fs.copyFile(srcPath, destPath);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Lists all available backups in the backup directory
|
||||
*/
|
||||
export async function listBackups(backupDir: string): Promise<Array<{ name: string; path: string; size: number; created: Date }>> {
|
||||
try {
|
||||
await fs.mkdir(backupDir, { recursive: true });
|
||||
const files = await fs.readdir(backupDir);
|
||||
const backups = [];
|
||||
|
||||
for (const file of files) {
|
||||
if (file.startsWith('basil-backup-') && file.endsWith('.zip')) {
|
||||
const filePath = path.join(backupDir, file);
|
||||
const stats = await fs.stat(filePath);
|
||||
backups.push({
|
||||
name: file,
|
||||
path: filePath,
|
||||
size: stats.size,
|
||||
created: stats.birthtime,
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
// Sort by creation date, newest first
|
||||
return backups.sort((a, b) => b.created.getTime() - a.created.getTime());
|
||||
} catch (error) {
|
||||
console.error('Error listing backups:', error);
|
||||
return [];
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Deletes a backup file
|
||||
*/
|
||||
export async function deleteBackup(backupFilePath: string): Promise<void> {
|
||||
await fs.unlink(backupFilePath);
|
||||
}
|
||||
@@ -13,32 +13,36 @@
|
||||
"test:coverage": "vitest run --coverage",
|
||||
"lint": "eslint . --ext ts,tsx"
|
||||
},
|
||||
"keywords": ["basil", "web"],
|
||||
"keywords": [
|
||||
"basil",
|
||||
"web"
|
||||
],
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"@basil/shared": "^1.0.0",
|
||||
"@hello-pangea/dnd": "^18.0.1",
|
||||
"axios": "^1.6.5",
|
||||
"react": "^18.2.0",
|
||||
"react-dom": "^18.2.0",
|
||||
"react-router-dom": "^6.21.1",
|
||||
"axios": "^1.6.5"
|
||||
"react-router-dom": "^6.21.1"
|
||||
},
|
||||
"devDependencies": {
|
||||
"@testing-library/jest-dom": "^6.2.0",
|
||||
"@testing-library/react": "^14.1.2",
|
||||
"@testing-library/user-event": "^14.5.2",
|
||||
"@types/react": "^18.2.47",
|
||||
"@types/react-dom": "^18.2.18",
|
||||
"@typescript-eslint/eslint-plugin": "^6.17.0",
|
||||
"@typescript-eslint/parser": "^6.17.0",
|
||||
"@vitejs/plugin-react": "^4.2.1",
|
||||
"@vitest/coverage-v8": "^1.2.0",
|
||||
"@vitest/ui": "^1.2.0",
|
||||
"eslint": "^8.56.0",
|
||||
"eslint-plugin-react-hooks": "^4.6.0",
|
||||
"eslint-plugin-react-refresh": "^0.4.5",
|
||||
"jsdom": "^23.2.0",
|
||||
"typescript": "^5.3.3",
|
||||
"vite": "^5.0.10",
|
||||
"vitest": "^1.2.0",
|
||||
"@vitest/ui": "^1.2.0",
|
||||
"@vitest/coverage-v8": "^1.2.0",
|
||||
"@testing-library/react": "^14.1.2",
|
||||
"@testing-library/jest-dom": "^6.2.0",
|
||||
"@testing-library/user-event": "^14.5.2",
|
||||
"jsdom": "^23.2.0"
|
||||
"vitest": "^1.2.0"
|
||||
}
|
||||
}
|
||||
|
||||
@@ -122,6 +122,13 @@ function CookingMode() {
|
||||
}
|
||||
};
|
||||
|
||||
const scaleServings = (multiplier: number) => {
|
||||
if (recipe?.servings) {
|
||||
const newServings = Math.round(recipe.servings * multiplier);
|
||||
setCurrentServings(newServings > 0 ? newServings : 1);
|
||||
}
|
||||
};
|
||||
|
||||
const getScaledIngredientText = (ingredient: Ingredient): string => {
|
||||
let ingredientStr = '';
|
||||
if (ingredient.amount && ingredient.unit) {
|
||||
@@ -226,13 +233,29 @@ function CookingMode() {
|
||||
<div className="cooking-mode-controls">
|
||||
{recipe.servings && currentServings !== null && (
|
||||
<div className="servings-control">
|
||||
<button onClick={decrementServings} disabled={currentServings <= 1}>
|
||||
−
|
||||
</button>
|
||||
<span>Servings: {currentServings}</span>
|
||||
<button onClick={incrementServings}>
|
||||
+
|
||||
</button>
|
||||
<div className="servings-adjuster">
|
||||
<button onClick={decrementServings} disabled={currentServings <= 1}>
|
||||
−
|
||||
</button>
|
||||
<span>Servings: {currentServings}</span>
|
||||
<button onClick={incrementServings}>
|
||||
+
|
||||
</button>
|
||||
</div>
|
||||
<div className="quick-scale-buttons">
|
||||
<button onClick={() => scaleServings(0.5)} className="scale-button" title="Half recipe">
|
||||
½×
|
||||
</button>
|
||||
<button onClick={() => scaleServings(1.5)} className="scale-button" title="1.5× recipe">
|
||||
1.5×
|
||||
</button>
|
||||
<button onClick={() => scaleServings(2)} className="scale-button" title="Double recipe">
|
||||
2×
|
||||
</button>
|
||||
<button onClick={() => scaleServings(3)} className="scale-button" title="Triple recipe">
|
||||
3×
|
||||
</button>
|
||||
</div>
|
||||
</div>
|
||||
)}
|
||||
|
||||
|
||||
@@ -53,6 +53,13 @@ function RecipeDetail() {
|
||||
setCurrentServings(recipe?.servings || null);
|
||||
};
|
||||
|
||||
const scaleServings = (multiplier: number) => {
|
||||
if (recipe?.servings) {
|
||||
const newServings = Math.round(recipe.servings * multiplier);
|
||||
setCurrentServings(newServings > 0 ? newServings : 1);
|
||||
}
|
||||
};
|
||||
|
||||
const handleDelete = async () => {
|
||||
if (!id || !confirm('Are you sure you want to delete this recipe?')) {
|
||||
return;
|
||||
@@ -140,18 +147,34 @@ function RecipeDetail() {
|
||||
{recipe.totalTime && <span>Total: {recipe.totalTime} min</span>}
|
||||
{recipe.servings && currentServings !== null && (
|
||||
<div className="servings-control">
|
||||
<button onClick={decrementServings} disabled={currentServings <= 1}>
|
||||
−
|
||||
</button>
|
||||
<span>Servings: {currentServings}</span>
|
||||
<button onClick={incrementServings}>
|
||||
+
|
||||
</button>
|
||||
{currentServings !== recipe.servings && (
|
||||
<button onClick={resetServings} className="reset-button">
|
||||
Reset
|
||||
<div className="servings-adjuster">
|
||||
<button onClick={decrementServings} disabled={currentServings <= 1}>
|
||||
−
|
||||
</button>
|
||||
)}
|
||||
<span>Servings: {currentServings}</span>
|
||||
<button onClick={incrementServings}>
|
||||
+
|
||||
</button>
|
||||
{currentServings !== recipe.servings && (
|
||||
<button onClick={resetServings} className="reset-button">
|
||||
Reset
|
||||
</button>
|
||||
)}
|
||||
</div>
|
||||
<div className="quick-scale-buttons">
|
||||
<button onClick={() => scaleServings(0.5)} className="scale-button" title="Half recipe">
|
||||
½×
|
||||
</button>
|
||||
<button onClick={() => scaleServings(1.5)} className="scale-button" title="1.5× recipe">
|
||||
1.5×
|
||||
</button>
|
||||
<button onClick={() => scaleServings(2)} className="scale-button" title="Double recipe">
|
||||
2×
|
||||
</button>
|
||||
<button onClick={() => scaleServings(3)} className="scale-button" title="Triple recipe">
|
||||
3×
|
||||
</button>
|
||||
</div>
|
||||
</div>
|
||||
)}
|
||||
</div>
|
||||
|
||||
@@ -1,6 +1,7 @@
|
||||
import { useState } from 'react';
|
||||
import { Recipe, RecipeSection, Ingredient, Instruction } from '@basil/shared';
|
||||
import { recipesApi } from '../services/api';
|
||||
import { DragDropContext, Droppable, Draggable, DropResult } from '@hello-pangea/dnd';
|
||||
|
||||
interface RecipeFormProps {
|
||||
initialRecipe?: Partial<Recipe>;
|
||||
@@ -147,6 +148,20 @@ function RecipeForm({ initialRecipe, onSubmit, onCancel }: RecipeFormProps) {
|
||||
setSections(newSections);
|
||||
};
|
||||
|
||||
const reorderSectionInstructions = (sectionIndex: number, result: DropResult) => {
|
||||
if (!result.destination) return;
|
||||
|
||||
const newSections = [...sections];
|
||||
const items = Array.from(newSections[sectionIndex].instructions);
|
||||
const [reorderedItem] = items.splice(result.source.index, 1);
|
||||
items.splice(result.destination.index, 0, reorderedItem);
|
||||
|
||||
// Update step numbers
|
||||
const updatedItems = items.map((item, index) => ({ ...item, step: index + 1 }));
|
||||
newSections[sectionIndex].instructions = updatedItems;
|
||||
setSections(newSections);
|
||||
};
|
||||
|
||||
// Simple mode ingredient management
|
||||
const addIngredient = () => {
|
||||
setIngredients([
|
||||
@@ -185,6 +200,18 @@ function RecipeForm({ initialRecipe, onSubmit, onCancel }: RecipeFormProps) {
|
||||
setInstructions(newInstructions);
|
||||
};
|
||||
|
||||
const reorderInstructions = (result: DropResult) => {
|
||||
if (!result.destination) return;
|
||||
|
||||
const items = Array.from(instructions);
|
||||
const [reorderedItem] = items.splice(result.source.index, 1);
|
||||
items.splice(result.destination.index, 0, reorderedItem);
|
||||
|
||||
// Update step numbers
|
||||
const updatedItems = items.map((item, index) => ({ ...item, step: index + 1 }));
|
||||
setInstructions(updatedItems);
|
||||
};
|
||||
|
||||
const handleImageUpload = async (e: React.ChangeEvent<HTMLInputElement>) => {
|
||||
const file = e.target.files?.[0];
|
||||
if (!file || !initialRecipe?.id) return;
|
||||
@@ -573,49 +600,77 @@ function RecipeForm({ initialRecipe, onSubmit, onCancel }: RecipeFormProps) {
|
||||
{/* Section Instructions */}
|
||||
<div className="subsection">
|
||||
<h5>Instructions</h5>
|
||||
{section.instructions.map((instruction, instructionIndex) => (
|
||||
<div key={instructionIndex} className="instruction-row">
|
||||
<div className="instruction-number">{instruction.step}</div>
|
||||
<div className="instruction-content">
|
||||
<input
|
||||
type="text"
|
||||
value={instruction.timing}
|
||||
onChange={(e) =>
|
||||
updateSectionInstruction(
|
||||
sectionIndex,
|
||||
instructionIndex,
|
||||
'timing',
|
||||
e.target.value
|
||||
)
|
||||
}
|
||||
placeholder="Timing (optional, e.g., 8:00am)"
|
||||
className="instruction-timing-input"
|
||||
/>
|
||||
<textarea
|
||||
value={instruction.text}
|
||||
onChange={(e) =>
|
||||
updateSectionInstruction(
|
||||
sectionIndex,
|
||||
instructionIndex,
|
||||
'text',
|
||||
e.target.value
|
||||
)
|
||||
}
|
||||
placeholder="Instruction text *"
|
||||
required
|
||||
/>
|
||||
</div>
|
||||
{section.instructions.length > 1 && (
|
||||
<button
|
||||
type="button"
|
||||
onClick={() => removeSectionInstruction(sectionIndex, instructionIndex)}
|
||||
className="btn-remove"
|
||||
>
|
||||
×
|
||||
</button>
|
||||
<DragDropContext onDragEnd={(result) => reorderSectionInstructions(sectionIndex, result)}>
|
||||
<Droppable droppableId={`section-${sectionIndex}-instructions`}>
|
||||
{(provided) => (
|
||||
<div {...provided.droppableProps} ref={provided.innerRef}>
|
||||
{section.instructions.map((instruction, instructionIndex) => (
|
||||
<Draggable
|
||||
key={`section-${sectionIndex}-instruction-${instructionIndex}`}
|
||||
draggableId={`section-${sectionIndex}-instruction-${instructionIndex}`}
|
||||
index={instructionIndex}
|
||||
>
|
||||
{(provided, snapshot) => (
|
||||
<div
|
||||
ref={provided.innerRef}
|
||||
{...provided.draggableProps}
|
||||
className={`instruction-row ${snapshot.isDragging ? 'dragging' : ''}`}
|
||||
>
|
||||
<div
|
||||
{...provided.dragHandleProps}
|
||||
className="instruction-drag-handle"
|
||||
title="Drag to reorder"
|
||||
>
|
||||
⋮⋮
|
||||
</div>
|
||||
<div className="instruction-number">{instruction.step}</div>
|
||||
<div className="instruction-content">
|
||||
<input
|
||||
type="text"
|
||||
value={instruction.timing}
|
||||
onChange={(e) =>
|
||||
updateSectionInstruction(
|
||||
sectionIndex,
|
||||
instructionIndex,
|
||||
'timing',
|
||||
e.target.value
|
||||
)
|
||||
}
|
||||
placeholder="Timing (optional, e.g., 8:00am)"
|
||||
className="instruction-timing-input"
|
||||
/>
|
||||
<textarea
|
||||
value={instruction.text}
|
||||
onChange={(e) =>
|
||||
updateSectionInstruction(
|
||||
sectionIndex,
|
||||
instructionIndex,
|
||||
'text',
|
||||
e.target.value
|
||||
)
|
||||
}
|
||||
placeholder="Instruction text *"
|
||||
required
|
||||
/>
|
||||
</div>
|
||||
{section.instructions.length > 1 && (
|
||||
<button
|
||||
type="button"
|
||||
onClick={() => removeSectionInstruction(sectionIndex, instructionIndex)}
|
||||
className="btn-remove"
|
||||
>
|
||||
×
|
||||
</button>
|
||||
)}
|
||||
</div>
|
||||
)}
|
||||
</Draggable>
|
||||
))}
|
||||
{provided.placeholder}
|
||||
</div>
|
||||
)}
|
||||
</div>
|
||||
))}
|
||||
</Droppable>
|
||||
</DragDropContext>
|
||||
<button
|
||||
type="button"
|
||||
onClick={() => addSectionInstruction(sectionIndex)}
|
||||
@@ -676,35 +731,63 @@ function RecipeForm({ initialRecipe, onSubmit, onCancel }: RecipeFormProps) {
|
||||
{/* Instructions */}
|
||||
<div className="form-section">
|
||||
<h3>Instructions</h3>
|
||||
{instructions.map((instruction, index) => (
|
||||
<div key={index} className="instruction-row">
|
||||
<div className="instruction-number">{instruction.step}</div>
|
||||
<div className="instruction-content">
|
||||
<input
|
||||
type="text"
|
||||
value={instruction.timing}
|
||||
onChange={(e) => updateInstruction(index, 'timing', e.target.value)}
|
||||
placeholder="Timing (optional, e.g., 8:00am)"
|
||||
className="instruction-timing-input"
|
||||
/>
|
||||
<textarea
|
||||
value={instruction.text}
|
||||
onChange={(e) => updateInstruction(index, 'text', e.target.value)}
|
||||
placeholder="Instruction text *"
|
||||
required
|
||||
/>
|
||||
</div>
|
||||
{instructions.length > 1 && (
|
||||
<button
|
||||
type="button"
|
||||
onClick={() => removeInstruction(index)}
|
||||
className="btn-remove"
|
||||
>
|
||||
×
|
||||
</button>
|
||||
<DragDropContext onDragEnd={reorderInstructions}>
|
||||
<Droppable droppableId="instructions">
|
||||
{(provided) => (
|
||||
<div {...provided.droppableProps} ref={provided.innerRef}>
|
||||
{instructions.map((instruction, index) => (
|
||||
<Draggable
|
||||
key={`instruction-${index}`}
|
||||
draggableId={`instruction-${index}`}
|
||||
index={index}
|
||||
>
|
||||
{(provided, snapshot) => (
|
||||
<div
|
||||
ref={provided.innerRef}
|
||||
{...provided.draggableProps}
|
||||
className={`instruction-row ${snapshot.isDragging ? 'dragging' : ''}`}
|
||||
>
|
||||
<div
|
||||
{...provided.dragHandleProps}
|
||||
className="instruction-drag-handle"
|
||||
title="Drag to reorder"
|
||||
>
|
||||
⋮⋮
|
||||
</div>
|
||||
<div className="instruction-number">{instruction.step}</div>
|
||||
<div className="instruction-content">
|
||||
<input
|
||||
type="text"
|
||||
value={instruction.timing}
|
||||
onChange={(e) => updateInstruction(index, 'timing', e.target.value)}
|
||||
placeholder="Timing (optional, e.g., 8:00am)"
|
||||
className="instruction-timing-input"
|
||||
/>
|
||||
<textarea
|
||||
value={instruction.text}
|
||||
onChange={(e) => updateInstruction(index, 'text', e.target.value)}
|
||||
placeholder="Instruction text *"
|
||||
required
|
||||
/>
|
||||
</div>
|
||||
{instructions.length > 1 && (
|
||||
<button
|
||||
type="button"
|
||||
onClick={() => removeInstruction(index)}
|
||||
className="btn-remove"
|
||||
>
|
||||
×
|
||||
</button>
|
||||
)}
|
||||
</div>
|
||||
)}
|
||||
</Draggable>
|
||||
))}
|
||||
{provided.placeholder}
|
||||
</div>
|
||||
)}
|
||||
</div>
|
||||
))}
|
||||
</Droppable>
|
||||
</DragDropContext>
|
||||
<button type="button" onClick={addInstruction} className="btn-secondary">
|
||||
+ Add Instruction
|
||||
</button>
|
||||
|
||||
@@ -2,6 +2,7 @@ import { useState, useEffect } from 'react';
|
||||
import { useParams, useNavigate } from 'react-router-dom';
|
||||
import { Recipe, Ingredient, Instruction, RecipeSection, Tag } from '@basil/shared';
|
||||
import { recipesApi, tagsApi } from '../services/api';
|
||||
import { DragDropContext, Droppable, Draggable, DropResult } from '@hello-pangea/dnd';
|
||||
import '../styles/UnifiedRecipeEdit.css';
|
||||
|
||||
interface MappingChange {
|
||||
@@ -92,7 +93,10 @@ function UnifiedEditRecipe() {
|
||||
setServings(loadedRecipe.servings?.toString() || '');
|
||||
setCuisine(loadedRecipe.cuisine || '');
|
||||
setRecipeCategories(loadedRecipe.categories || []);
|
||||
setRecipeTags(loadedRecipe.tags || []);
|
||||
|
||||
// Handle tags - API returns array of {tag: {id, name}} objects, we need string[]
|
||||
const tagNames = (loadedRecipe.tags as any)?.map((t: any) => t.tag?.name || t).filter(Boolean) || [];
|
||||
setRecipeTags(tagNames);
|
||||
|
||||
// Set sections or simple mode
|
||||
const hasSections = !!(loadedRecipe.sections && loadedRecipe.sections.length > 0);
|
||||
@@ -301,6 +305,19 @@ function UnifiedEditRecipe() {
|
||||
setHasChanges(true);
|
||||
};
|
||||
|
||||
const reorderInstructions = (result: DropResult) => {
|
||||
if (!result.destination) return;
|
||||
|
||||
const items = Array.from(instructions);
|
||||
const [reorderedItem] = items.splice(result.source.index, 1);
|
||||
items.splice(result.destination.index, 0, reorderedItem);
|
||||
|
||||
// Update step numbers
|
||||
const updatedItems = items.map((item, index) => ({ ...item, step: index + 1 }));
|
||||
setInstructions(updatedItems);
|
||||
setHasChanges(true);
|
||||
};
|
||||
|
||||
// Drag and drop
|
||||
const handleIngredientDragStart = (ingredient: Ingredient) => {
|
||||
setDraggedIngredient(ingredient);
|
||||
@@ -1099,132 +1116,161 @@ function UnifiedEditRecipe() {
|
||||
<div className="instructions-panel">
|
||||
<h3>Instructions</h3>
|
||||
|
||||
<ul className="instructions-list">
|
||||
{allInstructions.map((instruction, index) => {
|
||||
const isEditing = editingInstructionId === instruction.id;
|
||||
const mappedIngredients = getMappedIngredientsForInstruction(
|
||||
instruction.id || ''
|
||||
);
|
||||
const isDragOver = dragOverInstructionId === instruction.id;
|
||||
|
||||
return (
|
||||
<li
|
||||
key={instruction.id || index}
|
||||
className={`instruction-item ${isDragOver ? 'drag-over' : ''}`}
|
||||
onDragOver={(e) => handleInstructionDragOver(e, instruction.id || '')}
|
||||
onDragLeave={handleInstructionDragLeave}
|
||||
onDrop={(e) => handleInstructionDrop(e, instruction.id || '')}
|
||||
<DragDropContext onDragEnd={reorderInstructions}>
|
||||
<Droppable droppableId="instructions">
|
||||
{(provided) => (
|
||||
<ul
|
||||
className="instructions-list"
|
||||
{...provided.droppableProps}
|
||||
ref={provided.innerRef}
|
||||
>
|
||||
<div className="instruction-header">
|
||||
<span className="step-number">Step {instruction.step}</span>
|
||||
{allInstructions.map((instruction, index) => {
|
||||
const isEditing = editingInstructionId === instruction.id;
|
||||
const mappedIngredients = getMappedIngredientsForInstruction(
|
||||
instruction.id || ''
|
||||
);
|
||||
const isDragOver = dragOverInstructionId === instruction.id;
|
||||
|
||||
{!isEditing && (
|
||||
<div className="instruction-controls">
|
||||
<button
|
||||
className="btn-edit-instruction"
|
||||
onClick={() => startEditingInstruction(instruction)}
|
||||
>
|
||||
Edit
|
||||
</button>
|
||||
<button
|
||||
className="btn-delete-instruction"
|
||||
onClick={() => removeInstruction(index)}
|
||||
>
|
||||
Delete
|
||||
</button>
|
||||
</div>
|
||||
)}
|
||||
</div>
|
||||
|
||||
{isEditing ? (
|
||||
<>
|
||||
<input
|
||||
type="text"
|
||||
className="instruction-timing-input"
|
||||
value={editingInstructionTiming}
|
||||
onChange={(e) => setEditingInstructionTiming(e.target.value)}
|
||||
placeholder="Timing (optional, e.g., 8:00am)"
|
||||
/>
|
||||
<textarea
|
||||
className="instruction-text-input"
|
||||
value={editingInstructionText}
|
||||
onChange={(e) => setEditingInstructionText(e.target.value)}
|
||||
placeholder="Instruction text"
|
||||
autoFocus
|
||||
/>
|
||||
<div className="instruction-edit-actions">
|
||||
<button
|
||||
className="btn-save-instruction"
|
||||
onClick={saveEditingInstruction}
|
||||
>
|
||||
Save
|
||||
</button>
|
||||
<button
|
||||
className="btn-cancel-instruction"
|
||||
onClick={cancelEditingInstruction}
|
||||
>
|
||||
Cancel
|
||||
</button>
|
||||
</div>
|
||||
</>
|
||||
) : (
|
||||
<>
|
||||
{instruction.timing && (
|
||||
<div className="instruction-timing-display">
|
||||
{instruction.timing}
|
||||
</div>
|
||||
)}
|
||||
<div
|
||||
className="instruction-text-display"
|
||||
onClick={() => startEditingInstruction(instruction)}
|
||||
title="Click to edit"
|
||||
return (
|
||||
<Draggable
|
||||
key={instruction.id || `instruction-${index}`}
|
||||
draggableId={instruction.id || `instruction-${index}`}
|
||||
index={index}
|
||||
>
|
||||
{instruction.text || <em>Click to add instruction text</em>}
|
||||
</div>
|
||||
</>
|
||||
)}
|
||||
|
||||
{/* Drop zone for ingredients */}
|
||||
<div className="drop-zone">
|
||||
<span className="drop-zone-header">
|
||||
Ingredients for this step:
|
||||
</span>
|
||||
|
||||
{mappedIngredients.length === 0 ? (
|
||||
<p className="no-ingredients-mapped">
|
||||
Drag ingredients here or use bulk actions
|
||||
</p>
|
||||
) : (
|
||||
<ul className="mapped-ingredients-list">
|
||||
{mappedIngredients.map((ingredient) => (
|
||||
{(provided, snapshot) => (
|
||||
<li
|
||||
key={ingredient.id}
|
||||
className="mapped-ingredient-item"
|
||||
ref={provided.innerRef}
|
||||
{...provided.draggableProps}
|
||||
className={`instruction-item ${isDragOver ? 'drag-over' : ''} ${snapshot.isDragging ? 'dragging' : ''}`}
|
||||
onDragOver={(e) => handleInstructionDragOver(e, instruction.id || '')}
|
||||
onDragLeave={handleInstructionDragLeave}
|
||||
onDrop={(e) => handleInstructionDrop(e, instruction.id || '')}
|
||||
>
|
||||
<span className="mapped-ingredient-text">
|
||||
{getIngredientText(ingredient)}
|
||||
</span>
|
||||
<button
|
||||
className="btn-remove-ingredient"
|
||||
onClick={() =>
|
||||
removeIngredientFromInstruction(
|
||||
ingredient.id || '',
|
||||
instruction.id || ''
|
||||
)
|
||||
}
|
||||
title="Remove ingredient from this step"
|
||||
>
|
||||
✕
|
||||
</button>
|
||||
<div className="instruction-header">
|
||||
<div className="instruction-header-left">
|
||||
<div
|
||||
{...provided.dragHandleProps}
|
||||
className="instruction-drag-handle"
|
||||
title="Drag to reorder"
|
||||
>
|
||||
⋮⋮
|
||||
</div>
|
||||
<span className="step-number">Step {instruction.step}</span>
|
||||
</div>
|
||||
|
||||
{!isEditing && (
|
||||
<div className="instruction-controls">
|
||||
<button
|
||||
className="btn-edit-instruction"
|
||||
onClick={() => startEditingInstruction(instruction)}
|
||||
>
|
||||
Edit
|
||||
</button>
|
||||
<button
|
||||
className="btn-delete-instruction"
|
||||
onClick={() => removeInstruction(index)}
|
||||
>
|
||||
Delete
|
||||
</button>
|
||||
</div>
|
||||
)}
|
||||
</div>
|
||||
|
||||
{isEditing ? (
|
||||
<>
|
||||
<input
|
||||
type="text"
|
||||
className="instruction-timing-input"
|
||||
value={editingInstructionTiming}
|
||||
onChange={(e) => setEditingInstructionTiming(e.target.value)}
|
||||
placeholder="Timing (optional, e.g., 8:00am)"
|
||||
/>
|
||||
<textarea
|
||||
className="instruction-text-input"
|
||||
value={editingInstructionText}
|
||||
onChange={(e) => setEditingInstructionText(e.target.value)}
|
||||
placeholder="Instruction text"
|
||||
autoFocus
|
||||
/>
|
||||
<div className="instruction-edit-actions">
|
||||
<button
|
||||
className="btn-save-instruction"
|
||||
onClick={saveEditingInstruction}
|
||||
>
|
||||
Save
|
||||
</button>
|
||||
<button
|
||||
className="btn-cancel-instruction"
|
||||
onClick={cancelEditingInstruction}
|
||||
>
|
||||
Cancel
|
||||
</button>
|
||||
</div>
|
||||
</>
|
||||
) : (
|
||||
<>
|
||||
{instruction.timing && (
|
||||
<div className="instruction-timing-display">
|
||||
{instruction.timing}
|
||||
</div>
|
||||
)}
|
||||
<div
|
||||
className="instruction-text-display"
|
||||
onClick={() => startEditingInstruction(instruction)}
|
||||
title="Click to edit"
|
||||
>
|
||||
{instruction.text || <em>Click to add instruction text</em>}
|
||||
</div>
|
||||
</>
|
||||
)}
|
||||
|
||||
{/* Drop zone for ingredients */}
|
||||
<div className="drop-zone">
|
||||
<span className="drop-zone-header">
|
||||
Ingredients for this step:
|
||||
</span>
|
||||
|
||||
{mappedIngredients.length === 0 ? (
|
||||
<p className="no-ingredients-mapped">
|
||||
Drag ingredients here or use bulk actions
|
||||
</p>
|
||||
) : (
|
||||
<ul className="mapped-ingredients-list">
|
||||
{mappedIngredients.map((ingredient) => (
|
||||
<li
|
||||
key={ingredient.id}
|
||||
className="mapped-ingredient-item"
|
||||
>
|
||||
<span className="mapped-ingredient-text">
|
||||
{getIngredientText(ingredient)}
|
||||
</span>
|
||||
<button
|
||||
className="btn-remove-ingredient"
|
||||
onClick={() =>
|
||||
removeIngredientFromInstruction(
|
||||
ingredient.id || '',
|
||||
instruction.id || ''
|
||||
)
|
||||
}
|
||||
title="Remove ingredient from this step"
|
||||
>
|
||||
✕
|
||||
</button>
|
||||
</li>
|
||||
))}
|
||||
</ul>
|
||||
)}
|
||||
</div>
|
||||
</li>
|
||||
))}
|
||||
</ul>
|
||||
)}
|
||||
</div>
|
||||
</li>
|
||||
);
|
||||
})}
|
||||
</ul>
|
||||
)}
|
||||
</Draggable>
|
||||
);
|
||||
})}
|
||||
{provided.placeholder}
|
||||
</ul>
|
||||
)}
|
||||
</Droppable>
|
||||
</DragDropContext>
|
||||
|
||||
<button className="btn-add-instruction" onClick={addInstruction}>
|
||||
+ Add Instruction
|
||||
|
||||
@@ -525,6 +525,14 @@
|
||||
box-shadow: 0 4px 12px rgba(46, 125, 50, 0.2);
|
||||
}
|
||||
|
||||
.instruction-item.dragging {
|
||||
opacity: 0.6;
|
||||
background-color: #f5f5f5;
|
||||
box-shadow: 0 8px 16px rgba(0, 0, 0, 0.2);
|
||||
border-color: #1976d2;
|
||||
transform: rotate(2deg);
|
||||
}
|
||||
|
||||
.instruction-header {
|
||||
display: flex;
|
||||
justify-content: space-between;
|
||||
@@ -532,6 +540,31 @@
|
||||
margin-bottom: 1rem;
|
||||
}
|
||||
|
||||
.instruction-header-left {
|
||||
display: flex;
|
||||
align-items: center;
|
||||
gap: 0.75rem;
|
||||
}
|
||||
|
||||
.instruction-drag-handle {
|
||||
cursor: grab;
|
||||
color: #999;
|
||||
font-size: 1.3rem;
|
||||
display: flex;
|
||||
align-items: center;
|
||||
padding: 0.25rem;
|
||||
user-select: none;
|
||||
transition: color 0.2s;
|
||||
}
|
||||
|
||||
.instruction-drag-handle:hover {
|
||||
color: #1976d2;
|
||||
}
|
||||
|
||||
.instruction-drag-handle:active {
|
||||
cursor: grabbing;
|
||||
}
|
||||
|
||||
.step-number {
|
||||
font-size: 1.3rem;
|
||||
font-weight: 700;
|
||||
|
||||
@@ -84,11 +84,14 @@ function fractionToDecimal(fraction: string): number {
|
||||
function parseAmount(amountStr: string): { value: number | null; range: { min: number; max: number } | null } {
|
||||
amountStr = amountStr.trim();
|
||||
|
||||
// Replace unicode fractions
|
||||
// Replace unicode fractions with decimal equivalents
|
||||
for (const [unicode, decimal] of Object.entries(UNICODE_FRACTIONS)) {
|
||||
amountStr = amountStr.replace(unicode, ` ${decimal}`);
|
||||
}
|
||||
|
||||
// Clean up extra whitespace that might have been introduced
|
||||
amountStr = amountStr.replace(/\s+/g, ' ').trim();
|
||||
|
||||
// Handle ranges: "2-3", "1 to 2", "1-2"
|
||||
const rangeMatch = amountStr.match(/^(\d+(?:\.\d+)?)\s*(?:-|to)\s*(\d+(?:\.\d+)?)$/i);
|
||||
if (rangeMatch) {
|
||||
@@ -97,7 +100,7 @@ function parseAmount(amountStr: string): { value: number | null; range: { min: n
|
||||
return { value: null, range: { min, max } };
|
||||
}
|
||||
|
||||
// Handle mixed numbers: "1 1/2", "2 3/4"
|
||||
// Handle mixed numbers: "1 1/2", "2 3/4", "1 1/2" (with any amount of whitespace)
|
||||
const mixedMatch = amountStr.match(/^(\d+)\s+(\d+)\/(\d+)$/);
|
||||
if (mixedMatch) {
|
||||
const whole = parseFloat(mixedMatch[1]);
|
||||
@@ -105,6 +108,18 @@ function parseAmount(amountStr: string): { value: number | null; range: { min: n
|
||||
return { value: whole + fraction, range: null };
|
||||
}
|
||||
|
||||
// Also try to handle space-separated numbers that might be part of decimal representation
|
||||
// e.g., "2 0.25" should be treated as "2.25"
|
||||
const spaceDecimalMatch = amountStr.match(/^(\d+)\s+(\d+(?:\.\d+)?)$/);
|
||||
if (spaceDecimalMatch) {
|
||||
const whole = parseFloat(spaceDecimalMatch[1]);
|
||||
const decimal = parseFloat(spaceDecimalMatch[2]);
|
||||
// Only treat as addition if decimal part is < 1 (otherwise it's likely separate numbers)
|
||||
if (decimal < 1) {
|
||||
return { value: whole + decimal, range: null };
|
||||
}
|
||||
}
|
||||
|
||||
// Handle simple fractions: "1/2", "3/4"
|
||||
if (amountStr.includes('/')) {
|
||||
return { value: fractionToDecimal(amountStr), range: null };
|
||||
@@ -125,15 +140,16 @@ function parseAmount(amountStr: string): { value: number | null; range: { min: n
|
||||
export function parseIngredient(ingredientStr: string): ParsedIngredient {
|
||||
const original = ingredientStr;
|
||||
|
||||
// Check for non-scalable patterns
|
||||
const nonScalablePatterns = [
|
||||
/to taste/i,
|
||||
/as needed/i,
|
||||
/for (?:serving|garnish|dusting)/i,
|
||||
/optional/i,
|
||||
// Check for non-scalable patterns at the START of the ingredient
|
||||
// These patterns should only make it non-scalable if they appear early in the string
|
||||
// Not if they're notes at the end like "2 cups flour, plus more as needed"
|
||||
const startNonScalablePatterns = [
|
||||
/^to taste/i,
|
||||
/^optional/i,
|
||||
/^for (?:serving|garnish|dusting)/i,
|
||||
];
|
||||
|
||||
const isNonScalable = nonScalablePatterns.some(pattern => pattern.test(ingredientStr));
|
||||
const isNonScalable = startNonScalablePatterns.some(pattern => pattern.test(ingredientStr));
|
||||
|
||||
if (isNonScalable) {
|
||||
return {
|
||||
|
||||
262
scripts/README.md
Normal file
262
scripts/README.md
Normal file
@@ -0,0 +1,262 @@
|
||||
# Basil Deployment Scripts
|
||||
|
||||
This directory contains scripts for automated deployment of Basil.
|
||||
|
||||
## Scripts Overview
|
||||
|
||||
### `deploy.sh`
|
||||
|
||||
Main deployment script that handles the complete deployment process.
|
||||
|
||||
**Features:**
|
||||
- Pre-deployment backup creation
|
||||
- Docker image pulling from registry
|
||||
- Container restart with health checks
|
||||
- Automatic cleanup of old images
|
||||
- Comprehensive logging
|
||||
|
||||
**Usage:**
|
||||
```bash
|
||||
# With environment variables
|
||||
DOCKER_USERNAME=myuser DOCKER_REGISTRY=docker.io IMAGE_TAG=latest ./deploy.sh
|
||||
|
||||
# Or source from .env.deploy
|
||||
source ../.env.deploy && ./deploy.sh
|
||||
```
|
||||
|
||||
**Environment Variables:**
|
||||
- `DOCKER_USERNAME` (required): Docker registry username
|
||||
- `DOCKER_REGISTRY` (optional, default: docker.io): Registry URL
|
||||
- `IMAGE_TAG` (optional, default: latest): Image tag to pull
|
||||
|
||||
### `manual-deploy.sh`
|
||||
|
||||
Interactive wrapper around `deploy.sh` with user prompts.
|
||||
|
||||
**Features:**
|
||||
- Interactive prompts for configuration
|
||||
- Saves configuration to `.env.deploy` for future use
|
||||
- Confirmation before deployment
|
||||
|
||||
**Usage:**
|
||||
```bash
|
||||
./manual-deploy.sh
|
||||
```
|
||||
|
||||
The script will prompt for:
|
||||
1. Docker Hub username
|
||||
2. Docker registry (default: docker.io)
|
||||
3. Image tag (default: latest)
|
||||
4. Confirmation to proceed
|
||||
|
||||
### `webhook-receiver.sh`
|
||||
|
||||
Webhook server that listens for deployment triggers from CI/CD.
|
||||
|
||||
**Features:**
|
||||
- HTTP webhook endpoint
|
||||
- Secret-based authentication
|
||||
- Automatic deployment on webhook call
|
||||
- Systemd service support
|
||||
|
||||
**Usage:**
|
||||
```bash
|
||||
# Manual run (foreground)
|
||||
WEBHOOK_PORT=9000 WEBHOOK_SECRET=mysecret ./webhook-receiver.sh
|
||||
|
||||
# Or install as systemd service (recommended)
|
||||
sudo cp basil-webhook.service /etc/systemd/system/
|
||||
sudo systemctl enable basil-webhook
|
||||
sudo systemctl start basil-webhook
|
||||
```
|
||||
|
||||
**Environment Variables:**
|
||||
- `WEBHOOK_PORT` (optional, default: 9000): Port to listen on
|
||||
- `WEBHOOK_SECRET` (optional, default: changeme): Authentication secret
|
||||
|
||||
**Webhook Endpoint:**
|
||||
```
|
||||
POST http://localhost:9000/hooks/basil-deploy
|
||||
Header: X-Webhook-Secret: your-secret
|
||||
```
|
||||
|
||||
### `basil-webhook.service`
|
||||
|
||||
Systemd service file for running webhook receiver as a system service.
|
||||
|
||||
**Installation:**
|
||||
```bash
|
||||
# 1. Copy service file
|
||||
sudo cp basil-webhook.service /etc/systemd/system/
|
||||
|
||||
# 2. Edit environment variables in the file
|
||||
sudo nano /etc/systemd/system/basil-webhook.service
|
||||
|
||||
# 3. Reload systemd
|
||||
sudo systemctl daemon-reload
|
||||
|
||||
# 4. Enable and start service
|
||||
sudo systemctl enable basil-webhook
|
||||
sudo systemctl start basil-webhook
|
||||
|
||||
# 5. Check status
|
||||
sudo systemctl status basil-webhook
|
||||
```
|
||||
|
||||
**Service Management:**
|
||||
```bash
|
||||
# Start service
|
||||
sudo systemctl start basil-webhook
|
||||
|
||||
# Stop service
|
||||
sudo systemctl stop basil-webhook
|
||||
|
||||
# Restart service
|
||||
sudo systemctl restart basil-webhook
|
||||
|
||||
# View logs
|
||||
sudo journalctl -u basil-webhook -f
|
||||
|
||||
# Check status
|
||||
sudo systemctl status basil-webhook
|
||||
```
|
||||
|
||||
## Deployment Workflow
|
||||
|
||||
### Automatic Deployment (CI/CD)
|
||||
|
||||
1. Developer pushes to `main` branch
|
||||
2. Gitea Actions runs tests and builds images
|
||||
3. Images pushed to Docker registry
|
||||
4. Gitea Actions calls webhook endpoint
|
||||
5. Webhook server receives call and executes `deploy.sh`
|
||||
6. Production server pulls new images and restarts
|
||||
|
||||
### Manual Deployment
|
||||
|
||||
1. Run `./manual-deploy.sh`
|
||||
2. Enter configuration when prompted
|
||||
3. Confirm deployment
|
||||
4. Script executes deployment process
|
||||
|
||||
## Logs
|
||||
|
||||
All scripts log to the parent directory:
|
||||
|
||||
- `deploy.log` - Deployment script logs
|
||||
- `webhook.log` - Webhook server logs
|
||||
- `webhook-error.log` - Webhook server errors
|
||||
|
||||
**View logs:**
|
||||
```bash
|
||||
# Deployment logs
|
||||
tail -f ../deploy.log
|
||||
|
||||
# Webhook logs
|
||||
tail -f ../webhook.log
|
||||
|
||||
# Webhook errors
|
||||
tail -f ../webhook-error.log
|
||||
|
||||
# Systemd service logs
|
||||
sudo journalctl -u basil-webhook -f
|
||||
```
|
||||
|
||||
## Configuration Files
|
||||
|
||||
### `.env.deploy` (in parent directory)
|
||||
|
||||
Stores deployment configuration. Created from `.env.deploy.example`.
|
||||
|
||||
```bash
|
||||
DOCKER_USERNAME=your-dockerhub-username
|
||||
DOCKER_REGISTRY=docker.io
|
||||
IMAGE_TAG=latest
|
||||
WEBHOOK_PORT=9000
|
||||
WEBHOOK_SECRET=your-random-secret
|
||||
```
|
||||
|
||||
**Important:** This file is gitignored and should never be committed.
|
||||
|
||||
### `webhook-config.json` (auto-generated)
|
||||
|
||||
Generated by `webhook-receiver.sh`. Configures the webhook endpoint.
|
||||
|
||||
**Location:** Created in scripts directory when webhook-receiver.sh runs.
|
||||
|
||||
## Troubleshooting
|
||||
|
||||
### Script Permission Denied
|
||||
|
||||
```bash
|
||||
chmod +x *.sh
|
||||
```
|
||||
|
||||
### Docker Pull Fails
|
||||
|
||||
```bash
|
||||
# Check credentials
|
||||
docker login -u $DOCKER_USERNAME
|
||||
|
||||
# Check image exists
|
||||
docker pull $DOCKER_REGISTRY/$DOCKER_USERNAME/basil-api:$IMAGE_TAG
|
||||
```
|
||||
|
||||
### Webhook Not Responding
|
||||
|
||||
```bash
|
||||
# Check service status
|
||||
sudo systemctl status basil-webhook
|
||||
|
||||
# Check if port is listening
|
||||
sudo netstat -tlnp | grep 9000
|
||||
|
||||
# Check firewall
|
||||
sudo ufw status
|
||||
```
|
||||
|
||||
### Health Check Fails
|
||||
|
||||
```bash
|
||||
# Check API logs
|
||||
docker-compose logs api
|
||||
|
||||
# Check API manually
|
||||
curl http://localhost:3001/health
|
||||
|
||||
# Check database connection
|
||||
docker-compose exec api npx prisma studio
|
||||
```
|
||||
|
||||
## Security Notes
|
||||
|
||||
1. **Never commit secrets**: `.env.deploy` is gitignored
|
||||
2. **Use strong webhook secret**: 32+ character random string
|
||||
3. **Firewall webhook port**: Allow only from known IPs if possible
|
||||
4. **Use HTTPS**: Configure reverse proxy for webhook in production
|
||||
5. **Backup before deploy**: Script creates automatic backups
|
||||
|
||||
## Directory Structure
|
||||
|
||||
```
|
||||
scripts/
|
||||
├── README.md # This file
|
||||
├── deploy.sh # Main deployment script
|
||||
├── manual-deploy.sh # Interactive deployment
|
||||
├── webhook-receiver.sh # Webhook server
|
||||
├── basil-webhook.service # Systemd service file
|
||||
└── webhook-config.json # Generated webhook config (auto-created)
|
||||
|
||||
Parent directory:
|
||||
├── .env.deploy # Deployment config (gitignored)
|
||||
├── .env.deploy.example # Example config
|
||||
├── deploy.log # Deployment logs (gitignored)
|
||||
├── webhook.log # Webhook logs (gitignored)
|
||||
└── backups/ # Automatic backups (gitignored)
|
||||
```
|
||||
|
||||
## References
|
||||
|
||||
- [Full CI/CD Setup Guide](../docs/CI-CD-SETUP.md)
|
||||
- [Quick Start Guide](../docs/DEPLOYMENT-QUICK-START.md)
|
||||
- [Project Documentation](../CLAUDE.md)
|
||||
22
scripts/basil-webhook.service
Normal file
22
scripts/basil-webhook.service
Normal file
@@ -0,0 +1,22 @@
|
||||
[Unit]
|
||||
Description=Basil Webhook Deployment Service
|
||||
After=network.target docker.service
|
||||
Requires=docker.service
|
||||
|
||||
[Service]
|
||||
Type=simple
|
||||
User=root
|
||||
WorkingDirectory=/srv/docker-compose/basil
|
||||
Environment="WEBHOOK_PORT=9000"
|
||||
Environment="WEBHOOK_SECRET=changeme"
|
||||
Environment="DOCKER_USERNAME=your-docker-username"
|
||||
Environment="DOCKER_REGISTRY=docker.io"
|
||||
Environment="IMAGE_TAG=latest"
|
||||
ExecStart=/srv/docker-compose/basil/scripts/webhook-receiver.sh
|
||||
Restart=always
|
||||
RestartSec=10
|
||||
StandardOutput=append:/srv/docker-compose/basil/webhook.log
|
||||
StandardError=append:/srv/docker-compose/basil/webhook-error.log
|
||||
|
||||
[Install]
|
||||
WantedBy=multi-user.target
|
||||
198
scripts/deploy.sh
Executable file
198
scripts/deploy.sh
Executable file
@@ -0,0 +1,198 @@
|
||||
#!/bin/bash
|
||||
|
||||
# Basil Deployment Script
|
||||
# This script pulls the latest Docker images and restarts the containers
|
||||
|
||||
set -e # Exit on error
|
||||
|
||||
# Configuration
|
||||
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
|
||||
PROJECT_DIR="$(dirname "$SCRIPT_DIR")"
|
||||
LOG_FILE="$PROJECT_DIR/deploy.log"
|
||||
BACKUP_DIR="$PROJECT_DIR/backups"
|
||||
DOCKER_REGISTRY="${DOCKER_REGISTRY:-docker.io}"
|
||||
DOCKER_USERNAME="${DOCKER_USERNAME}"
|
||||
IMAGE_TAG="${IMAGE_TAG:-latest}"
|
||||
|
||||
# Colors for output
|
||||
RED='\033[0;31m'
|
||||
GREEN='\033[0;32m'
|
||||
YELLOW='\033[1;33m'
|
||||
NC='\033[0m' # No Color
|
||||
|
||||
# Logging function
|
||||
log() {
|
||||
echo -e "${GREEN}[$(date +'%Y-%m-%d %H:%M:%S')]${NC} $1" | tee -a "$LOG_FILE"
|
||||
}
|
||||
|
||||
error() {
|
||||
echo -e "${RED}[$(date +'%Y-%m-%d %H:%M:%S')] ERROR:${NC} $1" | tee -a "$LOG_FILE"
|
||||
}
|
||||
|
||||
warning() {
|
||||
echo -e "${YELLOW}[$(date +'%Y-%m-%d %H:%M:%S')] WARNING:${NC} $1" | tee -a "$LOG_FILE"
|
||||
}
|
||||
|
||||
# Check if Docker is running
|
||||
check_docker() {
|
||||
if ! docker info > /dev/null 2>&1; then
|
||||
error "Docker is not running. Please start Docker and try again."
|
||||
exit 1
|
||||
fi
|
||||
log "Docker is running"
|
||||
}
|
||||
|
||||
# Create backup before deployment
|
||||
create_backup() {
|
||||
log "Creating pre-deployment backup..."
|
||||
|
||||
# Ensure backup directory exists
|
||||
mkdir -p "$BACKUP_DIR"
|
||||
|
||||
# Create backup using the API if running
|
||||
if docker ps | grep -q basil-api; then
|
||||
log "Creating database backup via API..."
|
||||
curl -X POST http://localhost:3001/api/backup -o "$BACKUP_DIR/pre-deploy-$(date +%Y%m%d-%H%M%S).zip" 2>/dev/null || warning "API backup failed, continuing anyway"
|
||||
else
|
||||
warning "API container not running, skipping automatic backup"
|
||||
fi
|
||||
}
|
||||
|
||||
# Pull latest images from registry
|
||||
pull_images() {
|
||||
log "Pulling latest Docker images..."
|
||||
|
||||
if [ -z "$DOCKER_USERNAME" ]; then
|
||||
error "DOCKER_USERNAME environment variable not set"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
# Pull API image
|
||||
log "Pulling API image: ${DOCKER_REGISTRY}/${DOCKER_USERNAME}/basil-api:${IMAGE_TAG}"
|
||||
docker pull "${DOCKER_REGISTRY}/${DOCKER_USERNAME}/basil-api:${IMAGE_TAG}" || {
|
||||
error "Failed to pull API image"
|
||||
exit 1
|
||||
}
|
||||
|
||||
# Pull Web image
|
||||
log "Pulling Web image: ${DOCKER_REGISTRY}/${DOCKER_USERNAME}/basil-web:${IMAGE_TAG}"
|
||||
docker pull "${DOCKER_REGISTRY}/${DOCKER_USERNAME}/basil-web:${IMAGE_TAG}" || {
|
||||
error "Failed to pull Web image"
|
||||
exit 1
|
||||
}
|
||||
|
||||
log "Successfully pulled all images"
|
||||
}
|
||||
|
||||
# Update docker-compose.yml to use registry images
|
||||
update_docker_compose() {
|
||||
log "Updating docker-compose configuration..."
|
||||
|
||||
# Create docker-compose.override.yml to use registry images
|
||||
cat > "$PROJECT_DIR/docker-compose.override.yml" <<EOF
|
||||
services:
|
||||
api:
|
||||
image: ${DOCKER_REGISTRY}/${DOCKER_USERNAME}/basil-api:${IMAGE_TAG}
|
||||
build:
|
||||
# Override build to prevent building from source
|
||||
context: .
|
||||
dockerfile: packages/api/Dockerfile
|
||||
|
||||
web:
|
||||
image: ${DOCKER_REGISTRY}/${DOCKER_USERNAME}/basil-web:${IMAGE_TAG}
|
||||
build:
|
||||
context: .
|
||||
dockerfile: packages/web/Dockerfile
|
||||
EOF
|
||||
|
||||
log "Docker Compose override file created"
|
||||
}
|
||||
|
||||
# Restart containers
|
||||
restart_containers() {
|
||||
log "Restarting containers..."
|
||||
|
||||
cd "$PROJECT_DIR"
|
||||
|
||||
# Stop containers
|
||||
log "Stopping containers..."
|
||||
docker-compose down || warning "Failed to stop some containers"
|
||||
|
||||
# Start containers with new images
|
||||
log "Starting containers with new images..."
|
||||
docker-compose up -d || {
|
||||
error "Failed to start containers"
|
||||
exit 1
|
||||
}
|
||||
|
||||
log "Containers restarted successfully"
|
||||
}
|
||||
|
||||
# Health check
|
||||
health_check() {
|
||||
log "Performing health checks..."
|
||||
|
||||
# Wait for API to be ready
|
||||
log "Waiting for API to be healthy..."
|
||||
MAX_RETRIES=30
|
||||
RETRY_COUNT=0
|
||||
|
||||
while [ $RETRY_COUNT -lt $MAX_RETRIES ]; do
|
||||
if curl -f http://localhost:3001/health > /dev/null 2>&1; then
|
||||
log "API is healthy"
|
||||
break
|
||||
fi
|
||||
|
||||
RETRY_COUNT=$((RETRY_COUNT + 1))
|
||||
if [ $RETRY_COUNT -eq $MAX_RETRIES ]; then
|
||||
error "API health check failed after $MAX_RETRIES attempts"
|
||||
docker-compose logs api
|
||||
exit 1
|
||||
fi
|
||||
|
||||
sleep 2
|
||||
done
|
||||
|
||||
# Check web container
|
||||
if docker ps | grep -q basil-web; then
|
||||
log "Web container is running"
|
||||
else
|
||||
error "Web container is not running"
|
||||
docker-compose logs web
|
||||
exit 1
|
||||
fi
|
||||
|
||||
log "All health checks passed"
|
||||
}
|
||||
|
||||
# Cleanup old images
|
||||
cleanup_old_images() {
|
||||
log "Cleaning up old Docker images..."
|
||||
docker image prune -f > /dev/null 2>&1 || warning "Failed to prune some images"
|
||||
log "Cleanup complete"
|
||||
}
|
||||
|
||||
# Main deployment flow
|
||||
main() {
|
||||
log "========================================="
|
||||
log "Starting Basil deployment"
|
||||
log "Registry: ${DOCKER_REGISTRY}"
|
||||
log "Username: ${DOCKER_USERNAME}"
|
||||
log "Tag: ${IMAGE_TAG}"
|
||||
log "========================================="
|
||||
|
||||
check_docker
|
||||
create_backup
|
||||
pull_images
|
||||
update_docker_compose
|
||||
restart_containers
|
||||
health_check
|
||||
cleanup_old_images
|
||||
|
||||
log "========================================="
|
||||
log "Deployment completed successfully!"
|
||||
log "========================================="
|
||||
}
|
||||
|
||||
# Run main function
|
||||
main "$@"
|
||||
73
scripts/manual-deploy.sh
Executable file
73
scripts/manual-deploy.sh
Executable file
@@ -0,0 +1,73 @@
|
||||
#!/bin/bash

# Manual Deployment Script for Basil
# Simple wrapper around deploy.sh with interactive prompts.
# Loads previous answers from ../.env.deploy (if present), prompts for any
# missing value, asks for confirmation, persists the answers for next time,
# and then runs scripts/deploy.sh with the values exported.

set -e

SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
# Single source of truth for the config file path (was repeated inline).
ENV_FILE="$SCRIPT_DIR/../.env.deploy"

# Colors
BLUE='\033[0;34m'
GREEN='\033[0;32m'
YELLOW='\033[1;33m'
NC='\033[0m'

echo -e "${BLUE}=========================================${NC}"
echo -e "${BLUE} Basil Manual Deployment${NC}"
echo -e "${BLUE}=========================================${NC}"
echo ""

# Check if .env file exists for configuration
if [ -f "$ENV_FILE" ]; then
    echo -e "${GREEN}Loading configuration from .env.deploy${NC}"
    source "$ENV_FILE"
fi

# Prompt for Docker username if not set.
# -r keeps backslashes in the input literal (ShellCheck SC2162).
if [ -z "$DOCKER_USERNAME" ]; then
    read -r -p "Enter Docker Hub username: " DOCKER_USERNAME
    export DOCKER_USERNAME
fi

# Prompt for Docker registry (default: docker.io)
if [ -z "$DOCKER_REGISTRY" ]; then
    read -r -p "Enter Docker registry [docker.io]: " DOCKER_REGISTRY
    DOCKER_REGISTRY=${DOCKER_REGISTRY:-docker.io}
    export DOCKER_REGISTRY
fi

# Prompt for image tag (default: latest)
if [ -z "$IMAGE_TAG" ]; then
    read -r -p "Enter image tag [latest]: " IMAGE_TAG
    IMAGE_TAG=${IMAGE_TAG:-latest}
    export IMAGE_TAG
fi

echo ""
echo -e "${YELLOW}Deployment Configuration:${NC}"
echo " Registry: $DOCKER_REGISTRY"
echo " Username: $DOCKER_USERNAME"
echo " Tag: $IMAGE_TAG"
echo ""

# Single-keypress confirmation; with no variable name, read fills $REPLY.
read -p "Proceed with deployment? (y/N): " -n 1 -r
echo ""

if [[ ! $REPLY =~ ^[Yy]$ ]]; then
    echo "Deployment cancelled"
    exit 0
fi

# Save configuration for next time. Values are double-quoted so they still
# round-trip through `source` if they ever contain spaces.
cat > "$ENV_FILE" <<EOF
DOCKER_USERNAME="$DOCKER_USERNAME"
DOCKER_REGISTRY="$DOCKER_REGISTRY"
IMAGE_TAG="$IMAGE_TAG"
EOF

echo -e "${GREEN}Configuration saved to .env.deploy${NC}"
echo ""

# Run deployment
bash "$SCRIPT_DIR/deploy.sh"
|
||||
106
scripts/webhook-receiver.sh
Executable file
106
scripts/webhook-receiver.sh
Executable file
@@ -0,0 +1,106 @@
|
||||
#!/bin/bash

# Webhook Receiver for Basil Deployments
# This script sets up a simple webhook endpoint that triggers deployments
# (installs adnanh/webhook if missing, generates its hook configuration,
# and runs the server in the foreground).

set -e

# Configuration
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
PORT="${WEBHOOK_PORT:-9000}"            # listen port; override via WEBHOOK_PORT
SECRET="${WEBHOOK_SECRET:-changeme}"    # NOTE(review): "changeme" default is insecure — require WEBHOOK_SECRET in production
LOG_FILE="$SCRIPT_DIR/../webhook.log"   # appended to by log()/warning()
|
||||
|
||||
# Colors
GREEN='\033[0;32m'
YELLOW='\033[1;33m'
NC='\033[0m'

# Print a timestamped info line to stdout and append it to $LOG_FILE.
log() {
    local ts
    ts="$(date +'%Y-%m-%d %H:%M:%S')"
    echo -e "${GREEN}[$ts]${NC} $1" | tee -a "$LOG_FILE"
}

# Print a timestamped warning line to stdout and append it to $LOG_FILE.
warning() {
    local ts
    ts="$(date +'%Y-%m-%d %H:%M:%S')"
    echo -e "${YELLOW}[$ts] WARNING:${NC} $1" | tee -a "$LOG_FILE"
}
|
||||
|
||||
# Install webhook if not present
# Installs adnanh/webhook via the system package manager when the binary is
# missing; exits 1 if no supported package manager (apt-get/yum) is found.
install_webhook() {
    # Guard clause: nothing to do if the binary is already on PATH.
    if command -v webhook &> /dev/null; then
        return 0
    fi

    log "Installing webhook..."
    if command -v apt-get &> /dev/null; then
        sudo apt-get update && sudo apt-get install -y webhook
    elif command -v yum &> /dev/null; then
        sudo yum install -y webhook
    else
        warning "Please install 'webhook' manually: https://github.com/adnanh/webhook"
        exit 1
    fi
}
|
||||
|
||||
# Create webhook configuration
# Generates webhook-config.json for adnanh/webhook. The hook runs deploy.sh
# when an incoming request carries a matching X-Webhook-Secret header;
# mismatches get a 403.
#
# FIX: for pass-environment-to-command entries with "source": "string", the
# "name" field is the *literal value* handed to the command (see the
# adnanh/webhook hook-definition docs) — previously the literal strings
# "DOCKER_USERNAME" etc. were passed instead of the variables' values. The
# current environment values are now expanded into the file at generation
# time (the heredoc delimiter is unquoted on purpose).
#
# NOTE(review): $SECRET and the expanded values are interpolated into JSON
# unescaped; values containing double quotes would break the config —
# confirm upstream that these are restricted to safe characters.
create_webhook_config() {
    cat > "$SCRIPT_DIR/webhook-config.json" <<EOF
[
  {
    "id": "basil-deploy",
    "execute-command": "$SCRIPT_DIR/deploy.sh",
    "command-working-directory": "$SCRIPT_DIR/..",
    "response-message": "Deployment triggered successfully",
    "trigger-rule": {
      "and": [
        {
          "match": {
            "type": "value",
            "value": "$SECRET",
            "parameter": {
              "source": "header",
              "name": "X-Webhook-Secret"
            }
          }
        }
      ]
    },
    "pass-environment-to-command": [
      {
        "envname": "DOCKER_USERNAME",
        "source": "string",
        "name": "${DOCKER_USERNAME:-}"
      },
      {
        "envname": "DOCKER_REGISTRY",
        "source": "string",
        "name": "${DOCKER_REGISTRY:-docker.io}"
      },
      {
        "envname": "IMAGE_TAG",
        "source": "string",
        "name": "${IMAGE_TAG:-latest}"
      }
    ],
    "trigger-rule-mismatch-http-response-code": 403
  }
]
EOF
    log "Webhook configuration created at $SCRIPT_DIR/webhook-config.json"
}
|
||||
|
||||
# Start webhook server
# Runs the webhook server in the foreground on $PORT using the generated
# hook configuration; blocks until interrupted (Ctrl+C).
start_webhook() {
    log "Starting webhook server on port $PORT..."
    log "Webhook URL: http://localhost:$PORT/hooks/basil-deploy"
    # SECURITY FIX: log() tees to webhook.log, so printing $SECRET here
    # persisted the shared secret in plain text on disk. Redact it.
    log "Secret: (hidden; configured via WEBHOOK_SECRET)"
    log "Press Ctrl+C to stop"

    webhook -hooks "$SCRIPT_DIR/webhook-config.json" -port "$PORT" -verbose
}
|
||||
|
||||
# Main
# Orchestrates setup: ensure the webhook binary exists, (re)generate the
# hook configuration, then run the server in the foreground (blocks).
main() {
    install_webhook
    create_webhook_config
    start_webhook
}

# Entry point
main "$@"
|
||||
Reference in New Issue
Block a user