diff --git a/compose.override.yml b/compose.override.yml index e1e6d12..339d3dd 100644 --- a/compose.override.yml +++ b/compose.override.yml @@ -66,7 +66,7 @@ services: environment: # Development database settings POSTGRES_PASSWORD: dev_password_123 - POSTGRES_DB: ffmpeg_api_dev + POSTGRES_DB: rendiff_dev ports: # Expose postgres for local development tools @@ -96,19 +96,19 @@ services: # Development Tools mailhog: image: mailhog/mailhog:v1.0.1 - container_name: ffmpeg_dev_mailhog + container_name: rendiff_dev_mailhog ports: - "1025:1025" # SMTP - "8025:8025" # Web UI networks: - - ffmpeg-net + - rendiff-net profiles: - dev-tools # Database Admin Tool pgadmin: image: dpage/pgadmin4:latest - container_name: ffmpeg_dev_pgadmin + container_name: rendiff_dev_pgadmin environment: PGADMIN_DEFAULT_EMAIL: admin@localhost PGADMIN_DEFAULT_PASSWORD: admin @@ -120,7 +120,7 @@ services: depends_on: - postgres networks: - - ffmpeg-net + - rendiff-net profiles: - dev-tools diff --git a/docs/PERFORMANCE_OPTIMIZATION.md b/docs/PERFORMANCE_OPTIMIZATION.md index 2310876..3c63904 100644 --- a/docs/PERFORMANCE_OPTIMIZATION.md +++ b/docs/PERFORMANCE_OPTIMIZATION.md @@ -2,7 +2,7 @@ ## Overview -This FFmpeg API has been optimized for high-performance production workloads with comprehensive performance improvements implemented across all layers. +Rendiff has been optimized for high-performance production workloads with comprehensive performance improvements implemented across all layers. ## ๐Ÿš€ Performance Features Implemented @@ -291,7 +291,7 @@ ORDER BY idx_scan DESC; # GPU workloads: Limit to GPU capacity # Monitor worker memory usage -docker stats ffmpeg-api-worker-1 +docker stats rendiff-worker-1 # Adjust prefetch multiplier # Higher = better throughput, more memory usage diff --git a/docs/RUNBOOKS.md b/docs/RUNBOOKS.md index 6b505e4..647c31d 100644 --- a/docs/RUNBOOKS.md +++ b/docs/RUNBOOKS.md @@ -1,4 +1,4 @@ -# FFmpeg API Operational Runbooks +# Rendiff Operational Runbooks ## Table of Contents @@ -34,8 +34,8 @@ docker compose exec redis redis-cli ping curl -w "@curl-format.txt" -o /dev/null -s https://api.domain.com/api/v1/health # Database connections -docker compose exec postgres psql -U ffmpeg_user -d ffmpeg_api -c \ - "SELECT count(*) FROM pg_stat_activity WHERE datname = 'ffmpeg_api';" +docker compose exec postgres psql -U rendiff_user -d rendiff -c \ + "SELECT count(*) FROM pg_stat_activity WHERE datname = 'rendiff';" # Queue depth docker compose exec redis redis-cli llen celery @@ -61,7 +61,7 @@ docker compose exec worker-cpu celery -A worker.main inspect active docker stats --no-stream # Check database slow queries -docker compose exec postgres psql -U ffmpeg_user -d ffmpeg_api -c \ +docker compose exec postgres psql -U rendiff_user -d rendiff -c \ "SELECT query, mean_exec_time, calls FROM pg_stat_statements WHERE mean_exec_time > 1000 ORDER BY mean_exec_time DESC LIMIT 10;" @@ -78,7 +78,7 @@ docker compose exec redis redis-cli info memory 2. 
**Clear slow queries:** ```bash # Analyze and optimize slow queries - docker compose exec postgres psql -U ffmpeg_user -d ffmpeg_api -c \ + docker compose exec postgres psql -U rendiff_user -d rendiff -c \ "ANALYZE jobs; REINDEX TABLE jobs;" ``` @@ -340,7 +340,7 @@ find /storage -type f -mtime +7 -name "*.tmp" -ls **Check processing metrics:** ```bash # Average processing time by operation -docker compose exec postgres psql -U ffmpeg_user -d ffmpeg_api -c " +docker compose exec postgres psql -U rendiff_user -d rendiff -c " SELECT operations->0->>'type' as operation, AVG(EXTRACT(EPOCH FROM (completed_at - started_at))) as avg_seconds, @@ -367,10 +367,10 @@ docker compose restart worker-cpu **Check slow queries:** ```bash # Enable query logging -docker compose exec postgres psql -U ffmpeg_user -d ffmpeg_api -c \ +docker compose exec postgres psql -U rendiff_user -d rendiff -c \ "ALTER SYSTEM SET log_min_duration_statement = 1000;" -docker compose exec postgres psql -U ffmpeg_user -d ffmpeg_api -c \ +docker compose exec postgres psql -U rendiff_user -d rendiff -c \ "SELECT pg_reload_conf();" # View slow query log @@ -380,13 +380,13 @@ docker compose exec postgres tail -f /var/log/postgresql/postgresql.log | grep d **Optimize database:** ```bash # Update statistics -docker compose exec postgres vacuumdb -U ffmpeg_user -d ffmpeg_api -z +docker compose exec postgres vacuumdb -U rendiff_user -d rendiff -z # Reindex tables -docker compose exec postgres reindexdb -U ffmpeg_user -d ffmpeg_api +docker compose exec postgres reindexdb -U rendiff_user -d rendiff # Check table sizes -docker compose exec postgres psql -U ffmpeg_user -d ffmpeg_api -c " +docker compose exec postgres psql -U rendiff_user -d rendiff -c " SELECT schemaname AS table_schema, tablename AS table_name, @@ -425,7 +425,7 @@ LIMIT 10;" 4. **Verify restoration:** ```bash # Check data integrity - docker compose exec postgres psql -U ffmpeg_user -d ffmpeg_api -c \ + docker compose exec postgres psql -U rendiff_user -d rendiff -c \ "SELECT COUNT(*) FROM jobs;" # Run application tests @@ -489,8 +489,8 @@ pg_basebackup -h localhost -D /recovery -U postgres -Fp -Xs -P 1. **Add worker nodes:** ```bash # Deploy to new node - scp -r . newnode:/opt/ffmpeg-api/ - ssh newnode "cd /opt/ffmpeg-api && docker compose up -d worker-cpu" + scp -r . newnode:/opt/rendiff/ + ssh newnode "cd /opt/rendiff && docker compose up -d worker-cpu" ``` 2. **Scale services:** @@ -520,7 +520,7 @@ pg_basebackup -h localhost -D /recovery -U postgres -Fp -Xs -P 1. **Immediate response:** ```bash # Identify compromised key - docker compose exec postgres psql -U ffmpeg_user -d ffmpeg_api -c " + docker compose exec postgres psql -U rendiff_user -d rendiff -c " SELECT api_key_hash, last_used_at, request_count FROM api_keys WHERE last_used_at > NOW() - INTERVAL '1 hour' @@ -536,7 +536,7 @@ pg_basebackup -h localhost -D /recovery -U postgres -Fp -Xs -P docker compose logs api | grep > suspicious-activity.log # Check for data exfiltration - docker compose exec postgres psql -U ffmpeg_user -d ffmpeg_api -c " + docker compose exec postgres psql -U rendiff_user -d rendiff -c " SELECT COUNT(*), SUM(output_size) FROM jobs WHERE api_key = '' diff --git a/docs/SECURITY_HARDENING.md b/docs/SECURITY_HARDENING.md index 371c6ec..bdb3919 100644 --- a/docs/SECURITY_HARDENING.md +++ b/docs/SECURITY_HARDENING.md @@ -2,7 +2,7 @@ ## Overview -This FFmpeg API has been comprehensively hardened against all known security vulnerabilities. 
This document outlines the security features implemented and best practices for secure deployment. +Rendiff has been comprehensively hardened against all known security vulnerabilities. This document outlines the security features implemented and best practices for secure deployment. ## ๐Ÿ›ก๏ธ Security Features Implemented @@ -217,7 +217,7 @@ semgrep --config=auto api/ worker/ # Container scanning docker scout cves -trivy image ffmpeg-api:latest +trivy image rendiff:latest ``` ### **Penetration Testing Checklist** diff --git a/docs/SETUP.md b/docs/SETUP.md index 0910250..7fba05f 100644 --- a/docs/SETUP.md +++ b/docs/SETUP.md @@ -572,13 +572,13 @@ docker compose up -d --scale worker-genai=2 #### Database Optimization ```bash # Monitor database performance -docker compose exec postgres psql -U ffmpeg_user -d ffmpeg_api -c " +docker compose exec postgres psql -U rendiff_user -d rendiff -c " SELECT query, mean_time, calls FROM pg_stat_statements ORDER BY mean_time DESC LIMIT 10;" # Analyze table usage -docker compose exec postgres psql -U ffmpeg_user -d ffmpeg_api -c " +docker compose exec postgres psql -U rendiff_user -d rendiff -c " SELECT schemaname,tablename,attname,n_distinct,correlation FROM pg_stats WHERE tablename='jobs';" ``` @@ -604,4 +604,4 @@ FROM pg_stats WHERE tablename='jobs';" | **[๐Ÿญ Production Setup](#production-setup)** | Production best practices | Production setup | | **[๐Ÿ›ก๏ธ HTTPS/SSL Configuration](#httpssl-configuration)** | Security configuration | Security hardening | -**Need help?** Check the [troubleshooting section](#troubleshooting) or [open an issue](https://github.com/rendiffdev/ffmpeg-api/issues). \ No newline at end of file +**Need help?** Check the [troubleshooting section](#troubleshooting) or [open an issue](https://github.com/rendiffdev/rendiff-dev/issues). 
\ No newline at end of file diff --git a/k8s/base/api-deployment.yaml b/k8s/base/api-deployment.yaml index 19ecf86..d3acc52 100644 --- a/k8s/base/api-deployment.yaml +++ b/k8s/base/api-deployment.yaml @@ -1,43 +1,43 @@ apiVersion: apps/v1 kind: Deployment metadata: - name: ffmpeg-api - namespace: ffmpeg-api + name: rendiff + namespace: rendiff labels: - app: ffmpeg-api + app: rendiff component: api spec: replicas: 3 selector: matchLabels: - app: ffmpeg-api + app: rendiff component: api template: metadata: labels: - app: ffmpeg-api + app: rendiff component: api spec: containers: - name: api - image: ffmpeg-api:latest + image: rendiff:latest ports: - containerPort: 8000 env: - name: DATABASE_URL valueFrom: secretKeyRef: - name: ffmpeg-api-secrets + name: rendiff-secrets key: database-url - name: REDIS_URL valueFrom: secretKeyRef: - name: ffmpeg-api-secrets + name: rendiff-secrets key: redis-url - name: SECRET_KEY valueFrom: secretKeyRef: - name: ffmpeg-api-secrets + name: rendiff-secrets key: secret-key resources: requests: @@ -69,11 +69,11 @@ spec: apiVersion: v1 kind: Service metadata: - name: ffmpeg-api-service - namespace: ffmpeg-api + name: rendiff-service + namespace: rendiff spec: selector: - app: ffmpeg-api + app: rendiff component: api ports: - port: 80 diff --git a/monitoring/alerts/production-alerts.yml b/monitoring/alerts/production-alerts.yml index 9f9b8fe..36ffee9 100644 --- a/monitoring/alerts/production-alerts.yml +++ b/monitoring/alerts/production-alerts.yml @@ -1,8 +1,8 @@ -# Prometheus Alerting Rules for FFmpeg API Production +# Prometheus Alerting Rules for Rendiff Production # SLO-based alerts with multi-window burn rate groups: - - name: ffmpeg_api_availability + - name: rendiff_availability interval: 30s rules: # High Priority Alerts @@ -11,7 +11,7 @@ groups: for: 2m labels: severity: critical - service: ffmpeg-api + service: rendiff annotations: summary: "High API error rate detected" description: "API error rate is {{ $value }} errors/sec for the last 5 minutes" @@ -22,7 +22,7 @@ groups: for: 3m labels: severity: warning - service: ffmpeg-api + service: rendiff annotations: summary: "API response time is high" description: "95th percentile response time is {{ $value }}s" @@ -255,7 +255,7 @@ groups: # Health Check Alerts - alert: HealthCheckFailing - expr: up{job="ffmpeg-api"} == 0 + expr: up{job="rendiff"} == 0 for: 2m labels: severity: critical diff --git a/monitoring/dashboards/ffmpeg-api-production.json b/monitoring/dashboards/rendiff-production.json similarity index 90% rename from monitoring/dashboards/ffmpeg-api-production.json rename to monitoring/dashboards/rendiff-production.json index 3fadd9f..a3ec661 100644 --- a/monitoring/dashboards/ffmpeg-api-production.json +++ b/monitoring/dashboards/rendiff-production.json @@ -1,8 +1,8 @@ { "dashboard": { "id": null, - "uid": "ffmpeg-api-prod", - "title": "FFmpeg API - Production Operations", + "uid": "rendiff-prod", + "title": "Rendiff - Production Operations", "tags": ["ffmpeg", "api", "production", "sre"], "timezone": "browser", "schemaVersion": 38, @@ -29,7 +29,7 @@ "name": "namespace", "type": "query", "datasource": "$datasource", - "query": "label_values(up{job=\"ffmpeg-api\"}, namespace)", + "query": "label_values(up{job=\"rendiff\"}, namespace)", "current": { "value": "default" } @@ -44,7 +44,7 @@ "id": 1, "targets": [ { - "expr": "up{job=\"ffmpeg-api\"}", + "expr": "up{job=\"rendiff\"}", "legendFormat": "API", "refId": "A" }, @@ -99,7 +99,7 @@ "id": 3, "targets": [ { - "expr": 
"sum(rate(http_requests_total{job=\"ffmpeg-api\"}[5m])) by (status)", + "expr": "sum(rate(http_requests_total{job=\"rendiff\"}[5m])) by (status)", "legendFormat": "{{status}}xx" } ], @@ -112,7 +112,7 @@ "id": 4, "targets": [ { - "expr": "histogram_quantile(0.95, sum(rate(http_request_duration_seconds_bucket{job=\"ffmpeg-api\"}[5m])) by (le, endpoint))", + "expr": "histogram_quantile(0.95, sum(rate(http_request_duration_seconds_bucket{job=\"rendiff\"}[5m])) by (le, endpoint))", "legendFormat": "{{endpoint}}" } ], @@ -125,7 +125,7 @@ "id": 5, "targets": [ { - "expr": "ffmpeg_jobs_active{job=\"ffmpeg-api\"}", + "expr": "ffmpeg_jobs_active{job=\"rendiff\"}", "legendFormat": "{{status}}" } ], @@ -145,7 +145,7 @@ "id": 6, "targets": [ { - "expr": "sum(rate(http_requests_total{job=\"ffmpeg-api\",status=~\"5..\"}[5m])) / sum(rate(http_requests_total{job=\"ffmpeg-api\"}[5m])) * 100", + "expr": "sum(rate(http_requests_total{job=\"rendiff\",status=~\"5..\"}[5m])) / sum(rate(http_requests_total{job=\"rendiff\"}[5m])) * 100", "legendFormat": "Error %" } ], @@ -177,7 +177,7 @@ "id": 8, "targets": [ { - "expr": "ffmpeg_queue_depth{job=\"ffmpeg-api\"}", + "expr": "ffmpeg_queue_depth{job=\"rendiff\"}", "legendFormat": "{{priority}} priority" } ], @@ -304,7 +304,7 @@ "id": 17, "targets": [ { - "expr": "pg_stat_database_numbackends{datname=\"ffmpeg_api\"}", + "expr": "pg_stat_database_numbackends{datname=\"rendiff\"}", "legendFormat": "Active connections" }, { @@ -320,7 +320,7 @@ "id": 18, "targets": [ { - "expr": "rate(pg_stat_database_blks_hit{datname=\"ffmpeg_api\"}[5m]) / (rate(pg_stat_database_blks_hit{datname=\"ffmpeg_api\"}[5m]) + rate(pg_stat_database_blks_read{datname=\"ffmpeg_api\"}[5m])) * 100", + "expr": "rate(pg_stat_database_blks_hit{datname=\"rendiff\"}[5m]) / (rate(pg_stat_database_blks_hit{datname=\"rendiff\"}[5m]) + rate(pg_stat_database_blks_read{datname=\"rendiff\"}[5m])) * 100", "legendFormat": "Cache hit ratio" } ], @@ -333,7 +333,7 @@ "id": 19, "targets": [ { - "expr": "pg_database_size_bytes{datname=\"ffmpeg_api\"} / 1024 / 1024 / 1024", + "expr": "pg_database_size_bytes{datname=\"rendiff\"} / 1024 / 1024 / 1024", "legendFormat": "Database size" } ], @@ -395,7 +395,7 @@ "id": 23, "targets": [ { - "expr": "topk(10, sum by (api_key_hash) (rate(ffmpeg_api_requests_by_key_total[1h])))", + "expr": "topk(10, sum by (api_key_hash) (rate(rendiff_requests_by_key_total[1h])))", "format": "table", "instant": true } @@ -426,7 +426,7 @@ "id": 25, "targets": [ { - "expr": "(1 - (sum(rate(http_requests_total{job=\"ffmpeg-api\",status=~\"5..\"}[5m])) / sum(rate(http_requests_total{job=\"ffmpeg-api\"}[5m])))) * 100", + "expr": "(1 - (sum(rate(http_requests_total{job=\"rendiff\",status=~\"5..\"}[5m])) / sum(rate(http_requests_total{job=\"rendiff\"}[5m])))) * 100", "legendFormat": "Availability" } ], @@ -452,7 +452,7 @@ "id": 26, "targets": [ { - "expr": "histogram_quantile(0.99, sum(rate(http_request_duration_seconds_bucket{job=\"ffmpeg-api\"}[5m])) by (le))", + "expr": "histogram_quantile(0.99, sum(rate(http_request_duration_seconds_bucket{job=\"rendiff\"}[5m])) by (le))", "legendFormat": "P99 Latency" } ], @@ -508,7 +508,7 @@ "dashboardAlerts": true, "alertName": "", "dashboardTitle": "", - "tags": ["ffmpeg-api"] + "tags": ["rendiff"] } } ] diff --git a/scripts/backup-database.sh b/scripts/backup-database.sh index adf4e4c..dc7679e 100755 --- a/scripts/backup-database.sh +++ b/scripts/backup-database.sh @@ -14,7 +14,7 @@ if [ -f "$PROJECT_ROOT/.env" ]; then fi # Default configuration 
-BACKUP_DIR="${BACKUP_DIR:-/var/backups/ffmpeg-api}" +BACKUP_DIR="${BACKUP_DIR:-/var/backups/rendiff}" BACKUP_RETENTION_DAYS="${BACKUP_RETENTION_DAYS:-7}" BACKUP_ENCRYPTION_KEY="${BACKUP_ENCRYPTION_KEY:-}" AWS_S3_BUCKET="${AWS_S3_BUCKET:-}" @@ -24,7 +24,7 @@ LOG_LEVEL="${LOG_LEVEL:-INFO}" # Database configuration DB_HOST="${DATABASE_HOST:-localhost}" DB_PORT="${DATABASE_PORT:-5432}" -DB_NAME="${DATABASE_NAME:-ffmpeg_api}" +DB_NAME="${DATABASE_NAME:-rendiff}" DB_USER="${DATABASE_USER:-postgres}" DB_PASSWORD="${DATABASE_PASSWORD:-}" diff --git a/scripts/backup-postgres.sh b/scripts/backup-postgres.sh index bb59aa3..e8ce35f 100644 --- a/scripts/backup-postgres.sh +++ b/scripts/backup-postgres.sh @@ -6,7 +6,7 @@ set -euo pipefail # Configuration BACKUP_DIR="${BACKUP_DIR:-/backup/postgres}" -S3_BUCKET="${S3_BUCKET:-ffmpeg-api-backups}" +S3_BUCKET="${S3_BUCKET:-rendiff-backups}" RETENTION_DAYS="${RETENTION_DAYS:-30}" ENCRYPTION_KEY="${BACKUP_ENCRYPTION_KEY:-}" SLACK_WEBHOOK="${SLACK_WEBHOOK:-}" @@ -14,13 +14,13 @@ SLACK_WEBHOOK="${SLACK_WEBHOOK:-}" # Database connection DB_HOST="${POSTGRES_HOST:-postgres}" DB_PORT="${POSTGRES_PORT:-5432}" -DB_NAME="${POSTGRES_DB:-ffmpeg_api}" -DB_USER="${POSTGRES_USER:-ffmpeg_user}" +DB_NAME="${POSTGRES_DB:-rendiff}" +DB_USER="${POSTGRES_USER:-rendiff_user}" export PGPASSWORD="${POSTGRES_PASSWORD}" # Timestamp TIMESTAMP=$(date +%Y%m%d_%H%M%S) -BACKUP_NAME="ffmpeg_api_backup_${TIMESTAMP}" +BACKUP_NAME="rendiff_backup_${TIMESTAMP}" # Logging log() { diff --git a/scripts/disaster-recovery.sh b/scripts/disaster-recovery.sh index 05ae95b..502dce6 100644 --- a/scripts/disaster-recovery.sh +++ b/scripts/disaster-recovery.sh @@ -5,12 +5,12 @@ set -euo pipefail # Configuration -S3_BUCKET="${S3_BUCKET:-ffmpeg-api-backups}" +S3_BUCKET="${S3_BUCKET:-rendiff-backups}" RESTORE_DIR="${RESTORE_DIR:-/tmp/restore}" TARGET_DB_HOST="${TARGET_DB_HOST:-postgres}" TARGET_DB_PORT="${TARGET_DB_PORT:-5432}" -TARGET_DB_NAME="${TARGET_DB_NAME:-ffmpeg_api}" -TARGET_DB_USER="${TARGET_DB_USER:-ffmpeg_user}" +TARGET_DB_NAME="${TARGET_DB_NAME:-rendiff}" +TARGET_DB_USER="${TARGET_DB_USER:-rendiff_user}" export PGPASSWORD="${POSTGRES_PASSWORD}" # Recovery options @@ -34,7 +34,7 @@ list_backups() { aws s3api list-objects-v2 \ --bucket "$S3_BUCKET" \ - --prefix "postgres/ffmpeg_api_backup_" \ + --prefix "postgres/rendiff_backup_" \ --query "Contents[?ends_with(Key, '.dump.gz') || ends_with(Key, '.dump.gz.gpg')].[Key,LastModified,Size]" \ --output table } @@ -43,7 +43,7 @@ list_backups() { get_latest_backup() { aws s3api list-objects-v2 \ --bucket "$S3_BUCKET" \ - --prefix "postgres/ffmpeg_api_backup_" \ + --prefix "postgres/rendiff_backup_" \ --query "Contents[?ends_with(Key, '.dump.gz') || ends_with(Key, '.dump.gz.gpg')] | sort_by(@, &LastModified) | [-1].Key" \ --output text } @@ -288,7 +288,7 @@ main() { error "RECOVERY_TIMESTAMP required for specific recovery mode" exit 1 fi - backup_key="postgres/ffmpeg_api_backup_${RECOVERY_TIMESTAMP}.dump.gz" + backup_key="postgres/rendiff_backup_${RECOVERY_TIMESTAMP}.dump.gz" ;; list) list_backups diff --git a/scripts/docker-entrypoint.sh b/scripts/docker-entrypoint.sh index 2d7da78..6f6d6bd 100755 --- a/scripts/docker-entrypoint.sh +++ b/scripts/docker-entrypoint.sh @@ -96,7 +96,7 @@ setup_monitoring() { # Setup log rotation if available if command -v logrotate &> /dev/null; then echo "Setting up log rotation..." 
- cat > /etc/logrotate.d/ffmpeg-api << 'LOGROTATE_EOF' + cat > /etc/logrotate.d/rendiff << 'LOGROTATE_EOF' /app/logs/*.log { daily missingok diff --git a/scripts/health-check.sh b/scripts/health-check.sh index 85cf3fb..fbab441 100755 --- a/scripts/health-check.sh +++ b/scripts/health-check.sh @@ -1,13 +1,13 @@ #!/bin/bash -# Comprehensive health check for FFmpeg API services +# Comprehensive health check for Rendiff services set -e # Configuration POSTGRES_HOST=${POSTGRES_HOST:-postgres} POSTGRES_PORT=${POSTGRES_PORT:-5432} -POSTGRES_USER=${POSTGRES_USER:-ffmpeg_user} -POSTGRES_DB=${POSTGRES_DB:-ffmpeg_api} +POSTGRES_USER=${POSTGRES_USER:-rendiff_user} +POSTGRES_DB=${POSTGRES_DB:-rendiff} REDIS_HOST=${REDIS_HOST:-redis} REDIS_PORT=${REDIS_PORT:-6379} diff --git a/scripts/validate-stable-build.sh b/scripts/validate-stable-build.sh index 0d610a1..ac7e508 100755 --- a/scripts/validate-stable-build.sh +++ b/scripts/validate-stable-build.sh @@ -73,7 +73,7 @@ fi log "๐Ÿ”จ Testing API container build..." if docker build -f docker/api/Dockerfile.new \ --build-arg PYTHON_VERSION="$PYTHON_VERSION" \ - -t ffmpeg-api:stable-test \ + -t rendiff:stable-test \ . >> "$LOG_FILE" 2>&1; then success "API container built successfully" else @@ -118,7 +118,7 @@ log "๐Ÿ” Validating critical dependencies..." # Test API container dependencies log "Testing API container dependencies..." -if docker run --rm ffmpeg-api:stable-test python -c " +if docker run --rm rendiff:stable-test python -c " import psycopg2 import fastapi import sqlalchemy @@ -154,7 +154,7 @@ fi # Test FFmpeg installation log "๐ŸŽฌ Testing FFmpeg installation..." -if docker run --rm ffmpeg-api:stable-test ffmpeg -version | head -1 >> "$LOG_FILE" 2>&1; then +if docker run --rm rendiff:stable-test ffmpeg -version | head -1 >> "$LOG_FILE" 2>&1; then success "FFmpeg installation verified in API container" else warning "FFmpeg verification failed in API container" @@ -174,7 +174,7 @@ if docker run -d --name api-test-container \ -p 8001:8000 \ -e DATABASE_URL="sqlite:///test.db" \ -e REDIS_URL="redis://localhost:6379" \ - ffmpeg-api:stable-test >> "$LOG_FILE" 2>&1; then + rendiff:stable-test >> "$LOG_FILE" 2>&1; then # Wait for startup sleep 10 @@ -258,7 +258,7 @@ log "๐Ÿ“‹ Detailed log: $LOG_FILE" # Cleanup test images log "๐Ÿงน Cleaning up test images..." -docker rmi ffmpeg-api:stable-test ffmpeg-worker-cpu:stable-test ffmpeg-worker-gpu:stable-test 2>/dev/null || true +docker rmi rendiff:stable-test ffmpeg-worker-cpu:stable-test ffmpeg-worker-gpu:stable-test 2>/dev/null || true echo "" echo -e "${GREEN}๐ŸŽ‰ All validation tests passed!${NC}" diff --git a/scripts/versionController.sh b/scripts/versionController.sh index 0cb8fa0..e17b34f 100755 --- a/scripts/versionController.sh +++ b/scripts/versionController.sh @@ -1,6 +1,6 @@ #!/bin/bash # Fetch remote version -remote_url="https://raw.githubusercontent.com/rendiffdev/ffmpeg-api/main/VERSION" +remote_url="https://raw.githubusercontent.com/rendiffdev/rendiff-dev/main/VERSION" remote_version=$(curl -s "$remote_url") if [[ -z "$remote_version" ]]; then echo "Error: Unable to fetch remote version." 
diff --git a/setup.sh b/setup.sh index 0f579f4..b903e8e 100755 --- a/setup.sh +++ b/setup.sh @@ -84,7 +84,7 @@ REDIS_URL=redis://redis:6379/0 # Storage STORAGE_PATH=./storage -TEMP_PATH=/tmp/ffmpeg_api +TEMP_PATH=/tmp/rendiff # Security (Disabled for development) ENABLE_API_KEYS=false diff --git a/tests/test_integration.py b/tests/test_integration.py index de16586..127e4c3 100644 --- a/tests/test_integration.py +++ b/tests/test_integration.py @@ -1,5 +1,5 @@ """ -Integration tests for FFmpeg API +Integration tests for Rendiff API Tests end-to-end workflows and component interactions """ import asyncio @@ -22,7 +22,7 @@ @pytest.fixture(scope="session") async def test_engine(): """Create test database engine.""" - test_db_url = settings.DATABASE_URL.replace("ffmpeg_api", "ffmpeg_api_test") + test_db_url = settings.DATABASE_URL.replace("rendiff", "rendiff_test") engine = create_async_engine(test_db_url, echo=True) # Create tables @@ -59,7 +59,7 @@ async def test_client(): def sample_video(): """Create a sample video file for testing.""" # Create a minimal test video using FFmpeg - test_dir = Path(tempfile.gettempdir()) / "ffmpeg_test" + test_dir = Path(tempfile.gettempdir()) / "rendiff_test" test_dir.mkdir(exist_ok=True) video_path = test_dir / "test_video.mp4"
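
Post-rename verification (not part of the patch above): the diff intentionally leaves some old-name identifiers in place — for example the `ffmpeg-worker-cpu:stable-test` / `ffmpeg-worker-gpu:stable-test` images and the `ffmpeg_*` Prometheus metric names — so a quick grep helps confirm that every remaining `ffmpeg_api` / `ffmpeg-api` reference after applying the patch is deliberate. A minimal sketch, assuming GNU grep and a checkout of the repository root; the pattern and exclusions are illustrative, not exhaustive:

```bash
#!/usr/bin/env bash
# Sketch: list lingering old-name references after applying the rename patch.
# Assumes GNU grep and that the script is run from the repository root.
set -uo pipefail

# Worker image tags (ffmpeg-worker-*) and ffmpeg_* metric names are left
# untouched by the patch, so review the hits rather than treating every
# match as an error.
grep -rnE 'ffmpeg[-_]api|ffmpeg_user' \
  --exclude-dir=.git \
  . \
  || echo "No leftover ffmpeg-api / ffmpeg_api references found."
```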