name: CI/CD Pipeline

on:
  push:
    # "feature/*" quoted: a bare * is an alias sigil for generic YAML tooling.
    branches: [ main, dev, "feature/*" ]
  pull_request:
    branches: [ main, dev ]

jobs:
  # ===== TESTING PHASE =====
  test:
    runs-on: ubuntu-latest
    services:
      redis:
        image: redis:7-alpine
        ports:
          # Quoted: unquoted host:container pairs can hit YAML 1.1 sexagesimal parsing.
          - "6379:6379"
        options: >-
          --health-cmd "redis-cli ping"
          --health-interval 10s
          --health-timeout 5s
          --health-retries 5

    steps:
      - name: Checkout code
        uses: actions/checkout@v3

      - name: Setup Python
        uses: actions/setup-python@v4
        with:
          python-version: "3.13"

      - name: Install uv
        uses: astral-sh/setup-uv@v1
        with:
          version: "1.0.0"

      - name: Cache dependencies
        uses: actions/cache@v3
        with:
          path: |
            .venv
            .uv_cache
          key: ${{ runner.os }}-uv-3.13-${{ hashFiles('**/uv.lock') }}
          restore-keys: ${{ runner.os }}-uv-3.13-

      - name: Install dependencies
        run: |
          uv sync --group dev
          cd panel && npm ci && cd ..

      # NOTE(review): redis-cli is not preinstalled on ubuntu-latest runners, so this
      # probe likely always falls through to the "continue anyway" branch — confirm,
      # or `sudo apt-get install -y redis-tools` first. The redis service container
      # already enforces its own healthcheck (see `options` above).
      - name: Verify Redis connection
        run: |
          echo "Verifying Redis connection..."
          max_retries=5
          for attempt in $(seq 1 $max_retries); do
            if redis-cli ping > /dev/null 2>&1; then
              echo "✅ Redis is ready!"
              break
            else
              if [ $attempt -eq $max_retries ]; then
                echo "❌ Redis connection failed after $max_retries attempts"
                echo "⚠️ Tests may fail due to Redis unavailability"
                # Не выходим с ошибкой, продолжаем тесты
                break
              else
                echo "⚠️ Redis not ready, retrying in 2 seconds... (attempt $attempt/$max_retries)"
                sleep 2
              fi
            fi
          done

      # Fails the job on the first failing check; each tool reports explicitly.
      - name: Run linting and type checking
        run: |
          echo "🔍 Запускаем проверки качества кода..."

          # Ruff linting
          echo "📝 Проверяем код с помощью Ruff..."
          if uv run ruff check .; then
            echo "✅ Ruff проверка прошла успешно"
          else
            echo "❌ Ruff нашел проблемы в коде"
            exit 1
          fi

          # Ruff formatting check
          echo "🎨 Проверяем форматирование с помощью Ruff..."
          if uv run ruff format --check .; then
            echo "✅ Форматирование корректно"
          else
            echo "❌ Код не отформатирован согласно стандартам"
            exit 1
          fi

          # MyPy type checking
          echo "🏷️ Проверяем типы с помощью MyPy..."
          if uv run mypy . --ignore-missing-imports; then
            echo "✅ MyPy проверка прошла успешно"
          else
            echo "❌ MyPy нашел проблемы с типами"
            exit 1
          fi

      - name: Setup test environment
        run: |
          echo "Setting up test environment..."
          # Создаем .env.test для тестов
          cat > .env.test << EOF
          DATABASE_URL=sqlite:///database.db
          REDIS_URL=redis://localhost:6379
          TEST_MODE=true
          EOF
          # Проверяем что файл создан
          echo "Test environment file created:"
          cat .env.test

      # Creates all ORM tables in a fresh SQLite file and verifies they exist.
      - name: Initialize test database
        run: |
          echo "Initializing test database..."
          touch database.db
          uv run python -c "
          import time
          import sys
          from pathlib import Path

          # Добавляем корневую папку в путь
          sys.path.insert(0, str(Path.cwd()))

          try:
              from orm.base import Base
              from orm.community import Community, CommunityFollower, CommunityAuthor
              from orm.draft import Draft
              from orm.invite import Invite
              from orm.notification import Notification
              from orm.reaction import Reaction
              from orm.shout import Shout
              from orm.topic import Topic
              from orm.author import Author, AuthorBookmark, AuthorRating, AuthorFollower
              from storage.db import engine
              from sqlalchemy import inspect

              print('✅ Engine imported successfully')
              print('Creating all tables...')
              Base.metadata.create_all(engine)

              # Проверяем что таблицы созданы
              inspector = inspect(engine)
              tables = inspector.get_table_names()
              print(f'✅ Created tables: {tables}')

              # Проверяем конкретно community_author
              if 'community_author' in tables:
                  print('✅ community_author table exists!')
              else:
                  print('❌ community_author table missing!')
                  print('Available tables:', tables)
          except Exception as e:
              print(f'❌ Error initializing database: {e}')
              import traceback
              traceback.print_exc()
              sys.exit(1)
          "

      # Backend (8000) and frontend (3000) are started in the background with a
      # 300 s hard cap; the job waits up to 180 s for both to answer HTTP.
      - name: Start servers
        run: |
          chmod +x ./ci-server.py
          timeout 300 python ./ci-server.py &
          echo $! > ci-server.pid

          echo "Waiting for servers..."
          timeout 180 bash -c '
            while ! (curl -f http://localhost:8000/ > /dev/null 2>&1 && \
                     curl -f http://localhost:3000/ > /dev/null 2>&1); do
              sleep 3
            done
            echo "Servers ready!"
          '

      # Browser-marked tests are best-effort (never fail the job); all other
      # marker groups retry up to 3 times and fail the job on exhaustion.
      - name: Run tests with retry
        run: |
          # Создаем папку для результатов тестов
          mkdir -p test-results

          # Сначала проверяем здоровье серверов
          echo "🏥 Проверяем здоровье серверов..."
          if uv run pytest tests/test_server_health.py -v; then
            echo "✅ Серверы здоровы!"
          else
            echo "⚠️ Тест здоровья серверов не прошел, но продолжаем..."
          fi

          for test_type in "not e2e" "integration" "e2e" "browser"; do
            echo "Running $test_type tests..."
            max_retries=3  # Увеличиваем количество попыток

            for attempt in $(seq 1 $max_retries); do
              echo "Attempt $attempt/$max_retries for $test_type tests..."

              # Добавляем специальные параметры для browser тестов
              if [ "$test_type" = "browser" ]; then
                echo "🚀 Запускаем browser тесты с увеличенным таймаутом..."
                if uv run pytest tests/ -m "$test_type" -v --tb=short --timeout=60; then
                  echo "✅ $test_type tests passed!"
                  break
                else
                  if [ $attempt -eq $max_retries ]; then
                    echo "⚠️ Browser tests failed after $max_retries attempts (expected in CI) - continuing..."
                    break
                  else
                    echo "⚠️ Browser tests failed, retrying in 15 seconds..."
                    sleep 15
                  fi
                fi
              else
                # Обычные тесты
                if uv run pytest tests/ -m "$test_type" -v --tb=short; then
                  echo "✅ $test_type tests passed!"
                  break
                else
                  if [ $attempt -eq $max_retries ]; then
                    echo "❌ $test_type tests failed after $max_retries attempts"
                    exit 1
                  else
                    echo "⚠️ $test_type tests failed, retrying in 10 seconds..."
                    sleep 10
                  fi
                fi
              fi
            done
          done

      - name: Generate coverage
        run: uv run pytest tests/ --cov=. --cov-report=xml --cov-report=html

      - name: Upload coverage
        uses: codecov/codecov-action@v3
        with:
          file: ./coverage.xml
          fail_ci_if_error: false

      # Best-effort teardown: kill the supervisor by recorded PID, then sweep any
      # stray dev/server processes. Never fails the job.
      - name: Cleanup
        if: always()
        run: |
          [ -f ci-server.pid ] && kill $(cat ci-server.pid) 2>/dev/null || true
          pkill -f "python dev.py|npm run dev|vite|ci-server.py" || true
          rm -f backend.pid frontend.pid ci-server.pid

  # ===== CODE QUALITY PHASE =====
  quality:
    runs-on: ubuntu-latest
    steps:
      - name: Checkout code
        uses: actions/checkout@v3

      - name: Setup Python
        uses: actions/setup-python@v4
        with:
          python-version: "3.13"

      - name: Install uv
        uses: astral-sh/setup-uv@v1
        with:
          version: "1.0.0"

      # BUG FIX: `uv sync` makes the environment match the requested groups
      # exactly, so two sequential syncs (`--group lint`, then `--group dev`)
      # would leave only the dev group installed. Request both in one sync.
      - name: Install dependencies
        run: uv sync --group lint --group dev

      - name: Run quality checks
        run: |
          uv run ruff check .
          uv run mypy . --strict

  # ===== DEPLOYMENT PHASE =====
  deploy:
    runs-on: ubuntu-latest
    needs: [test, quality]
    if: github.ref == 'refs/heads/main' || github.ref == 'refs/heads/dev'
    # NOTE(review): this environment gates dev-branch (staging) deploys behind
    # the `production` environment's protection rules too — confirm intended.
    environment: production
    steps:
      - name: Checkout code
        uses: actions/checkout@v3
        with:
          # Full history: dokku needs real commits to push, not a shallow clone.
          fetch-depth: 0

      - name: Deploy
        env:
          HOST_KEY: ${{ secrets.SSH_PRIVATE_KEY }}
          TARGET: ${{ github.ref == 'refs/heads/dev' && 'core' || 'discoursio-api' }}
          SERVER: ${{ github.ref == 'refs/heads/dev' && 'STAGING' || 'V' }}
          # BUG FIX: $ENV was referenced below but never defined, so the deploy
          # messages printed an empty target name.
          ENV: ${{ github.ref == 'refs/heads/dev' && 'staging' || 'production' }}
        run: |
          echo "🚀 Deploying to $ENV..."
          mkdir -p ~/.ssh
          # BUG FIX: the private key was being written to known_hosts, which
          # breaks SSH auth entirely. It belongs in an identity file (0600);
          # known_hosts is populated via ssh-keyscan so the push is non-interactive.
          echo "$HOST_KEY" > ~/.ssh/id_rsa
          chmod 600 ~/.ssh/id_rsa
          ssh-keyscan staging.discours.io >> ~/.ssh/known_hosts
          git remote add dokku dokku@staging.discours.io:$TARGET
          git push dokku HEAD:main -f
          echo "✅ $ENV deployment completed!"
# ===== SUMMARY ===== summary: runs-on: ubuntu-latest needs: [test, quality, deploy] if: always() steps: - name: Pipeline Summary run: | echo "## 🎯 CI/CD Pipeline Summary" >> $GITHUB_STEP_SUMMARY echo "" >> $GITHUB_STEP_SUMMARY echo "### 📊 Test Results: ${{ needs.test.result }}" >> $GITHUB_STEP_SUMMARY echo "### 🔍 Code Quality: ${{ needs.quality.result }}" >> $GITHUB_STEP_SUMMARY echo "### 🚀 Deployment: ${{ needs.deploy.result || 'skipped' }}" >> $GITHUB_STEP_SUMMARY echo "### 📈 Coverage: Generated (XML + HTML)" >> $GITHUB_STEP_SUMMARY