# AutoGPT Platform - Full-stack CI (workflow run #16022)
# Workflow file for this run.
# NOTE(review): GitHub's viewer flagged hidden or bidirectional Unicode text in
# this file — open it in an editor that reveals hidden Unicode characters.
name: AutoGPT Platform - Full-stack CI

on:
  push:
    branches: [master, dev]
    paths:
      - ".github/workflows/platform-fullstack-ci.yml"
      - ".github/workflows/scripts/docker-ci-fix-compose-build-cache.py"
      - ".github/workflows/scripts/get_package_version_from_lockfile.py"
      - "autogpt_platform/**"
  pull_request:
    paths:
      - ".github/workflows/platform-fullstack-ci.yml"
      - ".github/workflows/scripts/docker-ci-fix-compose-build-cache.py"
      - ".github/workflows/scripts/get_package_version_from_lockfile.py"
      - "autogpt_platform/**"
  merge_group:

# One concurrency group per merge-queue entry / PR / push SHA:
# - merge_group events key on the queue ref,
# - PR events key on the PR number (head_ref is only set for PRs),
# - pushes fall back to the commit SHA so they never cancel each other.
concurrency:
  group: ${{ github.workflow }}-${{ github.event_name == 'merge_group' && format('merge-queue-{0}', github.ref) || github.head_ref && format('pr-{0}', github.event.pull_request.number) || github.sha }}
  # Only cancel superseded runs for PRs; merge-queue and push runs must finish.
  cancel-in-progress: ${{ github.event_name == 'pull_request' }}

# All `run:` steps execute from the frontend package unless overridden.
defaults:
  run:
    shell: bash
    working-directory: autogpt_platform/frontend
jobs:
  # Warms the pnpm store cache so downstream jobs get fast installs.
  setup:
    runs-on: ubuntu-latest
    steps:
      - name: Checkout repository
        uses: actions/checkout@v6

      - name: Set up pnpm
        uses: pnpm/action-setup@v4
        with:
          package_json_file: autogpt_platform/frontend/package.json

      - name: Set up Node
        uses: actions/setup-node@v6
        with:
          node-version: "22.18.0"
          cache: "pnpm"
          cache-dependency-path: autogpt_platform/frontend/pnpm-lock.yaml

      - name: Install dependencies to populate cache
        run: pnpm install --frozen-lockfile
| check-api-types: | |
| name: check API types | |
| runs-on: ubuntu-latest | |
| needs: setup | |
| steps: | |
| - name: Checkout repository | |
| uses: actions/checkout@v6 | |
| with: | |
| submodules: recursive | |
| # ------------------------ Backend setup ------------------------ | |
| - name: Set up Backend - Set up Python | |
| uses: actions/setup-python@v5 | |
| with: | |
| python-version: "3.12" | |
| - name: Set up Backend - Install Poetry | |
| working-directory: autogpt_platform/backend | |
| run: | | |
| POETRY_VERSION=$(python ../../.github/workflows/scripts/get_package_version_from_lockfile.py poetry) | |
| echo "Installing Poetry version ${POETRY_VERSION}" | |
| curl -sSL https://install.python-poetry.org | POETRY_VERSION=$POETRY_VERSION python3 - | |
| - name: Set up Backend - Set up dependency cache | |
| uses: actions/cache@v5 | |
| with: | |
| path: ~/.cache/pypoetry | |
| key: poetry-${{ runner.os }}-${{ hashFiles('autogpt_platform/backend/poetry.lock') }} | |
| - name: Set up Backend - Install dependencies | |
| working-directory: autogpt_platform/backend | |
| run: poetry install | |
| - name: Set up Backend - Generate Prisma client | |
| working-directory: autogpt_platform/backend | |
| run: poetry run prisma generate && poetry run gen-prisma-stub | |
| - name: Set up Frontend - Export OpenAPI schema from Backend | |
| working-directory: autogpt_platform/backend | |
| run: poetry run export-api-schema --output ../frontend/src/app/api/openapi.json | |
| # ------------------------ Frontend setup ------------------------ | |
| - name: Set up Frontend - Set up pnpm | |
| uses: pnpm/action-setup@v4 | |
| with: | |
| package_json_file: autogpt_platform/frontend/package.json | |
| - name: Set up Frontend - Set up Node | |
| uses: actions/setup-node@v6 | |
| with: | |
| node-version: "22.18.0" | |
| cache: "pnpm" | |
| cache-dependency-path: autogpt_platform/frontend/pnpm-lock.yaml | |
| - name: Set up Frontend - Install dependencies | |
| run: pnpm install --frozen-lockfile | |
| - name: Set up Frontend - Format OpenAPI schema | |
| id: format-schema | |
| run: pnpm prettier --write ./src/app/api/openapi.json | |
| - name: Check for API schema changes | |
| run: | | |
| if ! git diff --exit-code src/app/api/openapi.json; then | |
| echo "❌ API schema changes detected in src/app/api/openapi.json" | |
| echo "" | |
| echo "The openapi.json file has been modified after exporting the API schema." | |
| echo "This usually means changes have been made in the BE endpoints without updating the Frontend." | |
| echo "The API schema is now out of sync with the Front-end queries." | |
| echo "" | |
| echo "To fix this:" | |
| echo "\nIn the backend directory:" | |
| echo "1. Run 'poetry run export-api-schema --output ../frontend/src/app/api/openapi.json'" | |
| echo "\nIn the frontend directory:" | |
| echo "2. Run 'pnpm prettier --write src/app/api/openapi.json'" | |
| echo "3. Run 'pnpm generate:api'" | |
| echo "4. Run 'pnpm types'" | |
| echo "5. Fix any TypeScript errors that may have been introduced" | |
| echo "6. Commit and push your changes" | |
| echo "" | |
| exit 1 | |
| else | |
| echo "✅ No API schema changes detected" | |
| fi | |
| - name: Set up Frontend - Generate API client | |
| id: generate-api-client | |
| run: pnpm orval --config ./orval.config.ts | |
| # Continue with type generation & check even if there are schema changes | |
| if: success() || (steps.format-schema.outcome == 'success') | |
| - name: Check for TypeScript errors | |
| run: pnpm types | |
| if: success() || (steps.generate-api-client.outcome == 'success') | |
| e2e_test: | |
| name: end-to-end tests | |
| runs-on: big-boi | |
| steps: | |
| - name: Checkout repository | |
| uses: actions/checkout@v6 | |
| with: | |
| submodules: recursive | |
| - name: Set up Platform - Copy default supabase .env | |
| run: | | |
| cp ../.env.default ../.env | |
| - name: Set up Platform - Copy backend .env and set OpenAI API key | |
| run: | | |
| cp ../backend/.env.default ../backend/.env | |
| echo "OPENAI_INTERNAL_API_KEY=${{ secrets.OPENAI_API_KEY }}" >> ../backend/.env | |
| echo "SCHEDULER_STARTUP_EMBEDDING_BACKFILL=false" >> ../backend/.env | |
| env: | |
| # Used by E2E test data script to generate embeddings for approved store agents | |
| OPENAI_API_KEY: ${{ secrets.OPENAI_API_KEY }} | |
| - name: Set up Platform - Set up Docker Buildx | |
| uses: docker/setup-buildx-action@v3 | |
| with: | |
| driver: docker-container | |
| driver-opts: network=host | |
| - name: Set up Platform - Expose GHA cache to docker buildx CLI | |
| uses: crazy-max/ghaction-github-runtime@v4 | |
| - name: Set up Platform - Build Docker images (with cache) | |
| working-directory: autogpt_platform | |
| run: | | |
| pip install pyyaml | |
| # Resolve extends and generate a flat compose file that bake can understand | |
| export NEXT_PUBLIC_SOURCEMAPS NEXT_PUBLIC_PW_TEST | |
| docker compose -f docker-compose.yml config > docker-compose.resolved.yml | |
| # Ensure NEXT_PUBLIC_SOURCEMAPS is in resolved compose | |
| # (docker compose config on some versions drops this arg) | |
| if ! grep -q "NEXT_PUBLIC_SOURCEMAPS" docker-compose.resolved.yml; then | |
| echo "Injecting NEXT_PUBLIC_SOURCEMAPS into resolved compose (docker compose config dropped it)" | |
| sed -i '/NEXT_PUBLIC_PW_TEST/a\ NEXT_PUBLIC_SOURCEMAPS: "true"' docker-compose.resolved.yml | |
| fi | |
| # Add cache configuration to the resolved compose file | |
| python ../.github/workflows/scripts/docker-ci-fix-compose-build-cache.py \ | |
| --source docker-compose.resolved.yml \ | |
| --cache-from "type=gha" \ | |
| --cache-to "type=gha,mode=max" \ | |
| --backend-hash "${{ hashFiles('autogpt_platform/backend/Dockerfile', 'autogpt_platform/backend/poetry.lock', 'autogpt_platform/backend/backend/**') }}" \ | |
| --frontend-hash "${{ hashFiles('autogpt_platform/frontend/Dockerfile', 'autogpt_platform/frontend/pnpm-lock.yaml', 'autogpt_platform/frontend/src/**') }}-sourcemaps" \ | |
| --git-ref "${{ github.ref }}" | |
| # Build with bake using the resolved compose file (now includes cache config) | |
| docker buildx bake --allow=fs.read=.. -f docker-compose.resolved.yml --load | |
| env: | |
| NEXT_PUBLIC_PW_TEST: true | |
| NEXT_PUBLIC_SOURCEMAPS: true | |
| - name: Set up tests - Cache E2E test data | |
| id: e2e-data-cache | |
| uses: actions/cache@v5 | |
| with: | |
| path: /tmp/e2e_test_data.sql | |
| key: e2e-test-data-${{ hashFiles('autogpt_platform/backend/test/e2e_test_data.py', 'autogpt_platform/backend/migrations/**', '.github/workflows/platform-fullstack-ci.yml') }} | |
| - name: Set up Platform - Start Supabase DB + Auth | |
| run: | | |
| docker compose -f ../docker-compose.resolved.yml up -d db auth --no-build | |
| echo "Waiting for database to be ready..." | |
| timeout 60 sh -c 'until docker compose -f ../docker-compose.resolved.yml exec -T db pg_isready -U postgres 2>/dev/null; do sleep 2; done' | |
| echo "Waiting for auth service to be ready..." | |
| timeout 60 sh -c 'until docker compose -f ../docker-compose.resolved.yml exec -T db psql -U postgres -d postgres -c "SELECT 1 FROM auth.users LIMIT 1" 2>/dev/null; do sleep 2; done' || echo "Auth schema check timeout, continuing..." | |
| - name: Set up Platform - Run migrations | |
| run: | | |
| echo "Running migrations..." | |
| docker compose -f ../docker-compose.resolved.yml run --rm migrate | |
| echo "✅ Migrations completed" | |
| env: | |
| NEXT_PUBLIC_PW_TEST: true | |
| - name: Set up tests - Load cached E2E test data | |
| if: steps.e2e-data-cache.outputs.cache-hit == 'true' | |
| run: | | |
| echo "✅ Found cached E2E test data, restoring..." | |
| { | |
| echo "SET session_replication_role = 'replica';" | |
| cat /tmp/e2e_test_data.sql | |
| echo "SET session_replication_role = 'origin';" | |
| } | docker compose -f ../docker-compose.resolved.yml exec -T db psql -U postgres -d postgres -b | |
| # Refresh materialized views after restore | |
| docker compose -f ../docker-compose.resolved.yml exec -T db \ | |
| psql -U postgres -d postgres -b -c "SET search_path TO platform; SELECT refresh_store_materialized_views();" || true | |
| echo "✅ E2E test data restored from cache" | |
| - name: Set up Platform - Start (all other services) | |
| run: | | |
| docker compose -f ../docker-compose.resolved.yml up -d --no-build | |
| echo "Waiting for rest_server to be ready..." | |
| timeout 60 sh -c 'until curl -f http://localhost:8006/health 2>/dev/null; do sleep 2; done' || echo "Rest server health check timeout, continuing..." | |
| env: | |
| NEXT_PUBLIC_PW_TEST: true | |
| - name: Set up tests - Create E2E test data | |
| if: steps.e2e-data-cache.outputs.cache-hit != 'true' | |
| run: | | |
| echo "Creating E2E test data..." | |
| docker cp ../backend/test/e2e_test_data.py $(docker compose -f ../docker-compose.resolved.yml ps -q rest_server):/tmp/e2e_test_data.py | |
| docker compose -f ../docker-compose.resolved.yml exec -T rest_server sh -c "cd /app/autogpt_platform && python /tmp/e2e_test_data.py" || { | |
| echo "❌ E2E test data creation failed!" | |
| docker compose -f ../docker-compose.resolved.yml logs --tail=50 rest_server | |
| exit 1 | |
| } | |
| # Dump auth.users + platform schema for cache (two separate dumps) | |
| echo "Dumping database for cache..." | |
| { | |
| docker compose -f ../docker-compose.resolved.yml exec -T db \ | |
| pg_dump -U postgres --data-only --column-inserts \ | |
| --table='auth.users' postgres | |
| docker compose -f ../docker-compose.resolved.yml exec -T db \ | |
| pg_dump -U postgres --data-only --column-inserts \ | |
| --schema=platform \ | |
| --exclude-table='platform._prisma_migrations' \ | |
| --exclude-table='platform.apscheduler_jobs' \ | |
| --exclude-table='platform.apscheduler_jobs_batched_notifications' \ | |
| postgres | |
| } > /tmp/e2e_test_data.sql | |
| echo "✅ Database dump created for caching ($(wc -l < /tmp/e2e_test_data.sql) lines)" | |
| - name: Set up tests - Set up pnpm | |
| uses: pnpm/action-setup@v4 | |
| with: | |
| package_json_file: autogpt_platform/frontend/package.json | |
| - name: Set up tests - Set up Node | |
| uses: actions/setup-node@v6 | |
| with: | |
| node-version: "22.18.0" | |
| cache: "pnpm" | |
| cache-dependency-path: autogpt_platform/frontend/pnpm-lock.yaml | |
| - name: Set up tests - Cache Playwright browsers | |
| uses: actions/cache@v5 | |
| with: | |
| path: ~/.cache/ms-playwright | |
| key: playwright-${{ runner.os }}-${{ hashFiles('autogpt_platform/frontend/pnpm-lock.yaml') }} | |
| restore-keys: | | |
| playwright-${{ runner.os }}- | |
| - name: Copy source maps from Docker for E2E coverage | |
| run: | | |
| FRONTEND_CONTAINER=$(docker compose -f ../docker-compose.resolved.yml ps -q frontend) | |
| docker cp "$FRONTEND_CONTAINER":/app/.next/static .next-static-coverage | |
| - name: Set up tests - Install dependencies | |
| run: pnpm install --frozen-lockfile | |
| - name: Set up tests - Install browser 'chromium' | |
| run: pnpm playwright install --with-deps chromium | |
| - name: Run Playwright E2E suite | |
| run: pnpm test:e2e:no-build | |
| continue-on-error: false | |
| - name: Upload E2E coverage to Codecov | |
| if: ${{ !cancelled() }} | |
| uses: codecov/codecov-action@v5 | |
| with: | |
| token: ${{ secrets.CODECOV_TOKEN }} | |
| flags: platform-frontend-e2e | |
| files: ./autogpt_platform/frontend/coverage/e2e/cobertura-coverage.xml | |
| disable_search: true | |
| - name: Upload Playwright report | |
| if: always() | |
| uses: actions/upload-artifact@v4 | |
| with: | |
| name: playwright-report | |
| path: autogpt_platform/frontend/playwright-report | |
| if-no-files-found: ignore | |
| retention-days: 3 | |
| - name: Upload Playwright test results | |
| if: always() | |
| uses: actions/upload-artifact@v4 | |
| with: | |
| name: playwright-test-results | |
| path: autogpt_platform/frontend/test-results | |
| if-no-files-found: ignore | |
| retention-days: 3 | |
| - name: Print Final Docker Compose logs | |
| if: always() | |
| run: docker compose -f ../docker-compose.resolved.yml logs |