diff --git a/.cursor/mcp.json b/.cursor/mcp.json new file mode 100644 index 00000000000..722b0bb9746 --- /dev/null +++ b/.cursor/mcp.json @@ -0,0 +1,16 @@ +{ + "mcpServers": { + "Playwright": { + "command": "npx", + "args": [ + "@playwright/mcp" + ] + }, + "Linear": { + "url": "https://mcp.linear.app/mcp" + }, + "Figma": { + "url": "https://mcp.figma.com/mcp" + } + } +} diff --git a/.github/CODEOWNERS b/.github/CODEOWNERS index 5ed1a18fe27..6af2d449f2b 100644 --- a/.github/CODEOWNERS +++ b/.github/CODEOWNERS @@ -6,5 +6,5 @@ /web/STANDARDS.md @raunakab @Weves # Agent context files -/CLAUDE.md.template @Weves -/AGENTS.md.template @Weves +/CLAUDE.md @Weves +/AGENTS.md @Weves diff --git a/.github/workflows/deployment.yml b/.github/workflows/deployment.yml index 38ba44b585c..01c135d527f 100644 --- a/.github/workflows/deployment.yml +++ b/.github/workflows/deployment.yml @@ -82,7 +82,7 @@ jobs: if [[ "$TAG" =~ ^v[0-9]+\.[0-9]+\.[0-9]+$ ]]; then IS_STABLE=true fi - if [[ "$TAG" =~ ^v[0-9]+\.[0-9]+\.[0-9]+-beta\.[0-9]+$ ]]; then + if [[ "$TAG" =~ ^v[0-9]+\.[0-9]+\.[0-9]+-beta(\.[0-9]+)?$ ]]; then IS_BETA=true fi @@ -91,8 +91,8 @@ jobs: BUILD_WEB_CLOUD=true else BUILD_WEB=true - # Skip desktop builds on beta tags and nightly runs - if [[ "$IS_BETA" != "true" ]] && [[ "$IS_NIGHTLY" != "true" ]]; then + # Only build desktop for semver tags (excluding beta) + if [[ "$IS_VERSION_TAG" == "true" ]] && [[ "$IS_BETA" != "true" ]]; then BUILD_DESKTOP=true fi fi @@ -174,23 +174,10 @@ jobs: with: persist-credentials: false - - name: Configure AWS credentials - uses: aws-actions/configure-aws-credentials@61815dcd50bd041e203e49132bacad1fd04d2708 - with: - role-to-assume: ${{ secrets.AWS_OIDC_ROLE_ARN }} - aws-region: us-east-2 - - - name: Get AWS Secrets - uses: aws-actions/aws-secretsmanager-get-secrets@a9a7eb4e2f2871d30dc5b892576fde60a2ecc802 - with: - secret-ids: | - MONITOR_DEPLOYMENTS_WEBHOOK, deploy/monitor-deployments-webhook - parse-json-secrets: true - - name: Send Slack 
notification uses: ./.github/actions/slack-notify with: - webhook-url: ${{ env.MONITOR_DEPLOYMENTS_WEBHOOK }} + webhook-url: ${{ secrets.MONITOR_DEPLOYMENTS_WEBHOOK }} failed-jobs: "• check-version-tag" title: "🚨 Version Tag Check Failed" ref-name: ${{ github.ref_name }} @@ -262,7 +249,7 @@ jobs: xdg-utils - name: setup node - uses: actions/setup-node@395ad3262231945c25e8478fd5baf05154b1d79f # ratchet:actions/setup-node@v6.1.0 + uses: actions/setup-node@6044e13b5dc448c55e2357c09f80417699197238 # ratchet:actions/setup-node@v6.2.0 with: node-version: 24 package-manager-cache: false @@ -422,7 +409,7 @@ jobs: uses: docker/setup-buildx-action@8d2750c68a42422c14e847fe6c8ac0403b4cbd6f # ratchet:docker/setup-buildx-action@v3 - name: Login to Docker Hub - uses: docker/login-action@5e57cd118135c172c3672efd75eb46360885c0ef # ratchet:docker/login-action@v3 + uses: docker/login-action@c94ce9fb468520275223c153574b00df6fe4bcc9 # ratchet:docker/login-action@v3 with: username: ${{ env.DOCKER_USERNAME }} password: ${{ env.DOCKER_TOKEN }} @@ -495,7 +482,7 @@ jobs: uses: docker/setup-buildx-action@8d2750c68a42422c14e847fe6c8ac0403b4cbd6f # ratchet:docker/setup-buildx-action@v3 - name: Login to Docker Hub - uses: docker/login-action@5e57cd118135c172c3672efd75eb46360885c0ef # ratchet:docker/login-action@v3 + uses: docker/login-action@c94ce9fb468520275223c153574b00df6fe4bcc9 # ratchet:docker/login-action@v3 with: username: ${{ env.DOCKER_USERNAME }} password: ${{ env.DOCKER_TOKEN }} @@ -555,7 +542,7 @@ jobs: uses: docker/setup-buildx-action@8d2750c68a42422c14e847fe6c8ac0403b4cbd6f # ratchet:docker/setup-buildx-action@v3 - name: Login to Docker Hub - uses: docker/login-action@5e57cd118135c172c3672efd75eb46360885c0ef # ratchet:docker/login-action@v3 + uses: docker/login-action@c94ce9fb468520275223c153574b00df6fe4bcc9 # ratchet:docker/login-action@v3 with: username: ${{ env.DOCKER_USERNAME }} password: ${{ env.DOCKER_TOKEN }} @@ -633,7 +620,7 @@ jobs: uses: 
docker/setup-buildx-action@8d2750c68a42422c14e847fe6c8ac0403b4cbd6f # ratchet:docker/setup-buildx-action@v3 - name: Login to Docker Hub - uses: docker/login-action@5e57cd118135c172c3672efd75eb46360885c0ef # ratchet:docker/login-action@v3 + uses: docker/login-action@c94ce9fb468520275223c153574b00df6fe4bcc9 # ratchet:docker/login-action@v3 with: username: ${{ env.DOCKER_USERNAME }} password: ${{ env.DOCKER_TOKEN }} @@ -653,6 +640,7 @@ jobs: NEXT_PUBLIC_POSTHOG_HOST=${{ secrets.POSTHOG_HOST }} NEXT_PUBLIC_SENTRY_DSN=${{ secrets.SENTRY_DSN }} NEXT_PUBLIC_STRIPE_PUBLISHABLE_KEY=${{ secrets.STRIPE_PUBLISHABLE_KEY }} + NEXT_PUBLIC_RECAPTCHA_SITE_KEY=${{ vars.NEXT_PUBLIC_RECAPTCHA_SITE_KEY }} NEXT_PUBLIC_GTM_ENABLED=true NEXT_PUBLIC_FORGOT_PASSWORD_ENABLED=true NEXT_PUBLIC_INCLUDE_ERROR_POPUP_SUPPORT_LINK=true @@ -714,7 +702,7 @@ jobs: uses: docker/setup-buildx-action@8d2750c68a42422c14e847fe6c8ac0403b4cbd6f # ratchet:docker/setup-buildx-action@v3 - name: Login to Docker Hub - uses: docker/login-action@5e57cd118135c172c3672efd75eb46360885c0ef # ratchet:docker/login-action@v3 + uses: docker/login-action@c94ce9fb468520275223c153574b00df6fe4bcc9 # ratchet:docker/login-action@v3 with: username: ${{ env.DOCKER_USERNAME }} password: ${{ env.DOCKER_TOKEN }} @@ -734,6 +722,7 @@ jobs: NEXT_PUBLIC_POSTHOG_HOST=${{ secrets.POSTHOG_HOST }} NEXT_PUBLIC_SENTRY_DSN=${{ secrets.SENTRY_DSN }} NEXT_PUBLIC_STRIPE_PUBLISHABLE_KEY=${{ secrets.STRIPE_PUBLISHABLE_KEY }} + NEXT_PUBLIC_RECAPTCHA_SITE_KEY=${{ vars.NEXT_PUBLIC_RECAPTCHA_SITE_KEY }} NEXT_PUBLIC_GTM_ENABLED=true NEXT_PUBLIC_FORGOT_PASSWORD_ENABLED=true NEXT_PUBLIC_INCLUDE_ERROR_POPUP_SUPPORT_LINK=true @@ -782,7 +771,7 @@ jobs: uses: docker/setup-buildx-action@8d2750c68a42422c14e847fe6c8ac0403b4cbd6f # ratchet:docker/setup-buildx-action@v3 - name: Login to Docker Hub - uses: docker/login-action@5e57cd118135c172c3672efd75eb46360885c0ef # ratchet:docker/login-action@v3 + uses: docker/login-action@c94ce9fb468520275223c153574b00df6fe4bcc9 
# ratchet:docker/login-action@v3 with: username: ${{ env.DOCKER_USERNAME }} password: ${{ env.DOCKER_TOKEN }} @@ -857,7 +846,7 @@ jobs: uses: docker/setup-buildx-action@8d2750c68a42422c14e847fe6c8ac0403b4cbd6f # ratchet:docker/setup-buildx-action@v3 - name: Login to Docker Hub - uses: docker/login-action@5e57cd118135c172c3672efd75eb46360885c0ef # ratchet:docker/login-action@v3 + uses: docker/login-action@c94ce9fb468520275223c153574b00df6fe4bcc9 # ratchet:docker/login-action@v3 with: username: ${{ env.DOCKER_USERNAME }} password: ${{ env.DOCKER_TOKEN }} @@ -929,7 +918,7 @@ jobs: uses: docker/setup-buildx-action@8d2750c68a42422c14e847fe6c8ac0403b4cbd6f # ratchet:docker/setup-buildx-action@v3 - name: Login to Docker Hub - uses: docker/login-action@5e57cd118135c172c3672efd75eb46360885c0ef # ratchet:docker/login-action@v3 + uses: docker/login-action@c94ce9fb468520275223c153574b00df6fe4bcc9 # ratchet:docker/login-action@v3 with: username: ${{ env.DOCKER_USERNAME }} password: ${{ env.DOCKER_TOKEN }} @@ -988,7 +977,7 @@ jobs: uses: docker/setup-buildx-action@8d2750c68a42422c14e847fe6c8ac0403b4cbd6f # ratchet:docker/setup-buildx-action@v3 - name: Login to Docker Hub - uses: docker/login-action@5e57cd118135c172c3672efd75eb46360885c0ef # ratchet:docker/login-action@v3 + uses: docker/login-action@c94ce9fb468520275223c153574b00df6fe4bcc9 # ratchet:docker/login-action@v3 with: username: ${{ env.DOCKER_USERNAME }} password: ${{ env.DOCKER_TOKEN }} @@ -1066,7 +1055,7 @@ jobs: uses: docker/setup-buildx-action@8d2750c68a42422c14e847fe6c8ac0403b4cbd6f # ratchet:docker/setup-buildx-action@v3 - name: Login to Docker Hub - uses: docker/login-action@5e57cd118135c172c3672efd75eb46360885c0ef # ratchet:docker/login-action@v3 + uses: docker/login-action@c94ce9fb468520275223c153574b00df6fe4bcc9 # ratchet:docker/login-action@v3 with: username: ${{ env.DOCKER_USERNAME }} password: ${{ env.DOCKER_TOKEN }} @@ -1139,7 +1128,7 @@ jobs: uses: 
docker/setup-buildx-action@8d2750c68a42422c14e847fe6c8ac0403b4cbd6f # ratchet:docker/setup-buildx-action@v3 - name: Login to Docker Hub - uses: docker/login-action@5e57cd118135c172c3672efd75eb46360885c0ef # ratchet:docker/login-action@v3 + uses: docker/login-action@c94ce9fb468520275223c153574b00df6fe4bcc9 # ratchet:docker/login-action@v3 with: username: ${{ env.DOCKER_USERNAME }} password: ${{ env.DOCKER_TOKEN }} @@ -1200,7 +1189,7 @@ jobs: uses: docker/setup-buildx-action@8d2750c68a42422c14e847fe6c8ac0403b4cbd6f # ratchet:docker/setup-buildx-action@v3 - name: Login to Docker Hub - uses: docker/login-action@5e57cd118135c172c3672efd75eb46360885c0ef # ratchet:docker/login-action@v3 + uses: docker/login-action@c94ce9fb468520275223c153574b00df6fe4bcc9 # ratchet:docker/login-action@v3 with: username: ${{ env.DOCKER_USERNAME }} password: ${{ env.DOCKER_TOKEN }} @@ -1280,7 +1269,7 @@ jobs: buildkitd-flags: ${{ vars.DOCKER_DEBUG == 'true' && '--debug' || '' }} - name: Login to Docker Hub - uses: docker/login-action@5e57cd118135c172c3672efd75eb46360885c0ef # ratchet:docker/login-action@v3 + uses: docker/login-action@c94ce9fb468520275223c153574b00df6fe4bcc9 # ratchet:docker/login-action@v3 with: username: ${{ env.DOCKER_USERNAME }} password: ${{ env.DOCKER_TOKEN }} @@ -1359,7 +1348,7 @@ jobs: buildkitd-flags: ${{ vars.DOCKER_DEBUG == 'true' && '--debug' || '' }} - name: Login to Docker Hub - uses: docker/login-action@5e57cd118135c172c3672efd75eb46360885c0ef # ratchet:docker/login-action@v3 + uses: docker/login-action@c94ce9fb468520275223c153574b00df6fe4bcc9 # ratchet:docker/login-action@v3 with: username: ${{ env.DOCKER_USERNAME }} password: ${{ env.DOCKER_TOKEN }} @@ -1422,7 +1411,7 @@ jobs: uses: docker/setup-buildx-action@8d2750c68a42422c14e847fe6c8ac0403b4cbd6f # ratchet:docker/setup-buildx-action@v3 - name: Login to Docker Hub - uses: docker/login-action@5e57cd118135c172c3672efd75eb46360885c0ef # ratchet:docker/login-action@v3 + uses: 
docker/login-action@c94ce9fb468520275223c153574b00df6fe4bcc9 # ratchet:docker/login-action@v3 with: username: ${{ env.DOCKER_USERNAME }} password: ${{ env.DOCKER_TOKEN }} @@ -1709,19 +1698,6 @@ jobs: with: persist-credentials: false - - name: Configure AWS credentials - uses: aws-actions/configure-aws-credentials@61815dcd50bd041e203e49132bacad1fd04d2708 - with: - role-to-assume: ${{ secrets.AWS_OIDC_ROLE_ARN }} - aws-region: us-east-2 - - - name: Get AWS Secrets - uses: aws-actions/aws-secretsmanager-get-secrets@a9a7eb4e2f2871d30dc5b892576fde60a2ecc802 - with: - secret-ids: | - MONITOR_DEPLOYMENTS_WEBHOOK, deploy/monitor-deployments-webhook - parse-json-secrets: true - - name: Determine failed jobs id: failed-jobs shell: bash @@ -1787,7 +1763,7 @@ jobs: - name: Send Slack notification uses: ./.github/actions/slack-notify with: - webhook-url: ${{ env.MONITOR_DEPLOYMENTS_WEBHOOK }} + webhook-url: ${{ secrets.MONITOR_DEPLOYMENTS_WEBHOOK }} failed-jobs: ${{ steps.failed-jobs.outputs.jobs }} title: "🚨 Deployment Workflow Failed" ref-name: ${{ github.ref_name }} diff --git a/.github/workflows/docker-tag-beta.yml b/.github/workflows/docker-tag-beta.yml index 0ab41dd6885..61becc2f7df 100644 --- a/.github/workflows/docker-tag-beta.yml +++ b/.github/workflows/docker-tag-beta.yml @@ -24,7 +24,7 @@ jobs: uses: docker/setup-buildx-action@8d2750c68a42422c14e847fe6c8ac0403b4cbd6f # ratchet:docker/setup-buildx-action@v3 - name: Login to Docker Hub - uses: docker/login-action@5e57cd118135c172c3672efd75eb46360885c0ef # ratchet:docker/login-action@v3 + uses: docker/login-action@c94ce9fb468520275223c153574b00df6fe4bcc9 # ratchet:docker/login-action@v3 with: username: ${{ secrets.DOCKER_USERNAME }} password: ${{ secrets.DOCKER_TOKEN }} diff --git a/.github/workflows/docker-tag-latest.yml b/.github/workflows/docker-tag-latest.yml index 96c7ab51ef0..d774ac48880 100644 --- a/.github/workflows/docker-tag-latest.yml +++ b/.github/workflows/docker-tag-latest.yml @@ -24,7 +24,7 @@ jobs: uses: 
docker/setup-buildx-action@8d2750c68a42422c14e847fe6c8ac0403b4cbd6f # ratchet:docker/setup-buildx-action@v3 - name: Login to Docker Hub - uses: docker/login-action@5e57cd118135c172c3672efd75eb46360885c0ef # ratchet:docker/login-action@v3 + uses: docker/login-action@c94ce9fb468520275223c153574b00df6fe4bcc9 # ratchet:docker/login-action@v3 with: username: ${{ secrets.DOCKER_USERNAME }} password: ${{ secrets.DOCKER_TOKEN }} diff --git a/.github/workflows/nightly-scan-licenses.yml b/.github/workflows/nightly-scan-licenses.yml index 6a2a2005433..8bc933984d7 100644 --- a/.github/workflows/nightly-scan-licenses.yml +++ b/.github/workflows/nightly-scan-licenses.yml @@ -33,7 +33,7 @@ jobs: persist-credentials: false - name: Set up Python - uses: actions/setup-python@83679a892e2d95755f2dac6acb0bfd1e9ac5d548 # ratchet:actions/setup-python@v6 + uses: actions/setup-python@a309ff8b426b58ec0e2a45f0f869d46889d02405 # ratchet:actions/setup-python@v6 with: python-version: '3.11' cache: 'pip' @@ -97,7 +97,7 @@ jobs: uses: docker/setup-buildx-action@8d2750c68a42422c14e847fe6c8ac0403b4cbd6f # ratchet:docker/setup-buildx-action@v3 - name: Login to Docker Hub - uses: docker/login-action@5e57cd118135c172c3672efd75eb46360885c0ef # ratchet:docker/login-action@v3 + uses: docker/login-action@c94ce9fb468520275223c153574b00df6fe4bcc9 # ratchet:docker/login-action@v3 with: username: ${{ secrets.DOCKER_USERNAME }} password: ${{ secrets.DOCKER_TOKEN }} diff --git a/.github/workflows/pr-database-tests.yml b/.github/workflows/pr-database-tests.yml index f9bb4bc34f7..6c77f2a4030 100644 --- a/.github/workflows/pr-database-tests.yml +++ b/.github/workflows/pr-database-tests.yml @@ -40,13 +40,16 @@ jobs: - name: Generate OpenAPI schema and Python client shell: bash + # TODO(Nik): https://linear.app/onyx-app/issue/ENG-1/update-test-infra-to-use-test-license + env: + LICENSE_ENFORCEMENT_ENABLED: "false" run: | ods openapi all # needed for pulling external images otherwise, we hit the "Unauthenticated 
users" limit # https://docs.docker.com/docker-hub/usage/ - name: Login to Docker Hub - uses: docker/login-action@5e57cd118135c172c3672efd75eb46360885c0ef # ratchet:docker/login-action@v3 + uses: docker/login-action@c94ce9fb468520275223c153574b00df6fe4bcc9 # ratchet:docker/login-action@v3 with: username: ${{ secrets.DOCKER_USERNAME }} password: ${{ secrets.DOCKER_TOKEN }} diff --git a/.github/workflows/pr-desktop-build.yml b/.github/workflows/pr-desktop-build.yml new file mode 100644 index 00000000000..9bf0b3f3b96 --- /dev/null +++ b/.github/workflows/pr-desktop-build.yml @@ -0,0 +1,114 @@ +name: Build Desktop App +concurrency: + group: Build-Desktop-App-${{ github.workflow }}-${{ github.head_ref || github.event.workflow_run.head_branch || github.run_id }} + cancel-in-progress: true + +on: + merge_group: + pull_request: + branches: + - main + - "release/**" + paths: + - "desktop/**" + - ".github/workflows/pr-desktop-build.yml" + push: + tags: + - "v*.*.*" + +permissions: + contents: read + +jobs: + build-desktop: + name: Build Desktop (${{ matrix.platform }}) + runs-on: ${{ matrix.os }} + timeout-minutes: 60 + strategy: + fail-fast: false + matrix: + include: + - platform: linux + os: ubuntu-latest + target: x86_64-unknown-linux-gnu + args: "--bundles deb,rpm" + # TODO: Fix and enable the macOS build. + #- platform: macos + # os: macos-latest + # target: universal-apple-darwin + # args: "--target universal-apple-darwin" + # TODO: Fix and enable the Windows build. 
+ #- platform: windows + # os: windows-latest + # target: x86_64-pc-windows-msvc + # args: "" + + steps: + - name: Checkout code + uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd + with: + persist-credentials: false + + - name: Setup node + uses: actions/setup-node@6044e13b5dc448c55e2357c09f80417699197238 + with: + node-version: 24 + cache: "npm" # zizmor: ignore[cache-poisoning] + cache-dependency-path: ./desktop/package-lock.json + + - name: Setup Rust + uses: dtolnay/rust-toolchain@4be9e76fd7c4901c61fb841f559994984270fce7 + with: + toolchain: stable + targets: ${{ matrix.target }} + + - name: Cache Cargo registry and build + uses: actions/cache@cdf6c1fa76f9f475f3d7449005a359c84ca0f306 # zizmor: ignore[cache-poisoning] + with: + path: | + ~/.cargo/bin/ + ~/.cargo/registry/index/ + ~/.cargo/registry/cache/ + ~/.cargo/git/db/ + desktop/src-tauri/target/ + key: ${{ runner.os }}-cargo-${{ hashFiles('desktop/src-tauri/Cargo.lock') }} + restore-keys: | + ${{ runner.os }}-cargo- + + - name: Install Linux dependencies + if: matrix.platform == 'linux' + run: | + sudo apt-get update + sudo apt-get install -y \ + build-essential \ + libglib2.0-dev \ + libgirepository1.0-dev \ + libgtk-3-dev \ + libjavascriptcoregtk-4.1-dev \ + libwebkit2gtk-4.1-dev \ + libayatana-appindicator3-dev \ + gobject-introspection \ + pkg-config \ + curl \ + xdg-utils + + - name: Install npm dependencies + working-directory: ./desktop + run: npm ci + + - name: Build desktop app + working-directory: ./desktop + run: npx tauri build ${{ matrix.args }} + env: + TAURI_SIGNING_PRIVATE_KEY: "" + TAURI_SIGNING_PRIVATE_KEY_PASSWORD: "" + + - name: Upload build artifacts + if: always() + uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f + with: + name: desktop-build-${{ matrix.platform }}-${{ github.run_id }} + path: | + desktop/src-tauri/target/release/bundle/ + retention-days: 7 + if-no-files-found: ignore diff --git 
a/.github/workflows/pr-external-dependency-unit-tests.yml b/.github/workflows/pr-external-dependency-unit-tests.yml index b06f8ec3b3d..90e1c9a8e66 100644 --- a/.github/workflows/pr-external-dependency-unit-tests.yml +++ b/.github/workflows/pr-external-dependency-unit-tests.yml @@ -110,7 +110,7 @@ jobs: # otherwise, we hit the "Unauthenticated users" limit # https://docs.docker.com/docker-hub/usage/ - name: Login to Docker Hub - uses: docker/login-action@5e57cd118135c172c3672efd75eb46360885c0ef # ratchet:docker/login-action@v3 + uses: docker/login-action@c94ce9fb468520275223c153574b00df6fe4bcc9 # ratchet:docker/login-action@v3 with: username: ${{ secrets.DOCKER_USERNAME }} password: ${{ secrets.DOCKER_TOKEN }} @@ -118,6 +118,7 @@ jobs: - name: Create .env file for Docker Compose run: | cat < deployment/docker_compose/.env + COMPOSE_PROFILES=s3-filestore CODE_INTERPRETER_BETA_ENABLED=true DISABLE_TELEMETRY=true EOF diff --git a/.github/workflows/pr-integration-tests.yml b/.github/workflows/pr-integration-tests.yml index bc647c820f6..54e5dad2429 100644 --- a/.github/workflows/pr-integration-tests.yml +++ b/.github/workflows/pr-integration-tests.yml @@ -46,6 +46,7 @@ jobs: timeout-minutes: 45 outputs: test-dirs: ${{ steps.set-matrix.outputs.test-dirs }} + editions: ${{ steps.set-editions.outputs.editions }} steps: - name: Checkout code uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # ratchet:actions/checkout@v6 @@ -56,7 +57,7 @@ jobs: id: set-matrix run: | # Find all leaf-level directories in both test directories - tests_dirs=$(find backend/tests/integration/tests -mindepth 1 -maxdepth 1 -type d ! -name "__pycache__" ! -name "mcp" -exec basename {} \; | sort) + tests_dirs=$(find backend/tests/integration/tests -mindepth 1 -maxdepth 1 -type d ! -name "__pycache__" ! -name "mcp" ! -name "no_vectordb" -exec basename {} \; | sort) connector_dirs=$(find backend/tests/integration/connector_job_tests -mindepth 1 -maxdepth 1 -type d ! 
-name "__pycache__" -exec basename {} \; | sort) # Create JSON array with directory info @@ -72,6 +73,16 @@ jobs: all_dirs="[${all_dirs%,}]" echo "test-dirs=$all_dirs" >> $GITHUB_OUTPUT + - name: Determine editions to test + id: set-editions + run: | + # On PRs, only run EE tests. On merge_group and tags, run both EE and MIT. + if [ "${{ github.event_name }}" = "pull_request" ]; then + echo 'editions=["ee"]' >> $GITHUB_OUTPUT + else + echo 'editions=["ee","mit"]' >> $GITHUB_OUTPUT + fi + build-backend-image: runs-on: [ @@ -109,7 +120,7 @@ jobs: # otherwise, we hit the "Unauthenticated users" limit # https://docs.docker.com/docker-hub/usage/ - name: Login to Docker Hub - uses: docker/login-action@5e57cd118135c172c3672efd75eb46360885c0ef # ratchet:docker/login-action@v3 + uses: docker/login-action@c94ce9fb468520275223c153574b00df6fe4bcc9 # ratchet:docker/login-action@v3 with: username: ${{ secrets.DOCKER_USERNAME }} password: ${{ secrets.DOCKER_TOKEN }} @@ -169,7 +180,7 @@ jobs: # otherwise, we hit the "Unauthenticated users" limit # https://docs.docker.com/docker-hub/usage/ - name: Login to Docker Hub - uses: docker/login-action@5e57cd118135c172c3672efd75eb46360885c0ef # ratchet:docker/login-action@v3 + uses: docker/login-action@c94ce9fb468520275223c153574b00df6fe4bcc9 # ratchet:docker/login-action@v3 with: username: ${{ secrets.DOCKER_USERNAME }} password: ${{ secrets.DOCKER_TOKEN }} @@ -214,7 +225,7 @@ jobs: # otherwise, we hit the "Unauthenticated users" limit # https://docs.docker.com/docker-hub/usage/ - name: Login to Docker Hub - uses: docker/login-action@5e57cd118135c172c3672efd75eb46360885c0ef # ratchet:docker/login-action@v3 + uses: docker/login-action@c94ce9fb468520275223c153574b00df6fe4bcc9 # ratchet:docker/login-action@v3 with: username: ${{ secrets.DOCKER_USERNAME }} password: ${{ secrets.DOCKER_TOKEN }} @@ -267,7 +278,7 @@ jobs: runs-on: - runs-on - runner=4cpu-linux-arm64 - - ${{ format('run-id={0}-integration-tests-job-{1}', github.run_id, 
strategy['job-index']) }} + - ${{ format('run-id={0}-integration-tests-{1}-job-{2}', github.run_id, matrix.edition, strategy['job-index']) }} - extras=ecr-cache timeout-minutes: 45 @@ -275,6 +286,7 @@ jobs: fail-fast: false matrix: test-dir: ${{ fromJson(needs.discover-test-dirs.outputs.test-dirs) }} + edition: ${{ fromJson(needs.discover-test-dirs.outputs.editions) }} steps: - uses: runs-on/action@cd2b598b0515d39d78c38a02d529db87d2196d1e # ratchet:runs-on/action@v2 @@ -287,7 +299,7 @@ jobs: # otherwise, we hit the "Unauthenticated users" limit # https://docs.docker.com/docker-hub/usage/ - name: Login to Docker Hub - uses: docker/login-action@5e57cd118135c172c3672efd75eb46360885c0ef # ratchet:docker/login-action@v3 + uses: docker/login-action@c94ce9fb468520275223c153574b00df6fe4bcc9 # ratchet:docker/login-action@v3 with: username: ${{ secrets.DOCKER_USERNAME }} password: ${{ secrets.DOCKER_TOKEN }} @@ -298,9 +310,11 @@ jobs: env: ECR_CACHE: ${{ env.RUNS_ON_ECR_CACHE }} RUN_ID: ${{ github.run_id }} + EDITION: ${{ matrix.edition }} run: | + # Base config shared by both editions cat < deployment/docker_compose/.env - ENABLE_PAID_ENTERPRISE_EDITION_FEATURES=true + COMPOSE_PROFILES=s3-filestore AUTH_TYPE=basic POSTGRES_POOL_PRE_PING=true POSTGRES_USE_NULL_POOL=true @@ -309,11 +323,20 @@ jobs: ONYX_BACKEND_IMAGE=${ECR_CACHE}:integration-test-backend-test-${RUN_ID} ONYX_MODEL_SERVER_IMAGE=${ECR_CACHE}:integration-test-model-server-test-${RUN_ID} INTEGRATION_TESTS_MODE=true - CHECK_TTL_MANAGEMENT_TASK_FREQUENCY_IN_HOURS=0.001 - AUTO_LLM_UPDATE_INTERVAL_SECONDS=10 MCP_SERVER_ENABLED=true + AUTO_LLM_UPDATE_INTERVAL_SECONDS=10 + EOF + + # EE-only config + if [ "$EDITION" = "ee" ]; then + cat <> deployment/docker_compose/.env + ENABLE_PAID_ENTERPRISE_EDITION_FEATURES=true + # TODO(Nik): https://linear.app/onyx-app/issue/ENG-1/update-test-infra-to-use-test-license + LICENSE_ENFORCEMENT_ENABLED=false + CHECK_TTL_MANAGEMENT_TASK_FREQUENCY_IN_HOURS=0.001 
USE_LIGHTWEIGHT_BACKGROUND_WORKER=false EOF + fi - name: Start Docker containers run: | @@ -376,14 +399,14 @@ jobs: docker compose -f docker-compose.mock-it-services.yml \ -p mock-it-services-stack up -d - - name: Run Integration Tests for ${{ matrix.test-dir.name }} + - name: Run Integration Tests (${{ matrix.edition }}) for ${{ matrix.test-dir.name }} uses: nick-fields/retry@ce71cc2ab81d554ebbe88c79ab5975992d79ba08 # ratchet:nick-fields/retry@v3 with: timeout_minutes: 20 max_attempts: 3 retry_wait_seconds: 10 command: | - echo "Running integration tests for ${{ matrix.test-dir.path }}..." + echo "Running ${{ matrix.edition }} integration tests for ${{ matrix.test-dir.path }}..." docker run --rm --network onyx_default \ --name test-runner \ -e POSTGRES_HOST=relational_db \ @@ -441,10 +464,143 @@ jobs: if: always() uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f with: - name: docker-all-logs-${{ matrix.test-dir.name }} + name: docker-all-logs-${{ matrix.edition }}-${{ matrix.test-dir.name }} path: ${{ github.workspace }}/docker-compose.log # ------------------------------------------------------------ + no-vectordb-tests: + needs: [build-backend-image, build-integration-image] + runs-on: + [ + runs-on, + runner=4cpu-linux-arm64, + "run-id=${{ github.run_id }}-no-vectordb-tests", + "extras=ecr-cache", + ] + timeout-minutes: 45 + + steps: + - uses: runs-on/action@cd2b598b0515d39d78c38a02d529db87d2196d1e # ratchet:runs-on/action@v2 + - name: Checkout code + uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # ratchet:actions/checkout@v6 + with: + persist-credentials: false + + - name: Login to Docker Hub + uses: docker/login-action@c94ce9fb468520275223c153574b00df6fe4bcc9 # ratchet:docker/login-action@v3 + with: + username: ${{ secrets.DOCKER_USERNAME }} + password: ${{ secrets.DOCKER_TOKEN }} + + - name: Create .env file for no-vectordb Docker Compose + env: + ECR_CACHE: ${{ env.RUNS_ON_ECR_CACHE }} + RUN_ID: ${{ github.run_id }} + 
run: | + cat < deployment/docker_compose/.env + COMPOSE_PROFILES=s3-filestore + ENABLE_PAID_ENTERPRISE_EDITION_FEATURES=true + LICENSE_ENFORCEMENT_ENABLED=false + AUTH_TYPE=basic + POSTGRES_POOL_PRE_PING=true + POSTGRES_USE_NULL_POOL=true + REQUIRE_EMAIL_VERIFICATION=false + DISABLE_TELEMETRY=true + DISABLE_VECTOR_DB=true + ONYX_BACKEND_IMAGE=${ECR_CACHE}:integration-test-backend-test-${RUN_ID} + INTEGRATION_TESTS_MODE=true + USE_LIGHTWEIGHT_BACKGROUND_WORKER=true + EOF + + # Start only the services needed for no-vectordb mode (no Vespa, no model servers) + - name: Start Docker containers (no-vectordb) + run: | + cd deployment/docker_compose + docker compose -f docker-compose.yml -f docker-compose.no-vectordb.yml -f docker-compose.dev.yml up \ + relational_db \ + cache \ + minio \ + api_server \ + background \ + -d + id: start_docker_no_vectordb + + - name: Wait for services to be ready + run: | + echo "Starting wait-for-service script (no-vectordb)..." + start_time=$(date +%s) + timeout=300 + while true; do + current_time=$(date +%s) + elapsed_time=$((current_time - start_time)) + if [ $elapsed_time -ge $timeout ]; then + echo "Timeout reached. Service did not become ready in $timeout seconds." + exit 1 + fi + response=$(curl -s -o /dev/null -w "%{http_code}" http://localhost:8080/health || echo "curl_error") + if [ "$response" = "200" ]; then + echo "API server is ready!" + break + elif [ "$response" = "curl_error" ]; then + echo "Curl encountered an error; retrying..." + else + echo "Service not ready yet (HTTP $response). Retrying in 5 seconds..." + fi + sleep 5 + done + + - name: Run No-VectorDB Integration Tests + uses: nick-fields/retry@ce71cc2ab81d554ebbe88c79ab5975992d79ba08 # ratchet:nick-fields/retry@v3 + with: + timeout_minutes: 20 + max_attempts: 3 + retry_wait_seconds: 10 + command: | + echo "Running no-vectordb integration tests..." 
+ docker run --rm --network onyx_default \ + --name test-runner \ + -e POSTGRES_HOST=relational_db \ + -e POSTGRES_USER=postgres \ + -e POSTGRES_PASSWORD=password \ + -e POSTGRES_DB=postgres \ + -e DB_READONLY_USER=db_readonly_user \ + -e DB_READONLY_PASSWORD=password \ + -e POSTGRES_POOL_PRE_PING=true \ + -e POSTGRES_USE_NULL_POOL=true \ + -e REDIS_HOST=cache \ + -e API_SERVER_HOST=api_server \ + -e OPENAI_API_KEY=${OPENAI_API_KEY} \ + -e TEST_WEB_HOSTNAME=test-runner \ + ${{ env.RUNS_ON_ECR_CACHE }}:integration-test-${{ github.run_id }} \ + /app/tests/integration/tests/no_vectordb + + - name: Dump API server logs (no-vectordb) + if: always() + run: | + cd deployment/docker_compose + docker compose -f docker-compose.yml -f docker-compose.no-vectordb.yml -f docker-compose.dev.yml \ + logs --no-color api_server > $GITHUB_WORKSPACE/api_server_no_vectordb.log || true + + - name: Dump all-container logs (no-vectordb) + if: always() + run: | + cd deployment/docker_compose + docker compose -f docker-compose.yml -f docker-compose.no-vectordb.yml -f docker-compose.dev.yml \ + logs --no-color > $GITHUB_WORKSPACE/docker-compose-no-vectordb.log || true + + - name: Upload logs (no-vectordb) + if: always() + uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f + with: + name: docker-all-logs-no-vectordb + path: ${{ github.workspace }}/docker-compose-no-vectordb.log + + - name: Stop Docker containers (no-vectordb) + if: always() + run: | + cd deployment/docker_compose + docker compose -f docker-compose.yml -f docker-compose.no-vectordb.yml -f docker-compose.dev.yml down -v + multitenant-tests: needs: [build-backend-image, build-model-server-image, build-integration-image] @@ -465,7 +621,7 @@ jobs: persist-credentials: false - name: Login to Docker Hub - uses: docker/login-action@5e57cd118135c172c3672efd75eb46360885c0ef # ratchet:docker/login-action@v3 + uses: docker/login-action@c94ce9fb468520275223c153574b00df6fe4bcc9 # ratchet:docker/login-action@v3 with: 
username: ${{ secrets.DOCKER_USERNAME }} password: ${{ secrets.DOCKER_TOKEN }} @@ -477,6 +633,7 @@ jobs: run: | cd deployment/docker_compose ENABLE_PAID_ENTERPRISE_EDITION_FEATURES=true \ + LICENSE_ENFORCEMENT_ENABLED=false \ MULTI_TENANT=true \ AUTH_TYPE=cloud \ REQUIRE_EMAIL_VERIFICATION=false \ @@ -583,7 +740,7 @@ jobs: # NOTE: Github-hosted runners have about 20s faster queue times and are preferred here. runs-on: ubuntu-slim timeout-minutes: 45 - needs: [integration-tests, multitenant-tests] + needs: [integration-tests, no-vectordb-tests, multitenant-tests] if: ${{ always() }} steps: - name: Check job status diff --git a/.github/workflows/pr-jest-tests.yml b/.github/workflows/pr-jest-tests.yml index 09efee23aa9..e7fa59d117b 100644 --- a/.github/workflows/pr-jest-tests.yml +++ b/.github/workflows/pr-jest-tests.yml @@ -28,7 +28,7 @@ jobs: persist-credentials: false - name: Setup node - uses: actions/setup-node@395ad3262231945c25e8478fd5baf05154b1d79f # ratchet:actions/setup-node@v4 + uses: actions/setup-node@6044e13b5dc448c55e2357c09f80417699197238 # ratchet:actions/setup-node@v4 with: node-version: 22 cache: "npm" diff --git a/.github/workflows/pr-mit-integration-tests.yml b/.github/workflows/pr-mit-integration-tests.yml deleted file mode 100644 index 1cac784451d..00000000000 --- a/.github/workflows/pr-mit-integration-tests.yml +++ /dev/null @@ -1,442 +0,0 @@ -name: Run MIT Integration Tests v2 -concurrency: - group: Run-MIT-Integration-Tests-${{ github.workflow }}-${{ github.head_ref || github.event.workflow_run.head_branch || github.run_id }} - cancel-in-progress: true - -on: - merge_group: - types: [checks_requested] - push: - tags: - - "v*.*.*" - -permissions: - contents: read - -env: - # Test Environment Variables - OPENAI_API_KEY: ${{ secrets.OPENAI_API_KEY }} - SLACK_BOT_TOKEN: ${{ secrets.SLACK_BOT_TOKEN }} - EXA_API_KEY: ${{ secrets.EXA_API_KEY }} - CONFLUENCE_TEST_SPACE_URL: ${{ vars.CONFLUENCE_TEST_SPACE_URL }} - CONFLUENCE_USER_NAME: ${{ 
vars.CONFLUENCE_USER_NAME }} - CONFLUENCE_ACCESS_TOKEN: ${{ secrets.CONFLUENCE_ACCESS_TOKEN }} - CONFLUENCE_ACCESS_TOKEN_SCOPED: ${{ secrets.CONFLUENCE_ACCESS_TOKEN_SCOPED }} - JIRA_BASE_URL: ${{ secrets.JIRA_BASE_URL }} - JIRA_USER_EMAIL: ${{ secrets.JIRA_USER_EMAIL }} - JIRA_API_TOKEN: ${{ secrets.JIRA_API_TOKEN }} - JIRA_API_TOKEN_SCOPED: ${{ secrets.JIRA_API_TOKEN_SCOPED }} - PERM_SYNC_SHAREPOINT_CLIENT_ID: ${{ secrets.PERM_SYNC_SHAREPOINT_CLIENT_ID }} - PERM_SYNC_SHAREPOINT_PRIVATE_KEY: ${{ secrets.PERM_SYNC_SHAREPOINT_PRIVATE_KEY }} - PERM_SYNC_SHAREPOINT_CERTIFICATE_PASSWORD: ${{ secrets.PERM_SYNC_SHAREPOINT_CERTIFICATE_PASSWORD }} - PERM_SYNC_SHAREPOINT_DIRECTORY_ID: ${{ secrets.PERM_SYNC_SHAREPOINT_DIRECTORY_ID }} - -jobs: - discover-test-dirs: - # NOTE: Github-hosted runners have about 20s faster queue times and are preferred here. - runs-on: ubuntu-slim - timeout-minutes: 45 - outputs: - test-dirs: ${{ steps.set-matrix.outputs.test-dirs }} - steps: - - name: Checkout code - uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # ratchet:actions/checkout@v6 - with: - persist-credentials: false - - - name: Discover test directories - id: set-matrix - run: | - # Find all leaf-level directories in both test directories - tests_dirs=$(find backend/tests/integration/tests -mindepth 1 -maxdepth 1 -type d ! -name "__pycache__" ! -name "mcp" -exec basename {} \; | sort) - connector_dirs=$(find backend/tests/integration/connector_job_tests -mindepth 1 -maxdepth 1 -type d ! 
-name "__pycache__" -exec basename {} \; | sort) - - # Create JSON array with directory info - all_dirs="" - for dir in $tests_dirs; do - all_dirs="$all_dirs{\"path\":\"tests/$dir\",\"name\":\"tests-$dir\"}," - done - for dir in $connector_dirs; do - all_dirs="$all_dirs{\"path\":\"connector_job_tests/$dir\",\"name\":\"connector-$dir\"}," - done - - # Remove trailing comma and wrap in array - all_dirs="[${all_dirs%,}]" - echo "test-dirs=$all_dirs" >> $GITHUB_OUTPUT - - build-backend-image: - runs-on: - [ - runs-on, - runner=1cpu-linux-arm64, - "run-id=${{ github.run_id }}-build-backend-image", - "extras=ecr-cache", - ] - timeout-minutes: 45 - steps: - - uses: runs-on/action@cd2b598b0515d39d78c38a02d529db87d2196d1e # ratchet:runs-on/action@v2 - - name: Checkout code - uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # ratchet:actions/checkout@v6 - with: - persist-credentials: false - - - name: Format branch name for cache - id: format-branch - env: - PR_NUMBER: ${{ github.event.pull_request.number }} - REF_NAME: ${{ github.ref_name }} - run: | - if [ -n "${PR_NUMBER}" ]; then - CACHE_SUFFIX="${PR_NUMBER}" - else - # shellcheck disable=SC2001 - CACHE_SUFFIX=$(echo "${REF_NAME}" | sed 's/[^A-Za-z0-9._-]/-/g') - fi - echo "cache-suffix=${CACHE_SUFFIX}" >> $GITHUB_OUTPUT - - - name: Set up Docker Buildx - uses: docker/setup-buildx-action@8d2750c68a42422c14e847fe6c8ac0403b4cbd6f # ratchet:docker/setup-buildx-action@v3 - - # needed for pulling Vespa, Redis, Postgres, and Minio images - # otherwise, we hit the "Unauthenticated users" limit - # https://docs.docker.com/docker-hub/usage/ - - name: Login to Docker Hub - uses: docker/login-action@5e57cd118135c172c3672efd75eb46360885c0ef # ratchet:docker/login-action@v3 - with: - username: ${{ secrets.DOCKER_USERNAME }} - password: ${{ secrets.DOCKER_TOKEN }} - - - name: Build and push Backend Docker image - uses: docker/build-push-action@263435318d21b8e681c14492fe198d362a7d2c83 # 
ratchet:docker/build-push-action@v6 - with: - context: ./backend - file: ./backend/Dockerfile - push: true - tags: ${{ env.RUNS_ON_ECR_CACHE }}:integration-test-backend-test-${{ github.run_id }} - cache-from: | - type=registry,ref=${{ env.RUNS_ON_ECR_CACHE }}:backend-cache-${{ github.event.pull_request.head.sha || github.sha }} - type=registry,ref=${{ env.RUNS_ON_ECR_CACHE }}:backend-cache-${{ steps.format-branch.outputs.cache-suffix }} - type=registry,ref=${{ env.RUNS_ON_ECR_CACHE }}:backend-cache - type=registry,ref=onyxdotapp/onyx-backend:latest - cache-to: | - type=registry,ref=${{ env.RUNS_ON_ECR_CACHE }}:backend-cache-${{ github.event.pull_request.head.sha || github.sha }},mode=max - type=registry,ref=${{ env.RUNS_ON_ECR_CACHE }}:backend-cache-${{ steps.format-branch.outputs.cache-suffix }},mode=max - type=registry,ref=${{ env.RUNS_ON_ECR_CACHE }}:backend-cache,mode=max - no-cache: ${{ vars.DOCKER_NO_CACHE == 'true' }} - - build-model-server-image: - runs-on: - [ - runs-on, - runner=1cpu-linux-arm64, - "run-id=${{ github.run_id }}-build-model-server-image", - "extras=ecr-cache", - ] - timeout-minutes: 45 - steps: - - uses: runs-on/action@cd2b598b0515d39d78c38a02d529db87d2196d1e # ratchet:runs-on/action@v2 - - name: Checkout code - uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # ratchet:actions/checkout@v6 - with: - persist-credentials: false - - - name: Format branch name for cache - id: format-branch - env: - PR_NUMBER: ${{ github.event.pull_request.number }} - REF_NAME: ${{ github.ref_name }} - run: | - if [ -n "${PR_NUMBER}" ]; then - CACHE_SUFFIX="${PR_NUMBER}" - else - # shellcheck disable=SC2001 - CACHE_SUFFIX=$(echo "${REF_NAME}" | sed 's/[^A-Za-z0-9._-]/-/g') - fi - echo "cache-suffix=${CACHE_SUFFIX}" >> $GITHUB_OUTPUT - - - name: Set up Docker Buildx - uses: docker/setup-buildx-action@8d2750c68a42422c14e847fe6c8ac0403b4cbd6f # ratchet:docker/setup-buildx-action@v3 - - # needed for pulling Vespa, Redis, Postgres, and Minio images - # 
otherwise, we hit the "Unauthenticated users" limit - # https://docs.docker.com/docker-hub/usage/ - - name: Login to Docker Hub - uses: docker/login-action@5e57cd118135c172c3672efd75eb46360885c0ef # ratchet:docker/login-action@v3 - with: - username: ${{ secrets.DOCKER_USERNAME }} - password: ${{ secrets.DOCKER_TOKEN }} - - - name: Build and push Model Server Docker image - uses: docker/build-push-action@263435318d21b8e681c14492fe198d362a7d2c83 # ratchet:docker/build-push-action@v6 - with: - context: ./backend - file: ./backend/Dockerfile.model_server - push: true - tags: ${{ env.RUNS_ON_ECR_CACHE }}:integration-test-model-server-test-${{ github.run_id }} - cache-from: | - type=registry,ref=${{ env.RUNS_ON_ECR_CACHE }}:model-server-cache-${{ github.event.pull_request.head.sha || github.sha }} - type=registry,ref=${{ env.RUNS_ON_ECR_CACHE }}:model-server-cache-${{ steps.format-branch.outputs.cache-suffix }} - type=registry,ref=${{ env.RUNS_ON_ECR_CACHE }}:model-server-cache - type=registry,ref=onyxdotapp/onyx-model-server:latest - cache-to: | - type=registry,ref=${{ env.RUNS_ON_ECR_CACHE }}:model-server-cache-${{ github.event.pull_request.head.sha || github.sha }},mode=max - type=registry,ref=${{ env.RUNS_ON_ECR_CACHE }}:model-server-cache-${{ steps.format-branch.outputs.cache-suffix }},mode=max - type=registry,ref=${{ env.RUNS_ON_ECR_CACHE }}:model-server-cache,mode=max - - build-integration-image: - runs-on: - [ - runs-on, - runner=2cpu-linux-arm64, - "run-id=${{ github.run_id }}-build-integration-image", - "extras=ecr-cache", - ] - timeout-minutes: 45 - steps: - - uses: runs-on/action@cd2b598b0515d39d78c38a02d529db87d2196d1e # ratchet:runs-on/action@v2 - - name: Checkout code - uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # ratchet:actions/checkout@v6 - with: - persist-credentials: false - - - name: Format branch name for cache - id: format-branch - env: - PR_NUMBER: ${{ github.event.pull_request.number }} - REF_NAME: ${{ github.ref_name }} - 
run: | - if [ -n "${PR_NUMBER}" ]; then - CACHE_SUFFIX="${PR_NUMBER}" - else - # shellcheck disable=SC2001 - CACHE_SUFFIX=$(echo "${REF_NAME}" | sed 's/[^A-Za-z0-9._-]/-/g') - fi - echo "cache-suffix=${CACHE_SUFFIX}" >> $GITHUB_OUTPUT - - - name: Set up Docker Buildx - uses: docker/setup-buildx-action@8d2750c68a42422c14e847fe6c8ac0403b4cbd6f # ratchet:docker/setup-buildx-action@v3 - - # needed for pulling openapitools/openapi-generator-cli - # otherwise, we hit the "Unauthenticated users" limit - # https://docs.docker.com/docker-hub/usage/ - - name: Login to Docker Hub - uses: docker/login-action@5e57cd118135c172c3672efd75eb46360885c0ef # ratchet:docker/login-action@v3 - with: - username: ${{ secrets.DOCKER_USERNAME }} - password: ${{ secrets.DOCKER_TOKEN }} - - - name: Build and push integration test image with Docker Bake - env: - INTEGRATION_REPOSITORY: ${{ env.RUNS_ON_ECR_CACHE }} - TAG: integration-test-${{ github.run_id }} - CACHE_SUFFIX: ${{ steps.format-branch.outputs.cache-suffix }} - HEAD_SHA: ${{ github.event.pull_request.head.sha || github.sha }} - run: | - docker buildx bake --push \ - --set backend.cache-from=type=registry,ref=${RUNS_ON_ECR_CACHE}:backend-cache-${HEAD_SHA} \ - --set backend.cache-from=type=registry,ref=${RUNS_ON_ECR_CACHE}:backend-cache-${CACHE_SUFFIX} \ - --set backend.cache-from=type=registry,ref=${RUNS_ON_ECR_CACHE}:backend-cache \ - --set backend.cache-from=type=registry,ref=onyxdotapp/onyx-backend:latest \ - --set backend.cache-to=type=registry,ref=${RUNS_ON_ECR_CACHE}:backend-cache-${HEAD_SHA},mode=max \ - --set backend.cache-to=type=registry,ref=${RUNS_ON_ECR_CACHE}:backend-cache-${CACHE_SUFFIX},mode=max \ - --set backend.cache-to=type=registry,ref=${RUNS_ON_ECR_CACHE}:backend-cache,mode=max \ - --set integration.cache-from=type=registry,ref=${RUNS_ON_ECR_CACHE}:integration-cache-${HEAD_SHA} \ - --set integration.cache-from=type=registry,ref=${RUNS_ON_ECR_CACHE}:integration-cache-${CACHE_SUFFIX} \ - --set 
integration.cache-from=type=registry,ref=${RUNS_ON_ECR_CACHE}:integration-cache \ - --set integration.cache-to=type=registry,ref=${RUNS_ON_ECR_CACHE}:integration-cache-${HEAD_SHA},mode=max \ - --set integration.cache-to=type=registry,ref=${RUNS_ON_ECR_CACHE}:integration-cache-${CACHE_SUFFIX},mode=max \ - --set integration.cache-to=type=registry,ref=${RUNS_ON_ECR_CACHE}:integration-cache,mode=max \ - integration - - integration-tests-mit: - needs: - [ - discover-test-dirs, - build-backend-image, - build-model-server-image, - build-integration-image, - ] - runs-on: - - runs-on - - runner=4cpu-linux-arm64 - - ${{ format('run-id={0}-integration-tests-mit-job-{1}', github.run_id, strategy['job-index']) }} - - extras=ecr-cache - timeout-minutes: 45 - - strategy: - fail-fast: false - matrix: - test-dir: ${{ fromJson(needs.discover-test-dirs.outputs.test-dirs) }} - - steps: - - uses: runs-on/action@cd2b598b0515d39d78c38a02d529db87d2196d1e # ratchet:runs-on/action@v2 - - name: Checkout code - uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # ratchet:actions/checkout@v6 - with: - persist-credentials: false - - # needed for pulling Vespa, Redis, Postgres, and Minio images - # otherwise, we hit the "Unauthenticated users" limit - # https://docs.docker.com/docker-hub/usage/ - - name: Login to Docker Hub - uses: docker/login-action@5e57cd118135c172c3672efd75eb46360885c0ef # ratchet:docker/login-action@v3 - with: - username: ${{ secrets.DOCKER_USERNAME }} - password: ${{ secrets.DOCKER_TOKEN }} - - # NOTE: Use pre-ping/null pool to reduce flakiness due to dropped connections - # NOTE: don't need web server for integration tests - - name: Create .env file for Docker Compose - env: - ECR_CACHE: ${{ env.RUNS_ON_ECR_CACHE }} - RUN_ID: ${{ github.run_id }} - run: | - cat < deployment/docker_compose/.env - AUTH_TYPE=basic - POSTGRES_POOL_PRE_PING=true - POSTGRES_USE_NULL_POOL=true - REQUIRE_EMAIL_VERIFICATION=false - DISABLE_TELEMETRY=true - 
ONYX_BACKEND_IMAGE=${ECR_CACHE}:integration-test-backend-test-${RUN_ID} - ONYX_MODEL_SERVER_IMAGE=${ECR_CACHE}:integration-test-model-server-test-${RUN_ID} - INTEGRATION_TESTS_MODE=true - MCP_SERVER_ENABLED=true - AUTO_LLM_UPDATE_INTERVAL_SECONDS=10 - EOF - - - name: Start Docker containers - run: | - cd deployment/docker_compose - docker compose -f docker-compose.yml -f docker-compose.dev.yml up \ - relational_db \ - index \ - cache \ - minio \ - api_server \ - inference_model_server \ - indexing_model_server \ - background \ - -d - id: start_docker - - - name: Wait for services to be ready - run: | - echo "Starting wait-for-service script..." - - wait_for_service() { - local url=$1 - local label=$2 - local timeout=${3:-300} # default 5 minutes - local start_time - start_time=$(date +%s) - - while true; do - local current_time - current_time=$(date +%s) - local elapsed_time=$((current_time - start_time)) - - if [ $elapsed_time -ge $timeout ]; then - echo "Timeout reached. ${label} did not become ready in $timeout seconds." - exit 1 - fi - - local response - response=$(curl -s -o /dev/null -w "%{http_code}" "$url" || echo "curl_error") - - if [ "$response" = "200" ]; then - echo "${label} is ready!" - break - elif [ "$response" = "curl_error" ]; then - echo "Curl encountered an error while checking ${label}. Retrying in 5 seconds..." - else - echo "${label} not ready yet (HTTP status $response). Retrying in 5 seconds..." - fi - - sleep 5 - done - } - - wait_for_service "http://localhost:8080/health" "API server" - echo "Finished waiting for services." 
- - - name: Start Mock Services - run: | - cd backend/tests/integration/mock_services - docker compose -f docker-compose.mock-it-services.yml \ - -p mock-it-services-stack up -d - - # NOTE: Use pre-ping/null to reduce flakiness due to dropped connections - - name: Run Integration Tests for ${{ matrix.test-dir.name }} - uses: nick-fields/retry@ce71cc2ab81d554ebbe88c79ab5975992d79ba08 # ratchet:nick-fields/retry@v3 - with: - timeout_minutes: 20 - max_attempts: 3 - retry_wait_seconds: 10 - command: | - echo "Running integration tests for ${{ matrix.test-dir.path }}..." - docker run --rm --network onyx_default \ - --name test-runner \ - -e POSTGRES_HOST=relational_db \ - -e POSTGRES_USER=postgres \ - -e POSTGRES_PASSWORD=password \ - -e POSTGRES_DB=postgres \ - -e DB_READONLY_USER=db_readonly_user \ - -e DB_READONLY_PASSWORD=password \ - -e POSTGRES_POOL_PRE_PING=true \ - -e POSTGRES_USE_NULL_POOL=true \ - -e VESPA_HOST=index \ - -e REDIS_HOST=cache \ - -e API_SERVER_HOST=api_server \ - -e OPENAI_API_KEY=${OPENAI_API_KEY} \ - -e EXA_API_KEY=${EXA_API_KEY} \ - -e SLACK_BOT_TOKEN=${SLACK_BOT_TOKEN} \ - -e CONFLUENCE_TEST_SPACE_URL=${CONFLUENCE_TEST_SPACE_URL} \ - -e CONFLUENCE_USER_NAME=${CONFLUENCE_USER_NAME} \ - -e CONFLUENCE_ACCESS_TOKEN=${CONFLUENCE_ACCESS_TOKEN} \ - -e CONFLUENCE_ACCESS_TOKEN_SCOPED=${CONFLUENCE_ACCESS_TOKEN_SCOPED} \ - -e JIRA_BASE_URL=${JIRA_BASE_URL} \ - -e JIRA_USER_EMAIL=${JIRA_USER_EMAIL} \ - -e JIRA_API_TOKEN=${JIRA_API_TOKEN} \ - -e JIRA_API_TOKEN_SCOPED=${JIRA_API_TOKEN_SCOPED} \ - -e PERM_SYNC_SHAREPOINT_CLIENT_ID=${PERM_SYNC_SHAREPOINT_CLIENT_ID} \ - -e PERM_SYNC_SHAREPOINT_PRIVATE_KEY="${PERM_SYNC_SHAREPOINT_PRIVATE_KEY}" \ - -e PERM_SYNC_SHAREPOINT_CERTIFICATE_PASSWORD=${PERM_SYNC_SHAREPOINT_CERTIFICATE_PASSWORD} \ - -e PERM_SYNC_SHAREPOINT_DIRECTORY_ID=${PERM_SYNC_SHAREPOINT_DIRECTORY_ID} \ - -e TEST_WEB_HOSTNAME=test-runner \ - -e MOCK_CONNECTOR_SERVER_HOST=mock_connector_server \ - -e MOCK_CONNECTOR_SERVER_PORT=8001 \ - ${{ 
env.RUNS_ON_ECR_CACHE }}:integration-test-${{ github.run_id }} \ - /app/tests/integration/${{ matrix.test-dir.path }} - - # ------------------------------------------------------------ - # Always gather logs BEFORE "down": - - name: Dump API server logs - if: always() - run: | - cd deployment/docker_compose - docker compose logs --no-color api_server > $GITHUB_WORKSPACE/api_server.log || true - - - name: Dump all-container logs (optional) - if: always() - run: | - cd deployment/docker_compose - docker compose logs --no-color > $GITHUB_WORKSPACE/docker-compose.log || true - - - name: Upload logs - if: always() - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f - with: - name: docker-all-logs-${{ matrix.test-dir.name }} - path: ${{ github.workspace }}/docker-compose.log - # ------------------------------------------------------------ - - required: - # NOTE: Github-hosted runners have about 20s faster queue times and are preferred here. - runs-on: ubuntu-slim - timeout-minutes: 45 - needs: [integration-tests-mit] - if: ${{ always() }} - steps: - - name: Check job status - if: ${{ contains(needs.*.result, 'failure') || contains(needs.*.result, 'cancelled') || contains(needs.*.result, 'skipped') }} - run: exit 1 diff --git a/.github/workflows/pr-playwright-tests.yml b/.github/workflows/pr-playwright-tests.yml index 7b27341647a..9ad6e8cf10b 100644 --- a/.github/workflows/pr-playwright-tests.yml +++ b/.github/workflows/pr-playwright-tests.yml @@ -52,6 +52,9 @@ env: MCP_SERVER_PUBLIC_HOST: host.docker.internal MCP_SERVER_PUBLIC_URL: http://host.docker.internal:8004/mcp + # Visual regression S3 bucket (shared across all jobs) + PLAYWRIGHT_S3_BUCKET: onyx-playwright-artifacts + jobs: build-web-image: runs-on: @@ -90,7 +93,7 @@ jobs: # needed for pulling external images otherwise, we hit the "Unauthenticated users" limit # https://docs.docker.com/docker-hub/usage/ - name: Login to Docker Hub - uses: 
docker/login-action@5e57cd118135c172c3672efd75eb46360885c0ef # ratchet:docker/login-action@v3 + uses: docker/login-action@c94ce9fb468520275223c153574b00df6fe4bcc9 # ratchet:docker/login-action@v3 with: username: ${{ secrets.DOCKER_USERNAME }} password: ${{ secrets.DOCKER_TOKEN }} @@ -151,7 +154,7 @@ jobs: # needed for pulling external images otherwise, we hit the "Unauthenticated users" limit # https://docs.docker.com/docker-hub/usage/ - name: Login to Docker Hub - uses: docker/login-action@5e57cd118135c172c3672efd75eb46360885c0ef # ratchet:docker/login-action@v3 + uses: docker/login-action@c94ce9fb468520275223c153574b00df6fe4bcc9 # ratchet:docker/login-action@v3 with: username: ${{ secrets.DOCKER_USERNAME }} password: ${{ secrets.DOCKER_TOKEN }} @@ -212,7 +215,7 @@ jobs: # needed for pulling external images otherwise, we hit the "Unauthenticated users" limit # https://docs.docker.com/docker-hub/usage/ - name: Login to Docker Hub - uses: docker/login-action@5e57cd118135c172c3672efd75eb46360885c0ef # ratchet:docker/login-action@v3 + uses: docker/login-action@c94ce9fb468520275223c153574b00df6fe4bcc9 # ratchet:docker/login-action@v3 with: username: ${{ secrets.DOCKER_USERNAME }} password: ${{ secrets.DOCKER_TOKEN }} @@ -239,6 +242,9 @@ jobs: playwright-tests: needs: [build-web-image, build-backend-image, build-model-server-image] name: Playwright Tests (${{ matrix.project }}) + permissions: + id-token: write # Required for OIDC-based AWS credential exchange (S3 access) + contents: read runs-on: - runs-on - runner=8cpu-linux-arm64 @@ -249,7 +255,7 @@ jobs: strategy: fail-fast: false matrix: - project: [admin, no-auth, exclusive] + project: [admin, exclusive] steps: - uses: runs-on/action@cd2b598b0515d39d78c38a02d529db87d2196d1e # ratchet:runs-on/action@v2 @@ -259,7 +265,7 @@ jobs: persist-credentials: false - name: Setup node - uses: actions/setup-node@395ad3262231945c25e8478fd5baf05154b1d79f # ratchet:actions/setup-node@v4 + uses: 
actions/setup-node@6044e13b5dc448c55e2357c09f80417699197238 # ratchet:actions/setup-node@v4 with: node-version: 22 cache: "npm" @@ -289,7 +295,10 @@ jobs: RUN_ID: ${{ github.run_id }} run: | cat < deployment/docker_compose/.env + COMPOSE_PROFILES=s3-filestore ENABLE_PAID_ENTERPRISE_EDITION_FEATURES=true + # TODO(Nik): https://linear.app/onyx-app/issue/ENG-1/update-test-infra-to-use-test-license + LICENSE_ENFORCEMENT_ENABLED=false AUTH_TYPE=basic GEN_AI_API_KEY=${OPENAI_API_KEY_VALUE} EXA_API_KEY=${EXA_API_KEY_VALUE} @@ -299,15 +308,12 @@ jobs: ONYX_MODEL_SERVER_IMAGE=${ECR_CACHE}:playwright-test-model-server-${RUN_ID} ONYX_WEB_SERVER_IMAGE=${ECR_CACHE}:playwright-test-web-${RUN_ID} EOF - if [ "${{ matrix.project }}" = "no-auth" ]; then - echo "PLAYWRIGHT_FORCE_EMPTY_LLM_PROVIDERS=true" >> deployment/docker_compose/.env - fi # needed for pulling Vespa, Redis, Postgres, and Minio images # otherwise, we hit the "Unauthenticated users" limit # https://docs.docker.com/docker-hub/usage/ - name: Login to Docker Hub - uses: docker/login-action@5e57cd118135c172c3672efd75eb46360885c0ef # ratchet:docker/login-action@v3 + uses: docker/login-action@c94ce9fb468520275223c153574b00df6fe4bcc9 # ratchet:docker/login-action@v3 with: username: ${{ secrets.DOCKER_USERNAME }} password: ${{ secrets.DOCKER_TOKEN }} @@ -428,11 +434,6 @@ jobs: env: PROJECT: ${{ matrix.project }} run: | - # Create test-results directory to ensure it exists for artifact upload - mkdir -p test-results - if [ "${PROJECT}" = "no-auth" ]; then - export PLAYWRIGHT_FORCE_EMPTY_LLM_PROVIDERS=true - fi npx playwright test --project ${PROJECT} - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f @@ -440,9 +441,134 @@ jobs: with: # Includes test results and trace.zip files name: playwright-test-results-${{ matrix.project }}-${{ github.run_id }} - path: ./web/test-results/ + path: ./web/output/playwright/ + retention-days: 30 + + - name: Upload screenshots + uses: 
actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f + if: always() + with: + name: playwright-screenshots-${{ matrix.project }}-${{ github.run_id }} + path: ./web/output/screenshots/ + retention-days: 30 + + # --- Visual Regression Diff --- + - name: Configure AWS credentials + if: always() + uses: aws-actions/configure-aws-credentials@61815dcd50bd041e203e49132bacad1fd04d2708 + with: + role-to-assume: ${{ secrets.AWS_OIDC_ROLE_ARN }} + aws-region: us-east-2 + + - name: Install the latest version of uv + if: always() + uses: astral-sh/setup-uv@61cb8a9741eeb8a550a1b8544337180c0fc8476b # ratchet:astral-sh/setup-uv@v7 + with: + enable-cache: false + version: "0.9.9" + + - name: Determine baseline revision + if: always() + id: baseline-rev + env: + EVENT_NAME: ${{ github.event_name }} + BASE_REF: ${{ github.event.pull_request.base.ref }} + MERGE_GROUP_BASE_REF: ${{ github.event.merge_group.base_ref }} + GH_REF: ${{ github.ref }} + REF_NAME: ${{ github.ref_name }} + run: | + if [ "${EVENT_NAME}" = "pull_request" ]; then + # PRs compare against the base branch (e.g. main, release/2.5) + echo "rev=${BASE_REF}" >> "$GITHUB_OUTPUT" + elif [ "${EVENT_NAME}" = "merge_group" ]; then + # Merge queue compares against the target branch (e.g. 
refs/heads/main -> main) + echo "rev=${MERGE_GROUP_BASE_REF#refs/heads/}" >> "$GITHUB_OUTPUT" + elif [[ "${GH_REF}" == refs/tags/* ]]; then + # Tag builds compare against the tag name + echo "rev=${REF_NAME}" >> "$GITHUB_OUTPUT" + else + # Push builds (main, release/*) compare against the branch name + echo "rev=${REF_NAME}" >> "$GITHUB_OUTPUT" + fi + + - name: Generate screenshot diff report + if: always() + env: + PROJECT: ${{ matrix.project }} + PLAYWRIGHT_S3_BUCKET: ${{ env.PLAYWRIGHT_S3_BUCKET }} + BASELINE_REV: ${{ steps.baseline-rev.outputs.rev }} + run: | + uv run --no-sync --with onyx-devtools ods screenshot-diff compare \ + --project "${PROJECT}" \ + --rev "${BASELINE_REV}" + + - name: Upload visual diff report to S3 + if: always() + env: + PROJECT: ${{ matrix.project }} + PR_NUMBER: ${{ github.event.pull_request.number }} + RUN_ID: ${{ github.run_id }} + run: | + SUMMARY_FILE="web/output/screenshot-diff/${PROJECT}/summary.json" + if [ ! -f "${SUMMARY_FILE}" ]; then + echo "No summary file found — skipping S3 upload." + exit 0 + fi + + HAS_DIFF=$(jq -r '.has_differences' "${SUMMARY_FILE}") + if [ "${HAS_DIFF}" != "true" ]; then + echo "No visual differences for ${PROJECT} — skipping S3 upload." 
+ exit 0 + fi + + aws s3 sync "web/output/screenshot-diff/${PROJECT}/" \ + "s3://${PLAYWRIGHT_S3_BUCKET}/reports/pr-${PR_NUMBER}/${RUN_ID}/${PROJECT}/" + + - name: Upload visual diff summary + uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f + if: always() + with: + name: screenshot-diff-summary-${{ matrix.project }} + path: ./web/output/screenshot-diff/${{ matrix.project }}/summary.json + if-no-files-found: ignore + retention-days: 5 + + - name: Upload visual diff report artifact + uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f + if: always() + with: + name: screenshot-diff-report-${{ matrix.project }}-${{ github.run_id }} + path: ./web/output/screenshot-diff/${{ matrix.project }}/ + if-no-files-found: ignore retention-days: 30 + - name: Update S3 baselines + if: >- + success() && ( + github.ref == 'refs/heads/main' || + startsWith(github.ref, 'refs/heads/release/') || + startsWith(github.ref, 'refs/tags/v') || + ( + github.event_name == 'merge_group' && ( + github.event.merge_group.base_ref == 'refs/heads/main' || + startsWith(github.event.merge_group.base_ref, 'refs/heads/release/') + ) + ) + ) + env: + PROJECT: ${{ matrix.project }} + PLAYWRIGHT_S3_BUCKET: ${{ env.PLAYWRIGHT_S3_BUCKET }} + BASELINE_REV: ${{ steps.baseline-rev.outputs.rev }} + run: | + if [ -d "web/output/screenshots/" ] && [ "$(ls -A web/output/screenshots/)" ]; then + uv run --no-sync --with onyx-devtools ods screenshot-diff upload-baselines \ + --project "${PROJECT}" \ + --rev "${BASELINE_REV}" \ + --delete + else + echo "No screenshots to upload for ${PROJECT} — skipping baseline update." 
+ fi + # save before stopping the containers so the logs can be captured - name: Save Docker logs if: success() || failure() @@ -460,6 +586,95 @@ jobs: name: docker-logs-${{ matrix.project }}-${{ github.run_id }} path: ${{ github.workspace }}/docker-compose.log + # Post a single combined visual regression comment after all matrix jobs finish + visual-regression-comment: + needs: [playwright-tests] + if: always() && github.event_name == 'pull_request' + runs-on: ubuntu-slim + timeout-minutes: 5 + permissions: + pull-requests: write + steps: + - name: Download visual diff summaries + uses: actions/download-artifact@95815c38cf2ff2164869cbab79da8d1f422bc89e # ratchet:actions/download-artifact@v4 + with: + pattern: screenshot-diff-summary-* + path: summaries/ + + - name: Post combined PR comment + env: + GH_TOKEN: ${{ github.token }} + PR_NUMBER: ${{ github.event.pull_request.number }} + RUN_ID: ${{ github.run_id }} + REPO: ${{ github.repository }} + S3_BUCKET: ${{ env.PLAYWRIGHT_S3_BUCKET }} + run: | + MARKER="" + + # Build the markdown table from all summary files + TABLE_HEADER="| Project | Changed | Added | Removed | Unchanged | Report |" + TABLE_DIVIDER="|---------|---------|-------|---------|-----------|--------|" + TABLE_ROWS="" + HAS_ANY_SUMMARY=false + + for SUMMARY_DIR in summaries/screenshot-diff-summary-*/; do + SUMMARY_FILE="${SUMMARY_DIR}summary.json" + if [ ! 
-f "${SUMMARY_FILE}" ]; then + continue + fi + + HAS_ANY_SUMMARY=true + PROJECT=$(jq -r '.project' "${SUMMARY_FILE}") + CHANGED=$(jq -r '.changed' "${SUMMARY_FILE}") + ADDED=$(jq -r '.added' "${SUMMARY_FILE}") + REMOVED=$(jq -r '.removed' "${SUMMARY_FILE}") + UNCHANGED=$(jq -r '.unchanged' "${SUMMARY_FILE}") + TOTAL=$(jq -r '.total' "${SUMMARY_FILE}") + HAS_DIFF=$(jq -r '.has_differences' "${SUMMARY_FILE}") + + if [ "${TOTAL}" = "0" ]; then + REPORT_LINK="_No screenshots_" + elif [ "${HAS_DIFF}" = "true" ]; then + REPORT_URL="https://${S3_BUCKET}.s3.us-east-2.amazonaws.com/reports/pr-${PR_NUMBER}/${RUN_ID}/${PROJECT}/index.html" + REPORT_LINK="[View Report](${REPORT_URL})" + else + REPORT_LINK="✅ No changes" + fi + + TABLE_ROWS="${TABLE_ROWS}| \`${PROJECT}\` | ${CHANGED} | ${ADDED} | ${REMOVED} | ${UNCHANGED} | ${REPORT_LINK} |\n" + done + + if [ "${HAS_ANY_SUMMARY}" = "false" ]; then + echo "No visual diff summaries found — skipping PR comment." + exit 0 + fi + + BODY=$(printf '%s\n' \ + "${MARKER}" \ + "### 🖼️ Visual Regression Report" \ + "" \ + "${TABLE_HEADER}" \ + "${TABLE_DIVIDER}" \ + "$(printf '%b' "${TABLE_ROWS}")") + + # Upsert: find existing comment with the marker, or create a new one + EXISTING_COMMENT_ID=$(gh api \ + "repos/${REPO}/issues/${PR_NUMBER}/comments" \ + --jq ".[] | select(.body | startswith(\"${MARKER}\")) | .id" \ + 2>/dev/null | head -1) + + if [ -n "${EXISTING_COMMENT_ID}" ]; then + gh api \ + --method PATCH \ + "repos/${REPO}/issues/comments/${EXISTING_COMMENT_ID}" \ + -f body="${BODY}" + else + gh api \ + --method POST \ + "repos/${REPO}/issues/${PR_NUMBER}/comments" \ + -f body="${BODY}" + fi + playwright-required: # NOTE: Github-hosted runners have about 20s faster queue times and are preferred here. 
runs-on: ubuntu-slim @@ -470,48 +685,3 @@ jobs: - name: Check job status if: ${{ contains(needs.*.result, 'failure') || contains(needs.*.result, 'cancelled') || contains(needs.*.result, 'skipped') }} run: exit 1 - -# NOTE: Chromatic UI diff testing is currently disabled. -# We are using Playwright for local and CI testing without visual regression checks. -# Chromatic may be reintroduced in the future for UI diff testing if needed. - -# chromatic-tests: -# name: Chromatic Tests - -# needs: playwright-tests -# runs-on: -# [ -# runs-on, -# runner=32cpu-linux-x64, -# disk=large, -# "run-id=${{ github.run_id }}", -# ] -# steps: -# - name: Checkout code -# uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # ratchet:actions/checkout@v6 -# with: -# fetch-depth: 0 - -# - name: Setup node -# uses: actions/setup-node@395ad3262231945c25e8478fd5baf05154b1d79f # ratchet:actions/setup-node@v4 -# with: -# node-version: 22 - -# - name: Install node dependencies -# working-directory: ./web -# run: npm ci - -# - name: Download Playwright test results -# uses: actions/download-artifact@d3f86a106a0bac45b974a628896c90dbdf5c8093 # ratchet:actions/download-artifact@v4 -# with: -# name: test-results -# path: ./web/test-results - -# - name: Run Chromatic -# uses: chromaui/action@latest -# with: -# playwright: true -# projectToken: ${{ secrets.CHROMATIC_PROJECT_TOKEN }} -# workingDir: ./web -# env: -# CHROMATIC_ARCHIVE_LOCATION: ./test-results diff --git a/.github/workflows/pr-python-checks.yml b/.github/workflows/pr-python-checks.yml index 9a9fad349b9..a9f95d985af 100644 --- a/.github/workflows/pr-python-checks.yml +++ b/.github/workflows/pr-python-checks.yml @@ -42,6 +42,9 @@ jobs: - name: Generate OpenAPI schema and Python client shell: bash + # TODO(Nik): https://linear.app/onyx-app/issue/ENG-1/update-test-infra-to-use-test-license + env: + LICENSE_ENFORCEMENT_ENABLED: "false" run: | ods openapi all diff --git a/.github/workflows/pr-python-connector-tests.yml 
b/.github/workflows/pr-python-connector-tests.yml index 1e8340de05a..7e8eed47fc1 100644 --- a/.github/workflows/pr-python-connector-tests.yml +++ b/.github/workflows/pr-python-connector-tests.yml @@ -65,7 +65,7 @@ env: ZENDESK_TOKEN: ${{ secrets.ZENDESK_TOKEN }} # Salesforce - SF_USERNAME: ${{ secrets.SF_USERNAME }} + SF_USERNAME: ${{ vars.SF_USERNAME }} SF_PASSWORD: ${{ secrets.SF_PASSWORD }} SF_SECURITY_TOKEN: ${{ secrets.SF_SECURITY_TOKEN }} @@ -110,6 +110,9 @@ env: # Slack SLACK_BOT_TOKEN: ${{ secrets.SLACK_BOT_TOKEN }} + # Discord + DISCORD_CONNECTOR_BOT_TOKEN: ${{ secrets.DISCORD_CONNECTOR_BOT_TOKEN }} + # Teams TEAMS_APPLICATION_ID: ${{ secrets.TEAMS_APPLICATION_ID }} TEAMS_DIRECTORY_ID: ${{ secrets.TEAMS_DIRECTORY_ID }} diff --git a/.github/workflows/pr-python-model-tests.yml b/.github/workflows/pr-python-model-tests.yml index bf2b933dab2..fb6cd790ac3 100644 --- a/.github/workflows/pr-python-model-tests.yml +++ b/.github/workflows/pr-python-model-tests.yml @@ -64,7 +64,7 @@ jobs: echo "cache-suffix=${CACHE_SUFFIX}" >> $GITHUB_OUTPUT - name: Login to Docker Hub - uses: docker/login-action@5e57cd118135c172c3672efd75eb46360885c0ef + uses: docker/login-action@c94ce9fb468520275223c153574b00df6fe4bcc9 with: username: ${{ secrets.DOCKER_USERNAME }} password: ${{ secrets.DOCKER_TOKEN }} diff --git a/.github/workflows/pr-python-tests.yml b/.github/workflows/pr-python-tests.yml index b7502a020a8..54785719f14 100644 --- a/.github/workflows/pr-python-tests.yml +++ b/.github/workflows/pr-python-tests.yml @@ -27,6 +27,8 @@ jobs: PYTHONPATH: ./backend REDIS_CLOUD_PYTEST_PASSWORD: ${{ secrets.REDIS_CLOUD_PYTEST_PASSWORD }} DISABLE_TELEMETRY: "true" + # TODO(Nik): https://linear.app/onyx-app/issue/ENG-1/update-test-infra-to-use-test-license + LICENSE_ENFORCEMENT_ENABLED: "false" steps: - uses: runs-on/action@cd2b598b0515d39d78c38a02d529db87d2196d1e # ratchet:runs-on/action@v2 diff --git a/.github/workflows/pr-quality-checks.yml b/.github/workflows/pr-quality-checks.yml 
index 17599ce2bd1..ac9a9bd36f5 100644 --- a/.github/workflows/pr-quality-checks.yml +++ b/.github/workflows/pr-quality-checks.yml @@ -24,13 +24,13 @@ jobs: with: fetch-depth: 0 persist-credentials: false - - uses: actions/setup-python@83679a892e2d95755f2dac6acb0bfd1e9ac5d548 # ratchet:actions/setup-python@v6 + - uses: actions/setup-python@a309ff8b426b58ec0e2a45f0f869d46889d02405 # ratchet:actions/setup-python@v6 with: python-version: "3.11" - name: Setup Terraform uses: hashicorp/setup-terraform@b9cd54a3c349d3f38e8881555d616ced269862dd # ratchet:hashicorp/setup-terraform@v3 - name: Setup node - uses: actions/setup-node@395ad3262231945c25e8478fd5baf05154b1d79f # ratchet:actions/setup-node@v6 + uses: actions/setup-node@6044e13b5dc448c55e2357c09f80417699197238 # ratchet:actions/setup-node@v6 with: # zizmor: ignore[cache-poisoning] node-version: 22 cache: "npm" diff --git a/.github/workflows/preview.yml b/.github/workflows/preview.yml new file mode 100644 index 00000000000..92358a91186 --- /dev/null +++ b/.github/workflows/preview.yml @@ -0,0 +1,73 @@ +name: Preview Deployment +env: + VERCEL_ORG_ID: ${{ secrets.VERCEL_ORG_ID }} + VERCEL_PROJECT_ID: ${{ secrets.VERCEL_PROJECT_ID }} + VERCEL_CLI: vercel@50.14.1 +on: + push: + branches-ignore: + - main + paths: + - "web/**" +permissions: + contents: read + pull-requests: write +jobs: + Deploy-Preview: + runs-on: ubuntu-latest + timeout-minutes: 30 + steps: + - uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd + with: + persist-credentials: false + + - name: Setup node + uses: actions/setup-node@6044e13b5dc448c55e2357c09f80417699197238 # ratchet:actions/setup-node@v4 + with: + node-version: 22 + cache: "npm" + cache-dependency-path: ./web/package-lock.json + + - name: Pull Vercel Environment Information + run: npx --yes ${{ env.VERCEL_CLI }} pull --yes --environment=preview --token=${{ secrets.VERCEL_TOKEN }} + + - name: Build Project Artifacts + run: npx --yes ${{ env.VERCEL_CLI }} build --token=${{ 
secrets.VERCEL_TOKEN }} + + - name: Deploy Project Artifacts to Vercel + id: deploy + run: | + DEPLOYMENT_URL=$(npx --yes ${{ env.VERCEL_CLI }} deploy --prebuilt --token=${{ secrets.VERCEL_TOKEN }}) + echo "url=$DEPLOYMENT_URL" >> "$GITHUB_OUTPUT" + + - name: Update PR comment with deployment URL + if: always() && steps.deploy.outputs.url + env: + GH_TOKEN: ${{ github.token }} + DEPLOYMENT_URL: ${{ steps.deploy.outputs.url }} + run: | + # Find the PR for this branch + PR_NUMBER=$(gh pr list --head "$GITHUB_REF_NAME" --json number --jq '.[0].number') + if [ -z "$PR_NUMBER" ]; then + echo "No open PR found for branch $GITHUB_REF_NAME, skipping comment." + exit 0 + fi + + COMMENT_MARKER="<!-- preview-deployment-comment -->" + COMMENT_BODY="$COMMENT_MARKER + **Preview Deployment** + + | Status | Preview | Commit | Updated | + | --- | --- | --- | --- | + | ✅ | $DEPLOYMENT_URL | \`${GITHUB_SHA::7}\` | $(date -u '+%Y-%m-%d %H:%M:%S UTC') |" + + # Find existing comment by marker + EXISTING_COMMENT_ID=$(gh api "repos/$GITHUB_REPOSITORY/issues/$PR_NUMBER/comments" \ + --jq ".[] | select(.body | startswith(\"$COMMENT_MARKER\")) | .id" | head -1) + + if [ -n "$EXISTING_COMMENT_ID" ]; then + gh api "repos/$GITHUB_REPOSITORY/issues/comments/$EXISTING_COMMENT_ID" \ + --method PATCH --field body="$COMMENT_BODY" + else + gh pr comment "$PR_NUMBER" --body "$COMMENT_BODY" + fi diff --git a/.github/workflows/sandbox-deployment.yml b/.github/workflows/sandbox-deployment.yml new file mode 100644 index 00000000000..151addc2380 --- /dev/null +++ b/.github/workflows/sandbox-deployment.yml @@ -0,0 +1,290 @@ +name: Build and Push Sandbox Image on Tag + +on: + push: + tags: + - "experimental-cc4a.*" + +# Restrictive defaults; jobs declare what they need.
+permissions: {} + +jobs: + check-sandbox-changes: + runs-on: ubuntu-slim + timeout-minutes: 10 + permissions: + contents: read + outputs: + sandbox-changed: ${{ steps.check.outputs.sandbox-changed }} + new-version: ${{ steps.version.outputs.new-version }} + steps: + - name: Checkout + uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # ratchet:actions/checkout@v6 + with: + persist-credentials: false + fetch-depth: 0 + + - name: Check for sandbox-relevant file changes + id: check + run: | + # Get the previous tag to diff against + CURRENT_TAG="${GITHUB_REF_NAME}" + PREVIOUS_TAG=$(git tag --sort=-creatordate | grep '^experimental-cc4a\.' | grep -v "^${CURRENT_TAG}$" | head -n 1) + + if [ -z "$PREVIOUS_TAG" ]; then + echo "No previous experimental-cc4a tag found, building unconditionally" + echo "sandbox-changed=true" >> "$GITHUB_OUTPUT" + exit 0 + fi + + echo "Comparing ${PREVIOUS_TAG}..${CURRENT_TAG}" + + # Check if any sandbox-relevant files changed + SANDBOX_PATHS=( + "backend/onyx/server/features/build/sandbox/" + ) + + CHANGED=false + for path in "${SANDBOX_PATHS[@]}"; do + if git diff --name-only "${PREVIOUS_TAG}..${CURRENT_TAG}" -- "$path" | grep -q .; then + echo "Changes detected in: $path" + CHANGED=true + break + fi + done + + echo "sandbox-changed=$CHANGED" >> "$GITHUB_OUTPUT" + + - name: Determine new sandbox version + id: version + if: steps.check.outputs.sandbox-changed == 'true' + run: | + # Query Docker Hub for the latest versioned tag + LATEST_TAG=$(curl -s "https://hub.docker.com/v2/repositories/onyxdotapp/sandbox/tags?page_size=100" \ + | jq -r '.results[].name' \ + | grep -E '^v[0-9]+\.[0-9]+\.[0-9]+$' \ + | sort -V \ + | tail -n 1) + + if [ -z "$LATEST_TAG" ]; then + echo "No existing version tags found on Docker Hub, starting at 0.1.1" + NEW_VERSION="0.1.1" + else + CURRENT_VERSION="${LATEST_TAG#v}" + echo "Latest version on Docker Hub: $CURRENT_VERSION" + + # Increment patch version + MAJOR=$(echo "$CURRENT_VERSION" | cut -d. 
-f1) + MINOR=$(echo "$CURRENT_VERSION" | cut -d. -f2) + PATCH=$(echo "$CURRENT_VERSION" | cut -d. -f3) + NEW_PATCH=$((PATCH + 1)) + NEW_VERSION="${MAJOR}.${MINOR}.${NEW_PATCH}" + fi + + echo "New version: $NEW_VERSION" + echo "new-version=$NEW_VERSION" >> "$GITHUB_OUTPUT" + + build-sandbox-amd64: + needs: check-sandbox-changes + if: needs.check-sandbox-changes.outputs.sandbox-changed == 'true' + runs-on: + - runs-on + - runner=4cpu-linux-x64 + - run-id=${{ github.run_id }}-sandbox-amd64 + - extras=ecr-cache + timeout-minutes: 90 + environment: release + permissions: + contents: read + id-token: write + outputs: + digest: ${{ steps.build.outputs.digest }} + env: + REGISTRY_IMAGE: onyxdotapp/sandbox + steps: + - uses: runs-on/action@cd2b598b0515d39d78c38a02d529db87d2196d1e # ratchet:runs-on/action@v2 + + - name: Checkout + uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # ratchet:actions/checkout@v6 + with: + persist-credentials: false + + - name: Configure AWS credentials + uses: aws-actions/configure-aws-credentials@61815dcd50bd041e203e49132bacad1fd04d2708 + with: + role-to-assume: ${{ secrets.AWS_OIDC_ROLE_ARN }} + aws-region: us-east-2 + + - name: Get AWS Secrets + uses: aws-actions/aws-secretsmanager-get-secrets@a9a7eb4e2f2871d30dc5b892576fde60a2ecc802 + with: + secret-ids: | + DOCKER_USERNAME, deploy/docker-username + DOCKER_TOKEN, deploy/docker-token + parse-json-secrets: true + + - name: Docker meta + id: meta + uses: docker/metadata-action@c299e40c65443455700f0fdfc63efafe5b349051 # ratchet:docker/metadata-action@v5 + with: + images: ${{ env.REGISTRY_IMAGE }} + flavor: | + latest=false + + - name: Set up Docker Buildx + uses: docker/setup-buildx-action@8d2750c68a42422c14e847fe6c8ac0403b4cbd6f # ratchet:docker/setup-buildx-action@v3 + + - name: Login to Docker Hub + uses: docker/login-action@c94ce9fb468520275223c153574b00df6fe4bcc9 # ratchet:docker/login-action@v3 + with: + username: ${{ env.DOCKER_USERNAME }} + password: ${{ env.DOCKER_TOKEN 
}} + + - name: Build and push AMD64 + id: build + uses: docker/build-push-action@263435318d21b8e681c14492fe198d362a7d2c83 # ratchet:docker/build-push-action@v6 + with: + context: ./backend/onyx/server/features/build/sandbox/kubernetes/docker + file: ./backend/onyx/server/features/build/sandbox/kubernetes/docker/Dockerfile + platforms: linux/amd64 + labels: ${{ steps.meta.outputs.labels }} + cache-from: | + type=registry,ref=${{ env.REGISTRY_IMAGE }}:latest + cache-to: | + type=inline + outputs: type=image,name=${{ env.REGISTRY_IMAGE }},push-by-digest=true,name-canonical=true,push=true + + build-sandbox-arm64: + needs: check-sandbox-changes + if: needs.check-sandbox-changes.outputs.sandbox-changed == 'true' + runs-on: + - runs-on + - runner=4cpu-linux-arm64 + - run-id=${{ github.run_id }}-sandbox-arm64 + - extras=ecr-cache + timeout-minutes: 90 + environment: release + permissions: + contents: read + id-token: write + outputs: + digest: ${{ steps.build.outputs.digest }} + env: + REGISTRY_IMAGE: onyxdotapp/sandbox + steps: + - uses: runs-on/action@cd2b598b0515d39d78c38a02d529db87d2196d1e # ratchet:runs-on/action@v2 + + - name: Checkout + uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # ratchet:actions/checkout@v6 + with: + persist-credentials: false + + - name: Configure AWS credentials + uses: aws-actions/configure-aws-credentials@61815dcd50bd041e203e49132bacad1fd04d2708 + with: + role-to-assume: ${{ secrets.AWS_OIDC_ROLE_ARN }} + aws-region: us-east-2 + + - name: Get AWS Secrets + uses: aws-actions/aws-secretsmanager-get-secrets@a9a7eb4e2f2871d30dc5b892576fde60a2ecc802 + with: + secret-ids: | + DOCKER_USERNAME, deploy/docker-username + DOCKER_TOKEN, deploy/docker-token + parse-json-secrets: true + + - name: Docker meta + id: meta + uses: docker/metadata-action@c299e40c65443455700f0fdfc63efafe5b349051 # ratchet:docker/metadata-action@v5 + with: + images: ${{ env.REGISTRY_IMAGE }} + flavor: | + latest=false + + - name: Set up Docker Buildx + uses: 
docker/setup-buildx-action@8d2750c68a42422c14e847fe6c8ac0403b4cbd6f # ratchet:docker/setup-buildx-action@v3 + + - name: Login to Docker Hub + uses: docker/login-action@c94ce9fb468520275223c153574b00df6fe4bcc9 # ratchet:docker/login-action@v3 + with: + username: ${{ env.DOCKER_USERNAME }} + password: ${{ env.DOCKER_TOKEN }} + + - name: Build and push ARM64 + id: build + uses: docker/build-push-action@263435318d21b8e681c14492fe198d362a7d2c83 # ratchet:docker/build-push-action@v6 + with: + context: ./backend/onyx/server/features/build/sandbox/kubernetes/docker + file: ./backend/onyx/server/features/build/sandbox/kubernetes/docker/Dockerfile + platforms: linux/arm64 + labels: ${{ steps.meta.outputs.labels }} + cache-from: | + type=registry,ref=${{ env.REGISTRY_IMAGE }}:latest + cache-to: | + type=inline + outputs: type=image,name=${{ env.REGISTRY_IMAGE }},push-by-digest=true,name-canonical=true,push=true + + merge-sandbox: + needs: + - check-sandbox-changes + - build-sandbox-amd64 + - build-sandbox-arm64 + runs-on: + - runs-on + - runner=2cpu-linux-x64 + - run-id=${{ github.run_id }}-merge-sandbox + - extras=ecr-cache + timeout-minutes: 30 + environment: release + permissions: + id-token: write + env: + REGISTRY_IMAGE: onyxdotapp/sandbox + steps: + - uses: runs-on/action@cd2b598b0515d39d78c38a02d529db87d2196d1e # ratchet:runs-on/action@v2 + + - name: Configure AWS credentials + uses: aws-actions/configure-aws-credentials@61815dcd50bd041e203e49132bacad1fd04d2708 + with: + role-to-assume: ${{ secrets.AWS_OIDC_ROLE_ARN }} + aws-region: us-east-2 + + - name: Get AWS Secrets + uses: aws-actions/aws-secretsmanager-get-secrets@a9a7eb4e2f2871d30dc5b892576fde60a2ecc802 + with: + secret-ids: | + DOCKER_USERNAME, deploy/docker-username + DOCKER_TOKEN, deploy/docker-token + parse-json-secrets: true + + - name: Set up Docker Buildx + uses: docker/setup-buildx-action@8d2750c68a42422c14e847fe6c8ac0403b4cbd6f # ratchet:docker/setup-buildx-action@v3 + + - name: Login to Docker Hub + 
uses: docker/login-action@c94ce9fb468520275223c153574b00df6fe4bcc9 # ratchet:docker/login-action@v3 + with: + username: ${{ env.DOCKER_USERNAME }} + password: ${{ env.DOCKER_TOKEN }} + + - name: Docker meta + id: meta + uses: docker/metadata-action@c299e40c65443455700f0fdfc63efafe5b349051 # ratchet:docker/metadata-action@v5 + with: + images: ${{ env.REGISTRY_IMAGE }} + flavor: | + latest=false + tags: | + type=raw,value=v${{ needs.check-sandbox-changes.outputs.new-version }} + type=raw,value=latest + + - name: Create and push manifest + env: + IMAGE_REPO: ${{ env.REGISTRY_IMAGE }} + AMD64_DIGEST: ${{ needs.build-sandbox-amd64.outputs.digest }} + ARM64_DIGEST: ${{ needs.build-sandbox-arm64.outputs.digest }} + META_TAGS: ${{ steps.meta.outputs.tags }} + run: | + IMAGES="${IMAGE_REPO}@${AMD64_DIGEST} ${IMAGE_REPO}@${ARM64_DIGEST}" + docker buildx imagetools create \ + $(printf '%s\n' "${META_TAGS}" | xargs -I {} echo -t {}) \ + $IMAGES diff --git a/.gitignore b/.gitignore index 35e43eb6525..0c12258ff29 100644 --- a/.gitignore +++ b/.gitignore @@ -1,10 +1,12 @@ # editors -.vscode +.vscode/* !/.vscode/env_template.txt +!/.vscode/env.web_template.txt !/.vscode/launch.json !/.vscode/tasks.template.jsonc .zed .cursor +!/.cursor/mcp.json # macos .DS_store @@ -39,10 +41,6 @@ settings.json /backend/tests/regression/answer_quality/search_test_config.yaml *.egg-info -# Claude -AGENTS.md -CLAUDE.md - # Local .terraform directories **/.terraform/* diff --git a/.vscode/env.web_template.txt b/.vscode/env.web_template.txt new file mode 100644 index 00000000000..6754cec1cd0 --- /dev/null +++ b/.vscode/env.web_template.txt @@ -0,0 +1,16 @@ +# Copy this file to .env.web in the .vscode folder. +# Fill in the values as needed +# Web Server specific environment variables +# Minimal set needed for Next.js dev server + +# Auth +AUTH_TYPE=basic +DEV_MODE=true + +# Enable the full set of Danswer Enterprise Edition features. 
+# NOTE: DO NOT ENABLE THIS UNLESS YOU HAVE A PAID ENTERPRISE LICENSE (or if you +# are using this for local testing/development). +ENABLE_PAID_ENTERPRISE_EDITION_FEATURES=false + +# Enable Onyx Craft +ENABLE_CRAFT=true diff --git a/.vscode/env_template.txt b/.vscode/env_template.txt index 35d1f0fe2f8..cd398ab3ef5 100644 --- a/.vscode/env_template.txt +++ b/.vscode/env_template.txt @@ -6,13 +6,13 @@ # processes. -# For local dev, often user Authentication is not needed. -AUTH_TYPE=disabled +AUTH_TYPE=basic +DEV_MODE=true # Always keep these on for Dev. # Logs model prompts, reasoning, and answer to stdout. -LOG_ONYX_MODEL_INTERACTIONS=True +LOG_ONYX_MODEL_INTERACTIONS=False # More verbose logging LOG_LEVEL=debug @@ -35,7 +35,6 @@ GEN_AI_API_KEY= OPENAI_API_KEY= # If answer quality isn't important for dev, use gpt-4o-mini since it's cheaper. GEN_AI_MODEL_VERSION=gpt-4o -FAST_GEN_AI_MODEL_VERSION=gpt-4o # Python stuff diff --git a/.vscode/launch.json b/.vscode/launch.json index 51be5872195..41a8164c45f 100644 --- a/.vscode/launch.json +++ b/.vscode/launch.json @@ -25,6 +25,7 @@ "Celery heavy", "Celery docfetching", "Celery docprocessing", + "Celery user_file_processing", "Celery beat" ], "presentation": { @@ -86,7 +87,7 @@ "request": "launch", "cwd": "${workspaceRoot}/web", "runtimeExecutable": "npm", - "envFile": "${workspaceFolder}/.vscode/.env", + "envFile": "${workspaceFolder}/.vscode/.env.web", "runtimeArgs": ["run", "dev"], "presentation": { "group": "2" @@ -121,7 +122,6 @@ "cwd": "${workspaceFolder}/backend", "envFile": "${workspaceFolder}/.vscode/.env", "env": { - "LOG_ONYX_MODEL_INTERACTIONS": "True", "LOG_LEVEL": "DEBUG", "PYTHONUNBUFFERED": "1" }, @@ -246,7 +246,7 @@ "--loglevel=INFO", "--hostname=light@%n", "-Q", - "vespa_metadata_sync,connector_deletion,doc_permissions_upsert,index_attempt_cleanup" + "vespa_metadata_sync,connector_deletion,doc_permissions_upsert,index_attempt_cleanup,opensearch_migration" ], "presentation": { "group": "2" @@ -275,7 
+275,7 @@ "--loglevel=INFO", "--hostname=background@%n", "-Q", - "vespa_metadata_sync,connector_deletion,doc_permissions_upsert,checkpoint_cleanup,index_attempt_cleanup,docprocessing,connector_doc_fetching,user_files_indexing,connector_pruning,connector_doc_permissions_sync,connector_external_group_sync,csv_generation,kg_processing,monitoring,user_file_processing,user_file_project_sync,user_file_delete" + "vespa_metadata_sync,connector_deletion,doc_permissions_upsert,checkpoint_cleanup,index_attempt_cleanup,docprocessing,connector_doc_fetching,connector_pruning,connector_doc_permissions_sync,connector_external_group_sync,csv_generation,kg_processing,monitoring,user_file_processing,user_file_project_sync,user_file_delete,opensearch_migration" ], "presentation": { "group": "2" @@ -419,7 +419,7 @@ "--loglevel=INFO", "--hostname=docfetching@%n", "-Q", - "connector_doc_fetching,user_files_indexing" + "connector_doc_fetching" ], "presentation": { "group": "2" @@ -572,7 +572,6 @@ "cwd": "${workspaceFolder}/backend", "envFile": "${workspaceFolder}/.vscode/.env", "env": { - "LOG_ONYX_MODEL_INTERACTIONS": "True", "LOG_LEVEL": "DEBUG", "PYTHONUNBUFFERED": "1", "PYTHONPATH": "." diff --git a/CLAUDE.md.template b/AGENTS.md similarity index 94% rename from CLAUDE.md.template rename to AGENTS.md index a78d6e8e5dd..7a7bbdfab08 100644 --- a/CLAUDE.md.template +++ b/AGENTS.md @@ -1,26 +1,25 @@ -# CLAUDE.md +# PROJECT KNOWLEDGE BASE -This file provides guidance to Claude Code (claude.ai/code) when working with code in this repository. +This file provides guidance to AI agents when working with code in this repository. ## KEY NOTES - If you run into any missing python dependency errors, try running your command with `source .venv/bin/activate` \ -to assume the python venv. + to assume the python venv. - To make tests work, check the `.env` file at the root of the project to find an OpenAI key. 
- If using `playwright` to explore the frontend, you can usually log in with username `a@example.com` and password -`a`. The app can be accessed at `http://localhost:3000`. + `a`. The app can be accessed at `http://localhost:3000`. - You should assume that all Onyx services are running. To verify, you can check the `backend/log` directory to -make sure we see logs coming out from the relevant service. + make sure we see logs coming out from the relevant service. - To connect to the Postgres database, use: `docker exec -it onyx-relational_db-1 psql -U postgres -c ""` - When making calls to the backend, always go through the frontend. E.g. make a call to `http://localhost:3000/api/persona` not `http://localhost:8080/api/persona` - Put ALL db operations under the `backend/onyx/db` / `backend/ee/onyx/db` directories. Don't run queries -outside of those directories. + outside of those directories. ## Project Overview **Onyx** (formerly Danswer) is an open-source Gen-AI and Enterprise Search platform that connects to company documents, apps, and people. It features a modular architecture with both Community Edition (MIT licensed) and Enterprise Edition offerings. 
- ### Background Workers (Celery) Onyx uses Celery for asynchronous task processing with multiple specialized workers: @@ -92,6 +91,7 @@ Onyx uses Celery for asynchronous task processing with multiple specialized work Onyx supports two deployment modes for background workers, controlled by the `USE_LIGHTWEIGHT_BACKGROUND_WORKER` environment variable: **Lightweight Mode** (default, `USE_LIGHTWEIGHT_BACKGROUND_WORKER=true`): + - Runs a single consolidated `background` worker that handles all background tasks: - Light worker tasks (Vespa operations, permissions sync, deletion) - Document processing (indexing pipeline) @@ -105,12 +105,14 @@ Onyx supports two deployment modes for background workers, controlled by the `US - Default concurrency: 20 threads (increased to handle combined workload) **Standard Mode** (`USE_LIGHTWEIGHT_BACKGROUND_WORKER=false`): + - Runs separate specialized workers as documented above (light, docprocessing, docfetching, heavy, kg_processing, monitoring, user_file_processing) - Better isolation and scalability - Can scale individual workers independently based on workload - Suitable for production deployments with higher load The deployment mode affects: + - **Backend**: Worker processes spawned by supervisord or dev scripts - **Helm**: Which Kubernetes deployments are created - **Dev Environment**: Which workers `dev_run_background_jobs.py` spawns @@ -119,18 +121,18 @@ The deployment mode affects: - **Thread-based Workers**: All workers use thread pools (not processes) for stability - **Tenant Awareness**: Multi-tenant support with per-tenant task isolation. There is a -middleware layer that automatically finds the appropriate tenant ID when sending tasks -via Celery Beat. + middleware layer that automatically finds the appropriate tenant ID when sending tasks + via Celery Beat. 
- **Task Prioritization**: High, Medium, Low priority queues - **Monitoring**: Built-in heartbeat and liveness checking - **Failure Handling**: Automatic retry and failure recovery mechanisms - **Redis Coordination**: Inter-process communication via Redis - **PostgreSQL State**: Task state and metadata stored in PostgreSQL - #### Important Notes -**Defining Tasks**: +**Defining Tasks**: + - Always use `@shared_task` rather than `@celery_app` - Put tasks under `background/celery/tasks/` or `ee/background/celery/tasks` @@ -142,7 +144,12 @@ function. If you make any updates to a celery worker and you want to test these changes, you will need to ask me to restart the celery worker. There is no auto-restart on code-change mechanism. +**Task Time Limits**: +Since all tasks are executed in thread pools, the time limit features of Celery are silently +disabled and won't work. Timeout logic must be implemented within the task itself. + ### Code Quality + ```bash # Install and run pre-commit hooks pre-commit install @@ -154,6 +161,7 @@ NOTE: Always make sure everything is strictly typed (both in Python and Typescri ## Architecture Overview ### Technology Stack + - **Backend**: Python 3.11, FastAPI, SQLAlchemy, Alembic, Celery - **Frontend**: Next.js 15+, React 18, TypeScript, Tailwind CSS - **Database**: PostgreSQL with Redis caching @@ -435,6 +443,7 @@ function ContactForm() { **Reason:** Our custom color system uses CSS variables that automatically handle dark mode and maintain design consistency across the app. Standard Tailwind colors bypass this system. 
**Available color categories:** + - **Text:** `text-01` through `text-05`, `text-inverted-XX` - **Backgrounds:** `background-neutral-XX`, `background-tint-XX` (and inverted variants) - **Borders:** `border-01` through `border-05`, `border-inverted-XX` @@ -467,6 +476,7 @@ function ContactForm() { ## Database & Migrations ### Running Migrations + ```bash # Standard migrations alembic upgrade head @@ -476,6 +486,7 @@ alembic -n schema_private upgrade head ``` ### Creating Migrations + ```bash # Create migration alembic revision -m "description" @@ -488,13 +499,14 @@ Write the migration manually and place it in the file that alembic creates when ## Testing Strategy -First, you must activate the virtual environment with `source .venv/bin/activate`. +First, you must activate the virtual environment with `source .venv/bin/activate`. There are 4 main types of tests within Onyx: ### Unit Tests + These should not assume any Onyx/external services are available to be called. -Interactions with the outside world should be mocked using `unittest.mock`. Generally, only +Interactions with the outside world should be mocked using `unittest.mock`. Generally, only write these for complex, isolated modules e.g. `citation_processing.py`. To run them: @@ -504,13 +516,14 @@ pytest -xv backend/tests/unit ``` ### External Dependency Unit Tests -These tests assume that all external dependencies of Onyx are available and callable (e.g. Postgres, Redis, + +These tests assume that all external dependencies of Onyx are available and callable (e.g. Postgres, Redis, MinIO/S3, Vespa are running + OpenAI can be called + any request to the internet is fine + etc.). However, the actual Onyx containers are not running and with these tests we call the function to test directly. -We can also mock components/calls at will. +We can also mock components/calls at will. 
-The goal with these tests are to minimize mocking while giving some flexibility to mock things that are flakey, +The goal with these tests are to minimize mocking while giving some flexibility to mock things that are flakey, need strictly controlled behavior, or need to have their internal behavior validated (e.g. verify a function is called with certain args, something that would be impossible with proper integration tests). @@ -523,15 +536,16 @@ python -m dotenv -f .vscode/.env run -- pytest backend/tests/external_dependency ``` ### Integration Tests -Standard integration tests. Every test in `backend/tests/integration` runs against a real Onyx deployment. We cannot -mock anything in these tests. Prefer writing integration tests (or External Dependency Unit Tests if mocking/internal + +Standard integration tests. Every test in `backend/tests/integration` runs against a real Onyx deployment. We cannot +mock anything in these tests. Prefer writing integration tests (or External Dependency Unit Tests if mocking/internal verification is necessary) over any other type of test. Tests are parallelized at a directory level. -When writing integration tests, make sure to check the root `conftest.py` for useful fixtures + the `backend/tests/integration/common_utils` directory for utilities. Prefer (if one exists), calling the appropriate Manager +When writing integration tests, make sure to check the root `conftest.py` for useful fixtures + the `backend/tests/integration/common_utils` directory for utilities. Prefer (if one exists), calling the appropriate Manager class in the utils over directly calling the APIs with a library like `requests`. Prefer using fixtures rather than -calling the utilities directly (e.g. do NOT create admin users with +calling the utilities directly (e.g. do NOT create admin users with `admin_user = UserManager.create(name="admin_user")`, instead use the `admin_user` fixture). 
A great example of this type of test is `backend/tests/integration/dev_apis/test_simple_chat_api.py`. @@ -543,8 +557,9 @@ python -m dotenv -f .vscode/.env run -- pytest backend/tests/integration ``` ### Playwright (E2E) Tests -These tests are an even more complete version of the Integration Tests mentioned above. Has all services of Onyx -running, *including* the Web Server. + +These tests are an even more complete version of the Integration Tests mentioned above. Has all services of Onyx +running, _including_ the Web Server. Use these tests for anything that requires significant frontend <-> backend coordination. @@ -556,13 +571,11 @@ To run them: npx playwright test ``` - ## Logs When (1) writing integration tests or (2) doing live tests (e.g. curl / playwright) you can get access to logs via the `backend/log/_debug.log` file. All Onyx services (api_server, web_server, celery_X) -will be tailing their logs to this file. - +will be tailing their logs to this file. ## Security Considerations @@ -581,6 +594,7 @@ will be tailing their logs to this file. - Custom prompts and agent actions ## Creating a Plan + When creating a plan in the `plans` directory, make sure to include at least these elements: **Issues to Address** @@ -593,10 +607,10 @@ Things you come across in your research that are important to the implementation How you are going to make the changes happen. High level approach. **Tests** -What unit (use rarely), external dependency unit, integration, and playwright tests you plan to write to +What unit (use rarely), external dependency unit, integration, and playwright tests you plan to write to verify the correct behavior. Don't overtest. Usually, a given change only needs one type of test. -Do NOT include these: *Timeline*, *Rollback plan* +Do NOT include these: _Timeline_, _Rollback plan_ This is a minimal list - feel free to include more. Do NOT write code as part of your plan. Keep it high level. You can reference certain files or functions though. 
diff --git a/AGENTS.md.template b/AGENTS.md.template deleted file mode 100644 index dfc4c02926f..00000000000 --- a/AGENTS.md.template +++ /dev/null @@ -1,599 +0,0 @@ -# AGENTS.md - -This file provides guidance to AI agents when working with code in this repository. - -## KEY NOTES - -- If you run into any missing python dependency errors, try running your command with `source .venv/bin/activate` \ -to assume the python venv. -- To make tests work, check the `.env` file at the root of the project to find an OpenAI key. -- If using `playwright` to explore the frontend, you can usually log in with username `a@example.com` and password -`a`. The app can be accessed at `http://localhost:3000`. -- You should assume that all Onyx services are running. To verify, you can check the `backend/log` directory to -make sure we see logs coming out from the relevant service. -- To connect to the Postgres database, use: `docker exec -it onyx-relational_db-1 psql -U postgres -c ""` -- When making calls to the backend, always go through the frontend. E.g. make a call to `http://localhost:3000/api/persona` not `http://localhost:8080/api/persona` -- Put ALL db operations under the `backend/onyx/db` / `backend/ee/onyx/db` directories. Don't run queries -outside of those directories. - -## Project Overview - -**Onyx** (formerly Danswer) is an open-source Gen-AI and Enterprise Search platform that connects to company documents, apps, and people. It features a modular architecture with both Community Edition (MIT licensed) and Enterprise Edition offerings. - - -### Background Workers (Celery) - -Onyx uses Celery for asynchronous task processing with multiple specialized workers: - -#### Worker Types - -1. 
**Primary Worker** (`celery_app.py`) - - Coordinates core background tasks and system-wide operations - - Handles connector management, document sync, pruning, and periodic checks - - Runs with 4 threads concurrency - - Tasks: connector deletion, vespa sync, pruning, LLM model updates, user file sync - -2. **Docfetching Worker** (`docfetching`) - - Fetches documents from external data sources (connectors) - - Spawns docprocessing tasks for each document batch - - Implements watchdog monitoring for stuck connectors - - Configurable concurrency (default from env) - -3. **Docprocessing Worker** (`docprocessing`) - - Processes fetched documents through the indexing pipeline: - - Upserts documents to PostgreSQL - - Chunks documents and adds contextual information - - Embeds chunks via model server - - Writes chunks to Vespa vector database - - Updates document metadata - - Configurable concurrency (default from env) - -4. **Light Worker** (`light`) - - Handles lightweight, fast operations - - Tasks: vespa operations, document permissions sync, external group sync - - Higher concurrency for quick tasks - -5. **Heavy Worker** (`heavy`) - - Handles resource-intensive operations - - Primary task: document pruning operations - - Runs with 4 threads concurrency - -6. **KG Processing Worker** (`kg_processing`) - - Handles Knowledge Graph processing and clustering - - Builds relationships between documents - - Runs clustering algorithms - - Configurable concurrency - -7. **Monitoring Worker** (`monitoring`) - - System health monitoring and metrics collection - - Monitors Celery queues, process memory, and system status - - Single thread (monitoring doesn't need parallelism) - - Cloud-specific monitoring tasks - -8. **User File Processing Worker** (`user_file_processing`) - - Processes user-uploaded files - - Handles user file indexing and project synchronization - - Configurable concurrency - -9. 
**Beat Worker** (`beat`) - - Celery's scheduler for periodic tasks - - Uses DynamicTenantScheduler for multi-tenant support - - Schedules tasks like: - - Indexing checks (every 15 seconds) - - Connector deletion checks (every 20 seconds) - - Vespa sync checks (every 20 seconds) - - Pruning checks (every 20 seconds) - - KG processing (every 60 seconds) - - Monitoring tasks (every 5 minutes) - - Cleanup tasks (hourly) - -#### Worker Deployment Modes - -Onyx supports two deployment modes for background workers, controlled by the `USE_LIGHTWEIGHT_BACKGROUND_WORKER` environment variable: - -**Lightweight Mode** (default, `USE_LIGHTWEIGHT_BACKGROUND_WORKER=true`): -- Runs a single consolidated `background` worker that handles all background tasks: - - Pruning operations (from `heavy` worker) - - Knowledge graph processing (from `kg_processing` worker) - - Monitoring tasks (from `monitoring` worker) - - User file processing (from `user_file_processing` worker) -- Lower resource footprint (single worker process) -- Suitable for smaller deployments or development environments -- Default concurrency: 6 threads - -**Standard Mode** (`USE_LIGHTWEIGHT_BACKGROUND_WORKER=false`): -- Runs separate specialized workers as documented above (heavy, kg_processing, monitoring, user_file_processing) -- Better isolation and scalability -- Can scale individual workers independently based on workload -- Suitable for production deployments with higher load - -The deployment mode affects: -- **Backend**: Worker processes spawned by supervisord or dev scripts -- **Helm**: Which Kubernetes deployments are created -- **Dev Environment**: Which workers `dev_run_background_jobs.py` spawns - -#### Key Features - -- **Thread-based Workers**: All workers use thread pools (not processes) for stability -- **Tenant Awareness**: Multi-tenant support with per-tenant task isolation. There is a -middleware layer that automatically finds the appropriate tenant ID when sending tasks -via Celery Beat. 
-- **Task Prioritization**: High, Medium, Low priority queues -- **Monitoring**: Built-in heartbeat and liveness checking -- **Failure Handling**: Automatic retry and failure recovery mechanisms -- **Redis Coordination**: Inter-process communication via Redis -- **PostgreSQL State**: Task state and metadata stored in PostgreSQL - - -#### Important Notes - -**Defining Tasks**: -- Always use `@shared_task` rather than `@celery_app` -- Put tasks under `background/celery/tasks/` or `ee/background/celery/tasks` - -**Defining APIs**: -When creating new FastAPI APIs, do NOT use the `response_model` field. Instead, just type the -function. - -**Testing Updates**: -If you make any updates to a celery worker and you want to test these changes, you will need -to ask me to restart the celery worker. There is no auto-restart on code-change mechanism. - -### Code Quality -```bash -# Install and run pre-commit hooks -pre-commit install -pre-commit run --all-files -``` - -NOTE: Always make sure everything is strictly typed (both in Python and Typescript). 
- -## Architecture Overview - -### Technology Stack -- **Backend**: Python 3.11, FastAPI, SQLAlchemy, Alembic, Celery -- **Frontend**: Next.js 15+, React 18, TypeScript, Tailwind CSS -- **Database**: PostgreSQL with Redis caching -- **Search**: Vespa vector database -- **Auth**: OAuth2, SAML, multi-provider support -- **AI/ML**: LangChain, LiteLLM, multiple embedding models - -### Directory Structure - -``` -backend/ -├── onyx/ -│ ├── auth/ # Authentication & authorization -│ ├── chat/ # Chat functionality & LLM interactions -│ ├── connectors/ # Data source connectors -│ ├── db/ # Database models & operations -│ ├── document_index/ # Vespa integration -│ ├── federated_connectors/ # External search connectors -│ ├── llm/ # LLM provider integrations -│ └── server/ # API endpoints & routers -├── ee/ # Enterprise Edition features -├── alembic/ # Database migrations -└── tests/ # Test suites - -web/ -├── src/app/ # Next.js app router pages -├── src/components/ # Reusable React components -└── src/lib/ # Utilities & business logic -``` - -## Frontend Standards - -### 1. Import Standards - -**Always use absolute imports with the `@` prefix.** - -**Reason:** Moving files around becomes easier since you don't also have to update those import statements. This makes modifications to the codebase much nicer. - -```typescript -// ✅ Good -import { Button } from "@/components/ui/button"; -import { useAuth } from "@/hooks/useAuth"; -import { Text } from "@/refresh-components/texts/Text"; - -// ❌ Bad -import { Button } from "../../../components/ui/button"; -import { useAuth } from "./hooks/useAuth"; -``` - -### 2. React Component Functions - -**Prefer regular functions over arrow functions for React components.** - -**Reason:** Functions just become easier to read. - -```typescript -// ✅ Good -function UserProfile({ userId }: UserProfileProps) { - return
<div>User Profile</div>
-} - -// ❌ Bad -const UserProfile = ({ userId }: UserProfileProps) => { - return
<div>User Profile</div>
-} -``` - -### 3. Props Interface Extraction - -**Extract prop types into their own interface definitions.** - -**Reason:** Functions just become easier to read. - -```typescript -// ✅ Good -interface UserCardProps { - user: User - showActions?: boolean - onEdit?: (userId: string) => void -} - -function UserCard({ user, showActions = false, onEdit }: UserCardProps) { - return
<div>User Card</div>
-} - -// ❌ Bad -function UserCard({ - user, - showActions = false, - onEdit -}: { - user: User - showActions?: boolean - onEdit?: (userId: string) => void -}) { - return
<div>User Card</div>
-} -``` - -### 4. Spacing Guidelines - -**Prefer padding over margins for spacing.** - -**Reason:** We want to consolidate usage to paddings instead of margins. - -```typescript -// ✅ Good -
-
<div className="p-4">Content</div>
-
- -// ❌ Bad -
-
<div className="m-4">Content</div>
-
-``` - -### 5. Tailwind Dark Mode - -**Strictly forbid using the `dark:` modifier in Tailwind classes, except for logo icon handling.** - -**Reason:** The `colors.css` file already, VERY CAREFULLY, defines what the exact opposite colour of each light-mode colour is. Overriding this behaviour is VERY bad and will lead to horrible UI breakages. - -**Exception:** The `createLogoIcon` helper in `web/src/components/icons/icons.tsx` uses `dark:` modifiers (`dark:invert`, `dark:hidden`, `dark:block`) to handle third-party logo icons that cannot automatically adapt through `colors.css`. This is the ONLY acceptable use of dark mode modifiers. - -```typescript -// ✅ Good - Standard components use `web/tailwind-themes/tailwind.config.js` / `web/src/app/css/colors.css` -
- Content -
- -// ✅ Good - Logo icons with dark mode handling via createLogoIcon -export const GithubIcon = createLogoIcon(githubLightIcon, { - monochromatic: true, // Will apply dark:invert internally -}); - -export const GitbookIcon = createLogoIcon(gitbookLightIcon, { - darkSrc: gitbookDarkIcon, // Will use dark:hidden/dark:block internally -}); - -// ❌ Bad - Manual dark mode overrides -
- Content -
-``` - -### 6. Class Name Utilities - -**Use the `cn` utility instead of raw string formatting for classNames.** - -**Reason:** `cn`s are easier to read. They also allow for more complex types (i.e., string-arrays) to get formatted properly (it flattens each element in that string array down). As a result, it can allow things such as conditionals (i.e., `myCondition && "some-tailwind-class"`, which evaluates to `false` when `myCondition` is `false`) to get filtered out. - -```typescript -import { cn } from '@/lib/utils' - -// ✅ Good -
- Content -
- -// ❌ Bad -
- Content -
-``` - -### 7. Custom Hooks Organization - -**Follow a "hook-per-file" layout. Each hook should live in its own file within `web/src/hooks`.** - -**Reason:** This is just a layout preference. Keeps code clean. - -```typescript -// web/src/hooks/useUserData.ts -export function useUserData(userId: string) { - // hook implementation -} - -// web/src/hooks/useLocalStorage.ts -export function useLocalStorage(key: string, initialValue: T) { - // hook implementation -} -``` - -### 8. Icon Usage - -**ONLY use icons from the `web/src/icons` directory. Do NOT use icons from `react-icons`, `lucide`, or other external libraries.** - -**Reason:** We have a very carefully curated selection of icons that match our Onyx guidelines. We do NOT want to muddy those up with different aesthetic stylings. - -```typescript -// ✅ Good -import SvgX from "@/icons/x"; -import SvgMoreHorizontal from "@/icons/more-horizontal"; - -// ❌ Bad -import { User } from "lucide-react"; -import { FiSearch } from "react-icons/fi"; -``` - -**Missing Icons**: If an icon is needed but doesn't exist in the `web/src/icons` directory, import it from Figma using the Figma MCP tool and add it to the icons directory. -If you need help with this step, reach out to `raunak@onyx.app`. - -### 9. Text Rendering - -**Prefer using the `refresh-components/texts/Text` component for all text rendering. Avoid "naked" text nodes.** - -**Reason:** The `Text` component is fully compliant with the stylings provided in Figma. It provides easy utilities to specify the text-colour and font-size in the form of flags. Super duper easy. - -```typescript -// ✅ Good -import { Text } from '@/refresh-components/texts/Text' - -function UserCard({ name }: { name: string }) { - return ( - - {name} - - ) -} - -// ❌ Bad -function UserCard({ name }: { name: string }) { - return ( -
-

<div>{name}</div>

-

<div>User details</div>

-
- ) -} -``` - -### 10. Component Usage - -**Heavily avoid raw HTML input components. Always use components from the `web/src/refresh-components` or `web/lib/opal/src` directory.** - -**Reason:** We've put in a lot of effort to unify the components that are rendered in the Onyx app. Using raw components breaks the entire UI of the application, and leaves it in a muddier state than before. - -```typescript -// ✅ Good -import Button from '@/refresh-components/buttons/Button' -import InputTypeIn from '@/refresh-components/inputs/InputTypeIn' -import SvgPlusCircle from '@/icons/plus-circle' - -function ContactForm() { - return ( -
- - - - ) -} - -// ❌ Bad -function ContactForm() { - return ( -
- -