diff --git a/.github/.linkspector.yml b/.github/.linkspector.yml index cd052c53b251e..50e9359f51523 100644 --- a/.github/.linkspector.yml +++ b/.github/.linkspector.yml @@ -27,5 +27,7 @@ ignorePatterns: - pattern: "splunk.com" - pattern: "stackoverflow.com/questions" - pattern: "developer.hashicorp.com/terraform/language" + - pattern: "platform.openai.com" + - pattern: "api.openai.com" aliveStatusCodes: - 200 diff --git a/.github/actions/setup-go/action.yaml b/.github/actions/setup-go/action.yaml index 097a1b6cfd119..02b54830cdf61 100644 --- a/.github/actions/setup-go/action.yaml +++ b/.github/actions/setup-go/action.yaml @@ -4,7 +4,7 @@ description: | inputs: version: description: "The Go version to use." - default: "1.24.6" + default: "1.24.10" use-preinstalled-go: description: "Whether to use preinstalled Go." default: "false" diff --git a/.github/actions/setup-sqlc/action.yaml b/.github/actions/setup-sqlc/action.yaml index c123cb8cc3156..8e1cf8c50f4db 100644 --- a/.github/actions/setup-sqlc/action.yaml +++ b/.github/actions/setup-sqlc/action.yaml @@ -5,6 +5,13 @@ runs: using: "composite" steps: - name: Setup sqlc - uses: sqlc-dev/setup-sqlc@c0209b9199cd1cce6a14fc27cabcec491b651761 # v4.0.0 - with: - sqlc-version: "1.27.0" + # uses: sqlc-dev/setup-sqlc@c0209b9199cd1cce6a14fc27cabcec491b651761 # v4.0.0 + # with: + # sqlc-version: "1.30.0" + + # Switched to coder/sqlc fork to fix ambiguous column bug, see: + # - https://github.com/coder/sqlc/pull/1 + # - https://github.com/sqlc-dev/sqlc/pull/4159 + shell: bash + run: | + CGO_ENABLED=1 go install github.com/coder/sqlc/cmd/sqlc@aab4e865a51df0c43e1839f81a9d349b41d14f05 diff --git a/.github/actions/setup-tf/action.yaml b/.github/actions/setup-tf/action.yaml index 6f8c8c32cf38c..f79618834d9a1 100644 --- a/.github/actions/setup-tf/action.yaml +++ b/.github/actions/setup-tf/action.yaml @@ -7,5 +7,5 @@ runs: - name: Install Terraform uses: hashicorp/setup-terraform@b9cd54a3c349d3f38e8881555d616ced269862dd # v3.1.2 with: - 
terraform_version: 1.13.0 + terraform_version: 1.13.4 terraform_wrapper: false diff --git a/.github/fly-wsproxies/sao-paulo-coder.toml b/.github/fly-wsproxies/sao-paulo-coder.toml deleted file mode 100644 index b6c9b964631ef..0000000000000 --- a/.github/fly-wsproxies/sao-paulo-coder.toml +++ /dev/null @@ -1,34 +0,0 @@ -app = "sao-paulo-coder" -primary_region = "gru" - -[experimental] - entrypoint = ["/bin/sh", "-c", "CODER_DERP_SERVER_RELAY_URL=\"http://[${FLY_PRIVATE_IP}]:3000\" /opt/coder wsproxy server"] - auto_rollback = true - -[build] - image = "ghcr.io/coder/coder-preview:main" - -[env] - CODER_ACCESS_URL = "https://sao-paulo.fly.dev.coder.com" - CODER_HTTP_ADDRESS = "0.0.0.0:3000" - CODER_PRIMARY_ACCESS_URL = "https://dev.coder.com" - CODER_WILDCARD_ACCESS_URL = "*--apps.sao-paulo.fly.dev.coder.com" - CODER_VERBOSE = "true" - -[http_service] - internal_port = 3000 - force_https = true - auto_stop_machines = true - auto_start_machines = true - min_machines_running = 0 - -# Ref: https://fly.io/docs/reference/configuration/#http_service-concurrency -[http_service.concurrency] - type = "requests" - soft_limit = 50 - hard_limit = 100 - -[[vm]] - cpu_kind = "shared" - cpus = 2 - memory_mb = 512 diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml index 96d0ce23953cf..5fd91050f3032 100644 --- a/.github/workflows/ci.yaml +++ b/.github/workflows/ci.yaml @@ -35,7 +35,7 @@ jobs: tailnet-integration: ${{ steps.filter.outputs.tailnet-integration }} steps: - name: Harden Runner - uses: step-security/harden-runner@f4a75cfd619ee5ce8d5b864b0d183aff3c69b55a # v2.13.1 + uses: step-security/harden-runner@95d9a5deda9de15063e7595e9719c11c38c90ae2 # v2.13.2 with: egress-policy: audit @@ -157,7 +157,7 @@ jobs: runs-on: ${{ github.repository_owner == 'coder' && 'depot-ubuntu-22.04-8' || 'ubuntu-latest' }} steps: - name: Harden Runner - uses: step-security/harden-runner@f4a75cfd619ee5ce8d5b864b0d183aff3c69b55a # v2.13.1 + uses: 
step-security/harden-runner@95d9a5deda9de15063e7595e9719c11c38c90ae2 # v2.13.2 with: egress-policy: audit @@ -191,7 +191,7 @@ jobs: # Check for any typos - name: Check for typos - uses: crate-ci/typos@80c8a4945eec0f6d464eaf9e65ed98ef085283d1 # v1.38.1 + uses: crate-ci/typos@626c4bedb751ce0b7f03262ca97ddda9a076ae1c # v1.39.2 with: config: .github/workflows/typos.toml @@ -230,12 +230,12 @@ jobs: shell: bash gen: - timeout-minutes: 8 + timeout-minutes: 20 runs-on: ${{ github.repository_owner == 'coder' && 'depot-ubuntu-22.04-8' || 'ubuntu-latest' }} if: ${{ !cancelled() }} steps: - name: Harden Runner - uses: step-security/harden-runner@f4a75cfd619ee5ce8d5b864b0d183aff3c69b55a # v2.13.1 + uses: step-security/harden-runner@95d9a5deda9de15063e7595e9719c11c38c90ae2 # v2.13.2 with: egress-policy: audit @@ -271,6 +271,7 @@ jobs: popd - name: make gen + timeout-minutes: 8 run: | # Remove golden files to detect discrepancy in generated files. make clean/golden-files @@ -288,10 +289,10 @@ jobs: needs: changes if: needs.changes.outputs.offlinedocs-only == 'false' || needs.changes.outputs.ci == 'true' || github.ref == 'refs/heads/main' runs-on: ${{ github.repository_owner == 'coder' && 'depot-ubuntu-22.04-8' || 'ubuntu-latest' }} - timeout-minutes: 7 + timeout-minutes: 20 steps: - name: Harden Runner - uses: step-security/harden-runner@f4a75cfd619ee5ce8d5b864b0d183aff3c69b55a # v2.13.1 + uses: step-security/harden-runner@95d9a5deda9de15063e7595e9719c11c38c90ae2 # v2.13.2 with: egress-policy: audit @@ -315,6 +316,7 @@ jobs: run: go install mvdan.cc/sh/v3/cmd/shfmt@v3.7.0 - name: make fmt + timeout-minutes: 7 run: | PATH="${PATH}:$(go env GOPATH)/bin" \ make --output-sync -j -B fmt @@ -341,7 +343,7 @@ jobs: - windows-2022 steps: - name: Harden Runner - uses: step-security/harden-runner@f4a75cfd619ee5ce8d5b864b0d183aff3c69b55a # v2.13.1 + uses: step-security/harden-runner@95d9a5deda9de15063e7595e9719c11c38c90ae2 # v2.13.2 with: egress-policy: audit @@ -530,7 +532,7 @@ jobs: 
timeout-minutes: 25 steps: - name: Harden Runner - uses: step-security/harden-runner@f4a75cfd619ee5ce8d5b864b0d183aff3c69b55a # v2.13.1 + uses: step-security/harden-runner@95d9a5deda9de15063e7595e9719c11c38c90ae2 # v2.13.2 with: egress-policy: audit @@ -579,7 +581,7 @@ jobs: timeout-minutes: 25 steps: - name: Harden Runner - uses: step-security/harden-runner@f4a75cfd619ee5ce8d5b864b0d183aff3c69b55a # v2.13.1 + uses: step-security/harden-runner@95d9a5deda9de15063e7595e9719c11c38c90ae2 # v2.13.2 with: egress-policy: audit @@ -639,7 +641,7 @@ jobs: timeout-minutes: 20 steps: - name: Harden Runner - uses: step-security/harden-runner@f4a75cfd619ee5ce8d5b864b0d183aff3c69b55a # v2.13.1 + uses: step-security/harden-runner@95d9a5deda9de15063e7595e9719c11c38c90ae2 # v2.13.2 with: egress-policy: audit @@ -666,7 +668,7 @@ jobs: timeout-minutes: 20 steps: - name: Harden Runner - uses: step-security/harden-runner@f4a75cfd619ee5ce8d5b864b0d183aff3c69b55a # v2.13.1 + uses: step-security/harden-runner@95d9a5deda9de15063e7595e9719c11c38c90ae2 # v2.13.2 with: egress-policy: audit @@ -699,7 +701,7 @@ jobs: name: ${{ matrix.variant.name }} steps: - name: Harden Runner - uses: step-security/harden-runner@f4a75cfd619ee5ce8d5b864b0d183aff3c69b55a # v2.13.1 + uses: step-security/harden-runner@95d9a5deda9de15063e7595e9719c11c38c90ae2 # v2.13.2 with: egress-policy: audit @@ -754,6 +756,14 @@ jobs: path: ./site/test-results/**/*.webm retention-days: 7 + - name: Upload debug log + if: always() && github.actor != 'dependabot[bot]' && runner.os == 'Linux' && !github.event.pull_request.head.repo.fork + uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # v5.0.0 + with: + name: coderd-debug-logs${{ matrix.variant.premium && '-premium' || '' }} + path: ./site/e2e/test-results/debug.log + retention-days: 7 + - name: Upload pprof dumps if: always() && github.actor != 'dependabot[bot]' && runner.os == 'Linux' && !github.event.pull_request.head.repo.fork uses: 
actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # v5.0.0 @@ -771,7 +781,7 @@ jobs: if: needs.changes.outputs.site == 'true' || needs.changes.outputs.ci == 'true' steps: - name: Harden Runner - uses: step-security/harden-runner@f4a75cfd619ee5ce8d5b864b0d183aff3c69b55a # v2.13.1 + uses: step-security/harden-runner@95d9a5deda9de15063e7595e9719c11c38c90ae2 # v2.13.2 with: egress-policy: audit @@ -792,7 +802,7 @@ jobs: # the check to pass. This is desired in PRs, but not in mainline. - name: Publish to Chromatic (non-mainline) if: github.ref != 'refs/heads/main' && github.repository_owner == 'coder' - uses: chromaui/action@bc2d84ad2b60813a67d995c5582d696104a19383 # v13.3.2 + uses: chromaui/action@ac86f2ff0a458ffbce7b40698abd44c0fa34d4b6 # v13.3.3 env: NODE_OPTIONS: "--max_old_space_size=4096" STORYBOOK: true @@ -824,7 +834,7 @@ jobs: # infinitely "in progress" in mainline unless we re-review each build. - name: Publish to Chromatic (mainline) if: github.ref == 'refs/heads/main' && github.repository_owner == 'coder' - uses: chromaui/action@bc2d84ad2b60813a67d995c5582d696104a19383 # v13.3.2 + uses: chromaui/action@ac86f2ff0a458ffbce7b40698abd44c0fa34d4b6 # v13.3.3 env: NODE_OPTIONS: "--max_old_space_size=4096" STORYBOOK: true @@ -852,7 +862,7 @@ jobs: steps: - name: Harden Runner - uses: step-security/harden-runner@f4a75cfd619ee5ce8d5b864b0d183aff3c69b55a # v2.13.1 + uses: step-security/harden-runner@95d9a5deda9de15063e7595e9719c11c38c90ae2 # v2.13.2 with: egress-policy: audit @@ -923,7 +933,7 @@ jobs: if: always() steps: - name: Harden Runner - uses: step-security/harden-runner@f4a75cfd619ee5ce8d5b864b0d183aff3c69b55a # v2.13.1 + uses: step-security/harden-runner@95d9a5deda9de15063e7595e9719c11c38c90ae2 # v2.13.2 with: egress-policy: audit @@ -1043,7 +1053,7 @@ jobs: runs-on: ${{ github.repository_owner == 'coder' && 'depot-ubuntu-22.04-8' || 'ubuntu-latest' }} steps: - name: Harden Runner - uses: 
step-security/harden-runner@f4a75cfd619ee5ce8d5b864b0d183aff3c69b55a # v2.13.1 + uses: step-security/harden-runner@95d9a5deda9de15063e7595e9719c11c38c90ae2 # v2.13.2 with: egress-policy: audit @@ -1098,7 +1108,7 @@ jobs: IMAGE: ghcr.io/coder/coder-preview:${{ steps.build-docker.outputs.tag }} steps: - name: Harden Runner - uses: step-security/harden-runner@f4a75cfd619ee5ce8d5b864b0d183aff3c69b55a # v2.13.1 + uses: step-security/harden-runner@95d9a5deda9de15063e7595e9719c11c38c90ae2 # v2.13.2 with: egress-policy: audit @@ -1495,7 +1505,7 @@ jobs: if: needs.changes.outputs.db == 'true' || needs.changes.outputs.ci == 'true' || github.ref == 'refs/heads/main' steps: - name: Harden Runner - uses: step-security/harden-runner@f4a75cfd619ee5ce8d5b864b0d183aff3c69b55a # v2.13.1 + uses: step-security/harden-runner@95d9a5deda9de15063e7595e9719c11c38c90ae2 # v2.13.2 with: egress-policy: audit diff --git a/.github/workflows/deploy.yaml b/.github/workflows/deploy.yaml index 30d9e384149fa..e4211fc8973aa 100644 --- a/.github/workflows/deploy.yaml +++ b/.github/workflows/deploy.yaml @@ -36,7 +36,7 @@ jobs: verdict: ${{ steps.check.outputs.verdict }} # DEPLOY or NOOP steps: - name: Harden Runner - uses: step-security/harden-runner@f4a75cfd619ee5ce8d5b864b0d183aff3c69b55a # v2.13.1 + uses: step-security/harden-runner@95d9a5deda9de15063e7595e9719c11c38c90ae2 # v2.13.2 with: egress-policy: audit @@ -65,7 +65,7 @@ jobs: packages: write # to retag image as dogfood steps: - name: Harden Runner - uses: step-security/harden-runner@f4a75cfd619ee5ce8d5b864b0d183aff3c69b55a # v2.13.1 + uses: step-security/harden-runner@95d9a5deda9de15063e7595e9719c11c38c90ae2 # v2.13.2 with: egress-policy: audit @@ -92,7 +92,7 @@ jobs: uses: google-github-actions/setup-gcloud@aa5489c8933f4cc7a4f7d45035b3b1440c9c10db # v3.0.1 - name: Set up Flux CLI - uses: fluxcd/flux2/action@4a15fa6a023259353ef750acf1c98fe88407d4d0 # v2.7.2 + uses: fluxcd/flux2/action@b6e76ca2534f76dcb8dd94fb057cdfa923c3b641 # v2.7.3 with: # 
Keep this and the github action up to date with the version of flux installed in dogfood cluster version: "2.7.0" @@ -146,7 +146,7 @@ jobs: needs: deploy steps: - name: Harden Runner - uses: step-security/harden-runner@f4a75cfd619ee5ce8d5b864b0d183aff3c69b55a # v2.13.1 + uses: step-security/harden-runner@95d9a5deda9de15063e7595e9719c11c38c90ae2 # v2.13.2 with: egress-policy: audit @@ -163,12 +163,10 @@ jobs: run: | flyctl deploy --image "$IMAGE" --app paris-coder --config ./.github/fly-wsproxies/paris-coder.toml --env "CODER_PROXY_SESSION_TOKEN=$TOKEN_PARIS" --yes flyctl deploy --image "$IMAGE" --app sydney-coder --config ./.github/fly-wsproxies/sydney-coder.toml --env "CODER_PROXY_SESSION_TOKEN=$TOKEN_SYDNEY" --yes - flyctl deploy --image "$IMAGE" --app sao-paulo-coder --config ./.github/fly-wsproxies/sao-paulo-coder.toml --env "CODER_PROXY_SESSION_TOKEN=$TOKEN_SAO_PAULO" --yes flyctl deploy --image "$IMAGE" --app jnb-coder --config ./.github/fly-wsproxies/jnb-coder.toml --env "CODER_PROXY_SESSION_TOKEN=$TOKEN_JNB" --yes env: FLY_API_TOKEN: ${{ secrets.FLY_API_TOKEN }} IMAGE: ${{ inputs.image }} TOKEN_PARIS: ${{ secrets.FLY_PARIS_CODER_PROXY_SESSION_TOKEN }} TOKEN_SYDNEY: ${{ secrets.FLY_SYDNEY_CODER_PROXY_SESSION_TOKEN }} - TOKEN_SAO_PAULO: ${{ secrets.FLY_SAO_PAULO_CODER_PROXY_SESSION_TOKEN }} TOKEN_JNB: ${{ secrets.FLY_JNB_CODER_PROXY_SESSION_TOKEN }} diff --git a/.github/workflows/doc-check.yaml b/.github/workflows/doc-check.yaml new file mode 100644 index 0000000000000..dde36f45e2080 --- /dev/null +++ b/.github/workflows/doc-check.yaml @@ -0,0 +1,205 @@ +# This workflow checks if a PR requires documentation updates. +# It creates a Coder Task that uses AI to analyze the PR changes, +# search existing docs, and comment with recommendations. +# +# Triggered by: Adding the "doc-check" label to a PR, or manual dispatch. 
+ +name: AI Documentation Check + +on: + pull_request: + types: + - labeled + workflow_dispatch: + inputs: + pr_url: + description: "Pull Request URL to check" + required: true + type: string + template_preset: + description: "Template preset to use" + required: false + default: "" + type: string + +jobs: + doc-check: + name: Analyze PR for Documentation Updates Needed + runs-on: ubuntu-latest + if: | + (github.event.label.name == 'doc-check' || github.event_name == 'workflow_dispatch') && + (github.event.pull_request.draft == false || github.event_name == 'workflow_dispatch') + timeout-minutes: 30 + env: + CODER_URL: ${{ secrets.DOC_CHECK_CODER_URL }} + CODER_SESSION_TOKEN: ${{ secrets.DOC_CHECK_CODER_SESSION_TOKEN }} + permissions: + contents: read + pull-requests: write + actions: write + + steps: + - name: Determine PR Context + id: determine-context + env: + GITHUB_ACTOR: ${{ github.actor }} + GITHUB_EVENT_NAME: ${{ github.event_name }} + GITHUB_EVENT_PR_HTML_URL: ${{ github.event.pull_request.html_url }} + GITHUB_EVENT_PR_NUMBER: ${{ github.event.pull_request.number }} + GITHUB_EVENT_SENDER_ID: ${{ github.event.sender.id }} + GITHUB_EVENT_SENDER_LOGIN: ${{ github.event.sender.login }} + INPUTS_PR_URL: ${{ inputs.pr_url }} + INPUTS_TEMPLATE_PRESET: ${{ inputs.template_preset || '' }} + GH_TOKEN: ${{ github.token }} + run: | + echo "Using template preset: ${INPUTS_TEMPLATE_PRESET}" + echo "template_preset=${INPUTS_TEMPLATE_PRESET}" >> "${GITHUB_OUTPUT}" + + # For workflow_dispatch, use the provided PR URL + if [[ "${GITHUB_EVENT_NAME}" == "workflow_dispatch" ]]; then + if ! 
GITHUB_USER_ID=$(gh api "users/${GITHUB_ACTOR}" --jq '.id'); then + echo "::error::Failed to get GitHub user ID for actor ${GITHUB_ACTOR}" + exit 1 + fi + echo "Using workflow_dispatch actor: ${GITHUB_ACTOR} (ID: ${GITHUB_USER_ID})" + echo "github_user_id=${GITHUB_USER_ID}" >> "${GITHUB_OUTPUT}" + echo "github_username=${GITHUB_ACTOR}" >> "${GITHUB_OUTPUT}" + + echo "Using PR URL: ${INPUTS_PR_URL}" + # Convert /pull/ to /issues/ for create-task-action compatibility + ISSUE_URL="${INPUTS_PR_URL/\/pull\//\/issues\/}" + echo "pr_url=${ISSUE_URL}" >> "${GITHUB_OUTPUT}" + + # Extract PR number from URL for later use + PR_NUMBER=$(echo "${INPUTS_PR_URL}" | grep -oP '(?<=pull/)\d+') + echo "pr_number=${PR_NUMBER}" >> "${GITHUB_OUTPUT}" + + elif [[ "${GITHUB_EVENT_NAME}" == "pull_request" ]]; then + GITHUB_USER_ID=${GITHUB_EVENT_SENDER_ID} + echo "Using label adder: ${GITHUB_EVENT_SENDER_LOGIN} (ID: ${GITHUB_USER_ID})" + echo "github_user_id=${GITHUB_USER_ID}" >> "${GITHUB_OUTPUT}" + echo "github_username=${GITHUB_EVENT_SENDER_LOGIN}" >> "${GITHUB_OUTPUT}" + + echo "Using PR URL: ${GITHUB_EVENT_PR_HTML_URL}" + # Convert /pull/ to /issues/ for create-task-action compatibility + ISSUE_URL="${GITHUB_EVENT_PR_HTML_URL/\/pull\//\/issues\/}" + echo "pr_url=${ISSUE_URL}" >> "${GITHUB_OUTPUT}" + echo "pr_number=${GITHUB_EVENT_PR_NUMBER}" >> "${GITHUB_OUTPUT}" + + else + echo "::error::Unsupported event type: ${GITHUB_EVENT_NAME}" + exit 1 + fi + + - name: Extract changed files and build prompt + id: extract-context + env: + PR_URL: ${{ steps.determine-context.outputs.pr_url }} + PR_NUMBER: ${{ steps.determine-context.outputs.pr_number }} + GH_TOKEN: ${{ github.token }} + run: | + echo "Analyzing PR #${PR_NUMBER}" + + # Build task prompt - using unquoted heredoc so variables expand + TASK_PROMPT=$(cat <> "${GITHUB_OUTPUT}" + + - name: Checkout create-task-action + uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0 + with: + fetch-depth: 1 + path: 
./.github/actions/create-task-action + persist-credentials: false + ref: main + repository: coder/create-task-action + + - name: Create Coder Task for Documentation Check + id: create_task + uses: ./.github/actions/create-task-action + with: + coder-url: ${{ secrets.DOC_CHECK_CODER_URL }} + coder-token: ${{ secrets.DOC_CHECK_CODER_SESSION_TOKEN }} + coder-organization: "default" + coder-template-name: coder + coder-template-preset: ${{ steps.determine-context.outputs.template_preset }} + coder-task-name-prefix: doc-check + coder-task-prompt: ${{ steps.extract-context.outputs.task_prompt }} + github-user-id: ${{ steps.determine-context.outputs.github_user_id }} + github-token: ${{ github.token }} + github-issue-url: ${{ steps.determine-context.outputs.pr_url }} + comment-on-issue: true + + - name: Write outputs + env: + TASK_CREATED: ${{ steps.create_task.outputs.task-created }} + TASK_NAME: ${{ steps.create_task.outputs.task-name }} + TASK_URL: ${{ steps.create_task.outputs.task-url }} + PR_URL: ${{ steps.determine-context.outputs.pr_url }} + run: | + { + echo "## Documentation Check Task" + echo "" + echo "**PR:** ${PR_URL}" + echo "**Task created:** ${TASK_CREATED}" + echo "**Task name:** ${TASK_NAME}" + echo "**Task URL:** ${TASK_URL}" + echo "" + echo "The Coder task is analyzing the PR changes and will comment with documentation recommendations." 
+ } >> "${GITHUB_STEP_SUMMARY}" diff --git a/.github/workflows/docker-base.yaml b/.github/workflows/docker-base.yaml index 2998aae1b5a79..46a6998fce13c 100644 --- a/.github/workflows/docker-base.yaml +++ b/.github/workflows/docker-base.yaml @@ -38,7 +38,7 @@ jobs: if: github.repository_owner == 'coder' steps: - name: Harden Runner - uses: step-security/harden-runner@f4a75cfd619ee5ce8d5b864b0d183aff3c69b55a # v2.13.1 + uses: step-security/harden-runner@95d9a5deda9de15063e7595e9719c11c38c90ae2 # v2.13.2 with: egress-policy: audit diff --git a/.github/workflows/docs-ci.yaml b/.github/workflows/docs-ci.yaml index 749bdce9b25c3..1d1c510c66dad 100644 --- a/.github/workflows/docs-ci.yaml +++ b/.github/workflows/docs-ci.yaml @@ -30,7 +30,7 @@ jobs: - name: Setup Node uses: ./.github/actions/setup-node - - uses: tj-actions/changed-files@dbf178ceecb9304128c8e0648591d71208c6e2c9 # v45.0.7 + - uses: tj-actions/changed-files@70069877f29101175ed2b055d210fe8b1d54d7d7 # v45.0.7 id: changed-files with: files: | diff --git a/.github/workflows/dogfood.yaml b/.github/workflows/dogfood.yaml index 2f47132ae43f0..752641dbf0616 100644 --- a/.github/workflows/dogfood.yaml +++ b/.github/workflows/dogfood.yaml @@ -26,7 +26,7 @@ jobs: runs-on: ${{ github.repository_owner == 'coder' && 'depot-ubuntu-22.04-4' || 'ubuntu-latest' }} steps: - name: Harden Runner - uses: step-security/harden-runner@f4a75cfd619ee5ce8d5b864b0d183aff3c69b55a # v2.13.1 + uses: step-security/harden-runner@95d9a5deda9de15063e7595e9719c11c38c90ae2 # v2.13.2 with: egress-policy: audit @@ -40,7 +40,7 @@ jobs: with: # Pinning to 2.28 here, as Nix gets a "error: [json.exception.type_error.302] type must be array, but is string" # on version 2.29 and above. 
- nix_version: "2.28.4" + nix_version: "2.28.5" - uses: nix-community/cache-nix-action@135667ec418502fa5a3598af6fb9eb733888ce6a # v6.1.3 with: @@ -125,7 +125,7 @@ jobs: id-token: write steps: - name: Harden Runner - uses: step-security/harden-runner@f4a75cfd619ee5ce8d5b864b0d183aff3c69b55a # v2.13.1 + uses: step-security/harden-runner@95d9a5deda9de15063e7595e9719c11c38c90ae2 # v2.13.2 with: egress-policy: audit diff --git a/.github/workflows/nightly-gauntlet.yaml b/.github/workflows/nightly-gauntlet.yaml index f27e703800086..1cd13d79d84a4 100644 --- a/.github/workflows/nightly-gauntlet.yaml +++ b/.github/workflows/nightly-gauntlet.yaml @@ -27,7 +27,7 @@ jobs: - windows-2022 steps: - name: Harden Runner - uses: step-security/harden-runner@f4a75cfd619ee5ce8d5b864b0d183aff3c69b55a # v2.13.1 + uses: step-security/harden-runner@95d9a5deda9de15063e7595e9719c11c38c90ae2 # v2.13.2 with: egress-policy: audit diff --git a/.github/workflows/pr-auto-assign.yaml b/.github/workflows/pr-auto-assign.yaml index 6a7370bcf78ef..6da81f35e1237 100644 --- a/.github/workflows/pr-auto-assign.yaml +++ b/.github/workflows/pr-auto-assign.yaml @@ -15,7 +15,7 @@ jobs: runs-on: ubuntu-latest steps: - name: Harden Runner - uses: step-security/harden-runner@f4a75cfd619ee5ce8d5b864b0d183aff3c69b55a # v2.13.1 + uses: step-security/harden-runner@95d9a5deda9de15063e7595e9719c11c38c90ae2 # v2.13.2 with: egress-policy: audit diff --git a/.github/workflows/pr-cleanup.yaml b/.github/workflows/pr-cleanup.yaml index 22f2dd02c70e0..cfcd997377b0e 100644 --- a/.github/workflows/pr-cleanup.yaml +++ b/.github/workflows/pr-cleanup.yaml @@ -19,7 +19,7 @@ jobs: packages: write steps: - name: Harden Runner - uses: step-security/harden-runner@f4a75cfd619ee5ce8d5b864b0d183aff3c69b55a # v2.13.1 + uses: step-security/harden-runner@95d9a5deda9de15063e7595e9719c11c38c90ae2 # v2.13.2 with: egress-policy: audit diff --git a/.github/workflows/pr-deploy.yaml b/.github/workflows/pr-deploy.yaml index 
eb0eb296923c3..d45e97cc9c41c 100644 --- a/.github/workflows/pr-deploy.yaml +++ b/.github/workflows/pr-deploy.yaml @@ -39,7 +39,7 @@ jobs: PR_OPEN: ${{ steps.check_pr.outputs.pr_open }} steps: - name: Harden Runner - uses: step-security/harden-runner@f4a75cfd619ee5ce8d5b864b0d183aff3c69b55a # v2.13.1 + uses: step-security/harden-runner@95d9a5deda9de15063e7595e9719c11c38c90ae2 # v2.13.2 with: egress-policy: audit @@ -76,7 +76,7 @@ jobs: runs-on: "ubuntu-latest" steps: - name: Harden Runner - uses: step-security/harden-runner@f4a75cfd619ee5ce8d5b864b0d183aff3c69b55a # v2.13.1 + uses: step-security/harden-runner@95d9a5deda9de15063e7595e9719c11c38c90ae2 # v2.13.2 with: egress-policy: audit @@ -184,7 +184,7 @@ jobs: pull-requests: write # needed for commenting on PRs steps: - name: Harden Runner - uses: step-security/harden-runner@f4a75cfd619ee5ce8d5b864b0d183aff3c69b55a # v2.13.1 + uses: step-security/harden-runner@95d9a5deda9de15063e7595e9719c11c38c90ae2 # v2.13.2 with: egress-policy: audit @@ -228,7 +228,7 @@ jobs: CODER_IMAGE_TAG: ${{ needs.get_info.outputs.CODER_IMAGE_TAG }} steps: - name: Harden Runner - uses: step-security/harden-runner@f4a75cfd619ee5ce8d5b864b0d183aff3c69b55a # v2.13.1 + uses: step-security/harden-runner@95d9a5deda9de15063e7595e9719c11c38c90ae2 # v2.13.2 with: egress-policy: audit @@ -288,7 +288,7 @@ jobs: PR_HOSTNAME: "pr${{ needs.get_info.outputs.PR_NUMBER }}.${{ secrets.PR_DEPLOYMENTS_DOMAIN }}" steps: - name: Harden Runner - uses: step-security/harden-runner@f4a75cfd619ee5ce8d5b864b0d183aff3c69b55a # v2.13.1 + uses: step-security/harden-runner@95d9a5deda9de15063e7595e9719c11c38c90ae2 # v2.13.2 with: egress-policy: audit diff --git a/.github/workflows/release-validation.yaml b/.github/workflows/release-validation.yaml index 41ac3ee179f12..ada3297f81620 100644 --- a/.github/workflows/release-validation.yaml +++ b/.github/workflows/release-validation.yaml @@ -14,7 +14,7 @@ jobs: steps: - name: Harden Runner - uses: 
step-security/harden-runner@f4a75cfd619ee5ce8d5b864b0d183aff3c69b55a # v2.13.1 + uses: step-security/harden-runner@95d9a5deda9de15063e7595e9719c11c38c90ae2 # v2.13.2 with: egress-policy: audit diff --git a/.github/workflows/release.yaml b/.github/workflows/release.yaml index 7c06701836714..be1b56f07256c 100644 --- a/.github/workflows/release.yaml +++ b/.github/workflows/release.yaml @@ -164,7 +164,7 @@ jobs: version: ${{ steps.version.outputs.version }} steps: - name: Harden Runner - uses: step-security/harden-runner@f4a75cfd619ee5ce8d5b864b0d183aff3c69b55a # v2.13.1 + uses: step-security/harden-runner@95d9a5deda9de15063e7595e9719c11c38c90ae2 # v2.13.2 with: egress-policy: audit @@ -785,7 +785,7 @@ jobs: - name: Send repository-dispatch event if: ${{ !inputs.dry_run }} - uses: peter-evans/repository-dispatch@5fc4efd1a4797ddb68ffd0714a238564e4cc0e6f # v4.0.0 + uses: peter-evans/repository-dispatch@28959ce8df70de7be546dd1250a005dd32156697 # v4.0.1 with: token: ${{ secrets.CDRCI_GITHUB_TOKEN }} repository: coder/packages @@ -802,7 +802,7 @@ jobs: # TODO: skip this if it's not a new release (i.e. a backport). This is # fine right now because it just makes a PR that we can close. 
- name: Harden Runner - uses: step-security/harden-runner@f4a75cfd619ee5ce8d5b864b0d183aff3c69b55a # v2.13.1 + uses: step-security/harden-runner@95d9a5deda9de15063e7595e9719c11c38c90ae2 # v2.13.2 with: egress-policy: audit @@ -878,7 +878,7 @@ jobs: steps: - name: Harden Runner - uses: step-security/harden-runner@f4a75cfd619ee5ce8d5b864b0d183aff3c69b55a # v2.13.1 + uses: step-security/harden-runner@95d9a5deda9de15063e7595e9719c11c38c90ae2 # v2.13.2 with: egress-policy: audit @@ -971,7 +971,7 @@ jobs: if: ${{ !inputs.dry_run }} steps: - name: Harden Runner - uses: step-security/harden-runner@f4a75cfd619ee5ce8d5b864b0d183aff3c69b55a # v2.13.1 + uses: step-security/harden-runner@95d9a5deda9de15063e7595e9719c11c38c90ae2 # v2.13.2 with: egress-policy: audit diff --git a/.github/workflows/scorecard.yml b/.github/workflows/scorecard.yml index c18b2d09a8233..392311ad7f7ef 100644 --- a/.github/workflows/scorecard.yml +++ b/.github/workflows/scorecard.yml @@ -20,7 +20,7 @@ jobs: steps: - name: Harden Runner - uses: step-security/harden-runner@f4a75cfd619ee5ce8d5b864b0d183aff3c69b55a # v2.13.1 + uses: step-security/harden-runner@95d9a5deda9de15063e7595e9719c11c38c90ae2 # v2.13.2 with: egress-policy: audit @@ -47,6 +47,6 @@ jobs: # Upload the results to GitHub's code scanning dashboard. 
- name: "Upload to code-scanning" - uses: github/codeql-action/upload-sarif@4e94bd11f71e507f7f87df81788dff88d1dacbfb # v3.29.5 + uses: github/codeql-action/upload-sarif@014f16e7ab1402f30e7c3329d33797e7948572db # v3.29.5 with: sarif_file: results.sarif diff --git a/.github/workflows/security.yaml b/.github/workflows/security.yaml index 21452b0b89f6f..e40d828c51d5a 100644 --- a/.github/workflows/security.yaml +++ b/.github/workflows/security.yaml @@ -27,7 +27,7 @@ jobs: runs-on: ${{ github.repository_owner == 'coder' && 'depot-ubuntu-22.04-8' || 'ubuntu-latest' }} steps: - name: Harden Runner - uses: step-security/harden-runner@f4a75cfd619ee5ce8d5b864b0d183aff3c69b55a # v2.13.1 + uses: step-security/harden-runner@95d9a5deda9de15063e7595e9719c11c38c90ae2 # v2.13.2 with: egress-policy: audit @@ -40,7 +40,7 @@ jobs: uses: ./.github/actions/setup-go - name: Initialize CodeQL - uses: github/codeql-action/init@4e94bd11f71e507f7f87df81788dff88d1dacbfb # v3.29.5 + uses: github/codeql-action/init@014f16e7ab1402f30e7c3329d33797e7948572db # v3.29.5 with: languages: go, javascript @@ -50,7 +50,7 @@ jobs: rm Makefile - name: Perform CodeQL Analysis - uses: github/codeql-action/analyze@4e94bd11f71e507f7f87df81788dff88d1dacbfb # v3.29.5 + uses: github/codeql-action/analyze@014f16e7ab1402f30e7c3329d33797e7948572db # v3.29.5 - name: Send Slack notification on failure if: ${{ failure() }} @@ -69,7 +69,7 @@ jobs: runs-on: ${{ github.repository_owner == 'coder' && 'depot-ubuntu-22.04-8' || 'ubuntu-latest' }} steps: - name: Harden Runner - uses: step-security/harden-runner@f4a75cfd619ee5ce8d5b864b0d183aff3c69b55a # v2.13.1 + uses: step-security/harden-runner@95d9a5deda9de15063e7595e9719c11c38c90ae2 # v2.13.2 with: egress-policy: audit @@ -154,7 +154,7 @@ jobs: severity: "CRITICAL,HIGH" - name: Upload Trivy scan results to GitHub Security tab - uses: github/codeql-action/upload-sarif@4e94bd11f71e507f7f87df81788dff88d1dacbfb # v3.29.5 + uses: 
github/codeql-action/upload-sarif@014f16e7ab1402f30e7c3329d33797e7948572db # v3.29.5 with: sarif_file: trivy-results.sarif category: "Trivy" diff --git a/.github/workflows/stale.yaml b/.github/workflows/stale.yaml index 75fb201bd5753..957152634d156 100644 --- a/.github/workflows/stale.yaml +++ b/.github/workflows/stale.yaml @@ -18,7 +18,7 @@ jobs: pull-requests: write steps: - name: Harden Runner - uses: step-security/harden-runner@f4a75cfd619ee5ce8d5b864b0d183aff3c69b55a # v2.13.1 + uses: step-security/harden-runner@95d9a5deda9de15063e7595e9719c11c38c90ae2 # v2.13.2 with: egress-policy: audit @@ -96,7 +96,7 @@ jobs: contents: write steps: - name: Harden Runner - uses: step-security/harden-runner@f4a75cfd619ee5ce8d5b864b0d183aff3c69b55a # v2.13.1 + uses: step-security/harden-runner@95d9a5deda9de15063e7595e9719c11c38c90ae2 # v2.13.2 with: egress-policy: audit @@ -120,12 +120,12 @@ jobs: actions: write steps: - name: Harden Runner - uses: step-security/harden-runner@f4a75cfd619ee5ce8d5b864b0d183aff3c69b55a # v2.13.1 + uses: step-security/harden-runner@95d9a5deda9de15063e7595e9719c11c38c90ae2 # v2.13.2 with: egress-policy: audit - name: Delete PR Cleanup workflow runs - uses: Mattraks/delete-workflow-runs@ab482449ba468316e9a8801e092d0405715c5e6d # v2.1.0 + uses: Mattraks/delete-workflow-runs@5bf9a1dac5c4d041c029f0a8370ddf0c5cb5aeb7 # v2.1.0 with: token: ${{ github.token }} repository: ${{ github.repository }} @@ -134,7 +134,7 @@ jobs: delete_workflow_pattern: pr-cleanup.yaml - name: Delete PR Deploy workflow skipped runs - uses: Mattraks/delete-workflow-runs@ab482449ba468316e9a8801e092d0405715c5e6d # v2.1.0 + uses: Mattraks/delete-workflow-runs@5bf9a1dac5c4d041c029f0a8370ddf0c5cb5aeb7 # v2.1.0 with: token: ${{ github.token }} repository: ${{ github.repository }} diff --git a/.github/workflows/traiage.yaml b/.github/workflows/traiage.yaml index 8560af091d348..ec08069538161 100644 --- a/.github/workflows/traiage.yaml +++ b/.github/workflows/traiage.yaml @@ -17,8 +17,8 
@@ on: type: string template_preset: description: "Template preset to use" - required: true - default: "none" + required: false + default: "" type: string prefix: description: "Prefix for workspace name" @@ -67,7 +67,7 @@ jobs: GITHUB_EVENT_USER_LOGIN: ${{ github.event.sender.login }} INPUTS_ISSUE_URL: ${{ inputs.issue_url }} INPUTS_TEMPLATE_NAME: ${{ inputs.template_name || 'coder' }} - INPUTS_TEMPLATE_PRESET: ${{ inputs.template_preset || 'none'}} + INPUTS_TEMPLATE_PRESET: ${{ inputs.template_preset || ''}} INPUTS_PREFIX: ${{ inputs.prefix || 'traiage' }} GH_TOKEN: ${{ github.token }} run: | @@ -124,7 +124,7 @@ jobs: exit 1 fi - - name: Extract context key from issue + - name: Extract context key and description from issue id: extract-context env: ISSUE_URL: ${{ steps.determine-inputs.outputs.issue_url }} @@ -132,86 +132,59 @@ jobs: run: | issue_number="$(gh issue view "${ISSUE_URL}" --json number --jq '.number')" context_key="gh-${issue_number}" - echo "context_key=${context_key}" >> "${GITHUB_OUTPUT}" - echo "CONTEXT_KEY=${context_key}" >> "${GITHUB_ENV}" - - name: Download and install Coder binary - shell: bash - env: - CODER_URL: ${{ secrets.TRAIAGE_CODER_URL }} - run: | - if [ "${{ runner.arch }}" == "ARM64" ]; then - ARCH="arm64" - else - ARCH="amd64" - fi - mkdir -p "${HOME}/.local/bin" - curl -fsSL --compressed "$CODER_URL/bin/coder-linux-${ARCH}" -o "${HOME}/.local/bin/coder" - chmod +x "${HOME}/.local/bin/coder" - export PATH="$HOME/.local/bin:$PATH" - coder version - coder whoami - echo "$HOME/.local/bin" >> "${GITHUB_PATH}" - - - name: Get Coder username from GitHub actor - id: get-coder-username - env: - CODER_SESSION_TOKEN: ${{ secrets.TRAIAGE_CODER_SESSION_TOKEN }} - GH_TOKEN: ${{ github.token }} - GITHUB_USER_ID: ${{ steps.determine-inputs.outputs.github_user_id }} - run: | - user_json=$( - coder users list --github-user-id="${GITHUB_USER_ID}" --output=json + TASK_PROMPT=$(cat <> "${GITHUB_OUTPUT}" + + echo "context_key=${context_key}" >> 
"${GITHUB_OUTPUT}" + { + echo "TASK_PROMPT<> "${GITHUB_OUTPUT}" - name: Checkout repository uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0 with: + fetch-depth: 1 + path: ./.github/actions/create-task-action persist-credentials: false - fetch-depth: 0 + ref: main + repository: coder/create-task-action - # TODO(Cian): this is a good use-case for 'recipes' - - name: Create Coder task - id: create-task + - name: Create Coder Task + id: create_task + uses: ./.github/actions/create-task-action + with: + coder-url: ${{ secrets.TRAIAGE_CODER_URL }} + coder-token: ${{ secrets.TRAIAGE_CODER_SESSION_TOKEN }} + coder-organization: "default" + coder-template-name: coder + coder-template-preset: ${{ steps.determine-inputs.outputs.template_preset }} + coder-task-name-prefix: gh-coder + coder-task-prompt: ${{ steps.extract-context.outputs.task_prompt }} + github-user-id: ${{ steps.determine-inputs.outputs.github_user_id }} + github-token: ${{ github.token }} + github-issue-url: ${{ steps.determine-inputs.outputs.issue_url }} + comment-on-issue: ${{ startsWith(steps.determine-inputs.outputs.issue_url, format('{0}/{1}', github.server_url, github.repository)) }} + + - name: Write outputs env: - CODER_USERNAME: ${{ steps.get-coder-username.outputs.coder_username }} - CONTEXT_KEY: ${{ steps.extract-context.outputs.context_key }} - GH_TOKEN: ${{ github.token }} - GITHUB_REPOSITORY: ${{ github.repository }} - ISSUE_URL: ${{ steps.determine-inputs.outputs.issue_url }} - PREFIX: ${{ steps.determine-inputs.outputs.prefix }} - RUN_ID: ${{ github.run_id }} - TEMPLATE_NAME: ${{ steps.determine-inputs.outputs.template_name }} - TEMPLATE_PARAMETERS: ${{ secrets.TRAIAGE_TEMPLATE_PARAMETERS }} - TEMPLATE_PRESET: ${{ steps.determine-inputs.outputs.template_preset }} + TASK_CREATED: ${{ steps.create_task.outputs.task-created }} + TASK_NAME: ${{ steps.create_task.outputs.task-name }} + TASK_URL: ${{ steps.create_task.outputs.task-url }} run: | - # Fetch issue description 
using `gh` CLI - #shellcheck disable=SC2016 # The template string should not be subject to shell expansion - issue_description=$(gh issue view "${ISSUE_URL}" \ - --json 'title,body,comments' \ - --template '{{printf "%s\n\n%s\n\nComments:\n" .title .body}}{{range $k, $v := .comments}} - {{index $v.author "login"}}: {{printf "%s\n" $v.body}}{{end}}') - - # Write a prompt to PROMPT_FILE - PROMPT=$(cat <> "${GITHUB_OUTPUT}" - echo "TASK_NAME=${CODER_USERNAME}/${TASK_NAME}" >> "${GITHUB_ENV}" + { + echo "**Task created:** ${TASK_CREATED}" + echo "**Task name:** ${TASK_NAME}" + echo "**Task URL**: ${TASK_URL}" + } >> "${GITHUB_STEP_SUMMARY}" diff --git a/.github/workflows/typos.toml b/.github/workflows/typos.toml index 8b3f77c1ef566..9008a998a9001 100644 --- a/.github/workflows/typos.toml +++ b/.github/workflows/typos.toml @@ -9,6 +9,7 @@ IST = "IST" MacOS = "macOS" AKS = "AKS" O_WRONLY = "O_WRONLY" +AIBridge = "AI Bridge" [default.extend-words] AKS = "AKS" diff --git a/.github/workflows/weekly-docs.yaml b/.github/workflows/weekly-docs.yaml index a7ae448902d0c..511915814d64b 100644 --- a/.github/workflows/weekly-docs.yaml +++ b/.github/workflows/weekly-docs.yaml @@ -21,7 +21,7 @@ jobs: pull-requests: write # required to post PR review comments by the action steps: - name: Harden Runner - uses: step-security/harden-runner@f4a75cfd619ee5ce8d5b864b0d183aff3c69b55a # v2.13.1 + uses: step-security/harden-runner@95d9a5deda9de15063e7595e9719c11c38c90ae2 # v2.13.2 with: egress-policy: audit diff --git a/.gitignore b/.gitignore index 9b1edcec2d8f9..ffe941080f9de 100644 --- a/.gitignore +++ b/.gitignore @@ -91,3 +91,6 @@ __debug_bin* **/.claude/settings.local.json /.env + +# Ignore plans written by AI agents. +PLAN.md diff --git a/CODEOWNERS b/CODEOWNERS index a3889d27bf16d..b62ecfc96238a 100644 --- a/CODEOWNERS +++ b/CODEOWNERS @@ -27,3 +27,5 @@ coderd/schedule/autostop.go @deansheather @DanielleMaywood # well as guidance from revenue. 
coderd/usage/ @deansheather @spikecurtis enterprise/coderd/usage/ @deansheather @spikecurtis + +.github/ @jdomeracki-coder diff --git a/Makefile b/Makefile index 7f21f1fa6da04..4997430f9dd1b 100644 --- a/Makefile +++ b/Makefile @@ -636,16 +636,17 @@ TAILNETTEST_MOCKS := \ tailnet/tailnettest/subscriptionmock.go AIBRIDGED_MOCKS := \ - enterprise/x/aibridged/aibridgedmock/clientmock.go \ - enterprise/x/aibridged/aibridgedmock/poolmock.go + enterprise/aibridged/aibridgedmock/clientmock.go \ + enterprise/aibridged/aibridgedmock/poolmock.go GEN_FILES := \ tailnet/proto/tailnet.pb.go \ agent/proto/agent.pb.go \ + agent/agentsocket/proto/agentsocket.pb.go \ provisionersdk/proto/provisioner.pb.go \ provisionerd/proto/provisionerd.pb.go \ vpn/vpn.pb.go \ - enterprise/x/aibridged/proto/aibridged.pb.go \ + enterprise/aibridged/proto/aibridged.pb.go \ $(DB_GEN_FILES) \ $(SITE_GEN_FILES) \ coderd/rbac/object_gen.go \ @@ -696,8 +697,9 @@ gen/mark-fresh: agent/proto/agent.pb.go \ provisionersdk/proto/provisioner.pb.go \ provisionerd/proto/provisionerd.pb.go \ + agent/agentsocket/proto/agentsocket.pb.go \ vpn/vpn.pb.go \ - enterprise/x/aibridged/proto/aibridged.pb.go \ + enterprise/aibridged/proto/aibridged.pb.go \ coderd/database/dump.sql \ $(DB_GEN_FILES) \ site/src/api/typesGenerated.ts \ @@ -768,8 +770,8 @@ codersdk/workspacesdk/agentconnmock/agentconnmock.go: codersdk/workspacesdk/agen go generate ./codersdk/workspacesdk/agentconnmock/ touch "$@" -$(AIBRIDGED_MOCKS): enterprise/x/aibridged/client.go enterprise/x/aibridged/pool.go - go generate ./enterprise/x/aibridged/aibridgedmock/ +$(AIBRIDGED_MOCKS): enterprise/aibridged/client.go enterprise/aibridged/pool.go + go generate ./enterprise/aibridged/aibridgedmock/ touch "$@" agent/agentcontainers/dcspec/dcspec_gen.go: \ @@ -800,6 +802,14 @@ agent/proto/agent.pb.go: agent/proto/agent.proto --go-drpc_opt=paths=source_relative \ ./agent/proto/agent.proto +agent/agentsocket/proto/agentsocket.pb.go: 
agent/agentsocket/proto/agentsocket.proto + protoc \ + --go_out=. \ + --go_opt=paths=source_relative \ + --go-drpc_out=. \ + --go-drpc_opt=paths=source_relative \ + ./agent/agentsocket/proto/agentsocket.proto + provisionersdk/proto/provisioner.pb.go: provisionersdk/proto/provisioner.proto protoc \ --go_out=. \ @@ -822,13 +832,13 @@ vpn/vpn.pb.go: vpn/vpn.proto --go_opt=paths=source_relative \ ./vpn/vpn.proto -enterprise/x/aibridged/proto/aibridged.pb.go: enterprise/x/aibridged/proto/aibridged.proto +enterprise/aibridged/proto/aibridged.pb.go: enterprise/aibridged/proto/aibridged.proto protoc \ --go_out=. \ --go_opt=paths=source_relative \ --go-drpc_out=. \ --go-drpc_opt=paths=source_relative \ - ./enterprise/x/aibridged/proto/aibridged.proto + ./enterprise/aibridged/proto/aibridged.proto site/src/api/typesGenerated.ts: site/node_modules/.installed $(wildcard scripts/apitypings/*) $(shell find ./codersdk $(FIND_EXCLUSIONS) -type f -name '*.go') # -C sets the directory for the go run command diff --git a/agent/agent.go b/agent/agent.go index ab882a80efa4a..0a5459ddc0e28 100644 --- a/agent/agent.go +++ b/agent/agent.go @@ -8,6 +8,7 @@ import ( "fmt" "hash/fnv" "io" + "maps" "net" "net/http" "net/netip" @@ -40,6 +41,7 @@ import ( "github.com/coder/coder/v2/agent/agentcontainers" "github.com/coder/coder/v2/agent/agentexec" "github.com/coder/coder/v2/agent/agentscripts" + "github.com/coder/coder/v2/agent/agentsocket" "github.com/coder/coder/v2/agent/agentssh" "github.com/coder/coder/v2/agent/proto" "github.com/coder/coder/v2/agent/proto/resourcesmonitor" @@ -70,16 +72,21 @@ const ( ) type Options struct { - Filesystem afero.Fs - LogDir string - TempDir string - ScriptDataDir string - Client Client - ReconnectingPTYTimeout time.Duration - EnvironmentVariables map[string]string - Logger slog.Logger - IgnorePorts map[int]string - PortCacheDuration time.Duration + Filesystem afero.Fs + LogDir string + TempDir string + ScriptDataDir string + Client Client + 
ReconnectingPTYTimeout time.Duration + EnvironmentVariables map[string]string + Logger slog.Logger + // IgnorePorts tells the api handler which ports to ignore when + // listing all listening ports. This is helpful to hide ports that + // are used by the agent, that the user does not care about. + IgnorePorts map[int]string + // ListeningPortsGetter is used to get the list of listening ports. Only + // tests should set this. If unset, a default that queries the OS will be used. + ListeningPortsGetter ListeningPortsGetter SSHMaxTimeout time.Duration TailnetListenPort uint16 Subsystems []codersdk.AgentSubsystem @@ -91,6 +98,8 @@ type Options struct { Devcontainers bool DevcontainerAPIOptions []agentcontainers.Option // Enable Devcontainers for these to be effective. Clock quartz.Clock + SocketServerEnabled bool + SocketPath string // Path for the agent socket server socket } type Client interface { @@ -137,9 +146,7 @@ func New(options Options) Agent { if options.ServiceBannerRefreshInterval == 0 { options.ServiceBannerRefreshInterval = 2 * time.Minute } - if options.PortCacheDuration == 0 { - options.PortCacheDuration = 1 * time.Second - } + if options.Clock == nil { options.Clock = quartz.NewReal() } @@ -153,30 +160,38 @@ func New(options Options) Agent { options.Execer = agentexec.DefaultExecer } + if options.ListeningPortsGetter == nil { + options.ListeningPortsGetter = &osListeningPortsGetter{ + cacheDuration: 1 * time.Second, + } + } + hardCtx, hardCancel := context.WithCancel(context.Background()) gracefulCtx, gracefulCancel := context.WithCancel(hardCtx) a := &agent{ - clock: options.Clock, - tailnetListenPort: options.TailnetListenPort, - reconnectingPTYTimeout: options.ReconnectingPTYTimeout, - logger: options.Logger, - gracefulCtx: gracefulCtx, - gracefulCancel: gracefulCancel, - hardCtx: hardCtx, - hardCancel: hardCancel, - coordDisconnected: make(chan struct{}), - environmentVariables: options.EnvironmentVariables, - client: options.Client, - filesystem: 
options.Filesystem, - logDir: options.LogDir, - tempDir: options.TempDir, - scriptDataDir: options.ScriptDataDir, - lifecycleUpdate: make(chan struct{}, 1), - lifecycleReported: make(chan codersdk.WorkspaceAgentLifecycle, 1), - lifecycleStates: []agentsdk.PostLifecycleRequest{{State: codersdk.WorkspaceAgentLifecycleCreated}}, - reportConnectionsUpdate: make(chan struct{}, 1), - ignorePorts: options.IgnorePorts, - portCacheDuration: options.PortCacheDuration, + clock: options.Clock, + tailnetListenPort: options.TailnetListenPort, + reconnectingPTYTimeout: options.ReconnectingPTYTimeout, + logger: options.Logger, + gracefulCtx: gracefulCtx, + gracefulCancel: gracefulCancel, + hardCtx: hardCtx, + hardCancel: hardCancel, + coordDisconnected: make(chan struct{}), + environmentVariables: options.EnvironmentVariables, + client: options.Client, + filesystem: options.Filesystem, + logDir: options.LogDir, + tempDir: options.TempDir, + scriptDataDir: options.ScriptDataDir, + lifecycleUpdate: make(chan struct{}, 1), + lifecycleReported: make(chan codersdk.WorkspaceAgentLifecycle, 1), + lifecycleStates: []agentsdk.PostLifecycleRequest{{State: codersdk.WorkspaceAgentLifecycleCreated}}, + reportConnectionsUpdate: make(chan struct{}, 1), + listeningPortsHandler: listeningPortsHandler{ + getter: options.ListeningPortsGetter, + ignorePorts: maps.Clone(options.IgnorePorts), + }, reportMetadataInterval: options.ReportMetadataInterval, announcementBannersRefreshInterval: options.ServiceBannerRefreshInterval, sshMaxTimeout: options.SSHMaxTimeout, @@ -190,6 +205,8 @@ func New(options Options) Agent { devcontainers: options.Devcontainers, containerAPIOptions: options.DevcontainerAPIOptions, + socketPath: options.SocketPath, + socketServerEnabled: options.SocketServerEnabled, } // Initially, we have a closed channel, reflecting the fact that we are not initially connected. 
// Each time we connect we replace the channel (while holding the closeMutex) with a new one @@ -202,20 +219,16 @@ func New(options Options) Agent { } type agent struct { - clock quartz.Clock - logger slog.Logger - client Client - tailnetListenPort uint16 - filesystem afero.Fs - logDir string - tempDir string - scriptDataDir string - // ignorePorts tells the api handler which ports to ignore when - // listing all listening ports. This is helpful to hide ports that - // are used by the agent, that the user does not care about. - ignorePorts map[int]string - portCacheDuration time.Duration - subsystems []codersdk.AgentSubsystem + clock quartz.Clock + logger slog.Logger + client Client + tailnetListenPort uint16 + filesystem afero.Fs + logDir string + tempDir string + scriptDataDir string + listeningPortsHandler listeningPortsHandler + subsystems []codersdk.AgentSubsystem reconnectingPTYTimeout time.Duration reconnectingPTYServer *reconnectingpty.Server @@ -271,6 +284,10 @@ type agent struct { devcontainers bool containerAPIOptions []agentcontainers.Option containerAPI *agentcontainers.API + + socketServerEnabled bool + socketPath string + socketServer *agentsocket.Server } func (a *agent) TailnetConn() *tailnet.Conn { @@ -350,9 +367,32 @@ func (a *agent) init() { s.ExperimentalContainers = a.devcontainers }, ) + + a.initSocketServer() + go a.runLoop() } +// initSocketServer initializes server that allows direct communication with a workspace agent using IPC. 
+func (a *agent) initSocketServer() { + if !a.socketServerEnabled { + a.logger.Info(a.hardCtx, "socket server is disabled") + return + } + + server, err := agentsocket.NewServer( + a.logger.Named("socket"), + agentsocket.WithPath(a.socketPath), + ) + if err != nil { + a.logger.Warn(a.hardCtx, "failed to create socket server", slog.Error(err), slog.F("path", a.socketPath)) + return + } + + a.socketServer = server + a.logger.Debug(a.hardCtx, "socket server started", slog.F("path", a.socketPath)) +} + // runLoop attempts to start the agent in a retry loop. // Coder may be offline temporarily, a connection issue // may be happening, but regardless after the intermittent @@ -1087,7 +1127,7 @@ func (a *agent) handleManifest(manifestOK *checkpoint) func(ctx context.Context, if err != nil { return xerrors.Errorf("fetch metadata: %w", err) } - a.logger.Info(ctx, "fetched manifest", slog.F("manifest", mp)) + a.logger.Info(ctx, "fetched manifest") manifest, err := agentsdk.ManifestFromProto(mp) if err != nil { a.logger.Critical(ctx, "failed to convert manifest", slog.F("manifest", mp), slog.Error(err)) @@ -1920,6 +1960,7 @@ func (a *agent) Close() error { lifecycleState = codersdk.WorkspaceAgentLifecycleShutdownError } } + a.setLifecycle(lifecycleState) err = a.scriptRunner.Close() @@ -1927,6 +1968,12 @@ func (a *agent) Close() error { a.logger.Error(a.hardCtx, "script runner close", slog.Error(err)) } + if a.socketServer != nil { + if err := a.socketServer.Close(); err != nil { + a.logger.Error(a.hardCtx, "socket server close", slog.Error(err)) + } + } + if err := a.containerAPI.Close(); err != nil { a.logger.Error(a.hardCtx, "container API close", slog.Error(err)) } diff --git a/agent/agentsocket/client.go b/agent/agentsocket/client.go new file mode 100644 index 0000000000000..cc8810c9871e5 --- /dev/null +++ b/agent/agentsocket/client.go @@ -0,0 +1,146 @@ +package agentsocket + +import ( + "context" + + "golang.org/x/xerrors" + "storj.io/drpc" + "storj.io/drpc/drpcconn" + + 
"github.com/coder/coder/v2/agent/agentsocket/proto" + "github.com/coder/coder/v2/agent/unit" +) + +// Option represents a configuration option for NewClient. +type Option func(*options) + +type options struct { + path string +} + +// WithPath sets the socket path. If not provided or empty, the client will +// auto-discover the default socket path. +func WithPath(path string) Option { + return func(opts *options) { + if path == "" { + return + } + opts.path = path + } +} + +// Client provides a client for communicating with the workspace agentsocket API. +type Client struct { + client proto.DRPCAgentSocketClient + conn drpc.Conn +} + +// NewClient creates a new socket client and opens a connection to the socket. +// If path is not provided via WithPath or is empty, it will auto-discover the +// default socket path. +func NewClient(ctx context.Context, opts ...Option) (*Client, error) { + options := &options{} + for _, opt := range opts { + opt(options) + } + + conn, err := dialSocket(ctx, options.path) + if err != nil { + return nil, xerrors.Errorf("connect to socket: %w", err) + } + + drpcConn := drpcconn.New(conn) + client := proto.NewDRPCAgentSocketClient(drpcConn) + + return &Client{ + client: client, + conn: drpcConn, + }, nil +} + +// Close closes the socket connection. +func (c *Client) Close() error { + return c.conn.Close() +} + +// Ping sends a ping request to the agent. +func (c *Client) Ping(ctx context.Context) error { + _, err := c.client.Ping(ctx, &proto.PingRequest{}) + return err +} + +// SyncStart starts a unit in the dependency graph. +func (c *Client) SyncStart(ctx context.Context, unitName unit.ID) error { + _, err := c.client.SyncStart(ctx, &proto.SyncStartRequest{ + Unit: string(unitName), + }) + return err +} + +// SyncWant declares a dependency between units. 
+func (c *Client) SyncWant(ctx context.Context, unitName, dependsOn unit.ID) error { + _, err := c.client.SyncWant(ctx, &proto.SyncWantRequest{ + Unit: string(unitName), + DependsOn: string(dependsOn), + }) + return err +} + +// SyncComplete marks a unit as complete in the dependency graph. +func (c *Client) SyncComplete(ctx context.Context, unitName unit.ID) error { + _, err := c.client.SyncComplete(ctx, &proto.SyncCompleteRequest{ + Unit: string(unitName), + }) + return err +} + +// SyncReady requests whether a unit is ready to be started. That is, all dependencies are satisfied. +func (c *Client) SyncReady(ctx context.Context, unitName unit.ID) (bool, error) { + resp, err := c.client.SyncReady(ctx, &proto.SyncReadyRequest{ + Unit: string(unitName), + }) + return resp.Ready, err +} + +// SyncStatus gets the status of a unit and its dependencies. +func (c *Client) SyncStatus(ctx context.Context, unitName unit.ID) (SyncStatusResponse, error) { + resp, err := c.client.SyncStatus(ctx, &proto.SyncStatusRequest{ + Unit: string(unitName), + }) + if err != nil { + return SyncStatusResponse{}, err + } + + var dependencies []DependencyInfo + for _, dep := range resp.Dependencies { + dependencies = append(dependencies, DependencyInfo{ + DependsOn: unit.ID(dep.DependsOn), + RequiredStatus: unit.Status(dep.RequiredStatus), + CurrentStatus: unit.Status(dep.CurrentStatus), + IsSatisfied: dep.IsSatisfied, + }) + } + + return SyncStatusResponse{ + UnitName: unitName, + Status: unit.Status(resp.Status), + IsReady: resp.IsReady, + Dependencies: dependencies, + }, nil +} + +// SyncStatusResponse contains the status information for a unit. +type SyncStatusResponse struct { + UnitName unit.ID `table:"unit,default_sort" json:"unit_name"` + Status unit.Status `table:"status" json:"status"` + IsReady bool `table:"ready" json:"is_ready"` + Dependencies []DependencyInfo `table:"dependencies" json:"dependencies"` +} + +// DependencyInfo contains information about a unit dependency. 
+type DependencyInfo struct { + DependsOn unit.ID `table:"depends on,default_sort" json:"depends_on"` + RequiredStatus unit.Status `table:"required status" json:"required_status"` + CurrentStatus unit.Status `table:"current status" json:"current_status"` + IsSatisfied bool `table:"satisfied" json:"is_satisfied"` +} diff --git a/agent/agentsocket/proto/agentsocket.pb.go b/agent/agentsocket/proto/agentsocket.pb.go new file mode 100644 index 0000000000000..b2b1d922a8045 --- /dev/null +++ b/agent/agentsocket/proto/agentsocket.pb.go @@ -0,0 +1,968 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// versions: +// protoc-gen-go v1.30.0 +// protoc v4.23.4 +// source: agent/agentsocket/proto/agentsocket.proto + +package proto + +import ( + protoreflect "google.golang.org/protobuf/reflect/protoreflect" + protoimpl "google.golang.org/protobuf/runtime/protoimpl" + reflect "reflect" + sync "sync" +) + +const ( + // Verify that this generated code is sufficiently up-to-date. + _ = protoimpl.EnforceVersion(20 - protoimpl.MinVersion) + // Verify that runtime/protoimpl is sufficiently up-to-date. 
+ _ = protoimpl.EnforceVersion(protoimpl.MaxVersion - 20) +) + +type PingRequest struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields +} + +func (x *PingRequest) Reset() { + *x = PingRequest{} + if protoimpl.UnsafeEnabled { + mi := &file_agent_agentsocket_proto_agentsocket_proto_msgTypes[0] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *PingRequest) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*PingRequest) ProtoMessage() {} + +func (x *PingRequest) ProtoReflect() protoreflect.Message { + mi := &file_agent_agentsocket_proto_agentsocket_proto_msgTypes[0] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use PingRequest.ProtoReflect.Descriptor instead. +func (*PingRequest) Descriptor() ([]byte, []int) { + return file_agent_agentsocket_proto_agentsocket_proto_rawDescGZIP(), []int{0} +} + +type PingResponse struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields +} + +func (x *PingResponse) Reset() { + *x = PingResponse{} + if protoimpl.UnsafeEnabled { + mi := &file_agent_agentsocket_proto_agentsocket_proto_msgTypes[1] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *PingResponse) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*PingResponse) ProtoMessage() {} + +func (x *PingResponse) ProtoReflect() protoreflect.Message { + mi := &file_agent_agentsocket_proto_agentsocket_proto_msgTypes[1] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use 
PingResponse.ProtoReflect.Descriptor instead. +func (*PingResponse) Descriptor() ([]byte, []int) { + return file_agent_agentsocket_proto_agentsocket_proto_rawDescGZIP(), []int{1} +} + +type SyncStartRequest struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + Unit string `protobuf:"bytes,1,opt,name=unit,proto3" json:"unit,omitempty"` +} + +func (x *SyncStartRequest) Reset() { + *x = SyncStartRequest{} + if protoimpl.UnsafeEnabled { + mi := &file_agent_agentsocket_proto_agentsocket_proto_msgTypes[2] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *SyncStartRequest) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*SyncStartRequest) ProtoMessage() {} + +func (x *SyncStartRequest) ProtoReflect() protoreflect.Message { + mi := &file_agent_agentsocket_proto_agentsocket_proto_msgTypes[2] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use SyncStartRequest.ProtoReflect.Descriptor instead. 
+func (*SyncStartRequest) Descriptor() ([]byte, []int) { + return file_agent_agentsocket_proto_agentsocket_proto_rawDescGZIP(), []int{2} +} + +func (x *SyncStartRequest) GetUnit() string { + if x != nil { + return x.Unit + } + return "" +} + +type SyncStartResponse struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields +} + +func (x *SyncStartResponse) Reset() { + *x = SyncStartResponse{} + if protoimpl.UnsafeEnabled { + mi := &file_agent_agentsocket_proto_agentsocket_proto_msgTypes[3] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *SyncStartResponse) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*SyncStartResponse) ProtoMessage() {} + +func (x *SyncStartResponse) ProtoReflect() protoreflect.Message { + mi := &file_agent_agentsocket_proto_agentsocket_proto_msgTypes[3] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use SyncStartResponse.ProtoReflect.Descriptor instead. 
+func (*SyncStartResponse) Descriptor() ([]byte, []int) { + return file_agent_agentsocket_proto_agentsocket_proto_rawDescGZIP(), []int{3} +} + +type SyncWantRequest struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + Unit string `protobuf:"bytes,1,opt,name=unit,proto3" json:"unit,omitempty"` + DependsOn string `protobuf:"bytes,2,opt,name=depends_on,json=dependsOn,proto3" json:"depends_on,omitempty"` +} + +func (x *SyncWantRequest) Reset() { + *x = SyncWantRequest{} + if protoimpl.UnsafeEnabled { + mi := &file_agent_agentsocket_proto_agentsocket_proto_msgTypes[4] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *SyncWantRequest) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*SyncWantRequest) ProtoMessage() {} + +func (x *SyncWantRequest) ProtoReflect() protoreflect.Message { + mi := &file_agent_agentsocket_proto_agentsocket_proto_msgTypes[4] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use SyncWantRequest.ProtoReflect.Descriptor instead. 
+func (*SyncWantRequest) Descriptor() ([]byte, []int) { + return file_agent_agentsocket_proto_agentsocket_proto_rawDescGZIP(), []int{4} +} + +func (x *SyncWantRequest) GetUnit() string { + if x != nil { + return x.Unit + } + return "" +} + +func (x *SyncWantRequest) GetDependsOn() string { + if x != nil { + return x.DependsOn + } + return "" +} + +type SyncWantResponse struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields +} + +func (x *SyncWantResponse) Reset() { + *x = SyncWantResponse{} + if protoimpl.UnsafeEnabled { + mi := &file_agent_agentsocket_proto_agentsocket_proto_msgTypes[5] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *SyncWantResponse) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*SyncWantResponse) ProtoMessage() {} + +func (x *SyncWantResponse) ProtoReflect() protoreflect.Message { + mi := &file_agent_agentsocket_proto_agentsocket_proto_msgTypes[5] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use SyncWantResponse.ProtoReflect.Descriptor instead. 
+func (*SyncWantResponse) Descriptor() ([]byte, []int) { + return file_agent_agentsocket_proto_agentsocket_proto_rawDescGZIP(), []int{5} +} + +type SyncCompleteRequest struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + Unit string `protobuf:"bytes,1,opt,name=unit,proto3" json:"unit,omitempty"` +} + +func (x *SyncCompleteRequest) Reset() { + *x = SyncCompleteRequest{} + if protoimpl.UnsafeEnabled { + mi := &file_agent_agentsocket_proto_agentsocket_proto_msgTypes[6] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *SyncCompleteRequest) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*SyncCompleteRequest) ProtoMessage() {} + +func (x *SyncCompleteRequest) ProtoReflect() protoreflect.Message { + mi := &file_agent_agentsocket_proto_agentsocket_proto_msgTypes[6] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use SyncCompleteRequest.ProtoReflect.Descriptor instead. 
+func (*SyncCompleteRequest) Descriptor() ([]byte, []int) { + return file_agent_agentsocket_proto_agentsocket_proto_rawDescGZIP(), []int{6} +} + +func (x *SyncCompleteRequest) GetUnit() string { + if x != nil { + return x.Unit + } + return "" +} + +type SyncCompleteResponse struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields +} + +func (x *SyncCompleteResponse) Reset() { + *x = SyncCompleteResponse{} + if protoimpl.UnsafeEnabled { + mi := &file_agent_agentsocket_proto_agentsocket_proto_msgTypes[7] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *SyncCompleteResponse) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*SyncCompleteResponse) ProtoMessage() {} + +func (x *SyncCompleteResponse) ProtoReflect() protoreflect.Message { + mi := &file_agent_agentsocket_proto_agentsocket_proto_msgTypes[7] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use SyncCompleteResponse.ProtoReflect.Descriptor instead. 
+func (*SyncCompleteResponse) Descriptor() ([]byte, []int) { + return file_agent_agentsocket_proto_agentsocket_proto_rawDescGZIP(), []int{7} +} + +type SyncReadyRequest struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + Unit string `protobuf:"bytes,1,opt,name=unit,proto3" json:"unit,omitempty"` +} + +func (x *SyncReadyRequest) Reset() { + *x = SyncReadyRequest{} + if protoimpl.UnsafeEnabled { + mi := &file_agent_agentsocket_proto_agentsocket_proto_msgTypes[8] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *SyncReadyRequest) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*SyncReadyRequest) ProtoMessage() {} + +func (x *SyncReadyRequest) ProtoReflect() protoreflect.Message { + mi := &file_agent_agentsocket_proto_agentsocket_proto_msgTypes[8] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use SyncReadyRequest.ProtoReflect.Descriptor instead. 
+func (*SyncReadyRequest) Descriptor() ([]byte, []int) { + return file_agent_agentsocket_proto_agentsocket_proto_rawDescGZIP(), []int{8} +} + +func (x *SyncReadyRequest) GetUnit() string { + if x != nil { + return x.Unit + } + return "" +} + +type SyncReadyResponse struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + Ready bool `protobuf:"varint,1,opt,name=ready,proto3" json:"ready,omitempty"` +} + +func (x *SyncReadyResponse) Reset() { + *x = SyncReadyResponse{} + if protoimpl.UnsafeEnabled { + mi := &file_agent_agentsocket_proto_agentsocket_proto_msgTypes[9] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *SyncReadyResponse) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*SyncReadyResponse) ProtoMessage() {} + +func (x *SyncReadyResponse) ProtoReflect() protoreflect.Message { + mi := &file_agent_agentsocket_proto_agentsocket_proto_msgTypes[9] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use SyncReadyResponse.ProtoReflect.Descriptor instead. 
+func (*SyncReadyResponse) Descriptor() ([]byte, []int) { + return file_agent_agentsocket_proto_agentsocket_proto_rawDescGZIP(), []int{9} +} + +func (x *SyncReadyResponse) GetReady() bool { + if x != nil { + return x.Ready + } + return false +} + +type SyncStatusRequest struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + Unit string `protobuf:"bytes,1,opt,name=unit,proto3" json:"unit,omitempty"` +} + +func (x *SyncStatusRequest) Reset() { + *x = SyncStatusRequest{} + if protoimpl.UnsafeEnabled { + mi := &file_agent_agentsocket_proto_agentsocket_proto_msgTypes[10] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *SyncStatusRequest) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*SyncStatusRequest) ProtoMessage() {} + +func (x *SyncStatusRequest) ProtoReflect() protoreflect.Message { + mi := &file_agent_agentsocket_proto_agentsocket_proto_msgTypes[10] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use SyncStatusRequest.ProtoReflect.Descriptor instead. 
+func (*SyncStatusRequest) Descriptor() ([]byte, []int) { + return file_agent_agentsocket_proto_agentsocket_proto_rawDescGZIP(), []int{10} +} + +func (x *SyncStatusRequest) GetUnit() string { + if x != nil { + return x.Unit + } + return "" +} + +type DependencyInfo struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + Unit string `protobuf:"bytes,1,opt,name=unit,proto3" json:"unit,omitempty"` + DependsOn string `protobuf:"bytes,2,opt,name=depends_on,json=dependsOn,proto3" json:"depends_on,omitempty"` + RequiredStatus string `protobuf:"bytes,3,opt,name=required_status,json=requiredStatus,proto3" json:"required_status,omitempty"` + CurrentStatus string `protobuf:"bytes,4,opt,name=current_status,json=currentStatus,proto3" json:"current_status,omitempty"` + IsSatisfied bool `protobuf:"varint,5,opt,name=is_satisfied,json=isSatisfied,proto3" json:"is_satisfied,omitempty"` +} + +func (x *DependencyInfo) Reset() { + *x = DependencyInfo{} + if protoimpl.UnsafeEnabled { + mi := &file_agent_agentsocket_proto_agentsocket_proto_msgTypes[11] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *DependencyInfo) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*DependencyInfo) ProtoMessage() {} + +func (x *DependencyInfo) ProtoReflect() protoreflect.Message { + mi := &file_agent_agentsocket_proto_agentsocket_proto_msgTypes[11] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use DependencyInfo.ProtoReflect.Descriptor instead. 
+func (*DependencyInfo) Descriptor() ([]byte, []int) { + return file_agent_agentsocket_proto_agentsocket_proto_rawDescGZIP(), []int{11} +} + +func (x *DependencyInfo) GetUnit() string { + if x != nil { + return x.Unit + } + return "" +} + +func (x *DependencyInfo) GetDependsOn() string { + if x != nil { + return x.DependsOn + } + return "" +} + +func (x *DependencyInfo) GetRequiredStatus() string { + if x != nil { + return x.RequiredStatus + } + return "" +} + +func (x *DependencyInfo) GetCurrentStatus() string { + if x != nil { + return x.CurrentStatus + } + return "" +} + +func (x *DependencyInfo) GetIsSatisfied() bool { + if x != nil { + return x.IsSatisfied + } + return false +} + +type SyncStatusResponse struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + Status string `protobuf:"bytes,1,opt,name=status,proto3" json:"status,omitempty"` + IsReady bool `protobuf:"varint,2,opt,name=is_ready,json=isReady,proto3" json:"is_ready,omitempty"` + Dependencies []*DependencyInfo `protobuf:"bytes,3,rep,name=dependencies,proto3" json:"dependencies,omitempty"` +} + +func (x *SyncStatusResponse) Reset() { + *x = SyncStatusResponse{} + if protoimpl.UnsafeEnabled { + mi := &file_agent_agentsocket_proto_agentsocket_proto_msgTypes[12] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *SyncStatusResponse) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*SyncStatusResponse) ProtoMessage() {} + +func (x *SyncStatusResponse) ProtoReflect() protoreflect.Message { + mi := &file_agent_agentsocket_proto_agentsocket_proto_msgTypes[12] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use SyncStatusResponse.ProtoReflect.Descriptor instead. 
+func (*SyncStatusResponse) Descriptor() ([]byte, []int) { + return file_agent_agentsocket_proto_agentsocket_proto_rawDescGZIP(), []int{12} +} + +func (x *SyncStatusResponse) GetStatus() string { + if x != nil { + return x.Status + } + return "" +} + +func (x *SyncStatusResponse) GetIsReady() bool { + if x != nil { + return x.IsReady + } + return false +} + +func (x *SyncStatusResponse) GetDependencies() []*DependencyInfo { + if x != nil { + return x.Dependencies + } + return nil +} + +var File_agent_agentsocket_proto_agentsocket_proto protoreflect.FileDescriptor + +var file_agent_agentsocket_proto_agentsocket_proto_rawDesc = []byte{ + 0x0a, 0x29, 0x61, 0x67, 0x65, 0x6e, 0x74, 0x2f, 0x61, 0x67, 0x65, 0x6e, 0x74, 0x73, 0x6f, 0x63, + 0x6b, 0x65, 0x74, 0x2f, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x2f, 0x61, 0x67, 0x65, 0x6e, 0x74, 0x73, + 0x6f, 0x63, 0x6b, 0x65, 0x74, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x12, 0x14, 0x63, 0x6f, 0x64, + 0x65, 0x72, 0x2e, 0x61, 0x67, 0x65, 0x6e, 0x74, 0x73, 0x6f, 0x63, 0x6b, 0x65, 0x74, 0x2e, 0x76, + 0x31, 0x22, 0x0d, 0x0a, 0x0b, 0x50, 0x69, 0x6e, 0x67, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, + 0x22, 0x0e, 0x0a, 0x0c, 0x50, 0x69, 0x6e, 0x67, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, + 0x22, 0x26, 0x0a, 0x10, 0x53, 0x79, 0x6e, 0x63, 0x53, 0x74, 0x61, 0x72, 0x74, 0x52, 0x65, 0x71, + 0x75, 0x65, 0x73, 0x74, 0x12, 0x12, 0x0a, 0x04, 0x75, 0x6e, 0x69, 0x74, 0x18, 0x01, 0x20, 0x01, + 0x28, 0x09, 0x52, 0x04, 0x75, 0x6e, 0x69, 0x74, 0x22, 0x13, 0x0a, 0x11, 0x53, 0x79, 0x6e, 0x63, + 0x53, 0x74, 0x61, 0x72, 0x74, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x22, 0x44, 0x0a, + 0x0f, 0x53, 0x79, 0x6e, 0x63, 0x57, 0x61, 0x6e, 0x74, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, + 0x12, 0x12, 0x0a, 0x04, 0x75, 0x6e, 0x69, 0x74, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x04, + 0x75, 0x6e, 0x69, 0x74, 0x12, 0x1d, 0x0a, 0x0a, 0x64, 0x65, 0x70, 0x65, 0x6e, 0x64, 0x73, 0x5f, + 0x6f, 0x6e, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x09, 0x64, 0x65, 0x70, 
0x65, 0x6e, 0x64, + 0x73, 0x4f, 0x6e, 0x22, 0x12, 0x0a, 0x10, 0x53, 0x79, 0x6e, 0x63, 0x57, 0x61, 0x6e, 0x74, 0x52, + 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x22, 0x29, 0x0a, 0x13, 0x53, 0x79, 0x6e, 0x63, 0x43, + 0x6f, 0x6d, 0x70, 0x6c, 0x65, 0x74, 0x65, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x12, + 0x0a, 0x04, 0x75, 0x6e, 0x69, 0x74, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x04, 0x75, 0x6e, + 0x69, 0x74, 0x22, 0x16, 0x0a, 0x14, 0x53, 0x79, 0x6e, 0x63, 0x43, 0x6f, 0x6d, 0x70, 0x6c, 0x65, + 0x74, 0x65, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x22, 0x26, 0x0a, 0x10, 0x53, 0x79, + 0x6e, 0x63, 0x52, 0x65, 0x61, 0x64, 0x79, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x12, + 0x0a, 0x04, 0x75, 0x6e, 0x69, 0x74, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x04, 0x75, 0x6e, + 0x69, 0x74, 0x22, 0x29, 0x0a, 0x11, 0x53, 0x79, 0x6e, 0x63, 0x52, 0x65, 0x61, 0x64, 0x79, 0x52, + 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x14, 0x0a, 0x05, 0x72, 0x65, 0x61, 0x64, 0x79, + 0x18, 0x01, 0x20, 0x01, 0x28, 0x08, 0x52, 0x05, 0x72, 0x65, 0x61, 0x64, 0x79, 0x22, 0x27, 0x0a, + 0x11, 0x53, 0x79, 0x6e, 0x63, 0x53, 0x74, 0x61, 0x74, 0x75, 0x73, 0x52, 0x65, 0x71, 0x75, 0x65, + 0x73, 0x74, 0x12, 0x12, 0x0a, 0x04, 0x75, 0x6e, 0x69, 0x74, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, + 0x52, 0x04, 0x75, 0x6e, 0x69, 0x74, 0x22, 0xb6, 0x01, 0x0a, 0x0e, 0x44, 0x65, 0x70, 0x65, 0x6e, + 0x64, 0x65, 0x6e, 0x63, 0x79, 0x49, 0x6e, 0x66, 0x6f, 0x12, 0x12, 0x0a, 0x04, 0x75, 0x6e, 0x69, + 0x74, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x04, 0x75, 0x6e, 0x69, 0x74, 0x12, 0x1d, 0x0a, + 0x0a, 0x64, 0x65, 0x70, 0x65, 0x6e, 0x64, 0x73, 0x5f, 0x6f, 0x6e, 0x18, 0x02, 0x20, 0x01, 0x28, + 0x09, 0x52, 0x09, 0x64, 0x65, 0x70, 0x65, 0x6e, 0x64, 0x73, 0x4f, 0x6e, 0x12, 0x27, 0x0a, 0x0f, + 0x72, 0x65, 0x71, 0x75, 0x69, 0x72, 0x65, 0x64, 0x5f, 0x73, 0x74, 0x61, 0x74, 0x75, 0x73, 0x18, + 0x03, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0e, 0x72, 0x65, 0x71, 0x75, 0x69, 0x72, 0x65, 0x64, 0x53, + 0x74, 0x61, 0x74, 
0x75, 0x73, 0x12, 0x25, 0x0a, 0x0e, 0x63, 0x75, 0x72, 0x72, 0x65, 0x6e, 0x74, + 0x5f, 0x73, 0x74, 0x61, 0x74, 0x75, 0x73, 0x18, 0x04, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0d, 0x63, + 0x75, 0x72, 0x72, 0x65, 0x6e, 0x74, 0x53, 0x74, 0x61, 0x74, 0x75, 0x73, 0x12, 0x21, 0x0a, 0x0c, + 0x69, 0x73, 0x5f, 0x73, 0x61, 0x74, 0x69, 0x73, 0x66, 0x69, 0x65, 0x64, 0x18, 0x05, 0x20, 0x01, + 0x28, 0x08, 0x52, 0x0b, 0x69, 0x73, 0x53, 0x61, 0x74, 0x69, 0x73, 0x66, 0x69, 0x65, 0x64, 0x22, + 0x91, 0x01, 0x0a, 0x12, 0x53, 0x79, 0x6e, 0x63, 0x53, 0x74, 0x61, 0x74, 0x75, 0x73, 0x52, 0x65, + 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x16, 0x0a, 0x06, 0x73, 0x74, 0x61, 0x74, 0x75, 0x73, + 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x06, 0x73, 0x74, 0x61, 0x74, 0x75, 0x73, 0x12, 0x19, + 0x0a, 0x08, 0x69, 0x73, 0x5f, 0x72, 0x65, 0x61, 0x64, 0x79, 0x18, 0x02, 0x20, 0x01, 0x28, 0x08, + 0x52, 0x07, 0x69, 0x73, 0x52, 0x65, 0x61, 0x64, 0x79, 0x12, 0x48, 0x0a, 0x0c, 0x64, 0x65, 0x70, + 0x65, 0x6e, 0x64, 0x65, 0x6e, 0x63, 0x69, 0x65, 0x73, 0x18, 0x03, 0x20, 0x03, 0x28, 0x0b, 0x32, + 0x24, 0x2e, 0x63, 0x6f, 0x64, 0x65, 0x72, 0x2e, 0x61, 0x67, 0x65, 0x6e, 0x74, 0x73, 0x6f, 0x63, + 0x6b, 0x65, 0x74, 0x2e, 0x76, 0x31, 0x2e, 0x44, 0x65, 0x70, 0x65, 0x6e, 0x64, 0x65, 0x6e, 0x63, + 0x79, 0x49, 0x6e, 0x66, 0x6f, 0x52, 0x0c, 0x64, 0x65, 0x70, 0x65, 0x6e, 0x64, 0x65, 0x6e, 0x63, + 0x69, 0x65, 0x73, 0x32, 0xbb, 0x04, 0x0a, 0x0b, 0x41, 0x67, 0x65, 0x6e, 0x74, 0x53, 0x6f, 0x63, + 0x6b, 0x65, 0x74, 0x12, 0x4d, 0x0a, 0x04, 0x50, 0x69, 0x6e, 0x67, 0x12, 0x21, 0x2e, 0x63, 0x6f, + 0x64, 0x65, 0x72, 0x2e, 0x61, 0x67, 0x65, 0x6e, 0x74, 0x73, 0x6f, 0x63, 0x6b, 0x65, 0x74, 0x2e, + 0x76, 0x31, 0x2e, 0x50, 0x69, 0x6e, 0x67, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x22, + 0x2e, 0x63, 0x6f, 0x64, 0x65, 0x72, 0x2e, 0x61, 0x67, 0x65, 0x6e, 0x74, 0x73, 0x6f, 0x63, 0x6b, + 0x65, 0x74, 0x2e, 0x76, 0x31, 0x2e, 0x50, 0x69, 0x6e, 0x67, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, + 0x73, 0x65, 0x12, 0x5c, 0x0a, 0x09, 0x53, 0x79, 0x6e, 
0x63, 0x53, 0x74, 0x61, 0x72, 0x74, 0x12, + 0x26, 0x2e, 0x63, 0x6f, 0x64, 0x65, 0x72, 0x2e, 0x61, 0x67, 0x65, 0x6e, 0x74, 0x73, 0x6f, 0x63, + 0x6b, 0x65, 0x74, 0x2e, 0x76, 0x31, 0x2e, 0x53, 0x79, 0x6e, 0x63, 0x53, 0x74, 0x61, 0x72, 0x74, + 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x27, 0x2e, 0x63, 0x6f, 0x64, 0x65, 0x72, 0x2e, + 0x61, 0x67, 0x65, 0x6e, 0x74, 0x73, 0x6f, 0x63, 0x6b, 0x65, 0x74, 0x2e, 0x76, 0x31, 0x2e, 0x53, + 0x79, 0x6e, 0x63, 0x53, 0x74, 0x61, 0x72, 0x74, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, + 0x12, 0x59, 0x0a, 0x08, 0x53, 0x79, 0x6e, 0x63, 0x57, 0x61, 0x6e, 0x74, 0x12, 0x25, 0x2e, 0x63, + 0x6f, 0x64, 0x65, 0x72, 0x2e, 0x61, 0x67, 0x65, 0x6e, 0x74, 0x73, 0x6f, 0x63, 0x6b, 0x65, 0x74, + 0x2e, 0x76, 0x31, 0x2e, 0x53, 0x79, 0x6e, 0x63, 0x57, 0x61, 0x6e, 0x74, 0x52, 0x65, 0x71, 0x75, + 0x65, 0x73, 0x74, 0x1a, 0x26, 0x2e, 0x63, 0x6f, 0x64, 0x65, 0x72, 0x2e, 0x61, 0x67, 0x65, 0x6e, + 0x74, 0x73, 0x6f, 0x63, 0x6b, 0x65, 0x74, 0x2e, 0x76, 0x31, 0x2e, 0x53, 0x79, 0x6e, 0x63, 0x57, + 0x61, 0x6e, 0x74, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x65, 0x0a, 0x0c, 0x53, + 0x79, 0x6e, 0x63, 0x43, 0x6f, 0x6d, 0x70, 0x6c, 0x65, 0x74, 0x65, 0x12, 0x29, 0x2e, 0x63, 0x6f, + 0x64, 0x65, 0x72, 0x2e, 0x61, 0x67, 0x65, 0x6e, 0x74, 0x73, 0x6f, 0x63, 0x6b, 0x65, 0x74, 0x2e, + 0x76, 0x31, 0x2e, 0x53, 0x79, 0x6e, 0x63, 0x43, 0x6f, 0x6d, 0x70, 0x6c, 0x65, 0x74, 0x65, 0x52, + 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x2a, 0x2e, 0x63, 0x6f, 0x64, 0x65, 0x72, 0x2e, 0x61, + 0x67, 0x65, 0x6e, 0x74, 0x73, 0x6f, 0x63, 0x6b, 0x65, 0x74, 0x2e, 0x76, 0x31, 0x2e, 0x53, 0x79, + 0x6e, 0x63, 0x43, 0x6f, 0x6d, 0x70, 0x6c, 0x65, 0x74, 0x65, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, + 0x73, 0x65, 0x12, 0x5c, 0x0a, 0x09, 0x53, 0x79, 0x6e, 0x63, 0x52, 0x65, 0x61, 0x64, 0x79, 0x12, + 0x26, 0x2e, 0x63, 0x6f, 0x64, 0x65, 0x72, 0x2e, 0x61, 0x67, 0x65, 0x6e, 0x74, 0x73, 0x6f, 0x63, + 0x6b, 0x65, 0x74, 0x2e, 0x76, 0x31, 0x2e, 0x53, 0x79, 0x6e, 0x63, 0x52, 0x65, 0x61, 0x64, 
0x79, + 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x27, 0x2e, 0x63, 0x6f, 0x64, 0x65, 0x72, 0x2e, + 0x61, 0x67, 0x65, 0x6e, 0x74, 0x73, 0x6f, 0x63, 0x6b, 0x65, 0x74, 0x2e, 0x76, 0x31, 0x2e, 0x53, + 0x79, 0x6e, 0x63, 0x52, 0x65, 0x61, 0x64, 0x79, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, + 0x12, 0x5f, 0x0a, 0x0a, 0x53, 0x79, 0x6e, 0x63, 0x53, 0x74, 0x61, 0x74, 0x75, 0x73, 0x12, 0x27, + 0x2e, 0x63, 0x6f, 0x64, 0x65, 0x72, 0x2e, 0x61, 0x67, 0x65, 0x6e, 0x74, 0x73, 0x6f, 0x63, 0x6b, + 0x65, 0x74, 0x2e, 0x76, 0x31, 0x2e, 0x53, 0x79, 0x6e, 0x63, 0x53, 0x74, 0x61, 0x74, 0x75, 0x73, + 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x28, 0x2e, 0x63, 0x6f, 0x64, 0x65, 0x72, 0x2e, + 0x61, 0x67, 0x65, 0x6e, 0x74, 0x73, 0x6f, 0x63, 0x6b, 0x65, 0x74, 0x2e, 0x76, 0x31, 0x2e, 0x53, + 0x79, 0x6e, 0x63, 0x53, 0x74, 0x61, 0x74, 0x75, 0x73, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, + 0x65, 0x42, 0x33, 0x5a, 0x31, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, + 0x63, 0x6f, 0x64, 0x65, 0x72, 0x2f, 0x63, 0x6f, 0x64, 0x65, 0x72, 0x2f, 0x76, 0x32, 0x2f, 0x61, + 0x67, 0x65, 0x6e, 0x74, 0x2f, 0x61, 0x67, 0x65, 0x6e, 0x74, 0x73, 0x6f, 0x63, 0x6b, 0x65, 0x74, + 0x2f, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x06, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x33, +} + +var ( + file_agent_agentsocket_proto_agentsocket_proto_rawDescOnce sync.Once + file_agent_agentsocket_proto_agentsocket_proto_rawDescData = file_agent_agentsocket_proto_agentsocket_proto_rawDesc +) + +func file_agent_agentsocket_proto_agentsocket_proto_rawDescGZIP() []byte { + file_agent_agentsocket_proto_agentsocket_proto_rawDescOnce.Do(func() { + file_agent_agentsocket_proto_agentsocket_proto_rawDescData = protoimpl.X.CompressGZIP(file_agent_agentsocket_proto_agentsocket_proto_rawDescData) + }) + return file_agent_agentsocket_proto_agentsocket_proto_rawDescData +} + +var file_agent_agentsocket_proto_agentsocket_proto_msgTypes = make([]protoimpl.MessageInfo, 13) +var 
file_agent_agentsocket_proto_agentsocket_proto_goTypes = []interface{}{ + (*PingRequest)(nil), // 0: coder.agentsocket.v1.PingRequest + (*PingResponse)(nil), // 1: coder.agentsocket.v1.PingResponse + (*SyncStartRequest)(nil), // 2: coder.agentsocket.v1.SyncStartRequest + (*SyncStartResponse)(nil), // 3: coder.agentsocket.v1.SyncStartResponse + (*SyncWantRequest)(nil), // 4: coder.agentsocket.v1.SyncWantRequest + (*SyncWantResponse)(nil), // 5: coder.agentsocket.v1.SyncWantResponse + (*SyncCompleteRequest)(nil), // 6: coder.agentsocket.v1.SyncCompleteRequest + (*SyncCompleteResponse)(nil), // 7: coder.agentsocket.v1.SyncCompleteResponse + (*SyncReadyRequest)(nil), // 8: coder.agentsocket.v1.SyncReadyRequest + (*SyncReadyResponse)(nil), // 9: coder.agentsocket.v1.SyncReadyResponse + (*SyncStatusRequest)(nil), // 10: coder.agentsocket.v1.SyncStatusRequest + (*DependencyInfo)(nil), // 11: coder.agentsocket.v1.DependencyInfo + (*SyncStatusResponse)(nil), // 12: coder.agentsocket.v1.SyncStatusResponse +} +var file_agent_agentsocket_proto_agentsocket_proto_depIdxs = []int32{ + 11, // 0: coder.agentsocket.v1.SyncStatusResponse.dependencies:type_name -> coder.agentsocket.v1.DependencyInfo + 0, // 1: coder.agentsocket.v1.AgentSocket.Ping:input_type -> coder.agentsocket.v1.PingRequest + 2, // 2: coder.agentsocket.v1.AgentSocket.SyncStart:input_type -> coder.agentsocket.v1.SyncStartRequest + 4, // 3: coder.agentsocket.v1.AgentSocket.SyncWant:input_type -> coder.agentsocket.v1.SyncWantRequest + 6, // 4: coder.agentsocket.v1.AgentSocket.SyncComplete:input_type -> coder.agentsocket.v1.SyncCompleteRequest + 8, // 5: coder.agentsocket.v1.AgentSocket.SyncReady:input_type -> coder.agentsocket.v1.SyncReadyRequest + 10, // 6: coder.agentsocket.v1.AgentSocket.SyncStatus:input_type -> coder.agentsocket.v1.SyncStatusRequest + 1, // 7: coder.agentsocket.v1.AgentSocket.Ping:output_type -> coder.agentsocket.v1.PingResponse + 3, // 8: coder.agentsocket.v1.AgentSocket.SyncStart:output_type -> 
coder.agentsocket.v1.SyncStartResponse + 5, // 9: coder.agentsocket.v1.AgentSocket.SyncWant:output_type -> coder.agentsocket.v1.SyncWantResponse + 7, // 10: coder.agentsocket.v1.AgentSocket.SyncComplete:output_type -> coder.agentsocket.v1.SyncCompleteResponse + 9, // 11: coder.agentsocket.v1.AgentSocket.SyncReady:output_type -> coder.agentsocket.v1.SyncReadyResponse + 12, // 12: coder.agentsocket.v1.AgentSocket.SyncStatus:output_type -> coder.agentsocket.v1.SyncStatusResponse + 7, // [7:13] is the sub-list for method output_type + 1, // [1:7] is the sub-list for method input_type + 1, // [1:1] is the sub-list for extension type_name + 1, // [1:1] is the sub-list for extension extendee + 0, // [0:1] is the sub-list for field type_name +} + +func init() { file_agent_agentsocket_proto_agentsocket_proto_init() } +func file_agent_agentsocket_proto_agentsocket_proto_init() { + if File_agent_agentsocket_proto_agentsocket_proto != nil { + return + } + if !protoimpl.UnsafeEnabled { + file_agent_agentsocket_proto_agentsocket_proto_msgTypes[0].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*PingRequest); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_agent_agentsocket_proto_agentsocket_proto_msgTypes[1].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*PingResponse); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_agent_agentsocket_proto_agentsocket_proto_msgTypes[2].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*SyncStartRequest); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_agent_agentsocket_proto_agentsocket_proto_msgTypes[3].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*SyncStartResponse); i { + case 0: + return 
&v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_agent_agentsocket_proto_agentsocket_proto_msgTypes[4].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*SyncWantRequest); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_agent_agentsocket_proto_agentsocket_proto_msgTypes[5].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*SyncWantResponse); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_agent_agentsocket_proto_agentsocket_proto_msgTypes[6].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*SyncCompleteRequest); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_agent_agentsocket_proto_agentsocket_proto_msgTypes[7].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*SyncCompleteResponse); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_agent_agentsocket_proto_agentsocket_proto_msgTypes[8].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*SyncReadyRequest); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_agent_agentsocket_proto_agentsocket_proto_msgTypes[9].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*SyncReadyResponse); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_agent_agentsocket_proto_agentsocket_proto_msgTypes[10].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*SyncStatusRequest); i { + case 0: + return &v.state + case 
1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_agent_agentsocket_proto_agentsocket_proto_msgTypes[11].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*DependencyInfo); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_agent_agentsocket_proto_agentsocket_proto_msgTypes[12].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*SyncStatusResponse); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + } + type x struct{} + out := protoimpl.TypeBuilder{ + File: protoimpl.DescBuilder{ + GoPackagePath: reflect.TypeOf(x{}).PkgPath(), + RawDescriptor: file_agent_agentsocket_proto_agentsocket_proto_rawDesc, + NumEnums: 0, + NumMessages: 13, + NumExtensions: 0, + NumServices: 1, + }, + GoTypes: file_agent_agentsocket_proto_agentsocket_proto_goTypes, + DependencyIndexes: file_agent_agentsocket_proto_agentsocket_proto_depIdxs, + MessageInfos: file_agent_agentsocket_proto_agentsocket_proto_msgTypes, + }.Build() + File_agent_agentsocket_proto_agentsocket_proto = out.File + file_agent_agentsocket_proto_agentsocket_proto_rawDesc = nil + file_agent_agentsocket_proto_agentsocket_proto_goTypes = nil + file_agent_agentsocket_proto_agentsocket_proto_depIdxs = nil +} diff --git a/agent/agentsocket/proto/agentsocket.proto b/agent/agentsocket/proto/agentsocket.proto new file mode 100644 index 0000000000000..2da2ad7380baf --- /dev/null +++ b/agent/agentsocket/proto/agentsocket.proto @@ -0,0 +1,69 @@ +syntax = "proto3"; +option go_package = "github.com/coder/coder/v2/agent/agentsocket/proto"; + +package coder.agentsocket.v1; + +message PingRequest {} + +message PingResponse {} + +message SyncStartRequest { + string unit = 1; +} + +message SyncStartResponse {} + +message SyncWantRequest { + string unit = 1; + string depends_on = 2; +} + 
+message SyncWantResponse {} + +message SyncCompleteRequest { + string unit = 1; +} + +message SyncCompleteResponse {} + +message SyncReadyRequest { + string unit = 1; +} + +message SyncReadyResponse { + bool ready = 1; +} + +message SyncStatusRequest { + string unit = 1; +} + +message DependencyInfo { + string unit = 1; + string depends_on = 2; + string required_status = 3; + string current_status = 4; + bool is_satisfied = 5; +} + +message SyncStatusResponse { + string status = 1; + bool is_ready = 2; + repeated DependencyInfo dependencies = 3; +} + +// AgentSocket provides direct access to the agent over local IPC. +service AgentSocket { + // Ping the agent to check if it is alive. + rpc Ping(PingRequest) returns (PingResponse); + // Report the start of a unit. + rpc SyncStart(SyncStartRequest) returns (SyncStartResponse); + // Declare a dependency between units. + rpc SyncWant(SyncWantRequest) returns (SyncWantResponse); + // Report the completion of a unit. + rpc SyncComplete(SyncCompleteRequest) returns (SyncCompleteResponse); + // Request whether a unit is ready to be started. That is, all dependencies are satisfied. + rpc SyncReady(SyncReadyRequest) returns (SyncReadyResponse); + // Get the status of a unit and list its dependencies. + rpc SyncStatus(SyncStatusRequest) returns (SyncStatusResponse); +} diff --git a/agent/agentsocket/proto/agentsocket_drpc.pb.go b/agent/agentsocket/proto/agentsocket_drpc.pb.go new file mode 100644 index 0000000000000..f9749ee0ffa1e --- /dev/null +++ b/agent/agentsocket/proto/agentsocket_drpc.pb.go @@ -0,0 +1,311 @@ +// Code generated by protoc-gen-go-drpc. DO NOT EDIT. 
+// protoc-gen-go-drpc version: v0.0.34 +// source: agent/agentsocket/proto/agentsocket.proto + +package proto + +import ( + context "context" + errors "errors" + protojson "google.golang.org/protobuf/encoding/protojson" + proto "google.golang.org/protobuf/proto" + drpc "storj.io/drpc" + drpcerr "storj.io/drpc/drpcerr" +) + +type drpcEncoding_File_agent_agentsocket_proto_agentsocket_proto struct{} + +func (drpcEncoding_File_agent_agentsocket_proto_agentsocket_proto) Marshal(msg drpc.Message) ([]byte, error) { + return proto.Marshal(msg.(proto.Message)) +} + +func (drpcEncoding_File_agent_agentsocket_proto_agentsocket_proto) MarshalAppend(buf []byte, msg drpc.Message) ([]byte, error) { + return proto.MarshalOptions{}.MarshalAppend(buf, msg.(proto.Message)) +} + +func (drpcEncoding_File_agent_agentsocket_proto_agentsocket_proto) Unmarshal(buf []byte, msg drpc.Message) error { + return proto.Unmarshal(buf, msg.(proto.Message)) +} + +func (drpcEncoding_File_agent_agentsocket_proto_agentsocket_proto) JSONMarshal(msg drpc.Message) ([]byte, error) { + return protojson.Marshal(msg.(proto.Message)) +} + +func (drpcEncoding_File_agent_agentsocket_proto_agentsocket_proto) JSONUnmarshal(buf []byte, msg drpc.Message) error { + return protojson.Unmarshal(buf, msg.(proto.Message)) +} + +type DRPCAgentSocketClient interface { + DRPCConn() drpc.Conn + + Ping(ctx context.Context, in *PingRequest) (*PingResponse, error) + SyncStart(ctx context.Context, in *SyncStartRequest) (*SyncStartResponse, error) + SyncWant(ctx context.Context, in *SyncWantRequest) (*SyncWantResponse, error) + SyncComplete(ctx context.Context, in *SyncCompleteRequest) (*SyncCompleteResponse, error) + SyncReady(ctx context.Context, in *SyncReadyRequest) (*SyncReadyResponse, error) + SyncStatus(ctx context.Context, in *SyncStatusRequest) (*SyncStatusResponse, error) +} + +type drpcAgentSocketClient struct { + cc drpc.Conn +} + +func NewDRPCAgentSocketClient(cc drpc.Conn) DRPCAgentSocketClient { + return 
&drpcAgentSocketClient{cc} +} + +func (c *drpcAgentSocketClient) DRPCConn() drpc.Conn { return c.cc } + +func (c *drpcAgentSocketClient) Ping(ctx context.Context, in *PingRequest) (*PingResponse, error) { + out := new(PingResponse) + err := c.cc.Invoke(ctx, "/coder.agentsocket.v1.AgentSocket/Ping", drpcEncoding_File_agent_agentsocket_proto_agentsocket_proto{}, in, out) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *drpcAgentSocketClient) SyncStart(ctx context.Context, in *SyncStartRequest) (*SyncStartResponse, error) { + out := new(SyncStartResponse) + err := c.cc.Invoke(ctx, "/coder.agentsocket.v1.AgentSocket/SyncStart", drpcEncoding_File_agent_agentsocket_proto_agentsocket_proto{}, in, out) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *drpcAgentSocketClient) SyncWant(ctx context.Context, in *SyncWantRequest) (*SyncWantResponse, error) { + out := new(SyncWantResponse) + err := c.cc.Invoke(ctx, "/coder.agentsocket.v1.AgentSocket/SyncWant", drpcEncoding_File_agent_agentsocket_proto_agentsocket_proto{}, in, out) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *drpcAgentSocketClient) SyncComplete(ctx context.Context, in *SyncCompleteRequest) (*SyncCompleteResponse, error) { + out := new(SyncCompleteResponse) + err := c.cc.Invoke(ctx, "/coder.agentsocket.v1.AgentSocket/SyncComplete", drpcEncoding_File_agent_agentsocket_proto_agentsocket_proto{}, in, out) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *drpcAgentSocketClient) SyncReady(ctx context.Context, in *SyncReadyRequest) (*SyncReadyResponse, error) { + out := new(SyncReadyResponse) + err := c.cc.Invoke(ctx, "/coder.agentsocket.v1.AgentSocket/SyncReady", drpcEncoding_File_agent_agentsocket_proto_agentsocket_proto{}, in, out) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *drpcAgentSocketClient) SyncStatus(ctx context.Context, in *SyncStatusRequest) (*SyncStatusResponse, error) { + out := 
new(SyncStatusResponse) + err := c.cc.Invoke(ctx, "/coder.agentsocket.v1.AgentSocket/SyncStatus", drpcEncoding_File_agent_agentsocket_proto_agentsocket_proto{}, in, out) + if err != nil { + return nil, err + } + return out, nil +} + +type DRPCAgentSocketServer interface { + Ping(context.Context, *PingRequest) (*PingResponse, error) + SyncStart(context.Context, *SyncStartRequest) (*SyncStartResponse, error) + SyncWant(context.Context, *SyncWantRequest) (*SyncWantResponse, error) + SyncComplete(context.Context, *SyncCompleteRequest) (*SyncCompleteResponse, error) + SyncReady(context.Context, *SyncReadyRequest) (*SyncReadyResponse, error) + SyncStatus(context.Context, *SyncStatusRequest) (*SyncStatusResponse, error) +} + +type DRPCAgentSocketUnimplementedServer struct{} + +func (s *DRPCAgentSocketUnimplementedServer) Ping(context.Context, *PingRequest) (*PingResponse, error) { + return nil, drpcerr.WithCode(errors.New("Unimplemented"), drpcerr.Unimplemented) +} + +func (s *DRPCAgentSocketUnimplementedServer) SyncStart(context.Context, *SyncStartRequest) (*SyncStartResponse, error) { + return nil, drpcerr.WithCode(errors.New("Unimplemented"), drpcerr.Unimplemented) +} + +func (s *DRPCAgentSocketUnimplementedServer) SyncWant(context.Context, *SyncWantRequest) (*SyncWantResponse, error) { + return nil, drpcerr.WithCode(errors.New("Unimplemented"), drpcerr.Unimplemented) +} + +func (s *DRPCAgentSocketUnimplementedServer) SyncComplete(context.Context, *SyncCompleteRequest) (*SyncCompleteResponse, error) { + return nil, drpcerr.WithCode(errors.New("Unimplemented"), drpcerr.Unimplemented) +} + +func (s *DRPCAgentSocketUnimplementedServer) SyncReady(context.Context, *SyncReadyRequest) (*SyncReadyResponse, error) { + return nil, drpcerr.WithCode(errors.New("Unimplemented"), drpcerr.Unimplemented) +} + +func (s *DRPCAgentSocketUnimplementedServer) SyncStatus(context.Context, *SyncStatusRequest) (*SyncStatusResponse, error) { + return nil, 
drpcerr.WithCode(errors.New("Unimplemented"), drpcerr.Unimplemented) +} + +type DRPCAgentSocketDescription struct{} + +func (DRPCAgentSocketDescription) NumMethods() int { return 6 } + +func (DRPCAgentSocketDescription) Method(n int) (string, drpc.Encoding, drpc.Receiver, interface{}, bool) { + switch n { + case 0: + return "/coder.agentsocket.v1.AgentSocket/Ping", drpcEncoding_File_agent_agentsocket_proto_agentsocket_proto{}, + func(srv interface{}, ctx context.Context, in1, in2 interface{}) (drpc.Message, error) { + return srv.(DRPCAgentSocketServer). + Ping( + ctx, + in1.(*PingRequest), + ) + }, DRPCAgentSocketServer.Ping, true + case 1: + return "/coder.agentsocket.v1.AgentSocket/SyncStart", drpcEncoding_File_agent_agentsocket_proto_agentsocket_proto{}, + func(srv interface{}, ctx context.Context, in1, in2 interface{}) (drpc.Message, error) { + return srv.(DRPCAgentSocketServer). + SyncStart( + ctx, + in1.(*SyncStartRequest), + ) + }, DRPCAgentSocketServer.SyncStart, true + case 2: + return "/coder.agentsocket.v1.AgentSocket/SyncWant", drpcEncoding_File_agent_agentsocket_proto_agentsocket_proto{}, + func(srv interface{}, ctx context.Context, in1, in2 interface{}) (drpc.Message, error) { + return srv.(DRPCAgentSocketServer). + SyncWant( + ctx, + in1.(*SyncWantRequest), + ) + }, DRPCAgentSocketServer.SyncWant, true + case 3: + return "/coder.agentsocket.v1.AgentSocket/SyncComplete", drpcEncoding_File_agent_agentsocket_proto_agentsocket_proto{}, + func(srv interface{}, ctx context.Context, in1, in2 interface{}) (drpc.Message, error) { + return srv.(DRPCAgentSocketServer). + SyncComplete( + ctx, + in1.(*SyncCompleteRequest), + ) + }, DRPCAgentSocketServer.SyncComplete, true + case 4: + return "/coder.agentsocket.v1.AgentSocket/SyncReady", drpcEncoding_File_agent_agentsocket_proto_agentsocket_proto{}, + func(srv interface{}, ctx context.Context, in1, in2 interface{}) (drpc.Message, error) { + return srv.(DRPCAgentSocketServer). 
+ SyncReady( + ctx, + in1.(*SyncReadyRequest), + ) + }, DRPCAgentSocketServer.SyncReady, true + case 5: + return "/coder.agentsocket.v1.AgentSocket/SyncStatus", drpcEncoding_File_agent_agentsocket_proto_agentsocket_proto{}, + func(srv interface{}, ctx context.Context, in1, in2 interface{}) (drpc.Message, error) { + return srv.(DRPCAgentSocketServer). + SyncStatus( + ctx, + in1.(*SyncStatusRequest), + ) + }, DRPCAgentSocketServer.SyncStatus, true + default: + return "", nil, nil, nil, false + } +} + +func DRPCRegisterAgentSocket(mux drpc.Mux, impl DRPCAgentSocketServer) error { + return mux.Register(impl, DRPCAgentSocketDescription{}) +} + +type DRPCAgentSocket_PingStream interface { + drpc.Stream + SendAndClose(*PingResponse) error +} + +type drpcAgentSocket_PingStream struct { + drpc.Stream +} + +func (x *drpcAgentSocket_PingStream) SendAndClose(m *PingResponse) error { + if err := x.MsgSend(m, drpcEncoding_File_agent_agentsocket_proto_agentsocket_proto{}); err != nil { + return err + } + return x.CloseSend() +} + +type DRPCAgentSocket_SyncStartStream interface { + drpc.Stream + SendAndClose(*SyncStartResponse) error +} + +type drpcAgentSocket_SyncStartStream struct { + drpc.Stream +} + +func (x *drpcAgentSocket_SyncStartStream) SendAndClose(m *SyncStartResponse) error { + if err := x.MsgSend(m, drpcEncoding_File_agent_agentsocket_proto_agentsocket_proto{}); err != nil { + return err + } + return x.CloseSend() +} + +type DRPCAgentSocket_SyncWantStream interface { + drpc.Stream + SendAndClose(*SyncWantResponse) error +} + +type drpcAgentSocket_SyncWantStream struct { + drpc.Stream +} + +func (x *drpcAgentSocket_SyncWantStream) SendAndClose(m *SyncWantResponse) error { + if err := x.MsgSend(m, drpcEncoding_File_agent_agentsocket_proto_agentsocket_proto{}); err != nil { + return err + } + return x.CloseSend() +} + +type DRPCAgentSocket_SyncCompleteStream interface { + drpc.Stream + SendAndClose(*SyncCompleteResponse) error +} + +type 
drpcAgentSocket_SyncCompleteStream struct { + drpc.Stream +} + +func (x *drpcAgentSocket_SyncCompleteStream) SendAndClose(m *SyncCompleteResponse) error { + if err := x.MsgSend(m, drpcEncoding_File_agent_agentsocket_proto_agentsocket_proto{}); err != nil { + return err + } + return x.CloseSend() +} + +type DRPCAgentSocket_SyncReadyStream interface { + drpc.Stream + SendAndClose(*SyncReadyResponse) error +} + +type drpcAgentSocket_SyncReadyStream struct { + drpc.Stream +} + +func (x *drpcAgentSocket_SyncReadyStream) SendAndClose(m *SyncReadyResponse) error { + if err := x.MsgSend(m, drpcEncoding_File_agent_agentsocket_proto_agentsocket_proto{}); err != nil { + return err + } + return x.CloseSend() +} + +type DRPCAgentSocket_SyncStatusStream interface { + drpc.Stream + SendAndClose(*SyncStatusResponse) error +} + +type drpcAgentSocket_SyncStatusStream struct { + drpc.Stream +} + +func (x *drpcAgentSocket_SyncStatusStream) SendAndClose(m *SyncStatusResponse) error { + if err := x.MsgSend(m, drpcEncoding_File_agent_agentsocket_proto_agentsocket_proto{}); err != nil { + return err + } + return x.CloseSend() +} diff --git a/agent/agentsocket/proto/version.go b/agent/agentsocket/proto/version.go new file mode 100644 index 0000000000000..9c6f2cb2a4f80 --- /dev/null +++ b/agent/agentsocket/proto/version.go @@ -0,0 +1,17 @@ +package proto + +import "github.com/coder/coder/v2/apiversion" + +// Version history: +// +// API v1.0: +// - Initial release +// - Ping +// - Sync operations: SyncStart, SyncWant, SyncComplete, SyncReady, SyncStatus + +const ( + CurrentMajor = 1 + CurrentMinor = 0 +) + +var CurrentVersion = apiversion.New(CurrentMajor, CurrentMinor) diff --git a/agent/agentsocket/server.go b/agent/agentsocket/server.go new file mode 100644 index 0000000000000..aed3afe4f7251 --- /dev/null +++ b/agent/agentsocket/server.go @@ -0,0 +1,138 @@ +package agentsocket + +import ( + "context" + "errors" + "net" + "sync" + + "golang.org/x/xerrors" + "storj.io/drpc/drpcmux" + 
"storj.io/drpc/drpcserver" + + "cdr.dev/slog" + "github.com/coder/coder/v2/agent/agentsocket/proto" + "github.com/coder/coder/v2/agent/unit" + "github.com/coder/coder/v2/codersdk/drpcsdk" +) + +// Server provides access to the DRPCAgentSocketService via a Unix domain socket. +// Do not invoke Server{} directly. Use NewServer() instead. +type Server struct { + logger slog.Logger + path string + drpcServer *drpcserver.Server + service *DRPCAgentSocketService + + mu sync.Mutex + listener net.Listener + ctx context.Context + cancel context.CancelFunc + wg sync.WaitGroup +} + +// NewServer creates a new agent socket server. +func NewServer(logger slog.Logger, opts ...Option) (*Server, error) { + options := &options{} + for _, opt := range opts { + opt(options) + } + + logger = logger.Named("agentsocket-server") + server := &Server{ + logger: logger, + path: options.path, + service: &DRPCAgentSocketService{ + logger: logger, + unitManager: unit.NewManager(), + }, + } + + mux := drpcmux.New() + err := proto.DRPCRegisterAgentSocket(mux, server.service) + if err != nil { + return nil, xerrors.Errorf("failed to register drpc service: %w", err) + } + + server.drpcServer = drpcserver.NewWithOptions(mux, drpcserver.Options{ + Manager: drpcsdk.DefaultDRPCOptions(nil), + Log: func(err error) { + if errors.Is(err, context.Canceled) || + errors.Is(err, context.DeadlineExceeded) { + return + } + logger.Debug(context.Background(), "drpc server error", slog.Error(err)) + }, + }) + + listener, err := createSocket(server.path) + if err != nil { + return nil, xerrors.Errorf("create socket: %w", err) + } + + server.listener = listener + + // This context is canceled by server.Close(). + // canceling it will close all connections. 
+ server.ctx, server.cancel = context.WithCancel(context.Background()) + + server.logger.Info(server.ctx, "agent socket server started", slog.F("path", server.path)) + + server.wg.Add(1) + go func() { + defer server.wg.Done() + server.acceptConnections() + }() + + return server, nil +} + +// Close stops the server and cleans up resources. +func (s *Server) Close() error { + s.mu.Lock() + + if s.listener == nil { + s.mu.Unlock() + return nil + } + + s.logger.Info(s.ctx, "stopping agent socket server") + + s.cancel() + + if err := s.listener.Close(); err != nil { + s.logger.Warn(s.ctx, "error closing socket listener", slog.Error(err)) + } + + s.listener = nil + + s.mu.Unlock() + + // Wait for all connections to finish + s.wg.Wait() + + if err := cleanupSocket(s.path); err != nil { + s.logger.Warn(s.ctx, "error cleaning up socket file", slog.Error(err)) + } + + s.logger.Info(s.ctx, "agent socket server stopped") + + return nil +} + +func (s *Server) acceptConnections() { + // In an edge case, Close() might race with acceptConnections() and set s.listener to nil. + // Therefore, we grab a copy of the listener under a lock. We might still get a nil listener, + // but then we know close has already run and we can return early. 
+ s.mu.Lock() + listener := s.listener + s.mu.Unlock() + if listener == nil { + return + } + + err := s.drpcServer.Serve(s.ctx, listener) + if err != nil { + s.logger.Warn(s.ctx, "error serving drpc server", slog.Error(err)) + } +} diff --git a/agent/agentsocket/server_test.go b/agent/agentsocket/server_test.go new file mode 100644 index 0000000000000..da74039c401d1 --- /dev/null +++ b/agent/agentsocket/server_test.go @@ -0,0 +1,138 @@ +package agentsocket_test + +import ( + "context" + "path/filepath" + "runtime" + "testing" + + "github.com/google/uuid" + "github.com/spf13/afero" + "github.com/stretchr/testify/require" + + "cdr.dev/slog" + "github.com/coder/coder/v2/agent" + "github.com/coder/coder/v2/agent/agentsocket" + "github.com/coder/coder/v2/agent/agenttest" + agentproto "github.com/coder/coder/v2/agent/proto" + "github.com/coder/coder/v2/codersdk/agentsdk" + "github.com/coder/coder/v2/tailnet" + "github.com/coder/coder/v2/tailnet/tailnettest" + "github.com/coder/coder/v2/testutil" +) + +func TestServer(t *testing.T) { + t.Parallel() + + if runtime.GOOS == "windows" { + t.Skip("agentsocket is not supported on Windows") + } + + t.Run("StartStop", func(t *testing.T) { + t.Parallel() + + socketPath := filepath.Join(t.TempDir(), "test.sock") + logger := slog.Make().Leveled(slog.LevelDebug) + server, err := agentsocket.NewServer(logger, agentsocket.WithPath(socketPath)) + require.NoError(t, err) + require.NoError(t, server.Close()) + }) + + t.Run("AlreadyStarted", func(t *testing.T) { + t.Parallel() + + socketPath := filepath.Join(t.TempDir(), "test.sock") + logger := slog.Make().Leveled(slog.LevelDebug) + server1, err := agentsocket.NewServer(logger, agentsocket.WithPath(socketPath)) + require.NoError(t, err) + defer server1.Close() + _, err = agentsocket.NewServer(logger, agentsocket.WithPath(socketPath)) + require.ErrorContains(t, err, "create socket") + }) + + t.Run("AutoSocketPath", func(t *testing.T) { + t.Parallel() + + socketPath := 
filepath.Join(t.TempDir(), "test.sock") + logger := slog.Make().Leveled(slog.LevelDebug) + server, err := agentsocket.NewServer(logger, agentsocket.WithPath(socketPath)) + require.NoError(t, err) + require.NoError(t, server.Close()) + }) +} + +func TestServerWindowsNotSupported(t *testing.T) { + t.Parallel() + + if runtime.GOOS != "windows" { + t.Skip("this test only runs on Windows") + } + + t.Run("NewServer", func(t *testing.T) { + t.Parallel() + + socketPath := filepath.Join(t.TempDir(), "test.sock") + logger := slog.Make().Leveled(slog.LevelDebug) + _, err := agentsocket.NewServer(logger, agentsocket.WithPath(socketPath)) + require.ErrorContains(t, err, "agentsocket is not supported on Windows") + }) + + t.Run("NewClient", func(t *testing.T) { + t.Parallel() + + _, err := agentsocket.NewClient(context.Background(), agentsocket.WithPath("test.sock")) + require.ErrorContains(t, err, "agentsocket is not supported on Windows") + }) +} + +func TestAgentInitializesOnWindowsWithoutSocketServer(t *testing.T) { + t.Parallel() + + if runtime.GOOS != "windows" { + t.Skip("this test only runs on Windows") + } + + ctx := testutil.Context(t, testutil.WaitShort) + logger := testutil.Logger(t).Named("agent") + + derpMap, _ := tailnettest.RunDERPAndSTUN(t) + + coordinator := tailnet.NewCoordinator(logger) + t.Cleanup(func() { + _ = coordinator.Close() + }) + + statsCh := make(chan *agentproto.Stats, 50) + agentID := uuid.New() + manifest := agentsdk.Manifest{ + AgentID: agentID, + AgentName: "test-agent", + WorkspaceName: "test-workspace", + OwnerName: "test-user", + WorkspaceID: uuid.New(), + DERPMap: derpMap, + } + + client := agenttest.NewClient(t, logger.Named("agenttest"), agentID, manifest, statsCh, coordinator) + t.Cleanup(client.Close) + + options := agent.Options{ + Client: client, + Filesystem: afero.NewMemMapFs(), + Logger: logger.Named("agent"), + ReconnectingPTYTimeout: testutil.WaitShort, + EnvironmentVariables: map[string]string{}, + SocketPath: "", + } + + agnt 
:= agent.New(options) + t.Cleanup(func() { + _ = agnt.Close() + }) + + startup := testutil.TryReceive(ctx, t, client.GetStartup()) + require.NotNil(t, startup, "agent should send startup message") + + err := agnt.Close() + require.NoError(t, err, "agent should close cleanly") +} diff --git a/agent/agentsocket/service.go b/agent/agentsocket/service.go new file mode 100644 index 0000000000000..60248a8fe687b --- /dev/null +++ b/agent/agentsocket/service.go @@ -0,0 +1,152 @@ +package agentsocket + +import ( + "context" + "errors" + + "golang.org/x/xerrors" + + "cdr.dev/slog" + "github.com/coder/coder/v2/agent/agentsocket/proto" + "github.com/coder/coder/v2/agent/unit" +) + +var _ proto.DRPCAgentSocketServer = (*DRPCAgentSocketService)(nil) + +var ErrUnitManagerNotAvailable = xerrors.New("unit manager not available") + +// DRPCAgentSocketService implements the DRPC agent socket service. +type DRPCAgentSocketService struct { + unitManager *unit.Manager + logger slog.Logger +} + +// Ping responds to a ping request to check if the service is alive. +func (*DRPCAgentSocketService) Ping(_ context.Context, _ *proto.PingRequest) (*proto.PingResponse, error) { + return &proto.PingResponse{}, nil +} + +// SyncStart starts a unit in the dependency graph. 
+func (s *DRPCAgentSocketService) SyncStart(_ context.Context, req *proto.SyncStartRequest) (*proto.SyncStartResponse, error) { + if s.unitManager == nil { + return nil, xerrors.Errorf("SyncStart: %w", ErrUnitManagerNotAvailable) + } + + unitID := unit.ID(req.Unit) + + if err := s.unitManager.Register(unitID); err != nil { + if !errors.Is(err, unit.ErrUnitAlreadyRegistered) { + return nil, xerrors.Errorf("SyncStart: %w", err) + } + } + + isReady, err := s.unitManager.IsReady(unitID) + if err != nil { + return nil, xerrors.Errorf("cannot check readiness: %w", err) + } + if !isReady { + return nil, xerrors.Errorf("cannot start unit %q: unit not ready", req.Unit) + } + + err = s.unitManager.UpdateStatus(unitID, unit.StatusStarted) + if err != nil { + return nil, xerrors.Errorf("cannot start unit %q: %w", req.Unit, err) + } + + return &proto.SyncStartResponse{}, nil +} + +// SyncWant declares a dependency between units. +func (s *DRPCAgentSocketService) SyncWant(_ context.Context, req *proto.SyncWantRequest) (*proto.SyncWantResponse, error) { + if s.unitManager == nil { + return nil, xerrors.Errorf("cannot add dependency: %w", ErrUnitManagerNotAvailable) + } + + unitID := unit.ID(req.Unit) + dependsOnID := unit.ID(req.DependsOn) + + if err := s.unitManager.Register(unitID); err != nil && !errors.Is(err, unit.ErrUnitAlreadyRegistered) { + return nil, xerrors.Errorf("cannot add dependency: %w", err) + } + + if err := s.unitManager.AddDependency(unitID, dependsOnID, unit.StatusComplete); err != nil { + return nil, xerrors.Errorf("cannot add dependency: %w", err) + } + + return &proto.SyncWantResponse{}, nil +} + +// SyncComplete marks a unit as complete in the dependency graph. 
+func (s *DRPCAgentSocketService) SyncComplete(_ context.Context, req *proto.SyncCompleteRequest) (*proto.SyncCompleteResponse, error) { + if s.unitManager == nil { + return nil, xerrors.Errorf("cannot complete unit: %w", ErrUnitManagerNotAvailable) + } + + unitID := unit.ID(req.Unit) + + if err := s.unitManager.UpdateStatus(unitID, unit.StatusComplete); err != nil { + return nil, xerrors.Errorf("cannot complete unit %q: %w", req.Unit, err) + } + + return &proto.SyncCompleteResponse{}, nil +} + +// SyncReady checks whether a unit is ready to be started. That is, all dependencies are satisfied. +func (s *DRPCAgentSocketService) SyncReady(_ context.Context, req *proto.SyncReadyRequest) (*proto.SyncReadyResponse, error) { + if s.unitManager == nil { + return nil, xerrors.Errorf("cannot check readiness: %w", ErrUnitManagerNotAvailable) + } + + unitID := unit.ID(req.Unit) + isReady, err := s.unitManager.IsReady(unitID) + if err != nil { + return nil, xerrors.Errorf("cannot check readiness: %w", err) + } + + return &proto.SyncReadyResponse{ + Ready: isReady, + }, nil +} + +// SyncStatus gets the status of a unit and lists its dependencies. 
+func (s *DRPCAgentSocketService) SyncStatus(_ context.Context, req *proto.SyncStatusRequest) (*proto.SyncStatusResponse, error) { + if s.unitManager == nil { + return nil, xerrors.Errorf("cannot get status for unit %q: %w", req.Unit, ErrUnitManagerNotAvailable) + } + + unitID := unit.ID(req.Unit) + + isReady, err := s.unitManager.IsReady(unitID) + if err != nil { + return nil, xerrors.Errorf("cannot check readiness: %w", err) + } + + dependencies, err := s.unitManager.GetAllDependencies(unitID) + switch { + case errors.Is(err, unit.ErrUnitNotFound): + dependencies = []unit.Dependency{} + case err != nil: + return nil, xerrors.Errorf("cannot get dependencies: %w", err) + } + + var depInfos []*proto.DependencyInfo + for _, dep := range dependencies { + depInfos = append(depInfos, &proto.DependencyInfo{ + Unit: string(dep.Unit), + DependsOn: string(dep.DependsOn), + RequiredStatus: string(dep.RequiredStatus), + CurrentStatus: string(dep.CurrentStatus), + IsSatisfied: dep.IsSatisfied, + }) + } + + u, err := s.unitManager.Unit(unitID) + if err != nil { + return nil, xerrors.Errorf("cannot get status for unit %q: %w", req.Unit, err) + } + return &proto.SyncStatusResponse{ + Status: string(u.Status()), + IsReady: isReady, + Dependencies: depInfos, + }, nil +} diff --git a/agent/agentsocket/service_test.go b/agent/agentsocket/service_test.go new file mode 100644 index 0000000000000..925703b63f76d --- /dev/null +++ b/agent/agentsocket/service_test.go @@ -0,0 +1,389 @@ +package agentsocket_test + +import ( + "context" + "crypto/sha256" + "encoding/hex" + "fmt" + "os" + "path/filepath" + "runtime" + "testing" + + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" + + "cdr.dev/slog" + "github.com/coder/coder/v2/agent/agentsocket" + "github.com/coder/coder/v2/agent/unit" + "github.com/coder/coder/v2/testutil" +) + +// tempDirUnixSocket returns a temporary directory that can safely hold unix +// sockets (probably). 
+// +// During tests on darwin we hit the max path length limit for unix sockets +// pretty easily in the default location, so this function uses /tmp instead to +// get shorter paths. To keep paths short, we use a hash of the test name +// instead of the full test name. +func tempDirUnixSocket(t *testing.T) string { + t.Helper() + if runtime.GOOS == "darwin" { + // Use a short hash of the test name to keep the path under 104 chars + hash := sha256.Sum256([]byte(t.Name())) + hashStr := hex.EncodeToString(hash[:])[:8] // Use first 8 chars of hash + dir, err := os.MkdirTemp("/tmp", fmt.Sprintf("c-%s-", hashStr)) + require.NoError(t, err, "create temp dir for unix socket test") + t.Cleanup(func() { + err := os.RemoveAll(dir) + assert.NoError(t, err, "remove temp dir", dir) + }) + return dir + } + return t.TempDir() +} + +// newSocketClient creates a DRPC client connected to the Unix socket at the given path. +func newSocketClient(ctx context.Context, t *testing.T, socketPath string) *agentsocket.Client { + t.Helper() + + client, err := agentsocket.NewClient(ctx, agentsocket.WithPath(socketPath)) + t.Cleanup(func() { + _ = client.Close() + }) + require.NoError(t, err) + + return client +} + +func TestDRPCAgentSocketService(t *testing.T) { + t.Parallel() + + if runtime.GOOS == "windows" { + t.Skip("agentsocket is not supported on Windows") + } + + t.Run("Ping", func(t *testing.T) { + t.Parallel() + + socketPath := filepath.Join(tempDirUnixSocket(t), "test.sock") + ctx := testutil.Context(t, testutil.WaitShort) + server, err := agentsocket.NewServer( + slog.Make().Leveled(slog.LevelDebug), + agentsocket.WithPath(socketPath), + ) + require.NoError(t, err) + defer server.Close() + + client := newSocketClient(ctx, t, socketPath) + + err = client.Ping(ctx) + require.NoError(t, err) + }) + + t.Run("SyncStart", func(t *testing.T) { + t.Parallel() + + t.Run("NewUnit", func(t *testing.T) { + t.Parallel() + socketPath := filepath.Join(tempDirUnixSocket(t), "test.sock") + ctx := 
testutil.Context(t, testutil.WaitShort) + server, err := agentsocket.NewServer( + slog.Make().Leveled(slog.LevelDebug), + agentsocket.WithPath(socketPath), + ) + require.NoError(t, err) + defer server.Close() + + client := newSocketClient(ctx, t, socketPath) + + err = client.SyncStart(ctx, "test-unit") + require.NoError(t, err) + + status, err := client.SyncStatus(ctx, "test-unit") + require.NoError(t, err) + require.Equal(t, unit.StatusStarted, status.Status) + }) + + t.Run("UnitAlreadyStarted", func(t *testing.T) { + t.Parallel() + + socketPath := filepath.Join(tempDirUnixSocket(t), "test.sock") + ctx := testutil.Context(t, testutil.WaitShort) + server, err := agentsocket.NewServer( + slog.Make().Leveled(slog.LevelDebug), + agentsocket.WithPath(socketPath), + ) + require.NoError(t, err) + defer server.Close() + + client := newSocketClient(ctx, t, socketPath) + + // First Start + err = client.SyncStart(ctx, "test-unit") + require.NoError(t, err) + status, err := client.SyncStatus(ctx, "test-unit") + require.NoError(t, err) + require.Equal(t, unit.StatusStarted, status.Status) + + // Second Start + err = client.SyncStart(ctx, "test-unit") + require.ErrorContains(t, err, unit.ErrSameStatusAlreadySet.Error()) + + status, err = client.SyncStatus(ctx, "test-unit") + require.NoError(t, err) + require.Equal(t, unit.StatusStarted, status.Status) + }) + + t.Run("UnitAlreadyCompleted", func(t *testing.T) { + t.Parallel() + + socketPath := filepath.Join(tempDirUnixSocket(t), "test.sock") + ctx := testutil.Context(t, testutil.WaitShort) + server, err := agentsocket.NewServer( + slog.Make().Leveled(slog.LevelDebug), + agentsocket.WithPath(socketPath), + ) + require.NoError(t, err) + defer server.Close() + + client := newSocketClient(ctx, t, socketPath) + + // First start + err = client.SyncStart(ctx, "test-unit") + require.NoError(t, err) + + status, err := client.SyncStatus(ctx, "test-unit") + require.NoError(t, err) + require.Equal(t, unit.StatusStarted, status.Status) + + 
// Complete the unit + err = client.SyncComplete(ctx, "test-unit") + require.NoError(t, err) + + status, err = client.SyncStatus(ctx, "test-unit") + require.NoError(t, err) + require.Equal(t, unit.StatusComplete, status.Status) + + // Second start + err = client.SyncStart(ctx, "test-unit") + require.NoError(t, err) + + status, err = client.SyncStatus(ctx, "test-unit") + require.NoError(t, err) + require.Equal(t, unit.StatusStarted, status.Status) + }) + + t.Run("UnitNotReady", func(t *testing.T) { + t.Parallel() + + socketPath := filepath.Join(tempDirUnixSocket(t), "test.sock") + ctx := testutil.Context(t, testutil.WaitShort) + server, err := agentsocket.NewServer( + slog.Make().Leveled(slog.LevelDebug), + agentsocket.WithPath(socketPath), + ) + require.NoError(t, err) + defer server.Close() + + client := newSocketClient(ctx, t, socketPath) + + err = client.SyncWant(ctx, "test-unit", "dependency-unit") + require.NoError(t, err) + + err = client.SyncStart(ctx, "test-unit") + require.ErrorContains(t, err, "unit not ready") + + status, err := client.SyncStatus(ctx, "test-unit") + require.NoError(t, err) + require.Equal(t, unit.StatusPending, status.Status) + require.False(t, status.IsReady) + }) + }) + + t.Run("SyncWant", func(t *testing.T) { + t.Parallel() + + t.Run("NewUnits", func(t *testing.T) { + t.Parallel() + + socketPath := filepath.Join(tempDirUnixSocket(t), "test.sock") + ctx := testutil.Context(t, testutil.WaitShort) + server, err := agentsocket.NewServer( + slog.Make().Leveled(slog.LevelDebug), + agentsocket.WithPath(socketPath), + ) + require.NoError(t, err) + defer server.Close() + + client := newSocketClient(ctx, t, socketPath) + + // If dependency units are not registered, they are registered automatically + err = client.SyncWant(ctx, "test-unit", "dependency-unit") + require.NoError(t, err) + + status, err := client.SyncStatus(ctx, "test-unit") + require.NoError(t, err) + require.Len(t, status.Dependencies, 1) + require.Equal(t, 
unit.ID("dependency-unit"), status.Dependencies[0].DependsOn) + require.Equal(t, unit.StatusComplete, status.Dependencies[0].RequiredStatus) + }) + + t.Run("DependencyAlreadyRegistered", func(t *testing.T) { + t.Parallel() + + socketPath := filepath.Join(tempDirUnixSocket(t), "test.sock") + ctx := testutil.Context(t, testutil.WaitShort) + server, err := agentsocket.NewServer( + slog.Make().Leveled(slog.LevelDebug), + agentsocket.WithPath(socketPath), + ) + require.NoError(t, err) + defer server.Close() + + client := newSocketClient(ctx, t, socketPath) + + // Start the dependency unit + err = client.SyncStart(ctx, "dependency-unit") + require.NoError(t, err) + + status, err := client.SyncStatus(ctx, "dependency-unit") + require.NoError(t, err) + require.Equal(t, unit.StatusStarted, status.Status) + + // Add the dependency after the dependency unit has already started + err = client.SyncWant(ctx, "test-unit", "dependency-unit") + + // Dependencies can be added even if the dependency unit has already started + require.NoError(t, err) + + // The dependency is now reflected in the test unit's status + status, err = client.SyncStatus(ctx, "test-unit") + require.NoError(t, err) + require.Equal(t, unit.ID("dependency-unit"), status.Dependencies[0].DependsOn) + require.Equal(t, unit.StatusComplete, status.Dependencies[0].RequiredStatus) + }) + + t.Run("DependencyAddedAfterDependentStarted", func(t *testing.T) { + t.Parallel() + + socketPath := filepath.Join(tempDirUnixSocket(t), "test.sock") + ctx := testutil.Context(t, testutil.WaitShort) + server, err := agentsocket.NewServer( + slog.Make().Leveled(slog.LevelDebug), + agentsocket.WithPath(socketPath), + ) + require.NoError(t, err) + defer server.Close() + + client := newSocketClient(ctx, t, socketPath) + + // Start the dependent unit + err = client.SyncStart(ctx, "test-unit") + require.NoError(t, err) + + status, err := client.SyncStatus(ctx, "test-unit") + require.NoError(t, err) + require.Equal(t, unit.StatusStarted, 
status.Status) + + // Add the dependency after the dependency unit has already started + err = client.SyncWant(ctx, "test-unit", "dependency-unit") + + // Dependencies can be added even if the dependent unit has already started. + // The dependency applies the next time a unit is started. The current status is not updated. + // This is to allow flexible dependency management. It does mean that users of this API should + // take care to add dependencies before they start their dependent units. + require.NoError(t, err) + + // The dependency is now reflected in the test unit's status + status, err = client.SyncStatus(ctx, "test-unit") + require.NoError(t, err) + require.Equal(t, unit.ID("dependency-unit"), status.Dependencies[0].DependsOn) + require.Equal(t, unit.StatusComplete, status.Dependencies[0].RequiredStatus) + }) + }) + + t.Run("SyncReady", func(t *testing.T) { + t.Parallel() + + t.Run("UnregisteredUnit", func(t *testing.T) { + t.Parallel() + + socketPath := filepath.Join(tempDirUnixSocket(t), "test.sock") + ctx := testutil.Context(t, testutil.WaitShort) + server, err := agentsocket.NewServer( + slog.Make().Leveled(slog.LevelDebug), + agentsocket.WithPath(socketPath), + ) + require.NoError(t, err) + defer server.Close() + + client := newSocketClient(ctx, t, socketPath) + + ready, err := client.SyncReady(ctx, "unregistered-unit") + require.NoError(t, err) + require.True(t, ready) + }) + + t.Run("UnitNotReady", func(t *testing.T) { + t.Parallel() + + socketPath := filepath.Join(tempDirUnixSocket(t), "test.sock") + ctx := testutil.Context(t, testutil.WaitShort) + server, err := agentsocket.NewServer( + slog.Make().Leveled(slog.LevelDebug), + agentsocket.WithPath(socketPath), + ) + require.NoError(t, err) + defer server.Close() + + client := newSocketClient(ctx, t, socketPath) + + // Register a unit with an unsatisfied dependency + err = client.SyncWant(ctx, "test-unit", "dependency-unit") + require.NoError(t, err) + + // Check readiness - should be false 
because dependency is not satisfied + ready, err := client.SyncReady(ctx, "test-unit") + require.NoError(t, err) + require.False(t, ready) + }) + + t.Run("UnitReady", func(t *testing.T) { + t.Parallel() + + socketPath := filepath.Join(tempDirUnixSocket(t), "test.sock") + ctx := testutil.Context(t, testutil.WaitShort) + server, err := agentsocket.NewServer( + slog.Make().Leveled(slog.LevelDebug), + agentsocket.WithPath(socketPath), + ) + require.NoError(t, err) + defer server.Close() + + client := newSocketClient(ctx, t, socketPath) + + // Register a unit with no dependencies - should be ready immediately + err = client.SyncStart(ctx, "test-unit") + require.NoError(t, err) + + // Check readiness - should be true + ready, err := client.SyncReady(ctx, "test-unit") + require.NoError(t, err) + require.True(t, ready) + + // Also test a unit with satisfied dependencies + err = client.SyncWant(ctx, "dependent-unit", "test-unit") + require.NoError(t, err) + + // Complete the dependency + err = client.SyncComplete(ctx, "test-unit") + require.NoError(t, err) + + // Now dependent-unit should be ready + ready, err = client.SyncReady(ctx, "dependent-unit") + require.NoError(t, err) + require.True(t, ready) + }) + }) +} diff --git a/agent/agentsocket/socket_unix.go b/agent/agentsocket/socket_unix.go new file mode 100644 index 0000000000000..7492fb1d033c8 --- /dev/null +++ b/agent/agentsocket/socket_unix.go @@ -0,0 +1,73 @@ +//go:build !windows + +package agentsocket + +import ( + "context" + "net" + "os" + "path/filepath" + "time" + + "golang.org/x/xerrors" +) + +const defaultSocketPath = "/tmp/coder-agent.sock" + +func createSocket(path string) (net.Listener, error) { + if path == "" { + path = defaultSocketPath + } + + if !isSocketAvailable(path) { + return nil, xerrors.Errorf("socket path %s is not available", path) + } + + if err := os.Remove(path); err != nil && !os.IsNotExist(err) { + return nil, xerrors.Errorf("remove existing socket: %w", err) + } + + parentDir := 
filepath.Dir(path) + if err := os.MkdirAll(parentDir, 0o700); err != nil { + return nil, xerrors.Errorf("create socket directory: %w", err) + } + + listener, err := net.Listen("unix", path) + if err != nil { + return nil, xerrors.Errorf("listen on unix socket: %w", err) + } + + if err := os.Chmod(path, 0o600); err != nil { + _ = listener.Close() + return nil, xerrors.Errorf("set socket permissions: %w", err) + } + return listener, nil +} + +func cleanupSocket(path string) error { + return os.Remove(path) +} + +func isSocketAvailable(path string) bool { + if _, err := os.Stat(path); os.IsNotExist(err) { + return true + } + + // Try to connect to see if it's actually listening. + dialer := net.Dialer{Timeout: 10 * time.Second} + conn, err := dialer.Dial("unix", path) + if err != nil { + return true + } + _ = conn.Close() + return false +} + +func dialSocket(ctx context.Context, path string) (net.Conn, error) { + if path == "" { + path = defaultSocketPath + } + + dialer := net.Dialer{} + return dialer.DialContext(ctx, "unix", path) +} diff --git a/agent/agentsocket/socket_windows.go b/agent/agentsocket/socket_windows.go new file mode 100644 index 0000000000000..e39c8ae3d9236 --- /dev/null +++ b/agent/agentsocket/socket_windows.go @@ -0,0 +1,22 @@ +//go:build windows + +package agentsocket + +import ( + "context" + "net" + + "golang.org/x/xerrors" +) + +func createSocket(_ string) (net.Listener, error) { + return nil, xerrors.New("agentsocket is not supported on Windows") +} + +func cleanupSocket(_ string) error { + return nil +} + +func dialSocket(_ context.Context, _ string) (net.Conn, error) { + return nil, xerrors.New("agentsocket is not supported on Windows") +} diff --git a/agent/api.go b/agent/api.go index f417a046c24a6..a631286c40a02 100644 --- a/agent/api.go +++ b/agent/api.go @@ -2,41 +2,31 @@ package agent import ( "net/http" - "sync" - "time" "github.com/go-chi/chi/v5" "github.com/google/uuid" "github.com/coder/coder/v2/coderd/httpapi" + 
"github.com/coder/coder/v2/coderd/httpmw/loggermw" + "github.com/coder/coder/v2/coderd/tracing" "github.com/coder/coder/v2/codersdk" + "github.com/coder/coder/v2/codersdk/workspacesdk" + "github.com/coder/coder/v2/httpmw" ) func (a *agent) apiHandler() http.Handler { r := chi.NewRouter() + r.Use( + httpmw.Recover(a.logger), + tracing.StatusWriterMiddleware, + loggermw.Logger(a.logger), + ) r.Get("/", func(rw http.ResponseWriter, r *http.Request) { httpapi.Write(r.Context(), rw, http.StatusOK, codersdk.Response{ Message: "Hello from the agent!", }) }) - // Make a copy to ensure the map is not modified after the handler is - // created. - cpy := make(map[int]string) - for k, b := range a.ignorePorts { - cpy[k] = b - } - - cacheDuration := 1 * time.Second - if a.portCacheDuration > 0 { - cacheDuration = a.portCacheDuration - } - - lp := &listeningPortsHandler{ - ignorePorts: cpy, - cacheDuration: cacheDuration, - } - if a.devcontainers { r.Mount("/api/v0/containers", a.containerAPI.Routes()) } else if manifest := a.manifest.Load(); manifest != nil && manifest.ParentID != uuid.Nil { @@ -57,7 +47,7 @@ func (a *agent) apiHandler() http.Handler { promHandler := PrometheusMetricsHandler(a.prometheusRegistry, a.logger) - r.Get("/api/v0/listening-ports", lp.handler) + r.Get("/api/v0/listening-ports", a.listeningPortsHandler.handler) r.Get("/api/v0/netcheck", a.HandleNetcheck) r.Post("/api/v0/list-directory", a.HandleLS) r.Get("/api/v0/read-file", a.HandleReadFile) @@ -72,22 +62,21 @@ func (a *agent) apiHandler() http.Handler { return r } -type listeningPortsHandler struct { - ignorePorts map[int]string - cacheDuration time.Duration +type ListeningPortsGetter interface { + GetListeningPorts() ([]codersdk.WorkspaceAgentListeningPort, error) +} - //nolint: unused // used on some but not all platforms - mut sync.Mutex - //nolint: unused // used on some but not all platforms - ports []codersdk.WorkspaceAgentListeningPort - //nolint: unused // used on some but not all platforms - 
mtime time.Time +type listeningPortsHandler struct { + // In production code, this is set to an osListeningPortsGetter, but it can be overridden for + // testing. + getter ListeningPortsGetter + ignorePorts map[int]string } // handler returns a list of listening ports. This is tested by coderd's // TestWorkspaceAgentListeningPorts test. func (lp *listeningPortsHandler) handler(rw http.ResponseWriter, r *http.Request) { - ports, err := lp.getListeningPorts() + ports, err := lp.getter.GetListeningPorts() if err != nil { httpapi.Write(r.Context(), rw, http.StatusInternalServerError, codersdk.Response{ Message: "Could not scan for listening ports.", @@ -96,7 +85,20 @@ func (lp *listeningPortsHandler) handler(rw http.ResponseWriter, r *http.Request return } + filteredPorts := make([]codersdk.WorkspaceAgentListeningPort, 0, len(ports)) + for _, port := range ports { + if port.Port < workspacesdk.AgentMinimumListeningPort { + continue + } + + // Ignore ports that we've been told to ignore. + if _, ok := lp.ignorePorts[int(port.Port)]; ok { + continue + } + filteredPorts = append(filteredPorts, port) + } + httpapi.Write(r.Context(), rw, http.StatusOK, codersdk.WorkspaceAgentListeningPortsResponse{ - Ports: ports, + Ports: filteredPorts, }) } diff --git a/agent/files.go b/agent/files.go index f2a9ac6edc581..4ac707c602419 100644 --- a/agent/files.go +++ b/agent/files.go @@ -250,7 +250,9 @@ func (a *agent) editFile(ctx context.Context, path string, edits []workspacesdk. transforms[i] = replace.String(edit.Search, edit.Replace) } - tmpfile, err := afero.TempFile(a.filesystem, "", filepath.Base(path)) + // Create an adjacent file to ensure it will be on the same device and can be + // moved atomically. 
+ tmpfile, err := afero.TempFile(a.filesystem, filepath.Dir(path), filepath.Base(path)) if err != nil { return http.StatusInternalServerError, err } diff --git a/agent/ports_supported.go b/agent/ports_supported.go index efa554de983d3..30df6caf7acbe 100644 --- a/agent/ports_supported.go +++ b/agent/ports_supported.go @@ -3,16 +3,23 @@ package agent import ( + "sync" "time" "github.com/cakturk/go-netstat/netstat" "golang.org/x/xerrors" "github.com/coder/coder/v2/codersdk" - "github.com/coder/coder/v2/codersdk/workspacesdk" ) -func (lp *listeningPortsHandler) getListeningPorts() ([]codersdk.WorkspaceAgentListeningPort, error) { +type osListeningPortsGetter struct { + cacheDuration time.Duration + mut sync.Mutex + ports []codersdk.WorkspaceAgentListeningPort + mtime time.Time +} + +func (lp *osListeningPortsGetter) GetListeningPorts() ([]codersdk.WorkspaceAgentListeningPort, error) { lp.mut.Lock() defer lp.mut.Unlock() @@ -33,12 +40,7 @@ func (lp *listeningPortsHandler) getListeningPorts() ([]codersdk.WorkspaceAgentL seen := make(map[uint16]struct{}, len(tabs)) ports := []codersdk.WorkspaceAgentListeningPort{} for _, tab := range tabs { - if tab.LocalAddr == nil || tab.LocalAddr.Port < workspacesdk.AgentMinimumListeningPort { - continue - } - - // Ignore ports that we've been told to ignore. 
- if _, ok := lp.ignorePorts[int(tab.LocalAddr.Port)]; ok { + if tab.LocalAddr == nil { continue } diff --git a/agent/ports_supported_internal_test.go b/agent/ports_supported_internal_test.go new file mode 100644 index 0000000000000..e16bd8a0c88ae --- /dev/null +++ b/agent/ports_supported_internal_test.go @@ -0,0 +1,45 @@ +//go:build linux || (windows && amd64) + +package agent + +import ( + "net" + "testing" + "time" + + "github.com/stretchr/testify/require" +) + +func TestOSListeningPortsGetter(t *testing.T) { + t.Parallel() + + uut := &osListeningPortsGetter{ + cacheDuration: 1 * time.Hour, + } + + l, err := net.Listen("tcp", "localhost:0") + require.NoError(t, err) + defer l.Close() + + ports, err := uut.GetListeningPorts() + require.NoError(t, err) + found := false + for _, port := range ports { + // #nosec G115 - Safe conversion as TCP port numbers are within uint16 range (0-65535) + if port.Port == uint16(l.Addr().(*net.TCPAddr).Port) { + found = true + break + } + } + require.True(t, found) + + // check that we cache the ports + err = l.Close() + require.NoError(t, err) + portsNew, err := uut.GetListeningPorts() + require.NoError(t, err) + require.Equal(t, ports, portsNew) + + // note that it's unsafe to try to assert that a port does not exist in the response + // because the OS may reallocate the port very quickly. 
+} diff --git a/agent/ports_unsupported.go b/agent/ports_unsupported.go index 89ca4f1755e52..661956a3fcc0b 100644 --- a/agent/ports_unsupported.go +++ b/agent/ports_unsupported.go @@ -2,9 +2,17 @@ package agent -import "github.com/coder/coder/v2/codersdk" +import ( + "time" -func (*listeningPortsHandler) getListeningPorts() ([]codersdk.WorkspaceAgentListeningPort, error) { + "github.com/coder/coder/v2/codersdk" +) + +type osListeningPortsGetter struct { + cacheDuration time.Duration +} + +func (*osListeningPortsGetter) GetListeningPorts() ([]codersdk.WorkspaceAgentListeningPort, error) { // Can't scan for ports on non-linux or non-windows_amd64 systems at the // moment. The UI will not show any "no ports found" message to the user, so // the user won't suspect a thing. diff --git a/agent/unit/graph.go b/agent/unit/graph.go index 3d8a6703addf2..e9388680c10d1 100644 --- a/agent/unit/graph.go +++ b/agent/unit/graph.go @@ -58,7 +58,7 @@ func (g *Graph[EdgeType, VertexType]) AddEdge(from, to VertexType, edge EdgeType toID := g.getOrCreateVertexID(to) if g.canReach(to, from) { - return xerrors.Errorf("adding edge (%v -> %v) would create a cycle", from, to) + return xerrors.Errorf("adding edge (%v -> %v): %w", from, to, ErrCycleDetected) } g.gonumGraph.SetEdge(simple.Edge{F: simple.Node(fromID), T: simple.Node(toID)}) diff --git a/agent/unit/graph_test.go b/agent/unit/graph_test.go index 3c76756aee88c..f7d1117be74b3 100644 --- a/agent/unit/graph_test.go +++ b/agent/unit/graph_test.go @@ -148,8 +148,7 @@ func TestGraph(t *testing.T) { graph := &testGraph{} unit1 := &testGraphVertex{Name: "unit1"} err := graph.AddEdge(unit1, unit1, testEdgeCompleted) - require.Error(t, err) - require.ErrorContains(t, err, fmt.Sprintf("adding edge (%v -> %v) would create a cycle", unit1, unit1)) + require.ErrorIs(t, err, unit.ErrCycleDetected) return graph }, @@ -160,8 +159,7 @@ func TestGraph(t *testing.T) { err := graph.AddEdge(unit1, unit2, testEdgeCompleted) require.NoError(t, err) err = 
graph.AddEdge(unit2, unit1, testEdgeStarted) - require.Error(t, err) - require.ErrorContains(t, err, fmt.Sprintf("adding edge (%v -> %v) would create a cycle", unit2, unit1)) + require.ErrorIs(t, err, unit.ErrCycleDetected) return graph }, @@ -341,7 +339,7 @@ func TestGraphThreadSafety(t *testing.T) { // Verify all attempts correctly returned cycle error for i, err := range cycleErrors { require.Error(t, err, "goroutine %d should have detected cycle", i) - require.Contains(t, err.Error(), "would create a cycle") + require.ErrorIs(t, err, unit.ErrCycleDetected) } // Verify graph remains valid (original chain intact) diff --git a/agent/unit/manager.go b/agent/unit/manager.go new file mode 100644 index 0000000000000..88185d3f5ee26 --- /dev/null +++ b/agent/unit/manager.go @@ -0,0 +1,290 @@ +package unit + +import ( + "errors" + "fmt" + "sync" + + "golang.org/x/xerrors" + + "github.com/coder/coder/v2/coderd/util/slice" +) + +var ( + ErrUnitIDRequired = xerrors.New("unit name is required") + ErrUnitNotFound = xerrors.New("unit not found") + ErrUnitAlreadyRegistered = xerrors.New("unit already registered") + ErrCannotUpdateOtherUnit = xerrors.New("cannot update other unit's status") + ErrDependenciesNotSatisfied = xerrors.New("unit dependencies not satisfied") + ErrSameStatusAlreadySet = xerrors.New("same status already set") + ErrCycleDetected = xerrors.New("cycle detected") + ErrFailedToAddDependency = xerrors.New("failed to add dependency") +) + +// Status represents the status of a unit. +type Status string + +var _ fmt.Stringer = Status("") + +func (s Status) String() string { + if s == StatusNotRegistered { + return "not registered" + } + return string(s) +} + +// Status constants for dependency tracking. +const ( + StatusNotRegistered Status = "" + StatusPending Status = "pending" + StatusStarted Status = "started" + StatusComplete Status = "completed" +) + +// ID provides a type narrowed representation of the unique identifier of a unit. 
+type ID string + +// Unit represents a point-in-time snapshot of a vertex in the dependency graph. +// Units may depend on other units, or be depended on by other units. The unit struct +// is not aware of updates made to the dependency graph after it is initialized and should +// not be cached. +type Unit struct { + id ID + status Status + // ready is true if all dependencies are satisfied. + // It does not have an accessor method on Unit, because a unit cannot know whether it is ready. + // Only the Manager can calculate whether a unit is ready based on knowledge of the dependency graph. + // To discourage use of an outdated readiness value, only the Manager should set and return this field. + ready bool +} + +func (u Unit) ID() ID { + return u.id +} + +func (u Unit) Status() Status { + return u.status +} + +// Dependency represents a dependency relationship between units. +type Dependency struct { + Unit ID + DependsOn ID + RequiredStatus Status + CurrentStatus Status + IsSatisfied bool +} + +// Manager provides reactive dependency tracking over a Graph. +// It manages Unit registration, dependency relationships, and status updates +// with automatic recalculation of readiness when dependencies are satisfied. +type Manager struct { + mu sync.RWMutex + + // The underlying graph that stores dependency relationships + graph *Graph[Status, ID] + + // Store vertex instances for each unit to ensure consistent references + units map[ID]Unit +} + +// NewManager creates a new Manager instance. +func NewManager() *Manager { + return &Manager{ + graph: &Graph[Status, ID]{}, + units: make(map[ID]Unit), + } +} + +// Register adds a unit to the manager if it is not already registered. +// If a Unit is already registered (per the ID field), it is not updated. 
+func (m *Manager) Register(id ID) error { + m.mu.Lock() + defer m.mu.Unlock() + + if id == "" { + return xerrors.Errorf("registering unit %q: %w", id, ErrUnitIDRequired) + } + + if m.registered(id) { + return xerrors.Errorf("registering unit %q: %w", id, ErrUnitAlreadyRegistered) + } + + m.units[id] = Unit{ + id: id, + status: StatusPending, + ready: true, + } + + return nil +} + +// registered checks if a unit is registered in the manager. +func (m *Manager) registered(id ID) bool { + return m.units[id].status != StatusNotRegistered +} + +// Unit fetches a unit from the manager. If the unit does not exist, +// it returns the Unit zero-value as a placeholder unit, because +// units may depend on other units that have not yet been created. +func (m *Manager) Unit(id ID) (Unit, error) { + if id == "" { + return Unit{}, xerrors.Errorf("unit ID cannot be empty: %w", ErrUnitIDRequired) + } + + m.mu.RLock() + defer m.mu.RUnlock() + + return m.units[id], nil +} + +func (m *Manager) IsReady(id ID) (bool, error) { + if id == "" { + return false, xerrors.Errorf("unit ID cannot be empty: %w", ErrUnitIDRequired) + } + + m.mu.RLock() + defer m.mu.RUnlock() + + if !m.registered(id) { + return true, nil + } + + return m.units[id].ready, nil +} + +// AddDependency adds a dependency relationship between units. +// The unit depends on the dependsOn unit reaching the requiredStatus. 
+func (m *Manager) AddDependency(unit ID, dependsOn ID, requiredStatus Status) error { + m.mu.Lock() + defer m.mu.Unlock() + + switch { + case unit == "": + return xerrors.Errorf("dependent name cannot be empty: %w", ErrUnitIDRequired) + case dependsOn == "": + return xerrors.Errorf("dependency name cannot be empty: %w", ErrUnitIDRequired) + case !m.registered(unit): + return xerrors.Errorf("dependent unit %q must be registered first: %w", unit, ErrUnitNotFound) + } + + // Add the dependency edge to the graph + // The edge goes from unit to dependsOn, representing the dependency + err := m.graph.AddEdge(unit, dependsOn, requiredStatus) + if err != nil { + return xerrors.Errorf("adding edge for unit %q: %w", unit, errors.Join(ErrFailedToAddDependency, err)) + } + + // Recalculate readiness for the unit since it now has a new dependency + m.recalculateReadinessUnsafe(unit) + + return nil +} + +// UpdateStatus updates a unit's status and recalculates readiness for affected dependents. +func (m *Manager) UpdateStatus(unit ID, newStatus Status) error { + m.mu.Lock() + defer m.mu.Unlock() + + switch { + case unit == "": + return xerrors.Errorf("updating status for unit %q: %w", unit, ErrUnitIDRequired) + case !m.registered(unit): + return xerrors.Errorf("unit %q must be registered first: %w", unit, ErrUnitNotFound) + } + + u := m.units[unit] + if u.status == newStatus { + return xerrors.Errorf("checking status for unit %q: %w", unit, ErrSameStatusAlreadySet) + } + + u.status = newStatus + m.units[unit] = u + + // Get all units that depend on this one (reverse adjacent vertices) + dependents := m.graph.GetReverseAdjacentVertices(unit) + + // Recalculate readiness for all dependents + for _, dependent := range dependents { + m.recalculateReadinessUnsafe(dependent.From) + } + + return nil +} + +// recalculateReadinessUnsafe recalculates the readiness state for a unit. +// This method assumes the caller holds the write lock. 
+func (m *Manager) recalculateReadinessUnsafe(unit ID) { + u := m.units[unit] + dependencies := m.graph.GetForwardAdjacentVertices(unit) + + allSatisfied := true + for _, dependency := range dependencies { + requiredStatus := dependency.Edge + dependsOnUnit := m.units[dependency.To] + if dependsOnUnit.status != requiredStatus { + allSatisfied = false + break + } + } + + u.ready = allSatisfied + m.units[unit] = u +} + +// GetGraph returns the underlying graph for visualization and debugging. +// This should be used carefully as it exposes the internal graph structure. +func (m *Manager) GetGraph() *Graph[Status, ID] { + return m.graph +} + +// GetAllDependencies returns all dependencies for a unit, both satisfied and unsatisfied. +func (m *Manager) GetAllDependencies(unit ID) ([]Dependency, error) { + m.mu.RLock() + defer m.mu.RUnlock() + + if unit == "" { + return nil, xerrors.Errorf("unit ID cannot be empty: %w", ErrUnitIDRequired) + } + + if !m.registered(unit) { + return nil, xerrors.Errorf("checking registration for unit %q: %w", unit, ErrUnitNotFound) + } + + dependencies := m.graph.GetForwardAdjacentVertices(unit) + + var allDependencies []Dependency + + for _, dependency := range dependencies { + dependsOnUnit := m.units[dependency.To] + requiredStatus := dependency.Edge + allDependencies = append(allDependencies, Dependency{ + Unit: unit, + DependsOn: dependency.To, + RequiredStatus: requiredStatus, + CurrentStatus: dependsOnUnit.status, + IsSatisfied: dependsOnUnit.status == requiredStatus, + }) + } + + return allDependencies, nil +} + +// GetUnmetDependencies returns a list of unsatisfied dependencies for a unit. 
+func (m *Manager) GetUnmetDependencies(unit ID) ([]Dependency, error) { + allDependencies, err := m.GetAllDependencies(unit) + if err != nil { + return nil, err + } + + var unmetDependencies []Dependency = slice.Filter(allDependencies, func(dependency Dependency) bool { + return !dependency.IsSatisfied + }) + + return unmetDependencies, nil +} + +// ExportDOT exports the dependency graph to DOT format for visualization. +func (m *Manager) ExportDOT(name string) (string, error) { + return m.graph.ToDOT(name) +} diff --git a/agent/unit/manager_test.go b/agent/unit/manager_test.go new file mode 100644 index 0000000000000..1729a047a9b54 --- /dev/null +++ b/agent/unit/manager_test.go @@ -0,0 +1,743 @@ +package unit_test + +import ( + "testing" + + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" + + "github.com/coder/coder/v2/agent/unit" +) + +const ( + unitA unit.ID = "serviceA" + unitB unit.ID = "serviceB" + unitC unit.ID = "serviceC" + unitD unit.ID = "serviceD" +) + +func TestManager_UnitValidation(t *testing.T) { + t.Parallel() + + t.Run("Empty Unit Name", func(t *testing.T) { + t.Parallel() + + manager := unit.NewManager() + + err := manager.Register("") + require.ErrorIs(t, err, unit.ErrUnitIDRequired) + err = manager.AddDependency("", unitA, unit.StatusStarted) + require.ErrorIs(t, err, unit.ErrUnitIDRequired) + err = manager.AddDependency(unitA, "", unit.StatusStarted) + require.ErrorIs(t, err, unit.ErrUnitIDRequired) + dependencies, err := manager.GetAllDependencies("") + require.ErrorIs(t, err, unit.ErrUnitIDRequired) + require.Len(t, dependencies, 0) + unmetDependencies, err := manager.GetUnmetDependencies("") + require.ErrorIs(t, err, unit.ErrUnitIDRequired) + require.Len(t, unmetDependencies, 0) + err = manager.UpdateStatus("", unit.StatusStarted) + require.ErrorIs(t, err, unit.ErrUnitIDRequired) + isReady, err := manager.IsReady("") + require.ErrorIs(t, err, unit.ErrUnitIDRequired) + require.False(t, isReady) + u, err := 
manager.Unit("") + require.ErrorIs(t, err, unit.ErrUnitIDRequired) + assert.Equal(t, unit.Unit{}, u) + }) +} + +func TestManager_Register(t *testing.T) { + t.Parallel() + + t.Run("RegisterNewUnit", func(t *testing.T) { + t.Parallel() + + manager := unit.NewManager() + + // Given: a unit is registered + err := manager.Register(unitA) + require.NoError(t, err) + + // Then: the unit should be ready (no dependencies) + u, err := manager.Unit(unitA) + require.NoError(t, err) + assert.Equal(t, unitA, u.ID()) + assert.Equal(t, unit.StatusPending, u.Status()) + isReady, err := manager.IsReady(unitA) + require.NoError(t, err) + assert.True(t, isReady) + }) + + t.Run("RegisterDuplicateUnit", func(t *testing.T) { + t.Parallel() + + manager := unit.NewManager() + + // Given: a unit is registered + err := manager.Register(unitA) + require.NoError(t, err) + + // Newly registered units have StatusPending. We update the unit status to StatusStarted, + // so we can later assert that it is not overwritten back to StatusPending by the second + // register call + manager.UpdateStatus(unitA, unit.StatusStarted) + + // When: the unit is registered again + err = manager.Register(unitA) + + // Then: a descriptive error should be returned + require.ErrorIs(t, err, unit.ErrUnitAlreadyRegistered) + + // Then: the unit status should not be overwritten + u, err := manager.Unit(unitA) + require.NoError(t, err) + assert.Equal(t, unit.StatusStarted, u.Status()) + isReady, err := manager.IsReady(unitA) + require.NoError(t, err) + assert.True(t, isReady) + }) + + t.Run("RegisterMultipleUnits", func(t *testing.T) { + t.Parallel() + + manager := unit.NewManager() + + // Given: multiple units are registered + unitIDs := []unit.ID{unitA, unitB, unitC} + for _, unit := range unitIDs { + err := manager.Register(unit) + require.NoError(t, err) + } + + // Then: all units should be ready initially + for _, unitID := range unitIDs { + u, err := manager.Unit(unitID) + require.NoError(t, err) + assert.Equal(t, 
unit.StatusPending, u.Status()) + isReady, err := manager.IsReady(unitID) + require.NoError(t, err) + assert.True(t, isReady) + } + }) +} + +func TestManager_AddDependency(t *testing.T) { + t.Parallel() + + t.Run("AddDependencyBetweenRegisteredUnits", func(t *testing.T) { + t.Parallel() + + manager := unit.NewManager() + + // Given: units A and B are registered + err := manager.Register(unitA) + require.NoError(t, err) + err = manager.Register(unitB) + require.NoError(t, err) + + // Given: Unit A depends on Unit B being unit.StatusStarted + err = manager.AddDependency(unitA, unitB, unit.StatusStarted) + require.NoError(t, err) + + // Then: Unit A should not be ready (depends on B) + u, err := manager.Unit(unitA) + require.NoError(t, err) + assert.Equal(t, unit.StatusPending, u.Status()) + isReady, err := manager.IsReady(unitA) + require.NoError(t, err) + assert.False(t, isReady) + + // Then: Unit B should still be ready (no dependencies) + u, err = manager.Unit(unitB) + require.NoError(t, err) + assert.Equal(t, unit.StatusPending, u.Status()) + isReady, err = manager.IsReady(unitB) + require.NoError(t, err) + assert.True(t, isReady) + + // When: Unit B is started + err = manager.UpdateStatus(unitB, unit.StatusStarted) + require.NoError(t, err) + + // Then: Unit A should be ready, because its dependency is now in the desired state. + isReady, err = manager.IsReady(unitA) + require.NoError(t, err) + assert.True(t, isReady) + + // When: Unit B is stopped + err = manager.UpdateStatus(unitB, unit.StatusPending) + require.NoError(t, err) + + // Then: Unit A should no longer be ready, because its dependency is not in the desired state. 
+ isReady, err = manager.IsReady(unitA) + require.NoError(t, err) + assert.False(t, isReady) + }) + + t.Run("AddDependencyByAnUnregisteredDependentUnit", func(t *testing.T) { + t.Parallel() + + manager := unit.NewManager() + + // Given Unit B is registered + err := manager.Register(unitB) + require.NoError(t, err) + + // Given Unit A depends on Unit B being started + err = manager.AddDependency(unitA, unitB, unit.StatusStarted) + + // Then: a descriptive error communicates that the dependency cannot be added + // because the dependent unit must be registered first. + require.ErrorIs(t, err, unit.ErrUnitNotFound) + }) + + t.Run("AddDependencyOnAnUnregisteredUnit", func(t *testing.T) { + t.Parallel() + + manager := unit.NewManager() + + // Given unit A is registered + err := manager.Register(unitA) + require.NoError(t, err) + + // Given Unit B is not yet registered + // And Unit A depends on Unit B being started + err = manager.AddDependency(unitA, unitB, unit.StatusStarted) + require.NoError(t, err) + + // Then: The dependency should be visible in Unit A's status + dependencies, err := manager.GetAllDependencies(unitA) + require.NoError(t, err) + require.Len(t, dependencies, 1) + assert.Equal(t, unitB, dependencies[0].DependsOn) + assert.Equal(t, unit.StatusStarted, dependencies[0].RequiredStatus) + assert.False(t, dependencies[0].IsSatisfied) + + u, err := manager.Unit(unitB) + require.NoError(t, err) + assert.Equal(t, unit.StatusNotRegistered, u.Status()) + + // Then: Unit A should not be ready, because it depends on Unit B + isReady, err := manager.IsReady(unitA) + require.NoError(t, err) + assert.False(t, isReady) + + // When: Unit B is registered + err = manager.Register(unitB) + require.NoError(t, err) + + // Then: Unit A should still not be ready. + // Unit B is not registered, but it has not been started as required by the dependency. 
+ isReady, err = manager.IsReady(unitA) + require.NoError(t, err) + assert.False(t, isReady) + + // When: Unit B is started + err = manager.UpdateStatus(unitB, unit.StatusStarted) + require.NoError(t, err) + + // Then: Unit A should be ready, because its dependency is now in the desired state. + isReady, err = manager.IsReady(unitA) + require.NoError(t, err) + assert.True(t, isReady) + }) + + t.Run("AddDependencyCreatesACyclicDependency", func(t *testing.T) { + t.Parallel() + + manager := unit.NewManager() + + // Register units + err := manager.Register(unitA) + require.NoError(t, err) + err = manager.Register(unitB) + require.NoError(t, err) + err = manager.Register(unitC) + require.NoError(t, err) + err = manager.Register(unitD) + require.NoError(t, err) + + // A depends on B + err = manager.AddDependency(unitA, unitB, unit.StatusStarted) + require.NoError(t, err) + // B depends on C + err = manager.AddDependency(unitB, unitC, unit.StatusStarted) + require.NoError(t, err) + + // C depends on D + err = manager.AddDependency(unitC, unitD, unit.StatusStarted) + require.NoError(t, err) + + // Try to make D depend on A (creates indirect cycle) + err = manager.AddDependency(unitD, unitA, unit.StatusStarted) + require.ErrorIs(t, err, unit.ErrCycleDetected) + }) + + t.Run("UpdatingADependency", func(t *testing.T) { + t.Parallel() + + manager := unit.NewManager() + + // Given units A and B are registered + err := manager.Register(unitA) + require.NoError(t, err) + err = manager.Register(unitB) + require.NoError(t, err) + + // Given Unit A depends on Unit B being unit.StatusStarted + err = manager.AddDependency(unitA, unitB, unit.StatusStarted) + require.NoError(t, err) + + // When: The dependency is updated to unit.StatusComplete + err = manager.AddDependency(unitA, unitB, unit.StatusComplete) + require.NoError(t, err) + + // Then: Unit A should only have one dependency, and it should be unit.StatusComplete + dependencies, err := manager.GetAllDependencies(unitA) + 
require.NoError(t, err) + require.Len(t, dependencies, 1) + assert.Equal(t, unit.StatusComplete, dependencies[0].RequiredStatus) + }) +} + +func TestManager_UpdateStatus(t *testing.T) { + t.Parallel() + + t.Run("UpdateStatusTriggersReadinessRecalculation", func(t *testing.T) { + t.Parallel() + + manager := unit.NewManager() + + // Given units A and B are registered + err := manager.Register(unitA) + require.NoError(t, err) + err = manager.Register(unitB) + require.NoError(t, err) + + // Given Unit A depends on Unit B being unit.StatusStarted + err = manager.AddDependency(unitA, unitB, unit.StatusStarted) + require.NoError(t, err) + + // Then: Unit A should not be ready (depends on B) + u, err := manager.Unit(unitA) + require.NoError(t, err) + assert.Equal(t, unit.StatusPending, u.Status()) + isReady, err := manager.IsReady(unitA) + require.NoError(t, err) + assert.False(t, isReady) + + // When: Unit B is started + err = manager.UpdateStatus(unitB, unit.StatusStarted) + require.NoError(t, err) + + // Then: Unit A should be ready, because its dependency is now in the desired state. + u, err = manager.Unit(unitA) + require.NoError(t, err) + assert.Equal(t, unit.StatusPending, u.Status()) + isReady, err = manager.IsReady(unitA) + require.NoError(t, err) + assert.True(t, isReady) + }) + + t.Run("UpdateStatusWithUnregisteredUnit", func(t *testing.T) { + t.Parallel() + + manager := unit.NewManager() + + // Given Unit A is not registered + // When: Unit A is updated to unit.StatusStarted + err := manager.UpdateStatus(unitA, unit.StatusStarted) + + // Then: a descriptive error communicates that the unit must be registered first. 
+ require.ErrorIs(t, err, unit.ErrUnitNotFound) + }) + + t.Run("LinearChainDependencies", func(t *testing.T) { + t.Parallel() + + manager := unit.NewManager() + + // Given units A, B, and C are registered + err := manager.Register(unitA) + require.NoError(t, err) + err = manager.Register(unitB) + require.NoError(t, err) + err = manager.Register(unitC) + require.NoError(t, err) + + // Create chain: A depends on B being "started", B depends on C being "completed" + err = manager.AddDependency(unitA, unitB, unit.StatusStarted) + require.NoError(t, err) + err = manager.AddDependency(unitB, unitC, unit.StatusComplete) + require.NoError(t, err) + + // Then: only Unit C should be ready (no dependencies) + u, err := manager.Unit(unitC) + require.NoError(t, err) + assert.Equal(t, unit.StatusPending, u.Status()) + isReady, err := manager.IsReady(unitC) + require.NoError(t, err) + assert.True(t, isReady) + + u, err = manager.Unit(unitB) + require.NoError(t, err) + assert.Equal(t, unit.StatusPending, u.Status()) + isReady, err = manager.IsReady(unitB) + require.NoError(t, err) + assert.False(t, isReady) + + u, err = manager.Unit(unitA) + require.NoError(t, err) + assert.Equal(t, unit.StatusPending, u.Status()) + isReady, err = manager.IsReady(unitA) + require.NoError(t, err) + assert.False(t, isReady) + + // When: Unit C is completed + err = manager.UpdateStatus(unitC, unit.StatusComplete) + require.NoError(t, err) + + // Then: Unit B should be ready, because its dependency is now in the desired state. 
+ u, err = manager.Unit(unitB) + require.NoError(t, err) + assert.Equal(t, unit.StatusPending, u.Status()) + isReady, err = manager.IsReady(unitB) + require.NoError(t, err) + assert.True(t, isReady) + + u, err = manager.Unit(unitA) + require.NoError(t, err) + assert.Equal(t, unit.StatusPending, u.Status()) + isReady, err = manager.IsReady(unitA) + require.NoError(t, err) + assert.False(t, isReady) + + u, err = manager.Unit(unitB) + require.NoError(t, err) + assert.Equal(t, unit.StatusPending, u.Status()) + isReady, err = manager.IsReady(unitB) + require.NoError(t, err) + assert.True(t, isReady) + + // When: Unit B is started + err = manager.UpdateStatus(unitB, unit.StatusStarted) + require.NoError(t, err) + + // Then: Unit A should be ready, because its dependency is now in the desired state. + u, err = manager.Unit(unitA) + require.NoError(t, err) + assert.Equal(t, unit.StatusPending, u.Status()) + isReady, err = manager.IsReady(unitA) + require.NoError(t, err) + assert.True(t, isReady) + }) +} + +func TestManager_GetUnmetDependencies(t *testing.T) { + t.Parallel() + + t.Run("GetUnmetDependenciesForUnitWithNoDependencies", func(t *testing.T) { + t.Parallel() + + manager := unit.NewManager() + + // Given: Unit A is registered + err := manager.Register(unitA) + require.NoError(t, err) + + // Given: Unit A has no dependencies + // Then: Unit A should have no unmet dependencies + unmet, err := manager.GetUnmetDependencies(unitA) + require.NoError(t, err) + assert.Empty(t, unmet) + }) + + t.Run("GetUnmetDependenciesForUnitWithUnsatisfiedDependencies", func(t *testing.T) { + t.Parallel() + + manager := unit.NewManager() + err := manager.Register(unitA) + require.NoError(t, err) + err = manager.Register(unitB) + require.NoError(t, err) + + // Given: Unit A depends on Unit B being unit.StatusStarted + err = manager.AddDependency(unitA, unitB, unit.StatusStarted) + require.NoError(t, err) + + unmet, err := manager.GetUnmetDependencies(unitA) + require.NoError(t, err) + 
require.Len(t, unmet, 1) + + assert.Equal(t, unitA, unmet[0].Unit) + assert.Equal(t, unitB, unmet[0].DependsOn) + assert.Equal(t, unit.StatusStarted, unmet[0].RequiredStatus) + assert.False(t, unmet[0].IsSatisfied) + }) + + t.Run("GetUnmetDependenciesForUnitWithSatisfiedDependencies", func(t *testing.T) { + t.Parallel() + + manager := unit.NewManager() + + // Given: Unit A and Unit B are registered + err := manager.Register(unitA) + require.NoError(t, err) + err = manager.Register(unitB) + require.NoError(t, err) + + // Given: Unit A depends on Unit B being unit.StatusStarted + err = manager.AddDependency(unitA, unitB, unit.StatusStarted) + require.NoError(t, err) + + // When: Unit B is started + err = manager.UpdateStatus(unitB, unit.StatusStarted) + require.NoError(t, err) + + // Then: Unit A should have no unmet dependencies + unmet, err := manager.GetUnmetDependencies(unitA) + require.NoError(t, err) + assert.Empty(t, unmet) + }) + + t.Run("GetUnmetDependenciesForUnregisteredUnit", func(t *testing.T) { + t.Parallel() + + manager := unit.NewManager() + + // When: Unit A is requested + unmet, err := manager.GetUnmetDependencies(unitA) + + // Then: a descriptive error communicates that the unit must be registered first. 
+ require.ErrorIs(t, err, unit.ErrUnitNotFound) + assert.Nil(t, unmet) + }) +} + +func TestManager_MultipleDependencies(t *testing.T) { + t.Parallel() + + t.Run("UnitWithMultipleDependencies", func(t *testing.T) { + t.Parallel() + + manager := unit.NewManager() + + // Register all units + units := []unit.ID{unitA, unitB, unitC, unitD} + for _, unit := range units { + err := manager.Register(unit) + require.NoError(t, err) + } + + // A depends on B being unit.StatusStarted AND C being "started" + err := manager.AddDependency(unitA, unitB, unit.StatusStarted) + require.NoError(t, err) + err = manager.AddDependency(unitA, unitC, unit.StatusStarted) + require.NoError(t, err) + + // A should not be ready (depends on both B and C) + isReady, err := manager.IsReady(unitA) + require.NoError(t, err) + assert.False(t, isReady) + + // Update B to unit.StatusStarted - A should still not be ready (needs C too) + err = manager.UpdateStatus(unitB, unit.StatusStarted) + require.NoError(t, err) + + isReady, err = manager.IsReady(unitA) + require.NoError(t, err) + assert.False(t, isReady) + + // Update C to "started" - A should now be ready + err = manager.UpdateStatus(unitC, unit.StatusStarted) + require.NoError(t, err) + + isReady, err = manager.IsReady(unitA) + require.NoError(t, err) + assert.True(t, isReady) + }) + + t.Run("ComplexDependencyChain", func(t *testing.T) { + t.Parallel() + + manager := unit.NewManager() + + // Register all units + units := []unit.ID{unitA, unitB, unitC, unitD} + for _, unit := range units { + err := manager.Register(unit) + require.NoError(t, err) + } + + // Create complex dependency graph: + // A depends on B being unit.StatusStarted AND C being "started" + err := manager.AddDependency(unitA, unitB, unit.StatusStarted) + require.NoError(t, err) + err = manager.AddDependency(unitA, unitC, unit.StatusStarted) + require.NoError(t, err) + // B depends on D being "completed" + err = manager.AddDependency(unitB, unitD, unit.StatusComplete) + 
require.NoError(t, err) + // C depends on D being "completed" + err = manager.AddDependency(unitC, unitD, unit.StatusComplete) + require.NoError(t, err) + + // Initially only D is ready + isReady, err := manager.IsReady(unitD) + require.NoError(t, err) + assert.True(t, isReady) + isReady, err = manager.IsReady(unitB) + require.NoError(t, err) + assert.False(t, isReady) + isReady, err = manager.IsReady(unitC) + require.NoError(t, err) + assert.False(t, isReady) + isReady, err = manager.IsReady(unitA) + require.NoError(t, err) + assert.False(t, isReady) + + // Update D to "completed" - B and C should become ready + err = manager.UpdateStatus(unitD, unit.StatusComplete) + require.NoError(t, err) + + isReady, err = manager.IsReady(unitB) + require.NoError(t, err) + assert.True(t, isReady) + isReady, err = manager.IsReady(unitC) + require.NoError(t, err) + assert.True(t, isReady) + isReady, err = manager.IsReady(unitA) + require.NoError(t, err) + assert.False(t, isReady) + + // Update B to unit.StatusStarted - A should still not be ready (needs C) + err = manager.UpdateStatus(unitB, unit.StatusStarted) + require.NoError(t, err) + + isReady, err = manager.IsReady(unitA) + require.NoError(t, err) + assert.False(t, isReady) + + // Update C to "started" - A should now be ready + err = manager.UpdateStatus(unitC, unit.StatusStarted) + require.NoError(t, err) + + isReady, err = manager.IsReady(unitA) + require.NoError(t, err) + assert.True(t, isReady) + }) + + t.Run("DifferentStatusTypes", func(t *testing.T) { + t.Parallel() + + manager := unit.NewManager() + + // Register units + err := manager.Register(unitA) + require.NoError(t, err) + err = manager.Register(unitB) + require.NoError(t, err) + err = manager.Register(unitC) + require.NoError(t, err) + + // Given: Unit A depends on Unit B being unit.StatusStarted + err = manager.AddDependency(unitA, unitB, unit.StatusStarted) + require.NoError(t, err) + // Given: Unit A depends on Unit C being "completed" + err = 
manager.AddDependency(unitA, unitC, unit.StatusComplete) + require.NoError(t, err) + + // When: Unit B is started + err = manager.UpdateStatus(unitB, unit.StatusStarted) + require.NoError(t, err) + + // Then: Unit A should not be ready, because only one of its dependencies is in the desired state. + // It still requires Unit C to be completed. + isReady, err := manager.IsReady(unitA) + require.NoError(t, err) + assert.False(t, isReady) + + // When: Unit C is completed + err = manager.UpdateStatus(unitC, unit.StatusComplete) + require.NoError(t, err) + + // Then: Unit A should be ready, because both of its dependencies are in the desired state. + isReady, err = manager.IsReady(unitA) + require.NoError(t, err) + assert.True(t, isReady) + }) +} + +func TestManager_IsReady(t *testing.T) { + t.Parallel() + + t.Run("IsReadyWithUnregisteredUnit", func(t *testing.T) { + t.Parallel() + + manager := unit.NewManager() + + // Given: a unit is not registered + u, err := manager.Unit(unitA) + require.NoError(t, err) + assert.Equal(t, unit.StatusNotRegistered, u.Status()) + // Then: the unit is considered ready, since it has no registered dependencies to satisfy + isReady, err := manager.IsReady(unitA) + require.NoError(t, err) + assert.True(t, isReady) + }) +} + +func TestManager_ToDOT(t *testing.T) { + t.Parallel() + + t.Run("ExportSimpleGraph", func(t *testing.T) { + t.Parallel() + + manager := unit.NewManager() + + // Register units + err := manager.Register(unitA) + require.NoError(t, err) + err = manager.Register(unitB) + require.NoError(t, err) + + // Add dependency + err = manager.AddDependency(unitA, unitB, unit.StatusStarted) + require.NoError(t, err) + + dot, err := manager.ExportDOT("test") + require.NoError(t, err) + assert.NotEmpty(t, dot) + assert.Contains(t, dot, "digraph") + }) + + t.Run("ExportComplexGraph", func(t *testing.T) { + t.Parallel() + + manager := unit.NewManager() + + // Register all units + units := []unit.ID{unitA, unitB, unitC, unitD} + for _, unit := range units { + err := manager.Register(unit) + 
require.NoError(t, err) + } + + // Create complex dependency graph + // A depends on B and C, B depends on D, C depends on D + err := manager.AddDependency(unitA, unitB, unit.StatusStarted) + require.NoError(t, err) + err = manager.AddDependency(unitA, unitC, unit.StatusStarted) + require.NoError(t, err) + err = manager.AddDependency(unitB, unitD, unit.StatusComplete) + require.NoError(t, err) + err = manager.AddDependency(unitC, unitD, unit.StatusComplete) + require.NoError(t, err) + + dot, err := manager.ExportDOT("complex") + require.NoError(t, err) + assert.NotEmpty(t, dot) + assert.Contains(t, dot, "digraph") + }) +} diff --git a/cli/agent.go b/cli/agent.go index c0bccc7769418..56a8720a4116f 100644 --- a/cli/agent.go +++ b/cli/agent.go @@ -11,6 +11,7 @@ import ( "os" "path/filepath" "runtime" + "slices" "strconv" "strings" "time" @@ -56,6 +57,8 @@ func workspaceAgent() *serpent.Command { devcontainers bool devcontainerProjectDiscovery bool devcontainerDiscoveryAutostart bool + socketServerEnabled bool + socketPath string ) agentAuth := &AgentAuth{} cmd := &serpent.Command{ @@ -201,18 +204,15 @@ func workspaceAgent() *serpent.Command { // Enable pprof handler // This prevents the pprof import from being accidentally deleted. 
_ = pprof.Handler - pprofSrvClose := ServeHandler(ctx, logger, nil, pprofAddress, "pprof") - defer pprofSrvClose() - if port, err := extractPort(pprofAddress); err == nil { - ignorePorts[port] = "pprof" - } - - if port, err := extractPort(prometheusAddress); err == nil { - ignorePorts[port] = "prometheus" - } + if pprofAddress != "" { + pprofSrvClose := ServeHandler(ctx, logger, nil, pprofAddress, "pprof") + defer pprofSrvClose() - if port, err := extractPort(debugAddress); err == nil { - ignorePorts[port] = "debug" + if port, err := extractPort(pprofAddress); err == nil { + ignorePorts[port] = "pprof" + } + } else { + logger.Debug(ctx, "pprof address is empty, disabling pprof server") } executablePath, err := os.Executable() @@ -276,6 +276,28 @@ func workspaceAgent() *serpent.Command { for { prometheusRegistry := prometheus.NewRegistry() + promHandler := agent.PrometheusMetricsHandler(prometheusRegistry, logger) + var serverClose []func() + if prometheusAddress != "" { + prometheusSrvClose := ServeHandler(ctx, logger, promHandler, prometheusAddress, "prometheus") + serverClose = append(serverClose, prometheusSrvClose) + + if port, err := extractPort(prometheusAddress); err == nil { + ignorePorts[port] = "prometheus" + } + } else { + logger.Debug(ctx, "prometheus address is empty, disabling prometheus server") + } + + if debugAddress != "" { + // ServeHandler depends on `agnt.HTTPDebug()`, but `agnt` + // depends on `ignorePorts`. Keep this if statement in sync + // with below. 
+ if port, err := extractPort(debugAddress); err == nil { + ignorePorts[port] = "debug" + } + } + agnt := agent.New(agent.Options{ Client: client, Logger: logger, @@ -297,12 +319,19 @@ func workspaceAgent() *serpent.Command { agentcontainers.WithProjectDiscovery(devcontainerProjectDiscovery), agentcontainers.WithDiscoveryAutostart(devcontainerDiscoveryAutostart), }, + SocketPath: socketPath, + SocketServerEnabled: socketServerEnabled, }) - promHandler := agent.PrometheusMetricsHandler(prometheusRegistry, logger) - prometheusSrvClose := ServeHandler(ctx, logger, promHandler, prometheusAddress, "prometheus") - - debugSrvClose := ServeHandler(ctx, logger, agnt.HTTPDebug(), debugAddress, "debug") + if debugAddress != "" { + // ServeHandler depends on `agnt.HTTPDebug()`, but `agnt` + // depends on `ignorePorts`. Keep this if statement in sync + // with above. + debugSrvClose := ServeHandler(ctx, logger, agnt.HTTPDebug(), debugAddress, "debug") + serverClose = append(serverClose, debugSrvClose) + } else { + logger.Debug(ctx, "debug address is empty, disabling debug server") + } select { case <-ctx.Done(): @@ -314,8 +343,11 @@ func workspaceAgent() *serpent.Command { } lastErr = agnt.Close() - debugSrvClose() - prometheusSrvClose() + + slices.Reverse(serverClose) + for _, closeFunc := range serverClose { + closeFunc() + } if mustExit { break @@ -449,6 +481,19 @@ func workspaceAgent() *serpent.Command { Description: "Allow the agent to autostart devcontainer projects it discovers based on their configuration.", Value: serpent.BoolOf(&devcontainerDiscoveryAutostart), }, + { + Flag: "socket-server-enabled", + Default: "false", + Env: "CODER_AGENT_SOCKET_SERVER_ENABLED", + Description: "Enable the agent socket server.", + Value: serpent.BoolOf(&socketServerEnabled), + }, + { + Flag: "socket-path", + Env: "CODER_AGENT_SOCKET_PATH", + Description: "Specify the path for the agent socket.", + Value: serpent.StringOf(&socketPath), + }, } agentAuth.AttachOptions(cmd, false) return 
cmd diff --git a/cli/agent_test.go b/cli/agent_test.go index b0b8cbcc97aa6..0d0594d8a699e 100644 --- a/cli/agent_test.go +++ b/cli/agent_test.go @@ -178,6 +178,51 @@ func TestWorkspaceAgent(t *testing.T) { require.Greater(t, atomic.LoadInt64(&called), int64(0), "expected coderd to be reached with custom headers") require.Greater(t, atomic.LoadInt64(&derpCalled), int64(0), "expected /derp to be called with custom headers") }) + + t.Run("DisabledServers", func(t *testing.T) { + t.Parallel() + + client, db := coderdtest.NewWithDatabase(t, nil) + user := coderdtest.CreateFirstUser(t, client) + r := dbfake.WorkspaceBuild(t, db, database.WorkspaceTable{ + OrganizationID: user.OrganizationID, + OwnerID: user.UserID, + }).WithAgent().Do() + + logDir := t.TempDir() + inv, _ := clitest.New(t, + "agent", + "--auth", "token", + "--agent-token", r.AgentToken, + "--agent-url", client.URL.String(), + "--log-dir", logDir, + "--pprof-address", "", + "--prometheus-address", "", + "--debug-address", "", + ) + + clitest.Start(t, inv) + + // Verify the agent is connected and working. + resources := coderdtest.NewWorkspaceAgentWaiter(t, client, r.Workspace.ID). + MatchResources(matchAgentWithVersion).Wait() + require.Len(t, resources, 1) + require.Len(t, resources[0].Agents, 1) + require.NotEmpty(t, resources[0].Agents[0].Version) + + // Verify the servers are not listening by checking the log for disabled + // messages. 
+ require.Eventually(t, func() bool { + logContent, err := os.ReadFile(filepath.Join(logDir, "coder-agent.log")) + if err != nil { + return false + } + logStr := string(logContent) + return strings.Contains(logStr, "pprof address is empty, disabling pprof server") && + strings.Contains(logStr, "prometheus address is empty, disabling prometheus server") && + strings.Contains(logStr, "debug address is empty, disabling debug server") + }, testutil.WaitLong, testutil.IntervalMedium) + }) } func matchAgentWithVersion(rs []codersdk.WorkspaceResource) bool { diff --git a/cli/clitest/clitest.go b/cli/clitest/clitest.go index 8d1f5302ce7ba..3e506a26b6d59 100644 --- a/cli/clitest/clitest.go +++ b/cli/clitest/clitest.go @@ -28,7 +28,9 @@ import ( ) // New creates a CLI instance with a configuration pointed to a -// temporary testing directory. +// temporary testing directory. The invocation is set up to use a +// global config directory for the given testing.TB, and keyring +// usage disabled. func New(t testing.TB, args ...string) (*serpent.Invocation, config.Root) { var root cli.RootCmd @@ -59,6 +61,15 @@ func NewWithCommand( t testing.TB, cmd *serpent.Command, args ...string, ) (*serpent.Invocation, config.Root) { configDir := config.Root(t.TempDir()) + // Keyring usage is disabled here when --global-config is set because many existing + // tests expect the session token to be stored on disk and is not properly instrumented + // for parallel testing against the actual operating system keyring. + invArgs := append([]string{"--global-config", string(configDir)}, args...) + return setupInvocation(t, cmd, invArgs...), configDir +} + +func setupInvocation(t testing.TB, cmd *serpent.Command, args ...string, +) *serpent.Invocation { // I really would like to fail test on error logs, but realistically, turning on by default // in all our CLI tests is going to create a lot of flaky noise. logger := slogtest.Make(t, &slogtest.Options{IgnoreErrors: true}). 
@@ -66,16 +77,21 @@ func NewWithCommand( Named("cli") i := &serpent.Invocation{ Command: cmd, - Args: append([]string{"--global-config", string(configDir)}, args...), + Args: args, Stdin: io.LimitReader(nil, 0), Stdout: (&logWriter{prefix: "stdout", log: logger}), Stderr: (&logWriter{prefix: "stderr", log: logger}), Logger: logger, } t.Logf("invoking command: %s %s", cmd.Name(), strings.Join(i.Args, " ")) + return i +} - // These can be overridden by the test. - return i, configDir +func NewWithDefaultKeyringCommand(t testing.TB, cmd *serpent.Command, args ...string, +) (*serpent.Invocation, config.Root) { + configDir := config.Root(t.TempDir()) + invArgs := append([]string{"--global-config", string(configDir)}, args...) + return setupInvocation(t, cmd, invArgs...), configDir } // SetupConfig applies the URL and SessionToken of the client to the config. diff --git a/cli/create.go b/cli/create.go index 05fe0824b5be1..225d05950e77c 100644 --- a/cli/create.go +++ b/cli/create.go @@ -577,53 +577,57 @@ func prepWorkspaceBuild(inv *serpent.Invocation, client *codersdk.Client, args p return nil, xerrors.Errorf("template version git auth: %w", err) } - // Run a dry-run with the given parameters to check correctness - dryRun, err := client.CreateTemplateVersionDryRun(inv.Context(), templateVersion.ID, codersdk.CreateTemplateVersionDryRunRequest{ - WorkspaceName: args.NewWorkspaceName, - RichParameterValues: buildParameters, - }) - if err != nil { - return nil, xerrors.Errorf("begin workspace dry-run: %w", err) - } + // Only perform dry-run for workspace creation and updates + // Skip for start and restart to avoid unnecessary delays + if args.Action == WorkspaceCreate || args.Action == WorkspaceUpdate { + // Run a dry-run with the given parameters to check correctness + dryRun, err := client.CreateTemplateVersionDryRun(inv.Context(), templateVersion.ID, codersdk.CreateTemplateVersionDryRunRequest{ + WorkspaceName: args.NewWorkspaceName, + RichParameterValues: 
buildParameters, + }) + if err != nil { + return nil, xerrors.Errorf("begin workspace dry-run: %w", err) + } - matchedProvisioners, err := client.TemplateVersionDryRunMatchedProvisioners(inv.Context(), templateVersion.ID, dryRun.ID) - if err != nil { - return nil, xerrors.Errorf("get matched provisioners: %w", err) - } - cliutil.WarnMatchedProvisioners(inv.Stdout, &matchedProvisioners, dryRun) - _, _ = fmt.Fprintln(inv.Stdout, "Planning workspace...") - err = cliui.ProvisionerJob(inv.Context(), inv.Stdout, cliui.ProvisionerJobOptions{ - Fetch: func() (codersdk.ProvisionerJob, error) { - return client.TemplateVersionDryRun(inv.Context(), templateVersion.ID, dryRun.ID) - }, - Cancel: func() error { - return client.CancelTemplateVersionDryRun(inv.Context(), templateVersion.ID, dryRun.ID) - }, - Logs: func() (<-chan codersdk.ProvisionerJobLog, io.Closer, error) { - return client.TemplateVersionDryRunLogsAfter(inv.Context(), templateVersion.ID, dryRun.ID, 0) - }, - // Don't show log output for the dry-run unless there's an error. 
- Silent: true, - }) - if err != nil { - // TODO (Dean): reprompt for parameter values if we deem it to - // be a validation error - return nil, xerrors.Errorf("dry-run workspace: %w", err) - } + matchedProvisioners, err := client.TemplateVersionDryRunMatchedProvisioners(inv.Context(), templateVersion.ID, dryRun.ID) + if err != nil { + return nil, xerrors.Errorf("get matched provisioners: %w", err) + } + cliutil.WarnMatchedProvisioners(inv.Stdout, &matchedProvisioners, dryRun) + _, _ = fmt.Fprintln(inv.Stdout, "Planning workspace...") + err = cliui.ProvisionerJob(inv.Context(), inv.Stdout, cliui.ProvisionerJobOptions{ + Fetch: func() (codersdk.ProvisionerJob, error) { + return client.TemplateVersionDryRun(inv.Context(), templateVersion.ID, dryRun.ID) + }, + Cancel: func() error { + return client.CancelTemplateVersionDryRun(inv.Context(), templateVersion.ID, dryRun.ID) + }, + Logs: func() (<-chan codersdk.ProvisionerJobLog, io.Closer, error) { + return client.TemplateVersionDryRunLogsAfter(inv.Context(), templateVersion.ID, dryRun.ID, 0) + }, + // Don't show log output for the dry-run unless there's an error. + Silent: true, + }) + if err != nil { + // TODO (Dean): reprompt for parameter values if we deem it to + // be a validation error + return nil, xerrors.Errorf("dry-run workspace: %w", err) + } - resources, err := client.TemplateVersionDryRunResources(inv.Context(), templateVersion.ID, dryRun.ID) - if err != nil { - return nil, xerrors.Errorf("get workspace dry-run resources: %w", err) - } + resources, err := client.TemplateVersionDryRunResources(inv.Context(), templateVersion.ID, dryRun.ID) + if err != nil { + return nil, xerrors.Errorf("get workspace dry-run resources: %w", err) + } - err = cliui.WorkspaceResources(inv.Stdout, resources, cliui.WorkspaceResourcesOptions{ - WorkspaceName: args.NewWorkspaceName, - // Since agents haven't connected yet, hiding this makes more sense. 
- HideAgentState: true, - Title: "Workspace Preview", - }) - if err != nil { - return nil, xerrors.Errorf("get resources: %w", err) + err = cliui.WorkspaceResources(inv.Stdout, resources, cliui.WorkspaceResourcesOptions{ + WorkspaceName: args.NewWorkspaceName, + // Since agents haven't connected yet, hiding this makes more sense. + HideAgentState: true, + Title: "Workspace Preview", + }) + if err != nil { + return nil, xerrors.Errorf("get resources: %w", err) + } } return buildParameters, nil diff --git a/cli/exp_scaletest.go b/cli/exp_scaletest.go index 559ffbebd165d..d8dd10da327cc 100644 --- a/cli/exp_scaletest.go +++ b/cli/exp_scaletest.go @@ -64,7 +64,9 @@ func (r *RootCmd) scaletestCmd() *serpent.Command { r.scaletestWorkspaceTraffic(), r.scaletestAutostart(), r.scaletestNotifications(), + r.scaletestTaskStatus(), r.scaletestSMTP(), + r.scaletestPrebuilds(), }, } @@ -384,6 +386,88 @@ func (s *scaletestPrometheusFlags) attach(opts *serpent.OptionSet) { ) } +// workspaceTargetFlags holds common flags for targeting specific workspaces in scale tests. +type workspaceTargetFlags struct { + template string + targetWorkspaces string + useHostLogin bool +} + +// attach adds the workspace target flags to the given options set. +func (f *workspaceTargetFlags) attach(opts *serpent.OptionSet) { + *opts = append(*opts, + serpent.Option{ + Flag: "template", + FlagShorthand: "t", + Env: "CODER_SCALETEST_TEMPLATE", + Description: "Name or ID of the template. Traffic generation will be limited to workspaces created from this template.", + Value: serpent.StringOf(&f.template), + }, + serpent.Option{ + Flag: "target-workspaces", + Env: "CODER_SCALETEST_TARGET_WORKSPACES", + Description: "Target a specific range of workspaces in the format [START]:[END] (exclusive). 
Example: 0:10 will target the 10 first alphabetically sorted workspaces (0-9).", + Value: serpent.StringOf(&f.targetWorkspaces), + }, + serpent.Option{ + Flag: "use-host-login", + Env: "CODER_SCALETEST_USE_HOST_LOGIN", + Default: "false", + Description: "Connect as the currently logged in user.", + Value: serpent.BoolOf(&f.useHostLogin), + }, + ) +} + +// getTargetedWorkspaces retrieves the workspaces based on the template filter and target range. warnWriter is where to +// write a warning message if any workspaces were skipped due to ownership mismatch. +func (f *workspaceTargetFlags) getTargetedWorkspaces(ctx context.Context, client *codersdk.Client, organizationIDs []uuid.UUID, warnWriter io.Writer) ([]codersdk.Workspace, error) { + // Validate template if provided + if f.template != "" { + _, err := parseTemplate(ctx, client, organizationIDs, f.template) + if err != nil { + return nil, xerrors.Errorf("parse template: %w", err) + } + } + + // Parse target range + targetStart, targetEnd, err := parseTargetRange("workspaces", f.targetWorkspaces) + if err != nil { + return nil, xerrors.Errorf("parse target workspaces: %w", err) + } + + // Determine owner based on useHostLogin + var owner string + if f.useHostLogin { + owner = codersdk.Me + } + + // Get workspaces + workspaces, numSkipped, err := getScaletestWorkspaces(ctx, client, owner, f.template) + if err != nil { + return nil, err + } + if numSkipped > 0 { + cliui.Warnf(warnWriter, "CODER_DISABLE_OWNER_WORKSPACE_ACCESS is set on the deployment.\n\t%d workspace(s) were skipped due to ownership mismatch.\n\tSet --use-host-login to only target workspaces you own.", numSkipped) + } + + // Adjust targetEnd if not specified + if targetEnd == 0 { + targetEnd = len(workspaces) + } + + // Validate range + if len(workspaces) == 0 { + return nil, xerrors.Errorf("no scaletest workspaces exist") + } + if targetEnd > len(workspaces) { + return nil, xerrors.Errorf("target workspace end %d is greater than the number of 
workspaces %d", targetEnd, len(workspaces)) + } + + // Return the sliced workspaces + return workspaces[targetStart:targetEnd], nil +} + func requireAdmin(ctx context.Context, client *codersdk.Client) (codersdk.User, error) { me, err := client.User(ctx, codersdk.Me) if err != nil { @@ -1193,12 +1277,10 @@ func (r *RootCmd) scaletestWorkspaceTraffic() *serpent.Command { bytesPerTick int64 ssh bool disableDirect bool - useHostLogin bool app string - template string - targetWorkspaces string workspaceProxyURL string + targetFlags = &workspaceTargetFlags{} tracingFlags = &scaletestTracingFlags{} strategy = &scaletestStrategyFlags{} cleanupStrategy = newScaletestCleanupStrategy() @@ -1243,15 +1325,9 @@ func (r *RootCmd) scaletestWorkspaceTraffic() *serpent.Command { }, } - if template != "" { - _, err := parseTemplate(ctx, client, me.OrganizationIDs, template) - if err != nil { - return xerrors.Errorf("parse template: %w", err) - } - } - targetWorkspaceStart, targetWorkspaceEnd, err := parseTargetRange("workspaces", targetWorkspaces) + workspaces, err := targetFlags.getTargetedWorkspaces(ctx, client, me.OrganizationIDs, inv.Stdout) if err != nil { - return xerrors.Errorf("parse target workspaces: %w", err) + return err } appHost, err := client.AppHost(ctx) @@ -1259,30 +1335,6 @@ func (r *RootCmd) scaletestWorkspaceTraffic() *serpent.Command { return xerrors.Errorf("get app host: %w", err) } - var owner string - if useHostLogin { - owner = codersdk.Me - } - - workspaces, numSkipped, err := getScaletestWorkspaces(inv.Context(), client, owner, template) - if err != nil { - return err - } - if numSkipped > 0 { - cliui.Warnf(inv.Stdout, "CODER_DISABLE_OWNER_WORKSPACE_ACCESS is set on the deployment.\n\t%d workspace(s) were skipped due to ownership mismatch.\n\tSet --use-host-login to only target workspaces you own.", numSkipped) - } - - if targetWorkspaceEnd == 0 { - targetWorkspaceEnd = len(workspaces) - } - - if len(workspaces) == 0 { - return xerrors.Errorf("no scaletest 
workspaces exist") - } - if targetWorkspaceEnd > len(workspaces) { - return xerrors.Errorf("target workspace end %d is greater than the number of workspaces %d", targetWorkspaceEnd, len(workspaces)) - } - tracerProvider, closeTracing, tracingEnabled, err := tracingFlags.provider(ctx) if err != nil { return xerrors.Errorf("create tracer provider: %w", err) @@ -1307,10 +1359,6 @@ func (r *RootCmd) scaletestWorkspaceTraffic() *serpent.Command { th := harness.NewTestHarness(strategy.toStrategy(), cleanupStrategy.toStrategy()) for idx, ws := range workspaces { - if idx < targetWorkspaceStart || idx >= targetWorkspaceEnd { - continue - } - var ( agent codersdk.WorkspaceAgent name = "workspace-traffic" @@ -1415,19 +1463,6 @@ func (r *RootCmd) scaletestWorkspaceTraffic() *serpent.Command { } cmd.Options = []serpent.Option{ - { - Flag: "template", - FlagShorthand: "t", - Env: "CODER_SCALETEST_TEMPLATE", - Description: "Name or ID of the template. Traffic generation will be limited to workspaces created from this template.", - Value: serpent.StringOf(&template), - }, - { - Flag: "target-workspaces", - Env: "CODER_SCALETEST_TARGET_WORKSPACES", - Description: "Target a specific range of workspaces in the format [START]:[END] (exclusive). 
Example: 0:10 will target the 10 first alphabetically sorted workspaces (0-9).", - Value: serpent.StringOf(&targetWorkspaces), - }, { Flag: "bytes-per-tick", Env: "CODER_SCALETEST_WORKSPACE_TRAFFIC_BYTES_PER_TICK", @@ -1463,13 +1498,6 @@ func (r *RootCmd) scaletestWorkspaceTraffic() *serpent.Command { Description: "Send WebSocket traffic to a workspace app (proxied via coderd), cannot be used with --ssh.", Value: serpent.StringOf(&app), }, - { - Flag: "use-host-login", - Env: "CODER_SCALETEST_USE_HOST_LOGIN", - Default: "false", - Description: "Connect as the currently logged in user.", - Value: serpent.BoolOf(&useHostLogin), - }, { Flag: "workspace-proxy-url", Env: "CODER_SCALETEST_WORKSPACE_PROXY_URL", @@ -1479,6 +1507,7 @@ func (r *RootCmd) scaletestWorkspaceTraffic() *serpent.Command { }, } + targetFlags.attach(&cmd.Options) tracingFlags.attach(&cmd.Options) strategy.attach(&cmd.Options) cleanupStrategy.attach(&cmd.Options) diff --git a/cli/exp_scaletest_notifications.go b/cli/exp_scaletest_notifications.go index 1ea47858933f1..074343e10b3cc 100644 --- a/cli/exp_scaletest_notifications.go +++ b/cli/exp_scaletest_notifications.go @@ -3,6 +3,7 @@ package cli import ( + "bytes" "context" "fmt" "net/http" @@ -29,12 +30,13 @@ import ( func (r *RootCmd) scaletestNotifications() *serpent.Command { var ( - userCount int64 - ownerUserPercentage float64 - notificationTimeout time.Duration - dialTimeout time.Duration - noCleanup bool - smtpAPIURL string + userCount int64 + templateAdminPercentage float64 + notificationTimeout time.Duration + smtpRequestTimeout time.Duration + dialTimeout time.Duration + noCleanup bool + smtpAPIURL string tracingFlags = &scaletestTracingFlags{} @@ -77,24 +79,24 @@ func (r *RootCmd) scaletestNotifications() *serpent.Command { return xerrors.Errorf("--user-count must be greater than 0") } - if ownerUserPercentage < 0 || ownerUserPercentage > 100 { - return xerrors.Errorf("--owner-user-percentage must be between 0 and 100") + if 
templateAdminPercentage < 0 || templateAdminPercentage > 100 { + return xerrors.Errorf("--template-admin-percentage must be between 0 and 100") } if smtpAPIURL != "" && !strings.HasPrefix(smtpAPIURL, "http://") && !strings.HasPrefix(smtpAPIURL, "https://") { return xerrors.Errorf("--smtp-api-url must start with http:// or https://") } - ownerUserCount := int64(float64(userCount) * ownerUserPercentage / 100) - if ownerUserCount == 0 && ownerUserPercentage > 0 { - ownerUserCount = 1 + templateAdminCount := int64(float64(userCount) * templateAdminPercentage / 100) + if templateAdminCount == 0 && templateAdminPercentage > 0 { + templateAdminCount = 1 } - regularUserCount := userCount - ownerUserCount + regularUserCount := userCount - templateAdminCount _, _ = fmt.Fprintf(inv.Stderr, "Distribution plan:\n") _, _ = fmt.Fprintf(inv.Stderr, " Total users: %d\n", userCount) - _, _ = fmt.Fprintf(inv.Stderr, " Owner users: %d (%.1f%%)\n", ownerUserCount, ownerUserPercentage) - _, _ = fmt.Fprintf(inv.Stderr, " Regular users: %d (%.1f%%)\n", regularUserCount, 100.0-ownerUserPercentage) + _, _ = fmt.Fprintf(inv.Stderr, " Template admins: %d (%.1f%%)\n", templateAdminCount, templateAdminPercentage) + _, _ = fmt.Fprintf(inv.Stderr, " Regular users: %d (%.1f%%)\n", regularUserCount, 100.0-templateAdminPercentage) outputs, err := output.parse() if err != nil { @@ -127,13 +129,12 @@ func (r *RootCmd) scaletestNotifications() *serpent.Command { _, _ = fmt.Fprintln(inv.Stderr, "Creating users...") dialBarrier := &sync.WaitGroup{} - ownerWatchBarrier := &sync.WaitGroup{} + templateAdminWatchBarrier := &sync.WaitGroup{} dialBarrier.Add(int(userCount)) - ownerWatchBarrier.Add(int(ownerUserCount)) + templateAdminWatchBarrier.Add(int(templateAdminCount)) expectedNotificationIDs := map[uuid.UUID]struct{}{ - notificationsLib.TemplateUserAccountCreated: {}, - notificationsLib.TemplateUserAccountDeleted: {}, + notificationsLib.TemplateTemplateDeleted: {}, } triggerTimes := 
make(map[uuid.UUID]chan time.Time, len(expectedNotificationIDs)) @@ -141,20 +142,31 @@ func (r *RootCmd) scaletestNotifications() *serpent.Command { triggerTimes[id] = make(chan time.Time, 1) } + smtpHTTPTransport := &http.Transport{ + MaxConnsPerHost: 512, + MaxIdleConnsPerHost: 512, + IdleConnTimeout: 60 * time.Second, + } + smtpHTTPClient := &http.Client{ + Transport: smtpHTTPTransport, + } + configs := make([]notifications.Config, 0, userCount) - for range ownerUserCount { + for range templateAdminCount { config := notifications.Config{ User: createusers.Config{ OrganizationID: me.OrganizationIDs[0], }, - Roles: []string{codersdk.RoleOwner}, + Roles: []string{codersdk.RoleTemplateAdmin}, NotificationTimeout: notificationTimeout, DialTimeout: dialTimeout, DialBarrier: dialBarrier, - ReceivingWatchBarrier: ownerWatchBarrier, + ReceivingWatchBarrier: templateAdminWatchBarrier, ExpectedNotificationsIDs: expectedNotificationIDs, Metrics: metrics, SMTPApiURL: smtpAPIURL, + SMTPRequestTimeout: smtpRequestTimeout, + SMTPHttpClient: smtpHTTPClient, } if err := config.Validate(); err != nil { return xerrors.Errorf("validate config: %w", err) @@ -170,9 +182,8 @@ func (r *RootCmd) scaletestNotifications() *serpent.Command { NotificationTimeout: notificationTimeout, DialTimeout: dialTimeout, DialBarrier: dialBarrier, - ReceivingWatchBarrier: ownerWatchBarrier, + ReceivingWatchBarrier: templateAdminWatchBarrier, Metrics: metrics, - SMTPApiURL: smtpAPIURL, } if err := config.Validate(); err != nil { return xerrors.Errorf("validate config: %w", err) @@ -180,7 +191,7 @@ func (r *RootCmd) scaletestNotifications() *serpent.Command { configs = append(configs, config) } - go triggerUserNotifications( + go triggerNotifications( ctx, logger, client, @@ -261,23 +272,30 @@ func (r *RootCmd) scaletestNotifications() *serpent.Command { Required: true, }, { - Flag: "owner-user-percentage", - Env: "CODER_SCALETEST_NOTIFICATION_OWNER_USER_PERCENTAGE", + Flag: "template-admin-percentage", + 
Env: "CODER_SCALETEST_NOTIFICATION_TEMPLATE_ADMIN_PERCENTAGE", Default: "20.0", - Description: "Percentage of users to assign Owner role to (0-100).", - Value: serpent.Float64Of(&ownerUserPercentage), + Description: "Percentage of users to assign Template Admin role to (0-100).", + Value: serpent.Float64Of(&templateAdminPercentage), }, { Flag: "notification-timeout", Env: "CODER_SCALETEST_NOTIFICATION_TIMEOUT", - Default: "5m", + Default: "10m", Description: "How long to wait for notifications after triggering.", Value: serpent.DurationOf(¬ificationTimeout), }, + { + Flag: "smtp-request-timeout", + Env: "CODER_SCALETEST_SMTP_REQUEST_TIMEOUT", + Default: "5m", + Description: "Timeout for SMTP requests.", + Value: serpent.DurationOf(&smtpRequestTimeout), + }, { Flag: "dial-timeout", Env: "CODER_SCALETEST_DIAL_TIMEOUT", - Default: "2m", + Default: "10m", Description: "Timeout for dialing the notification websocket endpoint.", Value: serpent.DurationOf(&dialTimeout), }, @@ -379,9 +397,9 @@ func computeNotificationLatencies( return nil } -// triggerUserNotifications waits for all test users to connect, -// then creates and deletes a test user to trigger notification events for testing. -func triggerUserNotifications( +// triggerNotifications waits for all test users to connect, +// then creates and deletes a test template to trigger notification events for testing. +func triggerNotifications( ctx context.Context, logger slog.Logger, client *codersdk.Client, @@ -414,34 +432,49 @@ func triggerUserNotifications( return } - const ( - triggerUsername = "scaletest-trigger-user" - triggerEmail = "scaletest-trigger@example.com" - ) + logger.Info(ctx, "creating test template to test notifications") - logger.Info(ctx, "creating test user to test notifications", - slog.F("username", triggerUsername), - slog.F("email", triggerEmail), - slog.F("org_id", orgID)) + // Upload empty template file. 
+ file, err := client.Upload(ctx, codersdk.ContentTypeTar, bytes.NewReader([]byte{})) + if err != nil { + logger.Error(ctx, "upload test template", slog.Error(err)) + return + } + logger.Info(ctx, "test template uploaded", slog.F("file_id", file.ID)) + + // Create template version. + version, err := client.CreateTemplateVersion(ctx, orgID, codersdk.CreateTemplateVersionRequest{ + StorageMethod: codersdk.ProvisionerStorageMethodFile, + FileID: file.ID, + Provisioner: codersdk.ProvisionerTypeEcho, + }) + if err != nil { + logger.Error(ctx, "create test template version", slog.Error(err)) + return + } + logger.Info(ctx, "test template version created", slog.F("template_version_id", version.ID)) - testUser, err := client.CreateUserWithOrgs(ctx, codersdk.CreateUserRequestWithOrgs{ - OrganizationIDs: []uuid.UUID{orgID}, - Username: triggerUsername, - Email: triggerEmail, - Password: "test-password-123", + // Create template. + testTemplate, err := client.CreateTemplate(ctx, orgID, codersdk.CreateTemplateRequest{ + Name: "scaletest-test-template", + Description: "scaletest-test-template", + VersionID: version.ID, }) if err != nil { - logger.Error(ctx, "create test user", slog.Error(err)) + logger.Error(ctx, "create test template", slog.Error(err)) return } - expectedNotifications[notificationsLib.TemplateUserAccountCreated] <- time.Now() + logger.Info(ctx, "test template created", slog.F("template_id", testTemplate.ID)) - err = client.DeleteUser(ctx, testUser.ID) + // Delete template to trigger notification. 
+ err = client.DeleteTemplate(ctx, testTemplate.ID) if err != nil { - logger.Error(ctx, "delete test user", slog.Error(err)) + logger.Error(ctx, "delete test template", slog.Error(err)) return } - expectedNotifications[notificationsLib.TemplateUserAccountDeleted] <- time.Now() - close(expectedNotifications[notificationsLib.TemplateUserAccountCreated]) - close(expectedNotifications[notificationsLib.TemplateUserAccountDeleted]) + logger.Info(ctx, "test template deleted", slog.F("template_id", testTemplate.ID)) + + // Record expected notification. + expectedNotifications[notificationsLib.TemplateTemplateDeleted] <- time.Now() + close(expectedNotifications[notificationsLib.TemplateTemplateDeleted]) } diff --git a/cli/exp_scaletest_prebuilds.go b/cli/exp_scaletest_prebuilds.go new file mode 100644 index 0000000000000..8e05bacae258d --- /dev/null +++ b/cli/exp_scaletest_prebuilds.go @@ -0,0 +1,297 @@ +//go:build !slim + +package cli + +import ( + "fmt" + "net/http" + "os/signal" + "strconv" + "sync" + "time" + + "github.com/prometheus/client_golang/prometheus" + "github.com/prometheus/client_golang/prometheus/promhttp" + "golang.org/x/xerrors" + + "github.com/coder/coder/v2/codersdk" + "github.com/coder/coder/v2/scaletest/harness" + "github.com/coder/coder/v2/scaletest/prebuilds" + "github.com/coder/quartz" + "github.com/coder/serpent" +) + +func (r *RootCmd) scaletestPrebuilds() *serpent.Command { + var ( + numTemplates int64 + numPresets int64 + numPresetPrebuilds int64 + templateVersionJobTimeout time.Duration + prebuildWorkspaceTimeout time.Duration + noCleanup bool + + tracingFlags = &scaletestTracingFlags{} + timeoutStrategy = &timeoutFlags{} + cleanupStrategy = newScaletestCleanupStrategy() + output = &scaletestOutputFlags{} + prometheusFlags = &scaletestPrometheusFlags{} + ) + + cmd := &serpent.Command{ + Use: "prebuilds", + Short: "Creates prebuild workspaces on the Coder server.", + Handler: func(inv *serpent.Invocation) error { + ctx := inv.Context() + client, 
err := r.InitClient(inv) + if err != nil { + return err + } + + notifyCtx, stop := signal.NotifyContext(ctx, StopSignals...) + defer stop() + ctx = notifyCtx + + me, err := requireAdmin(ctx, client) + if err != nil { + return err + } + + client.HTTPClient = &http.Client{ + Transport: &codersdk.HeaderTransport{ + Transport: http.DefaultTransport, + Header: map[string][]string{ + codersdk.BypassRatelimitHeader: {"true"}, + }, + }, + } + + if numTemplates <= 0 { + return xerrors.Errorf("--num-templates must be greater than 0") + } + if numPresets <= 0 { + return xerrors.Errorf("--num-presets must be greater than 0") + } + if numPresetPrebuilds <= 0 { + return xerrors.Errorf("--num-preset-prebuilds must be greater than 0") + } + + outputs, err := output.parse() + if err != nil { + return xerrors.Errorf("parse output flags: %w", err) + } + + tracerProvider, closeTracing, tracingEnabled, err := tracingFlags.provider(ctx) + if err != nil { + return xerrors.Errorf("create tracer provider: %w", err) + } + defer func() { + _, _ = fmt.Fprintln(inv.Stderr, "\nUploading traces...") + if err := closeTracing(ctx); err != nil { + _, _ = fmt.Fprintf(inv.Stderr, "\nError uploading traces: %+v\n", err) + } + _, _ = fmt.Fprintf(inv.Stderr, "Waiting %s for prometheus metrics to be scraped\n", prometheusFlags.Wait) + <-time.After(prometheusFlags.Wait) + }() + tracer := tracerProvider.Tracer(scaletestTracerName) + + reg := prometheus.NewRegistry() + metrics := prebuilds.NewMetrics(reg) + + logger := inv.Logger + prometheusSrvClose := ServeHandler(ctx, logger, promhttp.HandlerFor(reg, promhttp.HandlerOpts{}), prometheusFlags.Address, "prometheus") + defer prometheusSrvClose() + + err = client.PutPrebuildsSettings(ctx, codersdk.PrebuildsSettings{ + ReconciliationPaused: true, + }) + if err != nil { + return xerrors.Errorf("pause prebuilds: %w", err) + } + + setupBarrier := new(sync.WaitGroup) + setupBarrier.Add(int(numTemplates)) + creationBarrier := new(sync.WaitGroup) + 
creationBarrier.Add(int(numTemplates)) + deletionSetupBarrier := new(sync.WaitGroup) + deletionSetupBarrier.Add(1) + deletionBarrier := new(sync.WaitGroup) + deletionBarrier.Add(int(numTemplates)) + + th := harness.NewTestHarness(timeoutStrategy.wrapStrategy(harness.ConcurrentExecutionStrategy{}), cleanupStrategy.toStrategy()) + + for i := range numTemplates { + id := strconv.Itoa(int(i)) + cfg := prebuilds.Config{ + OrganizationID: me.OrganizationIDs[0], + NumPresets: int(numPresets), + NumPresetPrebuilds: int(numPresetPrebuilds), + TemplateVersionJobTimeout: templateVersionJobTimeout, + PrebuildWorkspaceTimeout: prebuildWorkspaceTimeout, + Metrics: metrics, + SetupBarrier: setupBarrier, + CreationBarrier: creationBarrier, + DeletionSetupBarrier: deletionSetupBarrier, + DeletionBarrier: deletionBarrier, + Clock: quartz.NewReal(), + } + err := cfg.Validate() + if err != nil { + return xerrors.Errorf("validate config: %w", err) + } + + var runner harness.Runnable = prebuilds.NewRunner(client, cfg) + if tracingEnabled { + runner = &runnableTraceWrapper{ + tracer: tracer, + spanName: fmt.Sprintf("prebuilds/%s", id), + runner: runner, + } + } + + th.AddRun("prebuilds", id, runner) + } + + _, _ = fmt.Fprintf(inv.Stderr, "Creating %d templates with %d presets and %d prebuilds per preset...\n", + numTemplates, numPresets, numPresetPrebuilds) + _, _ = fmt.Fprintf(inv.Stderr, "Total expected prebuilds: %d\n", numTemplates*numPresets*numPresetPrebuilds) + + testCtx, testCancel := timeoutStrategy.toContext(ctx) + defer testCancel() + + runErrCh := make(chan error, 1) + go func() { + runErrCh <- th.Run(testCtx) + }() + + _, _ = fmt.Fprintln(inv.Stderr, "Waiting for all templates to be created...") + setupBarrier.Wait() + _, _ = fmt.Fprintln(inv.Stderr, "All templates created") + + err = client.PutPrebuildsSettings(ctx, codersdk.PrebuildsSettings{ + ReconciliationPaused: false, + }) + if err != nil { + return xerrors.Errorf("resume prebuilds: %w", err) + } + + _, _ = 
fmt.Fprintln(inv.Stderr, "Waiting for all prebuilds to be created...") + creationBarrier.Wait() + _, _ = fmt.Fprintln(inv.Stderr, "All prebuilds created") + + err = client.PutPrebuildsSettings(ctx, codersdk.PrebuildsSettings{ + ReconciliationPaused: true, + }) + if err != nil { + return xerrors.Errorf("pause prebuilds before deletion: %w", err) + } + + _, _ = fmt.Fprintln(inv.Stderr, "Prebuilds paused, signaling runners to prepare for deletion") + deletionSetupBarrier.Done() + + _, _ = fmt.Fprintln(inv.Stderr, "Waiting for all templates to be updated with 0 prebuilds...") + deletionBarrier.Wait() + _, _ = fmt.Fprintln(inv.Stderr, "All templates updated") + + err = client.PutPrebuildsSettings(ctx, codersdk.PrebuildsSettings{ + ReconciliationPaused: false, + }) + if err != nil { + return xerrors.Errorf("resume prebuilds for deletion: %w", err) + } + + _, _ = fmt.Fprintln(inv.Stderr, "Waiting for all prebuilds to be deleted...") + err = <-runErrCh + if err != nil { + return xerrors.Errorf("run test harness (harness failure, not a test failure): %w", err) + } + + // If the command was interrupted, skip cleanup & stats + if notifyCtx.Err() != nil { + return notifyCtx.Err() + } + + res := th.Results() + for _, o := range outputs { + err = o.write(res, inv.Stdout) + if err != nil { + return xerrors.Errorf("write output %q to %q: %w", o.format, o.path, err) + } + } + + if !noCleanup { + _, _ = fmt.Fprintln(inv.Stderr, "\nStarting cleanup (deleting templates)...") + + cleanupCtx, cleanupCancel := cleanupStrategy.toContext(ctx) + defer cleanupCancel() + + err = th.Cleanup(cleanupCtx) + if err != nil { + return xerrors.Errorf("cleanup tests: %w", err) + } + + // If the cleanup was interrupted, skip stats + if notifyCtx.Err() != nil { + return notifyCtx.Err() + } + } + + if res.TotalFail > 0 { + return xerrors.New("prebuild creation test failed, see above for more details") + } + + return nil + }, + } + + cmd.Options = serpent.OptionSet{ + { + Flag: "num-templates", + Env: 
"CODER_SCALETEST_PREBUILDS_NUM_TEMPLATES", + Default: "1", + Description: "Number of templates to create for the test.", + Value: serpent.Int64Of(&numTemplates), + }, + { + Flag: "num-presets", + Env: "CODER_SCALETEST_PREBUILDS_NUM_PRESETS", + Default: "1", + Description: "Number of presets per template.", + Value: serpent.Int64Of(&numPresets), + }, + { + Flag: "num-preset-prebuilds", + Env: "CODER_SCALETEST_PREBUILDS_NUM_PRESET_PREBUILDS", + Default: "1", + Description: "Number of prebuilds per preset.", + Value: serpent.Int64Of(&numPresetPrebuilds), + }, + { + Flag: "template-version-job-timeout", + Env: "CODER_SCALETEST_PREBUILDS_TEMPLATE_VERSION_JOB_TIMEOUT", + Default: "5m", + Description: "Timeout for template version provisioning jobs.", + Value: serpent.DurationOf(&templateVersionJobTimeout), + }, + { + Flag: "prebuild-workspace-timeout", + Env: "CODER_SCALETEST_PREBUILDS_WORKSPACE_TIMEOUT", + Default: "10m", + Description: "Timeout for all prebuild workspaces to be created/deleted.", + Value: serpent.DurationOf(&prebuildWorkspaceTimeout), + }, + { + Flag: "skip-cleanup", + Env: "CODER_SCALETEST_PREBUILDS_SKIP_CLEANUP", + Description: "Skip cleanup (deletion test) and leave resources intact.", + Value: serpent.BoolOf(&noCleanup), + }, + } + + tracingFlags.attach(&cmd.Options) + timeoutStrategy.attach(&cmd.Options) + cleanupStrategy.attach(&cmd.Options) + output.attach(&cmd.Options) + prometheusFlags.attach(&cmd.Options) + + return cmd +} diff --git a/cli/exp_scaletest_taskstatus.go b/cli/exp_scaletest_taskstatus.go new file mode 100644 index 0000000000000..8621d7d2ae798 --- /dev/null +++ b/cli/exp_scaletest_taskstatus.go @@ -0,0 +1,275 @@ +//go:build !slim + +package cli + +import ( + "context" + "fmt" + "net/http" + "sync" + "time" + + "github.com/google/uuid" + "github.com/prometheus/client_golang/prometheus" + "github.com/prometheus/client_golang/prometheus/promhttp" + "golang.org/x/xerrors" + + "cdr.dev/slog" + "cdr.dev/slog/sloggers/sloghuman" + 
"github.com/coder/serpent" + + "github.com/coder/coder/v2/codersdk" + "github.com/coder/coder/v2/scaletest/harness" + "github.com/coder/coder/v2/scaletest/taskstatus" +) + +const ( + taskStatusTestName = "task-status" +) + +func (r *RootCmd) scaletestTaskStatus() *serpent.Command { + var ( + count int64 + template string + workspaceNamePrefix string + appSlug string + reportStatusPeriod time.Duration + reportStatusDuration time.Duration + baselineDuration time.Duration + tracingFlags = &scaletestTracingFlags{} + prometheusFlags = &scaletestPrometheusFlags{} + timeoutStrategy = &timeoutFlags{} + cleanupStrategy = newScaletestCleanupStrategy() + output = &scaletestOutputFlags{} + ) + orgContext := NewOrganizationContext() + + cmd := &serpent.Command{ + Use: "task-status", + Short: "Generates load on the Coder server by simulating task status reporting", + Long: `This test creates external workspaces and simulates AI agents reporting task status. +After all runners connect, it waits for the baseline duration before triggering status reporting.`, + Handler: func(inv *serpent.Invocation) error { + ctx := inv.Context() + + outputs, err := output.parse() + if err != nil { + return xerrors.Errorf("could not parse --output flags: %w", err) + } + + client, err := r.InitClient(inv) + if err != nil { + return err + } + + org, err := orgContext.Selected(inv, client) + if err != nil { + return err + } + + _, err = requireAdmin(ctx, client) + if err != nil { + return err + } + + // Disable rate limits for this test + client.HTTPClient = &http.Client{ + Transport: &codersdk.HeaderTransport{ + Transport: http.DefaultTransport, + Header: map[string][]string{ + codersdk.BypassRatelimitHeader: {"true"}, + }, + }, + } + + // Find the template + tpl, err := parseTemplate(ctx, client, []uuid.UUID{org.ID}, template) + if err != nil { + return xerrors.Errorf("parse template %q: %w", template, err) + } + templateID := tpl.ID + + reg := prometheus.NewRegistry() + metrics := 
taskstatus.NewMetrics(reg) + + logger := slog.Make(sloghuman.Sink(inv.Stdout)).Leveled(slog.LevelDebug) + prometheusSrvClose := ServeHandler(ctx, logger, promhttp.HandlerFor(reg, promhttp.HandlerOpts{}), prometheusFlags.Address, "prometheus") + defer prometheusSrvClose() + + tracerProvider, closeTracing, tracingEnabled, err := tracingFlags.provider(ctx) + if err != nil { + return xerrors.Errorf("create tracer provider: %w", err) + } + defer func() { + // Allow time for traces to flush even if command context is + // canceled. This is a no-op if tracing is not enabled. + _, _ = fmt.Fprintln(inv.Stderr, "\nUploading traces...") + if err := closeTracing(ctx); err != nil { + _, _ = fmt.Fprintf(inv.Stderr, "\nError uploading traces: %+v\n", err) + } + // Wait for prometheus metrics to be scraped + _, _ = fmt.Fprintf(inv.Stderr, "Waiting %s for prometheus metrics to be scraped\n", prometheusFlags.Wait) + <-time.After(prometheusFlags.Wait) + }() + tracer := tracerProvider.Tracer(scaletestTracerName) + + // Setup shared resources for coordination + connectedWaitGroup := &sync.WaitGroup{} + connectedWaitGroup.Add(int(count)) + startReporting := make(chan struct{}) + + // Create the test harness + th := harness.NewTestHarness( + timeoutStrategy.wrapStrategy(harness.ConcurrentExecutionStrategy{}), + cleanupStrategy.toStrategy(), + ) + + // Create runners + for i := range count { + workspaceName := fmt.Sprintf("%s-%d", workspaceNamePrefix, i) + cfg := taskstatus.Config{ + TemplateID: templateID, + WorkspaceName: workspaceName, + AppSlug: appSlug, + ConnectedWaitGroup: connectedWaitGroup, + StartReporting: startReporting, + ReportStatusPeriod: reportStatusPeriod, + ReportStatusDuration: reportStatusDuration, + Metrics: metrics, + MetricLabelValues: []string{}, + } + + if err := cfg.Validate(); err != nil { + return xerrors.Errorf("validate config for runner %d: %w", i, err) + } + + var runner harness.Runnable = taskstatus.NewRunner(client, cfg) + if tracingEnabled { + runner = 
&runnableTraceWrapper{ + tracer: tracer, + spanName: fmt.Sprintf("%s/%d", taskStatusTestName, i), + runner: runner, + } + } + th.AddRun(taskStatusTestName, workspaceName, runner) + } + + // Start the test in a separate goroutine so we can coordinate timing + testCtx, testCancel := timeoutStrategy.toContext(ctx) + defer testCancel() + testDone := make(chan error) + go func() { + testDone <- th.Run(testCtx) + }() + + // Wait for all runners to connect + logger.Info(ctx, "waiting for all runners to connect") + waitCtx, waitCancel := context.WithTimeout(ctx, 5*time.Minute) + defer waitCancel() + + connectDone := make(chan struct{}) + go func() { + connectedWaitGroup.Wait() + close(connectDone) + }() + + select { + case <-waitCtx.Done(): + return xerrors.Errorf("timeout waiting for runners to connect") + case <-connectDone: + logger.Info(ctx, "all runners connected") + } + + // Wait for baseline duration + logger.Info(ctx, "waiting for baseline duration", slog.F("duration", baselineDuration)) + select { + case <-ctx.Done(): + return ctx.Err() + case <-time.After(baselineDuration): + } + + // Trigger all runners to start reporting + logger.Info(ctx, "triggering runners to start reporting task status") + close(startReporting) + + // Wait for the test to complete + err = <-testDone + if err != nil { + return xerrors.Errorf("run test harness: %w", err) + } + + res := th.Results() + for _, o := range outputs { + err = o.write(res, inv.Stdout) + if err != nil { + return xerrors.Errorf("write output %q to %q: %w", o.format, o.path, err) + } + } + + cleanupCtx, cleanupCancel := cleanupStrategy.toContext(ctx) + defer cleanupCancel() + err = th.Cleanup(cleanupCtx) + if err != nil { + return xerrors.Errorf("cleanup tests: %w", err) + } + + if res.TotalFail > 0 { + return xerrors.New("load test failed, see above for more details") + } + + return nil + }, + } + + cmd.Options = serpent.OptionSet{ + { + Flag: "count", + Description: "Number of concurrent runners to create.", + 
Default: "10", + Value: serpent.Int64Of(&count), + }, + { + Flag: "template", + Description: "Name or UUID of the template to use for the scale test. The template MUST include a coder_external_agent and a coder_app.", + Default: "scaletest-task-status", + Value: serpent.StringOf(&template), + }, + { + Flag: "workspace-name-prefix", + Description: "Prefix for workspace names (will be suffixed with index).", + Default: "scaletest-task-status", + Value: serpent.StringOf(&workspaceNamePrefix), + }, + { + Flag: "app-slug", + Description: "Slug of the app designated as the AI Agent.", + Default: "ai-agent", + Value: serpent.StringOf(&appSlug), + }, + { + Flag: "report-status-period", + Description: "Time between reporting task statuses.", + Default: "10s", + Value: serpent.DurationOf(&reportStatusPeriod), + }, + { + Flag: "report-status-duration", + Description: "Total time to report task statuses after baseline.", + Default: "15m", + Value: serpent.DurationOf(&reportStatusDuration), + }, + { + Flag: "baseline-duration", + Description: "Duration to wait after all runners connect before starting to report status.", + Default: "10m", + Value: serpent.DurationOf(&baselineDuration), + }, + } + orgContext.AttachOptions(cmd) + output.attach(&cmd.Options) + tracingFlags.attach(&cmd.Options) + prometheusFlags.attach(&cmd.Options) + timeoutStrategy.attach(&cmd.Options) + cleanupStrategy.attach(&cmd.Options) + return cmd +} diff --git a/cli/keyring_test.go b/cli/keyring_test.go new file mode 100644 index 0000000000000..7cb190845a31b --- /dev/null +++ b/cli/keyring_test.go @@ -0,0 +1,426 @@ +package cli_test + +import ( + "bytes" + "crypto/rand" + "encoding/binary" + "fmt" + "net/url" + "os" + "path" + "runtime" + "testing" + "time" + + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" + + "github.com/coder/coder/v2/cli" + "github.com/coder/coder/v2/cli/clitest" + "github.com/coder/coder/v2/cli/config" + "github.com/coder/coder/v2/cli/sessionstore" + 
"github.com/coder/coder/v2/coderd/coderdtest" + "github.com/coder/coder/v2/pty/ptytest" + "github.com/coder/serpent" +) + +// keyringTestServiceName generates a unique service name for keyring tests +// using the test name and a nanosecond timestamp to prevent collisions. +func keyringTestServiceName(t *testing.T) string { + t.Helper() + var n uint32 + err := binary.Read(rand.Reader, binary.BigEndian, &n) + if err != nil { + t.Fatal(err) + } + return fmt.Sprintf("%s_%v_%d", t.Name(), time.Now().UnixNano(), n) +} + +type keyringTestEnv struct { + serviceName string + keyring sessionstore.Keyring + inv *serpent.Invocation + cfg config.Root + clientURL *url.URL +} + +func setupKeyringTestEnv(t *testing.T, clientURL string, args ...string) keyringTestEnv { + t.Helper() + + var root cli.RootCmd + + cmd, err := root.Command(root.AGPL()) + require.NoError(t, err) + + serviceName := keyringTestServiceName(t) + root.WithKeyringServiceName(serviceName) + root.UseKeyringWithGlobalConfig() + + inv, cfg := clitest.NewWithDefaultKeyringCommand(t, cmd, args...) + + parsedURL, err := url.Parse(clientURL) + require.NoError(t, err) + + backend := sessionstore.NewKeyringWithService(serviceName) + t.Cleanup(func() { + _ = backend.Delete(parsedURL) + }) + + return keyringTestEnv{serviceName, backend, inv, cfg, parsedURL} +} + +func TestUseKeyring(t *testing.T) { + // Verify that the --use-keyring flag default opts into using a keyring backend + // for storing session tokens instead of plain text files. 
+ t.Parallel() + + t.Run("Login", func(t *testing.T) { + t.Parallel() + + if runtime.GOOS != "windows" && runtime.GOOS != "darwin" { + t.Skip("keyring is not supported on this OS") + } + + // Create a test server + client := coderdtest.New(t, nil) + coderdtest.CreateFirstUser(t, client) + + // Create a pty for interactive prompts + pty := ptytest.New(t) + + // Create CLI invocation which defaults to using the keyring + env := setupKeyringTestEnv(t, client.URL.String(), + "login", + "--force-tty", + "--no-open", + client.URL.String()) + inv := env.inv + inv.Stdin = pty.Input() + inv.Stdout = pty.Output() + + // Run login in background + doneChan := make(chan struct{}) + go func() { + defer close(doneChan) + err := inv.Run() + assert.NoError(t, err) + }() + + // Provide the token when prompted + pty.ExpectMatch("Paste your token here:") + pty.WriteLine(client.SessionToken()) + pty.ExpectMatch("Welcome to Coder") + <-doneChan + + // Verify that session file was NOT created (using keyring instead) + sessionFile := path.Join(string(env.cfg), "session") + _, err := os.Stat(sessionFile) + require.True(t, os.IsNotExist(err), "session file should not exist when using keyring") + + // Verify that the credential IS stored in OS keyring + cred, err := env.keyring.Read(env.clientURL) + require.NoError(t, err, "credential should be stored in OS keyring") + require.Equal(t, client.SessionToken(), cred, "stored token should match login token") + }) + + t.Run("Logout", func(t *testing.T) { + t.Parallel() + + if runtime.GOOS != "windows" && runtime.GOOS != "darwin" { + t.Skip("keyring is not supported on this OS") + } + + // Create a test server + client := coderdtest.New(t, nil) + coderdtest.CreateFirstUser(t, client) + + // Create a pty for interactive prompts + pty := ptytest.New(t) + + // First, login with the keyring (default) + env := setupKeyringTestEnv(t, client.URL.String(), + "login", + "--force-tty", + "--no-open", + client.URL.String(), + ) + loginInv := env.inv + 
loginInv.Stdin = pty.Input() + loginInv.Stdout = pty.Output() + + doneChan := make(chan struct{}) + go func() { + defer close(doneChan) + err := loginInv.Run() + assert.NoError(t, err) + }() + + pty.ExpectMatch("Paste your token here:") + pty.WriteLine(client.SessionToken()) + pty.ExpectMatch("Welcome to Coder") + <-doneChan + + // Verify credential exists in OS keyring + cred, err := env.keyring.Read(env.clientURL) + require.NoError(t, err, "read credential should succeed before logout") + require.NotEmpty(t, cred, "credential should exist before logout") + + // Now logout using the same keyring service name + var logoutRoot cli.RootCmd + logoutCmd, err := logoutRoot.Command(logoutRoot.AGPL()) + require.NoError(t, err) + logoutRoot.WithKeyringServiceName(env.serviceName) + logoutRoot.UseKeyringWithGlobalConfig() + + logoutInv, _ := clitest.NewWithDefaultKeyringCommand(t, logoutCmd, + "logout", + "--yes", + "--global-config", string(env.cfg), + ) + + var logoutOut bytes.Buffer + logoutInv.Stdout = &logoutOut + + err = logoutInv.Run() + require.NoError(t, err, "logout should succeed") + + // Verify the credential was deleted from OS keyring + _, err = env.keyring.Read(env.clientURL) + require.ErrorIs(t, err, os.ErrNotExist, "credential should be deleted from keyring after logout") + }) + + t.Run("DefaultFileStorage", func(t *testing.T) { + t.Parallel() + + if runtime.GOOS != "linux" { + t.Skip("file storage is the default for Linux") + } + + // Create a test server + client := coderdtest.New(t, nil) + coderdtest.CreateFirstUser(t, client) + + // Create a pty for interactive prompts + pty := ptytest.New(t) + + env := setupKeyringTestEnv(t, client.URL.String(), + "login", + "--force-tty", + "--no-open", + client.URL.String(), + ) + inv := env.inv + inv.Stdin = pty.Input() + inv.Stdout = pty.Output() + + doneChan := make(chan struct{}) + go func() { + defer close(doneChan) + err := inv.Run() + assert.NoError(t, err) + }() + + pty.ExpectMatch("Paste your token here:") + 
pty.WriteLine(client.SessionToken()) + pty.ExpectMatch("Welcome to Coder") + <-doneChan + + // Verify that session file WAS created (not using keyring) + sessionFile := path.Join(string(env.cfg), "session") + _, err := os.Stat(sessionFile) + require.NoError(t, err, "session file should exist when NOT using --use-keyring on Linux") + + // Read and verify the token from file + content, err := os.ReadFile(sessionFile) + require.NoError(t, err, "should be able to read session file") + require.Equal(t, client.SessionToken(), string(content), "file should contain the session token") + }) + + t.Run("EnvironmentVariable", func(t *testing.T) { + t.Parallel() + + // Create a test server + client := coderdtest.New(t, nil) + coderdtest.CreateFirstUser(t, client) + + // Create a pty for interactive prompts + pty := ptytest.New(t) + + // Login using CODER_USE_KEYRING environment variable set to disable keyring usage, + // which should have the same behavior on all platforms. + env := setupKeyringTestEnv(t, client.URL.String(), + "login", + "--force-tty", + "--no-open", + client.URL.String(), + ) + inv := env.inv + inv.Stdin = pty.Input() + inv.Stdout = pty.Output() + inv.Environ.Set("CODER_USE_KEYRING", "false") + + doneChan := make(chan struct{}) + go func() { + defer close(doneChan) + err := inv.Run() + assert.NoError(t, err) + }() + + pty.ExpectMatch("Paste your token here:") + pty.WriteLine(client.SessionToken()) + pty.ExpectMatch("Welcome to Coder") + <-doneChan + + // Verify that session file WAS created (not using keyring) + sessionFile := path.Join(string(env.cfg), "session") + _, err := os.Stat(sessionFile) + require.NoError(t, err, "session file should exist when CODER_USE_KEYRING set to false") + + // Read and verify the token from file + content, err := os.ReadFile(sessionFile) + require.NoError(t, err, "should be able to read session file") + require.Equal(t, client.SessionToken(), string(content), "file should contain the session token") + }) + + 
t.Run("DisableKeyringWithFlag", func(t *testing.T) { + t.Parallel() + + client := coderdtest.New(t, nil) + coderdtest.CreateFirstUser(t, client) + pty := ptytest.New(t) + + // Login with --use-keyring=false to explicitly disable keyring usage, which + // should have the same behavior on all platforms. + env := setupKeyringTestEnv(t, client.URL.String(), + "login", + "--use-keyring=false", + "--force-tty", + "--no-open", + client.URL.String(), + ) + inv := env.inv + inv.Stdin = pty.Input() + inv.Stdout = pty.Output() + + doneChan := make(chan struct{}) + go func() { + defer close(doneChan) + err := inv.Run() + assert.NoError(t, err) + }() + + pty.ExpectMatch("Paste your token here:") + pty.WriteLine(client.SessionToken()) + pty.ExpectMatch("Welcome to Coder") + <-doneChan + + // Verify that session file WAS created (not using keyring) + sessionFile := path.Join(string(env.cfg), "session") + _, err := os.Stat(sessionFile) + require.NoError(t, err, "session file should exist when --use-keyring=false is specified") + + // Read and verify the token from file + content, err := os.ReadFile(sessionFile) + require.NoError(t, err, "should be able to read session file") + require.Equal(t, client.SessionToken(), string(content), "file should contain the session token") + }) +} + +func TestUseKeyringUnsupportedOS(t *testing.T) { + // Verify that on unsupported operating systems, file-based storage is used + // automatically even when --use-keyring is set to true (the default). + t.Parallel() + + // Only run this on an unsupported OS. 
+ if runtime.GOOS == "windows" || runtime.GOOS == "darwin" { + t.Skipf("Skipping unsupported OS test on %s where keyring is supported", runtime.GOOS) + } + + t.Run("LoginWithDefaultKeyring", func(t *testing.T) { + t.Parallel() + + client := coderdtest.New(t, nil) + coderdtest.CreateFirstUser(t, client) + pty := ptytest.New(t) + + env := setupKeyringTestEnv(t, client.URL.String(), + "login", + "--force-tty", + "--no-open", + client.URL.String(), + ) + inv := env.inv + inv.Stdin = pty.Input() + inv.Stdout = pty.Output() + + doneChan := make(chan struct{}) + go func() { + defer close(doneChan) + err := inv.Run() + assert.NoError(t, err) + }() + + pty.ExpectMatch("Paste your token here:") + pty.WriteLine(client.SessionToken()) + pty.ExpectMatch("Welcome to Coder") + <-doneChan + + // Verify that session file WAS created (automatic fallback to file storage) + sessionFile := path.Join(string(env.cfg), "session") + _, err := os.Stat(sessionFile) + require.NoError(t, err, "session file should exist due to automatic fallback to file storage") + + content, err := os.ReadFile(sessionFile) + require.NoError(t, err, "should be able to read session file") + require.Equal(t, client.SessionToken(), string(content), "file should contain the session token") + }) + + t.Run("LogoutWithDefaultKeyring", func(t *testing.T) { + t.Parallel() + + client := coderdtest.New(t, nil) + coderdtest.CreateFirstUser(t, client) + pty := ptytest.New(t) + + // First login to create a session (will use file storage due to automatic fallback) + env := setupKeyringTestEnv(t, client.URL.String(), + "login", + "--force-tty", + "--no-open", + client.URL.String(), + ) + loginInv := env.inv + loginInv.Stdin = pty.Input() + loginInv.Stdout = pty.Output() + + doneChan := make(chan struct{}) + go func() { + defer close(doneChan) + err := loginInv.Run() + assert.NoError(t, err) + }() + + pty.ExpectMatch("Paste your token here:") + pty.WriteLine(client.SessionToken()) + pty.ExpectMatch("Welcome to Coder") + 
<-doneChan + + // Verify session file exists + sessionFile := path.Join(string(env.cfg), "session") + _, err := os.Stat(sessionFile) + require.NoError(t, err, "session file should exist before logout") + + // Now logout - should succeed and delete the file + logoutEnv := setupKeyringTestEnv(t, client.URL.String(), + "logout", + "--yes", + "--global-config", string(env.cfg), + ) + + err = logoutEnv.inv.Run() + require.NoError(t, err, "logout should succeed with automatic file storage fallback") + + _, err = os.Stat(sessionFile) + require.True(t, os.IsNotExist(err), "session file should be deleted after logout") + }) +} diff --git a/cli/login.go b/cli/login.go index fcba1ee50eb74..d95eb7475dedd 100644 --- a/cli/login.go +++ b/cli/login.go @@ -19,6 +19,7 @@ import ( "github.com/coder/pretty" "github.com/coder/coder/v2/cli/cliui" + "github.com/coder/coder/v2/cli/sessionstore" "github.com/coder/coder/v2/coderd/userpassword" "github.com/coder/coder/v2/codersdk" "github.com/coder/serpent" @@ -114,9 +115,11 @@ func (r *RootCmd) loginWithPassword( } sessionToken := resp.SessionToken - config := r.createConfig() - err = config.Session().Write(sessionToken) + err = r.ensureTokenBackend().Write(client.URL, sessionToken) if err != nil { + if xerrors.Is(err, sessionstore.ErrNotImplemented) { + return errKeyringNotSupported + } return xerrors.Errorf("write session token: %w", err) } @@ -149,11 +152,15 @@ func (r *RootCmd) login() *serpent.Command { useTokenForSession bool ) cmd := &serpent.Command{ - Use: "login []", - Short: "Authenticate with Coder deployment", + Use: "login []", + Short: "Authenticate with Coder deployment", + Long: "By default, the session token is stored in the operating system keyring on " + + "macOS and Windows and a plain text file on Linux. 
Use the --use-keyring flag " + + "or CODER_USE_KEYRING environment variable to change the storage mechanism.", Middleware: serpent.RequireRangeArgs(0, 1), Handler: func(inv *serpent.Invocation) error { ctx := inv.Context() + rawURL := "" var urlSource string @@ -198,6 +205,15 @@ func (r *RootCmd) login() *serpent.Command { return err } + // Check keyring availability before prompting the user for a token to fail fast. + if r.useKeyring { + backend := r.ensureTokenBackend() + _, err := backend.Read(client.URL) + if err != nil && xerrors.Is(err, sessionstore.ErrNotImplemented) { + return errKeyringNotSupported + } + } + hasFirstUser, err := client.HasFirstUser(ctx) if err != nil { return xerrors.Errorf("Failed to check server %q for first user, is the URL correct and is coder accessible from your browser? Error - has initial user: %w", serverURL.String(), err) @@ -394,8 +410,11 @@ func (r *RootCmd) login() *serpent.Command { } config := r.createConfig() - err = config.Session().Write(sessionToken) + err = r.ensureTokenBackend().Write(client.URL, sessionToken) if err != nil { + if xerrors.Is(err, sessionstore.ErrNotImplemented) { + return errKeyringNotSupported + } return xerrors.Errorf("write session token: %w", err) } err = config.URL().Write(serverURL.String()) diff --git a/cli/logout.go b/cli/logout.go index 33cd55cc81042..db10c3abe4315 100644 --- a/cli/logout.go +++ b/cli/logout.go @@ -8,6 +8,7 @@ import ( "golang.org/x/xerrors" "github.com/coder/coder/v2/cli/cliui" + "github.com/coder/coder/v2/cli/sessionstore" "github.com/coder/serpent" ) @@ -46,11 +47,15 @@ func (r *RootCmd) logout() *serpent.Command { errors = append(errors, xerrors.Errorf("remove URL file: %w", err)) } - err = config.Session().Delete() + err = r.ensureTokenBackend().Delete(client.URL) // Only throw error if the session configuration file is present, // otherwise the user is already logged out, and we proceed - if err != nil && !os.IsNotExist(err) { - errors = append(errors, 
xerrors.Errorf("remove session file: %w", err)) + if err != nil && !xerrors.Is(err, os.ErrNotExist) { + if xerrors.Is(err, sessionstore.ErrNotImplemented) { + errors = append(errors, errKeyringNotSupported) + } else { + errors = append(errors, xerrors.Errorf("remove session token: %w", err)) + } } err = config.Organization().Delete() diff --git a/cli/root.go b/cli/root.go index c44c0625c2c34..1aa45ae42d75f 100644 --- a/cli/root.go +++ b/cli/root.go @@ -37,6 +37,7 @@ import ( "github.com/coder/coder/v2/cli/cliui" "github.com/coder/coder/v2/cli/config" "github.com/coder/coder/v2/cli/gitauth" + "github.com/coder/coder/v2/cli/sessionstore" "github.com/coder/coder/v2/cli/telemetry" "github.com/coder/coder/v2/codersdk" "github.com/coder/coder/v2/codersdk/agentsdk" @@ -54,6 +55,8 @@ var ( // ErrSilent is a sentinel error that tells the command handler to just exit with a non-zero error, but not print // anything. ErrSilent = xerrors.New("silent error") + + errKeyringNotSupported = xerrors.New("keyring storage is not supported on this operating system; omit --use-keyring to use file-based storage") ) const ( @@ -68,12 +71,14 @@ const ( varVerbose = "verbose" varDisableDirect = "disable-direct-connections" varDisableNetworkTelemetry = "disable-network-telemetry" + varUseKeyring = "use-keyring" notLoggedInMessage = "You are not logged in. Try logging in using '%s login '." 
envNoVersionCheck = "CODER_NO_VERSION_WARNING" envNoFeatureWarning = "CODER_NO_FEATURE_WARNING" envSessionToken = "CODER_SESSION_TOKEN" + envUseKeyring = "CODER_USE_KEYRING" //nolint:gosec envAgentToken = "CODER_AGENT_TOKEN" //nolint:gosec @@ -99,6 +104,7 @@ func (r *RootCmd) CoreSubcommands() []*serpent.Command { r.resetPassword(), r.sharing(), r.state(), + r.tasksCommand(), r.templates(), r.tokens(), r.users(), @@ -144,7 +150,7 @@ func (r *RootCmd) AGPLExperimental() []*serpent.Command { r.mcpCommand(), r.promptExample(), r.rptyCommand(), - r.tasksCommand(), + r.syncCommand(), r.boundary(), } } @@ -474,6 +480,17 @@ func (r *RootCmd) Command(subcommands []*serpent.Command) (*serpent.Command, err Value: serpent.BoolOf(&r.disableNetworkTelemetry), Group: globalGroup, }, + { + Flag: varUseKeyring, + Env: envUseKeyring, + Description: "Store and retrieve session tokens using the operating system " + + "keyring. This flag is ignored and file-based storage is used when " + + "--global-config is set or keyring usage is not supported on the current " + + "platform. 
Set to false to force file-based storage on supported platforms.", + Default: "true", + Value: serpent.BoolOf(&r.useKeyring), + Group: globalGroup, + }, { Flag: "debug-http", Description: "Debug codersdk HTTP requests.", @@ -508,6 +525,7 @@ func (r *RootCmd) Command(subcommands []*serpent.Command) (*serpent.Command, err type RootCmd struct { clientURL *url.URL token string + tokenBackend sessionstore.Backend globalConfig string header []string headerCommand string @@ -519,9 +537,12 @@ type RootCmd struct { disableDirect bool debugHTTP bool - disableNetworkTelemetry bool - noVersionCheck bool - noFeatureWarning bool + disableNetworkTelemetry bool + noVersionCheck bool + noFeatureWarning bool + useKeyring bool + keyringServiceName string + useKeyringWithGlobalConfig bool } // InitClient creates and configures a new client with authentication, telemetry, @@ -549,14 +570,19 @@ func (r *RootCmd) InitClient(inv *serpent.Invocation) (*codersdk.Client, error) return nil, err } } - // Read the token stored on disk. if r.token == "" { - r.token, err = conf.Session().Read() + tok, err := r.ensureTokenBackend().Read(r.clientURL) // Even if there isn't a token, we don't care. // Some API routes can be unauthenticated. - if err != nil && !os.IsNotExist(err) { + if err != nil && !xerrors.Is(err, os.ErrNotExist) { + if xerrors.Is(err, sessionstore.ErrNotImplemented) { + return nil, errKeyringNotSupported + } return nil, err } + if tok != "" { + r.token = tok + } } // Configure HTTP client with transport wrappers @@ -588,7 +614,6 @@ func (r *RootCmd) InitClient(inv *serpent.Invocation) (*codersdk.Client, error) // This allows commands to run without requiring authentication, but still use auth if available. func (r *RootCmd) TryInitClient(inv *serpent.Invocation) (*codersdk.Client, error) { conf := r.createConfig() - var err error // Read the client URL stored on disk. 
if r.clientURL == nil || r.clientURL.String() == "" { rawURL, err := conf.URL().Read() @@ -605,14 +630,19 @@ func (r *RootCmd) TryInitClient(inv *serpent.Invocation) (*codersdk.Client, erro } } } - // Read the token stored on disk. if r.token == "" { - r.token, err = conf.Session().Read() + tok, err := r.ensureTokenBackend().Read(r.clientURL) // Even if there isn't a token, we don't care. // Some API routes can be unauthenticated. - if err != nil && !os.IsNotExist(err) { + if err != nil && !xerrors.Is(err, os.ErrNotExist) { + if xerrors.Is(err, sessionstore.ErrNotImplemented) { + return nil, errKeyringNotSupported + } return nil, err } + if tok != "" { + r.token = tok + } } // Only configure the client if we have a URL @@ -688,6 +718,45 @@ func (r *RootCmd) createUnauthenticatedClient(ctx context.Context, serverURL *ur return client, nil } +// ensureTokenBackend returns the session token storage backend, creating it if necessary. +// This must be called after flags are parsed so we can respect the value of the --use-keyring +// flag. +func (r *RootCmd) ensureTokenBackend() sessionstore.Backend { + if r.tokenBackend == nil { + // Checking for the --global-config directory being set is a bit wonky but necessary + // to allow extensions that invoke the CLI with this flag (e.g. VS code) to continue + // working without modification. In the future we should modify these extensions to + // either access the credential in the keyring (like Coder Desktop) or some other + // approach that doesn't rely on the session token being stored on disk. 
+ assumeExtensionInUse := r.globalConfig != config.DefaultDir() && !r.useKeyringWithGlobalConfig + keyringSupported := runtime.GOOS == "windows" || runtime.GOOS == "darwin" + if r.useKeyring && !assumeExtensionInUse && keyringSupported { + serviceName := sessionstore.DefaultServiceName + if r.keyringServiceName != "" { + serviceName = r.keyringServiceName + } + r.tokenBackend = sessionstore.NewKeyringWithService(serviceName) + } else { + r.tokenBackend = sessionstore.NewFile(r.createConfig) + } + } + return r.tokenBackend +} + +// WithKeyringServiceName sets a custom keyring service name for testing purposes. +// This allows tests to use isolated keyring storage while still exercising the +// genuine storage backend selection logic in ensureTokenBackend(). +func (r *RootCmd) WithKeyringServiceName(serviceName string) { + r.keyringServiceName = serviceName +} + +// UseKeyringWithGlobalConfig enables the use of the keyring storage backend +// when the --global-config directory is set. This is only intended as an override +// for tests, which require specifying the global config directory for test isolation. +func (r *RootCmd) UseKeyringWithGlobalConfig() { + r.useKeyringWithGlobalConfig = true +} + type AgentAuth struct { // Agent Client config agentToken string diff --git a/cli/root_test.go b/cli/root_test.go index b9b230413859b..4e4c9c2399654 100644 --- a/cli/root_test.go +++ b/cli/root_test.go @@ -72,6 +72,31 @@ func TestCommandHelp(t *testing.T) { Name: "coder provisioner jobs list --output json", Cmd: []string{"provisioner", "jobs", "list", "--output", "json"}, }, + // TODO (SasSwart): Remove these once the sync commands are promoted out of experimental. 
+ clitest.CommandHelpCase{ + Name: "coder exp sync --help", + Cmd: []string{"exp", "sync", "--help"}, + }, + clitest.CommandHelpCase{ + Name: "coder exp sync ping --help", + Cmd: []string{"exp", "sync", "ping", "--help"}, + }, + clitest.CommandHelpCase{ + Name: "coder exp sync start --help", + Cmd: []string{"exp", "sync", "start", "--help"}, + }, + clitest.CommandHelpCase{ + Name: "coder exp sync want --help", + Cmd: []string{"exp", "sync", "want", "--help"}, + }, + clitest.CommandHelpCase{ + Name: "coder exp sync complete --help", + Cmd: []string{"exp", "sync", "complete", "--help"}, + }, + clitest.CommandHelpCase{ + Name: "coder exp sync status --help", + Cmd: []string{"exp", "sync", "status", "--help"}, + }, )) } diff --git a/cli/server.go b/cli/server.go index b12f5e0189c47..e8e2d24de1873 100644 --- a/cli/server.go +++ b/cli/server.go @@ -1029,7 +1029,7 @@ func (r *RootCmd) Server(newAPI func(context.Context, *coderd.Options) (*coderd. defer shutdownConns() // Ensures that old database entries are cleaned up over time! - purger := dbpurge.New(ctx, logger.Named("dbpurge"), options.Database, quartz.NewReal()) + purger := dbpurge.New(ctx, logger.Named("dbpurge"), options.Database, options.DeploymentValues, quartz.NewReal()) defer purger.Close() // Updates workspace usage @@ -1476,6 +1476,7 @@ func newProvisionerDaemon( Listener: terraformServer, Logger: provisionerLogger, WorkDirectory: workDir, + Experiments: coderAPI.Experiments, }, CachePath: tfDir, Tracer: tracer, @@ -2142,21 +2143,33 @@ func startBuiltinPostgres(ctx context.Context, cfg config.Root, logger slog.Logg } stdlibLogger := slog.Stdlib(ctx, logger.Named("postgres"), slog.LevelDebug) - // If the port is not defined, an available port will be found dynamically. + // If the port is not defined, an available port will be found dynamically. 
This has + // implications in CI because here is no way to tell Postgres to use an ephemeral + // port, so to avoid flaky tests in CI we need to retry EmbeddedPostgres.Start in + // case of a race condition where the port we quickly listen on and close in + // embeddedPostgresURL() is not free by the time the embedded postgres starts up. + // The maximum retry attempts _should_ cover most cases where port conflicts occur + // in CI and cause flaky tests. maxAttempts := 1 _, err = cfg.PostgresPort().Read() + // Important: if retryPortDiscovery is changed to not include testing.Testing(), + // the retry logic below also needs to be updated to ensure we don't delete an + // existing database retryPortDiscovery := errors.Is(err, os.ErrNotExist) && testing.Testing() if retryPortDiscovery { - // There is no way to tell Postgres to use an ephemeral port, so in order to avoid - // flaky tests in CI we need to retry EmbeddedPostgres.Start in case of a race - // condition where the port we quickly listen on and close in embeddedPostgresURL() - // is not free by the time the embedded postgres starts up. This maximum_should - // cover most cases where port conflicts occur in CI and cause flaky tests. maxAttempts = 3 } var startErr error for attempt := 0; attempt < maxAttempts; attempt++ { + if retryPortDiscovery && attempt > 0 { + // Clean up the data and runtime directories and the port file from the + // previous failed attempt to ensure a clean slate for the next attempt. + _ = os.RemoveAll(filepath.Join(cfg.PostgresPath(), "data")) + _ = os.RemoveAll(filepath.Join(cfg.PostgresPath(), "runtime")) + _ = cfg.PostgresPort().Delete() + } + // Ensure a password and port have been generated. 
connectionURL, err := embeddedPostgresURL(cfg) if err != nil { @@ -2203,11 +2216,6 @@ func startBuiltinPostgres(ctx context.Context, cfg config.Root, logger slog.Logg slog.F("port", pgPort), slog.Error(startErr), ) - - if retryPortDiscovery { - // Since a retry is needed, we wipe the port stored here at the beginning of the loop. - _ = cfg.PostgresPort().Delete() - } } return "", nil, xerrors.Errorf("failed to start built-in PostgreSQL after %d attempts. "+ diff --git a/cli/sessionstore/sessionstore.go b/cli/sessionstore/sessionstore.go new file mode 100644 index 0000000000000..57f1c269bf8cc --- /dev/null +++ b/cli/sessionstore/sessionstore.go @@ -0,0 +1,237 @@ +// Package sessionstore provides CLI session token storage mechanisms. +// Operating system keyring storage is intended to have compatibility with other Coder +// applications (e.g. Coder Desktop, Coder provider for JetBrains Toolbox, etc) so that +// applications can read/write the same credential stored in the keyring. +// +// Note that we aren't using an existing Go package zalando/go-keyring here for a few +// reasons. 1) It prescribes the format of the target credential name in the OS keyrings, +// which makes our life difficult for compatibility with other Coder applications. 2) +// It uses init functions that make it difficult to test with. As a result, the OS +// keyring implementations may be adapted from zalando/go-keyring source (i.e. Windows). +package sessionstore + +import ( + "encoding/json" + "errors" + "net/url" + "os" + "strings" + + "golang.org/x/xerrors" + + "github.com/coder/coder/v2/cli/config" +) + +// Backend is a storage backend for session tokens. +type Backend interface { + // Read returns the session token for the given server URL or an error, if any. It + // will return os.ErrNotExist if no token exists for the given URL. + Read(serverURL *url.URL) (string, error) + // Write stores the session token for the given server URL. 
+ Write(serverURL *url.URL, token string) error + // Delete removes the session token for the given server URL or an error, if any. + // It will return os.ErrNotExist error if no token exists to delete. + Delete(serverURL *url.URL) error +} + +var ( + + // ErrSetDataTooBig is returned if `keyringProvider.Set` was called with too much data. + // On macOS: The combination of service, username & password should not exceed ~3000 bytes + // On Windows: The service is limited to 32KiB while the password is limited to 2560 bytes + ErrSetDataTooBig = xerrors.New("data passed to Set was too big") + + // ErrNotImplemented represents when keyring usage is not implemented on the current + // operating system. + ErrNotImplemented = xerrors.New("not implemented") +) + +const ( + // DefaultServiceName is the service name used in keyrings for storing Coder CLI session + // tokens. + DefaultServiceName = "coder-v2-credentials" +) + +// keyringProvider represents an operating system keyring. The expectation +// is these methods operate on the user/login keyring. +type keyringProvider interface { + // Set stores the given credential for a service name in the operating system + // keyring. + Set(service, credential string) error + // Get retrieves the credential from the keyring. It must return os.ErrNotExist + // if the credential is not found. + Get(service string) ([]byte, error) + // Delete deletes the credential from the keyring. It must return os.ErrNotExist + // if the credential is not found. + Delete(service string) error +} + +// credential represents a single credential entry. +type credential struct { + CoderURL string `json:"coder_url"` + APIToken string `json:"api_token"` +} + +// credentialsMap represents the JSON structure stored in the operating system keyring. +// It supports storing multiple credentials for different server URLs. +type credentialsMap map[string]credential + +// normalizeHost returns a normalized version of the URL host for use as a map key. 
+func normalizeHost(u *url.URL) (string, error) { + if u == nil || u.Host == "" { + return "", xerrors.New("nil server URL") + } + return strings.TrimSpace(strings.ToLower(u.Host)), nil +} + +// parseCredentialsJSON parses the JSON from the keyring into a credentialsMap. +func parseCredentialsJSON(jsonData []byte) (credentialsMap, error) { + if len(jsonData) == 0 { + return make(credentialsMap), nil + } + + var creds credentialsMap + if err := json.Unmarshal(jsonData, &creds); err != nil { + return nil, xerrors.Errorf("unmarshal credentials: %w", err) + } + + return creds, nil +} + +// Keyring is a Backend that exclusively stores the session token in the operating +// system keyring. Happy path usage of this type should start with NewKeyring. +// It stores a JSON object in the keyring that supports multiple credentials for +// different server URLs, providing compatibility with Coder Desktop and other Coder +// applications. +type Keyring struct { + provider keyringProvider + serviceName string +} + +// NewKeyringWithService creates a Keyring Backend that stores credentials under the +// specified service name. Generally, DefaultServiceName should be provided as the service +// name except in tests which may need parameterization to avoid conflicting keyring use. 
+func NewKeyringWithService(serviceName string) Keyring { + return Keyring{ + provider: operatingSystemKeyring{}, + serviceName: serviceName, + } +} + +func (o Keyring) Read(serverURL *url.URL) (string, error) { + host, err := normalizeHost(serverURL) + if err != nil { + return "", err + } + + credJSON, err := o.provider.Get(o.serviceName) + if err != nil { + return "", err + } + if len(credJSON) == 0 { + return "", os.ErrNotExist + } + + creds, err := parseCredentialsJSON(credJSON) + if err != nil { + return "", xerrors.Errorf("read: parse existing credentials: %w", err) + } + + // Return the credential for the specified URL + cred, ok := creds[host] + if !ok { + return "", os.ErrNotExist + } + return cred.APIToken, nil +} + +func (o Keyring) Write(serverURL *url.URL, token string) error { + host, err := normalizeHost(serverURL) + if err != nil { + return err + } + + existingJSON, err := o.provider.Get(o.serviceName) + if err != nil && !errors.Is(err, os.ErrNotExist) { + return xerrors.Errorf("read existing credentials: %w", err) + } + + creds, err := parseCredentialsJSON(existingJSON) + if err != nil { + return xerrors.Errorf("write: parse existing credentials: %w", err) + } + + // Upsert the credential for this URL. 
+ creds[host] = credential{ + CoderURL: host, + APIToken: token, + } + + credsJSON, err := json.Marshal(creds) + if err != nil { + return xerrors.Errorf("marshal credentials: %w", err) + } + + err = o.provider.Set(o.serviceName, string(credsJSON)) + if err != nil { + return xerrors.Errorf("write credentials to keyring: %w", err) + } + return nil +} + +func (o Keyring) Delete(serverURL *url.URL) error { + host, err := normalizeHost(serverURL) + if err != nil { + return err + } + + existingJSON, err := o.provider.Get(o.serviceName) + if err != nil { + return err + } + + creds, err := parseCredentialsJSON(existingJSON) + if err != nil { + return xerrors.Errorf("failed to parse existing credentials: %w", err) + } + + if _, ok := creds[host]; !ok { + return os.ErrNotExist + } + + delete(creds, host) + + // Delete the entire keyring entry when no credentials remain. + if len(creds) == 0 { + return o.provider.Delete(o.serviceName) + } + + // Write back the updated credentials map. + credsJSON, err := json.Marshal(creds) + if err != nil { + return xerrors.Errorf("failed to marshal credentials: %w", err) + } + + return o.provider.Set(o.serviceName, string(credsJSON)) +} + +// File is a Backend that exclusively stores the session token in a file on disk. 
+type File struct { + config func() config.Root +} + +func NewFile(f func() config.Root) *File { + return &File{config: f} +} + +func (f *File) Read(_ *url.URL) (string, error) { + return f.config().Session().Read() +} + +func (f *File) Write(_ *url.URL, token string) error { + return f.config().Session().Write(token) +} + +func (f *File) Delete(_ *url.URL) error { + return f.config().Session().Delete() +} diff --git a/cli/sessionstore/sessionstore_darwin.go b/cli/sessionstore/sessionstore_darwin.go new file mode 100644 index 0000000000000..be398d42e7049 --- /dev/null +++ b/cli/sessionstore/sessionstore_darwin.go @@ -0,0 +1,105 @@ +//go:build darwin + +package sessionstore + +import ( + "encoding/base64" + "fmt" + "io" + "os" + "os/exec" + "regexp" + "strings" +) + +const ( + // fixedUsername is the fixed username used for all keychain entries. + // Since our interface only uses service names, we use a constant username. + fixedUsername = "coder-login-credentials" + + execPathKeychain = "/usr/bin/security" + notFoundStr = "could not be found" +) + +// operatingSystemKeyring implements keyringProvider for macOS. +// It is largely adapted from the zalando/go-keyring package. +type operatingSystemKeyring struct{} + +func (operatingSystemKeyring) Set(service, credential string) error { + // if the added secret has multiple lines or some non ascii, + // macOS will hex encode it on return. 
To avoid getting garbage, we + // encode all passwords + password := base64.StdEncoding.EncodeToString([]byte(credential)) + + cmd := exec.Command(execPathKeychain, "-i") + stdIn, err := cmd.StdinPipe() + if err != nil { + return err + } + + if err = cmd.Start(); err != nil { + return err + } + + command := fmt.Sprintf("add-generic-password -U -s %s -a %s -w %s\n", + shellEscape(service), + shellEscape(fixedUsername), + shellEscape(password)) + if len(command) > 4096 { + return ErrSetDataTooBig + } + + if _, err := io.WriteString(stdIn, command); err != nil { + return err + } + + if err = stdIn.Close(); err != nil { + return err + } + + return cmd.Wait() +} + +func (operatingSystemKeyring) Get(service string) ([]byte, error) { + out, err := exec.Command( + execPathKeychain, + "find-generic-password", + "-s", service, + "-wa", fixedUsername).CombinedOutput() + if err != nil { + if strings.Contains(string(out), notFoundStr) { + return nil, os.ErrNotExist + } + return nil, err + } + + trimStr := strings.TrimSpace(string(out)) + return base64.StdEncoding.DecodeString(trimStr) +} + +func (operatingSystemKeyring) Delete(service string) error { + out, err := exec.Command( + execPathKeychain, + "delete-generic-password", + "-s", service, + "-a", fixedUsername).CombinedOutput() + if strings.Contains(string(out), notFoundStr) { + return os.ErrNotExist + } + return err +} + +// shellEscape returns a shell-escaped version of the string s. +// This is adapted from github.com/zalando/go-keyring/internal/shellescape. 
+func shellEscape(s string) string { + if len(s) == 0 { + return "''" + } + + pattern := regexp.MustCompile(`[^\w@%+=:,./-]`) + if pattern.MatchString(s) { + return "'" + strings.ReplaceAll(s, "'", "'\"'\"'") + "'" + } + + return s +} diff --git a/cli/sessionstore/sessionstore_darwin_test.go b/cli/sessionstore/sessionstore_darwin_test.go new file mode 100644 index 0000000000000..a90ee12d96cc1 --- /dev/null +++ b/cli/sessionstore/sessionstore_darwin_test.go @@ -0,0 +1,34 @@ +//go:build darwin + +package sessionstore_test + +import ( + "encoding/base64" + "os/exec" + "testing" +) + +const ( + execPathKeychain = "/usr/bin/security" + fixedUsername = "coder-login-credentials" +) + +func readRawKeychainCredential(t *testing.T, service string) []byte { + t.Helper() + + out, err := exec.Command( + execPathKeychain, + "find-generic-password", + "-s", service, + "-wa", fixedUsername).CombinedOutput() + if err != nil { + t.Fatal(err) + } + + dst := make([]byte, base64.StdEncoding.DecodedLen(len(out))) + n, err := base64.StdEncoding.Decode(dst, out) + if err != nil { + t.Fatal(err) + } + return dst[:n] +} diff --git a/cli/sessionstore/sessionstore_internal_test.go b/cli/sessionstore/sessionstore_internal_test.go new file mode 100644 index 0000000000000..baf2efa2f49d6 --- /dev/null +++ b/cli/sessionstore/sessionstore_internal_test.go @@ -0,0 +1,121 @@ +package sessionstore + +import ( + "encoding/json" + "net/url" + "testing" + + "github.com/stretchr/testify/require" +) + +func TestNormalizeHost(t *testing.T) { + t.Parallel() + + tests := []struct { + name string + url *url.URL + want string + wantErr bool + }{ + { + name: "StandardHost", + url: &url.URL{Host: "coder.example.com"}, + want: "coder.example.com", + }, + { + name: "HostWithPort", + url: &url.URL{Host: "coder.example.com:8080"}, + want: "coder.example.com:8080", + }, + { + name: "UppercaseHost", + url: &url.URL{Host: "CODER.EXAMPLE.COM"}, + want: "coder.example.com", + }, + { + name: "HostWithWhitespace", + url: 
&url.URL{Host: " coder.example.com "}, + want: "coder.example.com", + }, + { + name: "NilURL", + url: nil, + want: "", + wantErr: true, + }, + { + name: "EmptyHost", + url: &url.URL{Host: ""}, + want: "", + wantErr: true, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + got, err := normalizeHost(tt.url) + if tt.wantErr { + require.Error(t, err) + return + } + require.NoError(t, err) + require.Equal(t, tt.want, got) + }) + } +} + +func TestParseCredentialsJSON(t *testing.T) { + t.Parallel() + + t.Run("Empty", func(t *testing.T) { + t.Parallel() + creds, err := parseCredentialsJSON(nil) + require.NoError(t, err) + require.NotNil(t, creds) + require.Empty(t, creds) + }) + + t.Run("NewFormat", func(t *testing.T) { + t.Parallel() + jsonData := []byte(`{ + "coder1.example.com": {"coder_url": "coder1.example.com", "api_token": "token1"}, + "coder2.example.com": {"coder_url": "coder2.example.com", "api_token": "token2"} + }`) + creds, err := parseCredentialsJSON(jsonData) + require.NoError(t, err) + require.Len(t, creds, 2) + require.Equal(t, "token1", creds["coder1.example.com"].APIToken) + require.Equal(t, "token2", creds["coder2.example.com"].APIToken) + }) + + t.Run("InvalidJSON", func(t *testing.T) { + t.Parallel() + jsonData := []byte(`{invalid json}`) + _, err := parseCredentialsJSON(jsonData) + require.Error(t, err) + }) +} + +func TestCredentialsMap_RoundTrip(t *testing.T) { + t.Parallel() + + creds := credentialsMap{ + "coder1.example.com": { + CoderURL: "coder1.example.com", + APIToken: "token1", + }, + "coder2.example.com:8080": { + CoderURL: "coder2.example.com:8080", + APIToken: "token2", + }, + } + + jsonData, err := json.Marshal(creds) + require.NoError(t, err) + + parsed, err := parseCredentialsJSON(jsonData) + require.NoError(t, err) + + require.Equal(t, creds, parsed) +} diff --git a/cli/sessionstore/sessionstore_other.go b/cli/sessionstore/sessionstore_other.go new file mode 100644 index 
0000000000000..a71458a360c94 --- /dev/null +++ b/cli/sessionstore/sessionstore_other.go @@ -0,0 +1,17 @@ +//go:build !windows && !darwin + +package sessionstore + +type operatingSystemKeyring struct{} + +func (operatingSystemKeyring) Set(_, _ string) error { + return ErrNotImplemented +} + +func (operatingSystemKeyring) Get(_ string) ([]byte, error) { + return nil, ErrNotImplemented +} + +func (operatingSystemKeyring) Delete(_ string) error { + return ErrNotImplemented +} diff --git a/cli/sessionstore/sessionstore_other_test.go b/cli/sessionstore/sessionstore_other_test.go new file mode 100644 index 0000000000000..b924a95d12897 --- /dev/null +++ b/cli/sessionstore/sessionstore_other_test.go @@ -0,0 +1,10 @@ +//go:build !windows && !darwin + +package sessionstore_test + +import "testing" + +func readRawKeychainCredential(t *testing.T, _ string) []byte { + t.Fatal("not implemented") + return nil +} diff --git a/cli/sessionstore/sessionstore_test.go b/cli/sessionstore/sessionstore_test.go new file mode 100644 index 0000000000000..1ecb0279918fd --- /dev/null +++ b/cli/sessionstore/sessionstore_test.go @@ -0,0 +1,408 @@ +package sessionstore_test + +import ( + "encoding/json" + "errors" + "fmt" + "net/url" + "os" + "path" + "runtime" + "testing" + "time" + + "github.com/stretchr/testify/require" + + "github.com/coder/coder/v2/cli/config" + "github.com/coder/coder/v2/cli/sessionstore" +) + +type storedCredentials map[string]struct { + CoderURL string `json:"coder_url"` + APIToken string `json:"api_token"` +} + +// Generate a test service name for use with the OS keyring. It uses a combination +// of the test name and a nanosecond timestamp to prevent collisions. 
+func keyringTestServiceName(t *testing.T) string { + t.Helper() + return t.Name() + "_" + fmt.Sprintf("%v", time.Now().UnixNano()) +} + +func TestKeyring(t *testing.T) { + t.Parallel() + + if runtime.GOOS != "windows" && runtime.GOOS != "darwin" { + t.Skip("linux is not supported yet") + } + + // This test exercises use of the operating system keyring. As a result, + // the operating system keyring is expected to be available. + + const ( + testURL = "http://127.0.0.1:1337" + testURL2 = "http://127.0.0.1:1338" + ) + + t.Run("ReadNonExistent", func(t *testing.T) { + t.Parallel() + + backend := sessionstore.NewKeyringWithService(keyringTestServiceName(t)) + srvURL, err := url.Parse(testURL) + require.NoError(t, err) + t.Cleanup(func() { _ = backend.Delete(srvURL) }) + + _, err = backend.Read(srvURL) + require.Error(t, err) + require.True(t, os.IsNotExist(err), "expected os.ErrNotExist when reading non-existent token") + }) + + t.Run("DeleteNonExistent", func(t *testing.T) { + t.Parallel() + + backend := sessionstore.NewKeyringWithService(keyringTestServiceName(t)) + srvURL, err := url.Parse(testURL) + require.NoError(t, err) + t.Cleanup(func() { _ = backend.Delete(srvURL) }) + + err = backend.Delete(srvURL) + require.Error(t, err) + require.True(t, errors.Is(err, os.ErrNotExist), "expected os.ErrNotExist when deleting non-existent token") + }) + + t.Run("WriteAndRead", func(t *testing.T) { + t.Parallel() + + backend := sessionstore.NewKeyringWithService(keyringTestServiceName(t)) + srvURL, err := url.Parse(testURL) + require.NoError(t, err) + t.Cleanup(func() { _ = backend.Delete(srvURL) }) + + dir := t.TempDir() + expSessionFile := path.Join(dir, "session") + + const inputToken = "test-keyring-token-12345" + err = backend.Write(srvURL, inputToken) + require.NoError(t, err) + + // Verify no session file was created (keyring stores in OS keyring, not file) + _, err = os.Stat(expSessionFile) + require.True(t, errors.Is(err, os.ErrNotExist), "expected session token 
file to not exist when using keyring") + + token, err := backend.Read(srvURL) + require.NoError(t, err) + require.Equal(t, inputToken, token) + + // Clean up + err = backend.Delete(srvURL) + require.NoError(t, err) + }) + + t.Run("WriteAndDelete", func(t *testing.T) { + t.Parallel() + + backend := sessionstore.NewKeyringWithService(keyringTestServiceName(t)) + srvURL, err := url.Parse(testURL) + require.NoError(t, err) + t.Cleanup(func() { _ = backend.Delete(srvURL) }) + + const inputToken = "test-keyring-token-67890" + err = backend.Write(srvURL, inputToken) + require.NoError(t, err) + + token, err := backend.Read(srvURL) + require.NoError(t, err) + require.Equal(t, inputToken, token) + + err = backend.Delete(srvURL) + require.NoError(t, err) + + _, err = backend.Read(srvURL) + require.Error(t, err) + require.True(t, os.IsNotExist(err), "expected os.ErrNotExist after deleting token") + }) + + t.Run("OverwriteToken", func(t *testing.T) { + t.Parallel() + + backend := sessionstore.NewKeyringWithService(keyringTestServiceName(t)) + srvURL, err := url.Parse(testURL) + require.NoError(t, err) + t.Cleanup(func() { _ = backend.Delete(srvURL) }) + + // Write first token + const firstToken = "first-keyring-token" + err = backend.Write(srvURL, firstToken) + require.NoError(t, err) + + token, err := backend.Read(srvURL) + require.NoError(t, err) + require.Equal(t, firstToken, token) + + // Overwrite with second token + const secondToken = "second-keyring-token" + err = backend.Write(srvURL, secondToken) + require.NoError(t, err) + + token, err = backend.Read(srvURL) + require.NoError(t, err) + require.Equal(t, secondToken, token) + + // Clean up + err = backend.Delete(srvURL) + require.NoError(t, err) + }) + + t.Run("MultipleServers", func(t *testing.T) { + t.Parallel() + + backend := sessionstore.NewKeyringWithService(keyringTestServiceName(t)) + srvURL, err := url.Parse(testURL) + require.NoError(t, err) + srvURL2, err := url.Parse(testURL2) + require.NoError(t, err) + + 
t.Cleanup(func() { + _ = backend.Delete(srvURL) + _ = backend.Delete(srvURL2) + }) + + // Write token for server 1 + const token1 = "token-for-server-1" + err = backend.Write(srvURL, token1) + require.NoError(t, err) + + // Write token for server 2 (should NOT overwrite server 1) + const token2 = "token-for-server-2" + err = backend.Write(srvURL2, token2) + require.NoError(t, err) + + // Read server 1's credential + token, err := backend.Read(srvURL) + require.NoError(t, err) + require.Equal(t, token1, token) + + // Read server 2's credential + token, err = backend.Read(srvURL2) + require.NoError(t, err) + require.Equal(t, token2, token) + + // Delete server 1's credential + err = backend.Delete(srvURL) + require.NoError(t, err) + + // Verify server 1's credential is gone + _, err = backend.Read(srvURL) + require.Error(t, err) + require.True(t, os.IsNotExist(err)) + + // Verify server 2's credential still exists + token, err = backend.Read(srvURL2) + require.NoError(t, err) + require.Equal(t, token2, token) + + // Clean up remaining credentials + err = backend.Delete(srvURL2) + require.NoError(t, err) + }) + + t.Run("StorageFormat", func(t *testing.T) { + t.Parallel() + // The storage format must remain consistent to ensure we don't break + // compatibility with other Coder related applications that may read + // or decode the same credential. 
+ + const testURL1 = "http://127.0.0.1:1337" + srv1URL, err := url.Parse(testURL1) + require.NoError(t, err) + + const testURL2 = "http://127.0.0.1:1338" + srv2URL, err := url.Parse(testURL2) + require.NoError(t, err) + + serviceName := keyringTestServiceName(t) + backend := sessionstore.NewKeyringWithService(serviceName) + t.Cleanup(func() { + _ = backend.Delete(srv1URL) + _ = backend.Delete(srv2URL) + }) + + // Write token for server 1 + const token1 = "token-server-1" + err = backend.Write(srv1URL, token1) + require.NoError(t, err) + + // Write token for server 2 (should NOT overwrite server 1's token) + const token2 = "token-server-2" + err = backend.Write(srv2URL, token2) + require.NoError(t, err) + + // Verify both credentials are stored in the raw format and can + // be extracted through the Backend API. + rawCredential := readRawKeychainCredential(t, serviceName) + + storedCreds := make(storedCredentials) + err = json.Unmarshal(rawCredential, &storedCreds) + require.NoError(t, err, "unmarshalling stored credentials") + + // Both credentials should exist + require.Len(t, storedCreds, 2) + require.Equal(t, token1, storedCreds[srv1URL.Host].APIToken) + require.Equal(t, token2, storedCreds[srv2URL.Host].APIToken) + + // Read individual credentials + token, err := backend.Read(srv1URL) + require.NoError(t, err) + require.Equal(t, token1, token) + + token, err = backend.Read(srv2URL) + require.NoError(t, err) + require.Equal(t, token2, token) + + // Cleanup + err = backend.Delete(srv1URL) + require.NoError(t, err) + err = backend.Delete(srv2URL) + require.NoError(t, err) + }) +} + +func TestFile(t *testing.T) { + const ( + testURL = "http://127.0.0.1:1337" + testURL2 = "http://127.0.0.1:1338" + ) + + t.Parallel() + + t.Run("ReadNonExistent", func(t *testing.T) { + t.Parallel() + + dir := t.TempDir() + backend := sessionstore.NewFile(func() config.Root { return config.Root(dir) }) + srvURL, err := url.Parse(testURL) + require.NoError(t, err) + + _, err = 
backend.Read(srvURL) + require.Error(t, err) + require.True(t, os.IsNotExist(err)) + }) + + t.Run("WriteAndRead", func(t *testing.T) { + t.Parallel() + + dir := t.TempDir() + backend := sessionstore.NewFile(func() config.Root { return config.Root(dir) }) + srvURL, err := url.Parse(testURL) + require.NoError(t, err) + + // Write a token + const inputToken = "test-token-12345" + err = backend.Write(srvURL, inputToken) + require.NoError(t, err) + + // Verify the session file was created + sessionFile := config.Root(dir).Session() + require.True(t, sessionFile.Exists()) + + // Read the token back + token, err := backend.Read(srvURL) + require.NoError(t, err) + require.Equal(t, inputToken, token) + }) + + t.Run("WriteAndDelete", func(t *testing.T) { + t.Parallel() + + dir := t.TempDir() + backend := sessionstore.NewFile(func() config.Root { return config.Root(dir) }) + srvURL, err := url.Parse(testURL) + require.NoError(t, err) + + // Write a token + const inputToken = "test-token-67890" + err = backend.Write(srvURL, inputToken) + require.NoError(t, err) + + // Verify the token was written + token, err := backend.Read(srvURL) + require.NoError(t, err) + require.Equal(t, inputToken, token) + + // Delete the token + err = backend.Delete(srvURL) + require.NoError(t, err) + + // Verify the token is gone + _, err = backend.Read(srvURL) + require.Error(t, err) + require.True(t, os.IsNotExist(err)) + }) + + t.Run("DeleteNonExistent", func(t *testing.T) { + t.Parallel() + + dir := t.TempDir() + backend := sessionstore.NewFile(func() config.Root { return config.Root(dir) }) + srvURL, err := url.Parse(testURL) + require.NoError(t, err) + + // Attempt to delete a non-existent token + err = backend.Delete(srvURL) + require.Error(t, err) + require.True(t, os.IsNotExist(err)) + }) + + t.Run("OverwriteToken", func(t *testing.T) { + t.Parallel() + + dir := t.TempDir() + backend := sessionstore.NewFile(func() config.Root { return config.Root(dir) }) + srvURL, err := url.Parse(testURL) + 
require.NoError(t, err) + + // Write first token + const firstToken = "first-token" + err = backend.Write(srvURL, firstToken) + require.NoError(t, err) + + token, err := backend.Read(srvURL) + require.NoError(t, err) + require.Equal(t, firstToken, token) + + // Overwrite with second token + const secondToken = "second-token" + err = backend.Write(srvURL, secondToken) + require.NoError(t, err) + + token, err = backend.Read(srvURL) + require.NoError(t, err) + require.Equal(t, secondToken, token) + }) + + t.Run("WriteIgnoresURL", func(t *testing.T) { + t.Parallel() + + dir := t.TempDir() + backend := sessionstore.NewFile(func() config.Root { return config.Root(dir) }) + srvURL, err := url.Parse(testURL) + require.NoError(t, err) + srvURL2, err := url.Parse(testURL2) + require.NoError(t, err) + + //nolint:gosec // Write with first URL test token + const firstToken = "token-for-url1" + err = backend.Write(srvURL, firstToken) + require.NoError(t, err) + + //nolint:gosec // Write with second URL - should overwrite + const secondToken = "token-for-url2" + err = backend.Write(srvURL2, secondToken) + require.NoError(t, err) + + // Should have the second token (File backend doesn't differentiate by URL) + token, err := backend.Read(srvURL) + require.NoError(t, err) + require.Equal(t, secondToken, token) + }) +} diff --git a/cli/sessionstore/sessionstore_windows.go b/cli/sessionstore/sessionstore_windows.go new file mode 100644 index 0000000000000..3dd38c19da31d --- /dev/null +++ b/cli/sessionstore/sessionstore_windows.go @@ -0,0 +1,60 @@ +//go:build windows + +package sessionstore + +import ( + "errors" + "os" + "syscall" + + "github.com/danieljoos/wincred" +) + +// operatingSystemKeyring implements keyringProvider and uses Windows Credential Manager. +// It is largely adapted from the zalando/go-keyring package. 
+type operatingSystemKeyring struct{} + +func (operatingSystemKeyring) Set(service, credential string) error { + // password may not exceed 2560 bytes (https://github.com/jaraco/keyring/issues/540#issuecomment-968329967) + if len(credential) > 2560 { + return ErrSetDataTooBig + } + + // service may not exceed 512 bytes (might need more testing) + if len(service) >= 512 { + return ErrSetDataTooBig + } + + // service may not exceed 32k but problems occur before that + // so we limit it to 30k + if len(service) > 1024*30 { + return ErrSetDataTooBig + } + + cred := wincred.NewGenericCredential(service) + cred.CredentialBlob = []byte(credential) + cred.Persist = wincred.PersistLocalMachine + return cred.Write() +} + +func (operatingSystemKeyring) Get(service string) ([]byte, error) { + cred, err := wincred.GetGenericCredential(service) + if err != nil { + if errors.Is(err, syscall.ERROR_NOT_FOUND) { + return nil, os.ErrNotExist + } + return nil, err + } + return cred.CredentialBlob, nil +} + +func (operatingSystemKeyring) Delete(service string) error { + cred, err := wincred.GetGenericCredential(service) + if err != nil { + if errors.Is(err, syscall.ERROR_NOT_FOUND) { + return os.ErrNotExist + } + return err + } + return cred.Delete() +} diff --git a/cli/sessionstore/sessionstore_windows_test.go b/cli/sessionstore/sessionstore_windows_test.go new file mode 100644 index 0000000000000..ef643d3033dba --- /dev/null +++ b/cli/sessionstore/sessionstore_windows_test.go @@ -0,0 +1,74 @@ +//go:build windows + +package sessionstore_test + +import ( + "encoding/json" + "net/url" + "os" + "testing" + + "github.com/danieljoos/wincred" + "github.com/stretchr/testify/require" + + "github.com/coder/coder/v2/cli/sessionstore" +) + +func readRawKeychainCredential(t *testing.T, serviceName string) []byte { + t.Helper() + + winCred, err := wincred.GetGenericCredential(serviceName) + if err != nil { + t.Fatal(err) + } + return winCred.CredentialBlob +} + +func 
TestWindowsKeyring_WriteReadDelete(t *testing.T) { + t.Parallel() + + const testURL = "http://127.0.0.1:1337" + srvURL, err := url.Parse(testURL) + require.NoError(t, err) + + serviceName := keyringTestServiceName(t) + backend := sessionstore.NewKeyringWithService(serviceName) + t.Cleanup(func() { _ = backend.Delete(srvURL) }) + + // Verify no token exists initially + _, err = backend.Read(srvURL) + require.ErrorIs(t, err, os.ErrNotExist) + + // Write a token + const inputToken = "test-token-12345" + err = backend.Write(srvURL, inputToken) + require.NoError(t, err) + + // Verify the credential is stored in Windows Credential Manager with correct format + winCred, err := wincred.GetGenericCredential(serviceName) + require.NoError(t, err, "getting windows credential") + + storedCreds := make(storedCredentials) + err = json.Unmarshal(winCred.CredentialBlob, &storedCreds) + require.NoError(t, err, "unmarshalling stored credentials") + + // Verify the stored values + require.Len(t, storedCreds, 1) + cred, ok := storedCreds[srvURL.Host] + require.True(t, ok, "credential for URL should exist") + require.Equal(t, inputToken, cred.APIToken) + require.Equal(t, srvURL.Host, cred.CoderURL) + + // Read the token back + token, err := backend.Read(srvURL) + require.NoError(t, err) + require.Equal(t, inputToken, token) + + // Delete the token + err = backend.Delete(srvURL) + require.NoError(t, err) + + // Verify token is deleted + _, err = backend.Read(srvURL) + require.ErrorIs(t, err, os.ErrNotExist) +} diff --git a/cli/sync.go b/cli/sync.go new file mode 100644 index 0000000000000..1d3d344ba6f67 --- /dev/null +++ b/cli/sync.go @@ -0,0 +1,35 @@ +package cli + +import ( + "github.com/coder/serpent" +) + +func (r *RootCmd) syncCommand() *serpent.Command { + var socketPath string + + cmd := &serpent.Command{ + Use: "sync", + Short: "Manage unit dependencies for coordinated startup", + Long: "Commands for orchestrating unit startup order in workspaces. 
Units are most commonly coder scripts. Use these commands to declare dependencies between units, coordinate their startup sequence, and ensure units start only after their dependencies are ready. This helps prevent race conditions and startup failures.", + Handler: func(i *serpent.Invocation) error { + return i.Command.HelpHandler(i) + }, + Children: []*serpent.Command{ + r.syncPing(&socketPath), + r.syncStart(&socketPath), + r.syncWant(&socketPath), + r.syncComplete(&socketPath), + r.syncStatus(&socketPath), + }, + Options: serpent.OptionSet{ + { + Flag: "socket-path", + Env: "CODER_AGENT_SOCKET_PATH", + Description: "Specify the path for the agent socket.", + Value: serpent.StringOf(&socketPath), + }, + }, + } + + return cmd +} diff --git a/cli/sync_complete.go b/cli/sync_complete.go new file mode 100644 index 0000000000000..88a8117d1aa7d --- /dev/null +++ b/cli/sync_complete.go @@ -0,0 +1,47 @@ +package cli + +import ( + "golang.org/x/xerrors" + + "github.com/coder/coder/v2/agent/agentsocket" + "github.com/coder/coder/v2/agent/unit" + "github.com/coder/coder/v2/cli/cliui" + "github.com/coder/serpent" +) + +func (*RootCmd) syncComplete(socketPath *string) *serpent.Command { + cmd := &serpent.Command{ + Use: "complete ", + Short: "Mark a unit as complete", + Long: "Mark a unit as complete. Indicating to other units that it has completed its work. This allows units that depend on it to proceed with their startup.", + Handler: func(i *serpent.Invocation) error { + ctx := i.Context() + + if len(i.Args) != 1 { + return xerrors.New("exactly one unit name is required") + } + unit := unit.ID(i.Args[0]) + + opts := []agentsocket.Option{} + if *socketPath != "" { + opts = append(opts, agentsocket.WithPath(*socketPath)) + } + + client, err := agentsocket.NewClient(ctx, opts...) 
+ if err != nil { + return xerrors.Errorf("connect to agent socket: %w", err) + } + defer client.Close() + + if err := client.SyncComplete(ctx, unit); err != nil { + return xerrors.Errorf("complete unit failed: %w", err) + } + + cliui.Info(i.Stdout, "Success") + + return nil + }, + } + + return cmd +} diff --git a/cli/sync_ping.go b/cli/sync_ping.go new file mode 100644 index 0000000000000..2e5e517375f06 --- /dev/null +++ b/cli/sync_ping.go @@ -0,0 +1,42 @@ +package cli + +import ( + "golang.org/x/xerrors" + + "github.com/coder/coder/v2/agent/agentsocket" + "github.com/coder/coder/v2/cli/cliui" + "github.com/coder/serpent" +) + +func (*RootCmd) syncPing(socketPath *string) *serpent.Command { + cmd := &serpent.Command{ + Use: "ping", + Short: "Test agent socket connectivity and health", + Long: "Test connectivity to the local Coder agent socket to verify the agent is running and responsive. Useful for troubleshooting startup issues or verifying the agent is accessible before running other sync commands.", + Handler: func(i *serpent.Invocation) error { + ctx := i.Context() + + opts := []agentsocket.Option{} + if *socketPath != "" { + opts = append(opts, agentsocket.WithPath(*socketPath)) + } + + client, err := agentsocket.NewClient(ctx, opts...) 
+ if err != nil { + return xerrors.Errorf("connect to agent socket: %w", err) + } + defer client.Close() + + err = client.Ping(ctx) + if err != nil { + return xerrors.Errorf("ping failed: %w", err) + } + + cliui.Info(i.Stdout, "Success") + + return nil + }, + } + + return cmd +} diff --git a/cli/sync_start.go b/cli/sync_start.go new file mode 100644 index 0000000000000..c114a9b4ade08 --- /dev/null +++ b/cli/sync_start.go @@ -0,0 +1,101 @@ +package cli + +import ( + "context" + "time" + + "golang.org/x/xerrors" + + "github.com/coder/serpent" + + "github.com/coder/coder/v2/agent/agentsocket" + "github.com/coder/coder/v2/agent/unit" + "github.com/coder/coder/v2/cli/cliui" +) + +const ( + syncPollInterval = 1 * time.Second +) + +func (*RootCmd) syncStart(socketPath *string) *serpent.Command { + var timeout time.Duration + + cmd := &serpent.Command{ + Use: "start ", + Short: "Wait until all unit dependencies are satisfied", + Long: "Wait until all dependencies are satisfied, consider the unit to have started, then allow it to proceed. This command polls until dependencies are ready, then marks the unit as started.", + Handler: func(i *serpent.Invocation) error { + ctx := i.Context() + + if len(i.Args) != 1 { + return xerrors.New("exactly one unit name is required") + } + unitName := unit.ID(i.Args[0]) + + if timeout > 0 { + var cancel context.CancelFunc + ctx, cancel = context.WithTimeout(ctx, timeout) + defer cancel() + } + + opts := []agentsocket.Option{} + if *socketPath != "" { + opts = append(opts, agentsocket.WithPath(*socketPath)) + } + + client, err := agentsocket.NewClient(ctx, opts...) 
+ if err != nil { + return xerrors.Errorf("connect to agent socket: %w", err) + } + defer client.Close() + + ready, err := client.SyncReady(ctx, unitName) + if err != nil { + return xerrors.Errorf("error checking dependencies: %w", err) + } + + if !ready { + cliui.Infof(i.Stdout, "Waiting for dependencies of unit '%s' to be satisfied...", unitName) + + ticker := time.NewTicker(syncPollInterval) + defer ticker.Stop() + + pollLoop: + for { + select { + case <-ctx.Done(): + if ctx.Err() == context.DeadlineExceeded { + return xerrors.Errorf("timeout waiting for dependencies of unit '%s'", unitName) + } + return ctx.Err() + case <-ticker.C: + ready, err := client.SyncReady(ctx, unitName) + if err != nil { + return xerrors.Errorf("error checking dependencies: %w", err) + } + if ready { + break pollLoop + } + } + } + } + + if err := client.SyncStart(ctx, unitName); err != nil { + return xerrors.Errorf("start unit failed: %w", err) + } + + cliui.Info(i.Stdout, "Success") + + return nil + }, + } + + cmd.Options = append(cmd.Options, serpent.Option{ + Flag: "timeout", + Description: "Maximum time to wait for dependencies (e.g., 30s, 5m). 
5m by default.", + Value: serpent.DurationOf(&timeout), + Default: "5m", + }) + + return cmd +} diff --git a/cli/sync_status.go b/cli/sync_status.go new file mode 100644 index 0000000000000..87e3c4ccdf6da --- /dev/null +++ b/cli/sync_status.go @@ -0,0 +1,88 @@ +package cli + +import ( + "fmt" + + "golang.org/x/xerrors" + + "github.com/coder/serpent" + + "github.com/coder/coder/v2/agent/agentsocket" + "github.com/coder/coder/v2/agent/unit" + "github.com/coder/coder/v2/cli/cliui" +) + +func (*RootCmd) syncStatus(socketPath *string) *serpent.Command { + formatter := cliui.NewOutputFormatter( + cliui.ChangeFormatterData( + cliui.TableFormat( + []agentsocket.DependencyInfo{}, + []string{ + "depends on", + "required status", + "current status", + "satisfied", + }, + ), + func(data any) (any, error) { + resp, ok := data.(agentsocket.SyncStatusResponse) + if !ok { + return nil, xerrors.Errorf("expected agentsocket.SyncStatusResponse, got %T", data) + } + return resp.Dependencies, nil + }), + cliui.JSONFormat(), + ) + + cmd := &serpent.Command{ + Use: "status ", + Short: "Show unit status and dependency state", + Long: "Show the current status of a unit, whether it is ready to start, and lists its dependencies. Shows which dependencies are satisfied and which are still pending. Supports multiple output formats.", + Handler: func(i *serpent.Invocation) error { + ctx := i.Context() + + if len(i.Args) != 1 { + return xerrors.New("exactly one unit name is required") + } + unit := unit.ID(i.Args[0]) + + opts := []agentsocket.Option{} + if *socketPath != "" { + opts = append(opts, agentsocket.WithPath(*socketPath)) + } + + client, err := agentsocket.NewClient(ctx, opts...) 
+ if err != nil { + return xerrors.Errorf("connect to agent socket: %w", err) + } + defer client.Close() + + statusResp, err := client.SyncStatus(ctx, unit) + if err != nil { + return xerrors.Errorf("get status failed: %w", err) + } + + var out string + header := fmt.Sprintf("Unit: %s\nStatus: %s\nReady: %t\n\nDependencies:\n", unit, statusResp.Status, statusResp.IsReady) + if formatter.FormatID() == "table" && len(statusResp.Dependencies) == 0 { + out = header + "No dependencies found" + } else { + out, err = formatter.Format(ctx, statusResp) + if err != nil { + return xerrors.Errorf("format status: %w", err) + } + + if formatter.FormatID() == "table" { + out = header + out + } + } + + _, _ = fmt.Fprintln(i.Stdout, out) + + return nil + }, + } + + formatter.AttachOptions(&cmd.Options) + return cmd +} diff --git a/cli/sync_test.go b/cli/sync_test.go new file mode 100644 index 0000000000000..42dc38cbe699d --- /dev/null +++ b/cli/sync_test.go @@ -0,0 +1,330 @@ +//go:build !windows + +package cli_test + +import ( + "bytes" + "context" + "os" + "path/filepath" + "testing" + "time" + + "github.com/stretchr/testify/require" + + "cdr.dev/slog" + "github.com/coder/coder/v2/agent/agentsocket" + "github.com/coder/coder/v2/cli/clitest" + "github.com/coder/coder/v2/testutil" +) + +// setupSocketServer creates an agentsocket server at a temporary path for testing. +// Returns the socket path and a cleanup function. The path should be passed to +// sync commands via the --socket-path flag. 
+func setupSocketServer(t *testing.T) (path string, cleanup func()) { + t.Helper() + + // Use a temporary socket path for each test + socketPath := filepath.Join(tempDirUnixSocket(t), "test.sock") + + // Create parent directory if needed + parentDir := filepath.Dir(socketPath) + err := os.MkdirAll(parentDir, 0o700) + require.NoError(t, err, "create socket directory") + + server, err := agentsocket.NewServer( + slog.Make().Leveled(slog.LevelDebug), + agentsocket.WithPath(socketPath), + ) + require.NoError(t, err, "create socket server") + + // Return cleanup function + return socketPath, func() { + err := server.Close() + require.NoError(t, err, "close socket server") + _ = os.Remove(socketPath) + } +} + +func TestSyncCommands_Golden(t *testing.T) { + t.Parallel() + + t.Run("ping", func(t *testing.T) { + t.Parallel() + path, cleanup := setupSocketServer(t) + defer cleanup() + + ctx := testutil.Context(t, testutil.WaitShort) + + var outBuf bytes.Buffer + inv, _ := clitest.New(t, "exp", "sync", "ping", "--socket-path", path) + inv.Stdout = &outBuf + inv.Stderr = &outBuf + + err := inv.WithContext(ctx).Run() + require.NoError(t, err) + + clitest.TestGoldenFile(t, "TestSyncCommands_Golden/ping_success", outBuf.Bytes(), nil) + }) + + t.Run("start_no_dependencies", func(t *testing.T) { + t.Parallel() + path, cleanup := setupSocketServer(t) + defer cleanup() + + ctx := testutil.Context(t, testutil.WaitShort) + + var outBuf bytes.Buffer + inv, _ := clitest.New(t, "exp", "sync", "start", "test-unit", "--socket-path", path) + inv.Stdout = &outBuf + inv.Stderr = &outBuf + + err := inv.WithContext(ctx).Run() + require.NoError(t, err) + + clitest.TestGoldenFile(t, "TestSyncCommands_Golden/start_no_dependencies", outBuf.Bytes(), nil) + }) + + t.Run("start_with_dependencies", func(t *testing.T) { + t.Parallel() + path, cleanup := setupSocketServer(t) + defer cleanup() + + ctx := testutil.Context(t, testutil.WaitShort) + + // Set up dependency: test-unit depends on dep-unit + 
client, err := agentsocket.NewClient(ctx, agentsocket.WithPath(path)) + require.NoError(t, err) + + // Declare dependency + err = client.SyncWant(ctx, "test-unit", "dep-unit") + require.NoError(t, err) + client.Close() + + // Start a goroutine to complete the dependency after a short delay + // This simulates the dependency being satisfied while start is waiting + // The delay ensures the "Waiting..." message appears in the output + done := make(chan error, 1) + go func() { + // Wait a moment to let the start command begin waiting and print the message + time.Sleep(100 * time.Millisecond) + + compCtx := context.Background() + compClient, err := agentsocket.NewClient(compCtx, agentsocket.WithPath(path)) + if err != nil { + done <- err + return + } + defer compClient.Close() + + // Start and complete the dependency unit + err = compClient.SyncStart(compCtx, "dep-unit") + if err != nil { + done <- err + return + } + err = compClient.SyncComplete(compCtx, "dep-unit") + done <- err + }() + + var outBuf bytes.Buffer + inv, _ := clitest.New(t, "exp", "sync", "start", "test-unit", "--socket-path", path) + inv.Stdout = &outBuf + inv.Stderr = &outBuf + + // Run the start command - it should wait for the dependency + err = inv.WithContext(ctx).Run() + require.NoError(t, err) + + // Ensure the completion goroutine finished + select { + case err := <-done: + require.NoError(t, err, "complete dependency") + case <-time.After(time.Second): + // Goroutine should have finished by now + } + + clitest.TestGoldenFile(t, "TestSyncCommands_Golden/start_with_dependencies", outBuf.Bytes(), nil) + }) + + t.Run("want", func(t *testing.T) { + t.Parallel() + path, cleanup := setupSocketServer(t) + defer cleanup() + + ctx := testutil.Context(t, testutil.WaitShort) + + var outBuf bytes.Buffer + inv, _ := clitest.New(t, "exp", "sync", "want", "test-unit", "dep-unit", "--socket-path", path) + inv.Stdout = &outBuf + inv.Stderr = &outBuf + + err := inv.WithContext(ctx).Run() + require.NoError(t, 
err) + + clitest.TestGoldenFile(t, "TestSyncCommands_Golden/want_success", outBuf.Bytes(), nil) + }) + + t.Run("complete", func(t *testing.T) { + t.Parallel() + path, cleanup := setupSocketServer(t) + defer cleanup() + + ctx := testutil.Context(t, testutil.WaitShort) + + // First start the unit + client, err := agentsocket.NewClient(ctx, agentsocket.WithPath(path)) + require.NoError(t, err) + err = client.SyncStart(ctx, "test-unit") + require.NoError(t, err) + client.Close() + + var outBuf bytes.Buffer + inv, _ := clitest.New(t, "exp", "sync", "complete", "test-unit", "--socket-path", path) + inv.Stdout = &outBuf + inv.Stderr = &outBuf + + err = inv.WithContext(ctx).Run() + require.NoError(t, err) + + clitest.TestGoldenFile(t, "TestSyncCommands_Golden/complete_success", outBuf.Bytes(), nil) + }) + + t.Run("status_pending", func(t *testing.T) { + t.Parallel() + path, cleanup := setupSocketServer(t) + defer cleanup() + + ctx := testutil.Context(t, testutil.WaitShort) + + // Set up a unit with unsatisfied dependency + client, err := agentsocket.NewClient(ctx, agentsocket.WithPath(path)) + require.NoError(t, err) + err = client.SyncWant(ctx, "test-unit", "dep-unit") + require.NoError(t, err) + client.Close() + + var outBuf bytes.Buffer + inv, _ := clitest.New(t, "exp", "sync", "status", "test-unit", "--socket-path", path) + inv.Stdout = &outBuf + inv.Stderr = &outBuf + + err = inv.WithContext(ctx).Run() + require.NoError(t, err) + + clitest.TestGoldenFile(t, "TestSyncCommands_Golden/status_pending", outBuf.Bytes(), nil) + }) + + t.Run("status_started", func(t *testing.T) { + t.Parallel() + path, cleanup := setupSocketServer(t) + defer cleanup() + + ctx := testutil.Context(t, testutil.WaitShort) + + // Start a unit + client, err := agentsocket.NewClient(ctx, agentsocket.WithPath(path)) + require.NoError(t, err) + err = client.SyncStart(ctx, "test-unit") + require.NoError(t, err) + client.Close() + + var outBuf bytes.Buffer + inv, _ := clitest.New(t, "exp", "sync", 
"status", "test-unit", "--socket-path", path) + inv.Stdout = &outBuf + inv.Stderr = &outBuf + + err = inv.WithContext(ctx).Run() + require.NoError(t, err) + + clitest.TestGoldenFile(t, "TestSyncCommands_Golden/status_started", outBuf.Bytes(), nil) + }) + + t.Run("status_completed", func(t *testing.T) { + t.Parallel() + path, cleanup := setupSocketServer(t) + defer cleanup() + + ctx := testutil.Context(t, testutil.WaitShort) + + // Start and complete a unit + client, err := agentsocket.NewClient(ctx, agentsocket.WithPath(path)) + require.NoError(t, err) + err = client.SyncStart(ctx, "test-unit") + require.NoError(t, err) + err = client.SyncComplete(ctx, "test-unit") + require.NoError(t, err) + client.Close() + + var outBuf bytes.Buffer + inv, _ := clitest.New(t, "exp", "sync", "status", "test-unit", "--socket-path", path) + inv.Stdout = &outBuf + inv.Stderr = &outBuf + + err = inv.WithContext(ctx).Run() + require.NoError(t, err) + + clitest.TestGoldenFile(t, "TestSyncCommands_Golden/status_completed", outBuf.Bytes(), nil) + }) + + t.Run("status_with_dependencies", func(t *testing.T) { + t.Parallel() + path, cleanup := setupSocketServer(t) + defer cleanup() + + ctx := testutil.Context(t, testutil.WaitShort) + + // Set up a unit with dependencies, some satisfied, some not + client, err := agentsocket.NewClient(ctx, agentsocket.WithPath(path)) + require.NoError(t, err) + err = client.SyncWant(ctx, "test-unit", "dep-1") + require.NoError(t, err) + err = client.SyncWant(ctx, "test-unit", "dep-2") + require.NoError(t, err) + // Complete dep-1, leave dep-2 incomplete + err = client.SyncStart(ctx, "dep-1") + require.NoError(t, err) + err = client.SyncComplete(ctx, "dep-1") + require.NoError(t, err) + client.Close() + + var outBuf bytes.Buffer + inv, _ := clitest.New(t, "exp", "sync", "status", "test-unit", "--socket-path", path) + inv.Stdout = &outBuf + inv.Stderr = &outBuf + + err = inv.WithContext(ctx).Run() + require.NoError(t, err) + + clitest.TestGoldenFile(t, 
"TestSyncCommands_Golden/status_with_dependencies", outBuf.Bytes(), nil) + }) + + t.Run("status_json_format", func(t *testing.T) { + t.Parallel() + path, cleanup := setupSocketServer(t) + defer cleanup() + + ctx := testutil.Context(t, testutil.WaitShort) + + // Set up a unit with dependencies + client, err := agentsocket.NewClient(ctx, agentsocket.WithPath(path)) + require.NoError(t, err) + err = client.SyncWant(ctx, "test-unit", "dep-unit") + require.NoError(t, err) + err = client.SyncStart(ctx, "dep-unit") + require.NoError(t, err) + err = client.SyncComplete(ctx, "dep-unit") + require.NoError(t, err) + client.Close() + + var outBuf bytes.Buffer + inv, _ := clitest.New(t, "exp", "sync", "status", "test-unit", "--output", "json", "--socket-path", path) + inv.Stdout = &outBuf + inv.Stderr = &outBuf + + err = inv.WithContext(ctx).Run() + require.NoError(t, err) + + clitest.TestGoldenFile(t, "TestSyncCommands_Golden/status_json_format", outBuf.Bytes(), nil) + }) +} diff --git a/cli/sync_want.go b/cli/sync_want.go new file mode 100644 index 0000000000000..10df920563087 --- /dev/null +++ b/cli/sync_want.go @@ -0,0 +1,49 @@ +package cli + +import ( + "golang.org/x/xerrors" + + "github.com/coder/serpent" + + "github.com/coder/coder/v2/agent/agentsocket" + "github.com/coder/coder/v2/agent/unit" + "github.com/coder/coder/v2/cli/cliui" +) + +func (*RootCmd) syncWant(socketPath *string) *serpent.Command { + cmd := &serpent.Command{ + Use: "want ", + Short: "Declare that a unit depends on another unit completing before it can start", + Long: "Declare that a unit depends on another unit completing before it can start. 
The unit specified first will not start until the second has signaled that it has completed.", + Handler: func(i *serpent.Invocation) error { + ctx := i.Context() + + if len(i.Args) != 2 { + return xerrors.New("exactly two arguments are required: unit and depends-on") + } + dependentUnit := unit.ID(i.Args[0]) + dependsOn := unit.ID(i.Args[1]) + + opts := []agentsocket.Option{} + if *socketPath != "" { + opts = append(opts, agentsocket.WithPath(*socketPath)) + } + + client, err := agentsocket.NewClient(ctx, opts...) + if err != nil { + return xerrors.Errorf("connect to agent socket: %w", err) + } + defer client.Close() + + if err := client.SyncWant(ctx, dependentUnit, dependsOn); err != nil { + return xerrors.Errorf("declare dependency failed: %w", err) + } + + cliui.Info(i.Stdout, "Success") + + return nil + }, + } + + return cmd +} diff --git a/cli/exp_task.go b/cli/task.go similarity index 91% rename from cli/exp_task.go rename to cli/task.go index b7a0ada15be42..865d1869bf850 100644 --- a/cli/exp_task.go +++ b/cli/task.go @@ -8,7 +8,7 @@ func (r *RootCmd) tasksCommand() *serpent.Command { cmd := &serpent.Command{ Use: "task", Aliases: []string{"tasks"}, - Short: "Experimental task commands.", + Short: "Manage tasks", Handler: func(i *serpent.Invocation) error { return i.Command.HelpHandler(i) }, diff --git a/cli/exp_task_create.go b/cli/task_create.go similarity index 90% rename from cli/exp_task_create.go rename to cli/task_create.go index b506d679eba3f..9f300b6336d53 100644 --- a/cli/exp_task_create.go +++ b/cli/task_create.go @@ -28,27 +28,27 @@ func (r *RootCmd) taskCreate() *serpent.Command { cmd := &serpent.Command{ Use: "create [input]", - Short: "Create an experimental task", + Short: "Create a task", Long: FormatExamples( Example{ Description: "Create a task with direct input", - Command: "coder exp task create \"Add authentication to the user service\"", + Command: "coder task create \"Add authentication to the user service\"", }, Example{ Description: 
"Create a task with stdin input", - Command: "echo \"Add authentication to the user service\" | coder exp task create", + Command: "echo \"Add authentication to the user service\" | coder task create", }, Example{ Description: "Create a task with a specific name", - Command: "coder exp task create --name task1 \"Add authentication to the user service\"", + Command: "coder task create --name task1 \"Add authentication to the user service\"", }, Example{ Description: "Create a task from a specific template / preset", - Command: "coder exp task create --template backend-dev --preset \"My Preset\" \"Add authentication to the user service\"", + Command: "coder task create --template backend-dev --preset \"My Preset\" \"Add authentication to the user service\"", }, Example{ Description: "Create a task for another user (requires appropriate permissions)", - Command: "coder exp task create --owner user@example.com \"Add authentication to the user service\"", + Command: "coder task create --owner user@example.com \"Add authentication to the user service\"", }, ), Middleware: serpent.Chain( @@ -111,8 +111,7 @@ func (r *RootCmd) taskCreate() *serpent.Command { } var ( - ctx = inv.Context() - expClient = codersdk.NewExperimentalClient(client) + ctx = inv.Context() taskInput string templateVersionID uuid.UUID @@ -208,7 +207,7 @@ func (r *RootCmd) taskCreate() *serpent.Command { templateVersionPresetID = preset.ID } - task, err := expClient.CreateTask(ctx, ownerArg, codersdk.CreateTaskRequest{ + task, err := client.CreateTask(ctx, ownerArg, codersdk.CreateTaskRequest{ Name: taskName, TemplateVersionID: templateVersionID, TemplateVersionPresetID: templateVersionPresetID, diff --git a/cli/exp_task_create_test.go b/cli/task_create_test.go similarity index 99% rename from cli/exp_task_create_test.go rename to cli/task_create_test.go index aea11e437828b..d5b4098a47e2f 100644 --- a/cli/exp_task_create_test.go +++ b/cli/task_create_test.go @@ -69,7 +69,7 @@ func TestTaskCreate(t 
*testing.T) { ActiveVersionID: templateVersionID, }, }) - case fmt.Sprintf("/api/experimental/tasks/%s", username): + case fmt.Sprintf("/api/v2/tasks/%s", username): var req codersdk.CreateTaskRequest if !httpapi.Read(ctx, w, r, &req) { return @@ -329,7 +329,7 @@ func TestTaskCreate(t *testing.T) { ctx = testutil.Context(t, testutil.WaitShort) srv = httptest.NewServer(tt.handler(t, ctx)) client = codersdk.New(testutil.MustURL(t, srv.URL)) - args = []string{"exp", "task", "create"} + args = []string{"task", "create"} sb strings.Builder err error ) diff --git a/cli/exp_task_delete.go b/cli/task_delete.go similarity index 82% rename from cli/exp_task_delete.go rename to cli/task_delete.go index 1611e4196e6c0..ac41b0192f8e7 100644 --- a/cli/exp_task_delete.go +++ b/cli/task_delete.go @@ -17,19 +17,19 @@ import ( func (r *RootCmd) taskDelete() *serpent.Command { cmd := &serpent.Command{ Use: "delete [ ...]", - Short: "Delete experimental tasks", + Short: "Delete tasks", Long: FormatExamples( Example{ Description: "Delete a single task.", - Command: "$ coder exp task delete task1", + Command: "$ coder task delete task1", }, Example{ Description: "Delete multiple tasks.", - Command: "$ coder exp task delete task1 task2 task3", + Command: "$ coder task delete task1 task2 task3", }, Example{ Description: "Delete a task without confirmation.", - Command: "$ coder exp task delete task4 --yes", + Command: "$ coder task delete task4 --yes", }, ), Middleware: serpent.Chain( @@ -44,11 +44,10 @@ func (r *RootCmd) taskDelete() *serpent.Command { if err != nil { return err } - exp := codersdk.NewExperimentalClient(client) var tasks []codersdk.Task for _, identifier := range inv.Args { - task, err := exp.TaskByIdentifier(ctx, identifier) + task, err := client.TaskByIdentifier(ctx, identifier) if err != nil { return xerrors.Errorf("resolve task %q: %w", identifier, err) } @@ -71,7 +70,7 @@ func (r *RootCmd) taskDelete() *serpent.Command { for i, task := range tasks { display := 
displayList[i] - if err := exp.DeleteTask(ctx, task.OwnerName, task.ID); err != nil { + if err := client.DeleteTask(ctx, task.OwnerName, task.ID); err != nil { return xerrors.Errorf("delete task %q: %w", display, err) } _, _ = fmt.Fprintln( diff --git a/cli/exp_task_delete_test.go b/cli/task_delete_test.go similarity index 72% rename from cli/exp_task_delete_test.go rename to cli/task_delete_test.go index e90ee8c5b19ba..2d28845c73d3d 100644 --- a/cli/exp_task_delete_test.go +++ b/cli/task_delete_test.go @@ -56,20 +56,15 @@ func TestExpTaskDelete(t *testing.T) { taskID := uuid.MustParse(id1) return func(w http.ResponseWriter, r *http.Request) { switch { - case r.Method == http.MethodGet && r.URL.Path == "/api/experimental/tasks" && r.URL.Query().Get("q") == "owner:\"me\"": + case r.Method == http.MethodGet && r.URL.Path == "/api/v2/tasks/me/exists": c.nameResolves.Add(1) - httpapi.Write(r.Context(), w, http.StatusOK, struct { - Tasks []codersdk.Task `json:"tasks"` - Count int `json:"count"` - }{ - Tasks: []codersdk.Task{{ + httpapi.Write(r.Context(), w, http.StatusOK, + codersdk.Task{ ID: taskID, Name: "exists", OwnerName: "me", - }}, - Count: 1, - }) - case r.Method == http.MethodDelete && r.URL.Path == "/api/experimental/tasks/me/"+id1: + }) + case r.Method == http.MethodDelete && r.URL.Path == "/api/v2/tasks/me/"+id1: c.deleteCalls.Add(1) w.WriteHeader(http.StatusAccepted) default: @@ -87,13 +82,13 @@ func TestExpTaskDelete(t *testing.T) { buildHandler: func(c *testCounters) http.HandlerFunc { return func(w http.ResponseWriter, r *http.Request) { switch { - case r.Method == http.MethodGet && r.URL.Path == "/api/experimental/tasks/me/"+id2: + case r.Method == http.MethodGet && r.URL.Path == "/api/v2/tasks/me/"+id2: httpapi.Write(r.Context(), w, http.StatusOK, codersdk.Task{ ID: uuid.MustParse(id2), OwnerName: "me", Name: "uuid-task", }) - case r.Method == http.MethodDelete && r.URL.Path == "/api/experimental/tasks/me/"+id2: + case r.Method == http.MethodDelete && 
r.URL.Path == "/api/v2/tasks/me/"+id2: c.deleteCalls.Add(1) w.WriteHeader(http.StatusAccepted) default: @@ -107,32 +102,26 @@ func TestExpTaskDelete(t *testing.T) { name: "Multiple_YesFlag", args: []string{"--yes", "first", id4}, buildHandler: func(c *testCounters) http.HandlerFunc { - firstID := uuid.MustParse(id3) return func(w http.ResponseWriter, r *http.Request) { switch { - case r.Method == http.MethodGet && r.URL.Path == "/api/experimental/tasks" && r.URL.Query().Get("q") == "owner:\"me\"": + case r.Method == http.MethodGet && r.URL.Path == "/api/v2/tasks/me/first": c.nameResolves.Add(1) - httpapi.Write(r.Context(), w, http.StatusOK, struct { - Tasks []codersdk.Task `json:"tasks"` - Count int `json:"count"` - }{ - Tasks: []codersdk.Task{{ - ID: firstID, - Name: "first", - OwnerName: "me", - }}, - Count: 1, + httpapi.Write(r.Context(), w, http.StatusOK, codersdk.Task{ + ID: uuid.MustParse(id3), + Name: "first", + OwnerName: "me", }) - case r.Method == http.MethodGet && r.URL.Path == "/api/experimental/tasks/me/"+id4: + case r.Method == http.MethodGet && r.URL.Path == "/api/v2/tasks/me/"+id4: + c.nameResolves.Add(1) httpapi.Write(r.Context(), w, http.StatusOK, codersdk.Task{ ID: uuid.MustParse(id4), OwnerName: "me", - Name: "uuid-task-2", + Name: "uuid-task-4", }) - case r.Method == http.MethodDelete && r.URL.Path == "/api/experimental/tasks/me/"+id3: + case r.Method == http.MethodDelete && r.URL.Path == "/api/v2/tasks/me/"+id3: c.deleteCalls.Add(1) w.WriteHeader(http.StatusAccepted) - case r.Method == http.MethodDelete && r.URL.Path == "/api/experimental/tasks/me/"+id4: + case r.Method == http.MethodDelete && r.URL.Path == "/api/v2/tasks/me/"+id4: c.deleteCalls.Add(1) w.WriteHeader(http.StatusAccepted) default: @@ -141,7 +130,7 @@ func TestExpTaskDelete(t *testing.T) { } }, wantDeleteCalls: 2, - wantNameResolves: 1, + wantNameResolves: 2, wantDeletedMessage: 2, }, { @@ -151,7 +140,7 @@ func TestExpTaskDelete(t *testing.T) { buildHandler: func(_ *testCounters) 
http.HandlerFunc { return func(w http.ResponseWriter, r *http.Request) { switch { - case r.Method == http.MethodGet && r.URL.Path == "/api/experimental/tasks" && r.URL.Query().Get("q") == "owner:\"me\"": + case r.Method == http.MethodGet && r.URL.Path == "/api/v2/tasks" && r.URL.Query().Get("q") == "owner:\"me\"": httpapi.Write(r.Context(), w, http.StatusOK, struct { Tasks []codersdk.Task `json:"tasks"` Count int `json:"count"` @@ -174,20 +163,14 @@ func TestExpTaskDelete(t *testing.T) { taskID := uuid.MustParse(id5) return func(w http.ResponseWriter, r *http.Request) { switch { - case r.Method == http.MethodGet && r.URL.Path == "/api/experimental/tasks" && r.URL.Query().Get("q") == "owner:\"me\"": + case r.Method == http.MethodGet && r.URL.Path == "/api/v2/tasks/me/bad": c.nameResolves.Add(1) - httpapi.Write(r.Context(), w, http.StatusOK, struct { - Tasks []codersdk.Task `json:"tasks"` - Count int `json:"count"` - }{ - Tasks: []codersdk.Task{{ - ID: taskID, - Name: "bad", - OwnerName: "me", - }}, - Count: 1, + httpapi.Write(r.Context(), w, http.StatusOK, codersdk.Task{ + ID: taskID, + Name: "bad", + OwnerName: "me", }) - case r.Method == http.MethodDelete && r.URL.Path == "/api/experimental/tasks/me/"+id5: + case r.Method == http.MethodDelete && r.URL.Path == "/api/v2/tasks/me/bad": httpapi.InternalServerError(w, xerrors.New("boom")) default: httpapi.InternalServerError(w, xerrors.New("unwanted path: "+r.Method+" "+r.URL.Path)) @@ -210,7 +193,7 @@ func TestExpTaskDelete(t *testing.T) { client := codersdk.New(testutil.MustURL(t, srv.URL)) - args := append([]string{"exp", "task", "delete"}, tc.args...) + args := append([]string{"task", "delete"}, tc.args...) inv, root := clitest.New(t, args...) 
inv = inv.WithContext(ctx) clitest.SetupConfig(t, client, root) diff --git a/cli/exp_task_list.go b/cli/task_list.go similarity index 90% rename from cli/exp_task_list.go rename to cli/task_list.go index 89b313a1f49c5..1f13c85a05920 100644 --- a/cli/exp_task_list.go +++ b/cli/task_list.go @@ -69,27 +69,27 @@ func (r *RootCmd) taskList() *serpent.Command { cmd := &serpent.Command{ Use: "list", - Short: "List experimental tasks", + Short: "List tasks", Long: FormatExamples( Example{ Description: "List tasks for the current user.", - Command: "coder exp task list", + Command: "coder task list", }, Example{ Description: "List tasks for a specific user.", - Command: "coder exp task list --user someone-else", + Command: "coder task list --user someone-else", }, Example{ Description: "List all tasks you can view.", - Command: "coder exp task list --all", + Command: "coder task list --all", }, Example{ Description: "List all your running tasks.", - Command: "coder exp task list --status running", + Command: "coder task list --status running", }, Example{ Description: "As above, but only show IDs.", - Command: "coder exp task list --status running --quiet", + Command: "coder task list --status running --quiet", }, ), Aliases: []string{"ls"}, @@ -135,14 +135,13 @@ func (r *RootCmd) taskList() *serpent.Command { } ctx := inv.Context() - exp := codersdk.NewExperimentalClient(client) targetUser := strings.TrimSpace(user) if targetUser == "" && !all { targetUser = codersdk.Me } - tasks, err := exp.Tasks(ctx, &codersdk.TasksFilter{ + tasks, err := client.Tasks(ctx, &codersdk.TasksFilter{ Owner: targetUser, Status: codersdk.TaskStatus(statusFilter), }) diff --git a/cli/exp_task_list_test.go b/cli/task_list_test.go similarity index 75% rename from cli/exp_task_list_test.go rename to cli/task_list_test.go index d297310dc4fc3..c9b91486bb8c5 100644 --- a/cli/exp_task_list_test.go +++ b/cli/task_list_test.go @@ -2,7 +2,6 @@ package cli_test import ( "bytes" - "context" "database/sql" 
"encoding/json" "io" @@ -19,10 +18,7 @@ import ( "github.com/coder/coder/v2/cli/clitest" "github.com/coder/coder/v2/coderd/coderdtest" "github.com/coder/coder/v2/coderd/database" - "github.com/coder/coder/v2/coderd/database/dbauthz" "github.com/coder/coder/v2/coderd/database/dbfake" - "github.com/coder/coder/v2/coderd/database/dbgen" - "github.com/coder/coder/v2/coderd/database/dbtime" "github.com/coder/coder/v2/coderd/util/slice" "github.com/coder/coder/v2/codersdk" "github.com/coder/coder/v2/pty/ptytest" @@ -43,76 +39,22 @@ func makeAITask(t *testing.T, db database.Store, orgID, adminID, ownerID uuid.UU }, }).Do() - ws := database.WorkspaceTable{ + build := dbfake.WorkspaceBuild(t, db, database.WorkspaceTable{ OrganizationID: orgID, OwnerID: ownerID, TemplateID: tv.Template.ID, - } - build := dbfake.WorkspaceBuild(t, db, ws). + }). Seed(database.WorkspaceBuild{ TemplateVersionID: tv.TemplateVersion.ID, Transition: transition, - }).WithAgent().Do() - dbgen.WorkspaceBuildParameters(t, db, []database.WorkspaceBuildParameter{ - { - WorkspaceBuildID: build.Build.ID, - Name: codersdk.AITaskPromptParameterName, - Value: prompt, - }, - }) - agents, err := db.GetWorkspaceAgentsByWorkspaceAndBuildNumber( - dbauthz.AsSystemRestricted(context.Background()), - database.GetWorkspaceAgentsByWorkspaceAndBuildNumberParams{ - WorkspaceID: build.Workspace.ID, - BuildNumber: build.Build.BuildNumber, - }, - ) - require.NoError(t, err) - require.NotEmpty(t, agents) - agentID := agents[0].ID - - // Create a workspace app and set it as the sidebar app. - app := dbgen.WorkspaceApp(t, db, database.WorkspaceApp{ - AgentID: agentID, - Slug: "task-sidebar", - DisplayName: "Task Sidebar", - External: false, - }) - - // Update build flags to reference the sidebar app and HasAITask=true. 
- err = db.UpdateWorkspaceBuildFlagsByID( - dbauthz.AsSystemRestricted(context.Background()), - database.UpdateWorkspaceBuildFlagsByIDParams{ - ID: build.Build.ID, - HasAITask: sql.NullBool{Bool: true, Valid: true}, - HasExternalAgent: sql.NullBool{Bool: false, Valid: false}, - SidebarAppID: uuid.NullUUID{UUID: app.ID, Valid: true}, - UpdatedAt: build.Build.UpdatedAt, - }, - ) - require.NoError(t, err) - - // Create a task record in the tasks table for the new data model. - task := dbgen.Task(t, db, database.TaskTable{ - OrganizationID: orgID, - OwnerID: ownerID, - Name: build.Workspace.Name, - WorkspaceID: uuid.NullUUID{UUID: build.Workspace.ID, Valid: true}, - TemplateVersionID: tv.TemplateVersion.ID, - TemplateParameters: []byte("{}"), - Prompt: prompt, - CreatedAt: dbtime.Now(), - }) - - // Link the task to the workspace app. - dbgen.TaskWorkspaceApp(t, db, database.TaskWorkspaceApp{ - TaskID: task.ID, - WorkspaceBuildNumber: build.Build.BuildNumber, - WorkspaceAgentID: uuid.NullUUID{UUID: agentID, Valid: true}, - WorkspaceAppID: uuid.NullUUID{UUID: app.ID, Valid: true}, - }) - - return task + }). + WithAgent(). + WithTask(database.TaskTable{ + Prompt: prompt, + }, nil). 
+ Do() + + return build.Task } func TestExpTaskList(t *testing.T) { @@ -127,7 +69,7 @@ func TestExpTaskList(t *testing.T) { owner := coderdtest.CreateFirstUser(t, client) memberClient, _ := coderdtest.CreateAnotherUser(t, client, owner.OrganizationID) - inv, root := clitest.New(t, "exp", "task", "list") + inv, root := clitest.New(t, "task", "list") clitest.SetupConfig(t, memberClient, root) pty := ptytest.New(t).Attach(inv) @@ -151,7 +93,7 @@ func TestExpTaskList(t *testing.T) { wantPrompt := "build me a web app" task := makeAITask(t, db, owner.OrganizationID, owner.UserID, memberUser.ID, database.WorkspaceTransitionStart, wantPrompt) - inv, root := clitest.New(t, "exp", "task", "list", "--column", "id,name,status,initial prompt") + inv, root := clitest.New(t, "task", "list", "--column", "id,name,status,initial prompt") clitest.SetupConfig(t, memberClient, root) pty := ptytest.New(t).Attach(inv) @@ -180,7 +122,7 @@ func TestExpTaskList(t *testing.T) { pausedTask := makeAITask(t, db, owner.OrganizationID, owner.UserID, memberUser.ID, database.WorkspaceTransitionStop, "stop me please") // Use JSON output to reliably validate filtering. - inv, root := clitest.New(t, "exp", "task", "list", "--status=paused", "--output=json") + inv, root := clitest.New(t, "task", "list", "--status=paused", "--output=json") clitest.SetupConfig(t, memberClient, root) ctx := testutil.Context(t, testutil.WaitShort) @@ -211,7 +153,7 @@ func TestExpTaskList(t *testing.T) { _ = makeAITask(t, db, owner.OrganizationID, owner.UserID, memberUser.ID, database.WorkspaceTransitionStart, "other-task") task := makeAITask(t, db, owner.OrganizationID, owner.UserID, owner.UserID, database.WorkspaceTransitionStart, "me-task") - inv, root := clitest.New(t, "exp", "task", "list", "--user", "me") + inv, root := clitest.New(t, "task", "list", "--user", "me") //nolint:gocritic // Owner client is intended here smoke test the member task not showing up. 
clitest.SetupConfig(t, client, root) @@ -238,7 +180,7 @@ func TestExpTaskList(t *testing.T) { task2 := makeAITask(t, db, owner.OrganizationID, owner.UserID, memberUser.ID, database.WorkspaceTransitionStop, "stop me please") // Given: We add the `--quiet` flag - inv, root := clitest.New(t, "exp", "task", "list", "--quiet") + inv, root := clitest.New(t, "task", "list", "--quiet") clitest.SetupConfig(t, memberClient, root) ctx := testutil.Context(t, testutil.WaitShort) @@ -282,7 +224,7 @@ func TestExpTaskList_OwnerCanListOthers(t *testing.T) { t.Parallel() // As the owner, list only member A tasks. - inv, root := clitest.New(t, "exp", "task", "list", "--user", memberAUser.Username, "--output=json") + inv, root := clitest.New(t, "task", "list", "--user", memberAUser.Username, "--output=json") //nolint:gocritic // Owner client is intended here to allow member tasks to be listed. clitest.SetupConfig(t, ownerClient, root) @@ -310,7 +252,7 @@ func TestExpTaskList_OwnerCanListOthers(t *testing.T) { // As the owner, list all tasks to verify both member tasks are present. // Use JSON output to reliably validate filtering. - inv, root := clitest.New(t, "exp", "task", "list", "--all", "--output=json") + inv, root := clitest.New(t, "task", "list", "--all", "--output=json") //nolint:gocritic // Owner client is intended here to allow all tasks to be listed. 
clitest.SetupConfig(t, ownerClient, root) diff --git a/cli/exp_task_logs.go b/cli/task_logs.go similarity index 85% rename from cli/exp_task_logs.go rename to cli/task_logs.go index d1d4a826cd9ce..87e2e8112fda1 100644 --- a/cli/exp_task_logs.go +++ b/cli/task_logs.go @@ -28,7 +28,7 @@ func (r *RootCmd) taskLogs() *serpent.Command { Long: FormatExamples( Example{ Description: "Show logs for a given task.", - Command: "coder exp task logs task1", + Command: "coder task logs task1", }), Middleware: serpent.Chain( serpent.RequireNArgs(1), @@ -41,16 +41,15 @@ func (r *RootCmd) taskLogs() *serpent.Command { var ( ctx = inv.Context() - exp = codersdk.NewExperimentalClient(client) identifier = inv.Args[0] ) - task, err := exp.TaskByIdentifier(ctx, identifier) + task, err := client.TaskByIdentifier(ctx, identifier) if err != nil { return xerrors.Errorf("resolve task %q: %w", identifier, err) } - logs, err := exp.TaskLogs(ctx, codersdk.Me, task.ID) + logs, err := client.TaskLogs(ctx, codersdk.Me, task.ID) if err != nil { return xerrors.Errorf("get task logs: %w", err) } diff --git a/cli/exp_task_logs_test.go b/cli/task_logs_test.go similarity index 92% rename from cli/exp_task_logs_test.go rename to cli/task_logs_test.go index 859ff135d0d63..bad8811c10562 100644 --- a/cli/exp_task_logs_test.go +++ b/cli/task_logs_test.go @@ -46,7 +46,7 @@ func Test_TaskLogs(t *testing.T) { userClient := client // user already has access to their own workspace var stdout strings.Builder - inv, root := clitest.New(t, "exp", "task", "logs", task.Name, "--output", "json") + inv, root := clitest.New(t, "task", "logs", task.Name, "--output", "json") inv.Stdout = &stdout clitest.SetupConfig(t, userClient, root) @@ -72,7 +72,7 @@ func Test_TaskLogs(t *testing.T) { userClient := client var stdout strings.Builder - inv, root := clitest.New(t, "exp", "task", "logs", task.ID.String(), "--output", "json") + inv, root := clitest.New(t, "task", "logs", task.ID.String(), "--output", "json") inv.Stdout = 
&stdout clitest.SetupConfig(t, userClient, root) @@ -98,7 +98,7 @@ func Test_TaskLogs(t *testing.T) { userClient := client var stdout strings.Builder - inv, root := clitest.New(t, "exp", "task", "logs", task.ID.String()) + inv, root := clitest.New(t, "task", "logs", task.ID.String()) inv.Stdout = &stdout clitest.SetupConfig(t, userClient, root) @@ -121,7 +121,7 @@ func Test_TaskLogs(t *testing.T) { userClient, _ := coderdtest.CreateAnotherUser(t, client, owner.OrganizationID) var stdout strings.Builder - inv, root := clitest.New(t, "exp", "task", "logs", "doesnotexist") + inv, root := clitest.New(t, "task", "logs", "doesnotexist") inv.Stdout = &stdout clitest.SetupConfig(t, userClient, root) @@ -139,7 +139,7 @@ func Test_TaskLogs(t *testing.T) { userClient, _ := coderdtest.CreateAnotherUser(t, client, owner.OrganizationID) var stdout strings.Builder - inv, root := clitest.New(t, "exp", "task", "logs", uuid.Nil.String()) + inv, root := clitest.New(t, "task", "logs", uuid.Nil.String()) inv.Stdout = &stdout clitest.SetupConfig(t, userClient, root) @@ -155,7 +155,7 @@ func Test_TaskLogs(t *testing.T) { client, task := setupCLITaskTest(ctx, t, fakeAgentAPITaskLogsErr(assert.AnError)) userClient := client - inv, root := clitest.New(t, "exp", "task", "logs", task.ID.String()) + inv, root := clitest.New(t, "task", "logs", task.ID.String()) clitest.SetupConfig(t, userClient, root) err := inv.WithContext(ctx).Run() diff --git a/cli/exp_task_send.go b/cli/task_send.go similarity index 78% rename from cli/exp_task_send.go rename to cli/task_send.go index e8985d55d97da..97f1555a838a5 100644 --- a/cli/exp_task_send.go +++ b/cli/task_send.go @@ -17,10 +17,10 @@ func (r *RootCmd) taskSend() *serpent.Command { Short: "Send input to a task", Long: FormatExamples(Example{ Description: "Send direct input to a task.", - Command: "coder exp task send task1 \"Please also add unit tests\"", + Command: "coder task send task1 \"Please also add unit tests\"", }, Example{ Description: "Send 
input from stdin to a task.", - Command: "echo \"Please also add unit tests\" | coder exp task send task1 --stdin", + Command: "echo \"Please also add unit tests\" | coder task send task1 --stdin", }), Middleware: serpent.RequireRangeArgs(1, 2), Options: serpent.OptionSet{ @@ -39,7 +39,6 @@ func (r *RootCmd) taskSend() *serpent.Command { var ( ctx = inv.Context() - exp = codersdk.NewExperimentalClient(client) identifier = inv.Args[0] taskInput string @@ -60,12 +59,12 @@ func (r *RootCmd) taskSend() *serpent.Command { taskInput = inv.Args[1] } - task, err := exp.TaskByIdentifier(ctx, identifier) + task, err := client.TaskByIdentifier(ctx, identifier) if err != nil { return xerrors.Errorf("resolve task: %w", err) } - if err = exp.TaskSend(ctx, codersdk.Me, task.ID, codersdk.TaskSendRequest{Input: taskInput}); err != nil { + if err = client.TaskSend(ctx, codersdk.Me, task.ID, codersdk.TaskSendRequest{Input: taskInput}); err != nil { return xerrors.Errorf("send input to task: %w", err) } diff --git a/cli/exp_task_send_test.go b/cli/task_send_test.go similarity index 90% rename from cli/exp_task_send_test.go rename to cli/task_send_test.go index 3529cf2e0b9b5..e36fce443f1d3 100644 --- a/cli/exp_task_send_test.go +++ b/cli/task_send_test.go @@ -30,7 +30,7 @@ func Test_TaskSend(t *testing.T) { userClient := client var stdout strings.Builder - inv, root := clitest.New(t, "exp", "task", "send", task.Name, "carry on with the task") + inv, root := clitest.New(t, "task", "send", task.Name, "carry on with the task") inv.Stdout = &stdout clitest.SetupConfig(t, userClient, root) @@ -46,7 +46,7 @@ func Test_TaskSend(t *testing.T) { userClient := client var stdout strings.Builder - inv, root := clitest.New(t, "exp", "task", "send", task.ID.String(), "carry on with the task") + inv, root := clitest.New(t, "task", "send", task.ID.String(), "carry on with the task") inv.Stdout = &stdout clitest.SetupConfig(t, userClient, root) @@ -62,7 +62,7 @@ func Test_TaskSend(t *testing.T) { 
userClient := client var stdout strings.Builder - inv, root := clitest.New(t, "exp", "task", "send", task.Name, "--stdin") + inv, root := clitest.New(t, "task", "send", task.Name, "--stdin") inv.Stdout = &stdout inv.Stdin = strings.NewReader("carry on with the task") clitest.SetupConfig(t, userClient, root) @@ -80,7 +80,7 @@ func Test_TaskSend(t *testing.T) { userClient, _ := coderdtest.CreateAnotherUser(t, client, owner.OrganizationID) var stdout strings.Builder - inv, root := clitest.New(t, "exp", "task", "send", "doesnotexist", "some task input") + inv, root := clitest.New(t, "task", "send", "doesnotexist", "some task input") inv.Stdout = &stdout clitest.SetupConfig(t, userClient, root) @@ -98,7 +98,7 @@ func Test_TaskSend(t *testing.T) { userClient, _ := coderdtest.CreateAnotherUser(t, client, owner.OrganizationID) var stdout strings.Builder - inv, root := clitest.New(t, "exp", "task", "send", uuid.Nil.String(), "some task input") + inv, root := clitest.New(t, "task", "send", uuid.Nil.String(), "some task input") inv.Stdout = &stdout clitest.SetupConfig(t, userClient, root) @@ -114,7 +114,7 @@ func Test_TaskSend(t *testing.T) { userClient, task := setupCLITaskTest(ctx, t, fakeAgentAPITaskSendErr(t, assert.AnError)) var stdout strings.Builder - inv, root := clitest.New(t, "exp", "task", "send", task.Name, "some task input") + inv, root := clitest.New(t, "task", "send", task.Name, "some task input") inv.Stdout = &stdout clitest.SetupConfig(t, userClient, root) diff --git a/cli/exp_task_status.go b/cli/task_status.go similarity index 95% rename from cli/exp_task_status.go rename to cli/task_status.go index 1bd77f5f7f5b3..7c91cd55e9637 100644 --- a/cli/exp_task_status.go +++ b/cli/task_status.go @@ -47,11 +47,11 @@ func (r *RootCmd) taskStatus() *serpent.Command { Long: FormatExamples( Example{ Description: "Show the status of a given task.", - Command: "coder exp task status task1", + Command: "coder task status task1", }, Example{ Description: "Watch the status 
of a given task until it completes (idle or stopped).", - Command: "coder exp task status task1 --watch", + Command: "coder task status task1 --watch", }, ), Use: "status", @@ -83,10 +83,9 @@ func (r *RootCmd) taskStatus() *serpent.Command { } ctx := i.Context() - exp := codersdk.NewExperimentalClient(client) identifier := i.Args[0] - task, err := exp.TaskByIdentifier(ctx, identifier) + task, err := client.TaskByIdentifier(ctx, identifier) if err != nil { return err } @@ -107,7 +106,7 @@ func (r *RootCmd) taskStatus() *serpent.Command { // TODO: implement streaming updates instead of polling lastStatusRow := tsr for range t.C { - task, err := exp.TaskByID(ctx, task.ID) + task, err := client.TaskByID(ctx, task.ID) if err != nil { return err } diff --git a/cli/exp_task_status_test.go b/cli/task_status_test.go similarity index 69% rename from cli/exp_task_status_test.go rename to cli/task_status_test.go index f15222d51b0fb..0c0d7facaf72b 100644 --- a/cli/exp_task_status_test.go +++ b/cli/task_status_test.go @@ -36,17 +36,9 @@ func Test_TaskStatus(t *testing.T) { hf: func(ctx context.Context, _ time.Time) func(w http.ResponseWriter, r *http.Request) { return func(w http.ResponseWriter, r *http.Request) { switch r.URL.Path { - case "/api/experimental/tasks": - if r.URL.Query().Get("q") == "owner:\"me\"" { - httpapi.Write(ctx, w, http.StatusOK, struct { - Tasks []codersdk.Task `json:"tasks"` - Count int `json:"count"` - }{ - Tasks: []codersdk.Task{}, - Count: 0, - }) - return - } + case "/api/v2/tasks/me/doesnotexist": + httpapi.ResourceNotFound(w) + return default: t.Errorf("unexpected path: %s", r.URL.Path) } @@ -60,35 +52,7 @@ func Test_TaskStatus(t *testing.T) { hf: func(ctx context.Context, now time.Time) func(w http.ResponseWriter, r *http.Request) { return func(w http.ResponseWriter, r *http.Request) { switch r.URL.Path { - case "/api/experimental/tasks": - if r.URL.Query().Get("q") == "owner:\"me\"" { - httpapi.Write(ctx, w, http.StatusOK, struct { - Tasks 
[]codersdk.Task `json:"tasks"` - Count int `json:"count"` - }{ - Tasks: []codersdk.Task{{ - ID: uuid.MustParse("11111111-1111-1111-1111-111111111111"), - Name: "exists", - OwnerName: "me", - WorkspaceStatus: codersdk.WorkspaceStatusRunning, - CreatedAt: now, - UpdatedAt: now, - CurrentState: &codersdk.TaskStateEntry{ - State: codersdk.TaskStateWorking, - Timestamp: now, - Message: "Thinking furiously...", - }, - WorkspaceAgentHealth: &codersdk.WorkspaceAgentHealth{ - Healthy: true, - }, - WorkspaceAgentLifecycle: ptr.Ref(codersdk.WorkspaceAgentLifecycleReady), - Status: codersdk.TaskStatusActive, - }}, - Count: 1, - }) - return - } - case "/api/experimental/tasks/me/11111111-1111-1111-1111-111111111111": + case "/api/v2/tasks/me/exists": httpapi.Write(ctx, w, http.StatusOK, codersdk.Task{ ID: uuid.MustParse("11111111-1111-1111-1111-111111111111"), WorkspaceStatus: codersdk.WorkspaceStatusRunning, @@ -124,31 +88,22 @@ func Test_TaskStatus(t *testing.T) { var calls atomic.Int64 return func(w http.ResponseWriter, r *http.Request) { switch r.URL.Path { - case "/api/experimental/tasks": - if r.URL.Query().Get("q") == "owner:\"me\"" { - // Return initial task state for --watch test - httpapi.Write(ctx, w, http.StatusOK, struct { - Tasks []codersdk.Task `json:"tasks"` - Count int `json:"count"` - }{ - Tasks: []codersdk.Task{{ - ID: uuid.MustParse("11111111-1111-1111-1111-111111111111"), - Name: "exists", - OwnerName: "me", - WorkspaceStatus: codersdk.WorkspaceStatusPending, - CreatedAt: now.Add(-5 * time.Second), - UpdatedAt: now.Add(-5 * time.Second), - WorkspaceAgentHealth: &codersdk.WorkspaceAgentHealth{ - Healthy: true, - }, - WorkspaceAgentLifecycle: ptr.Ref(codersdk.WorkspaceAgentLifecycleReady), - Status: codersdk.TaskStatusPending, - }}, - Count: 1, - }) - return - } - case "/api/experimental/tasks/me/11111111-1111-1111-1111-111111111111": + case "/api/v2/tasks/me/exists": + httpapi.Write(ctx, w, http.StatusOK, codersdk.Task{ + ID: 
uuid.MustParse("11111111-1111-1111-1111-111111111111"), + Name: "exists", + OwnerName: "me", + WorkspaceStatus: codersdk.WorkspaceStatusPending, + CreatedAt: now.Add(-5 * time.Second), + UpdatedAt: now.Add(-5 * time.Second), + WorkspaceAgentHealth: &codersdk.WorkspaceAgentHealth{ + Healthy: true, + }, + WorkspaceAgentLifecycle: ptr.Ref(codersdk.WorkspaceAgentLifecycleReady), + Status: codersdk.TaskStatusPending, + }) + return + case "/api/v2/tasks/me/11111111-1111-1111-1111-111111111111": defer calls.Add(1) switch calls.Load() { case 0: @@ -234,6 +189,7 @@ func Test_TaskStatus(t *testing.T) { "owner_id": "00000000-0000-0000-0000-000000000000", "owner_name": "me", "name": "exists", + "display_name": "Task exists", "template_id": "00000000-0000-0000-0000-000000000000", "template_version_id": "00000000-0000-0000-0000-000000000000", "template_name": "", @@ -263,40 +219,19 @@ func Test_TaskStatus(t *testing.T) { ts := time.Date(2025, 8, 26, 12, 34, 56, 0, time.UTC) return func(w http.ResponseWriter, r *http.Request) { switch r.URL.Path { - case "/api/experimental/tasks": - if r.URL.Query().Get("q") == "owner:\"me\"" { - httpapi.Write(ctx, w, http.StatusOK, struct { - Tasks []codersdk.Task `json:"tasks"` - Count int `json:"count"` - }{ - Tasks: []codersdk.Task{{ - ID: uuid.MustParse("11111111-1111-1111-1111-111111111111"), - Name: "exists", - OwnerName: "me", - WorkspaceStatus: codersdk.WorkspaceStatusRunning, - CreatedAt: ts, - UpdatedAt: ts, - CurrentState: &codersdk.TaskStateEntry{ - State: codersdk.TaskStateWorking, - Timestamp: ts.Add(time.Second), - Message: "Thinking furiously...", - }, - WorkspaceAgentHealth: &codersdk.WorkspaceAgentHealth{ - Healthy: true, - }, - WorkspaceAgentLifecycle: ptr.Ref(codersdk.WorkspaceAgentLifecycleReady), - Status: codersdk.TaskStatusActive, - }}, - Count: 1, - }) - return - } - case "/api/experimental/tasks/me/11111111-1111-1111-1111-111111111111": + case "/api/v2/tasks/me/exists": httpapi.Write(ctx, w, http.StatusOK, 
codersdk.Task{ - ID: uuid.MustParse("11111111-1111-1111-1111-111111111111"), - WorkspaceStatus: codersdk.WorkspaceStatusRunning, - CreatedAt: ts, - UpdatedAt: ts, + ID: uuid.MustParse("11111111-1111-1111-1111-111111111111"), + Name: "exists", + DisplayName: "Task exists", + OwnerName: "me", + WorkspaceAgentHealth: &codersdk.WorkspaceAgentHealth{ + Healthy: true, + }, + WorkspaceAgentLifecycle: ptr.Ref(codersdk.WorkspaceAgentLifecycleReady), + WorkspaceStatus: codersdk.WorkspaceStatusRunning, + CreatedAt: ts, + UpdatedAt: ts, CurrentState: &codersdk.TaskStateEntry{ State: codersdk.TaskStateWorking, Timestamp: ts.Add(time.Second), @@ -321,7 +256,7 @@ func Test_TaskStatus(t *testing.T) { srv = httptest.NewServer(http.HandlerFunc(tc.hf(ctx, now))) client = codersdk.New(testutil.MustURL(t, srv.URL)) sb = strings.Builder{} - args = []string{"exp", "task", "status", "--watch-interval", testutil.IntervalFast.String()} + args = []string{"task", "status", "--watch-interval", testutil.IntervalFast.String()} ) t.Cleanup(srv.Close) diff --git a/cli/exp_task_test.go b/cli/task_test.go similarity index 90% rename from cli/exp_task_test.go rename to cli/task_test.go index d2d3728aeb280..fca04372600d8 100644 --- a/cli/exp_task_test.go +++ b/cli/task_test.go @@ -53,7 +53,6 @@ func Test_Tasks(t *testing.T) { taskName = strings.ReplaceAll(testutil.GetRandomName(t), "_", "-") ) - //nolint:paralleltest // The sub-tests of this test must be run sequentially. 
for _, tc := range []struct { name string cmdArgs []string @@ -61,14 +60,14 @@ func Test_Tasks(t *testing.T) { }{ { name: "create task", - cmdArgs: []string{"exp", "task", "create", "test task input for " + t.Name(), "--name", taskName, "--template", taskTpl.Name}, + cmdArgs: []string{"task", "create", "test task input for " + t.Name(), "--name", taskName, "--template", taskTpl.Name}, assertFn: func(stdout string, userClient *codersdk.Client) { require.Contains(t, stdout, taskName, "task name should be in output") }, }, { name: "list tasks after create", - cmdArgs: []string{"exp", "task", "list", "--output", "json"}, + cmdArgs: []string{"task", "list", "--output", "json"}, assertFn: func(stdout string, userClient *codersdk.Client) { var tasks []codersdk.Task err := json.NewDecoder(strings.NewReader(stdout)).Decode(&tasks) @@ -89,7 +88,7 @@ func Test_Tasks(t *testing.T) { }, { name: "get task status after create", - cmdArgs: []string{"exp", "task", "status", taskName, "--output", "json"}, + cmdArgs: []string{"task", "status", taskName, "--output", "json"}, assertFn: func(stdout string, userClient *codersdk.Client) { var task codersdk.Task require.NoError(t, json.NewDecoder(strings.NewReader(stdout)).Decode(&task), "should unmarshal task status") @@ -99,12 +98,12 @@ func Test_Tasks(t *testing.T) { }, { name: "send task message", - cmdArgs: []string{"exp", "task", "send", taskName, "hello"}, + cmdArgs: []string{"task", "send", taskName, "hello"}, // Assertions for this happen in the fake agent API handler. 
}, { name: "read task logs", - cmdArgs: []string{"exp", "task", "logs", taskName, "--output", "json"}, + cmdArgs: []string{"task", "logs", taskName, "--output", "json"}, assertFn: func(stdout string, userClient *codersdk.Client) { var logs []codersdk.TaskLogEntry require.NoError(t, json.NewDecoder(strings.NewReader(stdout)).Decode(&logs), "should unmarshal task logs") @@ -119,12 +118,11 @@ func Test_Tasks(t *testing.T) { }, { name: "delete task", - cmdArgs: []string{"exp", "task", "delete", taskName, "--yes"}, + cmdArgs: []string{"task", "delete", taskName, "--yes"}, assertFn: func(stdout string, userClient *codersdk.Client) { // The task should eventually no longer show up in the list of tasks testutil.Eventually(ctx, t, func(ctx context.Context) bool { - expClient := codersdk.NewExperimentalClient(userClient) - tasks, err := expClient.Tasks(ctx, &codersdk.TasksFilter{}) + tasks, err := userClient.Tasks(ctx, &codersdk.TasksFilter{}) if !assert.NoError(t, err) { return false } @@ -135,16 +133,15 @@ func Test_Tasks(t *testing.T) { }, }, } { - t.Run(tc.name, func(t *testing.T) { - var stdout strings.Builder - inv, root := clitest.New(t, tc.cmdArgs...) - inv.Stdout = &stdout - clitest.SetupConfig(t, userClient, root) - require.NoError(t, inv.WithContext(ctx).Run()) - if tc.assertFn != nil { - tc.assertFn(stdout.String(), userClient) - } - }) + t.Logf("test case: %q", tc.name) + var stdout strings.Builder + inv, root := clitest.New(t, tc.cmdArgs...) 
+ inv.Stdout = &stdout + clitest.SetupConfig(t, userClient, root) + require.NoError(t, inv.WithContext(ctx).Run(), tc.name) + if tc.assertFn != nil { + tc.assertFn(stdout.String(), userClient) + } } } @@ -250,8 +247,7 @@ func setupCLITaskTest(ctx context.Context, t *testing.T, agentAPIHandlers map[st template := createAITaskTemplate(t, client, owner.OrganizationID, withSidebarURL(fakeAPI.URL()), withAgentToken(authToken)) wantPrompt := "test prompt" - exp := codersdk.NewExperimentalClient(userClient) - task, err := exp.CreateTask(ctx, codersdk.Me, codersdk.CreateTaskRequest{ + task, err := userClient.CreateTask(ctx, codersdk.Me, codersdk.CreateTaskRequest{ TemplateVersionID: template.ActiveVersionID, Input: wantPrompt, Name: "test-task", @@ -293,7 +289,6 @@ func createAITaskTemplate(t *testing.T, client *codersdk.Client, orgID uuid.UUID { Type: &proto.Response_Plan{ Plan: &proto.PlanComplete{ - Parameters: []*proto.RichParameter{{Name: codersdk.AITaskPromptParameterName, Type: "string"}}, HasAiTasks: true, }, }, @@ -328,9 +323,7 @@ func createAITaskTemplate(t *testing.T, client *codersdk.Client, orgID uuid.UUID }, AiTasks: []*proto.AITask{ { - SidebarApp: &proto.AITaskSidebarApp{ - Id: taskAppID.String(), - }, + AppId: taskAppID.String(), }, }, }, diff --git a/cli/testdata/TestSyncCommands_Golden/complete_success.golden b/cli/testdata/TestSyncCommands_Golden/complete_success.golden new file mode 100644 index 0000000000000..35821117c8757 --- /dev/null +++ b/cli/testdata/TestSyncCommands_Golden/complete_success.golden @@ -0,0 +1 @@ +Success diff --git a/cli/testdata/TestSyncCommands_Golden/ping_success.golden b/cli/testdata/TestSyncCommands_Golden/ping_success.golden new file mode 100644 index 0000000000000..35821117c8757 --- /dev/null +++ b/cli/testdata/TestSyncCommands_Golden/ping_success.golden @@ -0,0 +1 @@ +Success diff --git a/cli/testdata/TestSyncCommands_Golden/start_no_dependencies.golden b/cli/testdata/TestSyncCommands_Golden/start_no_dependencies.golden 
new file mode 100644 index 0000000000000..35821117c8757 --- /dev/null +++ b/cli/testdata/TestSyncCommands_Golden/start_no_dependencies.golden @@ -0,0 +1 @@ +Success diff --git a/cli/testdata/TestSyncCommands_Golden/start_with_dependencies.golden b/cli/testdata/TestSyncCommands_Golden/start_with_dependencies.golden new file mode 100644 index 0000000000000..23256e9ad1275 --- /dev/null +++ b/cli/testdata/TestSyncCommands_Golden/start_with_dependencies.golden @@ -0,0 +1,2 @@ +Waiting for dependencies of unit 'test-unit' to be satisfied... +Success diff --git a/cli/testdata/TestSyncCommands_Golden/status_completed.golden b/cli/testdata/TestSyncCommands_Golden/status_completed.golden new file mode 100644 index 0000000000000..3fee6f914a988 --- /dev/null +++ b/cli/testdata/TestSyncCommands_Golden/status_completed.golden @@ -0,0 +1,6 @@ +Unit: test-unit +Status: completed +Ready: true + +Dependencies: +No dependencies found diff --git a/cli/testdata/TestSyncCommands_Golden/status_json_format.golden b/cli/testdata/TestSyncCommands_Golden/status_json_format.golden new file mode 100644 index 0000000000000..d84b2c9d715e6 --- /dev/null +++ b/cli/testdata/TestSyncCommands_Golden/status_json_format.golden @@ -0,0 +1,13 @@ +{ + "unit_name": "test-unit", + "status": "pending", + "is_ready": true, + "dependencies": [ + { + "depends_on": "dep-unit", + "required_status": "completed", + "current_status": "completed", + "is_satisfied": true + } + ] +} diff --git a/cli/testdata/TestSyncCommands_Golden/status_pending.golden b/cli/testdata/TestSyncCommands_Golden/status_pending.golden new file mode 100644 index 0000000000000..5c7e32726317a --- /dev/null +++ b/cli/testdata/TestSyncCommands_Golden/status_pending.golden @@ -0,0 +1,7 @@ +Unit: test-unit +Status: pending +Ready: false + +Dependencies: +DEPENDS ON REQUIRED STATUS CURRENT STATUS SATISFIED +dep-unit completed not registered false diff --git a/cli/testdata/TestSyncCommands_Golden/status_started.golden 
b/cli/testdata/TestSyncCommands_Golden/status_started.golden new file mode 100644 index 0000000000000..0f9fc841fbb49 --- /dev/null +++ b/cli/testdata/TestSyncCommands_Golden/status_started.golden @@ -0,0 +1,6 @@ +Unit: test-unit +Status: started +Ready: true + +Dependencies: +No dependencies found diff --git a/cli/testdata/TestSyncCommands_Golden/status_with_dependencies.golden b/cli/testdata/TestSyncCommands_Golden/status_with_dependencies.golden new file mode 100644 index 0000000000000..50d86f5051835 --- /dev/null +++ b/cli/testdata/TestSyncCommands_Golden/status_with_dependencies.golden @@ -0,0 +1,8 @@ +Unit: test-unit +Status: pending +Ready: false + +Dependencies: +DEPENDS ON REQUIRED STATUS CURRENT STATUS SATISFIED +dep-1 completed completed true +dep-2 completed not registered false diff --git a/cli/testdata/TestSyncCommands_Golden/want_success.golden b/cli/testdata/TestSyncCommands_Golden/want_success.golden new file mode 100644 index 0000000000000..35821117c8757 --- /dev/null +++ b/cli/testdata/TestSyncCommands_Golden/want_success.golden @@ -0,0 +1 @@ +Success diff --git a/cli/testdata/coder_--help.golden b/cli/testdata/coder_--help.golden index 09dd4c3bce3a5..ab13e2af71e0f 100644 --- a/cli/testdata/coder_--help.golden +++ b/cli/testdata/coder_--help.golden @@ -53,6 +53,7 @@ SUBCOMMANDS: stop Stop a workspace support Commands for troubleshooting issues with a Coder deployment. + task Manage tasks templates Manage templates tokens Manage personal access tokens unfavorite Remove a workspace from your favorites @@ -108,6 +109,13 @@ variables or flags. --url url, $CODER_URL URL to a deployment. + --use-keyring bool, $CODER_USE_KEYRING (default: true) + Store and retrieve session tokens using the operating system keyring. + This flag is ignored and file-based storage is used when + --global-config is set or keyring usage is not supported on the + current platform. Set to false to force file-based storage on + supported platforms. 
+ -v, --verbose bool, $CODER_VERBOSE Enable verbose output. diff --git a/cli/testdata/coder_agent_--help.golden b/cli/testdata/coder_agent_--help.golden index 1f25fc6941ea1..d262c0d0c7618 100644 --- a/cli/testdata/coder_agent_--help.golden +++ b/cli/testdata/coder_agent_--help.golden @@ -67,6 +67,12 @@ OPTIONS: --script-data-dir string, $CODER_AGENT_SCRIPT_DATA_DIR (default: /tmp) Specify the location for storing script data. + --socket-path string, $CODER_AGENT_SOCKET_PATH + Specify the path for the agent socket. + + --socket-server-enabled bool, $CODER_AGENT_SOCKET_SERVER_ENABLED (default: false) + Enable the agent socket server. + --ssh-max-timeout duration, $CODER_AGENT_SSH_MAX_TIMEOUT (default: 72h) Specify the max timeout for a SSH connection, it is advisable to set it to a minimum of 60s, but no more than 72h. diff --git a/cli/testdata/coder_exp_sync_--help.golden b/cli/testdata/coder_exp_sync_--help.golden new file mode 100644 index 0000000000000..b30447351cdc6 --- /dev/null +++ b/cli/testdata/coder_exp_sync_--help.golden @@ -0,0 +1,27 @@ +coder v0.0.0-devel + +USAGE: + coder exp sync [flags] + + Manage unit dependencies for coordinated startup + + Commands for orchestrating unit startup order in workspaces. Units are most + commonly coder scripts. Use these commands to declare dependencies between + units, coordinate their startup sequence, and ensure units start only after + their dependencies are ready. This helps prevent race conditions and startup + failures. + +SUBCOMMANDS: + complete Mark a unit as complete + ping Test agent socket connectivity and health + start Wait until all unit dependencies are satisfied + status Show unit status and dependency state + want Declare that a unit depends on another unit completing before it + can start + +OPTIONS: + --socket-path string, $CODER_AGENT_SOCKET_PATH + Specify the path for the agent socket. + +——— +Run `coder --help` for a list of global options. 
diff --git a/cli/testdata/coder_exp_sync_complete_--help.golden b/cli/testdata/coder_exp_sync_complete_--help.golden new file mode 100644 index 0000000000000..580d5a588b61a --- /dev/null +++ b/cli/testdata/coder_exp_sync_complete_--help.golden @@ -0,0 +1,12 @@ +coder v0.0.0-devel + +USAGE: + coder exp sync complete + + Mark a unit as complete + + Mark a unit as complete. Indicating to other units that it has completed its + work. This allows units that depend on it to proceed with their startup. + +——— +Run `coder --help` for a list of global options. diff --git a/cli/testdata/coder_exp_sync_ping_--help.golden b/cli/testdata/coder_exp_sync_ping_--help.golden new file mode 100644 index 0000000000000..58444940b69cd --- /dev/null +++ b/cli/testdata/coder_exp_sync_ping_--help.golden @@ -0,0 +1,13 @@ +coder v0.0.0-devel + +USAGE: + coder exp sync ping + + Test agent socket connectivity and health + + Test connectivity to the local Coder agent socket to verify the agent is + running and responsive. Useful for troubleshooting startup issues or verifying + the agent is accessible before running other sync commands. + +——— +Run `coder --help` for a list of global options. diff --git a/cli/testdata/coder_exp_sync_start_--help.golden b/cli/testdata/coder_exp_sync_start_--help.golden new file mode 100644 index 0000000000000..d87483130da9b --- /dev/null +++ b/cli/testdata/coder_exp_sync_start_--help.golden @@ -0,0 +1,17 @@ +coder v0.0.0-devel + +USAGE: + coder exp sync start [flags] + + Wait until all unit dependencies are satisfied + + Wait until all dependencies are satisfied, consider the unit to have started, + then allow it to proceed. This command polls until dependencies are ready, + then marks the unit as started. + +OPTIONS: + --timeout duration (default: 5m) + Maximum time to wait for dependencies (e.g., 30s, 5m). 5m by default. + +——— +Run `coder --help` for a list of global options. 
diff --git a/cli/testdata/coder_exp_sync_status_--help.golden b/cli/testdata/coder_exp_sync_status_--help.golden new file mode 100644 index 0000000000000..ce7d8617be172 --- /dev/null +++ b/cli/testdata/coder_exp_sync_status_--help.golden @@ -0,0 +1,20 @@ +coder v0.0.0-devel + +USAGE: + coder exp sync status [flags] + + Show unit status and dependency state + + Show the current status of a unit, whether it is ready to start, and lists its + dependencies. Shows which dependencies are satisfied and which are still + pending. Supports multiple output formats. + +OPTIONS: + -c, --column [depends on|required status|current status|satisfied] (default: depends on,required status,current status,satisfied) + Columns to display in table output. + + -o, --output table|json (default: table) + Output format. + +——— +Run `coder --help` for a list of global options. diff --git a/cli/testdata/coder_exp_sync_want_--help.golden b/cli/testdata/coder_exp_sync_want_--help.golden new file mode 100644 index 0000000000000..0076f94ea90f8 --- /dev/null +++ b/cli/testdata/coder_exp_sync_want_--help.golden @@ -0,0 +1,13 @@ +coder v0.0.0-devel + +USAGE: + coder exp sync want + + Declare that a unit depends on another unit completing before it can start + + Declare that a unit depends on another unit completing before it can start. + The unit specified first will not start until the second has signaled that it + has completed. + +——— +Run `coder --help` for a list of global options. 
diff --git a/cli/testdata/coder_list_--output_json.golden b/cli/testdata/coder_list_--output_json.golden index 66afcf563dfbd..8da57536338f8 100644 --- a/cli/testdata/coder_list_--output_json.golden +++ b/cli/testdata/coder_list_--output_json.golden @@ -90,6 +90,7 @@ "allow_renames": false, "favorite": false, "next_start_at": "====[timestamp]=====", - "is_prebuild": false + "is_prebuild": false, + "task_id": null } ] diff --git a/cli/testdata/coder_login_--help.golden b/cli/testdata/coder_login_--help.golden index e4109a494ed39..96129d8a55c57 100644 --- a/cli/testdata/coder_login_--help.golden +++ b/cli/testdata/coder_login_--help.golden @@ -5,6 +5,10 @@ USAGE: Authenticate with Coder deployment + By default, the session token is stored in the operating system keyring on + macOS and Windows and a plain text file on Linux. Use the --use-keyring flag + or CODER_USE_KEYRING environment variable to change the storage mechanism. + OPTIONS: --first-user-email string, $CODER_FIRST_USER_EMAIL Specifies an email address to use if creating the first user for the diff --git a/cli/testdata/coder_provisioner_list_--output_json.golden b/cli/testdata/coder_provisioner_list_--output_json.golden index 32de8cbd857f4..3749b159aeebf 100644 --- a/cli/testdata/coder_provisioner_list_--output_json.golden +++ b/cli/testdata/coder_provisioner_list_--output_json.golden @@ -7,7 +7,7 @@ "last_seen_at": "====[timestamp]=====", "name": "test-daemon", "version": "v0.0.0-devel", - "api_version": "1.11", + "api_version": "1.12", "provisioners": [ "echo" ], diff --git a/cli/testdata/coder_server_--help.golden b/cli/testdata/coder_server_--help.golden index 447ce1ae4fce2..1444f8096148f 100644 --- a/cli/testdata/coder_server_--help.golden +++ b/cli/testdata/coder_server_--help.golden @@ -80,6 +80,50 @@ OPTIONS: Periodically check for new releases of Coder and inform the owner. The check is performed once per day. 
+AI BRIDGE OPTIONS: + --aibridge-anthropic-base-url string, $CODER_AIBRIDGE_ANTHROPIC_BASE_URL (default: https://api.anthropic.com/) + The base URL of the Anthropic API. + + --aibridge-anthropic-key string, $CODER_AIBRIDGE_ANTHROPIC_KEY + The key to authenticate against the Anthropic API. + + --aibridge-bedrock-access-key string, $CODER_AIBRIDGE_BEDROCK_ACCESS_KEY + The access key to authenticate against the AWS Bedrock API. + + --aibridge-bedrock-access-key-secret string, $CODER_AIBRIDGE_BEDROCK_ACCESS_KEY_SECRET + The access key secret to use with the access key to authenticate + against the AWS Bedrock API. + + --aibridge-bedrock-model string, $CODER_AIBRIDGE_BEDROCK_MODEL (default: global.anthropic.claude-sonnet-4-5-20250929-v1:0) + The model to use when making requests to the AWS Bedrock API. + + --aibridge-bedrock-region string, $CODER_AIBRIDGE_BEDROCK_REGION + The AWS Bedrock API region. + + --aibridge-bedrock-small-fastmodel string, $CODER_AIBRIDGE_BEDROCK_SMALL_FAST_MODEL (default: global.anthropic.claude-haiku-4-5-20251001-v1:0) + The small fast model to use when making requests to the AWS Bedrock + API. Claude Code uses Haiku-class models to perform background tasks. + See + https://docs.claude.com/en/docs/claude-code/settings#environment-variables. + + --aibridge-retention duration, $CODER_AIBRIDGE_RETENTION (default: 60d) + Length of time to retain data such as interceptions and all related + records (token, prompt, tool use). + + --aibridge-enabled bool, $CODER_AIBRIDGE_ENABLED (default: false) + Whether to start an in-memory aibridged instance. + + --aibridge-inject-coder-mcp-tools bool, $CODER_AIBRIDGE_INJECT_CODER_MCP_TOOLS (default: false) + Whether to inject Coder's MCP tools into intercepted AI Bridge + requests (requires the "oauth2" and "mcp-server-http" experiments to + be enabled). + + --aibridge-openai-base-url string, $CODER_AIBRIDGE_OPENAI_BASE_URL (default: https://api.openai.com/v1/) + The base URL of the OpenAI API. 
+ + --aibridge-openai-key string, $CODER_AIBRIDGE_OPENAI_KEY + The key to authenticate against the OpenAI API. + CLIENT OPTIONS: These options change the behavior of how clients interact with the Coder. Clients include the Coder CLI, Coder Desktop, IDE extensions, and the web UI. diff --git a/cli/testdata/coder_task_--help.golden b/cli/testdata/coder_task_--help.golden new file mode 100644 index 0000000000000..c6fa004de06af --- /dev/null +++ b/cli/testdata/coder_task_--help.golden @@ -0,0 +1,19 @@ +coder v0.0.0-devel + +USAGE: + coder task + + Manage tasks + + Aliases: tasks + +SUBCOMMANDS: + create Create a task + delete Delete tasks + list List tasks + logs Show a task's logs + send Send input to a task + status Show the status of a task. + +——— +Run `coder --help` for a list of global options. diff --git a/cli/testdata/coder_task_create_--help.golden b/cli/testdata/coder_task_create_--help.golden new file mode 100644 index 0000000000000..4bded64e67c80 --- /dev/null +++ b/cli/testdata/coder_task_create_--help.golden @@ -0,0 +1,51 @@ +coder v0.0.0-devel + +USAGE: + coder task create [flags] [input] + + Create a task + + - Create a task with direct input: + + $ coder task create "Add authentication to the user service" + + - Create a task with stdin input: + + $ echo "Add authentication to the user service" | coder task create + + - Create a task with a specific name: + + $ coder task create --name task1 "Add authentication to the user service" + + - Create a task from a specific template / preset: + + $ coder task create --template backend-dev --preset "My Preset" "Add + authentication to the user service" + + - Create a task for another user (requires appropriate permissions): + + $ coder task create --owner user@example.com "Add authentication to the + user service" + +OPTIONS: + -O, --org string, $CODER_ORGANIZATION + Select which organization (uuid or name) to use. + + --name string + Specify the name of the task. 
If you do not specify one, a name will + be generated for you. + + --owner string (default: me) + Specify the owner of the task. Defaults to the current user. + + --preset string, $CODER_TASK_PRESET_NAME (default: none) + -q, --quiet bool + Only display the created task's ID. + + --stdin bool + Reads from stdin for the task input. + + --template string, $CODER_TASK_TEMPLATE_NAME + --template-version string, $CODER_TASK_TEMPLATE_VERSION +——— +Run `coder --help` for a list of global options. diff --git a/cli/testdata/coder_task_delete_--help.golden b/cli/testdata/coder_task_delete_--help.golden new file mode 100644 index 0000000000000..b0169410a9293 --- /dev/null +++ b/cli/testdata/coder_task_delete_--help.golden @@ -0,0 +1,27 @@ +coder v0.0.0-devel + +USAGE: + coder task delete [flags] [ ...] + + Delete tasks + + Aliases: rm + + - Delete a single task.: + + $ $ coder task delete task1 + + - Delete multiple tasks.: + + $ $ coder task delete task1 task2 task3 + + - Delete a task without confirmation.: + + $ $ coder task delete task4 --yes + +OPTIONS: + -y, --yes bool + Bypass prompts. + +——— +Run `coder --help` for a list of global options. diff --git a/cli/testdata/coder_task_list_--help.golden b/cli/testdata/coder_task_list_--help.golden new file mode 100644 index 0000000000000..8836e065449bd --- /dev/null +++ b/cli/testdata/coder_task_list_--help.golden @@ -0,0 +1,50 @@ +coder v0.0.0-devel + +USAGE: + coder task list [flags] + + List tasks + + Aliases: ls + + - List tasks for the current user.: + + $ coder task list + + - List tasks for a specific user.: + + $ coder task list --user someone-else + + - List all tasks you can view.: + + $ coder task list --all + + - List all your running tasks.: + + $ coder task list --status running + + - As above, but only show IDs.: + + $ coder task list --status running --quiet + +OPTIONS: + -a, --all bool (default: false) + List tasks for all users you can view. 
+ + -c, --column [id|organization id|owner id|owner name|owner avatar url|name|display name|template id|template version id|template name|template display name|template icon|workspace id|workspace name|workspace status|workspace build number|workspace agent id|workspace agent lifecycle|workspace agent health|workspace app id|initial prompt|status|state|message|created at|updated at|state changed] (default: name,status,state,state changed,message) + Columns to display in table output. + + -o, --output table|json (default: table) + Output format. + + -q, --quiet bool (default: false) + Only display task IDs. + + --status pending|initializing|active|paused|error|unknown + Filter by task status. + + --user string + List tasks for the specified user (username, "me"). + +——— +Run `coder --help` for a list of global options. diff --git a/cli/testdata/coder_task_logs_--help.golden b/cli/testdata/coder_task_logs_--help.golden new file mode 100644 index 0000000000000..5175249b6d1d3 --- /dev/null +++ b/cli/testdata/coder_task_logs_--help.golden @@ -0,0 +1,20 @@ +coder v0.0.0-devel + +USAGE: + coder task logs [flags] + + Show a task's logs + + - Show logs for a given task.: + + $ coder task logs task1 + +OPTIONS: + -c, --column [id|content|type|time] (default: type,content) + Columns to display in table output. + + -o, --output table|json (default: table) + Output format. + +——— +Run `coder --help` for a list of global options. 
diff --git a/cli/testdata/coder_task_send_--help.golden b/cli/testdata/coder_task_send_--help.golden new file mode 100644 index 0000000000000..d0966008b41a3 --- /dev/null +++ b/cli/testdata/coder_task_send_--help.golden @@ -0,0 +1,21 @@ +coder v0.0.0-devel + +USAGE: + coder task send [flags] [ | --stdin] + + Send input to a task + + - Send direct input to a task.: + + $ coder task send task1 "Please also add unit tests" + + - Send input from stdin to a task.: + + $ echo "Please also add unit tests" | coder task send task1 --stdin + +OPTIONS: + --stdin bool + Reads the input from stdin. + +——— +Run `coder --help` for a list of global options. diff --git a/cli/testdata/coder_task_status_--help.golden b/cli/testdata/coder_task_status_--help.golden new file mode 100644 index 0000000000000..f1a1ed62381be --- /dev/null +++ b/cli/testdata/coder_task_status_--help.golden @@ -0,0 +1,30 @@ +coder v0.0.0-devel + +USAGE: + coder task status [flags] + + Show the status of a task. + + Aliases: stat + + - Show the status of a given task.: + + $ coder task status task1 + + - Watch the status of a given task until it completes (idle or stopped).: + + $ coder task status task1 --watch + +OPTIONS: + -c, --column [id|organization id|owner id|owner name|owner avatar url|name|display name|template id|template version id|template name|template display name|template icon|workspace id|workspace name|workspace status|workspace build number|workspace agent id|workspace agent lifecycle|workspace agent health|workspace app id|initial prompt|status|state|message|created at|updated at|state changed|healthy] (default: state changed,status,healthy,state,message) + Columns to display in table output. + + -o, --output table|json (default: table) + Output format. + + --watch bool (default: false) + Watch the task status output. This will stream updates to the terminal + until the underlying workspace is stopped. + +——— +Run `coder --help` for a list of global options. 
diff --git a/cli/testdata/server-config.yaml.golden b/cli/testdata/server-config.yaml.golden index cbabf0474f291..c8f1f94a85f23 100644 --- a/cli/testdata/server-config.yaml.golden +++ b/cli/testdata/server-config.yaml.golden @@ -714,8 +714,7 @@ workspace_prebuilds: # (default: 3, type: int) failure_hard_limit: 3 aibridge: - # Whether to start an in-memory aibridged instance ("aibridge" experiment must be - # enabled, too). + # Whether to start an in-memory aibridged instance. # (default: false, type: bool) enabled: false # The base URL of the OpenAI API. @@ -748,3 +747,11 @@ aibridge: # https://docs.claude.com/en/docs/claude-code/settings#environment-variables. # (default: global.anthropic.claude-haiku-4-5-20251001-v1:0, type: string) bedrock_small_fast_model: global.anthropic.claude-haiku-4-5-20251001-v1:0 + # Whether to inject Coder's MCP tools into intercepted AI Bridge requests + # (requires the "oauth2" and "mcp-server-http" experiments to be enabled). + # (default: false, type: bool) + inject_coder_mcp_tools: false + # Length of time to retain data such as interceptions and all related records + # (token, prompt, tool use). + # (default: 60d, type: duration) + retention: 1440h0m0s diff --git a/coderd/agentapi/api.go b/coderd/agentapi/api.go index f8f72a5d0d6ea..252e6b5c08449 100644 --- a/coderd/agentapi/api.go +++ b/coderd/agentapi/api.go @@ -36,6 +36,8 @@ import ( "github.com/coder/quartz" ) +const workspaceCacheRefreshInterval = 5 * time.Minute + // API implements the DRPC agent API interface from agent/proto. This struct is // instantiated once per agent connection and kept alive for the duration of the // session. 
@@ -54,6 +56,8 @@ type API struct { *SubAgentAPI *tailnet.DRPCService + cachedWorkspaceFields *CachedWorkspaceFields + mu sync.Mutex } @@ -65,7 +69,7 @@ type Options struct { WorkspaceID uuid.UUID OrganizationID uuid.UUID - Ctx context.Context + AuthenticatedCtx context.Context Log slog.Logger Clock quartz.Clock Database database.Store @@ -92,7 +96,7 @@ type Options struct { UpdateAgentMetricsFn func(ctx context.Context, labels prometheusmetrics.AgentMetricLabels, metrics []*agentproto.Stats_Metric) } -func New(opts Options) *API { +func New(opts Options, workspace database.Workspace) *API { if opts.Clock == nil { opts.Clock = quartz.NewReal() } @@ -114,6 +118,13 @@ func New(opts Options) *API { WorkspaceID: opts.WorkspaceID, } + // Don't cache details for prebuilds, though the cached fields will eventually be updated + // by the refresh routine once the prebuild workspace is claimed. + api.cachedWorkspaceFields = &CachedWorkspaceFields{} + if !workspace.IsPrebuild() { + api.cachedWorkspaceFields.UpdateValues(workspace) + } + api.AnnouncementBannerAPI = &AnnouncementBannerAPI{ appearanceFetcher: opts.AppearanceFetcher, } @@ -139,6 +150,7 @@ func New(opts Options) *API { api.StatsAPI = &StatsAPI{ AgentFn: api.agent, + Workspace: api.cachedWorkspaceFields, Database: opts.Database, Log: opts.Log, StatsReporter: opts.StatsReporter, @@ -162,10 +174,11 @@ func New(opts Options) *API { } api.MetadataAPI = &MetadataAPI{ - AgentFn: api.agent, - Database: opts.Database, - Pubsub: opts.Pubsub, - Log: opts.Log, + AgentFn: api.agent, + Workspace: api.cachedWorkspaceFields, + Database: opts.Database, + Pubsub: opts.Pubsub, + Log: opts.Log, } api.LogsAPI = &LogsAPI{ @@ -205,6 +218,10 @@ func New(opts Options) *API { Database: opts.Database, } + // Start background cache refresh loop to handle workspace changes + // like prebuild claims where owner_id and other fields may be modified in the DB. 
+ go api.startCacheRefreshLoop(opts.AuthenticatedCtx) + return api } @@ -254,6 +271,56 @@ func (a *API) agent(ctx context.Context) (database.WorkspaceAgent, error) { return agent, nil } +// refreshCachedWorkspace periodically updates the cached workspace fields. +// This ensures that changes like prebuild claims (which modify owner_id, name, etc.) +// are eventually reflected in the cache without requiring agent reconnection. +func (a *API) refreshCachedWorkspace(ctx context.Context) { + ws, err := a.opts.Database.GetWorkspaceByID(ctx, a.opts.WorkspaceID) + if err != nil { + a.opts.Log.Warn(ctx, "failed to refresh cached workspace fields", slog.Error(err)) + a.cachedWorkspaceFields.Clear() + return + } + + if ws.IsPrebuild() { + return + } + + // If we still have the same values, skip the update and logging calls. + if a.cachedWorkspaceFields.identity.Equal(database.WorkspaceIdentityFromWorkspace(ws)) { + return + } + // Update fields that can change during workspace lifecycle (e.g., AutostartSchedule) + a.cachedWorkspaceFields.UpdateValues(ws) + + a.opts.Log.Debug(ctx, "refreshed cached workspace fields", + slog.F("workspace_id", ws.ID), + slog.F("owner_id", ws.OwnerID), + slog.F("name", ws.Name)) +} + +// startCacheRefreshLoop runs a background goroutine that periodically refreshes +// the cached workspace fields. This is primarily needed to handle prebuild claims +// where the owner_id and other fields change while the agent connection persists. +func (a *API) startCacheRefreshLoop(ctx context.Context) { + // Refresh every 5 minutes. This provides a reasonable balance between: + // - Keeping cache fresh for prebuild claims and other workspace updates + // - Minimizing unnecessary database queries + ticker := a.opts.Clock.TickerFunc(ctx, workspaceCacheRefreshInterval, func() error { + a.refreshCachedWorkspace(ctx) + return nil + }, "cache_refresh") + + // We need to wait on the ticker exiting. 
+ _ = ticker.Wait() + + a.opts.Log.Debug(ctx, "cache refresh loop exited, invalidating the workspace cache on agent API", + slog.F("workspace_id", a.cachedWorkspaceFields.identity.ID), + slog.F("owner_username", a.cachedWorkspaceFields.identity.OwnerUsername), + slog.F("name", a.cachedWorkspaceFields.identity.Name)) + a.cachedWorkspaceFields.Clear() +} + func (a *API) publishWorkspaceUpdate(ctx context.Context, agent *database.WorkspaceAgent, kind wspubsub.WorkspaceEventKind) error { a.opts.PublishWorkspaceUpdateFn(ctx, a.opts.OwnerID, wspubsub.WorkspaceEvent{ Kind: kind, diff --git a/coderd/agentapi/cached_workspace.go b/coderd/agentapi/cached_workspace.go new file mode 100644 index 0000000000000..7c1bc0ff63359 --- /dev/null +++ b/coderd/agentapi/cached_workspace.go @@ -0,0 +1,52 @@ +package agentapi + +import ( + "sync" + + "github.com/coder/coder/v2/coderd/database" +) + +// CachedWorkspaceFields contains workspace data that is safe to cache for the +// duration of an agent connection. These fields are used to reduce database calls +// in high-frequency operations like stats reporting and metadata updates. +// Prebuild workspaces should not be cached using this struct within the API struct; +// however, some of these fields for a workspace can be updated live, so there is a +// routine in the API for refreshing the workspace on a timed interval. +// +// IMPORTANT: ACL fields (GroupACL, UserACL) are NOT cached because they can be +// modified in the database and we must use fresh data for authorization checks. 
+type CachedWorkspaceFields struct { + lock sync.RWMutex + + identity database.WorkspaceIdentity +} + +func (cws *CachedWorkspaceFields) Clear() { + cws.lock.Lock() + defer cws.lock.Unlock() + cws.identity = database.WorkspaceIdentity{} +} + +func (cws *CachedWorkspaceFields) UpdateValues(ws database.Workspace) { + cws.lock.Lock() + defer cws.lock.Unlock() + cws.identity.ID = ws.ID + cws.identity.OwnerID = ws.OwnerID + cws.identity.OrganizationID = ws.OrganizationID + cws.identity.TemplateID = ws.TemplateID + cws.identity.Name = ws.Name + cws.identity.OwnerUsername = ws.OwnerUsername + cws.identity.TemplateName = ws.TemplateName + cws.identity.AutostartSchedule = ws.AutostartSchedule +} + +// AsWorkspaceIdentity returns the cached workspace identity and true, or a zero value and false when nothing is cached (cleared, or the workspace was a prebuild). +func (cws *CachedWorkspaceFields) AsWorkspaceIdentity() (database.WorkspaceIdentity, bool) { + cws.lock.RLock() + defer cws.lock.RUnlock() + // Should we be more explicit about all fields being set to be valid? 
+ if cws.identity.Equal(database.WorkspaceIdentity{}) { + return database.WorkspaceIdentity{}, false + } + return cws.identity, true +} diff --git a/coderd/agentapi/cached_workspace_test.go b/coderd/agentapi/cached_workspace_test.go new file mode 100644 index 0000000000000..bc1231bf706b2 --- /dev/null +++ b/coderd/agentapi/cached_workspace_test.go @@ -0,0 +1,97 @@ +package agentapi_test + +import ( + "testing" + + "github.com/google/uuid" + "github.com/stretchr/testify/require" + + "github.com/coder/coder/v2/coderd/agentapi" + "github.com/coder/coder/v2/coderd/database" +) + +func TestCacheClear(t *testing.T) { + t.Parallel() + + var ( + user = database.User{ + ID: uuid.New(), + Username: "bill", + } + template = database.Template{ + ID: uuid.New(), + Name: "tpl", + } + workspace = database.Workspace{ + ID: uuid.New(), + OwnerID: user.ID, + OwnerUsername: user.Username, + TemplateID: template.ID, + Name: "xyz", + TemplateName: template.Name, + } + workspaceAsCacheFields = agentapi.CachedWorkspaceFields{} + ) + + workspaceAsCacheFields.UpdateValues(database.Workspace{ + ID: workspace.ID, + OwnerID: workspace.OwnerID, + OwnerUsername: workspace.OwnerUsername, + TemplateID: workspace.TemplateID, + Name: workspace.Name, + TemplateName: workspace.TemplateName, + AutostartSchedule: workspace.AutostartSchedule, + }, + ) + + emptyCws := agentapi.CachedWorkspaceFields{} + workspaceAsCacheFields.Clear() + wsi, ok := workspaceAsCacheFields.AsWorkspaceIdentity() + require.False(t, ok) + ecwsi, ok := emptyCws.AsWorkspaceIdentity() + require.False(t, ok) + require.True(t, ecwsi.Equal(wsi)) +} + +func TestCacheUpdate(t *testing.T) { + t.Parallel() + + var ( + user = database.User{ + ID: uuid.New(), + Username: "bill", + } + template = database.Template{ + ID: uuid.New(), + Name: "tpl", + } + workspace = database.Workspace{ + ID: uuid.New(), + OwnerID: user.ID, + OwnerUsername: user.Username, + TemplateID: template.ID, + Name: "xyz", + TemplateName: template.Name, + } + 
workspaceAsCacheFields = agentapi.CachedWorkspaceFields{} + ) + + workspaceAsCacheFields.UpdateValues(database.Workspace{ + ID: workspace.ID, + OwnerID: workspace.OwnerID, + OwnerUsername: workspace.OwnerUsername, + TemplateID: workspace.TemplateID, + Name: workspace.Name, + TemplateName: workspace.TemplateName, + AutostartSchedule: workspace.AutostartSchedule, + }, + ) + + cws := agentapi.CachedWorkspaceFields{} + cws.UpdateValues(workspace) + wsi, ok := workspaceAsCacheFields.AsWorkspaceIdentity() + require.True(t, ok) + cwsi, ok := cws.AsWorkspaceIdentity() + require.True(t, ok) + require.True(t, wsi.Equal(cwsi)) +} diff --git a/coderd/agentapi/metadata.go b/coderd/agentapi/metadata.go index 0c3e0c8630b01..756422f856ad7 100644 --- a/coderd/agentapi/metadata.go +++ b/coderd/agentapi/metadata.go @@ -12,15 +12,17 @@ import ( "cdr.dev/slog" agentproto "github.com/coder/coder/v2/agent/proto" "github.com/coder/coder/v2/coderd/database" + "github.com/coder/coder/v2/coderd/database/dbauthz" "github.com/coder/coder/v2/coderd/database/dbtime" "github.com/coder/coder/v2/coderd/database/pubsub" ) type MetadataAPI struct { - AgentFn func(context.Context) (database.WorkspaceAgent, error) - Database database.Store - Pubsub pubsub.Pubsub - Log slog.Logger + AgentFn func(context.Context) (database.WorkspaceAgent, error) + Workspace *CachedWorkspaceFields + Database database.Store + Pubsub pubsub.Pubsub + Log slog.Logger TimeNowFn func() time.Time // defaults to dbtime.Now() } @@ -107,7 +109,19 @@ func (a *MetadataAPI) BatchUpdateMetadata(ctx context.Context, req *agentproto.B ) } - err = a.Database.UpdateWorkspaceAgentMetadata(ctx, dbUpdate) + // Inject RBAC object into context for dbauthz fast path, avoid having to + // call GetWorkspaceByAgentID on every metadata update. 
+ rbacCtx := ctx + if dbws, ok := a.Workspace.AsWorkspaceIdentity(); ok { + rbacCtx, err = dbauthz.WithWorkspaceRBAC(ctx, dbws.RBACObject()) + if err != nil { + // Don't error level log here, will exit the function. We want to fall back to GetWorkspaceByAgentID. + //nolint:gocritic + a.Log.Debug(ctx, "Cached workspace was present but RBAC object was invalid", slog.F("err", err)) + } + } + + err = a.Database.UpdateWorkspaceAgentMetadata(rbacCtx, dbUpdate) if err != nil { return nil, xerrors.Errorf("update workspace agent metadata in database: %w", err) } diff --git a/coderd/agentapi/metadata_test.go b/coderd/agentapi/metadata_test.go index ee37f3d4dc044..1ba02d037fef5 100644 --- a/coderd/agentapi/metadata_test.go +++ b/coderd/agentapi/metadata_test.go @@ -2,12 +2,14 @@ package agentapi_test import ( "context" + "database/sql" "encoding/json" "sync/atomic" "testing" "time" "github.com/google/uuid" + "github.com/prometheus/client_golang/prometheus" "github.com/stretchr/testify/require" "go.uber.org/mock/gomock" "google.golang.org/protobuf/types/known/timestamppb" @@ -15,10 +17,14 @@ import ( agentproto "github.com/coder/coder/v2/agent/proto" "github.com/coder/coder/v2/coderd/agentapi" "github.com/coder/coder/v2/coderd/database" + "github.com/coder/coder/v2/coderd/database/dbauthz" "github.com/coder/coder/v2/coderd/database/dbmock" "github.com/coder/coder/v2/coderd/database/dbtime" "github.com/coder/coder/v2/coderd/database/pubsub" + "github.com/coder/coder/v2/coderd/rbac" + "github.com/coder/coder/v2/coderd/rbac/policy" "github.com/coder/coder/v2/testutil" + "github.com/coder/quartz" ) type fakePublisher struct { @@ -84,9 +90,10 @@ func TestBatchUpdateMetadata(t *testing.T) { AgentFn: func(context.Context) (database.WorkspaceAgent, error) { return agent, nil }, - Database: dbM, - Pubsub: pub, - Log: testutil.Logger(t), + Workspace: &agentapi.CachedWorkspaceFields{}, + Database: dbM, + Pubsub: pub, + Log: testutil.Logger(t), TimeNowFn: func() time.Time { return now }, 
@@ -169,9 +176,10 @@ func TestBatchUpdateMetadata(t *testing.T) { AgentFn: func(context.Context) (database.WorkspaceAgent, error) { return agent, nil }, - Database: dbM, - Pubsub: pub, - Log: testutil.Logger(t), + Workspace: &agentapi.CachedWorkspaceFields{}, + Database: dbM, + Pubsub: pub, + Log: testutil.Logger(t), TimeNowFn: func() time.Time { return now }, @@ -238,9 +246,10 @@ func TestBatchUpdateMetadata(t *testing.T) { AgentFn: func(context.Context) (database.WorkspaceAgent, error) { return agent, nil }, - Database: dbM, - Pubsub: pub, - Log: testutil.Logger(t), + Workspace: &agentapi.CachedWorkspaceFields{}, + Database: dbM, + Pubsub: pub, + Log: testutil.Logger(t), TimeNowFn: func() time.Time { return now }, @@ -272,4 +281,421 @@ func TestBatchUpdateMetadata(t *testing.T) { Keys: []string{req.Metadata[0].Key, req.Metadata[1].Key, req.Metadata[2].Key}, }, gotEvent) }) + + // Test RBAC fast path with valid RBAC object - should NOT call GetWorkspaceByAgentID + // This test verifies that when a valid RBAC object is present in context, the dbauthz layer + // uses the fast path and skips the GetWorkspaceByAgentID database call. 
+ t.Run("WorkspaceCached_SkipsDBCall", func(t *testing.T) { + t.Parallel() + + var ( + ctrl = gomock.NewController(t) + dbM = dbmock.NewMockStore(ctrl) + pub = &fakePublisher{} + now = dbtime.Now() + // Set up consistent IDs that represent a valid workspace->agent relationship + workspaceID = uuid.MustParse("12345678-1234-1234-1234-123456789012") + ownerID = uuid.MustParse("87654321-4321-4321-4321-210987654321") + orgID = uuid.MustParse("11111111-1111-1111-1111-111111111111") + agentID = uuid.MustParse("aaaaaaaa-aaaa-aaaa-aaaa-aaaaaaaaaaaa") + ) + + agent := database.WorkspaceAgent{ + ID: agentID, + // In a real scenario, this agent would belong to a resource in the workspace above + } + + req := &agentproto.BatchUpdateMetadataRequest{ + Metadata: []*agentproto.Metadata{ + { + Key: "test_key", + Result: &agentproto.WorkspaceAgentMetadata_Result{ + CollectedAt: timestamppb.New(now.Add(-time.Second)), + Age: 1, + Value: "test_value", + }, + }, + }, + } + + // Expect UpdateWorkspaceAgentMetadata to be called + dbM.EXPECT().UpdateWorkspaceAgentMetadata(gomock.Any(), database.UpdateWorkspaceAgentMetadataParams{ + WorkspaceAgentID: agent.ID, + Key: []string{"test_key"}, + Value: []string{"test_value"}, + Error: []string{""}, + CollectedAt: []time.Time{now}, + }).Return(nil) + + // DO NOT expect GetWorkspaceByAgentID - the fast path should skip this call + // If GetWorkspaceByAgentID is called, the test will fail with "unexpected call" + + // dbauthz will call Wrappers() to check for wrapped databases + dbM.EXPECT().Wrappers().Return([]string{}).AnyTimes() + + // Set up dbauthz to test the actual authorization layer + auth := rbac.NewStrictCachingAuthorizer(prometheus.NewRegistry()) + accessControlStore := &atomic.Pointer[dbauthz.AccessControlStore]{} + var acs dbauthz.AccessControlStore = dbauthz.AGPLTemplateAccessControlStore{} + accessControlStore.Store(&acs) + + api := &agentapi.MetadataAPI{ + AgentFn: func(_ context.Context) (database.WorkspaceAgent, error) { + 
return agent, nil + }, + Workspace: &agentapi.CachedWorkspaceFields{}, + Database: dbauthz.New(dbM, auth, testutil.Logger(t), accessControlStore), + Pubsub: pub, + Log: testutil.Logger(t), + TimeNowFn: func() time.Time { + return now + }, + } + + api.Workspace.UpdateValues(database.Workspace{ + ID: workspaceID, + OwnerID: ownerID, + OrganizationID: orgID, + }) + + // Create context with system actor so authorization passes + ctx := dbauthz.AsSystemRestricted(context.Background()) + resp, err := api.BatchUpdateMetadata(ctx, req) + require.NoError(t, err) + require.NotNil(t, resp) + }) + // Test RBAC slow path - invalid RBAC object should fall back to GetWorkspaceByAgentID + // This test verifies that when the RBAC object has invalid IDs (nil UUIDs), the dbauthz layer + // falls back to the slow path and calls GetWorkspaceByAgentID. + t.Run("InvalidWorkspaceCached_RequiresDBCall", func(t *testing.T) { + t.Parallel() + + var ( + ctrl = gomock.NewController(t) + dbM = dbmock.NewMockStore(ctrl) + pub = &fakePublisher{} + now = dbtime.Now() + workspaceID = uuid.MustParse("12345678-1234-1234-1234-123456789012") + ownerID = uuid.MustParse("87654321-4321-4321-4321-210987654321") + orgID = uuid.MustParse("11111111-1111-1111-1111-111111111111") + agentID = uuid.MustParse("bbbbbbbb-bbbb-bbbb-bbbb-bbbbbbbbbbbb") + ) + + agent := database.WorkspaceAgent{ + ID: agentID, + } + + req := &agentproto.BatchUpdateMetadataRequest{ + Metadata: []*agentproto.Metadata{ + { + Key: "test_key", + Result: &agentproto.WorkspaceAgentMetadata_Result{ + CollectedAt: timestamppb.New(now.Add(-time.Second)), + Age: 1, + Value: "test_value", + }, + }, + }, + } + + // EXPECT GetWorkspaceByAgentID to be called because the RBAC fast path validation fails + dbM.EXPECT().GetWorkspaceByAgentID(gomock.Any(), agentID).Return(database.Workspace{ + ID: workspaceID, + OwnerID: ownerID, + OrganizationID: orgID, + }, nil) + + // Expect UpdateWorkspaceAgentMetadata to be called after authorization + 
dbM.EXPECT().UpdateWorkspaceAgentMetadata(gomock.Any(), database.UpdateWorkspaceAgentMetadataParams{ + WorkspaceAgentID: agent.ID, + Key: []string{"test_key"}, + Value: []string{"test_value"}, + Error: []string{""}, + CollectedAt: []time.Time{now}, + }).Return(nil) + + // dbauthz will call Wrappers() to check for wrapped databases + dbM.EXPECT().Wrappers().Return([]string{}).AnyTimes() + + // Set up dbauthz to test the actual authorization layer + auth := rbac.NewStrictCachingAuthorizer(prometheus.NewRegistry()) + accessControlStore := &atomic.Pointer[dbauthz.AccessControlStore]{} + var acs dbauthz.AccessControlStore = dbauthz.AGPLTemplateAccessControlStore{} + accessControlStore.Store(&acs) + + api := &agentapi.MetadataAPI{ + AgentFn: func(_ context.Context) (database.WorkspaceAgent, error) { + return agent, nil + }, + + Workspace: &agentapi.CachedWorkspaceFields{}, + Database: dbauthz.New(dbM, auth, testutil.Logger(t), accessControlStore), + Pubsub: pub, + Log: testutil.Logger(t), + TimeNowFn: func() time.Time { + return now + }, + } + + // Create an invalid RBAC object with nil UUIDs for owner/org + // This will fail dbauthz fast path validation and trigger GetWorkspaceByAgentID + api.Workspace.UpdateValues(database.Workspace{ + ID: uuid.MustParse("cccccccc-cccc-cccc-cccc-cccccccccccc"), + OwnerID: uuid.Nil, // Invalid: fails dbauthz fast path validation + OrganizationID: uuid.Nil, // Invalid: fails dbauthz fast path validation + }) + + // Create context with system actor so authorization passes + ctx := dbauthz.AsSystemRestricted(context.Background()) + resp, err := api.BatchUpdateMetadata(ctx, req) + require.NoError(t, err) + require.NotNil(t, resp) + }) + // Test RBAC slow path - no RBAC object in context + // This test verifies that when no RBAC object is present in context, the dbauthz layer + // falls back to the slow path and calls GetWorkspaceByAgentID. 
+ t.Run("WorkspaceNotCached_RequiresDBCall", func(t *testing.T) { + t.Parallel() + + var ( + ctrl = gomock.NewController(t) + dbM = dbmock.NewMockStore(ctrl) + pub = &fakePublisher{} + now = dbtime.Now() + workspaceID = uuid.MustParse("12345678-1234-1234-1234-123456789012") + ownerID = uuid.MustParse("87654321-4321-4321-4321-210987654321") + orgID = uuid.MustParse("11111111-1111-1111-1111-111111111111") + agentID = uuid.MustParse("dddddddd-dddd-dddd-dddd-dddddddddddd") + ) + + agent := database.WorkspaceAgent{ + ID: agentID, + } + + req := &agentproto.BatchUpdateMetadataRequest{ + Metadata: []*agentproto.Metadata{ + { + Key: "test_key", + Result: &agentproto.WorkspaceAgentMetadata_Result{ + CollectedAt: timestamppb.New(now.Add(-time.Second)), + Age: 1, + Value: "test_value", + }, + }, + }, + } + + // EXPECT GetWorkspaceByAgentID to be called because no RBAC object is in context + dbM.EXPECT().GetWorkspaceByAgentID(gomock.Any(), agentID).Return(database.Workspace{ + ID: workspaceID, + OwnerID: ownerID, + OrganizationID: orgID, + }, nil) + + // Expect UpdateWorkspaceAgentMetadata to be called after authorization + dbM.EXPECT().UpdateWorkspaceAgentMetadata(gomock.Any(), database.UpdateWorkspaceAgentMetadataParams{ + WorkspaceAgentID: agent.ID, + Key: []string{"test_key"}, + Value: []string{"test_value"}, + Error: []string{""}, + CollectedAt: []time.Time{now}, + }).Return(nil) + + // dbauthz will call Wrappers() to check for wrapped databases + dbM.EXPECT().Wrappers().Return([]string{}).AnyTimes() + + // Set up dbauthz to test the actual authorization layer + auth := rbac.NewStrictCachingAuthorizer(prometheus.NewRegistry()) + accessControlStore := &atomic.Pointer[dbauthz.AccessControlStore]{} + var acs dbauthz.AccessControlStore = dbauthz.AGPLTemplateAccessControlStore{} + accessControlStore.Store(&acs) + + api := &agentapi.MetadataAPI{ + AgentFn: func(_ context.Context) (database.WorkspaceAgent, error) { + return agent, nil + }, + Workspace: 
&agentapi.CachedWorkspaceFields{}, + Database: dbauthz.New(dbM, auth, testutil.Logger(t), accessControlStore), + Pubsub: pub, + Log: testutil.Logger(t), + TimeNowFn: func() time.Time { + return now + }, + } + + // Create context with system actor so authorization passes + ctx := dbauthz.AsSystemRestricted(context.Background()) + resp, err := api.BatchUpdateMetadata(ctx, req) + require.NoError(t, err) + require.NotNil(t, resp) + }) + + // Test cache refresh - AutostartSchedule updated + // This test verifies that the cache refresh mechanism actually calls GetWorkspaceByID + // and updates the cached workspace fields when the workspace is modified (e.g., autostart schedule changes). + t.Run("CacheRefreshed_AutostartScheduleUpdated", func(t *testing.T) { + t.Parallel() + + var ( + ctrl = gomock.NewController(t) + dbM = dbmock.NewMockStore(ctrl) + pub = &fakePublisher{} + now = dbtime.Now() + mClock = quartz.NewMock(t) + tickerTrap = mClock.Trap().TickerFunc("cache_refresh") + + workspaceID = uuid.MustParse("12345678-1234-1234-1234-123456789012") + ownerID = uuid.MustParse("87654321-4321-4321-4321-210987654321") + orgID = uuid.MustParse("11111111-1111-1111-1111-111111111111") + templateID = uuid.MustParse("aaaabbbb-cccc-dddd-eeee-ffffffff0000") + agentID = uuid.MustParse("ffffffff-ffff-ffff-ffff-ffffffffffff") + ) + + agent := database.WorkspaceAgent{ + ID: agentID, + } + + // Initial workspace - has Monday-Friday 9am autostart + initialWorkspace := database.Workspace{ + ID: workspaceID, + OwnerID: ownerID, + OrganizationID: orgID, + TemplateID: templateID, + Name: "my-workspace", + OwnerUsername: "testuser", + TemplateName: "test-template", + AutostartSchedule: sql.NullString{Valid: true, String: "CRON_TZ=UTC 0 9 * * 1-5"}, + } + + // Updated workspace - user changed autostart to 5pm and renamed workspace + updatedWorkspace := database.Workspace{ + ID: workspaceID, + OwnerID: ownerID, + OrganizationID: orgID, + TemplateID: templateID, + Name: "my-workspace-renamed", 
// Changed! + OwnerUsername: "testuser", + TemplateName: "test-template", + AutostartSchedule: sql.NullString{Valid: true, String: "CRON_TZ=UTC 0 17 * * 1-5"}, // Changed! + DormantAt: sql.NullTime{}, + } + + req := &agentproto.BatchUpdateMetadataRequest{ + Metadata: []*agentproto.Metadata{ + { + Key: "test_key", + Result: &agentproto.WorkspaceAgentMetadata_Result{ + CollectedAt: timestamppb.New(now.Add(-time.Second)), + Age: 1, + Value: "test_value", + }, + }, + }, + } + + // EXPECT GetWorkspaceByID to be called during cache refresh + // This is the key assertion - proves the refresh mechanism is working + dbM.EXPECT().GetWorkspaceByID(gomock.Any(), workspaceID).Return(updatedWorkspace, nil) + + // API needs to fetch the agent when calling metadata update + dbM.EXPECT().GetWorkspaceAgentByID(gomock.Any(), agentID).Return(agent, nil) + + // After refresh, metadata update should work with updated cache + dbM.EXPECT().UpdateWorkspaceAgentMetadata(gomock.Any(), gomock.Any()).DoAndReturn( + func(ctx context.Context, params database.UpdateWorkspaceAgentMetadataParams) error { + require.Equal(t, agent.ID, params.WorkspaceAgentID) + require.Equal(t, []string{"test_key"}, params.Key) + require.Equal(t, []string{"test_value"}, params.Value) + require.Equal(t, []string{""}, params.Error) + require.Len(t, params.CollectedAt, 1) + return nil + }, + ).AnyTimes() + + // May call GetWorkspaceByAgentID if slow path is used before refresh + dbM.EXPECT().GetWorkspaceByAgentID(gomock.Any(), agentID).Return(updatedWorkspace, nil).AnyTimes() + + // dbauthz will call Wrappers() + dbM.EXPECT().Wrappers().Return([]string{}).AnyTimes() + + // Set up dbauthz + auth := rbac.NewStrictCachingAuthorizer(prometheus.NewRegistry()) + accessControlStore := &atomic.Pointer[dbauthz.AccessControlStore]{} + var acs dbauthz.AccessControlStore = dbauthz.AGPLTemplateAccessControlStore{} + accessControlStore.Store(&acs) + + ctx, cancel := context.WithCancel(context.Background()) + defer cancel() + + // 
Create roles with workspace permissions + userRoles := rbac.Roles([]rbac.Role{ + { + Identifier: rbac.RoleMember(), + User: []rbac.Permission{ + { + Negate: false, + ResourceType: rbac.ResourceWorkspace.Type, + Action: policy.WildcardSymbol, + }, + }, + ByOrgID: map[string]rbac.OrgPermissions{ + orgID.String(): { + Member: []rbac.Permission{ + { + Negate: false, + ResourceType: rbac.ResourceWorkspace.Type, + Action: policy.WildcardSymbol, + }, + }, + }, + }, + }, + }) + + agentScope := rbac.WorkspaceAgentScope(rbac.WorkspaceAgentScopeParams{ + WorkspaceID: workspaceID, + OwnerID: ownerID, + TemplateID: templateID, + VersionID: uuid.New(), + }) + + ctxWithActor := dbauthz.As(ctx, rbac.Subject{ + Type: rbac.SubjectTypeUser, + FriendlyName: "testuser", + Email: "testuser@example.com", + ID: ownerID.String(), + Roles: userRoles, + Groups: []string{orgID.String()}, + Scope: agentScope, + }.WithCachedASTValue()) + + // Create full API with cached workspace fields (initial state) + api := agentapi.New(agentapi.Options{ + AuthenticatedCtx: ctxWithActor, + AgentID: agentID, + WorkspaceID: workspaceID, + OwnerID: ownerID, + OrganizationID: orgID, + Database: dbauthz.New(dbM, auth, testutil.Logger(t), accessControlStore), + Log: testutil.Logger(t), + Clock: mClock, + Pubsub: pub, + }, initialWorkspace) // Cache is initialized with 9am schedule and "my-workspace" name + + // Wait for ticker to be set up and release it so it can fire + tickerTrap.MustWait(ctx).MustRelease(ctx) + tickerTrap.Close() + + // Advance clock to trigger cache refresh and wait for it to complete + _, aw := mClock.AdvanceNext() + aw.MustWait(ctx) + + // At this point, GetWorkspaceByID should have been called and cache updated + // The cache now has the 5pm schedule and "my-workspace-renamed" name + + // Now call metadata update to verify the refreshed cache works + resp, err := api.MetadataAPI.BatchUpdateMetadata(ctxWithActor, req) + require.NoError(t, err) + require.NotNil(t, resp) + }) } diff --git 
a/coderd/agentapi/stats.go b/coderd/agentapi/stats.go index 3108d17f75b14..40533ea3fe0dd 100644 --- a/coderd/agentapi/stats.go +++ b/coderd/agentapi/stats.go @@ -17,6 +17,7 @@ import ( type StatsAPI struct { AgentFn func(context.Context) (database.WorkspaceAgent, error) + Workspace *CachedWorkspaceFields Database database.Store Log slog.Logger StatsReporter *workspacestats.Reporter @@ -46,14 +47,21 @@ func (a *StatsAPI) UpdateStats(ctx context.Context, req *agentproto.UpdateStatsR if err != nil { return nil, err } - getWorkspaceAgentByIDRow, err := a.Database.GetWorkspaceByAgentID(ctx, workspaceAgent.ID) - if err != nil { - return nil, xerrors.Errorf("get workspace by agent ID %q: %w", workspaceAgent.ID, err) + + // If cache is empty (prebuild or invalid), fall back to DB + var ws database.WorkspaceIdentity + var ok bool + if ws, ok = a.Workspace.AsWorkspaceIdentity(); !ok { + w, err := a.Database.GetWorkspaceByAgentID(ctx, workspaceAgent.ID) + if err != nil { + return nil, xerrors.Errorf("get workspace by agent ID %q: %w", workspaceAgent.ID, err) + } + ws = database.WorkspaceIdentityFromWorkspace(w) } - workspace := getWorkspaceAgentByIDRow + a.Log.Debug(ctx, "read stats report", slog.F("interval", a.AgentStatsRefreshInterval), - slog.F("workspace_id", workspace.ID), + slog.F("workspace_id", ws.ID), slog.F("payload", req), ) @@ -70,9 +78,8 @@ func (a *StatsAPI) UpdateStats(ctx context.Context, req *agentproto.UpdateStatsR err = a.StatsReporter.ReportAgentStats( ctx, a.now(), - workspace, + ws, workspaceAgent, - getWorkspaceAgentByIDRow.TemplateName, req.Stats, false, ) diff --git a/coderd/agentapi/stats_test.go b/coderd/agentapi/stats_test.go index aec2d68b71c12..c5cc2bd262114 100644 --- a/coderd/agentapi/stats_test.go +++ b/coderd/agentapi/stats_test.go @@ -52,8 +52,19 @@ func TestUpdateStates(t *testing.T) { ID: uuid.New(), Name: "abc", } + workspaceAsCacheFields = agentapi.CachedWorkspaceFields{} ) + workspaceAsCacheFields.UpdateValues(database.Workspace{ + ID: 
workspace.ID, + OwnerID: workspace.OwnerID, + OwnerUsername: workspace.OwnerUsername, + TemplateID: workspace.TemplateID, + Name: workspace.Name, + TemplateName: workspace.TemplateName, + AutostartSchedule: workspace.AutostartSchedule, + }) + t.Run("OK", func(t *testing.T) { t.Parallel() @@ -111,7 +122,8 @@ func TestUpdateStates(t *testing.T) { AgentFn: func(context.Context) (database.WorkspaceAgent, error) { return agent, nil }, - Database: dbM, + Workspace: &workspaceAsCacheFields, + Database: dbM, StatsReporter: workspacestats.NewReporter(workspacestats.ReporterOptions{ Database: dbM, Pubsub: ps, @@ -136,9 +148,6 @@ func TestUpdateStates(t *testing.T) { } defer wut.Close() - // Workspace gets fetched. - dbM.EXPECT().GetWorkspaceByAgentID(gomock.Any(), agent.ID).Return(workspace, nil) - // We expect an activity bump because ConnectionCount > 0. dbM.EXPECT().ActivityBumpWorkspace(gomock.Any(), database.ActivityBumpWorkspaceParams{ WorkspaceID: workspace.ID, @@ -223,7 +232,8 @@ func TestUpdateStates(t *testing.T) { AgentFn: func(context.Context) (database.WorkspaceAgent, error) { return agent, nil }, - Database: dbM, + Workspace: &workspaceAsCacheFields, + Database: dbM, StatsReporter: workspacestats.NewReporter(workspacestats.ReporterOptions{ Database: dbM, Pubsub: ps, @@ -239,9 +249,6 @@ func TestUpdateStates(t *testing.T) { }, } - // Workspace gets fetched. 
- dbM.EXPECT().GetWorkspaceByAgentID(gomock.Any(), agent.ID).Return(workspace, nil) - _, err := api.UpdateStats(context.Background(), req) require.NoError(t, err) }) @@ -260,7 +267,8 @@ func TestUpdateStates(t *testing.T) { AgentFn: func(context.Context) (database.WorkspaceAgent, error) { return agent, nil }, - Database: dbM, + Workspace: &workspaceAsCacheFields, + Database: dbM, StatsReporter: workspacestats.NewReporter(workspacestats.ReporterOptions{ Database: dbM, Pubsub: ps, @@ -333,11 +341,17 @@ func TestUpdateStates(t *testing.T) { }, } ) + // need to overwrite the cached fields for this test, but the struct has a lock + ws := agentapi.CachedWorkspaceFields{} + ws.UpdateValues(workspace) + // ws.AutostartSchedule = workspace.AutostartSchedule + api := agentapi.StatsAPI{ AgentFn: func(context.Context) (database.WorkspaceAgent, error) { return agent, nil }, - Database: dbM, + Workspace: &ws, + Database: dbM, StatsReporter: workspacestats.NewReporter(workspacestats.ReporterOptions{ Database: dbM, Pubsub: ps, @@ -362,9 +376,6 @@ func TestUpdateStates(t *testing.T) { } defer wut.Close() - // Workspace gets fetched. - dbM.EXPECT().GetWorkspaceByAgentID(gomock.Any(), agent.ID).Return(workspace, nil) - // We expect an activity bump because ConnectionCount > 0. However, the // next autostart time will be set on the bump. dbM.EXPECT().ActivityBumpWorkspace(gomock.Any(), database.ActivityBumpWorkspaceParams{ @@ -451,7 +462,8 @@ func TestUpdateStates(t *testing.T) { AgentFn: func(context.Context) (database.WorkspaceAgent, error) { return agent, nil }, - Database: dbM, + Workspace: &workspaceAsCacheFields, + Database: dbM, StatsReporter: workspacestats.NewReporter(workspacestats.ReporterOptions{ Database: dbM, Pubsub: ps, @@ -478,9 +490,6 @@ func TestUpdateStates(t *testing.T) { }, } - // Workspace gets fetched. - dbM.EXPECT().GetWorkspaceByAgentID(gomock.Any(), agent.ID).Return(workspace, nil) - // We expect an activity bump because ConnectionCount > 0. 
dbM.EXPECT().ActivityBumpWorkspace(gomock.Any(), database.ActivityBumpWorkspaceParams{ WorkspaceID: workspace.ID, diff --git a/coderd/aitasks.go b/coderd/aitasks.go index 8415a01454d0a..2313ee745fa16 100644 --- a/coderd/aitasks.go +++ b/coderd/aitasks.go @@ -13,7 +13,9 @@ import ( "github.com/google/uuid" "golang.org/x/xerrors" - "cdr.dev/slog" + "github.com/coder/coder/v2/coderd/taskname" + + aiagentapi "github.com/coder/agentapi-sdk-go" "github.com/coder/coder/v2/coderd/audit" "github.com/coder/coder/v2/coderd/database" "github.com/coder/coder/v2/coderd/database/dbtime" @@ -23,75 +25,21 @@ import ( "github.com/coder/coder/v2/coderd/rbac" "github.com/coder/coder/v2/coderd/rbac/policy" "github.com/coder/coder/v2/coderd/searchquery" - "github.com/coder/coder/v2/coderd/taskname" + "github.com/coder/coder/v2/coderd/util/ptr" + "github.com/coder/coder/v2/coderd/util/slice" "github.com/coder/coder/v2/codersdk" - - aiagentapi "github.com/coder/agentapi-sdk-go" ) -// This endpoint is experimental and not guaranteed to be stable, so we're not -// generating public-facing documentation for it. 
-func (api *API) aiTasksPrompts(rw http.ResponseWriter, r *http.Request) { - ctx := r.Context() - - buildIDsParam := r.URL.Query().Get("build_ids") - if buildIDsParam == "" { - httpapi.Write(ctx, rw, http.StatusBadRequest, codersdk.Response{ - Message: "build_ids query parameter is required", - }) - return - } - - // Parse build IDs - buildIDStrings := strings.Split(buildIDsParam, ",") - buildIDs := make([]uuid.UUID, 0, len(buildIDStrings)) - for _, idStr := range buildIDStrings { - id, err := uuid.Parse(strings.TrimSpace(idStr)) - if err != nil { - httpapi.Write(ctx, rw, http.StatusBadRequest, codersdk.Response{ - Message: fmt.Sprintf("Invalid build ID format: %s", idStr), - Detail: err.Error(), - }) - return - } - buildIDs = append(buildIDs, id) - } - - parameters, err := api.Database.GetWorkspaceBuildParametersByBuildIDs(ctx, buildIDs) - if err != nil { - httpapi.Write(ctx, rw, http.StatusInternalServerError, codersdk.Response{ - Message: "Internal error fetching workspace build parameters.", - Detail: err.Error(), - }) - return - } - - promptsByBuildID := make(map[string]string, len(parameters)) - for _, param := range parameters { - if param.Name != codersdk.AITaskPromptParameterName { - continue - } - buildID := param.WorkspaceBuildID.String() - promptsByBuildID[buildID] = param.Value - } - - httpapi.Write(ctx, rw, http.StatusOK, codersdk.AITasksPromptsResponse{ - Prompts: promptsByBuildID, - }) -} - // @Summary Create a new AI task -// @Description: EXPERIMENTAL: this endpoint is experimental and not guaranteed to be stable. 
-// @ID create-task +// @ID create-a-new-ai-task // @Security CoderSessionToken -// @Tags Experimental +// @Accept json +// @Produce json +// @Tags Tasks // @Param user path string true "Username, user ID, or 'me' for the authenticated user" // @Param request body codersdk.CreateTaskRequest true "Create task request" // @Success 201 {object} codersdk.Task -// @Router /api/experimental/tasks/{user} [post] -// -// EXPERIMENTAL: This endpoint is experimental and not guaranteed to be stable. -// This endpoint creates a new task for the given user. +// @Router /tasks/{user} [post] func (api *API) tasksCreate(rw http.ResponseWriter, r *http.Request) { var ( ctx = r.Context() @@ -143,7 +91,7 @@ func (api *API) tasksCreate(rw http.ResponseWriter, r *http.Request) { if !templateVersion.HasAITask.Valid || !templateVersion.HasAITask.Bool { httpapi.Write(ctx, rw, http.StatusBadRequest, codersdk.Response{ - Message: fmt.Sprintf(`Template does not have required parameter %q`, codersdk.AITaskPromptParameterName), + Message: `Template does not have a valid "coder_ai_task" resource.`, }) return } @@ -159,18 +107,45 @@ func (api *API) tasksCreate(rw http.ResponseWriter, r *http.Request) { } } - if taskName == "" { - taskName = taskname.GenerateFallback() + taskDisplayName := strings.TrimSpace(req.DisplayName) + if taskDisplayName != "" { + if len(taskDisplayName) > 64 { + httpapi.Write(ctx, rw, http.StatusBadRequest, codersdk.Response{ + Message: "Display name must be 64 characters or less.", + }) + return + } + } - if anthropicAPIKey := taskname.GetAnthropicAPIKeyFromEnv(); anthropicAPIKey != "" { - anthropicModel := taskname.GetAnthropicModelFromEnv() + // Generate task name and display name if either is not provided + if taskName == "" || taskDisplayName == "" { + generatedTaskName := taskname.Generate(ctx, api.Logger, req.Input) - generatedName, err := taskname.Generate(ctx, req.Input, taskname.WithAPIKey(anthropicAPIKey), taskname.WithModel(anthropicModel)) - if err != nil { - 
api.Logger.Error(ctx, "unable to generate task name", slog.Error(err)) - } else { - taskName = generatedName - } + if taskName == "" { + taskName = generatedTaskName.Name + } + if taskDisplayName == "" { + taskDisplayName = generatedTaskName.DisplayName + } + } + + // Check if the template defines the AI Prompt parameter. + templateParams, err := api.Database.GetTemplateVersionParameters(ctx, req.TemplateVersionID) + if err != nil { + httpapi.Write(ctx, rw, http.StatusInternalServerError, codersdk.Response{ + Message: "Internal error fetching template parameters.", + Detail: err.Error(), + }) + return + } + + var richParams []codersdk.WorkspaceBuildParameter + if _, hasAIPromptParam := slice.Find(templateParams, func(param database.TemplateVersionParameter) bool { + return param.Name == codersdk.AITaskPromptParameterName + }); hasAIPromptParam { + // Only add the AI Prompt parameter if the template defines it. + richParams = []codersdk.WorkspaceBuildParameter{ + {Name: codersdk.AITaskPromptParameterName, Value: req.Input}, } } @@ -178,9 +153,7 @@ func (api *API) tasksCreate(rw http.ResponseWriter, r *http.Request) { Name: taskName, TemplateVersionID: req.TemplateVersionID, TemplateVersionPresetID: req.TemplateVersionPresetID, - RichParameterValues: []codersdk.WorkspaceBuildParameter{ - {Name: codersdk.AITaskPromptParameterName, Value: req.Input}, - }, + RichParameterValues: richParams, } var owner workspaceOwner @@ -245,6 +218,7 @@ func (api *API) tasksCreate(rw http.ResponseWriter, r *http.Request) { OrganizationID: templateVersion.OrganizationID, OwnerID: owner.ID, Name: taskName, + DisplayName: taskDisplayName, WorkspaceID: uuid.NullUUID{}, // Will be set after workspace creation. 
TemplateVersionID: templateVersion.ID, TemplateParameters: []byte("{}"), @@ -303,15 +277,21 @@ func (api *API) tasksCreate(rw http.ResponseWriter, r *http.Request) { func taskFromDBTaskAndWorkspace(dbTask database.Task, ws codersdk.Workspace) codersdk.Task { var taskAgentLifecycle *codersdk.WorkspaceAgentLifecycle var taskAgentHealth *codersdk.WorkspaceAgentHealth + var taskAppHealth *codersdk.WorkspaceAppHealth + + if dbTask.WorkspaceAgentLifecycleState.Valid { + taskAgentLifecycle = ptr.Ref(codersdk.WorkspaceAgentLifecycle(dbTask.WorkspaceAgentLifecycleState.WorkspaceAgentLifecycleState)) + } + if dbTask.WorkspaceAppHealth.Valid { + taskAppHealth = ptr.Ref(codersdk.WorkspaceAppHealth(dbTask.WorkspaceAppHealth.WorkspaceAppHealth)) + } - // If we have an agent ID from the task, find the agent details in the - // workspace. + // If we have an agent ID from the task, find the agent health info if dbTask.WorkspaceAgentID.Valid { findTaskAgentLoop: for _, resource := range ws.LatestBuild.Resources { for _, agent := range resource.Agents { if agent.ID == dbTask.WorkspaceAgentID.UUID { - taskAgentLifecycle = &agent.LifecycleState taskAgentHealth = &agent.Health break findTaskAgentLoop } @@ -319,21 +299,7 @@ func taskFromDBTaskAndWorkspace(dbTask database.Task, ws codersdk.Workspace) cod } } - // Ignore 'latest app status' if it is older than the latest build and the - // latest build is a 'start' transition. This ensures that you don't show a - // stale app status from a previous build. For stop transitions, there is - // still value in showing the latest app status. 
- var currentState *codersdk.TaskStateEntry - if ws.LatestAppStatus != nil { - if ws.LatestBuild.Transition != codersdk.WorkspaceTransitionStart || ws.LatestAppStatus.CreatedAt.After(ws.LatestBuild.CreatedAt) { - currentState = &codersdk.TaskStateEntry{ - Timestamp: ws.LatestAppStatus.CreatedAt, - State: codersdk.TaskState(ws.LatestAppStatus.State), - Message: ws.LatestAppStatus.Message, - URI: ws.LatestAppStatus.URI, - } - } - } + currentState := deriveTaskCurrentState(dbTask, ws, taskAgentLifecycle, taskAppHealth) return codersdk.Task{ ID: dbTask.ID, @@ -342,6 +308,7 @@ func taskFromDBTaskAndWorkspace(dbTask database.Task, ws codersdk.Workspace) cod OwnerName: dbTask.OwnerUsername, OwnerAvatarURL: dbTask.OwnerAvatarUrl, Name: dbTask.Name, + DisplayName: dbTask.DisplayName, TemplateID: ws.TemplateID, TemplateVersionID: dbTask.TemplateVersionID, TemplateName: ws.TemplateName, @@ -363,17 +330,81 @@ func taskFromDBTaskAndWorkspace(dbTask database.Task, ws codersdk.Workspace) cod } } +// deriveTaskCurrentState determines the current state of a task based on the +// workspace's latest app status and initialization phase. +// Returns nil if no valid state can be determined. +func deriveTaskCurrentState( + dbTask database.Task, + ws codersdk.Workspace, + taskAgentLifecycle *codersdk.WorkspaceAgentLifecycle, + taskAppHealth *codersdk.WorkspaceAppHealth, +) *codersdk.TaskStateEntry { + var currentState *codersdk.TaskStateEntry + + // Ignore 'latest app status' if it is older than the latest build and the + // latest build is a 'start' transition. This ensures that you don't show a + // stale app status from a previous build. For stop transitions, there is + // still value in showing the latest app status. 
+ if ws.LatestAppStatus != nil { + if ws.LatestBuild.Transition != codersdk.WorkspaceTransitionStart || ws.LatestAppStatus.CreatedAt.After(ws.LatestBuild.CreatedAt) { + currentState = &codersdk.TaskStateEntry{ + Timestamp: ws.LatestAppStatus.CreatedAt, + State: codersdk.TaskState(ws.LatestAppStatus.State), + Message: ws.LatestAppStatus.Message, + URI: ws.LatestAppStatus.URI, + } + } + } + + // If no valid agent state was found for the current build and the task is initializing, + // provide a descriptive initialization message. + if currentState == nil && dbTask.Status == database.TaskStatusInitializing { + message := "Initializing workspace" + + switch { + case ws.LatestBuild.Status == codersdk.WorkspaceStatusPending || + ws.LatestBuild.Status == codersdk.WorkspaceStatusStarting: + message = fmt.Sprintf("Workspace is %s", ws.LatestBuild.Status) + case taskAgentLifecycle != nil: + switch { + case *taskAgentLifecycle == codersdk.WorkspaceAgentLifecycleCreated: + message = "Agent is connecting" + case *taskAgentLifecycle == codersdk.WorkspaceAgentLifecycleStarting: + message = "Agent is starting" + case *taskAgentLifecycle == codersdk.WorkspaceAgentLifecycleReady: + if taskAppHealth != nil && *taskAppHealth == codersdk.WorkspaceAppHealthInitializing { + message = "App is initializing" + } else { + // In case the workspace app is not initializing, + // the overall task status should be updated accordingly + message = "Initializing workspace applications" + } + default: + // In case the workspace agent is not initializing, + // the overall task status should be updated accordingly + message = "Initializing workspace agent" + } + } + + currentState = &codersdk.TaskStateEntry{ + Timestamp: ws.LatestBuild.CreatedAt, + State: codersdk.TaskStateWorking, + Message: message, + URI: "", + } + } + + return currentState +} + // @Summary List AI tasks -// @Description: EXPERIMENTAL: this endpoint is experimental and not guaranteed to be stable. 
-// @ID list-tasks +// @ID list-ai-tasks // @Security CoderSessionToken -// @Tags Experimental +// @Produce json +// @Tags Tasks // @Param q query string false "Search query for filtering tasks. Supports: owner:, organization:, status:" // @Success 200 {object} codersdk.TasksListResponse -// @Router /api/experimental/tasks [get] -// -// EXPERIMENTAL: This endpoint is experimental and not guaranteed to be stable. -// tasksList is an experimental endpoint to list tasks. +// @Router /tasks [get] func (api *API) tasksList(rw http.ResponseWriter, r *http.Request) { ctx := r.Context() apiKey := httpmw.APIKey(r) @@ -466,20 +497,15 @@ func (api *API) convertTasks(ctx context.Context, requesterID uuid.UUID, dbTasks return result, nil } -// @Summary Get AI task by ID -// @Description: EXPERIMENTAL: this endpoint is experimental and not guaranteed to be stable. -// @ID get-task +// @Summary Get AI task by ID or name +// @ID get-ai-task-by-id-or-name // @Security CoderSessionToken -// @Tags Experimental +// @Produce json +// @Tags Tasks // @Param user path string true "Username, user ID, or 'me' for the authenticated user" -// @Param task path string true "Task ID" format(uuid) +// @Param task path string true "Task ID, or task name" // @Success 200 {object} codersdk.Task -// @Router /api/experimental/tasks/{user}/{task} [get] -// -// EXPERIMENTAL: This endpoint is experimental and not guaranteed to be stable. -// taskGet is an experimental endpoint to fetch a single AI task by ID -// (workspace ID). It returns a synthesized task response including -// prompt and status. 
+// @Router /tasks/{user}/{task} [get] func (api *API) taskGet(rw http.ResponseWriter, r *http.Request) { ctx := r.Context() apiKey := httpmw.APIKey(r) @@ -544,20 +570,14 @@ func (api *API) taskGet(rw http.ResponseWriter, r *http.Request) { httpapi.Write(ctx, rw, http.StatusOK, taskResp) } -// @Summary Delete AI task by ID -// @Description: EXPERIMENTAL: this endpoint is experimental and not guaranteed to be stable. -// @ID delete-task +// @Summary Delete AI task +// @ID delete-ai-task // @Security CoderSessionToken -// @Tags Experimental +// @Tags Tasks // @Param user path string true "Username, user ID, or 'me' for the authenticated user" -// @Param task path string true "Task ID" format(uuid) -// @Success 202 "Task deletion initiated" -// @Router /api/experimental/tasks/{user}/{task} [delete] -// -// EXPERIMENTAL: This endpoint is experimental and not guaranteed to be stable. -// taskDelete is an experimental endpoint to delete a task by ID. -// It creates a delete workspace build and returns 202 Accepted if the build was -// created. 
+// @Param task path string true "Task ID, or task name" +// @Success 202 +// @Router /tasks/{user}/{task} [delete] func (api *API) taskDelete(rw http.ResponseWriter, r *http.Request) { ctx := r.Context() apiKey := httpmw.APIKey(r) @@ -618,21 +638,96 @@ func (api *API) taskDelete(rw http.ResponseWriter, r *http.Request) { rw.WriteHeader(http.StatusAccepted) } +// @Summary Update AI task input +// @ID update-ai-task-input +// @Security CoderSessionToken +// @Accept json +// @Tags Tasks +// @Param user path string true "Username, user ID, or 'me' for the authenticated user" +// @Param task path string true "Task ID, or task name" +// @Param request body codersdk.UpdateTaskInputRequest true "Update task input request" +// @Success 204 +// @Router /tasks/{user}/{task}/input [patch] +func (api *API) taskUpdateInput(rw http.ResponseWriter, r *http.Request) { + var ( + ctx = r.Context() + task = httpmw.TaskParam(r) + auditor = api.Auditor.Load() + taskResourceInfo = audit.AdditionalFields{} + ) + + aReq, commitAudit := audit.InitRequest[database.TaskTable](rw, &audit.RequestParams{ + Audit: *auditor, + Log: api.Logger, + Request: r, + Action: database.AuditActionWrite, + AdditionalFields: taskResourceInfo, + }) + defer commitAudit() + aReq.Old = task.TaskTable() + aReq.UpdateOrganizationID(task.OrganizationID) + + var req codersdk.UpdateTaskInputRequest + if !httpapi.Read(ctx, rw, r, &req) { + return + } + + if strings.TrimSpace(req.Input) == "" { + httpapi.Write(ctx, rw, http.StatusBadRequest, codersdk.Response{ + Message: "Task input is required.", + }) + return + } + + var updatedTask database.TaskTable + if err := api.Database.InTx(func(tx database.Store) error { + task, err := tx.GetTaskByID(ctx, task.ID) + if err != nil { + return httperror.NewResponseError(http.StatusInternalServerError, codersdk.Response{ + Message: "Failed to fetch task.", + Detail: err.Error(), + }) + } + + if task.Status != database.TaskStatusPaused { + return 
httperror.NewResponseError(http.StatusConflict, codersdk.Response{ + Message: "Unable to update task input, task must be paused.", + Detail: "Please stop the task's workspace before updating the input.", + }) + } + + updatedTask, err = tx.UpdateTaskPrompt(ctx, database.UpdateTaskPromptParams{ + ID: task.ID, + Prompt: req.Input, + }) + if err != nil { + return httperror.NewResponseError(http.StatusInternalServerError, codersdk.Response{ + Message: "Failed to update task input.", + Detail: err.Error(), + }) + } + + return nil + }, nil); err != nil { + httperror.WriteResponseError(ctx, rw, err) + return + } + + aReq.New = updatedTask + + httpapi.Write(ctx, rw, http.StatusNoContent, nil) +} + // @Summary Send input to AI task -// @Description: EXPERIMENTAL: this endpoint is experimental and not guaranteed to be stable. -// @ID send-task-input +// @ID send-input-to-ai-task // @Security CoderSessionToken -// @Tags Experimental +// @Accept json +// @Tags Tasks // @Param user path string true "Username, user ID, or 'me' for the authenticated user" -// @Param task path string true "Task ID" format(uuid) +// @Param task path string true "Task ID, or task name" // @Param request body codersdk.TaskSendRequest true "Task input request" -// @Success 204 "Input sent successfully" -// @Router /api/experimental/tasks/{user}/{task}/send [post] -// -// EXPERIMENTAL: This endpoint is experimental and not guaranteed to be stable. -// taskSend submits task input to the task app by dialing the agent -// directly over the tailnet. We enforce ApplicationConnect RBAC on the -// workspace and validate the task app health. 
+// @Success 204 +// @Router /tasks/{user}/{task}/send [post] func (api *API) taskSend(rw http.ResponseWriter, r *http.Request) { ctx := r.Context() task := httpmw.TaskParam(r) @@ -693,18 +788,14 @@ func (api *API) taskSend(rw http.ResponseWriter, r *http.Request) { } // @Summary Get AI task logs -// @Description: EXPERIMENTAL: this endpoint is experimental and not guaranteed to be stable. -// @ID get-task-logs +// @ID get-ai-task-logs // @Security CoderSessionToken -// @Tags Experimental +// @Produce json +// @Tags Tasks // @Param user path string true "Username, user ID, or 'me' for the authenticated user" -// @Param task path string true "Task ID" format(uuid) +// @Param task path string true "Task ID, or task name" // @Success 200 {object} codersdk.TaskLogsResponse -// @Router /api/experimental/tasks/{user}/{task}/logs [get] -// -// EXPERIMENTAL: This endpoint is experimental and not guaranteed to be stable. -// taskLogs reads task output by dialing the agent directly over the tailnet. -// We enforce ApplicationConnect RBAC on the workspace and validate the task app health. 
+// @Router /tasks/{user}/{task}/logs [get] func (api *API) taskLogs(rw http.ResponseWriter, r *http.Request) { ctx := r.Context() task := httpmw.TaskParam(r) diff --git a/coderd/aitasks_internal_test.go b/coderd/aitasks_internal_test.go new file mode 100644 index 0000000000000..0c087c653befd --- /dev/null +++ b/coderd/aitasks_internal_test.go @@ -0,0 +1,223 @@ +package coderd + +import ( + "testing" + "time" + + "github.com/google/uuid" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" + + "github.com/coder/coder/v2/coderd/database" + "github.com/coder/coder/v2/coderd/util/ptr" + "github.com/coder/coder/v2/codersdk" +) + +func TestDeriveTaskCurrentState_Unit(t *testing.T) { + t.Parallel() + + now := time.Now() + tests := []struct { + name string + task database.Task + agentLifecycle *codersdk.WorkspaceAgentLifecycle + appHealth *codersdk.WorkspaceAppHealth + latestAppStatus *codersdk.WorkspaceAppStatus + latestBuild codersdk.WorkspaceBuild + expectCurrentState bool + expectedTimestamp time.Time + expectedState codersdk.TaskState + expectedMessage string + }{ + { + name: "NoAppStatus", + task: database.Task{ + ID: uuid.New(), + Status: database.TaskStatusActive, + }, + agentLifecycle: nil, + appHealth: nil, + latestAppStatus: nil, + latestBuild: codersdk.WorkspaceBuild{ + Transition: codersdk.WorkspaceTransitionStart, + CreatedAt: now, + }, + expectCurrentState: false, + }, + { + name: "BuildStartTransition_AppStatus_NewerThanBuild", + task: database.Task{ + ID: uuid.New(), + Status: database.TaskStatusActive, + }, + agentLifecycle: nil, + appHealth: nil, + latestAppStatus: &codersdk.WorkspaceAppStatus{ + State: codersdk.WorkspaceAppStatusStateWorking, + Message: "Task is working", + CreatedAt: now.Add(1 * time.Minute), + }, + latestBuild: codersdk.WorkspaceBuild{ + Transition: codersdk.WorkspaceTransitionStart, + CreatedAt: now, + }, + expectCurrentState: true, + expectedTimestamp: now.Add(1 * time.Minute), + expectedState: 
codersdk.TaskState(codersdk.WorkspaceAppStatusStateWorking), + expectedMessage: "Task is working", + }, + { + name: "BuildStartTransition_StaleAppStatus_OlderThanBuild", + task: database.Task{ + ID: uuid.New(), + Status: database.TaskStatusActive, + }, + agentLifecycle: nil, + appHealth: nil, + latestAppStatus: &codersdk.WorkspaceAppStatus{ + State: codersdk.WorkspaceAppStatusStateComplete, + Message: "Previous task completed", + CreatedAt: now.Add(-1 * time.Minute), + }, + latestBuild: codersdk.WorkspaceBuild{ + Transition: codersdk.WorkspaceTransitionStart, + CreatedAt: now, + }, + expectCurrentState: false, + }, + { + name: "BuildStopTransition", + task: database.Task{ + ID: uuid.New(), + Status: database.TaskStatusActive, + }, + agentLifecycle: nil, + appHealth: nil, + latestAppStatus: &codersdk.WorkspaceAppStatus{ + State: codersdk.WorkspaceAppStatusStateComplete, + Message: "Task completed before stop", + CreatedAt: now.Add(-1 * time.Minute), + }, + latestBuild: codersdk.WorkspaceBuild{ + Transition: codersdk.WorkspaceTransitionStop, + CreatedAt: now, + }, + expectCurrentState: true, + expectedTimestamp: now.Add(-1 * time.Minute), + expectedState: codersdk.TaskState(codersdk.WorkspaceAppStatusStateComplete), + expectedMessage: "Task completed before stop", + }, + { + name: "TaskInitializing_WorkspacePending", + task: database.Task{ + ID: uuid.New(), + Status: database.TaskStatusInitializing, + }, + agentLifecycle: nil, + appHealth: nil, + latestAppStatus: nil, + latestBuild: codersdk.WorkspaceBuild{ + Status: codersdk.WorkspaceStatusPending, + CreatedAt: now, + }, + expectCurrentState: true, + expectedTimestamp: now, + expectedState: codersdk.TaskStateWorking, + expectedMessage: "Workspace is pending", + }, + { + name: "TaskInitializing_WorkspaceStarting", + task: database.Task{ + ID: uuid.New(), + Status: database.TaskStatusInitializing, + }, + agentLifecycle: nil, + appHealth: nil, + latestAppStatus: nil, + latestBuild: codersdk.WorkspaceBuild{ + Status: 
codersdk.WorkspaceStatusStarting, + CreatedAt: now, + }, + expectCurrentState: true, + expectedTimestamp: now, + expectedState: codersdk.TaskStateWorking, + expectedMessage: "Workspace is starting", + }, + { + name: "TaskInitializing_AgentConnecting", + task: database.Task{ + ID: uuid.New(), + Status: database.TaskStatusInitializing, + }, + agentLifecycle: ptr.Ref(codersdk.WorkspaceAgentLifecycleCreated), + appHealth: nil, + latestAppStatus: nil, + latestBuild: codersdk.WorkspaceBuild{ + Status: codersdk.WorkspaceStatusRunning, + CreatedAt: now, + }, + expectCurrentState: true, + expectedTimestamp: now, + expectedState: codersdk.TaskStateWorking, + expectedMessage: "Agent is connecting", + }, + { + name: "TaskInitializing_AgentStarting", + task: database.Task{ + ID: uuid.New(), + Status: database.TaskStatusInitializing, + }, + agentLifecycle: ptr.Ref(codersdk.WorkspaceAgentLifecycleStarting), + appHealth: nil, + latestAppStatus: nil, + latestBuild: codersdk.WorkspaceBuild{ + Status: codersdk.WorkspaceStatusRunning, + CreatedAt: now, + }, + expectCurrentState: true, + expectedTimestamp: now, + expectedState: codersdk.TaskStateWorking, + expectedMessage: "Agent is starting", + }, + { + name: "TaskInitializing_AppInitializing", + task: database.Task{ + ID: uuid.New(), + Status: database.TaskStatusInitializing, + }, + agentLifecycle: ptr.Ref(codersdk.WorkspaceAgentLifecycleReady), + appHealth: ptr.Ref(codersdk.WorkspaceAppHealthInitializing), + latestAppStatus: nil, + latestBuild: codersdk.WorkspaceBuild{ + Status: codersdk.WorkspaceStatusRunning, + CreatedAt: now, + }, + expectCurrentState: true, + expectedTimestamp: now, + expectedState: codersdk.TaskStateWorking, + expectedMessage: "App is initializing", + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + + ws := codersdk.Workspace{ + LatestBuild: tt.latestBuild, + LatestAppStatus: tt.latestAppStatus, + } + + currentState := deriveTaskCurrentState(tt.task, ws, 
tt.agentLifecycle, tt.appHealth) + + if tt.expectCurrentState { + require.NotNil(t, currentState) + assert.Equal(t, tt.expectedTimestamp.UTC(), currentState.Timestamp.UTC()) + assert.Equal(t, tt.expectedState, currentState.State) + assert.Equal(t, tt.expectedMessage, currentState.Message) + } else { + assert.Nil(t, currentState) + } + }) + } +} diff --git a/coderd/aitasks_test.go b/coderd/aitasks_test.go index 80af3e993e97a..3301f8bdd5f31 100644 --- a/coderd/aitasks_test.go +++ b/coderd/aitasks_test.go @@ -7,10 +7,8 @@ import ( "io" "net/http" "net/http/httptest" - "strings" "testing" "time" - "unicode/utf8" "github.com/google/uuid" "github.com/stretchr/testify/assert" @@ -25,6 +23,7 @@ import ( "github.com/coder/coder/v2/coderd/database/dbfake" "github.com/coder/coder/v2/coderd/database/dbgen" "github.com/coder/coder/v2/coderd/database/dbtime" + "github.com/coder/coder/v2/coderd/httpapi" "github.com/coder/coder/v2/coderd/notifications" "github.com/coder/coder/v2/coderd/notifications/notificationstest" "github.com/coder/coder/v2/coderd/util/slice" @@ -35,128 +34,6 @@ import ( "github.com/coder/coder/v2/testutil" ) -func TestAITasksPrompts(t *testing.T) { - t.Parallel() - - t.Run("EmptyBuildIDs", func(t *testing.T) { - t.Parallel() - client := coderdtest.New(t, &coderdtest.Options{}) - _ = coderdtest.CreateFirstUser(t, client) - experimentalClient := codersdk.NewExperimentalClient(client) - - ctx := testutil.Context(t, testutil.WaitShort) - - // Test with empty build IDs - prompts, err := experimentalClient.AITaskPrompts(ctx, []uuid.UUID{}) - require.NoError(t, err) - require.Empty(t, prompts.Prompts) - }) - - t.Run("MultipleBuilds", func(t *testing.T) { - t.Parallel() - - adminClient := coderdtest.New(t, &coderdtest.Options{IncludeProvisionerDaemon: true}) - first := coderdtest.CreateFirstUser(t, adminClient) - memberClient, _ := coderdtest.CreateAnotherUser(t, adminClient, first.OrganizationID) - - ctx := testutil.Context(t, testutil.WaitLong) - - // Create a 
template with parameters - version := coderdtest.CreateTemplateVersion(t, adminClient, first.OrganizationID, &echo.Responses{ - Parse: echo.ParseComplete, - ProvisionPlan: []*proto.Response{{ - Type: &proto.Response_Plan{ - Plan: &proto.PlanComplete{ - Parameters: []*proto.RichParameter{ - { - Name: "param1", - Type: "string", - DefaultValue: "default1", - }, - { - Name: codersdk.AITaskPromptParameterName, - Type: "string", - DefaultValue: "default2", - }, - }, - }, - }, - }}, - ProvisionApply: echo.ApplyComplete, - }) - template := coderdtest.CreateTemplate(t, adminClient, first.OrganizationID, version.ID) - coderdtest.AwaitTemplateVersionJobCompleted(t, adminClient, version.ID) - - // Create two workspaces with different parameters - workspace1 := coderdtest.CreateWorkspace(t, memberClient, template.ID, func(request *codersdk.CreateWorkspaceRequest) { - request.RichParameterValues = []codersdk.WorkspaceBuildParameter{ - {Name: "param1", Value: "value1a"}, - {Name: codersdk.AITaskPromptParameterName, Value: "value2a"}, - } - }) - coderdtest.AwaitWorkspaceBuildJobCompleted(t, memberClient, workspace1.LatestBuild.ID) - - workspace2 := coderdtest.CreateWorkspace(t, memberClient, template.ID, func(request *codersdk.CreateWorkspaceRequest) { - request.RichParameterValues = []codersdk.WorkspaceBuildParameter{ - {Name: "param1", Value: "value1b"}, - {Name: codersdk.AITaskPromptParameterName, Value: "value2b"}, - } - }) - coderdtest.AwaitWorkspaceBuildJobCompleted(t, memberClient, workspace2.LatestBuild.ID) - - workspace3 := coderdtest.CreateWorkspace(t, adminClient, template.ID, func(request *codersdk.CreateWorkspaceRequest) { - request.RichParameterValues = []codersdk.WorkspaceBuildParameter{ - {Name: "param1", Value: "value1c"}, - {Name: codersdk.AITaskPromptParameterName, Value: "value2c"}, - } - }) - coderdtest.AwaitWorkspaceBuildJobCompleted(t, adminClient, workspace3.LatestBuild.ID) - allBuildIDs := []uuid.UUID{workspace1.LatestBuild.ID, workspace2.LatestBuild.ID, 
workspace3.LatestBuild.ID} - - experimentalMemberClient := codersdk.NewExperimentalClient(memberClient) - // Test parameters endpoint as member - prompts, err := experimentalMemberClient.AITaskPrompts(ctx, allBuildIDs) - require.NoError(t, err) - // we expect 2 prompts because the member client does not have access to workspace3 - // since it was created by the admin client - require.Len(t, prompts.Prompts, 2) - - // Check workspace1 parameters - build1Prompt := prompts.Prompts[workspace1.LatestBuild.ID.String()] - require.Equal(t, "value2a", build1Prompt) - - // Check workspace2 parameters - build2Prompt := prompts.Prompts[workspace2.LatestBuild.ID.String()] - require.Equal(t, "value2b", build2Prompt) - - experimentalAdminClient := codersdk.NewExperimentalClient(adminClient) - // Test parameters endpoint as admin - // we expect 3 prompts because the admin client has access to all workspaces - prompts, err = experimentalAdminClient.AITaskPrompts(ctx, allBuildIDs) - require.NoError(t, err) - require.Len(t, prompts.Prompts, 3) - - // Check workspace3 parameters - build3Prompt := prompts.Prompts[workspace3.LatestBuild.ID.String()] - require.Equal(t, "value2c", build3Prompt) - }) - - t.Run("NonExistentBuildIDs", func(t *testing.T) { - t.Parallel() - client := coderdtest.New(t, &coderdtest.Options{}) - _ = coderdtest.CreateFirstUser(t, client) - - ctx := testutil.Context(t, testutil.WaitShort) - - // Test with non-existent build IDs - nonExistentID := uuid.New() - experimentalClient := codersdk.NewExperimentalClient(client) - prompts, err := experimentalClient.AITaskPrompts(ctx, []uuid.UUID{nonExistentID}) - require.NoError(t, err) - require.Empty(t, prompts.Prompts) - }) -} - func TestTasks(t *testing.T) { t.Parallel() @@ -188,7 +65,6 @@ func TestTasks(t *testing.T) { { Type: &proto.Response_Plan{ Plan: &proto.PlanComplete{ - Parameters: []*proto.RichParameter{{Name: codersdk.AITaskPromptParameterName, Type: "string"}}, HasAiTasks: true, }, }, @@ -248,8 +124,7 @@ func 
TestTasks(t *testing.T) { // Create a task with a specific prompt using the new data model. wantPrompt := "build me a web app" - exp := codersdk.NewExperimentalClient(client) - task, err := exp.CreateTask(ctx, codersdk.Me, codersdk.CreateTaskRequest{ + task, err := client.CreateTask(ctx, codersdk.Me, codersdk.CreateTaskRequest{ TemplateVersionID: template.ActiveVersionID, Input: wantPrompt, }) @@ -259,10 +134,13 @@ func TestTasks(t *testing.T) { // Wait for the workspace to be built. workspace, err := client.Workspace(ctx, task.WorkspaceID.UUID) require.NoError(t, err) + if assert.True(t, workspace.TaskID.Valid, "task id should be set on workspace") { + assert.Equal(t, task.ID, workspace.TaskID.UUID, "workspace task id should match") + } coderdtest.AwaitWorkspaceBuildJobCompleted(t, client, workspace.LatestBuild.ID) // List tasks via experimental API and verify the prompt and status mapping. - tasks, err := exp.Tasks(ctx, &codersdk.TasksFilter{Owner: codersdk.Me}) + tasks, err := client.Tasks(ctx, &codersdk.TasksFilter{Owner: codersdk.Me}) require.NoError(t, err) got, ok := slice.Find(tasks, func(t codersdk.Task) bool { return t.ID == task.ID }) @@ -279,15 +157,15 @@ func TestTasks(t *testing.T) { t.Parallel() var ( - client, db = coderdtest.NewWithDatabase(t, &coderdtest.Options{IncludeProvisionerDaemon: true}) - ctx = testutil.Context(t, testutil.WaitLong) - user = coderdtest.CreateFirstUser(t, client) - template = createAITemplate(t, client, user) - wantPrompt = "review my code" - exp = codersdk.NewExperimentalClient(client) + client, db = coderdtest.NewWithDatabase(t, &coderdtest.Options{IncludeProvisionerDaemon: true}) + ctx = testutil.Context(t, testutil.WaitLong) + user = coderdtest.CreateFirstUser(t, client) + anotherUser, _ = coderdtest.CreateAnotherUser(t, client, user.OrganizationID) + template = createAITemplate(t, client, user) + wantPrompt = "review my code" ) - task, err := exp.CreateTask(ctx, "me", codersdk.CreateTaskRequest{ + task, err := 
client.CreateTask(ctx, "me", codersdk.CreateTaskRequest{ TemplateVersionID: template.ActiveVersionID, Input: wantPrompt, }) @@ -297,6 +175,9 @@ func TestTasks(t *testing.T) { // Get the workspace and wait for it to be ready. ws, err := client.Workspace(ctx, task.WorkspaceID.UUID) require.NoError(t, err) + if assert.True(t, ws.TaskID.Valid, "task id should be set on workspace") { + assert.Equal(t, task.ID, ws.TaskID.UUID, "workspace task id should match") + } coderdtest.AwaitWorkspaceBuildJobCompleted(t, client, ws.LatestBuild.ID) ws = coderdtest.MustWorkspace(t, client, task.WorkspaceID.UUID) // Assert invariant: the workspace has exactly one resource with one agent with one app. @@ -318,7 +199,7 @@ func TestTasks(t *testing.T) { require.NoError(t, err) // Fetch the task by ID via experimental API and verify fields. - updated, err := exp.TaskByID(ctx, task.ID) + updated, err := client.TaskByID(ctx, task.ID) require.NoError(t, err) assert.Equal(t, task.ID, updated.ID, "task ID should match") @@ -331,23 +212,44 @@ func TestTasks(t *testing.T) { assert.Equal(t, taskAppID, updated.WorkspaceAppID.UUID, "workspace app id should match") assert.NotEmpty(t, updated.WorkspaceStatus, "task status should not be empty") + // Fetch the task by name and verify the same result + byName, err := client.TaskByOwnerAndName(ctx, codersdk.Me, task.Name) + require.NoError(t, err) + require.Equal(t, byName, updated) + + // Another member user should not be able to fetch the task + _, err = anotherUser.TaskByID(ctx, task.ID) + require.Error(t, err, "fetching task should fail by ID for another member user") + var sdkErr *codersdk.Error + require.ErrorAs(t, err, &sdkErr) + require.Equal(t, http.StatusNotFound, sdkErr.StatusCode()) + // Also test by name + _, err = anotherUser.TaskByOwnerAndName(ctx, task.OwnerName, task.Name) + require.Error(t, err, "fetching task should fail by name for another member user") + require.ErrorAs(t, err, &sdkErr) + require.Equal(t, http.StatusNotFound, 
sdkErr.StatusCode()) + // Stop the workspace coderdtest.MustTransitionWorkspace(t, client, task.WorkspaceID.UUID, codersdk.WorkspaceTransitionStart, codersdk.WorkspaceTransitionStop) // Verify that the previous status still remains - updated, err = exp.TaskByID(ctx, task.ID) + updated, err = client.TaskByID(ctx, task.ID) require.NoError(t, err) assert.NotNil(t, updated.CurrentState, "current state should not be nil") assert.Equal(t, "all done", updated.CurrentState.Message) assert.Equal(t, codersdk.TaskStateComplete, updated.CurrentState.State) + previousCurrentState := updated.CurrentState // Start the workspace again coderdtest.MustTransitionWorkspace(t, client, task.WorkspaceID.UUID, codersdk.WorkspaceTransitionStop, codersdk.WorkspaceTransitionStart) - // Verify that the status from the previous build is no longer present - updated, err = exp.TaskByID(ctx, task.ID) + // Verify that the status from the previous build has been cleared + // and replaced by the agent initialization status. 
+ updated, err = client.TaskByID(ctx, task.ID) require.NoError(t, err) - assert.Nil(t, updated.CurrentState, "current state should be nil") + assert.NotEqual(t, previousCurrentState, updated.CurrentState) + assert.Equal(t, codersdk.TaskStateWorking, updated.CurrentState.State) + assert.NotEqual(t, "all done", updated.CurrentState.Message) }) t.Run("Delete", func(t *testing.T) { @@ -362,8 +264,7 @@ func TestTasks(t *testing.T) { ctx := testutil.Context(t, testutil.WaitLong) - exp := codersdk.NewExperimentalClient(client) - task, err := exp.CreateTask(ctx, "me", codersdk.CreateTaskRequest{ + task, err := client.CreateTask(ctx, "me", codersdk.CreateTaskRequest{ TemplateVersionID: template.ActiveVersionID, Input: "delete me", }) @@ -371,9 +272,12 @@ func TestTasks(t *testing.T) { require.True(t, task.WorkspaceID.Valid, "task should have a workspace ID") ws, err := client.Workspace(ctx, task.WorkspaceID.UUID) require.NoError(t, err) + if assert.True(t, ws.TaskID.Valid, "task id should be set on workspace") { + assert.Equal(t, task.ID, ws.TaskID.UUID, "workspace task id should match") + } coderdtest.AwaitWorkspaceBuildJobCompleted(t, client, ws.LatestBuild.ID) - err = exp.DeleteTask(ctx, "me", task.ID) + err = client.DeleteTask(ctx, "me", task.ID) require.NoError(t, err, "delete task request should be accepted") // Poll until the workspace is deleted. 
@@ -395,8 +299,7 @@ func TestTasks(t *testing.T) { ctx := testutil.Context(t, testutil.WaitShort) - exp := codersdk.NewExperimentalClient(client) - err := exp.DeleteTask(ctx, "me", uuid.New()) + err := client.DeleteTask(ctx, "me", uuid.New()) var sdkErr *codersdk.Error require.Error(t, err, "expected an error for non-existent task") @@ -417,10 +320,12 @@ func TestTasks(t *testing.T) { coderdtest.AwaitTemplateVersionJobCompleted(t, client, version.ID) template := coderdtest.CreateTemplate(t, client, user.OrganizationID, version.ID) ws := coderdtest.CreateWorkspace(t, client, template.ID) + if assert.False(t, ws.TaskID.Valid, "task id should not be set on non-task workspace") { + assert.Zero(t, ws.TaskID, "non-task workspace task id should be empty") + } coderdtest.AwaitWorkspaceBuildJobCompleted(t, client, ws.LatestBuild.ID) - exp := codersdk.NewExperimentalClient(client) - err := exp.DeleteTask(ctx, "me", ws.ID) + err := client.DeleteTask(ctx, "me", ws.ID) var sdkErr *codersdk.Error require.Error(t, err, "expected an error for non-task workspace delete via tasks endpoint") @@ -439,8 +344,7 @@ func TestTasks(t *testing.T) { ctx := testutil.Context(t, testutil.WaitShort) - exp := codersdk.NewExperimentalClient(client) - task, err := exp.CreateTask(ctx, "me", codersdk.CreateTaskRequest{ + task, err := client.CreateTask(ctx, "me", codersdk.CreateTaskRequest{ TemplateVersionID: template.ActiveVersionID, Input: "delete me not", }) @@ -452,10 +356,9 @@ func TestTasks(t *testing.T) { // Another regular org member without elevated permissions. otherClient, _ := coderdtest.CreateAnotherUser(t, client, owner.OrganizationID) - expOther := codersdk.NewExperimentalClient(otherClient) // Attempt to delete the owner's task as a non-owner without permissions. 
- err = expOther.DeleteTask(ctx, "me", task.ID) + err = otherClient.DeleteTask(ctx, "me", task.ID) var authErr *codersdk.Error require.Error(t, err, "expected an authorization error when deleting another user's task") @@ -466,15 +369,48 @@ func TestTasks(t *testing.T) { } }) - t.Run("NoWorkspace", func(t *testing.T) { + t.Run("DeletedWorkspace", func(t *testing.T) { + t.Parallel() + + client, db := coderdtest.NewWithDatabase(t, &coderdtest.Options{IncludeProvisionerDaemon: true}) + user := coderdtest.CreateFirstUser(t, client) + template := createAITemplate(t, client, user) + ctx := testutil.Context(t, testutil.WaitLong) + task, err := client.CreateTask(ctx, "me", codersdk.CreateTaskRequest{ + TemplateVersionID: template.ActiveVersionID, + Input: "delete me", + }) + require.NoError(t, err) + require.True(t, task.WorkspaceID.Valid, "task should have a workspace ID") + ws, err := client.Workspace(ctx, task.WorkspaceID.UUID) + require.NoError(t, err) + coderdtest.AwaitWorkspaceBuildJobCompleted(t, client, ws.LatestBuild.ID) + + // Mark the workspace as deleted directly in the database, bypassing provisionerd. + require.NoError(t, db.UpdateWorkspaceDeletedByID(dbauthz.AsProvisionerd(ctx), database.UpdateWorkspaceDeletedByIDParams{ + ID: ws.ID, + Deleted: true, + })) + // We should still be able to fetch the task if its workspace was deleted. + // Provisionerdserver will attempt delete the related task when deleting a workspace. + // This test ensures that we can still handle the case where, for some reason, the + // task has not been marked as deleted, but the workspace has. 
+ task, err = client.TaskByID(ctx, task.ID) + require.NoError(t, err, "fetching a task should still work if its related workspace is deleted") + err = client.DeleteTask(ctx, task.OwnerID.String(), task.ID) + require.NoError(t, err, "should be possible to delete a task with no workspace") + }) + + t.Run("DeletingTaskWorkspaceDeletesTask", func(t *testing.T) { t.Parallel() client := coderdtest.New(t, &coderdtest.Options{IncludeProvisionerDaemon: true}) user := coderdtest.CreateFirstUser(t, client) template := createAITemplate(t, client, user) + ctx := testutil.Context(t, testutil.WaitLong) - exp := codersdk.NewExperimentalClient(client) - task, err := exp.CreateTask(ctx, "me", codersdk.CreateTaskRequest{ + + task, err := client.CreateTask(ctx, "me", codersdk.CreateTaskRequest{ TemplateVersionID: template.ActiveVersionID, Input: "delete me", }) @@ -482,14 +418,19 @@ func TestTasks(t *testing.T) { require.True(t, task.WorkspaceID.Valid, "task should have a workspace ID") ws, err := client.Workspace(ctx, task.WorkspaceID.UUID) require.NoError(t, err) + if assert.True(t, ws.TaskID.Valid, "task id should be set on workspace") { + assert.Equal(t, task.ID, ws.TaskID.UUID, "workspace task id should match") + } coderdtest.AwaitWorkspaceBuildJobCompleted(t, client, ws.LatestBuild.ID) - // Delete the task workspace + + // When; the task workspace is deleted coderdtest.MustTransitionWorkspace(t, client, ws.ID, codersdk.WorkspaceTransitionStart, codersdk.WorkspaceTransitionDelete) - // We should still be able to fetch the task after deleting its workspace - task, err = exp.TaskByID(ctx, task.ID) - require.NoError(t, err, "fetching a task should still work after deleting its related workspace") - err = exp.DeleteTask(ctx, task.OwnerID.String(), task.ID) - require.NoError(t, err, "should be possible to delete a task with no workspace") + // Then: the task associated with the workspace is also deleted + _, err = client.TaskByID(ctx, task.ID) + require.Error(t, err, "expected an 
error fetching the task") + var sdkErr *codersdk.Error + require.ErrorAs(t, err, &sdkErr, "expected a codersdk.Error") + require.Equal(t, http.StatusNotFound, sdkErr.StatusCode()) }) }) @@ -544,10 +485,9 @@ func TestTasks(t *testing.T) { userClient, _ = coderdtest.CreateAnotherUser(t, client, owner.OrganizationID) agentAuthToken = uuid.NewString() template = createAITemplate(t, client, owner, withAgentToken(agentAuthToken), withSidebarURL(srv.URL)) - exp = codersdk.NewExperimentalClient(userClient) ) - task, err := exp.CreateTask(ctx, "me", codersdk.CreateTaskRequest{ + task, err := userClient.CreateTask(ctx, "me", codersdk.CreateTaskRequest{ TemplateVersionID: template.ActiveVersionID, Input: "send me food", }) @@ -560,7 +500,7 @@ func TestTasks(t *testing.T) { coderdtest.AwaitWorkspaceBuildJobCompleted(t, userClient, ws.LatestBuild.ID) // Fetch the task by ID via experimental API and verify fields. - task, err = exp.TaskByID(ctx, task.ID) + task, err = client.TaskByID(ctx, task.ID) require.NoError(t, err) require.NotZero(t, task.WorkspaceBuildNumber) require.True(t, task.WorkspaceAgentID.Valid) @@ -586,7 +526,7 @@ func TestTasks(t *testing.T) { coderdtest.NewWorkspaceAgentWaiter(t, userClient, ws.ID).WaitFor(coderdtest.AgentsReady) // Fetch the task by ID via experimental API and verify fields. - task, err = exp.TaskByID(ctx, task.ID) + task, err = client.TaskByID(ctx, task.ID) require.NoError(t, err) // Make the sidebar app unhealthy initially. 
@@ -596,7 +536,7 @@ func TestTasks(t *testing.T) { }) require.NoError(t, err) - err = exp.TaskSend(ctx, "me", task.ID, codersdk.TaskSendRequest{ + err = client.TaskSend(ctx, "me", task.ID, codersdk.TaskSendRequest{ Input: "Hello, Agent!", }) require.Error(t, err, "wanted error due to unhealthy sidebar app") @@ -610,7 +550,7 @@ func TestTasks(t *testing.T) { statusResponse = agentapisdk.AgentStatus("bad") - err = exp.TaskSend(ctx, "me", task.ID, codersdk.TaskSendRequest{ + err = client.TaskSend(ctx, "me", task.ID, codersdk.TaskSendRequest{ Input: "Hello, Agent!", }) require.Error(t, err, "wanted error due to bad status") @@ -619,7 +559,7 @@ func TestTasks(t *testing.T) { //nolint:tparallel // Not intended to run in parallel. t.Run("SendOK", func(t *testing.T) { - err = exp.TaskSend(ctx, "me", task.ID, codersdk.TaskSendRequest{ + err = client.TaskSend(ctx, "me", task.ID, codersdk.TaskSendRequest{ Input: "Hello, Agent!", }) require.NoError(t, err, "wanted no error due to healthy sidebar app and stable status") @@ -627,7 +567,7 @@ func TestTasks(t *testing.T) { //nolint:tparallel // Not intended to run in parallel. 
t.Run("MissingContent", func(t *testing.T) { - err = exp.TaskSend(ctx, "me", task.ID, codersdk.TaskSendRequest{ + err = client.TaskSend(ctx, "me", task.ID, codersdk.TaskSendRequest{ Input: "", }) require.Error(t, err, "wanted error due to missing content") @@ -645,8 +585,7 @@ func TestTasks(t *testing.T) { _ = coderdtest.CreateFirstUser(t, client) ctx := testutil.Context(t, testutil.WaitShort) - exp := codersdk.NewExperimentalClient(client) - err := exp.TaskSend(ctx, "me", uuid.New(), codersdk.TaskSendRequest{ + err := client.TaskSend(ctx, "me", uuid.New(), codersdk.TaskSendRequest{ Input: "hi", }) @@ -712,10 +651,9 @@ func TestTasks(t *testing.T) { owner = coderdtest.CreateFirstUser(t, client) agentAuthToken = uuid.NewString() template = createAITemplate(t, client, owner, withAgentToken(agentAuthToken), withSidebarURL(srv.URL)) - exp = codersdk.NewExperimentalClient(client) ) - task, err := exp.CreateTask(ctx, "me", codersdk.CreateTaskRequest{ + task, err := client.CreateTask(ctx, "me", codersdk.CreateTaskRequest{ TemplateVersionID: template.ActiveVersionID, Input: "show logs", }) @@ -728,7 +666,7 @@ func TestTasks(t *testing.T) { coderdtest.AwaitWorkspaceBuildJobCompleted(t, client, ws.LatestBuild.ID) // Fetch the task by ID via experimental API and verify fields. - task, err = exp.TaskByID(ctx, task.ID) + task, err = client.TaskByIdentifier(ctx, task.ID.String()) require.NoError(t, err) require.NotZero(t, task.WorkspaceBuildNumber) require.True(t, task.WorkspaceAgentID.Valid) @@ -754,13 +692,13 @@ func TestTasks(t *testing.T) { coderdtest.NewWorkspaceAgentWaiter(t, client, ws.ID).WaitFor(coderdtest.AgentsReady) // Fetch the task by ID via experimental API and verify fields. - task, err = exp.TaskByID(ctx, task.ID) + task, err = client.TaskByID(ctx, task.ID) require.NoError(t, err) //nolint:tparallel // Not intended to run in parallel. t.Run("OK", func(t *testing.T) { // Fetch logs. 
- resp, err := exp.TaskLogs(ctx, "me", task.ID) + resp, err := client.TaskLogs(ctx, "me", task.ID) require.NoError(t, err) require.Len(t, resp.Logs, 3) assert.Equal(t, 0, resp.Logs[0].ID) @@ -780,7 +718,7 @@ func TestTasks(t *testing.T) { t.Run("UpstreamError", func(t *testing.T) { shouldReturnError = true t.Cleanup(func() { shouldReturnError = false }) - _, err := exp.TaskLogs(ctx, "me", task.ID) + _, err := client.TaskLogs(ctx, "me", task.ID) var sdkErr *codersdk.Error require.Error(t, err) @@ -788,6 +726,205 @@ func TestTasks(t *testing.T) { require.Equal(t, http.StatusBadGateway, sdkErr.StatusCode()) }) }) + + t.Run("UpdateInput", func(t *testing.T) { + tests := []struct { + name string + disableProvisioner bool + transition database.WorkspaceTransition + cancelTransition bool + deleteTask bool + taskInput string + wantStatus codersdk.TaskStatus + wantErr string + wantErrStatusCode int + }{ + { + name: "TaskStatusInitializing", + // We want to disable the provisioner so that the task + // never gets provisioned (ensuring it stays in Initializing). + disableProvisioner: true, + taskInput: "Valid prompt", + wantStatus: codersdk.TaskStatusInitializing, + wantErr: "Unable to update", + wantErrStatusCode: http.StatusConflict, + }, + { + name: "TaskStatusPaused", + transition: database.WorkspaceTransitionStop, + taskInput: "Valid prompt", + wantStatus: codersdk.TaskStatusPaused, + }, + { + name: "TaskStatusError", + transition: database.WorkspaceTransitionStart, + cancelTransition: true, + taskInput: "Valid prompt", + wantStatus: codersdk.TaskStatusError, + wantErr: "Unable to update", + wantErrStatusCode: http.StatusConflict, + }, + { + name: "EmptyPrompt", + transition: database.WorkspaceTransitionStop, + // We want to ensure an empty prompt is rejected. 
+ taskInput: "", + wantStatus: codersdk.TaskStatusPaused, + wantErr: "Task input is required.", + wantErrStatusCode: http.StatusBadRequest, + }, + { + name: "TaskDeleted", + transition: database.WorkspaceTransitionStop, + deleteTask: true, + taskInput: "Valid prompt", + wantErr: httpapi.ResourceNotFoundResponse.Message, + wantErrStatusCode: http.StatusNotFound, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + + client, provisioner := coderdtest.NewWithProvisionerCloser(t, &coderdtest.Options{IncludeProvisionerDaemon: true}) + user := coderdtest.CreateFirstUser(t, client) + ctx := testutil.Context(t, testutil.WaitLong) + + template := createAITemplate(t, client, user) + + if tt.disableProvisioner { + provisioner.Close() + } + + // Given: We create a task + task, err := client.CreateTask(ctx, codersdk.Me, codersdk.CreateTaskRequest{ + TemplateVersionID: template.ActiveVersionID, + Input: "initial prompt", + }) + require.NoError(t, err) + require.True(t, task.WorkspaceID.Valid, "task should have a workspace ID") + + if !tt.disableProvisioner { + // Given: The Task is running + workspace, err := client.Workspace(ctx, task.WorkspaceID.UUID) + require.NoError(t, err) + coderdtest.AwaitWorkspaceBuildJobCompleted(t, client, workspace.LatestBuild.ID) + + // Given: We transition the task's workspace + build := coderdtest.CreateWorkspaceBuild(t, client, workspace, tt.transition) + if tt.cancelTransition { + // Given: We cancel the workspace build + err := client.CancelWorkspaceBuild(ctx, build.ID, codersdk.CancelWorkspaceBuildParams{}) + require.NoError(t, err) + + coderdtest.AwaitWorkspaceBuildJobCompleted(t, client, build.ID) + + // Then: We expect it to be canceled + build, err = client.WorkspaceBuild(ctx, build.ID) + require.NoError(t, err) + require.Equal(t, codersdk.WorkspaceStatusCanceled, build.Status) + } else { + coderdtest.AwaitWorkspaceBuildJobCompleted(t, client, build.ID) + } + } + + if tt.deleteTask { + err = 
client.DeleteTask(ctx, codersdk.Me, task.ID) + require.NoError(t, err) + } else { + // Given: Task has expected status + task, err = client.TaskByID(ctx, task.ID) + require.NoError(t, err) + require.Equal(t, tt.wantStatus, task.Status) + } + + // When: We attempt to update the task input + err = client.UpdateTaskInput(ctx, task.OwnerName, task.ID, codersdk.UpdateTaskInputRequest{ + Input: tt.taskInput, + }) + if tt.wantErr != "" { + require.ErrorContains(t, err, tt.wantErr) + + if tt.wantErrStatusCode != 0 { + var apiErr *codersdk.Error + require.ErrorAs(t, err, &apiErr) + require.Equal(t, tt.wantErrStatusCode, apiErr.StatusCode()) + } + + if !tt.deleteTask { + // Then: We expect the input to **not** be updated + task, err = client.TaskByID(ctx, task.ID) + require.NoError(t, err) + require.NotEqual(t, tt.taskInput, task.InitialPrompt) + } + } else { + require.NoError(t, err) + + if !tt.deleteTask { + // Then: We expect the input to be updated + task, err = client.TaskByID(ctx, task.ID) + require.NoError(t, err) + require.Equal(t, tt.taskInput, task.InitialPrompt) + } + } + }) + } + + t.Run("NonExistentTask", func(t *testing.T) { + t.Parallel() + + client := coderdtest.New(t, &coderdtest.Options{IncludeProvisionerDaemon: true}) + user := coderdtest.CreateFirstUser(t, client) + ctx := testutil.Context(t, testutil.WaitShort) + + // Attempt to update prompt for non-existent task + err := client.UpdateTaskInput(ctx, user.UserID.String(), uuid.New(), codersdk.UpdateTaskInputRequest{ + Input: "Should fail", + }) + require.Error(t, err) + var apiErr *codersdk.Error + require.ErrorAs(t, err, &apiErr) + require.Equal(t, http.StatusNotFound, apiErr.StatusCode()) + }) + + t.Run("UnauthorizedUser", func(t *testing.T) { + t.Parallel() + + client := coderdtest.New(t, &coderdtest.Options{IncludeProvisionerDaemon: true}) + user := coderdtest.CreateFirstUser(t, client) + anotherUser, _ := coderdtest.CreateAnotherUser(t, client, user.OrganizationID) + ctx := testutil.Context(t, 
testutil.WaitLong) + + template := createAITemplate(t, client, user) + + // Create a task as the first user + task, err := client.CreateTask(ctx, codersdk.Me, codersdk.CreateTaskRequest{ + TemplateVersionID: template.ActiveVersionID, + Input: "initial prompt", + }) + require.NoError(t, err) + require.True(t, task.WorkspaceID.Valid) + + // Wait for workspace to complete + workspace, err := client.Workspace(ctx, task.WorkspaceID.UUID) + require.NoError(t, err) + coderdtest.AwaitWorkspaceBuildJobCompleted(t, client, workspace.LatestBuild.ID) + + // Stop the workspace + build := coderdtest.CreateWorkspaceBuild(t, client, workspace, database.WorkspaceTransitionStop) + coderdtest.AwaitWorkspaceBuildJobCompleted(t, client, build.ID) + + // Attempt to update prompt as another user should fail with 404 Not Found + err = anotherUser.UpdateTaskInput(ctx, task.OwnerName, task.ID, codersdk.UpdateTaskInputRequest{ + Input: "Should fail - unauthorized", + }) + require.Error(t, err) + var apiErr *codersdk.Error + require.ErrorAs(t, err, &apiErr) + require.Equal(t, http.StatusNotFound, apiErr.StatusCode()) + }) + }) } func TestTasksCreate(t *testing.T) { @@ -805,6 +942,49 @@ func TestTasksCreate(t *testing.T) { client := coderdtest.New(t, &coderdtest.Options{IncludeProvisionerDaemon: true}) user := coderdtest.CreateFirstUser(t, client) + version := coderdtest.CreateTemplateVersion(t, client, user.OrganizationID, &echo.Responses{ + Parse: echo.ParseComplete, + ProvisionApply: echo.ApplyComplete, + ProvisionPlan: []*proto.Response{ + {Type: &proto.Response_Plan{Plan: &proto.PlanComplete{ + HasAiTasks: true, + }}}, + }, + }) + coderdtest.AwaitTemplateVersionJobCompleted(t, client, version.ID) + template := coderdtest.CreateTemplate(t, client, user.OrganizationID, version.ID) + + task, err := client.CreateTask(ctx, "me", codersdk.CreateTaskRequest{ + TemplateVersionID: template.ActiveVersionID, + Input: taskPrompt, + }) + require.NoError(t, err) + require.True(t, 
task.WorkspaceID.Valid) + + ws, err := client.Workspace(ctx, task.WorkspaceID.UUID) + require.NoError(t, err) + coderdtest.AwaitWorkspaceBuildJobCompleted(t, client, ws.LatestBuild.ID) + + assert.NotEmpty(t, task.Name) + assert.Equal(t, template.ID, task.TemplateID) + + parameters, err := client.WorkspaceBuildParameters(ctx, ws.LatestBuild.ID) + require.NoError(t, err) + require.Len(t, parameters, 0) + }) + + t.Run("OK AIPromptBackCompat", func(t *testing.T) { + t.Parallel() + + var ( + ctx = testutil.Context(t, testutil.WaitShort) + + taskPrompt = "Some task prompt" + ) + + client := coderdtest.New(t, &coderdtest.Options{IncludeProvisionerDaemon: true}) + user := coderdtest.CreateFirstUser(t, client) + // Given: A template with an "AI Prompt" parameter version := coderdtest.CreateTemplateVersion(t, client, user.OrganizationID, &echo.Responses{ Parse: echo.ParseComplete, @@ -819,10 +999,8 @@ func TestTasksCreate(t *testing.T) { coderdtest.AwaitTemplateVersionJobCompleted(t, client, version.ID) template := coderdtest.CreateTemplate(t, client, user.OrganizationID, version.ID) - expClient := codersdk.NewExperimentalClient(client) - // When: We attempt to create a Task. 
- task, err := expClient.CreateTask(ctx, "me", codersdk.CreateTaskRequest{ + task, err := client.CreateTask(ctx, "me", codersdk.CreateTaskRequest{ TemplateVersionID: template.ActiveVersionID, Input: taskPrompt, }) @@ -849,14 +1027,17 @@ func TestTasksCreate(t *testing.T) { t.Parallel() tests := []struct { - name string - taskName string - expectFallbackName bool - expectError string + name string + taskName string + taskDisplayName string + expectFallbackName bool + expectFallbackDisplayName bool + expectError string }{ { - name: "ValidName", - taskName: "a-valid-task-name", + name: "ValidName", + taskName: "a-valid-task-name", + expectFallbackDisplayName: true, }, { name: "NotValidName", @@ -866,8 +1047,37 @@ func TestTasksCreate(t *testing.T) { { name: "NoNameProvided", taskName: "", + taskDisplayName: "A valid task display name", expectFallbackName: true, }, + { + name: "ValidDisplayName", + taskDisplayName: "A valid task display name", + expectFallbackName: true, + }, + { + name: "NotValidDisplayName", + taskDisplayName: "This is a task display name with a length greater than 64 characters.", + expectError: "Display name must be 64 characters or less.", + }, + { + name: "NoDisplayNameProvided", + taskName: "a-valid-task-name", + taskDisplayName: "", + expectFallbackDisplayName: true, + }, + { + name: "ValidNameAndDisplayName", + taskName: "a-valid-task-name", + taskDisplayName: "A valid task display name", + }, + { + name: "NoNameAndDisplayNameProvided", + taskName: "", + taskDisplayName: "", + expectFallbackName: true, + expectFallbackDisplayName: true, + }, } for _, tt := range tests { @@ -875,16 +1085,14 @@ func TestTasksCreate(t *testing.T) { t.Parallel() var ( - ctx = testutil.Context(t, testutil.WaitShort) - client = coderdtest.New(t, &coderdtest.Options{IncludeProvisionerDaemon: true}) - expClient = codersdk.NewExperimentalClient(client) - user = coderdtest.CreateFirstUser(t, client) - version = coderdtest.CreateTemplateVersion(t, client, 
user.OrganizationID, &echo.Responses{ + ctx = testutil.Context(t, testutil.WaitShort) + client = coderdtest.New(t, &coderdtest.Options{IncludeProvisionerDaemon: true}) + user = coderdtest.CreateFirstUser(t, client) + version = coderdtest.CreateTemplateVersion(t, client, user.OrganizationID, &echo.Responses{ Parse: echo.ParseComplete, ProvisionApply: echo.ApplyComplete, ProvisionPlan: []*proto.Response{ {Type: &proto.Response_Plan{Plan: &proto.PlanComplete{ - Parameters: []*proto.RichParameter{{Name: codersdk.AITaskPromptParameterName, Type: "string"}}, HasAiTasks: true, }}}, }, @@ -895,10 +1103,11 @@ func TestTasksCreate(t *testing.T) { coderdtest.AwaitTemplateVersionJobCompleted(t, client, version.ID) // When: We attempt to create a Task. - task, err := expClient.CreateTask(ctx, "me", codersdk.CreateTaskRequest{ + task, err := client.CreateTask(ctx, "me", codersdk.CreateTaskRequest{ TemplateVersionID: template.ActiveVersionID, Input: "Some prompt", Name: tt.taskName, + DisplayName: tt.taskDisplayName, }) if tt.expectError == "" { require.NoError(t, err) @@ -912,8 +1121,17 @@ func TestTasksCreate(t *testing.T) { if !tt.expectFallbackName { require.Equal(t, tt.taskName, task.Name) } + + // Then: We expect the correct display name to have been picked. + require.NotEmpty(t, task.DisplayName) + if !tt.expectFallbackDisplayName { + require.Equal(t, tt.taskDisplayName, task.DisplayName) + } } else { - require.ErrorContains(t, err, tt.expectError) + var apiErr *codersdk.Error + require.ErrorAs(t, err, &apiErr) + require.Equal(t, http.StatusBadRequest, apiErr.StatusCode()) + require.Equal(t, apiErr.Message, tt.expectError) } }) } @@ -936,10 +1154,8 @@ func TestTasksCreate(t *testing.T) { coderdtest.AwaitTemplateVersionJobCompleted(t, client, version.ID) template := coderdtest.CreateTemplate(t, client, user.OrganizationID, version.ID) - expClient := codersdk.NewExperimentalClient(client) - // When: We attempt to create a Task. 
- _, err := expClient.CreateTask(ctx, "me", codersdk.CreateTaskRequest{ + _, err := client.CreateTask(ctx, "me", codersdk.CreateTaskRequest{ TemplateVersionID: template.ActiveVersionID, Input: taskPrompt, }) @@ -968,10 +1184,8 @@ func TestTasksCreate(t *testing.T) { coderdtest.AwaitTemplateVersionJobCompleted(t, client, version.ID) _ = coderdtest.CreateTemplate(t, client, user.OrganizationID, version.ID) - expClient := codersdk.NewExperimentalClient(client) - // When: We attempt to create a Task with an invalid template version ID. - _, err := expClient.CreateTask(ctx, "me", codersdk.CreateTaskRequest{ + _, err := client.CreateTask(ctx, "me", codersdk.CreateTaskRequest{ TemplateVersionID: uuid.New(), Input: taskPrompt, }) @@ -1000,7 +1214,6 @@ func TestTasksCreate(t *testing.T) { ProvisionApply: echo.ApplyComplete, ProvisionPlan: []*proto.Response{ {Type: &proto.Response_Plan{Plan: &proto.PlanComplete{ - Parameters: []*proto.RichParameter{{Name: codersdk.AITaskPromptParameterName, Type: "string"}}, HasAiTasks: true, }}}, }, @@ -1008,9 +1221,7 @@ func TestTasksCreate(t *testing.T) { coderdtest.AwaitTemplateVersionJobCompleted(t, client, version.ID) template := coderdtest.CreateTemplate(t, client, user.OrganizationID, version.ID) - expClient := codersdk.NewExperimentalClient(client) - - task, err := expClient.CreateTask(ctx, "me", codersdk.CreateTaskRequest{ + task, err := client.CreateTask(ctx, "me", codersdk.CreateTaskRequest{ TemplateVersionID: template.ActiveVersionID, Input: taskPrompt, }) @@ -1060,7 +1271,6 @@ func TestTasksCreate(t *testing.T) { ProvisionApply: echo.ApplyComplete, ProvisionPlan: []*proto.Response{ {Type: &proto.Response_Plan{Plan: &proto.PlanComplete{ - Parameters: []*proto.RichParameter{{Name: codersdk.AITaskPromptParameterName, Type: "string"}}, HasAiTasks: true, }}}, }, @@ -1068,9 +1278,7 @@ func TestTasksCreate(t *testing.T) { coderdtest.AwaitTemplateVersionJobCompleted(t, client, version.ID) template := coderdtest.CreateTemplate(t, 
client, user.OrganizationID, version.ID) - expClient := codersdk.NewExperimentalClient(client) - - task, err := expClient.CreateTask(ctx, "me", codersdk.CreateTaskRequest{ + task, err := client.CreateTask(ctx, "me", codersdk.CreateTaskRequest{ TemplateVersionID: template.ActiveVersionID, Input: taskPrompt, Name: taskName, @@ -1097,7 +1305,6 @@ func TestTasksCreate(t *testing.T) { ProvisionApply: echo.ApplyComplete, ProvisionPlan: []*proto.Response{ {Type: &proto.Response_Plan{Plan: &proto.PlanComplete{ - Parameters: []*proto.RichParameter{{Name: codersdk.AITaskPromptParameterName, Type: "string"}}, HasAiTasks: true, }}}, }, @@ -1105,16 +1312,14 @@ func TestTasksCreate(t *testing.T) { coderdtest.AwaitTemplateVersionJobCompleted(t, client, version.ID) template := coderdtest.CreateTemplate(t, client, user.OrganizationID, version.ID) - expClient := codersdk.NewExperimentalClient(client) - - task1, err := expClient.CreateTask(ctx, "me", codersdk.CreateTaskRequest{ + task1, err := client.CreateTask(ctx, "me", codersdk.CreateTaskRequest{ TemplateVersionID: template.ActiveVersionID, Input: "First task", Name: "task-1", }) require.NoError(t, err) - task2, err := expClient.CreateTask(ctx, "me", codersdk.CreateTaskRequest{ + task2, err := client.CreateTask(ctx, "me", codersdk.CreateTaskRequest{ TemplateVersionID: template.ActiveVersionID, Input: "Second task", Name: "task-2", @@ -1150,7 +1355,6 @@ func TestTasksCreate(t *testing.T) { ProvisionApply: echo.ApplyComplete, ProvisionPlan: []*proto.Response{ {Type: &proto.Response_Plan{Plan: &proto.PlanComplete{ - Parameters: []*proto.RichParameter{{Name: codersdk.AITaskPromptParameterName, Type: "string"}}, HasAiTasks: true, }}}, }, @@ -1163,18 +1367,15 @@ func TestTasksCreate(t *testing.T) { ProvisionApply: echo.ApplyComplete, ProvisionPlan: []*proto.Response{ {Type: &proto.Response_Plan{Plan: &proto.PlanComplete{ - Parameters: []*proto.RichParameter{{Name: codersdk.AITaskPromptParameterName, Type: "string"}}, HasAiTasks: true, 
}}}, }, }, template.ID) coderdtest.AwaitTemplateVersionJobCompleted(t, client, version2.ID) - expClient := codersdk.NewExperimentalClient(client) - // Create a task using version 2 to verify the template_version_id is // stored correctly. - task, err := expClient.CreateTask(ctx, "me", codersdk.CreateTaskRequest{ + task, err := client.CreateTask(ctx, "me", codersdk.CreateTaskRequest{ TemplateVersionID: version2.ID, Input: "Use version 2", }) @@ -1200,6 +1401,7 @@ func TestTasksNotification(t *testing.T) { isNotificationSent bool notificationTemplate uuid.UUID taskPrompt string + agentLifecycle database.WorkspaceAgentLifecycleState }{ // Should not send a notification when the agent app is not an AI task. { @@ -1247,6 +1449,7 @@ func TestTasksNotification(t *testing.T) { isNotificationSent: true, notificationTemplate: notifications.TemplateTaskIdle, taskPrompt: "InitialTemplateTaskIdle", + agentLifecycle: database.WorkspaceAgentLifecycleStateReady, }, // Should send TemplateTaskWorking when the AI task transitions to 'Working' from 'Idle'. { @@ -1260,6 +1463,7 @@ func TestTasksNotification(t *testing.T) { isNotificationSent: true, notificationTemplate: notifications.TemplateTaskWorking, taskPrompt: "TemplateTaskWorkingFromIdle", + agentLifecycle: database.WorkspaceAgentLifecycleStateReady, }, // Should send TemplateTaskIdle when the AI task transitions to 'Idle'. { @@ -1270,6 +1474,7 @@ func TestTasksNotification(t *testing.T) { isNotificationSent: true, notificationTemplate: notifications.TemplateTaskIdle, taskPrompt: "TemplateTaskIdle", + agentLifecycle: database.WorkspaceAgentLifecycleStateReady, }, // Long task prompts should be truncated to 160 characters. { @@ -1280,6 +1485,7 @@ func TestTasksNotification(t *testing.T) { isNotificationSent: true, notificationTemplate: notifications.TemplateTaskIdle, taskPrompt: "This is a very long task prompt that should be truncated to 160 characters. Lorem ipsum dolor sit amet, consectetur adipiscing elit. 
Sed do eiusmod tempor incididunt ut labore et dolore magna aliqua.", + agentLifecycle: database.WorkspaceAgentLifecycleStateReady, }, // Should send TemplateTaskCompleted when the AI task transitions to 'Complete'. { @@ -1290,6 +1496,7 @@ func TestTasksNotification(t *testing.T) { isNotificationSent: true, notificationTemplate: notifications.TemplateTaskCompleted, taskPrompt: "TemplateTaskCompleted", + agentLifecycle: database.WorkspaceAgentLifecycleStateReady, }, // Should send TemplateTaskFailed when the AI task transitions to 'Failure'. { @@ -1300,6 +1507,7 @@ func TestTasksNotification(t *testing.T) { isNotificationSent: true, notificationTemplate: notifications.TemplateTaskFailed, taskPrompt: "TemplateTaskFailed", + agentLifecycle: database.WorkspaceAgentLifecycleStateReady, }, // Should send TemplateTaskCompleted when the AI task transitions from 'Idle' to 'Complete'. { @@ -1310,6 +1518,7 @@ func TestTasksNotification(t *testing.T) { isNotificationSent: true, notificationTemplate: notifications.TemplateTaskCompleted, taskPrompt: "TemplateTaskCompletedFromIdle", + agentLifecycle: database.WorkspaceAgentLifecycleStateReady, }, // Should send TemplateTaskFailed when the AI task transitions from 'Idle' to 'Failure'. { @@ -1320,6 +1529,7 @@ func TestTasksNotification(t *testing.T) { isNotificationSent: true, notificationTemplate: notifications.TemplateTaskFailed, taskPrompt: "TemplateTaskFailedFromIdle", + agentLifecycle: database.WorkspaceAgentLifecycleStateReady, }, // Should NOT send notification when transitioning from 'Complete' to 'Complete' (no change). { @@ -1339,6 +1549,37 @@ func TestTasksNotification(t *testing.T) { isNotificationSent: false, taskPrompt: "NoNotificationFailureToFailure", }, + // Should NOT send notification when agent is in 'starting' lifecycle state (agent startup). 
+ { + name: "AgentStarting_NoNotification", + latestAppStatuses: nil, + newAppStatus: codersdk.WorkspaceAppStatusStateIdle, + isAITask: true, + isNotificationSent: false, + taskPrompt: "AgentStarting_NoNotification", + agentLifecycle: database.WorkspaceAgentLifecycleStateStarting, + }, + // Should NOT send notification when agent is in 'created' lifecycle state (agent not started). + { + name: "AgentCreated_NoNotification", + latestAppStatuses: []codersdk.WorkspaceAppStatusState{codersdk.WorkspaceAppStatusStateWorking}, + newAppStatus: codersdk.WorkspaceAppStatusStateIdle, + isAITask: true, + isNotificationSent: false, + taskPrompt: "AgentCreated_NoNotification", + agentLifecycle: database.WorkspaceAgentLifecycleStateCreated, + }, + // Should send notification when agent is in 'ready' lifecycle state (agent fully started). + { + name: "AgentReady_SendNotification", + latestAppStatuses: []codersdk.WorkspaceAppStatusState{codersdk.WorkspaceAppStatusStateWorking}, + newAppStatus: codersdk.WorkspaceAppStatusStateIdle, + isAITask: true, + isNotificationSent: true, + notificationTemplate: notifications.TemplateTaskIdle, + taskPrompt: "AgentReady_SendNotification", + agentLifecycle: database.WorkspaceAgentLifecycleStateReady, + }, } { t.Run(tc.name, func(t *testing.T) { t.Parallel() @@ -1357,31 +1598,57 @@ func TestTasksNotification(t *testing.T) { // Given: a workspace build with an agent containing an App workspaceAgentAppID := uuid.New() workspaceBuildID := uuid.New() - workspaceBuildSeed := database.WorkspaceBuild{ + workspaceBuilder := dbfake.WorkspaceBuild(t, db, database.WorkspaceTable{ + OrganizationID: ownerUser.OrganizationID, + OwnerID: memberUser.ID, + }).Seed(database.WorkspaceBuild{ ID: workspaceBuildID, - } + }) if tc.isAITask { - workspaceBuildSeed = database.WorkspaceBuild{ - ID: workspaceBuildID, - // AI Task configuration - HasAITask: sql.NullBool{Bool: true, Valid: true}, - AITaskSidebarAppID: uuid.NullUUID{UUID: workspaceAgentAppID, Valid: true}, + 
workspaceBuilder = workspaceBuilder. + WithTask(database.TaskTable{ + Prompt: tc.taskPrompt, + }, &proto.App{ + Id: workspaceAgentAppID.String(), + Slug: "ccw", + }) + } else { + workspaceBuilder = workspaceBuilder. + WithAgent(func(agent []*proto.Agent) []*proto.Agent { + agent[0].Apps = []*proto.App{{ + Id: workspaceAgentAppID.String(), + Slug: "ccw", + }} + return agent + }) + } + workspaceBuild := workspaceBuilder.Do() + + // Given: set the agent lifecycle state if specified + if tc.agentLifecycle != "" { + workspace := coderdtest.MustWorkspace(t, client, workspaceBuild.Workspace.ID) + agentID := workspace.LatestBuild.Resources[0].Agents[0].ID + + var ( + startedAt sql.NullTime + readyAt sql.NullTime + ) + if tc.agentLifecycle == database.WorkspaceAgentLifecycleStateReady { + startedAt = sql.NullTime{Time: dbtime.Now(), Valid: true} + readyAt = sql.NullTime{Time: dbtime.Now(), Valid: true} + } else if tc.agentLifecycle == database.WorkspaceAgentLifecycleStateStarting { + startedAt = sql.NullTime{Time: dbtime.Now(), Valid: true} } + + // nolint:gocritic // This is a system restricted operation for test setup. 
+ err := db.UpdateWorkspaceAgentLifecycleStateByID(dbauthz.AsSystemRestricted(ctx), database.UpdateWorkspaceAgentLifecycleStateByIDParams{ + ID: agentID, + LifecycleState: tc.agentLifecycle, + StartedAt: startedAt, + ReadyAt: readyAt, + }) + require.NoError(t, err) } - workspaceBuild := dbfake.WorkspaceBuild(t, db, database.WorkspaceTable{ - OrganizationID: ownerUser.OrganizationID, - OwnerID: memberUser.ID, - }).Seed(workspaceBuildSeed).Params(database.WorkspaceBuildParameter{ - WorkspaceBuildID: workspaceBuildID, - Name: codersdk.AITaskPromptParameterName, - Value: tc.taskPrompt, - }).WithAgent(func(agent []*proto.Agent) []*proto.Agent { - agent[0].Apps = []*proto.App{{ - Id: workspaceAgentAppID.String(), - Slug: "ccw", - }} - return agent - }).Do() // Given: the workspace agent app has previous statuses agentClient := agentsdk.New(client.URL, agentsdk.WithFixedToken(workspaceBuild.AgentToken)) @@ -1422,13 +1689,7 @@ func TestTasksNotification(t *testing.T) { require.Len(t, sent, 1) require.Equal(t, memberUser.ID, sent[0].UserID) require.Len(t, sent[0].Labels, 2) - // NOTE: len(string) is the number of bytes in the string, not the number of runes. 
- require.LessOrEqual(t, utf8.RuneCountInString(sent[0].Labels["task"]), 160) - if len(tc.taskPrompt) > 160 { - require.Contains(t, tc.taskPrompt, strings.TrimSuffix(sent[0].Labels["task"], "…")) - } else { - require.Equal(t, tc.taskPrompt, sent[0].Labels["task"]) - } + require.Equal(t, workspaceBuild.Task.Name, sent[0].Labels["task"]) require.Equal(t, workspace.Name, sent[0].Labels["workspace"]) } else { // Then: No notification is sent diff --git a/coderd/apidoc/docs.go b/coderd/apidoc/docs.go index 45b220eb9d255..f80959192d279 100644 --- a/coderd/apidoc/docs.go +++ b/coderd/apidoc/docs.go @@ -85,7 +85,7 @@ const docTemplate = `{ } } }, - "/api/experimental/aibridge/interceptions": { + "/aibridge/interceptions": { "get": { "security": [ { @@ -96,10 +96,10 @@ const docTemplate = `{ "application/json" ], "tags": [ - "AIBridge" + "AI Bridge" ], - "summary": "List AIBridge interceptions", - "operationId": "list-aibridge-interceptions", + "summary": "List AI Bridge interceptions", + "operationId": "list-ai-bridge-interceptions", "parameters": [ { "type": "string", @@ -136,233 +136,6 @@ const docTemplate = `{ } } }, - "/api/experimental/tasks": { - "get": { - "security": [ - { - "CoderSessionToken": [] - } - ], - "tags": [ - "Experimental" - ], - "summary": "List AI tasks", - "operationId": "list-tasks", - "parameters": [ - { - "type": "string", - "description": "Search query for filtering tasks. 
Supports: owner:\u003cusername/uuid/me\u003e, organization:\u003corg-name/uuid\u003e, status:\u003cstatus\u003e", - "name": "q", - "in": "query" - } - ], - "responses": { - "200": { - "description": "OK", - "schema": { - "$ref": "#/definitions/codersdk.TasksListResponse" - } - } - } - } - }, - "/api/experimental/tasks/{user}": { - "post": { - "security": [ - { - "CoderSessionToken": [] - } - ], - "tags": [ - "Experimental" - ], - "summary": "Create a new AI task", - "operationId": "create-task", - "parameters": [ - { - "type": "string", - "description": "Username, user ID, or 'me' for the authenticated user", - "name": "user", - "in": "path", - "required": true - }, - { - "description": "Create task request", - "name": "request", - "in": "body", - "required": true, - "schema": { - "$ref": "#/definitions/codersdk.CreateTaskRequest" - } - } - ], - "responses": { - "201": { - "description": "Created", - "schema": { - "$ref": "#/definitions/codersdk.Task" - } - } - } - } - }, - "/api/experimental/tasks/{user}/{task}": { - "get": { - "security": [ - { - "CoderSessionToken": [] - } - ], - "tags": [ - "Experimental" - ], - "summary": "Get AI task by ID", - "operationId": "get-task", - "parameters": [ - { - "type": "string", - "description": "Username, user ID, or 'me' for the authenticated user", - "name": "user", - "in": "path", - "required": true - }, - { - "type": "string", - "format": "uuid", - "description": "Task ID", - "name": "task", - "in": "path", - "required": true - } - ], - "responses": { - "200": { - "description": "OK", - "schema": { - "$ref": "#/definitions/codersdk.Task" - } - } - } - }, - "delete": { - "security": [ - { - "CoderSessionToken": [] - } - ], - "tags": [ - "Experimental" - ], - "summary": "Delete AI task by ID", - "operationId": "delete-task", - "parameters": [ - { - "type": "string", - "description": "Username, user ID, or 'me' for the authenticated user", - "name": "user", - "in": "path", - "required": true - }, - { - "type": "string", - 
"format": "uuid", - "description": "Task ID", - "name": "task", - "in": "path", - "required": true - } - ], - "responses": { - "202": { - "description": "Task deletion initiated" - } - } - } - }, - "/api/experimental/tasks/{user}/{task}/logs": { - "get": { - "security": [ - { - "CoderSessionToken": [] - } - ], - "tags": [ - "Experimental" - ], - "summary": "Get AI task logs", - "operationId": "get-task-logs", - "parameters": [ - { - "type": "string", - "description": "Username, user ID, or 'me' for the authenticated user", - "name": "user", - "in": "path", - "required": true - }, - { - "type": "string", - "format": "uuid", - "description": "Task ID", - "name": "task", - "in": "path", - "required": true - } - ], - "responses": { - "200": { - "description": "OK", - "schema": { - "$ref": "#/definitions/codersdk.TaskLogsResponse" - } - } - } - } - }, - "/api/experimental/tasks/{user}/{task}/send": { - "post": { - "security": [ - { - "CoderSessionToken": [] - } - ], - "tags": [ - "Experimental" - ], - "summary": "Send input to AI task", - "operationId": "send-task-input", - "parameters": [ - { - "type": "string", - "description": "Username, user ID, or 'me' for the authenticated user", - "name": "user", - "in": "path", - "required": true - }, - { - "type": "string", - "format": "uuid", - "description": "Task ID", - "name": "task", - "in": "path", - "required": true - }, - { - "description": "Task input request", - "name": "request", - "in": "body", - "required": true, - "schema": { - "$ref": "#/definitions/codersdk.TaskSendRequest" - } - } - ], - "responses": { - "204": { - "description": "Input sent successfully" - } - } - } - }, "/appearance": { "get": { "security": [ @@ -5582,47 +5355,346 @@ const docTemplate = `{ } } }, - "/settings/idpsync/organization/config": { - "patch": { + "/settings/idpsync/organization/config": { + "patch": { + "security": [ + { + "CoderSessionToken": [] + } + ], + "consumes": [ + "application/json" + ], + "produces": [ + "application/json" 
+ ], + "tags": [ + "Enterprise" + ], + "summary": "Update organization IdP Sync config", + "operationId": "update-organization-idp-sync-config", + "parameters": [ + { + "description": "New config values", + "name": "request", + "in": "body", + "required": true, + "schema": { + "$ref": "#/definitions/codersdk.PatchOrganizationIDPSyncConfigRequest" + } + } + ], + "responses": { + "200": { + "description": "OK", + "schema": { + "$ref": "#/definitions/codersdk.OrganizationSyncSettings" + } + } + } + } + }, + "/settings/idpsync/organization/mapping": { + "patch": { + "security": [ + { + "CoderSessionToken": [] + } + ], + "consumes": [ + "application/json" + ], + "produces": [ + "application/json" + ], + "tags": [ + "Enterprise" + ], + "summary": "Update organization IdP Sync mapping", + "operationId": "update-organization-idp-sync-mapping", + "parameters": [ + { + "description": "Description of the mappings to add and remove", + "name": "request", + "in": "body", + "required": true, + "schema": { + "$ref": "#/definitions/codersdk.PatchOrganizationIDPSyncMappingRequest" + } + } + ], + "responses": { + "200": { + "description": "OK", + "schema": { + "$ref": "#/definitions/codersdk.OrganizationSyncSettings" + } + } + } + } + }, + "/tailnet": { + "get": { + "security": [ + { + "CoderSessionToken": [] + } + ], + "tags": [ + "Agents" + ], + "summary": "User-scoped tailnet RPC connection", + "operationId": "user-scoped-tailnet-rpc-connection", + "responses": { + "101": { + "description": "Switching Protocols" + } + } + } + }, + "/tasks": { + "get": { + "security": [ + { + "CoderSessionToken": [] + } + ], + "produces": [ + "application/json" + ], + "tags": [ + "Tasks" + ], + "summary": "List AI tasks", + "operationId": "list-ai-tasks", + "parameters": [ + { + "type": "string", + "description": "Search query for filtering tasks. 
Supports: owner:\u003cusername/uuid/me\u003e, organization:\u003corg-name/uuid\u003e, status:\u003cstatus\u003e", + "name": "q", + "in": "query" + } + ], + "responses": { + "200": { + "description": "OK", + "schema": { + "$ref": "#/definitions/codersdk.TasksListResponse" + } + } + } + } + }, + "/tasks/{user}": { + "post": { + "security": [ + { + "CoderSessionToken": [] + } + ], + "consumes": [ + "application/json" + ], + "produces": [ + "application/json" + ], + "tags": [ + "Tasks" + ], + "summary": "Create a new AI task", + "operationId": "create-a-new-ai-task", + "parameters": [ + { + "type": "string", + "description": "Username, user ID, or 'me' for the authenticated user", + "name": "user", + "in": "path", + "required": true + }, + { + "description": "Create task request", + "name": "request", + "in": "body", + "required": true, + "schema": { + "$ref": "#/definitions/codersdk.CreateTaskRequest" + } + } + ], + "responses": { + "201": { + "description": "Created", + "schema": { + "$ref": "#/definitions/codersdk.Task" + } + } + } + } + }, + "/tasks/{user}/{task}": { + "get": { + "security": [ + { + "CoderSessionToken": [] + } + ], + "produces": [ + "application/json" + ], + "tags": [ + "Tasks" + ], + "summary": "Get AI task by ID or name", + "operationId": "get-ai-task-by-id-or-name", + "parameters": [ + { + "type": "string", + "description": "Username, user ID, or 'me' for the authenticated user", + "name": "user", + "in": "path", + "required": true + }, + { + "type": "string", + "description": "Task ID, or task name", + "name": "task", + "in": "path", + "required": true + } + ], + "responses": { + "200": { + "description": "OK", + "schema": { + "$ref": "#/definitions/codersdk.Task" + } + } + } + }, + "delete": { + "security": [ + { + "CoderSessionToken": [] + } + ], + "tags": [ + "Tasks" + ], + "summary": "Delete AI task", + "operationId": "delete-ai-task", + "parameters": [ + { + "type": "string", + "description": "Username, user ID, or 'me' for the 
authenticated user", + "name": "user", + "in": "path", + "required": true + }, + { + "type": "string", + "description": "Task ID, or task name", + "name": "task", + "in": "path", + "required": true + } + ], + "responses": { + "202": { + "description": "Accepted" + } + } + } + }, + "/tasks/{user}/{task}/input": { + "patch": { + "security": [ + { + "CoderSessionToken": [] + } + ], + "consumes": [ + "application/json" + ], + "tags": [ + "Tasks" + ], + "summary": "Update AI task input", + "operationId": "update-ai-task-input", + "parameters": [ + { + "type": "string", + "description": "Username, user ID, or 'me' for the authenticated user", + "name": "user", + "in": "path", + "required": true + }, + { + "type": "string", + "description": "Task ID, or task name", + "name": "task", + "in": "path", + "required": true + }, + { + "description": "Update task input request", + "name": "request", + "in": "body", + "required": true, + "schema": { + "$ref": "#/definitions/codersdk.UpdateTaskInputRequest" + } + } + ], + "responses": { + "204": { + "description": "No Content" + } + } + } + }, + "/tasks/{user}/{task}/logs": { + "get": { "security": [ { "CoderSessionToken": [] } ], - "consumes": [ - "application/json" - ], "produces": [ "application/json" ], "tags": [ - "Enterprise" + "Tasks" ], - "summary": "Update organization IdP Sync config", - "operationId": "update-organization-idp-sync-config", + "summary": "Get AI task logs", + "operationId": "get-ai-task-logs", "parameters": [ { - "description": "New config values", - "name": "request", - "in": "body", - "required": true, - "schema": { - "$ref": "#/definitions/codersdk.PatchOrganizationIDPSyncConfigRequest" - } + "type": "string", + "description": "Username, user ID, or 'me' for the authenticated user", + "name": "user", + "in": "path", + "required": true + }, + { + "type": "string", + "description": "Task ID, or task name", + "name": "task", + "in": "path", + "required": true } ], "responses": { "200": { "description": 
"OK", "schema": { - "$ref": "#/definitions/codersdk.OrganizationSyncSettings" + "$ref": "#/definitions/codersdk.TaskLogsResponse" } } } } }, - "/settings/idpsync/organization/mapping": { - "patch": { + "/tasks/{user}/{task}/send": { + "post": { "security": [ { "CoderSessionToken": [] @@ -5631,50 +5703,39 @@ const docTemplate = `{ "consumes": [ "application/json" ], - "produces": [ - "application/json" - ], "tags": [ - "Enterprise" + "Tasks" ], - "summary": "Update organization IdP Sync mapping", - "operationId": "update-organization-idp-sync-mapping", + "summary": "Send input to AI task", + "operationId": "send-input-to-ai-task", "parameters": [ { - "description": "Description of the mappings to add and remove", + "type": "string", + "description": "Username, user ID, or 'me' for the authenticated user", + "name": "user", + "in": "path", + "required": true + }, + { + "type": "string", + "description": "Task ID, or task name", + "name": "task", + "in": "path", + "required": true + }, + { + "description": "Task input request", "name": "request", "in": "body", "required": true, "schema": { - "$ref": "#/definitions/codersdk.PatchOrganizationIDPSyncMappingRequest" - } - } - ], - "responses": { - "200": { - "description": "OK", - "schema": { - "$ref": "#/definitions/codersdk.OrganizationSyncSettings" + "$ref": "#/definitions/codersdk.TaskSendRequest" } } - } - } - }, - "/tailnet": { - "get": { - "security": [ - { - "CoderSessionToken": [] - } - ], - "tags": [ - "Agents" ], - "summary": "User-scoped tailnet RPC connection", - "operationId": "user-scoped-tailnet-rpc-connection", "responses": { - "101": { - "description": "Switching Protocols" + "204": { + "description": "No Content" } } } @@ -6002,6 +6063,41 @@ const docTemplate = `{ } } }, + "/templates/{template}/prebuilds/invalidate": { + "post": { + "security": [ + { + "CoderSessionToken": [] + } + ], + "produces": [ + "application/json" + ], + "tags": [ + "Enterprise" + ], + "summary": "Invalidate presets for 
template", + "operationId": "invalidate-presets-for-template", + "parameters": [ + { + "type": "string", + "format": "uuid", + "description": "Template ID", + "name": "template", + "in": "path", + "required": true + } + ], + "responses": { + "200": { + "description": "OK", + "schema": { + "$ref": "#/definitions/codersdk.InvalidatePresetsResponse" + } + } + } + } + }, "/templates/{template}/versions": { "get": { "security": [ @@ -8291,6 +8387,84 @@ const docTemplate = `{ } } }, + "/users/{user}/preferences": { + "get": { + "security": [ + { + "CoderSessionToken": [] + } + ], + "produces": [ + "application/json" + ], + "tags": [ + "Users" + ], + "summary": "Get user preference settings", + "operationId": "get-user-preference-settings", + "parameters": [ + { + "type": "string", + "description": "User ID, name, or me", + "name": "user", + "in": "path", + "required": true + } + ], + "responses": { + "200": { + "description": "OK", + "schema": { + "$ref": "#/definitions/codersdk.UserPreferenceSettings" + } + } + } + }, + "put": { + "security": [ + { + "CoderSessionToken": [] + } + ], + "consumes": [ + "application/json" + ], + "produces": [ + "application/json" + ], + "tags": [ + "Users" + ], + "summary": "Update user preference settings", + "operationId": "update-user-preference-settings", + "parameters": [ + { + "type": "string", + "description": "User ID, name, or me", + "name": "user", + "in": "path", + "required": true + }, + { + "description": "New preference settings", + "name": "request", + "in": "body", + "required": true, + "schema": { + "$ref": "#/definitions/codersdk.UpdateUserPreferenceSettingsRequest" + } + } + ], + "responses": { + "200": { + "description": "OK", + "schema": { + "$ref": "#/definitions/codersdk.UserPreferenceSettings" + } + } + } + } + }, "/users/{user}/profile": { "put": { "security": [ @@ -11700,14 +11874,23 @@ const docTemplate = `{ "enabled": { "type": "boolean" }, + "inject_coder_mcp_tools": { + "type": "boolean" + }, "openai": { 
"$ref": "#/definitions/codersdk.AIBridgeOpenAIConfig" + }, + "retention": { + "type": "integer" } } }, "codersdk.AIBridgeInterception": { "type": "object", "properties": { + "api_key_id": { + "type": "string" + }, "ended_at": { "type": "string", "format": "date-time" @@ -13193,6 +13376,9 @@ const docTemplate = `{ "codersdk.CreateTaskRequest": { "type": "object", "properties": { + "display_name": { + "type": "string" + }, "input": { "type": "string" }, @@ -14317,15 +14503,15 @@ const docTemplate = `{ "oauth2", "mcp-server-http", "workspace-sharing", - "aibridge" + "terraform-directory-reuse" ], "x-enum-comments": { - "ExperimentAIBridge": "Enables AI Bridge functionality.", "ExperimentAutoFillParameters": "This should not be taken out of experiments until we have redesigned the feature.", "ExperimentExample": "This isn't used for anything.", "ExperimentMCPServerHTTP": "Enables the MCP HTTP server functionality.", "ExperimentNotifications": "Sends notifications via SMTP and webhooks following certain events.", "ExperimentOAuth2": "Enables OAuth2 provider functionality.", + "ExperimentTerraformWorkspace": "Enables reuse of existing terraform directory for builds", "ExperimentWebPush": "Enables web push notifications through the browser.", "ExperimentWorkspaceSharing": "Enables updating workspace ACLs for sharing with users and groups.", "ExperimentWorkspaceUsage": "Enables the new workspace usage tracking." 
@@ -14339,7 +14525,7 @@ const docTemplate = `{ "ExperimentOAuth2", "ExperimentMCPServerHTTP", "ExperimentWorkspaceSharing", - "ExperimentAIBridge" + "ExperimentTerraformWorkspace" ] }, "codersdk.ExternalAPIKeyScopes": { @@ -14883,6 +15069,31 @@ const docTemplate = `{ "InsightsReportIntervalWeek" ] }, + "codersdk.InvalidatePresetsResponse": { + "type": "object", + "properties": { + "invalidated": { + "type": "array", + "items": { + "$ref": "#/definitions/codersdk.InvalidatedPreset" + } + } + } + }, + "codersdk.InvalidatedPreset": { + "type": "object", + "properties": { + "preset_name": { + "type": "string" + }, + "template_name": { + "type": "string" + }, + "template_version_name": { + "type": "string" + } + } + }, "codersdk.IssueReconnectingPTYSignedTokenRequest": { "type": "object", "required": [ @@ -15438,6 +15649,9 @@ const docTemplate = `{ "type": "string" } }, + "revocation_endpoint": { + "type": "string" + }, "scopes_supported": { "type": "array", "items": { @@ -17764,6 +17978,9 @@ const docTemplate = `{ "current_state": { "$ref": "#/definitions/codersdk.TaskStateEntry" }, + "display_name": { + "type": "string" + }, "id": { "type": "string", "format": "uuid" @@ -18129,6 +18346,9 @@ const docTemplate = `{ }, "use_classic_parameter_flow": { "type": "boolean" + }, + "use_terraform_workspace_cache": { + "type": "boolean" } } }, @@ -18950,6 +19170,14 @@ const docTemplate = `{ } } }, + "codersdk.UpdateTaskInputRequest": { + "type": "object", + "properties": { + "input": { + "type": "string" + } + } + }, "codersdk.UpdateTemplateACL": { "type": "object", "properties": { @@ -19057,6 +19285,10 @@ const docTemplate = `{ "use_classic_parameter_flow": { "description": "UseClassicParameterFlow is a flag that switches the default behavior to use the classic\nparameter flow when creating a workspace. This only affects deployments with the experiment\n\"dynamic-parameters\" enabled. 
This setting will live for a period after the experiment is\nmade the default.\nAn \"opt-out\" is present in case the new feature breaks some existing templates.", "type": "boolean" + }, + "use_terraform_workspace_cache": { + "description": "UseTerraformWorkspaceCache allows optionally specifying whether to use cached\nterraform directories for workspaces created from this template. This field\nonly applies when the correct experiment is enabled. This field is subject to\nbeing removed in the future.", + "type": "boolean" } } }, @@ -19100,6 +19332,14 @@ const docTemplate = `{ } } }, + "codersdk.UpdateUserPreferenceSettingsRequest": { + "type": "object", + "properties": { + "task_notification_alert_dismissed": { + "type": "boolean" + } + } + }, "codersdk.UpdateUserProfileRequest": { "type": "object", "required": [ @@ -19485,6 +19725,14 @@ const docTemplate = `{ } } }, + "codersdk.UserPreferenceSettings": { + "type": "object", + "properties": { + "task_notification_alert_dismissed": { + "type": "boolean" + } + } + }, "codersdk.UserQuietHoursScheduleConfig": { "type": "object", "properties": { @@ -19715,6 +19963,14 @@ const docTemplate = `{ "description": "OwnerName is the username of the owner of the workspace.", "type": "string" }, + "task_id": { + "description": "TaskID, if set, indicates that the workspace is relevant to the given codersdk.Task.", + "allOf": [ + { + "$ref": "#/definitions/uuid.NullUUID" + } + ] + }, "template_active_version_id": { "type": "string", "format": "uuid" @@ -20521,11 +20777,6 @@ const docTemplate = `{ "codersdk.WorkspaceBuild": { "type": "object", "properties": { - "ai_task_sidebar_app_id": { - "description": "Deprecated: This field has been replaced with ` + "`" + `TaskAppID` + "`" + `", - "type": "string", - "format": "uuid" - }, "build_number": { "type": "integer" }, @@ -20541,6 +20792,7 @@ const docTemplate = `{ "format": "date-time" }, "has_ai_task": { + "description": "Deprecated: This field has been deprecated in favor of Task 
WorkspaceID.", "type": "boolean" }, "has_external_agent": { @@ -20604,10 +20856,6 @@ const docTemplate = `{ } ] }, - "task_app_id": { - "type": "string", - "format": "uuid" - }, "template_version_id": { "type": "string", "format": "uuid" diff --git a/coderd/apidoc/swagger.json b/coderd/apidoc/swagger.json index 8e6a0030cb83b..8d54e2b3109b7 100644 --- a/coderd/apidoc/swagger.json +++ b/coderd/apidoc/swagger.json @@ -65,7 +65,7 @@ } } }, - "/api/experimental/aibridge/interceptions": { + "/aibridge/interceptions": { "get": { "security": [ { @@ -73,9 +73,9 @@ } ], "produces": ["application/json"], - "tags": ["AIBridge"], - "summary": "List AIBridge interceptions", - "operationId": "list-aibridge-interceptions", + "tags": ["AI Bridge"], + "summary": "List AI Bridge interceptions", + "operationId": "list-ai-bridge-interceptions", "parameters": [ { "type": "string", @@ -112,221 +112,6 @@ } } }, - "/api/experimental/tasks": { - "get": { - "security": [ - { - "CoderSessionToken": [] - } - ], - "tags": ["Experimental"], - "summary": "List AI tasks", - "operationId": "list-tasks", - "parameters": [ - { - "type": "string", - "description": "Search query for filtering tasks. 
Supports: owner:\u003cusername/uuid/me\u003e, organization:\u003corg-name/uuid\u003e, status:\u003cstatus\u003e", - "name": "q", - "in": "query" - } - ], - "responses": { - "200": { - "description": "OK", - "schema": { - "$ref": "#/definitions/codersdk.TasksListResponse" - } - } - } - } - }, - "/api/experimental/tasks/{user}": { - "post": { - "security": [ - { - "CoderSessionToken": [] - } - ], - "tags": ["Experimental"], - "summary": "Create a new AI task", - "operationId": "create-task", - "parameters": [ - { - "type": "string", - "description": "Username, user ID, or 'me' for the authenticated user", - "name": "user", - "in": "path", - "required": true - }, - { - "description": "Create task request", - "name": "request", - "in": "body", - "required": true, - "schema": { - "$ref": "#/definitions/codersdk.CreateTaskRequest" - } - } - ], - "responses": { - "201": { - "description": "Created", - "schema": { - "$ref": "#/definitions/codersdk.Task" - } - } - } - } - }, - "/api/experimental/tasks/{user}/{task}": { - "get": { - "security": [ - { - "CoderSessionToken": [] - } - ], - "tags": ["Experimental"], - "summary": "Get AI task by ID", - "operationId": "get-task", - "parameters": [ - { - "type": "string", - "description": "Username, user ID, or 'me' for the authenticated user", - "name": "user", - "in": "path", - "required": true - }, - { - "type": "string", - "format": "uuid", - "description": "Task ID", - "name": "task", - "in": "path", - "required": true - } - ], - "responses": { - "200": { - "description": "OK", - "schema": { - "$ref": "#/definitions/codersdk.Task" - } - } - } - }, - "delete": { - "security": [ - { - "CoderSessionToken": [] - } - ], - "tags": ["Experimental"], - "summary": "Delete AI task by ID", - "operationId": "delete-task", - "parameters": [ - { - "type": "string", - "description": "Username, user ID, or 'me' for the authenticated user", - "name": "user", - "in": "path", - "required": true - }, - { - "type": "string", - "format": "uuid", - 
"description": "Task ID", - "name": "task", - "in": "path", - "required": true - } - ], - "responses": { - "202": { - "description": "Task deletion initiated" - } - } - } - }, - "/api/experimental/tasks/{user}/{task}/logs": { - "get": { - "security": [ - { - "CoderSessionToken": [] - } - ], - "tags": ["Experimental"], - "summary": "Get AI task logs", - "operationId": "get-task-logs", - "parameters": [ - { - "type": "string", - "description": "Username, user ID, or 'me' for the authenticated user", - "name": "user", - "in": "path", - "required": true - }, - { - "type": "string", - "format": "uuid", - "description": "Task ID", - "name": "task", - "in": "path", - "required": true - } - ], - "responses": { - "200": { - "description": "OK", - "schema": { - "$ref": "#/definitions/codersdk.TaskLogsResponse" - } - } - } - } - }, - "/api/experimental/tasks/{user}/{task}/send": { - "post": { - "security": [ - { - "CoderSessionToken": [] - } - ], - "tags": ["Experimental"], - "summary": "Send input to AI task", - "operationId": "send-task-input", - "parameters": [ - { - "type": "string", - "description": "Username, user ID, or 'me' for the authenticated user", - "name": "user", - "in": "path", - "required": true - }, - { - "type": "string", - "format": "uuid", - "description": "Task ID", - "name": "task", - "in": "path", - "required": true - }, - { - "description": "Task input request", - "name": "request", - "in": "body", - "required": true, - "schema": { - "$ref": "#/definitions/codersdk.TaskSendRequest" - } - } - ], - "responses": { - "204": { - "description": "Input sent successfully" - } - } - } - }, "/appearance": { "get": { "security": [ @@ -4934,16 +4719,236 @@ } ], "responses": { - "200": { - "description": "OK", - "schema": { - "$ref": "#/definitions/codersdk.OrganizationSyncSettings" - } + "200": { + "description": "OK", + "schema": { + "$ref": "#/definitions/codersdk.OrganizationSyncSettings" + } + } + } + } + }, + "/settings/idpsync/organization/config": { + 
"patch": { + "security": [ + { + "CoderSessionToken": [] + } + ], + "consumes": ["application/json"], + "produces": ["application/json"], + "tags": ["Enterprise"], + "summary": "Update organization IdP Sync config", + "operationId": "update-organization-idp-sync-config", + "parameters": [ + { + "description": "New config values", + "name": "request", + "in": "body", + "required": true, + "schema": { + "$ref": "#/definitions/codersdk.PatchOrganizationIDPSyncConfigRequest" + } + } + ], + "responses": { + "200": { + "description": "OK", + "schema": { + "$ref": "#/definitions/codersdk.OrganizationSyncSettings" + } + } + } + } + }, + "/settings/idpsync/organization/mapping": { + "patch": { + "security": [ + { + "CoderSessionToken": [] + } + ], + "consumes": ["application/json"], + "produces": ["application/json"], + "tags": ["Enterprise"], + "summary": "Update organization IdP Sync mapping", + "operationId": "update-organization-idp-sync-mapping", + "parameters": [ + { + "description": "Description of the mappings to add and remove", + "name": "request", + "in": "body", + "required": true, + "schema": { + "$ref": "#/definitions/codersdk.PatchOrganizationIDPSyncMappingRequest" + } + } + ], + "responses": { + "200": { + "description": "OK", + "schema": { + "$ref": "#/definitions/codersdk.OrganizationSyncSettings" + } + } + } + } + }, + "/tailnet": { + "get": { + "security": [ + { + "CoderSessionToken": [] + } + ], + "tags": ["Agents"], + "summary": "User-scoped tailnet RPC connection", + "operationId": "user-scoped-tailnet-rpc-connection", + "responses": { + "101": { + "description": "Switching Protocols" + } + } + } + }, + "/tasks": { + "get": { + "security": [ + { + "CoderSessionToken": [] + } + ], + "produces": ["application/json"], + "tags": ["Tasks"], + "summary": "List AI tasks", + "operationId": "list-ai-tasks", + "parameters": [ + { + "type": "string", + "description": "Search query for filtering tasks. 
Supports: owner:\u003cusername/uuid/me\u003e, organization:\u003corg-name/uuid\u003e, status:\u003cstatus\u003e", + "name": "q", + "in": "query" + } + ], + "responses": { + "200": { + "description": "OK", + "schema": { + "$ref": "#/definitions/codersdk.TasksListResponse" + } + } + } + } + }, + "/tasks/{user}": { + "post": { + "security": [ + { + "CoderSessionToken": [] + } + ], + "consumes": ["application/json"], + "produces": ["application/json"], + "tags": ["Tasks"], + "summary": "Create a new AI task", + "operationId": "create-a-new-ai-task", + "parameters": [ + { + "type": "string", + "description": "Username, user ID, or 'me' for the authenticated user", + "name": "user", + "in": "path", + "required": true + }, + { + "description": "Create task request", + "name": "request", + "in": "body", + "required": true, + "schema": { + "$ref": "#/definitions/codersdk.CreateTaskRequest" + } + } + ], + "responses": { + "201": { + "description": "Created", + "schema": { + "$ref": "#/definitions/codersdk.Task" + } + } + } + } + }, + "/tasks/{user}/{task}": { + "get": { + "security": [ + { + "CoderSessionToken": [] + } + ], + "produces": ["application/json"], + "tags": ["Tasks"], + "summary": "Get AI task by ID or name", + "operationId": "get-ai-task-by-id-or-name", + "parameters": [ + { + "type": "string", + "description": "Username, user ID, or 'me' for the authenticated user", + "name": "user", + "in": "path", + "required": true + }, + { + "type": "string", + "description": "Task ID, or task name", + "name": "task", + "in": "path", + "required": true + } + ], + "responses": { + "200": { + "description": "OK", + "schema": { + "$ref": "#/definitions/codersdk.Task" + } + } + } + }, + "delete": { + "security": [ + { + "CoderSessionToken": [] + } + ], + "tags": ["Tasks"], + "summary": "Delete AI task", + "operationId": "delete-ai-task", + "parameters": [ + { + "type": "string", + "description": "Username, user ID, or 'me' for the authenticated user", + "name": "user", + "in": 
"path", + "required": true + }, + { + "type": "string", + "description": "Task ID, or task name", + "name": "task", + "in": "path", + "required": true + } + ], + "responses": { + "202": { + "description": "Accepted" } } } }, - "/settings/idpsync/organization/config": { + "/tasks/{user}/{task}/input": { "patch": { "security": [ { @@ -4951,77 +4956,117 @@ } ], "consumes": ["application/json"], - "produces": ["application/json"], - "tags": ["Enterprise"], - "summary": "Update organization IdP Sync config", - "operationId": "update-organization-idp-sync-config", + "tags": ["Tasks"], + "summary": "Update AI task input", + "operationId": "update-ai-task-input", "parameters": [ { - "description": "New config values", + "type": "string", + "description": "Username, user ID, or 'me' for the authenticated user", + "name": "user", + "in": "path", + "required": true + }, + { + "type": "string", + "description": "Task ID, or task name", + "name": "task", + "in": "path", + "required": true + }, + { + "description": "Update task input request", "name": "request", "in": "body", "required": true, "schema": { - "$ref": "#/definitions/codersdk.PatchOrganizationIDPSyncConfigRequest" + "$ref": "#/definitions/codersdk.UpdateTaskInputRequest" } } ], "responses": { - "200": { - "description": "OK", - "schema": { - "$ref": "#/definitions/codersdk.OrganizationSyncSettings" - } + "204": { + "description": "No Content" } } } }, - "/settings/idpsync/organization/mapping": { - "patch": { + "/tasks/{user}/{task}/logs": { + "get": { "security": [ { "CoderSessionToken": [] } ], - "consumes": ["application/json"], "produces": ["application/json"], - "tags": ["Enterprise"], - "summary": "Update organization IdP Sync mapping", - "operationId": "update-organization-idp-sync-mapping", + "tags": ["Tasks"], + "summary": "Get AI task logs", + "operationId": "get-ai-task-logs", "parameters": [ { - "description": "Description of the mappings to add and remove", - "name": "request", - "in": "body", - 
"required": true, - "schema": { - "$ref": "#/definitions/codersdk.PatchOrganizationIDPSyncMappingRequest" - } + "type": "string", + "description": "Username, user ID, or 'me' for the authenticated user", + "name": "user", + "in": "path", + "required": true + }, + { + "type": "string", + "description": "Task ID, or task name", + "name": "task", + "in": "path", + "required": true } ], "responses": { "200": { "description": "OK", "schema": { - "$ref": "#/definitions/codersdk.OrganizationSyncSettings" + "$ref": "#/definitions/codersdk.TaskLogsResponse" } } } } }, - "/tailnet": { - "get": { + "/tasks/{user}/{task}/send": { + "post": { "security": [ { "CoderSessionToken": [] } ], - "tags": ["Agents"], - "summary": "User-scoped tailnet RPC connection", - "operationId": "user-scoped-tailnet-rpc-connection", + "consumes": ["application/json"], + "tags": ["Tasks"], + "summary": "Send input to AI task", + "operationId": "send-input-to-ai-task", + "parameters": [ + { + "type": "string", + "description": "Username, user ID, or 'me' for the authenticated user", + "name": "user", + "in": "path", + "required": true + }, + { + "type": "string", + "description": "Task ID, or task name", + "name": "task", + "in": "path", + "required": true + }, + { + "description": "Task input request", + "name": "request", + "in": "body", + "required": true, + "schema": { + "$ref": "#/definitions/codersdk.TaskSendRequest" + } + } + ], "responses": { - "101": { - "description": "Switching Protocols" + "204": { + "description": "No Content" } } } @@ -5309,6 +5354,37 @@ } } }, + "/templates/{template}/prebuilds/invalidate": { + "post": { + "security": [ + { + "CoderSessionToken": [] + } + ], + "produces": ["application/json"], + "tags": ["Enterprise"], + "summary": "Invalidate presets for template", + "operationId": "invalidate-presets-for-template", + "parameters": [ + { + "type": "string", + "format": "uuid", + "description": "Template ID", + "name": "template", + "in": "path", + "required": true + } 
+ ], + "responses": { + "200": { + "description": "OK", + "schema": { + "$ref": "#/definitions/codersdk.InvalidatePresetsResponse" + } + } + } + } + }, "/templates/{template}/versions": { "get": { "security": [ @@ -7342,6 +7418,74 @@ } } }, + "/users/{user}/preferences": { + "get": { + "security": [ + { + "CoderSessionToken": [] + } + ], + "produces": ["application/json"], + "tags": ["Users"], + "summary": "Get user preference settings", + "operationId": "get-user-preference-settings", + "parameters": [ + { + "type": "string", + "description": "User ID, name, or me", + "name": "user", + "in": "path", + "required": true + } + ], + "responses": { + "200": { + "description": "OK", + "schema": { + "$ref": "#/definitions/codersdk.UserPreferenceSettings" + } + } + } + }, + "put": { + "security": [ + { + "CoderSessionToken": [] + } + ], + "consumes": ["application/json"], + "produces": ["application/json"], + "tags": ["Users"], + "summary": "Update user preference settings", + "operationId": "update-user-preference-settings", + "parameters": [ + { + "type": "string", + "description": "User ID, name, or me", + "name": "user", + "in": "path", + "required": true + }, + { + "description": "New preference settings", + "name": "request", + "in": "body", + "required": true, + "schema": { + "$ref": "#/definitions/codersdk.UpdateUserPreferenceSettingsRequest" + } + } + ], + "responses": { + "200": { + "description": "OK", + "schema": { + "$ref": "#/definitions/codersdk.UserPreferenceSettings" + } + } + } + } + }, "/users/{user}/profile": { "put": { "security": [ @@ -10396,14 +10540,23 @@ "enabled": { "type": "boolean" }, + "inject_coder_mcp_tools": { + "type": "boolean" + }, "openai": { "$ref": "#/definitions/codersdk.AIBridgeOpenAIConfig" + }, + "retention": { + "type": "integer" } } }, "codersdk.AIBridgeInterception": { "type": "object", "properties": { + "api_key_id": { + "type": "string" + }, "ended_at": { "type": "string", "format": "date-time" @@ -11843,6 +11996,9 @@ 
"codersdk.CreateTaskRequest": { "type": "object", "properties": { + "display_name": { + "type": "string" + }, "input": { "type": "string" }, @@ -12924,15 +13080,15 @@ "oauth2", "mcp-server-http", "workspace-sharing", - "aibridge" + "terraform-directory-reuse" ], "x-enum-comments": { - "ExperimentAIBridge": "Enables AI Bridge functionality.", "ExperimentAutoFillParameters": "This should not be taken out of experiments until we have redesigned the feature.", "ExperimentExample": "This isn't used for anything.", "ExperimentMCPServerHTTP": "Enables the MCP HTTP server functionality.", "ExperimentNotifications": "Sends notifications via SMTP and webhooks following certain events.", "ExperimentOAuth2": "Enables OAuth2 provider functionality.", + "ExperimentTerraformWorkspace": "Enables reuse of existing terraform directory for builds", "ExperimentWebPush": "Enables web push notifications through the browser.", "ExperimentWorkspaceSharing": "Enables updating workspace ACLs for sharing with users and groups.", "ExperimentWorkspaceUsage": "Enables the new workspace usage tracking." 
@@ -12946,7 +13102,7 @@ "ExperimentOAuth2", "ExperimentMCPServerHTTP", "ExperimentWorkspaceSharing", - "ExperimentAIBridge" + "ExperimentTerraformWorkspace" ] }, "codersdk.ExternalAPIKeyScopes": { @@ -13481,6 +13637,31 @@ "InsightsReportIntervalWeek" ] }, + "codersdk.InvalidatePresetsResponse": { + "type": "object", + "properties": { + "invalidated": { + "type": "array", + "items": { + "$ref": "#/definitions/codersdk.InvalidatedPreset" + } + } + } + }, + "codersdk.InvalidatedPreset": { + "type": "object", + "properties": { + "preset_name": { + "type": "string" + }, + "template_name": { + "type": "string" + }, + "template_version_name": { + "type": "string" + } + } + }, "codersdk.IssueReconnectingPTYSignedTokenRequest": { "type": "object", "required": ["agentID", "url"], @@ -13992,6 +14173,9 @@ "type": "string" } }, + "revocation_endpoint": { + "type": "string" + }, "scopes_supported": { "type": "array", "items": { @@ -16252,6 +16436,9 @@ "current_state": { "$ref": "#/definitions/codersdk.TaskStateEntry" }, + "display_name": { + "type": "string" + }, "id": { "type": "string", "format": "uuid" @@ -16604,6 +16791,9 @@ }, "use_classic_parameter_flow": { "type": "boolean" + }, + "use_terraform_workspace_cache": { + "type": "boolean" } } }, @@ -17381,6 +17571,14 @@ } } }, + "codersdk.UpdateTaskInputRequest": { + "type": "object", + "properties": { + "input": { + "type": "string" + } + } + }, "codersdk.UpdateTemplateACL": { "type": "object", "properties": { @@ -17488,6 +17686,10 @@ "use_classic_parameter_flow": { "description": "UseClassicParameterFlow is a flag that switches the default behavior to use the classic\nparameter flow when creating a workspace. This only affects deployments with the experiment\n\"dynamic-parameters\" enabled. 
This setting will live for a period after the experiment is\nmade the default.\nAn \"opt-out\" is present in case the new feature breaks some existing templates.", "type": "boolean" + }, + "use_terraform_workspace_cache": { + "description": "UseTerraformWorkspaceCache allows optionally specifying whether to use cached\nterraform directories for workspaces created from this template. This field\nonly applies when the correct experiment is enabled. This field is subject to\nbeing removed in the future.", + "type": "boolean" } } }, @@ -17526,6 +17728,14 @@ } } }, + "codersdk.UpdateUserPreferenceSettingsRequest": { + "type": "object", + "properties": { + "task_notification_alert_dismissed": { + "type": "boolean" + } + } + }, "codersdk.UpdateUserProfileRequest": { "type": "object", "required": ["username"], @@ -17886,6 +18096,14 @@ } } }, + "codersdk.UserPreferenceSettings": { + "type": "object", + "properties": { + "task_notification_alert_dismissed": { + "type": "boolean" + } + } + }, "codersdk.UserQuietHoursScheduleConfig": { "type": "object", "properties": { @@ -18101,6 +18319,14 @@ "description": "OwnerName is the username of the owner of the workspace.", "type": "string" }, + "task_id": { + "description": "TaskID, if set, indicates that the workspace is relevant to the given codersdk.Task.", + "allOf": [ + { + "$ref": "#/definitions/uuid.NullUUID" + } + ] + }, "template_active_version_id": { "type": "string", "format": "uuid" @@ -18855,11 +19081,6 @@ "codersdk.WorkspaceBuild": { "type": "object", "properties": { - "ai_task_sidebar_app_id": { - "description": "Deprecated: This field has been replaced with `TaskAppID`", - "type": "string", - "format": "uuid" - }, "build_number": { "type": "integer" }, @@ -18875,6 +19096,7 @@ "format": "date-time" }, "has_ai_task": { + "description": "Deprecated: This field has been deprecated in favor of Task WorkspaceID.", "type": "boolean" }, "has_external_agent": { @@ -18934,10 +19156,6 @@ } ] }, - "task_app_id": { - "type": 
"string", - "format": "uuid" - }, "template_version_id": { "type": "string", "format": "uuid" diff --git a/coderd/audit.go b/coderd/audit.go index e43ed1c5128ec..3a3237a9fed50 100644 --- a/coderd/audit.go +++ b/coderd/audit.go @@ -509,11 +509,11 @@ func (api *API) auditLogResourceLink(ctx context.Context, alog database.GetAudit if err != nil { return "" } - workspace, err := api.Database.GetWorkspaceByID(ctx, task.WorkspaceID.UUID) + user, err := api.Database.GetUserByID(ctx, task.OwnerID) if err != nil { return "" } - return fmt.Sprintf("/tasks/%s/%s", workspace.OwnerName, task.Name) + return fmt.Sprintf("/tasks/%s/%s", user.Username, task.ID) default: return "" diff --git a/coderd/autobuild/lifecycle_executor_test.go b/coderd/autobuild/lifecycle_executor_test.go index 263a9e7e13c77..0610c781fe966 100644 --- a/coderd/autobuild/lifecycle_executor_test.go +++ b/coderd/autobuild/lifecycle_executor_test.go @@ -1764,3 +1764,174 @@ func TestExecutorAutostartSkipsWhenNoProvisionersAvailable(t *testing.T) { assert.Len(t, stats.Transitions, 1, "should create builds when provisioners are available") } + +func TestExecutorTaskWorkspace(t *testing.T) { + t.Parallel() + + createTaskTemplate := func(t *testing.T, client *codersdk.Client, orgID uuid.UUID, ctx context.Context, defaultTTL time.Duration) codersdk.Template { + t.Helper() + + taskAppID := uuid.New() + version := coderdtest.CreateTemplateVersion(t, client, orgID, &echo.Responses{ + Parse: echo.ParseComplete, + ProvisionPlan: []*proto.Response{ + { + Type: &proto.Response_Plan{ + Plan: &proto.PlanComplete{HasAiTasks: true}, + }, + }, + }, + ProvisionApply: []*proto.Response{ + { + Type: &proto.Response_Apply{ + Apply: &proto.ApplyComplete{ + Resources: []*proto.Resource{ + { + Agents: []*proto.Agent{ + { + Id: uuid.NewString(), + Name: "dev", + Auth: &proto.Agent_Token{ + Token: uuid.NewString(), + }, + Apps: []*proto.App{ + { + Id: taskAppID.String(), + Slug: "task-app", + }, + }, + }, + }, + }, + }, + AiTasks: 
[]*proto.AITask{ + { + AppId: taskAppID.String(), + }, + }, + }, + }, + }, + }, + }) + coderdtest.AwaitTemplateVersionJobCompleted(t, client, version.ID) + template := coderdtest.CreateTemplate(t, client, orgID, version.ID) + + if defaultTTL > 0 { + _, err := client.UpdateTemplateMeta(ctx, template.ID, codersdk.UpdateTemplateMeta{ + DefaultTTLMillis: defaultTTL.Milliseconds(), + }) + require.NoError(t, err) + } + + return template + } + + createTaskWorkspace := func(t *testing.T, client *codersdk.Client, template codersdk.Template, ctx context.Context, input string) codersdk.Workspace { + t.Helper() + + task, err := client.CreateTask(ctx, "me", codersdk.CreateTaskRequest{ + TemplateVersionID: template.ActiveVersionID, + Input: input, + }) + require.NoError(t, err) + require.True(t, task.WorkspaceID.Valid, "task should have a workspace") + + workspace, err := client.Workspace(ctx, task.WorkspaceID.UUID) + require.NoError(t, err) + coderdtest.AwaitWorkspaceBuildJobCompleted(t, client, workspace.LatestBuild.ID) + + return workspace + } + + t.Run("Autostart", func(t *testing.T) { + t.Parallel() + + var ( + ctx = testutil.Context(t, testutil.WaitShort) + sched = mustSchedule(t, "CRON_TZ=UTC 0 * * * *") + tickCh = make(chan time.Time) + statsCh = make(chan autobuild.Stats) + client, db = coderdtest.NewWithDatabase(t, &coderdtest.Options{ + AutobuildTicker: tickCh, + IncludeProvisionerDaemon: true, + AutobuildStats: statsCh, + }) + admin = coderdtest.CreateFirstUser(t, client) + ) + + // Given: A task workspace + template := createTaskTemplate(t, client, admin.OrganizationID, ctx, 0) + workspace := createTaskWorkspace(t, client, template, ctx, "test task for autostart") + + // Given: The task workspace has an autostart schedule + err := client.UpdateWorkspaceAutostart(ctx, workspace.ID, codersdk.UpdateWorkspaceAutostartRequest{ + Schedule: ptr.Ref(sched.String()), + }) + require.NoError(t, err) + + // Given: That the workspace is in a stopped state. 
+ workspace = coderdtest.MustTransitionWorkspace(t, client, workspace.ID, codersdk.WorkspaceTransitionStart, codersdk.WorkspaceTransitionStop) + + p, err := coderdtest.GetProvisionerForTags(db, time.Now(), workspace.OrganizationID, map[string]string{}) + require.NoError(t, err) + + // When: the autobuild executor ticks after the scheduled time + go func() { + tickTime := sched.Next(workspace.LatestBuild.CreatedAt) + coderdtest.UpdateProvisionerLastSeenAt(t, db, p.ID, tickTime) + tickCh <- tickTime + close(tickCh) + }() + + // Then: We expect to see a start transition + stats := <-statsCh + require.Len(t, stats.Transitions, 1, "lifecycle executor should transition the task workspace") + assert.Contains(t, stats.Transitions, workspace.ID, "task workspace should be in transitions") + assert.Equal(t, database.WorkspaceTransitionStart, stats.Transitions[workspace.ID], "should autostart the workspace") + require.Empty(t, stats.Errors, "should have no errors when managing task workspaces") + }) + + t.Run("Autostop", func(t *testing.T) { + t.Parallel() + + var ( + ctx = testutil.Context(t, testutil.WaitShort) + tickCh = make(chan time.Time) + statsCh = make(chan autobuild.Stats) + client, db = coderdtest.NewWithDatabase(t, &coderdtest.Options{ + AutobuildTicker: tickCh, + IncludeProvisionerDaemon: true, + AutobuildStats: statsCh, + }) + admin = coderdtest.CreateFirstUser(t, client) + ) + + // Given: A task workspace with an 8 hour deadline + template := createTaskTemplate(t, client, admin.OrganizationID, ctx, 8*time.Hour) + workspace := createTaskWorkspace(t, client, template, ctx, "test task for autostop") + + // Given: The workspace is currently running + workspace = coderdtest.MustWorkspace(t, client, workspace.ID) + require.Equal(t, codersdk.WorkspaceTransitionStart, workspace.LatestBuild.Transition) + require.NotZero(t, workspace.LatestBuild.Deadline, "workspace should have a deadline for autostop") + + p, err := coderdtest.GetProvisionerForTags(db, time.Now(), 
workspace.OrganizationID, map[string]string{}) + require.NoError(t, err) + + // When: the autobuild executor ticks after the deadline + go func() { + tickTime := workspace.LatestBuild.Deadline.Time.Add(time.Minute) + coderdtest.UpdateProvisionerLastSeenAt(t, db, p.ID, tickTime) + tickCh <- tickTime + close(tickCh) + }() + + // Then: We expect to see a stop transition + stats := <-statsCh + require.Len(t, stats.Transitions, 1, "lifecycle executor should transition the task workspace") + assert.Contains(t, stats.Transitions, workspace.ID, "task workspace should be in transitions") + assert.Equal(t, database.WorkspaceTransitionStop, stats.Transitions[workspace.ID], "should autostop the workspace") + require.Empty(t, stats.Errors, "should have no errors when managing task workspaces") + }) +} diff --git a/coderd/coderd.go b/coderd/coderd.go index a1f94bfa6fee7..e79a2226ba1f6 100644 --- a/coderd/coderd.go +++ b/coderd/coderd.go @@ -99,6 +99,7 @@ import ( "github.com/coder/coder/v2/coderd/workspacestats" "github.com/coder/coder/v2/codersdk" "github.com/coder/coder/v2/codersdk/healthsdk" + sharedhttpmw "github.com/coder/coder/v2/httpmw" "github.com/coder/coder/v2/provisionersdk" "github.com/coder/coder/v2/site" "github.com/coder/coder/v2/tailnet" @@ -610,6 +611,7 @@ func New(options *Options) *API { dbRolluper: options.DatabaseRolluper, } api.WorkspaceAppsProvider = workspaceapps.NewDBTokenProvider( + ctx, options.Logger.Named("workspaceapps"), options.AccessURL, options.Authorizer, @@ -860,7 +862,7 @@ func New(options *Options) *API { prometheusMW := httpmw.Prometheus(options.PrometheusRegistry) r.Use( - httpmw.Recover(api.Logger), + sharedhttpmw.Recover(api.Logger), httpmw.WithProfilingLabels, tracing.StatusWriterMiddleware, tracing.Middleware(api.TracerProvider), @@ -1021,10 +1023,10 @@ func New(options *Options) *API { apiRateLimiter, httpmw.ReportCLITelemetry(api.Logger, options.Telemetry), ) - r.Route("/aitasks", func(r chi.Router) { - r.Use(apiKeyMiddleware) - 
r.Get("/prompts", api.aiTasksPrompts) - }) + + // NOTE(DanielleMaywood): + // Tasks have been promoted to stable, but we have guaranteed a single release transition period + // where these routes must remain. These should be removed no earlier than Coder v2.30.0 r.Route("/tasks", func(r chi.Router) { r.Use(apiKeyMiddleware) @@ -1038,6 +1040,7 @@ func New(options *Options) *API { r.Use(httpmw.ExtractTaskParam(options.Database)) r.Get("/", api.taskGet) r.Delete("/", api.taskDelete) + r.Patch("/input", api.taskUpdateInput) r.Post("/send", api.taskSend) r.Get("/logs", api.taskLogs) }) @@ -1333,6 +1336,8 @@ func New(options *Options) *API { }) r.Get("/appearance", api.userAppearanceSettings) r.Put("/appearance", api.putUserAppearanceSettings) + r.Get("/preferences", api.userPreferenceSettings) + r.Put("/preferences", api.putUserPreferenceSettings) r.Route("/password", func(r chi.Router) { r.Use(httpmw.RateLimit(options.LoginRateLimit, time.Minute)) r.Put("/", api.putUserPassword) @@ -1651,6 +1656,25 @@ func New(options *Options) *API { r.Route("/init-script", func(r chi.Router) { r.Get("/{os}/{arch}", api.initScript) }) + r.Route("/tasks", func(r chi.Router) { + r.Use(apiKeyMiddleware) + + r.Get("/", api.tasksList) + + r.Route("/{user}", func(r chi.Router) { + r.Use(httpmw.ExtractOrganizationMembersParam(options.Database, api.HTTPAuth.Authorize)) + r.Post("/", api.tasksCreate) + + r.Route("/{task}", func(r chi.Router) { + r.Use(httpmw.ExtractTaskParam(options.Database)) + r.Get("/", api.taskGet) + r.Delete("/", api.taskDelete) + r.Patch("/input", api.taskUpdateInput) + r.Post("/send", api.taskSend) + r.Get("/logs", api.taskLogs) + }) + }) + }) }) if options.SwaggerEndpoint { @@ -2002,6 +2026,7 @@ func (api *API) CreateInMemoryTaggedProvisionerDaemon(dialCtx context.Context, n api.NotificationsEnqueuer, &api.PrebuildsReconciler, api.ProvisionerdServerMetrics, + api.Experiments, ) if err != nil { return nil, err diff --git a/coderd/coderdtest/coderdtest.go 
b/coderd/coderdtest/coderdtest.go index 463ee888f6f22..ac362295f0e00 100644 --- a/coderd/coderdtest/coderdtest.go +++ b/coderd/coderdtest/coderdtest.go @@ -1604,7 +1604,7 @@ func (nopcloser) Close() error { return nil } // SDKError coerces err into an SDK error. func SDKError(t testing.TB, err error) *codersdk.Error { var cerr *codersdk.Error - require.True(t, errors.As(err, &cerr), "should be SDK error, got %w", err) + require.True(t, errors.As(err, &cerr), "should be SDK error, got %s", err) return cerr } diff --git a/coderd/database/check_constraint.go b/coderd/database/check_constraint.go index 8b1917b7697db..c8752b207de16 100644 --- a/coderd/database/check_constraint.go +++ b/coderd/database/check_constraint.go @@ -6,15 +6,14 @@ type CheckConstraint string // CheckConstraint enums. const ( - CheckAPIKeysAllowListNotEmpty CheckConstraint = "api_keys_allow_list_not_empty" // api_keys - CheckOneTimePasscodeSet CheckConstraint = "one_time_passcode_set" // users - CheckUsersUsernameMinLength CheckConstraint = "users_username_min_length" // users - CheckMaxProvisionerLogsLength CheckConstraint = "max_provisioner_logs_length" // provisioner_jobs - CheckMaxLogsLength CheckConstraint = "max_logs_length" // workspace_agents - CheckSubsystemsNotNone CheckConstraint = "subsystems_not_none" // workspace_agents - CheckWorkspaceBuildsAiTaskSidebarAppIDRequired CheckConstraint = "workspace_builds_ai_task_sidebar_app_id_required" // workspace_builds - CheckWorkspaceBuildsDeadlineBelowMaxDeadline CheckConstraint = "workspace_builds_deadline_below_max_deadline" // workspace_builds - CheckTelemetryLockEventTypeConstraint CheckConstraint = "telemetry_lock_event_type_constraint" // telemetry_locks - CheckValidationMonotonicOrder CheckConstraint = "validation_monotonic_order" // template_version_parameters - CheckUsageEventTypeCheck CheckConstraint = "usage_event_type_check" // usage_events + CheckAPIKeysAllowListNotEmpty CheckConstraint = "api_keys_allow_list_not_empty" // api_keys 
+ CheckOneTimePasscodeSet CheckConstraint = "one_time_passcode_set" // users + CheckUsersUsernameMinLength CheckConstraint = "users_username_min_length" // users + CheckMaxProvisionerLogsLength CheckConstraint = "max_provisioner_logs_length" // provisioner_jobs + CheckMaxLogsLength CheckConstraint = "max_logs_length" // workspace_agents + CheckSubsystemsNotNone CheckConstraint = "subsystems_not_none" // workspace_agents + CheckWorkspaceBuildsDeadlineBelowMaxDeadline CheckConstraint = "workspace_builds_deadline_below_max_deadline" // workspace_builds + CheckTelemetryLockEventTypeConstraint CheckConstraint = "telemetry_lock_event_type_constraint" // telemetry_locks + CheckValidationMonotonicOrder CheckConstraint = "validation_monotonic_order" // template_version_parameters + CheckUsageEventTypeCheck CheckConstraint = "usage_event_type_check" // usage_events ) diff --git a/coderd/database/db2sdk/db2sdk.go b/coderd/database/db2sdk/db2sdk.go index 2e1770d47dc87..8126ea435e838 100644 --- a/coderd/database/db2sdk/db2sdk.go +++ b/coderd/database/db2sdk/db2sdk.go @@ -974,6 +974,9 @@ func AIBridgeInterception(interception database.AIBridgeInterception, initiator UserPrompts: sdkUserPrompts, ToolUsages: sdkToolUsages, } + if interception.APIKeyID.Valid { + intc.APIKeyID = &interception.APIKeyID.String + } if interception.EndedAt.Valid { intc.EndedAt = &interception.EndedAt.Time } @@ -1018,6 +1021,18 @@ func AIBridgeToolUsage(usage database.AIBridgeToolUsage) codersdk.AIBridgeToolUs } } +func InvalidatedPresets(invalidatedPresets []database.UpdatePresetsLastInvalidatedAtRow) []codersdk.InvalidatedPreset { + var presets []codersdk.InvalidatedPreset + for _, p := range invalidatedPresets { + presets = append(presets, codersdk.InvalidatedPreset{ + TemplateName: p.TemplateName, + TemplateVersionName: p.TemplateVersionName, + PresetName: p.TemplateVersionPresetName, + }) + } + return presets +} + func jsonOrEmptyMap(rawMessage pqtype.NullRawMessage) map[string]any { var m 
map[string]any if !rawMessage.Valid { diff --git a/coderd/database/dbauthz/dbauthz.go b/coderd/database/dbauthz/dbauthz.go index 01d3c4f26ae85..a4d801512e730 100644 --- a/coderd/database/dbauthz/dbauthz.go +++ b/coderd/database/dbauthz/dbauthz.go @@ -217,10 +217,10 @@ var ( rbac.ResourceTemplate.Type: {policy.ActionRead, policy.ActionUpdate}, // Unsure why provisionerd needs update and read personal rbac.ResourceUser.Type: {policy.ActionRead, policy.ActionReadPersonal, policy.ActionUpdatePersonal}, - rbac.ResourceWorkspaceDormant.Type: {policy.ActionDelete, policy.ActionRead, policy.ActionUpdate, policy.ActionWorkspaceStop}, + rbac.ResourceWorkspaceDormant.Type: {policy.ActionDelete, policy.ActionRead, policy.ActionUpdate, policy.ActionWorkspaceStop, policy.ActionCreateAgent}, rbac.ResourceWorkspace.Type: {policy.ActionDelete, policy.ActionRead, policy.ActionUpdate, policy.ActionWorkspaceStart, policy.ActionWorkspaceStop, policy.ActionCreateAgent}, - // Provisionerd needs to read and update tasks associated with workspaces. - rbac.ResourceTask.Type: {policy.ActionRead, policy.ActionUpdate}, + // Provisionerd needs to read, update, and delete tasks associated with workspaces. + rbac.ResourceTask.Type: {policy.ActionRead, policy.ActionUpdate, policy.ActionDelete}, rbac.ResourceApiKey.Type: {policy.WildcardSymbol}, // When org scoped provisioner credentials are implemented, // this can be reduced to read a specific org. 
@@ -254,6 +254,7 @@ var ( rbac.ResourceFile.Type: {policy.ActionRead}, // Required to read terraform files rbac.ResourceNotificationMessage.Type: {policy.ActionCreate, policy.ActionRead}, rbac.ResourceSystem.Type: {policy.WildcardSymbol}, + rbac.ResourceTask.Type: {policy.ActionRead, policy.ActionUpdate}, rbac.ResourceTemplate.Type: {policy.ActionRead, policy.ActionUpdate}, rbac.ResourceUser.Type: {policy.ActionRead}, rbac.ResourceWorkspace.Type: {policy.ActionDelete, policy.ActionRead, policy.ActionUpdate, policy.ActionWorkspaceStart, policy.ActionWorkspaceStop}, @@ -595,19 +596,19 @@ var ( // See aibridged package. subjectAibridged = rbac.Subject{ Type: rbac.SubjectAibridged, - FriendlyName: "AIBridge Daemon", + FriendlyName: "AI Bridge Daemon", ID: uuid.Nil.String(), Roles: rbac.Roles([]rbac.Role{ { Identifier: rbac.RoleIdentifier{Name: "aibridged"}, - DisplayName: "AIBridge Daemon", + DisplayName: "AI Bridge Daemon", Site: rbac.Permissions(map[string][]policy.Action{ rbac.ResourceUser.Type: { policy.ActionRead, // Required to validate API key owner is active. policy.ActionReadPersonal, // Required to read users' external auth links. // TODO: this is too broad; reduce scope to just external_auth_links by creating separate resource. }, rbac.ResourceApiKey.Type: {policy.ActionRead}, // Validate API keys. - rbac.ResourceAibridgeInterception.Type: {policy.ActionCreate, policy.ActionRead, policy.ActionUpdate}, + rbac.ResourceAibridgeInterception.Type: {policy.ActionCreate, policy.ActionRead, policy.ActionUpdate, policy.ActionDelete}, }), User: []rbac.Permission{}, ByOrgID: map[string]rbac.OrgPermissions{}, @@ -1640,6 +1641,15 @@ func (q *querier) DeleteCustomRole(ctx context.Context, arg database.DeleteCusto return q.db.DeleteCustomRole(ctx, arg) } +func (q *querier) DeleteExpiredAPIKeys(ctx context.Context, arg database.DeleteExpiredAPIKeysParams) (int64, error) { + // Requires DELETE across all API keys. 
+ if err := q.authorizeContext(ctx, policy.ActionDelete, rbac.ResourceApiKey); err != nil { + return 0, err + } + + return q.db.DeleteExpiredAPIKeys(ctx, arg) +} + func (q *querier) DeleteExternalAuthLink(ctx context.Context, arg database.DeleteExternalAuthLinkParams) error { return fetchAndExec(q.log, q.auth, policy.ActionUpdatePersonal, func(ctx context.Context, arg database.DeleteExternalAuthLinkParams) (database.ExternalAuthLink, error) { //nolint:gosimple @@ -1722,6 +1732,13 @@ func (q *querier) DeleteOAuth2ProviderAppTokensByAppAndUserID(ctx context.Contex return q.db.DeleteOAuth2ProviderAppTokensByAppAndUserID(ctx, arg) } +func (q *querier) DeleteOldAIBridgeRecords(ctx context.Context, beforeTime time.Time) (int32, error) { + if err := q.authorizeContext(ctx, policy.ActionDelete, rbac.ResourceAibridgeInterception); err != nil { + return -1, err + } + return q.db.DeleteOldAIBridgeRecords(ctx, beforeTime) +} + func (q *querier) DeleteOldAuditLogConnectionEvents(ctx context.Context, threshold database.DeleteOldAuditLogConnectionEventsParams) error { // `ResourceSystem` is deprecated, but it doesn't make sense to add // `policy.ActionDelete` to `ResourceAuditLog`, since this is the one and @@ -2409,11 +2426,11 @@ func (q *querier) GetLatestCryptoKeyByFeature(ctx context.Context, feature datab return q.db.GetLatestCryptoKeyByFeature(ctx, feature) } -func (q *querier) GetLatestWorkspaceAppStatusesByAppID(ctx context.Context, appID uuid.UUID) ([]database.WorkspaceAppStatus, error) { +func (q *querier) GetLatestWorkspaceAppStatusByAppID(ctx context.Context, appID uuid.UUID) (database.WorkspaceAppStatus, error) { if err := q.authorizeContext(ctx, policy.ActionRead, rbac.ResourceSystem); err != nil { - return nil, err + return database.WorkspaceAppStatus{}, err } - return q.db.GetLatestWorkspaceAppStatusesByAppID(ctx, appID) + return q.db.GetLatestWorkspaceAppStatusByAppID(ctx, appID) } func (q *querier) GetLatestWorkspaceAppStatusesByWorkspaceIDs(ctx context.Context, 
ids []uuid.UUID) ([]database.WorkspaceAppStatus, error) { @@ -2648,6 +2665,13 @@ func (q *querier) GetOrganizationsByUserID(ctx context.Context, userID database. return fetchWithPostFilter(q.auth, policy.ActionRead, q.db.GetOrganizationsByUserID)(ctx, userID) } +func (q *querier) GetOrganizationsWithPrebuildStatus(ctx context.Context, arg database.GetOrganizationsWithPrebuildStatusParams) ([]database.GetOrganizationsWithPrebuildStatusRow, error) { + if err := q.authorizeContext(ctx, policy.ActionRead, rbac.ResourceOrganization.All()); err != nil { + return nil, err + } + return q.db.GetOrganizationsWithPrebuildStatus(ctx, arg) +} + func (q *querier) GetParameterSchemasByJobID(ctx context.Context, jobID uuid.UUID) ([]database.ParameterSchema, error) { version, err := q.db.GetTemplateVersionByJobID(ctx, jobID) if err != nil { @@ -2981,6 +3005,10 @@ func (q *querier) GetTaskByID(ctx context.Context, id uuid.UUID) (database.Task, return fetch(q.log, q.auth, q.db.GetTaskByID)(ctx, id) } +func (q *querier) GetTaskByOwnerIDAndName(ctx context.Context, arg database.GetTaskByOwnerIDAndNameParams) (database.Task, error) { + return fetch(q.log, q.auth, q.db.GetTaskByOwnerIDAndName)(ctx, arg) +} + func (q *querier) GetTaskByWorkspaceID(ctx context.Context, workspaceID uuid.UUID) (database.Task, error) { return fetch(q.log, q.auth, q.db.GetTaskByWorkspaceID)(ctx, workspaceID) } @@ -3403,6 +3431,17 @@ func (q *querier) GetUserStatusCounts(ctx context.Context, arg database.GetUserS return q.db.GetUserStatusCounts(ctx, arg) } +func (q *querier) GetUserTaskNotificationAlertDismissed(ctx context.Context, userID uuid.UUID) (bool, error) { + user, err := q.db.GetUserByID(ctx, userID) + if err != nil { + return false, err + } + if err := q.authorizeContext(ctx, policy.ActionReadPersonal, user); err != nil { + return false, err + } + return q.db.GetUserTaskNotificationAlertDismissed(ctx, userID) +} + func (q *querier) GetUserTerminalFont(ctx context.Context, userID uuid.UUID) (string, 
error) { u, err := q.db.GetUserByID(ctx, userID) if err != nil { @@ -4933,10 +4972,10 @@ func (q *querier) UpdateOrganizationDeletedByID(ctx context.Context, arg databas return deleteQ(q.log, q.auth, q.db.GetOrganizationByID, deleteF)(ctx, arg.ID) } -func (q *querier) UpdatePrebuildProvisionerJobWithCancel(ctx context.Context, arg database.UpdatePrebuildProvisionerJobWithCancelParams) ([]uuid.UUID, error) { +func (q *querier) UpdatePrebuildProvisionerJobWithCancel(ctx context.Context, arg database.UpdatePrebuildProvisionerJobWithCancelParams) ([]database.UpdatePrebuildProvisionerJobWithCancelRow, error) { // Prebuild operation for canceling pending prebuild jobs from non-active template versions if err := q.authorizeContext(ctx, policy.ActionUpdate, rbac.ResourcePrebuiltWorkspace); err != nil { - return []uuid.UUID{}, err + return []database.UpdatePrebuildProvisionerJobWithCancelRow{}, err } return q.db.UpdatePrebuildProvisionerJobWithCancel(ctx, arg) } @@ -4960,6 +4999,20 @@ func (q *querier) UpdatePresetPrebuildStatus(ctx context.Context, arg database.U return q.db.UpdatePresetPrebuildStatus(ctx, arg) } +func (q *querier) UpdatePresetsLastInvalidatedAt(ctx context.Context, arg database.UpdatePresetsLastInvalidatedAtParams) ([]database.UpdatePresetsLastInvalidatedAtRow, error) { + // Fetch template to check authorization + template, err := q.db.GetTemplateByID(ctx, arg.TemplateID) + if err != nil { + return nil, err + } + + if err := q.authorizeContext(ctx, policy.ActionUpdate, template); err != nil { + return nil, err + } + + return q.db.UpdatePresetsLastInvalidatedAt(ctx, arg) +} + func (q *querier) UpdateProvisionerDaemonLastSeenAt(ctx context.Context, arg database.UpdateProvisionerDaemonLastSeenAtParams) error { if err := q.authorizeContext(ctx, policy.ActionUpdate, rbac.ResourceProvisionerDaemon); err != nil { return err @@ -5088,6 +5141,21 @@ func (q *querier) UpdateTailnetPeerStatusByCoordinator(ctx context.Context, arg return 
q.db.UpdateTailnetPeerStatusByCoordinator(ctx, arg) } +func (q *querier) UpdateTaskPrompt(ctx context.Context, arg database.UpdateTaskPromptParams) (database.TaskTable, error) { + // An actor is allowed to update the prompt of a task if they have + // permission to update the task (same as UpdateTaskWorkspaceID). + task, err := q.db.GetTaskByID(ctx, arg.ID) + if err != nil { + return database.TaskTable{}, err + } + + if err := q.authorizeContext(ctx, policy.ActionUpdate, task.RBACObject()); err != nil { + return database.TaskTable{}, err + } + + return q.db.UpdateTaskPrompt(ctx, arg) +} + func (q *querier) UpdateTaskWorkspaceID(ctx context.Context, arg database.UpdateTaskWorkspaceIDParams) (database.TaskTable, error) { // An actor is allowed to update the workspace ID of a task if they are the // owner of the task and workspace or have the appropriate permissions. @@ -5407,6 +5475,17 @@ func (q *querier) UpdateUserStatus(ctx context.Context, arg database.UpdateUserS return updateWithReturn(q.log, q.auth, fetch, q.db.UpdateUserStatus)(ctx, arg) } +func (q *querier) UpdateUserTaskNotificationAlertDismissed(ctx context.Context, arg database.UpdateUserTaskNotificationAlertDismissedParams) (bool, error) { + user, err := q.db.GetUserByID(ctx, arg.UserID) + if err != nil { + return false, err + } + if err := q.authorizeContext(ctx, policy.ActionUpdatePersonal, user); err != nil { + return false, err + } + return q.db.UpdateUserTaskNotificationAlertDismissed(ctx, arg) +} + func (q *querier) UpdateUserTerminalFont(ctx context.Context, arg database.UpdateUserTerminalFontParams) (database.UserConfig, error) { u, err := q.db.GetUserByID(ctx, arg.UserID) if err != nil { @@ -5499,6 +5578,22 @@ func (q *querier) UpdateWorkspaceAgentLogOverflowByID(ctx context.Context, arg d } func (q *querier) UpdateWorkspaceAgentMetadata(ctx context.Context, arg database.UpdateWorkspaceAgentMetadataParams) error { + // Fast path: Check if we have an RBAC object in context. 
+ // This is set by the workspace agent RPC handler to avoid the expensive + // GetWorkspaceByAgentID query for every metadata update. + // NOTE: The cached RBAC object is refreshed every 5 minutes in agentapi/api.go. + if rbacObj, ok := WorkspaceRBACFromContext(ctx); ok { + // Errors here will result in falling back to the GetWorkspaceAgentByID query, skipping + // the cache in case the cached data is stale. + if err := q.authorizeContext(ctx, policy.ActionUpdate, rbacObj); err == nil { + return q.db.UpdateWorkspaceAgentMetadata(ctx, arg) + } + q.log.Debug(ctx, "fast path authorization failed, using slow path", + slog.F("agent_id", arg.WorkspaceAgentID)) + } + + // Slow path: Fallback to fetching the workspace for authorization if the RBAC object is not present (or is invalid) + // in the request context. workspace, err := q.db.GetWorkspaceByAgentID(ctx, arg.WorkspaceAgentID) if err != nil { return err diff --git a/coderd/database/dbauthz/dbauthz_test.go b/coderd/database/dbauthz/dbauthz_test.go index 8cf622a4347f3..e70da620e167f 100644 --- a/coderd/database/dbauthz/dbauthz_test.go +++ b/coderd/database/dbauthz/dbauthz_test.go @@ -7,6 +7,7 @@ import ( "fmt" "net" "reflect" + "strconv" "testing" "time" @@ -216,6 +217,14 @@ func (s *MethodTestSuite) TestAPIKey() { dbm.EXPECT().DeleteAPIKeyByID(gomock.Any(), key.ID).Return(nil).AnyTimes() check.Args(key.ID).Asserts(key, policy.ActionDelete).Returns() })) + s.Run("DeleteExpiredAPIKeys", s.Mocked(func(dbm *dbmock.MockStore, faker *gofakeit.Faker, check *expects) { + args := database.DeleteExpiredAPIKeysParams{ + Before: time.Date(2025, 11, 21, 0, 0, 0, 0, time.UTC), + LimitCount: 1000, + } + dbm.EXPECT().DeleteExpiredAPIKeys(gomock.Any(), args).Return(int64(0), nil).AnyTimes() + check.Args(args).Asserts(rbac.ResourceApiKey, policy.ActionDelete).Returns(int64(0)) + })) s.Run("GetAPIKeyByID", s.Mocked(func(dbm *dbmock.MockStore, faker *gofakeit.Faker, check *expects) { key := testutil.Fake(s.T(), faker, 
database.APIKey{}) dbm.EXPECT().GetAPIKeyByID(gomock.Any(), key.ID).Return(key, nil).AnyTimes() @@ -646,10 +655,13 @@ func (s *MethodTestSuite) TestProvisionerJob() { PresetID: uuid.NullUUID{UUID: uuid.New(), Valid: true}, Now: dbtime.Now(), } - jobIDs := []uuid.UUID{uuid.New(), uuid.New()} + canceledJobs := []database.UpdatePrebuildProvisionerJobWithCancelRow{ + {ID: uuid.New(), WorkspaceID: uuid.New(), TemplateID: uuid.New(), TemplateVersionPresetID: uuid.NullUUID{UUID: uuid.New(), Valid: true}}, + {ID: uuid.New(), WorkspaceID: uuid.New(), TemplateID: uuid.New(), TemplateVersionPresetID: uuid.NullUUID{UUID: uuid.New(), Valid: true}}, + } - dbm.EXPECT().UpdatePrebuildProvisionerJobWithCancel(gomock.Any(), arg).Return(jobIDs, nil).AnyTimes() - check.Args(arg).Asserts(rbac.ResourcePrebuiltWorkspace, policy.ActionUpdate).Returns(jobIDs) + dbm.EXPECT().UpdatePrebuildProvisionerJobWithCancel(gomock.Any(), arg).Return(canceledJobs, nil).AnyTimes() + check.Args(arg).Asserts(rbac.ResourcePrebuiltWorkspace, policy.ActionUpdate).Returns(canceledJobs) })) s.Run("GetProvisionerJobsByIDs", s.Mocked(func(dbm *dbmock.MockStore, faker *gofakeit.Faker, check *expects) { org := testutil.Fake(s.T(), faker, database.Organization{}) @@ -1312,6 +1324,13 @@ func (s *MethodTestSuite) TestTemplate() { dbm.EXPECT().UpsertTemplateUsageStats(gomock.Any()).Return(nil).AnyTimes() check.Asserts(rbac.ResourceSystem, policy.ActionUpdate) })) + s.Run("UpdatePresetsLastInvalidatedAt", s.Mocked(func(dbm *dbmock.MockStore, faker *gofakeit.Faker, check *expects) { + t1 := testutil.Fake(s.T(), faker, database.Template{}) + arg := database.UpdatePresetsLastInvalidatedAtParams{LastInvalidatedAt: sql.NullTime{Valid: true, Time: dbtime.Now()}, TemplateID: t1.ID} + dbm.EXPECT().GetTemplateByID(gomock.Any(), t1.ID).Return(t1, nil).AnyTimes() + dbm.EXPECT().UpdatePresetsLastInvalidatedAt(gomock.Any(), arg).Return([]database.UpdatePresetsLastInvalidatedAtRow{}, nil).AnyTimes() + check.Args(arg).Asserts(t1, 
policy.ActionUpdate) + })) } func (s *MethodTestSuite) TestUser() { @@ -1459,6 +1478,21 @@ func (s *MethodTestSuite) TestUser() { dbm.EXPECT().UpdateUserTerminalFont(gomock.Any(), arg).Return(uc, nil).AnyTimes() check.Args(arg).Asserts(u, policy.ActionUpdatePersonal).Returns(uc) })) + s.Run("GetUserTaskNotificationAlertDismissed", s.Mocked(func(dbm *dbmock.MockStore, faker *gofakeit.Faker, check *expects) { + u := testutil.Fake(s.T(), faker, database.User{}) + dbm.EXPECT().GetUserByID(gomock.Any(), u.ID).Return(u, nil).AnyTimes() + dbm.EXPECT().GetUserTaskNotificationAlertDismissed(gomock.Any(), u.ID).Return(false, nil).AnyTimes() + check.Args(u.ID).Asserts(u, policy.ActionReadPersonal).Returns(false) + })) + s.Run("UpdateUserTaskNotificationAlertDismissed", s.Mocked(func(dbm *dbmock.MockStore, faker *gofakeit.Faker, check *expects) { + user := testutil.Fake(s.T(), faker, database.User{}) + userConfig := database.UserConfig{UserID: user.ID, Key: "task_notification_alert_dismissed", Value: "false"} + userConfigValue, _ := strconv.ParseBool(userConfig.Value) + arg := database.UpdateUserTaskNotificationAlertDismissedParams{UserID: user.ID, TaskNotificationAlertDismissed: userConfigValue} + dbm.EXPECT().GetUserByID(gomock.Any(), user.ID).Return(user, nil).AnyTimes() + dbm.EXPECT().UpdateUserTaskNotificationAlertDismissed(gomock.Any(), arg).Return(false, nil).AnyTimes() + check.Args(arg).Asserts(user, policy.ActionUpdatePersonal).Returns(userConfigValue) + })) s.Run("UpdateUserStatus", s.Mocked(func(dbm *dbmock.MockStore, faker *gofakeit.Faker, check *expects) { u := testutil.Fake(s.T(), faker, database.User{}) arg := database.UpdateUserStatusParams{ID: u.ID, Status: u.Status, UpdatedAt: u.UpdatedAt} @@ -2158,7 +2192,7 @@ func (s *MethodTestSuite) TestWorkspace() { }) res := testutil.Fake(s.T(), faker, database.WorkspaceResource{JobID: b.JobID}) agt := testutil.Fake(s.T(), faker, database.WorkspaceAgent{ResourceID: res.ID}) - app := testutil.Fake(s.T(), faker, 
database.WorkspaceApp{AgentID: agt.ID}) + _ = testutil.Fake(s.T(), faker, database.WorkspaceApp{AgentID: agt.ID}) dbm.EXPECT().GetWorkspaceByID(gomock.Any(), w.ID).Return(w, nil).AnyTimes() dbm.EXPECT().GetWorkspaceBuildByID(gomock.Any(), b.ID).Return(b, nil).AnyTimes() @@ -2167,7 +2201,6 @@ func (s *MethodTestSuite) TestWorkspace() { ID: b.ID, HasAITask: sql.NullBool{Bool: true, Valid: true}, HasExternalAgent: sql.NullBool{Bool: true, Valid: true}, - SidebarAppID: uuid.NullUUID{UUID: app.ID, Valid: true}, UpdatedAt: b.UpdatedAt, }).Asserts(w, policy.ActionUpdate) })) @@ -2372,6 +2405,17 @@ func (s *MethodTestSuite) TestTasks() { dbm.EXPECT().GetTaskByID(gomock.Any(), task.ID).Return(task, nil).AnyTimes() check.Args(task.ID).Asserts(task, policy.ActionRead).Returns(task) })) + s.Run("GetTaskByOwnerIDAndName", s.Mocked(func(dbm *dbmock.MockStore, faker *gofakeit.Faker, check *expects) { + task := testutil.Fake(s.T(), faker, database.Task{}) + dbm.EXPECT().GetTaskByOwnerIDAndName(gomock.Any(), database.GetTaskByOwnerIDAndNameParams{ + OwnerID: task.OwnerID, + Name: task.Name, + }).Return(task, nil).AnyTimes() + check.Args(database.GetTaskByOwnerIDAndNameParams{ + OwnerID: task.OwnerID, + Name: task.Name, + }).Asserts(task, policy.ActionRead).Returns(task) + })) s.Run("DeleteTask", s.Mocked(func(dbm *dbmock.MockStore, faker *gofakeit.Faker, check *expects) { task := testutil.Fake(s.T(), faker, database.Task{}) arg := database.DeleteTaskParams{ @@ -2429,6 +2473,22 @@ func (s *MethodTestSuite) TestTasks() { check.Args(arg).Asserts(task, policy.ActionUpdate, ws, policy.ActionUpdate).Returns(database.TaskTable{}) })) + s.Run("UpdateTaskPrompt", s.Mocked(func(dbm *dbmock.MockStore, faker *gofakeit.Faker, check *expects) { + task := testutil.Fake(s.T(), faker, database.Task{}) + arg := database.UpdateTaskPromptParams{ + ID: task.ID, + Prompt: "Updated prompt text", + } + + // Create a copy of the task with the updated prompt + updatedTask := task + updatedTask.Prompt = 
arg.Prompt + + dbm.EXPECT().GetTaskByID(gomock.Any(), task.ID).Return(task, nil).AnyTimes() + dbm.EXPECT().UpdateTaskPrompt(gomock.Any(), arg).Return(updatedTask.TaskTable(), nil).AnyTimes() + + check.Args(arg).Asserts(task, policy.ActionUpdate).Returns(updatedTask.TaskTable()) + })) s.Run("GetTaskByWorkspaceID", s.Mocked(func(dbm *dbmock.MockStore, faker *gofakeit.Faker, check *expects) { task := testutil.Fake(s.T(), faker, database.Task{}) task.WorkspaceID = uuid.NullUUID{UUID: uuid.New(), Valid: true} @@ -2820,9 +2880,9 @@ func (s *MethodTestSuite) TestSystemFunctions() { dbm.EXPECT().UpdateUserLinkedID(gomock.Any(), arg).Return(l, nil).AnyTimes() check.Args(arg).Asserts(rbac.ResourceSystem, policy.ActionUpdate).Returns(l) })) - s.Run("GetLatestWorkspaceAppStatusesByAppID", s.Mocked(func(dbm *dbmock.MockStore, _ *gofakeit.Faker, check *expects) { + s.Run("GetLatestWorkspaceAppStatusByAppID", s.Mocked(func(dbm *dbmock.MockStore, _ *gofakeit.Faker, check *expects) { appID := uuid.New() - dbm.EXPECT().GetLatestWorkspaceAppStatusesByAppID(gomock.Any(), appID).Return([]database.WorkspaceAppStatus{}, nil).AnyTimes() + dbm.EXPECT().GetLatestWorkspaceAppStatusByAppID(gomock.Any(), appID).Return(database.WorkspaceAppStatus{}, nil).AnyTimes() check.Args(appID).Asserts(rbac.ResourceSystem, policy.ActionRead) })) s.Run("GetLatestWorkspaceAppStatusesByWorkspaceIDs", s.Mocked(func(dbm *dbmock.MockStore, _ *gofakeit.Faker, check *expects) { @@ -3756,6 +3816,14 @@ func (s *MethodTestSuite) TestPrebuilds() { dbm.EXPECT().GetPrebuildMetrics(gomock.Any()).Return([]database.GetPrebuildMetricsRow{}, nil).AnyTimes() check.Args().Asserts(rbac.ResourceWorkspace.All(), policy.ActionRead) })) + s.Run("GetOrganizationsWithPrebuildStatus", s.Mocked(func(dbm *dbmock.MockStore, faker *gofakeit.Faker, check *expects) { + arg := database.GetOrganizationsWithPrebuildStatusParams{ + UserID: uuid.New(), + GroupName: "test", + } + dbm.EXPECT().GetOrganizationsWithPrebuildStatus(gomock.Any(), 
arg).Return([]database.GetOrganizationsWithPrebuildStatusRow{}, nil).AnyTimes() + check.Args(arg).Asserts(rbac.ResourceOrganization.All(), policy.ActionRead) + })) s.Run("GetPrebuildsSettings", s.Mocked(func(dbm *dbmock.MockStore, _ *gofakeit.Faker, check *expects) { dbm.EXPECT().GetPrebuildsSettings(gomock.Any()).Return("{}", nil).AnyTimes() check.Args().Asserts() @@ -4626,6 +4694,12 @@ func (s *MethodTestSuite) TestAIBridge() { db.EXPECT().UpdateAIBridgeInterceptionEnded(gomock.Any(), params).Return(intc, nil).AnyTimes() check.Args(params).Asserts(intc, policy.ActionUpdate).Returns(intc) })) + + s.Run("DeleteOldAIBridgeRecords", s.Mocked(func(db *dbmock.MockStore, faker *gofakeit.Faker, check *expects) { + t := dbtime.Now() + db.EXPECT().DeleteOldAIBridgeRecords(gomock.Any(), t).Return(int32(0), nil).AnyTimes() + check.Args(t).Asserts(rbac.ResourceAibridgeInterception, policy.ActionDelete) + })) } func (s *MethodTestSuite) TestTelemetry() { diff --git a/coderd/database/dbauthz/workspace_rbac_context.go b/coderd/database/dbauthz/workspace_rbac_context.go new file mode 100644 index 0000000000000..1c1b375f14272 --- /dev/null +++ b/coderd/database/dbauthz/workspace_rbac_context.go @@ -0,0 +1,41 @@ +package dbauthz + +import ( + "context" + + "github.com/google/uuid" + "golang.org/x/xerrors" + + "github.com/coder/coder/v2/coderd/rbac" +) + +func isWorkspaceRBACObjectEmpty(rbacObj rbac.Object) bool { + // if any of these are true then the rbac.Object work a workspace is considered empty + return rbacObj.Owner == "" || rbacObj.OrgID == "" || rbacObj.Owner == uuid.Nil.String() || rbacObj.OrgID == uuid.Nil.String() +} + +type workspaceRBACContextKey struct{} + +// WithWorkspaceRBAC attaches a workspace RBAC object to the context. +// RBAC fields on this RBAC object should not be used. +// +// This is primarily used by the workspace agent RPC handler to cache workspace +// authorization data for the duration of an agent connection. 
+func WithWorkspaceRBAC(ctx context.Context, rbacObj rbac.Object) (context.Context, error) { + if rbacObj.Type != rbac.ResourceWorkspace.Type { + return ctx, xerrors.New("RBAC Object must be of type Workspace") + } + if isWorkspaceRBACObjectEmpty(rbacObj) { + return ctx, xerrors.Errorf("cannot attach empty RBAC object to context: %+v", rbacObj) + } + if len(rbacObj.ACLGroupList) != 0 || len(rbacObj.ACLUserList) != 0 { + return ctx, xerrors.New("ACL fields for Workspace RBAC object must be nullified, the can be changed during runtime and should not be cached") + } + return context.WithValue(ctx, workspaceRBACContextKey{}, rbacObj), nil +} + +// WorkspaceRBACFromContext attempts to retrieve the workspace RBAC object from context. +func WorkspaceRBACFromContext(ctx context.Context) (rbac.Object, bool) { + obj, ok := ctx.Value(workspaceRBACContextKey{}).(rbac.Object) + return obj, ok +} diff --git a/coderd/database/dbfake/dbfake.go b/coderd/database/dbfake/dbfake.go index b812be6e16a82..97558b4b8b928 100644 --- a/coderd/database/dbfake/dbfake.go +++ b/coderd/database/dbfake/dbfake.go @@ -189,7 +189,6 @@ func (b WorkspaceBuildBuilder) doInTX() WorkspaceResponse { Bool: true, Valid: true, } - b.seed.AITaskSidebarAppID = uuid.NullUUID{UUID: b.taskAppID, Valid: true} } resp := WorkspaceResponse{ @@ -362,12 +361,20 @@ func (b WorkspaceBuildBuilder) doInTX() WorkspaceResponse { require.Fail(b.t, "task app not configured but workspace is a task workspace") } - app := mustWorkspaceAppByWorkspaceAndBuildAndAppID(ownerCtx, b.t, b.db, resp.Workspace.ID, resp.Build.BuildNumber, b.taskAppID) + workspaceAgentID := uuid.NullUUID{} + workspaceAppID := uuid.NullUUID{} + // Workspace agent and app are only properly set upon job completion + if b.jobStatus != database.ProvisionerJobStatusPending && b.jobStatus != database.ProvisionerJobStatusRunning { + app := mustWorkspaceAppByWorkspaceAndBuildAndAppID(ownerCtx, b.t, b.db, resp.Workspace.ID, resp.Build.BuildNumber, b.taskAppID) + 
workspaceAgentID = uuid.NullUUID{UUID: app.AgentID, Valid: true} + workspaceAppID = uuid.NullUUID{UUID: app.ID, Valid: true} + } + _, err = b.db.UpsertTaskWorkspaceApp(ownerCtx, database.UpsertTaskWorkspaceAppParams{ TaskID: task.ID, WorkspaceBuildNumber: resp.Build.BuildNumber, - WorkspaceAgentID: uuid.NullUUID{UUID: app.AgentID, Valid: true}, - WorkspaceAppID: uuid.NullUUID{UUID: app.ID, Valid: true}, + WorkspaceAgentID: workspaceAgentID, + WorkspaceAppID: workspaceAppID, }) require.NoError(b.t, err, "upsert task workspace app") b.logger.Debug(context.Background(), "linked task to workspace build", @@ -606,6 +613,7 @@ func (t TemplateVersionBuilder) Do() TemplateVersionResponse { IsDefault: false, Description: preset.Description, Icon: preset.Icon, + LastInvalidatedAt: preset.LastInvalidatedAt, }) t.logger.Debug(context.Background(), "added preset", slog.F("preset_id", prst.ID), @@ -623,6 +631,7 @@ func (t TemplateVersionBuilder) Do() TemplateVersionResponse { } payload, err := json.Marshal(provisionerdserver.TemplateVersionImportJob{ + TemplateID: t.seed.TemplateID, TemplateVersionID: t.seed.ID, }) require.NoError(t.t, err) diff --git a/coderd/database/dbgen/dbgen.go b/coderd/database/dbgen/dbgen.go index 532460700a1e2..de0a3b384515a 100644 --- a/coderd/database/dbgen/dbgen.go +++ b/coderd/database/dbgen/dbgen.go @@ -14,6 +14,8 @@ import ( "testing" "time" + "cdr.dev/slog" + "github.com/google/uuid" "github.com/sqlc-dev/pqtype" "github.com/stretchr/testify/require" @@ -175,6 +177,13 @@ func APIKey(t testing.TB, db database.Store, seed database.APIKey, munge ...func } } + // It does not make sense for the created_at to be after the expires_at. + // So if expires is set, change the default created_at to be 24 hours before. 
+ var createdAt time.Time + if !seed.ExpiresAt.IsZero() && seed.CreatedAt.IsZero() { + createdAt = seed.ExpiresAt.Add(-24 * time.Hour) + } + params := database.InsertAPIKeyParams{ ID: takeFirst(seed.ID, id), // 0 defaults to 86400 at the db layer @@ -184,7 +193,7 @@ func APIKey(t testing.TB, db database.Store, seed database.APIKey, munge ...func UserID: takeFirst(seed.UserID, uuid.New()), LastUsed: takeFirst(seed.LastUsed, dbtime.Now()), ExpiresAt: takeFirst(seed.ExpiresAt, dbtime.Now().Add(time.Hour)), - CreatedAt: takeFirst(seed.CreatedAt, dbtime.Now()), + CreatedAt: takeFirst(seed.CreatedAt, createdAt, dbtime.Now()), UpdatedAt: takeFirst(seed.UpdatedAt, dbtime.Now()), LoginType: takeFirst(seed.LoginType, database.LoginTypePassword), Scopes: takeFirstSlice([]database.APIKeyScope(seed.Scopes), []database.APIKeyScope{database.ApiKeyScopeCoderAll}), @@ -451,7 +460,6 @@ func WorkspaceBuild(t testing.TB, db database.Store, orig database.WorkspaceBuil buildID := takeFirst(orig.ID, uuid.New()) jobID := takeFirst(orig.JobID, uuid.New()) hasAITask := takeFirst(orig.HasAITask, sql.NullBool{}) - sidebarAppID := takeFirst(orig.AITaskSidebarAppID, uuid.NullUUID{}) hasExternalAgent := takeFirst(orig.HasExternalAgent, sql.NullBool{}) var build database.WorkspaceBuild err := db.InTx(func(db database.Store) error { @@ -491,7 +499,6 @@ func WorkspaceBuild(t testing.TB, db database.Store, orig database.WorkspaceBuil ID: buildID, HasAITask: hasAITask, HasExternalAgent: hasExternalAgent, - SidebarAppID: sidebarAppID, UpdatedAt: dbtime.Now(), })) } @@ -1430,6 +1437,7 @@ func Preset(t testing.TB, db database.Store, seed database.InsertPresetParams) d IsDefault: seed.IsDefault, Description: seed.Description, Icon: seed.Icon, + LastInvalidatedAt: seed.LastInvalidatedAt, }) require.NoError(t, err, "insert preset") return preset @@ -1498,6 +1506,7 @@ func ClaimPrebuild( func AIBridgeInterception(t testing.TB, db database.Store, seed database.InsertAIBridgeInterceptionParams, endedAt 
*time.Time) database.AIBridgeInterception { interception, err := db.InsertAIBridgeInterception(genCtx, database.InsertAIBridgeInterceptionParams{ ID: takeFirst(seed.ID, uuid.New()), + APIKeyID: seed.APIKeyID, InitiatorID: takeFirst(seed.InitiatorID, uuid.New()), Provider: takeFirst(seed.Provider, "provider"), Model: takeFirst(seed.Model, "model"), @@ -1575,11 +1584,13 @@ func Task(t testing.TB, db database.Store, orig database.TaskTable) database.Tas parameters = json.RawMessage([]byte("{}")) } + taskName := taskname.Generate(genCtx, slog.Make(), orig.Prompt) task, err := db.InsertTask(genCtx, database.InsertTaskParams{ ID: takeFirst(orig.ID, uuid.New()), OrganizationID: orig.OrganizationID, OwnerID: orig.OwnerID, - Name: takeFirst(orig.Name, taskname.GenerateFallback()), + Name: takeFirst(orig.Name, taskName.Name), + DisplayName: takeFirst(orig.DisplayName, taskName.DisplayName), WorkspaceID: orig.WorkspaceID, TemplateVersionID: orig.TemplateVersionID, TemplateParameters: parameters, diff --git a/coderd/database/dbmetrics/querymetrics.go b/coderd/database/dbmetrics/querymetrics.go index 1bd8fda62470a..cdad3598b35f7 100644 --- a/coderd/database/dbmetrics/querymetrics.go +++ b/coderd/database/dbmetrics/querymetrics.go @@ -312,6 +312,13 @@ func (m queryMetricsStore) DeleteCustomRole(ctx context.Context, arg database.De return r0 } +func (m queryMetricsStore) DeleteExpiredAPIKeys(ctx context.Context, arg database.DeleteExpiredAPIKeysParams) (int64, error) { + start := time.Now() + r0, r1 := m.s.DeleteExpiredAPIKeys(ctx, arg) + m.queryLatencies.WithLabelValues("DeleteExpiredAPIKeys").Observe(time.Since(start).Seconds()) + return r0, r1 +} + func (m queryMetricsStore) DeleteExternalAuthLink(ctx context.Context, arg database.DeleteExternalAuthLinkParams) error { start := time.Now() r0 := m.s.DeleteExternalAuthLink(ctx, arg) @@ -389,6 +396,13 @@ func (m queryMetricsStore) DeleteOAuth2ProviderAppTokensByAppAndUserID(ctx conte return r0 } +func (m queryMetricsStore) 
DeleteOldAIBridgeRecords(ctx context.Context, beforeTime time.Time) (int32, error) { + start := time.Now() + r0, r1 := m.s.DeleteOldAIBridgeRecords(ctx, beforeTime) + m.queryLatencies.WithLabelValues("DeleteOldAIBridgeRecords").Observe(time.Since(start).Seconds()) + return r0, r1 +} + func (m queryMetricsStore) DeleteOldAuditLogConnectionEvents(ctx context.Context, threshold database.DeleteOldAuditLogConnectionEventsParams) error { start := time.Now() r0 := m.s.DeleteOldAuditLogConnectionEvents(ctx, threshold) @@ -1019,10 +1033,10 @@ func (m queryMetricsStore) GetLatestCryptoKeyByFeature(ctx context.Context, feat return r0, r1 } -func (m queryMetricsStore) GetLatestWorkspaceAppStatusesByAppID(ctx context.Context, appID uuid.UUID) ([]database.WorkspaceAppStatus, error) { +func (m queryMetricsStore) GetLatestWorkspaceAppStatusByAppID(ctx context.Context, appID uuid.UUID) (database.WorkspaceAppStatus, error) { start := time.Now() - r0, r1 := m.s.GetLatestWorkspaceAppStatusesByAppID(ctx, appID) - m.queryLatencies.WithLabelValues("GetLatestWorkspaceAppStatusesByAppID").Observe(time.Since(start).Seconds()) + r0, r1 := m.s.GetLatestWorkspaceAppStatusByAppID(ctx, appID) + m.queryLatencies.WithLabelValues("GetLatestWorkspaceAppStatusByAppID").Observe(time.Since(start).Seconds()) return r0, r1 } @@ -1243,6 +1257,13 @@ func (m queryMetricsStore) GetOrganizationsByUserID(ctx context.Context, userID return organizations, err } +func (m queryMetricsStore) GetOrganizationsWithPrebuildStatus(ctx context.Context, arg database.GetOrganizationsWithPrebuildStatusParams) ([]database.GetOrganizationsWithPrebuildStatusRow, error) { + start := time.Now() + r0, r1 := m.s.GetOrganizationsWithPrebuildStatus(ctx, arg) + m.queryLatencies.WithLabelValues("GetOrganizationsWithPrebuildStatus").Observe(time.Since(start).Seconds()) + return r0, r1 +} + func (m queryMetricsStore) GetParameterSchemasByJobID(ctx context.Context, jobID uuid.UUID) ([]database.ParameterSchema, error) { start := 
time.Now() schemas, err := m.s.GetParameterSchemasByJobID(ctx, jobID) @@ -1523,6 +1544,13 @@ func (m queryMetricsStore) GetTaskByID(ctx context.Context, id uuid.UUID) (datab return r0, r1 } +func (m queryMetricsStore) GetTaskByOwnerIDAndName(ctx context.Context, arg database.GetTaskByOwnerIDAndNameParams) (database.Task, error) { + start := time.Now() + r0, r1 := m.s.GetTaskByOwnerIDAndName(ctx, arg) + m.queryLatencies.WithLabelValues("GetTaskByOwnerIDAndName").Observe(time.Since(start).Seconds()) + return r0, r1 +} + func (m queryMetricsStore) GetTaskByWorkspaceID(ctx context.Context, workspaceID uuid.UUID) (database.Task, error) { start := time.Now() r0, r1 := m.s.GetTaskByWorkspaceID(ctx, workspaceID) @@ -1817,6 +1845,13 @@ func (m queryMetricsStore) GetUserStatusCounts(ctx context.Context, arg database return r0, r1 } +func (m queryMetricsStore) GetUserTaskNotificationAlertDismissed(ctx context.Context, userID uuid.UUID) (bool, error) { + start := time.Now() + r0, r1 := m.s.GetUserTaskNotificationAlertDismissed(ctx, userID) + m.queryLatencies.WithLabelValues("GetUserTaskNotificationAlertDismissed").Observe(time.Since(start).Seconds()) + return r0, r1 +} + func (m queryMetricsStore) GetUserTerminalFont(ctx context.Context, userID uuid.UUID) (string, error) { start := time.Now() r0, r1 := m.s.GetUserTerminalFont(ctx, userID) @@ -3042,7 +3077,7 @@ func (m queryMetricsStore) UpdateOrganizationDeletedByID(ctx context.Context, ar return r0 } -func (m queryMetricsStore) UpdatePrebuildProvisionerJobWithCancel(ctx context.Context, arg database.UpdatePrebuildProvisionerJobWithCancelParams) ([]uuid.UUID, error) { +func (m queryMetricsStore) UpdatePrebuildProvisionerJobWithCancel(ctx context.Context, arg database.UpdatePrebuildProvisionerJobWithCancelParams) ([]database.UpdatePrebuildProvisionerJobWithCancelRow, error) { start := time.Now() r0, r1 := m.s.UpdatePrebuildProvisionerJobWithCancel(ctx, arg) 
m.queryLatencies.WithLabelValues("UpdatePrebuildProvisionerJobWithCancel").Observe(time.Since(start).Seconds()) @@ -3056,6 +3091,13 @@ func (m queryMetricsStore) UpdatePresetPrebuildStatus(ctx context.Context, arg d return r0 } +func (m queryMetricsStore) UpdatePresetsLastInvalidatedAt(ctx context.Context, arg database.UpdatePresetsLastInvalidatedAtParams) ([]database.UpdatePresetsLastInvalidatedAtRow, error) { + start := time.Now() + r0, r1 := m.s.UpdatePresetsLastInvalidatedAt(ctx, arg) + m.queryLatencies.WithLabelValues("UpdatePresetsLastInvalidatedAt").Observe(time.Since(start).Seconds()) + return r0, r1 +} + func (m queryMetricsStore) UpdateProvisionerDaemonLastSeenAt(ctx context.Context, arg database.UpdateProvisionerDaemonLastSeenAtParams) error { start := time.Now() r0 := m.s.UpdateProvisionerDaemonLastSeenAt(ctx, arg) @@ -3119,6 +3161,13 @@ func (m queryMetricsStore) UpdateTailnetPeerStatusByCoordinator(ctx context.Cont return r0 } +func (m queryMetricsStore) UpdateTaskPrompt(ctx context.Context, arg database.UpdateTaskPromptParams) (database.TaskTable, error) { + start := time.Now() + r0, r1 := m.s.UpdateTaskPrompt(ctx, arg) + m.queryLatencies.WithLabelValues("UpdateTaskPrompt").Observe(time.Since(start).Seconds()) + return r0, r1 +} + func (m queryMetricsStore) UpdateTaskWorkspaceID(ctx context.Context, arg database.UpdateTaskWorkspaceIDParams) (database.TaskTable, error) { start := time.Now() r0, r1 := m.s.UpdateTaskWorkspaceID(ctx, arg) @@ -3308,6 +3357,13 @@ func (m queryMetricsStore) UpdateUserStatus(ctx context.Context, arg database.Up return user, err } +func (m queryMetricsStore) UpdateUserTaskNotificationAlertDismissed(ctx context.Context, arg database.UpdateUserTaskNotificationAlertDismissedParams) (bool, error) { + start := time.Now() + r0, r1 := m.s.UpdateUserTaskNotificationAlertDismissed(ctx, arg) + m.queryLatencies.WithLabelValues("UpdateUserTaskNotificationAlertDismissed").Observe(time.Since(start).Seconds()) + return r0, r1 +} + func (m 
queryMetricsStore) UpdateUserTerminalFont(ctx context.Context, arg database.UpdateUserTerminalFontParams) (database.UserConfig, error) { start := time.Now() r0, r1 := m.s.UpdateUserTerminalFont(ctx, arg) diff --git a/coderd/database/dbmock/dbmock.go b/coderd/database/dbmock/dbmock.go index 1983092aa53f0..03de5508e54d9 100644 --- a/coderd/database/dbmock/dbmock.go +++ b/coderd/database/dbmock/dbmock.go @@ -554,6 +554,21 @@ func (mr *MockStoreMockRecorder) DeleteCustomRole(ctx, arg any) *gomock.Call { return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "DeleteCustomRole", reflect.TypeOf((*MockStore)(nil).DeleteCustomRole), ctx, arg) } +// DeleteExpiredAPIKeys mocks base method. +func (m *MockStore) DeleteExpiredAPIKeys(ctx context.Context, arg database.DeleteExpiredAPIKeysParams) (int64, error) { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "DeleteExpiredAPIKeys", ctx, arg) + ret0, _ := ret[0].(int64) + ret1, _ := ret[1].(error) + return ret0, ret1 +} + +// DeleteExpiredAPIKeys indicates an expected call of DeleteExpiredAPIKeys. +func (mr *MockStoreMockRecorder) DeleteExpiredAPIKeys(ctx, arg any) *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "DeleteExpiredAPIKeys", reflect.TypeOf((*MockStore)(nil).DeleteExpiredAPIKeys), ctx, arg) +} + // DeleteExternalAuthLink mocks base method. func (m *MockStore) DeleteExternalAuthLink(ctx context.Context, arg database.DeleteExternalAuthLinkParams) error { m.ctrl.T.Helper() @@ -709,6 +724,21 @@ func (mr *MockStoreMockRecorder) DeleteOAuth2ProviderAppTokensByAppAndUserID(ctx return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "DeleteOAuth2ProviderAppTokensByAppAndUserID", reflect.TypeOf((*MockStore)(nil).DeleteOAuth2ProviderAppTokensByAppAndUserID), ctx, arg) } +// DeleteOldAIBridgeRecords mocks base method. 
+func (m *MockStore) DeleteOldAIBridgeRecords(ctx context.Context, beforeTime time.Time) (int32, error) { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "DeleteOldAIBridgeRecords", ctx, beforeTime) + ret0, _ := ret[0].(int32) + ret1, _ := ret[1].(error) + return ret0, ret1 +} + +// DeleteOldAIBridgeRecords indicates an expected call of DeleteOldAIBridgeRecords. +func (mr *MockStoreMockRecorder) DeleteOldAIBridgeRecords(ctx, beforeTime any) *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "DeleteOldAIBridgeRecords", reflect.TypeOf((*MockStore)(nil).DeleteOldAIBridgeRecords), ctx, beforeTime) +} + // DeleteOldAuditLogConnectionEvents mocks base method. func (m *MockStore) DeleteOldAuditLogConnectionEvents(ctx context.Context, arg database.DeleteOldAuditLogConnectionEventsParams) error { m.ctrl.T.Helper() @@ -2142,19 +2172,19 @@ func (mr *MockStoreMockRecorder) GetLatestCryptoKeyByFeature(ctx, feature any) * return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GetLatestCryptoKeyByFeature", reflect.TypeOf((*MockStore)(nil).GetLatestCryptoKeyByFeature), ctx, feature) } -// GetLatestWorkspaceAppStatusesByAppID mocks base method. -func (m *MockStore) GetLatestWorkspaceAppStatusesByAppID(ctx context.Context, appID uuid.UUID) ([]database.WorkspaceAppStatus, error) { +// GetLatestWorkspaceAppStatusByAppID mocks base method. +func (m *MockStore) GetLatestWorkspaceAppStatusByAppID(ctx context.Context, appID uuid.UUID) (database.WorkspaceAppStatus, error) { m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "GetLatestWorkspaceAppStatusesByAppID", ctx, appID) - ret0, _ := ret[0].([]database.WorkspaceAppStatus) + ret := m.ctrl.Call(m, "GetLatestWorkspaceAppStatusByAppID", ctx, appID) + ret0, _ := ret[0].(database.WorkspaceAppStatus) ret1, _ := ret[1].(error) return ret0, ret1 } -// GetLatestWorkspaceAppStatusesByAppID indicates an expected call of GetLatestWorkspaceAppStatusesByAppID. 
-func (mr *MockStoreMockRecorder) GetLatestWorkspaceAppStatusesByAppID(ctx, appID any) *gomock.Call { +// GetLatestWorkspaceAppStatusByAppID indicates an expected call of GetLatestWorkspaceAppStatusByAppID. +func (mr *MockStoreMockRecorder) GetLatestWorkspaceAppStatusByAppID(ctx, appID any) *gomock.Call { mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GetLatestWorkspaceAppStatusesByAppID", reflect.TypeOf((*MockStore)(nil).GetLatestWorkspaceAppStatusesByAppID), ctx, appID) + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GetLatestWorkspaceAppStatusByAppID", reflect.TypeOf((*MockStore)(nil).GetLatestWorkspaceAppStatusByAppID), ctx, appID) } // GetLatestWorkspaceAppStatusesByWorkspaceIDs mocks base method. @@ -2622,6 +2652,21 @@ func (mr *MockStoreMockRecorder) GetOrganizationsByUserID(ctx, arg any) *gomock. return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GetOrganizationsByUserID", reflect.TypeOf((*MockStore)(nil).GetOrganizationsByUserID), ctx, arg) } +// GetOrganizationsWithPrebuildStatus mocks base method. +func (m *MockStore) GetOrganizationsWithPrebuildStatus(ctx context.Context, arg database.GetOrganizationsWithPrebuildStatusParams) ([]database.GetOrganizationsWithPrebuildStatusRow, error) { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "GetOrganizationsWithPrebuildStatus", ctx, arg) + ret0, _ := ret[0].([]database.GetOrganizationsWithPrebuildStatusRow) + ret1, _ := ret[1].(error) + return ret0, ret1 +} + +// GetOrganizationsWithPrebuildStatus indicates an expected call of GetOrganizationsWithPrebuildStatus. +func (mr *MockStoreMockRecorder) GetOrganizationsWithPrebuildStatus(ctx, arg any) *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GetOrganizationsWithPrebuildStatus", reflect.TypeOf((*MockStore)(nil).GetOrganizationsWithPrebuildStatus), ctx, arg) +} + // GetParameterSchemasByJobID mocks base method. 
func (m *MockStore) GetParameterSchemasByJobID(ctx context.Context, jobID uuid.UUID) ([]database.ParameterSchema, error) { m.ctrl.T.Helper() @@ -3222,6 +3267,21 @@ func (mr *MockStoreMockRecorder) GetTaskByID(ctx, id any) *gomock.Call { return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GetTaskByID", reflect.TypeOf((*MockStore)(nil).GetTaskByID), ctx, id) } +// GetTaskByOwnerIDAndName mocks base method. +func (m *MockStore) GetTaskByOwnerIDAndName(ctx context.Context, arg database.GetTaskByOwnerIDAndNameParams) (database.Task, error) { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "GetTaskByOwnerIDAndName", ctx, arg) + ret0, _ := ret[0].(database.Task) + ret1, _ := ret[1].(error) + return ret0, ret1 +} + +// GetTaskByOwnerIDAndName indicates an expected call of GetTaskByOwnerIDAndName. +func (mr *MockStoreMockRecorder) GetTaskByOwnerIDAndName(ctx, arg any) *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GetTaskByOwnerIDAndName", reflect.TypeOf((*MockStore)(nil).GetTaskByOwnerIDAndName), ctx, arg) +} + // GetTaskByWorkspaceID mocks base method. func (m *MockStore) GetTaskByWorkspaceID(ctx context.Context, workspaceID uuid.UUID) (database.Task, error) { m.ctrl.T.Helper() @@ -3882,6 +3942,21 @@ func (mr *MockStoreMockRecorder) GetUserStatusCounts(ctx, arg any) *gomock.Call return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GetUserStatusCounts", reflect.TypeOf((*MockStore)(nil).GetUserStatusCounts), ctx, arg) } +// GetUserTaskNotificationAlertDismissed mocks base method. +func (m *MockStore) GetUserTaskNotificationAlertDismissed(ctx context.Context, userID uuid.UUID) (bool, error) { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "GetUserTaskNotificationAlertDismissed", ctx, userID) + ret0, _ := ret[0].(bool) + ret1, _ := ret[1].(error) + return ret0, ret1 +} + +// GetUserTaskNotificationAlertDismissed indicates an expected call of GetUserTaskNotificationAlertDismissed. 
+func (mr *MockStoreMockRecorder) GetUserTaskNotificationAlertDismissed(ctx, userID any) *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GetUserTaskNotificationAlertDismissed", reflect.TypeOf((*MockStore)(nil).GetUserTaskNotificationAlertDismissed), ctx, userID) +} + // GetUserTerminalFont mocks base method. func (m *MockStore) GetUserTerminalFont(ctx context.Context, userID uuid.UUID) (string, error) { m.ctrl.T.Helper() @@ -6540,10 +6615,10 @@ func (mr *MockStoreMockRecorder) UpdateOrganizationDeletedByID(ctx, arg any) *go } // UpdatePrebuildProvisionerJobWithCancel mocks base method. -func (m *MockStore) UpdatePrebuildProvisionerJobWithCancel(ctx context.Context, arg database.UpdatePrebuildProvisionerJobWithCancelParams) ([]uuid.UUID, error) { +func (m *MockStore) UpdatePrebuildProvisionerJobWithCancel(ctx context.Context, arg database.UpdatePrebuildProvisionerJobWithCancelParams) ([]database.UpdatePrebuildProvisionerJobWithCancelRow, error) { m.ctrl.T.Helper() ret := m.ctrl.Call(m, "UpdatePrebuildProvisionerJobWithCancel", ctx, arg) - ret0, _ := ret[0].([]uuid.UUID) + ret0, _ := ret[0].([]database.UpdatePrebuildProvisionerJobWithCancelRow) ret1, _ := ret[1].(error) return ret0, ret1 } @@ -6568,6 +6643,21 @@ func (mr *MockStoreMockRecorder) UpdatePresetPrebuildStatus(ctx, arg any) *gomoc return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "UpdatePresetPrebuildStatus", reflect.TypeOf((*MockStore)(nil).UpdatePresetPrebuildStatus), ctx, arg) } +// UpdatePresetsLastInvalidatedAt mocks base method. 
+func (m *MockStore) UpdatePresetsLastInvalidatedAt(ctx context.Context, arg database.UpdatePresetsLastInvalidatedAtParams) ([]database.UpdatePresetsLastInvalidatedAtRow, error) { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "UpdatePresetsLastInvalidatedAt", ctx, arg) + ret0, _ := ret[0].([]database.UpdatePresetsLastInvalidatedAtRow) + ret1, _ := ret[1].(error) + return ret0, ret1 +} + +// UpdatePresetsLastInvalidatedAt indicates an expected call of UpdatePresetsLastInvalidatedAt. +func (mr *MockStoreMockRecorder) UpdatePresetsLastInvalidatedAt(ctx, arg any) *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "UpdatePresetsLastInvalidatedAt", reflect.TypeOf((*MockStore)(nil).UpdatePresetsLastInvalidatedAt), ctx, arg) +} + // UpdateProvisionerDaemonLastSeenAt mocks base method. func (m *MockStore) UpdateProvisionerDaemonLastSeenAt(ctx context.Context, arg database.UpdateProvisionerDaemonLastSeenAtParams) error { m.ctrl.T.Helper() @@ -6695,6 +6785,21 @@ func (mr *MockStoreMockRecorder) UpdateTailnetPeerStatusByCoordinator(ctx, arg a return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "UpdateTailnetPeerStatusByCoordinator", reflect.TypeOf((*MockStore)(nil).UpdateTailnetPeerStatusByCoordinator), ctx, arg) } +// UpdateTaskPrompt mocks base method. +func (m *MockStore) UpdateTaskPrompt(ctx context.Context, arg database.UpdateTaskPromptParams) (database.TaskTable, error) { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "UpdateTaskPrompt", ctx, arg) + ret0, _ := ret[0].(database.TaskTable) + ret1, _ := ret[1].(error) + return ret0, ret1 +} + +// UpdateTaskPrompt indicates an expected call of UpdateTaskPrompt. +func (mr *MockStoreMockRecorder) UpdateTaskPrompt(ctx, arg any) *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "UpdateTaskPrompt", reflect.TypeOf((*MockStore)(nil).UpdateTaskPrompt), ctx, arg) +} + // UpdateTaskWorkspaceID mocks base method. 
func (m *MockStore) UpdateTaskWorkspaceID(ctx context.Context, arg database.UpdateTaskWorkspaceIDParams) (database.TaskTable, error) { m.ctrl.T.Helper() @@ -7084,6 +7189,21 @@ func (mr *MockStoreMockRecorder) UpdateUserStatus(ctx, arg any) *gomock.Call { return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "UpdateUserStatus", reflect.TypeOf((*MockStore)(nil).UpdateUserStatus), ctx, arg) } +// UpdateUserTaskNotificationAlertDismissed mocks base method. +func (m *MockStore) UpdateUserTaskNotificationAlertDismissed(ctx context.Context, arg database.UpdateUserTaskNotificationAlertDismissedParams) (bool, error) { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "UpdateUserTaskNotificationAlertDismissed", ctx, arg) + ret0, _ := ret[0].(bool) + ret1, _ := ret[1].(error) + return ret0, ret1 +} + +// UpdateUserTaskNotificationAlertDismissed indicates an expected call of UpdateUserTaskNotificationAlertDismissed. +func (mr *MockStoreMockRecorder) UpdateUserTaskNotificationAlertDismissed(ctx, arg any) *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "UpdateUserTaskNotificationAlertDismissed", reflect.TypeOf((*MockStore)(nil).UpdateUserTaskNotificationAlertDismissed), ctx, arg) +} + // UpdateUserTerminalFont mocks base method. func (m *MockStore) UpdateUserTerminalFont(ctx context.Context, arg database.UpdateUserTerminalFontParams) (database.UserConfig, error) { m.ctrl.T.Helper() diff --git a/coderd/database/dbpurge/dbpurge.go b/coderd/database/dbpurge/dbpurge.go index 067fe1f0499e3..0356636ff10c5 100644 --- a/coderd/database/dbpurge/dbpurge.go +++ b/coderd/database/dbpurge/dbpurge.go @@ -13,6 +13,7 @@ import ( "github.com/coder/coder/v2/coderd/database/dbauthz" "github.com/coder/coder/v2/coderd/database/dbtime" "github.com/coder/coder/v2/coderd/pproflabel" + "github.com/coder/coder/v2/codersdk" "github.com/coder/quartz" ) @@ -36,7 +37,7 @@ const ( // It is the caller's responsibility to call Close on the returned instance. 
// // This is for cleaning up old, unused resources from the database that take up space. -func New(ctx context.Context, logger slog.Logger, db database.Store, clk quartz.Clock) io.Closer { +func New(ctx context.Context, logger slog.Logger, db database.Store, vals *codersdk.DeploymentValues, clk quartz.Clock) io.Closer { closed := make(chan struct{}) ctx, cancelFunc := context.WithCancel(ctx) @@ -77,6 +78,19 @@ func New(ctx context.Context, logger slog.Logger, db database.Store, clk quartz. if err := tx.ExpirePrebuildsAPIKeys(ctx, dbtime.Time(start)); err != nil { return xerrors.Errorf("failed to expire prebuilds user api keys: %w", err) } + expiredAPIKeys, err := tx.DeleteExpiredAPIKeys(ctx, database.DeleteExpiredAPIKeysParams{ + // Leave expired keys for a week to allow the backend to know the difference + // between a 404 and an expired key. This purge code is just to bound the size of + // the table to something more reasonable. + Before: dbtime.Time(start.Add(time.Hour * 24 * 7 * -1)), + // There could be a lot of expired keys here, so set a limit to prevent this + // taking too long. + // This runs every 10 minutes, so it deletes ~1.5m keys per day at most. + LimitCount: 10000, + }) + if err != nil { + return xerrors.Errorf("failed to delete expired api keys: %w", err) + } deleteOldTelemetryLocksBefore := start.Add(-maxTelemetryHeartbeatAge) if err := tx.DeleteOldTelemetryLocks(ctx, deleteOldTelemetryLocksBefore); err != nil { return xerrors.Errorf("failed to delete old telemetry locks: %w", err) @@ -90,7 +104,18 @@ func New(ctx context.Context, logger slog.Logger, db database.Store, clk quartz. return xerrors.Errorf("failed to delete old audit log connection events: %w", err) } - logger.Debug(ctx, "purged old database entries", slog.F("duration", clk.Since(start))) + deleteAIBridgeRecordsBefore := start.Add(-vals.AI.BridgeConfig.Retention.Value()) + // nolint:gocritic // Needs to run as aibridge context. 
+ purgedAIBridgeRecords, err := tx.DeleteOldAIBridgeRecords(dbauthz.AsAIBridged(ctx), deleteAIBridgeRecordsBefore) + if err != nil { + return xerrors.Errorf("failed to delete old aibridge records: %w", err) + } + + logger.Debug(ctx, "purged old database entries", + slog.F("expired_api_keys", expiredAPIKeys), + slog.F("aibridge_records", purgedAIBridgeRecords), + slog.F("duration", clk.Since(start)), + ) return nil }, database.DefaultTXOptions().WithID("db_purge")); err != nil { diff --git a/coderd/database/dbpurge/dbpurge_test.go b/coderd/database/dbpurge/dbpurge_test.go index 74bf36639fbb5..0a4de8c922be9 100644 --- a/coderd/database/dbpurge/dbpurge_test.go +++ b/coderd/database/dbpurge/dbpurge_test.go @@ -33,6 +33,7 @@ import ( "github.com/coder/coder/v2/provisionersdk" "github.com/coder/coder/v2/testutil" "github.com/coder/quartz" + "github.com/coder/serpent" ) func TestMain(m *testing.M) { @@ -51,7 +52,7 @@ func TestPurge(t *testing.T) { done := awaitDoTick(ctx, t, clk) mDB := dbmock.NewMockStore(gomock.NewController(t)) mDB.EXPECT().InTx(gomock.Any(), database.DefaultTXOptions().WithID("db_purge")).Return(nil).Times(2) - purger := dbpurge.New(context.Background(), testutil.Logger(t), mDB, clk) + purger := dbpurge.New(context.Background(), testutil.Logger(t), mDB, &codersdk.DeploymentValues{}, clk) <-done // wait for doTick() to run. require.NoError(t, purger.Close()) } @@ -129,7 +130,7 @@ func TestDeleteOldWorkspaceAgentStats(t *testing.T) { }) // when - closer := dbpurge.New(ctx, logger, db, clk) + closer := dbpurge.New(ctx, logger, db, &codersdk.DeploymentValues{}, clk) defer closer.Close() // then @@ -154,7 +155,7 @@ func TestDeleteOldWorkspaceAgentStats(t *testing.T) { // Start a new purger to immediately trigger delete after rollup. 
_ = closer.Close() - closer = dbpurge.New(ctx, logger, db, clk) + closer = dbpurge.New(ctx, logger, db, &codersdk.DeploymentValues{}, clk) defer closer.Close() // then @@ -245,7 +246,7 @@ func TestDeleteOldWorkspaceAgentLogs(t *testing.T) { // After dbpurge completes, the ticker is reset. Trap this call. done := awaitDoTick(ctx, t, clk) - closer := dbpurge.New(ctx, logger, db, clk) + closer := dbpurge.New(ctx, logger, db, &codersdk.DeploymentValues{}, clk) defer closer.Close() <-done // doTick() has now run. @@ -466,7 +467,7 @@ func TestDeleteOldProvisionerDaemons(t *testing.T) { require.NoError(t, err) // when - closer := dbpurge.New(ctx, logger, db, clk) + closer := dbpurge.New(ctx, logger, db, &codersdk.DeploymentValues{}, clk) defer closer.Close() // then @@ -570,7 +571,7 @@ func TestDeleteOldAuditLogConnectionEvents(t *testing.T) { // Run the purge done := awaitDoTick(ctx, t, clk) - closer := dbpurge.New(ctx, logger, db, clk) + closer := dbpurge.New(ctx, logger, db, &codersdk.DeploymentValues{}, clk) defer closer.Close() // Wait for tick testutil.TryReceive(ctx, t, done) @@ -733,7 +734,7 @@ func TestDeleteOldTelemetryHeartbeats(t *testing.T) { require.NoError(t, err) done := awaitDoTick(ctx, t, clk) - closer := dbpurge.New(ctx, logger, db, clk) + closer := dbpurge.New(ctx, logger, db, &codersdk.DeploymentValues{}, clk) defer closer.Close() <-done // doTick() has now run. 
@@ -757,3 +758,172 @@ func TestDeleteOldTelemetryHeartbeats(t *testing.T) { return totalCount == 2 && oldCount == 0 }, testutil.WaitShort, testutil.IntervalFast, "it should delete old telemetry heartbeats") } + +func TestDeleteOldAIBridgeRecords(t *testing.T) { + t.Parallel() + + ctx := testutil.Context(t, testutil.WaitShort) + + clk := quartz.NewMock(t) + now := time.Date(2025, 1, 15, 7, 30, 0, 0, time.UTC) + retentionPeriod := 30 * 24 * time.Hour // 30 days + afterThreshold := now.Add(-retentionPeriod).Add(-24 * time.Hour) // 31 days ago (older than threshold) + beforeThreshold := now.Add(-15 * 24 * time.Hour) // 15 days ago (newer than threshold) + closeBeforeThreshold := now.Add(-retentionPeriod).Add(24 * time.Hour) // 29 days ago + clk.Set(now).MustWait(ctx) + + db, _ := dbtestutil.NewDB(t, dbtestutil.WithDumpOnFailure()) + logger := slogtest.Make(t, &slogtest.Options{IgnoreErrors: true}) + user := dbgen.User(t, db, database.User{}) + + // Create old AI Bridge interception (should be deleted) + oldInterception := dbgen.AIBridgeInterception(t, db, database.InsertAIBridgeInterceptionParams{ + ID: uuid.New(), + APIKeyID: sql.NullString{}, + InitiatorID: user.ID, + Provider: "anthropic", + Model: "claude-3-5-sonnet", + StartedAt: afterThreshold, + }, &afterThreshold) + + // Create old interception with related records (should all be deleted) + oldInterceptionWithRelated := dbgen.AIBridgeInterception(t, db, database.InsertAIBridgeInterceptionParams{ + ID: uuid.New(), + APIKeyID: sql.NullString{}, + InitiatorID: user.ID, + Provider: "openai", + Model: "gpt-4", + StartedAt: afterThreshold, + }, &afterThreshold) + + _ = dbgen.AIBridgeTokenUsage(t, db, database.InsertAIBridgeTokenUsageParams{ + ID: uuid.New(), + InterceptionID: oldInterceptionWithRelated.ID, + ProviderResponseID: "resp-1", + InputTokens: 100, + OutputTokens: 50, + CreatedAt: afterThreshold, + }) + + _ = dbgen.AIBridgeUserPrompt(t, db, database.InsertAIBridgeUserPromptParams{ + ID: uuid.New(), + 
InterceptionID: oldInterceptionWithRelated.ID, + ProviderResponseID: "resp-1", + Prompt: "test prompt", + CreatedAt: afterThreshold, + }) + + _ = dbgen.AIBridgeToolUsage(t, db, database.InsertAIBridgeToolUsageParams{ + ID: uuid.New(), + InterceptionID: oldInterceptionWithRelated.ID, + ProviderResponseID: "resp-1", + Tool: "test-tool", + ServerUrl: sql.NullString{String: "http://test", Valid: true}, + Input: "{}", + Injected: true, + CreatedAt: afterThreshold, + }) + + // Create recent AI Bridge interception (should be kept) + recentInterception := dbgen.AIBridgeInterception(t, db, database.InsertAIBridgeInterceptionParams{ + ID: uuid.New(), + APIKeyID: sql.NullString{}, + InitiatorID: user.ID, + Provider: "anthropic", + Model: "claude-3-5-sonnet", + StartedAt: beforeThreshold, + }, &beforeThreshold) + + // Create interception close to threshold (should be kept) + nearThresholdInterception := dbgen.AIBridgeInterception(t, db, database.InsertAIBridgeInterceptionParams{ + ID: uuid.New(), + APIKeyID: sql.NullString{}, + InitiatorID: user.ID, + Provider: "anthropic", + Model: "claude-3-5-sonnet", + StartedAt: closeBeforeThreshold, + }, &closeBeforeThreshold) + + _ = dbgen.AIBridgeTokenUsage(t, db, database.InsertAIBridgeTokenUsageParams{ + ID: uuid.New(), + InterceptionID: nearThresholdInterception.ID, + ProviderResponseID: "resp-1", + InputTokens: 100, + OutputTokens: 50, + CreatedAt: closeBeforeThreshold, + }) + + _ = dbgen.AIBridgeUserPrompt(t, db, database.InsertAIBridgeUserPromptParams{ + ID: uuid.New(), + InterceptionID: nearThresholdInterception.ID, + ProviderResponseID: "resp-1", + Prompt: "test prompt", + CreatedAt: closeBeforeThreshold, + }) + + _ = dbgen.AIBridgeToolUsage(t, db, database.InsertAIBridgeToolUsageParams{ + ID: uuid.New(), + InterceptionID: nearThresholdInterception.ID, + ProviderResponseID: "resp-1", + Tool: "test-tool", + ServerUrl: sql.NullString{String: "http://test", Valid: true}, + Input: "{}", + Injected: true, + CreatedAt: 
closeBeforeThreshold, + }) + + // Run the purge with configured retention period + done := awaitDoTick(ctx, t, clk) + closer := dbpurge.New(ctx, logger, db, &codersdk.DeploymentValues{ + AI: codersdk.AIConfig{ + BridgeConfig: codersdk.AIBridgeConfig{ + Retention: serpent.Duration(retentionPeriod), + }, + }, + }, clk) + defer closer.Close() + // Wait for tick + testutil.TryReceive(ctx, t, done) + + // Verify results by querying all AI Bridge records + interceptions, err := db.GetAIBridgeInterceptions(ctx) + require.NoError(t, err) + + // Extract interception IDs for comparison + interceptionIDs := make([]uuid.UUID, len(interceptions)) + for i, interception := range interceptions { + interceptionIDs[i] = interception.ID + } + + require.NotContains(t, interceptionIDs, oldInterception.ID, "old interception should be deleted") + require.NotContains(t, interceptionIDs, oldInterceptionWithRelated.ID, "old interception with related records should be deleted") + + // Verify related records were also deleted + oldTokenUsages, err := db.GetAIBridgeTokenUsagesByInterceptionID(ctx, oldInterceptionWithRelated.ID) + require.NoError(t, err) + require.Empty(t, oldTokenUsages, "old token usages should be deleted") + + oldUserPrompts, err := db.GetAIBridgeUserPromptsByInterceptionID(ctx, oldInterceptionWithRelated.ID) + require.NoError(t, err) + require.Empty(t, oldUserPrompts, "old user prompts should be deleted") + + oldToolUsages, err := db.GetAIBridgeToolUsagesByInterceptionID(ctx, oldInterceptionWithRelated.ID) + require.NoError(t, err) + require.Empty(t, oldToolUsages, "old tool usages should be deleted") + + require.Contains(t, interceptionIDs, recentInterception.ID, "recent interception should be kept") + require.Contains(t, interceptionIDs, nearThresholdInterception.ID, "near threshold interception should be kept") + + // Verify related records were NOT deleted + newTokenUsages, err := db.GetAIBridgeTokenUsagesByInterceptionID(ctx, nearThresholdInterception.ID) + 
require.NoError(t, err) + require.Len(t, newTokenUsages, 1, "near threshold token usages should not be deleted") + + newUserPrompts, err := db.GetAIBridgeUserPromptsByInterceptionID(ctx, nearThresholdInterception.ID) + require.NoError(t, err) + require.Len(t, newUserPrompts, 1, "near threshold user prompts should not be deleted") + + newToolUsages, err := db.GetAIBridgeToolUsagesByInterceptionID(ctx, nearThresholdInterception.ID) + require.NoError(t, err) + require.Len(t, newToolUsages, 1, "near threshold tool usages should not be deleted") +} diff --git a/coderd/database/dump.sql b/coderd/database/dump.sql index 837c657402bf2..7c067824598db 100644 --- a/coderd/database/dump.sql +++ b/coderd/database/dump.sql @@ -1056,7 +1056,8 @@ CREATE TABLE aibridge_interceptions ( model text NOT NULL, started_at timestamp with time zone NOT NULL, metadata jsonb, - ended_at timestamp with time zone + ended_at timestamp with time zone, + api_key_id text ); COMMENT ON TABLE aibridge_interceptions IS 'Audit log of requests intercepted by AI Bridge'; @@ -1825,9 +1826,12 @@ CREATE TABLE tasks ( template_parameters jsonb DEFAULT '{}'::jsonb NOT NULL, prompt text NOT NULL, created_at timestamp with time zone NOT NULL, - deleted_at timestamp with time zone + deleted_at timestamp with time zone, + display_name character varying(127) DEFAULT ''::character varying NOT NULL ); +COMMENT ON COLUMN tasks.display_name IS 'Display name is a custom, human-friendly task name.'; + CREATE VIEW visible_users AS SELECT users.id, users.username, @@ -1948,9 +1952,7 @@ CREATE TABLE workspace_builds ( max_deadline timestamp with time zone DEFAULT '0001-01-01 00:00:00+00'::timestamp with time zone NOT NULL, template_version_preset_id uuid, has_ai_task boolean, - ai_task_sidebar_app_id uuid, has_external_agent boolean, - CONSTRAINT workspace_builds_ai_task_sidebar_app_id_required CHECK (((((has_ai_task IS NULL) OR (has_ai_task = false)) AND (ai_task_sidebar_app_id IS NULL)) OR ((has_ai_task = true) AND 
(ai_task_sidebar_app_id IS NOT NULL)))), CONSTRAINT workspace_builds_deadline_below_max_deadline CHECK ((((deadline <> '0001-01-01 00:00:00+00'::timestamp with time zone) AND (deadline <= max_deadline)) OR (max_deadline = '0001-01-01 00:00:00+00'::timestamp with time zone))) ); @@ -1965,33 +1967,23 @@ CREATE VIEW tasks_with_status AS tasks.prompt, tasks.created_at, tasks.deleted_at, + tasks.display_name, CASE - WHEN ((tasks.workspace_id IS NULL) OR (latest_build.job_status IS NULL)) THEN 'pending'::task_status - WHEN (latest_build.job_status = 'failed'::provisioner_job_status) THEN 'error'::task_status - WHEN ((latest_build.transition = ANY (ARRAY['stop'::workspace_transition, 'delete'::workspace_transition])) AND (latest_build.job_status = 'succeeded'::provisioner_job_status)) THEN 'paused'::task_status - WHEN ((latest_build.transition = 'start'::workspace_transition) AND (latest_build.job_status = 'pending'::provisioner_job_status)) THEN 'initializing'::task_status - WHEN ((latest_build.transition = 'start'::workspace_transition) AND (latest_build.job_status = ANY (ARRAY['running'::provisioner_job_status, 'succeeded'::provisioner_job_status]))) THEN - CASE - WHEN agent_status."none" THEN 'initializing'::task_status - WHEN agent_status.connecting THEN 'initializing'::task_status - WHEN agent_status.connected THEN - CASE - WHEN app_status.any_unhealthy THEN 'error'::task_status - WHEN app_status.any_initializing THEN 'initializing'::task_status - WHEN app_status.all_healthy_or_disabled THEN 'active'::task_status - ELSE 'unknown'::task_status - END - ELSE 'unknown'::task_status - END - ELSE 'unknown'::task_status + WHEN (tasks.workspace_id IS NULL) THEN 'pending'::task_status + WHEN (build_status.status <> 'active'::task_status) THEN build_status.status + WHEN (agent_status.status <> 'active'::task_status) THEN agent_status.status + ELSE app_status.status END AS status, + jsonb_build_object('build', jsonb_build_object('transition', latest_build_raw.transition, 
'job_status', latest_build_raw.job_status, 'computed', build_status.status), 'agent', jsonb_build_object('lifecycle_state', agent_raw.lifecycle_state, 'computed', agent_status.status), 'app', jsonb_build_object('health', app_raw.health, 'computed', app_status.status)) AS status_debug, task_app.workspace_build_number, task_app.workspace_agent_id, task_app.workspace_app_id, + agent_raw.lifecycle_state AS workspace_agent_lifecycle_state, + app_raw.health AS workspace_app_health, task_owner.owner_username, task_owner.owner_name, task_owner.owner_avatar_url - FROM (((((tasks + FROM ((((((((tasks CROSS JOIN LATERAL ( SELECT vu.username AS owner_username, vu.name AS owner_name, vu.avatar_url AS owner_avatar_url @@ -2009,17 +2001,36 @@ CREATE VIEW tasks_with_status AS workspace_build.job_id FROM (workspace_builds workspace_build JOIN provisioner_jobs provisioner_job ON ((provisioner_job.id = workspace_build.job_id))) - WHERE ((workspace_build.workspace_id = tasks.workspace_id) AND (workspace_build.build_number = task_app.workspace_build_number))) latest_build ON (true)) - CROSS JOIN LATERAL ( SELECT (count(*) = 0) AS "none", - bool_or((workspace_agent.lifecycle_state = ANY (ARRAY['created'::workspace_agent_lifecycle_state, 'starting'::workspace_agent_lifecycle_state]))) AS connecting, - bool_and((workspace_agent.lifecycle_state = 'ready'::workspace_agent_lifecycle_state)) AS connected + WHERE ((workspace_build.workspace_id = tasks.workspace_id) AND (workspace_build.build_number = task_app.workspace_build_number))) latest_build_raw ON (true)) + LEFT JOIN LATERAL ( SELECT workspace_agent.lifecycle_state FROM workspace_agents workspace_agent - WHERE (workspace_agent.id = task_app.workspace_agent_id)) agent_status) - CROSS JOIN LATERAL ( SELECT bool_or((workspace_app.health = 'unhealthy'::workspace_app_health)) AS any_unhealthy, - bool_or((workspace_app.health = 'initializing'::workspace_app_health)) AS any_initializing, - bool_and((workspace_app.health = ANY 
(ARRAY['healthy'::workspace_app_health, 'disabled'::workspace_app_health]))) AS all_healthy_or_disabled + WHERE (workspace_agent.id = task_app.workspace_agent_id)) agent_raw ON (true)) + LEFT JOIN LATERAL ( SELECT workspace_app.health FROM workspace_apps workspace_app - WHERE (workspace_app.id = task_app.workspace_app_id)) app_status) + WHERE (workspace_app.id = task_app.workspace_app_id)) app_raw ON (true)) + CROSS JOIN LATERAL ( SELECT + CASE + WHEN (latest_build_raw.job_status IS NULL) THEN 'pending'::task_status + WHEN (latest_build_raw.job_status = ANY (ARRAY['failed'::provisioner_job_status, 'canceling'::provisioner_job_status, 'canceled'::provisioner_job_status])) THEN 'error'::task_status + WHEN ((latest_build_raw.transition = ANY (ARRAY['stop'::workspace_transition, 'delete'::workspace_transition])) AND (latest_build_raw.job_status = 'succeeded'::provisioner_job_status)) THEN 'paused'::task_status + WHEN ((latest_build_raw.transition = 'start'::workspace_transition) AND (latest_build_raw.job_status = 'pending'::provisioner_job_status)) THEN 'initializing'::task_status + WHEN ((latest_build_raw.transition = 'start'::workspace_transition) AND (latest_build_raw.job_status = ANY (ARRAY['running'::provisioner_job_status, 'succeeded'::provisioner_job_status]))) THEN 'active'::task_status + ELSE 'unknown'::task_status + END AS status) build_status) + CROSS JOIN LATERAL ( SELECT + CASE + WHEN ((agent_raw.lifecycle_state IS NULL) OR (agent_raw.lifecycle_state = ANY (ARRAY['created'::workspace_agent_lifecycle_state, 'starting'::workspace_agent_lifecycle_state]))) THEN 'initializing'::task_status + WHEN (agent_raw.lifecycle_state = ANY (ARRAY['ready'::workspace_agent_lifecycle_state, 'start_timeout'::workspace_agent_lifecycle_state, 'start_error'::workspace_agent_lifecycle_state])) THEN 'active'::task_status + WHEN (agent_raw.lifecycle_state <> ALL (ARRAY['created'::workspace_agent_lifecycle_state, 'starting'::workspace_agent_lifecycle_state, 
'ready'::workspace_agent_lifecycle_state, 'start_timeout'::workspace_agent_lifecycle_state, 'start_error'::workspace_agent_lifecycle_state])) THEN 'unknown'::task_status + ELSE 'unknown'::task_status + END AS status) agent_status) + CROSS JOIN LATERAL ( SELECT + CASE + WHEN (app_raw.health = 'initializing'::workspace_app_health) THEN 'initializing'::task_status + WHEN (app_raw.health = 'unhealthy'::workspace_app_health) THEN 'error'::task_status + WHEN (app_raw.health = ANY (ARRAY['healthy'::workspace_app_health, 'disabled'::workspace_app_health])) THEN 'active'::task_status + ELSE 'unknown'::task_status + END AS status) app_status) WHERE (tasks.deleted_at IS NULL); CREATE TABLE telemetry_items ( @@ -2163,7 +2174,8 @@ CREATE TABLE template_version_presets ( scheduling_timezone text DEFAULT ''::text NOT NULL, is_default boolean DEFAULT false NOT NULL, description character varying(128) DEFAULT ''::character varying NOT NULL, - icon character varying(256) DEFAULT ''::character varying NOT NULL + icon character varying(256) DEFAULT ''::character varying NOT NULL, + last_invalidated_at timestamp with time zone ); COMMENT ON COLUMN template_version_presets.description IS 'Short text describing the preset (max 128 characters).'; @@ -2287,7 +2299,8 @@ CREATE TABLE templates ( activity_bump bigint DEFAULT '3600000000000'::bigint NOT NULL, max_port_sharing_level app_sharing_level DEFAULT 'owner'::app_sharing_level NOT NULL, use_classic_parameter_flow boolean DEFAULT false NOT NULL, - cors_behavior cors_behavior DEFAULT 'simple'::cors_behavior NOT NULL + cors_behavior cors_behavior DEFAULT 'simple'::cors_behavior NOT NULL, + use_terraform_workspace_cache boolean DEFAULT false NOT NULL ); COMMENT ON COLUMN templates.default_ttl IS 'The default duration for autostop for workspaces created from this template.'; @@ -2310,6 +2323,8 @@ COMMENT ON COLUMN templates.deprecated IS 'If set to a non empty string, the tem COMMENT ON COLUMN templates.use_classic_parameter_flow IS 
'Determines whether to default to the dynamic parameter creation flow for this template or continue using the legacy classic parameter creation flow.This is a template wide setting, the template admin can revert to the classic flow if there are any issues. An escape hatch is required, as workspace creation is a core workflow and cannot break. This column will be removed when the dynamic parameter creation flow is stable.'; +COMMENT ON COLUMN templates.use_terraform_workspace_cache IS 'Determines whether to keep terraform directories cached between runs for workspaces created from this template. When enabled, this can significantly speed up the `terraform init` step at the cost of increased disk usage. This is an opt-in experience, as it prevents modules from being updated, and therefore is a behavioral difference from the default.'; + CREATE VIEW template_with_names AS SELECT templates.id, templates.created_at, @@ -2341,6 +2356,7 @@ CREATE VIEW template_with_names AS templates.max_port_sharing_level, templates.use_classic_parameter_flow, templates.cors_behavior, + templates.use_terraform_workspace_cache, COALESCE(visible_users.avatar_url, ''::text) AS created_by_avatar_url, COALESCE(visible_users.username, ''::text) AS created_by_username, COALESCE(visible_users.name, ''::text) AS created_by_name, @@ -2703,7 +2719,6 @@ CREATE VIEW workspace_build_with_user AS workspace_builds.max_deadline, workspace_builds.template_version_preset_id, workspace_builds.has_ai_task, - workspace_builds.ai_task_sidebar_app_id, workspace_builds.has_external_agent, COALESCE(visible_users.avatar_url, ''::text) AS initiator_by_avatar_url, COALESCE(visible_users.username, ''::text) AS initiator_by_username, @@ -2922,11 +2937,13 @@ CREATE VIEW workspaces_expanded AS templates.name AS template_name, templates.display_name AS template_display_name, templates.icon AS template_icon, - templates.description AS template_description - FROM (((workspaces + templates.description AS 
template_description, + tasks.id AS task_id + FROM ((((workspaces JOIN visible_users ON ((workspaces.owner_id = visible_users.id))) JOIN organizations ON ((workspaces.organization_id = organizations.id))) - JOIN templates ON ((workspaces.template_id = templates.id))); + JOIN templates ON ((workspaces.template_id = templates.id))) + LEFT JOIN tasks ON ((workspaces.id = tasks.workspace_id))); COMMENT ON VIEW workspaces_expanded IS 'Joins in the display name information such as username, avatar, and organization name.'; @@ -3420,6 +3437,8 @@ CREATE INDEX workspace_agent_stats_template_id_created_at_user_id_idx ON workspa COMMENT ON INDEX workspace_agent_stats_template_id_created_at_user_id_idx IS 'Support index for template insights endpoint to build interval reports faster.'; +CREATE INDEX workspace_agents_auth_instance_id_deleted_idx ON workspace_agents USING btree (auth_instance_id, deleted); + CREATE INDEX workspace_agents_auth_token_idx ON workspace_agents USING btree (auth_token); CREATE INDEX workspace_agents_resource_id_idx ON workspace_agents USING btree (resource_id); @@ -3802,9 +3821,6 @@ ALTER TABLE ONLY workspace_apps ALTER TABLE ONLY workspace_build_parameters ADD CONSTRAINT workspace_build_parameters_workspace_build_id_fkey FOREIGN KEY (workspace_build_id) REFERENCES workspace_builds(id) ON DELETE CASCADE; -ALTER TABLE ONLY workspace_builds - ADD CONSTRAINT workspace_builds_ai_task_sidebar_app_id_fkey FOREIGN KEY (ai_task_sidebar_app_id) REFERENCES workspace_apps(id); - ALTER TABLE ONLY workspace_builds ADD CONSTRAINT workspace_builds_job_id_fkey FOREIGN KEY (job_id) REFERENCES provisioner_jobs(id) ON DELETE CASCADE; diff --git a/coderd/database/foreign_key_constraint.go b/coderd/database/foreign_key_constraint.go index 6737275dd340e..0c295e4316ee3 100644 --- a/coderd/database/foreign_key_constraint.go +++ b/coderd/database/foreign_key_constraint.go @@ -94,7 +94,6 @@ const ( ForeignKeyWorkspaceAppStatusesWorkspaceID ForeignKeyConstraint = 
"workspace_app_statuses_workspace_id_fkey" // ALTER TABLE ONLY workspace_app_statuses ADD CONSTRAINT workspace_app_statuses_workspace_id_fkey FOREIGN KEY (workspace_id) REFERENCES workspaces(id); ForeignKeyWorkspaceAppsAgentID ForeignKeyConstraint = "workspace_apps_agent_id_fkey" // ALTER TABLE ONLY workspace_apps ADD CONSTRAINT workspace_apps_agent_id_fkey FOREIGN KEY (agent_id) REFERENCES workspace_agents(id) ON DELETE CASCADE; ForeignKeyWorkspaceBuildParametersWorkspaceBuildID ForeignKeyConstraint = "workspace_build_parameters_workspace_build_id_fkey" // ALTER TABLE ONLY workspace_build_parameters ADD CONSTRAINT workspace_build_parameters_workspace_build_id_fkey FOREIGN KEY (workspace_build_id) REFERENCES workspace_builds(id) ON DELETE CASCADE; - ForeignKeyWorkspaceBuildsAiTaskSidebarAppID ForeignKeyConstraint = "workspace_builds_ai_task_sidebar_app_id_fkey" // ALTER TABLE ONLY workspace_builds ADD CONSTRAINT workspace_builds_ai_task_sidebar_app_id_fkey FOREIGN KEY (ai_task_sidebar_app_id) REFERENCES workspace_apps(id); ForeignKeyWorkspaceBuildsJobID ForeignKeyConstraint = "workspace_builds_job_id_fkey" // ALTER TABLE ONLY workspace_builds ADD CONSTRAINT workspace_builds_job_id_fkey FOREIGN KEY (job_id) REFERENCES provisioner_jobs(id) ON DELETE CASCADE; ForeignKeyWorkspaceBuildsTemplateVersionID ForeignKeyConstraint = "workspace_builds_template_version_id_fkey" // ALTER TABLE ONLY workspace_builds ADD CONSTRAINT workspace_builds_template_version_id_fkey FOREIGN KEY (template_version_id) REFERENCES template_versions(id) ON DELETE CASCADE; ForeignKeyWorkspaceBuildsTemplateVersionPresetID ForeignKeyConstraint = "workspace_builds_template_version_preset_id_fkey" // ALTER TABLE ONLY workspace_builds ADD CONSTRAINT workspace_builds_template_version_preset_id_fkey FOREIGN KEY (template_version_preset_id) REFERENCES template_version_presets(id) ON DELETE SET NULL; diff --git a/coderd/database/migrations/000371_api_key_scopes_array_allow_list.up.sql 
b/coderd/database/migrations/000371_api_key_scopes_array_allow_list.up.sql index b38bf89880bed..12fb99f89f83f 100644 --- a/coderd/database/migrations/000371_api_key_scopes_array_allow_list.up.sql +++ b/coderd/database/migrations/000371_api_key_scopes_array_allow_list.up.sql @@ -141,13 +141,19 @@ ALTER TYPE api_key_scope ADD VALUE IF NOT EXISTS 'workspace_proxy:read'; ALTER TYPE api_key_scope ADD VALUE IF NOT EXISTS 'workspace_proxy:update'; -- End enum extensions +-- Purge old API keys to speed up the migration for large deployments. +-- Note: that problem should be solved in coderd once PR 20863 is released: +-- https://github.com/coder/coder/blob/main/coderd/database/dbpurge/dbpurge.go#L85 +DELETE FROM api_keys WHERE expires_at < NOW() - INTERVAL '7 days'; + -- Add new columns without defaults; backfill; then enforce NOT NULL ALTER TABLE api_keys ADD COLUMN scopes api_key_scope[]; ALTER TABLE api_keys ADD COLUMN allow_list text[]; -- Backfill existing rows for compatibility -UPDATE api_keys SET scopes = ARRAY[scope::api_key_scope]; -UPDATE api_keys SET allow_list = ARRAY['*:*']; +UPDATE api_keys SET + scopes = ARRAY[scope::api_key_scope], + allow_list = ARRAY['*:*']; -- Enforce NOT NULL ALTER TABLE api_keys ALTER COLUMN scopes SET NOT NULL; diff --git a/coderd/database/migrations/000393_workspaces_expanded_task_id.down.sql b/coderd/database/migrations/000393_workspaces_expanded_task_id.down.sql new file mode 100644 index 0000000000000..ed30e6a0f64f3 --- /dev/null +++ b/coderd/database/migrations/000393_workspaces_expanded_task_id.down.sql @@ -0,0 +1,39 @@ +DROP VIEW workspaces_expanded; + +-- Recreate the view from 000354_workspace_acl.up.sql +CREATE VIEW workspaces_expanded AS + SELECT workspaces.id, + workspaces.created_at, + workspaces.updated_at, + workspaces.owner_id, + workspaces.organization_id, + workspaces.template_id, + workspaces.deleted, + workspaces.name, + workspaces.autostart_schedule, + workspaces.ttl, + workspaces.last_used_at, + 
workspaces.dormant_at, + workspaces.deleting_at, + workspaces.automatic_updates, + workspaces.favorite, + workspaces.next_start_at, + workspaces.group_acl, + workspaces.user_acl, + visible_users.avatar_url AS owner_avatar_url, + visible_users.username AS owner_username, + visible_users.name AS owner_name, + organizations.name AS organization_name, + organizations.display_name AS organization_display_name, + organizations.icon AS organization_icon, + organizations.description AS organization_description, + templates.name AS template_name, + templates.display_name AS template_display_name, + templates.icon AS template_icon, + templates.description AS template_description + FROM (((workspaces + JOIN visible_users ON ((workspaces.owner_id = visible_users.id))) + JOIN organizations ON ((workspaces.organization_id = organizations.id))) + JOIN templates ON ((workspaces.template_id = templates.id))); + +COMMENT ON VIEW workspaces_expanded IS 'Joins in the display name information such as username, avatar, and organization name.'; diff --git a/coderd/database/migrations/000393_workspaces_expanded_task_id.up.sql b/coderd/database/migrations/000393_workspaces_expanded_task_id.up.sql new file mode 100644 index 0000000000000..f01354e65bd50 --- /dev/null +++ b/coderd/database/migrations/000393_workspaces_expanded_task_id.up.sql @@ -0,0 +1,42 @@ +DROP VIEW workspaces_expanded; + +-- Add nullable task_id to workspaces_expanded view +CREATE VIEW workspaces_expanded AS + SELECT workspaces.id, + workspaces.created_at, + workspaces.updated_at, + workspaces.owner_id, + workspaces.organization_id, + workspaces.template_id, + workspaces.deleted, + workspaces.name, + workspaces.autostart_schedule, + workspaces.ttl, + workspaces.last_used_at, + workspaces.dormant_at, + workspaces.deleting_at, + workspaces.automatic_updates, + workspaces.favorite, + workspaces.next_start_at, + workspaces.group_acl, + workspaces.user_acl, + visible_users.avatar_url AS owner_avatar_url, + 
visible_users.username AS owner_username, + visible_users.name AS owner_name, + organizations.name AS organization_name, + organizations.display_name AS organization_display_name, + organizations.icon AS organization_icon, + organizations.description AS organization_description, + templates.name AS template_name, + templates.display_name AS template_display_name, + templates.icon AS template_icon, + templates.description AS template_description, + tasks.id AS task_id + FROM ((((workspaces + JOIN visible_users ON ((workspaces.owner_id = visible_users.id))) + JOIN organizations ON ((workspaces.organization_id = organizations.id))) + JOIN templates ON ((workspaces.template_id = templates.id))) + LEFT JOIN tasks ON ((workspaces.id = tasks.workspace_id))); + +COMMENT ON VIEW workspaces_expanded IS 'Joins in the display name information such as username, avatar, and organization name.'; + diff --git a/coderd/database/migrations/000394_drop_workspace_build_ai_task_sidebar_app_id_required.down.sql b/coderd/database/migrations/000394_drop_workspace_build_ai_task_sidebar_app_id_required.down.sql new file mode 100644 index 0000000000000..c079189235a62 --- /dev/null +++ b/coderd/database/migrations/000394_drop_workspace_build_ai_task_sidebar_app_id_required.down.sql @@ -0,0 +1,4 @@ +-- WARNING: Restoring this constraint after running a newer version of coderd +-- and using tasks is bound to break this constraint. 
+ALTER TABLE workspace_builds +ADD CONSTRAINT workspace_builds_ai_task_sidebar_app_id_required CHECK (((((has_ai_task IS NULL) OR (has_ai_task = false)) AND (ai_task_sidebar_app_id IS NULL)) OR ((has_ai_task = true) AND (ai_task_sidebar_app_id IS NOT NULL)))); diff --git a/coderd/database/migrations/000394_drop_workspace_build_ai_task_sidebar_app_id_required.up.sql b/coderd/database/migrations/000394_drop_workspace_build_ai_task_sidebar_app_id_required.up.sql new file mode 100644 index 0000000000000..4703b6f764a56 --- /dev/null +++ b/coderd/database/migrations/000394_drop_workspace_build_ai_task_sidebar_app_id_required.up.sql @@ -0,0 +1,4 @@ +-- We no longer need to enforce this constraint as tasks have their own data +-- model. +ALTER TABLE workspace_builds +DROP CONSTRAINT workspace_builds_ai_task_sidebar_app_id_required; diff --git a/coderd/database/migrations/000395_drop_ai_task_sidebar_app_id_from_workspace_builds.down.sql b/coderd/database/migrations/000395_drop_ai_task_sidebar_app_id_from_workspace_builds.down.sql new file mode 100644 index 0000000000000..440eda07ad873 --- /dev/null +++ b/coderd/database/migrations/000395_drop_ai_task_sidebar_app_id_from_workspace_builds.down.sql @@ -0,0 +1,45 @@ +ALTER TABLE workspace_builds ADD COLUMN ai_task_sidebar_app_id UUID; +ALTER TABLE workspace_builds ADD CONSTRAINT workspace_builds_ai_task_sidebar_app_id_fkey FOREIGN KEY (ai_task_sidebar_app_id) REFERENCES workspace_apps(id); + +DROP VIEW workspace_build_with_user; +-- Restore view. 
+CREATE VIEW workspace_build_with_user AS +SELECT + workspace_builds.id, + workspace_builds.created_at, + workspace_builds.updated_at, + workspace_builds.workspace_id, + workspace_builds.template_version_id, + workspace_builds.build_number, + workspace_builds.transition, + workspace_builds.initiator_id, + workspace_builds.provisioner_state, + workspace_builds.job_id, + workspace_builds.deadline, + workspace_builds.reason, + workspace_builds.daily_cost, + workspace_builds.max_deadline, + workspace_builds.template_version_preset_id, + workspace_builds.has_ai_task, + workspace_builds.ai_task_sidebar_app_id, + workspace_builds.has_external_agent, + COALESCE( + visible_users.avatar_url, + '' :: text + ) AS initiator_by_avatar_url, + COALESCE( + visible_users.username, + '' :: text + ) AS initiator_by_username, + COALESCE(visible_users.name, '' :: text) AS initiator_by_name +FROM + ( + workspace_builds + LEFT JOIN visible_users ON ( + ( + workspace_builds.initiator_id = visible_users.id + ) + ) + ); + +COMMENT ON VIEW workspace_build_with_user IS 'Joins in the username + avatar url of the initiated by user.'; diff --git a/coderd/database/migrations/000395_drop_ai_task_sidebar_app_id_from_workspace_builds.up.sql b/coderd/database/migrations/000395_drop_ai_task_sidebar_app_id_from_workspace_builds.up.sql new file mode 100644 index 0000000000000..e55bf2763eefc --- /dev/null +++ b/coderd/database/migrations/000395_drop_ai_task_sidebar_app_id_from_workspace_builds.up.sql @@ -0,0 +1,43 @@ +-- We're dropping the ai_task_sidebar_app_id column. 
+DROP VIEW workspace_build_with_user; +CREATE VIEW workspace_build_with_user AS +SELECT + workspace_builds.id, + workspace_builds.created_at, + workspace_builds.updated_at, + workspace_builds.workspace_id, + workspace_builds.template_version_id, + workspace_builds.build_number, + workspace_builds.transition, + workspace_builds.initiator_id, + workspace_builds.provisioner_state, + workspace_builds.job_id, + workspace_builds.deadline, + workspace_builds.reason, + workspace_builds.daily_cost, + workspace_builds.max_deadline, + workspace_builds.template_version_preset_id, + workspace_builds.has_ai_task, + workspace_builds.has_external_agent, + COALESCE( + visible_users.avatar_url, + '' :: text + ) AS initiator_by_avatar_url, + COALESCE( + visible_users.username, + '' :: text + ) AS initiator_by_username, + COALESCE(visible_users.name, '' :: text) AS initiator_by_name +FROM + ( + workspace_builds + LEFT JOIN visible_users ON ( + ( + workspace_builds.initiator_id = visible_users.id + ) + ) + ); + +COMMENT ON VIEW workspace_build_with_user IS 'Joins in the username + avatar url of the initiated by user.'; + +ALTER TABLE workspace_builds DROP COLUMN ai_task_sidebar_app_id; diff --git a/coderd/database/migrations/000396_add_aibridge_interceptions_api_key_id.down.sql b/coderd/database/migrations/000396_add_aibridge_interceptions_api_key_id.down.sql new file mode 100644 index 0000000000000..c11331436e525 --- /dev/null +++ b/coderd/database/migrations/000396_add_aibridge_interceptions_api_key_id.down.sql @@ -0,0 +1 @@ +ALTER TABLE aibridge_interceptions DROP COLUMN api_key_id; diff --git a/coderd/database/migrations/000396_add_aibridge_interceptions_api_key_id.up.sql b/coderd/database/migrations/000396_add_aibridge_interceptions_api_key_id.up.sql new file mode 100644 index 0000000000000..2d85765d6d464 --- /dev/null +++ b/coderd/database/migrations/000396_add_aibridge_interceptions_api_key_id.up.sql @@ -0,0 +1,2 @@ + -- column is nullable to not break interceptions recorded 
before this column was added +ALTER TABLE aibridge_interceptions ADD COLUMN api_key_id text; diff --git a/coderd/database/migrations/000397_experimental_terraform_workspaces.down.sql b/coderd/database/migrations/000397_experimental_terraform_workspaces.down.sql new file mode 100644 index 0000000000000..394c31975a901 --- /dev/null +++ b/coderd/database/migrations/000397_experimental_terraform_workspaces.down.sql @@ -0,0 +1,26 @@ +DROP VIEW template_with_names; +-- Drop the column +ALTER TABLE templates DROP COLUMN use_terraform_workspace_cache; + +-- Update the template_with_names view by recreating it. +CREATE VIEW template_with_names AS +SELECT + templates.*, + COALESCE(visible_users.avatar_url, ''::text) AS created_by_avatar_url, + COALESCE(visible_users.username, ''::text) AS created_by_username, + COALESCE(visible_users.name, ''::text) AS created_by_name, + COALESCE(organizations.name, ''::text) AS organization_name, + COALESCE(organizations.display_name, ''::text) AS organization_display_name, + COALESCE(organizations.icon, ''::text) AS organization_icon +FROM + templates + LEFT JOIN + visible_users + ON + templates.created_by = visible_users.id + LEFT JOIN + organizations + ON templates.organization_id = organizations.id +; + +COMMENT ON VIEW template_with_names IS 'Joins in the display name information such as username, avatar, and organization name.'; diff --git a/coderd/database/migrations/000397_experimental_terraform_workspaces.up.sql b/coderd/database/migrations/000397_experimental_terraform_workspaces.up.sql new file mode 100644 index 0000000000000..3b6a57e01b5ef --- /dev/null +++ b/coderd/database/migrations/000397_experimental_terraform_workspaces.up.sql @@ -0,0 +1,33 @@ +-- Default to `false`. Users will have to manually opt into the terraform workspace cache feature. 
+ALTER TABLE templates ADD COLUMN use_terraform_workspace_cache BOOL NOT NULL DEFAULT false; + +COMMENT ON COLUMN templates.use_terraform_workspace_cache IS + 'Determines whether to keep terraform directories cached between runs for workspaces created from this template. ' + 'When enabled, this can significantly speed up the `terraform init` step at the cost of increased disk usage. ' + 'This is an opt-in experience, as it prevents modules from being updated, and therefore is a behavioral difference ' + 'from the default.'; + ; + +-- Update the template_with_names view by recreating it. +DROP VIEW template_with_names; +CREATE VIEW template_with_names AS +SELECT + templates.*, + COALESCE(visible_users.avatar_url, ''::text) AS created_by_avatar_url, + COALESCE(visible_users.username, ''::text) AS created_by_username, + COALESCE(visible_users.name, ''::text) AS created_by_name, + COALESCE(organizations.name, ''::text) AS organization_name, + COALESCE(organizations.display_name, ''::text) AS organization_display_name, + COALESCE(organizations.icon, ''::text) AS organization_icon +FROM + templates + LEFT JOIN + visible_users + ON + templates.created_by = visible_users.id + LEFT JOIN + organizations + ON templates.organization_id = organizations.id +; + +COMMENT ON VIEW template_with_names IS 'Joins in the display name information such as username, avatar, and organization name.'; diff --git a/coderd/database/migrations/000398_update_task_status_view.down.sql b/coderd/database/migrations/000398_update_task_status_view.down.sql new file mode 100644 index 0000000000000..a9380ec962b9a --- /dev/null +++ b/coderd/database/migrations/000398_update_task_status_view.down.sql @@ -0,0 +1,82 @@ +-- Restore previous view. 
+DROP VIEW IF EXISTS tasks_with_status; + +CREATE VIEW + tasks_with_status +AS + SELECT + tasks.*, + CASE + WHEN tasks.workspace_id IS NULL OR latest_build.job_status IS NULL THEN 'pending'::task_status + + WHEN latest_build.job_status = 'failed' THEN 'error'::task_status + + WHEN latest_build.transition IN ('stop', 'delete') + AND latest_build.job_status = 'succeeded' THEN 'paused'::task_status + + WHEN latest_build.transition = 'start' + AND latest_build.job_status = 'pending' THEN 'initializing'::task_status + + WHEN latest_build.transition = 'start' AND latest_build.job_status IN ('running', 'succeeded') THEN + CASE + WHEN agent_status.none THEN 'initializing'::task_status + WHEN agent_status.connecting THEN 'initializing'::task_status + WHEN agent_status.connected THEN + CASE + WHEN app_status.any_unhealthy THEN 'error'::task_status + WHEN app_status.any_initializing THEN 'initializing'::task_status + WHEN app_status.all_healthy_or_disabled THEN 'active'::task_status + ELSE 'unknown'::task_status + END + ELSE 'unknown'::task_status + END + + ELSE 'unknown'::task_status + END AS status, + task_app.*, + task_owner.* + FROM + tasks + CROSS JOIN LATERAL ( + SELECT + vu.username AS owner_username, + vu.name AS owner_name, + vu.avatar_url AS owner_avatar_url + FROM visible_users vu + WHERE vu.id = tasks.owner_id + ) task_owner + LEFT JOIN LATERAL ( + SELECT workspace_build_number, workspace_agent_id, workspace_app_id + FROM task_workspace_apps task_app + WHERE task_id = tasks.id + ORDER BY workspace_build_number DESC + LIMIT 1 + ) task_app ON TRUE + LEFT JOIN LATERAL ( + SELECT + workspace_build.transition, + provisioner_job.job_status, + workspace_build.job_id + FROM workspace_builds workspace_build + JOIN provisioner_jobs provisioner_job ON provisioner_job.id = workspace_build.job_id + WHERE workspace_build.workspace_id = tasks.workspace_id + AND workspace_build.build_number = task_app.workspace_build_number + ) latest_build ON TRUE + CROSS JOIN LATERAL ( + SELECT 
+ COUNT(*) = 0 AS none, + bool_or(workspace_agent.lifecycle_state IN ('created', 'starting')) AS connecting, + bool_and(workspace_agent.lifecycle_state = 'ready') AS connected + FROM workspace_agents workspace_agent + WHERE workspace_agent.id = task_app.workspace_agent_id + ) agent_status + CROSS JOIN LATERAL ( + SELECT + bool_or(workspace_app.health = 'unhealthy') AS any_unhealthy, + bool_or(workspace_app.health = 'initializing') AS any_initializing, + bool_and(workspace_app.health IN ('healthy', 'disabled')) AS all_healthy_or_disabled + FROM workspace_apps workspace_app + WHERE workspace_app.id = task_app.workspace_app_id + ) app_status + WHERE + tasks.deleted_at IS NULL; diff --git a/coderd/database/migrations/000398_update_task_status_view.up.sql b/coderd/database/migrations/000398_update_task_status_view.up.sql new file mode 100644 index 0000000000000..f05df3c5b82ed --- /dev/null +++ b/coderd/database/migrations/000398_update_task_status_view.up.sql @@ -0,0 +1,142 @@ +-- Update task status in view. +DROP VIEW IF EXISTS tasks_with_status; + +CREATE VIEW + tasks_with_status +AS + SELECT + tasks.*, + -- Combine component statuses with precedence: build -> agent -> app. + CASE + WHEN tasks.workspace_id IS NULL THEN 'pending'::task_status + WHEN build_status.status != 'active' THEN build_status.status::task_status + WHEN agent_status.status != 'active' THEN agent_status.status::task_status + ELSE app_status.status::task_status + END AS status, + -- Attach debug information for troubleshooting status. 
+ jsonb_build_object( + 'build', jsonb_build_object( + 'transition', latest_build_raw.transition, + 'job_status', latest_build_raw.job_status, + 'computed', build_status.status + ), + 'agent', jsonb_build_object( + 'lifecycle_state', agent_raw.lifecycle_state, + 'computed', agent_status.status + ), + 'app', jsonb_build_object( + 'health', app_raw.health, + 'computed', app_status.status + ) + ) AS status_debug, + task_app.*, + agent_raw.lifecycle_state AS workspace_agent_lifecycle_state, + app_raw.health AS workspace_app_health, + task_owner.* + FROM + tasks + CROSS JOIN LATERAL ( + SELECT + vu.username AS owner_username, + vu.name AS owner_name, + vu.avatar_url AS owner_avatar_url + FROM + visible_users vu + WHERE + vu.id = tasks.owner_id + ) task_owner + LEFT JOIN LATERAL ( + SELECT + task_app.workspace_build_number, + task_app.workspace_agent_id, + task_app.workspace_app_id + FROM + task_workspace_apps task_app + WHERE + task_id = tasks.id + ORDER BY + task_app.workspace_build_number DESC + LIMIT 1 + ) task_app ON TRUE + + -- Join the raw data for computing task status. + LEFT JOIN LATERAL ( + SELECT + workspace_build.transition, + provisioner_job.job_status, + workspace_build.job_id + FROM + workspace_builds workspace_build + JOIN + provisioner_jobs provisioner_job + ON provisioner_job.id = workspace_build.job_id + WHERE + workspace_build.workspace_id = tasks.workspace_id + AND workspace_build.build_number = task_app.workspace_build_number + ) latest_build_raw ON TRUE + LEFT JOIN LATERAL ( + SELECT + workspace_agent.lifecycle_state + FROM + workspace_agents workspace_agent + WHERE + workspace_agent.id = task_app.workspace_agent_id + ) agent_raw ON TRUE + LEFT JOIN LATERAL ( + SELECT + workspace_app.health + FROM + workspace_apps workspace_app + WHERE + workspace_app.id = task_app.workspace_app_id + ) app_raw ON TRUE + + -- Compute the status for each component. 
+ CROSS JOIN LATERAL ( + SELECT + CASE + WHEN latest_build_raw.job_status IS NULL THEN 'pending'::task_status + WHEN latest_build_raw.job_status IN ('failed', 'canceling', 'canceled') THEN 'error'::task_status + WHEN + latest_build_raw.transition IN ('stop', 'delete') + AND latest_build_raw.job_status = 'succeeded' THEN 'paused'::task_status + WHEN + latest_build_raw.transition = 'start' + AND latest_build_raw.job_status = 'pending' THEN 'initializing'::task_status + -- Build is running or done, defer to agent/app status. + WHEN + latest_build_raw.transition = 'start' + AND latest_build_raw.job_status IN ('running', 'succeeded') THEN 'active'::task_status + ELSE 'unknown'::task_status + END AS status + ) build_status + CROSS JOIN LATERAL ( + SELECT + CASE + -- No agent or connecting. + WHEN + agent_raw.lifecycle_state IS NULL + OR agent_raw.lifecycle_state IN ('created', 'starting') THEN 'initializing'::task_status + -- Agent is running, defer to app status. + -- NOTE(mafredri): The start_error/start_timeout states means connected, but some startup script failed. + -- This may or may not affect the task status but this has to be caught by app health check. + WHEN agent_raw.lifecycle_state IN ('ready', 'start_timeout', 'start_error') THEN 'active'::task_status + -- If the agent is shutting down or turned off, this is an unknown state because we would expect a stop + -- build to be running. + -- This is essentially equal to: `IN ('shutting_down', 'shutdown_timeout', 'shutdown_error', 'off')`, + -- but we cannot use them because the values were added in a migration. 
+ WHEN agent_raw.lifecycle_state NOT IN ('created', 'starting', 'ready', 'start_timeout', 'start_error') THEN 'unknown'::task_status + ELSE 'unknown'::task_status + END AS status + ) agent_status + CROSS JOIN LATERAL ( + SELECT + CASE + WHEN app_raw.health = 'initializing' THEN 'initializing'::task_status + WHEN app_raw.health = 'unhealthy' THEN 'error'::task_status + WHEN app_raw.health IN ('healthy', 'disabled') THEN 'active'::task_status + ELSE 'unknown'::task_status + END AS status + ) app_status + WHERE + tasks.deleted_at IS NULL; diff --git a/coderd/database/migrations/000399_template_version_presets_last_invalidated_at.down.sql b/coderd/database/migrations/000399_template_version_presets_last_invalidated_at.down.sql new file mode 100644 index 0000000000000..d8f4efc31615f --- /dev/null +++ b/coderd/database/migrations/000399_template_version_presets_last_invalidated_at.down.sql @@ -0,0 +1 @@ +ALTER TABLE template_version_presets DROP COLUMN last_invalidated_at; diff --git a/coderd/database/migrations/000399_template_version_presets_last_invalidated_at.up.sql b/coderd/database/migrations/000399_template_version_presets_last_invalidated_at.up.sql new file mode 100644 index 0000000000000..87488aa41c671 --- /dev/null +++ b/coderd/database/migrations/000399_template_version_presets_last_invalidated_at.up.sql @@ -0,0 +1 @@ +ALTER TABLE template_version_presets ADD COLUMN last_invalidated_at TIMESTAMPTZ; diff --git a/coderd/database/migrations/000400_add_task_display_name.down.sql b/coderd/database/migrations/000400_add_task_display_name.down.sql new file mode 100644 index 0000000000000..b054907de1777 --- /dev/null +++ b/coderd/database/migrations/000400_add_task_display_name.down.sql @@ -0,0 +1,87 @@ +-- Drop view first before removing the display_name column from tasks +DROP VIEW IF EXISTS tasks_with_status; + +-- Remove display_name column from tasks +ALTER TABLE tasks DROP COLUMN display_name; + +-- Recreate view without the display_name column. 
+-- This restores the view to its previous state after removing display_name from tasks. +CREATE VIEW + tasks_with_status +AS +SELECT + tasks.*, + CASE + WHEN tasks.workspace_id IS NULL OR latest_build.job_status IS NULL THEN 'pending'::task_status + + WHEN latest_build.job_status = 'failed' THEN 'error'::task_status + + WHEN latest_build.transition IN ('stop', 'delete') + AND latest_build.job_status = 'succeeded' THEN 'paused'::task_status + + WHEN latest_build.transition = 'start' + AND latest_build.job_status = 'pending' THEN 'initializing'::task_status + + WHEN latest_build.transition = 'start' AND latest_build.job_status IN ('running', 'succeeded') THEN + CASE + WHEN agent_status.none THEN 'initializing'::task_status + WHEN agent_status.connecting THEN 'initializing'::task_status + WHEN agent_status.connected THEN + CASE + WHEN app_status.any_unhealthy THEN 'error'::task_status + WHEN app_status.any_initializing THEN 'initializing'::task_status + WHEN app_status.all_healthy_or_disabled THEN 'active'::task_status + ELSE 'unknown'::task_status + END + ELSE 'unknown'::task_status + END + + ELSE 'unknown'::task_status + END AS status, + task_app.*, + task_owner.* +FROM + tasks + CROSS JOIN LATERAL ( + SELECT + vu.username AS owner_username, + vu.name AS owner_name, + vu.avatar_url AS owner_avatar_url + FROM visible_users vu + WHERE vu.id = tasks.owner_id + ) task_owner + LEFT JOIN LATERAL ( + SELECT workspace_build_number, workspace_agent_id, workspace_app_id + FROM task_workspace_apps task_app + WHERE task_id = tasks.id + ORDER BY workspace_build_number DESC + LIMIT 1 + ) task_app ON TRUE + LEFT JOIN LATERAL ( + SELECT + workspace_build.transition, + provisioner_job.job_status, + workspace_build.job_id + FROM workspace_builds workspace_build + JOIN provisioner_jobs provisioner_job ON provisioner_job.id = workspace_build.job_id + WHERE workspace_build.workspace_id = tasks.workspace_id + AND workspace_build.build_number = task_app.workspace_build_number + ) 
latest_build ON TRUE + CROSS JOIN LATERAL ( + SELECT + COUNT(*) = 0 AS none, + bool_or(workspace_agent.lifecycle_state IN ('created', 'starting')) AS connecting, + bool_and(workspace_agent.lifecycle_state = 'ready') AS connected + FROM workspace_agents workspace_agent + WHERE workspace_agent.id = task_app.workspace_agent_id + ) agent_status + CROSS JOIN LATERAL ( + SELECT + bool_or(workspace_app.health = 'unhealthy') AS any_unhealthy, + bool_or(workspace_app.health = 'initializing') AS any_initializing, + bool_and(workspace_app.health IN ('healthy', 'disabled')) AS all_healthy_or_disabled + FROM workspace_apps workspace_app + WHERE workspace_app.id = task_app.workspace_app_id + ) app_status + WHERE + tasks.deleted_at IS NULL; diff --git a/coderd/database/migrations/000400_add_task_display_name.up.sql b/coderd/database/migrations/000400_add_task_display_name.up.sql new file mode 100644 index 0000000000000..591802ce1e438 --- /dev/null +++ b/coderd/database/migrations/000400_add_task_display_name.up.sql @@ -0,0 +1,158 @@ +-- Add display_name column to tasks table +ALTER TABLE tasks ADD COLUMN display_name VARCHAR(127) NOT NULL DEFAULT ''; +COMMENT ON COLUMN tasks.display_name IS 'Display name is a custom, human-friendly task name.'; + +-- Backfill existing tasks with truncated prompt as display name +-- Replace newlines/tabs with spaces, truncate to 64 characters and add ellipsis if truncated +UPDATE tasks +SET display_name = CASE + WHEN LENGTH(REGEXP_REPLACE(prompt, E'[\\n\\r\\t]+', ' ', 'g')) > 64 + THEN LEFT(REGEXP_REPLACE(prompt, E'[\\n\\r\\t]+', ' ', 'g'), 63) || '…' + ELSE REGEXP_REPLACE(prompt, E'[\\n\\r\\t]+', ' ', 'g') + END +WHERE display_name = ''; + +-- Recreate the tasks_with_status view to pick up the new display_name column. +-- PostgreSQL resolves the tasks.* wildcard when the view is created, not when +-- it's queried, so the view must be recreated after adding columns to tasks. 
+DROP VIEW IF EXISTS tasks_with_status; + +CREATE VIEW + tasks_with_status +AS +SELECT + tasks.*, + -- Combine component statuses with precedence: build -> agent -> app. + CASE + WHEN tasks.workspace_id IS NULL THEN 'pending'::task_status + WHEN build_status.status != 'active' THEN build_status.status::task_status + WHEN agent_status.status != 'active' THEN agent_status.status::task_status + ELSE app_status.status::task_status + END AS status, + -- Attach debug information for troubleshooting status. + jsonb_build_object( + 'build', jsonb_build_object( + 'transition', latest_build_raw.transition, + 'job_status', latest_build_raw.job_status, + 'computed', build_status.status + ), + 'agent', jsonb_build_object( + 'lifecycle_state', agent_raw.lifecycle_state, + 'computed', agent_status.status + ), + 'app', jsonb_build_object( + 'health', app_raw.health, + 'computed', app_status.status + ) + ) AS status_debug, + task_app.*, + agent_raw.lifecycle_state AS workspace_agent_lifecycle_state, + app_raw.health AS workspace_app_health, + task_owner.* +FROM + tasks + CROSS JOIN LATERAL ( + SELECT + vu.username AS owner_username, + vu.name AS owner_name, + vu.avatar_url AS owner_avatar_url + FROM + visible_users vu + WHERE + vu.id = tasks.owner_id + ) task_owner + LEFT JOIN LATERAL ( + SELECT + task_app.workspace_build_number, + task_app.workspace_agent_id, + task_app.workspace_app_id + FROM + task_workspace_apps task_app + WHERE + task_id = tasks.id + ORDER BY + task_app.workspace_build_number DESC + LIMIT 1 + ) task_app ON TRUE + + -- Join the raw data for computing task status. 
+ LEFT JOIN LATERAL ( + SELECT + workspace_build.transition, + provisioner_job.job_status, + workspace_build.job_id + FROM + workspace_builds workspace_build + JOIN + provisioner_jobs provisioner_job + ON provisioner_job.id = workspace_build.job_id + WHERE + workspace_build.workspace_id = tasks.workspace_id + AND workspace_build.build_number = task_app.workspace_build_number + ) latest_build_raw ON TRUE + LEFT JOIN LATERAL ( + SELECT + workspace_agent.lifecycle_state + FROM + workspace_agents workspace_agent + WHERE + workspace_agent.id = task_app.workspace_agent_id + ) agent_raw ON TRUE + LEFT JOIN LATERAL ( + SELECT + workspace_app.health + FROM + workspace_apps workspace_app + WHERE + workspace_app.id = task_app.workspace_app_id + ) app_raw ON TRUE + + -- Compute the status for each component. + CROSS JOIN LATERAL ( + SELECT + CASE + WHEN latest_build_raw.job_status IS NULL THEN 'pending'::task_status + WHEN latest_build_raw.job_status IN ('failed', 'canceling', 'canceled') THEN 'error'::task_status + WHEN + latest_build_raw.transition IN ('stop', 'delete') + AND latest_build_raw.job_status = 'succeeded' THEN 'paused'::task_status + WHEN + latest_build_raw.transition = 'start' + AND latest_build_raw.job_status = 'pending' THEN 'initializing'::task_status + -- Build is running or done, defer to agent/app status. + WHEN + latest_build_raw.transition = 'start' + AND latest_build_raw.job_status IN ('running', 'succeeded') THEN 'active'::task_status + ELSE 'unknown'::task_status + END AS status + ) build_status + CROSS JOIN LATERAL ( + SELECT + CASE + -- No agent or connecting. + WHEN + agent_raw.lifecycle_state IS NULL + OR agent_raw.lifecycle_state IN ('created', 'starting') THEN 'initializing'::task_status + -- Agent is running, defer to app status. + -- NOTE(mafredri): The start_error/start_timeout states means connected, but some startup script failed. + -- This may or may not affect the task status but this has to be caught by app health check. 
+ WHEN agent_raw.lifecycle_state IN ('ready', 'start_timeout', 'start_error') THEN 'active'::task_status + -- If the agent is shutting down or turned off, this is an unknown state because we would expect a stop + -- build to be running. + -- This is essentially equal to: `IN ('shutting_down', 'shutdown_timeout', 'shutdown_error', 'off')`, + -- but we cannot use them because the values were added in a migration. + WHEN agent_raw.lifecycle_state NOT IN ('created', 'starting', 'ready', 'start_timeout', 'start_error') THEN 'unknown'::task_status + ELSE 'unknown'::task_status + END AS status + ) agent_status + CROSS JOIN LATERAL ( + SELECT + CASE + WHEN app_raw.health = 'initializing' THEN 'initializing'::task_status + WHEN app_raw.health = 'unhealthy' THEN 'error'::task_status + WHEN app_raw.health IN ('healthy', 'disabled') THEN 'active'::task_status + ELSE 'unknown'::task_status + END AS status + ) app_status + WHERE + tasks.deleted_at IS NULL; diff --git a/coderd/database/migrations/000401_add_workspace_agents_index.down.sql b/coderd/database/migrations/000401_add_workspace_agents_index.down.sql new file mode 100644 index 0000000000000..3b2a25345fc2b --- /dev/null +++ b/coderd/database/migrations/000401_add_workspace_agents_index.down.sql @@ -0,0 +1 @@ +DROP INDEX IF EXISTS public.workspace_agents_auth_instance_id_deleted_idx; diff --git a/coderd/database/migrations/000401_add_workspace_agents_index.up.sql b/coderd/database/migrations/000401_add_workspace_agents_index.up.sql new file mode 100644 index 0000000000000..db67cb400f171 --- /dev/null +++ b/coderd/database/migrations/000401_add_workspace_agents_index.up.sql @@ -0,0 +1 @@ +CREATE INDEX IF NOT EXISTS workspace_agents_auth_instance_id_deleted_idx ON public.workspace_agents (auth_instance_id, deleted); diff --git a/coderd/database/migrations/testdata/fixtures/000371_add_api_key_and_oauth2_provider_app_token.up.sql 
b/coderd/database/migrations/testdata/fixtures/000371_add_api_key_and_oauth2_provider_app_token.up.sql new file mode 100644 index 0000000000000..cd597539971f1 --- /dev/null +++ b/coderd/database/migrations/testdata/fixtures/000371_add_api_key_and_oauth2_provider_app_token.up.sql @@ -0,0 +1,57 @@ +-- Ensure api_keys and oauth2_provider_app_tokens have live data after +-- migration 000371 deletes expired rows. +INSERT INTO api_keys ( + id, + hashed_secret, + user_id, + last_used, + expires_at, + created_at, + updated_at, + login_type, + lifetime_seconds, + ip_address, + token_name, + scopes, + allow_list +) +VALUES ( + 'fixture-api-key', + '\xaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa', + '30095c71-380b-457a-8995-97b8ee6e5307', + NOW() - INTERVAL '1 hour', + NOW() + INTERVAL '30 days', + NOW() - INTERVAL '1 day', + NOW() - INTERVAL '1 day', + 'password', + 86400, + '0.0.0.0', + 'fixture-api-key', + ARRAY['workspace:read']::api_key_scope[], + ARRAY['*:*'] +) +ON CONFLICT (id) DO NOTHING; + +INSERT INTO oauth2_provider_app_tokens ( + id, + created_at, + expires_at, + hash_prefix, + refresh_hash, + app_secret_id, + api_key_id, + audience, + user_id +) +VALUES ( + '9f92f3c9-811f-4f6f-9a1c-3f2eed1f9f15', + NOW() - INTERVAL '30 minutes', + NOW() + INTERVAL '30 days', + CAST('fixture-hash-prefix' AS bytea), + CAST('fixture-refresh-hash' AS bytea), + 'b0eebc99-9c0b-4ef8-bb6d-6bb9bd380a11', + 'fixture-api-key', + 'https://coder.example.com', + '30095c71-380b-457a-8995-97b8ee6e5307' +) +ON CONFLICT (id) DO NOTHING; diff --git a/coderd/database/modelmethods.go b/coderd/database/modelmethods.go index b3202342e3ffa..5e92f305e0bca 100644 --- a/coderd/database/modelmethods.go +++ b/coderd/database/modelmethods.go @@ -1,6 +1,7 @@ package database import ( + "database/sql" "encoding/hex" "slices" "sort" @@ -132,11 +133,29 @@ func (w ConnectionLog) RBACObject() rbac.Object { return obj } +// TaskTable converts a Task to it's reduced version. 
+// A more generalized solution is to use json marshaling to +// consistently keep these two structs in sync. +// That would be a lot of overhead, and a more costly unit test is +// written to make sure these match up. +func (t Task) TaskTable() TaskTable { + return TaskTable{ + ID: t.ID, + OrganizationID: t.OrganizationID, + OwnerID: t.OwnerID, + Name: t.Name, + DisplayName: t.DisplayName, + WorkspaceID: t.WorkspaceID, + TemplateVersionID: t.TemplateVersionID, + TemplateParameters: t.TemplateParameters, + Prompt: t.Prompt, + CreatedAt: t.CreatedAt, + DeletedAt: t.DeletedAt, + } +} + func (t Task) RBACObject() rbac.Object { - return rbac.ResourceTask. - WithID(t.ID). - WithOwner(t.OwnerID.String()). - InOrg(t.OrganizationID) + return t.TaskTable().RBACObject() } func (t TaskTable) RBACObject() rbac.Object { @@ -662,6 +681,7 @@ func ConvertWorkspaceRows(rows []GetWorkspacesRow) []Workspace { TemplateIcon: r.TemplateIcon, TemplateDescription: r.TemplateDescription, NextStartAt: r.NextStartAt, + TaskID: r.TaskID, } } @@ -777,3 +797,60 @@ func (s UserSecret) RBACObject() rbac.Object { func (s AIBridgeInterception) RBACObject() rbac.Object { return rbac.ResourceAibridgeInterception.WithOwner(s.InitiatorID.String()) } + +// WorkspaceIdentity contains the minimal workspace fields needed for agent API metadata/stats reporting +// and RBAC checks, without requiring a full database.Workspace object. 
+type WorkspaceIdentity struct { + // Add any other fields needed for IsPrebuild() if it relies on workspace fields + // Identity fields + ID uuid.UUID + OwnerID uuid.UUID + OrganizationID uuid.UUID + TemplateID uuid.UUID + + // Display fields for logging/metrics + Name string + OwnerUsername string + TemplateName string + + // Lifecycle fields needed for stats reporting + AutostartSchedule sql.NullString +} + +func (w WorkspaceIdentity) RBACObject() rbac.Object { + return Workspace{ + ID: w.ID, + OwnerID: w.OwnerID, + OrganizationID: w.OrganizationID, + TemplateID: w.TemplateID, + Name: w.Name, + OwnerUsername: w.OwnerUsername, + TemplateName: w.TemplateName, + AutostartSchedule: w.AutostartSchedule, + }.RBACObject() +} + +// IsPrebuild returns true if the workspace is a prebuild workspace. +// A workspace is considered a prebuild if its owner is the prebuild system user. +func (w WorkspaceIdentity) IsPrebuild() bool { + return w.OwnerID == PrebuildsSystemUserID +} + +func (w WorkspaceIdentity) Equal(w2 WorkspaceIdentity) bool { + return w.ID == w2.ID && w.OwnerID == w2.OwnerID && w.OrganizationID == w2.OrganizationID && + w.TemplateID == w2.TemplateID && w.Name == w2.Name && w.OwnerUsername == w2.OwnerUsername && + w.TemplateName == w2.TemplateName && w.AutostartSchedule == w2.AutostartSchedule +} + +func WorkspaceIdentityFromWorkspace(w Workspace) WorkspaceIdentity { + return WorkspaceIdentity{ + ID: w.ID, + OwnerID: w.OwnerID, + OrganizationID: w.OrganizationID, + TemplateID: w.TemplateID, + Name: w.Name, + OwnerUsername: w.OwnerUsername, + TemplateName: w.TemplateName, + AutostartSchedule: w.AutostartSchedule, + } +} diff --git a/coderd/database/modelqueries.go b/coderd/database/modelqueries.go index c9c7879627684..fae0f3eca4fa4 100644 --- a/coderd/database/modelqueries.go +++ b/coderd/database/modelqueries.go @@ -127,6 +127,7 @@ func (q *sqlQuerier) GetAuthorizedTemplates(ctx context.Context, arg GetTemplate &i.MaxPortSharingLevel, &i.UseClassicParameterFlow, 
&i.CorsBehavior, + &i.UseTerraformWorkspaceCache, &i.CreatedByAvatarURL, &i.CreatedByUsername, &i.CreatedByName, @@ -321,6 +322,7 @@ func (q *sqlQuerier) GetAuthorizedWorkspaces(ctx context.Context, arg GetWorkspa &i.TemplateDisplayName, &i.TemplateIcon, &i.TemplateDescription, + &i.TaskID, &i.TemplateVersionID, &i.TemplateVersionName, &i.LatestBuildCompletedAt, @@ -328,7 +330,6 @@ func (q *sqlQuerier) GetAuthorizedWorkspaces(ctx context.Context, arg GetWorkspa &i.LatestBuildError, &i.LatestBuildTransition, &i.LatestBuildStatus, - &i.LatestBuildHasAITask, &i.LatestBuildHasExternalAgent, &i.Count, ); err != nil { @@ -805,6 +806,7 @@ func (q *sqlQuerier) ListAuthorizedAIBridgeInterceptions(ctx context.Context, ar &i.AIBridgeInterception.StartedAt, &i.AIBridgeInterception.Metadata, &i.AIBridgeInterception.EndedAt, + &i.AIBridgeInterception.APIKeyID, &i.VisibleUser.ID, &i.VisibleUser.Username, &i.VisibleUser.Name, diff --git a/coderd/database/modelqueries_internal_test.go b/coderd/database/modelqueries_internal_test.go index 275ed947a3e4c..9e84324b72ee8 100644 --- a/coderd/database/modelqueries_internal_test.go +++ b/coderd/database/modelqueries_internal_test.go @@ -58,6 +58,45 @@ func TestWorkspaceTableConvert(t *testing.T) { "To resolve this, go to the 'func (w Workspace) WorkspaceTable()' and ensure all fields are converted.") } +// TestTaskTableConvert verifies all task fields are converted +// when reducing a `Task` to a `TaskTable`. +// This test is a guard rail to prevent developer oversight mistakes. 
+func TestTaskTableConvert(t *testing.T) { + t.Parallel() + + staticRandoms := &testutil.Random{ + String: func() string { return "foo" }, + Bool: func() bool { return true }, + Int: func() int64 { return 500 }, + Uint: func() uint64 { return 126 }, + Float: func() float64 { return 3.14 }, + Complex: func() complex128 { return 6.24 }, + Time: func() time.Time { + return time.Date(2020, 5, 2, 5, 19, 21, 30, time.UTC) + }, + } + + // Copies the approach taken by TestWorkspaceTableConvert. + // + // If you use 'PopulateStruct' to create 2 tasks, using the same + // "random" values for each type. Then they should be identical. + // + // So if 'task.TaskTable()' was missing any fields in its + // conversion, the comparison would fail. + + var task Task + err := testutil.PopulateStruct(&task, staticRandoms) + require.NoError(t, err) + + var subset TaskTable + err = testutil.PopulateStruct(&subset, staticRandoms) + require.NoError(t, err) + + require.Equal(t, task.TaskTable(), subset, + "'task.TaskTable()' is missing at least 1 field when converting to 'TaskTable'. "+ + "To resolve this, go to the 'func (t Task) TaskTable()' and ensure all fields are converted.") +} + // TestAuditLogsQueryConsistency ensures that GetAuditLogsOffset and CountAuditLogs // have identical WHERE clauses to prevent filtering inconsistencies. // This test is a guard rail to prevent developer oversight mistakes. diff --git a/coderd/database/models.go b/coderd/database/models.go index e55f3a553721b..e55cd1f24bf0a 100644 --- a/coderd/database/models.go +++ b/coderd/database/models.go @@ -1,6 +1,6 @@ // Code generated by sqlc. DO NOT EDIT.
// versions: -// sqlc v1.27.0 +// sqlc v1.30.0 package database @@ -3614,6 +3614,7 @@ type AIBridgeInterception struct { StartedAt time.Time `db:"started_at" json:"started_at"` Metadata pqtype.NullRawMessage `db:"metadata" json:"metadata"` EndedAt sql.NullTime `db:"ended_at" json:"ended_at"` + APIKeyID sql.NullString `db:"api_key_id" json:"api_key_id"` } // Audit log of tokens used by intercepted requests in AI Bridge @@ -4207,23 +4208,27 @@ type TailnetTunnel struct { } type Task struct { - ID uuid.UUID `db:"id" json:"id"` - OrganizationID uuid.UUID `db:"organization_id" json:"organization_id"` - OwnerID uuid.UUID `db:"owner_id" json:"owner_id"` - Name string `db:"name" json:"name"` - WorkspaceID uuid.NullUUID `db:"workspace_id" json:"workspace_id"` - TemplateVersionID uuid.UUID `db:"template_version_id" json:"template_version_id"` - TemplateParameters json.RawMessage `db:"template_parameters" json:"template_parameters"` - Prompt string `db:"prompt" json:"prompt"` - CreatedAt time.Time `db:"created_at" json:"created_at"` - DeletedAt sql.NullTime `db:"deleted_at" json:"deleted_at"` - Status TaskStatus `db:"status" json:"status"` - WorkspaceBuildNumber sql.NullInt32 `db:"workspace_build_number" json:"workspace_build_number"` - WorkspaceAgentID uuid.NullUUID `db:"workspace_agent_id" json:"workspace_agent_id"` - WorkspaceAppID uuid.NullUUID `db:"workspace_app_id" json:"workspace_app_id"` - OwnerUsername string `db:"owner_username" json:"owner_username"` - OwnerName string `db:"owner_name" json:"owner_name"` - OwnerAvatarUrl string `db:"owner_avatar_url" json:"owner_avatar_url"` + ID uuid.UUID `db:"id" json:"id"` + OrganizationID uuid.UUID `db:"organization_id" json:"organization_id"` + OwnerID uuid.UUID `db:"owner_id" json:"owner_id"` + Name string `db:"name" json:"name"` + WorkspaceID uuid.NullUUID `db:"workspace_id" json:"workspace_id"` + TemplateVersionID uuid.UUID `db:"template_version_id" json:"template_version_id"` + TemplateParameters json.RawMessage 
`db:"template_parameters" json:"template_parameters"` + Prompt string `db:"prompt" json:"prompt"` + CreatedAt time.Time `db:"created_at" json:"created_at"` + DeletedAt sql.NullTime `db:"deleted_at" json:"deleted_at"` + DisplayName string `db:"display_name" json:"display_name"` + Status TaskStatus `db:"status" json:"status"` + StatusDebug json.RawMessage `db:"status_debug" json:"status_debug"` + WorkspaceBuildNumber sql.NullInt32 `db:"workspace_build_number" json:"workspace_build_number"` + WorkspaceAgentID uuid.NullUUID `db:"workspace_agent_id" json:"workspace_agent_id"` + WorkspaceAppID uuid.NullUUID `db:"workspace_app_id" json:"workspace_app_id"` + WorkspaceAgentLifecycleState NullWorkspaceAgentLifecycleState `db:"workspace_agent_lifecycle_state" json:"workspace_agent_lifecycle_state"` + WorkspaceAppHealth NullWorkspaceAppHealth `db:"workspace_app_health" json:"workspace_app_health"` + OwnerUsername string `db:"owner_username" json:"owner_username"` + OwnerName string `db:"owner_name" json:"owner_name"` + OwnerAvatarUrl string `db:"owner_avatar_url" json:"owner_avatar_url"` } type TaskTable struct { @@ -4237,6 +4242,8 @@ type TaskTable struct { Prompt string `db:"prompt" json:"prompt"` CreatedAt time.Time `db:"created_at" json:"created_at"` DeletedAt sql.NullTime `db:"deleted_at" json:"deleted_at"` + // Display name is a custom, human-friendly task name. 
+ DisplayName string `db:"display_name" json:"display_name"` } type TaskWorkspaceApp struct { @@ -4293,6 +4300,7 @@ type Template struct { MaxPortSharingLevel AppSharingLevel `db:"max_port_sharing_level" json:"max_port_sharing_level"` UseClassicParameterFlow bool `db:"use_classic_parameter_flow" json:"use_classic_parameter_flow"` CorsBehavior CorsBehavior `db:"cors_behavior" json:"cors_behavior"` + UseTerraformWorkspaceCache bool `db:"use_terraform_workspace_cache" json:"use_terraform_workspace_cache"` CreatedByAvatarURL string `db:"created_by_avatar_url" json:"created_by_avatar_url"` CreatedByUsername string `db:"created_by_username" json:"created_by_username"` CreatedByName string `db:"created_by_name" json:"created_by_name"` @@ -4342,6 +4350,8 @@ type TemplateTable struct { // Determines whether to default to the dynamic parameter creation flow for this template or continue using the legacy classic parameter creation flow.This is a template wide setting, the template admin can revert to the classic flow if there are any issues. An escape hatch is required, as workspace creation is a core workflow and cannot break. This column will be removed when the dynamic parameter creation flow is stable. UseClassicParameterFlow bool `db:"use_classic_parameter_flow" json:"use_classic_parameter_flow"` CorsBehavior CorsBehavior `db:"cors_behavior" json:"cors_behavior"` + // Determines whether to keep terraform directories cached between runs for workspaces created from this template. When enabled, this can significantly speed up the `terraform init` step at the cost of increased disk usage. This is an opt-in experience, as it prevents modules from being updated, and therefore is a behavioral difference from the default. + UseTerraformWorkspaceCache bool `db:"use_terraform_workspace_cache" json:"use_terraform_workspace_cache"` } // Records aggregated usage statistics for templates/users. All usage is rounded up to the nearest minute. 
@@ -4445,7 +4455,8 @@ type TemplateVersionPreset struct { // Short text describing the preset (max 128 characters). Description string `db:"description" json:"description"` // URL or path to an icon representing the preset (max 256 characters). - Icon string `db:"icon" json:"icon"` + Icon string `db:"icon" json:"icon"` + LastInvalidatedAt sql.NullTime `db:"last_invalidated_at" json:"last_invalidated_at"` } type TemplateVersionPresetParameter struct { @@ -4663,6 +4674,7 @@ type Workspace struct { TemplateDisplayName string `db:"template_display_name" json:"template_display_name"` TemplateIcon string `db:"template_icon" json:"template_icon"` TemplateDescription string `db:"template_description" json:"template_description"` + TaskID uuid.NullUUID `db:"task_id" json:"task_id"` } type WorkspaceAgent struct { @@ -4938,7 +4950,6 @@ type WorkspaceBuild struct { MaxDeadline time.Time `db:"max_deadline" json:"max_deadline"` TemplateVersionPresetID uuid.NullUUID `db:"template_version_preset_id" json:"template_version_preset_id"` HasAITask sql.NullBool `db:"has_ai_task" json:"has_ai_task"` - AITaskSidebarAppID uuid.NullUUID `db:"ai_task_sidebar_app_id" json:"ai_task_sidebar_app_id"` HasExternalAgent sql.NullBool `db:"has_external_agent" json:"has_external_agent"` InitiatorByAvatarUrl string `db:"initiator_by_avatar_url" json:"initiator_by_avatar_url"` InitiatorByUsername string `db:"initiator_by_username" json:"initiator_by_username"` @@ -4970,7 +4981,6 @@ type WorkspaceBuildTable struct { MaxDeadline time.Time `db:"max_deadline" json:"max_deadline"` TemplateVersionPresetID uuid.NullUUID `db:"template_version_preset_id" json:"template_version_preset_id"` HasAITask sql.NullBool `db:"has_ai_task" json:"has_ai_task"` - AITaskSidebarAppID uuid.NullUUID `db:"ai_task_sidebar_app_id" json:"ai_task_sidebar_app_id"` HasExternalAgent sql.NullBool `db:"has_external_agent" json:"has_external_agent"` } diff --git a/coderd/database/querier.go b/coderd/database/querier.go index 
2b96823028f61..7997d7f085c1a 100644 --- a/coderd/database/querier.go +++ b/coderd/database/querier.go @@ -1,6 +1,6 @@ // Code generated by sqlc. DO NOT EDIT. // versions: -// sqlc v1.27.0 +// sqlc v1.30.0 package database @@ -91,6 +91,7 @@ type sqlcQuerier interface { DeleteCoordinator(ctx context.Context, id uuid.UUID) error DeleteCryptoKey(ctx context.Context, arg DeleteCryptoKeyParams) (CryptoKey, error) DeleteCustomRole(ctx context.Context, arg DeleteCustomRoleParams) error + DeleteExpiredAPIKeys(ctx context.Context, arg DeleteExpiredAPIKeysParams) (int64, error) DeleteExternalAuthLink(ctx context.Context, arg DeleteExternalAuthLinkParams) error DeleteGitSSHKey(ctx context.Context, userID uuid.UUID) error DeleteGroupByID(ctx context.Context, id uuid.UUID) error @@ -102,6 +103,8 @@ type sqlcQuerier interface { DeleteOAuth2ProviderAppCodesByAppAndUserID(ctx context.Context, arg DeleteOAuth2ProviderAppCodesByAppAndUserIDParams) error DeleteOAuth2ProviderAppSecretByID(ctx context.Context, id uuid.UUID) error DeleteOAuth2ProviderAppTokensByAppAndUserID(ctx context.Context, arg DeleteOAuth2ProviderAppTokensByAppAndUserIDParams) error + // Cumulative count. + DeleteOldAIBridgeRecords(ctx context.Context, beforeTime time.Time) (int32, error) DeleteOldAuditLogConnectionEvents(ctx context.Context, arg DeleteOldAuditLogConnectionEventsParams) error // Delete all notification messages which have not been updated for over a week. 
DeleteOldNotificationMessages(ctx context.Context) error @@ -235,7 +238,7 @@ type sqlcQuerier interface { GetInboxNotificationsByUserID(ctx context.Context, arg GetInboxNotificationsByUserIDParams) ([]InboxNotification, error) GetLastUpdateCheck(ctx context.Context) (string, error) GetLatestCryptoKeyByFeature(ctx context.Context, feature CryptoKeyFeature) (CryptoKey, error) - GetLatestWorkspaceAppStatusesByAppID(ctx context.Context, appID uuid.UUID) ([]WorkspaceAppStatus, error) + GetLatestWorkspaceAppStatusByAppID(ctx context.Context, appID uuid.UUID) (WorkspaceAppStatus, error) GetLatestWorkspaceAppStatusesByWorkspaceIDs(ctx context.Context, ids []uuid.UUID) ([]WorkspaceAppStatus, error) GetLatestWorkspaceBuildByWorkspaceID(ctx context.Context, workspaceID uuid.UUID) (WorkspaceBuild, error) GetLatestWorkspaceBuildsByWorkspaceIDs(ctx context.Context, ids []uuid.UUID) ([]WorkspaceBuild, error) @@ -269,6 +272,9 @@ type sqlcQuerier interface { GetOrganizationResourceCountByID(ctx context.Context, organizationID uuid.UUID) (GetOrganizationResourceCountByIDRow, error) GetOrganizations(ctx context.Context, arg GetOrganizationsParams) ([]Organization, error) GetOrganizationsByUserID(ctx context.Context, arg GetOrganizationsByUserIDParams) ([]Organization, error) + // GetOrganizationsWithPrebuildStatus returns organizations with prebuilds configured and their + // membership status for the prebuilds system user (org membership, group existence, group membership). 
+ GetOrganizationsWithPrebuildStatus(ctx context.Context, arg GetOrganizationsWithPrebuildStatusParams) ([]GetOrganizationsWithPrebuildStatusRow, error) GetParameterSchemasByJobID(ctx context.Context, jobID uuid.UUID) ([]ParameterSchema, error) GetPrebuildMetrics(ctx context.Context) ([]GetPrebuildMetricsRow, error) GetPrebuildsSettings(ctx context.Context) (string, error) @@ -340,6 +346,7 @@ type sqlcQuerier interface { GetTailnetTunnelPeerBindings(ctx context.Context, srcID uuid.UUID) ([]GetTailnetTunnelPeerBindingsRow, error) GetTailnetTunnelPeerIDs(ctx context.Context, srcID uuid.UUID) ([]GetTailnetTunnelPeerIDsRow, error) GetTaskByID(ctx context.Context, id uuid.UUID) (Task, error) + GetTaskByOwnerIDAndName(ctx context.Context, arg GetTaskByOwnerIDAndNameParams) (Task, error) GetTaskByWorkspaceID(ctx context.Context, workspaceID uuid.UUID) (Task, error) GetTelemetryItem(ctx context.Context, key string) (TelemetryItem, error) GetTelemetryItems(ctx context.Context) ([]TelemetryItem, error) @@ -440,6 +447,7 @@ type sqlcQuerier interface { // We do not start counting from 0 at the start_time. We check the last status change before the start_time for each user. As such, // the result shows the total number of users in each status on any particular day. 
GetUserStatusCounts(ctx context.Context, arg GetUserStatusCountsParams) ([]GetUserStatusCountsRow, error) + GetUserTaskNotificationAlertDismissed(ctx context.Context, userID uuid.UUID) (bool, error) GetUserTerminalFont(ctx context.Context, userID uuid.UUID) (string, error) GetUserThemePreference(ctx context.Context, userID uuid.UUID) (string, error) GetUserWorkspaceBuildParameters(ctx context.Context, arg GetUserWorkspaceBuildParametersParams) ([]GetUserWorkspaceBuildParametersRow, error) @@ -606,7 +614,7 @@ type sqlcQuerier interface { InsertWorkspaceResource(ctx context.Context, arg InsertWorkspaceResourceParams) (WorkspaceResource, error) InsertWorkspaceResourceMetadata(ctx context.Context, arg InsertWorkspaceResourceMetadataParams) ([]WorkspaceResourceMetadatum, error) ListAIBridgeInterceptions(ctx context.Context, arg ListAIBridgeInterceptionsParams) ([]ListAIBridgeInterceptionsRow, error) - // Finds all unique AIBridge interception telemetry summaries combinations + // Finds all unique AI Bridge interception telemetry summaries combinations // (provider, model, client) in the given timeframe for telemetry reporting. ListAIBridgeInterceptionsTelemetrySummaries(ctx context.Context, arg ListAIBridgeInterceptionsTelemetrySummariesParams) ([]ListAIBridgeInterceptionsTelemetrySummariesRow, error) ListAIBridgeTokenUsagesByInterceptionIDs(ctx context.Context, interceptionIds []uuid.UUID) ([]AIBridgeTokenUsage, error) @@ -667,8 +675,9 @@ type sqlcQuerier interface { // Cancels all pending provisioner jobs for prebuilt workspaces on a specific preset from an // inactive template version. // This is an optimization to clean up stale pending jobs. 
- UpdatePrebuildProvisionerJobWithCancel(ctx context.Context, arg UpdatePrebuildProvisionerJobWithCancelParams) ([]uuid.UUID, error) + UpdatePrebuildProvisionerJobWithCancel(ctx context.Context, arg UpdatePrebuildProvisionerJobWithCancelParams) ([]UpdatePrebuildProvisionerJobWithCancelRow, error) UpdatePresetPrebuildStatus(ctx context.Context, arg UpdatePresetPrebuildStatusParams) error + UpdatePresetsLastInvalidatedAt(ctx context.Context, arg UpdatePresetsLastInvalidatedAtParams) ([]UpdatePresetsLastInvalidatedAtRow, error) UpdateProvisionerDaemonLastSeenAt(ctx context.Context, arg UpdateProvisionerDaemonLastSeenAtParams) error UpdateProvisionerJobByID(ctx context.Context, arg UpdateProvisionerJobByIDParams) error UpdateProvisionerJobLogsLength(ctx context.Context, arg UpdateProvisionerJobLogsLengthParams) error @@ -678,6 +687,7 @@ type sqlcQuerier interface { UpdateProvisionerJobWithCompleteWithStartedAtByID(ctx context.Context, arg UpdateProvisionerJobWithCompleteWithStartedAtByIDParams) error UpdateReplica(ctx context.Context, arg UpdateReplicaParams) (Replica, error) UpdateTailnetPeerStatusByCoordinator(ctx context.Context, arg UpdateTailnetPeerStatusByCoordinatorParams) error + UpdateTaskPrompt(ctx context.Context, arg UpdateTaskPromptParams) (TaskTable, error) UpdateTaskWorkspaceID(ctx context.Context, arg UpdateTaskWorkspaceIDParams) (TaskTable, error) UpdateTemplateACLByID(ctx context.Context, arg UpdateTemplateACLByIDParams) error UpdateTemplateAccessControlByID(ctx context.Context, arg UpdateTemplateAccessControlByIDParams) error @@ -705,6 +715,7 @@ type sqlcQuerier interface { UpdateUserRoles(ctx context.Context, arg UpdateUserRolesParams) (User, error) UpdateUserSecret(ctx context.Context, arg UpdateUserSecretParams) (UserSecret, error) UpdateUserStatus(ctx context.Context, arg UpdateUserStatusParams) (User, error) + UpdateUserTaskNotificationAlertDismissed(ctx context.Context, arg UpdateUserTaskNotificationAlertDismissedParams) (bool, error) 
UpdateUserTerminalFont(ctx context.Context, arg UpdateUserTerminalFontParams) (UserConfig, error) UpdateUserThemePreference(ctx context.Context, arg UpdateUserThemePreferenceParams) (UserConfig, error) UpdateVolumeResourceMonitor(ctx context.Context, arg UpdateVolumeResourceMonitorParams) error diff --git a/coderd/database/querier_test.go b/coderd/database/querier_test.go index 773f944756576..4dbb4a350a1c3 100644 --- a/coderd/database/querier_test.go +++ b/coderd/database/querier_test.go @@ -6664,6 +6664,23 @@ func TestTasksWithStatusView(t *testing.T) { StartedAt: sql.NullTime{Valid: true, Time: dbtime.Now()}, CompletedAt: sql.NullTime{Valid: true, Time: dbtime.Now()}, } + case database.ProvisionerJobStatusCanceling: + jobParams = database.ProvisionerJob{ + OrganizationID: org.ID, + Type: database.ProvisionerJobTypeWorkspaceBuild, + InitiatorID: user.ID, + StartedAt: sql.NullTime{Valid: true, Time: dbtime.Now()}, + CanceledAt: sql.NullTime{Valid: true, Time: dbtime.Now()}, + } + case database.ProvisionerJobStatusCanceled: + jobParams = database.ProvisionerJob{ + OrganizationID: org.ID, + Type: database.ProvisionerJobTypeWorkspaceBuild, + InitiatorID: user.ID, + StartedAt: sql.NullTime{Valid: true, Time: dbtime.Now()}, + CompletedAt: sql.NullTime{Valid: true, Time: dbtime.Now()}, + CanceledAt: sql.NullTime{Valid: true, Time: dbtime.Now()}, + } default: t.Errorf("invalid build status: %v", buildStatus) } @@ -6816,6 +6833,28 @@ func TestTasksWithStatusView(t *testing.T) { expectWorkspaceAgentValid: false, expectWorkspaceAppValid: false, }, + { + name: "CancelingBuild", + buildStatus: database.ProvisionerJobStatusCanceling, + buildTransition: database.WorkspaceTransitionStart, + expectedStatus: database.TaskStatusError, + description: "Latest workspace build is canceling", + expectBuildNumberValid: true, + expectBuildNumber: 1, + expectWorkspaceAgentValid: false, + expectWorkspaceAppValid: false, + }, + { + name: "CanceledBuild", + buildStatus: 
database.ProvisionerJobStatusCanceled, + buildTransition: database.WorkspaceTransitionStart, + expectedStatus: database.TaskStatusError, + description: "Latest workspace build was canceled", + expectBuildNumberValid: true, + expectBuildNumber: 1, + expectWorkspaceAgentValid: false, + expectWorkspaceAppValid: false, + }, { name: "StoppedWorkspace", buildStatus: database.ProvisionerJobStatusSucceeded, @@ -6943,24 +6982,26 @@ func TestTasksWithStatusView(t *testing.T) { buildStatus: database.ProvisionerJobStatusSucceeded, buildTransition: database.WorkspaceTransitionStart, agentState: database.WorkspaceAgentLifecycleStateStartTimeout, - expectedStatus: database.TaskStatusUnknown, - description: "Agent start timed out", + appHealths: []database.WorkspaceAppHealth{database.WorkspaceAppHealthHealthy}, + expectedStatus: database.TaskStatusActive, + description: "Agent start timed out but app is healthy, defer to app", expectBuildNumberValid: true, expectBuildNumber: 1, expectWorkspaceAgentValid: true, - expectWorkspaceAppValid: false, + expectWorkspaceAppValid: true, }, { name: "AgentStartError", buildStatus: database.ProvisionerJobStatusSucceeded, buildTransition: database.WorkspaceTransitionStart, agentState: database.WorkspaceAgentLifecycleStateStartError, - expectedStatus: database.TaskStatusUnknown, - description: "Agent failed to start", + appHealths: []database.WorkspaceAppHealth{database.WorkspaceAppHealthHealthy}, + expectedStatus: database.TaskStatusActive, + description: "Agent start failed but app is healthy, defer to app", expectBuildNumberValid: true, expectBuildNumber: 1, expectWorkspaceAgentValid: true, - expectWorkspaceAppValid: false, + expectWorkspaceAppValid: true, }, { name: "AgentShuttingDown", @@ -7081,6 +7122,8 @@ func TestTasksWithStatusView(t *testing.T) { got, err := db.GetTaskByID(ctx, task.ID) require.NoError(t, err) + t.Logf("Task status debug: %s", got.StatusDebug) + require.Equal(t, tt.expectedStatus, got.Status) require.Equal(t, 
tt.expectBuildNumberValid, got.WorkspaceBuildNumber.Valid) @@ -7792,3 +7835,81 @@ func TestUpdateAIBridgeInterceptionEnded(t *testing.T) { } }) } + +func TestDeleteExpiredAPIKeys(t *testing.T) { + t.Parallel() + db, _ := dbtestutil.NewDB(t) + + // Constant time for testing + now := time.Date(2025, 11, 20, 12, 0, 0, 0, time.UTC) + expiredBefore := now.Add(-time.Hour) // Anything before this is expired + + ctx := testutil.Context(t, testutil.WaitLong) + + user := dbgen.User(t, db, database.User{}) + + expiredTimes := []time.Time{ + expiredBefore.Add(-time.Hour * 24 * 365), + expiredBefore.Add(-time.Hour * 24), + expiredBefore.Add(-time.Hour), + expiredBefore.Add(-time.Minute), + expiredBefore.Add(-time.Second), + } + for _, exp := range expiredTimes { + // Expired api keys + dbgen.APIKey(t, db, database.APIKey{UserID: user.ID, ExpiresAt: exp}) + } + + unexpiredTimes := []time.Time{ + expiredBefore.Add(time.Hour * 24 * 365), + expiredBefore.Add(time.Hour * 24), + expiredBefore.Add(time.Hour), + expiredBefore.Add(time.Minute), + expiredBefore.Add(time.Second), + } + for _, unexp := range unexpiredTimes { + // Unexpired api keys + dbgen.APIKey(t, db, database.APIKey{UserID: user.ID, ExpiresAt: unexp}) + } + + // All keys are present before deletion + keys, err := db.GetAPIKeysByUserID(ctx, database.GetAPIKeysByUserIDParams{ + LoginType: user.LoginType, + UserID: user.ID, + }) + require.NoError(t, err) + require.Len(t, keys, len(expiredTimes)+len(unexpiredTimes)) + + // Delete expired keys + // First verify the limit works by deleting one at a time + deletedCount, err := db.DeleteExpiredAPIKeys(ctx, database.DeleteExpiredAPIKeysParams{ + Before: expiredBefore, + LimitCount: 1, + }) + require.NoError(t, err) + require.Equal(t, int64(1), deletedCount) + + // Ensure it was deleted + remaining, err := db.GetAPIKeysByUserID(ctx, database.GetAPIKeysByUserIDParams{ + LoginType: user.LoginType, + UserID: user.ID, + }) + require.NoError(t, err) + require.Len(t, remaining, 
len(expiredTimes)+len(unexpiredTimes)-1) + + // Delete the rest of the expired keys + deletedCount, err = db.DeleteExpiredAPIKeys(ctx, database.DeleteExpiredAPIKeysParams{ + Before: expiredBefore, + LimitCount: 100, + }) + require.NoError(t, err) + require.Equal(t, int64(len(expiredTimes)-1), deletedCount) + + // Ensure only unexpired keys remain + remaining, err = db.GetAPIKeysByUserID(ctx, database.GetAPIKeysByUserIDParams{ + LoginType: user.LoginType, + UserID: user.ID, + }) + require.NoError(t, err) + require.Len(t, remaining, len(unexpiredTimes)) +} diff --git a/coderd/database/queries.sql.go b/coderd/database/queries.sql.go index 2005f98347c6c..f7aa26d166eb6 100644 --- a/coderd/database/queries.sql.go +++ b/coderd/database/queries.sql.go @@ -1,6 +1,6 @@ // Code generated by sqlc. DO NOT EDIT. // versions: -// sqlc v1.27.0 +// sqlc v1.30.0 package database @@ -275,8 +275,10 @@ SELECT FROM aibridge_interceptions WHERE + -- Remove inflight interceptions (ones which lack an ended_at value). + aibridge_interceptions.ended_at IS NOT NULL -- Filter by time frame - CASE + AND CASE WHEN $1::timestamptz != '0001-01-01 00:00:00+00'::timestamptz THEN aibridge_interceptions.started_at >= $1::timestamptz ELSE true END @@ -324,9 +326,52 @@ func (q *sqlQuerier) CountAIBridgeInterceptions(ctx context.Context, arg CountAI return count, err } +const deleteOldAIBridgeRecords = `-- name: DeleteOldAIBridgeRecords :one +WITH + -- We don't have FK relationships between the dependent tables and aibridge_interceptions, so we can't rely on DELETE CASCADE. + to_delete AS ( + SELECT id FROM aibridge_interceptions + WHERE started_at < $1::timestamp with time zone + ), + -- CTEs are executed in order. 
+ tool_usages AS ( + DELETE FROM aibridge_tool_usages + WHERE interception_id IN (SELECT id FROM to_delete) + RETURNING 1 + ), + token_usages AS ( + DELETE FROM aibridge_token_usages + WHERE interception_id IN (SELECT id FROM to_delete) + RETURNING 1 + ), + user_prompts AS ( + DELETE FROM aibridge_user_prompts + WHERE interception_id IN (SELECT id FROM to_delete) + RETURNING 1 + ), + interceptions AS ( + DELETE FROM aibridge_interceptions + WHERE id IN (SELECT id FROM to_delete) + RETURNING 1 + ) +SELECT + (SELECT COUNT(*) FROM tool_usages) + + (SELECT COUNT(*) FROM token_usages) + + (SELECT COUNT(*) FROM user_prompts) + + (SELECT COUNT(*) FROM interceptions) as total_deleted +` + +// Cumulative count. +func (q *sqlQuerier) DeleteOldAIBridgeRecords(ctx context.Context, beforeTime time.Time) (int32, error) { + row := q.db.QueryRowContext(ctx, deleteOldAIBridgeRecords, beforeTime) + var total_deleted int32 + err := row.Scan(&total_deleted) + return total_deleted, err +} + const getAIBridgeInterceptionByID = `-- name: GetAIBridgeInterceptionByID :one SELECT - id, initiator_id, provider, model, started_at, metadata, ended_at + id, initiator_id, provider, model, started_at, metadata, ended_at, api_key_id FROM aibridge_interceptions WHERE @@ -344,13 +389,14 @@ func (q *sqlQuerier) GetAIBridgeInterceptionByID(ctx context.Context, id uuid.UU &i.StartedAt, &i.Metadata, &i.EndedAt, + &i.APIKeyID, ) return i, err } const getAIBridgeInterceptions = `-- name: GetAIBridgeInterceptions :many SELECT - id, initiator_id, provider, model, started_at, metadata, ended_at + id, initiator_id, provider, model, started_at, metadata, ended_at, api_key_id FROM aibridge_interceptions ` @@ -372,6 +418,7 @@ func (q *sqlQuerier) GetAIBridgeInterceptions(ctx context.Context) ([]AIBridgeIn &i.StartedAt, &i.Metadata, &i.EndedAt, + &i.APIKeyID, ); err != nil { return nil, err } @@ -517,15 +564,16 @@ func (q *sqlQuerier) GetAIBridgeUserPromptsByInterceptionID(ctx context.Context, const 
insertAIBridgeInterception = `-- name: InsertAIBridgeInterception :one INSERT INTO aibridge_interceptions ( - id, initiator_id, provider, model, metadata, started_at + id, api_key_id, initiator_id, provider, model, metadata, started_at ) VALUES ( - $1, $2, $3, $4, COALESCE($5::jsonb, '{}'::jsonb), $6 + $1, $2, $3, $4, $5, COALESCE($6::jsonb, '{}'::jsonb), $7 ) -RETURNING id, initiator_id, provider, model, started_at, metadata, ended_at +RETURNING id, initiator_id, provider, model, started_at, metadata, ended_at, api_key_id ` type InsertAIBridgeInterceptionParams struct { ID uuid.UUID `db:"id" json:"id"` + APIKeyID sql.NullString `db:"api_key_id" json:"api_key_id"` InitiatorID uuid.UUID `db:"initiator_id" json:"initiator_id"` Provider string `db:"provider" json:"provider"` Model string `db:"model" json:"model"` @@ -536,6 +584,7 @@ type InsertAIBridgeInterceptionParams struct { func (q *sqlQuerier) InsertAIBridgeInterception(ctx context.Context, arg InsertAIBridgeInterceptionParams) (AIBridgeInterception, error) { row := q.db.QueryRowContext(ctx, insertAIBridgeInterception, arg.ID, + arg.APIKeyID, arg.InitiatorID, arg.Provider, arg.Model, @@ -551,6 +600,7 @@ func (q *sqlQuerier) InsertAIBridgeInterception(ctx context.Context, arg InsertA &i.StartedAt, &i.Metadata, &i.EndedAt, + &i.APIKeyID, ) return i, err } @@ -689,15 +739,17 @@ func (q *sqlQuerier) InsertAIBridgeUserPrompt(ctx context.Context, arg InsertAIB const listAIBridgeInterceptions = `-- name: ListAIBridgeInterceptions :many SELECT - aibridge_interceptions.id, aibridge_interceptions.initiator_id, aibridge_interceptions.provider, aibridge_interceptions.model, aibridge_interceptions.started_at, aibridge_interceptions.metadata, aibridge_interceptions.ended_at, + aibridge_interceptions.id, aibridge_interceptions.initiator_id, aibridge_interceptions.provider, aibridge_interceptions.model, aibridge_interceptions.started_at, aibridge_interceptions.metadata, aibridge_interceptions.ended_at, 
aibridge_interceptions.api_key_id, visible_users.id, visible_users.username, visible_users.name, visible_users.avatar_url FROM aibridge_interceptions JOIN visible_users ON visible_users.id = aibridge_interceptions.initiator_id WHERE + -- Remove inflight interceptions (ones which lack an ended_at value). + aibridge_interceptions.ended_at IS NOT NULL -- Filter by time frame - CASE + AND CASE WHEN $1::timestamptz != '0001-01-01 00:00:00+00'::timestamptz THEN aibridge_interceptions.started_at >= $1::timestamptz ELSE true END @@ -787,6 +839,7 @@ func (q *sqlQuerier) ListAIBridgeInterceptions(ctx context.Context, arg ListAIBr &i.AIBridgeInterception.StartedAt, &i.AIBridgeInterception.Metadata, &i.AIBridgeInterception.EndedAt, + &i.AIBridgeInterception.APIKeyID, &i.VisibleUser.ID, &i.VisibleUser.Username, &i.VisibleUser.Name, @@ -831,7 +884,7 @@ type ListAIBridgeInterceptionsTelemetrySummariesRow struct { Client string `db:"client" json:"client"` } -// Finds all unique AIBridge interception telemetry summaries combinations +// Finds all unique AI Bridge interception telemetry summaries combinations // (provider, model, client) in the given timeframe for telemetry reporting. 
func (q *sqlQuerier) ListAIBridgeInterceptionsTelemetrySummaries(ctx context.Context, arg ListAIBridgeInterceptionsTelemetrySummariesParams) ([]ListAIBridgeInterceptionsTelemetrySummariesRow, error) { rows, err := q.db.QueryContext(ctx, listAIBridgeInterceptionsTelemetrySummaries, arg.EndedAtAfter, arg.EndedAtBefore) @@ -993,7 +1046,7 @@ UPDATE aibridge_interceptions WHERE id = $2::uuid AND ended_at IS NULL -RETURNING id, initiator_id, provider, model, started_at, metadata, ended_at +RETURNING id, initiator_id, provider, model, started_at, metadata, ended_at, api_key_id ` type UpdateAIBridgeInterceptionEndedParams struct { @@ -1012,6 +1065,7 @@ func (q *sqlQuerier) UpdateAIBridgeInterceptionEnded(ctx context.Context, arg Up &i.StartedAt, &i.Metadata, &i.EndedAt, + &i.APIKeyID, ) return i, err } @@ -1053,6 +1107,38 @@ func (q *sqlQuerier) DeleteApplicationConnectAPIKeysByUserID(ctx context.Context return err } +const deleteExpiredAPIKeys = `-- name: DeleteExpiredAPIKeys :one +WITH expired_keys AS ( + SELECT id + FROM api_keys + -- expired keys only + WHERE expires_at < $1::timestamptz + LIMIT $2 +), +deleted_rows AS ( + DELETE FROM + api_keys + USING + expired_keys + WHERE + api_keys.id = expired_keys.id + RETURNING api_keys.id + ) +SELECT COUNT(deleted_rows.id) AS deleted_count FROM deleted_rows +` + +type DeleteExpiredAPIKeysParams struct { + Before time.Time `db:"before" json:"before"` + LimitCount int32 `db:"limit_count" json:"limit_count"` +} + +func (q *sqlQuerier) DeleteExpiredAPIKeys(ctx context.Context, arg DeleteExpiredAPIKeysParams) (int64, error) { + row := q.db.QueryRowContext(ctx, deleteExpiredAPIKeys, arg.Before, arg.LimitCount) + var deleted_count int64 + err := row.Scan(&deleted_count) + return deleted_count, err +} + const expirePrebuildsAPIKeys = `-- name: ExpirePrebuildsAPIKeys :exec WITH unexpired_prebuilds_workspace_session_tokens AS ( SELECT id, SUBSTRING(token_name FROM 38 FOR 36)::uuid AS workspace_id @@ -8285,6 +8371,93 @@ func (q 
*sqlQuerier) FindMatchingPresetID(ctx context.Context, arg FindMatchingP return template_version_preset_id, err } +const getOrganizationsWithPrebuildStatus = `-- name: GetOrganizationsWithPrebuildStatus :many +WITH orgs_with_prebuilds AS ( + -- Get unique organizations that have presets with prebuilds configured + SELECT DISTINCT o.id, o.name + FROM organizations o + INNER JOIN templates t ON t.organization_id = o.id + INNER JOIN template_versions tv ON tv.template_id = t.id + INNER JOIN template_version_presets tvp ON tvp.template_version_id = tv.id + WHERE tvp.desired_instances IS NOT NULL +), +prebuild_user_membership AS ( + -- Check if the user is a member of the organizations + SELECT om.organization_id + FROM organization_members om + INNER JOIN orgs_with_prebuilds owp ON owp.id = om.organization_id + WHERE om.user_id = $1::uuid +), +prebuild_groups AS ( + -- Check if the organizations have the prebuilds group + SELECT g.organization_id, g.id as group_id + FROM groups g + INNER JOIN orgs_with_prebuilds owp ON owp.id = g.organization_id + WHERE g.name = $2::text +), +prebuild_group_membership AS ( + -- Check if the user is in the prebuilds group + SELECT pg.organization_id + FROM prebuild_groups pg + INNER JOIN group_members gm ON gm.group_id = pg.group_id + WHERE gm.user_id = $1::uuid +) +SELECT + owp.id AS organization_id, + owp.name AS organization_name, + (pum.organization_id IS NOT NULL)::boolean AS has_prebuild_user, + pg.group_id AS prebuilds_group_id, + (pgm.organization_id IS NOT NULL)::boolean AS has_prebuild_user_in_group +FROM orgs_with_prebuilds owp +LEFT JOIN prebuild_groups pg ON pg.organization_id = owp.id +LEFT JOIN prebuild_user_membership pum ON pum.organization_id = owp.id +LEFT JOIN prebuild_group_membership pgm ON pgm.organization_id = owp.id +` + +type GetOrganizationsWithPrebuildStatusParams struct { + UserID uuid.UUID `db:"user_id" json:"user_id"` + GroupName string `db:"group_name" json:"group_name"` +} + +type 
GetOrganizationsWithPrebuildStatusRow struct { + OrganizationID uuid.UUID `db:"organization_id" json:"organization_id"` + OrganizationName string `db:"organization_name" json:"organization_name"` + HasPrebuildUser bool `db:"has_prebuild_user" json:"has_prebuild_user"` + PrebuildsGroupID uuid.NullUUID `db:"prebuilds_group_id" json:"prebuilds_group_id"` + HasPrebuildUserInGroup bool `db:"has_prebuild_user_in_group" json:"has_prebuild_user_in_group"` +} + +// GetOrganizationsWithPrebuildStatus returns organizations with prebuilds configured and their +// membership status for the prebuilds system user (org membership, group existence, group membership). +func (q *sqlQuerier) GetOrganizationsWithPrebuildStatus(ctx context.Context, arg GetOrganizationsWithPrebuildStatusParams) ([]GetOrganizationsWithPrebuildStatusRow, error) { + rows, err := q.db.QueryContext(ctx, getOrganizationsWithPrebuildStatus, arg.UserID, arg.GroupName) + if err != nil { + return nil, err + } + defer rows.Close() + var items []GetOrganizationsWithPrebuildStatusRow + for rows.Next() { + var i GetOrganizationsWithPrebuildStatusRow + if err := rows.Scan( + &i.OrganizationID, + &i.OrganizationName, + &i.HasPrebuildUser, + &i.PrebuildsGroupID, + &i.HasPrebuildUserInGroup, + ); err != nil { + return nil, err + } + items = append(items, i) + } + if err := rows.Close(); err != nil { + return nil, err + } + if err := rows.Err(); err != nil { + return nil, err + } + return items, nil +} + const getPrebuildMetrics = `-- name: GetPrebuildMetrics :many SELECT t.name as template_name, @@ -8615,6 +8788,7 @@ SELECT tvp.scheduling_timezone, tvp.invalidate_after_secs AS ttl, tvp.prebuild_status, + tvp.last_invalidated_at, t.deleted, t.deprecated != '' AS deprecated FROM templates t @@ -8640,6 +8814,7 @@ type GetTemplatePresetsWithPrebuildsRow struct { SchedulingTimezone string `db:"scheduling_timezone" json:"scheduling_timezone"` Ttl sql.NullInt32 `db:"ttl" json:"ttl"` PrebuildStatus PrebuildStatus 
`db:"prebuild_status" json:"prebuild_status"` + LastInvalidatedAt sql.NullTime `db:"last_invalidated_at" json:"last_invalidated_at"` Deleted bool `db:"deleted" json:"deleted"` Deprecated bool `db:"deprecated" json:"deprecated"` } @@ -8670,6 +8845,7 @@ func (q *sqlQuerier) GetTemplatePresetsWithPrebuilds(ctx context.Context, templa &i.SchedulingTimezone, &i.Ttl, &i.PrebuildStatus, + &i.LastInvalidatedAt, &i.Deleted, &i.Deprecated, ); err != nil { @@ -8687,12 +8863,8 @@ func (q *sqlQuerier) GetTemplatePresetsWithPrebuilds(ctx context.Context, templa } const updatePrebuildProvisionerJobWithCancel = `-- name: UpdatePrebuildProvisionerJobWithCancel :many -UPDATE provisioner_jobs -SET - canceled_at = $1::timestamptz, - completed_at = $1::timestamptz -WHERE id IN ( - SELECT pj.id +WITH jobs_to_cancel AS ( + SELECT pj.id, w.id AS workspace_id, w.template_id, wpb.template_version_preset_id FROM provisioner_jobs pj INNER JOIN workspace_prebuild_builds wpb ON wpb.job_id = pj.id INNER JOIN workspaces w ON w.id = wpb.workspace_id @@ -8711,7 +8883,13 @@ WHERE id IN ( AND pj.canceled_at IS NULL AND pj.completed_at IS NULL ) -RETURNING id +UPDATE provisioner_jobs +SET + canceled_at = $1::timestamptz, + completed_at = $1::timestamptz +FROM jobs_to_cancel +WHERE provisioner_jobs.id = jobs_to_cancel.id +RETURNING jobs_to_cancel.id, jobs_to_cancel.workspace_id, jobs_to_cancel.template_id, jobs_to_cancel.template_version_preset_id ` type UpdatePrebuildProvisionerJobWithCancelParams struct { @@ -8719,22 +8897,34 @@ type UpdatePrebuildProvisionerJobWithCancelParams struct { PresetID uuid.NullUUID `db:"preset_id" json:"preset_id"` } +type UpdatePrebuildProvisionerJobWithCancelRow struct { + ID uuid.UUID `db:"id" json:"id"` + WorkspaceID uuid.UUID `db:"workspace_id" json:"workspace_id"` + TemplateID uuid.UUID `db:"template_id" json:"template_id"` + TemplateVersionPresetID uuid.NullUUID `db:"template_version_preset_id" json:"template_version_preset_id"` +} + // Cancels all pending 
provisioner jobs for prebuilt workspaces on a specific preset from an // inactive template version. // This is an optimization to clean up stale pending jobs. -func (q *sqlQuerier) UpdatePrebuildProvisionerJobWithCancel(ctx context.Context, arg UpdatePrebuildProvisionerJobWithCancelParams) ([]uuid.UUID, error) { +func (q *sqlQuerier) UpdatePrebuildProvisionerJobWithCancel(ctx context.Context, arg UpdatePrebuildProvisionerJobWithCancelParams) ([]UpdatePrebuildProvisionerJobWithCancelRow, error) { rows, err := q.db.QueryContext(ctx, updatePrebuildProvisionerJobWithCancel, arg.Now, arg.PresetID) if err != nil { return nil, err } defer rows.Close() - var items []uuid.UUID + var items []UpdatePrebuildProvisionerJobWithCancelRow for rows.Next() { - var id uuid.UUID - if err := rows.Scan(&id); err != nil { + var i UpdatePrebuildProvisionerJobWithCancelRow + if err := rows.Scan( + &i.ID, + &i.WorkspaceID, + &i.TemplateID, + &i.TemplateVersionPresetID, + ); err != nil { return nil, err } - items = append(items, id) + items = append(items, i) } if err := rows.Close(); err != nil { return nil, err @@ -8789,7 +8979,7 @@ func (q *sqlQuerier) GetActivePresetPrebuildSchedules(ctx context.Context) ([]Te } const getPresetByID = `-- name: GetPresetByID :one -SELECT tvp.id, tvp.template_version_id, tvp.name, tvp.created_at, tvp.desired_instances, tvp.invalidate_after_secs, tvp.prebuild_status, tvp.scheduling_timezone, tvp.is_default, tvp.description, tvp.icon, tv.template_id, tv.organization_id FROM +SELECT tvp.id, tvp.template_version_id, tvp.name, tvp.created_at, tvp.desired_instances, tvp.invalidate_after_secs, tvp.prebuild_status, tvp.scheduling_timezone, tvp.is_default, tvp.description, tvp.icon, tvp.last_invalidated_at, tv.template_id, tv.organization_id FROM template_version_presets tvp INNER JOIN template_versions tv ON tvp.template_version_id = tv.id WHERE tvp.id = $1 @@ -8807,6 +8997,7 @@ type GetPresetByIDRow struct { IsDefault bool `db:"is_default" json:"is_default"` 
Description string `db:"description" json:"description"` Icon string `db:"icon" json:"icon"` + LastInvalidatedAt sql.NullTime `db:"last_invalidated_at" json:"last_invalidated_at"` TemplateID uuid.NullUUID `db:"template_id" json:"template_id"` OrganizationID uuid.UUID `db:"organization_id" json:"organization_id"` } @@ -8826,6 +9017,7 @@ func (q *sqlQuerier) GetPresetByID(ctx context.Context, presetID uuid.UUID) (Get &i.IsDefault, &i.Description, &i.Icon, + &i.LastInvalidatedAt, &i.TemplateID, &i.OrganizationID, ) @@ -8834,7 +9026,7 @@ func (q *sqlQuerier) GetPresetByID(ctx context.Context, presetID uuid.UUID) (Get const getPresetByWorkspaceBuildID = `-- name: GetPresetByWorkspaceBuildID :one SELECT - template_version_presets.id, template_version_presets.template_version_id, template_version_presets.name, template_version_presets.created_at, template_version_presets.desired_instances, template_version_presets.invalidate_after_secs, template_version_presets.prebuild_status, template_version_presets.scheduling_timezone, template_version_presets.is_default, template_version_presets.description, template_version_presets.icon + template_version_presets.id, template_version_presets.template_version_id, template_version_presets.name, template_version_presets.created_at, template_version_presets.desired_instances, template_version_presets.invalidate_after_secs, template_version_presets.prebuild_status, template_version_presets.scheduling_timezone, template_version_presets.is_default, template_version_presets.description, template_version_presets.icon, template_version_presets.last_invalidated_at FROM template_version_presets INNER JOIN workspace_builds ON workspace_builds.template_version_preset_id = template_version_presets.id @@ -8857,6 +9049,7 @@ func (q *sqlQuerier) GetPresetByWorkspaceBuildID(ctx context.Context, workspaceB &i.IsDefault, &i.Description, &i.Icon, + &i.LastInvalidatedAt, ) return i, err } @@ -8938,7 +9131,7 @@ func (q *sqlQuerier) 
GetPresetParametersByTemplateVersionID(ctx context.Context, const getPresetsByTemplateVersionID = `-- name: GetPresetsByTemplateVersionID :many SELECT - id, template_version_id, name, created_at, desired_instances, invalidate_after_secs, prebuild_status, scheduling_timezone, is_default, description, icon + id, template_version_id, name, created_at, desired_instances, invalidate_after_secs, prebuild_status, scheduling_timezone, is_default, description, icon, last_invalidated_at FROM template_version_presets WHERE @@ -8966,6 +9159,7 @@ func (q *sqlQuerier) GetPresetsByTemplateVersionID(ctx context.Context, template &i.IsDefault, &i.Description, &i.Icon, + &i.LastInvalidatedAt, ); err != nil { return nil, err } @@ -8991,7 +9185,8 @@ INSERT INTO template_version_presets ( scheduling_timezone, is_default, description, - icon + icon, + last_invalidated_at ) VALUES ( $1, @@ -9003,8 +9198,9 @@ VALUES ( $7, $8, $9, - $10 -) RETURNING id, template_version_id, name, created_at, desired_instances, invalidate_after_secs, prebuild_status, scheduling_timezone, is_default, description, icon + $10, + $11 +) RETURNING id, template_version_id, name, created_at, desired_instances, invalidate_after_secs, prebuild_status, scheduling_timezone, is_default, description, icon, last_invalidated_at ` type InsertPresetParams struct { @@ -9018,6 +9214,7 @@ type InsertPresetParams struct { IsDefault bool `db:"is_default" json:"is_default"` Description string `db:"description" json:"description"` Icon string `db:"icon" json:"icon"` + LastInvalidatedAt sql.NullTime `db:"last_invalidated_at" json:"last_invalidated_at"` } func (q *sqlQuerier) InsertPreset(ctx context.Context, arg InsertPresetParams) (TemplateVersionPreset, error) { @@ -9032,6 +9229,7 @@ func (q *sqlQuerier) InsertPreset(ctx context.Context, arg InsertPresetParams) ( arg.IsDefault, arg.Description, arg.Icon, + arg.LastInvalidatedAt, ) var i TemplateVersionPreset err := row.Scan( @@ -9046,6 +9244,7 @@ func (q *sqlQuerier) 
InsertPreset(ctx context.Context, arg InsertPresetParams) ( &i.IsDefault, &i.Description, &i.Icon, + &i.LastInvalidatedAt, ) return i, err } @@ -9141,6 +9340,57 @@ func (q *sqlQuerier) UpdatePresetPrebuildStatus(ctx context.Context, arg UpdateP return err } +const updatePresetsLastInvalidatedAt = `-- name: UpdatePresetsLastInvalidatedAt :many +UPDATE + template_version_presets tvp +SET + last_invalidated_at = $1 +FROM + templates t + JOIN template_versions tv ON tv.id = t.active_version_id +WHERE + t.id = $2 + AND tvp.template_version_id = tv.id +RETURNING + t.name AS template_name, + tv.name AS template_version_name, + tvp.name AS template_version_preset_name +` + +type UpdatePresetsLastInvalidatedAtParams struct { + LastInvalidatedAt sql.NullTime `db:"last_invalidated_at" json:"last_invalidated_at"` + TemplateID uuid.UUID `db:"template_id" json:"template_id"` +} + +type UpdatePresetsLastInvalidatedAtRow struct { + TemplateName string `db:"template_name" json:"template_name"` + TemplateVersionName string `db:"template_version_name" json:"template_version_name"` + TemplateVersionPresetName string `db:"template_version_preset_name" json:"template_version_preset_name"` +} + +func (q *sqlQuerier) UpdatePresetsLastInvalidatedAt(ctx context.Context, arg UpdatePresetsLastInvalidatedAtParams) ([]UpdatePresetsLastInvalidatedAtRow, error) { + rows, err := q.db.QueryContext(ctx, updatePresetsLastInvalidatedAt, arg.LastInvalidatedAt, arg.TemplateID) + if err != nil { + return nil, err + } + defer rows.Close() + var items []UpdatePresetsLastInvalidatedAtRow + for rows.Next() { + var i UpdatePresetsLastInvalidatedAtRow + if err := rows.Scan(&i.TemplateName, &i.TemplateVersionName, &i.TemplateVersionPresetName); err != nil { + return nil, err + } + items = append(items, i) + } + if err := rows.Close(); err != nil { + return nil, err + } + if err := rows.Err(); err != nil { + return nil, err + } + return items, nil +} + const deleteOldProvisionerDaemons = `-- name: 
DeleteOldProvisionerDaemons :exec DELETE FROM provisioner_daemons WHERE ( (created_at < (NOW() - INTERVAL '7 days') AND last_seen_at IS NULL) OR @@ -12937,7 +13187,7 @@ SET WHERE id = $2::uuid AND deleted_at IS NULL -RETURNING id, organization_id, owner_id, name, workspace_id, template_version_id, template_parameters, prompt, created_at, deleted_at +RETURNING id, organization_id, owner_id, name, workspace_id, template_version_id, template_parameters, prompt, created_at, deleted_at, display_name ` type DeleteTaskParams struct { @@ -12959,12 +13209,13 @@ func (q *sqlQuerier) DeleteTask(ctx context.Context, arg DeleteTaskParams) (Task &i.Prompt, &i.CreatedAt, &i.DeletedAt, + &i.DisplayName, ) return i, err } const getTaskByID = `-- name: GetTaskByID :one -SELECT id, organization_id, owner_id, name, workspace_id, template_version_id, template_parameters, prompt, created_at, deleted_at, status, workspace_build_number, workspace_agent_id, workspace_app_id, owner_username, owner_name, owner_avatar_url FROM tasks_with_status WHERE id = $1::uuid +SELECT id, organization_id, owner_id, name, workspace_id, template_version_id, template_parameters, prompt, created_at, deleted_at, display_name, status, status_debug, workspace_build_number, workspace_agent_id, workspace_app_id, workspace_agent_lifecycle_state, workspace_app_health, owner_username, owner_name, owner_avatar_url FROM tasks_with_status WHERE id = $1::uuid ` func (q *sqlQuerier) GetTaskByID(ctx context.Context, id uuid.UUID) (Task, error) { @@ -12981,10 +13232,56 @@ func (q *sqlQuerier) GetTaskByID(ctx context.Context, id uuid.UUID) (Task, error &i.Prompt, &i.CreatedAt, &i.DeletedAt, + &i.DisplayName, &i.Status, + &i.StatusDebug, &i.WorkspaceBuildNumber, &i.WorkspaceAgentID, &i.WorkspaceAppID, + &i.WorkspaceAgentLifecycleState, + &i.WorkspaceAppHealth, + &i.OwnerUsername, + &i.OwnerName, + &i.OwnerAvatarUrl, + ) + return i, err +} + +const getTaskByOwnerIDAndName = `-- name: GetTaskByOwnerIDAndName :one +SELECT id, 
organization_id, owner_id, name, workspace_id, template_version_id, template_parameters, prompt, created_at, deleted_at, display_name, status, status_debug, workspace_build_number, workspace_agent_id, workspace_app_id, workspace_agent_lifecycle_state, workspace_app_health, owner_username, owner_name, owner_avatar_url FROM tasks_with_status +WHERE + owner_id = $1::uuid + AND deleted_at IS NULL + AND LOWER(name) = LOWER($2::text) +` + +type GetTaskByOwnerIDAndNameParams struct { + OwnerID uuid.UUID `db:"owner_id" json:"owner_id"` + Name string `db:"name" json:"name"` +} + +func (q *sqlQuerier) GetTaskByOwnerIDAndName(ctx context.Context, arg GetTaskByOwnerIDAndNameParams) (Task, error) { + row := q.db.QueryRowContext(ctx, getTaskByOwnerIDAndName, arg.OwnerID, arg.Name) + var i Task + err := row.Scan( + &i.ID, + &i.OrganizationID, + &i.OwnerID, + &i.Name, + &i.WorkspaceID, + &i.TemplateVersionID, + &i.TemplateParameters, + &i.Prompt, + &i.CreatedAt, + &i.DeletedAt, + &i.DisplayName, + &i.Status, + &i.StatusDebug, + &i.WorkspaceBuildNumber, + &i.WorkspaceAgentID, + &i.WorkspaceAppID, + &i.WorkspaceAgentLifecycleState, + &i.WorkspaceAppHealth, &i.OwnerUsername, &i.OwnerName, &i.OwnerAvatarUrl, @@ -12993,7 +13290,7 @@ func (q *sqlQuerier) GetTaskByID(ctx context.Context, id uuid.UUID) (Task, error } const getTaskByWorkspaceID = `-- name: GetTaskByWorkspaceID :one -SELECT id, organization_id, owner_id, name, workspace_id, template_version_id, template_parameters, prompt, created_at, deleted_at, status, workspace_build_number, workspace_agent_id, workspace_app_id, owner_username, owner_name, owner_avatar_url FROM tasks_with_status WHERE workspace_id = $1::uuid +SELECT id, organization_id, owner_id, name, workspace_id, template_version_id, template_parameters, prompt, created_at, deleted_at, display_name, status, status_debug, workspace_build_number, workspace_agent_id, workspace_app_id, workspace_agent_lifecycle_state, workspace_app_health, owner_username, owner_name, 
owner_avatar_url FROM tasks_with_status WHERE workspace_id = $1::uuid ` func (q *sqlQuerier) GetTaskByWorkspaceID(ctx context.Context, workspaceID uuid.UUID) (Task, error) { @@ -13010,10 +13307,14 @@ func (q *sqlQuerier) GetTaskByWorkspaceID(ctx context.Context, workspaceID uuid. &i.Prompt, &i.CreatedAt, &i.DeletedAt, + &i.DisplayName, &i.Status, + &i.StatusDebug, &i.WorkspaceBuildNumber, &i.WorkspaceAgentID, &i.WorkspaceAppID, + &i.WorkspaceAgentLifecycleState, + &i.WorkspaceAppHealth, &i.OwnerUsername, &i.OwnerName, &i.OwnerAvatarUrl, @@ -13023,10 +13324,10 @@ func (q *sqlQuerier) GetTaskByWorkspaceID(ctx context.Context, workspaceID uuid. const insertTask = `-- name: InsertTask :one INSERT INTO tasks - (id, organization_id, owner_id, name, workspace_id, template_version_id, template_parameters, prompt, created_at) + (id, organization_id, owner_id, name, display_name, workspace_id, template_version_id, template_parameters, prompt, created_at) VALUES - ($1, $2, $3, $4, $5, $6, $7, $8, $9) -RETURNING id, organization_id, owner_id, name, workspace_id, template_version_id, template_parameters, prompt, created_at, deleted_at + ($1, $2, $3, $4, $5, $6, $7, $8, $9, $10) +RETURNING id, organization_id, owner_id, name, workspace_id, template_version_id, template_parameters, prompt, created_at, deleted_at, display_name ` type InsertTaskParams struct { @@ -13034,6 +13335,7 @@ type InsertTaskParams struct { OrganizationID uuid.UUID `db:"organization_id" json:"organization_id"` OwnerID uuid.UUID `db:"owner_id" json:"owner_id"` Name string `db:"name" json:"name"` + DisplayName string `db:"display_name" json:"display_name"` WorkspaceID uuid.NullUUID `db:"workspace_id" json:"workspace_id"` TemplateVersionID uuid.UUID `db:"template_version_id" json:"template_version_id"` TemplateParameters json.RawMessage `db:"template_parameters" json:"template_parameters"` @@ -13047,6 +13349,7 @@ func (q *sqlQuerier) InsertTask(ctx context.Context, arg InsertTaskParams) (Task 
arg.OrganizationID, arg.OwnerID, arg.Name, + arg.DisplayName, arg.WorkspaceID, arg.TemplateVersionID, arg.TemplateParameters, @@ -13065,12 +13368,13 @@ func (q *sqlQuerier) InsertTask(ctx context.Context, arg InsertTaskParams) (Task &i.Prompt, &i.CreatedAt, &i.DeletedAt, + &i.DisplayName, ) return i, err } const listTasks = `-- name: ListTasks :many -SELECT id, organization_id, owner_id, name, workspace_id, template_version_id, template_parameters, prompt, created_at, deleted_at, status, workspace_build_number, workspace_agent_id, workspace_app_id, owner_username, owner_name, owner_avatar_url FROM tasks_with_status tws +SELECT id, organization_id, owner_id, name, workspace_id, template_version_id, template_parameters, prompt, created_at, deleted_at, display_name, status, status_debug, workspace_build_number, workspace_agent_id, workspace_app_id, workspace_agent_lifecycle_state, workspace_app_health, owner_username, owner_name, owner_avatar_url FROM tasks_with_status tws WHERE tws.deleted_at IS NULL AND CASE WHEN $1::UUID != '00000000-0000-0000-0000-000000000000' THEN tws.owner_id = $1::UUID ELSE TRUE END AND CASE WHEN $2::UUID != '00000000-0000-0000-0000-000000000000' THEN tws.organization_id = $2::UUID ELSE TRUE END @@ -13104,10 +13408,14 @@ func (q *sqlQuerier) ListTasks(ctx context.Context, arg ListTasksParams) ([]Task &i.Prompt, &i.CreatedAt, &i.DeletedAt, + &i.DisplayName, &i.Status, + &i.StatusDebug, &i.WorkspaceBuildNumber, &i.WorkspaceAgentID, &i.WorkspaceAppID, + &i.WorkspaceAgentLifecycleState, + &i.WorkspaceAppHealth, &i.OwnerUsername, &i.OwnerName, &i.OwnerAvatarUrl, @@ -13125,6 +13433,41 @@ func (q *sqlQuerier) ListTasks(ctx context.Context, arg ListTasksParams) ([]Task return items, nil } +const updateTaskPrompt = `-- name: UpdateTaskPrompt :one +UPDATE + tasks +SET + prompt = $1::text +WHERE + id = $2::uuid + AND deleted_at IS NULL +RETURNING id, organization_id, owner_id, name, workspace_id, template_version_id, template_parameters, prompt, 
created_at, deleted_at, display_name +` + +type UpdateTaskPromptParams struct { + Prompt string `db:"prompt" json:"prompt"` + ID uuid.UUID `db:"id" json:"id"` +} + +func (q *sqlQuerier) UpdateTaskPrompt(ctx context.Context, arg UpdateTaskPromptParams) (TaskTable, error) { + row := q.db.QueryRowContext(ctx, updateTaskPrompt, arg.Prompt, arg.ID) + var i TaskTable + err := row.Scan( + &i.ID, + &i.OrganizationID, + &i.OwnerID, + &i.Name, + &i.WorkspaceID, + &i.TemplateVersionID, + &i.TemplateParameters, + &i.Prompt, + &i.CreatedAt, + &i.DeletedAt, + &i.DisplayName, + ) + return i, err +} + const updateTaskWorkspaceID = `-- name: UpdateTaskWorkspaceID :one UPDATE tasks @@ -13142,7 +13485,7 @@ WHERE AND w.id = $2 AND tv.id = tasks.template_version_id RETURNING - tasks.id, tasks.organization_id, tasks.owner_id, tasks.name, tasks.workspace_id, tasks.template_version_id, tasks.template_parameters, tasks.prompt, tasks.created_at, tasks.deleted_at + tasks.id, tasks.organization_id, tasks.owner_id, tasks.name, tasks.workspace_id, tasks.template_version_id, tasks.template_parameters, tasks.prompt, tasks.created_at, tasks.deleted_at, tasks.display_name ` type UpdateTaskWorkspaceIDParams struct { @@ -13164,6 +13507,7 @@ func (q *sqlQuerier) UpdateTaskWorkspaceID(ctx context.Context, arg UpdateTaskWo &i.Prompt, &i.CreatedAt, &i.DeletedAt, + &i.DisplayName, ) return i, err } @@ -13376,7 +13720,7 @@ func (q *sqlQuerier) GetTemplateAverageBuildTime(ctx context.Context, templateID const getTemplateByID = `-- name: GetTemplateByID :one SELECT - id, created_at, updated_at, organization_id, deleted, name, provisioner, active_version_id, description, default_ttl, created_by, icon, user_acl, group_acl, display_name, allow_user_cancel_workspace_jobs, allow_user_autostart, allow_user_autostop, failure_ttl, time_til_dormant, time_til_dormant_autodelete, autostop_requirement_days_of_week, autostop_requirement_weeks, autostart_block_days_of_week, require_active_version, deprecated, 
activity_bump, max_port_sharing_level, use_classic_parameter_flow, cors_behavior, created_by_avatar_url, created_by_username, created_by_name, organization_name, organization_display_name, organization_icon + id, created_at, updated_at, organization_id, deleted, name, provisioner, active_version_id, description, default_ttl, created_by, icon, user_acl, group_acl, display_name, allow_user_cancel_workspace_jobs, allow_user_autostart, allow_user_autostop, failure_ttl, time_til_dormant, time_til_dormant_autodelete, autostop_requirement_days_of_week, autostop_requirement_weeks, autostart_block_days_of_week, require_active_version, deprecated, activity_bump, max_port_sharing_level, use_classic_parameter_flow, cors_behavior, use_terraform_workspace_cache, created_by_avatar_url, created_by_username, created_by_name, organization_name, organization_display_name, organization_icon FROM template_with_names WHERE @@ -13419,6 +13763,7 @@ func (q *sqlQuerier) GetTemplateByID(ctx context.Context, id uuid.UUID) (Templat &i.MaxPortSharingLevel, &i.UseClassicParameterFlow, &i.CorsBehavior, + &i.UseTerraformWorkspaceCache, &i.CreatedByAvatarURL, &i.CreatedByUsername, &i.CreatedByName, @@ -13431,7 +13776,7 @@ func (q *sqlQuerier) GetTemplateByID(ctx context.Context, id uuid.UUID) (Templat const getTemplateByOrganizationAndName = `-- name: GetTemplateByOrganizationAndName :one SELECT - id, created_at, updated_at, organization_id, deleted, name, provisioner, active_version_id, description, default_ttl, created_by, icon, user_acl, group_acl, display_name, allow_user_cancel_workspace_jobs, allow_user_autostart, allow_user_autostop, failure_ttl, time_til_dormant, time_til_dormant_autodelete, autostop_requirement_days_of_week, autostop_requirement_weeks, autostart_block_days_of_week, require_active_version, deprecated, activity_bump, max_port_sharing_level, use_classic_parameter_flow, cors_behavior, created_by_avatar_url, created_by_username, created_by_name, organization_name, 
organization_display_name, organization_icon + id, created_at, updated_at, organization_id, deleted, name, provisioner, active_version_id, description, default_ttl, created_by, icon, user_acl, group_acl, display_name, allow_user_cancel_workspace_jobs, allow_user_autostart, allow_user_autostop, failure_ttl, time_til_dormant, time_til_dormant_autodelete, autostop_requirement_days_of_week, autostop_requirement_weeks, autostart_block_days_of_week, require_active_version, deprecated, activity_bump, max_port_sharing_level, use_classic_parameter_flow, cors_behavior, use_terraform_workspace_cache, created_by_avatar_url, created_by_username, created_by_name, organization_name, organization_display_name, organization_icon FROM template_with_names AS templates WHERE @@ -13482,6 +13827,7 @@ func (q *sqlQuerier) GetTemplateByOrganizationAndName(ctx context.Context, arg G &i.MaxPortSharingLevel, &i.UseClassicParameterFlow, &i.CorsBehavior, + &i.UseTerraformWorkspaceCache, &i.CreatedByAvatarURL, &i.CreatedByUsername, &i.CreatedByName, @@ -13493,7 +13839,7 @@ func (q *sqlQuerier) GetTemplateByOrganizationAndName(ctx context.Context, arg G } const getTemplates = `-- name: GetTemplates :many -SELECT id, created_at, updated_at, organization_id, deleted, name, provisioner, active_version_id, description, default_ttl, created_by, icon, user_acl, group_acl, display_name, allow_user_cancel_workspace_jobs, allow_user_autostart, allow_user_autostop, failure_ttl, time_til_dormant, time_til_dormant_autodelete, autostop_requirement_days_of_week, autostop_requirement_weeks, autostart_block_days_of_week, require_active_version, deprecated, activity_bump, max_port_sharing_level, use_classic_parameter_flow, cors_behavior, created_by_avatar_url, created_by_username, created_by_name, organization_name, organization_display_name, organization_icon FROM template_with_names AS templates +SELECT id, created_at, updated_at, organization_id, deleted, name, provisioner, active_version_id, description, 
default_ttl, created_by, icon, user_acl, group_acl, display_name, allow_user_cancel_workspace_jobs, allow_user_autostart, allow_user_autostop, failure_ttl, time_til_dormant, time_til_dormant_autodelete, autostop_requirement_days_of_week, autostop_requirement_weeks, autostart_block_days_of_week, require_active_version, deprecated, activity_bump, max_port_sharing_level, use_classic_parameter_flow, cors_behavior, use_terraform_workspace_cache, created_by_avatar_url, created_by_username, created_by_name, organization_name, organization_display_name, organization_icon FROM template_with_names AS templates ORDER BY (name, id) ASC ` @@ -13537,6 +13883,7 @@ func (q *sqlQuerier) GetTemplates(ctx context.Context) ([]Template, error) { &i.MaxPortSharingLevel, &i.UseClassicParameterFlow, &i.CorsBehavior, + &i.UseTerraformWorkspaceCache, &i.CreatedByAvatarURL, &i.CreatedByUsername, &i.CreatedByName, @@ -13559,7 +13906,7 @@ func (q *sqlQuerier) GetTemplates(ctx context.Context) ([]Template, error) { const getTemplatesWithFilter = `-- name: GetTemplatesWithFilter :many SELECT - t.id, t.created_at, t.updated_at, t.organization_id, t.deleted, t.name, t.provisioner, t.active_version_id, t.description, t.default_ttl, t.created_by, t.icon, t.user_acl, t.group_acl, t.display_name, t.allow_user_cancel_workspace_jobs, t.allow_user_autostart, t.allow_user_autostop, t.failure_ttl, t.time_til_dormant, t.time_til_dormant_autodelete, t.autostop_requirement_days_of_week, t.autostop_requirement_weeks, t.autostart_block_days_of_week, t.require_active_version, t.deprecated, t.activity_bump, t.max_port_sharing_level, t.use_classic_parameter_flow, t.cors_behavior, t.created_by_avatar_url, t.created_by_username, t.created_by_name, t.organization_name, t.organization_display_name, t.organization_icon + t.id, t.created_at, t.updated_at, t.organization_id, t.deleted, t.name, t.provisioner, t.active_version_id, t.description, t.default_ttl, t.created_by, t.icon, t.user_acl, t.group_acl, t.display_name, 
t.allow_user_cancel_workspace_jobs, t.allow_user_autostart, t.allow_user_autostop, t.failure_ttl, t.time_til_dormant, t.time_til_dormant_autodelete, t.autostop_requirement_days_of_week, t.autostop_requirement_weeks, t.autostart_block_days_of_week, t.require_active_version, t.deprecated, t.activity_bump, t.max_port_sharing_level, t.use_classic_parameter_flow, t.cors_behavior, t.use_terraform_workspace_cache, t.created_by_avatar_url, t.created_by_username, t.created_by_name, t.organization_name, t.organization_display_name, t.organization_icon FROM template_with_names AS t LEFT JOIN @@ -13718,6 +14065,7 @@ func (q *sqlQuerier) GetTemplatesWithFilter(ctx context.Context, arg GetTemplate &i.MaxPortSharingLevel, &i.UseClassicParameterFlow, &i.CorsBehavior, + &i.UseTerraformWorkspaceCache, &i.CreatedByAvatarURL, &i.CreatedByUsername, &i.CreatedByName, @@ -13903,7 +14251,8 @@ SET group_acl = $8, max_port_sharing_level = $9, use_classic_parameter_flow = $10, - cors_behavior = $11 + cors_behavior = $11, + use_terraform_workspace_cache = $12 WHERE id = $1 ` @@ -13920,6 +14269,7 @@ type UpdateTemplateMetaByIDParams struct { MaxPortSharingLevel AppSharingLevel `db:"max_port_sharing_level" json:"max_port_sharing_level"` UseClassicParameterFlow bool `db:"use_classic_parameter_flow" json:"use_classic_parameter_flow"` CorsBehavior CorsBehavior `db:"cors_behavior" json:"cors_behavior"` + UseTerraformWorkspaceCache bool `db:"use_terraform_workspace_cache" json:"use_terraform_workspace_cache"` } func (q *sqlQuerier) UpdateTemplateMetaByID(ctx context.Context, arg UpdateTemplateMetaByIDParams) error { @@ -13935,6 +14285,7 @@ func (q *sqlQuerier) UpdateTemplateMetaByID(ctx context.Context, arg UpdateTempl arg.MaxPortSharingLevel, arg.UseClassicParameterFlow, arg.CorsBehavior, + arg.UseTerraformWorkspaceCache, ) return err } @@ -15973,6 +16324,23 @@ func (q *sqlQuerier) GetUserCount(ctx context.Context, includeSystem bool) (int6 return count, err } +const 
getUserTaskNotificationAlertDismissed = `-- name: GetUserTaskNotificationAlertDismissed :one +SELECT + value::boolean as task_notification_alert_dismissed +FROM + user_configs +WHERE + user_id = $1 + AND key = 'preference_task_notification_alert_dismissed' +` + +func (q *sqlQuerier) GetUserTaskNotificationAlertDismissed(ctx context.Context, userID uuid.UUID) (bool, error) { + row := q.db.QueryRowContext(ctx, getUserTaskNotificationAlertDismissed, userID) + var task_notification_alert_dismissed bool + err := row.Scan(&task_notification_alert_dismissed) + return task_notification_alert_dismissed, err +} + const getUserTerminalFont = `-- name: GetUserTerminalFont :one SELECT value as terminal_font @@ -16725,6 +17093,33 @@ func (q *sqlQuerier) UpdateUserStatus(ctx context.Context, arg UpdateUserStatusP return i, err } +const updateUserTaskNotificationAlertDismissed = `-- name: UpdateUserTaskNotificationAlertDismissed :one +INSERT INTO + user_configs (user_id, key, value) +VALUES + ($1, 'preference_task_notification_alert_dismissed', ($2::boolean)::text) +ON CONFLICT + ON CONSTRAINT user_configs_pkey +DO UPDATE +SET + value = $2 +WHERE user_configs.user_id = $1 + AND user_configs.key = 'preference_task_notification_alert_dismissed' +RETURNING value::boolean AS task_notification_alert_dismissed +` + +type UpdateUserTaskNotificationAlertDismissedParams struct { + UserID uuid.UUID `db:"user_id" json:"user_id"` + TaskNotificationAlertDismissed bool `db:"task_notification_alert_dismissed" json:"task_notification_alert_dismissed"` +} + +func (q *sqlQuerier) UpdateUserTaskNotificationAlertDismissed(ctx context.Context, arg UpdateUserTaskNotificationAlertDismissedParams) (bool, error) { + row := q.db.QueryRowContext(ctx, updateUserTaskNotificationAlertDismissed, arg.UserID, arg.TaskNotificationAlertDismissed) + var task_notification_alert_dismissed bool + err := row.Scan(&task_notification_alert_dismissed) + return task_notification_alert_dismissed, err +} + const 
updateUserTerminalFont = `-- name: UpdateUserTerminalFont :one INSERT INTO user_configs (user_id, key, value) @@ -17468,7 +17863,8 @@ const getWorkspaceAgentAndLatestBuildByAuthToken = `-- name: GetWorkspaceAgentAn SELECT workspaces.id, workspaces.created_at, workspaces.updated_at, workspaces.owner_id, workspaces.organization_id, workspaces.template_id, workspaces.deleted, workspaces.name, workspaces.autostart_schedule, workspaces.ttl, workspaces.last_used_at, workspaces.dormant_at, workspaces.deleting_at, workspaces.automatic_updates, workspaces.favorite, workspaces.next_start_at, workspaces.group_acl, workspaces.user_acl, workspace_agents.id, workspace_agents.created_at, workspace_agents.updated_at, workspace_agents.name, workspace_agents.first_connected_at, workspace_agents.last_connected_at, workspace_agents.disconnected_at, workspace_agents.resource_id, workspace_agents.auth_token, workspace_agents.auth_instance_id, workspace_agents.architecture, workspace_agents.environment_variables, workspace_agents.operating_system, workspace_agents.instance_metadata, workspace_agents.resource_metadata, workspace_agents.directory, workspace_agents.version, workspace_agents.last_connected_replica_id, workspace_agents.connection_timeout_seconds, workspace_agents.troubleshooting_url, workspace_agents.motd_file, workspace_agents.lifecycle_state, workspace_agents.expanded_directory, workspace_agents.logs_length, workspace_agents.logs_overflowed, workspace_agents.started_at, workspace_agents.ready_at, workspace_agents.subsystems, workspace_agents.display_apps, workspace_agents.api_version, workspace_agents.display_order, workspace_agents.parent_id, workspace_agents.api_key_scope, workspace_agents.deleted, - workspace_build_with_user.id, workspace_build_with_user.created_at, workspace_build_with_user.updated_at, workspace_build_with_user.workspace_id, workspace_build_with_user.template_version_id, workspace_build_with_user.build_number, workspace_build_with_user.transition, 
workspace_build_with_user.initiator_id, workspace_build_with_user.provisioner_state, workspace_build_with_user.job_id, workspace_build_with_user.deadline, workspace_build_with_user.reason, workspace_build_with_user.daily_cost, workspace_build_with_user.max_deadline, workspace_build_with_user.template_version_preset_id, workspace_build_with_user.has_ai_task, workspace_build_with_user.ai_task_sidebar_app_id, workspace_build_with_user.has_external_agent, workspace_build_with_user.initiator_by_avatar_url, workspace_build_with_user.initiator_by_username, workspace_build_with_user.initiator_by_name + workspace_build_with_user.id, workspace_build_with_user.created_at, workspace_build_with_user.updated_at, workspace_build_with_user.workspace_id, workspace_build_with_user.template_version_id, workspace_build_with_user.build_number, workspace_build_with_user.transition, workspace_build_with_user.initiator_id, workspace_build_with_user.provisioner_state, workspace_build_with_user.job_id, workspace_build_with_user.deadline, workspace_build_with_user.reason, workspace_build_with_user.daily_cost, workspace_build_with_user.max_deadline, workspace_build_with_user.template_version_preset_id, workspace_build_with_user.has_ai_task, workspace_build_with_user.has_external_agent, workspace_build_with_user.initiator_by_avatar_url, workspace_build_with_user.initiator_by_username, workspace_build_with_user.initiator_by_name, + tasks.id AS task_id FROM workspace_agents JOIN @@ -17483,6 +17879,10 @@ JOIN workspaces ON workspace_build_with_user.workspace_id = workspaces.id +LEFT JOIN + tasks +ON + tasks.workspace_id = workspaces.id WHERE -- This should only match 1 agent, so 1 returned row or 0. 
workspace_agents.auth_token = $1::uuid @@ -17506,6 +17906,7 @@ type GetWorkspaceAgentAndLatestBuildByAuthTokenRow struct { WorkspaceTable WorkspaceTable `db:"workspace_table" json:"workspace_table"` WorkspaceAgent WorkspaceAgent `db:"workspace_agent" json:"workspace_agent"` WorkspaceBuild WorkspaceBuild `db:"workspace_build" json:"workspace_build"` + TaskID uuid.NullUUID `db:"task_id" json:"task_id"` } func (q *sqlQuerier) GetWorkspaceAgentAndLatestBuildByAuthToken(ctx context.Context, authToken uuid.UUID) (GetWorkspaceAgentAndLatestBuildByAuthTokenRow, error) { @@ -17580,11 +17981,11 @@ func (q *sqlQuerier) GetWorkspaceAgentAndLatestBuildByAuthToken(ctx context.Cont &i.WorkspaceBuild.MaxDeadline, &i.WorkspaceBuild.TemplateVersionPresetID, &i.WorkspaceBuild.HasAITask, - &i.WorkspaceBuild.AITaskSidebarAppID, &i.WorkspaceBuild.HasExternalAgent, &i.WorkspaceBuild.InitiatorByAvatarUrl, &i.WorkspaceBuild.InitiatorByUsername, &i.WorkspaceBuild.InitiatorByName, + &i.TaskID, ) return i, err } @@ -19684,43 +20085,28 @@ func (q *sqlQuerier) UpsertWorkspaceAppAuditSession(ctx context.Context, arg Ups return new_or_stale, err } -const getLatestWorkspaceAppStatusesByAppID = `-- name: GetLatestWorkspaceAppStatusesByAppID :many +const getLatestWorkspaceAppStatusByAppID = `-- name: GetLatestWorkspaceAppStatusByAppID :one SELECT id, created_at, agent_id, app_id, workspace_id, state, message, uri FROM workspace_app_statuses WHERE app_id = $1::uuid ORDER BY created_at DESC, id DESC +LIMIT 1 ` -func (q *sqlQuerier) GetLatestWorkspaceAppStatusesByAppID(ctx context.Context, appID uuid.UUID) ([]WorkspaceAppStatus, error) { - rows, err := q.db.QueryContext(ctx, getLatestWorkspaceAppStatusesByAppID, appID) - if err != nil { - return nil, err - } - defer rows.Close() - var items []WorkspaceAppStatus - for rows.Next() { - var i WorkspaceAppStatus - if err := rows.Scan( - &i.ID, - &i.CreatedAt, - &i.AgentID, - &i.AppID, - &i.WorkspaceID, - &i.State, - &i.Message, - &i.Uri, - ); err != nil { - 
return nil, err - } - items = append(items, i) - } - if err := rows.Close(); err != nil { - return nil, err - } - if err := rows.Err(); err != nil { - return nil, err - } - return items, nil +func (q *sqlQuerier) GetLatestWorkspaceAppStatusByAppID(ctx context.Context, appID uuid.UUID) (WorkspaceAppStatus, error) { + row := q.db.QueryRowContext(ctx, getLatestWorkspaceAppStatusByAppID, appID) + var i WorkspaceAppStatus + err := row.Scan( + &i.ID, + &i.CreatedAt, + &i.AgentID, + &i.AppID, + &i.WorkspaceID, + &i.State, + &i.Message, + &i.Uri, + ) + return i, err } const getLatestWorkspaceAppStatusesByWorkspaceIDs = `-- name: GetLatestWorkspaceAppStatusesByWorkspaceIDs :many @@ -20380,7 +20766,7 @@ func (q *sqlQuerier) InsertWorkspaceBuildParameters(ctx context.Context, arg Ins } const getActiveWorkspaceBuildsByTemplateID = `-- name: GetActiveWorkspaceBuildsByTemplateID :many -SELECT wb.id, wb.created_at, wb.updated_at, wb.workspace_id, wb.template_version_id, wb.build_number, wb.transition, wb.initiator_id, wb.provisioner_state, wb.job_id, wb.deadline, wb.reason, wb.daily_cost, wb.max_deadline, wb.template_version_preset_id, wb.has_ai_task, wb.ai_task_sidebar_app_id, wb.has_external_agent, wb.initiator_by_avatar_url, wb.initiator_by_username, wb.initiator_by_name +SELECT wb.id, wb.created_at, wb.updated_at, wb.workspace_id, wb.template_version_id, wb.build_number, wb.transition, wb.initiator_id, wb.provisioner_state, wb.job_id, wb.deadline, wb.reason, wb.daily_cost, wb.max_deadline, wb.template_version_preset_id, wb.has_ai_task, wb.has_external_agent, wb.initiator_by_avatar_url, wb.initiator_by_username, wb.initiator_by_name FROM ( SELECT workspace_id, MAX(build_number) as max_build_number @@ -20436,7 +20822,6 @@ func (q *sqlQuerier) GetActiveWorkspaceBuildsByTemplateID(ctx context.Context, t &i.MaxDeadline, &i.TemplateVersionPresetID, &i.HasAITask, - &i.AITaskSidebarAppID, &i.HasExternalAgent, &i.InitiatorByAvatarUrl, &i.InitiatorByUsername, @@ -20537,7 +20922,7 @@ 
func (q *sqlQuerier) GetFailedWorkspaceBuildsByTemplateID(ctx context.Context, a const getLatestWorkspaceBuildByWorkspaceID = `-- name: GetLatestWorkspaceBuildByWorkspaceID :one SELECT - id, created_at, updated_at, workspace_id, template_version_id, build_number, transition, initiator_id, provisioner_state, job_id, deadline, reason, daily_cost, max_deadline, template_version_preset_id, has_ai_task, ai_task_sidebar_app_id, has_external_agent, initiator_by_avatar_url, initiator_by_username, initiator_by_name + id, created_at, updated_at, workspace_id, template_version_id, build_number, transition, initiator_id, provisioner_state, job_id, deadline, reason, daily_cost, max_deadline, template_version_preset_id, has_ai_task, has_external_agent, initiator_by_avatar_url, initiator_by_username, initiator_by_name FROM workspace_build_with_user AS workspace_builds WHERE @@ -20568,7 +20953,6 @@ func (q *sqlQuerier) GetLatestWorkspaceBuildByWorkspaceID(ctx context.Context, w &i.MaxDeadline, &i.TemplateVersionPresetID, &i.HasAITask, - &i.AITaskSidebarAppID, &i.HasExternalAgent, &i.InitiatorByAvatarUrl, &i.InitiatorByUsername, @@ -20580,7 +20964,7 @@ func (q *sqlQuerier) GetLatestWorkspaceBuildByWorkspaceID(ctx context.Context, w const getLatestWorkspaceBuildsByWorkspaceIDs = `-- name: GetLatestWorkspaceBuildsByWorkspaceIDs :many SELECT DISTINCT ON (workspace_id) - id, created_at, updated_at, workspace_id, template_version_id, build_number, transition, initiator_id, provisioner_state, job_id, deadline, reason, daily_cost, max_deadline, template_version_preset_id, has_ai_task, ai_task_sidebar_app_id, has_external_agent, initiator_by_avatar_url, initiator_by_username, initiator_by_name + id, created_at, updated_at, workspace_id, template_version_id, build_number, transition, initiator_id, provisioner_state, job_id, deadline, reason, daily_cost, max_deadline, template_version_preset_id, has_ai_task, has_external_agent, initiator_by_avatar_url, initiator_by_username, 
initiator_by_name FROM workspace_build_with_user AS workspace_builds WHERE @@ -20615,7 +20999,6 @@ func (q *sqlQuerier) GetLatestWorkspaceBuildsByWorkspaceIDs(ctx context.Context, &i.MaxDeadline, &i.TemplateVersionPresetID, &i.HasAITask, - &i.AITaskSidebarAppID, &i.HasExternalAgent, &i.InitiatorByAvatarUrl, &i.InitiatorByUsername, @@ -20636,7 +21019,7 @@ func (q *sqlQuerier) GetLatestWorkspaceBuildsByWorkspaceIDs(ctx context.Context, const getWorkspaceBuildByID = `-- name: GetWorkspaceBuildByID :one SELECT - id, created_at, updated_at, workspace_id, template_version_id, build_number, transition, initiator_id, provisioner_state, job_id, deadline, reason, daily_cost, max_deadline, template_version_preset_id, has_ai_task, ai_task_sidebar_app_id, has_external_agent, initiator_by_avatar_url, initiator_by_username, initiator_by_name + id, created_at, updated_at, workspace_id, template_version_id, build_number, transition, initiator_id, provisioner_state, job_id, deadline, reason, daily_cost, max_deadline, template_version_preset_id, has_ai_task, has_external_agent, initiator_by_avatar_url, initiator_by_username, initiator_by_name FROM workspace_build_with_user AS workspace_builds WHERE @@ -20665,7 +21048,6 @@ func (q *sqlQuerier) GetWorkspaceBuildByID(ctx context.Context, id uuid.UUID) (W &i.MaxDeadline, &i.TemplateVersionPresetID, &i.HasAITask, - &i.AITaskSidebarAppID, &i.HasExternalAgent, &i.InitiatorByAvatarUrl, &i.InitiatorByUsername, @@ -20676,7 +21058,7 @@ func (q *sqlQuerier) GetWorkspaceBuildByID(ctx context.Context, id uuid.UUID) (W const getWorkspaceBuildByJobID = `-- name: GetWorkspaceBuildByJobID :one SELECT - id, created_at, updated_at, workspace_id, template_version_id, build_number, transition, initiator_id, provisioner_state, job_id, deadline, reason, daily_cost, max_deadline, template_version_preset_id, has_ai_task, ai_task_sidebar_app_id, has_external_agent, initiator_by_avatar_url, initiator_by_username, initiator_by_name + id, created_at, updated_at, 
workspace_id, template_version_id, build_number, transition, initiator_id, provisioner_state, job_id, deadline, reason, daily_cost, max_deadline, template_version_preset_id, has_ai_task, has_external_agent, initiator_by_avatar_url, initiator_by_username, initiator_by_name FROM workspace_build_with_user AS workspace_builds WHERE @@ -20705,7 +21087,6 @@ func (q *sqlQuerier) GetWorkspaceBuildByJobID(ctx context.Context, jobID uuid.UU &i.MaxDeadline, &i.TemplateVersionPresetID, &i.HasAITask, - &i.AITaskSidebarAppID, &i.HasExternalAgent, &i.InitiatorByAvatarUrl, &i.InitiatorByUsername, @@ -20716,7 +21097,7 @@ func (q *sqlQuerier) GetWorkspaceBuildByJobID(ctx context.Context, jobID uuid.UU const getWorkspaceBuildByWorkspaceIDAndBuildNumber = `-- name: GetWorkspaceBuildByWorkspaceIDAndBuildNumber :one SELECT - id, created_at, updated_at, workspace_id, template_version_id, build_number, transition, initiator_id, provisioner_state, job_id, deadline, reason, daily_cost, max_deadline, template_version_preset_id, has_ai_task, ai_task_sidebar_app_id, has_external_agent, initiator_by_avatar_url, initiator_by_username, initiator_by_name + id, created_at, updated_at, workspace_id, template_version_id, build_number, transition, initiator_id, provisioner_state, job_id, deadline, reason, daily_cost, max_deadline, template_version_preset_id, has_ai_task, has_external_agent, initiator_by_avatar_url, initiator_by_username, initiator_by_name FROM workspace_build_with_user AS workspace_builds WHERE @@ -20749,7 +21130,6 @@ func (q *sqlQuerier) GetWorkspaceBuildByWorkspaceIDAndBuildNumber(ctx context.Co &i.MaxDeadline, &i.TemplateVersionPresetID, &i.HasAITask, - &i.AITaskSidebarAppID, &i.HasExternalAgent, &i.InitiatorByAvatarUrl, &i.InitiatorByUsername, @@ -20827,7 +21207,7 @@ func (q *sqlQuerier) GetWorkspaceBuildStatsByTemplates(ctx context.Context, sinc const getWorkspaceBuildsByWorkspaceID = `-- name: GetWorkspaceBuildsByWorkspaceID :many SELECT - id, created_at, updated_at, 
workspace_id, template_version_id, build_number, transition, initiator_id, provisioner_state, job_id, deadline, reason, daily_cost, max_deadline, template_version_preset_id, has_ai_task, ai_task_sidebar_app_id, has_external_agent, initiator_by_avatar_url, initiator_by_username, initiator_by_name + id, created_at, updated_at, workspace_id, template_version_id, build_number, transition, initiator_id, provisioner_state, job_id, deadline, reason, daily_cost, max_deadline, template_version_preset_id, has_ai_task, has_external_agent, initiator_by_avatar_url, initiator_by_username, initiator_by_name FROM workspace_build_with_user AS workspace_builds WHERE @@ -20899,7 +21279,6 @@ func (q *sqlQuerier) GetWorkspaceBuildsByWorkspaceID(ctx context.Context, arg Ge &i.MaxDeadline, &i.TemplateVersionPresetID, &i.HasAITask, - &i.AITaskSidebarAppID, &i.HasExternalAgent, &i.InitiatorByAvatarUrl, &i.InitiatorByUsername, @@ -20919,7 +21298,7 @@ func (q *sqlQuerier) GetWorkspaceBuildsByWorkspaceID(ctx context.Context, arg Ge } const getWorkspaceBuildsCreatedAfter = `-- name: GetWorkspaceBuildsCreatedAfter :many -SELECT id, created_at, updated_at, workspace_id, template_version_id, build_number, transition, initiator_id, provisioner_state, job_id, deadline, reason, daily_cost, max_deadline, template_version_preset_id, has_ai_task, ai_task_sidebar_app_id, has_external_agent, initiator_by_avatar_url, initiator_by_username, initiator_by_name FROM workspace_build_with_user WHERE created_at > $1 +SELECT id, created_at, updated_at, workspace_id, template_version_id, build_number, transition, initiator_id, provisioner_state, job_id, deadline, reason, daily_cost, max_deadline, template_version_preset_id, has_ai_task, has_external_agent, initiator_by_avatar_url, initiator_by_username, initiator_by_name FROM workspace_build_with_user WHERE created_at > $1 ` func (q *sqlQuerier) GetWorkspaceBuildsCreatedAfter(ctx context.Context, createdAt time.Time) ([]WorkspaceBuild, error) { @@ -20948,7 
+21327,6 @@ func (q *sqlQuerier) GetWorkspaceBuildsCreatedAfter(ctx context.Context, created &i.MaxDeadline, &i.TemplateVersionPresetID, &i.HasAITask, - &i.AITaskSidebarAppID, &i.HasExternalAgent, &i.InitiatorByAvatarUrl, &i.InitiatorByUsername, @@ -21085,24 +21463,21 @@ UPDATE workspace_builds SET has_ai_task = $1, - ai_task_sidebar_app_id = $2, - has_external_agent = $3, - updated_at = $4::timestamptz -WHERE id = $5::uuid + has_external_agent = $2, + updated_at = $3::timestamptz +WHERE id = $4::uuid ` type UpdateWorkspaceBuildFlagsByIDParams struct { - HasAITask sql.NullBool `db:"has_ai_task" json:"has_ai_task"` - SidebarAppID uuid.NullUUID `db:"sidebar_app_id" json:"sidebar_app_id"` - HasExternalAgent sql.NullBool `db:"has_external_agent" json:"has_external_agent"` - UpdatedAt time.Time `db:"updated_at" json:"updated_at"` - ID uuid.UUID `db:"id" json:"id"` + HasAITask sql.NullBool `db:"has_ai_task" json:"has_ai_task"` + HasExternalAgent sql.NullBool `db:"has_external_agent" json:"has_external_agent"` + UpdatedAt time.Time `db:"updated_at" json:"updated_at"` + ID uuid.UUID `db:"id" json:"id"` } func (q *sqlQuerier) UpdateWorkspaceBuildFlagsByID(ctx context.Context, arg UpdateWorkspaceBuildFlagsByIDParams) error { _, err := q.db.ExecContext(ctx, updateWorkspaceBuildFlagsByID, arg.HasAITask, - arg.SidebarAppID, arg.HasExternalAgent, arg.UpdatedAt, arg.ID, @@ -21826,7 +22201,7 @@ func (q *sqlQuerier) GetWorkspaceACLByID(ctx context.Context, id uuid.UUID) (Get const getWorkspaceByAgentID = `-- name: GetWorkspaceByAgentID :one SELECT - id, created_at, updated_at, owner_id, organization_id, template_id, deleted, name, autostart_schedule, ttl, last_used_at, dormant_at, deleting_at, automatic_updates, favorite, next_start_at, group_acl, user_acl, owner_avatar_url, owner_username, owner_name, organization_name, organization_display_name, organization_icon, organization_description, template_name, template_display_name, template_icon, template_description + id, created_at, 
updated_at, owner_id, organization_id, template_id, deleted, name, autostart_schedule, ttl, last_used_at, dormant_at, deleting_at, automatic_updates, favorite, next_start_at, group_acl, user_acl, owner_avatar_url, owner_username, owner_name, organization_name, organization_display_name, organization_icon, organization_description, template_name, template_display_name, template_icon, template_description, task_id FROM workspaces_expanded as workspaces WHERE @@ -21887,13 +22262,14 @@ func (q *sqlQuerier) GetWorkspaceByAgentID(ctx context.Context, agentID uuid.UUI &i.TemplateDisplayName, &i.TemplateIcon, &i.TemplateDescription, + &i.TaskID, ) return i, err } const getWorkspaceByID = `-- name: GetWorkspaceByID :one SELECT - id, created_at, updated_at, owner_id, organization_id, template_id, deleted, name, autostart_schedule, ttl, last_used_at, dormant_at, deleting_at, automatic_updates, favorite, next_start_at, group_acl, user_acl, owner_avatar_url, owner_username, owner_name, organization_name, organization_display_name, organization_icon, organization_description, template_name, template_display_name, template_icon, template_description + id, created_at, updated_at, owner_id, organization_id, template_id, deleted, name, autostart_schedule, ttl, last_used_at, dormant_at, deleting_at, automatic_updates, favorite, next_start_at, group_acl, user_acl, owner_avatar_url, owner_username, owner_name, organization_name, organization_display_name, organization_icon, organization_description, template_name, template_display_name, template_icon, template_description, task_id FROM workspaces_expanded WHERE @@ -21935,13 +22311,14 @@ func (q *sqlQuerier) GetWorkspaceByID(ctx context.Context, id uuid.UUID) (Worksp &i.TemplateDisplayName, &i.TemplateIcon, &i.TemplateDescription, + &i.TaskID, ) return i, err } const getWorkspaceByOwnerIDAndName = `-- name: GetWorkspaceByOwnerIDAndName :one SELECT - id, created_at, updated_at, owner_id, organization_id, template_id, deleted, name, 
autostart_schedule, ttl, last_used_at, dormant_at, deleting_at, automatic_updates, favorite, next_start_at, group_acl, user_acl, owner_avatar_url, owner_username, owner_name, organization_name, organization_display_name, organization_icon, organization_description, template_name, template_display_name, template_icon, template_description + id, created_at, updated_at, owner_id, organization_id, template_id, deleted, name, autostart_schedule, ttl, last_used_at, dormant_at, deleting_at, automatic_updates, favorite, next_start_at, group_acl, user_acl, owner_avatar_url, owner_username, owner_name, organization_name, organization_display_name, organization_icon, organization_description, template_name, template_display_name, template_icon, template_description, task_id FROM workspaces_expanded as workspaces WHERE @@ -21990,13 +22367,14 @@ func (q *sqlQuerier) GetWorkspaceByOwnerIDAndName(ctx context.Context, arg GetWo &i.TemplateDisplayName, &i.TemplateIcon, &i.TemplateDescription, + &i.TaskID, ) return i, err } const getWorkspaceByResourceID = `-- name: GetWorkspaceByResourceID :one SELECT - id, created_at, updated_at, owner_id, organization_id, template_id, deleted, name, autostart_schedule, ttl, last_used_at, dormant_at, deleting_at, automatic_updates, favorite, next_start_at, group_acl, user_acl, owner_avatar_url, owner_username, owner_name, organization_name, organization_display_name, organization_icon, organization_description, template_name, template_display_name, template_icon, template_description + id, created_at, updated_at, owner_id, organization_id, template_id, deleted, name, autostart_schedule, ttl, last_used_at, dormant_at, deleting_at, automatic_updates, favorite, next_start_at, group_acl, user_acl, owner_avatar_url, owner_username, owner_name, organization_name, organization_display_name, organization_icon, organization_description, template_name, template_display_name, template_icon, template_description, task_id FROM workspaces_expanded as workspaces 
WHERE @@ -22052,13 +22430,14 @@ func (q *sqlQuerier) GetWorkspaceByResourceID(ctx context.Context, resourceID uu &i.TemplateDisplayName, &i.TemplateIcon, &i.TemplateDescription, + &i.TaskID, ) return i, err } const getWorkspaceByWorkspaceAppID = `-- name: GetWorkspaceByWorkspaceAppID :one SELECT - id, created_at, updated_at, owner_id, organization_id, template_id, deleted, name, autostart_schedule, ttl, last_used_at, dormant_at, deleting_at, automatic_updates, favorite, next_start_at, group_acl, user_acl, owner_avatar_url, owner_username, owner_name, organization_name, organization_display_name, organization_icon, organization_description, template_name, template_display_name, template_icon, template_description + id, created_at, updated_at, owner_id, organization_id, template_id, deleted, name, autostart_schedule, ttl, last_used_at, dormant_at, deleting_at, automatic_updates, favorite, next_start_at, group_acl, user_acl, owner_avatar_url, owner_username, owner_name, organization_name, organization_display_name, organization_icon, organization_description, template_name, template_display_name, template_icon, template_description, task_id FROM workspaces_expanded as workspaces WHERE @@ -22126,6 +22505,7 @@ func (q *sqlQuerier) GetWorkspaceByWorkspaceAppID(ctx context.Context, workspace &i.TemplateDisplayName, &i.TemplateIcon, &i.TemplateDescription, + &i.TaskID, ) return i, err } @@ -22175,7 +22555,7 @@ SELECT ), filtered_workspaces AS ( SELECT - workspaces.id, workspaces.created_at, workspaces.updated_at, workspaces.owner_id, workspaces.organization_id, workspaces.template_id, workspaces.deleted, workspaces.name, workspaces.autostart_schedule, workspaces.ttl, workspaces.last_used_at, workspaces.dormant_at, workspaces.deleting_at, workspaces.automatic_updates, workspaces.favorite, workspaces.next_start_at, workspaces.group_acl, workspaces.user_acl, workspaces.owner_avatar_url, workspaces.owner_username, workspaces.owner_name, workspaces.organization_name, 
workspaces.organization_display_name, workspaces.organization_icon, workspaces.organization_description, workspaces.template_name, workspaces.template_display_name, workspaces.template_icon, workspaces.template_description, + workspaces.id, workspaces.created_at, workspaces.updated_at, workspaces.owner_id, workspaces.organization_id, workspaces.template_id, workspaces.deleted, workspaces.name, workspaces.autostart_schedule, workspaces.ttl, workspaces.last_used_at, workspaces.dormant_at, workspaces.deleting_at, workspaces.automatic_updates, workspaces.favorite, workspaces.next_start_at, workspaces.group_acl, workspaces.user_acl, workspaces.owner_avatar_url, workspaces.owner_username, workspaces.owner_name, workspaces.organization_name, workspaces.organization_display_name, workspaces.organization_icon, workspaces.organization_description, workspaces.template_name, workspaces.template_display_name, workspaces.template_icon, workspaces.template_description, workspaces.task_id, latest_build.template_version_id, latest_build.template_version_name, latest_build.completed_at as latest_build_completed_at, @@ -22183,7 +22563,6 @@ SELECT latest_build.error as latest_build_error, latest_build.transition as latest_build_transition, latest_build.job_status as latest_build_status, - latest_build.has_ai_task as latest_build_has_ai_task, latest_build.has_external_agent as latest_build_has_external_agent FROM workspaces_expanded as workspaces @@ -22225,7 +22604,7 @@ LEFT JOIN LATERAL ( ) latest_build ON TRUE LEFT JOIN LATERAL ( SELECT - id, created_at, updated_at, organization_id, deleted, name, provisioner, active_version_id, description, default_ttl, created_by, icon, user_acl, group_acl, display_name, allow_user_cancel_workspace_jobs, allow_user_autostart, allow_user_autostop, failure_ttl, time_til_dormant, time_til_dormant_autodelete, autostop_requirement_days_of_week, autostop_requirement_weeks, autostart_block_days_of_week, require_active_version, deprecated, activity_bump, 
max_port_sharing_level, use_classic_parameter_flow, cors_behavior + id, created_at, updated_at, organization_id, deleted, name, provisioner, active_version_id, description, default_ttl, created_by, icon, user_acl, group_acl, display_name, allow_user_cancel_workspace_jobs, allow_user_autostart, allow_user_autostop, failure_ttl, time_til_dormant, time_til_dormant_autodelete, autostop_requirement_days_of_week, autostop_requirement_weeks, autostart_block_days_of_week, require_active_version, deprecated, activity_bump, max_port_sharing_level, use_classic_parameter_flow, cors_behavior, use_terraform_workspace_cache FROM templates WHERE @@ -22417,25 +22796,19 @@ WHERE (latest_build.template_version_id = template.active_version_id) = $18 :: boolean ELSE true END - -- Filter by has_ai_task in latest build + -- Filter by has_ai_task, checks if this is a task workspace. AND CASE - WHEN $19 :: boolean IS NOT NULL THEN - (COALESCE(latest_build.has_ai_task, false) OR ( - -- If the build has no AI task, it means that the provisioner job is in progress - -- and we don't know if it has an AI task yet. In this case, we optimistically - -- assume that it has an AI task if the AI Prompt parameter is not empty. This - -- lets the AI Task frontend spawn a task and see it immediately after instead of - -- having to wait for the build to complete. - latest_build.has_ai_task IS NULL AND - latest_build.completed_at IS NULL AND - EXISTS ( - SELECT 1 - FROM workspace_build_parameters - WHERE workspace_build_parameters.workspace_build_id = latest_build.id - AND workspace_build_parameters.name = 'AI Prompt' - AND workspace_build_parameters.value != '' - ) - )) = ($19 :: boolean) + WHEN $19::boolean IS NOT NULL + THEN $19::boolean = EXISTS ( + SELECT + 1 + FROM + tasks + WHERE + -- Consider all tasks, deleting a task does not turn the + -- workspace into a non-task workspace. 
+ tasks.workspace_id = workspaces.id + ) ELSE true END -- Filter by has_external_agent in latest build @@ -22466,7 +22839,7 @@ WHERE -- @authorize_filter ), filtered_workspaces_order AS ( SELECT - fw.id, fw.created_at, fw.updated_at, fw.owner_id, fw.organization_id, fw.template_id, fw.deleted, fw.name, fw.autostart_schedule, fw.ttl, fw.last_used_at, fw.dormant_at, fw.deleting_at, fw.automatic_updates, fw.favorite, fw.next_start_at, fw.group_acl, fw.user_acl, fw.owner_avatar_url, fw.owner_username, fw.owner_name, fw.organization_name, fw.organization_display_name, fw.organization_icon, fw.organization_description, fw.template_name, fw.template_display_name, fw.template_icon, fw.template_description, fw.template_version_id, fw.template_version_name, fw.latest_build_completed_at, fw.latest_build_canceled_at, fw.latest_build_error, fw.latest_build_transition, fw.latest_build_status, fw.latest_build_has_ai_task, fw.latest_build_has_external_agent + fw.id, fw.created_at, fw.updated_at, fw.owner_id, fw.organization_id, fw.template_id, fw.deleted, fw.name, fw.autostart_schedule, fw.ttl, fw.last_used_at, fw.dormant_at, fw.deleting_at, fw.automatic_updates, fw.favorite, fw.next_start_at, fw.group_acl, fw.user_acl, fw.owner_avatar_url, fw.owner_username, fw.owner_name, fw.organization_name, fw.organization_display_name, fw.organization_icon, fw.organization_description, fw.template_name, fw.template_display_name, fw.template_icon, fw.template_description, fw.task_id, fw.template_version_id, fw.template_version_name, fw.latest_build_completed_at, fw.latest_build_canceled_at, fw.latest_build_error, fw.latest_build_transition, fw.latest_build_status, fw.latest_build_has_external_agent FROM filtered_workspaces fw ORDER BY @@ -22487,7 +22860,7 @@ WHERE $25 ), filtered_workspaces_order_with_summary AS ( SELECT - fwo.id, fwo.created_at, fwo.updated_at, fwo.owner_id, fwo.organization_id, fwo.template_id, fwo.deleted, fwo.name, fwo.autostart_schedule, fwo.ttl, fwo.last_used_at, 
fwo.dormant_at, fwo.deleting_at, fwo.automatic_updates, fwo.favorite, fwo.next_start_at, fwo.group_acl, fwo.user_acl, fwo.owner_avatar_url, fwo.owner_username, fwo.owner_name, fwo.organization_name, fwo.organization_display_name, fwo.organization_icon, fwo.organization_description, fwo.template_name, fwo.template_display_name, fwo.template_icon, fwo.template_description, fwo.template_version_id, fwo.template_version_name, fwo.latest_build_completed_at, fwo.latest_build_canceled_at, fwo.latest_build_error, fwo.latest_build_transition, fwo.latest_build_status, fwo.latest_build_has_ai_task, fwo.latest_build_has_external_agent + fwo.id, fwo.created_at, fwo.updated_at, fwo.owner_id, fwo.organization_id, fwo.template_id, fwo.deleted, fwo.name, fwo.autostart_schedule, fwo.ttl, fwo.last_used_at, fwo.dormant_at, fwo.deleting_at, fwo.automatic_updates, fwo.favorite, fwo.next_start_at, fwo.group_acl, fwo.user_acl, fwo.owner_avatar_url, fwo.owner_username, fwo.owner_name, fwo.organization_name, fwo.organization_display_name, fwo.organization_icon, fwo.organization_description, fwo.template_name, fwo.template_display_name, fwo.template_icon, fwo.template_description, fwo.task_id, fwo.template_version_id, fwo.template_version_name, fwo.latest_build_completed_at, fwo.latest_build_canceled_at, fwo.latest_build_error, fwo.latest_build_transition, fwo.latest_build_status, fwo.latest_build_has_external_agent FROM filtered_workspaces_order fwo -- Return a technical summary row with total count of workspaces. 
@@ -22523,6 +22896,7 @@ WHERE '', -- template_display_name '', -- template_icon '', -- template_description + '00000000-0000-0000-0000-000000000000'::uuid, -- task_id -- Extra columns added to ` + "`" + `filtered_workspaces` + "`" + ` '00000000-0000-0000-0000-000000000000'::uuid, -- template_version_id '', -- template_version_name @@ -22531,7 +22905,6 @@ WHERE '', -- latest_build_error 'start'::workspace_transition, -- latest_build_transition 'unknown'::provisioner_job_status, -- latest_build_status - false, -- latest_build_has_ai_task false -- latest_build_has_external_agent WHERE $27 :: boolean = true @@ -22542,7 +22915,7 @@ WHERE filtered_workspaces ) SELECT - fwos.id, fwos.created_at, fwos.updated_at, fwos.owner_id, fwos.organization_id, fwos.template_id, fwos.deleted, fwos.name, fwos.autostart_schedule, fwos.ttl, fwos.last_used_at, fwos.dormant_at, fwos.deleting_at, fwos.automatic_updates, fwos.favorite, fwos.next_start_at, fwos.group_acl, fwos.user_acl, fwos.owner_avatar_url, fwos.owner_username, fwos.owner_name, fwos.organization_name, fwos.organization_display_name, fwos.organization_icon, fwos.organization_description, fwos.template_name, fwos.template_display_name, fwos.template_icon, fwos.template_description, fwos.template_version_id, fwos.template_version_name, fwos.latest_build_completed_at, fwos.latest_build_canceled_at, fwos.latest_build_error, fwos.latest_build_transition, fwos.latest_build_status, fwos.latest_build_has_ai_task, fwos.latest_build_has_external_agent, + fwos.id, fwos.created_at, fwos.updated_at, fwos.owner_id, fwos.organization_id, fwos.template_id, fwos.deleted, fwos.name, fwos.autostart_schedule, fwos.ttl, fwos.last_used_at, fwos.dormant_at, fwos.deleting_at, fwos.automatic_updates, fwos.favorite, fwos.next_start_at, fwos.group_acl, fwos.user_acl, fwos.owner_avatar_url, fwos.owner_username, fwos.owner_name, fwos.organization_name, fwos.organization_display_name, fwos.organization_icon, fwos.organization_description, 
fwos.template_name, fwos.template_display_name, fwos.template_icon, fwos.template_description, fwos.task_id, fwos.template_version_id, fwos.template_version_name, fwos.latest_build_completed_at, fwos.latest_build_canceled_at, fwos.latest_build_error, fwos.latest_build_transition, fwos.latest_build_status, fwos.latest_build_has_external_agent, tc.count FROM filtered_workspaces_order_with_summary fwos @@ -22610,6 +22983,7 @@ type GetWorkspacesRow struct { TemplateDisplayName string `db:"template_display_name" json:"template_display_name"` TemplateIcon string `db:"template_icon" json:"template_icon"` TemplateDescription string `db:"template_description" json:"template_description"` + TaskID uuid.NullUUID `db:"task_id" json:"task_id"` TemplateVersionID uuid.UUID `db:"template_version_id" json:"template_version_id"` TemplateVersionName sql.NullString `db:"template_version_name" json:"template_version_name"` LatestBuildCompletedAt sql.NullTime `db:"latest_build_completed_at" json:"latest_build_completed_at"` @@ -22617,7 +22991,6 @@ type GetWorkspacesRow struct { LatestBuildError sql.NullString `db:"latest_build_error" json:"latest_build_error"` LatestBuildTransition WorkspaceTransition `db:"latest_build_transition" json:"latest_build_transition"` LatestBuildStatus ProvisionerJobStatus `db:"latest_build_status" json:"latest_build_status"` - LatestBuildHasAITask sql.NullBool `db:"latest_build_has_ai_task" json:"latest_build_has_ai_task"` LatestBuildHasExternalAgent sql.NullBool `db:"latest_build_has_external_agent" json:"latest_build_has_external_agent"` Count int64 `db:"count" json:"count"` } @@ -22692,6 +23065,7 @@ func (q *sqlQuerier) GetWorkspaces(ctx context.Context, arg GetWorkspacesParams) &i.TemplateDisplayName, &i.TemplateIcon, &i.TemplateDescription, + &i.TaskID, &i.TemplateVersionID, &i.TemplateVersionName, &i.LatestBuildCompletedAt, @@ -22699,7 +23073,6 @@ func (q *sqlQuerier) GetWorkspaces(ctx context.Context, arg GetWorkspacesParams) &i.LatestBuildError, 
&i.LatestBuildTransition, &i.LatestBuildStatus, - &i.LatestBuildHasAITask, &i.LatestBuildHasExternalAgent, &i.Count, ); err != nil { diff --git a/coderd/database/queries/aibridge.sql b/coderd/database/queries/aibridge.sql index fd5a9868bbaa8..cf87598115803 100644 --- a/coderd/database/queries/aibridge.sql +++ b/coderd/database/queries/aibridge.sql @@ -1,8 +1,8 @@ -- name: InsertAIBridgeInterception :one INSERT INTO aibridge_interceptions ( - id, initiator_id, provider, model, metadata, started_at + id, api_key_id, initiator_id, provider, model, metadata, started_at ) VALUES ( - @id, @initiator_id, @provider, @model, COALESCE(@metadata::jsonb, '{}'::jsonb), @started_at + @id, @api_key_id, @initiator_id, @provider, @model, COALESCE(@metadata::jsonb, '{}'::jsonb), @started_at ) RETURNING *; @@ -89,8 +89,10 @@ SELECT FROM aibridge_interceptions WHERE + -- Remove inflight interceptions (ones which lack an ended_at value). + aibridge_interceptions.ended_at IS NOT NULL -- Filter by time frame - CASE + AND CASE WHEN @started_after::timestamptz != '0001-01-01 00:00:00+00'::timestamptz THEN aibridge_interceptions.started_at >= @started_after::timestamptz ELSE true END @@ -126,8 +128,10 @@ FROM JOIN visible_users ON visible_users.id = aibridge_interceptions.initiator_id WHERE + -- Remove inflight interceptions (ones which lack an ended_at value). + aibridge_interceptions.ended_at IS NOT NULL -- Filter by time frame - CASE + AND CASE WHEN @started_after::timestamptz != '0001-01-01 00:00:00+00'::timestamptz THEN aibridge_interceptions.started_at >= @started_after::timestamptz ELSE true END @@ -209,7 +213,7 @@ ORDER BY id ASC; -- name: ListAIBridgeInterceptionsTelemetrySummaries :many --- Finds all unique AIBridge interception telemetry summaries combinations +-- Finds all unique AI Bridge interception telemetry summaries combinations -- (provider, model, client) in the given timeframe for telemetry reporting. 
SELECT DISTINCT ON (provider, model, client) @@ -326,3 +330,38 @@ FROM prompt_aggregates pa, tool_aggregates tool_agg ; + +-- name: DeleteOldAIBridgeRecords :one +WITH + -- We don't have FK relationships between the dependent tables and aibridge_interceptions, so we can't rely on DELETE CASCADE. + to_delete AS ( + SELECT id FROM aibridge_interceptions + WHERE started_at < @before_time::timestamp with time zone + ), + -- CTEs are executed in order. + tool_usages AS ( + DELETE FROM aibridge_tool_usages + WHERE interception_id IN (SELECT id FROM to_delete) + RETURNING 1 + ), + token_usages AS ( + DELETE FROM aibridge_token_usages + WHERE interception_id IN (SELECT id FROM to_delete) + RETURNING 1 + ), + user_prompts AS ( + DELETE FROM aibridge_user_prompts + WHERE interception_id IN (SELECT id FROM to_delete) + RETURNING 1 + ), + interceptions AS ( + DELETE FROM aibridge_interceptions + WHERE id IN (SELECT id FROM to_delete) + RETURNING 1 + ) +-- Cumulative count. +SELECT + (SELECT COUNT(*) FROM tool_usages) + + (SELECT COUNT(*) FROM token_usages) + + (SELECT COUNT(*) FROM user_prompts) + + (SELECT COUNT(*) FROM interceptions) as total_deleted; diff --git a/coderd/database/queries/apikeys.sql b/coderd/database/queries/apikeys.sql index c067305755078..2e2be542c9601 100644 --- a/coderd/database/queries/apikeys.sql +++ b/coderd/database/queries/apikeys.sql @@ -85,6 +85,26 @@ DELETE FROM WHERE user_id = $1; +-- name: DeleteExpiredAPIKeys :one +WITH expired_keys AS ( + SELECT id + FROM api_keys + -- expired keys only + WHERE expires_at < @before::timestamptz + LIMIT @limit_count +), +deleted_rows AS ( + DELETE FROM + api_keys + USING + expired_keys + WHERE + api_keys.id = expired_keys.id + RETURNING api_keys.id + ) +SELECT COUNT(deleted_rows.id) AS deleted_count FROM deleted_rows; +; + -- name: ExpirePrebuildsAPIKeys :exec -- Firstly, collect api_keys owned by the prebuilds user that correlate -- to workspaces no longer owned by the prebuilds user. 
diff --git a/coderd/database/queries/prebuilds.sql b/coderd/database/queries/prebuilds.sql index 6c5520c9da7e1..9dd68e8297314 100644 --- a/coderd/database/queries/prebuilds.sql +++ b/coderd/database/queries/prebuilds.sql @@ -51,6 +51,7 @@ SELECT tvp.scheduling_timezone, tvp.invalidate_after_secs AS ttl, tvp.prebuild_status, + tvp.last_invalidated_at, t.deleted, t.deprecated != '' AS deprecated FROM templates t @@ -300,12 +301,8 @@ GROUP BY wpb.template_version_preset_id; -- Cancels all pending provisioner jobs for prebuilt workspaces on a specific preset from an -- inactive template version. -- This is an optimization to clean up stale pending jobs. -UPDATE provisioner_jobs -SET - canceled_at = @now::timestamptz, - completed_at = @now::timestamptz -WHERE id IN ( - SELECT pj.id +WITH jobs_to_cancel AS ( + SELECT pj.id, w.id AS workspace_id, w.template_id, wpb.template_version_preset_id FROM provisioner_jobs pj INNER JOIN workspace_prebuild_builds wpb ON wpb.job_id = pj.id INNER JOIN workspaces w ON w.id = wpb.workspace_id @@ -324,4 +321,54 @@ WHERE id IN ( AND pj.canceled_at IS NULL AND pj.completed_at IS NULL ) -RETURNING id; +UPDATE provisioner_jobs +SET + canceled_at = @now::timestamptz, + completed_at = @now::timestamptz +FROM jobs_to_cancel +WHERE provisioner_jobs.id = jobs_to_cancel.id +RETURNING jobs_to_cancel.id, jobs_to_cancel.workspace_id, jobs_to_cancel.template_id, jobs_to_cancel.template_version_preset_id; + +-- name: GetOrganizationsWithPrebuildStatus :many +-- GetOrganizationsWithPrebuildStatus returns organizations with prebuilds configured and their +-- membership status for the prebuilds system user (org membership, group existence, group membership). 
+WITH orgs_with_prebuilds AS ( + -- Get unique organizations that have presets with prebuilds configured + SELECT DISTINCT o.id, o.name + FROM organizations o + INNER JOIN templates t ON t.organization_id = o.id + INNER JOIN template_versions tv ON tv.template_id = t.id + INNER JOIN template_version_presets tvp ON tvp.template_version_id = tv.id + WHERE tvp.desired_instances IS NOT NULL +), +prebuild_user_membership AS ( + -- Check if the user is a member of the organizations + SELECT om.organization_id + FROM organization_members om + INNER JOIN orgs_with_prebuilds owp ON owp.id = om.organization_id + WHERE om.user_id = @user_id::uuid +), +prebuild_groups AS ( + -- Check if the organizations have the prebuilds group + SELECT g.organization_id, g.id as group_id + FROM groups g + INNER JOIN orgs_with_prebuilds owp ON owp.id = g.organization_id + WHERE g.name = @group_name::text +), +prebuild_group_membership AS ( + -- Check if the user is in the prebuilds group + SELECT pg.organization_id + FROM prebuild_groups pg + INNER JOIN group_members gm ON gm.group_id = pg.group_id + WHERE gm.user_id = @user_id::uuid +) +SELECT + owp.id AS organization_id, + owp.name AS organization_name, + (pum.organization_id IS NOT NULL)::boolean AS has_prebuild_user, + pg.group_id AS prebuilds_group_id, + (pgm.organization_id IS NOT NULL)::boolean AS has_prebuild_user_in_group +FROM orgs_with_prebuilds owp +LEFT JOIN prebuild_groups pg ON pg.organization_id = owp.id +LEFT JOIN prebuild_user_membership pum ON pum.organization_id = owp.id +LEFT JOIN prebuild_group_membership pgm ON pgm.organization_id = owp.id; diff --git a/coderd/database/queries/presets.sql b/coderd/database/queries/presets.sql index e6edcb4c59c1f..314c74b668657 100644 --- a/coderd/database/queries/presets.sql +++ b/coderd/database/queries/presets.sql @@ -9,7 +9,8 @@ INSERT INTO template_version_presets ( scheduling_timezone, is_default, description, - icon + icon, + last_invalidated_at ) VALUES ( @id, @@ -21,7 +22,8 @@ 
VALUES ( @scheduling_timezone, @is_default, @description, - @icon + @icon, + @last_invalidated_at ) RETURNING *; -- name: InsertPresetParameters :many @@ -103,3 +105,19 @@ WHERE tv.id = t.active_version_id AND NOT t.deleted AND t.deprecated = ''; + +-- name: UpdatePresetsLastInvalidatedAt :many +UPDATE + template_version_presets tvp +SET + last_invalidated_at = @last_invalidated_at +FROM + templates t + JOIN template_versions tv ON tv.id = t.active_version_id +WHERE + t.id = @template_id + AND tvp.template_version_id = tv.id +RETURNING + t.name AS template_name, + tv.name AS template_version_name, + tvp.name AS template_version_preset_name; diff --git a/coderd/database/queries/tasks.sql b/coderd/database/queries/tasks.sql index d0617ad39f4dc..52e259953fb42 100644 --- a/coderd/database/queries/tasks.sql +++ b/coderd/database/queries/tasks.sql @@ -1,8 +1,8 @@ -- name: InsertTask :one INSERT INTO tasks - (id, organization_id, owner_id, name, workspace_id, template_version_id, template_parameters, prompt, created_at) + (id, organization_id, owner_id, name, display_name, workspace_id, template_version_id, template_parameters, prompt, created_at) VALUES - ($1, $2, $3, $4, $5, $6, $7, $8, $9) + ($1, $2, $3, $4, $5, $6, $7, $8, $9, $10) RETURNING *; -- name: UpdateTaskWorkspaceID :one @@ -41,6 +41,13 @@ SELECT * FROM tasks_with_status WHERE id = @id::uuid; -- name: GetTaskByWorkspaceID :one SELECT * FROM tasks_with_status WHERE workspace_id = @workspace_id::uuid; +-- name: GetTaskByOwnerIDAndName :one +SELECT * FROM tasks_with_status +WHERE + owner_id = @owner_id::uuid + AND deleted_at IS NULL + AND LOWER(name) = LOWER(@name::text); + -- name: ListTasks :many SELECT * FROM tasks_with_status tws WHERE tws.deleted_at IS NULL @@ -57,3 +64,14 @@ WHERE id = @id::uuid AND deleted_at IS NULL RETURNING *; + + +-- name: UpdateTaskPrompt :one +UPDATE + tasks +SET + prompt = @prompt::text +WHERE + id = @id::uuid + AND deleted_at IS NULL +RETURNING *; diff --git 
a/coderd/database/queries/templates.sql b/coderd/database/queries/templates.sql index 43f1aea6c561f..4de4e2fadbebd 100644 --- a/coderd/database/queries/templates.sql +++ b/coderd/database/queries/templates.sql @@ -173,7 +173,8 @@ SET group_acl = $8, max_port_sharing_level = $9, use_classic_parameter_flow = $10, - cors_behavior = $11 + cors_behavior = $11, + use_terraform_workspace_cache = $12 WHERE id = $1 ; diff --git a/coderd/database/queries/users.sql b/coderd/database/queries/users.sql index 0b6e52d6bc918..889e99a3300d3 100644 --- a/coderd/database/queries/users.sql +++ b/coderd/database/queries/users.sql @@ -168,6 +168,29 @@ WHERE user_configs.user_id = @user_id AND user_configs.key = 'terminal_font' RETURNING *; +-- name: GetUserTaskNotificationAlertDismissed :one +SELECT + value::boolean as task_notification_alert_dismissed +FROM + user_configs +WHERE + user_id = @user_id + AND key = 'preference_task_notification_alert_dismissed'; + +-- name: UpdateUserTaskNotificationAlertDismissed :one +INSERT INTO + user_configs (user_id, key, value) +VALUES + (@user_id, 'preference_task_notification_alert_dismissed', (@task_notification_alert_dismissed::boolean)::text) +ON CONFLICT + ON CONSTRAINT user_configs_pkey +DO UPDATE +SET + value = @task_notification_alert_dismissed +WHERE user_configs.user_id = @user_id + AND user_configs.key = 'preference_task_notification_alert_dismissed' +RETURNING value::boolean AS task_notification_alert_dismissed; + -- name: UpdateUserRoles :one UPDATE users diff --git a/coderd/database/queries/workspaceagents.sql b/coderd/database/queries/workspaceagents.sql index cc59e96544778..b60d1f2c88455 100644 --- a/coderd/database/queries/workspaceagents.sql +++ b/coderd/database/queries/workspaceagents.sql @@ -285,7 +285,8 @@ WHERE SELECT sqlc.embed(workspaces), sqlc.embed(workspace_agents), - sqlc.embed(workspace_build_with_user) + sqlc.embed(workspace_build_with_user), + tasks.id AS task_id FROM workspace_agents JOIN @@ -300,6 +301,10 @@ JOIN 
workspaces ON workspace_build_with_user.workspace_id = workspaces.id +LEFT JOIN + tasks +ON + tasks.workspace_id = workspaces.id WHERE -- This should only match 1 agent, so 1 returned row or 0. workspace_agents.auth_token = @auth_token::uuid diff --git a/coderd/database/queries/workspaceapps.sql b/coderd/database/queries/workspaceapps.sql index d76e789f1946d..b3694fb2750c6 100644 --- a/coderd/database/queries/workspaceapps.sql +++ b/coderd/database/queries/workspaceapps.sql @@ -73,11 +73,12 @@ RETURNING *; -- name: GetWorkspaceAppStatusesByAppIDs :many SELECT * FROM workspace_app_statuses WHERE app_id = ANY(@ids :: uuid [ ]); --- name: GetLatestWorkspaceAppStatusesByAppID :many +-- name: GetLatestWorkspaceAppStatusByAppID :one SELECT * FROM workspace_app_statuses WHERE app_id = @app_id::uuid -ORDER BY created_at DESC, id DESC; +ORDER BY created_at DESC, id DESC +LIMIT 1; -- name: GetLatestWorkspaceAppStatusesByWorkspaceIDs :many SELECT DISTINCT ON (workspace_id) diff --git a/coderd/database/queries/workspacebuilds.sql b/coderd/database/queries/workspacebuilds.sql index 0736c5514b3f7..cf13b30758bd4 100644 --- a/coderd/database/queries/workspacebuilds.sql +++ b/coderd/database/queries/workspacebuilds.sql @@ -240,7 +240,6 @@ UPDATE workspace_builds SET has_ai_task = @has_ai_task, - ai_task_sidebar_app_id = @sidebar_app_id, has_external_agent = @has_external_agent, updated_at = @updated_at::timestamptz WHERE id = @id::uuid; diff --git a/coderd/database/queries/workspaces.sql b/coderd/database/queries/workspaces.sql index 8ccc69b9a813c..9e6e0d8b24862 100644 --- a/coderd/database/queries/workspaces.sql +++ b/coderd/database/queries/workspaces.sql @@ -117,7 +117,6 @@ SELECT latest_build.error as latest_build_error, latest_build.transition as latest_build_transition, latest_build.job_status as latest_build_status, - latest_build.has_ai_task as latest_build_has_ai_task, latest_build.has_external_agent as latest_build_has_external_agent FROM workspaces_expanded as workspaces 
@@ -351,25 +350,19 @@ WHERE (latest_build.template_version_id = template.active_version_id) = sqlc.narg('using_active') :: boolean ELSE true END - -- Filter by has_ai_task in latest build + -- Filter by has_ai_task, checks if this is a task workspace. AND CASE - WHEN sqlc.narg('has_ai_task') :: boolean IS NOT NULL THEN - (COALESCE(latest_build.has_ai_task, false) OR ( - -- If the build has no AI task, it means that the provisioner job is in progress - -- and we don't know if it has an AI task yet. In this case, we optimistically - -- assume that it has an AI task if the AI Prompt parameter is not empty. This - -- lets the AI Task frontend spawn a task and see it immediately after instead of - -- having to wait for the build to complete. - latest_build.has_ai_task IS NULL AND - latest_build.completed_at IS NULL AND - EXISTS ( - SELECT 1 - FROM workspace_build_parameters - WHERE workspace_build_parameters.workspace_build_id = latest_build.id - AND workspace_build_parameters.name = 'AI Prompt' - AND workspace_build_parameters.value != '' - ) - )) = (sqlc.narg('has_ai_task') :: boolean) + WHEN sqlc.narg('has_ai_task')::boolean IS NOT NULL + THEN sqlc.narg('has_ai_task')::boolean = EXISTS ( + SELECT + 1 + FROM + tasks + WHERE + -- Consider all tasks, deleting a task does not turn the + -- workspace into a non-task workspace. 
+ tasks.workspace_id = workspaces.id + ) ELSE true END -- Filter by has_external_agent in latest build @@ -457,6 +450,7 @@ WHERE '', -- template_display_name '', -- template_icon '', -- template_description + '00000000-0000-0000-0000-000000000000'::uuid, -- task_id -- Extra columns added to `filtered_workspaces` '00000000-0000-0000-0000-000000000000'::uuid, -- template_version_id '', -- template_version_name @@ -465,7 +459,6 @@ WHERE '', -- latest_build_error 'start'::workspace_transition, -- latest_build_transition 'unknown'::provisioner_job_status, -- latest_build_status - false, -- latest_build_has_ai_task false -- latest_build_has_external_agent WHERE @with_summary :: boolean = true diff --git a/coderd/database/sqlc.yaml b/coderd/database/sqlc.yaml index af700c14519be..2386a4091f2d6 100644 --- a/coderd/database/sqlc.yaml +++ b/coderd/database/sqlc.yaml @@ -106,6 +106,15 @@ sql: # Workaround for sqlc not interpreting the left join correctly. - column: "tasks_with_status.workspace_build_number" go_type: "database/sql.NullInt32" + - column: "tasks_with_status.status" + go_type: + type: "TaskStatus" + - column: "tasks_with_status.workspace_agent_lifecycle_state" + go_type: + type: "NullWorkspaceAgentLifecycleState" + - column: "tasks_with_status.workspace_app_health" + go_type: + type: "NullWorkspaceAppHealth" rename: group_member: GroupMemberTable group_members_expanded: GroupMember diff --git a/coderd/httpmw/loggermw/logger.go b/coderd/httpmw/loggermw/logger.go index 37e15b3bfcf81..edd878efa9825 100644 --- a/coderd/httpmw/loggermw/logger.go +++ b/coderd/httpmw/loggermw/logger.go @@ -7,19 +7,17 @@ import ( "net/url" "strconv" "strings" - "sync" "time" "github.com/go-chi/chi/v5" "cdr.dev/slog" "github.com/coder/coder/v2/coderd/httpapi" - "github.com/coder/coder/v2/coderd/rbac" "github.com/coder/coder/v2/coderd/tracing" ) var ( - safeParams = []string{"page", "limit", "offset"} + safeParams = []string{"page", "limit", "offset", "path"} countParams = []string{"ids", 
"template_ids"} ) @@ -124,85 +122,18 @@ func Logger(log slog.Logger) func(next http.Handler) http.Handler { } } -type RequestLogger interface { - WithFields(fields ...slog.Field) - WriteLog(ctx context.Context, status int) - WithAuthContext(actor rbac.Subject) -} - type SlogRequestLogger struct { - log slog.Logger - written bool - message string - start time.Time - // Protects actors map for concurrent writes. - mu sync.RWMutex - actors map[rbac.SubjectType]rbac.Subject -} - -var _ RequestLogger = &SlogRequestLogger{} - -func NewRequestLogger(log slog.Logger, message string, start time.Time) RequestLogger { - return &SlogRequestLogger{ - log: log, - written: false, - message: message, - start: start, - actors: make(map[rbac.SubjectType]rbac.Subject), - } + log slog.Logger + written bool + message string + start time.Time + addFields func() } func (c *SlogRequestLogger) WithFields(fields ...slog.Field) { c.log = c.log.With(fields...) } -func (c *SlogRequestLogger) WithAuthContext(actor rbac.Subject) { - c.mu.Lock() - defer c.mu.Unlock() - c.actors[actor.Type] = actor -} - -func (c *SlogRequestLogger) addAuthContextFields() { - c.mu.RLock() - defer c.mu.RUnlock() - - usr, ok := c.actors[rbac.SubjectTypeUser] - if ok { - c.log = c.log.With( - slog.F("requestor_id", usr.ID), - slog.F("requestor_name", usr.FriendlyName), - slog.F("requestor_email", usr.Email), - ) - } else { - // If there is no user, we log the requestor name for the first - // actor in a defined order. 
- for _, v := range actorLogOrder { - subj, ok := c.actors[v] - if !ok { - continue - } - c.log = c.log.With( - slog.F("requestor_name", subj.FriendlyName), - ) - break - } - } -} - -var actorLogOrder = []rbac.SubjectType{ - rbac.SubjectTypeAutostart, - rbac.SubjectTypeCryptoKeyReader, - rbac.SubjectTypeCryptoKeyRotator, - rbac.SubjectTypeJobReaper, - rbac.SubjectTypeNotifier, - rbac.SubjectTypePrebuildsOrchestrator, - rbac.SubjectTypeSubAgentAPI, - rbac.SubjectTypeProvisionerd, - rbac.SubjectTypeResourceMonitor, - rbac.SubjectTypeSystemReadProvisionerDaemons, - rbac.SubjectTypeSystemRestricted, -} - func (c *SlogRequestLogger) WriteLog(ctx context.Context, status int) { if c.written { return @@ -210,9 +141,9 @@ func (c *SlogRequestLogger) WriteLog(ctx context.Context, status int) { c.written = true end := time.Now() - // Right before we write the log, we try to find the user in the actors - // and add the fields to the log. - c.addAuthContextFields() + if c.addFields != nil { + c.addFields() + } logger := c.log.With( slog.F("took", end.Sub(c.start)), diff --git a/coderd/httpmw/loggermw/logger_full.go b/coderd/httpmw/loggermw/logger_full.go new file mode 100644 index 0000000000000..8240289c50177 --- /dev/null +++ b/coderd/httpmw/loggermw/logger_full.go @@ -0,0 +1,88 @@ +//go:build !slim + +package loggermw + +import ( + "context" + "sync" + "time" + + "cdr.dev/slog" + "github.com/coder/coder/v2/coderd/rbac" +) + +type RequestLogger interface { + WithFields(fields ...slog.Field) + WriteLog(ctx context.Context, status int) + WithAuthContext(actor rbac.Subject) +} + +type RbacSlogRequestLogger struct { + SlogRequestLogger + // Protects actors map for concurrent writes. 
+ mu sync.RWMutex + actors map[rbac.SubjectType]rbac.Subject +} + +var _ RequestLogger = &RbacSlogRequestLogger{} + +func NewRequestLogger(log slog.Logger, message string, start time.Time) RequestLogger { + rlogger := &RbacSlogRequestLogger{ + SlogRequestLogger: SlogRequestLogger{ + log: log, + written: false, + message: message, + start: start, + }, + actors: make(map[rbac.SubjectType]rbac.Subject), + } + rlogger.addFields = rlogger.addAuthContextFields + return rlogger +} + +func (c *RbacSlogRequestLogger) WithAuthContext(actor rbac.Subject) { + c.mu.Lock() + defer c.mu.Unlock() + c.actors[actor.Type] = actor +} + +var actorLogOrder = []rbac.SubjectType{ + rbac.SubjectTypeAutostart, + rbac.SubjectTypeCryptoKeyReader, + rbac.SubjectTypeCryptoKeyRotator, + rbac.SubjectTypeJobReaper, + rbac.SubjectTypeNotifier, + rbac.SubjectTypePrebuildsOrchestrator, + rbac.SubjectTypeSubAgentAPI, + rbac.SubjectTypeProvisionerd, + rbac.SubjectTypeResourceMonitor, + rbac.SubjectTypeSystemReadProvisionerDaemons, + rbac.SubjectTypeSystemRestricted, +} + +func (c *RbacSlogRequestLogger) addAuthContextFields() { + c.mu.RLock() + defer c.mu.RUnlock() + + usr, ok := c.actors[rbac.SubjectTypeUser] + if ok { + c.log = c.log.With( + slog.F("requestor_id", usr.ID), + slog.F("requestor_name", usr.FriendlyName), + slog.F("requestor_email", usr.Email), + ) + } else { + // If there is no user, we log the requestor name for the first + // actor in a defined order. 
+ for _, v := range actorLogOrder { + subj, ok := c.actors[v] + if !ok { + continue + } + c.log = c.log.With( + slog.F("requestor_name", subj.FriendlyName), + ) + break + } + } +} diff --git a/coderd/httpmw/loggermw/logger_internal_test.go b/coderd/httpmw/loggermw/logger_internal_test.go index bf090464241a0..5f22de7477d92 100644 --- a/coderd/httpmw/loggermw/logger_internal_test.go +++ b/coderd/httpmw/loggermw/logger_internal_test.go @@ -16,6 +16,7 @@ import ( "github.com/stretchr/testify/require" "cdr.dev/slog" + "github.com/coder/coder/v2/coderd/rbac" "github.com/coder/coder/v2/coderd/tracing" "github.com/coder/coder/v2/testutil" "github.com/coder/websocket" @@ -363,6 +364,31 @@ func TestSafeQueryParams(t *testing.T) { } } +func TestRequestLogger_AuthContext(t *testing.T) { + t.Parallel() + ctx := context.Background() + + sink := &fakeSink{} + logger := slog.Make(sink) + logger = logger.Leveled(slog.LevelDebug) + logCtx := NewRequestLogger(logger, "GET", time.Now()) + + logCtx.WithAuthContext(rbac.Subject{ + ID: "test-user-id", + FriendlyName: "test name", + Email: "test@coder.com", + Type: rbac.SubjectTypeUser, + }) + + logCtx.WriteLog(ctx, http.StatusOK) + + require.Len(t, sink.entries, 1, "log was written twice") + require.Equal(t, sink.entries[0].Message, "GET") + require.Equal(t, sink.entries[0].Fields[0].Value, "test-user-id") + require.Equal(t, sink.entries[0].Fields[1].Value, "test name") + require.Equal(t, sink.entries[0].Fields[2].Value, "test@coder.com") +} + type fakeSink struct { entries []slog.SinkEntry newEntries chan slog.SinkEntry diff --git a/coderd/httpmw/loggermw/logger_slim.go b/coderd/httpmw/loggermw/logger_slim.go new file mode 100644 index 0000000000000..36470265e50df --- /dev/null +++ b/coderd/httpmw/loggermw/logger_slim.go @@ -0,0 +1,26 @@ +//go:build slim + +package loggermw + +import ( + "context" + "time" + + "cdr.dev/slog" +) + +type RequestLogger interface { + WithFields(fields ...slog.Field) + WriteLog(ctx context.Context, status 
int) +} + +var _ RequestLogger = &SlogRequestLogger{} + +func NewRequestLogger(log slog.Logger, message string, start time.Time) RequestLogger { + return &SlogRequestLogger{ + log: log, + written: false, + message: message, + start: start, + } +} diff --git a/coderd/httpmw/taskparam.go b/coderd/httpmw/taskparam.go index 6ecc888b378fe..1e6051eb03666 100644 --- a/coderd/httpmw/taskparam.go +++ b/coderd/httpmw/taskparam.go @@ -2,8 +2,14 @@ package httpmw import ( "context" + "database/sql" + "errors" "net/http" + "github.com/go-chi/chi/v5" + "github.com/google/uuid" + "golang.org/x/xerrors" + "cdr.dev/slog" "github.com/coder/coder/v2/coderd/database" @@ -23,16 +29,34 @@ func TaskParam(r *http.Request) database.Task { return task } -// ExtractTaskParam grabs a task from the "task" URL parameter by UUID. +// ExtractTaskParam grabs a task from the "task" URL parameter. +// It supports two lookup strategies: +// 1. Task UUID (primary) +// 2. Task name scoped to owner (secondary) +// +// This middleware depends on ExtractOrganizationMembersParam being in the chain +// to provide the owner context for name-based lookups. func ExtractTaskParam(db database.Store) func(http.Handler) http.Handler { return func(next http.Handler) http.Handler { return http.HandlerFunc(func(rw http.ResponseWriter, r *http.Request) { ctx := r.Context() - taskID, parsed := ParseUUIDParam(rw, r, "task") - if !parsed { + + // Get the task parameter value. We can't use ParseUUIDParam here because + // we need to support non-UUID values (task names) and + // attempt all lookup strategies. 
+ taskParam := chi.URLParam(r, "task") + if taskParam == "" { + httpapi.Write(ctx, rw, http.StatusBadRequest, codersdk.Response{ + Message: "\"task\" must be provided.", + }) return } - task, err := db.GetTaskByID(ctx, taskID) + + // Get owner from OrganizationMembersParam middleware for name-based lookups + members := OrganizationMembersParam(r) + ownerID := members.UserID() + + task, err := fetchTaskWithFallback(ctx, db, taskParam, ownerID) if err != nil { if httpapi.Is404Error(err) { httpapi.ResourceNotFound(rw) @@ -48,10 +72,38 @@ func ExtractTaskParam(db database.Store) func(http.Handler) http.Handler { ctx = context.WithValue(ctx, taskParamContextKey{}, task) if rlogger := loggermw.RequestLoggerFromContext(ctx); rlogger != nil { - rlogger.WithFields(slog.F("task_id", task.ID), slog.F("task_name", task.Name)) + rlogger.WithFields( + slog.F("task_id", task.ID), + slog.F("task_name", task.Name), + ) } next.ServeHTTP(rw, r.WithContext(ctx)) }) } } + +func fetchTaskWithFallback(ctx context.Context, db database.Store, taskParam string, ownerID uuid.UUID) (database.Task, error) { + // Attempt to first lookup the task by UUID. + taskID, err := uuid.Parse(taskParam) + if err == nil { + task, err := db.GetTaskByID(ctx, taskID) + if err == nil { + return task, nil + } + // There may be a task named with a valid UUID. Fall back to name lookup in this case. + if !errors.Is(err, sql.ErrNoRows) { + return database.Task{}, xerrors.Errorf("fetch task by uuid: %w", err) + } + } + + // taskParam not a valid UUID, OR valid UUID but not found, so attempt lookup by name. 
+ task, err := db.GetTaskByOwnerIDAndName(ctx, database.GetTaskByOwnerIDAndNameParams{ + OwnerID: ownerID, + Name: taskParam, + }) + if err != nil { + return database.Task{}, xerrors.Errorf("fetch task by name: %w", err) + } + return task, nil +} diff --git a/coderd/httpmw/taskparam_test.go b/coderd/httpmw/taskparam_test.go index 559ccc2a2df2d..7430785f3377a 100644 --- a/coderd/httpmw/taskparam_test.go +++ b/coderd/httpmw/taskparam_test.go @@ -4,35 +4,119 @@ import ( "context" "net/http" "net/http/httptest" + "strings" "testing" "github.com/go-chi/chi/v5" "github.com/google/uuid" + "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" "github.com/coder/coder/v2/coderd/database" "github.com/coder/coder/v2/coderd/database/dbgen" "github.com/coder/coder/v2/coderd/database/dbtestutil" "github.com/coder/coder/v2/coderd/httpmw" + "github.com/coder/coder/v2/coderd/rbac" + "github.com/coder/coder/v2/coderd/rbac/policy" "github.com/coder/coder/v2/codersdk" ) func TestTaskParam(t *testing.T) { t.Parallel() - setup := func(db database.Store) (*http.Request, database.User) { - user := dbgen.User(t, db, database.User{}) - _, token := dbgen.APIKey(t, db, database.APIKey{ - UserID: user.ID, - }) + // Create all fixtures once - they're only read, never modified + db, _ := dbtestutil.NewDB(t) + user := dbgen.User(t, db, database.User{}) + _, token := dbgen.APIKey(t, db, database.APIKey{ + UserID: user.ID, + }) + org := dbgen.Organization(t, db, database.Organization{}) + tpl := dbgen.Template(t, db, database.Template{ + OrganizationID: org.ID, + CreatedBy: user.ID, + }) + tv := dbgen.TemplateVersion(t, db, database.TemplateVersion{ + TemplateID: uuid.NullUUID{ + UUID: tpl.ID, + Valid: true, + }, + OrganizationID: org.ID, + CreatedBy: user.ID, + }) + workspace := dbgen.Workspace(t, db, database.WorkspaceTable{ + OwnerID: user.ID, + OrganizationID: org.ID, + TemplateID: tpl.ID, + }) + task := dbgen.Task(t, db, database.TaskTable{ + OrganizationID: org.ID, + 
OwnerID: user.ID, + TemplateVersionID: tv.ID, + WorkspaceID: uuid.NullUUID{UUID: workspace.ID, Valid: true}, + Prompt: "test prompt", + }) + workspaceNoTask := dbgen.Workspace(t, db, database.WorkspaceTable{ + OwnerID: user.ID, + OrganizationID: org.ID, + TemplateID: tpl.ID, + }) + taskFoundByUUID := dbgen.Task(t, db, database.TaskTable{ + Name: "found-by-uuid", + OrganizationID: org.ID, + OwnerID: user.ID, + TemplateVersionID: tv.ID, + WorkspaceID: uuid.NullUUID{UUID: workspace.ID, Valid: true}, + Prompt: "test prompt", + }) + // To test precedence of UUID over name, we create another task with the same name as the UUID task + _ = dbgen.Task(t, db, database.TaskTable{ + Name: taskFoundByUUID.ID.String(), + OrganizationID: org.ID, + OwnerID: user.ID, + TemplateVersionID: tv.ID, + WorkspaceID: uuid.NullUUID{UUID: workspace.ID, Valid: true}, + Prompt: "test prompt", + }) + workspaceSharedName := dbgen.Workspace(t, db, database.WorkspaceTable{ + Name: "shared-name", + OwnerID: user.ID, + OrganizationID: org.ID, + TemplateID: tpl.ID, + }) + // We create a task with the same name as the workspace shared name. 
+ _ = dbgen.Task(t, db, database.TaskTable{ + Name: "task-different-name", + OrganizationID: org.ID, + OwnerID: user.ID, + TemplateVersionID: tv.ID, + WorkspaceID: uuid.NullUUID{UUID: workspaceSharedName.ID, Valid: true}, + Prompt: "test prompt", + }) + makeRequest := func(userID uuid.UUID, sessionToken string) *http.Request { r := httptest.NewRequest("GET", "/", nil) - r.Header.Set(codersdk.SessionTokenHeader, token) + r.Header.Set(codersdk.SessionTokenHeader, sessionToken) ctx := chi.NewRouteContext() - ctx.URLParams.Add("user", "me") + ctx.URLParams.Add("user", userID.String()) r = r.WithContext(context.WithValue(r.Context(), chi.RouteCtxKey, ctx)) - return r, user + return r + } + + makeRouter := func(handler http.HandlerFunc) chi.Router { + rtr := chi.NewRouter() + rtr.Use( + httpmw.ExtractAPIKeyMW(httpmw.ExtractAPIKeyConfig{ + DB: db, + RedirectToLogin: false, + }), + httpmw.ExtractOrganizationMembersParam(db, func(r *http.Request, _ policy.Action, _ rbac.Objecter) bool { + return true + }), + httpmw.ExtractTaskParam(db), + ) + rtr.Get("/", handler) + return rtr } t.Run("None", func(t *testing.T) { @@ -40,8 +124,11 @@ func TestTaskParam(t *testing.T) { db, _ := dbtestutil.NewDB(t) rtr := chi.NewRouter() rtr.Use(httpmw.ExtractTaskParam(db)) - rtr.Get("/", nil) - r, _ := setup(db) + rtr.Get("/", func(w http.ResponseWriter, r *http.Request) { + assert.Fail(t, "this should never get called") + }) + r := httptest.NewRequest("GET", "/", nil) + r = r.WithContext(context.WithValue(r.Context(), chi.RouteCtxKey, chi.NewRouteContext())) rw := httptest.NewRecorder() rtr.ServeHTTP(rw, r) @@ -52,11 +139,10 @@ func TestTaskParam(t *testing.T) { t.Run("NotFound", func(t *testing.T) { t.Parallel() - db, _ := dbtestutil.NewDB(t) - rtr := chi.NewRouter() - rtr.Use(httpmw.ExtractTaskParam(db)) - rtr.Get("/", nil) - r, _ := setup(db) + rtr := makeRouter(func(w http.ResponseWriter, r *http.Request) { + assert.Fail(t, "this should never get called") + }) + r := makeRequest(user.ID, 
token) chi.RouteContext(r.Context()).URLParams.Add("task", uuid.NewString()) rw := httptest.NewRecorder() rtr.ServeHTTP(rw, r) @@ -68,48 +154,77 @@ func TestTaskParam(t *testing.T) { t.Run("Found", func(t *testing.T) { t.Parallel() - db, _ := dbtestutil.NewDB(t) - rtr := chi.NewRouter() - rtr.Use( - httpmw.ExtractAPIKeyMW(httpmw.ExtractAPIKeyConfig{ - DB: db, - RedirectToLogin: false, - }), - httpmw.ExtractTaskParam(db), - ) - rtr.Get("/", func(rw http.ResponseWriter, r *http.Request) { - _ = httpmw.TaskParam(r) - rw.WriteHeader(http.StatusOK) + rtr := makeRouter(func(w http.ResponseWriter, r *http.Request) { + foundTask := httpmw.TaskParam(r) + assert.Equal(t, task.ID.String(), foundTask.ID.String()) }) - r, user := setup(db) - org := dbgen.Organization(t, db, database.Organization{}) - tpl := dbgen.Template(t, db, database.Template{ - OrganizationID: org.ID, - CreatedBy: user.ID, + r := makeRequest(user.ID, token) + chi.RouteContext(r.Context()).URLParams.Add("task", task.ID.String()) + rw := httptest.NewRecorder() + rtr.ServeHTTP(rw, r) + + res := rw.Result() + defer res.Body.Close() + require.Equal(t, http.StatusOK, res.StatusCode) + }) + + t.Run("FoundByTaskName", func(t *testing.T) { + t.Parallel() + rtr := makeRouter(func(w http.ResponseWriter, r *http.Request) { + foundTask := httpmw.TaskParam(r) + assert.Equal(t, task.ID.String(), foundTask.ID.String()) }) - tv := dbgen.TemplateVersion(t, db, database.TemplateVersion{ - TemplateID: uuid.NullUUID{ - UUID: tpl.ID, - Valid: true, - }, - OrganizationID: org.ID, - CreatedBy: user.ID, + r := makeRequest(user.ID, token) + chi.RouteContext(r.Context()).URLParams.Add("task", task.Name) + rw := httptest.NewRecorder() + rtr.ServeHTTP(rw, r) + + res := rw.Result() + defer res.Body.Close() + require.Equal(t, http.StatusOK, res.StatusCode) + }) + + t.Run("NotFoundByWorkspaceName", func(t *testing.T) { + t.Parallel() + rtr := makeRouter(func(w http.ResponseWriter, r *http.Request) { + assert.Fail(t, "this should never 
get called") }) - workspace := dbgen.Workspace(t, db, database.WorkspaceTable{ - OwnerID: user.ID, - Name: "test-workspace", - OrganizationID: org.ID, - TemplateID: tpl.ID, + r := makeRequest(user.ID, token) + chi.RouteContext(r.Context()).URLParams.Add("task", workspace.Name) + rw := httptest.NewRecorder() + rtr.ServeHTTP(rw, r) + + res := rw.Result() + defer res.Body.Close() + require.Equal(t, http.StatusNotFound, res.StatusCode) + }) + + t.Run("CaseInsensitiveTaskName", func(t *testing.T) { + t.Parallel() + rtr := makeRouter(func(w http.ResponseWriter, r *http.Request) { + foundTask := httpmw.TaskParam(r) + assert.Equal(t, task.ID.String(), foundTask.ID.String()) }) - task := dbgen.Task(t, db, database.TaskTable{ - Name: "test-task", - OrganizationID: org.ID, - OwnerID: user.ID, - TemplateVersionID: tv.ID, - WorkspaceID: uuid.NullUUID{UUID: workspace.ID, Valid: true}, - Prompt: "test prompt", + r := makeRequest(user.ID, token) + // Look up with different case + chi.RouteContext(r.Context()).URLParams.Add("task", strings.ToUpper(task.Name)) + rw := httptest.NewRecorder() + rtr.ServeHTTP(rw, r) + + res := rw.Result() + defer res.Body.Close() + require.Equal(t, http.StatusOK, res.StatusCode) + }) + + t.Run("UUIDTakesPrecedence", func(t *testing.T) { + t.Parallel() + rtr := makeRouter(func(w http.ResponseWriter, r *http.Request) { + foundTask := httpmw.TaskParam(r) + assert.Equal(t, taskFoundByUUID.ID.String(), foundTask.ID.String()) }) - chi.RouteContext(r.Context()).URLParams.Add("task", task.ID.String()) + r := makeRequest(user.ID, token) + // Look up by UUID - should find the first task, not the one named with the UUID + chi.RouteContext(r.Context()).URLParams.Add("task", taskFoundByUUID.ID.String()) rw := httptest.NewRecorder() rtr.ServeHTTP(rw, r) @@ -117,4 +232,35 @@ func TestTaskParam(t *testing.T) { defer res.Body.Close() require.Equal(t, http.StatusOK, res.StatusCode) }) + + t.Run("NotFoundWhenNoMatch", func(t *testing.T) { + t.Parallel() + rtr := 
makeRouter(func(w http.ResponseWriter, r *http.Request) { + assert.Fail(t, "this should never get called") + }) + r := makeRequest(user.ID, token) + chi.RouteContext(r.Context()).URLParams.Add("task", "nonexistent-name") + rw := httptest.NewRecorder() + rtr.ServeHTTP(rw, r) + + res := rw.Result() + defer res.Body.Close() + require.Equal(t, http.StatusNotFound, res.StatusCode) + }) + + t.Run("WorkspaceWithoutTask", func(t *testing.T) { + t.Parallel() + rtr := makeRouter(func(w http.ResponseWriter, r *http.Request) { + assert.Fail(t, "this should never get called") + }) + r := makeRequest(user.ID, token) + // Look up by workspace name, but workspace has no task + chi.RouteContext(r.Context()).URLParams.Add("task", workspaceNoTask.Name) + rw := httptest.NewRecorder() + rtr.ServeHTTP(rw, r) + + res := rw.Result() + defer res.Body.Close() + require.Equal(t, http.StatusNotFound, res.StatusCode) + }) } diff --git a/coderd/httpmw/workspaceagent.go b/coderd/httpmw/workspaceagent.go index 0ee231b2f5a12..d5f4e6fef21b6 100644 --- a/coderd/httpmw/workspaceagent.go +++ b/coderd/httpmw/workspaceagent.go @@ -118,6 +118,7 @@ func ExtractWorkspaceAgentAndLatestBuild(opts ExtractWorkspaceAgentAndLatestBuil OwnerID: row.WorkspaceTable.OwnerID, TemplateID: row.WorkspaceTable.TemplateID, VersionID: row.WorkspaceBuild.TemplateVersionID, + TaskID: row.TaskID, BlockUserData: row.WorkspaceAgent.APIKeyScope == database.AgentKeyScopeEnumNoUserData, }), ) diff --git a/coderd/oauth2provider/metadata.go b/coderd/oauth2provider/metadata.go index a6edc4006bc1d..ecc80049df279 100644 --- a/coderd/oauth2provider/metadata.go +++ b/coderd/oauth2provider/metadata.go @@ -18,6 +18,7 @@ func GetAuthorizationServerMetadata(accessURL *url.URL) http.HandlerFunc { AuthorizationEndpoint: accessURL.JoinPath("/oauth2/authorize").String(), TokenEndpoint: accessURL.JoinPath("/oauth2/tokens").String(), RegistrationEndpoint: accessURL.JoinPath("/oauth2/register").String(), // RFC 7591 + RevocationEndpoint: 
accessURL.JoinPath("/oauth2/revoke").String(), // RFC 7009 ResponseTypesSupported: []string{"code"}, GrantTypesSupported: []string{"authorization_code", "refresh_token"}, CodeChallengeMethodsSupported: []string{"S256"}, diff --git a/coderd/prebuilds/api.go b/coderd/prebuilds/api.go index ed39f2a322776..0deab99416fd5 100644 --- a/coderd/prebuilds/api.go +++ b/coderd/prebuilds/api.go @@ -37,13 +37,18 @@ type ReconciliationOrchestrator interface { TrackResourceReplacement(ctx context.Context, workspaceID, buildID uuid.UUID, replacements []*sdkproto.ResourceReplacement) } +// ReconcileStats contains statistics about a reconciliation cycle. +type ReconcileStats struct { + Elapsed time.Duration +} + type Reconciler interface { StateSnapshotter // ReconcileAll orchestrates the reconciliation of all prebuilds across all templates. // It takes a global snapshot of the system state and then reconciles each preset // in parallel, creating or deleting prebuilds as needed to reach their desired states. - ReconcileAll(ctx context.Context) error + ReconcileAll(ctx context.Context) (ReconcileStats, error) } // StateSnapshotter defines the operations necessary to capture workspace prebuilds state. diff --git a/coderd/prebuilds/global_snapshot.go b/coderd/prebuilds/global_snapshot.go index 3c7ec24f5644b..cb91658707c1b 100644 --- a/coderd/prebuilds/global_snapshot.go +++ b/coderd/prebuilds/global_snapshot.go @@ -125,20 +125,29 @@ func (s GlobalSnapshot) IsHardLimited(presetID uuid.UUID) bool { } // filterExpiredWorkspaces splits running workspaces into expired and non-expired -// based on the preset's TTL. -// If TTL is missing or zero, all workspaces are considered non-expired. +// based on the preset's TTL and last_invalidated_at timestamp. +// A prebuild is considered expired if: +// 1. The preset has been invalidated (last_invalidated_at is set), OR +// 2. It exceeds the preset's TTL (if TTL is set) +// If TTL is missing or zero, only last_invalidated_at is checked. 
func filterExpiredWorkspaces(preset database.GetTemplatePresetsWithPrebuildsRow, runningWorkspaces []database.GetRunningPrebuiltWorkspacesRow) (nonExpired []database.GetRunningPrebuiltWorkspacesRow, expired []database.GetRunningPrebuiltWorkspacesRow) { - if !preset.Ttl.Valid { - return runningWorkspaces, expired - } + for _, prebuild := range runningWorkspaces { + isExpired := false - ttl := time.Duration(preset.Ttl.Int32) * time.Second - if ttl <= 0 { - return runningWorkspaces, expired - } + // Check if prebuild was created before last invalidation + if preset.LastInvalidatedAt.Valid && prebuild.CreatedAt.Before(preset.LastInvalidatedAt.Time) { + isExpired = true + } - for _, prebuild := range runningWorkspaces { - if time.Since(prebuild.CreatedAt) > ttl { + // Check TTL expiration if set + if !isExpired && preset.Ttl.Valid { + ttl := time.Duration(preset.Ttl.Int32) * time.Second + if ttl > 0 && time.Since(prebuild.CreatedAt) > ttl { + isExpired = true + } + } + + if isExpired { expired = append(expired, prebuild) } else { nonExpired = append(nonExpired, prebuild) diff --git a/coderd/prebuilds/noop.go b/coderd/prebuilds/noop.go index 170b0a12af6fd..0859d428b4796 100644 --- a/coderd/prebuilds/noop.go +++ b/coderd/prebuilds/noop.go @@ -17,7 +17,11 @@ func (NoopReconciler) Run(context.Context) {} func (NoopReconciler) Stop(context.Context, error) {} func (NoopReconciler) TrackResourceReplacement(context.Context, uuid.UUID, uuid.UUID, []*sdkproto.ResourceReplacement) { } -func (NoopReconciler) ReconcileAll(context.Context) error { return nil } + +func (NoopReconciler) ReconcileAll(context.Context) (ReconcileStats, error) { + return ReconcileStats{}, nil +} + func (NoopReconciler) SnapshotState(context.Context, database.Store) (*GlobalSnapshot, error) { return &GlobalSnapshot{}, nil } diff --git a/coderd/prebuilds/preset_snapshot_test.go b/coderd/prebuilds/preset_snapshot_test.go index c32a84777d069..ebc8921430861 100644 --- a/coderd/prebuilds/preset_snapshot_test.go 
+++ b/coderd/prebuilds/preset_snapshot_test.go @@ -600,6 +600,9 @@ func TestExpiredPrebuilds(t *testing.T) { running int32 desired int32 expired int32 + + invalidated int32 + checkFn func(runningPrebuilds []database.GetRunningPrebuiltWorkspacesRow, state prebuilds.ReconciliationState, actions []*prebuilds.ReconciliationActions) }{ // With 2 running prebuilds, none of which are expired, and the desired count is met, @@ -708,6 +711,52 @@ func TestExpiredPrebuilds(t *testing.T) { }, } + validateState(t, expectedState, state) + validateActions(t, expectedActions, actions) + }, + }, + { + name: "preset has been invalidated - both instances expired", + running: 2, + desired: 2, + expired: 0, + invalidated: 2, + checkFn: func(runningPrebuilds []database.GetRunningPrebuiltWorkspacesRow, state prebuilds.ReconciliationState, actions []*prebuilds.ReconciliationActions) { + expectedState := prebuilds.ReconciliationState{Actual: 2, Desired: 2, Expired: 2} + expectedActions := []*prebuilds.ReconciliationActions{ + { + ActionType: prebuilds.ActionTypeDelete, + DeleteIDs: []uuid.UUID{runningPrebuilds[0].ID, runningPrebuilds[1].ID}, + }, + { + ActionType: prebuilds.ActionTypeCreate, + Create: 2, + }, + } + + validateState(t, expectedState, state) + validateActions(t, expectedActions, actions) + }, + }, + { + name: "preset has been invalidated, but one prebuild instance is newer", + running: 2, + desired: 2, + expired: 0, + invalidated: 1, + checkFn: func(runningPrebuilds []database.GetRunningPrebuiltWorkspacesRow, state prebuilds.ReconciliationState, actions []*prebuilds.ReconciliationActions) { + expectedState := prebuilds.ReconciliationState{Actual: 2, Desired: 2, Expired: 1} + expectedActions := []*prebuilds.ReconciliationActions{ + { + ActionType: prebuilds.ActionTypeDelete, + DeleteIDs: []uuid.UUID{runningPrebuilds[0].ID}, + }, + { + ActionType: prebuilds.ActionTypeCreate, + Create: 1, + }, + } + validateState(t, expectedState, state) validateActions(t, expectedActions, 
actions) }, @@ -719,7 +768,17 @@ func TestExpiredPrebuilds(t *testing.T) { t.Parallel() // GIVEN: a preset. - defaultPreset := preset(true, tc.desired, current) + now := time.Now() + invalidatedAt := now.Add(1 * time.Minute) + + var muts []func(row database.GetTemplatePresetsWithPrebuildsRow) database.GetTemplatePresetsWithPrebuildsRow + if tc.invalidated > 0 { + muts = append(muts, func(row database.GetTemplatePresetsWithPrebuildsRow) database.GetTemplatePresetsWithPrebuildsRow { + row.LastInvalidatedAt = sql.NullTime{Valid: true, Time: invalidatedAt} + return row + }) + } + defaultPreset := preset(true, tc.desired, current, muts...) presets := []database.GetTemplatePresetsWithPrebuildsRow{ defaultPreset, } @@ -727,11 +786,22 @@ func TestExpiredPrebuilds(t *testing.T) { // GIVEN: running prebuilt workspaces for the preset. running := make([]database.GetRunningPrebuiltWorkspacesRow, 0, tc.running) expiredCount := 0 + invalidatedCount := 0 ttlDuration := time.Duration(defaultPreset.Ttl.Int32) for range tc.running { name, err := prebuilds.GenerateName() require.NoError(t, err) + prebuildCreateAt := time.Now() + if int(tc.invalidated) > invalidatedCount { + prebuildCreateAt = prebuildCreateAt.Add(-ttlDuration - 10*time.Second) + invalidatedCount++ + } else if invalidatedCount > 0 { + // Only `tc.invalidated` instances have been invalidated, + // so the next instance is assumed to be created after `invalidatedAt`. 
+ prebuildCreateAt = invalidatedAt.Add(1 * time.Minute) + } + if int(tc.expired) > expiredCount { // Update the prebuild workspace createdAt to exceed its TTL (5 seconds) prebuildCreateAt = prebuildCreateAt.Add(-ttlDuration - 10*time.Second) diff --git a/coderd/prometheusmetrics/aggregator.go b/coderd/prometheusmetrics/aggregator.go index ad51c3e7fa8a7..f3693137d3355 100644 --- a/coderd/prometheusmetrics/aggregator.go +++ b/coderd/prometheusmetrics/aggregator.go @@ -37,6 +37,11 @@ const ( var MetricLabelValueEncoder = strings.NewReplacer("\\", "\\\\", "|", "\\|", ",", "\\,", "=", "\\=") +type descCacheEntry struct { + desc *prometheus.Desc + lastUsed time.Time +} + type MetricsAggregator struct { store map[metricKey]annotatedMetric @@ -50,6 +55,8 @@ type MetricsAggregator struct { updateHistogram prometheus.Histogram cleanupHistogram prometheus.Histogram aggregateByLabels []string + // per-aggregator cache of descriptors + descCache map[string]descCacheEntry } type updateRequest struct { @@ -107,42 +114,6 @@ func hashKey(req *updateRequest, m *agentproto.Stats_Metric) metricKey { var _ prometheus.Collector = new(MetricsAggregator) -func (am *annotatedMetric) asPrometheus() (prometheus.Metric, error) { - var ( - baseLabelNames = am.aggregateByLabels - baseLabelValues []string - extraLabels = am.Labels - ) - - for _, label := range baseLabelNames { - val, err := am.getFieldByLabel(label) - if err != nil { - return nil, err - } - - baseLabelValues = append(baseLabelValues, val) - } - - labels := make([]string, 0, len(baseLabelNames)+len(extraLabels)) - labelValues := make([]string, 0, len(baseLabelNames)+len(extraLabels)) - - labels = append(labels, baseLabelNames...) - labelValues = append(labelValues, baseLabelValues...) 
- - for _, l := range extraLabels { - labels = append(labels, l.Name) - labelValues = append(labelValues, l.Value) - } - - desc := prometheus.NewDesc(am.Name, metricHelpForAgent, labels, nil) - valueType, err := asPrometheusValueType(am.Type) - if err != nil { - return nil, err - } - - return prometheus.MustNewConstMetric(desc, valueType, am.Value, labelValues...), nil -} - // getFieldByLabel returns the related field value for a given label func (am *annotatedMetric) getFieldByLabel(label string) (string, error) { var labelVal string @@ -364,7 +335,7 @@ func (ma *MetricsAggregator) Run(ctx context.Context) func() { } for _, m := range input { - promMetric, err := m.asPrometheus() + promMetric, err := ma.asPrometheus(&m) if err != nil { ma.log.Error(ctx, "can't convert Prometheus value type", slog.F("name", m.Name), slog.F("type", m.Type), slog.F("value", m.Value), slog.Error(err)) continue @@ -386,6 +357,8 @@ func (ma *MetricsAggregator) Run(ctx context.Context) func() { } } + ma.cleanupDescCache() + timer.ObserveDuration() cleanupTicker.Reset(ma.metricsCleanupInterval) ma.storeSizeGauge.Set(float64(len(ma.store))) @@ -407,6 +380,86 @@ func (ma *MetricsAggregator) Run(ctx context.Context) func() { func (*MetricsAggregator) Describe(_ chan<- *prometheus.Desc) { } +// cacheKeyForDesc is used to determine the cache key for a set of labels/extra labels. Used with the aggregators description cache. +// for strings.Builder returned errors from these functions are always nil. 
+// nolint:revive
+func cacheKeyForDesc(name string, baseLabelNames []string, extraLabels []*agentproto.Stats_Metric_Label) string {
+	var b strings.Builder
+	hint := len(name) + (len(baseLabelNames)+len(extraLabels))*8
+	b.Grow(hint)
+	b.WriteString(name)
+	for _, ln := range baseLabelNames {
+		b.WriteByte('|')
+		b.WriteString(ln)
+	}
+	for _, l := range extraLabels {
+		b.WriteByte('|')
+		b.WriteString(l.Name)
+	}
+	return b.String()
+}
+
+// getOrCreateDesc checks if we already have a metric description in the aggregators cache for a given combination of base
+// labels and extra labels. If we do not, we create a new description and cache it.
+func (ma *MetricsAggregator) getOrCreateDesc(name string, help string, baseLabelNames []string, extraLabels []*agentproto.Stats_Metric_Label) *prometheus.Desc {
+	if ma.descCache == nil {
+		ma.descCache = make(map[string]descCacheEntry)
+	}
+	key := cacheKeyForDesc(name, baseLabelNames, extraLabels)
+	if d, ok := ma.descCache[key]; ok {
+		d.lastUsed = time.Now()
+		ma.descCache[key] = d
+		return d.desc
+	}
+	nBase := len(baseLabelNames)
+	nExtra := len(extraLabels)
+	labels := make([]string, nBase+nExtra)
+	copy(labels, baseLabelNames)
+	for i, l := range extraLabels {
+		labels[nBase+i] = l.Name
+	}
+	d := prometheus.NewDesc(name, help, labels, nil)
+	ma.descCache[key] = descCacheEntry{d, time.Now()}
+	return d
+}
+
+// asPrometheus returns the annotatedMetric as a prometheus.Metric, it preallocates/fills by index, uses the aggregators
+// metric description cache, and a small stack buffer for values in order to reduce memory allocations.
+func (ma *MetricsAggregator) asPrometheus(am *annotatedMetric) (prometheus.Metric, error) { + baseLabelNames := am.aggregateByLabels + extraLabels := am.Labels + + nBase := len(baseLabelNames) + nExtra := len(extraLabels) + nTotal := nBase + nExtra + + var scratch [16]string + var labelValues []string + if nTotal <= len(scratch) { + labelValues = scratch[:nTotal] + } else { + labelValues = make([]string, nTotal) + } + + for i, label := range baseLabelNames { + val, err := am.getFieldByLabel(label) + if err != nil { + return nil, err + } + labelValues[i] = val + } + for i, l := range extraLabels { + labelValues[nBase+i] = l.Value + } + + desc := ma.getOrCreateDesc(am.Name, metricHelpForAgent, baseLabelNames, extraLabels) + valueType, err := asPrometheusValueType(am.Type) + if err != nil { + return nil, err + } + return prometheus.MustNewConstMetric(desc, valueType, am.Value, labelValues...), nil +} + var defaultAgentMetricsLabels = []string{agentmetrics.LabelUsername, agentmetrics.LabelWorkspaceName, agentmetrics.LabelAgentName, agentmetrics.LabelTemplateName} // AgentMetricLabels are the labels used to decorate an agent's metrics. 
@@ -453,6 +506,16 @@ func (ma *MetricsAggregator) Update(ctx context.Context, labels AgentMetricLabel } } +// Move to a function for testability +func (ma *MetricsAggregator) cleanupDescCache() { + now := time.Now() + for key, entry := range ma.descCache { + if now.Sub(entry.lastUsed) > ma.metricsCleanupInterval { + delete(ma.descCache, key) + } + } +} + func asPrometheusValueType(metricType agentproto.Stats_Metric_Type) (prometheus.ValueType, error) { switch metricType { case agentproto.Stats_Metric_GAUGE: diff --git a/coderd/prometheusmetrics/aggregator_internal_test.go b/coderd/prometheusmetrics/aggregator_internal_test.go new file mode 100644 index 0000000000000..cd5f4432dc8b1 --- /dev/null +++ b/coderd/prometheusmetrics/aggregator_internal_test.go @@ -0,0 +1,89 @@ +package prometheusmetrics + +import ( + "testing" + "time" + + "github.com/prometheus/client_golang/prometheus" + "github.com/stretchr/testify/require" + + "cdr.dev/slog/sloggers/slogtest" + agentproto "github.com/coder/coder/v2/agent/proto" + "github.com/coder/coder/v2/coderd/agentmetrics" + "github.com/coder/coder/v2/testutil" +) + +func TestDescCache_DescExpire(t *testing.T) { + const ( + testWorkspaceName = "yogi-workspace" + testUsername = "yogi-bear" + testAgentName = "main-agent" + testTemplateName = "main-template" + ) + + testLabels := AgentMetricLabels{ + Username: testUsername, + WorkspaceName: testWorkspaceName, + AgentName: testAgentName, + TemplateName: testTemplateName, + } + + t.Parallel() + + // given + registry := prometheus.NewRegistry() + ma, err := NewMetricsAggregator(slogtest.Make(t, &slogtest.Options{IgnoreErrors: true}), registry, time.Millisecond, agentmetrics.LabelAll) + require.NoError(t, err) + + given := []*agentproto.Stats_Metric{ + {Name: "a_counter_one", Type: agentproto.Stats_Metric_COUNTER, Value: 1}, + } + + _, err = ma.asPrometheus(&annotatedMetric{ + given[0], + testLabels.Username, + testLabels.WorkspaceName, + testLabels.AgentName, + testLabels.TemplateName, + 
// the rest doesn't matter for this test + time.Now(), + []string{}, + }) + require.NoError(t, err) + + require.Eventually(t, func() bool { + ma.cleanupDescCache() + return len(ma.descCache) == 0 + }, testutil.WaitShort, testutil.IntervalFast) +} + +// TestDescCacheTimestampUpdate ensures that the timestamp update in getOrCreateDesc +// updates the map entry because d is a copy, not a pointer. +func TestDescCacheTimestampUpdate(t *testing.T) { + t.Parallel() + + registry := prometheus.NewRegistry() + ma, err := NewMetricsAggregator(slogtest.Make(t, nil), registry, time.Hour, nil) + require.NoError(t, err) + + baseLabelNames := []string{"label1", "label2"} + extraLabels := []*agentproto.Stats_Metric_Label{ + {Name: "extra1", Value: "value1"}, + } + + desc1 := ma.getOrCreateDesc("test_metric", "help text", baseLabelNames, extraLabels) + require.NotNil(t, desc1) + + key := cacheKeyForDesc("test_metric", baseLabelNames, extraLabels) + initialEntry := ma.descCache[key] + initialTime := initialEntry.lastUsed + + desc2 := ma.getOrCreateDesc("test_metric", "help text", baseLabelNames, extraLabels) + require.NotNil(t, desc2) + + updatedEntry := ma.descCache[key] + updatedTime := updatedEntry.lastUsed + + require.NotEqual(t, initialTime, updatedTime, + "Timestamp was NOT updated in map when accessing a metric description that should be cached") +} diff --git a/coderd/prometheusmetrics/prometheusmetrics_internal_test.go b/coderd/prometheusmetrics/prometheusmetrics_internal_test.go index 3a6ecec5c12ec..97eea554fff4a 100644 --- a/coderd/prometheusmetrics/prometheusmetrics_internal_test.go +++ b/coderd/prometheusmetrics/prometheusmetrics_internal_test.go @@ -1,10 +1,12 @@ package prometheusmetrics import ( + "fmt" "testing" "github.com/stretchr/testify/require" + agentproto "github.com/coder/coder/v2/agent/proto" "github.com/coder/coder/v2/coderd/agentmetrics" ) @@ -36,3 +38,52 @@ func TestFilterAcceptableAgentLabels(t *testing.T) { }) } } + +func benchAsPrometheus(b *testing.B, 
base []string, extraN int) { + am := annotatedMetric{ + Stats_Metric: &agentproto.Stats_Metric{ + Name: "blink_test_metric", + Type: agentproto.Stats_Metric_GAUGE, + Value: 1, + Labels: make([]*agentproto.Stats_Metric_Label, extraN), + }, + username: "user", + workspaceName: "ws", + agentName: "agent", + templateName: "tmpl", + aggregateByLabels: base, + } + for i := 0; i < extraN; i++ { + am.Labels[i] = &agentproto.Stats_Metric_Label{Name: fmt.Sprintf("l%d", i), Value: "v"} + } + + ma := &MetricsAggregator{} + + b.ReportAllocs() + b.ResetTimer() + for i := 0; i < b.N; i++ { + _, err := ma.asPrometheus(&am) + if err != nil { + b.Fatal(err) + } + } +} + +func Benchmark_asPrometheus(b *testing.B) { + cases := []struct { + name string + base []string + extraN int + }{ + {"base4_extra0", defaultAgentMetricsLabels, 0}, + {"base4_extra2", defaultAgentMetricsLabels, 2}, + {"base4_extra5", defaultAgentMetricsLabels, 5}, + {"base4_extra10", defaultAgentMetricsLabels, 10}, + {"base2_extra5", []string{agentmetrics.LabelUsername, agentmetrics.LabelWorkspaceName}, 5}, + } + for _, tc := range cases { + b.Run(tc.name, func(b *testing.B) { + benchAsPrometheus(b, tc.base, tc.extraN) + }) + } +} diff --git a/coderd/provisionerdserver/metrics.go b/coderd/provisionerdserver/metrics.go index b1afc10670f22..204bc2e717402 100644 --- a/coderd/provisionerdserver/metrics.go +++ b/coderd/provisionerdserver/metrics.go @@ -100,6 +100,12 @@ func (m *Metrics) Register(reg prometheus.Registerer) error { return reg.Register(m.workspaceClaimTimings) } +// IsTrackable returns true if the workspace build should be tracked in metrics. +// This includes workspace creation, prebuild creation, and prebuild claims. 
+func (f WorkspaceTimingFlags) IsTrackable() bool { + return f.IsPrebuild || f.IsClaim || f.IsFirstBuild +} + // getWorkspaceTimingType classifies a workspace build: // - PrebuildCreation: creation of a prebuilt workspace // - PrebuildClaim: claim of an existing prebuilt workspace @@ -153,6 +159,6 @@ func (m *Metrics) UpdateWorkspaceTimingsMetrics( m.workspaceClaimTimings. WithLabelValues(organizationName, templateName, presetName).Observe(buildTime) default: - m.logger.Warn(ctx, "unsupported workspace timing flags") + // Not a trackable build type (e.g. restart, stop, subsequent builds) } } diff --git a/coderd/provisionerdserver/provisionerdserver.go b/coderd/provisionerdserver/provisionerdserver.go index bf7741bdc260f..c4598beaf8399 100644 --- a/coderd/provisionerdserver/provisionerdserver.go +++ b/coderd/provisionerdserver/provisionerdserver.go @@ -43,6 +43,7 @@ import ( "github.com/coder/coder/v2/coderd/tracing" "github.com/coder/coder/v2/coderd/usage" "github.com/coder/coder/v2/coderd/usage/usagetypes" + "github.com/coder/coder/v2/coderd/util/ptr" "github.com/coder/coder/v2/coderd/util/slice" "github.com/coder/coder/v2/coderd/wspubsub" "github.com/coder/coder/v2/codersdk" @@ -120,6 +121,7 @@ type server struct { NotificationsEnqueuer notifications.Enqueuer PrebuildsOrchestrator *atomic.Pointer[prebuilds.ReconciliationOrchestrator] UsageInserter *atomic.Pointer[usage.Inserter] + Experiments codersdk.Experiments OIDCConfig promoauth.OAuth2Config @@ -181,6 +183,7 @@ func NewServer( enqueuer notifications.Enqueuer, prebuildsOrchestrator *atomic.Pointer[prebuilds.ReconciliationOrchestrator], metrics *Metrics, + experiments codersdk.Experiments, ) (proto.DRPCProvisionerDaemonServer, error) { // Fail-fast if pointers are nil if lifecycleCtx == nil { @@ -252,6 +255,7 @@ func NewServer( PrebuildsOrchestrator: prebuildsOrchestrator, UsageInserter: usageInserter, metrics: metrics, + Experiments: experiments, } if s.heartbeatFn == nil { @@ -695,6 +699,7 @@ func (s 
*server) acquireProtoJob(ctx context.Context, job database.ProvisionerJo } } + activeVersion := template.ActiveVersionID == templateVersion.ID protoJob.Type = &proto.AcquiredJob_WorkspaceBuild_{ WorkspaceBuild: &proto.AcquiredJob_WorkspaceBuild{ WorkspaceBuildId: workspaceBuild.ID.String(), @@ -704,6 +709,12 @@ func (s *server) acquireProtoJob(ctx context.Context, job database.ProvisionerJo PreviousParameterValues: convertRichParameterValues(lastWorkspaceBuildParameters), VariableValues: asVariableValues(templateVariables), ExternalAuthProviders: externalAuthProviders, + // If active and experiment is enabled, allow workspace reuse existing TF + // workspaces (directories) for a faster startup. + ExpReuseTerraformWorkspace: ptr.Ref(s.Experiments.Enabled(codersdk.ExperimentTerraformWorkspace) && // Experiment required + template.UseTerraformWorkspaceCache && // Template setting + activeVersion, // Only for active versions + ), Metadata: &sdkproto.Metadata{ CoderUrl: s.AccessURL.String(), WorkspaceTransition: transition, @@ -717,6 +728,7 @@ func (s *server) acquireProtoJob(ctx context.Context, job database.ProvisionerJo WorkspaceOwnerId: owner.ID.String(), TemplateId: template.ID.String(), TemplateName: template.Name, + TemplateVersionId: templateVersion.ID.String(), TemplateVersion: templateVersion.Name, WorkspaceOwnerSessionToken: sessionToken, WorkspaceOwnerSshPublicKey: ownerSSHPublicKey, @@ -773,6 +785,11 @@ func (s *server) acquireProtoJob(ctx context.Context, job database.ProvisionerJo return nil, failJob(err.Error()) } + templateID := "" + if input.TemplateID.Valid { + templateID = input.TemplateID.UUID.String() + } + protoJob.Type = &proto.AcquiredJob_TemplateImport_{ TemplateImport: &proto.AcquiredJob_TemplateImport{ UserVariableValues: convertVariableValues(userVariableValues), @@ -781,6 +798,8 @@ func (s *server) acquireProtoJob(ctx context.Context, job database.ProvisionerJo // There is no owner for a template import, but we can assume // the "Everyone" 
group as a placeholder. WorkspaceOwnerGroups: []string{database.EveryoneGroup}, + TemplateId: templateID, + TemplateVersionId: input.TemplateVersionID.String(), }, }, } @@ -2006,10 +2025,12 @@ func (s *server) completeWorkspaceBuildJob(ctx context.Context, job database.Pro } } - var taskAppID uuid.NullUUID - var taskAgentID uuid.NullUUID - var hasAITask bool - var warnUnknownTaskAppID bool + var ( + hasAITask bool + unknownAppID string + taskAppID uuid.NullUUID + taskAgentID uuid.NullUUID + ) if tasks := jobType.WorkspaceBuild.GetAiTasks(); len(tasks) > 0 { hasAITask = true task := tasks[0] @@ -2026,59 +2047,29 @@ func (s *server) completeWorkspaceBuildJob(ctx context.Context, job database.Pro } if !slices.Contains(appIDs, appID) { - warnUnknownTaskAppID = true - } - - id, err := uuid.Parse(appID) - if err != nil { - return xerrors.Errorf("parse app id: %w", err) - } - - taskAppID = uuid.NullUUID{UUID: id, Valid: true} - - agentID, ok := agentIDByAppID[appID] - taskAgentID = uuid.NullUUID{UUID: agentID, Valid: ok} - } - - // This is a hacky workaround for the issue with tasks 'disappearing' on stop: - // reuse has_ai_task and sidebar_app_id from the previous build. - // This workaround should be removed as soon as possible. 
- if workspaceBuild.Transition == database.WorkspaceTransitionStop && workspaceBuild.BuildNumber > 1 { - if prevBuild, err := s.Database.GetWorkspaceBuildByWorkspaceIDAndBuildNumber(ctx, database.GetWorkspaceBuildByWorkspaceIDAndBuildNumberParams{ - WorkspaceID: workspaceBuild.WorkspaceID, - BuildNumber: workspaceBuild.BuildNumber - 1, - }); err == nil { - hasAITask = prevBuild.HasAITask.Bool - taskAppID = prevBuild.AITaskSidebarAppID - warnUnknownTaskAppID = false - s.Logger.Debug(ctx, "task workaround: reused has_ai_task and app_id from previous build to keep track of task", - slog.F("job_id", job.ID.String()), - slog.F("build_number", prevBuild.BuildNumber), - slog.F("workspace_id", workspace.ID), - slog.F("workspace_build_id", workspaceBuild.ID), - slog.F("transition", string(workspaceBuild.Transition)), - slog.F("sidebar_app_id", taskAppID.UUID), - slog.F("has_ai_task", hasAITask), - ) + unknownAppID = appID + hasAITask = false } else { - s.Logger.Error(ctx, "task workaround: tracking via has_ai_task and app_id from previous build failed", - slog.Error(err), - slog.F("job_id", job.ID.String()), - slog.F("workspace_id", workspace.ID), - slog.F("workspace_build_id", workspaceBuild.ID), - slog.F("transition", string(workspaceBuild.Transition)), - ) + // Only parse for valid app and agent to avoid fk violation. + id, err := uuid.Parse(appID) + if err != nil { + return xerrors.Errorf("parse app id: %w", err) + } + taskAppID = uuid.NullUUID{UUID: id, Valid: true} + + agentID, ok := agentIDByAppID[appID] + taskAgentID = uuid.NullUUID{UUID: agentID, Valid: ok} } } - if warnUnknownTaskAppID { + if unknownAppID != "" && workspaceBuild.Transition == database.WorkspaceTransitionStart { // Ref: https://github.com/coder/coder/issues/18776 // This can happen for a number of reasons: // 1. Misconfigured template // 2. Count=0 on the agent due to stop transition, meaning the associated coder_app was not inserted. 
// Failing the build at this point is not ideal, so log a warning instead. s.Logger.Warn(ctx, "unknown ai_task_app_id", - slog.F("ai_task_app_id", taskAppID.UUID.String()), + slog.F("ai_task_app_id", unknownAppID), slog.F("job_id", job.ID.String()), slog.F("workspace_id", workspace.ID), slog.F("workspace_build_id", workspaceBuild.ID), @@ -2105,9 +2096,6 @@ func (s *server) completeWorkspaceBuildJob(ctx context.Context, job database.Pro slog.F("transition", string(workspaceBuild.Transition)), ) } - // Important: reset hasAITask and sidebarAppID so that we don't run into a fk constraint violation. - hasAITask = false - taskAppID = uuid.NullUUID{} } if hasAITask && workspaceBuild.Transition == database.WorkspaceTransitionStart { @@ -2124,14 +2112,6 @@ func (s *server) completeWorkspaceBuildJob(ctx context.Context, job database.Pro } } - hasExternalAgent := false - for _, resource := range jobType.WorkspaceBuild.Resources { - if resource.Type == "coder_external_agent" { - hasExternalAgent = true - break - } - } - if task, err := db.GetTaskByWorkspaceID(ctx, workspace.ID); err == nil { // Irrespective of whether the agent or sidebar app is present, // perform the upsert to ensure a link between the task and @@ -2153,8 +2133,9 @@ func (s *server) completeWorkspaceBuildJob(ctx context.Context, job database.Pro return xerrors.Errorf("get task by workspace id: %w", err) } - // Regardless of whether there is an AI task or not, update the field to indicate one way or the other since it - // always defaults to nil. ONLY if has_ai_task=true MUST ai_task_sidebar_app_id be set. 
+ _, hasExternalAgent := slice.Find(jobType.WorkspaceBuild.Resources, func(resource *sdkproto.Resource) bool { + return resource.Type == "coder_external_agent" + }) if err := db.UpdateWorkspaceBuildFlagsByID(ctx, database.UpdateWorkspaceBuildFlagsByIDParams{ ID: workspaceBuild.ID, HasAITask: sql.NullBool{ @@ -2165,8 +2146,7 @@ func (s *server) completeWorkspaceBuildJob(ctx context.Context, job database.Pro Bool: hasExternalAgent, Valid: true, }, - SidebarAppID: taskAppID, - UpdatedAt: now, + UpdatedAt: now, }); err != nil { return xerrors.Errorf("update workspace build ai tasks and external agent flag: %w", err) } @@ -2195,6 +2175,12 @@ func (s *server) completeWorkspaceBuildJob(ctx context.Context, job database.Pro continue } + // Scan does not guarantee validity + if !stg.Valid() { + s.Logger.Warn(ctx, "invalid stage, will fail insert based one enum", slog.F("value", t.Stage)) + continue + } + params.Stage = append(params.Stage, stg) params.Source = append(params.Source, t.Source) params.Resource = append(params.Resource, t.Resource) @@ -2204,8 +2190,11 @@ func (s *server) completeWorkspaceBuildJob(ctx context.Context, job database.Pro } _, err = db.InsertProvisionerJobTimings(ctx, params) if err != nil { - // Log error but don't fail the whole transaction for non-critical data + // A database error here will "fail" this transaction. Making this error fatal. + // If this error is seen, add checks above to validate the insert parameters. In + // production, timings should not be a fatal error. 
s.Logger.Warn(ctx, "failed to update provisioner job timings", slog.F("job_id", jobID), slog.Error(err)) + return xerrors.Errorf("update provisioner job timings: %w", err) } // On start, we want to ensure that workspace agents timeout statuses @@ -2278,6 +2267,14 @@ func (s *server) completeWorkspaceBuildJob(ctx context.Context, job database.Pro if err != nil { return xerrors.Errorf("update workspace deleted: %w", err) } + if workspace.TaskID.Valid { + if _, err := db.DeleteTask(ctx, database.DeleteTaskParams{ + ID: workspace.TaskID.UUID, + DeletedAt: dbtime.Now(), + }); err != nil && !errors.Is(err, sql.ErrNoRows) { + return xerrors.Errorf("delete task related to workspace: %w", err) + } + } return nil }, nil) @@ -2348,40 +2345,42 @@ func (s *server) completeWorkspaceBuildJob(ctx context.Context, job database.Pro } // Update workspace (regular and prebuild) timing metrics - if s.metrics != nil { - // Only consider 'start' workspace builds - if workspaceBuild.Transition == database.WorkspaceTransitionStart { - // Get the updated job to report the metrics with correct data - updatedJob, err := s.Database.GetProvisionerJobByID(ctx, jobID) - if err != nil { - s.Logger.Error(ctx, "get updated job from database", slog.Error(err)) - } else - // Only consider 'succeeded' provisioner jobs - if updatedJob.JobStatus == database.ProvisionerJobStatusSucceeded { - presetName := "" - if workspaceBuild.TemplateVersionPresetID.Valid { - preset, err := s.Database.GetPresetByID(ctx, workspaceBuild.TemplateVersionPresetID.UUID) - if err != nil { - if !errors.Is(err, sql.ErrNoRows) { - s.Logger.Error(ctx, "get preset by ID for workspace timing metrics", slog.Error(err)) - } - } else { - presetName = preset.Name + // Only consider 'start' workspace builds + if s.metrics != nil && workspaceBuild.Transition == database.WorkspaceTransitionStart { + // Get the updated job to report the metrics with correct data + updatedJob, err := s.Database.GetProvisionerJobByID(ctx, jobID) + if err != 
nil { + s.Logger.Error(ctx, "get updated job from database", slog.Error(err)) + } else + // Only consider 'succeeded' provisioner jobs + if updatedJob.JobStatus == database.ProvisionerJobStatusSucceeded { + presetName := "" + if workspaceBuild.TemplateVersionPresetID.Valid { + preset, err := s.Database.GetPresetByID(ctx, workspaceBuild.TemplateVersionPresetID.UUID) + if err != nil { + if !errors.Is(err, sql.ErrNoRows) { + s.Logger.Error(ctx, "get preset by ID for workspace timing metrics", slog.Error(err)) } + } else { + presetName = preset.Name } + } - buildTime := updatedJob.CompletedAt.Time.Sub(updatedJob.StartedAt.Time).Seconds() + buildTime := updatedJob.CompletedAt.Time.Sub(updatedJob.StartedAt.Time).Seconds() + flags := WorkspaceTimingFlags{ + // Is a prebuilt workspace creation build + IsPrebuild: input.PrebuiltWorkspaceBuildStage.IsPrebuild(), + // Is a prebuilt workspace claim build + IsClaim: input.PrebuiltWorkspaceBuildStage.IsPrebuiltWorkspaceClaim(), + // Is a regular workspace creation build + // Only consider the first build number for regular workspaces + IsFirstBuild: workspaceBuild.BuildNumber == 1, + } + // Only track metrics for prebuild creation, prebuild claims and workspace creation + if flags.IsTrackable() { s.metrics.UpdateWorkspaceTimingsMetrics( ctx, - WorkspaceTimingFlags{ - // Is a prebuilt workspace creation build - IsPrebuild: input.PrebuiltWorkspaceBuildStage.IsPrebuild(), - // Is a prebuilt workspace claim build - IsClaim: input.PrebuiltWorkspaceBuildStage.IsPrebuiltWorkspaceClaim(), - // Is a regular workspace creation build - // Only consider the first build number for regular workspaces - IsFirstBuild: workspaceBuild.BuildNumber == 1, - }, + flags, workspace.OrganizationName, workspace.TemplateName, presetName, @@ -2582,6 +2581,7 @@ func InsertWorkspacePresetAndParameters(ctx context.Context, db database.Store, IsDefault: protoPreset.GetDefault(), Description: protoPreset.Description, Icon: protoPreset.Icon, + LastInvalidatedAt: 
sql.NullTime{}, }) if err != nil { return xerrors.Errorf("insert preset: %w", err) @@ -3241,6 +3241,10 @@ func auditActionFromTransition(transition database.WorkspaceTransition) database } type TemplateVersionImportJob struct { + // TemplateID is not guaranteed to be set. Template versions can be created + // without being associated with a template. Resulting in a template id of + // `uuid.Nil` + TemplateID uuid.NullUUID `json:"template_id"` TemplateVersionID uuid.UUID `json:"template_version_id"` UserVariableValues []codersdk.VariableValue `json:"user_variable_values"` } diff --git a/coderd/provisionerdserver/provisionerdserver_test.go b/coderd/provisionerdserver/provisionerdserver_test.go index 8d55e1529289f..4dc8621736b5c 100644 --- a/coderd/provisionerdserver/provisionerdserver_test.go +++ b/coderd/provisionerdserver/provisionerdserver_test.go @@ -26,6 +26,8 @@ import ( "storj.io/drpc" "cdr.dev/slog/sloggers/slogtest" + "github.com/coder/coder/v2/coderd" + "github.com/coder/coder/v2/coderd/util/ptr" "github.com/coder/quartz" "github.com/coder/serpent" @@ -59,24 +61,24 @@ import ( ) func testTemplateScheduleStore() *atomic.Pointer[schedule.TemplateScheduleStore] { - ptr := &atomic.Pointer[schedule.TemplateScheduleStore]{} + poitr := &atomic.Pointer[schedule.TemplateScheduleStore]{} store := schedule.NewAGPLTemplateScheduleStore() - ptr.Store(&store) - return ptr + poitr.Store(&store) + return poitr } func testUserQuietHoursScheduleStore() *atomic.Pointer[schedule.UserQuietHoursScheduleStore] { - ptr := &atomic.Pointer[schedule.UserQuietHoursScheduleStore]{} + poitr := &atomic.Pointer[schedule.UserQuietHoursScheduleStore]{} store := schedule.NewAGPLUserQuietHoursScheduleStore() - ptr.Store(&store) - return ptr + poitr.Store(&store) + return poitr } func testUsageInserter() *atomic.Pointer[usage.Inserter] { - ptr := &atomic.Pointer[usage.Inserter]{} + poitr := &atomic.Pointer[usage.Inserter]{} inserter := usage.NewAGPLInserter() - ptr.Store(&inserter) - return 
ptr + poitr.Store(&inserter) + return poitr } func TestAcquireJob_LongPoll(t *testing.T) { @@ -450,6 +452,7 @@ func TestAcquireJob(t *testing.T) { TemplateId: template.ID.String(), TemplateName: template.Name, TemplateVersion: version.Name, + TemplateVersionId: version.ID.String(), WorkspaceOwnerSessionToken: sessionToken, WorkspaceOwnerSshPublicKey: sshKey.PublicKey, WorkspaceOwnerSshPrivateKey: sshKey.PrivateKey, @@ -474,8 +477,9 @@ func TestAcquireJob(t *testing.T) { }) want, err := json.Marshal(&proto.AcquiredJob_WorkspaceBuild_{ WorkspaceBuild: &proto.AcquiredJob_WorkspaceBuild{ - WorkspaceBuildId: build.ID.String(), - WorkspaceName: workspace.Name, + ExpReuseTerraformWorkspace: ptr.Ref(false), + WorkspaceBuildId: build.ID.String(), + WorkspaceName: workspace.Name, VariableValues: []*sdkproto.VariableValue{ { Name: "first", @@ -629,6 +633,7 @@ func TestAcquireJob(t *testing.T) { Metadata: &sdkproto.Metadata{ CoderUrl: (&url.URL{}).String(), WorkspaceOwnerGroups: []string{database.EveryoneGroup}, + TemplateVersionId: uuid.Nil.String(), }, }, }) @@ -677,6 +682,7 @@ func TestAcquireJob(t *testing.T) { Metadata: &sdkproto.Metadata{ CoderUrl: (&url.URL{}).String(), WorkspaceOwnerGroups: []string{database.EveryoneGroup}, + TemplateVersionId: version.ID.String(), }, }, }) @@ -2864,11 +2870,12 @@ func TestCompleteJob(t *testing.T) { input *proto.CompletedJob_WorkspaceBuild isTask bool expectTaskStatus database.TaskStatus + expectAppID uuid.NullUUID expectHasAiTask bool expectUsageEvent bool } - sidebarAppID := uuid.NewString() + sidebarAppID := uuid.New() for _, tc := range []testcase{ { name: "has_ai_task is false by default", @@ -2883,12 +2890,45 @@ func TestCompleteJob(t *testing.T) { { name: "has_ai_task is set to true", transition: database.WorkspaceTransitionStart, + input: &proto.CompletedJob_WorkspaceBuild{ + AiTasks: []*sdkproto.AITask{ + { + Id: uuid.NewString(), + AppId: sidebarAppID.String(), + }, + }, + Resources: []*sdkproto.Resource{ + { + Agents: 
[]*sdkproto.Agent{ + { + Id: uuid.NewString(), + Name: "a", + Apps: []*sdkproto.App{ + { + Id: sidebarAppID.String(), + Slug: "test-app", + }, + }, + }, + }, + }, + }, + }, + isTask: true, + expectTaskStatus: database.TaskStatusInitializing, + expectAppID: uuid.NullUUID{UUID: sidebarAppID, Valid: true}, + expectHasAiTask: true, + expectUsageEvent: true, + }, + { + name: "has_ai_task is set to true, with sidebar app id", + transition: database.WorkspaceTransitionStart, input: &proto.CompletedJob_WorkspaceBuild{ AiTasks: []*sdkproto.AITask{ { Id: uuid.NewString(), SidebarApp: &sdkproto.AITaskSidebarApp{ - Id: sidebarAppID, + Id: sidebarAppID.String(), }, }, }, @@ -2900,7 +2940,7 @@ func TestCompleteJob(t *testing.T) { Name: "a", Apps: []*sdkproto.App{ { - Id: sidebarAppID, + Id: sidebarAppID.String(), Slug: "test-app", }, }, @@ -2911,6 +2951,7 @@ func TestCompleteJob(t *testing.T) { }, isTask: true, expectTaskStatus: database.TaskStatusInitializing, + expectAppID: uuid.NullUUID{UUID: sidebarAppID, Valid: true}, expectHasAiTask: true, expectUsageEvent: true, }, @@ -2922,10 +2963,9 @@ func TestCompleteJob(t *testing.T) { AiTasks: []*sdkproto.AITask{ { Id: uuid.NewString(), - SidebarApp: &sdkproto.AITaskSidebarApp{ - // Non-existing app ID would previously trigger a FK violation. - Id: uuid.NewString(), - }, + // Non-existing app ID would previously trigger a FK violation. + // Now it should just be ignored. 
+ AppId: sidebarAppID.String(), }, }, }, @@ -2940,10 +2980,8 @@ func TestCompleteJob(t *testing.T) { input: &proto.CompletedJob_WorkspaceBuild{ AiTasks: []*sdkproto.AITask{ { - Id: uuid.NewString(), - SidebarApp: &sdkproto.AITaskSidebarApp{ - Id: sidebarAppID, - }, + Id: uuid.NewString(), + AppId: sidebarAppID.String(), }, }, Resources: []*sdkproto.Resource{ @@ -2954,7 +2992,7 @@ func TestCompleteJob(t *testing.T) { Name: "a", Apps: []*sdkproto.App{ { - Id: sidebarAppID, + Id: sidebarAppID.String(), Slug: "test-app", }, }, @@ -2965,6 +3003,7 @@ func TestCompleteJob(t *testing.T) { }, isTask: true, expectTaskStatus: database.TaskStatusPaused, + expectAppID: uuid.NullUUID{UUID: sidebarAppID, Valid: true}, expectHasAiTask: true, expectUsageEvent: false, }, @@ -2978,7 +3017,7 @@ func TestCompleteJob(t *testing.T) { }, isTask: true, expectTaskStatus: database.TaskStatusPaused, - expectHasAiTask: true, + expectHasAiTask: false, // We no longer inherit this from the previous build. expectUsageEvent: false, }, } { @@ -3092,15 +3131,15 @@ func TestCompleteJob(t *testing.T) { require.True(t, build.HasAITask.Valid) // We ALWAYS expect a value to be set, therefore not nil, i.e. valid = true. require.Equal(t, tc.expectHasAiTask, build.HasAITask.Bool) + task, err := db.GetTaskByID(ctx, genTask.ID) if tc.isTask { - task, err := db.GetTaskByID(ctx, genTask.ID) require.NoError(t, err) require.Equal(t, tc.expectTaskStatus, task.Status) + } else { + require.Error(t, err) } - if tc.expectHasAiTask && build.Transition != database.WorkspaceTransitionStop { - require.Equal(t, sidebarAppID, build.AITaskSidebarAppID.UUID.String()) - } + require.Equal(t, tc.expectAppID, task.WorkspaceAppID) if tc.expectUsageEvent { // Check that a usage event was collected. 
@@ -4091,6 +4130,7 @@ func TestServer_ExpirePrebuildsSessionToken(t *testing.T) { job, err := fs.waitForJob() require.NoError(t, err) require.NotNil(t, job) + require.NotNil(t, job.Type, "acquired job type was nil?!") workspaceBuildJob := job.Type.(*proto.AcquiredJob_WorkspaceBuild_).WorkspaceBuild require.NotNil(t, workspaceBuildJob.Metadata) @@ -4124,7 +4164,7 @@ func setup(t *testing.T, ignoreLogErrors bool, ov *overrides) (proto.DRPCProvisi defOrg, err := db.GetDefaultOrganization(context.Background()) require.NoError(t, err, "default org not found") - deploymentValues := &codersdk.DeploymentValues{} + deploymentValues := coderdtest.DeploymentValues(t) var externalAuthConfigs []*externalauth.Config tss := testTemplateScheduleStore() uqhss := testUserQuietHoursScheduleStore() @@ -4247,6 +4287,7 @@ func setup(t *testing.T, ignoreLogErrors bool, ov *overrides) (proto.DRPCProvisi notifEnq, &op, provisionerdserver.NewMetrics(logger), + coderd.ReadExperiments(logger, deploymentValues.Experiments), ) require.NoError(t, err) return srv, db, ps, daemon @@ -4358,11 +4399,11 @@ type fakeUsageInserter struct { var _ usage.Inserter = &fakeUsageInserter{} func newFakeUsageInserter() (*fakeUsageInserter, *atomic.Pointer[usage.Inserter]) { - ptr := &atomic.Pointer[usage.Inserter]{} + poitr := &atomic.Pointer[usage.Inserter]{} fake := &fakeUsageInserter{} var inserter usage.Inserter = fake - ptr.Store(&inserter) - return fake, ptr + poitr.Store(&inserter) + return fake, poitr } func (f *fakeUsageInserter) InsertDiscreteUsageEvent(_ context.Context, _ database.Store, event usagetypes.DiscreteEvent) error { @@ -4413,19 +4454,18 @@ func seedPreviousWorkspaceStartWithAITask(ctx context.Context, t testing.TB, db agt := dbgen.WorkspaceAgent(t, db, database.WorkspaceAgent{ ResourceID: res.ID, }) - wa := dbgen.WorkspaceApp(t, db, database.WorkspaceApp{ + _ = dbgen.WorkspaceApp(t, db, database.WorkspaceApp{ AgentID: agt.ID, }) _ = dbgen.WorkspaceBuild(t, db, database.WorkspaceBuild{ - 
BuildNumber: 1, - HasAITask: sql.NullBool{Valid: true, Bool: true}, - AITaskSidebarAppID: uuid.NullUUID{Valid: true, UUID: wa.ID}, - ID: w.ID, - InitiatorID: w.OwnerID, - JobID: prevJob.ID, - TemplateVersionID: tvs[0].ID, - Transition: database.WorkspaceTransitionStart, - WorkspaceID: w.ID, + BuildNumber: 1, + HasAITask: sql.NullBool{Valid: true, Bool: true}, + ID: w.ID, + InitiatorID: w.OwnerID, + JobID: prevJob.ID, + TemplateVersionID: tvs[0].ID, + Transition: database.WorkspaceTransitionStart, + WorkspaceID: w.ID, }) return nil } diff --git a/coderd/rbac/regosql/configs.go b/coderd/rbac/regosql/configs.go index b06d4d0583014..355a49756d587 100644 --- a/coderd/rbac/regosql/configs.go +++ b/coderd/rbac/regosql/configs.go @@ -81,7 +81,7 @@ func ConnectionLogConverter() *sqltypes.VariableConverter { func AIBridgeInterceptionConverter() *sqltypes.VariableConverter { matcher := sqltypes.NewVariableConverter().RegisterMatcher( resourceIDMatcher(), - // AIBridge interceptions are not tied to any organization. + // AI Bridge interceptions are not tied to any organization. sqltypes.StringVarMatcher("''", []string{"input", "object", "org_owner"}), sqltypes.StringVarMatcher("initiator_id :: text", []string{"input", "object", "owner"}), ) diff --git a/coderd/rbac/scopes.go b/coderd/rbac/scopes.go index 5c8c80305679c..4e5babba29e0f 100644 --- a/coderd/rbac/scopes.go +++ b/coderd/rbac/scopes.go @@ -18,6 +18,7 @@ type WorkspaceAgentScopeParams struct { OwnerID uuid.UUID TemplateID uuid.UUID VersionID uuid.UUID + TaskID uuid.NullUUID BlockUserData bool } @@ -42,6 +43,15 @@ func WorkspaceAgentScope(params WorkspaceAgentScopeParams) Scope { panic("failed to expand scope, this should never happen") } + // Include task in the allow list if the workspace has an associated task. 
+ var extraAllowList []AllowListElement + if params.TaskID.Valid { + extraAllowList = append(extraAllowList, AllowListElement{ + Type: ResourceTask.Type, + ID: params.TaskID.UUID.String(), + }) + } + return Scope{ // TODO: We want to limit the role too to be extra safe. // Even though the allowlist blocks anything else, it is still good @@ -52,12 +62,12 @@ func WorkspaceAgentScope(params WorkspaceAgentScopeParams) Scope { // Limit the agent to only be able to access the singular workspace and // the template/version it was created from. Add additional resources here // as needed, but do not add more workspace or template resource ids. - AllowIDList: []AllowListElement{ + AllowIDList: append([]AllowListElement{ {Type: ResourceWorkspace.Type, ID: params.WorkspaceID.String()}, {Type: ResourceTemplate.Type, ID: params.TemplateID.String()}, {Type: ResourceTemplate.Type, ID: params.VersionID.String()}, {Type: ResourceUser.Type, ID: params.OwnerID.String()}, - }, + }, extraAllowList...), } } diff --git a/coderd/rbac/scopes_catalog.go b/coderd/rbac/scopes_catalog.go index ef4f3186de4fd..7f6b538bd5bfd 100644 --- a/coderd/rbac/scopes_catalog.go +++ b/coderd/rbac/scopes_catalog.go @@ -43,6 +43,7 @@ var externalLowLevel = map[ScopeName]struct{}{ // Users (personal profile only) "user:read_personal": {}, "user:update_personal": {}, + "user.*": {}, // User secrets "user_secret:read": {}, @@ -57,6 +58,12 @@ var externalLowLevel = map[ScopeName]struct{}{ "task:update": {}, "task:delete": {}, "task:*": {}, + + // Organizations + "organization:read": {}, + "organization:update": {}, + "organization:delete": {}, + "organization:*": {}, } // Public composite coder:* scopes exposed to users. 
diff --git a/coderd/taskname/taskname.go b/coderd/taskname/taskname.go index 734c23eb3dd76..3aabd8bf335ac 100644 --- a/coderd/taskname/taskname.go +++ b/coderd/taskname/taskname.go @@ -2,39 +2,82 @@ package taskname import ( "context" + "encoding/json" "fmt" "io" "math/rand/v2" "os" + "regexp" "strings" + "cdr.dev/slog" + "github.com/anthropics/anthropic-sdk-go" anthropicoption "github.com/anthropics/anthropic-sdk-go/option" "github.com/moby/moby/pkg/namesgenerator" "golang.org/x/xerrors" "github.com/coder/aisdk-go" + strutil "github.com/coder/coder/v2/coderd/util/strings" "github.com/coder/coder/v2/codersdk" ) const ( defaultModel = anthropic.ModelClaude3_5HaikuLatest - systemPrompt = `Generate a short workspace name from this AI task prompt. + systemPrompt = `Generate a short task display name and name from this AI task prompt. +Identify the main task (the core action and subject) and base both names on it. +The task display name and name should be as similar as possible so a human can easily associate them. 
+ +Requirements for task display name (generate this first): +- Human-readable description +- Maximum 64 characters total +- Should concisely describe the main task -Requirements: +Requirements for task name: +- Should be derived from the display name - Only lowercase letters, numbers, and hyphens -- Start with "task-" +- No spaces or underscores - Maximum 27 characters total -- Descriptive of the main task +- Should concisely describe the main task + +Output format (must be valid JSON): +{ + "display_name": "", + "task_name": "" +} Examples: -- "Help me debug a Python script" → "task-python-debug" -- "Create a React dashboard component" → "task-react-dashboard" -- "Analyze sales data from Q3" → "task-analyze-q3-sales" -- "Set up CI/CD pipeline" → "task-setup-cicd" +Prompt: "Help me debug a Python script" → +{ + "display_name": "Debug Python script", + "task_name": "python-debug" +} + +Prompt: "Create a React dashboard component" → +{ + "display_name": "React dashboard component", + "task_name": "react-dashboard" +} + +Prompt: "Analyze sales data from Q3" → +{ + "display_name": "Analyze Q3 sales data", + "task_name": "analyze-q3-sales" +} -If you cannot create a suitable name: -- Respond with "task-unnamed"` +Prompt: "Set up CI/CD pipeline" → +{ + "display_name": "CI/CD pipeline setup", + "task_name": "setup-cicd" +} + +If a suitable name cannot be created, output exactly: +{ + "display_name": "Task Unnamed", + "task_name": "task-unnamed" +} + +Do not include any additional keys, explanations, or text outside the JSON.` ) var ( @@ -42,30 +85,16 @@ var ( ErrNoNameGenerated = xerrors.New("no task name generated") ) -type options struct { - apiKey string - model anthropic.Model -} - -type Option func(o *options) - -func WithAPIKey(apiKey string) Option { - return func(o *options) { - o.apiKey = apiKey - } +type TaskName struct { + Name string `json:"task_name"` + DisplayName string `json:"display_name"` } -func WithModel(model anthropic.Model) Option { - return func(o 
*options) { - o.model = model - } -} - -func GetAnthropicAPIKeyFromEnv() string { +func getAnthropicAPIKeyFromEnv() string { return os.Getenv("ANTHROPIC_API_KEY") } -func GetAnthropicModelFromEnv() anthropic.Model { +func getAnthropicModelFromEnv() anthropic.Model { return anthropic.Model(os.Getenv("ANTHROPIC_MODEL")) } @@ -79,33 +108,85 @@ func generateSuffix() string { return fmt.Sprintf("%04x", num) } -func GenerateFallback() string { +// generateFallback generates a random task name when other methods fail. +// Uses Docker-style name generation with a collision-resistant suffix. +func generateFallback() TaskName { // We have a 32 character limit for the name. - // We have a 5 character prefix `task-`. // We have a 5 character suffix `-ffff`. - // This leaves us with 22 characters for the middle. + // This leaves us with 27 characters for the name. // - // Unfortunately, `namesgenerator.GetRandomName(0)` will - // generate names that are longer than 22 characters, so - // we just trim these down to length. + // `namesgenerator.GetRandomName(0)` can generate names + // up to 27 characters, but we truncate defensively. name := strings.ReplaceAll(namesgenerator.GetRandomName(0), "_", "-") - name = name[:min(len(name), 22)] + name = name[:min(len(name), 27)] name = strings.TrimSuffix(name, "-") - return fmt.Sprintf("task-%s-%s", name, generateSuffix()) + taskName := fmt.Sprintf("%s-%s", name, generateSuffix()) + displayName := strings.ReplaceAll(name, "-", " ") + if len(displayName) > 0 { + displayName = strings.ToUpper(displayName[:1]) + displayName[1:] + } + + return TaskName{ + Name: taskName, + DisplayName: displayName, + } } -func Generate(ctx context.Context, prompt string, opts ...Option) (string, error) { - o := options{} - for _, opt := range opts { - opt(&o) +// generateFromPrompt creates a task name directly from the prompt by sanitizing it. +// This is used as a fallback when Claude fails to generate a name. 
+func generateFromPrompt(prompt string) (TaskName, error) { + // Normalize newlines and tabs to spaces + prompt = regexp.MustCompile(`[\n\r\t]+`).ReplaceAllString(prompt, " ") + + // Truncate prompt to 27 chars with full words for task name generation + truncatedForName := prompt + if len(prompt) > 27 { + truncatedForName = strutil.Truncate(prompt, 27, strutil.TruncateWithFullWords) + } + + // Generate task name from truncated prompt + name := strings.ToLower(truncatedForName) + // Replace whitespace (\t \r \n and spaces) sequences with hyphens + name = regexp.MustCompile(`\s+`).ReplaceAllString(name, "-") + // Remove all characters except lowercase letters, numbers, and hyphens + name = regexp.MustCompile(`[^a-z0-9-]+`).ReplaceAllString(name, "") + // Collapse multiple consecutive hyphens into a single hyphen + name = regexp.MustCompile(`-+`).ReplaceAllString(name, "-") + // Remove leading and trailing hyphens + name = strings.Trim(name, "-") + + if len(name) == 0 { + return TaskName{}, ErrNoNameGenerated + } + + taskName := fmt.Sprintf("%s-%s", name, generateSuffix()) + + // Use the initial prompt as display name, truncated to 64 chars with full words + displayName := strutil.Truncate(prompt, 64, strutil.TruncateWithFullWords, strutil.TruncateWithEllipsis) + displayName = strings.TrimSpace(displayName) + if len(displayName) == 0 { + // Ensure display name is never empty + displayName = strings.ReplaceAll(name, "-", " ") } + displayName = strings.ToUpper(displayName[:1]) + displayName[1:] + + return TaskName{ + Name: taskName, + DisplayName: displayName, + }, nil +} - if o.model == "" { - o.model = defaultModel +// generateFromAnthropic uses Claude (Anthropic) to generate semantic task and display names from a user prompt. +// It sends the prompt to Claude with a structured system prompt requesting JSON output containing both names. +// Returns an error if the API call fails, the response is invalid, or Claude returns an "unnamed" placeholder. 
+func generateFromAnthropic(ctx context.Context, prompt string, apiKey string, model anthropic.Model) (TaskName, error) { + anthropicModel := model + if anthropicModel == "" { + anthropicModel = defaultModel } - if o.apiKey == "" { - return "", ErrNoAPIKey + if apiKey == "" { + return TaskName{}, ErrNoAPIKey } conversation := []aisdk.Message{ @@ -126,42 +207,95 @@ func Generate(ctx context.Context, prompt string, opts ...Option) (string, error } anthropicOptions := anthropic.DefaultClientOptions() - anthropicOptions = append(anthropicOptions, anthropicoption.WithAPIKey(o.apiKey)) + anthropicOptions = append(anthropicOptions, anthropicoption.WithAPIKey(apiKey)) anthropicClient := anthropic.NewClient(anthropicOptions...) - stream, err := anthropicDataStream(ctx, anthropicClient, o.model, conversation) + stream, err := anthropicDataStream(ctx, anthropicClient, anthropicModel, conversation) if err != nil { - return "", xerrors.Errorf("create anthropic data stream: %w", err) + return TaskName{}, xerrors.Errorf("create anthropic data stream: %w", err) } var acc aisdk.DataStreamAccumulator stream = stream.WithAccumulator(&acc) if err := stream.Pipe(io.Discard); err != nil { - return "", xerrors.Errorf("pipe data stream") + return TaskName{}, xerrors.Errorf("pipe data stream") } if len(acc.Messages()) == 0 { - return "", ErrNoNameGenerated + return TaskName{}, ErrNoNameGenerated } - taskName := acc.Messages()[0].Content - if taskName == "task-unnamed" { - return "", ErrNoNameGenerated + // Parse the JSON response + var taskNameResponse TaskName + if err := json.Unmarshal([]byte(acc.Messages()[0].Content), &taskNameResponse); err != nil { + return TaskName{}, xerrors.Errorf("failed to parse anthropic response: %w", err) + } + + taskNameResponse.Name = strings.TrimSpace(taskNameResponse.Name) + taskNameResponse.DisplayName = strings.TrimSpace(taskNameResponse.DisplayName) + + if taskNameResponse.Name == "" || taskNameResponse.Name == "task-unnamed" { + return TaskName{}, 
xerrors.Errorf("anthropic returned invalid task name: %q", taskNameResponse.Name) + } + + if taskNameResponse.DisplayName == "" || taskNameResponse.DisplayName == "Task Unnamed" { + return TaskName{}, xerrors.Errorf("anthropic returned invalid task display name: %q", taskNameResponse.DisplayName) } // We append a suffix to the end of the task name to reduce // the chance of collisions. We truncate the task name to - // to a maximum of 27 bytes, so that when we append the + // a maximum of 27 bytes, so that when we append the // 5 byte suffix (`-` and 4 byte hex slug), it should // remain within the 32 byte workspace name limit. - taskName = taskName[:min(len(taskName), 27)] - taskName = fmt.Sprintf("%s-%s", taskName, generateSuffix()) - if err := codersdk.NameValid(taskName); err != nil { - return "", xerrors.Errorf("generated name %v not valid: %w", taskName, err) + name := taskNameResponse.Name[:min(len(taskNameResponse.Name), 27)] + name = strings.TrimSuffix(name, "-") + name = fmt.Sprintf("%s-%s", name, generateSuffix()) + if err := codersdk.NameValid(name); err != nil { + return TaskName{}, xerrors.Errorf("generated name %v not valid: %w", name, err) + } + + displayName := taskNameResponse.DisplayName + displayName = strings.TrimSpace(displayName) + if len(displayName) == 0 { + // Ensure display name is never empty + displayName = strings.ReplaceAll(taskNameResponse.Name, "-", " ") + } + displayName = strings.ToUpper(displayName[:1]) + displayName[1:] + + return TaskName{ + Name: name, + DisplayName: displayName, + }, nil +} + +// Generate creates a task name and display name from a user prompt. +// It attempts multiple strategies in order of preference: +// 1. Use Claude (Anthropic) to generate semantic names from the prompt if an API key is available +// 2. Sanitize the prompt directly into a valid task name +// 3. Generate a random name as a final fallback +// +// A suffix is always appended to task names to reduce collision risk. 
+// This function always succeeds and returns a valid TaskName. +func Generate(ctx context.Context, logger slog.Logger, prompt string) TaskName { + if anthropicAPIKey := getAnthropicAPIKeyFromEnv(); anthropicAPIKey != "" { + taskName, err := generateFromAnthropic(ctx, prompt, anthropicAPIKey, getAnthropicModelFromEnv()) + if err == nil { + return taskName + } + // Anthropic failed, fall through to next fallback + logger.Error(ctx, "unable to generate task name and display name from Anthropic", slog.Error(err)) + } + + // Try generating from prompt + taskName, err := generateFromPrompt(prompt) + if err == nil { + return taskName } + logger.Warn(ctx, "unable to generate task name and display name from prompt", slog.Error(err)) - return taskName, nil + // Final fallback + return generateFallback() } func anthropicDataStream(ctx context.Context, client anthropic.Client, model anthropic.Model, input []aisdk.Message) (aisdk.DataStream, error) { @@ -171,8 +305,15 @@ func anthropicDataStream(ctx context.Context, client anthropic.Client, model ant } return aisdk.AnthropicToDataStream(client.Messages.NewStreaming(ctx, anthropic.MessageNewParams{ - Model: model, - MaxTokens: 24, + Model: model, + // MaxTokens is set to 100 based on the maximum expected output size. + // The worst-case JSON output is 134 characters: + // - Base structure: 43 chars (including formatting) + // - task_name: 27 chars max + // - display_name: 64 chars max + // Using Anthropic's token counting API, this worst-case output tokenizes to 70 tokens. + // We set MaxTokens to 100 to provide a safety buffer. 
+ MaxTokens: 100, System: system, Messages: messages, })), nil diff --git a/coderd/taskname/taskname_internal_test.go b/coderd/taskname/taskname_internal_test.go new file mode 100644 index 0000000000000..46131232505d4 --- /dev/null +++ b/coderd/taskname/taskname_internal_test.go @@ -0,0 +1,164 @@ +package taskname + +import ( + "fmt" + "strings" + "testing" + + "github.com/stretchr/testify/require" + + "github.com/coder/coder/v2/codersdk" + "github.com/coder/coder/v2/testutil" +) + +func TestGenerateFallback(t *testing.T) { + t.Parallel() + + taskName := generateFallback() + err := codersdk.NameValid(taskName.Name) + require.NoErrorf(t, err, "expected fallback to be valid workspace name, instead found %s", taskName.Name) + require.NotEmpty(t, taskName.DisplayName) +} + +func TestGenerateFromPrompt(t *testing.T) { + t.Parallel() + + tests := []struct { + name string + prompt string + expectError bool + expectedName string + expectedDisplayName string + }{ + { + name: "EmptyPrompt", + prompt: "", + expectError: true, + }, + { + name: "OnlySpaces", + prompt: " ", + expectError: true, + }, + { + name: "OnlySpecialCharacters", + prompt: "!@#$%^&*()", + expectError: true, + }, + { + name: "UppercasePrompt", + prompt: "BUILD MY APP", + expectError: false, + expectedName: "build-my-app", + expectedDisplayName: "BUILD MY APP", + }, + { + name: "PromptWithApostrophes", + prompt: "fix user's dashboard", + expectError: false, + expectedName: "fix-users-dashboard", + expectedDisplayName: "Fix user's dashboard", + }, + { + name: "LongPrompt", + prompt: strings.Repeat("a", 100), + expectError: false, + expectedName: strings.Repeat("a", 27), + expectedDisplayName: "A" + strings.Repeat("a", 62) + "…", + }, + { + name: "PromptWithMultipleSpaces", + prompt: "build my app", + expectError: false, + expectedName: "build-my-app", + expectedDisplayName: "Build my app", + }, + { + name: "PromptWithNewlines", + prompt: "build\nmy\napp", + expectError: false, + expectedName: "build-my-app", 
+ expectedDisplayName: "Build my app", + }, + { + name: "TruncatesLongPromptAtWordBoundary", + prompt: "implement real-time notifications dashboard", + expectError: false, + expectedName: "implement-real-time", + expectedDisplayName: "Implement real-time notifications dashboard", + }, + } + + for _, tc := range tests { + t.Run(tc.name, func(t *testing.T) { + t.Parallel() + + taskName, err := generateFromPrompt(tc.prompt) + + if tc.expectError { + require.Error(t, err) + return + } + + require.NoError(t, err) + + // Validate task name + require.Contains(t, taskName.Name, fmt.Sprintf("%s-", tc.expectedName)) + require.NoError(t, codersdk.NameValid(taskName.Name)) + + // Validate task display name + require.NotEmpty(t, taskName.DisplayName) + require.Equal(t, tc.expectedDisplayName, taskName.DisplayName) + }) + } +} + +func TestGenerateFromAnthropic(t *testing.T) { + t.Parallel() + + apiKey := getAnthropicAPIKeyFromEnv() + if apiKey == "" { + t.Skip("Skipping test as ANTHROPIC_API_KEY not set") + } + + tests := []struct { + name string + prompt string + }{ + { + name: "SimplePrompt", + prompt: "Create a finance planning app", + }, + { + name: "TechnicalPrompt", + prompt: "Debug authentication middleware for OAuth2", + }, + { + name: "ShortPrompt", + prompt: "Fix bug", + }, + } + + for _, tc := range tests { + t.Run(tc.name, func(t *testing.T) { + t.Parallel() + + ctx := testutil.Context(t, testutil.WaitShort) + + taskName, err := generateFromAnthropic(ctx, tc.prompt, apiKey, getAnthropicModelFromEnv()) + require.NoError(t, err) + + t.Log("Task name:", taskName.Name) + t.Log("Task display name:", taskName.DisplayName) + + // Validate task name + require.NotEmpty(t, taskName.DisplayName) + require.NoError(t, codersdk.NameValid(taskName.Name)) + + // Validate display name + require.NotEmpty(t, taskName.DisplayName) + require.NotEqual(t, "task-unnamed", taskName.Name) + require.NotEqual(t, "Task Unnamed", taskName.DisplayName) + }) + } +} diff --git 
a/coderd/taskname/taskname_test.go b/coderd/taskname/taskname_test.go index 3eb26ef1d4ac7..314333709244a 100644 --- a/coderd/taskname/taskname_test.go +++ b/coderd/taskname/taskname_test.go @@ -15,42 +15,51 @@ const ( anthropicEnvVar = "ANTHROPIC_API_KEY" ) -func TestGenerateFallback(t *testing.T) { - t.Parallel() - - name := taskname.GenerateFallback() - err := codersdk.NameValid(name) - require.NoErrorf(t, err, "expected fallback to be valid workspace name, instead found %s", name) -} - -func TestGenerateTaskName(t *testing.T) { - t.Parallel() - - t.Run("Fallback", func(t *testing.T) { - t.Parallel() +func TestGenerate(t *testing.T) { + t.Run("FromPrompt", func(t *testing.T) { + // Ensure no API key in env for this test + t.Setenv("ANTHROPIC_API_KEY", "") ctx := testutil.Context(t, testutil.WaitShort) - name, err := taskname.Generate(ctx, "Some random prompt") - require.ErrorIs(t, err, taskname.ErrNoAPIKey) - require.Equal(t, "", name) - }) + taskName := taskname.Generate(ctx, testutil.Logger(t), "Create a finance planning app") - t.Run("Anthropic", func(t *testing.T) { - t.Parallel() + // Should succeed via prompt sanitization + require.NoError(t, codersdk.NameValid(taskName.Name)) + require.Contains(t, taskName.Name, "create-a-finance-planning-") + require.NotEmpty(t, taskName.DisplayName) + require.Equal(t, "Create a finance planning app", taskName.DisplayName) + }) + t.Run("FromAnthropic", func(t *testing.T) { apiKey := os.Getenv(anthropicEnvVar) if apiKey == "" { t.Skipf("Skipping test as %s not set", anthropicEnvVar) } + // Set API key for this test + t.Setenv("ANTHROPIC_API_KEY", apiKey) + + ctx := testutil.Context(t, testutil.WaitShort) + + taskName := taskname.Generate(ctx, testutil.Logger(t), "Create a finance planning app") + + // Should succeed with Claude-generated names + require.NoError(t, codersdk.NameValid(taskName.Name)) + require.NotEmpty(t, taskName.DisplayName) + }) + + t.Run("Fallback", func(t *testing.T) { + // Ensure no API key + 
t.Setenv("ANTHROPIC_API_KEY", "") + ctx := testutil.Context(t, testutil.WaitShort) - name, err := taskname.Generate(ctx, "Create a finance planning app", taskname.WithAPIKey(apiKey)) - require.NoError(t, err) - require.NotEqual(t, "", name) + // Use a prompt that can't be sanitized (only special chars) + taskName := taskname.Generate(ctx, testutil.Logger(t), "!@#$%^&*()") - err = codersdk.NameValid(name) - require.NoError(t, err, "name should be valid") + // Should fall back to random name + require.NoError(t, codersdk.NameValid(taskName.Name)) + require.NotEmpty(t, taskName.DisplayName) }) } diff --git a/coderd/telemetry/telemetry.go b/coderd/telemetry/telemetry.go index 19873f99eeb2f..58822a93d7086 100644 --- a/coderd/telemetry/telemetry.go +++ b/coderd/telemetry/telemetry.go @@ -751,7 +751,7 @@ func (r *remoteReporter) createSnapshot() (*Snapshot, error) { eg.Go(func() error { summaries, err := r.generateAIBridgeInterceptionsSummaries(ctx) if err != nil { - return xerrors.Errorf("generate AIBridge interceptions telemetry summaries: %w", err) + return xerrors.Errorf("generate AI Bridge interceptions telemetry summaries: %w", err) } snapshot.AIBridgeInterceptionsSummaries = summaries return nil @@ -785,7 +785,7 @@ func (r *remoteReporter) generateAIBridgeInterceptionsSummaries(ctx context.Cont return nil, nil } if err != nil { - return nil, xerrors.Errorf("insert AIBridge interceptions telemetry lock (period_ending_at=%q): %w", endedAtBefore, err) + return nil, xerrors.Errorf("insert AI Bridge interceptions telemetry lock (period_ending_at=%q): %w", endedAtBefore, err) } // List the summary categories that need to be calculated. 
@@ -794,7 +794,7 @@ func (r *remoteReporter) generateAIBridgeInterceptionsSummaries(ctx context.Cont EndedAtBefore: endedAtBefore, // exclusive }) if err != nil { - return nil, xerrors.Errorf("list AIBridge interceptions telemetry summaries (startedAtAfter=%q, endedAtBefore=%q): %w", endedAtAfter, endedAtBefore, err) + return nil, xerrors.Errorf("list AI Bridge interceptions telemetry summaries (startedAtAfter=%q, endedAtBefore=%q): %w", endedAtAfter, endedAtBefore, err) } // Calculate and convert the summaries for all categories. @@ -813,7 +813,7 @@ func (r *remoteReporter) generateAIBridgeInterceptionsSummaries(ctx context.Cont EndedAtBefore: endedAtBefore, }) if err != nil { - return xerrors.Errorf("calculate AIBridge interceptions telemetry summary (provider=%q, model=%q, client=%q, startedAtAfter=%q, endedAtBefore=%q): %w", category.Provider, category.Model, category.Client, endedAtAfter, endedAtBefore, err) + return xerrors.Errorf("calculate AI Bridge interceptions telemetry summary (provider=%q, model=%q, client=%q, startedAtAfter=%q, endedAtBefore=%q): %w", category.Provider, category.Model, category.Client, endedAtAfter, endedAtBefore, err) } // Double check that at least one interception was found in the diff --git a/coderd/telemetry/telemetry_test.go b/coderd/telemetry/telemetry_test.go index dede229acdacf..a818b66db2c41 100644 --- a/coderd/telemetry/telemetry_test.go +++ b/coderd/telemetry/telemetry_test.go @@ -145,13 +145,12 @@ func TestTelemetry(t *testing.T) { AgentID: taskWsAgent.ID, }) taskWB := dbgen.WorkspaceBuild(t, db, database.WorkspaceBuild{ - Transition: database.WorkspaceTransitionStart, - Reason: database.BuildReasonAutostart, - WorkspaceID: taskWs.ID, - TemplateVersionID: tv.ID, - JobID: taskJob.ID, - HasAITask: sql.NullBool{Valid: true, Bool: true}, - AITaskSidebarAppID: uuid.NullUUID{Valid: true, UUID: taskWsApp.ID}, + Transition: database.WorkspaceTransitionStart, + Reason: database.BuildReasonAutostart, + WorkspaceID: taskWs.ID, + 
TemplateVersionID: tv.ID, + JobID: taskJob.ID, + HasAITask: sql.NullBool{Valid: true, Bool: true}, }) task := dbgen.Task(t, db, database.TaskTable{ OwnerID: user.ID, diff --git a/coderd/templates.go b/coderd/templates.go index 9202fc48234a6..39892aa5fef8c 100644 --- a/coderd/templates.go +++ b/coderd/templates.go @@ -773,6 +773,11 @@ func (api *API) patchTemplateMeta(rw http.ResponseWriter, r *http.Request) { classicTemplateFlow = *req.UseClassicParameterFlow } + useTerraformWorkspaceCache := template.UseTerraformWorkspaceCache + if req.UseTerraformWorkspaceCache != nil { + useTerraformWorkspaceCache = *req.UseTerraformWorkspaceCache + } + displayName := ptr.NilToDefault(req.DisplayName, template.DisplayName) description := ptr.NilToDefault(req.Description, template.Description) icon := ptr.NilToDefault(req.Icon, template.Icon) @@ -798,7 +803,8 @@ func (api *API) patchTemplateMeta(rw http.ResponseWriter, r *http.Request) { (deprecationMessage == template.Deprecated) && (classicTemplateFlow == template.UseClassicParameterFlow) && maxPortShareLevel == template.MaxPortSharingLevel && - corsBehavior == template.CorsBehavior { + corsBehavior == template.CorsBehavior && + useTerraformWorkspaceCache == template.UseTerraformWorkspaceCache { return nil } @@ -841,6 +847,7 @@ func (api *API) patchTemplateMeta(rw http.ResponseWriter, r *http.Request) { MaxPortSharingLevel: maxPortShareLevel, UseClassicParameterFlow: classicTemplateFlow, CorsBehavior: corsBehavior, + UseTerraformWorkspaceCache: useTerraformWorkspaceCache, }) if err != nil { return xerrors.Errorf("update template metadata: %w", err) @@ -1119,30 +1126,23 @@ func (api *API) convertTemplate( DaysOfWeek: codersdk.BitmapToWeekdays(template.AutostartAllowedDays()), }, // These values depend on entitlements and come from the templateAccessControl - RequireActiveVersion: templateAccessControl.RequireActiveVersion, - Deprecated: templateAccessControl.IsDeprecated(), - DeprecationMessage: templateAccessControl.Deprecated, 
- MaxPortShareLevel: maxPortShareLevel, - UseClassicParameterFlow: template.UseClassicParameterFlow, - CORSBehavior: codersdk.CORSBehavior(template.CorsBehavior), + RequireActiveVersion: templateAccessControl.RequireActiveVersion, + Deprecated: templateAccessControl.IsDeprecated(), + DeprecationMessage: templateAccessControl.Deprecated, + MaxPortShareLevel: maxPortShareLevel, + UseClassicParameterFlow: template.UseClassicParameterFlow, + UseTerraformWorkspaceCache: template.UseTerraformWorkspaceCache, + CORSBehavior: codersdk.CORSBehavior(template.CorsBehavior), } } // findTemplateAdmins fetches all users with template admin permission including owners. func findTemplateAdmins(ctx context.Context, store database.Store) ([]database.GetUsersRow, error) { - // Notice: we can't scrape the user information in parallel as pq - // fails with: unexpected describe rows response: 'D' - owners, err := store.GetUsers(ctx, database.GetUsersParams{ - RbacRole: []string{codersdk.RoleOwner}, - }) - if err != nil { - return nil, xerrors.Errorf("get owners: %w", err) - } templateAdmins, err := store.GetUsers(ctx, database.GetUsersParams{ - RbacRole: []string{codersdk.RoleTemplateAdmin}, + RbacRole: []string{codersdk.RoleTemplateAdmin, codersdk.RoleOwner}, }) if err != nil { - return nil, xerrors.Errorf("get template admins: %w", err) + return nil, xerrors.Errorf("get owners: %w", err) } - return append(owners, templateAdmins...), nil + return templateAdmins, nil } diff --git a/coderd/templateversions.go b/coderd/templateversions.go index 2e959702fbde5..13dd93d528793 100644 --- a/coderd/templateversions.go +++ b/coderd/templateversions.go @@ -1609,9 +1609,13 @@ func (api *API) postTemplateVersionsByOrganization(rw http.ResponseWriter, r *ht var matchedProvisioners codersdk.MatchedProvisioners err = api.Database.InTx(func(tx database.Store) error { jobID := uuid.New() - templateVersionID := uuid.New() + jobInput, err := json.Marshal(provisionerdserver.TemplateVersionImportJob{ + 
TemplateID: uuid.NullUUID{ + UUID: req.TemplateID, + Valid: req.TemplateID != uuid.Nil, + }, TemplateVersionID: templateVersionID, UserVariableValues: req.UserVariableValues, }) diff --git a/coderd/users.go b/coderd/users.go index 30fa7bf7cabeb..94d4dece246c5 100644 --- a/coderd/users.go +++ b/coderd/users.go @@ -1077,6 +1077,74 @@ func (api *API) putUserAppearanceSettings(rw http.ResponseWriter, r *http.Reques }) } +// @Summary Get user preference settings +// @ID get-user-preference-settings +// @Security CoderSessionToken +// @Produce json +// @Tags Users +// @Param user path string true "User ID, name, or me" +// @Success 200 {object} codersdk.UserPreferenceSettings +// @Router /users/{user}/preferences [get] +func (api *API) userPreferenceSettings(rw http.ResponseWriter, r *http.Request) { + var ( + ctx = r.Context() + user = httpmw.UserParam(r) + ) + + taskAlertDismissed, err := api.Database.GetUserTaskNotificationAlertDismissed(ctx, user.ID) + if err != nil { + if !errors.Is(err, sql.ErrNoRows) { + httpapi.Write(ctx, rw, http.StatusInternalServerError, codersdk.Response{ + Message: "Error reading user preference settings.", + Detail: err.Error(), + }) + return + } + } + + httpapi.Write(ctx, rw, http.StatusOK, codersdk.UserPreferenceSettings{ + TaskNotificationAlertDismissed: taskAlertDismissed, + }) +} + +// @Summary Update user preference settings +// @ID update-user-preference-settings +// @Security CoderSessionToken +// @Accept json +// @Produce json +// @Tags Users +// @Param user path string true "User ID, name, or me" +// @Param request body codersdk.UpdateUserPreferenceSettingsRequest true "New preference settings" +// @Success 200 {object} codersdk.UserPreferenceSettings +// @Router /users/{user}/preferences [put] +func (api *API) putUserPreferenceSettings(rw http.ResponseWriter, r *http.Request) { + var ( + ctx = r.Context() + user = httpmw.UserParam(r) + ) + + var params codersdk.UpdateUserPreferenceSettingsRequest + if !httpapi.Read(ctx, rw, r, 
¶ms) { + return + } + + updatedTaskAlertDismissed, err := api.Database.UpdateUserTaskNotificationAlertDismissed(ctx, database.UpdateUserTaskNotificationAlertDismissedParams{ + UserID: user.ID, + TaskNotificationAlertDismissed: params.TaskNotificationAlertDismissed, + }) + if err != nil { + httpapi.Write(ctx, rw, http.StatusInternalServerError, codersdk.Response{ + Message: "Internal error updating user task notification alert dismissed.", + Detail: err.Error(), + }) + return + } + + httpapi.Write(ctx, rw, http.StatusOK, codersdk.UserPreferenceSettings{ + TaskNotificationAlertDismissed: updatedTaskAlertDismissed, + }) +} + func isValidFontName(font codersdk.TerminalFontName) bool { return slices.Contains(codersdk.TerminalFontNames, font) } @@ -1537,21 +1605,13 @@ func (api *API) CreateUser(ctx context.Context, store database.Store, req Create // findUserAdmins fetches all users with user admin permission including owners. func findUserAdmins(ctx context.Context, store database.Store) ([]database.GetUsersRow, error) { - // Notice: we can't scrape the user information in parallel as pq - // fails with: unexpected describe rows response: 'D' - owners, err := store.GetUsers(ctx, database.GetUsersParams{ - RbacRole: []string{codersdk.RoleOwner}, - }) - if err != nil { - return nil, xerrors.Errorf("get owners: %w", err) - } userAdmins, err := store.GetUsers(ctx, database.GetUsersParams{ - RbacRole: []string{codersdk.RoleUserAdmin}, + RbacRole: []string{codersdk.RoleOwner, codersdk.RoleUserAdmin}, }) if err != nil { - return nil, xerrors.Errorf("get user admins: %w", err) + return nil, xerrors.Errorf("get owners: %w", err) } - return append(owners, userAdmins...), nil + return userAdmins, nil } func convertUsers(users []database.User, organizationIDsByUserID map[uuid.UUID][]uuid.UUID) []codersdk.User { diff --git a/coderd/users_test.go b/coderd/users_test.go index 283b607e89df9..4691165930a22 100644 --- a/coderd/users_test.go +++ b/coderd/users_test.go @@ -599,21 +599,28 
@@ func TestNotifyDeletedUser(t *testing.T) { // then sent := notifyEnq.Sent() require.Len(t, sent, 5) - // sent[0]: "User admin" account created, "owner" notified - // sent[1]: "Member" account created, "owner" notified - // sent[2]: "Member" account created, "user admin" notified + // Other notifications: + // "User admin" account created, "owner" notified + // "Member" account created, "owner" notified + // "Member" account created, "user admin" notified // "Member" account deleted, "owner" notified - require.Equal(t, notifications.TemplateUserAccountDeleted, sent[3].TemplateID) - require.Equal(t, firstUser.UserID, sent[3].UserID) - require.Contains(t, sent[3].Targets, member.ID) - require.Equal(t, member.Username, sent[3].Labels["deleted_account_name"]) + ownerNotifications := notifyEnq.Sent(func(n *notificationstest.FakeNotification) bool { + return n.TemplateID == notifications.TemplateUserAccountDeleted && + n.UserID == firstUser.UserID && + slices.Contains(n.Targets, member.ID) && + n.Labels["deleted_account_name"] == member.Username + }) + require.Len(t, ownerNotifications, 1) // "Member" account deleted, "user admin" notified - require.Equal(t, notifications.TemplateUserAccountDeleted, sent[4].TemplateID) - require.Equal(t, userAdmin.ID, sent[4].UserID) - require.Contains(t, sent[4].Targets, member.ID) - require.Equal(t, member.Username, sent[4].Labels["deleted_account_name"]) + adminNotifications := notifyEnq.Sent(func(n *notificationstest.FakeNotification) bool { + return n.TemplateID == notifications.TemplateUserAccountDeleted && + n.UserID == userAdmin.ID && + slices.Contains(n.Targets, member.ID) && + n.Labels["deleted_account_name"] == member.Username + }) + require.Len(t, adminNotifications, 1) }) } @@ -960,22 +967,31 @@ func TestNotifyCreatedUser(t *testing.T) { require.Len(t, sent, 3) // "User admin" account created, "owner" notified - require.Equal(t, notifications.TemplateUserAccountCreated, sent[0].TemplateID) - require.Equal(t, 
firstUser.UserID, sent[0].UserID) - require.Contains(t, sent[0].Targets, userAdmin.ID) - require.Equal(t, userAdmin.Username, sent[0].Labels["created_account_name"]) + ownerNotifiedAboutUserAdmin := notifyEnq.Sent(func(n *notificationstest.FakeNotification) bool { + return n.TemplateID == notifications.TemplateUserAccountCreated && + n.UserID == firstUser.UserID && + slices.Contains(n.Targets, userAdmin.ID) && + n.Labels["created_account_name"] == userAdmin.Username + }) + require.Len(t, ownerNotifiedAboutUserAdmin, 1) // "Member" account created, "owner" notified - require.Equal(t, notifications.TemplateUserAccountCreated, sent[1].TemplateID) - require.Equal(t, firstUser.UserID, sent[1].UserID) - require.Contains(t, sent[1].Targets, member.ID) - require.Equal(t, member.Username, sent[1].Labels["created_account_name"]) + ownerNotifiedAboutMember := notifyEnq.Sent(func(n *notificationstest.FakeNotification) bool { + return n.TemplateID == notifications.TemplateUserAccountCreated && + n.UserID == firstUser.UserID && + slices.Contains(n.Targets, member.ID) && + n.Labels["created_account_name"] == member.Username + }) + require.Len(t, ownerNotifiedAboutMember, 1) // "Member" account created, "user admin" notified - require.Equal(t, notifications.TemplateUserAccountCreated, sent[1].TemplateID) - require.Equal(t, userAdmin.ID, sent[2].UserID) - require.Contains(t, sent[2].Targets, member.ID) - require.Equal(t, member.Username, sent[2].Labels["created_account_name"]) + userAdminNotifiedAboutMember := notifyEnq.Sent(func(n *notificationstest.FakeNotification) bool { + return n.TemplateID == notifications.TemplateUserAccountCreated && + n.UserID == userAdmin.ID && + slices.Contains(n.Targets, member.ID) && + n.Labels["created_account_name"] == member.Username + }) + require.Len(t, userAdminNotifiedAboutMember, 1) }) } @@ -2176,16 +2192,16 @@ func TestUserTerminalFont(t *testing.T) { firstUser := coderdtest.CreateFirstUser(t, adminClient) client, _ := 
coderdtest.CreateAnotherUser(t, adminClient, firstUser.OrganizationID) - ctx, cancel := context.WithTimeout(context.Background(), testutil.WaitLong) + ctx, cancel := context.WithTimeout(context.Background(), testutil.WaitShort) defer cancel() // given - initial, err := client.GetUserAppearanceSettings(ctx, "me") + initial, err := client.GetUserAppearanceSettings(ctx, codersdk.Me) require.NoError(t, err) require.Equal(t, codersdk.TerminalFontName(""), initial.TerminalFont) // when - updated, err := client.UpdateUserAppearanceSettings(ctx, "me", codersdk.UpdateUserAppearanceSettingsRequest{ + updated, err := client.UpdateUserAppearanceSettings(ctx, codersdk.Me, codersdk.UpdateUserAppearanceSettingsRequest{ ThemePreference: "light", TerminalFont: "fira-code", }) @@ -2202,16 +2218,16 @@ func TestUserTerminalFont(t *testing.T) { firstUser := coderdtest.CreateFirstUser(t, adminClient) client, _ := coderdtest.CreateAnotherUser(t, adminClient, firstUser.OrganizationID) - ctx, cancel := context.WithTimeout(context.Background(), testutil.WaitLong) + ctx, cancel := context.WithTimeout(context.Background(), testutil.WaitShort) defer cancel() // given - initial, err := client.GetUserAppearanceSettings(ctx, "me") + initial, err := client.GetUserAppearanceSettings(ctx, codersdk.Me) require.NoError(t, err) require.Equal(t, codersdk.TerminalFontName(""), initial.TerminalFont) // when - _, err = client.UpdateUserAppearanceSettings(ctx, "me", codersdk.UpdateUserAppearanceSettingsRequest{ + _, err = client.UpdateUserAppearanceSettings(ctx, codersdk.Me, codersdk.UpdateUserAppearanceSettingsRequest{ ThemePreference: "light", TerminalFont: "foobar", }) @@ -2227,16 +2243,16 @@ func TestUserTerminalFont(t *testing.T) { firstUser := coderdtest.CreateFirstUser(t, adminClient) client, _ := coderdtest.CreateAnotherUser(t, adminClient, firstUser.OrganizationID) - ctx, cancel := context.WithTimeout(context.Background(), testutil.WaitLong) + ctx, cancel := 
context.WithTimeout(context.Background(), testutil.WaitShort) defer cancel() // given - initial, err := client.GetUserAppearanceSettings(ctx, "me") + initial, err := client.GetUserAppearanceSettings(ctx, codersdk.Me) require.NoError(t, err) require.Equal(t, codersdk.TerminalFontName(""), initial.TerminalFont) // when - _, err = client.UpdateUserAppearanceSettings(ctx, "me", codersdk.UpdateUserAppearanceSettingsRequest{ + _, err = client.UpdateUserAppearanceSettings(ctx, codersdk.Me, codersdk.UpdateUserAppearanceSettingsRequest{ ThemePreference: "light", TerminalFont: "", }) @@ -2246,6 +2262,75 @@ func TestUserTerminalFont(t *testing.T) { }) } +func TestUserTaskNotificationAlertDismissed(t *testing.T) { + t.Parallel() + + t.Run("defaults to false", func(t *testing.T) { + t.Parallel() + + adminClient := coderdtest.New(t, nil) + firstUser := coderdtest.CreateFirstUser(t, adminClient) + client, _ := coderdtest.CreateAnotherUser(t, adminClient, firstUser.OrganizationID) + + ctx, cancel := context.WithTimeout(context.Background(), testutil.WaitShort) + defer cancel() + + // When: getting user preference settings for a user + settings, err := client.GetUserPreferenceSettings(ctx, codersdk.Me) + require.NoError(t, err) + + // Then: the task notification alert dismissed should default to false + require.False(t, settings.TaskNotificationAlertDismissed) + }) + + t.Run("update to true", func(t *testing.T) { + t.Parallel() + + adminClient := coderdtest.New(t, nil) + firstUser := coderdtest.CreateFirstUser(t, adminClient) + client, _ := coderdtest.CreateAnotherUser(t, adminClient, firstUser.OrganizationID) + + ctx, cancel := context.WithTimeout(context.Background(), testutil.WaitShort) + defer cancel() + + // When: user dismisses the task notification alert + updated, err := client.UpdateUserPreferenceSettings(ctx, codersdk.Me, codersdk.UpdateUserPreferenceSettingsRequest{ + TaskNotificationAlertDismissed: true, + }) + require.NoError(t, err) + + // Then: the setting is updated 
to true + require.True(t, updated.TaskNotificationAlertDismissed) + }) + + t.Run("update to false", func(t *testing.T) { + t.Parallel() + + adminClient := coderdtest.New(t, nil) + firstUser := coderdtest.CreateFirstUser(t, adminClient) + client, _ := coderdtest.CreateAnotherUser(t, adminClient, firstUser.OrganizationID) + + ctx, cancel := context.WithTimeout(context.Background(), testutil.WaitShort) + defer cancel() + + // Given: user has dismissed the task notification alert + _, err := client.UpdateUserPreferenceSettings(ctx, codersdk.Me, codersdk.UpdateUserPreferenceSettingsRequest{ + TaskNotificationAlertDismissed: true, + }) + require.NoError(t, err) + + // When: the task notification alert dismissal is cleared + // (e.g., when user enables a task notification in the UI settings) + updated, err := client.UpdateUserPreferenceSettings(ctx, codersdk.Me, codersdk.UpdateUserPreferenceSettingsRequest{ + TaskNotificationAlertDismissed: false, + }) + require.NoError(t, err) + + // Then: the setting is updated to false + require.False(t, updated.TaskNotificationAlertDismissed) + }) +} + func TestWorkspacesByUser(t *testing.T) { t.Parallel() t.Run("Empty", func(t *testing.T) { diff --git a/coderd/util/strings/strings.go b/coderd/util/strings/strings.go index e21908d488cd8..f320142da55a1 100644 --- a/coderd/util/strings/strings.go +++ b/coderd/util/strings/strings.go @@ -10,6 +10,15 @@ import ( "github.com/microcosm-cc/bluemonday" ) +// EmptyToNil returns a `nil` for an empty string, or a pointer to the string +// otherwise. Useful when needing to treat zero values as nil in APIs. +func EmptyToNil(s string) *string { + if s == "" { + return nil + } + return &s +} + // JoinWithConjunction joins a slice of strings with commas except for the last // two which are joined with "and". 
func JoinWithConjunction(s []string) string { diff --git a/coderd/workspaceagents.go b/coderd/workspaceagents.go index 23046dab28e15..d3cca07066517 100644 --- a/coderd/workspaceagents.go +++ b/coderd/workspaceagents.go @@ -388,16 +388,17 @@ func (api *API) patchWorkspaceAgentAppStatus(rw http.ResponseWriter, r *http.Req // Treat the message as untrusted input. cleaned := strutil.UISanitize(req.Message) - // Get the latest statuses for the workspace app to detect no-op updates + // Get the latest status for the workspace app to detect no-op updates // nolint:gocritic // This is a system restricted operation. - latestAppStatus, err := api.Database.GetLatestWorkspaceAppStatusesByAppID(dbauthz.AsSystemRestricted(ctx), app.ID) - if err != nil { + latestAppStatus, err := api.Database.GetLatestWorkspaceAppStatusByAppID(dbauthz.AsSystemRestricted(ctx), app.ID) + if err != nil && !errors.Is(err, sql.ErrNoRows) { httpapi.Write(ctx, rw, http.StatusInternalServerError, codersdk.Response{ - Message: "Failed to get latest workspace app statuses.", + Message: "Failed to get latest workspace app status.", Detail: err.Error(), }) return } + // If no rows found, latestAppStatus will be a zero-value struct (ID == uuid.Nil) // nolint:gocritic // This is a system restricted operation. _, err = api.Database.InsertWorkspaceAppStatus(dbauthz.AsSystemRestricted(ctx), database.InsertWorkspaceAppStatusParams{ @@ -428,7 +429,7 @@ func (api *API) patchWorkspaceAgentAppStatus(rw http.ResponseWriter, r *http.Req }) // Notify on state change to Working/Idle for AI tasks - api.enqueueAITaskStateNotification(ctx, app.ID, latestAppStatus, req.State, workspace) + api.enqueueAITaskStateNotification(ctx, app.ID, latestAppStatus, req.State, workspace, workspaceAgent) httpapi.Write(ctx, rw, http.StatusOK, nil) } @@ -437,13 +438,15 @@ func (api *API) patchWorkspaceAgentAppStatus(rw http.ResponseWriter, r *http.Req // transitions to Working or Idle. 
// No-op if: // - the workspace agent app isn't configured as an AI task, -// - the new state equals the latest persisted state. +// - the new state equals the latest persisted state, +// - the workspace agent is not ready (still starting up). func (api *API) enqueueAITaskStateNotification( ctx context.Context, appID uuid.UUID, - latestAppStatus []database.WorkspaceAppStatus, + latestAppStatus database.WorkspaceAppStatus, newAppStatus codersdk.WorkspaceAppStatusState, workspace database.Workspace, + agent database.WorkspaceAgent, ) { // Select notification template based on the new state var notificationTemplate uuid.UUID @@ -461,67 +464,69 @@ func (api *API) enqueueAITaskStateNotification( return } - workspaceBuild, err := api.Database.GetLatestWorkspaceBuildByWorkspaceID(ctx, workspace.ID) - if err != nil { - api.Logger.Warn(ctx, "failed to get workspace build", slog.Error(err)) + if !workspace.TaskID.Valid { + // Workspace has no task ID, do nothing. return } - // Confirm Workspace Agent App is an AI Task - if workspaceBuild.HasAITask.Valid && workspaceBuild.HasAITask.Bool && - workspaceBuild.AITaskSidebarAppID.Valid && workspaceBuild.AITaskSidebarAppID.UUID == appID { - // Skip if the latest persisted state equals the new state (no new transition) - if len(latestAppStatus) > 0 && latestAppStatus[0].State == database.WorkspaceAppStatusState(newAppStatus) { - return - } + // Only send notifications when the agent is ready. We want to skip + // any state transitions that occur whilst the workspace is starting + // up as it doesn't make sense to receive them. + if agent.LifecycleState != database.WorkspaceAgentLifecycleStateReady { + api.Logger.Debug(ctx, "skipping AI task notification because agent is not ready", + slog.F("agent_id", agent.ID), + slog.F("lifecycle_state", agent.LifecycleState), + slog.F("new_app_status", newAppStatus), + ) + return + } - // Skip the initial "Working" notification when task first starts. 
- // This is obvious to the user since they just created the task. - // We still notify on first "Idle" status and all subsequent transitions. - if len(latestAppStatus) == 0 && newAppStatus == codersdk.WorkspaceAppStatusStateWorking { - return - } + task, err := api.Database.GetTaskByID(ctx, workspace.TaskID.UUID) + if err != nil { + api.Logger.Warn(ctx, "failed to get task", slog.Error(err)) + return + } - // Use the task prompt as the "task" label, fallback to workspace name - parameters, err := api.Database.GetWorkspaceBuildParameters(ctx, workspaceBuild.ID) - if err != nil { - api.Logger.Warn(ctx, "failed to get workspace build parameters", slog.Error(err)) - return - } - taskName := workspace.Name - for _, param := range parameters { - if param.Name == codersdk.AITaskPromptParameterName { - taskName = param.Value - } - } + if !task.WorkspaceAppID.Valid || task.WorkspaceAppID.UUID != appID { + // Non-task app, do nothing. + return + } - // As task prompt may be particularly long, truncate it to 160 characters for notifications. - if len(taskName) > 160 { - taskName = strutil.Truncate(taskName, 160, strutil.TruncateWithEllipsis, strutil.TruncateWithFullWords) - } + // Skip if the latest persisted state equals the new state (no new transition) + // Note: uuid.Nil check is valid here. If no previous status exists, + // GetLatestWorkspaceAppStatusByAppID returns sql.ErrNoRows and we get a zero-value struct. 
+ if latestAppStatus.ID != uuid.Nil && latestAppStatus.State == database.WorkspaceAppStatusState(newAppStatus) { + return + } - if _, err := api.NotificationsEnqueuer.EnqueueWithData( - // nolint:gocritic // Need notifier actor to enqueue notifications - dbauthz.AsNotifier(ctx), - workspace.OwnerID, - notificationTemplate, - map[string]string{ - "task": taskName, - "workspace": workspace.Name, - }, - map[string]any{ - // Use a 1-minute bucketed timestamp to bypass per-day dedupe, - // allowing identical content to resend within the same day - // (but not more than once every 10s). - "dedupe_bypass_ts": api.Clock.Now().UTC().Truncate(time.Minute), - }, - "api-workspace-agent-app-status", - // Associate this notification with related entities - workspace.ID, workspace.OwnerID, workspace.OrganizationID, appID, - ); err != nil { - api.Logger.Warn(ctx, "failed to notify of task state", slog.Error(err)) - return - } + // Skip the initial "Working" notification when task first starts. + // This is obvious to the user since they just created the task. + // We still notify on first "Idle" status and all subsequent transitions. + if latestAppStatus.ID == uuid.Nil && newAppStatus == codersdk.WorkspaceAppStatusStateWorking { + return + } + + if _, err := api.NotificationsEnqueuer.EnqueueWithData( + // nolint:gocritic // Need notifier actor to enqueue notifications + dbauthz.AsNotifier(ctx), + workspace.OwnerID, + notificationTemplate, + map[string]string{ + "task": task.Name, + "workspace": workspace.Name, + }, + map[string]any{ + // Use a 1-minute bucketed timestamp to bypass per-day dedupe, + // allowing identical content to resend within the same day + // (but not more than once every 10s). 
+ "dedupe_bypass_ts": api.Clock.Now().UTC().Truncate(time.Minute), + }, + "api-workspace-agent-app-status", + // Associate this notification with related entities + workspace.ID, workspace.OwnerID, workspace.OrganizationID, appID, + ); err != nil { + api.Logger.Warn(ctx, "failed to notify of task state", slog.Error(err)) + return } } diff --git a/coderd/workspaceagents_test.go b/coderd/workspaceagents_test.go index e950f970755bb..6c12f91d37388 100644 --- a/coderd/workspaceagents_test.go +++ b/coderd/workspaceagents_test.go @@ -5,12 +5,10 @@ import ( "encoding/json" "fmt" "maps" - "net" "net/http" "os" "path/filepath" - "runtime" - "strconv" + "slices" "strings" "sync" "sync/atomic" @@ -934,17 +932,45 @@ func TestWorkspaceAgentTailnetDirectDisabled(t *testing.T) { require.False(t, p2p) } +type fakeListeningPortsGetter struct { + sync.Mutex + ports []codersdk.WorkspaceAgentListeningPort +} + +func (g *fakeListeningPortsGetter) GetListeningPorts() ([]codersdk.WorkspaceAgentListeningPort, error) { + g.Lock() + defer g.Unlock() + return slices.Clone(g.ports), nil +} + +func (g *fakeListeningPortsGetter) setPorts(ports ...codersdk.WorkspaceAgentListeningPort) { + g.Lock() + defer g.Unlock() + g.ports = slices.Clone(ports) +} + func TestWorkspaceAgentListeningPorts(t *testing.T) { t.Parallel() - setup := func(t *testing.T, apps []*proto.App, dv *codersdk.DeploymentValues) (*codersdk.Client, uint16, uuid.UUID) { + testPort := codersdk.WorkspaceAgentListeningPort{ + Network: "tcp", + ProcessName: "test-app", + Port: 44762, + } + filteredPort := codersdk.WorkspaceAgentListeningPort{ + Network: "tcp", + ProcessName: "postgres", + Port: 5432, + } + + setup := func(t *testing.T, apps []*proto.App, dv *codersdk.DeploymentValues) (*codersdk.Client, uuid.UUID, *fakeListeningPortsGetter) { t.Helper() client, db := coderdtest.NewWithDatabase(t, &coderdtest.Options{ DeploymentValues: dv, }) - coderdPort, err := strconv.Atoi(client.URL.Port()) - require.NoError(t, err) + + fLPG := 
&fakeListeningPortsGetter{} user := coderdtest.CreateFirstUser(t, client) r := dbfake.WorkspaceBuild(t, db, database.WorkspaceTable{ @@ -955,228 +981,73 @@ func TestWorkspaceAgentListeningPorts(t *testing.T) { return agents }).Do() _ = agenttest.New(t, client.URL, r.AgentToken, func(o *agent.Options) { - o.PortCacheDuration = time.Millisecond + o.ListeningPortsGetter = fLPG }) - resources := coderdtest.AwaitWorkspaceAgents(t, client, r.Workspace.ID) + resources := coderdtest.NewWorkspaceAgentWaiter(t, client, r.Workspace.ID).Wait() // #nosec G115 - Safe conversion as TCP port numbers are within uint16 range (0-65535) - return client, uint16(coderdPort), resources[0].Agents[0].ID - } - - willFilterPort := func(port int) bool { - if port < workspacesdk.AgentMinimumListeningPort || port > 65535 { - return true - } - if _, ok := workspacesdk.AgentIgnoredListeningPorts[uint16(port)]; ok { - return true - } - - return false - } - - generateUnfilteredPort := func(t *testing.T) (net.Listener, uint16) { - var ( - l net.Listener - port uint16 - ) - require.Eventually(t, func() bool { - var err error - l, err = net.Listen("tcp", "localhost:0") - if err != nil { - return false - } - tcpAddr, _ := l.Addr().(*net.TCPAddr) - if willFilterPort(tcpAddr.Port) { - _ = l.Close() - return false - } - t.Cleanup(func() { - _ = l.Close() - }) - - // #nosec G115 - Safe conversion as TCP port numbers are within uint16 range (0-65535) - port = uint16(tcpAddr.Port) - return true - }, testutil.WaitShort, testutil.IntervalFast) - - return l, port - } - - generateFilteredPort := func(t *testing.T) (net.Listener, uint16) { - var ( - l net.Listener - port uint16 - ) - require.Eventually(t, func() bool { - for ignoredPort := range workspacesdk.AgentIgnoredListeningPorts { - if ignoredPort < 1024 || ignoredPort == 5432 { - continue - } - - var err error - l, err = net.Listen("tcp", fmt.Sprintf("localhost:%d", ignoredPort)) - if err != nil { - continue - } - t.Cleanup(func() { - _ = l.Close() - }) - 
- port = ignoredPort - return true - } - - return false - }, testutil.WaitShort, testutil.IntervalFast) - - return l, port + return client, resources[0].Agents[0].ID, fLPG } - t.Run("LinuxAndWindows", func(t *testing.T) { - t.Parallel() - if runtime.GOOS != "linux" && runtime.GOOS != "windows" { - t.Skip("only runs on linux and windows") - return - } - - for _, tc := range []struct { - name string - setDV func(t *testing.T, dv *codersdk.DeploymentValues) - }{ - { - name: "Mainline", - setDV: func(*testing.T, *codersdk.DeploymentValues) {}, - }, - { - name: "BlockDirect", - setDV: func(t *testing.T, dv *codersdk.DeploymentValues) { - err := dv.DERP.Config.BlockDirect.Set("true") - require.NoError(t, err) - require.True(t, dv.DERP.Config.BlockDirect.Value()) - }, - }, - } { - t.Run("OK_"+tc.name, func(t *testing.T) { - t.Parallel() - - dv := coderdtest.DeploymentValues(t) - tc.setDV(t, dv) - client, coderdPort, agentID := setup(t, nil, dv) - - ctx, cancel := context.WithTimeout(context.Background(), testutil.WaitLong) - defer cancel() - - // Generate a random unfiltered port. - l, lPort := generateUnfilteredPort(t) - - // List ports and ensure that the port we expect to see is there. 
- res, err := client.WorkspaceAgentListeningPorts(ctx, agentID) + for _, tc := range []struct { + name string + setDV func(t *testing.T, dv *codersdk.DeploymentValues) + }{ + { + name: "Mainline", + setDV: func(*testing.T, *codersdk.DeploymentValues) {}, + }, + { + name: "BlockDirect", + setDV: func(t *testing.T, dv *codersdk.DeploymentValues) { + err := dv.DERP.Config.BlockDirect.Set("true") require.NoError(t, err) - - expected := map[uint16]bool{ - // expect the listener we made - lPort: false, - // expect the coderdtest server - coderdPort: false, - } - for _, port := range res.Ports { - if port.Network == "tcp" { - if val, ok := expected[port.Port]; ok { - if val { - t.Fatalf("expected to find TCP port %d only once in response", port.Port) - } - } - expected[port.Port] = true - } - } - for port, found := range expected { - if !found { - t.Fatalf("expected to find TCP port %d in response", port) - } - } - - // Close the listener and check that the port is no longer in the response. - require.NoError(t, l.Close()) - t.Log("checking for ports after listener close:") - require.Eventually(t, func() bool { - res, err = client.WorkspaceAgentListeningPorts(ctx, agentID) - if !assert.NoError(t, err) { - return false - } - - for _, port := range res.Ports { - if port.Network == "tcp" && port.Port == lPort { - t.Logf("expected to not find TCP port %d in response", lPort) - return false - } - } - return true - }, testutil.WaitLong, testutil.IntervalMedium) - }) - } - - t.Run("Filter", func(t *testing.T) { + require.True(t, dv.DERP.Config.BlockDirect.Value()) + }, + }, + } { + t.Run("OK_"+tc.name, func(t *testing.T) { t.Parallel() - // Generate an unfiltered port that we will create an app for and - // should not exist in the response. - _, appLPort := generateUnfilteredPort(t) - app := &proto.App{ - Slug: "test-app", - Url: fmt.Sprintf("http://localhost:%d", appLPort), - } - - // Generate a filtered port that should not exist in the response. 
- _, filteredLPort := generateFilteredPort(t) - - client, coderdPort, agentID := setup(t, []*proto.App{app}, nil) + dv := coderdtest.DeploymentValues(t) + tc.setDV(t, dv) + client, agentID, fLPG := setup(t, nil, dv) ctx, cancel := context.WithTimeout(context.Background(), testutil.WaitLong) defer cancel() + fLPG.setPorts(testPort) + + // List ports and ensure that the port we expect to see is there. res, err := client.WorkspaceAgentListeningPorts(ctx, agentID) require.NoError(t, err) + require.Equal(t, []codersdk.WorkspaceAgentListeningPort{testPort}, res.Ports) - sawCoderdPort := false - for _, port := range res.Ports { - if port.Network == "tcp" { - if port.Port == appLPort { - t.Fatalf("expected to not find TCP port (app port) %d in response", appLPort) - } - if port.Port == filteredLPort { - t.Fatalf("expected to not find TCP port (filtered port) %d in response", filteredLPort) - } - if port.Port == coderdPort { - sawCoderdPort = true - } - } - } - if !sawCoderdPort { - t.Fatalf("expected to find TCP port (coderd port) %d in response", coderdPort) - } + // Remove the port and check that the port is no longer in the response. + fLPG.setPorts() + res, err = client.WorkspaceAgentListeningPorts(ctx, agentID) + require.NoError(t, err) + require.Empty(t, res.Ports) }) - }) + } - t.Run("Darwin", func(t *testing.T) { + t.Run("Filter", func(t *testing.T) { t.Parallel() - if runtime.GOOS != "darwin" { - t.Skip("only runs on darwin") - return + + app := &proto.App{ + Slug: testPort.ProcessName, + Url: fmt.Sprintf("http://localhost:%d", testPort.Port), } - client, _, agentID := setup(t, nil, nil) + client, agentID, fLPG := setup(t, []*proto.App{app}, nil) ctx, cancel := context.WithTimeout(context.Background(), testutil.WaitLong) defer cancel() - // Create a TCP listener on a random port. 
- l, err := net.Listen("tcp", "localhost:0") - require.NoError(t, err) - defer l.Close() + fLPG.setPorts(testPort, filteredPort) - // List ports and ensure that the list is empty because we're on darwin. res, err := client.WorkspaceAgentListeningPorts(ctx, agentID) require.NoError(t, err) - require.Len(t, res.Ports, 0) + require.Empty(t, res.Ports) }) } diff --git a/coderd/workspaceagentsrpc.go b/coderd/workspaceagentsrpc.go index 8dacbe9812ca9..50a14768c1b7d 100644 --- a/coderd/workspaceagentsrpc.go +++ b/coderd/workspaceagentsrpc.go @@ -132,7 +132,7 @@ func (api *API) workspaceAgentRPC(rw http.ResponseWriter, r *http.Request) { WorkspaceID: workspace.ID, OrganizationID: workspace.OrganizationID, - Ctx: api.ctx, + AuthenticatedCtx: ctx, Log: logger, Clock: api.Clock, Database: api.Database, @@ -158,7 +158,7 @@ func (api *API) workspaceAgentRPC(rw http.ResponseWriter, r *http.Request) { // Optional: UpdateAgentMetricsFn: api.UpdateAgentMetrics, - }) + }, workspace) streamID := tailnet.StreamID{ Name: fmt.Sprintf("%s-%s-%s", workspace.OwnerUsername, workspace.Name, workspaceAgent.Name), diff --git a/coderd/workspaceapps/apptest/setup.go b/coderd/workspaceapps/apptest/setup.go index 7fef20503bc2b..65eebf8ecada5 100644 --- a/coderd/workspaceapps/apptest/setup.go +++ b/coderd/workspaceapps/apptest/setup.go @@ -195,6 +195,22 @@ func setupProxyTestWithFactory(t *testing.T, factory DeploymentFactory, opts *De if opts.DisableSubdomainApps { opts.AppHost = "" } + if opts.StatsCollectorOptions.ReportInterval == 0 { + // Set to a really high value to avoid triggering flush without manually + // calling the function in test. This can easily happen because the + // default value is 30s and we run tests in parallel. 
The assertion + // typically happens such that: + // + // [use workspace] -> [fetch previous last used] -> [flush] -> [fetch new last used] + // + // When this edge case is triggered: + // + // [use workspace] -> [report interval flush] -> [fetch previous last used] -> [flush] -> [fetch new last used] + // + // In this case, both the previous and new last used will be the same, + // breaking the test assertion. + opts.StatsCollectorOptions.ReportInterval = 9001 * time.Hour + } deployment := factory(t, opts) diff --git a/coderd/workspaceapps/db.go b/coderd/workspaceapps/db.go index 4c24cc9325e46..4d77dc32b1fc7 100644 --- a/coderd/workspaceapps/db.go +++ b/coderd/workspaceapps/db.go @@ -35,6 +35,7 @@ import ( // by querying the database if the request is missing a valid token. type DBTokenProvider struct { Logger slog.Logger + ctx context.Context // DashboardURL is the main dashboard access URL for error pages. DashboardURL *url.URL @@ -50,7 +51,8 @@ type DBTokenProvider struct { var _ SignedTokenProvider = &DBTokenProvider{} -func NewDBTokenProvider(log slog.Logger, +func NewDBTokenProvider(ctx context.Context, + log slog.Logger, accessURL *url.URL, authz rbac.Authorizer, connectionLogger *atomic.Pointer[connectionlog.ConnectionLogger], @@ -70,6 +72,7 @@ func NewDBTokenProvider(log slog.Logger, return &DBTokenProvider{ Logger: log, + ctx: ctx, DashboardURL: accessURL, Authorizer: authz, ConnectionLogger: connectionLogger, @@ -94,7 +97,7 @@ func (p *DBTokenProvider) Issue(ctx context.Context, rw http.ResponseWriter, r * // // permissions. dangerousSystemCtx := dbauthz.AsSystemRestricted(ctx) - aReq, commitAudit := p.connLogInitRequest(ctx, rw, r) + aReq, commitAudit := p.connLogInitRequest(rw, r) defer commitAudit() appReq := issueReq.AppRequest.Normalize() @@ -406,7 +409,7 @@ type connLogRequest struct { // // A session is unique to the agent, app, user and users IP. If any of these // values change, a new session and connect log is created. 
-func (p *DBTokenProvider) connLogInitRequest(ctx context.Context, w http.ResponseWriter, r *http.Request) (aReq *connLogRequest, commit func()) { +func (p *DBTokenProvider) connLogInitRequest(w http.ResponseWriter, r *http.Request) (aReq *connLogRequest, commit func()) { // Get the status writer from the request context so we can figure // out the HTTP status and autocommit the audit log. sw, ok := w.(*tracing.StatusWriter) @@ -422,6 +425,9 @@ func (p *DBTokenProvider) connLogInitRequest(ctx context.Context, w http.Respons // this ensures that the status and response body are available. var committed bool return aReq, func() { + // We want to log/audit the connection attempt even if the request context has expired. + ctx, cancel := context.WithCancel(p.ctx) + defer cancel() if committed { return } diff --git a/coderd/workspacebuilds.go b/coderd/workspacebuilds.go index 1e3020376041b..0c58b902e2158 100644 --- a/coderd/workspacebuilds.go +++ b/coderd/workspacebuilds.go @@ -335,6 +335,15 @@ func (api *API) postWorkspaceBuilds(rw http.ResponseWriter, r *http.Request) { return } + // We want to allow a delete build for a deleted workspace, but not a start or stop build. 
+ if workspace.Deleted && createBuild.Transition != codersdk.WorkspaceTransitionDelete { + httpapi.Write(ctx, rw, http.StatusConflict, codersdk.Response{ + Message: fmt.Sprintf("Cannot %s a deleted workspace!", createBuild.Transition), + Detail: "This workspace has been deleted and cannot be modified.", + }) + return + } + apiBuild, err := api.postWorkspaceBuildsInternal( ctx, apiKey, @@ -1181,11 +1190,6 @@ func (api *API) convertWorkspaceBuild( if build.HasAITask.Valid { hasAITask = &build.HasAITask.Bool } - var taskAppID *uuid.UUID - if build.AITaskSidebarAppID.Valid { - taskAppID = &build.AITaskSidebarAppID.UUID - } - var hasExternalAgent *bool if build.HasExternalAgent.Valid { hasExternalAgent = &build.HasExternalAgent.Bool @@ -1218,8 +1222,6 @@ func (api *API) convertWorkspaceBuild( MatchedProvisioners: &matchedProvisioners, TemplateVersionPresetID: presetID, HasAITask: hasAITask, - AITaskSidebarAppID: taskAppID, - TaskAppID: taskAppID, HasExternalAgent: hasExternalAgent, }, nil } diff --git a/coderd/workspacebuilds_test.go b/coderd/workspacebuilds_test.go index f857296db1a5c..d0ab64b1aeb32 100644 --- a/coderd/workspacebuilds_test.go +++ b/coderd/workspacebuilds_test.go @@ -1840,6 +1840,68 @@ func TestPostWorkspaceBuild(t *testing.T) { require.NoError(t, err) require.Equal(t, codersdk.BuildReasonDashboard, build.Reason) }) + t.Run("DeletedWorkspace", func(t *testing.T) { + t.Parallel() + + // Given: a workspace that has already been deleted + var ( + ctx = testutil.Context(t, testutil.WaitShort) + logger = slogtest.Make(t, &slogtest.Options{}).Leveled(slog.LevelError) + adminClient, db = coderdtest.NewWithDatabase(t, &coderdtest.Options{ + Logger: &logger, + }) + admin = coderdtest.CreateFirstUser(t, adminClient) + workspaceOwnerClient, member1 = coderdtest.CreateAnotherUser(t, adminClient, admin.OrganizationID) + otherMemberClient, _ = coderdtest.CreateAnotherUser(t, adminClient, admin.OrganizationID) + ws = dbfake.WorkspaceBuild(t, db, 
database.WorkspaceTable{OwnerID: member1.ID, OrganizationID: admin.OrganizationID}). + Seed(database.WorkspaceBuild{Transition: database.WorkspaceTransitionDelete}). + Do() + ) + + // This needs to be done separately as provisionerd handles marking the workspace as deleted + // and we're skipping provisionerd here for speed. + require.NoError(t, db.UpdateWorkspaceDeletedByID(dbauthz.AsProvisionerd(ctx), database.UpdateWorkspaceDeletedByIDParams{ + ID: ws.Workspace.ID, + Deleted: true, + })) + + // Assert test invariant: Workspace should be deleted + dbWs, err := db.GetWorkspaceByID(dbauthz.AsProvisionerd(ctx), ws.Workspace.ID) + require.NoError(t, err) + require.True(t, dbWs.Deleted, "workspace should be deleted") + + for _, tc := range []struct { + user *codersdk.Client + tr codersdk.WorkspaceTransition + expectStatus int + }{ + // You should not be allowed to mess with a workspace you don't own, regardless of its deleted state. + {otherMemberClient, codersdk.WorkspaceTransitionStart, http.StatusNotFound}, + {otherMemberClient, codersdk.WorkspaceTransitionStop, http.StatusNotFound}, + {otherMemberClient, codersdk.WorkspaceTransitionDelete, http.StatusNotFound}, + // Starting or stopping a workspace is not allowed when it is deleted. + {workspaceOwnerClient, codersdk.WorkspaceTransitionStart, http.StatusConflict}, + {workspaceOwnerClient, codersdk.WorkspaceTransitionStop, http.StatusConflict}, + // We allow a delete just in case a retry is required. In most cases, this will be a no-op. + // Note: this is the last test case because it will change the state of the workspace. + {workspaceOwnerClient, codersdk.WorkspaceTransitionDelete, http.StatusOK}, + } { + // When: we create a workspace build with the given transition + _, err = tc.user.CreateWorkspaceBuild(ctx, ws.Workspace.ID, codersdk.CreateWorkspaceBuildRequest{ + Transition: tc.tr, + }) + + // Then: we allow ONLY a delete build for a deleted workspace. 
+ if tc.expectStatus < http.StatusBadRequest { + require.NoError(t, err, "creating a %s build for a deleted workspace should not error", tc.tr) + } else { + var apiError *codersdk.Error + require.Error(t, err, "creating a %s build for a deleted workspace should return an error", tc.tr) + require.ErrorAs(t, err, &apiError) + require.Equal(t, tc.expectStatus, apiError.StatusCode()) + } + } + }) } func TestWorkspaceBuildTimings(t *testing.T) { diff --git a/coderd/workspaces.go b/coderd/workspaces.go index e8b7ff51530c3..a02c16ec89576 100644 --- a/coderd/workspaces.go +++ b/coderd/workspaces.go @@ -1717,13 +1717,13 @@ func (api *API) postWorkspaceUsage(rw http.ResponseWriter, r *http.Request) { return } - template, err := api.Database.GetTemplateByID(ctx, workspace.TemplateID) - if err != nil { - httpapi.InternalServerError(rw, err) - return - } + // template, err := api.Database.GetTemplateByID(ctx, workspace.TemplateID) + // if err != nil { + // httpapi.InternalServerError(rw, err) + // return + // } - err = api.statsReporter.ReportAgentStats(ctx, dbtime.Now(), workspace, agent, template.Name, stat, true) + err = api.statsReporter.ReportAgentStats(ctx, dbtime.Now(), database.WorkspaceIdentityFromWorkspace(workspace), agent, stat, true) if err != nil { httpapi.InternalServerError(rw, err) return @@ -2654,6 +2654,7 @@ func convertWorkspace( Favorite: requesterFavorite, NextStartAt: nextStartAt, IsPrebuild: workspace.IsPrebuild(), + TaskID: workspace.TaskID, }, nil } diff --git a/coderd/workspaces_test.go b/coderd/workspaces_test.go index 51134dce27951..4ab334222a438 100644 --- a/coderd/workspaces_test.go +++ b/coderd/workspaces_test.go @@ -4700,11 +4700,16 @@ func TestWorkspaceFilterHasAITask(t *testing.T) { ctx := testutil.Context(t, testutil.WaitLong) - // Helper function to create workspace with AI task configuration - createWorkspaceWithAIConfig := func(hasAITask sql.NullBool, jobCompleted bool, aiTaskPrompt *string) database.WorkspaceTable { + // Helper function 
to create workspace with optional task. + createWorkspace := func(jobCompleted, createTask bool, prompt string) uuid.UUID { + // TODO(mafredri): The below comment is based on deprecated logic and + // kept only to test that the old observable behavior works as + // intended. + // // When a provisioner job uses these tags, no provisioner will match it. - We do this so jobs will always be stuck in "pending", allowing us to exercise the intermediary state when - has_ai_task is nil and we compensate by looking at pending provisioning jobs. + We do this so jobs will always be stuck in "pending", allowing us to + exercise the intermediary state when has_ai_task is nil and we + compensate by looking at pending provisioning jobs. // See GetWorkspaces clauses. unpickableTags := database.StringMap{"custom": "true"} @@ -4723,102 +4728,127 @@ func TestWorkspaceFilterHasAITask(t *testing.T) { jobConfig.CompletedAt = sql.NullTime{Time: time.Now(), Valid: true} } job := dbgen.ProvisionerJob(t, db, pubsub, jobConfig) - res := dbgen.WorkspaceResource(t, db, database.WorkspaceResource{JobID: job.ID}) agnt := dbgen.WorkspaceAgent(t, db, database.WorkspaceAgent{ResourceID: res.ID}) - - var sidebarAppID uuid.UUID - if hasAITask.Bool { - sidebarApp := dbgen.WorkspaceApp(t, db, database.WorkspaceApp{AgentID: agnt.ID}) - sidebarAppID = sidebarApp.ID - } - + taskApp := dbgen.WorkspaceApp(t, db, database.WorkspaceApp{AgentID: agnt.ID}) build := dbgen.WorkspaceBuild(t, db, database.WorkspaceBuild{ - WorkspaceID: ws.ID, - TemplateVersionID: version.ID, - InitiatorID: user.UserID, - JobID: job.ID, - BuildNumber: 1, - HasAITask: hasAITask, - AITaskSidebarAppID: uuid.NullUUID{UUID: sidebarAppID, Valid: sidebarAppID != uuid.Nil}, - }) - - if aiTaskPrompt != nil { - err := db.InsertWorkspaceBuildParameters(dbauthz.AsSystemRestricted(ctx), database.InsertWorkspaceBuildParametersParams{ - WorkspaceBuildID: build.ID, - Name: []string{provider.TaskPromptParameterName}, - Value: 
[]string{*aiTaskPrompt}, + WorkspaceID: ws.ID, + TemplateVersionID: version.ID, + InitiatorID: user.UserID, + JobID: job.ID, + BuildNumber: 1, + }) + + if createTask { + task := dbgen.Task(t, db, database.TaskTable{ + WorkspaceID: uuid.NullUUID{UUID: ws.ID, Valid: true}, + OrganizationID: user.OrganizationID, + OwnerID: user.UserID, + TemplateVersionID: version.ID, + Prompt: prompt, + }) + dbgen.TaskWorkspaceApp(t, db, database.TaskWorkspaceApp{ + TaskID: task.ID, + WorkspaceBuildNumber: build.BuildNumber, + WorkspaceAgentID: uuid.NullUUID{UUID: agnt.ID, Valid: true}, + WorkspaceAppID: uuid.NullUUID{UUID: taskApp.ID, Valid: true}, }) - require.NoError(t, err) } - return ws + return ws.ID } - // Create test workspaces with different AI task configurations - wsWithAITask := createWorkspaceWithAIConfig(sql.NullBool{Bool: true, Valid: true}, true, nil) - wsWithoutAITask := createWorkspaceWithAIConfig(sql.NullBool{Bool: false, Valid: true}, false, nil) + // Create workspaces with tasks. + wsWithTask1 := createWorkspace(true, true, "Build me a web app") + wsWithTask2 := createWorkspace(false, true, "Another task") - aiTaskPrompt := "Build me a web app" - wsWithAITaskParam := createWorkspaceWithAIConfig(sql.NullBool{Valid: false}, false, &aiTaskPrompt) - - anotherTaskPrompt := "Another task" - wsCompletedWithAITaskParam := createWorkspaceWithAIConfig(sql.NullBool{Valid: false}, true, &anotherTaskPrompt) - - emptyPrompt := "" - wsWithEmptyAITaskParam := createWorkspaceWithAIConfig(sql.NullBool{Valid: false}, false, &emptyPrompt) - - ctx, cancel := context.WithTimeout(context.Background(), testutil.WaitLong) - defer cancel() - - // Debug: Check all workspaces without filter first - allRes, err := client.Workspaces(ctx, codersdk.WorkspaceFilter{}) - require.NoError(t, err) - t.Logf("Total workspaces created: %d", len(allRes.Workspaces)) - for i, ws := range allRes.Workspaces { - t.Logf("All Workspace %d: ID=%s, Name=%s, Build ID=%s, Job ID=%s", i, ws.ID, ws.Name, 
ws.LatestBuild.ID, ws.LatestBuild.Job.ID) - } + // Create workspaces without tasks + wsWithoutTask1 := createWorkspace(true, false, "") + wsWithoutTask2 := createWorkspace(false, false, "") // Test filtering for workspaces with AI tasks - // Should include: wsWithAITask (has_ai_task=true) and wsWithAITaskParam (null + incomplete + param) + // Should include: wsWithTask1 and wsWithTask2 res, err := client.Workspaces(ctx, codersdk.WorkspaceFilter{ FilterQuery: "has-ai-task:true", }) require.NoError(t, err) - t.Logf("Expected 2 workspaces for has-ai-task:true, got %d", len(res.Workspaces)) - t.Logf("Expected workspaces: %s, %s", wsWithAITask.ID, wsWithAITaskParam.ID) - for i, ws := range res.Workspaces { - t.Logf("AI Task True Workspace %d: ID=%s, Name=%s", i, ws.ID, ws.Name) - } require.Len(t, res.Workspaces, 2) workspaceIDs := []uuid.UUID{res.Workspaces[0].ID, res.Workspaces[1].ID} - require.Contains(t, workspaceIDs, wsWithAITask.ID) - require.Contains(t, workspaceIDs, wsWithAITaskParam.ID) + require.Contains(t, workspaceIDs, wsWithTask1) + require.Contains(t, workspaceIDs, wsWithTask2) // Test filtering for workspaces without AI tasks - // Should include: wsWithoutAITask, wsCompletedWithAITaskParam, wsWithEmptyAITaskParam + // Should include: wsWithoutTask1, wsWithoutTask2, wsWithoutTask3 res, err = client.Workspaces(ctx, codersdk.WorkspaceFilter{ FilterQuery: "has-ai-task:false", }) require.NoError(t, err) - - // Debug: print what we got - t.Logf("Expected 3 workspaces for has-ai-task:false, got %d", len(res.Workspaces)) - for i, ws := range res.Workspaces { - t.Logf("Workspace %d: ID=%s, Name=%s", i, ws.ID, ws.Name) - } - t.Logf("Expected IDs: %s, %s, %s", wsWithoutAITask.ID, wsCompletedWithAITaskParam.ID, wsWithEmptyAITaskParam.ID) - - require.Len(t, res.Workspaces, 3) - workspaceIDs = []uuid.UUID{res.Workspaces[0].ID, res.Workspaces[1].ID, res.Workspaces[2].ID} - require.Contains(t, workspaceIDs, wsWithoutAITask.ID) - require.Contains(t, workspaceIDs, 
wsCompletedWithAITaskParam.ID) - require.Contains(t, workspaceIDs, wsWithEmptyAITaskParam.ID) + require.Len(t, res.Workspaces, 2) + workspaceIDs = []uuid.UUID{res.Workspaces[0].ID, res.Workspaces[1].ID} + require.Contains(t, workspaceIDs, wsWithoutTask1) + require.Contains(t, workspaceIDs, wsWithoutTask2) // Test no filter returns all res, err = client.Workspaces(ctx, codersdk.WorkspaceFilter{}) require.NoError(t, err) - require.Len(t, res.Workspaces, 5) + require.Len(t, res.Workspaces, 4) +} + +func TestWorkspaceListTasks(t *testing.T) { + t.Parallel() + + ctx := testutil.Context(t, testutil.WaitShort) + client := coderdtest.New(t, &coderdtest.Options{IncludeProvisionerDaemon: true}) + user := coderdtest.CreateFirstUser(t, client) + + version := coderdtest.CreateTemplateVersion(t, client, user.OrganizationID, &echo.Responses{ + Parse: echo.ParseComplete, + ProvisionApply: echo.ApplyComplete, + ProvisionPlan: []*proto.Response{ + {Type: &proto.Response_Plan{Plan: &proto.PlanComplete{ + HasAiTasks: true, + }}}, + }, + }) + coderdtest.AwaitTemplateVersionJobCompleted(t, client, version.ID) + template := coderdtest.CreateTemplate(t, client, user.OrganizationID, version.ID) + + // Given: a regular user workspace + workspaceWithoutTask, err := client.CreateUserWorkspace(ctx, codersdk.Me, codersdk.CreateWorkspaceRequest{ + TemplateID: template.ID, + Name: "user-workspace", + }) + require.NoError(t, err) + coderdtest.AwaitWorkspaceBuildJobCompleted(t, client, workspaceWithoutTask.LatestBuild.ID) + + // Given: a workspace associated with a task + task, err := client.CreateTask(ctx, codersdk.Me, codersdk.CreateTaskRequest{ + TemplateVersionID: template.ActiveVersionID, + Input: "Some task prompt", + }) + require.NoError(t, err) + assert.True(t, task.WorkspaceID.Valid) + workspaceWithTask, err := client.Workspace(ctx, task.WorkspaceID.UUID) + require.NoError(t, err) + coderdtest.AwaitWorkspaceBuildJobCompleted(t, client, workspaceWithTask.LatestBuild.ID) + assert.NotEmpty(t, 
task.Name) + assert.Equal(t, template.ID, task.TemplateID) + + // When: listing the workspaces + workspaces, err := client.Workspaces(ctx, codersdk.WorkspaceFilter{}) + require.NoError(t, err) + + assert.Equal(t, workspaces.Count, 2) + + // Then: verify TaskID is only set for task workspaces + for _, workspace := range workspaces.Workspaces { + if workspace.ID == workspaceWithoutTask.ID { + assert.False(t, workspace.TaskID.Valid) + } else if workspace.ID == workspaceWithTask.ID { + assert.True(t, workspace.TaskID.Valid) + assert.Equal(t, task.ID, workspace.TaskID.UUID) + } + } } func TestWorkspaceAppUpsertRestart(t *testing.T) { diff --git a/coderd/workspacestats/reporter.go b/coderd/workspacestats/reporter.go index 7a6b1d50034a8..ea81843488e82 100644 --- a/coderd/workspacestats/reporter.go +++ b/coderd/workspacestats/reporter.go @@ -120,7 +120,7 @@ func (r *Reporter) ReportAppStats(ctx context.Context, stats []workspaceapps.Sta } // nolint:revive // usage is a control flag while we have the experiment -func (r *Reporter) ReportAgentStats(ctx context.Context, now time.Time, workspace database.Workspace, workspaceAgent database.WorkspaceAgent, templateName string, stats *agentproto.Stats, usage bool) error { +func (r *Reporter) ReportAgentStats(ctx context.Context, now time.Time, workspace database.WorkspaceIdentity, workspaceAgent database.WorkspaceAgent, stats *agentproto.Stats, usage bool) error { // update agent stats r.opts.StatsBatcher.Add(now, workspaceAgent.ID, workspace.TemplateID, workspace.OwnerID, workspace.ID, stats, usage) @@ -130,7 +130,7 @@ func (r *Reporter) ReportAgentStats(ctx context.Context, now time.Time, workspac Username: workspace.OwnerUsername, WorkspaceName: workspace.Name, AgentName: workspaceAgent.Name, - TemplateName: templateName, + TemplateName: workspace.TemplateName, }, stats.Metrics) } diff --git a/codersdk/agentsdk/agentsdk.go b/codersdk/agentsdk/agentsdk.go index a6c8e5b1ea620..b668ab4a36569 100644 --- 
a/codersdk/agentsdk/agentsdk.go +++ b/codersdk/agentsdk/agentsdk.go @@ -391,7 +391,7 @@ func (i *InstanceIdentitySessionTokenProvider) GetSessionToken() string { defer cancel() resp, err := i.TokenExchanger.exchange(ctx) if err != nil { - i.logger.Error(ctx, "failed to exchange session token: %v", err) + i.logger.Error(ctx, "failed to exchange session token", slog.Error(err)) return "" } i.sessionToken = resp.SessionToken diff --git a/codersdk/aibridge.go b/codersdk/aibridge.go index b627f5e9d5ef7..09dab7caf04a9 100644 --- a/codersdk/aibridge.go +++ b/codersdk/aibridge.go @@ -13,6 +13,7 @@ import ( type AIBridgeInterception struct { ID uuid.UUID `json:"id" format:"uuid"` + APIKeyID *string `json:"api_key_id"` Initiator MinimalUser `json:"initiator"` Provider string `json:"provider"` Model string `json:"model"` @@ -64,7 +65,7 @@ type AIBridgeListInterceptionsResponse struct { // @typescript-ignore AIBridgeListInterceptionsFilter type AIBridgeListInterceptionsFilter struct { // Limit defaults to 100, max is 1000. - // Offset based pagination is not supported for AIBridge interceptions. Use + // Offset based pagination is not supported for AI Bridge interceptions. Use // cursor pagination instead with after_id. Pagination Pagination `json:"pagination,omitempty"` @@ -111,10 +112,10 @@ func (f AIBridgeListInterceptionsFilter) asRequestOption() RequestOption { } } -// AIBridgeListInterceptions returns AIBridge interceptions with the given +// AIBridgeListInterceptions returns AI Bridge interceptions with the given // filter. 
-func (c *ExperimentalClient) AIBridgeListInterceptions(ctx context.Context, filter AIBridgeListInterceptionsFilter) (AIBridgeListInterceptionsResponse, error) { - res, err := c.Request(ctx, http.MethodGet, "/api/experimental/aibridge/interceptions", nil, filter.asRequestOption(), filter.Pagination.asRequestOption(), filter.Pagination.asRequestOption()) +func (c *Client) AIBridgeListInterceptions(ctx context.Context, filter AIBridgeListInterceptionsFilter) (AIBridgeListInterceptionsResponse, error) { + res, err := c.Request(ctx, http.MethodGet, "/api/v2/aibridge/interceptions", nil, filter.asRequestOption(), filter.Pagination.asRequestOption(), filter.Pagination.asRequestOption()) if err != nil { return AIBridgeListInterceptionsResponse{}, err } diff --git a/codersdk/aitasks.go b/codersdk/aitasks.go index 9f390202e4fd2..e2acbfe4897c3 100644 --- a/codersdk/aitasks.go +++ b/codersdk/aitasks.go @@ -17,61 +17,28 @@ import ( // AITaskPromptParameterName is the name of the parameter used to pass prompts // to AI tasks. // -// Experimental: This value is experimental and may change in the future. -const AITaskPromptParameterName = provider.TaskPromptParameterName - -// AITasksPromptsResponse represents the response from the AITaskPrompts method. -// -// Experimental: This method is experimental and may change in the future. -type AITasksPromptsResponse struct { - // Prompts is a map of workspace build IDs to prompts. - Prompts map[string]string `json:"prompts"` -} - -// AITaskPrompts returns prompts for multiple workspace builds by their IDs. +// Deprecated: This constant is deprecated and maintained only for backwards +// compatibility with older templates. Task prompts are now stored directly +// in the tasks.prompt database column. New code should access prompts via +// the Task.InitialPrompt field returned from task endpoints. // -// Experimental: This method is experimental and may change in the future. 
-func (c *ExperimentalClient) AITaskPrompts(ctx context.Context, buildIDs []uuid.UUID) (AITasksPromptsResponse, error) { - if len(buildIDs) == 0 { - return AITasksPromptsResponse{ - Prompts: make(map[string]string), - }, nil - } - - // Convert UUIDs to strings and join them - buildIDStrings := make([]string, len(buildIDs)) - for i, id := range buildIDs { - buildIDStrings[i] = id.String() - } - buildIDsParam := strings.Join(buildIDStrings, ",") - - res, err := c.Request(ctx, http.MethodGet, "/api/experimental/aitasks/prompts", nil, WithQueryParam("build_ids", buildIDsParam)) - if err != nil { - return AITasksPromptsResponse{}, err - } - defer res.Body.Close() - if res.StatusCode != http.StatusOK { - return AITasksPromptsResponse{}, ReadBodyAsError(res) - } - var prompts AITasksPromptsResponse - return prompts, json.NewDecoder(res.Body).Decode(&prompts) -} +// This constant will be removed in a future major version. Templates should +// not rely on this parameter name, as the backend will continue to create it +// automatically for compatibility but reads from tasks.prompt. +const AITaskPromptParameterName = provider.TaskPromptParameterName // CreateTaskRequest represents the request to create a new task. -// -// Experimental: This type is experimental and may change in the future. type CreateTaskRequest struct { TemplateVersionID uuid.UUID `json:"template_version_id" format:"uuid"` TemplateVersionPresetID uuid.UUID `json:"template_version_preset_id,omitempty" format:"uuid"` Input string `json:"input"` Name string `json:"name,omitempty"` + DisplayName string `json:"display_name,omitempty"` } // CreateTask creates a new task. -// -// Experimental: This method is experimental and may change in the future. 
-func (c *ExperimentalClient) CreateTask(ctx context.Context, user string, request CreateTaskRequest) (Task, error) { - res, err := c.Request(ctx, http.MethodPost, fmt.Sprintf("/api/experimental/tasks/%s", user), request) +func (c *Client) CreateTask(ctx context.Context, user string, request CreateTaskRequest) (Task, error) { + res, err := c.Request(ctx, http.MethodPost, fmt.Sprintf("/api/v2/tasks/%s", user), request) if err != nil { return Task{}, err } @@ -90,8 +57,6 @@ func (c *ExperimentalClient) CreateTask(ctx context.Context, user string, reques } // TaskStatus represents the status of a task. -// -// Experimental: This type is experimental and may change in the future. type TaskStatus string const ( @@ -128,8 +93,6 @@ func AllTaskStatuses() []TaskStatus { } // TaskState represents the high-level lifecycle of a task. -// -// Experimental: This type is experimental and may change in the future. type TaskState string // TaskState enums. @@ -149,8 +112,6 @@ const ( ) // Task represents a task. -// -// Experimental: This type is experimental and may change in the future. type Task struct { ID uuid.UUID `json:"id" format:"uuid" table:"id"` OrganizationID uuid.UUID `json:"organization_id" format:"uuid" table:"organization id"` @@ -158,6 +119,7 @@ type Task struct { OwnerName string `json:"owner_name" table:"owner name"` OwnerAvatarURL string `json:"owner_avatar_url,omitempty" table:"owner avatar url"` Name string `json:"name" table:"name,default_sort"` + DisplayName string `json:"display_name" table:"display_name"` TemplateID uuid.UUID `json:"template_id" format:"uuid" table:"template id"` TemplateVersionID uuid.UUID `json:"template_version_id" format:"uuid" table:"template version id"` TemplateName string `json:"template_name" table:"template name"` @@ -179,8 +141,6 @@ type Task struct { } // TaskStateEntry represents a single entry in the task's state history. -// -// Experimental: This type is experimental and may change in the future. 
type TaskStateEntry struct { Timestamp time.Time `json:"timestamp" format:"date-time" table:"-"` State TaskState `json:"state" enum:"working,idle,completed,failed" table:"state"` @@ -189,8 +149,6 @@ type TaskStateEntry struct { } // TasksFilter filters the list of tasks. -// -// Experimental: This type is experimental and may change in the future. type TasksFilter struct { // Owner can be a username, UUID, or "me". Owner string `json:"owner,omitempty"` @@ -203,8 +161,6 @@ type TasksFilter struct { } // TaskListResponse is the response shape for tasks list. -// -// Experimental response shape for tasks list (server returns []Task). type TasksListResponse struct { Tasks []Task `json:"tasks"` Count int `json:"count"` @@ -236,14 +192,12 @@ func (f TasksFilter) asRequestOption() RequestOption { } // Tasks lists all tasks belonging to the user or specified owner. -// -// Experimental: This method is experimental and may change in the future. -func (c *ExperimentalClient) Tasks(ctx context.Context, filter *TasksFilter) ([]Task, error) { +func (c *Client) Tasks(ctx context.Context, filter *TasksFilter) ([]Task, error) { if filter == nil { filter = &TasksFilter{} } - res, err := c.Request(ctx, http.MethodGet, "/api/experimental/tasks", nil, filter.asRequestOption()) + res, err := c.Request(ctx, http.MethodGet, "/api/v2/tasks", nil, filter.asRequestOption()) if err != nil { return nil, err } @@ -260,11 +214,32 @@ func (c *ExperimentalClient) Tasks(ctx context.Context, filter *TasksFilter) ([] return tres.Tasks, nil } -// TaskByID fetches a single experimental task by its ID. -// -// Experimental: This method is experimental and may change in the future. -func (c *ExperimentalClient) TaskByID(ctx context.Context, id uuid.UUID) (Task, error) { - res, err := c.Request(ctx, http.MethodGet, fmt.Sprintf("/api/experimental/tasks/%s/%s", "me", id.String()), nil) +// TaskByID fetches a single task by its ID. +// Only tasks owned by codersdk.Me are supported. 
+func (c *Client) TaskByID(ctx context.Context, id uuid.UUID) (Task, error) { + res, err := c.Request(ctx, http.MethodGet, fmt.Sprintf("/api/v2/tasks/%s/%s", "me", id.String()), nil) + if err != nil { + return Task{}, err + } + defer res.Body.Close() + if res.StatusCode != http.StatusOK { + return Task{}, ReadBodyAsError(res) + } + + var task Task + if err := json.NewDecoder(res.Body).Decode(&task); err != nil { + return Task{}, err + } + + return task, nil +} + +// TaskByOwnerAndName fetches a single task by its owner and name. +func (c *Client) TaskByOwnerAndName(ctx context.Context, owner, ident string) (Task, error) { + if owner == "" { + owner = Me + } + res, err := c.Request(ctx, http.MethodGet, fmt.Sprintf("/api/v2/tasks/%s/%s", owner, ident), nil) if err != nil { return Task{}, err } @@ -303,7 +278,7 @@ func splitTaskIdentifier(identifier string) (owner string, taskName string, err // // Since there is no TaskByOwnerAndName endpoint yet, this function uses the // list endpoint with filtering when a name is provided. -func (c *ExperimentalClient) TaskByIdentifier(ctx context.Context, identifier string) (Task, error) { +func (c *Client) TaskByIdentifier(ctx context.Context, identifier string) (Task, error) { identifier = strings.TrimSpace(identifier) // Try parsing as UUID first. @@ -317,41 +292,12 @@ func (c *ExperimentalClient) TaskByIdentifier(ctx context.Context, identifier st return Task{}, err } - tasks, err := c.Tasks(ctx, &TasksFilter{ - Owner: owner, - }) - if err != nil { - return Task{}, xerrors.Errorf("list tasks for owner %q: %w", owner, err) - } - - if taskID, err := uuid.Parse(taskName); err == nil { - // Find task by ID. - for _, task := range tasks { - if task.ID == taskID { - return task, nil - } - } - } else { - // Find task by name. - for _, task := range tasks { - if task.Name == taskName { - return task, nil - } - } - } - - // Mimic resource not found from API. 
- var notFoundErr error = &Error{ - Response: Response{Message: "Resource not found or you do not have access to this resource"}, - } - return Task{}, xerrors.Errorf("task %q not found for owner %q: %w", taskName, owner, notFoundErr) + return c.TaskByOwnerAndName(ctx, owner, taskName) } // DeleteTask deletes a task by its ID. -// -// Experimental: This method is experimental and may change in the future. -func (c *ExperimentalClient) DeleteTask(ctx context.Context, user string, id uuid.UUID) error { - res, err := c.Request(ctx, http.MethodDelete, fmt.Sprintf("/api/experimental/tasks/%s/%s", user, id.String()), nil) +func (c *Client) DeleteTask(ctx context.Context, user string, id uuid.UUID) error { + res, err := c.Request(ctx, http.MethodDelete, fmt.Sprintf("/api/v2/tasks/%s/%s", user, id.String()), nil) if err != nil { return err } @@ -363,17 +309,31 @@ func (c *ExperimentalClient) DeleteTask(ctx context.Context, user string, id uui } // TaskSendRequest is used to send task input to the tasks sidebar app. -// -// Experimental: This type is experimental and may change in the future. type TaskSendRequest struct { Input string `json:"input"` } // TaskSend submits task input to the tasks sidebar app. -// -// Experimental: This method is experimental and may change in the future. -func (c *ExperimentalClient) TaskSend(ctx context.Context, user string, id uuid.UUID, req TaskSendRequest) error { - res, err := c.Request(ctx, http.MethodPost, fmt.Sprintf("/api/experimental/tasks/%s/%s/send", user, id.String()), req) +func (c *Client) TaskSend(ctx context.Context, user string, id uuid.UUID, req TaskSendRequest) error { + res, err := c.Request(ctx, http.MethodPost, fmt.Sprintf("/api/v2/tasks/%s/%s/send", user, id.String()), req) + if err != nil { + return err + } + defer res.Body.Close() + if res.StatusCode != http.StatusNoContent { + return ReadBodyAsError(res) + } + return nil +} + +// UpdateTaskInputRequest is used to update a task's input. 
+type UpdateTaskInputRequest struct { + Input string `json:"input"` +} + +// UpdateTaskInput updates the task's input. +func (c *Client) UpdateTaskInput(ctx context.Context, user string, id uuid.UUID, req UpdateTaskInputRequest) error { + res, err := c.Request(ctx, http.MethodPatch, fmt.Sprintf("/api/v2/tasks/%s/%s/input", user, id.String()), req) if err != nil { return err } @@ -385,8 +345,6 @@ func (c *ExperimentalClient) TaskSend(ctx context.Context, user string, id uuid. } // TaskLogType indicates the source of a task log entry. -// -// Experimental: This type is experimental and may change in the future. type TaskLogType string // TaskLogType enums. @@ -396,8 +354,6 @@ const ( ) // TaskLogEntry represents a single log entry for a task. -// -// Experimental: This type is experimental and may change in the future. type TaskLogEntry struct { ID int `json:"id" table:"id"` Content string `json:"content" table:"content"` @@ -406,17 +362,13 @@ type TaskLogEntry struct { } // TaskLogsResponse contains the logs for a task. -// -// Experimental: This type is experimental and may change in the future. type TaskLogsResponse struct { Logs []TaskLogEntry `json:"logs"` } -// TaskLogs retrieves logs from the task's sidebar app via the experimental API. -// -// Experimental: This method is experimental and may change in the future. -func (c *ExperimentalClient) TaskLogs(ctx context.Context, user string, id uuid.UUID) (TaskLogsResponse, error) { - res, err := c.Request(ctx, http.MethodGet, fmt.Sprintf("/api/experimental/tasks/%s/%s/logs", user, id.String()), nil) +// TaskLogs retrieves logs from the task app. 
+func (c *Client) TaskLogs(ctx context.Context, user string, id uuid.UUID) (TaskLogsResponse, error) { + res, err := c.Request(ctx, http.MethodGet, fmt.Sprintf("/api/v2/tasks/%s/%s/logs", user, id.String()), nil) if err != nil { return TaskLogsResponse{}, err } diff --git a/codersdk/apikey_scopes_gen.go b/codersdk/apikey_scopes_gen.go index df7fe96c4585e..f4bc90152dd42 100644 --- a/codersdk/apikey_scopes_gen.go +++ b/codersdk/apikey_scopes_gen.go @@ -221,6 +221,10 @@ var PublicAPIKeyScopes = []APIKeyScope{ APIKeyScopeFileAll, APIKeyScopeFileCreate, APIKeyScopeFileRead, + APIKeyScopeOrganizationAll, + APIKeyScopeOrganizationDelete, + APIKeyScopeOrganizationRead, + APIKeyScopeOrganizationUpdate, APIKeyScopeTaskAll, APIKeyScopeTaskCreate, APIKeyScopeTaskDelete, diff --git a/codersdk/client.go b/codersdk/client.go index 42ad51286f181..72dd7ac4b64f4 100644 --- a/codersdk/client.go +++ b/codersdk/client.go @@ -251,16 +251,17 @@ func (c *Client) RequestWithoutSessionToken(ctx context.Context, method, path st } // Copy the request body so we can log it. - var reqBody []byte + var reqLogFields []any c.mu.RLock() logBodies := c.logBodies c.mu.RUnlock() if r != nil && logBodies { - reqBody, err = io.ReadAll(r) + reqBody, err := io.ReadAll(r) if err != nil { return nil, xerrors.Errorf("read request body: %w", err) } r = bytes.NewReader(reqBody) + reqLogFields = append(reqLogFields, slog.F("body", string(reqBody))) } req, err := http.NewRequestWithContext(ctx, method, serverURL.String(), r) @@ -291,7 +292,7 @@ func (c *Client) RequestWithoutSessionToken(ctx context.Context, method, path st slog.F("url", req.URL.String()), ) tracing.RunWithoutSpan(ctx, func(ctx context.Context) { - c.Logger().Debug(ctx, "sdk request", slog.F("body", string(reqBody))) + c.Logger().Debug(ctx, "sdk request", reqLogFields...) 
}) resp, err := c.HTTPClient.Do(req) @@ -324,11 +325,11 @@ func (c *Client) RequestWithoutSessionToken(ctx context.Context, method, path st span.SetStatus(httpconv.ClientStatus(resp.StatusCode)) // Copy the response body so we can log it if it's a loggable mime type. - var respBody []byte + var respLogFields []any if resp.Body != nil && logBodies { mimeType := parseMimeType(resp.Header.Get("Content-Type")) if _, ok := loggableMimeTypes[mimeType]; ok { - respBody, err = io.ReadAll(resp.Body) + respBody, err := io.ReadAll(resp.Body) if err != nil { return nil, xerrors.Errorf("copy response body for logs: %w", err) } @@ -337,16 +338,18 @@ func (c *Client) RequestWithoutSessionToken(ctx context.Context, method, path st return nil, xerrors.Errorf("close response body: %w", err) } resp.Body = io.NopCloser(bytes.NewReader(respBody)) + respLogFields = append(respLogFields, slog.F("body", string(respBody))) } } // See above for why this is not logged to the span. tracing.RunWithoutSpan(ctx, func(ctx context.Context) { c.Logger().Debug(ctx, "sdk response", - slog.F("status", resp.StatusCode), - slog.F("body", string(respBody)), - slog.F("trace_id", resp.Header.Get("X-Trace-Id")), - slog.F("span_id", resp.Header.Get("X-Span-Id")), + append(respLogFields, + slog.F("status", resp.StatusCode), + slog.F("trace_id", resp.Header.Get("X-Trace-Id")), + slog.F("span_id", resp.Header.Get("X-Span-Id")), + )..., ) }) diff --git a/codersdk/client_internal_test.go b/codersdk/client_internal_test.go index cfd8bdbf26086..415e88ac9c9fc 100644 --- a/codersdk/client_internal_test.go +++ b/codersdk/client_internal_test.go @@ -162,6 +162,45 @@ func Test_Client(t *testing.T) { require.Contains(t, logStr, strings.ReplaceAll(resBody, `"`, `\"`)) } +func Test_Client_LogBodiesFalse(t *testing.T) { + t.Parallel() + + const method = http.MethodPost + const path = "/ok" + const reqBody = `{"msg": "request body"}` + const resBody = `{"status": "ok"}` + + s := httptest.NewServer(http.HandlerFunc(func(w 
http.ResponseWriter, r *http.Request) { + w.Header().Set("Content-Type", jsonCT) + w.WriteHeader(http.StatusOK) + _, _ = io.WriteString(w, resBody) + })) + + u, err := url.Parse(s.URL) + require.NoError(t, err) + client := New(u) + + logBuf := bytes.NewBuffer(nil) + client.SetLogger(slog.Make(sloghuman.Sink(logBuf)).Leveled(slog.LevelDebug)) + client.SetLogBodies(false) + + ctx, cancel := context.WithTimeout(context.Background(), testutil.WaitLong) + defer cancel() + + resp, err := client.Request(ctx, method, path, []byte(reqBody)) + require.NoError(t, err) + defer resp.Body.Close() + + body, err := io.ReadAll(resp.Body) + require.NoError(t, err) + require.Equal(t, resBody, string(body)) + + logStr := logBuf.String() + require.Contains(t, logStr, "sdk request") + require.Contains(t, logStr, "sdk response") + require.NotContains(t, logStr, "body") +} + func Test_readBodyAsError(t *testing.T) { t.Parallel() diff --git a/codersdk/deployment.go b/codersdk/deployment.go index 97bbb13bedbc7..a51e3e9247e58 100644 --- a/codersdk/deployment.go +++ b/codersdk/deployment.go @@ -1174,7 +1174,7 @@ func (c *DeploymentValues) Options() serpent.OptionSet { YAML: "inbox", } deploymentGroupAIBridge = serpent.Group{ - Name: "AIBridge", + Name: "AI Bridge", YAML: "aibridge", } ) @@ -3238,20 +3238,19 @@ Write out the current server config as YAML to stdout.`, YAML: "hideAITasks", }, - // AIBridge Options + // AI Bridge Options { - Name: "AIBridge Enabled", - Description: fmt.Sprintf("Whether to start an in-memory aibridged instance (%q experiment must be enabled, too).", ExperimentAIBridge), + Name: "AI Bridge Enabled", + Description: "Whether to start an in-memory aibridged instance.", Flag: "aibridge-enabled", Env: "CODER_AIBRIDGE_ENABLED", Value: &c.AI.BridgeConfig.Enabled, Default: "false", Group: &deploymentGroupAIBridge, YAML: "enabled", - Hidden: true, }, { - Name: "AIBridge OpenAI Base URL", + Name: "AI Bridge OpenAI Base URL", Description: "The base URL of the OpenAI API.", 
Flag: "aibridge-openai-base-url", Env: "CODER_AIBRIDGE_OPENAI_BASE_URL", @@ -3259,10 +3258,9 @@ Write out the current server config as YAML to stdout.`, Default: "https://api.openai.com/v1/", Group: &deploymentGroupAIBridge, YAML: "openai_base_url", - Hidden: true, }, { - Name: "AIBridge OpenAI Key", + Name: "AI Bridge OpenAI Key", Description: "The key to authenticate against the OpenAI API.", Flag: "aibridge-openai-key", Env: "CODER_AIBRIDGE_OPENAI_KEY", @@ -3270,10 +3268,9 @@ Write out the current server config as YAML to stdout.`, Default: "", Group: &deploymentGroupAIBridge, YAML: "openai_key", - Hidden: true, }, { - Name: "AIBridge Anthropic Base URL", + Name: "AI Bridge Anthropic Base URL", Description: "The base URL of the Anthropic API.", Flag: "aibridge-anthropic-base-url", Env: "CODER_AIBRIDGE_ANTHROPIC_BASE_URL", @@ -3281,10 +3278,9 @@ Write out the current server config as YAML to stdout.`, Default: "https://api.anthropic.com/", Group: &deploymentGroupAIBridge, YAML: "anthropic_base_url", - Hidden: true, }, { - Name: "AIBridge Anthropic Key", + Name: "AI Bridge Anthropic Key", Description: "The key to authenticate against the Anthropic API.", Flag: "aibridge-anthropic-key", Env: "CODER_AIBRIDGE_ANTHROPIC_KEY", @@ -3292,10 +3288,9 @@ Write out the current server config as YAML to stdout.`, Default: "", Group: &deploymentGroupAIBridge, YAML: "anthropic_key", - Hidden: true, }, { - Name: "AIBridge Bedrock Region", + Name: "AI Bridge Bedrock Region", Description: "The AWS Bedrock API region.", Flag: "aibridge-bedrock-region", Env: "CODER_AIBRIDGE_BEDROCK_REGION", @@ -3303,10 +3298,9 @@ Write out the current server config as YAML to stdout.`, Default: "", Group: &deploymentGroupAIBridge, YAML: "bedrock_region", - Hidden: true, }, { - Name: "AIBridge Bedrock Access Key", + Name: "AI Bridge Bedrock Access Key", Description: "The access key to authenticate against the AWS Bedrock API.", Flag: "aibridge-bedrock-access-key", Env: 
"CODER_AIBRIDGE_BEDROCK_ACCESS_KEY", @@ -3314,10 +3308,9 @@ Write out the current server config as YAML to stdout.`, Default: "", Group: &deploymentGroupAIBridge, YAML: "bedrock_access_key", - Hidden: true, }, { - Name: "AIBridge Bedrock Access Key Secret", + Name: "AI Bridge Bedrock Access Key Secret", Description: "The access key secret to use with the access key to authenticate against the AWS Bedrock API.", Flag: "aibridge-bedrock-access-key-secret", Env: "CODER_AIBRIDGE_BEDROCK_ACCESS_KEY_SECRET", @@ -3325,10 +3318,9 @@ Write out the current server config as YAML to stdout.`, Default: "", Group: &deploymentGroupAIBridge, YAML: "bedrock_access_key_secret", - Hidden: true, }, { - Name: "AIBridge Bedrock Model", + Name: "AI Bridge Bedrock Model", Description: "The model to use when making requests to the AWS Bedrock API.", Flag: "aibridge-bedrock-model", Env: "CODER_AIBRIDGE_BEDROCK_MODEL", @@ -3336,10 +3328,9 @@ Write out the current server config as YAML to stdout.`, Default: "global.anthropic.claude-sonnet-4-5-20250929-v1:0", // See https://docs.claude.com/en/api/claude-on-amazon-bedrock#accessing-bedrock. Group: &deploymentGroupAIBridge, YAML: "bedrock_model", - Hidden: true, }, { - Name: "AIBridge Bedrock Small Fast Model", + Name: "AI Bridge Bedrock Small Fast Model", Description: "The small fast model to use when making requests to the AWS Bedrock API. Claude Code uses Haiku-class models to perform background tasks. See https://docs.claude.com/en/docs/claude-code/settings#environment-variables.", Flag: "aibridge-bedrock-small-fastmodel", Env: "CODER_AIBRIDGE_BEDROCK_SMALL_FAST_MODEL", @@ -3347,7 +3338,27 @@ Write out the current server config as YAML to stdout.`, Default: "global.anthropic.claude-haiku-4-5-20251001-v1:0", // See https://docs.claude.com/en/api/claude-on-amazon-bedrock#accessing-bedrock. 
Group: &deploymentGroupAIBridge, YAML: "bedrock_small_fast_model", - Hidden: true, + }, + { + Name: "AI Bridge Inject Coder MCP tools", + Description: "Whether to inject Coder's MCP tools into intercepted AI Bridge requests (requires the \"oauth2\" and \"mcp-server-http\" experiments to be enabled).", + Flag: "aibridge-inject-coder-mcp-tools", + Env: "CODER_AIBRIDGE_INJECT_CODER_MCP_TOOLS", + Value: &c.AI.BridgeConfig.InjectCoderMCPTools, + Default: "false", + Group: &deploymentGroupAIBridge, + YAML: "inject_coder_mcp_tools", + }, + { + Name: "AI Bridge Data Retention Duration", + Description: "Length of time to retain data such as interceptions and all related records (token, prompt, tool use).", + Flag: "aibridge-retention", + Env: "CODER_AIBRIDGE_RETENTION", + Value: &c.AI.BridgeConfig.Retention, + Default: "60d", + Group: &deploymentGroupAIBridge, + YAML: "retention", + Annotations: serpent.Annotations{}.Mark(annotationFormatDuration, "true"), }, { Name: "Enable Authorization Recordings", @@ -3368,10 +3379,12 @@ Write out the current server config as YAML to stdout.`, } type AIBridgeConfig struct { - Enabled serpent.Bool `json:"enabled" typescript:",notnull"` - OpenAI AIBridgeOpenAIConfig `json:"openai" typescript:",notnull"` - Anthropic AIBridgeAnthropicConfig `json:"anthropic" typescript:",notnull"` - Bedrock AIBridgeBedrockConfig `json:"bedrock" typescript:",notnull"` + Enabled serpent.Bool `json:"enabled" typescript:",notnull"` + OpenAI AIBridgeOpenAIConfig `json:"openai" typescript:",notnull"` + Anthropic AIBridgeAnthropicConfig `json:"anthropic" typescript:",notnull"` + Bedrock AIBridgeBedrockConfig `json:"bedrock" typescript:",notnull"` + InjectCoderMCPTools serpent.Bool `json:"inject_coder_mcp_tools" typescript:",notnull"` + Retention serpent.Duration `json:"retention" typescript:",notnull"` } type AIBridgeOpenAIConfig struct { @@ -3645,7 +3658,8 @@ const ( ExperimentOAuth2 Experiment = "oauth2" // Enables OAuth2 provider functionality. 
ExperimentMCPServerHTTP Experiment = "mcp-server-http" // Enables the MCP HTTP server functionality. ExperimentWorkspaceSharing Experiment = "workspace-sharing" // Enables updating workspace ACLs for sharing with users and groups. - ExperimentAIBridge Experiment = "aibridge" // Enables AI Bridge functionality. + // ExperimentTerraformWorkspace uses the "Terraform Workspaces" feature, not to be confused with Coder Workspaces. + ExperimentTerraformWorkspace Experiment = "terraform-directory-reuse" // Enables reuse of existing terraform directory for builds ) func (e Experiment) DisplayName() string { @@ -3666,8 +3680,8 @@ func (e Experiment) DisplayName() string { return "MCP HTTP Server Functionality" case ExperimentWorkspaceSharing: return "Workspace Sharing" - case ExperimentAIBridge: - return "AI Bridge" + case ExperimentTerraformWorkspace: + return "Terraform Directory Reuse" default: // Split on hyphen and convert to title case // e.g. "web-push" -> "Web Push", "mcp-server-http" -> "Mcp Server Http" @@ -3686,7 +3700,6 @@ var ExperimentsKnown = Experiments{ ExperimentOAuth2, ExperimentMCPServerHTTP, ExperimentWorkspaceSharing, - ExperimentAIBridge, } // ExperimentsSafe should include all experiments that are safe for diff --git a/codersdk/oauth2.go b/codersdk/oauth2.go index 79b2186480b9c..6b4d220df0a46 100644 --- a/codersdk/oauth2.go +++ b/codersdk/oauth2.go @@ -262,6 +262,7 @@ type OAuth2AuthorizationServerMetadata struct { AuthorizationEndpoint string `json:"authorization_endpoint"` TokenEndpoint string `json:"token_endpoint"` RegistrationEndpoint string `json:"registration_endpoint,omitempty"` + RevocationEndpoint string `json:"revocation_endpoint,omitempty"` ResponseTypesSupported []string `json:"response_types_supported"` GrantTypesSupported []string `json:"grant_types_supported"` CodeChallengeMethodsSupported []string `json:"code_challenge_methods_supported"` diff --git a/codersdk/templates.go b/codersdk/templates.go index 49c1f9e7c57f9..36d57521c595d 
100644 --- a/codersdk/templates.go +++ b/codersdk/templates.go @@ -63,7 +63,8 @@ type Template struct { MaxPortShareLevel WorkspaceAgentPortShareLevel `json:"max_port_share_level"` CORSBehavior CORSBehavior `json:"cors_behavior"` - UseClassicParameterFlow bool `json:"use_classic_parameter_flow"` + UseClassicParameterFlow bool `json:"use_classic_parameter_flow"` + UseTerraformWorkspaceCache bool `json:"use_terraform_workspace_cache"` } // WeekdaysToBitmap converts a list of weekdays to a bitmap in accordance with @@ -263,6 +264,11 @@ type UpdateTemplateMeta struct { // made the default. // An "opt-out" is present in case the new feature breaks some existing templates. UseClassicParameterFlow *bool `json:"use_classic_parameter_flow,omitempty"` + // UseTerraformWorkspaceCache allows optionally specifying whether to use cached + // terraform directories for workspaces created from this template. This field + // only applies when the correct experiment is enabled. This field is subject to + // being removed in the future. + UseTerraformWorkspaceCache *bool `json:"use_terraform_workspace_cache,omitempty"` } type TemplateExample struct { @@ -507,3 +513,34 @@ func (c *Client) StarterTemplates(ctx context.Context) ([]TemplateExample, error var templateExamples []TemplateExample return templateExamples, json.NewDecoder(res.Body).Decode(&templateExamples) } + +type InvalidatePresetsResponse struct { + Invalidated []InvalidatedPreset `json:"invalidated"` +} + +type InvalidatedPreset struct { + TemplateName string `json:"template_name"` + TemplateVersionName string `json:"template_version_name"` + PresetName string `json:"preset_name"` +} + +// InvalidateTemplatePresets invalidates all presets for the +// template's active version by setting last_invalidated_at timestamp. +// The reconciler will then mark these prebuilds as expired and create new ones. 
+func (c *Client) InvalidateTemplatePresets(ctx context.Context, template uuid.UUID) (InvalidatePresetsResponse, error) { + res, err := c.Request(ctx, http.MethodPost, + fmt.Sprintf("/api/v2/templates/%s/prebuilds/invalidate", template), + nil, + ) + if err != nil { + return InvalidatePresetsResponse{}, err + } + defer res.Body.Close() + + if res.StatusCode != http.StatusOK { + return InvalidatePresetsResponse{}, ReadBodyAsError(res) + } + + var response InvalidatePresetsResponse + return response, json.NewDecoder(res.Body).Decode(&response) +} diff --git a/codersdk/toolsdk/toolsdk.go b/codersdk/toolsdk/toolsdk.go index 802b319a5a6b1..454e014265134 100644 --- a/codersdk/toolsdk/toolsdk.go +++ b/codersdk/toolsdk/toolsdk.go @@ -317,13 +317,14 @@ type GetWorkspaceArgs struct { var GetWorkspace = Tool[GetWorkspaceArgs, codersdk.Workspace]{ Tool: aisdk.Tool{ Name: ToolNameGetWorkspace, - Description: `Get a workspace by ID. + Description: `Get a workspace by name or ID. This returns more data than list_workspaces to reduce token usage.`, Schema: aisdk.Schema{ Properties: map[string]any{ "workspace_id": map[string]any{ - "type": "string", + "type": "string", + "description": workspaceDescription, }, }, Required: []string{"workspace_id"}, @@ -332,7 +333,7 @@ This returns more data than list_workspaces to reduce token usage.`, Handler: func(ctx context.Context, deps Deps, args GetWorkspaceArgs) (codersdk.Workspace, error) { wsID, err := uuid.Parse(args.WorkspaceID) if err != nil { - return codersdk.Workspace{}, xerrors.New("workspace_id must be a valid UUID") + return namedWorkspace(ctx, deps.coderClient, NormalizeWorkspaceInput(args.WorkspaceID)) } return deps.coderClient.Workspace(ctx, wsID) }, @@ -1432,7 +1433,7 @@ var WorkspaceLS = Tool[WorkspaceLSArgs, WorkspaceLSResponse]{ Properties: map[string]any{ "workspace": map[string]any{ "type": "string", - "description": workspaceDescription, + "description": workspaceAgentDescription, }, "path": map[string]any{ "type": 
"string", @@ -1489,7 +1490,7 @@ var WorkspaceReadFile = Tool[WorkspaceReadFileArgs, WorkspaceReadFileResponse]{ Properties: map[string]any{ "workspace": map[string]any{ "type": "string", - "description": workspaceDescription, + "description": workspaceAgentDescription, }, "path": map[string]any{ "type": "string", @@ -1566,7 +1567,7 @@ content you are trying to write, then re-encode it properly. Properties: map[string]any{ "workspace": map[string]any{ "type": "string", - "description": workspaceDescription, + "description": workspaceAgentDescription, }, "path": map[string]any{ "type": "string", @@ -1614,7 +1615,7 @@ var WorkspaceEditFile = Tool[WorkspaceEditFileArgs, codersdk.Response]{ Properties: map[string]any{ "workspace": map[string]any{ "type": "string", - "description": workspaceDescription, + "description": workspaceAgentDescription, }, "path": map[string]any{ "type": "string", @@ -1681,7 +1682,7 @@ var WorkspaceEditFiles = Tool[WorkspaceEditFilesArgs, codersdk.Response]{ Properties: map[string]any{ "workspace": map[string]any{ "type": "string", - "description": workspaceDescription, + "description": workspaceAgentDescription, }, "files": map[string]any{ "type": "array", @@ -1755,7 +1756,7 @@ var WorkspacePortForward = Tool[WorkspacePortForwardArgs, WorkspacePortForwardRe Properties: map[string]any{ "workspace": map[string]any{ "type": "string", - "description": workspaceDescription, + "description": workspaceAgentDescription, }, "port": map[string]any{ "type": "number", @@ -1812,7 +1813,7 @@ var WorkspaceListApps = Tool[WorkspaceListAppsArgs, WorkspaceListAppsResponse]{ Properties: map[string]any{ "workspace": map[string]any{ "type": "string", - "description": workspaceDescription, + "description": workspaceAgentDescription, }, }, Required: []string{"workspace"}, @@ -1898,8 +1899,7 @@ var CreateTask = Tool[CreateTaskArgs, codersdk.Task]{ args.User = codersdk.Me } - expClient := codersdk.NewExperimentalClient(deps.coderClient) - task, err := 
expClient.CreateTask(ctx, args.User, codersdk.CreateTaskRequest{ + task, err := deps.coderClient.CreateTask(ctx, args.User, codersdk.CreateTaskRequest{ Input: args.Input, TemplateVersionID: tvID, TemplateVersionPresetID: tvPresetID, @@ -1936,14 +1936,12 @@ var DeleteTask = Tool[DeleteTaskArgs, codersdk.Response]{ return codersdk.Response{}, xerrors.New("task_id is required") } - expClient := codersdk.NewExperimentalClient(deps.coderClient) - - task, err := expClient.TaskByIdentifier(ctx, args.TaskID) + task, err := deps.coderClient.TaskByIdentifier(ctx, args.TaskID) if err != nil { return codersdk.Response{}, xerrors.Errorf("resolve task: %w", err) } - err = expClient.DeleteTask(ctx, task.OwnerName, task.ID) + err = deps.coderClient.DeleteTask(ctx, task.OwnerName, task.ID) if err != nil { return codersdk.Response{}, xerrors.Errorf("delete task: %w", err) } @@ -1987,8 +1985,7 @@ var ListTasks = Tool[ListTasksArgs, ListTasksResponse]{ args.User = codersdk.Me } - expClient := codersdk.NewExperimentalClient(deps.coderClient) - tasks, err := expClient.Tasks(ctx, &codersdk.TasksFilter{ + tasks, err := deps.coderClient.Tasks(ctx, &codersdk.TasksFilter{ Owner: args.User, Status: args.Status, }) @@ -2031,9 +2028,7 @@ var GetTaskStatus = Tool[GetTaskStatusArgs, GetTaskStatusResponse]{ return GetTaskStatusResponse{}, xerrors.New("task_id is required") } - expClient := codersdk.NewExperimentalClient(deps.coderClient) - - task, err := expClient.TaskByIdentifier(ctx, args.TaskID) + task, err := deps.coderClient.TaskByIdentifier(ctx, args.TaskID) if err != nil { return GetTaskStatusResponse{}, xerrors.Errorf("resolve task %q: %w", args.TaskID, err) } @@ -2078,14 +2073,12 @@ var SendTaskInput = Tool[SendTaskInputArgs, codersdk.Response]{ return codersdk.Response{}, xerrors.New("input is required") } - expClient := codersdk.NewExperimentalClient(deps.coderClient) - - task, err := expClient.TaskByIdentifier(ctx, args.TaskID) + task, err := deps.coderClient.TaskByIdentifier(ctx, 
args.TaskID) if err != nil { return codersdk.Response{}, xerrors.Errorf("resolve task %q: %w", args.TaskID, err) } - err = expClient.TaskSend(ctx, task.OwnerName, task.ID, codersdk.TaskSendRequest{ + err = deps.coderClient.TaskSend(ctx, task.OwnerName, task.ID, codersdk.TaskSendRequest{ Input: args.Input, }) if err != nil { @@ -2122,14 +2115,12 @@ var GetTaskLogs = Tool[GetTaskLogsArgs, codersdk.TaskLogsResponse]{ return codersdk.TaskLogsResponse{}, xerrors.New("task_id is required") } - expClient := codersdk.NewExperimentalClient(deps.coderClient) - - task, err := expClient.TaskByIdentifier(ctx, args.TaskID) + task, err := deps.coderClient.TaskByIdentifier(ctx, args.TaskID) if err != nil { return codersdk.TaskLogsResponse{}, err } - logs, err := expClient.TaskLogs(ctx, task.OwnerName, task.ID) + logs, err := deps.coderClient.TaskLogs(ctx, task.OwnerName, task.ID) if err != nil { return codersdk.TaskLogsResponse{}, xerrors.Errorf("get task logs %q: %w", args.TaskID, err) } @@ -2199,7 +2190,9 @@ func newAgentConn(ctx context.Context, client *codersdk.Client, workspace string return conn, nil } -const workspaceDescription = "The workspace name in the format [owner/]workspace[.agent]. If an owner is not specified, the authenticated user is used." +const workspaceDescription = "The workspace ID or name in the format [owner/]workspace. If an owner is not specified, the authenticated user is used." + +const workspaceAgentDescription = "The workspace name in the format [owner/]workspace[.agent]. If an owner is not specified, the authenticated user is used." func taskIDDescription(action string) string { return fmt.Sprintf("ID or workspace identifier in the format [owner/]workspace[.agent] for the task to %s. 
If an owner is not specified, the authenticated user is used.", action) diff --git a/codersdk/toolsdk/toolsdk_test.go b/codersdk/toolsdk/toolsdk_test.go index 749105f5b85ed..f69bcc4d0e7fe 100644 --- a/codersdk/toolsdk/toolsdk_test.go +++ b/codersdk/toolsdk/toolsdk_test.go @@ -28,6 +28,7 @@ import ( "github.com/coder/coder/v2/agent/agenttest" "github.com/coder/coder/v2/coderd/coderdtest" "github.com/coder/coder/v2/coderd/database" + "github.com/coder/coder/v2/coderd/database/dbauthz" "github.com/coder/coder/v2/coderd/database/dbfake" "github.com/coder/coder/v2/coderd/database/dbgen" "github.com/coder/coder/v2/coderd/httpapi" @@ -106,8 +107,9 @@ func TestTools(t *testing.T) { }) t.Run("ReportTask", func(t *testing.T) { + ctx := testutil.Context(t, testutil.WaitShort) tb, err := toolsdk.NewDeps(memberClient, toolsdk.WithTaskReporter(func(args toolsdk.ReportTaskArgs) error { - return agentClient.PatchAppStatus(setupCtx, agentsdk.PatchAppStatus{ + return agentClient.PatchAppStatus(ctx, agentsdk.PatchAppStatus{ AppSlug: "some-agent-app", Message: args.Summary, URI: args.Link, @@ -126,12 +128,32 @@ func TestTools(t *testing.T) { t.Run("GetWorkspace", func(t *testing.T) { tb, err := toolsdk.NewDeps(memberClient) require.NoError(t, err) - result, err := testTool(t, toolsdk.GetWorkspace, tb, toolsdk.GetWorkspaceArgs{ - WorkspaceID: r.Workspace.ID.String(), - }) - require.NoError(t, err) - require.Equal(t, r.Workspace.ID, result.ID, "expected the workspace ID to match") + tests := []struct { + name string + workspace string + }{ + { + name: "ByID", + workspace: r.Workspace.ID.String(), + }, + { + name: "ByName", + workspace: r.Workspace.Name, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + + result, err := testTool(t, toolsdk.GetWorkspace, tb, toolsdk.GetWorkspaceArgs{ + WorkspaceID: tt.workspace, + }) + require.NoError(t, err) + require.Equal(t, r.Workspace.ID, result.ID, "expected the workspace ID to match") + }) + } }) 
t.Run("ListTemplates", func(t *testing.T) { @@ -851,7 +873,7 @@ func TestTools(t *testing.T) { TemplateVersionID: r.TemplateVersion.ID.String(), Input: "do yet another barrel roll", }, - error: "Template does not have required parameter \"AI Prompt\"", + error: "Template does not have a valid \"coder_ai_task\" resource.", }, { name: "WithPreset", @@ -860,7 +882,7 @@ func TestTools(t *testing.T) { TemplateVersionPresetID: presetID.String(), Input: "not enough barrel rolls", }, - error: "Template does not have required parameter \"AI Prompt\"", + error: "Template does not have a valid \"coder_ai_task\" resource.", }, } @@ -1003,11 +1025,8 @@ func TestTools(t *testing.T) { coderdtest.AwaitTemplateVersionJobCompleted(t, client, version.ID) template := coderdtest.CreateTemplate(t, client, owner.OrganizationID, version.ID) - expClient := codersdk.NewExperimentalClient(client) - taskExpClient := codersdk.NewExperimentalClient(taskClient) - // This task should not show up since listing is user-scoped. - _, err := expClient.CreateTask(ctx, member.Username, codersdk.CreateTaskRequest{ + _, err := client.CreateTask(ctx, member.Username, codersdk.CreateTaskRequest{ TemplateVersionID: template.ActiveVersionID, Input: "task for member", Name: "list-task-workspace-member", @@ -1017,7 +1036,7 @@ func TestTools(t *testing.T) { // Create tasks for taskUser. These should show up in the list. for i := range 5 { taskName := fmt.Sprintf("list-task-workspace-%d", i) - task, err := taskExpClient.CreateTask(ctx, codersdk.Me, codersdk.CreateTaskRequest{ + task, err := taskClient.CreateTask(ctx, codersdk.Me, codersdk.CreateTaskRequest{ TemplateVersionID: template.ActiveVersionID, Input: fmt.Sprintf("task %d", i), Name: taskName, @@ -1373,7 +1392,17 @@ func TestTools(t *testing.T) { task := ws.Task _ = agenttest.New(t, client.URL, ws.AgentToken) - coderdtest.NewWorkspaceAgentWaiter(t, client, ws.Workspace.ID).Wait() + coderdtest.NewWorkspaceAgentWaiter(t, client, ws.Workspace.ID). 
+ WaitFor(coderdtest.AgentsReady) + + ctx := testutil.Context(t, testutil.WaitShort) + + // Ensure the app is healthy (required to send task input). + err = store.UpdateWorkspaceAppHealthByID(dbauthz.AsSystemRestricted(ctx), database.UpdateWorkspaceAppHealthByIDParams{ + ID: task.WorkspaceAppID.UUID, + Health: database.WorkspaceAppHealthHealthy, + }) + require.NoError(t, err) tests := []struct { name string @@ -1434,8 +1463,6 @@ func TestTools(t *testing.T) { for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { - t.Parallel() - tb, err := toolsdk.NewDeps(memberClient) require.NoError(t, err) @@ -1505,7 +1532,17 @@ func TestTools(t *testing.T) { task := ws.Task _ = agenttest.New(t, client.URL, ws.AgentToken) - coderdtest.NewWorkspaceAgentWaiter(t, client, ws.Workspace.ID).Wait() + coderdtest.NewWorkspaceAgentWaiter(t, client, ws.Workspace.ID). + WaitFor(coderdtest.AgentsReady) + + ctx := testutil.Context(t, testutil.WaitShort) + + // Ensure the app is healthy (required to read task logs). 
+ err = store.UpdateWorkspaceAppHealthByID(dbauthz.AsSystemRestricted(ctx), database.UpdateWorkspaceAppHealthByIDParams{ + ID: task.WorkspaceAppID.UUID, + Health: database.WorkspaceAppHealthHealthy, + }) + require.NoError(t, err) tests := []struct { name string @@ -1557,8 +1594,6 @@ func TestTools(t *testing.T) { for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { - t.Parallel() - tb, err := toolsdk.NewDeps(memberClient) require.NoError(t, err) diff --git a/codersdk/users.go b/codersdk/users.go index 44464f9476ddd..1bf09370d9a2f 100644 --- a/codersdk/users.go +++ b/codersdk/users.go @@ -217,6 +217,14 @@ type UpdateUserAppearanceSettingsRequest struct { TerminalFont TerminalFontName `json:"terminal_font" validate:"required"` } +type UserPreferenceSettings struct { + TaskNotificationAlertDismissed bool `json:"task_notification_alert_dismissed"` +} + +type UpdateUserPreferenceSettingsRequest struct { + TaskNotificationAlertDismissed bool `json:"task_notification_alert_dismissed"` +} + type UpdateUserPasswordRequest struct { OldPassword string `json:"old_password" validate:""` Password string `json:"password" validate:"required"` @@ -514,6 +522,34 @@ func (c *Client) UpdateUserAppearanceSettings(ctx context.Context, user string, return resp, json.NewDecoder(res.Body).Decode(&resp) } +// GetUserPreferenceSettings fetches the preference settings for a user. +func (c *Client) GetUserPreferenceSettings(ctx context.Context, user string) (UserPreferenceSettings, error) { + res, err := c.Request(ctx, http.MethodGet, fmt.Sprintf("/api/v2/users/%s/preferences", user), nil) + if err != nil { + return UserPreferenceSettings{}, err + } + defer res.Body.Close() + if res.StatusCode != http.StatusOK { + return UserPreferenceSettings{}, ReadBodyAsError(res) + } + var resp UserPreferenceSettings + return resp, json.NewDecoder(res.Body).Decode(&resp) +} + +// UpdateUserPreferenceSettings updates the preference settings for a user. 
+func (c *Client) UpdateUserPreferenceSettings(ctx context.Context, user string, req UpdateUserPreferenceSettingsRequest) (UserPreferenceSettings, error) { + res, err := c.Request(ctx, http.MethodPut, fmt.Sprintf("/api/v2/users/%s/preferences", user), req) + if err != nil { + return UserPreferenceSettings{}, err + } + defer res.Body.Close() + if res.StatusCode != http.StatusOK { + return UserPreferenceSettings{}, ReadBodyAsError(res) + } + var resp UserPreferenceSettings + return resp, json.NewDecoder(res.Body).Decode(&resp) +} + // UpdateUserPassword updates a user password. // It calls PUT /users/{user}/password func (c *Client) UpdateUserPassword(ctx context.Context, user string, req UpdateUserPasswordRequest) error { diff --git a/codersdk/workspacebuilds.go b/codersdk/workspacebuilds.go index fee4c114b7eae..a91148ab2ad9e 100644 --- a/codersdk/workspacebuilds.go +++ b/codersdk/workspacebuilds.go @@ -88,11 +88,9 @@ type WorkspaceBuild struct { DailyCost int32 `json:"daily_cost"` MatchedProvisioners *MatchedProvisioners `json:"matched_provisioners,omitempty"` TemplateVersionPresetID *uuid.UUID `json:"template_version_preset_id" format:"uuid"` - HasAITask *bool `json:"has_ai_task,omitempty"` - // Deprecated: This field has been replaced with `TaskAppID` - AITaskSidebarAppID *uuid.UUID `json:"ai_task_sidebar_app_id,omitempty" format:"uuid"` - TaskAppID *uuid.UUID `json:"task_app_id,omitempty" format:"uuid"` - HasExternalAgent *bool `json:"has_external_agent,omitempty"` + // Deprecated: This field has been deprecated in favor of Task WorkspaceID. 
+ HasAITask *bool `json:"has_ai_task,omitempty"` + HasExternalAgent *bool `json:"has_external_agent,omitempty"` } // WorkspaceResource describes resources used to create a workspace, for instance: diff --git a/codersdk/workspaces.go b/codersdk/workspaces.go index f190d58be6bfb..709c9257c8350 100644 --- a/codersdk/workspaces.go +++ b/codersdk/workspaces.go @@ -72,6 +72,8 @@ type Workspace struct { // Once a prebuilt workspace is claimed by a user, it transitions to a regular workspace, // and IsPrebuild returns false. IsPrebuild bool `json:"is_prebuild"` + // TaskID, if set, indicates that the workspace is relevant to the given codersdk.Task. + TaskID uuid.NullUUID `json:"task_id,omitempty"` } func (w Workspace) FullName() string { diff --git a/docs/admin/infrastructure/validated-architectures/10k-users.md b/docs/admin/infrastructure/validated-architectures/10k-users.md deleted file mode 100644 index 486ac8192c991..0000000000000 --- a/docs/admin/infrastructure/validated-architectures/10k-users.md +++ /dev/null @@ -1,124 +0,0 @@ -# Reference Architecture: up to 10,000 users - -> [!CAUTION] -> This page is a work in progress. -> -> We are actively testing different load profiles for this user target and will be updating -> recommendations. Use these recommendations as a starting point, but monitor your cluster resource -> utilization and adjust. - -The 10,000 users architecture targets large-scale enterprises with development -teams in multiple geographic regions. - -**Geographic Distribution**: For these tests we deploy on 3 cloud-managed Kubernetes clusters in -the following regions: - -1. USA - Primary - Coderd collocated with the PostgreSQL database deployment. -2. Europe - Workspace Proxies -3. Asia - Workspace Proxies - -**High Availability**: Typically, such scale requires a fully-managed HA -PostgreSQL service, and all Coder observability features enabled for operational -purposes. 
- -**Observability**: Deploy monitoring solutions to gather Prometheus metrics and -visualize them with Grafana to gain detailed insights into infrastructure and -application behavior. This allows operators to respond quickly to incidents and -continuously improve the reliability and performance of the platform. - -## Testing Methodology - -### Workspace Network Traffic - -6000 concurrent workspaces (2000 per region), each sending 10 kB/s application traffic. - -Test procedure: - -1. Create workspaces. This happens simultaneously in each region with 200 provisioners (and thus 600 concurrent builds). -2. Wait 5 minutes to establish baselines for metrics. -3. Generate 10 kB/s traffic to each workspace (originating within the same region & cluster). - -After, we examine the Coderd, Workspace Proxy, and Database metrics to look for issues. - -### Dynamic Parameters - -1000 connections simulating changing parameters while configuring a new workspace. - -Test procedure: - -1. Create a template with complex parameter logic and multiple template versions. -1. Partition the connections among the template versions (forces Coder to process multiple template files) -1. Simultaneously connect to the dynamic-parameters API websocket endpoint for the template version -1. Wait for the initial parameter update. -1. Send a new parameter value that has cascading effects among other parameters. -1. Wait for the next update. - -After, we examine the latency in the initial connection and update, as well as Coderd and Database metrics to look for -issues. - -### API Request Traffic - -To be determined. - -## Hardware recommendations - -### Coderd - -These are deployed in the Primary region only. - -| vCPU Limit | Memory Limit | Replicas | GCP Node Pool Machine Type | -|----------------|--------------|----------|----------------------------| -| 4 vCPU (4000m) | 12 GiB | 10 | `c2d-standard-16` | - -### Provisioners - -These are deployed in each of the 3 regions. 
- -| vCPU Limit | Memory Limit | Replicas | GCP Node Pool Machine Type | -|-----------------|--------------|----------|----------------------------| -| 0.1 vCPU (100m) | 1 GiB | 200 | `c2d-standard-16` | - -**Footnotes**: - -- Each provisioner handles a single concurrent build, so this configuration implies 200 concurrent - workspace builds per region. -- Provisioners are run as a separate Kubernetes Deployment from Coderd, although they may - share the same node pool. -- Separate provisioners into different namespaces in favor of zero-trust or - multi-cloud deployments. - -### Workspace Proxies - -These are deployed in the non-Primary regions only. - -| vCPU Limit | Memory Limit | Replicas | GCP Node Pool Machine Type | -|----------------|--------------|----------|----------------------------| -| 4 vCPU (4000m) | 12 GiB | 10 | `c2d-standard-16` | - -**Footnotes**: - -- Our testing implies this is somewhat overspecced for the loads we have tried. We are in process of revising these numbers. - -### Workspaces - -These numbers are for each of the 3 regions. We recommend that you use a separate node pool for user Workspaces. - -| Users | Node capacity | Replicas | GCP | AWS | Azure | -|-------------|----------------------|-------------------------------|------------------|--------------|-------------------| -| Up to 3,000 | 8 vCPU, 32 GB memory | 256 nodes, 12 workspaces each | `t2d-standard-8` | `m5.2xlarge` | `Standard_D8s_v3` | - -**Footnotes**: - -- Assumed that a workspace user needs 2 GB memory to perform -- Maximum number of Kubernetes workspace pods per node: 256 -- As workspace nodes can be distributed between regions, on-premises networks - and cloud areas, consider different namespaces in favor of zero-trust or - multi-cloud deployments. - -### Database nodes - -We conducted our test using the `db-custom-16-61440` tier on Google Cloud SQL. - -**Footnotes**: - -- This database tier was only just able to keep up with 600 concurrent builds in our tests. 
diff --git a/docs/admin/infrastructure/validated-architectures/index.md b/docs/admin/infrastructure/validated-architectures/index.md index 59602f22bc47a..6bd18f7f3c132 100644 --- a/docs/admin/infrastructure/validated-architectures/index.md +++ b/docs/admin/infrastructure/validated-architectures/index.md @@ -220,8 +220,6 @@ For sizing recommendations, see the below reference architectures: - [Up to 3,000 users](3k-users.md) -- DRAFT: [Up to 10,000 users](10k-users.md) - ### AWS Instance Types For production AWS deployments, we recommend using non-burstable instance types, diff --git a/docs/admin/integrations/prometheus.md b/docs/admin/integrations/prometheus.md index f3820bdd298dd..5085832775b87 100644 --- a/docs/admin/integrations/prometheus.md +++ b/docs/admin/integrations/prometheus.md @@ -104,90 +104,97 @@ deployment. They will always be available from the agent. -| Name | Type | Description | Labels | -|---------------------------------------------------------------|-----------|----------------------------------------------------------------------------------------------------------------------------------|--------------------------------------------------------------------------------------| -| `agent_scripts_executed_total` | counter | Total number of scripts executed by the Coder agent. Includes cron scheduled scripts. | `agent_name` `success` `template_name` `username` `workspace_name` | -| `coderd_agents_apps` | gauge | Agent applications with statuses. | `agent_name` `app_name` `health` `username` `workspace_name` | -| `coderd_agents_connection_latencies_seconds` | gauge | Agent connection latencies in seconds. | `agent_name` `derp_region` `preferred` `username` `workspace_name` | -| `coderd_agents_connections` | gauge | Agent connections with statuses. | `agent_name` `lifecycle_state` `status` `tailnet_node` `username` `workspace_name` | -| `coderd_agents_up` | gauge | The number of active agents per workspace. 
| `template_name` `username` `workspace_name` | -| `coderd_agentstats_connection_count` | gauge | The number of established connections by agent | `agent_name` `username` `workspace_name` | -| `coderd_agentstats_connection_median_latency_seconds` | gauge | The median agent connection latency | `agent_name` `username` `workspace_name` | -| `coderd_agentstats_currently_reachable_peers` | gauge | The number of peers (e.g. clients) that are currently reachable over the encrypted network. | `agent_name` `connection_type` `template_name` `username` `workspace_name` | -| `coderd_agentstats_rx_bytes` | gauge | Agent Rx bytes | `agent_name` `username` `workspace_name` | -| `coderd_agentstats_session_count_jetbrains` | gauge | The number of session established by JetBrains | `agent_name` `username` `workspace_name` | -| `coderd_agentstats_session_count_reconnecting_pty` | gauge | The number of session established by reconnecting PTY | `agent_name` `username` `workspace_name` | -| `coderd_agentstats_session_count_ssh` | gauge | The number of session established by SSH | `agent_name` `username` `workspace_name` | -| `coderd_agentstats_session_count_vscode` | gauge | The number of session established by VSCode | `agent_name` `username` `workspace_name` | -| `coderd_agentstats_startup_script_seconds` | gauge | The number of seconds the startup script took to execute. | `agent_name` `success` `template_name` `username` `workspace_name` | -| `coderd_agentstats_tx_bytes` | gauge | Agent Tx bytes | `agent_name` `username` `workspace_name` | -| `coderd_api_active_users_duration_hour` | gauge | The number of users that have been active within the last hour. | | -| `coderd_api_concurrent_requests` | gauge | The number of concurrent API requests. | | -| `coderd_api_concurrent_websockets` | gauge | The total number of concurrent API websockets. | | -| `coderd_api_request_latencies_seconds` | histogram | Latency distribution of requests in seconds. 
| `method` `path` | -| `coderd_api_requests_processed_total` | counter | The total number of processed API requests | `code` `method` `path` | -| `coderd_api_websocket_durations_seconds` | histogram | Websocket duration distribution of requests in seconds. | `path` | -| `coderd_api_workspace_latest_build` | gauge | The latest workspace builds with a status. | `status` | -| `coderd_api_workspace_latest_build_total` | gauge | DEPRECATED: use coderd_api_workspace_latest_build instead | `status` | -| `coderd_insights_applications_usage_seconds` | gauge | The application usage per template. | `application_name` `slug` `template_name` | -| `coderd_insights_parameters` | gauge | The parameter usage per template. | `parameter_name` `parameter_type` `parameter_value` `template_name` | -| `coderd_insights_templates_active_users` | gauge | The number of active users of the template. | `template_name` | -| `coderd_license_active_users` | gauge | The number of active users. | | -| `coderd_license_limit_users` | gauge | The user seats limit based on the active Coder license. | | -| `coderd_license_user_limit_enabled` | gauge | Returns 1 if the current license enforces the user limit. | | -| `coderd_metrics_collector_agents_execution_seconds` | histogram | Histogram for duration of agents metrics collection in seconds. | | -| `coderd_oauth2_external_requests_rate_limit` | gauge | The total number of allowed requests per interval. | `name` `resource` | -| `coderd_oauth2_external_requests_rate_limit_next_reset_unix` | gauge | Unix timestamp of the next interval | `name` `resource` | -| `coderd_oauth2_external_requests_rate_limit_remaining` | gauge | The remaining number of allowed requests in this interval. 
| `name` `resource` | -| `coderd_oauth2_external_requests_rate_limit_reset_in_seconds` | gauge | Seconds until the next interval | `name` `resource` | -| `coderd_oauth2_external_requests_rate_limit_total` | gauge | DEPRECATED: use coderd_oauth2_external_requests_rate_limit instead | `name` `resource` | -| `coderd_oauth2_external_requests_rate_limit_used` | gauge | The number of requests made in this interval. | `name` `resource` | -| `coderd_oauth2_external_requests_total` | counter | The total number of api calls made to external oauth2 providers. 'status_code' will be 0 if the request failed with no response. | `name` `source` `status_code` | -| `coderd_prebuilt_workspace_claim_duration_seconds` | histogram | Time to claim a prebuilt workspace by organization, template, and preset. | `organization_name` `preset_name` `template_name` | -| `coderd_provisionerd_job_timings_seconds` | histogram | The provisioner job time duration in seconds. | `provisioner` `status` | -| `coderd_provisionerd_jobs_current` | gauge | The number of currently running provisioner jobs. | `provisioner` | -| `coderd_provisionerd_num_daemons` | gauge | The number of provisioner daemons. | | -| `coderd_provisionerd_workspace_build_timings_seconds` | histogram | The time taken for a workspace to build. | `status` `template_name` `template_version` `workspace_transition` | -| `coderd_workspace_builds_total` | counter | The number of workspaces started, updated, or deleted. | `action` `owner_email` `status` `template_name` `template_version` `workspace_name` | -| `coderd_workspace_creation_duration_seconds` | histogram | Time to create a workspace by organization, template, preset, and type (regular or prebuild). | `organization_name` `preset_name` `template_name` `type` | -| `coderd_workspace_creation_total` | counter | Total regular (non-prebuilt) workspace creations by organization, template, and preset. 
| `organization_name` `preset_name` `template_name` | -| `coderd_workspace_latest_build_status` | gauge | The current workspace statuses by template, transition, and owner. | `status` `template_name` `template_version` `workspace_owner` `workspace_transition` | -| `go_gc_duration_seconds` | summary | A summary of the pause duration of garbage collection cycles. | | -| `go_goroutines` | gauge | Number of goroutines that currently exist. | | -| `go_info` | gauge | Information about the Go environment. | `version` | -| `go_memstats_alloc_bytes` | gauge | Number of bytes allocated and still in use. | | -| `go_memstats_alloc_bytes_total` | counter | Total number of bytes allocated, even if freed. | | -| `go_memstats_buck_hash_sys_bytes` | gauge | Number of bytes used by the profiling bucket hash table. | | -| `go_memstats_frees_total` | counter | Total number of frees. | | -| `go_memstats_gc_sys_bytes` | gauge | Number of bytes used for garbage collection system metadata. | | -| `go_memstats_heap_alloc_bytes` | gauge | Number of heap bytes allocated and still in use. | | -| `go_memstats_heap_idle_bytes` | gauge | Number of heap bytes waiting to be used. | | -| `go_memstats_heap_inuse_bytes` | gauge | Number of heap bytes that are in use. | | -| `go_memstats_heap_objects` | gauge | Number of allocated objects. | | -| `go_memstats_heap_released_bytes` | gauge | Number of heap bytes released to OS. | | -| `go_memstats_heap_sys_bytes` | gauge | Number of heap bytes obtained from system. | | -| `go_memstats_last_gc_time_seconds` | gauge | Number of seconds since 1970 of last garbage collection. | | -| `go_memstats_lookups_total` | counter | Total number of pointer lookups. | | -| `go_memstats_mallocs_total` | counter | Total number of mallocs. | | -| `go_memstats_mcache_inuse_bytes` | gauge | Number of bytes in use by mcache structures. | | -| `go_memstats_mcache_sys_bytes` | gauge | Number of bytes used for mcache structures obtained from system. 
| | -| `go_memstats_mspan_inuse_bytes` | gauge | Number of bytes in use by mspan structures. | | -| `go_memstats_mspan_sys_bytes` | gauge | Number of bytes used for mspan structures obtained from system. | | -| `go_memstats_next_gc_bytes` | gauge | Number of heap bytes when next garbage collection will take place. | | -| `go_memstats_other_sys_bytes` | gauge | Number of bytes used for other system allocations. | | -| `go_memstats_stack_inuse_bytes` | gauge | Number of bytes in use by the stack allocator. | | -| `go_memstats_stack_sys_bytes` | gauge | Number of bytes obtained from system for stack allocator. | | -| `go_memstats_sys_bytes` | gauge | Number of bytes obtained from system. | | -| `go_threads` | gauge | Number of OS threads created. | | -| `process_cpu_seconds_total` | counter | Total user and system CPU time spent in seconds. | | -| `process_max_fds` | gauge | Maximum number of open file descriptors. | | -| `process_open_fds` | gauge | Number of open file descriptors. | | -| `process_resident_memory_bytes` | gauge | Resident memory size in bytes. | | -| `process_start_time_seconds` | gauge | Start time of the process since unix epoch in seconds. | | -| `process_virtual_memory_bytes` | gauge | Virtual memory size in bytes. | | -| `process_virtual_memory_max_bytes` | gauge | Maximum amount of virtual memory available in bytes. | | -| `promhttp_metric_handler_requests_in_flight` | gauge | Current number of scrapes being served. | | -| `promhttp_metric_handler_requests_total` | counter | Total number of scrapes by HTTP status code. 
| `code` | +| Name | Type | Description | Labels | +|---------------------------------------------------------------|-----------|-------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|--------------------------------------------------------------------------------------| +| `agent_scripts_executed_total` | counter | Total number of scripts executed by the Coder agent. Includes cron scheduled scripts. | `agent_name` `success` `template_name` `username` `workspace_name` | +| `coder_aibridged_injected_tool_invocations_total` | counter | The number of times an injected MCP tool was invoked by aibridge. | `model` `name` `provider` `server` | +| `coder_aibridged_interceptions_duration_seconds` | histogram | The total duration of intercepted requests, in seconds. The majority of this time will be the upstream processing of the request. aibridge has no control over upstream processing time, so it's just an illustrative metric. | `model` `provider` | +| `coder_aibridged_interceptions_inflight` | gauge | The number of intercepted requests which are being processed. | `model` `provider` `route` | +| `coder_aibridged_interceptions_total` | counter | The count of intercepted requests. | `initiator_id` `method` `model` `provider` `route` `status` | +| `coder_aibridged_non_injected_tool_selections_total` | counter | The number of times an AI model selected a tool to be invoked by the client. | `model` `name` `provider` | +| `coder_aibridged_prompts_total` | counter | The number of prompts issued by users (initiators). | `initiator_id` `model` `provider` | +| `coder_aibridged_tokens_total` | counter | The number of tokens used by intercepted requests. | `initiator_id` `model` `provider` `type` | +| `coderd_agents_apps` | gauge | Agent applications with statuses. 
| `agent_name` `app_name` `health` `username` `workspace_name` | +| `coderd_agents_connection_latencies_seconds` | gauge | Agent connection latencies in seconds. | `agent_name` `derp_region` `preferred` `username` `workspace_name` | +| `coderd_agents_connections` | gauge | Agent connections with statuses. | `agent_name` `lifecycle_state` `status` `tailnet_node` `username` `workspace_name` | +| `coderd_agents_up` | gauge | The number of active agents per workspace. | `template_name` `username` `workspace_name` | +| `coderd_agentstats_connection_count` | gauge | The number of established connections by agent | `agent_name` `username` `workspace_name` | +| `coderd_agentstats_connection_median_latency_seconds` | gauge | The median agent connection latency | `agent_name` `username` `workspace_name` | +| `coderd_agentstats_currently_reachable_peers` | gauge | The number of peers (e.g. clients) that are currently reachable over the encrypted network. | `agent_name` `connection_type` `template_name` `username` `workspace_name` | +| `coderd_agentstats_rx_bytes` | gauge | Agent Rx bytes | `agent_name` `username` `workspace_name` | +| `coderd_agentstats_session_count_jetbrains` | gauge | The number of session established by JetBrains | `agent_name` `username` `workspace_name` | +| `coderd_agentstats_session_count_reconnecting_pty` | gauge | The number of session established by reconnecting PTY | `agent_name` `username` `workspace_name` | +| `coderd_agentstats_session_count_ssh` | gauge | The number of session established by SSH | `agent_name` `username` `workspace_name` | +| `coderd_agentstats_session_count_vscode` | gauge | The number of session established by VSCode | `agent_name` `username` `workspace_name` | +| `coderd_agentstats_startup_script_seconds` | gauge | The number of seconds the startup script took to execute. 
| `agent_name` `success` `template_name` `username` `workspace_name` | +| `coderd_agentstats_tx_bytes` | gauge | Agent Tx bytes | `agent_name` `username` `workspace_name` | +| `coderd_api_active_users_duration_hour` | gauge | The number of users that have been active within the last hour. | | +| `coderd_api_concurrent_requests` | gauge | The number of concurrent API requests. | | +| `coderd_api_concurrent_websockets` | gauge | The total number of concurrent API websockets. | | +| `coderd_api_request_latencies_seconds` | histogram | Latency distribution of requests in seconds. | `method` `path` | +| `coderd_api_requests_processed_total` | counter | The total number of processed API requests | `code` `method` `path` | +| `coderd_api_websocket_durations_seconds` | histogram | Websocket duration distribution of requests in seconds. | `path` | +| `coderd_api_workspace_latest_build` | gauge | The latest workspace builds with a status. | `status` | +| `coderd_api_workspace_latest_build_total` | gauge | DEPRECATED: use coderd_api_workspace_latest_build instead | `status` | +| `coderd_insights_applications_usage_seconds` | gauge | The application usage per template. | `application_name` `slug` `template_name` | +| `coderd_insights_parameters` | gauge | The parameter usage per template. | `parameter_name` `parameter_type` `parameter_value` `template_name` | +| `coderd_insights_templates_active_users` | gauge | The number of active users of the template. | `template_name` | +| `coderd_license_active_users` | gauge | The number of active users. | | +| `coderd_license_limit_users` | gauge | The user seats limit based on the active Coder license. | | +| `coderd_license_user_limit_enabled` | gauge | Returns 1 if the current license enforces the user limit. | | +| `coderd_metrics_collector_agents_execution_seconds` | histogram | Histogram for duration of agents metrics collection in seconds. 
| | +| `coderd_oauth2_external_requests_rate_limit` | gauge | The total number of allowed requests per interval. | `name` `resource` | +| `coderd_oauth2_external_requests_rate_limit_next_reset_unix` | gauge | Unix timestamp of the next interval | `name` `resource` | +| `coderd_oauth2_external_requests_rate_limit_remaining` | gauge | The remaining number of allowed requests in this interval. | `name` `resource` | +| `coderd_oauth2_external_requests_rate_limit_reset_in_seconds` | gauge | Seconds until the next interval | `name` `resource` | +| `coderd_oauth2_external_requests_rate_limit_total` | gauge | DEPRECATED: use coderd_oauth2_external_requests_rate_limit instead | `name` `resource` | +| `coderd_oauth2_external_requests_rate_limit_used` | gauge | The number of requests made in this interval. | `name` `resource` | +| `coderd_oauth2_external_requests_total` | counter | The total number of api calls made to external oauth2 providers. 'status_code' will be 0 if the request failed with no response. | `name` `source` `status_code` | +| `coderd_prebuilt_workspace_claim_duration_seconds` | histogram | Time to claim a prebuilt workspace by organization, template, and preset. | `organization_name` `preset_name` `template_name` | +| `coderd_provisionerd_job_timings_seconds` | histogram | The provisioner job time duration in seconds. | `provisioner` `status` | +| `coderd_provisionerd_jobs_current` | gauge | The number of currently running provisioner jobs. | `provisioner` | +| `coderd_provisionerd_num_daemons` | gauge | The number of provisioner daemons. | | +| `coderd_provisionerd_workspace_build_timings_seconds` | histogram | The time taken for a workspace to build. | `status` `template_name` `template_version` `workspace_transition` | +| `coderd_workspace_builds_total` | counter | The number of workspaces started, updated, or deleted. 
| `action` `owner_email` `status` `template_name` `template_version` `workspace_name` | +| `coderd_workspace_creation_duration_seconds` | histogram | Time to create a workspace by organization, template, preset, and type (regular or prebuild). | `organization_name` `preset_name` `template_name` `type` | +| `coderd_workspace_creation_total` | counter | Total regular (non-prebuilt) workspace creations by organization, template, and preset. | `organization_name` `preset_name` `template_name` | +| `coderd_workspace_latest_build_status` | gauge | The current workspace statuses by template, transition, and owner. | `status` `template_name` `template_version` `workspace_owner` `workspace_transition` | +| `go_gc_duration_seconds` | summary | A summary of the pause duration of garbage collection cycles. | | +| `go_goroutines` | gauge | Number of goroutines that currently exist. | | +| `go_info` | gauge | Information about the Go environment. | `version` | +| `go_memstats_alloc_bytes` | gauge | Number of bytes allocated and still in use. | | +| `go_memstats_alloc_bytes_total` | counter | Total number of bytes allocated, even if freed. | | +| `go_memstats_buck_hash_sys_bytes` | gauge | Number of bytes used by the profiling bucket hash table. | | +| `go_memstats_frees_total` | counter | Total number of frees. | | +| `go_memstats_gc_sys_bytes` | gauge | Number of bytes used for garbage collection system metadata. | | +| `go_memstats_heap_alloc_bytes` | gauge | Number of heap bytes allocated and still in use. | | +| `go_memstats_heap_idle_bytes` | gauge | Number of heap bytes waiting to be used. | | +| `go_memstats_heap_inuse_bytes` | gauge | Number of heap bytes that are in use. | | +| `go_memstats_heap_objects` | gauge | Number of allocated objects. | | +| `go_memstats_heap_released_bytes` | gauge | Number of heap bytes released to OS. | | +| `go_memstats_heap_sys_bytes` | gauge | Number of heap bytes obtained from system. 
| | +| `go_memstats_last_gc_time_seconds` | gauge | Number of seconds since 1970 of last garbage collection. | | +| `go_memstats_lookups_total` | counter | Total number of pointer lookups. | | +| `go_memstats_mallocs_total` | counter | Total number of mallocs. | | +| `go_memstats_mcache_inuse_bytes` | gauge | Number of bytes in use by mcache structures. | | +| `go_memstats_mcache_sys_bytes` | gauge | Number of bytes used for mcache structures obtained from system. | | +| `go_memstats_mspan_inuse_bytes` | gauge | Number of bytes in use by mspan structures. | | +| `go_memstats_mspan_sys_bytes` | gauge | Number of bytes used for mspan structures obtained from system. | | +| `go_memstats_next_gc_bytes` | gauge | Number of heap bytes when next garbage collection will take place. | | +| `go_memstats_other_sys_bytes` | gauge | Number of bytes used for other system allocations. | | +| `go_memstats_stack_inuse_bytes` | gauge | Number of bytes in use by the stack allocator. | | +| `go_memstats_stack_sys_bytes` | gauge | Number of bytes obtained from system for stack allocator. | | +| `go_memstats_sys_bytes` | gauge | Number of bytes obtained from system. | | +| `go_threads` | gauge | Number of OS threads created. | | +| `process_cpu_seconds_total` | counter | Total user and system CPU time spent in seconds. | | +| `process_max_fds` | gauge | Maximum number of open file descriptors. | | +| `process_open_fds` | gauge | Number of open file descriptors. | | +| `process_resident_memory_bytes` | gauge | Resident memory size in bytes. | | +| `process_start_time_seconds` | gauge | Start time of the process since unix epoch in seconds. | | +| `process_virtual_memory_bytes` | gauge | Virtual memory size in bytes. | | +| `process_virtual_memory_max_bytes` | gauge | Maximum amount of virtual memory available in bytes. | | +| `promhttp_metric_handler_requests_in_flight` | gauge | Current number of scrapes being served. 
| | +| `promhttp_metric_handler_requests_total` | counter | Total number of scrapes by HTTP status code. | `code` | diff --git a/docs/admin/security/audit-logs.md b/docs/admin/security/audit-logs.md index 387bdd9836a19..29e9fe7b6f6c8 100644 --- a/docs/admin/security/audit-logs.md +++ b/docs/admin/security/audit-logs.md @@ -13,32 +13,32 @@ We track the following resources: -| Resource | | | -|----------------------------------------------------------|----------------------------------------------------------------------|-----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------
--------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------| -| APIKey
login, logout, register, create, delete | |
FieldTracked
allow_listfalse
created_attrue
expires_attrue
hashed_secretfalse
idfalse
ip_addressfalse
last_usedtrue
lifetime_secondsfalse
login_typefalse
scopesfalse
token_namefalse
updated_atfalse
user_idtrue
| -| AuditOAuthConvertState
| |
FieldTracked
created_attrue
expires_attrue
from_login_typetrue
to_login_typetrue
user_idtrue
| -| Group
create, write, delete | |
FieldTracked
avatar_urltrue
display_nametrue
idtrue
memberstrue
nametrue
organization_idfalse
quota_allowancetrue
sourcefalse
| -| AuditableOrganizationMember
| |
FieldTracked
created_attrue
organization_idfalse
rolestrue
updated_attrue
user_idtrue
usernametrue
| -| CustomRole
| |
FieldTracked
created_atfalse
display_nametrue
idfalse
nametrue
org_permissionstrue
organization_idfalse
site_permissionstrue
updated_atfalse
user_permissionstrue
| -| GitSSHKey
create | |
FieldTracked
created_atfalse
private_keytrue
public_keytrue
updated_atfalse
user_idtrue
| -| GroupSyncSettings
| |
FieldTracked
auto_create_missing_groupstrue
fieldtrue
legacy_group_name_mappingfalse
mappingtrue
regex_filtertrue
| -| HealthSettings
| |
FieldTracked
dismissed_healthcheckstrue
idfalse
| -| License
create, delete | |
FieldTracked
exptrue
idfalse
jwtfalse
uploaded_attrue
uuidtrue
| -| NotificationTemplate
| |
FieldTracked
actionstrue
body_templatetrue
enabled_by_defaulttrue
grouptrue
idfalse
kindtrue
methodtrue
nametrue
title_templatetrue
| -| NotificationsSettings
| |
FieldTracked
idfalse
notifier_pausedtrue
| -| OAuth2ProviderApp
| |
FieldTracked
callback_urltrue
client_id_issued_atfalse
client_secret_expires_attrue
client_typetrue
client_uritrue
contactstrue
created_atfalse
dynamically_registeredtrue
grant_typestrue
icontrue
idfalse
jwkstrue
jwks_uritrue
logo_uritrue
nametrue
policy_uritrue
redirect_uristrue
registration_access_tokentrue
registration_client_uritrue
response_typestrue
scopetrue
software_idtrue
software_versiontrue
token_endpoint_auth_methodtrue
tos_uritrue
updated_atfalse
| -| OAuth2ProviderAppSecret
| |
FieldTracked
app_idfalse
created_atfalse
display_secretfalse
hashed_secretfalse
idfalse
last_used_atfalse
secret_prefixfalse
| -| Organization
| |
FieldTracked
created_atfalse
deletedtrue
descriptiontrue
display_nametrue
icontrue
idfalse
is_defaulttrue
nametrue
updated_attrue
| -| OrganizationSyncSettings
| |
FieldTracked
assign_defaulttrue
fieldtrue
mappingtrue
| -| PrebuildsSettings
| |
FieldTracked
idfalse
reconciliation_pausedtrue
| -| RoleSyncSettings
| |
FieldTracked
fieldtrue
mappingtrue
| -| TaskTable
| |
FieldTracked
created_atfalse
deleted_atfalse
idtrue
nametrue
organization_idfalse
owner_idtrue
prompttrue
template_parameterstrue
template_version_idtrue
workspace_idtrue
| -| Template
write, delete | |
FieldTracked
active_version_idtrue
activity_bumptrue
allow_user_autostarttrue
allow_user_autostoptrue
allow_user_cancel_workspace_jobstrue
autostart_block_days_of_weektrue
autostop_requirement_days_of_weektrue
autostop_requirement_weekstrue
cors_behaviortrue
created_atfalse
created_bytrue
created_by_avatar_urlfalse
created_by_namefalse
created_by_usernamefalse
default_ttltrue
deletedfalse
deprecatedtrue
descriptiontrue
display_nametrue
failure_ttltrue
group_acltrue
icontrue
idtrue
max_port_sharing_leveltrue
nametrue
organization_display_namefalse
organization_iconfalse
organization_idfalse
organization_namefalse
provisionertrue
require_active_versiontrue
time_til_dormanttrue
time_til_dormant_autodeletetrue
updated_atfalse
use_classic_parameter_flowtrue
user_acltrue
| -| TemplateVersion
create, write | |
FieldTracked
archivedtrue
created_atfalse
created_bytrue
created_by_avatar_urlfalse
created_by_namefalse
created_by_usernamefalse
external_auth_providersfalse
has_ai_taskfalse
has_external_agentfalse
idtrue
job_idfalse
messagefalse
nametrue
organization_idfalse
readmetrue
source_example_idfalse
template_idtrue
updated_atfalse
| -| User
create, write, delete | |
FieldTracked
avatar_urlfalse
created_atfalse
deletedtrue
emailtrue
github_com_user_idfalse
hashed_one_time_passcodefalse
hashed_passwordtrue
idtrue
is_systemtrue
last_seen_atfalse
login_typetrue
nametrue
one_time_passcode_expires_attrue
quiet_hours_scheduletrue
rbac_rolestrue
statustrue
updated_atfalse
usernametrue
| -| WorkspaceBuild
start, stop | |
FieldTracked
ai_task_sidebar_app_idfalse
build_numberfalse
created_atfalse
daily_costfalse
deadlinefalse
has_ai_taskfalse
has_external_agentfalse
idfalse
initiator_by_avatar_urlfalse
initiator_by_namefalse
initiator_by_usernamefalse
initiator_idfalse
job_idfalse
max_deadlinefalse
provisioner_statefalse
reasonfalse
template_version_idtrue
template_version_preset_idfalse
transitionfalse
updated_atfalse
workspace_idfalse
| -| WorkspaceProxy
| |
FieldTracked
created_attrue
deletedfalse
derp_enabledtrue
derp_onlytrue
display_nametrue
icontrue
idtrue
nametrue
region_idtrue
token_hashed_secrettrue
updated_atfalse
urltrue
versiontrue
wildcard_hostnametrue
| -| WorkspaceTable
| |
FieldTracked
automatic_updatestrue
autostart_scheduletrue
created_atfalse
deletedfalse
deleting_attrue
dormant_attrue
favoritetrue
group_acltrue
idtrue
last_used_atfalse
nametrue
next_start_attrue
organization_idfalse
owner_idtrue
template_idtrue
ttltrue
updated_atfalse
user_acltrue
| +| Resource | | | +|----------------------------------------------------------|----------------------------------------------------------------------|-------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------| +| APIKey
login, logout, register, create, delete | |
FieldTracked
allow_listfalse
created_attrue
expires_attrue
hashed_secretfalse
idfalse
ip_addressfalse
last_usedtrue
lifetime_secondsfalse
login_typefalse
scopesfalse
token_namefalse
updated_atfalse
user_idtrue
| +| AuditOAuthConvertState
| |
FieldTracked
created_attrue
expires_attrue
from_login_typetrue
to_login_typetrue
user_idtrue
| +| Group
create, write, delete | |
FieldTracked
avatar_urltrue
display_nametrue
idtrue
memberstrue
nametrue
organization_idfalse
quota_allowancetrue
sourcefalse
| +| AuditableOrganizationMember
| |
FieldTracked
created_attrue
organization_idfalse
rolestrue
updated_attrue
user_idtrue
usernametrue
| +| CustomRole
| |
FieldTracked
created_atfalse
display_nametrue
idfalse
nametrue
org_permissionstrue
organization_idfalse
site_permissionstrue
updated_atfalse
user_permissionstrue
| +| GitSSHKey
create | |
FieldTracked
created_atfalse
private_keytrue
public_keytrue
updated_atfalse
user_idtrue
| +| GroupSyncSettings
| |
FieldTracked
auto_create_missing_groupstrue
fieldtrue
legacy_group_name_mappingfalse
mappingtrue
regex_filtertrue
| +| HealthSettings
| |
FieldTracked
dismissed_healthcheckstrue
idfalse
| +| License
create, delete | |
FieldTracked
exptrue
idfalse
jwtfalse
uploaded_attrue
uuidtrue
| +| NotificationTemplate
| |
FieldTracked
actionstrue
body_templatetrue
enabled_by_defaulttrue
grouptrue
idfalse
kindtrue
methodtrue
nametrue
title_templatetrue
| +| NotificationsSettings
| |
FieldTracked
idfalse
notifier_pausedtrue
| +| OAuth2ProviderApp
| |
FieldTracked
callback_urltrue
client_id_issued_atfalse
client_secret_expires_attrue
client_typetrue
client_uritrue
contactstrue
created_atfalse
dynamically_registeredtrue
grant_typestrue
icontrue
idfalse
jwkstrue
jwks_uritrue
logo_uritrue
nametrue
policy_uritrue
redirect_uristrue
registration_access_tokentrue
registration_client_uritrue
response_typestrue
scopetrue
software_idtrue
software_versiontrue
token_endpoint_auth_methodtrue
tos_uritrue
updated_atfalse
| +| OAuth2ProviderAppSecret
| |
FieldTracked
app_idfalse
created_atfalse
display_secretfalse
hashed_secretfalse
idfalse
last_used_atfalse
secret_prefixfalse
| +| Organization
| |
FieldTracked
created_atfalse
deletedtrue
descriptiontrue
display_nametrue
icontrue
idfalse
is_defaulttrue
nametrue
updated_attrue
| +| OrganizationSyncSettings
| |
FieldTracked
assign_defaulttrue
fieldtrue
mappingtrue
| +| PrebuildsSettings
| |
FieldTracked
idfalse
reconciliation_pausedtrue
| +| RoleSyncSettings
| |
FieldTracked
fieldtrue
mappingtrue
| +| TaskTable
| |
FieldTracked
created_atfalse
deleted_atfalse
display_nametrue
idtrue
nametrue
organization_idfalse
owner_idtrue
prompttrue
template_parameterstrue
template_version_idtrue
workspace_idtrue
| +| Template
write, delete | |
FieldTracked
active_version_idtrue
activity_bumptrue
allow_user_autostarttrue
allow_user_autostoptrue
allow_user_cancel_workspace_jobstrue
autostart_block_days_of_weektrue
autostop_requirement_days_of_weektrue
autostop_requirement_weekstrue
cors_behaviortrue
created_atfalse
created_bytrue
created_by_avatar_urlfalse
created_by_namefalse
created_by_usernamefalse
default_ttltrue
deletedfalse
deprecatedtrue
descriptiontrue
display_nametrue
failure_ttltrue
group_acltrue
icontrue
idtrue
max_port_sharing_leveltrue
nametrue
organization_display_namefalse
organization_iconfalse
organization_idfalse
organization_namefalse
provisionertrue
require_active_versiontrue
time_til_dormanttrue
time_til_dormant_autodeletetrue
updated_atfalse
use_classic_parameter_flowtrue
use_terraform_workspace_cachetrue
user_acltrue
| +| TemplateVersion
create, write | |
FieldTracked
archivedtrue
created_atfalse
created_bytrue
created_by_avatar_urlfalse
created_by_namefalse
created_by_usernamefalse
external_auth_providersfalse
has_ai_taskfalse
has_external_agentfalse
idtrue
job_idfalse
messagefalse
nametrue
organization_idfalse
readmetrue
source_example_idfalse
template_idtrue
updated_atfalse
| +| User
create, write, delete | |
FieldTracked
avatar_urlfalse
created_atfalse
deletedtrue
emailtrue
github_com_user_idfalse
hashed_one_time_passcodefalse
hashed_passwordtrue
idtrue
is_systemtrue
last_seen_atfalse
login_typetrue
nametrue
one_time_passcode_expires_attrue
quiet_hours_scheduletrue
rbac_rolestrue
statustrue
updated_atfalse
usernametrue
| +| WorkspaceBuild
start, stop | |
FieldTracked
build_numberfalse
created_atfalse
daily_costfalse
deadlinefalse
has_ai_taskfalse
has_external_agentfalse
idfalse
initiator_by_avatar_urlfalse
initiator_by_namefalse
initiator_by_usernamefalse
initiator_idfalse
job_idfalse
max_deadlinefalse
provisioner_statefalse
reasonfalse
template_version_idtrue
template_version_preset_idfalse
transitionfalse
updated_atfalse
workspace_idfalse
| +| WorkspaceProxy
| |
FieldTracked
created_attrue
deletedfalse
derp_enabledtrue
derp_onlytrue
display_nametrue
icontrue
idtrue
nametrue
region_idtrue
token_hashed_secrettrue
updated_atfalse
urltrue
versiontrue
wildcard_hostnametrue
| +| WorkspaceTable
| |
FieldTracked
automatic_updatestrue
autostart_scheduletrue
created_atfalse
deletedfalse
deleting_attrue
dormant_attrue
favoritetrue
group_acltrue
idtrue
last_used_atfalse
nametrue
next_start_attrue
organization_idfalse
owner_idtrue
template_idtrue
ttltrue
updated_atfalse
user_acltrue
| @@ -151,6 +151,36 @@ Should you wish to purge these records, it is safe to do so. This can only be do directly against the `audit_logs` table in the database. We advise users to only purge old records (>1yr) and in accordance with your compliance requirements. +### Maintenance Procedures for the Audit Logs Table + +> [!NOTE] +> `VACUUM FULL` acquires an exclusive lock on the table, blocking all reads and writes. For more information, see the [PostgreSQL VACUUM documentation](https://www.postgresql.org/docs/current/sql-vacuum.html). + +You may choose to run a `VACUUM` or `VACUUM FULL` operation on the audit logs table to reclaim disk space. If you choose to run the `FULL` operation, consider the following when doing so: + +- **Run during a planned maintenance window** to ensure ample time for the operation to complete and minimize impact to users +- **Stop all running instances of `coderd`** to prevent connection errors while the table is locked. The actual steps for this will depend on your particular deployment setup. For example, if your `coderd` deployment is running on Kubernetes: + + ```bash + kubectl scale deployment coder --replicas=0 -n coder + ``` + +- **Terminate lingering connections** before running the `VACUUM` operation to ensure it starts immediately + + ```sql + SELECT pg_terminate_backend(pg_stat_activity.pid) + FROM pg_stat_activity + WHERE pg_stat_activity.datname = 'coder' AND pid <> pg_backend_pid(); + ``` + +- **Only `coderd` needs to scale down** - external provisioner daemons, workspace proxies, and workspace agents don't connect to the database directly. 
+ +After the vacuum completes, scale coderd back up: + +```bash +kubectl scale deployment coder --replicas=<original-replica-count> -n coder +``` + ### Backup/Archive Consider exporting or archiving these records before deletion: diff --git a/docs/admin/templates/extending-templates/devcontainers.md b/docs/admin/templates/extending-templates/devcontainers.md index d4284bf48efde..f3eb4835c5a7c 100644 --- a/docs/admin/templates/extending-templates/devcontainers.md +++ b/docs/admin/templates/extending-templates/devcontainers.md @@ -1,8 +1,32 @@ -# Configure a template for dev containers +# Configure a template for Dev Containers -To enable dev containers in workspaces, configure your template with the dev containers +To enable Dev Containers in workspaces, configure your template with the Dev Containers modules and configurations outlined in this doc. +> [!NOTE] +> +> Dev Containers require a **Linux or macOS workspace**. Windows is not supported. + +## Configuration Modes + +There are two approaches to configuring Dev Containers in Coder: + +### Manual Configuration + +Use the [`coder_devcontainer`](https://registry.terraform.io/providers/coder/coder/latest/docs/resources/devcontainer) Terraform resource to explicitly define which Dev +Containers should be started in your workspace. This approach provides: + +- Predictable behavior and explicit control +- Clear template configuration +- Easier troubleshooting +- Better for production environments + +This is the recommended approach for most use cases. + +### Project Discovery + +Enable automatic discovery of Dev Containers in Git repositories. Project discovery automatically scans Git repositories for `.devcontainer/devcontainer.json` or `.devcontainer.json` files and surfaces them in the Coder UI. See the [Environment Variables](#environment-variables) section for detailed configuration options. + ## Install the Dev Containers CLI Use the @@ -23,7 +47,7 @@ Alternatively, install the devcontainer CLI manually in your base image. 
The [`coder_devcontainer`](https://registry.terraform.io/providers/coder/coder/latest/docs/resources/devcontainer) -resource automatically starts a dev container in your workspace, ensuring it's +resource automatically starts a Dev Container in your workspace, ensuring it's ready when you access the workspace: ```terraform @@ -50,30 +74,140 @@ resource "coder_devcontainer" "my-repository" { ## Enable Dev Containers Integration -To enable the dev containers integration in your workspace, you must set the -`CODER_AGENT_DEVCONTAINERS_ENABLE` environment variable to `true` in your -workspace container: +Dev Containers integration is **enabled by default** in Coder 2.24.0 and later. +You don't need to set any environment variables unless you want to change the +default behavior. + +If you need to explicitly disable Dev Containers, set the +`CODER_AGENT_DEVCONTAINERS_ENABLE` environment variable to `false`: ```terraform resource "docker_container" "workspace" { count = data.coder_workspace.me.start_count image = "codercom/oss-dogfood:latest" env = [ - "CODER_AGENT_DEVCONTAINERS_ENABLE=true", + "CODER_AGENT_DEVCONTAINERS_ENABLE=false", # Explicitly disable # ... Other environment variables. ] # ... Other container configuration. } ``` -This environment variable is required for the Coder agent to detect and manage -dev containers. Without it, the agent will not attempt to start or connect to -dev containers even if the `coder_devcontainer` resource is defined. +See the [Environment Variables](#environment-variables) section below for more +details on available configuration options. + +## Environment Variables + +The following environment variables control Dev Container behavior in your +workspace. Both `CODER_AGENT_DEVCONTAINERS_ENABLE` and +`CODER_AGENT_DEVCONTAINERS_PROJECT_DISCOVERY_ENABLE` are **enabled by default**, +so you typically don't need to set them unless you want to explicitly disable +the feature. 
+ +### CODER_AGENT_DEVCONTAINERS_ENABLE + +**Default: `true`** • **Added in: v2.24.0** + +Enables the Dev Containers integration in the Coder agent. + +The Dev Containers feature is enabled by default. You can explicitly disable it +by setting this to `false`. + +### CODER_AGENT_DEVCONTAINERS_PROJECT_DISCOVERY_ENABLE + +**Default: `true`** • **Added in: v2.25.0** + +Enables automatic discovery of Dev Containers in Git repositories. + +When enabled, the agent will: + +- Scan the agent directory for Git repositories +- Look for `.devcontainer/devcontainer.json` or `.devcontainer.json` files +- Surface discovered Dev Containers automatically in the Coder UI +- Respect `.gitignore` patterns during discovery + +You can disable automatic discovery by setting this to `false` if you prefer to +use only the `coder_devcontainer` resource for explicit configuration. + +### CODER_AGENT_DEVCONTAINERS_DISCOVERY_AUTOSTART_ENABLE + +**Default: `false`** • **Added in: v2.25.0** + +Automatically starts Dev Containers discovered via project discovery. + +When enabled, discovered Dev Containers will be automatically built and started +during workspace initialization. This only applies to Dev Containers found via +project discovery. Dev Containers defined with the `coder_devcontainer` resource +always auto-start regardless of this setting. + +## Per-Container Customizations + +Individual Dev Containers can be customized using the `customizations.coder` block +in your `devcontainer.json` file. These customizations allow you to control +container-specific behavior without modifying your template. 
+ +### Ignore Specific Containers + +Use the `ignore` option to hide a Dev Container from Coder completely: + +```json +{ + "name": "My Dev Container", + "image": "mcr.microsoft.com/devcontainers/base:ubuntu", + "customizations": { + "coder": { + "ignore": true + } + } +} +``` + +When `ignore` is set to `true`: + +- The Dev Container won't appear in the Coder UI +- Coder won't manage or monitor the container + +This is useful when you have Dev Containers in your repository that you don't +want Coder to manage. + +### Per-Container Auto-Start + +Control whether individual Dev Containers should auto-start using the +`autoStart` option: + +```json +{ + "name": "My Dev Container", + "image": "mcr.microsoft.com/devcontainers/base:ubuntu", + "customizations": { + "coder": { + "autoStart": true + } + } +} +``` + +**Important**: The `autoStart` option only applies when global auto-start is +enabled via `CODER_AGENT_DEVCONTAINERS_DISCOVERY_AUTOSTART_ENABLE=true`. If +the global setting is disabled, containers won't auto-start regardless of this +setting. 
+ +When `autoStart` is set to `true`: + +- The Dev Container automatically builds and starts during workspace + initialization +- Works on a per-container basis (you can enable it for some containers but not + others) + +When `autoStart` is set to `false` or omitted: + +- The Dev Container is discovered and shown in the UI +- Users must manually start it via the UI ## Complete Template Example -Here's a simplified template example that enables the dev containers -integration: +Here's a simplified template example that uses Dev Containers with manual +configuration: ```terraform terraform { @@ -107,18 +241,38 @@ resource "coder_devcontainer" "my-repository" { agent_id = coder_agent.dev.id workspace_folder = "/home/coder/my-repository" } +``` + +### Alternative: Project Discovery Mode +You can enable automatic starting of discovered Dev Containers: + +```terraform resource "docker_container" "workspace" { count = data.coder_workspace.me.start_count image = "codercom/oss-dogfood:latest" env = [ - "CODER_AGENT_DEVCONTAINERS_ENABLE=true", + # Project discovery is enabled by default, but autostart is not. + # Enable autostart to automatically build and start discovered containers: + "CODER_AGENT_DEVCONTAINERS_DISCOVERY_AUTOSTART_ENABLE=true", # ... Other environment variables. ] # ... Other container configuration. } ``` +With this configuration: + +- Project discovery is enabled (default behavior) +- Discovered containers are automatically started (via the env var) +- The `coder_devcontainer` resource is **not** required +- Developers can work with multiple projects seamlessly + +> [!NOTE] +> +> When using project discovery, you still need to install the devcontainers CLI +> using the module or in your base image. 
+ ## Next Steps - [Dev Containers Integration](../../../user-guides/devcontainers/index.md) diff --git a/docs/admin/templates/extending-templates/parameters.md b/docs/admin/templates/extending-templates/parameters.md index 43a477632e7db..57d2582bc8f02 100644 --- a/docs/admin/templates/extending-templates/parameters.md +++ b/docs/admin/templates/extending-templates/parameters.md @@ -322,15 +322,33 @@ their needs. ![Template with options in the preset dropdown](../../../images/admin/templates/extend-templates/template-preset-dropdown.png) -Use `coder_workspace_preset` to define the preset parameters. -After you save the template file, the presets will be available for all new -workspace deployments. +Use the +[`coder_workspace_preset`](https://registry.terraform.io/providers/coder/coder/latest/docs/data-sources/workspace_preset) +data source to define the preset parameters. After you save the template file, +the presets will be available for all new workspace deployments. + +### Optional preset fields + +In addition to the required `name` and `parameters` fields, you can enhance your +workspace presets with optional `description` and `icon` fields: + +- **description**: A helpful text description that provides additional context + about the preset. This helps users understand what the preset is for and when + to use it. +- **icon**: A visual icon displayed alongside the preset name in the UI. Use + emoji icons with the format `/emojis/{code}.png` (e.g., + `/emojis/1f1fa-1f1f8.png` for the US flag emoji 🇺🇸). + +For a complete list of all available fields, see the +[Terraform provider documentation](https://registry.terraform.io/providers/coder/coder/latest/docs/data-sources/workspace_preset#schema).
Expand for an example ```tf data "coder_workspace_preset" "goland-gpu" { name = "GoLand with GPU" + description = "Development workspace with GPU acceleration for GoLand IDE" + icon = "/emojis/1f680.png" parameters = { "machine_type" = "n1-standard-1" "attach_gpu" = "true" @@ -339,6 +357,16 @@ data "coder_workspace_preset" "goland-gpu" { } } +data "coder_workspace_preset" "pittsburgh" { + name = "Pittsburgh" + description = "Development workspace hosted in United States" + icon = "/emojis/1f1fa-1f1f8.png" + parameters = { + "region" = "us-pittsburgh" + "machine_type" = "n1-standard-2" + } +} + data "coder_parameter" "machine_type" { name = "machine_type" display_name = "Machine Type" @@ -355,16 +383,23 @@ data "coder_parameter" "attach_gpu" { data "coder_parameter" "gcp_region" { name = "gcp_region" - display_name = "Machine Type" + display_name = "GCP Region" type = "string" - default = "n1-standard-2" + default = "us-central1-a" } data "coder_parameter" "jetbrains_ide" { name = "jetbrains_ide" - display_name = "Machine Type" + display_name = "JetBrains IDE" type = "string" - default = "n1-standard-2" + default = "IU" +} + +data "coder_parameter" "region" { + name = "region" + display_name = "Region" + type = "string" + default = "us-east-1" } ``` diff --git a/docs/admin/templates/managing-templates/external-workspaces.md b/docs/admin/templates/managing-templates/external-workspaces.md index 25a97db468867..5d547b67fc891 100644 --- a/docs/admin/templates/managing-templates/external-workspaces.md +++ b/docs/admin/templates/managing-templates/external-workspaces.md @@ -20,7 +20,7 @@ External workspaces offer flexibility and control in complex environments: - **Incremental adoption of Coder** - Integrate with existing infrastructure gradually without needing to migrate everything at once. This is particularly useful when gradually migrating worklods to Coder without refactoring current infrastructure. 
+ Integrate with existing infrastructure gradually without needing to migrate everything at once. This is particularly useful when gradually migrating workloads to Coder without refactoring current infrastructure. - **Flexibility** diff --git a/docs/admin/users/headless-auth.md b/docs/admin/users/headless-auth.md index 83173e2bbf1e5..6aa780288a94b 100644 --- a/docs/admin/users/headless-auth.md +++ b/docs/admin/users/headless-auth.md @@ -16,7 +16,7 @@ You must have the User Admin role or above to create headless users. coder users create \ --email="coder-bot@coder.com" \ --username="coder-bot" \ - --login-type="none \ + --login-type="none" \ ``` ## UI diff --git a/docs/admin/users/oidc-auth/microsoft.md b/docs/admin/users/oidc-auth/microsoft.md new file mode 100644 index 0000000000000..db9958f1bd0b7 --- /dev/null +++ b/docs/admin/users/oidc-auth/microsoft.md @@ -0,0 +1,63 @@ +# Microsoft Entra ID authentication (OIDC) + +This guide shows how to configure Coder to authenticate users with Microsoft Entra ID using OpenID Connect (OIDC) + +## Prerequisites + +- A Microsoft Azure Entra ID Tenant +- Permission to create Applications in your Azure environment + +## Step 1: Create an OAuth App Registration in Microsoft Azure + +1. Open Microsoft Azure Portal (https://portal.azure.com) → Microsoft Entra ID → App Registrations → New Registration +2. Name: Name your application appropriately +3. Supported Account Types: Choose the appropriate radio button according to your needs. Most organizations will want to use the first one labeled "Accounts in this organizational directory only" +4. Click on "Register" +5. On the next screen, select: "Certificates and Secrets" +6. Click on "New Client Secret" and under description, enter an appropriate description. Then set an expiry and hit "Add" once it's created, copy the value and save it somewhere secure for the next step +7. 
Next, click on the tab labeled "Token Configuration", then click "Add optional claim" and select the "ID" radio button, and finally check "upn" and hit "add" at the bottom +8. Then, click on the button labeled "Add groups claim" and check "Security groups" and click "Save" at the bottom +9. Now, click on the tab labeled "Authentication" and click on "Add a platform", select "Web" and for the redirect URI enter your Coder callback URL, and then hit "Configure" at the bottom: + - `https://coder.example.com/api/v2/users/oidc/callback` + +## Step 2: Configure Coder OIDC for Microsoft Entra ID + +Set the following environment variables on your Coder deployment and restart Coder: + +```env +CODER_OIDC_ISSUER_URL=https://login.microsoftonline.com/{tenant-id}/v2.0 # Replace {tenant-id} with your Azure tenant ID +CODER_OIDC_CLIENT_ID= +CODER_OIDC_CLIENT_SECRET= +# Restrict to one or more email domains (comma-separated) +CODER_OIDC_EMAIL_DOMAIN="example.com" +CODER_OIDC_EMAIL_FIELD="upn" # This is set because EntraID typically uses .onmicrosoft.com domains by default, this should pull the user's username@domain email. +CODER_OIDC_GROUP_FIELD="groups" # This is for group sync / IdP Sync, a premium feature. +# Optional: customize the login button +CODER_OIDC_SIGN_IN_TEXT="Sign in with Microsoft Entra ID" +CODER_OIDC_ICON_URL=/icon/microsoft.svg +``` + +> [!NOTE] +> The redirect URI must exactly match what you configured in Microsoft Azure Entra ID + +## Enable refresh tokens (recommended) + +```env +# Keep standard scopes +CODER_OIDC_SCOPES=openid,profile,email,offline_access +``` + +After changing settings, users must log out and back in once to obtain refresh tokens + +Learn more in [Configure OIDC refresh tokens](./refresh-tokens.md). 
+ +## Troubleshooting + +- "invalid redirect_uri": ensure the redirect URI in Azure Entra ID matches `https://coder.example.com/api/v2/users/oidc/callback` +- Domain restriction: if users from unexpected domains can log in, verify `CODER_OIDC_EMAIL_DOMAIN` +- Claims: to inspect claims returned by Microsoft, see guidance in the [OIDC overview](./index.md#oidc-claims) + +## See also + +- [OIDC overview](./index.md) +- [Configure OIDC refresh tokens](./refresh-tokens.md) diff --git a/docs/admin/users/sessions-tokens.md b/docs/admin/users/sessions-tokens.md index 8152c92290877..901f4ae038cd3 100644 --- a/docs/admin/users/sessions-tokens.md +++ b/docs/admin/users/sessions-tokens.md @@ -80,3 +80,54 @@ You can use the [`CODER_MAX_TOKEN_LIFETIME`](https://coder.com/docs/reference/cli/server#--max-token-lifetime) server flag to set the maximum duration for long-lived tokens in your deployment. + +## API Key Scopes + +API key scopes allow you to limit the permissions of a token to specific operations. By default, tokens are created with the `all` scope, granting full access to all actions the user can perform. For improved security, you can create tokens with limited scopes that restrict access to only the operations needed. + +Scopes follow the format `resource:action`, where `resource` is the type of object (like `workspace`, `template`, or `user`) and `action` is the operation (like `read`, `create`, `update`, or `delete`). You can also use wildcards like `workspace:*` to grant all permissions for a specific resource type. 
+ +### Creating tokens with scopes + +You can specify scopes when creating a token using the `--scope` flag: + +```sh +# Create a token that can only read workspaces +coder tokens create --name "readonly-token" --scope "workspace:read" + +# Create a token with multiple scopes +coder tokens create --name "limited-token" --scope "workspace:read" --scope "template:read" +``` + +Common scope examples include: + +- `workspace:read` - View workspace information +- `workspace:*` - Full workspace access (create, read, update, delete) +- `template:read` - View template information +- `api_key:read` - View API keys (useful for automation) +- `application_connect` - Connect to workspace applications + +For a complete list of available scopes, see the API reference documentation. + +### Allow lists (advanced) + +For additional security, you can combine scopes with allow lists to restrict tokens to specific resources. Allow lists let you limit a token to only interact with particular workspaces, templates, or other resources by their UUID: + +```sh +# Create a token limited to a specific workspace +coder tokens create --name "workspace-token" \ + --scope "workspace:read" \ + --allow "workspace:a1b2c3d4-5678-90ab-cdef-1234567890ab" +``` + +**Important:** Allow lists are exclusive - the token can **only** perform actions on resources explicitly listed. In the example above, the token can only read the specified workspace and cannot access any other resources (templates, organizations, other workspaces, etc.). To maintain access to other resources, you must explicitly add them to the allow list: + +```sh +# Token that can read one workspace AND access templates and user info +coder tokens create --name "limited-token" \ + --scope "workspace:read" --scope "template:*" --scope "user:read" \ + --allow "workspace:a1b2c3d4-5678-90ab-cdef-1234567890ab" \ + --allow "template:*" \ + --allow "user:*" \ + ... 
etc +``` diff --git a/docs/ai-coder/agent-boundary.md b/docs/ai-coder/agent-boundary.md index 36e36a08b6d2f..5944cfbf2b1f3 100644 --- a/docs/ai-coder/agent-boundary.md +++ b/docs/ai-coder/agent-boundary.md @@ -16,32 +16,86 @@ Agent Boundaries offer network policy enforcement, which blocks domains and HTTP The easiest way to use Agent Boundaries is through existing Coder modules, such as the [Claude Code module](https://registry.coder.com/modules/coder/claude-code). It can also be ran directly in the terminal by installing the [CLI](https://github.com/coder/boundary). -Below is an example of how to configure Agent Boundaries for usage in your workspace. +There are two supported ways to configure Boundary today: + +1. **Inline module configuration** – fastest for quick testing. +2. **External `config.yaml`** – best when you need a large allow list or want everyone who launches Boundary manually to share the same config. + +### Option 1: Inline module configuration (quick start) + +Put every setting directly in the Terraform module when you just want to experiment: ```tf module "claude-code" { source = "dev.registry.coder.com/coder/claude-code/coder" + version = "4.1.0" enable_boundary = true - boundary_version = "main" - boundary_log_dir = "/tmp/boundary_logs" + boundary_version = "v0.2.0" + boundary_log_dir = "/tmp/boundary_logs" boundary_log_level = "WARN" - boundary_additional_allowed_urls = ["GET *google.com"] + boundary_additional_allowed_urls = ["domain=google.com"] boundary_proxy_port = "8087" - version = "3.2.1" } ``` -- `boundary_version` defines what version of Boundary is being applied. This is set to `main`, which points to the main branch of `coder/boundary`. +All Boundary knobs live in Terraform, so you can iterate quickly without creating extra files. 
+ +### Option 2: Keep policy in `config.yaml` (extensive allow lists) + +When you need to maintain a long allow list or share a detailed policy with teammates, keep Terraform minimal and move the rest into `config.yaml`: + +```tf +module "claude-code" { + source = "dev.registry.coder.com/coder/claude-code/coder" + version = "4.1.0" + enable_boundary = true + boundary_version = "v0.2.0" +} +``` + +Then create a `config.yaml` file in your template directory with your policy: + +```yaml +allowlist: + - "domain=google.com" + - "method=GET,HEAD domain=api.github.com" + - "method=POST domain=api.example.com path=/users,/posts" +log_dir: /tmp/boundary_logs +proxy_port: 8087 +log_level: warn +``` + +Add a `coder_script` resource to mount the configuration file into the workspace filesystem: + +```tf +resource "coder_script" "boundary_config_setup" { + agent_id = coder_agent.dev.id + display_name = "Boundary Setup Configuration" + run_on_start = true + + script = <<-EOF + #!/bin/sh + mkdir -p ~/.config/coder_boundary + echo '${base64encode(file("${path.module}/config.yaml"))}' | base64 -d > ~/.config/coder_boundary/config.yaml + chmod 600 ~/.config/coder_boundary/config.yaml + EOF +} +``` + +Boundary automatically reads `config.yaml` from `~/.config/coder_boundary/` when it starts, so everyone who launches Boundary manually inside the workspace picks up the same configuration without extra flags. This is especially convenient for managing extensive allow lists in version control. + +- `boundary_version` defines what version of Boundary is being applied. This is set to `v0.2.0`, which points to the v0.2.0 release tag of `coder/boundary`. - `boundary_log_dir` is the directory where log files are written to when the workspace spins up. - `boundary_log_level` defines the verbosity at which requests are logged. 
Boundary uses the following verbosity levels: - `WARN`: logs only requests that have been blocked by Boundary - `INFO`: logs all requests at a high level - `DEBUG`: logs all requests in detail -- `boundary_additional_allowed_urls`: defines the URLs that the agent can access, in additional to the default URLs required for the agent to work - - `github.com` means only the specific domain is allowed - - `*.github.com` means only the subdomains are allowed - the specific domain is excluded - - `*github.com` means both the specific domain and all subdomains are allowed - - You can also also filter on methods, hostnames, and paths - for example, `GET,HEAD *github.com/coder`. +- `boundary_additional_allowed_urls`: defines the URLs that the agent can access, in addition to the default URLs required for the agent to work. Rules use the format `"key=value [key=value ...]"`: + - `domain=github.com` - allows the domain and all its subdomains + - `domain=*.github.com` - allows only subdomains (the specific domain is excluded) + - `method=GET,HEAD domain=api.github.com` - allows specific HTTP methods for a domain + - `method=POST domain=api.example.com path=/users,/posts` - allows specific methods, domain, and paths + - `path=/api/v1/*,/api/v2/*` - allows specific URL paths You can also run Agent Boundaries directly in your workspace and configure it per template. You can do so by installing the [binary](https://github.com/coder/boundary) into the workspace image or at start-up. You can do so with the following command: diff --git a/docs/ai-coder/ai-bridge.md b/docs/ai-coder/ai-bridge.md deleted file mode 100644 index c7cfbe7d85ea2..0000000000000 --- a/docs/ai-coder/ai-bridge.md +++ /dev/null @@ -1,306 +0,0 @@ -# AI Bridge - -> [!NOTE] -> AI Bridge is currently an _experimental_ feature. - -![AI bridge diagram](../images/aibridge/aibridge_diagram.png) - -Bridge is a smart proxy for AI. 
It acts as a man-in-the-middle between your users' coding agents / IDEs -and providers like OpenAI and Anthropic. By intercepting all the AI traffic between these clients and -the upstream APIs, Bridge can record user prompts, token usage, and tool invocations. - -Bridge solves 3 key problems: - -1. **Centralized authn/z management**: no more issuing & managing API tokens for OpenAI/Anthropic usage. - Users use their Coder session or API tokens to authenticate with `coderd` (Coder control plane), and - `coderd` securely communicates with the upstream APIs on their behalf. Use a single key for all users. -2. **Auditing and attribution**: all interactions with AI services, whether autonomous or human-initiated, - will be audited and attributed back to a user. -3. **Centralized MCP administration**: define a set of approved MCP servers and tools which your users may - use, and prevent users from using their own. - -## When to use AI Bridge - -As the library of LLMs and their associated tools grow, administrators are pressured to provide auditing, measure adoption, provide tools through MCP, and track token spend. Disparate SAAS platforms provide _some_ of these for _some_ tools, but there is no centralized, secure solution for these challenges. - -If you are an administrator or devops leader looking to: - -- Measure AI tooling adoption across teams or projects -- Provide an LLM audit trail to security administrators -- Manage token spend in a central dashboard -- Investigate opportunities for AI automation -- Uncover the high-leverage use cases from experienced engineers - -We advise trying Bridge as self-hosted proxy to monitor LLM usage agnostically across AI powered IDEs like Cursor and headless agents like Claude Code. - -## Setup - -Bridge runs inside the Coder control plane, requiring no separate compute to deploy or scale. Once enabled, `coderd` hosts the bridge in-memory and brokers traffic to your configured AI providers on behalf of authenticated users. 
- -**Required**: - -1. A **premium** licensed Coder deployment -1. Feature must be [enabled](#activation) using the server flag -1. One or more [provider](#providers) API keys must be configured - -### Activation - -To enable this feature, activate the `aibridge` experiment using an environment variable or a CLI flag. -Additionally, you will need to enable Bridge explicitly: - -```sh -CODER_EXPERIMENTS="aibridge" CODER_AIBRIDGE_ENABLED=true coder server -# or -coder server --experiments=aibridge --aibridge-enabled=true -``` - -_If you have other experiments enabled, separate them by commas._ - -### Providers - -Bridge currently supports OpenAI and Anthropic APIs. - -**API Key**: - -The single key used to authenticate all requests from Bridge to OpenAI/Anthropic APIs. - -- `CODER_AIBRIDGE_OPENAI_KEY` or `--aibridge-openai-key` -- `CODER_AIBRIDGE_ANTHROPIC_KEY` or `--aibridge-anthropic-key` - -**Base URL**: - -The API to which Bridge will relay requests. - -- `CODER_AIBRIDGE_OPENAI_BASE_URL` or `--aibridge-openai-base-url`, defaults to `https://api.openai.com/v1/` -- `CODER_AIBRIDGE_ANTHROPIC_BASE_URL` or `--aibridge-anthropic-base-url`, defaults to `https://api.anthropic.com/` - -Bridge is compatible with _[Google Vertex AI](https://cloud.google.com/vertex-ai?hl=en)_, _[AWS Bedrock](https://aws.amazon.com/bedrock/)_, and other LLM brokers. You may specify the base URL(s) above to the appropriate API endpoint for your provider. - ---- - -> [!NOTE] -> See [Supported APIs](#supported-apis) section below for a comprehensive list. - -## Client Configuration - -Once AI Bridge is enabled on the server, your users need to configure their AI coding tools to use it. This section explains how users should configure their clients to connect to AI Bridge. 
- -### Setting Base URLs - -The exact configuration method varies by client — some use environment variables, others use configuration files or UI settings: - -- **OpenAI-compatible clients**: Set the base URL (commonly via the `OPENAI_BASE_URL` environment variable) to `https://coder.example.com/api/experimental/aibridge/openai/v1` -- **Anthropic-compatible clients**: Set the base URL (commonly via the `ANTHROPIC_BASE_URL` environment variable) to `https://coder.example.com/api/experimental/aibridge/anthropic` - -Replace `coder.example.com` with your actual Coder deployment URL. - -### Authentication - -Instead of distributing provider-specific API keys (OpenAI/Anthropic keys) to users, they authenticate to AI Bridge using their **Coder session token** or **API key**: - -- **OpenAI clients**: Users set `OPENAI_API_KEY` to their Coder session token or API key -- **Anthropic clients**: Users set `ANTHROPIC_API_KEY` to their Coder session token or API key - -Users can generate a Coder API key using: - -```sh -coder tokens create -``` - -Template admins can pre-configure authentication in templates using [`data.coder_workspace_owner.me.session_token`](https://registry.terraform.io/providers/coder/coder/latest/docs/data-sources/workspace_owner#session_token-1) to automatically configure the workspace owner's credentials. - -#### Compatibility Notes - -Most AI coding assistants that support custom base URLs can work with AI Bridge. However, client-specific configuration requirements vary: - -- Some clients require specific URL formats (e.g. try removing the `/v1` suffix) -- Some clients may proxy requests through their own servers, limiting compatibility (e.g. Cursor) -- Some clients may not support custom base URLs at all (e.g. Copilot CLI, Sourcegraph Amp) - -Consult your specific AI client's documentation for details on configuring custom API endpoints. 
- -## Collected Data - -Bridge collects: - -- The last `user` prompt of each request -- All token usage (associated with each prompt) -- Every tool invocation - -All of these records are associated to an "interception" record, which maps 1:1 with requests received from clients but may involve several interactions with upstream providers. Interceptions are associated with a Coder identity, allowing you to map consumption and cost with teams or individuals in your organization: - -![User Prompt logging](../images/aibridge/grafana_user_prompts_logging.png) - -These logs can be used to determine usage patterns, track costs, and evaluate tooling adoption. - -This data is currently accessible through the API and CLI (experimental), which we advise administrators export to their observability platform of choice. We've configured a Grafana dashboard to display Claude Code usage internally which can be imported as a starting point for your tooling adoption metrics. - -![User Leaderboard](../images/aibridge/grafana_user_leaderboard.png) - -We provide an example Grafana dashboard that you can import as a starting point for your tooling adoption metrics. See [here](https://github.com/coder/coder/blob/main/examples/monitoring/dashboards/grafana/aibridge/README.md). - -## Implementation Details - -`coderd` runs an in-memory instance of `aibridged`, whose logic is mostly contained in https://github.com/coder/aibridge. In future releases we will support running external instances for higher throughput and complete memory isolation from `coderd`. - -
-See a diagram of how Bridge interception works - -```mermaid - -sequenceDiagram - actor User - participant Client - participant Bridge - - User->>Client: Issues prompt - activate Client - - Note over User, Client: Coder session key used
as AI token - Client-->>Bridge: Sends request - - activate Bridge - Note over Client, Bridge: Coder session key
passed along - - Note over Bridge: Authenticate - Note over Bridge: Parse request - - alt Rejected - Bridge-->>Client: Send response - Client->>User: Display response - end - - Note over Bridge: If first request, establish
connection(s) with MCP server(s)
and list tools - - Note over Bridge: Inject MCP tools - - Bridge-->>AIProvider: Send modified request - - activate AIProvider - - AIProvider-->>Bridge: Send response - - Note over Client: Client is unaware of injected
tools and invocations,
just receives one long response - - alt Has injected tool calls - loop - Note over Bridge: Invoke injected tool - Bridge-->>AIProvider: Send tool result - AIProvider-->>Bridge: Send response - end - end - - deactivate AIProvider - - Bridge-->>Client: Relay response - deactivate Bridge - - Client->>User: Display response - deactivate Client -``` - -
- -## MCP - -[Model Context Protocol (MCP)](https://modelcontextprotocol.io/docs/getting-started/intro) is a mechanism for connecting AI applications to external systems. - -Bridge can connect to MCP servers and inject tools automatically, enabling you to centrally manage the list of tools you wish to grant your users. - -> [!NOTE] -> Only MCP servers which support OAuth2 Authorization are supported currently. In future releases we will support [optional authorization](https://modelcontextprotocol.io/specification/2025-06-18/basic/authorization#protocol-requirements). -> -> [_Streamable HTTP_](https://modelcontextprotocol.io/specification/2025-06-18/basic/transports#streamable-http) is the only supported transport currently. In future releases we will support the (now deprecated) [_Server-Sent Events_](https://modelcontextprotocol.io/specification/2025-06-18/basic/transports#backwards-compatibility) transport. - -Bridge makes use of [External Auth](../admin/external-auth/index.md) applications, as they define OAuth2 connections to upstream services. If your External Auth application hosts a remote MCP server, you can configure Bridge to connect to it, retrieve its tools and inject them into requests automatically - all while using each individual user's access token. - -For example, GitHub has a [remote MCP server](https://github.com/github/github-mcp-server?tab=readme-ov-file#remote-github-mcp-server) and we can use it as follows. - -```bash -CODER_EXTERNAL_AUTH_0_TYPE=github -CODER_EXTERNAL_AUTH_0_CLIENT_ID=... -CODER_EXTERNAL_AUTH_0_CLIENT_SECRET=... -# Tell Bridge where it can find this service's remote MCP server. -CODER_EXTERNAL_AUTH_0_MCP_URL=https://api.githubcopilot.com/mcp/ -``` - -See the diagram in [Implementation Details](#implementation-details) for more information. 
- -You can also control which tools are injected by using an allow and/or a deny regular expression on the tool names: - -```bash -CODER_EXTERNAL_AUTH_0_MCP_TOOL_ALLOW_REGEX=(.+_gist.*) -CODER_EXTERNAL_AUTH_0_MCP_TOOL_DENY_REGEX=(create_gist) -``` - -In the above example, all tools containing `_gist` in their name will be allowed, but `create_gist` is denied. - -The logic works as follows: - -- If neither the allow/deny patterns are defined, all tools will be injected. -- The deny pattern takes precedence. -- If only a deny pattern is defined, all tools are injected except those explicitly denied. - -In the above example, if you prompted your AI model with "list your available github tools by name", it would reply something like: - -> Certainly! Here are the GitHub-related tools that I have available: -> -> 1. `bmcp_github_update_gist` -> 2. `bmcp_github_list_gists` - -Bridge marks automatically injected tools with a prefix `bmcp_` ("bridged MCP"). It also namespaces all tool names by the ID of their associated External Auth application (in this case `github`). - -## Tool Injection - -If a model decides to invoke a tool and it has a `bmcp_` suffix and Bridge has a connection with the related MCP server, it will invoke the tool. The tool result will be passed back to the upstream AI provider, and this will loop until the model has all of its required data. These inner loops are not relayed back to the client; all it seems is the result of this loop. See [Implementation Details](#implementation-details). - -In contrast, tools which are defined by the client (i.e. the [`Bash` tool](https://docs.claude.com/en/docs/claude-code/settings#tools-available-to-claude) defined by _Claude Code_) cannot be invoked by Bridge, and the tool call from the model will be relayed to the client, after which it will invoke the tool. - -If you have the `oauth2` and `mcp-server-http` experiments enabled, Coder's own [internal MCP tools](mcp-server.md) will be injected automatically. 
- -### Troubleshooting - -- **Too many tools**: should you receive an error like `Invalid 'tools': array too long. Expected an array with maximum length 128, but got an array with length 132 instead`, you can reduce the number by filtering out tools using the allow/deny patterns documented in the [MCP](#mcp) section. - -- **Coder MCP tools not being injected**: in order for Coder MCP tools to be injected, the internal MCP server needs to be active. Follow the instructions in the [MCP Server](mcp-server.md) page to enable it. - -- **External Auth tools not being injected**: this is generally due to the requesting user not being authenticated against the External Auth app; when this is the case, no attempt is made to connect to the MCP server. - -## Known Issues / Limitations - -- Codex CLI currently does not work with Bridge due to a JSON marshaling issue: https://github.com/coder/aibridge/issues/19 -- Claude Code web searches do not report correctly: https://github.com/coder/aibridge/issues/11 - -## Supported APIs - -API support is broken down into two categories: - -- **Intercepted**: requests are intercepted, audited, and augmented - full Bridge functionality -- **Passthrough**: requests are proxied directly to the upstream, no auditing or augmentation takes place - -Where relevant, both streaming and non-streaming requests are supported. 
- -### OpenAI - -**Intercepted**: - -- [`/v1/chat/completions`](https://platform.openai.com/docs/api-reference/chat/create) - -**Passthrough**: - -- [`/v1/models(/*)`](https://platform.openai.com/docs/api-reference/models/list) -- [`/v1/responses`](https://platform.openai.com/docs/api-reference/responses/create) _(Interception support coming in **Beta**)_ - -### Anthropic - -**Intercepted**: - -- [`/v1/messages`](https://docs.claude.com/en/api/messages) - -**Passthrough**: - -- [`/v1/models(/*)`](https://docs.claude.com/en/api/models-list) - -## Troubleshooting - -To report a bug, file a feature request, or view a list of known issues, please visit our [GitHub repository for Bridge](https://github.com/coder/aibridge). If you encounter issues with Bridge during early access, please reach out to us via [Discord](https://discord.gg/coder). diff --git a/docs/ai-coder/ai-bridge/client-config.md b/docs/ai-coder/ai-bridge/client-config.md new file mode 100644 index 0000000000000..7f63ad22973f4 --- /dev/null +++ b/docs/ai-coder/ai-bridge/client-config.md @@ -0,0 +1,125 @@ +# Client Configuration + +Once AI Bridge is setup on your deployment, the AI coding tools used by your users will need to be configured to route requests via AI Bridge. + +## Base URLs + +Most AI coding tools allow the "base URL" to be customized. In other words, when a request is made to OpenAI's API from your coding tool, the API endpoint such as [/v1/chat/completions](https://platform.openai.com/docs/api-reference/chat) will be appended to the configured base. Therefore, instead of the default base URL of "https://api.openai.com/v1", you'll need to set it to "https://coder.example.com/api/v2/aibridge/openai/v1". 
+
+The exact configuration method varies by client — some use environment variables, others use configuration files or UI settings:
+
+- **OpenAI-compatible clients**: Set the base URL (commonly via the `OPENAI_BASE_URL` environment variable) to `https://coder.example.com/api/v2/aibridge/openai/v1`
+- **Anthropic-compatible clients**: Set the base URL (commonly via the `ANTHROPIC_BASE_URL` environment variable) to `https://coder.example.com/api/v2/aibridge/anthropic`
+
+Replace `coder.example.com` with your actual Coder deployment URL.
+
+## Authentication
+
+Instead of distributing provider-specific API keys (OpenAI/Anthropic keys) to users, they authenticate to AI Bridge using their **Coder session token** or **API key**:
+
+- **OpenAI clients**: Users set `OPENAI_API_KEY` to their Coder session token or API key
+- **Anthropic clients**: Users set `ANTHROPIC_API_KEY` to their Coder session token or API key
+
+Again, the exact environment variable or setting naming may differ from tool to tool; consult your tool's documentation.
+
+## Configuring In-Workspace Tools
+
+AI coding tools running inside a Coder workspace, such as IDE extensions, can be configured to use AI Bridge.
+
+While users can manually configure these tools with a long-lived API key, template admins can provide a more seamless experience by pre-configuring them. Admins can automatically inject the user's session token with `data.coder_workspace_owner.me.session_token` and the AI Bridge base URL into the workspace environment.
+
+In the example below, Claude Code respects these environment variables and will route all requests via AI Bridge.
+
+This is the fastest way to bring existing agents like Roo Code, Cursor, or Claude Code into compliance without adopting Coder Tasks.
+ +```hcl +data "coder_workspace_owner" "me" {} + +data "coder_workspace" "me" {} + +resource "coder_agent" "dev" { + arch = "amd64" + os = "linux" + dir = local.repo_dir + env = { + ANTHROPIC_BASE_URL : "${data.coder_workspace.me.access_url}/api/v2/aibridge/anthropic", + ANTHROPIC_AUTH_TOKEN : data.coder_workspace_owner.me.session_token + } + ... # other agent configuration +} +``` + +### Using Coder Tasks + +Agents like Claude Code can be configured to route through AI Bridge in any template by pre-configuring the agent with the session token. [Coder Tasks](../tasks.md) is particularly useful for this pattern, providing a framework for agents to complete background development operations autonomously. To route agents through AI Bridge in a Coder Tasks template, pre-configure it to install Claude Code and configure it with the session token: + +```hcl +data "coder_workspace_owner" "me" {} + +data "coder_workspace" "me" {} + +resource "coder_agent" "dev" { + arch = "amd64" + os = "linux" + dir = local.repo_dir + env = { + ANTHROPIC_BASE_URL : "${data.coder_workspace.me.access_url}/api/v2/aibridge/anthropic", + ANTHROPIC_AUTH_TOKEN : data.coder_workspace_owner.me.session_token + } + ... # other agent configuration +} + +# See https://registry.coder.com/modules/coder/claude-code for more information +module "claude-code" { + count = local.has_ai_prompt ? data.coder_workspace.me.start_count : 0 + source = "dev.registry.coder.com/coder/claude-code/coder" + version = ">= 3.4.0" + agent_id = coder_agent.dev.id + workdir = "/home/coder/project" + claude_api_key = data.coder_workspace_owner.me.session_token # Use the Coder session token to authenticate with AI Bridge + ai_prompt = data.coder_parameter.ai_prompt.value + ... # other claude-code configuration +} +``` + +## External and Desktop Clients + +You can also configure AI tools running outside of a Coder workspace, such as local IDE extensions or desktop applications, to connect to AI Bridge. 
+ +The configuration is the same: point the tool to the AI Bridge [base URL](#base-urls) and use a Coder API key for authentication. + +Users can generate a long-lived API key from the Coder UI or CLI. Follow the instructions at [Sessions and API tokens](../../admin/users/sessions-tokens.md#generate-a-long-lived-api-token-on-behalf-of-yourself) to create one. + +## Compatibility + +The table below shows tested AI clients and their compatibility with AI Bridge. Click each client name for vendor-specific configuration instructions. Report issues or share compatibility updates in the [aibridge](https://github.com/coder/aibridge) issue tracker. + +| Client | OpenAI support | Anthropic support | Notes | +|-------------------------------------------------------------------------------------------------------------------------------------|----------------|-------------------|-----------------------------------------------------------------------------------------------------------------------------------------------| +| [Claude Code](https://docs.claude.com/en/docs/claude-code/settings#environment-variables) | - | ✅ | Works out of the box and can be preconfigured in templates. | +| Claude Code (VS Code) | - | ✅ | May require signing in once; afterwards respects workspace environment variables. | +| Cursor | ❌ | ❌ | Support dropped for `v1/chat/completions` endpoints; `v1/responses` support is in progress [#16](https://github.com/coder/aibridge/issues/16) | +| [Roo Code](https://docs.roocode.com/features/api-configuration-profiles#creating-and-managing-profiles) | ✅ | ✅ | Use the **OpenAI Compatible** provider with the legacy format to avoid `/v1/responses`. | +| [Codex CLI](https://github.com/openai/codex/blob/main/docs/config.md#model_providers) | ✅ | N/A | `gpt-5-codex` support is [in progress](https://github.com/coder/aibridge/issues/16). 
| +| [GitHub Copilot (VS Code)](https://code.visualstudio.com/docs/copilot/customization/language-models#_add-an-openaicompatible-model) | ✅ | ❌ | Requires the pre-release extension. Anthropic endpoints are not supported. | +| [Goose](https://block.github.io/goose/docs/getting-started/providers/#available-providers) | ❓ | ❓ | | +| [Goose Desktop](https://block.github.io/goose/docs/getting-started/providers/#available-providers) | ❓ | ✅ | | +| WindSurf | ❌ | ❌ | No option to override the base URL. | +| Sourcegraph Amp | ❌ | ❌ | No option to override the base URL. | +| Kiro | ❌ | ❌ | No option to override the base URL. | +| [Copilot CLI](https://github.com/github/copilot-cli/issues/104) | ❌ | ❌ | No support for custom base URLs and uses a `GITHUB_TOKEN` for authentication. | +| [Kilo Code](https://kilocode.ai/docs/features/api-configuration-profiles#creating-and-managing-profiles) | ✅ | ✅ | Similar to Roo Code. | +| Gemini CLI | ❌ | ❌ | Not supported yet. | +| [Amazon Q CLI](https://aws.amazon.com/q/) | ❌ | ❌ | Limited to Amazon Q subscriptions; no custom endpoint support. | + +Legend: ✅ works, ⚠️ limited support, ❌ not supported, ❓ not yet verified, — not applicable. + +### Compatibility Overview + +Most AI coding assistants can use AI Bridge, provided they support custom base URLs. Client-specific requirements vary: + +- Some clients require specific URL formats (for example, removing the `/v1` suffix). +- Some clients proxy requests through their own servers, which limits compatibility. +- Some clients do not support custom base URLs. + +See the table in the [compatibility](#compatibility) section above for the combinations we have verified and any known issues. 
diff --git a/docs/ai-coder/ai-bridge/index.md b/docs/ai-coder/ai-bridge/index.md
new file mode 100644
index 0000000000000..5c760df1d16e7
--- /dev/null
+++ b/docs/ai-coder/ai-bridge/index.md
@@ -0,0 +1,39 @@
+# AI Bridge
+
+![AI bridge diagram](../../images/aibridge/aibridge_diagram.png)
+
+AI Bridge is a smart gateway for AI. It acts as an intermediary between your users' coding agents / IDEs
+and providers like OpenAI and Anthropic. By intercepting all the AI traffic between these clients and
+the upstream APIs, AI Bridge can record user prompts, token usage, and tool invocations.
+
+AI Bridge solves 3 key problems:
+
+1. **Centralized authn/z management**: no more issuing & managing API tokens for OpenAI/Anthropic usage.
+   Users use their Coder session or API tokens to authenticate with `coderd` (Coder control plane), and
+   `coderd` securely communicates with the upstream APIs on their behalf.
+1. **Auditing and attribution**: all interactions with AI services, whether autonomous or human-initiated,
+   will be audited and attributed back to a user.
+1. **Centralized MCP administration**: define a set of approved MCP servers and tools which your users may
+   use.
+
+## When to use AI Bridge
+
+As LLM adoption grows, administrators need centralized auditing, monitoring, and token management. AI Bridge enables organizations to manage AI tooling access for thousands of engineers from a single control plane.
+
+If you are an administrator or devops leader looking to:
+
+- Measure AI tooling adoption across teams or projects
+- Establish an audit trail of prompts, issues, and tools invoked
+- Manage token spend in a central dashboard
+- Investigate opportunities for AI automation
+- Uncover high-leverage use cases
+
+AI Bridge is best suited for organizations facing these centralized management and observability challenges.
+ +## Next steps + +- [Set up AI Bridge](./setup.md) on your Coder deployment +- [Configure AI clients](./client-config.md) to use AI Bridge +- [Configure MCP servers](./mcp.md) for tool access +- [Monitor usage and metrics](./monitoring.md) +- [Reference documentation](./reference.md) diff --git a/docs/ai-coder/ai-bridge/mcp.md b/docs/ai-coder/ai-bridge/mcp.md new file mode 100644 index 0000000000000..ef173f8b3ec46 --- /dev/null +++ b/docs/ai-coder/ai-bridge/mcp.md @@ -0,0 +1,66 @@ +# MCP + +[Model Context Protocol (MCP)](https://modelcontextprotocol.io/docs/getting-started/intro) is a mechanism for connecting AI applications to external systems. + +AI Bridge can connect to MCP servers and inject tools automatically, enabling you to centrally manage the list of tools you wish to grant your users. + +> [!NOTE] +> Only MCP servers which support OAuth2 Authorization are supported currently. +> +> [_Streamable HTTP_](https://modelcontextprotocol.io/specification/2025-06-18/basic/transports#streamable-http) is the only supported transport currently. In future releases we will support the (now deprecated) [_Server-Sent Events_](https://modelcontextprotocol.io/specification/2025-06-18/basic/transports#backwards-compatibility) transport. + +AI Bridge makes use of [External Auth](../../admin/external-auth/index.md) applications, as they define OAuth2 connections to upstream services. If your External Auth application hosts a remote MCP server, you can configure AI Bridge to connect to it, retrieve its tools and inject them into requests automatically - all while using each individual user's access token. + +For example, GitHub has a [remote MCP server](https://github.com/github/github-mcp-server?tab=readme-ov-file#remote-github-mcp-server) and we can use it as follows. + +```bash +CODER_EXTERNAL_AUTH_0_TYPE=github +CODER_EXTERNAL_AUTH_0_CLIENT_ID=... +CODER_EXTERNAL_AUTH_0_CLIENT_SECRET=... +# Tell AI Bridge where it can find this service's remote MCP server. 
+CODER_EXTERNAL_AUTH_0_MCP_URL=https://api.githubcopilot.com/mcp/
+```
+
+See the diagram in [Implementation Details](./reference.md#implementation-details) for more information.
+
+You can also control which tools are injected by using an allow and/or a deny regular expression on the tool names:
+
+```env
+CODER_EXTERNAL_AUTH_0_MCP_TOOL_ALLOW_REGEX=(.+_gist.*)
+CODER_EXTERNAL_AUTH_0_MCP_TOOL_DENY_REGEX=(create_gist)
+```
+
+In the above example, all tools containing `_gist` in their name will be allowed, but `create_gist` is denied.
+
+The logic works as follows:
+
+- If neither the allow/deny patterns are defined, all tools will be injected.
+- The deny pattern takes precedence.
+- If only a deny pattern is defined, all tools are injected except those explicitly denied.
+
+In the above example, if you prompted your AI model with "list your available github tools by name", it would reply something like:
+
+> Certainly! Here are the GitHub-related tools that I have available:
+>
+> ```text
+> 1. bmcp_github_update_gist
+> 2. bmcp_github_list_gists
+> ```
+
+AI Bridge marks automatically injected tools with a prefix `bmcp_` ("bridged MCP"). It also namespaces all tool names by the ID of their associated External Auth application (in this case `github`).
+
+## Tool Injection
+
+If a model decides to invoke a tool and it has a `bmcp_` prefix and AI Bridge has a connection with the related MCP server, it will invoke the tool. The tool result will be passed back to the upstream AI provider, and this will loop until the model has all of its required data. These inner loops are not relayed back to the client; all it sees is the result of this loop. See [Implementation Details](./reference.md#implementation-details).
+
+In contrast, tools which are defined by the client (e.g.
the [`Bash` tool](https://docs.claude.com/en/docs/claude-code/settings#tools-available-to-claude) defined by _Claude Code_) cannot be invoked by AI Bridge, and the tool call from the model will be relayed to the client, after which it will invoke the tool. + +If you have [Coder MCP Server](../mcp-server.md) enabled, as well as have [`CODER_AIBRIDGE_INJECT_CODER_MCP_TOOLS=true`](../../reference/cli/server#--aibridge-inject-coder-mcp-tools) set, Coder's MCP tools will be injected into intercepted requests. + +### Troubleshooting + +- **Too many tools**: should you receive an error like `Invalid 'tools': array too long. Expected an array with maximum length 128, but got an array with length 132 instead`, you can reduce the number by filtering out tools using the allow/deny patterns documented in the [MCP](#mcp) section. + +- **Coder MCP tools not being injected**: in order for Coder MCP tools to be injected, the internal MCP server needs to be active. Follow the instructions in the [MCP Server](../mcp-server.md) page to enable it and ensure `CODER_AIBRIDGE_INJECT_CODER_MCP_TOOLS` is set to `true`. + +- **External Auth tools not being injected**: this is generally due to the requesting user not being authenticated against the [External Auth](../../admin/external-auth/index.md) app; when this is the case, no attempt is made to connect to the MCP server. diff --git a/docs/ai-coder/ai-bridge/monitoring.md b/docs/ai-coder/ai-bridge/monitoring.md new file mode 100644 index 0000000000000..6fd04e09b6a39 --- /dev/null +++ b/docs/ai-coder/ai-bridge/monitoring.md @@ -0,0 +1,11 @@ +# Monitoring + +AI Bridge records the last `user` prompt, token usage, and every tool invocation for each intercepted request. Each capture is tied to a single "interception" that maps back to the authenticated Coder identity, making it easy to attribute spend and behaviour. 
+ +![User Prompt logging](../../images/aibridge/grafana_user_prompts_logging.png) + +![User Leaderboard](../../images/aibridge/grafana_user_leaderboard.png) + +We provide an example Grafana dashboard that you can import as a starting point for your metrics. See [the Grafana dashboard README](https://github.com/coder/coder/blob/main/examples/monitoring/dashboards/grafana/aibridge/README.md). + +These logs and metrics can be used to determine usage patterns, track costs, and evaluate tooling adoption. diff --git a/docs/ai-coder/ai-bridge/reference.md b/docs/ai-coder/ai-bridge/reference.md new file mode 100644 index 0000000000000..597f62fe616e0 --- /dev/null +++ b/docs/ai-coder/ai-bridge/reference.md @@ -0,0 +1,41 @@ +# Reference + +## Implementation Details + +`coderd` runs an in-memory instance of `aibridged`, whose logic is mostly contained in https://github.com/coder/aibridge. In future releases we will support running external instances for higher throughput and complete memory isolation from `coderd`. + +![AI Bridge implementation details](../../images/aibridge/aibridge-implementation-details.png) + +## Supported APIs + +API support is broken down into two categories: + +- **Intercepted**: requests are intercepted, audited, and augmented - full AI Bridge functionality +- **Passthrough**: requests are proxied directly to the upstream, no auditing or augmentation takes place + +Where relevant, both streaming and non-streaming requests are supported. 
+ +### OpenAI + +#### Intercepted + +- [`/v1/chat/completions`](https://platform.openai.com/docs/api-reference/chat/create) + +#### Passthrough + +- [`/v1/models(/*)`](https://platform.openai.com/docs/api-reference/models/list) +- [`/v1/responses`](https://platform.openai.com/docs/api-reference/responses/create) _(Interception support coming in **Beta**)_ + +### Anthropic + +#### Intercepted + +- [`/v1/messages`](https://docs.claude.com/en/api/messages) + +#### Passthrough + +- [`/v1/models(/*)`](https://docs.claude.com/en/api/models-list) + +## Troubleshooting + +To report a bug, file a feature request, or view a list of known issues, please visit our [GitHub repository for AI Bridge](https://github.com/coder/aibridge). If you encounter issues with AI Bridge during early access, please reach out to us via [Discord](https://discord.gg/coder). diff --git a/docs/ai-coder/ai-bridge/setup.md b/docs/ai-coder/ai-bridge/setup.md new file mode 100644 index 0000000000000..b9ddbd56cdfb8 --- /dev/null +++ b/docs/ai-coder/ai-bridge/setup.md @@ -0,0 +1,96 @@ +# Setup + +AI Bridge runs inside the Coder control plane (`coderd`), requiring no separate compute to deploy or scale. Once enabled, `coderd` runs the `aibridged` in-memory and brokers traffic to your configured AI providers on behalf of authenticated users. + +**Required**: + +1. A **premium** licensed Coder deployment +1. Feature must be [enabled](#activation) using the server flag +1. One or more [providers](#configure-providers) API key(s) must be configured + +## Activation + +You will need to enable AI Bridge explicitly: + +```sh +CODER_AIBRIDGE_ENABLED=true coder server +# or +coder server --aibridge-enabled=true +``` + +## Configure Providers + +AI Bridge proxies requests to upstream LLM APIs. Configure at least one provider before exposing AI Bridge to end users. + +
+ +### OpenAI + +Set the following when routing [OpenAI-compatible](https://coder.com/docs/reference/cli/server#--aibridge-openai-key) traffic through AI Bridge: + +- `CODER_AIBRIDGE_OPENAI_KEY` or `--aibridge-openai-key` +- `CODER_AIBRIDGE_OPENAI_BASE_URL` or `--aibridge-openai-base-url` + +The default base URL (`https://api.openai.com/v1/`) works for the native OpenAI service. Point the base URL at your preferred OpenAI-compatible endpoint (for example, a hosted proxy or LiteLLM deployment) when needed. + +If you'd like to create an [OpenAI key](https://platform.openai.com/api-keys) with minimal privileges, this is the minimum required set: + +![List Models scope should be set to "Read", Model Capabilities set to "Request"](../../images/aibridge/openai_key_scope.png) + +### Anthropic + +Set the following when routing [Anthropic-compatible](https://coder.com/docs/reference/cli/server#--aibridge-anthropic-key) traffic through AI Bridge: + +- `CODER_AIBRIDGE_ANTHROPIC_KEY` or `--aibridge-anthropic-key` +- `CODER_AIBRIDGE_ANTHROPIC_BASE_URL` or `--aibridge-anthropic-base-url` + +The default base URL (`https://api.anthropic.com/`) targets Anthropic's public API. Override it for Anthropic-compatible brokers. + +Anthropic does not allow [API keys](https://console.anthropic.com/settings/keys) to have restricted permissions at the time of writing (Nov 2025). + +### Amazon Bedrock + +Set the following when routing [Amazon Bedrock](https://coder.com/docs/reference/cli/server#--aibridge-bedrock-region) traffic through AI Bridge: + +- `CODER_AIBRIDGE_BEDROCK_REGION` or `--aibridge-bedrock-region` +- `CODER_AIBRIDGE_BEDROCK_ACCESS_KEY` or `--aibridge-bedrock-access-key` +- `CODER_AIBRIDGE_BEDROCK_ACCESS_KEY_SECRET` or `--aibridge-bedrock-access-key-secret` +- `CODER_AIBRIDGE_BEDROCK_MODEL` or `--aibridge-bedrock-model` +- `CODER_AIBRIDGE_BEDROCK_SMALL_FAST_MODEL` or `--aibridge-bedrock-small-fast-model` + +#### Obtaining Bedrock credentials + +1. 
**Choose a region** where you want to use Bedrock. + +2. **Generate API keys** in the [AWS Bedrock console](https://us-east-1.console.aws.amazon.com/bedrock/home?region=us-east-1#/api-keys/long-term/create) (replace `us-east-1` in the URL with your chosen region): + - Choose an expiry period for the key. + - Click **Generate**. + - This creates an IAM user with strictly-scoped permissions for Bedrock access. + +3. **Create an access key** for the IAM user: + - After generating the API key, click **"You can directly modify permissions for the IAM user associated"**. + - In the IAM user page, navigate to the **Security credentials** tab. + - Under **Access keys**, click **Create access key**. + - Select **"Application running outside AWS"** as the use case. + - Click **Next**. + - Add a description like "Coder AI Bridge token". + - Click **Create access key**. + - Save both the access key ID and secret access key securely. + +4. **Configure your Coder deployment** with the credentials: + + ```sh + export CODER_AIBRIDGE_BEDROCK_REGION=us-east-1 + export CODER_AIBRIDGE_BEDROCK_ACCESS_KEY= + export CODER_AIBRIDGE_BEDROCK_ACCESS_KEY_SECRET= + coder server + ``` + +### Additional providers and Model Proxies + +AI Bridge can relay traffic to other OpenAI- or Anthropic-compatible services or model proxies like LiteLLM by pointing the base URL variables above at the provider you operate. Share feedback or follow along in the [`aibridge`](https://github.com/coder/aibridge) issue tracker as we expand support for additional providers. + +
+ +> [!NOTE] +> See the [Supported APIs](./reference.md#supported-apis) section below for precise endpoint coverage and interception behavior. diff --git a/docs/ai-coder/cli.md b/docs/ai-coder/cli.md index 6d337b458d6a7..2e56a76cf4882 100644 --- a/docs/ai-coder/cli.md +++ b/docs/ai-coder/cli.md @@ -1,230 +1,13 @@ # Tasks CLI -The Coder CLI provides experimental commands for managing tasks programmatically. These are available under `coder exp task`: +The Tasks CLI documentation has moved to the auto-generated CLI reference pages: -```console -USAGE: - coder exp task +- [task](../reference/cli/task.md) - Main tasks command +- [task create](../reference/cli/task_create.md) - Create a task +- [task delete](../reference/cli/task_delete.md) - Delete tasks +- [task list](../reference/cli/task_list.md) - List tasks +- [task logs](../reference/cli/task_logs.md) - Show task logs +- [task send](../reference/cli/task_send.md) - Send input to a task +- [task status](../reference/cli/task_status.md) - Show task status - Experimental task commands. - - Aliases: tasks - -SUBCOMMANDS: - create Create an experimental task - delete Delete experimental tasks - list List experimental tasks - logs Show a task's logs - send Send input to a task - status Show the status of a task. 
-``` - -## Creating tasks - -```console -USAGE: - coder exp task create [flags] [input] - - Create an experimental task - - - Create a task with direct input: - - $ coder exp task create "Add authentication to the user service" - - - Create a task with stdin input: - - $ echo "Add authentication to the user service" | coder exp task create - - - Create a task with a specific name: - - $ coder exp task create --name task1 "Add authentication to the user service" - - - Create a task from a specific template / preset: - - $ coder exp task create --template backend-dev --preset "My Preset" "Add authentication to the user service" - - - Create a task for another user (requires appropriate permissions): - - $ coder exp task create --owner user@example.com "Add authentication to the user service" - -OPTIONS: - -O, --org string, $CODER_ORGANIZATION - Select which organization (uuid or name) to use. - - --name string - Specify the name of the task. If you do not specify one, a name will be generated for you. - - --owner string (default: me) - Specify the owner of the task. Defaults to the current user. - - --preset string, $CODER_TASK_PRESET_NAME (default: none) - -q, --quiet bool - Only display the created task's ID. - - --stdin bool - Reads from stdin for the task input. - - --template string, $CODER_TASK_TEMPLATE_NAME - --template-version string, $CODER_TASK_TEMPLATE_VERSION -``` - -## Deleting Tasks - -```console -USAGE: - coder exp task delete [flags] [ ...] - - Delete experimental tasks - - Aliases: rm - - - Delete a single task.: - - $ $ coder exp task delete task1 - - - Delete multiple tasks.: - - $ $ coder exp task delete task1 task2 task3 - - - Delete a task without confirmation.: - - $ $ coder exp task delete task4 --yes - -OPTIONS: - -y, --yes bool - Bypass prompts. 
-``` - -## Listing tasks - -```console -USAGE: - coder exp task list [flags] - - List experimental tasks - - Aliases: ls - - - List tasks for the current user.: - - $ coder exp task list - - - List tasks for a specific user.: - - $ coder exp task list --user someone-else - - - List all tasks you can view.: - - $ coder exp task list --all - - - List all your running tasks.: - - $ coder exp task list --status running - - - As above, but only show IDs.: - - $ coder exp task list --status running --quiet - -OPTIONS: - -a, --all bool (default: false) - List tasks for all users you can view. - - -c, --column [id|organization id|owner id|owner name|name|template id|template name|template display name|template icon|workspace id|workspace agent id|workspace agent lifecycle|workspace agent health|initial prompt|status|state|message|created at|updated at|state changed] (default: name,status,state,state changed,message) - Columns to display in table output. - - -o, --output table|json (default: table) - Output format. - - -q, --quiet bool (default: false) - Only display task IDs. - - --status string - Filter by task status (e.g. running, failed, etc). - - --user string - List tasks for the specified user (username, "me"). -``` - -## Viewing Task Logs - -```console -USAGE: - coder exp task logs [flags] - - Show a task's logs - - - Show logs for a given task.: - - $ coder exp task logs task1 - -OPTIONS: - -c, --column [id|content|type|time] (default: type,content) - Columns to display in table output. - - -o, --output table|json (default: table) - Output format. -``` - -## Sending input to a task - -```console -USAGE: - coder exp task send [flags] [ | --stdin] - - Send input to a task - - - Send direct input to a task.: - - $ coder exp task send task1 "Please also add unit tests" - - - Send input from stdin to a task.: - - $ echo "Please also add unit tests" | coder exp task send task1 --stdin - -OPTIONS: - --stdin bool - Reads the input from stdin. 
-``` - -## Viewing Task Status - -```console -USAGE: - coder exp task status [flags] - - Show the status of a task. - - Aliases: stat - - - Show the status of a given task.: - - $ coder exp task status task1 - - - Watch the status of a given task until it completes (idle or stopped).: - - $ coder exp task status task1 --watch - -OPTIONS: - -c, --column [state changed|status|healthy|state|message] (default: state changed,status,healthy,state,message) - Columns to display in table output. - - -o, --output table|json (default: table) - Output format. - - --watch bool (default: false) - Watch the task status output. This will stream updates to the terminal until the underlying workspace is stopped. -``` - -> **Note**: The `--watch` flag will automatically exit when the task reaches a terminal state. Watch mode ends when: -> -> - The workspace is stopped -> - The workspace agent becomes unhealthy or is shutting down -> - The task completes (reaches a non-working state like completed, failed, or canceled) - -## Identifying Tasks - -Tasks can be identified in CLI commands using either: - -- **Task Name**: The human-readable name (e.g., `my-task-name`) - > Note: Tasks owned by other users can be identified by their owner and name (e.g., `alice/her-task`). -- **Task ID**: The UUID identifier (e.g., `550e8400-e29b-41d4-a716-446655440000`) +For the complete CLI reference, see the [CLI documentation](../reference/cli/index.md). diff --git a/docs/ai-coder/index.md b/docs/ai-coder/index.md index eb1fe33d7f24d..36da055e0cb79 100644 --- a/docs/ai-coder/index.md +++ b/docs/ai-coder/index.md @@ -8,11 +8,11 @@ Coder [integrates with IDEs](../user-guides/workspace-access/index.md) such as C These agents work well inside existing Coder workspaces as they can simply be enabled via an extension or are built-into the editor. 
-## Agents with Coder Tasks (Beta) +## Agents with Coder Tasks In cases where the IDE is secondary, such as prototyping or long-running background jobs, agents like Claude Code or Aider are better for the job and new SaaS interfaces like [Devin](https://devin.ai) and [ChatGPT Codex](https://openai.com/index/introducing-codex/) are emerging. -[Coder Tasks](./tasks.md) is a new interface inside Coder to run and manage coding agents with a chat-based UI. Unlike SaaS-based products, Coder Tasks is self-hosted (included in your Coder deployment) and allows you to run any terminal-based agent such as Claude Code or Codex's Open Source CLI. +[Coder Tasks](./tasks.md) is an interface inside Coder to run and manage coding agents with a chat-based UI. Unlike SaaS-based products, Coder Tasks is self-hosted (included in your Coder deployment) and allows you to run any terminal-based agent such as Claude Code or Codex's Open Source CLI. ![Coder Tasks UI](../images/guides/ai-agents/tasks-ui.png) diff --git a/docs/ai-coder/tasks-core-principles.md b/docs/ai-coder/tasks-core-principles.md index 337d499d95ec9..fadd4273b0aed 100644 --- a/docs/ai-coder/tasks-core-principles.md +++ b/docs/ai-coder/tasks-core-principles.md @@ -49,19 +49,17 @@ There are two approaches to turning a Template into a Task Template: You can use a pre-existing agent module that [Coder maintains](https://registry.coder.com/modules). When using an agent module, you must define: -- `coder_parameter` named _ai_prompt_: Define the AI prompt input so users can define/specify what tasks need to run +- `coder_ai_task` resource: links a `coder_app` to a Task. - **Agentic Module** that defines the agent you want to use, e.g. Claude Code, Codex CLI, Gemini CLI -Coder maintains various agentic modules; see [Coder Labs](https://registry.coder.com/contributors/coder-labs). 
These modules, in addition to defining connection information for the specific agent, reference the [AgentAPI module](https://registry.coder.com/modules/coder/agentapi) which provides connection, reporting, and agent life cycle management operations. The module also defines the `coder_ai_task` resource which allows the Task to be visible in the UI. +Coder maintains various agentic modules; see [Coder Labs](https://registry.coder.com/contributors/coder-labs). These modules, in addition to defining connection information for the specific agent, reference the [AgentAPI module](https://registry.coder.com/modules/coder/agentapi) which provides connection, reporting, and agent life cycle management operations. The modules also output the specific `coder_app` identifier for the specific agent running inside the workspace. -The following code snippet can be dropped into any existing template to modify it into a Claude-Code enabled task template. This snippet also includes space for a setup script that will prime the agent for execution. +The following code snippet can be dropped into any existing template in Coder v2.28 or above to modify it into a Claude-Code enabled task template. This snippet also includes space for a setup script that will prime the agent for execution. -```hcl -data "coder_parameter" "ai_prompt" { - name = "AI Prompt" - type = "string" -} +> [!NOTE] +> This requires at least version 2.13.0 of the `coder/coder` Terraform provider. 
+```hcl data "coder_parameter" "setup_script" { name = "setup_script" display_name = "Setup Script" @@ -72,12 +70,18 @@ data "coder_parameter" "setup_script" { default = "" } +data "coder_task" "me" {} + +resource "coder_ai_task" "task" { + app_id = module.claude-code.task_app_id +} + # The Claude Code module does the automatic task reporting # Other agent modules: https://registry.coder.com/modules?search=agent -# Or use a custom agent: +# Or use a custom agent: module "claude-code" { source = "registry.coder.com/coder/claude-code/coder" - version = "3.0.1" + version = "4.0.0" agent_id = coder_agent.example.id workdir = "/home/coder/project" @@ -88,7 +92,7 @@ module "claude-code" { claude_code_version = "1.0.82" # Pin to a specific version agentapi_version = "v0.6.1" - ai_prompt = data.coder_parameter.ai_prompt.value + ai_prompt = data.coder_task.me.prompt model = "sonnet" # Optional: run your pre-flight script @@ -118,19 +122,19 @@ variable "anthropic_api_key" { Let's break down this snippet: -- The `module "claude-code"` sets up the Task template to use Claude Code, but Coder's Registry supports many other agent modules like [OpenAI's Codex](https://registry.coder.com/modules/coder-labs/codex) or [Gemini CLI](https://registry.coder.com/modules/coder-labs/gemini) -- Each module defines its own specific inputs. Claude Code expects the `claude_api_key` input, but OpenAI based agents expect `OPENAI_API_KEY` for example. You'll want to check the specific module's defined variables to know what exactly needs to be defined +- The `module "claude-code"` sets up the Task template to use Claude Code. Coder's Registry supports many other agent modules like [OpenAI's Codex](https://registry.coder.com/modules/coder-labs/codex) or [Gemini CLI](https://registry.coder.com/modules/coder-labs/gemini) +- Each module defines its own specific inputs. Claude Code expects the `claude_api_key` input, but OpenAI based agents expect `OPENAI_API_KEY` for example. 
You'll want to check the specific module's defined variables to know what exactly needs to be defined. You will also generally need to pass `data.coder_task.me.prompt` +- Each module outputs the UUID of the `coder_app` related to the AI agent. In the above example, the output is named `task_app_id`. See the relevant documentation for the module for more detailed information. - You can define specific scripts to run before the module is installed, `pre_install_script`, or after install, `pre_install_script`. For example, you could define a setup script that calls to AWS S3 and pulls specific files you want your agent to have access to #### Using a Custom Agent Coder allows you to define a custom agent. When doing so, you must define: -- `coder_parameter` named _ai_prompt_: Define the AI prompt input so users can define/specify what tasks need to run -- `coder_ai_task` which registers the task with the UI and allows the task to be visible -- **AgentAPI binary** which provides runtime execution logistics for the task +- A `coder_app` resource that uses [`coder/agentapi`](https://github.com/coder/agentapi) to run the custom agent. **AgentAPI** provides runtime execution logistics for the task. +- A `coder_ai_task` resource which associates the `coder_app` related to the AI agent with the Task. -You can find the latest [AgentAPI binary here](https://github.com/coder/agentapi/releases). You can alternatively import and use the [AgentAPI module](https://registry.coder.com/modules/coder/agentapi?tab=variables) Coder maintains, which also conveniently defines the `coder_ai_task` resource. +You can find the latest [AgentAPI binary here](https://github.com/coder/agentapi/releases). You can alternatively import and use the [AgentAPI module](https://registry.coder.com/modules/coder/agentapi?tab=variables) Coder maintains. Read more about [custom agents here](https://coder.com/docs/ai-coder/custom-agents). 
@@ -138,10 +142,12 @@ Read more about [custom agents here](https://coder.com/docs/ai-coder/custom-agen Coder recommends using pre-existing agent modules when making a Task Template. Making a Task Template boils down to: -1. Identify the existing agent you want access to in our [Registry](https://registry.coder.com/modules) -1. Add the agent's module to your existing template -1. Define the module's required inputs -1. Define the `coder_parameter` +1. Identify the existing agent you want access to in our [Registry](https://registry.coder.com/modules). +1. Add the agent's module to your existing template. +1. Define the `coder_ai_task` resource and `coder_task` data source. +1. Wire in the module's inputs and outputs: + - Pass the prompt from the `coder_task` data source into the module. + - Pass the module's `task_app_id` output into the `coder_ai_task` resource. and you're all set to go! If you want to build your own custom agent, read up on our [Custom Agents](https://coder.com/docs/ai-coder/custom-agents) documentation. @@ -163,7 +169,7 @@ These design principles aren’t just technical guidelines; they're the lens thr ### Practical Considerations -Tasks don't expose template parameters at runtime, other than the AI Prompt. If users need to choose different compute, region, or tooling options for example, you can define workspace presets in the template and have users select a preset when starting the Task. See workspace presets for details: ../admin/templates/extending-templates/parameters#workspace-presets. +Tasks don't expose template parameters at runtime. If users need to choose different compute, region, or tooling options for example, you can define workspace presets in the template and have users select a preset when starting the Task. See workspace presets for details: ../admin/templates/extending-templates/parameters#workspace-presets. 
### Identity, Security, and Access diff --git a/docs/ai-coder/tasks-migration.md b/docs/ai-coder/tasks-migration.md new file mode 100644 index 0000000000000..6cd02ba2e7ba2 --- /dev/null +++ b/docs/ai-coder/tasks-migration.md @@ -0,0 +1,163 @@ +# Migrating Task Templates for Coder version 2.28.0 + +Prior to Coder version 2.28.0, the definition of a Coder task was different from the current definition. It required the following to be defined in the template: + +1. A Coder parameter specifically named `"AI Prompt"`, +2. A `coder_workspace_app` that runs the `coder/agentapi` binary, +3. A `coder_ai_task` resource in the template that sets `sidebar_app.id`. This was generally defined in Coder modules specific to AI Tasks. + +Note that 2 and 3 were generally handled by the `coder/agentapi` Terraform module. + +The pre-2.28.0 definition will be supported until the release of 2.29.0. You will need to update your Tasks-enabled templates to continue using Tasks after this release. + +You can view an [example migration here](https://github.com/coder/coder/pull/20420). Alternatively, follow the steps below: + +## Upgrade Steps + +1. Update the Coder Terraform provider to at least version 2.13.0: + +```diff +terraform { + required_providers { + coder = { + source = "coder/coder" +- version = "x.y.z" ++ version = ">= 2.13" + } + } +} +``` + +1. Define a `coder_ai_task` resource and `coder_task` data source in your template: + +```diff ++data "coder_task" "me" {} ++resource "coder_ai_task" "task" {} +``` + +1. Update the version of the respective AI agent module (e.g. `claude-code`) to at least 4.0.0 and provide the prompt from `data.coder_task.me.prompt` instead of the "AI Prompt" parameter. + +```diff +module "claude-code" { + source = "registry.coder.com/coder/claude-code/coder" +- version = "3.0.1" ++ version = "4.0.0" + ... +- ai_prompt = data.coder_parameter.ai_prompt.value ++ ai_prompt = data.coder_task.me.prompt +} +``` + +1. 
Add the `coder_ai_task` resource and set `app_id` to the `task_app_id` output of the Claude module. + +> [!NOTE] +> Refer to the documentation for the specific module you are using for the exact name of the output. + +```diff +resource "coder_ai_task" "task" { ++ app_id = module.claude-code.task_app_id +} +``` + +## Coder Tasks format pre-2.28 + +Below is a minimal illustrative example of a Coder Tasks template pre-2.28.0. +**Note that this is NOT a full template.** + +```hcl +terraform { + required_providers { + coder = { + source = "coder/coder" + } + } +} + +data "coder_workspace" "me" {} + +resource "coder_agent" "main" { ... } + +# The prompt is passed in via the specifically named "AI Prompt" parameter. +data "coder_parameter" "ai_prompt" { + name = "AI Prompt" + mutable = true +} + +# This coder_app is the interface to the Coder Task. +# This is assumed to be a running instance of coder/agentapi +resource "coder_app" "ai_agent" { + ... +} + +# Assuming that the below script runs `coder/agentapi` with the prompt +# defined in ARG_AI_PROMPT +resource "coder_script" "agentapi" { + agent_id = coder_agent.main.id + run_on_start = true + script = <= 2.13.0 + } + } +} + +data "coder_workspace" "me" {} + +# The prompt is now available in the coder_task data source. +data "coder_task" "me" {} + +resource "coder_agent" "main" { ... } + +# This coder_app is the interface to the Coder Task. +# This is assumed to be a running instance of coder/agentapi (for instance, started via `coder_script`). +resource "coder_app" "ai_agent" { + ... +} + +# Assuming that the below script runs `coder/agentapi` with the prompt +# defined in ARG_AI_PROMPT +resource "coder_script" "agentapi" { + agent_id = coder_agent.main.id + run_on_start = true + script = < [!NOTE] +> The `coder_ai_task` resource is not defined within the [Claude Code Module](https://registry.coder.com/modules/coder/claude-code?tab=readme). You need to define it yourself. 
```hcl -data "coder_parameter" "ai_prompt" { - name = "AI Prompt" - type = "string" +terraform { + required_providers { + coder = { + source = "coder/coder" + version = ">= 2.13" + } + } } data "coder_parameter" "setup_script" { @@ -61,12 +68,18 @@ data "coder_parameter" "setup_script" { default = "" } +data "coder_task" "me" {} + +resource "coder_ai_task" "task" { + app_id = module.claude-code.task_app_id +} + # The Claude Code module does the automatic task reporting # Other agent modules: https://registry.coder.com/modules?search=agent # Or use a custom agent: module "claude-code" { source = "registry.coder.com/coder/claude-code/coder" - version = "3.0.1" + version = "4.0.0" agent_id = coder_agent.example.id workdir = "/home/coder/project" @@ -77,7 +90,7 @@ module "claude-code" { claude_code_version = "1.0.82" # Pin to a specific version agentapi_version = "v0.6.1" - ai_prompt = data.coder_parameter.ai_prompt.value + ai_prompt = data.coder_task.me.prompt model = "sonnet" # Optional: run your pre-flight script @@ -105,16 +118,16 @@ variable "anthropic_api_key" { } ``` -> [!NOTE] -> This definition is not final and may change while Tasks is in beta. After any changes, we guarantee backwards compatibility for one minor Coder version. After that, you may need to update your template to continue using it with Tasks. - Because Tasks run unpredictable AI agents, often for background tasks, we recommend creating a separate template for Coder Tasks with limited permissions. You can always duplicate your existing template, then apply separate network policies/firewalls/permissions to the template. From there, follow the docs for one of our [built-in modules for agents](https://registry.coder.com/modules?search=tag%3Atasks) in order to add it to your template, configure your LLM provider. Alternatively, follow our guide for [custom agents](./custom-agents.md). +> [!IMPORTANT] +> Upgrading from Coder v2.27 or earlier? 
See the [Tasks Migration Guide](./tasks-migration.md) for breaking changes in v2.28.0. + ## Customizing the Task UI -The Task UI displays all workspace apps declared in a Task template. You can customize the app shown in the sidebar using the `sidebar_app.id` field on the `coder_ai_task` resource. +The Task UI displays all workspace apps declared in a Task template. You can customize the app shown in the sidebar using the `app_id` field on the `coder_ai_task` resource. If a workspace app has the special `"preview"` slug, a navbar will appear above it. This is intended for templates that let users preview a web app they’re working on. diff --git a/docs/images/aibridge/aibridge-implementation-details.png b/docs/images/aibridge/aibridge-implementation-details.png new file mode 100644 index 0000000000000..41c3c55e4aa32 Binary files /dev/null and b/docs/images/aibridge/aibridge-implementation-details.png differ diff --git a/docs/images/aibridge/openai_key_scope.png b/docs/images/aibridge/openai_key_scope.png new file mode 100644 index 0000000000000..aded76c970e4d Binary files /dev/null and b/docs/images/aibridge/openai_key_scope.png differ diff --git a/docs/install/kubernetes.md b/docs/install/kubernetes.md index 3af2d917b431d..8c2a0ed4d1d23 100644 --- a/docs/install/kubernetes.md +++ b/docs/install/kubernetes.md @@ -136,7 +136,7 @@ We support two release channels: mainline and stable - read the helm install coder coder-v2/coder \ --namespace coder \ --values values.yaml \ - --version 2.27.1 + --version 2.28.3 ``` - **OCI Registry** @@ -147,7 +147,7 @@ We support two release channels: mainline and stable - read the helm install coder oci://ghcr.io/coder/chart/coder \ --namespace coder \ --values values.yaml \ - --version 2.27.2 + --version 2.28.3 ``` - **Stable** Coder release: @@ -160,7 +160,7 @@ We support two release channels: mainline and stable - read the helm install coder coder-v2/coder \ --namespace coder \ --values values.yaml \ - --version 2.26.3 + --version 
2.27.6 ``` - **OCI Registry** @@ -171,7 +171,7 @@ We support two release channels: mainline and stable - read the helm install coder oci://ghcr.io/coder/chart/coder \ --namespace coder \ --values values.yaml \ - --version 2.26.3 + --version 2.27.6 ``` You can watch Coder start up by running `kubectl get pods -n coder`. Once Coder diff --git a/docs/install/rancher.md b/docs/install/rancher.md index aaf9c947dff37..a28e408c2e4eb 100644 --- a/docs/install/rancher.md +++ b/docs/install/rancher.md @@ -134,8 +134,8 @@ kubectl create secret generic coder-db-url -n coder \ 1. Select a Coder version: - - **Mainline**: `2.27.2` - - **Stable**: `2.26.3` + - **Mainline**: `2.28.3` + - **Stable**: `2.27.6` Learn more about release channels in the [Releases documentation](./releases/index.md). diff --git a/docs/install/releases/index.md b/docs/install/releases/index.md index afe0cf4e1656c..db870f55c4701 100644 --- a/docs/install/releases/index.md +++ b/docs/install/releases/index.md @@ -57,13 +57,13 @@ pages. 
| Release name | Release Date | Status | Latest Release | |------------------------------------------------|--------------------|------------------|----------------------------------------------------------------| -| [2.22](https://coder.com/changelog/coder-2-22) | May 16, 2025 | Not Supported | [v2.22.1](https://github.com/coder/coder/releases/tag/v2.22.1) | | [2.23](https://coder.com/changelog/coder-2-23) | June 03, 2025 | Not Supported | [v2.23.5](https://github.com/coder/coder/releases/tag/v2.23.5) | | [2.24](https://coder.com/changelog/coder-2-24) | July 01, 2025 | Not Supported | [v2.24.4](https://github.com/coder/coder/releases/tag/v2.24.4) | -| [2.25](https://coder.com/changelog/coder-2-25) | August 05, 2025 | Security Support | [v2.25.3](https://github.com/coder/coder/releases/tag/v2.25.3) | -| [2.26](https://coder.com/changelog/coder-2-26) | September 03, 2025 | Stable | [v2.26.3](https://github.com/coder/coder/releases/tag/v2.26.3) | -| [2.27](https://coder.com/changelog/coder-2-27) | October 02, 2025 | Mainline | [v2.27.2](https://github.com/coder/coder/releases/tag/v2.27.2) | -| 2.28 | | Not Released | N/A | +| [2.25](https://coder.com/changelog/coder-2-25) | August 05, 2025 | Not Supported | [v2.25.3](https://github.com/coder/coder/releases/tag/v2.25.3) | +| [2.26](https://coder.com/changelog/coder-2-26) | September 03, 2025 | Security Support | [v2.26.4](https://github.com/coder/coder/releases/tag/v2.26.4) | +| [2.27](https://coder.com/changelog/coder-2-27) | October 02, 2025 | Stable | [v2.27.6](https://github.com/coder/coder/releases/tag/v2.27.6) | +| [2.28](https://coder.com/changelog/coder-2-28) | November 04, 2025 | Mainline | [v2.28.3](https://github.com/coder/coder/releases/tag/v2.28.3) | +| 2.29 | | Not Released | N/A | > [!TIP] diff --git a/docs/manifest.json b/docs/manifest.json index 78a0d38ec949d..56b583eb284b7 100644 --- a/docs/manifest.json +++ b/docs/manifest.json @@ -396,11 +396,6 @@ "title": "Up to 3,000 Users", "description": 
"Enterprise-scale architecture recommendations for Coder deployments that support up to 3,000 users", "path": "./admin/infrastructure/validated-architectures/3k-users.md" - }, - { - "title": "Up to 10,000 Users", - "description": "Enterprise-scale architecture recommendations for Coder deployments that support up to 10,000 users", - "path": "./admin/infrastructure/validated-architectures/10k-users.md" } ] }, @@ -437,6 +432,11 @@ "description": "Configure Google as an OIDC provider", "path": "./admin/users/oidc-auth/google.md" }, + { + "title": "Microsoft", + "description": "Configure Microsoft Entra ID as an OIDC provider", + "path": "./admin/users/oidc-auth/microsoft.md" + }, { "title": "Configure OIDC refresh tokens", "description": "How to configure OIDC refresh tokens", @@ -905,6 +905,12 @@ "path": "./ai-coder/custom-agents.md", "state": ["beta"] }, + { + "title": "Tasks Migration Guide", + "description": "Changes to Coder Tasks made in v2.28", + "path": "./ai-coder/tasks-migration.md", + "state": ["beta"] + }, { "title": "Security \u0026 Boundaries", "description": "Learn about security and boundaries when running AI coding agents in Coder", @@ -927,9 +933,37 @@ { "title": "AI Bridge", "description": "Centralized LLM and MCP proxy for platform teams", - "path": "./ai-coder/ai-bridge.md", + "path": "./ai-coder/ai-bridge/index.md", "icon_path": "./images/icons/api.svg", - "state": ["premium", "early access"] + "state": ["premium", "early access"], + "children": [ + { + "title": "Setup", + "description": "How to set up and configure AI Bridge", + "path": "./ai-coder/ai-bridge/setup.md" + }, + { + "title": "Client Configuration", + "description": "How to configure your AI coding tools to use AI Bridge", + "path": "./ai-coder/ai-bridge/client-config.md" + }, + { + "title": "MCP Tools Injection", + "description": "How to configure MCP servers for tools injection through AI Bridge", + "path": "./ai-coder/ai-bridge/mcp.md", + "state": ["early access"] + }, + { + 
"title": "Monitoring", + "description": "How to monitor AI Bridge", + "path": "./ai-coder/ai-bridge/monitoring.md" + }, + { + "title": "Reference", + "description": "Technical reference for AI Bridge", + "path": "./ai-coder/ai-bridge/reference.md" + } + ] }, { "title": "Tasks CLI", @@ -1096,6 +1130,10 @@ "title": "General", "path": "./reference/api/general.md" }, + { + "title": "AI Bridge", + "path": "./reference/api/aibridge.md" + }, { "title": "Agents", "path": "./reference/api/agents.md" @@ -1136,6 +1174,10 @@ "title": "Git", "path": "./reference/api/git.md" }, + { + "title": "InitScript", + "path": "./reference/api/initscript.md" + }, { "title": "Insights", "path": "./reference/api/insights.md" @@ -1144,6 +1186,10 @@ "title": "Members", "path": "./reference/api/members.md" }, + { + "title": "Notifications", + "path": "./reference/api/notifications.md" + }, { "title": "Organizations", "path": "./reference/api/organizations.md" @@ -1152,10 +1198,22 @@ "title": "PortSharing", "path": "./reference/api/portsharing.md" }, + { + "title": "Prebuilds", + "path": "./reference/api/prebuilds.md" + }, + { + "title": "Provisioning", + "path": "./reference/api/provisioning.md" + }, { "title": "Schemas", "path": "./reference/api/schemas.md" }, + { + "title": "Tasks", + "path": "./reference/api/tasks.md" + }, { "title": "Templates", "path": "./reference/api/templates.md" @@ -1180,6 +1238,21 @@ "path": "./reference/cli/index.md", "icon_path": "./images/icons/terminal.svg", "children": [ + { + "title": "aibridge", + "description": "Manage AI Bridge.", + "path": "reference/cli/aibridge.md" + }, + { + "title": "aibridge interceptions", + "description": "Manage AI Bridge interceptions.", + "path": "reference/cli/aibridge_interceptions.md" + }, + { + "title": "aibridge interceptions list", + "description": "List AI Bridge interceptions as JSON.", + "path": "reference/cli/aibridge_interceptions_list.md" + }, { "title": "autoupdate", "description": "Toggle auto-update policy for a 
workspace", @@ -1698,6 +1771,41 @@ "description": "Generate a support bundle to troubleshoot issues connecting to a workspace.", "path": "reference/cli/support_bundle.md" }, + { + "title": "task", + "description": "Manage tasks", + "path": "reference/cli/task.md" + }, + { + "title": "task create", + "description": "Create a task", + "path": "reference/cli/task_create.md" + }, + { + "title": "task delete", + "description": "Delete tasks", + "path": "reference/cli/task_delete.md" + }, + { + "title": "task list", + "description": "List tasks", + "path": "reference/cli/task_list.md" + }, + { + "title": "task logs", + "description": "Show a task's logs", + "path": "reference/cli/task_logs.md" + }, + { + "title": "task send", + "description": "Send input to a task", + "path": "reference/cli/task_send.md" + }, + { + "title": "task status", + "description": "Show the status of a task.", + "path": "reference/cli/task_status.md" + }, { "title": "templates", "description": "Manage templates", diff --git a/docs/reference/api/aibridge.md b/docs/reference/api/aibridge.md index d2be736eb32b2..9969a51d4adc7 100644 --- a/docs/reference/api/aibridge.md +++ b/docs/reference/api/aibridge.md @@ -1,17 +1,17 @@ -# AIBridge +# AI Bridge -## List AIBridge interceptions +## List AI Bridge interceptions ### Code samples ```shell # Example request using curl -curl -X GET http://coder-server:8080/api/v2/api/experimental/aibridge/interceptions \ +curl -X GET http://coder-server:8080/api/v2/aibridge/interceptions \ -H 'Accept: application/json' \ -H 'Coder-Session-Token: API_KEY' ``` -`GET /api/experimental/aibridge/interceptions` +`GET /aibridge/interceptions` ### Parameters @@ -31,6 +31,7 @@ curl -X GET http://coder-server:8080/api/v2/api/experimental/aibridge/intercepti "count": 0, "results": [ { + "api_key_id": "string", "ended_at": "2019-08-24T14:15:22Z", "id": "497f6eca-6276-4993-bfeb-53cbbbba6f08", "initiator": { diff --git a/docs/reference/api/builds.md b/docs/reference/api/builds.md 
index ea207f84eab39..dd7323886e179 100644 --- a/docs/reference/api/builds.md +++ b/docs/reference/api/builds.md @@ -27,7 +27,6 @@ curl -X GET http://coder-server:8080/api/v2/users/{user}/workspace/{workspacenam ```json { - "ai_task_sidebar_app_id": "852ddafb-2cb9-4cbf-8a8c-075389fb3d3d", "build_number": 0, "created_at": "2019-08-24T14:15:22Z", "daily_cost": 0, @@ -222,7 +221,6 @@ curl -X GET http://coder-server:8080/api/v2/users/{user}/workspace/{workspacenam } ], "status": "pending", - "task_app_id": "ca438251-3e16-4fae-b9ab-dd3c237c3735", "template_version_id": "0ba39c92-1f1b-4c32-aa3e-9925d7713eb1", "template_version_name": "string", "template_version_preset_id": "512a53a7-30da-446e-a1fc-713c630baff1", @@ -269,7 +267,6 @@ curl -X GET http://coder-server:8080/api/v2/workspacebuilds/{workspacebuild} \ ```json { - "ai_task_sidebar_app_id": "852ddafb-2cb9-4cbf-8a8c-075389fb3d3d", "build_number": 0, "created_at": "2019-08-24T14:15:22Z", "daily_cost": 0, @@ -464,7 +461,6 @@ curl -X GET http://coder-server:8080/api/v2/workspacebuilds/{workspacebuild} \ } ], "status": "pending", - "task_app_id": "ca438251-3e16-4fae-b9ab-dd3c237c3735", "template_version_id": "0ba39c92-1f1b-4c32-aa3e-9925d7713eb1", "template_version_name": "string", "template_version_preset_id": "512a53a7-30da-446e-a1fc-713c630baff1", @@ -1002,7 +998,6 @@ curl -X GET http://coder-server:8080/api/v2/workspacebuilds/{workspacebuild}/sta ```json { - "ai_task_sidebar_app_id": "852ddafb-2cb9-4cbf-8a8c-075389fb3d3d", "build_number": 0, "created_at": "2019-08-24T14:15:22Z", "daily_cost": 0, @@ -1197,7 +1192,6 @@ curl -X GET http://coder-server:8080/api/v2/workspacebuilds/{workspacebuild}/sta } ], "status": "pending", - "task_app_id": "ca438251-3e16-4fae-b9ab-dd3c237c3735", "template_version_id": "0ba39c92-1f1b-4c32-aa3e-9925d7713eb1", "template_version_name": "string", "template_version_preset_id": "512a53a7-30da-446e-a1fc-713c630baff1", @@ -1317,7 +1311,6 @@ curl -X GET 
http://coder-server:8080/api/v2/workspaces/{workspace}/builds \ ```json [ { - "ai_task_sidebar_app_id": "852ddafb-2cb9-4cbf-8a8c-075389fb3d3d", "build_number": 0, "created_at": "2019-08-24T14:15:22Z", "daily_cost": 0, @@ -1512,7 +1505,6 @@ curl -X GET http://coder-server:8080/api/v2/workspaces/{workspace}/builds \ } ], "status": "pending", - "task_app_id": "ca438251-3e16-4fae-b9ab-dd3c237c3735", "template_version_id": "0ba39c92-1f1b-4c32-aa3e-9925d7713eb1", "template_version_name": "string", "template_version_preset_id": "512a53a7-30da-446e-a1fc-713c630baff1", @@ -1540,12 +1532,11 @@ Status Code **200** | Name | Type | Required | Restrictions | Description | |----------------------------------|--------------------------------------------------------------------------------------------------------|----------|--------------|------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------| | `[array item]` | array | false | | | -| `» ai_task_sidebar_app_id` | string(uuid) | false | | Deprecated: This field has been replaced with `TaskAppID` | | `» build_number` | integer | false | | | | `» created_at` | string(date-time) | false | | | | `» daily_cost` | integer | false | | | | `» deadline` | string(date-time) | false | | | -| `» has_ai_task` | boolean | false | | | +| `» has_ai_task` | boolean | false | | Deprecated: This field has been deprecated in favor of Task WorkspaceID. 
| | `» has_external_agent` | boolean | false | | | | `» id` | string(uuid) | false | | | | `» initiator_id` | string(uuid) | false | | | @@ -1691,7 +1682,6 @@ Status Code **200** | `»» type` | string | false | | | | `»» workspace_transition` | [codersdk.WorkspaceTransition](schemas.md#codersdkworkspacetransition) | false | | | | `» status` | [codersdk.WorkspaceStatus](schemas.md#codersdkworkspacestatus) | false | | | -| `» task_app_id` | string(uuid) | false | | | | `» template_version_id` | string(uuid) | false | | | | `» template_version_name` | string | false | | | | `» template_version_preset_id` | string(uuid) | false | | | @@ -1818,7 +1808,6 @@ curl -X POST http://coder-server:8080/api/v2/workspaces/{workspace}/builds \ ```json { - "ai_task_sidebar_app_id": "852ddafb-2cb9-4cbf-8a8c-075389fb3d3d", "build_number": 0, "created_at": "2019-08-24T14:15:22Z", "daily_cost": 0, @@ -2013,7 +2002,6 @@ curl -X POST http://coder-server:8080/api/v2/workspaces/{workspace}/builds \ } ], "status": "pending", - "task_app_id": "ca438251-3e16-4fae-b9ab-dd3c237c3735", "template_version_id": "0ba39c92-1f1b-4c32-aa3e-9925d7713eb1", "template_version_name": "string", "template_version_preset_id": "512a53a7-30da-446e-a1fc-713c630baff1", diff --git a/docs/reference/api/enterprise.md b/docs/reference/api/enterprise.md index 131223e38e5f4..dfdeeb1756ef7 100644 --- a/docs/reference/api/enterprise.md +++ b/docs/reference/api/enterprise.md @@ -30,6 +30,7 @@ curl -X GET http://coder-server:8080/api/v2/.well-known/oauth-authorization-serv "response_types_supported": [ "string" ], + "revocation_endpoint": "string", "scopes_supported": [ "string" ], @@ -3787,6 +3788,49 @@ Status Code **200** To perform this operation, you must be authenticated. [Learn more](authentication.md). 
+## Invalidate presets for template + +### Code samples + +```shell +# Example request using curl +curl -X POST http://coder-server:8080/api/v2/templates/{template}/prebuilds/invalidate \ + -H 'Accept: application/json' \ + -H 'Coder-Session-Token: API_KEY' +``` + +`POST /templates/{template}/prebuilds/invalidate` + +### Parameters + +| Name | In | Type | Required | Description | +|------------|------|--------------|----------|-------------| +| `template` | path | string(uuid) | true | Template ID | + +### Example responses + +> 200 Response + +```json +{ + "invalidated": [ + { + "preset_name": "string", + "template_name": "string", + "template_version_name": "string" + } + ] +} +``` + +### Responses + +| Status | Meaning | Description | Schema | +|--------|---------------------------------------------------------|-------------|------------------------------------------------------------------------------------| +| 200 | [OK](https://tools.ietf.org/html/rfc7231#section-6.3.1) | OK | [codersdk.InvalidatePresetsResponse](schemas.md#codersdkinvalidatepresetsresponse) | + +To perform this operation, you must be authenticated. [Learn more](authentication.md). + ## Get user quiet hours schedule ### Code samples diff --git a/docs/reference/api/experimental.md b/docs/reference/api/experimental.md deleted file mode 100644 index 34ad224bd3538..0000000000000 --- a/docs/reference/api/experimental.md +++ /dev/null @@ -1,204 +0,0 @@ -# Experimental - -## List AI tasks - -### Code samples - -```shell -# Example request using curl -curl -X GET http://coder-server:8080/api/v2/api/experimental/tasks \ - -H 'Accept: */*' \ - -H 'Coder-Session-Token: API_KEY' -``` - -`GET /api/experimental/tasks` - -### Parameters - -| Name | In | Type | Required | Description | -|------|-------|--------|----------|---------------------------------------------------------------------------------------------------------------------| -| `q` | query | string | false | Search query for filtering tasks. 
Supports: owner:, organization:, status: | - -### Example responses - -> 200 Response - -### Responses - -| Status | Meaning | Description | Schema | -|--------|---------------------------------------------------------|-------------|--------------------------------------------------------------------| -| 200 | [OK](https://tools.ietf.org/html/rfc7231#section-6.3.1) | OK | [codersdk.TasksListResponse](schemas.md#codersdktaskslistresponse) | - -To perform this operation, you must be authenticated. [Learn more](authentication.md). - -## Create a new AI task - -### Code samples - -```shell -# Example request using curl -curl -X POST http://coder-server:8080/api/v2/api/experimental/tasks/{user} \ - -H 'Content-Type: application/json' \ - -H 'Accept: */*' \ - -H 'Coder-Session-Token: API_KEY' -``` - -`POST /api/experimental/tasks/{user}` - -> Body parameter - -```json -{ - "input": "string", - "name": "string", - "template_version_id": "0ba39c92-1f1b-4c32-aa3e-9925d7713eb1", - "template_version_preset_id": "512a53a7-30da-446e-a1fc-713c630baff1" -} -``` - -### Parameters - -| Name | In | Type | Required | Description | -|--------|------|--------------------------------------------------------------------|----------|-------------------------------------------------------| -| `user` | path | string | true | Username, user ID, or 'me' for the authenticated user | -| `body` | body | [codersdk.CreateTaskRequest](schemas.md#codersdkcreatetaskrequest) | true | Create task request | - -### Example responses - -> 201 Response - -### Responses - -| Status | Meaning | Description | Schema | -|--------|--------------------------------------------------------------|-------------|------------------------------------------| -| 201 | [Created](https://tools.ietf.org/html/rfc7231#section-6.3.2) | Created | [codersdk.Task](schemas.md#codersdktask) | - -To perform this operation, you must be authenticated. [Learn more](authentication.md). 
- -## Get AI task by ID - -### Code samples - -```shell -# Example request using curl -curl -X GET http://coder-server:8080/api/v2/api/experimental/tasks/{user}/{task} \ - -H 'Accept: */*' \ - -H 'Coder-Session-Token: API_KEY' -``` - -`GET /api/experimental/tasks/{user}/{task}` - -### Parameters - -| Name | In | Type | Required | Description | -|--------|------|--------------|----------|-------------------------------------------------------| -| `user` | path | string | true | Username, user ID, or 'me' for the authenticated user | -| `task` | path | string(uuid) | true | Task ID | - -### Example responses - -> 200 Response - -### Responses - -| Status | Meaning | Description | Schema | -|--------|---------------------------------------------------------|-------------|------------------------------------------| -| 200 | [OK](https://tools.ietf.org/html/rfc7231#section-6.3.1) | OK | [codersdk.Task](schemas.md#codersdktask) | - -To perform this operation, you must be authenticated. [Learn more](authentication.md). - -## Delete AI task by ID - -### Code samples - -```shell -# Example request using curl -curl -X DELETE http://coder-server:8080/api/v2/api/experimental/tasks/{user}/{task} \ - -H 'Coder-Session-Token: API_KEY' -``` - -`DELETE /api/experimental/tasks/{user}/{task}` - -### Parameters - -| Name | In | Type | Required | Description | -|--------|------|--------------|----------|-------------------------------------------------------| -| `user` | path | string | true | Username, user ID, or 'me' for the authenticated user | -| `task` | path | string(uuid) | true | Task ID | - -### Responses - -| Status | Meaning | Description | Schema | -|--------|---------------------------------------------------------------|-------------------------|--------| -| 202 | [Accepted](https://tools.ietf.org/html/rfc7231#section-6.3.3) | Task deletion initiated | | - -To perform this operation, you must be authenticated. [Learn more](authentication.md). 
- -## Get AI task logs - -### Code samples - -```shell -# Example request using curl -curl -X GET http://coder-server:8080/api/v2/api/experimental/tasks/{user}/{task}/logs \ - -H 'Accept: */*' \ - -H 'Coder-Session-Token: API_KEY' -``` - -`GET /api/experimental/tasks/{user}/{task}/logs` - -### Parameters - -| Name | In | Type | Required | Description | -|--------|------|--------------|----------|-------------------------------------------------------| -| `user` | path | string | true | Username, user ID, or 'me' for the authenticated user | -| `task` | path | string(uuid) | true | Task ID | - -### Example responses - -> 200 Response - -### Responses - -| Status | Meaning | Description | Schema | -|--------|---------------------------------------------------------|-------------|------------------------------------------------------------------| -| 200 | [OK](https://tools.ietf.org/html/rfc7231#section-6.3.1) | OK | [codersdk.TaskLogsResponse](schemas.md#codersdktasklogsresponse) | - -To perform this operation, you must be authenticated. [Learn more](authentication.md). 
- -## Send input to AI task - -### Code samples - -```shell -# Example request using curl -curl -X POST http://coder-server:8080/api/v2/api/experimental/tasks/{user}/{task}/send \ - -H 'Content-Type: application/json' \ - -H 'Coder-Session-Token: API_KEY' -``` - -`POST /api/experimental/tasks/{user}/{task}/send` - -> Body parameter - -```json -{ - "input": "string" -} -``` - -### Parameters - -| Name | In | Type | Required | Description | -|--------|------|----------------------------------------------------------------|----------|-------------------------------------------------------| -| `user` | path | string | true | Username, user ID, or 'me' for the authenticated user | -| `task` | path | string(uuid) | true | Task ID | -| `body` | body | [codersdk.TaskSendRequest](schemas.md#codersdktasksendrequest) | true | Task input request | - -### Responses - -| Status | Meaning | Description | Schema | -|--------|-----------------------------------------------------------------|-------------------------|--------| -| 204 | [No Content](https://tools.ietf.org/html/rfc7231#section-6.3.5) | Input sent successfully | | - -To perform this operation, you must be authenticated. [Learn more](authentication.md). 
diff --git a/docs/reference/api/general.md b/docs/reference/api/general.md index 5718979ae86c2..2f30b4c1d67e2 100644 --- a/docs/reference/api/general.md +++ b/docs/reference/api/general.md @@ -175,10 +175,12 @@ curl -X GET http://coder-server:8080/api/v2/deployment/config \ "small_fast_model": "string" }, "enabled": true, + "inject_coder_mcp_tools": true, "openai": { "base_url": "string", "key": "string" - } + }, + "retention": 0 } }, "allow_workspace_renames": true, diff --git a/docs/reference/api/schemas.md b/docs/reference/api/schemas.md index 796b4811cc4c1..7f88558c75503 100644 --- a/docs/reference/api/schemas.md +++ b/docs/reference/api/schemas.md @@ -389,26 +389,31 @@ "small_fast_model": "string" }, "enabled": true, + "inject_coder_mcp_tools": true, "openai": { "base_url": "string", "key": "string" - } + }, + "retention": 0 } ``` ### Properties -| Name | Type | Required | Restrictions | Description | -|-------------|----------------------------------------------------------------------|----------|--------------|-------------| -| `anthropic` | [codersdk.AIBridgeAnthropicConfig](#codersdkaibridgeanthropicconfig) | false | | | -| `bedrock` | [codersdk.AIBridgeBedrockConfig](#codersdkaibridgebedrockconfig) | false | | | -| `enabled` | boolean | false | | | -| `openai` | [codersdk.AIBridgeOpenAIConfig](#codersdkaibridgeopenaiconfig) | false | | | +| Name | Type | Required | Restrictions | Description | +|--------------------------|----------------------------------------------------------------------|----------|--------------|-------------| +| `anthropic` | [codersdk.AIBridgeAnthropicConfig](#codersdkaibridgeanthropicconfig) | false | | | +| `bedrock` | [codersdk.AIBridgeBedrockConfig](#codersdkaibridgebedrockconfig) | false | | | +| `enabled` | boolean | false | | | +| `inject_coder_mcp_tools` | boolean | false | | | +| `openai` | [codersdk.AIBridgeOpenAIConfig](#codersdkaibridgeopenaiconfig) | false | | | +| `retention` | integer | false | | | ## 
codersdk.AIBridgeInterception ```json { + "api_key_id": "string", "ended_at": "2019-08-24T14:15:22Z", "id": "497f6eca-6276-4993-bfeb-53cbbbba6f08", "initiator": { @@ -475,6 +480,7 @@ | Name | Type | Required | Restrictions | Description | |--------------------|---------------------------------------------------------------------|----------|--------------|-------------| +| `api_key_id` | string | false | | | | `ended_at` | string | false | | | | `id` | string | false | | | | `initiator` | [codersdk.MinimalUser](#codersdkminimaluser) | false | | | @@ -494,6 +500,7 @@ "count": 0, "results": [ { + "api_key_id": "string", "ended_at": "2019-08-24T14:15:22Z", "id": "497f6eca-6276-4993-bfeb-53cbbbba6f08", "initiator": { @@ -692,10 +699,12 @@ "small_fast_model": "string" }, "enabled": true, + "inject_coder_mcp_tools": true, "openai": { "base_url": "string", "key": "string" - } + }, + "retention": 0 } } ``` @@ -2076,6 +2085,7 @@ AuthorizationObject can represent a "set" of objects, such as: all workspaces in ```json { + "display_name": "string", "input": "string", "name": "string", "template_version_id": "0ba39c92-1f1b-4c32-aa3e-9925d7713eb1", @@ -2087,6 +2097,7 @@ AuthorizationObject can represent a "set" of objects, such as: all workspaces in | Name | Type | Required | Restrictions | Description | |------------------------------|--------|----------|--------------|-------------| +| `display_name` | string | false | | | | `input` | string | false | | | | `name` | string | false | | | | `template_version_id` | string | false | | | @@ -2848,10 +2859,12 @@ CreateWorkspaceRequest provides options for creating a new workspace. Only one o "small_fast_model": "string" }, "enabled": true, + "inject_coder_mcp_tools": true, "openai": { "base_url": "string", "key": "string" - } + }, + "retention": 0 } }, "allow_workspace_renames": true, @@ -3362,10 +3375,12 @@ CreateWorkspaceRequest provides options for creating a new workspace. 
Only one o "small_fast_model": "string" }, "enabled": true, + "inject_coder_mcp_tools": true, "openai": { "base_url": "string", "key": "string" - } + }, + "retention": 0 } }, "allow_workspace_renames": true, @@ -4049,17 +4064,17 @@ CreateWorkspaceRequest provides options for creating a new workspace. Only one o #### Enumerated Values -| Value | -|------------------------| -| `example` | -| `auto-fill-parameters` | -| `notifications` | -| `workspace-usage` | -| `web-push` | -| `oauth2` | -| `mcp-server-http` | -| `workspace-sharing` | -| `aibridge` | +| Value | +|-----------------------------| +| `example` | +| `auto-fill-parameters` | +| `notifications` | +| `workspace-usage` | +| `web-push` | +| `oauth2` | +| `mcp-server-http` | +| `workspace-sharing` | +| `terraform-directory-reuse` | ## codersdk.ExternalAPIKeyScopes @@ -4707,6 +4722,44 @@ Only certain features set these fields: - FeatureManagedAgentLimit| | `day` | | `week` | +## codersdk.InvalidatePresetsResponse + +```json +{ + "invalidated": [ + { + "preset_name": "string", + "template_name": "string", + "template_version_name": "string" + } + ] +} +``` + +### Properties + +| Name | Type | Required | Restrictions | Description | +|---------------|-------------------------------------------------------------------|----------|--------------|-------------| +| `invalidated` | array of [codersdk.InvalidatedPreset](#codersdkinvalidatedpreset) | false | | | + +## codersdk.InvalidatedPreset + +```json +{ + "preset_name": "string", + "template_name": "string", + "template_version_name": "string" +} +``` + +### Properties + +| Name | Type | Required | Restrictions | Description | +|-------------------------|--------|----------|--------------|-------------| +| `preset_name` | string | false | | | +| `template_name` | string | false | | | +| `template_version_name` | string | false | | | + ## codersdk.IssueReconnectingPTYSignedTokenRequest ```json @@ -5323,6 +5376,7 @@ Only certain features set these fields: - 
FeatureManagedAgentLimit| "response_types_supported": [ "string" ], + "revocation_endpoint": "string", "scopes_supported": [ "string" ], @@ -5343,6 +5397,7 @@ Only certain features set these fields: - FeatureManagedAgentLimit| | `issuer` | string | false | | | | `registration_endpoint` | string | false | | | | `response_types_supported` | array of string | false | | | +| `revocation_endpoint` | string | false | | | | `scopes_supported` | array of string | false | | | | `token_endpoint` | string | false | | | | `token_endpoint_auth_methods_supported` | array of string | false | | | @@ -7752,6 +7807,7 @@ Only certain features set these fields: - FeatureManagedAgentLimit| "timestamp": "2019-08-24T14:15:22Z", "uri": "string" }, + "display_name": "string", "id": "497f6eca-6276-4993-bfeb-53cbbbba6f08", "initial_prompt": "string", "name": "string", @@ -7795,6 +7851,7 @@ Only certain features set these fields: - FeatureManagedAgentLimit| |-----------------------------|----------------------------------------------------------------------|----------|--------------|-------------| | `created_at` | string | false | | | | `current_state` | [codersdk.TaskStateEntry](#codersdktaskstateentry) | false | | | +| `display_name` | string | false | | | | `id` | string | false | | | | `initial_prompt` | string | false | | | | `name` | string | false | | | @@ -7979,6 +8036,7 @@ Only certain features set these fields: - FeatureManagedAgentLimit| "timestamp": "2019-08-24T14:15:22Z", "uri": "string" }, + "display_name": "string", "id": "497f6eca-6276-4993-bfeb-53cbbbba6f08", "initial_prompt": "string", "name": "string", @@ -8109,7 +8167,8 @@ Only certain features set these fields: - FeatureManagedAgentLimit| "time_til_dormant_autodelete_ms": 0, "time_til_dormant_ms": 0, "updated_at": "2019-08-24T14:15:22Z", - "use_classic_parameter_flow": true + "use_classic_parameter_flow": true, + "use_terraform_workspace_cache": true } ``` @@ -8150,6 +8209,7 @@ Only certain features set these fields: - 
FeatureManagedAgentLimit| | `time_til_dormant_ms` | integer | false | | | | `updated_at` | string | false | | | | `use_classic_parameter_flow` | boolean | false | | | +| `use_terraform_workspace_cache` | boolean | false | | | #### Enumerated Values @@ -9143,6 +9203,20 @@ Restarts will only happen on weekdays in this list on weeks which line up with W |---------|-----------------|----------|--------------|-------------| | `roles` | array of string | false | | | +## codersdk.UpdateTaskInputRequest + +```json +{ + "input": "string" +} +``` + +### Properties + +| Name | Type | Required | Restrictions | Description | +|---------|--------|----------|--------------|-------------| +| `input` | string | false | | | + ## codersdk.UpdateTemplateACL ```json @@ -9201,7 +9275,8 @@ Restarts will only happen on weekdays in this list on weeks which line up with W "time_til_dormant_ms": 0, "update_workspace_dormant_at": true, "update_workspace_last_used_at": true, - "use_classic_parameter_flow": true + "use_classic_parameter_flow": true, + "use_terraform_workspace_cache": true } ``` @@ -9231,6 +9306,7 @@ Restarts will only happen on weekdays in this list on weeks which line up with W | `update_workspace_dormant_at` | boolean | false | | Update workspace dormant at updates the dormant_at field of workspaces spawned from the template. This is useful for preventing dormant workspaces being immediately deleted when updating the dormant_ttl field to a new, shorter value. | | `update_workspace_last_used_at` | boolean | false | | Update workspace last used at updates the last_used_at field of workspaces spawned from the template. This is useful for preventing workspaces being immediately locked when updating the inactivity_ttl field to a new, shorter value. | | `use_classic_parameter_flow` | boolean | false | | Use classic parameter flow is a flag that switches the default behavior to use the classic parameter flow when creating a workspace. 
This only affects deployments with the experiment "dynamic-parameters" enabled. This setting will live for a period after the experiment is made the default. An "opt-out" is present in case the new feature breaks some existing templates. | +| `use_terraform_workspace_cache` | boolean | false | | Use terraform workspace cache allows optionally specifying whether to use cached terraform directories for workspaces created from this template. This field only applies when the correct experiment is enabled. This field is subject to being removed in the future. | ## codersdk.UpdateUserAppearanceSettingsRequest @@ -9282,6 +9358,20 @@ Restarts will only happen on weekdays in this list on weeks which line up with W | `old_password` | string | false | | | | `password` | string | true | | | +## codersdk.UpdateUserPreferenceSettingsRequest + +```json +{ + "task_notification_alert_dismissed": true +} +``` + +### Properties + +| Name | Type | Required | Restrictions | Description | +|-------------------------------------|---------|----------|--------------|-------------| +| `task_notification_alert_dismissed` | boolean | false | | | + ## codersdk.UpdateUserProfileRequest ```json @@ -9770,6 +9860,20 @@ If the schedule is empty, the user will be updated to use the default schedule.| | `name` | string | false | | | | `value` | string | false | | | +## codersdk.UserPreferenceSettings + +```json +{ + "task_notification_alert_dismissed": true +} +``` + +### Properties + +| Name | Type | Required | Restrictions | Description | +|-------------------------------------|---------|----------|--------------|-------------| +| `task_notification_alert_dismissed` | boolean | false | | | + ## codersdk.UserQuietHoursScheduleConfig ```json @@ -9970,7 +10074,6 @@ If the schedule is empty, the user will be updated to use the default schedule.| "workspace_id": "0967198e-ec7b-4c6b-b4d3-f71244cadbe9" }, "latest_build": { - "ai_task_sidebar_app_id": "852ddafb-2cb9-4cbf-8a8c-075389fb3d3d", "build_number": 
0, "created_at": "2019-08-24T14:15:22Z", "daily_cost": 0, @@ -10165,7 +10268,6 @@ If the schedule is empty, the user will be updated to use the default schedule.| } ], "status": "pending", - "task_app_id": "ca438251-3e16-4fae-b9ab-dd3c237c3735", "template_version_id": "0ba39c92-1f1b-4c32-aa3e-9925d7713eb1", "template_version_name": "string", "template_version_preset_id": "512a53a7-30da-446e-a1fc-713c630baff1", @@ -10185,6 +10287,10 @@ If the schedule is empty, the user will be updated to use the default schedule.| "owner_avatar_url": "string", "owner_id": "8826ee2e-7933-4665-aef2-2393f84a0d05", "owner_name": "string", + "task_id": { + "uuid": "string", + "valid": true + }, "template_active_version_id": "b0da9c29-67d8-4c87-888c-bafe356f7f3c", "template_allow_user_cancel_workspace_jobs": true, "template_display_name": "string", @@ -10223,6 +10329,7 @@ If the schedule is empty, the user will be updated to use the default schedule.| | `owner_avatar_url` | string | false | | | | `owner_id` | string | false | | | | `owner_name` | string | false | | Owner name is the username of the owner of the workspace. | +| `task_id` | [uuid.NullUUID](#uuidnulluuid) | false | | Task ID if set, indicates that the workspace is relevant to the given codersdk.Task. 
| | `template_active_version_id` | string | false | | | | `template_allow_user_cancel_workspace_jobs` | boolean | false | | | | `template_display_name` | string | false | | | @@ -11140,7 +11247,6 @@ If the schedule is empty, the user will be updated to use the default schedule.| ```json { - "ai_task_sidebar_app_id": "852ddafb-2cb9-4cbf-8a8c-075389fb3d3d", "build_number": 0, "created_at": "2019-08-24T14:15:22Z", "daily_cost": 0, @@ -11335,7 +11441,6 @@ If the schedule is empty, the user will be updated to use the default schedule.| } ], "status": "pending", - "task_app_id": "ca438251-3e16-4fae-b9ab-dd3c237c3735", "template_version_id": "0ba39c92-1f1b-4c32-aa3e-9925d7713eb1", "template_version_name": "string", "template_version_preset_id": "512a53a7-30da-446e-a1fc-713c630baff1", @@ -11351,35 +11456,33 @@ If the schedule is empty, the user will be updated to use the default schedule.| ### Properties -| Name | Type | Required | Restrictions | Description | -|------------------------------|-------------------------------------------------------------------|----------|--------------|---------------------------------------------------------------------| -| `ai_task_sidebar_app_id` | string | false | | Deprecated: This field has been replaced with `TaskAppID` | -| `build_number` | integer | false | | | -| `created_at` | string | false | | | -| `daily_cost` | integer | false | | | -| `deadline` | string | false | | | -| `has_ai_task` | boolean | false | | | -| `has_external_agent` | boolean | false | | | -| `id` | string | false | | | -| `initiator_id` | string | false | | | -| `initiator_name` | string | false | | | -| `job` | [codersdk.ProvisionerJob](#codersdkprovisionerjob) | false | | | -| `matched_provisioners` | [codersdk.MatchedProvisioners](#codersdkmatchedprovisioners) | false | | | -| `max_deadline` | string | false | | | -| `reason` | [codersdk.BuildReason](#codersdkbuildreason) | false | | | -| `resources` | array of 
[codersdk.WorkspaceResource](#codersdkworkspaceresource) | false | | | -| `status` | [codersdk.WorkspaceStatus](#codersdkworkspacestatus) | false | | | -| `task_app_id` | string | false | | | -| `template_version_id` | string | false | | | -| `template_version_name` | string | false | | | -| `template_version_preset_id` | string | false | | | -| `transition` | [codersdk.WorkspaceTransition](#codersdkworkspacetransition) | false | | | -| `updated_at` | string | false | | | -| `workspace_id` | string | false | | | -| `workspace_name` | string | false | | | -| `workspace_owner_avatar_url` | string | false | | | -| `workspace_owner_id` | string | false | | | -| `workspace_owner_name` | string | false | | Workspace owner name is the username of the owner of the workspace. | +| Name | Type | Required | Restrictions | Description | +|------------------------------|-------------------------------------------------------------------|----------|--------------|--------------------------------------------------------------------------| +| `build_number` | integer | false | | | +| `created_at` | string | false | | | +| `daily_cost` | integer | false | | | +| `deadline` | string | false | | | +| `has_ai_task` | boolean | false | | Deprecated: This field has been deprecated in favor of Task WorkspaceID. 
| +| `has_external_agent` | boolean | false | | | +| `id` | string | false | | | +| `initiator_id` | string | false | | | +| `initiator_name` | string | false | | | +| `job` | [codersdk.ProvisionerJob](#codersdkprovisionerjob) | false | | | +| `matched_provisioners` | [codersdk.MatchedProvisioners](#codersdkmatchedprovisioners) | false | | | +| `max_deadline` | string | false | | | +| `reason` | [codersdk.BuildReason](#codersdkbuildreason) | false | | | +| `resources` | array of [codersdk.WorkspaceResource](#codersdkworkspaceresource) | false | | | +| `status` | [codersdk.WorkspaceStatus](#codersdkworkspacestatus) | false | | | +| `template_version_id` | string | false | | | +| `template_version_name` | string | false | | | +| `template_version_preset_id` | string | false | | | +| `transition` | [codersdk.WorkspaceTransition](#codersdkworkspacetransition) | false | | | +| `updated_at` | string | false | | | +| `workspace_id` | string | false | | | +| `workspace_name` | string | false | | | +| `workspace_owner_avatar_url` | string | false | | | +| `workspace_owner_id` | string | false | | | +| `workspace_owner_name` | string | false | | Workspace owner name is the username of the owner of the workspace. 
| #### Enumerated Values @@ -11981,7 +12084,6 @@ If the schedule is empty, the user will be updated to use the default schedule.| "workspace_id": "0967198e-ec7b-4c6b-b4d3-f71244cadbe9" }, "latest_build": { - "ai_task_sidebar_app_id": "852ddafb-2cb9-4cbf-8a8c-075389fb3d3d", "build_number": 0, "created_at": "2019-08-24T14:15:22Z", "daily_cost": 0, @@ -12159,7 +12261,6 @@ If the schedule is empty, the user will be updated to use the default schedule.| } ], "status": "pending", - "task_app_id": "ca438251-3e16-4fae-b9ab-dd3c237c3735", "template_version_id": "0ba39c92-1f1b-4c32-aa3e-9925d7713eb1", "template_version_name": "string", "template_version_preset_id": "512a53a7-30da-446e-a1fc-713c630baff1", @@ -12179,6 +12280,10 @@ If the schedule is empty, the user will be updated to use the default schedule.| "owner_avatar_url": "string", "owner_id": "8826ee2e-7933-4665-aef2-2393f84a0d05", "owner_name": "string", + "task_id": { + "uuid": "string", + "valid": true + }, "template_active_version_id": "b0da9c29-67d8-4c87-888c-bafe356f7f3c", "template_allow_user_cancel_workspace_jobs": true, "template_display_name": "string", diff --git a/docs/reference/api/tasks.md b/docs/reference/api/tasks.md new file mode 100644 index 0000000000000..7a85fccefb4ce --- /dev/null +++ b/docs/reference/api/tasks.md @@ -0,0 +1,401 @@ +# Tasks + +## List AI tasks + +### Code samples + +```shell +# Example request using curl +curl -X GET http://coder-server:8080/api/v2/tasks \ + -H 'Accept: application/json' \ + -H 'Coder-Session-Token: API_KEY' +``` + +`GET /tasks` + +### Parameters + +| Name | In | Type | Required | Description | +|------|-------|--------|----------|---------------------------------------------------------------------------------------------------------------------| +| `q` | query | string | false | Search query for filtering tasks. 
Supports: owner:, organization:, status: | + +### Example responses + +> 200 Response + +```json +{ + "count": 0, + "tasks": [ + { + "created_at": "2019-08-24T14:15:22Z", + "current_state": { + "message": "string", + "state": "working", + "timestamp": "2019-08-24T14:15:22Z", + "uri": "string" + }, + "display_name": "string", + "id": "497f6eca-6276-4993-bfeb-53cbbbba6f08", + "initial_prompt": "string", + "name": "string", + "organization_id": "7c60d51f-b44e-4682-87d6-449835ea4de6", + "owner_avatar_url": "string", + "owner_id": "8826ee2e-7933-4665-aef2-2393f84a0d05", + "owner_name": "string", + "status": "pending", + "template_display_name": "string", + "template_icon": "string", + "template_id": "c6d67e98-83ea-49f0-8812-e4abae2b68bc", + "template_name": "string", + "template_version_id": "0ba39c92-1f1b-4c32-aa3e-9925d7713eb1", + "updated_at": "2019-08-24T14:15:22Z", + "workspace_agent_health": { + "healthy": false, + "reason": "agent has lost connection" + }, + "workspace_agent_id": { + "uuid": "string", + "valid": true + }, + "workspace_agent_lifecycle": "created", + "workspace_app_id": { + "uuid": "string", + "valid": true + }, + "workspace_build_number": 0, + "workspace_id": { + "uuid": "string", + "valid": true + }, + "workspace_name": "string", + "workspace_status": "pending" + } + ] +} +``` + +### Responses + +| Status | Meaning | Description | Schema | +|--------|---------------------------------------------------------|-------------|--------------------------------------------------------------------| +| 200 | [OK](https://tools.ietf.org/html/rfc7231#section-6.3.1) | OK | [codersdk.TasksListResponse](schemas.md#codersdktaskslistresponse) | + +To perform this operation, you must be authenticated. [Learn more](authentication.md). 
+ +## Create a new AI task + +### Code samples + +```shell +# Example request using curl +curl -X POST http://coder-server:8080/api/v2/tasks/{user} \ + -H 'Content-Type: application/json' \ + -H 'Accept: application/json' \ + -H 'Coder-Session-Token: API_KEY' +``` + +`POST /tasks/{user}` + +> Body parameter + +```json +{ + "display_name": "string", + "input": "string", + "name": "string", + "template_version_id": "0ba39c92-1f1b-4c32-aa3e-9925d7713eb1", + "template_version_preset_id": "512a53a7-30da-446e-a1fc-713c630baff1" +} +``` + +### Parameters + +| Name | In | Type | Required | Description | +|--------|------|--------------------------------------------------------------------|----------|-------------------------------------------------------| +| `user` | path | string | true | Username, user ID, or 'me' for the authenticated user | +| `body` | body | [codersdk.CreateTaskRequest](schemas.md#codersdkcreatetaskrequest) | true | Create task request | + +### Example responses + +> 201 Response + +```json +{ + "created_at": "2019-08-24T14:15:22Z", + "current_state": { + "message": "string", + "state": "working", + "timestamp": "2019-08-24T14:15:22Z", + "uri": "string" + }, + "display_name": "string", + "id": "497f6eca-6276-4993-bfeb-53cbbbba6f08", + "initial_prompt": "string", + "name": "string", + "organization_id": "7c60d51f-b44e-4682-87d6-449835ea4de6", + "owner_avatar_url": "string", + "owner_id": "8826ee2e-7933-4665-aef2-2393f84a0d05", + "owner_name": "string", + "status": "pending", + "template_display_name": "string", + "template_icon": "string", + "template_id": "c6d67e98-83ea-49f0-8812-e4abae2b68bc", + "template_name": "string", + "template_version_id": "0ba39c92-1f1b-4c32-aa3e-9925d7713eb1", + "updated_at": "2019-08-24T14:15:22Z", + "workspace_agent_health": { + "healthy": false, + "reason": "agent has lost connection" + }, + "workspace_agent_id": { + "uuid": "string", + "valid": true + }, + "workspace_agent_lifecycle": "created", + "workspace_app_id": { + 
"uuid": "string", + "valid": true + }, + "workspace_build_number": 0, + "workspace_id": { + "uuid": "string", + "valid": true + }, + "workspace_name": "string", + "workspace_status": "pending" +} +``` + +### Responses + +| Status | Meaning | Description | Schema | +|--------|--------------------------------------------------------------|-------------|------------------------------------------| +| 201 | [Created](https://tools.ietf.org/html/rfc7231#section-6.3.2) | Created | [codersdk.Task](schemas.md#codersdktask) | + +To perform this operation, you must be authenticated. [Learn more](authentication.md). + +## Get AI task by ID or name + +### Code samples + +```shell +# Example request using curl +curl -X GET http://coder-server:8080/api/v2/tasks/{user}/{task} \ + -H 'Accept: application/json' \ + -H 'Coder-Session-Token: API_KEY' +``` + +`GET /tasks/{user}/{task}` + +### Parameters + +| Name | In | Type | Required | Description | +|--------|------|--------|----------|-------------------------------------------------------| +| `user` | path | string | true | Username, user ID, or 'me' for the authenticated user | +| `task` | path | string | true | Task ID, or task name | + +### Example responses + +> 200 Response + +```json +{ + "created_at": "2019-08-24T14:15:22Z", + "current_state": { + "message": "string", + "state": "working", + "timestamp": "2019-08-24T14:15:22Z", + "uri": "string" + }, + "display_name": "string", + "id": "497f6eca-6276-4993-bfeb-53cbbbba6f08", + "initial_prompt": "string", + "name": "string", + "organization_id": "7c60d51f-b44e-4682-87d6-449835ea4de6", + "owner_avatar_url": "string", + "owner_id": "8826ee2e-7933-4665-aef2-2393f84a0d05", + "owner_name": "string", + "status": "pending", + "template_display_name": "string", + "template_icon": "string", + "template_id": "c6d67e98-83ea-49f0-8812-e4abae2b68bc", + "template_name": "string", + "template_version_id": "0ba39c92-1f1b-4c32-aa3e-9925d7713eb1", + "updated_at": "2019-08-24T14:15:22Z", + 
"workspace_agent_health": { + "healthy": false, + "reason": "agent has lost connection" + }, + "workspace_agent_id": { + "uuid": "string", + "valid": true + }, + "workspace_agent_lifecycle": "created", + "workspace_app_id": { + "uuid": "string", + "valid": true + }, + "workspace_build_number": 0, + "workspace_id": { + "uuid": "string", + "valid": true + }, + "workspace_name": "string", + "workspace_status": "pending" +} +``` + +### Responses + +| Status | Meaning | Description | Schema | +|--------|---------------------------------------------------------|-------------|------------------------------------------| +| 200 | [OK](https://tools.ietf.org/html/rfc7231#section-6.3.1) | OK | [codersdk.Task](schemas.md#codersdktask) | + +To perform this operation, you must be authenticated. [Learn more](authentication.md). + +## Delete AI task + +### Code samples + +```shell +# Example request using curl +curl -X DELETE http://coder-server:8080/api/v2/tasks/{user}/{task} \ + -H 'Coder-Session-Token: API_KEY' +``` + +`DELETE /tasks/{user}/{task}` + +### Parameters + +| Name | In | Type | Required | Description | +|--------|------|--------|----------|-------------------------------------------------------| +| `user` | path | string | true | Username, user ID, or 'me' for the authenticated user | +| `task` | path | string | true | Task ID, or task name | + +### Responses + +| Status | Meaning | Description | Schema | +|--------|---------------------------------------------------------------|-------------|--------| +| 202 | [Accepted](https://tools.ietf.org/html/rfc7231#section-6.3.3) | Accepted | | + +To perform this operation, you must be authenticated. [Learn more](authentication.md). 
+ +## Update AI task input + +### Code samples + +```shell +# Example request using curl +curl -X PATCH http://coder-server:8080/api/v2/tasks/{user}/{task}/input \ + -H 'Content-Type: application/json' \ + -H 'Coder-Session-Token: API_KEY' +``` + +`PATCH /tasks/{user}/{task}/input` + +> Body parameter + +```json +{ + "input": "string" +} +``` + +### Parameters + +| Name | In | Type | Required | Description | +|--------|------|------------------------------------------------------------------------------|----------|-------------------------------------------------------| +| `user` | path | string | true | Username, user ID, or 'me' for the authenticated user | +| `task` | path | string | true | Task ID, or task name | +| `body` | body | [codersdk.UpdateTaskInputRequest](schemas.md#codersdkupdatetaskinputrequest) | true | Update task input request | + +### Responses + +| Status | Meaning | Description | Schema | +|--------|-----------------------------------------------------------------|-------------|--------| +| 204 | [No Content](https://tools.ietf.org/html/rfc7231#section-6.3.5) | No Content | | + +To perform this operation, you must be authenticated. [Learn more](authentication.md). 
+ +## Get AI task logs + +### Code samples + +```shell +# Example request using curl +curl -X GET http://coder-server:8080/api/v2/tasks/{user}/{task}/logs \ + -H 'Accept: application/json' \ + -H 'Coder-Session-Token: API_KEY' +``` + +`GET /tasks/{user}/{task}/logs` + +### Parameters + +| Name | In | Type | Required | Description | +|--------|------|--------|----------|-------------------------------------------------------| +| `user` | path | string | true | Username, user ID, or 'me' for the authenticated user | +| `task` | path | string | true | Task ID, or task name | + +### Example responses + +> 200 Response + +```json +{ + "logs": [ + { + "content": "string", + "id": 0, + "time": "2019-08-24T14:15:22Z", + "type": "input" + } + ] +} +``` + +### Responses + +| Status | Meaning | Description | Schema | +|--------|---------------------------------------------------------|-------------|------------------------------------------------------------------| +| 200 | [OK](https://tools.ietf.org/html/rfc7231#section-6.3.1) | OK | [codersdk.TaskLogsResponse](schemas.md#codersdktasklogsresponse) | + +To perform this operation, you must be authenticated. [Learn more](authentication.md). 
+ +## Send input to AI task + +### Code samples + +```shell +# Example request using curl +curl -X POST http://coder-server:8080/api/v2/tasks/{user}/{task}/send \ + -H 'Content-Type: application/json' \ + -H 'Coder-Session-Token: API_KEY' +``` + +`POST /tasks/{user}/{task}/send` + +> Body parameter + +```json +{ + "input": "string" +} +``` + +### Parameters + +| Name | In | Type | Required | Description | +|--------|------|----------------------------------------------------------------|----------|-------------------------------------------------------| +| `user` | path | string | true | Username, user ID, or 'me' for the authenticated user | +| `task` | path | string | true | Task ID, or task name | +| `body` | body | [codersdk.TaskSendRequest](schemas.md#codersdktasksendrequest) | true | Task input request | + +### Responses + +| Status | Meaning | Description | Schema | +|--------|-----------------------------------------------------------------|-------------|--------| +| 204 | [No Content](https://tools.ietf.org/html/rfc7231#section-6.3.5) | No Content | | + +To perform this operation, you must be authenticated. [Learn more](authentication.md). diff --git a/docs/reference/api/templates.md b/docs/reference/api/templates.md index 2c516f4788b4d..7849b79957006 100644 --- a/docs/reference/api/templates.md +++ b/docs/reference/api/templates.md @@ -80,7 +80,8 @@ To include deprecated templates, specify `deprecated:true` in the search query. 
"time_til_dormant_autodelete_ms": 0, "time_til_dormant_ms": 0, "updated_at": "2019-08-24T14:15:22Z", - "use_classic_parameter_flow": true + "use_classic_parameter_flow": true, + "use_terraform_workspace_cache": true } ] ``` @@ -138,6 +139,7 @@ Restarts will only happen on weekdays in this list on weeks which line up with W |`» time_til_dormant_ms`|integer|false||| |`» updated_at`|string(date-time)|false||| |`» use_classic_parameter_flow`|boolean|false||| +|`» use_terraform_workspace_cache`|boolean|false||| #### Enumerated Values @@ -266,7 +268,8 @@ curl -X POST http://coder-server:8080/api/v2/organizations/{organization}/templa "time_til_dormant_autodelete_ms": 0, "time_til_dormant_ms": 0, "updated_at": "2019-08-24T14:15:22Z", - "use_classic_parameter_flow": true + "use_classic_parameter_flow": true, + "use_terraform_workspace_cache": true } ``` @@ -416,7 +419,8 @@ curl -X GET http://coder-server:8080/api/v2/organizations/{organization}/templat "time_til_dormant_autodelete_ms": 0, "time_til_dormant_ms": 0, "updated_at": "2019-08-24T14:15:22Z", - "use_classic_parameter_flow": true + "use_classic_parameter_flow": true, + "use_terraform_workspace_cache": true } ``` @@ -832,7 +836,8 @@ To include deprecated templates, specify `deprecated:true` in the search query. 
"time_til_dormant_autodelete_ms": 0, "time_til_dormant_ms": 0, "updated_at": "2019-08-24T14:15:22Z", - "use_classic_parameter_flow": true + "use_classic_parameter_flow": true, + "use_terraform_workspace_cache": true } ] ``` @@ -890,6 +895,7 @@ Restarts will only happen on weekdays in this list on weeks which line up with W |`» time_til_dormant_ms`|integer|false||| |`» updated_at`|string(date-time)|false||| |`» use_classic_parameter_flow`|boolean|false||| +|`» use_terraform_workspace_cache`|boolean|false||| #### Enumerated Values @@ -1036,7 +1042,8 @@ curl -X GET http://coder-server:8080/api/v2/templates/{template} \ "time_til_dormant_autodelete_ms": 0, "time_til_dormant_ms": 0, "updated_at": "2019-08-24T14:15:22Z", - "use_classic_parameter_flow": true + "use_classic_parameter_flow": true, + "use_terraform_workspace_cache": true } ``` @@ -1140,7 +1147,8 @@ curl -X PATCH http://coder-server:8080/api/v2/templates/{template} \ "time_til_dormant_ms": 0, "update_workspace_dormant_at": true, "update_workspace_last_used_at": true, - "use_classic_parameter_flow": true + "use_classic_parameter_flow": true, + "use_terraform_workspace_cache": true } ``` @@ -1207,7 +1215,8 @@ curl -X PATCH http://coder-server:8080/api/v2/templates/{template} \ "time_til_dormant_autodelete_ms": 0, "time_til_dormant_ms": 0, "updated_at": "2019-08-24T14:15:22Z", - "use_classic_parameter_flow": true + "use_classic_parameter_flow": true, + "use_terraform_workspace_cache": true } ``` diff --git a/docs/reference/api/users.md b/docs/reference/api/users.md index 857d619398ff9..c69c57af859aa 100644 --- a/docs/reference/api/users.md +++ b/docs/reference/api/users.md @@ -1241,6 +1241,90 @@ curl -X PUT http://coder-server:8080/api/v2/users/{user}/password \ To perform this operation, you must be authenticated. [Learn more](authentication.md). 
+## Get user preference settings + +### Code samples + +```shell +# Example request using curl +curl -X GET http://coder-server:8080/api/v2/users/{user}/preferences \ + -H 'Accept: application/json' \ + -H 'Coder-Session-Token: API_KEY' +``` + +`GET /users/{user}/preferences` + +### Parameters + +| Name | In | Type | Required | Description | +|--------|------|--------|----------|----------------------| +| `user` | path | string | true | User ID, name, or me | + +### Example responses + +> 200 Response + +```json +{ + "task_notification_alert_dismissed": true +} +``` + +### Responses + +| Status | Meaning | Description | Schema | +|--------|---------------------------------------------------------|-------------|------------------------------------------------------------------------------| +| 200 | [OK](https://tools.ietf.org/html/rfc7231#section-6.3.1) | OK | [codersdk.UserPreferenceSettings](schemas.md#codersdkuserpreferencesettings) | + +To perform this operation, you must be authenticated. [Learn more](authentication.md). 
+ +## Update user preference settings + +### Code samples + +```shell +# Example request using curl +curl -X PUT http://coder-server:8080/api/v2/users/{user}/preferences \ + -H 'Content-Type: application/json' \ + -H 'Accept: application/json' \ + -H 'Coder-Session-Token: API_KEY' +``` + +`PUT /users/{user}/preferences` + +> Body parameter + +```json +{ + "task_notification_alert_dismissed": true +} +``` + +### Parameters + +| Name | In | Type | Required | Description | +|--------|------|--------------------------------------------------------------------------------------------------------|----------|-------------------------| +| `user` | path | string | true | User ID, name, or me | +| `body` | body | [codersdk.UpdateUserPreferenceSettingsRequest](schemas.md#codersdkupdateuserpreferencesettingsrequest) | true | New preference settings | + +### Example responses + +> 200 Response + +```json +{ + "task_notification_alert_dismissed": true +} +``` + +### Responses + +| Status | Meaning | Description | Schema | +|--------|---------------------------------------------------------|-------------|------------------------------------------------------------------------------| +| 200 | [OK](https://tools.ietf.org/html/rfc7231#section-6.3.1) | OK | [codersdk.UserPreferenceSettings](schemas.md#codersdkuserpreferencesettings) | + +To perform this operation, you must be authenticated. [Learn more](authentication.md). + ## Update user profile ### Code samples diff --git a/docs/reference/api/workspaces.md b/docs/reference/api/workspaces.md index 91ab23f9260e9..733c5993669e4 100644 --- a/docs/reference/api/workspaces.md +++ b/docs/reference/api/workspaces.md @@ -82,7 +82,6 @@ of the template will be used. "workspace_id": "0967198e-ec7b-4c6b-b4d3-f71244cadbe9" }, "latest_build": { - "ai_task_sidebar_app_id": "852ddafb-2cb9-4cbf-8a8c-075389fb3d3d", "build_number": 0, "created_at": "2019-08-24T14:15:22Z", "daily_cost": 0, @@ -277,7 +276,6 @@ of the template will be used. 
} ], "status": "pending", - "task_app_id": "ca438251-3e16-4fae-b9ab-dd3c237c3735", "template_version_id": "0ba39c92-1f1b-4c32-aa3e-9925d7713eb1", "template_version_name": "string", "template_version_preset_id": "512a53a7-30da-446e-a1fc-713c630baff1", @@ -297,6 +295,10 @@ of the template will be used. "owner_avatar_url": "string", "owner_id": "8826ee2e-7933-4665-aef2-2393f84a0d05", "owner_name": "string", + "task_id": { + "uuid": "string", + "valid": true + }, "template_active_version_id": "b0da9c29-67d8-4c87-888c-bafe356f7f3c", "template_allow_user_cancel_workspace_jobs": true, "template_display_name": "string", @@ -374,7 +376,6 @@ curl -X GET http://coder-server:8080/api/v2/users/{user}/workspace/{workspacenam "workspace_id": "0967198e-ec7b-4c6b-b4d3-f71244cadbe9" }, "latest_build": { - "ai_task_sidebar_app_id": "852ddafb-2cb9-4cbf-8a8c-075389fb3d3d", "build_number": 0, "created_at": "2019-08-24T14:15:22Z", "daily_cost": 0, @@ -569,7 +570,6 @@ curl -X GET http://coder-server:8080/api/v2/users/{user}/workspace/{workspacenam } ], "status": "pending", - "task_app_id": "ca438251-3e16-4fae-b9ab-dd3c237c3735", "template_version_id": "0ba39c92-1f1b-4c32-aa3e-9925d7713eb1", "template_version_name": "string", "template_version_preset_id": "512a53a7-30da-446e-a1fc-713c630baff1", @@ -589,6 +589,10 @@ curl -X GET http://coder-server:8080/api/v2/users/{user}/workspace/{workspacenam "owner_avatar_url": "string", "owner_id": "8826ee2e-7933-4665-aef2-2393f84a0d05", "owner_name": "string", + "task_id": { + "uuid": "string", + "valid": true + }, "template_active_version_id": "b0da9c29-67d8-4c87-888c-bafe356f7f3c", "template_allow_user_cancel_workspace_jobs": true, "template_display_name": "string", @@ -691,7 +695,6 @@ of the template will be used. 
"workspace_id": "0967198e-ec7b-4c6b-b4d3-f71244cadbe9" }, "latest_build": { - "ai_task_sidebar_app_id": "852ddafb-2cb9-4cbf-8a8c-075389fb3d3d", "build_number": 0, "created_at": "2019-08-24T14:15:22Z", "daily_cost": 0, @@ -886,7 +889,6 @@ of the template will be used. } ], "status": "pending", - "task_app_id": "ca438251-3e16-4fae-b9ab-dd3c237c3735", "template_version_id": "0ba39c92-1f1b-4c32-aa3e-9925d7713eb1", "template_version_name": "string", "template_version_preset_id": "512a53a7-30da-446e-a1fc-713c630baff1", @@ -906,6 +908,10 @@ of the template will be used. "owner_avatar_url": "string", "owner_id": "8826ee2e-7933-4665-aef2-2393f84a0d05", "owner_name": "string", + "task_id": { + "uuid": "string", + "valid": true + }, "template_active_version_id": "b0da9c29-67d8-4c87-888c-bafe356f7f3c", "template_allow_user_cancel_workspace_jobs": true, "template_display_name": "string", @@ -986,7 +992,6 @@ curl -X GET http://coder-server:8080/api/v2/workspaces \ "workspace_id": "0967198e-ec7b-4c6b-b4d3-f71244cadbe9" }, "latest_build": { - "ai_task_sidebar_app_id": "852ddafb-2cb9-4cbf-8a8c-075389fb3d3d", "build_number": 0, "created_at": "2019-08-24T14:15:22Z", "daily_cost": 0, @@ -1164,7 +1169,6 @@ curl -X GET http://coder-server:8080/api/v2/workspaces \ } ], "status": "pending", - "task_app_id": "ca438251-3e16-4fae-b9ab-dd3c237c3735", "template_version_id": "0ba39c92-1f1b-4c32-aa3e-9925d7713eb1", "template_version_name": "string", "template_version_preset_id": "512a53a7-30da-446e-a1fc-713c630baff1", @@ -1184,6 +1188,10 @@ curl -X GET http://coder-server:8080/api/v2/workspaces \ "owner_avatar_url": "string", "owner_id": "8826ee2e-7933-4665-aef2-2393f84a0d05", "owner_name": "string", + "task_id": { + "uuid": "string", + "valid": true + }, "template_active_version_id": "b0da9c29-67d8-4c87-888c-bafe356f7f3c", "template_allow_user_cancel_workspace_jobs": true, "template_display_name": "string", @@ -1262,7 +1270,6 @@ curl -X GET http://coder-server:8080/api/v2/workspaces/{workspace} 
\ "workspace_id": "0967198e-ec7b-4c6b-b4d3-f71244cadbe9" }, "latest_build": { - "ai_task_sidebar_app_id": "852ddafb-2cb9-4cbf-8a8c-075389fb3d3d", "build_number": 0, "created_at": "2019-08-24T14:15:22Z", "daily_cost": 0, @@ -1457,7 +1464,6 @@ curl -X GET http://coder-server:8080/api/v2/workspaces/{workspace} \ } ], "status": "pending", - "task_app_id": "ca438251-3e16-4fae-b9ab-dd3c237c3735", "template_version_id": "0ba39c92-1f1b-4c32-aa3e-9925d7713eb1", "template_version_name": "string", "template_version_preset_id": "512a53a7-30da-446e-a1fc-713c630baff1", @@ -1477,6 +1483,10 @@ curl -X GET http://coder-server:8080/api/v2/workspaces/{workspace} \ "owner_avatar_url": "string", "owner_id": "8826ee2e-7933-4665-aef2-2393f84a0d05", "owner_name": "string", + "task_id": { + "uuid": "string", + "valid": true + }, "template_active_version_id": "b0da9c29-67d8-4c87-888c-bafe356f7f3c", "template_allow_user_cancel_workspace_jobs": true, "template_display_name": "string", @@ -1814,7 +1824,6 @@ curl -X PUT http://coder-server:8080/api/v2/workspaces/{workspace}/dormant \ "workspace_id": "0967198e-ec7b-4c6b-b4d3-f71244cadbe9" }, "latest_build": { - "ai_task_sidebar_app_id": "852ddafb-2cb9-4cbf-8a8c-075389fb3d3d", "build_number": 0, "created_at": "2019-08-24T14:15:22Z", "daily_cost": 0, @@ -2009,7 +2018,6 @@ curl -X PUT http://coder-server:8080/api/v2/workspaces/{workspace}/dormant \ } ], "status": "pending", - "task_app_id": "ca438251-3e16-4fae-b9ab-dd3c237c3735", "template_version_id": "0ba39c92-1f1b-4c32-aa3e-9925d7713eb1", "template_version_name": "string", "template_version_preset_id": "512a53a7-30da-446e-a1fc-713c630baff1", @@ -2029,6 +2037,10 @@ curl -X PUT http://coder-server:8080/api/v2/workspaces/{workspace}/dormant \ "owner_avatar_url": "string", "owner_id": "8826ee2e-7933-4665-aef2-2393f84a0d05", "owner_name": "string", + "task_id": { + "uuid": "string", + "valid": true + }, "template_active_version_id": "b0da9c29-67d8-4c87-888c-bafe356f7f3c", 
"template_allow_user_cancel_workspace_jobs": true, "template_display_name": "string", diff --git a/docs/reference/cli/aibridge.md b/docs/reference/cli/aibridge.md new file mode 100644 index 0000000000000..67e633682d433 --- /dev/null +++ b/docs/reference/cli/aibridge.md @@ -0,0 +1,16 @@ + +# aibridge + +Manage AI Bridge. + +## Usage + +```console +coder aibridge +``` + +## Subcommands + +| Name | Purpose | +|-----------------------------------------------------------|---------------------------------| +| [interceptions](./aibridge_interceptions.md) | Manage AI Bridge interceptions. | diff --git a/docs/reference/cli/aibridge_interceptions.md b/docs/reference/cli/aibridge_interceptions.md new file mode 100644 index 0000000000000..80c2135b07055 --- /dev/null +++ b/docs/reference/cli/aibridge_interceptions.md @@ -0,0 +1,16 @@ + +# aibridge interceptions + +Manage AI Bridge interceptions. + +## Usage + +```console +coder aibridge interceptions +``` + +## Subcommands + +| Name | Purpose | +|-------------------------------------------------------|---------------------------------------| +| [list](./aibridge_interceptions_list.md) | List AI Bridge interceptions as JSON. | diff --git a/docs/reference/cli/aibridge_interceptions_list.md b/docs/reference/cli/aibridge_interceptions_list.md new file mode 100644 index 0000000000000..a47b8c53dafd3 --- /dev/null +++ b/docs/reference/cli/aibridge_interceptions_list.md @@ -0,0 +1,69 @@ + +# aibridge interceptions list + +List AI Bridge interceptions as JSON. + +## Usage + +```console +coder aibridge interceptions list [flags] +``` + +## Options + +### --initiator + +| | | +|------|---------------------| +| Type | string | + +Only return interceptions initiated by this user. Accepts a user ID, username, or "me". + +### --started-before + +| | | +|------|---------------------| +| Type | string | + +Only return interceptions started before this time. Must be after 'started-after' if set. Accepts a time in the RFC 3339 format, e.g. 
"2006-01-02T15:04:05Z07:00". + +### --started-after + +| | | +|------|---------------------| +| Type | string | + +Only return interceptions started after this time. Must be before 'started-before' if set. Accepts a time in the RFC 3339 format, e.g. "2006-01-02T15:04:05Z07:00". + +### --provider + +| | | +|------|---------------------| +| Type | string | + +Only return interceptions from this provider. + +### --model + +| | | +|------|---------------------| +| Type | string | + +Only return interceptions from this model. + +### --after-id + +| | | +|------|---------------------| +| Type | string | + +The ID of the last result on the previous page to use as a pagination cursor. + +### --limit + +| | | +|---------|------------------| +| Type | int | +| Default | 100 | + +The limit of results to return. Must be between 1 and 1000. diff --git a/docs/reference/cli/index.md b/docs/reference/cli/index.md index c298f8bcb61a2..b26ec94a7f80d 100644 --- a/docs/reference/cli/index.md +++ b/docs/reference/cli/index.md @@ -36,6 +36,7 @@ Coder — A tool for provisioning self-hosted development environments with Terr | [publickey](./publickey.md) | Output your Coder public key used for Git operations | | [reset-password](./reset-password.md) | Directly connect to the database to reset a user's password | | [state](./state.md) | Manually manage Terraform state to fix broken workspaces | +| [task](./task.md) | Manage tasks | | [templates](./templates.md) | Manage templates | | [tokens](./tokens.md) | Manage personal access tokens | | [users](./users.md) | Manage users | @@ -68,6 +69,7 @@ Coder — A tool for provisioning self-hosted development environments with Terr | [groups](./groups.md) | Manage groups | | [prebuilds](./prebuilds.md) | Manage Coder prebuilds | | [external-workspaces](./external-workspaces.md) | Create or manage external workspaces | +| [aibridge](./aibridge.md) | Manage AI Bridge. | ## Options @@ -169,6 +171,16 @@ Disable direct (P2P) connections to workspaces. 
Disable network telemetry. Network telemetry is collected when connecting to workspaces using the CLI, and is forwarded to the server. If telemetry is also enabled on the server, it may be sent to Coder. Network telemetry is used to measure network quality and detect regressions. +### --use-keyring + +| | | +|-------------|---------------------------------| +| Type | bool | +| Environment | $CODER_USE_KEYRING | +| Default | true | + +Store and retrieve session tokens using the operating system keyring. This flag is ignored and file-based storage is used when --global-config is set or keyring usage is not supported on the current platform. Set to false to force file-based storage on supported platforms. + ### --global-config | | | diff --git a/docs/reference/cli/login.md b/docs/reference/cli/login.md index a35038fedef8c..1371ebae1bf2f 100644 --- a/docs/reference/cli/login.md +++ b/docs/reference/cli/login.md @@ -9,6 +9,12 @@ Authenticate with Coder deployment coder login [flags] [] ``` +## Description + +```console +By default, the session token is stored in the operating system keyring on macOS and Windows and a plain text file on Linux. Use the --use-keyring flag or CODER_USE_KEYRING environment variable to change the storage mechanism. +``` + ## Options ### --first-user-email diff --git a/docs/reference/cli/provisioner_start.md b/docs/reference/cli/provisioner_start.md index 2a3c88ff93139..f278bac310cad 100644 --- a/docs/reference/cli/provisioner_start.md +++ b/docs/reference/cli/provisioner_start.md @@ -144,6 +144,16 @@ Serve prometheus metrics on the address defined by prometheus address. The bind address to serve prometheus metrics. +### --experiments + +| | | +|-------------|---------------------------------| +| Type | string-array | +| Environment | $CODER_EXPERIMENTS | +| YAML | experiments | + +Enable one or more experiments. These are not ready for production. Separate multiple experiments with commas, or enter '*' to opt-in to all available experiments. 
+ ### -O, --org | | | diff --git a/docs/reference/cli/server.md b/docs/reference/cli/server.md index bdc424bdd7a8b..951e140f3f01f 100644 --- a/docs/reference/cli/server.md +++ b/docs/reference/cli/server.md @@ -1647,3 +1647,130 @@ How often to reconcile workspace prebuilds state. | Default | false | Hide AI tasks from the dashboard. + +### --aibridge-enabled + +| | | +|-------------|--------------------------------------| +| Type | bool | +| Environment | $CODER_AIBRIDGE_ENABLED | +| YAML | aibridge.enabled | +| Default | false | + +Whether to start an in-memory aibridged instance. + +### --aibridge-openai-base-url + +| | | +|-------------|----------------------------------------------| +| Type | string | +| Environment | $CODER_AIBRIDGE_OPENAI_BASE_URL | +| YAML | aibridge.openai_base_url | +| Default | https://api.openai.com/v1/ | + +The base URL of the OpenAI API. + +### --aibridge-openai-key + +| | | +|-------------|-----------------------------------------| +| Type | string | +| Environment | $CODER_AIBRIDGE_OPENAI_KEY | +| YAML | aibridge.openai_key | + +The key to authenticate against the OpenAI API. + +### --aibridge-anthropic-base-url + +| | | +|-------------|-------------------------------------------------| +| Type | string | +| Environment | $CODER_AIBRIDGE_ANTHROPIC_BASE_URL | +| YAML | aibridge.anthropic_base_url | +| Default | https://api.anthropic.com/ | + +The base URL of the Anthropic API. + +### --aibridge-anthropic-key + +| | | +|-------------|--------------------------------------------| +| Type | string | +| Environment | $CODER_AIBRIDGE_ANTHROPIC_KEY | +| YAML | aibridge.anthropic_key | + +The key to authenticate against the Anthropic API. + +### --aibridge-bedrock-region + +| | | +|-------------|---------------------------------------------| +| Type | string | +| Environment | $CODER_AIBRIDGE_BEDROCK_REGION | +| YAML | aibridge.bedrock_region | + +The AWS Bedrock API region. 
+ +### --aibridge-bedrock-access-key + +| | | +|-------------|-------------------------------------------------| +| Type | string | +| Environment | $CODER_AIBRIDGE_BEDROCK_ACCESS_KEY | +| YAML | aibridge.bedrock_access_key | + +The access key to authenticate against the AWS Bedrock API. + +### --aibridge-bedrock-access-key-secret + +| | | +|-------------|--------------------------------------------------------| +| Type | string | +| Environment | $CODER_AIBRIDGE_BEDROCK_ACCESS_KEY_SECRET | +| YAML | aibridge.bedrock_access_key_secret | + +The access key secret to use with the access key to authenticate against the AWS Bedrock API. + +### --aibridge-bedrock-model + +| | | +|-------------|---------------------------------------------------------------| +| Type | string | +| Environment | $CODER_AIBRIDGE_BEDROCK_MODEL | +| YAML | aibridge.bedrock_model | +| Default | global.anthropic.claude-sonnet-4-5-20250929-v1:0 | + +The model to use when making requests to the AWS Bedrock API. + +### --aibridge-bedrock-small-fastmodel + +| | | +|-------------|--------------------------------------------------------------| +| Type | string | +| Environment | $CODER_AIBRIDGE_BEDROCK_SMALL_FAST_MODEL | +| YAML | aibridge.bedrock_small_fast_model | +| Default | global.anthropic.claude-haiku-4-5-20251001-v1:0 | + +The small fast model to use when making requests to the AWS Bedrock API. Claude Code uses Haiku-class models to perform background tasks. See https://docs.claude.com/en/docs/claude-code/settings#environment-variables. + +### --aibridge-inject-coder-mcp-tools + +| | | +|-------------|-----------------------------------------------------| +| Type | bool | +| Environment | $CODER_AIBRIDGE_INJECT_CODER_MCP_TOOLS | +| YAML | aibridge.inject_coder_mcp_tools | +| Default | false | + +Whether to inject Coder's MCP tools into intercepted AI Bridge requests (requires the "oauth2" and "mcp-server-http" experiments to be enabled). 
+ +### --aibridge-retention + +| | | +|-------------|----------------------------------------| +| Type | duration | +| Environment | $CODER_AIBRIDGE_RETENTION | +| YAML | aibridge.retention | +| Default | 60d | + +Length of time to retain data such as interceptions and all related records (token, prompt, tool use). diff --git a/docs/reference/cli/task.md b/docs/reference/cli/task.md new file mode 100644 index 0000000000000..9f70c9c4d5022 --- /dev/null +++ b/docs/reference/cli/task.md @@ -0,0 +1,25 @@ + +# task + +Manage tasks + +Aliases: + +* tasks + +## Usage + +```console +coder task +``` + +## Subcommands + +| Name | Purpose | +|-----------------------------------------|----------------------------| +| [create](./task_create.md) | Create a task | +| [delete](./task_delete.md) | Delete tasks | +| [list](./task_list.md) | List tasks | +| [logs](./task_logs.md) | Show a task's logs | +| [send](./task_send.md) | Send input to a task | +| [status](./task_status.md) | Show the status of a task. 
| diff --git a/docs/reference/cli/task_create.md b/docs/reference/cli/task_create.md new file mode 100644 index 0000000000000..726c805469dc2 --- /dev/null +++ b/docs/reference/cli/task_create.md @@ -0,0 +1,100 @@ + +# task create + +Create a task + +## Usage + +```console +coder task create [flags] [input] +``` + +## Description + +```console + - Create a task with direct input: + + $ coder task create "Add authentication to the user service" + + - Create a task with stdin input: + + $ echo "Add authentication to the user service" | coder task create + + - Create a task with a specific name: + + $ coder task create --name task1 "Add authentication to the user service" + + - Create a task from a specific template / preset: + + $ coder task create --template backend-dev --preset "My Preset" "Add authentication to the user service" + + - Create a task for another user (requires appropriate permissions): + + $ coder task create --owner user@example.com "Add authentication to the user service" +``` + +## Options + +### --name + +| | | +|------|---------------------| +| Type | string | + +Specify the name of the task. If you do not specify one, a name will be generated for you. + +### --owner + +| | | +|---------|---------------------| +| Type | string | +| Default | me | + +Specify the owner of the task. Defaults to the current user. + +### --template + +| | | +|-------------|----------------------------------------| +| Type | string | +| Environment | $CODER_TASK_TEMPLATE_NAME | + +### --template-version + +| | | +|-------------|-------------------------------------------| +| Type | string | +| Environment | $CODER_TASK_TEMPLATE_VERSION | + +### --preset + +| | | +|-------------|--------------------------------------| +| Type | string | +| Environment | $CODER_TASK_PRESET_NAME | +| Default | none | + +### --stdin + +| | | +|------|-------------------| +| Type | bool | + +Reads from stdin for the task input. 
+ +### -q, --quiet + +| | | +|------|-------------------| +| Type | bool | + +Only display the created task's ID. + +### -O, --org + +| | | +|-------------|----------------------------------| +| Type | string | +| Environment | $CODER_ORGANIZATION | + +Select which organization (uuid or name) to use. diff --git a/docs/reference/cli/task_delete.md b/docs/reference/cli/task_delete.md new file mode 100644 index 0000000000000..0181ee0ceafd7 --- /dev/null +++ b/docs/reference/cli/task_delete.md @@ -0,0 +1,40 @@ + +# task delete + +Delete tasks + +Aliases: + +* rm + +## Usage + +```console +coder task delete [flags] [ ...] +``` + +## Description + +```console + - Delete a single task.: + + $ coder task delete task1 + + - Delete multiple tasks.: + + $ coder task delete task1 task2 task3 + + - Delete a task without confirmation.: + + $ coder task delete task4 --yes +``` + +## Options + +### -y, --yes + +| | | +|------|-------------------| +| Type | bool | + +Bypass prompts. diff --git a/docs/reference/cli/task_list.md b/docs/reference/cli/task_list.md new file mode 100644 index 0000000000000..1a9335f65f649 --- /dev/null +++ b/docs/reference/cli/task_list.md @@ -0,0 +1,92 @@ + +# task list + +List tasks + +Aliases: + +* ls + +## Usage + +```console +coder task list [flags] +``` + +## Description + +```console + - List tasks for the current user.: + + $ coder task list + + - List tasks for a specific user.: + + $ coder task list --user someone-else + + - List all tasks you can view.: + + $ coder task list --all + + - List all your running tasks.: + + $ coder task list --status running + + - As above, but only show IDs.: + + $ coder task list --status running --quiet +``` + +## Options + +### --status + +| | | +|------|--------------------------------------------------------------------| +| Type | pending\|initializing\|active\|paused\|error\|unknown | + +Filter by task status. 
+ +### -a, --all + +| | | +|---------|--------------------| +| Type | bool | +| Default | false | + +List tasks for all users you can view. + +### --user + +| | | +|------|---------------------| +| Type | string | + +List tasks for the specified user (username, "me"). + +### -q, --quiet + +| | | +|---------|--------------------| +| Type | bool | +| Default | false | + +Only display task IDs. + +### -c, --column + +| | | +|---------|---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------| +| Type | [id\|organization id\|owner id\|owner name\|owner avatar url\|name\|display name\|template id\|template version id\|template name\|template display name\|template icon\|workspace id\|workspace name\|workspace status\|workspace build number\|workspace agent id\|workspace agent lifecycle\|workspace agent health\|workspace app id\|initial prompt\|status\|state\|message\|created at\|updated at\|state changed] | +| Default | name,status,state,state changed,message | + +Columns to display in table output. + +### -o, --output + +| | | +|---------|--------------------------| +| Type | table\|json | +| Default | table | + +Output format. 
diff --git a/docs/reference/cli/task_logs.md b/docs/reference/cli/task_logs.md new file mode 100644 index 0000000000000..d7e4b0eda65cc --- /dev/null +++ b/docs/reference/cli/task_logs.md @@ -0,0 +1,38 @@ + +# task logs + +Show a task's logs + +## Usage + +```console +coder task logs [flags] +``` + +## Description + +```console + - Show logs for a given task.: + + $ coder task logs task1 +``` + +## Options + +### -c, --column + +| | | +|---------|----------------------------------------| +| Type | [id\|content\|type\|time] | +| Default | type,content | + +Columns to display in table output. + +### -o, --output + +| | | +|---------|--------------------------| +| Type | table\|json | +| Default | table | + +Output format. diff --git a/docs/reference/cli/task_send.md b/docs/reference/cli/task_send.md new file mode 100644 index 0000000000000..0ad847a441387 --- /dev/null +++ b/docs/reference/cli/task_send.md @@ -0,0 +1,32 @@ + +# task send + +Send input to a task + +## Usage + +```console +coder task send [flags] [ | --stdin] +``` + +## Description + +```console + - Send direct input to a task.: + + $ coder task send task1 "Please also add unit tests" + + - Send input from stdin to a task.: + + $ echo "Please also add unit tests" | coder task send task1 --stdin +``` + +## Options + +### --stdin + +| | | +|------|-------------------| +| Type | bool | + +Reads the input from stdin. diff --git a/docs/reference/cli/task_status.md b/docs/reference/cli/task_status.md new file mode 100644 index 0000000000000..4a167a249fbe8 --- /dev/null +++ b/docs/reference/cli/task_status.md @@ -0,0 +1,55 @@ + +# task status + +Show the status of a task. 
+ +Aliases: + +* stat + +## Usage + +```console +coder task status [flags] +``` + +## Description + +```console + - Show the status of a given task.: + + $ coder task status task1 + + - Watch the status of a given task until it completes (idle or stopped).: + + $ coder task status task1 --watch +``` + +## Options + +### --watch + +| | | +|---------|--------------------| +| Type | bool | +| Default | false | + +Watch the task status output. This will stream updates to the terminal until the underlying workspace is stopped. + +### -c, --column + +| | | +|---------|------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------| +| Type | [id\|organization id\|owner id\|owner name\|owner avatar url\|name\|display name\|template id\|template version id\|template name\|template display name\|template icon\|workspace id\|workspace name\|workspace status\|workspace build number\|workspace agent id\|workspace agent lifecycle\|workspace agent health\|workspace app id\|initial prompt\|status\|state\|message\|created at\|updated at\|state changed\|healthy] | +| Default | state changed,status,healthy,state,message | + +Columns to display in table output. + +### -o, --output + +| | | +|---------|--------------------------| +| Type | table\|json | +| Default | table | + +Output format. diff --git a/docs/tutorials/faqs.md b/docs/tutorials/faqs.md index a2f350b45a734..f2a0902eb790f 100644 --- a/docs/tutorials/faqs.md +++ b/docs/tutorials/faqs.md @@ -559,3 +559,27 @@ confidential resources to their local machines. For more advanced security needs, consider adopting an endpoint security solution. 
+ +## How do I change the access URL for my Coder server? + +You may want to change the default domain that's used to access Coder, i.e. `yourcompany.coder.com`, and find yourself unfamiliar with the process. + +To change the access URL associated with your server, you can edit any of the following variables: + +- CLI using the `--access-url` flag +- YAML using the `accessURL` option +- or ENV using the `CODER_ACCESS_URL` environment variable. + +For example, if you're using an environment file to configure your server, you'll want to open the file located at `/etc/coder.d/coder.env` and edit the following: + +`CODER_ACCESS_URL=https://yourcompany.coder.com` to your new desired URL. + +Then save your changes, and reload the systemd daemon configuration using the following command: + +`systemctl daemon-reload` + +and restart the service using: + +`systemctl restart coder` + +After Coder restarts, your changes should be applied and reflected in the admin settings. diff --git a/docs/tutorials/quickstart.md b/docs/tutorials/quickstart.md index 19f9571326cf7..2b7b2c2e385bb 100644 --- a/docs/tutorials/quickstart.md +++ b/docs/tutorials/quickstart.md @@ -239,7 +239,7 @@ advanced capabilities that Coder offers. ### Get Coder Tasks Running Coder Tasks is an interface that allows you to run and manage coding agents like -Claude Code within a given Workspace. Tasks become available when the Template for a Workspace has the `coder_ai_task` resource and `coder_parameter` named `AI Prompt` defined in its source code. +Claude Code within a given Workspace. Tasks become available when a Workspace Template has the `coder_ai_task` resource defined in its source code. + In other words, any existing template can become a Task template by adding in that resource and parameter. 
diff --git a/docs/user-guides/devcontainers/index.md b/docs/user-guides/devcontainers/index.md index ed817fe853416..04ec15c54b470 100644 --- a/docs/user-guides/devcontainers/index.md +++ b/docs/user-guides/devcontainers/index.md @@ -1,81 +1,69 @@ # Dev Containers Integration -> [!NOTE] -> -> The Coder dev containers integration is an [early access](../../install/releases/feature-stages.md) feature. -> -> While functional for testing and feedback, it may change significantly before general availability. - -The dev containers integration is an early access feature that enables seamless -creation and management of dev containers in Coder workspaces. This feature -leverages the [`@devcontainers/cli`](https://github.com/devcontainers/cli) and +The Dev Containers integration enables seamless creation and management of Dev +Containers in Coder workspaces. This feature leverages the +[`@devcontainers/cli`](https://github.com/devcontainers/cli) and [Docker](https://www.docker.com) to provide a streamlined development experience. This implementation is different from the existing -[Envbuilder-based dev containers](../../admin/templates/managing-templates/devcontainers/index.md) +[Envbuilder-based Dev Containers](../../admin/templates/managing-templates/devcontainers/index.md) offering. ## Prerequisites -- Coder version 2.22.0 or later -- Coder CLI version 2.22.0 or later +- Coder version 2.24.0 or later +- Coder CLI version 2.24.0 or later +- **Linux or macOS workspace**, Dev Containers are not supported on Windows - A template with: - - Dev containers integration enabled + - Dev Containers integration enabled - A Docker-compatible workspace image - Appropriate permissions to execute Docker commands inside your workspace ## How It Works -The dev containers integration utilizes the `devcontainer` command from -[`@devcontainers/cli`](https://github.com/devcontainers/cli) to manage dev -containers within your Coder workspace. 
-This command provides comprehensive functionality for creating, starting, and managing dev containers. +The Dev Containers integration utilizes the `devcontainer` command from +[`@devcontainers/cli`](https://github.com/devcontainers/cli) to manage Dev +Containers within your Coder workspace. +This command provides comprehensive functionality for creating, starting, and managing Dev Containers. Dev environments are configured through a standard `devcontainer.json` file, which allows for extensive customization of your development setup. -When a workspace with the dev containers integration starts: +When a workspace with the Dev Containers integration starts: 1. The workspace initializes the Docker environment. 1. The integration detects repositories with a `.devcontainer` directory or a `devcontainer.json` file. -1. The integration builds and starts the dev container based on the +1. The integration builds and starts the Dev Container based on the configuration. -1. Your workspace automatically detects the running dev container. +1. Your workspace automatically detects the running Dev Container. 
## Features ### Available Now -- Automatic dev container detection from repositories -- Seamless dev container startup during workspace initialization -- Integrated IDE experience in dev containers with VS Code -- Direct service access in dev containers -- Limited SSH access to dev containers +- Automatic Dev Container detection from repositories +- Seamless Dev Container startup during workspace initialization +- Dev Container change detection and dirty state indicators +- On-demand Dev Container recreation via rebuild button +- Integrated IDE experience in Dev Containers with VS Code +- Direct service access in Dev Containers +- SSH access to Dev Containers +- Automatic port detection for container ports -### Coming Soon +## Limitations -- Dev container change detection -- On-demand dev container recreation -- Support for automatic port forwarding inside the container -- Full native SSH support to dev containers - -## Limitations during Early Access - -During the early access phase, the dev containers integration has the following -limitations: +The Dev Containers integration has the following limitations: +- **Not supported on Windows** - Changes to the `devcontainer.json` file require manual container recreation -- Automatic port forwarding only works for ports specified in `appPort` -- SSH access requires using the `--container` flag -- Some devcontainer features may not work as expected - -These limitations will be addressed in future updates as the feature matures. 
+ using the rebuild button +- Some Dev Container features may not work as expected ## Comparison with Envbuilder-based Dev Containers -| Feature | Dev Containers (Early Access) | Envbuilder Dev Containers | +| Feature | Dev Containers Integration | Envbuilder Dev Containers | |----------------|----------------------------------------|----------------------------------------------| | Implementation | Direct `@devcontainers/cli` and Docker | Coder's Envbuilder | | Target users | Individual developers | Platform teams and administrators | @@ -84,15 +72,15 @@ These limitations will be addressed in future updates as the feature matures. | Requirements | Docker access in workspace | Compatible with more restricted environments | Choose the appropriate solution based on your team's needs and infrastructure -constraints. For additional details on Envbuilder's dev container support, see +constraints. For additional details on Envbuilder's Dev Container support, see the -[Envbuilder devcontainer spec support documentation](https://github.com/coder/envbuilder/blob/main/docs/devcontainer-spec-support.md). +[Envbuilder Dev Container spec support documentation](https://github.com/coder/envbuilder/blob/main/docs/devcontainer-spec-support.md). 
## Next Steps -- Explore the [dev container specification](https://containers.dev/) to learn +- Explore the [Dev Container specification](https://containers.dev/) to learn more about advanced configuration options -- Read about [dev container features](https://containers.dev/features) to +- Read about [Dev Container features](https://containers.dev/features) to enhance your development environment - Check the [VS Code dev containers documentation](https://code.visualstudio.com/docs/devcontainers/containers) diff --git a/dogfood/coder-envbuilder/main.tf b/dogfood/coder-envbuilder/main.tf index cd316100fea8e..5970bbdfb16f1 100644 --- a/dogfood/coder-envbuilder/main.tf +++ b/dogfood/coder-envbuilder/main.tf @@ -24,7 +24,6 @@ locals { // actually in Germany now. "eu-helsinki" = "tcp://katerose-fsn-cdr-dev.tailscale.svc.cluster.local:2375" "ap-sydney" = "tcp://wolfgang-syd-cdr-dev.tailscale.svc.cluster.local:2375" - "sa-saopaulo" = "tcp://oberstein-sao-cdr-dev.tailscale.svc.cluster.local:2375" "za-jnb" = "tcp://greenhill-jnb-cdr-dev.tailscale.svc.cluster.local:2375" } @@ -72,11 +71,6 @@ data "coder_parameter" "region" { name = "Sydney" value = "ap-sydney" } - option { - icon = "/emojis/1f1e7-1f1f7.png" - name = "São Paulo" - value = "sa-saopaulo" - } option { icon = "/emojis/1f1ff-1f1e6.png" name = "Johannesburg" @@ -129,7 +123,7 @@ module "personalize" { module "code-server" { source = "dev.registry.coder.com/coder/code-server/coder" - version = "1.3.1" + version = "1.4.0" agent_id = coder_agent.dev.id folder = local.repo_dir auto_install_extensions = true @@ -446,4 +440,4 @@ resource "coder_metadata" "container_info" { key = "region" value = data.coder_parameter.region.option[index(data.coder_parameter.region.option.*.value, data.coder_parameter.region.value)].name } -} \ No newline at end of file +} diff --git a/dogfood/coder/Dockerfile b/dogfood/coder/Dockerfile index 932e2ce9b06a6..655e3b8084f85 100644 --- a/dogfood/coder/Dockerfile +++ b/dogfood/coder/Dockerfile 
@@ -1,5 +1,5 @@ # 1.86.0 -FROM rust:slim@sha256:e4ae8ab67883487c5545884d5aa5ebbe86b5f13c6df4a8e3e2f34c89cedb9f54 AS rust-utils +FROM rust:slim@sha256:5218a2b4b4cb172f26503ac2b2de8e5ffd629ae1c0d885aff2cbe97fd4d1a409 AS rust-utils # Install rust helper programs ENV CARGO_INSTALL_ROOT=/tmp/ # Use more reliable mirrors for Debian packages @@ -8,11 +8,11 @@ RUN sed -i 's|http://deb.debian.org/debian|http://mirrors.edge.kernel.org/debian RUN apt-get update && apt-get install -y libssl-dev openssl pkg-config build-essential RUN cargo install jj-cli typos-cli watchexec-cli -FROM ubuntu:jammy@sha256:4e0171b9275e12d375863f2b3ae9ce00a4c53ddda176bd55868df97ac6f21a6e AS go +FROM ubuntu:jammy@sha256:104ae83764a5119017b8e8d6218fa0832b09df65aae7d5a6de29a85d813da2fb AS go # Install Go manually, so that we can control the version -ARG GO_VERSION=1.24.6 -ARG GO_CHECKSUM="bbca37cc395c974ffa4893ee35819ad23ebb27426df87af92e93a9ec66ef8712" +ARG GO_VERSION=1.24.10 +ARG GO_CHECKSUM="dd52b974e3d9c5a7bbfb222c685806def6be5d6f7efd10f9caa9ca1fa2f47955" # Boring Go is needed to build FIPS-compliant binaries. 
RUN apt-get update && \ @@ -62,7 +62,12 @@ RUN apt-get update && \ # charts and values files go install github.com/norwoodj/helm-docs/cmd/helm-docs@v1.5.0 && \ # sqlc for Go code generation - (CGO_ENABLED=1 go install github.com/sqlc-dev/sqlc/cmd/sqlc@v1.27.0) && \ + # (CGO_ENABLED=1 go install github.com/sqlc-dev/sqlc/cmd/sqlc@v1.27.0) && \ + # + # Switched to coder/sqlc fork to fix ambiguous column bug, see: + # - https://github.com/coder/sqlc/pull/1 + # - https://github.com/sqlc-dev/sqlc/pull/4159 + (CGO_ENABLED=1 go install github.com/coder/sqlc/cmd/sqlc@aab4e865a51df0c43e1839f81a9d349b41d14f05) && \ # gcr-cleaner-cli used by CI to prune unused images go install github.com/sethvargo/gcr-cleaner/cmd/gcr-cleaner-cli@v0.5.1 && \ # ruleguard for checking custom rules, without needing to run all of @@ -97,7 +102,7 @@ RUN curl -L -o protoc.zip https://github.com/protocolbuffers/protobuf/releases/d unzip protoc.zip && \ rm protoc.zip -FROM ubuntu:jammy@sha256:4e0171b9275e12d375863f2b3ae9ce00a4c53ddda176bd55868df97ac6f21a6e +FROM ubuntu:jammy@sha256:104ae83764a5119017b8e8d6218fa0832b09df65aae7d5a6de29a85d813da2fb SHELL ["/bin/bash", "-c"] @@ -209,7 +214,7 @@ RUN sed -i 's|http://archive.ubuntu.com/ubuntu/|http://mirrors.edge.kernel.org/u # NOTE: In scripts/Dockerfile.base we specifically install Terraform version 1.12.2. # Installing the same version here to match. 
-RUN wget -O /tmp/terraform.zip "https://releases.hashicorp.com/terraform/1.13.0/terraform_1.13.0_linux_amd64.zip" && \ +RUN wget -O /tmp/terraform.zip "https://releases.hashicorp.com/terraform/1.13.4/terraform_1.13.4_linux_amd64.zip" && \ unzip /tmp/terraform.zip -d /usr/local/bin && \ rm -f /tmp/terraform.zip && \ chmod +x /usr/local/bin/terraform && \ diff --git a/dogfood/coder/boundary-config.yaml b/dogfood/coder/boundary-config.yaml new file mode 100644 index 0000000000000..2b6c412a36ab4 --- /dev/null +++ b/dogfood/coder/boundary-config.yaml @@ -0,0 +1,222 @@ +allowlist: + # specified in claude-code module as well (effectively a duplicate); needed for basic functionality of claude-code agent + - domain=anthropic.com + - domain=registry.npmjs.org + - domain=sentry.io + - domain=claude.ai + - domain=dev.coder.com + + # test domains + - method=GET domain=google.com + - method=GET domain=typicode.com + + # domain used in coder task workspaces + - method=POST domain=http-intake.logs.datadoghq.com + + # Default allowed domains from Claude Code on the web + # Source: https://code.claude.com/docs/en/claude-code-on-the-web#default-allowed-domains + # Anthropic Services + - domain=api.anthropic.com + - domain=statsig.anthropic.com + - domain=claude.ai + + # Version Control + - domain=github.com + - domain=www.github.com + - domain=api.github.com + - domain=raw.githubusercontent.com + - domain=objects.githubusercontent.com + - domain=codeload.github.com + - domain=avatars.githubusercontent.com + - domain=camo.githubusercontent.com + - domain=gist.github.com + - domain=gitlab.com + - domain=www.gitlab.com + - domain=registry.gitlab.com + - domain=bitbucket.org + - domain=www.bitbucket.org + - domain=api.bitbucket.org + + # Container Registries + - domain=registry-1.docker.io + - domain=auth.docker.io + - domain=index.docker.io + - domain=hub.docker.com + - domain=www.docker.com + - domain=production.cloudflare.docker.com + - domain=download.docker.com + - domain=*.gcr.io 
+ - domain=ghcr.io + - domain=mcr.microsoft.com + - domain=*.data.mcr.microsoft.com + + # Cloud Platforms + - domain=cloud.google.com + - domain=accounts.google.com + - domain=gcloud.google.com + - domain=*.googleapis.com + - domain=storage.googleapis.com + - domain=compute.googleapis.com + - domain=container.googleapis.com + - domain=azure.com + - domain=portal.azure.com + - domain=microsoft.com + - domain=www.microsoft.com + - domain=*.microsoftonline.com + - domain=packages.microsoft.com + - domain=dotnet.microsoft.com + - domain=dot.net + - domain=visualstudio.com + - domain=dev.azure.com + - domain=oracle.com + - domain=www.oracle.com + - domain=java.com + - domain=www.java.com + - domain=java.net + - domain=www.java.net + - domain=download.oracle.com + - domain=yum.oracle.com + + # Package Managers - JavaScript/Node + - domain=registry.npmjs.org + - domain=www.npmjs.com + - domain=www.npmjs.org + - domain=npmjs.com + - domain=npmjs.org + - domain=yarnpkg.com + - domain=registry.yarnpkg.com + + # Package Managers - Python + - domain=pypi.org + - domain=www.pypi.org + - domain=files.pythonhosted.org + - domain=pythonhosted.org + - domain=test.pypi.org + - domain=pypi.python.org + - domain=pypa.io + - domain=www.pypa.io + + # Package Managers - Ruby + - domain=rubygems.org + - domain=www.rubygems.org + - domain=api.rubygems.org + - domain=index.rubygems.org + - domain=ruby-lang.org + - domain=www.ruby-lang.org + - domain=rubyforge.org + - domain=www.rubyforge.org + - domain=rubyonrails.org + - domain=www.rubyonrails.org + - domain=rvm.io + - domain=get.rvm.io + + # Package Managers - Rust + - domain=crates.io + - domain=www.crates.io + - domain=static.crates.io + - domain=rustup.rs + - domain=static.rust-lang.org + - domain=www.rust-lang.org + + # Package Managers - Go + - domain=proxy.golang.org + - domain=sum.golang.org + - domain=index.golang.org + - domain=golang.org + - domain=www.golang.org + - domain=goproxy.io + - domain=pkg.go.dev + + # Package Managers 
- JVM + - domain=maven.org + - domain=repo.maven.org + - domain=central.maven.org + - domain=repo1.maven.org + - domain=jcenter.bintray.com + - domain=gradle.org + - domain=www.gradle.org + - domain=services.gradle.org + - domain=spring.io + - domain=repo.spring.io + + # Package Managers - Other Languages + - domain=packagist.org + - domain=www.packagist.org + - domain=repo.packagist.org + - domain=nuget.org + - domain=www.nuget.org + - domain=api.nuget.org + - domain=pub.dev + - domain=api.pub.dev + - domain=hex.pm + - domain=www.hex.pm + - domain=cpan.org + - domain=www.cpan.org + - domain=metacpan.org + - domain=www.metacpan.org + - domain=api.metacpan.org + - domain=cocoapods.org + - domain=www.cocoapods.org + - domain=cdn.cocoapods.org + - domain=haskell.org + - domain=www.haskell.org + - domain=hackage.haskell.org + - domain=swift.org + - domain=www.swift.org + + # Linux Distributions + - domain=archive.ubuntu.com + - domain=security.ubuntu.com + - domain=ubuntu.com + - domain=www.ubuntu.com + - domain=*.ubuntu.com + - domain=ppa.launchpad.net + - domain=launchpad.net + - domain=www.launchpad.net + + # Development Tools & Platforms + - domain=dl.k8s.io + - domain=pkgs.k8s.io + - domain=k8s.io + - domain=www.k8s.io + - domain=releases.hashicorp.com + - domain=apt.releases.hashicorp.com + - domain=rpm.releases.hashicorp.com + - domain=archive.releases.hashicorp.com + - domain=hashicorp.com + - domain=www.hashicorp.com + - domain=repo.anaconda.com + - domain=conda.anaconda.org + - domain=anaconda.org + - domain=www.anaconda.com + - domain=anaconda.com + - domain=continuum.io + - domain=apache.org + - domain=www.apache.org + - domain=archive.apache.org + - domain=downloads.apache.org + - domain=eclipse.org + - domain=www.eclipse.org + - domain=download.eclipse.org + - domain=nodejs.org + - domain=www.nodejs.org + + # Cloud Services & Monitoring + - domain=statsig.com + - domain=www.statsig.com + - domain=api.statsig.com + - domain=*.sentry.io + + # Content 
Delivery & Mirrors + - domain=*.sourceforge.net + - domain=packagecloud.io + - domain=*.packagecloud.io + + # Schema & Configuration + - domain=json-schema.org + - domain=www.json-schema.org + - domain=json.schemastore.org + - domain=www.schemastore.org +log_dir: /tmp/boundary_logs +log_level: warn +proxy_port: 8087 diff --git a/dogfood/coder/main.tf b/dogfood/coder/main.tf index 05ec4a6a2e975..44cfe1654d3ea 100644 --- a/dogfood/coder/main.tf +++ b/dogfood/coder/main.tf @@ -2,7 +2,7 @@ terraform { required_providers { coder = { source = "coder/coder" - version = ">= 2.12.0" + version = ">= 2.13.0" } docker = { source = "kreuzwerker/docker" @@ -31,14 +31,12 @@ locals { // actually in Germany now. "eu-helsinki" = "tcp://katerose-fsn-cdr-dev.tailscale.svc.cluster.local:2375" "ap-sydney" = "tcp://wolfgang-syd-cdr-dev.tailscale.svc.cluster.local:2375" - "sa-saopaulo" = "tcp://oberstein-sao-cdr-dev.tailscale.svc.cluster.local:2375" "za-cpt" = "tcp://schonkopf-cpt-cdr-dev.tailscale.svc.cluster.local:2375" } repo_base_dir = data.coder_parameter.repo_base_dir.value == "~" ? 
"/home/coder" : replace(data.coder_parameter.repo_base_dir.value, "/^~\\//", "/home/coder/") repo_dir = replace(try(module.git-clone[0].repo_dir, ""), "/^~\\//", "/home/coder/") container_name = "coder-${data.coder_workspace_owner.me.name}-${lower(data.coder_workspace.me.name)}" - has_ai_prompt = data.coder_parameter.ai_prompt.value != "" } data "coder_workspace_preset" "cpt" { @@ -109,23 +107,6 @@ data "coder_workspace_preset" "sydney" { } } -data "coder_workspace_preset" "saopaulo" { - name = "São Paulo" - description = "Development workspace hosted in Brazil with 1 prebuild instance" - icon = "/emojis/1f1e7-1f1f7.png" - parameters = { - (data.coder_parameter.region.name) = "sa-saopaulo" - (data.coder_parameter.image_type.name) = "codercom/oss-dogfood:latest" - (data.coder_parameter.repo_base_dir.name) = "~" - (data.coder_parameter.res_mon_memory_threshold.name) = 80 - (data.coder_parameter.res_mon_volume_threshold.name) = 90 - (data.coder_parameter.res_mon_volume_path.name) = "/home/coder" - } - prebuilds { - instances = 1 - } -} - data "coder_parameter" "repo_base_dir" { type = "string" name = "Coder Repository Base Directory" @@ -157,7 +138,6 @@ locals { "north-america" : "us-pittsburgh" "europe" : "eu-helsinki" "australia" : "ap-sydney" - "south-america" : "sa-saopaulo" "africa" : "za-cpt" } @@ -190,11 +170,6 @@ data "coder_parameter" "region" { name = "Sydney" value = "ap-sydney" } - option { - icon = "/emojis/1f1e7-1f1f7.png" - name = "São Paulo" - value = "sa-saopaulo" - } option { icon = "/emojis/1f1ff-1f1e6.png" name = "Cape Town" @@ -242,12 +217,22 @@ data "coder_parameter" "devcontainer_autostart" { mutable = true } -data "coder_parameter" "ai_prompt" { - type = "string" - name = "AI Prompt" +data "coder_parameter" "use_ai_bridge" { + type = "bool" + name = "Use AI Bridge" + default = true + description = "If enabled, AI requests will be sent via AI Bridge." + mutable = true +} + +# Only used if AI Bridge is disabled. 
+# dogfood/main.tf injects this value from a GH Actions secret; +# `coderd_template.dogfood` passes the value injected by .github/workflows/dogfood.yaml in `TF_VAR_CODER_DOGFOOD_ANTHROPIC_API_KEY`. +variable "anthropic_api_key" { + type = string + description = "The API key used to authenticate with the Anthropic API, if AI Bridge is disabled." default = "" - description = "Prompt for Claude Code" - mutable = true // Workaround for issue with claiming a prebuild from a preset that does not include this parameter. + sensitive = true } provider "docker" { @@ -262,6 +247,7 @@ data "coder_external_auth" "github" { data "coder_workspace" "me" {} data "coder_workspace_owner" "me" {} +data "coder_task" "me" {} data "coder_workspace_tags" "tags" { tags = { "cluster" : "dogfood-v2" @@ -282,12 +268,17 @@ data "coder_parameter" "ide_choices" { form_type = "multi-select" mutable = true description = "Choose one or more IDEs to enable in your workspace" - default = jsonencode(["vscode", "code-server", "cursor"]) + default = jsonencode(["vscode", "code-server", "cursor", "mux"]) option { name = "VS Code Desktop" value = "vscode" icon = "/icon/code.svg" } + option { + name = "mux" + value = "mux" + icon = "/icon/mux.svg" + } option { name = "code-server" value = "code-server" @@ -384,10 +375,18 @@ module "personalize" { agent_id = coder_agent.dev.id } +module "mux" { + count = contains(jsondecode(data.coder_parameter.ide_choices.value), "mux") ? data.coder_workspace.me.start_count : 0 + source = "registry.coder.com/coder/mux/coder" + version = "1.0.1" + agent_id = coder_agent.dev.id + subdomain = true +} + module "code-server" { count = contains(jsondecode(data.coder_parameter.ide_choices.value), "code-server") ? 
data.coder_workspace.me.start_count : 0 source = "dev.registry.coder.com/coder/code-server/coder" - version = "1.3.1" + version = "1.4.0" agent_id = coder_agent.dev.id folder = local.repo_dir auto_install_extensions = true @@ -409,12 +408,12 @@ module "vscode-web" { module "jetbrains" { count = contains(jsondecode(data.coder_parameter.ide_choices.value), "jetbrains") ? data.coder_workspace.me.start_count : 0 source = "dev.registry.coder.com/coder/jetbrains/coder" - version = "1.1.0" + version = "1.2.0" agent_id = coder_agent.dev.id agent_name = "dev" folder = local.repo_dir major_version = "latest" - tooltip = "You need to [Install Coder Desktop](https://coder.com/docs/user-guides/desktop#install-coder-desktop) to use this button." + tooltip = "You need to [install JetBrains Toolbox](https://coder.com/docs/user-guides/workspace-access/jetbrains/toolbox) to use this app." } module "filebrowser" { @@ -477,11 +476,15 @@ resource "coder_agent" "dev" { arch = "amd64" os = "linux" dir = local.repo_dir - env = { - OIDC_TOKEN : data.coder_workspace_owner.me.oidc_access_token, - ANTHROPIC_BASE_URL : "https://dev.coder.com/api/experimental/aibridge/anthropic", - ANTHROPIC_AUTH_TOKEN : data.coder_workspace_owner.me.session_token - } + env = merge( + { + OIDC_TOKEN : data.coder_workspace_owner.me.oidc_access_token, + }, + data.coder_parameter.use_ai_bridge.value ? { + ANTHROPIC_BASE_URL : "https://dev.coder.com/api/v2/aibridge/anthropic", + ANTHROPIC_AUTH_TOKEN : data.coder_workspace_owner.me.session_token, + } : {} + ) startup_script_behavior = "blocking" display_apps { @@ -814,7 +817,7 @@ resource "coder_metadata" "container_info" { } item { key = "ai_task" - value = local.has_ai_prompt ? "yes" : "no" + value = data.coder_task.me.enabled ? 
"yes" : "no" } } @@ -830,15 +833,9 @@ locals { -- Tool Selection -- - playwright: previewing your changes after you made them to confirm it worked as expected - - desktop-commander - use only for commands that keep running - (servers, dev watchers, GUI apps). - Built-in tools - use for everything else: (file operations, git commands, builds & installs, one-off shell commands) - Remember this decision rule: - - Stays running? → desktop-commander - - Finishes immediately? → built-in tools - -- Context -- There is an existing application in the current directory. Be sure to read CLAUDE.md before making any changes. @@ -847,27 +844,46 @@ locals { EOT } +resource "coder_script" "boundary_config_setup" { + agent_id = coder_agent.dev.id + display_name = "Boundary Setup Configuration" + run_on_start = true + + script = <<-EOF + #!/bin/sh + mkdir -p ~/.config/coder_boundary + echo '${base64encode(file("${path.module}/boundary-config.yaml"))}' | base64 -d > ~/.config/coder_boundary/config.yaml + chmod 600 ~/.config/coder_boundary/config.yaml + EOF +} + module "claude-code" { - count = local.has_ai_prompt ? data.coder_workspace.me.start_count : 0 + count = data.coder_task.me.enabled ? data.coder_workspace.me.start_count : 0 source = "dev.registry.coder.com/coder/claude-code/coder" - version = "3.3.2" + version = "4.2.1" + enable_boundary = true + boundary_version = "v0.2.1" agent_id = coder_agent.dev.id workdir = local.repo_dir claude_code_version = "latest" order = 999 - claude_api_key = data.coder_workspace_owner.me.session_token # To Enable AI Bridge integration + claude_api_key = data.coder_parameter.use_ai_bridge.value ? 
data.coder_workspace_owner.me.session_token : var.anthropic_api_key agentapi_version = "latest" system_prompt = local.claude_system_prompt - ai_prompt = data.coder_parameter.ai_prompt.value + ai_prompt = data.coder_task.me.prompt post_install_script = <<-EOT claude mcp add playwright npx -- @playwright/mcp@latest --headless --isolated --no-sandbox - claude mcp add desktop-commander npx -- @wonderwhy-er/desktop-commander@latest EOT } +resource "coder_ai_task" "task" { + count = data.coder_task.me.enabled ? data.coder_workspace.me.start_count : 0 + app_id = module.claude-code[count.index].task_app_id +} + resource "coder_app" "develop_sh" { - count = local.has_ai_prompt ? data.coder_workspace.me.start_count : 0 + count = data.coder_task.me.enabled ? data.coder_workspace.me.start_count : 0 agent_id = coder_agent.dev.id slug = "develop-sh" display_name = "develop.sh" @@ -880,7 +896,7 @@ resource "coder_app" "develop_sh" { } resource "coder_script" "develop_sh" { - count = local.has_ai_prompt ? data.coder_workspace.me.start_count : 0 + count = data.coder_task.me.enabled ? data.coder_workspace.me.start_count : 0 display_name = "develop.sh" agent_id = coder_agent.dev.id run_on_start = true @@ -903,7 +919,7 @@ resource "coder_script" "develop_sh" { } resource "coder_app" "preview" { - count = local.has_ai_prompt ? data.coder_workspace.me.start_count : 0 + count = data.coder_task.me.enabled ? 
data.coder_workspace.me.start_count : 0 agent_id = coder_agent.dev.id slug = "preview" display_name = "Preview" diff --git a/dogfood/main.tf b/dogfood/main.tf index c79e950efadf4..49bc3a611b2eb 100644 --- a/dogfood/main.tf +++ b/dogfood/main.tf @@ -9,6 +9,11 @@ terraform { } } +import { + to = coderd_template.envbuilder_dogfood + id = "e75f1212-834c-4183-8bed-d6817cac60a5" +} + data "coderd_organization" "default" { is_default = true } diff --git a/enterprise/x/aibridged/aibridged.go b/enterprise/aibridged/aibridged.go similarity index 94% rename from enterprise/x/aibridged/aibridged.go rename to enterprise/aibridged/aibridged.go index a1fa4022ff960..edbd353ccf1d3 100644 --- a/enterprise/x/aibridged/aibridged.go +++ b/enterprise/aibridged/aibridged.go @@ -19,7 +19,7 @@ var _ io.Closer = &Server{} // Server provides the AI Bridge functionality. // It is responsible for: -// - receiving requests on /api/experimental/aibridged/* // TODO: update endpoint once out of experimental +// - receiving requests on /api/v2/aibridged/* // - manipulating the requests // - relaying requests to upstream AI services and relaying responses to caller // @@ -55,13 +55,14 @@ func New(ctx context.Context, pool Pooler, rpcDialer Dialer, logger slog.Logger) ctx, cancel := context.WithCancel(ctx) daemon := &Server{ - logger: logger, - clientDialer: rpcDialer, + logger: logger, + clientDialer: rpcDialer, + clientCh: make(chan DRPCClient), + lifecycleCtx: ctx, + cancelFn: cancel, + initConnectionCh: make(chan struct{}), + requestBridgePool: pool, - clientCh: make(chan DRPCClient), - lifecycleCtx: ctx, - cancelFn: cancel, - initConnectionCh: make(chan struct{}), } daemon.wg.Add(1) diff --git a/enterprise/x/aibridged/aibridged_integration_test.go b/enterprise/aibridged/aibridged_integration_test.go similarity index 63% rename from enterprise/x/aibridged/aibridged_integration_test.go rename to enterprise/aibridged/aibridged_integration_test.go index 45d47bd1b3507..cb1a3ed8f1b3b 100644 --- 
a/enterprise/x/aibridged/aibridged_integration_test.go +++ b/enterprise/aibridged/aibridged_integration_test.go @@ -9,7 +9,10 @@ import ( "testing" "time" + "github.com/prometheus/client_golang/prometheus" + promtest "github.com/prometheus/client_golang/prometheus/testutil" "github.com/stretchr/testify/require" + "github.com/tidwall/gjson" "github.com/coder/aibridge" "github.com/coder/coder/v2/coderd/coderdtest" @@ -18,9 +21,10 @@ import ( "github.com/coder/coder/v2/coderd/database/dbtestutil" "github.com/coder/coder/v2/coderd/database/dbtime" "github.com/coder/coder/v2/coderd/externalauth" + "github.com/coder/coder/v2/coderd/httpmw" "github.com/coder/coder/v2/codersdk" + "github.com/coder/coder/v2/enterprise/aibridged" "github.com/coder/coder/v2/enterprise/coderd/coderdenttest" - "github.com/coder/coder/v2/enterprise/x/aibridged" "github.com/coder/coder/v2/testutil" ) @@ -104,7 +108,7 @@ func TestIntegration(t *testing.T) { "completion_tokens": 15, "total_tokens": 75, "prompt_tokens_details": { - "cached_tokens": 0, + "cached_tokens": 15, "audio_tokens": 0 }, "completion_tokens_details": { @@ -165,7 +169,7 @@ func TestIntegration(t *testing.T) { logger := testutil.Logger(t) providers := []aibridge.Provider{aibridge.NewOpenAIProvider(aibridge.OpenAIConfig{BaseURL: mockOpenAI.URL})} - pool, err := aibridged.NewCachedBridgePool(aibridged.DefaultPoolOptions, providers, logger) + pool, err := aibridged.NewCachedBridgePool(aibridged.DefaultPoolOptions, providers, nil, logger) require.NoError(t, err) // Given: aibridged is started. 
@@ -221,6 +225,18 @@ func TestIntegration(t *testing.T) { require.NoError(t, err) require.Len(t, interceptions, 1) + intc0 := interceptions[0] + keyID, _, err := httpmw.SplitAPIToken(apiKey.Key) + require.NoError(t, err) + require.Equal(t, user.ID, intc0.InitiatorID) + require.True(t, intc0.APIKeyID.Valid) + require.Equal(t, keyID, intc0.APIKeyID.String) + require.Equal(t, "openai", intc0.Provider) + require.Equal(t, "gpt-4.1", intc0.Model) + require.True(t, intc0.EndedAt.Valid) + require.True(t, intc0.StartedAt.Before(intc0.EndedAt.Time)) + require.Less(t, intc0.EndedAt.Time.Sub(intc0.StartedAt), 5*time.Second) + prompts, err := db.GetAIBridgeUserPromptsByInterceptionID(ctx, interceptions[0].ID) require.NoError(t, err) require.Len(t, prompts, 1) @@ -229,8 +245,9 @@ func TestIntegration(t *testing.T) { tokens, err := db.GetAIBridgeTokenUsagesByInterceptionID(ctx, interceptions[0].ID) require.NoError(t, err) require.Len(t, tokens, 1) - require.EqualValues(t, tokens[0].InputTokens, 60) + require.EqualValues(t, tokens[0].InputTokens, 45) require.EqualValues(t, tokens[0].OutputTokens, 15) + require.EqualValues(t, gjson.Get(string(tokens[0].Metadata.RawMessage), "prompt_cached").Int(), 15) tools, err := db.GetAIBridgeToolUsagesByInterceptionID(ctx, interceptions[0].ID) require.NoError(t, err) @@ -240,3 +257,109 @@ func TestIntegration(t *testing.T) { // Then: the MCP server was initialized. require.Contains(t, mcpTokenReceived, authLink.OAuthAccessToken, "mock MCP server not requested") } + +// TestIntegrationWithMetrics validates that Prometheus metrics are correctly incremented +// when requests are processed through aibridged. +func TestIntegrationWithMetrics(t *testing.T) { + t.Parallel() + + ctx := testutil.Context(t, testutil.WaitLong) + + // Create prometheus registry and metrics. + registry := prometheus.NewRegistry() + metrics := aibridge.NewMetrics(registry) + + // Set up mock OpenAI server. 
+ mockOpenAI := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + w.Header().Set("Content-Type", "application/json") + w.WriteHeader(http.StatusOK) + _, _ = w.Write([]byte(`{ + "id": "chatcmpl-test", + "object": "chat.completion", + "created": 1753343279, + "model": "gpt-4.1", + "choices": [ + { + "index": 0, + "message": { + "role": "assistant", + "content": "test response" + }, + "finish_reason": "stop" + } + ], + "usage": { + "prompt_tokens": 10, + "completion_tokens": 5, + "total_tokens": 15 + } +}`)) + })) + t.Cleanup(mockOpenAI.Close) + + // Database and coderd setup. + db, ps := dbtestutil.NewDB(t) + client, _, api, firstUser := coderdenttest.NewWithAPI(t, &coderdenttest.Options{ + Options: &coderdtest.Options{ + Database: db, + Pubsub: ps, + }, + }) + + userClient, _ := coderdtest.CreateAnotherUser(t, client, firstUser.OrganizationID) + + // Create an API token for the user. + apiKey, err := userClient.CreateToken(ctx, "me", codersdk.CreateTokenRequest{ + TokenName: fmt.Sprintf("test-key-%d", time.Now().UnixNano()), + Lifetime: time.Hour, + Scope: codersdk.APIKeyScopeCoderAll, + }) + require.NoError(t, err) + + // Create aibridge client. + aiBridgeClient, err := api.CreateInMemoryAIBridgeServer(ctx) + require.NoError(t, err) + + logger := testutil.Logger(t) + providers := []aibridge.Provider{aibridge.NewOpenAIProvider(aibridge.OpenAIConfig{BaseURL: mockOpenAI.URL})} + + // Create pool with metrics. + pool, err := aibridged.NewCachedBridgePool(aibridged.DefaultPoolOptions, providers, metrics, logger) + require.NoError(t, err) + + // Given: aibridged is started. + srv, err := aibridged.New(ctx, pool, func(ctx context.Context) (aibridged.DRPCClient, error) { + return aiBridgeClient, nil + }, logger) + require.NoError(t, err, "create new aibridged") + t.Cleanup(func() { + _ = srv.Shutdown(ctx) + }) + + // When: a request is made to aibridged. 
+ req, err := http.NewRequestWithContext(ctx, http.MethodPost, "/openai/v1/chat/completions", bytes.NewBufferString(`{ + "messages": [ + { + "role": "user", + "content": "test message" + } + ], + "model": "gpt-4.1" +}`)) + require.NoError(t, err, "make request to test server") + req.Header.Add("Authorization", "Bearer "+apiKey.Key) + req.Header.Add("Accept", "application/json") + + // When: aibridged handles the request. + rec := httptest.NewRecorder() + srv.ServeHTTP(rec, req) + require.Equal(t, http.StatusOK, rec.Code) + + // Then: the interceptions metric should increase to 1. + // This is not exhaustively checking the available metrics; just an indicative one to prove + // the plumbing is working. + require.Eventually(t, func() bool { + count := promtest.ToFloat64(metrics.InterceptionCount) + return count == 1 + }, testutil.WaitShort, testutil.IntervalFast, "interceptions_total metric should be 1") +} diff --git a/enterprise/x/aibridged/aibridged_test.go b/enterprise/aibridged/aibridged_test.go similarity index 97% rename from enterprise/x/aibridged/aibridged_test.go rename to enterprise/aibridged/aibridged_test.go index 967e9aac2bce3..a66f3157a0d42 100644 --- a/enterprise/x/aibridged/aibridged_test.go +++ b/enterprise/aibridged/aibridged_test.go @@ -18,9 +18,9 @@ import ( "cdr.dev/slog/sloggers/slogtest" "github.com/coder/aibridge" "github.com/coder/coder/v2/codersdk" - "github.com/coder/coder/v2/enterprise/x/aibridged" - mock "github.com/coder/coder/v2/enterprise/x/aibridged/aibridgedmock" - "github.com/coder/coder/v2/enterprise/x/aibridged/proto" + "github.com/coder/coder/v2/enterprise/aibridged" + mock "github.com/coder/coder/v2/enterprise/aibridged/aibridgedmock" + "github.com/coder/coder/v2/enterprise/aibridged/proto" "github.com/coder/coder/v2/testutil" ) @@ -41,8 +41,7 @@ func newTestServer(t *testing.T) (*aibridged.Server, *mock.MockDRPCClient, *mock pool, func(ctx context.Context) (aibridged.DRPCClient, error) { return client, nil - }, - logger) + }, 
logger) require.NoError(t, err, "create new aibridged") t.Cleanup(func() { srv.Shutdown(context.Background()) @@ -291,7 +290,7 @@ func TestRouting(t *testing.T) { aibridge.NewOpenAIProvider(aibridge.OpenAIConfig{BaseURL: openaiSrv.URL}), aibridge.NewAnthropicProvider(aibridge.AnthropicConfig{BaseURL: antSrv.URL}, nil), } - pool, err := aibridged.NewCachedBridgePool(aibridged.DefaultPoolOptions, providers, logger) + pool, err := aibridged.NewCachedBridgePool(aibridged.DefaultPoolOptions, providers, nil, logger) require.NoError(t, err) conn := &mockDRPCConn{} client.EXPECT().DRPCConn().AnyTimes().Return(conn) diff --git a/enterprise/x/aibridged/aibridgedmock/clientmock.go b/enterprise/aibridged/aibridgedmock/clientmock.go similarity index 97% rename from enterprise/x/aibridged/aibridgedmock/clientmock.go rename to enterprise/aibridged/aibridgedmock/clientmock.go index c49a385451a8e..2bb7083e10924 100644 --- a/enterprise/x/aibridged/aibridgedmock/clientmock.go +++ b/enterprise/aibridged/aibridgedmock/clientmock.go @@ -1,9 +1,9 @@ // Code generated by MockGen. DO NOT EDIT. -// Source: github.com/coder/coder/v2/enterprise/x/aibridged (interfaces: DRPCClient) +// Source: github.com/coder/coder/v2/enterprise/aibridged (interfaces: DRPCClient) // // Generated by this command: // -// mockgen -destination ./clientmock.go -package aibridgedmock github.com/coder/coder/v2/enterprise/x/aibridged DRPCClient +// mockgen -destination ./clientmock.go -package aibridgedmock github.com/coder/coder/v2/enterprise/aibridged DRPCClient // // Package aibridgedmock is a generated GoMock package. 
@@ -13,7 +13,7 @@ import ( context "context" reflect "reflect" - proto "github.com/coder/coder/v2/enterprise/x/aibridged/proto" + proto "github.com/coder/coder/v2/enterprise/aibridged/proto" gomock "go.uber.org/mock/gomock" drpc "storj.io/drpc" ) diff --git a/enterprise/x/aibridged/aibridgedmock/doc.go b/enterprise/aibridged/aibridgedmock/doc.go similarity index 52% rename from enterprise/x/aibridged/aibridgedmock/doc.go rename to enterprise/aibridged/aibridgedmock/doc.go index 3d3f56c05574d..9c9c644570463 100644 --- a/enterprise/x/aibridged/aibridgedmock/doc.go +++ b/enterprise/aibridged/aibridgedmock/doc.go @@ -1,4 +1,4 @@ package aibridgedmock -//go:generate mockgen -destination ./clientmock.go -package aibridgedmock github.com/coder/coder/v2/enterprise/x/aibridged DRPCClient -//go:generate mockgen -destination ./poolmock.go -package aibridgedmock github.com/coder/coder/v2/enterprise/x/aibridged Pooler +//go:generate mockgen -destination ./clientmock.go -package aibridgedmock github.com/coder/coder/v2/enterprise/aibridged DRPCClient +//go:generate mockgen -destination ./poolmock.go -package aibridgedmock github.com/coder/coder/v2/enterprise/aibridged Pooler diff --git a/enterprise/x/aibridged/aibridgedmock/poolmock.go b/enterprise/aibridged/aibridgedmock/poolmock.go similarity index 91% rename from enterprise/x/aibridged/aibridgedmock/poolmock.go rename to enterprise/aibridged/aibridgedmock/poolmock.go index bf3b39ed2a879..fcd941fc7c989 100644 --- a/enterprise/x/aibridged/aibridgedmock/poolmock.go +++ b/enterprise/aibridged/aibridgedmock/poolmock.go @@ -1,9 +1,9 @@ // Code generated by MockGen. DO NOT EDIT. 
-// Source: github.com/coder/coder/v2/enterprise/x/aibridged (interfaces: Pooler) +// Source: github.com/coder/coder/v2/enterprise/aibridged (interfaces: Pooler) // // Generated by this command: // -// mockgen -destination ./poolmock.go -package aibridgedmock github.com/coder/coder/v2/enterprise/x/aibridged Pooler +// mockgen -destination ./poolmock.go -package aibridgedmock github.com/coder/coder/v2/enterprise/aibridged Pooler // // Package aibridgedmock is a generated GoMock package. @@ -14,7 +14,7 @@ import ( http "net/http" reflect "reflect" - aibridged "github.com/coder/coder/v2/enterprise/x/aibridged" + aibridged "github.com/coder/coder/v2/enterprise/aibridged" gomock "go.uber.org/mock/gomock" ) diff --git a/enterprise/x/aibridged/client.go b/enterprise/aibridged/client.go similarity index 90% rename from enterprise/x/aibridged/client.go rename to enterprise/aibridged/client.go index 3004a84df9626..60650bf994f28 100644 --- a/enterprise/x/aibridged/client.go +++ b/enterprise/aibridged/client.go @@ -5,7 +5,7 @@ import ( "storj.io/drpc" - "github.com/coder/coder/v2/enterprise/x/aibridged/proto" + "github.com/coder/coder/v2/enterprise/aibridged/proto" ) type Dialer func(ctx context.Context) (DRPCClient, error) diff --git a/enterprise/x/aibridged/http.go b/enterprise/aibridged/http.go similarity index 97% rename from enterprise/x/aibridged/http.go rename to enterprise/aibridged/http.go index 43f4ba7670671..7e41f0c0073f2 100644 --- a/enterprise/x/aibridged/http.go +++ b/enterprise/aibridged/http.go @@ -9,7 +9,7 @@ import ( "cdr.dev/slog" "github.com/coder/aibridge" - "github.com/coder/coder/v2/enterprise/x/aibridged/proto" + "github.com/coder/coder/v2/enterprise/aibridged/proto" ) var _ http.Handler = &Server{} @@ -68,6 +68,7 @@ func (s *Server) ServeHTTP(rw http.ResponseWriter, r *http.Request) { handler, err := s.GetRequestHandler(ctx, Request{ SessionKey: key, + APIKeyID: resp.ApiKeyId, InitiatorID: id, }) if err != nil { diff --git 
a/enterprise/x/aibridged/mcp.go b/enterprise/aibridged/mcp.go similarity index 99% rename from enterprise/x/aibridged/mcp.go rename to enterprise/aibridged/mcp.go index 4b42287e02899..ab6d1d0031d37 100644 --- a/enterprise/x/aibridged/mcp.go +++ b/enterprise/aibridged/mcp.go @@ -10,7 +10,7 @@ import ( "cdr.dev/slog" "github.com/coder/aibridge/mcp" - "github.com/coder/coder/v2/enterprise/x/aibridged/proto" + "github.com/coder/coder/v2/enterprise/aibridged/proto" ) var ( diff --git a/enterprise/x/aibridged/mcp_internal_test.go b/enterprise/aibridged/mcp_internal_test.go similarity index 95% rename from enterprise/x/aibridged/mcp_internal_test.go rename to enterprise/aibridged/mcp_internal_test.go index 20edf79d06bf5..37fb6fe2c25d2 100644 --- a/enterprise/x/aibridged/mcp_internal_test.go +++ b/enterprise/aibridged/mcp_internal_test.go @@ -5,7 +5,7 @@ import ( "github.com/stretchr/testify/require" - "github.com/coder/coder/v2/enterprise/x/aibridged/proto" + "github.com/coder/coder/v2/enterprise/aibridged/proto" "github.com/coder/coder/v2/testutil" ) diff --git a/enterprise/x/aibridged/pool.go b/enterprise/aibridged/pool.go similarity index 91% rename from enterprise/x/aibridged/pool.go rename to enterprise/aibridged/pool.go index 309f8fc61f86c..d043f533cdaf0 100644 --- a/enterprise/x/aibridged/pool.go +++ b/enterprise/aibridged/pool.go @@ -51,11 +51,13 @@ type CachedBridgePool struct { singleflight *singleflight.Group[string, *aibridge.RequestBridge] + metrics *aibridge.Metrics + shutDownOnce sync.Once shuttingDownCh chan struct{} } -func NewCachedBridgePool(options PoolOptions, providers []aibridge.Provider, logger slog.Logger) (*CachedBridgePool, error) { +func NewCachedBridgePool(options PoolOptions, providers []aibridge.Provider, metrics *aibridge.Metrics, logger slog.Logger) (*CachedBridgePool, error) { cache, err := ristretto.NewCache(&ristretto.Config[string, *aibridge.RequestBridge]{ NumCounters: options.MaxItems * 10, // Docs suggest setting this 10x number of 
keys. MaxCost: options.MaxItems * cacheCost, // Up to n instances. @@ -88,6 +90,8 @@ func NewCachedBridgePool(options PoolOptions, providers []aibridge.Provider, log singleflight: &singleflight.Group[string, *aibridge.RequestBridge]{}, + metrics: metrics, + shuttingDownCh: make(chan struct{}), }, nil } @@ -111,17 +115,9 @@ func (p *CachedBridgePool) Acquire(ctx context.Context, req Request, clientFn Cl // may visit the slow path unnecessarily. defer p.cache.Wait() - recorder := aibridge.NewRecorder(p.logger.Named("recorder"), func() (aibridge.Recorder, error) { - client, err := clientFn() - if err != nil { - return nil, xerrors.Errorf("acquire client: %w", err) - } - - return &recorderTranslation{client: client}, nil - }) - // Fast path. - bridge, ok := p.cache.Get(req.InitiatorID.String()) + cacheKey := req.InitiatorID.String() + "|" + req.APIKeyID + bridge, ok := p.cache.Get(cacheKey) if ok && bridge != nil { // TODO: future improvement: // Once we can detect token expiry against an MCP server, we no longer need to let these instances @@ -131,6 +127,15 @@ func (p *CachedBridgePool) Acquire(ctx context.Context, req Request, clientFn Cl return bridge, nil } + recorder := aibridge.NewRecorder(p.logger.Named("recorder"), func() (aibridge.Recorder, error) { + client, err := clientFn() + if err != nil { + return nil, xerrors.Errorf("acquire client: %w", err) + } + + return &recorderTranslation{apiKeyID: req.APIKeyID, client: client}, nil + }) + // Slow path. // Creating an *aibridge.RequestBridge may take some time, so gate all subsequent callers behind the initial request and return the resulting value. // TODO: track startup time since it adds latency to first request (histogram count will also help us see how often this occurs). 
@@ -153,12 +158,12 @@ func (p *CachedBridgePool) Acquire(ctx context.Context, req Request, clientFn Cl } } - bridge, err := aibridge.NewRequestBridge(ctx, p.providers, p.logger, recorder, mcpServers) + bridge, err := aibridge.NewRequestBridge(ctx, p.providers, recorder, mcpServers, p.metrics, p.logger) if err != nil { return nil, xerrors.Errorf("create new request bridge: %w", err) } - p.cache.SetWithTTL(req.InitiatorID.String(), bridge, cacheCost, p.options.TTL) + p.cache.SetWithTTL(cacheKey, bridge, cacheCost, p.options.TTL) return bridge, nil }) @@ -166,7 +171,7 @@ func (p *CachedBridgePool) Acquire(ctx context.Context, req Request, clientFn Cl return instance, err } -func (p *CachedBridgePool) Metrics() PoolMetrics { +func (p *CachedBridgePool) CacheMetrics() PoolMetrics { if p.cache == nil { return nil } diff --git a/enterprise/x/aibridged/pool_test.go b/enterprise/aibridged/pool_test.go similarity index 64% rename from enterprise/x/aibridged/pool_test.go rename to enterprise/aibridged/pool_test.go index 38cae85da9d92..d1b07d293d5c3 100644 --- a/enterprise/x/aibridged/pool_test.go +++ b/enterprise/aibridged/pool_test.go @@ -13,8 +13,8 @@ import ( "cdr.dev/slog/sloggers/slogtest" "github.com/coder/aibridge/mcp" "github.com/coder/aibridge/mcpmock" - "github.com/coder/coder/v2/enterprise/x/aibridged" - mock "github.com/coder/coder/v2/enterprise/x/aibridged/aibridgedmock" + "github.com/coder/coder/v2/enterprise/aibridged" + mock "github.com/coder/coder/v2/enterprise/aibridged/aibridgedmock" ) // TestPool validates the published behavior of [aibridged.CachedBridgePool]. 
@@ -30,11 +30,11 @@ func TestPool(t *testing.T) { mcpProxy := mcpmock.NewMockServerProxier(ctrl) opts := aibridged.PoolOptions{MaxItems: 1, TTL: time.Second} - pool, err := aibridged.NewCachedBridgePool(opts, nil, logger) + pool, err := aibridged.NewCachedBridgePool(opts, nil, nil, logger) require.NoError(t, err) t.Cleanup(func() { pool.Shutdown(context.Background()) }) - id, id2 := uuid.New(), uuid.New() + id, id2, apiKeyID1, apiKeyID2 := uuid.New(), uuid.New(), uuid.New(), uuid.New() clientFn := func() (aibridged.DRPCClient, error) { return client, nil } @@ -50,6 +50,7 @@ func TestPool(t *testing.T) { inst, err := pool.Acquire(t.Context(), aibridged.Request{ SessionKey: "key", InitiatorID: id, + APIKeyID: apiKeyID1.String(), }, clientFn, newMockMCPFactory(mcpProxy)) require.NoError(t, err, "acquire pool instance") @@ -57,15 +58,16 @@ func TestPool(t *testing.T) { instB, err := pool.Acquire(t.Context(), aibridged.Request{ SessionKey: "key", InitiatorID: id, + APIKeyID: apiKeyID1.String(), }, clientFn, newMockMCPFactory(mcpProxy)) require.NoError(t, err, "acquire pool instance") require.Same(t, inst, instB) - metrics := pool.Metrics() - require.EqualValues(t, 1, metrics.KeysAdded()) - require.EqualValues(t, 0, metrics.KeysEvicted()) - require.EqualValues(t, 1, metrics.Hits()) - require.EqualValues(t, 1, metrics.Misses()) + cacheMetrics := pool.CacheMetrics() + require.EqualValues(t, 1, cacheMetrics.KeysAdded()) + require.EqualValues(t, 0, cacheMetrics.KeysEvicted()) + require.EqualValues(t, 1, cacheMetrics.Hits()) + require.EqualValues(t, 1, cacheMetrics.Misses()) // This will get called again because a new instance will be created. 
mcpProxy.EXPECT().Init(gomock.Any()).Times(1).Return(nil) @@ -74,15 +76,34 @@ func TestPool(t *testing.T) { inst2, err := pool.Acquire(t.Context(), aibridged.Request{ SessionKey: "key", InitiatorID: id2, + APIKeyID: apiKeyID1.String(), }, clientFn, newMockMCPFactory(mcpProxy)) require.NoError(t, err, "acquire pool instance") require.NotSame(t, inst, inst2) - metrics = pool.Metrics() - require.EqualValues(t, 2, metrics.KeysAdded()) - require.EqualValues(t, 1, metrics.KeysEvicted()) - require.EqualValues(t, 1, metrics.Hits()) - require.EqualValues(t, 2, metrics.Misses()) + cacheMetrics = pool.CacheMetrics() + require.EqualValues(t, 2, cacheMetrics.KeysAdded()) + require.EqualValues(t, 1, cacheMetrics.KeysEvicted()) + require.EqualValues(t, 1, cacheMetrics.Hits()) + require.EqualValues(t, 2, cacheMetrics.Misses()) + + // This will get called again because a new instance will be created. + mcpProxy.EXPECT().Init(gomock.Any()).Times(1).Return(nil) + + // New instance is created for different api key id + inst2B, err := pool.Acquire(t.Context(), aibridged.Request{ + SessionKey: "key", + InitiatorID: id2, + APIKeyID: apiKeyID2.String(), + }, clientFn, newMockMCPFactory(mcpProxy)) + require.NoError(t, err, "acquire pool instance 2B") + require.NotSame(t, inst2, inst2B) + + cacheMetrics = pool.CacheMetrics() + require.EqualValues(t, 3, cacheMetrics.KeysAdded()) + require.EqualValues(t, 2, cacheMetrics.KeysEvicted()) + require.EqualValues(t, 1, cacheMetrics.Hits()) + require.EqualValues(t, 3, cacheMetrics.Misses()) // TODO: add test for expiry. 
// This requires Go 1.25's [synctest](https://pkg.go.dev/testing/synctest) since the diff --git a/enterprise/x/aibridged/proto/aibridged.pb.go b/enterprise/aibridged/proto/aibridged.pb.go similarity index 58% rename from enterprise/x/aibridged/proto/aibridged.pb.go rename to enterprise/aibridged/proto/aibridged.pb.go index 41d31563b4043..09c6f4eb8e5f4 100644 --- a/enterprise/x/aibridged/proto/aibridged.pb.go +++ b/enterprise/aibridged/proto/aibridged.pb.go @@ -2,7 +2,7 @@ // versions: // protoc-gen-go v1.30.0 // protoc v4.23.4 -// source: enterprise/x/aibridged/proto/aibridged.proto +// source: enterprise/aibridged/proto/aibridged.proto package proto @@ -33,12 +33,13 @@ type RecordInterceptionRequest struct { Model string `protobuf:"bytes,4,opt,name=model,proto3" json:"model,omitempty"` Metadata map[string]*anypb.Any `protobuf:"bytes,5,rep,name=metadata,proto3" json:"metadata,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"` StartedAt *timestamppb.Timestamp `protobuf:"bytes,6,opt,name=started_at,json=startedAt,proto3" json:"started_at,omitempty"` + ApiKeyId string `protobuf:"bytes,7,opt,name=api_key_id,json=apiKeyId,proto3" json:"api_key_id,omitempty"` } func (x *RecordInterceptionRequest) Reset() { *x = RecordInterceptionRequest{} if protoimpl.UnsafeEnabled { - mi := &file_enterprise_x_aibridged_proto_aibridged_proto_msgTypes[0] + mi := &file_enterprise_aibridged_proto_aibridged_proto_msgTypes[0] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } @@ -51,7 +52,7 @@ func (x *RecordInterceptionRequest) String() string { func (*RecordInterceptionRequest) ProtoMessage() {} func (x *RecordInterceptionRequest) ProtoReflect() protoreflect.Message { - mi := &file_enterprise_x_aibridged_proto_aibridged_proto_msgTypes[0] + mi := &file_enterprise_aibridged_proto_aibridged_proto_msgTypes[0] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if 
ms.LoadMessageInfo() == nil { @@ -64,7 +65,7 @@ func (x *RecordInterceptionRequest) ProtoReflect() protoreflect.Message { // Deprecated: Use RecordInterceptionRequest.ProtoReflect.Descriptor instead. func (*RecordInterceptionRequest) Descriptor() ([]byte, []int) { - return file_enterprise_x_aibridged_proto_aibridged_proto_rawDescGZIP(), []int{0} + return file_enterprise_aibridged_proto_aibridged_proto_rawDescGZIP(), []int{0} } func (x *RecordInterceptionRequest) GetId() string { @@ -109,6 +110,13 @@ func (x *RecordInterceptionRequest) GetStartedAt() *timestamppb.Timestamp { return nil } +func (x *RecordInterceptionRequest) GetApiKeyId() string { + if x != nil { + return x.ApiKeyId + } + return "" +} + type RecordInterceptionResponse struct { state protoimpl.MessageState sizeCache protoimpl.SizeCache @@ -118,7 +126,7 @@ type RecordInterceptionResponse struct { func (x *RecordInterceptionResponse) Reset() { *x = RecordInterceptionResponse{} if protoimpl.UnsafeEnabled { - mi := &file_enterprise_x_aibridged_proto_aibridged_proto_msgTypes[1] + mi := &file_enterprise_aibridged_proto_aibridged_proto_msgTypes[1] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } @@ -131,7 +139,7 @@ func (x *RecordInterceptionResponse) String() string { func (*RecordInterceptionResponse) ProtoMessage() {} func (x *RecordInterceptionResponse) ProtoReflect() protoreflect.Message { - mi := &file_enterprise_x_aibridged_proto_aibridged_proto_msgTypes[1] + mi := &file_enterprise_aibridged_proto_aibridged_proto_msgTypes[1] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { @@ -144,7 +152,7 @@ func (x *RecordInterceptionResponse) ProtoReflect() protoreflect.Message { // Deprecated: Use RecordInterceptionResponse.ProtoReflect.Descriptor instead. 
func (*RecordInterceptionResponse) Descriptor() ([]byte, []int) { - return file_enterprise_x_aibridged_proto_aibridged_proto_rawDescGZIP(), []int{1} + return file_enterprise_aibridged_proto_aibridged_proto_rawDescGZIP(), []int{1} } type RecordInterceptionEndedRequest struct { @@ -159,7 +167,7 @@ type RecordInterceptionEndedRequest struct { func (x *RecordInterceptionEndedRequest) Reset() { *x = RecordInterceptionEndedRequest{} if protoimpl.UnsafeEnabled { - mi := &file_enterprise_x_aibridged_proto_aibridged_proto_msgTypes[2] + mi := &file_enterprise_aibridged_proto_aibridged_proto_msgTypes[2] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } @@ -172,7 +180,7 @@ func (x *RecordInterceptionEndedRequest) String() string { func (*RecordInterceptionEndedRequest) ProtoMessage() {} func (x *RecordInterceptionEndedRequest) ProtoReflect() protoreflect.Message { - mi := &file_enterprise_x_aibridged_proto_aibridged_proto_msgTypes[2] + mi := &file_enterprise_aibridged_proto_aibridged_proto_msgTypes[2] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { @@ -185,7 +193,7 @@ func (x *RecordInterceptionEndedRequest) ProtoReflect() protoreflect.Message { // Deprecated: Use RecordInterceptionEndedRequest.ProtoReflect.Descriptor instead. 
func (*RecordInterceptionEndedRequest) Descriptor() ([]byte, []int) { - return file_enterprise_x_aibridged_proto_aibridged_proto_rawDescGZIP(), []int{2} + return file_enterprise_aibridged_proto_aibridged_proto_rawDescGZIP(), []int{2} } func (x *RecordInterceptionEndedRequest) GetId() string { @@ -211,7 +219,7 @@ type RecordInterceptionEndedResponse struct { func (x *RecordInterceptionEndedResponse) Reset() { *x = RecordInterceptionEndedResponse{} if protoimpl.UnsafeEnabled { - mi := &file_enterprise_x_aibridged_proto_aibridged_proto_msgTypes[3] + mi := &file_enterprise_aibridged_proto_aibridged_proto_msgTypes[3] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } @@ -224,7 +232,7 @@ func (x *RecordInterceptionEndedResponse) String() string { func (*RecordInterceptionEndedResponse) ProtoMessage() {} func (x *RecordInterceptionEndedResponse) ProtoReflect() protoreflect.Message { - mi := &file_enterprise_x_aibridged_proto_aibridged_proto_msgTypes[3] + mi := &file_enterprise_aibridged_proto_aibridged_proto_msgTypes[3] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { @@ -237,7 +245,7 @@ func (x *RecordInterceptionEndedResponse) ProtoReflect() protoreflect.Message { // Deprecated: Use RecordInterceptionEndedResponse.ProtoReflect.Descriptor instead. 
func (*RecordInterceptionEndedResponse) Descriptor() ([]byte, []int) { - return file_enterprise_x_aibridged_proto_aibridged_proto_rawDescGZIP(), []int{3} + return file_enterprise_aibridged_proto_aibridged_proto_rawDescGZIP(), []int{3} } type RecordTokenUsageRequest struct { @@ -256,7 +264,7 @@ type RecordTokenUsageRequest struct { func (x *RecordTokenUsageRequest) Reset() { *x = RecordTokenUsageRequest{} if protoimpl.UnsafeEnabled { - mi := &file_enterprise_x_aibridged_proto_aibridged_proto_msgTypes[4] + mi := &file_enterprise_aibridged_proto_aibridged_proto_msgTypes[4] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } @@ -269,7 +277,7 @@ func (x *RecordTokenUsageRequest) String() string { func (*RecordTokenUsageRequest) ProtoMessage() {} func (x *RecordTokenUsageRequest) ProtoReflect() protoreflect.Message { - mi := &file_enterprise_x_aibridged_proto_aibridged_proto_msgTypes[4] + mi := &file_enterprise_aibridged_proto_aibridged_proto_msgTypes[4] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { @@ -282,7 +290,7 @@ func (x *RecordTokenUsageRequest) ProtoReflect() protoreflect.Message { // Deprecated: Use RecordTokenUsageRequest.ProtoReflect.Descriptor instead. 
func (*RecordTokenUsageRequest) Descriptor() ([]byte, []int) { - return file_enterprise_x_aibridged_proto_aibridged_proto_rawDescGZIP(), []int{4} + return file_enterprise_aibridged_proto_aibridged_proto_rawDescGZIP(), []int{4} } func (x *RecordTokenUsageRequest) GetInterceptionId() string { @@ -336,7 +344,7 @@ type RecordTokenUsageResponse struct { func (x *RecordTokenUsageResponse) Reset() { *x = RecordTokenUsageResponse{} if protoimpl.UnsafeEnabled { - mi := &file_enterprise_x_aibridged_proto_aibridged_proto_msgTypes[5] + mi := &file_enterprise_aibridged_proto_aibridged_proto_msgTypes[5] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } @@ -349,7 +357,7 @@ func (x *RecordTokenUsageResponse) String() string { func (*RecordTokenUsageResponse) ProtoMessage() {} func (x *RecordTokenUsageResponse) ProtoReflect() protoreflect.Message { - mi := &file_enterprise_x_aibridged_proto_aibridged_proto_msgTypes[5] + mi := &file_enterprise_aibridged_proto_aibridged_proto_msgTypes[5] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { @@ -362,7 +370,7 @@ func (x *RecordTokenUsageResponse) ProtoReflect() protoreflect.Message { // Deprecated: Use RecordTokenUsageResponse.ProtoReflect.Descriptor instead. 
func (*RecordTokenUsageResponse) Descriptor() ([]byte, []int) { - return file_enterprise_x_aibridged_proto_aibridged_proto_rawDescGZIP(), []int{5} + return file_enterprise_aibridged_proto_aibridged_proto_rawDescGZIP(), []int{5} } type RecordPromptUsageRequest struct { @@ -380,7 +388,7 @@ type RecordPromptUsageRequest struct { func (x *RecordPromptUsageRequest) Reset() { *x = RecordPromptUsageRequest{} if protoimpl.UnsafeEnabled { - mi := &file_enterprise_x_aibridged_proto_aibridged_proto_msgTypes[6] + mi := &file_enterprise_aibridged_proto_aibridged_proto_msgTypes[6] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } @@ -393,7 +401,7 @@ func (x *RecordPromptUsageRequest) String() string { func (*RecordPromptUsageRequest) ProtoMessage() {} func (x *RecordPromptUsageRequest) ProtoReflect() protoreflect.Message { - mi := &file_enterprise_x_aibridged_proto_aibridged_proto_msgTypes[6] + mi := &file_enterprise_aibridged_proto_aibridged_proto_msgTypes[6] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { @@ -406,7 +414,7 @@ func (x *RecordPromptUsageRequest) ProtoReflect() protoreflect.Message { // Deprecated: Use RecordPromptUsageRequest.ProtoReflect.Descriptor instead. 
func (*RecordPromptUsageRequest) Descriptor() ([]byte, []int) { - return file_enterprise_x_aibridged_proto_aibridged_proto_rawDescGZIP(), []int{6} + return file_enterprise_aibridged_proto_aibridged_proto_rawDescGZIP(), []int{6} } func (x *RecordPromptUsageRequest) GetInterceptionId() string { @@ -453,7 +461,7 @@ type RecordPromptUsageResponse struct { func (x *RecordPromptUsageResponse) Reset() { *x = RecordPromptUsageResponse{} if protoimpl.UnsafeEnabled { - mi := &file_enterprise_x_aibridged_proto_aibridged_proto_msgTypes[7] + mi := &file_enterprise_aibridged_proto_aibridged_proto_msgTypes[7] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } @@ -466,7 +474,7 @@ func (x *RecordPromptUsageResponse) String() string { func (*RecordPromptUsageResponse) ProtoMessage() {} func (x *RecordPromptUsageResponse) ProtoReflect() protoreflect.Message { - mi := &file_enterprise_x_aibridged_proto_aibridged_proto_msgTypes[7] + mi := &file_enterprise_aibridged_proto_aibridged_proto_msgTypes[7] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { @@ -479,7 +487,7 @@ func (x *RecordPromptUsageResponse) ProtoReflect() protoreflect.Message { // Deprecated: Use RecordPromptUsageResponse.ProtoReflect.Descriptor instead. 
func (*RecordPromptUsageResponse) Descriptor() ([]byte, []int) { - return file_enterprise_x_aibridged_proto_aibridged_proto_rawDescGZIP(), []int{7} + return file_enterprise_aibridged_proto_aibridged_proto_rawDescGZIP(), []int{7} } type RecordToolUsageRequest struct { @@ -501,7 +509,7 @@ type RecordToolUsageRequest struct { func (x *RecordToolUsageRequest) Reset() { *x = RecordToolUsageRequest{} if protoimpl.UnsafeEnabled { - mi := &file_enterprise_x_aibridged_proto_aibridged_proto_msgTypes[8] + mi := &file_enterprise_aibridged_proto_aibridged_proto_msgTypes[8] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } @@ -514,7 +522,7 @@ func (x *RecordToolUsageRequest) String() string { func (*RecordToolUsageRequest) ProtoMessage() {} func (x *RecordToolUsageRequest) ProtoReflect() protoreflect.Message { - mi := &file_enterprise_x_aibridged_proto_aibridged_proto_msgTypes[8] + mi := &file_enterprise_aibridged_proto_aibridged_proto_msgTypes[8] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { @@ -527,7 +535,7 @@ func (x *RecordToolUsageRequest) ProtoReflect() protoreflect.Message { // Deprecated: Use RecordToolUsageRequest.ProtoReflect.Descriptor instead. 
func (*RecordToolUsageRequest) Descriptor() ([]byte, []int) { - return file_enterprise_x_aibridged_proto_aibridged_proto_rawDescGZIP(), []int{8} + return file_enterprise_aibridged_proto_aibridged_proto_rawDescGZIP(), []int{8} } func (x *RecordToolUsageRequest) GetInterceptionId() string { @@ -602,7 +610,7 @@ type RecordToolUsageResponse struct { func (x *RecordToolUsageResponse) Reset() { *x = RecordToolUsageResponse{} if protoimpl.UnsafeEnabled { - mi := &file_enterprise_x_aibridged_proto_aibridged_proto_msgTypes[9] + mi := &file_enterprise_aibridged_proto_aibridged_proto_msgTypes[9] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } @@ -615,7 +623,7 @@ func (x *RecordToolUsageResponse) String() string { func (*RecordToolUsageResponse) ProtoMessage() {} func (x *RecordToolUsageResponse) ProtoReflect() protoreflect.Message { - mi := &file_enterprise_x_aibridged_proto_aibridged_proto_msgTypes[9] + mi := &file_enterprise_aibridged_proto_aibridged_proto_msgTypes[9] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { @@ -628,7 +636,7 @@ func (x *RecordToolUsageResponse) ProtoReflect() protoreflect.Message { // Deprecated: Use RecordToolUsageResponse.ProtoReflect.Descriptor instead. 
func (*RecordToolUsageResponse) Descriptor() ([]byte, []int) { - return file_enterprise_x_aibridged_proto_aibridged_proto_rawDescGZIP(), []int{9} + return file_enterprise_aibridged_proto_aibridged_proto_rawDescGZIP(), []int{9} } type GetMCPServerConfigsRequest struct { @@ -642,7 +650,7 @@ type GetMCPServerConfigsRequest struct { func (x *GetMCPServerConfigsRequest) Reset() { *x = GetMCPServerConfigsRequest{} if protoimpl.UnsafeEnabled { - mi := &file_enterprise_x_aibridged_proto_aibridged_proto_msgTypes[10] + mi := &file_enterprise_aibridged_proto_aibridged_proto_msgTypes[10] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } @@ -655,7 +663,7 @@ func (x *GetMCPServerConfigsRequest) String() string { func (*GetMCPServerConfigsRequest) ProtoMessage() {} func (x *GetMCPServerConfigsRequest) ProtoReflect() protoreflect.Message { - mi := &file_enterprise_x_aibridged_proto_aibridged_proto_msgTypes[10] + mi := &file_enterprise_aibridged_proto_aibridged_proto_msgTypes[10] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { @@ -668,7 +676,7 @@ func (x *GetMCPServerConfigsRequest) ProtoReflect() protoreflect.Message { // Deprecated: Use GetMCPServerConfigsRequest.ProtoReflect.Descriptor instead. 
func (*GetMCPServerConfigsRequest) Descriptor() ([]byte, []int) { - return file_enterprise_x_aibridged_proto_aibridged_proto_rawDescGZIP(), []int{10} + return file_enterprise_aibridged_proto_aibridged_proto_rawDescGZIP(), []int{10} } func (x *GetMCPServerConfigsRequest) GetUserId() string { @@ -690,7 +698,7 @@ type GetMCPServerConfigsResponse struct { func (x *GetMCPServerConfigsResponse) Reset() { *x = GetMCPServerConfigsResponse{} if protoimpl.UnsafeEnabled { - mi := &file_enterprise_x_aibridged_proto_aibridged_proto_msgTypes[11] + mi := &file_enterprise_aibridged_proto_aibridged_proto_msgTypes[11] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } @@ -703,7 +711,7 @@ func (x *GetMCPServerConfigsResponse) String() string { func (*GetMCPServerConfigsResponse) ProtoMessage() {} func (x *GetMCPServerConfigsResponse) ProtoReflect() protoreflect.Message { - mi := &file_enterprise_x_aibridged_proto_aibridged_proto_msgTypes[11] + mi := &file_enterprise_aibridged_proto_aibridged_proto_msgTypes[11] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { @@ -716,7 +724,7 @@ func (x *GetMCPServerConfigsResponse) ProtoReflect() protoreflect.Message { // Deprecated: Use GetMCPServerConfigsResponse.ProtoReflect.Descriptor instead. 
func (*GetMCPServerConfigsResponse) Descriptor() ([]byte, []int) { - return file_enterprise_x_aibridged_proto_aibridged_proto_rawDescGZIP(), []int{11} + return file_enterprise_aibridged_proto_aibridged_proto_rawDescGZIP(), []int{11} } func (x *GetMCPServerConfigsResponse) GetCoderMcpConfig() *MCPServerConfig { @@ -747,7 +755,7 @@ type MCPServerConfig struct { func (x *MCPServerConfig) Reset() { *x = MCPServerConfig{} if protoimpl.UnsafeEnabled { - mi := &file_enterprise_x_aibridged_proto_aibridged_proto_msgTypes[12] + mi := &file_enterprise_aibridged_proto_aibridged_proto_msgTypes[12] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } @@ -760,7 +768,7 @@ func (x *MCPServerConfig) String() string { func (*MCPServerConfig) ProtoMessage() {} func (x *MCPServerConfig) ProtoReflect() protoreflect.Message { - mi := &file_enterprise_x_aibridged_proto_aibridged_proto_msgTypes[12] + mi := &file_enterprise_aibridged_proto_aibridged_proto_msgTypes[12] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { @@ -773,7 +781,7 @@ func (x *MCPServerConfig) ProtoReflect() protoreflect.Message { // Deprecated: Use MCPServerConfig.ProtoReflect.Descriptor instead. 
func (*MCPServerConfig) Descriptor() ([]byte, []int) { - return file_enterprise_x_aibridged_proto_aibridged_proto_rawDescGZIP(), []int{12} + return file_enterprise_aibridged_proto_aibridged_proto_rawDescGZIP(), []int{12} } func (x *MCPServerConfig) GetId() string { @@ -816,7 +824,7 @@ type GetMCPServerAccessTokensBatchRequest struct { func (x *GetMCPServerAccessTokensBatchRequest) Reset() { *x = GetMCPServerAccessTokensBatchRequest{} if protoimpl.UnsafeEnabled { - mi := &file_enterprise_x_aibridged_proto_aibridged_proto_msgTypes[13] + mi := &file_enterprise_aibridged_proto_aibridged_proto_msgTypes[13] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } @@ -829,7 +837,7 @@ func (x *GetMCPServerAccessTokensBatchRequest) String() string { func (*GetMCPServerAccessTokensBatchRequest) ProtoMessage() {} func (x *GetMCPServerAccessTokensBatchRequest) ProtoReflect() protoreflect.Message { - mi := &file_enterprise_x_aibridged_proto_aibridged_proto_msgTypes[13] + mi := &file_enterprise_aibridged_proto_aibridged_proto_msgTypes[13] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { @@ -842,7 +850,7 @@ func (x *GetMCPServerAccessTokensBatchRequest) ProtoReflect() protoreflect.Messa // Deprecated: Use GetMCPServerAccessTokensBatchRequest.ProtoReflect.Descriptor instead. 
func (*GetMCPServerAccessTokensBatchRequest) Descriptor() ([]byte, []int) { - return file_enterprise_x_aibridged_proto_aibridged_proto_rawDescGZIP(), []int{13} + return file_enterprise_aibridged_proto_aibridged_proto_rawDescGZIP(), []int{13} } func (x *GetMCPServerAccessTokensBatchRequest) GetUserId() string { @@ -873,7 +881,7 @@ type GetMCPServerAccessTokensBatchResponse struct { func (x *GetMCPServerAccessTokensBatchResponse) Reset() { *x = GetMCPServerAccessTokensBatchResponse{} if protoimpl.UnsafeEnabled { - mi := &file_enterprise_x_aibridged_proto_aibridged_proto_msgTypes[14] + mi := &file_enterprise_aibridged_proto_aibridged_proto_msgTypes[14] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } @@ -886,7 +894,7 @@ func (x *GetMCPServerAccessTokensBatchResponse) String() string { func (*GetMCPServerAccessTokensBatchResponse) ProtoMessage() {} func (x *GetMCPServerAccessTokensBatchResponse) ProtoReflect() protoreflect.Message { - mi := &file_enterprise_x_aibridged_proto_aibridged_proto_msgTypes[14] + mi := &file_enterprise_aibridged_proto_aibridged_proto_msgTypes[14] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { @@ -899,7 +907,7 @@ func (x *GetMCPServerAccessTokensBatchResponse) ProtoReflect() protoreflect.Mess // Deprecated: Use GetMCPServerAccessTokensBatchResponse.ProtoReflect.Descriptor instead. 
func (*GetMCPServerAccessTokensBatchResponse) Descriptor() ([]byte, []int) { - return file_enterprise_x_aibridged_proto_aibridged_proto_rawDescGZIP(), []int{14} + return file_enterprise_aibridged_proto_aibridged_proto_rawDescGZIP(), []int{14} } func (x *GetMCPServerAccessTokensBatchResponse) GetAccessTokens() map[string]string { @@ -927,7 +935,7 @@ type IsAuthorizedRequest struct { func (x *IsAuthorizedRequest) Reset() { *x = IsAuthorizedRequest{} if protoimpl.UnsafeEnabled { - mi := &file_enterprise_x_aibridged_proto_aibridged_proto_msgTypes[15] + mi := &file_enterprise_aibridged_proto_aibridged_proto_msgTypes[15] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } @@ -940,7 +948,7 @@ func (x *IsAuthorizedRequest) String() string { func (*IsAuthorizedRequest) ProtoMessage() {} func (x *IsAuthorizedRequest) ProtoReflect() protoreflect.Message { - mi := &file_enterprise_x_aibridged_proto_aibridged_proto_msgTypes[15] + mi := &file_enterprise_aibridged_proto_aibridged_proto_msgTypes[15] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { @@ -953,7 +961,7 @@ func (x *IsAuthorizedRequest) ProtoReflect() protoreflect.Message { // Deprecated: Use IsAuthorizedRequest.ProtoReflect.Descriptor instead. 
func (*IsAuthorizedRequest) Descriptor() ([]byte, []int) { - return file_enterprise_x_aibridged_proto_aibridged_proto_rawDescGZIP(), []int{15} + return file_enterprise_aibridged_proto_aibridged_proto_rawDescGZIP(), []int{15} } func (x *IsAuthorizedRequest) GetKey() string { @@ -968,13 +976,14 @@ type IsAuthorizedResponse struct { sizeCache protoimpl.SizeCache unknownFields protoimpl.UnknownFields - OwnerId string `protobuf:"bytes,1,opt,name=owner_id,json=ownerId,proto3" json:"owner_id,omitempty"` + OwnerId string `protobuf:"bytes,1,opt,name=owner_id,json=ownerId,proto3" json:"owner_id,omitempty"` + ApiKeyId string `protobuf:"bytes,2,opt,name=api_key_id,json=apiKeyId,proto3" json:"api_key_id,omitempty"` } func (x *IsAuthorizedResponse) Reset() { *x = IsAuthorizedResponse{} if protoimpl.UnsafeEnabled { - mi := &file_enterprise_x_aibridged_proto_aibridged_proto_msgTypes[16] + mi := &file_enterprise_aibridged_proto_aibridged_proto_msgTypes[16] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } @@ -987,7 +996,7 @@ func (x *IsAuthorizedResponse) String() string { func (*IsAuthorizedResponse) ProtoMessage() {} func (x *IsAuthorizedResponse) ProtoReflect() protoreflect.Message { - mi := &file_enterprise_x_aibridged_proto_aibridged_proto_msgTypes[16] + mi := &file_enterprise_aibridged_proto_aibridged_proto_msgTypes[16] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { @@ -1000,7 +1009,7 @@ func (x *IsAuthorizedResponse) ProtoReflect() protoreflect.Message { // Deprecated: Use IsAuthorizedResponse.ProtoReflect.Descriptor instead. 
func (*IsAuthorizedResponse) Descriptor() ([]byte, []int) { - return file_enterprise_x_aibridged_proto_aibridged_proto_rawDescGZIP(), []int{16} + return file_enterprise_aibridged_proto_aibridged_proto_rawDescGZIP(), []int{16} } func (x *IsAuthorizedResponse) GetOwnerId() string { @@ -1010,257 +1019,267 @@ func (x *IsAuthorizedResponse) GetOwnerId() string { return "" } -var File_enterprise_x_aibridged_proto_aibridged_proto protoreflect.FileDescriptor - -var file_enterprise_x_aibridged_proto_aibridged_proto_rawDesc = []byte{ - 0x0a, 0x2c, 0x65, 0x6e, 0x74, 0x65, 0x72, 0x70, 0x72, 0x69, 0x73, 0x65, 0x2f, 0x78, 0x2f, 0x61, - 0x69, 0x62, 0x72, 0x69, 0x64, 0x67, 0x65, 0x64, 0x2f, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x2f, 0x61, - 0x69, 0x62, 0x72, 0x69, 0x64, 0x67, 0x65, 0x64, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x12, 0x05, - 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x1a, 0x19, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2f, 0x70, 0x72, - 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2f, 0x61, 0x6e, 0x79, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, - 0x1a, 0x1f, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2f, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, - 0x66, 0x2f, 0x74, 0x69, 0x6d, 0x65, 0x73, 0x74, 0x61, 0x6d, 0x70, 0x2e, 0x70, 0x72, 0x6f, 0x74, - 0x6f, 0x22, 0xda, 0x02, 0x0a, 0x19, 0x52, 0x65, 0x63, 0x6f, 0x72, 0x64, 0x49, 0x6e, 0x74, 0x65, - 0x72, 0x63, 0x65, 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, - 0x0e, 0x0a, 0x02, 0x69, 0x64, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x02, 0x69, 0x64, 0x12, - 0x21, 0x0a, 0x0c, 0x69, 0x6e, 0x69, 0x74, 0x69, 0x61, 0x74, 0x6f, 0x72, 0x5f, 0x69, 0x64, 0x18, - 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0b, 0x69, 0x6e, 0x69, 0x74, 0x69, 0x61, 0x74, 0x6f, 0x72, - 0x49, 0x64, 0x12, 0x1a, 0x0a, 0x08, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x64, 0x65, 0x72, 0x18, 0x03, - 0x20, 0x01, 0x28, 0x09, 0x52, 0x08, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x64, 0x65, 0x72, 0x12, 0x14, - 0x0a, 0x05, 0x6d, 0x6f, 0x64, 0x65, 0x6c, 0x18, 0x04, 0x20, 0x01, 0x28, 0x09, 0x52, 0x05, 0x6d, 
- 0x6f, 0x64, 0x65, 0x6c, 0x12, 0x4a, 0x0a, 0x08, 0x6d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, - 0x18, 0x05, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x2e, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x2e, 0x52, - 0x65, 0x63, 0x6f, 0x72, 0x64, 0x49, 0x6e, 0x74, 0x65, 0x72, 0x63, 0x65, 0x70, 0x74, 0x69, 0x6f, - 0x6e, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x2e, 0x4d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, - 0x61, 0x45, 0x6e, 0x74, 0x72, 0x79, 0x52, 0x08, 0x6d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, - 0x12, 0x39, 0x0a, 0x0a, 0x73, 0x74, 0x61, 0x72, 0x74, 0x65, 0x64, 0x5f, 0x61, 0x74, 0x18, 0x06, - 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1a, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x70, 0x72, - 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2e, 0x54, 0x69, 0x6d, 0x65, 0x73, 0x74, 0x61, 0x6d, 0x70, - 0x52, 0x09, 0x73, 0x74, 0x61, 0x72, 0x74, 0x65, 0x64, 0x41, 0x74, 0x1a, 0x51, 0x0a, 0x0d, 0x4d, - 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x45, 0x6e, 0x74, 0x72, 0x79, 0x12, 0x10, 0x0a, 0x03, - 0x6b, 0x65, 0x79, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x03, 0x6b, 0x65, 0x79, 0x12, 0x2a, - 0x0a, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x14, 0x2e, - 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2e, - 0x41, 0x6e, 0x79, 0x52, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x3a, 0x02, 0x38, 0x01, 0x22, 0x1c, - 0x0a, 0x1a, 0x52, 0x65, 0x63, 0x6f, 0x72, 0x64, 0x49, 0x6e, 0x74, 0x65, 0x72, 0x63, 0x65, 0x70, - 0x74, 0x69, 0x6f, 0x6e, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x22, 0x67, 0x0a, 0x1e, - 0x52, 0x65, 0x63, 0x6f, 0x72, 0x64, 0x49, 0x6e, 0x74, 0x65, 0x72, 0x63, 0x65, 0x70, 0x74, 0x69, - 0x6f, 0x6e, 0x45, 0x6e, 0x64, 0x65, 0x64, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x0e, - 0x0a, 0x02, 0x69, 0x64, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x02, 0x69, 0x64, 0x12, 0x35, - 0x0a, 0x08, 0x65, 0x6e, 0x64, 0x65, 0x64, 0x5f, 0x61, 0x74, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, - 0x32, 0x1a, 0x2e, 0x67, 0x6f, 0x6f, 
0x67, 0x6c, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, - 0x75, 0x66, 0x2e, 0x54, 0x69, 0x6d, 0x65, 0x73, 0x74, 0x61, 0x6d, 0x70, 0x52, 0x07, 0x65, 0x6e, - 0x64, 0x65, 0x64, 0x41, 0x74, 0x22, 0x21, 0x0a, 0x1f, 0x52, 0x65, 0x63, 0x6f, 0x72, 0x64, 0x49, - 0x6e, 0x74, 0x65, 0x72, 0x63, 0x65, 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x45, 0x6e, 0x64, 0x65, 0x64, - 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x22, 0xf9, 0x02, 0x0a, 0x17, 0x52, 0x65, 0x63, - 0x6f, 0x72, 0x64, 0x54, 0x6f, 0x6b, 0x65, 0x6e, 0x55, 0x73, 0x61, 0x67, 0x65, 0x52, 0x65, 0x71, - 0x75, 0x65, 0x73, 0x74, 0x12, 0x27, 0x0a, 0x0f, 0x69, 0x6e, 0x74, 0x65, 0x72, 0x63, 0x65, 0x70, - 0x74, 0x69, 0x6f, 0x6e, 0x5f, 0x69, 0x64, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0e, 0x69, - 0x6e, 0x74, 0x65, 0x72, 0x63, 0x65, 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x49, 0x64, 0x12, 0x15, 0x0a, - 0x06, 0x6d, 0x73, 0x67, 0x5f, 0x69, 0x64, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x05, 0x6d, - 0x73, 0x67, 0x49, 0x64, 0x12, 0x21, 0x0a, 0x0c, 0x69, 0x6e, 0x70, 0x75, 0x74, 0x5f, 0x74, 0x6f, - 0x6b, 0x65, 0x6e, 0x73, 0x18, 0x03, 0x20, 0x01, 0x28, 0x03, 0x52, 0x0b, 0x69, 0x6e, 0x70, 0x75, - 0x74, 0x54, 0x6f, 0x6b, 0x65, 0x6e, 0x73, 0x12, 0x23, 0x0a, 0x0d, 0x6f, 0x75, 0x74, 0x70, 0x75, - 0x74, 0x5f, 0x74, 0x6f, 0x6b, 0x65, 0x6e, 0x73, 0x18, 0x04, 0x20, 0x01, 0x28, 0x03, 0x52, 0x0c, - 0x6f, 0x75, 0x74, 0x70, 0x75, 0x74, 0x54, 0x6f, 0x6b, 0x65, 0x6e, 0x73, 0x12, 0x48, 0x0a, 0x08, - 0x6d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x18, 0x05, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x2c, - 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x2e, 0x52, 0x65, 0x63, 0x6f, 0x72, 0x64, 0x54, 0x6f, 0x6b, - 0x65, 0x6e, 0x55, 0x73, 0x61, 0x67, 0x65, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x2e, 0x4d, - 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x45, 0x6e, 0x74, 0x72, 0x79, 0x52, 0x08, 0x6d, 0x65, - 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x12, 0x39, 0x0a, 0x0a, 0x63, 0x72, 0x65, 0x61, 0x74, 0x65, - 0x64, 0x5f, 0x61, 0x74, 0x18, 0x06, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1a, 
0x2e, 0x67, 0x6f, 0x6f, - 0x67, 0x6c, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2e, 0x54, 0x69, 0x6d, - 0x65, 0x73, 0x74, 0x61, 0x6d, 0x70, 0x52, 0x09, 0x63, 0x72, 0x65, 0x61, 0x74, 0x65, 0x64, 0x41, - 0x74, 0x1a, 0x51, 0x0a, 0x0d, 0x4d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x45, 0x6e, 0x74, - 0x72, 0x79, 0x12, 0x10, 0x0a, 0x03, 0x6b, 0x65, 0x79, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, - 0x03, 0x6b, 0x65, 0x79, 0x12, 0x2a, 0x0a, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x18, 0x02, 0x20, - 0x01, 0x28, 0x0b, 0x32, 0x14, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x70, 0x72, 0x6f, - 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2e, 0x41, 0x6e, 0x79, 0x52, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, - 0x3a, 0x02, 0x38, 0x01, 0x22, 0x1a, 0x0a, 0x18, 0x52, 0x65, 0x63, 0x6f, 0x72, 0x64, 0x54, 0x6f, - 0x6b, 0x65, 0x6e, 0x55, 0x73, 0x61, 0x67, 0x65, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, - 0x22, 0xcb, 0x02, 0x0a, 0x18, 0x52, 0x65, 0x63, 0x6f, 0x72, 0x64, 0x50, 0x72, 0x6f, 0x6d, 0x70, - 0x74, 0x55, 0x73, 0x61, 0x67, 0x65, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x27, 0x0a, - 0x0f, 0x69, 0x6e, 0x74, 0x65, 0x72, 0x63, 0x65, 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x5f, 0x69, 0x64, - 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0e, 0x69, 0x6e, 0x74, 0x65, 0x72, 0x63, 0x65, 0x70, - 0x74, 0x69, 0x6f, 0x6e, 0x49, 0x64, 0x12, 0x15, 0x0a, 0x06, 0x6d, 0x73, 0x67, 0x5f, 0x69, 0x64, - 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x05, 0x6d, 0x73, 0x67, 0x49, 0x64, 0x12, 0x16, 0x0a, - 0x06, 0x70, 0x72, 0x6f, 0x6d, 0x70, 0x74, 0x18, 0x03, 0x20, 0x01, 0x28, 0x09, 0x52, 0x06, 0x70, - 0x72, 0x6f, 0x6d, 0x70, 0x74, 0x12, 0x49, 0x0a, 0x08, 0x6d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, - 0x61, 0x18, 0x04, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x2d, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x2e, - 0x52, 0x65, 0x63, 0x6f, 0x72, 0x64, 0x50, 0x72, 0x6f, 0x6d, 0x70, 0x74, 0x55, 0x73, 0x61, 0x67, - 0x65, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x2e, 0x4d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, - 0x61, 0x45, 
0x6e, 0x74, 0x72, 0x79, 0x52, 0x08, 0x6d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, - 0x12, 0x39, 0x0a, 0x0a, 0x63, 0x72, 0x65, 0x61, 0x74, 0x65, 0x64, 0x5f, 0x61, 0x74, 0x18, 0x05, - 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1a, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x70, 0x72, - 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2e, 0x54, 0x69, 0x6d, 0x65, 0x73, 0x74, 0x61, 0x6d, 0x70, - 0x52, 0x09, 0x63, 0x72, 0x65, 0x61, 0x74, 0x65, 0x64, 0x41, 0x74, 0x1a, 0x51, 0x0a, 0x0d, 0x4d, - 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x45, 0x6e, 0x74, 0x72, 0x79, 0x12, 0x10, 0x0a, 0x03, - 0x6b, 0x65, 0x79, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x03, 0x6b, 0x65, 0x79, 0x12, 0x2a, - 0x0a, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x14, 0x2e, - 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2e, - 0x41, 0x6e, 0x79, 0x52, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x3a, 0x02, 0x38, 0x01, 0x22, 0x1b, - 0x0a, 0x19, 0x52, 0x65, 0x63, 0x6f, 0x72, 0x64, 0x50, 0x72, 0x6f, 0x6d, 0x70, 0x74, 0x55, 0x73, - 0x61, 0x67, 0x65, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x22, 0xed, 0x03, 0x0a, 0x16, - 0x52, 0x65, 0x63, 0x6f, 0x72, 0x64, 0x54, 0x6f, 0x6f, 0x6c, 0x55, 0x73, 0x61, 0x67, 0x65, 0x52, - 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x27, 0x0a, 0x0f, 0x69, 0x6e, 0x74, 0x65, 0x72, 0x63, - 0x65, 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x5f, 0x69, 0x64, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, - 0x0e, 0x69, 0x6e, 0x74, 0x65, 0x72, 0x63, 0x65, 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x49, 0x64, 0x12, - 0x15, 0x0a, 0x06, 0x6d, 0x73, 0x67, 0x5f, 0x69, 0x64, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, - 0x05, 0x6d, 0x73, 0x67, 0x49, 0x64, 0x12, 0x22, 0x0a, 0x0a, 0x73, 0x65, 0x72, 0x76, 0x65, 0x72, - 0x5f, 0x75, 0x72, 0x6c, 0x18, 0x03, 0x20, 0x01, 0x28, 0x09, 0x48, 0x00, 0x52, 0x09, 0x73, 0x65, - 0x72, 0x76, 0x65, 0x72, 0x55, 0x72, 0x6c, 0x88, 0x01, 0x01, 0x12, 0x12, 0x0a, 0x04, 0x74, 0x6f, - 0x6f, 0x6c, 0x18, 0x04, 0x20, 0x01, 0x28, 0x09, 
0x52, 0x04, 0x74, 0x6f, 0x6f, 0x6c, 0x12, 0x14, - 0x0a, 0x05, 0x69, 0x6e, 0x70, 0x75, 0x74, 0x18, 0x05, 0x20, 0x01, 0x28, 0x09, 0x52, 0x05, 0x69, - 0x6e, 0x70, 0x75, 0x74, 0x12, 0x1a, 0x0a, 0x08, 0x69, 0x6e, 0x6a, 0x65, 0x63, 0x74, 0x65, 0x64, - 0x18, 0x06, 0x20, 0x01, 0x28, 0x08, 0x52, 0x08, 0x69, 0x6e, 0x6a, 0x65, 0x63, 0x74, 0x65, 0x64, - 0x12, 0x2e, 0x0a, 0x10, 0x69, 0x6e, 0x76, 0x6f, 0x63, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x5f, 0x65, - 0x72, 0x72, 0x6f, 0x72, 0x18, 0x07, 0x20, 0x01, 0x28, 0x09, 0x48, 0x01, 0x52, 0x0f, 0x69, 0x6e, - 0x76, 0x6f, 0x63, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x45, 0x72, 0x72, 0x6f, 0x72, 0x88, 0x01, 0x01, - 0x12, 0x47, 0x0a, 0x08, 0x6d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x18, 0x08, 0x20, 0x03, - 0x28, 0x0b, 0x32, 0x2b, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x2e, 0x52, 0x65, 0x63, 0x6f, 0x72, - 0x64, 0x54, 0x6f, 0x6f, 0x6c, 0x55, 0x73, 0x61, 0x67, 0x65, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, - 0x74, 0x2e, 0x4d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x45, 0x6e, 0x74, 0x72, 0x79, 0x52, - 0x08, 0x6d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x12, 0x39, 0x0a, 0x0a, 0x63, 0x72, 0x65, - 0x61, 0x74, 0x65, 0x64, 0x5f, 0x61, 0x74, 0x18, 0x09, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1a, 0x2e, - 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2e, - 0x54, 0x69, 0x6d, 0x65, 0x73, 0x74, 0x61, 0x6d, 0x70, 0x52, 0x09, 0x63, 0x72, 0x65, 0x61, 0x74, - 0x65, 0x64, 0x41, 0x74, 0x1a, 0x51, 0x0a, 0x0d, 0x4d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, - 0x45, 0x6e, 0x74, 0x72, 0x79, 0x12, 0x10, 0x0a, 0x03, 0x6b, 0x65, 0x79, 0x18, 0x01, 0x20, 0x01, - 0x28, 0x09, 0x52, 0x03, 0x6b, 0x65, 0x79, 0x12, 0x2a, 0x0a, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, - 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x14, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, - 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2e, 0x41, 0x6e, 0x79, 0x52, 0x05, 0x76, 0x61, - 0x6c, 0x75, 0x65, 0x3a, 0x02, 0x38, 0x01, 0x42, 0x0d, 0x0a, 0x0b, 0x5f, 0x73, 0x65, 
0x72, 0x76, - 0x65, 0x72, 0x5f, 0x75, 0x72, 0x6c, 0x42, 0x13, 0x0a, 0x11, 0x5f, 0x69, 0x6e, 0x76, 0x6f, 0x63, - 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x5f, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x22, 0x19, 0x0a, 0x17, 0x52, - 0x65, 0x63, 0x6f, 0x72, 0x64, 0x54, 0x6f, 0x6f, 0x6c, 0x55, 0x73, 0x61, 0x67, 0x65, 0x52, 0x65, - 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x22, 0x35, 0x0a, 0x1a, 0x47, 0x65, 0x74, 0x4d, 0x43, 0x50, - 0x53, 0x65, 0x72, 0x76, 0x65, 0x72, 0x43, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x73, 0x52, 0x65, 0x71, - 0x75, 0x65, 0x73, 0x74, 0x12, 0x17, 0x0a, 0x07, 0x75, 0x73, 0x65, 0x72, 0x5f, 0x69, 0x64, 0x18, - 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x06, 0x75, 0x73, 0x65, 0x72, 0x49, 0x64, 0x22, 0xb2, 0x01, - 0x0a, 0x1b, 0x47, 0x65, 0x74, 0x4d, 0x43, 0x50, 0x53, 0x65, 0x72, 0x76, 0x65, 0x72, 0x43, 0x6f, - 0x6e, 0x66, 0x69, 0x67, 0x73, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x40, 0x0a, - 0x10, 0x63, 0x6f, 0x64, 0x65, 0x72, 0x5f, 0x6d, 0x63, 0x70, 0x5f, 0x63, 0x6f, 0x6e, 0x66, 0x69, - 0x67, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x16, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x2e, - 0x4d, 0x43, 0x50, 0x53, 0x65, 0x72, 0x76, 0x65, 0x72, 0x43, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x52, - 0x0e, 0x63, 0x6f, 0x64, 0x65, 0x72, 0x4d, 0x63, 0x70, 0x43, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x12, - 0x51, 0x0a, 0x19, 0x65, 0x78, 0x74, 0x65, 0x72, 0x6e, 0x61, 0x6c, 0x5f, 0x61, 0x75, 0x74, 0x68, - 0x5f, 0x6d, 0x63, 0x70, 0x5f, 0x63, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x73, 0x18, 0x02, 0x20, 0x03, - 0x28, 0x0b, 0x32, 0x16, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x2e, 0x4d, 0x43, 0x50, 0x53, 0x65, - 0x72, 0x76, 0x65, 0x72, 0x43, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x52, 0x16, 0x65, 0x78, 0x74, 0x65, - 0x72, 0x6e, 0x61, 0x6c, 0x41, 0x75, 0x74, 0x68, 0x4d, 0x63, 0x70, 0x43, 0x6f, 0x6e, 0x66, 0x69, - 0x67, 0x73, 0x22, 0x85, 0x01, 0x0a, 0x0f, 0x4d, 0x43, 0x50, 0x53, 0x65, 0x72, 0x76, 0x65, 0x72, - 0x43, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x12, 0x0e, 0x0a, 0x02, 0x69, 0x64, 0x18, 0x01, 0x20, 0x01, - 0x28, 0x09, 0x52, 0x02, 
0x69, 0x64, 0x12, 0x10, 0x0a, 0x03, 0x75, 0x72, 0x6c, 0x18, 0x02, 0x20, - 0x01, 0x28, 0x09, 0x52, 0x03, 0x75, 0x72, 0x6c, 0x12, 0x28, 0x0a, 0x10, 0x74, 0x6f, 0x6f, 0x6c, - 0x5f, 0x61, 0x6c, 0x6c, 0x6f, 0x77, 0x5f, 0x72, 0x65, 0x67, 0x65, 0x78, 0x18, 0x03, 0x20, 0x01, - 0x28, 0x09, 0x52, 0x0e, 0x74, 0x6f, 0x6f, 0x6c, 0x41, 0x6c, 0x6c, 0x6f, 0x77, 0x52, 0x65, 0x67, - 0x65, 0x78, 0x12, 0x26, 0x0a, 0x0f, 0x74, 0x6f, 0x6f, 0x6c, 0x5f, 0x64, 0x65, 0x6e, 0x79, 0x5f, - 0x72, 0x65, 0x67, 0x65, 0x78, 0x18, 0x04, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0d, 0x74, 0x6f, 0x6f, - 0x6c, 0x44, 0x65, 0x6e, 0x79, 0x52, 0x65, 0x67, 0x65, 0x78, 0x22, 0x72, 0x0a, 0x24, 0x47, 0x65, - 0x74, 0x4d, 0x43, 0x50, 0x53, 0x65, 0x72, 0x76, 0x65, 0x72, 0x41, 0x63, 0x63, 0x65, 0x73, 0x73, - 0x54, 0x6f, 0x6b, 0x65, 0x6e, 0x73, 0x42, 0x61, 0x74, 0x63, 0x68, 0x52, 0x65, 0x71, 0x75, 0x65, - 0x73, 0x74, 0x12, 0x17, 0x0a, 0x07, 0x75, 0x73, 0x65, 0x72, 0x5f, 0x69, 0x64, 0x18, 0x01, 0x20, - 0x01, 0x28, 0x09, 0x52, 0x06, 0x75, 0x73, 0x65, 0x72, 0x49, 0x64, 0x12, 0x31, 0x0a, 0x15, 0x6d, - 0x63, 0x70, 0x5f, 0x73, 0x65, 0x72, 0x76, 0x65, 0x72, 0x5f, 0x63, 0x6f, 0x6e, 0x66, 0x69, 0x67, - 0x5f, 0x69, 0x64, 0x73, 0x18, 0x02, 0x20, 0x03, 0x28, 0x09, 0x52, 0x12, 0x6d, 0x63, 0x70, 0x53, - 0x65, 0x72, 0x76, 0x65, 0x72, 0x43, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x49, 0x64, 0x73, 0x22, 0xda, - 0x02, 0x0a, 0x25, 0x47, 0x65, 0x74, 0x4d, 0x43, 0x50, 0x53, 0x65, 0x72, 0x76, 0x65, 0x72, 0x41, - 0x63, 0x63, 0x65, 0x73, 0x73, 0x54, 0x6f, 0x6b, 0x65, 0x6e, 0x73, 0x42, 0x61, 0x74, 0x63, 0x68, - 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x63, 0x0a, 0x0d, 0x61, 0x63, 0x63, 0x65, - 0x73, 0x73, 0x5f, 0x74, 0x6f, 0x6b, 0x65, 0x6e, 0x73, 0x18, 0x01, 0x20, 0x03, 0x28, 0x0b, 0x32, - 0x3e, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x2e, 0x47, 0x65, 0x74, 0x4d, 0x43, 0x50, 0x53, 0x65, - 0x72, 0x76, 0x65, 0x72, 0x41, 0x63, 0x63, 0x65, 0x73, 0x73, 0x54, 0x6f, 0x6b, 0x65, 0x6e, 0x73, - 0x42, 0x61, 0x74, 0x63, 0x68, 0x52, 0x65, 0x73, 0x70, 0x6f, 
0x6e, 0x73, 0x65, 0x2e, 0x41, 0x63, - 0x63, 0x65, 0x73, 0x73, 0x54, 0x6f, 0x6b, 0x65, 0x6e, 0x73, 0x45, 0x6e, 0x74, 0x72, 0x79, 0x52, - 0x0c, 0x61, 0x63, 0x63, 0x65, 0x73, 0x73, 0x54, 0x6f, 0x6b, 0x65, 0x6e, 0x73, 0x12, 0x50, 0x0a, - 0x06, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x73, 0x18, 0x02, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x38, 0x2e, - 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x2e, 0x47, 0x65, 0x74, 0x4d, 0x43, 0x50, 0x53, 0x65, 0x72, 0x76, - 0x65, 0x72, 0x41, 0x63, 0x63, 0x65, 0x73, 0x73, 0x54, 0x6f, 0x6b, 0x65, 0x6e, 0x73, 0x42, 0x61, - 0x74, 0x63, 0x68, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x2e, 0x45, 0x72, 0x72, 0x6f, - 0x72, 0x73, 0x45, 0x6e, 0x74, 0x72, 0x79, 0x52, 0x06, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x73, 0x1a, - 0x3f, 0x0a, 0x11, 0x41, 0x63, 0x63, 0x65, 0x73, 0x73, 0x54, 0x6f, 0x6b, 0x65, 0x6e, 0x73, 0x45, - 0x6e, 0x74, 0x72, 0x79, 0x12, 0x10, 0x0a, 0x03, 0x6b, 0x65, 0x79, 0x18, 0x01, 0x20, 0x01, 0x28, - 0x09, 0x52, 0x03, 0x6b, 0x65, 0x79, 0x12, 0x14, 0x0a, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x18, - 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x3a, 0x02, 0x38, 0x01, - 0x1a, 0x39, 0x0a, 0x0b, 0x45, 0x72, 0x72, 0x6f, 0x72, 0x73, 0x45, 0x6e, 0x74, 0x72, 0x79, 0x12, - 0x10, 0x0a, 0x03, 0x6b, 0x65, 0x79, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x03, 0x6b, 0x65, - 0x79, 0x12, 0x14, 0x0a, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, - 0x52, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x3a, 0x02, 0x38, 0x01, 0x22, 0x27, 0x0a, 0x13, 0x49, - 0x73, 0x41, 0x75, 0x74, 0x68, 0x6f, 0x72, 0x69, 0x7a, 0x65, 0x64, 0x52, 0x65, 0x71, 0x75, 0x65, - 0x73, 0x74, 0x12, 0x10, 0x0a, 0x03, 0x6b, 0x65, 0x79, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, - 0x03, 0x6b, 0x65, 0x79, 0x22, 0x31, 0x0a, 0x14, 0x49, 0x73, 0x41, 0x75, 0x74, 0x68, 0x6f, 0x72, - 0x69, 0x7a, 0x65, 0x64, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x19, 0x0a, 0x08, - 0x6f, 0x77, 0x6e, 0x65, 0x72, 0x5f, 0x69, 0x64, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x07, - 
0x6f, 0x77, 0x6e, 0x65, 0x72, 0x49, 0x64, 0x32, 0xce, 0x03, 0x0a, 0x08, 0x52, 0x65, 0x63, 0x6f, - 0x72, 0x64, 0x65, 0x72, 0x12, 0x59, 0x0a, 0x12, 0x52, 0x65, 0x63, 0x6f, 0x72, 0x64, 0x49, 0x6e, - 0x74, 0x65, 0x72, 0x63, 0x65, 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x12, 0x20, 0x2e, 0x70, 0x72, 0x6f, - 0x74, 0x6f, 0x2e, 0x52, 0x65, 0x63, 0x6f, 0x72, 0x64, 0x49, 0x6e, 0x74, 0x65, 0x72, 0x63, 0x65, - 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x21, 0x2e, 0x70, - 0x72, 0x6f, 0x74, 0x6f, 0x2e, 0x52, 0x65, 0x63, 0x6f, 0x72, 0x64, 0x49, 0x6e, 0x74, 0x65, 0x72, - 0x63, 0x65, 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, - 0x68, 0x0a, 0x17, 0x52, 0x65, 0x63, 0x6f, 0x72, 0x64, 0x49, 0x6e, 0x74, 0x65, 0x72, 0x63, 0x65, - 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x45, 0x6e, 0x64, 0x65, 0x64, 0x12, 0x25, 0x2e, 0x70, 0x72, 0x6f, - 0x74, 0x6f, 0x2e, 0x52, 0x65, 0x63, 0x6f, 0x72, 0x64, 0x49, 0x6e, 0x74, 0x65, 0x72, 0x63, 0x65, - 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x45, 0x6e, 0x64, 0x65, 0x64, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, - 0x74, 0x1a, 0x26, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x2e, 0x52, 0x65, 0x63, 0x6f, 0x72, 0x64, - 0x49, 0x6e, 0x74, 0x65, 0x72, 0x63, 0x65, 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x45, 0x6e, 0x64, 0x65, - 0x64, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x53, 0x0a, 0x10, 0x52, 0x65, 0x63, - 0x6f, 0x72, 0x64, 0x54, 0x6f, 0x6b, 0x65, 0x6e, 0x55, 0x73, 0x61, 0x67, 0x65, 0x12, 0x1e, 0x2e, - 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x2e, 0x52, 0x65, 0x63, 0x6f, 0x72, 0x64, 0x54, 0x6f, 0x6b, 0x65, - 0x6e, 0x55, 0x73, 0x61, 0x67, 0x65, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x1f, 0x2e, - 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x2e, 0x52, 0x65, 0x63, 0x6f, 0x72, 0x64, 0x54, 0x6f, 0x6b, 0x65, - 0x6e, 0x55, 0x73, 0x61, 0x67, 0x65, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x56, - 0x0a, 0x11, 0x52, 0x65, 0x63, 0x6f, 0x72, 0x64, 0x50, 0x72, 0x6f, 0x6d, 0x70, 0x74, 0x55, 0x73, - 0x61, 0x67, 0x65, 0x12, 0x1f, 0x2e, 
0x70, 0x72, 0x6f, 0x74, 0x6f, 0x2e, 0x52, 0x65, 0x63, 0x6f, +func (x *IsAuthorizedResponse) GetApiKeyId() string { + if x != nil { + return x.ApiKeyId + } + return "" +} + +var File_enterprise_aibridged_proto_aibridged_proto protoreflect.FileDescriptor + +var file_enterprise_aibridged_proto_aibridged_proto_rawDesc = []byte{ + 0x0a, 0x2a, 0x65, 0x6e, 0x74, 0x65, 0x72, 0x70, 0x72, 0x69, 0x73, 0x65, 0x2f, 0x61, 0x69, 0x62, + 0x72, 0x69, 0x64, 0x67, 0x65, 0x64, 0x2f, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x2f, 0x61, 0x69, 0x62, + 0x72, 0x69, 0x64, 0x67, 0x65, 0x64, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x12, 0x05, 0x70, 0x72, + 0x6f, 0x74, 0x6f, 0x1a, 0x19, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2f, 0x70, 0x72, 0x6f, 0x74, + 0x6f, 0x62, 0x75, 0x66, 0x2f, 0x61, 0x6e, 0x79, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x1a, 0x1f, + 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2f, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2f, + 0x74, 0x69, 0x6d, 0x65, 0x73, 0x74, 0x61, 0x6d, 0x70, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x22, + 0xf8, 0x02, 0x0a, 0x19, 0x52, 0x65, 0x63, 0x6f, 0x72, 0x64, 0x49, 0x6e, 0x74, 0x65, 0x72, 0x63, + 0x65, 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x0e, 0x0a, + 0x02, 0x69, 0x64, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x02, 0x69, 0x64, 0x12, 0x21, 0x0a, + 0x0c, 0x69, 0x6e, 0x69, 0x74, 0x69, 0x61, 0x74, 0x6f, 0x72, 0x5f, 0x69, 0x64, 0x18, 0x02, 0x20, + 0x01, 0x28, 0x09, 0x52, 0x0b, 0x69, 0x6e, 0x69, 0x74, 0x69, 0x61, 0x74, 0x6f, 0x72, 0x49, 0x64, + 0x12, 0x1a, 0x0a, 0x08, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x64, 0x65, 0x72, 0x18, 0x03, 0x20, 0x01, + 0x28, 0x09, 0x52, 0x08, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x64, 0x65, 0x72, 0x12, 0x14, 0x0a, 0x05, + 0x6d, 0x6f, 0x64, 0x65, 0x6c, 0x18, 0x04, 0x20, 0x01, 0x28, 0x09, 0x52, 0x05, 0x6d, 0x6f, 0x64, + 0x65, 0x6c, 0x12, 0x4a, 0x0a, 0x08, 0x6d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x18, 0x05, + 0x20, 0x03, 0x28, 0x0b, 0x32, 0x2e, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x2e, 0x52, 0x65, 0x63, + 0x6f, 
0x72, 0x64, 0x49, 0x6e, 0x74, 0x65, 0x72, 0x63, 0x65, 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x52, + 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x2e, 0x4d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x45, + 0x6e, 0x74, 0x72, 0x79, 0x52, 0x08, 0x6d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x12, 0x39, + 0x0a, 0x0a, 0x73, 0x74, 0x61, 0x72, 0x74, 0x65, 0x64, 0x5f, 0x61, 0x74, 0x18, 0x06, 0x20, 0x01, + 0x28, 0x0b, 0x32, 0x1a, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, + 0x6f, 0x62, 0x75, 0x66, 0x2e, 0x54, 0x69, 0x6d, 0x65, 0x73, 0x74, 0x61, 0x6d, 0x70, 0x52, 0x09, + 0x73, 0x74, 0x61, 0x72, 0x74, 0x65, 0x64, 0x41, 0x74, 0x12, 0x1c, 0x0a, 0x0a, 0x61, 0x70, 0x69, + 0x5f, 0x6b, 0x65, 0x79, 0x5f, 0x69, 0x64, 0x18, 0x07, 0x20, 0x01, 0x28, 0x09, 0x52, 0x08, 0x61, + 0x70, 0x69, 0x4b, 0x65, 0x79, 0x49, 0x64, 0x1a, 0x51, 0x0a, 0x0d, 0x4d, 0x65, 0x74, 0x61, 0x64, + 0x61, 0x74, 0x61, 0x45, 0x6e, 0x74, 0x72, 0x79, 0x12, 0x10, 0x0a, 0x03, 0x6b, 0x65, 0x79, 0x18, + 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x03, 0x6b, 0x65, 0x79, 0x12, 0x2a, 0x0a, 0x05, 0x76, 0x61, + 0x6c, 0x75, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x14, 0x2e, 0x67, 0x6f, 0x6f, 0x67, + 0x6c, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2e, 0x41, 0x6e, 0x79, 0x52, + 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x3a, 0x02, 0x38, 0x01, 0x22, 0x1c, 0x0a, 0x1a, 0x52, 0x65, + 0x63, 0x6f, 0x72, 0x64, 0x49, 0x6e, 0x74, 0x65, 0x72, 0x63, 0x65, 0x70, 0x74, 0x69, 0x6f, 0x6e, + 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x22, 0x67, 0x0a, 0x1e, 0x52, 0x65, 0x63, 0x6f, + 0x72, 0x64, 0x49, 0x6e, 0x74, 0x65, 0x72, 0x63, 0x65, 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x45, 0x6e, + 0x64, 0x65, 0x64, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x0e, 0x0a, 0x02, 0x69, 0x64, + 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x02, 0x69, 0x64, 0x12, 0x35, 0x0a, 0x08, 0x65, 0x6e, + 0x64, 0x65, 0x64, 0x5f, 0x61, 0x74, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1a, 0x2e, 0x67, + 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x70, 
0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2e, 0x54, + 0x69, 0x6d, 0x65, 0x73, 0x74, 0x61, 0x6d, 0x70, 0x52, 0x07, 0x65, 0x6e, 0x64, 0x65, 0x64, 0x41, + 0x74, 0x22, 0x21, 0x0a, 0x1f, 0x52, 0x65, 0x63, 0x6f, 0x72, 0x64, 0x49, 0x6e, 0x74, 0x65, 0x72, + 0x63, 0x65, 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x45, 0x6e, 0x64, 0x65, 0x64, 0x52, 0x65, 0x73, 0x70, + 0x6f, 0x6e, 0x73, 0x65, 0x22, 0xf9, 0x02, 0x0a, 0x17, 0x52, 0x65, 0x63, 0x6f, 0x72, 0x64, 0x54, + 0x6f, 0x6b, 0x65, 0x6e, 0x55, 0x73, 0x61, 0x67, 0x65, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, + 0x12, 0x27, 0x0a, 0x0f, 0x69, 0x6e, 0x74, 0x65, 0x72, 0x63, 0x65, 0x70, 0x74, 0x69, 0x6f, 0x6e, + 0x5f, 0x69, 0x64, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0e, 0x69, 0x6e, 0x74, 0x65, 0x72, + 0x63, 0x65, 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x49, 0x64, 0x12, 0x15, 0x0a, 0x06, 0x6d, 0x73, 0x67, + 0x5f, 0x69, 0x64, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x05, 0x6d, 0x73, 0x67, 0x49, 0x64, + 0x12, 0x21, 0x0a, 0x0c, 0x69, 0x6e, 0x70, 0x75, 0x74, 0x5f, 0x74, 0x6f, 0x6b, 0x65, 0x6e, 0x73, + 0x18, 0x03, 0x20, 0x01, 0x28, 0x03, 0x52, 0x0b, 0x69, 0x6e, 0x70, 0x75, 0x74, 0x54, 0x6f, 0x6b, + 0x65, 0x6e, 0x73, 0x12, 0x23, 0x0a, 0x0d, 0x6f, 0x75, 0x74, 0x70, 0x75, 0x74, 0x5f, 0x74, 0x6f, + 0x6b, 0x65, 0x6e, 0x73, 0x18, 0x04, 0x20, 0x01, 0x28, 0x03, 0x52, 0x0c, 0x6f, 0x75, 0x74, 0x70, + 0x75, 0x74, 0x54, 0x6f, 0x6b, 0x65, 0x6e, 0x73, 0x12, 0x48, 0x0a, 0x08, 0x6d, 0x65, 0x74, 0x61, + 0x64, 0x61, 0x74, 0x61, 0x18, 0x05, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x2c, 0x2e, 0x70, 0x72, 0x6f, + 0x74, 0x6f, 0x2e, 0x52, 0x65, 0x63, 0x6f, 0x72, 0x64, 0x54, 0x6f, 0x6b, 0x65, 0x6e, 0x55, 0x73, + 0x61, 0x67, 0x65, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x2e, 0x4d, 0x65, 0x74, 0x61, 0x64, + 0x61, 0x74, 0x61, 0x45, 0x6e, 0x74, 0x72, 0x79, 0x52, 0x08, 0x6d, 0x65, 0x74, 0x61, 0x64, 0x61, + 0x74, 0x61, 0x12, 0x39, 0x0a, 0x0a, 0x63, 0x72, 0x65, 0x61, 0x74, 0x65, 0x64, 0x5f, 0x61, 0x74, + 0x18, 0x06, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1a, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 
0x6c, 0x65, 0x2e, + 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2e, 0x54, 0x69, 0x6d, 0x65, 0x73, 0x74, 0x61, + 0x6d, 0x70, 0x52, 0x09, 0x63, 0x72, 0x65, 0x61, 0x74, 0x65, 0x64, 0x41, 0x74, 0x1a, 0x51, 0x0a, + 0x0d, 0x4d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x45, 0x6e, 0x74, 0x72, 0x79, 0x12, 0x10, + 0x0a, 0x03, 0x6b, 0x65, 0x79, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x03, 0x6b, 0x65, 0x79, + 0x12, 0x2a, 0x0a, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, + 0x14, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, + 0x66, 0x2e, 0x41, 0x6e, 0x79, 0x52, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x3a, 0x02, 0x38, 0x01, + 0x22, 0x1a, 0x0a, 0x18, 0x52, 0x65, 0x63, 0x6f, 0x72, 0x64, 0x54, 0x6f, 0x6b, 0x65, 0x6e, 0x55, + 0x73, 0x61, 0x67, 0x65, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x22, 0xcb, 0x02, 0x0a, + 0x18, 0x52, 0x65, 0x63, 0x6f, 0x72, 0x64, 0x50, 0x72, 0x6f, 0x6d, 0x70, 0x74, 0x55, 0x73, 0x61, + 0x67, 0x65, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x27, 0x0a, 0x0f, 0x69, 0x6e, 0x74, + 0x65, 0x72, 0x63, 0x65, 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x5f, 0x69, 0x64, 0x18, 0x01, 0x20, 0x01, + 0x28, 0x09, 0x52, 0x0e, 0x69, 0x6e, 0x74, 0x65, 0x72, 0x63, 0x65, 0x70, 0x74, 0x69, 0x6f, 0x6e, + 0x49, 0x64, 0x12, 0x15, 0x0a, 0x06, 0x6d, 0x73, 0x67, 0x5f, 0x69, 0x64, 0x18, 0x02, 0x20, 0x01, + 0x28, 0x09, 0x52, 0x05, 0x6d, 0x73, 0x67, 0x49, 0x64, 0x12, 0x16, 0x0a, 0x06, 0x70, 0x72, 0x6f, + 0x6d, 0x70, 0x74, 0x18, 0x03, 0x20, 0x01, 0x28, 0x09, 0x52, 0x06, 0x70, 0x72, 0x6f, 0x6d, 0x70, + 0x74, 0x12, 0x49, 0x0a, 0x08, 0x6d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x18, 0x04, 0x20, + 0x03, 0x28, 0x0b, 0x32, 0x2d, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x2e, 0x52, 0x65, 0x63, 0x6f, 0x72, 0x64, 0x50, 0x72, 0x6f, 0x6d, 0x70, 0x74, 0x55, 0x73, 0x61, 0x67, 0x65, 0x52, 0x65, 0x71, - 0x75, 0x65, 0x73, 0x74, 0x1a, 0x20, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x2e, 0x52, 0x65, 0x63, - 0x6f, 0x72, 0x64, 
0x50, 0x72, 0x6f, 0x6d, 0x70, 0x74, 0x55, 0x73, 0x61, 0x67, 0x65, 0x52, 0x65, - 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x50, 0x0a, 0x0f, 0x52, 0x65, 0x63, 0x6f, 0x72, 0x64, - 0x54, 0x6f, 0x6f, 0x6c, 0x55, 0x73, 0x61, 0x67, 0x65, 0x12, 0x1d, 0x2e, 0x70, 0x72, 0x6f, 0x74, - 0x6f, 0x2e, 0x52, 0x65, 0x63, 0x6f, 0x72, 0x64, 0x54, 0x6f, 0x6f, 0x6c, 0x55, 0x73, 0x61, 0x67, - 0x65, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x1e, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, - 0x2e, 0x52, 0x65, 0x63, 0x6f, 0x72, 0x64, 0x54, 0x6f, 0x6f, 0x6c, 0x55, 0x73, 0x61, 0x67, 0x65, - 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x32, 0xeb, 0x01, 0x0a, 0x0f, 0x4d, 0x43, 0x50, - 0x43, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x75, 0x72, 0x61, 0x74, 0x6f, 0x72, 0x12, 0x5c, 0x0a, 0x13, - 0x47, 0x65, 0x74, 0x4d, 0x43, 0x50, 0x53, 0x65, 0x72, 0x76, 0x65, 0x72, 0x43, 0x6f, 0x6e, 0x66, - 0x69, 0x67, 0x73, 0x12, 0x21, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x2e, 0x47, 0x65, 0x74, 0x4d, - 0x43, 0x50, 0x53, 0x65, 0x72, 0x76, 0x65, 0x72, 0x43, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x73, 0x52, - 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x22, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x2e, 0x47, - 0x65, 0x74, 0x4d, 0x43, 0x50, 0x53, 0x65, 0x72, 0x76, 0x65, 0x72, 0x43, 0x6f, 0x6e, 0x66, 0x69, - 0x67, 0x73, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x7a, 0x0a, 0x1d, 0x47, 0x65, - 0x74, 0x4d, 0x43, 0x50, 0x53, 0x65, 0x72, 0x76, 0x65, 0x72, 0x41, 0x63, 0x63, 0x65, 0x73, 0x73, - 0x54, 0x6f, 0x6b, 0x65, 0x6e, 0x73, 0x42, 0x61, 0x74, 0x63, 0x68, 0x12, 0x2b, 0x2e, 0x70, 0x72, + 0x75, 0x65, 0x73, 0x74, 0x2e, 0x4d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x45, 0x6e, 0x74, + 0x72, 0x79, 0x52, 0x08, 0x6d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x12, 0x39, 0x0a, 0x0a, + 0x63, 0x72, 0x65, 0x61, 0x74, 0x65, 0x64, 0x5f, 0x61, 0x74, 0x18, 0x05, 0x20, 0x01, 0x28, 0x0b, + 0x32, 0x1a, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, + 0x75, 0x66, 0x2e, 0x54, 0x69, 0x6d, 0x65, 0x73, 0x74, 
0x61, 0x6d, 0x70, 0x52, 0x09, 0x63, 0x72, + 0x65, 0x61, 0x74, 0x65, 0x64, 0x41, 0x74, 0x1a, 0x51, 0x0a, 0x0d, 0x4d, 0x65, 0x74, 0x61, 0x64, + 0x61, 0x74, 0x61, 0x45, 0x6e, 0x74, 0x72, 0x79, 0x12, 0x10, 0x0a, 0x03, 0x6b, 0x65, 0x79, 0x18, + 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x03, 0x6b, 0x65, 0x79, 0x12, 0x2a, 0x0a, 0x05, 0x76, 0x61, + 0x6c, 0x75, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x14, 0x2e, 0x67, 0x6f, 0x6f, 0x67, + 0x6c, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2e, 0x41, 0x6e, 0x79, 0x52, + 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x3a, 0x02, 0x38, 0x01, 0x22, 0x1b, 0x0a, 0x19, 0x52, 0x65, + 0x63, 0x6f, 0x72, 0x64, 0x50, 0x72, 0x6f, 0x6d, 0x70, 0x74, 0x55, 0x73, 0x61, 0x67, 0x65, 0x52, + 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x22, 0xed, 0x03, 0x0a, 0x16, 0x52, 0x65, 0x63, 0x6f, + 0x72, 0x64, 0x54, 0x6f, 0x6f, 0x6c, 0x55, 0x73, 0x61, 0x67, 0x65, 0x52, 0x65, 0x71, 0x75, 0x65, + 0x73, 0x74, 0x12, 0x27, 0x0a, 0x0f, 0x69, 0x6e, 0x74, 0x65, 0x72, 0x63, 0x65, 0x70, 0x74, 0x69, + 0x6f, 0x6e, 0x5f, 0x69, 0x64, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0e, 0x69, 0x6e, 0x74, + 0x65, 0x72, 0x63, 0x65, 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x49, 0x64, 0x12, 0x15, 0x0a, 0x06, 0x6d, + 0x73, 0x67, 0x5f, 0x69, 0x64, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x05, 0x6d, 0x73, 0x67, + 0x49, 0x64, 0x12, 0x22, 0x0a, 0x0a, 0x73, 0x65, 0x72, 0x76, 0x65, 0x72, 0x5f, 0x75, 0x72, 0x6c, + 0x18, 0x03, 0x20, 0x01, 0x28, 0x09, 0x48, 0x00, 0x52, 0x09, 0x73, 0x65, 0x72, 0x76, 0x65, 0x72, + 0x55, 0x72, 0x6c, 0x88, 0x01, 0x01, 0x12, 0x12, 0x0a, 0x04, 0x74, 0x6f, 0x6f, 0x6c, 0x18, 0x04, + 0x20, 0x01, 0x28, 0x09, 0x52, 0x04, 0x74, 0x6f, 0x6f, 0x6c, 0x12, 0x14, 0x0a, 0x05, 0x69, 0x6e, + 0x70, 0x75, 0x74, 0x18, 0x05, 0x20, 0x01, 0x28, 0x09, 0x52, 0x05, 0x69, 0x6e, 0x70, 0x75, 0x74, + 0x12, 0x1a, 0x0a, 0x08, 0x69, 0x6e, 0x6a, 0x65, 0x63, 0x74, 0x65, 0x64, 0x18, 0x06, 0x20, 0x01, + 0x28, 0x08, 0x52, 0x08, 0x69, 0x6e, 0x6a, 0x65, 0x63, 0x74, 0x65, 0x64, 0x12, 0x2e, 0x0a, 
0x10, + 0x69, 0x6e, 0x76, 0x6f, 0x63, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x5f, 0x65, 0x72, 0x72, 0x6f, 0x72, + 0x18, 0x07, 0x20, 0x01, 0x28, 0x09, 0x48, 0x01, 0x52, 0x0f, 0x69, 0x6e, 0x76, 0x6f, 0x63, 0x61, + 0x74, 0x69, 0x6f, 0x6e, 0x45, 0x72, 0x72, 0x6f, 0x72, 0x88, 0x01, 0x01, 0x12, 0x47, 0x0a, 0x08, + 0x6d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x18, 0x08, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x2b, + 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x2e, 0x52, 0x65, 0x63, 0x6f, 0x72, 0x64, 0x54, 0x6f, 0x6f, + 0x6c, 0x55, 0x73, 0x61, 0x67, 0x65, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x2e, 0x4d, 0x65, + 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x45, 0x6e, 0x74, 0x72, 0x79, 0x52, 0x08, 0x6d, 0x65, 0x74, + 0x61, 0x64, 0x61, 0x74, 0x61, 0x12, 0x39, 0x0a, 0x0a, 0x63, 0x72, 0x65, 0x61, 0x74, 0x65, 0x64, + 0x5f, 0x61, 0x74, 0x18, 0x09, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1a, 0x2e, 0x67, 0x6f, 0x6f, 0x67, + 0x6c, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2e, 0x54, 0x69, 0x6d, 0x65, + 0x73, 0x74, 0x61, 0x6d, 0x70, 0x52, 0x09, 0x63, 0x72, 0x65, 0x61, 0x74, 0x65, 0x64, 0x41, 0x74, + 0x1a, 0x51, 0x0a, 0x0d, 0x4d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x45, 0x6e, 0x74, 0x72, + 0x79, 0x12, 0x10, 0x0a, 0x03, 0x6b, 0x65, 0x79, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x03, + 0x6b, 0x65, 0x79, 0x12, 0x2a, 0x0a, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x18, 0x02, 0x20, 0x01, + 0x28, 0x0b, 0x32, 0x14, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, + 0x6f, 0x62, 0x75, 0x66, 0x2e, 0x41, 0x6e, 0x79, 0x52, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x3a, + 0x02, 0x38, 0x01, 0x42, 0x0d, 0x0a, 0x0b, 0x5f, 0x73, 0x65, 0x72, 0x76, 0x65, 0x72, 0x5f, 0x75, + 0x72, 0x6c, 0x42, 0x13, 0x0a, 0x11, 0x5f, 0x69, 0x6e, 0x76, 0x6f, 0x63, 0x61, 0x74, 0x69, 0x6f, + 0x6e, 0x5f, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x22, 0x19, 0x0a, 0x17, 0x52, 0x65, 0x63, 0x6f, 0x72, + 0x64, 0x54, 0x6f, 0x6f, 0x6c, 0x55, 0x73, 0x61, 0x67, 0x65, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, + 0x73, 0x65, 0x22, 0x35, 0x0a, 
0x1a, 0x47, 0x65, 0x74, 0x4d, 0x43, 0x50, 0x53, 0x65, 0x72, 0x76, + 0x65, 0x72, 0x43, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x73, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, + 0x12, 0x17, 0x0a, 0x07, 0x75, 0x73, 0x65, 0x72, 0x5f, 0x69, 0x64, 0x18, 0x01, 0x20, 0x01, 0x28, + 0x09, 0x52, 0x06, 0x75, 0x73, 0x65, 0x72, 0x49, 0x64, 0x22, 0xb2, 0x01, 0x0a, 0x1b, 0x47, 0x65, + 0x74, 0x4d, 0x43, 0x50, 0x53, 0x65, 0x72, 0x76, 0x65, 0x72, 0x43, 0x6f, 0x6e, 0x66, 0x69, 0x67, + 0x73, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x40, 0x0a, 0x10, 0x63, 0x6f, 0x64, + 0x65, 0x72, 0x5f, 0x6d, 0x63, 0x70, 0x5f, 0x63, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x18, 0x01, 0x20, + 0x01, 0x28, 0x0b, 0x32, 0x16, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x2e, 0x4d, 0x43, 0x50, 0x53, + 0x65, 0x72, 0x76, 0x65, 0x72, 0x43, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x52, 0x0e, 0x63, 0x6f, 0x64, + 0x65, 0x72, 0x4d, 0x63, 0x70, 0x43, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x12, 0x51, 0x0a, 0x19, 0x65, + 0x78, 0x74, 0x65, 0x72, 0x6e, 0x61, 0x6c, 0x5f, 0x61, 0x75, 0x74, 0x68, 0x5f, 0x6d, 0x63, 0x70, + 0x5f, 0x63, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x73, 0x18, 0x02, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x16, + 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x2e, 0x4d, 0x43, 0x50, 0x53, 0x65, 0x72, 0x76, 0x65, 0x72, + 0x43, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x52, 0x16, 0x65, 0x78, 0x74, 0x65, 0x72, 0x6e, 0x61, 0x6c, + 0x41, 0x75, 0x74, 0x68, 0x4d, 0x63, 0x70, 0x43, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x73, 0x22, 0x85, + 0x01, 0x0a, 0x0f, 0x4d, 0x43, 0x50, 0x53, 0x65, 0x72, 0x76, 0x65, 0x72, 0x43, 0x6f, 0x6e, 0x66, + 0x69, 0x67, 0x12, 0x0e, 0x0a, 0x02, 0x69, 0x64, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x02, + 0x69, 0x64, 0x12, 0x10, 0x0a, 0x03, 0x75, 0x72, 0x6c, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, + 0x03, 0x75, 0x72, 0x6c, 0x12, 0x28, 0x0a, 0x10, 0x74, 0x6f, 0x6f, 0x6c, 0x5f, 0x61, 0x6c, 0x6c, + 0x6f, 0x77, 0x5f, 0x72, 0x65, 0x67, 0x65, 0x78, 0x18, 0x03, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0e, + 0x74, 0x6f, 0x6f, 0x6c, 0x41, 0x6c, 0x6c, 0x6f, 0x77, 0x52, 0x65, 
0x67, 0x65, 0x78, 0x12, 0x26, + 0x0a, 0x0f, 0x74, 0x6f, 0x6f, 0x6c, 0x5f, 0x64, 0x65, 0x6e, 0x79, 0x5f, 0x72, 0x65, 0x67, 0x65, + 0x78, 0x18, 0x04, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0d, 0x74, 0x6f, 0x6f, 0x6c, 0x44, 0x65, 0x6e, + 0x79, 0x52, 0x65, 0x67, 0x65, 0x78, 0x22, 0x72, 0x0a, 0x24, 0x47, 0x65, 0x74, 0x4d, 0x43, 0x50, + 0x53, 0x65, 0x72, 0x76, 0x65, 0x72, 0x41, 0x63, 0x63, 0x65, 0x73, 0x73, 0x54, 0x6f, 0x6b, 0x65, + 0x6e, 0x73, 0x42, 0x61, 0x74, 0x63, 0x68, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x17, + 0x0a, 0x07, 0x75, 0x73, 0x65, 0x72, 0x5f, 0x69, 0x64, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, + 0x06, 0x75, 0x73, 0x65, 0x72, 0x49, 0x64, 0x12, 0x31, 0x0a, 0x15, 0x6d, 0x63, 0x70, 0x5f, 0x73, + 0x65, 0x72, 0x76, 0x65, 0x72, 0x5f, 0x63, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x5f, 0x69, 0x64, 0x73, + 0x18, 0x02, 0x20, 0x03, 0x28, 0x09, 0x52, 0x12, 0x6d, 0x63, 0x70, 0x53, 0x65, 0x72, 0x76, 0x65, + 0x72, 0x43, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x49, 0x64, 0x73, 0x22, 0xda, 0x02, 0x0a, 0x25, 0x47, + 0x65, 0x74, 0x4d, 0x43, 0x50, 0x53, 0x65, 0x72, 0x76, 0x65, 0x72, 0x41, 0x63, 0x63, 0x65, 0x73, + 0x73, 0x54, 0x6f, 0x6b, 0x65, 0x6e, 0x73, 0x42, 0x61, 0x74, 0x63, 0x68, 0x52, 0x65, 0x73, 0x70, + 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x63, 0x0a, 0x0d, 0x61, 0x63, 0x63, 0x65, 0x73, 0x73, 0x5f, 0x74, + 0x6f, 0x6b, 0x65, 0x6e, 0x73, 0x18, 0x01, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x3e, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x2e, 0x47, 0x65, 0x74, 0x4d, 0x43, 0x50, 0x53, 0x65, 0x72, 0x76, 0x65, 0x72, 0x41, 0x63, 0x63, 0x65, 0x73, 0x73, 0x54, 0x6f, 0x6b, 0x65, 0x6e, 0x73, 0x42, 0x61, 0x74, 0x63, - 0x68, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x2c, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, - 0x2e, 0x47, 0x65, 0x74, 0x4d, 0x43, 0x50, 0x53, 0x65, 0x72, 0x76, 0x65, 0x72, 0x41, 0x63, 0x63, - 0x65, 0x73, 0x73, 0x54, 0x6f, 0x6b, 0x65, 0x6e, 0x73, 0x42, 0x61, 0x74, 0x63, 0x68, 0x52, 0x65, - 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x32, 0x55, 0x0a, 0x0a, 0x41, 0x75, 0x74, 0x68, 0x6f, 0x72, - 0x69, 0x7a, 
0x65, 0x72, 0x12, 0x47, 0x0a, 0x0c, 0x49, 0x73, 0x41, 0x75, 0x74, 0x68, 0x6f, 0x72, - 0x69, 0x7a, 0x65, 0x64, 0x12, 0x1a, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x2e, 0x49, 0x73, 0x41, - 0x75, 0x74, 0x68, 0x6f, 0x72, 0x69, 0x7a, 0x65, 0x64, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, - 0x1a, 0x1b, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x2e, 0x49, 0x73, 0x41, 0x75, 0x74, 0x68, 0x6f, - 0x72, 0x69, 0x7a, 0x65, 0x64, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x42, 0x2b, 0x5a, - 0x29, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x63, 0x6f, 0x64, 0x65, - 0x72, 0x2f, 0x63, 0x6f, 0x64, 0x65, 0x72, 0x2f, 0x76, 0x32, 0x2f, 0x61, 0x69, 0x62, 0x72, 0x69, - 0x64, 0x67, 0x65, 0x64, 0x2f, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x06, 0x70, 0x72, 0x6f, 0x74, - 0x6f, 0x33, + 0x68, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x2e, 0x41, 0x63, 0x63, 0x65, 0x73, 0x73, + 0x54, 0x6f, 0x6b, 0x65, 0x6e, 0x73, 0x45, 0x6e, 0x74, 0x72, 0x79, 0x52, 0x0c, 0x61, 0x63, 0x63, + 0x65, 0x73, 0x73, 0x54, 0x6f, 0x6b, 0x65, 0x6e, 0x73, 0x12, 0x50, 0x0a, 0x06, 0x65, 0x72, 0x72, + 0x6f, 0x72, 0x73, 0x18, 0x02, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x38, 0x2e, 0x70, 0x72, 0x6f, 0x74, + 0x6f, 0x2e, 0x47, 0x65, 0x74, 0x4d, 0x43, 0x50, 0x53, 0x65, 0x72, 0x76, 0x65, 0x72, 0x41, 0x63, + 0x63, 0x65, 0x73, 0x73, 0x54, 0x6f, 0x6b, 0x65, 0x6e, 0x73, 0x42, 0x61, 0x74, 0x63, 0x68, 0x52, + 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x2e, 0x45, 0x72, 0x72, 0x6f, 0x72, 0x73, 0x45, 0x6e, + 0x74, 0x72, 0x79, 0x52, 0x06, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x73, 0x1a, 0x3f, 0x0a, 0x11, 0x41, + 0x63, 0x63, 0x65, 0x73, 0x73, 0x54, 0x6f, 0x6b, 0x65, 0x6e, 0x73, 0x45, 0x6e, 0x74, 0x72, 0x79, + 0x12, 0x10, 0x0a, 0x03, 0x6b, 0x65, 0x79, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x03, 0x6b, + 0x65, 0x79, 0x12, 0x14, 0x0a, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, + 0x09, 0x52, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x3a, 0x02, 0x38, 0x01, 0x1a, 0x39, 0x0a, 0x0b, + 0x45, 0x72, 0x72, 0x6f, 0x72, 0x73, 
0x45, 0x6e, 0x74, 0x72, 0x79, 0x12, 0x10, 0x0a, 0x03, 0x6b, + 0x65, 0x79, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x03, 0x6b, 0x65, 0x79, 0x12, 0x14, 0x0a, + 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x05, 0x76, 0x61, + 0x6c, 0x75, 0x65, 0x3a, 0x02, 0x38, 0x01, 0x22, 0x27, 0x0a, 0x13, 0x49, 0x73, 0x41, 0x75, 0x74, + 0x68, 0x6f, 0x72, 0x69, 0x7a, 0x65, 0x64, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x10, + 0x0a, 0x03, 0x6b, 0x65, 0x79, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x03, 0x6b, 0x65, 0x79, + 0x22, 0x4f, 0x0a, 0x14, 0x49, 0x73, 0x41, 0x75, 0x74, 0x68, 0x6f, 0x72, 0x69, 0x7a, 0x65, 0x64, + 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x19, 0x0a, 0x08, 0x6f, 0x77, 0x6e, 0x65, + 0x72, 0x5f, 0x69, 0x64, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x07, 0x6f, 0x77, 0x6e, 0x65, + 0x72, 0x49, 0x64, 0x12, 0x1c, 0x0a, 0x0a, 0x61, 0x70, 0x69, 0x5f, 0x6b, 0x65, 0x79, 0x5f, 0x69, + 0x64, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x08, 0x61, 0x70, 0x69, 0x4b, 0x65, 0x79, 0x49, + 0x64, 0x32, 0xce, 0x03, 0x0a, 0x08, 0x52, 0x65, 0x63, 0x6f, 0x72, 0x64, 0x65, 0x72, 0x12, 0x59, + 0x0a, 0x12, 0x52, 0x65, 0x63, 0x6f, 0x72, 0x64, 0x49, 0x6e, 0x74, 0x65, 0x72, 0x63, 0x65, 0x70, + 0x74, 0x69, 0x6f, 0x6e, 0x12, 0x20, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x2e, 0x52, 0x65, 0x63, + 0x6f, 0x72, 0x64, 0x49, 0x6e, 0x74, 0x65, 0x72, 0x63, 0x65, 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x52, + 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x21, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x2e, 0x52, + 0x65, 0x63, 0x6f, 0x72, 0x64, 0x49, 0x6e, 0x74, 0x65, 0x72, 0x63, 0x65, 0x70, 0x74, 0x69, 0x6f, + 0x6e, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x68, 0x0a, 0x17, 0x52, 0x65, 0x63, + 0x6f, 0x72, 0x64, 0x49, 0x6e, 0x74, 0x65, 0x72, 0x63, 0x65, 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x45, + 0x6e, 0x64, 0x65, 0x64, 0x12, 0x25, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x2e, 0x52, 0x65, 0x63, + 0x6f, 0x72, 0x64, 0x49, 0x6e, 0x74, 0x65, 0x72, 0x63, 0x65, 0x70, 0x74, 
0x69, 0x6f, 0x6e, 0x45, + 0x6e, 0x64, 0x65, 0x64, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x26, 0x2e, 0x70, 0x72, + 0x6f, 0x74, 0x6f, 0x2e, 0x52, 0x65, 0x63, 0x6f, 0x72, 0x64, 0x49, 0x6e, 0x74, 0x65, 0x72, 0x63, + 0x65, 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x45, 0x6e, 0x64, 0x65, 0x64, 0x52, 0x65, 0x73, 0x70, 0x6f, + 0x6e, 0x73, 0x65, 0x12, 0x53, 0x0a, 0x10, 0x52, 0x65, 0x63, 0x6f, 0x72, 0x64, 0x54, 0x6f, 0x6b, + 0x65, 0x6e, 0x55, 0x73, 0x61, 0x67, 0x65, 0x12, 0x1e, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x2e, + 0x52, 0x65, 0x63, 0x6f, 0x72, 0x64, 0x54, 0x6f, 0x6b, 0x65, 0x6e, 0x55, 0x73, 0x61, 0x67, 0x65, + 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x1f, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x2e, + 0x52, 0x65, 0x63, 0x6f, 0x72, 0x64, 0x54, 0x6f, 0x6b, 0x65, 0x6e, 0x55, 0x73, 0x61, 0x67, 0x65, + 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x56, 0x0a, 0x11, 0x52, 0x65, 0x63, 0x6f, + 0x72, 0x64, 0x50, 0x72, 0x6f, 0x6d, 0x70, 0x74, 0x55, 0x73, 0x61, 0x67, 0x65, 0x12, 0x1f, 0x2e, + 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x2e, 0x52, 0x65, 0x63, 0x6f, 0x72, 0x64, 0x50, 0x72, 0x6f, 0x6d, + 0x70, 0x74, 0x55, 0x73, 0x61, 0x67, 0x65, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x20, + 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x2e, 0x52, 0x65, 0x63, 0x6f, 0x72, 0x64, 0x50, 0x72, 0x6f, + 0x6d, 0x70, 0x74, 0x55, 0x73, 0x61, 0x67, 0x65, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, + 0x12, 0x50, 0x0a, 0x0f, 0x52, 0x65, 0x63, 0x6f, 0x72, 0x64, 0x54, 0x6f, 0x6f, 0x6c, 0x55, 0x73, + 0x61, 0x67, 0x65, 0x12, 0x1d, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x2e, 0x52, 0x65, 0x63, 0x6f, + 0x72, 0x64, 0x54, 0x6f, 0x6f, 0x6c, 0x55, 0x73, 0x61, 0x67, 0x65, 0x52, 0x65, 0x71, 0x75, 0x65, + 0x73, 0x74, 0x1a, 0x1e, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x2e, 0x52, 0x65, 0x63, 0x6f, 0x72, + 0x64, 0x54, 0x6f, 0x6f, 0x6c, 0x55, 0x73, 0x61, 0x67, 0x65, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, + 0x73, 0x65, 0x32, 0xeb, 0x01, 0x0a, 0x0f, 0x4d, 0x43, 0x50, 0x43, 0x6f, 0x6e, 0x66, 0x69, 0x67, + 0x75, 0x72, 
0x61, 0x74, 0x6f, 0x72, 0x12, 0x5c, 0x0a, 0x13, 0x47, 0x65, 0x74, 0x4d, 0x43, 0x50, + 0x53, 0x65, 0x72, 0x76, 0x65, 0x72, 0x43, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x73, 0x12, 0x21, 0x2e, + 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x2e, 0x47, 0x65, 0x74, 0x4d, 0x43, 0x50, 0x53, 0x65, 0x72, 0x76, + 0x65, 0x72, 0x43, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x73, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, + 0x1a, 0x22, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x2e, 0x47, 0x65, 0x74, 0x4d, 0x43, 0x50, 0x53, + 0x65, 0x72, 0x76, 0x65, 0x72, 0x43, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x73, 0x52, 0x65, 0x73, 0x70, + 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x7a, 0x0a, 0x1d, 0x47, 0x65, 0x74, 0x4d, 0x43, 0x50, 0x53, 0x65, + 0x72, 0x76, 0x65, 0x72, 0x41, 0x63, 0x63, 0x65, 0x73, 0x73, 0x54, 0x6f, 0x6b, 0x65, 0x6e, 0x73, + 0x42, 0x61, 0x74, 0x63, 0x68, 0x12, 0x2b, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x2e, 0x47, 0x65, + 0x74, 0x4d, 0x43, 0x50, 0x53, 0x65, 0x72, 0x76, 0x65, 0x72, 0x41, 0x63, 0x63, 0x65, 0x73, 0x73, + 0x54, 0x6f, 0x6b, 0x65, 0x6e, 0x73, 0x42, 0x61, 0x74, 0x63, 0x68, 0x52, 0x65, 0x71, 0x75, 0x65, + 0x73, 0x74, 0x1a, 0x2c, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x2e, 0x47, 0x65, 0x74, 0x4d, 0x43, + 0x50, 0x53, 0x65, 0x72, 0x76, 0x65, 0x72, 0x41, 0x63, 0x63, 0x65, 0x73, 0x73, 0x54, 0x6f, 0x6b, + 0x65, 0x6e, 0x73, 0x42, 0x61, 0x74, 0x63, 0x68, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, + 0x32, 0x55, 0x0a, 0x0a, 0x41, 0x75, 0x74, 0x68, 0x6f, 0x72, 0x69, 0x7a, 0x65, 0x72, 0x12, 0x47, + 0x0a, 0x0c, 0x49, 0x73, 0x41, 0x75, 0x74, 0x68, 0x6f, 0x72, 0x69, 0x7a, 0x65, 0x64, 0x12, 0x1a, + 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x2e, 0x49, 0x73, 0x41, 0x75, 0x74, 0x68, 0x6f, 0x72, 0x69, + 0x7a, 0x65, 0x64, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x1b, 0x2e, 0x70, 0x72, 0x6f, + 0x74, 0x6f, 0x2e, 0x49, 0x73, 0x41, 0x75, 0x74, 0x68, 0x6f, 0x72, 0x69, 0x7a, 0x65, 0x64, 0x52, + 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x42, 0x2b, 0x5a, 0x29, 0x67, 0x69, 0x74, 0x68, 0x75, + 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x63, 0x6f, 
0x64, 0x65, 0x72, 0x2f, 0x63, 0x6f, 0x64, 0x65, + 0x72, 0x2f, 0x76, 0x32, 0x2f, 0x61, 0x69, 0x62, 0x72, 0x69, 0x64, 0x67, 0x65, 0x64, 0x2f, 0x70, + 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x06, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x33, } var ( - file_enterprise_x_aibridged_proto_aibridged_proto_rawDescOnce sync.Once - file_enterprise_x_aibridged_proto_aibridged_proto_rawDescData = file_enterprise_x_aibridged_proto_aibridged_proto_rawDesc + file_enterprise_aibridged_proto_aibridged_proto_rawDescOnce sync.Once + file_enterprise_aibridged_proto_aibridged_proto_rawDescData = file_enterprise_aibridged_proto_aibridged_proto_rawDesc ) -func file_enterprise_x_aibridged_proto_aibridged_proto_rawDescGZIP() []byte { - file_enterprise_x_aibridged_proto_aibridged_proto_rawDescOnce.Do(func() { - file_enterprise_x_aibridged_proto_aibridged_proto_rawDescData = protoimpl.X.CompressGZIP(file_enterprise_x_aibridged_proto_aibridged_proto_rawDescData) +func file_enterprise_aibridged_proto_aibridged_proto_rawDescGZIP() []byte { + file_enterprise_aibridged_proto_aibridged_proto_rawDescOnce.Do(func() { + file_enterprise_aibridged_proto_aibridged_proto_rawDescData = protoimpl.X.CompressGZIP(file_enterprise_aibridged_proto_aibridged_proto_rawDescData) }) - return file_enterprise_x_aibridged_proto_aibridged_proto_rawDescData + return file_enterprise_aibridged_proto_aibridged_proto_rawDescData } -var file_enterprise_x_aibridged_proto_aibridged_proto_msgTypes = make([]protoimpl.MessageInfo, 23) -var file_enterprise_x_aibridged_proto_aibridged_proto_goTypes = []interface{}{ +var file_enterprise_aibridged_proto_aibridged_proto_msgTypes = make([]protoimpl.MessageInfo, 23) +var file_enterprise_aibridged_proto_aibridged_proto_goTypes = []interface{}{ (*RecordInterceptionRequest)(nil), // 0: proto.RecordInterceptionRequest (*RecordInterceptionResponse)(nil), // 1: proto.RecordInterceptionResponse (*RecordInterceptionEndedRequest)(nil), // 2: proto.RecordInterceptionEndedRequest @@ -1287,7 +1306,7 @@ var 
file_enterprise_x_aibridged_proto_aibridged_proto_goTypes = []interface{}{ (*timestamppb.Timestamp)(nil), // 23: google.protobuf.Timestamp (*anypb.Any)(nil), // 24: google.protobuf.Any } -var file_enterprise_x_aibridged_proto_aibridged_proto_depIdxs = []int32{ +var file_enterprise_aibridged_proto_aibridged_proto_depIdxs = []int32{ 17, // 0: proto.RecordInterceptionRequest.metadata:type_name -> proto.RecordInterceptionRequest.MetadataEntry 23, // 1: proto.RecordInterceptionRequest.started_at:type_name -> google.protobuf.Timestamp 23, // 2: proto.RecordInterceptionEndedRequest.ended_at:type_name -> google.protobuf.Timestamp @@ -1328,13 +1347,13 @@ var file_enterprise_x_aibridged_proto_aibridged_proto_depIdxs = []int32{ 0, // [0:17] is the sub-list for field type_name } -func init() { file_enterprise_x_aibridged_proto_aibridged_proto_init() } -func file_enterprise_x_aibridged_proto_aibridged_proto_init() { - if File_enterprise_x_aibridged_proto_aibridged_proto != nil { +func init() { file_enterprise_aibridged_proto_aibridged_proto_init() } +func file_enterprise_aibridged_proto_aibridged_proto_init() { + if File_enterprise_aibridged_proto_aibridged_proto != nil { return } if !protoimpl.UnsafeEnabled { - file_enterprise_x_aibridged_proto_aibridged_proto_msgTypes[0].Exporter = func(v interface{}, i int) interface{} { + file_enterprise_aibridged_proto_aibridged_proto_msgTypes[0].Exporter = func(v interface{}, i int) interface{} { switch v := v.(*RecordInterceptionRequest); i { case 0: return &v.state @@ -1346,7 +1365,7 @@ func file_enterprise_x_aibridged_proto_aibridged_proto_init() { return nil } } - file_enterprise_x_aibridged_proto_aibridged_proto_msgTypes[1].Exporter = func(v interface{}, i int) interface{} { + file_enterprise_aibridged_proto_aibridged_proto_msgTypes[1].Exporter = func(v interface{}, i int) interface{} { switch v := v.(*RecordInterceptionResponse); i { case 0: return &v.state @@ -1358,7 +1377,7 @@ func 
file_enterprise_x_aibridged_proto_aibridged_proto_init() { return nil } } - file_enterprise_x_aibridged_proto_aibridged_proto_msgTypes[2].Exporter = func(v interface{}, i int) interface{} { + file_enterprise_aibridged_proto_aibridged_proto_msgTypes[2].Exporter = func(v interface{}, i int) interface{} { switch v := v.(*RecordInterceptionEndedRequest); i { case 0: return &v.state @@ -1370,7 +1389,7 @@ func file_enterprise_x_aibridged_proto_aibridged_proto_init() { return nil } } - file_enterprise_x_aibridged_proto_aibridged_proto_msgTypes[3].Exporter = func(v interface{}, i int) interface{} { + file_enterprise_aibridged_proto_aibridged_proto_msgTypes[3].Exporter = func(v interface{}, i int) interface{} { switch v := v.(*RecordInterceptionEndedResponse); i { case 0: return &v.state @@ -1382,7 +1401,7 @@ func file_enterprise_x_aibridged_proto_aibridged_proto_init() { return nil } } - file_enterprise_x_aibridged_proto_aibridged_proto_msgTypes[4].Exporter = func(v interface{}, i int) interface{} { + file_enterprise_aibridged_proto_aibridged_proto_msgTypes[4].Exporter = func(v interface{}, i int) interface{} { switch v := v.(*RecordTokenUsageRequest); i { case 0: return &v.state @@ -1394,7 +1413,7 @@ func file_enterprise_x_aibridged_proto_aibridged_proto_init() { return nil } } - file_enterprise_x_aibridged_proto_aibridged_proto_msgTypes[5].Exporter = func(v interface{}, i int) interface{} { + file_enterprise_aibridged_proto_aibridged_proto_msgTypes[5].Exporter = func(v interface{}, i int) interface{} { switch v := v.(*RecordTokenUsageResponse); i { case 0: return &v.state @@ -1406,7 +1425,7 @@ func file_enterprise_x_aibridged_proto_aibridged_proto_init() { return nil } } - file_enterprise_x_aibridged_proto_aibridged_proto_msgTypes[6].Exporter = func(v interface{}, i int) interface{} { + file_enterprise_aibridged_proto_aibridged_proto_msgTypes[6].Exporter = func(v interface{}, i int) interface{} { switch v := v.(*RecordPromptUsageRequest); i { case 0: return &v.state @@ 
-1418,7 +1437,7 @@ func file_enterprise_x_aibridged_proto_aibridged_proto_init() { return nil } } - file_enterprise_x_aibridged_proto_aibridged_proto_msgTypes[7].Exporter = func(v interface{}, i int) interface{} { + file_enterprise_aibridged_proto_aibridged_proto_msgTypes[7].Exporter = func(v interface{}, i int) interface{} { switch v := v.(*RecordPromptUsageResponse); i { case 0: return &v.state @@ -1430,7 +1449,7 @@ func file_enterprise_x_aibridged_proto_aibridged_proto_init() { return nil } } - file_enterprise_x_aibridged_proto_aibridged_proto_msgTypes[8].Exporter = func(v interface{}, i int) interface{} { + file_enterprise_aibridged_proto_aibridged_proto_msgTypes[8].Exporter = func(v interface{}, i int) interface{} { switch v := v.(*RecordToolUsageRequest); i { case 0: return &v.state @@ -1442,7 +1461,7 @@ func file_enterprise_x_aibridged_proto_aibridged_proto_init() { return nil } } - file_enterprise_x_aibridged_proto_aibridged_proto_msgTypes[9].Exporter = func(v interface{}, i int) interface{} { + file_enterprise_aibridged_proto_aibridged_proto_msgTypes[9].Exporter = func(v interface{}, i int) interface{} { switch v := v.(*RecordToolUsageResponse); i { case 0: return &v.state @@ -1454,7 +1473,7 @@ func file_enterprise_x_aibridged_proto_aibridged_proto_init() { return nil } } - file_enterprise_x_aibridged_proto_aibridged_proto_msgTypes[10].Exporter = func(v interface{}, i int) interface{} { + file_enterprise_aibridged_proto_aibridged_proto_msgTypes[10].Exporter = func(v interface{}, i int) interface{} { switch v := v.(*GetMCPServerConfigsRequest); i { case 0: return &v.state @@ -1466,7 +1485,7 @@ func file_enterprise_x_aibridged_proto_aibridged_proto_init() { return nil } } - file_enterprise_x_aibridged_proto_aibridged_proto_msgTypes[11].Exporter = func(v interface{}, i int) interface{} { + file_enterprise_aibridged_proto_aibridged_proto_msgTypes[11].Exporter = func(v interface{}, i int) interface{} { switch v := v.(*GetMCPServerConfigsResponse); i { case 0: 
return &v.state @@ -1478,7 +1497,7 @@ func file_enterprise_x_aibridged_proto_aibridged_proto_init() { return nil } } - file_enterprise_x_aibridged_proto_aibridged_proto_msgTypes[12].Exporter = func(v interface{}, i int) interface{} { + file_enterprise_aibridged_proto_aibridged_proto_msgTypes[12].Exporter = func(v interface{}, i int) interface{} { switch v := v.(*MCPServerConfig); i { case 0: return &v.state @@ -1490,7 +1509,7 @@ func file_enterprise_x_aibridged_proto_aibridged_proto_init() { return nil } } - file_enterprise_x_aibridged_proto_aibridged_proto_msgTypes[13].Exporter = func(v interface{}, i int) interface{} { + file_enterprise_aibridged_proto_aibridged_proto_msgTypes[13].Exporter = func(v interface{}, i int) interface{} { switch v := v.(*GetMCPServerAccessTokensBatchRequest); i { case 0: return &v.state @@ -1502,7 +1521,7 @@ func file_enterprise_x_aibridged_proto_aibridged_proto_init() { return nil } } - file_enterprise_x_aibridged_proto_aibridged_proto_msgTypes[14].Exporter = func(v interface{}, i int) interface{} { + file_enterprise_aibridged_proto_aibridged_proto_msgTypes[14].Exporter = func(v interface{}, i int) interface{} { switch v := v.(*GetMCPServerAccessTokensBatchResponse); i { case 0: return &v.state @@ -1514,7 +1533,7 @@ func file_enterprise_x_aibridged_proto_aibridged_proto_init() { return nil } } - file_enterprise_x_aibridged_proto_aibridged_proto_msgTypes[15].Exporter = func(v interface{}, i int) interface{} { + file_enterprise_aibridged_proto_aibridged_proto_msgTypes[15].Exporter = func(v interface{}, i int) interface{} { switch v := v.(*IsAuthorizedRequest); i { case 0: return &v.state @@ -1526,7 +1545,7 @@ func file_enterprise_x_aibridged_proto_aibridged_proto_init() { return nil } } - file_enterprise_x_aibridged_proto_aibridged_proto_msgTypes[16].Exporter = func(v interface{}, i int) interface{} { + file_enterprise_aibridged_proto_aibridged_proto_msgTypes[16].Exporter = func(v interface{}, i int) interface{} { switch v := 
v.(*IsAuthorizedResponse); i { case 0: return &v.state @@ -1539,23 +1558,23 @@ func file_enterprise_x_aibridged_proto_aibridged_proto_init() { } } } - file_enterprise_x_aibridged_proto_aibridged_proto_msgTypes[8].OneofWrappers = []interface{}{} + file_enterprise_aibridged_proto_aibridged_proto_msgTypes[8].OneofWrappers = []interface{}{} type x struct{} out := protoimpl.TypeBuilder{ File: protoimpl.DescBuilder{ GoPackagePath: reflect.TypeOf(x{}).PkgPath(), - RawDescriptor: file_enterprise_x_aibridged_proto_aibridged_proto_rawDesc, + RawDescriptor: file_enterprise_aibridged_proto_aibridged_proto_rawDesc, NumEnums: 0, NumMessages: 23, NumExtensions: 0, NumServices: 3, }, - GoTypes: file_enterprise_x_aibridged_proto_aibridged_proto_goTypes, - DependencyIndexes: file_enterprise_x_aibridged_proto_aibridged_proto_depIdxs, - MessageInfos: file_enterprise_x_aibridged_proto_aibridged_proto_msgTypes, + GoTypes: file_enterprise_aibridged_proto_aibridged_proto_goTypes, + DependencyIndexes: file_enterprise_aibridged_proto_aibridged_proto_depIdxs, + MessageInfos: file_enterprise_aibridged_proto_aibridged_proto_msgTypes, }.Build() - File_enterprise_x_aibridged_proto_aibridged_proto = out.File - file_enterprise_x_aibridged_proto_aibridged_proto_rawDesc = nil - file_enterprise_x_aibridged_proto_aibridged_proto_goTypes = nil - file_enterprise_x_aibridged_proto_aibridged_proto_depIdxs = nil + File_enterprise_aibridged_proto_aibridged_proto = out.File + file_enterprise_aibridged_proto_aibridged_proto_rawDesc = nil + file_enterprise_aibridged_proto_aibridged_proto_goTypes = nil + file_enterprise_aibridged_proto_aibridged_proto_depIdxs = nil } diff --git a/enterprise/x/aibridged/proto/aibridged.proto b/enterprise/aibridged/proto/aibridged.proto similarity index 98% rename from enterprise/x/aibridged/proto/aibridged.proto rename to enterprise/aibridged/proto/aibridged.proto index 01ab07c8be40d..c6c5abcff0410 100644 --- a/enterprise/x/aibridged/proto/aibridged.proto +++ 
b/enterprise/aibridged/proto/aibridged.proto @@ -42,6 +42,7 @@ message RecordInterceptionRequest { string model = 4; map metadata = 5; google.protobuf.Timestamp started_at = 6; + string api_key_id = 7; } message RecordInterceptionResponse {} @@ -119,4 +120,5 @@ message IsAuthorizedRequest { message IsAuthorizedResponse { string owner_id = 1; + string api_key_id = 2; } diff --git a/enterprise/x/aibridged/proto/aibridged_drpc.pb.go b/enterprise/aibridged/proto/aibridged_drpc.pb.go similarity index 84% rename from enterprise/x/aibridged/proto/aibridged_drpc.pb.go rename to enterprise/aibridged/proto/aibridged_drpc.pb.go index 4c7cb3c190764..1309957d153d5 100644 --- a/enterprise/x/aibridged/proto/aibridged_drpc.pb.go +++ b/enterprise/aibridged/proto/aibridged_drpc.pb.go @@ -1,6 +1,6 @@ // Code generated by protoc-gen-go-drpc. DO NOT EDIT. // protoc-gen-go-drpc version: v0.0.34 -// source: enterprise/x/aibridged/proto/aibridged.proto +// source: enterprise/aibridged/proto/aibridged.proto package proto @@ -13,25 +13,25 @@ import ( drpcerr "storj.io/drpc/drpcerr" ) -type drpcEncoding_File_enterprise_x_aibridged_proto_aibridged_proto struct{} +type drpcEncoding_File_enterprise_aibridged_proto_aibridged_proto struct{} -func (drpcEncoding_File_enterprise_x_aibridged_proto_aibridged_proto) Marshal(msg drpc.Message) ([]byte, error) { +func (drpcEncoding_File_enterprise_aibridged_proto_aibridged_proto) Marshal(msg drpc.Message) ([]byte, error) { return proto.Marshal(msg.(proto.Message)) } -func (drpcEncoding_File_enterprise_x_aibridged_proto_aibridged_proto) MarshalAppend(buf []byte, msg drpc.Message) ([]byte, error) { +func (drpcEncoding_File_enterprise_aibridged_proto_aibridged_proto) MarshalAppend(buf []byte, msg drpc.Message) ([]byte, error) { return proto.MarshalOptions{}.MarshalAppend(buf, msg.(proto.Message)) } -func (drpcEncoding_File_enterprise_x_aibridged_proto_aibridged_proto) Unmarshal(buf []byte, msg drpc.Message) error { +func 
(drpcEncoding_File_enterprise_aibridged_proto_aibridged_proto) Unmarshal(buf []byte, msg drpc.Message) error { return proto.Unmarshal(buf, msg.(proto.Message)) } -func (drpcEncoding_File_enterprise_x_aibridged_proto_aibridged_proto) JSONMarshal(msg drpc.Message) ([]byte, error) { +func (drpcEncoding_File_enterprise_aibridged_proto_aibridged_proto) JSONMarshal(msg drpc.Message) ([]byte, error) { return protojson.Marshal(msg.(proto.Message)) } -func (drpcEncoding_File_enterprise_x_aibridged_proto_aibridged_proto) JSONUnmarshal(buf []byte, msg drpc.Message) error { +func (drpcEncoding_File_enterprise_aibridged_proto_aibridged_proto) JSONUnmarshal(buf []byte, msg drpc.Message) error { return protojson.Unmarshal(buf, msg.(proto.Message)) } @@ -57,7 +57,7 @@ func (c *drpcRecorderClient) DRPCConn() drpc.Conn { return c.cc } func (c *drpcRecorderClient) RecordInterception(ctx context.Context, in *RecordInterceptionRequest) (*RecordInterceptionResponse, error) { out := new(RecordInterceptionResponse) - err := c.cc.Invoke(ctx, "/proto.Recorder/RecordInterception", drpcEncoding_File_enterprise_x_aibridged_proto_aibridged_proto{}, in, out) + err := c.cc.Invoke(ctx, "/proto.Recorder/RecordInterception", drpcEncoding_File_enterprise_aibridged_proto_aibridged_proto{}, in, out) if err != nil { return nil, err } @@ -66,7 +66,7 @@ func (c *drpcRecorderClient) RecordInterception(ctx context.Context, in *RecordI func (c *drpcRecorderClient) RecordInterceptionEnded(ctx context.Context, in *RecordInterceptionEndedRequest) (*RecordInterceptionEndedResponse, error) { out := new(RecordInterceptionEndedResponse) - err := c.cc.Invoke(ctx, "/proto.Recorder/RecordInterceptionEnded", drpcEncoding_File_enterprise_x_aibridged_proto_aibridged_proto{}, in, out) + err := c.cc.Invoke(ctx, "/proto.Recorder/RecordInterceptionEnded", drpcEncoding_File_enterprise_aibridged_proto_aibridged_proto{}, in, out) if err != nil { return nil, err } @@ -75,7 +75,7 @@ func (c *drpcRecorderClient) 
RecordInterceptionEnded(ctx context.Context, in *Re func (c *drpcRecorderClient) RecordTokenUsage(ctx context.Context, in *RecordTokenUsageRequest) (*RecordTokenUsageResponse, error) { out := new(RecordTokenUsageResponse) - err := c.cc.Invoke(ctx, "/proto.Recorder/RecordTokenUsage", drpcEncoding_File_enterprise_x_aibridged_proto_aibridged_proto{}, in, out) + err := c.cc.Invoke(ctx, "/proto.Recorder/RecordTokenUsage", drpcEncoding_File_enterprise_aibridged_proto_aibridged_proto{}, in, out) if err != nil { return nil, err } @@ -84,7 +84,7 @@ func (c *drpcRecorderClient) RecordTokenUsage(ctx context.Context, in *RecordTok func (c *drpcRecorderClient) RecordPromptUsage(ctx context.Context, in *RecordPromptUsageRequest) (*RecordPromptUsageResponse, error) { out := new(RecordPromptUsageResponse) - err := c.cc.Invoke(ctx, "/proto.Recorder/RecordPromptUsage", drpcEncoding_File_enterprise_x_aibridged_proto_aibridged_proto{}, in, out) + err := c.cc.Invoke(ctx, "/proto.Recorder/RecordPromptUsage", drpcEncoding_File_enterprise_aibridged_proto_aibridged_proto{}, in, out) if err != nil { return nil, err } @@ -93,7 +93,7 @@ func (c *drpcRecorderClient) RecordPromptUsage(ctx context.Context, in *RecordPr func (c *drpcRecorderClient) RecordToolUsage(ctx context.Context, in *RecordToolUsageRequest) (*RecordToolUsageResponse, error) { out := new(RecordToolUsageResponse) - err := c.cc.Invoke(ctx, "/proto.Recorder/RecordToolUsage", drpcEncoding_File_enterprise_x_aibridged_proto_aibridged_proto{}, in, out) + err := c.cc.Invoke(ctx, "/proto.Recorder/RecordToolUsage", drpcEncoding_File_enterprise_aibridged_proto_aibridged_proto{}, in, out) if err != nil { return nil, err } @@ -137,7 +137,7 @@ func (DRPCRecorderDescription) NumMethods() int { return 5 } func (DRPCRecorderDescription) Method(n int) (string, drpc.Encoding, drpc.Receiver, interface{}, bool) { switch n { case 0: - return "/proto.Recorder/RecordInterception", drpcEncoding_File_enterprise_x_aibridged_proto_aibridged_proto{}, + 
return "/proto.Recorder/RecordInterception", drpcEncoding_File_enterprise_aibridged_proto_aibridged_proto{}, func(srv interface{}, ctx context.Context, in1, in2 interface{}) (drpc.Message, error) { return srv.(DRPCRecorderServer). RecordInterception( @@ -146,7 +146,7 @@ func (DRPCRecorderDescription) Method(n int) (string, drpc.Encoding, drpc.Receiv ) }, DRPCRecorderServer.RecordInterception, true case 1: - return "/proto.Recorder/RecordInterceptionEnded", drpcEncoding_File_enterprise_x_aibridged_proto_aibridged_proto{}, + return "/proto.Recorder/RecordInterceptionEnded", drpcEncoding_File_enterprise_aibridged_proto_aibridged_proto{}, func(srv interface{}, ctx context.Context, in1, in2 interface{}) (drpc.Message, error) { return srv.(DRPCRecorderServer). RecordInterceptionEnded( @@ -155,7 +155,7 @@ func (DRPCRecorderDescription) Method(n int) (string, drpc.Encoding, drpc.Receiv ) }, DRPCRecorderServer.RecordInterceptionEnded, true case 2: - return "/proto.Recorder/RecordTokenUsage", drpcEncoding_File_enterprise_x_aibridged_proto_aibridged_proto{}, + return "/proto.Recorder/RecordTokenUsage", drpcEncoding_File_enterprise_aibridged_proto_aibridged_proto{}, func(srv interface{}, ctx context.Context, in1, in2 interface{}) (drpc.Message, error) { return srv.(DRPCRecorderServer). RecordTokenUsage( @@ -164,7 +164,7 @@ func (DRPCRecorderDescription) Method(n int) (string, drpc.Encoding, drpc.Receiv ) }, DRPCRecorderServer.RecordTokenUsage, true case 3: - return "/proto.Recorder/RecordPromptUsage", drpcEncoding_File_enterprise_x_aibridged_proto_aibridged_proto{}, + return "/proto.Recorder/RecordPromptUsage", drpcEncoding_File_enterprise_aibridged_proto_aibridged_proto{}, func(srv interface{}, ctx context.Context, in1, in2 interface{}) (drpc.Message, error) { return srv.(DRPCRecorderServer). 
RecordPromptUsage( @@ -173,7 +173,7 @@ func (DRPCRecorderDescription) Method(n int) (string, drpc.Encoding, drpc.Receiv ) }, DRPCRecorderServer.RecordPromptUsage, true case 4: - return "/proto.Recorder/RecordToolUsage", drpcEncoding_File_enterprise_x_aibridged_proto_aibridged_proto{}, + return "/proto.Recorder/RecordToolUsage", drpcEncoding_File_enterprise_aibridged_proto_aibridged_proto{}, func(srv interface{}, ctx context.Context, in1, in2 interface{}) (drpc.Message, error) { return srv.(DRPCRecorderServer). RecordToolUsage( @@ -200,7 +200,7 @@ type drpcRecorder_RecordInterceptionStream struct { } func (x *drpcRecorder_RecordInterceptionStream) SendAndClose(m *RecordInterceptionResponse) error { - if err := x.MsgSend(m, drpcEncoding_File_enterprise_x_aibridged_proto_aibridged_proto{}); err != nil { + if err := x.MsgSend(m, drpcEncoding_File_enterprise_aibridged_proto_aibridged_proto{}); err != nil { return err } return x.CloseSend() @@ -216,7 +216,7 @@ type drpcRecorder_RecordInterceptionEndedStream struct { } func (x *drpcRecorder_RecordInterceptionEndedStream) SendAndClose(m *RecordInterceptionEndedResponse) error { - if err := x.MsgSend(m, drpcEncoding_File_enterprise_x_aibridged_proto_aibridged_proto{}); err != nil { + if err := x.MsgSend(m, drpcEncoding_File_enterprise_aibridged_proto_aibridged_proto{}); err != nil { return err } return x.CloseSend() @@ -232,7 +232,7 @@ type drpcRecorder_RecordTokenUsageStream struct { } func (x *drpcRecorder_RecordTokenUsageStream) SendAndClose(m *RecordTokenUsageResponse) error { - if err := x.MsgSend(m, drpcEncoding_File_enterprise_x_aibridged_proto_aibridged_proto{}); err != nil { + if err := x.MsgSend(m, drpcEncoding_File_enterprise_aibridged_proto_aibridged_proto{}); err != nil { return err } return x.CloseSend() @@ -248,7 +248,7 @@ type drpcRecorder_RecordPromptUsageStream struct { } func (x *drpcRecorder_RecordPromptUsageStream) SendAndClose(m *RecordPromptUsageResponse) error { - if err := x.MsgSend(m, 
drpcEncoding_File_enterprise_x_aibridged_proto_aibridged_proto{}); err != nil { + if err := x.MsgSend(m, drpcEncoding_File_enterprise_aibridged_proto_aibridged_proto{}); err != nil { return err } return x.CloseSend() @@ -264,7 +264,7 @@ type drpcRecorder_RecordToolUsageStream struct { } func (x *drpcRecorder_RecordToolUsageStream) SendAndClose(m *RecordToolUsageResponse) error { - if err := x.MsgSend(m, drpcEncoding_File_enterprise_x_aibridged_proto_aibridged_proto{}); err != nil { + if err := x.MsgSend(m, drpcEncoding_File_enterprise_aibridged_proto_aibridged_proto{}); err != nil { return err } return x.CloseSend() @@ -289,7 +289,7 @@ func (c *drpcMCPConfiguratorClient) DRPCConn() drpc.Conn { return c.cc } func (c *drpcMCPConfiguratorClient) GetMCPServerConfigs(ctx context.Context, in *GetMCPServerConfigsRequest) (*GetMCPServerConfigsResponse, error) { out := new(GetMCPServerConfigsResponse) - err := c.cc.Invoke(ctx, "/proto.MCPConfigurator/GetMCPServerConfigs", drpcEncoding_File_enterprise_x_aibridged_proto_aibridged_proto{}, in, out) + err := c.cc.Invoke(ctx, "/proto.MCPConfigurator/GetMCPServerConfigs", drpcEncoding_File_enterprise_aibridged_proto_aibridged_proto{}, in, out) if err != nil { return nil, err } @@ -298,7 +298,7 @@ func (c *drpcMCPConfiguratorClient) GetMCPServerConfigs(ctx context.Context, in func (c *drpcMCPConfiguratorClient) GetMCPServerAccessTokensBatch(ctx context.Context, in *GetMCPServerAccessTokensBatchRequest) (*GetMCPServerAccessTokensBatchResponse, error) { out := new(GetMCPServerAccessTokensBatchResponse) - err := c.cc.Invoke(ctx, "/proto.MCPConfigurator/GetMCPServerAccessTokensBatch", drpcEncoding_File_enterprise_x_aibridged_proto_aibridged_proto{}, in, out) + err := c.cc.Invoke(ctx, "/proto.MCPConfigurator/GetMCPServerAccessTokensBatch", drpcEncoding_File_enterprise_aibridged_proto_aibridged_proto{}, in, out) if err != nil { return nil, err } @@ -327,7 +327,7 @@ func (DRPCMCPConfiguratorDescription) NumMethods() int { return 2 } func 
(DRPCMCPConfiguratorDescription) Method(n int) (string, drpc.Encoding, drpc.Receiver, interface{}, bool) { switch n { case 0: - return "/proto.MCPConfigurator/GetMCPServerConfigs", drpcEncoding_File_enterprise_x_aibridged_proto_aibridged_proto{}, + return "/proto.MCPConfigurator/GetMCPServerConfigs", drpcEncoding_File_enterprise_aibridged_proto_aibridged_proto{}, func(srv interface{}, ctx context.Context, in1, in2 interface{}) (drpc.Message, error) { return srv.(DRPCMCPConfiguratorServer). GetMCPServerConfigs( @@ -336,7 +336,7 @@ func (DRPCMCPConfiguratorDescription) Method(n int) (string, drpc.Encoding, drpc ) }, DRPCMCPConfiguratorServer.GetMCPServerConfigs, true case 1: - return "/proto.MCPConfigurator/GetMCPServerAccessTokensBatch", drpcEncoding_File_enterprise_x_aibridged_proto_aibridged_proto{}, + return "/proto.MCPConfigurator/GetMCPServerAccessTokensBatch", drpcEncoding_File_enterprise_aibridged_proto_aibridged_proto{}, func(srv interface{}, ctx context.Context, in1, in2 interface{}) (drpc.Message, error) { return srv.(DRPCMCPConfiguratorServer). 
GetMCPServerAccessTokensBatch( @@ -363,7 +363,7 @@ type drpcMCPConfigurator_GetMCPServerConfigsStream struct { } func (x *drpcMCPConfigurator_GetMCPServerConfigsStream) SendAndClose(m *GetMCPServerConfigsResponse) error { - if err := x.MsgSend(m, drpcEncoding_File_enterprise_x_aibridged_proto_aibridged_proto{}); err != nil { + if err := x.MsgSend(m, drpcEncoding_File_enterprise_aibridged_proto_aibridged_proto{}); err != nil { return err } return x.CloseSend() @@ -379,7 +379,7 @@ type drpcMCPConfigurator_GetMCPServerAccessTokensBatchStream struct { } func (x *drpcMCPConfigurator_GetMCPServerAccessTokensBatchStream) SendAndClose(m *GetMCPServerAccessTokensBatchResponse) error { - if err := x.MsgSend(m, drpcEncoding_File_enterprise_x_aibridged_proto_aibridged_proto{}); err != nil { + if err := x.MsgSend(m, drpcEncoding_File_enterprise_aibridged_proto_aibridged_proto{}); err != nil { return err } return x.CloseSend() @@ -403,7 +403,7 @@ func (c *drpcAuthorizerClient) DRPCConn() drpc.Conn { return c.cc } func (c *drpcAuthorizerClient) IsAuthorized(ctx context.Context, in *IsAuthorizedRequest) (*IsAuthorizedResponse, error) { out := new(IsAuthorizedResponse) - err := c.cc.Invoke(ctx, "/proto.Authorizer/IsAuthorized", drpcEncoding_File_enterprise_x_aibridged_proto_aibridged_proto{}, in, out) + err := c.cc.Invoke(ctx, "/proto.Authorizer/IsAuthorized", drpcEncoding_File_enterprise_aibridged_proto_aibridged_proto{}, in, out) if err != nil { return nil, err } @@ -427,7 +427,7 @@ func (DRPCAuthorizerDescription) NumMethods() int { return 1 } func (DRPCAuthorizerDescription) Method(n int) (string, drpc.Encoding, drpc.Receiver, interface{}, bool) { switch n { case 0: - return "/proto.Authorizer/IsAuthorized", drpcEncoding_File_enterprise_x_aibridged_proto_aibridged_proto{}, + return "/proto.Authorizer/IsAuthorized", drpcEncoding_File_enterprise_aibridged_proto_aibridged_proto{}, func(srv interface{}, ctx context.Context, in1, in2 interface{}) (drpc.Message, error) { return 
srv.(DRPCAuthorizerServer). IsAuthorized( @@ -454,7 +454,7 @@ type drpcAuthorizer_IsAuthorizedStream struct { } func (x *drpcAuthorizer_IsAuthorizedStream) SendAndClose(m *IsAuthorizedResponse) error { - if err := x.MsgSend(m, drpcEncoding_File_enterprise_x_aibridged_proto_aibridged_proto{}); err != nil { + if err := x.MsgSend(m, drpcEncoding_File_enterprise_aibridged_proto_aibridged_proto{}); err != nil { return err } return x.CloseSend() diff --git a/enterprise/x/aibridged/request.go b/enterprise/aibridged/request.go similarity index 85% rename from enterprise/x/aibridged/request.go rename to enterprise/aibridged/request.go index 29196adb88a49..3b2880f1a9cd9 100644 --- a/enterprise/x/aibridged/request.go +++ b/enterprise/aibridged/request.go @@ -4,5 +4,6 @@ import "github.com/google/uuid" type Request struct { SessionKey string + APIKeyID string InitiatorID uuid.UUID } diff --git a/enterprise/x/aibridged/server.go b/enterprise/aibridged/server.go similarity index 68% rename from enterprise/x/aibridged/server.go rename to enterprise/aibridged/server.go index 713ea2a0cd126..052c94dad4a9e 100644 --- a/enterprise/x/aibridged/server.go +++ b/enterprise/aibridged/server.go @@ -1,6 +1,6 @@ package aibridged -import "github.com/coder/coder/v2/enterprise/x/aibridged/proto" +import "github.com/coder/coder/v2/enterprise/aibridged/proto" type DRPCServer interface { proto.DRPCRecorderServer diff --git a/enterprise/x/aibridged/translator.go b/enterprise/aibridged/translator.go similarity index 89% rename from enterprise/x/aibridged/translator.go rename to enterprise/aibridged/translator.go index bfc39d834ad2c..cbede0bc729f5 100644 --- a/enterprise/x/aibridged/translator.go +++ b/enterprise/aibridged/translator.go @@ -11,7 +11,7 @@ import ( "google.golang.org/protobuf/types/known/timestamppb" "github.com/coder/coder/v2/coderd/util/ptr" - "github.com/coder/coder/v2/enterprise/x/aibridged/proto" + "github.com/coder/coder/v2/enterprise/aibridged/proto" "github.com/coder/aibridge" 
) @@ -20,12 +20,14 @@ var _ aibridge.Recorder = &recorderTranslation{} // recorderTranslation satisfies the aibridge.Recorder interface and translates calls into dRPC calls to aibridgedserver. type recorderTranslation struct { - client proto.DRPCRecorderClient + apiKeyID string + client proto.DRPCRecorderClient } func (t *recorderTranslation) RecordInterception(ctx context.Context, req *aibridge.InterceptionRecord) error { _, err := t.client.RecordInterception(ctx, &proto.RecordInterceptionRequest{ Id: req.ID, + ApiKeyId: t.apiKeyID, InitiatorId: req.InitiatorID, Provider: req.Provider, Model: req.Model, @@ -55,12 +57,22 @@ func (t *recorderTranslation) RecordPromptUsage(ctx context.Context, req *aibrid } func (t *recorderTranslation) RecordTokenUsage(ctx context.Context, req *aibridge.TokenUsageRecord) error { + merged := req.Metadata + if merged == nil { + merged = aibridge.Metadata{} + } + + // Merge the token usage values into metadata; later we might want to store some of these in their own fields. 
+ for k, v := range req.ExtraTokenTypes { + merged[k] = v + } + _, err := t.client.RecordTokenUsage(ctx, &proto.RecordTokenUsageRequest{ InterceptionId: req.InterceptionID, MsgId: req.MsgID, InputTokens: req.Input, OutputTokens: req.Output, - Metadata: marshalForProto(req.Metadata), + Metadata: marshalForProto(merged), CreatedAt: timestamppb.New(req.CreatedAt), }) return err diff --git a/enterprise/x/aibridged/utils_test.go b/enterprise/aibridged/utils_test.go similarity index 100% rename from enterprise/x/aibridged/utils_test.go rename to enterprise/aibridged/utils_test.go diff --git a/enterprise/x/aibridgedserver/aibridgedserver.go b/enterprise/aibridgedserver/aibridgedserver.go similarity index 94% rename from enterprise/x/aibridgedserver/aibridgedserver.go rename to enterprise/aibridgedserver/aibridgedserver.go index 2c5e3ff71c072..156f3aa9d05da 100644 --- a/enterprise/x/aibridgedserver/aibridgedserver.go +++ b/enterprise/aibridgedserver/aibridgedserver.go @@ -24,8 +24,8 @@ import ( "github.com/coder/coder/v2/coderd/httpmw" codermcp "github.com/coder/coder/v2/coderd/mcp" "github.com/coder/coder/v2/codersdk" - "github.com/coder/coder/v2/enterprise/x/aibridged" - "github.com/coder/coder/v2/enterprise/x/aibridged/proto" + "github.com/coder/coder/v2/enterprise/aibridged" + "github.com/coder/coder/v2/enterprise/aibridged/proto" ) var ( @@ -77,7 +77,9 @@ type Server struct { coderMCPConfig *proto.MCPServerConfig // may be nil if not available } -func NewServer(lifecycleCtx context.Context, store store, logger slog.Logger, accessURL string, externalAuthConfigs []*externalauth.Config, experiments codersdk.Experiments) (*Server, error) { +func NewServer(lifecycleCtx context.Context, store store, logger slog.Logger, accessURL string, + bridgeCfg codersdk.AIBridgeConfig, externalAuthConfigs []*externalauth.Config, experiments codersdk.Experiments, +) (*Server, error) { eac := make(map[string]*externalauth.Config, len(externalAuthConfigs)) for _, cfg := range 
externalAuthConfigs { @@ -88,18 +90,22 @@ func NewServer(lifecycleCtx context.Context, store store, logger slog.Logger, ac eac[cfg.ID] = cfg } - coderMCPConfig, err := getCoderMCPServerConfig(experiments, accessURL) - if err != nil { - logger.Warn(lifecycleCtx, "failed to retrieve coder MCP server config, Coder MCP will not be available", slog.Error(err)) - } - - return &Server{ + srv := &Server{ lifecycleCtx: lifecycleCtx, store: store, logger: logger.Named("aibridgedserver"), externalAuthConfigs: eac, - coderMCPConfig: coderMCPConfig, - }, nil + } + + if bridgeCfg.InjectCoderMCPTools { + coderMCPConfig, err := getCoderMCPServerConfig(experiments, accessURL) + if err != nil { + logger.Warn(lifecycleCtx, "failed to retrieve coder MCP server config, Coder MCP will not be available", slog.Error(err)) + } + srv.coderMCPConfig = coderMCPConfig + } + + return srv, nil } func (s *Server) RecordInterception(ctx context.Context, in *proto.RecordInterceptionRequest) (*proto.RecordInterceptionResponse, error) { @@ -114,9 +120,13 @@ func (s *Server) RecordInterception(ctx context.Context, in *proto.RecordInterce if err != nil { return nil, xerrors.Errorf("invalid initiator ID %q: %w", in.GetInitiatorId(), err) } + if in.ApiKeyId == "" { + return nil, xerrors.Errorf("empty API key ID") + } _, err = s.store.InsertAIBridgeInterception(ctx, database.InsertAIBridgeInterceptionParams{ ID: intcID, + APIKeyID: sql.NullString{String: in.ApiKeyId, Valid: true}, InitiatorID: initID, Provider: in.Provider, Model: in.Model, @@ -398,7 +408,8 @@ func (s *Server) IsAuthorized(ctx context.Context, in *proto.IsAuthorizedRequest } return &proto.IsAuthorizedResponse{ - OwnerId: key.UserID.String(), + OwnerId: key.UserID.String(), + ApiKeyId: key.ID, }, nil } diff --git a/enterprise/x/aibridgedserver/aibridgedserver_internal_test.go b/enterprise/aibridgedserver/aibridgedserver_internal_test.go similarity index 100% rename from enterprise/x/aibridgedserver/aibridgedserver_internal_test.go rename 
to enterprise/aibridgedserver/aibridgedserver_internal_test.go diff --git a/enterprise/x/aibridgedserver/aibridgedserver_test.go b/enterprise/aibridgedserver/aibridgedserver_test.go similarity index 92% rename from enterprise/x/aibridgedserver/aibridgedserver_test.go rename to enterprise/aibridgedserver/aibridgedserver_test.go index 4f9f892bc886a..b871bfb3f8e54 100644 --- a/enterprise/x/aibridgedserver/aibridgedserver_test.go +++ b/enterprise/aibridgedserver/aibridgedserver_test.go @@ -28,10 +28,11 @@ import ( codermcp "github.com/coder/coder/v2/coderd/mcp" "github.com/coder/coder/v2/codersdk" "github.com/coder/coder/v2/cryptorand" - "github.com/coder/coder/v2/enterprise/x/aibridged" - "github.com/coder/coder/v2/enterprise/x/aibridged/proto" - "github.com/coder/coder/v2/enterprise/x/aibridgedserver" + "github.com/coder/coder/v2/enterprise/aibridged" + "github.com/coder/coder/v2/enterprise/aibridged/proto" + "github.com/coder/coder/v2/enterprise/aibridgedserver" "github.com/coder/coder/v2/testutil" + "github.com/coder/serpent" ) var requiredExperiments = []codersdk.Experiment{ @@ -169,16 +170,21 @@ func TestAuthorization(t *testing.T) { tc.mocksFn(db, apiKey, user) } - srv, err := aibridgedserver.NewServer(t.Context(), db, logger, "/", nil, requiredExperiments) + srv, err := aibridgedserver.NewServer(t.Context(), db, logger, "/", codersdk.AIBridgeConfig{}, nil, requiredExperiments) require.NoError(t, err) require.NotNil(t, srv) - _, err = srv.IsAuthorized(t.Context(), &proto.IsAuthorizedRequest{Key: tc.key}) + resp, err := srv.IsAuthorized(t.Context(), &proto.IsAuthorizedRequest{Key: tc.key}) if tc.expectedErr != nil { require.Error(t, err) require.ErrorIs(t, err, tc.expectedErr) } else { + expected := proto.IsAuthorizedResponse{ + OwnerId: user.ID.String(), + ApiKeyId: keyID, + } require.NoError(t, err) + require.Equal(t, &expected, resp) } }) } @@ -198,11 +204,12 @@ func TestGetMCPServerConfigs(t *testing.T) { } cases := []struct { - name string - experiments 
codersdk.Experiments - externalAuthConfigs []*externalauth.Config - expectCoderMCP bool - expectedExternalMCP bool + name string + disableCoderMCPInjection bool + experiments codersdk.Experiments + externalAuthConfigs []*externalauth.Config + expectCoderMCP bool + expectedExternalMCP bool }{ { name: "experiments not enabled", @@ -233,6 +240,14 @@ func TestGetMCPServerConfigs(t *testing.T) { expectCoderMCP: true, expectedExternalMCP: true, }, + { + name: "both internal & external MCP, but coder MCP tools not injected", + disableCoderMCPInjection: true, + experiments: requiredExperiments, + externalAuthConfigs: externalAuthCfgs, + expectCoderMCP: false, + expectedExternalMCP: true, + }, } for _, tc := range cases { @@ -244,7 +259,9 @@ func TestGetMCPServerConfigs(t *testing.T) { logger := testutil.Logger(t) accessURL := "https://my-cool-deployment.com" - srv, err := aibridgedserver.NewServer(t.Context(), db, logger, accessURL, tc.externalAuthConfigs, tc.experiments) + srv, err := aibridgedserver.NewServer(t.Context(), db, logger, accessURL, codersdk.AIBridgeConfig{ + InjectCoderMCPTools: serpent.Bool(!tc.disableCoderMCPInjection), + }, tc.externalAuthConfigs, tc.experiments) require.NoError(t, err) require.NotNil(t, srv) @@ -282,7 +299,7 @@ func TestGetMCPServerAccessTokensBatch(t *testing.T) { logger := testutil.Logger(t) // Given: 2 external auth configured with MCP and 1 without. 
- srv, err := aibridgedserver.NewServer(t.Context(), db, logger, "/", []*externalauth.Config{ + srv, err := aibridgedserver.NewServer(t.Context(), db, logger, "/", codersdk.AIBridgeConfig{}, []*externalauth.Config{ { ID: "1", MCPURL: "1.com/mcp", @@ -355,6 +372,7 @@ func TestRecordInterception(t *testing.T) { name: "valid interception", request: &proto.RecordInterceptionRequest{ Id: uuid.NewString(), + ApiKeyId: uuid.NewString(), InitiatorId: uuid.NewString(), Provider: "anthropic", Model: "claude-4-opus", @@ -369,6 +387,7 @@ func TestRecordInterception(t *testing.T) { db.EXPECT().InsertAIBridgeInterception(gomock.Any(), database.InsertAIBridgeInterceptionParams{ ID: interceptionID, + APIKeyID: sql.NullString{String: req.ApiKeyId, Valid: true}, InitiatorID: initiatorID, Provider: req.GetProvider(), Model: req.GetModel(), @@ -376,6 +395,7 @@ func TestRecordInterception(t *testing.T) { StartedAt: req.StartedAt.AsTime().UTC(), }).Return(database.AIBridgeInterception{ ID: interceptionID, + APIKeyID: sql.NullString{String: req.ApiKeyId, Valid: true}, InitiatorID: initiatorID, Provider: req.GetProvider(), Model: req.GetModel(), @@ -388,6 +408,7 @@ func TestRecordInterception(t *testing.T) { request: &proto.RecordInterceptionRequest{ Id: "not-a-uuid", InitiatorId: uuid.NewString(), + ApiKeyId: uuid.NewString(), Provider: "anthropic", Model: "claude-4-opus", StartedAt: timestamppb.Now(), @@ -398,6 +419,7 @@ func TestRecordInterception(t *testing.T) { name: "invalid initiator ID", request: &proto.RecordInterceptionRequest{ Id: uuid.NewString(), + ApiKeyId: uuid.NewString(), InitiatorId: "not-a-uuid", Provider: "anthropic", Model: "claude-4-opus", @@ -405,10 +427,23 @@ func TestRecordInterception(t *testing.T) { }, expectedErr: "invalid initiator ID", }, + { + name: "invalid interception no api key set", + request: &proto.RecordInterceptionRequest{ + Id: uuid.NewString(), + InitiatorId: uuid.NewString(), + Provider: "anthropic", + Model: "claude-4-opus", + Metadata: 
metadataProto, + StartedAt: timestamppb.Now(), + }, + expectedErr: "empty API key ID", + }, { name: "database error", request: &proto.RecordInterceptionRequest{ Id: uuid.NewString(), + ApiKeyId: uuid.NewString(), InitiatorId: uuid.NewString(), Provider: "anthropic", Model: "claude-4-opus", @@ -771,7 +806,7 @@ func testRecordMethod[Req any, Resp any]( } ctx := testutil.Context(t, testutil.WaitLong) - srv, err := aibridgedserver.NewServer(ctx, db, logger, "/", nil, requiredExperiments) + srv, err := aibridgedserver.NewServer(ctx, db, logger, "/", codersdk.AIBridgeConfig{}, nil, requiredExperiments) require.NoError(t, err) resp, err := callMethod(srv, ctx, tc.request) diff --git a/enterprise/audit/table.go b/enterprise/audit/table.go index 9b887b30ef517..7006c434c609c 100644 --- a/enterprise/audit/table.go +++ b/enterprise/audit/table.go @@ -117,6 +117,7 @@ var auditableResourcesTypes = map[any]map[string]Action{ "activity_bump": ActionTrack, "use_classic_parameter_flow": ActionTrack, "cors_behavior": ActionTrack, + "use_terraform_workspace_cache": ActionTrack, }, &database.TemplateVersion{}: { "id": ActionTrack, @@ -198,7 +199,6 @@ var auditableResourcesTypes = map[any]map[string]Action{ "initiator_by_name": ActionIgnore, "template_version_preset_id": ActionIgnore, // Never changes. "has_ai_task": ActionIgnore, // Never changes. - "ai_task_sidebar_app_id": ActionIgnore, // Never changes. "has_external_agent": ActionIgnore, // Never changes. }, &database.AuditableGroup{}: { @@ -353,6 +353,7 @@ var auditableResourcesTypes = map[any]map[string]Action{ "organization_id": ActionIgnore, // Never changes. 
"owner_id": ActionTrack, "name": ActionTrack, + "display_name": ActionTrack, "workspace_id": ActionTrack, "template_version_id": ActionTrack, "template_parameters": ActionTrack, diff --git a/enterprise/cli/exp_aibridge.go b/enterprise/cli/aibridge.go similarity index 94% rename from enterprise/cli/exp_aibridge.go rename to enterprise/cli/aibridge.go index 722f7bf239223..a8e539713067a 100644 --- a/enterprise/cli/exp_aibridge.go +++ b/enterprise/cli/aibridge.go @@ -17,7 +17,7 @@ const maxInterceptionsLimit = 1000 func (r *RootCmd) aibridge() *serpent.Command { cmd := &serpent.Command{ Use: "aibridge", - Short: "Manage AIBridge.", + Short: "Manage AI Bridge.", Handler: func(inv *serpent.Invocation) error { return inv.Command.HelpHandler(inv) }, @@ -31,7 +31,7 @@ func (r *RootCmd) aibridge() *serpent.Command { func (r *RootCmd) aibridgeInterceptions() *serpent.Command { cmd := &serpent.Command{ Use: "interceptions", - Short: "Manage AIBridge interceptions.", + Short: "Manage AI Bridge interceptions.", Handler: func(inv *serpent.Invocation) error { return inv.Command.HelpHandler(inv) }, @@ -55,7 +55,7 @@ func (r *RootCmd) aibridgeInterceptionsList() *serpent.Command { return &serpent.Command{ Use: "list", - Short: "List AIBridge interceptions as JSON.", + Short: "List AI Bridge interceptions as JSON.", Options: serpent.OptionSet{ { Flag: "initiator", @@ -134,8 +134,7 @@ func (r *RootCmd) aibridgeInterceptionsList() *serpent.Command { return xerrors.Errorf("limit value must be between 1 and %d", maxInterceptionsLimit) } - expCli := codersdk.NewExperimentalClient(client) - resp, err := expCli.AIBridgeListInterceptions(inv.Context(), codersdk.AIBridgeListInterceptionsFilter{ + resp, err := client.AIBridgeListInterceptions(inv.Context(), codersdk.AIBridgeListInterceptionsFilter{ Pagination: codersdk.Pagination{ AfterID: afterID, // #nosec G115 - Checked above. 
diff --git a/enterprise/cli/exp_aibridge_test.go b/enterprise/cli/aibridge_test.go similarity index 96% rename from enterprise/cli/exp_aibridge_test.go rename to enterprise/cli/aibridge_test.go index 466d6b3df8246..666dc69858039 100644 --- a/enterprise/cli/exp_aibridge_test.go +++ b/enterprise/cli/aibridge_test.go @@ -27,7 +27,6 @@ func TestAIBridgeListInterceptions(t *testing.T) { t.Parallel() dv := coderdtest.DeploymentValues(t) - dv.Experiments = []string{string(codersdk.ExperimentAIBridge)} client, db, owner := coderdenttest.NewWithDatabase(t, &coderdenttest.Options{ Options: &coderdtest.Options{ DeploymentValues: dv, @@ -44,10 +43,11 @@ func TestAIBridgeListInterceptions(t *testing.T) { InitiatorID: member.ID, StartedAt: now.Add(-time.Hour), }, &now) + interception2EndedAt := now.Add(time.Minute) interception2 := dbgen.AIBridgeInterception(t, db, database.InsertAIBridgeInterceptionParams{ InitiatorID: member.ID, StartedAt: now, - }, nil) + }, &interception2EndedAt) // Should not be returned because the user can't see it. _ = dbgen.AIBridgeInterception(t, db, database.InsertAIBridgeInterceptionParams{ InitiatorID: owner.UserID, @@ -55,7 +55,6 @@ func TestAIBridgeListInterceptions(t *testing.T) { }, nil) args := []string{ - "exp", "aibridge", "interceptions", "list", @@ -78,7 +77,6 @@ func TestAIBridgeListInterceptions(t *testing.T) { t.Parallel() dv := coderdtest.DeploymentValues(t) - dv.Experiments = []string{string(codersdk.ExperimentAIBridge)} client, db, owner := coderdenttest.NewWithDatabase(t, &coderdenttest.Options{ Options: &coderdtest.Options{ DeploymentValues: dv, @@ -94,12 +92,13 @@ func TestAIBridgeListInterceptions(t *testing.T) { now := dbtime.Now() // This interception should be returned since it matches all filters. 
+ goodInterceptionEndedAt := now.Add(time.Minute) goodInterception := dbgen.AIBridgeInterception(t, db, database.InsertAIBridgeInterceptionParams{ InitiatorID: member.ID, Provider: "real-provider", Model: "real-model", StartedAt: now, - }, nil) + }, &goodInterceptionEndedAt) // These interceptions should not be returned since they don't match the // filters. @@ -137,7 +136,6 @@ func TestAIBridgeListInterceptions(t *testing.T) { }, nil) args := []string{ - "exp", "aibridge", "interceptions", "list", @@ -164,7 +162,6 @@ func TestAIBridgeListInterceptions(t *testing.T) { t.Parallel() dv := coderdtest.DeploymentValues(t) - dv.Experiments = []string{string(codersdk.ExperimentAIBridge)} client, db, owner := coderdenttest.NewWithDatabase(t, &coderdenttest.Options{ Options: &coderdtest.Options{ DeploymentValues: dv, @@ -178,10 +175,11 @@ func TestAIBridgeListInterceptions(t *testing.T) { memberClient, member := coderdtest.CreateAnotherUser(t, client, owner.OrganizationID) now := dbtime.Now() + firstInterceptionEndedAt := now.Add(time.Minute) firstInterception := dbgen.AIBridgeInterception(t, db, database.InsertAIBridgeInterceptionParams{ InitiatorID: member.ID, StartedAt: now, - }, nil) + }, &firstInterceptionEndedAt) returnedInterception := dbgen.AIBridgeInterception(t, db, database.InsertAIBridgeInterceptionParams{ InitiatorID: member.ID, StartedAt: now.Add(-time.Hour), @@ -192,7 +190,6 @@ func TestAIBridgeListInterceptions(t *testing.T) { }, nil) args := []string{ - "exp", "aibridge", "interceptions", "list", diff --git a/enterprise/cli/aibridged.go b/enterprise/cli/aibridged.go index 17bb5ebe681fa..e955c4fcbe73b 100644 --- a/enterprise/cli/aibridged.go +++ b/enterprise/cli/aibridged.go @@ -7,10 +7,12 @@ import ( "golang.org/x/xerrors" + "github.com/prometheus/client_golang/prometheus" + "github.com/coder/aibridge" "github.com/coder/coder/v2/codersdk" + "github.com/coder/coder/v2/enterprise/aibridged" "github.com/coder/coder/v2/enterprise/coderd" - 
"github.com/coder/coder/v2/enterprise/x/aibridged" ) func newAIBridgeDaemon(coderAPI *coderd.API) (*aibridged.Server, error) { @@ -31,8 +33,11 @@ func newAIBridgeDaemon(coderAPI *coderd.API) (*aibridged.Server, error) { }, getBedrockConfig(coderAPI.DeploymentValues.AI.BridgeConfig.Bedrock)), } + reg := prometheus.WrapRegistererWithPrefix("coder_aibridged_", coderAPI.PrometheusRegistry) + metrics := aibridge.NewMetrics(reg) + // Create pool for reusable stateful [aibridge.RequestBridge] instances (one per user). - pool, err := aibridged.NewCachedBridgePool(aibridged.DefaultPoolOptions, providers, logger.Named("pool")) // TODO: configurable. + pool, err := aibridged.NewCachedBridgePool(aibridged.DefaultPoolOptions, providers, metrics, logger.Named("pool")) // TODO: configurable size. if err != nil { return nil, xerrors.Errorf("create request pool: %w", err) } diff --git a/enterprise/cli/provisionerdaemonstart.go b/enterprise/cli/provisionerdaemonstart.go index 1869007a85173..b15e56d8ab385 100644 --- a/enterprise/cli/provisionerdaemonstart.go +++ b/enterprise/cli/provisionerdaemonstart.go @@ -23,6 +23,7 @@ import ( "github.com/coder/coder/v2/cli/clilog" "github.com/coder/coder/v2/cli/cliui" "github.com/coder/coder/v2/cli/cliutil" + "github.com/coder/coder/v2/coderd" "github.com/coder/coder/v2/coderd/database" "github.com/coder/coder/v2/codersdk" "github.com/coder/coder/v2/codersdk/drpcsdk" @@ -48,6 +49,7 @@ func (r *RootCmd) provisionerDaemonStart() *serpent.Command { preSharedKey string provisionerKey string verbose bool + experiments []string prometheusEnable bool prometheusAddress string @@ -105,7 +107,7 @@ func (r *RootCmd) provisionerDaemonStart() *serpent.Command { if provisionerKey != "" { pkDetails, err := client.GetProvisionerKey(ctx, provisionerKey) if err != nil { - return xerrors.New("unable to get provisioner key details") + return xerrors.Errorf("unable to get provisioner key details: %w", err) } for k, v := range pkDetails.Tags { @@ -186,6 +188,7 @@ 
func (r *RootCmd) provisionerDaemonStart() *serpent.Command { Listener: terraformServer, Logger: logger.Named("terraform"), WorkDirectory: tempDir, + Experiments: coderd.ReadExperiments(logger, experiments), }, CachePath: cacheDir, }) @@ -378,6 +381,14 @@ func (r *RootCmd) provisionerDaemonStart() *serpent.Command { Value: serpent.StringOf(&prometheusAddress), Default: "127.0.0.1:2112", }, + { + Name: "Experiments", + Description: "Enable one or more experiments. These are not ready for production. Separate multiple experiments with commas, or enter '*' to opt-in to all available experiments.", + Flag: "experiments", + Env: "CODER_EXPERIMENTS", + Value: serpent.StringArrayOf(&experiments), + YAML: "experiments", + }, } orgContext.AttachOptions(cmd) diff --git a/enterprise/cli/root.go b/enterprise/cli/root.go index 3cec11970369e..78858ef48da7b 100644 --- a/enterprise/cli/root.go +++ b/enterprise/cli/root.go @@ -25,13 +25,12 @@ func (r *RootCmd) enterpriseOnly() []*serpent.Command { r.prebuilds(), r.provisionerd(), r.externalWorkspaces(), + r.aibridge(), } } -func (r *RootCmd) enterpriseExperimental() []*serpent.Command { - return []*serpent.Command{ - r.aibridge(), - } +func (*RootCmd) enterpriseExperimental() []*serpent.Command { + return []*serpent.Command{} } func (r *RootCmd) EnterpriseSubcommands() []*serpent.Command { diff --git a/enterprise/cli/server.go b/enterprise/cli/server.go index ea9f2d3e93825..bc77bc54ba522 100644 --- a/enterprise/cli/server.go +++ b/enterprise/cli/server.go @@ -7,7 +7,6 @@ import ( "database/sql" "encoding/base64" "errors" - "fmt" "io" "net/url" @@ -16,8 +15,8 @@ import ( "tailscale.com/types/key" "github.com/coder/coder/v2/coderd/database" - "github.com/coder/coder/v2/codersdk" "github.com/coder/coder/v2/cryptorand" + "github.com/coder/coder/v2/enterprise/aibridged" "github.com/coder/coder/v2/enterprise/audit" "github.com/coder/coder/v2/enterprise/audit/backends" "github.com/coder/coder/v2/enterprise/coderd" @@ -25,7 +24,6 @@ import 
( "github.com/coder/coder/v2/enterprise/coderd/usage" "github.com/coder/coder/v2/enterprise/dbcrypt" "github.com/coder/coder/v2/enterprise/trialer" - "github.com/coder/coder/v2/enterprise/x/aibridged" "github.com/coder/coder/v2/tailnet" "github.com/coder/quartz" "github.com/coder/serpent" @@ -146,8 +144,6 @@ func (r *RootCmd) Server(_ func()) *serpent.Command { } closers.Add(publisher) - experiments := agplcoderd.ReadExperiments(options.Logger, options.DeploymentValues.Experiments.Value()) - // In-memory aibridge daemon. // TODO(@deansheather): the lifecycle of the aibridged server is // probably better managed by the enterprise API type itself. Managing @@ -155,26 +151,18 @@ func (r *RootCmd) Server(_ func()) *serpent.Command { // is not entitled to the feature. var aibridgeDaemon *aibridged.Server if options.DeploymentValues.AI.BridgeConfig.Enabled { - if experiments.Enabled(codersdk.ExperimentAIBridge) { - aibridgeDaemon, err = newAIBridgeDaemon(api) - if err != nil { - return nil, nil, xerrors.Errorf("create aibridged: %w", err) - } + aibridgeDaemon, err = newAIBridgeDaemon(api) + if err != nil { + return nil, nil, xerrors.Errorf("create aibridged: %w", err) + } - api.RegisterInMemoryAIBridgedHTTPHandler(aibridgeDaemon) + api.RegisterInMemoryAIBridgedHTTPHandler(aibridgeDaemon) - // When running as an in-memory daemon, the HTTP handler is wired into the - // coderd API and therefore is subject to its context. Calling Close() on - // aibridged will NOT affect in-flight requests but those will be closed once - // the API server is itself shutdown. 
- closers.Add(aibridgeDaemon) - } else { - api.Logger.Warn(ctx, fmt.Sprintf("CODER_AIBRIDGE_ENABLED=true but experiment %q not enabled", codersdk.ExperimentAIBridge)) - } - } else { - if experiments.Enabled(codersdk.ExperimentAIBridge) { - api.Logger.Warn(ctx, "aibridge experiment enabled but CODER_AIBRIDGE_ENABLED=false") - } + // When running as an in-memory daemon, the HTTP handler is wired into the + // coderd API and therefore is subject to its context. Calling Close() on + // aibridged will NOT affect in-flight requests but those will be closed once + // the API server is itself shutdown. + closers.Add(aibridgeDaemon) } return api.AGPL, closers, nil diff --git a/enterprise/cli/testdata/coder_--help.golden b/enterprise/cli/testdata/coder_--help.golden index ddb44f78ae524..e199e8cc27d4d 100644 --- a/enterprise/cli/testdata/coder_--help.golden +++ b/enterprise/cli/testdata/coder_--help.golden @@ -14,6 +14,7 @@ USAGE: $ coder templates init SUBCOMMANDS: + aibridge Manage AI Bridge. external-workspaces Create or manage external workspaces features List Enterprise features groups Manage groups @@ -67,6 +68,13 @@ variables or flags. --url url, $CODER_URL URL to a deployment. + --use-keyring bool, $CODER_USE_KEYRING (default: true) + Store and retrieve session tokens using the operating system keyring. + This flag is ignored and file-based storage is used when + --global-config is set or keyring usage is not supported on the + current platform. Set to false to force file-based storage on + supported platforms. + -v, --verbose bool, $CODER_VERBOSE Enable verbose output. diff --git a/enterprise/cli/testdata/coder_aibridge_--help.golden b/enterprise/cli/testdata/coder_aibridge_--help.golden new file mode 100644 index 0000000000000..5fdb98d21a479 --- /dev/null +++ b/enterprise/cli/testdata/coder_aibridge_--help.golden @@ -0,0 +1,12 @@ +coder v0.0.0-devel + +USAGE: + coder aibridge + + Manage AI Bridge. + +SUBCOMMANDS: + interceptions Manage AI Bridge interceptions. 
+ +——— +Run `coder --help` for a list of global options. diff --git a/enterprise/cli/testdata/coder_aibridge_interceptions_--help.golden b/enterprise/cli/testdata/coder_aibridge_interceptions_--help.golden new file mode 100644 index 0000000000000..49e36fb712177 --- /dev/null +++ b/enterprise/cli/testdata/coder_aibridge_interceptions_--help.golden @@ -0,0 +1,12 @@ +coder v0.0.0-devel + +USAGE: + coder aibridge interceptions + + Manage AI Bridge interceptions. + +SUBCOMMANDS: + list List AI Bridge interceptions as JSON. + +——— +Run `coder --help` for a list of global options. diff --git a/enterprise/cli/testdata/coder_aibridge_interceptions_list_--help.golden b/enterprise/cli/testdata/coder_aibridge_interceptions_list_--help.golden new file mode 100644 index 0000000000000..307696c390486 --- /dev/null +++ b/enterprise/cli/testdata/coder_aibridge_interceptions_list_--help.golden @@ -0,0 +1,37 @@ +coder v0.0.0-devel + +USAGE: + coder aibridge interceptions list [flags] + + List AI Bridge interceptions as JSON. + +OPTIONS: + --after-id string + The ID of the last result on the previous page to use as a pagination + cursor. + + --initiator string + Only return interceptions initiated by this user. Accepts a user ID, + username, or "me". + + --limit int (default: 100) + The limit of results to return. Must be between 1 and 1000. + + --model string + Only return interceptions from this model. + + --provider string + Only return interceptions from this provider. + + --started-after string + Only return interceptions started after this time. Must be before + 'started-before' if set. Accepts a time in the RFC 3339 format, e.g. + "====[timestamp]=====07:00". + + --started-before string + Only return interceptions started before this time. Must be after + 'started-after' if set. Accepts a time in the RFC 3339 format, e.g. + "====[timestamp]=====07:00". + +——— +Run `coder --help` for a list of global options. 
diff --git a/enterprise/cli/testdata/coder_provisioner_start_--help.golden b/enterprise/cli/testdata/coder_provisioner_start_--help.golden index 439a2d68ba038..e3d4c69a8c45c 100644 --- a/enterprise/cli/testdata/coder_provisioner_start_--help.golden +++ b/enterprise/cli/testdata/coder_provisioner_start_--help.golden @@ -6,6 +6,11 @@ USAGE: Run a provisioner daemon OPTIONS: + --experiments string-array, $CODER_EXPERIMENTS + Enable one or more experiments. These are not ready for production. + Separate multiple experiments with commas, or enter '*' to opt-in to + all available experiments. + -O, --org string, $CODER_ORGANIZATION Select which organization (uuid or name) to use. diff --git a/enterprise/cli/testdata/coder_server_--help.golden b/enterprise/cli/testdata/coder_server_--help.golden index 162d4214ccc6a..5ed217752b2c6 100644 --- a/enterprise/cli/testdata/coder_server_--help.golden +++ b/enterprise/cli/testdata/coder_server_--help.golden @@ -81,6 +81,50 @@ OPTIONS: Periodically check for new releases of Coder and inform the owner. The check is performed once per day. +AI BRIDGE OPTIONS: + --aibridge-anthropic-base-url string, $CODER_AIBRIDGE_ANTHROPIC_BASE_URL (default: https://api.anthropic.com/) + The base URL of the Anthropic API. + + --aibridge-anthropic-key string, $CODER_AIBRIDGE_ANTHROPIC_KEY + The key to authenticate against the Anthropic API. + + --aibridge-bedrock-access-key string, $CODER_AIBRIDGE_BEDROCK_ACCESS_KEY + The access key to authenticate against the AWS Bedrock API. + + --aibridge-bedrock-access-key-secret string, $CODER_AIBRIDGE_BEDROCK_ACCESS_KEY_SECRET + The access key secret to use with the access key to authenticate + against the AWS Bedrock API. + + --aibridge-bedrock-model string, $CODER_AIBRIDGE_BEDROCK_MODEL (default: global.anthropic.claude-sonnet-4-5-20250929-v1:0) + The model to use when making requests to the AWS Bedrock API. + + --aibridge-bedrock-region string, $CODER_AIBRIDGE_BEDROCK_REGION + The AWS Bedrock API region. 
+ + --aibridge-bedrock-small-fastmodel string, $CODER_AIBRIDGE_BEDROCK_SMALL_FAST_MODEL (default: global.anthropic.claude-haiku-4-5-20251001-v1:0) + The small fast model to use when making requests to the AWS Bedrock + API. Claude Code uses Haiku-class models to perform background tasks. + See + https://docs.claude.com/en/docs/claude-code/settings#environment-variables. + + --aibridge-retention duration, $CODER_AIBRIDGE_RETENTION (default: 60d) + Length of time to retain data such as interceptions and all related + records (token, prompt, tool use). + + --aibridge-enabled bool, $CODER_AIBRIDGE_ENABLED (default: false) + Whether to start an in-memory aibridged instance. + + --aibridge-inject-coder-mcp-tools bool, $CODER_AIBRIDGE_INJECT_CODER_MCP_TOOLS (default: false) + Whether to inject Coder's MCP tools into intercepted AI Bridge + requests (requires the "oauth2" and "mcp-server-http" experiments to + be enabled). + + --aibridge-openai-base-url string, $CODER_AIBRIDGE_OPENAI_BASE_URL (default: https://api.openai.com/v1/) + The base URL of the OpenAI API. + + --aibridge-openai-key string, $CODER_AIBRIDGE_OPENAI_KEY + The key to authenticate against the OpenAI API. + CLIENT OPTIONS: These options change the behavior of how clients interact with the Coder. Clients include the Coder CLI, Coder Desktop, IDE extensions, and the web UI. diff --git a/enterprise/coderd/aibridge.go b/enterprise/coderd/aibridge.go index dab93d8992a79..96bbe1d205181 100644 --- a/enterprise/coderd/aibridge.go +++ b/enterprise/coderd/aibridge.go @@ -4,7 +4,9 @@ import ( "context" "fmt" "net/http" + "strings" + "github.com/go-chi/chi/v5" "github.com/google/uuid" "golang.org/x/xerrors" @@ -23,20 +25,52 @@ const ( defaultListInterceptionsLimit = 100 ) -// aiBridgeListInterceptions returns all AIBridge interceptions a user can read. +// aibridgeHandler handles all aibridged-related endpoints. 
+func aibridgeHandler(api *API, middlewares ...func(http.Handler) http.Handler) func(r chi.Router) { + return func(r chi.Router) { + r.Use(api.RequireFeatureMW(codersdk.FeatureAIBridge)) + r.Group(func(r chi.Router) { + r.Use(middlewares...) + r.Get("/interceptions", api.aiBridgeListInterceptions) + }) + + // This is a bit funky but since aibridge only exposes a HTTP + // handler, this is how it has to be. + r.HandleFunc("/*", func(rw http.ResponseWriter, r *http.Request) { + if api.aibridgedHandler == nil { + httpapi.Write(r.Context(), rw, http.StatusNotFound, codersdk.Response{ + Message: "aibridged handler not mounted", + }) + return + } + + // Strip either the experimental or stable prefix. + // TODO: experimental route is deprecated and must be removed with Beta. + prefixes := []string{"/api/experimental/aibridge", "/api/v2/aibridge"} + for _, prefix := range prefixes { + if strings.Contains(r.URL.String(), prefix) { + http.StripPrefix(prefix, api.aibridgedHandler).ServeHTTP(rw, r) + break + } + } + }) + } +} + +// aiBridgeListInterceptions returns all AI Bridge interceptions a user can read. // Optional filters with query params // -// @Summary List AIBridge interceptions -// @ID list-aibridge-interceptions +// @Summary List AI Bridge interceptions +// @ID list-ai-bridge-interceptions // @Security CoderSessionToken // @Produce json -// @Tags AIBridge +// @Tags AI Bridge // @Param q query string false "Search query in the format `key:value`. Available keys are: initiator, provider, model, started_after, started_before." 
// @Param limit query int false "Page limit" // @Param after_id query string false "Cursor pagination after ID (cannot be used with offset)" // @Param offset query int false "Offset pagination (cannot be used with after_id)" // @Success 200 {object} codersdk.AIBridgeListInterceptionsResponse -// @Router /api/experimental/aibridge/interceptions [get] +// @Router /aibridge/interceptions [get] func (api *API) aiBridgeListInterceptions(rw http.ResponseWriter, r *http.Request) { ctx := r.Context() apiKey := httpmw.APIKey(r) @@ -110,7 +144,7 @@ func (api *API) aiBridgeListInterceptions(rw http.ResponseWriter, r *http.Reques }, nil) if err != nil { httpapi.Write(ctx, rw, http.StatusInternalServerError, codersdk.Response{ - Message: "Internal error getting AIBridge interceptions.", + Message: "Internal error getting AI Bridge interceptions.", Detail: err.Error(), }) return @@ -138,7 +172,7 @@ func populatedAndConvertAIBridgeInterceptions(ctx context.Context, db database.S ids[i] = row.AIBridgeInterception.ID } - //nolint:gocritic // This is a system function until we implement a join for aibridge interceptions. AIBridge interception subresources use the same authorization call as their parent. + //nolint:gocritic // This is a system function until we implement a join for aibridge interceptions. AI Bridge interception subresources use the same authorization call as their parent. tokenUsagesRows, err := db.ListAIBridgeTokenUsagesByInterceptionIDs(dbauthz.AsSystemRestricted(ctx), ids) if err != nil { return nil, xerrors.Errorf("get linked aibridge token usages from database: %w", err) @@ -148,7 +182,7 @@ func populatedAndConvertAIBridgeInterceptions(ctx context.Context, db database.S tokenUsagesMap[row.InterceptionID] = append(tokenUsagesMap[row.InterceptionID], row) } - //nolint:gocritic // This is a system function until we implement a join for aibridge interceptions. AIBridge interception subresources use the same authorization call as their parent. 
+ //nolint:gocritic // This is a system function until we implement a join for aibridge interceptions. AI Bridge interception subresources use the same authorization call as their parent. userPromptRows, err := db.ListAIBridgeUserPromptsByInterceptionIDs(dbauthz.AsSystemRestricted(ctx), ids) if err != nil { return nil, xerrors.Errorf("get linked aibridge user prompts from database: %w", err) @@ -158,7 +192,7 @@ func populatedAndConvertAIBridgeInterceptions(ctx context.Context, db database.S userPromptsMap[row.InterceptionID] = append(userPromptsMap[row.InterceptionID], row) } - //nolint:gocritic // This is a system function until we implement a join for aibridge interceptions. AIBridge interception subresources use the same authorization call as their parent. + //nolint:gocritic // This is a system function until we implement a join for aibridge interceptions. AI Bridge interception subresources use the same authorization call as their parent. toolUsagesRows, err := db.ListAIBridgeToolUsagesByInterceptionIDs(dbauthz.AsSystemRestricted(ctx), ids) if err != nil { return nil, xerrors.Errorf("get linked aibridge tool usages from database: %w", err) diff --git a/enterprise/coderd/aibridge_test.go b/enterprise/coderd/aibridge_test.go index abaf82dbe85f8..2913fe516ae28 100644 --- a/enterprise/coderd/aibridge_test.go +++ b/enterprise/coderd/aibridge_test.go @@ -1,6 +1,8 @@ package coderd_test import ( + "database/sql" + "io" "net/http" "testing" "time" @@ -27,7 +29,6 @@ func TestAIBridgeListInterceptions(t *testing.T) { t.Parallel() dv := coderdtest.DeploymentValues(t) - dv.Experiments = []string{string(codersdk.ExperimentAIBridge)} client, _ := coderdenttest.New(t, &coderdenttest.Options{ Options: &coderdtest.Options{ DeploymentValues: dv, @@ -37,10 +38,10 @@ func TestAIBridgeListInterceptions(t *testing.T) { Features: license.Features{}, }, }) - experimentalClient := codersdk.NewExperimentalClient(client) ctx := testutil.Context(t, testutil.WaitLong) - _, err := 
experimentalClient.AIBridgeListInterceptions(ctx, codersdk.AIBridgeListInterceptionsFilter{}) + //nolint:gocritic // Owner role is irrelevant here. + _, err := client.AIBridgeListInterceptions(ctx, codersdk.AIBridgeListInterceptionsFilter{}) var sdkErr *codersdk.Error require.ErrorAs(t, err, &sdkErr) require.Equal(t, http.StatusForbidden, sdkErr.StatusCode()) @@ -50,7 +51,6 @@ func TestAIBridgeListInterceptions(t *testing.T) { t.Run("EmptyDB", func(t *testing.T) { t.Parallel() dv := coderdtest.DeploymentValues(t) - dv.Experiments = []string{string(codersdk.ExperimentAIBridge)} client, _ := coderdenttest.New(t, &coderdenttest.Options{ Options: &coderdtest.Options{ DeploymentValues: dv, @@ -61,9 +61,9 @@ func TestAIBridgeListInterceptions(t *testing.T) { }, }, }) - experimentalClient := codersdk.NewExperimentalClient(client) ctx := testutil.Context(t, testutil.WaitLong) - res, err := experimentalClient.AIBridgeListInterceptions(ctx, codersdk.AIBridgeListInterceptionsFilter{}) + //nolint:gocritic // Owner role is irrelevant here. + res, err := client.AIBridgeListInterceptions(ctx, codersdk.AIBridgeListInterceptionsFilter{}) require.NoError(t, err) require.Empty(t, res.Results) }) @@ -71,7 +71,6 @@ func TestAIBridgeListInterceptions(t *testing.T) { t.Run("OK", func(t *testing.T) { t.Parallel() dv := coderdtest.DeploymentValues(t) - dv.Experiments = []string{string(codersdk.ExperimentAIBridge)} client, db, firstUser := coderdenttest.NewWithDatabase(t, &coderdenttest.Options{ Options: &coderdtest.Options{ DeploymentValues: dv, @@ -82,7 +81,6 @@ func TestAIBridgeListInterceptions(t *testing.T) { }, }, }) - experimentalClient := codersdk.NewExperimentalClient(client) ctx := testutil.Context(t, testutil.WaitLong) user1, err := client.User(ctx, codersdk.Me) @@ -104,10 +102,13 @@ func TestAIBridgeListInterceptions(t *testing.T) { // Insert a bunch of test data. 
now := dbtime.Now() + i1ApiKey := sql.NullString{String: "some-api-key", Valid: true} + i1EndedAt := now.Add(-time.Hour + time.Minute) i1 := dbgen.AIBridgeInterception(t, db, database.InsertAIBridgeInterceptionParams{ + APIKeyID: i1ApiKey, InitiatorID: user1.ID, StartedAt: now.Add(-time.Hour), - }, nil) + }, &i1EndedAt) i1tok1 := dbgen.AIBridgeTokenUsage(t, db, database.InsertAIBridgeTokenUsageParams{ InterceptionID: i1.ID, CreatedAt: now, @@ -143,12 +144,15 @@ func TestAIBridgeListInterceptions(t *testing.T) { i1SDK := db2sdk.AIBridgeInterception(i1, user1Visible, []database.AIBridgeTokenUsage{i1tok2, i1tok1}, []database.AIBridgeUserPrompt{i1up2, i1up1}, []database.AIBridgeToolUsage{i1tool2, i1tool1}) i2SDK := db2sdk.AIBridgeInterception(i2, user2Visible, nil, nil, nil) - res, err := experimentalClient.AIBridgeListInterceptions(ctx, codersdk.AIBridgeListInterceptionsFilter{}) + res, err := client.AIBridgeListInterceptions(ctx, codersdk.AIBridgeListInterceptionsFilter{}) require.NoError(t, err) require.Len(t, res.Results, 2) require.Equal(t, i2SDK.ID, res.Results[0].ID) require.Equal(t, i1SDK.ID, res.Results[1].ID) + require.Equal(t, &i1ApiKey.String, i1SDK.APIKeyID) + require.Nil(t, i2SDK.APIKeyID) + // Normalize timestamps in the response so we can compare the whole // thing easily. 
res.Results[0].StartedAt = i2SDK.StartedAt @@ -172,9 +176,11 @@ func TestAIBridgeListInterceptions(t *testing.T) { // Time comparison require.Len(t, res.Results, 2) require.Equal(t, res.Results[0].ID, i2SDK.ID) - require.NotNil(t, now, res.Results[0].EndedAt) + require.NotNil(t, res.Results[0].EndedAt) require.WithinDuration(t, now, *res.Results[0].EndedAt, 5*time.Second) res.Results[0].EndedAt = i2SDK.EndedAt + require.NotNil(t, res.Results[1].EndedAt) + res.Results[1].EndedAt = i1SDK.EndedAt require.Equal(t, []codersdk.AIBridgeInterception{i2SDK, i1SDK}, res.Results) }) @@ -183,7 +189,6 @@ func TestAIBridgeListInterceptions(t *testing.T) { t.Parallel() dv := coderdtest.DeploymentValues(t) - dv.Experiments = []string{string(codersdk.ExperimentAIBridge)} client, db, firstUser := coderdenttest.NewWithDatabase(t, &coderdenttest.Options{ Options: &coderdtest.Options{ DeploymentValues: dv, @@ -194,7 +199,6 @@ func TestAIBridgeListInterceptions(t *testing.T) { }, }, }) - experimentalClient := codersdk.NewExperimentalClient(client) ctx := testutil.Context(t, testutil.WaitLong) allInterceptionIDs := make([]uuid.UUID, 0, 20) @@ -216,16 +220,17 @@ func TestAIBridgeListInterceptions(t *testing.T) { randomOffset, err := cryptorand.Intn(10000) require.NoError(t, err) randomOffsetDur := time.Duration(randomOffset) * time.Second + endedAt := now.Add(randomOffsetDur + time.Minute) interception := dbgen.AIBridgeInterception(t, db, database.InsertAIBridgeInterceptionParams{ ID: uuid.UUID{byte(i + 10)}, InitiatorID: firstUser.UserID, StartedAt: now.Add(randomOffsetDur), - }, nil) + }, &endedAt) allInterceptionIDs = append(allInterceptionIDs, interception.ID) } // Try to fetch with an invalid limit. 
- res, err := experimentalClient.AIBridgeListInterceptions(ctx, codersdk.AIBridgeListInterceptionsFilter{ + res, err := client.AIBridgeListInterceptions(ctx, codersdk.AIBridgeListInterceptionsFilter{ Pagination: codersdk.Pagination{ Limit: 1001, }, @@ -236,7 +241,7 @@ func TestAIBridgeListInterceptions(t *testing.T) { require.Empty(t, res.Results) // Try to fetch with both after_id and offset pagination. - res, err = experimentalClient.AIBridgeListInterceptions(ctx, codersdk.AIBridgeListInterceptionsFilter{ + res, err = client.AIBridgeListInterceptions(ctx, codersdk.AIBridgeListInterceptionsFilter{ Pagination: codersdk.Pagination{ AfterID: allInterceptionIDs[0], Offset: 1, @@ -269,7 +274,7 @@ func TestAIBridgeListInterceptions(t *testing.T) { } else { pagination.Offset = len(interceptionIDs) } - res, err := experimentalClient.AIBridgeListInterceptions(ctx, codersdk.AIBridgeListInterceptionsFilter{ + res, err := client.AIBridgeListInterceptions(ctx, codersdk.AIBridgeListInterceptionsFilter{ Pagination: pagination, }) require.NoError(t, err) @@ -296,10 +301,42 @@ func TestAIBridgeListInterceptions(t *testing.T) { } }) + t.Run("InflightInterceptions", func(t *testing.T) { + t.Parallel() + dv := coderdtest.DeploymentValues(t) + client, db, firstUser := coderdenttest.NewWithDatabase(t, &coderdenttest.Options{ + Options: &coderdtest.Options{ + DeploymentValues: dv, + }, + LicenseOptions: &coderdenttest.LicenseOptions{ + Features: license.Features{ + codersdk.FeatureAIBridge: 1, + }, + }, + }) + ctx := testutil.Context(t, testutil.WaitLong) + + now := dbtime.Now() + i1EndedAt := now.Add(time.Minute) + i1 := dbgen.AIBridgeInterception(t, db, database.InsertAIBridgeInterceptionParams{ + InitiatorID: firstUser.UserID, + StartedAt: now, + }, &i1EndedAt) + dbgen.AIBridgeInterception(t, db, database.InsertAIBridgeInterceptionParams{ + InitiatorID: firstUser.UserID, + StartedAt: now.Add(-time.Hour), + }, nil) + + res, err := client.AIBridgeListInterceptions(ctx, 
codersdk.AIBridgeListInterceptionsFilter{}) + require.NoError(t, err) + require.EqualValues(t, 1, res.Count) + require.Len(t, res.Results, 1) + require.Equal(t, i1.ID, res.Results[0].ID) + }) + t.Run("Authorized", func(t *testing.T) { t.Parallel() dv := coderdtest.DeploymentValues(t) - dv.Experiments = []string{string(codersdk.ExperimentAIBridge)} adminClient, db, firstUser := coderdenttest.NewWithDatabase(t, &coderdenttest.Options{ Options: &coderdtest.Options{ DeploymentValues: dv, @@ -310,24 +347,23 @@ func TestAIBridgeListInterceptions(t *testing.T) { }, }, }) - adminExperimentalClient := codersdk.NewExperimentalClient(adminClient) ctx := testutil.Context(t, testutil.WaitLong) secondUserClient, secondUser := coderdtest.CreateAnotherUser(t, adminClient, firstUser.OrganizationID) - secondUserExperimentalClient := codersdk.NewExperimentalClient(secondUserClient) now := dbtime.Now() + i1EndedAt := now.Add(time.Minute) i1 := dbgen.AIBridgeInterception(t, db, database.InsertAIBridgeInterceptionParams{ InitiatorID: firstUser.UserID, StartedAt: now, - }, nil) + }, &i1EndedAt) i2 := dbgen.AIBridgeInterception(t, db, database.InsertAIBridgeInterceptionParams{ InitiatorID: secondUser.ID, StartedAt: now.Add(-time.Hour), }, &now) // Admin can see all interceptions. - res, err := adminExperimentalClient.AIBridgeListInterceptions(ctx, codersdk.AIBridgeListInterceptionsFilter{}) + res, err := adminClient.AIBridgeListInterceptions(ctx, codersdk.AIBridgeListInterceptionsFilter{}) require.NoError(t, err) require.EqualValues(t, 2, res.Count) require.Len(t, res.Results, 2) @@ -335,7 +371,7 @@ func TestAIBridgeListInterceptions(t *testing.T) { require.Equal(t, i2.ID, res.Results[1].ID) // Second user can only see their own interceptions. 
- res, err = secondUserExperimentalClient.AIBridgeListInterceptions(ctx, codersdk.AIBridgeListInterceptionsFilter{}) + res, err = secondUserClient.AIBridgeListInterceptions(ctx, codersdk.AIBridgeListInterceptionsFilter{}) require.NoError(t, err) require.EqualValues(t, 1, res.Count) require.Len(t, res.Results, 1) @@ -345,7 +381,6 @@ func TestAIBridgeListInterceptions(t *testing.T) { t.Run("Filter", func(t *testing.T) { t.Parallel() dv := coderdtest.DeploymentValues(t) - dv.Experiments = []string{string(codersdk.ExperimentAIBridge)} client, db, firstUser := coderdenttest.NewWithDatabase(t, &coderdenttest.Options{ Options: &coderdtest.Options{ DeploymentValues: dv, @@ -356,7 +391,6 @@ func TestAIBridgeListInterceptions(t *testing.T) { }, }, }) - experimentalClient := codersdk.NewExperimentalClient(client) ctx := testutil.Context(t, testutil.WaitLong) user1, err := client.User(ctx, codersdk.Me) @@ -378,13 +412,14 @@ func TestAIBridgeListInterceptions(t *testing.T) { // Insert a bunch of test data with varying filterable fields. now := dbtime.Now() + i1EndedAt := now.Add(time.Minute) i1 := dbgen.AIBridgeInterception(t, db, database.InsertAIBridgeInterceptionParams{ ID: uuid.MustParse("00000000-0000-0000-0000-000000000001"), InitiatorID: user1.ID, Provider: "one", Model: "one", StartedAt: now, - }, nil) + }, &i1EndedAt) i2 := dbgen.AIBridgeInterception(t, db, database.InsertAIBridgeInterceptionParams{ ID: uuid.MustParse("00000000-0000-0000-0000-000000000002"), InitiatorID: user1.ID, @@ -506,7 +541,7 @@ func TestAIBridgeListInterceptions(t *testing.T) { t.Run(tc.name, func(t *testing.T) { t.Parallel() ctx := testutil.Context(t, testutil.WaitLong) - res, err := experimentalClient.AIBridgeListInterceptions(ctx, tc.filter) + res, err := client.AIBridgeListInterceptions(ctx, tc.filter) require.NoError(t, err) require.EqualValues(t, len(tc.want), res.Count) // We just compare UUID strings for the sake of this test. 
@@ -526,7 +561,6 @@ func TestAIBridgeListInterceptions(t *testing.T) { t.Run("FilterErrors", func(t *testing.T) { t.Parallel() dv := coderdtest.DeploymentValues(t) - dv.Experiments = []string{string(codersdk.ExperimentAIBridge)} client, _ := coderdenttest.New(t, &coderdenttest.Options{ Options: &coderdtest.Options{ DeploymentValues: dv, @@ -537,7 +571,6 @@ func TestAIBridgeListInterceptions(t *testing.T) { }, }, }) - experimentalClient := codersdk.NewExperimentalClient(client) // No need to insert any test data, we're just testing the filter // errors. @@ -594,7 +627,7 @@ func TestAIBridgeListInterceptions(t *testing.T) { t.Run(tc.name, func(t *testing.T) { t.Parallel() ctx := testutil.Context(t, testutil.WaitLong) - res, err := experimentalClient.AIBridgeListInterceptions(ctx, codersdk.AIBridgeListInterceptionsFilter{ + res, err := client.AIBridgeListInterceptions(ctx, codersdk.AIBridgeListInterceptionsFilter{ FilterQuery: tc.q, }) var sdkErr *codersdk.Error @@ -605,3 +638,68 @@ func TestAIBridgeListInterceptions(t *testing.T) { } }) } + +func TestAIBridgeRouting(t *testing.T) { + t.Parallel() + + dv := coderdtest.DeploymentValues(t) + client, closer, api, _ := coderdenttest.NewWithAPI(t, &coderdenttest.Options{ + Options: &coderdtest.Options{ + DeploymentValues: dv, + }, + LicenseOptions: &coderdenttest.LicenseOptions{ + Features: license.Features{ + codersdk.FeatureAIBridge: 1, + }, + }, + }) + t.Cleanup(func() { + _ = closer.Close() + }) + + // Register a simple test handler that echoes back the request path. 
+ testHandler := http.HandlerFunc(func(rw http.ResponseWriter, r *http.Request) { + rw.WriteHeader(http.StatusOK) + _, _ = rw.Write([]byte(r.URL.Path)) + }) + api.RegisterInMemoryAIBridgedHTTPHandler(testHandler) + + cases := []struct { + name string + path string + expectedPath string + }{ + { + name: "StablePrefix", + path: "/api/v2/aibridge/openai/v1/chat/completions", + expectedPath: "/openai/v1/chat/completions", + }, + { + name: "ExperimentalPrefix", + path: "/api/experimental/aibridge/openai/v1/chat/completions", + expectedPath: "/openai/v1/chat/completions", + }, + } + + for _, tc := range cases { + t.Run(tc.name, func(t *testing.T) { + t.Parallel() + + ctx := testutil.Context(t, testutil.WaitLong) + req, err := http.NewRequestWithContext(ctx, http.MethodPost, client.URL.String()+tc.path, nil) + require.NoError(t, err) + req.Header.Set(codersdk.SessionTokenHeader, client.SessionToken()) + + httpClient := &http.Client{} + resp, err := httpClient.Do(req) + require.NoError(t, err) + defer resp.Body.Close() + require.Equal(t, http.StatusOK, resp.StatusCode) + + // Verify that the prefix was stripped correctly and the path was forwarded. 
+ body, err := io.ReadAll(resp.Body) + require.NoError(t, err) + require.Equal(t, tc.expectedPath, string(body)) + }) + } +} diff --git a/enterprise/coderd/aibridged.go b/enterprise/coderd/aibridged.go index bf991103b1f52..2ff2de902bce1 100644 --- a/enterprise/coderd/aibridged.go +++ b/enterprise/coderd/aibridged.go @@ -14,9 +14,9 @@ import ( "github.com/coder/coder/v2/coderd/tracing" "github.com/coder/coder/v2/codersdk/drpcsdk" - "github.com/coder/coder/v2/enterprise/x/aibridged" - aibridgedproto "github.com/coder/coder/v2/enterprise/x/aibridged/proto" - "github.com/coder/coder/v2/enterprise/x/aibridgedserver" + "github.com/coder/coder/v2/enterprise/aibridged" + aibridgedproto "github.com/coder/coder/v2/enterprise/aibridged/proto" + "github.com/coder/coder/v2/enterprise/aibridgedserver" ) // RegisterInMemoryAIBridgedHTTPHandler mounts [aibridged.Server]'s HTTP router onto @@ -49,7 +49,7 @@ func (api *API) CreateInMemoryAIBridgeServer(dialCtx context.Context) (client ai mux := drpcmux.New() srv, err := aibridgedserver.NewServer(api.ctx, api.Database, api.Logger.Named("aibridgedserver"), - api.AccessURL.String(), api.ExternalAuthConfigs, api.AGPL.Experiments) + api.AccessURL.String(), api.DeploymentValues.AI.BridgeConfig, api.ExternalAuthConfigs, api.AGPL.Experiments) if err != nil { return nil, err } diff --git a/enterprise/coderd/coderd.go b/enterprise/coderd/coderd.go index 7666e8f957fc2..9a7b1f318f7c2 100644 --- a/enterprise/coderd/coderd.go +++ b/enterprise/coderd/coderd.go @@ -227,28 +227,13 @@ func New(ctx context.Context, options *Options) (_ *API, err error) { } api.AGPL.ExperimentalHandler.Group(func(r chi.Router) { - r.Route("/aibridge", func(r chi.Router) { - r.Use( - api.RequireFeatureMW(codersdk.FeatureAIBridge), - httpmw.RequireExperimentWithDevBypass(api.AGPL.Experiments, codersdk.ExperimentAIBridge), - ) - r.Group(func(r chi.Router) { - r.Use(apiKeyMiddleware) - r.Get("/interceptions", api.aiBridgeListInterceptions) - }) + // Deprecated. 
+ // TODO: remove with Beta release. + r.Route("/aibridge", aibridgeHandler(api, apiKeyMiddleware)) + }) - // This is a bit funky but since aibridge only exposes a HTTP - // handler, this is how it has to be. - r.HandleFunc("/*", func(rw http.ResponseWriter, r *http.Request) { - if api.aibridgedHandler == nil { - httpapi.Write(r.Context(), rw, http.StatusNotFound, codersdk.Response{ - Message: "aibridged handler not mounted", - }) - return - } - http.StripPrefix("/api/experimental/aibridge", api.aibridgedHandler).ServeHTTP(rw, r) - }) - }) + api.AGPL.APIHandler.Group(func(r chi.Router) { + r.Route("/aibridge", aibridgeHandler(api, apiKeyMiddleware)) }) api.AGPL.APIHandler.Group(func(r chi.Router) { @@ -473,6 +458,15 @@ func New(ctx context.Context, options *Options) (_ *API, err error) { r.Get("/", api.templateACL) r.Patch("/", api.patchTemplateACL) }) + r.Route("/templates/{template}/prebuilds", func(r chi.Router) { + r.Use( + api.templateRBACEnabledMW, + apiKeyMiddleware, + httpmw.ExtractTemplateParam(api.Database), + ) + r.Post("/invalidate", api.postInvalidateTemplatePresets) + }) + r.Route("/groups", func(r chi.Router) { r.Use( api.templateRBACEnabledMW, diff --git a/enterprise/coderd/coderdenttest/coderdenttest.go b/enterprise/coderd/coderdenttest/coderdenttest.go index a31d1d495bb6e..29758c3dbf02a 100644 --- a/enterprise/coderd/coderdenttest/coderdenttest.go +++ b/enterprise/coderd/coderdenttest/coderdenttest.go @@ -414,6 +414,7 @@ func newExternalProvisionerDaemon(t testing.TB, client *codersdk.Client, org uui ServeOptions: &provisionersdk.ServeOptions{ Listener: provisionerSrv, WorkDirectory: t.TempDir(), + Experiments: codersdk.Experiments{}, }, })) }() diff --git a/enterprise/coderd/license/license.go b/enterprise/coderd/license/license.go index e66915f221f5a..3cf23823d2d5d 100644 --- a/enterprise/coderd/license/license.go +++ b/enterprise/coderd/license/license.go @@ -262,6 +262,36 @@ func LicensesEntitlements( claims.FeatureSet = 
codersdk.FeatureSetEnterprise } + // Temporary: If the license doesn't have a managed agent limit, we add + // a default of 1000 managed agents per deployment for a 100 + // year license term. + // This only applies to "Premium" licenses. + if claims.FeatureSet == codersdk.FeatureSetPremium { + var ( + // We intentionally use a fixed issue time here, before the + // entitlement was added to any new licenses, so any + // licenses with the corresponding features actually set + // trump this default entitlement, even if they are set to a + // smaller value. + defaultManagedAgentsIsuedAt = time.Date(2025, 7, 1, 0, 0, 0, 0, time.UTC) + defaultManagedAgentsStart = defaultManagedAgentsIsuedAt + defaultManagedAgentsEnd = defaultManagedAgentsStart.AddDate(100, 0, 0) + defaultManagedAgentsSoftLimit int64 = 1000 + defaultManagedAgentsHardLimit int64 = 1000 + ) + entitlements.AddFeature(codersdk.FeatureManagedAgentLimit, codersdk.Feature{ + Enabled: true, + Entitlement: entitlement, + SoftLimit: &defaultManagedAgentsSoftLimit, + Limit: &defaultManagedAgentsHardLimit, + UsagePeriod: &codersdk.UsagePeriod{ + IssuedAt: defaultManagedAgentsIsuedAt, + Start: defaultManagedAgentsStart, + End: defaultManagedAgentsEnd, + }, + }) + } + // Add all features from the feature set defined. for _, featureName := range claims.FeatureSet.Features() { if _, ok := licenseForbiddenFeatures[featureName]; ok { @@ -338,33 +368,6 @@ func LicensesEntitlements( Limit: &featureValue, Actual: &featureArguments.ActiveUserCount, }) - - // Temporary: If the license doesn't have a managed agent limit, - // we add a default of 800 managed agents per user. - // This only applies to "Premium" licenses. 
- if claims.FeatureSet == codersdk.FeatureSetPremium { - var ( - // We intentionally use a fixed issue time here, before the - // entitlement was added to any new licenses, so any - // licenses with the corresponding features actually set - // trump this default entitlement, even if they are set to a - // smaller value. - issueTime = time.Date(2025, 7, 1, 0, 0, 0, 0, time.UTC) - defaultSoftAgentLimit = 800 * featureValue - defaultHardAgentLimit = 1000 * featureValue - ) - entitlements.AddFeature(codersdk.FeatureManagedAgentLimit, codersdk.Feature{ - Enabled: true, - Entitlement: entitlement, - SoftLimit: &defaultSoftAgentLimit, - Limit: &defaultHardAgentLimit, - UsagePeriod: &codersdk.UsagePeriod{ - IssuedAt: issueTime, - Start: usagePeriodStart, - End: usagePeriodEnd, - }, - }) - } default: if featureValue <= 0 { // The feature is disabled. diff --git a/enterprise/coderd/license/license_test.go b/enterprise/coderd/license/license_test.go index 0e540989b69da..6c53fb3d89f22 100644 --- a/enterprise/coderd/license/license_test.go +++ b/enterprise/coderd/license/license_test.go @@ -520,8 +520,8 @@ func TestEntitlements(t *testing.T) { t.Run("Premium", func(t *testing.T) { t.Parallel() const userLimit = 1 - const expectedAgentSoftLimit = 800 * userLimit - const expectedAgentHardLimit = 1000 * userLimit + const expectedAgentSoftLimit = 1000 + const expectedAgentHardLimit = 1000 db, _ := dbtestutil.NewDB(t) licenseOptions := coderdenttest.LicenseOptions{ @@ -530,9 +530,7 @@ func TestEntitlements(t *testing.T) { ExpiresAt: dbtime.Now().Add(time.Hour * 24 * 2), FeatureSet: codersdk.FeatureSetPremium, Features: license.Features{ - // Temporary: allows the default value for the - // managed_agent_limit feature to be used. 
- codersdk.FeatureUserLimit: 1, + codersdk.FeatureUserLimit: userLimit, }, } _, err := db.InsertLicense(context.Background(), database.InsertLicenseParams{ @@ -557,11 +555,15 @@ func TestEntitlements(t *testing.T) { require.Equal(t, codersdk.EntitlementEntitled, agentEntitlement.Entitlement) require.EqualValues(t, expectedAgentSoftLimit, *agentEntitlement.SoftLimit) require.EqualValues(t, expectedAgentHardLimit, *agentEntitlement.Limit) + // This might be shocking, but there's a sound reason for this. // See license.go for more details. - require.Equal(t, time.Date(2025, 7, 1, 0, 0, 0, 0, time.UTC), agentEntitlement.UsagePeriod.IssuedAt) - require.WithinDuration(t, licenseOptions.NotBefore, agentEntitlement.UsagePeriod.Start, time.Second) - require.WithinDuration(t, licenseOptions.ExpiresAt, agentEntitlement.UsagePeriod.End, time.Second) + agentUsagePeriodIssuedAt := time.Date(2025, 7, 1, 0, 0, 0, 0, time.UTC) + agentUsagePeriodStart := agentUsagePeriodIssuedAt + agentUsagePeriodEnd := agentUsagePeriodStart.AddDate(100, 0, 0) + require.Equal(t, agentUsagePeriodIssuedAt, agentEntitlement.UsagePeriod.IssuedAt) + require.WithinDuration(t, agentUsagePeriodStart, agentEntitlement.UsagePeriod.Start, time.Second) + require.WithinDuration(t, agentUsagePeriodEnd, agentEntitlement.UsagePeriod.End, time.Second) continue } @@ -1496,14 +1498,14 @@ func TestManagedAgentLimitDefault(t *testing.T) { }) // "Premium" licenses should receive a default managed agent limit of: - // soft = 800 * user_limit - // hard = 1000 * user_limit + // soft = 1000 + // hard = 1000 t.Run("Premium", func(t *testing.T) { t.Parallel() - const userLimit = 100 - const softLimit = 800 * userLimit - const hardLimit = 1000 * userLimit + const userLimit = 33 + const softLimit = 1000 + const hardLimit = 1000 lic := database.License{ ID: 1, UploadedAt: time.Now(), diff --git a/enterprise/coderd/prebuilds/membership.go b/enterprise/coderd/prebuilds/membership.go index f843d33f7f106..9436f68737d4a 100644 --- 
a/enterprise/coderd/prebuilds/membership.go +++ b/enterprise/coderd/prebuilds/membership.go @@ -2,12 +2,13 @@ package prebuilds import ( "context" - "database/sql" "errors" "github.com/google/uuid" "golang.org/x/xerrors" + "cdr.dev/slog" + "github.com/coder/coder/v2/coderd/database" "github.com/coder/quartz" ) @@ -21,114 +22,117 @@ const ( // organizations for which prebuilt workspaces are requested. This is necessary because our data model requires that such // prebuilt workspaces belong to a member of the organization of their eventual claimant. type StoreMembershipReconciler struct { - store database.Store - clock quartz.Clock + store database.Store + clock quartz.Clock + logger slog.Logger } -func NewStoreMembershipReconciler(store database.Store, clock quartz.Clock) StoreMembershipReconciler { +func NewStoreMembershipReconciler(store database.Store, clock quartz.Clock, logger slog.Logger) StoreMembershipReconciler { return StoreMembershipReconciler{ - store: store, - clock: clock, + store: store, + clock: clock, + logger: logger, } } -// ReconcileAll compares the current organization and group memberships of a user to the memberships required -// in order to create prebuilt workspaces. If the user in question is not yet a member of an organization that -// needs prebuilt workspaces, ReconcileAll will create the membership required. +// ReconcileAll ensures the prebuilds system user has the necessary memberships to create prebuilt workspaces. 
+// For each organization with prebuilds configured, it ensures: +// * The user is a member of the organization +// * A group exists with quota 0 +// * The user is a member of that group // -// To facilitate quota management, ReconcileAll will ensure: -// * the existence of a group (defined by PrebuiltWorkspacesGroupName) in each organization that needs prebuilt workspaces -// * that the prebuilds system user belongs to the group in each organization that needs prebuilt workspaces -// * that the group has a quota of 0 by default, which users can adjust based on their needs. +// Unique constraint violations are safely ignored (concurrent creation). // // ReconcileAll does not have an opinion on transaction or lock management. These responsibilities are left to the caller. -func (s StoreMembershipReconciler) ReconcileAll(ctx context.Context, userID uuid.UUID, presets []database.GetTemplatePresetsWithPrebuildsRow) error { - organizationMemberships, err := s.store.GetOrganizationsByUserID(ctx, database.GetOrganizationsByUserIDParams{ - UserID: userID, - Deleted: sql.NullBool{ - Bool: false, - Valid: true, - }, +func (s StoreMembershipReconciler) ReconcileAll(ctx context.Context, userID uuid.UUID, groupName string) error { + orgStatuses, err := s.store.GetOrganizationsWithPrebuildStatus(ctx, database.GetOrganizationsWithPrebuildStatusParams{ + UserID: userID, + GroupName: groupName, }) if err != nil { - return xerrors.Errorf("determine prebuild organization membership: %w", err) - } - - orgMemberships := make(map[uuid.UUID]struct{}, 0) - defaultOrg, err := s.store.GetDefaultOrganization(ctx) - if err != nil { - return xerrors.Errorf("get default organization: %w", err) - } - orgMemberships[defaultOrg.ID] = struct{}{} - for _, o := range organizationMemberships { - orgMemberships[o.ID] = struct{}{} + return xerrors.Errorf("get organizations with prebuild status: %w", err) } var membershipInsertionErrors error - for _, preset := range presets { - _, alreadyOrgMember := 
orgMemberships[preset.OrganizationID] - if !alreadyOrgMember { - // Add the organization to our list of memberships regardless of potential failure below - // to avoid a retry that will probably be doomed anyway. - orgMemberships[preset.OrganizationID] = struct{}{} + for _, orgStatus := range orgStatuses { + s.logger.Debug(ctx, "organization prebuild status", + slog.F("organization_id", orgStatus.OrganizationID), + slog.F("organization_name", orgStatus.OrganizationName), + slog.F("has_prebuild_user", orgStatus.HasPrebuildUser), + slog.F("has_prebuild_group", orgStatus.PrebuildsGroupID.Valid), + slog.F("has_prebuild_user_in_group", orgStatus.HasPrebuildUserInGroup)) - // Insert the missing membership + // Add user to org if needed + if !orgStatus.HasPrebuildUser { _, err = s.store.InsertOrganizationMember(ctx, database.InsertOrganizationMemberParams{ - OrganizationID: preset.OrganizationID, + OrganizationID: orgStatus.OrganizationID, UserID: userID, CreatedAt: s.clock.Now(), UpdatedAt: s.clock.Now(), Roles: []string{}, }) - if err != nil { - membershipInsertionErrors = errors.Join(membershipInsertionErrors, xerrors.Errorf("insert membership for prebuilt workspaces: %w", err)) + // Unique violation means organization membership was created after status check, safe to ignore. 
+ if err != nil && !database.IsUniqueViolation(err) { + membershipInsertionErrors = errors.Join(membershipInsertionErrors, err) continue } - } - - // determine whether the org already has a prebuilds group - prebuildsGroupExists := true - prebuildsGroup, err := s.store.GetGroupByOrgAndName(ctx, database.GetGroupByOrgAndNameParams{ - OrganizationID: preset.OrganizationID, - Name: PrebuiltWorkspacesGroupName, - }) - if err != nil { - if !xerrors.Is(err, sql.ErrNoRows) { - membershipInsertionErrors = errors.Join(membershipInsertionErrors, xerrors.Errorf("get prebuilds group: %w", err)) - continue + if err == nil { + s.logger.Info(ctx, "added prebuilds user to organization", + slog.F("organization_id", orgStatus.OrganizationID), + slog.F("organization_name", orgStatus.OrganizationName), + slog.F("prebuilds_user", userID.String())) } - prebuildsGroupExists = false } - // if the prebuilds group does not exist, create it - if !prebuildsGroupExists { - // create a "prebuilds" group in the organization and add the system user to it - // this group will have a quota of 0 by default, which users can adjust based on their needs - prebuildsGroup, err = s.store.InsertGroup(ctx, database.InsertGroupParams{ + // Create group if it doesn't exist + var groupID uuid.UUID + if !orgStatus.PrebuildsGroupID.Valid { + // Group doesn't exist, create it + group, err := s.store.InsertGroup(ctx, database.InsertGroupParams{ ID: uuid.New(), Name: PrebuiltWorkspacesGroupName, DisplayName: PrebuiltWorkspacesGroupDisplayName, - OrganizationID: preset.OrganizationID, + OrganizationID: orgStatus.OrganizationID, AvatarURL: "", - QuotaAllowance: 0, // Default quota of 0, users should set this based on their needs + QuotaAllowance: 0, }) - if err != nil { - membershipInsertionErrors = errors.Join(membershipInsertionErrors, xerrors.Errorf("create prebuilds group: %w", err)) + // Unique violation means group was created after status check, safe to ignore. 
+ if err != nil && !database.IsUniqueViolation(err) { + membershipInsertionErrors = errors.Join(membershipInsertionErrors, err) continue } + if err == nil { + s.logger.Info(ctx, "created prebuilds group in organization", + slog.F("organization_id", orgStatus.OrganizationID), + slog.F("organization_name", orgStatus.OrganizationName), + slog.F("prebuilds_group", group.ID.String())) + } + groupID = group.ID + } else { + // Group exists + groupID = orgStatus.PrebuildsGroupID.UUID } - // add the system user to the prebuilds group - err = s.store.InsertGroupMember(ctx, database.InsertGroupMemberParams{ - GroupID: prebuildsGroup.ID, - UserID: userID, - }) - if err != nil { - // ignore unique violation errors as the user might already be in the group - if !database.IsUniqueViolation(err) { - membershipInsertionErrors = errors.Join(membershipInsertionErrors, xerrors.Errorf("add system user to prebuilds group: %w", err)) + // Add user to group if needed + if !orgStatus.HasPrebuildUserInGroup { + err = s.store.InsertGroupMember(ctx, database.InsertGroupMemberParams{ + GroupID: groupID, + UserID: userID, + }) + // Unique violation means group membership was created after status check, safe to ignore. 
+ if err != nil && !database.IsUniqueViolation(err) { + membershipInsertionErrors = errors.Join(membershipInsertionErrors, err) + continue + } + if err == nil { + s.logger.Info(ctx, "added prebuilds user to prebuilds group", + slog.F("organization_id", orgStatus.OrganizationID), + slog.F("organization_name", orgStatus.OrganizationName), + slog.F("prebuilds_user", userID.String()), + slog.F("prebuilds_group", groupID.String())) } } } + return membershipInsertionErrors } diff --git a/enterprise/coderd/prebuilds/membership_test.go b/enterprise/coderd/prebuilds/membership_test.go index 55d6557b12495..fe4ec26259889 100644 --- a/enterprise/coderd/prebuilds/membership_test.go +++ b/enterprise/coderd/prebuilds/membership_test.go @@ -7,16 +7,17 @@ import ( "github.com/google/uuid" "github.com/stretchr/testify/require" - "tailscale.com/types/ptr" - "github.com/coder/quartz" + "cdr.dev/slog/sloggers/slogtest" "github.com/coder/coder/v2/coderd/coderdtest" "github.com/coder/coder/v2/coderd/database" "github.com/coder/coder/v2/coderd/database/dbauthz" + "github.com/coder/coder/v2/coderd/database/dbfake" "github.com/coder/coder/v2/coderd/database/dbgen" "github.com/coder/coder/v2/enterprise/coderd/prebuilds" "github.com/coder/coder/v2/testutil" + "github.com/coder/quartz" ) // TestReconcileAll verifies that StoreMembershipReconciler correctly updates membership @@ -26,169 +27,178 @@ func TestReconcileAll(t *testing.T) { clock := quartz.NewMock(t) - // Helper to build a minimal Preset row belonging to a given org. 
- newPresetRow := func(orgID uuid.UUID) database.GetTemplatePresetsWithPrebuildsRow { - return database.GetTemplatePresetsWithPrebuildsRow{ - ID: uuid.New(), - OrganizationID: orgID, - } - } - tests := []struct { name string - includePreset []bool + includePreset bool preExistingOrgMembership []bool preExistingGroup []bool preExistingGroupMembership []bool // Expected outcomes - expectOrgMembershipExists *bool - expectGroupExists *bool - expectUserInGroup *bool + expectOrgMembershipExists bool + expectGroupExists bool + expectUserInGroup bool }{ { name: "if there are no presets, membership reconciliation is a no-op", - includePreset: []bool{false}, + includePreset: false, preExistingOrgMembership: []bool{true, false}, preExistingGroup: []bool{true, false}, preExistingGroupMembership: []bool{true, false}, - expectOrgMembershipExists: ptr.To(false), - expectGroupExists: ptr.To(false), + expectOrgMembershipExists: false, + expectGroupExists: false, + expectUserInGroup: false, }, { name: "if there is a preset, then we should enforce org and group membership in all cases", - includePreset: []bool{true}, + includePreset: true, preExistingOrgMembership: []bool{true, false}, preExistingGroup: []bool{true, false}, preExistingGroupMembership: []bool{true, false}, - expectOrgMembershipExists: ptr.To(true), - expectGroupExists: ptr.To(true), - expectUserInGroup: ptr.To(true), + expectOrgMembershipExists: true, + expectGroupExists: true, + expectUserInGroup: true, }, } for _, tc := range tests { tc := tc - for _, includePreset := range tc.includePreset { - includePreset := includePreset - for _, preExistingOrgMembership := range tc.preExistingOrgMembership { - preExistingOrgMembership := preExistingOrgMembership - for _, preExistingGroup := range tc.preExistingGroup { - preExistingGroup := preExistingGroup - for _, preExistingGroupMembership := range tc.preExistingGroupMembership { - preExistingGroupMembership := preExistingGroupMembership - t.Run(tc.name, func(t *testing.T) { 
- t.Parallel() - - // nolint:gocritic // Reconciliation happens as prebuilds system user, not a human user. - ctx := dbauthz.AsPrebuildsOrchestrator(testutil.Context(t, testutil.WaitLong)) - _, db := coderdtest.NewWithDatabase(t, nil) - - defaultOrg, err := db.GetDefaultOrganization(ctx) - require.NoError(t, err) - - // introduce an unrelated organization to ensure that the membership reconciler doesn't interfere with it. - unrelatedOrg := dbgen.Organization(t, db, database.Organization{}) - targetOrg := dbgen.Organization(t, db, database.Organization{}) - - // Ensure membership to unrelated org. - dbgen.OrganizationMember(t, db, database.OrganizationMember{OrganizationID: unrelatedOrg.ID, UserID: database.PrebuildsSystemUserID}) - - if preExistingOrgMembership { - // System user already a member of both orgs. - dbgen.OrganizationMember(t, db, database.OrganizationMember{OrganizationID: targetOrg.ID, UserID: database.PrebuildsSystemUserID}) - } + includePreset := tc.includePreset + for _, preExistingOrgMembership := range tc.preExistingOrgMembership { + preExistingOrgMembership := preExistingOrgMembership + for _, preExistingGroup := range tc.preExistingGroup { + preExistingGroup := preExistingGroup + for _, preExistingGroupMembership := range tc.preExistingGroupMembership { + preExistingGroupMembership := preExistingGroupMembership + t.Run(tc.name, func(t *testing.T) { + t.Parallel() + + // nolint:gocritic // Reconciliation happens as prebuilds system user, not a human user. + ctx := dbauthz.AsPrebuildsOrchestrator(testutil.Context(t, testutil.WaitLong)) + client, db := coderdtest.NewWithDatabase(t, nil) + owner := coderdtest.CreateFirstUser(t, client) + + defaultOrg, err := db.GetDefaultOrganization(ctx) + require.NoError(t, err) + + // Introduce an unrelated organization to ensure that the membership reconciler doesn't interfere with it. 
+ unrelatedOrg := dbgen.Organization(t, db, database.Organization{}) + dbgen.OrganizationMember(t, db, database.OrganizationMember{OrganizationID: unrelatedOrg.ID, UserID: database.PrebuildsSystemUserID}) + + // Organization to test + targetOrg := dbgen.Organization(t, db, database.Organization{}) + + // Prebuilds system user is a member of the organization + if preExistingOrgMembership { + dbgen.OrganizationMember(t, db, database.OrganizationMember{OrganizationID: targetOrg.ID, UserID: database.PrebuildsSystemUserID}) + } + + // Organization has the prebuilds group + var prebuildsGroup database.Group + if preExistingGroup { + prebuildsGroup = dbgen.Group(t, db, database.Group{ + Name: prebuilds.PrebuiltWorkspacesGroupName, + DisplayName: prebuilds.PrebuiltWorkspacesGroupDisplayName, + OrganizationID: targetOrg.ID, + QuotaAllowance: 0, + }) - // Create pre-existing prebuilds group if required by test case - var prebuildsGroup database.Group - if preExistingGroup { - prebuildsGroup = dbgen.Group(t, db, database.Group{ - Name: prebuilds.PrebuiltWorkspacesGroupName, - DisplayName: prebuilds.PrebuiltWorkspacesGroupDisplayName, - OrganizationID: targetOrg.ID, - QuotaAllowance: 0, + // Add the system user to the group if required by test case + if preExistingGroupMembership { + dbgen.GroupMember(t, db, database.GroupMemberTable{ + GroupID: prebuildsGroup.ID, + UserID: database.PrebuildsSystemUserID, }) - - // Add the system user to the group if preExistingGroupMembership is true - if preExistingGroupMembership { - dbgen.GroupMember(t, db, database.GroupMemberTable{ - GroupID: prebuildsGroup.ID, - UserID: database.PrebuildsSystemUserID, - }) - } } - - presets := []database.GetTemplatePresetsWithPrebuildsRow{newPresetRow(unrelatedOrg.ID)} - if includePreset { - presets = append(presets, newPresetRow(targetOrg.ID)) - } - - // Verify memberships before reconciliation. 
- preReconcileMemberships, err := db.GetOrganizationsByUserID(ctx, database.GetOrganizationsByUserIDParams{ - UserID: database.PrebuildsSystemUserID, - }) - require.NoError(t, err) - expectedMembershipsBefore := []uuid.UUID{defaultOrg.ID, unrelatedOrg.ID} - if preExistingOrgMembership { - expectedMembershipsBefore = append(expectedMembershipsBefore, targetOrg.ID) - } - require.ElementsMatch(t, expectedMembershipsBefore, extractOrgIDs(preReconcileMemberships)) - - // Reconcile - reconciler := prebuilds.NewStoreMembershipReconciler(db, clock) - require.NoError(t, reconciler.ReconcileAll(ctx, database.PrebuildsSystemUserID, presets)) - - // Verify memberships after reconciliation. - postReconcileMemberships, err := db.GetOrganizationsByUserID(ctx, database.GetOrganizationsByUserIDParams{ - UserID: database.PrebuildsSystemUserID, - }) + } + + // Setup unrelated org preset + dbfake.TemplateVersion(t, db).Seed(database.TemplateVersion{ + OrganizationID: unrelatedOrg.ID, + CreatedBy: owner.UserID, + }).Preset(database.TemplateVersionPreset{ + DesiredInstances: sql.NullInt32{ + Int32: 1, + Valid: true, + }, + }).Do() + + // Setup target org preset + dbfake.TemplateVersion(t, db).Seed(database.TemplateVersion{ + OrganizationID: targetOrg.ID, + CreatedBy: owner.UserID, + }).Preset(database.TemplateVersionPreset{ + DesiredInstances: sql.NullInt32{ + Int32: 0, + Valid: includePreset, + }, + }).Do() + + // Verify memberships before reconciliation. 
+ preReconcileMemberships, err := db.GetOrganizationsByUserID(ctx, database.GetOrganizationsByUserIDParams{ + UserID: database.PrebuildsSystemUserID, + }) + require.NoError(t, err) + expectedMembershipsBefore := []uuid.UUID{defaultOrg.ID, unrelatedOrg.ID} + if preExistingOrgMembership { + expectedMembershipsBefore = append(expectedMembershipsBefore, targetOrg.ID) + } + require.ElementsMatch(t, expectedMembershipsBefore, extractOrgIDs(preReconcileMemberships)) + + // Reconcile + reconciler := prebuilds.NewStoreMembershipReconciler(db, clock, slogtest.Make(t, nil)) + require.NoError(t, reconciler.ReconcileAll(ctx, database.PrebuildsSystemUserID, prebuilds.PrebuiltWorkspacesGroupName)) + + // Verify memberships after reconciliation. + postReconcileMemberships, err := db.GetOrganizationsByUserID(ctx, database.GetOrganizationsByUserIDParams{ + UserID: database.PrebuildsSystemUserID, + }) + require.NoError(t, err) + expectedMembershipsAfter := expectedMembershipsBefore + if !preExistingOrgMembership && tc.expectOrgMembershipExists { + expectedMembershipsAfter = append(expectedMembershipsAfter, targetOrg.ID) + } + require.ElementsMatch(t, expectedMembershipsAfter, extractOrgIDs(postReconcileMemberships)) + + // Verify prebuilds group behavior based on expected outcomes + prebuildsGroup, err = db.GetGroupByOrgAndName(ctx, database.GetGroupByOrgAndNameParams{ + OrganizationID: targetOrg.ID, + Name: prebuilds.PrebuiltWorkspacesGroupName, + }) + if tc.expectGroupExists { require.NoError(t, err) - expectedMembershipsAfter := expectedMembershipsBefore - if !preExistingOrgMembership && tc.expectOrgMembershipExists != nil && *tc.expectOrgMembershipExists { - expectedMembershipsAfter = append(expectedMembershipsAfter, targetOrg.ID) - } - require.ElementsMatch(t, expectedMembershipsAfter, extractOrgIDs(postReconcileMemberships)) - - // Verify prebuilds group behavior based on expected outcomes - prebuildsGroup, err = db.GetGroupByOrgAndName(ctx, database.GetGroupByOrgAndNameParams{ 
- OrganizationID: targetOrg.ID, - Name: prebuilds.PrebuiltWorkspacesGroupName, - }) - if tc.expectGroupExists != nil && *tc.expectGroupExists { + require.Equal(t, prebuilds.PrebuiltWorkspacesGroupName, prebuildsGroup.Name) + require.Equal(t, prebuilds.PrebuiltWorkspacesGroupDisplayName, prebuildsGroup.DisplayName) + require.Equal(t, int32(0), prebuildsGroup.QuotaAllowance) // Default quota should be 0 + + if tc.expectUserInGroup { + // Check that the system user is a member of the prebuilds group + groupMembers, err := db.GetGroupMembersByGroupID(ctx, database.GetGroupMembersByGroupIDParams{ + GroupID: prebuildsGroup.ID, + IncludeSystem: true, + }) require.NoError(t, err) - require.Equal(t, prebuilds.PrebuiltWorkspacesGroupName, prebuildsGroup.Name) - require.Equal(t, prebuilds.PrebuiltWorkspacesGroupDisplayName, prebuildsGroup.DisplayName) - require.Equal(t, int32(0), prebuildsGroup.QuotaAllowance) // Default quota should be 0 - - if tc.expectUserInGroup != nil && *tc.expectUserInGroup { - // Check that the system user is a member of the prebuilds group - groupMembers, err := db.GetGroupMembersByGroupID(ctx, database.GetGroupMembersByGroupIDParams{ - GroupID: prebuildsGroup.ID, - IncludeSystem: true, - }) - require.NoError(t, err) - require.Len(t, groupMembers, 1) - require.Equal(t, database.PrebuildsSystemUserID, groupMembers[0].UserID) - } - - // If no preset exists, then we do not enforce group membership: - if tc.expectUserInGroup != nil && !*tc.expectUserInGroup { - // Check that the system user is NOT a member of the prebuilds group - groupMembers, err := db.GetGroupMembersByGroupID(ctx, database.GetGroupMembersByGroupIDParams{ - GroupID: prebuildsGroup.ID, - IncludeSystem: true, - }) - require.NoError(t, err) - require.Len(t, groupMembers, 0) - } + require.Len(t, groupMembers, 1) + require.Equal(t, database.PrebuildsSystemUserID, groupMembers[0].UserID) } - if !preExistingGroup && tc.expectGroupExists != nil && !*tc.expectGroupExists { - // Verify that no 
prebuilds group exists - require.Error(t, err) - require.True(t, errors.Is(err, sql.ErrNoRows)) + // If no preset exists, then we do not enforce group membership: + if !tc.expectUserInGroup { + // Check that the system user is NOT a member of the prebuilds group + groupMembers, err := db.GetGroupMembersByGroupID(ctx, database.GetGroupMembersByGroupIDParams{ + GroupID: prebuildsGroup.ID, + IncludeSystem: true, + }) + require.NoError(t, err) + require.Len(t, groupMembers, 0) } - }) - } + } + + if !preExistingGroup && !tc.expectGroupExists { + // Verify that no prebuilds group exists + require.Error(t, err) + require.True(t, errors.Is(err, sql.ErrNoRows)) + } + }) } } } diff --git a/enterprise/coderd/prebuilds/metricscollector_test.go b/enterprise/coderd/prebuilds/metricscollector_test.go index f9584e9ec2c25..aa9886fb7ad1b 100644 --- a/enterprise/coderd/prebuilds/metricscollector_test.go +++ b/enterprise/coderd/prebuilds/metricscollector_test.go @@ -485,7 +485,7 @@ func TestMetricsCollector_ReconciliationPausedMetric(t *testing.T) { require.NoError(t, err) // Run reconciliation to update the metric - err = reconciler.ReconcileAll(ctx) + _, err = reconciler.ReconcileAll(ctx) require.NoError(t, err) // Check that the metric shows reconciliation is not paused @@ -514,7 +514,7 @@ func TestMetricsCollector_ReconciliationPausedMetric(t *testing.T) { require.NoError(t, err) // Run reconciliation to update the metric - err = reconciler.ReconcileAll(ctx) + _, err = reconciler.ReconcileAll(ctx) require.NoError(t, err) // Check that the metric shows reconciliation is paused @@ -543,7 +543,7 @@ func TestMetricsCollector_ReconciliationPausedMetric(t *testing.T) { require.NoError(t, err) // Run reconciliation to update the metric - err = reconciler.ReconcileAll(ctx) + _, err = reconciler.ReconcileAll(ctx) require.NoError(t, err) // Check that the metric shows reconciliation is not paused diff --git a/enterprise/coderd/prebuilds/reconcile.go 
b/enterprise/coderd/prebuilds/reconcile.go index ceb16061bd7a7..17a56d484c9f6 100644 --- a/enterprise/coderd/prebuilds/reconcile.go +++ b/enterprise/coderd/prebuilds/reconcile.go @@ -15,6 +15,7 @@ import ( "github.com/google/uuid" "github.com/hashicorp/go-multierror" "github.com/prometheus/client_golang/prometheus" + "github.com/prometheus/client_golang/prometheus/promauto" "golang.org/x/sync/errgroup" "golang.org/x/xerrors" @@ -44,7 +45,6 @@ type StoreReconciler struct { logger slog.Logger clock quartz.Clock registerer prometheus.Registerer - metrics *MetricsCollector notifEnq notifications.Enqueuer buildUsageChecker *atomic.Pointer[wsbuilder.UsageChecker] @@ -53,10 +53,33 @@ type StoreReconciler struct { stopped atomic.Bool done chan struct{} provisionNotifyCh chan database.ProvisionerJob + + // Prebuild state metrics + metrics *MetricsCollector + // Operational metrics + reconciliationDuration prometheus.Histogram } var _ prebuilds.ReconciliationOrchestrator = &StoreReconciler{} +type DeprovisionMode int + +const ( + DeprovisionModeNormal DeprovisionMode = iota + DeprovisionModeOrphan +) + +func (d DeprovisionMode) String() string { + switch d { + case DeprovisionModeOrphan: + return "orphan" + case DeprovisionModeNormal: + return "normal" + default: + return "unknown" + } +} + func NewStoreReconciler(store database.Store, ps pubsub.Pubsub, fileCache *files.Cache, @@ -87,6 +110,15 @@ func NewStoreReconciler(store database.Store, // If the registerer fails to register the metrics collector, it's not fatal. 
logger.Error(context.Background(), "failed to register prometheus metrics", slog.Error(err)) } + + factory := promauto.With(registerer) + reconciler.reconciliationDuration = factory.NewHistogram(prometheus.HistogramOpts{ + Namespace: "coderd", + Subsystem: "prebuilds", + Name: "reconciliation_duration_seconds", + Help: "Duration of each prebuilds reconciliation cycle.", + Buckets: prometheus.DefBuckets, + }) } return reconciler @@ -158,10 +190,15 @@ func (c *StoreReconciler) Run(ctx context.Context) { // instead of waiting for the next reconciliation interval case <-ticker.C: // Trigger a new iteration on each tick. - err := c.ReconcileAll(ctx) + stats, err := c.ReconcileAll(ctx) if err != nil { c.logger.Error(context.Background(), "reconciliation failed", slog.Error(err)) } + + if c.reconciliationDuration != nil { + c.reconciliationDuration.Observe(stats.Elapsed.Seconds()) + } + c.logger.Debug(ctx, "reconciliation stats", slog.F("elapsed", stats.Elapsed)) case <-ctx.Done(): // nolint:gocritic // it's okay to use slog.F() for an error in this case // because we want to differentiate two different types of errors: ctx.Err() and context.Cause() @@ -245,19 +282,24 @@ func (c *StoreReconciler) Stop(ctx context.Context, cause error) { // be reconciled again, leading to another workspace being provisioned. Two workspace builds will be occurring // simultaneously for the same preset, but once both jobs have completed the reconciliation loop will notice the // extraneous instance and delete it. 
-func (c *StoreReconciler) ReconcileAll(ctx context.Context) error { +func (c *StoreReconciler) ReconcileAll(ctx context.Context) (stats prebuilds.ReconcileStats, err error) { + start := c.clock.Now() + defer func() { + stats.Elapsed = c.clock.Since(start) + }() + logger := c.logger.With(slog.F("reconcile_context", "all")) select { case <-ctx.Done(): logger.Warn(context.Background(), "reconcile exiting prematurely; context done", slog.Error(ctx.Err())) - return nil + return stats, nil default: } logger.Debug(ctx, "starting reconciliation") - err := c.WithReconciliationLock(ctx, logger, func(ctx context.Context, _ database.Store) error { + err = c.WithReconciliationLock(ctx, logger, func(ctx context.Context, _ database.Store) error { // Check if prebuilds reconciliation is paused settingsJSON, err := c.store.GetPrebuildsSettings(ctx) if err != nil { @@ -280,6 +322,12 @@ func (c *StoreReconciler) ReconcileAll(ctx context.Context) error { return nil } + membershipReconciler := NewStoreMembershipReconciler(c.store, c.clock, logger) + err = membershipReconciler.ReconcileAll(ctx, database.PrebuildsSystemUserID, PrebuiltWorkspacesGroupName) + if err != nil { + return xerrors.Errorf("reconcile prebuild membership: %w", err) + } + snapshot, err := c.SnapshotState(ctx, c.store) if err != nil { return xerrors.Errorf("determine current snapshot: %w", err) @@ -292,12 +340,6 @@ func (c *StoreReconciler) ReconcileAll(ctx context.Context) error { return nil } - membershipReconciler := NewStoreMembershipReconciler(c.store, c.clock) - err = membershipReconciler.ReconcileAll(ctx, database.PrebuildsSystemUserID, snapshot.Presets) - if err != nil { - return xerrors.Errorf("reconcile prebuild membership: %w", err) - } - var eg errgroup.Group // Reconcile presets in parallel. Each preset in its own goroutine. 
for _, preset := range snapshot.Presets { @@ -330,7 +372,7 @@ func (c *StoreReconciler) ReconcileAll(ctx context.Context) error { logger.Error(ctx, "failed to reconcile", slog.Error(err)) } - return err + return stats, err } func (c *StoreReconciler) reportHardLimitedPresets(snapshot *prebuilds.GlobalSnapshot) { @@ -642,34 +684,7 @@ func (c *StoreReconciler) executeReconciliationAction(ctx context.Context, logge return multiErr.ErrorOrNil() case prebuilds.ActionTypeCancelPending: - // Cancel pending prebuild jobs from non-active template versions to avoid - // provisioning obsolete workspaces that would immediately be deprovisioned. - // This uses a criteria-based update to ensure only jobs that are still pending - // at execution time are canceled, avoiding race conditions where jobs may have - // transitioned to running status between query and update. - canceledJobs, err := c.store.UpdatePrebuildProvisionerJobWithCancel( - ctx, - database.UpdatePrebuildProvisionerJobWithCancelParams{ - Now: c.clock.Now(), - PresetID: uuid.NullUUID{ - UUID: ps.Preset.ID, - Valid: true, - }, - }) - if err != nil { - logger.Error(ctx, "failed to cancel pending prebuild jobs", - slog.F("template_version_id", ps.Preset.TemplateVersionID.String()), - slog.F("preset_id", ps.Preset.ID), - slog.Error(err)) - return err - } - if len(canceledJobs) > 0 { - logger.Info(ctx, "canceled pending prebuild jobs for inactive version", - slog.F("template_version_id", ps.Preset.TemplateVersionID.String()), - slog.F("preset_id", ps.Preset.ID), - slog.F("count", len(canceledJobs))) - } - return nil + return c.cancelAndOrphanDeletePendingPrebuilds(ctx, ps.Preset.TemplateID, ps.Preset.TemplateVersionID, ps.Preset.ID) default: return xerrors.Errorf("unknown action type: %v", action.ActionType) @@ -682,7 +697,8 @@ func (c *StoreReconciler) createPrebuiltWorkspace(ctx context.Context, prebuiltW return xerrors.Errorf("failed to generate unique prebuild ID: %w", err) } - return c.store.InTx(func(db 
database.Store) error { + var provisionerJob *database.ProvisionerJob + err = c.store.InTx(func(db database.Store) error { template, err := db.GetTemplateByID(ctx, templateID) if err != nil { return xerrors.Errorf("failed to get template: %w", err) @@ -717,37 +733,140 @@ func (c *StoreReconciler) createPrebuiltWorkspace(ctx context.Context, prebuiltW c.logger.Info(ctx, "attempting to create prebuild", slog.F("name", name), slog.F("workspace_id", prebuiltWorkspaceID.String()), slog.F("preset_id", presetID.String())) - return c.provision(ctx, db, prebuiltWorkspaceID, template, presetID, database.WorkspaceTransitionStart, workspace) + provisionerJob, err = c.provision(ctx, db, prebuiltWorkspaceID, template, presetID, database.WorkspaceTransitionStart, workspace, DeprovisionModeNormal) + return err }, &database.TxOptions{ Isolation: sql.LevelRepeatableRead, ReadOnly: false, }) + if err != nil { + return err + } + + // Publish provisioner job event to notify the acquirer that a new job was posted + c.publishProvisionerJob(ctx, provisionerJob, prebuiltWorkspaceID) + + return nil } -func (c *StoreReconciler) deletePrebuiltWorkspace(ctx context.Context, prebuiltWorkspaceID uuid.UUID, templateID uuid.UUID, presetID uuid.UUID) error { - return c.store.InTx(func(db database.Store) error { - workspace, err := db.GetWorkspaceByID(ctx, prebuiltWorkspaceID) +// provisionDelete provisions a delete transition for a prebuilt workspace. +// +// If mode is DeprovisionModeOrphan, the builder will not send Terraform state to the provisioner. +// This allows the workspace to be deleted even when no provisioners are available, and is safe +// when no Terraform resources were actually created (e.g., for pending prebuilds that were canceled +// before provisioning started). +// +// IMPORTANT: This function must be called within a database transaction. It does not create its own transaction. +// The caller is responsible for managing the transaction boundary via db.InTx(). 
+func (c *StoreReconciler) provisionDelete(ctx context.Context, db database.Store, workspaceID uuid.UUID, templateID uuid.UUID, presetID uuid.UUID, mode DeprovisionMode) (*database.ProvisionerJob, error) { + workspace, err := db.GetWorkspaceByID(ctx, workspaceID) + if err != nil { + return nil, xerrors.Errorf("get workspace by ID: %w", err) + } + + template, err := db.GetTemplateByID(ctx, templateID) + if err != nil { + return nil, xerrors.Errorf("failed to get template: %w", err) + } + + if workspace.OwnerID != database.PrebuildsSystemUserID { + return nil, xerrors.Errorf("prebuilt workspace is not owned by prebuild user anymore, probably it was claimed") + } + + c.logger.Info(ctx, "attempting to delete prebuild", slog.F("orphan", mode.String()), + slog.F("name", workspace.Name), slog.F("workspace_id", workspaceID.String()), slog.F("preset_id", presetID.String())) + + return c.provision(ctx, db, workspaceID, template, presetID, database.WorkspaceTransitionDelete, workspace, mode) +} + +// cancelAndOrphanDeletePendingPrebuilds cancels pending prebuild jobs from inactive template versions +// and orphan-deletes their associated workspaces. +// +// The cancel operation uses a criteria-based update to ensure only jobs that are still pending at +// execution time are canceled, avoiding race conditions where jobs may have transitioned to running. +// +// Since these jobs were never processed by a provisioner, no Terraform resources were created, +// making it safe to orphan-delete the workspaces (skipping Terraform destroy). 
+func (c *StoreReconciler) cancelAndOrphanDeletePendingPrebuilds(ctx context.Context, templateID uuid.UUID, templateVersionID uuid.UUID, presetID uuid.UUID) error { + var canceledProvisionerJob *database.ProvisionerJob + var canceledWorkspaceID uuid.UUID + err := c.store.InTx(func(db database.Store) error { + canceledJobs, err := db.UpdatePrebuildProvisionerJobWithCancel( + ctx, + database.UpdatePrebuildProvisionerJobWithCancelParams{ + Now: c.clock.Now(), + PresetID: uuid.NullUUID{ + UUID: presetID, + Valid: true, + }, + }) if err != nil { - return xerrors.Errorf("get workspace by ID: %w", err) + c.logger.Error(ctx, "failed to cancel pending prebuild jobs", + slog.F("template_id", templateID.String()), + slog.F("template_version_id", templateVersionID.String()), + slog.F("preset_id", presetID.String()), + slog.Error(err)) + return err } - template, err := db.GetTemplateByID(ctx, templateID) - if err != nil { - return xerrors.Errorf("failed to get template: %w", err) + if len(canceledJobs) > 0 { + c.logger.Info(ctx, "canceled pending prebuild jobs for inactive version", + slog.F("template_id", templateID.String()), + slog.F("template_version_id", templateVersionID.String()), + slog.F("preset_id", presetID.String()), + slog.F("count", len(canceledJobs))) } - if workspace.OwnerID != database.PrebuildsSystemUserID { - return xerrors.Errorf("prebuilt workspace is not owned by prebuild user anymore, probably it was claimed") + var multiErr multierror.Error + for _, job := range canceledJobs { + provisionerJob, err := c.provisionDelete(ctx, db, job.WorkspaceID, job.TemplateID, presetID, DeprovisionModeOrphan) + if err != nil { + c.logger.Error(ctx, "failed to orphan delete canceled prebuild", + slog.F("workspace_id", job.WorkspaceID.String()), slog.Error(err)) + multiErr.Errors = append(multiErr.Errors, err) + } else if canceledProvisionerJob == nil { + canceledProvisionerJob = provisionerJob + canceledWorkspaceID = job.WorkspaceID + } } - c.logger.Info(ctx, "attempting 
to delete prebuild", - slog.F("workspace_id", prebuiltWorkspaceID.String()), slog.F("preset_id", presetID.String())) + return multiErr.ErrorOrNil() + }, &database.TxOptions{ + Isolation: sql.LevelRepeatableRead, + ReadOnly: false, + }) + if err != nil { + return err + } + + // Job event notifications contain organization, provisioner type, and tags. + // Since all canceled jobs have the same values, we only send one notification + // for the first successfully canceled job, which is sufficient to trigger the + // provisioner chain that processes all remaining jobs. + if canceledProvisionerJob != nil { + c.publishProvisionerJob(ctx, canceledProvisionerJob, canceledWorkspaceID) + } - return c.provision(ctx, db, prebuiltWorkspaceID, template, presetID, database.WorkspaceTransitionDelete, workspace) + return nil +} + +func (c *StoreReconciler) deletePrebuiltWorkspace(ctx context.Context, prebuiltWorkspaceID uuid.UUID, templateID uuid.UUID, presetID uuid.UUID) error { + var provisionerJob *database.ProvisionerJob + err := c.store.InTx(func(db database.Store) (err error) { + provisionerJob, err = c.provisionDelete(ctx, db, prebuiltWorkspaceID, templateID, presetID, DeprovisionModeNormal) + return err }, &database.TxOptions{ Isolation: sql.LevelRepeatableRead, ReadOnly: false, }) + if err != nil { + return err + } + + // Publish provisioner job event to notify the acquirer that a new job was posted + c.publishProvisionerJob(ctx, provisionerJob, prebuiltWorkspaceID) + + return nil } func (c *StoreReconciler) provision( @@ -758,10 +877,11 @@ func (c *StoreReconciler) provision( presetID uuid.UUID, transition database.WorkspaceTransition, workspace database.Workspace, -) error { + mode DeprovisionMode, +) (*database.ProvisionerJob, error) { tvp, err := db.GetPresetParametersByTemplateVersionID(ctx, template.ActiveVersionID) if err != nil { - return xerrors.Errorf("fetch preset details: %w", err) + return nil, xerrors.Errorf("fetch preset details: %w", err) } var params 
[]codersdk.WorkspaceBuildParameter @@ -795,6 +915,11 @@ func (c *StoreReconciler) provision( builder = builder.RichParameterValues(params) } + // Use orphan mode for deletes when no Terraform resources exist + if transition == database.WorkspaceTransitionDelete && mode == DeprovisionModeOrphan { + builder = builder.Orphan() + } + _, provisionerJob, _, err := builder.Build( ctx, db, @@ -805,26 +930,34 @@ func (c *StoreReconciler) provision( audit.WorkspaceBuildBaggage{}, ) if err != nil { - return xerrors.Errorf("provision workspace: %w", err) + return nil, xerrors.Errorf("provision workspace: %w", err) } - if provisionerJob == nil { - return nil - } - - // Publish provisioner job event outside of transaction. - select { - case c.provisionNotifyCh <- *provisionerJob: - default: // channel full, drop the message; provisioner will pick this job up later with its periodic check, though. - c.logger.Warn(ctx, "provisioner job notification queue full, dropping", - slog.F("job_id", provisionerJob.ID), slog.F("prebuild_id", prebuildID.String())) + // This should not happen, builder.Build() should either return a job or an error. + // Returning an error to fail fast if we hit this unexpected case. + return nil, xerrors.Errorf("provision succeeded but returned no job") } c.logger.Info(ctx, "prebuild job scheduled", slog.F("transition", transition), slog.F("prebuild_id", prebuildID.String()), slog.F("preset_id", presetID.String()), slog.F("job_id", provisionerJob.ID)) - return nil + return provisionerJob, nil +} + +// publishProvisionerJob publishes a provisioner job event to notify the acquirer that a new job has been created. +// This must be called after the database transaction that creates the job has committed to ensure +// the job is visible to provisioners when they query the database. 
+func (c *StoreReconciler) publishProvisionerJob(ctx context.Context, provisionerJob *database.ProvisionerJob, workspaceID uuid.UUID) { + if provisionerJob == nil { + return + } + select { + case c.provisionNotifyCh <- *provisionerJob: + default: // channel full, drop the message; provisioner will pick this job up later with its periodic check + c.logger.Warn(ctx, "provisioner job notification queue full, dropping", + slog.F("job_id", provisionerJob.ID), slog.F("prebuild_id", workspaceID.String())) + } } // ForceMetricsUpdate forces the metrics collector, if defined, to update its state (we cache the metrics state to diff --git a/enterprise/coderd/prebuilds/reconcile_test.go b/enterprise/coderd/prebuilds/reconcile_test.go index 33b99145d8e12..7548faebd7dab 100644 --- a/enterprise/coderd/prebuilds/reconcile_test.go +++ b/enterprise/coderd/prebuilds/reconcile_test.go @@ -72,7 +72,8 @@ func TestNoReconciliationActionsIfNoPresets(t *testing.T) { require.Equal(t, templateVersion, gotTemplateVersion) // when we trigger the reconciliation loop for all templates - require.NoError(t, controller.ReconcileAll(ctx)) + _, err = controller.ReconcileAll(ctx) + require.NoError(t, err) // then no reconciliation actions are taken // because without presets, there are no prebuilds @@ -126,7 +127,8 @@ func TestNoReconciliationActionsIfNoPrebuilds(t *testing.T) { require.NotEmpty(t, presetParameters) // when we trigger the reconciliation loop for all templates - require.NoError(t, controller.ReconcileAll(ctx)) + _, err = controller.ReconcileAll(ctx) + require.NoError(t, err) // then no reconciliation actions are taken // because without prebuilds, there is nothing to reconcile @@ -204,7 +206,10 @@ func TestPrebuildReconciliation(t *testing.T) { templateDeleted: []bool{false}, }, { - name: "never attempt to interfere with active builds", + // TODO(ssncferreira): Investigate why the GetRunningPrebuiltWorkspaces query is returning 0 rows. 
+ // When a template version is inactive (templateVersionActive = false), any prebuilds in the + // database.ProvisionerJobStatusRunning state should be deleted. + name: "never attempt to interfere with prebuilds from an active template version", // The workspace builder does not allow scheduling a new build if there is already a build // pending, running, or canceling. As such, we should never attempt to start, stop or delete // such prebuilds. Rather, we should wait for the existing build to complete and reconcile @@ -215,7 +220,7 @@ func TestPrebuildReconciliation(t *testing.T) { database.ProvisionerJobStatusRunning, database.ProvisionerJobStatusCanceling, }, - templateVersionActive: []bool{true, false}, + templateVersionActive: []bool{true}, shouldDeleteOldPrebuild: ptr.To(false), templateDeleted: []bool{false}, }, @@ -425,7 +430,8 @@ func (tc testCase) run(t *testing.T) { // Run the reconciliation multiple times to ensure idempotency // 8 was arbitrary, but large enough to reasonably trust the result for i := 1; i <= 8; i++ { - require.NoErrorf(t, controller.ReconcileAll(ctx), "failed on iteration %d", i) + _, err := controller.ReconcileAll(ctx) + require.NoErrorf(t, err, "failed on iteration %d", i) if tc.shouldCreateNewPrebuild != nil { newPrebuildCount := 0 @@ -539,7 +545,8 @@ func TestMultiplePresetsPerTemplateVersion(t *testing.T) { // Run the reconciliation multiple times to ensure idempotency // 8 was arbitrary, but large enough to reasonably trust the result for i := 1; i <= 8; i++ { - require.NoErrorf(t, controller.ReconcileAll(ctx), "failed on iteration %d", i) + _, err := controller.ReconcileAll(ctx) + require.NoErrorf(t, err, "failed on iteration %d", i) newPrebuildCount := 0 workspaces, err := db.GetWorkspacesByTemplateID(ctx, template.ID) @@ -665,7 +672,7 @@ func TestPrebuildScheduling(t *testing.T) { DesiredInstances: 5, }) - err := controller.ReconcileAll(ctx) + _, err := controller.ReconcileAll(ctx) require.NoError(t, err) // get workspace 
builds @@ -748,7 +755,8 @@ func TestInvalidPreset(t *testing.T) { // Run the reconciliation multiple times to ensure idempotency // 8 was arbitrary, but large enough to reasonably trust the result for i := 1; i <= 8; i++ { - require.NoErrorf(t, controller.ReconcileAll(ctx), "failed on iteration %d", i) + _, err := controller.ReconcileAll(ctx) + require.NoErrorf(t, err, "failed on iteration %d", i) workspaces, err := db.GetWorkspacesByTemplateID(ctx, template.ID) require.NoError(t, err) @@ -814,7 +822,8 @@ func TestDeletionOfPrebuiltWorkspaceWithInvalidPreset(t *testing.T) { }) // Old prebuilt workspace should be deleted. - require.NoError(t, controller.ReconcileAll(ctx)) + _, err = controller.ReconcileAll(ctx) + require.NoError(t, err) builds, err := db.GetWorkspaceBuildsByWorkspaceID(ctx, database.GetWorkspaceBuildsByWorkspaceIDParams{ WorkspaceID: prebuiltWorkspace.ID, @@ -913,12 +922,15 @@ func TestSkippingHardLimitedPresets(t *testing.T) { // Trigger reconciliation to attempt creating a new prebuild. // The outcome depends on whether the hard limit has been reached. - require.NoError(t, controller.ReconcileAll(ctx)) + _, err = controller.ReconcileAll(ctx) + require.NoError(t, err) // These two additional calls to ReconcileAll should not trigger any notifications. // A notification is only sent once. - require.NoError(t, controller.ReconcileAll(ctx)) - require.NoError(t, controller.ReconcileAll(ctx)) + _, err = controller.ReconcileAll(ctx) + require.NoError(t, err) + _, err = controller.ReconcileAll(ctx) + require.NoError(t, err) // Verify the final state after reconciliation. workspaces, err = db.GetWorkspacesByTemplateID(ctx, template.ID) @@ -1090,12 +1102,15 @@ func TestHardLimitedPresetShouldNotBlockDeletion(t *testing.T) { // Trigger reconciliation to attempt creating a new prebuild. // The outcome depends on whether the hard limit has been reached. 
- require.NoError(t, controller.ReconcileAll(ctx)) + _, err = controller.ReconcileAll(ctx) + require.NoError(t, err) // These two additional calls to ReconcileAll should not trigger any notifications. // A notification is only sent once. - require.NoError(t, controller.ReconcileAll(ctx)) - require.NoError(t, controller.ReconcileAll(ctx)) + _, err = controller.ReconcileAll(ctx) + require.NoError(t, err) + _, err = controller.ReconcileAll(ctx) + require.NoError(t, err) // Verify the final state after reconciliation. // When hard limit is reached, no new workspace should be created. @@ -1138,7 +1153,8 @@ func TestHardLimitedPresetShouldNotBlockDeletion(t *testing.T) { } // Trigger reconciliation to make sure that successful, but outdated prebuilt workspace will be deleted. - require.NoError(t, controller.ReconcileAll(ctx)) + _, err = controller.ReconcileAll(ctx) + require.NoError(t, err) workspaces, err = db.GetWorkspacesByTemplateID(ctx, template.ID) require.NoError(t, err) @@ -1737,7 +1753,8 @@ func TestExpiredPrebuildsMultipleActions(t *testing.T) { } // Trigger reconciliation to process expired prebuilds and enforce desired state. 
- require.NoError(t, controller.ReconcileAll(ctx)) + _, err = controller.ReconcileAll(ctx) + require.NoError(t, err) // Sort non-expired workspaces by CreatedAt in ascending order (oldest first) sort.Slice(nonExpiredWorkspaces, func(i, j int) bool { @@ -2121,16 +2138,16 @@ func TestCancelPendingPrebuilds(t *testing.T) { }, }).SkipCreateTemplate().Do() - var workspace dbfake.WorkspaceResponse + var pendingWorkspace dbfake.WorkspaceResponse if tt.activeTemplateVersion { // Given: a prebuilt workspace, workspace build and respective provisioner job from an // active template version - workspace = tt.setupBuild(t, db, client, + pendingWorkspace = tt.setupBuild(t, db, client, owner.OrganizationID, templateID, activeTemplateVersion.TemplateVersion.ID, activePresetID) } else { // Given: a prebuilt workspace, workspace build and respective provisioner job from a // non-active template version - workspace = tt.setupBuild(t, db, client, + pendingWorkspace = tt.setupBuild(t, db, client, owner.OrganizationID, templateID, nonActiveTemplateVersion.TemplateVersion.ID, nonActivePresetID) } @@ -2142,18 +2159,32 @@ func TestCancelPendingPrebuilds(t *testing.T) { require.NoError(t, err) // When: the reconciliation loop is triggered - require.NoError(t, reconciler.ReconcileAll(ctx)) + _, err = reconciler.ReconcileAll(ctx) + require.NoError(t, err) if tt.shouldCancel { - // Then: the prebuild related jobs from non-active version should be canceled - cancelledJob, err := db.GetProvisionerJobByID(ctx, workspace.Build.JobID) + // Then: the pending prebuild job from non-active version should be canceled + cancelledJob, err := db.GetProvisionerJobByID(ctx, pendingWorkspace.Build.JobID) require.NoError(t, err) require.Equal(t, clock.Now().UTC(), cancelledJob.CanceledAt.Time.UTC()) require.Equal(t, clock.Now().UTC(), cancelledJob.CompletedAt.Time.UTC()) require.Equal(t, database.ProvisionerJobStatusCanceled, cancelledJob.JobStatus) + + // Then: the workspace should be deleted + 
deletedWorkspace, err := db.GetWorkspaceByID(ctx, pendingWorkspace.Workspace.ID) + require.NoError(t, err) + require.True(t, deletedWorkspace.Deleted) + latestBuild, err := db.GetLatestWorkspaceBuildByWorkspaceID(ctx, deletedWorkspace.ID) + require.NoError(t, err) + require.Equal(t, database.WorkspaceTransitionDelete, latestBuild.Transition) + deleteJob, err := db.GetProvisionerJobByID(ctx, latestBuild.JobID) + require.NoError(t, err) + require.True(t, deleteJob.CompletedAt.Valid) + require.False(t, deleteJob.WorkerID.Valid) + require.Equal(t, database.ProvisionerJobStatusSucceeded, deleteJob.JobStatus) } else { - // Then: the provisioner job should not be canceled - job, err := db.GetProvisionerJobByID(ctx, workspace.Build.JobID) + // Then: the pending prebuild job should not be canceled + job, err := db.GetProvisionerJobByID(ctx, pendingWorkspace.Build.JobID) require.NoError(t, err) if !tt.previouslyCanceled { require.Zero(t, job.CanceledAt.Time.UTC()) @@ -2162,6 +2193,11 @@ func TestCancelPendingPrebuilds(t *testing.T) { if !tt.previouslyCompleted { require.Zero(t, job.CompletedAt.Time.UTC()) } + + // Then: the workspace should not be deleted + workspace, err := db.GetWorkspaceByID(ctx, pendingWorkspace.Workspace.ID) + require.NoError(t, err) + require.False(t, workspace.Deleted) } }) } @@ -2235,25 +2271,45 @@ func TestCancelPendingPrebuilds(t *testing.T) { return prebuilds } - checkIfJobCanceled := func( + checkIfJobCanceledAndDeleted := func( t *testing.T, clock *quartz.Mock, ctx context.Context, db database.Store, - shouldBeCanceled bool, + shouldBeCanceledAndDeleted bool, prebuilds []dbfake.WorkspaceResponse, ) { for _, prebuild := range prebuilds { - job, err := db.GetProvisionerJobByID(ctx, prebuild.Build.JobID) + pendingJob, err := db.GetProvisionerJobByID(ctx, prebuild.Build.JobID) require.NoError(t, err) - if shouldBeCanceled { - require.Equal(t, database.ProvisionerJobStatusCanceled, job.JobStatus) - require.Equal(t, clock.Now().UTC(), 
job.CanceledAt.Time.UTC()) - require.Equal(t, clock.Now().UTC(), job.CompletedAt.Time.UTC()) + if shouldBeCanceledAndDeleted { + // Pending job should be canceled + require.Equal(t, database.ProvisionerJobStatusCanceled, pendingJob.JobStatus) + require.Equal(t, clock.Now().UTC(), pendingJob.CanceledAt.Time.UTC()) + require.Equal(t, clock.Now().UTC(), pendingJob.CompletedAt.Time.UTC()) + + // Workspace should be deleted + deletedWorkspace, err := db.GetWorkspaceByID(ctx, prebuild.Workspace.ID) + require.NoError(t, err) + require.True(t, deletedWorkspace.Deleted) + latestBuild, err := db.GetLatestWorkspaceBuildByWorkspaceID(ctx, deletedWorkspace.ID) + require.NoError(t, err) + require.Equal(t, database.WorkspaceTransitionDelete, latestBuild.Transition) + deleteJob, err := db.GetProvisionerJobByID(ctx, latestBuild.JobID) + require.NoError(t, err) + require.True(t, deleteJob.CompletedAt.Valid) + require.False(t, deleteJob.WorkerID.Valid) + require.Equal(t, database.ProvisionerJobStatusSucceeded, deleteJob.JobStatus) } else { - require.NotEqual(t, database.ProvisionerJobStatusCanceled, job.JobStatus) - require.Zero(t, job.CanceledAt.Time.UTC()) + // Pending job should not be canceled + require.NotEqual(t, database.ProvisionerJobStatusCanceled, pendingJob.JobStatus) + require.Zero(t, pendingJob.CanceledAt.Time.UTC()) + + // Workspace should not be deleted + workspace, err := db.GetWorkspaceByID(ctx, prebuild.Workspace.ID) + require.NoError(t, err) + require.False(t, workspace.Deleted) } } } @@ -2306,26 +2362,72 @@ func TestCancelPendingPrebuilds(t *testing.T) { templateBVersion3Pending := setupPrebuilds(t, db, owner.OrganizationID, templateBID, templateBVersion3ID, templateBVersion3PresetID, 1, true) // When: the reconciliation loop is executed - require.NoError(t, reconciler.ReconcileAll(ctx)) + _, err := reconciler.ReconcileAll(ctx) + require.NoError(t, err) // Then: template A version 1 running workspaces should not be canceled - checkIfJobCanceled(t, clock, ctx, db, 
false, templateAVersion1Running) + checkIfJobCanceledAndDeleted(t, clock, ctx, db, false, templateAVersion1Running) // Then: template A version 1 pending workspaces should be canceled - checkIfJobCanceled(t, clock, ctx, db, true, templateAVersion1Pending) + checkIfJobCanceledAndDeleted(t, clock, ctx, db, true, templateAVersion1Pending) // Then: template A version 2 running and pending workspaces should not be canceled - checkIfJobCanceled(t, clock, ctx, db, false, templateAVersion2Running) - checkIfJobCanceled(t, clock, ctx, db, false, templateAVersion2Pending) + checkIfJobCanceledAndDeleted(t, clock, ctx, db, false, templateAVersion2Running) + checkIfJobCanceledAndDeleted(t, clock, ctx, db, false, templateAVersion2Pending) // Then: template B version 1 running workspaces should not be canceled - checkIfJobCanceled(t, clock, ctx, db, false, templateBVersion1Running) + checkIfJobCanceledAndDeleted(t, clock, ctx, db, false, templateBVersion1Running) // Then: template B version 1 pending workspaces should be canceled - checkIfJobCanceled(t, clock, ctx, db, true, templateBVersion1Pending) + checkIfJobCanceledAndDeleted(t, clock, ctx, db, true, templateBVersion1Pending) // Then: template B version 2 pending workspaces should be canceled - checkIfJobCanceled(t, clock, ctx, db, true, templateBVersion2Pending) + checkIfJobCanceledAndDeleted(t, clock, ctx, db, true, templateBVersion2Pending) // Then: template B version 3 running and pending workspaces should not be canceled - checkIfJobCanceled(t, clock, ctx, db, false, templateBVersion3Running) - checkIfJobCanceled(t, clock, ctx, db, false, templateBVersion3Pending) + checkIfJobCanceledAndDeleted(t, clock, ctx, db, false, templateBVersion3Running) + checkIfJobCanceledAndDeleted(t, clock, ctx, db, false, templateBVersion3Pending) + }) +} + +func TestReconciliationStats(t *testing.T) { + t.Parallel() + + // Setup + clock := quartz.NewReal() + db, ps := dbtestutil.NewDB(t) + client, _, _ := coderdtest.NewWithAPI(t, 
&coderdtest.Options{ + Database: db, + Pubsub: ps, + Clock: clock, }) + fakeEnqueuer := newFakeEnqueuer() + registry := prometheus.NewRegistry() + cache := files.New(registry, &coderdtest.FakeAuthorizer{}) + logger := slogtest.Make(t, &slogtest.Options{IgnoreErrors: false}).Leveled(slog.LevelDebug) + reconciler := prebuilds.NewStoreReconciler(db, ps, cache, codersdk.PrebuildsConfig{}, logger, clock, registry, fakeEnqueuer, newNoopUsageCheckerPtr()) + owner := coderdtest.CreateFirstUser(t, client) + + ctx, cancel := context.WithTimeout(context.Background(), testutil.WaitShort) + defer cancel() + + // Create a template version with a preset + dbfake.TemplateVersion(t, db).Seed(database.TemplateVersion{ + OrganizationID: owner.OrganizationID, + CreatedBy: owner.UserID, + }).Preset(database.TemplateVersionPreset{ + DesiredInstances: sql.NullInt32{ + Int32: 1, + Valid: true, + }, + }).Do() + + // Verify that ReconcileAll tracks and returns elapsed time + start := time.Now() + stats, err := reconciler.ReconcileAll(ctx) + actualElapsed := time.Since(start) + require.NoError(t, err) + require.Greater(t, stats.Elapsed, time.Duration(0)) + + // Verify stats.Elapsed matches actual execution time + require.InDelta(t, actualElapsed.Milliseconds(), stats.Elapsed.Milliseconds(), 100) + // Verify reconciliation loop is not unexpectedly slow + require.Less(t, stats.Elapsed, 5*time.Second) } func newNoopEnqueuer() *notifications.NoopEnqueuer { @@ -2822,7 +2924,7 @@ func TestReconciliationRespectsPauseSetting(t *testing.T) { _ = setupTestDBPreset(t, db, templateVersionID, 2, "test") // Initially, reconciliation should create prebuilds - err := reconciler.ReconcileAll(ctx) + _, err := reconciler.ReconcileAll(ctx) require.NoError(t, err) // Verify that prebuilds were created @@ -2849,7 +2951,7 @@ func TestReconciliationRespectsPauseSetting(t *testing.T) { require.Len(t, workspaces, 0, "prebuilds should be deleted") // Run reconciliation again - it should be paused and not recreate 
prebuilds - err = reconciler.ReconcileAll(ctx) + _, err = reconciler.ReconcileAll(ctx) require.NoError(t, err) // Verify that no new prebuilds were created because reconciliation is paused @@ -2862,7 +2964,7 @@ func TestReconciliationRespectsPauseSetting(t *testing.T) { require.NoError(t, err) // Run reconciliation again - it should now recreate the prebuilds - err = reconciler.ReconcileAll(ctx) + _, err = reconciler.ReconcileAll(ctx) require.NoError(t, err) // Verify that prebuilds were recreated diff --git a/enterprise/coderd/provisionerdaemons.go b/enterprise/coderd/provisionerdaemons.go index be03af29293f9..0f6db2508af97 100644 --- a/enterprise/coderd/provisionerdaemons.go +++ b/enterprise/coderd/provisionerdaemons.go @@ -362,6 +362,7 @@ func (api *API) provisionerDaemonServe(rw http.ResponseWriter, r *http.Request) api.NotificationsEnqueuer, &api.AGPL.PrebuildsReconciler, api.ProvisionerdServerMetrics, + api.AGPL.Experiments, ) if err != nil { if !xerrors.Is(err, context.Canceled) { diff --git a/enterprise/coderd/templates.go b/enterprise/coderd/templates.go index 16f2e7fc4fac9..ff74d9035c1b8 100644 --- a/enterprise/coderd/templates.go +++ b/enterprise/coderd/templates.go @@ -8,6 +8,8 @@ import ( "github.com/google/uuid" "golang.org/x/xerrors" + "cdr.dev/slog" + "github.com/coder/coder/v2/coderd/audit" "github.com/coder/coder/v2/coderd/database" "github.com/coder/coder/v2/coderd/database/db2sdk" @@ -338,3 +340,45 @@ func (api *API) RequireFeatureMW(feat codersdk.FeatureName) func(http.Handler) h }) } } + +// @Summary Invalidate presets for template +// @ID invalidate-presets-for-template +// @Security CoderSessionToken +// @Produce json +// @Tags Enterprise +// @Param template path string true "Template ID" format(uuid) +// @Success 200 {object} codersdk.InvalidatePresetsResponse +// @Router /templates/{template}/prebuilds/invalidate [post] +func (api *API) postInvalidateTemplatePresets(rw http.ResponseWriter, r *http.Request) { + ctx := r.Context() + template 
:= httpmw.TemplateParam(r) + + // Authorization: user must be able to update the template + if !api.Authorize(r, policy.ActionUpdate, template) { + httpapi.ResourceNotFound(rw) + return + } + + // Update last_invalidated_at for all presets of the active template version + invalidatedPresets, err := api.Database.UpdatePresetsLastInvalidatedAt(ctx, database.UpdatePresetsLastInvalidatedAtParams{ + TemplateID: template.ID, + LastInvalidatedAt: sql.NullTime{Time: api.Clock.Now(), Valid: true}, + }) + if err != nil { + httpapi.Write(ctx, rw, http.StatusInternalServerError, codersdk.Response{ + Message: "Failed to invalidate presets.", + Detail: err.Error(), + }) + return + } + + api.Logger.Info(ctx, "invalidated presets", + slog.F("template_id", template.ID), + slog.F("template_name", template.Name), + slog.F("preset_count", len(invalidatedPresets)), + ) + + httpapi.Write(ctx, rw, http.StatusOK, codersdk.InvalidatePresetsResponse{ + Invalidated: db2sdk.InvalidatedPresets(invalidatedPresets), + }) +} diff --git a/enterprise/coderd/templates_test.go b/enterprise/coderd/templates_test.go index e5eafa82f8d1c..f9c431b6446f4 100644 --- a/enterprise/coderd/templates_test.go +++ b/enterprise/coderd/templates_test.go @@ -2111,3 +2111,100 @@ func TestMultipleOrganizationTemplates(t *testing.T) { t.FailNow() } } + +func TestInvalidateTemplatePrebuilds(t *testing.T) { + t.Parallel() + + // Given the following parameters and presets... 
+ templateVersionParameters := []*proto.RichParameter{ + {Name: "param1", Type: "string", Required: false, DefaultValue: "default1"}, + {Name: "param2", Type: "string", Required: false, DefaultValue: "default2"}, + {Name: "param3", Type: "string", Required: false, DefaultValue: "default3"}, + } + presetWithParameters1 := &proto.Preset{ + Name: "Preset With Parameters 1", + Parameters: []*proto.PresetParameter{ + {Name: "param1", Value: "value1"}, + {Name: "param2", Value: "value2"}, + {Name: "param3", Value: "value3"}, + }, + } + presetWithParameters2 := &proto.Preset{ + Name: "Preset With Parameters 2", + Parameters: []*proto.PresetParameter{ + {Name: "param1", Value: "value4"}, + {Name: "param2", Value: "value5"}, + {Name: "param3", Value: "value6"}, + }, + } + + presetWithParameters3 := &proto.Preset{ + Name: "Preset With Parameters 3", + Parameters: []*proto.PresetParameter{ + {Name: "param1", Value: "value7"}, + {Name: "param2", Value: "value8"}, + {Name: "param3", Value: "value9"}, + }, + } + + // Given the template versions and template... 
+ ownerClient, owner := coderdenttest.New(t, &coderdenttest.Options{ + Options: &coderdtest.Options{ + IncludeProvisionerDaemon: true, + }, + LicenseOptions: &coderdenttest.LicenseOptions{ + Features: license.Features{ + codersdk.FeatureTemplateRBAC: 1, + }, + }, + }) + templateAdminClient, _ := coderdtest.CreateAnotherUser(t, ownerClient, owner.OrganizationID, rbac.RoleTemplateAdmin()) + + buildPlanResponse := func(presets ...*proto.Preset) *proto.Response { + return &proto.Response{ + Type: &proto.Response_Plan{ + Plan: &proto.PlanComplete{ + Presets: presets, + Parameters: templateVersionParameters, + }, + }, + } + } + + version1 := coderdtest.CreateTemplateVersion(t, templateAdminClient, owner.OrganizationID, &echo.Responses{ + Parse: echo.ParseComplete, + ProvisionPlan: []*proto.Response{buildPlanResponse(presetWithParameters1, presetWithParameters2)}, + ProvisionApply: echo.ApplyComplete, + }) + coderdtest.AwaitTemplateVersionJobCompleted(t, templateAdminClient, version1.ID) + template := coderdtest.CreateTemplate(t, templateAdminClient, owner.OrganizationID, version1.ID) + + // When + ctx := testutil.Context(t, testutil.WaitLong) + invalidated, err := templateAdminClient.InvalidateTemplatePresets(ctx, template.ID) + require.NoError(t, err) + + // Then + require.Len(t, invalidated.Invalidated, 2) + require.Equal(t, codersdk.InvalidatedPreset{TemplateName: template.Name, TemplateVersionName: version1.Name, PresetName: presetWithParameters1.Name}, invalidated.Invalidated[0]) + require.Equal(t, codersdk.InvalidatedPreset{TemplateName: template.Name, TemplateVersionName: version1.Name, PresetName: presetWithParameters2.Name}, invalidated.Invalidated[1]) + + // Given the template is updated... 
+ version2 := coderdtest.UpdateTemplateVersion(t, templateAdminClient, owner.OrganizationID, &echo.Responses{ + Parse: echo.ParseComplete, + ProvisionPlan: []*proto.Response{buildPlanResponse(presetWithParameters2, presetWithParameters3)}, + ProvisionApply: echo.ApplyComplete, + }, template.ID) + coderdtest.AwaitTemplateVersionJobCompleted(t, templateAdminClient, version2.ID) + err = templateAdminClient.UpdateActiveTemplateVersion(ctx, template.ID, codersdk.UpdateActiveTemplateVersion{ID: version2.ID}) + require.NoError(t, err) + + // When + invalidated, err = templateAdminClient.InvalidateTemplatePresets(ctx, template.ID) + require.NoError(t, err) + + // Then: it should only invalidate the presets from the currently active version (preset2 and preset3) + require.Len(t, invalidated.Invalidated, 2) + require.Equal(t, codersdk.InvalidatedPreset{TemplateName: template.Name, TemplateVersionName: version2.Name, PresetName: presetWithParameters2.Name}, invalidated.Invalidated[0]) + require.Equal(t, codersdk.InvalidatedPreset{TemplateName: template.Name, TemplateVersionName: version2.Name, PresetName: presetWithParameters3.Name}, invalidated.Invalidated[1]) +} diff --git a/enterprise/coderd/workspaceagents_test.go b/enterprise/coderd/workspaceagents_test.go index 2e4690bc961a9..a150c0cdc06d5 100644 --- a/enterprise/coderd/workspaceagents_test.go +++ b/enterprise/coderd/workspaceagents_test.go @@ -95,7 +95,7 @@ func TestReinitializeAgent(t *testing.T) { // Ensure that workspace agents can reinitialize against claimed prebuilds in non-default organizations: for _, useDefaultOrg := range []bool{true, false} { - t.Run("", func(t *testing.T) { + t.Run(fmt.Sprintf("useDefaultOrg=%t", useDefaultOrg), func(t *testing.T) { t.Parallel() tempAgentLog := testutil.CreateTemp(t, "", "testReinitializeAgent") diff --git a/enterprise/coderd/workspaces_test.go b/enterprise/coderd/workspaces_test.go index 5201e613f7a1d..7cf9cd890b6df 100644 --- a/enterprise/coderd/workspaces_test.go +++ 
b/enterprise/coderd/workspaces_test.go @@ -7,8 +7,6 @@ import ( "encoding/json" "fmt" "net/http" - "os" - "os/exec" "path/filepath" "strings" "sync/atomic" @@ -833,6 +831,73 @@ func TestWorkspaceAutobuild(t *testing.T) { require.True(t, ws.LastUsedAt.After(dormantLastUsedAt)) }) + // This test has been added to ensure we don't introduce a regression + // to this issue https://github.com/coder/coder/issues/20711. + t.Run("DormantAutostop", func(t *testing.T) { + t.Parallel() + + var ( + ticker = make(chan time.Time) + statCh = make(chan autobuild.Stats) + inactiveTTL = time.Minute + logger = slogtest.Make(t, &slogtest.Options{IgnoreErrors: true}).Leveled(slog.LevelDebug) + ) + + client, db, user := coderdenttest.NewWithDatabase(t, &coderdenttest.Options{ + Options: &coderdtest.Options{ + AutobuildTicker: ticker, + AutobuildStats: statCh, + IncludeProvisionerDaemon: true, + TemplateScheduleStore: schedule.NewEnterpriseTemplateScheduleStore(agplUserQuietHoursScheduleStore(), notifications.NewNoopEnqueuer(), logger, nil), + }, + LicenseOptions: &coderdenttest.LicenseOptions{ + Features: license.Features{codersdk.FeatureAdvancedTemplateScheduling: 1}, + }, + }) + + // Create a template version that includes agents on both start AND stop builds. + // This simulates a template without `count = data.coder_workspace.me.start_count`. 
+ authToken := uuid.NewString() + version := coderdtest.CreateTemplateVersion(t, client, user.OrganizationID, &echo.Responses{ + Parse: echo.ParseComplete, + ProvisionPlan: echo.PlanComplete, + ProvisionApply: echo.ProvisionApplyWithAgent(authToken), + }) + + template := coderdtest.CreateTemplate(t, client, user.OrganizationID, version.ID, func(ctr *codersdk.CreateTemplateRequest) { + ctr.TimeTilDormantMillis = ptr.Ref[int64](inactiveTTL.Milliseconds()) + }) + + coderdtest.AwaitTemplateVersionJobCompleted(t, client, version.ID) + ws := coderdtest.CreateWorkspace(t, client, template.ID) + build := coderdtest.AwaitWorkspaceBuildJobCompleted(t, client, ws.LatestBuild.ID) + require.Equal(t, codersdk.WorkspaceStatusRunning, build.Status) + + // Simulate the workspace becoming inactive and transitioning to dormant. + tickTime := ws.LastUsedAt.Add(inactiveTTL * 2) + + p, err := coderdtest.GetProvisionerForTags(db, time.Now(), ws.OrganizationID, nil) + require.NoError(t, err) + coderdtest.UpdateProvisionerLastSeenAt(t, db, p.ID, tickTime) + ticker <- tickTime + stats := <-statCh + + // Expect workspace to transition to stopped state. + require.Len(t, stats.Transitions, 1) + require.Equal(t, stats.Transitions[ws.ID], database.WorkspaceTransitionStop) + + // The autostop build should succeed even though the template includes + // agents without `count = data.coder_workspace.me.start_count`. + // This verifies that provisionerd has permission to create agents on + // dormant workspaces during stop builds. 
+ ws = coderdtest.MustWorkspace(t, client, ws.ID) + require.NotNil(t, ws.DormantAt, "workspace should be marked as dormant") + require.Equal(t, codersdk.WorkspaceTransitionStop, ws.LatestBuild.Transition) + + latestBuild := coderdtest.AwaitWorkspaceBuildJobCompleted(t, client, ws.LatestBuild.ID) + require.Equal(t, codersdk.WorkspaceStatusStopped, latestBuild.Status) + }) + // This test serves as a regression prevention for generating // audit logs in the same transaction the transition workspaces to // the dormant state. The auditor that is passed to autobuild does @@ -3390,52 +3455,23 @@ func workspaceTagsTerraform(t *testing.T, tc testWorkspaceTagsTerraformCase, dyn } } -// downloadProviders is a test helper that creates a temporary file and writes a -// terraform CLI config file with a provider_installation stanza for coder/coder -// using dev_overrides. It also fetches the latest provider release from GitHub -// and extracts the binary to the temporary dir. It is the responsibility of the -// caller to set TF_CLI_CONFIG_FILE. +// downloadProviders is a test helper that caches Terraform providers and returns +// the path to a Terraform CLI config file that uses the cached providers. +// This uses the shared testutil caching infrastructure to avoid re-downloading +// providers on every test run. It is the responsibility of the caller to set +// TF_CLI_CONFIG_FILE. +// On Windows, provider caching is not supported and an empty string is returned. 
func downloadProviders(t *testing.T, providersTf string) string { t.Helper() - // We firstly write a Terraform CLI config file to a temporary directory: - var ( - tempDir = t.TempDir() - cacheDir = filepath.Join(tempDir, ".cache") - providersTfPath = filepath.Join(tempDir, "providers.tf") - cliConfigPath = filepath.Join(tempDir, "local.tfrc") - ) - // Write files to disk - require.NoError(t, os.MkdirAll(cacheDir, os.ModePerm|os.ModeDir)) - require.NoError(t, os.WriteFile(providersTfPath, []byte(providersTf), os.ModePerm)) // nolint:gosec - cliConfigTemplate := ` - provider_installation { - filesystem_mirror { - path = %q - include = ["*/*/*"] - } - direct { - exclude = ["*/*/*"] - } - }` - err := os.WriteFile(cliConfigPath, []byte(fmt.Sprintf(cliConfigTemplate, cacheDir)), os.ModePerm) // nolint:gosec - require.NoError(t, err, "failed to write %s", cliConfigPath) - - ctx := testutil.Context(t, testutil.WaitLong) - - // Run terraform providers mirror to mirror required providers to cacheDir - cmd := exec.CommandContext(ctx, "terraform", "providers", "mirror", cacheDir) - cmd.Env = os.Environ() // without this terraform may complain about path - cmd.Env = append(cmd.Env, "TF_CLI_CONFIG_FILE="+cliConfigPath) - cmd.Dir = tempDir - out, err := cmd.CombinedOutput() - if !assert.NoError(t, err) { - t.Log("failed to download providers:") - t.Log(string(out)) - t.FailNow() - } + cacheRootDir := filepath.Join(testutil.PersistentCacheDir(t), "terraform_workspace_tags_test") + templateFiles := map[string]string{"providers.tf": providersTf} + testName := "TestWorkspaceTagsTerraform" - t.Logf("Set TF_CLI_CONFIG_FILE=%s", cliConfigPath) + cliConfigPath := testutil.CacheTFProviders(t, cacheRootDir, testName, templateFiles) + if cliConfigPath != "" { + t.Logf("Set TF_CLI_CONFIG_FILE=%s", cliConfigPath) + } return cliConfigPath } diff --git a/enterprise/scaletest/prebuilds/run_test.go b/enterprise/scaletest/prebuilds/run_test.go new file mode 100644 index 
0000000000000..4334d0c0961bc --- /dev/null +++ b/enterprise/scaletest/prebuilds/run_test.go @@ -0,0 +1,141 @@ +package prebuilds_test + +import ( + "io" + "strconv" + "sync" + "testing" + + "github.com/prometheus/client_golang/prometheus" + "github.com/stretchr/testify/require" + "golang.org/x/sync/errgroup" + + "github.com/coder/coder/v2/codersdk" + "github.com/coder/coder/v2/enterprise/coderd/coderdenttest" + "github.com/coder/coder/v2/enterprise/coderd/license" + "github.com/coder/coder/v2/scaletest/prebuilds" + "github.com/coder/coder/v2/testutil" + "github.com/coder/quartz" +) + +func TestRun(t *testing.T) { + t.Parallel() + + t.Skip("This test takes several minutes to run, and is intended as a manual regression test") + + ctx := testutil.Context(t, testutil.WaitSuperLong*3) + + client, user := coderdenttest.New(t, &coderdenttest.Options{ + LicenseOptions: &coderdenttest.LicenseOptions{ + Features: license.Features{ + codersdk.FeatureWorkspacePrebuilds: 1, + codersdk.FeatureExternalProvisionerDaemons: 1, + }, + }, + }) + + // This is a real Terraform provisioner + _ = coderdenttest.NewExternalProvisionerDaemonTerraform(t, client, user.OrganizationID, nil) + + numTemplates := 2 + numPresets := 1 + numPresetPrebuilds := 1 + + //nolint:gocritic // It's fine to use the owner user to pause prebuilds + err := client.PutPrebuildsSettings(ctx, codersdk.PrebuildsSettings{ + ReconciliationPaused: true, + }) + require.NoError(t, err) + + setupBarrier := new(sync.WaitGroup) + setupBarrier.Add(numTemplates) + creationBarrier := new(sync.WaitGroup) + creationBarrier.Add(numTemplates) + deletionSetupBarrier := new(sync.WaitGroup) + deletionSetupBarrier.Add(1) + deletionBarrier := new(sync.WaitGroup) + deletionBarrier.Add(numTemplates) + + metrics := prebuilds.NewMetrics(prometheus.NewRegistry()) + + eg, runCtx := errgroup.WithContext(ctx) + + runners := make([]*prebuilds.Runner, 0, numTemplates) + for i := range numTemplates { + cfg := prebuilds.Config{ + OrganizationID: 
user.OrganizationID, + NumPresets: numPresets, + NumPresetPrebuilds: numPresetPrebuilds, + TemplateVersionJobTimeout: testutil.WaitSuperLong * 2, + PrebuildWorkspaceTimeout: testutil.WaitSuperLong * 2, + Metrics: metrics, + SetupBarrier: setupBarrier, + CreationBarrier: creationBarrier, + DeletionSetupBarrier: deletionSetupBarrier, + DeletionBarrier: deletionBarrier, + Clock: quartz.NewReal(), + } + err := cfg.Validate() + require.NoError(t, err) + + runner := prebuilds.NewRunner(client, cfg) + runners = append(runners, runner) + eg.Go(func() error { + return runner.Run(runCtx, strconv.Itoa(i), io.Discard) + }) + } + + // Wait for all runners to reach the setup barrier (templates created) + setupBarrier.Wait() + + // Resume prebuilds to trigger prebuild creation + err = client.PutPrebuildsSettings(ctx, codersdk.PrebuildsSettings{ + ReconciliationPaused: false, + }) + require.NoError(t, err) + + // Wait for all runners to reach the creation barrier (prebuilds created) + creationBarrier.Wait() + + //nolint:gocritic // Owner user is fine here as we want to view all workspaces + workspaces, err := client.Workspaces(ctx, codersdk.WorkspaceFilter{}) + require.NoError(t, err) + expectedWorkspaces := numTemplates * numPresets * numPresetPrebuilds + require.Equal(t, workspaces.Count, expectedWorkspaces) + + // Pause prebuilds before deletion setup + err = client.PutPrebuildsSettings(ctx, codersdk.PrebuildsSettings{ + ReconciliationPaused: true, + }) + require.NoError(t, err) + + // Signal runners that prebuilds are paused and they can prepare for deletion + deletionSetupBarrier.Done() + + // Wait for all runners to reach the deletion barrier (template versions updated to 0 prebuilds) + deletionBarrier.Wait() + + // Resume prebuilds to trigger prebuild deletion + err = client.PutPrebuildsSettings(ctx, codersdk.PrebuildsSettings{ + ReconciliationPaused: false, + }) + require.NoError(t, err) + + err = eg.Wait() + require.NoError(t, err) + + //nolint:gocritic // Owner user is 
fine here as we want to view all workspaces + workspaces, err = client.Workspaces(ctx, codersdk.WorkspaceFilter{}) + require.NoError(t, err) + require.Equal(t, workspaces.Count, 0) + + cleanupEg, cleanupCtx := errgroup.WithContext(ctx) + for i, runner := range runners { + cleanupEg.Go(func() error { + return runner.Cleanup(cleanupCtx, strconv.Itoa(i), io.Discard) + }) + } + + err = cleanupEg.Wait() + require.NoError(t, err) +} diff --git a/enterprise/wsproxy/wsproxy.go b/enterprise/wsproxy/wsproxy.go index 734f6b2b594c8..ecd5df9cde17a 100644 --- a/enterprise/wsproxy/wsproxy.go +++ b/enterprise/wsproxy/wsproxy.go @@ -39,6 +39,7 @@ import ( "github.com/coder/coder/v2/enterprise/derpmesh" "github.com/coder/coder/v2/enterprise/replicasync" "github.com/coder/coder/v2/enterprise/wsproxy/wsproxysdk" + sharedhttpmw "github.com/coder/coder/v2/httpmw" "github.com/coder/coder/v2/site" "github.com/coder/coder/v2/tailnet" ) @@ -328,7 +329,7 @@ func New(ctx context.Context, opts *Options) (*Server, error) { // Persistent middlewares to all routes r.Use( // TODO: @emyrk Should we standardize these in some other package? - httpmw.Recover(s.Logger), + sharedhttpmw.Recover(s.Logger), httpmw.WithProfilingLabels, tracing.StatusWriterMiddleware, tracing.Middleware(s.TracerProvider), diff --git a/examples/templates/docker/main.tf b/examples/templates/docker/main.tf index a3535042b0799..7bb580e514920 100644 --- a/examples/templates/docker/main.tf +++ b/examples/templates/docker/main.tf @@ -141,7 +141,7 @@ module "jetbrains" { agent_id = coder_agent.main.id agent_name = "main" folder = "/home/coder" - tooltip = "You need to [Install Coder Desktop](https://coder.com/docs/user-guides/desktop#install-coder-desktop) to use this button." + tooltip = "You need to [install JetBrains Toolbox](https://coder.com/docs/user-guides/workspace-access/jetbrains/toolbox) to use this app." 
} resource "docker_volume" "home_volume" { diff --git a/examples/templates/tasks-docker/main.tf b/examples/templates/tasks-docker/main.tf index 8a457584a4674..c378335c45d0c 100644 --- a/examples/templates/tasks-docker/main.tf +++ b/examples/templates/tasks-docker/main.tf @@ -1,7 +1,8 @@ terraform { required_providers { coder = { - source = "coder/coder" + source = "coder/coder" + version = ">= 2.13" } docker = { source = "kreuzwerker/docker" @@ -12,22 +13,32 @@ terraform { # This template requires a valid Docker socket # However, you can reference our Kubernetes/VM # example templates and adapt the Claude Code module -# -# see: https://registry.coder.com/templates +# +# See: https://registry.coder.com/templates provider "docker" {} +# A `coder_ai_task` resource enables Tasks and associates +# the task with the coder_app that will act as an AI agent. +resource "coder_ai_task" "task" { + count = data.coder_workspace.me.start_count + app_id = module.claude-code[count.index].task_app_id +} + +# You can read the task prompt from the `coder_task` data source. +data "coder_task" "me" {} + # The Claude Code module does the automatic task reporting # Other agent modules: https://registry.coder.com/modules?search=agent -# Or use a custom agent: +# Or use a custom agent: module "claude-code" { count = data.coder_workspace.me.start_count source = "registry.coder.com/coder/claude-code/coder" - version = "3.3.2" + version = "4.2.1" agent_id = coder_agent.main.id workdir = "/home/coder/projects" order = 999 claude_api_key = "" - ai_prompt = data.coder_parameter.ai_prompt.value + ai_prompt = data.coder_task.me.prompt system_prompt = data.coder_parameter.system_prompt.value model = "sonnet" permission_mode = "plan" @@ -51,13 +62,13 @@ data "coder_workspace_preset" "default" { (servers, dev watchers, GUI apps). - Built-in tools - use for everything else: (file operations, git commands, builds & installs, one-off shell commands) - + Remember this decision rule: - Stays running? 
→ desktop-commander - Finishes immediately? → built-in tools - + -- Context -- - There is an existing app and tmux dev server running on port 8000. Be sure to read it's CLAUDE.md (./realworld-django-rest-framework-angular/CLAUDE.md) to learn more about it. + There is an existing app and tmux dev server running on port 8000. Be sure to read it's CLAUDE.md (./realworld-django-rest-framework-angular/CLAUDE.md) to learn more about it. Since this app is for demo purposes and the user is previewing the homepage and subsequent pages, aim to make the first visual change/prototype very quickly so the user can preview it, then focus on backend or logic which can be a more involved, long-running architecture plan. @@ -107,7 +118,7 @@ data "coder_workspace_preset" "default" { # Pre-builds is a Coder Premium # feature to speed up workspace creation - # + # # see https://coder.com/docs/admin/templates/extending-templates/prebuilt-workspaces # prebuilds { # instances = 1 @@ -126,13 +137,6 @@ data "coder_parameter" "system_prompt" { description = "System prompt for the agent with generalized instructions" mutable = false } -data "coder_parameter" "ai_prompt" { - type = "string" - name = "AI Prompt" - default = "" - description = "Write a prompt for Claude Code" - mutable = true -} data "coder_parameter" "setup_script" { name = "setup_script" display_name = "Setup Script" @@ -373,4 +377,4 @@ resource "docker_container" "workspace" { label = "coder.workspace_name" value = data.coder_workspace.me.name } -} \ No newline at end of file +} diff --git a/flake.nix b/flake.nix index c76c5bbba61ba..38eb53b68faee 100644 --- a/flake.nix +++ b/flake.nix @@ -84,6 +84,31 @@ vendorHash = null; }; + # Custom sqlc build from coder/sqlc fork to fix ambiguous column bug, see: + # - https://github.com/coder/sqlc/pull/1 + # - https://github.com/sqlc-dev/sqlc/pull/4159 + # + # To update hashes: + # 1. 
Run: `nix --extra-experimental-features 'nix-command flakes' build .#devShells.x86_64-linux.default` + # 2. Nix will fail with the correct sha256 hash for src + # 3. Update the sha256 and run again + # 4. Nix will fail with the correct vendorHash + # 5. Update the vendorHash + sqlc-custom = unstablePkgs.buildGo124Module { + pname = "sqlc"; + version = "coder-fork-aab4e865a51df0c43e1839f81a9d349b41d14f05"; + + src = pkgs.fetchFromGitHub { + owner = "coder"; + repo = "sqlc"; + rev = "aab4e865a51df0c43e1839f81a9d349b41d14f05"; + sha256 = "sha256-zXjTypEFWDOkoZMKHMMRtAz2coNHSCkQ+nuZ8rOnzZ8="; + }; + + subPackages = [ "cmd/sqlc" ]; + vendorHash = "sha256-69kg3qkvEWyCAzjaCSr3a73MNonub9sZTYyGaCW+UTI="; + }; + # Packages required to build the frontend frontendPackages = with pkgs; @@ -163,7 +188,8 @@ ripgrep shellcheck (pinnedPkgs.shfmt) - sqlc + # sqlc + sqlc-custom syft unstablePkgs.terraform typos diff --git a/go.mod b/go.mod index d87c8300234c8..44713ef25fc17 100644 --- a/go.mod +++ b/go.mod @@ -1,6 +1,6 @@ module github.com/coder/coder/v2 -go 1.24.6 +go 1.24.10 // Required until a v3 of chroma is created to lazily initialize all XML files. 
// None of our dependencies seem to use the registries anyways, so this @@ -82,7 +82,7 @@ require ( github.com/aquasecurity/trivy-iac v0.8.0 github.com/armon/circbuf v0.0.0-20190214190532-5111143e8da2 github.com/awalterschulze/gographviz v2.0.3+incompatible - github.com/aws/smithy-go v1.23.0 + github.com/aws/smithy-go v1.23.2 github.com/bramvdbogaerde/go-scp v1.5.0 github.com/briandowns/spinner v1.23.0 github.com/cakturk/go-netstat v0.0.0-20200220111822-e5b49efee7a5 @@ -98,13 +98,13 @@ require ( github.com/coder/flog v1.1.0 github.com/coder/guts v1.6.1 github.com/coder/pretty v0.0.0-20230908205945-e89ba86370e0 - github.com/coder/quartz v0.2.1 + github.com/coder/quartz v0.3.0 github.com/coder/retry v1.5.1 - github.com/coder/serpent v0.11.0 - github.com/coder/terraform-provider-coder/v2 v2.12.0 + github.com/coder/serpent v0.12.0 + github.com/coder/terraform-provider-coder/v2 v2.13.1 github.com/coder/websocket v1.8.13 github.com/coder/wgtunnel v0.1.13-0.20240522110300-ade90dfb2da0 - github.com/coreos/go-oidc/v3 v3.16.0 + github.com/coreos/go-oidc/v3 v3.17.0 github.com/coreos/go-systemd v0.0.0-20191104093116-d3cd4ed1dbcf github.com/creack/pty v1.1.21 github.com/dave/dst v0.27.2 @@ -144,7 +144,7 @@ require ( github.com/hashicorp/yamux v0.1.2 github.com/hinshun/vt10x v0.0.0-20220301184237-5011da428d02 github.com/imulab/go-scim/pkg/v2 v2.2.0 - github.com/jedib0t/go-pretty/v6 v6.6.7 + github.com/jedib0t/go-pretty/v6 v6.7.1 github.com/jmoiron/sqlx v1.4.0 github.com/justinas/nosurf v1.2.0 github.com/kballard/go-shellquote v0.0.0-20180428030007-95032a82bc51 @@ -165,9 +165,9 @@ require ( github.com/pkg/diff v0.0.0-20210226163009-20ebb0f2a09e github.com/pkg/sftp v1.13.7 github.com/prometheus-community/pro-bing v0.7.0 - github.com/prometheus/client_golang v1.23.0 + github.com/prometheus/client_golang v1.23.2 github.com/prometheus/client_model v0.6.2 - github.com/prometheus/common v0.65.0 + github.com/prometheus/common v0.67.4 github.com/quasilyte/go-ruleguard/dsl v0.3.22 
github.com/robfig/cron/v3 v3.0.1 github.com/shirou/gopsutil/v4 v4.25.5 @@ -189,25 +189,25 @@ require ( go.opentelemetry.io/otel v1.38.0 go.opentelemetry.io/otel/exporters/otlp/otlptrace v1.37.0 go.opentelemetry.io/otel/exporters/otlp/otlptrace/otlptracegrpc v1.37.0 - go.opentelemetry.io/otel/sdk v1.37.0 + go.opentelemetry.io/otel/sdk v1.38.0 go.opentelemetry.io/otel/trace v1.38.0 go.uber.org/atomic v1.11.0 go.uber.org/goleak v1.3.1-0.20240429205332-517bace7cc29 go.uber.org/mock v0.6.0 go4.org/netipx v0.0.0-20230728180743-ad4cb58a6516 - golang.org/x/crypto v0.43.0 + golang.org/x/crypto v0.45.0 golang.org/x/exp v0.0.0-20251023183803-a4bb9ffd2546 - golang.org/x/mod v0.29.0 - golang.org/x/net v0.46.0 - golang.org/x/oauth2 v0.32.0 - golang.org/x/sync v0.17.0 - golang.org/x/sys v0.37.0 - golang.org/x/term v0.36.0 - golang.org/x/text v0.30.0 - golang.org/x/tools v0.38.0 + golang.org/x/mod v0.30.0 + golang.org/x/net v0.47.0 + golang.org/x/oauth2 v0.33.0 + golang.org/x/sync v0.18.0 + golang.org/x/sys v0.38.0 + golang.org/x/term v0.37.0 + golang.org/x/text v0.31.0 + golang.org/x/tools v0.39.0 golang.org/x/xerrors v0.0.0-20240903120638-7835f813f4da - google.golang.org/api v0.253.0 - google.golang.org/grpc v1.76.0 + google.golang.org/api v0.256.0 + google.golang.org/grpc v1.77.0 google.golang.org/protobuf v1.36.10 gopkg.in/DataDog/dd-trace-go.v1 v1.74.0 gopkg.in/natefinch/lumberjack.v2 v2.2.1 @@ -255,20 +255,20 @@ require ( github.com/apparentlymart/go-textseg/v15 v15.0.0 // indirect github.com/armon/go-radix v1.0.1-0.20221118154546-54df44f2176c // indirect github.com/atotto/clipboard v0.1.4 // indirect - github.com/aws/aws-sdk-go-v2 v1.39.2 - github.com/aws/aws-sdk-go-v2/config v1.31.3 - github.com/aws/aws-sdk-go-v2/credentials v1.18.7 // indirect - github.com/aws/aws-sdk-go-v2/feature/ec2/imds v1.18.4 // indirect + github.com/aws/aws-sdk-go-v2 v1.40.0 + github.com/aws/aws-sdk-go-v2/config v1.32.1 + github.com/aws/aws-sdk-go-v2/credentials v1.19.1 // indirect + 
github.com/aws/aws-sdk-go-v2/feature/ec2/imds v1.18.14 // indirect github.com/aws/aws-sdk-go-v2/feature/rds/auth v1.6.2 - github.com/aws/aws-sdk-go-v2/internal/configsources v1.4.9 // indirect - github.com/aws/aws-sdk-go-v2/internal/endpoints/v2 v2.7.9 // indirect - github.com/aws/aws-sdk-go-v2/internal/ini v1.8.3 // indirect - github.com/aws/aws-sdk-go-v2/service/internal/accept-encoding v1.13.0 // indirect - github.com/aws/aws-sdk-go-v2/service/internal/presigned-url v1.13.4 // indirect + github.com/aws/aws-sdk-go-v2/internal/configsources v1.4.14 // indirect + github.com/aws/aws-sdk-go-v2/internal/endpoints/v2 v2.7.14 // indirect + github.com/aws/aws-sdk-go-v2/internal/ini v1.8.4 // indirect + github.com/aws/aws-sdk-go-v2/service/internal/accept-encoding v1.13.3 // indirect + github.com/aws/aws-sdk-go-v2/service/internal/presigned-url v1.13.14 // indirect github.com/aws/aws-sdk-go-v2/service/ssm v1.60.1 // indirect - github.com/aws/aws-sdk-go-v2/service/sso v1.28.2 // indirect - github.com/aws/aws-sdk-go-v2/service/ssooidc v1.34.0 // indirect - github.com/aws/aws-sdk-go-v2/service/sts v1.38.0 // indirect + github.com/aws/aws-sdk-go-v2/service/sso v1.30.4 // indirect + github.com/aws/aws-sdk-go-v2/service/ssooidc v1.35.9 // indirect + github.com/aws/aws-sdk-go-v2/service/sts v1.41.1 // indirect github.com/aymanbagabas/go-osc52/v2 v2.0.1 // indirect github.com/aymerick/douceur v0.2.0 // indirect github.com/beorn7/perks v1.0.1 // indirect @@ -324,7 +324,7 @@ require ( github.com/google/pprof v0.0.0-20250607225305-033d6d78b36a // indirect github.com/google/s2a-go v0.1.9 // indirect github.com/google/shlex v0.0.0-20191202100458-e7afc7fbc510 // indirect - github.com/googleapis/enterprise-certificate-proxy v0.3.6 // indirect + github.com/googleapis/enterprise-certificate-proxy v0.3.7 // indirect github.com/googleapis/gax-go/v2 v2.15.0 // indirect github.com/gorilla/css v1.0.1 // indirect github.com/grpc-ecosystem/grpc-gateway/v2 v2.27.1 // indirect @@ -381,7 +381,7 @@ 
require ( github.com/niklasfasching/go-org v1.9.1 // indirect github.com/opencontainers/go-digest v1.0.0 // indirect github.com/opencontainers/image-spec v1.1.1 // indirect - github.com/opencontainers/runc v1.2.3 // indirect + github.com/opencontainers/runc v1.2.8 // indirect github.com/outcaste-io/ristretto v0.2.3 // indirect github.com/pelletier/go-toml/v2 v2.2.4 // indirect github.com/philhofer/fwd v1.1.3-0.20240916144458-20a13a1f6b7c // indirect @@ -434,7 +434,7 @@ require ( github.com/yusufpapurcu/wmi v1.2.4 // indirect github.com/zclconf/go-cty v1.17.0 github.com/zeebo/errs v1.4.0 // indirect - go.opentelemetry.io/auto/sdk v1.1.0 // indirect + go.opentelemetry.io/auto/sdk v1.2.1 // indirect go.opentelemetry.io/collector/component v1.27.0 // indirect go.opentelemetry.io/collector/pdata v1.27.0 // indirect go.opentelemetry.io/collector/pdata/pprofile v0.121.0 // indirect @@ -452,15 +452,15 @@ require ( golang.zx2c4.com/wireguard/windows v0.5.3 // indirect google.golang.org/appengine v1.6.8 // indirect google.golang.org/genproto v0.0.0-20250715232539-7130f93afb79 // indirect - google.golang.org/genproto/googleapis/api v0.0.0-20250804133106-a7a43d27e69b // indirect - google.golang.org/genproto/googleapis/rpc v0.0.0-20251014184007-4626949a642f // indirect + google.golang.org/genproto/googleapis/api v0.0.0-20251022142026-3a174f9686a8 // indirect + google.golang.org/genproto/googleapis/rpc v0.0.0-20251103181224-f26f9409b101 // indirect gopkg.in/ini.v1 v1.67.0 // indirect howett.net/plist v1.0.0 // indirect kernel.org/pub/linux/libs/security/libcap/psx v1.2.73 // indirect sigs.k8s.io/yaml v1.5.0 // indirect ) -require github.com/coder/clistat v1.1.1 +require github.com/coder/clistat v1.1.2 require github.com/SherClockHolmes/webpush-go v1.4.0 @@ -468,18 +468,19 @@ require ( github.com/charmbracelet/colorprofile v0.2.3-0.20250311203215-f60798e515dc // indirect github.com/charmbracelet/x/cellbuf v0.0.13 // indirect github.com/go-json-experiment/json 
v0.0.0-20250725192818-e39067aee2d2 // indirect - github.com/golang-jwt/jwt/v5 v5.2.3 // indirect + github.com/golang-jwt/jwt/v5 v5.3.0 // indirect github.com/xo/terminfo v0.0.0-20220910002029-abceb7e1c41e // indirect ) require ( - github.com/anthropics/anthropic-sdk-go v1.13.0 - github.com/brianvoe/gofakeit/v7 v7.8.0 + github.com/anthropics/anthropic-sdk-go v1.18.0 + github.com/brianvoe/gofakeit/v7 v7.9.0 github.com/coder/agentapi-sdk-go v0.0.0-20250505131810-560d1d88d225 - github.com/coder/aibridge v0.1.6 + github.com/coder/aibridge v0.2.0 github.com/coder/aisdk-go v0.0.9 github.com/coder/boundary v1.0.1-0.20250925154134-55a44f2a7945 github.com/coder/preview v1.0.4 + github.com/danieljoos/wincred v1.2.3 github.com/dgraph-io/ristretto/v2 v2.3.0 github.com/fsnotify/fsnotify v1.9.0 github.com/go-git/go-git/v5 v5.16.2 @@ -498,7 +499,7 @@ require ( github.com/DataDog/datadog-agent/comp/core/tagger/origindetection v0.64.2 // indirect github.com/DataDog/datadog-agent/pkg/version v0.64.2 // indirect github.com/DataDog/dd-trace-go/v2 v2.0.0 // indirect - github.com/GoogleCloudPlatform/opentelemetry-operations-go/detectors/gcp v1.29.0 // indirect + github.com/GoogleCloudPlatform/opentelemetry-operations-go/detectors/gcp v1.30.0 // indirect github.com/GoogleCloudPlatform/opentelemetry-operations-go/exporter/metric v0.53.0 // indirect github.com/GoogleCloudPlatform/opentelemetry-operations-go/internal/resourcemapping v0.53.0 // indirect github.com/Masterminds/semver/v3 v3.3.1 // indirect @@ -510,13 +511,14 @@ require ( github.com/aquasecurity/trivy-checks v1.11.3-0.20250604022615-9a7efa7c9169 // indirect github.com/aws/aws-sdk-go v1.55.7 // indirect github.com/aws/aws-sdk-go-v2/aws/protocol/eventstream v1.6.11 // indirect + github.com/aws/aws-sdk-go-v2/service/signin v1.0.1 // indirect github.com/bahlo/generic-list-go v0.2.0 // indirect github.com/bgentry/go-netrc v0.0.0-20140422174119-9fd32a8b3d3d // indirect github.com/buger/jsonparser v1.1.1 // indirect 
github.com/cenkalti/backoff/v5 v5.0.2 // indirect github.com/charmbracelet/x/exp/slice v0.0.0-20250327172914-2fdc97757edf // indirect - github.com/cncf/xds/go v0.0.0-20250501225837-2ac532fd4443 // indirect - github.com/envoyproxy/go-control-plane/envoy v1.32.4 // indirect + github.com/cncf/xds/go v0.0.0-20251022180443-0feb69152e9f // indirect + github.com/envoyproxy/go-control-plane/envoy v1.35.0 // indirect github.com/envoyproxy/protoc-gen-validate v1.2.1 // indirect github.com/esiqveland/notify v0.13.3 // indirect github.com/go-git/gcfg v1.5.1-0.20230307220236-3a3c6141e376 // indirect @@ -541,18 +543,19 @@ require ( github.com/samber/lo v1.51.0 // indirect github.com/sergeymakinen/go-bmp v1.0.0 // indirect github.com/sergeymakinen/go-ico v1.0.0-beta.0 // indirect - github.com/spiffe/go-spiffe/v2 v2.5.0 // indirect + github.com/spiffe/go-spiffe/v2 v2.6.0 // indirect github.com/tidwall/sjson v1.2.5 // indirect github.com/tmaxmax/go-sse v0.11.0 // indirect github.com/ulikunitz/xz v0.5.15 // indirect github.com/vektah/gqlparser/v2 v2.5.28 // indirect github.com/wk8/go-ordered-map/v2 v2.1.8 // indirect + github.com/xhit/go-str2duration/v2 v2.1.0 // indirect github.com/yosida95/uritemplate/v3 v3.0.2 // indirect github.com/zeebo/xxh3 v1.0.2 // indirect - go.opentelemetry.io/contrib/detectors/gcp v1.37.0 // indirect + go.opentelemetry.io/contrib/detectors/gcp v1.38.0 // indirect go.opentelemetry.io/contrib/instrumentation/google.golang.org/grpc/otelgrpc v0.62.0 // indirect - go.opentelemetry.io/otel/sdk/metric v1.37.0 // indirect - go.yaml.in/yaml/v2 v2.4.2 // indirect + go.opentelemetry.io/otel/sdk/metric v1.38.0 // indirect + go.yaml.in/yaml/v2 v2.4.3 // indirect google.golang.org/genai v1.12.0 // indirect gopkg.in/warnings.v0 v0.1.2 // indirect k8s.io/utils v0.0.0-20241210054802-24370beab758 // indirect diff --git a/go.sum b/go.sum index 2655cb93f2cbd..2cdd8dcccfbb9 100644 --- a/go.sum +++ b/go.sum @@ -668,8 +668,8 @@ 
github.com/DataDog/opentelemetry-mapping-go/pkg/otlp/attributes v0.26.0 h1:GlvoS github.com/DataDog/opentelemetry-mapping-go/pkg/otlp/attributes v0.26.0/go.mod h1:mYQmU7mbHH6DrCaS8N6GZcxwPoeNfyuopUoLQltwSzs= github.com/DataDog/sketches-go v1.4.7 h1:eHs5/0i2Sdf20Zkj0udVFWuCrXGRFig2Dcfm5rtcTxc= github.com/DataDog/sketches-go v1.4.7/go.mod h1:eAmQ/EBmtSO+nQp7IZMZVRPT4BQTmIc5RZQ+deGlTPM= -github.com/GoogleCloudPlatform/opentelemetry-operations-go/detectors/gcp v1.29.0 h1:UQUsRi8WTzhZntp5313l+CHIAT95ojUI2lpP/ExlZa4= -github.com/GoogleCloudPlatform/opentelemetry-operations-go/detectors/gcp v1.29.0/go.mod h1:Cz6ft6Dkn3Et6l2v2a9/RpN7epQ1GtDlO6lj8bEcOvw= +github.com/GoogleCloudPlatform/opentelemetry-operations-go/detectors/gcp v1.30.0 h1:sBEjpZlNHzK1voKq9695PJSX2o5NEXl7/OL3coiIY0c= +github.com/GoogleCloudPlatform/opentelemetry-operations-go/detectors/gcp v1.30.0/go.mod h1:P4WPRUkOhJC13W//jWpyfJNDAIpvRbAUIYLX/4jtlE0= github.com/GoogleCloudPlatform/opentelemetry-operations-go/exporter/metric v0.53.0 h1:owcC2UnmsZycprQ5RfRgjydWhuoxg71LUfyiQdijZuM= github.com/GoogleCloudPlatform/opentelemetry-operations-go/exporter/metric v0.53.0/go.mod h1:ZPpqegjbE99EPKsu3iUWV22A04wzGPcAY/ziSIQEEgs= github.com/GoogleCloudPlatform/opentelemetry-operations-go/internal/cloudmock v0.53.0 h1:4LP6hvB4I5ouTbGgWtixJhgED6xdf67twf9PoY96Tbg= @@ -726,8 +726,8 @@ github.com/andybalholm/brotli v1.2.0 h1:ukwgCxwYrmACq68yiUqwIWnGY0cTPox/M94sVwTo github.com/andybalholm/brotli v1.2.0/go.mod h1:rzTDkvFWvIrjDXZHkuS16NPggd91W3kUSvPlQ1pLaKY= github.com/anmitsu/go-shlex v0.0.0-20200514113438-38f4b401e2be h1:9AeTilPcZAjCFIImctFaOjnTIavg87rW78vTPkQqLI8= github.com/anmitsu/go-shlex v0.0.0-20200514113438-38f4b401e2be/go.mod h1:ySMOLuWl6zY27l47sB3qLNK6tF2fkHG55UZxx8oIVo4= -github.com/anthropics/anthropic-sdk-go v1.13.0 h1:Bhbe8sRoDPtipttg8bQYrMCKe2b79+q6rFW1vOKEUKI= -github.com/anthropics/anthropic-sdk-go v1.13.0/go.mod h1:WTz31rIUHUHqai2UslPpw5CwXrQP3geYBioRV4WOLvE= +github.com/anthropics/anthropic-sdk-go v1.18.0 
h1:jfxRA7AqZoCm83nHO/OVQp8xuwjUKtBziEdMbfmofHU= +github.com/anthropics/anthropic-sdk-go v1.18.0/go.mod h1:WTz31rIUHUHqai2UslPpw5CwXrQP3geYBioRV4WOLvE= github.com/antihax/optional v1.0.0/go.mod h1:uupD/76wgC+ih3iEmQUL+0Ugr19nfwCT1kdvxnR2qWY= github.com/apache/arrow/go/v10 v10.0.1/go.mod h1:YvhnlEePVnBS4+0z3fhPfUy7W1Ikj0Ih0vcRo/gZ1M0= github.com/apache/arrow/go/v11 v11.0.0/go.mod h1:Eg5OsL5H+e299f7u5ssuXsuHQVEGC4xei5aX110hRiI= @@ -762,38 +762,40 @@ github.com/awalterschulze/gographviz v2.0.3+incompatible/go.mod h1:GEV5wmg4YquNw github.com/aws/aws-sdk-go v1.44.122/go.mod h1:y4AeaBuwd2Lk+GepC1E9v0qOiTws0MIWAX4oIKwKHZo= github.com/aws/aws-sdk-go v1.55.7 h1:UJrkFq7es5CShfBwlWAC8DA077vp8PyVbQd3lqLiztE= github.com/aws/aws-sdk-go v1.55.7/go.mod h1:eRwEWoyTWFMVYVQzKMNHWP5/RV4xIUGMQfXQHfHkpNU= -github.com/aws/aws-sdk-go-v2 v1.39.2 h1:EJLg8IdbzgeD7xgvZ+I8M1e0fL0ptn/M47lianzth0I= -github.com/aws/aws-sdk-go-v2 v1.39.2/go.mod h1:sDioUELIUO9Znk23YVmIk86/9DOpkbyyVb1i/gUNFXY= +github.com/aws/aws-sdk-go-v2 v1.40.0 h1:/WMUA0kjhZExjOQN2z3oLALDREea1A7TobfuiBrKlwc= +github.com/aws/aws-sdk-go-v2 v1.40.0/go.mod h1:c9pm7VwuW0UPxAEYGyTmyurVcNrbF6Rt/wixFqDhcjE= github.com/aws/aws-sdk-go-v2/aws/protocol/eventstream v1.6.11 h1:12SpdwU8Djs+YGklkinSSlcrPyj3H4VifVsKf78KbwA= github.com/aws/aws-sdk-go-v2/aws/protocol/eventstream v1.6.11/go.mod h1:dd+Lkp6YmMryke+qxW/VnKyhMBDTYP41Q2Bb+6gNZgY= -github.com/aws/aws-sdk-go-v2/config v1.31.3 h1:RIb3yr/+PZ18YYNe6MDiG/3jVoJrPmdoCARwNkMGvco= -github.com/aws/aws-sdk-go-v2/config v1.31.3/go.mod h1:jjgx1n7x0FAKl6TnakqrpkHWWKcX3xfWtdnIJs5K9CE= -github.com/aws/aws-sdk-go-v2/credentials v1.18.7 h1:zqg4OMrKj+t5HlswDApgvAHjxKtlduKS7KicXB+7RLg= -github.com/aws/aws-sdk-go-v2/credentials v1.18.7/go.mod h1:/4M5OidTskkgkv+nCIfC9/tbiQ/c8qTox9QcUDV0cgc= -github.com/aws/aws-sdk-go-v2/feature/ec2/imds v1.18.4 h1:lpdMwTzmuDLkgW7086jE94HweHCqG+uOJwHf3LZs7T0= -github.com/aws/aws-sdk-go-v2/feature/ec2/imds v1.18.4/go.mod h1:9xzb8/SV62W6gHQGC/8rrvgNXU6ZoYM3sAIJCIrXJxY= 
+github.com/aws/aws-sdk-go-v2/config v1.32.1 h1:iODUDLgk3q8/flEC7ymhmxjfoAnBDwEEYEVyKZ9mzjU= +github.com/aws/aws-sdk-go-v2/config v1.32.1/go.mod h1:xoAgo17AGrPpJBSLg81W+ikM0cpOZG8ad04T2r+d5P0= +github.com/aws/aws-sdk-go-v2/credentials v1.19.1 h1:JeW+EwmtTE0yXFK8SmklrFh/cGTTXsQJumgMZNlbxfM= +github.com/aws/aws-sdk-go-v2/credentials v1.19.1/go.mod h1:BOoXiStwTF+fT2XufhO0Efssbi1CNIO/ZXpZu87N0pw= +github.com/aws/aws-sdk-go-v2/feature/ec2/imds v1.18.14 h1:WZVR5DbDgxzA0BJeudId89Kmgy6DIU4ORpxwsVHz0qA= +github.com/aws/aws-sdk-go-v2/feature/ec2/imds v1.18.14/go.mod h1:Dadl9QO0kHgbrH1GRqGiZdYtW5w+IXXaBNCHTIaheM4= github.com/aws/aws-sdk-go-v2/feature/rds/auth v1.6.2 h1:QbFjOdplTkOgviHNKyTW/TZpvIYhD6lqEc3tkIvqMoQ= github.com/aws/aws-sdk-go-v2/feature/rds/auth v1.6.2/go.mod h1:d0pTYUeTv5/tPSlbPZZQSqssM158jZBs02jx2LDslM8= -github.com/aws/aws-sdk-go-v2/internal/configsources v1.4.9 h1:se2vOWGD3dWQUtfn4wEjRQJb1HK1XsNIt825gskZ970= -github.com/aws/aws-sdk-go-v2/internal/configsources v1.4.9/go.mod h1:hijCGH2VfbZQxqCDN7bwz/4dzxV+hkyhjawAtdPWKZA= -github.com/aws/aws-sdk-go-v2/internal/endpoints/v2 v2.7.9 h1:6RBnKZLkJM4hQ+kN6E7yWFveOTg8NLPHAkqrs4ZPlTU= -github.com/aws/aws-sdk-go-v2/internal/endpoints/v2 v2.7.9/go.mod h1:V9rQKRmK7AWuEsOMnHzKj8WyrIir1yUJbZxDuZLFvXI= -github.com/aws/aws-sdk-go-v2/internal/ini v1.8.3 h1:bIqFDwgGXXN1Kpp99pDOdKMTTb5d2KyU5X/BZxjOkRo= -github.com/aws/aws-sdk-go-v2/internal/ini v1.8.3/go.mod h1:H5O/EsxDWyU+LP/V8i5sm8cxoZgc2fdNR9bxlOFrQTo= -github.com/aws/aws-sdk-go-v2/service/internal/accept-encoding v1.13.0 h1:6+lZi2JeGKtCraAj1rpoZfKqnQ9SptseRZioejfUOLM= -github.com/aws/aws-sdk-go-v2/service/internal/accept-encoding v1.13.0/go.mod h1:eb3gfbVIxIoGgJsi9pGne19dhCBpK6opTYpQqAmdy44= -github.com/aws/aws-sdk-go-v2/service/internal/presigned-url v1.13.4 h1:ueB2Te0NacDMnaC+68za9jLwkjzxGWm0KB5HTUHjLTI= -github.com/aws/aws-sdk-go-v2/service/internal/presigned-url v1.13.4/go.mod h1:nLEfLnVMmLvyIG58/6gsSA03F1voKGaCfHV7+lR8S7s= 
+github.com/aws/aws-sdk-go-v2/internal/configsources v1.4.14 h1:PZHqQACxYb8mYgms4RZbhZG0a7dPW06xOjmaH0EJC/I= +github.com/aws/aws-sdk-go-v2/internal/configsources v1.4.14/go.mod h1:VymhrMJUWs69D8u0/lZ7jSB6WgaG/NqHi3gX0aYf6U0= +github.com/aws/aws-sdk-go-v2/internal/endpoints/v2 v2.7.14 h1:bOS19y6zlJwagBfHxs0ESzr1XCOU2KXJCWcq3E2vfjY= +github.com/aws/aws-sdk-go-v2/internal/endpoints/v2 v2.7.14/go.mod h1:1ipeGBMAxZ0xcTm6y6paC2C/J6f6OO7LBODV9afuAyM= +github.com/aws/aws-sdk-go-v2/internal/ini v1.8.4 h1:WKuaxf++XKWlHWu9ECbMlha8WOEGm0OUEZqm4K/Gcfk= +github.com/aws/aws-sdk-go-v2/internal/ini v1.8.4/go.mod h1:ZWy7j6v1vWGmPReu0iSGvRiise4YI5SkR3OHKTZ6Wuc= +github.com/aws/aws-sdk-go-v2/service/internal/accept-encoding v1.13.3 h1:x2Ibm/Af8Fi+BH+Hsn9TXGdT+hKbDd5XOTZxTMxDk7o= +github.com/aws/aws-sdk-go-v2/service/internal/accept-encoding v1.13.3/go.mod h1:IW1jwyrQgMdhisceG8fQLmQIydcT/jWY21rFhzgaKwo= +github.com/aws/aws-sdk-go-v2/service/internal/presigned-url v1.13.14 h1:FIouAnCE46kyYqyhs0XEBDFFSREtdnr8HQuLPQPLCrY= +github.com/aws/aws-sdk-go-v2/service/internal/presigned-url v1.13.14/go.mod h1:UTwDc5COa5+guonQU8qBikJo1ZJ4ln2r1MkF7Dqag1E= +github.com/aws/aws-sdk-go-v2/service/signin v1.0.1 h1:BDgIUYGEo5TkayOWv/oBLPphWwNm/A91AebUjAu5L5g= +github.com/aws/aws-sdk-go-v2/service/signin v1.0.1/go.mod h1:iS6EPmNeqCsGo+xQmXv0jIMjyYtQfnwg36zl2FwEouk= github.com/aws/aws-sdk-go-v2/service/ssm v1.60.1 h1:OwMzNDe5VVTXD4kGmeK/FtqAITiV8Mw4TCa8IyNO0as= github.com/aws/aws-sdk-go-v2/service/ssm v1.60.1/go.mod h1:IyVabkWrs8SNdOEZLyFFcW9bUltV4G6OQS0s6H20PHg= -github.com/aws/aws-sdk-go-v2/service/sso v1.28.2 h1:ve9dYBB8CfJGTFqcQ3ZLAAb/KXWgYlgu/2R2TZL2Ko0= -github.com/aws/aws-sdk-go-v2/service/sso v1.28.2/go.mod h1:n9bTZFZcBa9hGGqVz3i/a6+NG0zmZgtkB9qVVFDqPA8= -github.com/aws/aws-sdk-go-v2/service/ssooidc v1.34.0 h1:Bnr+fXrlrPEoR1MAFrHVsge3M/WoK4n23VNhRM7TPHI= -github.com/aws/aws-sdk-go-v2/service/ssooidc v1.34.0/go.mod h1:eknndR9rU8UpE/OmFpqU78V1EcXPKFTTm5l/buZYgvM= 
-github.com/aws/aws-sdk-go-v2/service/sts v1.38.0 h1:iV1Ko4Em/lkJIsoKyGfc0nQySi+v0Udxr6Igq+y9JZc= -github.com/aws/aws-sdk-go-v2/service/sts v1.38.0/go.mod h1:bEPcjW7IbolPfK67G1nilqWyoxYMSPrDiIQ3RdIdKgo= -github.com/aws/smithy-go v1.23.0 h1:8n6I3gXzWJB2DxBDnfxgBaSX6oe0d/t10qGz7OKqMCE= -github.com/aws/smithy-go v1.23.0/go.mod h1:t1ufH5HMublsJYulve2RKmHDC15xu1f26kHCp/HgceI= +github.com/aws/aws-sdk-go-v2/service/sso v1.30.4 h1:U//SlnkE1wOQiIImxzdY5PXat4Wq+8rlfVEw4Y7J8as= +github.com/aws/aws-sdk-go-v2/service/sso v1.30.4/go.mod h1:av+ArJpoYf3pgyrj6tcehSFW+y9/QvAY8kMooR9bZCw= +github.com/aws/aws-sdk-go-v2/service/ssooidc v1.35.9 h1:LU8S9W/mPDAU9q0FjCLi0TrCheLMGwzbRpvUMwYspcA= +github.com/aws/aws-sdk-go-v2/service/ssooidc v1.35.9/go.mod h1:/j67Z5XBVDx8nZVp9EuFM9/BS5dvBznbqILGuu73hug= +github.com/aws/aws-sdk-go-v2/service/sts v1.41.1 h1:GdGmKtG+/Krag7VfyOXV17xjTCz0i9NT+JnqLTOI5nA= +github.com/aws/aws-sdk-go-v2/service/sts v1.41.1/go.mod h1:6TxbXoDSgBQ225Qd8Q+MbxUxUh6TtNKwbRt/EPS9xso= +github.com/aws/smithy-go v1.23.2 h1:Crv0eatJUQhaManss33hS5r40CG3ZFH+21XSkqMrIUM= +github.com/aws/smithy-go v1.23.2/go.mod h1:LEj2LM3rBRQJxPZTB4KuzZkaZYnZPnvgIhb4pu07mx0= github.com/aymanbagabas/go-osc52/v2 v2.0.1 h1:HwpRHbFMcZLEVr42D4p7XBqjyuxQH5SMiErDT4WkJ2k= github.com/aymanbagabas/go-osc52/v2 v2.0.1/go.mod h1:uYgXzlJ7ZpABp8OJ+exZzJJhRNQ2ASbcXHWsFqH8hp8= github.com/aymanbagabas/go-udiff v0.2.0 h1:TK0fH4MteXUDspT88n8CKzvK0X9O2xu9yQjWpi6yML8= @@ -842,8 +844,8 @@ github.com/boombuler/barcode v1.0.0/go.mod h1:paBWMcWSl3LHKBqUq+rly7CNSldXjb2rDl github.com/boombuler/barcode v1.0.1/go.mod h1:paBWMcWSl3LHKBqUq+rly7CNSldXjb2rDl3JlRe0mD8= github.com/bramvdbogaerde/go-scp v1.5.0 h1:a9BinAjTfQh273eh7vd3qUgmBC+bx+3TRDtkZWmIpzM= github.com/bramvdbogaerde/go-scp v1.5.0/go.mod h1:on2aH5AxaFb2G0N5Vsdy6B0Ml7k9HuHSwfo1y0QzAbQ= -github.com/brianvoe/gofakeit/v7 v7.8.0 h1:FHLerglGVodD2O4pnQPCmFlkmIRXp8MpAflnarW5sQM= -github.com/brianvoe/gofakeit/v7 v7.8.0/go.mod h1:QXuPeBw164PJCzCUZVmgpgHJ3Llj49jSLVkKPMtxtxA= 
+github.com/brianvoe/gofakeit/v7 v7.9.0 h1:6NsaMy9D5ZKVwIZ1V8L//J2FrOF3546FcXDElWLx994= +github.com/brianvoe/gofakeit/v7 v7.9.0/go.mod h1:QXuPeBw164PJCzCUZVmgpgHJ3Llj49jSLVkKPMtxtxA= github.com/buger/jsonparser v1.1.1 h1:2PnMjfWD7wBILjqQbt530v576A/cAbQvEW9gGIpYMUs= github.com/buger/jsonparser v1.1.1/go.mod h1:6RYKKt7H4d4+iWqouImQ9R2FZql3VbhNgx27UK13J/0= github.com/bytecodealliance/wasmtime-go/v3 v3.0.2 h1:3uZCA/BLTIu+DqCfguByNMJa2HVHpXvjfy0Dy7g6fuA= @@ -913,20 +915,20 @@ github.com/cncf/xds/go v0.0.0-20211011173535-cb28da3451f1/go.mod h1:eXthEFrGJvWH github.com/cncf/xds/go v0.0.0-20220314180256-7f1daf1720fc/go.mod h1:eXthEFrGJvWHgFFCl3hGmgk+/aYT6PnTQLykKQRLhEs= github.com/cncf/xds/go v0.0.0-20230105202645-06c439db220b/go.mod h1:eXthEFrGJvWHgFFCl3hGmgk+/aYT6PnTQLykKQRLhEs= github.com/cncf/xds/go v0.0.0-20230607035331-e9ce68804cb4/go.mod h1:eXthEFrGJvWHgFFCl3hGmgk+/aYT6PnTQLykKQRLhEs= -github.com/cncf/xds/go v0.0.0-20250501225837-2ac532fd4443 h1:aQ3y1lwWyqYPiWZThqv1aFbZMiM9vblcSArJRf2Irls= -github.com/cncf/xds/go v0.0.0-20250501225837-2ac532fd4443/go.mod h1:W+zGtBO5Y1IgJhy4+A9GOqVhqLpfZi+vwmdNXUehLA8= +github.com/cncf/xds/go v0.0.0-20251022180443-0feb69152e9f h1:Y8xYupdHxryycyPlc9Y+bSQAYZnetRJ70VMVKm5CKI0= +github.com/cncf/xds/go v0.0.0-20251022180443-0feb69152e9f/go.mod h1:HlzOvOjVBOfTGSRXRyY0OiCS/3J1akRGQQpRO/7zyF4= github.com/coder/agentapi-sdk-go v0.0.0-20250505131810-560d1d88d225 h1:tRIViZ5JRmzdOEo5wUWngaGEFBG8OaE1o2GIHN5ujJ8= github.com/coder/agentapi-sdk-go v0.0.0-20250505131810-560d1d88d225/go.mod h1:rNLVpYgEVeu1Zk29K64z6Od8RBP9DwqCu9OfCzh8MR4= -github.com/coder/aibridge v0.1.6 h1:Ax9hfAQeg+vLeE9M/IMmARuBuGyCuFdMw8bpy72/fD8= -github.com/coder/aibridge v0.1.6/go.mod h1:7GhrLbzf6uM3sCA7OPaDzvq9QNrCjNuzMy+WgipYwfQ= +github.com/coder/aibridge v0.2.0 h1:kAWhHD6fsmDLH1WxIwXPu9Ineijj+lVniko45C003Vo= +github.com/coder/aibridge v0.2.0/go.mod h1:2T0RSnIX1WTqFajzXsaNsoNe6mmNsNeCTxiHBWEsFnE= github.com/coder/aisdk-go v0.0.9 h1:Vzo/k2qwVGLTR10ESDeP2Ecek1SdPfZlEjtTfMveiVo= 
github.com/coder/aisdk-go v0.0.9/go.mod h1:KF6/Vkono0FJJOtWtveh5j7yfNrSctVTpwgweYWSp5M= github.com/coder/boundary v1.0.1-0.20250925154134-55a44f2a7945 h1:hDUf02kTX8EGR3+5B+v5KdYvORs4YNfDPci0zCs+pC0= github.com/coder/boundary v1.0.1-0.20250925154134-55a44f2a7945/go.mod h1:d1AMFw81rUgrGHuZzWdPNhkY0G8w7pvLNLYF0e3ceC4= github.com/coder/bubbletea v1.2.2-0.20241212190825-007a1cdb2c41 h1:SBN/DA63+ZHwuWwPHPYoCZ/KLAjHv5g4h2MS4f2/MTI= github.com/coder/bubbletea v1.2.2-0.20241212190825-007a1cdb2c41/go.mod h1:I9ULxr64UaOSUv7hcb3nX4kowodJCVS7vt7VVJk/kW4= -github.com/coder/clistat v1.1.1 h1:T45dlwr7fSmjLPGLk7QRKgynnDeMOPoraHSGtLIHY3s= -github.com/coder/clistat v1.1.1/go.mod h1:F+gLef+F9chVrleq808RBxdaoq52R4VLopuLdAsh8Y4= +github.com/coder/clistat v1.1.2 h1:1WzCsEQ/VFBNyxu5ryy0Pdb6rrMh+byCp3aZMkn9k/E= +github.com/coder/clistat v1.1.2/go.mod h1:F+gLef+F9chVrleq808RBxdaoq52R4VLopuLdAsh8Y4= github.com/coder/flog v1.1.0 h1:kbAes1ai8fIS5OeV+QAnKBQE22ty1jRF/mcAwHpLBa4= github.com/coder/flog v1.1.0/go.mod h1:UQlQvrkJBvnRGo69Le8E24Tcl5SJleAAR7gYEHzAmdQ= github.com/coder/glog v1.0.1-0.20220322161911-7365fe7f2cd1/go.mod h1:EWib/APOK0SL3dFbYqvxE3UYd8E6s1ouQ7iEp/0LWV4= @@ -942,20 +944,20 @@ github.com/coder/pretty v0.0.0-20230908205945-e89ba86370e0 h1:3A0ES21Ke+FxEM8CXx github.com/coder/pretty v0.0.0-20230908205945-e89ba86370e0/go.mod h1:5UuS2Ts+nTToAMeOjNlnHFkPahrtDkmpydBen/3wgZc= github.com/coder/preview v1.0.4 h1:f506bnyhHtI3ICl/8Eb/gemcKvm/AGzQ91uyxjF+D9k= github.com/coder/preview v1.0.4/go.mod h1:PpLayC3ngQQ0iUhW2yVRFszOooto4JrGGMomv1rqUvA= -github.com/coder/quartz v0.2.1 h1:QgQ2Vc1+mvzewg2uD/nj8MJ9p9gE+QhGJm+Z+NGnrSE= -github.com/coder/quartz v0.2.1/go.mod h1:vsiCc+AHViMKH2CQpGIpFgdHIEQsxwm8yCscqKmzbRA= +github.com/coder/quartz v0.3.0 h1:bUoSEJ77NBfKtUqv6CPSC0AS8dsjqAqqAv7bN02m1mg= +github.com/coder/quartz v0.3.0/go.mod h1:BgE7DOj/8NfvRgvKw0jPLDQH/2Lya2kxcTaNJ8X0rZk= github.com/coder/retry v1.5.1 h1:iWu8YnD8YqHs3XwqrqsjoBTAVqT9ml6z9ViJ2wlMiqc= github.com/coder/retry v1.5.1/go.mod 
h1:blHMk9vs6LkoRT9ZHyuZo360cufXEhrxqvEzeMtRGoY= -github.com/coder/serpent v0.11.0 h1:VKIIbBg0ManopqqDsutBGf7YYTUXsPQgBx//m1SJQ90= -github.com/coder/serpent v0.11.0/go.mod h1:cZFW6/fP+kE9nd/oRkEHJpG6sXCtQ+AX7WMMEHv0Y3Q= +github.com/coder/serpent v0.12.0 h1:fUu3qVjeRvVy3DB/C2EFFvOctm+f2HKyckyfA86O63Q= +github.com/coder/serpent v0.12.0/go.mod h1:mPEpD8Cq106E0glBs5ROAAGoALLtD5HAAMVZmjf4zO0= github.com/coder/ssh v0.0.0-20231128192721-70855dedb788 h1:YoUSJ19E8AtuUFVYBpXuOD6a/zVP3rcxezNsoDseTUw= github.com/coder/ssh v0.0.0-20231128192721-70855dedb788/go.mod h1:aGQbuCLyhRLMzZF067xc84Lh7JDs1FKwCmF1Crl9dxQ= github.com/coder/tailscale v1.1.1-0.20250829055706-6eafe0f9199e h1:9RKGKzGLHtTvVBQublzDGtCtal3cXP13diCHoAIGPeI= github.com/coder/tailscale v1.1.1-0.20250829055706-6eafe0f9199e/go.mod h1:jU9T1vEs+DOs8NtGp1F2PT0/TOGVwtg/JCCKYRgvMOs= github.com/coder/terraform-config-inspect v0.0.0-20250107175719-6d06d90c630e h1:JNLPDi2P73laR1oAclY6jWzAbucf70ASAvf5mh2cME0= github.com/coder/terraform-config-inspect v0.0.0-20250107175719-6d06d90c630e/go.mod h1:Gz/z9Hbn+4KSp8A2FBtNszfLSdT2Tn/uAKGuVqqWmDI= -github.com/coder/terraform-provider-coder/v2 v2.12.0 h1:guxDoZdBRfZqAgVlsJ+TLvV2uIBQ4RelsRpSPOT84tk= -github.com/coder/terraform-provider-coder/v2 v2.12.0/go.mod h1:4LVPWatHaTAdQS1v5A0pVn3g8XkNKkQ/xh+U2oXr/o0= +github.com/coder/terraform-provider-coder/v2 v2.13.1 h1:dtPaJUvueFm+XwBPUMWQCc5Z1QUQBW4B4RNyzX4h4y8= +github.com/coder/terraform-provider-coder/v2 v2.13.1/go.mod h1:2irB3W8xRUo73nP5w6lN/dhN3abeCIKpqg8zElKIX/I= github.com/coder/trivy v0.0.0-20250807211036-0bb0acd620a8 h1:VYB/6cIIKsVkwXOAWbqpj4Ux+WwF/XTnRyvHcwfHZ7A= github.com/coder/trivy v0.0.0-20250807211036-0bb0acd620a8/go.mod h1:O73tP+UvJlI2GQZD060Jt0sf+6alKcGAgORh6sgB0+M= github.com/coder/websocket v1.8.13 h1:f3QZdXy7uGVz+4uCJy2nTZyM0yTBj8yANEHhqlXZ9FE= @@ -976,8 +978,8 @@ github.com/containerd/platforms v1.0.0-rc.1 h1:83KIq4yy1erSRgOVHNk1HYdPvzdJ5CnsW github.com/containerd/platforms v1.0.0-rc.1/go.mod 
h1:J71L7B+aiM5SdIEqmd9wp6THLVRzJGXfNuWCZCllLA4= github.com/coreos/go-iptables v0.6.0 h1:is9qnZMPYjLd8LYqmm/qlE+wwEgJIkTYdhV3rfZo4jk= github.com/coreos/go-iptables v0.6.0/go.mod h1:Qe8Bv2Xik5FyTXwgIbLAnv2sWSBmvWdFETJConOQ//Q= -github.com/coreos/go-oidc/v3 v3.16.0 h1:qRQUCFstKpXwmEjDQTIbyY/5jF00+asXzSkmkoa/mow= -github.com/coreos/go-oidc/v3 v3.16.0/go.mod h1:wqPbKFrVnE90vty060SB40FCJ8fTHTxSwyXJqZH+sI8= +github.com/coreos/go-oidc/v3 v3.17.0 h1:hWBGaQfbi0iVviX4ibC7bk8OKT5qNr4klBaCHVNvehc= +github.com/coreos/go-oidc/v3 v3.17.0/go.mod h1:wqPbKFrVnE90vty060SB40FCJ8fTHTxSwyXJqZH+sI8= github.com/coreos/go-systemd v0.0.0-20191104093116-d3cd4ed1dbcf h1:iW4rZ826su+pqaw19uhpSCzhj44qo35pNgKFGqzDKkU= github.com/coreos/go-systemd v0.0.0-20191104093116-d3cd4ed1dbcf/go.mod h1:F5haX7vjVVG0kc13fIWeqUViNPyEJxv/OmvnBo0Yme4= github.com/cpuguy83/dockercfg v0.3.2 h1:DlJTyZGBDlXqUZ2Dk2Q3xHs/FtnooJJVaad2S9GKorA= @@ -985,8 +987,10 @@ github.com/cpuguy83/dockercfg v0.3.2/go.mod h1:sugsbF4//dDlL/i+S+rtpIWp+5h0BHJHf github.com/creack/pty v1.1.9/go.mod h1:oKZEueFk5CKHvIhNR5MUki03XCEU+Q6VDXinZuGJ33E= github.com/creack/pty v1.1.21 h1:1/QdRyBaHHJP61QkWMXlOIBfsgdDeeKfK8SYVUWJKf0= github.com/creack/pty v1.1.21/go.mod h1:MOBLtS5ELjhRRrroQr9kyvTxUAFNvYEK993ew/Vr4O4= -github.com/cyphar/filepath-securejoin v0.4.1 h1:JyxxyPEaktOD+GAnqIqTf9A8tHyAG22rowi7HkoSU1s= -github.com/cyphar/filepath-securejoin v0.4.1/go.mod h1:Sdj7gXlvMcPZsbhwhQ33GguGLDGQL7h7bg04C/+u9jI= +github.com/cyphar/filepath-securejoin v0.5.1 h1:eYgfMq5yryL4fbWfkLpFFy2ukSELzaJOTaUTuh+oF48= +github.com/cyphar/filepath-securejoin v0.5.1/go.mod h1:Sdj7gXlvMcPZsbhwhQ33GguGLDGQL7h7bg04C/+u9jI= +github.com/danieljoos/wincred v1.2.3 h1:v7dZC2x32Ut3nEfRH+vhoZGvN72+dQ/snVXo/vMFLdQ= +github.com/danieljoos/wincred v1.2.3/go.mod h1:6qqX0WNrS4RzPZ1tnroDzq9kY3fu1KwE7MRLQK4X0bs= github.com/dave/dst v0.27.2 h1:4Y5VFTkhGLC1oddtNwuxxe36pnyLxMFXT51FOzH8Ekc= github.com/dave/dst v0.27.2/go.mod h1:jHh6EOibnHgcUW3WjKHisiooEkYwqpHLBSX1iOBhEyc= 
github.com/dave/jennifer v1.6.1 h1:T4T/67t6RAA5AIV6+NP8Uk/BIsXgDoqEowgycdQQLuk= @@ -1054,10 +1058,10 @@ github.com/envoyproxy/go-control-plane v0.9.10-0.20210907150352-cf90f659a021/go. github.com/envoyproxy/go-control-plane v0.10.2-0.20220325020618-49ff273808a1/go.mod h1:KJwIaB5Mv44NWtYuAOFCVOjcI94vtpEz2JU/D2v6IjE= github.com/envoyproxy/go-control-plane v0.10.3/go.mod h1:fJJn/j26vwOu972OllsvAgJJM//w9BV6Fxbg2LuVd34= github.com/envoyproxy/go-control-plane v0.11.1-0.20230524094728-9239064ad72f/go.mod h1:sfYdkwUW4BA3PbKjySwjJy+O4Pu0h62rlqCMHNk+K+Q= -github.com/envoyproxy/go-control-plane v0.13.4 h1:zEqyPVyku6IvWCFwux4x9RxkLOMUL+1vC9xUFv5l2/M= -github.com/envoyproxy/go-control-plane v0.13.4/go.mod h1:kDfuBlDVsSj2MjrLEtRWtHlsWIFcGyB2RMO44Dc5GZA= -github.com/envoyproxy/go-control-plane/envoy v1.32.4 h1:jb83lalDRZSpPWW2Z7Mck/8kXZ5CQAFYVjQcdVIr83A= -github.com/envoyproxy/go-control-plane/envoy v1.32.4/go.mod h1:Gzjc5k8JcJswLjAx1Zm+wSYE20UrLtt7JZMWiWQXQEw= +github.com/envoyproxy/go-control-plane v0.13.5-0.20251024222203-75eaa193e329 h1:K+fnvUM0VZ7ZFJf0n4L/BRlnsb9pL/GuDG6FqaH+PwM= +github.com/envoyproxy/go-control-plane v0.13.5-0.20251024222203-75eaa193e329/go.mod h1:Alz8LEClvR7xKsrq3qzoc4N0guvVNSS8KmSChGYr9hs= +github.com/envoyproxy/go-control-plane/envoy v1.35.0 h1:ixjkELDE+ru6idPxcHLj8LBVc2bFP7iBytj353BoHUo= +github.com/envoyproxy/go-control-plane/envoy v1.35.0/go.mod h1:09qwbGVuSWWAyN5t/b3iyVfz5+z8QWGrzkoqm/8SbEs= github.com/envoyproxy/go-control-plane/ratelimit v0.1.0 h1:/G9QYbddjL25KvtKTv3an9lx6VBE2cnb8wp1vEGNYGI= github.com/envoyproxy/go-control-plane/ratelimit v0.1.0/go.mod h1:Wk+tMFAFbCXaJPzVVHnPgRKdUdwW/KdbRt94AzgRee4= github.com/envoyproxy/protoc-gen-validate v0.1.0/go.mod h1:iSmxcyjqTsJpI2R4NaDN7+kN2VEUnK/pcBlmesArF7c= @@ -1219,8 +1223,8 @@ github.com/gohugoio/localescompressed v1.0.1/go.mod h1:jBF6q8D7a0vaEmcWPNcAjUZLJ github.com/golang-jwt/jwt/v4 v4.5.2 h1:YtQM7lnr8iZ+j5q71MGKkNw9Mn7AjHM68uc9g5fXeUI= github.com/golang-jwt/jwt/v4 v4.5.2/go.mod 
h1:m21LjoU+eqJr34lmDMbreY2eSTRJ1cv77w39/MY0Ch0= github.com/golang-jwt/jwt/v5 v5.2.1/go.mod h1:pqrtFR0X4osieyHYxtmOUWsAWrfe1Q5UVIyoH402zdk= -github.com/golang-jwt/jwt/v5 v5.2.3 h1:kkGXqQOBSDDWRhWNXTFpqGSCMyh/PLnqUvMGJPDJDs0= -github.com/golang-jwt/jwt/v5 v5.2.3/go.mod h1:pqrtFR0X4osieyHYxtmOUWsAWrfe1Q5UVIyoH402zdk= +github.com/golang-jwt/jwt/v5 v5.3.0 h1:pv4AsKCKKZuqlgs5sUmn4x8UlGa0kEVt/puTpKx9vvo= +github.com/golang-jwt/jwt/v5 v5.3.0/go.mod h1:fxCRLWMO43lRc8nhHWY6LGqRcf+1gQWArsqaEUEa5bE= github.com/golang-migrate/migrate/v4 v4.19.0 h1:RcjOnCGz3Or6HQYEJ/EEVLfWnmw9KnoigPSjzhCuaSE= github.com/golang-migrate/migrate/v4 v4.19.0/go.mod h1:9dyEcu+hO+G9hPSw8AIg50yg622pXJsoHItQnDGZkI0= github.com/golang/freetype v0.0.0-20170609003504-e2365dfdc4a0/go.mod h1:E/TSTwGwJL78qG/PmXZO1EjYhfJinVAhrmmHX6Z8B9k= @@ -1342,8 +1346,8 @@ github.com/googleapis/enterprise-certificate-proxy v0.1.0/go.mod h1:17drOmN3MwGY github.com/googleapis/enterprise-certificate-proxy v0.2.0/go.mod h1:8C0jb7/mgJe/9KK8Lm7X9ctZC2t60YyIpYEI16jx0Qg= github.com/googleapis/enterprise-certificate-proxy v0.2.1/go.mod h1:AwSRAtLfXpU5Nm3pW+v7rGDHp09LsPtGY9MduiEsR9k= github.com/googleapis/enterprise-certificate-proxy v0.2.3/go.mod h1:AwSRAtLfXpU5Nm3pW+v7rGDHp09LsPtGY9MduiEsR9k= -github.com/googleapis/enterprise-certificate-proxy v0.3.6 h1:GW/XbdyBFQ8Qe+YAmFU9uHLo7OnF5tL52HFAgMmyrf4= -github.com/googleapis/enterprise-certificate-proxy v0.3.6/go.mod h1:MkHOF77EYAE7qfSuSS9PU6g4Nt4e11cnsDUowfwewLA= +github.com/googleapis/enterprise-certificate-proxy v0.3.7 h1:zrn2Ee/nWmHulBx5sAVrGgAa0f2/R35S4DJwfFaUPFQ= +github.com/googleapis/enterprise-certificate-proxy v0.3.7/go.mod h1:MkHOF77EYAE7qfSuSS9PU6g4Nt4e11cnsDUowfwewLA= github.com/googleapis/gax-go/v2 v2.0.4/go.mod h1:0Wqv26UfaUD9n4G6kQubkQ+KchISgw+vpHVxEJEs9eg= github.com/googleapis/gax-go/v2 v2.0.5/go.mod h1:DWXyrwAJ9X0FpwwEdw+IPEYBICEFu5mhpdKc/us6bOk= github.com/googleapis/gax-go/v2 v2.1.0/go.mod h1:Q3nei7sK6ybPYH7twZdmQpAd1MKb7pfu6SK+H1/DsU0= @@ -1454,8 +1458,8 @@ 
github.com/jbenet/go-context v0.0.0-20150711004518-d14ea06fba99 h1:BQSFePA1RWJOl github.com/jbenet/go-context v0.0.0-20150711004518-d14ea06fba99/go.mod h1:1lJo3i6rXxKeerYnT8Nvf0QmHCRC1n8sfWVwXF2Frvo= github.com/jdkato/prose v1.2.1 h1:Fp3UnJmLVISmlc57BgKUzdjr0lOtjqTZicL3PaYy6cU= github.com/jdkato/prose v1.2.1/go.mod h1:AiRHgVagnEx2JbQRQowVBKjG0bcs/vtkGCH1dYAL1rA= -github.com/jedib0t/go-pretty/v6 v6.6.7 h1:m+LbHpm0aIAPLzLbMfn8dc3Ht8MW7lsSO4MPItz/Uuo= -github.com/jedib0t/go-pretty/v6 v6.6.7/go.mod h1:YwC5CE4fJ1HFUDeivSV1r//AmANFHyqczZk+U6BDALU= +github.com/jedib0t/go-pretty/v6 v6.7.1 h1:bHDSsj93NuJ563hHuM7ohk/wpX7BmRFNIsVv1ssI2/M= +github.com/jedib0t/go-pretty/v6 v6.7.1/go.mod h1:YwC5CE4fJ1HFUDeivSV1r//AmANFHyqczZk+U6BDALU= github.com/jessevdk/go-flags v1.4.0/go.mod h1:4FA24M0QyGHXBuZZK/XkWh8h0e1EYbRYJSGM75WSRxI= github.com/jmespath/go-jmespath v0.4.0/go.mod h1:T8mJZnbsbmF+m6zOOFylbeCJqk5+pHWvzYPziyZiYoo= github.com/jmespath/go-jmespath v0.4.1-0.20220621161143-b0104c826a24 h1:liMMTbpW34dhU4az1GN0pTPADwNmvoRSeoZ6PItiqnY= @@ -1660,8 +1664,8 @@ github.com/opencontainers/go-digest v1.0.0 h1:apOUWs51W5PlhuyGyz9FCeeBIOUDA/6nW8 github.com/opencontainers/go-digest v1.0.0/go.mod h1:0JzlMkj0TRzQZfJkVvzbP0HBR3IKzErnv2BNG4W4MAM= github.com/opencontainers/image-spec v1.1.1 h1:y0fUlFfIZhPF1W537XOLg0/fcx6zcHCJwooC2xJA040= github.com/opencontainers/image-spec v1.1.1/go.mod h1:qpqAh3Dmcf36wStyyWU+kCeDgrGnAve2nCC8+7h8Q0M= -github.com/opencontainers/runc v1.2.3 h1:fxE7amCzfZflJO2lHXf4y/y8M1BoAqp+FVmG19oYB80= -github.com/opencontainers/runc v1.2.3/go.mod h1:nSxcWUydXrsBZVYNSkTjoQ/N6rcyTtn+1SD5D4+kRIM= +github.com/opencontainers/runc v1.2.8 h1:RnEICeDReapbZ5lZEgHvj7E9Q3Eex9toYmaGBsbvU5Q= +github.com/opencontainers/runc v1.2.8/go.mod h1:cC0YkmZcuvr+rtBZ6T7NBoVbMGNAdLa/21vIElJDOzI= github.com/opentracing/opentracing-go v1.2.0 h1:uEJPy/1a5RIPAJ0Ov+OIO8OxWu77jEv+1B0VhjKrZUs= github.com/opentracing/opentracing-go v1.2.0/go.mod h1:GxEUsuufX4nBwe+T+Wl9TAgYrxe9dPLANfrWvHYVTgc= 
github.com/orisano/pixelmatch v0.0.0-20220722002657-fb0b55479cde h1:x0TT0RDC7UhAVbbWWBzr41ElhJx5tXPWkIHA2HWPRuw= @@ -1714,15 +1718,15 @@ github.com/power-devops/perfstat v0.0.0-20240221224432-82ca36839d55 h1:o4JXh1EVt github.com/power-devops/perfstat v0.0.0-20240221224432-82ca36839d55/go.mod h1:OmDBASR4679mdNQnz2pUhc2G8CO2JrUAVFDRBDP/hJE= github.com/prometheus-community/pro-bing v0.7.0 h1:KFYFbxC2f2Fp6c+TyxbCOEarf7rbnzr9Gw8eIb0RfZA= github.com/prometheus-community/pro-bing v0.7.0/go.mod h1:Moob9dvlY50Bfq6i88xIwfyw7xLFHH69LUgx9n5zqCE= -github.com/prometheus/client_golang v1.23.0 h1:ust4zpdl9r4trLY/gSjlm07PuiBq2ynaXXlptpfy8Uc= -github.com/prometheus/client_golang v1.23.0/go.mod h1:i/o0R9ByOnHX0McrTMTyhYvKE4haaf2mW08I+jGAjEE= +github.com/prometheus/client_golang v1.23.2 h1:Je96obch5RDVy3FDMndoUsjAhG5Edi49h0RJWRi/o0o= +github.com/prometheus/client_golang v1.23.2/go.mod h1:Tb1a6LWHB3/SPIzCoaDXI4I8UHKeFTEQ1YCr+0Gyqmg= github.com/prometheus/client_model v0.0.0-20190812154241-14fe0d1b01d4/go.mod h1:xMI15A0UPsDsEKsMN9yxemIoYk6Tm2C1GtYGdfGttqA= github.com/prometheus/client_model v0.2.0/go.mod h1:xMI15A0UPsDsEKsMN9yxemIoYk6Tm2C1GtYGdfGttqA= github.com/prometheus/client_model v0.3.0/go.mod h1:LDGWKZIo7rky3hgvBe+caln+Dr3dPggB5dvjtD7w9+w= github.com/prometheus/client_model v0.6.2 h1:oBsgwpGs7iVziMvrGhE53c/GrLUsZdHnqNwqPLxwZyk= github.com/prometheus/client_model v0.6.2/go.mod h1:y3m2F6Gdpfy6Ut/GBsUqTWZqCUvMVzSfMLjcu6wAwpE= -github.com/prometheus/common v0.65.0 h1:QDwzd+G1twt//Kwj/Ww6E9FQq1iVMmODnILtW1t2VzE= -github.com/prometheus/common v0.65.0/go.mod h1:0gZns+BLRQ3V6NdaerOhMbwwRbNh9hkGINtQAsP5GS8= +github.com/prometheus/common v0.67.4 h1:yR3NqWO1/UyO1w2PhUvXlGQs/PtFmoveVO0KZ4+Lvsc= +github.com/prometheus/common v0.67.4/go.mod h1:gP0fq6YjjNCLssJCQp0yk4M8W6ikLURwkdd/YKtTbyI= github.com/prometheus/procfs v0.16.1 h1:hZ15bTNuirocR6u0JZ6BAHHmwS1p8B4P6MRqxtzMyRg= github.com/prometheus/procfs v0.16.1/go.mod h1:teAbpZRB1iIAJYREa1LsoWUXykVXA1KlTmWl8x/U+Is= github.com/puzpuzpuz/xsync/v3 
v3.5.1 h1:GJYJZwO6IdxN/IKbneznS6yPkVC+c3zyY/j19c++5Fg= @@ -1781,8 +1785,8 @@ github.com/spf13/cast v1.10.0/go.mod h1:jNfB8QC9IA6ZuY2ZjDp0KtFO2LZZlg4S/7bzP6qq github.com/spf13/pflag v1.0.3/go.mod h1:DYY7MBk1bdzusC3SYhjObp+wFpr4gzcvqqNjLnInEg4= github.com/spf13/pflag v1.0.10 h1:4EBh2KAYBwaONj6b2Ye1GiHfwjqyROoF4RwYO+vPwFk= github.com/spf13/pflag v1.0.10/go.mod h1:McXfInJRrz4CZXVZOBLb0bTZqETkiAhM9Iw0y3An2Bg= -github.com/spiffe/go-spiffe/v2 v2.5.0 h1:N2I01KCUkv1FAjZXJMwh95KK1ZIQLYbPfhaxw8WS0hE= -github.com/spiffe/go-spiffe/v2 v2.5.0/go.mod h1:P+NxobPc6wXhVtINNtFjNWGBTreew1GBUCwT2wPmb7g= +github.com/spiffe/go-spiffe/v2 v2.6.0 h1:l+DolpxNWYgruGQVV0xsfeya3CsC7m8iBzDnMpsbLuo= +github.com/spiffe/go-spiffe/v2 v2.6.0/go.mod h1:gm2SeUoMZEtpnzPNs2Csc0D/gX33k1xIx7lEzqblHEs= github.com/sqlc-dev/pqtype v0.3.0 h1:b09TewZ3cSnO5+M1Kqq05y0+OjqIptxELaSayg7bmqk= github.com/sqlc-dev/pqtype v0.3.0/go.mod h1:oyUjp5981ctiL9UYvj1bVvCKi8OXkCa0u645hce7CAs= github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME= @@ -1911,6 +1915,8 @@ github.com/xeipuuv/gojsonreference v0.0.0-20180127040603-bd5ef7bd5415 h1:EzJWgHo github.com/xeipuuv/gojsonreference v0.0.0-20180127040603-bd5ef7bd5415/go.mod h1:GwrjFmJcFw6At/Gs6z4yjiIwzuJ1/+UwLxMQDVQXShQ= github.com/xeipuuv/gojsonschema v1.2.0 h1:LhYJRs+L4fBtjZUfuSZIKGeVu0QRy8e5Xi7D17UxZ74= github.com/xeipuuv/gojsonschema v1.2.0/go.mod h1:anYRn/JVcOK2ZgGU+IjEV4nwlhoK5sQluxsYJ78Id3Y= +github.com/xhit/go-str2duration/v2 v2.1.0 h1:lxklc02Drh6ynqX+DdPyp5pCKLUQpRT8bp8Ydu2Bstc= +github.com/xhit/go-str2duration/v2 v2.1.0/go.mod h1:ohY8p+0f07DiV6Em5LKB0s2YpLtXVyJfNt1+BlmyAsU= github.com/xi2/xz v0.0.0-20171230120015-48954b6210f8 h1:nIPpBwaJSVYIxUFsDv3M8ofmx9yWTog9BfvIu0q41lo= github.com/xi2/xz v0.0.0-20171230120015-48954b6210f8/go.mod h1:HUYIGzjTL3rfEspMxjDjgmT5uz5wzYJKVo23qUhYTos= github.com/xo/terminfo v0.0.0-20220910002029-abceb7e1c41e h1:JVG44RsyaB9T2KIHavMF/ppJZNG9ZpyihvCd0w101no= @@ -1954,8 +1960,8 @@ go.mozilla.org/pkcs7 v0.9.0 
h1:yM4/HS9dYv7ri2biPtxt8ikvB37a980dg69/pKmS+eI= go.mozilla.org/pkcs7 v0.9.0/go.mod h1:SNgMg+EgDFwmvSmLRTNKC5fegJjB7v23qTQ0XLGUNHk= go.nhat.io/otelsql v0.16.0 h1:MUKhNSl7Vk1FGyopy04FBDimyYogpRFs0DBB9frQal0= go.nhat.io/otelsql v0.16.0/go.mod h1:YB2ocf0Q8+kK4kxzXYUOHj7P2Km8tNmE2QlRS0frUtc= -go.opentelemetry.io/auto/sdk v1.1.0 h1:cH53jehLUN6UFLY71z+NDOiNJqDdPRaXzTel0sJySYA= -go.opentelemetry.io/auto/sdk v1.1.0/go.mod h1:3wSPjt5PWp2RhlCcmmOial7AvC4DQqZb7a7wCow3W8A= +go.opentelemetry.io/auto/sdk v1.2.1 h1:jXsnJ4Lmnqd11kwkBV2LgLoFMZKizbCi5fNZ/ipaZ64= +go.opentelemetry.io/auto/sdk v1.2.1/go.mod h1:KRTj+aOaElaLi+wW1kO/DZRXwkF4C5xPbEe3ZiIhN7Y= go.opentelemetry.io/collector/component v1.27.0 h1:6wk0K23YT9lSprX8BH9x5w8ssAORE109ekH/ix2S614= go.opentelemetry.io/collector/component v1.27.0/go.mod h1:fIyBHoa7vDyZL3Pcidgy45cx24tBe7iHWne097blGgo= go.opentelemetry.io/collector/component/componentstatus v0.120.0 h1:hzKjI9+AIl8A/saAARb47JqabWsge0kMp8NSPNiCNOQ= @@ -1987,8 +1993,8 @@ go.opentelemetry.io/collector/semconv v0.123.0/go.mod h1:te6VQ4zZJO5Lp8dM2XIhDxD go.opentelemetry.io/contrib v1.0.0/go.mod h1:EH4yDYeNoaTqn/8yCWQmfNB78VHfGX2Jt2bvnvzBlGM= go.opentelemetry.io/contrib v1.19.0 h1:rnYI7OEPMWFeM4QCqWQ3InMJ0arWMR1i0Cx9A5hcjYM= go.opentelemetry.io/contrib v1.19.0/go.mod h1:gIzjwWFoGazJmtCaDgViqOSJPde2mCWzv60o0bWPcZs= -go.opentelemetry.io/contrib/detectors/gcp v1.37.0 h1:B+WbN9RPsvobe6q4vP6KgM8/9plR/HNjgGBrfcOlweA= -go.opentelemetry.io/contrib/detectors/gcp v1.37.0/go.mod h1:K5zQ3TT7p2ru9Qkzk0bKtCql0RGkPj9pRjpXgZJZ+rU= +go.opentelemetry.io/contrib/detectors/gcp v1.38.0 h1:ZoYbqX7OaA/TAikspPl3ozPI6iY6LiIY9I8cUfm+pJs= +go.opentelemetry.io/contrib/detectors/gcp v1.38.0/go.mod h1:SU+iU7nu5ud4oCb3LQOhIZ3nRLj6FNVrKgtflbaf2ts= go.opentelemetry.io/contrib/instrumentation/google.golang.org/grpc/otelgrpc v0.62.0 h1:rbRJ8BBoVMsQShESYZ0FkvcITu8X8QNwJogcLUmDNNw= go.opentelemetry.io/contrib/instrumentation/google.golang.org/grpc/otelgrpc v0.62.0/go.mod 
h1:ru6KHrNtNHxM4nD/vd6QrLVWgKhxPYgblq4VAtNawTQ= go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.62.0 h1:Hf9xI/XLML9ElpiHVDNwvqI0hIFlzV8dgIr35kV1kRU= @@ -2009,10 +2015,10 @@ go.opentelemetry.io/otel/exporters/stdout/stdouttrace v1.37.0/go.mod h1:tx8OOlGH go.opentelemetry.io/otel/metric v1.38.0 h1:Kl6lzIYGAh5M159u9NgiRkmoMKjvbsKtYRwgfrA6WpA= go.opentelemetry.io/otel/metric v1.38.0/go.mod h1:kB5n/QoRM8YwmUahxvI3bO34eVtQf2i4utNVLr9gEmI= go.opentelemetry.io/otel/sdk v1.3.0/go.mod h1:rIo4suHNhQwBIPg9axF8V9CA72Wz2mKF1teNrup8yzs= -go.opentelemetry.io/otel/sdk v1.37.0 h1:ItB0QUqnjesGRvNcmAcU0LyvkVyGJ2xftD29bWdDvKI= -go.opentelemetry.io/otel/sdk v1.37.0/go.mod h1:VredYzxUvuo2q3WRcDnKDjbdvmO0sCzOvVAiY+yUkAg= -go.opentelemetry.io/otel/sdk/metric v1.37.0 h1:90lI228XrB9jCMuSdA0673aubgRobVZFhbjxHHspCPc= -go.opentelemetry.io/otel/sdk/metric v1.37.0/go.mod h1:cNen4ZWfiD37l5NhS+Keb5RXVWZWpRE+9WyVCpbo5ps= +go.opentelemetry.io/otel/sdk v1.38.0 h1:l48sr5YbNf2hpCUj/FoGhW9yDkl+Ma+LrVl8qaM5b+E= +go.opentelemetry.io/otel/sdk v1.38.0/go.mod h1:ghmNdGlVemJI3+ZB5iDEuk4bWA3GkTpW+DOoZMYBVVg= +go.opentelemetry.io/otel/sdk/metric v1.38.0 h1:aSH66iL0aZqo//xXzQLYozmWrXxyFkBJ6qT5wthqPoM= +go.opentelemetry.io/otel/sdk/metric v1.38.0/go.mod h1:dg9PBnW9XdQ1Hd6ZnRz689CbtrUp0wMMs9iPcgT9EZA= go.opentelemetry.io/otel/trace v1.3.0/go.mod h1:c/VDhno8888bvQYmbYLqe41/Ldmr/KKunbvWM4/fEjk= go.opentelemetry.io/otel/trace v1.38.0 h1:Fxk5bKrDZJUH+AMyyIXGcFAPah0oRcT+LuNtJrmcNLE= go.opentelemetry.io/otel/trace v1.38.0/go.mod h1:j1P9ivuFsTceSWe1oY+EeW3sc+Pp42sO++GHkg4wwhs= @@ -2032,8 +2038,8 @@ go.uber.org/multierr v1.11.0 h1:blXXJkSxSSfBVBlC76pxqeO+LN3aDfLQo+309xJstO0= go.uber.org/multierr v1.11.0/go.mod h1:20+QtiLqy0Nd6FdQB9TLXag12DsQkrbs3htMFfDN80Y= go.uber.org/zap v1.27.0 h1:aJMhYGrd5QSmlpLMr2MftRKl7t8J8PTZPA732ud/XR8= go.uber.org/zap v1.27.0/go.mod h1:GB2qFLM7cTU87MWRP2mPIjqfIDnGu+VIO4V/SdhGo2E= -go.yaml.in/yaml/v2 v2.4.2 h1:DzmwEr2rDGHl7lsFgAHxmNz/1NlQ7xLIrlN2h5d1eGI= -go.yaml.in/yaml/v2 
v2.4.2/go.mod h1:081UH+NErpNdqlCXm3TtEran0rJZGxAYx9hb/ELlsPU= +go.yaml.in/yaml/v2 v2.4.3 h1:6gvOSjQoTB3vt1l+CU+tSyi/HOjfOjRLJ4YwYZGwRO0= +go.yaml.in/yaml/v2 v2.4.3/go.mod h1:zSxWcmIDjOzPXpjlTTbAsKokqkDNAVtZO0WOMiT90s8= go.yaml.in/yaml/v3 v3.0.3 h1:bXOww4E/J3f66rav3pX3m8w6jDE4knZjGOw8b5Y6iNE= go.yaml.in/yaml/v3 v3.0.3/go.mod h1:tBHosrYAkRZjRAOREWbDnBXUf08JOwYq++0QNwQiWzI= go4.org/mem v0.0.0-20220726221520-4f986261bf13 h1:CbZeCBZ0aZj8EfVgnqQcYZgf0lpZ3H9rmp5nkDTAst8= @@ -2055,8 +2061,8 @@ golang.org/x/crypto v0.19.0/go.mod h1:Iy9bg/ha4yyC70EfRS8jz+B6ybOBKMaSxLj6P6oBDf golang.org/x/crypto v0.23.0/go.mod h1:CKFgDieR+mRhux2Lsu27y0fO304Db0wZe70UKqHu0v8= golang.org/x/crypto v0.31.0/go.mod h1:kDsLvtWBEx7MV9tJOj9bnXsPbxwJQ6csT/x4KIN4Ssk= golang.org/x/crypto v0.32.0/go.mod h1:ZnnJkOaASj8g0AjIduWNlq2NRxL0PlBrbKVyZ6V/Ugc= -golang.org/x/crypto v0.43.0 h1:dduJYIi3A3KOfdGOHX8AVZ/jGiyPa3IbBozJ5kNuE04= -golang.org/x/crypto v0.43.0/go.mod h1:BFbav4mRNlXJL4wNeejLpWxB7wMbc79PdRGhWKncxR0= +golang.org/x/crypto v0.45.0 h1:jMBrvKuj23MTlT0bQEOBcAE0mjg8mK9RXFhRH6nyF3Q= +golang.org/x/crypto v0.45.0/go.mod h1:XTGrrkGJve7CYK7J8PEww4aY7gM3qMCElcJQ8n8JdX4= golang.org/x/exp v0.0.0-20180321215751-8460e604b9de/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA= golang.org/x/exp v0.0.0-20180807140117-3d87b88a115f/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA= golang.org/x/exp v0.0.0-20190121172915-509febef88a4/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA= @@ -2121,8 +2127,8 @@ golang.org/x/mod v0.9.0/go.mod h1:iBbtSCu2XBx23ZKBPSOrRkjjQPZFPuis4dIYUhu/chs= golang.org/x/mod v0.12.0/go.mod h1:iBbtSCu2XBx23ZKBPSOrRkjjQPZFPuis4dIYUhu/chs= golang.org/x/mod v0.15.0/go.mod h1:hTbmBsO62+eylJbnUtE2MGJUyE7QWk4xUqPFrRgJ+7c= golang.org/x/mod v0.17.0/go.mod h1:hTbmBsO62+eylJbnUtE2MGJUyE7QWk4xUqPFrRgJ+7c= -golang.org/x/mod v0.29.0 h1:HV8lRxZC4l2cr3Zq1LvtOsi/ThTgWnUk/y64QSs8GwA= -golang.org/x/mod v0.29.0/go.mod h1:NyhrlYXJ2H4eJiRy/WDBO6HMqZQ6q9nk4JzS3NuCK+w= +golang.org/x/mod v0.30.0 
h1:fDEXFVZ/fmCKProc/yAXXUijritrDzahmwwefnjoPFk= +golang.org/x/mod v0.30.0/go.mod h1:lAsf5O2EvJeSFMiBxXDki7sCgAxEUcZHXoXMKT4GJKc= golang.org/x/net v0.0.0-20180724234803-3673e40ba225/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= golang.org/x/net v0.0.0-20180826012351-8a410e7b638d/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= golang.org/x/net v0.0.0-20190108225652-1e06a53dbb7e/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= @@ -2185,8 +2191,8 @@ golang.org/x/net v0.15.0/go.mod h1:idbUs1IY1+zTqbi8yxTbhexhEEk5ur9LInksu6HrEpk= golang.org/x/net v0.21.0/go.mod h1:bIjVDfnllIU7BJ2DNgfnXvpSvtn8VRwhlsaeUTyUS44= golang.org/x/net v0.25.0/go.mod h1:JkAGAh7GEvH74S6FOH42FLoXpXbE/aqXSrIQjXgsiwM= golang.org/x/net v0.34.0/go.mod h1:di0qlW3YNM5oh6GqDGQr92MyTozJPmybPK4Ev/Gm31k= -golang.org/x/net v0.46.0 h1:giFlY12I07fugqwPuWJi68oOnpfqFnJIJzaIIm2JVV4= -golang.org/x/net v0.46.0/go.mod h1:Q9BGdFy1y4nkUwiLvT5qtyhAnEHgnQ/zd8PfU6nc210= +golang.org/x/net v0.47.0 h1:Mx+4dIFzqraBXUugkia1OOvlD6LemFo1ALMHjrXDOhY= +golang.org/x/net v0.47.0/go.mod h1:/jNxtkgq5yWUGYkaZGqo27cfGZ1c5Nen03aYrrKpVRU= golang.org/x/oauth2 v0.0.0-20180821212333-d2e6202438be/go.mod h1:N/0e6XlmueqKjAGxoOufVs8QHGRruUQn6yWY3a++T0U= golang.org/x/oauth2 v0.0.0-20190226205417-e64efc72b421/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw= golang.org/x/oauth2 v0.0.0-20190604053449-0f29369cfe45/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw= @@ -2216,8 +2222,8 @@ golang.org/x/oauth2 v0.4.0/go.mod h1:RznEsdpjGAINPTOF0UH/t+xJ75L18YO3Ho6Pyn+uRec golang.org/x/oauth2 v0.5.0/go.mod h1:9/XBHVqLaWO3/BRHs5jbpYCnOZVjj5V0ndyaAM7KB4I= golang.org/x/oauth2 v0.6.0/go.mod h1:ycmewcwgD4Rpr3eZJLSB4Kyyljb3qDh40vJ8STE5HKw= golang.org/x/oauth2 v0.7.0/go.mod h1:hPLQkd9LyjfXTiRohC/41GhcFqxisoUQ99sCUOHO9x4= -golang.org/x/oauth2 v0.32.0 h1:jsCblLleRMDrxMN29H3z/k1KliIvpLgCkE6R8FXXNgY= -golang.org/x/oauth2 v0.32.0/go.mod h1:lzm5WQJQwKZ3nwavOZ3IS5Aulzxi68dUSgRHujetwEA= +golang.org/x/oauth2 v0.33.0 
h1:4Q+qn+E5z8gPRJfmRy7C2gGG3T4jIprK6aSYgTXGRpo= +golang.org/x/oauth2 v0.33.0/go.mod h1:lzm5WQJQwKZ3nwavOZ3IS5Aulzxi68dUSgRHujetwEA= golang.org/x/sync v0.0.0-20180314180146-1d60e4601c6f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.0.0-20181108010431-42b317875d0f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.0.0-20181221193216-37e7f081c4d4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= @@ -2239,8 +2245,8 @@ golang.org/x/sync v0.6.0/go.mod h1:Czt+wKu1gCyEFDUtn0jG5QVvpJ6rzVqr5aXyt9drQfk= golang.org/x/sync v0.7.0/go.mod h1:Czt+wKu1gCyEFDUtn0jG5QVvpJ6rzVqr5aXyt9drQfk= golang.org/x/sync v0.10.0/go.mod h1:Czt+wKu1gCyEFDUtn0jG5QVvpJ6rzVqr5aXyt9drQfk= golang.org/x/sync v0.12.0/go.mod h1:1dzgHSNfp02xaA81J2MS99Qcpr2w7fw1gpm99rleRqA= -golang.org/x/sync v0.17.0 h1:l60nONMj9l5drqw6jlhIELNv9I0A4OFgRsG9k2oT9Ug= -golang.org/x/sync v0.17.0/go.mod h1:9KTHXmSnoGruLpwFjVSX0lNNA75CykiMECbovNTZqGI= +golang.org/x/sync v0.18.0 h1:kr88TuHDroi+UVf+0hZnirlk8o8T+4MrK6mr60WkH/I= +golang.org/x/sync v0.18.0/go.mod h1:9KTHXmSnoGruLpwFjVSX0lNNA75CykiMECbovNTZqGI= golang.org/x/sys v0.0.0-20180830151530-49385e6e1522/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= golang.org/x/sys v0.0.0-20190222072716-a9d3bda3a223/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= @@ -2337,8 +2343,8 @@ golang.org/x/sys v0.17.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= golang.org/x/sys v0.20.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= golang.org/x/sys v0.28.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= golang.org/x/sys v0.29.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= -golang.org/x/sys v0.37.0 h1:fdNQudmxPjkdUTPnLn5mdQv7Zwvbvpaxqs831goi9kQ= -golang.org/x/sys v0.37.0/go.mod h1:OgkHotnGiDImocRcuBABYBEXf8A9a87e/uXjp9XT3ks= +golang.org/x/sys v0.38.0 
h1:3yZWxaJjBmCWXqhN1qh02AkOnCQ1poK6oF+a7xWL6Gc= +golang.org/x/sys v0.38.0/go.mod h1:OgkHotnGiDImocRcuBABYBEXf8A9a87e/uXjp9XT3ks= golang.org/x/telemetry v0.0.0-20240228155512-f48c80bd79b2/go.mod h1:TeRTkGYfJXctD9OcfyVLyj2J3IxLnKwHJR8f4D8a3YE= golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo= golang.org/x/term v0.0.0-20210927222741-03fcf44c2211/go.mod h1:jbD1KX2456YbFQfuXm/mYQcufACuNUgVhRMnK/tPxf8= @@ -2357,8 +2363,8 @@ golang.org/x/term v0.17.0/go.mod h1:lLRBjIVuehSbZlaOtGMbcMncT+aqLLLmKrsjNrUguwk= golang.org/x/term v0.20.0/go.mod h1:8UkIAJTvZgivsXaD6/pH6U9ecQzZ45awqEOzuCvwpFY= golang.org/x/term v0.27.0/go.mod h1:iMsnZpn0cago0GOrHO2+Y7u7JPn5AylBrcoWkElMTSM= golang.org/x/term v0.28.0/go.mod h1:Sw/lC2IAUZ92udQNf3WodGtn4k/XoLyZoh8v/8uiwek= -golang.org/x/term v0.36.0 h1:zMPR+aF8gfksFprF/Nc/rd1wRS1EI6nDBGyWAvDzx2Q= -golang.org/x/term v0.36.0/go.mod h1:Qu394IJq6V6dCBRgwqshf3mPF85AqzYEzofzRdZkWss= +golang.org/x/term v0.37.0 h1:8EGAD0qCmHYZg6J17DvsMy9/wJ7/D/4pV/wfnld5lTU= +golang.org/x/term v0.37.0/go.mod h1:5pB4lxRNYYVZuTLmy8oR2BH8dflOR+IbTYFD8fi3254= golang.org/x/text v0.0.0-20170915032832-14c0d48ead0c/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= golang.org/x/text v0.3.1-0.20180807135948-17ff2d5776d2/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= @@ -2381,8 +2387,8 @@ golang.org/x/text v0.14.0/go.mod h1:18ZOQIKpY8NJVqYksKHtTdi31H5itFRjB5/qKTNYzSU= golang.org/x/text v0.15.0/go.mod h1:18ZOQIKpY8NJVqYksKHtTdi31H5itFRjB5/qKTNYzSU= golang.org/x/text v0.21.0/go.mod h1:4IBbMaMmOPCJ8SecivzSH54+73PCFmPWxNTLm+vZkEQ= golang.org/x/text v0.23.0/go.mod h1:/BLNzu4aZCJ1+kcD0DNRotWKage4q2rGVAg4o22unh4= -golang.org/x/text v0.30.0 h1:yznKA/E9zq54KzlzBEAWn1NXSQ8DIp/NYMy88xJjl4k= -golang.org/x/text v0.30.0/go.mod h1:yDdHFIX9t+tORqspjENWgzaCVXgk0yYnYuSZ8UzzBVM= +golang.org/x/text v0.31.0 h1:aC8ghyu4JhP8VojJ2lEHBnochRno1sgL6nEi9WGFGMM= 
+golang.org/x/text v0.31.0/go.mod h1:tKRAlv61yKIjGGHX/4tP1LTbc13YSec1pxVEWXzfoeM= golang.org/x/time v0.0.0-20181108054448-85acf8d2951c/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= golang.org/x/time v0.0.0-20190308202827-9d24e82272b4/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= golang.org/x/time v0.0.0-20191024005414-555d28b269f0/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= @@ -2456,8 +2462,8 @@ golang.org/x/tools v0.6.0/go.mod h1:Xwgl3UAJ/d3gWutnCtw505GrjyAbvKui8lOU390QaIU= golang.org/x/tools v0.7.0/go.mod h1:4pg6aUX35JBAogB10C9AtvVL+qowtN4pT3CGSQex14s= golang.org/x/tools v0.13.0/go.mod h1:HvlwmtVNQAhOuCjW7xxvovg8wbNq7LwfXh/k7wXUl58= golang.org/x/tools v0.21.1-0.20240508182429-e35e4ccd0d2d/go.mod h1:aiJjzUbINMkxbQROHiO6hDPo2LHcIPhhQsa9DLh0yGk= -golang.org/x/tools v0.38.0 h1:Hx2Xv8hISq8Lm16jvBZ2VQf+RLmbd7wVUsALibYI/IQ= -golang.org/x/tools v0.38.0/go.mod h1:yEsQ/d/YK8cjh0L6rZlY8tgtlKiBNTL14pGDJPJpYQs= +golang.org/x/tools v0.39.0 h1:ik4ho21kwuQln40uelmciQPp9SipgNDdrafrYA4TmQQ= +golang.org/x/tools v0.39.0/go.mod h1:JnefbkDPyD8UU2kI5fuf8ZX4/yUeh9W877ZeBONxUqQ= golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= golang.org/x/xerrors v0.0.0-20191011141410-1b5146add898/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= @@ -2541,8 +2547,8 @@ google.golang.org/api v0.108.0/go.mod h1:2Ts0XTHNVWxypznxWOYUeI4g3WdP9Pk2Qk58+a/ google.golang.org/api v0.110.0/go.mod h1:7FC4Vvx1Mooxh8C5HWjzZHcavuS2f6pmJpZx60ca7iI= google.golang.org/api v0.111.0/go.mod h1:qtFHvU9mhgTJegR31csQ+rwxyUTHOKFqCKWp1J0fdw0= google.golang.org/api v0.114.0/go.mod h1:ifYI2ZsFK6/uGddGfAD5BMxlnkBqCmqHSDUVi45N5Yg= -google.golang.org/api v0.253.0 h1:apU86Eq9Q2eQco3NsUYFpVTfy7DwemojL7LmbAj7g/I= -google.golang.org/api v0.253.0/go.mod h1:PX09ad0r/4du83vZVAaGg7OaeyGnaUmT/CYPNvtLCbw= +google.golang.org/api v0.256.0 
h1:u6Khm8+F9sxbCTYNoBHg6/Hwv0N/i+V94MvkOSor6oI= +google.golang.org/api v0.256.0/go.mod h1:KIgPhksXADEKJlnEoRa9qAII4rXcy40vfI8HRqcU964= google.golang.org/appengine v1.1.0/go.mod h1:EbEs0AVv82hx2wNQdGPgUI5lhzA/G0D9YwlJXL52JkM= google.golang.org/appengine v1.4.0/go.mod h1:xpcJRLb0r/rnEns0DIKYYv+WjYCduHsrkT7/EB5XEv4= google.golang.org/appengine v1.5.0/go.mod h1:xpcJRLb0r/rnEns0DIKYYv+WjYCduHsrkT7/EB5XEv4= @@ -2685,10 +2691,10 @@ google.golang.org/genproto v0.0.0-20230331144136-dcfb400f0633/go.mod h1:UUQDJDOl google.golang.org/genproto v0.0.0-20230410155749-daa745c078e1/go.mod h1:nKE/iIaLqn2bQwXBg8f1g2Ylh6r5MN5CmZvuzZCgsCU= google.golang.org/genproto v0.0.0-20250715232539-7130f93afb79 h1:Nt6z9UHqSlIdIGJdz6KhTIs2VRx/iOsA5iE8bmQNcxs= google.golang.org/genproto v0.0.0-20250715232539-7130f93afb79/go.mod h1:kTmlBHMPqR5uCZPBvwa2B18mvubkjyY3CRLI0c6fj0s= -google.golang.org/genproto/googleapis/api v0.0.0-20250804133106-a7a43d27e69b h1:ULiyYQ0FdsJhwwZUwbaXpZF5yUE3h+RA+gxvBu37ucc= -google.golang.org/genproto/googleapis/api v0.0.0-20250804133106-a7a43d27e69b/go.mod h1:oDOGiMSXHL4sDTJvFvIB9nRQCGdLP1o/iVaqQK8zB+M= -google.golang.org/genproto/googleapis/rpc v0.0.0-20251014184007-4626949a642f h1:1FTH6cpXFsENbPR5Bu8NQddPSaUUE6NA2XdZdDSAJK4= -google.golang.org/genproto/googleapis/rpc v0.0.0-20251014184007-4626949a642f/go.mod h1:7i2o+ce6H/6BluujYR+kqX3GKH+dChPTQU19wjRPiGk= +google.golang.org/genproto/googleapis/api v0.0.0-20251022142026-3a174f9686a8 h1:mepRgnBZa07I4TRuomDE4sTIYieg/osKmzIf4USdWS4= +google.golang.org/genproto/googleapis/api v0.0.0-20251022142026-3a174f9686a8/go.mod h1:fDMmzKV90WSg1NbozdqrE64fkuTv6mlq2zxo9ad+3yo= +google.golang.org/genproto/googleapis/rpc v0.0.0-20251103181224-f26f9409b101 h1:tRPGkdGHuewF4UisLzzHHr1spKw92qLM98nIzxbC0wY= +google.golang.org/genproto/googleapis/rpc v0.0.0-20251103181224-f26f9409b101/go.mod h1:7i2o+ce6H/6BluujYR+kqX3GKH+dChPTQU19wjRPiGk= google.golang.org/grpc v1.19.0/go.mod h1:mqu4LbDTu4XGKhr4mRzUsmM4RtVoemTSY81AxZiDr8c= google.golang.org/grpc 
v1.20.1/go.mod h1:10oTOabMzJvdu6/UiuZezV6QK5dSlG84ov/aaiqXj38= google.golang.org/grpc v1.21.1/go.mod h1:oYelfM1adQP15Ek0mdvEgi9Df8B9CZIaU1084ijfRaM= @@ -2730,8 +2736,8 @@ google.golang.org/grpc v1.52.3/go.mod h1:pu6fVzoFb+NBYNAvQL08ic+lvB2IojljRYuun5v google.golang.org/grpc v1.53.0/go.mod h1:OnIrk0ipVdj4N5d9IUoFUx72/VlD7+jUsHwZgwSMQpw= google.golang.org/grpc v1.54.0/go.mod h1:PUSEXI6iWghWaB6lXM4knEgpJNu2qUcKfDtNci3EC2g= google.golang.org/grpc v1.56.3/go.mod h1:I9bI3vqKfayGqPUAwGdOSu7kt6oIJLixfffKrpXqQ9s= -google.golang.org/grpc v1.76.0 h1:UnVkv1+uMLYXoIz6o7chp59WfQUYA2ex/BXQ9rHZu7A= -google.golang.org/grpc v1.76.0/go.mod h1:Ju12QI8M6iQJtbcsV+awF5a4hfJMLi4X0JLo94ULZ6c= +google.golang.org/grpc v1.77.0 h1:wVVY6/8cGA6vvffn+wWK5ToddbgdU3d8MNENr4evgXM= +google.golang.org/grpc v1.77.0/go.mod h1:z0BY1iVj0q8E1uSQCjL9cppRj+gnZjzDnzV0dHhrNig= google.golang.org/grpc/cmd/protoc-gen-go-grpc v1.1.0/go.mod h1:6Kw0yEErY5E/yWrBtf03jp27GLLJujG4z/JK95pnjjw= google.golang.org/protobuf v0.0.0-20200109180630-ec00e32a8dfd/go.mod h1:DFci5gLYBciE7Vtevhsrf46CRTquxDuWsQurQQe4oz8= google.golang.org/protobuf v0.0.0-20200221191635-4d8936d0db64/go.mod h1:kwYJMbMJ01Woi6D6+Kah6886xMZcty6N08ah7+eCXa0= diff --git a/helm/coder/tests/chart_test.go b/helm/coder/tests/chart_test.go index 0e6d5cda10c94..d175bab802e23 100644 --- a/helm/coder/tests/chart_test.go +++ b/helm/coder/tests/chart_test.go @@ -133,6 +133,10 @@ var testCases = []testCase{ name: "namespace_rbac", expectedError: "", }, + { + name: "priority_class_name", + expectedError: "", + }, } type testCase struct { diff --git a/helm/coder/tests/testdata/namespace_rbac.golden b/helm/coder/tests/testdata/namespace_rbac.golden index 68650a02b3fb4..57a4ba3e2b214 100644 --- a/helm/coder/tests/testdata/namespace_rbac.golden +++ b/helm/coder/tests/testdata/namespace_rbac.golden @@ -117,34 +117,6 @@ rules: # Source: coder/templates/rbac.yaml apiVersion: rbac.authorization.k8s.io/v1 kind: Role -metadata: - name: coder-workspace-perms - namespace: 
test-namespace2 -rules: - - apiGroups: - - apps - resources: - - deployments - verbs: - - create - - delete - - deletecollection - - get - - list - - patch - - update - - watch - - apiGroups: - - networking.k8s.io - resources: - - ingresses - verbs: - - get - - list ---- -# Source: coder/templates/rbac.yaml -apiVersion: rbac.authorization.k8s.io/v1 -kind: Role metadata: name: coder-workspace-perms namespace: test-namespace3 @@ -262,21 +234,6 @@ roleRef: # Source: coder/templates/rbac.yaml apiVersion: rbac.authorization.k8s.io/v1 kind: RoleBinding -metadata: - name: "coder" - namespace: test-namespace2 -subjects: - - kind: ServiceAccount - name: "coder" - namespace: default -roleRef: - apiGroup: rbac.authorization.k8s.io - kind: Role - name: coder-workspace-perms ---- -# Source: coder/templates/rbac.yaml -apiVersion: rbac.authorization.k8s.io/v1 -kind: RoleBinding metadata: name: "coder" namespace: test-namespace3 diff --git a/helm/coder/tests/testdata/namespace_rbac_coder.golden b/helm/coder/tests/testdata/namespace_rbac_coder.golden index 239eb73f8ee51..2687504879629 100644 --- a/helm/coder/tests/testdata/namespace_rbac_coder.golden +++ b/helm/coder/tests/testdata/namespace_rbac_coder.golden @@ -117,34 +117,6 @@ rules: # Source: coder/templates/rbac.yaml apiVersion: rbac.authorization.k8s.io/v1 kind: Role -metadata: - name: coder-workspace-perms - namespace: test-namespace2 -rules: - - apiGroups: - - apps - resources: - - deployments - verbs: - - create - - delete - - deletecollection - - get - - list - - patch - - update - - watch - - apiGroups: - - networking.k8s.io - resources: - - ingresses - verbs: - - get - - list ---- -# Source: coder/templates/rbac.yaml -apiVersion: rbac.authorization.k8s.io/v1 -kind: Role metadata: name: coder-workspace-perms namespace: test-namespace3 @@ -262,21 +234,6 @@ roleRef: # Source: coder/templates/rbac.yaml apiVersion: rbac.authorization.k8s.io/v1 kind: RoleBinding -metadata: - name: "coder" - namespace: test-namespace2 
-subjects: - - kind: ServiceAccount - name: "coder" - namespace: coder -roleRef: - apiGroup: rbac.authorization.k8s.io - kind: Role - name: coder-workspace-perms ---- -# Source: coder/templates/rbac.yaml -apiVersion: rbac.authorization.k8s.io/v1 -kind: RoleBinding metadata: name: "coder" namespace: test-namespace3 diff --git a/helm/coder/tests/testdata/priority_class_name.golden b/helm/coder/tests/testdata/priority_class_name.golden new file mode 100644 index 0000000000000..0736d9dabba7f --- /dev/null +++ b/helm/coder/tests/testdata/priority_class_name.golden @@ -0,0 +1,206 @@ +--- +# Source: coder/templates/coder.yaml +apiVersion: v1 +kind: ServiceAccount +metadata: + annotations: {} + labels: + app.kubernetes.io/instance: release-name + app.kubernetes.io/managed-by: Helm + app.kubernetes.io/name: coder + app.kubernetes.io/part-of: coder + app.kubernetes.io/version: 0.1.0 + helm.sh/chart: coder-0.1.0 + name: coder + namespace: default +--- +# Source: coder/templates/rbac.yaml +apiVersion: rbac.authorization.k8s.io/v1 +kind: Role +metadata: + name: coder-workspace-perms + namespace: default +rules: + - apiGroups: [""] + resources: ["pods"] + verbs: + - create + - delete + - deletecollection + - get + - list + - patch + - update + - watch + - apiGroups: [""] + resources: ["persistentvolumeclaims"] + verbs: + - create + - delete + - deletecollection + - get + - list + - patch + - update + - watch + - apiGroups: + - apps + resources: + - deployments + verbs: + - create + - delete + - deletecollection + - get + - list + - patch + - update + - watch +--- +# Source: coder/templates/rbac.yaml +apiVersion: rbac.authorization.k8s.io/v1 +kind: RoleBinding +metadata: + name: "coder" + namespace: default +subjects: + - kind: ServiceAccount + name: "coder" +roleRef: + apiGroup: rbac.authorization.k8s.io + kind: Role + name: coder-workspace-perms +--- +# Source: coder/templates/service.yaml +apiVersion: v1 +kind: Service +metadata: + name: coder + namespace: default + labels: + 
helm.sh/chart: coder-0.1.0 + app.kubernetes.io/name: coder + app.kubernetes.io/instance: release-name + app.kubernetes.io/part-of: coder + app.kubernetes.io/version: "0.1.0" + app.kubernetes.io/managed-by: Helm + annotations: + {} +spec: + type: LoadBalancer + sessionAffinity: None + ports: + - name: "http" + port: 80 + targetPort: "http" + protocol: TCP + nodePort: + externalTrafficPolicy: "Cluster" + selector: + app.kubernetes.io/name: coder + app.kubernetes.io/instance: release-name +--- +# Source: coder/templates/coder.yaml +apiVersion: apps/v1 +kind: Deployment +metadata: + annotations: {} + labels: + app.kubernetes.io/instance: release-name + app.kubernetes.io/managed-by: Helm + app.kubernetes.io/name: coder + app.kubernetes.io/part-of: coder + app.kubernetes.io/version: 0.1.0 + helm.sh/chart: coder-0.1.0 + name: coder + namespace: default +spec: + replicas: 1 + selector: + matchLabels: + app.kubernetes.io/instance: release-name + app.kubernetes.io/name: coder + template: + metadata: + annotations: {} + labels: + app.kubernetes.io/instance: release-name + app.kubernetes.io/managed-by: Helm + app.kubernetes.io/name: coder + app.kubernetes.io/part-of: coder + app.kubernetes.io/version: 0.1.0 + helm.sh/chart: coder-0.1.0 + spec: + affinity: + podAntiAffinity: + preferredDuringSchedulingIgnoredDuringExecution: + - podAffinityTerm: + labelSelector: + matchExpressions: + - key: app.kubernetes.io/instance + operator: In + values: + - coder + topologyKey: kubernetes.io/hostname + weight: 1 + containers: + - args: + - server + command: + - /opt/coder + env: + - name: CODER_HTTP_ADDRESS + value: 0.0.0.0:8080 + - name: CODER_PROMETHEUS_ADDRESS + value: 0.0.0.0:2112 + - name: CODER_PPROF_ADDRESS + value: 0.0.0.0:6060 + - name: CODER_ACCESS_URL + value: http://coder.default.svc.cluster.local + - name: KUBE_POD_IP + valueFrom: + fieldRef: + fieldPath: status.podIP + - name: CODER_DERP_SERVER_RELAY_URL + value: http://$(KUBE_POD_IP):8080 + image: ghcr.io/coder/coder:latest 
+ imagePullPolicy: IfNotPresent + lifecycle: {} + livenessProbe: + httpGet: + path: /healthz + port: http + scheme: HTTP + initialDelaySeconds: 0 + name: coder + ports: + - containerPort: 8080 + name: http + protocol: TCP + readinessProbe: + httpGet: + path: /healthz + port: http + scheme: HTTP + initialDelaySeconds: 0 + resources: + limits: + cpu: 2000m + memory: 4096Mi + requests: + cpu: 2000m + memory: 4096Mi + securityContext: + allowPrivilegeEscalation: false + readOnlyRootFilesystem: null + runAsGroup: 1000 + runAsNonRoot: true + runAsUser: 1000 + seccompProfile: + type: RuntimeDefault + volumeMounts: [] + priorityClassName: high-priority + restartPolicy: Always + serviceAccountName: coder + terminationGracePeriodSeconds: 60 + volumes: [] diff --git a/helm/coder/tests/testdata/priority_class_name.yaml b/helm/coder/tests/testdata/priority_class_name.yaml new file mode 100644 index 0000000000000..15ed574c28d4f --- /dev/null +++ b/helm/coder/tests/testdata/priority_class_name.yaml @@ -0,0 +1,4 @@ +coder: + image: + tag: latest + priorityClassName: high-priority diff --git a/helm/coder/tests/testdata/priority_class_name_coder.golden b/helm/coder/tests/testdata/priority_class_name_coder.golden new file mode 100644 index 0000000000000..e06d69dcf3612 --- /dev/null +++ b/helm/coder/tests/testdata/priority_class_name_coder.golden @@ -0,0 +1,206 @@ +--- +# Source: coder/templates/coder.yaml +apiVersion: v1 +kind: ServiceAccount +metadata: + annotations: {} + labels: + app.kubernetes.io/instance: release-name + app.kubernetes.io/managed-by: Helm + app.kubernetes.io/name: coder + app.kubernetes.io/part-of: coder + app.kubernetes.io/version: 0.1.0 + helm.sh/chart: coder-0.1.0 + name: coder + namespace: coder +--- +# Source: coder/templates/rbac.yaml +apiVersion: rbac.authorization.k8s.io/v1 +kind: Role +metadata: + name: coder-workspace-perms + namespace: coder +rules: + - apiGroups: [""] + resources: ["pods"] + verbs: + - create + - delete + - deletecollection + - get + 
- list + - patch + - update + - watch + - apiGroups: [""] + resources: ["persistentvolumeclaims"] + verbs: + - create + - delete + - deletecollection + - get + - list + - patch + - update + - watch + - apiGroups: + - apps + resources: + - deployments + verbs: + - create + - delete + - deletecollection + - get + - list + - patch + - update + - watch +--- +# Source: coder/templates/rbac.yaml +apiVersion: rbac.authorization.k8s.io/v1 +kind: RoleBinding +metadata: + name: "coder" + namespace: coder +subjects: + - kind: ServiceAccount + name: "coder" +roleRef: + apiGroup: rbac.authorization.k8s.io + kind: Role + name: coder-workspace-perms +--- +# Source: coder/templates/service.yaml +apiVersion: v1 +kind: Service +metadata: + name: coder + namespace: coder + labels: + helm.sh/chart: coder-0.1.0 + app.kubernetes.io/name: coder + app.kubernetes.io/instance: release-name + app.kubernetes.io/part-of: coder + app.kubernetes.io/version: "0.1.0" + app.kubernetes.io/managed-by: Helm + annotations: + {} +spec: + type: LoadBalancer + sessionAffinity: None + ports: + - name: "http" + port: 80 + targetPort: "http" + protocol: TCP + nodePort: + externalTrafficPolicy: "Cluster" + selector: + app.kubernetes.io/name: coder + app.kubernetes.io/instance: release-name +--- +# Source: coder/templates/coder.yaml +apiVersion: apps/v1 +kind: Deployment +metadata: + annotations: {} + labels: + app.kubernetes.io/instance: release-name + app.kubernetes.io/managed-by: Helm + app.kubernetes.io/name: coder + app.kubernetes.io/part-of: coder + app.kubernetes.io/version: 0.1.0 + helm.sh/chart: coder-0.1.0 + name: coder + namespace: coder +spec: + replicas: 1 + selector: + matchLabels: + app.kubernetes.io/instance: release-name + app.kubernetes.io/name: coder + template: + metadata: + annotations: {} + labels: + app.kubernetes.io/instance: release-name + app.kubernetes.io/managed-by: Helm + app.kubernetes.io/name: coder + app.kubernetes.io/part-of: coder + app.kubernetes.io/version: 0.1.0 + 
helm.sh/chart: coder-0.1.0 + spec: + affinity: + podAntiAffinity: + preferredDuringSchedulingIgnoredDuringExecution: + - podAffinityTerm: + labelSelector: + matchExpressions: + - key: app.kubernetes.io/instance + operator: In + values: + - coder + topologyKey: kubernetes.io/hostname + weight: 1 + containers: + - args: + - server + command: + - /opt/coder + env: + - name: CODER_HTTP_ADDRESS + value: 0.0.0.0:8080 + - name: CODER_PROMETHEUS_ADDRESS + value: 0.0.0.0:2112 + - name: CODER_PPROF_ADDRESS + value: 0.0.0.0:6060 + - name: CODER_ACCESS_URL + value: http://coder.coder.svc.cluster.local + - name: KUBE_POD_IP + valueFrom: + fieldRef: + fieldPath: status.podIP + - name: CODER_DERP_SERVER_RELAY_URL + value: http://$(KUBE_POD_IP):8080 + image: ghcr.io/coder/coder:latest + imagePullPolicy: IfNotPresent + lifecycle: {} + livenessProbe: + httpGet: + path: /healthz + port: http + scheme: HTTP + initialDelaySeconds: 0 + name: coder + ports: + - containerPort: 8080 + name: http + protocol: TCP + readinessProbe: + httpGet: + path: /healthz + port: http + scheme: HTTP + initialDelaySeconds: 0 + resources: + limits: + cpu: 2000m + memory: 4096Mi + requests: + cpu: 2000m + memory: 4096Mi + securityContext: + allowPrivilegeEscalation: false + readOnlyRootFilesystem: null + runAsGroup: 1000 + runAsNonRoot: true + runAsUser: 1000 + seccompProfile: + type: RuntimeDefault + volumeMounts: [] + priorityClassName: high-priority + restartPolicy: Always + serviceAccountName: coder + terminationGracePeriodSeconds: 60 + volumes: [] diff --git a/helm/coder/values.yaml b/helm/coder/values.yaml index 467a7d1c57836..85a4e4519f61a 100644 --- a/helm/coder/values.yaml +++ b/helm/coder/values.yaml @@ -82,6 +82,11 @@ coder: # https://kubernetes.io/docs/concepts/overview/working-with-objects/labels/ podLabels: {} + # coder.priorityClassName -- The priority class name to assign to the Coder pod. 
See: + # https://kubernetes.io/docs/concepts/configuration/pod-priority-preemption/ + # The PriorityClass must exist in the cluster prior to deploying Coder with this set. + priorityClassName: "" + # coder.serviceAccount -- Configuration for the automatically created service # account. Creation of the service account cannot be disabled. serviceAccount: diff --git a/helm/libcoder/templates/_coder.yaml b/helm/libcoder/templates/_coder.yaml index 6001df90d6580..52caaf58919f3 100644 --- a/helm/libcoder/templates/_coder.yaml +++ b/helm/libcoder/templates/_coder.yaml @@ -26,6 +26,9 @@ spec: {{- toYaml .Values.coder.podAnnotations | nindent 8 }} spec: serviceAccountName: {{ .Values.coder.serviceAccount.name | quote }} + {{- with .Values.coder.priorityClassName }} + priorityClassName: {{ . | quote }} + {{- end }} {{- with .Values.coder.podSecurityContext }} securityContext: {{- toYaml . | nindent 8 }} diff --git a/helm/libcoder/templates/_rbac.yaml b/helm/libcoder/templates/_rbac.yaml index 73ba2bd4e1394..633a8252e8a0f 100644 --- a/helm/libcoder/templates/_rbac.yaml +++ b/helm/libcoder/templates/_rbac.yaml @@ -1,7 +1,9 @@ {{- define "libcoder.rbac.forNamespace" -}} {{- $nsPerms := ternary .workspacePerms .Top.Values.coder.serviceAccount.workspacePerms (hasKey . "workspacePerms") -}} - {{- $nsDeploy := ternary .enableDeployments .Top.Values.coder.serviceAccount.enableDeployments (hasKey . "enableDeployments") -}} - {{- $nsExtra := ternary .extraRules .Top.Values.coder.serviceAccount.extraRules (hasKey . "extraRules") -}} + {{- $nsDeployRaw := ternary .enableDeployments .Top.Values.coder.serviceAccount.enableDeployments (hasKey . "enableDeployments") -}} + {{- $nsExtraRaw := ternary .extraRules .Top.Values.coder.serviceAccount.extraRules (hasKey . 
"extraRules") -}} + {{- $nsDeploy := and $nsPerms $nsDeployRaw -}} + {{- $nsExtra := ternary $nsExtraRaw (list) $nsPerms -}} {{- if or $nsPerms (or $nsDeploy $nsExtra) }} --- diff --git a/coderd/httpmw/recover.go b/httpmw/recover.go similarity index 100% rename from coderd/httpmw/recover.go rename to httpmw/recover.go diff --git a/coderd/httpmw/recover_test.go b/httpmw/recover_test.go similarity index 96% rename from coderd/httpmw/recover_test.go rename to httpmw/recover_test.go index d4d4227ff15ef..89c6140d02070 100644 --- a/coderd/httpmw/recover_test.go +++ b/httpmw/recover_test.go @@ -7,8 +7,8 @@ import ( "github.com/stretchr/testify/require" - "github.com/coder/coder/v2/coderd/httpmw" "github.com/coder/coder/v2/coderd/tracing" + "github.com/coder/coder/v2/httpmw" "github.com/coder/coder/v2/testutil" ) diff --git a/install.sh b/install.sh index 1dbf813b96690..99752791a90ae 100755 --- a/install.sh +++ b/install.sh @@ -273,7 +273,7 @@ EOF main() { MAINLINE=1 STABLE=0 - TERRAFORM_VERSION="1.13.0" + TERRAFORM_VERSION="1.13.4" if [ "${TRACE-}" ]; then set -x diff --git a/offlinedocs/package.json b/offlinedocs/package.json index 26073286ddb65..4e52395036dd4 100644 --- a/offlinedocs/package.json +++ b/offlinedocs/package.json @@ -20,7 +20,7 @@ "framer-motion": "^10.18.0", "front-matter": "4.0.2", "lodash": "4.17.21", - "next": "15.5.4", + "next": "15.5.6", "react": "18.3.1", "react-dom": "18.3.1", "react-icons": "4.12.0", @@ -31,7 +31,7 @@ }, "devDependencies": { "@types/lodash": "4.17.20", - "@types/node": "20.19.19", + "@types/node": "20.19.24", "@types/react": "18.3.12", "@types/react-dom": "18.3.1", "@types/sanitize-html": "2.16.0", diff --git a/offlinedocs/pnpm-lock.yaml b/offlinedocs/pnpm-lock.yaml index 7c4466814364c..1b49cbd1518db 100644 --- a/offlinedocs/pnpm-lock.yaml +++ b/offlinedocs/pnpm-lock.yaml @@ -34,8 +34,8 @@ importers: specifier: 4.17.21 version: 4.17.21 next: - specifier: 15.5.4 - version: 15.5.4(react-dom@18.3.1(react@18.3.1))(react@18.3.1) + 
specifier: 15.5.6 + version: 15.5.6(react-dom@18.3.1(react@18.3.1))(react@18.3.1) react: specifier: 18.3.1 version: 18.3.1 @@ -62,8 +62,8 @@ importers: specifier: 4.17.20 version: 4.17.20 '@types/node': - specifier: 20.19.19 - version: 20.19.19 + specifier: 20.19.24 + version: 20.19.24 '@types/react': specifier: 18.3.12 version: 18.3.12 @@ -175,8 +175,8 @@ packages: '@emnapi/core@1.5.0': resolution: {integrity: sha512-sbP8GzB1WDzacS8fgNPpHlp6C9VZe+SJP3F90W9rLemaQj2PzIuTEl1qDOYQf58YIpyjViI24y9aPWCjEzY2cg==} - '@emnapi/runtime@1.5.0': - resolution: {integrity: sha512-97/BJ3iXHww3djw6hYIfErCZFee7qCtrneuLa20UXFCOTCfBM2cvQHjWJ2EG0s0MtdNwInarqCTz35i4wWXHsQ==} + '@emnapi/runtime@1.6.0': + resolution: {integrity: sha512-obtUmAHTMjll499P+D9A3axeJFlhdjOWdKUNs/U6QIGT7V5RjcUW1xToAzjvmgTSQhDbYn/NwfTRoJcQ2rNBxA==} '@emnapi/wasi-threads@1.1.0': resolution: {integrity: sha512-WI0DdZ8xFSbgMjR1sFsKABJ/C5OnRrjT06JXbZKexJGrDuPTzZdDYfFlsgcCXCyf+suG5QU2e/y1Wo2V/OapLQ==} @@ -428,56 +428,56 @@ packages: '@napi-rs/wasm-runtime@0.2.12': resolution: {integrity: sha512-ZVWUcfwY4E/yPitQJl481FjFo3K22D6qF0DuFH6Y/nbnE11GY5uguDxZMGXPQ8WQ0128MXQD7TnfHyK4oWoIJQ==} - '@next/env@15.5.4': - resolution: {integrity: sha512-27SQhYp5QryzIT5uO8hq99C69eLQ7qkzkDPsk3N+GuS2XgOgoYEeOav7Pf8Tn4drECOVDsDg8oj+/DVy8qQL2A==} + '@next/env@15.5.6': + resolution: {integrity: sha512-3qBGRW+sCGzgbpc5TS1a0p7eNxnOarGVQhZxfvTdnV0gFI61lX7QNtQ4V1TSREctXzYn5NetbUsLvyqwLFJM6Q==} '@next/eslint-plugin-next@14.2.33': resolution: {integrity: sha512-DQTJFSvlB+9JilwqMKJ3VPByBNGxAGFTfJ7BuFj25cVcbBy7jm88KfUN+dngM4D3+UxZ8ER2ft+WH9JccMvxyg==} - '@next/swc-darwin-arm64@15.5.4': - resolution: {integrity: sha512-nopqz+Ov6uvorej8ndRX6HlxCYWCO3AHLfKK2TYvxoSB2scETOcfm/HSS3piPqc3A+MUgyHoqE6je4wnkjfrOA==} + '@next/swc-darwin-arm64@15.5.6': + resolution: {integrity: sha512-ES3nRz7N+L5Umz4KoGfZ4XX6gwHplwPhioVRc25+QNsDa7RtUF/z8wJcbuQ2Tffm5RZwuN2A063eapoJ1u4nPg==} engines: {node: '>= 10'} cpu: [arm64] os: [darwin] - '@next/swc-darwin-x64@15.5.4': - 
resolution: {integrity: sha512-QOTCFq8b09ghfjRJKfb68kU9k2K+2wsC4A67psOiMn849K9ZXgCSRQr0oVHfmKnoqCbEmQWG1f2h1T2vtJJ9mA==} + '@next/swc-darwin-x64@15.5.6': + resolution: {integrity: sha512-JIGcytAyk9LQp2/nuVZPAtj8uaJ/zZhsKOASTjxDug0SPU9LAM3wy6nPU735M1OqacR4U20LHVF5v5Wnl9ptTA==} engines: {node: '>= 10'} cpu: [x64] os: [darwin] - '@next/swc-linux-arm64-gnu@15.5.4': - resolution: {integrity: sha512-eRD5zkts6jS3VfE/J0Kt1VxdFqTnMc3QgO5lFE5GKN3KDI/uUpSyK3CjQHmfEkYR4wCOl0R0XrsjpxfWEA++XA==} + '@next/swc-linux-arm64-gnu@15.5.6': + resolution: {integrity: sha512-qvz4SVKQ0P3/Im9zcS2RmfFL/UCQnsJKJwQSkissbngnB/12c6bZTCB0gHTexz1s6d/mD0+egPKXAIRFVS7hQg==} engines: {node: '>= 10'} cpu: [arm64] os: [linux] - '@next/swc-linux-arm64-musl@15.5.4': - resolution: {integrity: sha512-TOK7iTxmXFc45UrtKqWdZ1shfxuL4tnVAOuuJK4S88rX3oyVV4ZkLjtMT85wQkfBrOOvU55aLty+MV8xmcJR8A==} + '@next/swc-linux-arm64-musl@15.5.6': + resolution: {integrity: sha512-FsbGVw3SJz1hZlvnWD+T6GFgV9/NYDeLTNQB2MXoPN5u9VA9OEDy6fJEfePfsUKAhJufFbZLgp0cPxMuV6SV0w==} engines: {node: '>= 10'} cpu: [arm64] os: [linux] - '@next/swc-linux-x64-gnu@15.5.4': - resolution: {integrity: sha512-7HKolaj+481FSW/5lL0BcTkA4Ueam9SPYWyN/ib/WGAFZf0DGAN8frNpNZYFHtM4ZstrHZS3LY3vrwlIQfsiMA==} + '@next/swc-linux-x64-gnu@15.5.6': + resolution: {integrity: sha512-3QnHGFWlnvAgyxFxt2Ny8PTpXtQD7kVEeaFat5oPAHHI192WKYB+VIKZijtHLGdBBvc16tiAkPTDmQNOQ0dyrA==} engines: {node: '>= 10'} cpu: [x64] os: [linux] - '@next/swc-linux-x64-musl@15.5.4': - resolution: {integrity: sha512-nlQQ6nfgN0nCO/KuyEUwwOdwQIGjOs4WNMjEUtpIQJPR2NUfmGpW2wkJln1d4nJ7oUzd1g4GivH5GoEPBgfsdw==} + '@next/swc-linux-x64-musl@15.5.6': + resolution: {integrity: sha512-OsGX148sL+TqMK9YFaPFPoIaJKbFJJxFzkXZljIgA9hjMjdruKht6xDCEv1HLtlLNfkx3c5w2GLKhj7veBQizQ==} engines: {node: '>= 10'} cpu: [x64] os: [linux] - '@next/swc-win32-arm64-msvc@15.5.4': - resolution: {integrity: sha512-PcR2bN7FlM32XM6eumklmyWLLbu2vs+D7nJX8OAIoWy69Kef8mfiN4e8TUv2KohprwifdpFKPzIP1njuCjD0YA==} + 
'@next/swc-win32-arm64-msvc@15.5.6': + resolution: {integrity: sha512-ONOMrqWxdzXDJNh2n60H6gGyKed42Ieu6UTVPZteXpuKbLZTH4G4eBMsr5qWgOBA+s7F+uB4OJbZnrkEDnZ5Fg==} engines: {node: '>= 10'} cpu: [arm64] os: [win32] - '@next/swc-win32-x64-msvc@15.5.4': - resolution: {integrity: sha512-1ur2tSHZj8Px/KMAthmuI9FMp/YFusMMGoRNJaRZMOlSkgvLjzosSdQI0cJAKogdHl3qXUQKL9MGaYvKwA7DXg==} + '@next/swc-win32-x64-msvc@15.5.6': + resolution: {integrity: sha512-pxK4VIjFRx1MY92UycLOOw7dTdvccWsNETQ0kDHkBlcFH1GrTLUjSiHU1ohrznnux6TqRHgv5oflhfIWZwVROQ==} engines: {node: '>= 10'} cpu: [x64] os: [win32] @@ -547,8 +547,8 @@ packages: '@types/ms@2.1.0': resolution: {integrity: sha512-GsCCIZDE/p3i96vtEqx+7dBUGXrc7zeSK3wwPHIaRThS+9OhWIXRqzs4d6k1SVU8g91DrNRWxWUGhp5KXQb2VA==} - '@types/node@20.19.19': - resolution: {integrity: sha512-pb1Uqj5WJP7wrcbLU7Ru4QtA0+3kAXrkutGiD26wUKzSMgNNaPARTUDQmElUXp64kh3cWdou3Q0C7qwwxqSFmg==} + '@types/node@20.19.24': + resolution: {integrity: sha512-FE5u0ezmi6y9OZEzlJfg37mqqf6ZDSF2V/NLjUyGrR9uTZ7Sb9F7bLNZ03S4XVUNRWGA7Ck4c1kK+YnuWjl+DA==} '@types/parse-json@4.0.2': resolution: {integrity: sha512-dISoDXWWQwUquiKsyZ4Ng+HX2KsPL7LyHKHQwgGFEA3IaKac4Obd+h2a/a6waisAoepJlBcx9paWqjA8/HVjCw==} @@ -890,8 +890,8 @@ packages: resolution: {integrity: sha512-P8BjAsXvZS+VIDUI11hHCQEv74YT67YUi5JJFNWIqL235sBmjX4+qx9Muvls5ivyNENctx46xQLQ3aTuE7ssaQ==} engines: {node: '>=6'} - caniuse-lite@1.0.30001746: - resolution: {integrity: sha512-eA7Ys/DGw+pnkWWSE/id29f2IcPHVoE8wxtvE5JdvD2V28VTDPy1yEeo11Guz0sJ4ZeGRcm3uaTcAqK1LXaphA==} + caniuse-lite@1.0.30001752: + resolution: {integrity: sha512-vKUk7beoukxE47P5gcVNKkDRzXdVofotshHwfR9vmpeFKxmI5PBpgOMC18LUJUA/DvJ70Y7RveasIBraqsyO/g==} ccount@2.0.1: resolution: {integrity: sha512-eyrF0jiFpY+3drT6383f1qhkbGsLSifNAjA61IUjZjmLCWjItY6LB9ft9YhoDgwfmclB2zhu51Lc7+95b8NRAg==} @@ -1031,8 +1031,8 @@ packages: resolution: {integrity: sha512-0je+qPKHEMohvfRTCEo3CrPG6cAzAYgmzKyxRiYSSDkS6eGJdyVJm7WaYA5ECaAD9wLB2T4EEeymA5aFVcYXCA==} engines: {node: '>=6'} - 
detect-libc@2.1.1: - resolution: {integrity: sha512-ecqj/sy1jcK1uWrwpR67UhYrIFQ+5WlGxth34WquCbamhFA6hkkwiu37o6J5xCHdo1oixJRfVRw+ywV+Hq/0Aw==} + detect-libc@2.1.2: + resolution: {integrity: sha512-Btj2BOOO83o3WyH59e8MgXsxEQVcarkUOpEYrubB0urwnN10yQ364rsiByU11nZlqWYZm05i/of7io4mzihBtQ==} engines: {node: '>=8'} detect-node-es@1.1.0: @@ -1906,8 +1906,8 @@ packages: natural-compare@1.4.0: resolution: {integrity: sha512-OWND8ei3VtNC9h7V60qff3SVobHr996CTwgxubgyQYEpg290h9J0buyECNNJexkFm5sOajh5G116RYA1c8ZMSw==} - next@15.5.4: - resolution: {integrity: sha512-xH4Yjhb82sFYQfY3vbkJfgSDgXvBB6a8xPs9i35k6oZJRoQRihZH+4s9Yo2qsWpzBmZ3lPXaJ2KPXLfkvW4LnA==} + next@15.5.6: + resolution: {integrity: sha512-zTxsnI3LQo3c9HSdSf91O1jMNsEzIXDShXd4wVdg9y5shwLqBXi4ZtUUJyB86KGVSJLZx0PFONvO54aheGX8QQ==} engines: {node: ^18.18.0 || ^19.8.0 || >= 20.0.0} hasBin: true peerDependencies: @@ -2238,8 +2238,8 @@ packages: resolution: {integrity: sha512-BR7VvDCVHO+q2xBEWskxS6DJE1qRnb7DxzUrogb71CWoSficBxYsiAGd+Kl0mmq/MprG9yArRkyrQxTO6XjMzA==} hasBin: true - semver@7.7.2: - resolution: {integrity: sha512-RF0Fw+rO5AMf9MAyaRXI4AV0Ulj5lMHqVxxdSgiVbixSCXoEmmX/jk0CuJw4+3SqroYO9VoUh+HcuJivvtJemA==} + semver@7.7.3: + resolution: {integrity: sha512-SdsKMrI9TdgjdweUSR9MweHA4EJ8YxHn8DFaDisvhVlUOe4BF1tLD7GAj0lIqWVl+dPb/rExr0Btby5loQm20Q==} engines: {node: '>=10'} hasBin: true @@ -2718,7 +2718,7 @@ snapshots: tslib: 2.8.1 optional: true - '@emnapi/runtime@1.5.0': + '@emnapi/runtime@1.6.0': dependencies: tslib: 2.8.1 optional: true @@ -2938,7 +2938,7 @@ snapshots: '@img/sharp-wasm32@0.34.4': dependencies: - '@emnapi/runtime': 1.5.0 + '@emnapi/runtime': 1.6.0 optional: true '@img/sharp-win32-arm64@0.34.4': @@ -2976,38 +2976,38 @@ snapshots: '@napi-rs/wasm-runtime@0.2.12': dependencies: '@emnapi/core': 1.5.0 - '@emnapi/runtime': 1.5.0 + '@emnapi/runtime': 1.6.0 '@tybys/wasm-util': 0.10.1 optional: true - '@next/env@15.5.4': {} + '@next/env@15.5.6': {} '@next/eslint-plugin-next@14.2.33': dependencies: glob: 10.3.10 - 
'@next/swc-darwin-arm64@15.5.4': + '@next/swc-darwin-arm64@15.5.6': optional: true - '@next/swc-darwin-x64@15.5.4': + '@next/swc-darwin-x64@15.5.6': optional: true - '@next/swc-linux-arm64-gnu@15.5.4': + '@next/swc-linux-arm64-gnu@15.5.6': optional: true - '@next/swc-linux-arm64-musl@15.5.4': + '@next/swc-linux-arm64-musl@15.5.6': optional: true - '@next/swc-linux-x64-gnu@15.5.4': + '@next/swc-linux-x64-gnu@15.5.6': optional: true - '@next/swc-linux-x64-musl@15.5.4': + '@next/swc-linux-x64-musl@15.5.6': optional: true - '@next/swc-win32-arm64-msvc@15.5.4': + '@next/swc-win32-arm64-msvc@15.5.6': optional: true - '@next/swc-win32-x64-msvc@15.5.4': + '@next/swc-win32-x64-msvc@15.5.6': optional: true '@nodelib/fs.scandir@2.1.5': @@ -3074,7 +3074,7 @@ snapshots: '@types/ms@2.1.0': {} - '@types/node@20.19.19': + '@types/node@20.19.24': dependencies: undici-types: 6.21.0 @@ -3172,7 +3172,7 @@ snapshots: fast-glob: 3.3.3 is-glob: 4.0.3 minimatch: 9.0.5 - semver: 7.7.2 + semver: 7.7.3 ts-api-utils: 2.1.0(typescript@5.9.3) typescript: 5.9.3 transitivePeerDependencies: @@ -3445,7 +3445,7 @@ snapshots: callsites@3.1.0: {} - caniuse-lite@1.0.30001746: {} + caniuse-lite@1.0.30001752: {} ccount@2.0.1: {} @@ -3574,7 +3574,7 @@ snapshots: dequal@2.0.3: {} - detect-libc@2.1.1: + detect-libc@2.1.2: optional: true detect-node-es@1.1.0: {} @@ -4282,7 +4282,7 @@ snapshots: is-bun-module@2.0.0: dependencies: - semver: 7.7.2 + semver: 7.7.3 is-callable@1.2.7: {} @@ -4854,24 +4854,24 @@ snapshots: natural-compare@1.4.0: {} - next@15.5.4(react-dom@18.3.1(react@18.3.1))(react@18.3.1): + next@15.5.6(react-dom@18.3.1(react@18.3.1))(react@18.3.1): dependencies: - '@next/env': 15.5.4 + '@next/env': 15.5.6 '@swc/helpers': 0.5.15 - caniuse-lite: 1.0.30001746 + caniuse-lite: 1.0.30001752 postcss: 8.4.31 react: 18.3.1 react-dom: 18.3.1(react@18.3.1) styled-jsx: 5.1.6(react@18.3.1) optionalDependencies: - '@next/swc-darwin-arm64': 15.5.4 - '@next/swc-darwin-x64': 15.5.4 - '@next/swc-linux-arm64-gnu': 
15.5.4 - '@next/swc-linux-arm64-musl': 15.5.4 - '@next/swc-linux-x64-gnu': 15.5.4 - '@next/swc-linux-x64-musl': 15.5.4 - '@next/swc-win32-arm64-msvc': 15.5.4 - '@next/swc-win32-x64-msvc': 15.5.4 + '@next/swc-darwin-arm64': 15.5.6 + '@next/swc-darwin-x64': 15.5.6 + '@next/swc-linux-arm64-gnu': 15.5.6 + '@next/swc-linux-arm64-musl': 15.5.6 + '@next/swc-linux-x64-gnu': 15.5.6 + '@next/swc-linux-x64-musl': 15.5.6 + '@next/swc-win32-arm64-msvc': 15.5.6 + '@next/swc-win32-x64-msvc': 15.5.6 sharp: 0.34.4 transitivePeerDependencies: - '@babel/core' @@ -5258,7 +5258,7 @@ snapshots: semver@6.3.1: {} - semver@7.7.2: {} + semver@7.7.3: {} set-function-length@1.2.2: dependencies: @@ -5285,8 +5285,8 @@ snapshots: sharp@0.34.4: dependencies: '@img/colour': 1.0.0 - detect-libc: 2.1.1 - semver: 7.7.2 + detect-libc: 2.1.2 + semver: 7.7.3 optionalDependencies: '@img/sharp-darwin-arm64': 0.34.4 '@img/sharp-darwin-x64': 0.34.4 diff --git a/provisioner/echo/serve.go b/provisioner/echo/serve.go index 5069424156009..26d1fcbe3ad06 100644 --- a/provisioner/echo/serve.go +++ b/provisioner/echo/serve.go @@ -122,8 +122,8 @@ func readResponses(sess *provisionersdk.Session, trans string, suffix string) ([ for i := 0; ; i++ { paths := []string{ // Try more specific path first, then fallback to generic. 
- filepath.Join(sess.WorkDirectory, fmt.Sprintf("%d.%s.%s", i, trans, suffix)), - filepath.Join(sess.WorkDirectory, fmt.Sprintf("%d.%s", i, suffix)), + filepath.Join(sess.Files.WorkDirectory(), fmt.Sprintf("%d.%s.%s", i, trans, suffix)), + filepath.Join(sess.Files.WorkDirectory(), fmt.Sprintf("%d.%s", i, suffix)), } for pathIndex, path := range paths { _, err := os.Stat(path) diff --git a/provisioner/terraform/convertstate_test.go b/provisioner/terraform/convertstate_test.go new file mode 100644 index 0000000000000..895dd3bcdcea9 --- /dev/null +++ b/provisioner/terraform/convertstate_test.go @@ -0,0 +1,129 @@ +//go:build linux || darwin + +package terraform_test + +import ( + "encoding/json" + "fmt" + "os" + "path/filepath" + "slices" + "strings" + "testing" + + tfjson "github.com/hashicorp/terraform-json" + "github.com/stretchr/testify/require" + + "cdr.dev/slog/sloggers/slogtest" + "github.com/coder/coder/v2/provisioner/terraform" + "github.com/coder/coder/v2/testutil" +) + +// TestConvertStateGolden compares the output of ConvertState to a golden +// file to prevent regressions. If the logic changes, update the golden files +// accordingly. +// +// This was created to aid in refactoring `ConvertState`. +func TestConvertStateGolden(t *testing.T) { + t.Parallel() + + testResourceDirectories := filepath.Join("testdata", "resources") + entries, err := os.ReadDir(testResourceDirectories) + require.NoError(t, err) + + for _, testDirectory := range entries { + if !testDirectory.IsDir() { + continue + } + + testFiles, err := os.ReadDir(filepath.Join(testResourceDirectories, testDirectory.Name())) + require.NoError(t, err) + + // ConvertState works on both a plan file and a state file. + // The test should create a golden file for both. 
+ for _, step := range []string{"plan", "state"} { + srcIdc := slices.IndexFunc(testFiles, func(entry os.DirEntry) bool { + return strings.HasSuffix(entry.Name(), fmt.Sprintf(".tf%s.json", step)) + }) + dotIdx := slices.IndexFunc(testFiles, func(entry os.DirEntry) bool { + return strings.HasSuffix(entry.Name(), fmt.Sprintf(".tf%s.dot", step)) + }) + + // If the directory is missing these files, we cannot run ConvertState + // on it. So it's skipped. + if srcIdc == -1 || dotIdx == -1 { + continue + } + + t.Run(step+"_"+testDirectory.Name(), func(t *testing.T) { + t.Parallel() + testDirectoryPath := filepath.Join(testResourceDirectories, testDirectory.Name()) + planFile := filepath.Join(testDirectoryPath, testFiles[srcIdc].Name()) + dotFile := filepath.Join(testDirectoryPath, testFiles[dotIdx].Name()) + + ctx := testutil.Context(t, testutil.WaitMedium) + logger := slogtest.Make(t, nil) + + // Gather plan + tfStepRaw, err := os.ReadFile(planFile) + require.NoError(t, err) + + var modules []*tfjson.StateModule + switch step { + case "plan": + var tfPlan tfjson.Plan + err = json.Unmarshal(tfStepRaw, &tfPlan) + require.NoError(t, err) + + modules = []*tfjson.StateModule{tfPlan.PlannedValues.RootModule} + if tfPlan.PriorState != nil { + modules = append(modules, tfPlan.PriorState.Values.RootModule) + } + case "state": + var tfState tfjson.State + err = json.Unmarshal(tfStepRaw, &tfState) + require.NoError(t, err) + modules = []*tfjson.StateModule{tfState.Values.RootModule} + default: + t.Fatalf("unknown step: %s", step) + } + + // Gather graph + dotFileRaw, err := os.ReadFile(dotFile) + require.NoError(t, err) + + // expectedOutput is `any` to support errors too. If `ConvertState` returns an + // error, that error is the golden file output. 
+ var expectedOutput any + state, err := terraform.ConvertState(ctx, modules, string(dotFileRaw), logger) + if err == nil { + sortResources(state.Resources) + sortExternalAuthProviders(state.ExternalAuthProviders) + deterministicAppIDs(state.Resources) + expectedOutput = state + } else { + // Write the error to the file then. Track errors as much as valid paths. + expectedOutput = err.Error() + } + + expPath := filepath.Join(testDirectoryPath, fmt.Sprintf("converted_state.%s.golden", step)) + if *updateGoldenFiles { + gotBytes, err := json.MarshalIndent(expectedOutput, "", " ") + require.NoError(t, err, "marshaling converted state to JSON") + // Newline at end of file for git purposes + err = os.WriteFile(expPath, append(gotBytes, '\n'), 0o600) + require.NoError(t, err) + return + } + + gotBytes, err := json.Marshal(expectedOutput) + require.NoError(t, err, "marshaling converted state to JSON") + + expBytes, err := os.ReadFile(expPath) + require.NoError(t, err) + + require.JSONEq(t, string(expBytes), string(gotBytes), "converted state") + }) + } + } +} diff --git a/provisioner/terraform/executor.go b/provisioner/terraform/executor.go index 5ef7f626f9f58..3d9270a6ddbab 100644 --- a/provisioner/terraform/executor.go +++ b/provisioner/terraform/executor.go @@ -10,7 +10,6 @@ import ( "io" "os" "os/exec" - "path/filepath" "runtime" "strings" "sync" @@ -22,6 +21,7 @@ import ( "golang.org/x/xerrors" "cdr.dev/slog" + "github.com/coder/coder/v2/provisionersdk/tfpath" "github.com/coder/coder/v2/coderd/database" "github.com/coder/coder/v2/coderd/tracing" @@ -38,10 +38,10 @@ type executor struct { server *server mut *sync.Mutex binaryPath string - // cachePath and workdir must not be used by multiple processes at once. + // cachePath and files must not be used by multiple processes at once. 
cachePath string cliConfigPath string - workdir string + files tfpath.Layouter // used to capture execution times at various stages timings *timingAggregator } @@ -90,7 +90,7 @@ func (e *executor) execWriteOutput(ctx, killCtx context.Context, args, env []str // #nosec cmd := exec.CommandContext(killCtx, e.binaryPath, args...) - cmd.Dir = e.workdir + cmd.Dir = e.files.WorkDirectory() if env == nil { // We don't want to passthrough host env when unset. env = []string{} @@ -131,7 +131,7 @@ func (e *executor) execParseJSON(ctx, killCtx context.Context, args, env []strin // #nosec cmd := exec.CommandContext(killCtx, e.binaryPath, args...) - cmd.Dir = e.workdir + cmd.Dir = e.files.WorkDirectory() cmd.Env = env out := &bytes.Buffer{} stdErr := &bytes.Buffer{} @@ -225,7 +225,7 @@ func (e *executor) init(ctx, killCtx context.Context, logr logSink) error { defer e.mut.Unlock() // Record lock file checksum before init - lockFilePath := filepath.Join(e.workdir, ".terraform.lock.hcl") + lockFilePath := e.files.TerraformLockFile() preInitChecksum := checksumFileCRC32(ctx, e.logger, lockFilePath) outWriter, doneOut := e.provisionLogWriter(logr) @@ -289,14 +289,6 @@ func checksumFileCRC32(ctx context.Context, logger slog.Logger, path string) uin return crc32.ChecksumIEEE(content) } -func getPlanFilePath(workdir string) string { - return filepath.Join(workdir, "terraform.tfplan") -} - -func getStateFilePath(workdir string) string { - return filepath.Join(workdir, "terraform.tfstate") -} - // revive:disable-next-line:flag-parameter func (e *executor) plan(ctx, killCtx context.Context, env, vars []string, logr logSink, req *proto.PlanRequest) (*proto.PlanComplete, error) { ctx, span := e.server.startTrace(ctx, tracing.FuncName()) @@ -307,7 +299,7 @@ func (e *executor) plan(ctx, killCtx context.Context, env, vars []string, logr l metadata := req.Metadata - planfilePath := getPlanFilePath(e.workdir) + planfilePath := e.files.PlanFilePath() args := []string{ "plan", "-no-color", @@ 
-333,7 +325,9 @@ func (e *executor) plan(ctx, killCtx context.Context, env, vars []string, logr l <-doneErr }() + endStage := e.timings.startStage(database.ProvisionerJobTimingStagePlan) err := e.execWriteOutput(ctx, killCtx, args, env, outWriter, errWriter) + endStage(err) if err != nil { return nil, xerrors.Errorf("terraform plan: %w", err) } @@ -359,7 +353,7 @@ func (e *executor) plan(ctx, killCtx context.Context, env, vars []string, logr l // a workspace build. This removes some added costs of sending the modules // payload back to coderd if coderd is just going to ignore it. if !req.OmitModuleFiles { - moduleFiles, err = GetModulesArchive(os.DirFS(e.workdir)) + moduleFiles, err = GetModulesArchive(os.DirFS(e.files.WorkDirectory())) if err != nil { // TODO: we probably want to persist this error or make it louder eventually e.logger.Warn(ctx, "failed to archive terraform modules", slog.Error(err)) @@ -544,14 +538,18 @@ func (e *executor) graph(ctx, killCtx context.Context) (string, error) { if err != nil { return "", err } - args := []string{"graph"} + args := []string{ + "graph", + // TODO: When the plan is present, we should probably use it? + // "-plan=" + e.files.PlanFilePath(), + } if ver.GreaterThanOrEqual(version170) { args = append(args, "-type=plan") } var out strings.Builder cmd := exec.CommandContext(killCtx, e.binaryPath, args...) 
// #nosec cmd.Stdout = &out - cmd.Dir = e.workdir + cmd.Dir = e.files.WorkDirectory() cmd.Env = e.basicEnv() e.server.logger.Debug(ctx, "executing terraform command graph", @@ -588,7 +586,7 @@ func (e *executor) apply( "-auto-approve", "-input=false", "-json", - getPlanFilePath(e.workdir), + e.files.PlanFilePath(), } outWriter, doneOut := e.provisionLogWriter(logr) @@ -600,26 +598,32 @@ func (e *executor) apply( <-doneErr }() + // `terraform apply` + endStage := e.timings.startStage(database.ProvisionerJobTimingStageApply) err := e.execWriteOutput(ctx, killCtx, args, env, outWriter, errWriter) + endStage(err) if err != nil { return nil, xerrors.Errorf("terraform apply: %w", err) } + + // `terraform show` & `terraform graph` state, err := e.stateResources(ctx, killCtx) if err != nil { return nil, err } - statefilePath := filepath.Join(e.workdir, "terraform.tfstate") + statefilePath := e.files.StateFilePath() stateContent, err := os.ReadFile(statefilePath) if err != nil { return nil, xerrors.Errorf("read statefile %q: %w", statefilePath, err) } + agg := e.timings.aggregate() return &proto.ApplyComplete{ Parameters: state.Parameters, Resources: state.Resources, ExternalAuthProviders: state.ExternalAuthProviders, State: stateContent, - Timings: e.timings.aggregate(), + Timings: agg, AiTasks: state.AITasks, }, nil } diff --git a/provisioner/terraform/inittimings.go b/provisioner/terraform/inittimings.go index e72d237b5268f..7905ead772e82 100644 --- a/provisioner/terraform/inittimings.go +++ b/provisioner/terraform/inittimings.go @@ -137,30 +137,3 @@ func (t *timingAggregator) finishPrevious(ts time.Time, s *timingSpan) { t.lookupMu.Unlock() } - -// mergeInitTimings merges manual init timings with existing timings that are -// sourced by the logs. This is done because prior to Terraform v1.9, init logs -// did not have a `-json` formatting option. -// So before v1.9, the init stage is manually timed outside the `terraform init`. 
-// After v1.9, the init stage is timed via logs. -func mergeInitTimings(manualInit []*proto.Timing, existing []*proto.Timing) []*proto.Timing { - initFailed := slices.ContainsFunc(existing, func(timing *proto.Timing) bool { - return timing.State == proto.TimingState_FAILED - }) - - if initFailed { - // The init logs do not provide enough information for failed init timings. - // So use the manual timings in this case. - return append(manualInit, existing...) - } - - hasInitStage := slices.ContainsFunc(existing, func(timing *proto.Timing) bool { - return timing.Stage == string(database.ProvisionerJobTimingStageInit) - }) - - if hasInitStage { - return existing - } - - return append(manualInit, existing...) -} diff --git a/provisioner/terraform/install.go b/provisioner/terraform/install.go index 63d6b0278231d..83791abfc11a6 100644 --- a/provisioner/terraform/install.go +++ b/provisioner/terraform/install.go @@ -22,7 +22,7 @@ var ( // when Terraform is not available on the system. // NOTE: Keep this in sync with the version in scripts/Dockerfile.base. // NOTE: Keep this in sync with the version in install.sh. - TerraformVersion = version.Must(version.NewVersion("1.13.0")) + TerraformVersion = version.Must(version.NewVersion("1.13.4")) minTerraformVersion = version.Must(version.NewVersion("1.1.0")) maxTerraformVersion = version.Must(version.NewVersion("1.13.9")) // use .9 to automatically allow patch releases @@ -34,7 +34,7 @@ var ( // operation. // //nolint:revive // verbose is a control flag that controls the verbosity of the log output. 
-func Install(ctx context.Context, log slog.Logger, verbose bool, dir string, wantVersion *version.Version) (string, error) { +func Install(ctx context.Context, log slog.Logger, verbose bool, dir string, wantVersion *version.Version, baseUrl string) (string, error) { err := os.MkdirAll(dir, 0o750) if err != nil { return "", err @@ -68,6 +68,9 @@ func Install(ctx context.Context, log slog.Logger, verbose bool, dir string, wan Version: TerraformVersion, } installer.SetLogger(slog.Stdlib(ctx, log, slog.LevelDebug)) + if baseUrl != "" { + installer.ApiBaseURL = baseUrl + } logInstall := log.Debug if verbose { diff --git a/provisioner/terraform/install_test.go b/provisioner/terraform/install_test.go index 6a1be707dd146..c259ccd2d2ebc 100644 --- a/provisioner/terraform/install_test.go +++ b/provisioner/terraform/install_test.go @@ -7,7 +7,14 @@ package terraform_test import ( "context" + "errors" + "io" + "net" + "net/http" + "net/url" "os" + "path/filepath" + "strings" "sync" "testing" "time" @@ -20,6 +27,96 @@ import ( "github.com/coder/coder/v2/testutil" ) +const ( + cacheSubDir = "terraform_install_test" + terraformURL = "https://releases.hashicorp.com" +) + +var ( + version1 = terraform.TerraformVersion + version2 = version.Must(version.NewVersion("1.2.0")) +) + +type terraformProxy struct { + t *testing.T + cacheRoot string + listener net.Listener + srv *http.Server + fsHandler http.Handler + httpClient *http.Client + mutex *sync.Mutex +} + +// Simple cached proxy for terraform files. +// Serves files from persistent cache or forwards requests to releases.hashicorp.com +// Modifies downloaded index.json files so they point to proxy. 
+func persistentlyCachedProxy(t *testing.T) *terraformProxy { + cacheRoot := filepath.Join(testutil.PersistentCacheDir(t), cacheSubDir) + proxy := terraformProxy{ + t: t, + mutex: &sync.Mutex{}, + cacheRoot: cacheRoot, + fsHandler: http.FileServer(http.Dir(cacheRoot)), + httpClient: &http.Client{}, + } + + listener, err := net.Listen("tcp", "127.0.0.1:0") + if err != nil { + t.Fatalf("failed to create listener") + } + proxy.listener = listener + + m := http.NewServeMux() + m.HandleFunc("GET /", proxy.handleGet) + + proxy.srv = &http.Server{ + WriteTimeout: 30 * time.Second, + ReadTimeout: 30 * time.Second, + Handler: m, + } + return &proxy +} + +func uriToFilename(u url.URL) string { + return strings.ReplaceAll(u.RequestURI(), "/", "_") +} + +func (p *terraformProxy) handleGet(w http.ResponseWriter, r *http.Request) { + p.mutex.Lock() + defer p.mutex.Unlock() + + filename := uriToFilename(*r.URL) + path := filepath.Join(p.cacheRoot, filename) + if _, err := os.Stat(path); errors.Is(err, os.ErrNotExist) { + require.NoError(p.t, os.MkdirAll(p.cacheRoot, os.ModeDir|0o700)) + + // Update cache + req, err := http.NewRequestWithContext(p.t.Context(), "GET", terraformURL+r.URL.Path, nil) + require.NoError(p.t, err) + + resp, err := p.httpClient.Do(req) + require.NoError(p.t, err) + defer resp.Body.Close() + + body, err := io.ReadAll(resp.Body) + require.NoError(p.t, err) + + // update index.json so urls in it point to proxy by making them relative + // "https://releases.hashicorp.com/terraform/1.13.4/terraform_1.13.4_windows_amd64.zip" -> "/terraform/1.13.4/terraform_1.13.4_windows_amd64.zip" + if strings.HasSuffix(r.URL.Path, "index.json") { + body = []byte(strings.ReplaceAll(string(body), terraformURL, "")) + } + require.NoError(p.t, os.WriteFile(path, body, 0o400)) + } else if err != nil { + p.t.Errorf("unexpected error when trying to read file from cache: %v", err) + } + + // Serve from cache + r.URL.Path = filename + r.URL.RawPath = filename + 
p.fsHandler.ServeHTTP(w, r) +} + func TestInstall(t *testing.T) { t.Parallel() if testing.Short() { @@ -29,6 +126,12 @@ func TestInstall(t *testing.T) { dir := t.TempDir() log := testutil.Logger(t) + proxy := persistentlyCachedProxy(t) + go proxy.srv.Serve(proxy.listener) + t.Cleanup(func() { + require.NoError(t, proxy.srv.Close()) + }) + // Install spins off 8 installs with Version and waits for them all // to complete. The locking mechanism within Install should // prevent multiple binaries from being installed, so the function @@ -40,7 +143,7 @@ func TestInstall(t *testing.T) { wg.Add(1) go func() { defer wg.Done() - p, err := terraform.Install(ctx, log, false, dir, version) + p, err := terraform.Install(ctx, log, false, dir, version, "http://"+proxy.listener.Addr().String()) assert.NoError(t, err) paths <- p }() @@ -60,7 +163,6 @@ func TestInstall(t *testing.T) { return firstPath } - version1 := terraform.TerraformVersion binPath := install(version1) checkBinModTime := func() time.Time { @@ -73,13 +175,11 @@ func TestInstall(t *testing.T) { modTime1 := checkBinModTime() // Since we're using the same version the install should be idempotent. 
- install(terraform.TerraformVersion) + install(version1) modTime2 := checkBinModTime() require.Equal(t, modTime1, modTime2) // Ensure a new install happens when version changes - version2 := version.Must(version.NewVersion("1.2.0")) - // Sanity-check require.NotEqual(t, version2.String(), version1.String()) diff --git a/provisioner/terraform/modules.go b/provisioner/terraform/modules.go index f0b40ea9517e0..048a5b3314a2c 100644 --- a/provisioner/terraform/modules.go +++ b/provisioner/terraform/modules.go @@ -7,7 +7,6 @@ import ( "io" "io/fs" "os" - "path/filepath" "strings" "time" @@ -15,6 +14,7 @@ import ( "github.com/coder/coder/v2/coderd/util/xio" "github.com/coder/coder/v2/provisionersdk/proto" + "github.com/coder/coder/v2/provisionersdk/tfpath" ) const ( @@ -39,10 +39,6 @@ type modulesFile struct { Modules []*module `json:"Modules"` } -func getModulesFilePath(workdir string) string { - return filepath.Join(workdir, ".terraform", "modules", "modules.json") -} - func parseModulesFile(filePath string) ([]*proto.Module, error) { modules := &modulesFile{} data, err := os.ReadFile(filePath) @@ -62,8 +58,8 @@ func parseModulesFile(filePath string) ([]*proto.Module, error) { // getModules returns the modules from the modules file if it exists. // It returns nil if the file does not exist. // Modules become available after terraform init. -func getModules(workdir string) ([]*proto.Module, error) { - filePath := getModulesFilePath(workdir) +func getModules(files tfpath.Layouter) ([]*proto.Module, error) { + filePath := files.ModulesFilePath() if _, err := os.Stat(filePath); os.IsNotExist(err) { return nil, nil } diff --git a/provisioner/terraform/parse.go b/provisioner/terraform/parse.go index d5b59df327f65..2f5a8c7f5c38a 100644 --- a/provisioner/terraform/parse.go +++ b/provisioner/terraform/parse.go @@ -25,9 +25,9 @@ func (s *server) Parse(sess *provisionersdk.Session, _ *proto.ParseRequest, _ <- defer span.End() // Load the module and print any parse errors. 
- parser, diags := tfparse.New(sess.WorkDirectory, tfparse.WithLogger(s.logger.Named("tfparse"))) + parser, diags := tfparse.New(sess.Files.WorkDirectory(), tfparse.WithLogger(s.logger.Named("tfparse"))) if diags.HasErrors() { - return provisionersdk.ParseErrorf("load module: %s", formatDiagnostics(sess.WorkDirectory, diags)) + return provisionersdk.ParseErrorf("load module: %s", formatDiagnostics(sess.Files.WorkDirectory(), diags)) } workspaceTags, _, err := parser.WorkspaceTags(ctx) diff --git a/provisioner/terraform/provision.go b/provisioner/terraform/provision.go index ec9f96c3ed397..c99ee55ad8cc6 100644 --- a/provisioner/terraform/provision.go +++ b/provisioner/terraform/provision.go @@ -76,7 +76,7 @@ func (s *server) Plan( defer cancel() defer kill() - e := s.executor(sess.WorkDirectory, database.ProvisionerJobTimingStagePlan) + e := s.executor(sess.Files, database.ProvisionerJobTimingStagePlan) if err := e.checkMinVersion(ctx); err != nil { return provisionersdk.PlanErrorf("%s", err.Error()) } @@ -92,7 +92,7 @@ func (s *server) Plan( return &proto.PlanComplete{} } - statefilePath := getStateFilePath(sess.WorkDirectory) + statefilePath := sess.Files.StateFilePath() if len(sess.Config.State) > 0 { err := os.WriteFile(statefilePath, sess.Config.State, 0o600) if err != nil { @@ -110,12 +110,11 @@ func (s *server) Plan( // The JSON output of `terraform init` doesn't include discrete fields for capturing timings of each plugin, // so we capture the whole init process. initTimings := newTimingAggregator(database.ProvisionerJobTimingStageInit) - initTimings.ingest(createInitTimingsEvent(timingInitStart)) + endStage := initTimings.startStage(database.ProvisionerJobTimingStageInit) err = e.init(ctx, killCtx, sess) + endStage(err) if err != nil { - initTimings.ingest(createInitTimingsEvent(timingInitErrored)) - s.logger.Debug(ctx, "init failed", slog.Error(err)) // Special handling for "text file busy" c.f. 
https://github.com/coder/coder/issues/14726 @@ -141,15 +140,13 @@ func (s *server) Plan( return provisionersdk.PlanErrorf("initialize terraform: %s", err) } - modules, err := getModules(sess.WorkDirectory) + modules, err := getModules(sess.Files) if err != nil { // We allow getModules to fail, as the result is used only // for telemetry purposes now. s.logger.Error(ctx, "failed to get modules from disk", slog.Error(err)) } - initTimings.ingest(createInitTimingsEvent(timingInitComplete)) - s.logger.Debug(ctx, "ran initialization") env, err := provisionEnv(sess.Config, request.Metadata, request.PreviousParameterValues, request.RichParameterValues, request.ExternalAuthProviders) @@ -170,7 +167,7 @@ func (s *server) Plan( // Prepend init timings since they occur prior to plan timings. // Order is irrelevant; this is merely indicative. - resp.Timings = mergeInitTimings(initTimings.aggregate(), resp.Timings) + resp.Timings = append(initTimings.aggregate(), resp.Timings...) // mergeInitTimings(initTimings.aggregate(), resp.Timings) resp.Modules = modules return resp } @@ -184,7 +181,7 @@ func (s *server) Apply( defer cancel() defer kill() - e := s.executor(sess.WorkDirectory, database.ProvisionerJobTimingStageApply) + e := s.executor(sess.Files, database.ProvisionerJobTimingStageApply) if err := e.checkMinVersion(ctx); err != nil { return provisionersdk.ApplyErrorf("%s", err.Error()) } @@ -201,7 +198,7 @@ func (s *server) Apply( } // Earlier in the session, Plan() will have written the state file and the plan file. - statefilePath := getStateFilePath(sess.WorkDirectory) + statefilePath := sess.Files.StateFilePath() env, err := provisionEnv(sess.Config, request.Metadata, nil, nil, nil) if err != nil { return provisionersdk.ApplyErrorf("provision env: %s", err) @@ -348,7 +345,7 @@ func logTerraformEnvVars(sink logSink) { // shipped in v1.0.4. It will return the stacktraces of the provider, which will hopefully allow us // to figure out why it hasn't exited. 
func tryGettingCoderProviderStacktrace(sess *provisionersdk.Session) string { - path := filepath.Clean(filepath.Join(sess.WorkDirectory, "../.coder/pprof")) + path := filepath.Clean(filepath.Join(sess.Files.WorkDirectory(), "../.coder/pprof")) sess.Logger.Info(sess.Context(), "attempting to get stack traces", slog.F("path", path)) c := http.Client{ Transport: &http.Transport{ diff --git a/provisioner/terraform/provision_test.go b/provisioner/terraform/provision_test.go index 450dd04b061a6..9a8a49c29b9ab 100644 --- a/provisioner/terraform/provision_test.go +++ b/provisioner/terraform/provision_test.go @@ -3,17 +3,13 @@ package terraform_test import ( - "bytes" "context" - "crypto/sha256" - "encoding/hex" "encoding/json" "errors" "fmt" "net" "net/http" "os" - "os/exec" "path/filepath" "sort" "strings" @@ -94,168 +90,6 @@ func configure(ctx context.Context, t *testing.T, client proto.DRPCProvisionerCl return sess } -func hashTemplateFilesAndTestName(t *testing.T, testName string, templateFiles map[string]string) string { - t.Helper() - - sortedFileNames := make([]string, 0, len(templateFiles)) - for fileName := range templateFiles { - sortedFileNames = append(sortedFileNames, fileName) - } - sort.Strings(sortedFileNames) - - // Inserting a delimiter between the file name and the file content - // ensures that a file named `ab` with content `cd` - // will not hash to the same value as a file named `abc` with content `d`. - // This can still happen if the file name or content include the delimiter, - // but hopefully they won't. 
- delimiter := []byte("🎉 🌱 🌷") - - hasher := sha256.New() - for _, fileName := range sortedFileNames { - file := templateFiles[fileName] - _, err := hasher.Write([]byte(fileName)) - require.NoError(t, err) - _, err = hasher.Write(delimiter) - require.NoError(t, err) - _, err = hasher.Write([]byte(file)) - require.NoError(t, err) - } - _, err := hasher.Write(delimiter) - require.NoError(t, err) - _, err = hasher.Write([]byte(testName)) - require.NoError(t, err) - - return hex.EncodeToString(hasher.Sum(nil)) -} - -const ( - terraformConfigFileName = "terraform.rc" - cacheProvidersDirName = "providers" - cacheTemplateFilesDirName = "files" -) - -// Writes a Terraform CLI config file (`terraform.rc`) in `dir` to enforce using the local provider mirror. -// This blocks network access for providers, forcing Terraform to use only what's cached in `dir`. -// Returns the path to the generated config file. -func writeCliConfig(t *testing.T, dir string) string { - t.Helper() - - cliConfigPath := filepath.Join(dir, terraformConfigFileName) - require.NoError(t, os.MkdirAll(filepath.Dir(cliConfigPath), 0o700)) - - content := fmt.Sprintf(` - provider_installation { - filesystem_mirror { - path = "%s" - include = ["*/*"] - } - direct { - exclude = ["*/*"] - } - } - `, filepath.Join(dir, cacheProvidersDirName)) - require.NoError(t, os.WriteFile(cliConfigPath, []byte(content), 0o600)) - return cliConfigPath -} - -func runCmd(t *testing.T, dir string, args ...string) { - t.Helper() - - stdout, stderr := bytes.NewBuffer(nil), bytes.NewBuffer(nil) - cmd := exec.Command(args[0], args[1:]...) //#nosec - cmd.Dir = dir - cmd.Stdout = stdout - cmd.Stderr = stderr - if err := cmd.Run(); err != nil { - t.Fatalf("failed to run %s: %s\nstdout: %s\nstderr: %s", strings.Join(args, " "), err, stdout.String(), stderr.String()) - } -} - -// Each test gets a unique cache dir based on its name and template files. 
-// This ensures that tests can download providers in parallel and that they -// will redownload providers if the template files change. -func getTestCacheDir(t *testing.T, rootDir string, testName string, templateFiles map[string]string) string { - t.Helper() - - hash := hashTemplateFilesAndTestName(t, testName, templateFiles) - dir := filepath.Join(rootDir, hash[:12]) - return dir -} - -// Ensures Terraform providers are downloaded and cached locally in a unique directory for the test. -// Uses `terraform init` then `mirror` to populate the cache if needed. -// Returns the cache directory path. -func downloadProviders(t *testing.T, rootDir string, testName string, templateFiles map[string]string) string { - t.Helper() - - dir := getTestCacheDir(t, rootDir, testName, templateFiles) - if _, err := os.Stat(dir); err == nil { - t.Logf("%s: using cached terraform providers", testName) - return dir - } - filesDir := filepath.Join(dir, cacheTemplateFilesDirName) - defer func() { - // The files dir will contain a copy of terraform providers generated - // by the terraform init command. We don't want to persist them since - // we already have a registry mirror in the providers dir. - if err := os.RemoveAll(filesDir); err != nil { - t.Logf("failed to remove files dir %s: %s", filesDir, err) - } - if !t.Failed() { - return - } - // If `downloadProviders` function failed, clean up the cache dir. - // We don't want to leave it around because it may be incomplete or corrupted. 
- if err := os.RemoveAll(dir); err != nil { - t.Logf("failed to remove dir %s: %s", dir, err) - } - }() - - require.NoError(t, os.MkdirAll(filesDir, 0o700)) - - for fileName, file := range templateFiles { - filePath := filepath.Join(filesDir, fileName) - require.NoError(t, os.MkdirAll(filepath.Dir(filePath), 0o700)) - require.NoError(t, os.WriteFile(filePath, []byte(file), 0o600)) - } - - providersDir := filepath.Join(dir, cacheProvidersDirName) - require.NoError(t, os.MkdirAll(providersDir, 0o700)) - - // We need to run init because if a test uses modules in its template, - // the mirror command will fail without it. - runCmd(t, filesDir, "terraform", "init") - // Now, mirror the providers into `providersDir`. We use this explicit mirror - // instead of relying only on the standard Terraform plugin cache. - // - // Why? Because this mirror, when used with the CLI config from `writeCliConfig`, - // prevents Terraform from hitting the network registry during `plan`. This cuts - // down on network calls, making CI tests less flaky. - // - // In contrast, the standard cache *still* contacts the registry for metadata - // during `init`, even if the plugins are already cached locally - see link below. - // - // Ref: https://developer.hashicorp.com/terraform/cli/config/config-file#provider-plugin-cache - // > When a plugin cache directory is enabled, the terraform init command will - // > still use the configured or implied installation methods to obtain metadata - // > about which plugins are available - runCmd(t, filesDir, "terraform", "providers", "mirror", providersDir) - - return dir -} - -// Caches providers locally and generates a Terraform CLI config to use *only* that cache. -// This setup prevents network access for providers during `terraform init`, improving reliability -// in subsequent test runs. -// Returns the path to the generated CLI config file. 
-func cacheProviders(t *testing.T, rootDir string, testName string, templateFiles map[string]string) string { - t.Helper() - - providersParentDir := downloadProviders(t, rootDir, testName, templateFiles) - cliConfigPath := writeCliConfig(t, providersParentDir) - return cliConfigPath -} - func readProvisionLog(t *testing.T, response proto.DRPCProvisioner_SessionClient) string { var logBuf strings.Builder for { @@ -1177,7 +1011,7 @@ func TestProvision(t *testing.T) { cacheRootDir := filepath.Join(testutil.PersistentCacheDir(t), "terraform_provision_test") expectedCacheDirs := make(map[string]bool) for _, testCase := range testCases { - cacheDir := getTestCacheDir(t, cacheRootDir, testCase.Name, testCase.Files) + cacheDir := testutil.GetTestTFCacheDir(t, cacheRootDir, testCase.Name, testCase.Files) expectedCacheDirs[cacheDir] = true } currentCacheDirs, err := filepath.Glob(filepath.Join(cacheRootDir, "*")) @@ -1199,7 +1033,7 @@ func TestProvision(t *testing.T) { cliConfigPath := "" if !testCase.SkipCacheProviders { - cliConfigPath = cacheProviders( + cliConfigPath = testutil.CacheTFProviders( t, cacheRootDir, testCase.Name, diff --git a/provisioner/terraform/resources_test.go b/provisioner/terraform/resources_test.go index a2c5b536ac2db..449df09cfaa00 100644 --- a/provisioner/terraform/resources_test.go +++ b/provisioner/terraform/resources_test.go @@ -2,6 +2,7 @@ package terraform_test import ( "context" + "crypto/sha256" "encoding/json" "fmt" "os" @@ -12,6 +13,7 @@ import ( "testing" "github.com/google/go-cmp/cmp" + "github.com/google/uuid" tfjson "github.com/hashicorp/terraform-json" "github.com/stretchr/testify/require" protobuf "google.golang.org/protobuf/proto" @@ -1669,3 +1671,18 @@ func sortExternalAuthProviders(providers []*proto.ExternalAuthProviderResource) return strings.Compare(providers[i].Id, providers[j].Id) == -1 }) } + +// deterministicAppIDs handles setting agent app ids to something deterministic. +// In plan files, ids are not present. 
In state files, they are. +// It is simpler for comparisons if we just set it to something deterministic. +func deterministicAppIDs(resources []*proto.Resource) { + for _, resource := range resources { + for _, agent := range resource.Agents { + for _, app := range agent.Apps { + data := sha256.Sum256([]byte(app.Slug + app.DisplayName)) + id, _ := uuid.FromBytes(data[:16]) + app.Id = id.String() + } + } + } +} diff --git a/provisioner/terraform/serve.go b/provisioner/terraform/serve.go index 3e671b0c68e56..32b5343f6f3ce 100644 --- a/provisioner/terraform/serve.go +++ b/provisioner/terraform/serve.go @@ -14,6 +14,7 @@ import ( "golang.org/x/xerrors" "cdr.dev/slog" + "github.com/coder/coder/v2/provisionersdk/tfpath" "github.com/coder/coder/v2/coderd/database" "github.com/coder/coder/v2/coderd/jobreaper" @@ -102,7 +103,7 @@ func Serve(ctx context.Context, options *ServeOptions) error { slog.F("min_version", minTerraformVersion.String())) } - binPath, err := Install(ctx, options.Logger, options.ExternalProvisioner, options.CachePath, TerraformVersion) + binPath, err := Install(ctx, options.Logger, options.ExternalProvisioner, options.CachePath, TerraformVersion, "") if err != nil { return xerrors.Errorf("install terraform: %w", err) } @@ -160,14 +161,14 @@ func (s *server) startTrace(ctx context.Context, name string, opts ...trace.Span ))...) 
} -func (s *server) executor(workdir string, stage database.ProvisionerJobTimingStage) *executor { +func (s *server) executor(files tfpath.Layouter, stage database.ProvisionerJobTimingStage) *executor { return &executor{ server: s, mut: s.execMut, binaryPath: s.binaryPath, cachePath: s.cachePath, cliConfigPath: s.cliConfigPath, - workdir: workdir, + files: files, logger: s.logger.Named("executor"), timings: newTimingAggregator(stage), } diff --git a/provisioner/terraform/testdata/resources/ai-tasks-app/converted_state.plan.golden b/provisioner/terraform/testdata/resources/ai-tasks-app/converted_state.plan.golden new file mode 100644 index 0000000000000..84ba18790acbe --- /dev/null +++ b/provisioner/terraform/testdata/resources/ai-tasks-app/converted_state.plan.golden @@ -0,0 +1,21 @@ +{ + "Resources": [ + { + "name": "a", + "type": "coder_ai_task" + } + ], + "Parameters": [], + "Presets": [], + "ExternalAuthProviders": [], + "AITasks": [ + { + "sidebar_app": { + "id": "5ece4674-dd35-4f16-88c8-82e40e72e2fd" + }, + "app_id": "5ece4674-dd35-4f16-88c8-82e40e72e2fd" + } + ], + "HasAITasks": true, + "HasExternalAgents": false +} diff --git a/provisioner/terraform/testdata/resources/ai-tasks-app/converted_state.state.golden b/provisioner/terraform/testdata/resources/ai-tasks-app/converted_state.state.golden new file mode 100644 index 0000000000000..7be30d4b4d5cd --- /dev/null +++ b/provisioner/terraform/testdata/resources/ai-tasks-app/converted_state.state.golden @@ -0,0 +1,22 @@ +{ + "Resources": [ + { + "name": "a", + "type": "coder_ai_task" + } + ], + "Parameters": [], + "Presets": [], + "ExternalAuthProviders": [], + "AITasks": [ + { + "id": "c4f032b8-97e4-42b0-aa2f-30a9e698f8d4", + "sidebar_app": { + "id": "5ece4674-dd35-4f16-88c8-82e40e72e2fd" + }, + "app_id": "5ece4674-dd35-4f16-88c8-82e40e72e2fd" + } + ], + "HasAITasks": true, + "HasExternalAgents": false +} diff --git a/provisioner/terraform/testdata/resources/ai-tasks-multiple/converted_state.plan.golden 
b/provisioner/terraform/testdata/resources/ai-tasks-multiple/converted_state.plan.golden new file mode 100644 index 0000000000000..687d4920b8bec --- /dev/null +++ b/provisioner/terraform/testdata/resources/ai-tasks-multiple/converted_state.plan.golden @@ -0,0 +1,31 @@ +{ + "Resources": [ + { + "name": "a", + "type": "coder_ai_task" + }, + { + "name": "b", + "type": "coder_ai_task" + } + ], + "Parameters": [], + "Presets": [], + "ExternalAuthProviders": [], + "AITasks": [ + { + "sidebar_app": { + "id": "5ece4674-dd35-4f16-88c8-82e40e72e2fd" + }, + "app_id": "5ece4674-dd35-4f16-88c8-82e40e72e2fd" + }, + { + "sidebar_app": { + "id": "5ece4674-dd35-4f16-88c8-82e40e72e2fd" + }, + "app_id": "5ece4674-dd35-4f16-88c8-82e40e72e2fd" + } + ], + "HasAITasks": true, + "HasExternalAgents": false +} diff --git a/provisioner/terraform/testdata/resources/ai-tasks-multiple/converted_state.state.golden b/provisioner/terraform/testdata/resources/ai-tasks-multiple/converted_state.state.golden new file mode 100644 index 0000000000000..10e510eac1c75 --- /dev/null +++ b/provisioner/terraform/testdata/resources/ai-tasks-multiple/converted_state.state.golden @@ -0,0 +1,33 @@ +{ + "Resources": [ + { + "name": "a", + "type": "coder_ai_task" + }, + { + "name": "b", + "type": "coder_ai_task" + } + ], + "Parameters": [], + "Presets": [], + "ExternalAuthProviders": [], + "AITasks": [ + { + "id": "89e6ab36-2e98-4d13-9b4c-69b7588b7e1d", + "sidebar_app": { + "id": "5ece4674-dd35-4f16-88c8-82e40e72e2fd" + }, + "app_id": "5ece4674-dd35-4f16-88c8-82e40e72e2fd" + }, + { + "id": "5ece4674-dd35-4f16-88c8-82e40e72e2fd", + "sidebar_app": { + "id": "5ece4674-dd35-4f16-88c8-82e40e72e2fd" + }, + "app_id": "5ece4674-dd35-4f16-88c8-82e40e72e2fd" + } + ], + "HasAITasks": true, + "HasExternalAgents": false +} diff --git a/provisioner/terraform/testdata/resources/ai-tasks-sidebar/converted_state.plan.golden b/provisioner/terraform/testdata/resources/ai-tasks-sidebar/converted_state.plan.golden new file mode 100644 
index 0000000000000..84ba18790acbe --- /dev/null +++ b/provisioner/terraform/testdata/resources/ai-tasks-sidebar/converted_state.plan.golden @@ -0,0 +1,21 @@ +{ + "Resources": [ + { + "name": "a", + "type": "coder_ai_task" + } + ], + "Parameters": [], + "Presets": [], + "ExternalAuthProviders": [], + "AITasks": [ + { + "sidebar_app": { + "id": "5ece4674-dd35-4f16-88c8-82e40e72e2fd" + }, + "app_id": "5ece4674-dd35-4f16-88c8-82e40e72e2fd" + } + ], + "HasAITasks": true, + "HasExternalAgents": false +} diff --git a/provisioner/terraform/testdata/resources/ai-tasks-sidebar/converted_state.state.golden b/provisioner/terraform/testdata/resources/ai-tasks-sidebar/converted_state.state.golden new file mode 100644 index 0000000000000..4984e279fb851 --- /dev/null +++ b/provisioner/terraform/testdata/resources/ai-tasks-sidebar/converted_state.state.golden @@ -0,0 +1,22 @@ +{ + "Resources": [ + { + "name": "a", + "type": "coder_ai_task" + } + ], + "Parameters": [], + "Presets": [], + "ExternalAuthProviders": [], + "AITasks": [ + { + "id": "89e6ab36-2e98-4d13-9b4c-69b7588b7e1d", + "sidebar_app": { + "id": "5ece4674-dd35-4f16-88c8-82e40e72e2fd" + }, + "app_id": "5ece4674-dd35-4f16-88c8-82e40e72e2fd" + } + ], + "HasAITasks": true, + "HasExternalAgents": false +} diff --git a/provisioner/terraform/testdata/resources/calling-module/converted_state.plan.golden b/provisioner/terraform/testdata/resources/calling-module/converted_state.plan.golden new file mode 100644 index 0000000000000..ed13fb19fd719 --- /dev/null +++ b/provisioner/terraform/testdata/resources/calling-module/converted_state.plan.golden @@ -0,0 +1,34 @@ +{ + "Resources": [ + { + "name": "example", + "type": "null_resource", + "agents": [ + { + "name": "main", + "operating_system": "linux", + "architecture": "amd64", + "Auth": { + "Token": "" + }, + "connection_timeout_seconds": 120, + "display_apps": { + "vscode": true, + "web_terminal": true, + "ssh_helper": true, + "port_forwarding_helper": true + }, + 
"resources_monitoring": {}, + "api_key_scope": "all" + } + ], + "module_path": "module.module" + } + ], + "Parameters": [], + "Presets": [], + "ExternalAuthProviders": [], + "AITasks": [], + "HasAITasks": false, + "HasExternalAgents": false +} diff --git a/provisioner/terraform/testdata/resources/calling-module/converted_state.state.golden b/provisioner/terraform/testdata/resources/calling-module/converted_state.state.golden new file mode 100644 index 0000000000000..cefa9f257f7e2 --- /dev/null +++ b/provisioner/terraform/testdata/resources/calling-module/converted_state.state.golden @@ -0,0 +1,35 @@ +{ + "Resources": [ + { + "name": "example", + "type": "null_resource", + "agents": [ + { + "id": "8cb7c83a-eddb-45e9-a78c-4b50d0f10e5e", + "name": "main", + "operating_system": "linux", + "architecture": "amd64", + "Auth": { + "Token": "59bcf169-14fe-497d-9a97-709c1d837848" + }, + "connection_timeout_seconds": 120, + "display_apps": { + "vscode": true, + "web_terminal": true, + "ssh_helper": true, + "port_forwarding_helper": true + }, + "resources_monitoring": {}, + "api_key_scope": "all" + } + ], + "module_path": "module.module" + } + ], + "Parameters": [], + "Presets": [], + "ExternalAuthProviders": [], + "AITasks": [], + "HasAITasks": false, + "HasExternalAgents": false +} diff --git a/provisioner/terraform/testdata/resources/chaining-resources/converted_state.plan.golden b/provisioner/terraform/testdata/resources/chaining-resources/converted_state.plan.golden new file mode 100644 index 0000000000000..5314f549e7fdd --- /dev/null +++ b/provisioner/terraform/testdata/resources/chaining-resources/converted_state.plan.golden @@ -0,0 +1,37 @@ +{ + "Resources": [ + { + "name": "a", + "type": "null_resource" + }, + { + "name": "b", + "type": "null_resource", + "agents": [ + { + "name": "main", + "operating_system": "linux", + "architecture": "amd64", + "Auth": { + "Token": "" + }, + "connection_timeout_seconds": 120, + "display_apps": { + "vscode": true, + "web_terminal": 
true, + "ssh_helper": true, + "port_forwarding_helper": true + }, + "resources_monitoring": {}, + "api_key_scope": "all" + } + ] + } + ], + "Parameters": [], + "Presets": [], + "ExternalAuthProviders": [], + "AITasks": [], + "HasAITasks": false, + "HasExternalAgents": false +} diff --git a/provisioner/terraform/testdata/resources/chaining-resources/converted_state.state.golden b/provisioner/terraform/testdata/resources/chaining-resources/converted_state.state.golden new file mode 100644 index 0000000000000..48879277d69f7 --- /dev/null +++ b/provisioner/terraform/testdata/resources/chaining-resources/converted_state.state.golden @@ -0,0 +1,38 @@ +{ + "Resources": [ + { + "name": "a", + "type": "null_resource" + }, + { + "name": "b", + "type": "null_resource", + "agents": [ + { + "id": "d9f5159f-58be-4035-b13c-8e9d988ea2fc", + "name": "main", + "operating_system": "linux", + "architecture": "amd64", + "Auth": { + "Token": "20b314d3-9acc-4ae7-8fd7-b8fcfc456e06" + }, + "connection_timeout_seconds": 120, + "display_apps": { + "vscode": true, + "web_terminal": true, + "ssh_helper": true, + "port_forwarding_helper": true + }, + "resources_monitoring": {}, + "api_key_scope": "all" + } + ] + } + ], + "Parameters": [], + "Presets": [], + "ExternalAuthProviders": [], + "AITasks": [], + "HasAITasks": false, + "HasExternalAgents": false +} diff --git a/provisioner/terraform/testdata/resources/conflicting-resources/converted_state.plan.golden b/provisioner/terraform/testdata/resources/conflicting-resources/converted_state.plan.golden new file mode 100644 index 0000000000000..ee1553bc9b329 --- /dev/null +++ b/provisioner/terraform/testdata/resources/conflicting-resources/converted_state.plan.golden @@ -0,0 +1,37 @@ +{ + "Resources": [ + { + "name": "first", + "type": "null_resource", + "agents": [ + { + "name": "main", + "operating_system": "linux", + "architecture": "amd64", + "Auth": { + "Token": "" + }, + "connection_timeout_seconds": 120, + "display_apps": { + "vscode": true, 
+ "web_terminal": true, + "ssh_helper": true, + "port_forwarding_helper": true + }, + "resources_monitoring": {}, + "api_key_scope": "all" + } + ] + }, + { + "name": "second", + "type": "null_resource" + } + ], + "Parameters": [], + "Presets": [], + "ExternalAuthProviders": [], + "AITasks": [], + "HasAITasks": false, + "HasExternalAgents": false +} diff --git a/provisioner/terraform/testdata/resources/conflicting-resources/converted_state.state.golden b/provisioner/terraform/testdata/resources/conflicting-resources/converted_state.state.golden new file mode 100644 index 0000000000000..6da4224355b3c --- /dev/null +++ b/provisioner/terraform/testdata/resources/conflicting-resources/converted_state.state.golden @@ -0,0 +1,38 @@ +{ + "Resources": [ + { + "name": "first", + "type": "null_resource", + "agents": [ + { + "id": "e78db244-3076-4c04-8ac3-5a55dae032e7", + "name": "main", + "operating_system": "linux", + "architecture": "amd64", + "Auth": { + "Token": "c0a7e7f5-2616-429e-ac69-a8c3d9bbbb5d" + }, + "connection_timeout_seconds": 120, + "display_apps": { + "vscode": true, + "web_terminal": true, + "ssh_helper": true, + "port_forwarding_helper": true + }, + "resources_monitoring": {}, + "api_key_scope": "all" + } + ] + }, + { + "name": "second", + "type": "null_resource" + } + ], + "Parameters": [], + "Presets": [], + "ExternalAuthProviders": [], + "AITasks": [], + "HasAITasks": false, + "HasExternalAgents": false +} diff --git a/provisioner/terraform/testdata/resources/devcontainer/converted_state.plan.golden b/provisioner/terraform/testdata/resources/devcontainer/converted_state.plan.golden new file mode 100644 index 0000000000000..fded49faa9e15 --- /dev/null +++ b/provisioner/terraform/testdata/resources/devcontainer/converted_state.plan.golden @@ -0,0 +1,52 @@ +{ + "Resources": [ + { + "name": "dev", + "type": "null_resource", + "agents": [ + { + "name": "main", + "operating_system": "linux", + "architecture": "amd64", + "Auth": { + "Token": "" + }, + 
"connection_timeout_seconds": 120, + "display_apps": { + "vscode": true, + "web_terminal": true, + "ssh_helper": true, + "port_forwarding_helper": true + }, + "resources_monitoring": {}, + "devcontainers": [ + { + "workspace_folder": "/workspace1", + "name": "dev1" + }, + { + "workspace_folder": "/workspace2", + "config_path": "/workspace2/.devcontainer/devcontainer.json", + "name": "dev2" + } + ], + "api_key_scope": "all" + } + ] + }, + { + "name": "dev1", + "type": "coder_devcontainer" + }, + { + "name": "dev2", + "type": "coder_devcontainer" + } + ], + "Parameters": [], + "Presets": [], + "ExternalAuthProviders": [], + "AITasks": [], + "HasAITasks": false, + "HasExternalAgents": false +} diff --git a/provisioner/terraform/testdata/resources/devcontainer/converted_state.state.golden b/provisioner/terraform/testdata/resources/devcontainer/converted_state.state.golden new file mode 100644 index 0000000000000..fe89c7bcc76c2 --- /dev/null +++ b/provisioner/terraform/testdata/resources/devcontainer/converted_state.state.golden @@ -0,0 +1,53 @@ +{ + "Resources": [ + { + "name": "dev", + "type": "null_resource", + "agents": [ + { + "id": "eb1fa705-34c6-405b-a2ec-70e4efd1614e", + "name": "main", + "operating_system": "linux", + "architecture": "amd64", + "Auth": { + "Token": "e8663cf8-6991-40ca-b534-b9d48575cc4e" + }, + "connection_timeout_seconds": 120, + "display_apps": { + "vscode": true, + "web_terminal": true, + "ssh_helper": true, + "port_forwarding_helper": true + }, + "resources_monitoring": {}, + "devcontainers": [ + { + "workspace_folder": "/workspace1", + "name": "dev1" + }, + { + "workspace_folder": "/workspace2", + "config_path": "/workspace2/.devcontainer/devcontainer.json", + "name": "dev2" + } + ], + "api_key_scope": "all" + } + ] + }, + { + "name": "dev1", + "type": "coder_devcontainer" + }, + { + "name": "dev2", + "type": "coder_devcontainer" + } + ], + "Parameters": [], + "Presets": [], + "ExternalAuthProviders": [], + "AITasks": [], + "HasAITasks": 
false, + "HasExternalAgents": false +} diff --git a/provisioner/terraform/testdata/resources/display-apps-disabled/converted_state.plan.golden b/provisioner/terraform/testdata/resources/display-apps-disabled/converted_state.plan.golden new file mode 100644 index 0000000000000..cdce3f15b2ea5 --- /dev/null +++ b/provisioner/terraform/testdata/resources/display-apps-disabled/converted_state.plan.golden @@ -0,0 +1,28 @@ +{ + "Resources": [ + { + "name": "dev", + "type": "null_resource", + "agents": [ + { + "name": "main", + "operating_system": "linux", + "architecture": "amd64", + "Auth": { + "Token": "" + }, + "connection_timeout_seconds": 120, + "display_apps": {}, + "resources_monitoring": {}, + "api_key_scope": "all" + } + ] + } + ], + "Parameters": [], + "Presets": [], + "ExternalAuthProviders": [], + "AITasks": [], + "HasAITasks": false, + "HasExternalAgents": false +} diff --git a/provisioner/terraform/testdata/resources/display-apps-disabled/converted_state.state.golden b/provisioner/terraform/testdata/resources/display-apps-disabled/converted_state.state.golden new file mode 100644 index 0000000000000..924814c69ada2 --- /dev/null +++ b/provisioner/terraform/testdata/resources/display-apps-disabled/converted_state.state.golden @@ -0,0 +1,29 @@ +{ + "Resources": [ + { + "name": "dev", + "type": "null_resource", + "agents": [ + { + "id": "149d8647-ec80-4a63-9aa5-2c82452e69a6", + "name": "main", + "operating_system": "linux", + "architecture": "amd64", + "Auth": { + "Token": "bd20db5f-7645-411f-b253-033e494e6c89" + }, + "connection_timeout_seconds": 120, + "display_apps": {}, + "resources_monitoring": {}, + "api_key_scope": "all" + } + ] + } + ], + "Parameters": [], + "Presets": [], + "ExternalAuthProviders": [], + "AITasks": [], + "HasAITasks": false, + "HasExternalAgents": false +} diff --git a/provisioner/terraform/testdata/resources/display-apps/converted_state.plan.golden b/provisioner/terraform/testdata/resources/display-apps/converted_state.plan.golden new 
file mode 100644 index 0000000000000..d7fe5795eb0a1 --- /dev/null +++ b/provisioner/terraform/testdata/resources/display-apps/converted_state.plan.golden @@ -0,0 +1,31 @@ +{ + "Resources": [ + { + "name": "dev", + "type": "null_resource", + "agents": [ + { + "name": "main", + "operating_system": "linux", + "architecture": "amd64", + "Auth": { + "Token": "" + }, + "connection_timeout_seconds": 120, + "display_apps": { + "vscode_insiders": true, + "web_terminal": true + }, + "resources_monitoring": {}, + "api_key_scope": "all" + } + ] + } + ], + "Parameters": [], + "Presets": [], + "ExternalAuthProviders": [], + "AITasks": [], + "HasAITasks": false, + "HasExternalAgents": false +} diff --git a/provisioner/terraform/testdata/resources/display-apps/converted_state.state.golden b/provisioner/terraform/testdata/resources/display-apps/converted_state.state.golden new file mode 100644 index 0000000000000..63ef183e8925c --- /dev/null +++ b/provisioner/terraform/testdata/resources/display-apps/converted_state.state.golden @@ -0,0 +1,32 @@ +{ + "Resources": [ + { + "name": "dev", + "type": "null_resource", + "agents": [ + { + "id": "c49a0e36-fd67-4946-a75f-ff52b77e9f95", + "name": "main", + "operating_system": "linux", + "architecture": "amd64", + "Auth": { + "Token": "d9775224-6ecb-4c53-b24d-931555a7c86a" + }, + "connection_timeout_seconds": 120, + "display_apps": { + "vscode_insiders": true, + "web_terminal": true + }, + "resources_monitoring": {}, + "api_key_scope": "all" + } + ] + } + ], + "Parameters": [], + "Presets": [], + "ExternalAuthProviders": [], + "AITasks": [], + "HasAITasks": false, + "HasExternalAgents": false +} diff --git a/provisioner/terraform/testdata/resources/external-agents/converted_state.plan.golden b/provisioner/terraform/testdata/resources/external-agents/converted_state.plan.golden new file mode 100644 index 0000000000000..2a806a7e08571 --- /dev/null +++ b/provisioner/terraform/testdata/resources/external-agents/converted_state.plan.golden @@ -0,0 
+1,33 @@ +{ + "Resources": [ + { + "name": "dev1", + "type": "coder_external_agent", + "agents": [ + { + "name": "dev1", + "operating_system": "linux", + "architecture": "amd64", + "Auth": { + "Token": "" + }, + "connection_timeout_seconds": 120, + "display_apps": { + "vscode": true, + "web_terminal": true, + "ssh_helper": true, + "port_forwarding_helper": true + }, + "resources_monitoring": {}, + "api_key_scope": "all" + } + ] + } + ], + "Parameters": [], + "Presets": [], + "ExternalAuthProviders": [], + "AITasks": [], + "HasAITasks": false, + "HasExternalAgents": true +} diff --git a/provisioner/terraform/testdata/resources/external-agents/converted_state.state.golden b/provisioner/terraform/testdata/resources/external-agents/converted_state.state.golden new file mode 100644 index 0000000000000..da0af3790a2e1 --- /dev/null +++ b/provisioner/terraform/testdata/resources/external-agents/converted_state.state.golden @@ -0,0 +1,34 @@ +{ + "Resources": [ + { + "name": "dev1", + "type": "coder_external_agent", + "agents": [ + { + "id": "15a35370-3b2e-4ee7-8b28-81cef0152d8b", + "name": "dev1", + "operating_system": "linux", + "architecture": "amd64", + "Auth": { + "Token": "d054c66b-cc5c-41ae-aa0c-2098a1075272" + }, + "connection_timeout_seconds": 120, + "display_apps": { + "vscode": true, + "web_terminal": true, + "ssh_helper": true, + "port_forwarding_helper": true + }, + "resources_monitoring": {}, + "api_key_scope": "all" + } + ] + } + ], + "Parameters": [], + "Presets": [], + "ExternalAuthProviders": [], + "AITasks": [], + "HasAITasks": false, + "HasExternalAgents": true +} diff --git a/provisioner/terraform/testdata/resources/external-auth-providers/converted_state.plan.golden b/provisioner/terraform/testdata/resources/external-auth-providers/converted_state.plan.golden new file mode 100644 index 0000000000000..91bc3bdf09da7 --- /dev/null +++ b/provisioner/terraform/testdata/resources/external-auth-providers/converted_state.plan.golden @@ -0,0 +1,41 @@ +{ + 
"Resources": [ + { + "name": "dev", + "type": "null_resource", + "agents": [ + { + "name": "main", + "operating_system": "linux", + "architecture": "amd64", + "Auth": { + "Token": "" + }, + "connection_timeout_seconds": 120, + "display_apps": { + "vscode": true, + "web_terminal": true, + "ssh_helper": true, + "port_forwarding_helper": true + }, + "resources_monitoring": {}, + "api_key_scope": "all" + } + ] + } + ], + "Parameters": [], + "Presets": [], + "ExternalAuthProviders": [ + { + "id": "github" + }, + { + "id": "gitlab", + "optional": true + } + ], + "AITasks": [], + "HasAITasks": false, + "HasExternalAgents": false +} diff --git a/provisioner/terraform/testdata/resources/external-auth-providers/converted_state.state.golden b/provisioner/terraform/testdata/resources/external-auth-providers/converted_state.state.golden new file mode 100644 index 0000000000000..87a47db1206f1 --- /dev/null +++ b/provisioner/terraform/testdata/resources/external-auth-providers/converted_state.state.golden @@ -0,0 +1,42 @@ +{ + "Resources": [ + { + "name": "dev", + "type": "null_resource", + "agents": [ + { + "id": "1682dc74-4f8a-49da-8c36-3df839f5c1f0", + "name": "main", + "operating_system": "linux", + "architecture": "amd64", + "Auth": { + "Token": "c018b99e-4370-409c-b81d-6305c5cd9078" + }, + "connection_timeout_seconds": 120, + "display_apps": { + "vscode": true, + "web_terminal": true, + "ssh_helper": true, + "port_forwarding_helper": true + }, + "resources_monitoring": {}, + "api_key_scope": "all" + } + ] + } + ], + "Parameters": [], + "Presets": [], + "ExternalAuthProviders": [ + { + "id": "github" + }, + { + "id": "gitlab", + "optional": true + } + ], + "AITasks": [], + "HasAITasks": false, + "HasExternalAgents": false +} diff --git a/provisioner/terraform/testdata/resources/instance-id/converted_state.plan.golden b/provisioner/terraform/testdata/resources/instance-id/converted_state.plan.golden new file mode 100644 index 0000000000000..954495aa0b11f --- /dev/null +++ 
b/provisioner/terraform/testdata/resources/instance-id/converted_state.plan.golden @@ -0,0 +1,33 @@ +{ + "Resources": [ + { + "name": "main", + "type": "null_resource", + "agents": [ + { + "name": "main", + "operating_system": "linux", + "architecture": "amd64", + "Auth": { + "InstanceId": "" + }, + "connection_timeout_seconds": 120, + "display_apps": { + "vscode": true, + "web_terminal": true, + "ssh_helper": true, + "port_forwarding_helper": true + }, + "resources_monitoring": {}, + "api_key_scope": "all" + } + ] + } + ], + "Parameters": [], + "Presets": [], + "ExternalAuthProviders": [], + "AITasks": [], + "HasAITasks": false, + "HasExternalAgents": false +} diff --git a/provisioner/terraform/testdata/resources/instance-id/converted_state.state.golden b/provisioner/terraform/testdata/resources/instance-id/converted_state.state.golden new file mode 100644 index 0000000000000..031e264526c5b --- /dev/null +++ b/provisioner/terraform/testdata/resources/instance-id/converted_state.state.golden @@ -0,0 +1,34 @@ +{ + "Resources": [ + { + "name": "main", + "type": "null_resource", + "agents": [ + { + "id": "8e130bb7-437f-4892-a2e4-ae892f95d824", + "name": "main", + "operating_system": "linux", + "architecture": "amd64", + "Auth": { + "InstanceId": "example" + }, + "connection_timeout_seconds": 120, + "display_apps": { + "vscode": true, + "web_terminal": true, + "ssh_helper": true, + "port_forwarding_helper": true + }, + "resources_monitoring": {}, + "api_key_scope": "all" + } + ] + } + ], + "Parameters": [], + "Presets": [], + "ExternalAuthProviders": [], + "AITasks": [], + "HasAITasks": false, + "HasExternalAgents": false +} diff --git a/provisioner/terraform/testdata/resources/kubernetes-metadata/converted_state.plan.golden b/provisioner/terraform/testdata/resources/kubernetes-metadata/converted_state.plan.golden new file mode 100644 index 0000000000000..b9400c3917df2 --- /dev/null +++ 
b/provisioner/terraform/testdata/resources/kubernetes-metadata/converted_state.plan.golden @@ -0,0 +1,85 @@ +{ + "Resources": [ + { + "name": "coder_workspace", + "type": "kubernetes_config_map" + }, + { + "name": "coder_workspace", + "type": "kubernetes_role" + }, + { + "name": "coder_workspace", + "type": "kubernetes_role_binding" + }, + { + "name": "coder_workspace", + "type": "kubernetes_secret" + }, + { + "name": "coder_workspace", + "type": "kubernetes_service_account" + }, + { + "name": "main", + "type": "kubernetes_pod", + "agents": [ + { + "name": "main", + "operating_system": "linux", + "architecture": "amd64", + "apps": [ + { + "slug": "code-server", + "display_name": "code-server", + "url": "http://localhost:13337?folder=/home/coder", + "icon": "/icon/code.svg", + "open_in": 1, + "id": "73971185-3dea-f456-c568-4f285dbcdb52" + } + ], + "Auth": { + "Token": "" + }, + "connection_timeout_seconds": 120, + "display_apps": { + "vscode": true, + "web_terminal": true, + "ssh_helper": true, + "port_forwarding_helper": true + }, + "scripts": [ + { + "display_name": "Startup Script", + "icon": "/emojis/25b6-fe0f.png", + "script": " #!/bin/bash\n # home folder can be empty, so copying default bash settings\n if [ ! -f ~/.profile ]; then\n cp /etc/skel/.profile $HOME\n fi\n if [ ! 
-f ~/.bashrc ]; then\n cp /etc/skel/.bashrc $HOME\n fi\n # install and start code-server\n curl -fsSL https://code-server.dev/install.sh | sh | tee code-server-install.log\n code-server --auth none --port 13337 | tee code-server-install.log \u0026\n", + "run_on_start": true, + "log_path": "coder-startup-script.log" + } + ], + "resources_monitoring": {} + } + ], + "metadata": [ + { + "key": "cpu", + "value": "1" + }, + { + "key": "memory", + "value": "1Gi" + }, + { + "key": "gpu", + "value": "1" + } + ] + } + ], + "Parameters": [], + "Presets": [], + "ExternalAuthProviders": [], + "AITasks": [], + "HasAITasks": false, + "HasExternalAgents": false +} diff --git a/provisioner/terraform/testdata/resources/kubernetes-metadata/converted_state.state.golden b/provisioner/terraform/testdata/resources/kubernetes-metadata/converted_state.state.golden new file mode 100644 index 0000000000000..d70291e74adcc --- /dev/null +++ b/provisioner/terraform/testdata/resources/kubernetes-metadata/converted_state.state.golden @@ -0,0 +1,86 @@ +{ + "Resources": [ + { + "name": "coder_workspace", + "type": "kubernetes_config_map" + }, + { + "name": "coder_workspace", + "type": "kubernetes_role" + }, + { + "name": "coder_workspace", + "type": "kubernetes_role_binding" + }, + { + "name": "coder_workspace", + "type": "kubernetes_secret" + }, + { + "name": "coder_workspace", + "type": "kubernetes_service_account" + }, + { + "name": "main", + "type": "kubernetes_pod", + "agents": [ + { + "id": "b65f06b5-8698-4e47-80fb-e78f9b920e3d", + "name": "main", + "operating_system": "linux", + "architecture": "amd64", + "apps": [ + { + "slug": "code-server", + "display_name": "code-server", + "url": "http://localhost:13337?folder=/home/coder", + "icon": "/icon/code.svg", + "open_in": 1, + "id": "73971185-3dea-f456-c568-4f285dbcdb52" + } + ], + "Auth": { + "Token": "" + }, + "connection_timeout_seconds": 120, + "display_apps": { + "vscode": true, + "web_terminal": true, + "ssh_helper": true, + 
"port_forwarding_helper": true + }, + "scripts": [ + { + "display_name": "Startup Script", + "icon": "/emojis/25b6-fe0f.png", + "script": " #!/bin/bash\n # home folder can be empty, so copying default bash settings\n if [ ! -f ~/.profile ]; then\n cp /etc/skel/.profile $HOME\n fi\n if [ ! -f ~/.bashrc ]; then\n cp /etc/skel/.bashrc $HOME\n fi\n # install and start code-server\n curl -fsSL https://code-server.dev/install.sh | sh | tee code-server-install.log\n code-server --auth none --port 13337 | tee code-server-install.log \u0026\n", + "run_on_start": true, + "log_path": "coder-startup-script.log" + } + ], + "resources_monitoring": {} + } + ], + "metadata": [ + { + "key": "cpu", + "value": "1" + }, + { + "key": "memory", + "value": "1Gi" + }, + { + "key": "gpu", + "value": "1" + } + ] + } + ], + "Parameters": [], + "Presets": [], + "ExternalAuthProviders": [], + "AITasks": [], + "HasAITasks": false, + "HasExternalAgents": false +} diff --git a/provisioner/terraform/testdata/resources/mapped-apps/converted_state.plan.golden b/provisioner/terraform/testdata/resources/mapped-apps/converted_state.plan.golden new file mode 100644 index 0000000000000..b868351cd00c0 --- /dev/null +++ b/provisioner/terraform/testdata/resources/mapped-apps/converted_state.plan.golden @@ -0,0 +1,47 @@ +{ + "Resources": [ + { + "name": "dev", + "type": "null_resource", + "agents": [ + { + "name": "dev", + "operating_system": "linux", + "architecture": "amd64", + "apps": [ + { + "slug": "app1", + "display_name": "app1", + "open_in": 1, + "id": "634ec976-f595-9122-c51e-8da2e3c6e3ce" + }, + { + "slug": "app2", + "display_name": "app2", + "open_in": 1, + "id": "13922208-d2bc-196b-54cb-3fc084916309" + } + ], + "Auth": { + "Token": "" + }, + "connection_timeout_seconds": 120, + "display_apps": { + "vscode": true, + "web_terminal": true, + "ssh_helper": true, + "port_forwarding_helper": true + }, + "resources_monitoring": {}, + "api_key_scope": "all" + } + ] + } + ], + "Parameters": [], + 
"Presets": [], + "ExternalAuthProviders": [], + "AITasks": [], + "HasAITasks": false, + "HasExternalAgents": false +} diff --git a/provisioner/terraform/testdata/resources/mapped-apps/converted_state.state.golden b/provisioner/terraform/testdata/resources/mapped-apps/converted_state.state.golden new file mode 100644 index 0000000000000..e932aa73dc4f4 --- /dev/null +++ b/provisioner/terraform/testdata/resources/mapped-apps/converted_state.state.golden @@ -0,0 +1,48 @@ +{ + "Resources": [ + { + "name": "dev", + "type": "null_resource", + "agents": [ + { + "id": "bac96c8e-acef-4e1c-820d-0933d6989874", + "name": "dev", + "operating_system": "linux", + "architecture": "amd64", + "apps": [ + { + "slug": "app1", + "display_name": "app1", + "open_in": 1, + "id": "634ec976-f595-9122-c51e-8da2e3c6e3ce" + }, + { + "slug": "app2", + "display_name": "app2", + "open_in": 1, + "id": "13922208-d2bc-196b-54cb-3fc084916309" + } + ], + "Auth": { + "Token": "d52f0d63-5b51-48b3-b342-fd48de4bf957" + }, + "connection_timeout_seconds": 120, + "display_apps": { + "vscode": true, + "web_terminal": true, + "ssh_helper": true, + "port_forwarding_helper": true + }, + "resources_monitoring": {}, + "api_key_scope": "all" + } + ] + } + ], + "Parameters": [], + "Presets": [], + "ExternalAuthProviders": [], + "AITasks": [], + "HasAITasks": false, + "HasExternalAgents": false +} diff --git a/provisioner/terraform/testdata/resources/multiple-agents-multiple-apps/converted_state.plan.golden b/provisioner/terraform/testdata/resources/multiple-agents-multiple-apps/converted_state.plan.golden new file mode 100644 index 0000000000000..5cfdb43ad5de9 --- /dev/null +++ b/provisioner/terraform/testdata/resources/multiple-agents-multiple-apps/converted_state.plan.golden @@ -0,0 +1,84 @@ +{ + "Resources": [ + { + "name": "dev1", + "type": "null_resource", + "agents": [ + { + "name": "dev1", + "operating_system": "linux", + "architecture": "amd64", + "apps": [ + { + "slug": "app1", + "display_name": "app1", + 
"open_in": 1, + "id": "634ec976-f595-9122-c51e-8da2e3c6e3ce" + }, + { + "slug": "app2", + "display_name": "app2", + "subdomain": true, + "healthcheck": { + "url": "http://localhost:13337/healthz", + "interval": 5, + "threshold": 6 + }, + "open_in": 1, + "id": "13922208-d2bc-196b-54cb-3fc084916309" + } + ], + "Auth": { + "Token": "" + }, + "connection_timeout_seconds": 120, + "display_apps": { + "vscode": true, + "web_terminal": true, + "ssh_helper": true, + "port_forwarding_helper": true + }, + "resources_monitoring": {}, + "api_key_scope": "all" + } + ] + }, + { + "name": "dev2", + "type": "null_resource", + "agents": [ + { + "name": "dev2", + "operating_system": "linux", + "architecture": "amd64", + "apps": [ + { + "slug": "app3", + "display_name": "app3", + "open_in": 1, + "id": "a2714999-3f82-11a4-b8fe-3a11d88f3021" + } + ], + "Auth": { + "Token": "" + }, + "connection_timeout_seconds": 120, + "display_apps": { + "vscode": true, + "web_terminal": true, + "ssh_helper": true, + "port_forwarding_helper": true + }, + "resources_monitoring": {}, + "api_key_scope": "all" + } + ] + } + ], + "Parameters": [], + "Presets": [], + "ExternalAuthProviders": [], + "AITasks": [], + "HasAITasks": false, + "HasExternalAgents": false +} diff --git a/provisioner/terraform/testdata/resources/multiple-agents-multiple-apps/converted_state.state.golden b/provisioner/terraform/testdata/resources/multiple-agents-multiple-apps/converted_state.state.golden new file mode 100644 index 0000000000000..bf3722980dd25 --- /dev/null +++ b/provisioner/terraform/testdata/resources/multiple-agents-multiple-apps/converted_state.state.golden @@ -0,0 +1,86 @@ +{ + "Resources": [ + { + "name": "dev1", + "type": "null_resource", + "agents": [ + { + "id": "b67999d7-9356-4d32-b3ed-f9ffd283cd5b", + "name": "dev1", + "operating_system": "linux", + "architecture": "amd64", + "apps": [ + { + "slug": "app1", + "display_name": "app1", + "open_in": 1, + "id": "634ec976-f595-9122-c51e-8da2e3c6e3ce" + }, + { + 
"slug": "app2", + "display_name": "app2", + "subdomain": true, + "healthcheck": { + "url": "http://localhost:13337/healthz", + "interval": 5, + "threshold": 6 + }, + "open_in": 1, + "id": "13922208-d2bc-196b-54cb-3fc084916309" + } + ], + "Auth": { + "Token": "f736f6d7-6fce-47b6-9fe0-3c99ce17bd8f" + }, + "connection_timeout_seconds": 120, + "display_apps": { + "vscode": true, + "web_terminal": true, + "ssh_helper": true, + "port_forwarding_helper": true + }, + "resources_monitoring": {}, + "api_key_scope": "all" + } + ] + }, + { + "name": "dev2", + "type": "null_resource", + "agents": [ + { + "id": "cb18360a-0bad-4371-a26d-50c30e1d33f7", + "name": "dev2", + "operating_system": "linux", + "architecture": "amd64", + "apps": [ + { + "slug": "app3", + "display_name": "app3", + "open_in": 1, + "id": "a2714999-3f82-11a4-b8fe-3a11d88f3021" + } + ], + "Auth": { + "Token": "5d1d447c-65b0-47ba-998b-1ba752db7d78" + }, + "connection_timeout_seconds": 120, + "display_apps": { + "vscode": true, + "web_terminal": true, + "ssh_helper": true, + "port_forwarding_helper": true + }, + "resources_monitoring": {}, + "api_key_scope": "all" + } + ] + } + ], + "Parameters": [], + "Presets": [], + "ExternalAuthProviders": [], + "AITasks": [], + "HasAITasks": false, + "HasExternalAgents": false +} diff --git a/provisioner/terraform/testdata/resources/multiple-agents-multiple-envs/converted_state.plan.golden b/provisioner/terraform/testdata/resources/multiple-agents-multiple-envs/converted_state.plan.golden new file mode 100644 index 0000000000000..75500696591e1 --- /dev/null +++ b/provisioner/terraform/testdata/resources/multiple-agents-multiple-envs/converted_state.plan.golden @@ -0,0 +1,84 @@ +{ + "Resources": [ + { + "name": "dev1", + "type": "null_resource", + "agents": [ + { + "name": "dev1", + "operating_system": "linux", + "architecture": "amd64", + "Auth": { + "Token": "" + }, + "connection_timeout_seconds": 120, + "display_apps": { + "vscode": true, + "web_terminal": true, + 
"ssh_helper": true, + "port_forwarding_helper": true + }, + "extra_envs": [ + { + "name": "ENV_1", + "value": "Env 1" + }, + { + "name": "ENV_2", + "value": "Env 2" + } + ], + "resources_monitoring": {}, + "api_key_scope": "all" + } + ] + }, + { + "name": "dev2", + "type": "null_resource", + "agents": [ + { + "name": "dev2", + "operating_system": "linux", + "architecture": "amd64", + "Auth": { + "Token": "" + }, + "connection_timeout_seconds": 120, + "display_apps": { + "vscode": true, + "web_terminal": true, + "ssh_helper": true, + "port_forwarding_helper": true + }, + "extra_envs": [ + { + "name": "ENV_3", + "value": "Env 3" + } + ], + "resources_monitoring": {}, + "api_key_scope": "all" + } + ] + }, + { + "name": "env1", + "type": "coder_env" + }, + { + "name": "env2", + "type": "coder_env" + }, + { + "name": "env3", + "type": "coder_env" + } + ], + "Parameters": [], + "Presets": [], + "ExternalAuthProviders": [], + "AITasks": [], + "HasAITasks": false, + "HasExternalAgents": false +} diff --git a/provisioner/terraform/testdata/resources/multiple-agents-multiple-envs/converted_state.state.golden b/provisioner/terraform/testdata/resources/multiple-agents-multiple-envs/converted_state.state.golden new file mode 100644 index 0000000000000..c041641367c19 --- /dev/null +++ b/provisioner/terraform/testdata/resources/multiple-agents-multiple-envs/converted_state.state.golden @@ -0,0 +1,86 @@ +{ + "Resources": [ + { + "name": "dev1", + "type": "null_resource", + "agents": [ + { + "id": "fac6034b-1d42-4407-b266-265e35795241", + "name": "dev1", + "operating_system": "linux", + "architecture": "amd64", + "Auth": { + "Token": "1ef61ba1-3502-4e65-b934-8cc63b16877c" + }, + "connection_timeout_seconds": 120, + "display_apps": { + "vscode": true, + "web_terminal": true, + "ssh_helper": true, + "port_forwarding_helper": true + }, + "extra_envs": [ + { + "name": "ENV_1", + "value": "Env 1" + }, + { + "name": "ENV_2", + "value": "Env 2" + } + ], + "resources_monitoring": {}, + 
"api_key_scope": "all" + } + ] + }, + { + "name": "dev2", + "type": "null_resource", + "agents": [ + { + "id": "a02262af-b94b-4d6d-98ec-6e36b775e328", + "name": "dev2", + "operating_system": "linux", + "architecture": "amd64", + "Auth": { + "Token": "3d5caada-8239-4074-8d90-6a28a11858f9" + }, + "connection_timeout_seconds": 120, + "display_apps": { + "vscode": true, + "web_terminal": true, + "ssh_helper": true, + "port_forwarding_helper": true + }, + "extra_envs": [ + { + "name": "ENV_3", + "value": "Env 3" + } + ], + "resources_monitoring": {}, + "api_key_scope": "all" + } + ] + }, + { + "name": "env1", + "type": "coder_env" + }, + { + "name": "env2", + "type": "coder_env" + }, + { + "name": "env3", + "type": "coder_env" + } + ], + "Parameters": [], + "Presets": [], + "ExternalAuthProviders": [], + "AITasks": [], + "HasAITasks": false, + "HasExternalAgents": false +} diff --git a/provisioner/terraform/testdata/resources/multiple-agents-multiple-monitors/converted_state.plan.golden b/provisioner/terraform/testdata/resources/multiple-agents-multiple-monitors/converted_state.plan.golden new file mode 100644 index 0000000000000..084a038a9bf37 --- /dev/null +++ b/provisioner/terraform/testdata/resources/multiple-agents-multiple-monitors/converted_state.plan.golden @@ -0,0 +1,91 @@ +{ + "Resources": [ + { + "name": "dev", + "type": "null_resource", + "agents": [ + { + "name": "dev1", + "operating_system": "linux", + "architecture": "amd64", + "apps": [ + { + "slug": "app1", + "display_name": "app1", + "open_in": 1, + "id": "634ec976-f595-9122-c51e-8da2e3c6e3ce" + }, + { + "slug": "app2", + "display_name": "app2", + "subdomain": true, + "healthcheck": { + "url": "http://localhost:13337/healthz", + "interval": 5, + "threshold": 6 + }, + "open_in": 1, + "id": "13922208-d2bc-196b-54cb-3fc084916309" + } + ], + "Auth": { + "Token": "" + }, + "connection_timeout_seconds": 120, + "display_apps": { + "vscode": true, + "web_terminal": true, + "ssh_helper": true, + 
"port_forwarding_helper": true + }, + "resources_monitoring": { + "memory": { + "enabled": true, + "threshold": 80 + } + }, + "api_key_scope": "all" + }, + { + "name": "dev2", + "operating_system": "linux", + "architecture": "amd64", + "Auth": { + "Token": "" + }, + "connection_timeout_seconds": 120, + "display_apps": { + "vscode": true, + "web_terminal": true, + "ssh_helper": true, + "port_forwarding_helper": true + }, + "resources_monitoring": { + "memory": { + "enabled": true, + "threshold": 99 + }, + "volumes": [ + { + "path": "/volume2", + "threshold": 50 + }, + { + "path": "/volume1", + "enabled": true, + "threshold": 80 + } + ] + }, + "api_key_scope": "all" + } + ] + } + ], + "Parameters": [], + "Presets": [], + "ExternalAuthProviders": [], + "AITasks": [], + "HasAITasks": false, + "HasExternalAgents": false +} diff --git a/provisioner/terraform/testdata/resources/multiple-agents-multiple-monitors/converted_state.state.golden b/provisioner/terraform/testdata/resources/multiple-agents-multiple-monitors/converted_state.state.golden new file mode 100644 index 0000000000000..ded45301131cd --- /dev/null +++ b/provisioner/terraform/testdata/resources/multiple-agents-multiple-monitors/converted_state.state.golden @@ -0,0 +1,93 @@ +{ + "Resources": [ + { + "name": "dev", + "type": "null_resource", + "agents": [ + { + "id": "ca077115-5e6d-4ae5-9ca1-10d3b4f21ca8", + "name": "dev1", + "operating_system": "linux", + "architecture": "amd64", + "apps": [ + { + "slug": "app1", + "display_name": "app1", + "open_in": 1, + "id": "634ec976-f595-9122-c51e-8da2e3c6e3ce" + }, + { + "slug": "app2", + "display_name": "app2", + "subdomain": true, + "healthcheck": { + "url": "http://localhost:13337/healthz", + "interval": 5, + "threshold": 6 + }, + "open_in": 1, + "id": "13922208-d2bc-196b-54cb-3fc084916309" + } + ], + "Auth": { + "Token": "91e41276-344e-4664-a560-85f0ceb71a7e" + }, + "connection_timeout_seconds": 120, + "display_apps": { + "vscode": true, + "web_terminal": true, + 
"ssh_helper": true, + "port_forwarding_helper": true + }, + "resources_monitoring": { + "memory": { + "enabled": true, + "threshold": 80 + } + }, + "api_key_scope": "all" + }, + { + "id": "e3ce0177-ce0c-4136-af81-90d0751bf3de", + "name": "dev2", + "operating_system": "linux", + "architecture": "amd64", + "Auth": { + "Token": "2ce64d1c-c57f-4b6b-af87-b693c5998182" + }, + "connection_timeout_seconds": 120, + "display_apps": { + "vscode": true, + "web_terminal": true, + "ssh_helper": true, + "port_forwarding_helper": true + }, + "resources_monitoring": { + "memory": { + "enabled": true, + "threshold": 99 + }, + "volumes": [ + { + "path": "/volume2", + "threshold": 50 + }, + { + "path": "/volume1", + "enabled": true, + "threshold": 80 + } + ] + }, + "api_key_scope": "all" + } + ] + } + ], + "Parameters": [], + "Presets": [], + "ExternalAuthProviders": [], + "AITasks": [], + "HasAITasks": false, + "HasExternalAgents": false +} diff --git a/provisioner/terraform/testdata/resources/multiple-agents-multiple-scripts/converted_state.plan.golden b/provisioner/terraform/testdata/resources/multiple-agents-multiple-scripts/converted_state.plan.golden new file mode 100644 index 0000000000000..14f2b6ec314f1 --- /dev/null +++ b/provisioner/terraform/testdata/resources/multiple-agents-multiple-scripts/converted_state.plan.golden @@ -0,0 +1,75 @@ +{ + "Resources": [ + { + "name": "dev1", + "type": "null_resource", + "agents": [ + { + "name": "dev1", + "operating_system": "linux", + "architecture": "amd64", + "Auth": { + "Token": "" + }, + "connection_timeout_seconds": 120, + "display_apps": { + "vscode": true, + "web_terminal": true, + "ssh_helper": true, + "port_forwarding_helper": true + }, + "scripts": [ + { + "display_name": "Foobar Script 1", + "script": "echo foobar 1", + "run_on_start": true + }, + { + "display_name": "Foobar Script 2", + "script": "echo foobar 2", + "run_on_start": true + } + ], + "resources_monitoring": {}, + "api_key_scope": "all" + } + ] + }, + { + "name": 
"dev2", + "type": "null_resource", + "agents": [ + { + "name": "dev2", + "operating_system": "linux", + "architecture": "amd64", + "Auth": { + "Token": "" + }, + "connection_timeout_seconds": 120, + "display_apps": { + "vscode": true, + "web_terminal": true, + "ssh_helper": true, + "port_forwarding_helper": true + }, + "scripts": [ + { + "display_name": "Foobar Script 3", + "script": "echo foobar 3", + "run_on_start": true + } + ], + "resources_monitoring": {}, + "api_key_scope": "all" + } + ] + } + ], + "Parameters": [], + "Presets": [], + "ExternalAuthProviders": [], + "AITasks": [], + "HasAITasks": false, + "HasExternalAgents": false +} diff --git a/provisioner/terraform/testdata/resources/multiple-agents-multiple-scripts/converted_state.state.golden b/provisioner/terraform/testdata/resources/multiple-agents-multiple-scripts/converted_state.state.golden new file mode 100644 index 0000000000000..9cfdd52317aab --- /dev/null +++ b/provisioner/terraform/testdata/resources/multiple-agents-multiple-scripts/converted_state.state.golden @@ -0,0 +1,77 @@ +{ + "Resources": [ + { + "name": "dev1", + "type": "null_resource", + "agents": [ + { + "id": "9d9c16e7-5828-4ca4-9c9d-ba4b61d2b0db", + "name": "dev1", + "operating_system": "linux", + "architecture": "amd64", + "Auth": { + "Token": "2054bc44-b3d1-44e3-8f28-4ce327081ddb" + }, + "connection_timeout_seconds": 120, + "display_apps": { + "vscode": true, + "web_terminal": true, + "ssh_helper": true, + "port_forwarding_helper": true + }, + "scripts": [ + { + "display_name": "Foobar Script 1", + "script": "echo foobar 1", + "run_on_start": true + }, + { + "display_name": "Foobar Script 2", + "script": "echo foobar 2", + "run_on_start": true + } + ], + "resources_monitoring": {}, + "api_key_scope": "all" + } + ] + }, + { + "name": "dev2", + "type": "null_resource", + "agents": [ + { + "id": "69cb645c-7a6a-4ad6-be86-dcaab810e7c1", + "name": "dev2", + "operating_system": "linux", + "architecture": "amd64", + "Auth": { + "Token": 
"c3e73db7-a589-4364-bcf7-0224a9be5c70" + }, + "connection_timeout_seconds": 120, + "display_apps": { + "vscode": true, + "web_terminal": true, + "ssh_helper": true, + "port_forwarding_helper": true + }, + "scripts": [ + { + "display_name": "Foobar Script 3", + "script": "echo foobar 3", + "run_on_start": true + } + ], + "resources_monitoring": {}, + "api_key_scope": "all" + } + ] + } + ], + "Parameters": [], + "Presets": [], + "ExternalAuthProviders": [], + "AITasks": [], + "HasAITasks": false, + "HasExternalAgents": false +} diff --git a/provisioner/terraform/testdata/resources/multiple-agents/converted_state.plan.golden b/provisioner/terraform/testdata/resources/multiple-agents/converted_state.plan.golden new file mode 100644 index 0000000000000..9ad64531d747a --- /dev/null +++ b/provisioner/terraform/testdata/resources/multiple-agents/converted_state.plan.golden @@ -0,0 +1,95 @@ +{ + "Resources": [ + { + "name": "dev", + "type": "null_resource", + "agents": [ + { + "name": "dev1", + "operating_system": "linux", + "architecture": "amd64", + "Auth": { + "Token": "" + }, + "connection_timeout_seconds": 120, + "display_apps": { + "vscode": true, + "web_terminal": true, + "ssh_helper": true, + "port_forwarding_helper": true + }, + "resources_monitoring": {}, + "api_key_scope": "all" + }, + { + "name": "dev2", + "operating_system": "darwin", + "architecture": "amd64", + "Auth": { + "Token": "" + }, + "connection_timeout_seconds": 1, + "motd_file": "/etc/motd", + "display_apps": { + "vscode": true, + "web_terminal": true, + "ssh_helper": true, + "port_forwarding_helper": true + }, + "scripts": [ + { + "display_name": "Shutdown Script", + "icon": "/emojis/25c0.png", + "script": "echo bye bye", + "run_on_stop": true, + "log_path": "coder-shutdown-script.log" + } + ], + "resources_monitoring": {}, + "api_key_scope": "all" + }, + { + "name": "dev3", + "operating_system": "windows", + "architecture": "arm64", + "Auth": { + "Token": "" + }, + "connection_timeout_seconds": 
120, + "troubleshooting_url": "https://coder.com/troubleshoot", + "display_apps": { + "vscode": true, + "web_terminal": true, + "ssh_helper": true, + "port_forwarding_helper": true + }, + "resources_monitoring": {}, + "api_key_scope": "all" + }, + { + "name": "dev4", + "operating_system": "linux", + "architecture": "amd64", + "Auth": { + "Token": "" + }, + "connection_timeout_seconds": 120, + "display_apps": { + "vscode": true, + "web_terminal": true, + "ssh_helper": true, + "port_forwarding_helper": true + }, + "resources_monitoring": {}, + "api_key_scope": "all" + } + ] + } + ], + "Parameters": [], + "Presets": [], + "ExternalAuthProviders": [], + "AITasks": [], + "HasAITasks": false, + "HasExternalAgents": false +} diff --git a/provisioner/terraform/testdata/resources/multiple-agents/converted_state.state.golden b/provisioner/terraform/testdata/resources/multiple-agents/converted_state.state.golden new file mode 100644 index 0000000000000..7c8d16459485b --- /dev/null +++ b/provisioner/terraform/testdata/resources/multiple-agents/converted_state.state.golden @@ -0,0 +1,99 @@ +{ + "Resources": [ + { + "name": "dev", + "type": "null_resource", + "agents": [ + { + "id": "d3113fa6-6ff3-4532-adc2-c7c51f418fca", + "name": "dev1", + "operating_system": "linux", + "architecture": "amd64", + "Auth": { + "Token": "ecd3c234-6923-4066-9c49-a4ab05f8b25b" + }, + "connection_timeout_seconds": 120, + "display_apps": { + "vscode": true, + "web_terminal": true, + "ssh_helper": true, + "port_forwarding_helper": true + }, + "resources_monitoring": {}, + "api_key_scope": "all" + }, + { + "id": "65036667-6670-4ae9-b081-9e47a659b2a3", + "name": "dev2", + "operating_system": "darwin", + "architecture": "amd64", + "Auth": { + "Token": "d18a13a0-bb95-4500-b789-b341be481710" + }, + "connection_timeout_seconds": 1, + "motd_file": "/etc/motd", + "display_apps": { + "vscode": true, + "web_terminal": true, + "ssh_helper": true, + "port_forwarding_helper": true + }, + "scripts": [ + { + 
"display_name": "Shutdown Script", + "icon": "/emojis/25c0.png", + "script": "echo bye bye", + "run_on_stop": true, + "log_path": "coder-shutdown-script.log" + } + ], + "resources_monitoring": {}, + "api_key_scope": "all" + }, + { + "id": "ca951672-300e-4d31-859f-72ea307ef692", + "name": "dev3", + "operating_system": "windows", + "architecture": "arm64", + "Auth": { + "Token": "4df063e4-150e-447d-b7fb-8de08f19feca" + }, + "connection_timeout_seconds": 120, + "troubleshooting_url": "https://coder.com/troubleshoot", + "display_apps": { + "vscode": true, + "web_terminal": true, + "ssh_helper": true, + "port_forwarding_helper": true + }, + "resources_monitoring": {}, + "api_key_scope": "all" + }, + { + "id": "40b28bed-7b37-4f70-8209-114f26eb09d8", + "name": "dev4", + "operating_system": "linux", + "architecture": "amd64", + "Auth": { + "Token": "d8694897-083f-4a0c-8633-70107a9d45fb" + }, + "connection_timeout_seconds": 120, + "display_apps": { + "vscode": true, + "web_terminal": true, + "ssh_helper": true, + "port_forwarding_helper": true + }, + "resources_monitoring": {}, + "api_key_scope": "all" + } + ] + } + ], + "Parameters": [], + "Presets": [], + "ExternalAuthProviders": [], + "AITasks": [], + "HasAITasks": false, + "HasExternalAgents": false +} diff --git a/provisioner/terraform/testdata/resources/multiple-apps/converted_state.plan.golden b/provisioner/terraform/testdata/resources/multiple-apps/converted_state.plan.golden new file mode 100644 index 0000000000000..703e01ac4061a --- /dev/null +++ b/provisioner/terraform/testdata/resources/multiple-apps/converted_state.plan.golden @@ -0,0 +1,59 @@ +{ + "Resources": [ + { + "name": "dev", + "type": "null_resource", + "agents": [ + { + "name": "dev1", + "operating_system": "linux", + "architecture": "amd64", + "apps": [ + { + "slug": "app1", + "display_name": "app1", + "open_in": 1, + "id": "634ec976-f595-9122-c51e-8da2e3c6e3ce" + }, + { + "slug": "app2", + "display_name": "app2", + "subdomain": true, + 
"healthcheck": { + "url": "http://localhost:13337/healthz", + "interval": 5, + "threshold": 6 + }, + "open_in": 1, + "id": "13922208-d2bc-196b-54cb-3fc084916309" + }, + { + "slug": "app3", + "display_name": "app3", + "open_in": 1, + "id": "a2714999-3f82-11a4-b8fe-3a11d88f3021" + } + ], + "Auth": { + "Token": "" + }, + "connection_timeout_seconds": 120, + "display_apps": { + "vscode": true, + "web_terminal": true, + "ssh_helper": true, + "port_forwarding_helper": true + }, + "resources_monitoring": {}, + "api_key_scope": "all" + } + ] + } + ], + "Parameters": [], + "Presets": [], + "ExternalAuthProviders": [], + "AITasks": [], + "HasAITasks": false, + "HasExternalAgents": false +} diff --git a/provisioner/terraform/testdata/resources/multiple-apps/converted_state.state.golden b/provisioner/terraform/testdata/resources/multiple-apps/converted_state.state.golden new file mode 100644 index 0000000000000..869c56d7974d6 --- /dev/null +++ b/provisioner/terraform/testdata/resources/multiple-apps/converted_state.state.golden @@ -0,0 +1,60 @@ +{ + "Resources": [ + { + "name": "dev", + "type": "null_resource", + "agents": [ + { + "id": "947c273b-8ec8-4d7e-9f5f-82d777dd7233", + "name": "dev1", + "operating_system": "linux", + "architecture": "amd64", + "apps": [ + { + "slug": "app1", + "display_name": "app1", + "open_in": 1, + "id": "634ec976-f595-9122-c51e-8da2e3c6e3ce" + }, + { + "slug": "app2", + "display_name": "app2", + "subdomain": true, + "healthcheck": { + "url": "http://localhost:13337/healthz", + "interval": 5, + "threshold": 6 + }, + "open_in": 1, + "id": "13922208-d2bc-196b-54cb-3fc084916309" + }, + { + "slug": "app3", + "display_name": "app3", + "open_in": 1, + "id": "a2714999-3f82-11a4-b8fe-3a11d88f3021" + } + ], + "Auth": { + "Token": "fcb257f7-62fe-48c9-a8fd-b0b80c9fb3c8" + }, + "connection_timeout_seconds": 120, + "display_apps": { + "vscode": true, + "web_terminal": true, + "ssh_helper": true, + "port_forwarding_helper": true + }, + "resources_monitoring": 
{}, + "api_key_scope": "all" + } + ] + } + ], + "Parameters": [], + "Presets": [], + "ExternalAuthProviders": [], + "AITasks": [], + "HasAITasks": false, + "HasExternalAgents": false +} diff --git a/provisioner/terraform/testdata/resources/presets-multiple-defaults/converted_state.plan.golden b/provisioner/terraform/testdata/resources/presets-multiple-defaults/converted_state.plan.golden new file mode 100644 index 0000000000000..c1059056c6e4e --- /dev/null +++ b/provisioner/terraform/testdata/resources/presets-multiple-defaults/converted_state.plan.golden @@ -0,0 +1 @@ +"a maximum of 1 coder_workspace_preset can be marked as default, but 2 are set" diff --git a/provisioner/terraform/testdata/resources/presets-multiple-defaults/converted_state.state.golden b/provisioner/terraform/testdata/resources/presets-multiple-defaults/converted_state.state.golden new file mode 100644 index 0000000000000..c1059056c6e4e --- /dev/null +++ b/provisioner/terraform/testdata/resources/presets-multiple-defaults/converted_state.state.golden @@ -0,0 +1 @@ +"a maximum of 1 coder_workspace_preset can be marked as default, but 2 are set" diff --git a/provisioner/terraform/testdata/resources/presets-single-default/converted_state.plan.golden b/provisioner/terraform/testdata/resources/presets-single-default/converted_state.plan.golden new file mode 100644 index 0000000000000..2113065502811 --- /dev/null +++ b/provisioner/terraform/testdata/resources/presets-single-default/converted_state.plan.golden @@ -0,0 +1,67 @@ +{ + "Resources": [ + { + "name": "dev", + "type": "null_resource", + "agents": [ + { + "name": "dev", + "operating_system": "linux", + "architecture": "amd64", + "Auth": { + "Token": "" + }, + "connection_timeout_seconds": 120, + "display_apps": { + "vscode": true, + "web_terminal": true, + "ssh_helper": true, + "port_forwarding_helper": true + }, + "resources_monitoring": {}, + "api_key_scope": "all" + } + ] + } + ], + "Parameters": [ + { + "name": "instance_type", + 
"description": "Instance type", + "type": "string", + "default_value": "t3.micro", + "form_type": 4 + } + ], + "Presets": [ + { + "name": "development", + "parameters": [ + { + "name": "instance_type", + "value": "t3.micro" + } + ], + "prebuild": { + "instances": 1 + }, + "default": true + }, + { + "name": "production", + "parameters": [ + { + "name": "instance_type", + "value": "t3.large" + } + ], + "prebuild": { + "instances": 2 + } + } + ], + "ExternalAuthProviders": [], + "AITasks": [], + "HasAITasks": false, + "HasExternalAgents": false +} diff --git a/provisioner/terraform/testdata/resources/presets-single-default/converted_state.state.golden b/provisioner/terraform/testdata/resources/presets-single-default/converted_state.state.golden new file mode 100644 index 0000000000000..ecf470e46a67e --- /dev/null +++ b/provisioner/terraform/testdata/resources/presets-single-default/converted_state.state.golden @@ -0,0 +1,68 @@ +{ + "Resources": [ + { + "name": "dev", + "type": "null_resource", + "agents": [ + { + "id": "5d66372f-a526-44ee-9eac-0c16bcc57aa2", + "name": "dev", + "operating_system": "linux", + "architecture": "amd64", + "Auth": { + "Token": "70ab06e5-ef86-4ac2-a1d9-58c8ad85d379" + }, + "connection_timeout_seconds": 120, + "display_apps": { + "vscode": true, + "web_terminal": true, + "ssh_helper": true, + "port_forwarding_helper": true + }, + "resources_monitoring": {}, + "api_key_scope": "all" + } + ] + } + ], + "Parameters": [ + { + "name": "instance_type", + "description": "Instance type", + "type": "string", + "default_value": "t3.micro", + "form_type": 4 + } + ], + "Presets": [ + { + "name": "development", + "parameters": [ + { + "name": "instance_type", + "value": "t3.micro" + } + ], + "prebuild": { + "instances": 1 + }, + "default": true + }, + { + "name": "production", + "parameters": [ + { + "name": "instance_type", + "value": "t3.large" + } + ], + "prebuild": { + "instances": 2 + } + } + ], + "ExternalAuthProviders": [], + "AITasks": [], + 
"HasAITasks": false, + "HasExternalAgents": false +} diff --git a/provisioner/terraform/testdata/resources/presets/converted_state.plan.golden b/provisioner/terraform/testdata/resources/presets/converted_state.plan.golden new file mode 100644 index 0000000000000..ecfa791e257d3 --- /dev/null +++ b/provisioner/terraform/testdata/resources/presets/converted_state.plan.golden @@ -0,0 +1,102 @@ +{ + "Resources": [ + { + "name": "dev", + "type": "null_resource", + "agents": [ + { + "name": "dev", + "operating_system": "windows", + "architecture": "arm64", + "Auth": { + "Token": "" + }, + "connection_timeout_seconds": 120, + "display_apps": { + "vscode": true, + "web_terminal": true, + "ssh_helper": true, + "port_forwarding_helper": true + }, + "resources_monitoring": {}, + "api_key_scope": "all" + } + ] + } + ], + "Parameters": [ + { + "name": "First parameter from child module", + "description": "First parameter from child module", + "type": "string", + "mutable": true, + "default_value": "abcdef", + "form_type": 4 + }, + { + "name": "Second parameter from child module", + "description": "Second parameter from child module", + "type": "string", + "mutable": true, + "default_value": "ghijkl", + "form_type": 4 + }, + { + "name": "First parameter from module", + "description": "First parameter from module", + "type": "string", + "mutable": true, + "default_value": "abcdef", + "form_type": 4 + }, + { + "name": "Second parameter from module", + "description": "Second parameter from module", + "type": "string", + "mutable": true, + "default_value": "ghijkl", + "form_type": 4 + }, + { + "name": "Sample", + "description": "blah blah", + "type": "string", + "default_value": "ok", + "form_type": 4 + } + ], + "Presets": [ + { + "name": "My First Project", + "parameters": [ + { + "name": "Sample", + "value": "A1B2C3" + } + ], + "prebuild": { + "instances": 4, + "expiration_policy": { + "ttl": 86400 + }, + "scheduling": { + "timezone": "America/Los_Angeles", + "schedule": [ + { + 
"cron": "* 8-18 * * 1-5", + "instances": 3 + }, + { + "cron": "* 8-14 * * 6", + "instances": 1 + } + ] + } + } + } + ], + "ExternalAuthProviders": [], + "AITasks": [], + "HasAITasks": false, + "HasExternalAgents": false +} diff --git a/provisioner/terraform/testdata/resources/presets/converted_state.state.golden b/provisioner/terraform/testdata/resources/presets/converted_state.state.golden new file mode 100644 index 0000000000000..a1b67adb76f4e --- /dev/null +++ b/provisioner/terraform/testdata/resources/presets/converted_state.state.golden @@ -0,0 +1,103 @@ +{ + "Resources": [ + { + "name": "dev", + "type": "null_resource", + "agents": [ + { + "id": "8cfc2f0d-5cd6-4631-acfa-c3690ae5557c", + "name": "dev", + "operating_system": "windows", + "architecture": "arm64", + "Auth": { + "Token": "abc9d31e-d1d6-4f2c-9e35-005ebe39aeec" + }, + "connection_timeout_seconds": 120, + "display_apps": { + "vscode": true, + "web_terminal": true, + "ssh_helper": true, + "port_forwarding_helper": true + }, + "resources_monitoring": {}, + "api_key_scope": "all" + } + ] + } + ], + "Parameters": [ + { + "name": "First parameter from child module", + "description": "First parameter from child module", + "type": "string", + "mutable": true, + "default_value": "abcdef", + "form_type": 4 + }, + { + "name": "Second parameter from child module", + "description": "Second parameter from child module", + "type": "string", + "mutable": true, + "default_value": "ghijkl", + "form_type": 4 + }, + { + "name": "First parameter from module", + "description": "First parameter from module", + "type": "string", + "mutable": true, + "default_value": "abcdef", + "form_type": 4 + }, + { + "name": "Second parameter from module", + "description": "Second parameter from module", + "type": "string", + "mutable": true, + "default_value": "ghijkl", + "form_type": 4 + }, + { + "name": "Sample", + "description": "blah blah", + "type": "string", + "default_value": "ok", + "form_type": 4 + } + ], + "Presets": [ + { + 
"name": "My First Project", + "parameters": [ + { + "name": "Sample", + "value": "A1B2C3" + } + ], + "prebuild": { + "instances": 4, + "expiration_policy": { + "ttl": 86400 + }, + "scheduling": { + "timezone": "America/Los_Angeles", + "schedule": [ + { + "cron": "* 8-18 * * 1-5", + "instances": 3 + }, + { + "cron": "* 8-14 * * 6", + "instances": 1 + } + ] + } + } + } + ], + "ExternalAuthProviders": [], + "AITasks": [], + "HasAITasks": false, + "HasExternalAgents": false +} diff --git a/provisioner/terraform/testdata/resources/resource-metadata-duplicate/converted_state.plan.golden b/provisioner/terraform/testdata/resources/resource-metadata-duplicate/converted_state.plan.golden new file mode 100644 index 0000000000000..8731a0c260de1 --- /dev/null +++ b/provisioner/terraform/testdata/resources/resource-metadata-duplicate/converted_state.plan.golden @@ -0,0 +1 @@ +"duplicate metadata resource: null_resource.about" diff --git a/provisioner/terraform/testdata/resources/resource-metadata-duplicate/converted_state.state.golden b/provisioner/terraform/testdata/resources/resource-metadata-duplicate/converted_state.state.golden new file mode 100644 index 0000000000000..8731a0c260de1 --- /dev/null +++ b/provisioner/terraform/testdata/resources/resource-metadata-duplicate/converted_state.state.golden @@ -0,0 +1 @@ +"duplicate metadata resource: null_resource.about" diff --git a/provisioner/terraform/testdata/resources/resource-metadata/converted_state.plan.golden b/provisioner/terraform/testdata/resources/resource-metadata/converted_state.plan.golden new file mode 100644 index 0000000000000..2a351e856ef7d --- /dev/null +++ b/provisioner/terraform/testdata/resources/resource-metadata/converted_state.plan.golden @@ -0,0 +1,63 @@ +{ + "Resources": [ + { + "name": "about", + "type": "null_resource", + "agents": [ + { + "name": "main", + "operating_system": "linux", + "architecture": "amd64", + "Auth": { + "Token": "" + }, + "connection_timeout_seconds": 120, + "metadata": [ + { + 
"key": "process_count", + "display_name": "Process Count", + "script": "ps -ef | wc -l", + "interval": 5, + "timeout": 1, + "order": 7 + } + ], + "display_apps": { + "vscode": true, + "web_terminal": true, + "ssh_helper": true, + "port_forwarding_helper": true + }, + "resources_monitoring": {}, + "api_key_scope": "all" + } + ], + "metadata": [ + { + "key": "hello", + "value": "world" + }, + { + "key": "null" + }, + { + "key": "empty" + }, + { + "key": "secret", + "value": "squirrel", + "sensitive": true + } + ], + "hide": true, + "icon": "/icon/server.svg", + "daily_cost": 29 + } + ], + "Parameters": [], + "Presets": [], + "ExternalAuthProviders": [], + "AITasks": [], + "HasAITasks": false, + "HasExternalAgents": false +} diff --git a/provisioner/terraform/testdata/resources/resource-metadata/converted_state.state.golden b/provisioner/terraform/testdata/resources/resource-metadata/converted_state.state.golden new file mode 100644 index 0000000000000..3f0578713e01a --- /dev/null +++ b/provisioner/terraform/testdata/resources/resource-metadata/converted_state.state.golden @@ -0,0 +1,65 @@ +{ + "Resources": [ + { + "name": "about", + "type": "null_resource", + "agents": [ + { + "id": "9a5911cd-2335-4050-aba8-4c26ba1ca704", + "name": "main", + "operating_system": "linux", + "architecture": "amd64", + "Auth": { + "Token": "2b4471d9-1281-45bf-8be2-9b182beb9285" + }, + "connection_timeout_seconds": 120, + "metadata": [ + { + "key": "process_count", + "display_name": "Process Count", + "script": "ps -ef | wc -l", + "interval": 5, + "timeout": 1, + "order": 7 + } + ], + "display_apps": { + "vscode": true, + "web_terminal": true, + "ssh_helper": true, + "port_forwarding_helper": true + }, + "resources_monitoring": {}, + "api_key_scope": "all" + } + ], + "metadata": [ + { + "key": "hello", + "value": "world" + }, + { + "key": "null", + "is_null": true + }, + { + "key": "empty" + }, + { + "key": "secret", + "value": "squirrel", + "sensitive": true + } + ], + "hide": true, + 
"icon": "/icon/server.svg", + "daily_cost": 29 + } + ], + "Parameters": [], + "Presets": [], + "ExternalAuthProviders": [], + "AITasks": [], + "HasAITasks": false, + "HasExternalAgents": false +} diff --git a/provisioner/terraform/testdata/resources/rich-parameters-order/converted_state.plan.golden b/provisioner/terraform/testdata/resources/rich-parameters-order/converted_state.plan.golden new file mode 100644 index 0000000000000..5a76d1778b382 --- /dev/null +++ b/provisioner/terraform/testdata/resources/rich-parameters-order/converted_state.plan.golden @@ -0,0 +1,49 @@ +{ + "Resources": [ + { + "name": "dev", + "type": "null_resource", + "agents": [ + { + "name": "dev", + "operating_system": "windows", + "architecture": "arm64", + "Auth": { + "Token": "" + }, + "connection_timeout_seconds": 120, + "display_apps": { + "vscode": true, + "web_terminal": true, + "ssh_helper": true, + "port_forwarding_helper": true + }, + "resources_monitoring": {}, + "api_key_scope": "all" + } + ] + } + ], + "Parameters": [ + { + "name": "Example", + "type": "string", + "required": true, + "order": 55, + "form_type": 4 + }, + { + "name": "Sample", + "description": "blah blah", + "type": "string", + "default_value": "ok", + "order": 99, + "form_type": 4 + } + ], + "Presets": [], + "ExternalAuthProviders": [], + "AITasks": [], + "HasAITasks": false, + "HasExternalAgents": false +} diff --git a/provisioner/terraform/testdata/resources/rich-parameters-order/converted_state.state.golden b/provisioner/terraform/testdata/resources/rich-parameters-order/converted_state.state.golden new file mode 100644 index 0000000000000..5f001d4f104bc --- /dev/null +++ b/provisioner/terraform/testdata/resources/rich-parameters-order/converted_state.state.golden @@ -0,0 +1,50 @@ +{ + "Resources": [ + { + "name": "dev", + "type": "null_resource", + "agents": [ + { + "id": "09d607d0-f6dc-4d6b-b76c-0c532f34721e", + "name": "dev", + "operating_system": "windows", + "architecture": "arm64", + "Auth": { + "Token": 
"ac504187-c31b-408f-8f1a-f7927a6de3bc" + }, + "connection_timeout_seconds": 120, + "display_apps": { + "vscode": true, + "web_terminal": true, + "ssh_helper": true, + "port_forwarding_helper": true + }, + "resources_monitoring": {}, + "api_key_scope": "all" + } + ] + } + ], + "Parameters": [ + { + "name": "Example", + "type": "string", + "required": true, + "order": 55, + "form_type": 4 + }, + { + "name": "Sample", + "description": "blah blah", + "type": "string", + "default_value": "ok", + "order": 99, + "form_type": 4 + } + ], + "Presets": [], + "ExternalAuthProviders": [], + "AITasks": [], + "HasAITasks": false, + "HasExternalAgents": false +} diff --git a/provisioner/terraform/testdata/resources/rich-parameters-validation/converted_state.plan.golden b/provisioner/terraform/testdata/resources/rich-parameters-validation/converted_state.plan.golden new file mode 100644 index 0000000000000..1476afaf6f2d8 --- /dev/null +++ b/provisioner/terraform/testdata/resources/rich-parameters-validation/converted_state.plan.golden @@ -0,0 +1,78 @@ +{ + "Resources": [ + { + "name": "dev", + "type": "null_resource", + "agents": [ + { + "name": "dev", + "operating_system": "windows", + "architecture": "arm64", + "Auth": { + "Token": "" + }, + "connection_timeout_seconds": 120, + "display_apps": { + "vscode": true, + "web_terminal": true, + "ssh_helper": true, + "port_forwarding_helper": true + }, + "resources_monitoring": {}, + "api_key_scope": "all" + } + ] + } + ], + "Parameters": [ + { + "name": "number_example", + "type": "number", + "mutable": true, + "default_value": "4", + "ephemeral": true, + "form_type": 4 + }, + { + "name": "number_example_max", + "type": "number", + "default_value": "4", + "validation_max": 6, + "form_type": 4 + }, + { + "name": "number_example_max_zero", + "type": "number", + "default_value": "-3", + "validation_max": 0, + "form_type": 4 + }, + { + "name": "number_example_min", + "type": "number", + "default_value": "4", + "validation_min": 3, + 
"form_type": 4 + }, + { + "name": "number_example_min_max", + "type": "number", + "default_value": "4", + "validation_min": 3, + "validation_max": 6, + "form_type": 4 + }, + { + "name": "number_example_min_zero", + "type": "number", + "default_value": "4", + "validation_min": 0, + "form_type": 4 + } + ], + "Presets": [], + "ExternalAuthProviders": [], + "AITasks": [], + "HasAITasks": false, + "HasExternalAgents": false +} diff --git a/provisioner/terraform/testdata/resources/rich-parameters-validation/converted_state.state.golden b/provisioner/terraform/testdata/resources/rich-parameters-validation/converted_state.state.golden new file mode 100644 index 0000000000000..d8817ca5e900e --- /dev/null +++ b/provisioner/terraform/testdata/resources/rich-parameters-validation/converted_state.state.golden @@ -0,0 +1,79 @@ +{ + "Resources": [ + { + "name": "dev", + "type": "null_resource", + "agents": [ + { + "id": "9c8368da-924c-4df4-a049-940a9a035051", + "name": "dev", + "operating_system": "windows", + "architecture": "arm64", + "Auth": { + "Token": "e09a4d7d-8341-4adf-b93b-21f3724d76d7" + }, + "connection_timeout_seconds": 120, + "display_apps": { + "vscode": true, + "web_terminal": true, + "ssh_helper": true, + "port_forwarding_helper": true + }, + "resources_monitoring": {}, + "api_key_scope": "all" + } + ] + } + ], + "Parameters": [ + { + "name": "number_example", + "type": "number", + "mutable": true, + "default_value": "4", + "ephemeral": true, + "form_type": 4 + }, + { + "name": "number_example_max", + "type": "number", + "default_value": "4", + "validation_max": 6, + "form_type": 4 + }, + { + "name": "number_example_max_zero", + "type": "number", + "default_value": "-3", + "validation_max": 0, + "form_type": 4 + }, + { + "name": "number_example_min", + "type": "number", + "default_value": "4", + "validation_min": 3, + "form_type": 4 + }, + { + "name": "number_example_min_max", + "type": "number", + "default_value": "4", + "validation_min": 3, + "validation_max": 
6, + "form_type": 4 + }, + { + "name": "number_example_min_zero", + "type": "number", + "default_value": "4", + "validation_min": 0, + "form_type": 4 + } + ], + "Presets": [], + "ExternalAuthProviders": [], + "AITasks": [], + "HasAITasks": false, + "HasExternalAgents": false +} diff --git a/provisioner/terraform/testdata/resources/rich-parameters/converted_state.plan.golden b/provisioner/terraform/testdata/resources/rich-parameters/converted_state.plan.golden new file mode 100644 index 0000000000000..1089e51a88db8 --- /dev/null +++ b/provisioner/terraform/testdata/resources/rich-parameters/converted_state.plan.golden @@ -0,0 +1,119 @@ +{ + "Resources": [ + { + "name": "dev", + "type": "null_resource", + "agents": [ + { + "name": "dev", + "operating_system": "windows", + "architecture": "arm64", + "Auth": { + "Token": "" + }, + "connection_timeout_seconds": 120, + "display_apps": { + "vscode": true, + "web_terminal": true, + "ssh_helper": true, + "port_forwarding_helper": true + }, + "resources_monitoring": {}, + "api_key_scope": "all" + } + ] + } + ], + "Parameters": [ + { + "name": "First parameter from child module", + "description": "First parameter from child module", + "type": "string", + "mutable": true, + "default_value": "abcdef", + "form_type": 4 + }, + { + "name": "Second parameter from child module", + "description": "Second parameter from child module", + "type": "string", + "mutable": true, + "default_value": "ghijkl", + "form_type": 4 + }, + { + "name": "First parameter from module", + "description": "First parameter from module", + "type": "string", + "mutable": true, + "default_value": "abcdef", + "form_type": 4 + }, + { + "name": "Second parameter from module", + "description": "Second parameter from module", + "type": "string", + "mutable": true, + "default_value": "ghijkl", + "form_type": 4 + }, + { + "name": "Example", + "type": "string", + "options": [ + { + "name": "First Option", + "value": "first" + }, + { + "name": "Second Option", + 
"value": "second" + } + ], + "required": true, + "form_type": 2 + }, + { + "name": "number_example", + "type": "number", + "default_value": "4", + "form_type": 4 + }, + { + "name": "number_example_max_zero", + "type": "number", + "default_value": "-2", + "validation_min": -3, + "validation_max": 0, + "form_type": 4 + }, + { + "name": "number_example_min_max", + "type": "number", + "default_value": "4", + "validation_min": 3, + "validation_max": 6, + "form_type": 4 + }, + { + "name": "number_example_min_zero", + "type": "number", + "default_value": "4", + "validation_min": 0, + "validation_max": 6, + "form_type": 4 + }, + { + "name": "Sample", + "description": "blah blah", + "type": "string", + "default_value": "ok", + "form_type": 4 + } + ], + "Presets": [], + "ExternalAuthProviders": [], + "AITasks": [], + "HasAITasks": false, + "HasExternalAgents": false +} diff --git a/provisioner/terraform/testdata/resources/rich-parameters/converted_state.state.golden b/provisioner/terraform/testdata/resources/rich-parameters/converted_state.state.golden new file mode 100644 index 0000000000000..1a0efa09663fb --- /dev/null +++ b/provisioner/terraform/testdata/resources/rich-parameters/converted_state.state.golden @@ -0,0 +1,120 @@ +{ + "Resources": [ + { + "name": "dev", + "type": "null_resource", + "agents": [ + { + "id": "047fe781-ea5d-411a-b31c-4400a00e6166", + "name": "dev", + "operating_system": "windows", + "architecture": "arm64", + "Auth": { + "Token": "261ca0f7-a388-42dd-b113-d25e31e346c9" + }, + "connection_timeout_seconds": 120, + "display_apps": { + "vscode": true, + "web_terminal": true, + "ssh_helper": true, + "port_forwarding_helper": true + }, + "resources_monitoring": {}, + "api_key_scope": "all" + } + ] + } + ], + "Parameters": [ + { + "name": "First parameter from child module", + "description": "First parameter from child module", + "type": "string", + "mutable": true, + "default_value": "abcdef", + "form_type": 4 + }, + { + "name": "Second parameter from 
child module", + "description": "Second parameter from child module", + "type": "string", + "mutable": true, + "default_value": "ghijkl", + "form_type": 4 + }, + { + "name": "First parameter from module", + "description": "First parameter from module", + "type": "string", + "mutable": true, + "default_value": "abcdef", + "form_type": 4 + }, + { + "name": "Second parameter from module", + "description": "Second parameter from module", + "type": "string", + "mutable": true, + "default_value": "ghijkl", + "form_type": 4 + }, + { + "name": "Example", + "type": "string", + "options": [ + { + "name": "First Option", + "value": "first" + }, + { + "name": "Second Option", + "value": "second" + } + ], + "required": true, + "form_type": 2 + }, + { + "name": "number_example", + "type": "number", + "default_value": "4", + "form_type": 4 + }, + { + "name": "number_example_max_zero", + "type": "number", + "default_value": "-2", + "validation_min": -3, + "validation_max": 0, + "form_type": 4 + }, + { + "name": "number_example_min_max", + "type": "number", + "default_value": "4", + "validation_min": 3, + "validation_max": 6, + "form_type": 4 + }, + { + "name": "number_example_min_zero", + "type": "number", + "default_value": "4", + "validation_min": 0, + "validation_max": 6, + "form_type": 4 + }, + { + "name": "Sample", + "description": "blah blah", + "type": "string", + "default_value": "ok", + "form_type": 4 + } + ], + "Presets": [], + "ExternalAuthProviders": [], + "AITasks": [], + "HasAITasks": false, + "HasExternalAgents": false +} diff --git a/provisioner/terraform/testdata/resources/version.txt b/provisioner/terraform/testdata/resources/version.txt index feaae22bac7e9..80138e7146693 100644 --- a/provisioner/terraform/testdata/resources/version.txt +++ b/provisioner/terraform/testdata/resources/version.txt @@ -1 +1 @@ -1.13.0 +1.13.4 diff --git a/provisioner/terraform/testdata/version.txt b/provisioner/terraform/testdata/version.txt index feaae22bac7e9..80138e7146693 100644 
--- a/provisioner/terraform/testdata/version.txt +++ b/provisioner/terraform/testdata/version.txt @@ -1 +1 @@ -1.13.0 +1.13.4 diff --git a/provisioner/terraform/timings.go b/provisioner/terraform/timings.go index d2fe74239826e..0b150d2eafd4d 100644 --- a/provisioner/terraform/timings.go +++ b/provisioner/terraform/timings.go @@ -19,6 +19,13 @@ type timingKind string // Copied from https://github.com/hashicorp/terraform/blob/01c0480e77263933b2b086dc8d600a69f80fad2d/internal/command/jsonformat/renderer.go // We cannot reference these because they're in an internal package. const ( + // Stage markers are used to denote the beginning and end of stages. Without + // these, only discrete events (i.e. resource changes) within stages can be + // measured, which may omit setup/teardown time or other unmeasured overhead. + timingStageStart timingKind = "stage_start" + timingStageEnd timingKind = "stage_end" + timingStageError timingKind = "stage_error" + timingApplyStart timingKind = "apply_start" timingApplyProgress timingKind = "apply_progress" timingApplyComplete timingKind = "apply_complete" @@ -37,9 +44,6 @@ const ( timingResourceDrift timingKind = "resource_drift" timingVersion timingKind = "version" // These are not part of message_types, but we want to track init/graph timings as well. 
- timingInitStart timingKind = "init_start" - timingInitComplete timingKind = "init_complete" - timingInitErrored timingKind = "init_errored" timingGraphStart timingKind = "graph_start" timingGraphComplete timingKind = "graph_complete" timingGraphErrored timingKind = "graph_errored" @@ -109,13 +113,13 @@ func (t *timingAggregator) ingest(ts time.Time, s *timingSpan) { ts = dbtime.Time(ts.UTC()) switch s.kind { - case timingApplyStart, timingProvisionStart, timingRefreshStart, timingInitStart, timingGraphStart: + case timingApplyStart, timingProvisionStart, timingRefreshStart, timingGraphStart, timingStageStart: s.start = ts s.state = proto.TimingState_STARTED - case timingApplyComplete, timingProvisionComplete, timingRefreshComplete, timingInitComplete, timingGraphComplete: + case timingApplyComplete, timingProvisionComplete, timingRefreshComplete, timingGraphComplete, timingStageEnd: s.end = ts s.state = proto.TimingState_COMPLETED - case timingApplyErrored, timingProvisionErrored, timingInitErrored, timingGraphErrored: + case timingApplyErrored, timingProvisionErrored, timingGraphErrored, timingStageError: s.end = ts s.state = proto.TimingState_FAILED case timingInitOutput: @@ -176,8 +180,35 @@ func (t *timingAggregator) aggregate() []*proto.Timing { return out } +// startStage denotes the beginning of a stage and returns a function which +// should be called to mark the end of the stage. This is used to measure a +// stage's total duration across all it's discrete events and unmeasured +// overhead/events. 
+func (t *timingAggregator) startStage(stage database.ProvisionerJobTimingStage) (end func(err error)) { + ts := timingSpan{ + kind: timingStageStart, + stage: stage, + resource: "coder_stage_" + string(stage), + action: "terraform", + provider: "coder", + } + endTs := ts + t.ingest(dbtime.Now(), &ts) + + return func(err error) { + endTs.kind = timingStageEnd + if err != nil { + endTs.kind = timingStageError + } + t.ingest(dbtime.Now(), &endTs) + } +} + func (l timingKind) Valid() bool { return slices.Contains([]timingKind{ + timingStageStart, + timingStageEnd, + timingStageError, timingApplyStart, timingApplyProgress, timingApplyComplete, @@ -194,9 +225,6 @@ func (l timingKind) Valid() bool { timingOutputs, timingResourceDrift, timingVersion, - timingInitStart, - timingInitComplete, - timingInitErrored, timingGraphStart, timingGraphComplete, timingGraphErrored, @@ -210,7 +238,9 @@ func (l timingKind) Valid() bool { // if all other attributes are identical. func (l timingKind) Category() string { switch l { - case timingInitStart, timingInitComplete, timingInitErrored, timingInitOutput: + case timingStageStart, timingStageEnd, timingStageError: + return "stage" + case timingInitOutput: return "init" case timingGraphStart, timingGraphComplete, timingGraphErrored: return "graph" @@ -252,15 +282,6 @@ func (e *timingSpan) toProto() *proto.Timing { } } -func createInitTimingsEvent(event timingKind) (time.Time, *timingSpan) { - return dbtime.Now(), &timingSpan{ - kind: event, - action: "initializing terraform", - provider: "terraform", - resource: "init", - } -} - func createGraphTimingsEvent(event timingKind) (time.Time, *timingSpan) { return dbtime.Now(), &timingSpan{ kind: event, diff --git a/provisioner/terraform/timings_internal_test.go b/provisioner/terraform/timings_internal_test.go index 552bec5a1953e..99f057a97e6af 100644 --- a/provisioner/terraform/timings_internal_test.go +++ b/provisioner/terraform/timings_internal_test.go @@ -155,3 +155,18 @@ func 
printTimings(t *testing.T, timings []*proto.Timing) { terraform_internal.PrintTiming(t, a) } } + +func TestTimingStages(t *testing.T) { + t.Parallel() + + agg := &timingAggregator{ + stage: database.ProvisionerJobTimingStageApply, + stateLookup: make(map[uint64]*timingSpan), + } + + end := agg.startStage(database.ProvisionerJobTimingStageApply) + end(nil) + + evts := agg.aggregate() + require.Len(t, evts, 1) +} diff --git a/provisioner/terraform/timings_test.go b/provisioner/terraform/timings_test.go index fe167b830ff05..7a9ac84220a51 100644 --- a/provisioner/terraform/timings_test.go +++ b/provisioner/terraform/timings_test.go @@ -6,11 +6,13 @@ import ( "context" "os" "path/filepath" + "strings" "testing" "github.com/stretchr/testify/require" "github.com/coder/coder/v2/coderd/database" + "github.com/coder/coder/v2/coderd/util/slice" terraform_internal "github.com/coder/coder/v2/provisioner/terraform/internal" "github.com/coder/coder/v2/provisionersdk/proto" "github.com/coder/coder/v2/testutil" @@ -95,6 +97,12 @@ func TestTimingsFromProvision(t *testing.T) { // Sort the timings stably to keep reduce flakiness. terraform_internal.StableSortTimings(t, timings) + // `coder_stage_` timings use `dbtime.Now()`, which makes them hard to compare to + // a static set of expected timings. Filter them out. This test is good for + // testing timings sourced from terraform logs, not internal coder timings. + timings = slice.Filter(timings, func(tim *proto.Timing) bool { + return !strings.HasPrefix(tim.Resource, "coder_stage_") + }) // Then: the received timings should match the expected values below. // NOTE: These timings have been encoded to JSON format to make the tests more readable. 
diff --git a/provisionerd/proto/provisionerd.pb.go b/provisionerd/proto/provisionerd.pb.go index 818719f1b3995..e66e1a33de1f4 100644 --- a/provisionerd/proto/provisionerd.pb.go +++ b/provisionerd/proto/provisionerd.pb.go @@ -952,7 +952,8 @@ type AcquiredJob_WorkspaceBuild struct { // previous_parameter_values is used to pass the values of the previous // workspace build. Omit these values if the workspace is being created // for the first time. - PreviousParameterValues []*proto.RichParameterValue `protobuf:"bytes,10,rep,name=previous_parameter_values,json=previousParameterValues,proto3" json:"previous_parameter_values,omitempty"` + PreviousParameterValues []*proto.RichParameterValue `protobuf:"bytes,10,rep,name=previous_parameter_values,json=previousParameterValues,proto3" json:"previous_parameter_values,omitempty"` + ExpReuseTerraformWorkspace *bool `protobuf:"varint,11,opt,name=exp_reuse_terraform_workspace,json=expReuseTerraformWorkspace,proto3,oneof" json:"exp_reuse_terraform_workspace,omitempty"` } func (x *AcquiredJob_WorkspaceBuild) Reset() { @@ -1050,6 +1051,13 @@ func (x *AcquiredJob_WorkspaceBuild) GetPreviousParameterValues() []*proto.RichP return nil } +func (x *AcquiredJob_WorkspaceBuild) GetExpReuseTerraformWorkspace() bool { + if x != nil && x.ExpReuseTerraformWorkspace != nil { + return *x.ExpReuseTerraformWorkspace + } + return false +} + type AcquiredJob_TemplateImport struct { state protoimpl.MessageState sizeCache protoimpl.SizeCache @@ -1593,7 +1601,7 @@ var file_provisionerd_proto_provisionerd_proto_rawDesc = []byte{ 0x6f, 0x6e, 0x65, 0x72, 0x64, 0x1a, 0x26, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x73, 0x64, 0x6b, 0x2f, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x2f, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x22, 0x07, 0x0a, - 0x05, 0x45, 0x6d, 0x70, 0x74, 0x79, 0x22, 0xf9, 0x0b, 0x0a, 0x0b, 0x41, 0x63, 0x71, 0x75, 0x69, + 0x05, 0x45, 0x6d, 0x70, 0x74, 0x79, 0x22, 
0xe3, 0x0c, 0x0a, 0x0b, 0x41, 0x63, 0x71, 0x75, 0x69, 0x72, 0x65, 0x64, 0x4a, 0x6f, 0x62, 0x12, 0x15, 0x0a, 0x06, 0x6a, 0x6f, 0x62, 0x5f, 0x69, 0x64, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x05, 0x6a, 0x6f, 0x62, 0x49, 0x64, 0x12, 0x1d, 0x0a, 0x0a, 0x63, 0x72, 0x65, 0x61, 0x74, 0x65, 0x64, 0x5f, 0x61, 0x74, 0x18, 0x02, 0x20, 0x01, 0x28, @@ -1626,7 +1634,7 @@ var file_provisionerd_proto_provisionerd_proto_rawDesc = []byte{ 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x64, 0x2e, 0x41, 0x63, 0x71, 0x75, 0x69, 0x72, 0x65, 0x64, 0x4a, 0x6f, 0x62, 0x2e, 0x54, 0x72, 0x61, 0x63, 0x65, 0x4d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x45, 0x6e, 0x74, 0x72, 0x79, 0x52, 0x0d, 0x74, 0x72, 0x61, 0x63, 0x65, - 0x4d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x1a, 0xa3, 0x04, 0x0a, 0x0e, 0x57, 0x6f, 0x72, + 0x4d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x1a, 0x8d, 0x05, 0x0a, 0x0e, 0x57, 0x6f, 0x72, 0x6b, 0x73, 0x70, 0x61, 0x63, 0x65, 0x42, 0x75, 0x69, 0x6c, 0x64, 0x12, 0x2c, 0x0a, 0x12, 0x77, 0x6f, 0x72, 0x6b, 0x73, 0x70, 0x61, 0x63, 0x65, 0x5f, 0x62, 0x75, 0x69, 0x6c, 0x64, 0x5f, 0x69, 0x64, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x10, 0x77, 0x6f, 0x72, 0x6b, 0x73, 0x70, 0x61, @@ -1660,264 +1668,271 @@ var file_provisionerd_proto_provisionerd_proto_rawDesc = []byte{ 0x2e, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x2e, 0x52, 0x69, 0x63, 0x68, 0x50, 0x61, 0x72, 0x61, 0x6d, 0x65, 0x74, 0x65, 0x72, 0x56, 0x61, 0x6c, 0x75, 0x65, 0x52, 0x17, 0x70, 0x72, 0x65, 0x76, 0x69, 0x6f, 0x75, 0x73, 0x50, 0x61, 0x72, 0x61, 0x6d, 0x65, 0x74, - 0x65, 0x72, 0x56, 0x61, 0x6c, 0x75, 0x65, 0x73, 0x4a, 0x04, 0x08, 0x03, 0x10, 0x04, 0x1a, 0x91, - 0x01, 0x0a, 0x0e, 0x54, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x49, 0x6d, 0x70, 0x6f, 0x72, - 0x74, 0x12, 0x31, 0x0a, 0x08, 0x6d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x18, 0x01, 0x20, - 0x01, 0x28, 0x0b, 0x32, 0x15, 0x2e, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, - 0x72, 0x2e, 0x4d, 0x65, 
0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x52, 0x08, 0x6d, 0x65, 0x74, 0x61, - 0x64, 0x61, 0x74, 0x61, 0x12, 0x4c, 0x0a, 0x14, 0x75, 0x73, 0x65, 0x72, 0x5f, 0x76, 0x61, 0x72, - 0x69, 0x61, 0x62, 0x6c, 0x65, 0x5f, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x73, 0x18, 0x02, 0x20, 0x03, - 0x28, 0x0b, 0x32, 0x1a, 0x2e, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, - 0x2e, 0x56, 0x61, 0x72, 0x69, 0x61, 0x62, 0x6c, 0x65, 0x56, 0x61, 0x6c, 0x75, 0x65, 0x52, 0x12, - 0x75, 0x73, 0x65, 0x72, 0x56, 0x61, 0x72, 0x69, 0x61, 0x62, 0x6c, 0x65, 0x56, 0x61, 0x6c, 0x75, - 0x65, 0x73, 0x1a, 0xe3, 0x01, 0x0a, 0x0e, 0x54, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x44, - 0x72, 0x79, 0x52, 0x75, 0x6e, 0x12, 0x53, 0x0a, 0x15, 0x72, 0x69, 0x63, 0x68, 0x5f, 0x70, 0x61, - 0x72, 0x61, 0x6d, 0x65, 0x74, 0x65, 0x72, 0x5f, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x73, 0x18, 0x02, - 0x20, 0x03, 0x28, 0x0b, 0x32, 0x1f, 0x2e, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, - 0x65, 0x72, 0x2e, 0x52, 0x69, 0x63, 0x68, 0x50, 0x61, 0x72, 0x61, 0x6d, 0x65, 0x74, 0x65, 0x72, - 0x56, 0x61, 0x6c, 0x75, 0x65, 0x52, 0x13, 0x72, 0x69, 0x63, 0x68, 0x50, 0x61, 0x72, 0x61, 0x6d, - 0x65, 0x74, 0x65, 0x72, 0x56, 0x61, 0x6c, 0x75, 0x65, 0x73, 0x12, 0x43, 0x0a, 0x0f, 0x76, 0x61, - 0x72, 0x69, 0x61, 0x62, 0x6c, 0x65, 0x5f, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x73, 0x18, 0x03, 0x20, - 0x03, 0x28, 0x0b, 0x32, 0x1a, 0x2e, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, - 0x72, 0x2e, 0x56, 0x61, 0x72, 0x69, 0x61, 0x62, 0x6c, 0x65, 0x56, 0x61, 0x6c, 0x75, 0x65, 0x52, - 0x0e, 0x76, 0x61, 0x72, 0x69, 0x61, 0x62, 0x6c, 0x65, 0x56, 0x61, 0x6c, 0x75, 0x65, 0x73, 0x12, - 0x31, 0x0a, 0x08, 0x6d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x18, 0x04, 0x20, 0x01, 0x28, - 0x0b, 0x32, 0x15, 0x2e, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x2e, - 0x4d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x52, 0x08, 0x6d, 0x65, 0x74, 0x61, 0x64, 0x61, - 0x74, 0x61, 0x4a, 0x04, 0x08, 0x01, 0x10, 0x02, 0x1a, 0x40, 
0x0a, 0x12, 0x54, 0x72, 0x61, 0x63, - 0x65, 0x4d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x45, 0x6e, 0x74, 0x72, 0x79, 0x12, 0x10, - 0x0a, 0x03, 0x6b, 0x65, 0x79, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x03, 0x6b, 0x65, 0x79, - 0x12, 0x14, 0x0a, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, - 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x3a, 0x02, 0x38, 0x01, 0x42, 0x06, 0x0a, 0x04, 0x74, 0x79, - 0x70, 0x65, 0x22, 0xd4, 0x03, 0x0a, 0x09, 0x46, 0x61, 0x69, 0x6c, 0x65, 0x64, 0x4a, 0x6f, 0x62, - 0x12, 0x15, 0x0a, 0x06, 0x6a, 0x6f, 0x62, 0x5f, 0x69, 0x64, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, - 0x52, 0x05, 0x6a, 0x6f, 0x62, 0x49, 0x64, 0x12, 0x14, 0x0a, 0x05, 0x65, 0x72, 0x72, 0x6f, 0x72, - 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x05, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x12, 0x51, 0x0a, - 0x0f, 0x77, 0x6f, 0x72, 0x6b, 0x73, 0x70, 0x61, 0x63, 0x65, 0x5f, 0x62, 0x75, 0x69, 0x6c, 0x64, - 0x18, 0x03, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x26, 0x2e, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, - 0x6f, 0x6e, 0x65, 0x72, 0x64, 0x2e, 0x46, 0x61, 0x69, 0x6c, 0x65, 0x64, 0x4a, 0x6f, 0x62, 0x2e, - 0x57, 0x6f, 0x72, 0x6b, 0x73, 0x70, 0x61, 0x63, 0x65, 0x42, 0x75, 0x69, 0x6c, 0x64, 0x48, 0x00, - 0x52, 0x0e, 0x77, 0x6f, 0x72, 0x6b, 0x73, 0x70, 0x61, 0x63, 0x65, 0x42, 0x75, 0x69, 0x6c, 0x64, - 0x12, 0x51, 0x0a, 0x0f, 0x74, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x5f, 0x69, 0x6d, 0x70, - 0x6f, 0x72, 0x74, 0x18, 0x04, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x26, 0x2e, 0x70, 0x72, 0x6f, 0x76, - 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x64, 0x2e, 0x46, 0x61, 0x69, 0x6c, 0x65, 0x64, 0x4a, - 0x6f, 0x62, 0x2e, 0x54, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x49, 0x6d, 0x70, 0x6f, 0x72, - 0x74, 0x48, 0x00, 0x52, 0x0e, 0x74, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x49, 0x6d, 0x70, - 0x6f, 0x72, 0x74, 0x12, 0x52, 0x0a, 0x10, 0x74, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x5f, - 0x64, 0x72, 0x79, 0x5f, 0x72, 0x75, 0x6e, 0x18, 0x05, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x26, 0x2e, - 
0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x64, 0x2e, 0x46, 0x61, 0x69, - 0x6c, 0x65, 0x64, 0x4a, 0x6f, 0x62, 0x2e, 0x54, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x44, - 0x72, 0x79, 0x52, 0x75, 0x6e, 0x48, 0x00, 0x52, 0x0e, 0x74, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, - 0x65, 0x44, 0x72, 0x79, 0x52, 0x75, 0x6e, 0x12, 0x1d, 0x0a, 0x0a, 0x65, 0x72, 0x72, 0x6f, 0x72, - 0x5f, 0x63, 0x6f, 0x64, 0x65, 0x18, 0x06, 0x20, 0x01, 0x28, 0x09, 0x52, 0x09, 0x65, 0x72, 0x72, - 0x6f, 0x72, 0x43, 0x6f, 0x64, 0x65, 0x1a, 0x55, 0x0a, 0x0e, 0x57, 0x6f, 0x72, 0x6b, 0x73, 0x70, - 0x61, 0x63, 0x65, 0x42, 0x75, 0x69, 0x6c, 0x64, 0x12, 0x14, 0x0a, 0x05, 0x73, 0x74, 0x61, 0x74, - 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0c, 0x52, 0x05, 0x73, 0x74, 0x61, 0x74, 0x65, 0x12, 0x2d, - 0x0a, 0x07, 0x74, 0x69, 0x6d, 0x69, 0x6e, 0x67, 0x73, 0x18, 0x02, 0x20, 0x03, 0x28, 0x0b, 0x32, - 0x13, 0x2e, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x2e, 0x54, 0x69, - 0x6d, 0x69, 0x6e, 0x67, 0x52, 0x07, 0x74, 0x69, 0x6d, 0x69, 0x6e, 0x67, 0x73, 0x1a, 0x10, 0x0a, - 0x0e, 0x54, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x49, 0x6d, 0x70, 0x6f, 0x72, 0x74, 0x1a, - 0x10, 0x0a, 0x0e, 0x54, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x44, 0x72, 0x79, 0x52, 0x75, - 0x6e, 0x42, 0x06, 0x0a, 0x04, 0x74, 0x79, 0x70, 0x65, 0x22, 0xbb, 0x0b, 0x0a, 0x0c, 0x43, 0x6f, - 0x6d, 0x70, 0x6c, 0x65, 0x74, 0x65, 0x64, 0x4a, 0x6f, 0x62, 0x12, 0x15, 0x0a, 0x06, 0x6a, 0x6f, + 0x65, 0x72, 0x56, 0x61, 0x6c, 0x75, 0x65, 0x73, 0x12, 0x46, 0x0a, 0x1d, 0x65, 0x78, 0x70, 0x5f, + 0x72, 0x65, 0x75, 0x73, 0x65, 0x5f, 0x74, 0x65, 0x72, 0x72, 0x61, 0x66, 0x6f, 0x72, 0x6d, 0x5f, + 0x77, 0x6f, 0x72, 0x6b, 0x73, 0x70, 0x61, 0x63, 0x65, 0x18, 0x0b, 0x20, 0x01, 0x28, 0x08, 0x48, + 0x00, 0x52, 0x1a, 0x65, 0x78, 0x70, 0x52, 0x65, 0x75, 0x73, 0x65, 0x54, 0x65, 0x72, 0x72, 0x61, + 0x66, 0x6f, 0x72, 0x6d, 0x57, 0x6f, 0x72, 0x6b, 0x73, 0x70, 0x61, 0x63, 0x65, 0x88, 0x01, 0x01, + 0x42, 0x20, 0x0a, 0x1e, 0x5f, 0x65, 
0x78, 0x70, 0x5f, 0x72, 0x65, 0x75, 0x73, 0x65, 0x5f, 0x74, + 0x65, 0x72, 0x72, 0x61, 0x66, 0x6f, 0x72, 0x6d, 0x5f, 0x77, 0x6f, 0x72, 0x6b, 0x73, 0x70, 0x61, + 0x63, 0x65, 0x4a, 0x04, 0x08, 0x03, 0x10, 0x04, 0x1a, 0x91, 0x01, 0x0a, 0x0e, 0x54, 0x65, 0x6d, + 0x70, 0x6c, 0x61, 0x74, 0x65, 0x49, 0x6d, 0x70, 0x6f, 0x72, 0x74, 0x12, 0x31, 0x0a, 0x08, 0x6d, + 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x15, 0x2e, + 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x2e, 0x4d, 0x65, 0x74, 0x61, + 0x64, 0x61, 0x74, 0x61, 0x52, 0x08, 0x6d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x12, 0x4c, + 0x0a, 0x14, 0x75, 0x73, 0x65, 0x72, 0x5f, 0x76, 0x61, 0x72, 0x69, 0x61, 0x62, 0x6c, 0x65, 0x5f, + 0x76, 0x61, 0x6c, 0x75, 0x65, 0x73, 0x18, 0x02, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x1a, 0x2e, 0x70, + 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x2e, 0x56, 0x61, 0x72, 0x69, 0x61, + 0x62, 0x6c, 0x65, 0x56, 0x61, 0x6c, 0x75, 0x65, 0x52, 0x12, 0x75, 0x73, 0x65, 0x72, 0x56, 0x61, + 0x72, 0x69, 0x61, 0x62, 0x6c, 0x65, 0x56, 0x61, 0x6c, 0x75, 0x65, 0x73, 0x1a, 0xe3, 0x01, 0x0a, + 0x0e, 0x54, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x44, 0x72, 0x79, 0x52, 0x75, 0x6e, 0x12, + 0x53, 0x0a, 0x15, 0x72, 0x69, 0x63, 0x68, 0x5f, 0x70, 0x61, 0x72, 0x61, 0x6d, 0x65, 0x74, 0x65, + 0x72, 0x5f, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x73, 0x18, 0x02, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x1f, + 0x2e, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x2e, 0x52, 0x69, 0x63, + 0x68, 0x50, 0x61, 0x72, 0x61, 0x6d, 0x65, 0x74, 0x65, 0x72, 0x56, 0x61, 0x6c, 0x75, 0x65, 0x52, + 0x13, 0x72, 0x69, 0x63, 0x68, 0x50, 0x61, 0x72, 0x61, 0x6d, 0x65, 0x74, 0x65, 0x72, 0x56, 0x61, + 0x6c, 0x75, 0x65, 0x73, 0x12, 0x43, 0x0a, 0x0f, 0x76, 0x61, 0x72, 0x69, 0x61, 0x62, 0x6c, 0x65, + 0x5f, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x73, 0x18, 0x03, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x1a, 0x2e, + 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x2e, 
0x56, 0x61, 0x72, 0x69, + 0x61, 0x62, 0x6c, 0x65, 0x56, 0x61, 0x6c, 0x75, 0x65, 0x52, 0x0e, 0x76, 0x61, 0x72, 0x69, 0x61, + 0x62, 0x6c, 0x65, 0x56, 0x61, 0x6c, 0x75, 0x65, 0x73, 0x12, 0x31, 0x0a, 0x08, 0x6d, 0x65, 0x74, + 0x61, 0x64, 0x61, 0x74, 0x61, 0x18, 0x04, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x15, 0x2e, 0x70, 0x72, + 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x2e, 0x4d, 0x65, 0x74, 0x61, 0x64, 0x61, + 0x74, 0x61, 0x52, 0x08, 0x6d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x4a, 0x04, 0x08, 0x01, + 0x10, 0x02, 0x1a, 0x40, 0x0a, 0x12, 0x54, 0x72, 0x61, 0x63, 0x65, 0x4d, 0x65, 0x74, 0x61, 0x64, + 0x61, 0x74, 0x61, 0x45, 0x6e, 0x74, 0x72, 0x79, 0x12, 0x10, 0x0a, 0x03, 0x6b, 0x65, 0x79, 0x18, + 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x03, 0x6b, 0x65, 0x79, 0x12, 0x14, 0x0a, 0x05, 0x76, 0x61, + 0x6c, 0x75, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, + 0x3a, 0x02, 0x38, 0x01, 0x42, 0x06, 0x0a, 0x04, 0x74, 0x79, 0x70, 0x65, 0x22, 0xd4, 0x03, 0x0a, + 0x09, 0x46, 0x61, 0x69, 0x6c, 0x65, 0x64, 0x4a, 0x6f, 0x62, 0x12, 0x15, 0x0a, 0x06, 0x6a, 0x6f, 0x62, 0x5f, 0x69, 0x64, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x05, 0x6a, 0x6f, 0x62, 0x49, - 0x64, 0x12, 0x54, 0x0a, 0x0f, 0x77, 0x6f, 0x72, 0x6b, 0x73, 0x70, 0x61, 0x63, 0x65, 0x5f, 0x62, - 0x75, 0x69, 0x6c, 0x64, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x29, 0x2e, 0x70, 0x72, 0x6f, - 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x64, 0x2e, 0x43, 0x6f, 0x6d, 0x70, 0x6c, 0x65, - 0x74, 0x65, 0x64, 0x4a, 0x6f, 0x62, 0x2e, 0x57, 0x6f, 0x72, 0x6b, 0x73, 0x70, 0x61, 0x63, 0x65, - 0x42, 0x75, 0x69, 0x6c, 0x64, 0x48, 0x00, 0x52, 0x0e, 0x77, 0x6f, 0x72, 0x6b, 0x73, 0x70, 0x61, - 0x63, 0x65, 0x42, 0x75, 0x69, 0x6c, 0x64, 0x12, 0x54, 0x0a, 0x0f, 0x74, 0x65, 0x6d, 0x70, 0x6c, - 0x61, 0x74, 0x65, 0x5f, 0x69, 0x6d, 0x70, 0x6f, 0x72, 0x74, 0x18, 0x03, 0x20, 0x01, 0x28, 0x0b, - 0x32, 0x29, 0x2e, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x64, 0x2e, - 0x43, 0x6f, 
0x6d, 0x70, 0x6c, 0x65, 0x74, 0x65, 0x64, 0x4a, 0x6f, 0x62, 0x2e, 0x54, 0x65, 0x6d, + 0x64, 0x12, 0x14, 0x0a, 0x05, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, + 0x52, 0x05, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x12, 0x51, 0x0a, 0x0f, 0x77, 0x6f, 0x72, 0x6b, 0x73, + 0x70, 0x61, 0x63, 0x65, 0x5f, 0x62, 0x75, 0x69, 0x6c, 0x64, 0x18, 0x03, 0x20, 0x01, 0x28, 0x0b, + 0x32, 0x26, 0x2e, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x64, 0x2e, + 0x46, 0x61, 0x69, 0x6c, 0x65, 0x64, 0x4a, 0x6f, 0x62, 0x2e, 0x57, 0x6f, 0x72, 0x6b, 0x73, 0x70, + 0x61, 0x63, 0x65, 0x42, 0x75, 0x69, 0x6c, 0x64, 0x48, 0x00, 0x52, 0x0e, 0x77, 0x6f, 0x72, 0x6b, + 0x73, 0x70, 0x61, 0x63, 0x65, 0x42, 0x75, 0x69, 0x6c, 0x64, 0x12, 0x51, 0x0a, 0x0f, 0x74, 0x65, + 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x5f, 0x69, 0x6d, 0x70, 0x6f, 0x72, 0x74, 0x18, 0x04, 0x20, + 0x01, 0x28, 0x0b, 0x32, 0x26, 0x2e, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, + 0x72, 0x64, 0x2e, 0x46, 0x61, 0x69, 0x6c, 0x65, 0x64, 0x4a, 0x6f, 0x62, 0x2e, 0x54, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x49, 0x6d, 0x70, 0x6f, 0x72, 0x74, 0x48, 0x00, 0x52, 0x0e, 0x74, - 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x49, 0x6d, 0x70, 0x6f, 0x72, 0x74, 0x12, 0x55, 0x0a, + 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x49, 0x6d, 0x70, 0x6f, 0x72, 0x74, 0x12, 0x52, 0x0a, 0x10, 0x74, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x5f, 0x64, 0x72, 0x79, 0x5f, 0x72, 0x75, - 0x6e, 0x18, 0x04, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x29, 0x2e, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, - 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x64, 0x2e, 0x43, 0x6f, 0x6d, 0x70, 0x6c, 0x65, 0x74, 0x65, 0x64, - 0x4a, 0x6f, 0x62, 0x2e, 0x54, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x44, 0x72, 0x79, 0x52, - 0x75, 0x6e, 0x48, 0x00, 0x52, 0x0e, 0x74, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x44, 0x72, - 0x79, 0x52, 0x75, 0x6e, 0x1a, 0xc0, 0x02, 0x0a, 0x0e, 0x57, 0x6f, 0x72, 0x6b, 0x73, 0x70, 0x61, - 0x63, 0x65, 0x42, 0x75, 0x69, 0x6c, 0x64, 0x12, 0x14, 
0x0a, 0x05, 0x73, 0x74, 0x61, 0x74, 0x65, - 0x18, 0x01, 0x20, 0x01, 0x28, 0x0c, 0x52, 0x05, 0x73, 0x74, 0x61, 0x74, 0x65, 0x12, 0x33, 0x0a, - 0x09, 0x72, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x73, 0x18, 0x02, 0x20, 0x03, 0x28, 0x0b, - 0x32, 0x15, 0x2e, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x2e, 0x52, - 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x52, 0x09, 0x72, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, - 0x65, 0x73, 0x12, 0x2d, 0x0a, 0x07, 0x74, 0x69, 0x6d, 0x69, 0x6e, 0x67, 0x73, 0x18, 0x03, 0x20, - 0x03, 0x28, 0x0b, 0x32, 0x13, 0x2e, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, - 0x72, 0x2e, 0x54, 0x69, 0x6d, 0x69, 0x6e, 0x67, 0x52, 0x07, 0x74, 0x69, 0x6d, 0x69, 0x6e, 0x67, - 0x73, 0x12, 0x2d, 0x0a, 0x07, 0x6d, 0x6f, 0x64, 0x75, 0x6c, 0x65, 0x73, 0x18, 0x04, 0x20, 0x03, - 0x28, 0x0b, 0x32, 0x13, 0x2e, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, - 0x2e, 0x4d, 0x6f, 0x64, 0x75, 0x6c, 0x65, 0x52, 0x07, 0x6d, 0x6f, 0x64, 0x75, 0x6c, 0x65, 0x73, - 0x12, 0x55, 0x0a, 0x15, 0x72, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x5f, 0x72, 0x65, 0x70, - 0x6c, 0x61, 0x63, 0x65, 0x6d, 0x65, 0x6e, 0x74, 0x73, 0x18, 0x05, 0x20, 0x03, 0x28, 0x0b, 0x32, - 0x20, 0x2e, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x2e, 0x52, 0x65, - 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x52, 0x65, 0x70, 0x6c, 0x61, 0x63, 0x65, 0x6d, 0x65, 0x6e, - 0x74, 0x52, 0x14, 0x72, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x52, 0x65, 0x70, 0x6c, 0x61, - 0x63, 0x65, 0x6d, 0x65, 0x6e, 0x74, 0x73, 0x12, 0x2e, 0x0a, 0x08, 0x61, 0x69, 0x5f, 0x74, 0x61, - 0x73, 0x6b, 0x73, 0x18, 0x06, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x13, 0x2e, 0x70, 0x72, 0x6f, 0x76, - 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x2e, 0x41, 0x49, 0x54, 0x61, 0x73, 0x6b, 0x52, 0x07, - 0x61, 0x69, 0x54, 0x61, 0x73, 0x6b, 0x73, 0x1a, 0xcf, 0x05, 0x0a, 0x0e, 0x54, 0x65, 0x6d, 0x70, - 0x6c, 0x61, 0x74, 0x65, 0x49, 0x6d, 0x70, 0x6f, 0x72, 0x74, 0x12, 0x3e, 0x0a, 0x0f, 0x73, 
0x74, - 0x61, 0x72, 0x74, 0x5f, 0x72, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x73, 0x18, 0x01, 0x20, - 0x03, 0x28, 0x0b, 0x32, 0x15, 0x2e, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, - 0x72, 0x2e, 0x52, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x52, 0x0e, 0x73, 0x74, 0x61, 0x72, - 0x74, 0x52, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x73, 0x12, 0x3c, 0x0a, 0x0e, 0x73, 0x74, - 0x6f, 0x70, 0x5f, 0x72, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x73, 0x18, 0x02, 0x20, 0x03, - 0x28, 0x0b, 0x32, 0x15, 0x2e, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, - 0x2e, 0x52, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x52, 0x0d, 0x73, 0x74, 0x6f, 0x70, 0x52, - 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x73, 0x12, 0x43, 0x0a, 0x0f, 0x72, 0x69, 0x63, 0x68, - 0x5f, 0x70, 0x61, 0x72, 0x61, 0x6d, 0x65, 0x74, 0x65, 0x72, 0x73, 0x18, 0x03, 0x20, 0x03, 0x28, - 0x0b, 0x32, 0x1a, 0x2e, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x2e, - 0x52, 0x69, 0x63, 0x68, 0x50, 0x61, 0x72, 0x61, 0x6d, 0x65, 0x74, 0x65, 0x72, 0x52, 0x0e, 0x72, - 0x69, 0x63, 0x68, 0x50, 0x61, 0x72, 0x61, 0x6d, 0x65, 0x74, 0x65, 0x72, 0x73, 0x12, 0x41, 0x0a, - 0x1d, 0x65, 0x78, 0x74, 0x65, 0x72, 0x6e, 0x61, 0x6c, 0x5f, 0x61, 0x75, 0x74, 0x68, 0x5f, 0x70, - 0x72, 0x6f, 0x76, 0x69, 0x64, 0x65, 0x72, 0x73, 0x5f, 0x6e, 0x61, 0x6d, 0x65, 0x73, 0x18, 0x04, - 0x20, 0x03, 0x28, 0x09, 0x52, 0x1a, 0x65, 0x78, 0x74, 0x65, 0x72, 0x6e, 0x61, 0x6c, 0x41, 0x75, - 0x74, 0x68, 0x50, 0x72, 0x6f, 0x76, 0x69, 0x64, 0x65, 0x72, 0x73, 0x4e, 0x61, 0x6d, 0x65, 0x73, - 0x12, 0x61, 0x0a, 0x17, 0x65, 0x78, 0x74, 0x65, 0x72, 0x6e, 0x61, 0x6c, 0x5f, 0x61, 0x75, 0x74, - 0x68, 0x5f, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x64, 0x65, 0x72, 0x73, 0x18, 0x05, 0x20, 0x03, 0x28, - 0x0b, 0x32, 0x29, 0x2e, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x2e, - 0x45, 0x78, 0x74, 0x65, 0x72, 0x6e, 0x61, 0x6c, 0x41, 0x75, 0x74, 0x68, 0x50, 0x72, 0x6f, 0x76, - 0x69, 0x64, 0x65, 0x72, 0x52, 
0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x52, 0x15, 0x65, 0x78, - 0x74, 0x65, 0x72, 0x6e, 0x61, 0x6c, 0x41, 0x75, 0x74, 0x68, 0x50, 0x72, 0x6f, 0x76, 0x69, 0x64, - 0x65, 0x72, 0x73, 0x12, 0x38, 0x0a, 0x0d, 0x73, 0x74, 0x61, 0x72, 0x74, 0x5f, 0x6d, 0x6f, 0x64, - 0x75, 0x6c, 0x65, 0x73, 0x18, 0x06, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x13, 0x2e, 0x70, 0x72, 0x6f, - 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x2e, 0x4d, 0x6f, 0x64, 0x75, 0x6c, 0x65, 0x52, - 0x0c, 0x73, 0x74, 0x61, 0x72, 0x74, 0x4d, 0x6f, 0x64, 0x75, 0x6c, 0x65, 0x73, 0x12, 0x36, 0x0a, - 0x0c, 0x73, 0x74, 0x6f, 0x70, 0x5f, 0x6d, 0x6f, 0x64, 0x75, 0x6c, 0x65, 0x73, 0x18, 0x07, 0x20, + 0x6e, 0x18, 0x05, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x26, 0x2e, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, + 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x64, 0x2e, 0x46, 0x61, 0x69, 0x6c, 0x65, 0x64, 0x4a, 0x6f, 0x62, + 0x2e, 0x54, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x44, 0x72, 0x79, 0x52, 0x75, 0x6e, 0x48, + 0x00, 0x52, 0x0e, 0x74, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x44, 0x72, 0x79, 0x52, 0x75, + 0x6e, 0x12, 0x1d, 0x0a, 0x0a, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x5f, 0x63, 0x6f, 0x64, 0x65, 0x18, + 0x06, 0x20, 0x01, 0x28, 0x09, 0x52, 0x09, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x43, 0x6f, 0x64, 0x65, + 0x1a, 0x55, 0x0a, 0x0e, 0x57, 0x6f, 0x72, 0x6b, 0x73, 0x70, 0x61, 0x63, 0x65, 0x42, 0x75, 0x69, + 0x6c, 0x64, 0x12, 0x14, 0x0a, 0x05, 0x73, 0x74, 0x61, 0x74, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, + 0x0c, 0x52, 0x05, 0x73, 0x74, 0x61, 0x74, 0x65, 0x12, 0x2d, 0x0a, 0x07, 0x74, 0x69, 0x6d, 0x69, + 0x6e, 0x67, 0x73, 0x18, 0x02, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x13, 0x2e, 0x70, 0x72, 0x6f, 0x76, + 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x2e, 0x54, 0x69, 0x6d, 0x69, 0x6e, 0x67, 0x52, 0x07, + 0x74, 0x69, 0x6d, 0x69, 0x6e, 0x67, 0x73, 0x1a, 0x10, 0x0a, 0x0e, 0x54, 0x65, 0x6d, 0x70, 0x6c, + 0x61, 0x74, 0x65, 0x49, 0x6d, 0x70, 0x6f, 0x72, 0x74, 0x1a, 0x10, 0x0a, 0x0e, 0x54, 0x65, 0x6d, + 0x70, 0x6c, 0x61, 0x74, 0x65, 0x44, 0x72, 0x79, 0x52, 0x75, 0x6e, 
0x42, 0x06, 0x0a, 0x04, 0x74, + 0x79, 0x70, 0x65, 0x22, 0xbb, 0x0b, 0x0a, 0x0c, 0x43, 0x6f, 0x6d, 0x70, 0x6c, 0x65, 0x74, 0x65, + 0x64, 0x4a, 0x6f, 0x62, 0x12, 0x15, 0x0a, 0x06, 0x6a, 0x6f, 0x62, 0x5f, 0x69, 0x64, 0x18, 0x01, + 0x20, 0x01, 0x28, 0x09, 0x52, 0x05, 0x6a, 0x6f, 0x62, 0x49, 0x64, 0x12, 0x54, 0x0a, 0x0f, 0x77, + 0x6f, 0x72, 0x6b, 0x73, 0x70, 0x61, 0x63, 0x65, 0x5f, 0x62, 0x75, 0x69, 0x6c, 0x64, 0x18, 0x02, + 0x20, 0x01, 0x28, 0x0b, 0x32, 0x29, 0x2e, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, + 0x65, 0x72, 0x64, 0x2e, 0x43, 0x6f, 0x6d, 0x70, 0x6c, 0x65, 0x74, 0x65, 0x64, 0x4a, 0x6f, 0x62, + 0x2e, 0x57, 0x6f, 0x72, 0x6b, 0x73, 0x70, 0x61, 0x63, 0x65, 0x42, 0x75, 0x69, 0x6c, 0x64, 0x48, + 0x00, 0x52, 0x0e, 0x77, 0x6f, 0x72, 0x6b, 0x73, 0x70, 0x61, 0x63, 0x65, 0x42, 0x75, 0x69, 0x6c, + 0x64, 0x12, 0x54, 0x0a, 0x0f, 0x74, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x5f, 0x69, 0x6d, + 0x70, 0x6f, 0x72, 0x74, 0x18, 0x03, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x29, 0x2e, 0x70, 0x72, 0x6f, + 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x64, 0x2e, 0x43, 0x6f, 0x6d, 0x70, 0x6c, 0x65, + 0x74, 0x65, 0x64, 0x4a, 0x6f, 0x62, 0x2e, 0x54, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x49, + 0x6d, 0x70, 0x6f, 0x72, 0x74, 0x48, 0x00, 0x52, 0x0e, 0x74, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, + 0x65, 0x49, 0x6d, 0x70, 0x6f, 0x72, 0x74, 0x12, 0x55, 0x0a, 0x10, 0x74, 0x65, 0x6d, 0x70, 0x6c, + 0x61, 0x74, 0x65, 0x5f, 0x64, 0x72, 0x79, 0x5f, 0x72, 0x75, 0x6e, 0x18, 0x04, 0x20, 0x01, 0x28, + 0x0b, 0x32, 0x29, 0x2e, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x64, + 0x2e, 0x43, 0x6f, 0x6d, 0x70, 0x6c, 0x65, 0x74, 0x65, 0x64, 0x4a, 0x6f, 0x62, 0x2e, 0x54, 0x65, + 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x44, 0x72, 0x79, 0x52, 0x75, 0x6e, 0x48, 0x00, 0x52, 0x0e, + 0x74, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x44, 0x72, 0x79, 0x52, 0x75, 0x6e, 0x1a, 0xc0, + 0x02, 0x0a, 0x0e, 0x57, 0x6f, 0x72, 0x6b, 0x73, 0x70, 0x61, 0x63, 0x65, 0x42, 0x75, 0x69, 0x6c, + 0x64, 
0x12, 0x14, 0x0a, 0x05, 0x73, 0x74, 0x61, 0x74, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0c, + 0x52, 0x05, 0x73, 0x74, 0x61, 0x74, 0x65, 0x12, 0x33, 0x0a, 0x09, 0x72, 0x65, 0x73, 0x6f, 0x75, + 0x72, 0x63, 0x65, 0x73, 0x18, 0x02, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x15, 0x2e, 0x70, 0x72, 0x6f, + 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x2e, 0x52, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, + 0x65, 0x52, 0x09, 0x72, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x73, 0x12, 0x2d, 0x0a, 0x07, + 0x74, 0x69, 0x6d, 0x69, 0x6e, 0x67, 0x73, 0x18, 0x03, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x13, 0x2e, + 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x2e, 0x54, 0x69, 0x6d, 0x69, + 0x6e, 0x67, 0x52, 0x07, 0x74, 0x69, 0x6d, 0x69, 0x6e, 0x67, 0x73, 0x12, 0x2d, 0x0a, 0x07, 0x6d, + 0x6f, 0x64, 0x75, 0x6c, 0x65, 0x73, 0x18, 0x04, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x13, 0x2e, 0x70, + 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x2e, 0x4d, 0x6f, 0x64, 0x75, 0x6c, + 0x65, 0x52, 0x07, 0x6d, 0x6f, 0x64, 0x75, 0x6c, 0x65, 0x73, 0x12, 0x55, 0x0a, 0x15, 0x72, 0x65, + 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x5f, 0x72, 0x65, 0x70, 0x6c, 0x61, 0x63, 0x65, 0x6d, 0x65, + 0x6e, 0x74, 0x73, 0x18, 0x05, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x20, 0x2e, 0x70, 0x72, 0x6f, 0x76, + 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x2e, 0x52, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, + 0x52, 0x65, 0x70, 0x6c, 0x61, 0x63, 0x65, 0x6d, 0x65, 0x6e, 0x74, 0x52, 0x14, 0x72, 0x65, 0x73, + 0x6f, 0x75, 0x72, 0x63, 0x65, 0x52, 0x65, 0x70, 0x6c, 0x61, 0x63, 0x65, 0x6d, 0x65, 0x6e, 0x74, + 0x73, 0x12, 0x2e, 0x0a, 0x08, 0x61, 0x69, 0x5f, 0x74, 0x61, 0x73, 0x6b, 0x73, 0x18, 0x06, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x13, 0x2e, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, - 0x72, 0x2e, 0x4d, 0x6f, 0x64, 0x75, 0x6c, 0x65, 0x52, 0x0b, 0x73, 0x74, 0x6f, 0x70, 0x4d, 0x6f, - 0x64, 0x75, 0x6c, 0x65, 0x73, 0x12, 0x2d, 0x0a, 0x07, 0x70, 0x72, 0x65, 0x73, 0x65, 0x74, 0x73, - 0x18, 0x08, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x13, 
0x2e, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, - 0x6f, 0x6e, 0x65, 0x72, 0x2e, 0x50, 0x72, 0x65, 0x73, 0x65, 0x74, 0x52, 0x07, 0x70, 0x72, 0x65, - 0x73, 0x65, 0x74, 0x73, 0x12, 0x12, 0x0a, 0x04, 0x70, 0x6c, 0x61, 0x6e, 0x18, 0x09, 0x20, 0x01, - 0x28, 0x0c, 0x52, 0x04, 0x70, 0x6c, 0x61, 0x6e, 0x12, 0x21, 0x0a, 0x0c, 0x6d, 0x6f, 0x64, 0x75, - 0x6c, 0x65, 0x5f, 0x66, 0x69, 0x6c, 0x65, 0x73, 0x18, 0x0a, 0x20, 0x01, 0x28, 0x0c, 0x52, 0x0b, - 0x6d, 0x6f, 0x64, 0x75, 0x6c, 0x65, 0x46, 0x69, 0x6c, 0x65, 0x73, 0x12, 0x2a, 0x0a, 0x11, 0x6d, - 0x6f, 0x64, 0x75, 0x6c, 0x65, 0x5f, 0x66, 0x69, 0x6c, 0x65, 0x73, 0x5f, 0x68, 0x61, 0x73, 0x68, - 0x18, 0x0b, 0x20, 0x01, 0x28, 0x0c, 0x52, 0x0f, 0x6d, 0x6f, 0x64, 0x75, 0x6c, 0x65, 0x46, 0x69, - 0x6c, 0x65, 0x73, 0x48, 0x61, 0x73, 0x68, 0x12, 0x20, 0x0a, 0x0c, 0x68, 0x61, 0x73, 0x5f, 0x61, - 0x69, 0x5f, 0x74, 0x61, 0x73, 0x6b, 0x73, 0x18, 0x0c, 0x20, 0x01, 0x28, 0x08, 0x52, 0x0a, 0x68, - 0x61, 0x73, 0x41, 0x69, 0x54, 0x61, 0x73, 0x6b, 0x73, 0x12, 0x2e, 0x0a, 0x13, 0x68, 0x61, 0x73, - 0x5f, 0x65, 0x78, 0x74, 0x65, 0x72, 0x6e, 0x61, 0x6c, 0x5f, 0x61, 0x67, 0x65, 0x6e, 0x74, 0x73, - 0x18, 0x0d, 0x20, 0x01, 0x28, 0x08, 0x52, 0x11, 0x68, 0x61, 0x73, 0x45, 0x78, 0x74, 0x65, 0x72, - 0x6e, 0x61, 0x6c, 0x41, 0x67, 0x65, 0x6e, 0x74, 0x73, 0x1a, 0x74, 0x0a, 0x0e, 0x54, 0x65, 0x6d, - 0x70, 0x6c, 0x61, 0x74, 0x65, 0x44, 0x72, 0x79, 0x52, 0x75, 0x6e, 0x12, 0x33, 0x0a, 0x09, 0x72, - 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x73, 0x18, 0x01, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x15, - 0x2e, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x2e, 0x52, 0x65, 0x73, - 0x6f, 0x75, 0x72, 0x63, 0x65, 0x52, 0x09, 0x72, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x73, - 0x12, 0x2d, 0x0a, 0x07, 0x6d, 0x6f, 0x64, 0x75, 0x6c, 0x65, 0x73, 0x18, 0x02, 0x20, 0x03, 0x28, - 0x0b, 0x32, 0x13, 0x2e, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x2e, - 0x4d, 0x6f, 0x64, 0x75, 0x6c, 0x65, 0x52, 0x07, 0x6d, 0x6f, 0x64, 0x75, 0x6c, 0x65, 
0x73, 0x42, - 0x06, 0x0a, 0x04, 0x74, 0x79, 0x70, 0x65, 0x22, 0xb0, 0x01, 0x0a, 0x03, 0x4c, 0x6f, 0x67, 0x12, - 0x2f, 0x0a, 0x06, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0e, 0x32, - 0x17, 0x2e, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x64, 0x2e, 0x4c, - 0x6f, 0x67, 0x53, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x52, 0x06, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, - 0x12, 0x2b, 0x0a, 0x05, 0x6c, 0x65, 0x76, 0x65, 0x6c, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0e, 0x32, - 0x15, 0x2e, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x2e, 0x4c, 0x6f, - 0x67, 0x4c, 0x65, 0x76, 0x65, 0x6c, 0x52, 0x05, 0x6c, 0x65, 0x76, 0x65, 0x6c, 0x12, 0x1d, 0x0a, - 0x0a, 0x63, 0x72, 0x65, 0x61, 0x74, 0x65, 0x64, 0x5f, 0x61, 0x74, 0x18, 0x03, 0x20, 0x01, 0x28, - 0x03, 0x52, 0x09, 0x63, 0x72, 0x65, 0x61, 0x74, 0x65, 0x64, 0x41, 0x74, 0x12, 0x14, 0x0a, 0x05, - 0x73, 0x74, 0x61, 0x67, 0x65, 0x18, 0x04, 0x20, 0x01, 0x28, 0x09, 0x52, 0x05, 0x73, 0x74, 0x61, - 0x67, 0x65, 0x12, 0x16, 0x0a, 0x06, 0x6f, 0x75, 0x74, 0x70, 0x75, 0x74, 0x18, 0x05, 0x20, 0x01, - 0x28, 0x09, 0x52, 0x06, 0x6f, 0x75, 0x74, 0x70, 0x75, 0x74, 0x22, 0xa6, 0x03, 0x0a, 0x10, 0x55, - 0x70, 0x64, 0x61, 0x74, 0x65, 0x4a, 0x6f, 0x62, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, + 0x72, 0x2e, 0x41, 0x49, 0x54, 0x61, 0x73, 0x6b, 0x52, 0x07, 0x61, 0x69, 0x54, 0x61, 0x73, 0x6b, + 0x73, 0x1a, 0xcf, 0x05, 0x0a, 0x0e, 0x54, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x49, 0x6d, + 0x70, 0x6f, 0x72, 0x74, 0x12, 0x3e, 0x0a, 0x0f, 0x73, 0x74, 0x61, 0x72, 0x74, 0x5f, 0x72, 0x65, + 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x73, 0x18, 0x01, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x15, 0x2e, + 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x2e, 0x52, 0x65, 0x73, 0x6f, + 0x75, 0x72, 0x63, 0x65, 0x52, 0x0e, 0x73, 0x74, 0x61, 0x72, 0x74, 0x52, 0x65, 0x73, 0x6f, 0x75, + 0x72, 0x63, 0x65, 0x73, 0x12, 0x3c, 0x0a, 0x0e, 0x73, 0x74, 0x6f, 0x70, 0x5f, 0x72, 0x65, 0x73, + 0x6f, 0x75, 0x72, 0x63, 
0x65, 0x73, 0x18, 0x02, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x15, 0x2e, 0x70, + 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x2e, 0x52, 0x65, 0x73, 0x6f, 0x75, + 0x72, 0x63, 0x65, 0x52, 0x0d, 0x73, 0x74, 0x6f, 0x70, 0x52, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, + 0x65, 0x73, 0x12, 0x43, 0x0a, 0x0f, 0x72, 0x69, 0x63, 0x68, 0x5f, 0x70, 0x61, 0x72, 0x61, 0x6d, + 0x65, 0x74, 0x65, 0x72, 0x73, 0x18, 0x03, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x1a, 0x2e, 0x70, 0x72, + 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x2e, 0x52, 0x69, 0x63, 0x68, 0x50, 0x61, + 0x72, 0x61, 0x6d, 0x65, 0x74, 0x65, 0x72, 0x52, 0x0e, 0x72, 0x69, 0x63, 0x68, 0x50, 0x61, 0x72, + 0x61, 0x6d, 0x65, 0x74, 0x65, 0x72, 0x73, 0x12, 0x41, 0x0a, 0x1d, 0x65, 0x78, 0x74, 0x65, 0x72, + 0x6e, 0x61, 0x6c, 0x5f, 0x61, 0x75, 0x74, 0x68, 0x5f, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x64, 0x65, + 0x72, 0x73, 0x5f, 0x6e, 0x61, 0x6d, 0x65, 0x73, 0x18, 0x04, 0x20, 0x03, 0x28, 0x09, 0x52, 0x1a, + 0x65, 0x78, 0x74, 0x65, 0x72, 0x6e, 0x61, 0x6c, 0x41, 0x75, 0x74, 0x68, 0x50, 0x72, 0x6f, 0x76, + 0x69, 0x64, 0x65, 0x72, 0x73, 0x4e, 0x61, 0x6d, 0x65, 0x73, 0x12, 0x61, 0x0a, 0x17, 0x65, 0x78, + 0x74, 0x65, 0x72, 0x6e, 0x61, 0x6c, 0x5f, 0x61, 0x75, 0x74, 0x68, 0x5f, 0x70, 0x72, 0x6f, 0x76, + 0x69, 0x64, 0x65, 0x72, 0x73, 0x18, 0x05, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x29, 0x2e, 0x70, 0x72, + 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x2e, 0x45, 0x78, 0x74, 0x65, 0x72, 0x6e, + 0x61, 0x6c, 0x41, 0x75, 0x74, 0x68, 0x50, 0x72, 0x6f, 0x76, 0x69, 0x64, 0x65, 0x72, 0x52, 0x65, + 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x52, 0x15, 0x65, 0x78, 0x74, 0x65, 0x72, 0x6e, 0x61, 0x6c, + 0x41, 0x75, 0x74, 0x68, 0x50, 0x72, 0x6f, 0x76, 0x69, 0x64, 0x65, 0x72, 0x73, 0x12, 0x38, 0x0a, + 0x0d, 0x73, 0x74, 0x61, 0x72, 0x74, 0x5f, 0x6d, 0x6f, 0x64, 0x75, 0x6c, 0x65, 0x73, 0x18, 0x06, + 0x20, 0x03, 0x28, 0x0b, 0x32, 0x13, 0x2e, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, + 0x65, 0x72, 0x2e, 0x4d, 0x6f, 0x64, 0x75, 0x6c, 0x65, 0x52, 
0x0c, 0x73, 0x74, 0x61, 0x72, 0x74, + 0x4d, 0x6f, 0x64, 0x75, 0x6c, 0x65, 0x73, 0x12, 0x36, 0x0a, 0x0c, 0x73, 0x74, 0x6f, 0x70, 0x5f, + 0x6d, 0x6f, 0x64, 0x75, 0x6c, 0x65, 0x73, 0x18, 0x07, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x13, 0x2e, + 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x2e, 0x4d, 0x6f, 0x64, 0x75, + 0x6c, 0x65, 0x52, 0x0b, 0x73, 0x74, 0x6f, 0x70, 0x4d, 0x6f, 0x64, 0x75, 0x6c, 0x65, 0x73, 0x12, + 0x2d, 0x0a, 0x07, 0x70, 0x72, 0x65, 0x73, 0x65, 0x74, 0x73, 0x18, 0x08, 0x20, 0x03, 0x28, 0x0b, + 0x32, 0x13, 0x2e, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x2e, 0x50, + 0x72, 0x65, 0x73, 0x65, 0x74, 0x52, 0x07, 0x70, 0x72, 0x65, 0x73, 0x65, 0x74, 0x73, 0x12, 0x12, + 0x0a, 0x04, 0x70, 0x6c, 0x61, 0x6e, 0x18, 0x09, 0x20, 0x01, 0x28, 0x0c, 0x52, 0x04, 0x70, 0x6c, + 0x61, 0x6e, 0x12, 0x21, 0x0a, 0x0c, 0x6d, 0x6f, 0x64, 0x75, 0x6c, 0x65, 0x5f, 0x66, 0x69, 0x6c, + 0x65, 0x73, 0x18, 0x0a, 0x20, 0x01, 0x28, 0x0c, 0x52, 0x0b, 0x6d, 0x6f, 0x64, 0x75, 0x6c, 0x65, + 0x46, 0x69, 0x6c, 0x65, 0x73, 0x12, 0x2a, 0x0a, 0x11, 0x6d, 0x6f, 0x64, 0x75, 0x6c, 0x65, 0x5f, + 0x66, 0x69, 0x6c, 0x65, 0x73, 0x5f, 0x68, 0x61, 0x73, 0x68, 0x18, 0x0b, 0x20, 0x01, 0x28, 0x0c, + 0x52, 0x0f, 0x6d, 0x6f, 0x64, 0x75, 0x6c, 0x65, 0x46, 0x69, 0x6c, 0x65, 0x73, 0x48, 0x61, 0x73, + 0x68, 0x12, 0x20, 0x0a, 0x0c, 0x68, 0x61, 0x73, 0x5f, 0x61, 0x69, 0x5f, 0x74, 0x61, 0x73, 0x6b, + 0x73, 0x18, 0x0c, 0x20, 0x01, 0x28, 0x08, 0x52, 0x0a, 0x68, 0x61, 0x73, 0x41, 0x69, 0x54, 0x61, + 0x73, 0x6b, 0x73, 0x12, 0x2e, 0x0a, 0x13, 0x68, 0x61, 0x73, 0x5f, 0x65, 0x78, 0x74, 0x65, 0x72, + 0x6e, 0x61, 0x6c, 0x5f, 0x61, 0x67, 0x65, 0x6e, 0x74, 0x73, 0x18, 0x0d, 0x20, 0x01, 0x28, 0x08, + 0x52, 0x11, 0x68, 0x61, 0x73, 0x45, 0x78, 0x74, 0x65, 0x72, 0x6e, 0x61, 0x6c, 0x41, 0x67, 0x65, + 0x6e, 0x74, 0x73, 0x1a, 0x74, 0x0a, 0x0e, 0x54, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x44, + 0x72, 0x79, 0x52, 0x75, 0x6e, 0x12, 0x33, 0x0a, 0x09, 0x72, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, + 
0x65, 0x73, 0x18, 0x01, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x15, 0x2e, 0x70, 0x72, 0x6f, 0x76, 0x69, + 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x2e, 0x52, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x52, + 0x09, 0x72, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x73, 0x12, 0x2d, 0x0a, 0x07, 0x6d, 0x6f, + 0x64, 0x75, 0x6c, 0x65, 0x73, 0x18, 0x02, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x13, 0x2e, 0x70, 0x72, + 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x2e, 0x4d, 0x6f, 0x64, 0x75, 0x6c, 0x65, + 0x52, 0x07, 0x6d, 0x6f, 0x64, 0x75, 0x6c, 0x65, 0x73, 0x42, 0x06, 0x0a, 0x04, 0x74, 0x79, 0x70, + 0x65, 0x22, 0xb0, 0x01, 0x0a, 0x03, 0x4c, 0x6f, 0x67, 0x12, 0x2f, 0x0a, 0x06, 0x73, 0x6f, 0x75, + 0x72, 0x63, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0e, 0x32, 0x17, 0x2e, 0x70, 0x72, 0x6f, 0x76, + 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x64, 0x2e, 0x4c, 0x6f, 0x67, 0x53, 0x6f, 0x75, 0x72, + 0x63, 0x65, 0x52, 0x06, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x12, 0x2b, 0x0a, 0x05, 0x6c, 0x65, + 0x76, 0x65, 0x6c, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0e, 0x32, 0x15, 0x2e, 0x70, 0x72, 0x6f, 0x76, + 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x2e, 0x4c, 0x6f, 0x67, 0x4c, 0x65, 0x76, 0x65, 0x6c, + 0x52, 0x05, 0x6c, 0x65, 0x76, 0x65, 0x6c, 0x12, 0x1d, 0x0a, 0x0a, 0x63, 0x72, 0x65, 0x61, 0x74, + 0x65, 0x64, 0x5f, 0x61, 0x74, 0x18, 0x03, 0x20, 0x01, 0x28, 0x03, 0x52, 0x09, 0x63, 0x72, 0x65, + 0x61, 0x74, 0x65, 0x64, 0x41, 0x74, 0x12, 0x14, 0x0a, 0x05, 0x73, 0x74, 0x61, 0x67, 0x65, 0x18, + 0x04, 0x20, 0x01, 0x28, 0x09, 0x52, 0x05, 0x73, 0x74, 0x61, 0x67, 0x65, 0x12, 0x16, 0x0a, 0x06, + 0x6f, 0x75, 0x74, 0x70, 0x75, 0x74, 0x18, 0x05, 0x20, 0x01, 0x28, 0x09, 0x52, 0x06, 0x6f, 0x75, + 0x74, 0x70, 0x75, 0x74, 0x22, 0xa6, 0x03, 0x0a, 0x10, 0x55, 0x70, 0x64, 0x61, 0x74, 0x65, 0x4a, + 0x6f, 0x62, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x15, 0x0a, 0x06, 0x6a, 0x6f, 0x62, + 0x5f, 0x69, 0x64, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x05, 0x6a, 0x6f, 0x62, 0x49, 0x64, + 0x12, 0x25, 0x0a, 0x04, 0x6c, 0x6f, 
0x67, 0x73, 0x18, 0x02, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x11, + 0x2e, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x64, 0x2e, 0x4c, 0x6f, + 0x67, 0x52, 0x04, 0x6c, 0x6f, 0x67, 0x73, 0x12, 0x4c, 0x0a, 0x12, 0x74, 0x65, 0x6d, 0x70, 0x6c, + 0x61, 0x74, 0x65, 0x5f, 0x76, 0x61, 0x72, 0x69, 0x61, 0x62, 0x6c, 0x65, 0x73, 0x18, 0x04, 0x20, + 0x03, 0x28, 0x0b, 0x32, 0x1d, 0x2e, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, + 0x72, 0x2e, 0x54, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x56, 0x61, 0x72, 0x69, 0x61, 0x62, + 0x6c, 0x65, 0x52, 0x11, 0x74, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x56, 0x61, 0x72, 0x69, + 0x61, 0x62, 0x6c, 0x65, 0x73, 0x12, 0x4c, 0x0a, 0x14, 0x75, 0x73, 0x65, 0x72, 0x5f, 0x76, 0x61, + 0x72, 0x69, 0x61, 0x62, 0x6c, 0x65, 0x5f, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x73, 0x18, 0x05, 0x20, + 0x03, 0x28, 0x0b, 0x32, 0x1a, 0x2e, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, + 0x72, 0x2e, 0x56, 0x61, 0x72, 0x69, 0x61, 0x62, 0x6c, 0x65, 0x56, 0x61, 0x6c, 0x75, 0x65, 0x52, + 0x12, 0x75, 0x73, 0x65, 0x72, 0x56, 0x61, 0x72, 0x69, 0x61, 0x62, 0x6c, 0x65, 0x56, 0x61, 0x6c, + 0x75, 0x65, 0x73, 0x12, 0x16, 0x0a, 0x06, 0x72, 0x65, 0x61, 0x64, 0x6d, 0x65, 0x18, 0x06, 0x20, + 0x01, 0x28, 0x0c, 0x52, 0x06, 0x72, 0x65, 0x61, 0x64, 0x6d, 0x65, 0x12, 0x58, 0x0a, 0x0e, 0x77, + 0x6f, 0x72, 0x6b, 0x73, 0x70, 0x61, 0x63, 0x65, 0x5f, 0x74, 0x61, 0x67, 0x73, 0x18, 0x07, 0x20, + 0x03, 0x28, 0x0b, 0x32, 0x31, 0x2e, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, + 0x72, 0x64, 0x2e, 0x55, 0x70, 0x64, 0x61, 0x74, 0x65, 0x4a, 0x6f, 0x62, 0x52, 0x65, 0x71, 0x75, + 0x65, 0x73, 0x74, 0x2e, 0x57, 0x6f, 0x72, 0x6b, 0x73, 0x70, 0x61, 0x63, 0x65, 0x54, 0x61, 0x67, + 0x73, 0x45, 0x6e, 0x74, 0x72, 0x79, 0x52, 0x0d, 0x77, 0x6f, 0x72, 0x6b, 0x73, 0x70, 0x61, 0x63, + 0x65, 0x54, 0x61, 0x67, 0x73, 0x1a, 0x40, 0x0a, 0x12, 0x57, 0x6f, 0x72, 0x6b, 0x73, 0x70, 0x61, + 0x63, 0x65, 0x54, 0x61, 0x67, 0x73, 0x45, 0x6e, 0x74, 0x72, 0x79, 0x12, 
0x10, 0x0a, 0x03, 0x6b, + 0x65, 0x79, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x03, 0x6b, 0x65, 0x79, 0x12, 0x14, 0x0a, + 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x05, 0x76, 0x61, + 0x6c, 0x75, 0x65, 0x3a, 0x02, 0x38, 0x01, 0x4a, 0x04, 0x08, 0x03, 0x10, 0x04, 0x22, 0x7a, 0x0a, + 0x11, 0x55, 0x70, 0x64, 0x61, 0x74, 0x65, 0x4a, 0x6f, 0x62, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, + 0x73, 0x65, 0x12, 0x1a, 0x0a, 0x08, 0x63, 0x61, 0x6e, 0x63, 0x65, 0x6c, 0x65, 0x64, 0x18, 0x01, + 0x20, 0x01, 0x28, 0x08, 0x52, 0x08, 0x63, 0x61, 0x6e, 0x63, 0x65, 0x6c, 0x65, 0x64, 0x12, 0x43, + 0x0a, 0x0f, 0x76, 0x61, 0x72, 0x69, 0x61, 0x62, 0x6c, 0x65, 0x5f, 0x76, 0x61, 0x6c, 0x75, 0x65, + 0x73, 0x18, 0x03, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x1a, 0x2e, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, + 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x2e, 0x56, 0x61, 0x72, 0x69, 0x61, 0x62, 0x6c, 0x65, 0x56, 0x61, + 0x6c, 0x75, 0x65, 0x52, 0x0e, 0x76, 0x61, 0x72, 0x69, 0x61, 0x62, 0x6c, 0x65, 0x56, 0x61, 0x6c, + 0x75, 0x65, 0x73, 0x4a, 0x04, 0x08, 0x02, 0x10, 0x03, 0x22, 0x4a, 0x0a, 0x12, 0x43, 0x6f, 0x6d, + 0x6d, 0x69, 0x74, 0x51, 0x75, 0x6f, 0x74, 0x61, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x15, 0x0a, 0x06, 0x6a, 0x6f, 0x62, 0x5f, 0x69, 0x64, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, - 0x05, 0x6a, 0x6f, 0x62, 0x49, 0x64, 0x12, 0x25, 0x0a, 0x04, 0x6c, 0x6f, 0x67, 0x73, 0x18, 0x02, - 0x20, 0x03, 0x28, 0x0b, 0x32, 0x11, 0x2e, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, - 0x65, 0x72, 0x64, 0x2e, 0x4c, 0x6f, 0x67, 0x52, 0x04, 0x6c, 0x6f, 0x67, 0x73, 0x12, 0x4c, 0x0a, - 0x12, 0x74, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x5f, 0x76, 0x61, 0x72, 0x69, 0x61, 0x62, - 0x6c, 0x65, 0x73, 0x18, 0x04, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x1d, 0x2e, 0x70, 0x72, 0x6f, 0x76, - 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x2e, 0x54, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, - 0x56, 0x61, 0x72, 0x69, 0x61, 0x62, 0x6c, 0x65, 0x52, 0x11, 0x74, 0x65, 0x6d, 0x70, 0x6c, 0x61, - 0x74, 0x65, 
0x56, 0x61, 0x72, 0x69, 0x61, 0x62, 0x6c, 0x65, 0x73, 0x12, 0x4c, 0x0a, 0x14, 0x75, - 0x73, 0x65, 0x72, 0x5f, 0x76, 0x61, 0x72, 0x69, 0x61, 0x62, 0x6c, 0x65, 0x5f, 0x76, 0x61, 0x6c, - 0x75, 0x65, 0x73, 0x18, 0x05, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x1a, 0x2e, 0x70, 0x72, 0x6f, 0x76, - 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x2e, 0x56, 0x61, 0x72, 0x69, 0x61, 0x62, 0x6c, 0x65, - 0x56, 0x61, 0x6c, 0x75, 0x65, 0x52, 0x12, 0x75, 0x73, 0x65, 0x72, 0x56, 0x61, 0x72, 0x69, 0x61, - 0x62, 0x6c, 0x65, 0x56, 0x61, 0x6c, 0x75, 0x65, 0x73, 0x12, 0x16, 0x0a, 0x06, 0x72, 0x65, 0x61, - 0x64, 0x6d, 0x65, 0x18, 0x06, 0x20, 0x01, 0x28, 0x0c, 0x52, 0x06, 0x72, 0x65, 0x61, 0x64, 0x6d, - 0x65, 0x12, 0x58, 0x0a, 0x0e, 0x77, 0x6f, 0x72, 0x6b, 0x73, 0x70, 0x61, 0x63, 0x65, 0x5f, 0x74, - 0x61, 0x67, 0x73, 0x18, 0x07, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x31, 0x2e, 0x70, 0x72, 0x6f, 0x76, - 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x64, 0x2e, 0x55, 0x70, 0x64, 0x61, 0x74, 0x65, 0x4a, - 0x6f, 0x62, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x2e, 0x57, 0x6f, 0x72, 0x6b, 0x73, 0x70, - 0x61, 0x63, 0x65, 0x54, 0x61, 0x67, 0x73, 0x45, 0x6e, 0x74, 0x72, 0x79, 0x52, 0x0d, 0x77, 0x6f, - 0x72, 0x6b, 0x73, 0x70, 0x61, 0x63, 0x65, 0x54, 0x61, 0x67, 0x73, 0x1a, 0x40, 0x0a, 0x12, 0x57, - 0x6f, 0x72, 0x6b, 0x73, 0x70, 0x61, 0x63, 0x65, 0x54, 0x61, 0x67, 0x73, 0x45, 0x6e, 0x74, 0x72, - 0x79, 0x12, 0x10, 0x0a, 0x03, 0x6b, 0x65, 0x79, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x03, - 0x6b, 0x65, 0x79, 0x12, 0x14, 0x0a, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x18, 0x02, 0x20, 0x01, - 0x28, 0x09, 0x52, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x3a, 0x02, 0x38, 0x01, 0x4a, 0x04, 0x08, - 0x03, 0x10, 0x04, 0x22, 0x7a, 0x0a, 0x11, 0x55, 0x70, 0x64, 0x61, 0x74, 0x65, 0x4a, 0x6f, 0x62, - 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x1a, 0x0a, 0x08, 0x63, 0x61, 0x6e, 0x63, - 0x65, 0x6c, 0x65, 0x64, 0x18, 0x01, 0x20, 0x01, 0x28, 0x08, 0x52, 0x08, 0x63, 0x61, 0x6e, 0x63, - 0x65, 0x6c, 0x65, 0x64, 0x12, 0x43, 0x0a, 0x0f, 
0x76, 0x61, 0x72, 0x69, 0x61, 0x62, 0x6c, 0x65, - 0x5f, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x73, 0x18, 0x03, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x1a, 0x2e, - 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x2e, 0x56, 0x61, 0x72, 0x69, - 0x61, 0x62, 0x6c, 0x65, 0x56, 0x61, 0x6c, 0x75, 0x65, 0x52, 0x0e, 0x76, 0x61, 0x72, 0x69, 0x61, - 0x62, 0x6c, 0x65, 0x56, 0x61, 0x6c, 0x75, 0x65, 0x73, 0x4a, 0x04, 0x08, 0x02, 0x10, 0x03, 0x22, - 0x4a, 0x0a, 0x12, 0x43, 0x6f, 0x6d, 0x6d, 0x69, 0x74, 0x51, 0x75, 0x6f, 0x74, 0x61, 0x52, 0x65, - 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x15, 0x0a, 0x06, 0x6a, 0x6f, 0x62, 0x5f, 0x69, 0x64, 0x18, - 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x05, 0x6a, 0x6f, 0x62, 0x49, 0x64, 0x12, 0x1d, 0x0a, 0x0a, - 0x64, 0x61, 0x69, 0x6c, 0x79, 0x5f, 0x63, 0x6f, 0x73, 0x74, 0x18, 0x02, 0x20, 0x01, 0x28, 0x05, - 0x52, 0x09, 0x64, 0x61, 0x69, 0x6c, 0x79, 0x43, 0x6f, 0x73, 0x74, 0x22, 0x68, 0x0a, 0x13, 0x43, - 0x6f, 0x6d, 0x6d, 0x69, 0x74, 0x51, 0x75, 0x6f, 0x74, 0x61, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, - 0x73, 0x65, 0x12, 0x0e, 0x0a, 0x02, 0x6f, 0x6b, 0x18, 0x01, 0x20, 0x01, 0x28, 0x08, 0x52, 0x02, - 0x6f, 0x6b, 0x12, 0x29, 0x0a, 0x10, 0x63, 0x72, 0x65, 0x64, 0x69, 0x74, 0x73, 0x5f, 0x63, 0x6f, - 0x6e, 0x73, 0x75, 0x6d, 0x65, 0x64, 0x18, 0x02, 0x20, 0x01, 0x28, 0x05, 0x52, 0x0f, 0x63, 0x72, - 0x65, 0x64, 0x69, 0x74, 0x73, 0x43, 0x6f, 0x6e, 0x73, 0x75, 0x6d, 0x65, 0x64, 0x12, 0x16, 0x0a, - 0x06, 0x62, 0x75, 0x64, 0x67, 0x65, 0x74, 0x18, 0x03, 0x20, 0x01, 0x28, 0x05, 0x52, 0x06, 0x62, - 0x75, 0x64, 0x67, 0x65, 0x74, 0x22, 0x0f, 0x0a, 0x0d, 0x43, 0x61, 0x6e, 0x63, 0x65, 0x6c, 0x41, - 0x63, 0x71, 0x75, 0x69, 0x72, 0x65, 0x22, 0x93, 0x01, 0x0a, 0x11, 0x55, 0x70, 0x6c, 0x6f, 0x61, - 0x64, 0x46, 0x69, 0x6c, 0x65, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x3a, 0x0a, 0x0b, - 0x64, 0x61, 0x74, 0x61, 0x5f, 0x75, 0x70, 0x6c, 0x6f, 0x61, 0x64, 0x18, 0x01, 0x20, 0x01, 0x28, - 0x0b, 0x32, 0x17, 0x2e, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 
0x72, 0x2e, - 0x44, 0x61, 0x74, 0x61, 0x55, 0x70, 0x6c, 0x6f, 0x61, 0x64, 0x48, 0x00, 0x52, 0x0a, 0x64, 0x61, - 0x74, 0x61, 0x55, 0x70, 0x6c, 0x6f, 0x61, 0x64, 0x12, 0x3a, 0x0a, 0x0b, 0x63, 0x68, 0x75, 0x6e, - 0x6b, 0x5f, 0x70, 0x69, 0x65, 0x63, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x17, 0x2e, - 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x2e, 0x43, 0x68, 0x75, 0x6e, - 0x6b, 0x50, 0x69, 0x65, 0x63, 0x65, 0x48, 0x00, 0x52, 0x0a, 0x63, 0x68, 0x75, 0x6e, 0x6b, 0x50, - 0x69, 0x65, 0x63, 0x65, 0x42, 0x06, 0x0a, 0x04, 0x74, 0x79, 0x70, 0x65, 0x2a, 0x34, 0x0a, 0x09, - 0x4c, 0x6f, 0x67, 0x53, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x12, 0x16, 0x0a, 0x12, 0x50, 0x52, 0x4f, - 0x56, 0x49, 0x53, 0x49, 0x4f, 0x4e, 0x45, 0x52, 0x5f, 0x44, 0x41, 0x45, 0x4d, 0x4f, 0x4e, 0x10, - 0x00, 0x12, 0x0f, 0x0a, 0x0b, 0x50, 0x52, 0x4f, 0x56, 0x49, 0x53, 0x49, 0x4f, 0x4e, 0x45, 0x52, - 0x10, 0x01, 0x32, 0x8b, 0x04, 0x0a, 0x11, 0x50, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, - 0x65, 0x72, 0x44, 0x61, 0x65, 0x6d, 0x6f, 0x6e, 0x12, 0x41, 0x0a, 0x0a, 0x41, 0x63, 0x71, 0x75, - 0x69, 0x72, 0x65, 0x4a, 0x6f, 0x62, 0x12, 0x13, 0x2e, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, - 0x6f, 0x6e, 0x65, 0x72, 0x64, 0x2e, 0x45, 0x6d, 0x70, 0x74, 0x79, 0x1a, 0x19, 0x2e, 0x70, 0x72, - 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x64, 0x2e, 0x41, 0x63, 0x71, 0x75, 0x69, - 0x72, 0x65, 0x64, 0x4a, 0x6f, 0x62, 0x22, 0x03, 0x88, 0x02, 0x01, 0x12, 0x52, 0x0a, 0x14, 0x41, - 0x63, 0x71, 0x75, 0x69, 0x72, 0x65, 0x4a, 0x6f, 0x62, 0x57, 0x69, 0x74, 0x68, 0x43, 0x61, 0x6e, - 0x63, 0x65, 0x6c, 0x12, 0x1b, 0x2e, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, - 0x72, 0x64, 0x2e, 0x43, 0x61, 0x6e, 0x63, 0x65, 0x6c, 0x41, 0x63, 0x71, 0x75, 0x69, 0x72, 0x65, - 0x1a, 0x19, 0x2e, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x64, 0x2e, - 0x41, 0x63, 0x71, 0x75, 0x69, 0x72, 0x65, 0x64, 0x4a, 0x6f, 0x62, 0x28, 0x01, 0x30, 0x01, 0x12, - 0x52, 0x0a, 0x0b, 0x43, 
0x6f, 0x6d, 0x6d, 0x69, 0x74, 0x51, 0x75, 0x6f, 0x74, 0x61, 0x12, 0x20, - 0x2e, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x64, 0x2e, 0x43, 0x6f, - 0x6d, 0x6d, 0x69, 0x74, 0x51, 0x75, 0x6f, 0x74, 0x61, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, - 0x1a, 0x21, 0x2e, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x64, 0x2e, - 0x43, 0x6f, 0x6d, 0x6d, 0x69, 0x74, 0x51, 0x75, 0x6f, 0x74, 0x61, 0x52, 0x65, 0x73, 0x70, 0x6f, - 0x6e, 0x73, 0x65, 0x12, 0x4c, 0x0a, 0x09, 0x55, 0x70, 0x64, 0x61, 0x74, 0x65, 0x4a, 0x6f, 0x62, - 0x12, 0x1e, 0x2e, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x64, 0x2e, - 0x55, 0x70, 0x64, 0x61, 0x74, 0x65, 0x4a, 0x6f, 0x62, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, - 0x1a, 0x1f, 0x2e, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x64, 0x2e, - 0x55, 0x70, 0x64, 0x61, 0x74, 0x65, 0x4a, 0x6f, 0x62, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, - 0x65, 0x12, 0x37, 0x0a, 0x07, 0x46, 0x61, 0x69, 0x6c, 0x4a, 0x6f, 0x62, 0x12, 0x17, 0x2e, 0x70, - 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x64, 0x2e, 0x46, 0x61, 0x69, 0x6c, - 0x65, 0x64, 0x4a, 0x6f, 0x62, 0x1a, 0x13, 0x2e, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, - 0x6e, 0x65, 0x72, 0x64, 0x2e, 0x45, 0x6d, 0x70, 0x74, 0x79, 0x12, 0x3e, 0x0a, 0x0b, 0x43, 0x6f, - 0x6d, 0x70, 0x6c, 0x65, 0x74, 0x65, 0x4a, 0x6f, 0x62, 0x12, 0x1a, 0x2e, 0x70, 0x72, 0x6f, 0x76, - 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x64, 0x2e, 0x43, 0x6f, 0x6d, 0x70, 0x6c, 0x65, 0x74, - 0x65, 0x64, 0x4a, 0x6f, 0x62, 0x1a, 0x13, 0x2e, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, - 0x6e, 0x65, 0x72, 0x64, 0x2e, 0x45, 0x6d, 0x70, 0x74, 0x79, 0x12, 0x44, 0x0a, 0x0a, 0x55, 0x70, - 0x6c, 0x6f, 0x61, 0x64, 0x46, 0x69, 0x6c, 0x65, 0x12, 0x1f, 0x2e, 0x70, 0x72, 0x6f, 0x76, 0x69, - 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x64, 0x2e, 0x55, 0x70, 0x6c, 0x6f, 0x61, 0x64, 0x46, 0x69, - 0x6c, 0x65, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 
0x13, 0x2e, 0x70, 0x72, 0x6f, 0x76, - 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x64, 0x2e, 0x45, 0x6d, 0x70, 0x74, 0x79, 0x28, 0x01, - 0x42, 0x2e, 0x5a, 0x2c, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x63, - 0x6f, 0x64, 0x65, 0x72, 0x2f, 0x63, 0x6f, 0x64, 0x65, 0x72, 0x2f, 0x76, 0x32, 0x2f, 0x70, 0x72, - 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x64, 0x2f, 0x70, 0x72, 0x6f, 0x74, 0x6f, - 0x62, 0x06, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x33, + 0x05, 0x6a, 0x6f, 0x62, 0x49, 0x64, 0x12, 0x1d, 0x0a, 0x0a, 0x64, 0x61, 0x69, 0x6c, 0x79, 0x5f, + 0x63, 0x6f, 0x73, 0x74, 0x18, 0x02, 0x20, 0x01, 0x28, 0x05, 0x52, 0x09, 0x64, 0x61, 0x69, 0x6c, + 0x79, 0x43, 0x6f, 0x73, 0x74, 0x22, 0x68, 0x0a, 0x13, 0x43, 0x6f, 0x6d, 0x6d, 0x69, 0x74, 0x51, + 0x75, 0x6f, 0x74, 0x61, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x0e, 0x0a, 0x02, + 0x6f, 0x6b, 0x18, 0x01, 0x20, 0x01, 0x28, 0x08, 0x52, 0x02, 0x6f, 0x6b, 0x12, 0x29, 0x0a, 0x10, + 0x63, 0x72, 0x65, 0x64, 0x69, 0x74, 0x73, 0x5f, 0x63, 0x6f, 0x6e, 0x73, 0x75, 0x6d, 0x65, 0x64, + 0x18, 0x02, 0x20, 0x01, 0x28, 0x05, 0x52, 0x0f, 0x63, 0x72, 0x65, 0x64, 0x69, 0x74, 0x73, 0x43, + 0x6f, 0x6e, 0x73, 0x75, 0x6d, 0x65, 0x64, 0x12, 0x16, 0x0a, 0x06, 0x62, 0x75, 0x64, 0x67, 0x65, + 0x74, 0x18, 0x03, 0x20, 0x01, 0x28, 0x05, 0x52, 0x06, 0x62, 0x75, 0x64, 0x67, 0x65, 0x74, 0x22, + 0x0f, 0x0a, 0x0d, 0x43, 0x61, 0x6e, 0x63, 0x65, 0x6c, 0x41, 0x63, 0x71, 0x75, 0x69, 0x72, 0x65, + 0x22, 0x93, 0x01, 0x0a, 0x11, 0x55, 0x70, 0x6c, 0x6f, 0x61, 0x64, 0x46, 0x69, 0x6c, 0x65, 0x52, + 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x3a, 0x0a, 0x0b, 0x64, 0x61, 0x74, 0x61, 0x5f, 0x75, + 0x70, 0x6c, 0x6f, 0x61, 0x64, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x17, 0x2e, 0x70, 0x72, + 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x2e, 0x44, 0x61, 0x74, 0x61, 0x55, 0x70, + 0x6c, 0x6f, 0x61, 0x64, 0x48, 0x00, 0x52, 0x0a, 0x64, 0x61, 0x74, 0x61, 0x55, 0x70, 0x6c, 0x6f, + 0x61, 0x64, 0x12, 0x3a, 0x0a, 0x0b, 0x63, 0x68, 
0x75, 0x6e, 0x6b, 0x5f, 0x70, 0x69, 0x65, 0x63, + 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x17, 0x2e, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, + 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x2e, 0x43, 0x68, 0x75, 0x6e, 0x6b, 0x50, 0x69, 0x65, 0x63, 0x65, + 0x48, 0x00, 0x52, 0x0a, 0x63, 0x68, 0x75, 0x6e, 0x6b, 0x50, 0x69, 0x65, 0x63, 0x65, 0x42, 0x06, + 0x0a, 0x04, 0x74, 0x79, 0x70, 0x65, 0x2a, 0x34, 0x0a, 0x09, 0x4c, 0x6f, 0x67, 0x53, 0x6f, 0x75, + 0x72, 0x63, 0x65, 0x12, 0x16, 0x0a, 0x12, 0x50, 0x52, 0x4f, 0x56, 0x49, 0x53, 0x49, 0x4f, 0x4e, + 0x45, 0x52, 0x5f, 0x44, 0x41, 0x45, 0x4d, 0x4f, 0x4e, 0x10, 0x00, 0x12, 0x0f, 0x0a, 0x0b, 0x50, + 0x52, 0x4f, 0x56, 0x49, 0x53, 0x49, 0x4f, 0x4e, 0x45, 0x52, 0x10, 0x01, 0x32, 0x8b, 0x04, 0x0a, + 0x11, 0x50, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x44, 0x61, 0x65, 0x6d, + 0x6f, 0x6e, 0x12, 0x41, 0x0a, 0x0a, 0x41, 0x63, 0x71, 0x75, 0x69, 0x72, 0x65, 0x4a, 0x6f, 0x62, + 0x12, 0x13, 0x2e, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x64, 0x2e, + 0x45, 0x6d, 0x70, 0x74, 0x79, 0x1a, 0x19, 0x2e, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, + 0x6e, 0x65, 0x72, 0x64, 0x2e, 0x41, 0x63, 0x71, 0x75, 0x69, 0x72, 0x65, 0x64, 0x4a, 0x6f, 0x62, + 0x22, 0x03, 0x88, 0x02, 0x01, 0x12, 0x52, 0x0a, 0x14, 0x41, 0x63, 0x71, 0x75, 0x69, 0x72, 0x65, + 0x4a, 0x6f, 0x62, 0x57, 0x69, 0x74, 0x68, 0x43, 0x61, 0x6e, 0x63, 0x65, 0x6c, 0x12, 0x1b, 0x2e, + 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x64, 0x2e, 0x43, 0x61, 0x6e, + 0x63, 0x65, 0x6c, 0x41, 0x63, 0x71, 0x75, 0x69, 0x72, 0x65, 0x1a, 0x19, 0x2e, 0x70, 0x72, 0x6f, + 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x64, 0x2e, 0x41, 0x63, 0x71, 0x75, 0x69, 0x72, + 0x65, 0x64, 0x4a, 0x6f, 0x62, 0x28, 0x01, 0x30, 0x01, 0x12, 0x52, 0x0a, 0x0b, 0x43, 0x6f, 0x6d, + 0x6d, 0x69, 0x74, 0x51, 0x75, 0x6f, 0x74, 0x61, 0x12, 0x20, 0x2e, 0x70, 0x72, 0x6f, 0x76, 0x69, + 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x64, 0x2e, 0x43, 0x6f, 0x6d, 0x6d, 0x69, 0x74, 
0x51, 0x75, + 0x6f, 0x74, 0x61, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x21, 0x2e, 0x70, 0x72, 0x6f, + 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x64, 0x2e, 0x43, 0x6f, 0x6d, 0x6d, 0x69, 0x74, + 0x51, 0x75, 0x6f, 0x74, 0x61, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x4c, 0x0a, + 0x09, 0x55, 0x70, 0x64, 0x61, 0x74, 0x65, 0x4a, 0x6f, 0x62, 0x12, 0x1e, 0x2e, 0x70, 0x72, 0x6f, + 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x64, 0x2e, 0x55, 0x70, 0x64, 0x61, 0x74, 0x65, + 0x4a, 0x6f, 0x62, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x1f, 0x2e, 0x70, 0x72, 0x6f, + 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x64, 0x2e, 0x55, 0x70, 0x64, 0x61, 0x74, 0x65, + 0x4a, 0x6f, 0x62, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x37, 0x0a, 0x07, 0x46, + 0x61, 0x69, 0x6c, 0x4a, 0x6f, 0x62, 0x12, 0x17, 0x2e, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, + 0x6f, 0x6e, 0x65, 0x72, 0x64, 0x2e, 0x46, 0x61, 0x69, 0x6c, 0x65, 0x64, 0x4a, 0x6f, 0x62, 0x1a, + 0x13, 0x2e, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x64, 0x2e, 0x45, + 0x6d, 0x70, 0x74, 0x79, 0x12, 0x3e, 0x0a, 0x0b, 0x43, 0x6f, 0x6d, 0x70, 0x6c, 0x65, 0x74, 0x65, + 0x4a, 0x6f, 0x62, 0x12, 0x1a, 0x2e, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, + 0x72, 0x64, 0x2e, 0x43, 0x6f, 0x6d, 0x70, 0x6c, 0x65, 0x74, 0x65, 0x64, 0x4a, 0x6f, 0x62, 0x1a, + 0x13, 0x2e, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x64, 0x2e, 0x45, + 0x6d, 0x70, 0x74, 0x79, 0x12, 0x44, 0x0a, 0x0a, 0x55, 0x70, 0x6c, 0x6f, 0x61, 0x64, 0x46, 0x69, + 0x6c, 0x65, 0x12, 0x1f, 0x2e, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, + 0x64, 0x2e, 0x55, 0x70, 0x6c, 0x6f, 0x61, 0x64, 0x46, 0x69, 0x6c, 0x65, 0x52, 0x65, 0x71, 0x75, + 0x65, 0x73, 0x74, 0x1a, 0x13, 0x2e, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, + 0x72, 0x64, 0x2e, 0x45, 0x6d, 0x70, 0x74, 0x79, 0x28, 0x01, 0x42, 0x2e, 0x5a, 0x2c, 0x67, 0x69, + 0x74, 0x68, 0x75, 0x62, 
0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x63, 0x6f, 0x64, 0x65, 0x72, 0x2f, 0x63, + 0x6f, 0x64, 0x65, 0x72, 0x2f, 0x76, 0x32, 0x2f, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, + 0x6e, 0x65, 0x72, 0x64, 0x2f, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x06, 0x70, 0x72, 0x6f, 0x74, + 0x6f, 0x33, } var ( @@ -2307,6 +2322,7 @@ func file_provisionerd_proto_provisionerd_proto_init() { (*UploadFileRequest_DataUpload)(nil), (*UploadFileRequest_ChunkPiece)(nil), } + file_provisionerd_proto_provisionerd_proto_msgTypes[11].OneofWrappers = []interface{}{} type x struct{} out := protoimpl.TypeBuilder{ File: protoimpl.DescBuilder{ diff --git a/provisionerd/proto/provisionerd.proto b/provisionerd/proto/provisionerd.proto index b008da33ea87e..5c54a600c0c1a 100644 --- a/provisionerd/proto/provisionerd.proto +++ b/provisionerd/proto/provisionerd.proto @@ -26,6 +26,7 @@ message AcquiredJob { // workspace build. Omit these values if the workspace is being created // for the first time. repeated provisioner.RichParameterValue previous_parameter_values = 10; + optional bool exp_reuse_terraform_workspace = 11; } message TemplateImport { provisioner.Metadata metadata = 1; diff --git a/provisionerd/proto/version.go b/provisionerd/proto/version.go index a7ea326d0f466..0c23b3939d4f2 100644 --- a/provisionerd/proto/version.go +++ b/provisionerd/proto/version.go @@ -57,9 +57,14 @@ import "github.com/coder/coder/v2/apiversion" // API v1.11: // - Added new fields `task_id` and `task_prompt` to `Manifest`. // - Added new field `app_id` to `AITask` +// +// API v1.12: +// - Added new field `template_version_id` to `provisioner.Metadata` +// - Added new field `exp_reuse_terraform_workspace` to `provisioner.Job.WorkspaceBuild` +// - Added fields `template_version_id`, `template_id`, and `exp_reuse_terraform_workspace` to `provisioner.Config` const ( CurrentMajor = 1 - CurrentMinor = 11 + CurrentMinor = 12 ) // CurrentVersion is the current provisionerd API version. 
diff --git a/provisionerd/provisionerd_test.go b/provisionerd/provisionerd_test.go index 1b4b6720b48e9..fc4d069a88597 100644 --- a/provisionerd/provisionerd_test.go +++ b/provisionerd/provisionerd_test.go @@ -26,6 +26,7 @@ import ( "github.com/coder/coder/v2/provisionerd/proto" "github.com/coder/coder/v2/provisionersdk" sdkproto "github.com/coder/coder/v2/provisionersdk/proto" + "github.com/coder/coder/v2/provisionersdk/tfpath" "github.com/coder/coder/v2/testutil" ) @@ -318,8 +319,8 @@ func TestProvisionerd(t *testing.T) { JobId: "test", Provisioner: "someprovisioner", TemplateSourceArchive: testutil.CreateTar(t, map[string]string{ - "test.txt": "content", - provisionersdk.ReadmeFile: "# A cool template 😎\n", + "test.txt": "content", + tfpath.ReadmeFile: "# A cool template 😎\n", }), Type: &proto.AcquiredJob_TemplateImport_{ TemplateImport: &proto.AcquiredJob_TemplateImport{ @@ -353,7 +354,7 @@ func TestProvisionerd(t *testing.T) { _ *sdkproto.ParseRequest, cancelOrComplete <-chan struct{}, ) *sdkproto.ParseComplete { - data, err := os.ReadFile(filepath.Join(s.WorkDirectory, "test.txt")) + data, err := os.ReadFile(filepath.Join(s.Files.WorkDirectory(), "test.txt")) require.NoError(t, err) require.Equal(t, "content", string(data)) s.ProvisionLog(sdkproto.LogLevel_INFO, "hello") diff --git a/provisionerd/runner/runner.go b/provisionerd/runner/runner.go index 924f0628820ce..22b6403fe729d 100644 --- a/provisionerd/runner/runner.go +++ b/provisionerd/runner/runner.go @@ -21,6 +21,7 @@ import ( "golang.org/x/xerrors" "cdr.dev/slog" + strings2 "github.com/coder/coder/v2/coderd/util/strings" "github.com/coder/coder/v2/coderd/tracing" "github.com/coder/coder/v2/coderd/util/ptr" @@ -514,7 +515,10 @@ func (r *Runner) runTemplateImport(ctx context.Context) (*proto.CompletedJob, *p defer span.End() failedJob := r.configure(&sdkproto.Config{ - TemplateSourceArchive: r.job.GetTemplateSourceArchive(), + TemplateSourceArchive: r.job.GetTemplateSourceArchive(), + TemplateId: 
strings2.EmptyToNil(r.job.GetTemplateImport().Metadata.TemplateId), + TemplateVersionId: strings2.EmptyToNil(r.job.GetTemplateImport().Metadata.TemplateVersionId), + ExpReuseTerraformWorkspace: ptr.Ref(false), }) if failedJob != nil { return nil, failedJob @@ -1010,9 +1014,12 @@ func (r *Runner) runWorkspaceBuild(ctx context.Context) (*proto.CompletedJob, *p } failedJob := r.configure(&sdkproto.Config{ - TemplateSourceArchive: r.job.GetTemplateSourceArchive(), - State: r.job.GetWorkspaceBuild().State, - ProvisionerLogLevel: r.job.GetWorkspaceBuild().LogLevel, + TemplateSourceArchive: r.job.GetTemplateSourceArchive(), + State: r.job.GetWorkspaceBuild().State, + ProvisionerLogLevel: r.job.GetWorkspaceBuild().LogLevel, + TemplateId: strings2.EmptyToNil(r.job.GetWorkspaceBuild().Metadata.TemplateId), + TemplateVersionId: strings2.EmptyToNil(r.job.GetWorkspaceBuild().Metadata.TemplateVersionId), + ExpReuseTerraformWorkspace: r.job.GetWorkspaceBuild().ExpReuseTerraformWorkspace, }) if failedJob != nil { return nil, failedJob diff --git a/provisionersdk/cleanup.go b/provisionersdk/cleanup.go deleted file mode 100644 index b515c636b4eba..0000000000000 --- a/provisionersdk/cleanup.go +++ /dev/null @@ -1,48 +0,0 @@ -package provisionersdk - -import ( - "context" - "path/filepath" - "time" - - "github.com/spf13/afero" - "golang.org/x/xerrors" - - "cdr.dev/slog" -) - -// CleanStaleSessions browses the work directory searching for stale session -// directories. Coder provisioner is supposed to remove them once after finishing the provisioning, -// but there is a risk of keeping them in case of a failure. 
-func CleanStaleSessions(ctx context.Context, workDirectory string, fs afero.Fs, now time.Time, logger slog.Logger) error { - entries, err := afero.ReadDir(fs, workDirectory) - if err != nil { - return xerrors.Errorf("can't read %q directory", workDirectory) - } - - for _, fi := range entries { - dirName := fi.Name() - - if fi.IsDir() && isValidSessionDir(dirName) { - sessionDirPath := filepath.Join(workDirectory, dirName) - - modTime := fi.ModTime() // fallback to modTime if modTime is not available (afero) - - if modTime.Add(staleSessionRetention).After(now) { - continue - } - - logger.Info(ctx, "remove stale session directory", slog.F("session_path", sessionDirPath)) - err = fs.RemoveAll(sessionDirPath) - if err != nil { - return xerrors.Errorf("can't remove %q directory: %w", sessionDirPath, err) - } - } - } - return nil -} - -func isValidSessionDir(dirName string) bool { - match, err := filepath.Match(sessionDirPrefix+"*", dirName) - return err == nil && match -} diff --git a/provisionersdk/cleanup_test.go b/provisionersdk/cleanup_test.go index e23c7a9f78f9a..3bc0064f88132 100644 --- a/provisionersdk/cleanup_test.go +++ b/provisionersdk/cleanup_test.go @@ -11,7 +11,7 @@ import ( "github.com/stretchr/testify/require" "cdr.dev/slog" - "github.com/coder/coder/v2/provisionersdk" + "github.com/coder/coder/v2/provisionersdk/tfpath" "github.com/coder/coder/v2/testutil" ) @@ -40,15 +40,18 @@ func TestStaleSessions(t *testing.T) { fs, logger := prepare() // given - first := provisionersdk.SessionDir(uuid.NewString()) + first := tfpath.Session(workDirectory, uuid.NewString()) addSessionFolder(t, fs, first, now.Add(-7*24*time.Hour)) - second := provisionersdk.SessionDir(uuid.NewString()) + second := tfpath.Session(workDirectory, uuid.NewString()) addSessionFolder(t, fs, second, now.Add(-8*24*time.Hour)) - third := provisionersdk.SessionDir(uuid.NewString()) + third := tfpath.Session(workDirectory, uuid.NewString()) addSessionFolder(t, fs, third, now.Add(-9*24*time.Hour)) 
+ // tfDir is a fake session that will clean up the others + tfDir := tfpath.Session(workDirectory, uuid.NewString()) // when - provisionersdk.CleanStaleSessions(ctx, workDirectory, fs, now, logger) + err := tfDir.CleanStaleSessions(ctx, logger, fs, now) + require.NoError(t, err) // then entries, err := afero.ReadDir(fs, workDirectory) @@ -65,19 +68,21 @@ func TestStaleSessions(t *testing.T) { fs, logger := prepare() // given - first := provisionersdk.SessionDir(uuid.NewString()) + first := tfpath.Session(workDirectory, uuid.NewString()) addSessionFolder(t, fs, first, now.Add(-7*24*time.Hour)) - second := provisionersdk.SessionDir(uuid.NewString()) + second := tfpath.Session(workDirectory, uuid.NewString()) addSessionFolder(t, fs, second, now.Add(-6*24*time.Hour)) + tfDir := tfpath.Session(workDirectory, uuid.NewString()) // when - provisionersdk.CleanStaleSessions(ctx, workDirectory, fs, now, logger) + err := tfDir.CleanStaleSessions(ctx, logger, fs, now) + require.NoError(t, err) // then entries, err := afero.ReadDir(fs, workDirectory) require.NoError(t, err) require.Len(t, entries, 1, "one session should be present") - require.Equal(t, second, entries[0].Name(), 1) + require.Equal(t, second.WorkDirectory(), filepath.Join(workDirectory, entries[0].Name()), 1) }) t.Run("no stale sessions", func(t *testing.T) { @@ -89,13 +94,15 @@ func TestStaleSessions(t *testing.T) { fs, logger := prepare() // given - first := provisionersdk.SessionDir(uuid.NewString()) + first := tfpath.Session(workDirectory, uuid.NewString()) addSessionFolder(t, fs, first, now.Add(-6*24*time.Hour)) - second := provisionersdk.SessionDir(uuid.NewString()) + second := tfpath.Session(workDirectory, uuid.NewString()) addSessionFolder(t, fs, second, now.Add(-5*24*time.Hour)) + tfDir := tfpath.Session(workDirectory, uuid.NewString()) // when - provisionersdk.CleanStaleSessions(ctx, workDirectory, fs, now, logger) + err := tfDir.CleanStaleSessions(ctx, logger, fs, now) + require.NoError(t, err) // then 
entries, err := afero.ReadDir(fs, workDirectory) @@ -104,9 +111,10 @@ func TestStaleSessions(t *testing.T) { }) } -func addSessionFolder(t *testing.T, fs afero.Fs, sessionName string, modTime time.Time) { - err := fs.MkdirAll(filepath.Join(workDirectory, sessionName), 0o755) +func addSessionFolder(t *testing.T, fs afero.Fs, files tfpath.Layout, modTime time.Time) { + workdir := files.WorkDirectory() + err := fs.MkdirAll(workdir, 0o755) require.NoError(t, err, "can't create session folder") - require.NoError(t, fs.Chtimes(filepath.Join(workDirectory, sessionName), now, modTime), "can't chtime of session dir") + require.NoError(t, fs.Chtimes(workdir, now, modTime), "can't chtime of session dir") require.NoError(t, err, "can't set times") } diff --git a/provisionersdk/proto/provisioner.pb.go b/provisionersdk/proto/provisioner.pb.go index b884f5a21aca6..72741a1036b41 100644 --- a/provisionersdk/proto/provisioner.pb.go +++ b/provisionersdk/proto/provisioner.pb.go @@ -2946,6 +2946,7 @@ type Metadata struct { RunningAgentAuthTokens []*RunningAgentAuthToken `protobuf:"bytes,21,rep,name=running_agent_auth_tokens,json=runningAgentAuthTokens,proto3" json:"running_agent_auth_tokens,omitempty"` TaskId string `protobuf:"bytes,22,opt,name=task_id,json=taskId,proto3" json:"task_id,omitempty"` TaskPrompt string `protobuf:"bytes,23,opt,name=task_prompt,json=taskPrompt,proto3" json:"task_prompt,omitempty"` + TemplateVersionId string `protobuf:"bytes,24,opt,name=template_version_id,json=templateVersionId,proto3" json:"template_version_id,omitempty"` } func (x *Metadata) Reset() { @@ -3141,6 +3142,13 @@ func (x *Metadata) GetTaskPrompt() string { return "" } +func (x *Metadata) GetTemplateVersionId() string { + if x != nil { + return x.TemplateVersionId + } + return "" +} + // Config represents execution configuration shared by all subsequent requests in the Session type Config struct { state protoimpl.MessageState @@ -3152,6 +3160,11 @@ type Config struct { // state is the provisioner 
state (if any) State []byte `protobuf:"bytes,2,opt,name=state,proto3" json:"state,omitempty"` ProvisionerLogLevel string `protobuf:"bytes,3,opt,name=provisioner_log_level,json=provisionerLogLevel,proto3" json:"provisioner_log_level,omitempty"` + // Template imports can omit template id + TemplateId *string `protobuf:"bytes,4,opt,name=template_id,json=templateId,proto3,oneof" json:"template_id,omitempty"` + // Dry runs omit version id + TemplateVersionId *string `protobuf:"bytes,5,opt,name=template_version_id,json=templateVersionId,proto3,oneof" json:"template_version_id,omitempty"` + ExpReuseTerraformWorkspace *bool `protobuf:"varint,6,opt,name=exp_reuse_terraform_workspace,json=expReuseTerraformWorkspace,proto3,oneof" json:"exp_reuse_terraform_workspace,omitempty"` // Whether to reuse existing terraform workspaces if they exist. } func (x *Config) Reset() { @@ -3207,6 +3220,27 @@ func (x *Config) GetProvisionerLogLevel() string { return "" } +func (x *Config) GetTemplateId() string { + if x != nil && x.TemplateId != nil { + return *x.TemplateId + } + return "" +} + +func (x *Config) GetTemplateVersionId() string { + if x != nil && x.TemplateVersionId != nil { + return *x.TemplateVersionId + } + return "" +} + +func (x *Config) GetExpReuseTerraformWorkspace() bool { + if x != nil && x.ExpReuseTerraformWorkspace != nil { + return *x.ExpReuseTerraformWorkspace + } + return false +} + // ParseRequest consumes source-code to produce inputs. 
type ParseRequest struct { state protoimpl.MessageState @@ -4766,7 +4800,7 @@ var file_provisionersdk_proto_provisioner_proto_rawDesc = []byte{ 0x62, 0x61, 0x72, 0x41, 0x70, 0x70, 0x48, 0x00, 0x52, 0x0a, 0x73, 0x69, 0x64, 0x65, 0x62, 0x61, 0x72, 0x41, 0x70, 0x70, 0x88, 0x01, 0x01, 0x12, 0x15, 0x0a, 0x06, 0x61, 0x70, 0x70, 0x5f, 0x69, 0x64, 0x18, 0x03, 0x20, 0x01, 0x28, 0x09, 0x52, 0x05, 0x61, 0x70, 0x70, 0x49, 0x64, 0x42, 0x0e, - 0x0a, 0x0c, 0x5f, 0x73, 0x69, 0x64, 0x65, 0x62, 0x61, 0x72, 0x5f, 0x61, 0x70, 0x70, 0x22, 0x84, + 0x0a, 0x0c, 0x5f, 0x73, 0x69, 0x64, 0x65, 0x62, 0x61, 0x72, 0x5f, 0x61, 0x70, 0x70, 0x22, 0xb4, 0x0a, 0x0a, 0x08, 0x4d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x12, 0x1b, 0x0a, 0x09, 0x63, 0x6f, 0x64, 0x65, 0x72, 0x5f, 0x75, 0x72, 0x6c, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x08, 0x63, 0x6f, 0x64, 0x65, 0x72, 0x55, 0x72, 0x6c, 0x12, 0x53, 0x0a, 0x14, 0x77, 0x6f, 0x72, 0x6b, @@ -4847,7 +4881,10 @@ var file_provisionersdk_proto_provisioner_proto_rawDesc = []byte{ 0x61, 0x73, 0x6b, 0x5f, 0x69, 0x64, 0x18, 0x16, 0x20, 0x01, 0x28, 0x09, 0x52, 0x06, 0x74, 0x61, 0x73, 0x6b, 0x49, 0x64, 0x12, 0x1f, 0x0a, 0x0b, 0x74, 0x61, 0x73, 0x6b, 0x5f, 0x70, 0x72, 0x6f, 0x6d, 0x70, 0x74, 0x18, 0x17, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0a, 0x74, 0x61, 0x73, 0x6b, 0x50, - 0x72, 0x6f, 0x6d, 0x70, 0x74, 0x22, 0x8a, 0x01, 0x0a, 0x06, 0x43, 0x6f, 0x6e, 0x66, 0x69, 0x67, + 0x72, 0x6f, 0x6d, 0x70, 0x74, 0x12, 0x2e, 0x0a, 0x13, 0x74, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, + 0x65, 0x5f, 0x76, 0x65, 0x72, 0x73, 0x69, 0x6f, 0x6e, 0x5f, 0x69, 0x64, 0x18, 0x18, 0x20, 0x01, + 0x28, 0x09, 0x52, 0x11, 0x74, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x56, 0x65, 0x72, 0x73, + 0x69, 0x6f, 0x6e, 0x49, 0x64, 0x22, 0xf7, 0x02, 0x0a, 0x06, 0x43, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x12, 0x36, 0x0a, 0x17, 0x74, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x5f, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x5f, 0x61, 0x72, 0x63, 0x68, 0x69, 0x76, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0c, 0x52, 0x15, 0x74, 
0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x53, 0x6f, 0x75, 0x72, 0x63, @@ -4856,242 +4893,257 @@ var file_provisionersdk_proto_provisioner_proto_rawDesc = []byte{ 0x0a, 0x15, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x5f, 0x6c, 0x6f, 0x67, 0x5f, 0x6c, 0x65, 0x76, 0x65, 0x6c, 0x18, 0x03, 0x20, 0x01, 0x28, 0x09, 0x52, 0x13, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x4c, 0x6f, 0x67, 0x4c, 0x65, 0x76, - 0x65, 0x6c, 0x22, 0x0e, 0x0a, 0x0c, 0x50, 0x61, 0x72, 0x73, 0x65, 0x52, 0x65, 0x71, 0x75, 0x65, - 0x73, 0x74, 0x22, 0xa3, 0x02, 0x0a, 0x0d, 0x50, 0x61, 0x72, 0x73, 0x65, 0x43, 0x6f, 0x6d, 0x70, - 0x6c, 0x65, 0x74, 0x65, 0x12, 0x14, 0x0a, 0x05, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x18, 0x01, 0x20, - 0x01, 0x28, 0x09, 0x52, 0x05, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x12, 0x4c, 0x0a, 0x12, 0x74, 0x65, - 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x5f, 0x76, 0x61, 0x72, 0x69, 0x61, 0x62, 0x6c, 0x65, 0x73, - 0x18, 0x02, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x1d, 0x2e, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, - 0x6f, 0x6e, 0x65, 0x72, 0x2e, 0x54, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x56, 0x61, 0x72, - 0x69, 0x61, 0x62, 0x6c, 0x65, 0x52, 0x11, 0x74, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x56, - 0x61, 0x72, 0x69, 0x61, 0x62, 0x6c, 0x65, 0x73, 0x12, 0x16, 0x0a, 0x06, 0x72, 0x65, 0x61, 0x64, - 0x6d, 0x65, 0x18, 0x03, 0x20, 0x01, 0x28, 0x0c, 0x52, 0x06, 0x72, 0x65, 0x61, 0x64, 0x6d, 0x65, - 0x12, 0x54, 0x0a, 0x0e, 0x77, 0x6f, 0x72, 0x6b, 0x73, 0x70, 0x61, 0x63, 0x65, 0x5f, 0x74, 0x61, - 0x67, 0x73, 0x18, 0x04, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x2d, 0x2e, 0x70, 0x72, 0x6f, 0x76, 0x69, - 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x2e, 0x50, 0x61, 0x72, 0x73, 0x65, 0x43, 0x6f, 0x6d, 0x70, - 0x6c, 0x65, 0x74, 0x65, 0x2e, 0x57, 0x6f, 0x72, 0x6b, 0x73, 0x70, 0x61, 0x63, 0x65, 0x54, 0x61, - 0x67, 0x73, 0x45, 0x6e, 0x74, 0x72, 0x79, 0x52, 0x0d, 0x77, 0x6f, 0x72, 0x6b, 0x73, 0x70, 0x61, - 0x63, 0x65, 0x54, 0x61, 0x67, 0x73, 0x1a, 0x40, 0x0a, 0x12, 0x57, 0x6f, 
0x72, 0x6b, 0x73, 0x70, - 0x61, 0x63, 0x65, 0x54, 0x61, 0x67, 0x73, 0x45, 0x6e, 0x74, 0x72, 0x79, 0x12, 0x10, 0x0a, 0x03, - 0x6b, 0x65, 0x79, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x03, 0x6b, 0x65, 0x79, 0x12, 0x14, - 0x0a, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x05, 0x76, - 0x61, 0x6c, 0x75, 0x65, 0x3a, 0x02, 0x38, 0x01, 0x22, 0xbe, 0x03, 0x0a, 0x0b, 0x50, 0x6c, 0x61, - 0x6e, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x31, 0x0a, 0x08, 0x6d, 0x65, 0x74, 0x61, - 0x64, 0x61, 0x74, 0x61, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x15, 0x2e, 0x70, 0x72, 0x6f, - 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x2e, 0x4d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, - 0x61, 0x52, 0x08, 0x6d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x12, 0x53, 0x0a, 0x15, 0x72, - 0x69, 0x63, 0x68, 0x5f, 0x70, 0x61, 0x72, 0x61, 0x6d, 0x65, 0x74, 0x65, 0x72, 0x5f, 0x76, 0x61, - 0x6c, 0x75, 0x65, 0x73, 0x18, 0x02, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x1f, 0x2e, 0x70, 0x72, 0x6f, - 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x2e, 0x52, 0x69, 0x63, 0x68, 0x50, 0x61, 0x72, - 0x61, 0x6d, 0x65, 0x74, 0x65, 0x72, 0x56, 0x61, 0x6c, 0x75, 0x65, 0x52, 0x13, 0x72, 0x69, 0x63, - 0x68, 0x50, 0x61, 0x72, 0x61, 0x6d, 0x65, 0x74, 0x65, 0x72, 0x56, 0x61, 0x6c, 0x75, 0x65, 0x73, - 0x12, 0x43, 0x0a, 0x0f, 0x76, 0x61, 0x72, 0x69, 0x61, 0x62, 0x6c, 0x65, 0x5f, 0x76, 0x61, 0x6c, - 0x75, 0x65, 0x73, 0x18, 0x03, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x1a, 0x2e, 0x70, 0x72, 0x6f, 0x76, - 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x2e, 0x56, 0x61, 0x72, 0x69, 0x61, 0x62, 0x6c, 0x65, - 0x56, 0x61, 0x6c, 0x75, 0x65, 0x52, 0x0e, 0x76, 0x61, 0x72, 0x69, 0x61, 0x62, 0x6c, 0x65, 0x56, - 0x61, 0x6c, 0x75, 0x65, 0x73, 0x12, 0x59, 0x0a, 0x17, 0x65, 0x78, 0x74, 0x65, 0x72, 0x6e, 0x61, - 0x6c, 0x5f, 0x61, 0x75, 0x74, 0x68, 0x5f, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x64, 0x65, 0x72, 0x73, - 0x18, 0x04, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x21, 0x2e, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, - 0x6f, 0x6e, 
0x65, 0x72, 0x2e, 0x45, 0x78, 0x74, 0x65, 0x72, 0x6e, 0x61, 0x6c, 0x41, 0x75, 0x74, - 0x68, 0x50, 0x72, 0x6f, 0x76, 0x69, 0x64, 0x65, 0x72, 0x52, 0x15, 0x65, 0x78, 0x74, 0x65, 0x72, - 0x6e, 0x61, 0x6c, 0x41, 0x75, 0x74, 0x68, 0x50, 0x72, 0x6f, 0x76, 0x69, 0x64, 0x65, 0x72, 0x73, - 0x12, 0x5b, 0x0a, 0x19, 0x70, 0x72, 0x65, 0x76, 0x69, 0x6f, 0x75, 0x73, 0x5f, 0x70, 0x61, 0x72, - 0x61, 0x6d, 0x65, 0x74, 0x65, 0x72, 0x5f, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x73, 0x18, 0x05, 0x20, - 0x03, 0x28, 0x0b, 0x32, 0x1f, 0x2e, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, - 0x72, 0x2e, 0x52, 0x69, 0x63, 0x68, 0x50, 0x61, 0x72, 0x61, 0x6d, 0x65, 0x74, 0x65, 0x72, 0x56, - 0x61, 0x6c, 0x75, 0x65, 0x52, 0x17, 0x70, 0x72, 0x65, 0x76, 0x69, 0x6f, 0x75, 0x73, 0x50, 0x61, - 0x72, 0x61, 0x6d, 0x65, 0x74, 0x65, 0x72, 0x56, 0x61, 0x6c, 0x75, 0x65, 0x73, 0x12, 0x2a, 0x0a, - 0x11, 0x6f, 0x6d, 0x69, 0x74, 0x5f, 0x6d, 0x6f, 0x64, 0x75, 0x6c, 0x65, 0x5f, 0x66, 0x69, 0x6c, - 0x65, 0x73, 0x18, 0x06, 0x20, 0x01, 0x28, 0x08, 0x52, 0x0f, 0x6f, 0x6d, 0x69, 0x74, 0x4d, 0x6f, - 0x64, 0x75, 0x6c, 0x65, 0x46, 0x69, 0x6c, 0x65, 0x73, 0x22, 0xc1, 0x05, 0x0a, 0x0c, 0x50, 0x6c, - 0x61, 0x6e, 0x43, 0x6f, 0x6d, 0x70, 0x6c, 0x65, 0x74, 0x65, 0x12, 0x14, 0x0a, 0x05, 0x65, 0x72, - 0x72, 0x6f, 0x72, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x05, 0x65, 0x72, 0x72, 0x6f, 0x72, - 0x12, 0x33, 0x0a, 0x09, 0x72, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x73, 0x18, 0x02, 0x20, - 0x03, 0x28, 0x0b, 0x32, 0x15, 0x2e, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, - 0x72, 0x2e, 0x52, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x52, 0x09, 0x72, 0x65, 0x73, 0x6f, - 0x75, 0x72, 0x63, 0x65, 0x73, 0x12, 0x3a, 0x0a, 0x0a, 0x70, 0x61, 0x72, 0x61, 0x6d, 0x65, 0x74, - 0x65, 0x72, 0x73, 0x18, 0x03, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x1a, 0x2e, 0x70, 0x72, 0x6f, 0x76, - 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x2e, 0x52, 0x69, 0x63, 0x68, 0x50, 0x61, 0x72, 0x61, - 0x6d, 0x65, 0x74, 0x65, 0x72, 0x52, 0x0a, 0x70, 
0x61, 0x72, 0x61, 0x6d, 0x65, 0x74, 0x65, 0x72, - 0x73, 0x12, 0x61, 0x0a, 0x17, 0x65, 0x78, 0x74, 0x65, 0x72, 0x6e, 0x61, 0x6c, 0x5f, 0x61, 0x75, - 0x74, 0x68, 0x5f, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x64, 0x65, 0x72, 0x73, 0x18, 0x04, 0x20, 0x03, - 0x28, 0x0b, 0x32, 0x29, 0x2e, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, - 0x2e, 0x45, 0x78, 0x74, 0x65, 0x72, 0x6e, 0x61, 0x6c, 0x41, 0x75, 0x74, 0x68, 0x50, 0x72, 0x6f, - 0x76, 0x69, 0x64, 0x65, 0x72, 0x52, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x52, 0x15, 0x65, - 0x78, 0x74, 0x65, 0x72, 0x6e, 0x61, 0x6c, 0x41, 0x75, 0x74, 0x68, 0x50, 0x72, 0x6f, 0x76, 0x69, - 0x64, 0x65, 0x72, 0x73, 0x12, 0x2d, 0x0a, 0x07, 0x74, 0x69, 0x6d, 0x69, 0x6e, 0x67, 0x73, 0x18, - 0x06, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x13, 0x2e, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, - 0x6e, 0x65, 0x72, 0x2e, 0x54, 0x69, 0x6d, 0x69, 0x6e, 0x67, 0x52, 0x07, 0x74, 0x69, 0x6d, 0x69, - 0x6e, 0x67, 0x73, 0x12, 0x2d, 0x0a, 0x07, 0x6d, 0x6f, 0x64, 0x75, 0x6c, 0x65, 0x73, 0x18, 0x07, - 0x20, 0x03, 0x28, 0x0b, 0x32, 0x13, 0x2e, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, - 0x65, 0x72, 0x2e, 0x4d, 0x6f, 0x64, 0x75, 0x6c, 0x65, 0x52, 0x07, 0x6d, 0x6f, 0x64, 0x75, 0x6c, - 0x65, 0x73, 0x12, 0x2d, 0x0a, 0x07, 0x70, 0x72, 0x65, 0x73, 0x65, 0x74, 0x73, 0x18, 0x08, 0x20, - 0x03, 0x28, 0x0b, 0x32, 0x13, 0x2e, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, - 0x72, 0x2e, 0x50, 0x72, 0x65, 0x73, 0x65, 0x74, 0x52, 0x07, 0x70, 0x72, 0x65, 0x73, 0x65, 0x74, - 0x73, 0x12, 0x12, 0x0a, 0x04, 0x70, 0x6c, 0x61, 0x6e, 0x18, 0x09, 0x20, 0x01, 0x28, 0x0c, 0x52, - 0x04, 0x70, 0x6c, 0x61, 0x6e, 0x12, 0x55, 0x0a, 0x15, 0x72, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, - 0x65, 0x5f, 0x72, 0x65, 0x70, 0x6c, 0x61, 0x63, 0x65, 0x6d, 0x65, 0x6e, 0x74, 0x73, 0x18, 0x0a, - 0x20, 0x03, 0x28, 0x0b, 0x32, 0x20, 0x2e, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, - 0x65, 0x72, 0x2e, 0x52, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x52, 0x65, 0x70, 
0x6c, 0x61, - 0x63, 0x65, 0x6d, 0x65, 0x6e, 0x74, 0x52, 0x14, 0x72, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, - 0x52, 0x65, 0x70, 0x6c, 0x61, 0x63, 0x65, 0x6d, 0x65, 0x6e, 0x74, 0x73, 0x12, 0x21, 0x0a, 0x0c, - 0x6d, 0x6f, 0x64, 0x75, 0x6c, 0x65, 0x5f, 0x66, 0x69, 0x6c, 0x65, 0x73, 0x18, 0x0b, 0x20, 0x01, - 0x28, 0x0c, 0x52, 0x0b, 0x6d, 0x6f, 0x64, 0x75, 0x6c, 0x65, 0x46, 0x69, 0x6c, 0x65, 0x73, 0x12, - 0x2a, 0x0a, 0x11, 0x6d, 0x6f, 0x64, 0x75, 0x6c, 0x65, 0x5f, 0x66, 0x69, 0x6c, 0x65, 0x73, 0x5f, - 0x68, 0x61, 0x73, 0x68, 0x18, 0x0c, 0x20, 0x01, 0x28, 0x0c, 0x52, 0x0f, 0x6d, 0x6f, 0x64, 0x75, - 0x6c, 0x65, 0x46, 0x69, 0x6c, 0x65, 0x73, 0x48, 0x61, 0x73, 0x68, 0x12, 0x20, 0x0a, 0x0c, 0x68, - 0x61, 0x73, 0x5f, 0x61, 0x69, 0x5f, 0x74, 0x61, 0x73, 0x6b, 0x73, 0x18, 0x0d, 0x20, 0x01, 0x28, - 0x08, 0x52, 0x0a, 0x68, 0x61, 0x73, 0x41, 0x69, 0x54, 0x61, 0x73, 0x6b, 0x73, 0x12, 0x2e, 0x0a, - 0x08, 0x61, 0x69, 0x5f, 0x74, 0x61, 0x73, 0x6b, 0x73, 0x18, 0x0e, 0x20, 0x03, 0x28, 0x0b, 0x32, - 0x13, 0x2e, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x2e, 0x41, 0x49, - 0x54, 0x61, 0x73, 0x6b, 0x52, 0x07, 0x61, 0x69, 0x54, 0x61, 0x73, 0x6b, 0x73, 0x12, 0x2e, 0x0a, - 0x13, 0x68, 0x61, 0x73, 0x5f, 0x65, 0x78, 0x74, 0x65, 0x72, 0x6e, 0x61, 0x6c, 0x5f, 0x61, 0x67, - 0x65, 0x6e, 0x74, 0x73, 0x18, 0x0f, 0x20, 0x01, 0x28, 0x08, 0x52, 0x11, 0x68, 0x61, 0x73, 0x45, - 0x78, 0x74, 0x65, 0x72, 0x6e, 0x61, 0x6c, 0x41, 0x67, 0x65, 0x6e, 0x74, 0x73, 0x22, 0x41, 0x0a, - 0x0c, 0x41, 0x70, 0x70, 0x6c, 0x79, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x31, 0x0a, - 0x08, 0x6d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, - 0x15, 0x2e, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x2e, 0x4d, 0x65, - 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x52, 0x08, 0x6d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, - 0x22, 0xee, 0x02, 0x0a, 0x0d, 0x41, 0x70, 0x70, 0x6c, 0x79, 0x43, 0x6f, 0x6d, 0x70, 0x6c, 0x65, - 0x74, 0x65, 0x12, 0x14, 
0x0a, 0x05, 0x73, 0x74, 0x61, 0x74, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, - 0x0c, 0x52, 0x05, 0x73, 0x74, 0x61, 0x74, 0x65, 0x12, 0x14, 0x0a, 0x05, 0x65, 0x72, 0x72, 0x6f, - 0x72, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x05, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x12, 0x33, - 0x0a, 0x09, 0x72, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x73, 0x18, 0x03, 0x20, 0x03, 0x28, - 0x0b, 0x32, 0x15, 0x2e, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x2e, - 0x52, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x52, 0x09, 0x72, 0x65, 0x73, 0x6f, 0x75, 0x72, - 0x63, 0x65, 0x73, 0x12, 0x3a, 0x0a, 0x0a, 0x70, 0x61, 0x72, 0x61, 0x6d, 0x65, 0x74, 0x65, 0x72, - 0x73, 0x18, 0x04, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x1a, 0x2e, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, + 0x65, 0x6c, 0x12, 0x24, 0x0a, 0x0b, 0x74, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x5f, 0x69, + 0x64, 0x18, 0x04, 0x20, 0x01, 0x28, 0x09, 0x48, 0x00, 0x52, 0x0a, 0x74, 0x65, 0x6d, 0x70, 0x6c, + 0x61, 0x74, 0x65, 0x49, 0x64, 0x88, 0x01, 0x01, 0x12, 0x33, 0x0a, 0x13, 0x74, 0x65, 0x6d, 0x70, + 0x6c, 0x61, 0x74, 0x65, 0x5f, 0x76, 0x65, 0x72, 0x73, 0x69, 0x6f, 0x6e, 0x5f, 0x69, 0x64, 0x18, + 0x05, 0x20, 0x01, 0x28, 0x09, 0x48, 0x01, 0x52, 0x11, 0x74, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, + 0x65, 0x56, 0x65, 0x72, 0x73, 0x69, 0x6f, 0x6e, 0x49, 0x64, 0x88, 0x01, 0x01, 0x12, 0x46, 0x0a, + 0x1d, 0x65, 0x78, 0x70, 0x5f, 0x72, 0x65, 0x75, 0x73, 0x65, 0x5f, 0x74, 0x65, 0x72, 0x72, 0x61, + 0x66, 0x6f, 0x72, 0x6d, 0x5f, 0x77, 0x6f, 0x72, 0x6b, 0x73, 0x70, 0x61, 0x63, 0x65, 0x18, 0x06, + 0x20, 0x01, 0x28, 0x08, 0x48, 0x02, 0x52, 0x1a, 0x65, 0x78, 0x70, 0x52, 0x65, 0x75, 0x73, 0x65, + 0x54, 0x65, 0x72, 0x72, 0x61, 0x66, 0x6f, 0x72, 0x6d, 0x57, 0x6f, 0x72, 0x6b, 0x73, 0x70, 0x61, + 0x63, 0x65, 0x88, 0x01, 0x01, 0x42, 0x0e, 0x0a, 0x0c, 0x5f, 0x74, 0x65, 0x6d, 0x70, 0x6c, 0x61, + 0x74, 0x65, 0x5f, 0x69, 0x64, 0x42, 0x16, 0x0a, 0x14, 0x5f, 0x74, 0x65, 0x6d, 0x70, 0x6c, 0x61, + 0x74, 0x65, 0x5f, 0x76, 0x65, 0x72, 0x73, 0x69, 0x6f, 0x6e, 
0x5f, 0x69, 0x64, 0x42, 0x20, 0x0a, + 0x1e, 0x5f, 0x65, 0x78, 0x70, 0x5f, 0x72, 0x65, 0x75, 0x73, 0x65, 0x5f, 0x74, 0x65, 0x72, 0x72, + 0x61, 0x66, 0x6f, 0x72, 0x6d, 0x5f, 0x77, 0x6f, 0x72, 0x6b, 0x73, 0x70, 0x61, 0x63, 0x65, 0x22, + 0x0e, 0x0a, 0x0c, 0x50, 0x61, 0x72, 0x73, 0x65, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x22, + 0xa3, 0x02, 0x0a, 0x0d, 0x50, 0x61, 0x72, 0x73, 0x65, 0x43, 0x6f, 0x6d, 0x70, 0x6c, 0x65, 0x74, + 0x65, 0x12, 0x14, 0x0a, 0x05, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, + 0x52, 0x05, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x12, 0x4c, 0x0a, 0x12, 0x74, 0x65, 0x6d, 0x70, 0x6c, + 0x61, 0x74, 0x65, 0x5f, 0x76, 0x61, 0x72, 0x69, 0x61, 0x62, 0x6c, 0x65, 0x73, 0x18, 0x02, 0x20, + 0x03, 0x28, 0x0b, 0x32, 0x1d, 0x2e, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, + 0x72, 0x2e, 0x54, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x56, 0x61, 0x72, 0x69, 0x61, 0x62, + 0x6c, 0x65, 0x52, 0x11, 0x74, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x56, 0x61, 0x72, 0x69, + 0x61, 0x62, 0x6c, 0x65, 0x73, 0x12, 0x16, 0x0a, 0x06, 0x72, 0x65, 0x61, 0x64, 0x6d, 0x65, 0x18, + 0x03, 0x20, 0x01, 0x28, 0x0c, 0x52, 0x06, 0x72, 0x65, 0x61, 0x64, 0x6d, 0x65, 0x12, 0x54, 0x0a, + 0x0e, 0x77, 0x6f, 0x72, 0x6b, 0x73, 0x70, 0x61, 0x63, 0x65, 0x5f, 0x74, 0x61, 0x67, 0x73, 0x18, + 0x04, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x2d, 0x2e, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, + 0x6e, 0x65, 0x72, 0x2e, 0x50, 0x61, 0x72, 0x73, 0x65, 0x43, 0x6f, 0x6d, 0x70, 0x6c, 0x65, 0x74, + 0x65, 0x2e, 0x57, 0x6f, 0x72, 0x6b, 0x73, 0x70, 0x61, 0x63, 0x65, 0x54, 0x61, 0x67, 0x73, 0x45, + 0x6e, 0x74, 0x72, 0x79, 0x52, 0x0d, 0x77, 0x6f, 0x72, 0x6b, 0x73, 0x70, 0x61, 0x63, 0x65, 0x54, + 0x61, 0x67, 0x73, 0x1a, 0x40, 0x0a, 0x12, 0x57, 0x6f, 0x72, 0x6b, 0x73, 0x70, 0x61, 0x63, 0x65, + 0x54, 0x61, 0x67, 0x73, 0x45, 0x6e, 0x74, 0x72, 0x79, 0x12, 0x10, 0x0a, 0x03, 0x6b, 0x65, 0x79, + 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x03, 0x6b, 0x65, 0x79, 0x12, 0x14, 0x0a, 0x05, 0x76, + 
0x61, 0x6c, 0x75, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x05, 0x76, 0x61, 0x6c, 0x75, + 0x65, 0x3a, 0x02, 0x38, 0x01, 0x22, 0xbe, 0x03, 0x0a, 0x0b, 0x50, 0x6c, 0x61, 0x6e, 0x52, 0x65, + 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x31, 0x0a, 0x08, 0x6d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, + 0x61, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x15, 0x2e, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, + 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x2e, 0x4d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x52, 0x08, + 0x6d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x12, 0x53, 0x0a, 0x15, 0x72, 0x69, 0x63, 0x68, + 0x5f, 0x70, 0x61, 0x72, 0x61, 0x6d, 0x65, 0x74, 0x65, 0x72, 0x5f, 0x76, 0x61, 0x6c, 0x75, 0x65, + 0x73, 0x18, 0x02, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x1f, 0x2e, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x2e, 0x52, 0x69, 0x63, 0x68, 0x50, 0x61, 0x72, 0x61, 0x6d, 0x65, - 0x74, 0x65, 0x72, 0x52, 0x0a, 0x70, 0x61, 0x72, 0x61, 0x6d, 0x65, 0x74, 0x65, 0x72, 0x73, 0x12, - 0x61, 0x0a, 0x17, 0x65, 0x78, 0x74, 0x65, 0x72, 0x6e, 0x61, 0x6c, 0x5f, 0x61, 0x75, 0x74, 0x68, - 0x5f, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x64, 0x65, 0x72, 0x73, 0x18, 0x05, 0x20, 0x03, 0x28, 0x0b, - 0x32, 0x29, 0x2e, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x2e, 0x45, - 0x78, 0x74, 0x65, 0x72, 0x6e, 0x61, 0x6c, 0x41, 0x75, 0x74, 0x68, 0x50, 0x72, 0x6f, 0x76, 0x69, - 0x64, 0x65, 0x72, 0x52, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x52, 0x15, 0x65, 0x78, 0x74, - 0x65, 0x72, 0x6e, 0x61, 0x6c, 0x41, 0x75, 0x74, 0x68, 0x50, 0x72, 0x6f, 0x76, 0x69, 0x64, 0x65, - 0x72, 0x73, 0x12, 0x2d, 0x0a, 0x07, 0x74, 0x69, 0x6d, 0x69, 0x6e, 0x67, 0x73, 0x18, 0x06, 0x20, - 0x03, 0x28, 0x0b, 0x32, 0x13, 0x2e, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, - 0x72, 0x2e, 0x54, 0x69, 0x6d, 0x69, 0x6e, 0x67, 0x52, 0x07, 0x74, 0x69, 0x6d, 0x69, 0x6e, 0x67, - 0x73, 0x12, 0x2e, 0x0a, 0x08, 0x61, 0x69, 0x5f, 0x74, 0x61, 0x73, 0x6b, 0x73, 0x18, 0x07, 0x20, - 0x03, 0x28, 0x0b, 0x32, 0x13, 0x2e, 0x70, 
0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, - 0x72, 0x2e, 0x41, 0x49, 0x54, 0x61, 0x73, 0x6b, 0x52, 0x07, 0x61, 0x69, 0x54, 0x61, 0x73, 0x6b, - 0x73, 0x22, 0xfa, 0x01, 0x0a, 0x06, 0x54, 0x69, 0x6d, 0x69, 0x6e, 0x67, 0x12, 0x30, 0x0a, 0x05, - 0x73, 0x74, 0x61, 0x72, 0x74, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1a, 0x2e, 0x67, 0x6f, - 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2e, 0x54, 0x69, - 0x6d, 0x65, 0x73, 0x74, 0x61, 0x6d, 0x70, 0x52, 0x05, 0x73, 0x74, 0x61, 0x72, 0x74, 0x12, 0x2c, - 0x0a, 0x03, 0x65, 0x6e, 0x64, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1a, 0x2e, 0x67, 0x6f, - 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2e, 0x54, 0x69, - 0x6d, 0x65, 0x73, 0x74, 0x61, 0x6d, 0x70, 0x52, 0x03, 0x65, 0x6e, 0x64, 0x12, 0x16, 0x0a, 0x06, - 0x61, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x18, 0x03, 0x20, 0x01, 0x28, 0x09, 0x52, 0x06, 0x61, 0x63, - 0x74, 0x69, 0x6f, 0x6e, 0x12, 0x16, 0x0a, 0x06, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x18, 0x04, - 0x20, 0x01, 0x28, 0x09, 0x52, 0x06, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x12, 0x1a, 0x0a, 0x08, - 0x72, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x18, 0x05, 0x20, 0x01, 0x28, 0x09, 0x52, 0x08, - 0x72, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x12, 0x14, 0x0a, 0x05, 0x73, 0x74, 0x61, 0x67, - 0x65, 0x18, 0x06, 0x20, 0x01, 0x28, 0x09, 0x52, 0x05, 0x73, 0x74, 0x61, 0x67, 0x65, 0x12, 0x2e, - 0x0a, 0x05, 0x73, 0x74, 0x61, 0x74, 0x65, 0x18, 0x07, 0x20, 0x01, 0x28, 0x0e, 0x32, 0x18, 0x2e, - 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x2e, 0x54, 0x69, 0x6d, 0x69, - 0x6e, 0x67, 0x53, 0x74, 0x61, 0x74, 0x65, 0x52, 0x05, 0x73, 0x74, 0x61, 0x74, 0x65, 0x22, 0x0f, - 0x0a, 0x0d, 0x43, 0x61, 0x6e, 0x63, 0x65, 0x6c, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x22, - 0x8c, 0x02, 0x0a, 0x07, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x2d, 0x0a, 0x06, 0x63, - 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x13, 
0x2e, 0x70, 0x72, - 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x2e, 0x43, 0x6f, 0x6e, 0x66, 0x69, 0x67, - 0x48, 0x00, 0x52, 0x06, 0x63, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x12, 0x31, 0x0a, 0x05, 0x70, 0x61, - 0x72, 0x73, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x19, 0x2e, 0x70, 0x72, 0x6f, 0x76, - 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x2e, 0x50, 0x61, 0x72, 0x73, 0x65, 0x52, 0x65, 0x71, - 0x75, 0x65, 0x73, 0x74, 0x48, 0x00, 0x52, 0x05, 0x70, 0x61, 0x72, 0x73, 0x65, 0x12, 0x2e, 0x0a, - 0x04, 0x70, 0x6c, 0x61, 0x6e, 0x18, 0x03, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x18, 0x2e, 0x70, 0x72, - 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x2e, 0x50, 0x6c, 0x61, 0x6e, 0x52, 0x65, - 0x71, 0x75, 0x65, 0x73, 0x74, 0x48, 0x00, 0x52, 0x04, 0x70, 0x6c, 0x61, 0x6e, 0x12, 0x31, 0x0a, - 0x05, 0x61, 0x70, 0x70, 0x6c, 0x79, 0x18, 0x04, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x19, 0x2e, 0x70, - 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x2e, 0x41, 0x70, 0x70, 0x6c, 0x79, - 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x48, 0x00, 0x52, 0x05, 0x61, 0x70, 0x70, 0x6c, 0x79, - 0x12, 0x34, 0x0a, 0x06, 0x63, 0x61, 0x6e, 0x63, 0x65, 0x6c, 0x18, 0x05, 0x20, 0x01, 0x28, 0x0b, - 0x32, 0x1a, 0x2e, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x2e, 0x43, - 0x61, 0x6e, 0x63, 0x65, 0x6c, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x48, 0x00, 0x52, 0x06, - 0x63, 0x61, 0x6e, 0x63, 0x65, 0x6c, 0x42, 0x06, 0x0a, 0x04, 0x74, 0x79, 0x70, 0x65, 0x22, 0xc9, - 0x02, 0x0a, 0x08, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x24, 0x0a, 0x03, 0x6c, - 0x6f, 0x67, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x10, 0x2e, 0x70, 0x72, 0x6f, 0x76, 0x69, - 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x2e, 0x4c, 0x6f, 0x67, 0x48, 0x00, 0x52, 0x03, 0x6c, 0x6f, - 0x67, 0x12, 0x32, 0x0a, 0x05, 0x70, 0x61, 0x72, 0x73, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, - 0x32, 0x1a, 0x2e, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x2e, 0x50, - 0x61, 0x72, 0x73, 
0x65, 0x43, 0x6f, 0x6d, 0x70, 0x6c, 0x65, 0x74, 0x65, 0x48, 0x00, 0x52, 0x05, - 0x70, 0x61, 0x72, 0x73, 0x65, 0x12, 0x2f, 0x0a, 0x04, 0x70, 0x6c, 0x61, 0x6e, 0x18, 0x03, 0x20, - 0x01, 0x28, 0x0b, 0x32, 0x19, 0x2e, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, - 0x72, 0x2e, 0x50, 0x6c, 0x61, 0x6e, 0x43, 0x6f, 0x6d, 0x70, 0x6c, 0x65, 0x74, 0x65, 0x48, 0x00, - 0x52, 0x04, 0x70, 0x6c, 0x61, 0x6e, 0x12, 0x32, 0x0a, 0x05, 0x61, 0x70, 0x70, 0x6c, 0x79, 0x18, - 0x04, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1a, 0x2e, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, - 0x6e, 0x65, 0x72, 0x2e, 0x41, 0x70, 0x70, 0x6c, 0x79, 0x43, 0x6f, 0x6d, 0x70, 0x6c, 0x65, 0x74, - 0x65, 0x48, 0x00, 0x52, 0x05, 0x61, 0x70, 0x70, 0x6c, 0x79, 0x12, 0x3a, 0x0a, 0x0b, 0x64, 0x61, - 0x74, 0x61, 0x5f, 0x75, 0x70, 0x6c, 0x6f, 0x61, 0x64, 0x18, 0x05, 0x20, 0x01, 0x28, 0x0b, 0x32, - 0x17, 0x2e, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x2e, 0x44, 0x61, - 0x74, 0x61, 0x55, 0x70, 0x6c, 0x6f, 0x61, 0x64, 0x48, 0x00, 0x52, 0x0a, 0x64, 0x61, 0x74, 0x61, - 0x55, 0x70, 0x6c, 0x6f, 0x61, 0x64, 0x12, 0x3a, 0x0a, 0x0b, 0x63, 0x68, 0x75, 0x6e, 0x6b, 0x5f, - 0x70, 0x69, 0x65, 0x63, 0x65, 0x18, 0x06, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x17, 0x2e, 0x70, 0x72, - 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x2e, 0x43, 0x68, 0x75, 0x6e, 0x6b, 0x50, - 0x69, 0x65, 0x63, 0x65, 0x48, 0x00, 0x52, 0x0a, 0x63, 0x68, 0x75, 0x6e, 0x6b, 0x50, 0x69, 0x65, - 0x63, 0x65, 0x42, 0x06, 0x0a, 0x04, 0x74, 0x79, 0x70, 0x65, 0x22, 0x9c, 0x01, 0x0a, 0x0a, 0x44, - 0x61, 0x74, 0x61, 0x55, 0x70, 0x6c, 0x6f, 0x61, 0x64, 0x12, 0x3c, 0x0a, 0x0b, 0x75, 0x70, 0x6c, - 0x6f, 0x61, 0x64, 0x5f, 0x74, 0x79, 0x70, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0e, 0x32, 0x1b, - 0x2e, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x2e, 0x44, 0x61, 0x74, - 0x61, 0x55, 0x70, 0x6c, 0x6f, 0x61, 0x64, 0x54, 0x79, 0x70, 0x65, 0x52, 0x0a, 0x75, 0x70, 0x6c, - 0x6f, 0x61, 0x64, 0x54, 0x79, 0x70, 0x65, 0x12, 0x1b, 
0x0a, 0x09, 0x64, 0x61, 0x74, 0x61, 0x5f, - 0x68, 0x61, 0x73, 0x68, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0c, 0x52, 0x08, 0x64, 0x61, 0x74, 0x61, - 0x48, 0x61, 0x73, 0x68, 0x12, 0x1b, 0x0a, 0x09, 0x66, 0x69, 0x6c, 0x65, 0x5f, 0x73, 0x69, 0x7a, - 0x65, 0x18, 0x03, 0x20, 0x01, 0x28, 0x03, 0x52, 0x08, 0x66, 0x69, 0x6c, 0x65, 0x53, 0x69, 0x7a, - 0x65, 0x12, 0x16, 0x0a, 0x06, 0x63, 0x68, 0x75, 0x6e, 0x6b, 0x73, 0x18, 0x04, 0x20, 0x01, 0x28, - 0x05, 0x52, 0x06, 0x63, 0x68, 0x75, 0x6e, 0x6b, 0x73, 0x22, 0x67, 0x0a, 0x0a, 0x43, 0x68, 0x75, - 0x6e, 0x6b, 0x50, 0x69, 0x65, 0x63, 0x65, 0x12, 0x12, 0x0a, 0x04, 0x64, 0x61, 0x74, 0x61, 0x18, - 0x01, 0x20, 0x01, 0x28, 0x0c, 0x52, 0x04, 0x64, 0x61, 0x74, 0x61, 0x12, 0x24, 0x0a, 0x0e, 0x66, - 0x75, 0x6c, 0x6c, 0x5f, 0x64, 0x61, 0x74, 0x61, 0x5f, 0x68, 0x61, 0x73, 0x68, 0x18, 0x02, 0x20, - 0x01, 0x28, 0x0c, 0x52, 0x0c, 0x66, 0x75, 0x6c, 0x6c, 0x44, 0x61, 0x74, 0x61, 0x48, 0x61, 0x73, - 0x68, 0x12, 0x1f, 0x0a, 0x0b, 0x70, 0x69, 0x65, 0x63, 0x65, 0x5f, 0x69, 0x6e, 0x64, 0x65, 0x78, - 0x18, 0x03, 0x20, 0x01, 0x28, 0x05, 0x52, 0x0a, 0x70, 0x69, 0x65, 0x63, 0x65, 0x49, 0x6e, 0x64, - 0x65, 0x78, 0x2a, 0xa8, 0x01, 0x0a, 0x11, 0x50, 0x61, 0x72, 0x61, 0x6d, 0x65, 0x74, 0x65, 0x72, - 0x46, 0x6f, 0x72, 0x6d, 0x54, 0x79, 0x70, 0x65, 0x12, 0x0b, 0x0a, 0x07, 0x44, 0x45, 0x46, 0x41, - 0x55, 0x4c, 0x54, 0x10, 0x00, 0x12, 0x0e, 0x0a, 0x0a, 0x46, 0x4f, 0x52, 0x4d, 0x5f, 0x45, 0x52, - 0x52, 0x4f, 0x52, 0x10, 0x01, 0x12, 0x09, 0x0a, 0x05, 0x52, 0x41, 0x44, 0x49, 0x4f, 0x10, 0x02, - 0x12, 0x0c, 0x0a, 0x08, 0x44, 0x52, 0x4f, 0x50, 0x44, 0x4f, 0x57, 0x4e, 0x10, 0x03, 0x12, 0x09, - 0x0a, 0x05, 0x49, 0x4e, 0x50, 0x55, 0x54, 0x10, 0x04, 0x12, 0x0c, 0x0a, 0x08, 0x54, 0x45, 0x58, - 0x54, 0x41, 0x52, 0x45, 0x41, 0x10, 0x05, 0x12, 0x0a, 0x0a, 0x06, 0x53, 0x4c, 0x49, 0x44, 0x45, - 0x52, 0x10, 0x06, 0x12, 0x0c, 0x0a, 0x08, 0x43, 0x48, 0x45, 0x43, 0x4b, 0x42, 0x4f, 0x58, 0x10, - 0x07, 0x12, 0x0a, 0x0a, 0x06, 0x53, 0x57, 0x49, 0x54, 0x43, 0x48, 0x10, 0x08, 0x12, 0x0d, 
0x0a, - 0x09, 0x54, 0x41, 0x47, 0x53, 0x45, 0x4c, 0x45, 0x43, 0x54, 0x10, 0x09, 0x12, 0x0f, 0x0a, 0x0b, - 0x4d, 0x55, 0x4c, 0x54, 0x49, 0x53, 0x45, 0x4c, 0x45, 0x43, 0x54, 0x10, 0x0a, 0x2a, 0x3f, 0x0a, - 0x08, 0x4c, 0x6f, 0x67, 0x4c, 0x65, 0x76, 0x65, 0x6c, 0x12, 0x09, 0x0a, 0x05, 0x54, 0x52, 0x41, - 0x43, 0x45, 0x10, 0x00, 0x12, 0x09, 0x0a, 0x05, 0x44, 0x45, 0x42, 0x55, 0x47, 0x10, 0x01, 0x12, - 0x08, 0x0a, 0x04, 0x49, 0x4e, 0x46, 0x4f, 0x10, 0x02, 0x12, 0x08, 0x0a, 0x04, 0x57, 0x41, 0x52, - 0x4e, 0x10, 0x03, 0x12, 0x09, 0x0a, 0x05, 0x45, 0x52, 0x52, 0x4f, 0x52, 0x10, 0x04, 0x2a, 0x3b, - 0x0a, 0x0f, 0x41, 0x70, 0x70, 0x53, 0x68, 0x61, 0x72, 0x69, 0x6e, 0x67, 0x4c, 0x65, 0x76, 0x65, - 0x6c, 0x12, 0x09, 0x0a, 0x05, 0x4f, 0x57, 0x4e, 0x45, 0x52, 0x10, 0x00, 0x12, 0x11, 0x0a, 0x0d, - 0x41, 0x55, 0x54, 0x48, 0x45, 0x4e, 0x54, 0x49, 0x43, 0x41, 0x54, 0x45, 0x44, 0x10, 0x01, 0x12, - 0x0a, 0x0a, 0x06, 0x50, 0x55, 0x42, 0x4c, 0x49, 0x43, 0x10, 0x02, 0x2a, 0x35, 0x0a, 0x09, 0x41, - 0x70, 0x70, 0x4f, 0x70, 0x65, 0x6e, 0x49, 0x6e, 0x12, 0x0e, 0x0a, 0x06, 0x57, 0x49, 0x4e, 0x44, - 0x4f, 0x57, 0x10, 0x00, 0x1a, 0x02, 0x08, 0x01, 0x12, 0x0f, 0x0a, 0x0b, 0x53, 0x4c, 0x49, 0x4d, - 0x5f, 0x57, 0x49, 0x4e, 0x44, 0x4f, 0x57, 0x10, 0x01, 0x12, 0x07, 0x0a, 0x03, 0x54, 0x41, 0x42, - 0x10, 0x02, 0x2a, 0x37, 0x0a, 0x13, 0x57, 0x6f, 0x72, 0x6b, 0x73, 0x70, 0x61, 0x63, 0x65, 0x54, - 0x72, 0x61, 0x6e, 0x73, 0x69, 0x74, 0x69, 0x6f, 0x6e, 0x12, 0x09, 0x0a, 0x05, 0x53, 0x54, 0x41, - 0x52, 0x54, 0x10, 0x00, 0x12, 0x08, 0x0a, 0x04, 0x53, 0x54, 0x4f, 0x50, 0x10, 0x01, 0x12, 0x0b, - 0x0a, 0x07, 0x44, 0x45, 0x53, 0x54, 0x52, 0x4f, 0x59, 0x10, 0x02, 0x2a, 0x3e, 0x0a, 0x1b, 0x50, - 0x72, 0x65, 0x62, 0x75, 0x69, 0x6c, 0x74, 0x57, 0x6f, 0x72, 0x6b, 0x73, 0x70, 0x61, 0x63, 0x65, - 0x42, 0x75, 0x69, 0x6c, 0x64, 0x53, 0x74, 0x61, 0x67, 0x65, 0x12, 0x08, 0x0a, 0x04, 0x4e, 0x4f, - 0x4e, 0x45, 0x10, 0x00, 0x12, 0x0a, 0x0a, 0x06, 0x43, 0x52, 0x45, 0x41, 0x54, 0x45, 0x10, 0x01, - 0x12, 0x09, 0x0a, 0x05, 0x43, 
0x4c, 0x41, 0x49, 0x4d, 0x10, 0x02, 0x2a, 0x35, 0x0a, 0x0b, 0x54, - 0x69, 0x6d, 0x69, 0x6e, 0x67, 0x53, 0x74, 0x61, 0x74, 0x65, 0x12, 0x0b, 0x0a, 0x07, 0x53, 0x54, - 0x41, 0x52, 0x54, 0x45, 0x44, 0x10, 0x00, 0x12, 0x0d, 0x0a, 0x09, 0x43, 0x4f, 0x4d, 0x50, 0x4c, - 0x45, 0x54, 0x45, 0x44, 0x10, 0x01, 0x12, 0x0a, 0x0a, 0x06, 0x46, 0x41, 0x49, 0x4c, 0x45, 0x44, - 0x10, 0x02, 0x2a, 0x47, 0x0a, 0x0e, 0x44, 0x61, 0x74, 0x61, 0x55, 0x70, 0x6c, 0x6f, 0x61, 0x64, - 0x54, 0x79, 0x70, 0x65, 0x12, 0x17, 0x0a, 0x13, 0x55, 0x50, 0x4c, 0x4f, 0x41, 0x44, 0x5f, 0x54, - 0x59, 0x50, 0x45, 0x5f, 0x55, 0x4e, 0x4b, 0x4e, 0x4f, 0x57, 0x4e, 0x10, 0x00, 0x12, 0x1c, 0x0a, - 0x18, 0x55, 0x50, 0x4c, 0x4f, 0x41, 0x44, 0x5f, 0x54, 0x59, 0x50, 0x45, 0x5f, 0x4d, 0x4f, 0x44, - 0x55, 0x4c, 0x45, 0x5f, 0x46, 0x49, 0x4c, 0x45, 0x53, 0x10, 0x01, 0x32, 0x49, 0x0a, 0x0b, 0x50, - 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x12, 0x3a, 0x0a, 0x07, 0x53, 0x65, - 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x12, 0x14, 0x2e, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, - 0x6e, 0x65, 0x72, 0x2e, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x15, 0x2e, 0x70, 0x72, - 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x2e, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, - 0x73, 0x65, 0x28, 0x01, 0x30, 0x01, 0x42, 0x30, 0x5a, 0x2e, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, - 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x63, 0x6f, 0x64, 0x65, 0x72, 0x2f, 0x63, 0x6f, 0x64, 0x65, 0x72, - 0x2f, 0x76, 0x32, 0x2f, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x73, - 0x64, 0x6b, 0x2f, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x06, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x33, + 0x74, 0x65, 0x72, 0x56, 0x61, 0x6c, 0x75, 0x65, 0x52, 0x13, 0x72, 0x69, 0x63, 0x68, 0x50, 0x61, + 0x72, 0x61, 0x6d, 0x65, 0x74, 0x65, 0x72, 0x56, 0x61, 0x6c, 0x75, 0x65, 0x73, 0x12, 0x43, 0x0a, + 0x0f, 0x76, 0x61, 0x72, 0x69, 0x61, 0x62, 0x6c, 0x65, 0x5f, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x73, + 0x18, 0x03, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x1a, 0x2e, 0x70, 0x72, 
0x6f, 0x76, 0x69, 0x73, 0x69, + 0x6f, 0x6e, 0x65, 0x72, 0x2e, 0x56, 0x61, 0x72, 0x69, 0x61, 0x62, 0x6c, 0x65, 0x56, 0x61, 0x6c, + 0x75, 0x65, 0x52, 0x0e, 0x76, 0x61, 0x72, 0x69, 0x61, 0x62, 0x6c, 0x65, 0x56, 0x61, 0x6c, 0x75, + 0x65, 0x73, 0x12, 0x59, 0x0a, 0x17, 0x65, 0x78, 0x74, 0x65, 0x72, 0x6e, 0x61, 0x6c, 0x5f, 0x61, + 0x75, 0x74, 0x68, 0x5f, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x64, 0x65, 0x72, 0x73, 0x18, 0x04, 0x20, + 0x03, 0x28, 0x0b, 0x32, 0x21, 0x2e, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, + 0x72, 0x2e, 0x45, 0x78, 0x74, 0x65, 0x72, 0x6e, 0x61, 0x6c, 0x41, 0x75, 0x74, 0x68, 0x50, 0x72, + 0x6f, 0x76, 0x69, 0x64, 0x65, 0x72, 0x52, 0x15, 0x65, 0x78, 0x74, 0x65, 0x72, 0x6e, 0x61, 0x6c, + 0x41, 0x75, 0x74, 0x68, 0x50, 0x72, 0x6f, 0x76, 0x69, 0x64, 0x65, 0x72, 0x73, 0x12, 0x5b, 0x0a, + 0x19, 0x70, 0x72, 0x65, 0x76, 0x69, 0x6f, 0x75, 0x73, 0x5f, 0x70, 0x61, 0x72, 0x61, 0x6d, 0x65, + 0x74, 0x65, 0x72, 0x5f, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x73, 0x18, 0x05, 0x20, 0x03, 0x28, 0x0b, + 0x32, 0x1f, 0x2e, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x2e, 0x52, + 0x69, 0x63, 0x68, 0x50, 0x61, 0x72, 0x61, 0x6d, 0x65, 0x74, 0x65, 0x72, 0x56, 0x61, 0x6c, 0x75, + 0x65, 0x52, 0x17, 0x70, 0x72, 0x65, 0x76, 0x69, 0x6f, 0x75, 0x73, 0x50, 0x61, 0x72, 0x61, 0x6d, + 0x65, 0x74, 0x65, 0x72, 0x56, 0x61, 0x6c, 0x75, 0x65, 0x73, 0x12, 0x2a, 0x0a, 0x11, 0x6f, 0x6d, + 0x69, 0x74, 0x5f, 0x6d, 0x6f, 0x64, 0x75, 0x6c, 0x65, 0x5f, 0x66, 0x69, 0x6c, 0x65, 0x73, 0x18, + 0x06, 0x20, 0x01, 0x28, 0x08, 0x52, 0x0f, 0x6f, 0x6d, 0x69, 0x74, 0x4d, 0x6f, 0x64, 0x75, 0x6c, + 0x65, 0x46, 0x69, 0x6c, 0x65, 0x73, 0x22, 0xc1, 0x05, 0x0a, 0x0c, 0x50, 0x6c, 0x61, 0x6e, 0x43, + 0x6f, 0x6d, 0x70, 0x6c, 0x65, 0x74, 0x65, 0x12, 0x14, 0x0a, 0x05, 0x65, 0x72, 0x72, 0x6f, 0x72, + 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x05, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x12, 0x33, 0x0a, + 0x09, 0x72, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x73, 0x18, 0x02, 0x20, 0x03, 0x28, 0x0b, + 0x32, 
0x15, 0x2e, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x2e, 0x52, + 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x52, 0x09, 0x72, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, + 0x65, 0x73, 0x12, 0x3a, 0x0a, 0x0a, 0x70, 0x61, 0x72, 0x61, 0x6d, 0x65, 0x74, 0x65, 0x72, 0x73, + 0x18, 0x03, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x1a, 0x2e, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, + 0x6f, 0x6e, 0x65, 0x72, 0x2e, 0x52, 0x69, 0x63, 0x68, 0x50, 0x61, 0x72, 0x61, 0x6d, 0x65, 0x74, + 0x65, 0x72, 0x52, 0x0a, 0x70, 0x61, 0x72, 0x61, 0x6d, 0x65, 0x74, 0x65, 0x72, 0x73, 0x12, 0x61, + 0x0a, 0x17, 0x65, 0x78, 0x74, 0x65, 0x72, 0x6e, 0x61, 0x6c, 0x5f, 0x61, 0x75, 0x74, 0x68, 0x5f, + 0x70, 0x72, 0x6f, 0x76, 0x69, 0x64, 0x65, 0x72, 0x73, 0x18, 0x04, 0x20, 0x03, 0x28, 0x0b, 0x32, + 0x29, 0x2e, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x2e, 0x45, 0x78, + 0x74, 0x65, 0x72, 0x6e, 0x61, 0x6c, 0x41, 0x75, 0x74, 0x68, 0x50, 0x72, 0x6f, 0x76, 0x69, 0x64, + 0x65, 0x72, 0x52, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x52, 0x15, 0x65, 0x78, 0x74, 0x65, + 0x72, 0x6e, 0x61, 0x6c, 0x41, 0x75, 0x74, 0x68, 0x50, 0x72, 0x6f, 0x76, 0x69, 0x64, 0x65, 0x72, + 0x73, 0x12, 0x2d, 0x0a, 0x07, 0x74, 0x69, 0x6d, 0x69, 0x6e, 0x67, 0x73, 0x18, 0x06, 0x20, 0x03, + 0x28, 0x0b, 0x32, 0x13, 0x2e, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, + 0x2e, 0x54, 0x69, 0x6d, 0x69, 0x6e, 0x67, 0x52, 0x07, 0x74, 0x69, 0x6d, 0x69, 0x6e, 0x67, 0x73, + 0x12, 0x2d, 0x0a, 0x07, 0x6d, 0x6f, 0x64, 0x75, 0x6c, 0x65, 0x73, 0x18, 0x07, 0x20, 0x03, 0x28, + 0x0b, 0x32, 0x13, 0x2e, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x2e, + 0x4d, 0x6f, 0x64, 0x75, 0x6c, 0x65, 0x52, 0x07, 0x6d, 0x6f, 0x64, 0x75, 0x6c, 0x65, 0x73, 0x12, + 0x2d, 0x0a, 0x07, 0x70, 0x72, 0x65, 0x73, 0x65, 0x74, 0x73, 0x18, 0x08, 0x20, 0x03, 0x28, 0x0b, + 0x32, 0x13, 0x2e, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x2e, 0x50, + 0x72, 0x65, 0x73, 0x65, 0x74, 0x52, 0x07, 
0x70, 0x72, 0x65, 0x73, 0x65, 0x74, 0x73, 0x12, 0x12, + 0x0a, 0x04, 0x70, 0x6c, 0x61, 0x6e, 0x18, 0x09, 0x20, 0x01, 0x28, 0x0c, 0x52, 0x04, 0x70, 0x6c, + 0x61, 0x6e, 0x12, 0x55, 0x0a, 0x15, 0x72, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x5f, 0x72, + 0x65, 0x70, 0x6c, 0x61, 0x63, 0x65, 0x6d, 0x65, 0x6e, 0x74, 0x73, 0x18, 0x0a, 0x20, 0x03, 0x28, + 0x0b, 0x32, 0x20, 0x2e, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x2e, + 0x52, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x52, 0x65, 0x70, 0x6c, 0x61, 0x63, 0x65, 0x6d, + 0x65, 0x6e, 0x74, 0x52, 0x14, 0x72, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x52, 0x65, 0x70, + 0x6c, 0x61, 0x63, 0x65, 0x6d, 0x65, 0x6e, 0x74, 0x73, 0x12, 0x21, 0x0a, 0x0c, 0x6d, 0x6f, 0x64, + 0x75, 0x6c, 0x65, 0x5f, 0x66, 0x69, 0x6c, 0x65, 0x73, 0x18, 0x0b, 0x20, 0x01, 0x28, 0x0c, 0x52, + 0x0b, 0x6d, 0x6f, 0x64, 0x75, 0x6c, 0x65, 0x46, 0x69, 0x6c, 0x65, 0x73, 0x12, 0x2a, 0x0a, 0x11, + 0x6d, 0x6f, 0x64, 0x75, 0x6c, 0x65, 0x5f, 0x66, 0x69, 0x6c, 0x65, 0x73, 0x5f, 0x68, 0x61, 0x73, + 0x68, 0x18, 0x0c, 0x20, 0x01, 0x28, 0x0c, 0x52, 0x0f, 0x6d, 0x6f, 0x64, 0x75, 0x6c, 0x65, 0x46, + 0x69, 0x6c, 0x65, 0x73, 0x48, 0x61, 0x73, 0x68, 0x12, 0x20, 0x0a, 0x0c, 0x68, 0x61, 0x73, 0x5f, + 0x61, 0x69, 0x5f, 0x74, 0x61, 0x73, 0x6b, 0x73, 0x18, 0x0d, 0x20, 0x01, 0x28, 0x08, 0x52, 0x0a, + 0x68, 0x61, 0x73, 0x41, 0x69, 0x54, 0x61, 0x73, 0x6b, 0x73, 0x12, 0x2e, 0x0a, 0x08, 0x61, 0x69, + 0x5f, 0x74, 0x61, 0x73, 0x6b, 0x73, 0x18, 0x0e, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x13, 0x2e, 0x70, + 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x2e, 0x41, 0x49, 0x54, 0x61, 0x73, + 0x6b, 0x52, 0x07, 0x61, 0x69, 0x54, 0x61, 0x73, 0x6b, 0x73, 0x12, 0x2e, 0x0a, 0x13, 0x68, 0x61, + 0x73, 0x5f, 0x65, 0x78, 0x74, 0x65, 0x72, 0x6e, 0x61, 0x6c, 0x5f, 0x61, 0x67, 0x65, 0x6e, 0x74, + 0x73, 0x18, 0x0f, 0x20, 0x01, 0x28, 0x08, 0x52, 0x11, 0x68, 0x61, 0x73, 0x45, 0x78, 0x74, 0x65, + 0x72, 0x6e, 0x61, 0x6c, 0x41, 0x67, 0x65, 0x6e, 0x74, 0x73, 0x22, 0x41, 0x0a, 
0x0c, 0x41, 0x70, + 0x70, 0x6c, 0x79, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x31, 0x0a, 0x08, 0x6d, 0x65, + 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x15, 0x2e, 0x70, + 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x2e, 0x4d, 0x65, 0x74, 0x61, 0x64, + 0x61, 0x74, 0x61, 0x52, 0x08, 0x6d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x22, 0xee, 0x02, + 0x0a, 0x0d, 0x41, 0x70, 0x70, 0x6c, 0x79, 0x43, 0x6f, 0x6d, 0x70, 0x6c, 0x65, 0x74, 0x65, 0x12, + 0x14, 0x0a, 0x05, 0x73, 0x74, 0x61, 0x74, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0c, 0x52, 0x05, + 0x73, 0x74, 0x61, 0x74, 0x65, 0x12, 0x14, 0x0a, 0x05, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x18, 0x02, + 0x20, 0x01, 0x28, 0x09, 0x52, 0x05, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x12, 0x33, 0x0a, 0x09, 0x72, + 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x73, 0x18, 0x03, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x15, + 0x2e, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x2e, 0x52, 0x65, 0x73, + 0x6f, 0x75, 0x72, 0x63, 0x65, 0x52, 0x09, 0x72, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x73, + 0x12, 0x3a, 0x0a, 0x0a, 0x70, 0x61, 0x72, 0x61, 0x6d, 0x65, 0x74, 0x65, 0x72, 0x73, 0x18, 0x04, + 0x20, 0x03, 0x28, 0x0b, 0x32, 0x1a, 0x2e, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, + 0x65, 0x72, 0x2e, 0x52, 0x69, 0x63, 0x68, 0x50, 0x61, 0x72, 0x61, 0x6d, 0x65, 0x74, 0x65, 0x72, + 0x52, 0x0a, 0x70, 0x61, 0x72, 0x61, 0x6d, 0x65, 0x74, 0x65, 0x72, 0x73, 0x12, 0x61, 0x0a, 0x17, + 0x65, 0x78, 0x74, 0x65, 0x72, 0x6e, 0x61, 0x6c, 0x5f, 0x61, 0x75, 0x74, 0x68, 0x5f, 0x70, 0x72, + 0x6f, 0x76, 0x69, 0x64, 0x65, 0x72, 0x73, 0x18, 0x05, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x29, 0x2e, + 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x2e, 0x45, 0x78, 0x74, 0x65, + 0x72, 0x6e, 0x61, 0x6c, 0x41, 0x75, 0x74, 0x68, 0x50, 0x72, 0x6f, 0x76, 0x69, 0x64, 0x65, 0x72, + 0x52, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x52, 0x15, 0x65, 0x78, 0x74, 0x65, 0x72, 0x6e, + 0x61, 0x6c, 0x41, 
0x75, 0x74, 0x68, 0x50, 0x72, 0x6f, 0x76, 0x69, 0x64, 0x65, 0x72, 0x73, 0x12, + 0x2d, 0x0a, 0x07, 0x74, 0x69, 0x6d, 0x69, 0x6e, 0x67, 0x73, 0x18, 0x06, 0x20, 0x03, 0x28, 0x0b, + 0x32, 0x13, 0x2e, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x2e, 0x54, + 0x69, 0x6d, 0x69, 0x6e, 0x67, 0x52, 0x07, 0x74, 0x69, 0x6d, 0x69, 0x6e, 0x67, 0x73, 0x12, 0x2e, + 0x0a, 0x08, 0x61, 0x69, 0x5f, 0x74, 0x61, 0x73, 0x6b, 0x73, 0x18, 0x07, 0x20, 0x03, 0x28, 0x0b, + 0x32, 0x13, 0x2e, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x2e, 0x41, + 0x49, 0x54, 0x61, 0x73, 0x6b, 0x52, 0x07, 0x61, 0x69, 0x54, 0x61, 0x73, 0x6b, 0x73, 0x22, 0xfa, + 0x01, 0x0a, 0x06, 0x54, 0x69, 0x6d, 0x69, 0x6e, 0x67, 0x12, 0x30, 0x0a, 0x05, 0x73, 0x74, 0x61, + 0x72, 0x74, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1a, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, + 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2e, 0x54, 0x69, 0x6d, 0x65, 0x73, + 0x74, 0x61, 0x6d, 0x70, 0x52, 0x05, 0x73, 0x74, 0x61, 0x72, 0x74, 0x12, 0x2c, 0x0a, 0x03, 0x65, + 0x6e, 0x64, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1a, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, + 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2e, 0x54, 0x69, 0x6d, 0x65, 0x73, + 0x74, 0x61, 0x6d, 0x70, 0x52, 0x03, 0x65, 0x6e, 0x64, 0x12, 0x16, 0x0a, 0x06, 0x61, 0x63, 0x74, + 0x69, 0x6f, 0x6e, 0x18, 0x03, 0x20, 0x01, 0x28, 0x09, 0x52, 0x06, 0x61, 0x63, 0x74, 0x69, 0x6f, + 0x6e, 0x12, 0x16, 0x0a, 0x06, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x18, 0x04, 0x20, 0x01, 0x28, + 0x09, 0x52, 0x06, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x12, 0x1a, 0x0a, 0x08, 0x72, 0x65, 0x73, + 0x6f, 0x75, 0x72, 0x63, 0x65, 0x18, 0x05, 0x20, 0x01, 0x28, 0x09, 0x52, 0x08, 0x72, 0x65, 0x73, + 0x6f, 0x75, 0x72, 0x63, 0x65, 0x12, 0x14, 0x0a, 0x05, 0x73, 0x74, 0x61, 0x67, 0x65, 0x18, 0x06, + 0x20, 0x01, 0x28, 0x09, 0x52, 0x05, 0x73, 0x74, 0x61, 0x67, 0x65, 0x12, 0x2e, 0x0a, 0x05, 0x73, + 0x74, 0x61, 0x74, 0x65, 0x18, 0x07, 0x20, 0x01, 0x28, 
0x0e, 0x32, 0x18, 0x2e, 0x70, 0x72, 0x6f, + 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x2e, 0x54, 0x69, 0x6d, 0x69, 0x6e, 0x67, 0x53, + 0x74, 0x61, 0x74, 0x65, 0x52, 0x05, 0x73, 0x74, 0x61, 0x74, 0x65, 0x22, 0x0f, 0x0a, 0x0d, 0x43, + 0x61, 0x6e, 0x63, 0x65, 0x6c, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x22, 0x8c, 0x02, 0x0a, + 0x07, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x2d, 0x0a, 0x06, 0x63, 0x6f, 0x6e, 0x66, + 0x69, 0x67, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x13, 0x2e, 0x70, 0x72, 0x6f, 0x76, 0x69, + 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x2e, 0x43, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x48, 0x00, 0x52, + 0x06, 0x63, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x12, 0x31, 0x0a, 0x05, 0x70, 0x61, 0x72, 0x73, 0x65, + 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x19, 0x2e, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, + 0x6f, 0x6e, 0x65, 0x72, 0x2e, 0x50, 0x61, 0x72, 0x73, 0x65, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, + 0x74, 0x48, 0x00, 0x52, 0x05, 0x70, 0x61, 0x72, 0x73, 0x65, 0x12, 0x2e, 0x0a, 0x04, 0x70, 0x6c, + 0x61, 0x6e, 0x18, 0x03, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x18, 0x2e, 0x70, 0x72, 0x6f, 0x76, 0x69, + 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x2e, 0x50, 0x6c, 0x61, 0x6e, 0x52, 0x65, 0x71, 0x75, 0x65, + 0x73, 0x74, 0x48, 0x00, 0x52, 0x04, 0x70, 0x6c, 0x61, 0x6e, 0x12, 0x31, 0x0a, 0x05, 0x61, 0x70, + 0x70, 0x6c, 0x79, 0x18, 0x04, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x19, 0x2e, 0x70, 0x72, 0x6f, 0x76, + 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x2e, 0x41, 0x70, 0x70, 0x6c, 0x79, 0x52, 0x65, 0x71, + 0x75, 0x65, 0x73, 0x74, 0x48, 0x00, 0x52, 0x05, 0x61, 0x70, 0x70, 0x6c, 0x79, 0x12, 0x34, 0x0a, + 0x06, 0x63, 0x61, 0x6e, 0x63, 0x65, 0x6c, 0x18, 0x05, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1a, 0x2e, + 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x2e, 0x43, 0x61, 0x6e, 0x63, + 0x65, 0x6c, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x48, 0x00, 0x52, 0x06, 0x63, 0x61, 0x6e, + 0x63, 0x65, 0x6c, 0x42, 0x06, 0x0a, 0x04, 0x74, 0x79, 0x70, 0x65, 0x22, 0xc9, 0x02, 0x0a, 
0x08, + 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x24, 0x0a, 0x03, 0x6c, 0x6f, 0x67, 0x18, + 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x10, 0x2e, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, + 0x6e, 0x65, 0x72, 0x2e, 0x4c, 0x6f, 0x67, 0x48, 0x00, 0x52, 0x03, 0x6c, 0x6f, 0x67, 0x12, 0x32, + 0x0a, 0x05, 0x70, 0x61, 0x72, 0x73, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1a, 0x2e, + 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x2e, 0x50, 0x61, 0x72, 0x73, + 0x65, 0x43, 0x6f, 0x6d, 0x70, 0x6c, 0x65, 0x74, 0x65, 0x48, 0x00, 0x52, 0x05, 0x70, 0x61, 0x72, + 0x73, 0x65, 0x12, 0x2f, 0x0a, 0x04, 0x70, 0x6c, 0x61, 0x6e, 0x18, 0x03, 0x20, 0x01, 0x28, 0x0b, + 0x32, 0x19, 0x2e, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x2e, 0x50, + 0x6c, 0x61, 0x6e, 0x43, 0x6f, 0x6d, 0x70, 0x6c, 0x65, 0x74, 0x65, 0x48, 0x00, 0x52, 0x04, 0x70, + 0x6c, 0x61, 0x6e, 0x12, 0x32, 0x0a, 0x05, 0x61, 0x70, 0x70, 0x6c, 0x79, 0x18, 0x04, 0x20, 0x01, + 0x28, 0x0b, 0x32, 0x1a, 0x2e, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, + 0x2e, 0x41, 0x70, 0x70, 0x6c, 0x79, 0x43, 0x6f, 0x6d, 0x70, 0x6c, 0x65, 0x74, 0x65, 0x48, 0x00, + 0x52, 0x05, 0x61, 0x70, 0x70, 0x6c, 0x79, 0x12, 0x3a, 0x0a, 0x0b, 0x64, 0x61, 0x74, 0x61, 0x5f, + 0x75, 0x70, 0x6c, 0x6f, 0x61, 0x64, 0x18, 0x05, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x17, 0x2e, 0x70, + 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x2e, 0x44, 0x61, 0x74, 0x61, 0x55, + 0x70, 0x6c, 0x6f, 0x61, 0x64, 0x48, 0x00, 0x52, 0x0a, 0x64, 0x61, 0x74, 0x61, 0x55, 0x70, 0x6c, + 0x6f, 0x61, 0x64, 0x12, 0x3a, 0x0a, 0x0b, 0x63, 0x68, 0x75, 0x6e, 0x6b, 0x5f, 0x70, 0x69, 0x65, + 0x63, 0x65, 0x18, 0x06, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x17, 0x2e, 0x70, 0x72, 0x6f, 0x76, 0x69, + 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x2e, 0x43, 0x68, 0x75, 0x6e, 0x6b, 0x50, 0x69, 0x65, 0x63, + 0x65, 0x48, 0x00, 0x52, 0x0a, 0x63, 0x68, 0x75, 0x6e, 0x6b, 0x50, 0x69, 0x65, 0x63, 0x65, 0x42, + 0x06, 0x0a, 0x04, 0x74, 0x79, 
0x70, 0x65, 0x22, 0x9c, 0x01, 0x0a, 0x0a, 0x44, 0x61, 0x74, 0x61, + 0x55, 0x70, 0x6c, 0x6f, 0x61, 0x64, 0x12, 0x3c, 0x0a, 0x0b, 0x75, 0x70, 0x6c, 0x6f, 0x61, 0x64, + 0x5f, 0x74, 0x79, 0x70, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0e, 0x32, 0x1b, 0x2e, 0x70, 0x72, + 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x2e, 0x44, 0x61, 0x74, 0x61, 0x55, 0x70, + 0x6c, 0x6f, 0x61, 0x64, 0x54, 0x79, 0x70, 0x65, 0x52, 0x0a, 0x75, 0x70, 0x6c, 0x6f, 0x61, 0x64, + 0x54, 0x79, 0x70, 0x65, 0x12, 0x1b, 0x0a, 0x09, 0x64, 0x61, 0x74, 0x61, 0x5f, 0x68, 0x61, 0x73, + 0x68, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0c, 0x52, 0x08, 0x64, 0x61, 0x74, 0x61, 0x48, 0x61, 0x73, + 0x68, 0x12, 0x1b, 0x0a, 0x09, 0x66, 0x69, 0x6c, 0x65, 0x5f, 0x73, 0x69, 0x7a, 0x65, 0x18, 0x03, + 0x20, 0x01, 0x28, 0x03, 0x52, 0x08, 0x66, 0x69, 0x6c, 0x65, 0x53, 0x69, 0x7a, 0x65, 0x12, 0x16, + 0x0a, 0x06, 0x63, 0x68, 0x75, 0x6e, 0x6b, 0x73, 0x18, 0x04, 0x20, 0x01, 0x28, 0x05, 0x52, 0x06, + 0x63, 0x68, 0x75, 0x6e, 0x6b, 0x73, 0x22, 0x67, 0x0a, 0x0a, 0x43, 0x68, 0x75, 0x6e, 0x6b, 0x50, + 0x69, 0x65, 0x63, 0x65, 0x12, 0x12, 0x0a, 0x04, 0x64, 0x61, 0x74, 0x61, 0x18, 0x01, 0x20, 0x01, + 0x28, 0x0c, 0x52, 0x04, 0x64, 0x61, 0x74, 0x61, 0x12, 0x24, 0x0a, 0x0e, 0x66, 0x75, 0x6c, 0x6c, + 0x5f, 0x64, 0x61, 0x74, 0x61, 0x5f, 0x68, 0x61, 0x73, 0x68, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0c, + 0x52, 0x0c, 0x66, 0x75, 0x6c, 0x6c, 0x44, 0x61, 0x74, 0x61, 0x48, 0x61, 0x73, 0x68, 0x12, 0x1f, + 0x0a, 0x0b, 0x70, 0x69, 0x65, 0x63, 0x65, 0x5f, 0x69, 0x6e, 0x64, 0x65, 0x78, 0x18, 0x03, 0x20, + 0x01, 0x28, 0x05, 0x52, 0x0a, 0x70, 0x69, 0x65, 0x63, 0x65, 0x49, 0x6e, 0x64, 0x65, 0x78, 0x2a, + 0xa8, 0x01, 0x0a, 0x11, 0x50, 0x61, 0x72, 0x61, 0x6d, 0x65, 0x74, 0x65, 0x72, 0x46, 0x6f, 0x72, + 0x6d, 0x54, 0x79, 0x70, 0x65, 0x12, 0x0b, 0x0a, 0x07, 0x44, 0x45, 0x46, 0x41, 0x55, 0x4c, 0x54, + 0x10, 0x00, 0x12, 0x0e, 0x0a, 0x0a, 0x46, 0x4f, 0x52, 0x4d, 0x5f, 0x45, 0x52, 0x52, 0x4f, 0x52, + 0x10, 0x01, 0x12, 0x09, 0x0a, 0x05, 0x52, 0x41, 0x44, 0x49, 0x4f, 
0x10, 0x02, 0x12, 0x0c, 0x0a, + 0x08, 0x44, 0x52, 0x4f, 0x50, 0x44, 0x4f, 0x57, 0x4e, 0x10, 0x03, 0x12, 0x09, 0x0a, 0x05, 0x49, + 0x4e, 0x50, 0x55, 0x54, 0x10, 0x04, 0x12, 0x0c, 0x0a, 0x08, 0x54, 0x45, 0x58, 0x54, 0x41, 0x52, + 0x45, 0x41, 0x10, 0x05, 0x12, 0x0a, 0x0a, 0x06, 0x53, 0x4c, 0x49, 0x44, 0x45, 0x52, 0x10, 0x06, + 0x12, 0x0c, 0x0a, 0x08, 0x43, 0x48, 0x45, 0x43, 0x4b, 0x42, 0x4f, 0x58, 0x10, 0x07, 0x12, 0x0a, + 0x0a, 0x06, 0x53, 0x57, 0x49, 0x54, 0x43, 0x48, 0x10, 0x08, 0x12, 0x0d, 0x0a, 0x09, 0x54, 0x41, + 0x47, 0x53, 0x45, 0x4c, 0x45, 0x43, 0x54, 0x10, 0x09, 0x12, 0x0f, 0x0a, 0x0b, 0x4d, 0x55, 0x4c, + 0x54, 0x49, 0x53, 0x45, 0x4c, 0x45, 0x43, 0x54, 0x10, 0x0a, 0x2a, 0x3f, 0x0a, 0x08, 0x4c, 0x6f, + 0x67, 0x4c, 0x65, 0x76, 0x65, 0x6c, 0x12, 0x09, 0x0a, 0x05, 0x54, 0x52, 0x41, 0x43, 0x45, 0x10, + 0x00, 0x12, 0x09, 0x0a, 0x05, 0x44, 0x45, 0x42, 0x55, 0x47, 0x10, 0x01, 0x12, 0x08, 0x0a, 0x04, + 0x49, 0x4e, 0x46, 0x4f, 0x10, 0x02, 0x12, 0x08, 0x0a, 0x04, 0x57, 0x41, 0x52, 0x4e, 0x10, 0x03, + 0x12, 0x09, 0x0a, 0x05, 0x45, 0x52, 0x52, 0x4f, 0x52, 0x10, 0x04, 0x2a, 0x3b, 0x0a, 0x0f, 0x41, + 0x70, 0x70, 0x53, 0x68, 0x61, 0x72, 0x69, 0x6e, 0x67, 0x4c, 0x65, 0x76, 0x65, 0x6c, 0x12, 0x09, + 0x0a, 0x05, 0x4f, 0x57, 0x4e, 0x45, 0x52, 0x10, 0x00, 0x12, 0x11, 0x0a, 0x0d, 0x41, 0x55, 0x54, + 0x48, 0x45, 0x4e, 0x54, 0x49, 0x43, 0x41, 0x54, 0x45, 0x44, 0x10, 0x01, 0x12, 0x0a, 0x0a, 0x06, + 0x50, 0x55, 0x42, 0x4c, 0x49, 0x43, 0x10, 0x02, 0x2a, 0x35, 0x0a, 0x09, 0x41, 0x70, 0x70, 0x4f, + 0x70, 0x65, 0x6e, 0x49, 0x6e, 0x12, 0x0e, 0x0a, 0x06, 0x57, 0x49, 0x4e, 0x44, 0x4f, 0x57, 0x10, + 0x00, 0x1a, 0x02, 0x08, 0x01, 0x12, 0x0f, 0x0a, 0x0b, 0x53, 0x4c, 0x49, 0x4d, 0x5f, 0x57, 0x49, + 0x4e, 0x44, 0x4f, 0x57, 0x10, 0x01, 0x12, 0x07, 0x0a, 0x03, 0x54, 0x41, 0x42, 0x10, 0x02, 0x2a, + 0x37, 0x0a, 0x13, 0x57, 0x6f, 0x72, 0x6b, 0x73, 0x70, 0x61, 0x63, 0x65, 0x54, 0x72, 0x61, 0x6e, + 0x73, 0x69, 0x74, 0x69, 0x6f, 0x6e, 0x12, 0x09, 0x0a, 0x05, 0x53, 0x54, 0x41, 0x52, 0x54, 0x10, + 0x00, 
0x12, 0x08, 0x0a, 0x04, 0x53, 0x54, 0x4f, 0x50, 0x10, 0x01, 0x12, 0x0b, 0x0a, 0x07, 0x44, + 0x45, 0x53, 0x54, 0x52, 0x4f, 0x59, 0x10, 0x02, 0x2a, 0x3e, 0x0a, 0x1b, 0x50, 0x72, 0x65, 0x62, + 0x75, 0x69, 0x6c, 0x74, 0x57, 0x6f, 0x72, 0x6b, 0x73, 0x70, 0x61, 0x63, 0x65, 0x42, 0x75, 0x69, + 0x6c, 0x64, 0x53, 0x74, 0x61, 0x67, 0x65, 0x12, 0x08, 0x0a, 0x04, 0x4e, 0x4f, 0x4e, 0x45, 0x10, + 0x00, 0x12, 0x0a, 0x0a, 0x06, 0x43, 0x52, 0x45, 0x41, 0x54, 0x45, 0x10, 0x01, 0x12, 0x09, 0x0a, + 0x05, 0x43, 0x4c, 0x41, 0x49, 0x4d, 0x10, 0x02, 0x2a, 0x35, 0x0a, 0x0b, 0x54, 0x69, 0x6d, 0x69, + 0x6e, 0x67, 0x53, 0x74, 0x61, 0x74, 0x65, 0x12, 0x0b, 0x0a, 0x07, 0x53, 0x54, 0x41, 0x52, 0x54, + 0x45, 0x44, 0x10, 0x00, 0x12, 0x0d, 0x0a, 0x09, 0x43, 0x4f, 0x4d, 0x50, 0x4c, 0x45, 0x54, 0x45, + 0x44, 0x10, 0x01, 0x12, 0x0a, 0x0a, 0x06, 0x46, 0x41, 0x49, 0x4c, 0x45, 0x44, 0x10, 0x02, 0x2a, + 0x47, 0x0a, 0x0e, 0x44, 0x61, 0x74, 0x61, 0x55, 0x70, 0x6c, 0x6f, 0x61, 0x64, 0x54, 0x79, 0x70, + 0x65, 0x12, 0x17, 0x0a, 0x13, 0x55, 0x50, 0x4c, 0x4f, 0x41, 0x44, 0x5f, 0x54, 0x59, 0x50, 0x45, + 0x5f, 0x55, 0x4e, 0x4b, 0x4e, 0x4f, 0x57, 0x4e, 0x10, 0x00, 0x12, 0x1c, 0x0a, 0x18, 0x55, 0x50, + 0x4c, 0x4f, 0x41, 0x44, 0x5f, 0x54, 0x59, 0x50, 0x45, 0x5f, 0x4d, 0x4f, 0x44, 0x55, 0x4c, 0x45, + 0x5f, 0x46, 0x49, 0x4c, 0x45, 0x53, 0x10, 0x01, 0x32, 0x49, 0x0a, 0x0b, 0x50, 0x72, 0x6f, 0x76, + 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x12, 0x3a, 0x0a, 0x07, 0x53, 0x65, 0x73, 0x73, 0x69, + 0x6f, 0x6e, 0x12, 0x14, 0x2e, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, + 0x2e, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x15, 0x2e, 0x70, 0x72, 0x6f, 0x76, 0x69, + 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x2e, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x28, + 0x01, 0x30, 0x01, 0x42, 0x30, 0x5a, 0x2e, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, + 0x6d, 0x2f, 0x63, 0x6f, 0x64, 0x65, 0x72, 0x2f, 0x63, 0x6f, 0x64, 0x65, 0x72, 0x2f, 0x76, 0x32, + 0x2f, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, 
0x69, 0x6f, 0x6e, 0x65, 0x72, 0x73, 0x64, 0x6b, 0x2f, + 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x06, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x33, } var ( @@ -5845,6 +5897,7 @@ func file_provisionersdk_proto_provisioner_proto_init() { (*Agent_InstanceId)(nil), } file_provisionersdk_proto_provisioner_proto_msgTypes[32].OneofWrappers = []interface{}{} + file_provisionersdk_proto_provisioner_proto_msgTypes[34].OneofWrappers = []interface{}{} file_provisionersdk_proto_provisioner_proto_msgTypes[43].OneofWrappers = []interface{}{ (*Request_Config)(nil), (*Request_Parse)(nil), diff --git a/provisionersdk/proto/provisioner.proto b/provisionersdk/proto/provisioner.proto index 803f3e2197ecd..89a69ce7022ca 100644 --- a/provisionersdk/proto/provisioner.proto +++ b/provisionersdk/proto/provisioner.proto @@ -364,6 +364,7 @@ message Metadata { repeated RunningAgentAuthToken running_agent_auth_tokens = 21; string task_id = 22; string task_prompt = 23; + string template_version_id = 24; } // Config represents execution configuration shared by all subsequent requests in the Session @@ -373,6 +374,11 @@ message Config { // state is the provisioner state (if any) bytes state = 2; string provisioner_log_level = 3; + // Template imports can omit template id + optional string template_id = 4; + // Dry runs omit version id + optional string template_version_id = 5; + optional bool exp_reuse_terraform_workspace = 6; // Whether to reuse existing terraform workspaces if they exist. } // ParseRequest consumes source-code to produce inputs. 
diff --git a/provisionersdk/serve.go b/provisionersdk/serve.go index c652cfa94949d..3bac226e58379 100644 --- a/provisionersdk/serve.go +++ b/provisionersdk/serve.go @@ -15,6 +15,7 @@ import ( "storj.io/drpc/drpcserver" "cdr.dev/slog" + "github.com/coder/coder/v2/codersdk" "github.com/coder/coder/v2/codersdk/drpcsdk" "github.com/coder/coder/v2/coderd/tracing" @@ -30,6 +31,7 @@ type ServeOptions struct { Logger slog.Logger WorkDirectory string ExternalProvisioner bool + Experiments codersdk.Experiments } type Server interface { diff --git a/provisionersdk/session.go b/provisionersdk/session.go index 3fd23628854e5..59034a761e09d 100644 --- a/provisionersdk/session.go +++ b/provisionersdk/session.go @@ -1,14 +1,10 @@ package provisionersdk import ( - "archive/tar" - "bytes" "context" "fmt" - "hash/crc32" "io" "os" - "path/filepath" "strings" "time" @@ -17,21 +13,16 @@ import ( "golang.org/x/xerrors" "cdr.dev/slog" + "github.com/coder/coder/v2/codersdk" "github.com/coder/coder/v2/codersdk/drpcsdk" + "github.com/coder/coder/v2/provisionersdk/tfpath" + "github.com/coder/coder/v2/provisionersdk/tfpath/x" protobuf "google.golang.org/protobuf/proto" "github.com/coder/coder/v2/provisionersdk/proto" ) -const ( - // ReadmeFile is the location we look for to extract documentation from template versions. - ReadmeFile = "README.md" - - sessionDirPrefix = "Session" - staleSessionRetention = 7 * 24 * time.Hour -) - // protoServer is a wrapper that translates the dRPC protocol into a Session with method calls into the Server. 
type protoServer struct { server Server @@ -46,36 +37,12 @@ func (p *protoServer) Session(stream proto.DRPCProvisioner_SessionStream) error server: p.server, } - err := CleanStaleSessions(s.Context(), p.opts.WorkDirectory, afero.NewOsFs(), time.Now(), s.Logger) - if err != nil { - return xerrors.Errorf("unable to clean stale sessions %q: %w", s.WorkDirectory, err) - } + s.Files = tfpath.Session(p.opts.WorkDirectory, sessID) - s.WorkDirectory = filepath.Join(p.opts.WorkDirectory, SessionDir(sessID)) - err = os.MkdirAll(s.WorkDirectory, 0o700) - if err != nil { - return xerrors.Errorf("create work directory %q: %w", s.WorkDirectory, err) - } defer func() { - var err error - // Cleanup the work directory after execution. - for attempt := 0; attempt < 5; attempt++ { - err = os.RemoveAll(s.WorkDirectory) - if err != nil { - // On Windows, open files cannot be removed. - // When the provisioner daemon is shutting down, - // it may take a few milliseconds for processes to exit. - // See: https://github.com/golang/go/issues/50510 - s.Logger.Debug(s.Context(), "failed to clean work directory; trying again", slog.Error(err)) - time.Sleep(250 * time.Millisecond) - continue - } - s.Logger.Debug(s.Context(), "cleaned up work directory") - return - } - s.Logger.Error(s.Context(), "failed to clean up work directory after multiple attempts", - slog.F("path", s.WorkDirectory), slog.Error(err)) + s.Files.Cleanup(s.Context(), s.Logger, afero.NewOsFs()) }() + req, err := stream.Recv() if err != nil { return xerrors.Errorf("receive config: %w", err) @@ -89,7 +56,17 @@ func (p *protoServer) Session(stream proto.DRPCProvisioner_SessionStream) error s.logLevel = proto.LogLevel_value[strings.ToUpper(s.Config.ProvisionerLogLevel)] } - err = s.extractArchive() + if p.opts.Experiments.Enabled(codersdk.ExperimentTerraformWorkspace) { + s.Files = x.SessionDir(p.opts.WorkDirectory, sessID, config) + } + + // Cleanup any previously left stale sessions. 
+ err = s.Files.CleanStaleSessions(s.Context(), s.Logger, afero.NewOsFs(), time.Now()) + if err != nil { + return xerrors.Errorf("unable to clean stale sessions %q: %w", s.Files, err) + } + + err = s.Files.ExtractArchive(s.Context(), s.Logger, afero.NewOsFs(), s.Config) if err != nil { return xerrors.Errorf("extract archive: %w", err) } @@ -144,7 +121,7 @@ func (s *Session) handleRequests() error { return err } // Handle README centrally, so that individual provisioners don't need to mess with it. - readme, err := os.ReadFile(filepath.Join(s.WorkDirectory, ReadmeFile)) + readme, err := os.ReadFile(s.Files.ReadmeFilePath()) if err == nil { complete.Readme = readme } else { @@ -220,9 +197,9 @@ func (s *Session) handleRequests() error { } type Session struct { - Logger slog.Logger - WorkDirectory string - Config *proto.Config + Logger slog.Logger + Files tfpath.Layouter + Config *proto.Config server Server stream proto.DRPCProvisioner_SessionStream @@ -233,92 +210,6 @@ func (s *Session) Context() context.Context { return s.stream.Context() } -func (s *Session) extractArchive() error { - ctx := s.Context() - - s.Logger.Info(ctx, "unpacking template source archive", - slog.F("size_bytes", len(s.Config.TemplateSourceArchive)), - ) - - reader := tar.NewReader(bytes.NewBuffer(s.Config.TemplateSourceArchive)) - // for safety, nil out the reference on Config, since the reader now owns it. - s.Config.TemplateSourceArchive = nil - for { - header, err := reader.Next() - if err != nil { - if xerrors.Is(err, io.EOF) { - break - } - return xerrors.Errorf("read template source archive: %w", err) - } - s.Logger.Debug(context.Background(), "read archive entry", - slog.F("name", header.Name), - slog.F("mod_time", header.ModTime), - slog.F("size", header.Size)) - - // Security: don't untar absolute or relative paths, as this can allow a malicious tar to overwrite - // files outside the workdir. 
- if !filepath.IsLocal(header.Name) { - return xerrors.Errorf("refusing to extract to non-local path") - } - // nolint: gosec - headerPath := filepath.Join(s.WorkDirectory, header.Name) - if !strings.HasPrefix(headerPath, filepath.Clean(s.WorkDirectory)) { - return xerrors.New("tar attempts to target relative upper directory") - } - mode := header.FileInfo().Mode() - if mode == 0 { - mode = 0o600 - } - - // Always check for context cancellation before reading the next header. - // This is mainly important for unit tests, since a canceled context means - // the underlying directory is going to be deleted. There still exists - // the small race condition that the context is canceled after this, and - // before the disk write. - if ctx.Err() != nil { - return xerrors.Errorf("context canceled: %w", ctx.Err()) - } - switch header.Typeflag { - case tar.TypeDir: - err = os.MkdirAll(headerPath, mode) - if err != nil { - return xerrors.Errorf("mkdir %q: %w", headerPath, err) - } - s.Logger.Debug(context.Background(), "extracted directory", - slog.F("path", headerPath), - slog.F("mode", fmt.Sprintf("%O", mode))) - case tar.TypeReg: - file, err := os.OpenFile(headerPath, os.O_CREATE|os.O_RDWR, mode) - if err != nil { - return xerrors.Errorf("create file %q (mode %s): %w", headerPath, mode, err) - } - - hash := crc32.NewIEEE() - hashReader := io.TeeReader(reader, hash) - // Max file size of 10MiB. 
- size, err := io.CopyN(file, hashReader, 10<<20) - if xerrors.Is(err, io.EOF) { - err = nil - } - if err != nil { - _ = file.Close() - return xerrors.Errorf("copy file %q: %w", headerPath, err) - } - err = file.Close() - if err != nil { - return xerrors.Errorf("close file %q: %s", headerPath, err) - } - s.Logger.Debug(context.Background(), "extracted file", - slog.F("size_bytes", size), - slog.F("path", headerPath), - slog.F("mode", mode), - slog.F("checksum", fmt.Sprintf("%x", hash.Sum(nil)))) - } - } - return nil -} - func (s *Session) ProvisionLog(level proto.LogLevel, output string) { if int32(level) < s.logLevel { return @@ -379,8 +270,3 @@ func (r *request[R, C]) do() (C, error) { return c, nil } } - -// SessionDir returns the directory name with mandatory prefix. -func SessionDir(sessID string) string { - return sessionDirPrefix + sessID -} diff --git a/provisionersdk/tfpath/tfpath.go b/provisionersdk/tfpath/tfpath.go new file mode 100644 index 0000000000000..019552e48d0de --- /dev/null +++ b/provisionersdk/tfpath/tfpath.go @@ -0,0 +1,252 @@ +package tfpath + +import ( + "archive/tar" + "bytes" + "context" + "fmt" + "hash/crc32" + "io" + "os" + "path/filepath" + "strings" + "time" + + "github.com/spf13/afero" + "golang.org/x/xerrors" + + "cdr.dev/slog" + "github.com/coder/coder/v2/provisionersdk/proto" +) + +type Layouter interface { + WorkDirectory() string + StateFilePath() string + PlanFilePath() string + TerraformLockFile() string + ReadmeFilePath() string + TerraformMetadataDir() string + ModulesDirectory() string + ModulesFilePath() string + ExtractArchive(ctx context.Context, logger slog.Logger, fs afero.Fs, cfg *proto.Config) error + Cleanup(ctx context.Context, logger slog.Logger, fs afero.Fs) + CleanStaleSessions(ctx context.Context, logger slog.Logger, fs afero.Fs, now time.Time) error +} + +var _ Layouter = (*Layout)(nil) + +const ( + // ReadmeFile is the location we look for to extract documentation from template versions. 
+ ReadmeFile = "README.md" + + sessionDirPrefix = "Session" + staleSessionRetention = 7 * 24 * time.Hour +) + +// Session creates a directory structure layout for terraform execution. The +// SessionID is a unique value for creating an ephemeral working directory inside +// the parentDirPath. All helper functions will return paths for various +// terraform asserts inside this working directory. +func Session(parentDirPath, sessionID string) Layout { + return Layout(filepath.Join(parentDirPath, sessionDirPrefix+sessionID)) +} + +func FromWorkingDirectory(workDir string) Layout { + return Layout(workDir) +} + +// Layout is the terraform execution working directory structure. +// It also contains some methods for common file operations within that layout. +// Such as "Cleanup" and "ExtractArchive". +// TODO: Maybe we should include the afero.FS here as well, then all operations +// would be on the same FS? +type Layout string + +// WorkDirectory returns the root working directory for Terraform files. 
+func (l Layout) WorkDirectory() string { return string(l) } + +func (l Layout) StateFilePath() string { + return filepath.Join(l.WorkDirectory(), "terraform.tfstate") +} + +func (l Layout) PlanFilePath() string { + return filepath.Join(l.WorkDirectory(), "terraform.tfplan") +} + +func (l Layout) TerraformLockFile() string { + return filepath.Join(l.WorkDirectory(), ".terraform.lock.hcl") +} + +func (l Layout) ReadmeFilePath() string { + return filepath.Join(l.WorkDirectory(), ReadmeFile) +} + +func (l Layout) TerraformMetadataDir() string { + return filepath.Join(l.WorkDirectory(), ".terraform") +} + +func (l Layout) ModulesDirectory() string { + return filepath.Join(l.TerraformMetadataDir(), "modules") +} + +func (l Layout) ModulesFilePath() string { + return filepath.Join(l.ModulesDirectory(), "modules.json") +} + +func (l Layout) ExtractArchive(ctx context.Context, logger slog.Logger, fs afero.Fs, cfg *proto.Config) error { + logger.Info(ctx, "unpacking template source archive", + slog.F("size_bytes", len(cfg.TemplateSourceArchive)), + ) + + err := fs.MkdirAll(l.WorkDirectory(), 0o700) + if err != nil { + return xerrors.Errorf("create work directory %q: %w", l.WorkDirectory(), err) + } + + // TODO: Pass in cfg.TemplateSourceArchive, not the full config. + // niling out the config field is a bit hacky. + reader := tar.NewReader(bytes.NewBuffer(cfg.TemplateSourceArchive)) + // for safety, nil out the reference on Config, since the reader now owns it. + cfg.TemplateSourceArchive = nil + for { + header, err := reader.Next() + if err != nil { + if xerrors.Is(err, io.EOF) { + break + } + return xerrors.Errorf("read template source archive: %w", err) + } + logger.Debug(context.Background(), "read archive entry", + slog.F("name", header.Name), + slog.F("mod_time", header.ModTime), + slog.F("size", header.Size)) + + // Security: don't untar absolute or relative paths, as this can allow a malicious tar to overwrite + // files outside the workdir. 
+ if !filepath.IsLocal(header.Name) { + return xerrors.Errorf("refusing to extract to non-local path") + } + + // nolint: gosec // Safe to no-lint because the filepath.IsLocal check above. + headerPath := filepath.Join(l.WorkDirectory(), header.Name) + if !strings.HasPrefix(headerPath, filepath.Clean(l.WorkDirectory())) { + return xerrors.New("tar attempts to target relative upper directory") + } + mode := header.FileInfo().Mode() + if mode == 0 { + mode = 0o600 + } + + // Always check for context cancellation before reading the next header. + // This is mainly important for unit tests, since a canceled context means + // the underlying directory is going to be deleted. There still exists + // the small race condition that the context is canceled after this, and + // before the disk write. + if ctx.Err() != nil { + return xerrors.Errorf("context canceled: %w", ctx.Err()) + } + switch header.Typeflag { + case tar.TypeDir: + err = fs.MkdirAll(headerPath, mode) + if err != nil { + return xerrors.Errorf("mkdir %q: %w", headerPath, err) + } + logger.Debug(context.Background(), "extracted directory", + slog.F("path", headerPath), + slog.F("mode", fmt.Sprintf("%O", mode))) + case tar.TypeReg: + file, err := fs.OpenFile(headerPath, os.O_CREATE|os.O_RDWR, mode) + if err != nil { + return xerrors.Errorf("create file %q (mode %s): %w", headerPath, mode, err) + } + + hash := crc32.NewIEEE() + hashReader := io.TeeReader(reader, hash) + // Max file size of 10MiB. 
+ size, err := io.CopyN(file, hashReader, 10<<20) + if xerrors.Is(err, io.EOF) { + err = nil + } + if err != nil { + _ = file.Close() + return xerrors.Errorf("copy file %q: %w", headerPath, err) + } + err = file.Close() + if err != nil { + return xerrors.Errorf("close file %q: %s", headerPath, err) + } + logger.Debug(context.Background(), "extracted file", + slog.F("size_bytes", size), + slog.F("path", headerPath), + slog.F("mode", mode), + slog.F("checksum", fmt.Sprintf("%x", hash.Sum(nil)))) + } + } + + return nil +} + +// Cleanup removes the work directory and all of its contents. +func (l Layout) Cleanup(ctx context.Context, logger slog.Logger, fs afero.Fs) { + var err error + path := l.WorkDirectory() + + for attempt := 0; attempt < 5; attempt++ { + err := fs.RemoveAll(path) + if err != nil { + // On Windows, open files cannot be removed. + // When the provisioner daemon is shutting down, + // it may take a few milliseconds for processes to exit. + // See: https://github.com/golang/go/issues/50510 + logger.Debug(ctx, "failed to clean work directory; trying again", slog.Error(err)) + // TODO: Should we abort earlier if the context is done? + time.Sleep(250 * time.Millisecond) + continue + } + logger.Debug(ctx, "cleaned up work directory") + return + } + + // Returning an error at this point cannot do any good. The caller cannot resolve + // this. There is a routine cleanup task that will remove old work directories + // when this fails. + logger.Error(ctx, "failed to clean up work directory after multiple attempts", + slog.F("path", path), slog.Error(err)) +} + +// CleanStaleSessions browses the work directory searching for stale session +// directories. Coder provisioner is supposed to remove them once after finishing the provisioning, +// but there is a risk of keeping them in case of a failure. 
+func (l Layout) CleanStaleSessions(ctx context.Context, logger slog.Logger, fs afero.Fs, now time.Time) error { + parent := filepath.Dir(l.WorkDirectory()) + entries, err := afero.ReadDir(fs, filepath.Dir(l.WorkDirectory())) + if err != nil { + return xerrors.Errorf("can't read %q directory", parent) + } + + for _, fi := range entries { + dirName := fi.Name() + + if fi.IsDir() && isValidSessionDir(dirName) { + sessionDirPath := filepath.Join(parent, dirName) + + modTime := fi.ModTime() // fallback to modTime if modTime is not available (afero) + + if modTime.Add(staleSessionRetention).After(now) { + continue + } + + logger.Info(ctx, "remove stale session directory", slog.F("session_path", sessionDirPath)) + err = fs.RemoveAll(sessionDirPath) + if err != nil { + return xerrors.Errorf("can't remove %q directory: %w", sessionDirPath, err) + } + } + } + return nil +} + +func isValidSessionDir(dirName string) bool { + match, err := filepath.Match(sessionDirPrefix+"*", dirName) + return err == nil && match +} diff --git a/provisionersdk/tfpath/x/tfpath.go b/provisionersdk/tfpath/x/tfpath.go new file mode 100644 index 0000000000000..c6b9f5d669e94 --- /dev/null +++ b/provisionersdk/tfpath/x/tfpath.go @@ -0,0 +1,320 @@ +package x + +// This file will replace the `tfpath.go` in the parent `tfpath` package when the +// `terraform-directory-reuse` experiment is graduated. + +import ( + "archive/tar" + "bytes" + "context" + "fmt" + "hash/crc32" + "io" + "os" + "path/filepath" + "strings" + "time" + + "github.com/google/uuid" + "github.com/spf13/afero" + "golang.org/x/xerrors" + + "cdr.dev/slog" + "github.com/coder/coder/v2/provisionersdk/proto" + "github.com/coder/coder/v2/provisionersdk/tfpath" +) + +var _ tfpath.Layouter = (*Layout)(nil) + +func SessionDir(parentDir, sessID string, config *proto.Config) Layout { + // TODO: These conditionals are messy. nil, "", or uuid.Nil are all considered the same. Maybe a helper function? 
+ missingID := config.TemplateId == nil || *config.TemplateId == "" || *config.TemplateId == uuid.Nil.String() || + config.TemplateVersionId == nil || *config.TemplateVersionId == "" || *config.TemplateVersionId == uuid.Nil.String() + + // Both templateID and templateVersionID must be set to reuse workspace. + if config.ExpReuseTerraformWorkspace == nil || !*config.ExpReuseTerraformWorkspace || missingID { + return EphemeralSessionDir(parentDir, sessID) + } + + return Layout{ + workDirectory: filepath.Join(parentDir, *config.TemplateId, *config.TemplateVersionId), + sessionID: sessID, + ephemeral: false, + } +} + +// EphemeralSessionDir returns the directory name with mandatory prefix. These +// directories are created for each provisioning session and are meant to be +// ephemeral. +func EphemeralSessionDir(parentDir, sessID string) Layout { + return Layout{ + workDirectory: filepath.Join(parentDir, sessionDirPrefix+sessID), + sessionID: sessID, + ephemeral: true, + } +} + +type Layout struct { + workDirectory string + sessionID string + ephemeral bool +} + +const ( + // ReadmeFile is the location we look for to extract documentation from template versions. + ReadmeFile = "README.md" + + sessionDirPrefix = "Session" +) + +func (td Layout) WorkDirectory() string { + return td.workDirectory +} + +// StateSessionDirectory follows the same directory structure as Terraform +// workspaces. All build specific state is stored within this directory. +// +// These files should be cleaned up on exit. In the case of a failure, they will +// not collide with other builds since each build uses a unique session ID. 
+func (td Layout) StateSessionDirectory() string { + return filepath.Join(td.workDirectory, "terraform.tfstate.d", td.sessionID) +} + +func (td Layout) StateFilePath() string { + return filepath.Join(td.StateSessionDirectory(), "terraform.tfstate") +} + +func (td Layout) PlanFilePath() string { + return filepath.Join(td.StateSessionDirectory(), "terraform.tfplan") +} + +func (td Layout) TerraformLockFile() string { + return filepath.Join(td.WorkDirectory(), ".terraform.lock.hcl") +} + +func (td Layout) ReadmeFilePath() string { + return filepath.Join(td.WorkDirectory(), ReadmeFile) +} + +func (td Layout) TerraformMetadataDir() string { + return filepath.Join(td.WorkDirectory(), ".terraform") +} + +func (td Layout) ModulesDirectory() string { + return filepath.Join(td.TerraformMetadataDir(), "modules") +} + +func (td Layout) ModulesFilePath() string { + return filepath.Join(td.ModulesDirectory(), "modules.json") +} + +func (td Layout) WorkspaceEnvironmentFilePath() string { + return filepath.Join(td.TerraformMetadataDir(), "environment") +} + +func (td Layout) Cleanup(ctx context.Context, logger slog.Logger, fs afero.Fs) { + var err error + path := td.WorkDirectory() + if !td.ephemeral { + // Non-ephemeral directories only clean up the session subdirectory. + // Leaving in place the wider work directory for reuse. + path = td.StateSessionDirectory() + } + for attempt := 0; attempt < 5; attempt++ { + err := fs.RemoveAll(path) + if err != nil { + // On Windows, open files cannot be removed. + // When the provisioner daemon is shutting down, + // it may take a few milliseconds for processes to exit. + // See: https://github.com/golang/go/issues/50510 + logger.Debug(ctx, "failed to clean work directory; trying again", slog.Error(err)) + // TODO: Should we abort earlier if the context is done? 
+ time.Sleep(250 * time.Millisecond) + continue + } + logger.Debug(ctx, "cleaned up work directory", slog.F("path", path)) + return + } + + logger.Error(ctx, "failed to clean up work directory after multiple attempts", + slog.F("path", path), slog.Error(err)) +} + +func (td Layout) ExtractArchive(ctx context.Context, logger slog.Logger, fs afero.Fs, cfg *proto.Config) error { + logger.Info(ctx, "unpacking template source archive", + slog.F("size_bytes", len(cfg.TemplateSourceArchive)), + ) + + err := fs.MkdirAll(td.WorkDirectory(), 0o700) + if err != nil { + return xerrors.Errorf("create work directory %q: %w", td.WorkDirectory(), err) + } + + err = fs.MkdirAll(td.StateSessionDirectory(), 0o700) + if err != nil { + return xerrors.Errorf("create state directory %q: %w", td.WorkDirectory(), err) + } + + // TODO: This is a bit hacky. We should use `terraform workspace select` to create this + // environment file. However, since we know the backend is `local`, this is a quicker + // way to accomplish the same thing. + err = td.SelectWorkspace(fs) + if err != nil { + return xerrors.Errorf("select terraform workspace: %w", err) + } + + reader := tar.NewReader(bytes.NewBuffer(cfg.TemplateSourceArchive)) + // for safety, nil out the reference on Config, since the reader now owns it. + cfg.TemplateSourceArchive = nil + for { + header, err := reader.Next() + if err != nil { + if xerrors.Is(err, io.EOF) { + break + } + return xerrors.Errorf("read template source archive: %w", err) + } + logger.Debug(context.Background(), "read archive entry", + slog.F("name", header.Name), + slog.F("mod_time", header.ModTime), + slog.F("size", header.Size)) + + // Security: don't untar absolute or relative paths, as this can allow a malicious tar to overwrite + // files outside the workdir. 
+ if !filepath.IsLocal(header.Name) { + return xerrors.Errorf("refusing to extract to non-local path") + } + // nolint: gosec + headerPath := filepath.Join(td.WorkDirectory(), header.Name) + if !strings.HasPrefix(headerPath, filepath.Clean(td.WorkDirectory())) { + return xerrors.New("tar attempts to target relative upper directory") + } + mode := header.FileInfo().Mode() + if mode == 0 { + mode = 0o600 + } + + // Always check for context cancellation before reading the next header. + // This is mainly important for unit tests, since a canceled context means + // the underlying directory is going to be deleted. There still exists + // the small race condition that the context is canceled after this, and + // before the disk write. + if ctx.Err() != nil { + return xerrors.Errorf("context canceled: %w", ctx.Err()) + } + switch header.Typeflag { + case tar.TypeDir: + err = fs.MkdirAll(headerPath, mode) + if err != nil { + return xerrors.Errorf("mkdir %q: %w", headerPath, err) + } + logger.Debug(context.Background(), "extracted directory", + slog.F("path", headerPath), + slog.F("mode", fmt.Sprintf("%O", mode))) + case tar.TypeReg: + // TODO: If we are overwriting an existing file, that means we are reusing + // the terraform directory. In that case, we should check the file content + // matches what already exists on disk. Or just continue to overwrite it. + file, err := fs.OpenFile(headerPath, os.O_CREATE|os.O_RDWR|os.O_TRUNC, mode) + if err != nil { + return xerrors.Errorf("create file %q (mode %s): %w", headerPath, mode, err) + } + + hash := crc32.NewIEEE() + hashReader := io.TeeReader(reader, hash) + // Max file size of 10MiB. 
+ size, err := io.CopyN(file, hashReader, 10<<20) + if xerrors.Is(err, io.EOF) { + err = nil + } + if err != nil { + _ = file.Close() + return xerrors.Errorf("copy file %q: %w", headerPath, err) + } + err = file.Close() + if err != nil { + return xerrors.Errorf("close file %q: %s", headerPath, err) + } + logger.Debug(context.Background(), "extracted file", + slog.F("size_bytes", size), + slog.F("path", headerPath), + slog.F("mode", mode), + slog.F("checksum", fmt.Sprintf("%x", hash.Sum(nil)))) + } + } + + return nil +} + +// CleanStaleSessions assumes this Layout is the latest active template version. +// Assuming that, any other template version directories found alongside it are +// considered inactive and can be removed. Inactive template versions should use +// ephemeral TerraformDirectories. +func (td Layout) CleanStaleSessions(ctx context.Context, logger slog.Logger, fs afero.Fs, now time.Time) error { + if td.ephemeral { + // Use the existing cleanup for ephemeral sessions. + return tfpath.FromWorkingDirectory(td.workDirectory).CleanStaleSessions(ctx, logger, fs, now) + } + + // All template versions share the same parent directory. Since only the latest + // active version should remain, remove all other version directories. + wd := td.WorkDirectory() + templateDir := filepath.Dir(wd) + versionDir := filepath.Base(wd) + + entries, err := afero.ReadDir(fs, templateDir) + if xerrors.Is(err, os.ErrNotExist) { + // Nothing to clean, this template dir does not exist. + return nil + } + if err != nil { + return xerrors.Errorf("can't read %q directory: %w", templateDir, err) + } + + for _, fi := range entries { + if !fi.IsDir() { + continue + } + + if fi.Name() == versionDir { + continue + } + + // Note: There is a .coder directory here with a pprof unix file. + // This is from the previous provisioner run, and will be removed here. + // TODO: Add more explicit pprof cleanup/handling. 
+ + oldVerDir := filepath.Join(templateDir, fi.Name()) + logger.Info(ctx, "remove inactive template version directory", slog.F("version_path", oldVerDir)) + err = fs.RemoveAll(oldVerDir) + if err != nil { + logger.Error(ctx, "failed to remove inactive template version directory", slog.F("version_path", oldVerDir), slog.Error(err)) + } + } + return nil +} + +// SelectWorkspace writes the terraform workspace environment file, which acts as +// `terraform workspace select `. It is quicker than using the cli command. +// More importantly this code can be written without changing the executor +// behavior, which is nice encapsulation for this experiment. +func (td Layout) SelectWorkspace(fs afero.Fs) error { + // Also set up the terraform workspace to use + err := fs.MkdirAll(td.TerraformMetadataDir(), 0o700) + if err != nil { + return xerrors.Errorf("create terraform metadata directory %q: %w", td.TerraformMetadataDir(), err) + } + + file, err := fs.OpenFile(td.WorkspaceEnvironmentFilePath(), os.O_WRONLY|os.O_CREATE|os.O_TRUNC, 0o600) + if err != nil { + return xerrors.Errorf("create workspace environment file: %w", err) + } + defer file.Close() + + _, err = file.WriteString(td.sessionID) + if err != nil { + _ = file.Close() + return xerrors.Errorf("write workspace environment file: %w", err) + } + return nil +} diff --git a/scaletest/createworkspaces/run.go b/scaletest/createworkspaces/run.go index 49fe0548b38e5..09903c06cfab2 100644 --- a/scaletest/createworkspaces/run.go +++ b/scaletest/createworkspaces/run.go @@ -87,10 +87,14 @@ func (r *Runner) Run(ctx context.Context, id string, logs io.Writer) error { workspaceBuildConfig.OrganizationID = r.cfg.User.OrganizationID workspaceBuildConfig.UserID = user.ID.String() r.workspacebuildRunner = workspacebuild.NewRunner(client, workspaceBuildConfig) - workspace, err := r.workspacebuildRunner.RunReturningWorkspace(ctx, id, logs) + slimWorkspace, err := r.workspacebuildRunner.RunReturningWorkspace(ctx, id, logs) if err != 
nil { return xerrors.Errorf("create workspace: %w", err) } + workspace, err := client.Workspace(ctx, slimWorkspace.ID) + if err != nil { + return xerrors.Errorf("get full workspace info: %w", err) + } if r.cfg.Workspace.NoWaitForAgents { return nil diff --git a/scaletest/dynamicparameters/template.go b/scaletest/dynamicparameters/template.go index 5faf67e531320..dbe4b079b1504 100644 --- a/scaletest/dynamicparameters/template.go +++ b/scaletest/dynamicparameters/template.go @@ -1,15 +1,12 @@ package dynamicparameters import ( - "archive/tar" "bytes" "context" _ "embed" "encoding/json" "fmt" "io" - "path/filepath" - "slices" "strings" "text/template" "time" @@ -20,6 +17,7 @@ import ( "cdr.dev/slog" "github.com/coder/coder/v2/codersdk" "github.com/coder/coder/v2/cryptorand" + "github.com/coder/coder/v2/scaletest/loadtestutil" "github.com/coder/quartz" ) @@ -89,48 +87,6 @@ func GetModuleFiles() map[string][]byte { } } -func createTarFromFiles(files map[string][]byte) ([]byte, error) { - buf := new(bytes.Buffer) - writer := tar.NewWriter(buf) - dirs := []string{} - for name, content := range files { - // We need to add directories before any files that use them. But, we only need to do this - // once. - dir := filepath.Dir(name) - if dir != "." && !slices.Contains(dirs, dir) { - dirs = append(dirs, dir) - err := writer.WriteHeader(&tar.Header{ - Name: dir, - Mode: 0o755, - Typeflag: tar.TypeDir, - }) - if err != nil { - return nil, err - } - } - - err := writer.WriteHeader(&tar.Header{ - Name: name, - Size: int64(len(content)), - Mode: 0o644, - }) - if err != nil { - return nil, err - } - - _, err = writer.Write(content) - if err != nil { - return nil, err - } - } - // `writer.Close()` function flushes the writer buffer, and adds extra padding to create a legal tarball. 
- err := writer.Close() - if err != nil { - return nil, err - } - return buf.Bytes(), nil -} - func TemplateTarData() ([]byte, error) { mainTF, err := TemplateContent() if err != nil { @@ -144,7 +100,7 @@ func TemplateTarData() ([]byte, error) { for k, v := range moduleFiles { files[k] = v } - tarData, err := createTarFromFiles(files) + tarData, err := loadtestutil.CreateTarFromFiles(files) if err != nil { return nil, xerrors.Errorf("failed to create tarball: %w", err) } diff --git a/scaletest/loadtestutil/files.go b/scaletest/loadtestutil/files.go new file mode 100644 index 0000000000000..2890700f4efd5 --- /dev/null +++ b/scaletest/loadtestutil/files.go @@ -0,0 +1,50 @@ +package loadtestutil + +import ( + "archive/tar" + "bytes" + "path/filepath" + "slices" +) + +func CreateTarFromFiles(files map[string][]byte) ([]byte, error) { + buf := new(bytes.Buffer) + writer := tar.NewWriter(buf) + dirs := []string{} + for name, content := range files { + // We need to add directories before any files that use them. But, we only need to do this + // once. + dir := filepath.Dir(name) + if dir != "." && !slices.Contains(dirs, dir) { + dirs = append(dirs, dir) + err := writer.WriteHeader(&tar.Header{ + Name: dir, + Mode: 0o755, + Typeflag: tar.TypeDir, + }) + if err != nil { + return nil, err + } + } + + err := writer.WriteHeader(&tar.Header{ + Name: name, + Size: int64(len(content)), + Mode: 0o644, + }) + if err != nil { + return nil, err + } + + _, err = writer.Write(content) + if err != nil { + return nil, err + } + } + // `writer.Close()` function flushes the writer buffer, and adds extra padding to create a legal tarball. 
+ err := writer.Close() + if err != nil { + return nil, err + } + return buf.Bytes(), nil +} diff --git a/scaletest/notifications/config.go b/scaletest/notifications/config.go index ac8daeb9ef9cb..5296577396536 100644 --- a/scaletest/notifications/config.go +++ b/scaletest/notifications/config.go @@ -1,6 +1,7 @@ package notifications import ( + "net/http" "sync" "time" @@ -37,6 +38,12 @@ type Config struct { // SMTPApiUrl is the URL of the SMTP mock HTTP API SMTPApiURL string `json:"smtp_api_url"` + + // SMTPRequestTimeout is the timeout for SMTP requests. + SMTPRequestTimeout time.Duration `json:"smtp_request_timeout"` + + // SMTPHttpClient is the HTTP client for SMTP requests. + SMTPHttpClient *http.Client `json:"-"` } func (c Config) Validate() error { @@ -61,6 +68,14 @@ func (c Config) Validate() error { return xerrors.New("notification_timeout must be greater than 0") } + if c.SMTPApiURL != "" && c.SMTPRequestTimeout <= 0 { + return xerrors.New("smtp_request_timeout must be set if smtp_api_url is set") + } + + if c.SMTPApiURL != "" && c.SMTPHttpClient == nil { + return xerrors.New("smtp_http_client must be set if smtp_api_url is set") + } + if c.DialTimeout <= 0 { return xerrors.New("dial_timeout must be greater than 0") } diff --git a/scaletest/notifications/metrics.go b/scaletest/notifications/metrics.go index 0bf3ebad74044..6d9c1a03fa956 100644 --- a/scaletest/notifications/metrics.go +++ b/scaletest/notifications/metrics.go @@ -28,6 +28,12 @@ func NewMetrics(reg prometheus.Registerer) *Metrics { Subsystem: "scaletest", Name: "notification_delivery_latency_seconds", Help: "Time between notification-creating action and receipt of notification by client", + Buckets: []float64{ + 1, 5, 10, 30, 60, + 120, 180, 240, 300, 360, 420, 480, 540, 600, 660, 720, 780, 840, 900, + 1200, 1500, 1800, 2100, 2400, 2700, 3000, 3300, 3600, 3900, 4200, 4500, + 5400, 7200, + }, }, []string{"notification_id", "notification_type"}) errors := 
prometheus.NewCounterVec(prometheus.CounterOpts{ Namespace: "coderd", diff --git a/scaletest/notifications/run.go b/scaletest/notifications/run.go index abe844574659e..213875b85bd6e 100644 --- a/scaletest/notifications/run.go +++ b/scaletest/notifications/run.go @@ -298,15 +298,16 @@ func (r *Runner) watchNotificationsSMTP(ctx context.Context, user codersdk.User, receivedNotifications := make(map[uuid.UUID]struct{}) apiURL := fmt.Sprintf("%s/messages?email=%s", r.cfg.SMTPApiURL, user.Email) - httpClient := &http.Client{ - Timeout: 10 * time.Second, - } + httpClient := r.cfg.SMTPHttpClient const smtpPollInterval = 2 * time.Second done := xerrors.New("done") tkr := r.clock.TickerFunc(ctx, smtpPollInterval, func() error { - req, err := http.NewRequestWithContext(ctx, http.MethodGet, apiURL, nil) + reqCtx, cancel := context.WithTimeout(ctx, r.cfg.SMTPRequestTimeout) + defer cancel() + + req, err := http.NewRequestWithContext(reqCtx, http.MethodGet, apiURL, nil) if err != nil { logger.Error(ctx, "create SMTP API request", slog.Error(err)) r.cfg.Metrics.AddError("smtp_create_request") @@ -317,14 +318,16 @@ func (r *Runner) watchNotificationsSMTP(ctx context.Context, user codersdk.User, if err != nil { logger.Error(ctx, "poll smtp api for notifications", slog.Error(err)) r.cfg.Metrics.AddError("smtp_poll") - return xerrors.Errorf("poll smtp api: %w", err) + return nil } if resp.StatusCode != http.StatusOK { + // discard the response to allow reusing of the connection + _, _ = io.Copy(io.Discard, resp.Body) _ = resp.Body.Close() logger.Error(ctx, "smtp api returned non-200 status", slog.F("status", resp.StatusCode)) r.cfg.Metrics.AddError("smtp_bad_status") - return xerrors.Errorf("smtp api returned status %d", resp.StatusCode) + return nil } var summaries []smtpmock.EmailSummary diff --git a/scaletest/notifications/run_test.go b/scaletest/notifications/run_test.go index 1e198e9edd91d..a9ef6f4b2960e 100644 --- a/scaletest/notifications/run_test.go +++ 
b/scaletest/notifications/run_test.go @@ -212,6 +212,8 @@ func TestRunWithSMTP(t *testing.T) { smtpTrap := mClock.Trap().TickerFunc("smtp") defer smtpTrap.Close() + httpClient := &http.Client{} + // Start receiving runners who will receive notifications receivingRunners := make([]*notifications.Runner, 0, numReceivingUsers) for i := range numReceivingUsers { @@ -228,6 +230,8 @@ func TestRunWithSMTP(t *testing.T) { ReceivingWatchBarrier: receivingWatchBarrier, ExpectedNotificationsIDs: expectedNotificationsIDs, SMTPApiURL: smtpAPIServer.URL, + SMTPRequestTimeout: testutil.WaitLong, + SMTPHttpClient: httpClient, } err := runnerCfg.Validate() require.NoError(t, err) diff --git a/scaletest/prebuilds/config.go b/scaletest/prebuilds/config.go new file mode 100644 index 0000000000000..05f1fc48ad85e --- /dev/null +++ b/scaletest/prebuilds/config.go @@ -0,0 +1,86 @@ +package prebuilds + +import ( + "sync" + "time" + + "github.com/google/uuid" + "golang.org/x/xerrors" + + "github.com/coder/quartz" +) + +type Config struct { + // OrganizationID is the ID of the organization to create the prebuilds in. + OrganizationID uuid.UUID `json:"organization_id"` + // NumPresets is the number of presets the template should have. + NumPresets int `json:"num_presets"` + // NumPresetPrebuilds is the number of prebuilds per preset. + // Total prebuilds = NumPresets * NumPresetPrebuilds + NumPresetPrebuilds int `json:"num_preset_prebuilds"` + + // TemplateVersionJobTimeout is how long to wait for template version + // provisioning jobs to complete. + TemplateVersionJobTimeout time.Duration `json:"template_version_job_timeout"` + + // PrebuildWorkspaceTimeout is how long to wait for all prebuild + // workspaces to be created and completed. + PrebuildWorkspaceTimeout time.Duration `json:"prebuild_workspace_timeout"` + + Metrics *Metrics `json:"-"` + + // SetupBarrier is used to ensure all templates have been created + // before unpausing prebuilds. 
+ SetupBarrier *sync.WaitGroup `json:"-"` + + // CreationBarrier is used to ensure all prebuild creation has completed + // before pausing prebuilds for deletion. + CreationBarrier *sync.WaitGroup `json:"-"` + + // DeletionSetupBarrier is used by the runner owner (CLI/test) to signal when + // prebuilds have been paused, allowing runners to create new template versions + // with 0 prebuilds. Only the owner calls Done(), runners only Wait(). + DeletionSetupBarrier *sync.WaitGroup `json:"-"` + + // DeletionBarrier is used to ensure all templates have been updated + // with 0 prebuilds before resuming prebuilds. + DeletionBarrier *sync.WaitGroup `json:"-"` + + Clock quartz.Clock `json:"-"` +} + +func (c Config) Validate() error { + if c.TemplateVersionJobTimeout <= 0 { + return xerrors.New("template_version_job_timeout must be greater than 0") + } + + if c.PrebuildWorkspaceTimeout <= 0 { + return xerrors.New("prebuild_workspace_timeout must be greater than 0") + } + + if c.SetupBarrier == nil { + return xerrors.New("setup barrier must be set") + } + + if c.CreationBarrier == nil { + return xerrors.New("creation barrier must be set") + } + + if c.DeletionSetupBarrier == nil { + return xerrors.New("deletion setup barrier must be set") + } + + if c.DeletionBarrier == nil { + return xerrors.New("deletion barrier must be set") + } + + if c.Metrics == nil { + return xerrors.New("metrics must be set") + } + + if c.Clock == nil { + return xerrors.New("clock must be set") + } + + return nil +} diff --git a/scaletest/prebuilds/metrics.go b/scaletest/prebuilds/metrics.go new file mode 100644 index 0000000000000..553b874e2d3ec --- /dev/null +++ b/scaletest/prebuilds/metrics.go @@ -0,0 +1,125 @@ +package prebuilds + +import ( + "github.com/prometheus/client_golang/prometheus" +) + +type Metrics struct { + PrebuildJobsCreated prometheus.GaugeVec + PrebuildJobsRunning prometheus.GaugeVec + PrebuildJobsFailed prometheus.GaugeVec + PrebuildJobsCompleted prometheus.GaugeVec + + 
PrebuildDeletionJobsCreated prometheus.GaugeVec + PrebuildDeletionJobsRunning prometheus.GaugeVec + PrebuildDeletionJobsFailed prometheus.GaugeVec + PrebuildDeletionJobsCompleted prometheus.GaugeVec + + PrebuildErrorsTotal prometheus.CounterVec +} + +func NewMetrics(reg prometheus.Registerer) *Metrics { + m := &Metrics{ + PrebuildJobsCreated: *prometheus.NewGaugeVec(prometheus.GaugeOpts{ + Namespace: "coderd", + Subsystem: "scaletest", + Name: "prebuild_jobs_created", + Help: "Number of prebuild jobs that have been created.", + }, []string{"template_name"}), + PrebuildJobsRunning: *prometheus.NewGaugeVec(prometheus.GaugeOpts{ + Namespace: "coderd", + Subsystem: "scaletest", + Name: "prebuild_jobs_running", + Help: "Number of prebuild jobs that are currently running.", + }, []string{"template_name"}), + PrebuildJobsFailed: *prometheus.NewGaugeVec(prometheus.GaugeOpts{ + Namespace: "coderd", + Subsystem: "scaletest", + Name: "prebuild_jobs_failed", + Help: "Number of prebuild jobs that have failed.", + }, []string{"template_name"}), + PrebuildJobsCompleted: *prometheus.NewGaugeVec(prometheus.GaugeOpts{ + Namespace: "coderd", + Subsystem: "scaletest", + Name: "prebuild_jobs_completed", + Help: "Number of prebuild jobs that have completed successfully.", + }, []string{"template_name"}), + PrebuildDeletionJobsCreated: *prometheus.NewGaugeVec(prometheus.GaugeOpts{ + Namespace: "coderd", + Subsystem: "scaletest", + Name: "prebuild_deletion_jobs_created", + Help: "Number of prebuild deletion jobs that have been created.", + }, []string{"template_name"}), + PrebuildDeletionJobsRunning: *prometheus.NewGaugeVec(prometheus.GaugeOpts{ + Namespace: "coderd", + Subsystem: "scaletest", + Name: "prebuild_deletion_jobs_running", + Help: "Number of prebuild deletion jobs that are currently running.", + }, []string{"template_name"}), + PrebuildDeletionJobsFailed: *prometheus.NewGaugeVec(prometheus.GaugeOpts{ + Namespace: "coderd", + Subsystem: "scaletest", + Name: 
"prebuild_deletion_jobs_failed", + Help: "Number of prebuild deletion jobs that have failed.", + }, []string{"template_name"}), + PrebuildDeletionJobsCompleted: *prometheus.NewGaugeVec(prometheus.GaugeOpts{ + Namespace: "coderd", + Subsystem: "scaletest", + Name: "prebuild_deletion_jobs_completed", + Help: "Number of prebuild deletion jobs that have completed successfully.", + }, []string{"template_name"}), + PrebuildErrorsTotal: *prometheus.NewCounterVec(prometheus.CounterOpts{ + Namespace: "coderd", + Subsystem: "scaletest", + Name: "prebuild_errors_total", + Help: "Total number of prebuild errors", + }, []string{"template_name", "action"}), + } + + reg.MustRegister(m.PrebuildJobsCreated) + reg.MustRegister(m.PrebuildJobsRunning) + reg.MustRegister(m.PrebuildJobsFailed) + reg.MustRegister(m.PrebuildJobsCompleted) + reg.MustRegister(m.PrebuildDeletionJobsCreated) + reg.MustRegister(m.PrebuildDeletionJobsRunning) + reg.MustRegister(m.PrebuildDeletionJobsFailed) + reg.MustRegister(m.PrebuildDeletionJobsCompleted) + reg.MustRegister(m.PrebuildErrorsTotal) + return m +} + +func (m *Metrics) SetJobsCreated(count int, templateName string) { + m.PrebuildJobsCreated.WithLabelValues(templateName).Set(float64(count)) +} + +func (m *Metrics) SetJobsRunning(count int, templateName string) { + m.PrebuildJobsRunning.WithLabelValues(templateName).Set(float64(count)) +} + +func (m *Metrics) SetJobsFailed(count int, templateName string) { + m.PrebuildJobsFailed.WithLabelValues(templateName).Set(float64(count)) +} + +func (m *Metrics) SetJobsCompleted(count int, templateName string) { + m.PrebuildJobsCompleted.WithLabelValues(templateName).Set(float64(count)) +} + +func (m *Metrics) SetDeletionJobsCreated(count int, templateName string) { + m.PrebuildDeletionJobsCreated.WithLabelValues(templateName).Set(float64(count)) +} + +func (m *Metrics) SetDeletionJobsRunning(count int, templateName string) { + m.PrebuildDeletionJobsRunning.WithLabelValues(templateName).Set(float64(count)) +} 
+ +func (m *Metrics) SetDeletionJobsFailed(count int, templateName string) { + m.PrebuildDeletionJobsFailed.WithLabelValues(templateName).Set(float64(count)) +} + +func (m *Metrics) SetDeletionJobsCompleted(count int, templateName string) { + m.PrebuildDeletionJobsCompleted.WithLabelValues(templateName).Set(float64(count)) +} + +func (m *Metrics) AddError(templateName string, action string) { + m.PrebuildErrorsTotal.WithLabelValues(templateName, action).Inc() +} diff --git a/scaletest/prebuilds/run.go b/scaletest/prebuilds/run.go new file mode 100644 index 0000000000000..7a62e3638bf8b --- /dev/null +++ b/scaletest/prebuilds/run.go @@ -0,0 +1,343 @@ +package prebuilds + +import ( + "bytes" + "context" + _ "embed" + "html/template" + "io" + "time" + + "golang.org/x/xerrors" + + "github.com/google/uuid" + + "cdr.dev/slog" + "cdr.dev/slog/sloggers/sloghuman" + "github.com/coder/coder/v2/coderd/tracing" + "github.com/coder/coder/v2/codersdk" + "github.com/coder/coder/v2/scaletest/harness" + "github.com/coder/coder/v2/scaletest/loadtestutil" +) + +type Runner struct { + client *codersdk.Client + cfg Config + + template codersdk.Template +} + +var ( + _ harness.Runnable = &Runner{} + _ harness.Cleanable = &Runner{} +) + +func NewRunner(client *codersdk.Client, cfg Config) *Runner { + return &Runner{ + client: client, + cfg: cfg, + } +} + +func (r *Runner) Run(ctx context.Context, id string, logs io.Writer) error { + ctx, span := tracing.StartSpan(ctx) + defer span.End() + + reachedSetupBarrier := false + reachedCreationBarrier := false + reachedDeletionBarrier := false + defer func() { + if !reachedSetupBarrier { + r.cfg.SetupBarrier.Done() + } + if !reachedCreationBarrier { + r.cfg.CreationBarrier.Done() + } + if !reachedDeletionBarrier { + r.cfg.DeletionBarrier.Done() + } + }() + + logs = loadtestutil.NewSyncWriter(logs) + logger := slog.Make(sloghuman.Sink(logs)).Leveled(slog.LevelDebug) + r.client.SetLogger(logger) + r.client.SetLogBodies(true) + + templateName := 
"scaletest-prebuilds-template-" + id + + version, err := r.createTemplateVersion(ctx, uuid.Nil, r.cfg.NumPresets, r.cfg.NumPresetPrebuilds) + if err != nil { + r.cfg.Metrics.AddError(templateName, "create_template_version") + return err + } + + templateReq := codersdk.CreateTemplateRequest{ + Name: templateName, + Description: "`coder exp scaletest prebuilds` template", + VersionID: version.ID, + } + templ, err := r.client.CreateTemplate(ctx, r.cfg.OrganizationID, templateReq) + if err != nil { + r.cfg.Metrics.AddError(templateName, "create_template") + return xerrors.Errorf("create template: %w", err) + } + logger.Info(ctx, "created template", slog.F("template_id", templ.ID)) + + r.template = templ + + logger.Info(ctx, "waiting for all runners to reach setup barrier") + reachedSetupBarrier = true + r.cfg.SetupBarrier.Done() + r.cfg.SetupBarrier.Wait() + logger.Info(ctx, "all runners reached setup barrier, proceeding with prebuild creation test") + + err = r.measureCreation(ctx, logger) + if err != nil { + return err + } + + logger.Info(ctx, "waiting for all runners to reach creation barrier") + reachedCreationBarrier = true + r.cfg.CreationBarrier.Done() + r.cfg.CreationBarrier.Wait() + logger.Info(ctx, "all runners reached creation barrier") + + logger.Info(ctx, "waiting for runner owner to pause prebuilds (deletion setup barrier)") + r.cfg.DeletionSetupBarrier.Wait() + logger.Info(ctx, "prebuilds paused, preparing for deletion") + + // Now prepare for deletion by creating an empty template version + // At this point, prebuilds should be paused by the caller + logger.Info(ctx, "creating empty template version for deletion") + emptyVersion, err := r.createTemplateVersion(ctx, r.template.ID, 0, 0) + if err != nil { + r.cfg.Metrics.AddError(r.template.Name, "create_empty_template_version") + return xerrors.Errorf("create empty template version for deletion: %w", err) + } + + err = r.client.UpdateActiveTemplateVersion(ctx, r.template.ID, 
codersdk.UpdateActiveTemplateVersion{ + ID: emptyVersion.ID, + }) + if err != nil { + r.cfg.Metrics.AddError(r.template.Name, "update_active_template_version") + return xerrors.Errorf("update active template version to empty for deletion: %w", err) + } + + logger.Info(ctx, "waiting for all runners to reach deletion barrier") + reachedDeletionBarrier = true + r.cfg.DeletionBarrier.Done() + r.cfg.DeletionBarrier.Wait() + logger.Info(ctx, "all runners reached deletion barrier, proceeding with prebuild deletion test") + + err = r.measureDeletion(ctx, logger) + if err != nil { + return err + } + + return nil +} + +func (r *Runner) measureCreation(ctx context.Context, logger slog.Logger) error { + testStartTime := time.Now().UTC() + const workspacesPollInterval = 500 * time.Millisecond + + targetNumWorkspaces := r.cfg.NumPresets * r.cfg.NumPresetPrebuilds + + workspacesCtx, cancel := context.WithTimeout(ctx, r.cfg.PrebuildWorkspaceTimeout) + defer cancel() + + tkr := r.cfg.Clock.TickerFunc(workspacesCtx, workspacesPollInterval, func() error { + workspaces, err := r.client.Workspaces(workspacesCtx, codersdk.WorkspaceFilter{ + Template: r.template.Name, + }) + if err != nil { + return xerrors.Errorf("list workspaces: %w", err) + } + + createdCount := len(workspaces.Workspaces) + runningCount := 0 + failedCount := 0 + succeededCount := 0 + + for _, ws := range workspaces.Workspaces { + switch ws.LatestBuild.Job.Status { + case codersdk.ProvisionerJobRunning: + runningCount++ + case codersdk.ProvisionerJobFailed, codersdk.ProvisionerJobCanceled: + failedCount++ + case codersdk.ProvisionerJobSucceeded: + succeededCount++ + } + } + + r.cfg.Metrics.SetJobsCreated(createdCount, r.template.Name) + r.cfg.Metrics.SetJobsRunning(runningCount, r.template.Name) + r.cfg.Metrics.SetJobsFailed(failedCount, r.template.Name) + r.cfg.Metrics.SetJobsCompleted(succeededCount, r.template.Name) + + if succeededCount >= targetNumWorkspaces { + // All jobs succeeded + return errTickerDone + } + + 
return nil + }, "waitForPrebuildWorkspaces") + err := tkr.Wait() + if !xerrors.Is(err, errTickerDone) { + r.cfg.Metrics.AddError(r.template.Name, "wait_for_workspaces") + return xerrors.Errorf("wait for workspaces: %w", err) + } + + logger.Info(ctx, "all prebuild workspaces created successfully", slog.F("template_name", r.template.Name), slog.F("duration", time.Since(testStartTime).String())) + return nil +} + +func (r *Runner) measureDeletion(ctx context.Context, logger slog.Logger) error { + deletionStartTime := time.Now().UTC() + const deletionPollInterval = 500 * time.Millisecond + + targetNumWorkspaces := r.cfg.NumPresets * r.cfg.NumPresetPrebuilds + + deletionCtx, cancel := context.WithTimeout(ctx, r.cfg.PrebuildWorkspaceTimeout) + defer cancel() + + tkr := r.cfg.Clock.TickerFunc(deletionCtx, deletionPollInterval, func() error { + workspaces, err := r.client.Workspaces(deletionCtx, codersdk.WorkspaceFilter{ + Template: r.template.Name, + }) + if err != nil { + return xerrors.Errorf("list workspaces: %w", err) + } + + createdCount := 0 + runningCount := 0 + failedCount := 0 + + for _, ws := range workspaces.Workspaces { + if ws.LatestBuild.Transition == codersdk.WorkspaceTransitionDelete { + createdCount++ + switch ws.LatestBuild.Job.Status { + case codersdk.ProvisionerJobRunning: + runningCount++ + case codersdk.ProvisionerJobFailed, codersdk.ProvisionerJobCanceled: + failedCount++ + } + } + } + + completedCount := targetNumWorkspaces - len(workspaces.Workspaces) + createdCount += completedCount + + r.cfg.Metrics.SetDeletionJobsCreated(createdCount, r.template.Name) + r.cfg.Metrics.SetDeletionJobsRunning(runningCount, r.template.Name) + r.cfg.Metrics.SetDeletionJobsFailed(failedCount, r.template.Name) + r.cfg.Metrics.SetDeletionJobsCompleted(completedCount, r.template.Name) + + if len(workspaces.Workspaces) == 0 { + return errTickerDone + } + + return nil + }, "waitForPrebuildWorkspacesDeletion") + err := tkr.Wait() + if !xerrors.Is(err, errTickerDone) { + 
r.cfg.Metrics.AddError(r.template.Name, "wait_for_workspace_deletion") + return xerrors.Errorf("wait for workspace deletion: %w", err) + } + + logger.Info(ctx, "all prebuild workspaces deleted successfully", slog.F("template_name", r.template.Name), slog.F("duration", time.Since(deletionStartTime).String())) + return nil +} + +func (r *Runner) createTemplateVersion(ctx context.Context, templateID uuid.UUID, numPresets, numPresetPrebuilds int) (codersdk.TemplateVersion, error) { + tarData, err := TemplateTarData(numPresets, numPresetPrebuilds) + if err != nil { + return codersdk.TemplateVersion{}, xerrors.Errorf("create prebuilds template tar: %w", err) + } + uploadResp, err := r.client.Upload(ctx, codersdk.ContentTypeTar, bytes.NewReader(tarData)) + if err != nil { + return codersdk.TemplateVersion{}, xerrors.Errorf("upload prebuilds template tar: %w", err) + } + + versionReq := codersdk.CreateTemplateVersionRequest{ + TemplateID: templateID, + FileID: uploadResp.ID, + Message: "Template version for scaletest prebuilds", + StorageMethod: codersdk.ProvisionerStorageMethodFile, + Provisioner: codersdk.ProvisionerTypeTerraform, + } + version, err := r.client.CreateTemplateVersion(ctx, r.cfg.OrganizationID, versionReq) + if err != nil { + return codersdk.TemplateVersion{}, xerrors.Errorf("create template version: %w", err) + } + if version.MatchedProvisioners != nil && version.MatchedProvisioners.Count == 0 { + return codersdk.TemplateVersion{}, xerrors.Errorf("no provisioners matched for template version") + } + + const pollInterval = 2 * time.Second + versionCtx, cancel := context.WithTimeout(ctx, r.cfg.TemplateVersionJobTimeout) + defer cancel() + + tkr := r.cfg.Clock.TickerFunc(versionCtx, pollInterval, func() error { + version, err := r.client.TemplateVersion(versionCtx, version.ID) + if err != nil { + return xerrors.Errorf("get template version: %w", err) + } + switch version.Job.Status { + case codersdk.ProvisionerJobSucceeded: + return errTickerDone + case 
codersdk.ProvisionerJobPending, codersdk.ProvisionerJobRunning: + return nil + default: + return xerrors.Errorf("template version provisioning failed: status %s", version.Job.Status) + } + }) + err = tkr.Wait() + if !xerrors.Is(err, errTickerDone) { + return codersdk.TemplateVersion{}, xerrors.Errorf("wait for template version provisioning: %w", err) + } + return version, nil +} + +var errTickerDone = xerrors.New("done") + +func (r *Runner) Cleanup(ctx context.Context, _ string, logs io.Writer) error { + logs = loadtestutil.NewSyncWriter(logs) + logger := slog.Make(sloghuman.Sink(logs)).Leveled(slog.LevelDebug) + + logger.Info(ctx, "deleting template", slog.F("template_name", r.template.Name)) + + err := r.client.DeleteTemplate(ctx, r.template.ID) + if err != nil { + return xerrors.Errorf("delete template: %w", err) + } + + logger.Info(ctx, "template deleted successfully", slog.F("template_name", r.template.Name)) + return nil +} + +//go:embed tf/main.tf.tpl +var templateContent string + +func TemplateTarData(numPresets, numPresetPrebuilds int) ([]byte, error) { + tmpl, err := template.New("prebuilds-template").Parse(templateContent) + if err != nil { + return nil, err + } + result := bytes.Buffer{} + err = tmpl.Execute(&result, map[string]int{ + "NumPresets": numPresets, + "NumPresetPrebuilds": numPresetPrebuilds, + }) + if err != nil { + return nil, err + } + files := map[string][]byte{ + "main.tf": result.Bytes(), + } + tarBytes, err := loadtestutil.CreateTarFromFiles(files) + if err != nil { + return nil, err + } + return tarBytes, nil +} diff --git a/scaletest/prebuilds/tf/main.tf.tpl b/scaletest/prebuilds/tf/main.tf.tpl new file mode 100644 index 0000000000000..9465281ac2ba9 --- /dev/null +++ b/scaletest/prebuilds/tf/main.tf.tpl @@ -0,0 +1,18 @@ +terraform { + required_providers { + coder = { + source = "coder/coder" + version = "2.5.3" + } + } +} + +resource "null_resource" "workspace" {} + +data "coder_workspace_preset" "presets" { + count = {{.NumPresets}} 
+ name = "preset-${count.index + 1}" + prebuilds { + instances = {{.NumPresetPrebuilds}} + } +} diff --git a/scaletest/taskstatus/client.go b/scaletest/taskstatus/client.go new file mode 100644 index 0000000000000..d60f20ab8be07 --- /dev/null +++ b/scaletest/taskstatus/client.go @@ -0,0 +1,144 @@ +package taskstatus + +import ( + "context" + "net/http" + "net/url" + + "github.com/google/uuid" + "golang.org/x/xerrors" + + "cdr.dev/slog" + "github.com/coder/coder/v2/codersdk" + "github.com/coder/coder/v2/codersdk/agentsdk" + "github.com/coder/quartz" +) + +// client abstracts the details of using codersdk.Client for workspace operations. +// This interface allows for easier testing by enabling mock implementations and +// provides a cleaner separation of concerns. +// +// The interface is designed to be initialized in two phases: +// 1. Create the client with newClient(coderClient) +// 2. Configure logging when the io.Writer is available in Run() +type client interface { + // CreateUserWorkspace creates a workspace for a user. + CreateUserWorkspace(ctx context.Context, userID string, req codersdk.CreateWorkspaceRequest) (codersdk.Workspace, error) + + // WorkspaceByOwnerAndName retrieves a workspace by owner and name. + WorkspaceByOwnerAndName(ctx context.Context, owner string, name string, params codersdk.WorkspaceOptions) (codersdk.Workspace, error) + + // WorkspaceExternalAgentCredentials retrieves credentials for an external agent. + WorkspaceExternalAgentCredentials(ctx context.Context, workspaceID uuid.UUID, agentName string) (codersdk.ExternalAgentCredentials, error) + + // watchWorkspace watches for updates to a workspace. + watchWorkspace(ctx context.Context, workspaceID uuid.UUID) (<-chan codersdk.Workspace, error) + + // deleteWorkspace deletes the workspace by creating a build with delete transition. 
+ deleteWorkspace(ctx context.Context, workspaceID uuid.UUID) error + + // initialize sets up the client with the provided logger, which is only available after Run() is called. + initialize(logger slog.Logger) +} + +// appStatusPatcher abstracts the details of using agentsdk.Client for updating app status. +// This interface is separate from client because it requires an agent token which is only +// available after creating an external workspace. +type appStatusPatcher interface { + // patchAppStatus updates the status of a workspace app. + patchAppStatus(ctx context.Context, req agentsdk.PatchAppStatus) error + + // initialize sets up the patcher with the provided logger and agent token. + initialize(logger slog.Logger, agentToken string) +} + +// sdkClient is the concrete implementation of the client interface using +// codersdk.Client. +type sdkClient struct { + coderClient *codersdk.Client + clock quartz.Clock + logger slog.Logger +} + +// newClient creates a new client implementation using the provided codersdk.Client. 
+func newClient(coderClient *codersdk.Client) client { + return &sdkClient{ + coderClient: coderClient, + clock: quartz.NewReal(), + } +} + +func (c *sdkClient) CreateUserWorkspace(ctx context.Context, userID string, req codersdk.CreateWorkspaceRequest) (codersdk.Workspace, error) { + return c.coderClient.CreateUserWorkspace(ctx, userID, req) +} + +func (c *sdkClient) WorkspaceByOwnerAndName(ctx context.Context, owner string, name string, params codersdk.WorkspaceOptions) (codersdk.Workspace, error) { + return c.coderClient.WorkspaceByOwnerAndName(ctx, owner, name, params) +} + +func (c *sdkClient) WorkspaceExternalAgentCredentials(ctx context.Context, workspaceID uuid.UUID, agentName string) (codersdk.ExternalAgentCredentials, error) { + return c.coderClient.WorkspaceExternalAgentCredentials(ctx, workspaceID, agentName) +} + +func (c *sdkClient) watchWorkspace(ctx context.Context, workspaceID uuid.UUID) (<-chan codersdk.Workspace, error) { + return c.coderClient.WatchWorkspace(ctx, workspaceID) +} + +func (c *sdkClient) deleteWorkspace(ctx context.Context, workspaceID uuid.UUID) error { + // Create a build with delete transition to delete the workspace + _, err := c.coderClient.CreateWorkspaceBuild(ctx, workspaceID, codersdk.CreateWorkspaceBuildRequest{ + Transition: codersdk.WorkspaceTransitionDelete, + Reason: codersdk.CreateWorkspaceBuildReasonCLI, + }) + if err != nil { + return xerrors.Errorf("create delete build: %w", err) + } + return nil +} + +func (c *sdkClient) initialize(logger slog.Logger) { + // Configure the coder client logging + c.logger = logger + c.coderClient.SetLogger(logger) + c.coderClient.SetLogBodies(true) +} + +// sdkAppStatusPatcher is the concrete implementation of the appStatusPatcher interface +// using agentsdk.Client. +type sdkAppStatusPatcher struct { + agentClient *agentsdk.Client + url *url.URL + httpClient *http.Client +} + +// newAppStatusPatcher creates a new appStatusPatcher implementation. 
+func newAppStatusPatcher(client *codersdk.Client) appStatusPatcher { + return &sdkAppStatusPatcher{ + url: client.URL, + httpClient: client.HTTPClient, + } +} + +func (p *sdkAppStatusPatcher) patchAppStatus(ctx context.Context, req agentsdk.PatchAppStatus) error { + if p.agentClient == nil { + panic("agentClient not initialized - call initialize first") + } + return p.agentClient.PatchAppStatus(ctx, req) +} + +func (p *sdkAppStatusPatcher) initialize(logger slog.Logger, agentToken string) { + // Create and configure the agent client with the provided token + p.agentClient = agentsdk.New( + p.url, + agentsdk.WithFixedToken(agentToken), + codersdk.WithHTTPClient(p.httpClient), + codersdk.WithLogger(logger), + codersdk.WithLogBodies(), + ) +} + +// Ensure sdkClient implements the client interface. +var _ client = (*sdkClient)(nil) + +// Ensure sdkAppStatusPatcher implements the appStatusPatcher interface. +var _ appStatusPatcher = (*sdkAppStatusPatcher)(nil) diff --git a/scaletest/taskstatus/config.go b/scaletest/taskstatus/config.go new file mode 100644 index 0000000000000..1c3f26cfabfa1 --- /dev/null +++ b/scaletest/taskstatus/config.go @@ -0,0 +1,73 @@ +package taskstatus + +import ( + "sync" + "time" + + "github.com/google/uuid" + "golang.org/x/xerrors" +) + +type Config struct { + // TemplateID is the template ID to use for creating the external workspace. + TemplateID uuid.UUID `json:"template_id"` + + // WorkspaceName is the name for the external workspace to create. + WorkspaceName string `json:"workspace_name"` + + // AppSlug is the slug of the app designated as the AI Agent. + AppSlug string `json:"app_slug"` + + // When the runner has connected to the watch-ws endpoint, it will call Done once on this wait group. Used to + // coordinate multiple runners from the higher layer. + ConnectedWaitGroup *sync.WaitGroup `json:"-"` + + // We read on this channel before starting to report task statuses. Used to coordinate multiple runners from the + // higher layer. 
+ StartReporting chan struct{} `json:"-"` + + // Time between reporting task statuses. + ReportStatusPeriod time.Duration `json:"report_status_period"` + + // Total time to report task statuses, starting from when we successfully read from the StartReporting channel. + ReportStatusDuration time.Duration `json:"report_status_duration"` + + Metrics *Metrics `json:"-"` + MetricLabelValues []string `json:"metric_label_values"` +} + +func (c *Config) Validate() error { + if c.TemplateID == uuid.Nil { + return xerrors.Errorf("validate template_id: must not be nil") + } + + if c.WorkspaceName == "" { + return xerrors.Errorf("validate workspace_name: must not be empty") + } + + if c.AppSlug == "" { + return xerrors.Errorf("validate app_slug: must not be empty") + } + + if c.ConnectedWaitGroup == nil { + return xerrors.Errorf("validate connected_wait_group: must not be nil") + } + + if c.StartReporting == nil { + return xerrors.Errorf("validate start_reporting: must not be nil") + } + + if c.ReportStatusPeriod <= 0 { + return xerrors.Errorf("validate report_status_period: must be greater than zero") + } + + if c.ReportStatusDuration <= 0 { + return xerrors.Errorf("validate report_status_duration: must be greater than zero") + } + + if c.Metrics == nil { + return xerrors.Errorf("validate metrics: must not be nil") + } + + return nil +} diff --git a/scaletest/taskstatus/metrics.go b/scaletest/taskstatus/metrics.go new file mode 100644 index 0000000000000..1b312a41a3338 --- /dev/null +++ b/scaletest/taskstatus/metrics.go @@ -0,0 +1,36 @@ +package taskstatus + +import "github.com/prometheus/client_golang/prometheus" + +type Metrics struct { + TaskStatusToWorkspaceUpdateLatencySeconds prometheus.HistogramVec + MissingStatusUpdatesTotal prometheus.CounterVec + ReportTaskStatusErrorsTotal prometheus.CounterVec +} + +func NewMetrics(reg prometheus.Registerer, labelNames ...string) *Metrics { + m := &Metrics{ + TaskStatusToWorkspaceUpdateLatencySeconds: 
*prometheus.NewHistogramVec(prometheus.HistogramOpts{ + Namespace: "coderd", + Subsystem: "scaletest", + Name: "task_status_to_workspace_update_latency_seconds", + Help: "Time in seconds between reporting a task status and receiving the workspace update.", + }, labelNames), + MissingStatusUpdatesTotal: *prometheus.NewCounterVec(prometheus.CounterOpts{ + Namespace: "coderd", + Subsystem: "scaletest", + Name: "missing_status_updates_total", + Help: "Total number of missing status updates.", + }, labelNames), + ReportTaskStatusErrorsTotal: *prometheus.NewCounterVec(prometheus.CounterOpts{ + Namespace: "coderd", + Subsystem: "scaletest", + Name: "report_task_status_errors_total", + Help: "Total number of errors when reporting task status.", + }, labelNames), + } + reg.MustRegister(m.TaskStatusToWorkspaceUpdateLatencySeconds) + reg.MustRegister(m.MissingStatusUpdatesTotal) + reg.MustRegister(m.ReportTaskStatusErrorsTotal) + return m +} diff --git a/scaletest/taskstatus/run.go b/scaletest/taskstatus/run.go new file mode 100644 index 0000000000000..87f0cbedd3b29 --- /dev/null +++ b/scaletest/taskstatus/run.go @@ -0,0 +1,340 @@ +package taskstatus + +import ( + "context" + "io" + "strconv" + "strings" + "sync" + "time" + + "github.com/google/uuid" + "golang.org/x/xerrors" + + "cdr.dev/slog" + "cdr.dev/slog/sloggers/sloghuman" + + "github.com/coder/coder/v2/codersdk" + "github.com/coder/coder/v2/codersdk/agentsdk" + "github.com/coder/coder/v2/scaletest/harness" + "github.com/coder/coder/v2/scaletest/loadtestutil" + "github.com/coder/quartz" +) + +const statusUpdatePrefix = "scaletest status update:" + +// createExternalWorkspaceResult contains the results from creating an external workspace. 
+type createExternalWorkspaceResult struct { + workspaceID uuid.UUID + agentToken string +} + +type Runner struct { + client client + patcher appStatusPatcher + cfg Config + + logger slog.Logger + + // workspaceID is set after creating the external workspace + workspaceID uuid.UUID + + mu sync.Mutex + reportTimes map[int]time.Time + doneReporting bool + + // testing only + clock quartz.Clock +} + +var ( + _ harness.Runnable = &Runner{} + _ harness.Cleanable = &Runner{} +) + +// NewRunner creates a new Runner with the provided codersdk.Client and configuration. +func NewRunner(coderClient *codersdk.Client, cfg Config) *Runner { + return &Runner{ + client: newClient(coderClient), + patcher: newAppStatusPatcher(coderClient), + cfg: cfg, + clock: quartz.NewReal(), + reportTimes: make(map[int]time.Time), + } +} + +func (r *Runner) Run(ctx context.Context, name string, logs io.Writer) error { + shouldMarkConnectedDone := true + defer func() { + if shouldMarkConnectedDone { + r.cfg.ConnectedWaitGroup.Done() + } + }() + + // ensure these labels are initialized, so we see the time series right away in prometheus. 
+ r.cfg.Metrics.MissingStatusUpdatesTotal.WithLabelValues(r.cfg.MetricLabelValues...).Add(0) + r.cfg.Metrics.ReportTaskStatusErrorsTotal.WithLabelValues(r.cfg.MetricLabelValues...).Add(0) + + logs = loadtestutil.NewSyncWriter(logs) + r.logger = slog.Make(sloghuman.Sink(logs)).Leveled(slog.LevelDebug).Named(name) + r.client.initialize(r.logger) + + // Create the external workspace + r.logger.Info(ctx, "creating external workspace", + slog.F("template_id", r.cfg.TemplateID), + slog.F("workspace_name", r.cfg.WorkspaceName)) + + result, err := r.createExternalWorkspace(ctx, codersdk.CreateWorkspaceRequest{ + TemplateID: r.cfg.TemplateID, + Name: r.cfg.WorkspaceName, + }) + if err != nil { + r.cfg.Metrics.ReportTaskStatusErrorsTotal.WithLabelValues(r.cfg.MetricLabelValues...).Inc() + return xerrors.Errorf("create external workspace: %w", err) + } + + // Set the workspace ID + r.workspaceID = result.workspaceID + r.logger.Info(ctx, "created external workspace", slog.F("workspace_id", r.workspaceID)) + + // Initialize the patcher with the agent token + r.patcher.initialize(r.logger, result.agentToken) + r.logger.Info(ctx, "initialized app status patcher with agent token") + + workspaceUpdatesCtx, cancelWorkspaceUpdates := context.WithCancel(ctx) + defer cancelWorkspaceUpdates() + workspaceUpdatesResult := make(chan error, 1) + shouldMarkConnectedDone = false // we are passing this responsibility to the watchWorkspaceUpdates goroutine + go func() { + workspaceUpdatesResult <- r.watchWorkspaceUpdates(workspaceUpdatesCtx) + }() + + err = r.reportTaskStatus(ctx) + if err != nil { + return xerrors.Errorf("report task status: %w", err) + } + + err = <-workspaceUpdatesResult + if err != nil { + return xerrors.Errorf("watch workspace: %w", err) + } + return nil +} + +// Cleanup deletes the external workspace created by this runner. 
+func (r *Runner) Cleanup(ctx context.Context, id string, logs io.Writer) error { + if r.workspaceID == uuid.Nil { + // No workspace was created, nothing to cleanup + return nil + } + + logs = loadtestutil.NewSyncWriter(logs) + logger := slog.Make(sloghuman.Sink(logs)).Leveled(slog.LevelDebug).Named(id) + + logger.Info(ctx, "deleting external workspace", slog.F("workspace_id", r.workspaceID)) + + err := r.client.deleteWorkspace(ctx, r.workspaceID) + if err != nil { + logger.Error(ctx, "failed to delete external workspace", + slog.F("workspace_id", r.workspaceID), + slog.Error(err)) + return xerrors.Errorf("delete external workspace: %w", err) + } + + logger.Info(ctx, "successfully deleted external workspace", slog.F("workspace_id", r.workspaceID)) + return nil +} + +func (r *Runner) watchWorkspaceUpdates(ctx context.Context) error { + shouldMarkConnectedDone := true + defer func() { + if shouldMarkConnectedDone { + r.cfg.ConnectedWaitGroup.Done() + } + }() + updates, err := r.client.watchWorkspace(ctx, r.workspaceID) + if err != nil { + return xerrors.Errorf("watch workspace: %w", err) + } + shouldMarkConnectedDone = false + r.cfg.ConnectedWaitGroup.Done() + defer func() { + r.mu.Lock() + defer r.mu.Unlock() + r.cfg.Metrics.MissingStatusUpdatesTotal. + WithLabelValues(r.cfg.MetricLabelValues...). + Add(float64(len(r.reportTimes))) + }() + for { + select { + case <-ctx.Done(): + return ctx.Err() + case workspace := <-updates: + if workspace.LatestAppStatus == nil { + continue + } + msgNo, ok := parseStatusMessage(workspace.LatestAppStatus.Message) + if !ok { + continue + } + + r.mu.Lock() + reportTime, ok := r.reportTimes[msgNo] + delete(r.reportTimes, msgNo) + allDone := r.doneReporting && len(r.reportTimes) == 0 + r.mu.Unlock() + + if !ok { + return xerrors.Errorf("report time not found for message %d", msgNo) + } + latency := r.clock.Since(reportTime, "watchWorkspaceUpdates") + r.cfg.Metrics.TaskStatusToWorkspaceUpdateLatencySeconds. 
+				WithLabelValues(r.cfg.MetricLabelValues...).
+				Observe(latency.Seconds())
+			if allDone {
+				return nil
+			}
+		}
+	}
+}
+
+func (r *Runner) reportTaskStatus(ctx context.Context) error {
+	defer func() {
+		r.mu.Lock()
+		defer r.mu.Unlock()
+		r.doneReporting = true
+	}()
+
+	select {
+	case <-ctx.Done():
+		return ctx.Err()
+	case <-r.cfg.StartReporting:
+		r.logger.Info(ctx, "starting to report task status")
+	}
+	startedReporting := r.clock.Now("reportTaskStatus", "startedReporting")
+	msgNo := 0
+
+	done := xerrors.New("done reporting task status") // sentinel error
+	waiter := r.clock.TickerFunc(ctx, r.cfg.ReportStatusPeriod, func() error {
+		r.mu.Lock()
+		now := r.clock.Now("reportTaskStatus", "tick")
+		r.reportTimes[msgNo] = now
+		// It's important that we set doneReporting along with a final report, since the watchWorkspaceUpdates goroutine
+		// needs an update to wake up and check if we're done. We could introduce a secondary signaling channel, but
+		// it adds a lot of complexity and will be hard to test. We expect the tick period to be much smaller than the
+		// report status duration, so one extra tick is not a big deal.
+		if now.After(startedReporting.Add(r.cfg.ReportStatusDuration)) {
+			r.doneReporting = true
+		}
+		r.mu.Unlock()
+
+		err := r.patcher.patchAppStatus(ctx, agentsdk.PatchAppStatus{
+			AppSlug: r.cfg.AppSlug,
+			Message: statusUpdatePrefix + strconv.Itoa(msgNo),
+			State:   codersdk.WorkspaceAppStatusStateWorking,
+			URI:     "https://example.com/example-status/",
+		})
+		if err != nil {
+			r.logger.Error(ctx, "failed to report task status", slog.Error(err))
+			r.cfg.Metrics.ReportTaskStatusErrorsTotal.WithLabelValues(r.cfg.MetricLabelValues...).Inc()
+		}
+		msgNo++
+		// note that it's safe to read r.doneReporting here without a lock because we're the only goroutine that sets
+		// it.
+ if r.doneReporting { + return done // causes the ticker to exit due to the sentinel error + } + return nil + }, "reportTaskStatus") + err := waiter.Wait() + if xerrors.Is(err, done) { + return nil + } + return err +} + +func parseStatusMessage(message string) (int, bool) { + if !strings.HasPrefix(message, statusUpdatePrefix) { + return 0, false + } + message = strings.TrimPrefix(message, statusUpdatePrefix) + msgNo, err := strconv.Atoi(message) + if err != nil { + return 0, false + } + return msgNo, true +} + +// createExternalWorkspace creates an external workspace and returns the workspace ID +// and agent token for the first external agent found in the workspace resources. +func (r *Runner) createExternalWorkspace(ctx context.Context, req codersdk.CreateWorkspaceRequest) (createExternalWorkspaceResult, error) { + // Create the workspace + workspace, err := r.client.CreateUserWorkspace(ctx, codersdk.Me, req) + if err != nil { + return createExternalWorkspaceResult{}, err + } + + r.logger.Info(ctx, "waiting for workspace build to complete", + slog.F("workspace_name", workspace.Name), + slog.F("workspace_id", workspace.ID)) + + // Poll the workspace until the build is complete + var finalWorkspace codersdk.Workspace + buildComplete := xerrors.New("build complete") // sentinel error + waiter := r.clock.TickerFunc(ctx, 30*time.Second, func() error { + // Get the workspace with latest build details + workspace, err := r.client.WorkspaceByOwnerAndName(ctx, codersdk.Me, workspace.Name, codersdk.WorkspaceOptions{}) + if err != nil { + r.logger.Error(ctx, "failed to poll workspace while waiting for build to complete", slog.Error(err)) + return nil + } + + jobStatus := workspace.LatestBuild.Job.Status + r.logger.Debug(ctx, "checking workspace build status", + slog.F("status", jobStatus), + slog.F("build_id", workspace.LatestBuild.ID)) + + switch jobStatus { + case codersdk.ProvisionerJobSucceeded: + // Build succeeded + r.logger.Info(ctx, "workspace build succeeded") + 
finalWorkspace = workspace + return buildComplete + case codersdk.ProvisionerJobFailed: + return xerrors.Errorf("workspace build failed: %s", workspace.LatestBuild.Job.Error) + case codersdk.ProvisionerJobCanceled: + return xerrors.Errorf("workspace build was canceled") + case codersdk.ProvisionerJobPending, codersdk.ProvisionerJobRunning, codersdk.ProvisionerJobCanceling: + // Still in progress, continue polling + return nil + default: + return xerrors.Errorf("unexpected job status: %s", jobStatus) + } + }, "createExternalWorkspace") + + err = waiter.Wait() + if err != nil && !xerrors.Is(err, buildComplete) { + return createExternalWorkspaceResult{}, xerrors.Errorf("wait for build completion: %w", err) + } + + // Find external agents in resources + for _, resource := range finalWorkspace.LatestBuild.Resources { + if resource.Type != "coder_external_agent" || len(resource.Agents) == 0 { + continue + } + + // Get credentials for the first agent + agent := resource.Agents[0] + credentials, err := r.client.WorkspaceExternalAgentCredentials(ctx, finalWorkspace.ID, agent.Name) + if err != nil { + return createExternalWorkspaceResult{}, err + } + + return createExternalWorkspaceResult{ + workspaceID: finalWorkspace.ID, + agentToken: credentials.AgentToken, + }, nil + } + + return createExternalWorkspaceResult{}, xerrors.Errorf("no external agent found in workspace") +} diff --git a/scaletest/taskstatus/run_internal_test.go b/scaletest/taskstatus/run_internal_test.go new file mode 100644 index 0000000000000..7a82d4c6b2ad3 --- /dev/null +++ b/scaletest/taskstatus/run_internal_test.go @@ -0,0 +1,714 @@ +package taskstatus + +import ( + "context" + "fmt" + "sync" + "testing" + "time" + + "github.com/google/uuid" + "github.com/prometheus/client_golang/prometheus" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" + "golang.org/x/xerrors" + + "cdr.dev/slog" + "cdr.dev/slog/sloggers/sloghuman" + "github.com/coder/quartz" + + 
"github.com/coder/coder/v2/codersdk" + "github.com/coder/coder/v2/codersdk/agentsdk" + "github.com/coder/coder/v2/testutil" +) + +// fakeClient implements the client interface for testing +type fakeClient struct { + t *testing.T + logger slog.Logger + + // Channels for controlling the behavior + workspaceUpdatesCh chan codersdk.Workspace + workspaceByOwnerAndNameStatus chan codersdk.ProvisionerJobStatus + workspaceByOwnerAndNameErrors chan error +} + +func newFakeClient(t *testing.T) *fakeClient { + return &fakeClient{ + t: t, + workspaceUpdatesCh: make(chan codersdk.Workspace), + workspaceByOwnerAndNameStatus: make(chan codersdk.ProvisionerJobStatus), + workspaceByOwnerAndNameErrors: make(chan error, 1), + } +} + +func (m *fakeClient) initialize(logger slog.Logger) { + m.logger = logger +} + +func (m *fakeClient) watchWorkspace(ctx context.Context, workspaceID uuid.UUID) (<-chan codersdk.Workspace, error) { + m.logger.Debug(ctx, "called fake WatchWorkspace", slog.F("workspace_id", workspaceID.String())) + return m.workspaceUpdatesCh, nil +} + +const ( + testAgentToken = "test-agent-token" + testAgentName = "test-agent" + testWorkspaceName = "test-workspace" +) + +var ( + testWorkspaceID = uuid.UUID{1, 2, 3, 4} + testBuildID = uuid.UUID{5, 6, 7, 8} +) + +func workspaceWithJobStatus(status codersdk.ProvisionerJobStatus) codersdk.Workspace { + return codersdk.Workspace{ + ID: testWorkspaceID, // Fake workspace ID + Name: testWorkspaceName, + LatestBuild: codersdk.WorkspaceBuild{ + ID: testBuildID, + Job: codersdk.ProvisionerJob{ + Status: status, + }, + Resources: []codersdk.WorkspaceResource{ + { + Type: "coder_external_agent", + Agents: []codersdk.WorkspaceAgent{ + { + Name: testAgentName, + }, + }, + }, + }, + }, + } +} + +func (m *fakeClient) CreateUserWorkspace(ctx context.Context, userID string, req codersdk.CreateWorkspaceRequest) (codersdk.Workspace, error) { + m.logger.Debug(ctx, "called fake CreateUserWorkspace", slog.F("user_id", userID), slog.F("req", 
req)) + return workspaceWithJobStatus(codersdk.ProvisionerJobPending), nil +} + +func (m *fakeClient) WorkspaceByOwnerAndName(ctx context.Context, owner string, name string, params codersdk.WorkspaceOptions) (codersdk.Workspace, error) { + m.logger.Debug(ctx, "called fake WorkspaceByOwnerAndName", slog.F("owner", owner), slog.F("name", name)) + status := <-m.workspaceByOwnerAndNameStatus + var err error + select { + case err = <-m.workspaceByOwnerAndNameErrors: + return codersdk.Workspace{}, err + default: + return workspaceWithJobStatus(status), nil + } +} + +func (m *fakeClient) WorkspaceExternalAgentCredentials(ctx context.Context, workspaceID uuid.UUID, agentName string) (codersdk.ExternalAgentCredentials, error) { + m.logger.Debug(ctx, "called fake WorkspaceExternalAgentCredentials", slog.F("workspace_id", workspaceID), slog.F("agent_name", agentName)) + // Return fake credentials for testing + return codersdk.ExternalAgentCredentials{ + AgentToken: testAgentToken, + }, nil +} + +func (m *fakeClient) deleteWorkspace(ctx context.Context, workspaceID uuid.UUID) error { + m.logger.Debug(ctx, "called fake DeleteWorkspace", slog.F("workspace_id", workspaceID.String())) + // Simulate successful deletion in tests + return nil +} + +// fakeAppStatusPatcher implements the appStatusPatcher interface for testing +type fakeAppStatusPatcher struct { + t *testing.T + logger slog.Logger + agentToken string + + // Channels for controlling the behavior + patchStatusCalls chan agentsdk.PatchAppStatus + patchStatusErrors chan error +} + +func newFakeAppStatusPatcher(t *testing.T) *fakeAppStatusPatcher { + return &fakeAppStatusPatcher{ + t: t, + patchStatusCalls: make(chan agentsdk.PatchAppStatus), + patchStatusErrors: make(chan error, 1), + } +} + +func (p *fakeAppStatusPatcher) initialize(logger slog.Logger, agentToken string) { + p.logger = logger + p.agentToken = agentToken +} + +func (p *fakeAppStatusPatcher) patchAppStatus(ctx context.Context, req agentsdk.PatchAppStatus) 
error { + assert.NotEmpty(p.t, p.agentToken) + p.logger.Debug(ctx, "called fake PatchAppStatus", slog.F("req", req)) + // Send the request to the channel so tests can verify it + select { + case p.patchStatusCalls <- req: + case <-ctx.Done(): + return ctx.Err() + } + + // Check if there's an error to return + select { + case err := <-p.patchStatusErrors: + return err + default: + return nil + } +} + +func TestRunner_Run(t *testing.T) { + t.Parallel() + + ctx := testutil.Context(t, testutil.WaitShort) + + mClock := quartz.NewMock(t) + fClient := newFakeClient(t) + fPatcher := newFakeAppStatusPatcher(t) + templateID := uuid.UUID{5, 6, 7, 8} + workspaceName := "test-workspace" + appSlug := "test-app" + + reg := prometheus.NewRegistry() + metrics := NewMetrics(reg, "test") + + connectedWaitGroup := &sync.WaitGroup{} + connectedWaitGroup.Add(1) + startReporting := make(chan struct{}) + + cfg := Config{ + TemplateID: templateID, + WorkspaceName: workspaceName, + AppSlug: appSlug, + ConnectedWaitGroup: connectedWaitGroup, + StartReporting: startReporting, + ReportStatusPeriod: 10 * time.Second, + ReportStatusDuration: 35 * time.Second, + Metrics: metrics, + MetricLabelValues: []string{"test"}, + } + runner := &Runner{ + client: fClient, + patcher: fPatcher, + cfg: cfg, + clock: mClock, + reportTimes: make(map[int]time.Time), + } + + reportTickerTrap := mClock.Trap().TickerFunc("reportTaskStatus") + defer reportTickerTrap.Close() + sinceTrap := mClock.Trap().Since("watchWorkspaceUpdates") + defer sinceTrap.Close() + buildTickerTrap := mClock.Trap().TickerFunc("createExternalWorkspace") + defer buildTickerTrap.Close() + + // Run the runner in a goroutine + runErr := make(chan error, 1) + go func() { + runErr <- runner.Run(ctx, "test-runner", testutil.NewTestLogWriter(t)) + }() + + // complete the build + buildTickerTrap.MustWait(ctx).MustRelease(ctx) + w := mClock.Advance(30 * time.Second) + testutil.RequireSend(ctx, t, fClient.workspaceByOwnerAndNameStatus, 
codersdk.ProvisionerJobSucceeded) + w.MustWait(ctx) + + // Wait for the runner to connect and watch workspace + connectedWaitGroup.Wait() + + // Signal to start reporting + close(startReporting) + + // Wait for the initial TickerFunc call before advancing time, otherwise our ticks will be off. + reportTickerTrap.MustWait(ctx).MustRelease(ctx) + + // at this point, the patcher must be initialized + require.Equal(t, testAgentToken, fPatcher.agentToken) + + updateDelay := time.Duration(0) + for i := 0; i < 4; i++ { + tickWaiter := mClock.Advance((10 * time.Second) - updateDelay) + + patchCall := testutil.RequireReceive(ctx, t, fPatcher.patchStatusCalls) + require.Equal(t, appSlug, patchCall.AppSlug) + require.Equal(t, fmt.Sprintf("scaletest status update:%d", i), patchCall.Message) + require.Equal(t, codersdk.WorkspaceAppStatusStateWorking, patchCall.State) + tickWaiter.MustWait(ctx) + + // Send workspace update 1, 2, 3, or 4 seconds after the report + updateDelay = time.Duration(i+1) * time.Second + mClock.Advance(updateDelay) + + workspace := codersdk.Workspace{ + LatestAppStatus: &codersdk.WorkspaceAppStatus{ + Message: fmt.Sprintf("scaletest status update:%d", i), + }, + } + testutil.RequireSend(ctx, t, fClient.workspaceUpdatesCh, workspace) + sinceTrap.MustWait(ctx).MustRelease(ctx) + } + + // Wait for the runner to complete + err := testutil.RequireReceive(ctx, t, runErr) + require.NoError(t, err) + + // Verify metrics were updated correctly + metricFamilies, err := reg.Gather() + require.NoError(t, err) + + var latencyMetricFound bool + var missingUpdatesFound bool + for _, mf := range metricFamilies { + switch mf.GetName() { + case "coderd_scaletest_task_status_to_workspace_update_latency_seconds": + latencyMetricFound = true + require.Len(t, mf.GetMetric(), 1) + hist := mf.GetMetric()[0].GetHistogram() + assert.Equal(t, uint64(4), hist.GetSampleCount()) + case "coderd_scaletest_missing_status_updates_total": + missingUpdatesFound = true + require.Len(t, 
mf.GetMetric(), 1) + counter := mf.GetMetric()[0].GetCounter() + assert.Equal(t, float64(0), counter.GetValue()) + } + } + assert.True(t, latencyMetricFound, "latency metric not found") + assert.True(t, missingUpdatesFound, "missing updates metric not found") +} + +func TestRunner_RunMissedUpdate(t *testing.T) { + t.Parallel() + + testCtx := testutil.Context(t, testutil.WaitShort) + runCtx, cancel := context.WithCancel(testCtx) + defer cancel() + + mClock := quartz.NewMock(t) + fClient := newFakeClient(t) + fPatcher := newFakeAppStatusPatcher(t) + templateID := uuid.UUID{5, 6, 7, 8} + workspaceName := "test-workspace" + appSlug := "test-app" + + reg := prometheus.NewRegistry() + metrics := NewMetrics(reg, "test") + + connectedWaitGroup := &sync.WaitGroup{} + connectedWaitGroup.Add(1) + startReporting := make(chan struct{}) + + cfg := Config{ + TemplateID: templateID, + WorkspaceName: workspaceName, + AppSlug: appSlug, + ConnectedWaitGroup: connectedWaitGroup, + StartReporting: startReporting, + ReportStatusPeriod: 10 * time.Second, + ReportStatusDuration: 35 * time.Second, + Metrics: metrics, + MetricLabelValues: []string{"test"}, + } + runner := &Runner{ + client: fClient, + patcher: fPatcher, + cfg: cfg, + clock: mClock, + reportTimes: make(map[int]time.Time), + } + + tickerTrap := mClock.Trap().TickerFunc("reportTaskStatus") + defer tickerTrap.Close() + sinceTrap := mClock.Trap().Since("watchWorkspaceUpdates") + defer sinceTrap.Close() + buildTickerTrap := mClock.Trap().TickerFunc("createExternalWorkspace") + defer buildTickerTrap.Close() + + // Run the runner in a goroutine + runErr := make(chan error, 1) + go func() { + runErr <- runner.Run(runCtx, "test-runner", testutil.NewTestLogWriter(t)) + }() + + // complete the build + buildTickerTrap.MustWait(testCtx).MustRelease(testCtx) + w := mClock.Advance(30 * time.Second) + testutil.RequireSend(testCtx, t, fClient.workspaceByOwnerAndNameStatus, codersdk.ProvisionerJobSucceeded) + w.MustWait(testCtx) + + // Wait 
for the runner to connect and watch workspace + connectedWaitGroup.Wait() + + // Signal to start reporting + close(startReporting) + + // Wait for the initial TickerFunc call before advancing time, otherwise our ticks will be off. + tickerTrap.MustWait(testCtx).MustRelease(testCtx) + + updateDelay := time.Duration(0) + for i := 0; i < 4; i++ { + tickWaiter := mClock.Advance((10 * time.Second) - updateDelay) + patchCall := testutil.RequireReceive(testCtx, t, fPatcher.patchStatusCalls) + require.Equal(t, appSlug, patchCall.AppSlug) + require.Equal(t, fmt.Sprintf("scaletest status update:%d", i), patchCall.Message) + require.Equal(t, codersdk.WorkspaceAppStatusStateWorking, patchCall.State) + tickWaiter.MustWait(testCtx) + + // Send workspace update 1, 2, 3, or 4 seconds after the report + updateDelay = time.Duration(i+1) * time.Second + mClock.Advance(updateDelay) + + workspace := codersdk.Workspace{ + LatestAppStatus: &codersdk.WorkspaceAppStatus{ + Message: fmt.Sprintf("scaletest status update:%d", i), + }, + } + if i != 2 { + // skip the third update, to test that we report missed updates and still complete. + testutil.RequireSend(testCtx, t, fClient.workspaceUpdatesCh, workspace) + sinceTrap.MustWait(testCtx).MustRelease(testCtx) + } + } + + // Cancel the run context to simulate the runner being killed. 
+ cancel() + + // Wait for the runner to complete + err := testutil.RequireReceive(testCtx, t, runErr) + require.ErrorIs(t, err, context.Canceled) + + // Verify metrics were updated correctly + metricFamilies, err := reg.Gather() + require.NoError(t, err) + + // Check that metrics were recorded + var latencyMetricFound bool + var missingUpdatesFound bool + for _, mf := range metricFamilies { + switch mf.GetName() { + case "coderd_scaletest_task_status_to_workspace_update_latency_seconds": + latencyMetricFound = true + require.Len(t, mf.GetMetric(), 1) + hist := mf.GetMetric()[0].GetHistogram() + assert.Equal(t, uint64(3), hist.GetSampleCount()) + case "coderd_scaletest_missing_status_updates_total": + missingUpdatesFound = true + require.Len(t, mf.GetMetric(), 1) + counter := mf.GetMetric()[0].GetCounter() + assert.Equal(t, float64(1), counter.GetValue()) + } + } + assert.True(t, latencyMetricFound, "latency metric not found") + assert.True(t, missingUpdatesFound, "missing updates metric not found") +} + +func TestRunner_Run_WithErrors(t *testing.T) { + t.Parallel() + + testCtx := testutil.Context(t, testutil.WaitShort) + runCtx, cancel := context.WithCancel(testCtx) + defer cancel() + + mClock := quartz.NewMock(t) + fClient := newFakeClient(t) + fPatcher := newFakeAppStatusPatcher(t) + templateID := uuid.UUID{5, 6, 7, 8} + workspaceName := "test-workspace" + appSlug := "test-app" + + reg := prometheus.NewRegistry() + metrics := NewMetrics(reg, "test") + + connectedWaitGroup := &sync.WaitGroup{} + connectedWaitGroup.Add(1) + startReporting := make(chan struct{}) + + cfg := Config{ + TemplateID: templateID, + WorkspaceName: workspaceName, + AppSlug: appSlug, + ConnectedWaitGroup: connectedWaitGroup, + StartReporting: startReporting, + ReportStatusPeriod: 10 * time.Second, + ReportStatusDuration: 35 * time.Second, + Metrics: metrics, + MetricLabelValues: []string{"test"}, + } + runner := &Runner{ + client: fClient, + patcher: fPatcher, + cfg: cfg, + clock: mClock, + 
reportTimes: make(map[int]time.Time), + } + + tickerTrap := mClock.Trap().TickerFunc("reportTaskStatus") + defer tickerTrap.Close() + buildTickerTrap := mClock.Trap().TickerFunc("createExternalWorkspace") + defer buildTickerTrap.Close() + // Run the runner in a goroutine + runErr := make(chan error, 1) + go func() { + runErr <- runner.Run(runCtx, "test-runner", testutil.NewTestLogWriter(t)) + }() + + // complete the build + buildTickerTrap.MustWait(testCtx).MustRelease(testCtx) + w := mClock.Advance(30 * time.Second) + testutil.RequireSend(testCtx, t, fClient.workspaceByOwnerAndNameStatus, codersdk.ProvisionerJobSucceeded) + w.MustWait(testCtx) + + connectedWaitGroup.Wait() + close(startReporting) + + // Wait for the initial TickerFunc call before advancing time, otherwise our ticks will be off. + tickerTrap.MustWait(testCtx).MustRelease(testCtx) + + for i := 0; i < 4; i++ { + tickWaiter := mClock.Advance(10 * time.Second) + testutil.RequireSend(testCtx, t, fPatcher.patchStatusErrors, xerrors.New("a bad thing happened")) + _ = testutil.RequireReceive(testCtx, t, fPatcher.patchStatusCalls) + tickWaiter.MustWait(testCtx) + } + + // Cancel the run context to simulate the runner being killed. 
+ cancel() + + // Wait for the runner to complete + err := testutil.RequireReceive(testCtx, t, runErr) + require.ErrorIs(t, err, context.Canceled) + + // Verify metrics were updated correctly + metricFamilies, err := reg.Gather() + require.NoError(t, err) + + var missingUpdatesFound bool + var reportTaskStatusErrorsFound bool + for _, mf := range metricFamilies { + switch mf.GetName() { + case "coderd_scaletest_missing_status_updates_total": + missingUpdatesFound = true + require.Len(t, mf.GetMetric(), 1) + counter := mf.GetMetric()[0].GetCounter() + assert.Equal(t, float64(4), counter.GetValue()) + case "coderd_scaletest_report_task_status_errors_total": + reportTaskStatusErrorsFound = true + require.Len(t, mf.GetMetric(), 1) + counter := mf.GetMetric()[0].GetCounter() + assert.Equal(t, float64(4), counter.GetValue()) + } + } + + assert.True(t, missingUpdatesFound, "missing updates metric not found") + assert.True(t, reportTaskStatusErrorsFound, "report task status errors metric not found") +} + +func TestRunner_Run_BuildFailed(t *testing.T) { + t.Parallel() + + testCtx := testutil.Context(t, testutil.WaitShort) + runCtx, cancel := context.WithCancel(testCtx) + defer cancel() + + mClock := quartz.NewMock(t) + fClient := newFakeClient(t) + fPatcher := newFakeAppStatusPatcher(t) + templateID := uuid.UUID{5, 6, 7, 8} + workspaceName := "test-workspace" + appSlug := "test-app" + + reg := prometheus.NewRegistry() + metrics := NewMetrics(reg, "test") + + connectedWaitGroup := &sync.WaitGroup{} + connectedWaitGroup.Add(1) + startReporting := make(chan struct{}) + + cfg := Config{ + TemplateID: templateID, + WorkspaceName: workspaceName, + AppSlug: appSlug, + ConnectedWaitGroup: connectedWaitGroup, + StartReporting: startReporting, + ReportStatusPeriod: 10 * time.Second, + ReportStatusDuration: 35 * time.Second, + Metrics: metrics, + MetricLabelValues: []string{"test"}, + } + runner := &Runner{ + client: fClient, + patcher: fPatcher, + cfg: cfg, + clock: mClock, + 
reportTimes: make(map[int]time.Time), + } + + buildTickerTrap := mClock.Trap().TickerFunc("createExternalWorkspace") + defer buildTickerTrap.Close() + // Run the runner in a goroutine + runErr := make(chan error, 1) + go func() { + runErr <- runner.Run(runCtx, "test-runner", testutil.NewTestLogWriter(t)) + }() + + // complete the build + buildTickerTrap.MustWait(testCtx).MustRelease(testCtx) + w := mClock.Advance(30 * time.Second) + testutil.RequireSend(testCtx, t, fClient.workspaceByOwnerAndNameStatus, codersdk.ProvisionerJobFailed) + w.MustWait(testCtx) + + connectedWaitGroup.Wait() + + // Wait for the runner to complete + err := testutil.RequireReceive(testCtx, t, runErr) + require.ErrorContains(t, err, "workspace build failed") + + // Verify metrics were updated correctly + metricFamilies, err := reg.Gather() + require.NoError(t, err) + + var missingUpdatesFound bool + var reportTaskStatusErrorsFound bool + for _, mf := range metricFamilies { + switch mf.GetName() { + case "coderd_scaletest_missing_status_updates_total": + missingUpdatesFound = true + require.Len(t, mf.GetMetric(), 1) + counter := mf.GetMetric()[0].GetCounter() + assert.Equal(t, float64(0), counter.GetValue()) + case "coderd_scaletest_report_task_status_errors_total": + reportTaskStatusErrorsFound = true + require.Len(t, mf.GetMetric(), 1) + counter := mf.GetMetric()[0].GetCounter() + assert.Equal(t, float64(1), counter.GetValue()) + } + } + + assert.True(t, missingUpdatesFound, "missing updates metric not found") + assert.True(t, reportTaskStatusErrorsFound, "report task status errors metric not found") +} + +func TestParseStatusMessage(t *testing.T) { + t.Parallel() + + tests := []struct { + name string + message string + wantNum int + wantOk bool + }{ + { + name: "valid message", + message: "scaletest status update:42", + wantNum: 42, + wantOk: true, + }, + { + name: "valid message zero", + message: "scaletest status update:0", + wantNum: 0, + wantOk: true, + }, + { + name: "invalid prefix", 
+ message: "wrong prefix:42", + wantNum: 0, + wantOk: false, + }, + { + name: "invalid number", + message: "scaletest status update:abc", + wantNum: 0, + wantOk: false, + }, + { + name: "empty message", + message: "", + wantNum: 0, + wantOk: false, + }, + { + name: "missing number", + message: "scaletest status update:", + wantNum: 0, + wantOk: false, + }, + } + + for _, tt := range tests { + tt := tt + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + gotNum, gotOk := parseStatusMessage(tt.message) + assert.Equal(t, tt.wantNum, gotNum) + assert.Equal(t, tt.wantOk, gotOk) + }) + } +} + +func TestRunner_Cleanup(t *testing.T) { + t.Parallel() + + ctx := testutil.Context(t, testutil.WaitMedium) + + fakeClient := &fakeClientWithCleanupTracking{ + fakeClient: newFakeClient(t), + deleteWorkspaceCalls: make([]uuid.UUID, 0), + } + fakeClient.initialize(slog.Make(sloghuman.Sink(testutil.NewTestLogWriter(t))).Leveled(slog.LevelDebug)) + + cfg := Config{ + AppSlug: "test-app", + TemplateID: uuid.UUID{5, 6, 7, 8}, + WorkspaceName: "test-workspace", + MetricLabelValues: []string{"test"}, + Metrics: NewMetrics(prometheus.NewRegistry(), "test"), + ReportStatusPeriod: 100 * time.Millisecond, + ReportStatusDuration: 200 * time.Millisecond, + StartReporting: make(chan struct{}), + ConnectedWaitGroup: &sync.WaitGroup{}, + } + + runner := &Runner{ + client: fakeClient, + patcher: newFakeAppStatusPatcher(t), + cfg: cfg, + clock: quartz.NewMock(t), + } + + logWriter := testutil.NewTestLogWriter(t) + + // Case 1: No workspace created - Cleanup should do nothing + err := runner.Cleanup(ctx, "test-runner", logWriter) + require.NoError(t, err) + require.Len(t, fakeClient.deleteWorkspaceCalls, 0, "deleteWorkspace should not be called when no workspace was created") + + // Case 2: Workspace created - Cleanup should delete it + runner.workspaceID = uuid.UUID{1, 2, 3, 4} + err = runner.Cleanup(ctx, "test-runner", logWriter) + require.NoError(t, err) + require.Len(t, 
fakeClient.deleteWorkspaceCalls, 1, "deleteWorkspace should be called once") + require.Equal(t, runner.workspaceID, fakeClient.deleteWorkspaceCalls[0], "deleteWorkspace should be called with correct workspace ID") + + // Case 3: Cleanup with error + fakeClient.deleteError = xerrors.New("delete failed") + runner.workspaceID = uuid.UUID{5, 6, 7, 8} + err = runner.Cleanup(ctx, "test-runner", logWriter) + require.Error(t, err) + require.Contains(t, err.Error(), "delete external workspace") +} + +// fakeClientWithCleanupTracking extends fakeClient to track deleteWorkspace calls +type fakeClientWithCleanupTracking struct { + *fakeClient + deleteWorkspaceCalls []uuid.UUID + deleteError error +} + +func (c *fakeClientWithCleanupTracking) deleteWorkspace(ctx context.Context, workspaceID uuid.UUID) error { + c.deleteWorkspaceCalls = append(c.deleteWorkspaceCalls, workspaceID) + c.logger.Debug(ctx, "called fake DeleteWorkspace with tracking", slog.F("workspace_id", workspaceID.String())) + return c.deleteError +} diff --git a/scaletest/workspacebuild/run.go b/scaletest/workspacebuild/run.go index 308c18f0b6a03..fd3f1be54b9b6 100644 --- a/scaletest/workspacebuild/run.go +++ b/scaletest/workspacebuild/run.go @@ -33,8 +33,13 @@ func NewRunner(client *codersdk.Client, cfg Config) *Runner { } } +type SlimWorkspace struct { + ID uuid.UUID + Name string +} + // Run implements Runnable. 
-func (r *Runner) RunReturningWorkspace(ctx context.Context, id string, logs io.Writer) (codersdk.Workspace, error) { +func (r *Runner) RunReturningWorkspace(ctx context.Context, id string, logs io.Writer) (SlimWorkspace, error) { ctx, span := tracing.StartSpan(ctx) defer span.End() @@ -47,14 +52,14 @@ func (r *Runner) RunReturningWorkspace(ctx context.Context, id string, logs io.W if req.Name == "" { randName, err := loadtestutil.GenerateWorkspaceName(id) if err != nil { - return codersdk.Workspace{}, xerrors.Errorf("generate random name for workspace: %w", err) + return SlimWorkspace{}, xerrors.Errorf("generate random name for workspace: %w", err) } req.Name = randName } workspace, err := r.client.CreateWorkspace(ctx, r.cfg.OrganizationID, r.cfg.UserID, req) if err != nil { - return codersdk.Workspace{}, xerrors.Errorf("create workspace: %w", err) + return SlimWorkspace{}, xerrors.Errorf("create workspace: %w", err) } r.workspaceID = workspace.ID @@ -72,7 +77,7 @@ func (r *Runner) RunReturningWorkspace(ctx context.Context, id string, logs io.W TemplateVersionID: req.TemplateVersionID, }) if err != nil { - return codersdk.Workspace{}, xerrors.Errorf("create workspace build: %w", err) + return SlimWorkspace{}, xerrors.Errorf("create workspace build: %w", err) } err = waitForBuild(ctx, logs, r.client, workspace.LatestBuild.ID) if err == nil { @@ -80,7 +85,7 @@ func (r *Runner) RunReturningWorkspace(ctx context.Context, id string, logs io.W } } if err != nil { - return codersdk.Workspace{}, xerrors.Errorf("wait for build: %w", err) + return SlimWorkspace{}, xerrors.Errorf("wait for build: %w", err) } } } @@ -91,16 +96,13 @@ func (r *Runner) RunReturningWorkspace(ctx context.Context, id string, logs io.W _, _ = fmt.Fprintln(logs, "") err = waitForAgents(ctx, logs, r.client, workspace.ID) if err != nil { - return codersdk.Workspace{}, xerrors.Errorf("wait for agent: %w", err) + return SlimWorkspace{}, xerrors.Errorf("wait for agent: %w", err) } } - workspace, err = 
r.client.Workspace(ctx, workspace.ID) - if err != nil { - return codersdk.Workspace{}, xerrors.Errorf("get workspace %q: %w", workspace.ID.String(), err) - } - - return workspace, nil + // Some users of this runner might not need the full workspace, and + // want to avoid querying the workspace. + return SlimWorkspace{ID: workspace.ID, Name: workspace.Name}, nil } // CleanupRunner is a runner that deletes a workspace in the Run phase. @@ -145,12 +147,12 @@ func (r *CleanupRunner) Run(ctx context.Context, _ string, logs io.Writer) error if err == nil && build.Job.Status.Active() { // mark the build as canceled logger.Info(ctx, "canceling workspace build", slog.F("build_id", build.ID), slog.F("workspace_id", r.workspaceID)) - if err = r.client.CancelWorkspaceBuild(ctx, build.ID, codersdk.CancelWorkspaceBuildParams{}); err == nil { - // Wait for the job to cancel before we delete it - _ = waitForBuild(ctx, logs, r.client, build.ID) // it will return a "build canceled" error - } else { - logger.Warn(ctx, "failed to cancel workspace build, attempting to delete anyway", slog.Error(err)) + if err = r.client.CancelWorkspaceBuild(ctx, build.ID, codersdk.CancelWorkspaceBuildParams{}); err != nil { + logger.Warn(ctx, "failed to cancel workspace build", slog.Error(err)) } + // Wait for either the build or the cancellation to finish + // either is necessary or we'll fail at the delete step. 
+ _ = waitForBuild(ctx, logs, r.client, build.ID) // it will return a "build canceled" error } else { logger.Warn(ctx, "unable to lookup latest workspace build, attempting to delete anyway", slog.Error(err)) } diff --git a/scaletest/workspaceupdates/run.go b/scaletest/workspaceupdates/run.go index 4addf2b5a5939..fa05d290f0e54 100644 --- a/scaletest/workspaceupdates/run.go +++ b/scaletest/workspaceupdates/run.go @@ -116,6 +116,10 @@ func (r *Runner) Run(ctx context.Context, id string, logs io.Writer) error { workspaceBuildConfig.OrganizationID = r.cfg.User.OrganizationID workspaceBuildConfig.UserID = newUser.ID.String() workspaceBuildConfig.Request.Name = workspaceName + // We'll watch for completion ourselves via the tailnet workspace + // updates stream. + workspaceBuildConfig.NoWaitForAgents = true + workspaceBuildConfig.NoWaitForBuild = true runner := workspacebuild.NewRunner(newUserClient, workspaceBuildConfig) r.workspacebuildRunners = append(r.workspacebuildRunners, runner) diff --git a/scripts/Dockerfile.base b/scripts/Dockerfile.base index d14d88e1a544d..da27f98a57dcb 100644 --- a/scripts/Dockerfile.base +++ b/scripts/Dockerfile.base @@ -12,7 +12,8 @@ RUN apk add --no-cache \ bash \ git \ openssl \ - openssh-client && \ + openssh-client \ + tzdata && \ addgroup \ -g 1000 \ coder && \ @@ -26,7 +27,7 @@ RUN apk add --no-cache \ # Terraform was disabled in the edge repo due to a build issue. # https://gitlab.alpinelinux.org/alpine/aports/-/commit/f3e263d94cfac02d594bef83790c280e045eba35 # Using wget for now. Note that busybox unzip doesn't support streaming. 
-RUN ARCH="$(arch)"; if [ "${ARCH}" == "x86_64" ]; then ARCH="amd64"; elif [ "${ARCH}" == "aarch64" ]; then ARCH="arm64"; elif [ "${ARCH}" == "armv7l" ]; then ARCH="arm"; fi; wget -O /tmp/terraform.zip "https://releases.hashicorp.com/terraform/1.13.0/terraform_1.13.0_linux_${ARCH}.zip" && \ +RUN ARCH="$(arch)"; if [ "${ARCH}" == "x86_64" ]; then ARCH="amd64"; elif [ "${ARCH}" == "aarch64" ]; then ARCH="arm64"; elif [ "${ARCH}" == "armv7l" ]; then ARCH="arm"; fi; wget -O /tmp/terraform.zip "https://releases.hashicorp.com/terraform/1.13.4/terraform_1.13.4_linux_${ARCH}.zip" && \ busybox unzip /tmp/terraform.zip -d /usr/local/bin && \ rm -f /tmp/terraform.zip && \ chmod +x /usr/local/bin/terraform && \ diff --git a/scripts/apidocgen/postprocess/main.go b/scripts/apidocgen/postprocess/main.go index a37b85c975b3d..c4bc3f19ea4d5 100644 --- a/scripts/apidocgen/postprocess/main.go +++ b/scripts/apidocgen/postprocess/main.go @@ -198,20 +198,26 @@ func writeDocs(sections [][]byte) error { } for i, r := range m.Routes { - if r.Title != "API" { + if r.Title != "Reference" { continue } + for j, child := range r.Children { + if child.Title != "REST API" { + continue + } - var children []route - for _, mdf := range mdFiles { - docRoute := route{ - Title: mdf.title, - Path: mdf.path, + var children []route + for _, mdf := range mdFiles { + docRoute := route{ + Title: mdf.title, + Path: mdf.path, + } + children = append(children, docRoute) } - children = append(children, docRoute) - } - m.Routes[i].Children = children + m.Routes[i].Children[j].Children = children + break + } break } @@ -239,5 +245,5 @@ func extractSectionName(section []byte) (string, error) { } func toMdFilename(sectionName string) string { - return nonAlphanumericRegex.ReplaceAllLiteralString(strings.ToLower(sectionName), "-") + ".md" + return nonAlphanumericRegex.ReplaceAllLiteralString(strings.ReplaceAll(strings.ToLower(sectionName), " ", ""), "-") + ".md" } diff --git a/scripts/metricsdocgen/main.go 
b/scripts/metricsdocgen/main.go index ea7e8f79663c1..efdf55b29c809 100644 --- a/scripts/metricsdocgen/main.go +++ b/scripts/metricsdocgen/main.go @@ -64,7 +64,7 @@ func readMetrics() ([]*dto.MetricFamily, error) { var metrics []*dto.MetricFamily - decoder := expfmt.NewDecoder(f, expfmt.NewFormat(expfmt.TypeProtoText)) + decoder := expfmt.NewDecoder(f, expfmt.NewFormat(expfmt.TypeTextPlain)) for { var m dto.MetricFamily err = decoder.Decode(&m) diff --git a/scripts/metricsdocgen/metrics b/scripts/metricsdocgen/metrics index ba9a991fc2a06..e1942fbda7edd 100644 --- a/scripts/metricsdocgen/metrics +++ b/scripts/metricsdocgen/metrics @@ -878,3 +878,40 @@ promhttp_metric_handler_requests_in_flight 1 promhttp_metric_handler_requests_total{code="200"} 2 promhttp_metric_handler_requests_total{code="500"} 0 promhttp_metric_handler_requests_total{code="503"} 0 +# HELP coder_aibridged_injected_tool_invocations_total The number of times an injected MCP tool was invoked by aibridge. +# TYPE coder_aibridged_injected_tool_invocations_total counter +coder_aibridged_injected_tool_invocations_total{model="gpt-5-nano",name="coder_list_templates",provider="openai",server="https://xxx.pit-1.try.coder.app/api/experimental/mcp/http"} 1 +# HELP coder_aibridged_interceptions_duration_seconds The total duration of intercepted requests, in seconds. The majority of this time will be the upstream processing of the request. aibridge has no control over upstream processing time, so it's just an illustrative metric. 
+# TYPE coder_aibridged_interceptions_duration_seconds histogram +coder_aibridged_interceptions_duration_seconds_bucket{model="gpt-5-nano",provider="openai",le="0.5"} 0 +coder_aibridged_interceptions_duration_seconds_bucket{model="gpt-5-nano",provider="openai",le="2"} 0 +coder_aibridged_interceptions_duration_seconds_bucket{model="gpt-5-nano",provider="openai",le="5"} 3 +coder_aibridged_interceptions_duration_seconds_bucket{model="gpt-5-nano",provider="openai",le="15"} 6 +coder_aibridged_interceptions_duration_seconds_bucket{model="gpt-5-nano",provider="openai",le="30"} 6 +coder_aibridged_interceptions_duration_seconds_bucket{model="gpt-5-nano",provider="openai",le="60"} 6 +coder_aibridged_interceptions_duration_seconds_bucket{model="gpt-5-nano",provider="openai",le="120"} 6 +coder_aibridged_interceptions_duration_seconds_bucket{model="gpt-5-nano",provider="openai",le="+Inf"} 6 +coder_aibridged_interceptions_duration_seconds_sum{model="gpt-5-nano",provider="openai"} 34.120188692 +coder_aibridged_interceptions_duration_seconds_count{model="gpt-5-nano",provider="openai"} 6 +# HELP coder_aibridged_interceptions_inflight The number of intercepted requests which are being processed. +# TYPE coder_aibridged_interceptions_inflight gauge +coder_aibridged_interceptions_inflight{model="gpt-5-nano",provider="openai",route="/v1/chat/completions"} 0 +# HELP coder_aibridged_interceptions_total The count of intercepted requests. +# TYPE coder_aibridged_interceptions_total counter +coder_aibridged_interceptions_total{initiator_id="95f6752b-08cc-4cf1-97f7-c2165e3519c5",method="POST",model="gpt-5-nano",provider="openai",route="/v1/chat/completions",status="completed"} 6 +# HELP coder_aibridged_non_injected_tool_selections_total The number of times an AI model selected a tool to be invoked by the client. 
+# TYPE coder_aibridged_non_injected_tool_selections_total counter +coder_aibridged_non_injected_tool_selections_total{model="gpt-5-nano",name="read_file",provider="openai"} 2 +# HELP coder_aibridged_prompts_total The number of prompts issued by users (initiators). +# TYPE coder_aibridged_prompts_total counter +coder_aibridged_prompts_total{initiator_id="95f6752b-08cc-4cf1-97f7-c2165e3519c5",model="gpt-5-nano",provider="openai"} 4 +# HELP coder_aibridged_tokens_total The number of tokens used by intercepted requests. +# TYPE coder_aibridged_tokens_total counter +coder_aibridged_tokens_total{initiator_id="95f6752b-08cc-4cf1-97f7-c2165e3519c5",model="gpt-5-nano",provider="openai",type="completion_accepted_prediction"} 0 +coder_aibridged_tokens_total{initiator_id="95f6752b-08cc-4cf1-97f7-c2165e3519c5",model="gpt-5-nano",provider="openai",type="completion_audio"} 0 +coder_aibridged_tokens_total{initiator_id="95f6752b-08cc-4cf1-97f7-c2165e3519c5",model="gpt-5-nano",provider="openai",type="completion_reasoning"} 1664 +coder_aibridged_tokens_total{initiator_id="95f6752b-08cc-4cf1-97f7-c2165e3519c5",model="gpt-5-nano",provider="openai",type="completion_rejected_prediction"} 0 +coder_aibridged_tokens_total{initiator_id="95f6752b-08cc-4cf1-97f7-c2165e3519c5",model="gpt-5-nano",provider="openai",type="input"} 13823 +coder_aibridged_tokens_total{initiator_id="95f6752b-08cc-4cf1-97f7-c2165e3519c5",model="gpt-5-nano",provider="openai",type="output"} 2014 +coder_aibridged_tokens_total{initiator_id="95f6752b-08cc-4cf1-97f7-c2165e3519c5",model="gpt-5-nano",provider="openai",type="prompt_audio"} 0 +coder_aibridged_tokens_total{initiator_id="95f6752b-08cc-4cf1-97f7-c2165e3519c5",model="gpt-5-nano",provider="openai",type="prompt_cached"} 31872 diff --git a/site/.knip.jsonc b/site/.knip.jsonc index 1d620f9134781..312d4a9782ea0 100644 --- a/site/.knip.jsonc +++ b/site/.knip.jsonc @@ -8,5 +8,8 @@ "@types/react-virtualized-auto-sizer", "jest_workaround", "ts-proto" - ] + ], + "jest": { + 
"entry": "./src/**/*.jest.{ts,tsx}" + } } diff --git a/site/.storybook/preview.tsx b/site/.storybook/preview.tsx index 21b63e59bea45..13a875442db70 100644 --- a/site/.storybook/preview.tsx +++ b/site/.storybook/preview.tsx @@ -11,6 +11,7 @@ import isChromatic from "chromatic/isChromatic"; import { StrictMode } from "react"; import { QueryClient, QueryClientProvider } from "react-query"; import { withRouter } from "storybook-addon-remix-react-router"; +import { TooltipProvider } from "../src/components/Tooltip/Tooltip"; import "theme/globalFonts"; import type { Decorator, Loader, Parameters } from "@storybook/react-vite"; import themes from "../src/theme"; @@ -100,8 +101,10 @@ const withTheme: Decorator = (Story, context) => { - - + + + + diff --git a/site/e2e/helpers.ts b/site/e2e/helpers.ts index 6f0d0e4f92b50..b1df166f96be4 100644 --- a/site/e2e/helpers.ts +++ b/site/e2e/helpers.ts @@ -183,34 +183,37 @@ export const verifyParameters = async ( ); } - const parameterLabel = await page.waitForSelector( - `[data-testid='parameter-field-${richParameter.name}']`, - { state: "visible" }, + const parameterLabel = page.getByTestId( + `parameter-field-${richParameter.displayName}`, ); + await expect(parameterLabel).toBeVisible(); - const muiDisabled = richParameter.mutable ? 
"" : ".Mui-disabled"; + if (richParameter.options.length > 0) { + const parameterValue = parameterLabel.getByLabel(buildParameter.value); + const value = await parameterValue.isChecked(); + expect(value).toBe(true); + continue; + } - if (richParameter.type === "bool") { - const parameterField = await parameterLabel.waitForSelector( - `[data-testid='parameter-field-bool'] .MuiRadio-root.Mui-checked${muiDisabled} input`, - ); - const value = await parameterField.inputValue(); - expect(value).toEqual(buildParameter.value); - } else if (richParameter.options.length > 0) { - const parameterField = await parameterLabel.waitForSelector( - `[data-testid='parameter-field-options'] .MuiRadio-root.Mui-checked${muiDisabled} input`, - ); - const value = await parameterField.inputValue(); - expect(value).toEqual(buildParameter.value); - } else if (richParameter.type === "list(string)") { - throw new Error("not implemented yet"); // FIXME - } else { - // text or number - const parameterField = await parameterLabel.waitForSelector( - `[data-testid='parameter-field-text'] input${muiDisabled}`, - ); - const value = await parameterField.inputValue(); - expect(value).toEqual(buildParameter.value); + switch (richParameter.type) { + case "bool": + { + const parameterField = parameterLabel.locator("input"); + const value = await parameterField.isChecked(); + expect(value.toString()).toEqual(buildParameter.value); + } + break; + case "string": + case "number": + { + const parameterField = parameterLabel.locator("input"); + const value = await parameterField.inputValue(); + expect(value).toEqual(buildParameter.value); + } + break; + default: + // Some types like `list(string)` are not tested + throw new Error("not implemented yet"); } } }; @@ -373,25 +376,22 @@ export const stopWorkspace = async (page: Page, workspaceName: string) => { }); }; -export const buildWorkspaceWithParameters = async ( +export const startWorkspaceWithEphemeralParameters = async ( page: Page, workspaceName: string, 
richParameters: RichParameter[] = [], buildParameters: WorkspaceBuildParameter[] = [], - confirm = false, ) => { const user = currentUser(page); await page.goto(`/@${user.username}/${workspaceName}`, { waitUntil: "domcontentloaded", }); - await page.getByTestId("build-parameters-button").click(); + await page.getByTestId("workspace-start").click(); + await page.getByTestId("workspace-parameters").click(); await fillParameters(page, richParameters, buildParameters); - await page.getByTestId("build-parameters-submit").click(); - if (confirm) { - await page.getByTestId("confirm-button").click(); - } + await page.getByRole("button", { name: "Update and restart" }).click(); await page.waitForSelector("text=Workspace status: Running", { state: "visible", @@ -547,6 +547,9 @@ interface EchoProvisionerResponses { plan?: RecursivePartial[]; // apply occurs when the workspace is built apply?: RecursivePartial[]; + // extraFiles allows the bundling of terraform files in echo provisioner tars + // in order to support dynamic parameters + extraFiles?: Map; } const emptyPlan = new TextEncoder().encode("{}"); @@ -595,6 +598,13 @@ const createTemplateVersionTar = async ( } const tar = new TarWriter(); + + if (responses.extraFiles) { + for (const [fileName, fileContents] of responses.extraFiles) { + tar.addFile(fileName, fileContents); + } + } + responses.parse.forEach((response, index) => { response.parse = { templateVariables: [], @@ -830,6 +840,50 @@ export const findSessionToken = async (page: Page): Promise => { export const echoResponsesWithParameters = ( richParameters: RichParameter[], ): EchoProvisionerResponses => { + let tf = `terraform { + required_providers { + coder = { + source = "coder/coder" + } + } +} +`; + + for (const parameter of richParameters) { + let options = ""; + if (parameter.options) { + for (const option of parameter.options) { + options += ` + option { + name = ${JSON.stringify(option.name)} + description = ${JSON.stringify(option.description)} + value 
= ${JSON.stringify(option.value)} + icon = ${JSON.stringify(option.icon)} + } +`; + } + } + + tf += ` +data "coder_parameter" "${parameter.name}" { + type = ${JSON.stringify(parameter.type)} + name = ${JSON.stringify(parameter.displayName)} + icon = ${JSON.stringify(parameter.icon)} + description = ${JSON.stringify(parameter.description)} + mutable = ${JSON.stringify(parameter.mutable)}`; + + if (!parameter.required) { + tf += ` + default = ${JSON.stringify(parameter.defaultValue)}`; + } + + tf += ` + order = ${JSON.stringify(parameter.order)} + ephemeral = ${JSON.stringify(parameter.ephemeral)} +${options}} +`; + } + return { parse: [ { @@ -854,6 +908,7 @@ export const echoResponsesWithParameters = ( }, }, ], + extraFiles: new Map([["main.tf", tf]]), }; }; @@ -903,30 +958,36 @@ const fillParameters = async ( ); } - // Use modern locator approach instead of waitForSelector const parameterLabel = page.getByTestId( - `parameter-field-${richParameter.name}`, + `parameter-field-${richParameter.displayName}`, ); await expect(parameterLabel).toBeVisible(); - if (richParameter.type === "bool") { - const parameterField = parameterLabel - .getByTestId("parameter-field-bool") - .locator(`.MuiRadio-root input[value='${buildParameter.value}']`); - await parameterField.click(); - } else if (richParameter.options.length > 0) { - const parameterField = parameterLabel - .getByTestId("parameter-field-options") - .locator(`.MuiRadio-root input[value='${buildParameter.value}']`); - await parameterField.click(); - } else if (richParameter.type === "list(string)") { - throw new Error("not implemented yet"); // FIXME - } else { - // text or number - const parameterField = parameterLabel - .getByTestId("parameter-field-text") - .locator("input"); - await parameterField.fill(buildParameter.value); + if (richParameter.options.length > 0) { + const parameterValue = parameterLabel.getByRole("button", { + name: buildParameter.value, + }); + await parameterValue.click(); + continue; + } + + 
switch (richParameter.type) { + case "bool": + { + const parameterField = parameterLabel.locator("button"); + await parameterField.click(); + } + break; + case "string": + case "number": + { + const parameterField = parameterLabel.locator("input"); + await parameterField.fill(buildParameter.value); + } + break; + default: + // Some types like `list(string)` are not tested + throw new Error("not implemented yet"); } } }; @@ -1021,27 +1082,13 @@ export const updateWorkspace = async ( await page.getByTestId("workspace-update-button").click(); await page.getByTestId("confirm-button").click(); - await page.waitForSelector('[data-testid="dialog"]', { state: "visible" }); + await page + .getByRole("button", { name: /go to workspace parameters/i }) + .click(); await fillParameters(page, richParameters, buildParameters); - await page.getByRole("button", { name: /update parameters/i }).click(); - // Wait for the update button to detach. - await page.waitForSelector( - "button[data-testid='workspace-update-button']:enabled", - { state: "detached" }, - ); - // Wait for the workspace to be running again. 
- await page.waitForSelector("text=Workspace status: Running", { - state: "visible", - }); - // Wait for the stop button to be enabled again - await page.waitForSelector( - "button[data-testid='workspace-stop-button']:enabled", - { - state: "visible", - }, - ); + await page.getByRole("button", { name: /update and restart/i }).click(); }; export const updateWorkspaceParameters = async ( @@ -1056,7 +1103,7 @@ export const updateWorkspaceParameters = async ( }); await fillParameters(page, richParameters, buildParameters); - await page.getByRole("button", { name: /submit and restart/i }).click(); + await page.getByRole("button", { name: /update and restart/i }).click(); await page.waitForSelector("text=Workspace status: Running", { state: "visible", @@ -1209,48 +1256,3 @@ export async function addUserToOrganization( } await page.mouse.click(10, 10); // close the popover by clicking outside of it } - -/** - * disableDynamicParameters navigates to the template settings page and disables - * dynamic parameters by unchecking the "Enable dynamic parameters" checkbox. 
- */ -export const disableDynamicParameters = async ( - page: Page, - templateName: string, - orgName = defaultOrganizationName, -) => { - await page.goto(`/templates/${orgName}/${templateName}/settings`, { - waitUntil: "domcontentloaded", - }); - - await page.waitForSelector("form", { state: "visible" }); - - // Find and uncheck the "Enable dynamic parameters" checkbox - const dynamicParamsCheckbox = page.getByRole("checkbox", { - name: /Enable dynamic parameters for workspace creation/, - }); - - await dynamicParamsCheckbox.waitFor({ state: "visible" }); - - // If the checkbox is checked, uncheck it - if (await dynamicParamsCheckbox.isChecked()) { - await dynamicParamsCheckbox.click(); - } - - // Save the changes - const saveButton = page.getByRole("button", { name: /save/i }); - await saveButton.waitFor({ state: "visible" }); - await saveButton.click(); - - // Wait for the success message or page to update - await page - .locator("[role='alert']:has-text('Template updated successfully')") - .first() - .waitFor({ - state: "visible", - timeout: 15000, - }); - - // Additional wait to ensure the changes are persisted - await page.waitForTimeout(500); -}; diff --git a/site/e2e/parameters.ts b/site/e2e/parameters.ts index 3b672f334c039..603a62e3dbb1e 100644 --- a/site/e2e/parameters.ts +++ b/site/e2e/parameters.ts @@ -53,6 +53,7 @@ export const thirdParameter: RichParameter = { ...emptyParameter, name: "third_parameter", + displayName: "Third parameter", type: "string", description: "This is third parameter.", defaultValue: "", @@ -65,6 +66,7 @@ export const fourthParameter: RichParameter = { ...emptyParameter, name: "fourth_parameter", + displayName: "Fourth parameter", type: "bool", description: "This is fourth parameter.", defaultValue: "true", diff --git a/site/e2e/playwright.config.ts b/site/e2e/playwright.config.ts index 1454eb25c097b..a24ab8e61e833 100644 --- a/site/e2e/playwright.config.ts +++ b/site/e2e/playwright.config.ts @@ -81,9 +81,12 @@ export default 
defineConfig({ "--provisioner-daemons=10", "--web-terminal-renderer=dom", "--pprof-enable", + "--log-filter=.*", + `--log-human=${path.join(__dirname, "test-results/debug.log")}`, ] .filter(Boolean) .join(" "), + stdout: "pipe", env: { ...process.env, // Otherwise, the runner fails on Mac with: could not determine kind of name for C.uuid_string_t diff --git a/site/e2e/provisionerGenerated.ts b/site/e2e/provisionerGenerated.ts index c5a7d16274a1c..ba9071ab625e8 100644 --- a/site/e2e/provisionerGenerated.ts +++ b/site/e2e/provisionerGenerated.ts @@ -405,6 +405,7 @@ export interface Metadata { runningAgentAuthTokens: RunningAgentAuthToken[]; taskId: string; taskPrompt: string; + templateVersionId: string; } /** Config represents execution configuration shared by all subsequent requests in the Session */ @@ -414,6 +415,16 @@ export interface Config { /** state is the provisioner state (if any) */ state: Uint8Array; provisionerLogLevel: string; + /** Template imports can omit template id */ + templateId?: + | string + | undefined; + /** Dry runs omit version id */ + templateVersionId?: + | string + | undefined; + /** Whether to reuse existing terraform workspaces if they exist. */ + expReuseTerraformWorkspace?: boolean | undefined; } /** ParseRequest consumes source-code to produce inputs. 
*/ @@ -1298,6 +1309,9 @@ export const Metadata = { if (message.taskPrompt !== "") { writer.uint32(186).string(message.taskPrompt); } + if (message.templateVersionId !== "") { + writer.uint32(194).string(message.templateVersionId); + } return writer; }, }; @@ -1313,6 +1327,15 @@ export const Config = { if (message.provisionerLogLevel !== "") { writer.uint32(26).string(message.provisionerLogLevel); } + if (message.templateId !== undefined) { + writer.uint32(34).string(message.templateId); + } + if (message.templateVersionId !== undefined) { + writer.uint32(42).string(message.templateVersionId); + } + if (message.expReuseTerraformWorkspace !== undefined) { + writer.uint32(48).bool(message.expReuseTerraformWorkspace); + } return writer; }, }; diff --git a/site/e2e/tests/workspaces/autoCreateWorkspace.spec.ts b/site/e2e/tests/workspaces/autoCreateWorkspace.spec.ts index 74b3c07ca78df..b30e2386b24df 100644 --- a/site/e2e/tests/workspaces/autoCreateWorkspace.spec.ts +++ b/site/e2e/tests/workspaces/autoCreateWorkspace.spec.ts @@ -19,7 +19,7 @@ test.beforeAll(async ({ browser }) => { await login(page, users.templateAdmin); const richParameters: RichParameter[] = [ - { ...emptyParameter, name: "repo", type: "string" }, + { ...emptyParameter, name: "repo", displayName: "Repo", type: "string" }, ]; template = await createTemplate( page, diff --git a/site/e2e/tests/workspaces/createWorkspace.spec.ts b/site/e2e/tests/workspaces/createWorkspace.spec.ts index 9fcbcaf31c9dd..c6371c9c9a3b7 100644 --- a/site/e2e/tests/workspaces/createWorkspace.spec.ts +++ b/site/e2e/tests/workspaces/createWorkspace.spec.ts @@ -3,7 +3,6 @@ import { users } from "../../constants"; import { createTemplate, createWorkspace, - disableDynamicParameters, echoResponsesWithParameters, login, openTerminalWindow, @@ -36,9 +35,6 @@ test("create workspace", async ({ page }) => { apply: [{ apply: { resources: [{ name: "example" }] } }], }); - // Disable dynamic parameters to use classic parameter flow for this 
test - await disableDynamicParameters(page, template); - await login(page, users.member); await createWorkspace(page, template); }); @@ -55,9 +51,6 @@ test("create workspace with default immutable parameters", async ({ page }) => { echoResponsesWithParameters(richParameters), ); - // Disable dynamic parameters to use classic parameter flow for this test - await disableDynamicParameters(page, template); - await login(page, users.member); const workspaceName = await createWorkspace(page, template); await verifyParameters(page, workspaceName, richParameters, [ @@ -75,9 +68,6 @@ test("create workspace with default mutable parameters", async ({ page }) => { echoResponsesWithParameters(richParameters), ); - // Disable dynamic parameters to use classic parameter flow for this test - await disableDynamicParameters(page, template); - await login(page, users.member); const workspaceName = await createWorkspace(page, template); await verifyParameters(page, workspaceName, richParameters, [ @@ -105,9 +95,6 @@ test("create workspace with default and required parameters", async ({ echoResponsesWithParameters(richParameters), ); - // Disable dynamic parameters to use classic parameter flow for this test - await disableDynamicParameters(page, template); - await login(page, users.member); const workspaceName = await createWorkspace(page, template, { richParameters, @@ -140,14 +127,16 @@ test("create workspace and overwrite default parameters", async ({ page }) => { echoResponsesWithParameters(richParameters), ); - // Disable dynamic parameters to use classic parameter flow for this test - await disableDynamicParameters(page, template); - await login(page, users.member); const workspaceName = await createWorkspace(page, template, { richParameters, buildParameters, }); + + await page.waitForSelector("text=Workspace status: Running", { + state: "visible", + }); + await verifyParameters(page, workspaceName, richParameters, buildParameters); }); @@ -163,9 +152,6 @@ test("create workspace 
with disable_param search params", async ({ page }) => { echoResponsesWithParameters(richParameters), ); - // Disable dynamic parameters to use classic parameter flow for this test - await disableDynamicParameters(page, templateName); - await login(page, users.member); await page.goto( `/templates/${templateName}/workspace?disable_params=first_parameter,second_parameter`, @@ -184,9 +170,6 @@ test.skip("create docker workspace", async ({ context, page }) => { await login(page, users.templateAdmin); const template = await createTemplate(page, StarterTemplates.STARTER_DOCKER); - // Disable dynamic parameters to use classic parameter flow for this test - await disableDynamicParameters(page, template); - await login(page, users.member); const workspaceName = await createWorkspace(page, template); diff --git a/site/e2e/tests/workspaces/restartWorkspace.spec.ts b/site/e2e/tests/workspaces/restartWorkspace.spec.ts deleted file mode 100644 index 987f3c279cc26..0000000000000 --- a/site/e2e/tests/workspaces/restartWorkspace.spec.ts +++ /dev/null @@ -1,58 +0,0 @@ -import { test } from "@playwright/test"; -import { users } from "../../constants"; -import { - buildWorkspaceWithParameters, - createTemplate, - createWorkspace, - disableDynamicParameters, - echoResponsesWithParameters, - login, - verifyParameters, -} from "../../helpers"; -import { beforeCoderTest } from "../../hooks"; -import { firstBuildOption, secondBuildOption } from "../../parameters"; -import type { RichParameter } from "../../provisionerGenerated"; - -test.beforeEach(async ({ page }) => { - beforeCoderTest(page); -}); - -test("restart workspace with ephemeral parameters", async ({ page }) => { - await login(page, users.templateAdmin); - const richParameters: RichParameter[] = [firstBuildOption, secondBuildOption]; - const template = await createTemplate( - page, - echoResponsesWithParameters(richParameters), - ); - - // Disable dynamic parameters to use classic parameter flow for this test - await 
disableDynamicParameters(page, template); - - await login(page, users.member); - const workspaceName = await createWorkspace(page, template); - - // Verify that build options are default (not selected). - await verifyParameters(page, workspaceName, richParameters, [ - { name: richParameters[0].name, value: firstBuildOption.defaultValue }, - { name: richParameters[1].name, value: secondBuildOption.defaultValue }, - ]); - - // Now, restart the workspace with ephemeral parameters selected. - const buildParameters = [ - { name: richParameters[0].name, value: "AAAAA" }, - { name: richParameters[1].name, value: "true" }, - ]; - await buildWorkspaceWithParameters( - page, - workspaceName, - richParameters, - buildParameters, - true, - ); - - // Verify that build options are default (not selected). - await verifyParameters(page, workspaceName, richParameters, [ - { name: richParameters[0].name, value: firstBuildOption.defaultValue }, - { name: richParameters[1].name, value: secondBuildOption.defaultValue }, - ]); -}); diff --git a/site/e2e/tests/workspaces/startWorkspace.spec.ts b/site/e2e/tests/workspaces/startWorkspace.spec.ts index 30a83a01d6dca..5e88780e34fc3 100644 --- a/site/e2e/tests/workspaces/startWorkspace.spec.ts +++ b/site/e2e/tests/workspaces/startWorkspace.spec.ts @@ -1,12 +1,11 @@ import { test } from "@playwright/test"; import { users } from "../../constants"; import { - buildWorkspaceWithParameters, createTemplate, createWorkspace, - disableDynamicParameters, echoResponsesWithParameters, login, + startWorkspaceWithEphemeralParameters, stopWorkspace, verifyParameters, } from "../../helpers"; @@ -26,9 +25,6 @@ test("start workspace with ephemeral parameters", async ({ page }) => { echoResponsesWithParameters(richParameters), ); - // Disable dynamic parameters to use classic parameter flow for this test - await disableDynamicParameters(page, template); - await login(page, users.member); const workspaceName = await createWorkspace(page, template); @@ -47,13 
+43,16 @@ test("start workspace with ephemeral parameters", async ({ page }) => { { name: richParameters[1].name, value: "true" }, ]; - await buildWorkspaceWithParameters( + await startWorkspaceWithEphemeralParameters( page, workspaceName, richParameters, buildParameters, ); + // Stop the workspace + await stopWorkspace(page, workspaceName); + // Verify that build options are default (not selected). await verifyParameters(page, workspaceName, richParameters, [ { name: richParameters[0].name, value: firstBuildOption.defaultValue }, diff --git a/site/e2e/tests/workspaces/updateWorkspace.spec.ts b/site/e2e/tests/workspaces/updateWorkspace.spec.ts index b731b76abbf1a..7ffc0652d9724 100644 --- a/site/e2e/tests/workspaces/updateWorkspace.spec.ts +++ b/site/e2e/tests/workspaces/updateWorkspace.spec.ts @@ -3,9 +3,9 @@ import { users } from "../../constants"; import { createTemplate, createWorkspace, - disableDynamicParameters, echoResponsesWithParameters, login, + stopWorkspace, updateTemplate, updateWorkspace, updateWorkspaceParameters, @@ -25,7 +25,12 @@ test.beforeEach(async ({ page }) => { beforeCoderTest(page); }); -test("update workspace, new optional, immutable parameter added", async ({ +// TODO: this needs to be fixed for the new dynamic parameters flow which +// sends you to the parameters settings page instead of prompting for new +// values in a modal, but that flow is broken! because we don't let you set +// immutable parameters on that page even if they are new, and detecting if +// they are new is non-trivial. 
+test.skip("update workspace, new optional, immutable parameter added", async ({ page, }) => { await login(page, users.templateAdmin); @@ -35,9 +40,6 @@ test("update workspace, new optional, immutable parameter added", async ({ echoResponsesWithParameters(richParameters), ); - // Disable dynamic parameters to use classic parameter flow for this test - await disableDynamicParameters(page, template); - await login(page, users.member); const workspaceName = await createWorkspace(page, template); @@ -81,9 +83,6 @@ test("update workspace, new required, mutable parameter added", async ({ echoResponsesWithParameters(richParameters), ); - // Disable dynamic parameters to use classic parameter flow for this test - await disableDynamicParameters(page, template); - await login(page, users.member); const workspaceName = await createWorkspace(page, template); @@ -113,6 +112,10 @@ test("update workspace, new required, mutable parameter added", async ({ buildParameters, ); + await page.waitForSelector("text=Workspace status: Running", { + state: "visible", + }); + // Verify parameter values. await verifyParameters(page, workspaceName, updatedRichParameters, [ { name: firstParameter.name, value: firstParameter.defaultValue }, @@ -129,9 +132,6 @@ test("update workspace with ephemeral parameter enabled", async ({ page }) => { echoResponsesWithParameters(richParameters), ); - // Disable dynamic parameters to use classic parameter flow for this test - await disableDynamicParameters(page, template); - await login(page, users.member); const workspaceName = await createWorkspace(page, template); @@ -150,6 +150,9 @@ test("update workspace with ephemeral parameter enabled", async ({ page }) => { buildParameters, ); + // Stop the workspace + await stopWorkspace(page, workspaceName); + // Verify that parameter values are default. 
await verifyParameters(page, workspaceName, richParameters, [ { name: firstParameter.name, value: firstParameter.defaultValue }, diff --git a/site/jest.config.ts b/site/jest.config.ts index 887b91fb9dee6..5ee9ec7ebd36b 100644 --- a/site/jest.config.ts +++ b/site/jest.config.ts @@ -31,7 +31,7 @@ module.exports = { testEnvironmentOptions: { customExportConditions: [""], }, - testRegex: "(/__tests__/.*|(\\.|/)(test|spec))\\.tsx?$", + testRegex: "(/__tests__/.*|(\\.|/)(jest))\\.tsx?$", testPathIgnorePatterns: [ "/node_modules/", "/e2e/", diff --git a/site/package.json b/site/package.json index 7b74cfae58254..574528e2bb327 100644 --- a/site/package.json +++ b/site/package.json @@ -27,10 +27,10 @@ "storybook": "STORYBOOK=true storybook dev -p 6006", "storybook:build": "storybook build", "storybook:ci": "storybook build --test", - "test": "jest", - "test:ci": "jest --selectProjects test --silent", - "test:coverage": "jest --selectProjects test --collectCoverage", - "test:watch": "jest --selectProjects test --watch", + "test": "vitest run && jest", + "test:ci": "vitest run && jest --silent", + "test:watch": "vitest", + "test:watch-jest": "jest --watch", "stats": "STATS=true pnpm build && npx http-server ./stats -p 8081 -c-1", "update-emojis": "cp -rf ./node_modules/emoji-datasource-apple/img/apple/64/* ./static/emojis" }, @@ -41,31 +41,31 @@ "@emotion/css": "11.13.5", "@emotion/react": "11.14.0", "@emotion/styled": "11.14.1", - "@fontsource-variable/inter": "5.1.1", + "@fontsource-variable/inter": "5.2.8", "@fontsource/fira-code": "5.2.7", "@fontsource/ibm-plex-mono": "5.2.7", - "@fontsource/jetbrains-mono": "5.2.5", - "@fontsource/source-code-pro": "5.2.5", + "@fontsource/jetbrains-mono": "5.2.8", + "@fontsource/source-code-pro": "5.2.7", "@monaco-editor/react": "4.7.0", "@mui/material": "5.18.0", "@mui/system": "5.18.0", "@mui/utils": "5.17.1", "@mui/x-tree-view": "7.29.10", - "@radix-ui/react-avatar": "1.1.2", - "@radix-ui/react-checkbox": "1.1.4", - 
"@radix-ui/react-collapsible": "1.1.2", - "@radix-ui/react-dialog": "1.1.4", - "@radix-ui/react-dropdown-menu": "2.1.4", - "@radix-ui/react-label": "2.1.0", - "@radix-ui/react-popover": "1.1.5", - "@radix-ui/react-radio-group": "1.2.3", - "@radix-ui/react-scroll-area": "1.2.3", + "@radix-ui/react-avatar": "1.1.10", + "@radix-ui/react-checkbox": "1.3.3", + "@radix-ui/react-collapsible": "1.1.12", + "@radix-ui/react-dialog": "1.1.15", + "@radix-ui/react-dropdown-menu": "2.1.16", + "@radix-ui/react-label": "2.1.7", + "@radix-ui/react-popover": "1.1.15", + "@radix-ui/react-radio-group": "1.3.8", + "@radix-ui/react-scroll-area": "1.2.10", "@radix-ui/react-select": "2.2.6", "@radix-ui/react-separator": "1.1.7", - "@radix-ui/react-slider": "1.2.2", + "@radix-ui/react-slider": "1.3.6", "@radix-ui/react-slot": "1.2.3", - "@radix-ui/react-switch": "1.1.1", - "@radix-ui/react-tooltip": "1.1.7", + "@radix-ui/react-switch": "1.2.6", + "@radix-ui/react-tooltip": "1.2.8", "@tanstack/react-query-devtools": "5.77.0", "@xterm/addon-canvas": "0.7.0", "@xterm/addon-fit": "0.10.0", @@ -74,14 +74,14 @@ "@xterm/addon-webgl": "0.18.0", "@xterm/xterm": "5.5.0", "ansi-to-html": "0.7.2", - "axios": "1.12.0", + "axios": "1.13.1", "chroma-js": "2.6.0", "class-variance-authority": "0.7.1", "clsx": "2.1.1", - "cmdk": "1.0.4", + "cmdk": "1.1.1", "color-convert": "2.0.1", "cron-parser": "4.9.0", - "cronstrue": "2.50.0", + "cronstrue": "2.59.0", "dayjs": "1.11.18", "emoji-mart": "5.6.0", "file-saver": "2.0.5", @@ -90,32 +90,32 @@ "humanize-duration": "3.32.2", "jszip": "3.10.1", "lodash": "4.17.21", - "lucide-react": "0.545.0", - "monaco-editor": "0.53.0", + "lucide-react": "0.552.0", + "monaco-editor": "0.55.1", "pretty-bytes": "6.1.1", - "react": "19.1.1", + "react": "19.2.0", "react-color": "2.19.3", "react-confetti": "6.4.0", "react-date-range": "1.4.0", - "react-dom": "19.1.1", + "react-dom": "19.2.0", "react-markdown": "9.1.0", "react-query": "npm:@tanstack/react-query@5.77.0", 
"react-resizable-panels": "3.0.6", - "react-router": "7.8.0", + "react-router": "7.9.5", "react-syntax-highlighter": "15.6.1", "react-textarea-autosize": "8.5.9", "react-virtualized-auto-sizer": "1.0.26", "react-window": "1.8.11", - "recharts": "2.15.0", + "recharts": "2.15.4", "remark-gfm": "4.0.1", "resize-observer-polyfill": "1.5.1", - "semver": "7.7.2", + "semver": "7.7.3", "tailwind-merge": "2.6.0", "tailwindcss-animate": "1.0.7", "tzdata": "1.0.46", "ua-parser-js": "1.0.41", "ufuzzy": "npm:@leeoniya/ufuzzy@1.0.10", - "undici": "6.21.3", + "undici": "6.22.0", "unique-names-generator": "4.7.1", "uuid": "9.0.1", "websocket-ts": "2.2.1", @@ -124,7 +124,7 @@ "devDependencies": { "@biomejs/biome": "2.2.4", "@chromatic-com/storybook": "4.1.0", - "@octokit/types": "12.3.0", + "@octokit/types": "12.6.0", "@playwright/test": "1.50.1", "@storybook/addon-docs": "9.1.2", "@storybook/addon-links": "9.1.2", @@ -133,7 +133,7 @@ "@swc/core": "1.3.38", "@swc/jest": "0.2.37", "@tailwindcss/typography": "0.5.16", - "@testing-library/jest-dom": "6.6.3", + "@testing-library/jest-dom": "6.9.1", "@testing-library/react": "14.3.1", "@testing-library/user-event": "14.6.1", "@types/chroma-js": "2.4.0", @@ -144,10 +144,10 @@ "@types/jest": "29.5.14", "@types/lodash": "4.17.20", "@types/node": "20.17.16", - "@types/react": "19.1.17", + "@types/react": "19.2.2", "@types/react-color": "3.0.13", "@types/react-date-range": "1.4.4", - "@types/react-dom": "19.1.11", + "@types/react-dom": "19.2.2", "@types/react-syntax-highlighter": "15.5.13", "@types/react-virtualized-auto-sizer": "1.0.8", "@types/react-window": "1.8.8", @@ -155,7 +155,7 @@ "@types/ssh2": "1.15.5", "@types/ua-parser-js": "0.7.36", "@types/uuid": "9.0.2", - "@vitejs/plugin-react": "5.0.4", + "@vitejs/plugin-react": "5.1.0", "autoprefixer": "10.4.21", "chromatic": "11.29.0", "dpdm": "3.14.0", @@ -167,20 +167,22 @@ "jest-location-mock": "2.0.0", "jest-websocket-mock": "2.5.0", "jest_workaround": "0.1.14", - "knip": "5.64.1", + 
"jsdom": "27.0.1", + "knip": "5.66.4", "msw": "2.4.8", "postcss": "8.5.6", - "protobufjs": "7.4.0", + "protobufjs": "7.5.4", "rollup-plugin-visualizer": "5.14.0", - "rxjs": "7.8.1", + "rxjs": "7.8.2", "ssh2": "1.17.0", - "storybook": "9.1.2", + "storybook": "9.1.16", "storybook-addon-remix-react-router": "5.0.0", "tailwindcss": "3.4.18", "ts-proto": "1.181.2", "typescript": "5.6.3", - "vite": "7.1.11", - "vite-plugin-checker": "0.11.0" + "vite": "7.1.12", + "vite-plugin-checker": "0.11.0", + "vitest": "4.0.6" }, "browserslist": [ "chrome 110", @@ -189,7 +191,7 @@ ], "resolutions": { "optionator": "0.9.3", - "semver": "7.7.2" + "semver": "7.7.3" }, "engines": { "pnpm": ">=10.0.0 <11.0.0", diff --git a/site/pnpm-lock.yaml b/site/pnpm-lock.yaml index c70eff146bcee..86001e2f4244e 100644 --- a/site/pnpm-lock.yaml +++ b/site/pnpm-lock.yaml @@ -6,7 +6,7 @@ settings: overrides: optionator: 0.9.3 - semver: 7.7.2 + semver: 7.7.3 '@babel/runtime': 7.26.10 '@babel/helpers': 7.26.10 esbuild: ^0.25.0 @@ -24,7 +24,7 @@ importers: version: 1.2.1 '@emoji-mart/react': specifier: 1.1.1 - version: 1.1.1(emoji-mart@5.6.0)(react@19.1.1) + version: 1.1.1(emoji-mart@5.6.0)(react@19.2.0) '@emotion/cache': specifier: 11.14.0 version: 11.14.0 @@ -33,13 +33,13 @@ importers: version: 11.13.5 '@emotion/react': specifier: 11.14.0 - version: 11.14.0(@types/react@19.1.17)(react@19.1.1) + version: 11.14.0(@types/react@19.2.2)(react@19.2.0) '@emotion/styled': specifier: 11.14.1 - version: 11.14.1(@emotion/react@11.14.0(@types/react@19.1.17)(react@19.1.1))(@types/react@19.1.17)(react@19.1.1) + version: 11.14.1(@emotion/react@11.14.0(@types/react@19.2.2)(react@19.2.0))(@types/react@19.2.2)(react@19.2.0) '@fontsource-variable/inter': - specifier: 5.1.1 - version: 5.1.1 + specifier: 5.2.8 + version: 5.2.8 '@fontsource/fira-code': specifier: 5.2.7 version: 5.2.7 @@ -47,74 +47,74 @@ importers: specifier: 5.2.7 version: 5.2.7 '@fontsource/jetbrains-mono': - specifier: 5.2.5 - version: 5.2.5 + specifier: 
5.2.8 + version: 5.2.8 '@fontsource/source-code-pro': - specifier: 5.2.5 - version: 5.2.5 + specifier: 5.2.7 + version: 5.2.7 '@monaco-editor/react': specifier: 4.7.0 - version: 4.7.0(monaco-editor@0.53.0)(react-dom@19.1.1(react@19.1.1))(react@19.1.1) + version: 4.7.0(monaco-editor@0.55.1)(react-dom@19.2.0(react@19.2.0))(react@19.2.0) '@mui/material': specifier: 5.18.0 - version: 5.18.0(@emotion/react@11.14.0(@types/react@19.1.17)(react@19.1.1))(@emotion/styled@11.14.1(@emotion/react@11.14.0(@types/react@19.1.17)(react@19.1.1))(@types/react@19.1.17)(react@19.1.1))(@types/react@19.1.17)(react-dom@19.1.1(react@19.1.1))(react@19.1.1) + version: 5.18.0(@emotion/react@11.14.0(@types/react@19.2.2)(react@19.2.0))(@emotion/styled@11.14.1(@emotion/react@11.14.0(@types/react@19.2.2)(react@19.2.0))(@types/react@19.2.2)(react@19.2.0))(@types/react@19.2.2)(react-dom@19.2.0(react@19.2.0))(react@19.2.0) '@mui/system': specifier: 5.18.0 - version: 5.18.0(@emotion/react@11.14.0(@types/react@19.1.17)(react@19.1.1))(@emotion/styled@11.14.1(@emotion/react@11.14.0(@types/react@19.1.17)(react@19.1.1))(@types/react@19.1.17)(react@19.1.1))(@types/react@19.1.17)(react@19.1.1) + version: 5.18.0(@emotion/react@11.14.0(@types/react@19.2.2)(react@19.2.0))(@emotion/styled@11.14.1(@emotion/react@11.14.0(@types/react@19.2.2)(react@19.2.0))(@types/react@19.2.2)(react@19.2.0))(@types/react@19.2.2)(react@19.2.0) '@mui/utils': specifier: 5.17.1 - version: 5.17.1(@types/react@19.1.17)(react@19.1.1) + version: 5.17.1(@types/react@19.2.2)(react@19.2.0) '@mui/x-tree-view': specifier: 7.29.10 - version: 
7.29.10(@emotion/react@11.14.0(@types/react@19.1.17)(react@19.1.1))(@emotion/styled@11.14.1(@emotion/react@11.14.0(@types/react@19.1.17)(react@19.1.1))(@types/react@19.1.17)(react@19.1.1))(@mui/material@5.18.0(@emotion/react@11.14.0(@types/react@19.1.17)(react@19.1.1))(@emotion/styled@11.14.1(@emotion/react@11.14.0(@types/react@19.1.17)(react@19.1.1))(@types/react@19.1.17)(react@19.1.1))(@types/react@19.1.17)(react-dom@19.1.1(react@19.1.1))(react@19.1.1))(@mui/system@5.18.0(@emotion/react@11.14.0(@types/react@19.1.17)(react@19.1.1))(@emotion/styled@11.14.1(@emotion/react@11.14.0(@types/react@19.1.17)(react@19.1.1))(@types/react@19.1.17)(react@19.1.1))(@types/react@19.1.17)(react@19.1.1))(@types/react@19.1.17)(react-dom@19.1.1(react@19.1.1))(react@19.1.1) + version: 7.29.10(@emotion/react@11.14.0(@types/react@19.2.2)(react@19.2.0))(@emotion/styled@11.14.1(@emotion/react@11.14.0(@types/react@19.2.2)(react@19.2.0))(@types/react@19.2.2)(react@19.2.0))(@mui/material@5.18.0(@emotion/react@11.14.0(@types/react@19.2.2)(react@19.2.0))(@emotion/styled@11.14.1(@emotion/react@11.14.0(@types/react@19.2.2)(react@19.2.0))(@types/react@19.2.2)(react@19.2.0))(@types/react@19.2.2)(react-dom@19.2.0(react@19.2.0))(react@19.2.0))(@mui/system@5.18.0(@emotion/react@11.14.0(@types/react@19.2.2)(react@19.2.0))(@emotion/styled@11.14.1(@emotion/react@11.14.0(@types/react@19.2.2)(react@19.2.0))(@types/react@19.2.2)(react@19.2.0))(@types/react@19.2.2)(react@19.2.0))(@types/react@19.2.2)(react-dom@19.2.0(react@19.2.0))(react@19.2.0) '@radix-ui/react-avatar': - specifier: 1.1.2 - version: 1.1.2(@types/react-dom@19.1.11(@types/react@19.1.17))(@types/react@19.1.17)(react-dom@19.1.1(react@19.1.1))(react@19.1.1) + specifier: 1.1.10 + version: 1.1.10(@types/react-dom@19.2.2(@types/react@19.2.2))(@types/react@19.2.2)(react-dom@19.2.0(react@19.2.0))(react@19.2.0) '@radix-ui/react-checkbox': - specifier: 1.1.4 - version: 
1.1.4(@types/react-dom@19.1.11(@types/react@19.1.17))(@types/react@19.1.17)(react-dom@19.1.1(react@19.1.1))(react@19.1.1) + specifier: 1.3.3 + version: 1.3.3(@types/react-dom@19.2.2(@types/react@19.2.2))(@types/react@19.2.2)(react-dom@19.2.0(react@19.2.0))(react@19.2.0) '@radix-ui/react-collapsible': - specifier: 1.1.2 - version: 1.1.2(@types/react-dom@19.1.11(@types/react@19.1.17))(@types/react@19.1.17)(react-dom@19.1.1(react@19.1.1))(react@19.1.1) + specifier: 1.1.12 + version: 1.1.12(@types/react-dom@19.2.2(@types/react@19.2.2))(@types/react@19.2.2)(react-dom@19.2.0(react@19.2.0))(react@19.2.0) '@radix-ui/react-dialog': - specifier: 1.1.4 - version: 1.1.4(@types/react-dom@19.1.11(@types/react@19.1.17))(@types/react@19.1.17)(react-dom@19.1.1(react@19.1.1))(react@19.1.1) + specifier: 1.1.15 + version: 1.1.15(@types/react-dom@19.2.2(@types/react@19.2.2))(@types/react@19.2.2)(react-dom@19.2.0(react@19.2.0))(react@19.2.0) '@radix-ui/react-dropdown-menu': - specifier: 2.1.4 - version: 2.1.4(@types/react-dom@19.1.11(@types/react@19.1.17))(@types/react@19.1.17)(react-dom@19.1.1(react@19.1.1))(react@19.1.1) + specifier: 2.1.16 + version: 2.1.16(@types/react-dom@19.2.2(@types/react@19.2.2))(@types/react@19.2.2)(react-dom@19.2.0(react@19.2.0))(react@19.2.0) '@radix-ui/react-label': - specifier: 2.1.0 - version: 2.1.0(@types/react-dom@19.1.11(@types/react@19.1.17))(@types/react@19.1.17)(react-dom@19.1.1(react@19.1.1))(react@19.1.1) + specifier: 2.1.7 + version: 2.1.7(@types/react-dom@19.2.2(@types/react@19.2.2))(@types/react@19.2.2)(react-dom@19.2.0(react@19.2.0))(react@19.2.0) '@radix-ui/react-popover': - specifier: 1.1.5 - version: 1.1.5(@types/react-dom@19.1.11(@types/react@19.1.17))(@types/react@19.1.17)(react-dom@19.1.1(react@19.1.1))(react@19.1.1) + specifier: 1.1.15 + version: 1.1.15(@types/react-dom@19.2.2(@types/react@19.2.2))(@types/react@19.2.2)(react-dom@19.2.0(react@19.2.0))(react@19.2.0) '@radix-ui/react-radio-group': - specifier: 1.2.3 - version: 
1.2.3(@types/react-dom@19.1.11(@types/react@19.1.17))(@types/react@19.1.17)(react-dom@19.1.1(react@19.1.1))(react@19.1.1) + specifier: 1.3.8 + version: 1.3.8(@types/react-dom@19.2.2(@types/react@19.2.2))(@types/react@19.2.2)(react-dom@19.2.0(react@19.2.0))(react@19.2.0) '@radix-ui/react-scroll-area': - specifier: 1.2.3 - version: 1.2.3(@types/react-dom@19.1.11(@types/react@19.1.17))(@types/react@19.1.17)(react-dom@19.1.1(react@19.1.1))(react@19.1.1) + specifier: 1.2.10 + version: 1.2.10(@types/react-dom@19.2.2(@types/react@19.2.2))(@types/react@19.2.2)(react-dom@19.2.0(react@19.2.0))(react@19.2.0) '@radix-ui/react-select': specifier: 2.2.6 - version: 2.2.6(@types/react-dom@19.1.11(@types/react@19.1.17))(@types/react@19.1.17)(react-dom@19.1.1(react@19.1.1))(react@19.1.1) + version: 2.2.6(@types/react-dom@19.2.2(@types/react@19.2.2))(@types/react@19.2.2)(react-dom@19.2.0(react@19.2.0))(react@19.2.0) '@radix-ui/react-separator': specifier: 1.1.7 - version: 1.1.7(@types/react-dom@19.1.11(@types/react@19.1.17))(@types/react@19.1.17)(react-dom@19.1.1(react@19.1.1))(react@19.1.1) + version: 1.1.7(@types/react-dom@19.2.2(@types/react@19.2.2))(@types/react@19.2.2)(react-dom@19.2.0(react@19.2.0))(react@19.2.0) '@radix-ui/react-slider': - specifier: 1.2.2 - version: 1.2.2(@types/react-dom@19.1.11(@types/react@19.1.17))(@types/react@19.1.17)(react-dom@19.1.1(react@19.1.1))(react@19.1.1) + specifier: 1.3.6 + version: 1.3.6(@types/react-dom@19.2.2(@types/react@19.2.2))(@types/react@19.2.2)(react-dom@19.2.0(react@19.2.0))(react@19.2.0) '@radix-ui/react-slot': specifier: 1.2.3 - version: 1.2.3(@types/react@19.1.17)(react@19.1.1) + version: 1.2.3(@types/react@19.2.2)(react@19.2.0) '@radix-ui/react-switch': - specifier: 1.1.1 - version: 1.1.1(@types/react-dom@19.1.11(@types/react@19.1.17))(@types/react@19.1.17)(react-dom@19.1.1(react@19.1.1))(react@19.1.1) + specifier: 1.2.6 + version: 
1.2.6(@types/react-dom@19.2.2(@types/react@19.2.2))(@types/react@19.2.2)(react-dom@19.2.0(react@19.2.0))(react@19.2.0) '@radix-ui/react-tooltip': - specifier: 1.1.7 - version: 1.1.7(@types/react-dom@19.1.11(@types/react@19.1.17))(@types/react@19.1.17)(react-dom@19.1.1(react@19.1.1))(react@19.1.1) + specifier: 1.2.8 + version: 1.2.8(@types/react-dom@19.2.2(@types/react@19.2.2))(@types/react@19.2.2)(react-dom@19.2.0(react@19.2.0))(react@19.2.0) '@tanstack/react-query-devtools': specifier: 5.77.0 - version: 5.77.0(@tanstack/react-query@5.77.0(react@19.1.1))(react@19.1.1) + version: 5.77.0(@tanstack/react-query@5.77.0(react@19.2.0))(react@19.2.0) '@xterm/addon-canvas': specifier: 0.7.0 version: 0.7.0(@xterm/xterm@5.5.0) @@ -137,8 +137,8 @@ importers: specifier: 0.7.2 version: 0.7.2 axios: - specifier: 1.12.0 - version: 1.12.0 + specifier: 1.13.1 + version: 1.13.1 chroma-js: specifier: 2.6.0 version: 2.6.0 @@ -149,8 +149,8 @@ importers: specifier: 2.1.1 version: 2.1.1 cmdk: - specifier: 1.0.4 - version: 1.0.4(@types/react-dom@19.1.11(@types/react@19.1.17))(@types/react@19.1.17)(react-dom@19.1.1(react@19.1.1))(react@19.1.1) + specifier: 1.1.1 + version: 1.1.1(@types/react-dom@19.2.2(@types/react@19.2.2))(@types/react@19.2.2)(react-dom@19.2.0(react@19.2.0))(react@19.2.0) color-convert: specifier: 2.0.1 version: 2.0.1 @@ -158,8 +158,8 @@ importers: specifier: 4.9.0 version: 4.9.0 cronstrue: - specifier: 2.50.0 - version: 2.50.0 + specifier: 2.59.0 + version: 2.59.0 dayjs: specifier: 1.11.18 version: 1.11.18 @@ -171,7 +171,7 @@ importers: version: 2.0.5 formik: specifier: 2.4.6 - version: 2.4.6(react@19.1.1) + version: 2.4.6(react@19.2.0) front-matter: specifier: 4.0.2 version: 4.0.2 @@ -185,56 +185,56 @@ importers: specifier: 4.17.21 version: 4.17.21 lucide-react: - specifier: 0.545.0 - version: 0.545.0(react@19.1.1) + specifier: 0.552.0 + version: 0.552.0(react@19.2.0) monaco-editor: - specifier: 0.53.0 - version: 0.53.0 + specifier: 0.55.1 + version: 0.55.1 pretty-bytes: 
specifier: 6.1.1 version: 6.1.1 react: - specifier: 19.1.1 - version: 19.1.1 + specifier: 19.2.0 + version: 19.2.0 react-color: specifier: 2.19.3 - version: 2.19.3(react@19.1.1) + version: 2.19.3(react@19.2.0) react-confetti: specifier: 6.4.0 - version: 6.4.0(react@19.1.1) + version: 6.4.0(react@19.2.0) react-date-range: specifier: 1.4.0 - version: 1.4.0(date-fns@2.30.0)(react@19.1.1) + version: 1.4.0(date-fns@2.30.0)(react@19.2.0) react-dom: - specifier: 19.1.1 - version: 19.1.1(react@19.1.1) + specifier: 19.2.0 + version: 19.2.0(react@19.2.0) react-markdown: specifier: 9.1.0 - version: 9.1.0(@types/react@19.1.17)(react@19.1.1) + version: 9.1.0(@types/react@19.2.2)(react@19.2.0) react-query: specifier: npm:@tanstack/react-query@5.77.0 - version: '@tanstack/react-query@5.77.0(react@19.1.1)' + version: '@tanstack/react-query@5.77.0(react@19.2.0)' react-resizable-panels: specifier: 3.0.6 - version: 3.0.6(react-dom@19.1.1(react@19.1.1))(react@19.1.1) + version: 3.0.6(react-dom@19.2.0(react@19.2.0))(react@19.2.0) react-router: - specifier: 7.8.0 - version: 7.8.0(react-dom@19.1.1(react@19.1.1))(react@19.1.1) + specifier: 7.9.5 + version: 7.9.5(react-dom@19.2.0(react@19.2.0))(react@19.2.0) react-syntax-highlighter: specifier: 15.6.1 - version: 15.6.1(react@19.1.1) + version: 15.6.1(react@19.2.0) react-textarea-autosize: specifier: 8.5.9 - version: 8.5.9(@types/react@19.1.17)(react@19.1.1) + version: 8.5.9(@types/react@19.2.2)(react@19.2.0) react-virtualized-auto-sizer: specifier: 1.0.26 - version: 1.0.26(react-dom@19.1.1(react@19.1.1))(react@19.1.1) + version: 1.0.26(react-dom@19.2.0(react@19.2.0))(react@19.2.0) react-window: specifier: 1.8.11 - version: 1.8.11(react-dom@19.1.1(react@19.1.1))(react@19.1.1) + version: 1.8.11(react-dom@19.2.0(react@19.2.0))(react@19.2.0) recharts: - specifier: 2.15.0 - version: 2.15.0(react-dom@19.1.1(react@19.1.1))(react@19.1.1) + specifier: 2.15.4 + version: 2.15.4(react-dom@19.2.0(react@19.2.0))(react@19.2.0) remark-gfm: specifier: 
4.0.1 version: 4.0.1 @@ -242,8 +242,8 @@ importers: specifier: 1.5.1 version: 1.5.1 semver: - specifier: 7.7.2 - version: 7.7.2 + specifier: 7.7.3 + version: 7.7.3 tailwind-merge: specifier: 2.6.0 version: 2.6.0 @@ -260,8 +260,8 @@ importers: specifier: npm:@leeoniya/ufuzzy@1.0.10 version: '@leeoniya/ufuzzy@1.0.10' undici: - specifier: 6.21.3 - version: 6.21.3 + specifier: 6.22.0 + version: 6.22.0 unique-names-generator: specifier: 4.7.1 version: 4.7.1 @@ -280,25 +280,25 @@ importers: version: 2.2.4 '@chromatic-com/storybook': specifier: 4.1.0 - version: 4.1.0(storybook@9.1.2(@testing-library/dom@10.4.0)(msw@2.4.8(typescript@5.6.3))(prettier@3.4.1)(vite@7.1.11(@types/node@20.17.16)(jiti@1.21.7)(yaml@2.7.0))) + version: 4.1.0(storybook@9.1.16(@testing-library/dom@10.4.0)(msw@2.4.8(typescript@5.6.3))(prettier@3.4.1)(vite@7.1.12(@types/node@20.17.16)(jiti@1.21.7)(yaml@2.7.0))) '@octokit/types': - specifier: 12.3.0 - version: 12.3.0 + specifier: 12.6.0 + version: 12.6.0 '@playwright/test': specifier: 1.50.1 version: 1.50.1 '@storybook/addon-docs': specifier: 9.1.2 - version: 9.1.2(@types/react@19.1.17)(storybook@9.1.2(@testing-library/dom@10.4.0)(msw@2.4.8(typescript@5.6.3))(prettier@3.4.1)(vite@7.1.11(@types/node@20.17.16)(jiti@1.21.7)(yaml@2.7.0))) + version: 9.1.2(@types/react@19.2.2)(storybook@9.1.16(@testing-library/dom@10.4.0)(msw@2.4.8(typescript@5.6.3))(prettier@3.4.1)(vite@7.1.12(@types/node@20.17.16)(jiti@1.21.7)(yaml@2.7.0))) '@storybook/addon-links': specifier: 9.1.2 - version: 9.1.2(react@19.1.1)(storybook@9.1.2(@testing-library/dom@10.4.0)(msw@2.4.8(typescript@5.6.3))(prettier@3.4.1)(vite@7.1.11(@types/node@20.17.16)(jiti@1.21.7)(yaml@2.7.0))) + version: 9.1.2(react@19.2.0)(storybook@9.1.16(@testing-library/dom@10.4.0)(msw@2.4.8(typescript@5.6.3))(prettier@3.4.1)(vite@7.1.12(@types/node@20.17.16)(jiti@1.21.7)(yaml@2.7.0))) '@storybook/addon-themes': specifier: 9.1.2 - version: 
9.1.2(storybook@9.1.2(@testing-library/dom@10.4.0)(msw@2.4.8(typescript@5.6.3))(prettier@3.4.1)(vite@7.1.11(@types/node@20.17.16)(jiti@1.21.7)(yaml@2.7.0))) + version: 9.1.2(storybook@9.1.16(@testing-library/dom@10.4.0)(msw@2.4.8(typescript@5.6.3))(prettier@3.4.1)(vite@7.1.12(@types/node@20.17.16)(jiti@1.21.7)(yaml@2.7.0))) '@storybook/react-vite': specifier: 9.1.2 - version: 9.1.2(react-dom@19.1.1(react@19.1.1))(react@19.1.1)(rollup@4.52.5)(storybook@9.1.2(@testing-library/dom@10.4.0)(msw@2.4.8(typescript@5.6.3))(prettier@3.4.1)(vite@7.1.11(@types/node@20.17.16)(jiti@1.21.7)(yaml@2.7.0)))(typescript@5.6.3)(vite@7.1.11(@types/node@20.17.16)(jiti@1.21.7)(yaml@2.7.0)) + version: 9.1.2(react-dom@19.2.0(react@19.2.0))(react@19.2.0)(rollup@4.52.5)(storybook@9.1.16(@testing-library/dom@10.4.0)(msw@2.4.8(typescript@5.6.3))(prettier@3.4.1)(vite@7.1.12(@types/node@20.17.16)(jiti@1.21.7)(yaml@2.7.0)))(typescript@5.6.3)(vite@7.1.12(@types/node@20.17.16)(jiti@1.21.7)(yaml@2.7.0)) '@swc/core': specifier: 1.3.38 version: 1.3.38 @@ -309,11 +309,11 @@ importers: specifier: 0.5.16 version: 0.5.16(tailwindcss@3.4.18(yaml@2.7.0)) '@testing-library/jest-dom': - specifier: 6.6.3 - version: 6.6.3 + specifier: 6.9.1 + version: 6.9.1 '@testing-library/react': specifier: 14.3.1 - version: 14.3.1(@types/react@19.1.17)(react-dom@19.1.1(react@19.1.1))(react@19.1.1) + version: 14.3.1(@types/react@19.2.2)(react-dom@19.2.0(react@19.2.0))(react@19.2.0) '@testing-library/user-event': specifier: 14.6.1 version: 14.6.1(@testing-library/dom@10.4.0) @@ -342,23 +342,23 @@ importers: specifier: 20.17.16 version: 20.17.16 '@types/react': - specifier: 19.1.17 - version: 19.1.17 + specifier: 19.2.2 + version: 19.2.2 '@types/react-color': specifier: 3.0.13 - version: 3.0.13(@types/react@19.1.17) + version: 3.0.13(@types/react@19.2.2) '@types/react-date-range': specifier: 1.4.4 version: 1.4.4 '@types/react-dom': - specifier: 19.1.11 - version: 19.1.11(@types/react@19.1.17) + specifier: 19.2.2 + version: 
19.2.2(@types/react@19.2.2) '@types/react-syntax-highlighter': specifier: 15.5.13 version: 15.5.13 '@types/react-virtualized-auto-sizer': specifier: 1.0.8 - version: 1.0.8(react-dom@19.1.1(react@19.1.1))(react@19.1.1) + version: 1.0.8(react-dom@19.2.0(react@19.2.0))(react@19.2.0) '@types/react-window': specifier: 1.8.8 version: 1.8.8 @@ -375,8 +375,8 @@ importers: specifier: 9.0.2 version: 9.0.2 '@vitejs/plugin-react': - specifier: 5.0.4 - version: 5.0.4(vite@7.1.11(@types/node@20.17.16)(jiti@1.21.7)(yaml@2.7.0)) + specifier: 5.1.0 + version: 5.1.0(vite@7.1.12(@types/node@20.17.16)(jiti@1.21.7)(yaml@2.7.0)) autoprefixer: specifier: 10.4.21 version: 10.4.21(postcss@8.5.6) @@ -410,9 +410,12 @@ importers: jest_workaround: specifier: 0.1.14 version: 0.1.14(@swc/core@1.3.38)(@swc/jest@0.2.37(@swc/core@1.3.38)) + jsdom: + specifier: 27.0.1 + version: 27.0.1 knip: - specifier: 5.64.1 - version: 5.64.1(@types/node@20.17.16)(typescript@5.6.3) + specifier: 5.66.4 + version: 5.66.4(@types/node@20.17.16)(typescript@5.6.3) msw: specifier: 2.4.8 version: 2.4.8(typescript@5.6.3) @@ -420,23 +423,23 @@ importers: specifier: 8.5.6 version: 8.5.6 protobufjs: - specifier: 7.4.0 - version: 7.4.0 + specifier: 7.5.4 + version: 7.5.4 rollup-plugin-visualizer: specifier: 5.14.0 version: 5.14.0(rollup@4.52.5) rxjs: - specifier: 7.8.1 - version: 7.8.1 + specifier: 7.8.2 + version: 7.8.2 ssh2: specifier: 1.17.0 version: 1.17.0 storybook: - specifier: 9.1.2 - version: 9.1.2(@testing-library/dom@10.4.0)(msw@2.4.8(typescript@5.6.3))(prettier@3.4.1)(vite@7.1.11(@types/node@20.17.16)(jiti@1.21.7)(yaml@2.7.0)) + specifier: 9.1.16 + version: 9.1.16(@testing-library/dom@10.4.0)(msw@2.4.8(typescript@5.6.3))(prettier@3.4.1)(vite@7.1.12(@types/node@20.17.16)(jiti@1.21.7)(yaml@2.7.0)) storybook-addon-remix-react-router: specifier: 5.0.0 - version: 
5.0.0(react-dom@19.1.1(react@19.1.1))(react-router@7.8.0(react-dom@19.1.1(react@19.1.1))(react@19.1.1))(react@19.1.1)(storybook@9.1.2(@testing-library/dom@10.4.0)(msw@2.4.8(typescript@5.6.3))(prettier@3.4.1)(vite@7.1.11(@types/node@20.17.16)(jiti@1.21.7)(yaml@2.7.0))) + version: 5.0.0(react-dom@19.2.0(react@19.2.0))(react-router@7.9.5(react-dom@19.2.0(react@19.2.0))(react@19.2.0))(react@19.2.0)(storybook@9.1.16(@testing-library/dom@10.4.0)(msw@2.4.8(typescript@5.6.3))(prettier@3.4.1)(vite@7.1.12(@types/node@20.17.16)(jiti@1.21.7)(yaml@2.7.0))) tailwindcss: specifier: 3.4.18 version: 3.4.18(yaml@2.7.0) @@ -447,11 +450,14 @@ importers: specifier: 5.6.3 version: 5.6.3 vite: - specifier: 7.1.11 - version: 7.1.11(@types/node@20.17.16)(jiti@1.21.7)(yaml@2.7.0) + specifier: 7.1.12 + version: 7.1.12(@types/node@20.17.16)(jiti@1.21.7)(yaml@2.7.0) vite-plugin-checker: specifier: 0.11.0 - version: 0.11.0(@biomejs/biome@2.2.4)(eslint@8.52.0)(optionator@0.9.3)(typescript@5.6.3)(vite@7.1.11(@types/node@20.17.16)(jiti@1.21.7)(yaml@2.7.0)) + version: 0.11.0(@biomejs/biome@2.2.4)(eslint@8.52.0)(optionator@0.9.3)(typescript@5.6.3)(vite@7.1.12(@types/node@20.17.16)(jiti@1.21.7)(yaml@2.7.0)) + vitest: + specifier: 4.0.6 + version: 4.0.6(@types/debug@4.1.12)(@types/node@20.17.16)(jiti@1.21.7)(jsdom@27.0.1)(msw@2.4.8(typescript@5.6.3))(yaml@2.7.0) packages: @@ -466,20 +472,29 @@ packages: resolution: {integrity: sha512-UrcABB+4bUrFABwbluTIBErXwvbsU/V7TZWfmbgJfbkwiBuziS9gxdODUyuiecfdGQ85jglMW6juS3+z5TsKLw==, tarball: https://registry.npmjs.org/@alloc/quick-lru/-/quick-lru-5.2.0.tgz} engines: {node: '>=10'} + '@asamuzakjp/css-color@4.0.5': + resolution: {integrity: sha512-lMrXidNhPGsDjytDy11Vwlb6OIGrT3CmLg3VWNFyWkLWtijKl7xjvForlh8vuj0SHGjgl4qZEQzUmYTeQA2JFQ==, tarball: https://registry.npmjs.org/@asamuzakjp/css-color/-/css-color-4.0.5.tgz} + + '@asamuzakjp/dom-selector@6.7.3': + resolution: {integrity: 
sha512-kiGFeY+Hxf5KbPpjRLf+ffWbkos1aGo8MBfd91oxS3O57RgU3XhZrt/6UzoVF9VMpWbC3v87SRc9jxGrc9qHtQ==, tarball: https://registry.npmjs.org/@asamuzakjp/dom-selector/-/dom-selector-6.7.3.tgz} + + '@asamuzakjp/nwsapi@2.3.9': + resolution: {integrity: sha512-n8GuYSrI9bF7FFZ/SjhwevlHc8xaVlb/7HmHelnc/PZXBD2ZR49NnN9sMMuDdEGPeeRQ5d0hqlSlEpgCX3Wl0Q==, tarball: https://registry.npmjs.org/@asamuzakjp/nwsapi/-/nwsapi-2.3.9.tgz} + '@babel/code-frame@7.27.1': resolution: {integrity: sha512-cjQ7ZlQ0Mv3b47hABuTevyTuYN4i+loJKGeV9flcCgIK37cCXRh+L1bd3iBHlynerhQ7BhCkn2BPbQUL+rGqFg==, tarball: https://registry.npmjs.org/@babel/code-frame/-/code-frame-7.27.1.tgz} engines: {node: '>=6.9.0'} - '@babel/compat-data@7.28.4': - resolution: {integrity: sha512-YsmSKC29MJwf0gF8Rjjrg5LQCmyh+j/nD8/eP7f+BeoQTKYqs9RoWbjGOdy0+1Ekr68RJZMUOPVQaQisnIo4Rw==, tarball: https://registry.npmjs.org/@babel/compat-data/-/compat-data-7.28.4.tgz} + '@babel/compat-data@7.28.5': + resolution: {integrity: sha512-6uFXyCayocRbqhZOB+6XcuZbkMNimwfVGFji8CTZnCzOHVGvDqzvitu1re2AU5LROliz7eQPhB8CpAMvnx9EjA==, tarball: https://registry.npmjs.org/@babel/compat-data/-/compat-data-7.28.5.tgz} engines: {node: '>=6.9.0'} - '@babel/core@7.28.4': - resolution: {integrity: sha512-2BCOP7TN8M+gVDj7/ht3hsaO/B/n5oDbiAyyvnRlNOs+u1o+JWNYTQrmpuNp1/Wq2gcFrI01JAW+paEKDMx/CA==, tarball: https://registry.npmjs.org/@babel/core/-/core-7.28.4.tgz} + '@babel/core@7.28.5': + resolution: {integrity: sha512-e7jT4DxYvIDLk1ZHmU/m/mB19rex9sv0c2ftBtjSBv+kVM/902eh0fINUzD7UwLLNR+jU585GxUJ8/EBfAM5fw==, tarball: https://registry.npmjs.org/@babel/core/-/core-7.28.5.tgz} engines: {node: '>=6.9.0'} - '@babel/generator@7.28.3': - resolution: {integrity: sha512-3lSpxGgvnmZznmBkCRnVREPUFJv2wrv9iAoFDvADJc0ypmdOxdUtcLeBgBJ6zE0PMeTKnxeQzyk0xTBq4Ep7zw==, tarball: https://registry.npmjs.org/@babel/generator/-/generator-7.28.3.tgz} + '@babel/generator@7.28.5': + resolution: {integrity: 
sha512-3EwLFhZ38J4VyIP6WNtt2kUdW9dokXA9Cr4IVIFHuCpZ3H8/YFOl5JjZHisrn1fATPBmKKqXzDFvh9fUwHz6CQ==, tarball: https://registry.npmjs.org/@babel/generator/-/generator-7.28.5.tgz} engines: {node: '>=6.9.0'} '@babel/helper-compilation-targets@7.27.2': @@ -512,6 +527,10 @@ packages: resolution: {integrity: sha512-D2hP9eA+Sqx1kBZgzxZh0y1trbuU+JoDkiEwqhQ36nodYqJwyEIhPSdMNd7lOm/4io72luTPWH20Yda0xOuUow==, tarball: https://registry.npmjs.org/@babel/helper-validator-identifier/-/helper-validator-identifier-7.27.1.tgz} engines: {node: '>=6.9.0'} + '@babel/helper-validator-identifier@7.28.5': + resolution: {integrity: sha512-qSs4ifwzKJSV39ucNjsvc6WVHs6b7S03sOh2OcHF9UHfVPqWWALUsNUVzhSBiItjRZoLHx7nIarVjqKVusUZ1Q==, tarball: https://registry.npmjs.org/@babel/helper-validator-identifier/-/helper-validator-identifier-7.28.5.tgz} + engines: {node: '>=6.9.0'} + '@babel/helper-validator-option@7.27.1': resolution: {integrity: sha512-YvjJow9FxbhFFKDSuFnVCe2WxXk1zWc22fFePVNEaWJEu8IrZVlda6N0uHwzZrUM1il7NC9Mlp4MaJYbYd9JSg==, tarball: https://registry.npmjs.org/@babel/helper-validator-option/-/helper-validator-option-7.27.1.tgz} engines: {node: '>=6.9.0'} @@ -520,8 +539,8 @@ packages: resolution: {integrity: sha512-UPYc3SauzZ3JGgj87GgZ89JVdC5dj0AoetR5Bw6wj4niittNyFh6+eOGonYvJ1ao6B8lEa3Q3klS7ADZ53bc5g==, tarball: https://registry.npmjs.org/@babel/helpers/-/helpers-7.26.10.tgz} engines: {node: '>=6.9.0'} - '@babel/parser@7.28.4': - resolution: {integrity: sha512-yZbBqeM6TkpP9du/I2pUZnJsRMGGvOuIrhjzC1AwHwW+6he4mni6Bp/m8ijn0iOuZuPI2BfkCoSRunpyjnrQKg==, tarball: https://registry.npmjs.org/@babel/parser/-/parser-7.28.4.tgz} + '@babel/parser@7.28.5': + resolution: {integrity: sha512-KKBU1VGYR7ORr3At5HAtUQ+TV3SzRCXmA/8OdDZiLDBIZxVyzXuztPjfLd3BV1PRAQGCMWWSHYhL0F8d5uHBDQ==, tarball: https://registry.npmjs.org/@babel/parser/-/parser-7.28.5.tgz} engines: {node: '>=6.0.0'} hasBin: true @@ -640,16 +659,16 @@ packages: resolution: {integrity: 
sha512-ZCYtZciz1IWJB4U61UPu4KEaqyfj+r5T1Q5mqPo+IBpcG9kHv30Z0aD8LXPgC1trYa6rK0orRyAhqUgk4MjmEg==, tarball: https://registry.npmjs.org/@babel/traverse/-/traverse-7.27.1.tgz} engines: {node: '>=6.9.0'} - '@babel/traverse@7.28.4': - resolution: {integrity: sha512-YEzuboP2qvQavAcjgQNVgsvHIDv6ZpwXvcvjmyySP2DIMuByS/6ioU5G9pYrWHM6T2YDfc7xga9iNzYOs12CFQ==, tarball: https://registry.npmjs.org/@babel/traverse/-/traverse-7.28.4.tgz} + '@babel/traverse@7.28.5': + resolution: {integrity: sha512-TCCj4t55U90khlYkVV/0TfkJkAkUg3jZFA3Neb7unZT8CPok7iiRfaX0F+WnqWqt7OxhOn0uBKXCw4lbL8W0aQ==, tarball: https://registry.npmjs.org/@babel/traverse/-/traverse-7.28.5.tgz} engines: {node: '>=6.9.0'} '@babel/types@7.27.1': resolution: {integrity: sha512-+EzkxvLNfiUeKMgy/3luqfsCWFRXLb7U6wNQTk60tovuckwB15B191tJWvpp4HjiQWdJkCxO3Wbvc6jlk3Xb2Q==, tarball: https://registry.npmjs.org/@babel/types/-/types-7.27.1.tgz} engines: {node: '>=6.9.0'} - '@babel/types@7.28.4': - resolution: {integrity: sha512-bkFqkLhh3pMBUQQkpVgWDWq/lqzc2678eUyDlTBhRqhCHFguYYGM0Efga7tYk4TogG/3x0EEl66/OQ+WGbWB/Q==, tarball: https://registry.npmjs.org/@babel/types/-/types-7.28.4.tgz} + '@babel/types@7.28.5': + resolution: {integrity: sha512-qQ5m48eI/MFLQ5PxQj4PFaprjyCTLI37ElWMmNs0K8Lk3dVeOdNpB3ks8jc7yM5CDmVC73eMVk/trk3fgmrUpA==, tarball: https://registry.npmjs.org/@babel/types/-/types-7.28.5.tgz} engines: {node: '>=6.9.0'} '@bcoe/v8-coverage@0.2.3': @@ -727,11 +746,43 @@ packages: resolution: {integrity: sha512-IchNf6dN4tHoMFIn/7OE8LWZ19Y6q/67Bmf6vnGREv8RSbBVb9LPJxEcnwrcwX6ixSvaiGoomAUvu4YSxXrVgw==, tarball: https://registry.npmjs.org/@cspotcode/source-map-support/-/source-map-support-0.8.1.tgz} engines: {node: '>=12'} - '@emnapi/core@1.5.0': - resolution: {integrity: sha512-sbP8GzB1WDzacS8fgNPpHlp6C9VZe+SJP3F90W9rLemaQj2PzIuTEl1qDOYQf58YIpyjViI24y9aPWCjEzY2cg==, tarball: https://registry.npmjs.org/@emnapi/core/-/core-1.5.0.tgz} + '@csstools/color-helpers@5.1.0': + resolution: {integrity: 
sha512-S11EXWJyy0Mz5SYvRmY8nJYTFFd1LCNV+7cXyAgQtOOuzb4EsgfqDufL+9esx72/eLhsRdGZwaldu/h+E4t4BA==, tarball: https://registry.npmjs.org/@csstools/color-helpers/-/color-helpers-5.1.0.tgz} + engines: {node: '>=18'} + + '@csstools/css-calc@2.1.4': + resolution: {integrity: sha512-3N8oaj+0juUw/1H3YwmDDJXCgTB1gKU6Hc/bB502u9zR0q2vd786XJH9QfrKIEgFlZmhZiq6epXl4rHqhzsIgQ==, tarball: https://registry.npmjs.org/@csstools/css-calc/-/css-calc-2.1.4.tgz} + engines: {node: '>=18'} + peerDependencies: + '@csstools/css-parser-algorithms': ^3.0.5 + '@csstools/css-tokenizer': ^3.0.4 + + '@csstools/css-color-parser@3.1.0': + resolution: {integrity: sha512-nbtKwh3a6xNVIp/VRuXV64yTKnb1IjTAEEh3irzS+HkKjAOYLTGNb9pmVNntZ8iVBHcWDA2Dof0QtPgFI1BaTA==, tarball: https://registry.npmjs.org/@csstools/css-color-parser/-/css-color-parser-3.1.0.tgz} + engines: {node: '>=18'} + peerDependencies: + '@csstools/css-parser-algorithms': ^3.0.5 + '@csstools/css-tokenizer': ^3.0.4 + + '@csstools/css-parser-algorithms@3.0.5': + resolution: {integrity: sha512-DaDeUkXZKjdGhgYaHNJTV9pV7Y9B3b644jCLs9Upc3VeNGg6LWARAT6O+Q+/COo+2gg/bM5rhpMAtf70WqfBdQ==, tarball: https://registry.npmjs.org/@csstools/css-parser-algorithms/-/css-parser-algorithms-3.0.5.tgz} + engines: {node: '>=18'} + peerDependencies: + '@csstools/css-tokenizer': ^3.0.4 + + '@csstools/css-syntax-patches-for-csstree@1.0.15': + resolution: {integrity: sha512-q0p6zkVq2lJnmzZVPR33doA51G7YOja+FBvRdp5ISIthL0MtFCgYHHhR563z9WFGxcOn0WfjSkPDJ5Qig3H3Sw==, tarball: https://registry.npmjs.org/@csstools/css-syntax-patches-for-csstree/-/css-syntax-patches-for-csstree-1.0.15.tgz} + engines: {node: '>=18'} + + '@csstools/css-tokenizer@3.0.4': + resolution: {integrity: sha512-Vd/9EVDiu6PPJt9yAh6roZP6El1xHrdvIVGjyBsHR0RYwNHgL7FJPyIIW4fANJNG6FtyZfvlRPpFI4ZM/lubvw==, tarball: https://registry.npmjs.org/@csstools/css-tokenizer/-/css-tokenizer-3.0.4.tgz} + engines: {node: '>=18'} + + '@emnapi/core@1.6.0': + resolution: {integrity: 
sha512-zq/ay+9fNIJJtJiZxdTnXS20PllcYMX3OE23ESc4HK/bdYu3cOWYVhsOhVnXALfU/uqJIxn5NBPd9z4v+SfoSg==, tarball: https://registry.npmjs.org/@emnapi/core/-/core-1.6.0.tgz} - '@emnapi/runtime@1.5.0': - resolution: {integrity: sha512-97/BJ3iXHww3djw6hYIfErCZFee7qCtrneuLa20UXFCOTCfBM2cvQHjWJ2EG0s0MtdNwInarqCTz35i4wWXHsQ==, tarball: https://registry.npmjs.org/@emnapi/runtime/-/runtime-1.5.0.tgz} + '@emnapi/runtime@1.6.0': + resolution: {integrity: sha512-obtUmAHTMjll499P+D9A3axeJFlhdjOWdKUNs/U6QIGT7V5RjcUW1xToAzjvmgTSQhDbYn/NwfTRoJcQ2rNBxA==, tarball: https://registry.npmjs.org/@emnapi/runtime/-/runtime-1.6.0.tgz} '@emnapi/wasi-threads@1.1.0': resolution: {integrity: sha512-WI0DdZ8xFSbgMjR1sFsKABJ/C5OnRrjT06JXbZKexJGrDuPTzZdDYfFlsgcCXCyf+suG5QU2e/y1Wo2V/OapLQ==, tarball: https://registry.npmjs.org/@emnapi/wasi-threads/-/wasi-threads-1.1.0.tgz} @@ -808,252 +859,126 @@ packages: cpu: [ppc64] os: [aix] - '@esbuild/aix-ppc64@0.25.3': - resolution: {integrity: sha512-W8bFfPA8DowP8l//sxjJLSLkD8iEjMc7cBVyP+u4cEv9sM7mdUCkgsj+t0n/BWPFtv7WWCN5Yzj0N6FJNUUqBQ==, tarball: https://registry.npmjs.org/@esbuild/aix-ppc64/-/aix-ppc64-0.25.3.tgz} - engines: {node: '>=18'} - cpu: [ppc64] - os: [aix] - '@esbuild/android-arm64@0.25.11': resolution: {integrity: sha512-9slpyFBc4FPPz48+f6jyiXOx/Y4v34TUeDDXJpZqAWQn/08lKGeD8aDp9TMn9jDz2CiEuHwfhRmGBvpnd/PWIQ==, tarball: https://registry.npmjs.org/@esbuild/android-arm64/-/android-arm64-0.25.11.tgz} engines: {node: '>=18'} cpu: [arm64] os: [android] - '@esbuild/android-arm64@0.25.3': - resolution: {integrity: sha512-XelR6MzjlZuBM4f5z2IQHK6LkK34Cvv6Rj2EntER3lwCBFdg6h2lKbtRjpTTsdEjD/WSe1q8UyPBXP1x3i/wYQ==, tarball: https://registry.npmjs.org/@esbuild/android-arm64/-/android-arm64-0.25.3.tgz} - engines: {node: '>=18'} - cpu: [arm64] - os: [android] - '@esbuild/android-arm@0.25.11': resolution: {integrity: sha512-uoa7dU+Dt3HYsethkJ1k6Z9YdcHjTrSb5NUy66ZfZaSV8hEYGD5ZHbEMXnqLFlbBflLsl89Zke7CAdDJ4JI+Gg==, tarball: 
https://registry.npmjs.org/@esbuild/android-arm/-/android-arm-0.25.11.tgz} engines: {node: '>=18'} cpu: [arm] os: [android] - '@esbuild/android-arm@0.25.3': - resolution: {integrity: sha512-PuwVXbnP87Tcff5I9ngV0lmiSu40xw1At6i3GsU77U7cjDDB4s0X2cyFuBiDa1SBk9DnvWwnGvVaGBqoFWPb7A==, tarball: https://registry.npmjs.org/@esbuild/android-arm/-/android-arm-0.25.3.tgz} - engines: {node: '>=18'} - cpu: [arm] - os: [android] - '@esbuild/android-x64@0.25.11': resolution: {integrity: sha512-Sgiab4xBjPU1QoPEIqS3Xx+R2lezu0LKIEcYe6pftr56PqPygbB7+szVnzoShbx64MUupqoE0KyRlN7gezbl8g==, tarball: https://registry.npmjs.org/@esbuild/android-x64/-/android-x64-0.25.11.tgz} engines: {node: '>=18'} cpu: [x64] os: [android] - '@esbuild/android-x64@0.25.3': - resolution: {integrity: sha512-ogtTpYHT/g1GWS/zKM0cc/tIebFjm1F9Aw1boQ2Y0eUQ+J89d0jFY//s9ei9jVIlkYi8AfOjiixcLJSGNSOAdQ==, tarball: https://registry.npmjs.org/@esbuild/android-x64/-/android-x64-0.25.3.tgz} - engines: {node: '>=18'} - cpu: [x64] - os: [android] - '@esbuild/darwin-arm64@0.25.11': resolution: {integrity: sha512-VekY0PBCukppoQrycFxUqkCojnTQhdec0vevUL/EDOCnXd9LKWqD/bHwMPzigIJXPhC59Vd1WFIL57SKs2mg4w==, tarball: https://registry.npmjs.org/@esbuild/darwin-arm64/-/darwin-arm64-0.25.11.tgz} engines: {node: '>=18'} cpu: [arm64] os: [darwin] - '@esbuild/darwin-arm64@0.25.3': - resolution: {integrity: sha512-eESK5yfPNTqpAmDfFWNsOhmIOaQA59tAcF/EfYvo5/QWQCzXn5iUSOnqt3ra3UdzBv073ykTtmeLJZGt3HhA+w==, tarball: https://registry.npmjs.org/@esbuild/darwin-arm64/-/darwin-arm64-0.25.3.tgz} - engines: {node: '>=18'} - cpu: [arm64] - os: [darwin] - '@esbuild/darwin-x64@0.25.11': resolution: {integrity: sha512-+hfp3yfBalNEpTGp9loYgbknjR695HkqtY3d3/JjSRUyPg/xd6q+mQqIb5qdywnDxRZykIHs3axEqU6l1+oWEQ==, tarball: https://registry.npmjs.org/@esbuild/darwin-x64/-/darwin-x64-0.25.11.tgz} engines: {node: '>=18'} cpu: [x64] os: [darwin] - '@esbuild/darwin-x64@0.25.3': - resolution: {integrity: 
sha512-Kd8glo7sIZtwOLcPbW0yLpKmBNWMANZhrC1r6K++uDR2zyzb6AeOYtI6udbtabmQpFaxJ8uduXMAo1gs5ozz8A==, tarball: https://registry.npmjs.org/@esbuild/darwin-x64/-/darwin-x64-0.25.3.tgz} - engines: {node: '>=18'} - cpu: [x64] - os: [darwin] - '@esbuild/freebsd-arm64@0.25.11': resolution: {integrity: sha512-CmKjrnayyTJF2eVuO//uSjl/K3KsMIeYeyN7FyDBjsR3lnSJHaXlVoAK8DZa7lXWChbuOk7NjAc7ygAwrnPBhA==, tarball: https://registry.npmjs.org/@esbuild/freebsd-arm64/-/freebsd-arm64-0.25.11.tgz} engines: {node: '>=18'} cpu: [arm64] os: [freebsd] - '@esbuild/freebsd-arm64@0.25.3': - resolution: {integrity: sha512-EJiyS70BYybOBpJth3M0KLOus0n+RRMKTYzhYhFeMwp7e/RaajXvP+BWlmEXNk6uk+KAu46j/kaQzr6au+JcIw==, tarball: https://registry.npmjs.org/@esbuild/freebsd-arm64/-/freebsd-arm64-0.25.3.tgz} - engines: {node: '>=18'} - cpu: [arm64] - os: [freebsd] - '@esbuild/freebsd-x64@0.25.11': resolution: {integrity: sha512-Dyq+5oscTJvMaYPvW3x3FLpi2+gSZTCE/1ffdwuM6G1ARang/mb3jvjxs0mw6n3Lsw84ocfo9CrNMqc5lTfGOw==, tarball: https://registry.npmjs.org/@esbuild/freebsd-x64/-/freebsd-x64-0.25.11.tgz} engines: {node: '>=18'} cpu: [x64] os: [freebsd] - '@esbuild/freebsd-x64@0.25.3': - resolution: {integrity: sha512-Q+wSjaLpGxYf7zC0kL0nDlhsfuFkoN+EXrx2KSB33RhinWzejOd6AvgmP5JbkgXKmjhmpfgKZq24pneodYqE8Q==, tarball: https://registry.npmjs.org/@esbuild/freebsd-x64/-/freebsd-x64-0.25.3.tgz} - engines: {node: '>=18'} - cpu: [x64] - os: [freebsd] - '@esbuild/linux-arm64@0.25.11': resolution: {integrity: sha512-Qr8AzcplUhGvdyUF08A1kHU3Vr2O88xxP0Tm8GcdVOUm25XYcMPp2YqSVHbLuXzYQMf9Bh/iKx7YPqECs6ffLA==, tarball: https://registry.npmjs.org/@esbuild/linux-arm64/-/linux-arm64-0.25.11.tgz} engines: {node: '>=18'} cpu: [arm64] os: [linux] - '@esbuild/linux-arm64@0.25.3': - resolution: {integrity: sha512-xCUgnNYhRD5bb1C1nqrDV1PfkwgbswTTBRbAd8aH5PhYzikdf/ddtsYyMXFfGSsb/6t6QaPSzxtbfAZr9uox4A==, tarball: https://registry.npmjs.org/@esbuild/linux-arm64/-/linux-arm64-0.25.3.tgz} - engines: {node: '>=18'} - cpu: [arm64] - os: [linux] - 
'@esbuild/linux-arm@0.25.11': resolution: {integrity: sha512-TBMv6B4kCfrGJ8cUPo7vd6NECZH/8hPpBHHlYI3qzoYFvWu2AdTvZNuU/7hsbKWqu/COU7NIK12dHAAqBLLXgw==, tarball: https://registry.npmjs.org/@esbuild/linux-arm/-/linux-arm-0.25.11.tgz} engines: {node: '>=18'} cpu: [arm] os: [linux] - '@esbuild/linux-arm@0.25.3': - resolution: {integrity: sha512-dUOVmAUzuHy2ZOKIHIKHCm58HKzFqd+puLaS424h6I85GlSDRZIA5ycBixb3mFgM0Jdh+ZOSB6KptX30DD8YOQ==, tarball: https://registry.npmjs.org/@esbuild/linux-arm/-/linux-arm-0.25.3.tgz} - engines: {node: '>=18'} - cpu: [arm] - os: [linux] - '@esbuild/linux-ia32@0.25.11': resolution: {integrity: sha512-TmnJg8BMGPehs5JKrCLqyWTVAvielc615jbkOirATQvWWB1NMXY77oLMzsUjRLa0+ngecEmDGqt5jiDC6bfvOw==, tarball: https://registry.npmjs.org/@esbuild/linux-ia32/-/linux-ia32-0.25.11.tgz} engines: {node: '>=18'} cpu: [ia32] os: [linux] - '@esbuild/linux-ia32@0.25.3': - resolution: {integrity: sha512-yplPOpczHOO4jTYKmuYuANI3WhvIPSVANGcNUeMlxH4twz/TeXuzEP41tGKNGWJjuMhotpGabeFYGAOU2ummBw==, tarball: https://registry.npmjs.org/@esbuild/linux-ia32/-/linux-ia32-0.25.3.tgz} - engines: {node: '>=18'} - cpu: [ia32] - os: [linux] - '@esbuild/linux-loong64@0.25.11': resolution: {integrity: sha512-DIGXL2+gvDaXlaq8xruNXUJdT5tF+SBbJQKbWy/0J7OhU8gOHOzKmGIlfTTl6nHaCOoipxQbuJi7O++ldrxgMw==, tarball: https://registry.npmjs.org/@esbuild/linux-loong64/-/linux-loong64-0.25.11.tgz} engines: {node: '>=18'} cpu: [loong64] os: [linux] - '@esbuild/linux-loong64@0.25.3': - resolution: {integrity: sha512-P4BLP5/fjyihmXCELRGrLd793q/lBtKMQl8ARGpDxgzgIKJDRJ/u4r1A/HgpBpKpKZelGct2PGI4T+axcedf6g==, tarball: https://registry.npmjs.org/@esbuild/linux-loong64/-/linux-loong64-0.25.3.tgz} - engines: {node: '>=18'} - cpu: [loong64] - os: [linux] - '@esbuild/linux-mips64el@0.25.11': resolution: {integrity: sha512-Osx1nALUJu4pU43o9OyjSCXokFkFbyzjXb6VhGIJZQ5JZi8ylCQ9/LFagolPsHtgw6himDSyb5ETSfmp4rpiKQ==, tarball: https://registry.npmjs.org/@esbuild/linux-mips64el/-/linux-mips64el-0.25.11.tgz} engines: {node: 
'>=18'} cpu: [mips64el] os: [linux] - '@esbuild/linux-mips64el@0.25.3': - resolution: {integrity: sha512-eRAOV2ODpu6P5divMEMa26RRqb2yUoYsuQQOuFUexUoQndm4MdpXXDBbUoKIc0iPa4aCO7gIhtnYomkn2x+bag==, tarball: https://registry.npmjs.org/@esbuild/linux-mips64el/-/linux-mips64el-0.25.3.tgz} - engines: {node: '>=18'} - cpu: [mips64el] - os: [linux] - '@esbuild/linux-ppc64@0.25.11': resolution: {integrity: sha512-nbLFgsQQEsBa8XSgSTSlrnBSrpoWh7ioFDUmwo158gIm5NNP+17IYmNWzaIzWmgCxq56vfr34xGkOcZ7jX6CPw==, tarball: https://registry.npmjs.org/@esbuild/linux-ppc64/-/linux-ppc64-0.25.11.tgz} engines: {node: '>=18'} cpu: [ppc64] os: [linux] - '@esbuild/linux-ppc64@0.25.3': - resolution: {integrity: sha512-ZC4jV2p7VbzTlnl8nZKLcBkfzIf4Yad1SJM4ZMKYnJqZFD4rTI+pBG65u8ev4jk3/MPwY9DvGn50wi3uhdaghg==, tarball: https://registry.npmjs.org/@esbuild/linux-ppc64/-/linux-ppc64-0.25.3.tgz} - engines: {node: '>=18'} - cpu: [ppc64] - os: [linux] - '@esbuild/linux-riscv64@0.25.11': resolution: {integrity: sha512-HfyAmqZi9uBAbgKYP1yGuI7tSREXwIb438q0nqvlpxAOs3XnZ8RsisRfmVsgV486NdjD7Mw2UrFSw51lzUk1ww==, tarball: https://registry.npmjs.org/@esbuild/linux-riscv64/-/linux-riscv64-0.25.11.tgz} engines: {node: '>=18'} cpu: [riscv64] os: [linux] - '@esbuild/linux-riscv64@0.25.3': - resolution: {integrity: sha512-LDDODcFzNtECTrUUbVCs6j9/bDVqy7DDRsuIXJg6so+mFksgwG7ZVnTruYi5V+z3eE5y+BJZw7VvUadkbfg7QA==, tarball: https://registry.npmjs.org/@esbuild/linux-riscv64/-/linux-riscv64-0.25.3.tgz} - engines: {node: '>=18'} - cpu: [riscv64] - os: [linux] - '@esbuild/linux-s390x@0.25.11': resolution: {integrity: sha512-HjLqVgSSYnVXRisyfmzsH6mXqyvj0SA7pG5g+9W7ESgwA70AXYNpfKBqh1KbTxmQVaYxpzA/SvlB9oclGPbApw==, tarball: https://registry.npmjs.org/@esbuild/linux-s390x/-/linux-s390x-0.25.11.tgz} engines: {node: '>=18'} cpu: [s390x] os: [linux] - '@esbuild/linux-s390x@0.25.3': - resolution: {integrity: sha512-s+w/NOY2k0yC2p9SLen+ymflgcpRkvwwa02fqmAwhBRI3SC12uiS10edHHXlVWwfAagYSY5UpmT/zISXPMW3tQ==, tarball: 
https://registry.npmjs.org/@esbuild/linux-s390x/-/linux-s390x-0.25.3.tgz} - engines: {node: '>=18'} - cpu: [s390x] - os: [linux] - '@esbuild/linux-x64@0.25.11': resolution: {integrity: sha512-HSFAT4+WYjIhrHxKBwGmOOSpphjYkcswF449j6EjsjbinTZbp8PJtjsVK1XFJStdzXdy/jaddAep2FGY+wyFAQ==, tarball: https://registry.npmjs.org/@esbuild/linux-x64/-/linux-x64-0.25.11.tgz} engines: {node: '>=18'} cpu: [x64] os: [linux] - '@esbuild/linux-x64@0.25.3': - resolution: {integrity: sha512-nQHDz4pXjSDC6UfOE1Fw9Q8d6GCAd9KdvMZpfVGWSJztYCarRgSDfOVBY5xwhQXseiyxapkiSJi/5/ja8mRFFA==, tarball: https://registry.npmjs.org/@esbuild/linux-x64/-/linux-x64-0.25.3.tgz} - engines: {node: '>=18'} - cpu: [x64] - os: [linux] - '@esbuild/netbsd-arm64@0.25.11': resolution: {integrity: sha512-hr9Oxj1Fa4r04dNpWr3P8QKVVsjQhqrMSUzZzf+LZcYjZNqhA3IAfPQdEh1FLVUJSiu6sgAwp3OmwBfbFgG2Xg==, tarball: https://registry.npmjs.org/@esbuild/netbsd-arm64/-/netbsd-arm64-0.25.11.tgz} engines: {node: '>=18'} cpu: [arm64] os: [netbsd] - '@esbuild/netbsd-arm64@0.25.3': - resolution: {integrity: sha512-1QaLtOWq0mzK6tzzp0jRN3eccmN3hezey7mhLnzC6oNlJoUJz4nym5ZD7mDnS/LZQgkrhEbEiTn515lPeLpgWA==, tarball: https://registry.npmjs.org/@esbuild/netbsd-arm64/-/netbsd-arm64-0.25.3.tgz} - engines: {node: '>=18'} - cpu: [arm64] - os: [netbsd] - '@esbuild/netbsd-x64@0.25.11': resolution: {integrity: sha512-u7tKA+qbzBydyj0vgpu+5h5AeudxOAGncb8N6C9Kh1N4n7wU1Xw1JDApsRjpShRpXRQlJLb9wY28ELpwdPcZ7A==, tarball: https://registry.npmjs.org/@esbuild/netbsd-x64/-/netbsd-x64-0.25.11.tgz} engines: {node: '>=18'} cpu: [x64] os: [netbsd] - '@esbuild/netbsd-x64@0.25.3': - resolution: {integrity: sha512-i5Hm68HXHdgv8wkrt+10Bc50zM0/eonPb/a/OFVfB6Qvpiirco5gBA5bz7S2SHuU+Y4LWn/zehzNX14Sp4r27g==, tarball: https://registry.npmjs.org/@esbuild/netbsd-x64/-/netbsd-x64-0.25.3.tgz} - engines: {node: '>=18'} - cpu: [x64] - os: [netbsd] - '@esbuild/openbsd-arm64@0.25.11': resolution: {integrity: 
sha512-Qq6YHhayieor3DxFOoYM1q0q1uMFYb7cSpLD2qzDSvK1NAvqFi8Xgivv0cFC6J+hWVw2teCYltyy9/m/14ryHg==, tarball: https://registry.npmjs.org/@esbuild/openbsd-arm64/-/openbsd-arm64-0.25.11.tgz} engines: {node: '>=18'} cpu: [arm64] os: [openbsd] - '@esbuild/openbsd-arm64@0.25.3': - resolution: {integrity: sha512-zGAVApJEYTbOC6H/3QBr2mq3upG/LBEXr85/pTtKiv2IXcgKV0RT0QA/hSXZqSvLEpXeIxah7LczB4lkiYhTAQ==, tarball: https://registry.npmjs.org/@esbuild/openbsd-arm64/-/openbsd-arm64-0.25.3.tgz} - engines: {node: '>=18'} - cpu: [arm64] - os: [openbsd] - '@esbuild/openbsd-x64@0.25.11': resolution: {integrity: sha512-CN+7c++kkbrckTOz5hrehxWN7uIhFFlmS/hqziSFVWpAzpWrQoAG4chH+nN3Be+Kzv/uuo7zhX716x3Sn2Jduw==, tarball: https://registry.npmjs.org/@esbuild/openbsd-x64/-/openbsd-x64-0.25.11.tgz} engines: {node: '>=18'} cpu: [x64] os: [openbsd] - '@esbuild/openbsd-x64@0.25.3': - resolution: {integrity: sha512-fpqctI45NnCIDKBH5AXQBsD0NDPbEFczK98hk/aa6HJxbl+UtLkJV2+Bvy5hLSLk3LHmqt0NTkKNso1A9y1a4w==, tarball: https://registry.npmjs.org/@esbuild/openbsd-x64/-/openbsd-x64-0.25.3.tgz} - engines: {node: '>=18'} - cpu: [x64] - os: [openbsd] - '@esbuild/openharmony-arm64@0.25.11': resolution: {integrity: sha512-rOREuNIQgaiR+9QuNkbkxubbp8MSO9rONmwP5nKncnWJ9v5jQ4JxFnLu4zDSRPf3x4u+2VN4pM4RdyIzDty/wQ==, tarball: https://registry.npmjs.org/@esbuild/openharmony-arm64/-/openharmony-arm64-0.25.11.tgz} engines: {node: '>=18'} @@ -1066,56 +991,32 @@ packages: cpu: [x64] os: [sunos] - '@esbuild/sunos-x64@0.25.3': - resolution: {integrity: sha512-ROJhm7d8bk9dMCUZjkS8fgzsPAZEjtRJqCAmVgB0gMrvG7hfmPmz9k1rwO4jSiblFjYmNvbECL9uhaPzONMfgA==, tarball: https://registry.npmjs.org/@esbuild/sunos-x64/-/sunos-x64-0.25.3.tgz} - engines: {node: '>=18'} - cpu: [x64] - os: [sunos] - '@esbuild/win32-arm64@0.25.11': resolution: {integrity: sha512-3XxECOWJq1qMZ3MN8srCJ/QfoLpL+VaxD/WfNRm1O3B4+AZ/BnLVgFbUV3eiRYDMXetciH16dwPbbHqwe1uU0Q==, tarball: https://registry.npmjs.org/@esbuild/win32-arm64/-/win32-arm64-0.25.11.tgz} engines: {node: 
'>=18'} cpu: [arm64] os: [win32] - '@esbuild/win32-arm64@0.25.3': - resolution: {integrity: sha512-YWcow8peiHpNBiIXHwaswPnAXLsLVygFwCB3A7Bh5jRkIBFWHGmNQ48AlX4xDvQNoMZlPYzjVOQDYEzWCqufMQ==, tarball: https://registry.npmjs.org/@esbuild/win32-arm64/-/win32-arm64-0.25.3.tgz} - engines: {node: '>=18'} - cpu: [arm64] - os: [win32] - '@esbuild/win32-ia32@0.25.11': resolution: {integrity: sha512-3ukss6gb9XZ8TlRyJlgLn17ecsK4NSQTmdIXRASVsiS2sQ6zPPZklNJT5GR5tE/MUarymmy8kCEf5xPCNCqVOA==, tarball: https://registry.npmjs.org/@esbuild/win32-ia32/-/win32-ia32-0.25.11.tgz} engines: {node: '>=18'} cpu: [ia32] os: [win32] - '@esbuild/win32-ia32@0.25.3': - resolution: {integrity: sha512-qspTZOIGoXVS4DpNqUYUs9UxVb04khS1Degaw/MnfMe7goQ3lTfQ13Vw4qY/Nj0979BGvMRpAYbs/BAxEvU8ew==, tarball: https://registry.npmjs.org/@esbuild/win32-ia32/-/win32-ia32-0.25.3.tgz} - engines: {node: '>=18'} - cpu: [ia32] - os: [win32] - '@esbuild/win32-x64@0.25.11': resolution: {integrity: sha512-D7Hpz6A2L4hzsRpPaCYkQnGOotdUpDzSGRIv9I+1ITdHROSFUWW95ZPZWQmGka1Fg7W3zFJowyn9WGwMJ0+KPA==, tarball: https://registry.npmjs.org/@esbuild/win32-x64/-/win32-x64-0.25.11.tgz} engines: {node: '>=18'} cpu: [x64] os: [win32] - '@esbuild/win32-x64@0.25.3': - resolution: {integrity: sha512-ICgUR+kPimx0vvRzf+N/7L7tVSQeE3BYY+NhHRHXS1kBuPO7z2+7ea2HbhDyZdTephgvNvKrlDDKUexuCVBVvg==, tarball: https://registry.npmjs.org/@esbuild/win32-x64/-/win32-x64-0.25.3.tgz} - engines: {node: '>=18'} - cpu: [x64] - os: [win32] - '@eslint-community/eslint-utils@4.9.0': resolution: {integrity: sha512-ayVFHdtZ+hsq1t2Dy24wCmGXGe4q9Gu3smhLYALJrr473ZH27MsnSL+LKUlimp4BWJqMDMLmPpx/Q9R3OAlL4g==, tarball: https://registry.npmjs.org/@eslint-community/eslint-utils/-/eslint-utils-4.9.0.tgz} engines: {node: ^12.22.0 || ^14.17.0 || >=16.0.0} peerDependencies: eslint: ^6.0.0 || ^7.0.0 || >=8.0.0 - '@eslint-community/regexpp@4.12.1': - resolution: {integrity: sha512-CCZCDJuduB9OUkFkY2IgppNZMi2lBQgD2qzwXkEia16cge2pijY/aXi96CJMquDMn3nJdlPV1A5KrJEXwfLNzQ==, tarball: 
https://registry.npmjs.org/@eslint-community/regexpp/-/regexpp-4.12.1.tgz} + '@eslint-community/regexpp@4.12.2': + resolution: {integrity: sha512-EriSTlt5OC9/7SXkRSCAhfSxxoSUgBm33OH+IkwbdpgoqsSsUg7y3uh+IICI/Qg4BBWr3U2i39RpmycbxMq4ew==, tarball: https://registry.npmjs.org/@eslint-community/regexpp/-/regexpp-4.12.2.tgz} engines: {node: ^12.0.0 || ^14.0.0 || >=16.0.0} '@eslint/eslintrc@2.1.4': @@ -1126,24 +1027,12 @@ packages: resolution: {integrity: sha512-mjZVbpaeMZludF2fsWLD0Z9gCref1Tk4i9+wddjRvpUNqqcndPkBD09N/Mapey0b3jaXbLm2kICwFv2E64QinA==, tarball: https://registry.npmjs.org/@eslint/js/-/js-8.52.0.tgz} engines: {node: ^12.22.0 || ^14.17.0 || >=16.0.0} - '@floating-ui/core@1.6.9': - resolution: {integrity: sha512-uMXCuQ3BItDUbAMhIXw7UPXRfAlOAvZzdK9BWpE60MCn+Svt3aLn9jsPTi/WNGlRUu2uI0v5S7JiIUsbsvh3fw==, tarball: https://registry.npmjs.org/@floating-ui/core/-/core-1.6.9.tgz} - '@floating-ui/core@1.7.3': resolution: {integrity: sha512-sGnvb5dmrJaKEZ+LDIpguvdX3bDlEllmv4/ClQ9awcmCZrlx5jQyyMWFM5kBI+EyNOCDDiKk8il0zeuX3Zlg/w==, tarball: https://registry.npmjs.org/@floating-ui/core/-/core-1.7.3.tgz} - '@floating-ui/dom@1.6.13': - resolution: {integrity: sha512-umqzocjDgNRGTuO7Q8CU32dkHkECqI8ZdMZ5Swb6QAM0t5rnlrN3lGo1hdpscRd3WS8T6DKYK4ephgIH9iRh3w==, tarball: https://registry.npmjs.org/@floating-ui/dom/-/dom-1.6.13.tgz} - '@floating-ui/dom@1.7.4': resolution: {integrity: sha512-OOchDgh4F2CchOX94cRVqhvy7b3AFb+/rQXyswmzmGakRfkMgoWVjfnLWkRirfLEfuD4ysVW16eXzwt3jHIzKA==, tarball: https://registry.npmjs.org/@floating-ui/dom/-/dom-1.7.4.tgz} - '@floating-ui/react-dom@2.1.2': - resolution: {integrity: sha512-06okr5cgPzMNBy+Ycse2A6udMi4bqwW/zgBF/rwjcNqWkyr82Mcg8b0vjX8OJpZFy/FKjJmw6wV7t44kK6kW7A==, tarball: https://registry.npmjs.org/@floating-ui/react-dom/-/react-dom-2.1.2.tgz} - peerDependencies: - react: '>=16.8.0' - react-dom: '>=16.8.0' - '@floating-ui/react-dom@2.1.6': resolution: {integrity: 
sha512-4JX6rEatQEvlmgU80wZyq9RT96HZJa88q8hp0pBd+LrczeDI4o6uA2M+uvxngVHo4Ihr8uibXxH6+70zhAFrVw==, tarball: https://registry.npmjs.org/@floating-ui/react-dom/-/react-dom-2.1.6.tgz} peerDependencies: @@ -1153,11 +1042,8 @@ packages: '@floating-ui/utils@0.2.10': resolution: {integrity: sha512-aGTxbpbg8/b5JfU1HXSrbH3wXZuLPJcNEcZQFMxLs3oSzgtVu6nFPkbbGGUvBcUjKV2YyB9Wxxabo+HEH9tcRQ==, tarball: https://registry.npmjs.org/@floating-ui/utils/-/utils-0.2.10.tgz} - '@floating-ui/utils@0.2.9': - resolution: {integrity: sha512-MDWhGtE+eHw5JW7lq4qhc5yRLS11ERl1c7Z6Xd0a58DozHES6EnNNwUWbMiG4J9Cgj053Bhk8zvlhFYKVhULwg==, tarball: https://registry.npmjs.org/@floating-ui/utils/-/utils-0.2.9.tgz} - - '@fontsource-variable/inter@5.1.1': - resolution: {integrity: sha512-OpXFTmiH6tHkYijMvQTycFKBLK4X+SRV6tet1m4YOUH7SzIIlMqDja+ocDtiCA72UthBH/vF+3ZtlMr2rN/wIw==, tarball: https://registry.npmjs.org/@fontsource-variable/inter/-/inter-5.1.1.tgz} + '@fontsource-variable/inter@5.2.8': + resolution: {integrity: sha512-kOfP2D+ykbcX/P3IFnokOhVRNoTozo5/JxhAIVYLpea/UBmCQ/YWPBfWIDuBImXX/15KH+eKh4xpEUyS2sQQGQ==, tarball: https://registry.npmjs.org/@fontsource-variable/inter/-/inter-5.2.8.tgz} '@fontsource/fira-code@5.2.7': resolution: {integrity: sha512-tnB9NNund9TwIym8/7DMJe573nlPEQb+fKUV5GL8TBYXjIhDvL0D7mgmNVNQUPhXp+R7RylQeiBdkA4EbOHPGQ==, tarball: https://registry.npmjs.org/@fontsource/fira-code/-/fira-code-5.2.7.tgz} @@ -1165,11 +1051,11 @@ packages: '@fontsource/ibm-plex-mono@5.2.7': resolution: {integrity: sha512-MKAb8qV+CaiMQn2B0dIi1OV3565NYzp3WN5b4oT6LTkk+F0jR6j0ZN+5BKJiIhffDC3rtBULsYZE65+0018z9w==, tarball: https://registry.npmjs.org/@fontsource/ibm-plex-mono/-/ibm-plex-mono-5.2.7.tgz} - '@fontsource/jetbrains-mono@5.2.5': - resolution: {integrity: sha512-TPZ9b/uq38RMdrlZZkl0RwN8Ju9JxuqMETrw76pUQFbGtE1QbwQaNsLlnUrACNNBNbd0NZRXiJJSkC8ajPgbew==, tarball: https://registry.npmjs.org/@fontsource/jetbrains-mono/-/jetbrains-mono-5.2.5.tgz} + '@fontsource/jetbrains-mono@5.2.8': + resolution: {integrity: 
sha512-6w8/SG4kqvIMu7xd7wt6x3idn1Qux3p9N62s6G3rfldOUYHpWcc2FKrqf+Vo44jRvqWj2oAtTHrZXEP23oSKwQ==, tarball: https://registry.npmjs.org/@fontsource/jetbrains-mono/-/jetbrains-mono-5.2.8.tgz} - '@fontsource/source-code-pro@5.2.5': - resolution: {integrity: sha512-1k7b9IdhVSdK/rJ8CkqqGFZ01C3NaXNynPZqKaTetODog/GPJiMYd6E8z+LTwSUTIX8dm2QZORDC+Uh91cjXSg==, tarball: https://registry.npmjs.org/@fontsource/source-code-pro/-/source-code-pro-5.2.5.tgz} + '@fontsource/source-code-pro@5.2.7': + resolution: {integrity: sha512-7papq9TH94KT+S5VSY8cU7tFmwuGkIe3qxXRMscuAXH6AjMU+KJI75f28FzgBVDrlMfA0jjlTV4/x5+H5o/5EQ==, tarball: https://registry.npmjs.org/@fontsource/source-code-pro/-/source-code-pro-5.2.7.tgz} '@humanwhocodes/config-array@0.11.14': resolution: {integrity: sha512-3T8LkOmg45BV5FICb15QQMsyUSWrQ8AygVfC7ZG32zOalnqrilm018ZVCw0eapXux8FtA33q8PSRSstjee3jSg==, tarball: https://registry.npmjs.org/@humanwhocodes/config-array/-/config-array-0.11.14.tgz} @@ -1471,8 +1357,8 @@ packages: '@emotion/styled': optional: true - '@napi-rs/wasm-runtime@1.0.5': - resolution: {integrity: sha512-TBr9Cf9onSAS2LQ2+QHx6XcC6h9+RIzJgbqG3++9TUZSH204AwEy5jg3BTQ0VATsyoGj4ee49tN/y6rvaOOtcg==, tarball: https://registry.npmjs.org/@napi-rs/wasm-runtime/-/wasm-runtime-1.0.5.tgz} + '@napi-rs/wasm-runtime@1.0.7': + resolution: {integrity: sha512-SeDnOO0Tk7Okiq6DbXmmBODgOAb9dp9gjlphokTUxmt8U3liIP1ZsozBahH69j/RJv+Rfs6IwUKHTgQYJ/HBAw==, tarball: https://registry.npmjs.org/@napi-rs/wasm-runtime/-/wasm-runtime-1.0.7.tgz} '@neoconfetti/react@1.0.0': resolution: {integrity: sha512-klcSooChXXOzIm+SE5IISIAn3bYzYfPjbX7D7HoqZL84oAfgREeSg5vSIaSFH+DaGzzvImTyWe1OyrJ67vik4A==, tarball: https://registry.npmjs.org/@neoconfetti/react/-/react-1.0.0.tgz} @@ -1489,11 +1375,11 @@ packages: resolution: {integrity: sha512-oGB+UxlgWcgQkgwo8GcEGwemoTFt3FIO9ababBmaGwXIoBKZ+GTy0pP185beGg7Llih/NSHSV2XAs1lnznocSg==, tarball: https://registry.npmjs.org/@nodelib/fs.walk/-/fs.walk-1.2.8.tgz} engines: {node: '>= 8'} - 
'@octokit/openapi-types@19.0.2': - resolution: {integrity: sha512-8li32fUDUeml/ACRp/njCWTsk5t17cfTM1jp9n08pBrqs5cDFJubtjsSnuz56r5Tad6jdEPJld7LxNp9dNcyjQ==, tarball: https://registry.npmjs.org/@octokit/openapi-types/-/openapi-types-19.0.2.tgz} + '@octokit/openapi-types@20.0.0': + resolution: {integrity: sha512-EtqRBEjp1dL/15V7WiX5LJMIxxkdiGJnabzYx5Apx4FkQIFgAfKumXeYAqqJCj1s+BMX4cPFIFC4OLCR6stlnA==, tarball: https://registry.npmjs.org/@octokit/openapi-types/-/openapi-types-20.0.0.tgz} - '@octokit/types@12.3.0': - resolution: {integrity: sha512-nJ8X2HRr234q3w/FcovDlA+ttUU4m1eJAourvfUUtwAWeqL8AsyRqfnLvVnYn3NFbUnsmzQCzLNdFerPwdmcDQ==, tarball: https://registry.npmjs.org/@octokit/types/-/types-12.3.0.tgz} + '@octokit/types@12.6.0': + resolution: {integrity: sha512-1rhSOfRa6H9w4YwK0yrf5faDaDTb+yLyBUKOCV4xtCDB5VmIPqd/v9yr9o6SAzOAlRxMiRiCic6JVM1/kunVkw==, tarball: https://registry.npmjs.org/@octokit/types/-/types-12.6.0.tgz} '@open-draft/deferred-promise@2.2.0': resolution: {integrity: sha512-CecwLWx3rhxVQF6V4bAgPS5t+So2sTbPgAzafKkVizyi7tlwpcFpdFqq+wqF2OwNBmqFuu6tOyouTuxgpMfzmA==, tarball: https://registry.npmjs.org/@open-draft/deferred-promise/-/deferred-promise-2.2.0.tgz} @@ -1504,98 +1390,98 @@ packages: '@open-draft/until@2.1.0': resolution: {integrity: sha512-U69T3ItWHvLwGg5eJ0n3I62nWuE6ilHlmz7zM0npLBRvPRd7e6NYmg54vvRtP5mZG7kZqZCFVdsTWo7BPtBujg==, tarball: https://registry.npmjs.org/@open-draft/until/-/until-2.1.0.tgz} - '@oxc-resolver/binding-android-arm-eabi@11.8.4': - resolution: {integrity: sha512-6BjMji0TcvQfJ4EoSunOSyu/SiyHKficBD0V3Y0NxF0beaNnnZ7GYEi2lHmRNnRCuIPK8IuVqQ6XizYau+CkKw==, tarball: https://registry.npmjs.org/@oxc-resolver/binding-android-arm-eabi/-/binding-android-arm-eabi-11.8.4.tgz} + '@oxc-resolver/binding-android-arm-eabi@11.12.0': + resolution: {integrity: sha512-/IfGWLNdmS1kVYM2g+Xw4qXNWtCPZ/i5YMprflA8FC3vAjT4N0VucQcDxUPHxatQwre4qnhbFFWqRa1mz6Cgkw==, tarball: 
https://registry.npmjs.org/@oxc-resolver/binding-android-arm-eabi/-/binding-android-arm-eabi-11.12.0.tgz} cpu: [arm] os: [android] - '@oxc-resolver/binding-android-arm64@11.8.4': - resolution: {integrity: sha512-SxF4X6rzCBS9XNPXKZGoIHIABjfGmtQpEgRBDzpDHx5VTuLAUmwLTHXnVBAZoX5bmnhF79RiMElavzFdJ2cA1A==, tarball: https://registry.npmjs.org/@oxc-resolver/binding-android-arm64/-/binding-android-arm64-11.8.4.tgz} + '@oxc-resolver/binding-android-arm64@11.12.0': + resolution: {integrity: sha512-H3Ehyinfx2VO8F5TwdaD/WY686Ia6J1H3LP0tgpNjlPGH2TrTniPERiwjqtOm/xHEef0KJvb/yfmUKLbHudhCA==, tarball: https://registry.npmjs.org/@oxc-resolver/binding-android-arm64/-/binding-android-arm64-11.12.0.tgz} cpu: [arm64] os: [android] - '@oxc-resolver/binding-darwin-arm64@11.8.4': - resolution: {integrity: sha512-8zWeERrzgscAniE6kh1TQ4E7GJyglYsvdoKrHYLBCbHWD+0/soffiwAYxZuckKEQSc2RXMSPjcu+JTCALaY0Dw==, tarball: https://registry.npmjs.org/@oxc-resolver/binding-darwin-arm64/-/binding-darwin-arm64-11.8.4.tgz} + '@oxc-resolver/binding-darwin-arm64@11.12.0': + resolution: {integrity: sha512-hmm+A/0WdEtIeBrPtUHoSTzJefrZkhGSrmv5pwELKiqNqd+/gctzmTlt6wWrU8BMIryDMT9fWqLSQ3+NYfqAEA==, tarball: https://registry.npmjs.org/@oxc-resolver/binding-darwin-arm64/-/binding-darwin-arm64-11.12.0.tgz} cpu: [arm64] os: [darwin] - '@oxc-resolver/binding-darwin-x64@11.8.4': - resolution: {integrity: sha512-BUwggKz8Hi5uEQ0AeVTSun1+sp4lzNcItn+L7fDsHu5Cx0Zueuo10BtVm+dIwmYVVPL5oGYOeD0fS7MKAazKiw==, tarball: https://registry.npmjs.org/@oxc-resolver/binding-darwin-x64/-/binding-darwin-x64-11.8.4.tgz} + '@oxc-resolver/binding-darwin-x64@11.12.0': + resolution: {integrity: sha512-g1tVu53EMfuRKs67o0PZR0+y/WXl/Tfn3d2ggjK3Hj17pQPcb9x1+Y6W7n4EjIDttwLZbCPCEr06X+aC03I45A==, tarball: https://registry.npmjs.org/@oxc-resolver/binding-darwin-x64/-/binding-darwin-x64-11.12.0.tgz} cpu: [x64] os: [darwin] - '@oxc-resolver/binding-freebsd-x64@11.8.4': - resolution: {integrity: 
sha512-fPO5TQhnn8gA6yP4o49lc4Gn8KeDwAp9uYd4PlE3Q00JVqU6cY9WecDhYHrWtiFcyoZ8UVBlIxuhRqT/DP4Z4A==, tarball: https://registry.npmjs.org/@oxc-resolver/binding-freebsd-x64/-/binding-freebsd-x64-11.8.4.tgz} + '@oxc-resolver/binding-freebsd-x64@11.12.0': + resolution: {integrity: sha512-TiMatzvcVMSOiAx8sbnAw7UCfQpZDlm91ItywZrSHlQIJqDBipOmjIEYUMc2p823Y+fJ2ADL5UBjUB2kfqpedw==, tarball: https://registry.npmjs.org/@oxc-resolver/binding-freebsd-x64/-/binding-freebsd-x64-11.12.0.tgz} cpu: [x64] os: [freebsd] - '@oxc-resolver/binding-linux-arm-gnueabihf@11.8.4': - resolution: {integrity: sha512-QuNbdUaVGiP0W0GrXsvCDZjqeL4lZGU7aXlx/S2tCvyTk3wh6skoiLJgqUf/eeqXfUPnzTfntYqyfolzCAyBYA==, tarball: https://registry.npmjs.org/@oxc-resolver/binding-linux-arm-gnueabihf/-/binding-linux-arm-gnueabihf-11.8.4.tgz} + '@oxc-resolver/binding-linux-arm-gnueabihf@11.12.0': + resolution: {integrity: sha512-zU+9UgxPIvfReqmRr/dqZt3387HPgcH0hA4u0QGE+280EFjBYYL2rxGDxK0L+keO6vc2+ITWVDXm9KIj+alofg==, tarball: https://registry.npmjs.org/@oxc-resolver/binding-linux-arm-gnueabihf/-/binding-linux-arm-gnueabihf-11.12.0.tgz} cpu: [arm] os: [linux] - '@oxc-resolver/binding-linux-arm-musleabihf@11.8.4': - resolution: {integrity: sha512-p/zLMfza8OsC4BDKxqeZ9Qel+4eA/oiMSyKLRkMrTgt6OWQq1d5nHntjfG35Abcw4ev6Q9lRU3NOW5hj0xlUbw==, tarball: https://registry.npmjs.org/@oxc-resolver/binding-linux-arm-musleabihf/-/binding-linux-arm-musleabihf-11.8.4.tgz} + '@oxc-resolver/binding-linux-arm-musleabihf@11.12.0': + resolution: {integrity: sha512-dfO1rrOeELYWD/BewMCp81k1I3pOdtAi2VCKg/A1I8z0uI4OR6cThb5dV9fpHkj7zlb0Y5iZFPe+NTbI/u1MgQ==, tarball: https://registry.npmjs.org/@oxc-resolver/binding-linux-arm-musleabihf/-/binding-linux-arm-musleabihf-11.12.0.tgz} cpu: [arm] os: [linux] - '@oxc-resolver/binding-linux-arm64-gnu@11.8.4': - resolution: {integrity: sha512-bvJF9wWxF1+a5YZATlS5JojpOMC7OsnTatA6sXVHoOb7MIigjledYB5ZMAeRrnWWexRMiEX3YSaA46oSfOzmOg==, tarball: 
https://registry.npmjs.org/@oxc-resolver/binding-linux-arm64-gnu/-/binding-linux-arm64-gnu-11.8.4.tgz} + '@oxc-resolver/binding-linux-arm64-gnu@11.12.0': + resolution: {integrity: sha512-JJNyN1ueryETKTUsG57+u0GDbtHKVcwcUoC6YyJmDdWE0o/3twXtHuS+F/121a2sVK8PKlROqGAev+STx3AuuQ==, tarball: https://registry.npmjs.org/@oxc-resolver/binding-linux-arm64-gnu/-/binding-linux-arm64-gnu-11.12.0.tgz} cpu: [arm64] os: [linux] - '@oxc-resolver/binding-linux-arm64-musl@11.8.4': - resolution: {integrity: sha512-gf4nwGBfu+EFwOn5p7/T7VF4jmIdfodwJS9MRkOBHvuAm3LQgCX7O6d3Y80mm0TV7ZMRD/trfW628rHfd5++vQ==, tarball: https://registry.npmjs.org/@oxc-resolver/binding-linux-arm64-musl/-/binding-linux-arm64-musl-11.8.4.tgz} + '@oxc-resolver/binding-linux-arm64-musl@11.12.0': + resolution: {integrity: sha512-rQHoxL0H0WwYUuukPUscLyzWwTl/hyogptYsY+Ye6AggJEOuvgJxMum2glY7etGIGOXxrfjareHnNO1tNY7WYg==, tarball: https://registry.npmjs.org/@oxc-resolver/binding-linux-arm64-musl/-/binding-linux-arm64-musl-11.12.0.tgz} cpu: [arm64] os: [linux] - '@oxc-resolver/binding-linux-ppc64-gnu@11.8.4': - resolution: {integrity: sha512-T120R5GIzRd41rYWWKCI6cSYrZjmRQzf3X4xeE1WX396Uabz5DX8KU7RnVHihSK+KDxccCVOFBxcH3ITd+IEpw==, tarball: https://registry.npmjs.org/@oxc-resolver/binding-linux-ppc64-gnu/-/binding-linux-ppc64-gnu-11.8.4.tgz} + '@oxc-resolver/binding-linux-ppc64-gnu@11.12.0': + resolution: {integrity: sha512-XPUZSctO+FrC0314Tcth+GrTtzy2yaYqyl8weBMAbKFMwuV8VnR2SHg9dmtI9vkukmM3auOLj0Kqjpl3YXwXiw==, tarball: https://registry.npmjs.org/@oxc-resolver/binding-linux-ppc64-gnu/-/binding-linux-ppc64-gnu-11.12.0.tgz} cpu: [ppc64] os: [linux] - '@oxc-resolver/binding-linux-riscv64-gnu@11.8.4': - resolution: {integrity: sha512-PVG7SxBFFjAaQ76p9O/0Xt5mTBlziRwpck+6cRNhy/hbWY/hSt8BFfPqw0EDSfnl40Uuh+NPsHFMnaWWyxbQEg==, tarball: https://registry.npmjs.org/@oxc-resolver/binding-linux-riscv64-gnu/-/binding-linux-riscv64-gnu-11.8.4.tgz} + '@oxc-resolver/binding-linux-riscv64-gnu@11.12.0': + resolution: {integrity: 
sha512-AmMjcP+6zHLF1JNq/p3yPEcXmZW/Xw5Xl19Zd0eBCSyGORJRuUOkcnyC8bwMO43b/G7PtausB83fclnFL5KZ3w==, tarball: https://registry.npmjs.org/@oxc-resolver/binding-linux-riscv64-gnu/-/binding-linux-riscv64-gnu-11.12.0.tgz} cpu: [riscv64] os: [linux] - '@oxc-resolver/binding-linux-riscv64-musl@11.8.4': - resolution: {integrity: sha512-L0OklUhM2qLGaKvPSyKmwWpoijfc++VJtPyVgz031ShOXyo0WjD0ZGzusyJMsA1a/gdulAmN6CQ/0Sf4LGXEcw==, tarball: https://registry.npmjs.org/@oxc-resolver/binding-linux-riscv64-musl/-/binding-linux-riscv64-musl-11.8.4.tgz} + '@oxc-resolver/binding-linux-riscv64-musl@11.12.0': + resolution: {integrity: sha512-K2/yFBqFQOKyVwQxYDAKqDtk2kS4g58aGyj/R1bvYPr2P7v7971aUG/5m2WD5u2zSqWBfu1o4PdhX0lsqvA3vQ==, tarball: https://registry.npmjs.org/@oxc-resolver/binding-linux-riscv64-musl/-/binding-linux-riscv64-musl-11.12.0.tgz} cpu: [riscv64] os: [linux] - '@oxc-resolver/binding-linux-s390x-gnu@11.8.4': - resolution: {integrity: sha512-18Ajz5hqO4cRGuoHzLFUsIPod9GIaIRDiXFg2m6CS3NgVdHx7iCZscplYH7KtjdE42M8nGWYMyyq5BOk7QVgPw==, tarball: https://registry.npmjs.org/@oxc-resolver/binding-linux-s390x-gnu/-/binding-linux-s390x-gnu-11.8.4.tgz} + '@oxc-resolver/binding-linux-s390x-gnu@11.12.0': + resolution: {integrity: sha512-uSl4jo78tONGZtwsOA4ldT/OI7/hoHJhSMlGYE4Z/lzwMjkAaBdX4soAK5P/rL+U2yCJlRMnnoUckhXlZvDbSw==, tarball: https://registry.npmjs.org/@oxc-resolver/binding-linux-s390x-gnu/-/binding-linux-s390x-gnu-11.12.0.tgz} cpu: [s390x] os: [linux] - '@oxc-resolver/binding-linux-x64-gnu@11.8.4': - resolution: {integrity: sha512-uHvH4RyYBdQ/lFGV9H+R1ScHg6EBnAhE3mnX+u+mO/btnalvg7j80okuHf8Qw0tLQiP5P1sEBoVeE6zviXY9IA==, tarball: https://registry.npmjs.org/@oxc-resolver/binding-linux-x64-gnu/-/binding-linux-x64-gnu-11.8.4.tgz} + '@oxc-resolver/binding-linux-x64-gnu@11.12.0': + resolution: {integrity: sha512-YjL8VAkbPyQ1kUuR6pOBk1O+EkxOoLROTa+ia1/AmFLuXYNltLGI1YxOY14i80cKpOf0Z59IXnlrY3coAI9NDQ==, tarball: 
https://registry.npmjs.org/@oxc-resolver/binding-linux-x64-gnu/-/binding-linux-x64-gnu-11.12.0.tgz} cpu: [x64] os: [linux] - '@oxc-resolver/binding-linux-x64-musl@11.8.4': - resolution: {integrity: sha512-X5z44qh5DdJfVhcqXAQFTDFUpcxdpf6DT/lHL5CFcdQGIZxatjc7gFUy05IXPI9xwfq39RValjJBvFovUk9XBw==, tarball: https://registry.npmjs.org/@oxc-resolver/binding-linux-x64-musl/-/binding-linux-x64-musl-11.8.4.tgz} + '@oxc-resolver/binding-linux-x64-musl@11.12.0': + resolution: {integrity: sha512-qpHPU0qqeJXh7cPzA+I+WWA6RxtRArfmSrhTXidbiQ08G5A1e55YQwExWkitB2rSqN6YFxnpfhHKo9hyhpyfSg==, tarball: https://registry.npmjs.org/@oxc-resolver/binding-linux-x64-musl/-/binding-linux-x64-musl-11.12.0.tgz} cpu: [x64] os: [linux] - '@oxc-resolver/binding-wasm32-wasi@11.8.4': - resolution: {integrity: sha512-z3906y+cd8RRhBGNwHRrRAFxnKjXsBeL3+rdQjZpBrUyrhhsaV5iKD/ROx64FNJ9GjL/9mfon8A5xx/McYIqHA==, tarball: https://registry.npmjs.org/@oxc-resolver/binding-wasm32-wasi/-/binding-wasm32-wasi-11.8.4.tgz} + '@oxc-resolver/binding-wasm32-wasi@11.12.0': + resolution: {integrity: sha512-oqg80bERZAagWLqYmngnesE0/2miv4lST7+wiiZniD6gyb1SoRckwEkbTsytGutkudFtw7O61Pon6pNlOvyFaA==, tarball: https://registry.npmjs.org/@oxc-resolver/binding-wasm32-wasi/-/binding-wasm32-wasi-11.12.0.tgz} engines: {node: '>=14.0.0'} cpu: [wasm32] - '@oxc-resolver/binding-win32-arm64-msvc@11.8.4': - resolution: {integrity: sha512-70vXFs74uA3X5iYOkpclbkWlQEF+MI325uAQ+Or2n8HJip2T0SEmuBlyw/sRL2E8zLC4oocb+1g25fmzlDVkmg==, tarball: https://registry.npmjs.org/@oxc-resolver/binding-win32-arm64-msvc/-/binding-win32-arm64-msvc-11.8.4.tgz} + '@oxc-resolver/binding-win32-arm64-msvc@11.12.0': + resolution: {integrity: sha512-qKH816ycEN9yR/TX91CP1/i6xyVNHKX9VEOYa3XzQROPVtcYG2F6A3ng/PhwpJvS1cmL/DlilhglZe9KWkhNjg==, tarball: https://registry.npmjs.org/@oxc-resolver/binding-win32-arm64-msvc/-/binding-win32-arm64-msvc-11.12.0.tgz} cpu: [arm64] os: [win32] - '@oxc-resolver/binding-win32-ia32-msvc@11.8.4': - resolution: {integrity: 
sha512-SEOUAzTvr+nyMia3nx1dMtD7YUxZwuhQ3QAPnxy21261Lj0yT3JY4EIfwWH54lAWWfMdRSRRMFuGeF/dq7XjEw==, tarball: https://registry.npmjs.org/@oxc-resolver/binding-win32-ia32-msvc/-/binding-win32-ia32-msvc-11.8.4.tgz} + '@oxc-resolver/binding-win32-ia32-msvc@11.12.0': + resolution: {integrity: sha512-3bgxubTlhzF6BwBnhGz5BTboarl1upuanEr6i0dncjfEcU+Z9xAOgbtA7Ip3G3EKDjE1objRKK+ny8PKJZ3b7Q==, tarball: https://registry.npmjs.org/@oxc-resolver/binding-win32-ia32-msvc/-/binding-win32-ia32-msvc-11.12.0.tgz} cpu: [ia32] os: [win32] - '@oxc-resolver/binding-win32-x64-msvc@11.8.4': - resolution: {integrity: sha512-1gARIQsOPOU7LJ7jvMyPmZEVMapL/PymeG3J7naOdLZDrIZKX6CTvgawJmETYKt+8icP8M6KbBinrVkKVqFd+A==, tarball: https://registry.npmjs.org/@oxc-resolver/binding-win32-x64-msvc/-/binding-win32-x64-msvc-11.8.4.tgz} + '@oxc-resolver/binding-win32-x64-msvc@11.12.0': + resolution: {integrity: sha512-rbiWYQWxwy+x7+KgNAoAGYIPB3xUclQlFVV3L5lwfsbp4PQPomJohHowlWgi3GRAEybM5+ZL9xny0YfpJOsthA==, tarball: https://registry.npmjs.org/@oxc-resolver/binding-win32-x64-msvc/-/binding-win32-x64-msvc-11.12.0.tgz} cpu: [x64] os: [win32] @@ -1641,34 +1527,12 @@ packages: '@protobufjs/utf8@1.1.0': resolution: {integrity: sha512-Vvn3zZrhQZkkBE8LSuW3em98c0FwgO4nxzv6OdSxPKJIEKY2bGbHn+mhGIPerzI4twdxaP8/0+06HBpwf345Lw==, tarball: https://registry.npmjs.org/@protobufjs/utf8/-/utf8-1.1.0.tgz} - '@radix-ui/number@1.1.0': - resolution: {integrity: sha512-V3gRzhVNU1ldS5XhAPTom1fOIo4ccrjjJgmE+LI2h/WaFpHmx0MQApT+KZHnx8abG6Avtfcz4WoEciMnpFT3HQ==, tarball: https://registry.npmjs.org/@radix-ui/number/-/number-1.1.0.tgz} - '@radix-ui/number@1.1.1': resolution: {integrity: sha512-MkKCwxlXTgz6CFoJx3pCwn07GKp36+aZyu/u2Ln2VrA5DcdyCZkASEDBTd8x5whTQQL5CiYf4prXKLcgQdv29g==, tarball: https://registry.npmjs.org/@radix-ui/number/-/number-1.1.1.tgz} - '@radix-ui/primitive@1.1.0': - resolution: {integrity: sha512-4Z8dn6Upk0qk4P74xBhZ6Hd/w0mPEzOOLxy4xiPXOXqjF7jZS0VAKk7/x/H6FyY2zCkYJqePf1G5KmkmNJ4RBA==, tarball: 
https://registry.npmjs.org/@radix-ui/primitive/-/primitive-1.1.0.tgz} - - '@radix-ui/primitive@1.1.1': - resolution: {integrity: sha512-SJ31y+Q/zAyShtXJc8x83i9TYdbAfHZ++tUZnvjJJqFjzsdUnKsxPL6IEtBlxKkU7yzer//GQtZSV4GbldL3YA==, tarball: https://registry.npmjs.org/@radix-ui/primitive/-/primitive-1.1.1.tgz} - '@radix-ui/primitive@1.1.3': resolution: {integrity: sha512-JTF99U/6XIjCBo0wqkU5sK10glYe27MRRsfwoiq5zzOEZLHU3A3KCMa5X/azekYRCJ0HlwI0crAXS/5dEHTzDg==, tarball: https://registry.npmjs.org/@radix-ui/primitive/-/primitive-1.1.3.tgz} - '@radix-ui/react-arrow@1.1.1': - resolution: {integrity: sha512-NaVpZfmv8SKeZbn4ijN2V3jlHA9ngBG16VnIIm22nUR0Yk8KUALyBxT3KYEUnNuch9sTE8UTsS3whzBgKOL30w==, tarball: https://registry.npmjs.org/@radix-ui/react-arrow/-/react-arrow-1.1.1.tgz} - peerDependencies: - '@types/react': '*' - '@types/react-dom': '*' - react: ^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc - react-dom: ^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc - peerDependenciesMeta: - '@types/react': - optional: true - '@types/react-dom': - optional: true - '@radix-ui/react-arrow@1.1.7': resolution: {integrity: sha512-F+M1tLhO+mlQaOWspE8Wstg+z6PwxwRd8oQ8IXceWz92kfAmalTRf0EjrouQeo7QssEPfCn05B4Ihs1K9WQ/7w==, tarball: https://registry.npmjs.org/@radix-ui/react-arrow/-/react-arrow-1.1.7.tgz} peerDependencies: @@ -1682,8 +1546,8 @@ packages: '@types/react-dom': optional: true - '@radix-ui/react-avatar@1.1.2': - resolution: {integrity: sha512-GaC7bXQZ5VgZvVvsJ5mu/AEbjYLnhhkoidOboC50Z6FFlLA03wG2ianUoH+zgDQ31/9gCF59bE4+2bBgTyMiig==, tarball: https://registry.npmjs.org/@radix-ui/react-avatar/-/react-avatar-1.1.2.tgz} + '@radix-ui/react-avatar@1.1.10': + resolution: {integrity: sha512-V8piFfWapM5OmNCXTzVQY+E1rDa53zY+MQ4Y7356v4fFz6vqCyUtIz2rUD44ZEdwg78/jKmMJHj07+C/Z/rcog==, tarball: https://registry.npmjs.org/@radix-ui/react-avatar/-/react-avatar-1.1.10.tgz} peerDependencies: '@types/react': '*' '@types/react-dom': '*' @@ -1695,8 +1559,8 @@ packages: '@types/react-dom': optional: true - 
'@radix-ui/react-checkbox@1.1.4': - resolution: {integrity: sha512-wP0CPAHq+P5I4INKe3hJrIa1WoNqqrejzW+zoU0rOvo1b9gDEJJFl2rYfO1PYJUQCc2H1WZxIJmyv9BS8i5fLw==, tarball: https://registry.npmjs.org/@radix-ui/react-checkbox/-/react-checkbox-1.1.4.tgz} + '@radix-ui/react-checkbox@1.3.3': + resolution: {integrity: sha512-wBbpv+NQftHDdG86Qc0pIyXk5IR3tM8Vd0nWLKDcX8nNn4nXFOFwsKuqw2okA/1D/mpaAkmuyndrPJTYDNZtFw==, tarball: https://registry.npmjs.org/@radix-ui/react-checkbox/-/react-checkbox-1.3.3.tgz} peerDependencies: '@types/react': '*' '@types/react-dom': '*' @@ -1708,34 +1572,8 @@ packages: '@types/react-dom': optional: true - '@radix-ui/react-collapsible@1.1.2': - resolution: {integrity: sha512-PliMB63vxz7vggcyq0IxNYk8vGDrLXVWw4+W4B8YnwI1s18x7YZYqlG9PLX7XxAJUi0g2DxP4XKJMFHh/iVh9A==, tarball: https://registry.npmjs.org/@radix-ui/react-collapsible/-/react-collapsible-1.1.2.tgz} - peerDependencies: - '@types/react': '*' - '@types/react-dom': '*' - react: ^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc - react-dom: ^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc - peerDependenciesMeta: - '@types/react': - optional: true - '@types/react-dom': - optional: true - - '@radix-ui/react-collection@1.1.1': - resolution: {integrity: sha512-LwT3pSho9Dljg+wY2KN2mrrh6y3qELfftINERIzBUO9e0N+t0oMTyn3k9iv+ZqgrwGkRnLpNJrsMv9BZlt2yuA==, tarball: https://registry.npmjs.org/@radix-ui/react-collection/-/react-collection-1.1.1.tgz} - peerDependencies: - '@types/react': '*' - '@types/react-dom': '*' - react: ^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc - react-dom: ^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc - peerDependenciesMeta: - '@types/react': - optional: true - '@types/react-dom': - optional: true - - '@radix-ui/react-collection@1.1.2': - resolution: {integrity: sha512-9z54IEKRxIa9VityapoEYMuByaG42iSy1ZXlY2KcuLSEtq8x4987/N6m15ppoMffgZX72gER2uHe1D9Y6Unlcw==, tarball: https://registry.npmjs.org/@radix-ui/react-collection/-/react-collection-1.1.2.tgz} + '@radix-ui/react-collapsible@1.1.12': + 
resolution: {integrity: sha512-Uu+mSh4agx2ib1uIGPP4/CKNULyajb3p92LsVXmH2EHVMTfZWpll88XJ0j4W0z3f8NK1eYl1+Mf/szHPmcHzyA==, tarball: https://registry.npmjs.org/@radix-ui/react-collapsible/-/react-collapsible-1.1.12.tgz} peerDependencies: '@types/react': '*' '@types/react-dom': '*' @@ -1760,24 +1598,6 @@ packages: '@types/react-dom': optional: true - '@radix-ui/react-compose-refs@1.1.0': - resolution: {integrity: sha512-b4inOtiaOnYf9KWyO3jAeeCG6FeyfY6ldiEPanbUjWd+xIk5wZeHa8yVwmrJ2vderhu/BQvzCrJI0lHd+wIiqw==, tarball: https://registry.npmjs.org/@radix-ui/react-compose-refs/-/react-compose-refs-1.1.0.tgz} - peerDependencies: - '@types/react': '*' - react: ^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc - peerDependenciesMeta: - '@types/react': - optional: true - - '@radix-ui/react-compose-refs@1.1.1': - resolution: {integrity: sha512-Y9VzoRDSJtgFMUCoiZBDVo084VQ5hfpXxVE+NgkdNsjiDBByiImMZKKhxMwCbdHvhlENG6a833CbFkOQvTricw==, tarball: https://registry.npmjs.org/@radix-ui/react-compose-refs/-/react-compose-refs-1.1.1.tgz} - peerDependencies: - '@types/react': '*' - react: ^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc - peerDependenciesMeta: - '@types/react': - optional: true - '@radix-ui/react-compose-refs@1.1.2': resolution: {integrity: sha512-z4eqJvfiNnFMHIIvXP3CY57y2WJs5g2v3X0zm9mEJkrkNv4rDxu+sg9Jh8EkXyeqBkB7SOcboo9dMVqhyrACIg==, tarball: https://registry.npmjs.org/@radix-ui/react-compose-refs/-/react-compose-refs-1.1.2.tgz} peerDependencies: @@ -1787,15 +1607,6 @@ packages: '@types/react': optional: true - '@radix-ui/react-context@1.1.1': - resolution: {integrity: sha512-UASk9zi+crv9WteK/NU4PLvOoL3OuE6BWVKNF6hPRBtYBDXQ2u5iu3O59zUlJiTVvkyuycnqrztsHVJwcK9K+Q==, tarball: https://registry.npmjs.org/@radix-ui/react-context/-/react-context-1.1.1.tgz} - peerDependencies: - '@types/react': '*' - react: ^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc - peerDependenciesMeta: - '@types/react': - optional: true - '@radix-ui/react-context@1.1.2': resolution: {integrity: 
sha512-jCi/QKUM2r1Ju5a3J64TH2A5SpKAgh0LpknyqdQ4m6DCV0xJ2HG1xARRwNGPQfi1SLdLWZ1OJz6F4OMBBNiGJA==, tarball: https://registry.npmjs.org/@radix-ui/react-context/-/react-context-1.1.2.tgz} peerDependencies: @@ -1805,8 +1616,8 @@ packages: '@types/react': optional: true - '@radix-ui/react-dialog@1.1.4': - resolution: {integrity: sha512-Ur7EV1IwQGCyaAuyDRiOLA5JIUZxELJljF+MbM/2NC0BYwfuRrbpS30BiQBJrVruscgUkieKkqXYDOoByaxIoA==, tarball: https://registry.npmjs.org/@radix-ui/react-dialog/-/react-dialog-1.1.4.tgz} + '@radix-ui/react-dialog@1.1.15': + resolution: {integrity: sha512-TCglVRtzlffRNxRMEyR36DGBLJpeusFcgMVD9PZEzAKnUs1lKCgX5u9BmC2Yg+LL9MgZDugFFs1Vl+Jp4t/PGw==, tarball: https://registry.npmjs.org/@radix-ui/react-dialog/-/react-dialog-1.1.15.tgz} peerDependencies: '@types/react': '*' '@types/react-dom': '*' @@ -1818,15 +1629,6 @@ packages: '@types/react-dom': optional: true - '@radix-ui/react-direction@1.1.0': - resolution: {integrity: sha512-BUuBvgThEiAXh2DWu93XsT+a3aWrGqolGlqqw5VU1kG7p/ZH2cuDlM1sRLNnY3QcBS69UIz2mcKhMxDsdewhjg==, tarball: https://registry.npmjs.org/@radix-ui/react-direction/-/react-direction-1.1.0.tgz} - peerDependencies: - '@types/react': '*' - react: ^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc - peerDependenciesMeta: - '@types/react': - optional: true - '@radix-ui/react-direction@1.1.1': resolution: {integrity: sha512-1UEWRX6jnOA2y4H5WczZ44gOOjTEmlqv1uNW4GAJEO5+bauCBhv8snY65Iw5/VOS/ghKN9gr2KjnLKxrsvoMVw==, tarball: https://registry.npmjs.org/@radix-ui/react-direction/-/react-direction-1.1.1.tgz} peerDependencies: @@ -1849,34 +1651,8 @@ packages: '@types/react-dom': optional: true - '@radix-ui/react-dismissable-layer@1.1.3': - resolution: {integrity: sha512-onrWn/72lQoEucDmJnr8uczSNTujT0vJnA/X5+3AkChVPowr8n1yvIKIabhWyMQeMvvmdpsvcyDqx3X1LEXCPg==, tarball: https://registry.npmjs.org/@radix-ui/react-dismissable-layer/-/react-dismissable-layer-1.1.3.tgz} - peerDependencies: - '@types/react': '*' - '@types/react-dom': '*' - react: ^16.8 || ^17.0 || ^18.0 
|| ^19.0 || ^19.0.0-rc - react-dom: ^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc - peerDependenciesMeta: - '@types/react': - optional: true - '@types/react-dom': - optional: true - - '@radix-ui/react-dismissable-layer@1.1.4': - resolution: {integrity: sha512-XDUI0IVYVSwjMXxM6P4Dfti7AH+Y4oS/TB+sglZ/EXc7cqLwGAmp1NlMrcUjj7ks6R5WTZuWKv44FBbLpwU3sA==, tarball: https://registry.npmjs.org/@radix-ui/react-dismissable-layer/-/react-dismissable-layer-1.1.4.tgz} - peerDependencies: - '@types/react': '*' - '@types/react-dom': '*' - react: ^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc - react-dom: ^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc - peerDependenciesMeta: - '@types/react': - optional: true - '@types/react-dom': - optional: true - - '@radix-ui/react-dropdown-menu@2.1.4': - resolution: {integrity: sha512-iXU1Ab5ecM+yEepGAWK8ZhMyKX4ubFdCNtol4sT9D0OVErG9PNElfx3TQhjw7n7BC5nFVz68/5//clWy+8TXzA==, tarball: https://registry.npmjs.org/@radix-ui/react-dropdown-menu/-/react-dropdown-menu-2.1.4.tgz} + '@radix-ui/react-dropdown-menu@2.1.16': + resolution: {integrity: sha512-1PLGQEynI/3OX/ftV54COn+3Sud/Mn8vALg2rWnBLnRaGtJDduNW/22XjlGgPdpcIbiQxjKtb7BkcjP00nqfJw==, tarball: https://registry.npmjs.org/@radix-ui/react-dropdown-menu/-/react-dropdown-menu-2.1.16.tgz} peerDependencies: '@types/react': '*' '@types/react-dom': '*' @@ -1888,15 +1664,6 @@ packages: '@types/react-dom': optional: true - '@radix-ui/react-focus-guards@1.1.1': - resolution: {integrity: sha512-pSIwfrT1a6sIoDASCSpFwOasEwKTZWDw/iBdtnqKO7v6FeOzYJ7U53cPzYFVR3geGGXgVHaH+CdngrrAzqUGxg==, tarball: https://registry.npmjs.org/@radix-ui/react-focus-guards/-/react-focus-guards-1.1.1.tgz} - peerDependencies: - '@types/react': '*' - react: ^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc - peerDependenciesMeta: - '@types/react': - optional: true - '@radix-ui/react-focus-guards@1.1.3': resolution: {integrity: sha512-0rFg/Rj2Q62NCm62jZw0QX7a3sz6QCQU0LpZdNrJX8byRGaGVTqbrW9jAoIAHyMQqsNpeZ81YgSizOt5WXq0Pw==, tarball: 
https://registry.npmjs.org/@radix-ui/react-focus-guards/-/react-focus-guards-1.1.3.tgz} peerDependencies: @@ -1906,19 +1673,6 @@ packages: '@types/react': optional: true - '@radix-ui/react-focus-scope@1.1.1': - resolution: {integrity: sha512-01omzJAYRxXdG2/he/+xy+c8a8gCydoQ1yOxnWNcRhrrBW5W+RQJ22EK1SaO8tb3WoUsuEw7mJjBozPzihDFjA==, tarball: https://registry.npmjs.org/@radix-ui/react-focus-scope/-/react-focus-scope-1.1.1.tgz} - peerDependencies: - '@types/react': '*' - '@types/react-dom': '*' - react: ^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc - react-dom: ^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc - peerDependenciesMeta: - '@types/react': - optional: true - '@types/react-dom': - optional: true - '@radix-ui/react-focus-scope@1.1.7': resolution: {integrity: sha512-t2ODlkXBQyn7jkl6TNaw/MtVEVvIGelJDCG41Okq/KwUsJBwQ4XVZsHAVUkK4mBv3ewiAS3PGuUWuY2BoK4ZUw==, tarball: https://registry.npmjs.org/@radix-ui/react-focus-scope/-/react-focus-scope-1.1.7.tgz} peerDependencies: @@ -1932,15 +1686,6 @@ packages: '@types/react-dom': optional: true - '@radix-ui/react-id@1.1.0': - resolution: {integrity: sha512-EJUrI8yYh7WOjNOqpoJaf1jlFIH2LvtgAl+YcFqNCa+4hj64ZXmPkAKOFs/ukjz3byN6bdb/AVUqHkI8/uWWMA==, tarball: https://registry.npmjs.org/@radix-ui/react-id/-/react-id-1.1.0.tgz} - peerDependencies: - '@types/react': '*' - react: ^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc - peerDependenciesMeta: - '@types/react': - optional: true - '@radix-ui/react-id@1.1.1': resolution: {integrity: sha512-kGkGegYIdQsOb4XjsfM97rXsiHaBwco+hFI66oO4s9LU+PLAC5oJ7khdOVFxkhsmlbpUqDAvXw11CluXP+jkHg==, tarball: https://registry.npmjs.org/@radix-ui/react-id/-/react-id-1.1.1.tgz} peerDependencies: @@ -1950,8 +1695,8 @@ packages: '@types/react': optional: true - '@radix-ui/react-label@2.1.0': - resolution: {integrity: sha512-peLblDlFw/ngk3UWq0VnYaOLy6agTZZ+MUO/WhVfm14vJGML+xH4FAl2XQGLqdefjNb7ApRg6Yn7U42ZhmYXdw==, tarball: https://registry.npmjs.org/@radix-ui/react-label/-/react-label-2.1.0.tgz} + 
'@radix-ui/react-label@2.1.7': + resolution: {integrity: sha512-YT1GqPSL8kJn20djelMX7/cTRp/Y9w5IZHvfxQTVHrOqa2yMl7i/UfMqKRU5V7mEyKTrUVgJXhNQPVCG8PBLoQ==, tarball: https://registry.npmjs.org/@radix-ui/react-label/-/react-label-2.1.7.tgz} peerDependencies: '@types/react': '*' '@types/react-dom': '*' @@ -1963,8 +1708,8 @@ packages: '@types/react-dom': optional: true - '@radix-ui/react-menu@2.1.4': - resolution: {integrity: sha512-BnOgVoL6YYdHAG6DtXONaR29Eq4nvbi8rutrV/xlr3RQCMMb3yqP85Qiw/3NReozrSW+4dfLkK+rc1hb4wPU/A==, tarball: https://registry.npmjs.org/@radix-ui/react-menu/-/react-menu-2.1.4.tgz} + '@radix-ui/react-menu@2.1.16': + resolution: {integrity: sha512-72F2T+PLlphrqLcAotYPp0uJMr5SjP5SL01wfEspJbru5Zs5vQaSHb4VB3ZMJPimgHHCHG7gMOeOB9H3Hdmtxg==, tarball: https://registry.npmjs.org/@radix-ui/react-menu/-/react-menu-2.1.16.tgz} peerDependencies: '@types/react': '*' '@types/react-dom': '*' @@ -1976,21 +1721,8 @@ packages: '@types/react-dom': optional: true - '@radix-ui/react-popover@1.1.5': - resolution: {integrity: sha512-YXkTAftOIW2Bt3qKH8vYr6n9gCkVrvyvfiTObVjoHVTHnNj26rmvO87IKa3VgtgCjb8FAQ6qOjNViwl+9iIzlg==, tarball: https://registry.npmjs.org/@radix-ui/react-popover/-/react-popover-1.1.5.tgz} - peerDependencies: - '@types/react': '*' - '@types/react-dom': '*' - react: ^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc - react-dom: ^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc - peerDependenciesMeta: - '@types/react': - optional: true - '@types/react-dom': - optional: true - - '@radix-ui/react-popper@1.2.1': - resolution: {integrity: sha512-3kn5Me69L+jv82EKRuQCXdYyf1DqHwD2U/sxoNgBGCB7K9TRc3bQamQ+5EPM9EvyPdli0W41sROd+ZU1dTCztw==, tarball: https://registry.npmjs.org/@radix-ui/react-popper/-/react-popper-1.2.1.tgz} + '@radix-ui/react-popover@1.1.15': + resolution: {integrity: sha512-kr0X2+6Yy/vJzLYJUPCZEc8SfQcf+1COFoAqauJm74umQhta9M7lNJHP7QQS3vkvcGLQUbWpMzwrXYwrYztHKA==, tarball: https://registry.npmjs.org/@radix-ui/react-popover/-/react-popover-1.1.15.tgz} 
peerDependencies: '@types/react': '*' '@types/react-dom': '*' @@ -2015,19 +1747,6 @@ packages: '@types/react-dom': optional: true - '@radix-ui/react-portal@1.1.3': - resolution: {integrity: sha512-NciRqhXnGojhT93RPyDaMPfLH3ZSl4jjIFbZQ1b/vxvZEdHsBZ49wP9w8L3HzUQwep01LcWtkUvm0OVB5JAHTw==, tarball: https://registry.npmjs.org/@radix-ui/react-portal/-/react-portal-1.1.3.tgz} - peerDependencies: - '@types/react': '*' - '@types/react-dom': '*' - react: ^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc - react-dom: ^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc - peerDependenciesMeta: - '@types/react': - optional: true - '@types/react-dom': - optional: true - '@radix-ui/react-portal@1.1.9': resolution: {integrity: sha512-bpIxvq03if6UNwXZ+HTK71JLh4APvnXntDc6XOX8UVq4XQOVl7lwok0AvIl+b8zgCw3fSaVTZMpAPPagXbKmHQ==, tarball: https://registry.npmjs.org/@radix-ui/react-portal/-/react-portal-1.1.9.tgz} peerDependencies: @@ -2041,47 +1760,8 @@ packages: '@types/react-dom': optional: true - '@radix-ui/react-presence@1.1.2': - resolution: {integrity: sha512-18TFr80t5EVgL9x1SwF/YGtfG+l0BS0PRAlCWBDoBEiDQjeKgnNZRVJp/oVBl24sr3Gbfwc/Qpj4OcWTQMsAEg==, tarball: https://registry.npmjs.org/@radix-ui/react-presence/-/react-presence-1.1.2.tgz} - peerDependencies: - '@types/react': '*' - '@types/react-dom': '*' - react: ^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc - react-dom: ^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc - peerDependenciesMeta: - '@types/react': - optional: true - '@types/react-dom': - optional: true - - '@radix-ui/react-primitive@2.0.0': - resolution: {integrity: sha512-ZSpFm0/uHa8zTvKBDjLFWLo8dkr4MBsiDLz0g3gMUwqgLHz9rTaRRGYDgvZPtBJgYCBKXkS9fzmoySgr8CO6Cw==, tarball: https://registry.npmjs.org/@radix-ui/react-primitive/-/react-primitive-2.0.0.tgz} - peerDependencies: - '@types/react': '*' - '@types/react-dom': '*' - react: ^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc - react-dom: ^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc - peerDependenciesMeta: - '@types/react': - 
optional: true - '@types/react-dom': - optional: true - - '@radix-ui/react-primitive@2.0.1': - resolution: {integrity: sha512-sHCWTtxwNn3L3fH8qAfnF3WbUZycW93SM1j3NFDzXBiz8D6F5UTTy8G1+WFEaiCdvCVRJWj6N2R4Xq6HdiHmDg==, tarball: https://registry.npmjs.org/@radix-ui/react-primitive/-/react-primitive-2.0.1.tgz} - peerDependencies: - '@types/react': '*' - '@types/react-dom': '*' - react: ^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc - react-dom: ^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc - peerDependenciesMeta: - '@types/react': - optional: true - '@types/react-dom': - optional: true - - '@radix-ui/react-primitive@2.0.2': - resolution: {integrity: sha512-Ec/0d38EIuvDF+GZjcMU/Ze6MxntVJYO/fRlCPhCaVUyPY9WTalHJw54tp9sXeJo3tlShWpy41vQRgLRGOuz+w==, tarball: https://registry.npmjs.org/@radix-ui/react-primitive/-/react-primitive-2.0.2.tgz} + '@radix-ui/react-presence@1.1.5': + resolution: {integrity: sha512-/jfEwNDdQVBCNvjkGit4h6pMOzq8bHkopq458dPt2lMjx+eBQUohZNG9A7DtO/O5ukSbxuaNGXMjHicgwy6rQQ==, tarball: https://registry.npmjs.org/@radix-ui/react-presence/-/react-presence-1.1.5.tgz} peerDependencies: '@types/react': '*' '@types/react-dom': '*' @@ -2106,21 +1786,8 @@ packages: '@types/react-dom': optional: true - '@radix-ui/react-radio-group@1.2.3': - resolution: {integrity: sha512-xtCsqt8Rp09FK50ItqEqTJ7Sxanz8EM8dnkVIhJrc/wkMMomSmXHvYbhv3E7Zx4oXh98aaLt9W679SUYXg4IDA==, tarball: https://registry.npmjs.org/@radix-ui/react-radio-group/-/react-radio-group-1.2.3.tgz} - peerDependencies: - '@types/react': '*' - '@types/react-dom': '*' - react: ^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc - react-dom: ^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc - peerDependenciesMeta: - '@types/react': - optional: true - '@types/react-dom': - optional: true - - '@radix-ui/react-roving-focus@1.1.1': - resolution: {integrity: sha512-QE1RoxPGJ/Nm8Qmk0PxP8ojmoaS67i0s7hVssS7KuI2FQoc/uzVlZsqKfQvxPE6D8hICCPHJ4D88zNhT3OOmkw==, tarball: 
https://registry.npmjs.org/@radix-ui/react-roving-focus/-/react-roving-focus-1.1.1.tgz} + '@radix-ui/react-radio-group@1.3.8': + resolution: {integrity: sha512-VBKYIYImA5zsxACdisNQ3BjCBfmbGH3kQlnFVqlWU4tXwjy7cGX8ta80BcrO+WJXIn5iBylEH3K6ZTlee//lgQ==, tarball: https://registry.npmjs.org/@radix-ui/react-radio-group/-/react-radio-group-1.3.8.tgz} peerDependencies: '@types/react': '*' '@types/react-dom': '*' @@ -2132,8 +1799,8 @@ packages: '@types/react-dom': optional: true - '@radix-ui/react-roving-focus@1.1.2': - resolution: {integrity: sha512-zgMQWkNO169GtGqRvYrzb0Zf8NhMHS2DuEB/TiEmVnpr5OqPU3i8lfbxaAmC2J/KYuIQxyoQQ6DxepyXp61/xw==, tarball: https://registry.npmjs.org/@radix-ui/react-roving-focus/-/react-roving-focus-1.1.2.tgz} + '@radix-ui/react-roving-focus@1.1.11': + resolution: {integrity: sha512-7A6S9jSgm/S+7MdtNDSb+IU859vQqJ/QAtcYQcfFC6W8RS4IxIZDldLR0xqCFZ6DCyrQLjLPsxtTNch5jVA4lA==, tarball: https://registry.npmjs.org/@radix-ui/react-roving-focus/-/react-roving-focus-1.1.11.tgz} peerDependencies: '@types/react': '*' '@types/react-dom': '*' @@ -2145,8 +1812,8 @@ packages: '@types/react-dom': optional: true - '@radix-ui/react-scroll-area@1.2.3': - resolution: {integrity: sha512-l7+NNBfBYYJa9tNqVcP2AGvxdE3lmE6kFTBXdvHgUaZuy+4wGCL1Cl2AfaR7RKyimj7lZURGLwFO59k4eBnDJQ==, tarball: https://registry.npmjs.org/@radix-ui/react-scroll-area/-/react-scroll-area-1.2.3.tgz} + '@radix-ui/react-scroll-area@1.2.10': + resolution: {integrity: sha512-tAXIa1g3sM5CGpVT0uIbUx/U3Gs5N8T52IICuCtObaos1S8fzsrPXG5WObkQN3S6NVl6wKgPhAIiBGbWnvc97A==, tarball: https://registry.npmjs.org/@radix-ui/react-scroll-area/-/react-scroll-area-1.2.10.tgz} peerDependencies: '@types/react': '*' '@types/react-dom': '*' @@ -2184,8 +1851,8 @@ packages: '@types/react-dom': optional: true - '@radix-ui/react-slider@1.2.2': - resolution: {integrity: sha512-sNlU06ii1/ZcbHf8I9En54ZPW0Vil/yPVg4vQMcFNjrIx51jsHbFl1HYHQvCIWJSr1q0ZmA+iIs/ZTv8h7HHSA==, tarball: 
https://registry.npmjs.org/@radix-ui/react-slider/-/react-slider-1.2.2.tgz} + '@radix-ui/react-slider@1.3.6': + resolution: {integrity: sha512-JPYb1GuM1bxfjMRlNLE+BcmBC8onfCi60Blk7OBqi2MLTFdS+8401U4uFjnwkOr49BLmXxLC6JHkvAsx5OJvHw==, tarball: https://registry.npmjs.org/@radix-ui/react-slider/-/react-slider-1.3.6.tgz} peerDependencies: '@types/react': '*' '@types/react-dom': '*' @@ -2197,57 +1864,17 @@ packages: '@types/react-dom': optional: true - '@radix-ui/react-slot@1.1.0': - resolution: {integrity: sha512-FUCf5XMfmW4dtYl69pdS4DbxKy8nj4M7SafBgPllysxmdachynNflAdp/gCsnYWNDnge6tI9onzMp5ARYc1KNw==, tarball: https://registry.npmjs.org/@radix-ui/react-slot/-/react-slot-1.1.0.tgz} - peerDependencies: - '@types/react': '*' - react: ^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc - peerDependenciesMeta: - '@types/react': - optional: true - - '@radix-ui/react-slot@1.1.1': - resolution: {integrity: sha512-RApLLOcINYJA+dMVbOju7MYv1Mb2EBp2nH4HdDzXTSyaR5optlm6Otrz1euW3HbdOR8UmmFK06TD+A9frYWv+g==, tarball: https://registry.npmjs.org/@radix-ui/react-slot/-/react-slot-1.1.1.tgz} - peerDependencies: - '@types/react': '*' - react: ^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc - peerDependenciesMeta: - '@types/react': - optional: true - - '@radix-ui/react-slot@1.1.2': - resolution: {integrity: sha512-YAKxaiGsSQJ38VzKH86/BPRC4rh+b1Jpa+JneA5LRE7skmLPNAyeG8kPJj/oo4STLvlrs8vkf/iYyc3A5stYCQ==, tarball: https://registry.npmjs.org/@radix-ui/react-slot/-/react-slot-1.1.2.tgz} - peerDependencies: - '@types/react': '*' - react: ^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc - peerDependenciesMeta: - '@types/react': - optional: true - '@radix-ui/react-slot@1.2.3': resolution: {integrity: sha512-aeNmHnBxbi2St0au6VBVC7JXFlhLlOnvIIlePNniyUNAClzmtAUEY8/pBiK3iHjufOlwA+c20/8jngo7xcrg8A==, tarball: https://registry.npmjs.org/@radix-ui/react-slot/-/react-slot-1.2.3.tgz} peerDependencies: '@types/react': '*' react: ^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc peerDependenciesMeta: - '@types/react': - 
optional: true - - '@radix-ui/react-switch@1.1.1': - resolution: {integrity: sha512-diPqDDoBcZPSicYoMWdWx+bCPuTRH4QSp9J+65IvtdS0Kuzt67bI6n32vCj8q6NZmYW/ah+2orOtMwcX5eQwIg==, tarball: https://registry.npmjs.org/@radix-ui/react-switch/-/react-switch-1.1.1.tgz} - peerDependencies: - '@types/react': '*' - '@types/react-dom': '*' - react: ^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc - react-dom: ^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc - peerDependenciesMeta: - '@types/react': - optional: true - '@types/react-dom': + '@types/react': optional: true - '@radix-ui/react-tooltip@1.1.7': - resolution: {integrity: sha512-ss0s80BC0+g0+Zc53MvilcnTYSOi4mSuFWBPYPuTOFGjx+pUU+ZrmamMNwS56t8MTFlniA5ocjd4jYm/CdhbOg==, tarball: https://registry.npmjs.org/@radix-ui/react-tooltip/-/react-tooltip-1.1.7.tgz} + '@radix-ui/react-switch@1.2.6': + resolution: {integrity: sha512-bByzr1+ep1zk4VubeEVViV592vu2lHE2BZY5OnzehZqOOgogN80+mNtCqPkhn2gklJqOpxWgPoYTSnhBCqpOXQ==, tarball: https://registry.npmjs.org/@radix-ui/react-switch/-/react-switch-1.2.6.tgz} peerDependencies: '@types/react': '*' '@types/react-dom': '*' @@ -2259,14 +1886,18 @@ packages: '@types/react-dom': optional: true - '@radix-ui/react-use-callback-ref@1.1.0': - resolution: {integrity: sha512-CasTfvsy+frcFkbXtSJ2Zu9JHpN8TYKxkgJGWbjiZhFivxaeW7rMeZt7QELGVLaYVfFMsKHjb7Ak0nMEe+2Vfw==, tarball: https://registry.npmjs.org/@radix-ui/react-use-callback-ref/-/react-use-callback-ref-1.1.0.tgz} + '@radix-ui/react-tooltip@1.2.8': + resolution: {integrity: sha512-tY7sVt1yL9ozIxvmbtN5qtmH2krXcBCfjEiCgKGLqunJHvgvZG2Pcl2oQ3kbcZARb1BGEHdkLzcYGO8ynVlieg==, tarball: https://registry.npmjs.org/@radix-ui/react-tooltip/-/react-tooltip-1.2.8.tgz} peerDependencies: '@types/react': '*' + '@types/react-dom': '*' react: ^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc + react-dom: ^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc peerDependenciesMeta: '@types/react': optional: true + '@types/react-dom': + optional: true 
'@radix-ui/react-use-callback-ref@1.1.1': resolution: {integrity: sha512-FkBMwD+qbGQeMu1cOHnuGB6x4yzPjho8ap5WtbEJ26umhgqVXbhekKUQO+hZEL1vU92a3wHwdp0HAcqAUF5iDg==, tarball: https://registry.npmjs.org/@radix-ui/react-use-callback-ref/-/react-use-callback-ref-1.1.1.tgz} @@ -2277,15 +1908,6 @@ packages: '@types/react': optional: true - '@radix-ui/react-use-controllable-state@1.1.0': - resolution: {integrity: sha512-MtfMVJiSr2NjzS0Aa90NPTnvTSg6C/JLCV7ma0W6+OMV78vd8OyRpID+Ng9LxzsPbLeuBnWBA1Nq30AtBIDChw==, tarball: https://registry.npmjs.org/@radix-ui/react-use-controllable-state/-/react-use-controllable-state-1.1.0.tgz} - peerDependencies: - '@types/react': '*' - react: ^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc - peerDependenciesMeta: - '@types/react': - optional: true - '@radix-ui/react-use-controllable-state@1.2.2': resolution: {integrity: sha512-BjasUjixPFdS+NKkypcyyN5Pmg83Olst0+c6vGov0diwTEo6mgdqVR6hxcEgFuh4QrAs7Rc+9KuGJ9TVCj0Zzg==, tarball: https://registry.npmjs.org/@radix-ui/react-use-controllable-state/-/react-use-controllable-state-1.2.2.tgz} peerDependencies: @@ -2304,15 +1926,6 @@ packages: '@types/react': optional: true - '@radix-ui/react-use-escape-keydown@1.1.0': - resolution: {integrity: sha512-L7vwWlR1kTTQ3oh7g1O0CBF3YCyyTj8NmhLR+phShpyA50HCfBFKVJTpshm9PzLiKmehsrQzTYTpX9HvmC9rhw==, tarball: https://registry.npmjs.org/@radix-ui/react-use-escape-keydown/-/react-use-escape-keydown-1.1.0.tgz} - peerDependencies: - '@types/react': '*' - react: ^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc - peerDependenciesMeta: - '@types/react': - optional: true - '@radix-ui/react-use-escape-keydown@1.1.1': resolution: {integrity: sha512-Il0+boE7w/XebUHyBjroE+DbByORGR9KKmITzbR7MyQ4akpORYP/ZmbhAr0DG7RmmBqoOnZdy2QlvajJ2QA59g==, tarball: https://registry.npmjs.org/@radix-ui/react-use-escape-keydown/-/react-use-escape-keydown-1.1.1.tgz} peerDependencies: @@ -2322,8 +1935,8 @@ packages: '@types/react': optional: true - '@radix-ui/react-use-layout-effect@1.1.0': - resolution: 
{integrity: sha512-+FPE0rOdziWSrH9athwI1R0HDVbWlEhd+FR+aSDk4uWGmSJ9Z54sdZVDQPZAinJhJXwfT+qnj969mCsT2gfm5w==, tarball: https://registry.npmjs.org/@radix-ui/react-use-layout-effect/-/react-use-layout-effect-1.1.0.tgz} + '@radix-ui/react-use-is-hydrated@0.1.0': + resolution: {integrity: sha512-U+UORVEq+cTnRIaostJv9AGdV3G6Y+zbVd+12e18jQ5A3c0xL03IhnHuiU4UV69wolOQp5GfR58NW/EgdQhwOA==, tarball: https://registry.npmjs.org/@radix-ui/react-use-is-hydrated/-/react-use-is-hydrated-0.1.0.tgz} peerDependencies: '@types/react': '*' react: ^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc @@ -2340,15 +1953,6 @@ packages: '@types/react': optional: true - '@radix-ui/react-use-previous@1.1.0': - resolution: {integrity: sha512-Z/e78qg2YFnnXcW88A4JmTtm4ADckLno6F7OXotmkQfeuCVaKuYzqAATPhVzl3delXE7CxIV8shofPn3jPc5Og==, tarball: https://registry.npmjs.org/@radix-ui/react-use-previous/-/react-use-previous-1.1.0.tgz} - peerDependencies: - '@types/react': '*' - react: ^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc - peerDependenciesMeta: - '@types/react': - optional: true - '@radix-ui/react-use-previous@1.1.1': resolution: {integrity: sha512-2dHfToCj/pzca2Ck724OZ5L0EVrr3eHRNsG/b3xQJLA2hZpVCS99bLAX+hm1IHXDEnzU6by5z/5MIY794/a8NQ==, tarball: https://registry.npmjs.org/@radix-ui/react-use-previous/-/react-use-previous-1.1.1.tgz} peerDependencies: @@ -2358,15 +1962,6 @@ packages: '@types/react': optional: true - '@radix-ui/react-use-rect@1.1.0': - resolution: {integrity: sha512-0Fmkebhr6PiseyZlYAOtLS+nb7jLmpqTrJyv61Pe68MKYW6OWdRE2kI70TaYY27u7H0lajqM3hSMMLFq18Z7nQ==, tarball: https://registry.npmjs.org/@radix-ui/react-use-rect/-/react-use-rect-1.1.0.tgz} - peerDependencies: - '@types/react': '*' - react: ^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc - peerDependenciesMeta: - '@types/react': - optional: true - '@radix-ui/react-use-rect@1.1.1': resolution: {integrity: sha512-QTYuDesS0VtuHNNvMh+CjlKJ4LJickCMUAqjlE3+j8w+RlRpwyX3apEQKGFzbZGdo7XNG1tXa+bQqIE7HIXT2w==, tarball: 
https://registry.npmjs.org/@radix-ui/react-use-rect/-/react-use-rect-1.1.1.tgz} peerDependencies: @@ -2376,15 +1971,6 @@ packages: '@types/react': optional: true - '@radix-ui/react-use-size@1.1.0': - resolution: {integrity: sha512-XW3/vWuIXHa+2Uwcc2ABSfcCledmXhhQPlGbfcRXbiUQI5Icjcg19BGCZVKKInYbvUCut/ufbbLLPFC5cbb1hw==, tarball: https://registry.npmjs.org/@radix-ui/react-use-size/-/react-use-size-1.1.0.tgz} - peerDependencies: - '@types/react': '*' - react: ^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc - peerDependenciesMeta: - '@types/react': - optional: true - '@radix-ui/react-use-size@1.1.1': resolution: {integrity: sha512-ewrXRDTAqAXlkl6t/fkXWNAhFX9I+CkKlw6zjEwk86RSPKwZr3xpBRso655aqYafwtnbpHLj6toFzmd6xdVptQ==, tarball: https://registry.npmjs.org/@radix-ui/react-use-size/-/react-use-size-1.1.1.tgz} peerDependencies: @@ -2394,19 +1980,6 @@ packages: '@types/react': optional: true - '@radix-ui/react-visually-hidden@1.1.1': - resolution: {integrity: sha512-vVfA2IZ9q/J+gEamvj761Oq1FpWgCDaNOOIfbPVp2MVPLEomUr5+Vf7kJGwQ24YxZSlQVar7Bes8kyTo5Dshpg==, tarball: https://registry.npmjs.org/@radix-ui/react-visually-hidden/-/react-visually-hidden-1.1.1.tgz} - peerDependencies: - '@types/react': '*' - '@types/react-dom': '*' - react: ^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc - react-dom: ^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc - peerDependenciesMeta: - '@types/react': - optional: true - '@types/react-dom': - optional: true - '@radix-ui/react-visually-hidden@1.2.3': resolution: {integrity: sha512-pzJq12tEaaIhqjbzpCuv/OypJY/BPavOofm+dbab+MHLajy277+1lLm6JFcGgF5eskJ6mquGirhXY2GD/8u8Ug==, tarball: https://registry.npmjs.org/@radix-ui/react-visually-hidden/-/react-visually-hidden-1.2.3.tgz} peerDependencies: @@ -2420,14 +1993,11 @@ packages: '@types/react-dom': optional: true - '@radix-ui/rect@1.1.0': - resolution: {integrity: sha512-A9+lCBZoaMJlVKcRBz2YByCG+Cp2t6nAnMnNba+XiWxnj6r4JUFqfsgwocMBZU9LPtdxC6wB56ySYpc7LQIoJg==, tarball: 
https://registry.npmjs.org/@radix-ui/rect/-/rect-1.1.0.tgz} - '@radix-ui/rect@1.1.1': resolution: {integrity: sha512-HPwpGIzkl28mWyZqG52jiqDJ12waP11Pa1lGoiyUkIEuMLBP0oeK/C89esbXrxsky5we7dfd8U58nm0SgAWpVw==, tarball: https://registry.npmjs.org/@radix-ui/rect/-/rect-1.1.1.tgz} - '@rolldown/pluginutils@1.0.0-beta.38': - resolution: {integrity: sha512-N/ICGKleNhA5nc9XXQG/kkKHJ7S55u0x0XUJbbkmdCnFuoRkM1Il12q9q0eX19+M7KKUEPw/daUPIRnxhcxAIw==, tarball: https://registry.npmjs.org/@rolldown/pluginutils/-/pluginutils-1.0.0-beta.38.tgz} + '@rolldown/pluginutils@1.0.0-beta.43': + resolution: {integrity: sha512-5Uxg7fQUCmfhax7FJke2+8B6cqgeUJUD9o2uXIKXhD+mG0mL6NObmVoi9wXEU1tY89mZKgAYA6fTbftx3q2ZPQ==, tarball: https://registry.npmjs.org/@rolldown/pluginutils/-/pluginutils-1.0.0-beta.43.tgz} '@rollup/pluginutils@5.0.5': resolution: {integrity: sha512-6aEYR910NyP73oHiJglti74iRyOwgFU4x3meH/H8OJx6Ry0j6cOVZ5X/wTvub7G7Ao6qaHBEaNsV3GLJkSsF+Q==, tarball: https://registry.npmjs.org/@rollup/pluginutils/-/pluginutils-5.0.5.tgz} @@ -2557,6 +2127,9 @@ packages: '@sinonjs/fake-timers@10.3.0': resolution: {integrity: sha512-V4BG07kuYSUkTCSBHG8G8TNhM+F19jXFWnQtzj+we8DrkpSBCee9Z3Ms8yiGer/dlmhe35/Xdgyo3/0rQKg7YA==, tarball: https://registry.npmjs.org/@sinonjs/fake-timers/-/fake-timers-10.3.0.tgz} + '@standard-schema/spec@1.0.0': + resolution: {integrity: sha512-m2bOd0f2RT9k8QJx1JN85cZYyH1RqFBdlwtkSlf4tBDYLCiiZnv1fIIwacK6cqwXavOydf0NPToMQgpKq+dVlA==, tarball: https://registry.npmjs.org/@standard-schema/spec/-/spec-1.0.0.tgz} + '@storybook/addon-docs@9.1.2': resolution: {integrity: sha512-U3eHJ8lQFfEZ/OcgdKkUBbW2Y2tpAsHfy8lQOBgs5Pgj9biHEJcUmq+drOS/sJhle673eoBcUFmspXulI4KP1w==, tarball: https://registry.npmjs.org/@storybook/addon-docs/-/addon-docs-9.1.2.tgz} peerDependencies: @@ -2728,8 +2301,8 @@ packages: resolution: {integrity: sha512-fB0R+fa3AUqbLHWyxXa2kGVtf1Fe1ZZFr0Zp6AIbIAzXb2mKbEXl+PCQNUOaq5lbTab5tfctfXRNsWXxa2f7Aw==, tarball: https://registry.npmjs.org/@testing-library/dom/-/dom-9.3.3.tgz} 
engines: {node: '>=14'} - '@testing-library/jest-dom@6.6.3': - resolution: {integrity: sha512-IteBhl4XqYNkM54f4ejhLRJiZNqcSCoXUOG2CPK7qbD322KjQozM4kHQOfkG2oln9b9HTYqs+Sae8vBATubxxA==, tarball: https://registry.npmjs.org/@testing-library/jest-dom/-/jest-dom-6.6.3.tgz} + '@testing-library/jest-dom@6.9.1': + resolution: {integrity: sha512-zIcONa+hVtVSSep9UT3jZ5rizo2BsxgyDYU7WFD5eICBE7no3881HGeb/QkGfsJs6JTkY1aQhT7rIPC7e+0nnA==, tarball: https://registry.npmjs.org/@testing-library/jest-dom/-/jest-dom-6.9.1.tgz} engines: {node: '>=14', npm: '>=6', yarn: '>=1'} '@testing-library/react@14.3.1': @@ -2788,8 +2361,8 @@ packages: '@types/body-parser@1.19.2': resolution: {integrity: sha512-ALYone6pm6QmwZoAgeyNksccT9Q4AWZQ6PvfwR37GT6r6FWUPguq6sUmNGSMV2Wr761oQoBxwGGa6DR5o1DC9g==, tarball: https://registry.npmjs.org/@types/body-parser/-/body-parser-1.19.2.tgz} - '@types/chai@5.2.2': - resolution: {integrity: sha512-8kB30R7Hwqf40JPiKhVzodJs2Qc1ZJ5zuT3uzw5Hq/dhNCl3G3l83jfpdI1e20BP348+fV7VIL/+FxaXkqBmWg==, tarball: https://registry.npmjs.org/@types/chai/-/chai-5.2.2.tgz} + '@types/chai@5.2.3': + resolution: {integrity: sha512-Mw558oeA9fFbv65/y4mHtXDs9bPnFMZAL/jxdPFUpOHHIXX91mcgEHbS5Lahr+pwZFR8A7GQleRWeI6cGFC2UA==, tarball: https://registry.npmjs.org/@types/chai/-/chai-5.2.3.tgz} '@types/chroma-js@2.4.0': resolution: {integrity: sha512-JklMxityrwjBTjGY2anH8JaTx3yjRU3/sEHSblLH1ba5lqcSh1LnImXJZO5peJfXyqKYWjHTGy4s5Wz++hARrw==, tarball: https://registry.npmjs.org/@types/chroma-js/-/chroma-js-2.4.0.tgz} @@ -2806,8 +2379,8 @@ packages: '@types/cookie@0.6.0': resolution: {integrity: sha512-4Kh9a6B2bQciAhf7FSuMRRkUWecJgJu9nPnx3yzpsfXX/c50REIqpHY4C82bXP90qrLtXtkDxTZosYO3UpOwlA==, tarball: https://registry.npmjs.org/@types/cookie/-/cookie-0.6.0.tgz} - '@types/d3-array@3.2.1': - resolution: {integrity: sha512-Y2Jn2idRrLzUfAKV2LyRImR+y4oa2AntrgID95SHJxuMUrkNXmanDSed71sRNZysveJVt1hLLemQZIady0FpEg==, tarball: https://registry.npmjs.org/@types/d3-array/-/d3-array-3.2.1.tgz} + 
'@types/d3-array@3.2.2': + resolution: {integrity: sha512-hOLWVbm7uRza0BYXpIIW5pxfrKe0W+D5lrFiAEYR+pb6w3N2SwSMaJbXdUfSEv+dT4MfHBLtn5js0LAWaO6otw==, tarball: https://registry.npmjs.org/@types/d3-array/-/d3-array-3.2.2.tgz} '@types/d3-color@3.1.3': resolution: {integrity: sha512-iO90scth9WAbmgv7ogoq57O9YpKmFBbmoEoCHDB2xMBY0+/KVrqAaCDyCE16dUspeOvIxFFRI+0sEtqDqy2b4A==, tarball: https://registry.npmjs.org/@types/d3-color/-/d3-color-3.1.3.tgz} @@ -2818,11 +2391,11 @@ packages: '@types/d3-interpolate@3.0.4': resolution: {integrity: sha512-mgLPETlrpVV1YRJIglr4Ez47g7Yxjl1lj7YKsiMCb27VJH9W8NVM6Bb9d8kkpG/uAQS5AmbA48q2IAolKKo1MA==, tarball: https://registry.npmjs.org/@types/d3-interpolate/-/d3-interpolate-3.0.4.tgz} - '@types/d3-path@3.1.0': - resolution: {integrity: sha512-P2dlU/q51fkOc/Gfl3Ul9kicV7l+ra934qBFXCFhrZMOL6du1TM0pm1ThYvENukyOn5h9v+yMJ9Fn5JK4QozrQ==, tarball: https://registry.npmjs.org/@types/d3-path/-/d3-path-3.1.0.tgz} + '@types/d3-path@3.1.1': + resolution: {integrity: sha512-VMZBYyQvbGmWyWVea0EHs/BwLgxc+MKi1zLDCONksozI4YJMcTt8ZEuIR4Sb1MMTE8MMW49v0IwI5+b7RmfWlg==, tarball: https://registry.npmjs.org/@types/d3-path/-/d3-path-3.1.1.tgz} - '@types/d3-scale@4.0.8': - resolution: {integrity: sha512-gkK1VVTr5iNiYJ7vWDI+yUFFlszhNMtVeneJ6lUTKPjprsvLLI9/tgEGiXJOnlINJA8FyA88gfnQsHbybVZrYQ==, tarball: https://registry.npmjs.org/@types/d3-scale/-/d3-scale-4.0.8.tgz} + '@types/d3-scale@4.0.9': + resolution: {integrity: sha512-dLmtwB8zkAeO/juAMfnV+sItKjlsw2lKdZVVy6LRr0cBmegxSABiLEpGVmSJJ8O08i4+sGR6qQtb6WtuwJdvVw==, tarball: https://registry.npmjs.org/@types/d3-scale/-/d3-scale-4.0.9.tgz} '@types/d3-shape@3.1.7': resolution: {integrity: sha512-VLvUQ33C+3J+8p+Daf+nYSOsjB4GXp19/S/aGo60m9h1v6XaxjiT82lKVWJCfzhtuZ3yD7i/TPeC/fuKLLOSmg==, tarball: https://registry.npmjs.org/@types/d3-shape/-/d3-shape-3.1.7.tgz} @@ -2929,8 +2502,8 @@ packages: '@types/node@20.17.16': resolution: {integrity: 
sha512-vOTpLduLkZXePLxHiHsBLp98mHGnl8RptV4YAO3HfKO5UHjDvySGbxKtpYfy8Sx5+WKcgc45qNreJJRVM3L6mw==, tarball: https://registry.npmjs.org/@types/node/-/node-20.17.16.tgz} - '@types/node@22.18.8': - resolution: {integrity: sha512-pAZSHMiagDR7cARo/cch1f3rXy0AEXwsVsVH09FcyeJVAzCnGgmYis7P3JidtTUjyadhTeSo8TgRPswstghDaw==, tarball: https://registry.npmjs.org/@types/node/-/node-22.18.8.tgz} + '@types/node@22.18.13': + resolution: {integrity: sha512-Bo45YKIjnmFtv6I1TuC8AaHBbqXtIo+Om5fE4QiU1Tj8QR/qt+8O3BAtOimG5IFmwaWiPmB3Mv3jtYzBA4Us2A==, tarball: https://registry.npmjs.org/@types/node/-/node-22.18.13.tgz} '@types/parse-json@4.0.2': resolution: {integrity: sha512-dISoDXWWQwUquiKsyZ4Ng+HX2KsPL7LyHKHQwgGFEA3IaKac4Obd+h2a/a6waisAoepJlBcx9paWqjA8/HVjCw==, tarball: https://registry.npmjs.org/@types/parse-json/-/parse-json-4.0.2.tgz} @@ -2957,10 +2530,10 @@ packages: peerDependencies: '@types/react': ^18.0.0 - '@types/react-dom@19.1.11': - resolution: {integrity: sha512-3BKc/yGdNTYQVVw4idqHtSOcFsgGuBbMveKCOgF8wQ5QtrYOc3jDIlzg3jef04zcXFIHLelyGlj0T+BJ8+KN+w==, tarball: https://registry.npmjs.org/@types/react-dom/-/react-dom-19.1.11.tgz} + '@types/react-dom@19.2.2': + resolution: {integrity: sha512-9KQPoO6mZCi7jcIStSnlOWn2nEF3mNmyr3rIAsGnAbQKYbRLyqmeSc39EVgtxXVia+LMT8j3knZLAZAh+xLmrw==, tarball: https://registry.npmjs.org/@types/react-dom/-/react-dom-19.2.2.tgz} peerDependencies: - '@types/react': ^19.0.0 + '@types/react': ^19.2.0 '@types/react-syntax-highlighter@15.5.13': resolution: {integrity: sha512-uLGJ87j6Sz8UaBAooU0T6lWJ0dBmjZgN1PZTrj05TNql2/XpC6+4HhMT5syIdFUUt+FASfCeLLv4kBygNU+8qA==, tarball: https://registry.npmjs.org/@types/react-syntax-highlighter/-/react-syntax-highlighter-15.5.13.tgz} @@ -2977,8 +2550,8 @@ packages: '@types/react-window@1.8.8': resolution: {integrity: sha512-8Ls660bHR1AUA2kuRvVG9D/4XpRC6wjAaPT9dil7Ckc76eP9TKWZwwmgfq8Q1LANX3QNDnoU4Zp48A3w+zK69Q==, tarball: https://registry.npmjs.org/@types/react-window/-/react-window-1.8.8.tgz} - '@types/react@19.1.17': - 
resolution: {integrity: sha512-Qec1E3mhALmaspIrhWt9jkQMNdw6bReVu64mjvhbhq2NFPftLPVr+l1SZgmw/66WwBNpDh7ao5AT6gF5v41PFA==, tarball: https://registry.npmjs.org/@types/react/-/react-19.1.17.tgz} + '@types/react@19.2.2': + resolution: {integrity: sha512-6mDvHUFSjyT2B2yeNx2nUgMxh9LtOWvkhIU3uePn2I2oyNymUAX1NIsdgviM4CH+JSrp2D2hsMvJOkxY+0wNRA==, tarball: https://registry.npmjs.org/@types/react/-/react-19.2.2.tgz} '@types/reactcss@1.2.13': resolution: {integrity: sha512-gi3S+aUi6kpkF5vdhUsnkwbiSEIU/BEJyD7kBy2SudWBUuKmJk8AQKE0OVcQQeEy40Azh0lV6uynxlikYIJuwg==, tarball: https://registry.npmjs.org/@types/reactcss/-/reactcss-1.2.13.tgz} @@ -3015,8 +2588,8 @@ packages: '@types/tough-cookie@4.0.5': resolution: {integrity: sha512-/Ad8+nIOV7Rl++6f1BdKxFSMgmoqEoYbHRpPcx3JEfv8VRsQe9Z4mCXeJBzxs7mbHY/XOZZuXlRNfhpVPbs6ZA==, tarball: https://registry.npmjs.org/@types/tough-cookie/-/tough-cookie-4.0.5.tgz} - '@types/trusted-types@1.0.6': - resolution: {integrity: sha512-230RC8sFeHoT6sSUlRO6a8cAnclO06eeiq1QDfiv2FGCLWFvvERWgwIQD4FWqD9A69BN7Lzee4OXwoMVnnsWDw==, tarball: https://registry.npmjs.org/@types/trusted-types/-/trusted-types-1.0.6.tgz} + '@types/trusted-types@2.0.7': + resolution: {integrity: sha512-ScaPdn1dQczgbl0QFTeTOmVHFULt394XJgOQNoyVhZ6r2vLnMLJfBPd53SB52T/3G36VI1/g2MZaX0cwDuXsfw==, tarball: https://registry.npmjs.org/@types/trusted-types/-/trusted-types-2.0.7.tgz} '@types/ua-parser-js@0.7.36': resolution: {integrity: sha512-N1rW+njavs70y2cApeIw1vLMYXRwfBy+7trgavGuuTfOd7j1Yh7QTRc/yqsPl6ncokt72ZXuxEU0PiCp9bSwNQ==, tarball: https://registry.npmjs.org/@types/ua-parser-js/-/ua-parser-js-0.7.36.tgz} @@ -3048,8 +2621,8 @@ packages: '@ungap/structured-clone@1.3.0': resolution: {integrity: sha512-WmoN8qaIAo7WTYWbAZuG8PYEhn5fkz7dZrqTBZ7dtt//lL2Gwms1IcnQ5yHqjDfX8Ft5j4YzDM23f87zBfDe9g==, tarball: https://registry.npmjs.org/@ungap/structured-clone/-/structured-clone-1.3.0.tgz} - '@vitejs/plugin-react@5.0.4': - resolution: {integrity: 
sha512-La0KD0vGkVkSk6K+piWDKRUyg8Rl5iAIKRMH0vMJI0Eg47bq1eOxmoObAaQG37WMW9MSyk7Cs8EIWwJC1PtzKA==, tarball: https://registry.npmjs.org/@vitejs/plugin-react/-/plugin-react-5.0.4.tgz} + '@vitejs/plugin-react@5.1.0': + resolution: {integrity: sha512-4LuWrg7EKWgQaMJfnN+wcmbAW+VSsCmqGohftWjuct47bv8uE4n/nPpq4XjJPsxgq00GGG5J8dvBczp8uxScew==, tarball: https://registry.npmjs.org/@vitejs/plugin-react/-/plugin-react-5.1.0.tgz} engines: {node: ^20.19.0 || >=22.12.0} peerDependencies: vite: ^4.2.0 || ^5.0.0 || ^6.0.0 || ^7.0.0 @@ -3057,6 +2630,9 @@ packages: '@vitest/expect@3.2.4': resolution: {integrity: sha512-Io0yyORnB6sikFlt8QW5K7slY4OjqNX9jmJQ02QDda8lyM6B5oNgVWoSoKPac8/kgnCUzuHQKrSLtu/uOqqrig==, tarball: https://registry.npmjs.org/@vitest/expect/-/expect-3.2.4.tgz} + '@vitest/expect@4.0.6': + resolution: {integrity: sha512-5j8UUlBVhOjhj4lR2Nt9sEV8b4WtbcYh8vnfhTNA2Kn5+smtevzjNq+xlBuVhnFGXiyPPNzGrOVvmyHWkS5QGg==, tarball: https://registry.npmjs.org/@vitest/expect/-/expect-4.0.6.tgz} + '@vitest/mocker@3.2.4': resolution: {integrity: sha512-46ryTE9RZO/rfDd7pEqFl7etuyzekzEhUbTW3BvmeO/BcCMEgq59BKhek3dXDWgAj4oMK6OZi+vRr1wPW6qjEQ==, tarball: https://registry.npmjs.org/@vitest/mocker/-/mocker-3.2.4.tgz} peerDependencies: @@ -3068,15 +2644,41 @@ packages: vite: optional: true + '@vitest/mocker@4.0.6': + resolution: {integrity: sha512-3COEIew5HqdzBFEYN9+u0dT3i/NCwppLnO1HkjGfAP1Vs3vti1Hxm/MvcbC4DAn3Szo1M7M3otiAaT83jvqIjA==, tarball: https://registry.npmjs.org/@vitest/mocker/-/mocker-4.0.6.tgz} + peerDependencies: + msw: ^2.4.9 + vite: ^6.0.0 || ^7.0.0-0 + peerDependenciesMeta: + msw: + optional: true + vite: + optional: true + '@vitest/pretty-format@3.2.4': resolution: {integrity: sha512-IVNZik8IVRJRTr9fxlitMKeJeXFFFN0JaB9PHPGQ8NKQbGpfjlTx9zO4RefN8gp7eqjNy8nyK3NZmBzOPeIxtA==, tarball: https://registry.npmjs.org/@vitest/pretty-format/-/pretty-format-3.2.4.tgz} + '@vitest/pretty-format@4.0.6': + resolution: {integrity: 
sha512-4vptgNkLIA1W1Nn5X4x8rLJBzPiJwnPc+awKtfBE5hNMVsoAl/JCCPPzNrbf+L4NKgklsis5Yp2gYa+XAS442g==, tarball: https://registry.npmjs.org/@vitest/pretty-format/-/pretty-format-4.0.6.tgz} + + '@vitest/runner@4.0.6': + resolution: {integrity: sha512-trPk5qpd7Jj+AiLZbV/e+KiiaGXZ8ECsRxtnPnCrJr9OW2mLB72Cb824IXgxVz/mVU3Aj4VebY+tDTPn++j1Og==, tarball: https://registry.npmjs.org/@vitest/runner/-/runner-4.0.6.tgz} + + '@vitest/snapshot@4.0.6': + resolution: {integrity: sha512-PaYLt7n2YzuvxhulDDu6c9EosiRuIE+FI2ECKs6yvHyhoga+2TBWI8dwBjs+IeuQaMtZTfioa9tj3uZb7nev1g==, tarball: https://registry.npmjs.org/@vitest/snapshot/-/snapshot-4.0.6.tgz} + '@vitest/spy@3.2.4': resolution: {integrity: sha512-vAfasCOe6AIK70iP5UD11Ac4siNUNJ9i/9PZ3NKx07sG6sUxeag1LWdNrMWeKKYBLlzuK+Gn65Yd5nyL6ds+nw==, tarball: https://registry.npmjs.org/@vitest/spy/-/spy-3.2.4.tgz} + '@vitest/spy@4.0.6': + resolution: {integrity: sha512-g9jTUYPV1LtRPRCQfhbMintW7BTQz1n6WXYQYRQ25qkyffA4bjVXjkROokZnv7t07OqfaFKw1lPzqKGk1hmNuQ==, tarball: https://registry.npmjs.org/@vitest/spy/-/spy-4.0.6.tgz} + '@vitest/utils@3.2.4': resolution: {integrity: sha512-fB2V0JFrQSMsCo9HiSq3Ezpdv4iYaXRG1Sx8edX3MwxfyNn83mKiGzOcH+Fkxt4MHxr3y42fQi1oeAInqgX2QA==, tarball: https://registry.npmjs.org/@vitest/utils/-/utils-3.2.4.tgz} + '@vitest/utils@4.0.6': + resolution: {integrity: sha512-bG43VS3iYKrMIZXBo+y8Pti0O7uNju3KvNn6DrQWhQQKcLavMB+0NZfO1/QBAEbq0MaQ3QjNsnnXlGQvsh0Z6A==, tarball: https://registry.npmjs.org/@vitest/utils/-/utils-4.0.6.tgz} + '@xterm/addon-canvas@0.7.0': resolution: {integrity: sha512-LF5LYcfvefJuJ7QotNRdRSPc9YASAVDeoT5uyXS/nZshZXjYplGXRECBGiznwvhNL2I8bq1Lf5MzRwstsYQ2Iw==, tarball: https://registry.npmjs.org/@xterm/addon-canvas/-/addon-canvas-0.7.0.tgz} peerDependencies: @@ -3144,6 +2746,10 @@ packages: resolution: {integrity: sha512-RZNwNclF7+MS/8bDg70amg32dyeZGZxiDuQmZxKLAlQjr3jGyLx+4Kkk58UO7D2QdgFIQCovuSuZESne6RG6XQ==, tarball: https://registry.npmjs.org/agent-base/-/agent-base-6.0.2.tgz} engines: {node: '>= 6.0.0'} + 
agent-base@7.1.4: + resolution: {integrity: sha512-MnA+YT8fwfJPgBx3m60MNqakm30XOkyIoH1y6huTQvC0PwZG7ki8NacLBcrPbNoo8vEZy7Jpuk7+jMO+CUovTQ==, tarball: https://registry.npmjs.org/agent-base/-/agent-base-7.1.4.tgz} + engines: {node: '>= 14'} + ajv@6.12.6: resolution: {integrity: sha512-j3fVLgvTo527anyYyJOGTYJbG+vnnQYvE0m5mmkc1TK+nxAppkCLMIL0aZ4dblVCNoGShhm+kzE4ZUykBoMg4g==, tarball: https://registry.npmjs.org/ajv/-/ajv-6.12.6.tgz} @@ -3195,10 +2801,6 @@ packages: argparse@2.0.1: resolution: {integrity: sha512-8+9WqebbFzpX9OR+Wa6O29asIogeRMzcGtAINdpMHHyAg10f05aSFVBbcEqGf/PXw1EjAZ+q2/bEBg3DvurK3Q==, tarball: https://registry.npmjs.org/argparse/-/argparse-2.0.1.tgz} - aria-hidden@1.2.4: - resolution: {integrity: sha512-y+CcFFwelSXpLZk/7fMB2mUbGtX9lKycf1MWJ7CaTIERyitVlyQx6C+sxcROU2BAJ24OiZyK+8wj2i8AlBoS3A==, tarball: https://registry.npmjs.org/aria-hidden/-/aria-hidden-1.2.4.tgz} - engines: {node: '>=10'} - aria-hidden@1.2.6: resolution: {integrity: sha512-ik3ZgC9dY/lYVVM++OISsaYDeg1tb0VtP5uL3ouh1koGOaUMDPpbFIei4JkFimWUFPn90sbMNMXQAIVOlnYKJA==, tarball: https://registry.npmjs.org/aria-hidden/-/aria-hidden-1.2.6.tgz} engines: {node: '>=10'} @@ -3252,8 +2854,8 @@ packages: resolution: {integrity: sha512-wvUjBtSGN7+7SjNpq/9M2Tg350UZD3q62IFZLbRAR1bSMlCo1ZaeW+BJ+D090e4hIIZLBcTDWe4Mh4jvUDajzQ==, tarball: https://registry.npmjs.org/available-typed-arrays/-/available-typed-arrays-1.0.7.tgz} engines: {node: '>= 0.4'} - axios@1.12.0: - resolution: {integrity: sha512-oXTDccv8PcfjZmPGlWsPSwtOJCZ/b6W5jAMCNcfwJbCzDckwG0jrYJFaWH1yvivfCXjVzV/SPDEhMB3Q+DSurg==, tarball: https://registry.npmjs.org/axios/-/axios-1.12.0.tgz} + axios@1.13.1: + resolution: {integrity: sha512-hU4EGxxt+j7TQijx1oYdAjw4xuIp1wRQSsbMFwSthCWeBQur1eF+qJ5iQ5sN3Tw8YRzQNKb8jszgBdMDVqwJcw==, tarball: https://registry.npmjs.org/axios/-/axios-1.13.1.tgz} babel-jest@29.7.0: resolution: {integrity: sha512-BrvGY3xZSwEcCzKvKsCi2GgHqDqsYkOP4/by5xCgIwGXQxIEh+8ew3gmrE1y7XRR6LHZIj6yLYnUi/mm2KXKBg==, tarball: 
https://registry.npmjs.org/babel-jest/-/babel-jest-29.7.0.tgz} @@ -3297,6 +2899,10 @@ packages: resolution: {integrity: sha512-uLfgBi+7IBNay8ECBO2mVMGZAc1VgZWEChxm4lv+TobGdG82LnXMjuNGo/BSSZZL4UmkWhxEHP2f5ziLNwGWMA==, tarball: https://registry.npmjs.org/baseline-browser-mapping/-/baseline-browser-mapping-2.8.10.tgz} hasBin: true + baseline-browser-mapping@2.8.22: + resolution: {integrity: sha512-/tk9kky/d8T8CTXIQYASLyhAxR5VwL3zct1oAoVTaOUHwrmsGnfbRwNdEq+vOl2BN8i3PcDdP0o4Q+jjKQoFbQ==, tarball: https://registry.npmjs.org/baseline-browser-mapping/-/baseline-browser-mapping-2.8.22.tgz} + hasBin: true + bcrypt-pbkdf@1.0.2: resolution: {integrity: sha512-qeFIXtP4MSoi6NLqO12WfqARWWuCKi2Rn/9hJLEmtB5yTNr9DqFWkJRCf2qShWzPeAMRnOgCrq0sg/KLv5ES9w==, tarball: https://registry.npmjs.org/bcrypt-pbkdf/-/bcrypt-pbkdf-1.0.2.tgz} @@ -3304,6 +2910,9 @@ packages: resolution: {integrity: sha512-aVNobHnJqLiUelTaHat9DZ1qM2w0C0Eym4LPI/3JxOnSokGVdsl1T1kN7TFvsEAD8G47A6VKQ0TVHqbBnYMJlQ==, tarball: https://registry.npmjs.org/better-opn/-/better-opn-3.0.2.tgz} engines: {node: '>=12.0.0'} + bidi-js@1.0.3: + resolution: {integrity: sha512-RKshQI1R3YQ+n9YJz2QQ147P66ELpa1FQEg20Dk8oW9t2KgLbpDLLp9aGZ7y8WHSshDknG0bknqGw5/tyCs5tw==, tarball: https://registry.npmjs.org/bidi-js/-/bidi-js-1.0.3.tgz} + binary-extensions@2.3.0: resolution: {integrity: sha512-Ceh+7ox5qe7LJuLHoY0feh3pHuUDHAcRUeyL2VYghZwfpkNIy/+8Ocg0a3UuSoYzavmylwuLWQOf3hl0jjMMIw==, tarball: https://registry.npmjs.org/binary-extensions/-/binary-extensions-2.3.0.tgz} engines: {node: '>=8'} @@ -3327,6 +2936,11 @@ packages: engines: {node: ^6 || ^7 || ^8 || ^9 || ^10 || ^11 || ^12 || >=13.7} hasBin: true + browserslist@4.27.0: + resolution: {integrity: sha512-AXVQwdhot1eqLihwasPElhX2tAZiBjWdJ9i/Zcj2S6QYIjkx62OKSfnobkriB81C3l4w0rVy3Nt4jaTBltYEpw==, tarball: https://registry.npmjs.org/browserslist/-/browserslist-4.27.0.tgz} + engines: {node: ^6 || ^7 || ^8 || ^9 || ^10 || ^11 || ^12 || >=13.7} + hasBin: true + bser@2.1.1: resolution: {integrity: 
sha512-gQxTNE/GAfIIrmHLUE3oJyp5FO6HRBfhjnw4/wMmA63ZGDJnWBmgY/lyQBpnDUkGmAhbSe39tx2d/iTOAfglwQ==, tarball: https://registry.npmjs.org/bser/-/bser-2.1.1.tgz} @@ -3379,6 +2993,9 @@ packages: caniuse-lite@1.0.30001746: resolution: {integrity: sha512-eA7Ys/DGw+pnkWWSE/id29f2IcPHVoE8wxtvE5JdvD2V28VTDPy1yEeo11Guz0sJ4ZeGRcm3uaTcAqK1LXaphA==, tarball: https://registry.npmjs.org/caniuse-lite/-/caniuse-lite-1.0.30001746.tgz} + caniuse-lite@1.0.30001752: + resolution: {integrity: sha512-vKUk7beoukxE47P5gcVNKkDRzXdVofotshHwfR9vmpeFKxmI5PBpgOMC18LUJUA/DvJ70Y7RveasIBraqsyO/g==, tarball: https://registry.npmjs.org/caniuse-lite/-/caniuse-lite-1.0.30001752.tgz} + case-anything@2.1.13: resolution: {integrity: sha512-zlOQ80VrQ2Ue+ymH5OuM/DlDq64mEm+B9UTdHULv5osUMD6HalNTblf2b1u/m6QecjsnOkBpqVZ+XPwIVsy7Ng==, tarball: https://registry.npmjs.org/case-anything/-/case-anything-2.1.13.tgz} engines: {node: '>=12.13'} @@ -3386,13 +3003,13 @@ packages: ccount@2.0.1: resolution: {integrity: sha512-eyrF0jiFpY+3drT6383f1qhkbGsLSifNAjA61IUjZjmLCWjItY6LB9ft9YhoDgwfmclB2zhu51Lc7+95b8NRAg==, tarball: https://registry.npmjs.org/ccount/-/ccount-2.0.1.tgz} - chai@5.2.1: - resolution: {integrity: sha512-5nFxhUrX0PqtyogoYOA8IPswy5sZFTOsBFl/9bNsmDLgsxYTzSZQJDPppDnZPTQbzSEm0hqGjWPzRemQCYbD6A==, tarball: https://registry.npmjs.org/chai/-/chai-5.2.1.tgz} + chai@5.3.3: + resolution: {integrity: sha512-4zNhdJD/iOjSH0A05ea+Ke6MU5mmpQcbQsSOkgdaUMJ9zTlDTD/GYlwohmIE2u0gaxHYiVHEn1Fw9mZ/ktJWgw==, tarball: https://registry.npmjs.org/chai/-/chai-5.3.3.tgz} engines: {node: '>=18'} - chalk@3.0.0: - resolution: {integrity: sha512-4D3B6Wf41KOYRFdszmDqMCGq5VV/uMAB273JILmO+3jAlh8X4qDtdtgCR3fxtbLEMzSx22QdhnDcJvu2u1fVwg==, tarball: https://registry.npmjs.org/chalk/-/chalk-3.0.0.tgz} - engines: {node: '>=8'} + chai@6.2.0: + resolution: {integrity: sha512-aUTnJc/JipRzJrNADXVvpVqi6CO0dn3nx4EVPxijri+fj3LUUDyZQOgVeW54Ob3Y1Xh9Iz8f+CgaCl8v0mn9bA==, tarball: https://registry.npmjs.org/chai/-/chai-6.2.0.tgz} + engines: {node: '>=18'} 
chalk@4.1.2: resolution: {integrity: sha512-oKnbhFyRIXpUuez8iBMmyEa4nbj4IOQyuhc/wy9kY7/WVPcwIO9VA668Pu8RkO7+0G76SLROeyw9CpQ061i4mA==, tarball: https://registry.npmjs.org/chalk/-/chalk-4.1.2.tgz} @@ -3499,8 +3116,8 @@ packages: resolution: {integrity: sha512-eYm0QWBtUrBWZWG0d386OGAw16Z995PiOVo2B7bjWSbHedGl5e0ZWaq65kOGgUSNesEIDkB9ISbTg/JK9dhCZA==, tarball: https://registry.npmjs.org/clsx/-/clsx-2.1.1.tgz} engines: {node: '>=6'} - cmdk@1.0.4: - resolution: {integrity: sha512-AnsjfHyHpQ/EFeAnG216WY7A5LiYCoZzCSygiLvfXC3H3LFGCprErteUcszaVluGOhuOTbJS3jWHrSDYPBBygg==, tarball: https://registry.npmjs.org/cmdk/-/cmdk-1.0.4.tgz} + cmdk@1.1.1: + resolution: {integrity: sha512-Vsv7kFaXm+ptHDMZ7izaRsP70GgrW9NBNGswt9OZaVBLlE0SNpDq8eu/VGXyF9r7M0azK3Wy7OlYXsuyYLFzHg==, tarball: https://registry.npmjs.org/cmdk/-/cmdk-1.1.1.tgz} peerDependencies: react: ^18 || ^19 || ^19.0.0-rc react-dom: ^18 || ^19 || ^19.0.0-rc @@ -3591,14 +3208,18 @@ packages: resolution: {integrity: sha512-p0SaNjrHOnQeR8/VnfGbmg9te2kfyYSQ7Sc/j/6DtPL3JQvKxmjO9TSjNFpujqV3vEYYBvNNvXSxzyksBWAx1Q==, tarball: https://registry.npmjs.org/cron-parser/-/cron-parser-4.9.0.tgz} engines: {node: '>=12.0.0'} - cronstrue@2.50.0: - resolution: {integrity: sha512-ULYhWIonJzlScCCQrPUG5uMXzXxSixty4djud9SS37DoNxDdkeRocxzHuAo4ImRBUK+mAuU5X9TSwEDccnnuPg==, tarball: https://registry.npmjs.org/cronstrue/-/cronstrue-2.50.0.tgz} + cronstrue@2.59.0: + resolution: {integrity: sha512-YKGmAy84hKH+hHIIER07VCAHf9u0Ldelx1uU6EBxsRPDXIA1m5fsKmJfyC3xBhw6cVC/1i83VdbL4PvepTrt8A==, tarball: https://registry.npmjs.org/cronstrue/-/cronstrue-2.59.0.tgz} hasBin: true cross-spawn@7.0.6: resolution: {integrity: sha512-uV2QOWP2nWzsy2aMp8aRibhi9dlzF5Hgh5SHaB9OiTGEyDTiJJyx0uy51QXdyWbtAHNua4XJzUKca3OzKUd3vA==, tarball: https://registry.npmjs.org/cross-spawn/-/cross-spawn-7.0.6.tgz} engines: {node: '>= 8'} + css-tree@3.1.0: + resolution: {integrity: sha512-0eW44TGN5SQXU1mWSkKwFstI/22X2bG1nYzZTYMAWjylYURhse752YgbE4Cx46AC+bAvI+/dYTPRk1LqSUnu6w==, tarball: 
https://registry.npmjs.org/css-tree/-/css-tree-3.1.0.tgz} + engines: {node: ^10 || ^12.20.0 || ^14.13.0 || >=15.0.0} + css.escape@1.5.1: resolution: {integrity: sha512-YUifsXXuknHlUsmlgyY0PKzgPOr7/FjCePfHNt0jxm83wHZi44VDMQ7/fGNkjY3/jV1MC+1CmZbaHzugyeRtpg==, tarball: https://registry.npmjs.org/css.escape/-/css.escape-1.5.1.tgz} @@ -3620,6 +3241,10 @@ packages: resolution: {integrity: sha512-AZL67abkUzIuvcHqk7c09cezpGNcxUxU4Ioi/05xHk4DQeTkWmGYftIE6ctU6AEt+Gn4n1lDStOtj7FKycP71A==, tarball: https://registry.npmjs.org/cssstyle/-/cssstyle-2.3.0.tgz} engines: {node: '>=8'} + cssstyle@5.3.1: + resolution: {integrity: sha512-g5PC9Aiph9eiczFpcgUhd9S4UUO3F+LHGRIi5NUMZ+4xtoIYbHNZwZnWA2JsFGe8OU8nl4WyaEFiZuGuxlutJQ==, tarball: https://registry.npmjs.org/cssstyle/-/cssstyle-5.3.1.tgz} + engines: {node: '>=20'} + csstype@3.1.3: resolution: {integrity: sha512-M1uQkMl8rQK/szD0LNhtqxIPLpimGm8sOBwU7lLnCpSbTyY3yeU1Vc7l4KT5zT4s/yOxHH5O7tIuuLOCnLADRw==, tarball: https://registry.npmjs.org/csstype/-/csstype-3.1.3.tgz} @@ -3671,6 +3296,10 @@ packages: resolution: {integrity: sha512-Jy/tj3ldjZJo63sVAvg6LHt2mHvl4V6AgRAmNDtLdm7faqtsx+aJG42rsyCo9JCoRVKwPFzKlIPx3DIibwSIaQ==, tarball: https://registry.npmjs.org/data-urls/-/data-urls-3.0.2.tgz} engines: {node: '>=12'} + data-urls@6.0.0: + resolution: {integrity: sha512-BnBS08aLUM+DKamupXs3w2tJJoqU+AkaE/+6vQxi/G/DPmIZFJJp9Dkb1kM03AZx8ADehDUZgsNxju3mPXZYIA==, tarball: https://registry.npmjs.org/data-urls/-/data-urls-6.0.0.tgz} + engines: {node: '>=20'} + date-fns@2.30.0: resolution: {integrity: sha512-fnULvOpxnC5/Vg3NCiWelDsLiUc9bRwAPs/+LfTLNvetFCtCTN+yQz15C/fs4AwX1R9K5GLtLfn8QW+dWisaAw==, tarball: https://registry.npmjs.org/date-fns/-/date-fns-2.30.0.tgz} engines: {node: '>=0.11'} @@ -3686,15 +3315,6 @@ packages: supports-color: optional: true - debug@4.4.1: - resolution: {integrity: sha512-KcKCqiftBJcZr++7ykoDIEwSa3XWowTfNPo92BYxjXiyYEVrUQh2aLyhxBCwww+heortUFxEJYcRzosstTEBYQ==, tarball: https://registry.npmjs.org/debug/-/debug-4.4.1.tgz} - 
engines: {node: '>=6.0'} - peerDependencies: - supports-color: '*' - peerDependenciesMeta: - supports-color: - optional: true - debug@4.4.3: resolution: {integrity: sha512-RGwwWnwQvkVfavKVt22FGLw+xYSdzARwm0ru6DhTVA3umU5hZc28V3kO4stgYryrTlLpuvgI9GiijltAjNbcqA==, tarball: https://registry.npmjs.org/debug/-/debug-4.4.3.tgz} engines: {node: '>=6.0'} @@ -3710,6 +3330,9 @@ packages: decimal.js@10.4.3: resolution: {integrity: sha512-VBBaLc1MgL5XpzgIP7ny5Z6Nx3UrRkIViUkPUdtl9aya5amy3De1gsUUSB1g3+3sExYNjCAsAznmukyxCb1GRA==, tarball: https://registry.npmjs.org/decimal.js/-/decimal.js-10.4.3.tgz} + decimal.js@10.6.0: + resolution: {integrity: sha512-YpgQiITW3JXGntzdUmyUR1V812Hn8T1YVXhCu+wO3OpS4eU9l4YdD3qjyiKdV6mvV29zapkMeD390UVEf2lkUg==, tarball: https://registry.npmjs.org/decimal.js/-/decimal.js-10.6.0.tgz} + decode-named-character-reference@1.2.0: resolution: {integrity: sha512-c6fcElNV6ShtZXmsgNgFFV5tVX2PaV4g+MOAkb8eXHvn6sryJBrZa9r0zV6+dtTyoCKxtDy5tyQ5ZwQuidtd+Q==, tarball: https://registry.npmjs.org/decode-named-character-reference/-/decode-named-character-reference-1.2.0.tgz} @@ -3821,6 +3444,9 @@ packages: engines: {node: '>=12'} deprecated: Use your platform's native DOMException instead + dompurify@3.2.6: + resolution: {integrity: sha512-/2GogDQlohXPZe6D6NOgQvXLPSYBqIWMnZ8zzOhn09REE4eyAzb+Hed3jhoM9OkuaJ8P6ZGTTVWQKAi8ieIzfQ==, tarball: https://registry.npmjs.org/dompurify/-/dompurify-3.2.6.tgz} + dpdm@3.14.0: resolution: {integrity: sha512-YJzsFSyEtj88q5eTELg3UWU7TVZkG1dpbF4JDQ3t1b07xuzXmdoGeSz9TKOke1mUuOpWlk4q+pBh+aHzD6GBTg==, tarball: https://registry.npmjs.org/dpdm/-/dpdm-3.14.0.tgz} hasBin: true @@ -3841,6 +3467,9 @@ packages: electron-to-chromium@1.5.228: resolution: {integrity: sha512-nxkiyuqAn4MJ1QbobwqJILiDtu/jk14hEAWaMiJmNPh1Z+jqoFlBFZjdXwLWGeVSeu9hGLg6+2G9yJaW8rBIFA==, tarball: https://registry.npmjs.org/electron-to-chromium/-/electron-to-chromium-1.5.228.tgz} + electron-to-chromium@1.5.244: + resolution: {integrity: 
sha512-OszpBN7xZX4vWMPJwB9illkN/znA8M36GQqQxi6MNy9axWxhOfJyZZJtSLQCpEFLHP2xK33BiWx9aIuIEXVCcw==, tarball: https://registry.npmjs.org/electron-to-chromium/-/electron-to-chromium-1.5.244.tgz} + emittery@0.13.1: resolution: {integrity: sha512-DeWwawk6r5yR9jFgnDKYt4sLS0LmHJJi3ZOnb5/JdbYwj3nW+FxQnHIjhBKz8YLC7oRNPVM9NQ47I3CVx34eqQ==, tarball: https://registry.npmjs.org/emittery/-/emittery-0.13.1.tgz} engines: {node: '>=12'} @@ -3869,6 +3498,10 @@ packages: resolution: {integrity: sha512-V0hjH4dGPh9Ao5p0MoRY6BVqtwCjhz6vI5LT8AJ55H+4g9/4vbHx1I54fS0XuclLhDHArPQCiMjDxjaL8fPxhw==, tarball: https://registry.npmjs.org/entities/-/entities-4.5.0.tgz} engines: {node: '>=0.12'} + entities@6.0.1: + resolution: {integrity: sha512-aN97NXWF6AWBTahfVOIrB/NShkzi5H7F9r1s9mD3cDj4Ko5f2qhhVoYMibXF7GlLveb/D2ioWay8lxI97Ven3g==, tarball: https://registry.npmjs.org/entities/-/entities-6.0.1.tgz} + engines: {node: '>=0.12'} + error-ex@1.3.2: resolution: {integrity: sha512-7dFHNmqeFSEt2ZBsCriorKnn3Z2pj+fd9kmI6QoWw4//DL+icEBfc0U7qJCisqrTsKTjw4fNFy2pW9OqStD84g==, tarball: https://registry.npmjs.org/error-ex/-/error-ex-1.3.2.tgz} @@ -3883,6 +3516,9 @@ packages: es-get-iterator@1.1.3: resolution: {integrity: sha512-sPZmqHBe6JIiTfN5q2pEi//TwxmAFHwj/XEuYjTuse78i8KxaqMTTzxPoFKuzRpDpTJ+0NAbpfenkmH2rePtuw==, tarball: https://registry.npmjs.org/es-get-iterator/-/es-get-iterator-1.1.3.tgz} + es-module-lexer@1.7.0: + resolution: {integrity: sha512-jEQoCwk8hyb2AZziIOLhDqpm5+2ww5uIE6lkO/6jcOCusfk6LhMHpXXfBLXTZ7Ydyt0j4VoUQv6uGNYbdW+kBA==, tarball: https://registry.npmjs.org/es-module-lexer/-/es-module-lexer-1.7.0.tgz} + es-object-atoms@1.1.1: resolution: {integrity: sha512-FGgH2h8zKNim9ljj7dankFPcICIK9Cp5bm+c2gQSYePhpaG5+esrLODihIorn+Pe6FGJzWhXQotPv73jTaldXA==, tarball: https://registry.npmjs.org/es-object-atoms/-/es-object-atoms-1.1.1.tgz} engines: {node: '>= 0.4'} @@ -3901,11 +3537,6 @@ packages: engines: {node: '>=18'} hasBin: true - esbuild@0.25.3: - resolution: {integrity: 
sha512-qKA6Pvai73+M2FtftpNKRxJ78GIjmFXFxd/1DVBqGo/qNhLSfv+G12n9pNoWdytJC8U00TrViOwpjT0zgqQS8Q==, tarball: https://registry.npmjs.org/esbuild/-/esbuild-0.25.3.tgz} - engines: {node: '>=18'} - hasBin: true - escalade@3.2.0: resolution: {integrity: sha512-WUj2qlxaQtO4g6Pq5c29GTcWGDyd8itL8zTlipgECz3JesAiiOKotd8JU6otB3PACgG6xkJUyVhboMS+bje/jA==, tarball: https://registry.npmjs.org/escalade/-/escalade-3.2.0.tgz} engines: {node: '>=6'} @@ -3993,6 +3624,10 @@ packages: resolution: {integrity: sha512-Zk/eNKV2zbjpKzrsQ+n1G6poVbErQxJ0LBOJXaKZ1EViLzH+hrLu9cdXI4zw9dBQJslwBEpbQ2P1oS7nDxs6jQ==, tarball: https://registry.npmjs.org/exit/-/exit-0.1.2.tgz} engines: {node: '>= 0.8.0'} + expect-type@1.2.2: + resolution: {integrity: sha512-JhFGDVJ7tmDJItKhYgJCGLOWjuK9vPxiXoUFLwLDc99NlmklilbiQJwoctZtt13+xMw91MCk/REan6MWHqDjyA==, tarball: https://registry.npmjs.org/expect-type/-/expect-type-1.2.2.tgz} + engines: {node: '>=12.0.0'} + expect@29.7.0: resolution: {integrity: sha512-2Zks0hf1VLFYI1kbh0I5jP3KHHyCHpkfyHBzsSXRFgl/Bg9mWYfMW8oD+PdMPlEwy5HNsR9JutYy6pMeOh61nw==, tarball: https://registry.npmjs.org/expect/-/expect-29.7.0.tgz} engines: {node: ^14.15.0 || ^16.10.0 || >=18.0.0} @@ -4007,8 +3642,8 @@ packages: fast-deep-equal@3.1.3: resolution: {integrity: sha512-f3qQ9oQy9j2AhBe/H9VC91wLmKBCCU/gDOnKNAYG5hswO7BLKj09Hc5HYNz9cGI++xlpDCIgDaitVs03ATR84Q==, tarball: https://registry.npmjs.org/fast-deep-equal/-/fast-deep-equal-3.1.3.tgz} - fast-equals@5.2.2: - resolution: {integrity: sha512-V7/RktU11J3I36Nwq2JnZEM7tNm17eBJz+u25qdxBZeCKiX6BkVSZQjwWIr+IobgnZy+ag73tTZgZi7tr0LrBw==, tarball: https://registry.npmjs.org/fast-equals/-/fast-equals-5.2.2.tgz} + fast-equals@5.3.2: + resolution: {integrity: sha512-6rxyATwPCkaFIL3JLqw8qXqMpIZ942pTX/tbQFkRsDGblS8tNGtlUauA/+mt6RUfqn/4MoEr+WDkYoIQbibWuQ==, tarball: https://registry.npmjs.org/fast-equals/-/fast-equals-5.3.2.tgz} engines: {node: '>=6.0.0'} fast-glob@3.3.3: @@ -4167,6 +3802,10 @@ packages: resolution: {integrity: 
sha512-DyFP3BM/3YHTQOCUL/w0OZHR0lpKeGrxotcHWcqNEdnltqFwXVfhEBQ94eIo34AfQpo0rGki4cyIiftY06h2Fg==, tarball: https://registry.npmjs.org/get-caller-file/-/get-caller-file-2.0.5.tgz} engines: {node: 6.* || 8.* || >= 10.*} + get-intrinsic@1.3.0: + resolution: {integrity: sha512-9fSjSaos/fRIVIp+xSJlE6lfwhES7LNtKaCBIamHsjr2na1BiABJPo0mOjjz8GJDURarmCPGqaiVg5mfjb98CQ==, tarball: https://registry.npmjs.org/get-intrinsic/-/get-intrinsic-1.3.0.tgz} + engines: {node: '>= 0.4'} + get-intrinsic@1.3.1: resolution: {integrity: sha512-fk1ZVEeOX9hVZ6QzoBNEC55+Ucqg4sTVwrVuigZhuRPESVFpMyXnd3sbXvPOwp7Y9riVyANiqhEuRF0G1aVSeQ==, tarball: https://registry.npmjs.org/get-intrinsic/-/get-intrinsic-1.3.1.tgz} engines: {node: '>= 0.4'} @@ -4278,6 +3917,10 @@ packages: resolution: {integrity: sha512-oWv4T4yJ52iKrufjnyZPkrN0CH3QnrUqdB6In1g5Fe1mia8GmF36gnfNySxoZtxD5+NmYw1EElVXiBk93UeskA==, tarball: https://registry.npmjs.org/html-encoding-sniffer/-/html-encoding-sniffer-3.0.0.tgz} engines: {node: '>=12'} + html-encoding-sniffer@4.0.0: + resolution: {integrity: sha512-Y22oTqIU4uuPgEemfz7NDJz6OeKf12Lsu+QC+s3BVpda64lTiMYCyGwg5ki4vFxkMwQdeZDl2adZoqUgdFuTgQ==, tarball: https://registry.npmjs.org/html-encoding-sniffer/-/html-encoding-sniffer-4.0.0.tgz} + engines: {node: '>=18'} + html-escaper@2.0.2: resolution: {integrity: sha512-H2iMtd0I4Mt5eYiapRdIDjp+XzelXQ0tFE4JS7YFwFevXXMmOp9myNrUvCg0D6ws8iqkRPBfKHgbwig1SmlLfg==, tarball: https://registry.npmjs.org/html-escaper/-/html-escaper-2.0.2.tgz} @@ -4292,10 +3935,18 @@ packages: resolution: {integrity: sha512-n2hY8YdoRE1i7r6M0w9DIw5GgZN0G25P8zLCRQ8rjXtTU3vsNFBI/vWK/UIeE6g5MUUz6avwAPXmL6Fy9D/90w==, tarball: https://registry.npmjs.org/http-proxy-agent/-/http-proxy-agent-5.0.0.tgz} engines: {node: '>= 6'} + http-proxy-agent@7.0.2: + resolution: {integrity: sha512-T1gkAiYYDWYx3V5Bmyu7HcfcvL7mUrTWiM6yOfa3PIphViJ/gFPbvidQ+veqSOHci/PxBcDabeUNCzpOODJZig==, tarball: https://registry.npmjs.org/http-proxy-agent/-/http-proxy-agent-7.0.2.tgz} + engines: {node: '>= 14'} + 
https-proxy-agent@5.0.1: resolution: {integrity: sha512-dFcAjpTQFgoLMzC2VwU+C/CbS7uRL0lWmxDITmqm7C+7F0Odmj6s9l6alZc6AELXhrnggM2CeWSXHGOdX2YtwA==, tarball: https://registry.npmjs.org/https-proxy-agent/-/https-proxy-agent-5.0.1.tgz} engines: {node: '>= 6'} + https-proxy-agent@7.0.6: + resolution: {integrity: sha512-vK9P5/iUfdl95AI+JVyUuIcVtd4ofvtrOr3HNtM2yxC9bnMbEdp3x01OhQNnjb8IJYi38VlTE3mBXwcfvywuSw==, tarball: https://registry.npmjs.org/https-proxy-agent/-/https-proxy-agent-7.0.6.tgz} + engines: {node: '>= 14'} + human-signals@2.1.0: resolution: {integrity: sha512-B4FFZ6q/T2jhhksgkbEW3HBvWIfDW85snkQgawt07S7J5QXTk6BkNV+0yAeZrM5QpMAdYlocGoljn0sJ/WQkFw==, tarball: https://registry.npmjs.org/human-signals/-/human-signals-2.1.0.tgz} engines: {node: '>=10.17.0'} @@ -4751,6 +4402,15 @@ packages: canvas: optional: true + jsdom@27.0.1: + resolution: {integrity: sha512-SNSQteBL1IlV2zqhwwolaG9CwhIhTvVHWg3kTss/cLE7H/X4644mtPQqYvCfsSrGQWt9hSZcgOXX8bOZaMN+kA==, tarball: https://registry.npmjs.org/jsdom/-/jsdom-27.0.1.tgz} + engines: {node: '>=20'} + peerDependencies: + canvas: ^3.0.0 + peerDependenciesMeta: + canvas: + optional: true + jsesc@3.1.0: resolution: {integrity: sha512-/sM3dO2FOzXjKQhJuo0Q173wf2KOo8t4I8vHy6lF9poUp7bKT0/NHE8fPX23PwfhnykfqnC2xRxOnVw5XuGIaA==, tarball: https://registry.npmjs.org/jsesc/-/jsesc-3.1.0.tgz} engines: {node: '>=6'} @@ -4789,8 +4449,8 @@ packages: resolution: {integrity: sha512-eTIzlVOSUR+JxdDFepEYcBMtZ9Qqdef+rnzWdRZuMbOywu5tO2w2N7rqjoANZ5k9vywhL6Br1VRjUIgTQx4E8w==, tarball: https://registry.npmjs.org/kleur/-/kleur-3.0.3.tgz} engines: {node: '>=6'} - knip@5.64.1: - resolution: {integrity: sha512-80XnLsyeXuyxj1F4+NBtQFHxaRH0xWRw8EKwfQ6EkVZZ0bSz/kqqan08k/Qg8ajWsFPhFq+0S2RbLCBGIQtuOg==, tarball: https://registry.npmjs.org/knip/-/knip-5.64.1.tgz} + knip@5.66.4: + resolution: {integrity: sha512-HmTnxdmoHAvwKmFktRGY1++tXRI8J36eVrOpfj/ybTVVT1QBKBlbBEN1s3cJBx9UL+hXTZDNQif+gs7fUKldbw==, tarball: https://registry.npmjs.org/knip/-/knip-5.66.4.tgz} engines: 
{node: '>=18.18.0'} hasBin: true peerDependencies: @@ -4846,9 +4506,6 @@ packages: resolution: {integrity: sha512-8XPvpAA8uyhfteu8pIvQxpJZ7SYYdpUivZpGy6sFsBuKRY/7rQGavedeB8aK+Zkyq6upMFVL/9AW6vOYzfRyLg==, tarball: https://registry.npmjs.org/log-symbols/-/log-symbols-4.1.0.tgz} engines: {node: '>=10'} - long@5.2.3: - resolution: {integrity: sha512-lcHwpNoggQTObv5apGNCTdJrO69eHOZMi4BNC+rTLER8iHAqGrUVeLh/irVIM7zTw2bOXA8T6uNPeujwOLg/2Q==, tarball: https://registry.npmjs.org/long/-/long-5.2.3.tgz} - long@5.3.2: resolution: {integrity: sha512-mNAgZ1GmyNhD7AuqnTG3/VQ26o760+ZYBPKjPvugO8+nLbYfX6TVpJPseBvopbdY+qpZ/lKUnmEc1LeZYS3QAA==, tarball: https://registry.npmjs.org/long/-/long-5.3.2.tgz} @@ -4859,8 +4516,8 @@ packages: resolution: {integrity: sha512-lyuxPGr/Wfhrlem2CL/UcnUc1zcqKAImBDzukY7Y5F/yQiNdko6+fRLevlw1HgMySw7f611UIY408EtxRSoK3Q==, tarball: https://registry.npmjs.org/loose-envify/-/loose-envify-1.4.0.tgz} hasBin: true - loupe@3.2.0: - resolution: {integrity: sha512-2NCfZcT5VGVNX9mSZIxLRkEAegDGBpuQZBy13desuHeVORmBDyAET4TkJr4SjqQy3A8JDofMN6LpkK8Xcm/dlw==, tarball: https://registry.npmjs.org/loupe/-/loupe-3.2.0.tgz} + loupe@3.2.1: + resolution: {integrity: sha512-CdzqowRJCeLU72bHvWqwRBBlLcMEtIvGrlvef74kMnV2AolS9Y8xUv1I0U/MNAWMhBlKIoyuEgoJ0t/bbwHbLQ==, tarball: https://registry.npmjs.org/loupe/-/loupe-3.2.1.tgz} lowlight@1.20.0: resolution: {integrity: sha512-8Ktj+prEb1RoCPkEOrPMYUN/nCggB7qAWe3a7OpMjWQkh3l2RD5wKRQ+o8Q8YuI9RG/xs95waaI/E6ym/7NsTw==, tarball: https://registry.npmjs.org/lowlight/-/lowlight-1.20.0.tgz} @@ -4868,11 +4525,15 @@ packages: lru-cache@10.4.3: resolution: {integrity: sha512-JNAzZcXrCt42VGLuYz0zfAzDfAvJWW6AfYlDBQyDV5DClI2m5sAmK+OIO7s59XfsRsWHp02jAJrRadPRGTt6SQ==, tarball: https://registry.npmjs.org/lru-cache/-/lru-cache-10.4.3.tgz} + lru-cache@11.2.2: + resolution: {integrity: sha512-F9ODfyqML2coTIsQpSkRHnLSZMtkU8Q+mSfcaIyKwy58u+8k5nvAYeiNhsyMARvzNcXJ9QfWVrcPsC9e9rAxtg==, tarball: https://registry.npmjs.org/lru-cache/-/lru-cache-11.2.2.tgz} + 
engines: {node: 20 || >=22} + lru-cache@5.1.1: resolution: {integrity: sha512-KpNARQA3Iwv+jTA0utUVVbrh+Jlrr1Fv0e56GGzAFOXN7dk/FviaDW8LHmK52DlcH4WP2n6gI8vN1aesBFgo9w==, tarball: https://registry.npmjs.org/lru-cache/-/lru-cache-5.1.1.tgz} - lucide-react@0.545.0: - resolution: {integrity: sha512-7r1/yUuflQDSt4f1bpn5ZAocyIxcTyVyBBChSVtBKn5M+392cPmI5YJMWOJKk/HUWGm5wg83chlAZtCcGbEZtw==, tarball: https://registry.npmjs.org/lucide-react/-/lucide-react-0.545.0.tgz} + lucide-react@0.552.0: + resolution: {integrity: sha512-g9WCjmfwqbexSnZE+2cl21PCfXOcqnGeWeMTNAOGEfpPbm/ZF4YIq77Z8qWrxbu660EKuLB4nSLggoKnCb+isw==, tarball: https://registry.npmjs.org/lucide-react/-/lucide-react-0.552.0.tgz} peerDependencies: react: ^16.5.1 || ^17.0.0 || ^18.0.0 || ^19.0.0 @@ -4887,6 +4548,9 @@ packages: magic-string@0.30.17: resolution: {integrity: sha512-sNPKHvyjVf7gyjwS4xGTaW/mCnF8wnjtifKBEhxfZ7E/S8tQ0rssrwGNn6q8JH/ohItJfSQp9mBtQYuTlH5QnA==, tarball: https://registry.npmjs.org/magic-string/-/magic-string-0.30.17.tgz} + magic-string@0.30.21: + resolution: {integrity: sha512-vd2F4YUyEXKGcLHoq+TEyCjxueSeHnFxyyjNp80yg0XV4vUhnDer/lvvlqM/arB5bXQN5K2/3oinyCRyx8T2CQ==, tarball: https://registry.npmjs.org/magic-string/-/magic-string-0.30.21.tgz} + make-dir@4.0.0: resolution: {integrity: sha512-hXdUTZYIVOt1Ex//jAQi+wTZZpUpwBj/0QsOzqegb3rGMMeJiSEu5xLHnYfBrRV4RH2+OCSOO95Is/7x1WJ4bw==, tarball: https://registry.npmjs.org/make-dir/-/make-dir-4.0.0.tgz} engines: {node: '>=10'} @@ -4900,6 +4564,11 @@ packages: markdown-table@3.0.4: resolution: {integrity: sha512-wiYz4+JrLyb/DqW2hkFJxP7Vd7JuTDm77fvbM8VfEQdmSMqcImWeeRbHwZjBjIFki/VaMK2BhFi7oUUZeM5bqw==, tarball: https://registry.npmjs.org/markdown-table/-/markdown-table-3.0.4.tgz} + marked@14.0.0: + resolution: {integrity: sha512-uIj4+faQ+MgHgwUW1l2PsPglZLOLOT1uErt06dAPtx2kjteLAkbsd/0FiYg/MGS+i7ZKLb7w2WClxHkzOOuryQ==, tarball: https://registry.npmjs.org/marked/-/marked-14.0.0.tgz} + engines: {node: '>= 18'} + hasBin: true + material-colors@1.2.6: resolution: 
{integrity: sha512-6qE4B9deFBIa9YSpOc9O0Sgc43zTeVYbgDT5veRKSlB2+ZuHNoVVxA1L/ckMUayV9Ay9y7Z/SZCLcGteW9i7bg==, tarball: https://registry.npmjs.org/material-colors/-/material-colors-1.2.6.tgz} @@ -4952,6 +4621,9 @@ packages: mdast-util-to-string@4.0.0: resolution: {integrity: sha512-0H44vDimn51F0YwvxSJSm0eCDOJTRlmN0R1yBh4HLj9wiV1Dn0QoXGbvFAWj2hSItVTlCmBF1hqKlIyUBVFLPg==, tarball: https://registry.npmjs.org/mdast-util-to-string/-/mdast-util-to-string-4.0.0.tgz} + mdn-data@2.12.2: + resolution: {integrity: sha512-IEn+pegP1aManZuckezWCO+XZQDplx1366JoVhTpMpBB1sPey/SbveZQUosKiKiGYjg1wH4pMlNgXbCiYgihQA==, tarball: https://registry.npmjs.org/mdn-data/-/mdn-data-2.12.2.tgz} + media-typer@0.3.0: resolution: {integrity: sha512-dq+qelQ9akHpcOl/gUVRTxVIOkAJ1wR3QAvb4RsVjS8oVoFjDGTc679wJYmUmknUF5HwMLOgb5O+a3KxfWapPQ==, tarball: https://registry.npmjs.org/media-typer/-/media-typer-0.3.0.tgz} engines: {node: '>= 0.6'} @@ -5100,8 +4772,8 @@ packages: resolution: {integrity: sha512-qxBgB7Qa2sEQgHFjj0dSigq7fX4k6Saisd5Nelwp2q8mlbAFh5dHV9JTTlF8viYJLSSWgMCZFUom8PJcMNBoJw==, tarball: https://registry.npmjs.org/mock-socket/-/mock-socket-9.3.1.tgz} engines: {node: '>= 8'} - monaco-editor@0.53.0: - resolution: {integrity: sha512-0WNThgC6CMWNXXBxTbaYYcunj08iB5rnx4/G56UOPeL9UVIUGGHA1GR0EWIh9Ebabj7NpCRawQ5b0hfN1jQmYQ==, tarball: https://registry.npmjs.org/monaco-editor/-/monaco-editor-0.53.0.tgz} + monaco-editor@0.55.1: + resolution: {integrity: sha512-jz4x+TJNFHwHtwuV9vA9rMujcZRb0CEilTEwG2rRSpe/A7Jdkuj8xPKttCgOh+v/lkHy7HsZ64oj+q3xoAFl9A==, tarball: https://registry.npmjs.org/monaco-editor/-/monaco-editor-0.55.1.tgz} moo-color@1.0.3: resolution: {integrity: sha512-i/+ZKXMDf6aqYtBhuOcej71YSlbjT3wCO/4H1j8rPvxDJEifdwgg5MaFyu6iYAT8GBZJg2z0dkgK4YMzvURALQ==, tarball: https://registry.npmjs.org/moo-color/-/moo-color-1.0.3.tgz} @@ -5137,11 +4809,6 @@ packages: engines: {node: ^10 || ^12 || ^13.7 || ^14 || >=15.0.1} hasBin: true - napi-postinstall@0.3.3: - resolution: {integrity: 
sha512-uTp172LLXSxuSYHv/kou+f6KW3SMppU9ivthaVTXian9sOt3XM/zHYHpRZiLgQoxeWfYUnslNWQHF1+G71xcow==, tarball: https://registry.npmjs.org/napi-postinstall/-/napi-postinstall-0.3.3.tgz} - engines: {node: ^12.20.0 || ^14.18.0 || >=16.0.0} - hasBin: true - natural-compare@1.4.0: resolution: {integrity: sha512-OWND8ei3VtNC9h7V60qff3SVobHr996CTwgxubgyQYEpg290h9J0buyECNNJexkFm5sOajh5G116RYA1c8ZMSw==, tarball: https://registry.npmjs.org/natural-compare/-/natural-compare-1.4.0.tgz} @@ -5155,6 +4822,9 @@ packages: node-releases@2.0.21: resolution: {integrity: sha512-5b0pgg78U3hwXkCM8Z9b2FJdPZlr9Psr9V2gQPESdGHqbntyFJKFW4r5TeWGFzafGY3hzs1JC62VEQMbl1JFkw==, tarball: https://registry.npmjs.org/node-releases/-/node-releases-2.0.21.tgz} + node-releases@2.0.27: + resolution: {integrity: sha512-nmh3lCkYZ3grZvqcCH+fjmQ7X+H0OeZgP40OierEaAptX4XofMh5kwNbWh7lBduUzCcV/8kZ+NDLCwm2iorIlA==, tarball: https://registry.npmjs.org/node-releases/-/node-releases-2.0.27.tgz} + normalize-path@3.0.0: resolution: {integrity: sha512-6eZs5Ls3WtCisHWp9S2GUy8dqkpGi4BVSz3GaqiE6ezub0512ESztXUwUB6C6IKbQkY2Pnb/mD4WYojCRwcwLA==, tarball: https://registry.npmjs.org/normalize-path/-/normalize-path-3.0.0.tgz} engines: {node: '>=0.10.0'} @@ -5224,8 +4894,8 @@ packages: outvariant@1.4.3: resolution: {integrity: sha512-+Sl2UErvtsoajRDKCE5/dBz4DIvHXQQnAxtQTF04OJxY0+DyZXSo5P5Bb7XYWOh81syohlYL24hbDwxedPUJCA==, tarball: https://registry.npmjs.org/outvariant/-/outvariant-1.4.3.tgz} - oxc-resolver@11.8.4: - resolution: {integrity: sha512-qpimS3tHHEf+kgESMAme+q+rj7aCzMya00u9YdKOKyX2o7q4lozjPo6d7ZTTi979KHEcVOPWdNTueAKdeNq72w==, tarball: https://registry.npmjs.org/oxc-resolver/-/oxc-resolver-11.8.4.tgz} + oxc-resolver@11.12.0: + resolution: {integrity: sha512-zmS2q2txiB+hS2u0aiIwmvITIJN8c8ThlWoWB762Wx5nUw8WBlttp0rzt8nnuP1cGIq9YJ7sGxfsgokm+SQk5Q==, tarball: https://registry.npmjs.org/oxc-resolver/-/oxc-resolver-11.12.0.tgz} p-limit@2.3.0: resolution: {integrity: 
sha512-//88mFWSJx8lxCzwdAABTJL2MyWB12+eIY7MDL2SqLmAkeKU9qxRvWuSyTjm3FUmpBEMuFfckAIqEaVGUDxb6w==, tarball: https://registry.npmjs.org/p-limit/-/p-limit-2.3.0.tgz} @@ -5278,6 +4948,9 @@ packages: parse5@7.1.2: resolution: {integrity: sha512-Czj1WaSVpaoj0wbhMzLmWD69anp2WH7FXMB9n1Sy8/ZFF9jolSQVMu1Ij5WIyGmcBmhk7EOndpO4mIpihVqAXw==, tarball: https://registry.npmjs.org/parse5/-/parse5-7.1.2.tgz} + parse5@8.0.0: + resolution: {integrity: sha512-9m4m5GSgXjL4AjumKzq1Fgfp3Z8rsvjRNbnkVwfu2ImRqE5D0LnY2QfDen18FSY9C573YU5XxSapdHZTZ2WolA==, tarball: https://registry.npmjs.org/parse5/-/parse5-8.0.0.tgz} + parseurl@1.3.3: resolution: {integrity: sha512-CiyeOxFT/JZyN5m0z9PfXw4SCBJ6Sygz1Dpl0wqjlhDEGGBP1GnsUVEL0p63hoG1fcj3fHynXi9NYO4nWOL+qQ==, tarball: https://registry.npmjs.org/parseurl/-/parseurl-1.3.3.tgz} engines: {node: '>= 0.8'} @@ -5319,8 +4992,11 @@ packages: resolution: {integrity: sha512-gDKb8aZMDeD/tZWs9P6+q0J9Mwkdl6xMV8TjnGP3qJVJ06bdMgkbBlLU8IdfOsIsFz2BW1rNVT3XuNEl8zPAvw==, tarball: https://registry.npmjs.org/path-type/-/path-type-4.0.0.tgz} engines: {node: '>=8'} - pathval@2.0.0: - resolution: {integrity: sha512-vE7JKRyES09KiunauX7nd2Q9/L7lhok4smP9RZTDeD4MVs72Dp2qNFVz39Nz5a0FVEW0BJR6C0DYrq6unoziZA==, tarball: https://registry.npmjs.org/pathval/-/pathval-2.0.0.tgz} + pathe@2.0.3: + resolution: {integrity: sha512-WUjGcAqP1gQacoQe+OBJsFA7Ld4DyXuUIjZ5cc75cLHvJ7dtNsTugphxIADwspS+AraAUePCKrSVtPLFj/F88w==, tarball: https://registry.npmjs.org/pathe/-/pathe-2.0.3.tgz} + + pathval@2.0.1: + resolution: {integrity: sha512-//nshmD55c46FuFw26xV/xFAaB5HF9Xdap7HJBBnrKdAd6/GxDBaNA1870O79+9ueg61cZLSVc+OaFlfmObYVQ==, tarball: https://registry.npmjs.org/pathval/-/pathval-2.0.1.tgz} engines: {node: '>= 14.16'} picocolors@1.1.1: @@ -5459,8 +5135,8 @@ packages: property-information@7.1.0: resolution: {integrity: sha512-TwEZ+X+yCJmYfL7TPUOcvBZ4QfoT5YenQiJuX//0th53DE6w0xxLEtfK3iyryQFddXuvkIk51EEgrJQ0WJkOmQ==, tarball: 
https://registry.npmjs.org/property-information/-/property-information-7.1.0.tgz} - protobufjs@7.4.0: - resolution: {integrity: sha512-mRUWCc3KUU4w1jU8sGxICXH/gNS94DvI1gxqDvBzhj1JpcsimQkYiOJfwsPUykUI5ZaspFbSgmBLER8IrQ3tqw==, tarball: https://registry.npmjs.org/protobufjs/-/protobufjs-7.4.0.tgz} + protobufjs@7.5.4: + resolution: {integrity: sha512-CvexbZtbov6jW2eXAvLukXjXUW1TzFaivC46BpWc/3BpcCysb5Vffu+B3XHMm8lVEuy2Mm4XGex8hBSg1yapPg==, tarball: https://registry.npmjs.org/protobufjs/-/protobufjs-7.5.4.tgz} engines: {node: '>=12.0.0'} proxy-addr@2.0.7: @@ -5524,10 +5200,10 @@ packages: resolution: {integrity: sha512-kmob/FOTwep7DUWf9KjuenKX0vyvChr3oTdvvPt09V60Iz75FJp+T/0ZeHMbAfJj2WaVWqAPP5Hmm3PYzSPPKg==, tarball: https://registry.npmjs.org/react-docgen/-/react-docgen-8.0.0.tgz} engines: {node: ^20.9.0 || >=22} - react-dom@19.1.1: - resolution: {integrity: sha512-Dlq/5LAZgF0Gaz6yiqZCf6VCcZs1ghAJyrsu84Q/GT0gV+mCxbfmKNoGRKBYMJ8IEdGPqu49YWXD02GCknEDkw==, tarball: https://registry.npmjs.org/react-dom/-/react-dom-19.1.1.tgz} + react-dom@19.2.0: + resolution: {integrity: sha512-UlbRu4cAiGaIewkPyiRGJk0imDN2T3JjieT6spoL2UeSf5od4n5LB/mQ4ejmxhCFT1tYe8IvaFulzynWovsEFQ==, tarball: https://registry.npmjs.org/react-dom/-/react-dom-19.2.0.tgz} peerDependencies: - react: ^19.1.1 + react: ^19.2.0 react-fast-compare@2.0.4: resolution: {integrity: sha512-suNP+J1VU1MWFKcyt7RtjiSWUjvidmQSlqu+eHslq+342xCbGTYmC0mEhPCOHxlW0CywylOC1u2DFAT+bv4dBw==, tarball: https://registry.npmjs.org/react-fast-compare/-/react-fast-compare-2.0.4.tgz} @@ -5560,8 +5236,8 @@ packages: '@types/react': '>=18' react: '>=18' - react-refresh@0.17.0: - resolution: {integrity: sha512-z6F7K9bV85EfseRCp2bzrpyQ0Gkw1uLoCel9XBVWPg/TjRj94SkJzUTGfOa4bs7iJvBWtQG0Wq7wnI0syw3EBQ==, tarball: https://registry.npmjs.org/react-refresh/-/react-refresh-0.17.0.tgz} + react-refresh@0.18.0: + resolution: {integrity: sha512-QgT5//D3jfjJb6Gsjxv0Slpj23ip+HtOpnNgnb2S5zU3CB26G/IDPGoy4RJB42wzFE46DRsstbW6tKHoKbhAxw==, tarball: 
https://registry.npmjs.org/react-refresh/-/react-refresh-0.18.0.tgz} engines: {node: '>=0.10.0'} react-remove-scroll-bar@2.3.8: @@ -5574,16 +5250,6 @@ packages: '@types/react': optional: true - react-remove-scroll@2.6.3: - resolution: {integrity: sha512-pnAi91oOk8g8ABQKGF5/M9qxmmOPxaAnopyTHYfqYEwJhyFrbbBtHuSgtKEoH0jpcxx5o3hXqH1mNd9/Oi+8iQ==, tarball: https://registry.npmjs.org/react-remove-scroll/-/react-remove-scroll-2.6.3.tgz} - engines: {node: '>=10'} - peerDependencies: - '@types/react': '*' - react: ^16.8.0 || ^17.0.0 || ^18.0.0 || ^19.0.0 || ^19.0.0-rc - peerDependenciesMeta: - '@types/react': - optional: true - react-remove-scroll@2.7.1: resolution: {integrity: sha512-HpMh8+oahmIdOuS5aFKKY6Pyog+FNaZV/XyJOq7b4YFwsFHe5yYfdbIalI4k3vU2nSDql7YskmUseHsRrJqIPA==, tarball: https://registry.npmjs.org/react-remove-scroll/-/react-remove-scroll-2.7.1.tgz} engines: {node: '>=10'} @@ -5600,8 +5266,8 @@ packages: react: ^16.14.0 || ^17.0.0 || ^18.0.0 || ^19.0.0 || ^19.0.0-rc react-dom: ^16.14.0 || ^17.0.0 || ^18.0.0 || ^19.0.0 || ^19.0.0-rc - react-router@7.8.0: - resolution: {integrity: sha512-r15M3+LHKgM4SOapNmsH3smAizWds1vJ0Z9C4mWaKnT9/wD7+d/0jYcj6LmOvonkrO4Rgdyp4KQ/29gWN2i1eg==, tarball: https://registry.npmjs.org/react-router/-/react-router-7.8.0.tgz} + react-router@7.9.5: + resolution: {integrity: sha512-JmxqrnBZ6E9hWmf02jzNn9Jm3UqyeimyiwzD69NjxGySG6lIz/1LVPsoTCwN7NBX2XjCEa1LIX5EMz1j2b6u6A==, tarball: https://registry.npmjs.org/react-router/-/react-router-7.9.5.tgz} engines: {node: '>=20.0.0'} peerDependencies: react: '>=18' @@ -5656,8 +5322,8 @@ packages: react: ^15.0.0 || ^16.0.0 || ^17.0.0 || ^18.0.0 || ^19.0.0 react-dom: ^15.0.0 || ^16.0.0 || ^17.0.0 || ^18.0.0 || ^19.0.0 - react@19.1.1: - resolution: {integrity: sha512-w8nqGImo45dmMIfljjMwOGtbmC/mk4CMYhWIicdSflH91J9TyCyczcPFXJzrZ/ZXcgGRFeP6BU0BEJTw6tZdfQ==, tarball: https://registry.npmjs.org/react/-/react-19.1.1.tgz} + react@19.2.0: + resolution: {integrity: 
sha512-tmbWg6W31tQLeB5cdIBOicJDJRR2KzXsV7uSK9iNfLWQ5bIZfxuPEHp7M8wiHyHnn0DD1i7w3Zmin0FtkrwoCQ==, tarball: https://registry.npmjs.org/react/-/react-19.2.0.tgz} engines: {node: '>=0.10.0'} reactcss@1.2.3: @@ -5683,15 +5349,15 @@ packages: resolution: {integrity: sha512-GDhwkLfywWL2s6vEjyhri+eXmfH6j1L7JE27WhqLeYzoh/A3DBaYGEj2H/HFZCn/kMfim73FXxEJTw06WtxQwg==, tarball: https://registry.npmjs.org/readdirp/-/readdirp-4.1.2.tgz} engines: {node: '>= 14.18.0'} - recast@0.23.9: - resolution: {integrity: sha512-Hx/BGIbwj+Des3+xy5uAtAbdCyqK9y9wbBcDFDYanLS9JnMqf7OeF87HQwUimE87OEc72mr6tkKUKMBBL+hF9Q==, tarball: https://registry.npmjs.org/recast/-/recast-0.23.9.tgz} + recast@0.23.11: + resolution: {integrity: sha512-YTUo+Flmw4ZXiWfQKGcwwc11KnoRAYgzAE2E7mXKCjSviTKShtxBsN6YUUBB2gtaBzKzeKunxhUwNHQuRryhWA==, tarball: https://registry.npmjs.org/recast/-/recast-0.23.11.tgz} engines: {node: '>= 4'} recharts-scale@0.4.5: resolution: {integrity: sha512-kivNFO+0OcUNu7jQquLXAxz1FIwZj8nrj+YkOKc5694NbjCvcT6aSZiIzNzd2Kul4o4rTto8QVR9lMNtxD4G1w==, tarball: https://registry.npmjs.org/recharts-scale/-/recharts-scale-0.4.5.tgz} - recharts@2.15.0: - resolution: {integrity: sha512-cIvMxDfpAmqAmVgc4yb7pgm/O1tmmkl/CjrvXuW+62/+7jj/iF9Ykm+hb/UJt42TREHMyd3gb+pkgoa2MxgDIw==, tarball: https://registry.npmjs.org/recharts/-/recharts-2.15.0.tgz} + recharts@2.15.4: + resolution: {integrity: sha512-UT/q6fwS3c1dHbXv2uFgYJ9BMFHu3fwnd7AYZaEQhXuYQ4hgsxLvsUXzGdKeZrW5xopzDCvuA2N41WJ88I7zIw==, tarball: https://registry.npmjs.org/recharts/-/recharts-2.15.4.tgz} engines: {node: '>=14'} peerDependencies: react: ^16.0.0 || ^17.0.0 || ^18.0.0 || ^19.0.0 @@ -5727,6 +5393,10 @@ packages: resolution: {integrity: sha512-fGxEI7+wsG9xrvdjsrlmL22OMTTiHRwAMroiEeMgq8gzoLC/PQr7RsRDSTLUg/bZAZtF+TVIkHc6/4RIKrui+Q==, tarball: https://registry.npmjs.org/require-directory/-/require-directory-2.1.1.tgz} engines: {node: '>=0.10.0'} + require-from-string@2.0.2: + resolution: {integrity: 
sha512-Xf0nWe6RseziFMu+Ap9biiUbmplq6S9/p+7w7YXP/JBHhrUDDUhwa+vANyubuqfZWTveU//DYVGsDG7RKL/vEw==, tarball: https://registry.npmjs.org/require-from-string/-/require-from-string-2.0.2.tgz} + engines: {node: '>=0.10.0'} + requires-port@1.0.0: resolution: {integrity: sha512-KigOCHcocU3XODJxsu8i/j8T9tzT4adHiecwORRQ0ZZFcp7ahwXuRU1m+yuO90C5ZUyGeGfocHDI14M3L3yDAQ==, tarball: https://registry.npmjs.org/requires-port/-/requires-port-1.0.0.tgz} @@ -5785,11 +5455,14 @@ packages: engines: {node: '>=18.0.0', npm: '>=8.0.0'} hasBin: true + rrweb-cssom@0.8.0: + resolution: {integrity: sha512-guoltQEx+9aMf2gDZ0s62EcV8lsXR+0w8915TC3ITdn2YueuNjdAYh/levpU9nFaoChh9RUS5ZdQMrKfVEN9tw==, tarball: https://registry.npmjs.org/rrweb-cssom/-/rrweb-cssom-0.8.0.tgz} + run-parallel@1.2.0: resolution: {integrity: sha512-5l4VyZR86LZ/lDxZTR6jqL8AFE2S0IFLMP26AbjsLVADxHdhB/c0GUsH+y39UfCi3dzz8OlQuPmnaJOMoDHQBA==, tarball: https://registry.npmjs.org/run-parallel/-/run-parallel-1.2.0.tgz} - rxjs@7.8.1: - resolution: {integrity: sha512-AA3TVj+0A2iuIoQkWEK/tqFjBq2j+6PO6Y0zJcvzLAFhEFIO3HL0vls9hWLncZbAAbK0mar7oZ4V079I/qPMxg==, tarball: https://registry.npmjs.org/rxjs/-/rxjs-7.8.1.tgz} + rxjs@7.8.2: + resolution: {integrity: sha512-dhKf903U/PQZY6boNNtAGdWbG85WAbjT/1xYoZIC7FAY0yWapOBQVsVrDl58W86//e1VpMNBtRV4MaXfdMySFA==, tarball: https://registry.npmjs.org/rxjs/-/rxjs-7.8.2.tgz} safe-buffer@5.1.2: resolution: {integrity: sha512-Gd2UZBJDkXlY7GbJxfsE8/nvKkUEU1G38c1siN6QP6a9PT9MmHB8GnpscSmMJSoF8LOIrt8ud/wPtojys4G6+g==, tarball: https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.1.2.tgz} @@ -5804,11 +5477,11 @@ packages: resolution: {integrity: sha512-xAg7SOnEhrm5zI3puOOKyy1OMcMlIJZYNJY7xLBwSze0UjhPLnWfj2GF2EpT0jmzaJKIWKHLsaSSajf35bcYnA==, tarball: https://registry.npmjs.org/saxes/-/saxes-6.0.0.tgz} engines: {node: '>=v12.22.7'} - scheduler@0.26.0: - resolution: {integrity: sha512-NlHwttCI/l5gCPR3D1nNXtWABUmBwvZpEQiD4IXSbIDq8BzLIK/7Ir5gTFSGZDUu37K5cMNp0hFtzO38sC7gWA==, tarball: 
https://registry.npmjs.org/scheduler/-/scheduler-0.26.0.tgz} + scheduler@0.27.0: + resolution: {integrity: sha512-eNv+WrVbKu1f3vbYJT/xtiF5syA5HPIMtf9IgY/nKg0sWqzAUEvqY/xm7OcZc/qafLx/iO9FgOmeSAp4v5ti/Q==, tarball: https://registry.npmjs.org/scheduler/-/scheduler-0.27.0.tgz} - semver@7.7.2: - resolution: {integrity: sha512-RF0Fw+rO5AMf9MAyaRXI4AV0Ulj5lMHqVxxdSgiVbixSCXoEmmX/jk0CuJw4+3SqroYO9VoUh+HcuJivvtJemA==, tarball: https://registry.npmjs.org/semver/-/semver-7.7.2.tgz} + semver@7.7.3: + resolution: {integrity: sha512-SdsKMrI9TdgjdweUSR9MweHA4EJ8YxHn8DFaDisvhVlUOe4BF1tLD7GAj0lIqWVl+dPb/rExr0Btby5loQm20Q==, tarball: https://registry.npmjs.org/semver/-/semver-7.7.3.tgz} engines: {node: '>=10'} hasBin: true @@ -5820,8 +5493,8 @@ packages: resolution: {integrity: sha512-VqpjJZKadQB/PEbEwvFdO43Ax5dFBZ2UECszz8bQ7pi7wt//PWe1P6MN7eCnjsatYtBT6EuiClbjSWP2WrIoTw==, tarball: https://registry.npmjs.org/serve-static/-/serve-static-1.16.2.tgz} engines: {node: '>= 0.8.0'} - set-cookie-parser@2.7.1: - resolution: {integrity: sha512-IOc8uWeOZgnb3ptbCURJWNjWUPcO3ZnTTdzsurqERrP6nPyv+paC55vJM0LpOlT2ne+Ix+9+CRG1MNLlyZ4GjQ==, tarball: https://registry.npmjs.org/set-cookie-parser/-/set-cookie-parser-2.7.1.tgz} + set-cookie-parser@2.7.2: + resolution: {integrity: sha512-oeM1lpU/UvhTxw+g3cIfxXHyJRc/uidd3yK1P242gzHds0udQBYzs3y8j4gCCW+ZJ7ad0yctld8RYO+bdurlvw==, tarball: https://registry.npmjs.org/set-cookie-parser/-/set-cookie-parser-2.7.2.tgz} set-function-length@1.2.2: resolution: {integrity: sha512-pgRc4hJ4/sNjWCSS9AmnS40x3bNMDTknHgL5UaMBTMyJnU90EgWh1Rz+MC9eFu4BuN/UwZjKQuY/1v3rM7HMfg==, tarball: https://registry.npmjs.org/set-function-length/-/set-function-length-1.2.2.tgz} @@ -5864,6 +5537,9 @@ packages: resolution: {integrity: sha512-ZX99e6tRweoUXqR+VBrslhda51Nh5MTQwou5tnUDgbtyM0dBgmhEDtWGP/xbKn6hqfPRHujUNwz5fy/wbbhnpw==, tarball: https://registry.npmjs.org/side-channel/-/side-channel-1.1.0.tgz} engines: {node: '>= 0.4'} + siginfo@2.0.0: + resolution: {integrity: 
sha512-ybx0WO1/8bSBLEWXZvEd7gMW3Sn3JFlW3TvX1nREbDLRNQNaeNN8WK0meBwPdAaOI7TtRRRJn/Es1zhrrCHu7g==, tarball: https://registry.npmjs.org/siginfo/-/siginfo-2.0.0.tgz} + signal-exit@3.0.7: resolution: {integrity: sha512-wnD2ZE+l+SPC/uoS0vXeE9L1+0wuaMqKlfz9AMUo38JsyLSBWSFcHR1Rri62LZc12vLr1gb3jl7iwQhgwpAbGQ==, tarball: https://registry.npmjs.org/signal-exit/-/signal-exit-3.0.7.tgz} @@ -5918,6 +5594,9 @@ packages: resolution: {integrity: sha512-XlkWvfIm6RmsWtNJx+uqtKLS8eqFbxUg0ZzLXqY0caEy9l7hruX8IpiDnjsLavoBgqCCR71TqWO8MaXYheJ3RQ==, tarball: https://registry.npmjs.org/stack-utils/-/stack-utils-2.0.6.tgz} engines: {node: '>=10'} + stackback@0.0.2: + resolution: {integrity: sha512-1XMJE5fQo1jGH6Y/7ebnwPOBEkIEnT4QF32d5R1+VXdXveM0IBMJt8zfaxX1P3QhVwrYe+576+jkANtSS2mBbw==, tarball: https://registry.npmjs.org/stackback/-/stackback-0.0.2.tgz} + state-local@1.0.7: resolution: {integrity: sha512-HTEHMNieakEnoe33shBYcZ7NX83ACUjCu8c40iOGEZsngj9zRnkqS9j1pqQPXwobB0ZcVTk27REb7COQ0UR59w==, tarball: https://registry.npmjs.org/state-local/-/state-local-1.0.7.tgz} @@ -5929,6 +5608,9 @@ packages: resolution: {integrity: sha512-DvEy55V3DB7uknRo+4iOGT5fP1slR8wQohVdknigZPMpMstaKJQWhwiYBACJE3Ul2pTnATihhBYnRhZQHGBiRw==, tarball: https://registry.npmjs.org/statuses/-/statuses-2.0.2.tgz} engines: {node: '>= 0.8'} + std-env@3.10.0: + resolution: {integrity: sha512-5GS12FdOZNliM5mAOxFRg7Ir0pWz8MdpYm6AY6VPkGpbA7ZzmbzNcBJQ0GPvvyWgcY7QAhCgf9Uy89I03faLkg==, tarball: https://registry.npmjs.org/std-env/-/std-env-3.10.0.tgz} + stop-iteration-iterator@1.0.0: resolution: {integrity: sha512-iCGQj+0l0HOdZ2AEeBADlsRC+vsnDsZsbdSiH1yNSjcfKM7fdpCMfqAL/dwF5BLiw/XhRft/Wax6zQbhq2BcjQ==, tarball: https://registry.npmjs.org/stop-iteration-iterator/-/stop-iteration-iterator-1.0.0.tgz} engines: {node: '>= 0.4'} @@ -5946,8 +5628,8 @@ packages: react-dom: optional: true - storybook@9.1.2: - resolution: {integrity: sha512-TYcq7WmgfVCAQge/KueGkVlM/+g33sQcmbATlC3X6y/g2FEeSSLGrb6E6d3iemht8oio+aY6ld3YOdAnMwx45Q==, tarball: 
https://registry.npmjs.org/storybook/-/storybook-9.1.2.tgz} + storybook@9.1.16: + resolution: {integrity: sha512-339U14K6l46EFyRvaPS2ZlL7v7Pb+LlcXT8KAETrGPxq8v1sAjj2HAOB6zrlAK3M+0+ricssfAwsLCwt7Eg8TQ==, tarball: https://registry.npmjs.org/storybook/-/storybook-9.1.16.tgz} hasBin: true peerDependencies: prettier: ^2 || ^3 @@ -6084,9 +5766,15 @@ packages: tiny-warning@1.0.3: resolution: {integrity: sha512-lBN9zLN/oAf68o3zNXYrdCt1kP8WsiGW8Oo2ka41b2IM5JL/S1CTyX1rW0mb/zSuJun0ZUrDxx4sqvYS2FWzPA==, tarball: https://registry.npmjs.org/tiny-warning/-/tiny-warning-1.0.3.tgz} + tinybench@2.9.0: + resolution: {integrity: sha512-0+DUvqWMValLmha6lr4kD8iAMK1HzV0/aKnCtWb9v9641TnP/MFb7Pc2bxoxQjTXAErryXVgUOfv2YqNllqGeg==, tarball: https://registry.npmjs.org/tinybench/-/tinybench-2.9.0.tgz} + tinycolor2@1.6.0: resolution: {integrity: sha512-XPaBkWQJdsf3pLKJV9p4qN/S+fm2Oj8AIPo1BTUhg5oxkvm9+SVEGFdhyOz7tTdUTfvxMiAs4sp6/eZO2Ew+pw==, tarball: https://registry.npmjs.org/tinycolor2/-/tinycolor2-1.6.0.tgz} + tinyexec@0.3.2: + resolution: {integrity: sha512-KQQR9yN7R5+OSwaK0XQoj22pwHoTlgYqmUscPYoknOoWCWfj/5/ABTMRi69FrKU5ffPVh5QcFikpWJI/P1ocHA==, tarball: https://registry.npmjs.org/tinyexec/-/tinyexec-0.3.2.tgz} + tinyglobby@0.2.15: resolution: {integrity: sha512-j2Zq4NyQYG5XMST4cbs02Ak8iJUdxRM0XI5QyxXuZOzKOINmWurp3smXu3y5wDcJrptwpSjgXHzIQxR0omXljQ==, tarball: https://registry.npmjs.org/tinyglobby/-/tinyglobby-0.2.15.tgz} engines: {node: '>=12.0.0'} @@ -6095,10 +5783,21 @@ packages: resolution: {integrity: sha512-op4nsTR47R6p0vMUUoYl/a+ljLFVtlfaXkLQmqfLR1qHma1h/ysYk4hEXZ880bf2CYgTskvTa/e196Vd5dDQXw==, tarball: https://registry.npmjs.org/tinyrainbow/-/tinyrainbow-2.0.0.tgz} engines: {node: '>=14.0.0'} - tinyspy@4.0.3: - resolution: {integrity: sha512-t2T/WLB2WRgZ9EpE4jgPJ9w+i66UZfDc8wHh0xrwiRNN+UwH98GIJkTeZqX9rg0i0ptwzqW+uYeIF0T4F8LR7A==, tarball: https://registry.npmjs.org/tinyspy/-/tinyspy-4.0.3.tgz} + tinyrainbow@3.0.3: + resolution: {integrity: 
sha512-PSkbLUoxOFRzJYjjxHJt9xro7D+iilgMX/C9lawzVuYiIdcihh9DXmVibBe8lmcFrRi/VzlPjBxbN7rH24q8/Q==, tarball: https://registry.npmjs.org/tinyrainbow/-/tinyrainbow-3.0.3.tgz} + engines: {node: '>=14.0.0'} + + tinyspy@4.0.4: + resolution: {integrity: sha512-azl+t0z7pw/z958Gy9svOTuzqIk6xq+NSheJzn5MMWtWTFywIacg2wUlzKFGtt3cthx0r2SxMK0yzJOR0IES7Q==, tarball: https://registry.npmjs.org/tinyspy/-/tinyspy-4.0.4.tgz} engines: {node: '>=14.0.0'} + tldts-core@7.0.17: + resolution: {integrity: sha512-DieYoGrP78PWKsrXr8MZwtQ7GLCUeLxihtjC1jZsW1DnvSMdKPitJSe8OSYDM2u5H6g3kWJZpePqkp43TfLh0g==, tarball: https://registry.npmjs.org/tldts-core/-/tldts-core-7.0.17.tgz} + + tldts@7.0.17: + resolution: {integrity: sha512-Y1KQBgDd/NUc+LfOtKS6mNsC9CCaH+m2P1RoIZy7RAPo3C3/t8X45+zgut31cRZtZ3xKPjfn3TkGTrctC2TQIQ==, tarball: https://registry.npmjs.org/tldts/-/tldts-7.0.17.tgz} + hasBin: true + tmpl@1.0.5: resolution: {integrity: sha512-3f0uOEAQwIqGuWW2MVzYg8fV/QNnc/IpuJNG837rLuczAaLVHslWHZQj4IGiEl5Hs3kkbhwL9Ab7Hrsmuj+Smw==, tarball: https://registry.npmjs.org/tmpl/-/tmpl-1.0.5.tgz} @@ -6117,10 +5816,18 @@ packages: resolution: {integrity: sha512-Loo5UUvLD9ScZ6jh8beX1T6sO1w2/MpCRpEP7V280GKMVUQ0Jzar2U3UJPsrdbziLEMMhu3Ujnq//rhiFuIeag==, tarball: https://registry.npmjs.org/tough-cookie/-/tough-cookie-4.1.4.tgz} engines: {node: '>=6'} + tough-cookie@6.0.0: + resolution: {integrity: sha512-kXuRi1mtaKMrsLUxz3sQYvVl37B0Ns6MzfrtV5DvJceE9bPyspOqk9xxv7XbZWcfLWbFmm997vl83qUWVJA64w==, tarball: https://registry.npmjs.org/tough-cookie/-/tough-cookie-6.0.0.tgz} + engines: {node: '>=16'} + tr46@3.0.0: resolution: {integrity: sha512-l7FvfAHlcmulp8kr+flpQZmVwtu7nfRV7NZujtN0OqES8EL4O4e0qqzL0DC5gAvx/ZC/9lk6rhcUwYvkBnBnYA==, tarball: https://registry.npmjs.org/tr46/-/tr46-3.0.0.tgz} engines: {node: '>=12'} + tr46@6.0.0: + resolution: {integrity: sha512-bLVMLPtstlZ4iMQHpFHTR7GAGj2jxi8Dg0s2h2MafAE4uSWF98FC/3MomU51iQAMf8/qDUbKWf5GxuvvVcXEhw==, tarball: https://registry.npmjs.org/tr46/-/tr46-6.0.0.tgz} + engines: {node: 
'>=20'} + trim-lines@3.0.1: resolution: {integrity: sha512-kRj8B+YHZCc9kQYdWfJB2/oUl9rA99qbowYYBtr4ui4mZyAQ2JpvVBd/6U2YloATfqBhBTSMhTpgBHtU0Mf3Rg==, tarball: https://registry.npmjs.org/trim-lines/-/trim-lines-3.0.1.tgz} @@ -6223,8 +5930,8 @@ packages: undici-types@6.21.0: resolution: {integrity: sha512-iwDZqg0QAGrg9Rav5H4n0M64c3mkR59cJ6wQp+7C4nI0gsmExaedaYLNO44eT4AtBBwjbTiGPMlt2Md0T9H9JQ==, tarball: https://registry.npmjs.org/undici-types/-/undici-types-6.21.0.tgz} - undici@6.21.3: - resolution: {integrity: sha512-gBLkYIlEnSp8pFbT64yFgGE6UIB9tAkhukC23PmMDCe5Nd+cRqKxSjw5y54MK2AZMgZfJWMaNE4nYUHgi1XEOw==, tarball: https://registry.npmjs.org/undici/-/undici-6.21.3.tgz} + undici@6.22.0: + resolution: {integrity: sha512-hU/10obOIu62MGYjdskASR3CUAiYaFTtC9Pa6vHyf//mAipSvSQg6od2CnJswq7fvzNS3zJhxoRkgNVaHurWKw==, tarball: https://registry.npmjs.org/undici/-/undici-6.22.0.tgz} engines: {node: '>=18.17'} unicorn-magic@0.1.0: @@ -6278,6 +5985,12 @@ packages: peerDependencies: browserslist: '>= 4.21.0' + update-browserslist-db@1.1.4: + resolution: {integrity: sha512-q0SPT4xyU84saUX+tomz1WLkxUbuaJnR1xWt17M7fJtEJigJeWUNGUqrauFXsHnqev9y9JTRGwk13tFBuKby4A==, tarball: https://registry.npmjs.org/update-browserslist-db/-/update-browserslist-db-1.1.4.tgz} + hasBin: true + peerDependencies: + browserslist: '>= 4.21.0' + uri-js@4.4.1: resolution: {integrity: sha512-7rKUyy33Q1yc98pQ1DAmLtwX109F7TIfWlW1Ydo8Wl1ii1SeHieeh0HHfPeL2fMXK6z0s8ecKs9frCuLJvndBg==, tarball: https://registry.npmjs.org/uri-js/-/uri-js-4.4.1.tgz} @@ -6331,8 +6044,8 @@ packages: '@types/react': optional: true - use-sync-external-store@1.4.0: - resolution: {integrity: sha512-9WXSPC5fMv61vaupRkCKCxsPxBocVnwakBEkMIHHpkTTg6icbJtg6jzgtLDm4bl3cSHAca52rYWih0k4K3PfHw==, tarball: https://registry.npmjs.org/use-sync-external-store/-/use-sync-external-store-1.4.0.tgz} + use-sync-external-store@1.6.0: + resolution: {integrity: sha512-Pp6GSwGP/NrPIrxVFAIkOQeyw8lFenOHijQWkUTrDvrF4ALqylP2C/KCkeS9dpUM3KvYRQhna5vt7IL95+ZQ9w==, tarball: 
https://registry.npmjs.org/use-sync-external-store/-/use-sync-external-store-1.6.0.tgz} peerDependencies: react: ^16.8.0 || ^17.0.0 || ^18.0.0 || ^19.0.0 @@ -6404,8 +6117,8 @@ packages: vue-tsc: optional: true - vite@7.1.11: - resolution: {integrity: sha512-uzcxnSDVjAopEUjljkWh8EIrg6tlzrjFUfMcR1EVsRDGwf/ccef0qQPRyOrROwhrTDaApueq+ja+KLPlzR/zdg==, tarball: https://registry.npmjs.org/vite/-/vite-7.1.11.tgz} + vite@7.1.12: + resolution: {integrity: sha512-ZWyE8YXEXqJrrSLvYgrRP7p62OziLW7xI5HYGWFzOvupfAlrLvURSzv/FyGyy0eidogEM3ujU+kUG1zuHgb6Ug==, tarball: https://registry.npmjs.org/vite/-/vite-7.1.12.tgz} engines: {node: ^20.19.0 || >=22.12.0} hasBin: true peerDependencies: @@ -6444,6 +6157,40 @@ packages: yaml: optional: true + vitest@4.0.6: + resolution: {integrity: sha512-gR7INfiVRwnEOkCk47faros/9McCZMp5LM+OMNWGLaDBSvJxIzwjgNFufkuePBNaesGRnLmNfW+ddbUJRZn0nQ==, tarball: https://registry.npmjs.org/vitest/-/vitest-4.0.6.tgz} + engines: {node: ^20.0.0 || ^22.0.0 || >=24.0.0} + hasBin: true + peerDependencies: + '@edge-runtime/vm': '*' + '@types/debug': ^4.1.12 + '@types/node': ^20.0.0 || ^22.0.0 || >=24.0.0 + '@vitest/browser-playwright': 4.0.6 + '@vitest/browser-preview': 4.0.6 + '@vitest/browser-webdriverio': 4.0.6 + '@vitest/ui': 4.0.6 + happy-dom: '*' + jsdom: '*' + peerDependenciesMeta: + '@edge-runtime/vm': + optional: true + '@types/debug': + optional: true + '@types/node': + optional: true + '@vitest/browser-playwright': + optional: true + '@vitest/browser-preview': + optional: true + '@vitest/browser-webdriverio': + optional: true + '@vitest/ui': + optional: true + happy-dom: + optional: true + jsdom: + optional: true + vscode-uri@3.1.0: resolution: {integrity: sha512-/BpdSx+yCQGnCvecbyXdxHDkuk55/G3xwnC0GqY4gmQ3j+A+g8kzzgB4Nk/SINjqn6+waqw3EgbVF2QKExkRxQ==, tarball: https://registry.npmjs.org/vscode-uri/-/vscode-uri-3.1.0.tgz} @@ -6451,6 +6198,10 @@ packages: resolution: {integrity: 
sha512-d+BFHzbiCx6zGfz0HyQ6Rg69w9k19nviJspaj4yNscGjrHu94sVP+aRm75yEbCh+r2/yR+7q6hux9LVtbuTGBw==, tarball: https://registry.npmjs.org/w3c-xmlserializer/-/w3c-xmlserializer-4.0.0.tgz} engines: {node: '>=14'} + w3c-xmlserializer@5.0.0: + resolution: {integrity: sha512-o8qghlI8NZHU1lLPrpi2+Uq7abh4GGPpYANlalzWxyWteJOCsr/P+oPBA49TOLu5FTZO4d3F9MnWJfiMo4BkmA==, tarball: https://registry.npmjs.org/w3c-xmlserializer/-/w3c-xmlserializer-5.0.0.tgz} + engines: {node: '>=18'} + walk-up-path@4.0.0: resolution: {integrity: sha512-3hu+tD8YzSLGuFYtPRb48vdhKMi0KQV5sn+uWr8+7dMEq/2G/dtLrdDinkLjqq5TIbIBjYJ4Ax/n3YiaW7QM8A==, tarball: https://registry.npmjs.org/walk-up-path/-/walk-up-path-4.0.0.tgz} engines: {node: 20 || >=22} @@ -6465,6 +6216,10 @@ packages: resolution: {integrity: sha512-VwddBukDzu71offAQR975unBIGqfKZpM+8ZX6ySk8nYhVoo5CYaZyzt3YBvYtRtO+aoGlqxPg/B87NGVZ/fu6g==, tarball: https://registry.npmjs.org/webidl-conversions/-/webidl-conversions-7.0.0.tgz} engines: {node: '>=12'} + webidl-conversions@8.0.0: + resolution: {integrity: sha512-n4W4YFyz5JzOfQeA8oN7dUYpR+MBP3PIUsn2jLjWXwK5ASUzt0Jc/A5sAUZoCYFJRGF0FBKJ+1JjN43rNdsQzA==, tarball: https://registry.npmjs.org/webidl-conversions/-/webidl-conversions-8.0.0.tgz} + engines: {node: '>=20'} + webpack-sources@3.2.3: resolution: {integrity: sha512-/DyMEOrDgLKKIG0fmvtz+4dUX/3Ghozwgm6iPp8KRhvn+eQf9+Q7GWxVNMk3+uCPWfdXYC4ExGBckIXdFEfH1w==, tarball: https://registry.npmjs.org/webpack-sources/-/webpack-sources-3.2.3.tgz} engines: {node: '>=10.13.0'} @@ -6479,14 +6234,26 @@ packages: resolution: {integrity: sha512-p41ogyeMUrw3jWclHWTQg1k05DSVXPLcVxRTYsXUk+ZooOCZLcoYgPZ/HL/D/N+uQPOtcp1me1WhBEaX02mhWg==, tarball: https://registry.npmjs.org/whatwg-encoding/-/whatwg-encoding-2.0.0.tgz} engines: {node: '>=12'} + whatwg-encoding@3.1.1: + resolution: {integrity: sha512-6qN4hJdMwfYBtE3YBTTHhoeuUrDBPZmbQaxWAqSALV/MeEnR5z1xd8UKud2RAkFoPkmB+hli1TZSnyi84xz1vQ==, tarball: https://registry.npmjs.org/whatwg-encoding/-/whatwg-encoding-3.1.1.tgz} + engines: 
{node: '>=18'} + whatwg-mimetype@3.0.0: resolution: {integrity: sha512-nt+N2dzIutVRxARx1nghPKGv1xHikU7HKdfafKkLNLindmPU/ch3U31NOCGGA/dmPcmb1VlofO0vnKAcsm0o/Q==, tarball: https://registry.npmjs.org/whatwg-mimetype/-/whatwg-mimetype-3.0.0.tgz} engines: {node: '>=12'} + whatwg-mimetype@4.0.0: + resolution: {integrity: sha512-QaKxh0eNIi2mE9p2vEdzfagOKHCcj1pJ56EEHGQOVxp8r9/iszLUUV7v89x9O1p/T+NlTM5W7jW6+cz4Fq1YVg==, tarball: https://registry.npmjs.org/whatwg-mimetype/-/whatwg-mimetype-4.0.0.tgz} + engines: {node: '>=18'} + whatwg-url@11.0.0: resolution: {integrity: sha512-RKT8HExMpoYx4igMiVMY83lN6UeITKJlBQ+vR/8ZJ8OCdSiN3RwCq+9gH0+Xzj0+5IrM6i4j/6LuvzbZIQgEcQ==, tarball: https://registry.npmjs.org/whatwg-url/-/whatwg-url-11.0.0.tgz} engines: {node: '>=12'} + whatwg-url@15.1.0: + resolution: {integrity: sha512-2ytDk0kiEj/yu90JOAp44PVPUkO9+jVhyf+SybKlRHSDlvOOZhdPIrr7xTH64l4WixO2cP+wQIcgujkGBPPz6g==, tarball: https://registry.npmjs.org/whatwg-url/-/whatwg-url-15.1.0.tgz} + engines: {node: '>=20'} + which-boxed-primitive@1.0.2: resolution: {integrity: sha512-bwZdv0AKLpplFY2KZRX6TvyuN7ojjr7lwkg6ml0roIy9YeuSr7JS372qlNW18UQYzgYK9ziGcerWqZOmEn9VNg==, tarball: https://registry.npmjs.org/which-boxed-primitive/-/which-boxed-primitive-1.0.2.tgz} @@ -6502,6 +6269,11 @@ packages: engines: {node: '>= 8'} hasBin: true + why-is-node-running@2.3.0: + resolution: {integrity: sha512-hUrmaWBdVDcxvYqnyh09zunKzROWjbZTiNy8dBEjkS7ehEDQibXJ7XvlmtbwuTclUiIyN+CyXQD4Vmko8fNm8w==, tarball: https://registry.npmjs.org/why-is-node-running/-/why-is-node-running-2.3.0.tgz} + engines: {node: '>=8'} + hasBin: true + wrap-ansi@6.2.0: resolution: {integrity: sha512-r6lPcBGxZXlIcymEu7InxDMhdW0KDxpLgoFLcguasxCaJ/SOIZwINatK9KY/tf+ZrlywOKU0UDj3ATXUBfxJXA==, tarball: https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-6.2.0.tgz} engines: {node: '>=8'} @@ -6521,20 +6293,8 @@ packages: resolution: {integrity: sha512-7KxauUdBmSdWnmpaGFg+ppNjKF8uNLry8LyzjauQDOVONfFLNKrKvQOxZ/VuTIcS/gge/YNahf5RIIQWTSarlg==, tarball: 
https://registry.npmjs.org/write-file-atomic/-/write-file-atomic-4.0.2.tgz} engines: {node: ^12.13.0 || ^14.15.0 || >=16.0.0} - ws@8.17.1: - resolution: {integrity: sha512-6XQFvXTkbfUOZOKKILFG1PDK2NDQs4azKQl26T0YS5CxqWLgXajbPZ+h4gZekJyRqFU8pvnbAbbs/3TgRPy+GQ==, tarball: https://registry.npmjs.org/ws/-/ws-8.17.1.tgz} - engines: {node: '>=10.0.0'} - peerDependencies: - bufferutil: ^4.0.1 - utf-8-validate: '>=5.0.2' - peerDependenciesMeta: - bufferutil: - optional: true - utf-8-validate: - optional: true - - ws@8.18.0: - resolution: {integrity: sha512-8VbfWfHLbbwu3+N6OKsOMpBdT4kXPDDB9cJk2bJ6mh9ucxdlnNvH1e+roYkKmN9Nxw2yjz7VzeO9oOz2zJ04Pw==, tarball: https://registry.npmjs.org/ws/-/ws-8.18.0.tgz} + ws@8.18.3: + resolution: {integrity: sha512-PEIGCY5tSlUt50cqyMXfCzX+oOPqN0vuGqWzbcJ2xvnkzkq46oOpz7dQaTDBdfICb4N14+GARUDw2XV2N4tvzg==, tarball: https://registry.npmjs.org/ws/-/ws-8.18.3.tgz} engines: {node: '>=10.0.0'} peerDependencies: bufferutil: ^4.0.1 @@ -6549,6 +6309,10 @@ packages: resolution: {integrity: sha512-ICP2e+jsHvAj2E2lIHxa5tjXRlKDJo4IdvPvCXbXQGdzSfmSpNVyIKMvoZHjDY9DP0zV17iI85o90vRFXNccRw==, tarball: https://registry.npmjs.org/xml-name-validator/-/xml-name-validator-4.0.0.tgz} engines: {node: '>=12'} + xml-name-validator@5.0.0: + resolution: {integrity: sha512-EvGK8EJ3DhaHfbRlETOWAS5pO9MZITeauHKJyb8wyajUfQUenkIg2MvLDTZ4T/TgIcm3HU0TFBgWWboAZ30UHg==, tarball: https://registry.npmjs.org/xml-name-validator/-/xml-name-validator-5.0.0.tgz} + engines: {node: '>=18'} + xmlchars@2.2.0: resolution: {integrity: sha512-JZnDKK8B0RCDw84FNdDAIpZK+JuJw+s7Lz8nksI7SIuU3UXJJslUthsi+uWBUYOwPFwW7W7PRLRfUKpxjtjFCw==, tarball: https://registry.npmjs.org/xmlchars/-/xmlchars-2.2.0.tgz} @@ -6599,8 +6363,8 @@ packages: yup@1.6.1: resolution: {integrity: sha512-JED8pB50qbA4FOkDol0bYF/p60qSEDQqBD0/qeIrUCG1KbPBIQ776fCUNb9ldbPcSTxA69g/47XTo4TqWiuXOA==, tarball: https://registry.npmjs.org/yup/-/yup-1.6.1.tgz} - zod@4.1.11: - resolution: {integrity: 
sha512-WPsqwxITS2tzx1bzhIKsEs19ABD5vmCVa4xBo2tq/SrV4RNZtfws1EnCWQXM6yh8bD08a1idvkB5MZSBiZsjwg==, tarball: https://registry.npmjs.org/zod/-/zod-4.1.11.tgz} + zod@4.1.12: + resolution: {integrity: sha512-JInaHOamG8pt5+Ey8kGmdcAcg3OL9reK8ltczgHTAwNhMys/6ThXHityHxVV2p3fkw/c+MAvBHFVYHFZDmjMCQ==, tarball: https://registry.npmjs.org/zod/-/zod-4.1.12.tgz} zwitch@2.0.4: resolution: {integrity: sha512-bXE4cR/kVZhKZX/RjPEflHaKVhUVl85noU3v6b8apfQEc1x4A+zBxjZ4lN8LqGd6WZ3dl98pY4o717VFmoPp+A==, tarball: https://registry.npmjs.org/zwitch/-/zwitch-2.0.4.tgz} @@ -6614,65 +6378,83 @@ snapshots: '@alloc/quick-lru@5.2.0': {} + '@asamuzakjp/css-color@4.0.5': + dependencies: + '@csstools/css-calc': 2.1.4(@csstools/css-parser-algorithms@3.0.5(@csstools/css-tokenizer@3.0.4))(@csstools/css-tokenizer@3.0.4) + '@csstools/css-color-parser': 3.1.0(@csstools/css-parser-algorithms@3.0.5(@csstools/css-tokenizer@3.0.4))(@csstools/css-tokenizer@3.0.4) + '@csstools/css-parser-algorithms': 3.0.5(@csstools/css-tokenizer@3.0.4) + '@csstools/css-tokenizer': 3.0.4 + lru-cache: 11.2.2 + + '@asamuzakjp/dom-selector@6.7.3': + dependencies: + '@asamuzakjp/nwsapi': 2.3.9 + bidi-js: 1.0.3 + css-tree: 3.1.0 + is-potential-custom-element-name: 1.0.1 + lru-cache: 11.2.2 + + '@asamuzakjp/nwsapi@2.3.9': {} + '@babel/code-frame@7.27.1': dependencies: '@babel/helper-validator-identifier': 7.27.1 js-tokens: 4.0.0 picocolors: 1.1.1 - '@babel/compat-data@7.28.4': {} + '@babel/compat-data@7.28.5': {} - '@babel/core@7.28.4': + '@babel/core@7.28.5': dependencies: '@babel/code-frame': 7.27.1 - '@babel/generator': 7.28.3 + '@babel/generator': 7.28.5 '@babel/helper-compilation-targets': 7.27.2 - '@babel/helper-module-transforms': 7.28.3(@babel/core@7.28.4) + '@babel/helper-module-transforms': 7.28.3(@babel/core@7.28.5) '@babel/helpers': 7.26.10 - '@babel/parser': 7.28.4 + '@babel/parser': 7.28.5 '@babel/template': 7.27.2 - '@babel/traverse': 7.28.4 - '@babel/types': 7.28.4 + '@babel/traverse': 7.28.5 + '@babel/types': 7.28.5 
'@jridgewell/remapping': 2.3.5 convert-source-map: 2.0.0 debug: 4.4.3 gensync: 1.0.0-beta.2 json5: 2.2.3 - semver: 7.7.2 + semver: 7.7.3 transitivePeerDependencies: - supports-color - '@babel/generator@7.28.3': + '@babel/generator@7.28.5': dependencies: - '@babel/parser': 7.28.4 - '@babel/types': 7.28.4 + '@babel/parser': 7.28.5 + '@babel/types': 7.28.5 '@jridgewell/gen-mapping': 0.3.13 '@jridgewell/trace-mapping': 0.3.31 jsesc: 3.1.0 '@babel/helper-compilation-targets@7.27.2': dependencies: - '@babel/compat-data': 7.28.4 + '@babel/compat-data': 7.28.5 '@babel/helper-validator-option': 7.27.1 - browserslist: 4.26.3 + browserslist: 4.27.0 lru-cache: 5.1.1 - semver: 7.7.2 + semver: 7.7.3 '@babel/helper-globals@7.28.0': {} '@babel/helper-module-imports@7.27.1': dependencies: - '@babel/traverse': 7.28.4 - '@babel/types': 7.28.4 + '@babel/traverse': 7.28.5 + '@babel/types': 7.28.5 transitivePeerDependencies: - supports-color - '@babel/helper-module-transforms@7.28.3(@babel/core@7.28.4)': + '@babel/helper-module-transforms@7.28.3(@babel/core@7.28.5)': dependencies: - '@babel/core': 7.28.4 + '@babel/core': 7.28.5 '@babel/helper-module-imports': 7.27.1 - '@babel/helper-validator-identifier': 7.27.1 - '@babel/traverse': 7.28.4 + '@babel/helper-validator-identifier': 7.28.5 + '@babel/traverse': 7.28.5 transitivePeerDependencies: - supports-color @@ -6682,110 +6464,112 @@ snapshots: '@babel/helper-validator-identifier@7.27.1': {} + '@babel/helper-validator-identifier@7.28.5': {} + '@babel/helper-validator-option@7.27.1': {} '@babel/helpers@7.26.10': dependencies: '@babel/template': 7.27.2 - '@babel/types': 7.28.4 + '@babel/types': 7.28.5 - '@babel/parser@7.28.4': + '@babel/parser@7.28.5': dependencies: - '@babel/types': 7.28.4 + '@babel/types': 7.28.5 - '@babel/plugin-syntax-async-generators@7.8.4(@babel/core@7.28.4)': + '@babel/plugin-syntax-async-generators@7.8.4(@babel/core@7.28.5)': dependencies: - '@babel/core': 7.28.4 + '@babel/core': 7.28.5 
'@babel/helper-plugin-utils': 7.27.1 - '@babel/plugin-syntax-bigint@7.8.3(@babel/core@7.28.4)': + '@babel/plugin-syntax-bigint@7.8.3(@babel/core@7.28.5)': dependencies: - '@babel/core': 7.28.4 + '@babel/core': 7.28.5 '@babel/helper-plugin-utils': 7.27.1 - '@babel/plugin-syntax-class-properties@7.12.13(@babel/core@7.28.4)': + '@babel/plugin-syntax-class-properties@7.12.13(@babel/core@7.28.5)': dependencies: - '@babel/core': 7.28.4 + '@babel/core': 7.28.5 '@babel/helper-plugin-utils': 7.27.1 - '@babel/plugin-syntax-class-static-block@7.14.5(@babel/core@7.28.4)': + '@babel/plugin-syntax-class-static-block@7.14.5(@babel/core@7.28.5)': dependencies: - '@babel/core': 7.28.4 + '@babel/core': 7.28.5 '@babel/helper-plugin-utils': 7.27.1 - '@babel/plugin-syntax-import-attributes@7.24.7(@babel/core@7.28.4)': + '@babel/plugin-syntax-import-attributes@7.24.7(@babel/core@7.28.5)': dependencies: - '@babel/core': 7.28.4 + '@babel/core': 7.28.5 '@babel/helper-plugin-utils': 7.27.1 - '@babel/plugin-syntax-import-meta@7.10.4(@babel/core@7.28.4)': + '@babel/plugin-syntax-import-meta@7.10.4(@babel/core@7.28.5)': dependencies: - '@babel/core': 7.28.4 + '@babel/core': 7.28.5 '@babel/helper-plugin-utils': 7.27.1 - '@babel/plugin-syntax-json-strings@7.8.3(@babel/core@7.28.4)': + '@babel/plugin-syntax-json-strings@7.8.3(@babel/core@7.28.5)': dependencies: - '@babel/core': 7.28.4 + '@babel/core': 7.28.5 '@babel/helper-plugin-utils': 7.27.1 - '@babel/plugin-syntax-jsx@7.24.7(@babel/core@7.28.4)': + '@babel/plugin-syntax-jsx@7.24.7(@babel/core@7.28.5)': dependencies: - '@babel/core': 7.28.4 + '@babel/core': 7.28.5 '@babel/helper-plugin-utils': 7.27.1 - '@babel/plugin-syntax-logical-assignment-operators@7.10.4(@babel/core@7.28.4)': + '@babel/plugin-syntax-logical-assignment-operators@7.10.4(@babel/core@7.28.5)': dependencies: - '@babel/core': 7.28.4 + '@babel/core': 7.28.5 '@babel/helper-plugin-utils': 7.27.1 - '@babel/plugin-syntax-nullish-coalescing-operator@7.8.3(@babel/core@7.28.4)': + 
'@babel/plugin-syntax-nullish-coalescing-operator@7.8.3(@babel/core@7.28.5)': dependencies: - '@babel/core': 7.28.4 + '@babel/core': 7.28.5 '@babel/helper-plugin-utils': 7.27.1 - '@babel/plugin-syntax-numeric-separator@7.10.4(@babel/core@7.28.4)': + '@babel/plugin-syntax-numeric-separator@7.10.4(@babel/core@7.28.5)': dependencies: - '@babel/core': 7.28.4 + '@babel/core': 7.28.5 '@babel/helper-plugin-utils': 7.27.1 - '@babel/plugin-syntax-object-rest-spread@7.8.3(@babel/core@7.28.4)': + '@babel/plugin-syntax-object-rest-spread@7.8.3(@babel/core@7.28.5)': dependencies: - '@babel/core': 7.28.4 + '@babel/core': 7.28.5 '@babel/helper-plugin-utils': 7.27.1 - '@babel/plugin-syntax-optional-catch-binding@7.8.3(@babel/core@7.28.4)': + '@babel/plugin-syntax-optional-catch-binding@7.8.3(@babel/core@7.28.5)': dependencies: - '@babel/core': 7.28.4 + '@babel/core': 7.28.5 '@babel/helper-plugin-utils': 7.27.1 - '@babel/plugin-syntax-optional-chaining@7.8.3(@babel/core@7.28.4)': + '@babel/plugin-syntax-optional-chaining@7.8.3(@babel/core@7.28.5)': dependencies: - '@babel/core': 7.28.4 + '@babel/core': 7.28.5 '@babel/helper-plugin-utils': 7.27.1 - '@babel/plugin-syntax-private-property-in-object@7.14.5(@babel/core@7.28.4)': + '@babel/plugin-syntax-private-property-in-object@7.14.5(@babel/core@7.28.5)': dependencies: - '@babel/core': 7.28.4 + '@babel/core': 7.28.5 '@babel/helper-plugin-utils': 7.27.1 - '@babel/plugin-syntax-top-level-await@7.14.5(@babel/core@7.28.4)': + '@babel/plugin-syntax-top-level-await@7.14.5(@babel/core@7.28.5)': dependencies: - '@babel/core': 7.28.4 + '@babel/core': 7.28.5 '@babel/helper-plugin-utils': 7.27.1 - '@babel/plugin-syntax-typescript@7.24.7(@babel/core@7.28.4)': + '@babel/plugin-syntax-typescript@7.24.7(@babel/core@7.28.5)': dependencies: - '@babel/core': 7.28.4 + '@babel/core': 7.28.5 '@babel/helper-plugin-utils': 7.27.1 - '@babel/plugin-transform-react-jsx-self@7.27.1(@babel/core@7.28.4)': + 
'@babel/plugin-transform-react-jsx-self@7.27.1(@babel/core@7.28.5)': dependencies: - '@babel/core': 7.28.4 + '@babel/core': 7.28.5 '@babel/helper-plugin-utils': 7.27.1 - '@babel/plugin-transform-react-jsx-source@7.27.1(@babel/core@7.28.4)': + '@babel/plugin-transform-react-jsx-source@7.27.1(@babel/core@7.28.5)': dependencies: - '@babel/core': 7.28.4 + '@babel/core': 7.28.5 '@babel/helper-plugin-utils': 7.27.1 '@babel/runtime@7.26.10': @@ -6795,14 +6579,14 @@ snapshots: '@babel/template@7.27.2': dependencies: '@babel/code-frame': 7.27.1 - '@babel/parser': 7.28.4 - '@babel/types': 7.28.4 + '@babel/parser': 7.28.5 + '@babel/types': 7.28.5 '@babel/traverse@7.27.1': dependencies: '@babel/code-frame': 7.27.1 - '@babel/generator': 7.28.3 - '@babel/parser': 7.28.4 + '@babel/generator': 7.28.5 + '@babel/parser': 7.28.5 '@babel/template': 7.27.2 '@babel/types': 7.27.1 debug: 4.4.3 @@ -6810,14 +6594,14 @@ snapshots: transitivePeerDependencies: - supports-color - '@babel/traverse@7.28.4': + '@babel/traverse@7.28.5': dependencies: '@babel/code-frame': 7.27.1 - '@babel/generator': 7.28.3 + '@babel/generator': 7.28.5 '@babel/helper-globals': 7.28.0 - '@babel/parser': 7.28.4 + '@babel/parser': 7.28.5 '@babel/template': 7.27.2 - '@babel/types': 7.28.4 + '@babel/types': 7.28.5 debug: 4.4.3 transitivePeerDependencies: - supports-color @@ -6827,10 +6611,10 @@ snapshots: '@babel/helper-string-parser': 7.27.1 '@babel/helper-validator-identifier': 7.27.1 - '@babel/types@7.28.4': + '@babel/types@7.28.5': dependencies: '@babel/helper-string-parser': 7.27.1 - '@babel/helper-validator-identifier': 7.27.1 + '@babel/helper-validator-identifier': 7.28.5 '@bcoe/v8-coverage@0.2.3': {} @@ -6882,13 +6666,13 @@ snapshots: '@types/tough-cookie': 4.0.5 tough-cookie: 4.1.4 - '@chromatic-com/storybook@4.1.0(storybook@9.1.2(@testing-library/dom@10.4.0)(msw@2.4.8(typescript@5.6.3))(prettier@3.4.1)(vite@7.1.11(@types/node@20.17.16)(jiti@1.21.7)(yaml@2.7.0)))': + 
'@chromatic-com/storybook@4.1.0(storybook@9.1.16(@testing-library/dom@10.4.0)(msw@2.4.8(typescript@5.6.3))(prettier@3.4.1)(vite@7.1.12(@types/node@20.17.16)(jiti@1.21.7)(yaml@2.7.0)))': dependencies: '@neoconfetti/react': 1.0.0 chromatic: 12.2.0 filesize: 10.1.2 jsonfile: 6.1.0 - storybook: 9.1.2(@testing-library/dom@10.4.0)(msw@2.4.8(typescript@5.6.3))(prettier@3.4.1)(vite@7.1.11(@types/node@20.17.16)(jiti@1.21.7)(yaml@2.7.0)) + storybook: 9.1.16(@testing-library/dom@10.4.0)(msw@2.4.8(typescript@5.6.3))(prettier@3.4.1)(vite@7.1.12(@types/node@20.17.16)(jiti@1.21.7)(yaml@2.7.0)) strip-ansi: 7.1.0 transitivePeerDependencies: - '@chromatic-com/cypress' @@ -6899,13 +6683,35 @@ snapshots: '@jridgewell/trace-mapping': 0.3.9 optional: true - '@emnapi/core@1.5.0': + '@csstools/color-helpers@5.1.0': {} + + '@csstools/css-calc@2.1.4(@csstools/css-parser-algorithms@3.0.5(@csstools/css-tokenizer@3.0.4))(@csstools/css-tokenizer@3.0.4)': + dependencies: + '@csstools/css-parser-algorithms': 3.0.5(@csstools/css-tokenizer@3.0.4) + '@csstools/css-tokenizer': 3.0.4 + + '@csstools/css-color-parser@3.1.0(@csstools/css-parser-algorithms@3.0.5(@csstools/css-tokenizer@3.0.4))(@csstools/css-tokenizer@3.0.4)': + dependencies: + '@csstools/color-helpers': 5.1.0 + '@csstools/css-calc': 2.1.4(@csstools/css-parser-algorithms@3.0.5(@csstools/css-tokenizer@3.0.4))(@csstools/css-tokenizer@3.0.4) + '@csstools/css-parser-algorithms': 3.0.5(@csstools/css-tokenizer@3.0.4) + '@csstools/css-tokenizer': 3.0.4 + + '@csstools/css-parser-algorithms@3.0.5(@csstools/css-tokenizer@3.0.4)': + dependencies: + '@csstools/css-tokenizer': 3.0.4 + + '@csstools/css-syntax-patches-for-csstree@1.0.15': {} + + '@csstools/css-tokenizer@3.0.4': {} + + '@emnapi/core@1.6.0': dependencies: '@emnapi/wasi-threads': 1.1.0 tslib: 2.8.1 optional: true - '@emnapi/runtime@1.5.0': + '@emnapi/runtime@1.6.0': dependencies: tslib: 2.8.1 optional: true @@ -6917,10 +6723,10 @@ snapshots: '@emoji-mart/data@1.2.1': {} - 
'@emoji-mart/react@1.1.1(emoji-mart@5.6.0)(react@19.1.1)': + '@emoji-mart/react@1.1.1(emoji-mart@5.6.0)(react@19.2.0)': dependencies: emoji-mart: 5.6.0 - react: 19.1.1 + react: 19.2.0 '@emotion/babel-plugin@11.13.5': dependencies: @@ -6964,19 +6770,19 @@ snapshots: '@emotion/memoize@0.9.0': {} - '@emotion/react@11.14.0(@types/react@19.1.17)(react@19.1.1)': + '@emotion/react@11.14.0(@types/react@19.2.2)(react@19.2.0)': dependencies: '@babel/runtime': 7.26.10 '@emotion/babel-plugin': 11.13.5 '@emotion/cache': 11.14.0 '@emotion/serialize': 1.3.3 - '@emotion/use-insertion-effect-with-fallbacks': 1.2.0(react@19.1.1) + '@emotion/use-insertion-effect-with-fallbacks': 1.2.0(react@19.2.0) '@emotion/utils': 1.4.2 '@emotion/weak-memoize': 0.4.0 hoist-non-react-statics: 3.3.2 - react: 19.1.1 + react: 19.2.0 optionalDependencies: - '@types/react': 19.1.17 + '@types/react': 19.2.2 transitivePeerDependencies: - supports-color @@ -6990,26 +6796,26 @@ snapshots: '@emotion/sheet@1.4.0': {} - '@emotion/styled@11.14.1(@emotion/react@11.14.0(@types/react@19.1.17)(react@19.1.1))(@types/react@19.1.17)(react@19.1.1)': + '@emotion/styled@11.14.1(@emotion/react@11.14.0(@types/react@19.2.2)(react@19.2.0))(@types/react@19.2.2)(react@19.2.0)': dependencies: '@babel/runtime': 7.26.10 '@emotion/babel-plugin': 11.13.5 '@emotion/is-prop-valid': 1.4.0 - '@emotion/react': 11.14.0(@types/react@19.1.17)(react@19.1.1) + '@emotion/react': 11.14.0(@types/react@19.2.2)(react@19.2.0) '@emotion/serialize': 1.3.3 - '@emotion/use-insertion-effect-with-fallbacks': 1.2.0(react@19.1.1) + '@emotion/use-insertion-effect-with-fallbacks': 1.2.0(react@19.2.0) '@emotion/utils': 1.4.2 - react: 19.1.1 + react: 19.2.0 optionalDependencies: - '@types/react': 19.1.17 + '@types/react': 19.2.2 transitivePeerDependencies: - supports-color '@emotion/unitless@0.10.0': {} - '@emotion/use-insertion-effect-with-fallbacks@1.2.0(react@19.1.1)': + '@emotion/use-insertion-effect-with-fallbacks@1.2.0(react@19.2.0)': dependencies: - 
react: 19.1.1 + react: 19.2.0 '@emotion/utils@1.4.2': {} @@ -7018,163 +6824,88 @@ snapshots: '@esbuild/aix-ppc64@0.25.11': optional: true - '@esbuild/aix-ppc64@0.25.3': - optional: true - '@esbuild/android-arm64@0.25.11': optional: true - '@esbuild/android-arm64@0.25.3': - optional: true - '@esbuild/android-arm@0.25.11': optional: true - '@esbuild/android-arm@0.25.3': - optional: true - '@esbuild/android-x64@0.25.11': optional: true - '@esbuild/android-x64@0.25.3': - optional: true - '@esbuild/darwin-arm64@0.25.11': optional: true - '@esbuild/darwin-arm64@0.25.3': - optional: true - '@esbuild/darwin-x64@0.25.11': optional: true - '@esbuild/darwin-x64@0.25.3': - optional: true - '@esbuild/freebsd-arm64@0.25.11': optional: true - '@esbuild/freebsd-arm64@0.25.3': - optional: true - '@esbuild/freebsd-x64@0.25.11': optional: true - '@esbuild/freebsd-x64@0.25.3': - optional: true - '@esbuild/linux-arm64@0.25.11': optional: true - '@esbuild/linux-arm64@0.25.3': - optional: true - '@esbuild/linux-arm@0.25.11': optional: true - '@esbuild/linux-arm@0.25.3': - optional: true - '@esbuild/linux-ia32@0.25.11': optional: true - '@esbuild/linux-ia32@0.25.3': - optional: true - '@esbuild/linux-loong64@0.25.11': optional: true - '@esbuild/linux-loong64@0.25.3': - optional: true - '@esbuild/linux-mips64el@0.25.11': optional: true - '@esbuild/linux-mips64el@0.25.3': - optional: true - '@esbuild/linux-ppc64@0.25.11': optional: true - '@esbuild/linux-ppc64@0.25.3': - optional: true - '@esbuild/linux-riscv64@0.25.11': optional: true - '@esbuild/linux-riscv64@0.25.3': - optional: true - '@esbuild/linux-s390x@0.25.11': optional: true - '@esbuild/linux-s390x@0.25.3': - optional: true - '@esbuild/linux-x64@0.25.11': optional: true - '@esbuild/linux-x64@0.25.3': - optional: true - '@esbuild/netbsd-arm64@0.25.11': optional: true - '@esbuild/netbsd-arm64@0.25.3': - optional: true - '@esbuild/netbsd-x64@0.25.11': optional: true - '@esbuild/netbsd-x64@0.25.3': - optional: true - 
'@esbuild/openbsd-arm64@0.25.11': optional: true - '@esbuild/openbsd-arm64@0.25.3': - optional: true - '@esbuild/openbsd-x64@0.25.11': optional: true - '@esbuild/openbsd-x64@0.25.3': - optional: true - '@esbuild/openharmony-arm64@0.25.11': optional: true - '@esbuild/sunos-x64@0.25.11': - optional: true - - '@esbuild/sunos-x64@0.25.3': + '@esbuild/sunos-x64@0.25.11': optional: true '@esbuild/win32-arm64@0.25.11': optional: true - '@esbuild/win32-arm64@0.25.3': - optional: true - '@esbuild/win32-ia32@0.25.11': optional: true - '@esbuild/win32-ia32@0.25.3': - optional: true - '@esbuild/win32-x64@0.25.11': optional: true - '@esbuild/win32-x64@0.25.3': - optional: true - '@eslint-community/eslint-utils@4.9.0(eslint@8.52.0)': dependencies: eslint: 8.52.0 eslint-visitor-keys: 3.4.3 optional: true - '@eslint-community/regexpp@4.12.1': + '@eslint-community/regexpp@4.12.2': optional: true '@eslint/eslintrc@2.1.4': @@ -7195,49 +6926,32 @@ snapshots: '@eslint/js@8.52.0': optional: true - '@floating-ui/core@1.6.9': - dependencies: - '@floating-ui/utils': 0.2.9 - '@floating-ui/core@1.7.3': dependencies: '@floating-ui/utils': 0.2.10 - '@floating-ui/dom@1.6.13': - dependencies: - '@floating-ui/core': 1.6.9 - '@floating-ui/utils': 0.2.9 - '@floating-ui/dom@1.7.4': dependencies: '@floating-ui/core': 1.7.3 '@floating-ui/utils': 0.2.10 - '@floating-ui/react-dom@2.1.2(react-dom@19.1.1(react@19.1.1))(react@19.1.1)': - dependencies: - '@floating-ui/dom': 1.6.13 - react: 19.1.1 - react-dom: 19.1.1(react@19.1.1) - - '@floating-ui/react-dom@2.1.6(react-dom@19.1.1(react@19.1.1))(react@19.1.1)': + '@floating-ui/react-dom@2.1.6(react-dom@19.2.0(react@19.2.0))(react@19.2.0)': dependencies: '@floating-ui/dom': 1.7.4 - react: 19.1.1 - react-dom: 19.1.1(react@19.1.1) + react: 19.2.0 + react-dom: 19.2.0(react@19.2.0) '@floating-ui/utils@0.2.10': {} - '@floating-ui/utils@0.2.9': {} - - '@fontsource-variable/inter@5.1.1': {} + '@fontsource-variable/inter@5.2.8': {} '@fontsource/fira-code@5.2.7': {} 
'@fontsource/ibm-plex-mono@5.2.7': {} - '@fontsource/jetbrains-mono@5.2.5': {} + '@fontsource/jetbrains-mono@5.2.8': {} - '@fontsource/source-code-pro@5.2.5': {} + '@fontsource/source-code-pro@5.2.7': {} '@humanwhocodes/config-array@0.11.14': dependencies: @@ -7254,9 +6968,9 @@ snapshots: '@humanwhocodes/object-schema@2.0.3': optional: true - '@icons/material@0.2.4(react@19.1.1)': + '@icons/material@0.2.4(react@19.2.0)': dependencies: - react: 19.1.1 + react: 19.2.0 '@inquirer/confirm@3.2.0': dependencies: @@ -7268,7 +6982,7 @@ snapshots: '@inquirer/figures': 1.0.13 '@inquirer/type': 2.0.0 '@types/mute-stream': 0.0.4 - '@types/node': 22.18.8 + '@types/node': 22.18.13 '@types/wrap-ansi': 3.0.0 ansi-escapes: 4.3.2 cli-width: 4.1.0 @@ -7464,7 +7178,7 @@ snapshots: '@jest/transform@29.7.0': dependencies: - '@babel/core': 7.28.4 + '@babel/core': 7.28.5 '@jest/types': 29.6.3 '@jridgewell/trace-mapping': 0.3.25 babel-plugin-istanbul: 6.1.1 @@ -7500,12 +7214,12 @@ snapshots: '@types/yargs': 17.0.33 chalk: 4.1.2 - '@joshwooding/vite-plugin-react-docgen-typescript@0.6.1(typescript@5.6.3)(vite@7.1.11(@types/node@20.17.16)(jiti@1.21.7)(yaml@2.7.0))': + '@joshwooding/vite-plugin-react-docgen-typescript@0.6.1(typescript@5.6.3)(vite@7.1.12(@types/node@20.17.16)(jiti@1.21.7)(yaml@2.7.0))': dependencies: glob: 10.4.5 magic-string: 0.30.17 react-docgen-typescript: 2.2.2(typescript@5.6.3) - vite: 7.1.11(@types/node@20.17.16)(jiti@1.21.7)(yaml@2.7.0) + vite: 7.1.12(@types/node@20.17.16)(jiti@1.21.7)(yaml@2.7.0) optionalDependencies: typescript: 5.6.3 @@ -7543,11 +7257,11 @@ snapshots: '@leeoniya/ufuzzy@1.0.10': {} - '@mdx-js/react@3.0.1(@types/react@19.1.17)(react@19.1.1)': + '@mdx-js/react@3.0.1(@types/react@19.2.2)(react@19.2.0)': dependencies: '@types/mdx': 2.0.9 - '@types/react': 19.1.17 - react: 19.1.1 + '@types/react': 19.2.2 + react: 19.2.0 '@mjackson/form-data-parser@0.4.0': dependencies: @@ -7563,12 +7277,12 @@ snapshots: dependencies: state-local: 1.0.7 - 
'@monaco-editor/react@4.7.0(monaco-editor@0.53.0)(react-dom@19.1.1(react@19.1.1))(react@19.1.1)': + '@monaco-editor/react@4.7.0(monaco-editor@0.55.1)(react-dom@19.2.0(react@19.2.0))(react@19.2.0)': dependencies: '@monaco-editor/loader': 1.5.0 - monaco-editor: 0.53.0 - react: 19.1.1 - react-dom: 19.1.1(react@19.1.1) + monaco-editor: 0.55.1 + react: 19.2.0 + react-dom: 19.2.0(react@19.2.0) '@mswjs/interceptors@0.35.9': dependencies: @@ -7581,111 +7295,111 @@ snapshots: '@mui/core-downloads-tracker@5.18.0': {} - '@mui/material@5.18.0(@emotion/react@11.14.0(@types/react@19.1.17)(react@19.1.1))(@emotion/styled@11.14.1(@emotion/react@11.14.0(@types/react@19.1.17)(react@19.1.1))(@types/react@19.1.17)(react@19.1.1))(@types/react@19.1.17)(react-dom@19.1.1(react@19.1.1))(react@19.1.1)': + '@mui/material@5.18.0(@emotion/react@11.14.0(@types/react@19.2.2)(react@19.2.0))(@emotion/styled@11.14.1(@emotion/react@11.14.0(@types/react@19.2.2)(react@19.2.0))(@types/react@19.2.2)(react@19.2.0))(@types/react@19.2.2)(react-dom@19.2.0(react@19.2.0))(react@19.2.0)': dependencies: '@babel/runtime': 7.26.10 '@mui/core-downloads-tracker': 5.18.0 - '@mui/system': 5.18.0(@emotion/react@11.14.0(@types/react@19.1.17)(react@19.1.1))(@emotion/styled@11.14.1(@emotion/react@11.14.0(@types/react@19.1.17)(react@19.1.1))(@types/react@19.1.17)(react@19.1.1))(@types/react@19.1.17)(react@19.1.1) - '@mui/types': 7.2.24(@types/react@19.1.17) - '@mui/utils': 5.17.1(@types/react@19.1.17)(react@19.1.1) + '@mui/system': 5.18.0(@emotion/react@11.14.0(@types/react@19.2.2)(react@19.2.0))(@emotion/styled@11.14.1(@emotion/react@11.14.0(@types/react@19.2.2)(react@19.2.0))(@types/react@19.2.2)(react@19.2.0))(@types/react@19.2.2)(react@19.2.0) + '@mui/types': 7.2.24(@types/react@19.2.2) + '@mui/utils': 5.17.1(@types/react@19.2.2)(react@19.2.0) '@popperjs/core': 2.11.8 - '@types/react-transition-group': 4.4.12(@types/react@19.1.17) + '@types/react-transition-group': 4.4.12(@types/react@19.2.2) clsx: 2.1.1 csstype: 3.1.3 
prop-types: 15.8.1 - react: 19.1.1 - react-dom: 19.1.1(react@19.1.1) + react: 19.2.0 + react-dom: 19.2.0(react@19.2.0) react-is: 19.1.1 - react-transition-group: 4.4.5(react-dom@19.1.1(react@19.1.1))(react@19.1.1) + react-transition-group: 4.4.5(react-dom@19.2.0(react@19.2.0))(react@19.2.0) optionalDependencies: - '@emotion/react': 11.14.0(@types/react@19.1.17)(react@19.1.1) - '@emotion/styled': 11.14.1(@emotion/react@11.14.0(@types/react@19.1.17)(react@19.1.1))(@types/react@19.1.17)(react@19.1.1) - '@types/react': 19.1.17 + '@emotion/react': 11.14.0(@types/react@19.2.2)(react@19.2.0) + '@emotion/styled': 11.14.1(@emotion/react@11.14.0(@types/react@19.2.2)(react@19.2.0))(@types/react@19.2.2)(react@19.2.0) + '@types/react': 19.2.2 - '@mui/private-theming@5.17.1(@types/react@19.1.17)(react@19.1.1)': + '@mui/private-theming@5.17.1(@types/react@19.2.2)(react@19.2.0)': dependencies: '@babel/runtime': 7.26.10 - '@mui/utils': 5.17.1(@types/react@19.1.17)(react@19.1.1) + '@mui/utils': 5.17.1(@types/react@19.2.2)(react@19.2.0) prop-types: 15.8.1 - react: 19.1.1 + react: 19.2.0 optionalDependencies: - '@types/react': 19.1.17 + '@types/react': 19.2.2 - '@mui/styled-engine@5.18.0(@emotion/react@11.14.0(@types/react@19.1.17)(react@19.1.1))(@emotion/styled@11.14.1(@emotion/react@11.14.0(@types/react@19.1.17)(react@19.1.1))(@types/react@19.1.17)(react@19.1.1))(react@19.1.1)': + '@mui/styled-engine@5.18.0(@emotion/react@11.14.0(@types/react@19.2.2)(react@19.2.0))(@emotion/styled@11.14.1(@emotion/react@11.14.0(@types/react@19.2.2)(react@19.2.0))(@types/react@19.2.2)(react@19.2.0))(react@19.2.0)': dependencies: '@babel/runtime': 7.26.10 '@emotion/cache': 11.14.0 '@emotion/serialize': 1.3.3 csstype: 3.1.3 prop-types: 15.8.1 - react: 19.1.1 + react: 19.2.0 optionalDependencies: - '@emotion/react': 11.14.0(@types/react@19.1.17)(react@19.1.1) - '@emotion/styled': 11.14.1(@emotion/react@11.14.0(@types/react@19.1.17)(react@19.1.1))(@types/react@19.1.17)(react@19.1.1) + '@emotion/react': 
11.14.0(@types/react@19.2.2)(react@19.2.0) + '@emotion/styled': 11.14.1(@emotion/react@11.14.0(@types/react@19.2.2)(react@19.2.0))(@types/react@19.2.2)(react@19.2.0) - '@mui/system@5.18.0(@emotion/react@11.14.0(@types/react@19.1.17)(react@19.1.1))(@emotion/styled@11.14.1(@emotion/react@11.14.0(@types/react@19.1.17)(react@19.1.1))(@types/react@19.1.17)(react@19.1.1))(@types/react@19.1.17)(react@19.1.1)': + '@mui/system@5.18.0(@emotion/react@11.14.0(@types/react@19.2.2)(react@19.2.0))(@emotion/styled@11.14.1(@emotion/react@11.14.0(@types/react@19.2.2)(react@19.2.0))(@types/react@19.2.2)(react@19.2.0))(@types/react@19.2.2)(react@19.2.0)': dependencies: '@babel/runtime': 7.26.10 - '@mui/private-theming': 5.17.1(@types/react@19.1.17)(react@19.1.1) - '@mui/styled-engine': 5.18.0(@emotion/react@11.14.0(@types/react@19.1.17)(react@19.1.1))(@emotion/styled@11.14.1(@emotion/react@11.14.0(@types/react@19.1.17)(react@19.1.1))(@types/react@19.1.17)(react@19.1.1))(react@19.1.1) - '@mui/types': 7.2.24(@types/react@19.1.17) - '@mui/utils': 5.17.1(@types/react@19.1.17)(react@19.1.1) + '@mui/private-theming': 5.17.1(@types/react@19.2.2)(react@19.2.0) + '@mui/styled-engine': 5.18.0(@emotion/react@11.14.0(@types/react@19.2.2)(react@19.2.0))(@emotion/styled@11.14.1(@emotion/react@11.14.0(@types/react@19.2.2)(react@19.2.0))(@types/react@19.2.2)(react@19.2.0))(react@19.2.0) + '@mui/types': 7.2.24(@types/react@19.2.2) + '@mui/utils': 5.17.1(@types/react@19.2.2)(react@19.2.0) clsx: 2.1.1 csstype: 3.1.3 prop-types: 15.8.1 - react: 19.1.1 + react: 19.2.0 optionalDependencies: - '@emotion/react': 11.14.0(@types/react@19.1.17)(react@19.1.1) - '@emotion/styled': 11.14.1(@emotion/react@11.14.0(@types/react@19.1.17)(react@19.1.1))(@types/react@19.1.17)(react@19.1.1) - '@types/react': 19.1.17 + '@emotion/react': 11.14.0(@types/react@19.2.2)(react@19.2.0) + '@emotion/styled': 11.14.1(@emotion/react@11.14.0(@types/react@19.2.2)(react@19.2.0))(@types/react@19.2.2)(react@19.2.0) + '@types/react': 
19.2.2 - '@mui/types@7.2.24(@types/react@19.1.17)': + '@mui/types@7.2.24(@types/react@19.2.2)': optionalDependencies: - '@types/react': 19.1.17 + '@types/react': 19.2.2 - '@mui/utils@5.17.1(@types/react@19.1.17)(react@19.1.1)': + '@mui/utils@5.17.1(@types/react@19.2.2)(react@19.2.0)': dependencies: '@babel/runtime': 7.26.10 - '@mui/types': 7.2.24(@types/react@19.1.17) + '@mui/types': 7.2.24(@types/react@19.2.2) '@types/prop-types': 15.7.15 clsx: 2.1.1 prop-types: 15.8.1 - react: 19.1.1 + react: 19.2.0 react-is: 19.1.1 optionalDependencies: - '@types/react': 19.1.17 + '@types/react': 19.2.2 - '@mui/x-internals@7.29.0(@types/react@19.1.17)(react@19.1.1)': + '@mui/x-internals@7.29.0(@types/react@19.2.2)(react@19.2.0)': dependencies: '@babel/runtime': 7.26.10 - '@mui/utils': 5.17.1(@types/react@19.1.17)(react@19.1.1) - react: 19.1.1 + '@mui/utils': 5.17.1(@types/react@19.2.2)(react@19.2.0) + react: 19.2.0 transitivePeerDependencies: - '@types/react' - '@mui/x-tree-view@7.29.10(@emotion/react@11.14.0(@types/react@19.1.17)(react@19.1.1))(@emotion/styled@11.14.1(@emotion/react@11.14.0(@types/react@19.1.17)(react@19.1.1))(@types/react@19.1.17)(react@19.1.1))(@mui/material@5.18.0(@emotion/react@11.14.0(@types/react@19.1.17)(react@19.1.1))(@emotion/styled@11.14.1(@emotion/react@11.14.0(@types/react@19.1.17)(react@19.1.1))(@types/react@19.1.17)(react@19.1.1))(@types/react@19.1.17)(react-dom@19.1.1(react@19.1.1))(react@19.1.1))(@mui/system@5.18.0(@emotion/react@11.14.0(@types/react@19.1.17)(react@19.1.1))(@emotion/styled@11.14.1(@emotion/react@11.14.0(@types/react@19.1.17)(react@19.1.1))(@types/react@19.1.17)(react@19.1.1))(@types/react@19.1.17)(react@19.1.1))(@types/react@19.1.17)(react-dom@19.1.1(react@19.1.1))(react@19.1.1)': + 
'@mui/x-tree-view@7.29.10(@emotion/react@11.14.0(@types/react@19.2.2)(react@19.2.0))(@emotion/styled@11.14.1(@emotion/react@11.14.0(@types/react@19.2.2)(react@19.2.0))(@types/react@19.2.2)(react@19.2.0))(@mui/material@5.18.0(@emotion/react@11.14.0(@types/react@19.2.2)(react@19.2.0))(@emotion/styled@11.14.1(@emotion/react@11.14.0(@types/react@19.2.2)(react@19.2.0))(@types/react@19.2.2)(react@19.2.0))(@types/react@19.2.2)(react-dom@19.2.0(react@19.2.0))(react@19.2.0))(@mui/system@5.18.0(@emotion/react@11.14.0(@types/react@19.2.2)(react@19.2.0))(@emotion/styled@11.14.1(@emotion/react@11.14.0(@types/react@19.2.2)(react@19.2.0))(@types/react@19.2.2)(react@19.2.0))(@types/react@19.2.2)(react@19.2.0))(@types/react@19.2.2)(react-dom@19.2.0(react@19.2.0))(react@19.2.0)': dependencies: '@babel/runtime': 7.26.10 - '@mui/material': 5.18.0(@emotion/react@11.14.0(@types/react@19.1.17)(react@19.1.1))(@emotion/styled@11.14.1(@emotion/react@11.14.0(@types/react@19.1.17)(react@19.1.1))(@types/react@19.1.17)(react@19.1.1))(@types/react@19.1.17)(react-dom@19.1.1(react@19.1.1))(react@19.1.1) - '@mui/system': 5.18.0(@emotion/react@11.14.0(@types/react@19.1.17)(react@19.1.1))(@emotion/styled@11.14.1(@emotion/react@11.14.0(@types/react@19.1.17)(react@19.1.1))(@types/react@19.1.17)(react@19.1.1))(@types/react@19.1.17)(react@19.1.1) - '@mui/utils': 5.17.1(@types/react@19.1.17)(react@19.1.1) - '@mui/x-internals': 7.29.0(@types/react@19.1.17)(react@19.1.1) - '@types/react-transition-group': 4.4.12(@types/react@19.1.17) + '@mui/material': 5.18.0(@emotion/react@11.14.0(@types/react@19.2.2)(react@19.2.0))(@emotion/styled@11.14.1(@emotion/react@11.14.0(@types/react@19.2.2)(react@19.2.0))(@types/react@19.2.2)(react@19.2.0))(@types/react@19.2.2)(react-dom@19.2.0(react@19.2.0))(react@19.2.0) + '@mui/system': 
5.18.0(@emotion/react@11.14.0(@types/react@19.2.2)(react@19.2.0))(@emotion/styled@11.14.1(@emotion/react@11.14.0(@types/react@19.2.2)(react@19.2.0))(@types/react@19.2.2)(react@19.2.0))(@types/react@19.2.2)(react@19.2.0) + '@mui/utils': 5.17.1(@types/react@19.2.2)(react@19.2.0) + '@mui/x-internals': 7.29.0(@types/react@19.2.2)(react@19.2.0) + '@types/react-transition-group': 4.4.12(@types/react@19.2.2) clsx: 2.1.1 prop-types: 15.8.1 - react: 19.1.1 - react-dom: 19.1.1(react@19.1.1) - react-transition-group: 4.4.5(react-dom@19.1.1(react@19.1.1))(react@19.1.1) + react: 19.2.0 + react-dom: 19.2.0(react@19.2.0) + react-transition-group: 4.4.5(react-dom@19.2.0(react@19.2.0))(react@19.2.0) optionalDependencies: - '@emotion/react': 11.14.0(@types/react@19.1.17)(react@19.1.1) - '@emotion/styled': 11.14.1(@emotion/react@11.14.0(@types/react@19.1.17)(react@19.1.1))(@types/react@19.1.17)(react@19.1.1) + '@emotion/react': 11.14.0(@types/react@19.2.2)(react@19.2.0) + '@emotion/styled': 11.14.1(@emotion/react@11.14.0(@types/react@19.2.2)(react@19.2.0))(@types/react@19.2.2)(react@19.2.0) transitivePeerDependencies: - '@types/react' - '@napi-rs/wasm-runtime@1.0.5': + '@napi-rs/wasm-runtime@1.0.7': dependencies: - '@emnapi/core': 1.5.0 - '@emnapi/runtime': 1.5.0 + '@emnapi/core': 1.6.0 + '@emnapi/runtime': 1.6.0 '@tybys/wasm-util': 0.10.1 optional: true @@ -7703,11 +7417,11 @@ snapshots: '@nodelib/fs.scandir': 2.1.5 fastq: 1.19.1 - '@octokit/openapi-types@19.0.2': {} + '@octokit/openapi-types@20.0.0': {} - '@octokit/types@12.3.0': + '@octokit/types@12.6.0': dependencies: - '@octokit/openapi-types': 19.0.2 + '@octokit/openapi-types': 20.0.0 '@open-draft/deferred-promise@2.2.0': {} @@ -7718,63 +7432,63 @@ snapshots: '@open-draft/until@2.1.0': {} - '@oxc-resolver/binding-android-arm-eabi@11.8.4': + '@oxc-resolver/binding-android-arm-eabi@11.12.0': optional: true - '@oxc-resolver/binding-android-arm64@11.8.4': + '@oxc-resolver/binding-android-arm64@11.12.0': optional: true - 
'@oxc-resolver/binding-darwin-arm64@11.8.4': + '@oxc-resolver/binding-darwin-arm64@11.12.0': optional: true - '@oxc-resolver/binding-darwin-x64@11.8.4': + '@oxc-resolver/binding-darwin-x64@11.12.0': optional: true - '@oxc-resolver/binding-freebsd-x64@11.8.4': + '@oxc-resolver/binding-freebsd-x64@11.12.0': optional: true - '@oxc-resolver/binding-linux-arm-gnueabihf@11.8.4': + '@oxc-resolver/binding-linux-arm-gnueabihf@11.12.0': optional: true - '@oxc-resolver/binding-linux-arm-musleabihf@11.8.4': + '@oxc-resolver/binding-linux-arm-musleabihf@11.12.0': optional: true - '@oxc-resolver/binding-linux-arm64-gnu@11.8.4': + '@oxc-resolver/binding-linux-arm64-gnu@11.12.0': optional: true - '@oxc-resolver/binding-linux-arm64-musl@11.8.4': + '@oxc-resolver/binding-linux-arm64-musl@11.12.0': optional: true - '@oxc-resolver/binding-linux-ppc64-gnu@11.8.4': + '@oxc-resolver/binding-linux-ppc64-gnu@11.12.0': optional: true - '@oxc-resolver/binding-linux-riscv64-gnu@11.8.4': + '@oxc-resolver/binding-linux-riscv64-gnu@11.12.0': optional: true - '@oxc-resolver/binding-linux-riscv64-musl@11.8.4': + '@oxc-resolver/binding-linux-riscv64-musl@11.12.0': optional: true - '@oxc-resolver/binding-linux-s390x-gnu@11.8.4': + '@oxc-resolver/binding-linux-s390x-gnu@11.12.0': optional: true - '@oxc-resolver/binding-linux-x64-gnu@11.8.4': + '@oxc-resolver/binding-linux-x64-gnu@11.12.0': optional: true - '@oxc-resolver/binding-linux-x64-musl@11.8.4': + '@oxc-resolver/binding-linux-x64-musl@11.12.0': optional: true - '@oxc-resolver/binding-wasm32-wasi@11.8.4': + '@oxc-resolver/binding-wasm32-wasi@11.12.0': dependencies: - '@napi-rs/wasm-runtime': 1.0.5 + '@napi-rs/wasm-runtime': 1.0.7 optional: true - '@oxc-resolver/binding-win32-arm64-msvc@11.8.4': + '@oxc-resolver/binding-win32-arm64-msvc@11.12.0': optional: true - '@oxc-resolver/binding-win32-ia32-msvc@11.8.4': + '@oxc-resolver/binding-win32-ia32-msvc@11.12.0': optional: true - '@oxc-resolver/binding-win32-x64-msvc@11.8.4': + 
'@oxc-resolver/binding-win32-x64-msvc@11.12.0': optional: true '@pkgjs/parseargs@0.11.0': @@ -7809,752 +7523,497 @@ snapshots: '@protobufjs/utf8@1.1.0': {} - '@radix-ui/number@1.1.0': {} - '@radix-ui/number@1.1.1': {} - '@radix-ui/primitive@1.1.0': {} - - '@radix-ui/primitive@1.1.1': {} - '@radix-ui/primitive@1.1.3': {} - '@radix-ui/react-arrow@1.1.1(@types/react-dom@19.1.11(@types/react@19.1.17))(@types/react@19.1.17)(react-dom@19.1.1(react@19.1.1))(react@19.1.1)': - dependencies: - '@radix-ui/react-primitive': 2.0.1(@types/react-dom@19.1.11(@types/react@19.1.17))(@types/react@19.1.17)(react-dom@19.1.1(react@19.1.1))(react@19.1.1) - react: 19.1.1 - react-dom: 19.1.1(react@19.1.1) - optionalDependencies: - '@types/react': 19.1.17 - '@types/react-dom': 19.1.11(@types/react@19.1.17) - - '@radix-ui/react-arrow@1.1.7(@types/react-dom@19.1.11(@types/react@19.1.17))(@types/react@19.1.17)(react-dom@19.1.1(react@19.1.1))(react@19.1.1)': - dependencies: - '@radix-ui/react-primitive': 2.1.3(@types/react-dom@19.1.11(@types/react@19.1.17))(@types/react@19.1.17)(react-dom@19.1.1(react@19.1.1))(react@19.1.1) - react: 19.1.1 - react-dom: 19.1.1(react@19.1.1) - optionalDependencies: - '@types/react': 19.1.17 - '@types/react-dom': 19.1.11(@types/react@19.1.17) - - '@radix-ui/react-avatar@1.1.2(@types/react-dom@19.1.11(@types/react@19.1.17))(@types/react@19.1.17)(react-dom@19.1.1(react@19.1.1))(react@19.1.1)': - dependencies: - '@radix-ui/react-context': 1.1.1(@types/react@19.1.17)(react@19.1.1) - '@radix-ui/react-primitive': 2.0.1(@types/react-dom@19.1.11(@types/react@19.1.17))(@types/react@19.1.17)(react-dom@19.1.1(react@19.1.1))(react@19.1.1) - '@radix-ui/react-use-callback-ref': 1.1.0(@types/react@19.1.17)(react@19.1.1) - '@radix-ui/react-use-layout-effect': 1.1.0(@types/react@19.1.17)(react@19.1.1) - react: 19.1.1 - react-dom: 19.1.1(react@19.1.1) - optionalDependencies: - '@types/react': 19.1.17 - '@types/react-dom': 19.1.11(@types/react@19.1.17) - - 
'@radix-ui/react-checkbox@1.1.4(@types/react-dom@19.1.11(@types/react@19.1.17))(@types/react@19.1.17)(react-dom@19.1.1(react@19.1.1))(react@19.1.1)': - dependencies: - '@radix-ui/primitive': 1.1.1 - '@radix-ui/react-compose-refs': 1.1.1(@types/react@19.1.17)(react@19.1.1) - '@radix-ui/react-context': 1.1.1(@types/react@19.1.17)(react@19.1.1) - '@radix-ui/react-presence': 1.1.2(@types/react-dom@19.1.11(@types/react@19.1.17))(@types/react@19.1.17)(react-dom@19.1.1(react@19.1.1))(react@19.1.1) - '@radix-ui/react-primitive': 2.0.2(@types/react-dom@19.1.11(@types/react@19.1.17))(@types/react@19.1.17)(react-dom@19.1.1(react@19.1.1))(react@19.1.1) - '@radix-ui/react-use-controllable-state': 1.1.0(@types/react@19.1.17)(react@19.1.1) - '@radix-ui/react-use-previous': 1.1.0(@types/react@19.1.17)(react@19.1.1) - '@radix-ui/react-use-size': 1.1.0(@types/react@19.1.17)(react@19.1.1) - react: 19.1.1 - react-dom: 19.1.1(react@19.1.1) - optionalDependencies: - '@types/react': 19.1.17 - '@types/react-dom': 19.1.11(@types/react@19.1.17) - - '@radix-ui/react-collapsible@1.1.2(@types/react-dom@19.1.11(@types/react@19.1.17))(@types/react@19.1.17)(react-dom@19.1.1(react@19.1.1))(react@19.1.1)': - dependencies: - '@radix-ui/primitive': 1.1.1 - '@radix-ui/react-compose-refs': 1.1.1(@types/react@19.1.17)(react@19.1.1) - '@radix-ui/react-context': 1.1.1(@types/react@19.1.17)(react@19.1.1) - '@radix-ui/react-id': 1.1.0(@types/react@19.1.17)(react@19.1.1) - '@radix-ui/react-presence': 1.1.2(@types/react-dom@19.1.11(@types/react@19.1.17))(@types/react@19.1.17)(react-dom@19.1.1(react@19.1.1))(react@19.1.1) - '@radix-ui/react-primitive': 2.0.1(@types/react-dom@19.1.11(@types/react@19.1.17))(@types/react@19.1.17)(react-dom@19.1.1(react@19.1.1))(react@19.1.1) - '@radix-ui/react-use-controllable-state': 1.1.0(@types/react@19.1.17)(react@19.1.1) - '@radix-ui/react-use-layout-effect': 1.1.0(@types/react@19.1.17)(react@19.1.1) - react: 19.1.1 - react-dom: 19.1.1(react@19.1.1) - optionalDependencies: - 
'@types/react': 19.1.17 - '@types/react-dom': 19.1.11(@types/react@19.1.17) - - '@radix-ui/react-collection@1.1.1(@types/react-dom@19.1.11(@types/react@19.1.17))(@types/react@19.1.17)(react-dom@19.1.1(react@19.1.1))(react@19.1.1)': + '@radix-ui/react-arrow@1.1.7(@types/react-dom@19.2.2(@types/react@19.2.2))(@types/react@19.2.2)(react-dom@19.2.0(react@19.2.0))(react@19.2.0)': dependencies: - '@radix-ui/react-compose-refs': 1.1.1(@types/react@19.1.17)(react@19.1.1) - '@radix-ui/react-context': 1.1.1(@types/react@19.1.17)(react@19.1.1) - '@radix-ui/react-primitive': 2.0.1(@types/react-dom@19.1.11(@types/react@19.1.17))(@types/react@19.1.17)(react-dom@19.1.1(react@19.1.1))(react@19.1.1) - '@radix-ui/react-slot': 1.1.1(@types/react@19.1.17)(react@19.1.1) - react: 19.1.1 - react-dom: 19.1.1(react@19.1.1) + '@radix-ui/react-primitive': 2.1.3(@types/react-dom@19.2.2(@types/react@19.2.2))(@types/react@19.2.2)(react-dom@19.2.0(react@19.2.0))(react@19.2.0) + react: 19.2.0 + react-dom: 19.2.0(react@19.2.0) optionalDependencies: - '@types/react': 19.1.17 - '@types/react-dom': 19.1.11(@types/react@19.1.17) - - '@radix-ui/react-collection@1.1.2(@types/react-dom@19.1.11(@types/react@19.1.17))(@types/react@19.1.17)(react-dom@19.1.1(react@19.1.1))(react@19.1.1)': - dependencies: - '@radix-ui/react-compose-refs': 1.1.1(@types/react@19.1.17)(react@19.1.1) - '@radix-ui/react-context': 1.1.1(@types/react@19.1.17)(react@19.1.1) - '@radix-ui/react-primitive': 2.0.2(@types/react-dom@19.1.11(@types/react@19.1.17))(@types/react@19.1.17)(react-dom@19.1.1(react@19.1.1))(react@19.1.1) - '@radix-ui/react-slot': 1.1.2(@types/react@19.1.17)(react@19.1.1) - react: 19.1.1 - react-dom: 19.1.1(react@19.1.1) + '@types/react': 19.2.2 + '@types/react-dom': 19.2.2(@types/react@19.2.2) + + '@radix-ui/react-avatar@1.1.10(@types/react-dom@19.2.2(@types/react@19.2.2))(@types/react@19.2.2)(react-dom@19.2.0(react@19.2.0))(react@19.2.0)': + dependencies: + '@radix-ui/react-context': 
1.1.2(@types/react@19.2.2)(react@19.2.0) + '@radix-ui/react-primitive': 2.1.3(@types/react-dom@19.2.2(@types/react@19.2.2))(@types/react@19.2.2)(react-dom@19.2.0(react@19.2.0))(react@19.2.0) + '@radix-ui/react-use-callback-ref': 1.1.1(@types/react@19.2.2)(react@19.2.0) + '@radix-ui/react-use-is-hydrated': 0.1.0(@types/react@19.2.2)(react@19.2.0) + '@radix-ui/react-use-layout-effect': 1.1.1(@types/react@19.2.2)(react@19.2.0) + react: 19.2.0 + react-dom: 19.2.0(react@19.2.0) optionalDependencies: - '@types/react': 19.1.17 - '@types/react-dom': 19.1.11(@types/react@19.1.17) + '@types/react': 19.2.2 + '@types/react-dom': 19.2.2(@types/react@19.2.2) - '@radix-ui/react-collection@1.1.7(@types/react-dom@19.1.11(@types/react@19.1.17))(@types/react@19.1.17)(react-dom@19.1.1(react@19.1.1))(react@19.1.1)': + '@radix-ui/react-checkbox@1.3.3(@types/react-dom@19.2.2(@types/react@19.2.2))(@types/react@19.2.2)(react-dom@19.2.0(react@19.2.0))(react@19.2.0)': dependencies: - '@radix-ui/react-compose-refs': 1.1.2(@types/react@19.1.17)(react@19.1.1) - '@radix-ui/react-context': 1.1.2(@types/react@19.1.17)(react@19.1.1) - '@radix-ui/react-primitive': 2.1.3(@types/react-dom@19.1.11(@types/react@19.1.17))(@types/react@19.1.17)(react-dom@19.1.1(react@19.1.1))(react@19.1.1) - '@radix-ui/react-slot': 1.2.3(@types/react@19.1.17)(react@19.1.1) - react: 19.1.1 - react-dom: 19.1.1(react@19.1.1) + '@radix-ui/primitive': 1.1.3 + '@radix-ui/react-compose-refs': 1.1.2(@types/react@19.2.2)(react@19.2.0) + '@radix-ui/react-context': 1.1.2(@types/react@19.2.2)(react@19.2.0) + '@radix-ui/react-presence': 1.1.5(@types/react-dom@19.2.2(@types/react@19.2.2))(@types/react@19.2.2)(react-dom@19.2.0(react@19.2.0))(react@19.2.0) + '@radix-ui/react-primitive': 2.1.3(@types/react-dom@19.2.2(@types/react@19.2.2))(@types/react@19.2.2)(react-dom@19.2.0(react@19.2.0))(react@19.2.0) + '@radix-ui/react-use-controllable-state': 1.2.2(@types/react@19.2.2)(react@19.2.0) + '@radix-ui/react-use-previous': 
1.1.1(@types/react@19.2.2)(react@19.2.0) + '@radix-ui/react-use-size': 1.1.1(@types/react@19.2.2)(react@19.2.0) + react: 19.2.0 + react-dom: 19.2.0(react@19.2.0) optionalDependencies: - '@types/react': 19.1.17 - '@types/react-dom': 19.1.11(@types/react@19.1.17) + '@types/react': 19.2.2 + '@types/react-dom': 19.2.2(@types/react@19.2.2) - '@radix-ui/react-compose-refs@1.1.0(@types/react@19.1.17)(react@19.1.1)': + '@radix-ui/react-collapsible@1.1.12(@types/react-dom@19.2.2(@types/react@19.2.2))(@types/react@19.2.2)(react-dom@19.2.0(react@19.2.0))(react@19.2.0)': dependencies: - react: 19.1.1 + '@radix-ui/primitive': 1.1.3 + '@radix-ui/react-compose-refs': 1.1.2(@types/react@19.2.2)(react@19.2.0) + '@radix-ui/react-context': 1.1.2(@types/react@19.2.2)(react@19.2.0) + '@radix-ui/react-id': 1.1.1(@types/react@19.2.2)(react@19.2.0) + '@radix-ui/react-presence': 1.1.5(@types/react-dom@19.2.2(@types/react@19.2.2))(@types/react@19.2.2)(react-dom@19.2.0(react@19.2.0))(react@19.2.0) + '@radix-ui/react-primitive': 2.1.3(@types/react-dom@19.2.2(@types/react@19.2.2))(@types/react@19.2.2)(react-dom@19.2.0(react@19.2.0))(react@19.2.0) + '@radix-ui/react-use-controllable-state': 1.2.2(@types/react@19.2.2)(react@19.2.0) + '@radix-ui/react-use-layout-effect': 1.1.1(@types/react@19.2.2)(react@19.2.0) + react: 19.2.0 + react-dom: 19.2.0(react@19.2.0) optionalDependencies: - '@types/react': 19.1.17 + '@types/react': 19.2.2 + '@types/react-dom': 19.2.2(@types/react@19.2.2) - '@radix-ui/react-compose-refs@1.1.1(@types/react@19.1.17)(react@19.1.1)': + '@radix-ui/react-collection@1.1.7(@types/react-dom@19.2.2(@types/react@19.2.2))(@types/react@19.2.2)(react-dom@19.2.0(react@19.2.0))(react@19.2.0)': dependencies: - react: 19.1.1 + '@radix-ui/react-compose-refs': 1.1.2(@types/react@19.2.2)(react@19.2.0) + '@radix-ui/react-context': 1.1.2(@types/react@19.2.2)(react@19.2.0) + '@radix-ui/react-primitive': 
2.1.3(@types/react-dom@19.2.2(@types/react@19.2.2))(@types/react@19.2.2)(react-dom@19.2.0(react@19.2.0))(react@19.2.0) + '@radix-ui/react-slot': 1.2.3(@types/react@19.2.2)(react@19.2.0) + react: 19.2.0 + react-dom: 19.2.0(react@19.2.0) optionalDependencies: - '@types/react': 19.1.17 + '@types/react': 19.2.2 + '@types/react-dom': 19.2.2(@types/react@19.2.2) - '@radix-ui/react-compose-refs@1.1.2(@types/react@19.1.17)(react@19.1.1)': + '@radix-ui/react-compose-refs@1.1.2(@types/react@19.2.2)(react@19.2.0)': dependencies: - react: 19.1.1 + react: 19.2.0 optionalDependencies: - '@types/react': 19.1.17 + '@types/react': 19.2.2 - '@radix-ui/react-context@1.1.1(@types/react@19.1.17)(react@19.1.1)': + '@radix-ui/react-context@1.1.2(@types/react@19.2.2)(react@19.2.0)': dependencies: - react: 19.1.1 + react: 19.2.0 optionalDependencies: - '@types/react': 19.1.17 + '@types/react': 19.2.2 - '@radix-ui/react-context@1.1.2(@types/react@19.1.17)(react@19.1.1)': + '@radix-ui/react-dialog@1.1.15(@types/react-dom@19.2.2(@types/react@19.2.2))(@types/react@19.2.2)(react-dom@19.2.0(react@19.2.0))(react@19.2.0)': dependencies: - react: 19.1.1 - optionalDependencies: - '@types/react': 19.1.17 - - '@radix-ui/react-dialog@1.1.4(@types/react-dom@19.1.11(@types/react@19.1.17))(@types/react@19.1.17)(react-dom@19.1.1(react@19.1.1))(react@19.1.1)': - dependencies: - '@radix-ui/primitive': 1.1.1 - '@radix-ui/react-compose-refs': 1.1.1(@types/react@19.1.17)(react@19.1.1) - '@radix-ui/react-context': 1.1.1(@types/react@19.1.17)(react@19.1.1) - '@radix-ui/react-dismissable-layer': 1.1.3(@types/react-dom@19.1.11(@types/react@19.1.17))(@types/react@19.1.17)(react-dom@19.1.1(react@19.1.1))(react@19.1.1) - '@radix-ui/react-focus-guards': 1.1.1(@types/react@19.1.17)(react@19.1.1) - '@radix-ui/react-focus-scope': 1.1.1(@types/react-dom@19.1.11(@types/react@19.1.17))(@types/react@19.1.17)(react-dom@19.1.1(react@19.1.1))(react@19.1.1) - '@radix-ui/react-id': 1.1.0(@types/react@19.1.17)(react@19.1.1) - 
'@radix-ui/react-portal': 1.1.3(@types/react-dom@19.1.11(@types/react@19.1.17))(@types/react@19.1.17)(react-dom@19.1.1(react@19.1.1))(react@19.1.1) - '@radix-ui/react-presence': 1.1.2(@types/react-dom@19.1.11(@types/react@19.1.17))(@types/react@19.1.17)(react-dom@19.1.1(react@19.1.1))(react@19.1.1) - '@radix-ui/react-primitive': 2.0.1(@types/react-dom@19.1.11(@types/react@19.1.17))(@types/react@19.1.17)(react-dom@19.1.1(react@19.1.1))(react@19.1.1) - '@radix-ui/react-slot': 1.1.1(@types/react@19.1.17)(react@19.1.1) - '@radix-ui/react-use-controllable-state': 1.1.0(@types/react@19.1.17)(react@19.1.1) + '@radix-ui/primitive': 1.1.3 + '@radix-ui/react-compose-refs': 1.1.2(@types/react@19.2.2)(react@19.2.0) + '@radix-ui/react-context': 1.1.2(@types/react@19.2.2)(react@19.2.0) + '@radix-ui/react-dismissable-layer': 1.1.11(@types/react-dom@19.2.2(@types/react@19.2.2))(@types/react@19.2.2)(react-dom@19.2.0(react@19.2.0))(react@19.2.0) + '@radix-ui/react-focus-guards': 1.1.3(@types/react@19.2.2)(react@19.2.0) + '@radix-ui/react-focus-scope': 1.1.7(@types/react-dom@19.2.2(@types/react@19.2.2))(@types/react@19.2.2)(react-dom@19.2.0(react@19.2.0))(react@19.2.0) + '@radix-ui/react-id': 1.1.1(@types/react@19.2.2)(react@19.2.0) + '@radix-ui/react-portal': 1.1.9(@types/react-dom@19.2.2(@types/react@19.2.2))(@types/react@19.2.2)(react-dom@19.2.0(react@19.2.0))(react@19.2.0) + '@radix-ui/react-presence': 1.1.5(@types/react-dom@19.2.2(@types/react@19.2.2))(@types/react@19.2.2)(react-dom@19.2.0(react@19.2.0))(react@19.2.0) + '@radix-ui/react-primitive': 2.1.3(@types/react-dom@19.2.2(@types/react@19.2.2))(@types/react@19.2.2)(react-dom@19.2.0(react@19.2.0))(react@19.2.0) + '@radix-ui/react-slot': 1.2.3(@types/react@19.2.2)(react@19.2.0) + '@radix-ui/react-use-controllable-state': 1.2.2(@types/react@19.2.2)(react@19.2.0) aria-hidden: 1.2.6 - react: 19.1.1 - react-dom: 19.1.1(react@19.1.1) - react-remove-scroll: 2.7.1(@types/react@19.1.17)(react@19.1.1) - optionalDependencies: - 
'@types/react': 19.1.17 - '@types/react-dom': 19.1.11(@types/react@19.1.17) - - '@radix-ui/react-direction@1.1.0(@types/react@19.1.17)(react@19.1.1)': - dependencies: - react: 19.1.1 + react: 19.2.0 + react-dom: 19.2.0(react@19.2.0) + react-remove-scroll: 2.7.1(@types/react@19.2.2)(react@19.2.0) optionalDependencies: - '@types/react': 19.1.17 + '@types/react': 19.2.2 + '@types/react-dom': 19.2.2(@types/react@19.2.2) - '@radix-ui/react-direction@1.1.1(@types/react@19.1.17)(react@19.1.1)': + '@radix-ui/react-direction@1.1.1(@types/react@19.2.2)(react@19.2.0)': dependencies: - react: 19.1.1 + react: 19.2.0 optionalDependencies: - '@types/react': 19.1.17 + '@types/react': 19.2.2 - '@radix-ui/react-dismissable-layer@1.1.11(@types/react-dom@19.1.11(@types/react@19.1.17))(@types/react@19.1.17)(react-dom@19.1.1(react@19.1.1))(react@19.1.1)': + '@radix-ui/react-dismissable-layer@1.1.11(@types/react-dom@19.2.2(@types/react@19.2.2))(@types/react@19.2.2)(react-dom@19.2.0(react@19.2.0))(react@19.2.0)': dependencies: '@radix-ui/primitive': 1.1.3 - '@radix-ui/react-compose-refs': 1.1.2(@types/react@19.1.17)(react@19.1.1) - '@radix-ui/react-primitive': 2.1.3(@types/react-dom@19.1.11(@types/react@19.1.17))(@types/react@19.1.17)(react-dom@19.1.1(react@19.1.1))(react@19.1.1) - '@radix-ui/react-use-callback-ref': 1.1.1(@types/react@19.1.17)(react@19.1.1) - '@radix-ui/react-use-escape-keydown': 1.1.1(@types/react@19.1.17)(react@19.1.1) - react: 19.1.1 - react-dom: 19.1.1(react@19.1.1) - optionalDependencies: - '@types/react': 19.1.17 - '@types/react-dom': 19.1.11(@types/react@19.1.17) - - '@radix-ui/react-dismissable-layer@1.1.3(@types/react-dom@19.1.11(@types/react@19.1.17))(@types/react@19.1.17)(react-dom@19.1.1(react@19.1.1))(react@19.1.1)': - dependencies: - '@radix-ui/primitive': 1.1.1 - '@radix-ui/react-compose-refs': 1.1.1(@types/react@19.1.17)(react@19.1.1) - '@radix-ui/react-primitive': 
2.0.1(@types/react-dom@19.1.11(@types/react@19.1.17))(@types/react@19.1.17)(react-dom@19.1.1(react@19.1.1))(react@19.1.1) - '@radix-ui/react-use-callback-ref': 1.1.0(@types/react@19.1.17)(react@19.1.1) - '@radix-ui/react-use-escape-keydown': 1.1.0(@types/react@19.1.17)(react@19.1.1) - react: 19.1.1 - react-dom: 19.1.1(react@19.1.1) - optionalDependencies: - '@types/react': 19.1.17 - '@types/react-dom': 19.1.11(@types/react@19.1.17) - - '@radix-ui/react-dismissable-layer@1.1.4(@types/react-dom@19.1.11(@types/react@19.1.17))(@types/react@19.1.17)(react-dom@19.1.1(react@19.1.1))(react@19.1.1)': - dependencies: - '@radix-ui/primitive': 1.1.1 - '@radix-ui/react-compose-refs': 1.1.1(@types/react@19.1.17)(react@19.1.1) - '@radix-ui/react-primitive': 2.0.1(@types/react-dom@19.1.11(@types/react@19.1.17))(@types/react@19.1.17)(react-dom@19.1.1(react@19.1.1))(react@19.1.1) - '@radix-ui/react-use-callback-ref': 1.1.0(@types/react@19.1.17)(react@19.1.1) - '@radix-ui/react-use-escape-keydown': 1.1.0(@types/react@19.1.17)(react@19.1.1) - react: 19.1.1 - react-dom: 19.1.1(react@19.1.1) - optionalDependencies: - '@types/react': 19.1.17 - '@types/react-dom': 19.1.11(@types/react@19.1.17) - - '@radix-ui/react-dropdown-menu@2.1.4(@types/react-dom@19.1.11(@types/react@19.1.17))(@types/react@19.1.17)(react-dom@19.1.1(react@19.1.1))(react@19.1.1)': - dependencies: - '@radix-ui/primitive': 1.1.1 - '@radix-ui/react-compose-refs': 1.1.1(@types/react@19.1.17)(react@19.1.1) - '@radix-ui/react-context': 1.1.1(@types/react@19.1.17)(react@19.1.1) - '@radix-ui/react-id': 1.1.0(@types/react@19.1.17)(react@19.1.1) - '@radix-ui/react-menu': 2.1.4(@types/react-dom@19.1.11(@types/react@19.1.17))(@types/react@19.1.17)(react-dom@19.1.1(react@19.1.1))(react@19.1.1) - '@radix-ui/react-primitive': 2.0.1(@types/react-dom@19.1.11(@types/react@19.1.17))(@types/react@19.1.17)(react-dom@19.1.1(react@19.1.1))(react@19.1.1) - '@radix-ui/react-use-controllable-state': 1.1.0(@types/react@19.1.17)(react@19.1.1) - 
react: 19.1.1 - react-dom: 19.1.1(react@19.1.1) + '@radix-ui/react-compose-refs': 1.1.2(@types/react@19.2.2)(react@19.2.0) + '@radix-ui/react-primitive': 2.1.3(@types/react-dom@19.2.2(@types/react@19.2.2))(@types/react@19.2.2)(react-dom@19.2.0(react@19.2.0))(react@19.2.0) + '@radix-ui/react-use-callback-ref': 1.1.1(@types/react@19.2.2)(react@19.2.0) + '@radix-ui/react-use-escape-keydown': 1.1.1(@types/react@19.2.2)(react@19.2.0) + react: 19.2.0 + react-dom: 19.2.0(react@19.2.0) optionalDependencies: - '@types/react': 19.1.17 - '@types/react-dom': 19.1.11(@types/react@19.1.17) + '@types/react': 19.2.2 + '@types/react-dom': 19.2.2(@types/react@19.2.2) - '@radix-ui/react-focus-guards@1.1.1(@types/react@19.1.17)(react@19.1.1)': + '@radix-ui/react-dropdown-menu@2.1.16(@types/react-dom@19.2.2(@types/react@19.2.2))(@types/react@19.2.2)(react-dom@19.2.0(react@19.2.0))(react@19.2.0)': dependencies: - react: 19.1.1 + '@radix-ui/primitive': 1.1.3 + '@radix-ui/react-compose-refs': 1.1.2(@types/react@19.2.2)(react@19.2.0) + '@radix-ui/react-context': 1.1.2(@types/react@19.2.2)(react@19.2.0) + '@radix-ui/react-id': 1.1.1(@types/react@19.2.2)(react@19.2.0) + '@radix-ui/react-menu': 2.1.16(@types/react-dom@19.2.2(@types/react@19.2.2))(@types/react@19.2.2)(react-dom@19.2.0(react@19.2.0))(react@19.2.0) + '@radix-ui/react-primitive': 2.1.3(@types/react-dom@19.2.2(@types/react@19.2.2))(@types/react@19.2.2)(react-dom@19.2.0(react@19.2.0))(react@19.2.0) + '@radix-ui/react-use-controllable-state': 1.2.2(@types/react@19.2.2)(react@19.2.0) + react: 19.2.0 + react-dom: 19.2.0(react@19.2.0) optionalDependencies: - '@types/react': 19.1.17 + '@types/react': 19.2.2 + '@types/react-dom': 19.2.2(@types/react@19.2.2) - '@radix-ui/react-focus-guards@1.1.3(@types/react@19.1.17)(react@19.1.1)': + '@radix-ui/react-focus-guards@1.1.3(@types/react@19.2.2)(react@19.2.0)': dependencies: - react: 19.1.1 + react: 19.2.0 optionalDependencies: - '@types/react': 19.1.17 + '@types/react': 19.2.2 - 
'@radix-ui/react-focus-scope@1.1.1(@types/react-dom@19.1.11(@types/react@19.1.17))(@types/react@19.1.17)(react-dom@19.1.1(react@19.1.1))(react@19.1.1)': + '@radix-ui/react-focus-scope@1.1.7(@types/react-dom@19.2.2(@types/react@19.2.2))(@types/react@19.2.2)(react-dom@19.2.0(react@19.2.0))(react@19.2.0)': dependencies: - '@radix-ui/react-compose-refs': 1.1.1(@types/react@19.1.17)(react@19.1.1) - '@radix-ui/react-primitive': 2.0.1(@types/react-dom@19.1.11(@types/react@19.1.17))(@types/react@19.1.17)(react-dom@19.1.1(react@19.1.1))(react@19.1.1) - '@radix-ui/react-use-callback-ref': 1.1.0(@types/react@19.1.17)(react@19.1.1) - react: 19.1.1 - react-dom: 19.1.1(react@19.1.1) + '@radix-ui/react-compose-refs': 1.1.2(@types/react@19.2.2)(react@19.2.0) + '@radix-ui/react-primitive': 2.1.3(@types/react-dom@19.2.2(@types/react@19.2.2))(@types/react@19.2.2)(react-dom@19.2.0(react@19.2.0))(react@19.2.0) + '@radix-ui/react-use-callback-ref': 1.1.1(@types/react@19.2.2)(react@19.2.0) + react: 19.2.0 + react-dom: 19.2.0(react@19.2.0) optionalDependencies: - '@types/react': 19.1.17 - '@types/react-dom': 19.1.11(@types/react@19.1.17) + '@types/react': 19.2.2 + '@types/react-dom': 19.2.2(@types/react@19.2.2) - '@radix-ui/react-focus-scope@1.1.7(@types/react-dom@19.1.11(@types/react@19.1.17))(@types/react@19.1.17)(react-dom@19.1.1(react@19.1.1))(react@19.1.1)': + '@radix-ui/react-id@1.1.1(@types/react@19.2.2)(react@19.2.0)': dependencies: - '@radix-ui/react-compose-refs': 1.1.2(@types/react@19.1.17)(react@19.1.1) - '@radix-ui/react-primitive': 2.1.3(@types/react-dom@19.1.11(@types/react@19.1.17))(@types/react@19.1.17)(react-dom@19.1.1(react@19.1.1))(react@19.1.1) - '@radix-ui/react-use-callback-ref': 1.1.1(@types/react@19.1.17)(react@19.1.1) - react: 19.1.1 - react-dom: 19.1.1(react@19.1.1) + '@radix-ui/react-use-layout-effect': 1.1.1(@types/react@19.2.2)(react@19.2.0) + react: 19.2.0 optionalDependencies: - '@types/react': 19.1.17 - '@types/react-dom': 19.1.11(@types/react@19.1.17) + 
'@types/react': 19.2.2 - '@radix-ui/react-id@1.1.0(@types/react@19.1.17)(react@19.1.1)': + '@radix-ui/react-label@2.1.7(@types/react-dom@19.2.2(@types/react@19.2.2))(@types/react@19.2.2)(react-dom@19.2.0(react@19.2.0))(react@19.2.0)': dependencies: - '@radix-ui/react-use-layout-effect': 1.1.0(@types/react@19.1.17)(react@19.1.1) - react: 19.1.1 + '@radix-ui/react-primitive': 2.1.3(@types/react-dom@19.2.2(@types/react@19.2.2))(@types/react@19.2.2)(react-dom@19.2.0(react@19.2.0))(react@19.2.0) + react: 19.2.0 + react-dom: 19.2.0(react@19.2.0) optionalDependencies: - '@types/react': 19.1.17 + '@types/react': 19.2.2 + '@types/react-dom': 19.2.2(@types/react@19.2.2) - '@radix-ui/react-id@1.1.1(@types/react@19.1.17)(react@19.1.1)': + '@radix-ui/react-menu@2.1.16(@types/react-dom@19.2.2(@types/react@19.2.2))(@types/react@19.2.2)(react-dom@19.2.0(react@19.2.0))(react@19.2.0)': dependencies: - '@radix-ui/react-use-layout-effect': 1.1.1(@types/react@19.1.17)(react@19.1.1) - react: 19.1.1 + '@radix-ui/primitive': 1.1.3 + '@radix-ui/react-collection': 1.1.7(@types/react-dom@19.2.2(@types/react@19.2.2))(@types/react@19.2.2)(react-dom@19.2.0(react@19.2.0))(react@19.2.0) + '@radix-ui/react-compose-refs': 1.1.2(@types/react@19.2.2)(react@19.2.0) + '@radix-ui/react-context': 1.1.2(@types/react@19.2.2)(react@19.2.0) + '@radix-ui/react-direction': 1.1.1(@types/react@19.2.2)(react@19.2.0) + '@radix-ui/react-dismissable-layer': 1.1.11(@types/react-dom@19.2.2(@types/react@19.2.2))(@types/react@19.2.2)(react-dom@19.2.0(react@19.2.0))(react@19.2.0) + '@radix-ui/react-focus-guards': 1.1.3(@types/react@19.2.2)(react@19.2.0) + '@radix-ui/react-focus-scope': 1.1.7(@types/react-dom@19.2.2(@types/react@19.2.2))(@types/react@19.2.2)(react-dom@19.2.0(react@19.2.0))(react@19.2.0) + '@radix-ui/react-id': 1.1.1(@types/react@19.2.2)(react@19.2.0) + '@radix-ui/react-popper': 1.2.8(@types/react-dom@19.2.2(@types/react@19.2.2))(@types/react@19.2.2)(react-dom@19.2.0(react@19.2.0))(react@19.2.0) + 
'@radix-ui/react-portal': 1.1.9(@types/react-dom@19.2.2(@types/react@19.2.2))(@types/react@19.2.2)(react-dom@19.2.0(react@19.2.0))(react@19.2.0) + '@radix-ui/react-presence': 1.1.5(@types/react-dom@19.2.2(@types/react@19.2.2))(@types/react@19.2.2)(react-dom@19.2.0(react@19.2.0))(react@19.2.0) + '@radix-ui/react-primitive': 2.1.3(@types/react-dom@19.2.2(@types/react@19.2.2))(@types/react@19.2.2)(react-dom@19.2.0(react@19.2.0))(react@19.2.0) + '@radix-ui/react-roving-focus': 1.1.11(@types/react-dom@19.2.2(@types/react@19.2.2))(@types/react@19.2.2)(react-dom@19.2.0(react@19.2.0))(react@19.2.0) + '@radix-ui/react-slot': 1.2.3(@types/react@19.2.2)(react@19.2.0) + '@radix-ui/react-use-callback-ref': 1.1.1(@types/react@19.2.2)(react@19.2.0) + aria-hidden: 1.2.6 + react: 19.2.0 + react-dom: 19.2.0(react@19.2.0) + react-remove-scroll: 2.7.1(@types/react@19.2.2)(react@19.2.0) optionalDependencies: - '@types/react': 19.1.17 + '@types/react': 19.2.2 + '@types/react-dom': 19.2.2(@types/react@19.2.2) - '@radix-ui/react-label@2.1.0(@types/react-dom@19.1.11(@types/react@19.1.17))(@types/react@19.1.17)(react-dom@19.1.1(react@19.1.1))(react@19.1.1)': + '@radix-ui/react-popover@1.1.15(@types/react-dom@19.2.2(@types/react@19.2.2))(@types/react@19.2.2)(react-dom@19.2.0(react@19.2.0))(react@19.2.0)': dependencies: - '@radix-ui/react-primitive': 2.0.0(@types/react-dom@19.1.11(@types/react@19.1.17))(@types/react@19.1.17)(react-dom@19.1.1(react@19.1.1))(react@19.1.1) - react: 19.1.1 - react-dom: 19.1.1(react@19.1.1) - optionalDependencies: - '@types/react': 19.1.17 - '@types/react-dom': 19.1.11(@types/react@19.1.17) - - '@radix-ui/react-menu@2.1.4(@types/react-dom@19.1.11(@types/react@19.1.17))(@types/react@19.1.17)(react-dom@19.1.1(react@19.1.1))(react@19.1.1)': - dependencies: - '@radix-ui/primitive': 1.1.1 - '@radix-ui/react-collection': 1.1.1(@types/react-dom@19.1.11(@types/react@19.1.17))(@types/react@19.1.17)(react-dom@19.1.1(react@19.1.1))(react@19.1.1) - 
'@radix-ui/react-compose-refs': 1.1.1(@types/react@19.1.17)(react@19.1.1) - '@radix-ui/react-context': 1.1.1(@types/react@19.1.17)(react@19.1.1) - '@radix-ui/react-direction': 1.1.0(@types/react@19.1.17)(react@19.1.1) - '@radix-ui/react-dismissable-layer': 1.1.3(@types/react-dom@19.1.11(@types/react@19.1.17))(@types/react@19.1.17)(react-dom@19.1.1(react@19.1.1))(react@19.1.1) - '@radix-ui/react-focus-guards': 1.1.1(@types/react@19.1.17)(react@19.1.1) - '@radix-ui/react-focus-scope': 1.1.1(@types/react-dom@19.1.11(@types/react@19.1.17))(@types/react@19.1.17)(react-dom@19.1.1(react@19.1.1))(react@19.1.1) - '@radix-ui/react-id': 1.1.0(@types/react@19.1.17)(react@19.1.1) - '@radix-ui/react-popper': 1.2.1(@types/react-dom@19.1.11(@types/react@19.1.17))(@types/react@19.1.17)(react-dom@19.1.1(react@19.1.1))(react@19.1.1) - '@radix-ui/react-portal': 1.1.3(@types/react-dom@19.1.11(@types/react@19.1.17))(@types/react@19.1.17)(react-dom@19.1.1(react@19.1.1))(react@19.1.1) - '@radix-ui/react-presence': 1.1.2(@types/react-dom@19.1.11(@types/react@19.1.17))(@types/react@19.1.17)(react-dom@19.1.1(react@19.1.1))(react@19.1.1) - '@radix-ui/react-primitive': 2.0.1(@types/react-dom@19.1.11(@types/react@19.1.17))(@types/react@19.1.17)(react-dom@19.1.1(react@19.1.1))(react@19.1.1) - '@radix-ui/react-roving-focus': 1.1.1(@types/react-dom@19.1.11(@types/react@19.1.17))(@types/react@19.1.17)(react-dom@19.1.1(react@19.1.1))(react@19.1.1) - '@radix-ui/react-slot': 1.1.1(@types/react@19.1.17)(react@19.1.1) - '@radix-ui/react-use-callback-ref': 1.1.0(@types/react@19.1.17)(react@19.1.1) + '@radix-ui/primitive': 1.1.3 + '@radix-ui/react-compose-refs': 1.1.2(@types/react@19.2.2)(react@19.2.0) + '@radix-ui/react-context': 1.1.2(@types/react@19.2.2)(react@19.2.0) + '@radix-ui/react-dismissable-layer': 1.1.11(@types/react-dom@19.2.2(@types/react@19.2.2))(@types/react@19.2.2)(react-dom@19.2.0(react@19.2.0))(react@19.2.0) + '@radix-ui/react-focus-guards': 1.1.3(@types/react@19.2.2)(react@19.2.0) + 
'@radix-ui/react-focus-scope': 1.1.7(@types/react-dom@19.2.2(@types/react@19.2.2))(@types/react@19.2.2)(react-dom@19.2.0(react@19.2.0))(react@19.2.0) + '@radix-ui/react-id': 1.1.1(@types/react@19.2.2)(react@19.2.0) + '@radix-ui/react-popper': 1.2.8(@types/react-dom@19.2.2(@types/react@19.2.2))(@types/react@19.2.2)(react-dom@19.2.0(react@19.2.0))(react@19.2.0) + '@radix-ui/react-portal': 1.1.9(@types/react-dom@19.2.2(@types/react@19.2.2))(@types/react@19.2.2)(react-dom@19.2.0(react@19.2.0))(react@19.2.0) + '@radix-ui/react-presence': 1.1.5(@types/react-dom@19.2.2(@types/react@19.2.2))(@types/react@19.2.2)(react-dom@19.2.0(react@19.2.0))(react@19.2.0) + '@radix-ui/react-primitive': 2.1.3(@types/react-dom@19.2.2(@types/react@19.2.2))(@types/react@19.2.2)(react-dom@19.2.0(react@19.2.0))(react@19.2.0) + '@radix-ui/react-slot': 1.2.3(@types/react@19.2.2)(react@19.2.0) + '@radix-ui/react-use-controllable-state': 1.2.2(@types/react@19.2.2)(react@19.2.0) aria-hidden: 1.2.6 - react: 19.1.1 - react-dom: 19.1.1(react@19.1.1) - react-remove-scroll: 2.7.1(@types/react@19.1.17)(react@19.1.1) - optionalDependencies: - '@types/react': 19.1.17 - '@types/react-dom': 19.1.11(@types/react@19.1.17) - - '@radix-ui/react-popover@1.1.5(@types/react-dom@19.1.11(@types/react@19.1.17))(@types/react@19.1.17)(react-dom@19.1.1(react@19.1.1))(react@19.1.1)': - dependencies: - '@radix-ui/primitive': 1.1.1 - '@radix-ui/react-compose-refs': 1.1.1(@types/react@19.1.17)(react@19.1.1) - '@radix-ui/react-context': 1.1.1(@types/react@19.1.17)(react@19.1.1) - '@radix-ui/react-dismissable-layer': 1.1.4(@types/react-dom@19.1.11(@types/react@19.1.17))(@types/react@19.1.17)(react-dom@19.1.1(react@19.1.1))(react@19.1.1) - '@radix-ui/react-focus-guards': 1.1.1(@types/react@19.1.17)(react@19.1.1) - '@radix-ui/react-focus-scope': 1.1.1(@types/react-dom@19.1.11(@types/react@19.1.17))(@types/react@19.1.17)(react-dom@19.1.1(react@19.1.1))(react@19.1.1) - '@radix-ui/react-id': 
1.1.0(@types/react@19.1.17)(react@19.1.1) - '@radix-ui/react-popper': 1.2.1(@types/react-dom@19.1.11(@types/react@19.1.17))(@types/react@19.1.17)(react-dom@19.1.1(react@19.1.1))(react@19.1.1) - '@radix-ui/react-portal': 1.1.3(@types/react-dom@19.1.11(@types/react@19.1.17))(@types/react@19.1.17)(react-dom@19.1.1(react@19.1.1))(react@19.1.1) - '@radix-ui/react-presence': 1.1.2(@types/react-dom@19.1.11(@types/react@19.1.17))(@types/react@19.1.17)(react-dom@19.1.1(react@19.1.1))(react@19.1.1) - '@radix-ui/react-primitive': 2.0.1(@types/react-dom@19.1.11(@types/react@19.1.17))(@types/react@19.1.17)(react-dom@19.1.1(react@19.1.1))(react@19.1.1) - '@radix-ui/react-slot': 1.1.1(@types/react@19.1.17)(react@19.1.1) - '@radix-ui/react-use-controllable-state': 1.1.0(@types/react@19.1.17)(react@19.1.1) - aria-hidden: 1.2.4 - react: 19.1.1 - react-dom: 19.1.1(react@19.1.1) - react-remove-scroll: 2.6.3(@types/react@19.1.17)(react@19.1.1) + react: 19.2.0 + react-dom: 19.2.0(react@19.2.0) + react-remove-scroll: 2.7.1(@types/react@19.2.2)(react@19.2.0) optionalDependencies: - '@types/react': 19.1.17 - '@types/react-dom': 19.1.11(@types/react@19.1.17) - - '@radix-ui/react-popper@1.2.1(@types/react-dom@19.1.11(@types/react@19.1.17))(@types/react@19.1.17)(react-dom@19.1.1(react@19.1.1))(react@19.1.1)': - dependencies: - '@floating-ui/react-dom': 2.1.2(react-dom@19.1.1(react@19.1.1))(react@19.1.1) - '@radix-ui/react-arrow': 1.1.1(@types/react-dom@19.1.11(@types/react@19.1.17))(@types/react@19.1.17)(react-dom@19.1.1(react@19.1.1))(react@19.1.1) - '@radix-ui/react-compose-refs': 1.1.1(@types/react@19.1.17)(react@19.1.1) - '@radix-ui/react-context': 1.1.1(@types/react@19.1.17)(react@19.1.1) - '@radix-ui/react-primitive': 2.0.1(@types/react-dom@19.1.11(@types/react@19.1.17))(@types/react@19.1.17)(react-dom@19.1.1(react@19.1.1))(react@19.1.1) - '@radix-ui/react-use-callback-ref': 1.1.0(@types/react@19.1.17)(react@19.1.1) - '@radix-ui/react-use-layout-effect': 
1.1.0(@types/react@19.1.17)(react@19.1.1) - '@radix-ui/react-use-rect': 1.1.0(@types/react@19.1.17)(react@19.1.1) - '@radix-ui/react-use-size': 1.1.0(@types/react@19.1.17)(react@19.1.1) - '@radix-ui/rect': 1.1.0 - react: 19.1.1 - react-dom: 19.1.1(react@19.1.1) - optionalDependencies: - '@types/react': 19.1.17 - '@types/react-dom': 19.1.11(@types/react@19.1.17) - - '@radix-ui/react-popper@1.2.8(@types/react-dom@19.1.11(@types/react@19.1.17))(@types/react@19.1.17)(react-dom@19.1.1(react@19.1.1))(react@19.1.1)': - dependencies: - '@floating-ui/react-dom': 2.1.6(react-dom@19.1.1(react@19.1.1))(react@19.1.1) - '@radix-ui/react-arrow': 1.1.7(@types/react-dom@19.1.11(@types/react@19.1.17))(@types/react@19.1.17)(react-dom@19.1.1(react@19.1.1))(react@19.1.1) - '@radix-ui/react-compose-refs': 1.1.2(@types/react@19.1.17)(react@19.1.1) - '@radix-ui/react-context': 1.1.2(@types/react@19.1.17)(react@19.1.1) - '@radix-ui/react-primitive': 2.1.3(@types/react-dom@19.1.11(@types/react@19.1.17))(@types/react@19.1.17)(react-dom@19.1.1(react@19.1.1))(react@19.1.1) - '@radix-ui/react-use-callback-ref': 1.1.1(@types/react@19.1.17)(react@19.1.1) - '@radix-ui/react-use-layout-effect': 1.1.1(@types/react@19.1.17)(react@19.1.1) - '@radix-ui/react-use-rect': 1.1.1(@types/react@19.1.17)(react@19.1.1) - '@radix-ui/react-use-size': 1.1.1(@types/react@19.1.17)(react@19.1.1) + '@types/react': 19.2.2 + '@types/react-dom': 19.2.2(@types/react@19.2.2) + + '@radix-ui/react-popper@1.2.8(@types/react-dom@19.2.2(@types/react@19.2.2))(@types/react@19.2.2)(react-dom@19.2.0(react@19.2.0))(react@19.2.0)': + dependencies: + '@floating-ui/react-dom': 2.1.6(react-dom@19.2.0(react@19.2.0))(react@19.2.0) + '@radix-ui/react-arrow': 1.1.7(@types/react-dom@19.2.2(@types/react@19.2.2))(@types/react@19.2.2)(react-dom@19.2.0(react@19.2.0))(react@19.2.0) + '@radix-ui/react-compose-refs': 1.1.2(@types/react@19.2.2)(react@19.2.0) + '@radix-ui/react-context': 1.1.2(@types/react@19.2.2)(react@19.2.0) + 
'@radix-ui/react-primitive': 2.1.3(@types/react-dom@19.2.2(@types/react@19.2.2))(@types/react@19.2.2)(react-dom@19.2.0(react@19.2.0))(react@19.2.0) + '@radix-ui/react-use-callback-ref': 1.1.1(@types/react@19.2.2)(react@19.2.0) + '@radix-ui/react-use-layout-effect': 1.1.1(@types/react@19.2.2)(react@19.2.0) + '@radix-ui/react-use-rect': 1.1.1(@types/react@19.2.2)(react@19.2.0) + '@radix-ui/react-use-size': 1.1.1(@types/react@19.2.2)(react@19.2.0) '@radix-ui/rect': 1.1.1 - react: 19.1.1 - react-dom: 19.1.1(react@19.1.1) - optionalDependencies: - '@types/react': 19.1.17 - '@types/react-dom': 19.1.11(@types/react@19.1.17) - - '@radix-ui/react-portal@1.1.3(@types/react-dom@19.1.11(@types/react@19.1.17))(@types/react@19.1.17)(react-dom@19.1.1(react@19.1.1))(react@19.1.1)': - dependencies: - '@radix-ui/react-primitive': 2.0.1(@types/react-dom@19.1.11(@types/react@19.1.17))(@types/react@19.1.17)(react-dom@19.1.1(react@19.1.1))(react@19.1.1) - '@radix-ui/react-use-layout-effect': 1.1.0(@types/react@19.1.17)(react@19.1.1) - react: 19.1.1 - react-dom: 19.1.1(react@19.1.1) + react: 19.2.0 + react-dom: 19.2.0(react@19.2.0) optionalDependencies: - '@types/react': 19.1.17 - '@types/react-dom': 19.1.11(@types/react@19.1.17) + '@types/react': 19.2.2 + '@types/react-dom': 19.2.2(@types/react@19.2.2) - '@radix-ui/react-portal@1.1.9(@types/react-dom@19.1.11(@types/react@19.1.17))(@types/react@19.1.17)(react-dom@19.1.1(react@19.1.1))(react@19.1.1)': + '@radix-ui/react-portal@1.1.9(@types/react-dom@19.2.2(@types/react@19.2.2))(@types/react@19.2.2)(react-dom@19.2.0(react@19.2.0))(react@19.2.0)': dependencies: - '@radix-ui/react-primitive': 2.1.3(@types/react-dom@19.1.11(@types/react@19.1.17))(@types/react@19.1.17)(react-dom@19.1.1(react@19.1.1))(react@19.1.1) - '@radix-ui/react-use-layout-effect': 1.1.1(@types/react@19.1.17)(react@19.1.1) - react: 19.1.1 - react-dom: 19.1.1(react@19.1.1) + '@radix-ui/react-primitive': 
2.1.3(@types/react-dom@19.2.2(@types/react@19.2.2))(@types/react@19.2.2)(react-dom@19.2.0(react@19.2.0))(react@19.2.0) + '@radix-ui/react-use-layout-effect': 1.1.1(@types/react@19.2.2)(react@19.2.0) + react: 19.2.0 + react-dom: 19.2.0(react@19.2.0) optionalDependencies: - '@types/react': 19.1.17 - '@types/react-dom': 19.1.11(@types/react@19.1.17) + '@types/react': 19.2.2 + '@types/react-dom': 19.2.2(@types/react@19.2.2) - '@radix-ui/react-presence@1.1.2(@types/react-dom@19.1.11(@types/react@19.1.17))(@types/react@19.1.17)(react-dom@19.1.1(react@19.1.1))(react@19.1.1)': + '@radix-ui/react-presence@1.1.5(@types/react-dom@19.2.2(@types/react@19.2.2))(@types/react@19.2.2)(react-dom@19.2.0(react@19.2.0))(react@19.2.0)': dependencies: - '@radix-ui/react-compose-refs': 1.1.1(@types/react@19.1.17)(react@19.1.1) - '@radix-ui/react-use-layout-effect': 1.1.0(@types/react@19.1.17)(react@19.1.1) - react: 19.1.1 - react-dom: 19.1.1(react@19.1.1) + '@radix-ui/react-compose-refs': 1.1.2(@types/react@19.2.2)(react@19.2.0) + '@radix-ui/react-use-layout-effect': 1.1.1(@types/react@19.2.2)(react@19.2.0) + react: 19.2.0 + react-dom: 19.2.0(react@19.2.0) optionalDependencies: - '@types/react': 19.1.17 - '@types/react-dom': 19.1.11(@types/react@19.1.17) + '@types/react': 19.2.2 + '@types/react-dom': 19.2.2(@types/react@19.2.2) - '@radix-ui/react-primitive@2.0.0(@types/react-dom@19.1.11(@types/react@19.1.17))(@types/react@19.1.17)(react-dom@19.1.1(react@19.1.1))(react@19.1.1)': + '@radix-ui/react-primitive@2.1.3(@types/react-dom@19.2.2(@types/react@19.2.2))(@types/react@19.2.2)(react-dom@19.2.0(react@19.2.0))(react@19.2.0)': dependencies: - '@radix-ui/react-slot': 1.1.0(@types/react@19.1.17)(react@19.1.1) - react: 19.1.1 - react-dom: 19.1.1(react@19.1.1) + '@radix-ui/react-slot': 1.2.3(@types/react@19.2.2)(react@19.2.0) + react: 19.2.0 + react-dom: 19.2.0(react@19.2.0) optionalDependencies: - '@types/react': 19.1.17 - '@types/react-dom': 19.1.11(@types/react@19.1.17) + '@types/react': 
19.2.2 + '@types/react-dom': 19.2.2(@types/react@19.2.2) - '@radix-ui/react-primitive@2.0.1(@types/react-dom@19.1.11(@types/react@19.1.17))(@types/react@19.1.17)(react-dom@19.1.1(react@19.1.1))(react@19.1.1)': + '@radix-ui/react-radio-group@1.3.8(@types/react-dom@19.2.2(@types/react@19.2.2))(@types/react@19.2.2)(react-dom@19.2.0(react@19.2.0))(react@19.2.0)': dependencies: - '@radix-ui/react-slot': 1.1.1(@types/react@19.1.17)(react@19.1.1) - react: 19.1.1 - react-dom: 19.1.1(react@19.1.1) + '@radix-ui/primitive': 1.1.3 + '@radix-ui/react-compose-refs': 1.1.2(@types/react@19.2.2)(react@19.2.0) + '@radix-ui/react-context': 1.1.2(@types/react@19.2.2)(react@19.2.0) + '@radix-ui/react-direction': 1.1.1(@types/react@19.2.2)(react@19.2.0) + '@radix-ui/react-presence': 1.1.5(@types/react-dom@19.2.2(@types/react@19.2.2))(@types/react@19.2.2)(react-dom@19.2.0(react@19.2.0))(react@19.2.0) + '@radix-ui/react-primitive': 2.1.3(@types/react-dom@19.2.2(@types/react@19.2.2))(@types/react@19.2.2)(react-dom@19.2.0(react@19.2.0))(react@19.2.0) + '@radix-ui/react-roving-focus': 1.1.11(@types/react-dom@19.2.2(@types/react@19.2.2))(@types/react@19.2.2)(react-dom@19.2.0(react@19.2.0))(react@19.2.0) + '@radix-ui/react-use-controllable-state': 1.2.2(@types/react@19.2.2)(react@19.2.0) + '@radix-ui/react-use-previous': 1.1.1(@types/react@19.2.2)(react@19.2.0) + '@radix-ui/react-use-size': 1.1.1(@types/react@19.2.2)(react@19.2.0) + react: 19.2.0 + react-dom: 19.2.0(react@19.2.0) optionalDependencies: - '@types/react': 19.1.17 - '@types/react-dom': 19.1.11(@types/react@19.1.17) + '@types/react': 19.2.2 + '@types/react-dom': 19.2.2(@types/react@19.2.2) - '@radix-ui/react-primitive@2.0.2(@types/react-dom@19.1.11(@types/react@19.1.17))(@types/react@19.1.17)(react-dom@19.1.1(react@19.1.1))(react@19.1.1)': + '@radix-ui/react-roving-focus@1.1.11(@types/react-dom@19.2.2(@types/react@19.2.2))(@types/react@19.2.2)(react-dom@19.2.0(react@19.2.0))(react@19.2.0)': dependencies: - '@radix-ui/react-slot': 
1.1.2(@types/react@19.1.17)(react@19.1.1) - react: 19.1.1 - react-dom: 19.1.1(react@19.1.1) + '@radix-ui/primitive': 1.1.3 + '@radix-ui/react-collection': 1.1.7(@types/react-dom@19.2.2(@types/react@19.2.2))(@types/react@19.2.2)(react-dom@19.2.0(react@19.2.0))(react@19.2.0) + '@radix-ui/react-compose-refs': 1.1.2(@types/react@19.2.2)(react@19.2.0) + '@radix-ui/react-context': 1.1.2(@types/react@19.2.2)(react@19.2.0) + '@radix-ui/react-direction': 1.1.1(@types/react@19.2.2)(react@19.2.0) + '@radix-ui/react-id': 1.1.1(@types/react@19.2.2)(react@19.2.0) + '@radix-ui/react-primitive': 2.1.3(@types/react-dom@19.2.2(@types/react@19.2.2))(@types/react@19.2.2)(react-dom@19.2.0(react@19.2.0))(react@19.2.0) + '@radix-ui/react-use-callback-ref': 1.1.1(@types/react@19.2.2)(react@19.2.0) + '@radix-ui/react-use-controllable-state': 1.2.2(@types/react@19.2.2)(react@19.2.0) + react: 19.2.0 + react-dom: 19.2.0(react@19.2.0) optionalDependencies: - '@types/react': 19.1.17 - '@types/react-dom': 19.1.11(@types/react@19.1.17) + '@types/react': 19.2.2 + '@types/react-dom': 19.2.2(@types/react@19.2.2) - '@radix-ui/react-primitive@2.1.3(@types/react-dom@19.1.11(@types/react@19.1.17))(@types/react@19.1.17)(react-dom@19.1.1(react@19.1.1))(react@19.1.1)': + '@radix-ui/react-scroll-area@1.2.10(@types/react-dom@19.2.2(@types/react@19.2.2))(@types/react@19.2.2)(react-dom@19.2.0(react@19.2.0))(react@19.2.0)': dependencies: - '@radix-ui/react-slot': 1.2.3(@types/react@19.1.17)(react@19.1.1) - react: 19.1.1 - react-dom: 19.1.1(react@19.1.1) - optionalDependencies: - '@types/react': 19.1.17 - '@types/react-dom': 19.1.11(@types/react@19.1.17) - - '@radix-ui/react-radio-group@1.2.3(@types/react-dom@19.1.11(@types/react@19.1.17))(@types/react@19.1.17)(react-dom@19.1.1(react@19.1.1))(react@19.1.1)': - dependencies: - '@radix-ui/primitive': 1.1.1 - '@radix-ui/react-compose-refs': 1.1.1(@types/react@19.1.17)(react@19.1.1) - '@radix-ui/react-context': 1.1.1(@types/react@19.1.17)(react@19.1.1) - 
'@radix-ui/react-direction': 1.1.0(@types/react@19.1.17)(react@19.1.1) - '@radix-ui/react-presence': 1.1.2(@types/react-dom@19.1.11(@types/react@19.1.17))(@types/react@19.1.17)(react-dom@19.1.1(react@19.1.1))(react@19.1.1) - '@radix-ui/react-primitive': 2.0.2(@types/react-dom@19.1.11(@types/react@19.1.17))(@types/react@19.1.17)(react-dom@19.1.1(react@19.1.1))(react@19.1.1) - '@radix-ui/react-roving-focus': 1.1.2(@types/react-dom@19.1.11(@types/react@19.1.17))(@types/react@19.1.17)(react-dom@19.1.1(react@19.1.1))(react@19.1.1) - '@radix-ui/react-use-controllable-state': 1.1.0(@types/react@19.1.17)(react@19.1.1) - '@radix-ui/react-use-previous': 1.1.0(@types/react@19.1.17)(react@19.1.1) - '@radix-ui/react-use-size': 1.1.0(@types/react@19.1.17)(react@19.1.1) - react: 19.1.1 - react-dom: 19.1.1(react@19.1.1) - optionalDependencies: - '@types/react': 19.1.17 - '@types/react-dom': 19.1.11(@types/react@19.1.17) - - '@radix-ui/react-roving-focus@1.1.1(@types/react-dom@19.1.11(@types/react@19.1.17))(@types/react@19.1.17)(react-dom@19.1.1(react@19.1.1))(react@19.1.1)': - dependencies: - '@radix-ui/primitive': 1.1.1 - '@radix-ui/react-collection': 1.1.1(@types/react-dom@19.1.11(@types/react@19.1.17))(@types/react@19.1.17)(react-dom@19.1.1(react@19.1.1))(react@19.1.1) - '@radix-ui/react-compose-refs': 1.1.1(@types/react@19.1.17)(react@19.1.1) - '@radix-ui/react-context': 1.1.1(@types/react@19.1.17)(react@19.1.1) - '@radix-ui/react-direction': 1.1.0(@types/react@19.1.17)(react@19.1.1) - '@radix-ui/react-id': 1.1.0(@types/react@19.1.17)(react@19.1.1) - '@radix-ui/react-primitive': 2.0.1(@types/react-dom@19.1.11(@types/react@19.1.17))(@types/react@19.1.17)(react-dom@19.1.1(react@19.1.1))(react@19.1.1) - '@radix-ui/react-use-callback-ref': 1.1.0(@types/react@19.1.17)(react@19.1.1) - '@radix-ui/react-use-controllable-state': 1.1.0(@types/react@19.1.17)(react@19.1.1) - react: 19.1.1 - react-dom: 19.1.1(react@19.1.1) - optionalDependencies: - '@types/react': 19.1.17 - 
'@types/react-dom': 19.1.11(@types/react@19.1.17) - - '@radix-ui/react-roving-focus@1.1.2(@types/react-dom@19.1.11(@types/react@19.1.17))(@types/react@19.1.17)(react-dom@19.1.1(react@19.1.1))(react@19.1.1)': - dependencies: - '@radix-ui/primitive': 1.1.1 - '@radix-ui/react-collection': 1.1.2(@types/react-dom@19.1.11(@types/react@19.1.17))(@types/react@19.1.17)(react-dom@19.1.1(react@19.1.1))(react@19.1.1) - '@radix-ui/react-compose-refs': 1.1.1(@types/react@19.1.17)(react@19.1.1) - '@radix-ui/react-context': 1.1.1(@types/react@19.1.17)(react@19.1.1) - '@radix-ui/react-direction': 1.1.0(@types/react@19.1.17)(react@19.1.1) - '@radix-ui/react-id': 1.1.0(@types/react@19.1.17)(react@19.1.1) - '@radix-ui/react-primitive': 2.0.2(@types/react-dom@19.1.11(@types/react@19.1.17))(@types/react@19.1.17)(react-dom@19.1.1(react@19.1.1))(react@19.1.1) - '@radix-ui/react-use-callback-ref': 1.1.0(@types/react@19.1.17)(react@19.1.1) - '@radix-ui/react-use-controllable-state': 1.1.0(@types/react@19.1.17)(react@19.1.1) - react: 19.1.1 - react-dom: 19.1.1(react@19.1.1) - optionalDependencies: - '@types/react': 19.1.17 - '@types/react-dom': 19.1.11(@types/react@19.1.17) - - '@radix-ui/react-scroll-area@1.2.3(@types/react-dom@19.1.11(@types/react@19.1.17))(@types/react@19.1.17)(react-dom@19.1.1(react@19.1.1))(react@19.1.1)': - dependencies: - '@radix-ui/number': 1.1.0 - '@radix-ui/primitive': 1.1.1 - '@radix-ui/react-compose-refs': 1.1.1(@types/react@19.1.17)(react@19.1.1) - '@radix-ui/react-context': 1.1.1(@types/react@19.1.17)(react@19.1.1) - '@radix-ui/react-direction': 1.1.0(@types/react@19.1.17)(react@19.1.1) - '@radix-ui/react-presence': 1.1.2(@types/react-dom@19.1.11(@types/react@19.1.17))(@types/react@19.1.17)(react-dom@19.1.1(react@19.1.1))(react@19.1.1) - '@radix-ui/react-primitive': 2.0.2(@types/react-dom@19.1.11(@types/react@19.1.17))(@types/react@19.1.17)(react-dom@19.1.1(react@19.1.1))(react@19.1.1) - '@radix-ui/react-use-callback-ref': 
1.1.0(@types/react@19.1.17)(react@19.1.1) - '@radix-ui/react-use-layout-effect': 1.1.0(@types/react@19.1.17)(react@19.1.1) - react: 19.1.1 - react-dom: 19.1.1(react@19.1.1) + '@radix-ui/number': 1.1.1 + '@radix-ui/primitive': 1.1.3 + '@radix-ui/react-compose-refs': 1.1.2(@types/react@19.2.2)(react@19.2.0) + '@radix-ui/react-context': 1.1.2(@types/react@19.2.2)(react@19.2.0) + '@radix-ui/react-direction': 1.1.1(@types/react@19.2.2)(react@19.2.0) + '@radix-ui/react-presence': 1.1.5(@types/react-dom@19.2.2(@types/react@19.2.2))(@types/react@19.2.2)(react-dom@19.2.0(react@19.2.0))(react@19.2.0) + '@radix-ui/react-primitive': 2.1.3(@types/react-dom@19.2.2(@types/react@19.2.2))(@types/react@19.2.2)(react-dom@19.2.0(react@19.2.0))(react@19.2.0) + '@radix-ui/react-use-callback-ref': 1.1.1(@types/react@19.2.2)(react@19.2.0) + '@radix-ui/react-use-layout-effect': 1.1.1(@types/react@19.2.2)(react@19.2.0) + react: 19.2.0 + react-dom: 19.2.0(react@19.2.0) optionalDependencies: - '@types/react': 19.1.17 - '@types/react-dom': 19.1.11(@types/react@19.1.17) + '@types/react': 19.2.2 + '@types/react-dom': 19.2.2(@types/react@19.2.2) - '@radix-ui/react-select@2.2.6(@types/react-dom@19.1.11(@types/react@19.1.17))(@types/react@19.1.17)(react-dom@19.1.1(react@19.1.1))(react@19.1.1)': + '@radix-ui/react-select@2.2.6(@types/react-dom@19.2.2(@types/react@19.2.2))(@types/react@19.2.2)(react-dom@19.2.0(react@19.2.0))(react@19.2.0)': dependencies: '@radix-ui/number': 1.1.1 '@radix-ui/primitive': 1.1.3 - '@radix-ui/react-collection': 1.1.7(@types/react-dom@19.1.11(@types/react@19.1.17))(@types/react@19.1.17)(react-dom@19.1.1(react@19.1.1))(react@19.1.1) - '@radix-ui/react-compose-refs': 1.1.2(@types/react@19.1.17)(react@19.1.1) - '@radix-ui/react-context': 1.1.2(@types/react@19.1.17)(react@19.1.1) - '@radix-ui/react-direction': 1.1.1(@types/react@19.1.17)(react@19.1.1) - '@radix-ui/react-dismissable-layer': 
1.1.11(@types/react-dom@19.1.11(@types/react@19.1.17))(@types/react@19.1.17)(react-dom@19.1.1(react@19.1.1))(react@19.1.1) - '@radix-ui/react-focus-guards': 1.1.3(@types/react@19.1.17)(react@19.1.1) - '@radix-ui/react-focus-scope': 1.1.7(@types/react-dom@19.1.11(@types/react@19.1.17))(@types/react@19.1.17)(react-dom@19.1.1(react@19.1.1))(react@19.1.1) - '@radix-ui/react-id': 1.1.1(@types/react@19.1.17)(react@19.1.1) - '@radix-ui/react-popper': 1.2.8(@types/react-dom@19.1.11(@types/react@19.1.17))(@types/react@19.1.17)(react-dom@19.1.1(react@19.1.1))(react@19.1.1) - '@radix-ui/react-portal': 1.1.9(@types/react-dom@19.1.11(@types/react@19.1.17))(@types/react@19.1.17)(react-dom@19.1.1(react@19.1.1))(react@19.1.1) - '@radix-ui/react-primitive': 2.1.3(@types/react-dom@19.1.11(@types/react@19.1.17))(@types/react@19.1.17)(react-dom@19.1.1(react@19.1.1))(react@19.1.1) - '@radix-ui/react-slot': 1.2.3(@types/react@19.1.17)(react@19.1.1) - '@radix-ui/react-use-callback-ref': 1.1.1(@types/react@19.1.17)(react@19.1.1) - '@radix-ui/react-use-controllable-state': 1.2.2(@types/react@19.1.17)(react@19.1.1) - '@radix-ui/react-use-layout-effect': 1.1.1(@types/react@19.1.17)(react@19.1.1) - '@radix-ui/react-use-previous': 1.1.1(@types/react@19.1.17)(react@19.1.1) - '@radix-ui/react-visually-hidden': 1.2.3(@types/react-dom@19.1.11(@types/react@19.1.17))(@types/react@19.1.17)(react-dom@19.1.1(react@19.1.1))(react@19.1.1) + '@radix-ui/react-collection': 1.1.7(@types/react-dom@19.2.2(@types/react@19.2.2))(@types/react@19.2.2)(react-dom@19.2.0(react@19.2.0))(react@19.2.0) + '@radix-ui/react-compose-refs': 1.1.2(@types/react@19.2.2)(react@19.2.0) + '@radix-ui/react-context': 1.1.2(@types/react@19.2.2)(react@19.2.0) + '@radix-ui/react-direction': 1.1.1(@types/react@19.2.2)(react@19.2.0) + '@radix-ui/react-dismissable-layer': 1.1.11(@types/react-dom@19.2.2(@types/react@19.2.2))(@types/react@19.2.2)(react-dom@19.2.0(react@19.2.0))(react@19.2.0) + '@radix-ui/react-focus-guards': 
1.1.3(@types/react@19.2.2)(react@19.2.0) + '@radix-ui/react-focus-scope': 1.1.7(@types/react-dom@19.2.2(@types/react@19.2.2))(@types/react@19.2.2)(react-dom@19.2.0(react@19.2.0))(react@19.2.0) + '@radix-ui/react-id': 1.1.1(@types/react@19.2.2)(react@19.2.0) + '@radix-ui/react-popper': 1.2.8(@types/react-dom@19.2.2(@types/react@19.2.2))(@types/react@19.2.2)(react-dom@19.2.0(react@19.2.0))(react@19.2.0) + '@radix-ui/react-portal': 1.1.9(@types/react-dom@19.2.2(@types/react@19.2.2))(@types/react@19.2.2)(react-dom@19.2.0(react@19.2.0))(react@19.2.0) + '@radix-ui/react-primitive': 2.1.3(@types/react-dom@19.2.2(@types/react@19.2.2))(@types/react@19.2.2)(react-dom@19.2.0(react@19.2.0))(react@19.2.0) + '@radix-ui/react-slot': 1.2.3(@types/react@19.2.2)(react@19.2.0) + '@radix-ui/react-use-callback-ref': 1.1.1(@types/react@19.2.2)(react@19.2.0) + '@radix-ui/react-use-controllable-state': 1.2.2(@types/react@19.2.2)(react@19.2.0) + '@radix-ui/react-use-layout-effect': 1.1.1(@types/react@19.2.2)(react@19.2.0) + '@radix-ui/react-use-previous': 1.1.1(@types/react@19.2.2)(react@19.2.0) + '@radix-ui/react-visually-hidden': 1.2.3(@types/react-dom@19.2.2(@types/react@19.2.2))(@types/react@19.2.2)(react-dom@19.2.0(react@19.2.0))(react@19.2.0) aria-hidden: 1.2.6 - react: 19.1.1 - react-dom: 19.1.1(react@19.1.1) - react-remove-scroll: 2.7.1(@types/react@19.1.17)(react@19.1.1) - optionalDependencies: - '@types/react': 19.1.17 - '@types/react-dom': 19.1.11(@types/react@19.1.17) - - '@radix-ui/react-separator@1.1.7(@types/react-dom@19.1.11(@types/react@19.1.17))(@types/react@19.1.17)(react-dom@19.1.1(react@19.1.1))(react@19.1.1)': - dependencies: - '@radix-ui/react-primitive': 2.1.3(@types/react-dom@19.1.11(@types/react@19.1.17))(@types/react@19.1.17)(react-dom@19.1.1(react@19.1.1))(react@19.1.1) - react: 19.1.1 - react-dom: 19.1.1(react@19.1.1) - optionalDependencies: - '@types/react': 19.1.17 - '@types/react-dom': 19.1.11(@types/react@19.1.17) - - 
'@radix-ui/react-slider@1.2.2(@types/react-dom@19.1.11(@types/react@19.1.17))(@types/react@19.1.17)(react-dom@19.1.1(react@19.1.1))(react@19.1.1)': - dependencies: - '@radix-ui/number': 1.1.0 - '@radix-ui/primitive': 1.1.1 - '@radix-ui/react-collection': 1.1.1(@types/react-dom@19.1.11(@types/react@19.1.17))(@types/react@19.1.17)(react-dom@19.1.1(react@19.1.1))(react@19.1.1) - '@radix-ui/react-compose-refs': 1.1.1(@types/react@19.1.17)(react@19.1.1) - '@radix-ui/react-context': 1.1.1(@types/react@19.1.17)(react@19.1.1) - '@radix-ui/react-direction': 1.1.0(@types/react@19.1.17)(react@19.1.1) - '@radix-ui/react-primitive': 2.0.1(@types/react-dom@19.1.11(@types/react@19.1.17))(@types/react@19.1.17)(react-dom@19.1.1(react@19.1.1))(react@19.1.1) - '@radix-ui/react-use-controllable-state': 1.1.0(@types/react@19.1.17)(react@19.1.1) - '@radix-ui/react-use-layout-effect': 1.1.0(@types/react@19.1.17)(react@19.1.1) - '@radix-ui/react-use-previous': 1.1.0(@types/react@19.1.17)(react@19.1.1) - '@radix-ui/react-use-size': 1.1.0(@types/react@19.1.17)(react@19.1.1) - react: 19.1.1 - react-dom: 19.1.1(react@19.1.1) - optionalDependencies: - '@types/react': 19.1.17 - '@types/react-dom': 19.1.11(@types/react@19.1.17) - - '@radix-ui/react-slot@1.1.0(@types/react@19.1.17)(react@19.1.1)': - dependencies: - '@radix-ui/react-compose-refs': 1.1.0(@types/react@19.1.17)(react@19.1.1) - react: 19.1.1 - optionalDependencies: - '@types/react': 19.1.17 - - '@radix-ui/react-slot@1.1.1(@types/react@19.1.17)(react@19.1.1)': - dependencies: - '@radix-ui/react-compose-refs': 1.1.1(@types/react@19.1.17)(react@19.1.1) - react: 19.1.1 - optionalDependencies: - '@types/react': 19.1.17 - - '@radix-ui/react-slot@1.1.2(@types/react@19.1.17)(react@19.1.1)': - dependencies: - '@radix-ui/react-compose-refs': 1.1.1(@types/react@19.1.17)(react@19.1.1) - react: 19.1.1 - optionalDependencies: - '@types/react': 19.1.17 - - '@radix-ui/react-slot@1.2.3(@types/react@19.1.17)(react@19.1.1)': - dependencies: - 
'@radix-ui/react-compose-refs': 1.1.2(@types/react@19.1.17)(react@19.1.1) - react: 19.1.1 - optionalDependencies: - '@types/react': 19.1.17 - - '@radix-ui/react-switch@1.1.1(@types/react-dom@19.1.11(@types/react@19.1.17))(@types/react@19.1.17)(react-dom@19.1.1(react@19.1.1))(react@19.1.1)': - dependencies: - '@radix-ui/primitive': 1.1.0 - '@radix-ui/react-compose-refs': 1.1.0(@types/react@19.1.17)(react@19.1.1) - '@radix-ui/react-context': 1.1.1(@types/react@19.1.17)(react@19.1.1) - '@radix-ui/react-primitive': 2.0.0(@types/react-dom@19.1.11(@types/react@19.1.17))(@types/react@19.1.17)(react-dom@19.1.1(react@19.1.1))(react@19.1.1) - '@radix-ui/react-use-controllable-state': 1.1.0(@types/react@19.1.17)(react@19.1.1) - '@radix-ui/react-use-previous': 1.1.0(@types/react@19.1.17)(react@19.1.1) - '@radix-ui/react-use-size': 1.1.0(@types/react@19.1.17)(react@19.1.1) - react: 19.1.1 - react-dom: 19.1.1(react@19.1.1) - optionalDependencies: - '@types/react': 19.1.17 - '@types/react-dom': 19.1.11(@types/react@19.1.17) - - '@radix-ui/react-tooltip@1.1.7(@types/react-dom@19.1.11(@types/react@19.1.17))(@types/react@19.1.17)(react-dom@19.1.1(react@19.1.1))(react@19.1.1)': - dependencies: - '@radix-ui/primitive': 1.1.1 - '@radix-ui/react-compose-refs': 1.1.1(@types/react@19.1.17)(react@19.1.1) - '@radix-ui/react-context': 1.1.1(@types/react@19.1.17)(react@19.1.1) - '@radix-ui/react-dismissable-layer': 1.1.4(@types/react-dom@19.1.11(@types/react@19.1.17))(@types/react@19.1.17)(react-dom@19.1.1(react@19.1.1))(react@19.1.1) - '@radix-ui/react-id': 1.1.0(@types/react@19.1.17)(react@19.1.1) - '@radix-ui/react-popper': 1.2.1(@types/react-dom@19.1.11(@types/react@19.1.17))(@types/react@19.1.17)(react-dom@19.1.1(react@19.1.1))(react@19.1.1) - '@radix-ui/react-portal': 1.1.3(@types/react-dom@19.1.11(@types/react@19.1.17))(@types/react@19.1.17)(react-dom@19.1.1(react@19.1.1))(react@19.1.1) - '@radix-ui/react-presence': 
1.1.2(@types/react-dom@19.1.11(@types/react@19.1.17))(@types/react@19.1.17)(react-dom@19.1.1(react@19.1.1))(react@19.1.1) - '@radix-ui/react-primitive': 2.0.1(@types/react-dom@19.1.11(@types/react@19.1.17))(@types/react@19.1.17)(react-dom@19.1.1(react@19.1.1))(react@19.1.1) - '@radix-ui/react-slot': 1.1.1(@types/react@19.1.17)(react@19.1.1) - '@radix-ui/react-use-controllable-state': 1.1.0(@types/react@19.1.17)(react@19.1.1) - '@radix-ui/react-visually-hidden': 1.1.1(@types/react-dom@19.1.11(@types/react@19.1.17))(@types/react@19.1.17)(react-dom@19.1.1(react@19.1.1))(react@19.1.1) - react: 19.1.1 - react-dom: 19.1.1(react@19.1.1) + react: 19.2.0 + react-dom: 19.2.0(react@19.2.0) + react-remove-scroll: 2.7.1(@types/react@19.2.2)(react@19.2.0) optionalDependencies: - '@types/react': 19.1.17 - '@types/react-dom': 19.1.11(@types/react@19.1.17) + '@types/react': 19.2.2 + '@types/react-dom': 19.2.2(@types/react@19.2.2) - '@radix-ui/react-use-callback-ref@1.1.0(@types/react@19.1.17)(react@19.1.1)': + '@radix-ui/react-separator@1.1.7(@types/react-dom@19.2.2(@types/react@19.2.2))(@types/react@19.2.2)(react-dom@19.2.0(react@19.2.0))(react@19.2.0)': dependencies: - react: 19.1.1 + '@radix-ui/react-primitive': 2.1.3(@types/react-dom@19.2.2(@types/react@19.2.2))(@types/react@19.2.2)(react-dom@19.2.0(react@19.2.0))(react@19.2.0) + react: 19.2.0 + react-dom: 19.2.0(react@19.2.0) optionalDependencies: - '@types/react': 19.1.17 + '@types/react': 19.2.2 + '@types/react-dom': 19.2.2(@types/react@19.2.2) - '@radix-ui/react-use-callback-ref@1.1.1(@types/react@19.1.17)(react@19.1.1)': + '@radix-ui/react-slider@1.3.6(@types/react-dom@19.2.2(@types/react@19.2.2))(@types/react@19.2.2)(react-dom@19.2.0(react@19.2.0))(react@19.2.0)': dependencies: - react: 19.1.1 + '@radix-ui/number': 1.1.1 + '@radix-ui/primitive': 1.1.3 + '@radix-ui/react-collection': 1.1.7(@types/react-dom@19.2.2(@types/react@19.2.2))(@types/react@19.2.2)(react-dom@19.2.0(react@19.2.0))(react@19.2.0) + 
'@radix-ui/react-compose-refs': 1.1.2(@types/react@19.2.2)(react@19.2.0) + '@radix-ui/react-context': 1.1.2(@types/react@19.2.2)(react@19.2.0) + '@radix-ui/react-direction': 1.1.1(@types/react@19.2.2)(react@19.2.0) + '@radix-ui/react-primitive': 2.1.3(@types/react-dom@19.2.2(@types/react@19.2.2))(@types/react@19.2.2)(react-dom@19.2.0(react@19.2.0))(react@19.2.0) + '@radix-ui/react-use-controllable-state': 1.2.2(@types/react@19.2.2)(react@19.2.0) + '@radix-ui/react-use-layout-effect': 1.1.1(@types/react@19.2.2)(react@19.2.0) + '@radix-ui/react-use-previous': 1.1.1(@types/react@19.2.2)(react@19.2.0) + '@radix-ui/react-use-size': 1.1.1(@types/react@19.2.2)(react@19.2.0) + react: 19.2.0 + react-dom: 19.2.0(react@19.2.0) optionalDependencies: - '@types/react': 19.1.17 + '@types/react': 19.2.2 + '@types/react-dom': 19.2.2(@types/react@19.2.2) - '@radix-ui/react-use-controllable-state@1.1.0(@types/react@19.1.17)(react@19.1.1)': + '@radix-ui/react-slot@1.2.3(@types/react@19.2.2)(react@19.2.0)': dependencies: - '@radix-ui/react-use-callback-ref': 1.1.0(@types/react@19.1.17)(react@19.1.1) - react: 19.1.1 + '@radix-ui/react-compose-refs': 1.1.2(@types/react@19.2.2)(react@19.2.0) + react: 19.2.0 optionalDependencies: - '@types/react': 19.1.17 + '@types/react': 19.2.2 - '@radix-ui/react-use-controllable-state@1.2.2(@types/react@19.1.17)(react@19.1.1)': + '@radix-ui/react-switch@1.2.6(@types/react-dom@19.2.2(@types/react@19.2.2))(@types/react@19.2.2)(react-dom@19.2.0(react@19.2.0))(react@19.2.0)': dependencies: - '@radix-ui/react-use-effect-event': 0.0.2(@types/react@19.1.17)(react@19.1.1) - '@radix-ui/react-use-layout-effect': 1.1.1(@types/react@19.1.17)(react@19.1.1) - react: 19.1.1 + '@radix-ui/primitive': 1.1.3 + '@radix-ui/react-compose-refs': 1.1.2(@types/react@19.2.2)(react@19.2.0) + '@radix-ui/react-context': 1.1.2(@types/react@19.2.2)(react@19.2.0) + '@radix-ui/react-primitive': 
2.1.3(@types/react-dom@19.2.2(@types/react@19.2.2))(@types/react@19.2.2)(react-dom@19.2.0(react@19.2.0))(react@19.2.0) + '@radix-ui/react-use-controllable-state': 1.2.2(@types/react@19.2.2)(react@19.2.0) + '@radix-ui/react-use-previous': 1.1.1(@types/react@19.2.2)(react@19.2.0) + '@radix-ui/react-use-size': 1.1.1(@types/react@19.2.2)(react@19.2.0) + react: 19.2.0 + react-dom: 19.2.0(react@19.2.0) optionalDependencies: - '@types/react': 19.1.17 + '@types/react': 19.2.2 + '@types/react-dom': 19.2.2(@types/react@19.2.2) - '@radix-ui/react-use-effect-event@0.0.2(@types/react@19.1.17)(react@19.1.1)': + '@radix-ui/react-tooltip@1.2.8(@types/react-dom@19.2.2(@types/react@19.2.2))(@types/react@19.2.2)(react-dom@19.2.0(react@19.2.0))(react@19.2.0)': dependencies: - '@radix-ui/react-use-layout-effect': 1.1.1(@types/react@19.1.17)(react@19.1.1) - react: 19.1.1 + '@radix-ui/primitive': 1.1.3 + '@radix-ui/react-compose-refs': 1.1.2(@types/react@19.2.2)(react@19.2.0) + '@radix-ui/react-context': 1.1.2(@types/react@19.2.2)(react@19.2.0) + '@radix-ui/react-dismissable-layer': 1.1.11(@types/react-dom@19.2.2(@types/react@19.2.2))(@types/react@19.2.2)(react-dom@19.2.0(react@19.2.0))(react@19.2.0) + '@radix-ui/react-id': 1.1.1(@types/react@19.2.2)(react@19.2.0) + '@radix-ui/react-popper': 1.2.8(@types/react-dom@19.2.2(@types/react@19.2.2))(@types/react@19.2.2)(react-dom@19.2.0(react@19.2.0))(react@19.2.0) + '@radix-ui/react-portal': 1.1.9(@types/react-dom@19.2.2(@types/react@19.2.2))(@types/react@19.2.2)(react-dom@19.2.0(react@19.2.0))(react@19.2.0) + '@radix-ui/react-presence': 1.1.5(@types/react-dom@19.2.2(@types/react@19.2.2))(@types/react@19.2.2)(react-dom@19.2.0(react@19.2.0))(react@19.2.0) + '@radix-ui/react-primitive': 2.1.3(@types/react-dom@19.2.2(@types/react@19.2.2))(@types/react@19.2.2)(react-dom@19.2.0(react@19.2.0))(react@19.2.0) + '@radix-ui/react-slot': 1.2.3(@types/react@19.2.2)(react@19.2.0) + '@radix-ui/react-use-controllable-state': 
1.2.2(@types/react@19.2.2)(react@19.2.0) + '@radix-ui/react-visually-hidden': 1.2.3(@types/react-dom@19.2.2(@types/react@19.2.2))(@types/react@19.2.2)(react-dom@19.2.0(react@19.2.0))(react@19.2.0) + react: 19.2.0 + react-dom: 19.2.0(react@19.2.0) optionalDependencies: - '@types/react': 19.1.17 + '@types/react': 19.2.2 + '@types/react-dom': 19.2.2(@types/react@19.2.2) - '@radix-ui/react-use-escape-keydown@1.1.0(@types/react@19.1.17)(react@19.1.1)': + '@radix-ui/react-use-callback-ref@1.1.1(@types/react@19.2.2)(react@19.2.0)': dependencies: - '@radix-ui/react-use-callback-ref': 1.1.0(@types/react@19.1.17)(react@19.1.1) - react: 19.1.1 + react: 19.2.0 optionalDependencies: - '@types/react': 19.1.17 + '@types/react': 19.2.2 - '@radix-ui/react-use-escape-keydown@1.1.1(@types/react@19.1.17)(react@19.1.1)': + '@radix-ui/react-use-controllable-state@1.2.2(@types/react@19.2.2)(react@19.2.0)': dependencies: - '@radix-ui/react-use-callback-ref': 1.1.1(@types/react@19.1.17)(react@19.1.1) - react: 19.1.1 + '@radix-ui/react-use-effect-event': 0.0.2(@types/react@19.2.2)(react@19.2.0) + '@radix-ui/react-use-layout-effect': 1.1.1(@types/react@19.2.2)(react@19.2.0) + react: 19.2.0 optionalDependencies: - '@types/react': 19.1.17 + '@types/react': 19.2.2 - '@radix-ui/react-use-layout-effect@1.1.0(@types/react@19.1.17)(react@19.1.1)': + '@radix-ui/react-use-effect-event@0.0.2(@types/react@19.2.2)(react@19.2.0)': dependencies: - react: 19.1.1 + '@radix-ui/react-use-layout-effect': 1.1.1(@types/react@19.2.2)(react@19.2.0) + react: 19.2.0 optionalDependencies: - '@types/react': 19.1.17 + '@types/react': 19.2.2 - '@radix-ui/react-use-layout-effect@1.1.1(@types/react@19.1.17)(react@19.1.1)': + '@radix-ui/react-use-escape-keydown@1.1.1(@types/react@19.2.2)(react@19.2.0)': dependencies: - react: 19.1.1 + '@radix-ui/react-use-callback-ref': 1.1.1(@types/react@19.2.2)(react@19.2.0) + react: 19.2.0 optionalDependencies: - '@types/react': 19.1.17 + '@types/react': 19.2.2 - 
'@radix-ui/react-use-previous@1.1.0(@types/react@19.1.17)(react@19.1.1)': + '@radix-ui/react-use-is-hydrated@0.1.0(@types/react@19.2.2)(react@19.2.0)': dependencies: - react: 19.1.1 + react: 19.2.0 + use-sync-external-store: 1.6.0(react@19.2.0) optionalDependencies: - '@types/react': 19.1.17 + '@types/react': 19.2.2 - '@radix-ui/react-use-previous@1.1.1(@types/react@19.1.17)(react@19.1.1)': + '@radix-ui/react-use-layout-effect@1.1.1(@types/react@19.2.2)(react@19.2.0)': dependencies: - react: 19.1.1 + react: 19.2.0 optionalDependencies: - '@types/react': 19.1.17 + '@types/react': 19.2.2 - '@radix-ui/react-use-rect@1.1.0(@types/react@19.1.17)(react@19.1.1)': + '@radix-ui/react-use-previous@1.1.1(@types/react@19.2.2)(react@19.2.0)': dependencies: - '@radix-ui/rect': 1.1.0 - react: 19.1.1 + react: 19.2.0 optionalDependencies: - '@types/react': 19.1.17 + '@types/react': 19.2.2 - '@radix-ui/react-use-rect@1.1.1(@types/react@19.1.17)(react@19.1.1)': + '@radix-ui/react-use-rect@1.1.1(@types/react@19.2.2)(react@19.2.0)': dependencies: '@radix-ui/rect': 1.1.1 - react: 19.1.1 + react: 19.2.0 optionalDependencies: - '@types/react': 19.1.17 + '@types/react': 19.2.2 - '@radix-ui/react-use-size@1.1.0(@types/react@19.1.17)(react@19.1.1)': + '@radix-ui/react-use-size@1.1.1(@types/react@19.2.2)(react@19.2.0)': dependencies: - '@radix-ui/react-use-layout-effect': 1.1.0(@types/react@19.1.17)(react@19.1.1) - react: 19.1.1 + '@radix-ui/react-use-layout-effect': 1.1.1(@types/react@19.2.2)(react@19.2.0) + react: 19.2.0 optionalDependencies: - '@types/react': 19.1.17 + '@types/react': 19.2.2 - '@radix-ui/react-use-size@1.1.1(@types/react@19.1.17)(react@19.1.1)': + '@radix-ui/react-visually-hidden@1.2.3(@types/react-dom@19.2.2(@types/react@19.2.2))(@types/react@19.2.2)(react-dom@19.2.0(react@19.2.0))(react@19.2.0)': dependencies: - '@radix-ui/react-use-layout-effect': 1.1.1(@types/react@19.1.17)(react@19.1.1) - react: 19.1.1 + '@radix-ui/react-primitive': 
2.1.3(@types/react-dom@19.2.2(@types/react@19.2.2))(@types/react@19.2.2)(react-dom@19.2.0(react@19.2.0))(react@19.2.0) + react: 19.2.0 + react-dom: 19.2.0(react@19.2.0) optionalDependencies: - '@types/react': 19.1.17 - - '@radix-ui/react-visually-hidden@1.1.1(@types/react-dom@19.1.11(@types/react@19.1.17))(@types/react@19.1.17)(react-dom@19.1.1(react@19.1.1))(react@19.1.1)': - dependencies: - '@radix-ui/react-primitive': 2.0.1(@types/react-dom@19.1.11(@types/react@19.1.17))(@types/react@19.1.17)(react-dom@19.1.1(react@19.1.1))(react@19.1.1) - react: 19.1.1 - react-dom: 19.1.1(react@19.1.1) - optionalDependencies: - '@types/react': 19.1.17 - '@types/react-dom': 19.1.11(@types/react@19.1.17) - - '@radix-ui/react-visually-hidden@1.2.3(@types/react-dom@19.1.11(@types/react@19.1.17))(@types/react@19.1.17)(react-dom@19.1.1(react@19.1.1))(react@19.1.1)': - dependencies: - '@radix-ui/react-primitive': 2.1.3(@types/react-dom@19.1.11(@types/react@19.1.17))(@types/react@19.1.17)(react-dom@19.1.1(react@19.1.1))(react@19.1.1) - react: 19.1.1 - react-dom: 19.1.1(react@19.1.1) - optionalDependencies: - '@types/react': 19.1.17 - '@types/react-dom': 19.1.11(@types/react@19.1.17) - - '@radix-ui/rect@1.1.0': {} + '@types/react': 19.2.2 + '@types/react-dom': 19.2.2(@types/react@19.2.2) '@radix-ui/rect@1.1.1': {} - '@rolldown/pluginutils@1.0.0-beta.38': {} + '@rolldown/pluginutils@1.0.0-beta.43': {} '@rollup/pluginutils@5.0.5(rollup@4.52.5)': dependencies: @@ -8640,83 +8099,85 @@ snapshots: dependencies: '@sinonjs/commons': 3.0.0 - '@storybook/addon-docs@9.1.2(@types/react@19.1.17)(storybook@9.1.2(@testing-library/dom@10.4.0)(msw@2.4.8(typescript@5.6.3))(prettier@3.4.1)(vite@7.1.11(@types/node@20.17.16)(jiti@1.21.7)(yaml@2.7.0)))': + '@standard-schema/spec@1.0.0': {} + + '@storybook/addon-docs@9.1.2(@types/react@19.2.2)(storybook@9.1.16(@testing-library/dom@10.4.0)(msw@2.4.8(typescript@5.6.3))(prettier@3.4.1)(vite@7.1.12(@types/node@20.17.16)(jiti@1.21.7)(yaml@2.7.0)))': dependencies: 
- '@mdx-js/react': 3.0.1(@types/react@19.1.17)(react@19.1.1) - '@storybook/csf-plugin': 9.1.2(storybook@9.1.2(@testing-library/dom@10.4.0)(msw@2.4.8(typescript@5.6.3))(prettier@3.4.1)(vite@7.1.11(@types/node@20.17.16)(jiti@1.21.7)(yaml@2.7.0))) - '@storybook/icons': 1.4.0(react-dom@19.1.1(react@19.1.1))(react@19.1.1) - '@storybook/react-dom-shim': 9.1.2(react-dom@19.1.1(react@19.1.1))(react@19.1.1)(storybook@9.1.2(@testing-library/dom@10.4.0)(msw@2.4.8(typescript@5.6.3))(prettier@3.4.1)(vite@7.1.11(@types/node@20.17.16)(jiti@1.21.7)(yaml@2.7.0))) - react: 19.1.1 - react-dom: 19.1.1(react@19.1.1) - storybook: 9.1.2(@testing-library/dom@10.4.0)(msw@2.4.8(typescript@5.6.3))(prettier@3.4.1)(vite@7.1.11(@types/node@20.17.16)(jiti@1.21.7)(yaml@2.7.0)) + '@mdx-js/react': 3.0.1(@types/react@19.2.2)(react@19.2.0) + '@storybook/csf-plugin': 9.1.2(storybook@9.1.16(@testing-library/dom@10.4.0)(msw@2.4.8(typescript@5.6.3))(prettier@3.4.1)(vite@7.1.12(@types/node@20.17.16)(jiti@1.21.7)(yaml@2.7.0))) + '@storybook/icons': 1.4.0(react-dom@19.2.0(react@19.2.0))(react@19.2.0) + '@storybook/react-dom-shim': 9.1.2(react-dom@19.2.0(react@19.2.0))(react@19.2.0)(storybook@9.1.16(@testing-library/dom@10.4.0)(msw@2.4.8(typescript@5.6.3))(prettier@3.4.1)(vite@7.1.12(@types/node@20.17.16)(jiti@1.21.7)(yaml@2.7.0))) + react: 19.2.0 + react-dom: 19.2.0(react@19.2.0) + storybook: 9.1.16(@testing-library/dom@10.4.0)(msw@2.4.8(typescript@5.6.3))(prettier@3.4.1)(vite@7.1.12(@types/node@20.17.16)(jiti@1.21.7)(yaml@2.7.0)) ts-dedent: 2.2.0 transitivePeerDependencies: - '@types/react' - '@storybook/addon-links@9.1.2(react@19.1.1)(storybook@9.1.2(@testing-library/dom@10.4.0)(msw@2.4.8(typescript@5.6.3))(prettier@3.4.1)(vite@7.1.11(@types/node@20.17.16)(jiti@1.21.7)(yaml@2.7.0)))': + '@storybook/addon-links@9.1.2(react@19.2.0)(storybook@9.1.16(@testing-library/dom@10.4.0)(msw@2.4.8(typescript@5.6.3))(prettier@3.4.1)(vite@7.1.12(@types/node@20.17.16)(jiti@1.21.7)(yaml@2.7.0)))': dependencies: 
'@storybook/global': 5.0.0 - storybook: 9.1.2(@testing-library/dom@10.4.0)(msw@2.4.8(typescript@5.6.3))(prettier@3.4.1)(vite@7.1.11(@types/node@20.17.16)(jiti@1.21.7)(yaml@2.7.0)) + storybook: 9.1.16(@testing-library/dom@10.4.0)(msw@2.4.8(typescript@5.6.3))(prettier@3.4.1)(vite@7.1.12(@types/node@20.17.16)(jiti@1.21.7)(yaml@2.7.0)) optionalDependencies: - react: 19.1.1 + react: 19.2.0 - '@storybook/addon-themes@9.1.2(storybook@9.1.2(@testing-library/dom@10.4.0)(msw@2.4.8(typescript@5.6.3))(prettier@3.4.1)(vite@7.1.11(@types/node@20.17.16)(jiti@1.21.7)(yaml@2.7.0)))': + '@storybook/addon-themes@9.1.2(storybook@9.1.16(@testing-library/dom@10.4.0)(msw@2.4.8(typescript@5.6.3))(prettier@3.4.1)(vite@7.1.12(@types/node@20.17.16)(jiti@1.21.7)(yaml@2.7.0)))': dependencies: - storybook: 9.1.2(@testing-library/dom@10.4.0)(msw@2.4.8(typescript@5.6.3))(prettier@3.4.1)(vite@7.1.11(@types/node@20.17.16)(jiti@1.21.7)(yaml@2.7.0)) + storybook: 9.1.16(@testing-library/dom@10.4.0)(msw@2.4.8(typescript@5.6.3))(prettier@3.4.1)(vite@7.1.12(@types/node@20.17.16)(jiti@1.21.7)(yaml@2.7.0)) ts-dedent: 2.2.0 - '@storybook/builder-vite@9.1.2(storybook@9.1.2(@testing-library/dom@10.4.0)(msw@2.4.8(typescript@5.6.3))(prettier@3.4.1)(vite@7.1.11(@types/node@20.17.16)(jiti@1.21.7)(yaml@2.7.0)))(vite@7.1.11(@types/node@20.17.16)(jiti@1.21.7)(yaml@2.7.0))': + '@storybook/builder-vite@9.1.2(storybook@9.1.16(@testing-library/dom@10.4.0)(msw@2.4.8(typescript@5.6.3))(prettier@3.4.1)(vite@7.1.12(@types/node@20.17.16)(jiti@1.21.7)(yaml@2.7.0)))(vite@7.1.12(@types/node@20.17.16)(jiti@1.21.7)(yaml@2.7.0))': dependencies: - '@storybook/csf-plugin': 9.1.2(storybook@9.1.2(@testing-library/dom@10.4.0)(msw@2.4.8(typescript@5.6.3))(prettier@3.4.1)(vite@7.1.11(@types/node@20.17.16)(jiti@1.21.7)(yaml@2.7.0))) - storybook: 9.1.2(@testing-library/dom@10.4.0)(msw@2.4.8(typescript@5.6.3))(prettier@3.4.1)(vite@7.1.11(@types/node@20.17.16)(jiti@1.21.7)(yaml@2.7.0)) + '@storybook/csf-plugin': 
9.1.2(storybook@9.1.16(@testing-library/dom@10.4.0)(msw@2.4.8(typescript@5.6.3))(prettier@3.4.1)(vite@7.1.12(@types/node@20.17.16)(jiti@1.21.7)(yaml@2.7.0))) + storybook: 9.1.16(@testing-library/dom@10.4.0)(msw@2.4.8(typescript@5.6.3))(prettier@3.4.1)(vite@7.1.12(@types/node@20.17.16)(jiti@1.21.7)(yaml@2.7.0)) ts-dedent: 2.2.0 - vite: 7.1.11(@types/node@20.17.16)(jiti@1.21.7)(yaml@2.7.0) + vite: 7.1.12(@types/node@20.17.16)(jiti@1.21.7)(yaml@2.7.0) - '@storybook/csf-plugin@9.1.2(storybook@9.1.2(@testing-library/dom@10.4.0)(msw@2.4.8(typescript@5.6.3))(prettier@3.4.1)(vite@7.1.11(@types/node@20.17.16)(jiti@1.21.7)(yaml@2.7.0)))': + '@storybook/csf-plugin@9.1.2(storybook@9.1.16(@testing-library/dom@10.4.0)(msw@2.4.8(typescript@5.6.3))(prettier@3.4.1)(vite@7.1.12(@types/node@20.17.16)(jiti@1.21.7)(yaml@2.7.0)))': dependencies: - storybook: 9.1.2(@testing-library/dom@10.4.0)(msw@2.4.8(typescript@5.6.3))(prettier@3.4.1)(vite@7.1.11(@types/node@20.17.16)(jiti@1.21.7)(yaml@2.7.0)) + storybook: 9.1.16(@testing-library/dom@10.4.0)(msw@2.4.8(typescript@5.6.3))(prettier@3.4.1)(vite@7.1.12(@types/node@20.17.16)(jiti@1.21.7)(yaml@2.7.0)) unplugin: 1.5.0 '@storybook/global@5.0.0': {} - '@storybook/icons@1.4.0(react-dom@19.1.1(react@19.1.1))(react@19.1.1)': + '@storybook/icons@1.4.0(react-dom@19.2.0(react@19.2.0))(react@19.2.0)': dependencies: - react: 19.1.1 - react-dom: 19.1.1(react@19.1.1) + react: 19.2.0 + react-dom: 19.2.0(react@19.2.0) - '@storybook/react-dom-shim@9.1.2(react-dom@19.1.1(react@19.1.1))(react@19.1.1)(storybook@9.1.2(@testing-library/dom@10.4.0)(msw@2.4.8(typescript@5.6.3))(prettier@3.4.1)(vite@7.1.11(@types/node@20.17.16)(jiti@1.21.7)(yaml@2.7.0)))': + '@storybook/react-dom-shim@9.1.2(react-dom@19.2.0(react@19.2.0))(react@19.2.0)(storybook@9.1.16(@testing-library/dom@10.4.0)(msw@2.4.8(typescript@5.6.3))(prettier@3.4.1)(vite@7.1.12(@types/node@20.17.16)(jiti@1.21.7)(yaml@2.7.0)))': dependencies: - react: 19.1.1 - react-dom: 19.1.1(react@19.1.1) - storybook: 
9.1.2(@testing-library/dom@10.4.0)(msw@2.4.8(typescript@5.6.3))(prettier@3.4.1)(vite@7.1.11(@types/node@20.17.16)(jiti@1.21.7)(yaml@2.7.0)) + react: 19.2.0 + react-dom: 19.2.0(react@19.2.0) + storybook: 9.1.16(@testing-library/dom@10.4.0)(msw@2.4.8(typescript@5.6.3))(prettier@3.4.1)(vite@7.1.12(@types/node@20.17.16)(jiti@1.21.7)(yaml@2.7.0)) - '@storybook/react-vite@9.1.2(react-dom@19.1.1(react@19.1.1))(react@19.1.1)(rollup@4.52.5)(storybook@9.1.2(@testing-library/dom@10.4.0)(msw@2.4.8(typescript@5.6.3))(prettier@3.4.1)(vite@7.1.11(@types/node@20.17.16)(jiti@1.21.7)(yaml@2.7.0)))(typescript@5.6.3)(vite@7.1.11(@types/node@20.17.16)(jiti@1.21.7)(yaml@2.7.0))': + '@storybook/react-vite@9.1.2(react-dom@19.2.0(react@19.2.0))(react@19.2.0)(rollup@4.52.5)(storybook@9.1.16(@testing-library/dom@10.4.0)(msw@2.4.8(typescript@5.6.3))(prettier@3.4.1)(vite@7.1.12(@types/node@20.17.16)(jiti@1.21.7)(yaml@2.7.0)))(typescript@5.6.3)(vite@7.1.12(@types/node@20.17.16)(jiti@1.21.7)(yaml@2.7.0))': dependencies: - '@joshwooding/vite-plugin-react-docgen-typescript': 0.6.1(typescript@5.6.3)(vite@7.1.11(@types/node@20.17.16)(jiti@1.21.7)(yaml@2.7.0)) + '@joshwooding/vite-plugin-react-docgen-typescript': 0.6.1(typescript@5.6.3)(vite@7.1.12(@types/node@20.17.16)(jiti@1.21.7)(yaml@2.7.0)) '@rollup/pluginutils': 5.0.5(rollup@4.52.5) - '@storybook/builder-vite': 9.1.2(storybook@9.1.2(@testing-library/dom@10.4.0)(msw@2.4.8(typescript@5.6.3))(prettier@3.4.1)(vite@7.1.11(@types/node@20.17.16)(jiti@1.21.7)(yaml@2.7.0)))(vite@7.1.11(@types/node@20.17.16)(jiti@1.21.7)(yaml@2.7.0)) - '@storybook/react': 9.1.2(react-dom@19.1.1(react@19.1.1))(react@19.1.1)(storybook@9.1.2(@testing-library/dom@10.4.0)(msw@2.4.8(typescript@5.6.3))(prettier@3.4.1)(vite@7.1.11(@types/node@20.17.16)(jiti@1.21.7)(yaml@2.7.0)))(typescript@5.6.3) + '@storybook/builder-vite': 
9.1.2(storybook@9.1.16(@testing-library/dom@10.4.0)(msw@2.4.8(typescript@5.6.3))(prettier@3.4.1)(vite@7.1.12(@types/node@20.17.16)(jiti@1.21.7)(yaml@2.7.0)))(vite@7.1.12(@types/node@20.17.16)(jiti@1.21.7)(yaml@2.7.0)) + '@storybook/react': 9.1.2(react-dom@19.2.0(react@19.2.0))(react@19.2.0)(storybook@9.1.16(@testing-library/dom@10.4.0)(msw@2.4.8(typescript@5.6.3))(prettier@3.4.1)(vite@7.1.12(@types/node@20.17.16)(jiti@1.21.7)(yaml@2.7.0)))(typescript@5.6.3) find-up: 7.0.0 magic-string: 0.30.17 - react: 19.1.1 + react: 19.2.0 react-docgen: 8.0.0 - react-dom: 19.1.1(react@19.1.1) + react-dom: 19.2.0(react@19.2.0) resolve: 1.22.10 - storybook: 9.1.2(@testing-library/dom@10.4.0)(msw@2.4.8(typescript@5.6.3))(prettier@3.4.1)(vite@7.1.11(@types/node@20.17.16)(jiti@1.21.7)(yaml@2.7.0)) + storybook: 9.1.16(@testing-library/dom@10.4.0)(msw@2.4.8(typescript@5.6.3))(prettier@3.4.1)(vite@7.1.12(@types/node@20.17.16)(jiti@1.21.7)(yaml@2.7.0)) tsconfig-paths: 4.2.0 - vite: 7.1.11(@types/node@20.17.16)(jiti@1.21.7)(yaml@2.7.0) + vite: 7.1.12(@types/node@20.17.16)(jiti@1.21.7)(yaml@2.7.0) transitivePeerDependencies: - rollup - supports-color - typescript - '@storybook/react@9.1.2(react-dom@19.1.1(react@19.1.1))(react@19.1.1)(storybook@9.1.2(@testing-library/dom@10.4.0)(msw@2.4.8(typescript@5.6.3))(prettier@3.4.1)(vite@7.1.11(@types/node@20.17.16)(jiti@1.21.7)(yaml@2.7.0)))(typescript@5.6.3)': + '@storybook/react@9.1.2(react-dom@19.2.0(react@19.2.0))(react@19.2.0)(storybook@9.1.16(@testing-library/dom@10.4.0)(msw@2.4.8(typescript@5.6.3))(prettier@3.4.1)(vite@7.1.12(@types/node@20.17.16)(jiti@1.21.7)(yaml@2.7.0)))(typescript@5.6.3)': dependencies: '@storybook/global': 5.0.0 - '@storybook/react-dom-shim': 9.1.2(react-dom@19.1.1(react@19.1.1))(react@19.1.1)(storybook@9.1.2(@testing-library/dom@10.4.0)(msw@2.4.8(typescript@5.6.3))(prettier@3.4.1)(vite@7.1.11(@types/node@20.17.16)(jiti@1.21.7)(yaml@2.7.0))) - react: 19.1.1 - react-dom: 19.1.1(react@19.1.1) - storybook: 
9.1.2(@testing-library/dom@10.4.0)(msw@2.4.8(typescript@5.6.3))(prettier@3.4.1)(vite@7.1.11(@types/node@20.17.16)(jiti@1.21.7)(yaml@2.7.0)) + '@storybook/react-dom-shim': 9.1.2(react-dom@19.2.0(react@19.2.0))(react@19.2.0)(storybook@9.1.16(@testing-library/dom@10.4.0)(msw@2.4.8(typescript@5.6.3))(prettier@3.4.1)(vite@7.1.12(@types/node@20.17.16)(jiti@1.21.7)(yaml@2.7.0))) + react: 19.2.0 + react-dom: 19.2.0(react@19.2.0) + storybook: 9.1.16(@testing-library/dom@10.4.0)(msw@2.4.8(typescript@5.6.3))(prettier@3.4.1)(vite@7.1.12(@types/node@20.17.16)(jiti@1.21.7)(yaml@2.7.0)) optionalDependencies: typescript: 5.6.3 @@ -8784,16 +8245,16 @@ snapshots: '@tanstack/query-devtools@5.76.0': {} - '@tanstack/react-query-devtools@5.77.0(@tanstack/react-query@5.77.0(react@19.1.1))(react@19.1.1)': + '@tanstack/react-query-devtools@5.77.0(@tanstack/react-query@5.77.0(react@19.2.0))(react@19.2.0)': dependencies: '@tanstack/query-devtools': 5.76.0 - '@tanstack/react-query': 5.77.0(react@19.1.1) - react: 19.1.1 + '@tanstack/react-query': 5.77.0(react@19.2.0) + react: 19.2.0 - '@tanstack/react-query@5.77.0(react@19.1.1)': + '@tanstack/react-query@5.77.0(react@19.2.0)': dependencies: '@tanstack/query-core': 5.77.0 - react: 19.1.1 + react: 19.2.0 '@testing-library/dom@10.4.0': dependencies: @@ -8817,23 +8278,22 @@ snapshots: lz-string: 1.5.0 pretty-format: 27.5.1 - '@testing-library/jest-dom@6.6.3': + '@testing-library/jest-dom@6.9.1': dependencies: '@adobe/css-tools': 4.4.1 aria-query: 5.3.2 - chalk: 3.0.0 css.escape: 1.5.1 dom-accessibility-api: 0.6.3 - lodash: 4.17.21 + picocolors: 1.1.1 redent: 3.0.0 - '@testing-library/react@14.3.1(@types/react@19.1.17)(react-dom@19.1.1(react@19.1.1))(react@19.1.1)': + '@testing-library/react@14.3.1(@types/react@19.2.2)(react-dom@19.2.0(react@19.2.0))(react@19.2.0)': dependencies: '@babel/runtime': 7.26.10 '@testing-library/dom': 9.3.3 - '@types/react-dom': 18.3.7(@types/react@19.1.17) - react: 19.1.1 - react-dom: 19.1.1(react@19.1.1) + 
'@types/react-dom': 18.3.7(@types/react@19.2.2) + react: 19.2.0 + react-dom: 19.2.0(react@19.2.0) transitivePeerDependencies: - '@types/react' @@ -8866,20 +8326,20 @@ snapshots: '@types/babel__core@7.20.5': dependencies: - '@babel/parser': 7.28.4 - '@babel/types': 7.28.4 + '@babel/parser': 7.28.5 + '@babel/types': 7.28.5 '@types/babel__generator': 7.27.0 '@types/babel__template': 7.4.4 '@types/babel__traverse': 7.28.0 '@types/babel__generator@7.27.0': dependencies: - '@babel/types': 7.28.4 + '@babel/types': 7.28.5 '@types/babel__template@7.4.4': dependencies: - '@babel/parser': 7.28.4 - '@babel/types': 7.28.4 + '@babel/parser': 7.28.5 + '@babel/types': 7.28.5 '@types/babel__traverse@7.20.6': dependencies: @@ -8887,16 +8347,17 @@ snapshots: '@types/babel__traverse@7.28.0': dependencies: - '@babel/types': 7.28.4 + '@babel/types': 7.28.5 '@types/body-parser@1.19.2': dependencies: '@types/connect': 3.4.35 '@types/node': 20.17.16 - '@types/chai@5.2.2': + '@types/chai@5.2.3': dependencies: '@types/deep-eql': 4.0.2 + assertion-error: 2.0.1 '@types/chroma-js@2.4.0': {} @@ -8912,7 +8373,7 @@ snapshots: '@types/cookie@0.6.0': {} - '@types/d3-array@3.2.1': {} + '@types/d3-array@3.2.2': {} '@types/d3-color@3.1.3': {} @@ -8922,15 +8383,15 @@ snapshots: dependencies: '@types/d3-color': 3.1.3 - '@types/d3-path@3.1.0': {} + '@types/d3-path@3.1.1': {} - '@types/d3-scale@4.0.8': + '@types/d3-scale@4.0.9': dependencies: '@types/d3-time': 3.0.4 '@types/d3-shape@3.1.7': dependencies: - '@types/d3-path': 3.1.0 + '@types/d3-path': 3.1.1 '@types/d3-time@3.0.4': {} @@ -8982,7 +8443,7 @@ snapshots: '@types/hoist-non-react-statics@3.3.5': dependencies: - '@types/react': 19.1.17 + '@types/react': 19.2.2 hoist-non-react-statics: 3.3.2 '@types/http-errors@2.0.1': {} @@ -9046,7 +8507,7 @@ snapshots: dependencies: undici-types: 6.19.8 - '@types/node@22.18.8': + '@types/node@22.18.13': dependencies: undici-types: 6.21.0 @@ -9058,50 +8519,50 @@ snapshots: '@types/range-parser@1.2.4': {} - 
'@types/react-color@3.0.13(@types/react@19.1.17)': + '@types/react-color@3.0.13(@types/react@19.2.2)': dependencies: - '@types/react': 19.1.17 - '@types/reactcss': 1.2.13(@types/react@19.1.17) + '@types/react': 19.2.2 + '@types/reactcss': 1.2.13(@types/react@19.2.2) '@types/react-date-range@1.4.4': dependencies: - '@types/react': 19.1.17 + '@types/react': 19.2.2 date-fns: 2.30.0 - '@types/react-dom@18.3.7(@types/react@19.1.17)': + '@types/react-dom@18.3.7(@types/react@19.2.2)': dependencies: - '@types/react': 19.1.17 + '@types/react': 19.2.2 - '@types/react-dom@19.1.11(@types/react@19.1.17)': + '@types/react-dom@19.2.2(@types/react@19.2.2)': dependencies: - '@types/react': 19.1.17 + '@types/react': 19.2.2 '@types/react-syntax-highlighter@15.5.13': dependencies: - '@types/react': 19.1.17 + '@types/react': 19.2.2 - '@types/react-transition-group@4.4.12(@types/react@19.1.17)': + '@types/react-transition-group@4.4.12(@types/react@19.2.2)': dependencies: - '@types/react': 19.1.17 + '@types/react': 19.2.2 - '@types/react-virtualized-auto-sizer@1.0.8(react-dom@19.1.1(react@19.1.1))(react@19.1.1)': + '@types/react-virtualized-auto-sizer@1.0.8(react-dom@19.2.0(react@19.2.0))(react@19.2.0)': dependencies: - react-virtualized-auto-sizer: 1.0.26(react-dom@19.1.1(react@19.1.1))(react@19.1.1) + react-virtualized-auto-sizer: 1.0.26(react-dom@19.2.0(react@19.2.0))(react@19.2.0) transitivePeerDependencies: - react - react-dom '@types/react-window@1.8.8': dependencies: - '@types/react': 19.1.17 + '@types/react': 19.2.2 - '@types/react@19.1.17': + '@types/react@19.2.2': dependencies: csstype: 3.1.3 - '@types/reactcss@1.2.13(@types/react@19.1.17)': + '@types/reactcss@1.2.13(@types/react@19.2.2)': dependencies: - '@types/react': 19.1.17 + '@types/react': 19.2.2 '@types/resolve@1.20.4': {} @@ -9132,7 +8593,8 @@ snapshots: '@types/tough-cookie@4.0.5': {} - '@types/trusted-types@1.0.6': {} + '@types/trusted-types@2.0.7': + optional: true '@types/ua-parser-js@0.7.36': {} @@ -9158,49 
+8620,89 @@ snapshots: '@ungap/structured-clone@1.3.0': {} - '@vitejs/plugin-react@5.0.4(vite@7.1.11(@types/node@20.17.16)(jiti@1.21.7)(yaml@2.7.0))': + '@vitejs/plugin-react@5.1.0(vite@7.1.12(@types/node@20.17.16)(jiti@1.21.7)(yaml@2.7.0))': dependencies: - '@babel/core': 7.28.4 - '@babel/plugin-transform-react-jsx-self': 7.27.1(@babel/core@7.28.4) - '@babel/plugin-transform-react-jsx-source': 7.27.1(@babel/core@7.28.4) - '@rolldown/pluginutils': 1.0.0-beta.38 + '@babel/core': 7.28.5 + '@babel/plugin-transform-react-jsx-self': 7.27.1(@babel/core@7.28.5) + '@babel/plugin-transform-react-jsx-source': 7.27.1(@babel/core@7.28.5) + '@rolldown/pluginutils': 1.0.0-beta.43 '@types/babel__core': 7.20.5 - react-refresh: 0.17.0 - vite: 7.1.11(@types/node@20.17.16)(jiti@1.21.7)(yaml@2.7.0) + react-refresh: 0.18.0 + vite: 7.1.12(@types/node@20.17.16)(jiti@1.21.7)(yaml@2.7.0) transitivePeerDependencies: - supports-color '@vitest/expect@3.2.4': dependencies: - '@types/chai': 5.2.2 + '@types/chai': 5.2.3 '@vitest/spy': 3.2.4 '@vitest/utils': 3.2.4 - chai: 5.2.1 + chai: 5.3.3 tinyrainbow: 2.0.0 - '@vitest/mocker@3.2.4(msw@2.4.8(typescript@5.6.3))(vite@7.1.11(@types/node@20.17.16)(jiti@1.21.7)(yaml@2.7.0))': + '@vitest/expect@4.0.6': + dependencies: + '@standard-schema/spec': 1.0.0 + '@types/chai': 5.2.3 + '@vitest/spy': 4.0.6 + '@vitest/utils': 4.0.6 + chai: 6.2.0 + tinyrainbow: 3.0.3 + + '@vitest/mocker@3.2.4(msw@2.4.8(typescript@5.6.3))(vite@7.1.12(@types/node@20.17.16)(jiti@1.21.7)(yaml@2.7.0))': dependencies: '@vitest/spy': 3.2.4 estree-walker: 3.0.3 - magic-string: 0.30.17 + magic-string: 0.30.21 + optionalDependencies: + msw: 2.4.8(typescript@5.6.3) + vite: 7.1.12(@types/node@20.17.16)(jiti@1.21.7)(yaml@2.7.0) + + '@vitest/mocker@4.0.6(msw@2.4.8(typescript@5.6.3))(vite@7.1.12(@types/node@20.17.16)(jiti@1.21.7)(yaml@2.7.0))': + dependencies: + '@vitest/spy': 4.0.6 + estree-walker: 3.0.3 + magic-string: 0.30.21 optionalDependencies: msw: 2.4.8(typescript@5.6.3) - vite: 
7.1.11(@types/node@20.17.16)(jiti@1.21.7)(yaml@2.7.0) + vite: 7.1.12(@types/node@20.17.16)(jiti@1.21.7)(yaml@2.7.0) '@vitest/pretty-format@3.2.4': dependencies: tinyrainbow: 2.0.0 + '@vitest/pretty-format@4.0.6': + dependencies: + tinyrainbow: 3.0.3 + + '@vitest/runner@4.0.6': + dependencies: + '@vitest/utils': 4.0.6 + pathe: 2.0.3 + + '@vitest/snapshot@4.0.6': + dependencies: + '@vitest/pretty-format': 4.0.6 + magic-string: 0.30.21 + pathe: 2.0.3 + '@vitest/spy@3.2.4': dependencies: - tinyspy: 4.0.3 + tinyspy: 4.0.4 + + '@vitest/spy@4.0.6': {} '@vitest/utils@3.2.4': dependencies: '@vitest/pretty-format': 3.2.4 - loupe: 3.2.0 + loupe: 3.2.1 tinyrainbow: 2.0.0 + '@vitest/utils@4.0.6': + dependencies: + '@vitest/pretty-format': 4.0.6 + tinyrainbow: 3.0.3 + '@xterm/addon-canvas@0.7.0(@xterm/xterm@5.5.0)': dependencies: '@xterm/xterm': 5.5.0 @@ -9257,6 +8759,8 @@ snapshots: transitivePeerDependencies: - supports-color + agent-base@7.1.4: {} + ajv@6.12.6: dependencies: fast-deep-equal: 3.1.3 @@ -9303,10 +8807,6 @@ snapshots: argparse@2.0.1: {} - aria-hidden@1.2.4: - dependencies: - tslib: 2.8.1 - aria-hidden@1.2.6: dependencies: tslib: 2.8.1 @@ -9358,7 +8858,7 @@ snapshots: dependencies: possible-typed-array-names: 1.0.0 - axios@1.12.0: + axios@1.13.1: dependencies: follow-redirects: 1.15.11 form-data: 4.0.4 @@ -9366,13 +8866,13 @@ snapshots: transitivePeerDependencies: - debug - babel-jest@29.7.0(@babel/core@7.28.4): + babel-jest@29.7.0(@babel/core@7.28.5): dependencies: - '@babel/core': 7.28.4 + '@babel/core': 7.28.5 '@jest/transform': 29.7.0 '@types/babel__core': 7.20.5 babel-plugin-istanbul: 6.1.1 - babel-preset-jest: 29.6.3(@babel/core@7.28.4) + babel-preset-jest: 29.6.3(@babel/core@7.28.5) chalk: 4.1.2 graceful-fs: 4.2.11 slash: 3.0.0 @@ -9392,7 +8892,7 @@ snapshots: babel-plugin-jest-hoist@29.6.3: dependencies: '@babel/template': 7.27.2 - '@babel/types': 7.28.4 + '@babel/types': 7.28.5 '@types/babel__core': 7.20.5 '@types/babel__traverse': 7.28.0 @@ -9402,30 
+8902,30 @@ snapshots: cosmiconfig: 7.1.0 resolve: 1.22.10 - babel-preset-current-node-syntax@1.1.0(@babel/core@7.28.4): - dependencies: - '@babel/core': 7.28.4 - '@babel/plugin-syntax-async-generators': 7.8.4(@babel/core@7.28.4) - '@babel/plugin-syntax-bigint': 7.8.3(@babel/core@7.28.4) - '@babel/plugin-syntax-class-properties': 7.12.13(@babel/core@7.28.4) - '@babel/plugin-syntax-class-static-block': 7.14.5(@babel/core@7.28.4) - '@babel/plugin-syntax-import-attributes': 7.24.7(@babel/core@7.28.4) - '@babel/plugin-syntax-import-meta': 7.10.4(@babel/core@7.28.4) - '@babel/plugin-syntax-json-strings': 7.8.3(@babel/core@7.28.4) - '@babel/plugin-syntax-logical-assignment-operators': 7.10.4(@babel/core@7.28.4) - '@babel/plugin-syntax-nullish-coalescing-operator': 7.8.3(@babel/core@7.28.4) - '@babel/plugin-syntax-numeric-separator': 7.10.4(@babel/core@7.28.4) - '@babel/plugin-syntax-object-rest-spread': 7.8.3(@babel/core@7.28.4) - '@babel/plugin-syntax-optional-catch-binding': 7.8.3(@babel/core@7.28.4) - '@babel/plugin-syntax-optional-chaining': 7.8.3(@babel/core@7.28.4) - '@babel/plugin-syntax-private-property-in-object': 7.14.5(@babel/core@7.28.4) - '@babel/plugin-syntax-top-level-await': 7.14.5(@babel/core@7.28.4) - - babel-preset-jest@29.6.3(@babel/core@7.28.4): - dependencies: - '@babel/core': 7.28.4 + babel-preset-current-node-syntax@1.1.0(@babel/core@7.28.5): + dependencies: + '@babel/core': 7.28.5 + '@babel/plugin-syntax-async-generators': 7.8.4(@babel/core@7.28.5) + '@babel/plugin-syntax-bigint': 7.8.3(@babel/core@7.28.5) + '@babel/plugin-syntax-class-properties': 7.12.13(@babel/core@7.28.5) + '@babel/plugin-syntax-class-static-block': 7.14.5(@babel/core@7.28.5) + '@babel/plugin-syntax-import-attributes': 7.24.7(@babel/core@7.28.5) + '@babel/plugin-syntax-import-meta': 7.10.4(@babel/core@7.28.5) + '@babel/plugin-syntax-json-strings': 7.8.3(@babel/core@7.28.5) + '@babel/plugin-syntax-logical-assignment-operators': 7.10.4(@babel/core@7.28.5) + 
'@babel/plugin-syntax-nullish-coalescing-operator': 7.8.3(@babel/core@7.28.5) + '@babel/plugin-syntax-numeric-separator': 7.10.4(@babel/core@7.28.5) + '@babel/plugin-syntax-object-rest-spread': 7.8.3(@babel/core@7.28.5) + '@babel/plugin-syntax-optional-catch-binding': 7.8.3(@babel/core@7.28.5) + '@babel/plugin-syntax-optional-chaining': 7.8.3(@babel/core@7.28.5) + '@babel/plugin-syntax-private-property-in-object': 7.14.5(@babel/core@7.28.5) + '@babel/plugin-syntax-top-level-await': 7.14.5(@babel/core@7.28.5) + + babel-preset-jest@29.6.3(@babel/core@7.28.5): + dependencies: + '@babel/core': 7.28.5 babel-plugin-jest-hoist: 29.6.3 - babel-preset-current-node-syntax: 1.1.0(@babel/core@7.28.4) + babel-preset-current-node-syntax: 1.1.0(@babel/core@7.28.5) bail@2.0.2: {} @@ -9435,6 +8935,8 @@ snapshots: baseline-browser-mapping@2.8.10: {} + baseline-browser-mapping@2.8.22: {} + bcrypt-pbkdf@1.0.2: dependencies: tweetnacl: 0.14.5 @@ -9443,6 +8945,10 @@ snapshots: dependencies: open: 8.4.2 + bidi-js@1.0.3: + dependencies: + require-from-string: 2.0.2 + binary-extensions@2.3.0: {} bl@4.1.0: @@ -9485,6 +8991,14 @@ snapshots: node-releases: 2.0.21 update-browserslist-db: 1.1.3(browserslist@4.26.3) + browserslist@4.27.0: + dependencies: + baseline-browser-mapping: 2.8.22 + caniuse-lite: 1.0.30001752 + electron-to-chromium: 1.5.244 + node-releases: 2.0.27 + update-browserslist-db: 1.1.4(browserslist@4.27.0) + bser@2.1.1: dependencies: node-int64: 0.4.0 @@ -9511,20 +9025,20 @@ snapshots: es-define-property: 1.0.1 es-errors: 1.3.0 function-bind: 1.1.2 - get-intrinsic: 1.3.1 + get-intrinsic: 1.3.0 set-function-length: 1.2.2 call-bind@1.0.8: dependencies: call-bind-apply-helpers: 1.0.2 es-define-property: 1.0.1 - get-intrinsic: 1.3.1 + get-intrinsic: 1.3.0 set-function-length: 1.2.2 call-bound@1.0.3: dependencies: call-bind-apply-helpers: 1.0.2 - get-intrinsic: 1.3.1 + get-intrinsic: 1.3.0 callsites@3.1.0: {} @@ -9536,22 +9050,21 @@ snapshots: caniuse-lite@1.0.30001746: {} + 
caniuse-lite@1.0.30001752: {} + case-anything@2.1.13: {} ccount@2.0.1: {} - chai@5.2.1: + chai@5.3.3: dependencies: assertion-error: 2.0.1 check-error: 2.1.1 deep-eql: 5.0.2 - loupe: 3.2.0 - pathval: 2.0.0 + loupe: 3.2.1 + pathval: 2.0.1 - chalk@3.0.0: - dependencies: - ansi-styles: 4.3.0 - supports-color: 7.2.0 + chai@6.2.0: {} chalk@4.1.2: dependencies: @@ -9626,14 +9139,14 @@ snapshots: clsx@2.1.1: {} - cmdk@1.0.4(@types/react-dom@19.1.11(@types/react@19.1.17))(@types/react@19.1.17)(react-dom@19.1.1(react@19.1.1))(react@19.1.1): + cmdk@1.1.1(@types/react-dom@19.2.2(@types/react@19.2.2))(@types/react@19.2.2)(react-dom@19.2.0(react@19.2.0))(react@19.2.0): dependencies: - '@radix-ui/react-dialog': 1.1.4(@types/react-dom@19.1.11(@types/react@19.1.17))(@types/react@19.1.17)(react-dom@19.1.1(react@19.1.1))(react@19.1.1) - '@radix-ui/react-id': 1.1.0(@types/react@19.1.17)(react@19.1.1) - '@radix-ui/react-primitive': 2.0.1(@types/react-dom@19.1.11(@types/react@19.1.17))(@types/react@19.1.17)(react-dom@19.1.1(react@19.1.1))(react@19.1.1) - react: 19.1.1 - react-dom: 19.1.1(react@19.1.1) - use-sync-external-store: 1.4.0(react@19.1.1) + '@radix-ui/react-compose-refs': 1.1.2(@types/react@19.2.2)(react@19.2.0) + '@radix-ui/react-dialog': 1.1.15(@types/react-dom@19.2.2(@types/react@19.2.2))(@types/react@19.2.2)(react-dom@19.2.0(react@19.2.0))(react@19.2.0) + '@radix-ui/react-id': 1.1.1(@types/react@19.2.2)(react@19.2.0) + '@radix-ui/react-primitive': 2.1.3(@types/react-dom@19.2.2(@types/react@19.2.2))(@types/react@19.2.2)(react-dom@19.2.0(react@19.2.0))(react@19.2.0) + react: 19.2.0 + react-dom: 19.2.0(react@19.2.0) transitivePeerDependencies: - '@types/react' - '@types/react-dom' @@ -9718,7 +9231,7 @@ snapshots: dependencies: luxon: 3.3.0 - cronstrue@2.50.0: {} + cronstrue@2.59.0: {} cross-spawn@7.0.6: dependencies: @@ -9726,6 +9239,11 @@ snapshots: shebang-command: 2.0.0 which: 2.0.2 + css-tree@3.1.0: + dependencies: + mdn-data: 2.12.2 + source-map-js: 1.2.1 + 
css.escape@1.5.1: {} cssesc@3.0.0: {} @@ -9740,6 +9258,12 @@ snapshots: dependencies: cssom: 0.3.8 + cssstyle@5.3.1: + dependencies: + '@asamuzakjp/css-color': 4.0.5 + '@csstools/css-syntax-patches-for-csstree': 1.0.15 + css-tree: 3.1.0 + csstype@3.1.3: {} d3-array@3.2.4: @@ -9786,6 +9310,11 @@ snapshots: whatwg-mimetype: 3.0.0 whatwg-url: 11.0.0 + data-urls@6.0.0: + dependencies: + whatwg-mimetype: 4.0.0 + whatwg-url: 15.1.0 + date-fns@2.30.0: dependencies: '@babel/runtime': 7.26.10 @@ -9796,10 +9325,6 @@ snapshots: dependencies: ms: 2.0.0 - debug@4.4.1: - dependencies: - ms: 2.1.3 - debug@4.4.3: dependencies: ms: 2.1.3 @@ -9808,6 +9333,8 @@ snapshots: decimal.js@10.4.3: {} + decimal.js@10.6.0: {} + decode-named-character-reference@1.2.0: dependencies: character-entities: 2.0.2 @@ -9852,7 +9379,7 @@ snapshots: define-data-property@1.1.1: dependencies: - get-intrinsic: 1.3.1 + get-intrinsic: 1.3.0 gopd: 1.2.0 has-property-descriptors: 1.0.1 @@ -9914,6 +9441,10 @@ snapshots: dependencies: webidl-conversions: 7.0.0 + dompurify@3.2.6: + optionalDependencies: + '@types/trusted-types': 2.0.7 + dpdm@3.14.0: dependencies: chalk: 4.1.2 @@ -9940,6 +9471,8 @@ snapshots: electron-to-chromium@1.5.228: {} + electron-to-chromium@1.5.244: {} + emittery@0.13.1: {} emoji-mart@5.6.0: {} @@ -9956,6 +9489,8 @@ snapshots: entities@4.5.0: {} + entities@6.0.1: {} + error-ex@1.3.2: dependencies: is-arrayish: 0.2.1 @@ -9967,7 +9502,7 @@ snapshots: es-get-iterator@1.1.3: dependencies: call-bind: 1.0.7 - get-intrinsic: 1.3.1 + get-intrinsic: 1.3.0 has-symbols: 1.1.0 is-arguments: 1.2.0 is-map: 2.0.2 @@ -9976,6 +9511,8 @@ snapshots: isarray: 2.0.5 stop-iteration-iterator: 1.0.0 + es-module-lexer@1.7.0: {} + es-object-atoms@1.1.1: dependencies: es-errors: 1.3.0 @@ -9983,14 +9520,14 @@ snapshots: es-set-tostringtag@2.1.0: dependencies: es-errors: 1.3.0 - get-intrinsic: 1.3.1 + get-intrinsic: 1.3.0 has-tostringtag: 1.0.2 hasown: 2.0.2 - esbuild-register@3.6.0(esbuild@0.25.3): + 
esbuild-register@3.6.0(esbuild@0.25.11): dependencies: - debug: 4.4.1 - esbuild: 0.25.3 + debug: 4.4.3 + esbuild: 0.25.11 transitivePeerDependencies: - supports-color @@ -10023,34 +9560,6 @@ snapshots: '@esbuild/win32-ia32': 0.25.11 '@esbuild/win32-x64': 0.25.11 - esbuild@0.25.3: - optionalDependencies: - '@esbuild/aix-ppc64': 0.25.3 - '@esbuild/android-arm': 0.25.3 - '@esbuild/android-arm64': 0.25.3 - '@esbuild/android-x64': 0.25.3 - '@esbuild/darwin-arm64': 0.25.3 - '@esbuild/darwin-x64': 0.25.3 - '@esbuild/freebsd-arm64': 0.25.3 - '@esbuild/freebsd-x64': 0.25.3 - '@esbuild/linux-arm': 0.25.3 - '@esbuild/linux-arm64': 0.25.3 - '@esbuild/linux-ia32': 0.25.3 - '@esbuild/linux-loong64': 0.25.3 - '@esbuild/linux-mips64el': 0.25.3 - '@esbuild/linux-ppc64': 0.25.3 - '@esbuild/linux-riscv64': 0.25.3 - '@esbuild/linux-s390x': 0.25.3 - '@esbuild/linux-x64': 0.25.3 - '@esbuild/netbsd-arm64': 0.25.3 - '@esbuild/netbsd-x64': 0.25.3 - '@esbuild/openbsd-arm64': 0.25.3 - '@esbuild/openbsd-x64': 0.25.3 - '@esbuild/sunos-x64': 0.25.3 - '@esbuild/win32-arm64': 0.25.3 - '@esbuild/win32-ia32': 0.25.3 - '@esbuild/win32-x64': 0.25.3 - escalade@3.2.0: {} escape-html@1.0.3: {} @@ -10081,7 +9590,7 @@ snapshots: eslint@8.52.0: dependencies: '@eslint-community/eslint-utils': 4.9.0(eslint@8.52.0) - '@eslint-community/regexpp': 4.12.1 + '@eslint-community/regexpp': 4.12.2 '@eslint/eslintrc': 2.1.4 '@eslint/js': 8.52.0 '@humanwhocodes/config-array': 0.11.14 @@ -10171,6 +9680,8 @@ snapshots: exit@0.1.2: {} + expect-type@1.2.2: {} + expect@29.7.0: dependencies: '@jest/expect-utils': 29.7.0 @@ -10220,7 +9731,7 @@ snapshots: fast-deep-equal@3.1.3: optional: true - fast-equals@5.2.2: {} + fast-equals@5.3.2: {} fast-glob@3.3.3: dependencies: @@ -10334,14 +9845,14 @@ snapshots: dependencies: fd-package-json: 2.0.0 - formik@2.4.6(react@19.1.1): + formik@2.4.6(react@19.2.0): dependencies: '@types/hoist-non-react-statics': 3.3.5 deepmerge: 2.2.1 hoist-non-react-statics: 3.3.2 lodash: 4.17.21 lodash-es: 
4.17.21 - react: 19.1.1 + react: 19.2.0 react-fast-compare: 2.0.4 tiny-warning: 1.0.3 tslib: 2.6.2 @@ -10380,6 +9891,19 @@ snapshots: get-caller-file@2.0.5: {} + get-intrinsic@1.3.0: + dependencies: + call-bind-apply-helpers: 1.0.2 + es-define-property: 1.0.1 + es-errors: 1.3.0 + es-object-atoms: 1.1.1 + function-bind: 1.1.2 + get-proto: 1.0.1 + gopd: 1.2.0 + has-symbols: 1.1.0 + hasown: 2.0.2 + math-intrinsics: 1.1.0 + get-intrinsic@1.3.1: dependencies: async-function: 1.0.0 @@ -10455,7 +9979,7 @@ snapshots: has-property-descriptors@1.0.1: dependencies: - get-intrinsic: 1.3.1 + get-intrinsic: 1.3.0 has-property-descriptors@1.0.2: dependencies: @@ -10519,6 +10043,10 @@ snapshots: dependencies: whatwg-encoding: 2.0.0 + html-encoding-sniffer@4.0.0: + dependencies: + whatwg-encoding: 3.1.1 + html-escaper@2.0.2: {} html-url-attributes@3.0.1: {} @@ -10539,6 +10067,13 @@ snapshots: transitivePeerDependencies: - supports-color + http-proxy-agent@7.0.2: + dependencies: + agent-base: 7.1.4 + debug: 4.4.3 + transitivePeerDependencies: + - supports-color + https-proxy-agent@5.0.1: dependencies: agent-base: 6.0.2 @@ -10546,6 +10081,13 @@ snapshots: transitivePeerDependencies: - supports-color + https-proxy-agent@7.0.6: + dependencies: + agent-base: 7.1.4 + debug: 4.4.3 + transitivePeerDependencies: + - supports-color + human-signals@2.1.0: {} humanize-duration@3.32.2: {} @@ -10590,7 +10132,7 @@ snapshots: internal-slot@1.0.6: dependencies: - get-intrinsic: 1.3.1 + get-intrinsic: 1.3.0 hasown: 2.0.2 side-channel: 1.1.0 @@ -10620,7 +10162,7 @@ snapshots: is-array-buffer@3.0.2: dependencies: call-bind: 1.0.7 - get-intrinsic: 1.3.1 + get-intrinsic: 1.3.0 is-typed-array: 1.1.15 is-arrayish@0.2.1: {} @@ -10719,7 +10261,7 @@ snapshots: is-weakset@2.0.2: dependencies: call-bind: 1.0.8 - get-intrinsic: 1.3.1 + get-intrinsic: 1.3.0 is-wsl@2.2.0: dependencies: @@ -10735,21 +10277,21 @@ snapshots: istanbul-lib-instrument@5.2.1: dependencies: - '@babel/core': 7.28.4 - '@babel/parser': 
7.28.4 + '@babel/core': 7.28.5 + '@babel/parser': 7.28.5 '@istanbuljs/schema': 0.1.3 istanbul-lib-coverage: 3.2.2 - semver: 7.7.2 + semver: 7.7.3 transitivePeerDependencies: - supports-color istanbul-lib-instrument@6.0.3: dependencies: - '@babel/core': 7.28.4 - '@babel/parser': 7.28.4 + '@babel/core': 7.28.5 + '@babel/parser': 7.28.5 '@istanbuljs/schema': 0.1.3 istanbul-lib-coverage: 3.2.2 - semver: 7.7.2 + semver: 7.7.3 transitivePeerDependencies: - supports-color @@ -10836,10 +10378,10 @@ snapshots: jest-config@29.7.0(@types/node@20.17.16)(babel-plugin-macros@3.1.0)(ts-node@10.9.2(@swc/core@1.3.38)(@types/node@20.17.16)(typescript@5.6.3)): dependencies: - '@babel/core': 7.28.4 + '@babel/core': 7.28.5 '@jest/test-sequencer': 29.7.0 '@jest/types': 29.6.3 - babel-jest: 29.7.0(@babel/core@7.28.4) + babel-jest: 29.7.0(@babel/core@7.28.5) chalk: 4.1.2 ci-info: 3.9.0 deepmerge: 4.3.1 @@ -11072,15 +10614,15 @@ snapshots: jest-snapshot@29.7.0: dependencies: - '@babel/core': 7.28.4 - '@babel/generator': 7.28.3 - '@babel/plugin-syntax-jsx': 7.24.7(@babel/core@7.28.4) - '@babel/plugin-syntax-typescript': 7.24.7(@babel/core@7.28.4) - '@babel/types': 7.28.4 + '@babel/core': 7.28.5 + '@babel/generator': 7.28.5 + '@babel/plugin-syntax-jsx': 7.24.7(@babel/core@7.28.5) + '@babel/plugin-syntax-typescript': 7.24.7(@babel/core@7.28.5) + '@babel/types': 7.28.5 '@jest/expect-utils': 29.7.0 '@jest/transform': 29.7.0 '@jest/types': 29.6.3 - babel-preset-current-node-syntax: 1.1.0(@babel/core@7.28.4) + babel-preset-current-node-syntax: 1.1.0(@babel/core@7.28.5) chalk: 4.1.2 expect: 29.7.0 graceful-fs: 4.2.11 @@ -11091,7 +10633,7 @@ snapshots: jest-util: 29.7.0 natural-compare: 1.4.0 pretty-format: 29.7.0 - semver: 7.7.2 + semver: 7.7.3 transitivePeerDependencies: - supports-color @@ -11203,13 +10745,40 @@ snapshots: whatwg-encoding: 2.0.0 whatwg-mimetype: 3.0.0 whatwg-url: 11.0.0 - ws: 8.17.1 + ws: 8.18.3 xml-name-validator: 4.0.0 transitivePeerDependencies: - bufferutil - supports-color 
- utf-8-validate + jsdom@27.0.1: + dependencies: + '@asamuzakjp/dom-selector': 6.7.3 + cssstyle: 5.3.1 + data-urls: 6.0.0 + decimal.js: 10.6.0 + html-encoding-sniffer: 4.0.0 + http-proxy-agent: 7.0.2 + https-proxy-agent: 7.0.6 + is-potential-custom-element-name: 1.0.1 + parse5: 8.0.0 + rrweb-cssom: 0.8.0 + saxes: 6.0.0 + symbol-tree: 3.2.4 + tough-cookie: 6.0.0 + w3c-xmlserializer: 5.0.0 + webidl-conversions: 8.0.0 + whatwg-encoding: 3.1.1 + whatwg-mimetype: 4.0.0 + whatwg-url: 15.1.0 + ws: 8.18.3 + xml-name-validator: 5.0.0 + transitivePeerDependencies: + - bufferutil + - supports-color + - utf-8-validate + jsesc@3.1.0: {} json-buffer@3.0.1: @@ -11247,7 +10816,7 @@ snapshots: kleur@3.0.3: {} - knip@5.64.1(@types/node@20.17.16)(typescript@5.6.3): + knip@5.66.4(@types/node@20.17.16)(typescript@5.6.3): dependencies: '@nodelib/fs.walk': 1.2.8 '@types/node': 20.17.16 @@ -11256,13 +10825,13 @@ snapshots: jiti: 2.6.1 js-yaml: 4.1.0 minimist: 1.2.8 - oxc-resolver: 11.8.4 + oxc-resolver: 11.12.0 picocolors: 1.1.1 picomatch: 4.0.3 smol-toml: 1.4.2 strip-json-comments: 5.0.2 typescript: 5.6.3 - zod: 4.1.11 + zod: 4.1.12 leven@3.1.0: {} @@ -11308,8 +10877,6 @@ snapshots: chalk: 4.1.2 is-unicode-supported: 0.1.0 - long@5.2.3: {} - long@5.3.2: {} longest-streak@3.1.0: {} @@ -11318,7 +10885,7 @@ snapshots: dependencies: js-tokens: 4.0.0 - loupe@3.2.0: {} + loupe@3.2.1: {} lowlight@1.20.0: dependencies: @@ -11327,13 +10894,15 @@ snapshots: lru-cache@10.4.3: {} + lru-cache@11.2.2: {} + lru-cache@5.1.1: dependencies: yallist: 3.1.1 - lucide-react@0.545.0(react@19.1.1): + lucide-react@0.552.0(react@19.2.0): dependencies: - react: 19.1.1 + react: 19.2.0 luxon@3.3.0: {} @@ -11343,9 +10912,13 @@ snapshots: dependencies: '@jridgewell/sourcemap-codec': 1.5.0 + magic-string@0.30.21: + dependencies: + '@jridgewell/sourcemap-codec': 1.5.5 + make-dir@4.0.0: dependencies: - semver: 7.7.2 + semver: 7.7.3 make-error@1.3.6: optional: true @@ -11356,6 +10929,8 @@ snapshots: markdown-table@3.0.4: 
{} + marked@14.0.0: {} + material-colors@1.2.6: {} math-intrinsics@1.1.0: {} @@ -11513,6 +11088,8 @@ snapshots: dependencies: '@types/mdast': 4.0.4 + mdn-data@2.12.2: {} + media-typer@0.3.0: {} memoize-one@5.2.1: {} @@ -11747,9 +11324,10 @@ snapshots: mock-socket@9.3.1: {} - monaco-editor@0.53.0: + monaco-editor@0.55.1: dependencies: - '@types/trusted-types': 1.0.6 + dompurify: 3.2.6 + marked: 14.0.0 moo-color@1.0.3: dependencies: @@ -11794,8 +11372,6 @@ snapshots: nanoid@3.3.11: {} - napi-postinstall@0.3.3: {} - natural-compare@1.4.0: {} negotiator@0.6.3: {} @@ -11804,6 +11380,8 @@ snapshots: node-releases@2.0.21: {} + node-releases@2.0.27: {} + normalize-path@3.0.0: {} normalize-range@0.1.2: {} @@ -11881,29 +11459,27 @@ snapshots: outvariant@1.4.3: {} - oxc-resolver@11.8.4: - dependencies: - napi-postinstall: 0.3.3 + oxc-resolver@11.12.0: optionalDependencies: - '@oxc-resolver/binding-android-arm-eabi': 11.8.4 - '@oxc-resolver/binding-android-arm64': 11.8.4 - '@oxc-resolver/binding-darwin-arm64': 11.8.4 - '@oxc-resolver/binding-darwin-x64': 11.8.4 - '@oxc-resolver/binding-freebsd-x64': 11.8.4 - '@oxc-resolver/binding-linux-arm-gnueabihf': 11.8.4 - '@oxc-resolver/binding-linux-arm-musleabihf': 11.8.4 - '@oxc-resolver/binding-linux-arm64-gnu': 11.8.4 - '@oxc-resolver/binding-linux-arm64-musl': 11.8.4 - '@oxc-resolver/binding-linux-ppc64-gnu': 11.8.4 - '@oxc-resolver/binding-linux-riscv64-gnu': 11.8.4 - '@oxc-resolver/binding-linux-riscv64-musl': 11.8.4 - '@oxc-resolver/binding-linux-s390x-gnu': 11.8.4 - '@oxc-resolver/binding-linux-x64-gnu': 11.8.4 - '@oxc-resolver/binding-linux-x64-musl': 11.8.4 - '@oxc-resolver/binding-wasm32-wasi': 11.8.4 - '@oxc-resolver/binding-win32-arm64-msvc': 11.8.4 - '@oxc-resolver/binding-win32-ia32-msvc': 11.8.4 - '@oxc-resolver/binding-win32-x64-msvc': 11.8.4 + '@oxc-resolver/binding-android-arm-eabi': 11.12.0 + '@oxc-resolver/binding-android-arm64': 11.12.0 + '@oxc-resolver/binding-darwin-arm64': 11.12.0 + 
'@oxc-resolver/binding-darwin-x64': 11.12.0 + '@oxc-resolver/binding-freebsd-x64': 11.12.0 + '@oxc-resolver/binding-linux-arm-gnueabihf': 11.12.0 + '@oxc-resolver/binding-linux-arm-musleabihf': 11.12.0 + '@oxc-resolver/binding-linux-arm64-gnu': 11.12.0 + '@oxc-resolver/binding-linux-arm64-musl': 11.12.0 + '@oxc-resolver/binding-linux-ppc64-gnu': 11.12.0 + '@oxc-resolver/binding-linux-riscv64-gnu': 11.12.0 + '@oxc-resolver/binding-linux-riscv64-musl': 11.12.0 + '@oxc-resolver/binding-linux-s390x-gnu': 11.12.0 + '@oxc-resolver/binding-linux-x64-gnu': 11.12.0 + '@oxc-resolver/binding-linux-x64-musl': 11.12.0 + '@oxc-resolver/binding-wasm32-wasi': 11.12.0 + '@oxc-resolver/binding-win32-arm64-msvc': 11.12.0 + '@oxc-resolver/binding-win32-ia32-msvc': 11.12.0 + '@oxc-resolver/binding-win32-x64-msvc': 11.12.0 p-limit@2.3.0: dependencies: @@ -11970,6 +11546,10 @@ snapshots: dependencies: entities: 4.5.0 + parse5@8.0.0: + dependencies: + entities: 6.0.1 + parseurl@1.3.3: {} path-exists@4.0.0: {} @@ -11995,7 +11575,9 @@ snapshots: path-type@4.0.0: {} - pathval@2.0.0: {} + pathe@2.0.3: {} + + pathval@2.0.1: {} picocolors@1.1.1: {} @@ -12109,7 +11691,7 @@ snapshots: property-information@7.1.0: {} - protobufjs@7.4.0: + protobufjs@7.5.4: dependencies: '@protobufjs/aspromise': 1.1.2 '@protobufjs/base64': 1.1.2 @@ -12122,7 +11704,7 @@ snapshots: '@protobufjs/pool': 1.1.0 '@protobufjs/utf8': 1.1.0 '@types/node': 20.17.16 - long: 5.2.3 + long: 5.3.2 proxy-addr@2.0.7: dependencies: @@ -12154,29 +11736,29 @@ snapshots: iconv-lite: 0.4.24 unpipe: 1.0.0 - react-color@2.19.3(react@19.1.1): + react-color@2.19.3(react@19.2.0): dependencies: - '@icons/material': 0.2.4(react@19.1.1) + '@icons/material': 0.2.4(react@19.2.0) lodash: 4.17.21 lodash-es: 4.17.21 material-colors: 1.2.6 prop-types: 15.8.1 - react: 19.1.1 - reactcss: 1.2.3(react@19.1.1) + react: 19.2.0 + reactcss: 1.2.3(react@19.2.0) tinycolor2: 1.6.0 - react-confetti@6.4.0(react@19.1.1): + react-confetti@6.4.0(react@19.2.0): 
dependencies: - react: 19.1.1 + react: 19.2.0 tween-functions: 1.2.0 - react-date-range@1.4.0(date-fns@2.30.0)(react@19.1.1): + react-date-range@1.4.0(date-fns@2.30.0)(react@19.2.0): dependencies: classnames: 2.3.2 date-fns: 2.30.0 prop-types: 15.8.1 - react: 19.1.1 - react-list: 0.8.17(react@19.1.1) + react: 19.2.0 + react-list: 0.8.17(react@19.2.0) shallow-equal: 1.2.1 react-docgen-typescript@2.2.2(typescript@5.6.3): @@ -12185,7 +11767,7 @@ snapshots: react-docgen@8.0.0: dependencies: - '@babel/core': 7.28.4 + '@babel/core': 7.28.5 '@babel/traverse': 7.27.1 '@babel/types': 7.27.1 '@types/babel__core': 7.20.5 @@ -12198,16 +11780,16 @@ snapshots: transitivePeerDependencies: - supports-color - react-dom@19.1.1(react@19.1.1): + react-dom@19.2.0(react@19.2.0): dependencies: - react: 19.1.1 - scheduler: 0.26.0 + react: 19.2.0 + scheduler: 0.27.0 react-fast-compare@2.0.4: {} - react-inspector@6.0.2(react@19.1.1): + react-inspector@6.0.2(react@19.2.0): dependencies: - react: 19.1.1 + react: 19.2.0 react-is@16.13.1: {} @@ -12217,21 +11799,21 @@ snapshots: react-is@19.1.1: {} - react-list@0.8.17(react@19.1.1): + react-list@0.8.17(react@19.2.0): dependencies: prop-types: 15.8.1 - react: 19.1.1 + react: 19.2.0 - react-markdown@9.1.0(@types/react@19.1.17)(react@19.1.1): + react-markdown@9.1.0(@types/react@19.2.2)(react@19.2.0): dependencies: '@types/hast': 3.0.4 '@types/mdast': 4.0.4 - '@types/react': 19.1.17 + '@types/react': 19.2.2 devlop: 1.1.0 hast-util-to-jsx-runtime: 2.3.6 html-url-attributes: 3.0.1 mdast-util-to-hast: 13.2.0 - react: 19.1.1 + react: 19.2.0 remark-parse: 11.0.0 remark-rehype: 11.1.2 unified: 11.0.5 @@ -12240,113 +11822,102 @@ snapshots: transitivePeerDependencies: - supports-color - react-refresh@0.17.0: {} + react-refresh@0.18.0: {} - react-remove-scroll-bar@2.3.8(@types/react@19.1.17)(react@19.1.1): + react-remove-scroll-bar@2.3.8(@types/react@19.2.2)(react@19.2.0): dependencies: - react: 19.1.1 - react-style-singleton: 
2.2.3(@types/react@19.1.17)(react@19.1.1) + react: 19.2.0 + react-style-singleton: 2.2.3(@types/react@19.2.2)(react@19.2.0) tslib: 2.8.1 optionalDependencies: - '@types/react': 19.1.17 + '@types/react': 19.2.2 - react-remove-scroll@2.6.3(@types/react@19.1.17)(react@19.1.1): + react-remove-scroll@2.7.1(@types/react@19.2.2)(react@19.2.0): dependencies: - react: 19.1.1 - react-remove-scroll-bar: 2.3.8(@types/react@19.1.17)(react@19.1.1) - react-style-singleton: 2.2.3(@types/react@19.1.17)(react@19.1.1) + react: 19.2.0 + react-remove-scroll-bar: 2.3.8(@types/react@19.2.2)(react@19.2.0) + react-style-singleton: 2.2.3(@types/react@19.2.2)(react@19.2.0) tslib: 2.8.1 - use-callback-ref: 1.3.3(@types/react@19.1.17)(react@19.1.1) - use-sidecar: 1.1.3(@types/react@19.1.17)(react@19.1.1) + use-callback-ref: 1.3.3(@types/react@19.2.2)(react@19.2.0) + use-sidecar: 1.1.3(@types/react@19.2.2)(react@19.2.0) optionalDependencies: - '@types/react': 19.1.17 + '@types/react': 19.2.2 - react-remove-scroll@2.7.1(@types/react@19.1.17)(react@19.1.1): + react-resizable-panels@3.0.6(react-dom@19.2.0(react@19.2.0))(react@19.2.0): dependencies: - react: 19.1.1 - react-remove-scroll-bar: 2.3.8(@types/react@19.1.17)(react@19.1.1) - react-style-singleton: 2.2.3(@types/react@19.1.17)(react@19.1.1) - tslib: 2.8.1 - use-callback-ref: 1.3.3(@types/react@19.1.17)(react@19.1.1) - use-sidecar: 1.1.3(@types/react@19.1.17)(react@19.1.1) - optionalDependencies: - '@types/react': 19.1.17 - - react-resizable-panels@3.0.6(react-dom@19.1.1(react@19.1.1))(react@19.1.1): - dependencies: - react: 19.1.1 - react-dom: 19.1.1(react@19.1.1) + react: 19.2.0 + react-dom: 19.2.0(react@19.2.0) - react-router@7.8.0(react-dom@19.1.1(react@19.1.1))(react@19.1.1): + react-router@7.9.5(react-dom@19.2.0(react@19.2.0))(react@19.2.0): dependencies: cookie: 1.0.2 - react: 19.1.1 - set-cookie-parser: 2.7.1 + react: 19.2.0 + set-cookie-parser: 2.7.2 optionalDependencies: - react-dom: 19.1.1(react@19.1.1) + react-dom: 
19.2.0(react@19.2.0) - react-smooth@4.0.4(react-dom@19.1.1(react@19.1.1))(react@19.1.1): + react-smooth@4.0.4(react-dom@19.2.0(react@19.2.0))(react@19.2.0): dependencies: - fast-equals: 5.2.2 + fast-equals: 5.3.2 prop-types: 15.8.1 - react: 19.1.1 - react-dom: 19.1.1(react@19.1.1) - react-transition-group: 4.4.5(react-dom@19.1.1(react@19.1.1))(react@19.1.1) + react: 19.2.0 + react-dom: 19.2.0(react@19.2.0) + react-transition-group: 4.4.5(react-dom@19.2.0(react@19.2.0))(react@19.2.0) - react-style-singleton@2.2.3(@types/react@19.1.17)(react@19.1.1): + react-style-singleton@2.2.3(@types/react@19.2.2)(react@19.2.0): dependencies: get-nonce: 1.0.1 - react: 19.1.1 + react: 19.2.0 tslib: 2.8.1 optionalDependencies: - '@types/react': 19.1.17 + '@types/react': 19.2.2 - react-syntax-highlighter@15.6.1(react@19.1.1): + react-syntax-highlighter@15.6.1(react@19.2.0): dependencies: '@babel/runtime': 7.26.10 highlight.js: 10.7.3 highlightjs-vue: 1.0.0 lowlight: 1.20.0 prismjs: 1.30.0 - react: 19.1.1 + react: 19.2.0 refractor: 3.6.0 - react-textarea-autosize@8.5.9(@types/react@19.1.17)(react@19.1.1): + react-textarea-autosize@8.5.9(@types/react@19.2.2)(react@19.2.0): dependencies: '@babel/runtime': 7.26.10 - react: 19.1.1 - use-composed-ref: 1.4.0(@types/react@19.1.17)(react@19.1.1) - use-latest: 1.3.0(@types/react@19.1.17)(react@19.1.1) + react: 19.2.0 + use-composed-ref: 1.4.0(@types/react@19.2.2)(react@19.2.0) + use-latest: 1.3.0(@types/react@19.2.2)(react@19.2.0) transitivePeerDependencies: - '@types/react' - react-transition-group@4.4.5(react-dom@19.1.1(react@19.1.1))(react@19.1.1): + react-transition-group@4.4.5(react-dom@19.2.0(react@19.2.0))(react@19.2.0): dependencies: '@babel/runtime': 7.26.10 dom-helpers: 5.2.1 loose-envify: 1.4.0 prop-types: 15.8.1 - react: 19.1.1 - react-dom: 19.1.1(react@19.1.1) + react: 19.2.0 + react-dom: 19.2.0(react@19.2.0) - react-virtualized-auto-sizer@1.0.26(react-dom@19.1.1(react@19.1.1))(react@19.1.1): + 
react-virtualized-auto-sizer@1.0.26(react-dom@19.2.0(react@19.2.0))(react@19.2.0): dependencies: - react: 19.1.1 - react-dom: 19.1.1(react@19.1.1) + react: 19.2.0 + react-dom: 19.2.0(react@19.2.0) - react-window@1.8.11(react-dom@19.1.1(react@19.1.1))(react@19.1.1): + react-window@1.8.11(react-dom@19.2.0(react@19.2.0))(react@19.2.0): dependencies: '@babel/runtime': 7.26.10 memoize-one: 5.2.1 - react: 19.1.1 - react-dom: 19.1.1(react@19.1.1) + react: 19.2.0 + react-dom: 19.2.0(react@19.2.0) - react@19.1.1: {} + react@19.2.0: {} - reactcss@1.2.3(react@19.1.1): + reactcss@1.2.3(react@19.2.0): dependencies: lodash: 4.17.21 - react: 19.1.1 + react: 19.2.0 read-cache@1.0.0: dependencies: @@ -12374,7 +11945,7 @@ snapshots: readdirp@4.1.2: {} - recast@0.23.9: + recast@0.23.11: dependencies: ast-types: 0.16.1 esprima: 4.0.1 @@ -12386,15 +11957,15 @@ snapshots: dependencies: decimal.js-light: 2.5.1 - recharts@2.15.0(react-dom@19.1.1(react@19.1.1))(react@19.1.1): + recharts@2.15.4(react-dom@19.2.0(react@19.2.0))(react@19.2.0): dependencies: clsx: 2.1.1 eventemitter3: 4.0.7 lodash: 4.17.21 - react: 19.1.1 - react-dom: 19.1.1(react@19.1.1) + react: 19.2.0 + react-dom: 19.2.0(react@19.2.0) react-is: 18.3.1 - react-smooth: 4.0.4(react-dom@19.1.1(react@19.1.1))(react@19.1.1) + react-smooth: 4.0.4(react-dom@19.2.0(react@19.2.0))(react@19.2.0) recharts-scale: 0.4.5 tiny-invariant: 1.3.3 victory-vendor: 36.9.2 @@ -12454,6 +12025,8 @@ snapshots: require-directory@2.1.1: {} + require-from-string@2.0.2: {} + requires-port@1.0.0: {} resize-observer-polyfill@1.5.1: {} @@ -12523,13 +12096,15 @@ snapshots: '@rollup/rollup-win32-x64-msvc': 4.52.5 fsevents: 2.3.3 + rrweb-cssom@0.8.0: {} + run-parallel@1.2.0: dependencies: queue-microtask: 1.2.3 - rxjs@7.8.1: + rxjs@7.8.2: dependencies: - tslib: 2.6.2 + tslib: 2.8.1 safe-buffer@5.1.2: {} @@ -12541,9 +12116,9 @@ snapshots: dependencies: xmlchars: 2.2.0 - scheduler@0.26.0: {} + scheduler@0.27.0: {} - semver@7.7.2: {} + semver@7.7.3: {} 
send@0.19.0: dependencies: @@ -12572,14 +12147,14 @@ snapshots: transitivePeerDependencies: - supports-color - set-cookie-parser@2.7.1: {} + set-cookie-parser@2.7.2: {} set-function-length@1.2.2: dependencies: define-data-property: 1.1.4 es-errors: 1.3.0 function-bind: 1.1.2 - get-intrinsic: 1.3.1 + get-intrinsic: 1.3.0 gopd: 1.2.0 has-property-descriptors: 1.0.2 @@ -12610,14 +12185,14 @@ snapshots: dependencies: call-bound: 1.0.3 es-errors: 1.3.0 - get-intrinsic: 1.3.1 + get-intrinsic: 1.3.0 object-inspect: 1.13.3 side-channel-weakmap@1.0.2: dependencies: call-bound: 1.0.3 es-errors: 1.3.0 - get-intrinsic: 1.3.1 + get-intrinsic: 1.3.0 object-inspect: 1.13.3 side-channel-map: 1.0.1 @@ -12629,6 +12204,8 @@ snapshots: side-channel-map: 1.0.1 side-channel-weakmap: 1.0.2 + siginfo@2.0.0: {} + signal-exit@3.0.7: {} signal-exit@4.1.0: {} @@ -12670,41 +12247,45 @@ snapshots: dependencies: escape-string-regexp: 2.0.0 + stackback@0.0.2: {} + state-local@1.0.7: {} statuses@2.0.1: {} statuses@2.0.2: {} + std-env@3.10.0: {} + stop-iteration-iterator@1.0.0: dependencies: internal-slot: 1.0.6 - storybook-addon-remix-react-router@5.0.0(react-dom@19.1.1(react@19.1.1))(react-router@7.8.0(react-dom@19.1.1(react@19.1.1))(react@19.1.1))(react@19.1.1)(storybook@9.1.2(@testing-library/dom@10.4.0)(msw@2.4.8(typescript@5.6.3))(prettier@3.4.1)(vite@7.1.11(@types/node@20.17.16)(jiti@1.21.7)(yaml@2.7.0))): + storybook-addon-remix-react-router@5.0.0(react-dom@19.2.0(react@19.2.0))(react-router@7.9.5(react-dom@19.2.0(react@19.2.0))(react@19.2.0))(react@19.2.0)(storybook@9.1.16(@testing-library/dom@10.4.0)(msw@2.4.8(typescript@5.6.3))(prettier@3.4.1)(vite@7.1.12(@types/node@20.17.16)(jiti@1.21.7)(yaml@2.7.0))): dependencies: '@mjackson/form-data-parser': 0.4.0 compare-versions: 6.1.0 - react-inspector: 6.0.2(react@19.1.1) - react-router: 7.8.0(react-dom@19.1.1(react@19.1.1))(react@19.1.1) - storybook: 
9.1.2(@testing-library/dom@10.4.0)(msw@2.4.8(typescript@5.6.3))(prettier@3.4.1)(vite@7.1.11(@types/node@20.17.16)(jiti@1.21.7)(yaml@2.7.0)) + react-inspector: 6.0.2(react@19.2.0) + react-router: 7.9.5(react-dom@19.2.0(react@19.2.0))(react@19.2.0) + storybook: 9.1.16(@testing-library/dom@10.4.0)(msw@2.4.8(typescript@5.6.3))(prettier@3.4.1)(vite@7.1.12(@types/node@20.17.16)(jiti@1.21.7)(yaml@2.7.0)) optionalDependencies: - react: 19.1.1 - react-dom: 19.1.1(react@19.1.1) + react: 19.2.0 + react-dom: 19.2.0(react@19.2.0) - storybook@9.1.2(@testing-library/dom@10.4.0)(msw@2.4.8(typescript@5.6.3))(prettier@3.4.1)(vite@7.1.11(@types/node@20.17.16)(jiti@1.21.7)(yaml@2.7.0)): + storybook@9.1.16(@testing-library/dom@10.4.0)(msw@2.4.8(typescript@5.6.3))(prettier@3.4.1)(vite@7.1.12(@types/node@20.17.16)(jiti@1.21.7)(yaml@2.7.0)): dependencies: '@storybook/global': 5.0.0 - '@testing-library/jest-dom': 6.6.3 + '@testing-library/jest-dom': 6.9.1 '@testing-library/user-event': 14.6.1(@testing-library/dom@10.4.0) '@vitest/expect': 3.2.4 - '@vitest/mocker': 3.2.4(msw@2.4.8(typescript@5.6.3))(vite@7.1.11(@types/node@20.17.16)(jiti@1.21.7)(yaml@2.7.0)) + '@vitest/mocker': 3.2.4(msw@2.4.8(typescript@5.6.3))(vite@7.1.12(@types/node@20.17.16)(jiti@1.21.7)(yaml@2.7.0)) '@vitest/spy': 3.2.4 better-opn: 3.0.2 - esbuild: 0.25.3 - esbuild-register: 3.6.0(esbuild@0.25.3) - recast: 0.23.9 - semver: 7.7.2 - ws: 8.18.0 + esbuild: 0.25.11 + esbuild-register: 3.6.0(esbuild@0.25.11) + recast: 0.23.11 + semver: 7.7.3 + ws: 8.18.3 optionalDependencies: prettier: 3.4.1 transitivePeerDependencies: @@ -12866,8 +12447,12 @@ snapshots: tiny-warning@1.0.3: {} + tinybench@2.9.0: {} + tinycolor2@1.6.0: {} + tinyexec@0.3.2: {} + tinyglobby@0.2.15: dependencies: fdir: 6.5.0(picomatch@4.0.3) @@ -12875,7 +12460,15 @@ snapshots: tinyrainbow@2.0.0: {} - tinyspy@4.0.3: {} + tinyrainbow@3.0.3: {} + + tinyspy@4.0.4: {} + + tldts-core@7.0.17: {} + + tldts@7.0.17: + dependencies: + tldts-core: 7.0.17 tmpl@1.0.5: {} @@ 
-12894,10 +12487,18 @@ snapshots: universalify: 0.2.0 url-parse: 1.5.10 + tough-cookie@6.0.0: + dependencies: + tldts: 7.0.17 + tr46@3.0.0: dependencies: punycode: 2.3.1 + tr46@6.0.0: + dependencies: + punycode: 2.3.1 + trim-lines@3.0.1: {} trough@2.2.0: {} @@ -12934,12 +12535,12 @@ snapshots: ts-proto-descriptors@1.16.0: dependencies: long: 5.3.2 - protobufjs: 7.4.0 + protobufjs: 7.5.4 ts-proto@1.181.2: dependencies: case-anything: 2.1.13 - protobufjs: 7.4.0 + protobufjs: 7.5.4 ts-poet: 6.12.0 ts-proto-descriptors: 1.16.0 @@ -12990,7 +12591,7 @@ snapshots: undici-types@6.21.0: {} - undici@6.21.3: {} + undici@6.22.0: {} unicorn-magic@0.1.0: {} @@ -13050,6 +12651,12 @@ snapshots: escalade: 3.2.0 picocolors: 1.1.1 + update-browserslist-db@1.1.4(browserslist@4.27.0): + dependencies: + browserslist: 4.27.0 + escalade: 3.2.0 + picocolors: 1.1.1 + uri-js@4.4.1: dependencies: punycode: 2.3.1 @@ -13060,43 +12667,43 @@ snapshots: querystringify: 2.2.0 requires-port: 1.0.0 - use-callback-ref@1.3.3(@types/react@19.1.17)(react@19.1.1): + use-callback-ref@1.3.3(@types/react@19.2.2)(react@19.2.0): dependencies: - react: 19.1.1 + react: 19.2.0 tslib: 2.8.1 optionalDependencies: - '@types/react': 19.1.17 + '@types/react': 19.2.2 - use-composed-ref@1.4.0(@types/react@19.1.17)(react@19.1.1): + use-composed-ref@1.4.0(@types/react@19.2.2)(react@19.2.0): dependencies: - react: 19.1.1 + react: 19.2.0 optionalDependencies: - '@types/react': 19.1.17 + '@types/react': 19.2.2 - use-isomorphic-layout-effect@1.2.1(@types/react@19.1.17)(react@19.1.1): + use-isomorphic-layout-effect@1.2.1(@types/react@19.2.2)(react@19.2.0): dependencies: - react: 19.1.1 + react: 19.2.0 optionalDependencies: - '@types/react': 19.1.17 + '@types/react': 19.2.2 - use-latest@1.3.0(@types/react@19.1.17)(react@19.1.1): + use-latest@1.3.0(@types/react@19.2.2)(react@19.2.0): dependencies: - react: 19.1.1 - use-isomorphic-layout-effect: 1.2.1(@types/react@19.1.17)(react@19.1.1) + react: 19.2.0 + 
use-isomorphic-layout-effect: 1.2.1(@types/react@19.2.2)(react@19.2.0) optionalDependencies: - '@types/react': 19.1.17 + '@types/react': 19.2.2 - use-sidecar@1.1.3(@types/react@19.1.17)(react@19.1.1): + use-sidecar@1.1.3(@types/react@19.2.2)(react@19.2.0): dependencies: detect-node-es: 1.1.0 - react: 19.1.1 + react: 19.2.0 tslib: 2.8.1 optionalDependencies: - '@types/react': 19.1.17 + '@types/react': 19.2.2 - use-sync-external-store@1.4.0(react@19.1.1): + use-sync-external-store@1.6.0(react@19.2.0): dependencies: - react: 19.1.1 + react: 19.2.0 util-deprecate@1.0.2: {} @@ -13127,10 +12734,10 @@ snapshots: victory-vendor@36.9.2: dependencies: - '@types/d3-array': 3.2.1 + '@types/d3-array': 3.2.2 '@types/d3-ease': 3.0.2 '@types/d3-interpolate': 3.0.4 - '@types/d3-scale': 4.0.8 + '@types/d3-scale': 4.0.9 '@types/d3-shape': 3.1.7 '@types/d3-time': 3.0.4 '@types/d3-timer': 3.0.2 @@ -13142,7 +12749,7 @@ snapshots: d3-time: 3.1.0 d3-timer: 3.0.1 - vite-plugin-checker@0.11.0(@biomejs/biome@2.2.4)(eslint@8.52.0)(optionator@0.9.3)(typescript@5.6.3)(vite@7.1.11(@types/node@20.17.16)(jiti@1.21.7)(yaml@2.7.0)): + vite-plugin-checker@0.11.0(@biomejs/biome@2.2.4)(eslint@8.52.0)(optionator@0.9.3)(typescript@5.6.3)(vite@7.1.12(@types/node@20.17.16)(jiti@1.21.7)(yaml@2.7.0)): dependencies: '@babel/code-frame': 7.27.1 chokidar: 4.0.3 @@ -13151,7 +12758,7 @@ snapshots: picomatch: 4.0.3 tiny-invariant: 1.3.3 tinyglobby: 0.2.15 - vite: 7.1.11(@types/node@20.17.16)(jiti@1.21.7)(yaml@2.7.0) + vite: 7.1.12(@types/node@20.17.16)(jiti@1.21.7)(yaml@2.7.0) vscode-uri: 3.1.0 optionalDependencies: '@biomejs/biome': 2.2.4 @@ -13159,7 +12766,7 @@ snapshots: optionator: 0.9.3 typescript: 5.6.3 - vite@7.1.11(@types/node@20.17.16)(jiti@1.21.7)(yaml@2.7.0): + vite@7.1.12(@types/node@20.17.16)(jiti@1.21.7)(yaml@2.7.0): dependencies: esbuild: 0.25.11 fdir: 6.5.0(picomatch@4.0.3) @@ -13173,12 +12780,56 @@ snapshots: jiti: 1.21.7 yaml: 2.7.0 + 
vitest@4.0.6(@types/debug@4.1.12)(@types/node@20.17.16)(jiti@1.21.7)(jsdom@27.0.1)(msw@2.4.8(typescript@5.6.3))(yaml@2.7.0): + dependencies: + '@vitest/expect': 4.0.6 + '@vitest/mocker': 4.0.6(msw@2.4.8(typescript@5.6.3))(vite@7.1.12(@types/node@20.17.16)(jiti@1.21.7)(yaml@2.7.0)) + '@vitest/pretty-format': 4.0.6 + '@vitest/runner': 4.0.6 + '@vitest/snapshot': 4.0.6 + '@vitest/spy': 4.0.6 + '@vitest/utils': 4.0.6 + debug: 4.4.3 + es-module-lexer: 1.7.0 + expect-type: 1.2.2 + magic-string: 0.30.21 + pathe: 2.0.3 + picomatch: 4.0.3 + std-env: 3.10.0 + tinybench: 2.9.0 + tinyexec: 0.3.2 + tinyglobby: 0.2.15 + tinyrainbow: 3.0.3 + vite: 7.1.12(@types/node@20.17.16)(jiti@1.21.7)(yaml@2.7.0) + why-is-node-running: 2.3.0 + optionalDependencies: + '@types/debug': 4.1.12 + '@types/node': 20.17.16 + jsdom: 27.0.1 + transitivePeerDependencies: + - jiti + - less + - lightningcss + - msw + - sass + - sass-embedded + - stylus + - sugarss + - supports-color + - terser + - tsx + - yaml + vscode-uri@3.1.0: {} w3c-xmlserializer@4.0.0: dependencies: xml-name-validator: 4.0.0 + w3c-xmlserializer@5.0.0: + dependencies: + xml-name-validator: 5.0.0 + walk-up-path@4.0.0: {} walker@1.0.8: @@ -13191,6 +12842,8 @@ snapshots: webidl-conversions@7.0.0: {} + webidl-conversions@8.0.0: {} + webpack-sources@3.2.3: {} webpack-virtual-modules@0.5.0: {} @@ -13201,13 +12854,24 @@ snapshots: dependencies: iconv-lite: 0.6.3 + whatwg-encoding@3.1.1: + dependencies: + iconv-lite: 0.6.3 + whatwg-mimetype@3.0.0: {} + whatwg-mimetype@4.0.0: {} + whatwg-url@11.0.0: dependencies: tr46: 3.0.0 webidl-conversions: 7.0.0 + whatwg-url@15.1.0: + dependencies: + tr46: 6.0.0 + webidl-conversions: 8.0.0 + which-boxed-primitive@1.0.2: dependencies: is-bigint: 1.0.4 @@ -13236,6 +12900,11 @@ snapshots: dependencies: isexe: 2.0.0 + why-is-node-running@2.3.0: + dependencies: + siginfo: 2.0.0 + stackback: 0.0.2 + wrap-ansi@6.2.0: dependencies: ansi-styles: 4.3.0 @@ -13261,12 +12930,12 @@ snapshots: imurmurhash: 0.1.4 
signal-exit: 3.0.7 - ws@8.17.1: {} - - ws@8.18.0: {} + ws@8.18.3: {} xml-name-validator@4.0.0: {} + xml-name-validator@5.0.0: {} + xmlchars@2.2.0: {} xtend@4.0.2: {} @@ -13308,6 +12977,6 @@ snapshots: toposort: 2.0.2 type-fest: 2.19.0 - zod@4.1.11: {} + zod@4.1.12: {} zwitch@2.0.4: {} diff --git a/site/src/App.tsx b/site/src/App.tsx index 57497b586f56d..a4fad65a3d265 100644 --- a/site/src/App.tsx +++ b/site/src/App.tsx @@ -1,5 +1,6 @@ import "./theme/globalFonts"; import { ReactQueryDevtools } from "@tanstack/react-query-devtools"; +import { TooltipProvider } from "components/Tooltip/Tooltip"; import { type FC, type ReactNode, @@ -53,8 +54,10 @@ export const AppProviders: FC = ({ - {children} - + + {children} + + {showDevtools && } diff --git a/site/src/api/api.test.ts b/site/src/api/api.jest.ts similarity index 100% rename from site/src/api/api.test.ts rename to site/src/api/api.jest.ts diff --git a/site/src/api/api.ts b/site/src/api/api.ts index 4c02a96fe2129..eaf4076472a2e 100644 --- a/site/src/api/api.ts +++ b/site/src/api/api.ts @@ -1474,6 +1474,19 @@ class ApiMethods { return response.data; }; + getUserPreferenceSettings = + async (): Promise => { + const response = await this.axios.get("/api/v2/users/me/preferences"); + return response.data; + }; + + updateUserPreferenceSettings = async ( + req: TypesGen.UpdateUserPreferenceSettingsRequest, + ): Promise => { + const response = await this.axios.put("/api/v2/users/me/preferences", req); + return response.data; + }; + getUserQuietHoursSchedule = async ( userId: TypesGen.User["id"], ): Promise => { @@ -2644,50 +2657,13 @@ class ApiMethods { markAllInboxNotificationsAsRead = async () => { await this.axios.put("/api/v2/notifications/inbox/mark-all-as-read"); }; -} - -// Experimental API methods call endpoints under the /api/experimental/ prefix. -// These endpoints are not stable and may change or be removed at any time. -// -// All methods must be defined with arrow function syntax. 
See the docstring -// above the ApiMethods class for a full explanation. - -export type TaskFeedbackRating = "good" | "okay" | "bad"; - -export type CreateTaskFeedbackRequest = { - rate: TaskFeedbackRating; - comment?: string; -}; -class ExperimentalApiMethods { - constructor(protected readonly axios: AxiosInstance) {} - - getAITasksPrompts = async ( - buildIds: TypesGen.WorkspaceBuild["id"][], - ): Promise => { - if (buildIds.length === 0) { - return { - prompts: {}, - }; - } - - const response = await this.axios.get( - "/api/experimental/aitasks/prompts", - { - params: { - build_ids: buildIds.join(","), - }, - }, - ); - - return response.data; - }; createTask = async ( user: string, req: TypesGen.CreateTaskRequest, ): Promise => { const response = await this.axios.post( - `/api/experimental/tasks/${user}`, + `/api/v2/tasks/${user}`, req, ); @@ -2706,7 +2682,7 @@ class ExperimentalApiMethods { } const res = await this.axios.get( - "/api/experimental/tasks", + "/api/v2/tasks", { params: { q: query.join(", "), @@ -2719,14 +2695,14 @@ class ExperimentalApiMethods { getTask = async (user: string, id: string): Promise => { const response = await this.axios.get( - `/api/experimental/tasks/${user}/${id}`, + `/api/v2/tasks/${user}/${id}`, ); return response.data; }; deleteTask = async (user: string, id: string): Promise => { - await this.axios.delete(`/api/experimental/tasks/${user}/${id}`); + await this.axios.delete(`/api/v2/tasks/${user}/${id}`); }; createTaskFeedback = async ( @@ -2739,6 +2715,32 @@ class ExperimentalApiMethods { }; } +export type TaskFeedbackRating = "good" | "okay" | "bad"; + +export type CreateTaskFeedbackRequest = { + rate: TaskFeedbackRating; + comment?: string; +}; + +// Experimental API methods call endpoints under the /api/experimental/ prefix. +// These endpoints are not stable and may change or be removed at any time. +// +// All methods must be defined with arrow function syntax. 
See the docstring +// above the ApiMethods class for a full explanation. +class ExperimentalApiMethods { + constructor(protected readonly axios: AxiosInstance) {} + + getAIBridgeInterceptions = async (options: SearchParamOptions) => { + const url = getURLWithSearchParams( + "/api/experimental/aibridge/interceptions", + options, + ); + const response = + await this.axios.get(url); + return response.data; + }; +} + // This is a hard coded CSRF token/cookie pair for local development. In prod, // the GoLang webserver generates a random cookie with a new token for each // document request. For local development, we don't use the Go webserver for @@ -2780,7 +2782,7 @@ function getConfiguredAxiosInstance(): AxiosInstance { } } else { // Do not write error logs if we are in a FE unit test. - if (process.env.JEST_WORKER_ID === undefined) { + if (!process.env.JEST_WORKER_ID && !process.env.VITEST) { console.error("CSRF token not found"); } } diff --git a/site/src/api/queries/aiBridge.ts b/site/src/api/queries/aiBridge.ts new file mode 100644 index 0000000000000..1e385bc464564 --- /dev/null +++ b/site/src/api/queries/aiBridge.ts @@ -0,0 +1,22 @@ +import { API } from "api/api"; +import type { AIBridgeListInterceptionsResponse } from "api/typesGenerated"; +import { useFilterParamsKey } from "components/Filter/Filter"; +import type { UsePaginatedQueryOptions } from "hooks/usePaginatedQuery"; + +export const paginatedInterceptions = ( + searchParams: URLSearchParams, +): UsePaginatedQueryOptions => { + return { + searchParams, + queryPayload: () => searchParams.get(useFilterParamsKey) ?? 
"", + queryKey: ({ payload, pageNumber }) => { + return ["aiBridgeInterceptions", payload, pageNumber] as const; + }, + queryFn: ({ limit, offset, payload }) => + API.experimental.getAIBridgeInterceptions({ + offset, + limit, + q: payload, + }), + }; +}; diff --git a/site/src/api/queries/templates.ts b/site/src/api/queries/templates.ts index 686611cb6cd41..da27333b0febe 100644 --- a/site/src/api/queries/templates.ts +++ b/site/src/api/queries/templates.ts @@ -35,7 +35,7 @@ export const templateByName = (organization: string, name: string) => { } satisfies QueryOptions