Compare commits

...

50 Commits
cache ... dev

Author SHA1 Message Date
Owen
b8d468f6de Bump version 2026-02-25 14:22:24 -08:00
Owen
fc66394243 Merge branch 'main' into dev 2026-02-25 14:18:22 -08:00
miloschwartz
8fca243c9a update tierMatrix 2026-02-25 11:59:16 -08:00
miloschwartz
388f710379 add pg migration 2026-02-25 11:37:31 -08:00
Owen Schwartz
ba3ab4362b Merge pull request #2539 from fosrl/dependabot/npm_and_yarn/prod-minor-updates-22e7e52815
Bump the prod-minor-updates group across 1 directory with 3 updates
2026-02-25 11:32:19 -08:00
miloschwartz
e18c9afc2d add sqlite migration 2026-02-25 11:24:32 -08:00
Owen Schwartz
a9b4a86c4a Merge pull request #2470 from fosrl/dependabot/npm_and_yarn/qs-6.14.2
Bump qs from 6.14.1 to 6.14.2
2026-02-25 11:05:46 -08:00
Owen Schwartz
200ea502dd Merge pull request #2484 from fosrl/dependabot/go_modules/install/minor-updates-80eb8af454
Bump golang.org/x/term from 0.39.0 to 0.40.0 in /install in the minor-updates group
2026-02-25 11:05:34 -08:00
dependabot[bot]
de36db97eb Bump the prod-minor-updates group across 1 directory with 3 updates
Bumps the prod-minor-updates group with 3 updates in the / directory: [pg](https://github.com/brianc/node-postgres/tree/HEAD/packages/pg), [posthog-node](https://github.com/PostHog/posthog-js/tree/HEAD/packages/node) and [tailwind-merge](https://github.com/dcastil/tailwind-merge).


Updates `pg` from 8.18.0 to 8.19.0
- [Changelog](https://github.com/brianc/node-postgres/blob/master/CHANGELOG.md)
- [Commits](https://github.com/brianc/node-postgres/commits/pg@8.19.0/packages/pg)

Updates `posthog-node` from 5.24.15 to 5.26.0
- [Release notes](https://github.com/PostHog/posthog-js/releases)
- [Changelog](https://github.com/PostHog/posthog-js/blob/main/packages/node/CHANGELOG.md)
- [Commits](https://github.com/PostHog/posthog-js/commits/posthog-node@5.26.0/packages/node)

Updates `tailwind-merge` from 3.4.0 to 3.5.0
- [Release notes](https://github.com/dcastil/tailwind-merge/releases)
- [Commits](https://github.com/dcastil/tailwind-merge/compare/v3.4.0...v3.5.0)

---
updated-dependencies:
- dependency-name: pg
  dependency-version: 8.19.0
  dependency-type: direct:production
  update-type: version-update:semver-minor
  dependency-group: prod-minor-updates
- dependency-name: posthog-node
  dependency-version: 5.26.0
  dependency-type: direct:production
  update-type: version-update:semver-minor
  dependency-group: prod-minor-updates
- dependency-name: tailwind-merge
  dependency-version: 3.5.0
  dependency-type: direct:production
  update-type: version-update:semver-minor
  dependency-group: prod-minor-updates
...

Signed-off-by: dependabot[bot] <support@github.com>
2026-02-25 19:03:51 +00:00
dependabot[bot]
30283b044f Bump golang.org/x/term in /install in the minor-updates group
Bumps the minor-updates group in /install with 1 update: [golang.org/x/term](https://github.com/golang/term).


Updates `golang.org/x/term` from 0.39.0 to 0.40.0
- [Commits](https://github.com/golang/term/compare/v0.39.0...v0.40.0)

---
updated-dependencies:
- dependency-name: golang.org/x/term
  dependency-version: 0.40.0
  dependency-type: direct:production
  update-type: version-update:semver-minor
  dependency-group: minor-updates
...

Signed-off-by: dependabot[bot] <support@github.com>
2026-02-25 19:01:31 +00:00
Owen Schwartz
055bed8a07 Merge pull request #2338 from fosrl/dependabot/github_actions/actions/checkout-6.0.2
Bump actions/checkout from 6.0.1 to 6.0.2
2026-02-25 11:00:55 -08:00
Owen Schwartz
12b5c2ab34 Merge pull request #2495 from fosrl/dependabot/npm_and_yarn/dev-patch-updates-ebb414287f
Bump @types/nodemailer from 7.0.9 to 7.0.10 in the dev-patch-updates group across 1 directory
2026-02-25 11:00:24 -08:00
Owen Schwartz
dd78674888 Merge pull request #2514 from fosrl/dependabot/npm_and_yarn/multi-60f2582fdd
Bump fast-xml-parser and @aws-sdk/xml-builder
2026-02-25 11:00:15 -08:00
Owen Schwartz
0d0df63847 Merge pull request #2518 from fosrl/dependabot/github_actions/actions/stale-10.2.0
Bump actions/stale from 10.1.1 to 10.2.0
2026-02-25 11:00:01 -08:00
Owen Schwartz
3ab00d9da8 Merge pull request #2527 from fosrl/dependabot/npm_and_yarn/prod-patch-updates-cbac17d765
Bump the prod-patch-updates group across 1 directory with 7 updates
2026-02-25 10:59:38 -08:00
Owen Schwartz
3e6e72c5c7 Merge pull request #2531 from fosrl/dependabot/npm_and_yarn/multi-40a89e2d0a
Bump minimatch
2026-02-25 10:59:08 -08:00
Owen Schwartz
5d8a55f08c Merge pull request #2415 from fosrl/dependabot/github_actions/aws-actions/configure-aws-credentials-6
Bump aws-actions/configure-aws-credentials from 5 to 6
2026-02-25 10:58:18 -08:00
Owen Schwartz
81c569aae4 Merge pull request #2393 from fosrl/dependabot/github_actions/docker/login-action-3.7.0
Bump docker/login-action from 3.6.0 to 3.7.0
2026-02-25 10:58:11 -08:00
Owen Schwartz
88fd3fc4da Merge pull request #2296 from fosrl/dependabot/npm_and_yarn/lodash-4.17.23
Bump lodash from 4.17.21 to 4.17.23
2026-02-25 10:58:02 -08:00
Owen
2282d3ae39 Fix formatting 2026-02-25 10:53:56 -08:00
Owen
c4dcec463a Merge branch 'dev' into LaurenceJJones-feature/installer-tui 2026-02-25 10:48:05 -08:00
Owen
5b7f893ad7 Merge branch 'main' into dev 2026-02-25 10:46:28 -08:00
Owen
2ede0d498a remove log 2026-02-25 10:46:20 -08:00
Owen
f518e8a0ff Merge branch 'feature/installer-tui' of github.com:LaurenceJJones/pangolin into LaurenceJJones-feature/installer-tui 2026-02-25 10:45:38 -08:00
Owen Schwartz
767284408a Merge pull request #2499 from LaurenceJJones/feature/build-variables
enhance(installer): use ldflags to inject versions
2026-02-25 10:42:04 -08:00
Owen Schwartz
eef51f3b84 Merge pull request #2491 from rodneyosodo/fix/install
fix(install): add error handling, code cleanups, and YAML type refactor
2026-02-25 10:41:03 -08:00
Owen Schwartz
69b7114a49 Merge pull request #2537 from Abhinav-kodes/fix/toggle-hydration-sync
fix: sync resource toggle states with context on initial load
2026-02-25 10:36:58 -08:00
Owen Schwartz
0ea38ea568 Merge pull request #2535 from Abhinav-kodes/fix-resource-session-delete-cookie
fix: correct session DELETE tautology and HTTP cookie domain interpolation
2026-02-25 10:35:09 -08:00
Abhinav-kodes
c600da71e3 fix: sync resource toggle states with context on initial load
- Replace defaultChecked with checked for controlled components
- Add useEffect to sync rulesEnabled, ssoEnabled, whitelistEnabled
  when resource context hydrates after mount
- Add nullish coalescing fallback to prevent undefined initial state
2026-02-25 22:07:08 +05:30
Abhinav-kodes
c64dd14b1a fix: correct session DELETE tautology and HTTP cookie domain interpolation 2026-02-25 17:24:27 +05:30
miloschwartz
8ea6d9fa67 add get user by username search endpoint to integration api 2026-02-24 22:04:15 -08:00
Owen
978ac8f53c Add logging 2026-02-24 20:51:27 -08:00
Owen
49a326cde7 Add trust proxy to the internal api
Fix access logs not having the right ip
2026-02-24 20:23:42 -08:00
Owen
63e208f4ec Use local cache in verify session 2026-02-24 19:56:16 -08:00
miloschwartz
c71f46ede5 move copy button and fix translation 2026-02-24 19:44:08 -08:00
dependabot[bot]
2edebaddc2 Bump the prod-patch-updates group across 1 directory with 7 updates
Bumps the prod-patch-updates group with 7 updates in the / directory:

| Package | From | To |
| --- | --- | --- |
| [@asteasolutions/zod-to-openapi](https://github.com/asteasolutions/zod-to-openapi) | `8.4.0` | `8.4.1` |
| [@react-email/components](https://github.com/resend/react-email/tree/HEAD/packages/components) | `1.0.7` | `1.0.8` |
| [@react-email/tailwind](https://github.com/resend/react-email/tree/HEAD/packages/tailwind) | `2.0.4` | `2.0.5` |
| [@simplewebauthn/server](https://github.com/MasterKale/SimpleWebAuthn/tree/HEAD/packages/server) | `13.2.2` | `13.2.3` |
| [glob](https://github.com/isaacs/node-glob) | `13.0.3` | `13.0.6` |
| [next-intl](https://github.com/amannn/next-intl) | `4.8.2` | `4.8.3` |
| [react-hook-form](https://github.com/react-hook-form/react-hook-form) | `7.71.1` | `7.71.2` |



Updates `@asteasolutions/zod-to-openapi` from 8.4.0 to 8.4.1
- [Release notes](https://github.com/asteasolutions/zod-to-openapi/releases)
- [Commits](https://github.com/asteasolutions/zod-to-openapi/compare/v8.4.0...v8.4.1)

Updates `@react-email/components` from 1.0.7 to 1.0.8
- [Release notes](https://github.com/resend/react-email/releases)
- [Changelog](https://github.com/resend/react-email/blob/canary/packages/components/CHANGELOG.md)
- [Commits](https://github.com/resend/react-email/commits/@react-email/components@1.0.8/packages/components)

Updates `@react-email/tailwind` from 2.0.4 to 2.0.5
- [Release notes](https://github.com/resend/react-email/releases)
- [Changelog](https://github.com/resend/react-email/blob/canary/packages/tailwind/CHANGELOG.md)
- [Commits](https://github.com/resend/react-email/commits/@react-email/tailwind@2.0.5/packages/tailwind)

Updates `@simplewebauthn/server` from 13.2.2 to 13.2.3
- [Release notes](https://github.com/MasterKale/SimpleWebAuthn/releases)
- [Changelog](https://github.com/MasterKale/SimpleWebAuthn/blob/master/CHANGELOG.md)
- [Commits](https://github.com/MasterKale/SimpleWebAuthn/commits/v13.2.3/packages/server)

Updates `glob` from 13.0.3 to 13.0.6
- [Changelog](https://github.com/isaacs/node-glob/blob/main/changelog.md)
- [Commits](https://github.com/isaacs/node-glob/compare/v13.0.3...v13.0.6)

Updates `next-intl` from 4.8.2 to 4.8.3
- [Release notes](https://github.com/amannn/next-intl/releases)
- [Changelog](https://github.com/amannn/next-intl/blob/main/CHANGELOG.md)
- [Commits](https://github.com/amannn/next-intl/compare/v4.8.2...v4.8.3)

Updates `react-hook-form` from 7.71.1 to 7.71.2
- [Release notes](https://github.com/react-hook-form/react-hook-form/releases)
- [Changelog](https://github.com/react-hook-form/react-hook-form/blob/master/CHANGELOG.md)
- [Commits](https://github.com/react-hook-form/react-hook-form/compare/v7.71.1...v7.71.2)

---
updated-dependencies:
- dependency-name: "@asteasolutions/zod-to-openapi"
  dependency-version: 8.4.1
  dependency-type: direct:production
  update-type: version-update:semver-patch
  dependency-group: prod-patch-updates
- dependency-name: "@react-email/components"
  dependency-version: 1.0.8
  dependency-type: direct:production
  update-type: version-update:semver-patch
  dependency-group: prod-patch-updates
- dependency-name: "@react-email/tailwind"
  dependency-version: 2.0.5
  dependency-type: direct:production
  update-type: version-update:semver-patch
  dependency-group: prod-patch-updates
- dependency-name: "@simplewebauthn/server"
  dependency-version: 13.2.3
  dependency-type: direct:production
  update-type: version-update:semver-patch
  dependency-group: prod-patch-updates
- dependency-name: glob
  dependency-version: 13.0.6
  dependency-type: direct:production
  update-type: version-update:semver-patch
  dependency-group: prod-patch-updates
- dependency-name: next-intl
  dependency-version: 4.8.3
  dependency-type: direct:production
  update-type: version-update:semver-patch
  dependency-group: prod-patch-updates
- dependency-name: react-hook-form
  dependency-version: 7.71.2
  dependency-type: direct:production
  update-type: version-update:semver-patch
  dependency-group: prod-patch-updates
...

Signed-off-by: dependabot[bot] <support@github.com>
2026-02-25 01:37:31 +00:00
dependabot[bot]
119e1d4867 Bump @types/nodemailer in the dev-patch-updates group across 1 directory
Bumps the dev-patch-updates group with 1 update in the / directory: [@types/nodemailer](https://github.com/DefinitelyTyped/DefinitelyTyped/tree/HEAD/types/nodemailer).


Updates `@types/nodemailer` from 7.0.9 to 7.0.10
- [Release notes](https://github.com/DefinitelyTyped/DefinitelyTyped/releases)
- [Commits](https://github.com/DefinitelyTyped/DefinitelyTyped/commits/HEAD/types/nodemailer)

---
updated-dependencies:
- dependency-name: "@types/nodemailer"
  dependency-version: 7.0.10
  dependency-type: direct:development
  update-type: version-update:semver-patch
  dependency-group: dev-patch-updates
...

Signed-off-by: dependabot[bot] <support@github.com>
2026-02-25 01:35:31 +00:00
dependabot[bot]
63e30d3378 Bump minimatch
Bumps [minimatch](https://github.com/isaacs/minimatch). These dependencies needed to be updated together.

Updates `minimatch` from 10.2.0 to 10.2.3
- [Changelog](https://github.com/isaacs/minimatch/blob/main/changelog.md)
- [Commits](https://github.com/isaacs/minimatch/compare/v10.2.0...v10.2.3)

Updates `minimatch` from 3.1.2 to 3.1.4
- [Changelog](https://github.com/isaacs/minimatch/blob/main/changelog.md)
- [Commits](https://github.com/isaacs/minimatch/compare/v3.1.2...v3.1.4)

Updates `minimatch` from 9.0.5 to 9.0.7
- [Changelog](https://github.com/isaacs/minimatch/blob/main/changelog.md)
- [Commits](https://github.com/isaacs/minimatch/compare/v9.0.5...v9.0.7)

Updates `minimatch` from 10.1.1 to 10.2.3
- [Changelog](https://github.com/isaacs/minimatch/blob/main/changelog.md)
- [Commits](https://github.com/isaacs/minimatch/compare/v10.1.1...v10.2.3)

---
updated-dependencies:
- dependency-name: minimatch
  dependency-version: 10.2.3
  dependency-type: indirect
- dependency-name: minimatch
  dependency-version: 3.1.4
  dependency-type: indirect
- dependency-name: minimatch
  dependency-version: 9.0.7
  dependency-type: indirect
- dependency-name: minimatch
  dependency-version: 10.2.3
  dependency-type: indirect
...

Signed-off-by: dependabot[bot] <support@github.com>
2026-02-25 01:04:10 +00:00
miloschwartz
848d4d91e6 fix sidebar 2026-02-23 13:40:08 -08:00
dependabot[bot]
720d3a8135 Bump actions/stale from 10.1.1 to 10.2.0
Bumps [actions/stale](https://github.com/actions/stale) from 10.1.1 to 10.2.0.
- [Release notes](https://github.com/actions/stale/releases)
- [Changelog](https://github.com/actions/stale/blob/main/CHANGELOG.md)
- [Commits](997185467f...b5d41d4e1d)

---
updated-dependencies:
- dependency-name: actions/stale
  dependency-version: 10.2.0
  dependency-type: direct:production
  update-type: version-update:semver-minor
...

Signed-off-by: dependabot[bot] <support@github.com>
2026-02-23 01:36:30 +00:00
dependabot[bot]
9c42458fa5 Bump actions/checkout from 6.0.1 to 6.0.2
Bumps [actions/checkout](https://github.com/actions/checkout) from 6.0.1 to 6.0.2.
- [Release notes](https://github.com/actions/checkout/releases)
- [Changelog](https://github.com/actions/checkout/blob/main/CHANGELOG.md)
- [Commits](8e8c483db8...de0fac2e45)

---
updated-dependencies:
- dependency-name: actions/checkout
  dependency-version: 6.0.2
  dependency-type: direct:production
  update-type: version-update:semver-patch
...

Signed-off-by: dependabot[bot] <support@github.com>
2026-02-23 01:31:48 +00:00
dependabot[bot]
bcd3475d17 Bump fast-xml-parser and @aws-sdk/xml-builder
Bumps [fast-xml-parser](https://github.com/NaturalIntelligence/fast-xml-parser) and [@aws-sdk/xml-builder](https://github.com/aws/aws-sdk-js-v3/tree/HEAD/packages-internal/xml-builder). These dependencies needed to be updated together.

Updates `fast-xml-parser` from 5.3.4 to 5.3.6
- [Release notes](https://github.com/NaturalIntelligence/fast-xml-parser/releases)
- [Changelog](https://github.com/NaturalIntelligence/fast-xml-parser/blob/master/CHANGELOG.md)
- [Commits](https://github.com/NaturalIntelligence/fast-xml-parser/compare/v5.3.4...v5.3.6)

Updates `@aws-sdk/xml-builder` from 3.972.4 to 3.972.5
- [Release notes](https://github.com/aws/aws-sdk-js-v3/releases)
- [Changelog](https://github.com/aws/aws-sdk-js-v3/blob/main/packages-internal/xml-builder/CHANGELOG.md)
- [Commits](https://github.com/aws/aws-sdk-js-v3/commits/HEAD/packages-internal/xml-builder)

---
updated-dependencies:
- dependency-name: fast-xml-parser
  dependency-version: 5.3.6
  dependency-type: indirect
- dependency-name: "@aws-sdk/xml-builder"
  dependency-version: 3.972.5
  dependency-type: indirect
...

Signed-off-by: dependabot[bot] <support@github.com>
2026-02-21 20:18:36 +00:00
Laurence
e8398cb221 enhance(installer): use huh package to handle input
Instead of relying on stdin and stdout by default, using the huh package from charmbracelet allows us to handle user input more gracefully, such as y/n instead of typing 'yes' or 'no'. If a user makes a mistake while typing in a text field, they previously could not use the left or right arrow keys to edit a single character; with huh, they can. This adds a dependency and may increase the size of the installer, but overall improves the user experience.
2026-02-18 11:05:48 +00:00
Laurence
9460e28c7b enhance(installer): use ldflags to inject versions
Instead of the CI/CD using sed to replace the 'replaceme' text, we can instead use ldflags, which can inject variables into the versions at build time. The makefile had a bunch of workarounds for dev, so these have been removed as cleanup; it now fetches versions from the GitHub API directly if the variables are not injected the way the CI/CD does.
2026-02-18 09:43:41 +00:00
dependabot[bot]
d8b45396e3 Bump qs from 6.14.1 to 6.14.2
Bumps [qs](https://github.com/ljharb/qs) from 6.14.1 to 6.14.2.
- [Changelog](https://github.com/ljharb/qs/blob/main/CHANGELOG.md)
- [Commits](https://github.com/ljharb/qs/compare/v6.14.1...v6.14.2)

---
updated-dependencies:
- dependency-name: qs
  dependency-version: 6.14.2
  dependency-type: indirect
...

Signed-off-by: dependabot[bot] <support@github.com>
2026-02-18 05:08:23 +00:00
Rodney Osodo
952d0c74d0 refactor(install): use any for YAML map types instead of interface{}
Signed-off-by: Rodney Osodo <socials@rodneyosodo.com>
2026-02-17 10:45:06 +03:00
Rodney Osodo
ffbea7af59 fix(install): add error handling and minor code cleanups
Signed-off-by: Rodney Osodo <socials@rodneyosodo.com>
2026-02-17 10:45:06 +03:00
dependabot[bot]
971c375398 Bump docker/login-action from 3.6.0 to 3.7.0
Bumps [docker/login-action](https://github.com/docker/login-action) from 3.6.0 to 3.7.0.
- [Release notes](https://github.com/docker/login-action/releases)
- [Commits](5e57cd1181...c94ce9fb46)

---
updated-dependencies:
- dependency-name: docker/login-action
  dependency-version: 3.7.0
  dependency-type: direct:production
  update-type: version-update:semver-minor
...

Signed-off-by: dependabot[bot] <support@github.com>
2026-02-15 19:12:16 +00:00
dependabot[bot]
ac4439c5ae Bump aws-actions/configure-aws-credentials from 5 to 6
Bumps [aws-actions/configure-aws-credentials](https://github.com/aws-actions/configure-aws-credentials) from 5 to 6.
- [Release notes](https://github.com/aws-actions/configure-aws-credentials/releases)
- [Changelog](https://github.com/aws-actions/configure-aws-credentials/blob/main/CHANGELOG.md)
- [Commits](https://github.com/aws-actions/configure-aws-credentials/compare/v5...v6)

---
updated-dependencies:
- dependency-name: aws-actions/configure-aws-credentials
  dependency-version: '6'
  dependency-type: direct:production
  update-type: version-update:semver-major
...

Signed-off-by: dependabot[bot] <support@github.com>
2026-02-15 19:12:05 +00:00
dependabot[bot]
8c15855fc3 Bump lodash from 4.17.21 to 4.17.23
Bumps [lodash](https://github.com/lodash/lodash) from 4.17.21 to 4.17.23.
- [Release notes](https://github.com/lodash/lodash/releases)
- [Commits](https://github.com/lodash/lodash/compare/4.17.21...4.17.23)

---
updated-dependencies:
- dependency-name: lodash
  dependency-version: 4.17.23
  dependency-type: indirect
...

Signed-off-by: dependabot[bot] <support@github.com>
2026-01-26 01:37:38 +00:00
40 changed files with 1403 additions and 667 deletions

View File

@@ -29,7 +29,7 @@ jobs:
permissions: write-all
steps:
- name: Configure AWS credentials
uses: aws-actions/configure-aws-credentials@v5
uses: aws-actions/configure-aws-credentials@v6
with:
role-to-assume: arn:aws:iam::${{ secrets.AWS_ACCOUNT_ID }}:role/${{ secrets.AWS_ROLE_NAME }}
role-duration-seconds: 3600
@@ -62,7 +62,7 @@ jobs:
steps:
- name: Checkout code
uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
- name: Monitor storage space
run: |
@@ -77,7 +77,7 @@ jobs:
fi
- name: Log in to Docker Hub
uses: docker/login-action@5e57cd118135c172c3672efd75eb46360885c0ef # v3.6.0
uses: docker/login-action@c94ce9fb468520275223c153574b00df6fe4bcc9 # v3.7.0
with:
registry: docker.io
username: ${{ secrets.DOCKER_HUB_USERNAME }}
@@ -134,7 +134,7 @@ jobs:
steps:
- name: Checkout code
uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
- name: Monitor storage space
run: |
@@ -149,7 +149,7 @@ jobs:
fi
- name: Log in to Docker Hub
uses: docker/login-action@5e57cd118135c172c3672efd75eb46360885c0ef # v3.6.0
uses: docker/login-action@c94ce9fb468520275223c153574b00df6fe4bcc9 # v3.7.0
with:
registry: docker.io
username: ${{ secrets.DOCKER_HUB_USERNAME }}
@@ -201,10 +201,10 @@ jobs:
timeout-minutes: 30
steps:
- name: Checkout code
uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
- name: Log in to Docker Hub
uses: docker/login-action@5e57cd118135c172c3672efd75eb46360885c0ef # v3.6.0
uses: docker/login-action@c94ce9fb468520275223c153574b00df6fe4bcc9 # v3.7.0
with:
registry: docker.io
username: ${{ secrets.DOCKER_HUB_USERNAME }}
@@ -256,7 +256,7 @@ jobs:
steps:
- name: Checkout code
uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
- name: Extract tag name
id: get-tag
@@ -289,22 +289,14 @@ jobs:
echo "LATEST_BADGER_TAG=$LATEST_TAG" >> $GITHUB_ENV
shell: bash
- name: Update install/main.go
run: |
PANGOLIN_VERSION=${{ env.TAG }}
GERBIL_VERSION=${{ env.LATEST_GERBIL_TAG }}
BADGER_VERSION=${{ env.LATEST_BADGER_TAG }}
sed -i "s/config.PangolinVersion = \".*\"/config.PangolinVersion = \"$PANGOLIN_VERSION\"/" install/main.go
sed -i "s/config.GerbilVersion = \".*\"/config.GerbilVersion = \"$GERBIL_VERSION\"/" install/main.go
sed -i "s/config.BadgerVersion = \".*\"/config.BadgerVersion = \"$BADGER_VERSION\"/" install/main.go
echo "Updated install/main.go with Pangolin version $PANGOLIN_VERSION, Gerbil version $GERBIL_VERSION, and Badger version $BADGER_VERSION"
cat install/main.go
shell: bash
- name: Build installer
working-directory: install
run: |
make go-build-release
make go-build-release \
PANGOLIN_VERSION=${{ env.TAG }} \
GERBIL_VERSION=${{ env.LATEST_GERBIL_TAG }} \
BADGER_VERSION=${{ env.LATEST_BADGER_TAG }}
shell: bash
- name: Upload artifacts from /install/bin
uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6.0.0
@@ -415,7 +407,7 @@ jobs:
shell: bash
- name: Login to GitHub Container Registry (for cosign)
uses: docker/login-action@5e57cd118135c172c3672efd75eb46360885c0ef # v3.6.0
uses: docker/login-action@c94ce9fb468520275223c153574b00df6fe4bcc9 # v3.7.0
with:
registry: ghcr.io
username: ${{ github.actor }}
@@ -578,7 +570,7 @@ jobs:
permissions: write-all
steps:
- name: Configure AWS credentials
uses: aws-actions/configure-aws-credentials@v5
uses: aws-actions/configure-aws-credentials@v6
with:
role-to-assume: arn:aws:iam::${{ secrets.AWS_ACCOUNT_ID }}:role/${{ secrets.AWS_ROLE_NAME }}
role-duration-seconds: 3600

View File

@@ -21,7 +21,7 @@ jobs:
runs-on: ubuntu-latest
steps:
- name: Checkout code
uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
- name: Set up Node.js
uses: actions/setup-node@6044e13b5dc448c55e2357c09f80417699197238 # v6.2.0

View File

@@ -14,7 +14,7 @@ jobs:
permissions: write-all
steps:
- name: Configure AWS credentials
uses: aws-actions/configure-aws-credentials@v5
uses: aws-actions/configure-aws-credentials@v6
with:
role-to-assume: arn:aws:iam::${{ secrets.AWS_ACCOUNT_ID }}:role/${{ secrets.AWS_ROLE_NAME }}
role-duration-seconds: 3600

View File

@@ -23,7 +23,7 @@ jobs:
permissions: write-all
steps:
- name: Configure AWS credentials
uses: aws-actions/configure-aws-credentials@v5
uses: aws-actions/configure-aws-credentials@v6
with:
role-to-assume: arn:aws:iam::${{ secrets.AWS_ACCOUNT_ID }}:role/${{ secrets.AWS_ROLE_NAME }}
role-duration-seconds: 3600
@@ -54,7 +54,7 @@ jobs:
steps:
- name: Checkout code
uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
- name: Download MaxMind GeoLite2 databases
env:
@@ -104,7 +104,7 @@ jobs:
fi
- name: Configure AWS credentials
uses: aws-actions/configure-aws-credentials@v5
uses: aws-actions/configure-aws-credentials@v6
with:
role-to-assume: arn:aws:iam::${{ secrets.aws_account_id }}:role/${{ secrets.AWS_ROLE_NAME }}
role-duration-seconds: 3600
@@ -145,7 +145,7 @@ jobs:
permissions: write-all
steps:
- name: Configure AWS credentials
uses: aws-actions/configure-aws-credentials@v5
uses: aws-actions/configure-aws-credentials@v6
with:
role-to-assume: arn:aws:iam::${{ secrets.AWS_ACCOUNT_ID }}:role/${{ secrets.AWS_ROLE_NAME }}
role-duration-seconds: 3600

View File

@@ -14,7 +14,7 @@ jobs:
stale:
runs-on: ubuntu-latest
steps:
- uses: actions/stale@997185467fa4f803885201cee163a9f38240193d # v10.1.1
- uses: actions/stale@b5d41d4e1d5dceea10e7104786b73624c18a190f # v10.2.0
with:
days-before-stale: 14
days-before-close: 14

View File

@@ -14,7 +14,7 @@ jobs:
runs-on: ubuntu-latest
steps:
- name: Checkout repository
uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
- name: Install Node
uses: actions/setup-node@6044e13b5dc448c55e2357c09f80417699197238 # v6.2.0
@@ -62,7 +62,7 @@ jobs:
runs-on: ubuntu-latest
steps:
- name: Checkout repository
uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
- name: Build Docker image sqlite
run: make dev-build-sqlite
@@ -71,7 +71,7 @@ jobs:
runs-on: ubuntu-latest
steps:
- name: Checkout repository
uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
- name: Build Docker image pg
run: make dev-build-pg

View File

@@ -1,41 +1,24 @@
all: update-versions go-build-release put-back
dev-all: dev-update-versions dev-build dev-clean
all: go-build-release
# Build with version injection via ldflags
# Versions can be passed via: make go-build-release PANGOLIN_VERSION=x.x.x GERBIL_VERSION=x.x.x BADGER_VERSION=x.x.x
# Or fetched automatically if not provided (requires curl and jq)
PANGOLIN_VERSION ?= $(shell curl -s https://api.github.com/repos/fosrl/pangolin/tags | jq -r '.[0].name')
GERBIL_VERSION ?= $(shell curl -s https://api.github.com/repos/fosrl/gerbil/tags | jq -r '.[0].name')
BADGER_VERSION ?= $(shell curl -s https://api.github.com/repos/fosrl/badger/tags | jq -r '.[0].name')
LDFLAGS = -X main.pangolinVersion=$(PANGOLIN_VERSION) \
-X main.gerbilVersion=$(GERBIL_VERSION) \
-X main.badgerVersion=$(BADGER_VERSION)
go-build-release:
CGO_ENABLED=0 GOOS=linux GOARCH=amd64 go build -o bin/installer_linux_amd64
CGO_ENABLED=0 GOOS=linux GOARCH=arm64 go build -o bin/installer_linux_arm64
@echo "Building with versions - Pangolin: $(PANGOLIN_VERSION), Gerbil: $(GERBIL_VERSION), Badger: $(BADGER_VERSION)"
CGO_ENABLED=0 GOOS=linux GOARCH=amd64 go build -ldflags "$(LDFLAGS)" -o bin/installer_linux_amd64
CGO_ENABLED=0 GOOS=linux GOARCH=arm64 go build -ldflags "$(LDFLAGS)" -o bin/installer_linux_arm64
clean:
rm -f bin/installer_linux_amd64
rm -f bin/installer_linux_arm64
update-versions:
@echo "Fetching latest versions..."
cp main.go main.go.bak && \
$(MAKE) dev-update-versions
put-back:
mv main.go.bak main.go
dev-update-versions:
if [ -z "$(tag)" ]; then \
PANGOLIN_VERSION=$$(curl -s https://api.github.com/repos/fosrl/pangolin/tags | jq -r '.[0].name'); \
else \
PANGOLIN_VERSION=$(tag); \
fi && \
GERBIL_VERSION=$$(curl -s https://api.github.com/repos/fosrl/gerbil/tags | jq -r '.[0].name') && \
BADGER_VERSION=$$(curl -s https://api.github.com/repos/fosrl/badger/tags | jq -r '.[0].name') && \
echo "Latest versions - Pangolin: $$PANGOLIN_VERSION, Gerbil: $$GERBIL_VERSION, Badger: $$BADGER_VERSION" && \
sed -i "s/config.PangolinVersion = \".*\"/config.PangolinVersion = \"$$PANGOLIN_VERSION\"/" main.go && \
sed -i "s/config.GerbilVersion = \".*\"/config.GerbilVersion = \"$$GERBIL_VERSION\"/" main.go && \
sed -i "s/config.BadgerVersion = \".*\"/config.BadgerVersion = \"$$BADGER_VERSION\"/" main.go && \
echo "Updated main.go with latest versions"
dev-build: go-build-release
dev-clean:
@echo "Restoring version values ..."
sed -i "s/config.PangolinVersion = \".*\"/config.PangolinVersion = \"replaceme\"/" main.go && \
sed -i "s/config.GerbilVersion = \".*\"/config.GerbilVersion = \"replaceme\"/" main.go && \
sed -i "s/config.BadgerVersion = \".*\"/config.BadgerVersion = \"replaceme\"/" main.go
@echo "Restored version strings in main.go"
.PHONY: all go-build-release clean

View File

@@ -118,19 +118,19 @@ func copyDockerService(sourceFile, destFile, serviceName string) error {
}
// Parse source Docker Compose YAML
var sourceCompose map[string]interface{}
var sourceCompose map[string]any
if err := yaml.Unmarshal(sourceData, &sourceCompose); err != nil {
return fmt.Errorf("error parsing source Docker Compose file: %w", err)
}
// Parse destination Docker Compose YAML
var destCompose map[string]interface{}
var destCompose map[string]any
if err := yaml.Unmarshal(destData, &destCompose); err != nil {
return fmt.Errorf("error parsing destination Docker Compose file: %w", err)
}
// Get services section from source
sourceServices, ok := sourceCompose["services"].(map[string]interface{})
sourceServices, ok := sourceCompose["services"].(map[string]any)
if !ok {
return fmt.Errorf("services section not found in source file or has invalid format")
}
@@ -142,10 +142,10 @@ func copyDockerService(sourceFile, destFile, serviceName string) error {
}
// Get or create services section in destination
destServices, ok := destCompose["services"].(map[string]interface{})
destServices, ok := destCompose["services"].(map[string]any)
if !ok {
// If services section doesn't exist, create it
destServices = make(map[string]interface{})
destServices = make(map[string]any)
destCompose["services"] = destServices
}
@@ -187,13 +187,12 @@ func backupConfig() error {
return nil
}
func MarshalYAMLWithIndent(data interface{}, indent int) ([]byte, error) {
func MarshalYAMLWithIndent(data any, indent int) ([]byte, error) {
buffer := new(bytes.Buffer)
encoder := yaml.NewEncoder(buffer)
encoder.SetIndent(indent)
err := encoder.Encode(data)
if err != nil {
if err := encoder.Encode(data); err != nil {
return nil, err
}
@@ -209,7 +208,7 @@ func replaceInFile(filepath, oldStr, newStr string) error {
}
// Replace the string
newContent := strings.Replace(string(content), oldStr, newStr, -1)
newContent := strings.ReplaceAll(string(content), oldStr, newStr)
// Write the modified content back to the file
err = os.WriteFile(filepath, []byte(newContent), 0644)
@@ -228,28 +227,28 @@ func CheckAndAddTraefikLogVolume(composePath string) error {
}
// Parse YAML into a generic map
var compose map[string]interface{}
var compose map[string]any
if err := yaml.Unmarshal(data, &compose); err != nil {
return fmt.Errorf("error parsing compose file: %w", err)
}
// Get services section
services, ok := compose["services"].(map[string]interface{})
services, ok := compose["services"].(map[string]any)
if !ok {
return fmt.Errorf("services section not found or invalid")
}
// Get traefik service
traefik, ok := services["traefik"].(map[string]interface{})
traefik, ok := services["traefik"].(map[string]any)
if !ok {
return fmt.Errorf("traefik service not found or invalid")
}
// Check volumes
logVolume := "./config/traefik/logs:/var/log/traefik"
var volumes []interface{}
var volumes []any
if existingVolumes, ok := traefik["volumes"].([]interface{}); ok {
if existingVolumes, ok := traefik["volumes"].([]any); ok {
// Check if volume already exists
for _, v := range existingVolumes {
if v.(string) == logVolume {
@@ -295,13 +294,13 @@ func MergeYAML(baseFile, overlayFile string) error {
}
// Parse base YAML into a map
var baseMap map[string]interface{}
var baseMap map[string]any
if err := yaml.Unmarshal(baseContent, &baseMap); err != nil {
return fmt.Errorf("error parsing base YAML: %v", err)
}
// Parse overlay YAML into a map
var overlayMap map[string]interface{}
var overlayMap map[string]any
if err := yaml.Unmarshal(overlayContent, &overlayMap); err != nil {
return fmt.Errorf("error parsing overlay YAML: %v", err)
}
@@ -324,8 +323,8 @@ func MergeYAML(baseFile, overlayFile string) error {
}
// mergeMap recursively merges two maps
func mergeMap(base, overlay map[string]interface{}) map[string]interface{} {
result := make(map[string]interface{})
func mergeMap(base, overlay map[string]any) map[string]any {
result := make(map[string]any)
// Copy all key-values from base map
for k, v := range base {
@@ -336,8 +335,8 @@ func mergeMap(base, overlay map[string]interface{}) map[string]interface{} {
for k, v := range overlay {
// If both maps have the same key and both values are maps, merge recursively
if baseVal, ok := base[k]; ok {
if baseMap, isBaseMap := baseVal.(map[string]interface{}); isBaseMap {
if overlayMap, isOverlayMap := v.(map[string]interface{}); isOverlayMap {
if baseMap, isBaseMap := baseVal.(map[string]any); isBaseMap {
if overlayMap, isOverlayMap := v.(map[string]any); isOverlayMap {
result[k] = mergeMap(baseMap, overlayMap)
continue
}

View File

@@ -38,9 +38,7 @@ services:
image: docker.io/traefik:v3.6
container_name: traefik
restart: unless-stopped
{{if .InstallGerbil}}
network_mode: service:gerbil # Ports appear on the gerbil service
{{end}}{{if not .InstallGerbil}}
{{if .InstallGerbil}} network_mode: service:gerbil # Ports appear on the gerbil service{{end}}{{if not .InstallGerbil}}
ports:
- 443:443
- 80:80

View File

@@ -144,12 +144,13 @@ func installDocker() error {
}
func startDockerService() error {
if runtime.GOOS == "linux" {
switch runtime.GOOS {
case "linux":
cmd := exec.Command("systemctl", "enable", "--now", "docker")
cmd.Stdout = os.Stdout
cmd.Stderr = os.Stderr
return cmd.Run()
} else if runtime.GOOS == "darwin" {
case "darwin":
// On macOS, Docker is usually started via the Docker Desktop application
fmt.Println("Please start Docker Desktop manually on macOS.")
return nil
@@ -302,7 +303,7 @@ func pullContainers(containerType SupportedContainer) error {
return nil
}
return fmt.Errorf("Unsupported container type: %s", containerType)
return fmt.Errorf("unsupported container type: %s", containerType)
}
// startContainers starts the containers using the appropriate command.
@@ -325,7 +326,7 @@ func startContainers(containerType SupportedContainer) error {
return nil
}
return fmt.Errorf("Unsupported container type: %s", containerType)
return fmt.Errorf("unsupported container type: %s", containerType)
}
// stopContainers stops the containers using the appropriate command.
@@ -347,7 +348,7 @@ func stopContainers(containerType SupportedContainer) error {
return nil
}
return fmt.Errorf("Unsupported container type: %s", containerType)
return fmt.Errorf("unsupported container type: %s", containerType)
}
// restartContainer restarts a specific container using the appropriate command.
@@ -369,5 +370,5 @@ func restartContainer(container string, containerType SupportedContainer) error
return nil
}
return fmt.Errorf("Unsupported container type: %s", containerType)
return fmt.Errorf("unsupported container type: %s", containerType)
}

View File

@@ -27,9 +27,18 @@ func installCrowdsec(config Config) error {
os.Exit(1)
}
os.MkdirAll("config/crowdsec/db", 0755)
os.MkdirAll("config/crowdsec/acquis.d", 0755)
os.MkdirAll("config/traefik/logs", 0755)
if err := os.MkdirAll("config/crowdsec/db", 0755); err != nil {
fmt.Printf("Error creating config files: %v\n", err)
os.Exit(1)
}
if err := os.MkdirAll("config/crowdsec/acquis.d", 0755); err != nil {
fmt.Printf("Error creating config files: %v\n", err)
os.Exit(1)
}
if err := os.MkdirAll("config/traefik/logs", 0755); err != nil {
fmt.Printf("Error creating config files: %v\n", err)
os.Exit(1)
}
if err := copyDockerService("config/crowdsec/docker-compose.yml", "docker-compose.yml", "crowdsec"); err != nil {
fmt.Printf("Error copying docker service: %v\n", err)
@@ -153,34 +162,34 @@ func CheckAndAddCrowdsecDependency(composePath string) error {
}
// Parse YAML into a generic map
var compose map[string]interface{}
var compose map[string]any
if err := yaml.Unmarshal(data, &compose); err != nil {
return fmt.Errorf("error parsing compose file: %w", err)
}
// Get services section
services, ok := compose["services"].(map[string]interface{})
services, ok := compose["services"].(map[string]any)
if !ok {
return fmt.Errorf("services section not found or invalid")
}
// Get traefik service
traefik, ok := services["traefik"].(map[string]interface{})
traefik, ok := services["traefik"].(map[string]any)
if !ok {
return fmt.Errorf("traefik service not found or invalid")
}
// Get dependencies
dependsOn, ok := traefik["depends_on"].(map[string]interface{})
dependsOn, ok := traefik["depends_on"].(map[string]any)
if ok {
// Append the new block for crowdsec
dependsOn["crowdsec"] = map[string]interface{}{
dependsOn["crowdsec"] = map[string]any{
"condition": "service_healthy",
}
} else {
// No dependencies exist, create it
traefik["depends_on"] = map[string]interface{}{
"crowdsec": map[string]interface{}{
traefik["depends_on"] = map[string]any{
"crowdsec": map[string]any{
"condition": "service_healthy",
},
}

View File

@@ -3,8 +3,36 @@ module installer
go 1.24.0
require (
golang.org/x/term v0.39.0
github.com/charmbracelet/huh v0.8.0
github.com/charmbracelet/lipgloss v1.1.0
golang.org/x/term v0.40.0
gopkg.in/yaml.v3 v3.0.1
)
require golang.org/x/sys v0.40.0 // indirect
require (
github.com/atotto/clipboard v0.1.4 // indirect
github.com/aymanbagabas/go-osc52/v2 v2.0.1 // indirect
github.com/catppuccin/go v0.3.0 // indirect
github.com/charmbracelet/bubbles v0.21.1-0.20250623103423-23b8fd6302d7 // indirect
github.com/charmbracelet/bubbletea v1.3.6 // indirect
github.com/charmbracelet/colorprofile v0.2.3-0.20250311203215-f60798e515dc // indirect
github.com/charmbracelet/x/ansi v0.9.3 // indirect
github.com/charmbracelet/x/cellbuf v0.0.13 // indirect
github.com/charmbracelet/x/exp/strings v0.0.0-20240722160745-212f7b056ed0 // indirect
github.com/charmbracelet/x/term v0.2.1 // indirect
github.com/dustin/go-humanize v1.0.1 // indirect
github.com/erikgeiser/coninput v0.0.0-20211004153227-1c3628e74d0f // indirect
github.com/lucasb-eyer/go-colorful v1.2.0 // indirect
github.com/mattn/go-isatty v0.0.20 // indirect
github.com/mattn/go-localereader v0.0.1 // indirect
github.com/mattn/go-runewidth v0.0.16 // indirect
github.com/mitchellh/hashstructure/v2 v2.0.2 // indirect
github.com/muesli/ansi v0.0.0-20230316100256-276c6243b2f6 // indirect
github.com/muesli/cancelreader v0.2.2 // indirect
github.com/muesli/termenv v0.16.0 // indirect
github.com/rivo/uniseg v0.4.7 // indirect
github.com/xo/terminfo v0.0.0-20220910002029-abceb7e1c41e // indirect
golang.org/x/sync v0.15.0 // indirect
golang.org/x/sys v0.41.0 // indirect
golang.org/x/text v0.23.0 // indirect
)

View File

@@ -1,7 +1,86 @@
golang.org/x/sys v0.40.0 h1:DBZZqJ2Rkml6QMQsZywtnjnnGvHza6BTfYFWY9kjEWQ=
golang.org/x/sys v0.40.0/go.mod h1:OgkHotnGiDImocRcuBABYBEXf8A9a87e/uXjp9XT3ks=
golang.org/x/term v0.39.0 h1:RclSuaJf32jOqZz74CkPA9qFuVTX7vhLlpfj/IGWlqY=
golang.org/x/term v0.39.0/go.mod h1:yxzUCTP/U+FzoxfdKmLaA0RV1WgE0VY7hXBwKtY/4ww=
<<<<<<< HEAD
github.com/MakeNowJust/heredoc v1.0.0 h1:cXCdzVdstXyiTqTvfqk9SDHpKNjxuom+DOlyEeQ4pzQ=
github.com/MakeNowJust/heredoc v1.0.0/go.mod h1:mG5amYoWBHf8vpLOuehzbGGw0EHxpZZ6lCpQ4fNJ8LE=
github.com/atotto/clipboard v0.1.4 h1:EH0zSVneZPSuFR11BlR9YppQTVDbh5+16AmcJi4g1z4=
github.com/atotto/clipboard v0.1.4/go.mod h1:ZY9tmq7sm5xIbd9bOK4onWV4S6X0u6GY7Vn0Yu86PYI=
github.com/aymanbagabas/go-osc52/v2 v2.0.1 h1:HwpRHbFMcZLEVr42D4p7XBqjyuxQH5SMiErDT4WkJ2k=
github.com/aymanbagabas/go-osc52/v2 v2.0.1/go.mod h1:uYgXzlJ7ZpABp8OJ+exZzJJhRNQ2ASbcXHWsFqH8hp8=
github.com/aymanbagabas/go-udiff v0.3.1 h1:LV+qyBQ2pqe0u42ZsUEtPiCaUoqgA9gYRDs3vj1nolY=
github.com/aymanbagabas/go-udiff v0.3.1/go.mod h1:G0fsKmG+P6ylD0r6N/KgQD/nWzgfnl8ZBcNLgcbrw8E=
github.com/catppuccin/go v0.3.0 h1:d+0/YicIq+hSTo5oPuRi5kOpqkVA5tAsU6dNhvRu+aY=
github.com/catppuccin/go v0.3.0/go.mod h1:8IHJuMGaUUjQM82qBrGNBv7LFq6JI3NnQCF6MOlZjpc=
github.com/charmbracelet/bubbles v0.21.1-0.20250623103423-23b8fd6302d7 h1:JFgG/xnwFfbezlUnFMJy0nusZvytYysV4SCS2cYbvws=
github.com/charmbracelet/bubbles v0.21.1-0.20250623103423-23b8fd6302d7/go.mod h1:ISC1gtLcVilLOf23wvTfoQuYbW2q0JevFxPfUzZ9Ybw=
github.com/charmbracelet/bubbletea v1.3.6 h1:VkHIxPJQeDt0aFJIsVxw8BQdh/F/L2KKZGsK6et5taU=
github.com/charmbracelet/bubbletea v1.3.6/go.mod h1:oQD9VCRQFF8KplacJLo28/jofOI2ToOfGYeFgBBxHOc=
github.com/charmbracelet/colorprofile v0.2.3-0.20250311203215-f60798e515dc h1:4pZI35227imm7yK2bGPcfpFEmuY1gc2YSTShr4iJBfs=
github.com/charmbracelet/colorprofile v0.2.3-0.20250311203215-f60798e515dc/go.mod h1:X4/0JoqgTIPSFcRA/P6INZzIuyqdFY5rm8tb41s9okk=
github.com/charmbracelet/huh v0.8.0 h1:Xz/Pm2h64cXQZn/Jvele4J3r7DDiqFCNIVteYukxDvY=
github.com/charmbracelet/huh v0.8.0/go.mod h1:5YVc+SlZ1IhQALxRPpkGwwEKftN/+OlJlnJYlDRFqN4=
github.com/charmbracelet/lipgloss v1.1.0 h1:vYXsiLHVkK7fp74RkV7b2kq9+zDLoEU4MZoFqR/noCY=
github.com/charmbracelet/lipgloss v1.1.0/go.mod h1:/6Q8FR2o+kj8rz4Dq0zQc3vYf7X+B0binUUBwA0aL30=
github.com/charmbracelet/x/ansi v0.9.3 h1:BXt5DHS/MKF+LjuK4huWrC6NCvHtexww7dMayh6GXd0=
github.com/charmbracelet/x/ansi v0.9.3/go.mod h1:3RQDQ6lDnROptfpWuUVIUG64bD2g2BgntdxH0Ya5TeE=
github.com/charmbracelet/x/cellbuf v0.0.13 h1:/KBBKHuVRbq1lYx5BzEHBAFBP8VcQzJejZ/IA3iR28k=
github.com/charmbracelet/x/cellbuf v0.0.13/go.mod h1:xe0nKWGd3eJgtqZRaN9RjMtK7xUYchjzPr7q6kcvCCs=
github.com/charmbracelet/x/conpty v0.1.0 h1:4zc8KaIcbiL4mghEON8D72agYtSeIgq8FSThSPQIb+U=
github.com/charmbracelet/x/conpty v0.1.0/go.mod h1:rMFsDJoDwVmiYM10aD4bH2XiRgwI7NYJtQgl5yskjEQ=
github.com/charmbracelet/x/errors v0.0.0-20240508181413-e8d8b6e2de86 h1:JSt3B+U9iqk37QUU2Rvb6DSBYRLtWqFqfxf8l5hOZUA=
github.com/charmbracelet/x/errors v0.0.0-20240508181413-e8d8b6e2de86/go.mod h1:2P0UgXMEa6TsToMSuFqKFQR+fZTO9CNGUNokkPatT/0=
github.com/charmbracelet/x/exp/golden v0.0.0-20241011142426-46044092ad91 h1:payRxjMjKgx2PaCWLZ4p3ro9y97+TVLZNaRZgJwSVDQ=
github.com/charmbracelet/x/exp/golden v0.0.0-20241011142426-46044092ad91/go.mod h1:wDlXFlCrmJ8J+swcL/MnGUuYnqgQdW9rhSD61oNMb6U=
github.com/charmbracelet/x/exp/strings v0.0.0-20240722160745-212f7b056ed0 h1:qko3AQ4gK1MTS/de7F5hPGx6/k1u0w4TeYmBFwzYVP4=
github.com/charmbracelet/x/exp/strings v0.0.0-20240722160745-212f7b056ed0/go.mod h1:pBhA0ybfXv6hDjQUZ7hk1lVxBiUbupdw5R31yPUViVQ=
github.com/charmbracelet/x/term v0.2.1 h1:AQeHeLZ1OqSXhrAWpYUtZyX1T3zVxfpZuEQMIQaGIAQ=
github.com/charmbracelet/x/term v0.2.1/go.mod h1:oQ4enTYFV7QN4m0i9mzHrViD7TQKvNEEkHUMCmsxdUg=
github.com/charmbracelet/x/termios v0.1.1 h1:o3Q2bT8eqzGnGPOYheoYS8eEleT5ZVNYNy8JawjaNZY=
github.com/charmbracelet/x/termios v0.1.1/go.mod h1:rB7fnv1TgOPOyyKRJ9o+AsTU/vK5WHJ2ivHeut/Pcwo=
github.com/charmbracelet/x/xpty v0.1.2 h1:Pqmu4TEJ8KeA9uSkISKMU3f+C1F6OGBn8ABuGlqCbtI=
github.com/charmbracelet/x/xpty v0.1.2/go.mod h1:XK2Z0id5rtLWcpeNiMYBccNNBrP2IJnzHI0Lq13Xzq4=
github.com/creack/pty v1.1.24 h1:bJrF4RRfyJnbTJqzRLHzcGaZK1NeM5kTC9jGgovnR1s=
github.com/creack/pty v1.1.24/go.mod h1:08sCNb52WyoAwi2QDyzUCTgcvVFhUzewun7wtTfvcwE=
github.com/dustin/go-humanize v1.0.1 h1:GzkhY7T5VNhEkwH0PVJgjz+fX1rhBrR7pRT3mDkpeCY=
github.com/dustin/go-humanize v1.0.1/go.mod h1:Mu1zIs6XwVuF/gI1OepvI0qD18qycQx+mFykh5fBlto=
github.com/erikgeiser/coninput v0.0.0-20211004153227-1c3628e74d0f h1:Y/CXytFA4m6baUTXGLOoWe4PQhGxaX0KpnayAqC48p4=
github.com/erikgeiser/coninput v0.0.0-20211004153227-1c3628e74d0f/go.mod h1:vw97MGsxSvLiUE2X8qFplwetxpGLQrlU1Q9AUEIzCaM=
github.com/lucasb-eyer/go-colorful v1.2.0 h1:1nnpGOrhyZZuNyfu1QjKiUICQ74+3FNCN69Aj6K7nkY=
github.com/lucasb-eyer/go-colorful v1.2.0/go.mod h1:R4dSotOR9KMtayYi1e77YzuveK+i7ruzyGqttikkLy0=
github.com/mattn/go-isatty v0.0.20 h1:xfD0iDuEKnDkl03q4limB+vH+GxLEtL/jb4xVJSWWEY=
github.com/mattn/go-isatty v0.0.20/go.mod h1:W+V8PltTTMOvKvAeJH7IuucS94S2C6jfK/D7dTCTo3Y=
github.com/mattn/go-localereader v0.0.1 h1:ygSAOl7ZXTx4RdPYinUpg6W99U8jWvWi9Ye2JC/oIi4=
github.com/mattn/go-localereader v0.0.1/go.mod h1:8fBrzywKY7BI3czFoHkuzRoWE9C+EiG4R1k4Cjx5p88=
github.com/mattn/go-runewidth v0.0.16 h1:E5ScNMtiwvlvB5paMFdw9p4kSQzbXFikJ5SQO6TULQc=
github.com/mattn/go-runewidth v0.0.16/go.mod h1:Jdepj2loyihRzMpdS35Xk/zdY8IAYHsh153qUoGf23w=
github.com/mitchellh/hashstructure/v2 v2.0.2 h1:vGKWl0YJqUNxE8d+h8f6NJLcCJrgbhC4NcD46KavDd4=
github.com/mitchellh/hashstructure/v2 v2.0.2/go.mod h1:MG3aRVU/N29oo/V/IhBX8GR/zz4kQkprJgF2EVszyDE=
github.com/muesli/ansi v0.0.0-20230316100256-276c6243b2f6 h1:ZK8zHtRHOkbHy6Mmr5D264iyp3TiX5OmNcI5cIARiQI=
github.com/muesli/ansi v0.0.0-20230316100256-276c6243b2f6/go.mod h1:CJlz5H+gyd6CUWT45Oy4q24RdLyn7Md9Vj2/ldJBSIo=
github.com/muesli/cancelreader v0.2.2 h1:3I4Kt4BQjOR54NavqnDogx/MIoWBFa0StPA8ELUXHmA=
github.com/muesli/cancelreader v0.2.2/go.mod h1:3XuTXfFS2VjM+HTLZY9Ak0l6eUKfijIfMUZ4EgX0QYo=
github.com/muesli/termenv v0.16.0 h1:S5AlUN9dENB57rsbnkPyfdGuWIlkmzJjbFf0Tf5FWUc=
github.com/muesli/termenv v0.16.0/go.mod h1:ZRfOIKPFDYQoDFF4Olj7/QJbW60Ol/kL1pU3VfY/Cnk=
github.com/rivo/uniseg v0.2.0/go.mod h1:J6wj4VEh+S6ZtnVlnTBMWIodfgj8LQOQFoIToxlJtxc=
github.com/rivo/uniseg v0.4.7 h1:WUdvkW8uEhrYfLC4ZzdpI2ztxP1I582+49Oc5Mq64VQ=
github.com/rivo/uniseg v0.4.7/go.mod h1:FN3SvrM+Zdj16jyLfmOkMNblXMcoc8DfTHruCPUcx88=
github.com/xo/terminfo v0.0.0-20220910002029-abceb7e1c41e h1:JVG44RsyaB9T2KIHavMF/ppJZNG9ZpyihvCd0w101no=
github.com/xo/terminfo v0.0.0-20220910002029-abceb7e1c41e/go.mod h1:RbqR21r5mrJuqunuUZ/Dhy/avygyECGrLceyNeo4LiM=
golang.org/x/exp v0.0.0-20231006140011-7918f672742d h1:jtJma62tbqLibJ5sFQz8bKtEM8rJBtfilJ2qTU199MI=
golang.org/x/exp v0.0.0-20231006140011-7918f672742d/go.mod h1:ldy0pHrwJyGW56pPQzzkH36rKxoZW1tw7ZJpeKx+hdo=
golang.org/x/sync v0.15.0 h1:KWH3jNZsfyT6xfAfKiz6MRNmd46ByHDYaZ7KSkCtdW8=
golang.org/x/sync v0.15.0/go.mod h1:1dzgHSNfp02xaA81J2MS99Qcpr2w7fw1gpm99rleRqA=
golang.org/x/sys v0.0.0-20210809222454-d867a43fc93e/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.6.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
=======
>>>>>>> main
golang.org/x/sys v0.41.0 h1:Ivj+2Cp/ylzLiEU89QhWblYnOE9zerudt9Ftecq2C6k=
golang.org/x/sys v0.41.0/go.mod h1:OgkHotnGiDImocRcuBABYBEXf8A9a87e/uXjp9XT3ks=
golang.org/x/term v0.40.0 h1:36e4zGLqU4yhjlmxEaagx2KuYbJq3EwY8K943ZsHcvg=
golang.org/x/term v0.40.0/go.mod h1:w2P8uVp06p2iyKKuvXIm7N/y0UCRt3UfJTfZ7oOpglM=
<<<<<<< HEAD
golang.org/x/text v0.23.0 h1:D71I7dUrlY+VX0gQShAThNGHFxZ13dGLBHQLVl1mJlY=
golang.org/x/text v0.23.0/go.mod h1:/BLNzu4aZCJ1+kcD0DNRotWKage4q2rGVAg4o22unh4=
=======
>>>>>>> main
gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405 h1:yhCVgyC4o1eVCa2tZl7eS0r+SDo693bJlVdllGtEeKM=
gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
gopkg.in/yaml.v3 v3.0.1 h1:fxVm/GzAzEWqLHuvctI91KS9hhNmmWOoWu0XTYJS7CA=

View File

@@ -1,92 +1,235 @@
package main
import (
"bufio"
"errors"
"fmt"
"strings"
"syscall"
"os"
"strconv"
"github.com/charmbracelet/huh"
"golang.org/x/term"
)
func readString(reader *bufio.Reader, prompt string, defaultValue string) string {
// pangolinTheme is the custom theme using brand colors
var pangolinTheme = ThemePangolin()
// isAccessibleMode checks if we should use accessible mode (simple prompts)
// This is true for: non-TTY, TERM=dumb, or ACCESSIBLE env var set
func isAccessibleMode() bool {
// Check if stdin is not a terminal (piped input, CI, etc.)
if !term.IsTerminal(int(os.Stdin.Fd())) {
return true
}
// Check for dumb terminal
if os.Getenv("TERM") == "dumb" {
return true
}
// Check for explicit accessible mode request
if os.Getenv("ACCESSIBLE") != "" {
return true
}
return false
}
// handleAbort checks if the error is a user abort (Ctrl+C) and exits if so
func handleAbort(err error) {
if err != nil && errors.Is(err, huh.ErrUserAborted) {
fmt.Println("\nInstallation cancelled.")
os.Exit(0)
}
}
// runField runs a single field with the Pangolin theme, handling accessible mode
func runField(field huh.Field) error {
if isAccessibleMode() {
return field.RunAccessible(os.Stdout, os.Stdin)
}
form := huh.NewForm(huh.NewGroup(field)).WithTheme(pangolinTheme)
return form.Run()
}
func readString(prompt string, defaultValue string) string {
var value string
title := prompt
if defaultValue != "" {
fmt.Printf("%s (default: %s): ", prompt, defaultValue)
} else {
fmt.Print(prompt + ": ")
title = fmt.Sprintf("%s (default: %s)", prompt, defaultValue)
}
input, _ := reader.ReadString('\n')
input = strings.TrimSpace(input)
if input == "" {
return defaultValue
}
return input
}
func readStringNoDefault(reader *bufio.Reader, prompt string) string {
fmt.Print(prompt + ": ")
input, _ := reader.ReadString('\n')
return strings.TrimSpace(input)
}
input := huh.NewInput().
Title(title).
Value(&value)
func readPassword(prompt string, reader *bufio.Reader) string {
if term.IsTerminal(int(syscall.Stdin)) {
fmt.Print(prompt + ": ")
// Read password without echo if we're in a terminal
password, err := term.ReadPassword(int(syscall.Stdin))
fmt.Println() // Add a newline since ReadPassword doesn't add one
if err != nil {
return ""
}
input := strings.TrimSpace(string(password))
if input == "" {
return readPassword(prompt, reader)
}
return input
} else {
// Fallback to reading from stdin if not in a terminal
return readString(reader, prompt, "")
// If no default value, this field is required
if defaultValue == "" {
input = input.Validate(func(s string) error {
if s == "" {
return fmt.Errorf("this field is required")
}
return nil
})
}
}
func readBool(reader *bufio.Reader, prompt string, defaultValue bool) bool {
defaultStr := "no"
if defaultValue {
defaultStr = "yes"
}
for {
input := readString(reader, prompt+" (yes/no)", defaultStr)
lower := strings.ToLower(input)
if lower == "yes" {
return true
} else if lower == "no" {
return false
} else {
fmt.Println("Please enter 'yes' or 'no'.")
}
}
}
err := runField(input)
handleAbort(err)
func readBoolNoDefault(reader *bufio.Reader, prompt string) bool {
for {
input := readStringNoDefault(reader, prompt+" (yes/no)")
lower := strings.ToLower(input)
if lower == "yes" {
return true
} else if lower == "no" {
return false
} else {
fmt.Println("Please enter 'yes' or 'no'.")
}
if value == "" {
value = defaultValue
}
}
func readInt(reader *bufio.Reader, prompt string, defaultValue int) int {
input := readString(reader, prompt, fmt.Sprintf("%d", defaultValue))
if input == "" {
return defaultValue
// Print the answer so it remains visible in terminal history (skip in accessible mode as it already shows)
if !isAccessibleMode() {
fmt.Printf("%s: %s\n", prompt, value)
}
value := defaultValue
fmt.Sscanf(input, "%d", &value)
return value
}
func readStringNoDefault(prompt string) string {
var value string
for {
input := huh.NewInput().
Title(prompt).
Value(&value).
Validate(func(s string) error {
if s == "" {
return fmt.Errorf("this field is required")
}
return nil
})
err := runField(input)
handleAbort(err)
if value != "" {
// Print the answer so it remains visible in terminal history
if !isAccessibleMode() {
fmt.Printf("%s: %s\n", prompt, value)
}
return value
}
}
}
func readPassword(prompt string) string {
var value string
for {
input := huh.NewInput().
Title(prompt).
Value(&value).
EchoMode(huh.EchoModePassword).
Validate(func(s string) error {
if s == "" {
return fmt.Errorf("password is required")
}
return nil
})
err := runField(input)
handleAbort(err)
if value != "" {
// Print confirmation without revealing the password
if !isAccessibleMode() {
fmt.Printf("%s: %s\n", prompt, "********")
}
return value
}
}
}
func readBool(prompt string, defaultValue bool) bool {
var value = defaultValue
confirm := huh.NewConfirm().
Title(prompt).
Value(&value).
Affirmative("Yes").
Negative("No")
err := runField(confirm)
handleAbort(err)
// Print the answer so it remains visible in terminal history
if !isAccessibleMode() {
answer := "No"
if value {
answer = "Yes"
}
fmt.Printf("%s: %s\n", prompt, answer)
}
return value
}
func readBoolNoDefault(prompt string) bool {
var value bool
confirm := huh.NewConfirm().
Title(prompt).
Value(&value).
Affirmative("Yes").
Negative("No")
err := runField(confirm)
handleAbort(err)
// Print the answer so it remains visible in terminal history
if !isAccessibleMode() {
answer := "No"
if value {
answer = "Yes"
}
fmt.Printf("%s: %s\n", prompt, answer)
}
return value
}
func readInt(prompt string, defaultValue int) int {
var value string
title := fmt.Sprintf("%s (default: %d)", prompt, defaultValue)
input := huh.NewInput().
Title(title).
Value(&value).
Validate(func(s string) error {
if s == "" {
return nil
}
_, err := strconv.Atoi(s)
if err != nil {
return fmt.Errorf("please enter a valid number")
}
return nil
})
err := runField(input)
handleAbort(err)
if value == "" {
// Print the answer so it remains visible in terminal history
if !isAccessibleMode() {
fmt.Printf("%s: %d\n", prompt, defaultValue)
}
return defaultValue
}
result, err := strconv.Atoi(value)
if err != nil {
if !isAccessibleMode() {
fmt.Printf("%s: %d\n", prompt, defaultValue)
}
return defaultValue
}
// Print the answer so it remains visible in terminal history
if !isAccessibleMode() {
fmt.Printf("%s: %d\n", prompt, result)
}
return result
}

View File

@@ -1,13 +1,12 @@
package main
import (
"bufio"
"crypto/rand"
"embed"
"encoding/base64"
"fmt"
"io"
"io/fs"
"crypto/rand"
"encoding/base64"
"net"
"net/http"
"net/url"
@@ -20,11 +19,17 @@ import (
"time"
)
// DO NOT EDIT THIS FUNCTION; IT MATCHED BY REGEX IN CICD
// Version variables injected at build time via -ldflags
var (
pangolinVersion string
gerbilVersion string
badgerVersion string
)
func loadVersions(config *Config) {
config.PangolinVersion = "replaceme"
config.GerbilVersion = "replaceme"
config.BadgerVersion = "replaceme"
config.PangolinVersion = pangolinVersion
config.GerbilVersion = gerbilVersion
config.BadgerVersion = badgerVersion
}
//go:embed config/*
@@ -82,14 +87,12 @@ func main() {
}
}
reader := bufio.NewReader(os.Stdin)
var config Config
var alreadyInstalled = false
// check if there is already a config file
if _, err := os.Stat("config/config.yml"); err != nil {
config = collectUserInput(reader)
config = collectUserInput()
loadVersions(&config)
config.DoCrowdsecInstall = false
@@ -102,7 +105,10 @@ func main() {
os.Exit(1)
}
moveFile("config/docker-compose.yml", "docker-compose.yml")
if err := moveFile("config/docker-compose.yml", "docker-compose.yml"); err != nil {
fmt.Printf("Error moving docker-compose.yml: %v\n", err)
os.Exit(1)
}
fmt.Println("\nConfiguration files created successfully!")
@@ -117,13 +123,17 @@ func main() {
fmt.Println("\n=== Starting installation ===")
if readBool(reader, "Would you like to install and start the containers?", true) {
if readBool("Would you like to install and start the containers?", true) {
config.InstallationContainerType = podmanOrDocker(reader)
config.InstallationContainerType = podmanOrDocker()
if !isDockerInstalled() && runtime.GOOS == "linux" && config.InstallationContainerType == Docker {
if readBool(reader, "Docker is not installed. Would you like to install it?", true) {
installDocker()
if readBool("Docker is not installed. Would you like to install it?", true) {
if err := installDocker(); err != nil {
fmt.Printf("Error installing Docker: %v\n", err)
return
}
// try to start docker service but ignore errors
if err := startDockerService(); err != nil {
fmt.Println("Error starting Docker service:", err)
@@ -132,7 +142,7 @@ func main() {
}
// wait 10 seconds for docker to start checking if docker is running every 2 seconds
fmt.Println("Waiting for Docker to start...")
for i := 0; i < 5; i++ {
for range 5 {
if isDockerRunning() {
fmt.Println("Docker is running!")
break
@@ -167,7 +177,7 @@ func main() {
fmt.Println("\n=== MaxMind Database Update ===")
if _, err := os.Stat("config/GeoLite2-Country.mmdb"); err == nil {
fmt.Println("MaxMind GeoLite2 Country database found.")
if readBool(reader, "Would you like to update the MaxMind database to the latest version?", false) {
if readBool("Would you like to update the MaxMind database to the latest version?", false) {
if err := downloadMaxMindDatabase(); err != nil {
fmt.Printf("Error updating MaxMind database: %v\n", err)
fmt.Println("You can try updating it manually later if needed.")
@@ -175,7 +185,7 @@ func main() {
}
} else {
fmt.Println("MaxMind GeoLite2 Country database not found.")
if readBool(reader, "Would you like to download the MaxMind GeoLite2 database for geoblocking functionality?", false) {
if readBool("Would you like to download the MaxMind GeoLite2 database for geoblocking functionality?", false) {
if err := downloadMaxMindDatabase(); err != nil {
fmt.Printf("Error downloading MaxMind database: %v\n", err)
fmt.Println("You can try downloading it manually later if needed.")
@@ -192,11 +202,11 @@ func main() {
if !checkIsCrowdsecInstalledInCompose() {
fmt.Println("\n=== CrowdSec Install ===")
// check if crowdsec is installed
if readBool(reader, "Would you like to install CrowdSec?", false) {
if readBool("Would you like to install CrowdSec?", false) {
fmt.Println("This installer constitutes a minimal viable CrowdSec deployment. CrowdSec will add extra complexity to your Pangolin installation and may not work to the best of its abilities out of the box. Users are expected to implement configuration adjustments on their own to achieve the best security posture. Consult the CrowdSec documentation for detailed configuration instructions.")
// BUG: crowdsec installation will be skipped if the user chooses to install on the first installation.
if readBool(reader, "Are you willing to manage CrowdSec?", false) {
if readBool("Are you willing to manage CrowdSec?", false) {
if config.DashboardDomain == "" {
traefikConfig, err := ReadTraefikConfig("config/traefik/traefik_config.yml")
if err != nil {
@@ -225,8 +235,8 @@ func main() {
fmt.Printf("Let's Encrypt Email: %s\n", config.LetsEncryptEmail)
fmt.Printf("Badger Version: %s\n", config.BadgerVersion)
if !readBool(reader, "Are these values correct?", true) {
config = collectUserInput(reader)
if !readBool("Are these values correct?", true) {
config = collectUserInput()
}
}
@@ -235,7 +245,7 @@ func main() {
if detectedType == Undefined {
// If detection fails, prompt the user
fmt.Println("Unable to detect container type from existing installation.")
config.InstallationContainerType = podmanOrDocker(reader)
config.InstallationContainerType = podmanOrDocker()
} else {
config.InstallationContainerType = detectedType
fmt.Printf("Detected container type: %s\n", config.InstallationContainerType)
@@ -277,8 +287,8 @@ func main() {
fmt.Printf("\nTo complete the initial setup, please visit:\nhttps://%s/auth/initial-setup\n", config.DashboardDomain)
}
func podmanOrDocker(reader *bufio.Reader) SupportedContainer {
inputContainer := readString(reader, "Would you like to run Pangolin as Docker or Podman containers?", "docker")
func podmanOrDocker() SupportedContainer {
inputContainer := readString("Would you like to run Pangolin as Docker or Podman containers?", "docker")
chosenContainer := Docker
if strings.EqualFold(inputContainer, "docker") {
@@ -290,7 +300,8 @@ func podmanOrDocker(reader *bufio.Reader) SupportedContainer {
os.Exit(1)
}
if chosenContainer == Podman {
switch chosenContainer {
case Podman:
if !isPodmanInstalled() {
fmt.Println("Podman or podman-compose is not installed. Please install both manually. Automated installation will be available in a later release.")
os.Exit(1)
@@ -299,7 +310,7 @@ func podmanOrDocker(reader *bufio.Reader) SupportedContainer {
if err := exec.Command("bash", "-c", "cat /etc/sysctl.d/99-podman.conf 2>/dev/null | grep 'net.ipv4.ip_unprivileged_port_start=' || cat /etc/sysctl.conf 2>/dev/null | grep 'net.ipv4.ip_unprivileged_port_start='").Run(); err != nil {
fmt.Println("Would you like to configure ports >= 80 as unprivileged ports? This enables podman containers to listen on low-range ports.")
fmt.Println("Pangolin will experience startup issues if this is not configured, because it needs to listen on port 80/443 by default.")
approved := readBool(reader, "The installer is about to execute \"echo 'net.ipv4.ip_unprivileged_port_start=80' > /etc/sysctl.d/99-podman.conf && sysctl --system\". Approve?", true)
approved := readBool("The installer is about to execute \"echo 'net.ipv4.ip_unprivileged_port_start=80' > /etc/sysctl.d/99-podman.conf && sysctl --system\". Approve?", true)
if approved {
if os.Geteuid() != 0 {
fmt.Println("You need to run the installer as root for such a configuration.")
@@ -311,7 +322,7 @@ func podmanOrDocker(reader *bufio.Reader) SupportedContainer {
// Linux only.
if err := run("bash", "-c", "echo 'net.ipv4.ip_unprivileged_port_start=80' > /etc/sysctl.d/99-podman.conf && sysctl --system"); err != nil {
fmt.Printf("Error configuring unprivileged ports: %v\n", err)
fmt.Printf("Error configuring unprivileged ports: %v\n", err)
os.Exit(1)
}
} else {
@@ -321,7 +332,7 @@ func podmanOrDocker(reader *bufio.Reader) SupportedContainer {
fmt.Println("Unprivileged ports have been configured.")
}
} else if chosenContainer == Docker {
case Docker:
// check if docker is not installed and the user is root
if !isDockerInstalled() {
if os.Geteuid() != 0 {
@@ -336,7 +347,7 @@ func podmanOrDocker(reader *bufio.Reader) SupportedContainer {
fmt.Println("The installer will not be able to run docker commands without running it as root.")
os.Exit(1)
}
} else {
default:
// This shouldn't happen unless there's a third container runtime.
os.Exit(1)
}
@@ -344,35 +355,35 @@ func podmanOrDocker(reader *bufio.Reader) SupportedContainer {
return chosenContainer
}
func collectUserInput(reader *bufio.Reader) Config {
func collectUserInput() Config {
config := Config{}
// Basic configuration
fmt.Println("\n=== Basic Configuration ===")
config.IsEnterprise = readBoolNoDefault(reader, "Do you want to install the Enterprise version of Pangolin? The EE is free for personal use or for businesses making less than 100k USD annually.")
config.IsEnterprise = readBoolNoDefault("Do you want to install the Enterprise version of Pangolin? The EE is free for personal use or for businesses making less than 100k USD annually.")
config.BaseDomain = readString(reader, "Enter your base domain (no subdomain e.g. example.com)", "")
config.BaseDomain = readString("Enter your base domain (no subdomain e.g. example.com)", "")
// Set default dashboard domain after base domain is collected
defaultDashboardDomain := ""
if config.BaseDomain != "" {
defaultDashboardDomain = "pangolin." + config.BaseDomain
}
config.DashboardDomain = readString(reader, "Enter the domain for the Pangolin dashboard", defaultDashboardDomain)
config.LetsEncryptEmail = readString(reader, "Enter email for Let's Encrypt certificates", "")
config.InstallGerbil = readBool(reader, "Do you want to use Gerbil to allow tunneled connections", true)
config.DashboardDomain = readString("Enter the domain for the Pangolin dashboard", defaultDashboardDomain)
config.LetsEncryptEmail = readString("Enter email for Let's Encrypt certificates", "")
config.InstallGerbil = readBool("Do you want to use Gerbil to allow tunneled connections", true)
// Email configuration
fmt.Println("\n=== Email Configuration ===")
config.EnableEmail = readBool(reader, "Enable email functionality (SMTP)", false)
config.EnableEmail = readBool("Enable email functionality (SMTP)", false)
if config.EnableEmail {
config.EmailSMTPHost = readString(reader, "Enter SMTP host", "")
config.EmailSMTPPort = readInt(reader, "Enter SMTP port (default 587)", 587)
config.EmailSMTPUser = readString(reader, "Enter SMTP username", "")
config.EmailSMTPPass = readString(reader, "Enter SMTP password", "") // Should this be readPassword?
config.EmailNoReply = readString(reader, "Enter no-reply email address (often the same as SMTP username)", "")
config.EmailSMTPHost = readString("Enter SMTP host", "")
config.EmailSMTPPort = readInt("Enter SMTP port (default 587)", 587)
config.EmailSMTPUser = readString("Enter SMTP username", "")
config.EmailSMTPPass = readPassword("Enter SMTP password")
config.EmailNoReply = readString("Enter no-reply email address (often the same as SMTP username)", "")
}
// Validate required fields
@@ -393,8 +404,8 @@ func collectUserInput(reader *bufio.Reader) Config {
fmt.Println("\n=== Advanced Configuration ===")
config.EnableIPv6 = readBool(reader, "Is your server IPv6 capable?", true)
config.EnableGeoblocking = readBool(reader, "Do you want to download the MaxMind GeoLite2 database for geoblocking functionality?", true)
config.EnableIPv6 = readBool("Is your server IPv6 capable?", true)
config.EnableGeoblocking = readBool("Do you want to download the MaxMind GeoLite2 database for geoblocking functionality?", true)
if config.DashboardDomain == "" {
fmt.Println("Error: Dashboard Domain name is required")
@@ -405,10 +416,18 @@ func collectUserInput(reader *bufio.Reader) Config {
}
func createConfigFiles(config Config) error {
os.MkdirAll("config", 0755)
os.MkdirAll("config/letsencrypt", 0755)
os.MkdirAll("config/db", 0755)
os.MkdirAll("config/logs", 0755)
if err := os.MkdirAll("config", 0755); err != nil {
return fmt.Errorf("failed to create config directory: %v", err)
}
if err := os.MkdirAll("config/letsencrypt", 0755); err != nil {
return fmt.Errorf("failed to create letsencrypt directory: %v", err)
}
if err := os.MkdirAll("config/db", 0755); err != nil {
return fmt.Errorf("failed to create db directory: %v", err)
}
if err := os.MkdirAll("config/logs", 0755); err != nil {
return fmt.Errorf("failed to create logs directory: %v", err)
}
// Walk through all embedded files
err := fs.WalkDir(configFiles, "config", func(path string, d fs.DirEntry, err error) error {
@@ -562,22 +581,24 @@ func showSetupTokenInstructions(containerType SupportedContainer, dashboardDomai
fmt.Println("To get your setup token, you need to:")
fmt.Println("")
fmt.Println("1. Start the containers")
if containerType == Docker {
switch containerType {
case Docker:
fmt.Println(" docker compose up -d")
} else if containerType == Podman {
case Podman:
fmt.Println(" podman-compose up -d")
} else {
}
fmt.Println("")
fmt.Println("2. Wait for the Pangolin container to start and generate the token")
fmt.Println("")
fmt.Println("3. Check the container logs for the setup token")
if containerType == Docker {
switch containerType {
case Docker:
fmt.Println(" docker logs pangolin | grep -A 2 -B 2 'SETUP TOKEN'")
} else if containerType == Podman {
case Podman:
fmt.Println(" podman logs pangolin | grep -A 2 -B 2 'SETUP TOKEN'")
} else {
}
fmt.Println("")
fmt.Println("4. Look for output like")
fmt.Println(" === SETUP TOKEN GENERATED ===")
@@ -639,10 +660,7 @@ func checkPortsAvailable(port int) error {
addr := fmt.Sprintf(":%d", port)
ln, err := net.Listen("tcp", addr)
if err != nil {
return fmt.Errorf(
"ERROR: port %d is occupied or cannot be bound: %w\n\n",
port, err,
)
return fmt.Errorf("ERROR: port %d is occupied or cannot be bound: %w", port, err)
}
if closeErr := ln.Close(); closeErr != nil {
fmt.Fprintf(os.Stderr,

51
install/theme.go Normal file
View File

@@ -0,0 +1,51 @@
package main
import (
"github.com/charmbracelet/huh"
"github.com/charmbracelet/lipgloss"
)
// Pangolin brand colors (converted from oklch to hex)
var (
// Primary orange/amber - oklch(0.6717 0.1946 41.93)
primaryColor = lipgloss.AdaptiveColor{Light: "#D97706", Dark: "#F59E0B"}
// Muted foreground
mutedColor = lipgloss.AdaptiveColor{Light: "#737373", Dark: "#A3A3A3"}
// Success green
successColor = lipgloss.AdaptiveColor{Light: "#16A34A", Dark: "#22C55E"}
// Error red - oklch(0.577 0.245 27.325)
errorColor = lipgloss.AdaptiveColor{Light: "#DC2626", Dark: "#EF4444"}
// Normal text
normalFg = lipgloss.AdaptiveColor{Light: "#171717", Dark: "#FAFAFA"}
)
// ThemePangolin returns a huh theme using Pangolin brand colors
func ThemePangolin() *huh.Theme {
t := huh.ThemeBase()
// Focused state styles
t.Focused.Base = t.Focused.Base.BorderForeground(primaryColor)
t.Focused.Title = t.Focused.Title.Foreground(primaryColor).Bold(true)
t.Focused.Description = t.Focused.Description.Foreground(mutedColor)
t.Focused.ErrorIndicator = t.Focused.ErrorIndicator.Foreground(errorColor)
t.Focused.ErrorMessage = t.Focused.ErrorMessage.Foreground(errorColor)
t.Focused.SelectSelector = t.Focused.SelectSelector.Foreground(primaryColor)
t.Focused.NextIndicator = t.Focused.NextIndicator.Foreground(primaryColor)
t.Focused.PrevIndicator = t.Focused.PrevIndicator.Foreground(primaryColor)
t.Focused.Option = t.Focused.Option.Foreground(normalFg)
t.Focused.SelectedOption = t.Focused.SelectedOption.Foreground(primaryColor)
t.Focused.SelectedPrefix = lipgloss.NewStyle().Foreground(successColor).SetString("✓ ")
t.Focused.UnselectedPrefix = lipgloss.NewStyle().Foreground(mutedColor).SetString(" ")
t.Focused.FocusedButton = t.Focused.FocusedButton.Foreground(lipgloss.Color("#FFFFFF")).Background(primaryColor)
t.Focused.BlurredButton = t.Focused.BlurredButton.Foreground(normalFg).Background(lipgloss.AdaptiveColor{Light: "#E5E5E5", Dark: "#404040"})
t.Focused.TextInput.Cursor = t.Focused.TextInput.Cursor.Foreground(primaryColor)
t.Focused.TextInput.Prompt = t.Focused.TextInput.Prompt.Foreground(primaryColor)
// Blurred state inherits from focused but with hidden border
t.Blurred = t.Focused
t.Blurred.Base = t.Focused.Base.BorderStyle(lipgloss.HiddenBorder())
t.Blurred.Title = t.Blurred.Title.Foreground(mutedColor).Bold(false)
t.Blurred.TextInput.Prompt = t.Blurred.TextInput.Prompt.Foreground(mutedColor)
return t
}

View File

@@ -649,7 +649,8 @@
"resourcesUsersRolesAccess": "User and role-based access control",
"resourcesErrorUpdate": "Failed to toggle resource",
"resourcesErrorUpdateDescription": "An error occurred while updating the resource",
"access": "Access Control",
"access": "Access",
"accessControl": "Access Control",
"shareLink": "{resource} Share Link",
"resourceSelect": "Select resource",
"shareLinks": "Share Links",

511
package-lock.json generated

File diff suppressed because it is too large Load Diff

View File

@@ -32,7 +32,7 @@
"format": "prettier --write ."
},
"dependencies": {
"@asteasolutions/zod-to-openapi": "8.4.0",
"@asteasolutions/zod-to-openapi": "8.4.1",
"@aws-sdk/client-s3": "3.989.0",
"@faker-js/faker": "10.3.0",
"@headlessui/react": "2.2.9",
@@ -59,11 +59,11 @@
"@radix-ui/react-tabs": "1.1.13",
"@radix-ui/react-toast": "1.2.15",
"@radix-ui/react-tooltip": "1.2.8",
"@react-email/components": "1.0.7",
"@react-email/components": "1.0.8",
"@react-email/render": "2.0.4",
"@react-email/tailwind": "2.0.4",
"@react-email/tailwind": "2.0.5",
"@simplewebauthn/browser": "13.2.2",
"@simplewebauthn/server": "13.2.2",
"@simplewebauthn/server": "13.2.3",
"@tailwindcss/forms": "0.5.11",
"@tanstack/react-query": "5.90.21",
"@tanstack/react-table": "8.21.3",
@@ -81,7 +81,7 @@
"drizzle-orm": "0.45.1",
"express": "5.2.1",
"express-rate-limit": "8.2.1",
"glob": "13.0.3",
"glob": "13.0.6",
"helmet": "8.1.0",
"http-errors": "2.0.1",
"input-otp": "1.4.2",
@@ -93,20 +93,20 @@
"maxmind": "5.0.5",
"moment": "2.30.1",
"next": "15.5.12",
"next-intl": "4.8.2",
"next-intl": "4.8.3",
"next-themes": "0.4.6",
"nextjs-toploader": "3.9.17",
"node-cache": "5.1.2",
"nodemailer": "8.0.1",
"oslo": "1.2.1",
"pg": "8.18.0",
"posthog-node": "5.24.15",
"pg": "8.19.0",
"posthog-node": "5.26.0",
"qrcode.react": "4.2.0",
"react": "19.2.4",
"react-day-picker": "9.13.2",
"react-dom": "19.2.4",
"react-easy-sort": "1.8.0",
"react-hook-form": "7.71.1",
"react-hook-form": "7.71.2",
"react-icons": "5.5.0",
"recharts": "2.15.4",
"reodotdev": "1.0.0",
@@ -115,7 +115,7 @@
"sshpk": "^1.18.0",
"stripe": "20.3.1",
"swagger-ui-express": "5.0.1",
"tailwind-merge": "3.4.0",
"tailwind-merge": "3.5.0",
"topojson-client": "3.1.0",
"tw-animate-css": "1.4.0",
"use-debounce": "^10.1.0",
@@ -147,7 +147,7 @@
"@types/js-yaml": "4.0.9",
"@types/jsonwebtoken": "9.0.10",
"@types/node": "25.2.3",
"@types/nodemailer": "7.0.9",
"@types/nodemailer": "7.0.11",
"@types/nprogress": "0.2.3",
"@types/pg": "8.16.0",
"@types/react": "19.2.14",

View File

@@ -87,7 +87,7 @@ export async function validateResourceSessionToken(
if (Date.now() >= resourceSession.expiresAt) {
await db
.delete(resourceSessions)
.where(eq(resourceSessions.sessionId, resourceSessions.sessionId));
.where(eq(resourceSessions.sessionId, sessionId));
return { resourceSession: null };
} else if (
Date.now() >=
@@ -181,7 +181,7 @@ export function serializeResourceSessionCookie(
return `${cookieName}_s.${now}=${token}; HttpOnly; SameSite=Lax; Expires=${expiresAt.toUTCString()}; Path=/; Secure; Domain=${domain}`;
} else {
if (expiresAt === undefined) {
return `${cookieName}.${now}=${token}; HttpOnly; SameSite=Lax; Path=/; Domain=$domain}`;
return `${cookieName}.${now}=${token}; HttpOnly; SameSite=Lax; Path=/; Domain=${domain}`;
}
return `${cookieName}.${now}=${token}; HttpOnly; SameSite=Lax; Expires=${expiresAt.toUTCString()}; Path=/; Domain=${domain}`;
}

View File

@@ -16,6 +16,11 @@ const internalPort = config.getRawConfig().server.internal_port;
export function createInternalServer() {
const internalServer = express();
const trustProxy = config.getRawConfig().server.trust_proxy;
if (trustProxy) {
internalServer.set("trust proxy", trustProxy);
}
internalServer.use(helmet());
internalServer.use(cors());
internalServer.use(stripDuplicateSesions);

View File

@@ -48,5 +48,5 @@ export const tierMatrix: Record<TierFeature, Tier[]> = {
"enterprise"
],
[TierFeature.AutoProvisioning]: ["tier1", "tier3", "enterprise"],
[TierFeature.SshPam]: ["enterprise"]
[TierFeature.SshPam]: ["tier1", "tier3", "enterprise"]
};

View File

@@ -4,7 +4,7 @@ import { redisManager } from "@server/private/lib/redis";
// Create local cache with maxKeys limit to prevent memory leaks
// With ~10k requests/day and 5min TTL, 10k keys should be more than sufficient
const localCache = new NodeCache({
export const localCache = new NodeCache({
stdTTL: 3600,
checkperiod: 120,
maxKeys: 10000

View File

@@ -2,7 +2,7 @@ import path from "path";
import { fileURLToPath } from "url";
// This is a placeholder value replaced by the build process
export const APP_VERSION = "1.15.4";
export const APP_VERSION = "1.16.0";
export const __FILENAME = fileURLToPath(import.meta.url);
export const __DIRNAME = path.dirname(__FILENAME);

View File

@@ -37,7 +37,7 @@ import {
enforceResourceSessionLength
} from "#dynamic/lib/checkOrgAccessPolicy";
import { logRequestAudit } from "./logRequestAudit";
import cache from "@server/lib/cache";
import { localCache } from "@server/lib/cache";
import { APP_VERSION } from "@server/lib/consts";
import { isSubscribed } from "#dynamic/lib/isSubscribed";
import { tierMatrix } from "@server/lib/billing/tierMatrix";
@@ -137,7 +137,7 @@ export async function verifyResourceSession(
headerAuthExtendedCompatibility: ResourceHeaderAuthExtendedCompatibility | null;
org: Org;
}
| undefined = await cache.get(resourceCacheKey);
| undefined = localCache.get(resourceCacheKey);
if (!resourceData) {
const result = await getResourceByDomain(cleanHost);
@@ -161,7 +161,7 @@ export async function verifyResourceSession(
}
resourceData = result;
await cache.set(resourceCacheKey, resourceData, 5);
localCache.set(resourceCacheKey, resourceData, 5);
}
const {
@@ -405,7 +405,7 @@ export async function verifyResourceSession(
// check for HTTP Basic Auth header
const clientHeaderAuthKey = `headerAuth:${clientHeaderAuth}`;
if (headerAuth && clientHeaderAuth) {
if (await cache.get(clientHeaderAuthKey)) {
if (localCache.get(clientHeaderAuthKey)) {
logger.debug(
"Resource allowed because header auth is valid (cached)"
);
@@ -428,7 +428,7 @@ export async function verifyResourceSession(
headerAuth.headerAuthHash
)
) {
await cache.set(clientHeaderAuthKey, clientHeaderAuth, 5);
localCache.set(clientHeaderAuthKey, clientHeaderAuth, 5);
logger.debug("Resource allowed because header auth is valid");
logRequestAudit(
@@ -520,7 +520,7 @@ export async function verifyResourceSession(
if (resourceSessionToken) {
const sessionCacheKey = `session:${resourceSessionToken}`;
let resourceSession: any = await cache.get(sessionCacheKey);
let resourceSession: any = localCache.get(sessionCacheKey);
if (!resourceSession) {
const result = await validateResourceSessionToken(
@@ -529,7 +529,7 @@ export async function verifyResourceSession(
);
resourceSession = result?.resourceSession;
await cache.set(sessionCacheKey, resourceSession, 5);
localCache.set(sessionCacheKey, resourceSession, 5);
}
if (resourceSession?.isRequestToken) {
@@ -662,7 +662,7 @@ export async function verifyResourceSession(
}:${resource.resourceId}`;
let allowedUserData: BasicUserData | null | undefined =
await cache.get(userAccessCacheKey);
localCache.get(userAccessCacheKey);
if (allowedUserData === undefined) {
allowedUserData = await isUserAllowedToAccessResource(
@@ -671,7 +671,7 @@ export async function verifyResourceSession(
resourceData.org
);
await cache.set(userAccessCacheKey, allowedUserData, 5);
localCache.set(userAccessCacheKey, allowedUserData, 5);
}
if (
@@ -974,11 +974,11 @@ async function checkRules(
): Promise<"ACCEPT" | "DROP" | "PASS" | undefined> {
const ruleCacheKey = `rules:${resourceId}`;
let rules: ResourceRule[] | undefined = await cache.get(ruleCacheKey);
let rules: ResourceRule[] | undefined = localCache.get(ruleCacheKey);
if (!rules) {
rules = await getResourceRules(resourceId);
await cache.set(ruleCacheKey, rules, 5);
localCache.set(ruleCacheKey, rules, 5);
}
if (rules.length === 0) {
@@ -1208,13 +1208,13 @@ async function isIpInAsn(
async function getAsnFromIp(ip: string): Promise<number | undefined> {
const asnCacheKey = `asn:${ip}`;
let cachedAsn: number | undefined = await cache.get(asnCacheKey);
let cachedAsn: number | undefined = localCache.get(asnCacheKey);
if (!cachedAsn) {
cachedAsn = await getAsnForIp(ip); // do it locally
// Cache for longer since IP ASN doesn't change frequently
if (cachedAsn) {
await cache.set(asnCacheKey, cachedAsn, 300); // 5 minutes
localCache.set(asnCacheKey, cachedAsn, 300); // 5 minutes
}
}
@@ -1224,14 +1224,14 @@ async function getAsnFromIp(ip: string): Promise<number | undefined> {
async function getCountryCodeFromIp(ip: string): Promise<string | undefined> {
const geoIpCacheKey = `geoip:${ip}`;
let cachedCountryCode: string | undefined = await cache.get(geoIpCacheKey);
let cachedCountryCode: string | undefined = localCache.get(geoIpCacheKey);
if (!cachedCountryCode) {
cachedCountryCode = await getCountryCodeForIp(ip); // do it locally
// Only cache successful lookups to avoid filling cache with undefined values
if (cachedCountryCode) {
// Cache for longer since IP geolocation doesn't change frequently
await cache.set(geoIpCacheKey, cachedCountryCode, 300); // 5 minutes
localCache.set(geoIpCacheKey, cachedCountryCode, 300); // 5 minutes
}
}

View File

@@ -689,6 +689,13 @@ authenticated.get(
user.getOrgUser
);
authenticated.get(
"/org/:orgId/user-by-username",
verifyApiKeyOrgAccess,
verifyApiKeyHasAction(ActionsEnum.getOrgUser),
user.getOrgUserByUsername
);
authenticated.post(
"/user/:userId/2fa",
verifyApiKeyIsRoot,

View File

@@ -11,7 +11,7 @@ import { fromError } from "zod-validation-error";
import { ActionsEnum, checkUserActionPermission } from "@server/auth/actions";
import { OpenAPITags, registry } from "@server/openApi";
async function queryUser(orgId: string, userId: string) {
export async function queryUser(orgId: string, userId: string) {
const [user] = await db
.select({
orgId: userOrgs.orgId,

View File

@@ -0,0 +1,136 @@
import { Request, Response, NextFunction } from "express";
import { z } from "zod";
import { db } from "@server/db";
import { userOrgs, users } from "@server/db";
import { and, eq } from "drizzle-orm";
import response from "@server/lib/response";
import HttpCode from "@server/types/HttpCode";
import createHttpError from "http-errors";
import logger from "@server/logger";
import { fromError } from "zod-validation-error";
import { OpenAPITags, registry } from "@server/openApi";
import { queryUser, type GetOrgUserResponse } from "./getOrgUser";
const getOrgUserByUsernameParamsSchema = z.strictObject({
orgId: z.string()
});
const getOrgUserByUsernameQuerySchema = z.strictObject({
username: z.string().min(1, "username is required"),
idpId: z
.string()
.optional()
.transform((v) =>
v === undefined || v === "" ? undefined : parseInt(v, 10)
)
.refine(
(v) =>
v === undefined || (Number.isInteger(v) && (v as number) > 0),
{ message: "idpId must be a positive integer" }
)
});
registry.registerPath({
method: "get",
path: "/org/{orgId}/user-by-username",
description:
"Get a user in an organization by username. When idpId is not passed, only internal users are searched (username is globally unique for them). For external (OIDC) users, pass idpId to search by username within that identity provider.",
tags: [OpenAPITags.Org, OpenAPITags.User],
request: {
params: getOrgUserByUsernameParamsSchema,
query: getOrgUserByUsernameQuerySchema
},
responses: {}
});
export async function getOrgUserByUsername(
req: Request,
res: Response,
next: NextFunction
): Promise<any> {
try {
const parsedParams = getOrgUserByUsernameParamsSchema.safeParse(
req.params
);
if (!parsedParams.success) {
return next(
createHttpError(
HttpCode.BAD_REQUEST,
fromError(parsedParams.error).toString()
)
);
}
const parsedQuery = getOrgUserByUsernameQuerySchema.safeParse(
req.query
);
if (!parsedQuery.success) {
return next(
createHttpError(
HttpCode.BAD_REQUEST,
fromError(parsedQuery.error).toString()
)
);
}
const { orgId } = parsedParams.data;
const { username, idpId } = parsedQuery.data;
const conditions = [
eq(userOrgs.orgId, orgId),
eq(users.username, username)
];
if (idpId !== undefined) {
conditions.push(eq(users.idpId, idpId));
} else {
conditions.push(eq(users.type, "internal"));
}
const candidates = await db
.select({ userId: users.userId })
.from(userOrgs)
.innerJoin(users, eq(userOrgs.userId, users.userId))
.where(and(...conditions));
if (candidates.length === 0) {
return next(
createHttpError(
HttpCode.NOT_FOUND,
`User with username '${username}' not found in organization`
)
);
}
if (candidates.length > 1) {
return next(
createHttpError(
HttpCode.BAD_REQUEST,
"Multiple users with this username (external users from different identity providers). Specify idpId (identity provider ID) to disambiguate. When not specified, this searches for internal users only."
)
);
}
const user = await queryUser(orgId, candidates[0].userId);
if (!user) {
return next(
createHttpError(
HttpCode.NOT_FOUND,
`User with username '${username}' not found in organization`
)
);
}
return response<GetOrgUserResponse>(res, {
data: user,
success: true,
error: false,
message: "User retrieved successfully",
status: HttpCode.OK
});
} catch (error) {
logger.error(error);
return next(
createHttpError(HttpCode.INTERNAL_SERVER_ERROR, "An error occurred")
);
}
}

View File

@@ -5,6 +5,7 @@ export * from "./addUserRole";
export * from "./inviteUser";
export * from "./acceptInvite";
export * from "./getOrgUser";
export * from "./getOrgUserByUsername";
export * from "./adminListUsers";
export * from "./adminRemoveUser";
export * from "./adminGetUser";

View File

@@ -5,6 +5,7 @@ import semver from "semver";
import { versionMigrations } from "../db/pg";
import { __DIRNAME, APP_VERSION } from "@server/lib/consts";
import path from "path";
import { build } from "@server/build";
import m1 from "./scriptsPg/1.6.0";
import m2 from "./scriptsPg/1.7.0";
import m3 from "./scriptsPg/1.8.0";
@@ -19,7 +20,7 @@ import m11 from "./scriptsPg/1.14.0";
import m12 from "./scriptsPg/1.15.0";
import m13 from "./scriptsPg/1.15.3";
import m14 from "./scriptsPg/1.15.4";
import { build } from "@server/build";
import m15 from "./scriptsPg/1.16.0";
// THIS CANNOT IMPORT ANYTHING FROM THE SERVER
// EXCEPT FOR THE DATABASE AND THE SCHEMA
@@ -39,7 +40,8 @@ const migrations = [
{ version: "1.14.0", run: m11 },
{ version: "1.15.0", run: m12 },
{ version: "1.15.3", run: m13 },
{ version: "1.15.4", run: m14 }
{ version: "1.15.4", run: m14 },
{ version: "1.16.0", run: m15 }
// Add new migrations here as they are created
] as {
version: string;

View File

@@ -7,6 +7,7 @@ import { versionMigrations } from "../db/sqlite";
import { __DIRNAME, APP_PATH, APP_VERSION } from "@server/lib/consts";
import { SqliteError } from "better-sqlite3";
import fs from "fs";
import { build } from "@server/build";
import m1 from "./scriptsSqlite/1.0.0-beta1";
import m2 from "./scriptsSqlite/1.0.0-beta2";
import m3 from "./scriptsSqlite/1.0.0-beta3";
@@ -37,7 +38,7 @@ import m32 from "./scriptsSqlite/1.14.0";
import m33 from "./scriptsSqlite/1.15.0";
import m34 from "./scriptsSqlite/1.15.3";
import m35 from "./scriptsSqlite/1.15.4";
import { build } from "@server/build";
import m36 from "./scriptsSqlite/1.16.0";
// THIS CANNOT IMPORT ANYTHING FROM THE SERVER
// EXCEPT FOR THE DATABASE AND THE SCHEMA
@@ -73,7 +74,8 @@ const migrations = [
{ version: "1.14.0", run: m32 },
{ version: "1.15.0", run: m33 },
{ version: "1.15.3", run: m34 },
{ version: "1.15.4", run: m35 }
{ version: "1.15.4", run: m35 },
{ version: "1.16.0", run: m36 }
// Add new migrations here as they are created
] as const;

View File

@@ -0,0 +1,179 @@
import { db } from "@server/db/pg/driver";
import { sql } from "drizzle-orm";
import { configFilePath1, configFilePath2 } from "@server/lib/consts";
import { encrypt } from "@server/lib/crypto";
import { generateCA } from "@server/private/lib/sshCA";
import fs from "fs";
import yaml from "js-yaml";
const version = "1.16.0";
function getServerSecret(): string {
const envSecret = process.env.SERVER_SECRET;
const configPath = fs.existsSync(configFilePath1)
? configFilePath1
: fs.existsSync(configFilePath2)
? configFilePath2
: null;
// If no config file but an env secret is set, use the env secret directly
if (!configPath) {
if (envSecret && envSecret.length > 0) {
return envSecret;
}
throw new Error(
"Cannot generate org CA keys: no config file found and SERVER_SECRET env var is not set. " +
"Expected config.yml or config.yaml in the config directory, or set SERVER_SECRET."
);
}
const configContent = fs.readFileSync(configPath, "utf8");
const config = yaml.load(configContent) as {
server?: { secret?: string };
};
let secret = config?.server?.secret;
if (!secret || secret.length === 0) {
// Fall back to SERVER_SECRET env var if config does not contain server.secret
if (envSecret && envSecret.length > 0) {
secret = envSecret;
}
}
if (!secret || secret.length === 0) {
throw new Error(
"Cannot generate org CA keys: no server.secret in config and SERVER_SECRET env var is not set. " +
"Set server.secret in config.yml/config.yaml or set SERVER_SECRET."
);
}
return secret;
}
export default async function migration() {
console.log(`Running setup script ${version}...`);
// Ensure server secret exists before running migration (required for org CA key generation)
getServerSecret();
try {
await db.execute(sql`BEGIN`);
// Schema changes
await db.execute(sql`
CREATE TABLE "roundTripMessageTracker" (
"messageId" serial PRIMARY KEY NOT NULL,
"clientId" varchar,
"messageType" varchar,
"sentAt" bigint NOT NULL,
"receivedAt" bigint,
"error" text,
"complete" boolean DEFAULT false NOT NULL
);
`);
await db.execute(
sql`ALTER TABLE "orgs" ADD COLUMN "sshCaPrivateKey" text;`
);
await db.execute(
sql`ALTER TABLE "orgs" ADD COLUMN "sshCaPublicKey" text;`
);
await db.execute(
sql`ALTER TABLE "orgs" ADD COLUMN "isBillingOrg" boolean;`
);
await db.execute(
sql`ALTER TABLE "orgs" ADD COLUMN "billingOrgId" varchar;`
);
await db.execute(
sql`ALTER TABLE "roles" ADD COLUMN "sshSudoMode" varchar(32) DEFAULT 'none';`
);
await db.execute(
sql`ALTER TABLE "roles" ADD COLUMN "sshSudoCommands" text DEFAULT '[]';`
);
await db.execute(
sql`ALTER TABLE "roles" ADD COLUMN "sshCreateHomeDir" boolean DEFAULT true;`
);
await db.execute(
sql`ALTER TABLE "roles" ADD COLUMN "sshUnixGroups" text DEFAULT '[]';`
);
await db.execute(
sql`ALTER TABLE "siteResources" ADD COLUMN "authDaemonPort" integer DEFAULT 22123;`
);
await db.execute(
sql`ALTER TABLE "siteResources" ADD COLUMN "authDaemonMode" varchar(32) DEFAULT 'site';`
);
await db.execute(
sql`ALTER TABLE "userOrgs" ADD COLUMN "pamUsername" varchar;`
);
// Set all admin role sudo to "full"; other roles keep default "none"
await db.execute(
sql`UPDATE "roles" SET "sshSudoMode" = 'full' WHERE "isAdmin" = true;`
);
await db.execute(sql`COMMIT`);
console.log("Migrated database");
} catch (e) {
await db.execute(sql`ROLLBACK`);
console.log("Unable to migrate database");
console.log(e);
throw e;
}
// Generate and store encrypted SSH CA keys for all orgs
try {
const secret = getServerSecret();
const orgQuery = await db.execute(sql`SELECT "orgId" FROM "orgs"`);
const orgRows = orgQuery.rows as { orgId: string }[];
const failedOrgIds: string[] = [];
for (const row of orgRows) {
try {
const ca = generateCA(`pangolin-ssh-ca-${row.orgId}`);
const encryptedPrivateKey = encrypt(ca.privateKeyPem, secret);
await db.execute(sql`
UPDATE "orgs"
SET "sshCaPrivateKey" = ${encryptedPrivateKey},
"sshCaPublicKey" = ${ca.publicKeyOpenSSH}
WHERE "orgId" = ${row.orgId};
`);
} catch (err) {
failedOrgIds.push(row.orgId);
console.error(
`Error: No CA was generated for organization "${row.orgId}".`,
err instanceof Error ? err.message : err
);
}
}
if (orgRows.length > 0) {
const succeeded = orgRows.length - failedOrgIds.length;
console.log(
`Generated and stored SSH CA keys for ${succeeded} org(s).`
);
}
if (failedOrgIds.length > 0) {
console.error(
`No CA was generated for ${failedOrgIds.length} organization(s): ${failedOrgIds.join(
", "
)}`
);
}
} catch (e) {
console.error(
"Error while generating SSH CA keys for orgs after migration:",
e
);
}
console.log(`${version} migration complete`);
}

View File

@@ -1,23 +1,167 @@
import { __DIRNAME, APP_PATH } from "@server/lib/consts";
import { APP_PATH, configFilePath1, configFilePath2 } from "@server/lib/consts";
import { encrypt } from "@server/lib/crypto";
import { generateCA } from "@server/private/lib/sshCA";
import Database from "better-sqlite3";
import fs from "fs";
import path from "path";
import yaml from "js-yaml";
const version = "1.16.0";
function getServerSecret(): string {
const envSecret = process.env.SERVER_SECRET;
const configPath = fs.existsSync(configFilePath1)
? configFilePath1
: fs.existsSync(configFilePath2)
? configFilePath2
: null;
// If no config file but an env secret is set, use the env secret directly
if (!configPath) {
if (envSecret && envSecret.length > 0) {
return envSecret;
}
throw new Error(
"Cannot generate org CA keys: no config file found and SERVER_SECRET env var is not set. " +
"Expected config.yml or config.yaml in the config directory, or set SERVER_SECRET."
);
}
const configContent = fs.readFileSync(configPath, "utf8");
const config = yaml.load(configContent) as {
server?: { secret?: string };
};
let secret = config?.server?.secret;
if (!secret || secret.length === 0) {
// Fall back to SERVER_SECRET env var if config does not contain server.secret
if (envSecret && envSecret.length > 0) {
secret = envSecret;
}
}
if (!secret || secret.length === 0) {
throw new Error(
"Cannot generate org CA keys: no server.secret in config and SERVER_SECRET env var is not set. " +
"Set server.secret in config.yml/config.yaml or set SERVER_SECRET."
);
}
return secret;
}
export default async function migration() {
console.log(`Running setup script ${version}...`);
// Ensure server secret exists before running migration (required for org CA key generation)
getServerSecret();
const location = path.join(APP_PATH, "db", "db.sqlite");
const db = new Database(location);
// set all admin role sudo to "full"; all other roles to "none"
// all roles set hoemdir to true
// generate ca certs for all orgs?
// set authDaemonMode to "site" for all site-resources
try {
db.transaction(() => {})();
db.pragma("foreign_keys = OFF");
db.transaction(() => {
// Create roundTripMessageTracker table for tracking message round-trips
db.prepare(
`
CREATE TABLE 'roundTripMessageTracker' (
'messageId' integer PRIMARY KEY AUTOINCREMENT NOT NULL,
'clientId' text,
'messageType' text,
'sentAt' integer NOT NULL,
'receivedAt' integer,
'error' text,
'complete' integer DEFAULT 0 NOT NULL
);
`
).run();
// Org SSH CA and billing columns
db.prepare(`ALTER TABLE 'orgs' ADD 'sshCaPrivateKey' text;`).run();
db.prepare(`ALTER TABLE 'orgs' ADD 'sshCaPublicKey' text;`).run();
db.prepare(`ALTER TABLE 'orgs' ADD 'isBillingOrg' integer;`).run();
db.prepare(`ALTER TABLE 'orgs' ADD 'billingOrgId' text;`).run();
// Role SSH sudo and unix group columns
db.prepare(
`ALTER TABLE 'roles' ADD 'sshSudoMode' text DEFAULT 'none';`
).run();
db.prepare(
`ALTER TABLE 'roles' ADD 'sshSudoCommands' text DEFAULT '[]';`
).run();
db.prepare(
`ALTER TABLE 'roles' ADD 'sshCreateHomeDir' integer DEFAULT 1;`
).run();
db.prepare(
`ALTER TABLE 'roles' ADD 'sshUnixGroups' text DEFAULT '[]';`
).run();
// Site resource auth daemon columns
db.prepare(
`ALTER TABLE 'siteResources' ADD 'authDaemonPort' integer DEFAULT 22123;`
).run();
db.prepare(
`ALTER TABLE 'siteResources' ADD 'authDaemonMode' text DEFAULT 'site';`
).run();
// UserOrg PAM username for SSH
db.prepare(`ALTER TABLE 'userOrgs' ADD 'pamUsername' text;`).run();
// Set all admin role sudo to "full"; other roles keep default "none"
db.prepare(
`UPDATE 'roles' SET 'sshSudoMode' = 'full' WHERE isAdmin = 1;`
).run();
})();
db.pragma("foreign_keys = ON");
const orgRows = db.prepare("SELECT orgId FROM orgs").all() as {
orgId: string;
}[];
// Generate and store encrypted SSH CA keys for all orgs
const secret = getServerSecret();
const updateOrgCaKeys = db.prepare(
"UPDATE orgs SET sshCaPrivateKey = ?, sshCaPublicKey = ? WHERE orgId = ?"
);
const failedOrgIds: string[] = [];
for (const row of orgRows) {
try {
const ca = generateCA(`pangolin-ssh-ca-${row.orgId}`);
const encryptedPrivateKey = encrypt(ca.privateKeyPem, secret);
updateOrgCaKeys.run(
encryptedPrivateKey,
ca.publicKeyOpenSSH,
row.orgId
);
} catch (err) {
failedOrgIds.push(row.orgId);
console.error(
`Error: No CA was generated for organization "${row.orgId}".`,
err instanceof Error ? err.message : err
);
}
}
if (orgRows.length > 0) {
const succeeded = orgRows.length - failedOrgIds.length;
console.log(
`Generated and stored SSH CA keys for ${succeeded} org(s).`
);
}
if (failedOrgIds.length > 0) {
console.error(
`No CA was generated for ${failedOrgIds.length} organization(s): ${failedOrgIds.join(", ")}`
);
}
console.log(`Migrated database`);
} catch (e) {

View File

@@ -187,7 +187,11 @@ export default function ResourceAuthenticationPage() {
number | null
>(null);
const [ssoEnabled, setSsoEnabled] = useState(resource.sso);
const [ssoEnabled, setSsoEnabled] = useState(resource.sso ?? false);
useEffect(() => {
setSsoEnabled(resource.sso ?? false);
}, [resource.sso]);
const [selectedIdpId, setSelectedIdpId] = useState<number | null>(
resource.skipToIdpId || null
@@ -472,7 +476,7 @@ export default function ResourceAuthenticationPage() {
<SwitchInput
id="sso-toggle"
label={t("ssoUse")}
defaultChecked={resource.sso}
checked={ssoEnabled}
onCheckedChange={(val) => setSsoEnabled(val)}
/>
@@ -800,8 +804,13 @@ function OneTimePasswordFormSection({
}: OneTimePasswordFormSectionProps) {
const { env } = useEnvContext();
const [whitelistEnabled, setWhitelistEnabled] = useState(
resource.emailWhitelistEnabled
resource.emailWhitelistEnabled ?? false
);
useEffect(() => {
setWhitelistEnabled(resource.emailWhitelistEnabled);
}, [resource.emailWhitelistEnabled]);
const queryClient = useQueryClient();
const [loadingSaveWhitelist, startTransition] = useTransition();
@@ -894,7 +903,7 @@ function OneTimePasswordFormSection({
<SwitchInput
id="whitelist-toggle"
label={t("otpEmailWhitelist")}
defaultChecked={resource.emailWhitelistEnabled}
checked={whitelistEnabled}
onCheckedChange={setWhitelistEnabled}
disabled={!env.email.emailEnabled}
/>

View File

@@ -113,7 +113,12 @@ export default function ResourceRules(props: {
const [rulesToRemove, setRulesToRemove] = useState<number[]>([]);
const [loading, setLoading] = useState(false);
const [pageLoading, setPageLoading] = useState(true);
const [rulesEnabled, setRulesEnabled] = useState(resource.applyRules);
const [rulesEnabled, setRulesEnabled] = useState(resource.applyRules ?? false);
useEffect(() => {
setRulesEnabled(resource.applyRules);
}, [resource.applyRules]);
const [openCountrySelect, setOpenCountrySelect] = useState(false);
const [countrySelectValue, setCountrySelectValue] = useState("");
const [openAddRuleCountrySelect, setOpenAddRuleCountrySelect] =
@@ -836,7 +841,7 @@ export default function ResourceRules(props: {
<SwitchInput
id="rules-toggle"
label={t("rulesEnable")}
defaultChecked={rulesEnabled}
checked={rulesEnabled}
onCheckedChange={(val) => setRulesEnabled(val)}
/>
</div>

View File

@@ -107,7 +107,7 @@ export const orgNavSections = (
]
},
{
heading: "access",
heading: "accessControl",
items: [
{
title: "sidebarTeam",

View File

@@ -31,6 +31,18 @@ const CopyToClipboard = ({
return (
<div className="flex items-center space-x-2 min-w-0 max-w-full">
<button
type="button"
className="h-6 w-6 p-0 flex items-center justify-center cursor-pointer flex-shrink-0"
onClick={handleCopy}
>
{!copied ? (
<Copy className="h-4 w-4" />
) : (
<Check className="text-green-500 h-4 w-4" />
)}
<span className="sr-only">{t("copyText")}</span>
</button>
{isLink ? (
<Link
href={text}
@@ -54,18 +66,6 @@ const CopyToClipboard = ({
{displayValue}
</span>
)}
<button
type="button"
className="h-6 w-6 p-0 flex items-center justify-center cursor-pointer flex-shrink-0"
onClick={handleCopy}
>
{!copied ? (
<Copy className="h-4 w-4" />
) : (
<Check className="text-green-500 h-4 w-4" />
)}
<span className="sr-only">{t("copyText")}</span>
</button>
</div>
);
};

View File

@@ -5,13 +5,11 @@ import { SidebarNav } from "@app/components/SidebarNav";
import { OrgSelector } from "@app/components/OrgSelector";
import { cn } from "@app/lib/cn";
import { ListUserOrgsResponse } from "@server/routers/org";
import SupporterStatus from "@app/components/SupporterStatus";
import { Button } from "@app/components/ui/button";
import { ExternalLink, Menu, Server } from "lucide-react";
import { ArrowRight, Menu, Server } from "lucide-react";
import Link from "next/link";
import { usePathname } from "next/navigation";
import { useUserContext } from "@app/hooks/useUserContext";
import { useEnvContext } from "@app/hooks/useEnvContext";
import { useTranslations } from "next-intl";
import ProfileIcon from "@app/components/ProfileIcon";
import ThemeSwitcher from "@app/components/ThemeSwitcher";
@@ -44,7 +42,6 @@ export function LayoutMobileMenu({
const pathname = usePathname();
const isAdminPage = pathname?.startsWith("/admin");
const { user } = useUserContext();
const { env } = useEnvContext();
const t = useTranslations();
return (
@@ -83,7 +80,7 @@ export function LayoutMobileMenu({
<div className="px-3 pt-3">
{!isAdminPage &&
user.serverAdmin && (
<div className="py-2">
<div className="mb-1">
<Link
href="/admin"
className={cn(
@@ -98,11 +95,12 @@ export function LayoutMobileMenu({
<span className="flex-shrink-0 mr-2">
<Server className="h-4 w-4" />
</span>
<span>
<span className="flex-1">
{t(
"serverAdmin"
)}
</span>
<ArrowRight className="h-4 w-4 shrink-0 ml-auto opacity-70" />
</Link>
</div>
)}
@@ -115,22 +113,6 @@ export function LayoutMobileMenu({
</div>
<div className="sticky bottom-0 left-0 right-0 h-8 pointer-events-none bg-gradient-to-t from-card to-transparent" />
</div>
<div className="px-3 pt-3 pb-3 space-y-4 border-t shrink-0">
<SupporterStatus />
{env?.app?.version && (
<div className="text-xs text-muted-foreground text-center">
<Link
href={`https://github.com/fosrl/pangolin/releases/tag/${env.app.version}`}
target="_blank"
rel="noopener noreferrer"
className="flex items-center justify-center gap-1"
>
v{env.app.version}
<ExternalLink size={12} />
</Link>
</div>
)}
</div>
</SheetContent>
</Sheet>
</div>

View File

@@ -146,6 +146,46 @@ export function LayoutSidebar({
/>
<div className="flex-1 overflow-y-auto relative">
<div className="px-2 pt-3">
{!isAdminPage && user.serverAdmin && (
<div
className={cn(
"shrink-0",
isSidebarCollapsed ? "mb-4" : "mb-1"
)}
>
<Link
href="/admin"
className={cn(
"flex items-center transition-colors text-muted-foreground hover:text-foreground text-sm w-full hover:bg-secondary/80 dark:hover:bg-secondary/50 rounded-md",
isSidebarCollapsed
? "px-2 py-2 justify-center"
: "px-3 py-1.5"
)}
title={
isSidebarCollapsed
? t("serverAdmin")
: undefined
}
>
<span
className={cn(
"shrink-0",
!isSidebarCollapsed && "mr-2"
)}
>
<Server className="h-4 w-4" />
</span>
{!isSidebarCollapsed && (
<>
<span className="flex-1">
{t("serverAdmin")}
</span>
<ArrowRight className="h-4 w-4 shrink-0 ml-auto opacity-70" />
</>
)}
</Link>
</div>
)}
<SidebarNav
sections={navItems}
isCollapsed={isSidebarCollapsed}
@@ -156,40 +196,6 @@ export function LayoutSidebar({
<div className="sticky bottom-0 left-0 right-0 h-8 pointer-events-none bg-gradient-to-t from-card to-transparent" />
</div>
{!isAdminPage && user.serverAdmin && (
<div className="shrink-0 px-2 pb-2">
<Link
href="/admin"
className={cn(
"flex items-center transition-colors text-muted-foreground hover:text-foreground text-sm w-full hover:bg-secondary/80 dark:hover:bg-secondary/50 rounded-md",
isSidebarCollapsed
? "px-2 py-2 justify-center"
: "px-3 py-1.5"
)}
title={
isSidebarCollapsed ? t("serverAdmin") : undefined
}
>
<span
className={cn(
"shrink-0",
!isSidebarCollapsed && "mr-2"
)}
>
<Server className="h-4 w-4" />
</span>
{!isSidebarCollapsed && (
<>
<span className="flex-1">
{t("serverAdmin")}
</span>
<ArrowRight className="h-4 w-4 shrink-0 ml-auto opacity-70" />
</>
)}
</Link>
</div>
)}
{isSidebarCollapsed && (
<div className="shrink-0 flex justify-center py-2">
<TooltipProvider>
@@ -218,7 +224,7 @@ export function LayoutSidebar({
<div className="w-full border-t border-border mb-3" />
<div className="p-4 pt-0 mt-0 flex flex-col shrink-0">
<div className="p-4 pt-1 flex flex-col shrink-0">
{canShowProductUpdates && (
<div className="mb-3 empty:mb-0">
<ProductUpdates isCollapsed={isSidebarCollapsed} />

View File

@@ -2,31 +2,20 @@ import { headers } from "next/headers";
export async function authCookieHeader() {
const otherHeaders = await headers();
const otherHeadersObject = Object.fromEntries(otherHeaders.entries());
const otherHeadersObject = Object.fromEntries(
Array.from(otherHeaders.entries()).map(([k, v]) => [k.toLowerCase(), v])
);
return {
headers: {
cookie:
otherHeadersObject["cookie"] || otherHeadersObject["Cookie"],
host: otherHeadersObject["host"] || otherHeadersObject["Host"],
"user-agent":
otherHeadersObject["user-agent"] ||
otherHeadersObject["User-Agent"],
"x-forwarded-for":
otherHeadersObject["x-forwarded-for"] ||
otherHeadersObject["X-Forwarded-For"],
"x-forwarded-host":
otherHeadersObject["fx-forwarded-host"] ||
otherHeadersObject["Fx-Forwarded-Host"],
"x-forwarded-port":
otherHeadersObject["x-forwarded-port"] ||
otherHeadersObject["X-Forwarded-Port"],
"x-forwarded-proto":
otherHeadersObject["x-forwarded-proto"] ||
otherHeadersObject["X-Forwarded-Proto"],
"x-real-ip":
otherHeadersObject["x-real-ip"] ||
otherHeadersObject["X-Real-IP"]
cookie: otherHeadersObject["cookie"],
host: otherHeadersObject["host"],
"user-agent": otherHeadersObject["user-agent"],
"x-forwarded-for": otherHeadersObject["x-forwarded-for"],
"x-forwarded-host": otherHeadersObject["x-forwarded-host"],
"x-forwarded-port": otherHeadersObject["x-forwarded-port"],
"x-forwarded-proto": otherHeadersObject["x-forwarded-proto"],
"x-real-ip": otherHeadersObject["x-real-ip"]
}
};
}