Compare commits

...

144 Commits

Author SHA1 Message Date
miloschwartz
4cab693cfc openapi and swagger ui improvements and cleanup 2026-03-03 14:54:17 -08:00
Owen
c9515ae77c Add comment about not needing exit node 2026-03-03 14:54:17 -08:00
miloschwartz
d14de86f65 fix org selector spacing on mobile 2026-03-03 14:54:17 -08:00
Laurence
f6ee9db730 enhance(sidebar): make mobile org selector sticky
Make org selector sticky on mobile sidebar

  Move OrgSelector outside the scrollable container so it stays fixed
  at the top while menu items scroll, matching the desktop sidebar
  behavior introduced in 9b2c0d0b.
2026-03-03 14:54:17 -08:00
ChanningHe
94353aea44 feat(integration): add domain CRUD endpoints to integration API 2026-03-03 14:54:17 -08:00
Owen Schwartz
cdf79edb00 Merge pull request #2570 from Fizza-Mukhtar/fix/mixed-target-failover-2448
fix: local targets ignored when newt site is unhealthy (mixed target failover)
2026-03-01 15:58:25 -08:00
Owen Schwartz
44e3eedffa Merge pull request #2567 from marcschaeferger/fix-kubernetes-install
feat(kubernetes): enable newtInstances by default and update installation instructions
2026-03-01 10:56:18 -08:00
Marc Schäfer
bb189874cb fix(newt-install): conditionally display Kubernetes installation info
Signed-off-by: Marc Schäfer <git@marcschaeferger.de>
2026-03-01 10:55:58 -08:00
Marc Schäfer
34dadd0e16 feat(kubernetes): enable newtInstances by default and update installation instructions
Signed-off-by: Marc Schäfer <git@marcschaeferger.de>
2026-03-01 10:55:58 -08:00
Owen Schwartz
87b5cd9988 Merge pull request #2573 from Fizza-Mukhtar/fix/container-search-excludes-labels-2228
fix: exclude labels from container search to prevent false positives
2026-03-01 10:52:50 -08:00
Marc Schäfer
6a537a23e8 fix(newt-install): conditionally display Kubernetes installation info
Signed-off-by: Marc Schäfer <git@marcschaeferger.de>
2026-03-01 18:17:45 +01:00
Fizza-Mukhtar
e63a6e9b77 fix: treat local and wireguard sites as online for failover 2026-03-01 07:56:47 -08:00
Fizza-Mukhtar
7ce589c4f2 fix: exclude labels from container search to prevent false positives 2026-03-01 06:50:03 -08:00
Fizza-Mukhtar
f36cf06e26 fix: fallback to local targets when newt targets are unhealthy 2026-03-01 01:43:15 -08:00
Marc Schäfer
375211f184 feat(kubernetes): enable newtInstances by default and update installation instructions
Signed-off-by: Marc Schäfer <git@marcschaeferger.de>
2026-02-28 23:56:28 +01:00
Owen
66c377a5c9 Merge branch 'main' into dev 2026-02-28 12:14:41 -08:00
Owen
50c2aa0111 Add default memory limits 2026-02-28 12:14:27 -08:00
Owen
fdeb891137 Fix pagination affecting drop downs 2026-02-28 12:07:42 -08:00
Owen Schwartz
6a6e3a43b1 Merge pull request #2562 from LaurenceJJones/fix/zod-openapi-catch-error
fix(zod): Add openapi call after catch
2026-02-28 11:04:10 -08:00
Laurence
b0a34fa21b fix(openapi): Add openapi call after catch
fix: #2561
without making an explicit call to openapi a runtime error happens because it cannot infer the type, the call to openapi is the same across the codebase
2026-02-28 11:27:19 +00:00
Owen
72bf6f3c41 Comma separated 2026-02-27 17:53:44 -08:00
miloschwartz
ad9289e0c1 sort by name by default 2026-02-27 15:53:27 -08:00
Owen Schwartz
b0cb0e5a99 Merge pull request #2559 from fosrl/dev
1.16.1
2026-02-27 12:40:23 -08:00
miloschwartz
8347203bbe add sort to name col 2026-02-27 12:39:26 -08:00
miloschwartz
4aa1186aed fix machine client pagination 2026-02-27 11:59:55 -08:00
Owen
eed87af61d Use ecr base to build 2026-02-26 21:43:14 -08:00
Owen
daeea8e7ea Add aliases to queries
Fixes #2556
2026-02-26 21:37:47 -08:00
Owen
0d63a15715 Merge branch 'main' into dev 2026-02-26 20:14:41 -08:00
miloschwartz
fa2e229ada support authPath in device login 2026-02-26 14:59:34 -08:00
Owen
5d9700d84c Update sum 2026-02-25 16:46:20 -08:00
Owen
f8a8cdaa5f Show enterprise tier 2026-02-25 16:45:35 -08:00
Owen
e23e446476 Fix rule violations of dynamic import 2026-02-25 16:35:57 -08:00
Owen
fa097df50b Dont maxmind on oss or enterprise 2026-02-25 16:26:33 -08:00
Owen
75f34ff127 Stub cache 2026-02-25 16:17:06 -08:00
Owen
c9586b4d93 Remove postgres logs from private config file 2026-02-25 16:09:26 -08:00
Owen
52937a6d90 Make sshCA GA 2026-02-25 16:04:47 -08:00
Owen Schwartz
186c131cce Merge pull request #2543 from fosrl/dev
1.16.0-rc.0
2026-02-25 15:51:42 -08:00
Owen Schwartz
8de3f9a440 Merge pull request #2542 from fosrl/crowdin_dev
New Crowdin updates
2026-02-25 15:49:34 -08:00
Owen Schwartz
ea49e179f9 New translations en-us.json (Norwegian Bokmal) 2026-02-25 15:48:16 -08:00
Owen Schwartz
485f4f1c8e New translations en-us.json (Chinese Simplified) 2026-02-25 15:48:15 -08:00
Owen Schwartz
5fb35d12d7 New translations en-us.json (Turkish) 2026-02-25 15:48:13 -08:00
Owen Schwartz
ec8a9fe3d2 New translations en-us.json (Russian) 2026-02-25 15:48:12 -08:00
Owen Schwartz
411a34e15e New translations en-us.json (Portuguese) 2026-02-25 15:48:11 -08:00
Owen Schwartz
3df71fd2bc New translations en-us.json (Polish) 2026-02-25 15:48:09 -08:00
Owen Schwartz
5e1f6085e3 New translations en-us.json (Dutch) 2026-02-25 15:48:08 -08:00
Owen Schwartz
53fc7ab6e3 New translations en-us.json (Korean) 2026-02-25 15:48:07 -08:00
Owen Schwartz
7779ed24fe New translations en-us.json (Italian) 2026-02-25 15:48:05 -08:00
Owen Schwartz
6e4193dae3 New translations en-us.json (German) 2026-02-25 15:48:04 -08:00
Owen Schwartz
f138609f48 New translations en-us.json (Czech) 2026-02-25 15:48:02 -08:00
Owen Schwartz
98154b5de3 New translations en-us.json (Bulgarian) 2026-02-25 15:48:01 -08:00
Owen Schwartz
6322fd9eef New translations en-us.json (Spanish) 2026-02-25 15:47:59 -08:00
Owen Schwartz
1c0949e957 New translations en-us.json (French) 2026-02-25 15:47:58 -08:00
Owen
c3847e6001 Prefix usernames 2026-02-25 15:36:22 -08:00
Owen
5cf13a963d Add missing saving username 2026-02-25 15:30:34 -08:00
miloschwartz
b017877826 hide ssh access tab for cidr resources 2026-02-25 14:49:28 -08:00
Owen
959f68b520 Restrict cidr resource 2026-02-25 14:43:47 -08:00
Owen
14cab3fdb8 Update phrase 2026-02-25 14:37:52 -08:00
Owen
b8d468f6de Bump version 2026-02-25 14:22:24 -08:00
Owen
fc66394243 Merge branch 'main' into dev 2026-02-25 14:18:22 -08:00
miloschwartz
8fca243c9a update tierMatrix 2026-02-25 11:59:16 -08:00
miloschwartz
388f710379 add pg migration 2026-02-25 11:37:31 -08:00
Owen Schwartz
ba3ab4362b Merge pull request #2539 from fosrl/dependabot/npm_and_yarn/prod-minor-updates-22e7e52815
Bump the prod-minor-updates group across 1 directory with 3 updates
2026-02-25 11:32:19 -08:00
miloschwartz
e18c9afc2d add sqlite migration 2026-02-25 11:24:32 -08:00
Owen Schwartz
a9b4a86c4a Merge pull request #2470 from fosrl/dependabot/npm_and_yarn/qs-6.14.2
Bump qs from 6.14.1 to 6.14.2
2026-02-25 11:05:46 -08:00
Owen Schwartz
200ea502dd Merge pull request #2484 from fosrl/dependabot/go_modules/install/minor-updates-80eb8af454
Bump golang.org/x/term from 0.39.0 to 0.40.0 in /install in the minor-updates group
2026-02-25 11:05:34 -08:00
dependabot[bot]
de36db97eb Bump the prod-minor-updates group across 1 directory with 3 updates
Bumps the prod-minor-updates group with 3 updates in the / directory: [pg](https://github.com/brianc/node-postgres/tree/HEAD/packages/pg), [posthog-node](https://github.com/PostHog/posthog-js/tree/HEAD/packages/node) and [tailwind-merge](https://github.com/dcastil/tailwind-merge).


Updates `pg` from 8.18.0 to 8.19.0
- [Changelog](https://github.com/brianc/node-postgres/blob/master/CHANGELOG.md)
- [Commits](https://github.com/brianc/node-postgres/commits/pg@8.19.0/packages/pg)

Updates `posthog-node` from 5.24.15 to 5.26.0
- [Release notes](https://github.com/PostHog/posthog-js/releases)
- [Changelog](https://github.com/PostHog/posthog-js/blob/main/packages/node/CHANGELOG.md)
- [Commits](https://github.com/PostHog/posthog-js/commits/posthog-node@5.26.0/packages/node)

Updates `tailwind-merge` from 3.4.0 to 3.5.0
- [Release notes](https://github.com/dcastil/tailwind-merge/releases)
- [Commits](https://github.com/dcastil/tailwind-merge/compare/v3.4.0...v3.5.0)

---
updated-dependencies:
- dependency-name: pg
  dependency-version: 8.19.0
  dependency-type: direct:production
  update-type: version-update:semver-minor
  dependency-group: prod-minor-updates
- dependency-name: posthog-node
  dependency-version: 5.26.0
  dependency-type: direct:production
  update-type: version-update:semver-minor
  dependency-group: prod-minor-updates
- dependency-name: tailwind-merge
  dependency-version: 3.5.0
  dependency-type: direct:production
  update-type: version-update:semver-minor
  dependency-group: prod-minor-updates
...

Signed-off-by: dependabot[bot] <support@github.com>
2026-02-25 19:03:51 +00:00
dependabot[bot]
30283b044f Bump golang.org/x/term in /install in the minor-updates group
Bumps the minor-updates group in /install with 1 update: [golang.org/x/term](https://github.com/golang/term).


Updates `golang.org/x/term` from 0.39.0 to 0.40.0
- [Commits](https://github.com/golang/term/compare/v0.39.0...v0.40.0)

---
updated-dependencies:
- dependency-name: golang.org/x/term
  dependency-version: 0.40.0
  dependency-type: direct:production
  update-type: version-update:semver-minor
  dependency-group: minor-updates
...

Signed-off-by: dependabot[bot] <support@github.com>
2026-02-25 19:01:31 +00:00
Owen Schwartz
055bed8a07 Merge pull request #2338 from fosrl/dependabot/github_actions/actions/checkout-6.0.2
Bump actions/checkout from 6.0.1 to 6.0.2
2026-02-25 11:00:55 -08:00
Owen Schwartz
12b5c2ab34 Merge pull request #2495 from fosrl/dependabot/npm_and_yarn/dev-patch-updates-ebb414287f
Bump @types/nodemailer from 7.0.9 to 7.0.10 in the dev-patch-updates group across 1 directory
2026-02-25 11:00:24 -08:00
Owen Schwartz
dd78674888 Merge pull request #2514 from fosrl/dependabot/npm_and_yarn/multi-60f2582fdd
Bump fast-xml-parser and @aws-sdk/xml-builder
2026-02-25 11:00:15 -08:00
Owen Schwartz
0d0df63847 Merge pull request #2518 from fosrl/dependabot/github_actions/actions/stale-10.2.0
Bump actions/stale from 10.1.1 to 10.2.0
2026-02-25 11:00:01 -08:00
Owen Schwartz
3ab00d9da8 Merge pull request #2527 from fosrl/dependabot/npm_and_yarn/prod-patch-updates-cbac17d765
Bump the prod-patch-updates group across 1 directory with 7 updates
2026-02-25 10:59:38 -08:00
Owen Schwartz
3e6e72c5c7 Merge pull request #2531 from fosrl/dependabot/npm_and_yarn/multi-40a89e2d0a
Bump minimatch
2026-02-25 10:59:08 -08:00
Owen Schwartz
5d8a55f08c Merge pull request #2415 from fosrl/dependabot/github_actions/aws-actions/configure-aws-credentials-6
Bump aws-actions/configure-aws-credentials from 5 to 6
2026-02-25 10:58:18 -08:00
Owen Schwartz
81c569aae4 Merge pull request #2393 from fosrl/dependabot/github_actions/docker/login-action-3.7.0
Bump docker/login-action from 3.6.0 to 3.7.0
2026-02-25 10:58:11 -08:00
Owen Schwartz
88fd3fc4da Merge pull request #2296 from fosrl/dependabot/npm_and_yarn/lodash-4.17.23
Bump lodash from 4.17.21 to 4.17.23
2026-02-25 10:58:02 -08:00
Owen
2282d3ae39 Fix formatting 2026-02-25 10:53:56 -08:00
Owen
c4dcec463a Merge branch 'dev' into LaurenceJJones-feature/installer-tui 2026-02-25 10:48:05 -08:00
Owen
5b7f893ad7 Merge branch 'main' into dev 2026-02-25 10:46:28 -08:00
Owen
2ede0d498a remove log 2026-02-25 10:46:20 -08:00
Owen
f518e8a0ff Merge branch 'feature/installer-tui' of github.com:LaurenceJJones/pangolin into LaurenceJJones-feature/installer-tui 2026-02-25 10:45:38 -08:00
Owen Schwartz
767284408a Merge pull request #2499 from LaurenceJJones/feature/build-variables
enhance(installer): use ldflags to inject versions
2026-02-25 10:42:04 -08:00
Owen Schwartz
eef51f3b84 Merge pull request #2491 from rodneyosodo/fix/install
fix(install): add error handling, code cleanups, and YAML type refactor
2026-02-25 10:41:03 -08:00
Owen Schwartz
69b7114a49 Merge pull request #2537 from Abhinav-kodes/fix/toggle-hydration-sync
fix: sync resource toggle states with context on initial load
2026-02-25 10:36:58 -08:00
Owen Schwartz
0ea38ea568 Merge pull request #2535 from Abhinav-kodes/fix-resource-session-delete-cookie
fix: correct session DELETE tautology and HTTP cookie domain interpolation
2026-02-25 10:35:09 -08:00
Abhinav-kodes
c600da71e3 fix: sync resource toggle states with context on initial load
- Replace defaultChecked with checked for controlled components
- Add useEffect to sync rulesEnabled, ssoEnabled, whitelistEnabled
  when resource context hydrates after mount
- Add nullish coalescing fallback to prevent undefined initial state
2026-02-25 22:07:08 +05:30
Abhinav-kodes
c64dd14b1a fix: correct session DELETE tautology and HTTP cookie domain interpolation 2026-02-25 17:24:27 +05:30
miloschwartz
8ea6d9fa67 add get user by username search endpoint to integration api 2026-02-24 22:04:15 -08:00
Owen
978ac8f53c Add logging 2026-02-24 20:51:27 -08:00
Owen
49a326cde7 Add trust proxy to the internal api
Fix access logs not having the right ip
2026-02-24 20:23:42 -08:00
Owen
63e208f4ec Use local cache in verify session 2026-02-24 19:56:16 -08:00
Owen
f50d1549b0 Update cache to use redis 2026-02-24 19:50:42 -08:00
Owen
55e24df671 Check and prefer user token if provided 2026-02-24 19:48:32 -08:00
Owen
b37e1d0cc0 Use debian slim; alpine broken? 2026-02-24 19:48:16 -08:00
Owen
afa26c0dd4 Exclude migrations? 2026-02-24 19:48:08 -08:00
miloschwartz
c71f46ede5 move copy button and fix translation 2026-02-24 19:44:08 -08:00
dependabot[bot]
2edebaddc2 Bump the prod-patch-updates group across 1 directory with 7 updates
Bumps the prod-patch-updates group with 7 updates in the / directory:

| Package | From | To |
| --- | --- | --- |
| [@asteasolutions/zod-to-openapi](https://github.com/asteasolutions/zod-to-openapi) | `8.4.0` | `8.4.1` |
| [@react-email/components](https://github.com/resend/react-email/tree/HEAD/packages/components) | `1.0.7` | `1.0.8` |
| [@react-email/tailwind](https://github.com/resend/react-email/tree/HEAD/packages/tailwind) | `2.0.4` | `2.0.5` |
| [@simplewebauthn/server](https://github.com/MasterKale/SimpleWebAuthn/tree/HEAD/packages/server) | `13.2.2` | `13.2.3` |
| [glob](https://github.com/isaacs/node-glob) | `13.0.3` | `13.0.6` |
| [next-intl](https://github.com/amannn/next-intl) | `4.8.2` | `4.8.3` |
| [react-hook-form](https://github.com/react-hook-form/react-hook-form) | `7.71.1` | `7.71.2` |



Updates `@asteasolutions/zod-to-openapi` from 8.4.0 to 8.4.1
- [Release notes](https://github.com/asteasolutions/zod-to-openapi/releases)
- [Commits](https://github.com/asteasolutions/zod-to-openapi/compare/v8.4.0...v8.4.1)

Updates `@react-email/components` from 1.0.7 to 1.0.8
- [Release notes](https://github.com/resend/react-email/releases)
- [Changelog](https://github.com/resend/react-email/blob/canary/packages/components/CHANGELOG.md)
- [Commits](https://github.com/resend/react-email/commits/@react-email/components@1.0.8/packages/components)

Updates `@react-email/tailwind` from 2.0.4 to 2.0.5
- [Release notes](https://github.com/resend/react-email/releases)
- [Changelog](https://github.com/resend/react-email/blob/canary/packages/tailwind/CHANGELOG.md)
- [Commits](https://github.com/resend/react-email/commits/@react-email/tailwind@2.0.5/packages/tailwind)

Updates `@simplewebauthn/server` from 13.2.2 to 13.2.3
- [Release notes](https://github.com/MasterKale/SimpleWebAuthn/releases)
- [Changelog](https://github.com/MasterKale/SimpleWebAuthn/blob/master/CHANGELOG.md)
- [Commits](https://github.com/MasterKale/SimpleWebAuthn/commits/v13.2.3/packages/server)

Updates `glob` from 13.0.3 to 13.0.6
- [Changelog](https://github.com/isaacs/node-glob/blob/main/changelog.md)
- [Commits](https://github.com/isaacs/node-glob/compare/v13.0.3...v13.0.6)

Updates `next-intl` from 4.8.2 to 4.8.3
- [Release notes](https://github.com/amannn/next-intl/releases)
- [Changelog](https://github.com/amannn/next-intl/blob/main/CHANGELOG.md)
- [Commits](https://github.com/amannn/next-intl/compare/v4.8.2...v4.8.3)

Updates `react-hook-form` from 7.71.1 to 7.71.2
- [Release notes](https://github.com/react-hook-form/react-hook-form/releases)
- [Changelog](https://github.com/react-hook-form/react-hook-form/blob/master/CHANGELOG.md)
- [Commits](https://github.com/react-hook-form/react-hook-form/compare/v7.71.1...v7.71.2)

---
updated-dependencies:
- dependency-name: "@asteasolutions/zod-to-openapi"
  dependency-version: 8.4.1
  dependency-type: direct:production
  update-type: version-update:semver-patch
  dependency-group: prod-patch-updates
- dependency-name: "@react-email/components"
  dependency-version: 1.0.8
  dependency-type: direct:production
  update-type: version-update:semver-patch
  dependency-group: prod-patch-updates
- dependency-name: "@react-email/tailwind"
  dependency-version: 2.0.5
  dependency-type: direct:production
  update-type: version-update:semver-patch
  dependency-group: prod-patch-updates
- dependency-name: "@simplewebauthn/server"
  dependency-version: 13.2.3
  dependency-type: direct:production
  update-type: version-update:semver-patch
  dependency-group: prod-patch-updates
- dependency-name: glob
  dependency-version: 13.0.6
  dependency-type: direct:production
  update-type: version-update:semver-patch
  dependency-group: prod-patch-updates
- dependency-name: next-intl
  dependency-version: 4.8.3
  dependency-type: direct:production
  update-type: version-update:semver-patch
  dependency-group: prod-patch-updates
- dependency-name: react-hook-form
  dependency-version: 7.71.2
  dependency-type: direct:production
  update-type: version-update:semver-patch
  dependency-group: prod-patch-updates
...

Signed-off-by: dependabot[bot] <support@github.com>
2026-02-25 01:37:31 +00:00
dependabot[bot]
119e1d4867 Bump @types/nodemailer in the dev-patch-updates group across 1 directory
Bumps the dev-patch-updates group with 1 update in the / directory: [@types/nodemailer](https://github.com/DefinitelyTyped/DefinitelyTyped/tree/HEAD/types/nodemailer).


Updates `@types/nodemailer` from 7.0.9 to 7.0.10
- [Release notes](https://github.com/DefinitelyTyped/DefinitelyTyped/releases)
- [Commits](https://github.com/DefinitelyTyped/DefinitelyTyped/commits/HEAD/types/nodemailer)

---
updated-dependencies:
- dependency-name: "@types/nodemailer"
  dependency-version: 7.0.10
  dependency-type: direct:development
  update-type: version-update:semver-patch
  dependency-group: dev-patch-updates
...

Signed-off-by: dependabot[bot] <support@github.com>
2026-02-25 01:35:31 +00:00
dependabot[bot]
63e30d3378 Bump minimatch
Bumps [minimatch](https://github.com/isaacs/minimatch). These dependencies needed to be updated together.

Updates `minimatch` from 10.2.0 to 10.2.3
- [Changelog](https://github.com/isaacs/minimatch/blob/main/changelog.md)
- [Commits](https://github.com/isaacs/minimatch/compare/v10.2.0...v10.2.3)

Updates `minimatch` from 3.1.2 to 3.1.4
- [Changelog](https://github.com/isaacs/minimatch/blob/main/changelog.md)
- [Commits](https://github.com/isaacs/minimatch/compare/v3.1.2...v3.1.4)

Updates `minimatch` from 9.0.5 to 9.0.7
- [Changelog](https://github.com/isaacs/minimatch/blob/main/changelog.md)
- [Commits](https://github.com/isaacs/minimatch/compare/v9.0.5...v9.0.7)

Updates `minimatch` from 10.1.1 to 10.2.3
- [Changelog](https://github.com/isaacs/minimatch/blob/main/changelog.md)
- [Commits](https://github.com/isaacs/minimatch/compare/v10.1.1...v10.2.3)

---
updated-dependencies:
- dependency-name: minimatch
  dependency-version: 10.2.3
  dependency-type: indirect
- dependency-name: minimatch
  dependency-version: 3.1.4
  dependency-type: indirect
- dependency-name: minimatch
  dependency-version: 9.0.7
  dependency-type: indirect
- dependency-name: minimatch
  dependency-version: 10.2.3
  dependency-type: indirect
...

Signed-off-by: dependabot[bot] <support@github.com>
2026-02-25 01:04:10 +00:00
Owen
d6fe04ec4e Fix orgid issue when regen credentials 2026-02-24 14:26:10 -08:00
Owen
b8a364af6a Fix log query 2026-02-23 22:01:11 -08:00
Owen
5ef808d4a2 Merge branch 'main' into logs-database 2026-02-23 16:39:39 -08:00
miloschwartz
848d4d91e6 fix sidebar 2026-02-23 13:40:08 -08:00
Owen
a502780c9b Fix sso username issue 2026-02-22 22:05:02 -08:00
Owen Schwartz
418e099804 Merge pull request #2521 from fosrl/dev
1.15.4-s.6
2026-02-22 21:13:51 -08:00
Owen Schwartz
06258aa386 Merge pull request #2513 from fosrl/crowdin_dev
New Crowdin updates
2026-02-22 21:13:17 -08:00
Owen Schwartz
d7608b1cc8 New translations en-us.json (Norwegian Bokmal) 2026-02-22 21:11:37 -08:00
Owen Schwartz
cb86ad4104 New translations en-us.json (Chinese Simplified) 2026-02-22 21:11:35 -08:00
Owen Schwartz
8cd51df1e1 New translations en-us.json (Turkish) 2026-02-22 21:11:34 -08:00
Owen Schwartz
8ef7220766 New translations en-us.json (Russian) 2026-02-22 21:11:33 -08:00
Owen Schwartz
b5333a3686 New translations en-us.json (Portuguese) 2026-02-22 21:11:31 -08:00
Owen Schwartz
e6e92dbc0f New translations en-us.json (Polish) 2026-02-22 21:11:30 -08:00
Owen Schwartz
01fdd41a10 New translations en-us.json (Dutch) 2026-02-22 21:11:28 -08:00
Owen Schwartz
6af06a38ae New translations en-us.json (Korean) 2026-02-22 21:11:27 -08:00
Owen Schwartz
5d9c66d22d New translations en-us.json (Italian) 2026-02-22 21:11:26 -08:00
Owen Schwartz
81f5a4b127 New translations en-us.json (German) 2026-02-22 21:11:24 -08:00
Owen Schwartz
da3e68a20b New translations en-us.json (Czech) 2026-02-22 21:11:23 -08:00
Owen Schwartz
8712c1719e New translations en-us.json (Bulgarian) 2026-02-22 21:11:22 -08:00
Owen Schwartz
593c5db0e8 New translations en-us.json (Spanish) 2026-02-22 21:11:20 -08:00
Owen Schwartz
b28391feae New translations en-us.json (French) 2026-02-22 21:11:19 -08:00
Owen
5f8df6d4cd Merge branch 'main' into dev 2026-02-22 21:02:58 -08:00
Owen
c36efe7f14 Add translations 2026-02-22 21:02:21 -08:00
Owen
cf97b6df9c Handle billing bad subs, remove exit node name from lock, prevent some
stuff on saas
2026-02-22 20:45:53 -08:00
dependabot[bot]
720d3a8135 Bump actions/stale from 10.1.1 to 10.2.0
Bumps [actions/stale](https://github.com/actions/stale) from 10.1.1 to 10.2.0.
- [Release notes](https://github.com/actions/stale/releases)
- [Changelog](https://github.com/actions/stale/blob/main/CHANGELOG.md)
- [Commits](997185467f...b5d41d4e1d)

---
updated-dependencies:
- dependency-name: actions/stale
  dependency-version: 10.2.0
  dependency-type: direct:production
  update-type: version-update:semver-minor
...

Signed-off-by: dependabot[bot] <support@github.com>
2026-02-23 01:36:30 +00:00
dependabot[bot]
9c42458fa5 Bump actions/checkout from 6.0.1 to 6.0.2
Bumps [actions/checkout](https://github.com/actions/checkout) from 6.0.1 to 6.0.2.
- [Release notes](https://github.com/actions/checkout/releases)
- [Changelog](https://github.com/actions/checkout/blob/main/CHANGELOG.md)
- [Commits](8e8c483db8...de0fac2e45)

---
updated-dependencies:
- dependency-name: actions/checkout
  dependency-version: 6.0.2
  dependency-type: direct:production
  update-type: version-update:semver-patch
...

Signed-off-by: dependabot[bot] <support@github.com>
2026-02-23 01:31:48 +00:00
Owen
6d9b129ac9 Merge branch 'main' into k8s 2026-02-22 17:28:09 -08:00
miloschwartz
e17ec798d4 remove local fonts 2026-02-22 11:18:57 -08:00
miloschwartz
58ac499f30 add safeRead 2026-02-21 16:38:51 -08:00
miloschwartz
f07f0092ad testing with local font 2026-02-21 14:34:38 -08:00
dependabot[bot]
bcd3475d17 Bump fast-xml-parser and @aws-sdk/xml-builder
Bumps [fast-xml-parser](https://github.com/NaturalIntelligence/fast-xml-parser) and [@aws-sdk/xml-builder](https://github.com/aws/aws-sdk-js-v3/tree/HEAD/packages-internal/xml-builder). These dependencies needed to be updated together.

Updates `fast-xml-parser` from 5.3.4 to 5.3.6
- [Release notes](https://github.com/NaturalIntelligence/fast-xml-parser/releases)
- [Changelog](https://github.com/NaturalIntelligence/fast-xml-parser/blob/master/CHANGELOG.md)
- [Commits](https://github.com/NaturalIntelligence/fast-xml-parser/compare/v5.3.4...v5.3.6)

Updates `@aws-sdk/xml-builder` from 3.972.4 to 3.972.5
- [Release notes](https://github.com/aws/aws-sdk-js-v3/releases)
- [Changelog](https://github.com/aws/aws-sdk-js-v3/blob/main/packages-internal/xml-builder/CHANGELOG.md)
- [Commits](https://github.com/aws/aws-sdk-js-v3/commits/HEAD/packages-internal/xml-builder)

---
updated-dependencies:
- dependency-name: fast-xml-parser
  dependency-version: 5.3.6
  dependency-type: indirect
- dependency-name: "@aws-sdk/xml-builder"
  dependency-version: 3.972.5
  dependency-type: indirect
...

Signed-off-by: dependabot[bot] <support@github.com>
2026-02-21 20:18:36 +00:00
Owen Schwartz
7c04526088 New translations en-us.json (Czech) 2026-02-21 05:24:27 -08:00
Owen Schwartz
2d7ab68576 New translations en-us.json (Czech) 2026-02-21 04:14:18 -08:00
miloschwartz
218a4893b6 hide address on sites and clients 2026-02-20 22:47:56 -08:00
miloschwartz
266bf261aa update note in migration 2026-02-20 22:45:37 -08:00
Owen
b622aca221 Try to route logs requests to a different database 2026-02-20 17:20:01 -08:00
Owen
b786497299 Working on k8s 2026-02-19 17:55:49 -08:00
Laurence
e8398cb221 enhance(installer): use huh package to handle input
Instead of relying on stdin and stdout by default, using the huh package from charmbracelet allows us to handle user input more gracefully, such as y/n instead of typing 'yes' or 'no'. If a user makes a mistake while typing in a text field, they currently cannot use the left or right arrow keys to edit a single character; with huh, they can. This adds a dependency and may increase the size of the installer, but overall improves the user experience.
2026-02-18 11:05:48 +00:00
Laurence
9460e28c7b enhance(installer): use ldflags to inject versions
Instead of the CI/CD using sed to replace the 'replaceme' text, we can instead use ldflags, which can inject the version variables at build time. The makefile had a bunch of workarounds for dev, so these have been removed as cleanup; it now fetches versions from the GitHub API directly if the variables are not injected the way the CI/CD does.
2026-02-18 09:43:41 +00:00
dependabot[bot]
d8b45396e3 Bump qs from 6.14.1 to 6.14.2
Bumps [qs](https://github.com/ljharb/qs) from 6.14.1 to 6.14.2.
- [Changelog](https://github.com/ljharb/qs/blob/main/CHANGELOG.md)
- [Commits](https://github.com/ljharb/qs/compare/v6.14.1...v6.14.2)

---
updated-dependencies:
- dependency-name: qs
  dependency-version: 6.14.2
  dependency-type: indirect
...

Signed-off-by: dependabot[bot] <support@github.com>
2026-02-18 05:08:23 +00:00
Rodney Osodo
952d0c74d0 refactor(install): use any for YAML map types instead of interface{}
Signed-off-by: Rodney Osodo <socials@rodneyosodo.com>
2026-02-17 10:45:06 +03:00
Rodney Osodo
ffbea7af59 fix(install): add error handling and minor code cleanups
Signed-off-by: Rodney Osodo <socials@rodneyosodo.com>
2026-02-17 10:45:06 +03:00
dependabot[bot]
971c375398 Bump docker/login-action from 3.6.0 to 3.7.0
Bumps [docker/login-action](https://github.com/docker/login-action) from 3.6.0 to 3.7.0.
- [Release notes](https://github.com/docker/login-action/releases)
- [Commits](5e57cd1181...c94ce9fb46)

---
updated-dependencies:
- dependency-name: docker/login-action
  dependency-version: 3.7.0
  dependency-type: direct:production
  update-type: version-update:semver-minor
...

Signed-off-by: dependabot[bot] <support@github.com>
2026-02-15 19:12:16 +00:00
dependabot[bot]
ac4439c5ae Bump aws-actions/configure-aws-credentials from 5 to 6
Bumps [aws-actions/configure-aws-credentials](https://github.com/aws-actions/configure-aws-credentials) from 5 to 6.
- [Release notes](https://github.com/aws-actions/configure-aws-credentials/releases)
- [Changelog](https://github.com/aws-actions/configure-aws-credentials/blob/main/CHANGELOG.md)
- [Commits](https://github.com/aws-actions/configure-aws-credentials/compare/v5...v6)

---
updated-dependencies:
- dependency-name: aws-actions/configure-aws-credentials
  dependency-version: '6'
  dependency-type: direct:production
  update-type: version-update:semver-major
...

Signed-off-by: dependabot[bot] <support@github.com>
2026-02-15 19:12:05 +00:00
dependabot[bot]
8c15855fc3 Bump lodash from 4.17.21 to 4.17.23
Bumps [lodash](https://github.com/lodash/lodash) from 4.17.21 to 4.17.23.
- [Release notes](https://github.com/lodash/lodash/releases)
- [Commits](https://github.com/lodash/lodash/compare/4.17.21...4.17.23)

---
updated-dependencies:
- dependency-name: lodash
  dependency-version: 4.17.23
  dependency-type: indirect
...

Signed-off-by: dependabot[bot] <support@github.com>
2026-01-26 01:37:38 +00:00
208 changed files with 3635 additions and 1356 deletions

View File

@@ -28,9 +28,9 @@ LICENSE
CONTRIBUTING.md CONTRIBUTING.md
dist dist
.git .git
migrations/ server/migrations/
config/ config/
build.ts build.ts
tsconfig.json tsconfig.json
Dockerfile* Dockerfile*
migrations/ drizzle.config.ts

View File

@@ -29,7 +29,7 @@ jobs:
permissions: write-all permissions: write-all
steps: steps:
- name: Configure AWS credentials - name: Configure AWS credentials
uses: aws-actions/configure-aws-credentials@v5 uses: aws-actions/configure-aws-credentials@v6
with: with:
role-to-assume: arn:aws:iam::${{ secrets.AWS_ACCOUNT_ID }}:role/${{ secrets.AWS_ROLE_NAME }} role-to-assume: arn:aws:iam::${{ secrets.AWS_ACCOUNT_ID }}:role/${{ secrets.AWS_ROLE_NAME }}
role-duration-seconds: 3600 role-duration-seconds: 3600
@@ -62,7 +62,7 @@ jobs:
steps: steps:
- name: Checkout code - name: Checkout code
uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1 uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
- name: Monitor storage space - name: Monitor storage space
run: | run: |
@@ -77,7 +77,7 @@ jobs:
fi fi
- name: Log in to Docker Hub - name: Log in to Docker Hub
uses: docker/login-action@5e57cd118135c172c3672efd75eb46360885c0ef # v3.6.0 uses: docker/login-action@c94ce9fb468520275223c153574b00df6fe4bcc9 # v3.7.0
with: with:
registry: docker.io registry: docker.io
username: ${{ secrets.DOCKER_HUB_USERNAME }} username: ${{ secrets.DOCKER_HUB_USERNAME }}
@@ -134,7 +134,7 @@ jobs:
steps: steps:
- name: Checkout code - name: Checkout code
uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1 uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
- name: Monitor storage space - name: Monitor storage space
run: | run: |
@@ -149,7 +149,7 @@ jobs:
fi fi
- name: Log in to Docker Hub - name: Log in to Docker Hub
uses: docker/login-action@5e57cd118135c172c3672efd75eb46360885c0ef # v3.6.0 uses: docker/login-action@c94ce9fb468520275223c153574b00df6fe4bcc9 # v3.7.0
with: with:
registry: docker.io registry: docker.io
username: ${{ secrets.DOCKER_HUB_USERNAME }} username: ${{ secrets.DOCKER_HUB_USERNAME }}
@@ -201,10 +201,10 @@ jobs:
timeout-minutes: 30 timeout-minutes: 30
steps: steps:
- name: Checkout code - name: Checkout code
uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1 uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
- name: Log in to Docker Hub - name: Log in to Docker Hub
uses: docker/login-action@5e57cd118135c172c3672efd75eb46360885c0ef # v3.6.0 uses: docker/login-action@c94ce9fb468520275223c153574b00df6fe4bcc9 # v3.7.0
with: with:
registry: docker.io registry: docker.io
username: ${{ secrets.DOCKER_HUB_USERNAME }} username: ${{ secrets.DOCKER_HUB_USERNAME }}
@@ -256,7 +256,7 @@ jobs:
steps: steps:
- name: Checkout code - name: Checkout code
uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1 uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
- name: Extract tag name - name: Extract tag name
id: get-tag id: get-tag
@@ -289,22 +289,14 @@ jobs:
echo "LATEST_BADGER_TAG=$LATEST_TAG" >> $GITHUB_ENV echo "LATEST_BADGER_TAG=$LATEST_TAG" >> $GITHUB_ENV
shell: bash shell: bash
- name: Update install/main.go
run: |
PANGOLIN_VERSION=${{ env.TAG }}
GERBIL_VERSION=${{ env.LATEST_GERBIL_TAG }}
BADGER_VERSION=${{ env.LATEST_BADGER_TAG }}
sed -i "s/config.PangolinVersion = \".*\"/config.PangolinVersion = \"$PANGOLIN_VERSION\"/" install/main.go
sed -i "s/config.GerbilVersion = \".*\"/config.GerbilVersion = \"$GERBIL_VERSION\"/" install/main.go
sed -i "s/config.BadgerVersion = \".*\"/config.BadgerVersion = \"$BADGER_VERSION\"/" install/main.go
echo "Updated install/main.go with Pangolin version $PANGOLIN_VERSION, Gerbil version $GERBIL_VERSION, and Badger version $BADGER_VERSION"
cat install/main.go
shell: bash
- name: Build installer - name: Build installer
working-directory: install working-directory: install
run: | run: |
make go-build-release make go-build-release \
PANGOLIN_VERSION=${{ env.TAG }} \
GERBIL_VERSION=${{ env.LATEST_GERBIL_TAG }} \
BADGER_VERSION=${{ env.LATEST_BADGER_TAG }}
shell: bash
- name: Upload artifacts from /install/bin - name: Upload artifacts from /install/bin
uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6.0.0 uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6.0.0
@@ -415,7 +407,7 @@ jobs:
shell: bash shell: bash
- name: Login to GitHub Container Registry (for cosign) - name: Login to GitHub Container Registry (for cosign)
uses: docker/login-action@5e57cd118135c172c3672efd75eb46360885c0ef # v3.6.0 uses: docker/login-action@c94ce9fb468520275223c153574b00df6fe4bcc9 # v3.7.0
with: with:
registry: ghcr.io registry: ghcr.io
username: ${{ github.actor }} username: ${{ github.actor }}
@@ -578,7 +570,7 @@ jobs:
permissions: write-all permissions: write-all
steps: steps:
- name: Configure AWS credentials - name: Configure AWS credentials
uses: aws-actions/configure-aws-credentials@v5 uses: aws-actions/configure-aws-credentials@v6
with: with:
role-to-assume: arn:aws:iam::${{ secrets.AWS_ACCOUNT_ID }}:role/${{ secrets.AWS_ROLE_NAME }} role-to-assume: arn:aws:iam::${{ secrets.AWS_ACCOUNT_ID }}:role/${{ secrets.AWS_ROLE_NAME }}
role-duration-seconds: 3600 role-duration-seconds: 3600

View File

@@ -21,7 +21,7 @@ jobs:
runs-on: ubuntu-latest runs-on: ubuntu-latest
steps: steps:
- name: Checkout code - name: Checkout code
uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1 uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
- name: Set up Node.js - name: Set up Node.js
uses: actions/setup-node@6044e13b5dc448c55e2357c09f80417699197238 # v6.2.0 uses: actions/setup-node@6044e13b5dc448c55e2357c09f80417699197238 # v6.2.0

View File

@@ -14,7 +14,7 @@ jobs:
permissions: write-all permissions: write-all
steps: steps:
- name: Configure AWS credentials - name: Configure AWS credentials
uses: aws-actions/configure-aws-credentials@v5 uses: aws-actions/configure-aws-credentials@v6
with: with:
role-to-assume: arn:aws:iam::${{ secrets.AWS_ACCOUNT_ID }}:role/${{ secrets.AWS_ROLE_NAME }} role-to-assume: arn:aws:iam::${{ secrets.AWS_ACCOUNT_ID }}:role/${{ secrets.AWS_ROLE_NAME }}
role-duration-seconds: 3600 role-duration-seconds: 3600

View File

@@ -23,7 +23,7 @@ jobs:
permissions: write-all permissions: write-all
steps: steps:
- name: Configure AWS credentials - name: Configure AWS credentials
uses: aws-actions/configure-aws-credentials@v5 uses: aws-actions/configure-aws-credentials@v6
with: with:
role-to-assume: arn:aws:iam::${{ secrets.AWS_ACCOUNT_ID }}:role/${{ secrets.AWS_ROLE_NAME }} role-to-assume: arn:aws:iam::${{ secrets.AWS_ACCOUNT_ID }}:role/${{ secrets.AWS_ROLE_NAME }}
role-duration-seconds: 3600 role-duration-seconds: 3600
@@ -54,7 +54,42 @@ jobs:
steps: steps:
- name: Checkout code - name: Checkout code
uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1 uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
- name: Download MaxMind GeoLite2 databases
env:
MAXMIND_LICENSE_KEY: ${{ secrets.MAXMIND_LICENSE_KEY }}
run: |
echo "Downloading MaxMind GeoLite2 databases..."
# Download GeoLite2-Country
curl -L "https://download.maxmind.com/app/geoip_download?edition_id=GeoLite2-Country&license_key=${MAXMIND_LICENSE_KEY}&suffix=tar.gz" \
-o GeoLite2-Country.tar.gz
# Download GeoLite2-ASN
curl -L "https://download.maxmind.com/app/geoip_download?edition_id=GeoLite2-ASN&license_key=${MAXMIND_LICENSE_KEY}&suffix=tar.gz" \
-o GeoLite2-ASN.tar.gz
# Extract the .mmdb files
tar -xzf GeoLite2-Country.tar.gz --strip-components=1 --wildcards '*.mmdb'
tar -xzf GeoLite2-ASN.tar.gz --strip-components=1 --wildcards '*.mmdb'
# Verify files exist
if [ ! -f "GeoLite2-Country.mmdb" ]; then
echo "ERROR: Failed to download GeoLite2-Country.mmdb"
exit 1
fi
if [ ! -f "GeoLite2-ASN.mmdb" ]; then
echo "ERROR: Failed to download GeoLite2-ASN.mmdb"
exit 1
fi
# Clean up tar files
rm -f GeoLite2-Country.tar.gz GeoLite2-ASN.tar.gz
echo "MaxMind databases downloaded successfully"
ls -lh GeoLite2-*.mmdb
- name: Monitor storage space - name: Monitor storage space
run: | run: |
@@ -69,7 +104,7 @@ jobs:
fi fi
- name: Configure AWS credentials - name: Configure AWS credentials
uses: aws-actions/configure-aws-credentials@v5 uses: aws-actions/configure-aws-credentials@v6
with: with:
role-to-assume: arn:aws:iam::${{ secrets.aws_account_id }}:role/${{ secrets.AWS_ROLE_NAME }} role-to-assume: arn:aws:iam::${{ secrets.aws_account_id }}:role/${{ secrets.AWS_ROLE_NAME }}
role-duration-seconds: 3600 role-duration-seconds: 3600
@@ -110,7 +145,7 @@ jobs:
permissions: write-all permissions: write-all
steps: steps:
- name: Configure AWS credentials - name: Configure AWS credentials
uses: aws-actions/configure-aws-credentials@v5 uses: aws-actions/configure-aws-credentials@v6
with: with:
role-to-assume: arn:aws:iam::${{ secrets.AWS_ACCOUNT_ID }}:role/${{ secrets.AWS_ROLE_NAME }} role-to-assume: arn:aws:iam::${{ secrets.AWS_ACCOUNT_ID }}:role/${{ secrets.AWS_ROLE_NAME }}
role-duration-seconds: 3600 role-duration-seconds: 3600

View File

@@ -14,7 +14,7 @@ jobs:
stale: stale:
runs-on: ubuntu-latest runs-on: ubuntu-latest
steps: steps:
- uses: actions/stale@997185467fa4f803885201cee163a9f38240193d # v10.1.1 - uses: actions/stale@b5d41d4e1d5dceea10e7104786b73624c18a190f # v10.2.0
with: with:
days-before-stale: 14 days-before-stale: 14
days-before-close: 14 days-before-close: 14

View File

@@ -14,7 +14,7 @@ jobs:
runs-on: ubuntu-latest runs-on: ubuntu-latest
steps: steps:
- name: Checkout repository - name: Checkout repository
uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1 uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
- name: Install Node - name: Install Node
uses: actions/setup-node@6044e13b5dc448c55e2357c09f80417699197238 # v6.2.0 uses: actions/setup-node@6044e13b5dc448c55e2357c09f80417699197238 # v6.2.0
@@ -62,7 +62,7 @@ jobs:
runs-on: ubuntu-latest runs-on: ubuntu-latest
steps: steps:
- name: Checkout repository - name: Checkout repository
uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1 uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
- name: Build Docker image sqlite - name: Build Docker image sqlite
run: make dev-build-sqlite run: make dev-build-sqlite
@@ -71,7 +71,7 @@ jobs:
runs-on: ubuntu-latest runs-on: ubuntu-latest
steps: steps:
- name: Checkout repository - name: Checkout repository
uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1 uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
- name: Build Docker image pg - name: Build Docker image pg
run: make dev-build-pg run: make dev-build-pg

View File

@@ -1,8 +1,9 @@
FROM node:24-alpine AS base # FROM node:24-slim AS base
FROM public.ecr.aws/docker/library/node:24-slim AS base
WORKDIR /app WORKDIR /app
RUN apk add --no-cache python3 make g++ RUN apt-get update && apt-get install -y python3 make g++ && rm -rf /var/lib/apt/lists/*
COPY package*.json ./ COPY package*.json ./
@@ -23,15 +24,20 @@ RUN if [ "$BUILD" = "oss" ]; then rm -rf server/private; fi && \
npm run build:cli && \ npm run build:cli && \
test -f dist/server.mjs test -f dist/server.mjs
# Create placeholder files for MaxMind databases to avoid COPY errors
# Real files should be present for saas builds, placeholders for oss builds
RUN touch /app/GeoLite2-Country.mmdb /app/GeoLite2-ASN.mmdb
FROM base AS builder FROM base AS builder
RUN npm ci --omit=dev RUN npm ci --omit=dev
FROM node:24-alpine AS runner # FROM node:24-slim AS runner
FROM public.ecr.aws/docker/library/node:24-slim AS runner
WORKDIR /app WORKDIR /app
RUN apk add --no-cache curl tzdata RUN apt-get update && apt-get install -y curl tzdata && rm -rf /var/lib/apt/lists/*
COPY --from=builder /app/node_modules ./node_modules COPY --from=builder /app/node_modules ./node_modules
COPY --from=builder /app/package.json ./package.json COPY --from=builder /app/package.json ./package.json
@@ -49,6 +55,18 @@ COPY server/db/ios_models.json ./dist/ios_models.json
COPY server/db/mac_models.json ./dist/mac_models.json COPY server/db/mac_models.json ./dist/mac_models.json
COPY public ./public COPY public ./public
# Copy MaxMind databases for SaaS builds
ARG BUILD=oss
RUN mkdir -p ./maxmind
# Copy MaxMind databases (placeholders exist for oss builds, real files for saas)
COPY --from=builder-dev /app/GeoLite2-Country.mmdb ./maxmind/GeoLite2-Country.mmdb
COPY --from=builder-dev /app/GeoLite2-ASN.mmdb ./maxmind/GeoLite2-ASN.mmdb
# Remove MaxMind databases for non-saas builds (keep only for saas)
RUN if [ "$BUILD" != "saas" ]; then rm -rf ./maxmind; fi
# OCI Image Labels - Build Args for dynamic values # OCI Image Labels - Build Args for dynamic values
ARG VERSION="dev" ARG VERSION="dev"
ARG REVISION="" ARG REVISION=""

View File

@@ -3,7 +3,7 @@ import { db, orgs } from "@server/db";
import { eq } from "drizzle-orm"; import { eq } from "drizzle-orm";
import { encrypt } from "@server/lib/crypto"; import { encrypt } from "@server/lib/crypto";
import { configFilePath1, configFilePath2 } from "@server/lib/consts"; import { configFilePath1, configFilePath2 } from "@server/lib/consts";
import { generateCA } from "@server/private/lib/sshCA"; import { generateCA } from "@server/lib/sshCA";
import fs from "fs"; import fs from "fs";
import yaml from "js-yaml"; import yaml from "js-yaml";

View File

@@ -4,6 +4,12 @@ services:
image: fosrl/pangolin:latest image: fosrl/pangolin:latest
container_name: pangolin container_name: pangolin
restart: unless-stopped restart: unless-stopped
deploy:
resources:
limits:
memory: 1g
reservations:
memory: 256m
volumes: volumes:
- ./config:/app/config - ./config:/app/config
healthcheck: healthcheck:

View File

@@ -1,41 +1,24 @@
all: update-versions go-build-release put-back all: go-build-release
dev-all: dev-update-versions dev-build dev-clean
# Build with version injection via ldflags
# Versions can be passed via: make go-build-release PANGOLIN_VERSION=x.x.x GERBIL_VERSION=x.x.x BADGER_VERSION=x.x.x
# Or fetched automatically if not provided (requires curl and jq)
PANGOLIN_VERSION ?= $(shell curl -s https://api.github.com/repos/fosrl/pangolin/tags | jq -r '.[0].name')
GERBIL_VERSION ?= $(shell curl -s https://api.github.com/repos/fosrl/gerbil/tags | jq -r '.[0].name')
BADGER_VERSION ?= $(shell curl -s https://api.github.com/repos/fosrl/badger/tags | jq -r '.[0].name')
LDFLAGS = -X main.pangolinVersion=$(PANGOLIN_VERSION) \
-X main.gerbilVersion=$(GERBIL_VERSION) \
-X main.badgerVersion=$(BADGER_VERSION)
go-build-release: go-build-release:
CGO_ENABLED=0 GOOS=linux GOARCH=amd64 go build -o bin/installer_linux_amd64 @echo "Building with versions - Pangolin: $(PANGOLIN_VERSION), Gerbil: $(GERBIL_VERSION), Badger: $(BADGER_VERSION)"
CGO_ENABLED=0 GOOS=linux GOARCH=arm64 go build -o bin/installer_linux_arm64 CGO_ENABLED=0 GOOS=linux GOARCH=amd64 go build -ldflags "$(LDFLAGS)" -o bin/installer_linux_amd64
CGO_ENABLED=0 GOOS=linux GOARCH=arm64 go build -ldflags "$(LDFLAGS)" -o bin/installer_linux_arm64
clean: clean:
rm -f bin/installer_linux_amd64 rm -f bin/installer_linux_amd64
rm -f bin/installer_linux_arm64 rm -f bin/installer_linux_arm64
update-versions: .PHONY: all go-build-release clean
@echo "Fetching latest versions..."
cp main.go main.go.bak && \
$(MAKE) dev-update-versions
put-back:
mv main.go.bak main.go
dev-update-versions:
if [ -z "$(tag)" ]; then \
PANGOLIN_VERSION=$$(curl -s https://api.github.com/repos/fosrl/pangolin/tags | jq -r '.[0].name'); \
else \
PANGOLIN_VERSION=$(tag); \
fi && \
GERBIL_VERSION=$$(curl -s https://api.github.com/repos/fosrl/gerbil/tags | jq -r '.[0].name') && \
BADGER_VERSION=$$(curl -s https://api.github.com/repos/fosrl/badger/tags | jq -r '.[0].name') && \
echo "Latest versions - Pangolin: $$PANGOLIN_VERSION, Gerbil: $$GERBIL_VERSION, Badger: $$BADGER_VERSION" && \
sed -i "s/config.PangolinVersion = \".*\"/config.PangolinVersion = \"$$PANGOLIN_VERSION\"/" main.go && \
sed -i "s/config.GerbilVersion = \".*\"/config.GerbilVersion = \"$$GERBIL_VERSION\"/" main.go && \
sed -i "s/config.BadgerVersion = \".*\"/config.BadgerVersion = \"$$BADGER_VERSION\"/" main.go && \
echo "Updated main.go with latest versions"
dev-build: go-build-release
dev-clean:
@echo "Restoring version values ..."
sed -i "s/config.PangolinVersion = \".*\"/config.PangolinVersion = \"replaceme\"/" main.go && \
sed -i "s/config.GerbilVersion = \".*\"/config.GerbilVersion = \"replaceme\"/" main.go && \
sed -i "s/config.BadgerVersion = \".*\"/config.BadgerVersion = \"replaceme\"/" main.go
@echo "Restored version strings in main.go"

View File

@@ -118,19 +118,19 @@ func copyDockerService(sourceFile, destFile, serviceName string) error {
} }
// Parse source Docker Compose YAML // Parse source Docker Compose YAML
var sourceCompose map[string]interface{} var sourceCompose map[string]any
if err := yaml.Unmarshal(sourceData, &sourceCompose); err != nil { if err := yaml.Unmarshal(sourceData, &sourceCompose); err != nil {
return fmt.Errorf("error parsing source Docker Compose file: %w", err) return fmt.Errorf("error parsing source Docker Compose file: %w", err)
} }
// Parse destination Docker Compose YAML // Parse destination Docker Compose YAML
var destCompose map[string]interface{} var destCompose map[string]any
if err := yaml.Unmarshal(destData, &destCompose); err != nil { if err := yaml.Unmarshal(destData, &destCompose); err != nil {
return fmt.Errorf("error parsing destination Docker Compose file: %w", err) return fmt.Errorf("error parsing destination Docker Compose file: %w", err)
} }
// Get services section from source // Get services section from source
sourceServices, ok := sourceCompose["services"].(map[string]interface{}) sourceServices, ok := sourceCompose["services"].(map[string]any)
if !ok { if !ok {
return fmt.Errorf("services section not found in source file or has invalid format") return fmt.Errorf("services section not found in source file or has invalid format")
} }
@@ -142,10 +142,10 @@ func copyDockerService(sourceFile, destFile, serviceName string) error {
} }
// Get or create services section in destination // Get or create services section in destination
destServices, ok := destCompose["services"].(map[string]interface{}) destServices, ok := destCompose["services"].(map[string]any)
if !ok { if !ok {
// If services section doesn't exist, create it // If services section doesn't exist, create it
destServices = make(map[string]interface{}) destServices = make(map[string]any)
destCompose["services"] = destServices destCompose["services"] = destServices
} }
@@ -187,13 +187,12 @@ func backupConfig() error {
return nil return nil
} }
func MarshalYAMLWithIndent(data interface{}, indent int) ([]byte, error) { func MarshalYAMLWithIndent(data any, indent int) ([]byte, error) {
buffer := new(bytes.Buffer) buffer := new(bytes.Buffer)
encoder := yaml.NewEncoder(buffer) encoder := yaml.NewEncoder(buffer)
encoder.SetIndent(indent) encoder.SetIndent(indent)
err := encoder.Encode(data) if err := encoder.Encode(data); err != nil {
if err != nil {
return nil, err return nil, err
} }
@@ -209,7 +208,7 @@ func replaceInFile(filepath, oldStr, newStr string) error {
} }
// Replace the string // Replace the string
newContent := strings.Replace(string(content), oldStr, newStr, -1) newContent := strings.ReplaceAll(string(content), oldStr, newStr)
// Write the modified content back to the file // Write the modified content back to the file
err = os.WriteFile(filepath, []byte(newContent), 0644) err = os.WriteFile(filepath, []byte(newContent), 0644)
@@ -228,28 +227,28 @@ func CheckAndAddTraefikLogVolume(composePath string) error {
} }
// Parse YAML into a generic map // Parse YAML into a generic map
var compose map[string]interface{} var compose map[string]any
if err := yaml.Unmarshal(data, &compose); err != nil { if err := yaml.Unmarshal(data, &compose); err != nil {
return fmt.Errorf("error parsing compose file: %w", err) return fmt.Errorf("error parsing compose file: %w", err)
} }
// Get services section // Get services section
services, ok := compose["services"].(map[string]interface{}) services, ok := compose["services"].(map[string]any)
if !ok { if !ok {
return fmt.Errorf("services section not found or invalid") return fmt.Errorf("services section not found or invalid")
} }
// Get traefik service // Get traefik service
traefik, ok := services["traefik"].(map[string]interface{}) traefik, ok := services["traefik"].(map[string]any)
if !ok { if !ok {
return fmt.Errorf("traefik service not found or invalid") return fmt.Errorf("traefik service not found or invalid")
} }
// Check volumes // Check volumes
logVolume := "./config/traefik/logs:/var/log/traefik" logVolume := "./config/traefik/logs:/var/log/traefik"
var volumes []interface{} var volumes []any
if existingVolumes, ok := traefik["volumes"].([]interface{}); ok { if existingVolumes, ok := traefik["volumes"].([]any); ok {
// Check if volume already exists // Check if volume already exists
for _, v := range existingVolumes { for _, v := range existingVolumes {
if v.(string) == logVolume { if v.(string) == logVolume {
@@ -295,13 +294,13 @@ func MergeYAML(baseFile, overlayFile string) error {
} }
// Parse base YAML into a map // Parse base YAML into a map
var baseMap map[string]interface{} var baseMap map[string]any
if err := yaml.Unmarshal(baseContent, &baseMap); err != nil { if err := yaml.Unmarshal(baseContent, &baseMap); err != nil {
return fmt.Errorf("error parsing base YAML: %v", err) return fmt.Errorf("error parsing base YAML: %v", err)
} }
// Parse overlay YAML into a map // Parse overlay YAML into a map
var overlayMap map[string]interface{} var overlayMap map[string]any
if err := yaml.Unmarshal(overlayContent, &overlayMap); err != nil { if err := yaml.Unmarshal(overlayContent, &overlayMap); err != nil {
return fmt.Errorf("error parsing overlay YAML: %v", err) return fmt.Errorf("error parsing overlay YAML: %v", err)
} }
@@ -324,8 +323,8 @@ func MergeYAML(baseFile, overlayFile string) error {
} }
// mergeMap recursively merges two maps // mergeMap recursively merges two maps
func mergeMap(base, overlay map[string]interface{}) map[string]interface{} { func mergeMap(base, overlay map[string]any) map[string]any {
result := make(map[string]interface{}) result := make(map[string]any)
// Copy all key-values from base map // Copy all key-values from base map
for k, v := range base { for k, v := range base {
@@ -336,8 +335,8 @@ func mergeMap(base, overlay map[string]interface{}) map[string]interface{} {
for k, v := range overlay { for k, v := range overlay {
// If both maps have the same key and both values are maps, merge recursively // If both maps have the same key and both values are maps, merge recursively
if baseVal, ok := base[k]; ok { if baseVal, ok := base[k]; ok {
if baseMap, isBaseMap := baseVal.(map[string]interface{}); isBaseMap { if baseMap, isBaseMap := baseVal.(map[string]any); isBaseMap {
if overlayMap, isOverlayMap := v.(map[string]interface{}); isOverlayMap { if overlayMap, isOverlayMap := v.(map[string]any); isOverlayMap {
result[k] = mergeMap(baseMap, overlayMap) result[k] = mergeMap(baseMap, overlayMap)
continue continue
} }

View File

@@ -4,6 +4,12 @@ services:
image: docker.io/fosrl/pangolin:{{if .IsEnterprise}}ee-{{end}}{{.PangolinVersion}} image: docker.io/fosrl/pangolin:{{if .IsEnterprise}}ee-{{end}}{{.PangolinVersion}}
container_name: pangolin container_name: pangolin
restart: unless-stopped restart: unless-stopped
deploy:
resources:
limits:
memory: 1g
reservations:
memory: 256m
volumes: volumes:
- ./config:/app/config - ./config:/app/config
healthcheck: healthcheck:
@@ -38,9 +44,7 @@ services:
image: docker.io/traefik:v3.6 image: docker.io/traefik:v3.6
container_name: traefik container_name: traefik
restart: unless-stopped restart: unless-stopped
{{if .InstallGerbil}} {{if .InstallGerbil}} network_mode: service:gerbil # Ports appear on the gerbil service{{end}}{{if not .InstallGerbil}}
network_mode: service:gerbil # Ports appear on the gerbil service
{{end}}{{if not .InstallGerbil}}
ports: ports:
- 443:443 - 443:443
- 80:80 - 80:80

View File

@@ -144,12 +144,13 @@ func installDocker() error {
} }
func startDockerService() error { func startDockerService() error {
if runtime.GOOS == "linux" { switch runtime.GOOS {
case "linux":
cmd := exec.Command("systemctl", "enable", "--now", "docker") cmd := exec.Command("systemctl", "enable", "--now", "docker")
cmd.Stdout = os.Stdout cmd.Stdout = os.Stdout
cmd.Stderr = os.Stderr cmd.Stderr = os.Stderr
return cmd.Run() return cmd.Run()
} else if runtime.GOOS == "darwin" { case "darwin":
// On macOS, Docker is usually started via the Docker Desktop application // On macOS, Docker is usually started via the Docker Desktop application
fmt.Println("Please start Docker Desktop manually on macOS.") fmt.Println("Please start Docker Desktop manually on macOS.")
return nil return nil
@@ -302,7 +303,7 @@ func pullContainers(containerType SupportedContainer) error {
return nil return nil
} }
return fmt.Errorf("Unsupported container type: %s", containerType) return fmt.Errorf("unsupported container type: %s", containerType)
} }
// startContainers starts the containers using the appropriate command. // startContainers starts the containers using the appropriate command.
@@ -325,7 +326,7 @@ func startContainers(containerType SupportedContainer) error {
return nil return nil
} }
return fmt.Errorf("Unsupported container type: %s", containerType) return fmt.Errorf("unsupported container type: %s", containerType)
} }
// stopContainers stops the containers using the appropriate command. // stopContainers stops the containers using the appropriate command.
@@ -347,7 +348,7 @@ func stopContainers(containerType SupportedContainer) error {
return nil return nil
} }
return fmt.Errorf("Unsupported container type: %s", containerType) return fmt.Errorf("unsupported container type: %s", containerType)
} }
// restartContainer restarts a specific container using the appropriate command. // restartContainer restarts a specific container using the appropriate command.
@@ -369,5 +370,5 @@ func restartContainer(container string, containerType SupportedContainer) error
return nil return nil
} }
return fmt.Errorf("Unsupported container type: %s", containerType) return fmt.Errorf("unsupported container type: %s", containerType)
} }

View File

@@ -27,9 +27,18 @@ func installCrowdsec(config Config) error {
os.Exit(1) os.Exit(1)
} }
os.MkdirAll("config/crowdsec/db", 0755) if err := os.MkdirAll("config/crowdsec/db", 0755); err != nil {
os.MkdirAll("config/crowdsec/acquis.d", 0755) fmt.Printf("Error creating config files: %v\n", err)
os.MkdirAll("config/traefik/logs", 0755) os.Exit(1)
}
if err := os.MkdirAll("config/crowdsec/acquis.d", 0755); err != nil {
fmt.Printf("Error creating config files: %v\n", err)
os.Exit(1)
}
if err := os.MkdirAll("config/traefik/logs", 0755); err != nil {
fmt.Printf("Error creating config files: %v\n", err)
os.Exit(1)
}
if err := copyDockerService("config/crowdsec/docker-compose.yml", "docker-compose.yml", "crowdsec"); err != nil { if err := copyDockerService("config/crowdsec/docker-compose.yml", "docker-compose.yml", "crowdsec"); err != nil {
fmt.Printf("Error copying docker service: %v\n", err) fmt.Printf("Error copying docker service: %v\n", err)
@@ -153,34 +162,34 @@ func CheckAndAddCrowdsecDependency(composePath string) error {
} }
// Parse YAML into a generic map // Parse YAML into a generic map
var compose map[string]interface{} var compose map[string]any
if err := yaml.Unmarshal(data, &compose); err != nil { if err := yaml.Unmarshal(data, &compose); err != nil {
return fmt.Errorf("error parsing compose file: %w", err) return fmt.Errorf("error parsing compose file: %w", err)
} }
// Get services section // Get services section
services, ok := compose["services"].(map[string]interface{}) services, ok := compose["services"].(map[string]any)
if !ok { if !ok {
return fmt.Errorf("services section not found or invalid") return fmt.Errorf("services section not found or invalid")
} }
// Get traefik service // Get traefik service
traefik, ok := services["traefik"].(map[string]interface{}) traefik, ok := services["traefik"].(map[string]any)
if !ok { if !ok {
return fmt.Errorf("traefik service not found or invalid") return fmt.Errorf("traefik service not found or invalid")
} }
// Get dependencies // Get dependencies
dependsOn, ok := traefik["depends_on"].(map[string]interface{}) dependsOn, ok := traefik["depends_on"].(map[string]any)
if ok { if ok {
// Append the new block for crowdsec // Append the new block for crowdsec
dependsOn["crowdsec"] = map[string]interface{}{ dependsOn["crowdsec"] = map[string]any{
"condition": "service_healthy", "condition": "service_healthy",
} }
} else { } else {
// No dependencies exist, create it // No dependencies exist, create it
traefik["depends_on"] = map[string]interface{}{ traefik["depends_on"] = map[string]any{
"crowdsec": map[string]interface{}{ "crowdsec": map[string]any{
"condition": "service_healthy", "condition": "service_healthy",
}, },
} }

View File

@@ -3,8 +3,36 @@ module installer
go 1.24.0 go 1.24.0
require ( require (
golang.org/x/term v0.39.0 github.com/charmbracelet/huh v0.8.0
github.com/charmbracelet/lipgloss v1.1.0
golang.org/x/term v0.40.0
gopkg.in/yaml.v3 v3.0.1 gopkg.in/yaml.v3 v3.0.1
) )
require golang.org/x/sys v0.40.0 // indirect require (
github.com/atotto/clipboard v0.1.4 // indirect
github.com/aymanbagabas/go-osc52/v2 v2.0.1 // indirect
github.com/catppuccin/go v0.3.0 // indirect
github.com/charmbracelet/bubbles v0.21.1-0.20250623103423-23b8fd6302d7 // indirect
github.com/charmbracelet/bubbletea v1.3.6 // indirect
github.com/charmbracelet/colorprofile v0.2.3-0.20250311203215-f60798e515dc // indirect
github.com/charmbracelet/x/ansi v0.9.3 // indirect
github.com/charmbracelet/x/cellbuf v0.0.13 // indirect
github.com/charmbracelet/x/exp/strings v0.0.0-20240722160745-212f7b056ed0 // indirect
github.com/charmbracelet/x/term v0.2.1 // indirect
github.com/dustin/go-humanize v1.0.1 // indirect
github.com/erikgeiser/coninput v0.0.0-20211004153227-1c3628e74d0f // indirect
github.com/lucasb-eyer/go-colorful v1.2.0 // indirect
github.com/mattn/go-isatty v0.0.20 // indirect
github.com/mattn/go-localereader v0.0.1 // indirect
github.com/mattn/go-runewidth v0.0.16 // indirect
github.com/mitchellh/hashstructure/v2 v2.0.2 // indirect
github.com/muesli/ansi v0.0.0-20230316100256-276c6243b2f6 // indirect
github.com/muesli/cancelreader v0.2.2 // indirect
github.com/muesli/termenv v0.16.0 // indirect
github.com/rivo/uniseg v0.4.7 // indirect
github.com/xo/terminfo v0.0.0-20220910002029-abceb7e1c41e // indirect
golang.org/x/sync v0.15.0 // indirect
golang.org/x/sys v0.41.0 // indirect
golang.org/x/text v0.23.0 // indirect
)

View File

@@ -1,7 +1,80 @@
golang.org/x/sys v0.40.0 h1:DBZZqJ2Rkml6QMQsZywtnjnnGvHza6BTfYFWY9kjEWQ= github.com/MakeNowJust/heredoc v1.0.0 h1:cXCdzVdstXyiTqTvfqk9SDHpKNjxuom+DOlyEeQ4pzQ=
golang.org/x/sys v0.40.0/go.mod h1:OgkHotnGiDImocRcuBABYBEXf8A9a87e/uXjp9XT3ks= github.com/MakeNowJust/heredoc v1.0.0/go.mod h1:mG5amYoWBHf8vpLOuehzbGGw0EHxpZZ6lCpQ4fNJ8LE=
golang.org/x/term v0.39.0 h1:RclSuaJf32jOqZz74CkPA9qFuVTX7vhLlpfj/IGWlqY= github.com/atotto/clipboard v0.1.4 h1:EH0zSVneZPSuFR11BlR9YppQTVDbh5+16AmcJi4g1z4=
golang.org/x/term v0.39.0/go.mod h1:yxzUCTP/U+FzoxfdKmLaA0RV1WgE0VY7hXBwKtY/4ww= github.com/atotto/clipboard v0.1.4/go.mod h1:ZY9tmq7sm5xIbd9bOK4onWV4S6X0u6GY7Vn0Yu86PYI=
github.com/aymanbagabas/go-osc52/v2 v2.0.1 h1:HwpRHbFMcZLEVr42D4p7XBqjyuxQH5SMiErDT4WkJ2k=
github.com/aymanbagabas/go-osc52/v2 v2.0.1/go.mod h1:uYgXzlJ7ZpABp8OJ+exZzJJhRNQ2ASbcXHWsFqH8hp8=
github.com/aymanbagabas/go-udiff v0.3.1 h1:LV+qyBQ2pqe0u42ZsUEtPiCaUoqgA9gYRDs3vj1nolY=
github.com/aymanbagabas/go-udiff v0.3.1/go.mod h1:G0fsKmG+P6ylD0r6N/KgQD/nWzgfnl8ZBcNLgcbrw8E=
github.com/catppuccin/go v0.3.0 h1:d+0/YicIq+hSTo5oPuRi5kOpqkVA5tAsU6dNhvRu+aY=
github.com/catppuccin/go v0.3.0/go.mod h1:8IHJuMGaUUjQM82qBrGNBv7LFq6JI3NnQCF6MOlZjpc=
github.com/charmbracelet/bubbles v0.21.1-0.20250623103423-23b8fd6302d7 h1:JFgG/xnwFfbezlUnFMJy0nusZvytYysV4SCS2cYbvws=
github.com/charmbracelet/bubbles v0.21.1-0.20250623103423-23b8fd6302d7/go.mod h1:ISC1gtLcVilLOf23wvTfoQuYbW2q0JevFxPfUzZ9Ybw=
github.com/charmbracelet/bubbletea v1.3.6 h1:VkHIxPJQeDt0aFJIsVxw8BQdh/F/L2KKZGsK6et5taU=
github.com/charmbracelet/bubbletea v1.3.6/go.mod h1:oQD9VCRQFF8KplacJLo28/jofOI2ToOfGYeFgBBxHOc=
github.com/charmbracelet/colorprofile v0.2.3-0.20250311203215-f60798e515dc h1:4pZI35227imm7yK2bGPcfpFEmuY1gc2YSTShr4iJBfs=
github.com/charmbracelet/colorprofile v0.2.3-0.20250311203215-f60798e515dc/go.mod h1:X4/0JoqgTIPSFcRA/P6INZzIuyqdFY5rm8tb41s9okk=
github.com/charmbracelet/huh v0.8.0 h1:Xz/Pm2h64cXQZn/Jvele4J3r7DDiqFCNIVteYukxDvY=
github.com/charmbracelet/huh v0.8.0/go.mod h1:5YVc+SlZ1IhQALxRPpkGwwEKftN/+OlJlnJYlDRFqN4=
github.com/charmbracelet/lipgloss v1.1.0 h1:vYXsiLHVkK7fp74RkV7b2kq9+zDLoEU4MZoFqR/noCY=
github.com/charmbracelet/lipgloss v1.1.0/go.mod h1:/6Q8FR2o+kj8rz4Dq0zQc3vYf7X+B0binUUBwA0aL30=
github.com/charmbracelet/x/ansi v0.9.3 h1:BXt5DHS/MKF+LjuK4huWrC6NCvHtexww7dMayh6GXd0=
github.com/charmbracelet/x/ansi v0.9.3/go.mod h1:3RQDQ6lDnROptfpWuUVIUG64bD2g2BgntdxH0Ya5TeE=
github.com/charmbracelet/x/cellbuf v0.0.13 h1:/KBBKHuVRbq1lYx5BzEHBAFBP8VcQzJejZ/IA3iR28k=
github.com/charmbracelet/x/cellbuf v0.0.13/go.mod h1:xe0nKWGd3eJgtqZRaN9RjMtK7xUYchjzPr7q6kcvCCs=
github.com/charmbracelet/x/conpty v0.1.0 h1:4zc8KaIcbiL4mghEON8D72agYtSeIgq8FSThSPQIb+U=
github.com/charmbracelet/x/conpty v0.1.0/go.mod h1:rMFsDJoDwVmiYM10aD4bH2XiRgwI7NYJtQgl5yskjEQ=
github.com/charmbracelet/x/errors v0.0.0-20240508181413-e8d8b6e2de86 h1:JSt3B+U9iqk37QUU2Rvb6DSBYRLtWqFqfxf8l5hOZUA=
github.com/charmbracelet/x/errors v0.0.0-20240508181413-e8d8b6e2de86/go.mod h1:2P0UgXMEa6TsToMSuFqKFQR+fZTO9CNGUNokkPatT/0=
github.com/charmbracelet/x/exp/golden v0.0.0-20241011142426-46044092ad91 h1:payRxjMjKgx2PaCWLZ4p3ro9y97+TVLZNaRZgJwSVDQ=
github.com/charmbracelet/x/exp/golden v0.0.0-20241011142426-46044092ad91/go.mod h1:wDlXFlCrmJ8J+swcL/MnGUuYnqgQdW9rhSD61oNMb6U=
github.com/charmbracelet/x/exp/strings v0.0.0-20240722160745-212f7b056ed0 h1:qko3AQ4gK1MTS/de7F5hPGx6/k1u0w4TeYmBFwzYVP4=
github.com/charmbracelet/x/exp/strings v0.0.0-20240722160745-212f7b056ed0/go.mod h1:pBhA0ybfXv6hDjQUZ7hk1lVxBiUbupdw5R31yPUViVQ=
github.com/charmbracelet/x/term v0.2.1 h1:AQeHeLZ1OqSXhrAWpYUtZyX1T3zVxfpZuEQMIQaGIAQ=
github.com/charmbracelet/x/term v0.2.1/go.mod h1:oQ4enTYFV7QN4m0i9mzHrViD7TQKvNEEkHUMCmsxdUg=
github.com/charmbracelet/x/termios v0.1.1 h1:o3Q2bT8eqzGnGPOYheoYS8eEleT5ZVNYNy8JawjaNZY=
github.com/charmbracelet/x/termios v0.1.1/go.mod h1:rB7fnv1TgOPOyyKRJ9o+AsTU/vK5WHJ2ivHeut/Pcwo=
github.com/charmbracelet/x/xpty v0.1.2 h1:Pqmu4TEJ8KeA9uSkISKMU3f+C1F6OGBn8ABuGlqCbtI=
github.com/charmbracelet/x/xpty v0.1.2/go.mod h1:XK2Z0id5rtLWcpeNiMYBccNNBrP2IJnzHI0Lq13Xzq4=
github.com/creack/pty v1.1.24 h1:bJrF4RRfyJnbTJqzRLHzcGaZK1NeM5kTC9jGgovnR1s=
github.com/creack/pty v1.1.24/go.mod h1:08sCNb52WyoAwi2QDyzUCTgcvVFhUzewun7wtTfvcwE=
github.com/dustin/go-humanize v1.0.1 h1:GzkhY7T5VNhEkwH0PVJgjz+fX1rhBrR7pRT3mDkpeCY=
github.com/dustin/go-humanize v1.0.1/go.mod h1:Mu1zIs6XwVuF/gI1OepvI0qD18qycQx+mFykh5fBlto=
github.com/erikgeiser/coninput v0.0.0-20211004153227-1c3628e74d0f h1:Y/CXytFA4m6baUTXGLOoWe4PQhGxaX0KpnayAqC48p4=
github.com/erikgeiser/coninput v0.0.0-20211004153227-1c3628e74d0f/go.mod h1:vw97MGsxSvLiUE2X8qFplwetxpGLQrlU1Q9AUEIzCaM=
github.com/lucasb-eyer/go-colorful v1.2.0 h1:1nnpGOrhyZZuNyfu1QjKiUICQ74+3FNCN69Aj6K7nkY=
github.com/lucasb-eyer/go-colorful v1.2.0/go.mod h1:R4dSotOR9KMtayYi1e77YzuveK+i7ruzyGqttikkLy0=
github.com/mattn/go-isatty v0.0.20 h1:xfD0iDuEKnDkl03q4limB+vH+GxLEtL/jb4xVJSWWEY=
github.com/mattn/go-isatty v0.0.20/go.mod h1:W+V8PltTTMOvKvAeJH7IuucS94S2C6jfK/D7dTCTo3Y=
github.com/mattn/go-localereader v0.0.1 h1:ygSAOl7ZXTx4RdPYinUpg6W99U8jWvWi9Ye2JC/oIi4=
github.com/mattn/go-localereader v0.0.1/go.mod h1:8fBrzywKY7BI3czFoHkuzRoWE9C+EiG4R1k4Cjx5p88=
github.com/mattn/go-runewidth v0.0.16 h1:E5ScNMtiwvlvB5paMFdw9p4kSQzbXFikJ5SQO6TULQc=
github.com/mattn/go-runewidth v0.0.16/go.mod h1:Jdepj2loyihRzMpdS35Xk/zdY8IAYHsh153qUoGf23w=
github.com/mitchellh/hashstructure/v2 v2.0.2 h1:vGKWl0YJqUNxE8d+h8f6NJLcCJrgbhC4NcD46KavDd4=
github.com/mitchellh/hashstructure/v2 v2.0.2/go.mod h1:MG3aRVU/N29oo/V/IhBX8GR/zz4kQkprJgF2EVszyDE=
github.com/muesli/ansi v0.0.0-20230316100256-276c6243b2f6 h1:ZK8zHtRHOkbHy6Mmr5D264iyp3TiX5OmNcI5cIARiQI=
github.com/muesli/ansi v0.0.0-20230316100256-276c6243b2f6/go.mod h1:CJlz5H+gyd6CUWT45Oy4q24RdLyn7Md9Vj2/ldJBSIo=
github.com/muesli/cancelreader v0.2.2 h1:3I4Kt4BQjOR54NavqnDogx/MIoWBFa0StPA8ELUXHmA=
github.com/muesli/cancelreader v0.2.2/go.mod h1:3XuTXfFS2VjM+HTLZY9Ak0l6eUKfijIfMUZ4EgX0QYo=
github.com/muesli/termenv v0.16.0 h1:S5AlUN9dENB57rsbnkPyfdGuWIlkmzJjbFf0Tf5FWUc=
github.com/muesli/termenv v0.16.0/go.mod h1:ZRfOIKPFDYQoDFF4Olj7/QJbW60Ol/kL1pU3VfY/Cnk=
github.com/rivo/uniseg v0.2.0/go.mod h1:J6wj4VEh+S6ZtnVlnTBMWIodfgj8LQOQFoIToxlJtxc=
github.com/rivo/uniseg v0.4.7 h1:WUdvkW8uEhrYfLC4ZzdpI2ztxP1I582+49Oc5Mq64VQ=
github.com/rivo/uniseg v0.4.7/go.mod h1:FN3SvrM+Zdj16jyLfmOkMNblXMcoc8DfTHruCPUcx88=
github.com/xo/terminfo v0.0.0-20220910002029-abceb7e1c41e h1:JVG44RsyaB9T2KIHavMF/ppJZNG9ZpyihvCd0w101no=
github.com/xo/terminfo v0.0.0-20220910002029-abceb7e1c41e/go.mod h1:RbqR21r5mrJuqunuUZ/Dhy/avygyECGrLceyNeo4LiM=
golang.org/x/exp v0.0.0-20231006140011-7918f672742d h1:jtJma62tbqLibJ5sFQz8bKtEM8rJBtfilJ2qTU199MI=
golang.org/x/exp v0.0.0-20231006140011-7918f672742d/go.mod h1:ldy0pHrwJyGW56pPQzzkH36rKxoZW1tw7ZJpeKx+hdo=
golang.org/x/sync v0.15.0 h1:KWH3jNZsfyT6xfAfKiz6MRNmd46ByHDYaZ7KSkCtdW8=
golang.org/x/sync v0.15.0/go.mod h1:1dzgHSNfp02xaA81J2MS99Qcpr2w7fw1gpm99rleRqA=
golang.org/x/sys v0.0.0-20210809222454-d867a43fc93e/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.6.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.41.0 h1:Ivj+2Cp/ylzLiEU89QhWblYnOE9zerudt9Ftecq2C6k=
golang.org/x/sys v0.41.0/go.mod h1:OgkHotnGiDImocRcuBABYBEXf8A9a87e/uXjp9XT3ks=
golang.org/x/term v0.40.0 h1:36e4zGLqU4yhjlmxEaagx2KuYbJq3EwY8K943ZsHcvg=
golang.org/x/term v0.40.0/go.mod h1:w2P8uVp06p2iyKKuvXIm7N/y0UCRt3UfJTfZ7oOpglM=
golang.org/x/text v0.23.0 h1:D71I7dUrlY+VX0gQShAThNGHFxZ13dGLBHQLVl1mJlY=
golang.org/x/text v0.23.0/go.mod h1:/BLNzu4aZCJ1+kcD0DNRotWKage4q2rGVAg4o22unh4=
gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405 h1:yhCVgyC4o1eVCa2tZl7eS0r+SDo693bJlVdllGtEeKM= gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405 h1:yhCVgyC4o1eVCa2tZl7eS0r+SDo693bJlVdllGtEeKM=
gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
gopkg.in/yaml.v3 v3.0.1 h1:fxVm/GzAzEWqLHuvctI91KS9hhNmmWOoWu0XTYJS7CA= gopkg.in/yaml.v3 v3.0.1 h1:fxVm/GzAzEWqLHuvctI91KS9hhNmmWOoWu0XTYJS7CA=

View File

@@ -1,92 +1,235 @@
package main package main
import ( import (
"bufio" "errors"
"fmt" "fmt"
"strings" "os"
"syscall" "strconv"
"github.com/charmbracelet/huh"
"golang.org/x/term" "golang.org/x/term"
) )
func readString(reader *bufio.Reader, prompt string, defaultValue string) string { // pangolinTheme is the custom theme using brand colors
var pangolinTheme = ThemePangolin()
// isAccessibleMode checks if we should use accessible mode (simple prompts)
// This is true for: non-TTY, TERM=dumb, or ACCESSIBLE env var set
func isAccessibleMode() bool {
// Check if stdin is not a terminal (piped input, CI, etc.)
if !term.IsTerminal(int(os.Stdin.Fd())) {
return true
}
// Check for dumb terminal
if os.Getenv("TERM") == "dumb" {
return true
}
// Check for explicit accessible mode request
if os.Getenv("ACCESSIBLE") != "" {
return true
}
return false
}
// handleAbort checks if the error is a user abort (Ctrl+C) and exits if so
func handleAbort(err error) {
if err != nil && errors.Is(err, huh.ErrUserAborted) {
fmt.Println("\nInstallation cancelled.")
os.Exit(0)
}
}
// runField runs a single field with the Pangolin theme, handling accessible mode
func runField(field huh.Field) error {
if isAccessibleMode() {
return field.RunAccessible(os.Stdout, os.Stdin)
}
form := huh.NewForm(huh.NewGroup(field)).WithTheme(pangolinTheme)
return form.Run()
}
func readString(prompt string, defaultValue string) string {
var value string
title := prompt
if defaultValue != "" { if defaultValue != "" {
fmt.Printf("%s (default: %s): ", prompt, defaultValue) title = fmt.Sprintf("%s (default: %s)", prompt, defaultValue)
} else {
fmt.Print(prompt + ": ")
} }
input, _ := reader.ReadString('\n')
input = strings.TrimSpace(input)
if input == "" {
return defaultValue
}
return input
}
func readStringNoDefault(reader *bufio.Reader, prompt string) string { input := huh.NewInput().
fmt.Print(prompt + ": ") Title(title).
input, _ := reader.ReadString('\n') Value(&value)
return strings.TrimSpace(input)
}
func readPassword(prompt string, reader *bufio.Reader) string { // If no default value, this field is required
if term.IsTerminal(int(syscall.Stdin)) { if defaultValue == "" {
fmt.Print(prompt + ": ") input = input.Validate(func(s string) error {
// Read password without echo if we're in a terminal if s == "" {
password, err := term.ReadPassword(int(syscall.Stdin)) return fmt.Errorf("this field is required")
fmt.Println() // Add a newline since ReadPassword doesn't add one }
if err != nil { return nil
return "" })
}
input := strings.TrimSpace(string(password))
if input == "" {
return readPassword(prompt, reader)
}
return input
} else {
// Fallback to reading from stdin if not in a terminal
return readString(reader, prompt, "")
} }
}
func readBool(reader *bufio.Reader, prompt string, defaultValue bool) bool { err := runField(input)
defaultStr := "no" handleAbort(err)
if defaultValue {
defaultStr = "yes"
}
for {
input := readString(reader, prompt+" (yes/no)", defaultStr)
lower := strings.ToLower(input)
if lower == "yes" {
return true
} else if lower == "no" {
return false
} else {
fmt.Println("Please enter 'yes' or 'no'.")
}
}
}
func readBoolNoDefault(reader *bufio.Reader, prompt string) bool { if value == "" {
for { value = defaultValue
input := readStringNoDefault(reader, prompt+" (yes/no)")
lower := strings.ToLower(input)
if lower == "yes" {
return true
} else if lower == "no" {
return false
} else {
fmt.Println("Please enter 'yes' or 'no'.")
}
} }
}
func readInt(reader *bufio.Reader, prompt string, defaultValue int) int { // Print the answer so it remains visible in terminal history (skip in accessible mode as it already shows)
input := readString(reader, prompt, fmt.Sprintf("%d", defaultValue)) if !isAccessibleMode() {
if input == "" { fmt.Printf("%s: %s\n", prompt, value)
return defaultValue
} }
value := defaultValue
fmt.Sscanf(input, "%d", &value)
return value return value
} }
func readStringNoDefault(prompt string) string {
var value string
for {
input := huh.NewInput().
Title(prompt).
Value(&value).
Validate(func(s string) error {
if s == "" {
return fmt.Errorf("this field is required")
}
return nil
})
err := runField(input)
handleAbort(err)
if value != "" {
// Print the answer so it remains visible in terminal history
if !isAccessibleMode() {
fmt.Printf("%s: %s\n", prompt, value)
}
return value
}
}
}
func readPassword(prompt string) string {
var value string
for {
input := huh.NewInput().
Title(prompt).
Value(&value).
EchoMode(huh.EchoModePassword).
Validate(func(s string) error {
if s == "" {
return fmt.Errorf("password is required")
}
return nil
})
err := runField(input)
handleAbort(err)
if value != "" {
// Print confirmation without revealing the password
if !isAccessibleMode() {
fmt.Printf("%s: %s\n", prompt, "********")
}
return value
}
}
}
func readBool(prompt string, defaultValue bool) bool {
var value = defaultValue
confirm := huh.NewConfirm().
Title(prompt).
Value(&value).
Affirmative("Yes").
Negative("No")
err := runField(confirm)
handleAbort(err)
// Print the answer so it remains visible in terminal history
if !isAccessibleMode() {
answer := "No"
if value {
answer = "Yes"
}
fmt.Printf("%s: %s\n", prompt, answer)
}
return value
}
func readBoolNoDefault(prompt string) bool {
var value bool
confirm := huh.NewConfirm().
Title(prompt).
Value(&value).
Affirmative("Yes").
Negative("No")
err := runField(confirm)
handleAbort(err)
// Print the answer so it remains visible in terminal history
if !isAccessibleMode() {
answer := "No"
if value {
answer = "Yes"
}
fmt.Printf("%s: %s\n", prompt, answer)
}
return value
}
func readInt(prompt string, defaultValue int) int {
var value string
title := fmt.Sprintf("%s (default: %d)", prompt, defaultValue)
input := huh.NewInput().
Title(title).
Value(&value).
Validate(func(s string) error {
if s == "" {
return nil
}
_, err := strconv.Atoi(s)
if err != nil {
return fmt.Errorf("please enter a valid number")
}
return nil
})
err := runField(input)
handleAbort(err)
if value == "" {
// Print the answer so it remains visible in terminal history
if !isAccessibleMode() {
fmt.Printf("%s: %d\n", prompt, defaultValue)
}
return defaultValue
}
result, err := strconv.Atoi(value)
if err != nil {
if !isAccessibleMode() {
fmt.Printf("%s: %d\n", prompt, defaultValue)
}
return defaultValue
}
// Print the answer so it remains visible in terminal history
if !isAccessibleMode() {
fmt.Printf("%s: %d\n", prompt, result)
}
return result
}

View File

@@ -1,13 +1,12 @@
package main package main
import ( import (
"bufio" "crypto/rand"
"embed" "embed"
"encoding/base64"
"fmt" "fmt"
"io" "io"
"io/fs" "io/fs"
"crypto/rand"
"encoding/base64"
"net" "net"
"net/http" "net/http"
"net/url" "net/url"
@@ -20,11 +19,17 @@ import (
"time" "time"
) )
// DO NOT EDIT THIS FUNCTION; IT MATCHED BY REGEX IN CICD // Version variables injected at build time via -ldflags
var (
pangolinVersion string
gerbilVersion string
badgerVersion string
)
func loadVersions(config *Config) { func loadVersions(config *Config) {
config.PangolinVersion = "replaceme" config.PangolinVersion = pangolinVersion
config.GerbilVersion = "replaceme" config.GerbilVersion = gerbilVersion
config.BadgerVersion = "replaceme" config.BadgerVersion = badgerVersion
} }
//go:embed config/* //go:embed config/*
@@ -82,14 +87,12 @@ func main() {
} }
} }
reader := bufio.NewReader(os.Stdin)
var config Config var config Config
var alreadyInstalled = false var alreadyInstalled = false
// check if there is already a config file // check if there is already a config file
if _, err := os.Stat("config/config.yml"); err != nil { if _, err := os.Stat("config/config.yml"); err != nil {
config = collectUserInput(reader) config = collectUserInput()
loadVersions(&config) loadVersions(&config)
config.DoCrowdsecInstall = false config.DoCrowdsecInstall = false
@@ -102,7 +105,10 @@ func main() {
os.Exit(1) os.Exit(1)
} }
moveFile("config/docker-compose.yml", "docker-compose.yml") if err := moveFile("config/docker-compose.yml", "docker-compose.yml"); err != nil {
fmt.Printf("Error moving docker-compose.yml: %v\n", err)
os.Exit(1)
}
fmt.Println("\nConfiguration files created successfully!") fmt.Println("\nConfiguration files created successfully!")
@@ -117,13 +123,17 @@ func main() {
fmt.Println("\n=== Starting installation ===") fmt.Println("\n=== Starting installation ===")
if readBool(reader, "Would you like to install and start the containers?", true) { if readBool("Would you like to install and start the containers?", true) {
config.InstallationContainerType = podmanOrDocker(reader) config.InstallationContainerType = podmanOrDocker()
if !isDockerInstalled() && runtime.GOOS == "linux" && config.InstallationContainerType == Docker { if !isDockerInstalled() && runtime.GOOS == "linux" && config.InstallationContainerType == Docker {
if readBool(reader, "Docker is not installed. Would you like to install it?", true) { if readBool("Docker is not installed. Would you like to install it?", true) {
installDocker() if err := installDocker(); err != nil {
fmt.Printf("Error installing Docker: %v\n", err)
return
}
// try to start docker service but ignore errors // try to start docker service but ignore errors
if err := startDockerService(); err != nil { if err := startDockerService(); err != nil {
fmt.Println("Error starting Docker service:", err) fmt.Println("Error starting Docker service:", err)
@@ -132,7 +142,7 @@ func main() {
} }
// wait 10 seconds for docker to start checking if docker is running every 2 seconds // wait 10 seconds for docker to start checking if docker is running every 2 seconds
fmt.Println("Waiting for Docker to start...") fmt.Println("Waiting for Docker to start...")
for i := 0; i < 5; i++ { for range 5 {
if isDockerRunning() { if isDockerRunning() {
fmt.Println("Docker is running!") fmt.Println("Docker is running!")
break break
@@ -167,7 +177,7 @@ func main() {
fmt.Println("\n=== MaxMind Database Update ===") fmt.Println("\n=== MaxMind Database Update ===")
if _, err := os.Stat("config/GeoLite2-Country.mmdb"); err == nil { if _, err := os.Stat("config/GeoLite2-Country.mmdb"); err == nil {
fmt.Println("MaxMind GeoLite2 Country database found.") fmt.Println("MaxMind GeoLite2 Country database found.")
if readBool(reader, "Would you like to update the MaxMind database to the latest version?", false) { if readBool("Would you like to update the MaxMind database to the latest version?", false) {
if err := downloadMaxMindDatabase(); err != nil { if err := downloadMaxMindDatabase(); err != nil {
fmt.Printf("Error updating MaxMind database: %v\n", err) fmt.Printf("Error updating MaxMind database: %v\n", err)
fmt.Println("You can try updating it manually later if needed.") fmt.Println("You can try updating it manually later if needed.")
@@ -175,7 +185,7 @@ func main() {
} }
} else { } else {
fmt.Println("MaxMind GeoLite2 Country database not found.") fmt.Println("MaxMind GeoLite2 Country database not found.")
if readBool(reader, "Would you like to download the MaxMind GeoLite2 database for geoblocking functionality?", false) { if readBool("Would you like to download the MaxMind GeoLite2 database for geoblocking functionality?", false) {
if err := downloadMaxMindDatabase(); err != nil { if err := downloadMaxMindDatabase(); err != nil {
fmt.Printf("Error downloading MaxMind database: %v\n", err) fmt.Printf("Error downloading MaxMind database: %v\n", err)
fmt.Println("You can try downloading it manually later if needed.") fmt.Println("You can try downloading it manually later if needed.")
@@ -192,11 +202,11 @@ func main() {
if !checkIsCrowdsecInstalledInCompose() { if !checkIsCrowdsecInstalledInCompose() {
fmt.Println("\n=== CrowdSec Install ===") fmt.Println("\n=== CrowdSec Install ===")
// check if crowdsec is installed // check if crowdsec is installed
if readBool(reader, "Would you like to install CrowdSec?", false) { if readBool("Would you like to install CrowdSec?", false) {
fmt.Println("This installer constitutes a minimal viable CrowdSec deployment. CrowdSec will add extra complexity to your Pangolin installation and may not work to the best of its abilities out of the box. Users are expected to implement configuration adjustments on their own to achieve the best security posture. Consult the CrowdSec documentation for detailed configuration instructions.") fmt.Println("This installer constitutes a minimal viable CrowdSec deployment. CrowdSec will add extra complexity to your Pangolin installation and may not work to the best of its abilities out of the box. Users are expected to implement configuration adjustments on their own to achieve the best security posture. Consult the CrowdSec documentation for detailed configuration instructions.")
// BUG: crowdsec installation will be skipped if the user chooses to install on the first installation. // BUG: crowdsec installation will be skipped if the user chooses to install on the first installation.
if readBool(reader, "Are you willing to manage CrowdSec?", false) { if readBool("Are you willing to manage CrowdSec?", false) {
if config.DashboardDomain == "" { if config.DashboardDomain == "" {
traefikConfig, err := ReadTraefikConfig("config/traefik/traefik_config.yml") traefikConfig, err := ReadTraefikConfig("config/traefik/traefik_config.yml")
if err != nil { if err != nil {
@@ -225,8 +235,8 @@ func main() {
fmt.Printf("Let's Encrypt Email: %s\n", config.LetsEncryptEmail) fmt.Printf("Let's Encrypt Email: %s\n", config.LetsEncryptEmail)
fmt.Printf("Badger Version: %s\n", config.BadgerVersion) fmt.Printf("Badger Version: %s\n", config.BadgerVersion)
if !readBool(reader, "Are these values correct?", true) { if !readBool("Are these values correct?", true) {
config = collectUserInput(reader) config = collectUserInput()
} }
} }
@@ -235,7 +245,7 @@ func main() {
if detectedType == Undefined { if detectedType == Undefined {
// If detection fails, prompt the user // If detection fails, prompt the user
fmt.Println("Unable to detect container type from existing installation.") fmt.Println("Unable to detect container type from existing installation.")
config.InstallationContainerType = podmanOrDocker(reader) config.InstallationContainerType = podmanOrDocker()
} else { } else {
config.InstallationContainerType = detectedType config.InstallationContainerType = detectedType
fmt.Printf("Detected container type: %s\n", config.InstallationContainerType) fmt.Printf("Detected container type: %s\n", config.InstallationContainerType)
@@ -277,8 +287,8 @@ func main() {
fmt.Printf("\nTo complete the initial setup, please visit:\nhttps://%s/auth/initial-setup\n", config.DashboardDomain) fmt.Printf("\nTo complete the initial setup, please visit:\nhttps://%s/auth/initial-setup\n", config.DashboardDomain)
} }
func podmanOrDocker(reader *bufio.Reader) SupportedContainer { func podmanOrDocker() SupportedContainer {
inputContainer := readString(reader, "Would you like to run Pangolin as Docker or Podman containers?", "docker") inputContainer := readString("Would you like to run Pangolin as Docker or Podman containers?", "docker")
chosenContainer := Docker chosenContainer := Docker
if strings.EqualFold(inputContainer, "docker") { if strings.EqualFold(inputContainer, "docker") {
@@ -290,7 +300,8 @@ func podmanOrDocker(reader *bufio.Reader) SupportedContainer {
os.Exit(1) os.Exit(1)
} }
if chosenContainer == Podman { switch chosenContainer {
case Podman:
if !isPodmanInstalled() { if !isPodmanInstalled() {
fmt.Println("Podman or podman-compose is not installed. Please install both manually. Automated installation will be available in a later release.") fmt.Println("Podman or podman-compose is not installed. Please install both manually. Automated installation will be available in a later release.")
os.Exit(1) os.Exit(1)
@@ -299,7 +310,7 @@ func podmanOrDocker(reader *bufio.Reader) SupportedContainer {
if err := exec.Command("bash", "-c", "cat /etc/sysctl.d/99-podman.conf 2>/dev/null | grep 'net.ipv4.ip_unprivileged_port_start=' || cat /etc/sysctl.conf 2>/dev/null | grep 'net.ipv4.ip_unprivileged_port_start='").Run(); err != nil { if err := exec.Command("bash", "-c", "cat /etc/sysctl.d/99-podman.conf 2>/dev/null | grep 'net.ipv4.ip_unprivileged_port_start=' || cat /etc/sysctl.conf 2>/dev/null | grep 'net.ipv4.ip_unprivileged_port_start='").Run(); err != nil {
fmt.Println("Would you like to configure ports >= 80 as unprivileged ports? This enables podman containers to listen on low-range ports.") fmt.Println("Would you like to configure ports >= 80 as unprivileged ports? This enables podman containers to listen on low-range ports.")
fmt.Println("Pangolin will experience startup issues if this is not configured, because it needs to listen on port 80/443 by default.") fmt.Println("Pangolin will experience startup issues if this is not configured, because it needs to listen on port 80/443 by default.")
approved := readBool(reader, "The installer is about to execute \"echo 'net.ipv4.ip_unprivileged_port_start=80' > /etc/sysctl.d/99-podman.conf && sysctl --system\". Approve?", true) approved := readBool("The installer is about to execute \"echo 'net.ipv4.ip_unprivileged_port_start=80' > /etc/sysctl.d/99-podman.conf && sysctl --system\". Approve?", true)
if approved { if approved {
if os.Geteuid() != 0 { if os.Geteuid() != 0 {
fmt.Println("You need to run the installer as root for such a configuration.") fmt.Println("You need to run the installer as root for such a configuration.")
@@ -311,7 +322,7 @@ func podmanOrDocker(reader *bufio.Reader) SupportedContainer {
// Linux only. // Linux only.
if err := run("bash", "-c", "echo 'net.ipv4.ip_unprivileged_port_start=80' > /etc/sysctl.d/99-podman.conf && sysctl --system"); err != nil { if err := run("bash", "-c", "echo 'net.ipv4.ip_unprivileged_port_start=80' > /etc/sysctl.d/99-podman.conf && sysctl --system"); err != nil {
fmt.Printf("Error configuring unprivileged ports: %v\n", err) fmt.Printf("Error configuring unprivileged ports: %v\n", err)
os.Exit(1) os.Exit(1)
} }
} else { } else {
@@ -321,7 +332,7 @@ func podmanOrDocker(reader *bufio.Reader) SupportedContainer {
fmt.Println("Unprivileged ports have been configured.") fmt.Println("Unprivileged ports have been configured.")
} }
} else if chosenContainer == Docker { case Docker:
// check if docker is not installed and the user is root // check if docker is not installed and the user is root
if !isDockerInstalled() { if !isDockerInstalled() {
if os.Geteuid() != 0 { if os.Geteuid() != 0 {
@@ -336,7 +347,7 @@ func podmanOrDocker(reader *bufio.Reader) SupportedContainer {
fmt.Println("The installer will not be able to run docker commands without running it as root.") fmt.Println("The installer will not be able to run docker commands without running it as root.")
os.Exit(1) os.Exit(1)
} }
} else { default:
// This shouldn't happen unless there's a third container runtime. // This shouldn't happen unless there's a third container runtime.
os.Exit(1) os.Exit(1)
} }
@@ -344,35 +355,35 @@ func podmanOrDocker(reader *bufio.Reader) SupportedContainer {
return chosenContainer return chosenContainer
} }
func collectUserInput(reader *bufio.Reader) Config { func collectUserInput() Config {
config := Config{} config := Config{}
// Basic configuration // Basic configuration
fmt.Println("\n=== Basic Configuration ===") fmt.Println("\n=== Basic Configuration ===")
config.IsEnterprise = readBoolNoDefault(reader, "Do you want to install the Enterprise version of Pangolin? The EE is free for personal use or for businesses making less than 100k USD annually.") config.IsEnterprise = readBoolNoDefault("Do you want to install the Enterprise version of Pangolin? The EE is free for personal use or for businesses making less than 100k USD annually.")
config.BaseDomain = readString(reader, "Enter your base domain (no subdomain e.g. example.com)", "") config.BaseDomain = readString("Enter your base domain (no subdomain e.g. example.com)", "")
// Set default dashboard domain after base domain is collected // Set default dashboard domain after base domain is collected
defaultDashboardDomain := "" defaultDashboardDomain := ""
if config.BaseDomain != "" { if config.BaseDomain != "" {
defaultDashboardDomain = "pangolin." + config.BaseDomain defaultDashboardDomain = "pangolin." + config.BaseDomain
} }
config.DashboardDomain = readString(reader, "Enter the domain for the Pangolin dashboard", defaultDashboardDomain) config.DashboardDomain = readString("Enter the domain for the Pangolin dashboard", defaultDashboardDomain)
config.LetsEncryptEmail = readString(reader, "Enter email for Let's Encrypt certificates", "") config.LetsEncryptEmail = readString("Enter email for Let's Encrypt certificates", "")
config.InstallGerbil = readBool(reader, "Do you want to use Gerbil to allow tunneled connections", true) config.InstallGerbil = readBool("Do you want to use Gerbil to allow tunneled connections", true)
// Email configuration // Email configuration
fmt.Println("\n=== Email Configuration ===") fmt.Println("\n=== Email Configuration ===")
config.EnableEmail = readBool(reader, "Enable email functionality (SMTP)", false) config.EnableEmail = readBool("Enable email functionality (SMTP)", false)
if config.EnableEmail { if config.EnableEmail {
config.EmailSMTPHost = readString(reader, "Enter SMTP host", "") config.EmailSMTPHost = readString("Enter SMTP host", "")
config.EmailSMTPPort = readInt(reader, "Enter SMTP port (default 587)", 587) config.EmailSMTPPort = readInt("Enter SMTP port (default 587)", 587)
config.EmailSMTPUser = readString(reader, "Enter SMTP username", "") config.EmailSMTPUser = readString("Enter SMTP username", "")
config.EmailSMTPPass = readString(reader, "Enter SMTP password", "") // Should this be readPassword? config.EmailSMTPPass = readPassword("Enter SMTP password")
config.EmailNoReply = readString(reader, "Enter no-reply email address (often the same as SMTP username)", "") config.EmailNoReply = readString("Enter no-reply email address (often the same as SMTP username)", "")
} }
// Validate required fields // Validate required fields
@@ -393,8 +404,8 @@ func collectUserInput(reader *bufio.Reader) Config {
fmt.Println("\n=== Advanced Configuration ===") fmt.Println("\n=== Advanced Configuration ===")
config.EnableIPv6 = readBool(reader, "Is your server IPv6 capable?", true) config.EnableIPv6 = readBool("Is your server IPv6 capable?", true)
config.EnableGeoblocking = readBool(reader, "Do you want to download the MaxMind GeoLite2 database for geoblocking functionality?", true) config.EnableGeoblocking = readBool("Do you want to download the MaxMind GeoLite2 database for geoblocking functionality?", true)
if config.DashboardDomain == "" { if config.DashboardDomain == "" {
fmt.Println("Error: Dashboard Domain name is required") fmt.Println("Error: Dashboard Domain name is required")
@@ -405,10 +416,18 @@ func collectUserInput(reader *bufio.Reader) Config {
} }
func createConfigFiles(config Config) error { func createConfigFiles(config Config) error {
os.MkdirAll("config", 0755) if err := os.MkdirAll("config", 0755); err != nil {
os.MkdirAll("config/letsencrypt", 0755) return fmt.Errorf("failed to create config directory: %v", err)
os.MkdirAll("config/db", 0755) }
os.MkdirAll("config/logs", 0755) if err := os.MkdirAll("config/letsencrypt", 0755); err != nil {
return fmt.Errorf("failed to create letsencrypt directory: %v", err)
}
if err := os.MkdirAll("config/db", 0755); err != nil {
return fmt.Errorf("failed to create db directory: %v", err)
}
if err := os.MkdirAll("config/logs", 0755); err != nil {
return fmt.Errorf("failed to create logs directory: %v", err)
}
// Walk through all embedded files // Walk through all embedded files
err := fs.WalkDir(configFiles, "config", func(path string, d fs.DirEntry, err error) error { err := fs.WalkDir(configFiles, "config", func(path string, d fs.DirEntry, err error) error {
@@ -562,22 +581,24 @@ func showSetupTokenInstructions(containerType SupportedContainer, dashboardDomai
fmt.Println("To get your setup token, you need to:") fmt.Println("To get your setup token, you need to:")
fmt.Println("") fmt.Println("")
fmt.Println("1. Start the containers") fmt.Println("1. Start the containers")
if containerType == Docker { switch containerType {
case Docker:
fmt.Println(" docker compose up -d") fmt.Println(" docker compose up -d")
} else if containerType == Podman { case Podman:
fmt.Println(" podman-compose up -d") fmt.Println(" podman-compose up -d")
} else {
} }
fmt.Println("") fmt.Println("")
fmt.Println("2. Wait for the Pangolin container to start and generate the token") fmt.Println("2. Wait for the Pangolin container to start and generate the token")
fmt.Println("") fmt.Println("")
fmt.Println("3. Check the container logs for the setup token") fmt.Println("3. Check the container logs for the setup token")
if containerType == Docker { switch containerType {
case Docker:
fmt.Println(" docker logs pangolin | grep -A 2 -B 2 'SETUP TOKEN'") fmt.Println(" docker logs pangolin | grep -A 2 -B 2 'SETUP TOKEN'")
} else if containerType == Podman { case Podman:
fmt.Println(" podman logs pangolin | grep -A 2 -B 2 'SETUP TOKEN'") fmt.Println(" podman logs pangolin | grep -A 2 -B 2 'SETUP TOKEN'")
} else {
} }
fmt.Println("") fmt.Println("")
fmt.Println("4. Look for output like") fmt.Println("4. Look for output like")
fmt.Println(" === SETUP TOKEN GENERATED ===") fmt.Println(" === SETUP TOKEN GENERATED ===")
@@ -639,10 +660,7 @@ func checkPortsAvailable(port int) error {
addr := fmt.Sprintf(":%d", port) addr := fmt.Sprintf(":%d", port)
ln, err := net.Listen("tcp", addr) ln, err := net.Listen("tcp", addr)
if err != nil { if err != nil {
return fmt.Errorf( return fmt.Errorf("ERROR: port %d is occupied or cannot be bound: %w", port, err)
"ERROR: port %d is occupied or cannot be bound: %w\n\n",
port, err,
)
} }
if closeErr := ln.Close(); closeErr != nil { if closeErr := ln.Close(); closeErr != nil {
fmt.Fprintf(os.Stderr, fmt.Fprintf(os.Stderr,

51
install/theme.go Normal file
View File

@@ -0,0 +1,51 @@
package main
import (
"github.com/charmbracelet/huh"
"github.com/charmbracelet/lipgloss"
)
// Pangolin brand colors (converted from oklch to hex)
var (
// Primary orange/amber - oklch(0.6717 0.1946 41.93)
primaryColor = lipgloss.AdaptiveColor{Light: "#D97706", Dark: "#F59E0B"}
// Muted foreground
mutedColor = lipgloss.AdaptiveColor{Light: "#737373", Dark: "#A3A3A3"}
// Success green
successColor = lipgloss.AdaptiveColor{Light: "#16A34A", Dark: "#22C55E"}
// Error red - oklch(0.577 0.245 27.325)
errorColor = lipgloss.AdaptiveColor{Light: "#DC2626", Dark: "#EF4444"}
// Normal text
normalFg = lipgloss.AdaptiveColor{Light: "#171717", Dark: "#FAFAFA"}
)
// ThemePangolin returns a huh theme using Pangolin brand colors
func ThemePangolin() *huh.Theme {
t := huh.ThemeBase()
// Focused state styles
t.Focused.Base = t.Focused.Base.BorderForeground(primaryColor)
t.Focused.Title = t.Focused.Title.Foreground(primaryColor).Bold(true)
t.Focused.Description = t.Focused.Description.Foreground(mutedColor)
t.Focused.ErrorIndicator = t.Focused.ErrorIndicator.Foreground(errorColor)
t.Focused.ErrorMessage = t.Focused.ErrorMessage.Foreground(errorColor)
t.Focused.SelectSelector = t.Focused.SelectSelector.Foreground(primaryColor)
t.Focused.NextIndicator = t.Focused.NextIndicator.Foreground(primaryColor)
t.Focused.PrevIndicator = t.Focused.PrevIndicator.Foreground(primaryColor)
t.Focused.Option = t.Focused.Option.Foreground(normalFg)
t.Focused.SelectedOption = t.Focused.SelectedOption.Foreground(primaryColor)
t.Focused.SelectedPrefix = lipgloss.NewStyle().Foreground(successColor).SetString("✓ ")
t.Focused.UnselectedPrefix = lipgloss.NewStyle().Foreground(mutedColor).SetString(" ")
t.Focused.FocusedButton = t.Focused.FocusedButton.Foreground(lipgloss.Color("#FFFFFF")).Background(primaryColor)
t.Focused.BlurredButton = t.Focused.BlurredButton.Foreground(normalFg).Background(lipgloss.AdaptiveColor{Light: "#E5E5E5", Dark: "#404040"})
t.Focused.TextInput.Cursor = t.Focused.TextInput.Cursor.Foreground(primaryColor)
t.Focused.TextInput.Prompt = t.Focused.TextInput.Prompt.Foreground(primaryColor)
// Blurred state inherits from focused but with hidden border
t.Blurred = t.Focused
t.Blurred.Base = t.Focused.Base.BorderStyle(lipgloss.HiddenBorder())
t.Blurred.Title = t.Blurred.Title.Foreground(mutedColor).Bold(false)
t.Blurred.TextInput.Prompt = t.Blurred.TextInput.Prompt.Foreground(mutedColor)
return t
}

View File

@@ -650,6 +650,7 @@
"resourcesErrorUpdate": "Неуспешно превключване на ресурса", "resourcesErrorUpdate": "Неуспешно превключване на ресурса",
"resourcesErrorUpdateDescription": "Възникна грешка при актуализиране на ресурса", "resourcesErrorUpdateDescription": "Възникна грешка при актуализиране на ресурса",
"access": "Достъп", "access": "Достъп",
"accessControl": "Контрол на достъпа",
"shareLink": "{resource} Сподели връзка", "shareLink": "{resource} Сподели връзка",
"resourceSelect": "Изберете ресурс", "resourceSelect": "Изберете ресурс",
"shareLinks": "Споделени връзки", "shareLinks": "Споделени връзки",
@@ -1038,7 +1039,6 @@
"pageNotFoundDescription": "О, не! Страницата, която търсите, не съществува.", "pageNotFoundDescription": "О, не! Страницата, която търсите, не съществува.",
"overview": "Общ преглед", "overview": "Общ преглед",
"home": "Начало", "home": "Начало",
"accessControl": "Контрол на достъпа",
"settings": "Настройки", "settings": "Настройки",
"usersAll": "Всички потребители", "usersAll": "Всички потребители",
"license": "Лиценз", "license": "Лиценз",
@@ -1572,6 +1572,16 @@
"billingFeatureLossWarning": "Уведомление за наличност на функциите", "billingFeatureLossWarning": "Уведомление за наличност на функциите",
"billingFeatureLossDescription": "Чрез понижението на плана, функциите, недостъпни в новия план, ще бъдат автоматично деактивирани. Някои настройки и конфигурации може да бъдат загубени. Моля, прегледайте ценовата матрица, за да разберете кои функции вече няма да са на разположение.", "billingFeatureLossDescription": "Чрез понижението на плана, функциите, недостъпни в новия план, ще бъдат автоматично деактивирани. Някои настройки и конфигурации може да бъдат загубени. Моля, прегледайте ценовата матрица, за да разберете кои функции вече няма да са на разположение.",
"billingUsageExceedsLimit": "Текущото използване ({current}) надвишава ограничението ({limit})", "billingUsageExceedsLimit": "Текущото използване ({current}) надвишава ограничението ({limit})",
"billingPastDueTitle": "Плащането е просрочено",
"billingPastDueDescription": "Вашето плащане е просрочено. Моля, актуализирайте метода на плащане, за да продължите да използвате настоящия си план. Ако проблемът не бъде разрешен, абонаментът ви ще бъде прекратен и ще бъдете прехвърлени на безплатния план.",
"billingUnpaidTitle": "Абонаментът не е платен",
"billingUnpaidDescription": "Вашият абонамент не е платен и сте прехвърлени на безплатния план. Моля, актуализирайте метода на плащане, за да възстановите вашия абонамент.",
"billingIncompleteTitle": "Плащането е непълно",
"billingIncompleteDescription": "Вашето плащане е непълно. Моля, завършете процеса на плащане, за да активирате вашия абонамент.",
"billingIncompleteExpiredTitle": "Плащането е изтекло",
"billingIncompleteExpiredDescription": "Вашето плащане никога не е завършено и е изтекло. Прехвърлени сте на безплатния план. Моля, абонирайте се отново, за да възстановите достъпа до платените функции.",
"billingManageSubscription": "Управлявайте вашия абонамент",
"billingResolvePaymentIssue": "Моля, разрешете проблема с плащането преди да извършите надграждане или понижение",
"signUpTerms": { "signUpTerms": {
"IAgreeToThe": "Съгласен съм с", "IAgreeToThe": "Съгласен съм с",
"termsOfService": "условията за ползване", "termsOfService": "условията за ползване",

View File

@@ -336,7 +336,7 @@
"userQuestionRemove": "Jste si jisti, že chcete trvale odstranit uživatele ze serveru?", "userQuestionRemove": "Jste si jisti, že chcete trvale odstranit uživatele ze serveru?",
"licenseKey": "Licenční klíč", "licenseKey": "Licenční klíč",
"valid": "Valid", "valid": "Valid",
"numberOfSites": "Počet stránek", "numberOfSites": "Počet lokalit",
"licenseKeySearch": "Hledat licenční klíče...", "licenseKeySearch": "Hledat licenční klíče...",
"licenseKeyAdd": "Přidat licenční klíč", "licenseKeyAdd": "Přidat licenční klíč",
"type": "Typ", "type": "Typ",
@@ -650,6 +650,7 @@
"resourcesErrorUpdate": "Nepodařilo se přepnout zdroj", "resourcesErrorUpdate": "Nepodařilo se přepnout zdroj",
"resourcesErrorUpdateDescription": "Došlo k chybě při aktualizaci zdroje", "resourcesErrorUpdateDescription": "Došlo k chybě při aktualizaci zdroje",
"access": "Přístup", "access": "Přístup",
"accessControl": "Kontrola přístupu",
"shareLink": "{resource} Sdílet odkaz", "shareLink": "{resource} Sdílet odkaz",
"resourceSelect": "Vyberte zdroj", "resourceSelect": "Vyberte zdroj",
"shareLinks": "Sdílet odkazy", "shareLinks": "Sdílet odkazy",
@@ -1038,7 +1039,6 @@
"pageNotFoundDescription": "Jejda! Stránka, kterou hledáte, neexistuje.", "pageNotFoundDescription": "Jejda! Stránka, kterou hledáte, neexistuje.",
"overview": "Přehled", "overview": "Přehled",
"home": "Domů", "home": "Domů",
"accessControl": "Kontrola přístupu",
"settings": "Nastavení", "settings": "Nastavení",
"usersAll": "Všichni uživatelé", "usersAll": "Všichni uživatelé",
"license": "Licence", "license": "Licence",
@@ -1572,6 +1572,16 @@
"billingFeatureLossWarning": "Upozornění na dostupnost funkce", "billingFeatureLossWarning": "Upozornění na dostupnost funkce",
"billingFeatureLossDescription": "Po pomenutí budou funkce v novém plánu automaticky zakázány. Některá nastavení a konfigurace mohou být ztraceny. Zkontrolujte cenovou matrici, abyste pochopili, které funkce již nebudou k dispozici.", "billingFeatureLossDescription": "Po pomenutí budou funkce v novém plánu automaticky zakázány. Některá nastavení a konfigurace mohou být ztraceny. Zkontrolujte cenovou matrici, abyste pochopili, které funkce již nebudou k dispozici.",
"billingUsageExceedsLimit": "Aktuální využití ({current}) překračuje limit ({limit})", "billingUsageExceedsLimit": "Aktuální využití ({current}) překračuje limit ({limit})",
"billingPastDueTitle": "Poslední splatnost platby",
"billingPastDueDescription": "Vaše platba je již splatná. Chcete-li pokračovat v používání aktuálních tarifů, aktualizujte prosím způsob platby. Pokud nebude vyřešeno, Vaše předplatné bude zrušeno a budete vráceno na úroveň zdarma.",
"billingUnpaidTitle": "Předplatné nezaplaceno",
"billingUnpaidDescription": "Vaše předplatné není zaplaceno a byli jste vráceni do bezplatné úrovně. Aktualizujte prosím svou platební metodu pro obnovení předplatného.",
"billingIncompleteTitle": "Platba nedokončena",
"billingIncompleteDescription": "Vaše platba je neúplná. Pro aktivaci předplatného prosím dokončete platební proces.",
"billingIncompleteExpiredTitle": "Platba vypršela",
"billingIncompleteExpiredDescription": "Vaše platba nebyla nikdy dokončena a vypršela. Byli jste vráceni na úroveň zdarma. Prosím, přihlašte se znovu pro obnovení přístupu k placeným funkcím.",
"billingManageSubscription": "Spravujte své předplatné",
"billingResolvePaymentIssue": "Vyřešte prosím problém s platbou před upgradem nebo upgradem",
"signUpTerms": { "signUpTerms": {
"IAgreeToThe": "Souhlasím s", "IAgreeToThe": "Souhlasím s",
"termsOfService": "podmínky služby", "termsOfService": "podmínky služby",
@@ -2382,7 +2392,7 @@
"terms": "Výrazy", "terms": "Výrazy",
"privacy": "Soukromí", "privacy": "Soukromí",
"security": "Zabezpečení", "security": "Zabezpečení",
"docs": "Dokumenty", "docs": "Dokumentace",
"deviceActivation": "Aktivace zařízení", "deviceActivation": "Aktivace zařízení",
"deviceCodeInvalidFormat": "Kód musí být 9 znaků (např. A1AJ-N5JD)", "deviceCodeInvalidFormat": "Kód musí být 9 znaků (např. A1AJ-N5JD)",
"deviceCodeInvalidOrExpired": "Neplatný nebo prošlý kód", "deviceCodeInvalidOrExpired": "Neplatný nebo prošlý kód",

View File

@@ -650,6 +650,7 @@
"resourcesErrorUpdate": "Fehler beim Umschalten der Ressource", "resourcesErrorUpdate": "Fehler beim Umschalten der Ressource",
"resourcesErrorUpdateDescription": "Beim Aktualisieren der Ressource ist ein Fehler aufgetreten", "resourcesErrorUpdateDescription": "Beim Aktualisieren der Ressource ist ein Fehler aufgetreten",
"access": "Zugriff", "access": "Zugriff",
"accessControl": "Zugriffskontrolle",
"shareLink": "{resource} Freigabe-Link", "shareLink": "{resource} Freigabe-Link",
"resourceSelect": "Ressource auswählen", "resourceSelect": "Ressource auswählen",
"shareLinks": "Freigabe-Links", "shareLinks": "Freigabe-Links",
@@ -1038,7 +1039,6 @@
"pageNotFoundDescription": "Hoppla! Die gesuchte Seite existiert nicht.", "pageNotFoundDescription": "Hoppla! Die gesuchte Seite existiert nicht.",
"overview": "Übersicht", "overview": "Übersicht",
"home": "Startseite", "home": "Startseite",
"accessControl": "Zugriffskontrolle",
"settings": "Einstellungen", "settings": "Einstellungen",
"usersAll": "Alle Benutzer", "usersAll": "Alle Benutzer",
"license": "Lizenz", "license": "Lizenz",
@@ -1572,6 +1572,16 @@
"billingFeatureLossWarning": "Verfügbarkeitshinweis", "billingFeatureLossWarning": "Verfügbarkeitshinweis",
"billingFeatureLossDescription": "Durch Herabstufung werden Funktionen, die im neuen Paket nicht verfügbar sind, automatisch deaktiviert. Einige Einstellungen und Konfigurationen können verloren gehen. Bitte überprüfen Sie die Preismatrix um zu verstehen, welche Funktionen nicht mehr verfügbar sein werden.", "billingFeatureLossDescription": "Durch Herabstufung werden Funktionen, die im neuen Paket nicht verfügbar sind, automatisch deaktiviert. Einige Einstellungen und Konfigurationen können verloren gehen. Bitte überprüfen Sie die Preismatrix um zu verstehen, welche Funktionen nicht mehr verfügbar sein werden.",
"billingUsageExceedsLimit": "Aktuelle Nutzung ({current}) überschreitet das Limit ({limit})", "billingUsageExceedsLimit": "Aktuelle Nutzung ({current}) überschreitet das Limit ({limit})",
"billingPastDueTitle": "Zahlung vergangene Fälligkeit",
"billingPastDueDescription": "Ihre Zahlung ist abgelaufen. Bitte aktualisieren Sie Ihre Zahlungsmethode, um die aktuellen Funktionen Ihres Pakets weiter zu nutzen. Wenn nicht geklärt, wird Ihr Abonnement abgebrochen und Sie werden auf die kostenlose Stufe zurückgekehrt.",
"billingUnpaidTitle": "Unbezahltes Abonnement",
"billingUnpaidDescription": "Dein Abonnement ist unbezahlt und du wurdest auf die kostenlose Stufe zurückgekehrt. Bitte aktualisiere deine Zahlungsmethode, um dein Abonnement wiederherzustellen.",
"billingIncompleteTitle": "Zahlung unvollständig",
"billingIncompleteDescription": "Ihre Zahlung ist unvollständig. Bitte schließen Sie den Zahlungsvorgang ab, um Ihr Abonnement zu aktivieren.",
"billingIncompleteExpiredTitle": "Zahlung abgelaufen",
"billingIncompleteExpiredDescription": "Deine Zahlung wurde nie abgeschlossen und ist abgelaufen. Du wurdest zur kostenlosen Stufe zurückgekehrt. Bitte melde dich erneut an, um den Zugriff auf kostenpflichtige Funktionen wiederherzustellen.",
"billingManageSubscription": "Verwalten Sie Ihr Abonnement",
"billingResolvePaymentIssue": "Bitte beheben Sie Ihr Zahlungsproblem vor dem Upgrade oder Herabstufen",
"signUpTerms": { "signUpTerms": {
"IAgreeToThe": "Ich stimme den", "IAgreeToThe": "Ich stimme den",
"termsOfService": "Nutzungsbedingungen zu", "termsOfService": "Nutzungsbedingungen zu",

View File

@@ -649,7 +649,8 @@
"resourcesUsersRolesAccess": "User and role-based access control", "resourcesUsersRolesAccess": "User and role-based access control",
"resourcesErrorUpdate": "Failed to toggle resource", "resourcesErrorUpdate": "Failed to toggle resource",
"resourcesErrorUpdateDescription": "An error occurred while updating the resource", "resourcesErrorUpdateDescription": "An error occurred while updating the resource",
"access": "Access Control", "access": "Access",
"accessControl": "Access Control",
"shareLink": "{resource} Share Link", "shareLink": "{resource} Share Link",
"resourceSelect": "Select resource", "resourceSelect": "Select resource",
"shareLinks": "Share Links", "shareLinks": "Share Links",
@@ -1101,6 +1102,12 @@
"actionGetUser": "Get User", "actionGetUser": "Get User",
"actionGetOrgUser": "Get Organization User", "actionGetOrgUser": "Get Organization User",
"actionListOrgDomains": "List Organization Domains", "actionListOrgDomains": "List Organization Domains",
"actionGetDomain": "Get Domain",
"actionCreateOrgDomain": "Create Domain",
"actionUpdateOrgDomain": "Update Domain",
"actionDeleteOrgDomain": "Delete Domain",
"actionGetDNSRecords": "Get DNS Records",
"actionRestartOrgDomain": "Restart Domain",
"actionCreateSite": "Create Site", "actionCreateSite": "Create Site",
"actionDeleteSite": "Delete Site", "actionDeleteSite": "Delete Site",
"actionGetSite": "Get Site", "actionGetSite": "Get Site",
@@ -1572,6 +1579,16 @@
"billingFeatureLossWarning": "Feature Availability Notice", "billingFeatureLossWarning": "Feature Availability Notice",
"billingFeatureLossDescription": "By downgrading, features not available in the new plan will be automatically disabled. Some settings and configurations may be lost. Please review the pricing matrix to understand which features will no longer be available.", "billingFeatureLossDescription": "By downgrading, features not available in the new plan will be automatically disabled. Some settings and configurations may be lost. Please review the pricing matrix to understand which features will no longer be available.",
"billingUsageExceedsLimit": "Current usage ({current}) exceeds limit ({limit})", "billingUsageExceedsLimit": "Current usage ({current}) exceeds limit ({limit})",
"billingPastDueTitle": "Payment Past Due",
"billingPastDueDescription": "Your payment is past due. Please update your payment method to continue using your current plan features. If not resolved, your subscription will be canceled and you'll be reverted to the free tier.",
"billingUnpaidTitle": "Subscription Unpaid",
"billingUnpaidDescription": "Your subscription is unpaid and you have been reverted to the free tier. Please update your payment method to restore your subscription.",
"billingIncompleteTitle": "Payment Incomplete",
"billingIncompleteDescription": "Your payment is incomplete. Please complete the payment process to activate your subscription.",
"billingIncompleteExpiredTitle": "Payment Expired",
"billingIncompleteExpiredDescription": "Your payment was never completed and has expired. You have been reverted to the free tier. Please subscribe again to restore access to paid features.",
"billingManageSubscription": "Manage your subscription",
"billingResolvePaymentIssue": "Please resolve your payment issue before upgrading or downgrading",
"signUpTerms": { "signUpTerms": {
"IAgreeToThe": "I agree to the", "IAgreeToThe": "I agree to the",
"termsOfService": "terms of service", "termsOfService": "terms of service",
@@ -1659,10 +1676,10 @@
"sshSudoModeCommandsDescription": "User can run only the specified commands with sudo.", "sshSudoModeCommandsDescription": "User can run only the specified commands with sudo.",
"sshSudo": "Allow sudo", "sshSudo": "Allow sudo",
"sshSudoCommands": "Sudo Commands", "sshSudoCommands": "Sudo Commands",
"sshSudoCommandsDescription": "List of commands the user is allowed to run with sudo.", "sshSudoCommandsDescription": "Comma separated list of commands the user is allowed to run with sudo.",
"sshCreateHomeDir": "Create Home Directory", "sshCreateHomeDir": "Create Home Directory",
"sshUnixGroups": "Unix Groups", "sshUnixGroups": "Unix Groups",
"sshUnixGroupsDescription": "Unix groups to add the user to on the target host.", "sshUnixGroupsDescription": "Comma separated Unix groups to add the user to on the target host.",
"retryAttempts": "Retry Attempts", "retryAttempts": "Retry Attempts",
"expectedResponseCodes": "Expected Response Codes", "expectedResponseCodes": "Expected Response Codes",
"expectedResponseCodesDescription": "HTTP status code that indicates healthy status. If left blank, 200-300 is considered healthy.", "expectedResponseCodesDescription": "HTTP status code that indicates healthy status. If left blank, 200-300 is considered healthy.",
@@ -2532,7 +2549,7 @@
"internalResourceAuthDaemonSite": "On Site", "internalResourceAuthDaemonSite": "On Site",
"internalResourceAuthDaemonSiteDescription": "Auth daemon runs on the site (Newt).", "internalResourceAuthDaemonSiteDescription": "Auth daemon runs on the site (Newt).",
"internalResourceAuthDaemonRemote": "Remote Host", "internalResourceAuthDaemonRemote": "Remote Host",
"internalResourceAuthDaemonRemoteDescription": "Auth daemon runs on a host that is not the site.", "internalResourceAuthDaemonRemoteDescription": "Auth daemon runs on this resource's destination - not the site.",
"internalResourceAuthDaemonPort": "Daemon Port (optional)", "internalResourceAuthDaemonPort": "Daemon Port (optional)",
"orgAuthWhatsThis": "Where can I find my organization ID?", "orgAuthWhatsThis": "Where can I find my organization ID?",
"learnMore": "Learn more", "learnMore": "Learn more",

View File

@@ -650,6 +650,7 @@
"resourcesErrorUpdate": "Error al cambiar el recurso", "resourcesErrorUpdate": "Error al cambiar el recurso",
"resourcesErrorUpdateDescription": "Se ha producido un error al actualizar el recurso", "resourcesErrorUpdateDescription": "Se ha producido un error al actualizar el recurso",
"access": "Acceder", "access": "Acceder",
"accessControl": "Control de acceso",
"shareLink": "{resource} Compartir Enlace", "shareLink": "{resource} Compartir Enlace",
"resourceSelect": "Seleccionar recurso", "resourceSelect": "Seleccionar recurso",
"shareLinks": "Compartir enlaces", "shareLinks": "Compartir enlaces",
@@ -1038,7 +1039,6 @@
"pageNotFoundDescription": "¡Vaya! La página que estás buscando no existe.", "pageNotFoundDescription": "¡Vaya! La página que estás buscando no existe.",
"overview": "Resumen", "overview": "Resumen",
"home": "Inicio", "home": "Inicio",
"accessControl": "Control de acceso",
"settings": "Ajustes", "settings": "Ajustes",
"usersAll": "Todos los usuarios", "usersAll": "Todos los usuarios",
"license": "Licencia", "license": "Licencia",
@@ -1572,6 +1572,16 @@
"billingFeatureLossWarning": "Aviso de disponibilidad de funcionalidad", "billingFeatureLossWarning": "Aviso de disponibilidad de funcionalidad",
"billingFeatureLossDescription": "Al degradar, las características no disponibles en el nuevo plan se desactivarán automáticamente. Algunas configuraciones y configuraciones pueden perderse. Por favor, revise la matriz de precios para entender qué características ya no estarán disponibles.", "billingFeatureLossDescription": "Al degradar, las características no disponibles en el nuevo plan se desactivarán automáticamente. Algunas configuraciones y configuraciones pueden perderse. Por favor, revise la matriz de precios para entender qué características ya no estarán disponibles.",
"billingUsageExceedsLimit": "El uso actual ({current}) supera el límite ({limit})", "billingUsageExceedsLimit": "El uso actual ({current}) supera el límite ({limit})",
"billingPastDueTitle": "Pago vencido",
"billingPastDueDescription": "Su pago ha vencido. Por favor, actualice su método de pago para seguir utilizando las características actuales de su plan. Si no se resuelve, tu suscripción se cancelará y serás revertido al nivel gratuito.",
"billingUnpaidTitle": "Suscripción no pagada",
"billingUnpaidDescription": "Tu suscripción no está pagada y has sido revertido al nivel gratuito. Por favor, actualiza tu método de pago para restaurar tu suscripción.",
"billingIncompleteTitle": "Pago incompleto",
"billingIncompleteDescription": "Su pago está incompleto. Por favor, complete el proceso de pago para activar su suscripción.",
"billingIncompleteExpiredTitle": "Pago expirado",
"billingIncompleteExpiredDescription": "Tu pago nunca se completó y ha expirado. Has sido revertido al nivel gratuito. Suscríbete de nuevo para restaurar el acceso a las funciones de pago.",
"billingManageSubscription": "Administra tu suscripción",
"billingResolvePaymentIssue": "Por favor resuelva su problema de pago antes de actualizar o bajar de calificación",
"signUpTerms": { "signUpTerms": {
"IAgreeToThe": "Estoy de acuerdo con los", "IAgreeToThe": "Estoy de acuerdo con los",
"termsOfService": "términos del servicio", "termsOfService": "términos del servicio",

View File

@@ -650,6 +650,7 @@
"resourcesErrorUpdate": "Échec de la bascule de la ressource", "resourcesErrorUpdate": "Échec de la bascule de la ressource",
"resourcesErrorUpdateDescription": "Une erreur s'est produite lors de la mise à jour de la ressource", "resourcesErrorUpdateDescription": "Une erreur s'est produite lors de la mise à jour de la ressource",
"access": "Accès", "access": "Accès",
"accessControl": "Contrôle d'accès",
"shareLink": "Lien de partage {resource}", "shareLink": "Lien de partage {resource}",
"resourceSelect": "Sélectionner une ressource", "resourceSelect": "Sélectionner une ressource",
"shareLinks": "Liens de partage", "shareLinks": "Liens de partage",
@@ -1038,7 +1039,6 @@
"pageNotFoundDescription": "Oups! La page que vous recherchez n'existe pas.", "pageNotFoundDescription": "Oups! La page que vous recherchez n'existe pas.",
"overview": "Vue d'ensemble", "overview": "Vue d'ensemble",
"home": "Accueil", "home": "Accueil",
"accessControl": "Contrôle d'accès",
"settings": "Paramètres", "settings": "Paramètres",
"usersAll": "Tous les utilisateurs", "usersAll": "Tous les utilisateurs",
"license": "Licence", "license": "Licence",
@@ -1572,6 +1572,16 @@
"billingFeatureLossWarning": "Avis de disponibilité des fonctionnalités", "billingFeatureLossWarning": "Avis de disponibilité des fonctionnalités",
"billingFeatureLossDescription": "En rétrogradant, les fonctionnalités non disponibles dans le nouveau plan seront automatiquement désactivées. Certains paramètres et configurations peuvent être perdus. Veuillez consulter la matrice de prix pour comprendre quelles fonctionnalités ne seront plus disponibles.", "billingFeatureLossDescription": "En rétrogradant, les fonctionnalités non disponibles dans le nouveau plan seront automatiquement désactivées. Certains paramètres et configurations peuvent être perdus. Veuillez consulter la matrice de prix pour comprendre quelles fonctionnalités ne seront plus disponibles.",
"billingUsageExceedsLimit": "L'utilisation actuelle ({current}) dépasse la limite ({limit})", "billingUsageExceedsLimit": "L'utilisation actuelle ({current}) dépasse la limite ({limit})",
"billingPastDueTitle": "Paiement en retard",
"billingPastDueDescription": "Votre paiement est échu. Veuillez mettre à jour votre méthode de paiement pour continuer à utiliser les fonctionnalités de votre plan actuel. Si non résolu, votre abonnement sera annulé et vous serez remis au niveau gratuit.",
"billingUnpaidTitle": "Abonnement impayé",
"billingUnpaidDescription": "Votre abonnement est impayé et vous avez été reversé au niveau gratuit. Veuillez mettre à jour votre méthode de paiement pour restaurer votre abonnement.",
"billingIncompleteTitle": "Paiement incomplet",
"billingIncompleteDescription": "Votre paiement est incomplet. Veuillez compléter le processus de paiement pour activer votre abonnement.",
"billingIncompleteExpiredTitle": "Paiement expiré",
"billingIncompleteExpiredDescription": "Votre paiement n'a jamais été complété et a expiré. Vous avez été restauré au niveau gratuit. Veuillez vous abonner à nouveau pour restaurer l'accès aux fonctionnalités payantes.",
"billingManageSubscription": "Gérer votre abonnement",
"billingResolvePaymentIssue": "Veuillez résoudre votre problème de paiement avant de procéder à la mise à niveau ou à la rétrogradation",
"signUpTerms": { "signUpTerms": {
"IAgreeToThe": "Je suis d'accord avec", "IAgreeToThe": "Je suis d'accord avec",
"termsOfService": "les conditions d'utilisation", "termsOfService": "les conditions d'utilisation",

View File

@@ -650,6 +650,7 @@
"resourcesErrorUpdate": "Impossibile attivare/disattivare la risorsa", "resourcesErrorUpdate": "Impossibile attivare/disattivare la risorsa",
"resourcesErrorUpdateDescription": "Si è verificato un errore durante l'aggiornamento della risorsa", "resourcesErrorUpdateDescription": "Si è verificato un errore durante l'aggiornamento della risorsa",
"access": "Accesso", "access": "Accesso",
"accessControl": "Controllo Accessi",
"shareLink": "Link di Condivisione {resource}", "shareLink": "Link di Condivisione {resource}",
"resourceSelect": "Seleziona risorsa", "resourceSelect": "Seleziona risorsa",
"shareLinks": "Link di Condivisione", "shareLinks": "Link di Condivisione",
@@ -1038,7 +1039,6 @@
"pageNotFoundDescription": "Oops! La pagina che stai cercando non esiste.", "pageNotFoundDescription": "Oops! La pagina che stai cercando non esiste.",
"overview": "Panoramica", "overview": "Panoramica",
"home": "Home", "home": "Home",
"accessControl": "Controllo Accessi",
"settings": "Impostazioni", "settings": "Impostazioni",
"usersAll": "Tutti Gli Utenti", "usersAll": "Tutti Gli Utenti",
"license": "Licenza", "license": "Licenza",
@@ -1572,6 +1572,16 @@
"billingFeatureLossWarning": "Avviso Di Disponibilità Caratteristica", "billingFeatureLossWarning": "Avviso Di Disponibilità Caratteristica",
"billingFeatureLossDescription": "Con il downgrading, le funzioni non disponibili nel nuovo piano saranno disattivate automaticamente. Alcune impostazioni e configurazioni potrebbero andare perse. Controlla la matrice dei prezzi per capire quali funzioni non saranno più disponibili.", "billingFeatureLossDescription": "Con il downgrading, le funzioni non disponibili nel nuovo piano saranno disattivate automaticamente. Alcune impostazioni e configurazioni potrebbero andare perse. Controlla la matrice dei prezzi per capire quali funzioni non saranno più disponibili.",
"billingUsageExceedsLimit": "L'utilizzo corrente ({current}) supera il limite ({limit})", "billingUsageExceedsLimit": "L'utilizzo corrente ({current}) supera il limite ({limit})",
"billingPastDueTitle": "Pagamento Scaduto",
"billingPastDueDescription": "Il pagamento è scaduto. Si prega di aggiornare il metodo di pagamento per continuare a utilizzare le funzioni del piano corrente. Se non risolto, il tuo abbonamento verrà annullato e verrai ripristinato al livello gratuito.",
"billingUnpaidTitle": "Abbonamento Non Pagato",
"billingUnpaidDescription": "Il tuo abbonamento non è pagato e sei stato restituito al livello gratuito. Per favore aggiorna il metodo di pagamento per ripristinare l'abbonamento.",
"billingIncompleteTitle": "Pagamento Incompleto",
"billingIncompleteDescription": "Il pagamento è incompleto. Si prega di completare il processo di pagamento per attivare il tuo abbonamento.",
"billingIncompleteExpiredTitle": "Pagamento Scaduto",
"billingIncompleteExpiredDescription": "Il tuo pagamento non è mai stato completato ed è scaduto. Sei stato ripristinato al livello gratuito. Si prega di iscriversi nuovamente per ripristinare l'accesso alle funzionalità a pagamento.",
"billingManageSubscription": "Gestisci il tuo abbonamento",
"billingResolvePaymentIssue": "Si prega di risolvere il problema di pagamento prima di aggiornare o declassare",
"signUpTerms": { "signUpTerms": {
"IAgreeToThe": "Accetto i", "IAgreeToThe": "Accetto i",
"termsOfService": "termini di servizio", "termsOfService": "termini di servizio",

View File

@@ -650,6 +650,7 @@
"resourcesErrorUpdate": "리소스를 전환하는 데 실패했습니다.", "resourcesErrorUpdate": "리소스를 전환하는 데 실패했습니다.",
"resourcesErrorUpdateDescription": "리소스를 업데이트하는 동안 오류가 발생했습니다.", "resourcesErrorUpdateDescription": "리소스를 업데이트하는 동안 오류가 발생했습니다.",
"access": "접속", "access": "접속",
"accessControl": "액세스 제어",
"shareLink": "{resource} 공유 링크", "shareLink": "{resource} 공유 링크",
"resourceSelect": "리소스 선택", "resourceSelect": "리소스 선택",
"shareLinks": "공유 링크", "shareLinks": "공유 링크",
@@ -1038,7 +1039,6 @@
"pageNotFoundDescription": "앗! 찾고 있는 페이지가 존재하지 않습니다.", "pageNotFoundDescription": "앗! 찾고 있는 페이지가 존재하지 않습니다.",
"overview": "개요", "overview": "개요",
"home": "홈", "home": "홈",
"accessControl": "액세스 제어",
"settings": "설정", "settings": "설정",
"usersAll": "모든 사용자", "usersAll": "모든 사용자",
"license": "라이선스", "license": "라이선스",
@@ -1572,6 +1572,16 @@
"billingFeatureLossWarning": "기능 가용성 알림", "billingFeatureLossWarning": "기능 가용성 알림",
"billingFeatureLossDescription": "다운그레이드함으로써 새 계획에서 사용할 수 없는 기능은 자동으로 비활성화됩니다. 일부 설정 및 구성은 손실될 수 있습니다. 어떤 기능들이 더 이상 사용 불가능한지 이해하기 위해 가격표를 검토하세요.", "billingFeatureLossDescription": "다운그레이드함으로써 새 계획에서 사용할 수 없는 기능은 자동으로 비활성화됩니다. 일부 설정 및 구성은 손실될 수 있습니다. 어떤 기능들이 더 이상 사용 불가능한지 이해하기 위해 가격표를 검토하세요.",
"billingUsageExceedsLimit": "현재 사용량 ({current})이 제한 ({limit})을 초과합니다", "billingUsageExceedsLimit": "현재 사용량 ({current})이 제한 ({limit})을 초과합니다",
"billingPastDueTitle": "연체된 결제",
"billingPastDueDescription": "결제가 연체되었습니다. 현재 이용 중인 플랜 기능을 계속 사용하기 위해 결제 수단을 업데이트해 주세요. 해결되지 않으면 구독이 취소되고 무료 요금제로 전환됩니다.",
"billingUnpaidTitle": "결제되지 않은 구독",
"billingUnpaidDescription": "구독 결제가 완료되지 않아 무료 요금제로 전환되었습니다. 구독을 복원하려면 결제 수단을 업데이트해 주세요.",
"billingIncompleteTitle": "불완전한 결제",
"billingIncompleteDescription": "결제가 불완전합니다. 구독을 활성화하기 위해 결제 과정을 완료해 주세요.",
"billingIncompleteExpiredTitle": "만료된 결제",
"billingIncompleteExpiredDescription": "결제가 완료되지 않아 만료되었습니다. 무료 요금제로 전환되었습니다. 유료 기능에 대한 액세스를 복원하려면 다시 구독해 주세요.",
"billingManageSubscription": "구독을 관리하십시오",
"billingResolvePaymentIssue": "업그레이드 또는 다운그레이드하기 전에 결제 문제를 해결해 주세요.",
"signUpTerms": { "signUpTerms": {
"IAgreeToThe": "동의합니다", "IAgreeToThe": "동의합니다",
"termsOfService": "서비스 약관", "termsOfService": "서비스 약관",

View File

@@ -650,6 +650,7 @@
"resourcesErrorUpdate": "Feilet å slå av/på ressurs", "resourcesErrorUpdate": "Feilet å slå av/på ressurs",
"resourcesErrorUpdateDescription": "En feil oppstod under oppdatering av ressursen", "resourcesErrorUpdateDescription": "En feil oppstod under oppdatering av ressursen",
"access": "Tilgang", "access": "Tilgang",
"accessControl": "Tilgangskontroll",
"shareLink": "{resource} Del Lenke", "shareLink": "{resource} Del Lenke",
"resourceSelect": "Velg ressurs", "resourceSelect": "Velg ressurs",
"shareLinks": "Del lenker", "shareLinks": "Del lenker",
@@ -1038,7 +1039,6 @@
"pageNotFoundDescription": "Oops! Siden du leter etter finnes ikke.", "pageNotFoundDescription": "Oops! Siden du leter etter finnes ikke.",
"overview": "Oversikt", "overview": "Oversikt",
"home": "Hjem", "home": "Hjem",
"accessControl": "Tilgangskontroll",
"settings": "Innstillinger", "settings": "Innstillinger",
"usersAll": "Alle brukere", "usersAll": "Alle brukere",
"license": "Lisens", "license": "Lisens",
@@ -1572,6 +1572,16 @@
"billingFeatureLossWarning": "Fremhev tilgjengelig varsel", "billingFeatureLossWarning": "Fremhev tilgjengelig varsel",
"billingFeatureLossDescription": "Ved å nedgradere vil funksjoner som ikke er tilgjengelige i den nye planen automatisk bli deaktivert. Noen innstillinger og konfigurasjoner kan gå tapt. Vennligst gjennomgå prismatrisen for å forstå hvilke funksjoner som ikke lenger vil være tilgjengelige.", "billingFeatureLossDescription": "Ved å nedgradere vil funksjoner som ikke er tilgjengelige i den nye planen automatisk bli deaktivert. Noen innstillinger og konfigurasjoner kan gå tapt. Vennligst gjennomgå prismatrisen for å forstå hvilke funksjoner som ikke lenger vil være tilgjengelige.",
"billingUsageExceedsLimit": "Gjeldende bruk ({current}) overskrider grensen ({limit})", "billingUsageExceedsLimit": "Gjeldende bruk ({current}) overskrider grensen ({limit})",
"billingPastDueTitle": "Betalingen har forfalt",
"billingPastDueDescription": "Betalingen er forfalt. Vennligst oppdater betalingsmetoden din for å fortsette å bruke den gjeldende funksjonsplanen din. Hvis du ikke har løst deg, vil abonnementet ditt avbrytes, og du vil bli tilbakestilt til gratistiden.",
"billingUnpaidTitle": "Abonnement ubetalt",
"billingUnpaidDescription": "Ditt abonnement er ubetalt og du har blitt tilbakestilt til gratis kasse. Vennligst oppdater din betalingsmetode for å gjenopprette abonnementet.",
"billingIncompleteTitle": "Betaling ufullstendig",
"billingIncompleteDescription": "Betalingen er ufullstendig. Vennligst fullfør betalingsprosessen for å aktivere abonnementet.",
"billingIncompleteExpiredTitle": "Betaling utløpt",
"billingIncompleteExpiredDescription": "Din betaling ble aldri fullført, og har utløpt. Du har blitt tilbakestilt til gratis dekk. Vennligst abonner på nytt for å gjenopprette tilgangen til betalte funksjoner.",
"billingManageSubscription": "Administrere ditt abonnement",
"billingResolvePaymentIssue": "Vennligst løs ditt betalingsproblem før du oppgraderer eller nedgraderer betalingen",
"signUpTerms": { "signUpTerms": {
"IAgreeToThe": "Jeg godtar", "IAgreeToThe": "Jeg godtar",
"termsOfService": "brukervilkårene", "termsOfService": "brukervilkårene",

View File

@@ -650,6 +650,7 @@
"resourcesErrorUpdate": "Bron wisselen mislukt", "resourcesErrorUpdate": "Bron wisselen mislukt",
"resourcesErrorUpdateDescription": "Er is een fout opgetreden tijdens het bijwerken van het document", "resourcesErrorUpdateDescription": "Er is een fout opgetreden tijdens het bijwerken van het document",
"access": "Toegangsrechten", "access": "Toegangsrechten",
"accessControl": "Toegangs controle",
"shareLink": "{resource} Share link", "shareLink": "{resource} Share link",
"resourceSelect": "Selecteer resource", "resourceSelect": "Selecteer resource",
"shareLinks": "Links delen", "shareLinks": "Links delen",
@@ -1038,7 +1039,6 @@
"pageNotFoundDescription": "Oeps! De pagina die je zoekt bestaat niet.", "pageNotFoundDescription": "Oeps! De pagina die je zoekt bestaat niet.",
"overview": "Overzicht.", "overview": "Overzicht.",
"home": "Startpagina", "home": "Startpagina",
"accessControl": "Toegangs controle",
"settings": "Instellingen", "settings": "Instellingen",
"usersAll": "Alle gebruikers", "usersAll": "Alle gebruikers",
"license": "Licentie", "license": "Licentie",
@@ -1572,6 +1572,16 @@
"billingFeatureLossWarning": "Kennisgeving beschikbaarheid", "billingFeatureLossWarning": "Kennisgeving beschikbaarheid",
"billingFeatureLossDescription": "Door downgraden worden functies die niet beschikbaar zijn in het nieuwe abonnement automatisch uitgeschakeld. Sommige instellingen en configuraties kunnen verloren gaan. Raadpleeg de prijsmatrix om te begrijpen welke functies niet langer beschikbaar zijn.", "billingFeatureLossDescription": "Door downgraden worden functies die niet beschikbaar zijn in het nieuwe abonnement automatisch uitgeschakeld. Sommige instellingen en configuraties kunnen verloren gaan. Raadpleeg de prijsmatrix om te begrijpen welke functies niet langer beschikbaar zijn.",
"billingUsageExceedsLimit": "Huidig gebruik ({current}) overschrijdt limiet ({limit})", "billingUsageExceedsLimit": "Huidig gebruik ({current}) overschrijdt limiet ({limit})",
"billingPastDueTitle": "Vervaldatum betaling",
"billingPastDueDescription": "Uw betaling is verlopen. Werk uw betaalmethode bij om uw huidige abonnementsfuncties te blijven gebruiken. Als dit niet is opgelost, zal je abonnement worden geannuleerd en zal je worden teruggezet naar de vrije rang.",
"billingUnpaidTitle": "Abonnement Onbetaald",
"billingUnpaidDescription": "Uw abonnement is niet betaald en u bent teruggekeerd naar het gratis niveau. Update uw betalingsmethode om uw abonnement te herstellen.",
"billingIncompleteTitle": "Betaling onvolledig",
"billingIncompleteDescription": "Uw betaling is onvolledig. Voltooi alstublieft het betalingsproces om uw abonnement te activeren.",
"billingIncompleteExpiredTitle": "Betaling verlopen",
"billingIncompleteExpiredDescription": "Uw betaling is nooit voltooid en verlopen. U bent teruggekeerd naar de gratis niveaus. Abonneer u opnieuw om de toegang tot betaalde functies te herstellen.",
"billingManageSubscription": "Beheer uw abonnement",
"billingResolvePaymentIssue": "Gelieve uw betalingsprobleem op te lossen voor het upgraden of downgraden",
"signUpTerms": { "signUpTerms": {
"IAgreeToThe": "Ik ga akkoord met de", "IAgreeToThe": "Ik ga akkoord met de",
"termsOfService": "servicevoorwaarden", "termsOfService": "servicevoorwaarden",

View File

@@ -650,6 +650,7 @@
"resourcesErrorUpdate": "Nie udało się przełączyć zasobu", "resourcesErrorUpdate": "Nie udało się przełączyć zasobu",
"resourcesErrorUpdateDescription": "Wystąpił błąd podczas aktualizacji zasobu", "resourcesErrorUpdateDescription": "Wystąpił błąd podczas aktualizacji zasobu",
"access": "Dostęp", "access": "Dostęp",
"accessControl": "Kontrola dostępu",
"shareLink": "Link udostępniania {resource}", "shareLink": "Link udostępniania {resource}",
"resourceSelect": "Wybierz zasób", "resourceSelect": "Wybierz zasób",
"shareLinks": "Linki udostępniania", "shareLinks": "Linki udostępniania",
@@ -1038,7 +1039,6 @@
"pageNotFoundDescription": "Ups! Strona, której szukasz, nie istnieje.", "pageNotFoundDescription": "Ups! Strona, której szukasz, nie istnieje.",
"overview": "Przegląd", "overview": "Przegląd",
"home": "Strona główna", "home": "Strona główna",
"accessControl": "Kontrola dostępu",
"settings": "Ustawienia", "settings": "Ustawienia",
"usersAll": "Wszyscy użytkownicy", "usersAll": "Wszyscy użytkownicy",
"license": "Licencja", "license": "Licencja",
@@ -1572,6 +1572,16 @@
"billingFeatureLossWarning": "Powiadomienie o dostępności funkcji", "billingFeatureLossWarning": "Powiadomienie o dostępności funkcji",
"billingFeatureLossDescription": "Po obniżeniu wartości funkcje niedostępne w nowym planie zostaną automatycznie wyłączone. Niektóre ustawienia i konfiguracje mogą zostać utracone. Zapoznaj się z matrycą cenową, aby zrozumieć, które funkcje nie będą już dostępne.", "billingFeatureLossDescription": "Po obniżeniu wartości funkcje niedostępne w nowym planie zostaną automatycznie wyłączone. Niektóre ustawienia i konfiguracje mogą zostać utracone. Zapoznaj się z matrycą cenową, aby zrozumieć, które funkcje nie będą już dostępne.",
"billingUsageExceedsLimit": "Bieżące użycie ({current}) przekracza limit ({limit})", "billingUsageExceedsLimit": "Bieżące użycie ({current}) przekracza limit ({limit})",
"billingPastDueTitle": "Płatność w przeszłości",
"billingPastDueDescription": "Twoja płatność jest zaległa. Zaktualizuj metodę płatności, aby kontynuować korzystanie z funkcji aktualnego planu. Jeśli nie zostanie rozwiązana, Twoja subskrypcja zostanie anulowana i zostaniesz przywrócony do darmowego poziomu.",
"billingUnpaidTitle": "Subskrypcja niezapłacona",
"billingUnpaidDescription": "Twoja subskrypcja jest niezapłacona i została przywrócona do darmowego poziomu. Zaktualizuj swoją metodę płatności, aby przywrócić subskrypcję.",
"billingIncompleteTitle": "Płatność niezakończona",
"billingIncompleteDescription": "Twoja płatność jest niekompletna. Ukończ proces płatności, aby aktywować subskrypcję.",
"billingIncompleteExpiredTitle": "Płatność wygasła",
"billingIncompleteExpiredDescription": "Twoja płatność nigdy nie została zakończona i wygasła. Zostałeś przywrócony do darmowego poziomu. Zapisz się ponownie, aby przywrócić dostęp do płatnych funkcji.",
"billingManageSubscription": "Zarządzaj subskrypcją",
"billingResolvePaymentIssue": "Rozwiąż problem z płatnościami przed aktualizacją lub obniżeniem oceny",
"signUpTerms": { "signUpTerms": {
"IAgreeToThe": "Zgadzam się z", "IAgreeToThe": "Zgadzam się z",
"termsOfService": "warunkami usługi", "termsOfService": "warunkami usługi",

View File

@@ -650,6 +650,7 @@
"resourcesErrorUpdate": "Falha ao alternar recurso", "resourcesErrorUpdate": "Falha ao alternar recurso",
"resourcesErrorUpdateDescription": "Ocorreu um erro ao atualizar o recurso", "resourcesErrorUpdateDescription": "Ocorreu um erro ao atualizar o recurso",
"access": "Acesso", "access": "Acesso",
"accessControl": "Controle de Acesso",
"shareLink": "Link de Compartilhamento {resource}", "shareLink": "Link de Compartilhamento {resource}",
"resourceSelect": "Selecionar recurso", "resourceSelect": "Selecionar recurso",
"shareLinks": "Links de Compartilhamento", "shareLinks": "Links de Compartilhamento",
@@ -1038,7 +1039,6 @@
"pageNotFoundDescription": "Ops! A página que você está procurando não existe.", "pageNotFoundDescription": "Ops! A página que você está procurando não existe.",
"overview": "Visão Geral", "overview": "Visão Geral",
"home": "Início", "home": "Início",
"accessControl": "Controle de Acesso",
"settings": "Configurações", "settings": "Configurações",
"usersAll": "Todos os Utilizadores", "usersAll": "Todos os Utilizadores",
"license": "Licença", "license": "Licença",
@@ -1572,6 +1572,16 @@
"billingFeatureLossWarning": "Aviso de disponibilidade de recursos", "billingFeatureLossWarning": "Aviso de disponibilidade de recursos",
"billingFeatureLossDescription": "Ao fazer o downgrading, recursos não disponíveis no novo plano serão desativados automaticamente. Algumas configurações e configurações podem ser perdidas. Por favor, revise a matriz de preços para entender quais características não estarão mais disponíveis.", "billingFeatureLossDescription": "Ao fazer o downgrading, recursos não disponíveis no novo plano serão desativados automaticamente. Algumas configurações e configurações podem ser perdidas. Por favor, revise a matriz de preços para entender quais características não estarão mais disponíveis.",
"billingUsageExceedsLimit": "Uso atual ({current}) excede o limite ({limit})", "billingUsageExceedsLimit": "Uso atual ({current}) excede o limite ({limit})",
"billingPastDueTitle": "Pagamento passado devido",
"billingPastDueDescription": "Seu pagamento está vencido. Por favor, atualize seu método de pagamento para continuar usando os recursos do seu plano atual. Se não for resolvido, sua assinatura será cancelada e você será revertido para o nível gratuito.",
"billingUnpaidTitle": "Assinatura não paga",
"billingUnpaidDescription": "Sua assinatura não foi paga e você voltou para o nível gratuito. Atualize o seu método de pagamento para restaurar sua assinatura.",
"billingIncompleteTitle": "Pagamento Incompleto",
"billingIncompleteDescription": "Seu pagamento está incompleto. Por favor, complete o processo de pagamento para ativar sua assinatura.",
"billingIncompleteExpiredTitle": "Pagamento expirado",
"billingIncompleteExpiredDescription": "Seu pagamento nunca foi concluído e expirou. Você foi revertido para o nível gratuito. Por favor, inscreva-se novamente para restaurar o acesso a recursos pagos.",
"billingManageSubscription": "Gerencie sua assinatura",
"billingResolvePaymentIssue": "Por favor, resolva seu problema de pagamento antes de atualizar ou rebaixar",
"signUpTerms": { "signUpTerms": {
"IAgreeToThe": "Concordo com", "IAgreeToThe": "Concordo com",
"termsOfService": "os termos de serviço", "termsOfService": "os termos de serviço",

View File

@@ -650,6 +650,7 @@
"resourcesErrorUpdate": "Не удалось переключить ресурс", "resourcesErrorUpdate": "Не удалось переключить ресурс",
"resourcesErrorUpdateDescription": "Произошла ошибка при обновлении ресурса", "resourcesErrorUpdateDescription": "Произошла ошибка при обновлении ресурса",
"access": "Доступ", "access": "Доступ",
"accessControl": "Контроль доступа",
"shareLink": "Общая ссылка {resource}", "shareLink": "Общая ссылка {resource}",
"resourceSelect": "Выберите ресурс", "resourceSelect": "Выберите ресурс",
"shareLinks": "Общие ссылки", "shareLinks": "Общие ссылки",
@@ -1038,7 +1039,6 @@
"pageNotFoundDescription": "Упс! Страница, которую вы ищете, не существует.", "pageNotFoundDescription": "Упс! Страница, которую вы ищете, не существует.",
"overview": "Обзор", "overview": "Обзор",
"home": "Главная", "home": "Главная",
"accessControl": "Контроль доступа",
"settings": "Настройки", "settings": "Настройки",
"usersAll": "Все пользователи", "usersAll": "Все пользователи",
"license": "Лицензия", "license": "Лицензия",
@@ -1572,6 +1572,16 @@
"billingFeatureLossWarning": "Уведомление о доступности функций", "billingFeatureLossWarning": "Уведомление о доступности функций",
"billingFeatureLossDescription": "При переходе на другой тарифный план функции не будут автоматически отключены. Некоторые настройки и конфигурации могут быть потеряны. Пожалуйста, ознакомьтесь с матрицей ценообразования, чтобы понять, какие функции больше не будут доступны.", "billingFeatureLossDescription": "При переходе на другой тарифный план функции не будут автоматически отключены. Некоторые настройки и конфигурации могут быть потеряны. Пожалуйста, ознакомьтесь с матрицей ценообразования, чтобы понять, какие функции больше не будут доступны.",
"billingUsageExceedsLimit": "Текущее использование ({current}) превышает предел ({limit})", "billingUsageExceedsLimit": "Текущее использование ({current}) превышает предел ({limit})",
"billingPastDueTitle": "Платеж просрочен",
"billingPastDueDescription": "Ваш платеж просрочен. Пожалуйста, обновите способ оплаты, чтобы продолжить использовать текущие функции. Если ваша подписка не будет решена, она будет отменена, и вы вернетесь к бесплатному уровню.",
"billingUnpaidTitle": "Подписка не оплачена",
"billingUnpaidDescription": "Ваша подписка не оплачена, и вы были возвращены к бесплатному уровню. Пожалуйста, обновите способ оплаты, чтобы восстановить вашу подписку.",
"billingIncompleteTitle": "Платеж не завершен",
"billingIncompleteDescription": "Ваш платеж не завершен. Пожалуйста, завершите процесс оплаты, чтобы активировать вашу подписку.",
"billingIncompleteExpiredTitle": "Платеж просрочен",
"billingIncompleteExpiredDescription": "Ваш платеж не был завершен и истек. Вы были возвращены к бесплатному уровню. Пожалуйста, подпишитесь снова, чтобы восстановить доступ к платным функциям.",
"billingManageSubscription": "Управление подпиской",
"billingResolvePaymentIssue": "Пожалуйста, решите проблему оплаты перед обновлением или понижением сорта",
"signUpTerms": { "signUpTerms": {
"IAgreeToThe": "Я согласен с", "IAgreeToThe": "Я согласен с",
"termsOfService": "условия использования", "termsOfService": "условия использования",

View File

@@ -650,6 +650,7 @@
"resourcesErrorUpdate": "Kaynak değiştirilemedi", "resourcesErrorUpdate": "Kaynak değiştirilemedi",
"resourcesErrorUpdateDescription": "Kaynak güncellenirken bir hata oluştu", "resourcesErrorUpdateDescription": "Kaynak güncellenirken bir hata oluştu",
"access": "Erişim", "access": "Erişim",
"accessControl": "Erişim Kontrolü",
"shareLink": "{resource} Paylaşım Bağlantısı", "shareLink": "{resource} Paylaşım Bağlantısı",
"resourceSelect": "Kaynak seçin", "resourceSelect": "Kaynak seçin",
"shareLinks": "Paylaşım Bağlantıları", "shareLinks": "Paylaşım Bağlantıları",
@@ -1038,7 +1039,6 @@
"pageNotFoundDescription": "Oops! Aradığınız sayfa mevcut değil.", "pageNotFoundDescription": "Oops! Aradığınız sayfa mevcut değil.",
"overview": "Genel Bakış", "overview": "Genel Bakış",
"home": "Ana Sayfa", "home": "Ana Sayfa",
"accessControl": "Erişim Kontrolü",
"settings": "Ayarlar", "settings": "Ayarlar",
"usersAll": "Tüm Kullanıcılar", "usersAll": "Tüm Kullanıcılar",
"license": "Lisans", "license": "Lisans",
@@ -1572,6 +1572,16 @@
"billingFeatureLossWarning": "Özellik Kullanılabilirlik Bildirimi", "billingFeatureLossWarning": "Özellik Kullanılabilirlik Bildirimi",
"billingFeatureLossDescription": "Plan düşürüldüğünde, yeni planda mevcut olmayan özellikler otomatik olarak devre dışı bırakılacaktır. Bazı ayarlar ve yapılar kaybolabilir. Hangi özelliklerin artık mevcut olmayacağını anlamak için fiyat tablosunu inceleyiniz.", "billingFeatureLossDescription": "Plan düşürüldüğünde, yeni planda mevcut olmayan özellikler otomatik olarak devre dışı bırakılacaktır. Bazı ayarlar ve yapılar kaybolabilir. Hangi özelliklerin artık mevcut olmayacağını anlamak için fiyat tablosunu inceleyiniz.",
"billingUsageExceedsLimit": "Mevcut kullanım ({current}) limitleri ({limit}) aşıyor", "billingUsageExceedsLimit": "Mevcut kullanım ({current}) limitleri ({limit}) aşıyor",
"billingPastDueTitle": "Ödeme Geçmiş",
"billingPastDueDescription": "Ödemenizın vadesi geçti. Mevcut plan özelliklerinizi kullanmaya devam etmek için lütfen ödeme yöntemini güncelleyin. Sorun çözülmezse aboneliğiniz iptal edilecek ve ücretsiz seviyeye dönüleceksiniz.",
"billingUnpaidTitle": "Ödenmemiş Abonelik",
"billingUnpaidDescription": "Aboneliğiniz ödenmedi ve ücretsiz seviyeye geri döndünüz. Aboneliğinizi geri yüklemek için lütfen ödeme yöntemini güncelleyin.",
"billingIncompleteTitle": "Eksik Ödeme",
"billingIncompleteDescription": "Ödemeniz eksik. Aboneliğinizi etkinleştirmek için lütfen ödeme sürecini tamamlayın.",
"billingIncompleteExpiredTitle": "Ödeme Süresi Doldu",
"billingIncompleteExpiredDescription": "Ödemeniz hiç tamamlanmadı ve süresi doldu. Ücretsiz seviyeye geri döndünüz. Ücretli özelliklere erişimi yeniden sağlamak için lütfen yeniden abone olun.",
"billingManageSubscription": "Aboneliğinizi Yönetin",
"billingResolvePaymentIssue": "Yükseltmeden veya düşürmeden önce ödeme sorunuzu çözün",
"signUpTerms": { "signUpTerms": {
"IAgreeToThe": "Kabul ediyorum", "IAgreeToThe": "Kabul ediyorum",
"termsOfService": "hizmet şartları", "termsOfService": "hizmet şartları",

View File

@@ -650,6 +650,7 @@
"resourcesErrorUpdate": "切换资源失败", "resourcesErrorUpdate": "切换资源失败",
"resourcesErrorUpdateDescription": "更新资源时出错", "resourcesErrorUpdateDescription": "更新资源时出错",
"access": "访问权限", "access": "访问权限",
"accessControl": "访问控制",
"shareLink": "{resource} 的分享链接", "shareLink": "{resource} 的分享链接",
"resourceSelect": "选择资源", "resourceSelect": "选择资源",
"shareLinks": "分享链接", "shareLinks": "分享链接",
@@ -1038,7 +1039,6 @@
"pageNotFoundDescription": "哎呀!您正在查找的页面不存在。", "pageNotFoundDescription": "哎呀!您正在查找的页面不存在。",
"overview": "概览", "overview": "概览",
"home": "首页", "home": "首页",
"accessControl": "访问控制",
"settings": "设置", "settings": "设置",
"usersAll": "所有用户", "usersAll": "所有用户",
"license": "许可协议", "license": "许可协议",
@@ -1572,6 +1572,16 @@
"billingFeatureLossWarning": "功能可用通知", "billingFeatureLossWarning": "功能可用通知",
"billingFeatureLossDescription": "如果降级,新计划中不可用的功能将被自动禁用。一些设置和配置可能会丢失。 请查看定价矩阵以了解哪些功能将不再可用。", "billingFeatureLossDescription": "如果降级,新计划中不可用的功能将被自动禁用。一些设置和配置可能会丢失。 请查看定价矩阵以了解哪些功能将不再可用。",
"billingUsageExceedsLimit": "当前使用量 ({current}) 超出限制 ({limit})", "billingUsageExceedsLimit": "当前使用量 ({current}) 超出限制 ({limit})",
"billingPastDueTitle": "过去到期的付款",
"billingPastDueDescription": "您的付款已过期。请更新您的付款方法以继续使用您当前的计划功能。 如果不解决,您的订阅将被取消,您将被恢复到免费等级。",
"billingUnpaidTitle": "订阅未付款",
"billingUnpaidDescription": "您的订阅未付,您已恢复到免费等级。请更新您的付款方法以恢复您的订阅。",
"billingIncompleteTitle": "付款不完成",
"billingIncompleteDescription": "您的付款不完整。请完成付款过程以激活您的订阅。",
"billingIncompleteExpiredTitle": "付款已过期",
"billingIncompleteExpiredDescription": "您的付款尚未完成且已过期。您已恢复到免费级别。请再次订阅以恢复对已支付功能的访问。",
"billingManageSubscription": "管理您的订阅",
"billingResolvePaymentIssue": "请在升级或降级之前解决您的付款问题",
"signUpTerms": { "signUpTerms": {
"IAgreeToThe": "我同意", "IAgreeToThe": "我同意",
"termsOfService": "服务条款", "termsOfService": "服务条款",

511
package-lock.json generated

File diff suppressed because it is too large Load Diff

View File

@@ -32,7 +32,7 @@
"format": "prettier --write ." "format": "prettier --write ."
}, },
"dependencies": { "dependencies": {
"@asteasolutions/zod-to-openapi": "8.4.0", "@asteasolutions/zod-to-openapi": "8.4.1",
"@aws-sdk/client-s3": "3.989.0", "@aws-sdk/client-s3": "3.989.0",
"@faker-js/faker": "10.3.0", "@faker-js/faker": "10.3.0",
"@headlessui/react": "2.2.9", "@headlessui/react": "2.2.9",
@@ -59,11 +59,11 @@
"@radix-ui/react-tabs": "1.1.13", "@radix-ui/react-tabs": "1.1.13",
"@radix-ui/react-toast": "1.2.15", "@radix-ui/react-toast": "1.2.15",
"@radix-ui/react-tooltip": "1.2.8", "@radix-ui/react-tooltip": "1.2.8",
"@react-email/components": "1.0.7", "@react-email/components": "1.0.8",
"@react-email/render": "2.0.4", "@react-email/render": "2.0.4",
"@react-email/tailwind": "2.0.4", "@react-email/tailwind": "2.0.5",
"@simplewebauthn/browser": "13.2.2", "@simplewebauthn/browser": "13.2.2",
"@simplewebauthn/server": "13.2.2", "@simplewebauthn/server": "13.2.3",
"@tailwindcss/forms": "0.5.11", "@tailwindcss/forms": "0.5.11",
"@tanstack/react-query": "5.90.21", "@tanstack/react-query": "5.90.21",
"@tanstack/react-table": "8.21.3", "@tanstack/react-table": "8.21.3",
@@ -81,7 +81,7 @@
"drizzle-orm": "0.45.1", "drizzle-orm": "0.45.1",
"express": "5.2.1", "express": "5.2.1",
"express-rate-limit": "8.2.1", "express-rate-limit": "8.2.1",
"glob": "13.0.3", "glob": "13.0.6",
"helmet": "8.1.0", "helmet": "8.1.0",
"http-errors": "2.0.1", "http-errors": "2.0.1",
"input-otp": "1.4.2", "input-otp": "1.4.2",
@@ -93,20 +93,20 @@
"maxmind": "5.0.5", "maxmind": "5.0.5",
"moment": "2.30.1", "moment": "2.30.1",
"next": "15.5.12", "next": "15.5.12",
"next-intl": "4.8.2", "next-intl": "4.8.3",
"next-themes": "0.4.6", "next-themes": "0.4.6",
"nextjs-toploader": "3.9.17", "nextjs-toploader": "3.9.17",
"node-cache": "5.1.2", "node-cache": "5.1.2",
"nodemailer": "8.0.1", "nodemailer": "8.0.1",
"oslo": "1.2.1", "oslo": "1.2.1",
"pg": "8.18.0", "pg": "8.19.0",
"posthog-node": "5.24.15", "posthog-node": "5.26.0",
"qrcode.react": "4.2.0", "qrcode.react": "4.2.0",
"react": "19.2.4", "react": "19.2.4",
"react-day-picker": "9.13.2", "react-day-picker": "9.13.2",
"react-dom": "19.2.4", "react-dom": "19.2.4",
"react-easy-sort": "1.8.0", "react-easy-sort": "1.8.0",
"react-hook-form": "7.71.1", "react-hook-form": "7.71.2",
"react-icons": "5.5.0", "react-icons": "5.5.0",
"recharts": "2.15.4", "recharts": "2.15.4",
"reodotdev": "1.0.0", "reodotdev": "1.0.0",
@@ -115,7 +115,7 @@
"sshpk": "^1.18.0", "sshpk": "^1.18.0",
"stripe": "20.3.1", "stripe": "20.3.1",
"swagger-ui-express": "5.0.1", "swagger-ui-express": "5.0.1",
"tailwind-merge": "3.4.0", "tailwind-merge": "3.5.0",
"topojson-client": "3.1.0", "topojson-client": "3.1.0",
"tw-animate-css": "1.4.0", "tw-animate-css": "1.4.0",
"use-debounce": "^10.1.0", "use-debounce": "^10.1.0",
@@ -147,7 +147,7 @@
"@types/js-yaml": "4.0.9", "@types/js-yaml": "4.0.9",
"@types/jsonwebtoken": "9.0.10", "@types/jsonwebtoken": "9.0.10",
"@types/node": "25.2.3", "@types/node": "25.2.3",
"@types/nodemailer": "7.0.9", "@types/nodemailer": "7.0.11",
"@types/nprogress": "0.2.3", "@types/nprogress": "0.2.3",
"@types/pg": "8.16.0", "@types/pg": "8.16.0",
"@types/react": "19.2.14", "@types/react": "19.2.14",

View File

@@ -3,7 +3,14 @@ import {
encodeHexLowerCase encodeHexLowerCase
} from "@oslojs/encoding"; } from "@oslojs/encoding";
import { sha256 } from "@oslojs/crypto/sha2"; import { sha256 } from "@oslojs/crypto/sha2";
import { resourceSessions, Session, sessions, User, users } from "@server/db"; import {
resourceSessions,
safeRead,
Session,
sessions,
User,
users
} from "@server/db";
import { db } from "@server/db"; import { db } from "@server/db";
import { eq, inArray } from "drizzle-orm"; import { eq, inArray } from "drizzle-orm";
import config from "@server/lib/config"; import config from "@server/lib/config";
@@ -54,11 +61,15 @@ export async function validateSessionToken(
const sessionId = encodeHexLowerCase( const sessionId = encodeHexLowerCase(
sha256(new TextEncoder().encode(token)) sha256(new TextEncoder().encode(token))
); );
const result = await db
.select({ user: users, session: sessions }) const result = await safeRead((db) =>
.from(sessions) db
.innerJoin(users, eq(sessions.userId, users.userId)) .select({ user: users, session: sessions })
.where(eq(sessions.sessionId, sessionId)); .from(sessions)
.innerJoin(users, eq(sessions.userId, users.userId))
.where(eq(sessions.sessionId, sessionId))
);
if (result.length < 1) { if (result.length < 1) {
return { session: null, user: null }; return { session: null, user: null };
} }

View File

@@ -1,7 +1,7 @@
import { encodeHexLowerCase } from "@oslojs/encoding"; import { encodeHexLowerCase } from "@oslojs/encoding";
import { sha256 } from "@oslojs/crypto/sha2"; import { sha256 } from "@oslojs/crypto/sha2";
import { resourceSessions, ResourceSession } from "@server/db"; import { resourceSessions, ResourceSession } from "@server/db";
import { db } from "@server/db"; import { db, safeRead } from "@server/db";
import { eq, and } from "drizzle-orm"; import { eq, and } from "drizzle-orm";
import config from "@server/lib/config"; import config from "@server/lib/config";
@@ -66,15 +66,17 @@ export async function validateResourceSessionToken(
const sessionId = encodeHexLowerCase( const sessionId = encodeHexLowerCase(
sha256(new TextEncoder().encode(token)) sha256(new TextEncoder().encode(token))
); );
const result = await db const result = await safeRead((db) =>
.select() db
.from(resourceSessions) .select()
.where( .from(resourceSessions)
and( .where(
eq(resourceSessions.sessionId, sessionId), and(
eq(resourceSessions.resourceId, resourceId) eq(resourceSessions.sessionId, sessionId),
eq(resourceSessions.resourceId, resourceId)
)
) )
); );
if (result.length < 1) { if (result.length < 1) {
return { resourceSession: null }; return { resourceSession: null };
@@ -85,7 +87,7 @@ export async function validateResourceSessionToken(
if (Date.now() >= resourceSession.expiresAt) { if (Date.now() >= resourceSession.expiresAt) {
await db await db
.delete(resourceSessions) .delete(resourceSessions)
.where(eq(resourceSessions.sessionId, resourceSessions.sessionId)); .where(eq(resourceSessions.sessionId, sessionId));
return { resourceSession: null }; return { resourceSession: null };
} else if ( } else if (
Date.now() >= Date.now() >=
@@ -179,7 +181,7 @@ export function serializeResourceSessionCookie(
return `${cookieName}_s.${now}=${token}; HttpOnly; SameSite=Lax; Expires=${expiresAt.toUTCString()}; Path=/; Secure; Domain=${domain}`; return `${cookieName}_s.${now}=${token}; HttpOnly; SameSite=Lax; Expires=${expiresAt.toUTCString()}; Path=/; Secure; Domain=${domain}`;
} else { } else {
if (expiresAt === undefined) { if (expiresAt === undefined) {
return `${cookieName}.${now}=${token}; HttpOnly; SameSite=Lax; Path=/; Domain=$domain}`; return `${cookieName}.${now}=${token}; HttpOnly; SameSite=Lax; Path=/; Domain=${domain}`;
} }
return `${cookieName}.${now}=${token}; HttpOnly; SameSite=Lax; Expires=${expiresAt.toUTCString()}; Path=/; Domain=${domain}`; return `${cookieName}.${now}=${token}; HttpOnly; SameSite=Lax; Expires=${expiresAt.toUTCString()}; Path=/; Domain=${domain}`;
} }

View File

@@ -1,4 +1,6 @@
export * from "./driver"; export * from "./driver";
export * from "./logsDriver";
export * from "./safeRead";
export * from "./schema/schema"; export * from "./schema/schema";
export * from "./schema/privateSchema"; export * from "./schema/privateSchema";
export * from "./migrate"; export * from "./migrate";

View File

@@ -0,0 +1,87 @@
import { drizzle as DrizzlePostgres } from "drizzle-orm/node-postgres";
import { Pool } from "pg";
import { readConfigFile } from "@server/lib/readConfigFile";
import { withReplicas } from "drizzle-orm/pg-core";
import { build } from "@server/build";
import { db as mainDb, primaryDb as mainPrimaryDb } from "./driver";
// Builds the Drizzle database handle used for logs.
// SaaS builds may direct logs to a dedicated Postgres cluster (optionally
// with read replicas); every other build reuses the main database handle.
function createLogsDb() {
    // Only use separate logs database in SaaS builds
    if (build !== "saas") {
        return mainDb;
    }

    const config = readConfigFile();

    // Merge configs, prioritizing private config
    const logsConfig = config.postgres_logs;

    // Check environment variable first; the YAML config is only consulted
    // when the env var is unset.
    let connectionString = process.env.POSTGRES_LOGS_CONNECTION_STRING;
    let replicaConnections: Array<{ connection_string: string }> = [];

    if (!connectionString && logsConfig) {
        connectionString = logsConfig.connection_string;
        replicaConnections = logsConfig.replicas || [];
    }

    // If POSTGRES_LOGS_REPLICA_CONNECTION_STRINGS is set, use it
    // (comma-separated; takes precedence over replicas from the config file)
    if (process.env.POSTGRES_LOGS_REPLICA_CONNECTION_STRINGS) {
        replicaConnections =
            process.env.POSTGRES_LOGS_REPLICA_CONNECTION_STRINGS.split(",").map(
                (conn) => ({
                    connection_string: conn.trim()
                })
            );
    }

    // If no logs database is configured, fall back to main database
    if (!connectionString) {
        return mainDb;
    }

    // Create separate connection pool for logs database; pool settings fall
    // back to the main postgres pool settings when postgres_logs.pool is absent
    const poolConfig = logsConfig?.pool || config.postgres?.pool;

    const primaryPool = new Pool({
        connectionString,
        max: poolConfig?.max_connections || 20,
        idleTimeoutMillis: poolConfig?.idle_timeout_ms || 30000,
        connectionTimeoutMillis: poolConfig?.connection_timeout_ms || 5000
    });

    const replicas = [];
    if (!replicaConnections.length) {
        // No replicas configured: register the primary as the sole read
        // target so withReplicas always has something to route reads to.
        replicas.push(
            DrizzlePostgres(primaryPool, {
                logger: process.env.QUERY_LOGGING == "true"
            })
        );
    } else {
        // One dedicated pool per configured replica connection string.
        for (const conn of replicaConnections) {
            const replicaPool = new Pool({
                connectionString: conn.connection_string,
                max: poolConfig?.max_replica_connections || 20,
                idleTimeoutMillis: poolConfig?.idle_timeout_ms || 30000,
                connectionTimeoutMillis:
                    poolConfig?.connection_timeout_ms || 5000
            });
            replicas.push(
                DrizzlePostgres(replicaPool, {
                    logger: process.env.QUERY_LOGGING == "true"
                })
            );
        }
    }

    // Reads are routed to the replicas; writes (and $primary) use the
    // primary pool.
    return withReplicas(
        DrizzlePostgres(primaryPool, {
            logger: process.env.QUERY_LOGGING == "true"
        }),
        replicas as any
    );
}

// Logs database handle (falls back to the main db when not configured).
export const logsDb = createLogsDb();
export default logsDb;
// Primary-only handle for reads that must not hit a replica.
// NOTE(review): $primary is provided by withReplicas(); when logsDb falls
// back to mainDb this relies on the main driver exposing the same shape —
// confirm against ./driver.
export const primaryLogsDb = logsDb.$primary;

24
server/db/pg/safeRead.ts Normal file
View File

@@ -0,0 +1,24 @@
import { db, primaryDb } from "./driver";
/**
 * Executes a read query with automatic primary fallback (Postgres).
 *
 * The callback is first invoked with the replica-aware connection. If it
 * throws, or resolves to a "missing" result (null, undefined, or an empty
 * array), the same callback is re-run against the primary connection to
 * guard against replication lag.
 */
export async function safeRead<T>(
    query: (d: typeof db | typeof primaryDb) => Promise<T>
): Promise<T> {
    let replicaResult: T;
    try {
        replicaResult = await query(db);
    } catch {
        // Replica read failed entirely — retry once on the primary.
        return query(primaryDb);
    }

    // Treat null/undefined and empty arrays as "possibly stale replica".
    const isMissing =
        replicaResult === null ||
        replicaResult === undefined ||
        (Array.isArray(replicaResult) && replicaResult.length === 0);

    return isMissing ? query(primaryDb) : replicaResult;
}

View File

@@ -1,4 +1,6 @@
export * from "./driver"; export * from "./driver";
export * from "./logsDriver";
export * from "./safeRead";
export * from "./schema/schema"; export * from "./schema/schema";
export * from "./schema/privateSchema"; export * from "./schema/privateSchema";
export * from "./migrate"; export * from "./migrate";

View File

@@ -0,0 +1,7 @@
import { db as mainDb } from "./driver";
// SQLite doesn't support separate databases for logs in the same way as
// Postgres, so always reuse the main database connection.
export const logsDb = mainDb;
export default logsDb;
// No replicas exist for SQLite, so the "primary" handle is the same object.
export const primaryLogsDb = logsDb;

View File

@@ -0,0 +1,11 @@
import { db } from "./driver";
/**
 * Executes a read query against the SQLite database.
 *
 * SQLite has no read replicas, so unlike the Postgres variant there is no
 * primary fallback: the callback runs exactly once against the single
 * database connection.
 */
export async function safeRead<T>(
    query: (d: typeof db) => Promise<T>
): Promise<T> {
    const result = await query(db);
    return result;
}

View File

@@ -17,6 +17,7 @@ import fs from "fs";
import path from "path"; import path from "path";
import { APP_PATH } from "./lib/consts"; import { APP_PATH } from "./lib/consts";
import yaml from "js-yaml"; import yaml from "js-yaml";
import { z } from "zod";
const dev = process.env.ENVIRONMENT !== "prod"; const dev = process.env.ENVIRONMENT !== "prod";
const externalPort = config.getRawConfig().server.integration_port; const externalPort = config.getRawConfig().server.integration_port;
@@ -38,12 +39,24 @@ export function createIntegrationApiServer() {
apiServer.use(cookieParser()); apiServer.use(cookieParser());
apiServer.use(express.json()); apiServer.use(express.json());
const openApiDocumentation = getOpenApiDocumentation();
apiServer.use( apiServer.use(
"/v1/docs", "/v1/docs",
swaggerUi.serve, swaggerUi.serve,
swaggerUi.setup(getOpenApiDocumentation()) swaggerUi.setup(openApiDocumentation)
); );
// Unauthenticated OpenAPI spec endpoints
apiServer.get("/v1/openapi.json", (_req, res) => {
res.json(openApiDocumentation);
});
apiServer.get("/v1/openapi.yaml", (_req, res) => {
const yamlOutput = yaml.dump(openApiDocumentation);
res.type("application/yaml").send(yamlOutput);
});
// API routes // API routes
const prefix = `/v1`; const prefix = `/v1`;
apiServer.use(logIncomingMiddleware); apiServer.use(logIncomingMiddleware);
@@ -75,16 +88,6 @@ function getOpenApiDocumentation() {
} }
); );
for (const def of registry.definitions) {
if (def.type === "route") {
def.route.security = [
{
[bearerAuth.name]: []
}
];
}
}
registry.registerPath({ registry.registerPath({
method: "get", method: "get",
path: "/", path: "/",
@@ -94,6 +97,74 @@ function getOpenApiDocumentation() {
responses: {} responses: {}
}); });
registry.registerPath({
method: "get",
path: "/openapi.json",
description: "Get OpenAPI specification as JSON",
tags: [],
request: {},
responses: {
"200": {
description: "OpenAPI specification as JSON",
content: {
"application/json": {
schema: {
type: "object"
}
}
}
}
}
});
registry.registerPath({
method: "get",
path: "/openapi.yaml",
description: "Get OpenAPI specification as YAML",
tags: [],
request: {},
responses: {
"200": {
description: "OpenAPI specification as YAML",
content: {
"application/yaml": {
schema: {
type: "string"
}
}
}
}
}
});
for (const def of registry.definitions) {
if (def.type === "route") {
def.route.security = [
{
[bearerAuth.name]: []
}
];
// Ensure every route has a generic JSON response schema so Swagger UI can render responses
const existingResponses = def.route.responses;
const hasExistingResponses =
existingResponses && Object.keys(existingResponses).length > 0;
if (!hasExistingResponses) {
def.route.responses = {
"*": {
description: "",
content: {
"application/json": {
schema: z.object({})
}
}
}
};
}
}
}
const generator = new OpenApiGeneratorV3(registry.definitions); const generator = new OpenApiGeneratorV3(registry.definitions);
const generated = generator.generateDocument({ const generated = generator.generateDocument({

View File

@@ -16,6 +16,11 @@ const internalPort = config.getRawConfig().server.internal_port;
export function createInternalServer() { export function createInternalServer() {
const internalServer = express(); const internalServer = express();
const trustProxy = config.getRawConfig().server.trust_proxy;
if (trustProxy) {
internalServer.set("trust proxy", trustProxy);
}
internalServer.use(helmet()); internalServer.use(helmet());
internalServer.use(cors()); internalServer.use(cors());
internalServer.use(stripDuplicateSesions); internalServer.use(stripDuplicateSesions);

View File

@@ -48,5 +48,5 @@ export const tierMatrix: Record<TierFeature, Tier[]> = {
"enterprise" "enterprise"
], ],
[TierFeature.AutoProvisioning]: ["tier1", "tier3", "enterprise"], [TierFeature.AutoProvisioning]: ["tier1", "tier3", "enterprise"],
[TierFeature.SshPam]: ["enterprise"] [TierFeature.SshPam]: ["tier1", "tier3", "enterprise"]
}; };

View File

@@ -12,7 +12,7 @@ import {
import { FeatureId, getFeatureMeterId } from "./features"; import { FeatureId, getFeatureMeterId } from "./features";
import logger from "@server/logger"; import logger from "@server/logger";
import { build } from "@server/build"; import { build } from "@server/build";
import cache from "@server/lib/cache"; import cache from "#dynamic/lib/cache";
export function noop() { export function noop() {
if (build !== "saas") { if (build !== "saas") {
@@ -230,7 +230,7 @@ export class UsageService {
const orgIdToUse = await this.getBillingOrg(orgId); const orgIdToUse = await this.getBillingOrg(orgId);
const cacheKey = `customer_${orgIdToUse}_${featureId}`; const cacheKey = `customer_${orgIdToUse}_${featureId}`;
const cached = cache.get<string>(cacheKey); const cached = await cache.get<string>(cacheKey);
if (cached) { if (cached) {
return cached; return cached;
@@ -253,7 +253,7 @@ export class UsageService {
const customerId = customer.customerId; const customerId = customer.customerId;
// Cache the result // Cache the result
cache.set(cacheKey, customerId, 300); // 5 minute TTL await cache.set(cacheKey, customerId, 300); // 5 minute TTL
return customerId; return customerId;
} catch (error) { } catch (error) {

View File

@@ -11,7 +11,7 @@ import {
userSiteResources userSiteResources
} from "@server/db"; } from "@server/db";
import { sites } from "@server/db"; import { sites } from "@server/db";
import { eq, and, ne, inArray } from "drizzle-orm"; import { eq, and, ne, inArray, or } from "drizzle-orm";
import { Config } from "./types"; import { Config } from "./types";
import logger from "@server/logger"; import logger from "@server/logger";
import { getNextAvailableAliasAddress } from "../ip"; import { getNextAvailableAliasAddress } from "../ip";
@@ -142,7 +142,10 @@ export async function updateClientResources(
.innerJoin(userOrgs, eq(users.userId, userOrgs.userId)) .innerJoin(userOrgs, eq(users.userId, userOrgs.userId))
.where( .where(
and( and(
inArray(users.username, resourceData.users), or(
inArray(users.username, resourceData.users),
inArray(users.email, resourceData.users)
),
eq(userOrgs.orgId, orgId) eq(userOrgs.orgId, orgId)
) )
); );
@@ -276,7 +279,10 @@ export async function updateClientResources(
.innerJoin(userOrgs, eq(users.userId, userOrgs.userId)) .innerJoin(userOrgs, eq(users.userId, userOrgs.userId))
.where( .where(
and( and(
inArray(users.username, resourceData.users), or(
inArray(users.username, resourceData.users),
inArray(users.email, resourceData.users)
),
eq(userOrgs.orgId, orgId) eq(userOrgs.orgId, orgId)
) )
); );

View File

@@ -212,7 +212,10 @@ export async function updateProxyResources(
} else { } else {
// Update existing resource // Update existing resource
const isLicensed = await isLicensedOrSubscribed(orgId, tierMatrix.maintencePage); const isLicensed = await isLicensedOrSubscribed(
orgId,
tierMatrix.maintencePage
);
if (!isLicensed) { if (!isLicensed) {
resourceData.maintenance = undefined; resourceData.maintenance = undefined;
} }
@@ -590,7 +593,10 @@ export async function updateProxyResources(
existingRule.action !== getRuleAction(rule.action) || existingRule.action !== getRuleAction(rule.action) ||
existingRule.match !== rule.match.toUpperCase() || existingRule.match !== rule.match.toUpperCase() ||
existingRule.value !== existingRule.value !==
getRuleValue(rule.match.toUpperCase(), rule.value) || getRuleValue(
rule.match.toUpperCase(),
rule.value
) ||
existingRule.priority !== intendedPriority existingRule.priority !== intendedPriority
) { ) {
validateRule(rule); validateRule(rule);
@@ -648,7 +654,10 @@ export async function updateProxyResources(
); );
} }
const isLicensed = await isLicensedOrSubscribed(orgId, tierMatrix.maintencePage); const isLicensed = await isLicensedOrSubscribed(
orgId,
tierMatrix.maintencePage
);
if (!isLicensed) { if (!isLicensed) {
resourceData.maintenance = undefined; resourceData.maintenance = undefined;
} }
@@ -935,7 +944,12 @@ async function syncUserResources(
.select() .select()
.from(users) .from(users)
.innerJoin(userOrgs, eq(users.userId, userOrgs.userId)) .innerJoin(userOrgs, eq(users.userId, userOrgs.userId))
.where(and(eq(users.username, username), eq(userOrgs.orgId, orgId))) .where(
and(
or(eq(users.username, username), eq(users.email, username)),
eq(userOrgs.orgId, orgId)
)
)
.limit(1); .limit(1);
if (!user) { if (!user) {

View File

@@ -69,7 +69,7 @@ export const AuthSchema = z.object({
.refine((roles) => !roles.includes("Admin"), { .refine((roles) => !roles.includes("Admin"), {
error: "Admin role cannot be included in sso-roles" error: "Admin role cannot be included in sso-roles"
}), }),
"sso-users": z.array(z.email()).optional().default([]), "sso-users": z.array(z.string()).optional().default([]),
"whitelist-users": z.array(z.email()).optional().default([]), "whitelist-users": z.array(z.email()).optional().default([]),
"auto-login-idp": z.int().positive().optional() "auto-login-idp": z.int().positive().optional()
}); });
@@ -335,7 +335,7 @@ export const ClientResourceSchema = z
.refine((roles) => !roles.includes("Admin"), { .refine((roles) => !roles.includes("Admin"), {
error: "Admin role cannot be included in roles" error: "Admin role cannot be included in roles"
}), }),
users: z.array(z.email()).optional().default([]), users: z.array(z.string()).optional().default([]),
machines: z.array(z.string()).optional().default([]) machines: z.array(z.string()).optional().default([])
}) })
.refine( .refine(

View File

@@ -1,9 +1,9 @@
import NodeCache from "node-cache"; import NodeCache from "node-cache";
import logger from "@server/logger"; import logger from "@server/logger";
// Create cache with maxKeys limit to prevent memory leaks // Create local cache with maxKeys limit to prevent memory leaks
// With ~10k requests/day and 5min TTL, 10k keys should be more than sufficient // With ~10k requests/day and 5min TTL, 10k keys should be more than sufficient
export const cache = new NodeCache({ export const localCache = new NodeCache({
stdTTL: 3600, stdTTL: 3600,
checkperiod: 120, checkperiod: 120,
maxKeys: 10000 maxKeys: 10000
@@ -11,10 +11,151 @@ export const cache = new NodeCache({
// Log cache statistics periodically for monitoring // Log cache statistics periodically for monitoring
setInterval(() => { setInterval(() => {
const stats = cache.getStats(); const stats = localCache.getStats();
logger.debug( logger.debug(
`Cache stats - Keys: ${stats.keys}, Hits: ${stats.hits}, Misses: ${stats.misses}, Hit rate: ${stats.hits > 0 ? ((stats.hits / (stats.hits + stats.misses)) * 100).toFixed(2) : 0}%` `Local cache stats - Keys: ${stats.keys}, Hits: ${stats.hits}, Misses: ${stats.misses}, Hit rate: ${stats.hits > 0 ? ((stats.hits / (stats.hits + stats.misses)) * 100).toFixed(2) : 0}%`
); );
}, 300000); // Every 5 minutes }, 300000); // Every 5 minutes
/**
 * Async cache wrapper for the open-source build.
 *
 * This variant always uses the in-process NodeCache instance above and
 * never talks to Redis. The async API mirrors the Redis-capable variant
 * (see server/private/lib/cache.ts) so callers can `await` cache
 * operations regardless of which implementation is bundled.
 */
class AdaptiveCache {
    /**
     * Set a value in the cache
     * @param key - Cache key
     * @param value - Value to cache (stored by reference in the local cache)
     * @param ttl - Time to live in seconds (0 = no expiration)
     * @returns boolean indicating success
     */
    async set(key: string, value: any, ttl?: number): Promise<boolean> {
        // Both ttl === 0 and ttl === undefined end up as node-cache ttl 0,
        // i.e. "no expiration" (this bypasses the stdTTL default above).
        const effectiveTtl = ttl === 0 ? undefined : ttl;

        // Use local cache as fallback or primary
        // NOTE(review): node-cache throws when maxKeys is exceeded rather
        // than returning false — confirm callers tolerate that.
        const success = localCache.set(key, value, effectiveTtl || 0);
        if (success) {
            logger.debug(`Set key in local cache: ${key}`);
        }
        return success;
    }

    /**
     * Get a value from the cache
     * @param key - Cache key
     * @returns The cached value or undefined if not found
     */
    async get<T = any>(key: string): Promise<T | undefined> {
        // Use local cache as fallback or primary
        const value = localCache.get<T>(key);
        if (value !== undefined) {
            logger.debug(`Cache hit in local cache: ${key}`);
        } else {
            logger.debug(`Cache miss in local cache: ${key}`);
        }
        return value;
    }

    /**
     * Delete a value from the cache
     * @param key - Cache key or array of keys
     * @returns Number of deleted entries
     */
    async del(key: string | string[]): Promise<number> {
        // Normalize to an array so single keys and batches share one path.
        const keys = Array.isArray(key) ? key : [key];
        let deletedCount = 0;

        // Use local cache as fallback or primary
        for (const k of keys) {
            const success = localCache.del(k);
            if (success > 0) {
                deletedCount++;
                logger.debug(`Deleted key from local cache: ${k}`);
            }
        }
        return deletedCount;
    }

    /**
     * Check if a key exists in the cache
     * @param key - Cache key
     * @returns boolean indicating if key exists
     */
    async has(key: string): Promise<boolean> {
        // Use local cache as fallback or primary
        return localCache.has(key);
    }

    /**
     * Get multiple values from the cache
     * @param keys - Array of cache keys
     * @returns Array of values (undefined for missing keys)
     */
    async mget<T = any>(keys: string[]): Promise<(T | undefined)[]> {
        // Use local cache as fallback or primary
        return keys.map((key) => localCache.get<T>(key));
    }

    /**
     * Flush all keys from the cache
     */
    async flushAll(): Promise<void> {
        localCache.flushAll();
        logger.debug("Flushed local cache");
    }

    /**
     * Get cache statistics (local cache only — no other backend exists in
     * this build)
     */
    getStats() {
        return localCache.getStats();
    }

    /**
     * Get the current cache backend being used
     * @returns Always "local" in this build; Redis support lives in the
     * private variant
     */
    getCurrentBackend(): "redis" | "local" {
        return "local";
    }

    /**
     * Take a key from the cache and delete it
     * @param key - Cache key
     * @returns The value or undefined if not found
     */
    async take<T = any>(key: string): Promise<T | undefined> {
        const value = await this.get<T>(key);
        if (value !== undefined) {
            await this.del(key);
        }
        return value;
    }

    /**
     * Get TTL (time to live) for a key
     * @param key - Cache key
     * @returns TTL in seconds, 0 if no expiration, -1 if key doesn't exist
     */
    getTtl(key: string): number {
        // node-cache returns an expiry timestamp in ms, or undefined for a
        // missing key; convert to remaining whole seconds.
        const ttl = localCache.getTtl(key);
        if (ttl === undefined) {
            return -1;
        }
        return Math.max(0, Math.floor((ttl - Date.now()) / 1000));
    }

    /**
     * Get all keys from the cache (local cache only in this build)
     */
    keys(): string[] {
        return localCache.keys();
    }
}
// Export singleton instance
export const cache = new AdaptiveCache();
export default cache; export default cache;

View File

@@ -2,7 +2,7 @@ import path from "path";
import { fileURLToPath } from "url"; import { fileURLToPath } from "url";
// This is a placeholder value replaced by the build process // This is a placeholder value replaced by the build process
export const APP_VERSION = "1.15.4"; export const APP_VERSION = "1.16.0";
export const __FILENAME = fileURLToPath(import.meta.url); export const __FILENAME = fileURLToPath(import.meta.url);
export const __DIRNAME = path.dirname(__FILENAME); export const __DIRNAME = path.dirname(__FILENAME);

View File

@@ -189,6 +189,46 @@ export const configSchema = z
.prefault({}) .prefault({})
}) })
.optional(), .optional(),
postgres_logs: z
.object({
connection_string: z
.string()
.optional()
.transform(getEnvOrYaml("POSTGRES_LOGS_CONNECTION_STRING")),
replicas: z
.array(
z.object({
connection_string: z.string()
})
)
.optional(),
pool: z
.object({
max_connections: z
.number()
.positive()
.optional()
.default(20),
max_replica_connections: z
.number()
.positive()
.optional()
.default(10),
idle_timeout_ms: z
.number()
.positive()
.optional()
.default(30000),
connection_timeout_ms: z
.number()
.positive()
.optional()
.default(5000)
})
.optional()
.prefault({})
})
.optional(),
traefik: z traefik: z
.object({ .object({
http_entrypoint: z.string().optional().default("web"), http_entrypoint: z.string().optional().default("web"),

View File

@@ -1,16 +1,3 @@
/*
* This file is part of a proprietary work.
*
* Copyright (c) 2025 Fossorial, Inc.
* All rights reserved.
*
* This file is licensed under the Fossorial Commercial License.
* You may not use this file except in compliance with the License.
* Unauthorized use, copying, modification, or distribution is strictly prohibited.
*
* This file is not licensed under the AGPLv3.
*/
import * as crypto from "crypto"; import * as crypto from "crypto";
/** /**

View File

@@ -477,7 +477,10 @@ export async function getTraefikConfig(
// TODO: HOW TO HANDLE ^^^^^^ BETTER // TODO: HOW TO HANDLE ^^^^^^ BETTER
const anySitesOnline = targets.some( const anySitesOnline = targets.some(
(target) => target.site.online (target) =>
target.site.online ||
target.site.type === "local" ||
target.site.type === "wireguard"
); );
return ( return (
@@ -490,7 +493,7 @@ export async function getTraefikConfig(
if (target.health == "unhealthy") { if (target.health == "unhealthy") {
return false; return false;
} }
// If any sites are online, exclude offline sites // If any sites are online, exclude offline sites
if (anySitesOnline && !target.site.online) { if (anySitesOnline && !target.site.online) {
return false; return false;
@@ -605,7 +608,10 @@ export async function getTraefikConfig(
servers: (() => { servers: (() => {
// Check if any sites are online // Check if any sites are online
const anySitesOnline = targets.some( const anySitesOnline = targets.some(
(target) => target.site.online (target) =>
target.site.online ||
target.site.type === "local" ||
target.site.type === "wireguard"
); );
return targets return targets
@@ -613,7 +619,7 @@ export async function getTraefikConfig(
if (!target.enabled) { if (!target.enabled) {
return false; return false;
} }
// If any sites are online, exclude offline sites // If any sites are online, exclude offline sites
if (anySitesOnline && !target.site.online) { if (anySitesOnline && !target.site.online) {
return false; return false;

View File

@@ -14,3 +14,4 @@ export * from "./verifyApiKeyApiKeyAccess";
export * from "./verifyApiKeyClientAccess"; export * from "./verifyApiKeyClientAccess";
export * from "./verifyApiKeySiteResourceAccess"; export * from "./verifyApiKeySiteResourceAccess";
export * from "./verifyApiKeyIdpAccess"; export * from "./verifyApiKeyIdpAccess";
export * from "./verifyApiKeyDomainAccess";

View File

@@ -0,0 +1,90 @@
import { Request, Response, NextFunction } from "express";
import { db, domains, orgDomains, apiKeyOrg } from "@server/db";
import { and, eq } from "drizzle-orm";
import createHttpError from "http-errors";
import HttpCode from "@server/types/HttpCode";
/**
 * Express middleware that authorizes an API key to act on a domain.
 *
 * Resolves the domain ID from route params, body, or query string, then
 * verifies that (a) the domain is linked to the org in the route and
 * (b) the API key has access to that org. Root keys bypass both checks.
 * Populates req.apiKeyOrg for downstream handlers when it is not already
 * set. `res` is unused but required by the Express middleware signature.
 */
export async function verifyApiKeyDomainAccess(
    req: Request,
    res: Response,
    next: NextFunction
) {
    try {
        const apiKey = req.apiKey;
        const domainId =
            req.params.domainId || req.body.domainId || req.query.domainId;
        const orgId = req.params.orgId;

        if (!apiKey) {
            return next(
                createHttpError(HttpCode.UNAUTHORIZED, "Key not authenticated")
            );
        }

        if (!domainId) {
            return next(
                createHttpError(HttpCode.BAD_REQUEST, "Invalid domain ID")
            );
        }

        if (apiKey.isRoot) {
            // Root keys can access any domain in any org
            return next();
        }

        // Fix: without this guard a route missing the :orgId param would
        // reach the queries below with an undefined binding and surface as
        // a generic 500 instead of a client error.
        if (!orgId) {
            return next(
                createHttpError(HttpCode.BAD_REQUEST, "Invalid organization ID")
            );
        }

        // Verify domain exists and belongs to the organization
        const [domain] = await db
            .select()
            .from(domains)
            .innerJoin(orgDomains, eq(orgDomains.domainId, domains.domainId))
            .where(
                and(
                    eq(orgDomains.domainId, domainId),
                    eq(orgDomains.orgId, orgId)
                )
            )
            .limit(1);

        if (!domain) {
            return next(
                createHttpError(
                    HttpCode.NOT_FOUND,
                    `Domain with ID ${domainId} not found in organization ${orgId}`
                )
            );
        }

        // Verify the API key has access to this organization; cache the row
        // on the request so later middleware doesn't re-query.
        if (!req.apiKeyOrg) {
            const apiKeyOrgRes = await db
                .select()
                .from(apiKeyOrg)
                .where(
                    and(
                        eq(apiKeyOrg.apiKeyId, apiKey.apiKeyId),
                        eq(apiKeyOrg.orgId, orgId)
                    )
                )
                .limit(1);
            req.apiKeyOrg = apiKeyOrgRes[0];
        }

        if (!req.apiKeyOrg) {
            return next(
                createHttpError(
                    HttpCode.FORBIDDEN,
                    "Key does not have access to this organization"
                )
            );
        }

        return next();
    } catch (error) {
        // Unexpected failures (e.g. DB errors) are masked as a 500 without
        // leaking internals to the caller.
        return next(
            createHttpError(
                HttpCode.INTERNAL_SERVER_ERROR,
                "Error verifying domain access"
            )
        );
    }
}

View File

@@ -5,17 +5,20 @@ export const registry = new OpenAPIRegistry();
export enum OpenAPITags { export enum OpenAPITags {
Site = "Site", Site = "Site",
Org = "Organization", Org = "Organization",
Resource = "Resource", PublicResource = "Public Resource",
PrivateResource = "Private Resource",
Role = "Role", Role = "Role",
User = "User", User = "User",
Invitation = "Invitation", Invitation = "User Invitation",
Target = "Target", Target = "Resource Target",
Rule = "Rule", Rule = "Rule",
AccessToken = "Access Token", AccessToken = "Access Token",
Idp = "Identity Provider", GlobalIdp = "Identity Provider (Global)",
OrgIdp = "Identity Provider (Organization Only)",
Client = "Client", Client = "Client",
ApiKey = "API Key", ApiKey = "API Key",
Domain = "Domain", Domain = "Domain",
Blueprint = "Blueprint", Blueprint = "Blueprint",
Ssh = "SSH" Ssh = "SSH",
Logs = "Logs"
} }

266
server/private/lib/cache.ts Normal file
View File

@@ -0,0 +1,266 @@
import NodeCache from "node-cache";
import logger from "@server/logger";
import { redisManager } from "@server/private/lib/redis";
// Create local cache with maxKeys limit to prevent memory leaks
// With ~10k requests/day and 5min TTL, 10k keys should be more than sufficient
export const localCache = new NodeCache({
    stdTTL: 3600, // default TTL in seconds for keys set without an explicit ttl
    checkperiod: 120, // seconds between expired-key eviction sweeps
    maxKeys: 10000
});

// Log cache statistics periodically for monitoring
// (the hit-rate ternary guards against division by zero before any hits)
setInterval(() => {
    const stats = localCache.getStats();
    logger.debug(
        `Local cache stats - Keys: ${stats.keys}, Hits: ${stats.hits}, Misses: ${stats.misses}, Hit rate: ${stats.hits > 0 ? ((stats.hits / (stats.hits + stats.misses)) * 100).toFixed(2) : 0}%`
    );
}, 300000); // Every 5 minutes
/**
 * Adaptive cache that uses Redis when available in multi-node environments,
 * otherwise falls back to local memory cache for single-node deployments.
 *
 * Every Redis operation degrades gracefully: on error or failure the same
 * operation is retried against the in-process NodeCache instance above.
 */
class AdaptiveCache {
    // Redis is used only when it is both enabled and currently healthy;
    // otherwise all operations go straight to the local cache.
    private useRedis(): boolean {
        return redisManager.isRedisEnabled() && redisManager.getHealthStatus().isHealthy;
    }

    /**
     * Set a value in the cache
     * @param key - Cache key
     * @param value - Value to cache (will be JSON stringified for Redis)
     * @param ttl - Time to live in seconds (0 = no expiration)
     * @returns boolean indicating success
     */
    async set(key: string, value: any, ttl?: number): Promise<boolean> {
        // Both ttl === 0 and ttl === undefined mean "no expiration" for
        // Redis; for the local path `effectiveTtl || 0` has the same effect.
        const effectiveTtl = ttl === 0 ? undefined : ttl;

        if (this.useRedis()) {
            try {
                const serialized = JSON.stringify(value);
                const success = await redisManager.set(key, serialized, effectiveTtl);
                if (success) {
                    logger.debug(`Set key in Redis: ${key}`);
                    return true;
                }
                // Redis failed, fall through to local cache
                logger.debug(`Redis set failed for key ${key}, falling back to local cache`);
            } catch (error) {
                logger.error(`Redis set error for key ${key}:`, error);
                // Fall through to local cache
            }
        }

        // Use local cache as fallback or primary
        const success = localCache.set(key, value, effectiveTtl || 0);
        if (success) {
            logger.debug(`Set key in local cache: ${key}`);
        }
        return success;
    }

    /**
     * Get a value from the cache
     * @param key - Cache key
     * @returns The cached value or undefined if not found
     */
    async get<T = any>(key: string): Promise<T | undefined> {
        if (this.useRedis()) {
            try {
                const value = await redisManager.get(key);
                if (value !== null) {
                    logger.debug(`Cache hit in Redis: ${key}`);
                    return JSON.parse(value) as T;
                }
                // A Redis miss is authoritative — do NOT fall through to the
                // local cache, which could hold a stale value.
                logger.debug(`Cache miss in Redis: ${key}`);
                return undefined;
            } catch (error) {
                logger.error(`Redis get error for key ${key}:`, error);
                // Fall through to local cache
            }
        }

        // Use local cache as fallback or primary
        const value = localCache.get<T>(key);
        if (value !== undefined) {
            logger.debug(`Cache hit in local cache: ${key}`);
        } else {
            logger.debug(`Cache miss in local cache: ${key}`);
        }
        return value;
    }

    /**
     * Delete a value from the cache
     * @param key - Cache key or array of keys
     * @returns Number of deleted entries
     */
    async del(key: string | string[]): Promise<number> {
        const keys = Array.isArray(key) ? key : [key];
        let deletedCount = 0;

        if (this.useRedis()) {
            try {
                for (const k of keys) {
                    const success = await redisManager.del(k);
                    if (success) {
                        deletedCount++;
                        logger.debug(`Deleted key from Redis: ${k}`);
                    }
                }
                if (deletedCount === keys.length) {
                    return deletedCount;
                }
                // Some Redis deletes failed, fall through to local cache
                // NOTE(review): on this partial-failure path the Redis count
                // is kept and local deletions are added on top, so a key
                // present in both stores can be counted twice — confirm
                // callers only treat the count as "> 0".
                logger.debug(`Some Redis deletes failed, falling back to local cache`);
            } catch (error) {
                logger.error(`Redis del error for keys ${keys.join(", ")}:`, error);
                // Fall through to local cache
                deletedCount = 0;
            }
        }

        // Use local cache as fallback or primary
        for (const k of keys) {
            const success = localCache.del(k);
            if (success > 0) {
                deletedCount++;
                logger.debug(`Deleted key from local cache: ${k}`);
            }
        }
        return deletedCount;
    }

    /**
     * Check if a key exists in the cache
     * @param key - Cache key
     * @returns boolean indicating if key exists
     */
    async has(key: string): Promise<boolean> {
        if (this.useRedis()) {
            try {
                // Implemented via GET, so the full value is transferred just
                // to test existence.
                const value = await redisManager.get(key);
                return value !== null;
            } catch (error) {
                logger.error(`Redis has error for key ${key}:`, error);
                // Fall through to local cache
            }
        }

        // Use local cache as fallback or primary
        return localCache.has(key);
    }

    /**
     * Get multiple values from the cache
     * @param keys - Array of cache keys
     * @returns Array of values (undefined for missing keys)
     */
    async mget<T = any>(keys: string[]): Promise<(T | undefined)[]> {
        if (this.useRedis()) {
            try {
                // Sequential round-trips, one per key (no Redis MGET used).
                const results: (T | undefined)[] = [];
                for (const key of keys) {
                    const value = await redisManager.get(key);
                    if (value !== null) {
                        results.push(JSON.parse(value) as T);
                    } else {
                        results.push(undefined);
                    }
                }
                return results;
            } catch (error) {
                logger.error(`Redis mget error:`, error);
                // Fall through to local cache
            }
        }

        // Use local cache as fallback or primary
        return keys.map((key) => localCache.get<T>(key));
    }

    /**
     * Flush all keys from the cache
     */
    async flushAll(): Promise<void> {
        if (this.useRedis()) {
            logger.warn("Adaptive cache flushAll called - Redis flush not implemented, only local cache will be flushed");
        }
        localCache.flushAll();
        logger.debug("Flushed local cache");
    }

    /**
     * Get cache statistics
     * Note: Only returns local cache stats, Redis stats are not included
     */
    getStats() {
        return localCache.getStats();
    }

    /**
     * Get the current cache backend being used
     * @returns "redis" if Redis is available and healthy, "local" otherwise
     */
    getCurrentBackend(): "redis" | "local" {
        return this.useRedis() ? "redis" : "local";
    }

    /**
     * Take a key from the cache and delete it
     * @param key - Cache key
     * @returns The value or undefined if not found
     */
    async take<T = any>(key: string): Promise<T | undefined> {
        const value = await this.get<T>(key);
        if (value !== undefined) {
            await this.del(key);
        }
        return value;
    }

    /**
     * Get TTL (time to live) for a key
     * @param key - Cache key
     * @returns TTL in seconds, 0 if no expiration, -1 if key doesn't exist
     */
    getTtl(key: string): number {
        // Note: This only works for local cache, Redis TTL is not supported
        if (this.useRedis()) {
            logger.warn(`getTtl called for key ${key} but Redis TTL lookup is not implemented`);
        }
        const ttl = localCache.getTtl(key);
        if (ttl === undefined) {
            return -1;
        }
        return Math.max(0, Math.floor((ttl - Date.now()) / 1000));
    }

    /**
     * Get all keys from the cache
     * Note: Only returns local cache keys, Redis keys are not included
     */
    keys(): string[] {
        if (this.useRedis()) {
            logger.warn("keys() called but Redis keys are not included, only local cache keys returned");
        }
        return localCache.keys();
    }
}

// Export singleton instance
export const cache = new AdaptiveCache();
export default cache;

View File

@@ -15,9 +15,8 @@ import config from "./config";
import { certificates, db } from "@server/db"; import { certificates, db } from "@server/db";
import { and, eq, isNotNull, or, inArray, sql } from "drizzle-orm"; import { and, eq, isNotNull, or, inArray, sql } from "drizzle-orm";
import { decryptData } from "@server/lib/encryption"; import { decryptData } from "@server/lib/encryption";
import * as fs from "fs";
import logger from "@server/logger"; import logger from "@server/logger";
import cache from "@server/lib/cache"; import cache from "#private/lib/cache";
let encryptionKeyHex = ""; let encryptionKeyHex = "";
let encryptionKey: Buffer; let encryptionKey: Buffer;
@@ -55,7 +54,7 @@ export async function getValidCertificatesForDomains(
if (useCache) { if (useCache) {
for (const domain of domains) { for (const domain of domains) {
const cacheKey = `cert:${domain}`; const cacheKey = `cert:${domain}`;
const cachedCert = cache.get<CertificateResult>(cacheKey); const cachedCert = await cache.get<CertificateResult>(cacheKey);
if (cachedCert) { if (cachedCert) {
finalResults.push(cachedCert); // Valid cache hit finalResults.push(cachedCert); // Valid cache hit
} else { } else {
@@ -169,7 +168,7 @@ export async function getValidCertificatesForDomains(
// Add to cache for future requests, using the *requested domain* as the key // Add to cache for future requests, using the *requested domain* as the key
if (useCache) { if (useCache) {
const cacheKey = `cert:${domain}`; const cacheKey = `cert:${domain}`;
cache.set(cacheKey, resultCert, 180); await cache.set(cacheKey, resultCert, 180);
} }
} }
} }

View File

@@ -14,6 +14,9 @@
import { config } from "@server/lib/config"; import { config } from "@server/lib/config";
import logger from "@server/logger"; import logger from "@server/logger";
import { redis } from "#private/lib/redis"; import { redis } from "#private/lib/redis";
import { v4 as uuidv4 } from "uuid";
const instanceId = uuidv4();
export class LockManager { export class LockManager {
/** /**
@@ -33,7 +36,7 @@ export class LockManager {
} }
const lockValue = `${ const lockValue = `${
config.getRawConfig().gerbil.exit_node_name instanceId
}:${Date.now()}`; }:${Date.now()}`;
const redisKey = `lock:${lockKey}`; const redisKey = `lock:${lockKey}`;
@@ -52,7 +55,7 @@ export class LockManager {
if (result === "OK") { if (result === "OK") {
logger.debug( logger.debug(
`Lock acquired: ${lockKey} by ${ `Lock acquired: ${lockKey} by ${
config.getRawConfig().gerbil.exit_node_name instanceId
}` }`
); );
return true; return true;
@@ -63,14 +66,14 @@ export class LockManager {
if ( if (
existingValue && existingValue &&
existingValue.startsWith( existingValue.startsWith(
`${config.getRawConfig().gerbil.exit_node_name}:` `${instanceId}:`
) )
) { ) {
// Extend the lock TTL since it's the same worker // Extend the lock TTL since it's the same worker
await redis.pexpire(redisKey, ttlMs); await redis.pexpire(redisKey, ttlMs);
logger.debug( logger.debug(
`Lock extended: ${lockKey} by ${ `Lock extended: ${lockKey} by ${
config.getRawConfig().gerbil.exit_node_name instanceId
}` }`
); );
return true; return true;
@@ -116,7 +119,7 @@ export class LockManager {
local key = KEYS[1] local key = KEYS[1]
local worker_prefix = ARGV[1] local worker_prefix = ARGV[1]
local current_value = redis.call('GET', key) local current_value = redis.call('GET', key)
if current_value and string.find(current_value, worker_prefix, 1, true) == 1 then if current_value and string.find(current_value, worker_prefix, 1, true) == 1 then
return redis.call('DEL', key) return redis.call('DEL', key)
else else
@@ -129,19 +132,19 @@ export class LockManager {
luaScript, luaScript,
1, 1,
redisKey, redisKey,
`${config.getRawConfig().gerbil.exit_node_name}:` `${instanceId}:`
)) as number; )) as number;
if (result === 1) { if (result === 1) {
logger.debug( logger.debug(
`Lock released: ${lockKey} by ${ `Lock released: ${lockKey} by ${
config.getRawConfig().gerbil.exit_node_name instanceId
}` }`
); );
} else { } else {
logger.warn( logger.warn(
`Lock not released - not owned by worker: ${lockKey} by ${ `Lock not released - not owned by worker: ${lockKey} by ${
config.getRawConfig().gerbil.exit_node_name instanceId
}` }`
); );
} }
@@ -198,7 +201,7 @@ export class LockManager {
const ownedByMe = const ownedByMe =
exists && exists &&
value!.startsWith( value!.startsWith(
`${config.getRawConfig().gerbil.exit_node_name}:` `${instanceId}:`
); );
const owner = exists ? value!.split(":")[0] : undefined; const owner = exists ? value!.split(":")[0] : undefined;
@@ -233,7 +236,7 @@ export class LockManager {
local worker_prefix = ARGV[1] local worker_prefix = ARGV[1]
local ttl = tonumber(ARGV[2]) local ttl = tonumber(ARGV[2])
local current_value = redis.call('GET', key) local current_value = redis.call('GET', key)
if current_value and string.find(current_value, worker_prefix, 1, true) == 1 then if current_value and string.find(current_value, worker_prefix, 1, true) == 1 then
return redis.call('PEXPIRE', key, ttl) return redis.call('PEXPIRE', key, ttl)
else else
@@ -246,14 +249,14 @@ export class LockManager {
luaScript, luaScript,
1, 1,
redisKey, redisKey,
`${config.getRawConfig().gerbil.exit_node_name}:`, `${instanceId}:`,
ttlMs.toString() ttlMs.toString()
)) as number; )) as number;
if (result === 1) { if (result === 1) {
logger.debug( logger.debug(
`Lock extended: ${lockKey} by ${ `Lock extended: ${lockKey} by ${
config.getRawConfig().gerbil.exit_node_name instanceId
} for ${ttlMs}ms` } for ${ttlMs}ms`
); );
return true; return true;
@@ -356,7 +359,7 @@ export class LockManager {
(value) => (value) =>
value && value &&
value.startsWith( value.startsWith(
`${config.getRawConfig().gerbil.exit_node_name}:` `${instanceId}:`
) )
).length; ).length;
} }

View File

@@ -11,17 +11,17 @@
* This file is not licensed under the AGPLv3. * This file is not licensed under the AGPLv3.
*/ */
import { accessAuditLog, db, orgs } from "@server/db"; import { accessAuditLog, logsDb, db, orgs } from "@server/db";
import { getCountryCodeForIp } from "@server/lib/geoip"; import { getCountryCodeForIp } from "@server/lib/geoip";
import logger from "@server/logger"; import logger from "@server/logger";
import { and, eq, lt } from "drizzle-orm"; import { and, eq, lt } from "drizzle-orm";
import cache from "@server/lib/cache"; import cache from "#private/lib/cache";
import { calculateCutoffTimestamp } from "@server/lib/cleanupLogs"; import { calculateCutoffTimestamp } from "@server/lib/cleanupLogs";
import { stripPortFromHost } from "@server/lib/ip"; import { stripPortFromHost } from "@server/lib/ip";
async function getAccessDays(orgId: string): Promise<number> { async function getAccessDays(orgId: string): Promise<number> {
// check cache first // check cache first
const cached = cache.get<number>(`org_${orgId}_accessDays`); const cached = await cache.get<number>(`org_${orgId}_accessDays`);
if (cached !== undefined) { if (cached !== undefined) {
return cached; return cached;
} }
@@ -39,7 +39,7 @@ async function getAccessDays(orgId: string): Promise<number> {
} }
// store the result in cache // store the result in cache
cache.set( await cache.set(
`org_${orgId}_accessDays`, `org_${orgId}_accessDays`,
org.settingsLogRetentionDaysAction, org.settingsLogRetentionDaysAction,
300 300
@@ -52,7 +52,7 @@ export async function cleanUpOldLogs(orgId: string, retentionDays: number) {
const cutoffTimestamp = calculateCutoffTimestamp(retentionDays); const cutoffTimestamp = calculateCutoffTimestamp(retentionDays);
try { try {
await db await logsDb
.delete(accessAuditLog) .delete(accessAuditLog)
.where( .where(
and( and(
@@ -124,7 +124,7 @@ export async function logAccessAudit(data: {
? await getCountryCodeFromIp(data.requestIp) ? await getCountryCodeFromIp(data.requestIp)
: undefined; : undefined;
await db.insert(accessAuditLog).values({ await logsDb.insert(accessAuditLog).values({
timestamp: timestamp, timestamp: timestamp,
orgId: data.orgId, orgId: data.orgId,
actorType, actorType,
@@ -146,14 +146,14 @@ export async function logAccessAudit(data: {
async function getCountryCodeFromIp(ip: string): Promise<string | undefined> { async function getCountryCodeFromIp(ip: string): Promise<string | undefined> {
const geoIpCacheKey = `geoip_access:${ip}`; const geoIpCacheKey = `geoip_access:${ip}`;
let cachedCountryCode: string | undefined = cache.get(geoIpCacheKey); let cachedCountryCode: string | undefined = await cache.get(geoIpCacheKey);
if (!cachedCountryCode) { if (!cachedCountryCode) {
cachedCountryCode = await getCountryCodeForIp(ip); // do it locally cachedCountryCode = await getCountryCodeForIp(ip); // do it locally
// Only cache successful lookups to avoid filling cache with undefined values // Only cache successful lookups to avoid filling cache with undefined values
if (cachedCountryCode) { if (cachedCountryCode) {
// Cache for longer since IP geolocation doesn't change frequently // Cache for longer since IP geolocation doesn't change frequently
cache.set(geoIpCacheKey, cachedCountryCode, 300); // 5 minutes await cache.set(geoIpCacheKey, cachedCountryCode, 300); // 5 minutes
} }
} }

View File

@@ -72,15 +72,15 @@ export const privateConfigSchema = z.object({
db: z.int().nonnegative().optional().default(0) db: z.int().nonnegative().optional().default(0)
}) })
) )
.optional(),
tls: z
.object({
rejectUnauthorized: z
.boolean()
.optional()
.default(true)
})
.optional() .optional()
// tls: z
// .object({
// reject_unauthorized: z
// .boolean()
// .optional()
// .default(true)
// })
// .optional()
}) })
.optional(), .optional(),
gerbil: z gerbil: z

View File

@@ -108,11 +108,15 @@ class RedisManager {
port: redisConfig.port!, port: redisConfig.port!,
password: redisConfig.password, password: redisConfig.password,
db: redisConfig.db db: redisConfig.db
// tls: {
// rejectUnauthorized:
// redisConfig.tls?.reject_unauthorized || false
// }
}; };
// Enable TLS if configured (required for AWS ElastiCache in-transit encryption)
if (redisConfig.tls) {
opts.tls = {
rejectUnauthorized: redisConfig.tls.rejectUnauthorized ?? true
};
}
return opts; return opts;
} }
@@ -130,11 +134,15 @@ class RedisManager {
port: replica.port!, port: replica.port!,
password: replica.password, password: replica.password,
db: replica.db || redisConfig.db db: replica.db || redisConfig.db
// tls: {
// rejectUnauthorized:
// replica.tls?.reject_unauthorized || false
// }
}; };
// Enable TLS if configured (required for AWS ElastiCache in-transit encryption)
if (redisConfig.tls) {
opts.tls = {
rejectUnauthorized: redisConfig.tls.rejectUnauthorized ?? true
};
}
return opts; return opts;
} }

View File

@@ -665,7 +665,10 @@ export async function getTraefikConfig(
// TODO: HOW TO HANDLE ^^^^^^ BETTER // TODO: HOW TO HANDLE ^^^^^^ BETTER
const anySitesOnline = targets.some( const anySitesOnline = targets.some(
(target) => target.site.online (target) =>
target.site.online ||
target.site.type === "local" ||
target.site.type === "wireguard"
); );
return ( return (
@@ -793,7 +796,10 @@ export async function getTraefikConfig(
servers: (() => { servers: (() => {
// Check if any sites are online // Check if any sites are online
const anySitesOnline = targets.some( const anySitesOnline = targets.some(
(target) => target.site.online (target) =>
target.site.online ||
target.site.type === "local" ||
target.site.type === "wireguard"
); );
return targets return targets

View File

@@ -12,18 +12,18 @@
*/ */
import { ActionsEnum } from "@server/auth/actions"; import { ActionsEnum } from "@server/auth/actions";
import { actionAuditLog, db, orgs } from "@server/db"; import { actionAuditLog, logsDb, db, orgs } from "@server/db";
import logger from "@server/logger"; import logger from "@server/logger";
import HttpCode from "@server/types/HttpCode"; import HttpCode from "@server/types/HttpCode";
import { Request, Response, NextFunction } from "express"; import { Request, Response, NextFunction } from "express";
import createHttpError from "http-errors"; import createHttpError from "http-errors";
import { and, eq, lt } from "drizzle-orm"; import { and, eq, lt } from "drizzle-orm";
import cache from "@server/lib/cache"; import cache from "#private/lib/cache";
import { calculateCutoffTimestamp } from "@server/lib/cleanupLogs"; import { calculateCutoffTimestamp } from "@server/lib/cleanupLogs";
async function getActionDays(orgId: string): Promise<number> { async function getActionDays(orgId: string): Promise<number> {
// check cache first // check cache first
const cached = cache.get<number>(`org_${orgId}_actionDays`); const cached = await cache.get<number>(`org_${orgId}_actionDays`);
if (cached !== undefined) { if (cached !== undefined) {
return cached; return cached;
} }
@@ -41,7 +41,7 @@ async function getActionDays(orgId: string): Promise<number> {
} }
// store the result in cache // store the result in cache
cache.set( await cache.set(
`org_${orgId}_actionDays`, `org_${orgId}_actionDays`,
org.settingsLogRetentionDaysAction, org.settingsLogRetentionDaysAction,
300 300
@@ -54,7 +54,7 @@ export async function cleanUpOldLogs(orgId: string, retentionDays: number) {
const cutoffTimestamp = calculateCutoffTimestamp(retentionDays); const cutoffTimestamp = calculateCutoffTimestamp(retentionDays);
try { try {
await db await logsDb
.delete(actionAuditLog) .delete(actionAuditLog)
.where( .where(
and( and(
@@ -123,7 +123,7 @@ export function logActionAudit(action: ActionsEnum) {
metadata = JSON.stringify(req.params); metadata = JSON.stringify(req.params);
} }
await db.insert(actionAuditLog).values({ await logsDb.insert(actionAuditLog).values({
timestamp, timestamp,
orgId, orgId,
actorType, actorType,

View File

@@ -32,7 +32,7 @@ registry.registerPath({
method: "get", method: "get",
path: "/org/{orgId}/logs/access/export", path: "/org/{orgId}/logs/access/export",
description: "Export the access audit log for an organization as CSV", description: "Export the access audit log for an organization as CSV",
tags: [OpenAPITags.Org], tags: [OpenAPITags.Logs],
request: { request: {
query: queryAccessAuditLogsQuery, query: queryAccessAuditLogsQuery,
params: queryAccessAuditLogsParams params: queryAccessAuditLogsParams

View File

@@ -32,7 +32,7 @@ registry.registerPath({
method: "get", method: "get",
path: "/org/{orgId}/logs/action/export", path: "/org/{orgId}/logs/action/export",
description: "Export the action audit log for an organization as CSV", description: "Export the action audit log for an organization as CSV",
tags: [OpenAPITags.Org], tags: [OpenAPITags.Logs],
request: { request: {
query: queryActionAuditLogsQuery, query: queryActionAuditLogsQuery,
params: queryActionAuditLogsParams params: queryActionAuditLogsParams

View File

@@ -11,11 +11,11 @@
* This file is not licensed under the AGPLv3. * This file is not licensed under the AGPLv3.
*/ */
import { accessAuditLog, db, resources } from "@server/db"; import { accessAuditLog, logsDb, resources, db, primaryDb } from "@server/db";
import { registry } from "@server/openApi"; import { registry } from "@server/openApi";
import { NextFunction } from "express"; import { NextFunction } from "express";
import { Request, Response } from "express"; import { Request, Response } from "express";
import { eq, gt, lt, and, count, desc } from "drizzle-orm"; import { eq, gt, lt, and, count, desc, inArray } from "drizzle-orm";
import { OpenAPITags } from "@server/openApi"; import { OpenAPITags } from "@server/openApi";
import { z } from "zod"; import { z } from "zod";
import createHttpError from "http-errors"; import createHttpError from "http-errors";
@@ -115,15 +115,13 @@ function getWhere(data: Q) {
} }
export function queryAccess(data: Q) { export function queryAccess(data: Q) {
return db return logsDb
.select({ .select({
orgId: accessAuditLog.orgId, orgId: accessAuditLog.orgId,
action: accessAuditLog.action, action: accessAuditLog.action,
actorType: accessAuditLog.actorType, actorType: accessAuditLog.actorType,
actorId: accessAuditLog.actorId, actorId: accessAuditLog.actorId,
resourceId: accessAuditLog.resourceId, resourceId: accessAuditLog.resourceId,
resourceName: resources.name,
resourceNiceId: resources.niceId,
ip: accessAuditLog.ip, ip: accessAuditLog.ip,
location: accessAuditLog.location, location: accessAuditLog.location,
userAgent: accessAuditLog.userAgent, userAgent: accessAuditLog.userAgent,
@@ -133,16 +131,46 @@ export function queryAccess(data: Q) {
actor: accessAuditLog.actor actor: accessAuditLog.actor
}) })
.from(accessAuditLog) .from(accessAuditLog)
.leftJoin(
resources,
eq(accessAuditLog.resourceId, resources.resourceId)
)
.where(getWhere(data)) .where(getWhere(data))
.orderBy(desc(accessAuditLog.timestamp), desc(accessAuditLog.id)); .orderBy(desc(accessAuditLog.timestamp), desc(accessAuditLog.id));
} }
async function enrichWithResourceDetails(logs: Awaited<ReturnType<typeof queryAccess>>) {
// If logs database is the same as main database, we can do a join
// Otherwise, we need to fetch resource details separately
const resourceIds = logs
.map(log => log.resourceId)
.filter((id): id is number => id !== null && id !== undefined);
if (resourceIds.length === 0) {
return logs.map(log => ({ ...log, resourceName: null, resourceNiceId: null }));
}
// Fetch resource details from main database
const resourceDetails = await primaryDb
.select({
resourceId: resources.resourceId,
name: resources.name,
niceId: resources.niceId
})
.from(resources)
.where(inArray(resources.resourceId, resourceIds));
// Create a map for quick lookup
const resourceMap = new Map(
resourceDetails.map(r => [r.resourceId, { name: r.name, niceId: r.niceId }])
);
// Enrich logs with resource details
return logs.map(log => ({
...log,
resourceName: log.resourceId ? resourceMap.get(log.resourceId)?.name ?? null : null,
resourceNiceId: log.resourceId ? resourceMap.get(log.resourceId)?.niceId ?? null : null
}));
}
export function countAccessQuery(data: Q) { export function countAccessQuery(data: Q) {
const countQuery = db const countQuery = logsDb
.select({ count: count() }) .select({ count: count() })
.from(accessAuditLog) .from(accessAuditLog)
.where(getWhere(data)); .where(getWhere(data));
@@ -161,7 +189,7 @@ async function queryUniqueFilterAttributes(
); );
// Get unique actors // Get unique actors
const uniqueActors = await db const uniqueActors = await logsDb
.selectDistinct({ .selectDistinct({
actor: accessAuditLog.actor actor: accessAuditLog.actor
}) })
@@ -169,7 +197,7 @@ async function queryUniqueFilterAttributes(
.where(baseConditions); .where(baseConditions);
// Get unique locations // Get unique locations
const uniqueLocations = await db const uniqueLocations = await logsDb
.selectDistinct({ .selectDistinct({
locations: accessAuditLog.location locations: accessAuditLog.location
}) })
@@ -177,25 +205,40 @@ async function queryUniqueFilterAttributes(
.where(baseConditions); .where(baseConditions);
// Get unique resources with names // Get unique resources with names
const uniqueResources = await db const uniqueResources = await logsDb
.selectDistinct({ .selectDistinct({
id: accessAuditLog.resourceId, id: accessAuditLog.resourceId
name: resources.name
}) })
.from(accessAuditLog) .from(accessAuditLog)
.leftJoin(
resources,
eq(accessAuditLog.resourceId, resources.resourceId)
)
.where(baseConditions); .where(baseConditions);
// Fetch resource names from main database for the unique resource IDs
const resourceIds = uniqueResources
.map(row => row.id)
.filter((id): id is number => id !== null);
let resourcesWithNames: Array<{ id: number; name: string | null }> = [];
if (resourceIds.length > 0) {
const resourceDetails = await primaryDb
.select({
resourceId: resources.resourceId,
name: resources.name
})
.from(resources)
.where(inArray(resources.resourceId, resourceIds));
resourcesWithNames = resourceDetails.map(r => ({
id: r.resourceId,
name: r.name
}));
}
return { return {
actors: uniqueActors actors: uniqueActors
.map((row) => row.actor) .map((row) => row.actor)
.filter((actor): actor is string => actor !== null), .filter((actor): actor is string => actor !== null),
resources: uniqueResources.filter( resources: resourcesWithNames,
(row): row is { id: number; name: string | null } => row.id !== null
),
locations: uniqueLocations locations: uniqueLocations
.map((row) => row.locations) .map((row) => row.locations)
.filter((location): location is string => location !== null) .filter((location): location is string => location !== null)
@@ -206,7 +249,7 @@ registry.registerPath({
method: "get", method: "get",
path: "/org/{orgId}/logs/access", path: "/org/{orgId}/logs/access",
description: "Query the access audit log for an organization", description: "Query the access audit log for an organization",
tags: [OpenAPITags.Org], tags: [OpenAPITags.Logs],
request: { request: {
query: queryAccessAuditLogsQuery, query: queryAccessAuditLogsQuery,
params: queryAccessAuditLogsParams params: queryAccessAuditLogsParams
@@ -243,7 +286,10 @@ export async function queryAccessAuditLogs(
const baseQuery = queryAccess(data); const baseQuery = queryAccess(data);
const log = await baseQuery.limit(data.limit).offset(data.offset); const logsRaw = await baseQuery.limit(data.limit).offset(data.offset);
// Enrich with resource details (handles cross-database scenario)
const log = await enrichWithResourceDetails(logsRaw);
const totalCountResult = await countAccessQuery(data); const totalCountResult = await countAccessQuery(data);
const totalCount = totalCountResult[0].count; const totalCount = totalCountResult[0].count;

View File

@@ -11,7 +11,7 @@
* This file is not licensed under the AGPLv3. * This file is not licensed under the AGPLv3.
*/ */
import { actionAuditLog, db } from "@server/db"; import { actionAuditLog, logsDb } from "@server/db";
import { registry } from "@server/openApi"; import { registry } from "@server/openApi";
import { NextFunction } from "express"; import { NextFunction } from "express";
import { Request, Response } from "express"; import { Request, Response } from "express";
@@ -97,7 +97,7 @@ function getWhere(data: Q) {
} }
export function queryAction(data: Q) { export function queryAction(data: Q) {
return db return logsDb
.select({ .select({
orgId: actionAuditLog.orgId, orgId: actionAuditLog.orgId,
action: actionAuditLog.action, action: actionAuditLog.action,
@@ -113,7 +113,7 @@ export function queryAction(data: Q) {
} }
export function countActionQuery(data: Q) { export function countActionQuery(data: Q) {
const countQuery = db const countQuery = logsDb
.select({ count: count() }) .select({ count: count() })
.from(actionAuditLog) .from(actionAuditLog)
.where(getWhere(data)); .where(getWhere(data));
@@ -132,14 +132,14 @@ async function queryUniqueFilterAttributes(
); );
// Get unique actors // Get unique actors
const uniqueActors = await db const uniqueActors = await logsDb
.selectDistinct({ .selectDistinct({
actor: actionAuditLog.actor actor: actionAuditLog.actor
}) })
.from(actionAuditLog) .from(actionAuditLog)
.where(baseConditions); .where(baseConditions);
const uniqueActions = await db const uniqueActions = await logsDb
.selectDistinct({ .selectDistinct({
action: actionAuditLog.action action: actionAuditLog.action
}) })
@@ -160,7 +160,7 @@ registry.registerPath({
method: "get", method: "get",
path: "/org/{orgId}/logs/action", path: "/org/{orgId}/logs/action",
description: "Query the action audit log for an organization", description: "Query the action audit log for an organization",
tags: [OpenAPITags.Org], tags: [OpenAPITags.Logs],
request: { request: {
query: queryActionAuditLogsQuery, query: queryActionAuditLogsQuery,
params: queryActionAuditLogsParams params: queryActionAuditLogsParams

View File

@@ -31,16 +31,16 @@ const getOrgSchema = z.strictObject({
orgId: z.string() orgId: z.string()
}); });
registry.registerPath({ // registry.registerPath({
method: "get", // method: "get",
path: "/org/{orgId}/billing/usage", // path: "/org/{orgId}/billing/usage",
description: "Get an organization's billing usage", // description: "Get an organization's billing usage",
tags: [OpenAPITags.Org], // tags: [OpenAPITags.Org],
request: { // request: {
params: getOrgSchema // params: getOrgSchema
}, // },
responses: {} // responses: {}
}); // });
export async function getOrgUsage( export async function getOrgUsage(
req: Request, req: Request,

View File

@@ -480,9 +480,9 @@ authenticated.get(
authenticated.post( authenticated.post(
"/re-key/:clientId/regenerate-client-secret", "/re-key/:clientId/regenerate-client-secret",
verifyClientAccess, // this is first to set the org id
verifyValidLicense, verifyValidLicense,
verifyValidSubscription(tierMatrix.rotateCredentials), verifyValidSubscription(tierMatrix.rotateCredentials),
verifyClientAccess, // this is first to set the org id
verifyLimits, verifyLimits,
verifyUserHasAction(ActionsEnum.reGenerateSecret), verifyUserHasAction(ActionsEnum.reGenerateSecret),
reKey.reGenerateClientSecret reKey.reGenerateClientSecret
@@ -490,9 +490,9 @@ authenticated.post(
authenticated.post( authenticated.post(
"/re-key/:siteId/regenerate-site-secret", "/re-key/:siteId/regenerate-site-secret",
verifySiteAccess, // this is first to set the org id
verifyValidLicense, verifyValidLicense,
verifyValidSubscription(tierMatrix.rotateCredentials), verifyValidSubscription(tierMatrix.rotateCredentials),
verifySiteAccess, // this is first to set the org id
verifyLimits, verifyLimits,
verifyUserHasAction(ActionsEnum.reGenerateSecret), verifyUserHasAction(ActionsEnum.reGenerateSecret),
reKey.reGenerateSiteSecret reKey.reGenerateSiteSecret

View File

@@ -52,7 +52,7 @@ registry.registerPath({
method: "put", method: "put",
path: "/org/{orgId}/idp/oidc", path: "/org/{orgId}/idp/oidc",
description: "Create an OIDC IdP for a specific organization.", description: "Create an OIDC IdP for a specific organization.",
tags: [OpenAPITags.Idp, OpenAPITags.Org], tags: [OpenAPITags.OrgIdp],
request: { request: {
params: paramsSchema, params: paramsSchema,
body: { body: {

View File

@@ -35,7 +35,7 @@ registry.registerPath({
method: "delete", method: "delete",
path: "/org/{orgId}/idp/{idpId}", path: "/org/{orgId}/idp/{idpId}",
description: "Delete IDP for a specific organization.", description: "Delete IDP for a specific organization.",
tags: [OpenAPITags.Idp, OpenAPITags.Org], tags: [OpenAPITags.OrgIdp],
request: { request: {
params: paramsSchema params: paramsSchema
}, },

View File

@@ -50,9 +50,9 @@ async function query(idpId: number, orgId: string) {
registry.registerPath({ registry.registerPath({
method: "get", method: "get",
path: "/org/:orgId/idp/:idpId", path: "/org/{orgId}/idp/{idpId}",
description: "Get an IDP by its IDP ID for a specific organization.", description: "Get an IDP by its IDP ID for a specific organization.",
tags: [OpenAPITags.Idp, OpenAPITags.Org], tags: [OpenAPITags.OrgIdp],
request: { request: {
params: paramsSchema params: paramsSchema
}, },

View File

@@ -67,7 +67,7 @@ registry.registerPath({
method: "get", method: "get",
path: "/org/{orgId}/idp", path: "/org/{orgId}/idp",
description: "List all IDP for a specific organization.", description: "List all IDP for a specific organization.",
tags: [OpenAPITags.Idp, OpenAPITags.Org], tags: [OpenAPITags.OrgIdp],
request: { request: {
query: querySchema, query: querySchema,
params: paramsSchema params: paramsSchema

View File

@@ -59,7 +59,7 @@ registry.registerPath({
method: "post", method: "post",
path: "/org/{orgId}/idp/{idpId}/oidc", path: "/org/{orgId}/idp/{idpId}/oidc",
description: "Update an OIDC IdP for a specific organization.", description: "Update an OIDC IdP for a specific organization.",
tags: [OpenAPITags.Idp, OpenAPITags.Org], tags: [OpenAPITags.OrgIdp],
request: { request: {
params: paramsSchema, params: paramsSchema,
body: { body: {

View File

@@ -52,7 +52,7 @@ registry.registerPath({
method: "get", method: "get",
path: "/maintenance/info", path: "/maintenance/info",
description: "Get maintenance information for a resource by domain.", description: "Get maintenance information for a resource by domain.",
tags: [OpenAPITags.Resource], tags: [OpenAPITags.PublicResource],
request: { request: {
query: z.object({ query: z.object({
fullDomain: z.string() fullDomain: z.string()

View File

@@ -32,7 +32,7 @@ import { fromError } from "zod-validation-error";
import { OpenAPITags, registry } from "@server/openApi"; import { OpenAPITags, registry } from "@server/openApi";
import { eq, or, and } from "drizzle-orm"; import { eq, or, and } from "drizzle-orm";
import { canUserAccessSiteResource } from "@server/auth/canUserAccessSiteResource"; import { canUserAccessSiteResource } from "@server/auth/canUserAccessSiteResource";
import { signPublicKey, getOrgCAKeys } from "#private/lib/sshCA"; import { signPublicKey, getOrgCAKeys } from "@server/lib/sshCA";
import config from "@server/lib/config"; import config from "@server/lib/config";
import { sendToClient } from "#private/routers/ws"; import { sendToClient } from "#private/routers/ws";
@@ -176,7 +176,7 @@ export async function signSshKey(
} else if (req.user?.username) { } else if (req.user?.username) {
usernameToUse = req.user.username; usernameToUse = req.user.username;
// We need to clean out any spaces or special characters from the username to ensure it's valid for SSH certificates // We need to clean out any spaces or special characters from the username to ensure it's valid for SSH certificates
usernameToUse = usernameToUse.replace(/[^a-zA-Z0-9_-]/g, ""); usernameToUse = usernameToUse.replace(/[^a-zA-Z0-9_-]/g, "-");
if (!usernameToUse) { if (!usernameToUse) {
return next( return next(
createHttpError( createHttpError(
@@ -194,6 +194,9 @@ export async function signSshKey(
); );
} }
// prefix with p-
usernameToUse = `p-${usernameToUse}`;
// check if we have a existing user in this org with the same // check if we have a existing user in this org with the same
const [existingUserWithSameName] = await db const [existingUserWithSameName] = await db
.select() .select()
@@ -239,6 +242,16 @@ export async function signSshKey(
); );
} }
} }
await db
.update(userOrgs)
.set({ pamUsername: usernameToUse })
.where(
and(
eq(userOrgs.orgId, orgId),
eq(userOrgs.userId, userId)
)
);
} else { } else {
usernameToUse = userOrg.pamUsername; usernameToUse = userOrg.pamUsername;
} }
@@ -310,6 +323,15 @@ export async function signSshKey(
); );
} }
if (resource.mode == "cidr") {
return next(
createHttpError(
HttpCode.BAD_REQUEST,
"SSHing is not supported for CIDR resources"
)
);
}
// Check if the user has access to the resource // Check if the user has access to the resource
const hasAccess = await canUserAccessSiteResource({ const hasAccess = await canUserAccessSiteResource({
userId: userId, userId: userId,

View File

@@ -43,7 +43,7 @@ registry.registerPath({
method: "post", method: "post",
path: "/resource/{resourceId}/access-token", path: "/resource/{resourceId}/access-token",
description: "Generate a new access token for a resource.", description: "Generate a new access token for a resource.",
tags: [OpenAPITags.Resource, OpenAPITags.AccessToken], tags: [OpenAPITags.PublicResource, OpenAPITags.AccessToken],
request: { request: {
params: generateAccssTokenParamsSchema, params: generateAccssTokenParamsSchema,
body: { body: {

View File

@@ -122,7 +122,7 @@ registry.registerPath({
method: "get", method: "get",
path: "/org/{orgId}/access-tokens", path: "/org/{orgId}/access-tokens",
description: "List all access tokens in an organization.", description: "List all access tokens in an organization.",
tags: [OpenAPITags.Org, OpenAPITags.AccessToken], tags: [OpenAPITags.AccessToken],
request: { request: {
params: z.object({ params: z.object({
orgId: z.string() orgId: z.string()
@@ -135,8 +135,8 @@ registry.registerPath({
registry.registerPath({ registry.registerPath({
method: "get", method: "get",
path: "/resource/{resourceId}/access-tokens", path: "/resource/{resourceId}/access-tokens",
description: "List all access tokens in an organization.", description: "List all access tokens for a resource.",
tags: [OpenAPITags.Resource, OpenAPITags.AccessToken], tags: [OpenAPITags.PublicResource, OpenAPITags.AccessToken],
request: { request: {
params: z.object({ params: z.object({
resourceId: z.number() resourceId: z.number()

View File

@@ -37,7 +37,7 @@ registry.registerPath({
method: "put", method: "put",
path: "/org/{orgId}/api-key", path: "/org/{orgId}/api-key",
description: "Create a new API key scoped to the organization.", description: "Create a new API key scoped to the organization.",
tags: [OpenAPITags.Org, OpenAPITags.ApiKey], tags: [OpenAPITags.ApiKey],
request: { request: {
params: paramsSchema, params: paramsSchema,
body: { body: {

View File

@@ -18,7 +18,7 @@ registry.registerPath({
method: "delete", method: "delete",
path: "/org/{orgId}/api-key/{apiKeyId}", path: "/org/{orgId}/api-key/{apiKeyId}",
description: "Delete an API key.", description: "Delete an API key.",
tags: [OpenAPITags.Org, OpenAPITags.ApiKey], tags: [OpenAPITags.ApiKey],
request: { request: {
params: paramsSchema params: paramsSchema
}, },

View File

@@ -48,7 +48,7 @@ registry.registerPath({
method: "get", method: "get",
path: "/org/{orgId}/api-key/{apiKeyId}/actions", path: "/org/{orgId}/api-key/{apiKeyId}/actions",
description: "List all actions set for an API key.", description: "List all actions set for an API key.",
tags: [OpenAPITags.Org, OpenAPITags.ApiKey], tags: [OpenAPITags.ApiKey],
request: { request: {
params: paramsSchema, params: paramsSchema,
query: querySchema query: querySchema

View File

@@ -52,7 +52,7 @@ registry.registerPath({
method: "get", method: "get",
path: "/org/{orgId}/api-keys", path: "/org/{orgId}/api-keys",
description: "List all API keys for an organization", description: "List all API keys for an organization",
tags: [OpenAPITags.Org, OpenAPITags.ApiKey], tags: [OpenAPITags.ApiKey],
request: { request: {
params: paramsSchema, params: paramsSchema,
query: querySchema query: querySchema

View File

@@ -25,7 +25,7 @@ registry.registerPath({
path: "/org/{orgId}/api-key/{apiKeyId}/actions", path: "/org/{orgId}/api-key/{apiKeyId}/actions",
description: description:
"Set actions for an API key. This will replace any existing actions.", "Set actions for an API key. This will replace any existing actions.",
tags: [OpenAPITags.Org, OpenAPITags.ApiKey], tags: [OpenAPITags.ApiKey],
request: { request: {
params: paramsSchema, params: paramsSchema,
body: { body: {

View File

@@ -20,7 +20,7 @@ registry.registerPath({
method: "get", method: "get",
path: "/org/{orgId}/logs/request", path: "/org/{orgId}/logs/request",
description: "Query the request audit log for an organization", description: "Query the request audit log for an organization",
tags: [OpenAPITags.Org], tags: [OpenAPITags.Logs],
request: { request: {
query: queryAccessAuditLogsQuery.omit({ query: queryAccessAuditLogsQuery.omit({
limit: true, limit: true,

View File

@@ -1,4 +1,4 @@
import { db, requestAuditLog, driver, primaryDb } from "@server/db"; import { logsDb, requestAuditLog, driver, primaryLogsDb } from "@server/db";
import { registry } from "@server/openApi"; import { registry } from "@server/openApi";
import { NextFunction } from "express"; import { NextFunction } from "express";
import { Request, Response } from "express"; import { Request, Response } from "express";
@@ -74,12 +74,12 @@ async function query(query: Q) {
); );
} }
const [all] = await primaryDb const [all] = await primaryLogsDb
.select({ total: count() }) .select({ total: count() })
.from(requestAuditLog) .from(requestAuditLog)
.where(baseConditions); .where(baseConditions);
const [blocked] = await primaryDb const [blocked] = await primaryLogsDb
.select({ total: count() }) .select({ total: count() })
.from(requestAuditLog) .from(requestAuditLog)
.where(and(baseConditions, eq(requestAuditLog.action, false))); .where(and(baseConditions, eq(requestAuditLog.action, false)));
@@ -90,7 +90,7 @@ async function query(query: Q) {
const DISTINCT_LIMIT = 500; const DISTINCT_LIMIT = 500;
const requestsPerCountry = await primaryDb const requestsPerCountry = await primaryLogsDb
.selectDistinct({ .selectDistinct({
code: requestAuditLog.location, code: requestAuditLog.location,
count: totalQ count: totalQ
@@ -118,7 +118,7 @@ async function query(query: Q) {
const booleanTrue = driver === "pg" ? sql`true` : sql`1`; const booleanTrue = driver === "pg" ? sql`true` : sql`1`;
const booleanFalse = driver === "pg" ? sql`false` : sql`0`; const booleanFalse = driver === "pg" ? sql`false` : sql`0`;
const requestsPerDay = await primaryDb const requestsPerDay = await primaryLogsDb
.select({ .select({
day: groupByDayFunction.as("day"), day: groupByDayFunction.as("day"),
allowedCount: allowedCount:
@@ -151,7 +151,7 @@ registry.registerPath({
method: "get", method: "get",
path: "/org/{orgId}/logs/analytics", path: "/org/{orgId}/logs/analytics",
description: "Query the request audit analytics for an organization", description: "Query the request audit analytics for an organization",
tags: [OpenAPITags.Org], tags: [OpenAPITags.Logs],
request: { request: {
query: queryAccessAuditLogsQuery, query: queryAccessAuditLogsQuery,
params: queryRequestAuditLogsParams params: queryRequestAuditLogsParams

View File

@@ -1,8 +1,8 @@
import { db, primaryDb, requestAuditLog, resources } from "@server/db"; import { logsDb, primaryLogsDb, requestAuditLog, resources, db, primaryDb } from "@server/db";
import { registry } from "@server/openApi"; import { registry } from "@server/openApi";
import { NextFunction } from "express"; import { NextFunction } from "express";
import { Request, Response } from "express"; import { Request, Response } from "express";
import { eq, gt, lt, and, count, desc } from "drizzle-orm"; import { eq, gt, lt, and, count, desc, inArray } from "drizzle-orm";
import { OpenAPITags } from "@server/openApi"; import { OpenAPITags } from "@server/openApi";
import { z } from "zod"; import { z } from "zod";
import createHttpError from "http-errors"; import createHttpError from "http-errors";
@@ -107,7 +107,7 @@ function getWhere(data: Q) {
} }
export function queryRequest(data: Q) { export function queryRequest(data: Q) {
return primaryDb return primaryLogsDb
.select({ .select({
id: requestAuditLog.id, id: requestAuditLog.id,
timestamp: requestAuditLog.timestamp, timestamp: requestAuditLog.timestamp,
@@ -129,21 +129,49 @@ export function queryRequest(data: Q) {
host: requestAuditLog.host, host: requestAuditLog.host,
path: requestAuditLog.path, path: requestAuditLog.path,
method: requestAuditLog.method, method: requestAuditLog.method,
tls: requestAuditLog.tls, tls: requestAuditLog.tls
resourceName: resources.name,
resourceNiceId: resources.niceId
}) })
.from(requestAuditLog) .from(requestAuditLog)
.leftJoin(
resources,
eq(requestAuditLog.resourceId, resources.resourceId)
) // TODO: Is this efficient?
.where(getWhere(data)) .where(getWhere(data))
.orderBy(desc(requestAuditLog.timestamp)); .orderBy(desc(requestAuditLog.timestamp));
} }
async function enrichWithResourceDetails(logs: Awaited<ReturnType<typeof queryRequest>>) {
// If logs database is the same as main database, we can do a join
// Otherwise, we need to fetch resource details separately
const resourceIds = logs
.map(log => log.resourceId)
.filter((id): id is number => id !== null && id !== undefined);
if (resourceIds.length === 0) {
return logs.map(log => ({ ...log, resourceName: null, resourceNiceId: null }));
}
// Fetch resource details from main database
const resourceDetails = await primaryDb
.select({
resourceId: resources.resourceId,
name: resources.name,
niceId: resources.niceId
})
.from(resources)
.where(inArray(resources.resourceId, resourceIds));
// Create a map for quick lookup
const resourceMap = new Map(
resourceDetails.map(r => [r.resourceId, { name: r.name, niceId: r.niceId }])
);
// Enrich logs with resource details
return logs.map(log => ({
...log,
resourceName: log.resourceId ? resourceMap.get(log.resourceId)?.name ?? null : null,
resourceNiceId: log.resourceId ? resourceMap.get(log.resourceId)?.niceId ?? null : null
}));
}
export function countRequestQuery(data: Q) { export function countRequestQuery(data: Q) {
const countQuery = primaryDb const countQuery = primaryLogsDb
.select({ count: count() }) .select({ count: count() })
.from(requestAuditLog) .from(requestAuditLog)
.where(getWhere(data)); .where(getWhere(data));
@@ -154,7 +182,7 @@ registry.registerPath({
method: "get", method: "get",
path: "/org/{orgId}/logs/request", path: "/org/{orgId}/logs/request",
description: "Query the request audit log for an organization", description: "Query the request audit log for an organization",
tags: [OpenAPITags.Org], tags: [OpenAPITags.Logs],
request: { request: {
query: queryAccessAuditLogsQuery, query: queryAccessAuditLogsQuery,
params: queryRequestAuditLogsParams params: queryRequestAuditLogsParams
@@ -185,36 +213,31 @@ async function queryUniqueFilterAttributes(
uniquePaths, uniquePaths,
uniqueResources uniqueResources
] = await Promise.all([ ] = await Promise.all([
primaryDb primaryLogsDb
.selectDistinct({ actor: requestAuditLog.actor }) .selectDistinct({ actor: requestAuditLog.actor })
.from(requestAuditLog) .from(requestAuditLog)
.where(baseConditions) .where(baseConditions)
.limit(DISTINCT_LIMIT + 1), .limit(DISTINCT_LIMIT + 1),
primaryDb primaryLogsDb
.selectDistinct({ locations: requestAuditLog.location }) .selectDistinct({ locations: requestAuditLog.location })
.from(requestAuditLog) .from(requestAuditLog)
.where(baseConditions) .where(baseConditions)
.limit(DISTINCT_LIMIT + 1), .limit(DISTINCT_LIMIT + 1),
primaryDb primaryLogsDb
.selectDistinct({ hosts: requestAuditLog.host }) .selectDistinct({ hosts: requestAuditLog.host })
.from(requestAuditLog) .from(requestAuditLog)
.where(baseConditions) .where(baseConditions)
.limit(DISTINCT_LIMIT + 1), .limit(DISTINCT_LIMIT + 1),
primaryDb primaryLogsDb
.selectDistinct({ paths: requestAuditLog.path }) .selectDistinct({ paths: requestAuditLog.path })
.from(requestAuditLog) .from(requestAuditLog)
.where(baseConditions) .where(baseConditions)
.limit(DISTINCT_LIMIT + 1), .limit(DISTINCT_LIMIT + 1),
primaryDb primaryLogsDb
.selectDistinct({ .selectDistinct({
id: requestAuditLog.resourceId, id: requestAuditLog.resourceId
name: resources.name
}) })
.from(requestAuditLog) .from(requestAuditLog)
.leftJoin(
resources,
eq(requestAuditLog.resourceId, resources.resourceId)
)
.where(baseConditions) .where(baseConditions)
.limit(DISTINCT_LIMIT + 1) .limit(DISTINCT_LIMIT + 1)
]); ]);
@@ -231,13 +254,33 @@ async function queryUniqueFilterAttributes(
// throw new Error("Too many distinct filter attributes to retrieve. Please refine your time range."); // throw new Error("Too many distinct filter attributes to retrieve. Please refine your time range.");
// } // }
// Fetch resource names from main database for the unique resource IDs
const resourceIds = uniqueResources
.map(row => row.id)
.filter((id): id is number => id !== null);
let resourcesWithNames: Array<{ id: number; name: string | null }> = [];
if (resourceIds.length > 0) {
const resourceDetails = await primaryDb
.select({
resourceId: resources.resourceId,
name: resources.name
})
.from(resources)
.where(inArray(resources.resourceId, resourceIds));
resourcesWithNames = resourceDetails.map(r => ({
id: r.resourceId,
name: r.name
}));
}
return { return {
actors: uniqueActors actors: uniqueActors
.map((row) => row.actor) .map((row) => row.actor)
.filter((actor): actor is string => actor !== null), .filter((actor): actor is string => actor !== null),
resources: uniqueResources.filter( resources: resourcesWithNames,
(row): row is { id: number; name: string | null } => row.id !== null
),
locations: uniqueLocations locations: uniqueLocations
.map((row) => row.locations) .map((row) => row.locations)
.filter((location): location is string => location !== null), .filter((location): location is string => location !== null),
@@ -280,7 +323,10 @@ export async function queryRequestAuditLogs(
const baseQuery = queryRequest(data); const baseQuery = queryRequest(data);
const log = await baseQuery.limit(data.limit).offset(data.offset); const logsRaw = await baseQuery.limit(data.limit).offset(data.offset);
// Enrich with resource details (handles cross-database scenario)
const log = await enrichWithResourceDetails(logsRaw);
const totalCountResult = await countRequestQuery(data); const totalCountResult = await countRequestQuery(data);
const totalCount = totalCountResult[0].count; const totalCount = totalCountResult[0].count;

View File

@@ -1,7 +1,7 @@
import { db, orgs, requestAuditLog } from "@server/db"; import { logsDb, primaryLogsDb, db, orgs, requestAuditLog } from "@server/db";
import logger from "@server/logger"; import logger from "@server/logger";
import { and, eq, lt, sql } from "drizzle-orm"; import { and, eq, lt, sql } from "drizzle-orm";
import cache from "@server/lib/cache"; import cache from "#dynamic/lib/cache";
import { calculateCutoffTimestamp } from "@server/lib/cleanupLogs"; import { calculateCutoffTimestamp } from "@server/lib/cleanupLogs";
import { stripPortFromHost } from "@server/lib/ip"; import { stripPortFromHost } from "@server/lib/ip";
@@ -69,7 +69,7 @@ async function flushAuditLogs() {
try { try {
// Use a transaction to ensure all inserts succeed or fail together // Use a transaction to ensure all inserts succeed or fail together
// This prevents index corruption from partial writes // This prevents index corruption from partial writes
await db.transaction(async (tx) => { await logsDb.transaction(async (tx) => {
// Batch insert logs in groups of 25 to avoid overwhelming the database // Batch insert logs in groups of 25 to avoid overwhelming the database
const BATCH_DB_SIZE = 25; const BATCH_DB_SIZE = 25;
for (let i = 0; i < logsToWrite.length; i += BATCH_DB_SIZE) { for (let i = 0; i < logsToWrite.length; i += BATCH_DB_SIZE) {
@@ -130,7 +130,7 @@ export async function shutdownAuditLogger() {
async function getRetentionDays(orgId: string): Promise<number> { async function getRetentionDays(orgId: string): Promise<number> {
// check cache first // check cache first
const cached = cache.get<number>(`org_${orgId}_retentionDays`); const cached = await cache.get<number>(`org_${orgId}_retentionDays`);
if (cached !== undefined) { if (cached !== undefined) {
return cached; return cached;
} }
@@ -149,7 +149,7 @@ async function getRetentionDays(orgId: string): Promise<number> {
} }
// store the result in cache // store the result in cache
cache.set( await cache.set(
`org_${orgId}_retentionDays`, `org_${orgId}_retentionDays`,
org.settingsLogRetentionDaysRequest, org.settingsLogRetentionDaysRequest,
300 300
@@ -162,7 +162,7 @@ export async function cleanUpOldLogs(orgId: string, retentionDays: number) {
const cutoffTimestamp = calculateCutoffTimestamp(retentionDays); const cutoffTimestamp = calculateCutoffTimestamp(retentionDays);
try { try {
await db await logsDb
.delete(requestAuditLog) .delete(requestAuditLog)
.where( .where(
and( and(

View File

@@ -37,7 +37,7 @@ import {
enforceResourceSessionLength enforceResourceSessionLength
} from "#dynamic/lib/checkOrgAccessPolicy"; } from "#dynamic/lib/checkOrgAccessPolicy";
import { logRequestAudit } from "./logRequestAudit"; import { logRequestAudit } from "./logRequestAudit";
import cache from "@server/lib/cache"; import { localCache } from "#dynamic/lib/cache";
import { APP_VERSION } from "@server/lib/consts"; import { APP_VERSION } from "@server/lib/consts";
import { isSubscribed } from "#dynamic/lib/isSubscribed"; import { isSubscribed } from "#dynamic/lib/isSubscribed";
import { tierMatrix } from "@server/lib/billing/tierMatrix"; import { tierMatrix } from "@server/lib/billing/tierMatrix";
@@ -137,7 +137,7 @@ export async function verifyResourceSession(
headerAuthExtendedCompatibility: ResourceHeaderAuthExtendedCompatibility | null; headerAuthExtendedCompatibility: ResourceHeaderAuthExtendedCompatibility | null;
org: Org; org: Org;
} }
| undefined = cache.get(resourceCacheKey); | undefined = localCache.get(resourceCacheKey);
if (!resourceData) { if (!resourceData) {
const result = await getResourceByDomain(cleanHost); const result = await getResourceByDomain(cleanHost);
@@ -161,7 +161,7 @@ export async function verifyResourceSession(
} }
resourceData = result; resourceData = result;
cache.set(resourceCacheKey, resourceData, 5); localCache.set(resourceCacheKey, resourceData, 5);
} }
const { const {
@@ -405,7 +405,7 @@ export async function verifyResourceSession(
// check for HTTP Basic Auth header // check for HTTP Basic Auth header
const clientHeaderAuthKey = `headerAuth:${clientHeaderAuth}`; const clientHeaderAuthKey = `headerAuth:${clientHeaderAuth}`;
if (headerAuth && clientHeaderAuth) { if (headerAuth && clientHeaderAuth) {
if (cache.get(clientHeaderAuthKey)) { if (localCache.get(clientHeaderAuthKey)) {
logger.debug( logger.debug(
"Resource allowed because header auth is valid (cached)" "Resource allowed because header auth is valid (cached)"
); );
@@ -428,7 +428,7 @@ export async function verifyResourceSession(
headerAuth.headerAuthHash headerAuth.headerAuthHash
) )
) { ) {
cache.set(clientHeaderAuthKey, clientHeaderAuth, 5); localCache.set(clientHeaderAuthKey, clientHeaderAuth, 5);
logger.debug("Resource allowed because header auth is valid"); logger.debug("Resource allowed because header auth is valid");
logRequestAudit( logRequestAudit(
@@ -520,7 +520,7 @@ export async function verifyResourceSession(
if (resourceSessionToken) { if (resourceSessionToken) {
const sessionCacheKey = `session:${resourceSessionToken}`; const sessionCacheKey = `session:${resourceSessionToken}`;
let resourceSession: any = cache.get(sessionCacheKey); let resourceSession: any = localCache.get(sessionCacheKey);
if (!resourceSession) { if (!resourceSession) {
const result = await validateResourceSessionToken( const result = await validateResourceSessionToken(
@@ -529,7 +529,7 @@ export async function verifyResourceSession(
); );
resourceSession = result?.resourceSession; resourceSession = result?.resourceSession;
cache.set(sessionCacheKey, resourceSession, 5); localCache.set(sessionCacheKey, resourceSession, 5);
} }
if (resourceSession?.isRequestToken) { if (resourceSession?.isRequestToken) {
@@ -662,7 +662,7 @@ export async function verifyResourceSession(
}:${resource.resourceId}`; }:${resource.resourceId}`;
let allowedUserData: BasicUserData | null | undefined = let allowedUserData: BasicUserData | null | undefined =
cache.get(userAccessCacheKey); localCache.get(userAccessCacheKey);
if (allowedUserData === undefined) { if (allowedUserData === undefined) {
allowedUserData = await isUserAllowedToAccessResource( allowedUserData = await isUserAllowedToAccessResource(
@@ -671,7 +671,7 @@ export async function verifyResourceSession(
resourceData.org resourceData.org
); );
cache.set(userAccessCacheKey, allowedUserData, 5); localCache.set(userAccessCacheKey, allowedUserData, 5);
} }
if ( if (
@@ -974,11 +974,11 @@ async function checkRules(
): Promise<"ACCEPT" | "DROP" | "PASS" | undefined> { ): Promise<"ACCEPT" | "DROP" | "PASS" | undefined> {
const ruleCacheKey = `rules:${resourceId}`; const ruleCacheKey = `rules:${resourceId}`;
let rules: ResourceRule[] | undefined = cache.get(ruleCacheKey); let rules: ResourceRule[] | undefined = localCache.get(ruleCacheKey);
if (!rules) { if (!rules) {
rules = await getResourceRules(resourceId); rules = await getResourceRules(resourceId);
cache.set(ruleCacheKey, rules, 5); localCache.set(ruleCacheKey, rules, 5);
} }
if (rules.length === 0) { if (rules.length === 0) {
@@ -1208,13 +1208,13 @@ async function isIpInAsn(
async function getAsnFromIp(ip: string): Promise<number | undefined> { async function getAsnFromIp(ip: string): Promise<number | undefined> {
const asnCacheKey = `asn:${ip}`; const asnCacheKey = `asn:${ip}`;
let cachedAsn: number | undefined = cache.get(asnCacheKey); let cachedAsn: number | undefined = localCache.get(asnCacheKey);
if (!cachedAsn) { if (!cachedAsn) {
cachedAsn = await getAsnForIp(ip); // do it locally cachedAsn = await getAsnForIp(ip); // do it locally
// Cache for longer since IP ASN doesn't change frequently // Cache for longer since IP ASN doesn't change frequently
if (cachedAsn) { if (cachedAsn) {
cache.set(asnCacheKey, cachedAsn, 300); // 5 minutes localCache.set(asnCacheKey, cachedAsn, 300); // 5 minutes
} }
} }
@@ -1224,14 +1224,14 @@ async function getAsnFromIp(ip: string): Promise<number | undefined> {
async function getCountryCodeFromIp(ip: string): Promise<string | undefined> { async function getCountryCodeFromIp(ip: string): Promise<string | undefined> {
const geoIpCacheKey = `geoip:${ip}`; const geoIpCacheKey = `geoip:${ip}`;
let cachedCountryCode: string | undefined = cache.get(geoIpCacheKey); let cachedCountryCode: string | undefined = localCache.get(geoIpCacheKey);
if (!cachedCountryCode) { if (!cachedCountryCode) {
cachedCountryCode = await getCountryCodeForIp(ip); // do it locally cachedCountryCode = await getCountryCodeForIp(ip); // do it locally
// Only cache successful lookups to avoid filling cache with undefined values // Only cache successful lookups to avoid filling cache with undefined values
if (cachedCountryCode) { if (cachedCountryCode) {
// Cache for longer since IP geolocation doesn't change frequently // Cache for longer since IP geolocation doesn't change frequently
cache.set(geoIpCacheKey, cachedCountryCode, 300); // 5 minutes localCache.set(geoIpCacheKey, cachedCountryCode, 300); // 5 minutes
} }
} }

View File

@@ -20,7 +20,7 @@ registry.registerPath({
method: "put", method: "put",
path: "/org/{orgId}/blueprint", path: "/org/{orgId}/blueprint",
description: "Apply a base64 encoded JSON blueprint to an organization", description: "Apply a base64 encoded JSON blueprint to an organization",
tags: [OpenAPITags.Org, OpenAPITags.Blueprint], tags: [OpenAPITags.Blueprint],
request: { request: {
params: applyBlueprintParamsSchema, params: applyBlueprintParamsSchema,
body: { body: {

View File

@@ -43,7 +43,7 @@ registry.registerPath({
method: "put", method: "put",
path: "/org/{orgId}/blueprint", path: "/org/{orgId}/blueprint",
description: "Create and apply a YAML blueprint to an organization", description: "Create and apply a YAML blueprint to an organization",
tags: [OpenAPITags.Org, OpenAPITags.Blueprint], tags: [OpenAPITags.Blueprint],
request: { request: {
params: applyBlueprintParamsSchema, params: applyBlueprintParamsSchema,
body: { body: {

View File

@@ -53,7 +53,7 @@ registry.registerPath({
method: "get", method: "get",
path: "/org/{orgId}/blueprint/{blueprintId}", path: "/org/{orgId}/blueprint/{blueprintId}",
description: "Get a blueprint by its blueprint ID.", description: "Get a blueprint by its blueprint ID.",
tags: [OpenAPITags.Org, OpenAPITags.Blueprint], tags: [OpenAPITags.Blueprint],
request: { request: {
params: getBlueprintSchema params: getBlueprintSchema
}, },

View File

@@ -67,7 +67,7 @@ registry.registerPath({
method: "get", method: "get",
path: "/org/{orgId}/blueprints", path: "/org/{orgId}/blueprints",
description: "List all blueprints for a organization.", description: "List all blueprints for a organization.",
tags: [OpenAPITags.Org, OpenAPITags.Blueprint], tags: [OpenAPITags.Blueprint],
request: { request: {
params: z.object({ params: z.object({
orgId: z.string() orgId: z.string()

View File

@@ -48,7 +48,7 @@ registry.registerPath({
method: "put", method: "put",
path: "/org/{orgId}/client", path: "/org/{orgId}/client",
description: "Create a new client for an organization.", description: "Create a new client for an organization.",
tags: [OpenAPITags.Client, OpenAPITags.Org], tags: [OpenAPITags.Client],
request: { request: {
params: createClientParamsSchema, params: createClientParamsSchema,
body: { body: {

View File

@@ -49,7 +49,7 @@ registry.registerPath({
path: "/org/{orgId}/user/{userId}/client", path: "/org/{orgId}/user/{userId}/client",
description: description:
"Create a new client for a user and associate it with an existing olm.", "Create a new client for a user and associate it with an existing olm.",
tags: [OpenAPITags.Client, OpenAPITags.Org, OpenAPITags.User], tags: [OpenAPITags.Client],
request: { request: {
params: paramsSchema, params: paramsSchema,
body: { body: {

View File

@@ -243,7 +243,7 @@ registry.registerPath({
path: "/org/{orgId}/client/{niceId}", path: "/org/{orgId}/client/{niceId}",
description: description:
"Get a client by orgId and niceId. NiceId is a readable ID for the site and unique on a per org basis.", "Get a client by orgId and niceId. NiceId is a readable ID for the site and unique on a per org basis.",
tags: [OpenAPITags.Org, OpenAPITags.Site], tags: [OpenAPITags.Site],
request: { request: {
params: z.object({ params: z.object({
orgId: z.string(), orgId: z.string(),

Some files were not shown because too many files have changed in this diff Show More