Compare commits

..

269 Commits

Author SHA1 Message Date
miloschwartz
8ed13b41d9 Merge branch 'dev' into cicd 2025-12-20 12:32:15 -05:00
Owen
b80757a129 Add blueprint validation 2025-12-20 12:16:31 -05:00
Owen
13ddf30781 Add hybrid route 2025-12-20 12:16:31 -05:00
Owen
4ecca88856 Add asn option to blueprint type 2025-12-20 12:16:31 -05:00
Thomas Wilde
4f154d212e Add ASN-based resource rule matching
- Add MaxMind ASN database integration
- Implement ASN lookup and matching in resource rule verification
- Add curated list of 100+ major ASNs (cloud, ISP, CDN, mobile carriers)
- Add ASN dropdown selector in resource rules UI with search functionality
- Support custom ASN input for unlisted ASNs
- Add 'ALL ASNs' special case handling (AS0)
- Cache ASN lookups with 5-minute TTL for performance
- Update validation schemas to support ASN match type

This allows administrators to create resource access rules based on
Autonomous System Numbers, similar to existing country-based rules.
Useful for restricting access by ISP, cloud provider, or mobile carrier.
2025-12-20 12:16:31 -05:00
dependabot[bot]
981d777a65 Bump the prod-patch-updates group across 1 directory with 6 updates
Bumps the prod-patch-updates group with 6 updates in the / directory:

| Package | From | To |
| --- | --- | --- |
| [@react-email/components](https://github.com/resend/react-email/tree/HEAD/packages/components) | `1.0.1` | `1.0.2` |
| [@react-email/tailwind](https://github.com/resend/react-email/tree/HEAD/packages/tailwind) | `2.0.1` | `2.0.2` |
| [@tailwindcss/forms](https://github.com/tailwindlabs/tailwindcss-forms) | `0.5.10` | `0.5.11` |
| [drizzle-orm](https://github.com/drizzle-team/drizzle-orm) | `0.45.0` | `0.45.1` |
| [eslint](https://github.com/eslint/eslint) | `9.39.1` | `9.39.2` |
| [posthog-node](https://github.com/PostHog/posthog-js/tree/HEAD/packages/node) | `5.17.2` | `5.17.4` |



Updates `@react-email/components` from 1.0.1 to 1.0.2
- [Release notes](https://github.com/resend/react-email/releases)
- [Changelog](https://github.com/resend/react-email/blob/canary/packages/components/CHANGELOG.md)
- [Commits](https://github.com/resend/react-email/commits/@react-email/components@1.0.2/packages/components)

Updates `@react-email/tailwind` from 2.0.1 to 2.0.2
- [Release notes](https://github.com/resend/react-email/releases)
- [Changelog](https://github.com/resend/react-email/blob/canary/packages/tailwind/CHANGELOG.md)
- [Commits](https://github.com/resend/react-email/commits/@react-email/tailwind@2.0.2/packages/tailwind)

Updates `@tailwindcss/forms` from 0.5.10 to 0.5.11
- [Release notes](https://github.com/tailwindlabs/tailwindcss-forms/releases)
- [Changelog](https://github.com/tailwindlabs/tailwindcss-forms/blob/main/CHANGELOG.md)
- [Commits](https://github.com/tailwindlabs/tailwindcss-forms/compare/v0.5.10...v0.5.11)

Updates `drizzle-orm` from 0.45.0 to 0.45.1
- [Release notes](https://github.com/drizzle-team/drizzle-orm/releases)
- [Commits](https://github.com/drizzle-team/drizzle-orm/compare/0.45.0...0.45.1)

Updates `eslint` from 9.39.1 to 9.39.2
- [Release notes](https://github.com/eslint/eslint/releases)
- [Commits](https://github.com/eslint/eslint/compare/v9.39.1...v9.39.2)

Updates `posthog-node` from 5.17.2 to 5.17.4
- [Release notes](https://github.com/PostHog/posthog-js/releases)
- [Changelog](https://github.com/PostHog/posthog-js/blob/main/packages/node/CHANGELOG.md)
- [Commits](https://github.com/PostHog/posthog-js/commits/posthog-node@5.17.4/packages/node)

---
updated-dependencies:
- dependency-name: "@react-email/components"
  dependency-version: 1.0.2
  dependency-type: direct:production
  update-type: version-update:semver-patch
  dependency-group: prod-patch-updates
- dependency-name: "@react-email/tailwind"
  dependency-version: 2.0.2
  dependency-type: direct:production
  update-type: version-update:semver-patch
  dependency-group: prod-patch-updates
- dependency-name: "@tailwindcss/forms"
  dependency-version: 0.5.11
  dependency-type: direct:production
  update-type: version-update:semver-patch
  dependency-group: prod-patch-updates
- dependency-name: drizzle-orm
  dependency-version: 0.45.1
  dependency-type: direct:production
  update-type: version-update:semver-patch
  dependency-group: prod-patch-updates
- dependency-name: eslint
  dependency-version: 9.39.2
  dependency-type: direct:production
  update-type: version-update:semver-patch
  dependency-group: prod-patch-updates
- dependency-name: posthog-node
  dependency-version: 5.17.4
  dependency-type: direct:production
  update-type: version-update:semver-patch
  dependency-group: prod-patch-updates
...

Signed-off-by: dependabot[bot] <support@github.com>
2025-12-20 12:16:31 -05:00
dependabot[bot]
dd13758085 Bump the dev-patch-updates group across 1 directory with 4 updates
Bumps the dev-patch-updates group with 4 updates in the / directory: [@dotenvx/dotenvx](https://github.com/dotenvx/dotenvx), [@tailwindcss/postcss](https://github.com/tailwindlabs/tailwindcss/tree/HEAD/packages/@tailwindcss-postcss), [esbuild](https://github.com/evanw/esbuild) and [tailwindcss](https://github.com/tailwindlabs/tailwindcss/tree/HEAD/packages/tailwindcss).


Updates `@dotenvx/dotenvx` from 1.51.1 to 1.51.2
- [Release notes](https://github.com/dotenvx/dotenvx/releases)
- [Changelog](https://github.com/dotenvx/dotenvx/blob/main/CHANGELOG.md)
- [Commits](https://github.com/dotenvx/dotenvx/compare/v1.51.1...v1.51.2)

Updates `@tailwindcss/postcss` from 4.1.17 to 4.1.18
- [Release notes](https://github.com/tailwindlabs/tailwindcss/releases)
- [Changelog](https://github.com/tailwindlabs/tailwindcss/blob/main/CHANGELOG.md)
- [Commits](https://github.com/tailwindlabs/tailwindcss/commits/v4.1.18/packages/@tailwindcss-postcss)

Updates `esbuild` from 0.27.1 to 0.27.2
- [Release notes](https://github.com/evanw/esbuild/releases)
- [Changelog](https://github.com/evanw/esbuild/blob/main/CHANGELOG.md)
- [Commits](https://github.com/evanw/esbuild/compare/v0.27.1...v0.27.2)

Updates `tailwindcss` from 4.1.17 to 4.1.18
- [Release notes](https://github.com/tailwindlabs/tailwindcss/releases)
- [Changelog](https://github.com/tailwindlabs/tailwindcss/blob/main/CHANGELOG.md)
- [Commits](https://github.com/tailwindlabs/tailwindcss/commits/v4.1.18/packages/tailwindcss)

---
updated-dependencies:
- dependency-name: "@dotenvx/dotenvx"
  dependency-version: 1.51.2
  dependency-type: direct:development
  update-type: version-update:semver-patch
  dependency-group: dev-patch-updates
- dependency-name: "@tailwindcss/postcss"
  dependency-version: 4.1.18
  dependency-type: direct:development
  update-type: version-update:semver-patch
  dependency-group: dev-patch-updates
- dependency-name: esbuild
  dependency-version: 0.27.2
  dependency-type: direct:development
  update-type: version-update:semver-patch
  dependency-group: dev-patch-updates
- dependency-name: tailwindcss
  dependency-version: 4.1.18
  dependency-type: direct:development
  update-type: version-update:semver-patch
  dependency-group: dev-patch-updates
...

Signed-off-by: dependabot[bot] <support@github.com>
2025-12-20 12:16:30 -05:00
dependabot[bot]
3d8153aeb1 Bump the prod-minor-updates group across 1 directory with 7 updates
Bumps the prod-minor-updates group with 7 updates in the / directory:

| Package | From | To |
| --- | --- | --- |
| [@aws-sdk/client-s3](https://github.com/aws/aws-sdk-js-v3/tree/HEAD/clients/client-s3) | `3.948.0` | `3.955.0` |
| [eslint-config-next](https://github.com/vercel/next.js/tree/HEAD/packages/eslint-config-next) | `16.0.8` | `16.1.0` |
| [lucide-react](https://github.com/lucide-icons/lucide/tree/HEAD/packages/lucide-react) | `0.559.0` | `0.562.0` |
| [next-intl](https://github.com/amannn/next-intl) | `4.5.8` | `4.6.1` |
| [react-day-picker](https://github.com/gpbl/react-day-picker) | `9.12.0` | `9.13.0` |
| [stripe](https://github.com/stripe/stripe-node) | `20.0.0` | `20.1.0` |
| [zod](https://github.com/colinhacks/zod) | `4.1.13` | `4.2.1` |



Updates `@aws-sdk/client-s3` from 3.948.0 to 3.955.0
- [Release notes](https://github.com/aws/aws-sdk-js-v3/releases)
- [Changelog](https://github.com/aws/aws-sdk-js-v3/blob/main/clients/client-s3/CHANGELOG.md)
- [Commits](https://github.com/aws/aws-sdk-js-v3/commits/v3.955.0/clients/client-s3)

Updates `eslint-config-next` from 16.0.8 to 16.1.0
- [Release notes](https://github.com/vercel/next.js/releases)
- [Changelog](https://github.com/vercel/next.js/blob/canary/release.js)
- [Commits](https://github.com/vercel/next.js/commits/v16.1.0/packages/eslint-config-next)

Updates `lucide-react` from 0.559.0 to 0.562.0
- [Release notes](https://github.com/lucide-icons/lucide/releases)
- [Commits](https://github.com/lucide-icons/lucide/commits/0.562.0/packages/lucide-react)

Updates `next-intl` from 4.5.8 to 4.6.1
- [Release notes](https://github.com/amannn/next-intl/releases)
- [Changelog](https://github.com/amannn/next-intl/blob/main/CHANGELOG.md)
- [Commits](https://github.com/amannn/next-intl/compare/v4.5.8...v4.6.1)

Updates `react-day-picker` from 9.12.0 to 9.13.0
- [Release notes](https://github.com/gpbl/react-day-picker/releases)
- [Changelog](https://github.com/gpbl/react-day-picker/blob/main/CHANGELOG.md)
- [Commits](https://github.com/gpbl/react-day-picker/compare/v9.12.0...v9.13.0)

Updates `stripe` from 20.0.0 to 20.1.0
- [Release notes](https://github.com/stripe/stripe-node/releases)
- [Changelog](https://github.com/stripe/stripe-node/blob/master/CHANGELOG.md)
- [Commits](https://github.com/stripe/stripe-node/compare/v20.0.0...v20.1.0)

Updates `zod` from 4.1.13 to 4.2.1
- [Release notes](https://github.com/colinhacks/zod/releases)
- [Commits](https://github.com/colinhacks/zod/compare/v4.1.13...v4.2.1)

---
updated-dependencies:
- dependency-name: "@aws-sdk/client-s3"
  dependency-version: 3.955.0
  dependency-type: direct:production
  update-type: version-update:semver-minor
  dependency-group: prod-minor-updates
- dependency-name: eslint-config-next
  dependency-version: 16.1.0
  dependency-type: direct:production
  update-type: version-update:semver-minor
  dependency-group: prod-minor-updates
- dependency-name: lucide-react
  dependency-version: 0.562.0
  dependency-type: direct:production
  update-type: version-update:semver-minor
  dependency-group: prod-minor-updates
- dependency-name: next-intl
  dependency-version: 4.6.1
  dependency-type: direct:production
  update-type: version-update:semver-minor
  dependency-group: prod-minor-updates
- dependency-name: react-day-picker
  dependency-version: 9.13.0
  dependency-type: direct:production
  update-type: version-update:semver-minor
  dependency-group: prod-minor-updates
- dependency-name: stripe
  dependency-version: 20.1.0
  dependency-type: direct:production
  update-type: version-update:semver-minor
  dependency-group: prod-minor-updates
- dependency-name: zod
  dependency-version: 4.2.1
  dependency-type: direct:production
  update-type: version-update:semver-minor
  dependency-group: prod-minor-updates
...

Signed-off-by: dependabot[bot] <support@github.com>
2025-12-20 12:16:30 -05:00
miloschwartz
9ffa391416 improve clean redirects 2025-12-20 12:00:58 -05:00
miloschwartz
afc19f192b visual enhancements to sidebar 2025-12-19 21:57:44 -05:00
miloschwartz
5587bd9d59 Merge branch 'dev' of https://github.com/fosrl/pangolin into dev 2025-12-19 17:06:57 -05:00
miloschwartz
b5f8e8feb2 add org auth slug with device auth support 2025-12-19 17:04:37 -05:00
Jacky Fong
9bd66fa306 add back the blueprints api - draft 2025-12-19 15:01:33 -05:00
Owen
fea4d43920 Make utility subnet configurable 2025-12-19 14:45:00 -05:00
miloschwartz
d414617f9d add color to product updates 2025-12-19 10:45:45 -05:00
miloschwartz
1d7e55bf98 add gradient to saas 2025-12-18 18:16:22 -05:00
miloschwartz
bc45e16109 improve local table state 2025-12-18 18:08:07 -05:00
miloschwartz
4f1dc19569 sidebar enhancements 2025-12-18 17:54:29 -05:00
miloschwartz
1af938d7ea Merge branch 'dev' of https://github.com/fosrl/pangolin into dev 2025-12-18 17:47:59 -05:00
miloschwartz
fc924f707c add banners 2025-12-18 17:47:54 -05:00
Owen
6e7ba1dc52 Prevent overlapping resources with org subnets 2025-12-18 17:08:50 -05:00
Owen
3e01bfef7d Move primaryDb into driver 2025-12-18 17:08:50 -05:00
miloschwartz
d8b662496b Merge branch 'dev' of https://github.com/fosrl/pangolin into dev 2025-12-18 16:49:48 -05:00
miloschwartz
e0de003c2c Merge branch 'dev' of https://github.com/fosrl/pangolin into dev 2025-12-18 16:49:44 -05:00
miloschwartz
6e35c182b0 Merge branch 'dev' of https://github.com/fosrl/pangolin into dev 2025-12-18 16:13:59 -05:00
miloschwartz
2479a3c53c improved private resource modal 2025-12-18 16:13:15 -05:00
Owen
6b609bb078 Force big queries onto primary db to prevent 40001 2025-12-18 16:03:15 -05:00
Owen
9c21e3da16 Merge branch 'main' into dev 2025-12-18 15:00:13 -05:00
Owen
7ccde11e3e Fix crowdsec healthcheck
Fixes #2118
2025-12-18 13:49:55 -05:00
miloschwartz
56b0185c8f visual adjustments 2025-12-18 10:58:16 -05:00
Milo Schwartz
8b47b2aabe Merge pull request #1989 from Fredkiss3/refactor/save-button-positions
refactor: save button positioning
2025-12-18 07:28:47 -08:00
miloschwartz
416fd914cb visual enhancements 2025-12-18 10:26:28 -05:00
depado
16653dd524 fix(database): filter dates evaluated at module load time 2025-12-18 10:06:31 -05:00
Fred KISSIE
e2d3d172af Merge branch 'dev' into refactor/save-button-positions 2025-12-18 04:39:17 +01:00
Fred KISSIE
137d6c2523 🏷️ fix typescript error 2025-12-18 04:36:09 +01:00
Fred KISSIE
1a976c78ef ♻️ separate org settings page into multiple forms 2025-12-18 04:27:24 +01:00
miloschwartz
e309a125f5 move blueprints on sidebar 2025-12-17 22:23:35 -05:00
Owen
2bdb1ddb6f Update lock 2025-12-17 22:14:57 -05:00
Owen
8ff588407c Fix various small issues; blueprints working 2025-12-17 22:12:16 -05:00
Owen
c2e06725a8 Keep the same site resource id 2025-12-17 22:12:16 -05:00
Owen
bb43e0c325 Handle changing site by recreating site resource 2025-12-17 22:12:16 -05:00
Owen
35ea01610a Update API routes and ui 2025-12-17 22:12:16 -05:00
miloschwartz
79eefc0ac7 blueprints enhancements 2025-12-17 22:05:36 -05:00
miloschwartz
3a781f9ac4 add contrast to sidebar 2025-12-17 21:28:46 -05:00
Owen
cc1e551f43 Fix deleting site not terminating newt 2025-12-17 21:23:25 -05:00
miloschwartz
68191d5921 fix refresh button doesnt work for users table 2025-12-17 21:19:50 -05:00
Fred KISSIE
2b3d065650 Merge branch 'dev' into refactor/save-button-positions 2025-12-18 01:46:13 +01:00
Fred KISSIE
7ae80d2cad ♻️ apply domain picker from dev 2025-12-18 00:20:19 +01:00
miloschwartz
acf08e3ef6 simplify idp create selector 2025-12-17 16:56:31 -05:00
miloschwartz
6f50fb8a4f remote node changes 2025-12-17 16:50:39 -05:00
miloschwartz
a5b203af27 add rotate server secret command 2025-12-17 16:23:11 -05:00
miloschwartz
443b53ee37 add clear exit nodes pangctl command 2025-12-17 16:01:55 -05:00
miloschwartz
e033c10021 simplify animation 2025-12-17 15:52:12 -05:00
miloschwartz
ad4c44c325 visual adjustments 2025-12-17 15:34:36 -05:00
miloschwartz
4aef7ca8d5 small branding fixes and adjustments 2025-12-17 15:02:39 -05:00
Owen
f892acbc4c Add tcp, udp ports, and icmp to blueprints 2025-12-17 11:58:45 -05:00
Milo Schwartz
9010ed6237 Merge pull request #1846 from Fredkiss3/feat/login-page-customization
feat: login page customization
2025-12-17 08:42:55 -08:00
miloschwartz
9f29657570 Merge branch 'dev' into feat/login-page-customization 2025-12-17 11:41:17 -05:00
Milo Schwartz
1b13132845 Merge pull request #2033 from buggystick/feature/oidc-email-error
Add OIDC authentication error response support
2025-12-17 08:21:06 -08:00
Milo Schwartz
553fda265c Merge branch 'dev' into feature/oidc-email-error 2025-12-17 08:20:40 -08:00
miloschwartz
0f79826535 fix useEffect re-render too many times 2025-12-17 11:18:30 -05:00
miloschwartz
14438bd2b4 remove desc text 2025-12-17 10:47:51 -05:00
Milo Schwartz
c4445c329f Merge pull request #2034 from Fredkiss3/refactor/domain-picker-default-value
refactor: Update `<DomainPicker />` to accept default values
2025-12-17 07:40:46 -08:00
miloschwartz
5c032ee0c3 add larger header text 2025-12-17 10:34:49 -05:00
Fred KISSIE
d3d5a1c204 🚸 trigger null domain change if the user switches from another domain type to free domain option to prevent the modal from registering it as a valid value 2025-12-17 05:22:04 +01:00
miloschwartz
809bb4a7b4 adjustments to dialog spacing 2025-12-16 23:19:18 -05:00
miloschwartz
e8f763a77f fix broken link 2025-12-16 23:06:12 -05:00
Fred KISSIE
3ad4a76f03 ♻️ pass default subdomain value to org auth page settings 2025-12-17 05:05:30 +01:00
Fred KISSIE
b133593ea2 🚸 now the domain picker is deterministic 2025-12-17 04:57:16 +01:00
Owen
43fb06084f Alias should not get double regex 2025-12-16 21:54:14 -05:00
Owen
9de39dbe42 Support wildcard resources 2025-12-16 21:54:14 -05:00
Fred KISSIE
c98d61a8fb ♻️ pass default value to domain picker 2025-12-17 02:36:29 +01:00
Fred KISSIE
fccff9c23a Merge branch 'dev' into refactor/domain-picker-default-value 2025-12-17 00:52:32 +01:00
Fred KISSIE
e02fa7c148 ♻️ pass the default domainId instead of the base domain 2025-12-17 00:52:12 +01:00
Owen
a21029582e Always send the relay port config 2025-12-16 18:38:20 -05:00
Fred KISSIE
9ef7faace7 🚧 wip 2025-12-16 23:45:53 +01:00
Owen
3d5ae9dd5c Disable icmp packets over private resources 2025-12-16 17:20:18 -05:00
miloschwartz
6072ee93fa add remove invitation to integration api 2025-12-16 17:17:03 -05:00
Owen
7f7f6eeaea Check the postgres string first
Fixes #2092
2025-12-16 10:42:32 -05:00
Owen
1b4884afd8 Make sure to push changes 2025-12-15 22:13:56 -05:00
Owen
0c0ad7029f Batch and delay for large amounts of targets 2025-12-15 22:13:56 -05:00
Owen
10f1437496 Small visual adjustments 2025-12-15 22:13:56 -05:00
Owen
c44c1a5518 Add UI, update API, send to newt 2025-12-15 22:13:56 -05:00
Owen Schwartz
48110ccda3 Merge pull request #2080 from water-sucks/server-setup-token-env-var
feat(setup): allow declaring a server setup token through env variable
2025-12-15 21:04:05 -05:00
Varun Narravula
e94f21bc05 ci: parallelize test workflow 2025-12-15 21:03:47 -05:00
Owen Schwartz
65f8a414be Merge pull request #2084 from water-sucks/parallelize-test-ci-workflow
ci: parallelize test workflow
2025-12-15 20:57:48 -05:00
Fred KISSIE
8dad38775c 🐛 use /resource instead of /site-resource 2025-12-16 01:53:20 +01:00
Fred KISSIE
0d14cb853e ♻️ invalidate everything & fix use effect condition 2025-12-16 01:53:06 +01:00
Fred KISSIE
778e6bf623 💄 lower margin y 2025-12-16 00:27:24 +01:00
miloschwartz
5a960649db fix generate password reset code only shows for non internal users 2025-12-15 18:06:29 -05:00
Fred KISSIE
23a7688789 💄 more margin top 2025-12-15 23:51:06 +01:00
Owen
0e3b6b90b7 Send reply to email in support requests 2025-12-15 17:43:45 -05:00
Fred KISSIE
872bb557c2 💄 put save org settings button into the form 2025-12-15 23:36:13 +01:00
Fred KISSIE
9125a7bccb 🚧 org settings form 2025-12-15 23:18:28 +01:00
Fred KISSIE
5a0a8893e8 Merge branch 'dev' into refactor/save-button-positions 2025-12-15 17:04:58 +01:00
Varun Narravula
abe76e5002 ci: parallelize test workflow 2025-12-15 05:30:43 -08:00
Varun Narravula
474b9a685d feat(setup): allow declaring a server setup token through env variable 2025-12-14 16:24:17 -08:00
Owen
97631c068c Clean key
Ref #1806
2025-12-14 15:58:29 -05:00
Owen Schwartz
98c77ad7e2 Update README.md 2025-12-14 03:09:45 -05:00
Owen Schwartz
3915df3200 Merge pull request #2068 from mgruszkiewicz/fix-missing-gpg-in-installer
Fix: Add missing gnupg utility during Docker installation
2025-12-13 14:28:04 -05:00
Mateusz Gruszkiewicz
9b98acb553 fix missing gpg dependency which is preventing docker from installing correctly 2025-12-13 19:27:15 +01:00
Owen
a767a31c21 Quiet log message 2025-12-13 12:28:44 -05:00
Owen
f2d4c2f83c Remove duplicate target 2025-12-13 12:16:11 -05:00
Owen
25fed23758 Speed up build 2025-12-13 12:13:33 -05:00
Owen Schwartz
5cb3fa1127 Merge pull request #2066 from fosrl/dev
Dev
2025-12-13 12:09:22 -05:00
Owen
deac26bad2 Bump version 2025-12-13 12:07:35 -05:00
miloschwartz
c7747fd4b4 add license watermark 2025-12-13 11:45:15 -05:00
Owen
1aaad43871 Format 2025-12-13 11:36:53 -05:00
Owen
143175bde7 Update react-dom 2025-12-13 11:34:58 -05:00
Owen
9f55d6b20a Try to fix issue not sending newt commands 2025-12-13 11:19:42 -05:00
miloschwartz
4366ca5836 add spacing to delete modal 2025-12-13 10:57:24 -05:00
miloschwartz
9cb95576d0 Merge branch 'dev' into cicd 2025-12-12 23:08:06 -05:00
miloschwartz
d5307adef0 fix bug preventing save resource priority closes #2063 2025-12-12 22:52:00 -05:00
miloschwartz
3d857c3b52 fix client side pagination issue 2025-12-12 22:41:10 -05:00
Owen
a012369f83 Make sure to always check retention first
Fixes #2061
2025-12-12 18:39:13 -05:00
Fred KISSIE
9cee3d9c79 ♻️ refactor 2025-12-12 23:35:24 +01:00
Fred KISSIE
8257dca340 ♻️ refactor 2025-12-12 23:34:35 +01:00
Fred KISSIE
5e0a1cf9c5 💡remove comment 2025-12-12 22:09:37 +01:00
miloschwartz
b3ec9dfda2 split builds based on arch 2025-12-12 15:56:42 -05:00
Fred KISSIE
93d4f60314 ♻️correctly init the form 2025-12-12 21:55:23 +01:00
Fred KISSIE
769d20cea1 Merge branch 'dev' into refactor/save-button-positions 2025-12-12 21:42:06 +01:00
Fred KISSIE
124ba208de ♻️ use react query 2025-12-12 21:40:49 +01:00
Owen
ba99614d58 Merge branch 'dev' of github.com:fosrl/pangolin into dev 2025-12-12 14:54:59 -05:00
Owen
27db77bca4 Format 2025-12-12 14:53:26 -05:00
miloschwartz
29b924230f add runner restart action 2025-12-12 14:48:49 -05:00
Owen
8eb3f6aacc Bump next and react again
CVE-2025-55184 and CVE-2025-67779
2025-12-12 09:55:52 -05:00
Fred KISSIE
7f07ccea44 Merge branch 'dev' into refactor/save-button-positions 2025-12-12 00:32:02 +01:00
Fred KISSIE
c13bfc709f Merge branch 'dev' into refactor/save-button-positions 2025-12-11 23:35:00 +01:00
Fred KISSIE
6fc54bcc9e ♻️ set default value on domain picker modal in proxy resource page 2025-12-11 22:51:02 +01:00
Owen
5d6ee45125 Merge branch 'dev' 2025-12-11 16:49:40 -05:00
Owen Schwartz
fceaedfcd8 Merge pull request #2045 from Fredkiss3/fix/update-full-domain-on-resource-page
fix: full domain should be updated when the form is saved on resource proxy page
2025-12-11 16:49:22 -05:00
Fred KISSIE
181612ce25 🐛 full domain should be updated when the form is saved 2025-12-11 22:26:38 +01:00
Owen
224b78fc64 Update consts 2025-12-11 16:13:33 -05:00
Owen
757e540be6 Merge branch 'main' into dev 2025-12-11 16:12:08 -05:00
Milo Schwartz
bf1675686c Update README.md 2025-12-11 15:44:47 -05:00
miloschwartz
f81909489a add client telemetry and fix missing openapi on prefault 2025-12-11 10:38:48 -05:00
miloschwartz
963468d7fa remove top border from dialog 2025-12-11 10:17:17 -05:00
miloschwartz
f67f4f8834 update screenshots and readme 2025-12-10 21:13:09 -05:00
Owen
4c819d264b Only permit ipv4 for now 2025-12-10 20:40:22 -05:00
Owen Schwartz
cbcb23ccea Merge pull request #2036 from fosrl/dependabot/npm_and_yarn/dev-minor-updates-316ddb12fb
Bump @types/pg from 8.15.6 to 8.16.0 in the dev-minor-updates group
2025-12-10 20:35:49 -05:00
Owen Schwartz
d8b27de5ac Merge pull request #2038 from fosrl/dependabot/npm_and_yarn/prod-minor-updates-0a59212de9
Bump the prod-minor-updates group across 1 directory with 4 updates
2025-12-10 20:35:43 -05:00
Owen
01f7842fd5 Fix function rename issue 2025-12-10 20:34:40 -05:00
Owen Schwartz
d409e58186 Merge pull request #2020 from Fredkiss3/fix/log-analytics-adjustments
refactor: adjustments for logs pages
2025-12-10 20:33:12 -05:00
Owen Schwartz
c9e1c4da1c Merge pull request #2026 from fosrl/crowdin_dev
New Crowdin updates
2025-12-10 20:25:31 -05:00
dependabot[bot]
9c38f65ad4 Bump the prod-minor-updates group across 1 directory with 4 updates
Bumps the prod-minor-updates group with 4 updates in the / directory: [@aws-sdk/client-s3](https://github.com/aws/aws-sdk-js-v3/tree/HEAD/clients/client-s3), [lucide-react](https://github.com/lucide-icons/lucide/tree/HEAD/packages/lucide-react), [npm](https://github.com/npm/cli) and [resend](https://github.com/resend/resend-node).


Updates `@aws-sdk/client-s3` from 3.947.0 to 3.948.0
- [Release notes](https://github.com/aws/aws-sdk-js-v3/releases)
- [Changelog](https://github.com/aws/aws-sdk-js-v3/blob/main/clients/client-s3/CHANGELOG.md)
- [Commits](https://github.com/aws/aws-sdk-js-v3/commits/v3.948.0/clients/client-s3)

Updates `lucide-react` from 0.556.0 to 0.559.0
- [Release notes](https://github.com/lucide-icons/lucide/releases)
- [Commits](https://github.com/lucide-icons/lucide/commits/0.559.0/packages/lucide-react)

Updates `npm` from 11.6.4 to 11.7.0
- [Release notes](https://github.com/npm/cli/releases)
- [Changelog](https://github.com/npm/cli/blob/latest/CHANGELOG.md)
- [Commits](https://github.com/npm/cli/compare/v11.6.4...v11.7.0)

Updates `resend` from 6.5.2 to 6.6.0
- [Release notes](https://github.com/resend/resend-node/releases)
- [Commits](https://github.com/resend/resend-node/compare/v6.5.2...v6.6.0)

---
updated-dependencies:
- dependency-name: "@aws-sdk/client-s3"
  dependency-version: 3.948.0
  dependency-type: direct:production
  update-type: version-update:semver-minor
  dependency-group: prod-minor-updates
- dependency-name: lucide-react
  dependency-version: 0.559.0
  dependency-type: direct:production
  update-type: version-update:semver-minor
  dependency-group: prod-minor-updates
- dependency-name: npm
  dependency-version: 11.7.0
  dependency-type: direct:production
  update-type: version-update:semver-minor
  dependency-group: prod-minor-updates
- dependency-name: resend
  dependency-version: 6.6.0
  dependency-type: direct:production
  update-type: version-update:semver-minor
  dependency-group: prod-minor-updates
...

Signed-off-by: dependabot[bot] <support@github.com>
2025-12-11 01:21:19 +00:00
dependabot[bot]
2316462721 Bump @types/pg from 8.15.6 to 8.16.0 in the dev-minor-updates group
Bumps the dev-minor-updates group with 1 update: [@types/pg](https://github.com/DefinitelyTyped/DefinitelyTyped/tree/HEAD/types/pg).


Updates `@types/pg` from 8.15.6 to 8.16.0
- [Release notes](https://github.com/DefinitelyTyped/DefinitelyTyped/releases)
- [Commits](https://github.com/DefinitelyTyped/DefinitelyTyped/commits/HEAD/types/pg)

---
updated-dependencies:
- dependency-name: "@types/pg"
  dependency-version: 8.16.0
  dependency-type: direct:development
  update-type: version-update:semver-minor
  dependency-group: dev-minor-updates
...

Signed-off-by: dependabot[bot] <support@github.com>
2025-12-11 01:19:20 +00:00
Owen Schwartz
7cc990107a Merge pull request #2035 from fosrl/dependabot/npm_and_yarn/dev-patch-updates-3ea7ca757b
Bump react-email from 5.0.6 to 5.0.7 in the dev-patch-updates group
2025-12-10 20:19:00 -05:00
dependabot[bot]
9917a569ac Bump react-email from 5.0.6 to 5.0.7 in the dev-patch-updates group
Bumps the dev-patch-updates group with 1 update: [react-email](https://github.com/resend/react-email/tree/HEAD/packages/react-email).


Updates `react-email` from 5.0.6 to 5.0.7
- [Release notes](https://github.com/resend/react-email/releases)
- [Changelog](https://github.com/resend/react-email/blob/canary/packages/react-email/CHANGELOG.md)
- [Commits](https://github.com/resend/react-email/commits/react-email@5.0.7/packages/react-email)

---
updated-dependencies:
- dependency-name: react-email
  dependency-version: 5.0.7
  dependency-type: direct:development
  update-type: version-update:semver-patch
  dependency-group: dev-patch-updates
...

Signed-off-by: dependabot[bot] <support@github.com>
2025-12-11 01:18:06 +00:00
Fred KISSIE
aab0471b6b 🏷️ fix typescript errors 2025-12-10 21:26:55 +01:00
Fred KISSIE
de684b212f 🔇 remove console.log 2025-12-10 21:26:46 +01:00
Fred KISSIE
fbd3802e46 ♻️ Update domain picker component to accept default values 2025-12-10 21:17:00 +01:00
Fred KISSIE
4e842a660a 🚧 wip: refactor proxy resource page 2025-12-10 21:15:42 +01:00
Fred KISSIE
ce6b609ca2 ♻️ Update domain picker component to accept default values 2025-12-10 21:15:26 +01:00
David Reed
78369b6f6a Add OIDC authentication error response support 2025-12-10 11:13:04 -08:00
Fred KISSIE
ea43bf97c7 Merge branch 'dev' into refactor/save-button-positions 2025-12-10 20:04:59 +01:00
Owen
c56574e431 Send site add in case the client does not have the site 2025-12-10 11:57:45 -05:00
Fred KISSIE
f9c0e0ec3d 💬 updated text 2025-12-10 03:56:08 +01:00
Fred KISSIE
85986dcccb Merge branch 'dev' into fix/log-analytics-adjustments 2025-12-10 03:49:40 +01:00
Fred KISSIE
c9779254c3 💄add time range tooltip to explain it better 2025-12-10 03:44:46 +01:00
Fred KISSIE
5b620469c7 ♻️ set export logs limits to 50 000 everywhere 2025-12-10 03:42:53 +01:00
Fred KISSIE
df4b9de334 🚧 wip: export limits 2025-12-10 03:24:32 +01:00
Fred KISSIE
d490cab48c Merge dev into fix/log-analytics-adjustments 2025-12-10 03:19:14 +01:00
miloschwartz
b68c0962c6 visual enhancements 2025-12-09 20:58:45 -05:00
Owen
ee2a438602 Merge branch 'main' into dev 2025-12-09 16:26:21 -05:00
Owen
74dd3fdc9f Update packages 2025-12-09 16:18:20 -05:00
Owen
314da3ee3e Update formatting to work with ipv6 2025-12-09 16:11:12 -05:00
Owen Schwartz
68cfc84249 New translations en-us.json (German) 2025-12-09 14:09:22 -05:00
Owen
0bcf5c2b42 Update packages 2025-12-09 12:09:23 -05:00
Owen
9210e005e9 Merge branch 'main' into dev 2025-12-09 12:08:32 -05:00
Owen
f245632371 Fix expires at not updating 2025-12-09 11:50:48 -05:00
miloschwartz
6453b070bb add more resiliency to the license check 2025-12-09 11:26:11 -05:00
Owen Schwartz
8c4db93a93 Merge pull request #2024 from fosrl/dependabot/npm_and_yarn/multi-1eaea4558a
Bump next and @react-email/preview-server
2025-12-09 10:57:54 -05:00
Owen
f9b03943c3 Format all files 2025-12-09 10:56:14 -05:00
Owen
fa839a811f Merge branch 'Fredkiss3-chore/some-dx-changes' into dev 2025-12-09 10:54:35 -05:00
Owen
88d2c2eac8 Merge branch 'chore/some-dx-changes' of github.com:Fredkiss3/pangolin into Fredkiss3-chore/some-dx-changes 2025-12-09 10:54:28 -05:00
dependabot[bot]
c84cc1815b Bump next and @react-email/preview-server
Bumps [next](https://github.com/vercel/next.js) to 15.5.7 and updates ancestor dependency [@react-email/preview-server](https://github.com/resend/react-email/tree/HEAD/packages/preview-server). These dependencies need to be updated together.


Updates `next` from 15.5.2 to 15.5.7
- [Release notes](https://github.com/vercel/next.js/releases)
- [Changelog](https://github.com/vercel/next.js/blob/canary/release.js)
- [Commits](https://github.com/vercel/next.js/compare/v15.5.2...v15.5.7)

Updates `@react-email/preview-server` from 4.3.2 to 5.0.6
- [Release notes](https://github.com/resend/react-email/releases)
- [Changelog](https://github.com/resend/react-email/blob/canary/packages/preview-server/CHANGELOG.md)
- [Commits](https://github.com/resend/react-email/commits/@react-email/preview-server@5.0.6/packages/preview-server)

---
updated-dependencies:
- dependency-name: next
  dependency-version: 15.5.7
  dependency-type: indirect
- dependency-name: "@react-email/preview-server"
  dependency-version: 5.0.6
  dependency-type: direct:development
...

Signed-off-by: dependabot[bot] <support@github.com>
2025-12-09 15:50:48 +00:00
Owen
2c23ffd178 Merge branch 'dev' of github.com:fosrl/pangolin into dev 2025-12-09 10:50:33 -05:00
Owen Schwartz
da3f7ae404 Merge pull request #2018 from fosrl/dependabot/npm_and_yarn/prod-minor-updates-a5bcaae1b2
Bump the prod-minor-updates group across 1 directory with 4 updates
2025-12-09 10:49:02 -05:00
Owen Schwartz
f460559a4b Merge pull request #2021 from fosrl/dependabot/npm_and_yarn/dev-patch-updates-cd2d8c1767
Bump @types/node from 24.10.1 to 24.10.2 in the dev-patch-updates group
2025-12-09 10:48:10 -05:00
Owen Schwartz
0c9deeb2d7 Merge pull request #2022 from fosrl/dependabot/go_modules/install/prod-minor-updates-f9eb33633d
Bump golang.org/x/term from 0.37.0 to 0.38.0 in /install in the prod-minor-updates group
2025-12-09 10:48:02 -05:00
Owen Schwartz
1289b99f14 Merge pull request #2009 from fosrl/dependabot/npm_and_yarn/stripe-20.0.0
Bump stripe from 18.2.1 to 20.0.0
2025-12-09 10:47:54 -05:00
Owen Schwartz
1a7a6e5b6f Merge pull request #2007 from fosrl/dependabot/npm_and_yarn/react-email/render-2.0.0
Bump @react-email/render from 1.4.0 to 2.0.0
2025-12-09 10:47:00 -05:00
Owen Schwartz
f56135eed3 Merge pull request #2011 from Lokowitz/fix-dev
Update packages
2025-12-09 10:46:32 -05:00
Owen
23e9a61f3e Fixing various bugs 2025-12-09 10:31:43 -05:00
Lokowitz
5428ad1009 merge upstream 2025-12-09 11:40:57 +00:00
Lokowitz
bba28bc5f2 Merge remote-tracking branch 'upstream/dev' into fix-dev 2025-12-09 11:40:04 +00:00
Owen
18498a32ce Quiet log messages 2025-12-08 22:07:17 -05:00
Owen
887af85db1 Fix removing remote subnet on remove site resource 2025-12-08 22:06:37 -05:00
Owen
a306aa971b Pick client endpoint as part of the transaction 2025-12-08 21:37:17 -05:00
Owen
0a9b19ecfc Try to fix deadlocks again
Fixes FOU-284
2025-12-08 21:26:23 -05:00
Owen
e011580b96 Update and add server version 2025-12-08 21:26:23 -05:00
miloschwartz
048ce850a8 get country using maxmind and clear stale device codes 2025-12-08 21:12:19 -05:00
dependabot[bot]
2ca1f15add Bump the prod-minor-updates group across 1 directory with 4 updates
Bumps the prod-minor-updates group with 4 updates in the / directory: [@asteasolutions/zod-to-openapi](https://github.com/asteasolutions/zod-to-openapi), [@aws-sdk/client-s3](https://github.com/aws/aws-sdk-js-v3/tree/HEAD/clients/client-s3), [react-day-picker](https://github.com/gpbl/react-day-picker) and [winston](https://github.com/winstonjs/winston).


Updates `@asteasolutions/zod-to-openapi` from 8.1.0 to 8.2.0
- [Release notes](https://github.com/asteasolutions/zod-to-openapi/releases)
- [Commits](https://github.com/asteasolutions/zod-to-openapi/compare/v8.1.0...v8.2.0)

Updates `@aws-sdk/client-s3` from 3.943.0 to 3.946.0
- [Release notes](https://github.com/aws/aws-sdk-js-v3/releases)
- [Changelog](https://github.com/aws/aws-sdk-js-v3/blob/main/clients/client-s3/CHANGELOG.md)
- [Commits](https://github.com/aws/aws-sdk-js-v3/commits/v3.946.0/clients/client-s3)

Updates `react-day-picker` from 9.11.3 to 9.12.0
- [Release notes](https://github.com/gpbl/react-day-picker/releases)
- [Changelog](https://github.com/gpbl/react-day-picker/blob/main/CHANGELOG.md)
- [Commits](https://github.com/gpbl/react-day-picker/compare/v9.11.3...v9.12.0)

Updates `winston` from 3.18.3 to 3.19.0
- [Release notes](https://github.com/winstonjs/winston/releases)
- [Changelog](https://github.com/winstonjs/winston/blob/master/CHANGELOG.md)
- [Commits](https://github.com/winstonjs/winston/compare/v3.18.3...v3.19.0)

---
updated-dependencies:
- dependency-name: "@asteasolutions/zod-to-openapi"
  dependency-version: 8.2.0
  dependency-type: direct:production
  update-type: version-update:semver-minor
  dependency-group: prod-minor-updates
- dependency-name: "@aws-sdk/client-s3"
  dependency-version: 3.946.0
  dependency-type: direct:production
  update-type: version-update:semver-minor
  dependency-group: prod-minor-updates
- dependency-name: react-day-picker
  dependency-version: 9.12.0
  dependency-type: direct:production
  update-type: version-update:semver-minor
  dependency-group: prod-minor-updates
- dependency-name: winston
  dependency-version: 3.19.0
  dependency-type: direct:production
  update-type: version-update:semver-minor
  dependency-group: prod-minor-updates
...

Signed-off-by: dependabot[bot] <support@github.com>
2025-12-09 01:23:08 +00:00
dependabot[bot]
05ebd547b5 Bump golang.org/x/term in /install in the prod-minor-updates group
Bumps the prod-minor-updates group in /install with 1 update: [golang.org/x/term](https://github.com/golang/term).


Updates `golang.org/x/term` from 0.37.0 to 0.38.0
- [Commits](https://github.com/golang/term/compare/v0.37.0...v0.38.0)

---
updated-dependencies:
- dependency-name: golang.org/x/term
  dependency-version: 0.38.0
  dependency-type: direct:production
  update-type: version-update:semver-minor
  dependency-group: prod-minor-updates
...

Signed-off-by: dependabot[bot] <support@github.com>
2025-12-09 01:21:11 +00:00
dependabot[bot]
5a8b1383a4 Bump @types/node from 24.10.1 to 24.10.2 in the dev-patch-updates group
Bumps the dev-patch-updates group with 1 update: [@types/node](https://github.com/DefinitelyTyped/DefinitelyTyped/tree/HEAD/types/node).


Updates `@types/node` from 24.10.1 to 24.10.2
- [Release notes](https://github.com/DefinitelyTyped/DefinitelyTyped/releases)
- [Commits](https://github.com/DefinitelyTyped/DefinitelyTyped/commits/HEAD/types/node)

---
updated-dependencies:
- dependency-name: "@types/node"
  dependency-version: 24.10.2
  dependency-type: direct:development
  update-type: version-update:semver-patch
  dependency-group: dev-patch-updates
...

Signed-off-by: dependabot[bot] <support@github.com>
2025-12-09 01:20:03 +00:00
miloschwartz
ede51bebb5 use semver to compare versions in product updates 2025-12-08 19:51:32 -05:00
Owen Schwartz
fd29071d57 Merge pull request #2004 from fosrl/dependabot/github_actions/actions/checkout-6.0.1
Bump actions/checkout from 6.0.0 to 6.0.1
2025-12-08 19:48:01 -05:00
Owen Schwartz
8e1af79dc4 Merge pull request #2003 from fosrl/dependabot/github_actions/actions/setup-node-6.1.0
Bump actions/setup-node from 6.0.0 to 6.1.0
2025-12-08 19:47:48 -05:00
Owen Schwartz
dc8c28626d Merge pull request #2002 from fosrl/dependabot/github_actions/actions/stale-10.1.1
Bump actions/stale from 10.1.0 to 10.1.1
2025-12-08 19:47:40 -05:00
Fred KISSIE
9db2feff77 ♻️ set default time to 7 days ago in API too 2025-12-09 00:17:34 +01:00
Fred KISSIE
adf76bfb53 ♻️ set default start time to 7 days ago 2025-12-08 23:56:28 +01:00
Fred KISSIE
e0a79b7d4d ♻️ set default log analytics time range to 7 days ago 2025-12-08 22:57:05 +01:00
dependabot[bot]
9ea3914a93 Bump @react-email/render from 1.4.0 to 2.0.0
Bumps [@react-email/render](https://github.com/resend/react-email/tree/HEAD/packages/render) from 1.4.0 to 2.0.0.
- [Release notes](https://github.com/resend/react-email/releases)
- [Changelog](https://github.com/resend/react-email/blob/canary/packages/render/CHANGELOG.md)
- [Commits](https://github.com/resend/react-email/commits/@react-email/render@2.0.0/packages/render)

---
updated-dependencies:
- dependency-name: "@react-email/render"
  dependency-version: 2.0.0
  dependency-type: direct:production
  update-type: version-update:semver-major
...

Signed-off-by: dependabot[bot] <support@github.com>
2025-12-08 19:13:04 +00:00
miloschwartz
1aeb31be04 remove file 2025-12-08 14:12:10 -05:00
Fred KISSIE
64120ea878 🔨Add format script and install prettier 2025-12-08 19:57:08 +01:00
Fred KISSIE
0003ec021b 🔨add default vscode options for new contributors 2025-12-08 19:56:53 +01:00
Lokowitz
c9a1da210f revert my fix 2025-12-08 08:27:05 +00:00
Lokowitz
ace402af2d update packages 2025-12-08 08:23:32 +00:00
Lokowitz
e60dce25c9 Merge remote-tracking branch 'upstream/dev' into fix-dev
merge dev
2025-12-08 08:21:19 +00:00
dependabot[bot]
ccfff030e5 Bump stripe from 18.2.1 to 20.0.0
Bumps [stripe](https://github.com/stripe/stripe-node) from 18.2.1 to 20.0.0.
- [Release notes](https://github.com/stripe/stripe-node/releases)
- [Changelog](https://github.com/stripe/stripe-node/blob/master/CHANGELOG.md)
- [Commits](https://github.com/stripe/stripe-node/compare/v18.2.1...v20.0.0)

---
updated-dependencies:
- dependency-name: stripe
  dependency-version: 20.0.0
  dependency-type: direct:production
  update-type: version-update:semver-major
...

Signed-off-by: dependabot[bot] <support@github.com>
2025-12-08 01:25:10 +00:00
dependabot[bot]
00765c1faf Bump actions/checkout from 6.0.0 to 6.0.1
Bumps [actions/checkout](https://github.com/actions/checkout) from 6.0.0 to 6.0.1.
- [Release notes](https://github.com/actions/checkout/releases)
- [Changelog](https://github.com/actions/checkout/blob/main/CHANGELOG.md)
- [Commits](1af3b93b68...8e8c483db8)

---
updated-dependencies:
- dependency-name: actions/checkout
  dependency-version: 6.0.1
  dependency-type: direct:production
  update-type: version-update:semver-patch
...

Signed-off-by: dependabot[bot] <support@github.com>
2025-12-08 01:16:59 +00:00
dependabot[bot]
f6bbdeadb9 Bump actions/setup-node from 6.0.0 to 6.1.0
Bumps [actions/setup-node](https://github.com/actions/setup-node) from 6.0.0 to 6.1.0.
- [Release notes](https://github.com/actions/setup-node/releases)
- [Commits](2028fbc5c2...395ad32622)

---
updated-dependencies:
- dependency-name: actions/setup-node
  dependency-version: 6.1.0
  dependency-type: direct:production
  update-type: version-update:semver-minor
...

Signed-off-by: dependabot[bot] <support@github.com>
2025-12-08 01:16:53 +00:00
dependabot[bot]
9cf520574a Bump actions/stale from 10.1.0 to 10.1.1
Bumps [actions/stale](https://github.com/actions/stale) from 10.1.0 to 10.1.1.
- [Release notes](https://github.com/actions/stale/releases)
- [Changelog](https://github.com/actions/stale/blob/main/CHANGELOG.md)
- [Commits](5f858e3efb...997185467f)

---
updated-dependencies:
- dependency-name: actions/stale
  dependency-version: 10.1.1
  dependency-type: direct:production
  update-type: version-update:semver-patch
...

Signed-off-by: dependabot[bot] <support@github.com>
2025-12-08 01:16:48 +00:00
Lokowitz
f8ab5b7af7 update packages 2025-12-07 14:03:34 +00:00
Fred KISSIE
72bc26f0f8 💬 update texts to be more specific 2025-12-06 01:14:15 +01:00
Fred KISSIE
2ec2295cd6 ♻️ separate proxy page into multiple forms 2025-12-06 00:51:36 +01:00
Fred KISSIE
a0a369dc43 ♻️ refactor reverse proxy targets page 2025-12-05 23:10:10 +01:00
Fred KISSIE
d0157ea7a5 Merge branch 'dev' into feat/login-page-customization 2025-12-05 22:38:07 +01:00
Fred KISSIE
d89f5279bf ♻️address PR feedback 2025-12-05 01:08:02 +01:00
Fred KISSIE
744305ab39 ♻️ refactor 2025-12-05 00:02:13 +01:00
Fred KISSIE
ba9048a377 Merge branch 'dev' into feat/login-page-customization 2025-12-04 23:56:16 +01:00
Fred KISSIE
ff089ec6d7 📦update lockfile 2025-11-18 03:48:41 +01:00
Fred KISSIE
dc4f9a9bd1 ♻️ check for licence when checking for subscription 2025-11-18 03:32:05 +01:00
Fred KISSIE
e867de023a ♻️ load branding only if correctly subscribed 2025-11-18 03:14:20 +01:00
Fred KISSIE
e00c3f2193 🛂 check for subscription status 2025-11-18 02:46:22 +01:00
Fred KISSIE
8c30995228 ♻️ refactor 2025-11-18 02:38:08 +01:00
Fred KISSIE
3ba65a3311 ♻️ check for disabled features in general org settings page 2025-11-18 02:35:11 +01:00
Fred KISSIE
c5914dc0c0 ♻️ Also check for active subscription in paid status hook 2025-11-18 02:26:49 +01:00
Fred KISSIE
30f3ab11b2 🚚 rename SecurityFeaturesAlert to PaidFeaturesAlert 2025-11-18 02:26:25 +01:00
Fred KISSIE
66b01b764f ♻️ adapt zod schema to v4 and move form description below the input 2025-11-18 01:07:46 +01:00
Fred KISSIE
ee7e7778b6 ♻️commit 2025-11-17 22:23:11 +01:00
Fred KISSIE
0d0c43f72b Merge branch 'dev' into feat/login-page-customization 2025-11-17 22:18:32 +01:00
Fred KISSIE
83f36bce9d ♻️refactor 2025-11-17 22:17:55 +01:00
Fred KISSIE
2466d24c1a 🔥remove unused imports 2025-11-15 07:08:07 +01:00
Fred KISSIE
2f34def4d7 ♻️ correctly apply the CSS variable 2025-11-15 07:06:20 +01:00
Fred KISSIE
8e8f992876 💡add comment 2025-11-15 07:04:36 +01:00
Fred KISSIE
1d9ed9d219 💡remove useless comments 2025-11-15 07:01:27 +01:00
Fred KISSIE
616fb9c8e9 ♻️remove unused imports 2025-11-15 06:59:15 +01:00
Fred KISSIE
a2ab7191e5 🔇remove log 2025-11-15 06:58:05 +01:00
Fred KISSIE
7a31292ec7 revert package.json changes 2025-11-15 06:34:40 +01:00
Fred KISSIE
196fbbe334 📦update lockfile 2025-11-15 06:32:45 +01:00
Fred KISSIE
5bb5aeff36 Merge branch 'dev' into feat/login-page-customization 2025-11-15 06:32:03 +01:00
Fred KISSIE
2ada05b286 ♻️only apply org branding in saas 2025-11-15 06:26:17 +01:00
Fred KISSIE
87f23f582c apply branding to org auth page 2025-11-15 06:08:02 +01:00
Fred KISSIE
29a52f6ac4 🐛 Apply branding to auth page when not authenticated not only when authed 2025-11-15 05:43:17 +01:00
Fred KISSIE
790f7083e2 🐛 fix cols and some other refactors 2025-11-15 04:04:10 +01:00
Fred KISSIE
5c851e82ff ♻️refactor 2025-11-15 04:03:42 +01:00
Fred KISSIE
854f638da3 show toast message when updating auth page domain 2025-11-15 04:03:21 +01:00
Fred KISSIE
4842648e7b ♻️refactor 2025-11-15 02:38:51 +01:00
Fred KISSIE
8f152bdf9f add primary color branding to the page 2025-11-15 02:38:46 +01:00
Fred KISSIE
d003436179 ⚗️ generate build variable as fully typed to prevent typos (to check if it's ok) 2025-11-15 01:43:58 +01:00
Fred KISSIE
9776ef43ea ♻️ only include org settings in saas build 2025-11-15 01:42:20 +01:00
Fred KISSIE
e2c4a906c4 ♻️rename title & subtitle to orgTitle and orgSubtitle 2025-11-15 01:41:56 +01:00
Fred KISSIE
27e8250cd1 ♻️some refactor 2025-11-15 01:07:07 +01:00
Fred KISSIE
0d84b7af6e ♻️show org page branding section only in saas 2025-11-15 01:07:00 +01:00
Fred KISSIE
b961271aa6 ♻️ some refactor 2025-11-15 01:06:22 +01:00
Fred KISSIE
b505cc60b0 🗃️ Add primaryColor to login page branding 2025-11-15 01:06:09 +01:00
Fred KISSIE
955f927c59 🚧WIP 2025-11-14 01:24:15 +01:00
Fred KISSIE
4beed9d464 apply auth branding to resource auth page 2025-11-13 03:24:47 +01:00
Fred KISSIE
228481444f ♻️ do not manually track the loading state in ConfirmDeleteDialog 2025-11-13 02:19:25 +01:00
Fred KISSIE
02cd2cfb17 save and update branding 2025-11-13 02:18:52 +01:00
Fred KISSIE
d218a4bbc3 🏷️ fix types 2025-11-12 03:50:11 +01:00
Fred KISSIE
4bd1c4e0c6 ♻️ refactor 2025-11-12 03:50:04 +01:00
Fred KISSIE
cfde4e7443 🚧 WIP 2025-11-12 03:43:19 +01:00
Fred KISSIE
f58cf68f7c 🚧 WIP 2025-11-11 23:35:20 +01:00
Fred KISSIE
08e43400e4 🚧 frontend wip 2025-11-11 21:14:10 +01:00
Fred KISSIE
46d60bd090 ♻️ add type 2025-11-11 17:08:52 +01:00
Fred KISSIE
5641a2aa31 🗃️ add org auth page model 2025-11-11 17:08:27 +01:00
Fred KISSIE
0abc561bb8 ♻️ refactor 2025-11-11 02:22:26 +01:00
684 changed files with 21685 additions and 15669 deletions

View File

@@ -1,6 +1,3 @@
{ {
"extends": [ "extends": ["next/core-web-vitals", "next/typescript"]
"next/core-web-vitals",
"next/typescript"
]
} }

View File

@@ -24,9 +24,35 @@ concurrency:
cancel-in-progress: true cancel-in-progress: true
jobs: jobs:
release: pre-run:
name: Build and Release runs-on: ubuntu-latest
runs-on: [self-hosted, linux, x64] permissions: write-all
steps:
- name: Configure AWS credentials
uses: aws-actions/configure-aws-credentials@v2
with:
role-to-assume: arn:aws:iam::${{ secrets.AWS_ACCOUNT_ID }}:role/${{ secrets.AWS_ROLE_NAME }}
role-duration-seconds: 3600
aws-region: ${{ secrets.AWS_REGION }}
- name: Verify AWS identity
run: aws sts get-caller-identity
- name: Start EC2 instances
run: |
aws ec2 start-instances --instance-ids ${{ secrets.EC2_INSTANCE_ID_ARM_RUNNER }}
aws ec2 start-instances --instance-ids ${{ secrets.EC2_INSTANCE_ID_AMD_RUNNER }}
echo "EC2 instances started"
release-arm:
name: Build and Release (ARM64)
runs-on: [self-hosted, linux, arm64, us-east-1]
needs: [pre-run]
if: >-
${{
needs.pre-run.result == 'success'
}}
# Job-level timeout to avoid runaway or stuck runs # Job-level timeout to avoid runaway or stuck runs
timeout-minutes: 120 timeout-minutes: 120
env: env:
@@ -36,13 +62,19 @@ jobs:
steps: steps:
- name: Checkout code - name: Checkout code
uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0 uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
- name: Set up QEMU - name: Monitor storage space
uses: docker/setup-qemu-action@c7c53464625b32c7a7e944ae62b3e17d2b600130 # v3.7.0 run: |
THRESHOLD=75
- name: Set up Docker Buildx USED_SPACE=$(df / | grep / | awk '{ print $5 }' | sed 's/%//g')
uses: docker/setup-buildx-action@e468171a9de216ec08956ac3ada2f0791b6bd435 # v3.11.1 echo "Used space: $USED_SPACE%"
if [ "$USED_SPACE" -ge "$THRESHOLD" ]; then
echo "Used space is at or above the 75% threshold. Running Docker system prune."
echo y | docker system prune -a
else
echo "Storage space is below the threshold. No action needed."
fi
- name: Log in to Docker Hub - name: Log in to Docker Hub
uses: docker/login-action@5e57cd118135c172c3672efd75eb46360885c0ef # v3.6.0 uses: docker/login-action@5e57cd118135c172c3672efd75eb46360885c0ef # v3.6.0
@@ -50,6 +82,103 @@ jobs:
registry: docker.io registry: docker.io
username: ${{ secrets.DOCKER_HUB_USERNAME }} username: ${{ secrets.DOCKER_HUB_USERNAME }}
password: ${{ secrets.DOCKER_HUB_ACCESS_TOKEN }} password: ${{ secrets.DOCKER_HUB_ACCESS_TOKEN }}
- name: Extract tag name
id: get-tag
run: echo "TAG=${GITHUB_REF#refs/tags/}" >> $GITHUB_ENV
shell: bash
- name: Update version in package.json
run: |
TAG=${{ env.TAG }}
sed -i "s/export const APP_VERSION = \".*\";/export const APP_VERSION = \"$TAG\";/" server/lib/consts.ts
cat server/lib/consts.ts
shell: bash
- name: Build and push Docker images (Docker Hub - ARM64)
run: |
TAG=${{ env.TAG }}
make build-release-arm tag=$TAG
echo "Built & pushed ARM64 images to: ${{ env.DOCKERHUB_IMAGE }}:${TAG}"
shell: bash
release-amd:
name: Build and Release (AMD64)
runs-on: [self-hosted, linux, x64, us-east-1]
needs: [pre-run]
if: >-
${{
needs.pre-run.result == 'success'
}}
# Job-level timeout to avoid runaway or stuck runs
timeout-minutes: 120
env:
# Target images
DOCKERHUB_IMAGE: docker.io/fosrl/${{ github.event.repository.name }}
GHCR_IMAGE: ghcr.io/${{ github.repository_owner }}/${{ github.event.repository.name }}
steps:
- name: Checkout code
uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
- name: Monitor storage space
run: |
THRESHOLD=75
USED_SPACE=$(df / | grep / | awk '{ print $5 }' | sed 's/%//g')
echo "Used space: $USED_SPACE%"
if [ "$USED_SPACE" -ge "$THRESHOLD" ]; then
echo "Used space is at or above the 75% threshold. Running Docker system prune."
echo y | docker system prune -a
else
echo "Storage space is below the threshold. No action needed."
fi
- name: Log in to Docker Hub
uses: docker/login-action@5e57cd118135c172c3672efd75eb46360885c0ef # v3.6.0
with:
registry: docker.io
username: ${{ secrets.DOCKER_HUB_USERNAME }}
password: ${{ secrets.DOCKER_HUB_ACCESS_TOKEN }}
- name: Extract tag name
id: get-tag
run: echo "TAG=${GITHUB_REF#refs/tags/}" >> $GITHUB_ENV
shell: bash
- name: Update version in package.json
run: |
TAG=${{ env.TAG }}
sed -i "s/export const APP_VERSION = \".*\";/export const APP_VERSION = \"$TAG\";/" server/lib/consts.ts
cat server/lib/consts.ts
shell: bash
- name: Build and push Docker images (Docker Hub - AMD64)
run: |
TAG=${{ env.TAG }}
make build-release-amd tag=$TAG
echo "Built & pushed AMD64 images to: ${{ env.DOCKERHUB_IMAGE }}:${TAG}"
shell: bash
sign-and-package:
name: Sign and Package
runs-on: [self-hosted, linux, x64, us-east-1]
needs: [release-arm, release-amd]
if: >-
${{
needs.release-arm.result == 'success' &&
needs.release-amd.result == 'success'
}}
# Job-level timeout to avoid runaway or stuck runs
timeout-minutes: 120
env:
# Target images
DOCKERHUB_IMAGE: docker.io/fosrl/${{ github.event.repository.name }}
GHCR_IMAGE: ghcr.io/${{ github.repository_owner }}/${{ github.event.repository.name }}
steps:
- name: Checkout code
uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
- name: Extract tag name - name: Extract tag name
id: get-tag id: get-tag
run: echo "TAG=${GITHUB_REF#refs/tags/}" >> $GITHUB_ENV run: echo "TAG=${GITHUB_REF#refs/tags/}" >> $GITHUB_ENV
@@ -104,13 +233,6 @@ jobs:
name: install-bin name: install-bin
path: install/bin/ path: install/bin/
- name: Build and push Docker images (Docker Hub)
run: |
TAG=${{ env.TAG }}
make build-release tag=$TAG
echo "Built & pushed to: ${{ env.DOCKERHUB_IMAGE }}:${TAG}"
shell: bash
- name: Install skopeo + jq - name: Install skopeo + jq
# skopeo: copy/inspect images between registries # skopeo: copy/inspect images between registries
# jq: JSON parsing tool used to extract digest values # jq: JSON parsing tool used to extract digest values
@@ -127,9 +249,12 @@ jobs:
- name: Copy tag from Docker Hub to GHCR - name: Copy tag from Docker Hub to GHCR
# Mirror the already-built image (all architectures) to GHCR so we can sign it # Mirror the already-built image (all architectures) to GHCR so we can sign it
# Wait a bit for both architectures to be available in Docker Hub manifest
run: | run: |
set -euo pipefail set -euo pipefail
TAG=${{ env.TAG }} TAG=${{ env.TAG }}
echo "Waiting for multi-arch manifest to be ready..."
sleep 30
echo "Copying ${{ env.DOCKERHUB_IMAGE }}:${TAG} -> ${{ env.GHCR_IMAGE }}:${TAG}" echo "Copying ${{ env.DOCKERHUB_IMAGE }}:${TAG} -> ${{ env.GHCR_IMAGE }}:${TAG}"
skopeo copy --all --retry-times 3 \ skopeo copy --all --retry-times 3 \
docker://$DOCKERHUB_IMAGE:$TAG \ docker://$DOCKERHUB_IMAGE:$TAG \
@@ -185,3 +310,32 @@ jobs:
"${REF}" -o text "${REF}" -o text
done done
shell: bash shell: bash
post-run:
needs: [pre-run, release-arm, release-amd, sign-and-package]
if: >-
${{
always() &&
needs.pre-run.result == 'success' &&
(needs.release-arm.result == 'success' || needs.release-arm.result == 'skipped' || needs.release-arm.result == 'failure') &&
(needs.release-amd.result == 'success' || needs.release-amd.result == 'skipped' || needs.release-amd.result == 'failure') &&
(needs.sign-and-package.result == 'success' || needs.sign-and-package.result == 'skipped' || needs.sign-and-package.result == 'failure')
}}
runs-on: ubuntu-latest
permissions: write-all
steps:
- name: Configure AWS credentials
uses: aws-actions/configure-aws-credentials@v2
with:
role-to-assume: arn:aws:iam::${{ secrets.AWS_ACCOUNT_ID }}:role/${{ secrets.AWS_ROLE_NAME }}
role-duration-seconds: 3600
aws-region: ${{ secrets.AWS_REGION }}
- name: Verify AWS identity
run: aws sts get-caller-identity
- name: Stop EC2 instances
run: |
aws ec2 stop-instances --instance-ids ${{ secrets.EC2_INSTANCE_ID_ARM_RUNNER }}
aws ec2 stop-instances --instance-ids ${{ secrets.EC2_INSTANCE_ID_AMD_RUNNER }}
echo "EC2 instances stopped"

View File

@@ -21,10 +21,10 @@ jobs:
runs-on: ubuntu-latest runs-on: ubuntu-latest
steps: steps:
- name: Checkout code - name: Checkout code
uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0 uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
- name: Set up Node.js - name: Set up Node.js
uses: actions/setup-node@2028fbc5c25fe9cf00d9f06a71cc4710d4507903 # v6.0.0 uses: actions/setup-node@395ad3262231945c25e8478fd5baf05154b1d79f # v6.1.0
with: with:
node-version: '22' node-version: '22'

39
.github/workflows/restart-runners.yml vendored Normal file
View File

@@ -0,0 +1,39 @@
name: Restart Runners
on:
schedule:
- cron: '0 0 */7 * *'
permissions:
id-token: write
contents: read
jobs:
ec2-maintenance-prod:
runs-on: ubuntu-latest
permissions: write-all
steps:
- name: Configure AWS credentials
uses: aws-actions/configure-aws-credentials@v2
with:
role-to-assume: arn:aws:iam::${{ secrets.AWS_ACCOUNT_ID }}:role/${{ secrets.AWS_ROLE_NAME }}
role-duration-seconds: 3600
aws-region: ${{ secrets.AWS_REGION }}
- name: Verify AWS identity
run: aws sts get-caller-identity
- name: Start EC2 instance
run: |
aws ec2 start-instances --instance-ids ${{ secrets.EC2_INSTANCE_ID_ARM_RUNNER }}
aws ec2 start-instances --instance-ids ${{ secrets.EC2_INSTANCE_ID_AMD_RUNNER }}
echo "EC2 instances started"
- name: Wait
run: sleep 600
- name: Stop EC2 instance
run: |
aws ec2 stop-instances --instance-ids ${{ secrets.EC2_INSTANCE_ID_ARM_RUNNER }}
aws ec2 stop-instances --instance-ids ${{ secrets.EC2_INSTANCE_ID_AMD_RUNNER }}
echo "EC2 instances stopped"

View File

@@ -14,7 +14,7 @@ jobs:
stale: stale:
runs-on: ubuntu-latest runs-on: ubuntu-latest
steps: steps:
- uses: actions/stale@5f858e3efba33a5ca4407a664cc011ad407f2008 # v10.1.0 - uses: actions/stale@997185467fa4f803885201cee163a9f38240193d # v10.1.1
with: with:
days-before-stale: 14 days-before-stale: 14
days-before-close: 14 days-before-close: 14

View File

@@ -12,11 +12,12 @@ on:
jobs: jobs:
test: test:
runs-on: ubuntu-latest runs-on: ubuntu-latest
steps: steps:
- uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0 - name: Checkout repository
uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
- uses: actions/setup-node@2028fbc5c25fe9cf00d9f06a71cc4710d4507903 # v6.0.0 - name: Install Node
uses: actions/setup-node@395ad3262231945c25e8478fd5baf05154b1d79f # v6.1.0
with: with:
node-version: '22' node-version: '22'
@@ -57,8 +58,26 @@ jobs:
echo "App failed to start" echo "App failed to start"
exit 1 exit 1
build-sqlite:
runs-on: ubuntu-latest
steps:
- name: Checkout repository
uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
- name: Copy config file
run: cp config/config.example.yml config/config.yml
- name: Build Docker image sqlite - name: Build Docker image sqlite
run: make build-sqlite run: make dev-build-sqlite
build-postgres:
runs-on: ubuntu-latest
steps:
- name: Checkout repository
uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
- name: Copy config file
run: cp config/config.example.yml config/config.yml
- name: Build Docker image pg - name: Build Docker image pg
run: make build-pg run: make dev-build-pg

12
.prettierignore Normal file
View File

@@ -0,0 +1,12 @@
.github/
bruno/
cli/
config/
messages/
next.config.mjs/
public/
tailwind.config.js/
test/
**/*.yml
**/*.yaml
**/*.md

3
.vscode/extensions.json vendored Normal file
View File

@@ -0,0 +1,3 @@
{
"recommendations": ["esbenp.prettier-vscode"]
}

22
.vscode/settings.json vendored Normal file
View File

@@ -0,0 +1,22 @@
{
"editor.codeActionsOnSave": {
"source.addMissingImports.ts": "always"
},
"editor.defaultFormatter": "esbenp.prettier-vscode",
"[jsonc]": {
"editor.defaultFormatter": "esbenp.prettier-vscode"
},
"[javascript]": {
"editor.defaultFormatter": "esbenp.prettier-vscode"
},
"[typescript]": {
"editor.defaultFormatter": "esbenp.prettier-vscode"
},
"[typescriptreact]": {
"editor.defaultFormatter": "esbenp.prettier-vscode"
},
"[json]": {
"editor.defaultFormatter": "esbenp.prettier-vscode"
},
"editor.formatOnSave": true
}

View File

@@ -43,23 +43,25 @@ RUN test -f dist/server.mjs
RUN npm run build:cli RUN npm run build:cli
# Prune dev dependencies and clean up to prepare for copy to runner
RUN npm prune --omit=dev && npm cache clean --force
FROM node:24-alpine AS runner FROM node:24-alpine AS runner
WORKDIR /app WORKDIR /app
# Curl used for the health checks # Only curl and tzdata needed at runtime - no build tools!
# Python and build tools needed for better-sqlite3 native compilation RUN apk add --no-cache curl tzdata
RUN apk add --no-cache curl tzdata python3 make g++
# COPY package.json package-lock.json ./ # Copy pre-built node_modules from builder (already pruned to production only)
COPY package*.json ./ # This includes the compiled native modules like better-sqlite3
COPY --from=builder /app/node_modules ./node_modules
RUN npm ci --omit=dev && npm cache clean --force
COPY --from=builder /app/.next/standalone ./ COPY --from=builder /app/.next/standalone ./
COPY --from=builder /app/.next/static ./.next/static COPY --from=builder /app/.next/static ./.next/static
COPY --from=builder /app/dist ./dist COPY --from=builder /app/dist ./dist
COPY --from=builder /app/init ./dist/init COPY --from=builder /app/init ./dist/init
COPY --from=builder /app/package.json ./package.json
COPY ./cli/wrapper.sh /usr/local/bin/pangctl COPY ./cli/wrapper.sh /usr/local/bin/pangctl
RUN chmod +x /usr/local/bin/pangctl ./dist/cli.mjs RUN chmod +x /usr/local/bin/pangctl ./dist/cli.mjs

119
Makefile
View File

@@ -1,8 +1,13 @@
.PHONY: build build-pg build-release build-arm build-x86 test clean .PHONY: build build-pg build-release build-release-arm build-release-amd build-arm build-x86 test clean
major_tag := $(shell echo $(tag) | cut -d. -f1) major_tag := $(shell echo $(tag) | cut -d. -f1)
minor_tag := $(shell echo $(tag) | cut -d. -f1,2) minor_tag := $(shell echo $(tag) | cut -d. -f1,2)
build-release:
.PHONY: build-release build-sqlite build-postgresql build-ee-sqlite build-ee-postgresql
build-release: build-sqlite build-postgresql build-ee-sqlite build-ee-postgresql
build-sqlite:
@if [ -z "$(tag)" ]; then \ @if [ -z "$(tag)" ]; then \
echo "Error: tag is required. Usage: make build-release tag=<tag>"; \ echo "Error: tag is required. Usage: make build-release tag=<tag>"; \
exit 1; \ exit 1; \
@@ -16,6 +21,12 @@ build-release:
--tag fosrl/pangolin:$(minor_tag) \ --tag fosrl/pangolin:$(minor_tag) \
--tag fosrl/pangolin:$(tag) \ --tag fosrl/pangolin:$(tag) \
--push . --push .
build-postgresql:
@if [ -z "$(tag)" ]; then \
echo "Error: tag is required. Usage: make build-release tag=<tag>"; \
exit 1; \
fi
docker buildx build \ docker buildx build \
--build-arg BUILD=oss \ --build-arg BUILD=oss \
--build-arg DATABASE=pg \ --build-arg DATABASE=pg \
@@ -25,6 +36,12 @@ build-release:
--tag fosrl/pangolin:postgresql-$(minor_tag) \ --tag fosrl/pangolin:postgresql-$(minor_tag) \
--tag fosrl/pangolin:postgresql-$(tag) \ --tag fosrl/pangolin:postgresql-$(tag) \
--push . --push .
build-ee-sqlite:
@if [ -z "$(tag)" ]; then \
echo "Error: tag is required. Usage: make build-release tag=<tag>"; \
exit 1; \
fi
docker buildx build \ docker buildx build \
--build-arg BUILD=enterprise \ --build-arg BUILD=enterprise \
--build-arg DATABASE=sqlite \ --build-arg DATABASE=sqlite \
@@ -34,6 +51,12 @@ build-release:
--tag fosrl/pangolin:ee-$(minor_tag) \ --tag fosrl/pangolin:ee-$(minor_tag) \
--tag fosrl/pangolin:ee-$(tag) \ --tag fosrl/pangolin:ee-$(tag) \
--push . --push .
build-ee-postgresql:
@if [ -z "$(tag)" ]; then \
echo "Error: tag is required. Usage: make build-release tag=<tag>"; \
exit 1; \
fi
docker buildx build \ docker buildx build \
--build-arg BUILD=enterprise \ --build-arg BUILD=enterprise \
--build-arg DATABASE=pg \ --build-arg DATABASE=pg \
@@ -44,6 +67,94 @@ build-release:
--tag fosrl/pangolin:ee-postgresql-$(tag) \ --tag fosrl/pangolin:ee-postgresql-$(tag) \
--push . --push .
build-release-arm:
@if [ -z "$(tag)" ]; then \
echo "Error: tag is required. Usage: make build-release-arm tag=<tag>"; \
exit 1; \
fi
@MAJOR_TAG=$$(echo $(tag) | cut -d. -f1); \
MINOR_TAG=$$(echo $(tag) | cut -d. -f1,2); \
docker buildx build \
--build-arg BUILD=oss \
--build-arg DATABASE=sqlite \
--platform linux/arm64 \
--tag fosrl/pangolin:latest \
--tag fosrl/pangolin:$$MAJOR_TAG \
--tag fosrl/pangolin:$$MINOR_TAG \
--tag fosrl/pangolin:$(tag) \
--push . && \
docker buildx build \
--build-arg BUILD=oss \
--build-arg DATABASE=pg \
--platform linux/arm64 \
--tag fosrl/pangolin:postgresql-latest \
--tag fosrl/pangolin:postgresql-$$MAJOR_TAG \
--tag fosrl/pangolin:postgresql-$$MINOR_TAG \
--tag fosrl/pangolin:postgresql-$(tag) \
--push . && \
docker buildx build \
--build-arg BUILD=enterprise \
--build-arg DATABASE=sqlite \
--platform linux/arm64 \
--tag fosrl/pangolin:ee-latest \
--tag fosrl/pangolin:ee-$$MAJOR_TAG \
--tag fosrl/pangolin:ee-$$MINOR_TAG \
--tag fosrl/pangolin:ee-$(tag) \
--push . && \
docker buildx build \
--build-arg BUILD=enterprise \
--build-arg DATABASE=pg \
--platform linux/arm64 \
--tag fosrl/pangolin:ee-postgresql-latest \
--tag fosrl/pangolin:ee-postgresql-$$MAJOR_TAG \
--tag fosrl/pangolin:ee-postgresql-$$MINOR_TAG \
--tag fosrl/pangolin:ee-postgresql-$(tag) \
--push .
build-release-amd:
@if [ -z "$(tag)" ]; then \
echo "Error: tag is required. Usage: make build-release-amd tag=<tag>"; \
exit 1; \
fi
@MAJOR_TAG=$$(echo $(tag) | cut -d. -f1); \
MINOR_TAG=$$(echo $(tag) | cut -d. -f1,2); \
docker buildx build \
--build-arg BUILD=oss \
--build-arg DATABASE=sqlite \
--platform linux/amd64 \
--tag fosrl/pangolin:latest \
--tag fosrl/pangolin:$$MAJOR_TAG \
--tag fosrl/pangolin:$$MINOR_TAG \
--tag fosrl/pangolin:$(tag) \
--push . && \
docker buildx build \
--build-arg BUILD=oss \
--build-arg DATABASE=pg \
--platform linux/amd64 \
--tag fosrl/pangolin:postgresql-latest \
--tag fosrl/pangolin:postgresql-$$MAJOR_TAG \
--tag fosrl/pangolin:postgresql-$$MINOR_TAG \
--tag fosrl/pangolin:postgresql-$(tag) \
--push . && \
docker buildx build \
--build-arg BUILD=enterprise \
--build-arg DATABASE=sqlite \
--platform linux/amd64 \
--tag fosrl/pangolin:ee-latest \
--tag fosrl/pangolin:ee-$$MAJOR_TAG \
--tag fosrl/pangolin:ee-$$MINOR_TAG \
--tag fosrl/pangolin:ee-$(tag) \
--push . && \
docker buildx build \
--build-arg BUILD=enterprise \
--build-arg DATABASE=pg \
--platform linux/amd64 \
--tag fosrl/pangolin:ee-postgresql-latest \
--tag fosrl/pangolin:ee-postgresql-$$MAJOR_TAG \
--tag fosrl/pangolin:ee-postgresql-$$MINOR_TAG \
--tag fosrl/pangolin:ee-postgresql-$(tag) \
--push .
build-rc: build-rc:
@if [ -z "$(tag)" ]; then \ @if [ -z "$(tag)" ]; then \
echo "Error: tag is required. Usage: make build-release tag=<tag>"; \ echo "Error: tag is required. Usage: make build-release tag=<tag>"; \
@@ -80,10 +191,10 @@ build-arm:
build-x86: build-x86:
docker buildx build --platform linux/amd64 -t fosrl/pangolin:latest . docker buildx build --platform linux/amd64 -t fosrl/pangolin:latest .
build-sqlite: dev-build-sqlite:
docker build --build-arg DATABASE=sqlite -t fosrl/pangolin:latest . docker build --build-arg DATABASE=sqlite -t fosrl/pangolin:latest .
build-pg: dev-build-pg:
docker build --build-arg DATABASE=pg -t fosrl/pangolin:postgresql-latest . docker build --build-arg DATABASE=pg -t fosrl/pangolin:postgresql-latest .
test: test:

View File

@@ -31,7 +31,7 @@
[![Slack](https://img.shields.io/badge/chat-slack-yellow?style=flat-square&logo=slack)](https://pangolin.net/slack) [![Slack](https://img.shields.io/badge/chat-slack-yellow?style=flat-square&logo=slack)](https://pangolin.net/slack)
[![Docker](https://img.shields.io/docker/pulls/fosrl/pangolin?style=flat-square)](https://hub.docker.com/r/fosrl/pangolin) [![Docker](https://img.shields.io/docker/pulls/fosrl/pangolin?style=flat-square)](https://hub.docker.com/r/fosrl/pangolin)
![Stars](https://img.shields.io/github/stars/fosrl/pangolin?style=flat-square) ![Stars](https://img.shields.io/github/stars/fosrl/pangolin?style=flat-square)
[![YouTube](https://img.shields.io/badge/YouTube-red?logo=youtube&logoColor=white&style=flat-square)](https://www.youtube.com/@fossorial-app) [![YouTube](https://img.shields.io/badge/YouTube-red?logo=youtube&logoColor=white&style=flat-square)](https://www.youtube.com/@pangolin-net)
</div> </div>
@@ -41,7 +41,7 @@
</strong> </strong>
</p> </p>
Pangolin is a self-hosted tunneled reverse proxy server with identity and context aware access control, designed to easily expose and protect applications running anywhere. Pangolin acts as a central hub and connects isolated networks — even those behind restrictive firewalls — through encrypted tunnels, enabling easy access to remote services without opening ports or requiring a VPN. Pangolin is an open-source, identity-based remote access platform built on WireGuard that enables secure, seamless connectivity to private and public resources. Pangolin combines reverse proxy and VPN capabilities into one platform, providing browser-based access to web applications and client-based access to any private resources, all with zero-trust security and granular access control.
## Installation ## Installation
@@ -60,14 +60,20 @@ Pangolin is a self-hosted tunneled reverse proxy server with identity and contex
## Key Features ## Key Features
Pangolin packages everything you need for seamless application access and exposure into one cohesive platform.
| <img width=500 /> | <img width=500 /> | | <img width=500 /> | <img width=500 /> |
|----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|--------------------------------------------------------------------| |----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|--------------------------------------------------------------------|
| **Manage applications in one place**<br /><br /> Pangolin provides a unified dashboard where you can monitor, configure, and secure all of your services regardless of where they are hosted. | <img src="public/screenshots/hero.png" width=500 /><tr></tr> | | **Connect remote networks with sites**<br /><br />Pangolin's lightweight site connectors create secure tunnels from remote networks without requiring public IP addresses or open ports. Sites make any network anywhere available for authorized access. | <img src="public/screenshots/sites.png" width=500 /><tr></tr> |
| **Reverse proxy across networks anywhere**<br /><br />Route traffic via tunnels to any private network. Pangolin works like a reverse proxy that spans multiple networks and handles routing, load balancing, health checking, and more to the right services on the other end. | <img src="public/screenshots/sites.png" width=500 /><tr></tr> | | **Browser-based reverse proxy access**<br /><br />Expose web applications through identity and context-aware tunneled reverse proxies. Pangolin handles routing, load balancing, health checking, and automatic SSL certificates without exposing your network directly to the internet. Users access applications through any web browser with authentication and granular access control. | <img src="public/clip.gif" width=500 /><tr></tr> |
| **Enforce identity and context aware rules**<br /><br />Protect your applications with identity and context aware rules such as SSO, OIDC, PIN, password, temporary share links, geolocation, IP, and more. | <img src="public/auth-diagram1.png" width=500 /><tr></tr> | | **Client-based private resource access**<br /><br />Access private resources like SSH servers, databases, RDP, and entire network ranges through Pangolin clients. Intelligent NAT traversal enables connections even through restrictive firewalls, while DNS aliases provide friendly names and fast connections to resources across all your sites. | <img src="public/screenshots/private-resources.png" width=500 /><tr></tr> |
| **Quickly connect Pangolin sites**<br /><br />Pangolin's lightweight [Newt](https://github.com/fosrl/newt) client runs in userspace and can run anywhere. Use it as a site connector to route traffic to backends across all of your environments. | <img src="public/clip.gif" width=500 /><tr></tr> | | **Zero-trust granular access**<br /><br />Grant users access to specific resources, not entire networks. Unlike traditional VPNs that expose full network access, Pangolin's zero-trust model ensures users can only reach the applications and services you explicitly define, reducing security risk and attack surface. | <img src="public/screenshots/user-devices.png" width=500 /><tr></tr> |
## Download Clients
Download the Pangolin client for your platform:
- [Mac](https://pangolin.net/downloads/mac)
- [Windows](https://pangolin.net/downloads/windows)
- [Linux](https://pangolin.net/downloads/linux)
## Get Started ## Get Started

View File

@@ -0,0 +1,36 @@
import { CommandModule } from "yargs";
import { db, exitNodes } from "@server/db";

type ClearExitNodesArgs = {};

/**
 * `pangctl clear-exit-nodes` — deletes every row from the exitNodes table.
 *
 * Exits the process with code 0 on success (after printing how many rows
 * were removed) and code 1 on any error.
 */
export const clearExitNodes: CommandModule<{}, ClearExitNodesArgs> = {
    command: "clear-exit-nodes",
    describe: "Clear all exit nodes from the database",
    // The command takes no arguments.
    builder: (yargs) => {
        return yargs;
    },
    handler: async () => {
        try {
            console.log(`Clearing all exit nodes from the database`);

            // delete() with no .where() clause removes every row; the previous
            // tautological eq(exitNodes.exitNodeId, exitNodes.exitNodeId)
            // filter (and stray double semicolon) were redundant.
            const deletedCount = await db.delete(exitNodes).returning();

            console.log(`Deleted ${deletedCount.length} exit node(s) from the database`);
            process.exit(0);
        } catch (error) {
            console.error("Error:", error);
            process.exit(1);
        }
    }
};

View File

@@ -0,0 +1,284 @@
import { CommandModule } from "yargs";
import { db, idpOidcConfig, licenseKey } from "@server/db";
import { encrypt, decrypt } from "@server/lib/crypto";
import { configFilePath1, configFilePath2 } from "@server/lib/consts";
import { eq } from "drizzle-orm";
import fs from "fs";
import yaml from "js-yaml";
/** Arguments accepted by the rotate-server-secret command. */
type RotateServerSecretArgs = {
    oldSecret: string;
    newSecret: string;
    force?: boolean;
};

/**
 * `pangctl rotate-server-secret` — rotates the server secret.
 *
 * All values encrypted with the old secret (OIDC IdP client credentials and
 * license keys) are decrypted and re-encrypted with the new secret, the
 * database is updated in a single transaction, and the config file's
 * `server.secret` is rewritten to the new value. Exits 0 on success, 1 on
 * any error.
 *
 * NOTE(review): assumes encrypt/decrypt from @server/lib/crypto are symmetric
 * with respect to the secret (decrypt(encrypt(v, s), s) === v) — confirm.
 */
export const rotateServerSecret: CommandModule<
    {},
    RotateServerSecretArgs
> = {
    command: "rotate-server-secret",
    describe:
        "Rotate the server secret by decrypting all encrypted values with the old secret and re-encrypting with a new secret",
    builder: (yargs) => {
        return yargs
            .option("oldSecret", {
                type: "string",
                demandOption: true,
                describe: "The current server secret (for verification)"
            })
            .option("newSecret", {
                type: "string",
                demandOption: true,
                describe: "The new server secret to use"
            })
            .option("force", {
                type: "boolean",
                default: false,
                describe:
                    "Force rotation even if the old secret doesn't match the config file. " +
                    "Use this if you know the old secret is correct but the config file is out of sync. " +
                    "WARNING: This will attempt to decrypt all values with the provided old secret. " +
                    "If the old secret is incorrect, the rotation will fail or corrupt data."
            });
    },
    handler: async (argv: {
        oldSecret: string;
        newSecret: string;
        force?: boolean;
    }) => {
        try {
            // Determine which config file exists (config.yml takes precedence
            // over config.yaml).
            const configPath = fs.existsSync(configFilePath1)
                ? configFilePath1
                : fs.existsSync(configFilePath2)
                  ? configFilePath2
                  : null;

            if (!configPath) {
                console.error(
                    "Error: Config file not found. Expected config.yml or config.yaml in the config directory."
                );
                process.exit(1);
            }

            // Read current config
            const configContent = fs.readFileSync(configPath, "utf8");
            const config = yaml.load(configContent) as any;

            if (!config?.server?.secret) {
                console.error(
                    "Error: No server secret found in config file. Cannot rotate."
                );
                process.exit(1);
            }

            const configSecret = config.server.secret;
            const oldSecret = argv.oldSecret;
            const newSecret = argv.newSecret;
            const force = argv.force || false;

            // Verify that the provided old secret matches the one in config.
            // --force bypasses this check (with loud warnings) for the case
            // where the config file is out of sync with the database.
            if (configSecret !== oldSecret) {
                if (!force) {
                    console.error(
                        "Error: The provided old secret does not match the secret in the config file."
                    );
                    console.error(
                        "\nIf you are certain the old secret is correct and the config file is out of sync,"
                    );
                    console.error(
                        "you can use the --force flag to bypass this check."
                    );
                    console.error(
                        "\nWARNING: Using --force with an incorrect old secret will cause the rotation to fail"
                    );
                    console.error(
                        "or corrupt encrypted data. Only use --force if you are absolutely certain."
                    );
                    process.exit(1);
                } else {
                    console.warn(
                        "\nWARNING: Using --force flag. Bypassing old secret verification."
                    );
                    console.warn(
                        "The provided old secret does not match the config file, but proceeding anyway."
                    );
                    console.warn(
                        "If the old secret is incorrect, this operation will fail or corrupt data.\n"
                    );
                }
            }

            // Validate new secret
            if (newSecret.length < 8) {
                console.error(
                    "Error: New secret must be at least 8 characters long"
                );
                process.exit(1);
            }

            if (oldSecret === newSecret) {
                console.error("Error: New secret must be different from old secret");
                process.exit(1);
            }

            console.log("Starting server secret rotation...");
            console.log("This will decrypt and re-encrypt all encrypted values in the database.");

            // Read all data first so the decrypt/re-encrypt phase can fail
            // fast before any database writes happen.
            console.log("\nReading encrypted data from database...");

            const idpConfigs = await db.select().from(idpOidcConfig);
            const licenseKeys = await db.select().from(licenseKey);

            console.log(`Found ${idpConfigs.length} OIDC IdP configuration(s)`);
            console.log(`Found ${licenseKeys.length} license key(s)`);

            // Prepare all decrypted and re-encrypted values in memory; the
            // database is only touched in the transaction below.
            console.log("\nDecrypting and re-encrypting values...");

            // Pending update for one idpOidcConfig row.
            type IdpUpdate = {
                idpOauthConfigId: number;
                encryptedClientId: string;
                encryptedClientSecret: string;
            };

            // Pending replacement for one licenseKey row. The primary key
            // itself is encrypted, so rotation replaces the row (delete old
            // id, insert new id) rather than updating in place.
            type LicenseKeyUpdate = {
                oldLicenseKeyId: string;
                newLicenseKeyId: string;
                encryptedToken: string;
                encryptedInstanceId: string;
            };

            const idpUpdates: IdpUpdate[] = [];
            const licenseKeyUpdates: LicenseKeyUpdate[] = [];

            // Process idpOidcConfig entries
            for (const idpConfig of idpConfigs) {
                try {
                    // Decrypt with old secret
                    const decryptedClientId = decrypt(idpConfig.clientId, oldSecret);
                    const decryptedClientSecret = decrypt(
                        idpConfig.clientSecret,
                        oldSecret
                    );

                    // Re-encrypt with new secret
                    const encryptedClientId = encrypt(decryptedClientId, newSecret);
                    const encryptedClientSecret = encrypt(
                        decryptedClientSecret,
                        newSecret
                    );

                    idpUpdates.push({
                        idpOauthConfigId: idpConfig.idpOauthConfigId,
                        encryptedClientId,
                        encryptedClientSecret
                    });
                } catch (error) {
                    // Rethrow so the outer catch aborts the whole rotation;
                    // no database writes have happened yet at this point.
                    console.error(
                        `Error processing IdP config ${idpConfig.idpOauthConfigId}:`,
                        error
                    );
                    throw error;
                }
            }

            // Process licenseKey entries
            for (const key of licenseKeys) {
                try {
                    // Decrypt with old secret
                    const decryptedLicenseKeyId = decrypt(key.licenseKeyId, oldSecret);
                    const decryptedToken = decrypt(key.token, oldSecret);
                    const decryptedInstanceId = decrypt(key.instanceId, oldSecret);

                    // Re-encrypt with new secret
                    const encryptedLicenseKeyId = encrypt(
                        decryptedLicenseKeyId,
                        newSecret
                    );
                    const encryptedToken = encrypt(decryptedToken, newSecret);
                    const encryptedInstanceId = encrypt(
                        decryptedInstanceId,
                        newSecret
                    );

                    licenseKeyUpdates.push({
                        oldLicenseKeyId: key.licenseKeyId,
                        newLicenseKeyId: encryptedLicenseKeyId,
                        encryptedToken,
                        encryptedInstanceId
                    });
                } catch (error) {
                    console.error(
                        `Error processing license key ${key.licenseKeyId}:`,
                        error
                    );
                    throw error;
                }
            }

            // Perform all database updates in a single transaction so a
            // partial rotation can never be committed.
            console.log("\nUpdating database in transaction...");

            await db.transaction(async (trx) => {
                // Update idpOidcConfig entries
                for (const update of idpUpdates) {
                    await trx
                        .update(idpOidcConfig)
                        .set({
                            clientId: update.encryptedClientId,
                            clientSecret: update.encryptedClientSecret
                        })
                        .where(
                            eq(
                                idpOidcConfig.idpOauthConfigId,
                                update.idpOauthConfigId
                            )
                        );
                }

                // Update licenseKey entries (delete old, insert new)
                for (const update of licenseKeyUpdates) {
                    // Delete old entry
                    await trx
                        .delete(licenseKey)
                        .where(eq(licenseKey.licenseKeyId, update.oldLicenseKeyId));

                    // Insert new entry with re-encrypted values
                    await trx.insert(licenseKey).values({
                        licenseKeyId: update.newLicenseKeyId,
                        token: update.encryptedToken,
                        instanceId: update.encryptedInstanceId
                    });
                }
            });

            console.log(`Rotated ${idpUpdates.length} OIDC IdP configuration(s)`);
            console.log(`Rotated ${licenseKeyUpdates.length} license key(s)`);

            // Update config file with new secret. This happens after the DB
            // commit; if this write fails the DB holds the new secret while
            // the config still has the old one (recoverable via --force).
            console.log("\nUpdating config file...");
            config.server.secret = newSecret;
            const newConfigContent = yaml.dump(config, {
                indent: 2,
                lineWidth: -1
            });
            fs.writeFileSync(configPath, newConfigContent, "utf8");
            console.log(`Updated config file: ${configPath}`);

            console.log("\nServer secret rotation completed successfully!");
            console.log(`\nSummary:`);
            console.log(`  - OIDC IdP configurations: ${idpUpdates.length}`);
            console.log(`  - License keys: ${licenseKeyUpdates.length}`);
            console.log(
                `\n IMPORTANT: Restart the server for the new secret to take effect.`
            );

            process.exit(0);
        } catch (error) {
            console.error("Error rotating server secret:", error);
            process.exit(1);
        }
    }
};

View File

@@ -4,10 +4,14 @@ import yargs from "yargs";
import { hideBin } from "yargs/helpers"; import { hideBin } from "yargs/helpers";
import { setAdminCredentials } from "@cli/commands/setAdminCredentials"; import { setAdminCredentials } from "@cli/commands/setAdminCredentials";
import { resetUserSecurityKeys } from "@cli/commands/resetUserSecurityKeys"; import { resetUserSecurityKeys } from "@cli/commands/resetUserSecurityKeys";
import { clearExitNodes } from "./commands/clearExitNodes";
import { rotateServerSecret } from "./commands/rotateServerSecret";
yargs(hideBin(process.argv)) yargs(hideBin(process.argv))
.scriptName("pangctl") .scriptName("pangctl")
.command(setAdminCredentials) .command(setAdminCredentials)
.command(resetUserSecurityKeys) .command(resetUserSecurityKeys)
.command(clearExitNodes)
.command(rotateServerSecret)
.demandCommand() .demandCommand()
.help().argv; .help().argv;

View File

@@ -1,9 +1,7 @@
import { defineConfig } from "drizzle-kit"; import { defineConfig } from "drizzle-kit";
import path from "path"; import path from "path";
const schema = [ const schema = [path.join("server", "db", "pg", "schema")];
path.join("server", "db", "pg", "schema"),
];
export default defineConfig({ export default defineConfig({
dialect: "postgresql", dialect: "postgresql",

View File

@@ -2,9 +2,7 @@ import { APP_PATH } from "@server/lib/consts";
import { defineConfig } from "drizzle-kit"; import { defineConfig } from "drizzle-kit";
import path from "path"; import path from "path";
const schema = [ const schema = [path.join("server", "db", "sqlite", "schema")];
path.join("server", "db", "sqlite", "schema"),
];
export default defineConfig({ export default defineConfig({
dialect: "sqlite", dialect: "sqlite",

View File

@@ -24,20 +24,20 @@ const argv = yargs(hideBin(process.argv))
alias: "e", alias: "e",
describe: "Entry point file", describe: "Entry point file",
type: "string", type: "string",
demandOption: true, demandOption: true
}) })
.option("out", { .option("out", {
alias: "o", alias: "o",
describe: "Output file path", describe: "Output file path",
type: "string", type: "string",
demandOption: true, demandOption: true
}) })
.option("build", { .option("build", {
alias: "b", alias: "b",
describe: "Build type (oss, saas, enterprise)", describe: "Build type (oss, saas, enterprise)",
type: "string", type: "string",
choices: ["oss", "saas", "enterprise"], choices: ["oss", "saas", "enterprise"],
default: "oss", default: "oss"
}) })
.help() .help()
.alias("help", "h").argv; .alias("help", "h").argv;
@@ -66,7 +66,9 @@ function privateImportGuardPlugin() {
// Check if the importing file is NOT in server/private // Check if the importing file is NOT in server/private
const normalizedImporter = path.normalize(importingFile); const normalizedImporter = path.normalize(importingFile);
const isInServerPrivate = normalizedImporter.includes(path.normalize("server/private")); const isInServerPrivate = normalizedImporter.includes(
path.normalize("server/private")
);
if (!isInServerPrivate) { if (!isInServerPrivate) {
const violation = { const violation = {
@@ -79,8 +81,8 @@ function privateImportGuardPlugin() {
console.log(`PRIVATE IMPORT VIOLATION:`); console.log(`PRIVATE IMPORT VIOLATION:`);
console.log(` File: ${importingFile}`); console.log(` File: ${importingFile}`);
console.log(` Import: ${args.path}`); console.log(` Import: ${args.path}`);
console.log(` Resolve dir: ${args.resolveDir || 'N/A'}`); console.log(` Resolve dir: ${args.resolveDir || "N/A"}`);
console.log(''); console.log("");
} }
// Return null to let the default resolver handle it // Return null to let the default resolver handle it
@@ -89,16 +91,20 @@ function privateImportGuardPlugin() {
build.onEnd((result) => { build.onEnd((result) => {
if (violations.length > 0) { if (violations.length > 0) {
console.log(`\nSUMMARY: Found ${violations.length} private import violation(s):`); console.log(
`\nSUMMARY: Found ${violations.length} private import violation(s):`
);
violations.forEach((v, i) => { violations.forEach((v, i) => {
console.log(` ${i + 1}. ${path.relative(process.cwd(), v.file)} imports ${v.importPath}`); console.log(
` ${i + 1}. ${path.relative(process.cwd(), v.file)} imports ${v.importPath}`
);
}); });
console.log(''); console.log("");
result.errors.push({ result.errors.push({
text: `Private import violations detected: ${violations.length} violation(s) found`, text: `Private import violations detected: ${violations.length} violation(s) found`,
location: null, location: null,
notes: violations.map(v => ({ notes: violations.map((v) => ({
text: `${path.relative(process.cwd(), v.file)} imports ${v.importPath}`, text: `${path.relative(process.cwd(), v.file)} imports ${v.importPath}`,
location: null location: null
})) }))
@@ -121,7 +127,9 @@ function dynamicImportGuardPlugin() {
// Check if the importing file is NOT in server/private // Check if the importing file is NOT in server/private
const normalizedImporter = path.normalize(importingFile); const normalizedImporter = path.normalize(importingFile);
const isInServerPrivate = normalizedImporter.includes(path.normalize("server/private")); const isInServerPrivate = normalizedImporter.includes(
path.normalize("server/private")
);
if (isInServerPrivate) { if (isInServerPrivate) {
const violation = { const violation = {
@@ -134,8 +142,8 @@ function dynamicImportGuardPlugin() {
console.log(`DYNAMIC IMPORT VIOLATION:`); console.log(`DYNAMIC IMPORT VIOLATION:`);
console.log(` File: ${importingFile}`); console.log(` File: ${importingFile}`);
console.log(` Import: ${args.path}`); console.log(` Import: ${args.path}`);
console.log(` Resolve dir: ${args.resolveDir || 'N/A'}`); console.log(` Resolve dir: ${args.resolveDir || "N/A"}`);
console.log(''); console.log("");
} }
// Return null to let the default resolver handle it // Return null to let the default resolver handle it
@@ -144,16 +152,20 @@ function dynamicImportGuardPlugin() {
build.onEnd((result) => { build.onEnd((result) => {
if (violations.length > 0) { if (violations.length > 0) {
console.log(`\nSUMMARY: Found ${violations.length} dynamic import violation(s):`); console.log(
`\nSUMMARY: Found ${violations.length} dynamic import violation(s):`
);
violations.forEach((v, i) => { violations.forEach((v, i) => {
console.log(` ${i + 1}. ${path.relative(process.cwd(), v.file)} imports ${v.importPath}`); console.log(
` ${i + 1}. ${path.relative(process.cwd(), v.file)} imports ${v.importPath}`
);
}); });
console.log(''); console.log("");
result.errors.push({ result.errors.push({
text: `Dynamic import violations detected: ${violations.length} violation(s) found`, text: `Dynamic import violations detected: ${violations.length} violation(s) found`,
location: null, location: null,
notes: violations.map(v => ({ notes: violations.map((v) => ({
text: `${path.relative(process.cwd(), v.file)} imports ${v.importPath}`, text: `${path.relative(process.cwd(), v.file)} imports ${v.importPath}`,
location: null location: null
})) }))
@@ -172,21 +184,28 @@ function dynamicImportSwitcherPlugin(buildValue) {
const switches = []; const switches = [];
build.onStart(() => { build.onStart(() => {
console.log(`Dynamic import switcher using build type: ${buildValue}`); console.log(
`Dynamic import switcher using build type: ${buildValue}`
);
}); });
build.onResolve({ filter: /^#dynamic\// }, (args) => { build.onResolve({ filter: /^#dynamic\// }, (args) => {
// Extract the path after #dynamic/ // Extract the path after #dynamic/
const dynamicPath = args.path.replace(/^#dynamic\//, ''); const dynamicPath = args.path.replace(/^#dynamic\//, "");
// Determine the replacement based on build type // Determine the replacement based on build type
let replacement; let replacement;
if (buildValue === "oss") { if (buildValue === "oss") {
replacement = `#open/${dynamicPath}`; replacement = `#open/${dynamicPath}`;
} else if (buildValue === "saas" || buildValue === "enterprise") { } else if (
buildValue === "saas" ||
buildValue === "enterprise"
) {
replacement = `#closed/${dynamicPath}`; // We use #closed here so that the route guards dont complain after its been changed but this is the same as #private replacement = `#closed/${dynamicPath}`; // We use #closed here so that the route guards dont complain after its been changed but this is the same as #private
} else { } else {
console.warn(`Unknown build type '${buildValue}', defaulting to #open/`); console.warn(
`Unknown build type '${buildValue}', defaulting to #open/`
);
replacement = `#open/${dynamicPath}`; replacement = `#open/${dynamicPath}`;
} }
@@ -201,8 +220,10 @@ function dynamicImportSwitcherPlugin(buildValue) {
console.log(`DYNAMIC IMPORT SWITCH:`); console.log(`DYNAMIC IMPORT SWITCH:`);
console.log(` File: ${args.importer}`); console.log(` File: ${args.importer}`);
console.log(` Original: ${args.path}`); console.log(` Original: ${args.path}`);
console.log(` Switched to: ${replacement} (build: ${buildValue})`); console.log(
console.log(''); ` Switched to: ${replacement} (build: ${buildValue})`
);
console.log("");
// Rewrite the import path and let the normal resolution continue // Rewrite the import path and let the normal resolution continue
return build.resolve(replacement, { return build.resolve(replacement, {
@@ -215,12 +236,18 @@ function dynamicImportSwitcherPlugin(buildValue) {
build.onEnd((result) => { build.onEnd((result) => {
if (switches.length > 0) { if (switches.length > 0) {
console.log(`\nDYNAMIC IMPORT SUMMARY: Switched ${switches.length} import(s) for build type '${buildValue}':`); console.log(
`\nDYNAMIC IMPORT SUMMARY: Switched ${switches.length} import(s) for build type '${buildValue}':`
);
switches.forEach((s, i) => { switches.forEach((s, i) => {
console.log(` ${i + 1}. ${path.relative(process.cwd(), s.file)}`); console.log(
console.log(` ${s.originalPath} ${s.replacementPath}`); ` ${i + 1}. ${path.relative(process.cwd(), s.file)}`
);
console.log(
` ${s.originalPath}${s.replacementPath}`
);
}); });
console.log(''); console.log("");
} }
}); });
} }
@@ -235,7 +262,7 @@ esbuild
format: "esm", format: "esm",
minify: false, minify: false,
banner: { banner: {
js: banner, js: banner
}, },
platform: "node", platform: "node",
external: ["body-parser"], external: ["body-parser"],
@@ -244,20 +271,22 @@ esbuild
dynamicImportGuardPlugin(), dynamicImportGuardPlugin(),
dynamicImportSwitcherPlugin(argv.build), dynamicImportSwitcherPlugin(argv.build),
nodeExternalsPlugin({ nodeExternalsPlugin({
packagePath: getPackagePaths(), packagePath: getPackagePaths()
}), })
], ],
sourcemap: "inline", sourcemap: "inline",
target: "node22", target: "node22"
}) })
.then((result) => { .then((result) => {
// Check if there were any errors in the build result // Check if there were any errors in the build result
if (result.errors && result.errors.length > 0) { if (result.errors && result.errors.length > 0) {
console.error(`Build failed with ${result.errors.length} error(s):`); console.error(
`Build failed with ${result.errors.length} error(s):`
);
result.errors.forEach((error, i) => { result.errors.forEach((error, i) => {
console.error(`${i + 1}. ${error.text}`); console.error(`${i + 1}. ${error.text}`);
if (error.notes) { if (error.notes) {
error.notes.forEach(note => { error.notes.forEach((note) => {
console.error(` - ${note.text}`); console.error(` - ${note.text}`);
}); });
} }

View File

@@ -1,19 +1,19 @@
import tseslint from 'typescript-eslint'; import tseslint from "typescript-eslint";
export default tseslint.config({ export default tseslint.config({
files: ["**/*.{ts,tsx,js,jsx}"], files: ["**/*.{ts,tsx,js,jsx}"],
languageOptions: { languageOptions: {
parser: tseslint.parser, parser: tseslint.parser,
parserOptions: { parserOptions: {
ecmaVersion: "latest", ecmaVersion: "latest",
sourceType: "module", sourceType: "module",
ecmaFeatures: { ecmaFeatures: {
jsx: true jsx: true
} }
}
},
rules: {
semi: "error",
"prefer-const": "warn"
} }
},
rules: {
"semi": "error",
"prefer-const": "warn"
}
}); });

View File

@@ -9,10 +9,15 @@ services:
PARSERS: crowdsecurity/whitelists PARSERS: crowdsecurity/whitelists
ENROLL_TAGS: docker ENROLL_TAGS: docker
healthcheck: healthcheck:
interval: 10s test:
retries: 15 - CMD
timeout: 10s - cscli
test: ["CMD", "cscli", "capi", "status"] - lapi
- status
interval: 10s
timeout: 5s
retries: 3
start_period: 30s
labels: labels:
- "traefik.enable=false" # Disable traefik for crowdsec - "traefik.enable=false" # Disable traefik for crowdsec
volumes: volumes:

View File

@@ -107,3 +107,12 @@ http:
loadBalancer: loadBalancer:
servers: servers:
- url: "http://pangolin:3000" # API/WebSocket server - url: "http://pangolin:3000" # API/WebSocket server
tcp:
serversTransports:
pp-transport-v1:
proxyProtocol:
version: 1
pp-transport-v2:
proxyProtocol:
version: 2

View File

@@ -73,7 +73,7 @@ func installDocker() error {
case strings.Contains(osRelease, "ID=ubuntu"): case strings.Contains(osRelease, "ID=ubuntu"):
installCmd = exec.Command("bash", "-c", fmt.Sprintf(` installCmd = exec.Command("bash", "-c", fmt.Sprintf(`
apt-get update && apt-get update &&
apt-get install -y apt-transport-https ca-certificates curl && apt-get install -y apt-transport-https ca-certificates curl gpg &&
curl -fsSL https://download.docker.com/linux/ubuntu/gpg | gpg --dearmor -o /usr/share/keyrings/docker-archive-keyring.gpg && curl -fsSL https://download.docker.com/linux/ubuntu/gpg | gpg --dearmor -o /usr/share/keyrings/docker-archive-keyring.gpg &&
echo "deb [arch=%s signed-by=/usr/share/keyrings/docker-archive-keyring.gpg] https://download.docker.com/linux/ubuntu $(lsb_release -cs) stable" > /etc/apt/sources.list.d/docker.list && echo "deb [arch=%s signed-by=/usr/share/keyrings/docker-archive-keyring.gpg] https://download.docker.com/linux/ubuntu $(lsb_release -cs) stable" > /etc/apt/sources.list.d/docker.list &&
apt-get update && apt-get update &&
@@ -82,7 +82,7 @@ func installDocker() error {
case strings.Contains(osRelease, "ID=debian"): case strings.Contains(osRelease, "ID=debian"):
installCmd = exec.Command("bash", "-c", fmt.Sprintf(` installCmd = exec.Command("bash", "-c", fmt.Sprintf(`
apt-get update && apt-get update &&
apt-get install -y apt-transport-https ca-certificates curl && apt-get install -y apt-transport-https ca-certificates curl gpg &&
curl -fsSL https://download.docker.com/linux/debian/gpg | gpg --dearmor -o /usr/share/keyrings/docker-archive-keyring.gpg && curl -fsSL https://download.docker.com/linux/debian/gpg | gpg --dearmor -o /usr/share/keyrings/docker-archive-keyring.gpg &&
echo "deb [arch=%s signed-by=/usr/share/keyrings/docker-archive-keyring.gpg] https://download.docker.com/linux/debian $(lsb_release -cs) stable" > /etc/apt/sources.list.d/docker.list && echo "deb [arch=%s signed-by=/usr/share/keyrings/docker-archive-keyring.gpg] https://download.docker.com/linux/debian $(lsb_release -cs) stable" > /etc/apt/sources.list.d/docker.list &&
apt-get update && apt-get update &&

View File

@@ -3,8 +3,8 @@ module installer
go 1.24.0 go 1.24.0
require ( require (
golang.org/x/term v0.37.0 golang.org/x/term v0.38.0
gopkg.in/yaml.v3 v3.0.1 gopkg.in/yaml.v3 v3.0.1
) )
require golang.org/x/sys v0.38.0 // indirect require golang.org/x/sys v0.39.0 // indirect

View File

@@ -1,7 +1,7 @@
golang.org/x/sys v0.38.0 h1:3yZWxaJjBmCWXqhN1qh02AkOnCQ1poK6oF+a7xWL6Gc= golang.org/x/sys v0.39.0 h1:CvCKL8MeisomCi6qNZ+wbb0DN9E5AATixKsvNtMoMFk=
golang.org/x/sys v0.38.0/go.mod h1:OgkHotnGiDImocRcuBABYBEXf8A9a87e/uXjp9XT3ks= golang.org/x/sys v0.39.0/go.mod h1:OgkHotnGiDImocRcuBABYBEXf8A9a87e/uXjp9XT3ks=
golang.org/x/term v0.37.0 h1:8EGAD0qCmHYZg6J17DvsMy9/wJ7/D/4pV/wfnld5lTU= golang.org/x/term v0.38.0 h1:PQ5pkm/rLO6HnxFR7N2lJHOZX6Kez5Y1gDSJla6jo7Q=
golang.org/x/term v0.37.0/go.mod h1:5pB4lxRNYYVZuTLmy8oR2BH8dflOR+IbTYFD8fi3254= golang.org/x/term v0.38.0/go.mod h1:bSEAKrOT1W+VSu9TSCMtoGEOUcKxOKgl3LE5QEF/xVg=
gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405 h1:yhCVgyC4o1eVCa2tZl7eS0r+SDo693bJlVdllGtEeKM= gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405 h1:yhCVgyC4o1eVCa2tZl7eS0r+SDo693bJlVdllGtEeKM=
gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
gopkg.in/yaml.v3 v3.0.1 h1:fxVm/GzAzEWqLHuvctI91KS9hhNmmWOoWu0XTYJS7CA= gopkg.in/yaml.v3 v3.0.1 h1:fxVm/GzAzEWqLHuvctI91KS9hhNmmWOoWu0XTYJS7CA=

View File

@@ -1043,7 +1043,7 @@
"actionDeleteSite": "Standort löschen", "actionDeleteSite": "Standort löschen",
"actionGetSite": "Standort abrufen", "actionGetSite": "Standort abrufen",
"actionListSites": "Standorte auflisten", "actionListSites": "Standorte auflisten",
"actionApplyBlueprint": "Blaupause anwenden", "actionApplyBlueprint": "Blueprint anwenden",
"setupToken": "Setup-Token", "setupToken": "Setup-Token",
"setupTokenDescription": "Geben Sie das Setup-Token von der Serverkonsole ein.", "setupTokenDescription": "Geben Sie das Setup-Token von der Serverkonsole ein.",
"setupTokenRequired": "Setup-Token ist erforderlich", "setupTokenRequired": "Setup-Token ist erforderlich",
@@ -1102,7 +1102,7 @@
"actionDeleteIdpOrg": "IDP-Organisationsrichtlinie löschen", "actionDeleteIdpOrg": "IDP-Organisationsrichtlinie löschen",
"actionListIdpOrgs": "IDP-Organisationen auflisten", "actionListIdpOrgs": "IDP-Organisationen auflisten",
"actionUpdateIdpOrg": "IDP-Organisation aktualisieren", "actionUpdateIdpOrg": "IDP-Organisation aktualisieren",
"actionCreateClient": "Endgerät anlegen", "actionCreateClient": "Client erstellen",
"actionDeleteClient": "Client löschen", "actionDeleteClient": "Client löschen",
"actionUpdateClient": "Client aktualisieren", "actionUpdateClient": "Client aktualisieren",
"actionListClients": "Clients auflisten", "actionListClients": "Clients auflisten",
@@ -1201,24 +1201,24 @@
"sidebarLogsAnalytics": "Analytik", "sidebarLogsAnalytics": "Analytik",
"blueprints": "Baupläne", "blueprints": "Baupläne",
"blueprintsDescription": "Deklarative Konfigurationen anwenden und vorherige Abläufe anzeigen", "blueprintsDescription": "Deklarative Konfigurationen anwenden und vorherige Abläufe anzeigen",
"blueprintAdd": "Blaupause hinzufügen", "blueprintAdd": "Blueprint hinzufügen",
"blueprintGoBack": "Alle Blaupausen ansehen", "blueprintGoBack": "Alle Blueprints ansehen",
"blueprintCreate": "Blaupause erstellen", "blueprintCreate": "Blueprint erstellen",
"blueprintCreateDescription2": "Folge den Schritten unten, um eine neue Blaupause zu erstellen und anzuwenden", "blueprintCreateDescription2": "Folge den unten aufgeführten Schritten, um einen neuen Blueprint zu erstellen und anzuwenden",
"blueprintDetails": "Blaupausendetails", "blueprintDetails": "Blueprint Detailinformationen",
"blueprintDetailsDescription": "Siehe das Ergebnis der angewendeten Blaupause und alle aufgetretenen Fehler", "blueprintDetailsDescription": "Siehe das Ergebnis des angewendeten Blueprints und alle aufgetretenen Fehler",
"blueprintInfo": "Blaupauseninformation", "blueprintInfo": "Blueprint Informationen",
"message": "Nachricht", "message": "Nachricht",
"blueprintContentsDescription": "Den YAML-Inhalt definieren, der die Infrastruktur beschreibt", "blueprintContentsDescription": "Den YAML-Inhalt definieren, der die Infrastruktur beschreibt",
"blueprintErrorCreateDescription": "Fehler beim Anwenden der Blaupause", "blueprintErrorCreateDescription": "Fehler beim Anwenden des Blueprints",
"blueprintErrorCreate": "Fehler beim Erstellen der Blaupause", "blueprintErrorCreate": "Fehler beim Erstellen des Blueprints",
"searchBlueprintProgress": "Blaupausen suchen...", "searchBlueprintProgress": "Blueprints suchen...",
"appliedAt": "Angewandt am", "appliedAt": "Angewandt am",
"source": "Quelle", "source": "Quelle",
"contents": "Inhalt", "contents": "Inhalt",
"parsedContents": "Analysierte Inhalte (Nur lesen)", "parsedContents": "Analysierte Inhalte (Nur lesen)",
"enableDockerSocket": "Docker Blaupause aktivieren", "enableDockerSocket": "Docker Blueprint aktivieren",
"enableDockerSocketDescription": "Aktiviere Docker-Socket-Label-Scraping für Blaupausenbeschriftungen. Der Socket-Pfad muss neu angegeben werden.", "enableDockerSocketDescription": "Aktiviere Docker-Socket-Label-Scraping für Blueprintbeschriftungen. Der Socket-Pfad muss neu angegeben werden.",
"enableDockerSocketLink": "Mehr erfahren", "enableDockerSocketLink": "Mehr erfahren",
"viewDockerContainers": "Docker Container anzeigen", "viewDockerContainers": "Docker Container anzeigen",
"containersIn": "Container in {siteName}", "containersIn": "Container in {siteName}",
@@ -1543,7 +1543,7 @@
"healthCheckPathRequired": "Gesundheits-Check-Pfad ist erforderlich", "healthCheckPathRequired": "Gesundheits-Check-Pfad ist erforderlich",
"healthCheckMethodRequired": "HTTP-Methode ist erforderlich", "healthCheckMethodRequired": "HTTP-Methode ist erforderlich",
"healthCheckIntervalMin": "Prüfintervall muss mindestens 5 Sekunden betragen", "healthCheckIntervalMin": "Prüfintervall muss mindestens 5 Sekunden betragen",
"healthCheckTimeoutMin": "Timeout muss mindestens 1 Sekunde betragen", "healthCheckTimeoutMin": "Zeitüberschreitung muss mindestens 1 Sekunde betragen",
"healthCheckRetryMin": "Wiederholungsversuche müssen mindestens 1 betragen", "healthCheckRetryMin": "Wiederholungsversuche müssen mindestens 1 betragen",
"httpMethod": "HTTP-Methode", "httpMethod": "HTTP-Methode",
"selectHttpMethod": "HTTP-Methode auswählen", "selectHttpMethod": "HTTP-Methode auswählen",

View File

@@ -33,7 +33,7 @@
"password": "Password", "password": "Password",
"confirmPassword": "Confirm Password", "confirmPassword": "Confirm Password",
"createAccount": "Create Account", "createAccount": "Create Account",
"viewSettings": "View settings", "viewSettings": "View Settings",
"delete": "Delete", "delete": "Delete",
"name": "Name", "name": "Name",
"online": "Online", "online": "Online",
@@ -51,6 +51,9 @@
"siteQuestionRemove": "Are you sure you want to remove the site from the organization?", "siteQuestionRemove": "Are you sure you want to remove the site from the organization?",
"siteManageSites": "Manage Sites", "siteManageSites": "Manage Sites",
"siteDescription": "Create and manage sites to enable connectivity to private networks", "siteDescription": "Create and manage sites to enable connectivity to private networks",
"sitesBannerTitle": "Connect Any Network",
"sitesBannerDescription": "A site is a connection to a remote network that allows Pangolin to provide access to resources, whether public or private, to users anywhere. Install the site network connector (Newt) anywhere you can run a binary or container to establish the connection.",
"sitesBannerButtonText": "Install Site",
"siteCreate": "Create Site", "siteCreate": "Create Site",
"siteCreateDescription2": "Follow the steps below to create and connect a new site", "siteCreateDescription2": "Follow the steps below to create and connect a new site",
"siteCreateDescription": "Create a new site to start connecting resources", "siteCreateDescription": "Create a new site to start connecting resources",
@@ -100,6 +103,7 @@
"siteTunnelDescription": "Determine how you want to connect to the site", "siteTunnelDescription": "Determine how you want to connect to the site",
"siteNewtCredentials": "Credentials", "siteNewtCredentials": "Credentials",
"siteNewtCredentialsDescription": "This is how the site will authenticate with the server", "siteNewtCredentialsDescription": "This is how the site will authenticate with the server",
"remoteNodeCredentialsDescription": "This is how the remote node will authenticate with the server",
"siteCredentialsSave": "Save the Credentials", "siteCredentialsSave": "Save the Credentials",
"siteCredentialsSaveDescription": "You will only be able to see this once. Make sure to copy it to a secure place.", "siteCredentialsSaveDescription": "You will only be able to see this once. Make sure to copy it to a secure place.",
"siteInfo": "Site Information", "siteInfo": "Site Information",
@@ -146,8 +150,12 @@
"shareErrorSelectResource": "Please select a resource", "shareErrorSelectResource": "Please select a resource",
"proxyResourceTitle": "Manage Public Resources", "proxyResourceTitle": "Manage Public Resources",
"proxyResourceDescription": "Create and manage resources that are publicly accessible through a web browser", "proxyResourceDescription": "Create and manage resources that are publicly accessible through a web browser",
"proxyResourcesBannerTitle": "Web-based Public Access",
"proxyResourcesBannerDescription": "Public resources are HTTPS or TCP/UDP proxies accessible to anyone on the internet through a web browser. Unlike private resources, they do not require client-side software and can include identity and context-aware access policies.",
"clientResourceTitle": "Manage Private Resources", "clientResourceTitle": "Manage Private Resources",
"clientResourceDescription": "Create and manage resources that are only accessible through a connected client", "clientResourceDescription": "Create and manage resources that are only accessible through a connected client",
"privateResourcesBannerTitle": "Zero-Trust Private Access",
"privateResourcesBannerDescription": "Private resources use zero-trust security, ensuring users and machines can only access resources you explicitly grant. Connect user devices or machine clients to access these resources over a secure virtual private network.",
"resourcesSearch": "Search resources...", "resourcesSearch": "Search resources...",
"resourceAdd": "Add Resource", "resourceAdd": "Add Resource",
"resourceErrorDelte": "Error deleting resource", "resourceErrorDelte": "Error deleting resource",
@@ -157,9 +165,9 @@
"resourceMessageRemove": "Once removed, the resource will no longer be accessible. All targets associated with the resource will also be removed.", "resourceMessageRemove": "Once removed, the resource will no longer be accessible. All targets associated with the resource will also be removed.",
"resourceQuestionRemove": "Are you sure you want to remove the resource from the organization?", "resourceQuestionRemove": "Are you sure you want to remove the resource from the organization?",
"resourceHTTP": "HTTPS Resource", "resourceHTTP": "HTTPS Resource",
"resourceHTTPDescription": "Proxy requests to the app over HTTPS using a subdomain or base domain.", "resourceHTTPDescription": "Proxy requests over HTTPS using a fully qualified domain name.",
"resourceRaw": "Raw TCP/UDP Resource", "resourceRaw": "Raw TCP/UDP Resource",
"resourceRawDescription": "Proxy requests to the app over TCP/UDP using a port number. This only works when sites are connected to nodes.", "resourceRawDescription": "Proxy requests over raw TCP/UDP using a port number.",
"resourceCreate": "Create Resource", "resourceCreate": "Create Resource",
"resourceCreateDescription": "Follow the steps below to create a new resource", "resourceCreateDescription": "Follow the steps below to create a new resource",
"resourceSeeAll": "See All Resources", "resourceSeeAll": "See All Resources",
@@ -419,7 +427,7 @@
"userErrorExistsDescription": "This user is already a member of the organization.", "userErrorExistsDescription": "This user is already a member of the organization.",
"inviteError": "Failed to invite user", "inviteError": "Failed to invite user",
"inviteErrorDescription": "An error occurred while inviting the user", "inviteErrorDescription": "An error occurred while inviting the user",
"userInvited": "User invited", "userInvited": "User Invited",
"userInvitedDescription": "The user has been successfully invited.", "userInvitedDescription": "The user has been successfully invited.",
"userErrorCreate": "Failed to create user", "userErrorCreate": "Failed to create user",
"userErrorCreateDescription": "An error occurred while creating the user", "userErrorCreateDescription": "An error occurred while creating the user",
@@ -687,7 +695,7 @@
"resourceRoleDescription": "Admins can always access this resource.", "resourceRoleDescription": "Admins can always access this resource.",
"resourceUsersRoles": "Access Controls", "resourceUsersRoles": "Access Controls",
"resourceUsersRolesDescription": "Configure which users and roles can visit this resource", "resourceUsersRolesDescription": "Configure which users and roles can visit this resource",
"resourceUsersRolesSubmit": "Save Users & Roles", "resourceUsersRolesSubmit": "Save Access Controls",
"resourceWhitelistSave": "Saved successfully", "resourceWhitelistSave": "Saved successfully",
"resourceWhitelistSaveDescription": "Whitelist settings have been saved", "resourceWhitelistSaveDescription": "Whitelist settings have been saved",
"ssoUse": "Use Platform SSO", "ssoUse": "Use Platform SSO",
@@ -945,7 +953,7 @@
"pincodeAuth": "Authenticator Code", "pincodeAuth": "Authenticator Code",
"pincodeSubmit2": "Submit Code", "pincodeSubmit2": "Submit Code",
"passwordResetSubmit": "Request Reset", "passwordResetSubmit": "Request Reset",
"passwordResetAlreadyHaveCode": "Enter Password Reset Code", "passwordResetAlreadyHaveCode": "Enter Code",
"passwordResetSmtpRequired": "Please contact your administrator", "passwordResetSmtpRequired": "Please contact your administrator",
"passwordResetSmtpRequiredDescription": "A password reset code is required to reset your password. Please contact your administrator for assistance.", "passwordResetSmtpRequiredDescription": "A password reset code is required to reset your password. Please contact your administrator for assistance.",
"passwordBack": "Back to Password", "passwordBack": "Back to Password",
@@ -1035,6 +1043,7 @@
"updateOrgUser": "Update Org User", "updateOrgUser": "Update Org User",
"createOrgUser": "Create Org User", "createOrgUser": "Create Org User",
"actionUpdateOrg": "Update Organization", "actionUpdateOrg": "Update Organization",
"actionRemoveInvitation": "Remove Invitation",
"actionUpdateUser": "Update User", "actionUpdateUser": "Update User",
"actionGetUser": "Get User", "actionGetUser": "Get User",
"actionGetOrgUser": "Get Organization User", "actionGetOrgUser": "Get Organization User",
@@ -1044,6 +1053,8 @@
"actionGetSite": "Get Site", "actionGetSite": "Get Site",
"actionListSites": "List Sites", "actionListSites": "List Sites",
"actionApplyBlueprint": "Apply Blueprint", "actionApplyBlueprint": "Apply Blueprint",
"actionListBlueprints": "List Blueprints",
"actionGetBlueprint": "Get Blueprint",
"setupToken": "Setup Token", "setupToken": "Setup Token",
"setupTokenDescription": "Enter the setup token from the server console.", "setupTokenDescription": "Enter the setup token from the server console.",
"setupTokenRequired": "Setup token is required", "setupTokenRequired": "Setup token is required",
@@ -1194,7 +1205,7 @@
"sidebarUserDevices": "Users", "sidebarUserDevices": "Users",
"sidebarMachineClients": "Machines", "sidebarMachineClients": "Machines",
"sidebarDomains": "Domains", "sidebarDomains": "Domains",
"sidebarGeneral": "General", "sidebarGeneral": "Manage",
"sidebarLogAndAnalytics": "Log & Analytics", "sidebarLogAndAnalytics": "Log & Analytics",
"sidebarBluePrints": "Blueprints", "sidebarBluePrints": "Blueprints",
"sidebarOrganization": "Organization", "sidebarOrganization": "Organization",
@@ -1308,8 +1319,11 @@
"accountSetupSuccess": "Account setup completed! Welcome to Pangolin!", "accountSetupSuccess": "Account setup completed! Welcome to Pangolin!",
"documentation": "Documentation", "documentation": "Documentation",
"saveAllSettings": "Save All Settings", "saveAllSettings": "Save All Settings",
"saveResourceTargets": "Save Targets",
"saveResourceHttp": "Save Proxy Settings",
"saveProxyProtocol": "Save Proxy Protocol Settings",
"settingsUpdated": "Settings updated", "settingsUpdated": "Settings updated",
"settingsUpdatedDescription": "All settings have been updated successfully", "settingsUpdatedDescription": "Settings updated successfully",
"settingsErrorUpdate": "Failed to update settings", "settingsErrorUpdate": "Failed to update settings",
"settingsErrorUpdateDescription": "An error occurred while updating settings", "settingsErrorUpdateDescription": "An error occurred while updating settings",
"sidebarCollapse": "Collapse", "sidebarCollapse": "Collapse",
@@ -1616,9 +1630,8 @@
"createInternalResourceDialogResourceProperties": "Resource Properties", "createInternalResourceDialogResourceProperties": "Resource Properties",
"createInternalResourceDialogName": "Name", "createInternalResourceDialogName": "Name",
"createInternalResourceDialogSite": "Site", "createInternalResourceDialogSite": "Site",
"createInternalResourceDialogSelectSite": "Select site...", "selectSite": "Select site...",
"createInternalResourceDialogSearchSites": "Search sites...", "noSitesFound": "No sites found.",
"createInternalResourceDialogNoSitesFound": "No sites found.",
"createInternalResourceDialogProtocol": "Protocol", "createInternalResourceDialogProtocol": "Protocol",
"createInternalResourceDialogTcp": "TCP", "createInternalResourceDialogTcp": "TCP",
"createInternalResourceDialogUdp": "UDP", "createInternalResourceDialogUdp": "UDP",
@@ -1658,7 +1671,7 @@
"siteAddressDescription": "The internal address of the site. Must fall within the organization's subnet.", "siteAddressDescription": "The internal address of the site. Must fall within the organization's subnet.",
"siteNameDescription": "The display name of the site that can be changed later.", "siteNameDescription": "The display name of the site that can be changed later.",
"autoLoginExternalIdp": "Auto Login with External IDP", "autoLoginExternalIdp": "Auto Login with External IDP",
"autoLoginExternalIdpDescription": "Immediately redirect the user to the external IDP for authentication.", "autoLoginExternalIdpDescription": "Immediately redirect the user to the external identity provider for authentication.",
"selectIdp": "Select IDP", "selectIdp": "Select IDP",
"selectIdpPlaceholder": "Choose an IDP...", "selectIdpPlaceholder": "Choose an IDP...",
"selectIdpRequired": "Please select an IDP when auto login is enabled.", "selectIdpRequired": "Please select an IDP when auto login is enabled.",
@@ -1670,7 +1683,7 @@
"autoLoginErrorNoRedirectUrl": "No redirect URL received from the identity provider.", "autoLoginErrorNoRedirectUrl": "No redirect URL received from the identity provider.",
"autoLoginErrorGeneratingUrl": "Failed to generate authentication URL.", "autoLoginErrorGeneratingUrl": "Failed to generate authentication URL.",
"remoteExitNodeManageRemoteExitNodes": "Remote Nodes", "remoteExitNodeManageRemoteExitNodes": "Remote Nodes",
"remoteExitNodeDescription": "Self-host one or more remote nodes to extend network connectivity and reduce reliance on the cloud", "remoteExitNodeDescription": "Self-host your own remote relay and proxy server nodes",
"remoteExitNodes": "Nodes", "remoteExitNodes": "Nodes",
"searchRemoteExitNodes": "Search nodes...", "searchRemoteExitNodes": "Search nodes...",
"remoteExitNodeAdd": "Add Node", "remoteExitNodeAdd": "Add Node",
@@ -1680,20 +1693,22 @@
"remoteExitNodeConfirmDelete": "Confirm Delete Node", "remoteExitNodeConfirmDelete": "Confirm Delete Node",
"remoteExitNodeDelete": "Delete Node", "remoteExitNodeDelete": "Delete Node",
"sidebarRemoteExitNodes": "Remote Nodes", "sidebarRemoteExitNodes": "Remote Nodes",
"remoteExitNodeId": "ID",
"remoteExitNodeSecretKey": "Secret",
"remoteExitNodeCreate": { "remoteExitNodeCreate": {
"title": "Create Node", "title": "Create Remote Node",
"description": "Create a new node to extend network connectivity", "description": "Create a new self-hosted remote relay and proxy server node",
"viewAllButton": "View All Nodes", "viewAllButton": "View All Nodes",
"strategy": { "strategy": {
"title": "Creation Strategy", "title": "Creation Strategy",
"description": "Choose this to manually configure the node or generate new credentials.", "description": "Select how you want to create the remote node",
"adopt": { "adopt": {
"title": "Adopt Node", "title": "Adopt Node",
"description": "Choose this if you already have the credentials for the node." "description": "Choose this if you already have the credentials for the node."
}, },
"generate": { "generate": {
"title": "Generate Keys", "title": "Generate Keys",
"description": "Choose this if you want to generate new keys for the node" "description": "Choose this if you want to generate new keys for the node."
} }
}, },
"adopt": { "adopt": {
@@ -1806,9 +1821,30 @@
"idpAzureDescription": "Microsoft Azure OAuth2/OIDC provider", "idpAzureDescription": "Microsoft Azure OAuth2/OIDC provider",
"subnet": "Subnet", "subnet": "Subnet",
"subnetDescription": "The subnet for this organization's network configuration.", "subnetDescription": "The subnet for this organization's network configuration.",
"authPage": "Auth Page", "customDomain": "Custom Domain",
"authPageDescription": "Configure the auth page for the organization", "authPage": "Authentication Pages",
"authPageDescription": "Set a custom domain for the organization's authentication pages",
"authPageDomain": "Auth Page Domain", "authPageDomain": "Auth Page Domain",
"authPageBranding": "Custom Branding",
"authPageBrandingDescription": "Configure the branding that appears on authentication pages for this organization",
"authPageBrandingUpdated": "Auth page branding updated successfully",
"authPageBrandingRemoved": "Auth page branding removed successfully",
"authPageBrandingRemoveTitle": "Remove Auth Page Branding",
"authPageBrandingQuestionRemove": "Are you sure you want to remove the branding for Auth Pages?",
"authPageBrandingDeleteConfirm": "Confirm Delete Branding",
"brandingLogoURL": "Logo URL",
"brandingPrimaryColor": "Primary Color",
"brandingLogoWidth": "Width (px)",
"brandingLogoHeight": "Height (px)",
"brandingOrgTitle": "Title for Organization Auth Page",
"brandingOrgDescription": "{orgName} will be replaced with the organization's name",
"brandingOrgSubtitle": "Subtitle for Organization Auth Page",
"brandingResourceTitle": "Title for Resource Auth Page",
"brandingResourceSubtitle": "Subtitle for Resource Auth Page",
"brandingResourceDescription": "{resourceName} will be replaced with the resource's name",
"saveAuthPageDomain": "Save Domain",
"saveAuthPageBranding": "Save Branding",
"removeAuthPageBranding": "Remove Branding",
"noDomainSet": "No domain set", "noDomainSet": "No domain set",
"changeDomain": "Change Domain", "changeDomain": "Change Domain",
"selectDomain": "Select Domain", "selectDomain": "Select Domain",
@@ -1817,7 +1853,7 @@
"setAuthPageDomain": "Set Auth Page Domain", "setAuthPageDomain": "Set Auth Page Domain",
"failedToFetchCertificate": "Failed to fetch certificate", "failedToFetchCertificate": "Failed to fetch certificate",
"failedToRestartCertificate": "Failed to restart certificate", "failedToRestartCertificate": "Failed to restart certificate",
"addDomainToEnableCustomAuthPages": "Add a domain to enable custom authentication pages for the organization", "addDomainToEnableCustomAuthPages": "Users will be able to access the organization's login page and complete resource authentication using this domain.",
"selectDomainForOrgAuthPage": "Select a domain for the organization's authentication page", "selectDomainForOrgAuthPage": "Select a domain for the organization's authentication page",
"domainPickerProvidedDomain": "Provided Domain", "domainPickerProvidedDomain": "Provided Domain",
"domainPickerFreeProvidedDomain": "Free Provided Domain", "domainPickerFreeProvidedDomain": "Free Provided Domain",
@@ -1832,10 +1868,19 @@
"domainPickerInvalidSubdomainCannotMakeValid": "\"{sub}\" could not be made valid for {domain}.", "domainPickerInvalidSubdomainCannotMakeValid": "\"{sub}\" could not be made valid for {domain}.",
"domainPickerSubdomainSanitized": "Subdomain sanitized", "domainPickerSubdomainSanitized": "Subdomain sanitized",
"domainPickerSubdomainCorrected": "\"{sub}\" was corrected to \"{sanitized}\"", "domainPickerSubdomainCorrected": "\"{sub}\" was corrected to \"{sanitized}\"",
"orgAuthSignInTitle": "Sign in to the organization", "orgAuthSignInTitle": "Organization Sign In",
"orgAuthChooseIdpDescription": "Choose your identity provider to continue", "orgAuthChooseIdpDescription": "Choose your identity provider to continue",
"orgAuthNoIdpConfigured": "This organization doesn't have any identity providers configured. You can log in with your Pangolin identity instead.", "orgAuthNoIdpConfigured": "This organization doesn't have any identity providers configured. You can log in with your Pangolin identity instead.",
"orgAuthSignInWithPangolin": "Sign in with Pangolin", "orgAuthSignInWithPangolin": "Sign in with Pangolin",
"orgAuthSignInToOrg": "Sign in to an organization",
"orgAuthSelectOrgTitle": "Organization Sign In",
"orgAuthSelectOrgDescription": "Enter your organization ID to continue",
"orgAuthOrgIdPlaceholder": "your-organization",
"orgAuthOrgIdHelp": "Enter your organization's unique identifier",
"orgAuthSelectOrgHelp": "After entering your organization ID, you'll be taken to your organization's sign-in page where you can use SSO or your organization credentials.",
"orgAuthRememberOrgId": "Remember this organization ID",
"orgAuthBackToSignIn": "Back to standard sign in",
"orgAuthNoAccount": "Don't have an account?",
"subscriptionRequiredToUse": "A subscription is required to use this feature.", "subscriptionRequiredToUse": "A subscription is required to use this feature.",
"idpDisabled": "Identity providers are disabled.", "idpDisabled": "Identity providers are disabled.",
"orgAuthPageDisabled": "Organization auth page is disabled.", "orgAuthPageDisabled": "Organization auth page is disabled.",
@@ -1850,6 +1895,8 @@
"enableTwoFactorAuthentication": "Enable two-factor authentication", "enableTwoFactorAuthentication": "Enable two-factor authentication",
"completeSecuritySteps": "Complete Security Steps", "completeSecuritySteps": "Complete Security Steps",
"securitySettings": "Security Settings", "securitySettings": "Security Settings",
"dangerSection": "Danger Zone",
"dangerSectionDescription": "Permanently delete all data associated with this organization",
"securitySettingsDescription": "Configure security policies for the organization", "securitySettingsDescription": "Configure security policies for the organization",
"requireTwoFactorForAllUsers": "Require Two-Factor Authentication for All Users", "requireTwoFactorForAllUsers": "Require Two-Factor Authentication for All Users",
"requireTwoFactorDescription": "When enabled, all internal users in this organization must have two-factor authentication enabled to access the organization.", "requireTwoFactorDescription": "When enabled, all internal users in this organization must have two-factor authentication enabled to access the organization.",
@@ -1887,7 +1934,7 @@
"securityPolicyChangeWarningText": "This will affect all users in the organization", "securityPolicyChangeWarningText": "This will affect all users in the organization",
"authPageErrorUpdateMessage": "An error occurred while updating the auth page settings", "authPageErrorUpdateMessage": "An error occurred while updating the auth page settings",
"authPageErrorUpdate": "Unable to update auth page", "authPageErrorUpdate": "Unable to update auth page",
"authPageUpdated": "Auth page updated successfully", "authPageDomainUpdated": "Auth page Domain updated successfully",
"healthCheckNotAvailable": "Local", "healthCheckNotAvailable": "Local",
"rewritePath": "Rewrite Path", "rewritePath": "Rewrite Path",
"rewritePathDescription": "Optionally rewrite the path before forwarding to the target.", "rewritePathDescription": "Optionally rewrite the path before forwarding to the target.",
@@ -1915,8 +1962,15 @@
"beta": "Beta", "beta": "Beta",
"manageUserDevices": "User Devices", "manageUserDevices": "User Devices",
"manageUserDevicesDescription": "View and manage devices that users use to privately connect to resources", "manageUserDevicesDescription": "View and manage devices that users use to privately connect to resources",
"downloadClientBannerTitle": "Download Pangolin Client",
"downloadClientBannerDescription": "Download the Pangolin client for your system to connect to the Pangolin network and access resources privately.",
"manageMachineClients": "Manage Machine Clients", "manageMachineClients": "Manage Machine Clients",
"manageMachineClientsDescription": "Create and manage clients that servers and systems use to privately connect to resources", "manageMachineClientsDescription": "Create and manage clients that servers and systems use to privately connect to resources",
"machineClientsBannerTitle": "Servers & Automated Systems",
"machineClientsBannerDescription": "Machine clients are for servers and automated systems that are not associated with a specific user. They authenticate with an ID and secret, and can run with Pangolin CLI, Olm CLI, or Olm as a container.",
"machineClientsBannerPangolinCLI": "Pangolin CLI",
"machineClientsBannerOlmCLI": "Olm CLI",
"machineClientsBannerOlmContainer": "Olm Container",
"clientsTableUserClients": "User", "clientsTableUserClients": "User",
"clientsTableMachineClients": "Machine", "clientsTableMachineClients": "Machine",
"licenseTableValidUntil": "Valid Until", "licenseTableValidUntil": "Valid Until",
@@ -2060,13 +2114,15 @@
"request": "Request", "request": "Request",
"requests": "Requests", "requests": "Requests",
"logs": "Logs", "logs": "Logs",
"logsSettingsDescription": "Monitor logs collected from this orginization", "logsSettingsDescription": "Monitor logs collected from this organization",
"searchLogs": "Search logs...", "searchLogs": "Search logs...",
"action": "Action", "action": "Action",
"actor": "Actor", "actor": "Actor",
"timestamp": "Timestamp", "timestamp": "Timestamp",
"accessLogs": "Access Logs", "accessLogs": "Access Logs",
"exportCsv": "Export CSV", "exportCsv": "Export CSV",
"exportError": "Unknown error when exporting CSV",
"exportCsvTooltip": "Within Time Range",
"actorId": "Actor ID", "actorId": "Actor ID",
"allowedByRule": "Allowed by Rule", "allowedByRule": "Allowed by Rule",
"allowedNoAuth": "Allowed No Auth", "allowedNoAuth": "Allowed No Auth",
@@ -2120,7 +2176,7 @@
"unverified": "Unverified", "unverified": "Unverified",
"domainSetting": "Domain Settings", "domainSetting": "Domain Settings",
"domainSettingDescription": "Configure settings for the domain", "domainSettingDescription": "Configure settings for the domain",
"preferWildcardCertDescription": "Attempt to generate a wildcard certificate (require a properly configured certificate resolver).", "preferWildcardCertDescription": "Attempt to generate a wildcard certificate (requires a properly configured certificate resolver).",
"recordName": "Record Name", "recordName": "Record Name",
"auto": "Auto", "auto": "Auto",
"TTL": "TTL", "TTL": "TTL",
@@ -2255,6 +2311,8 @@
"setupFailedToFetchSubnet": "Failed to fetch default subnet", "setupFailedToFetchSubnet": "Failed to fetch default subnet",
"setupSubnetAdvanced": "Subnet (Advanced)", "setupSubnetAdvanced": "Subnet (Advanced)",
"setupSubnetDescription": "The subnet for this organization's internal network.", "setupSubnetDescription": "The subnet for this organization's internal network.",
"setupUtilitySubnet": "Utility Subnet (Advanced)",
"setupUtilitySubnetDescription": "The subnet for this organization's alias addresses and DNS server.",
"siteRegenerateAndDisconnect": "Regenerate and Disconnect", "siteRegenerateAndDisconnect": "Regenerate and Disconnect",
"siteRegenerateAndDisconnectConfirmation": "Are you sure you want to regenerate the credentials and disconnect this site?", "siteRegenerateAndDisconnectConfirmation": "Are you sure you want to regenerate the credentials and disconnect this site?",
"siteRegenerateAndDisconnectWarning": "This will regenerate the credentials and immediately disconnect the site. The site will need to be restarted with the new credentials.", "siteRegenerateAndDisconnectWarning": "This will regenerate the credentials and immediately disconnect the site. The site will need to be restarted with the new credentials.",
@@ -2270,5 +2328,40 @@
"remoteExitNodeRegenerateAndDisconnectWarning": "This will regenerate the credentials and immediately disconnect the remote exit node. The remote exit node will need to be restarted with the new credentials.", "remoteExitNodeRegenerateAndDisconnectWarning": "This will regenerate the credentials and immediately disconnect the remote exit node. The remote exit node will need to be restarted with the new credentials.",
"remoteExitNodeRegenerateCredentialsConfirmation": "Are you sure you want to regenerate the credentials for this remote exit node?", "remoteExitNodeRegenerateCredentialsConfirmation": "Are you sure you want to regenerate the credentials for this remote exit node?",
"remoteExitNodeRegenerateCredentialsWarning": "This will regenerate the credentials. The remote exit node will stay connected until you manually restart it and use the new credentials.", "remoteExitNodeRegenerateCredentialsWarning": "This will regenerate the credentials. The remote exit node will stay connected until you manually restart it and use the new credentials.",
"agent": "Agent" "agent": "Agent",
"personalUseOnly": "Personal Use Only",
"loginPageLicenseWatermark": "This instance is licensed for personal use only.",
"instanceIsUnlicensed": "This instance is unlicensed.",
"portRestrictions": "Port Restrictions",
"allPorts": "All",
"custom": "Custom",
"allPortsAllowed": "All Ports Allowed",
"allPortsBlocked": "All Ports Blocked",
"tcpPortsDescription": "Specify which TCP ports are allowed for this resource. Use '*' for all ports, leave empty to block all, or enter a comma-separated list of ports and ranges (e.g., 80,443,8000-9000).",
"udpPortsDescription": "Specify which UDP ports are allowed for this resource. Use '*' for all ports, leave empty to block all, or enter a comma-separated list of ports and ranges (e.g., 53,123,500-600).",
"organizationLoginPageTitle": "Organization Login Page",
"organizationLoginPageDescription": "Customize the login page for this organization",
"resourceLoginPageTitle": "Resource Login Page",
"resourceLoginPageDescription": "Customize the login page for individual resources",
"enterConfirmation": "Enter confirmation",
"blueprintViewDetails": "Details",
"defaultIdentityProvider": "Default Identity Provider",
"editInternalResourceDialogNetworkSettings": "Network Settings",
"editInternalResourceDialogAccessPolicy": "Access Policy",
"editInternalResourceDialogAddRoles": "Add Roles",
"editInternalResourceDialogAddUsers": "Add Users",
"editInternalResourceDialogAddClients": "Add Clients",
"editInternalResourceDialogDestinationLabel": "Destination",
"editInternalResourceDialogDestinationDescription": "Specify the destination address for the internal resource. This can be a hostname, IP address, or CIDR range depending on the selected mode. Optionally set an internal DNS alias for easier identification.",
"editInternalResourceDialogPortRestrictionsDescription": "Restrict access to specific TCP/UDP ports or allow/block all ports.",
"editInternalResourceDialogTcp": "TCP",
"editInternalResourceDialogUdp": "UDP",
"editInternalResourceDialogIcmp": "ICMP",
"editInternalResourceDialogAccessControl": "Access Control",
"editInternalResourceDialogAccessControlDescription": "Control which roles, users, and machine clients have access to this resource when connected. Admins always have access.",
"editInternalResourceDialogPortRangeValidationError": "Port range must be \"*\" for all ports, or a comma-separated list of ports and ranges (e.g., \"80,443,8000-9000\"). Ports must be between 1 and 65535.",
"orgAuthWhatsThis": "Where can I find my organization ID?",
"learnMore": "Learn more",
"backToHome": "Go back to home",
"needToSignInToOrg": "Need to use your organization's identity provider?"
} }

View File

@@ -1022,6 +1022,8 @@
"actionGetSite": "獲取站點", "actionGetSite": "獲取站點",
"actionListSites": "站點列表", "actionListSites": "站點列表",
"actionApplyBlueprint": "應用藍圖", "actionApplyBlueprint": "應用藍圖",
"actionListBlueprints": "藍圖列表",
"actionGetBlueprint": "獲取藍圖",
"setupToken": "設置令牌", "setupToken": "設置令牌",
"setupTokenDescription": "從伺服器控制台輸入設定令牌。", "setupTokenDescription": "從伺服器控制台輸入設定令牌。",
"setupTokenRequired": "需要設置令牌", "setupTokenRequired": "需要設置令牌",

View File

@@ -4,6 +4,7 @@ import createNextIntlPlugin from "next-intl/plugin";
const withNextIntl = createNextIntlPlugin(); const withNextIntl = createNextIntlPlugin();
const nextConfig: NextConfig = { const nextConfig: NextConfig = {
reactStrictMode: false,
eslint: { eslint: {
ignoreDuringBuilds: true ignoreDuringBuilds: true
}, },

6097
package-lock.json generated

File diff suppressed because it is too large Load Diff

View File

@@ -19,9 +19,9 @@
"db:sqlite:studio": "drizzle-kit studio --config=./drizzle.sqlite.config.ts", "db:sqlite:studio": "drizzle-kit studio --config=./drizzle.sqlite.config.ts",
"db:pg:studio": "drizzle-kit studio --config=./drizzle.pg.config.ts", "db:pg:studio": "drizzle-kit studio --config=./drizzle.pg.config.ts",
"db:clear-migrations": "rm -rf server/migrations", "db:clear-migrations": "rm -rf server/migrations",
"set:oss": "echo 'export const build = \"oss\" as any;' > server/build.ts && cp tsconfig.oss.json tsconfig.json", "set:oss": "echo 'export const build = \"oss\" as \"saas\" | \"enterprise\" | \"oss\";' > server/build.ts && cp tsconfig.oss.json tsconfig.json",
"set:saas": "echo 'export const build = \"saas\" as any;' > server/build.ts && cp tsconfig.saas.json tsconfig.json", "set:saas": "echo 'export const build = \"saas\" as \"saas\" | \"enterprise\" | \"oss\";' > server/build.ts && cp tsconfig.saas.json tsconfig.json",
"set:enterprise": "echo 'export const build = \"enterprise\" as any;' > server/build.ts && cp tsconfig.enterprise.json tsconfig.json", "set:enterprise": "echo 'export const build = \"enterprise\" as \"saas\" | \"enterprise\" | \"oss\";' > server/build.ts && cp tsconfig.enterprise.json tsconfig.json",
"set:sqlite": "echo 'export * from \"./sqlite\";\nexport const driver: \"pg\" | \"sqlite\" = \"sqlite\";' > server/db/index.ts", "set:sqlite": "echo 'export * from \"./sqlite\";\nexport const driver: \"pg\" | \"sqlite\" = \"sqlite\";' > server/db/index.ts",
"set:pg": "echo 'export * from \"./pg\";\nexport const driver: \"pg\" | \"sqlite\" = \"pg\";' > server/db/index.ts", "set:pg": "echo 'export * from \"./pg\";\nexport const driver: \"pg\" | \"sqlite\" = \"pg\";' > server/db/index.ts",
"next:build": "next build", "next:build": "next build",
@@ -29,16 +29,17 @@
"build:pg": "mkdir -p dist && next build && node esbuild.mjs -e server/index.ts -o dist/server.mjs && node esbuild.mjs -e server/setup/migrationsPg.ts -o dist/migrations.mjs", "build:pg": "mkdir -p dist && next build && node esbuild.mjs -e server/index.ts -o dist/server.mjs && node esbuild.mjs -e server/setup/migrationsPg.ts -o dist/migrations.mjs",
"start": "ENVIRONMENT=prod node dist/migrations.mjs && ENVIRONMENT=prod NODE_ENV=development node --enable-source-maps dist/server.mjs", "start": "ENVIRONMENT=prod node dist/migrations.mjs && ENVIRONMENT=prod NODE_ENV=development node --enable-source-maps dist/server.mjs",
"email": "email dev --dir server/emails/templates --port 3005", "email": "email dev --dir server/emails/templates --port 3005",
"build:cli": "node esbuild.mjs -e cli/index.ts -o dist/cli.mjs" "build:cli": "node esbuild.mjs -e cli/index.ts -o dist/cli.mjs",
"format": "prettier --write ."
}, },
"dependencies": { "dependencies": {
"@asteasolutions/zod-to-openapi": "8.1.0", "@asteasolutions/zod-to-openapi": "8.2.0",
"@faker-js/faker": "^10.1.0", "@aws-sdk/client-s3": "3.955.0",
"@headlessui/react": "^2.2.9", "@faker-js/faker": "10.1.0",
"@aws-sdk/client-s3": "3.943.0", "@headlessui/react": "2.2.9",
"@hookform/resolvers": "5.2.2", "@hookform/resolvers": "5.2.2",
"@monaco-editor/react": "^4.7.0", "@monaco-editor/react": "4.7.0",
"@node-rs/argon2": "^2.0.2", "@node-rs/argon2": "2.0.2",
"@oslojs/crypto": "1.0.1", "@oslojs/crypto": "1.0.1",
"@oslojs/encoding": "1.1.0", "@oslojs/encoding": "1.1.0",
"@radix-ui/react-avatar": "1.1.11", "@radix-ui/react-avatar": "1.1.11",
@@ -49,138 +50,132 @@
"@radix-ui/react-icons": "1.3.2", "@radix-ui/react-icons": "1.3.2",
"@radix-ui/react-label": "2.1.8", "@radix-ui/react-label": "2.1.8",
"@radix-ui/react-popover": "1.1.15", "@radix-ui/react-popover": "1.1.15",
"@radix-ui/react-progress": "^1.1.8", "@radix-ui/react-progress": "1.1.8",
"@radix-ui/react-radio-group": "1.3.8", "@radix-ui/react-radio-group": "1.3.8",
"@radix-ui/react-scroll-area": "^1.2.10", "@radix-ui/react-scroll-area": "1.2.10",
"@radix-ui/react-select": "2.2.6", "@radix-ui/react-select": "2.2.6",
"@radix-ui/react-separator": "1.1.8", "@radix-ui/react-separator": "1.1.8",
"@radix-ui/react-slot": "1.2.4", "@radix-ui/react-slot": "1.2.4",
"@radix-ui/react-switch": "1.2.6", "@radix-ui/react-switch": "1.2.6",
"@radix-ui/react-tabs": "1.1.13", "@radix-ui/react-tabs": "1.1.13",
"@radix-ui/react-toast": "1.2.15", "@radix-ui/react-toast": "1.2.15",
"@radix-ui/react-tooltip": "^1.2.8", "@radix-ui/react-tooltip": "1.2.8",
"@react-email/components": "0.5.7", "@react-email/components": "1.0.2",
"@react-email/render": "^1.3.2", "@react-email/render": "2.0.0",
"@react-email/tailwind": "1.2.2", "@react-email/tailwind": "2.0.2",
"@simplewebauthn/browser": "^13.2.2", "@simplewebauthn/browser": "13.2.2",
"@simplewebauthn/server": "^13.2.2", "@simplewebauthn/server": "13.2.2",
"@tailwindcss/forms": "^0.5.10", "@tailwindcss/forms": "0.5.11",
"@tanstack/react-query": "^5.90.6", "@tanstack/react-query": "5.90.12",
"@tanstack/react-table": "8.21.3", "@tanstack/react-table": "8.21.3",
"arctic": "^3.7.0", "arctic": "3.7.0",
"axios": "^1.13.2", "axios": "1.13.2",
"better-sqlite3": "11.7.0", "better-sqlite3": "11.9.1",
"canvas-confetti": "1.9.4", "canvas-confetti": "1.9.4",
"class-variance-authority": "^0.7.1", "class-variance-authority": "0.7.1",
"clsx": "2.1.1", "clsx": "2.1.1",
"cmdk": "1.1.1", "cmdk": "1.1.1",
"cookie": "^1.0.2", "cookie": "1.1.1",
"cookie-parser": "1.4.7", "cookie-parser": "1.4.7",
"cookies": "^0.9.1", "cookies": "0.9.1",
"cors": "2.8.5", "cors": "2.8.5",
"crypto-js": "^4.2.0", "crypto-js": "4.2.0",
"d3": "^7.9.0", "d3": "7.9.0",
"date-fns": "4.1.0", "date-fns": "4.1.0",
"drizzle-orm": "0.45.0", "drizzle-orm": "0.45.1",
"eslint": "9.39.1", "eslint": "9.39.2",
"eslint-config-next": "16.0.7", "eslint-config-next": "16.1.0",
"express": "5.2.1", "express": "5.2.1",
"express-rate-limit": "8.2.1", "express-rate-limit": "8.2.1",
"glob": "11.1.0", "glob": "13.0.0",
"helmet": "8.1.0", "helmet": "8.1.0",
"http-errors": "2.0.1", "http-errors": "2.0.1",
"i": "^0.3.7", "i": "0.3.7",
"input-otp": "1.4.2", "input-otp": "1.4.2",
"ioredis": "5.8.2", "ioredis": "5.8.2",
"jmespath": "^0.16.0", "jmespath": "0.16.0",
"js-yaml": "4.1.1", "js-yaml": "4.1.1",
"jsonwebtoken": "^9.0.2", "jsonwebtoken": "9.0.3",
"lucide-react": "^0.556.0", "lucide-react": "0.562.0",
"maxmind": "5.0.1", "maxmind": "5.0.1",
"moment": "2.30.1", "moment": "2.30.1",
"next": "15.5.7", "next": "15.5.9",
"next-intl": "^4.4.0", "next-intl": "4.6.1",
"next-themes": "0.4.6", "next-themes": "0.4.6",
"nextjs-toploader": "^3.9.17", "nextjs-toploader": "3.9.17",
"node-cache": "5.1.2", "node-cache": "5.1.2",
"node-fetch": "3.3.2", "node-fetch": "3.3.2",
"nodemailer": "7.0.11", "nodemailer": "7.0.11",
"npm": "^11.6.4", "npm": "11.7.0",
"nprogress": "^0.2.0", "nprogress": "0.2.0",
"oslo": "1.2.1", "oslo": "1.2.1",
"pg": "^8.16.2", "pg": "8.16.3",
"posthog-node": "^5.11.2", "posthog-node": "5.17.4",
"qrcode.react": "4.2.0", "qrcode.react": "4.2.0",
"react": "19.2.1", "react": "19.2.3",
"react-day-picker": "9.11.3", "react-day-picker": "9.13.0",
"react-dom": "19.2.1", "react-dom": "19.2.3",
"react-easy-sort": "^1.8.0", "react-easy-sort": "1.8.0",
"react-hook-form": "7.68.0", "react-hook-form": "7.68.0",
"react-icons": "^5.5.0", "react-icons": "5.5.0",
"rebuild": "0.1.2", "rebuild": "0.1.2",
"recharts": "^2.15.4", "recharts": "2.15.4",
"reodotdev": "^1.0.0", "reodotdev": "1.0.0",
"resend": "^6.4.2", "resend": "6.6.0",
"semver": "^7.7.3", "semver": "7.7.3",
"stripe": "18.2.1", "stripe": "20.1.0",
"swagger-ui-express": "^5.0.1", "swagger-ui-express": "5.0.1",
"topojson-client": "^3.1.0",
"tailwind-merge": "3.4.0", "tailwind-merge": "3.4.0",
"tw-animate-css": "^1.3.8", "topojson-client": "3.1.0",
"uuid": "^13.0.0", "tw-animate-css": "1.4.0",
"uuid": "13.0.0",
"vaul": "1.1.2", "vaul": "1.1.2",
"visionscarto-world-atlas": "^1.0.0", "visionscarto-world-atlas": "1.0.0",
"winston": "3.18.3", "winston": "3.19.0",
"winston-daily-rotate-file": "5.0.0", "winston-daily-rotate-file": "5.0.0",
"ws": "8.18.3", "ws": "8.18.3",
"yaml": "^2.8.1", "yaml": "2.8.2",
"yargs": "18.0.0", "yargs": "18.0.0",
"zod": "4.1.12", "zod": "4.2.1",
"zod-validation-error": "5.0.0" "zod-validation-error": "5.0.0"
}, },
"devDependencies": { "devDependencies": {
"@dotenvx/dotenvx": "1.51.1", "@dotenvx/dotenvx": "1.51.2",
"@esbuild-plugins/tsconfig-paths": "0.1.2", "@esbuild-plugins/tsconfig-paths": "0.1.2",
"@react-email/preview-server": "4.3.2", "@tailwindcss/postcss": "4.1.18",
"@tailwindcss/postcss": "^4.1.17", "@tanstack/react-query-devtools": "5.91.1",
"@tanstack/react-query-devtools": "^5.90.2", "@types/better-sqlite3": "7.6.13",
"@types/better-sqlite3": "7.6.12",
"@types/cookie-parser": "1.4.10", "@types/cookie-parser": "1.4.10",
"@types/cors": "2.8.19", "@types/cors": "2.8.19",
"@types/crypto-js": "^4.2.2", "@types/crypto-js": "4.2.2",
"@types/d3": "^7.4.3", "@types/d3": "7.4.3",
"@types/express": "5.0.6", "@types/express": "5.0.6",
"@types/express-session": "^1.18.2", "@types/express-session": "1.18.2",
"@types/jmespath": "^0.15.2", "@types/jmespath": "0.15.2",
"@types/js-yaml": "4.0.9", "@types/jsonwebtoken": "9.0.10",
"@types/jsonwebtoken": "^9.0.10", "@types/node": "24.10.2",
"@types/node": "24.10.1",
"@types/nprogress": "^0.2.3",
"@types/nodemailer": "7.0.4", "@types/nodemailer": "7.0.4",
"@types/pg": "8.15.6", "@types/nprogress": "0.2.3",
"@types/pg": "8.16.0",
"@types/react": "19.2.7", "@types/react": "19.2.7",
"@types/react-dom": "19.2.3", "@types/react-dom": "19.2.3",
"@types/semver": "^7.7.1", "@types/semver": "7.7.1",
"@types/swagger-ui-express": "^4.1.8", "@types/swagger-ui-express": "4.1.8",
"@types/topojson-client": "^3.1.5", "@types/topojson-client": "3.1.5",
"@types/ws": "8.18.1", "@types/ws": "8.18.1",
"babel-plugin-react-compiler": "^1.0.0",
"@types/yargs": "17.0.35", "@types/yargs": "17.0.35",
"@types/js-yaml": "4.0.9",
"babel-plugin-react-compiler": "1.0.0",
"drizzle-kit": "0.31.8", "drizzle-kit": "0.31.8",
"esbuild": "0.27.1", "esbuild": "0.27.2",
"esbuild-node-externals": "1.20.1", "esbuild-node-externals": "1.20.1",
"postcss": "^8", "postcss": "8.5.6",
"react-email": "4.3.2", "prettier": "3.7.4",
"tailwindcss": "^4.1.4", "react-email": "5.0.7",
"tailwindcss": "4.1.18",
"tsc-alias": "1.8.16", "tsc-alias": "1.8.16",
"tsx": "4.21.0", "tsx": "4.21.0",
"typescript": "^5", "typescript": "5.9.3",
"typescript-eslint": "^8.46.3" "typescript-eslint": "8.49.0"
},
"overrides": {
"emblor": {
"react": "19.0.0",
"react-dom": "19.0.0"
}
} }
} }

View File

@@ -1,8 +1,8 @@
/** @type {import('postcss-load-config').Config} */ /** @type {import('postcss-load-config').Config} */
const config = { const config = {
plugins: { plugins: {
"@tailwindcss/postcss": {}, "@tailwindcss/postcss": {}
}, }
}; };
export default config; export default config;

Binary file not shown.

Before

Width:  |  Height:  |  Size: 687 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 713 KiB

After

Width:  |  Height:  |  Size: 493 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 636 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 713 KiB

After

Width:  |  Height:  |  Size: 484 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 421 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 484 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 713 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 456 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 674 KiB

After

Width:  |  Height:  |  Size: 396 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 434 KiB

View File

@@ -2,13 +2,13 @@ import { hash, verify } from "@node-rs/argon2";
export async function verifyPassword( export async function verifyPassword(
password: string, password: string,
hash: string, hash: string
): Promise<boolean> { ): Promise<boolean> {
const validPassword = await verify(hash, password, { const validPassword = await verify(hash, password, {
memoryCost: 19456, memoryCost: 19456,
timeCost: 2, timeCost: 2,
outputLen: 32, outputLen: 32,
parallelism: 1, parallelism: 1
}); });
return validPassword; return validPassword;
} }
@@ -18,7 +18,7 @@ export async function hashPassword(password: string): Promise<string> {
memoryCost: 19456, memoryCost: 19456,
timeCost: 2, timeCost: 2,
outputLen: 32, outputLen: 32,
parallelism: 1, parallelism: 1
}); });
return passwordHash; return passwordHash;

View File

@@ -4,10 +4,13 @@ export const passwordSchema = z
.string() .string()
.min(8, { message: "Password must be at least 8 characters long" }) .min(8, { message: "Password must be at least 8 characters long" })
.max(128, { message: "Password must be at most 128 characters long" }) .max(128, { message: "Password must be at most 128 characters long" })
.regex(/^(?=.*?[A-Z])(?=.*?[a-z])(?=.*?[0-9])(?=.*?[~!`@#$%^&*()_\-+={}[\]|\\:;"'<>,.\/?]).*$/, { .regex(
message: `Your password must meet the following conditions: /^(?=.*?[A-Z])(?=.*?[a-z])(?=.*?[0-9])(?=.*?[~!`@#$%^&*()_\-+={}[\]|\\:;"'<>,.\/?]).*$/,
{
message: `Your password must meet the following conditions:
at least one uppercase English letter, at least one uppercase English letter,
at least one lowercase English letter, at least one lowercase English letter,
at least one digit, at least one digit,
at least one special character.` at least one special character.`
}); }
);

View File

@@ -1,6 +1,4 @@
import { import { encodeHexLowerCase } from "@oslojs/encoding";
encodeHexLowerCase,
} from "@oslojs/encoding";
import { sha256 } from "@oslojs/crypto/sha2"; import { sha256 } from "@oslojs/crypto/sha2";
import { Newt, newts, newtSessions, NewtSession } from "@server/db"; import { Newt, newts, newtSessions, NewtSession } from "@server/db";
import { db } from "@server/db"; import { db } from "@server/db";
@@ -10,25 +8,25 @@ export const EXPIRES = 1000 * 60 * 60 * 24 * 30;
export async function createNewtSession( export async function createNewtSession(
token: string, token: string,
newtId: string, newtId: string
): Promise<NewtSession> { ): Promise<NewtSession> {
const sessionId = encodeHexLowerCase( const sessionId = encodeHexLowerCase(
sha256(new TextEncoder().encode(token)), sha256(new TextEncoder().encode(token))
); );
const session: NewtSession = { const session: NewtSession = {
sessionId: sessionId, sessionId: sessionId,
newtId, newtId,
expiresAt: new Date(Date.now() + EXPIRES).getTime(), expiresAt: new Date(Date.now() + EXPIRES).getTime()
}; };
await db.insert(newtSessions).values(session); await db.insert(newtSessions).values(session);
return session; return session;
} }
export async function validateNewtSessionToken( export async function validateNewtSessionToken(
token: string, token: string
): Promise<SessionValidationResult> { ): Promise<SessionValidationResult> {
const sessionId = encodeHexLowerCase( const sessionId = encodeHexLowerCase(
sha256(new TextEncoder().encode(token)), sha256(new TextEncoder().encode(token))
); );
const result = await db const result = await db
.select({ newt: newts, session: newtSessions }) .select({ newt: newts, session: newtSessions })
@@ -45,14 +43,12 @@ export async function validateNewtSessionToken(
.where(eq(newtSessions.sessionId, session.sessionId)); .where(eq(newtSessions.sessionId, session.sessionId));
return { session: null, newt: null }; return { session: null, newt: null };
} }
if (Date.now() >= session.expiresAt - (EXPIRES / 2)) { if (Date.now() >= session.expiresAt - EXPIRES / 2) {
session.expiresAt = new Date( session.expiresAt = new Date(Date.now() + EXPIRES).getTime();
Date.now() + EXPIRES,
).getTime();
await db await db
.update(newtSessions) .update(newtSessions)
.set({ .set({
expiresAt: session.expiresAt, expiresAt: session.expiresAt
}) })
.where(eq(newtSessions.sessionId, session.sessionId)); .where(eq(newtSessions.sessionId, session.sessionId));
} }

View File

@@ -1,6 +1,4 @@
import { import { encodeHexLowerCase } from "@oslojs/encoding";
encodeHexLowerCase,
} from "@oslojs/encoding";
import { sha256 } from "@oslojs/crypto/sha2"; import { sha256 } from "@oslojs/crypto/sha2";
import { Olm, olms, olmSessions, OlmSession } from "@server/db"; import { Olm, olms, olmSessions, OlmSession } from "@server/db";
import { db } from "@server/db"; import { db } from "@server/db";
@@ -10,25 +8,25 @@ export const EXPIRES = 1000 * 60 * 60 * 24 * 30;
export async function createOlmSession( export async function createOlmSession(
token: string, token: string,
olmId: string, olmId: string
): Promise<OlmSession> { ): Promise<OlmSession> {
const sessionId = encodeHexLowerCase( const sessionId = encodeHexLowerCase(
sha256(new TextEncoder().encode(token)), sha256(new TextEncoder().encode(token))
); );
const session: OlmSession = { const session: OlmSession = {
sessionId: sessionId, sessionId: sessionId,
olmId, olmId,
expiresAt: new Date(Date.now() + EXPIRES).getTime(), expiresAt: new Date(Date.now() + EXPIRES).getTime()
}; };
await db.insert(olmSessions).values(session); await db.insert(olmSessions).values(session);
return session; return session;
} }
export async function validateOlmSessionToken( export async function validateOlmSessionToken(
token: string, token: string
): Promise<SessionValidationResult> { ): Promise<SessionValidationResult> {
const sessionId = encodeHexLowerCase( const sessionId = encodeHexLowerCase(
sha256(new TextEncoder().encode(token)), sha256(new TextEncoder().encode(token))
); );
const result = await db const result = await db
.select({ olm: olms, session: olmSessions }) .select({ olm: olms, session: olmSessions })
@@ -45,14 +43,12 @@ export async function validateOlmSessionToken(
.where(eq(olmSessions.sessionId, session.sessionId)); .where(eq(olmSessions.sessionId, session.sessionId));
return { session: null, olm: null }; return { session: null, olm: null };
} }
if (Date.now() >= session.expiresAt - (EXPIRES / 2)) { if (Date.now() >= session.expiresAt - EXPIRES / 2) {
session.expiresAt = new Date( session.expiresAt = new Date(Date.now() + EXPIRES).getTime();
Date.now() + EXPIRES,
).getTime();
await db await db
.update(olmSessions) .update(olmSessions)
.set({ .set({
expiresAt: session.expiresAt, expiresAt: session.expiresAt
}) })
.where(eq(olmSessions.sessionId, session.sessionId)); .where(eq(olmSessions.sessionId, session.sessionId));
} }

321
server/db/asns.ts Normal file
View File

@@ -0,0 +1,321 @@
// Curated list of major ASNs (Cloud Providers, CDNs, ISPs, etc.)
// This is not exhaustive - there are 100,000+ ASNs globally
// Users can still enter any ASN manually in the input field

/** A selectable ASN entry: display name, "AS<number>" code, and numeric ASN. */
interface AsnEntry {
    name: string;
    code: string;
    asn: number;
}

// [name, asn] pairs. The "ASxxxx" code string is derived from the number
// below so the two representations can never drift apart. asn 0 is a
// sentinel ("ALL ASNs") that matches every ASN and gets the code "ALL".
const ASN_DATA: ReadonlyArray<readonly [string, number]> = [
    ["ALL ASNs", 0],
    // Major Cloud Providers
    ["Google LLC", 15169],
    ["Amazon AWS", 16509],
    ["Amazon AWS (EC2)", 14618],
    ["Microsoft Azure", 8075],
    ["Microsoft Corporation", 8068],
    ["DigitalOcean", 14061],
    ["Linode", 63949],
    ["Hetzner Online", 24940],
    ["OVH SAS", 16276],
    ["Oracle Cloud", 31898],
    ["Alibaba Cloud", 45102],
    ["IBM Cloud", 36351],
    // CDNs
    ["Cloudflare", 13335],
    ["Fastly", 54113],
    ["Akamai Technologies", 20940],
    ["Akamai (Primary)", 16625],
    // Mobile Carriers - US
    ["T-Mobile USA", 21928],
    ["Verizon Wireless", 6167],
    ["AT&T Mobility", 20057],
    ["Sprint (T-Mobile)", 1239],
    ["US Cellular", 6430],
    // Mobile Carriers - Europe
    ["Vodafone UK", 25135],
    ["EE (UK)", 12576],
    ["Three UK", 29194],
    ["O2 UK", 13285],
    ["Telefonica Spain Mobile", 12430],
    // Mobile Carriers - Asia
    ["NTT DoCoMo (Japan)", 9605],
    ["SoftBank Mobile (Japan)", 17676],
    ["SK Telecom (Korea)", 9318],
    ["KT Corporation Mobile (Korea)", 4766],
    ["Airtel India", 24560],
    ["China Mobile", 9808],
    // Major US ISPs
    ["AT&T Services", 7018],
    ["Comcast Cable", 7922],
    ["Verizon", 701],
    ["Cox Communications", 22773],
    ["Charter Communications", 20115],
    ["CenturyLink", 209],
    // Major European ISPs
    ["Deutsche Telekom", 3320],
    ["Vodafone", 1273],
    ["British Telecom", 2856],
    ["Orange", 3215],
    ["Telefonica", 12956],
    // Major Asian ISPs
    ["China Telecom", 4134],
    ["China Unicom", 4837],
    ["NTT Communications", 2914],
    ["KDDI Corporation", 2516],
    ["Reliance Jio (India)", 55836],
    // VPN/Proxy Providers
    ["Private Internet Access", 46562],
    ["NordVPN", 202425],
    ["Mullvad VPN", 213281],
    // Social Media / Major Tech
    ["Facebook/Meta", 32934],
    ["Twitter/X", 13414],
    ["Apple", 714],
    ["Netflix", 2906],
    // Academic/Research
    ["MIT", 3],
    ["Stanford University", 32],
    ["CERN", 513]
];

// Expand each pair into the { name, code, asn } shape the UI consumes.
// Only the AS0 "match all" sentinel uses the special code "ALL".
export const MAJOR_ASNS: AsnEntry[] = ASN_DATA.map(([name, asn]) => ({
    name,
    code: asn === 0 ? "ALL" : `AS${asn}`,
    asn
}));

File diff suppressed because it is too large Load Diff

13
server/db/maxmindAsn.ts Normal file
View File

@@ -0,0 +1,13 @@
import maxmind, { AsnResponse, Reader } from "maxmind";
import config from "@server/lib/config";

// Read the optional ASN database path once, so the raw config is not
// re-parsed and no non-null assertion is needed when opening the reader.
const asnDbPath = config.getRawConfig().server.maxmind_asn_path;

// MaxMind ASN database reader, opened once at module load via top-level
// await. Stays null when no `maxmind_asn_path` is configured, in which
// case callers must treat ASN lookups as unavailable.
const maxmindAsnLookup: Reader<AsnResponse> | null = asnDbPath
    ? await maxmind.open<AsnResponse>(asnDbPath)
    : null;

export { maxmindAsnLookup };

View File

@@ -6,28 +6,28 @@ import { withReplicas } from "drizzle-orm/pg-core";
function createDb() { function createDb() {
const config = readConfigFile(); const config = readConfigFile();
if (!config.postgres) { // check the environment variables for postgres config first before the config file
// check the environment variables for postgres config if (process.env.POSTGRES_CONNECTION_STRING) {
if (process.env.POSTGRES_CONNECTION_STRING) { config.postgres = {
config.postgres = { connection_string: process.env.POSTGRES_CONNECTION_STRING
connection_string: process.env.POSTGRES_CONNECTION_STRING };
}; if (process.env.POSTGRES_REPLICA_CONNECTION_STRINGS) {
if (process.env.POSTGRES_REPLICA_CONNECTION_STRINGS) { const replicas =
const replicas = process.env.POSTGRES_REPLICA_CONNECTION_STRINGS.split(",").map(
process.env.POSTGRES_REPLICA_CONNECTION_STRINGS.split( (conn) => ({
","
).map((conn) => ({
connection_string: conn.trim() connection_string: conn.trim()
})); })
config.postgres.replicas = replicas; );
} config.postgres.replicas = replicas;
} else {
throw new Error(
"Postgres configuration is missing in the configuration file."
);
} }
} }
if (!config.postgres) {
throw new Error(
"Postgres configuration is missing in the configuration file."
);
}
const connectionString = config.postgres?.connection_string; const connectionString = config.postgres?.connection_string;
const replicaConnections = config.postgres?.replicas || []; const replicaConnections = config.postgres?.replicas || [];
@@ -81,6 +81,7 @@ function createDb() {
export const db = createDb(); export const db = createDb();
export default db; export default db;
export const primaryDb = db.$primary;
export type Transaction = Parameters< export type Transaction = Parameters<
Parameters<(typeof db)["transaction"]>[0] Parameters<(typeof db)["transaction"]>[0]
>[0]; >[0];

View File

@@ -10,7 +10,7 @@ const runMigrations = async () => {
await migrate(db as any, { await migrate(db as any, {
migrationsFolder: migrationsFolder migrationsFolder: migrationsFolder
}); });
console.log("Migrations completed successfully."); console.log("Migrations completed successfully.");
process.exit(0); process.exit(0);
} catch (error) { } catch (error) {
console.error("Error running migrations:", error); console.error("Error running migrations:", error);

View File

@@ -204,6 +204,29 @@ export const loginPageOrg = pgTable("loginPageOrg", {
.references(() => orgs.orgId, { onDelete: "cascade" }) .references(() => orgs.orgId, { onDelete: "cascade" })
}); });
export const loginPageBranding = pgTable("loginPageBranding", {
loginPageBrandingId: serial("loginPageBrandingId").primaryKey(),
logoUrl: text("logoUrl").notNull(),
logoWidth: integer("logoWidth").notNull(),
logoHeight: integer("logoHeight").notNull(),
primaryColor: text("primaryColor"),
resourceTitle: text("resourceTitle").notNull(),
resourceSubtitle: text("resourceSubtitle"),
orgTitle: text("orgTitle"),
orgSubtitle: text("orgSubtitle")
});
export const loginPageBrandingOrg = pgTable("loginPageBrandingOrg", {
loginPageBrandingId: integer("loginPageBrandingId")
.notNull()
.references(() => loginPageBranding.loginPageBrandingId, {
onDelete: "cascade"
}),
orgId: varchar("orgId")
.notNull()
.references(() => orgs.orgId, { onDelete: "cascade" })
});
export const sessionTransferToken = pgTable("sessionTransferToken", { export const sessionTransferToken = pgTable("sessionTransferToken", {
token: varchar("token").primaryKey(), token: varchar("token").primaryKey(),
sessionId: varchar("sessionId") sessionId: varchar("sessionId")
@@ -215,42 +238,56 @@ export const sessionTransferToken = pgTable("sessionTransferToken", {
expiresAt: bigint("expiresAt", { mode: "number" }).notNull() expiresAt: bigint("expiresAt", { mode: "number" }).notNull()
}); });
export const actionAuditLog = pgTable("actionAuditLog", { export const actionAuditLog = pgTable(
id: serial("id").primaryKey(), "actionAuditLog",
timestamp: bigint("timestamp", { mode: "number" }).notNull(), // this is EPOCH time in seconds {
orgId: varchar("orgId") id: serial("id").primaryKey(),
.notNull() timestamp: bigint("timestamp", { mode: "number" }).notNull(), // this is EPOCH time in seconds
.references(() => orgs.orgId, { onDelete: "cascade" }), orgId: varchar("orgId")
actorType: varchar("actorType", { length: 50 }).notNull(), .notNull()
actor: varchar("actor", { length: 255 }).notNull(), .references(() => orgs.orgId, { onDelete: "cascade" }),
actorId: varchar("actorId", { length: 255 }).notNull(), actorType: varchar("actorType", { length: 50 }).notNull(),
action: varchar("action", { length: 100 }).notNull(), actor: varchar("actor", { length: 255 }).notNull(),
metadata: text("metadata") actorId: varchar("actorId", { length: 255 }).notNull(),
}, (table) => ([ action: varchar("action", { length: 100 }).notNull(),
index("idx_actionAuditLog_timestamp").on(table.timestamp), metadata: text("metadata")
index("idx_actionAuditLog_org_timestamp").on(table.orgId, table.timestamp) },
])); (table) => [
index("idx_actionAuditLog_timestamp").on(table.timestamp),
index("idx_actionAuditLog_org_timestamp").on(
table.orgId,
table.timestamp
)
]
);
export const accessAuditLog = pgTable("accessAuditLog", { export const accessAuditLog = pgTable(
id: serial("id").primaryKey(), "accessAuditLog",
timestamp: bigint("timestamp", { mode: "number" }).notNull(), // this is EPOCH time in seconds {
orgId: varchar("orgId") id: serial("id").primaryKey(),
.notNull() timestamp: bigint("timestamp", { mode: "number" }).notNull(), // this is EPOCH time in seconds
.references(() => orgs.orgId, { onDelete: "cascade" }), orgId: varchar("orgId")
actorType: varchar("actorType", { length: 50 }), .notNull()
actor: varchar("actor", { length: 255 }), .references(() => orgs.orgId, { onDelete: "cascade" }),
actorId: varchar("actorId", { length: 255 }), actorType: varchar("actorType", { length: 50 }),
resourceId: integer("resourceId"), actor: varchar("actor", { length: 255 }),
ip: varchar("ip", { length: 45 }), actorId: varchar("actorId", { length: 255 }),
type: varchar("type", { length: 100 }).notNull(), resourceId: integer("resourceId"),
action: boolean("action").notNull(), ip: varchar("ip", { length: 45 }),
location: text("location"), type: varchar("type", { length: 100 }).notNull(),
userAgent: text("userAgent"), action: boolean("action").notNull(),
metadata: text("metadata") location: text("location"),
}, (table) => ([ userAgent: text("userAgent"),
index("idx_identityAuditLog_timestamp").on(table.timestamp), metadata: text("metadata")
index("idx_identityAuditLog_org_timestamp").on(table.orgId, table.timestamp) },
])); (table) => [
index("idx_identityAuditLog_timestamp").on(table.timestamp),
index("idx_identityAuditLog_org_timestamp").on(
table.orgId,
table.timestamp
)
]
);
export type Limit = InferSelectModel<typeof limits>; export type Limit = InferSelectModel<typeof limits>;
export type Account = InferSelectModel<typeof account>; export type Account = InferSelectModel<typeof account>;
@@ -269,5 +306,6 @@ export type RemoteExitNodeSession = InferSelectModel<
>; >;
export type ExitNodeOrg = InferSelectModel<typeof exitNodeOrgs>; export type ExitNodeOrg = InferSelectModel<typeof exitNodeOrgs>;
export type LoginPage = InferSelectModel<typeof loginPage>; export type LoginPage = InferSelectModel<typeof loginPage>;
export type LoginPageBranding = InferSelectModel<typeof loginPageBranding>;
export type ActionAuditLog = InferSelectModel<typeof actionAuditLog>; export type ActionAuditLog = InferSelectModel<typeof actionAuditLog>;
export type AccessAuditLog = InferSelectModel<typeof accessAuditLog>; export type AccessAuditLog = InferSelectModel<typeof accessAuditLog>;

View File

@@ -7,7 +7,8 @@ import {
bigint, bigint,
real, real,
text, text,
index index,
uniqueIndex
} from "drizzle-orm/pg-core"; } from "drizzle-orm/pg-core";
import { InferSelectModel } from "drizzle-orm"; import { InferSelectModel } from "drizzle-orm";
import { randomUUID } from "crypto"; import { randomUUID } from "crypto";
@@ -177,7 +178,7 @@ export const targetHealthCheck = pgTable("targetHealthCheck", {
hcMethod: varchar("hcMethod").default("GET"), hcMethod: varchar("hcMethod").default("GET"),
hcStatus: integer("hcStatus"), // http code hcStatus: integer("hcStatus"), // http code
hcHealth: text("hcHealth").default("unknown"), // "unknown", "healthy", "unhealthy" hcHealth: text("hcHealth").default("unknown"), // "unknown", "healthy", "unhealthy"
hcTlsServerName: text("hcTlsServerName"), hcTlsServerName: text("hcTlsServerName")
}); });
export const exitNodes = pgTable("exitNodes", { export const exitNodes = pgTable("exitNodes", {
@@ -213,7 +214,10 @@ export const siteResources = pgTable("siteResources", {
destination: varchar("destination").notNull(), // ip, cidr, hostname; validate against the mode destination: varchar("destination").notNull(), // ip, cidr, hostname; validate against the mode
enabled: boolean("enabled").notNull().default(true), enabled: boolean("enabled").notNull().default(true),
alias: varchar("alias"), alias: varchar("alias"),
aliasAddress: varchar("aliasAddress") aliasAddress: varchar("aliasAddress"),
tcpPortRangeString: varchar("tcpPortRangeString"),
udpPortRangeString: varchar("udpPortRangeString"),
disableIcmp: boolean("disableIcmp").notNull().default(false)
}); });
export const clientSiteResources = pgTable("clientSiteResources", { export const clientSiteResources = pgTable("clientSiteResources", {

View File

@@ -52,10 +52,7 @@ export async function getResourceByDomain(
resourceHeaderAuth, resourceHeaderAuth,
eq(resourceHeaderAuth.resourceId, resources.resourceId) eq(resourceHeaderAuth.resourceId, resources.resourceId)
) )
.innerJoin( .innerJoin(orgs, eq(orgs.orgId, resources.orgId))
orgs,
eq(orgs.orgId, resources.orgId)
)
.where(eq(resources.fullDomain, domain)) .where(eq(resources.fullDomain, domain))
.limit(1); .limit(1);

View File

@@ -20,6 +20,7 @@ function createDb() {
export const db = createDb(); export const db = createDb();
export default db; export default db;
export const primaryDb = db;
export type Transaction = Parameters< export type Transaction = Parameters<
Parameters<(typeof db)["transaction"]>[0] Parameters<(typeof db)["transaction"]>[0]
>[0]; >[0];

View File

@@ -8,7 +8,7 @@ const runMigrations = async () => {
console.log("Running migrations..."); console.log("Running migrations...");
try { try {
migrate(db as any, { migrate(db as any, {
migrationsFolder: migrationsFolder, migrationsFolder: migrationsFolder
}); });
console.log("Migrations completed successfully."); console.log("Migrations completed successfully.");
} catch (error) { } catch (error) {

View File

@@ -1,13 +1,12 @@
import {
sqliteTable,
integer,
text,
real,
index
} from "drizzle-orm/sqlite-core";
import { InferSelectModel } from "drizzle-orm"; import { InferSelectModel } from "drizzle-orm";
import { domains, orgs, targets, users, exitNodes, sessions } from "./schema"; import {
import { metadata } from "@app/app/[orgId]/settings/layout"; index,
integer,
real,
sqliteTable,
text
} from "drizzle-orm/sqlite-core";
import { domains, exitNodes, orgs, sessions, users } from "./schema";
export const certificates = sqliteTable("certificates", { export const certificates = sqliteTable("certificates", {
certId: integer("certId").primaryKey({ autoIncrement: true }), certId: integer("certId").primaryKey({ autoIncrement: true }),
@@ -29,7 +28,9 @@ export const certificates = sqliteTable("certificates", {
}); });
export const dnsChallenge = sqliteTable("dnsChallenges", { export const dnsChallenge = sqliteTable("dnsChallenges", {
dnsChallengeId: integer("dnsChallengeId").primaryKey({ autoIncrement: true }), dnsChallengeId: integer("dnsChallengeId").primaryKey({
autoIncrement: true
}),
domain: text("domain").notNull(), domain: text("domain").notNull(),
token: text("token").notNull(), token: text("token").notNull(),
keyAuthorization: text("keyAuthorization").notNull(), keyAuthorization: text("keyAuthorization").notNull(),
@@ -61,9 +62,7 @@ export const customers = sqliteTable("customers", {
}); });
export const subscriptions = sqliteTable("subscriptions", { export const subscriptions = sqliteTable("subscriptions", {
subscriptionId: text("subscriptionId") subscriptionId: text("subscriptionId").primaryKey().notNull(),
.primaryKey()
.notNull(),
customerId: text("customerId") customerId: text("customerId")
.notNull() .notNull()
.references(() => customers.customerId, { onDelete: "cascade" }), .references(() => customers.customerId, { onDelete: "cascade" }),
@@ -75,7 +74,9 @@ export const subscriptions = sqliteTable("subscriptions", {
}); });
export const subscriptionItems = sqliteTable("subscriptionItems", { export const subscriptionItems = sqliteTable("subscriptionItems", {
subscriptionItemId: integer("subscriptionItemId").primaryKey({ autoIncrement: true }), subscriptionItemId: integer("subscriptionItemId").primaryKey({
autoIncrement: true
}),
subscriptionId: text("subscriptionId") subscriptionId: text("subscriptionId")
.notNull() .notNull()
.references(() => subscriptions.subscriptionId, { .references(() => subscriptions.subscriptionId, {
@@ -129,7 +130,9 @@ export const limits = sqliteTable("limits", {
}); });
export const usageNotifications = sqliteTable("usageNotifications", { export const usageNotifications = sqliteTable("usageNotifications", {
notificationId: integer("notificationId").primaryKey({ autoIncrement: true }), notificationId: integer("notificationId").primaryKey({
autoIncrement: true
}),
orgId: text("orgId") orgId: text("orgId")
.notNull() .notNull()
.references(() => orgs.orgId, { onDelete: "cascade" }), .references(() => orgs.orgId, { onDelete: "cascade" }),
@@ -199,6 +202,31 @@ export const loginPageOrg = sqliteTable("loginPageOrg", {
.references(() => orgs.orgId, { onDelete: "cascade" }) .references(() => orgs.orgId, { onDelete: "cascade" })
}); });
export const loginPageBranding = sqliteTable("loginPageBranding", {
loginPageBrandingId: integer("loginPageBrandingId").primaryKey({
autoIncrement: true
}),
logoUrl: text("logoUrl").notNull(),
logoWidth: integer("logoWidth").notNull(),
logoHeight: integer("logoHeight").notNull(),
primaryColor: text("primaryColor"),
resourceTitle: text("resourceTitle").notNull(),
resourceSubtitle: text("resourceSubtitle"),
orgTitle: text("orgTitle"),
orgSubtitle: text("orgSubtitle")
});
export const loginPageBrandingOrg = sqliteTable("loginPageBrandingOrg", {
loginPageBrandingId: integer("loginPageBrandingId")
.notNull()
.references(() => loginPageBranding.loginPageBrandingId, {
onDelete: "cascade"
}),
orgId: text("orgId")
.notNull()
.references(() => orgs.orgId, { onDelete: "cascade" })
});
export const sessionTransferToken = sqliteTable("sessionTransferToken", { export const sessionTransferToken = sqliteTable("sessionTransferToken", {
token: text("token").primaryKey(), token: text("token").primaryKey(),
sessionId: text("sessionId") sessionId: text("sessionId")
@@ -210,42 +238,56 @@ export const sessionTransferToken = sqliteTable("sessionTransferToken", {
expiresAt: integer("expiresAt").notNull() expiresAt: integer("expiresAt").notNull()
}); });
export const actionAuditLog = sqliteTable("actionAuditLog", { export const actionAuditLog = sqliteTable(
id: integer("id").primaryKey({ autoIncrement: true }), "actionAuditLog",
timestamp: integer("timestamp").notNull(), // this is EPOCH time in seconds {
orgId: text("orgId") id: integer("id").primaryKey({ autoIncrement: true }),
.notNull() timestamp: integer("timestamp").notNull(), // this is EPOCH time in seconds
.references(() => orgs.orgId, { onDelete: "cascade" }), orgId: text("orgId")
actorType: text("actorType").notNull(), .notNull()
actor: text("actor").notNull(), .references(() => orgs.orgId, { onDelete: "cascade" }),
actorId: text("actorId").notNull(), actorType: text("actorType").notNull(),
action: text("action").notNull(), actor: text("actor").notNull(),
metadata: text("metadata") actorId: text("actorId").notNull(),
}, (table) => ([ action: text("action").notNull(),
index("idx_actionAuditLog_timestamp").on(table.timestamp), metadata: text("metadata")
index("idx_actionAuditLog_org_timestamp").on(table.orgId, table.timestamp) },
])); (table) => [
index("idx_actionAuditLog_timestamp").on(table.timestamp),
index("idx_actionAuditLog_org_timestamp").on(
table.orgId,
table.timestamp
)
]
);
export const accessAuditLog = sqliteTable("accessAuditLog", { export const accessAuditLog = sqliteTable(
id: integer("id").primaryKey({ autoIncrement: true }), "accessAuditLog",
timestamp: integer("timestamp").notNull(), // this is EPOCH time in seconds {
orgId: text("orgId") id: integer("id").primaryKey({ autoIncrement: true }),
.notNull() timestamp: integer("timestamp").notNull(), // this is EPOCH time in seconds
.references(() => orgs.orgId, { onDelete: "cascade" }), orgId: text("orgId")
actorType: text("actorType"), .notNull()
actor: text("actor"), .references(() => orgs.orgId, { onDelete: "cascade" }),
actorId: text("actorId"), actorType: text("actorType"),
resourceId: integer("resourceId"), actor: text("actor"),
ip: text("ip"), actorId: text("actorId"),
location: text("location"), resourceId: integer("resourceId"),
type: text("type").notNull(), ip: text("ip"),
action: integer("action", { mode: "boolean" }).notNull(), location: text("location"),
userAgent: text("userAgent"), type: text("type").notNull(),
metadata: text("metadata") action: integer("action", { mode: "boolean" }).notNull(),
}, (table) => ([ userAgent: text("userAgent"),
index("idx_identityAuditLog_timestamp").on(table.timestamp), metadata: text("metadata")
index("idx_identityAuditLog_org_timestamp").on(table.orgId, table.timestamp) },
])); (table) => [
index("idx_identityAuditLog_timestamp").on(table.timestamp),
index("idx_identityAuditLog_org_timestamp").on(
table.orgId,
table.timestamp
)
]
);
export type Limit = InferSelectModel<typeof limits>; export type Limit = InferSelectModel<typeof limits>;
export type Account = InferSelectModel<typeof account>; export type Account = InferSelectModel<typeof account>;
@@ -264,5 +306,6 @@ export type RemoteExitNodeSession = InferSelectModel<
>; >;
export type ExitNodeOrg = InferSelectModel<typeof exitNodeOrgs>; export type ExitNodeOrg = InferSelectModel<typeof exitNodeOrgs>;
export type LoginPage = InferSelectModel<typeof loginPage>; export type LoginPage = InferSelectModel<typeof loginPage>;
export type LoginPageBranding = InferSelectModel<typeof loginPageBranding>;
export type ActionAuditLog = InferSelectModel<typeof actionAuditLog>; export type ActionAuditLog = InferSelectModel<typeof actionAuditLog>;
export type AccessAuditLog = InferSelectModel<typeof accessAuditLog>; export type AccessAuditLog = InferSelectModel<typeof accessAuditLog>;

View File

@@ -1,6 +1,12 @@
import { randomUUID } from "crypto"; import { randomUUID } from "crypto";
import { InferSelectModel } from "drizzle-orm"; import { InferSelectModel } from "drizzle-orm";
import { sqliteTable, text, integer, index } from "drizzle-orm/sqlite-core"; import {
sqliteTable,
text,
integer,
index,
uniqueIndex
} from "drizzle-orm/sqlite-core";
import { no } from "zod/v4/locales"; import { no } from "zod/v4/locales";
export const domains = sqliteTable("domains", { export const domains = sqliteTable("domains", {
@@ -234,7 +240,10 @@ export const siteResources = sqliteTable("siteResources", {
destination: text("destination").notNull(), // ip, cidr, hostname destination: text("destination").notNull(), // ip, cidr, hostname
enabled: integer("enabled", { mode: "boolean" }).notNull().default(true), enabled: integer("enabled", { mode: "boolean" }).notNull().default(true),
alias: text("alias"), alias: text("alias"),
aliasAddress: text("aliasAddress") aliasAddress: text("aliasAddress"),
tcpPortRangeString: text("tcpPortRangeString"),
udpPortRangeString: text("udpPortRangeString"),
disableIcmp: integer("disableIcmp", { mode: "boolean" })
}); });
export const clientSiteResources = sqliteTable("clientSiteResources", { export const clientSiteResources = sqliteTable("clientSiteResources", {

View File

@@ -18,10 +18,13 @@ function createEmailClient() {
host: emailConfig.smtp_host, host: emailConfig.smtp_host,
port: emailConfig.smtp_port, port: emailConfig.smtp_port,
secure: emailConfig.smtp_secure || false, secure: emailConfig.smtp_secure || false,
auth: (emailConfig.smtp_user && emailConfig.smtp_pass) ? { auth:
user: emailConfig.smtp_user, emailConfig.smtp_user && emailConfig.smtp_pass
pass: emailConfig.smtp_pass ? {
} : null user: emailConfig.smtp_user,
pass: emailConfig.smtp_pass
}
: null
} as SMTPTransport.Options; } as SMTPTransport.Options;
if (emailConfig.smtp_tls_reject_unauthorized !== undefined) { if (emailConfig.smtp_tls_reject_unauthorized !== undefined) {

View File

@@ -10,6 +10,7 @@ export async function sendEmail(
from: string | undefined; from: string | undefined;
to: string | undefined; to: string | undefined;
subject: string; subject: string;
replyTo?: string;
} }
) { ) {
if (!emailClient) { if (!emailClient) {
@@ -32,6 +33,7 @@ export async function sendEmail(
address: opts.from address: opts.from
}, },
to: opts.to, to: opts.to,
replyTo: opts.replyTo,
subject: opts.subject, subject: opts.subject,
html: emailHtml html: emailHtml
}); });

View File

@@ -19,7 +19,13 @@ interface Props {
billingLink: string; // Link to billing page billingLink: string; // Link to billing page
} }
export const NotifyUsageLimitApproaching = ({ email, limitName, currentUsage, usageLimit, billingLink }: Props) => { export const NotifyUsageLimitApproaching = ({
email,
limitName,
currentUsage,
usageLimit,
billingLink
}: Props) => {
const previewText = `Your usage for ${limitName} is approaching the limit.`; const previewText = `Your usage for ${limitName} is approaching the limit.`;
const usagePercentage = Math.round((currentUsage / usageLimit) * 100); const usagePercentage = Math.round((currentUsage / usageLimit) * 100);
@@ -37,23 +43,32 @@ export const NotifyUsageLimitApproaching = ({ email, limitName, currentUsage, us
<EmailGreeting>Hi there,</EmailGreeting> <EmailGreeting>Hi there,</EmailGreeting>
<EmailText> <EmailText>
We wanted to let you know that your usage for <strong>{limitName}</strong> is approaching your plan limit. We wanted to let you know that your usage for{" "}
<strong>{limitName}</strong> is approaching your
plan limit.
</EmailText> </EmailText>
<EmailText> <EmailText>
<strong>Current Usage:</strong> {currentUsage} of {usageLimit} ({usagePercentage}%) <strong>Current Usage:</strong> {currentUsage} of{" "}
{usageLimit} ({usagePercentage}%)
</EmailText> </EmailText>
<EmailText> <EmailText>
Once you reach your limit, some functionality may be restricted or your sites may disconnect until you upgrade your plan or your usage resets. Once you reach your limit, some functionality may be
restricted or your sites may disconnect until you
upgrade your plan or your usage resets.
</EmailText> </EmailText>
<EmailText> <EmailText>
To avoid any interruption to your service, we recommend upgrading your plan or monitoring your usage closely. You can <a href={billingLink}>upgrade your plan here</a>. To avoid any interruption to your service, we
recommend upgrading your plan or monitoring your
usage closely. You can{" "}
<a href={billingLink}>upgrade your plan here</a>.
</EmailText> </EmailText>
<EmailText> <EmailText>
If you have any questions or need assistance, please don't hesitate to reach out to our support team. If you have any questions or need assistance, please
don't hesitate to reach out to our support team.
</EmailText> </EmailText>
<EmailFooter> <EmailFooter>

View File

@@ -19,7 +19,13 @@ interface Props {
billingLink: string; // Link to billing page billingLink: string; // Link to billing page
} }
export const NotifyUsageLimitReached = ({ email, limitName, currentUsage, usageLimit, billingLink }: Props) => { export const NotifyUsageLimitReached = ({
email,
limitName,
currentUsage,
usageLimit,
billingLink
}: Props) => {
const previewText = `You've reached your ${limitName} usage limit - Action required`; const previewText = `You've reached your ${limitName} usage limit - Action required`;
const usagePercentage = Math.round((currentUsage / usageLimit) * 100); const usagePercentage = Math.round((currentUsage / usageLimit) * 100);
@@ -32,30 +38,48 @@ export const NotifyUsageLimitReached = ({ email, limitName, currentUsage, usageL
<EmailContainer> <EmailContainer>
<EmailLetterHead /> <EmailLetterHead />
<EmailHeading>Usage Limit Reached - Action Required</EmailHeading> <EmailHeading>
Usage Limit Reached - Action Required
</EmailHeading>
<EmailGreeting>Hi there,</EmailGreeting> <EmailGreeting>Hi there,</EmailGreeting>
<EmailText> <EmailText>
You have reached your usage limit for <strong>{limitName}</strong>. You have reached your usage limit for{" "}
<strong>{limitName}</strong>.
</EmailText> </EmailText>
<EmailText> <EmailText>
<strong>Current Usage:</strong> {currentUsage} of {usageLimit} ({usagePercentage}%) <strong>Current Usage:</strong> {currentUsage} of{" "}
{usageLimit} ({usagePercentage}%)
</EmailText> </EmailText>
<EmailText> <EmailText>
<strong>Important:</strong> Your functionality may now be restricted and your sites may disconnect until you either upgrade your plan or your usage resets. To prevent any service interruption, immediate action is recommended. <strong>Important:</strong> Your functionality may
now be restricted and your sites may disconnect
until you either upgrade your plan or your usage
resets. To prevent any service interruption,
immediate action is recommended.
</EmailText> </EmailText>
<EmailText> <EmailText>
<strong>What you can do:</strong> <strong>What you can do:</strong>
<br /> <a href={billingLink} style={{ color: '#2563eb', fontWeight: 'bold' }}>Upgrade your plan immediately</a> to restore full functionality <br />{" "}
<br /> Monitor your usage to stay within limits in the future <a
href={billingLink}
style={{ color: "#2563eb", fontWeight: "bold" }}
>
Upgrade your plan immediately
</a>{" "}
to restore full functionality
<br /> Monitor your usage to stay within limits in
the future
</EmailText> </EmailText>
<EmailText> <EmailText>
If you have any questions or need immediate assistance, please contact our support team right away. If you have any questions or need immediate
assistance, please contact our support team right
away.
</EmailText> </EmailText>
<EmailFooter> <EmailFooter>

View File

@@ -5,7 +5,7 @@ import config from "@server/lib/config";
import logger from "@server/logger"; import logger from "@server/logger";
import { import {
errorHandlerMiddleware, errorHandlerMiddleware,
notFoundMiddleware, notFoundMiddleware
} from "@server/middlewares"; } from "@server/middlewares";
import { authenticated, unauthenticated } from "#dynamic/routers/integration"; import { authenticated, unauthenticated } from "#dynamic/routers/integration";
import { logIncomingMiddleware } from "./middlewares/logIncoming"; import { logIncomingMiddleware } from "./middlewares/logIncoming";

29
server/lib/asn.ts Normal file
View File

@@ -0,0 +1,29 @@
import logger from "@server/logger";
import { maxmindAsnLookup } from "@server/db/maxmindAsn";
export async function getAsnForIp(ip: string): Promise<number | undefined> {
try {
if (!maxmindAsnLookup) {
logger.debug(
"MaxMind ASN DB path not configured, cannot perform ASN lookup"
);
return;
}
const result = maxmindAsnLookup.get(ip);
if (!result || !result.autonomous_system_number) {
return;
}
logger.debug(
`ASN lookup successful for IP ${ip}: AS${result.autonomous_system_number}`
);
return result.autonomous_system_number;
} catch (error) {
logger.error("Error performing ASN lookup:", error);
}
return;
}

View File

@@ -25,16 +25,22 @@ export const FeatureMeterIdsSandbox: Record<FeatureId, string> = {
}; };
export function getFeatureMeterId(featureId: FeatureId): string { export function getFeatureMeterId(featureId: FeatureId): string {
if (process.env.ENVIRONMENT == "prod" && process.env.SANDBOX_MODE !== "true") { if (
process.env.ENVIRONMENT == "prod" &&
process.env.SANDBOX_MODE !== "true"
) {
return FeatureMeterIds[featureId]; return FeatureMeterIds[featureId];
} else { } else {
return FeatureMeterIdsSandbox[featureId]; return FeatureMeterIdsSandbox[featureId];
} }
} }
export function getFeatureIdByMetricId(metricId: string): FeatureId | undefined { export function getFeatureIdByMetricId(
return (Object.entries(FeatureMeterIds) as [FeatureId, string][]) metricId: string
.find(([_, v]) => v === metricId)?.[0]; ): FeatureId | undefined {
return (Object.entries(FeatureMeterIds) as [FeatureId, string][]).find(
([_, v]) => v === metricId
)?.[0];
} }
export type FeaturePriceSet = { export type FeaturePriceSet = {
@@ -43,7 +49,8 @@ export type FeaturePriceSet = {
[FeatureId.DOMAINS]?: string; // Optional since domains are not billed [FeatureId.DOMAINS]?: string; // Optional since domains are not billed
}; };
export const standardFeaturePriceSet: FeaturePriceSet = { // Free tier matches the freeLimitSet export const standardFeaturePriceSet: FeaturePriceSet = {
// Free tier matches the freeLimitSet
[FeatureId.SITE_UPTIME]: "price_1RrQc4D3Ee2Ir7WmaJGZ3MtF", [FeatureId.SITE_UPTIME]: "price_1RrQc4D3Ee2Ir7WmaJGZ3MtF",
[FeatureId.USERS]: "price_1RrQeJD3Ee2Ir7WmgveP3xea", [FeatureId.USERS]: "price_1RrQeJD3Ee2Ir7WmgveP3xea",
[FeatureId.EGRESS_DATA_MB]: "price_1RrQXFD3Ee2Ir7WmvGDlgxQk", [FeatureId.EGRESS_DATA_MB]: "price_1RrQXFD3Ee2Ir7WmvGDlgxQk",
@@ -51,7 +58,8 @@ export const standardFeaturePriceSet: FeaturePriceSet = { // Free tier matches t
[FeatureId.REMOTE_EXIT_NODES]: "price_1S46weD3Ee2Ir7Wm94KEHI4h" [FeatureId.REMOTE_EXIT_NODES]: "price_1S46weD3Ee2Ir7Wm94KEHI4h"
}; };
export const standardFeaturePriceSetSandbox: FeaturePriceSet = { // Free tier matches the freeLimitSet export const standardFeaturePriceSetSandbox: FeaturePriceSet = {
// Free tier matches the freeLimitSet
[FeatureId.SITE_UPTIME]: "price_1RefFBDCpkOb237BPrKZ8IEU", [FeatureId.SITE_UPTIME]: "price_1RefFBDCpkOb237BPrKZ8IEU",
[FeatureId.USERS]: "price_1ReNa4DCpkOb237Bc67G5muF", [FeatureId.USERS]: "price_1ReNa4DCpkOb237Bc67G5muF",
[FeatureId.EGRESS_DATA_MB]: "price_1Rfp9LDCpkOb237BwuN5Oiu0", [FeatureId.EGRESS_DATA_MB]: "price_1Rfp9LDCpkOb237BwuN5Oiu0",
@@ -60,15 +68,20 @@ export const standardFeaturePriceSetSandbox: FeaturePriceSet = { // Free tier ma
}; };
export function getStandardFeaturePriceSet(): FeaturePriceSet { export function getStandardFeaturePriceSet(): FeaturePriceSet {
if (process.env.ENVIRONMENT == "prod" && process.env.SANDBOX_MODE !== "true") { if (
process.env.ENVIRONMENT == "prod" &&
process.env.SANDBOX_MODE !== "true"
) {
return standardFeaturePriceSet; return standardFeaturePriceSet;
} else { } else {
return standardFeaturePriceSetSandbox; return standardFeaturePriceSetSandbox;
} }
} }
export function getLineItems(featurePriceSet: FeaturePriceSet): Stripe.Checkout.SessionCreateParams.LineItem[] { export function getLineItems(
featurePriceSet: FeaturePriceSet
): Stripe.Checkout.SessionCreateParams.LineItem[] {
return Object.entries(featurePriceSet).map(([featureId, priceId]) => ({ return Object.entries(featurePriceSet).map(([featureId, priceId]) => ({
price: priceId, price: priceId
})); }));
} }

View File

@@ -12,7 +12,7 @@ export const sandboxLimitSet: LimitSet = {
[FeatureId.USERS]: { value: 1, description: "Sandbox limit" }, [FeatureId.USERS]: { value: 1, description: "Sandbox limit" },
[FeatureId.EGRESS_DATA_MB]: { value: 1000, description: "Sandbox limit" }, // 1 GB [FeatureId.EGRESS_DATA_MB]: { value: 1000, description: "Sandbox limit" }, // 1 GB
[FeatureId.DOMAINS]: { value: 0, description: "Sandbox limit" }, [FeatureId.DOMAINS]: { value: 0, description: "Sandbox limit" },
[FeatureId.REMOTE_EXIT_NODES]: { value: 0, description: "Sandbox limit" }, [FeatureId.REMOTE_EXIT_NODES]: { value: 0, description: "Sandbox limit" }
}; };
export const freeLimitSet: LimitSet = { export const freeLimitSet: LimitSet = {
@@ -29,7 +29,7 @@ export const freeLimitSet: LimitSet = {
export const subscribedLimitSet: LimitSet = { export const subscribedLimitSet: LimitSet = {
[FeatureId.SITE_UPTIME]: { [FeatureId.SITE_UPTIME]: {
value: 2232000, value: 2232000,
description: "Contact us to increase soft limit.", description: "Contact us to increase soft limit."
}, // 50 sites up for 31 days }, // 50 sites up for 31 days
[FeatureId.USERS]: { [FeatureId.USERS]: {
value: 150, value: 150,

View File

@@ -1,22 +1,32 @@
export enum TierId { export enum TierId {
STANDARD = "standard", STANDARD = "standard"
} }
export type TierPriceSet = { export type TierPriceSet = {
[key in TierId]: string; [key in TierId]: string;
}; };
export const tierPriceSet: TierPriceSet = { // Free tier matches the freeLimitSet export const tierPriceSet: TierPriceSet = {
[TierId.STANDARD]: "price_1RrQ9cD3Ee2Ir7Wmqdy3KBa0", // Free tier matches the freeLimitSet
[TierId.STANDARD]: "price_1RrQ9cD3Ee2Ir7Wmqdy3KBa0"
}; };
export const tierPriceSetSandbox: TierPriceSet = { // Free tier matches the freeLimitSet export const tierPriceSetSandbox: TierPriceSet = {
// Free tier matches the freeLimitSet
// when matching tier the keys closer to 0 index are matched first so list the tiers in descending order of value // when matching tier the keys closer to 0 index are matched first so list the tiers in descending order of value
[TierId.STANDARD]: "price_1RrAYJDCpkOb237By2s1P32m", [TierId.STANDARD]: "price_1RrAYJDCpkOb237By2s1P32m"
}; };
export function getTierPriceSet(environment?: string, sandbox_mode?: boolean): TierPriceSet { export function getTierPriceSet(
if ((process.env.ENVIRONMENT == "prod" && process.env.SANDBOX_MODE !== "true") || (environment === "prod" && sandbox_mode !== true)) { // THIS GETS LOADED CLIENT SIDE AND SERVER SIDE environment?: string,
sandbox_mode?: boolean
): TierPriceSet {
if (
(process.env.ENVIRONMENT == "prod" &&
process.env.SANDBOX_MODE !== "true") ||
(environment === "prod" && sandbox_mode !== true)
) {
// THIS GETS LOADED CLIENT SIDE AND SERVER SIDE
return tierPriceSet; return tierPriceSet;
} else { } else {
return tierPriceSetSandbox; return tierPriceSetSandbox;

View File

@@ -1,4 +1,4 @@
import { db, newts, blueprints, Blueprint } from "@server/db"; import { db, newts, blueprints, Blueprint, Site, siteResources, roleSiteResources, userSiteResources, clientSiteResources } from "@server/db";
import { Config, ConfigSchema } from "./types"; import { Config, ConfigSchema } from "./types";
import { ProxyResourcesResults, updateProxyResources } from "./proxyResources"; import { ProxyResourcesResults, updateProxyResources } from "./proxyResources";
import { fromError } from "zod-validation-error"; import { fromError } from "zod-validation-error";
@@ -15,6 +15,7 @@ import { BlueprintSource } from "@server/routers/blueprints/types";
import { stringify as stringifyYaml } from "yaml"; import { stringify as stringifyYaml } from "yaml";
import { faker } from "@faker-js/faker"; import { faker } from "@faker-js/faker";
import { handleMessagingForUpdatedSiteResource } from "@server/routers/siteResource"; import { handleMessagingForUpdatedSiteResource } from "@server/routers/siteResource";
import { rebuildClientAssociationsFromSiteResource } from "../rebuildClientAssociations";
type ApplyBlueprintArgs = { type ApplyBlueprintArgs = {
orgId: string; orgId: string;
@@ -108,38 +109,136 @@ export async function applyBlueprint({
// We need to update the targets on the newts from the successfully updated information // We need to update the targets on the newts from the successfully updated information
for (const result of clientResourcesResults) { for (const result of clientResourcesResults) {
const [site] = await trx if (
.select() result.oldSiteResource &&
.from(sites) result.oldSiteResource.siteId !=
.innerJoin(newts, eq(sites.siteId, newts.siteId)) result.newSiteResource.siteId
.where( ) {
and( // query existing associations
eq(sites.siteId, result.newSiteResource.siteId), const existingRoleIds = await trx
eq(sites.orgId, orgId), .select()
eq(sites.type, "newt"), .from(roleSiteResources)
isNotNull(sites.pubKey) .where(
eq(
roleSiteResources.siteResourceId,
result.oldSiteResource.siteResourceId
)
) )
) .then((rows) => rows.map((row) => row.roleId));
.limit(1);
if (!site) { const existingUserIds= await trx
logger.debug( .select()
`No newt site found for client resource ${result.newSiteResource.siteResourceId}, skipping target update` .from(userSiteResources)
.where(
eq(
userSiteResources.siteResourceId,
result.oldSiteResource.siteResourceId
)
).then((rows) => rows.map((row) => row.userId));
const existingClientIds = await trx
.select()
.from(clientSiteResources)
.where(
eq(
clientSiteResources.siteResourceId,
result.oldSiteResource.siteResourceId
)
).then((rows) => rows.map((row) => row.clientId));
// delete the existing site resource
await trx
.delete(siteResources)
.where(
and(eq(siteResources.siteResourceId, result.oldSiteResource.siteResourceId))
);
await rebuildClientAssociationsFromSiteResource(
result.oldSiteResource,
trx
);
const [insertedSiteResource] = await trx
.insert(siteResources)
.values({
...result.newSiteResource,
})
.returning();
// wait some time to allow for messages to be handled
await new Promise((resolve) => setTimeout(resolve, 750));
//////////////////// update the associations ////////////////////
if (existingRoleIds.length > 0) {
await trx.insert(roleSiteResources).values(
existingRoleIds.map((roleId) => ({
roleId,
siteResourceId: insertedSiteResource!.siteResourceId
}))
);
}
if (existingUserIds.length > 0) {
await trx.insert(userSiteResources).values(
existingUserIds.map((userId) => ({
userId,
siteResourceId: insertedSiteResource!.siteResourceId
}))
);
}
if (existingClientIds.length > 0) {
await trx.insert(clientSiteResources).values(
existingClientIds.map((clientId) => ({
clientId,
siteResourceId: insertedSiteResource!.siteResourceId
}))
);
}
await rebuildClientAssociationsFromSiteResource(
insertedSiteResource,
trx
);
} else {
const [newSite] = await trx
.select()
.from(sites)
.innerJoin(newts, eq(sites.siteId, newts.siteId))
.where(
and(
eq(sites.siteId, result.newSiteResource.siteId),
eq(sites.orgId, orgId),
eq(sites.type, "newt"),
isNotNull(sites.pubKey)
)
)
.limit(1);
if (!newSite) {
logger.debug(
`No newt site found for client resource ${result.newSiteResource.siteResourceId}, skipping target update`
);
continue;
}
logger.debug(
`Updating client resource ${result.newSiteResource.siteResourceId} on site ${newSite.sites.siteId}`
);
await handleMessagingForUpdatedSiteResource(
result.oldSiteResource,
result.newSiteResource,
{
siteId: newSite.sites.siteId,
orgId: newSite.sites.orgId
},
trx
); );
continue;
} }
logger.debug(
`Updating client resource ${result.newSiteResource.siteResourceId} on site ${site.sites.siteId}`
);
await handleMessagingForUpdatedSiteResource(
result.oldSiteResource,
result.newSiteResource,
{ siteId: site.sites.siteId, orgId: site.sites.orgId },
trx
);
// await addClientTargets( // await addClientTargets(
// site.newt.newtId, // site.newt.newtId,
// result.resource.destination, // result.resource.destination,

View File

@@ -34,7 +34,10 @@ export async function applyNewtDockerBlueprint(
return; return;
} }
if (isEmptyObject(blueprint["proxy-resources"]) && isEmptyObject(blueprint["client-resources"])) { if (
isEmptyObject(blueprint["proxy-resources"]) &&
isEmptyObject(blueprint["client-resources"])
) {
return; return;
} }

View File

@@ -14,6 +14,7 @@ import { sites } from "@server/db";
import { eq, and, ne, inArray } from "drizzle-orm"; import { eq, and, ne, inArray } from "drizzle-orm";
import { Config } from "./types"; import { Config } from "./types";
import logger from "@server/logger"; import logger from "@server/logger";
import { getNextAvailableAliasAddress } from "../ip";
export type ClientResourcesResults = { export type ClientResourcesResults = {
newSiteResource: SiteResource; newSiteResource: SiteResource;
@@ -75,22 +76,20 @@ export async function updateClientResources(
} }
if (existingResource) { if (existingResource) {
if (existingResource.siteId !== site.siteId) {
throw new Error(
`You can not change the site of an existing client resource (${resourceNiceId}). Please delete and recreate it instead.`
);
}
// Update existing resource // Update existing resource
const [updatedResource] = await trx const [updatedResource] = await trx
.update(siteResources) .update(siteResources)
.set({ .set({
name: resourceData.name || resourceNiceId, name: resourceData.name || resourceNiceId,
siteId: site.siteId,
mode: resourceData.mode, mode: resourceData.mode,
destination: resourceData.destination, destination: resourceData.destination,
enabled: true, // hardcoded for now enabled: true, // hardcoded for now
// enabled: resourceData.enabled ?? true, // enabled: resourceData.enabled ?? true,
alias: resourceData.alias || null alias: resourceData.alias || null,
disableIcmp: resourceData["disable-icmp"],
tcpPortRangeString: resourceData["tcp-ports"],
udpPortRangeString: resourceData["udp-ports"]
}) })
.where( .where(
eq( eq(
@@ -205,6 +204,12 @@ export async function updateClientResources(
oldSiteResource: existingResource oldSiteResource: existingResource
}); });
} else { } else {
let aliasAddress: string | null = null;
if (resourceData.mode == "host") {
// we can only have an alias on a host
aliasAddress = await getNextAvailableAliasAddress(orgId);
}
// Create new resource // Create new resource
const [newResource] = await trx const [newResource] = await trx
.insert(siteResources) .insert(siteResources)
@@ -217,7 +222,11 @@ export async function updateClientResources(
destination: resourceData.destination, destination: resourceData.destination,
enabled: true, // hardcoded for now enabled: true, // hardcoded for now
// enabled: resourceData.enabled ?? true, // enabled: resourceData.enabled ?? true,
alias: resourceData.alias || null alias: resourceData.alias || null,
aliasAddress: aliasAddress,
disableIcmp: resourceData["disable-icmp"],
tcpPortRangeString: resourceData["tcp-ports"],
udpPortRangeString: resourceData["udp-ports"]
}) })
.returning(); .returning();

View File

@@ -84,12 +84,20 @@ export function processContainerLabels(containers: Container[]): {
// Process proxy resources // Process proxy resources
if (Object.keys(proxyResourceLabels).length > 0) { if (Object.keys(proxyResourceLabels).length > 0) {
processResourceLabels(proxyResourceLabels, container, result["proxy-resources"]); processResourceLabels(
proxyResourceLabels,
container,
result["proxy-resources"]
);
} }
// Process client resources // Process client resources
if (Object.keys(clientResourceLabels).length > 0) { if (Object.keys(clientResourceLabels).length > 0) {
processResourceLabels(clientResourceLabels, container, result["client-resources"]); processResourceLabels(
clientResourceLabels,
container,
result["client-resources"]
);
} }
}); });
@@ -161,8 +169,7 @@ function processResourceLabels(
const finalTarget = { ...target }; const finalTarget = { ...target };
if (!finalTarget.hostname) { if (!finalTarget.hostname) {
finalTarget.hostname = finalTarget.hostname =
container.name || container.name || container.hostname;
container.hostname;
} }
if (!finalTarget.port) { if (!finalTarget.port) {
const containerPort = const containerPort =

View File

@@ -1086,10 +1086,8 @@ async function getDomainId(
// remove the base domain of the domain // remove the base domain of the domain
let subdomain = null; let subdomain = null;
if (domainSelection.type == "ns" || domainSelection.type == "wildcard") { if (fullDomain != baseDomain) {
if (fullDomain != baseDomain) { subdomain = fullDomain.replace(`.${baseDomain}`, "");
subdomain = fullDomain.replace(`.${baseDomain}`, "");
}
} }
// Return the first valid domain // Return the first valid domain

View File

@@ -1,4 +1,5 @@
import { z } from "zod"; import { z } from "zod";
import { portRangeStringSchema } from "@server/lib/ip";
export const SiteSchema = z.object({ export const SiteSchema = z.object({
name: z.string().min(1).max(100), name: z.string().min(1).max(100),
@@ -71,11 +72,71 @@ export const AuthSchema = z.object({
"auto-login-idp": z.int().positive().optional() "auto-login-idp": z.int().positive().optional()
}); });
export const RuleSchema = z.object({ export const RuleSchema = z
action: z.enum(["allow", "deny", "pass"]), .object({
match: z.enum(["cidr", "path", "ip", "country"]), action: z.enum(["allow", "deny", "pass"]),
value: z.string() match: z.enum(["cidr", "path", "ip", "country", "asn"]),
}); value: z.string()
})
.refine(
(rule) => {
if (rule.match === "ip") {
// Check if it's a valid IP address (v4 or v6)
return z.union([z.ipv4(), z.ipv6()]).safeParse(rule.value)
.success;
}
return true;
},
{
path: ["value"],
message: "Value must be a valid IP address when match is 'ip'"
}
)
.refine(
(rule) => {
if (rule.match === "cidr") {
// Check if it's a valid CIDR (v4 or v6)
return z.union([z.cidrv4(), z.cidrv6()]).safeParse(rule.value)
.success;
}
return true;
},
{
path: ["value"],
message: "Value must be a valid CIDR notation when match is 'cidr'"
}
)
.refine(
(rule) => {
if (rule.match === "country") {
// Check if it's a valid 2-letter country code
return /^[A-Z]{2}$/.test(rule.value);
}
return true;
},
{
path: ["value"],
message:
"Value must be a 2-letter country code when match is 'country'"
}
)
.refine(
(rule) => {
if (rule.match === "asn") {
// Check if it's either AS<number> format or just a number
const asNumberPattern = /^AS\d+$/i;
const isASFormat = asNumberPattern.test(rule.value);
const isNumeric = /^\d+$/.test(rule.value);
return isASFormat || isNumeric;
}
return true;
},
{
path: ["value"],
message:
"Value must be either 'AS<number>' format or a number when match is 'asn'"
}
);
export const HeaderSchema = z.object({ export const HeaderSchema = z.object({
name: z.string().min(1), name: z.string().min(1),
@@ -222,6 +283,9 @@ export const ClientResourceSchema = z
// destinationPort: z.int().positive().optional(), // destinationPort: z.int().positive().optional(),
destination: z.string().min(1), destination: z.string().min(1),
// enabled: z.boolean().default(true), // enabled: z.boolean().default(true),
"tcp-ports": portRangeStringSchema.optional().default("*"),
"udp-ports": portRangeStringSchema.optional().default("*"),
"disable-icmp": z.boolean().optional().default(false),
alias: z alias: z
.string() .string()
.regex( .regex(
@@ -324,7 +388,10 @@ export const ConfigSchema = z
return data as { return data as {
"proxy-resources": Record<string, z.infer<typeof ResourceSchema>>; "proxy-resources": Record<string, z.infer<typeof ResourceSchema>>;
"client-resources": Record<string, z.infer<typeof ClientResourceSchema>>; "client-resources": Record<
string,
z.infer<typeof ClientResourceSchema>
>;
sites: Record<string, z.infer<typeof SiteSchema>>; sites: Record<string, z.infer<typeof SiteSchema>>;
}; };
}) })

View File

@@ -166,7 +166,10 @@ export async function calculateUserClientsForOrgs(
]; ];
// Get next available subnet // Get next available subnet
const newSubnet = await getNextAvailableClientSubnet(orgId); const newSubnet = await getNextAvailableClientSubnet(
orgId,
transaction
);
if (!newSubnet) { if (!newSubnet) {
logger.warn( logger.warn(
`Skipping org ${orgId} for OLM ${olm.olmId} (user ${userId}): no available subnet found` `Skipping org ${orgId} for OLM ${olm.olmId} (user ${userId}): no available subnet found`

View File

@@ -1,4 +1,6 @@
export async function getValidCertificatesForDomains(domains: Set<string>): Promise< export async function getValidCertificatesForDomains(
domains: Set<string>
): Promise<
Array<{ Array<{
id: number; id: number;
domain: string; domain: string;

View File

@@ -7,7 +7,10 @@ function dateToTimestamp(dateStr: string): number {
// Testable version of calculateCutoffTimestamp that accepts a "now" timestamp // Testable version of calculateCutoffTimestamp that accepts a "now" timestamp
// This matches the logic in cleanupLogs.ts but allows injecting the current time // This matches the logic in cleanupLogs.ts but allows injecting the current time
function calculateCutoffTimestampWithNow(retentionDays: number, nowTimestamp: number): number { function calculateCutoffTimestampWithNow(
retentionDays: number,
nowTimestamp: number
): number {
if (retentionDays === 9001) { if (retentionDays === 9001) {
// Special case: data is erased at the end of the year following the year it was generated // Special case: data is erased at the end of the year following the year it was generated
// This means we delete logs from 2 years ago or older (logs from year Y are deleted after Dec 31 of year Y+1) // This means we delete logs from 2 years ago or older (logs from year Y are deleted after Dec 31 of year Y+1)
@@ -28,7 +31,7 @@ function testCalculateCutoffTimestamp() {
{ {
const now = dateToTimestamp("2025-12-06T12:00:00Z"); const now = dateToTimestamp("2025-12-06T12:00:00Z");
const result = calculateCutoffTimestampWithNow(30, now); const result = calculateCutoffTimestampWithNow(30, now);
const expected = now - (30 * 24 * 60 * 60); const expected = now - 30 * 24 * 60 * 60;
assertEquals(result, expected, "30 days retention calculation failed"); assertEquals(result, expected, "30 days retention calculation failed");
} }
@@ -36,7 +39,7 @@ function testCalculateCutoffTimestamp() {
{ {
const now = dateToTimestamp("2025-06-15T00:00:00Z"); const now = dateToTimestamp("2025-06-15T00:00:00Z");
const result = calculateCutoffTimestampWithNow(90, now); const result = calculateCutoffTimestampWithNow(90, now);
const expected = now - (90 * 24 * 60 * 60); const expected = now - 90 * 24 * 60 * 60;
assertEquals(result, expected, "90 days retention calculation failed"); assertEquals(result, expected, "90 days retention calculation failed");
} }
@@ -48,7 +51,11 @@ function testCalculateCutoffTimestamp() {
const now = dateToTimestamp("2025-12-06T12:00:00Z"); const now = dateToTimestamp("2025-12-06T12:00:00Z");
const result = calculateCutoffTimestampWithNow(9001, now); const result = calculateCutoffTimestampWithNow(9001, now);
const expected = dateToTimestamp("2024-01-01T00:00:00Z"); const expected = dateToTimestamp("2024-01-01T00:00:00Z");
assertEquals(result, expected, "9001 retention (Dec 2025) - should cutoff at Jan 1, 2024"); assertEquals(
result,
expected,
"9001 retention (Dec 2025) - should cutoff at Jan 1, 2024"
);
} }
// Test 4: Special case 9001 - January 2026 // Test 4: Special case 9001 - January 2026
@@ -58,7 +65,11 @@ function testCalculateCutoffTimestamp() {
const now = dateToTimestamp("2026-01-15T12:00:00Z"); const now = dateToTimestamp("2026-01-15T12:00:00Z");
const result = calculateCutoffTimestampWithNow(9001, now); const result = calculateCutoffTimestampWithNow(9001, now);
const expected = dateToTimestamp("2025-01-01T00:00:00Z"); const expected = dateToTimestamp("2025-01-01T00:00:00Z");
assertEquals(result, expected, "9001 retention (Jan 2026) - should cutoff at Jan 1, 2025"); assertEquals(
result,
expected,
"9001 retention (Jan 2026) - should cutoff at Jan 1, 2025"
);
} }
// Test 5: Special case 9001 - December 31, 2025 at 23:59:59 UTC // Test 5: Special case 9001 - December 31, 2025 at 23:59:59 UTC
@@ -68,7 +79,11 @@ function testCalculateCutoffTimestamp() {
const now = dateToTimestamp("2025-12-31T23:59:59Z"); const now = dateToTimestamp("2025-12-31T23:59:59Z");
const result = calculateCutoffTimestampWithNow(9001, now); const result = calculateCutoffTimestampWithNow(9001, now);
const expected = dateToTimestamp("2024-01-01T00:00:00Z"); const expected = dateToTimestamp("2024-01-01T00:00:00Z");
assertEquals(result, expected, "9001 retention (Dec 31, 2025 23:59:59) - should cutoff at Jan 1, 2024"); assertEquals(
result,
expected,
"9001 retention (Dec 31, 2025 23:59:59) - should cutoff at Jan 1, 2024"
);
} }
// Test 6: Special case 9001 - January 1, 2026 at 00:00:01 UTC // Test 6: Special case 9001 - January 1, 2026 at 00:00:01 UTC
@@ -78,7 +93,11 @@ function testCalculateCutoffTimestamp() {
const now = dateToTimestamp("2026-01-01T00:00:01Z"); const now = dateToTimestamp("2026-01-01T00:00:01Z");
const result = calculateCutoffTimestampWithNow(9001, now); const result = calculateCutoffTimestampWithNow(9001, now);
const expected = dateToTimestamp("2025-01-01T00:00:00Z"); const expected = dateToTimestamp("2025-01-01T00:00:00Z");
assertEquals(result, expected, "9001 retention (Jan 1, 2026 00:00:01) - should cutoff at Jan 1, 2025"); assertEquals(
result,
expected,
"9001 retention (Jan 1, 2026 00:00:01) - should cutoff at Jan 1, 2025"
);
} }
// Test 7: Special case 9001 - Mid year 2025 // Test 7: Special case 9001 - Mid year 2025
@@ -87,7 +106,11 @@ function testCalculateCutoffTimestamp() {
const now = dateToTimestamp("2025-06-15T12:00:00Z"); const now = dateToTimestamp("2025-06-15T12:00:00Z");
const result = calculateCutoffTimestampWithNow(9001, now); const result = calculateCutoffTimestampWithNow(9001, now);
const expected = dateToTimestamp("2024-01-01T00:00:00Z"); const expected = dateToTimestamp("2024-01-01T00:00:00Z");
assertEquals(result, expected, "9001 retention (mid 2025) - should cutoff at Jan 1, 2024"); assertEquals(
result,
expected,
"9001 retention (mid 2025) - should cutoff at Jan 1, 2024"
);
} }
// Test 8: Special case 9001 - Early 2024 // Test 8: Special case 9001 - Early 2024
@@ -96,14 +119,18 @@ function testCalculateCutoffTimestamp() {
const now = dateToTimestamp("2024-02-01T12:00:00Z"); const now = dateToTimestamp("2024-02-01T12:00:00Z");
const result = calculateCutoffTimestampWithNow(9001, now); const result = calculateCutoffTimestampWithNow(9001, now);
const expected = dateToTimestamp("2023-01-01T00:00:00Z"); const expected = dateToTimestamp("2023-01-01T00:00:00Z");
assertEquals(result, expected, "9001 retention (early 2024) - should cutoff at Jan 1, 2023"); assertEquals(
result,
expected,
"9001 retention (early 2024) - should cutoff at Jan 1, 2023"
);
} }
// Test 9: 1 day retention // Test 9: 1 day retention
{ {
const now = dateToTimestamp("2025-12-06T12:00:00Z"); const now = dateToTimestamp("2025-12-06T12:00:00Z");
const result = calculateCutoffTimestampWithNow(1, now); const result = calculateCutoffTimestampWithNow(1, now);
const expected = now - (1 * 24 * 60 * 60); const expected = now - 1 * 24 * 60 * 60;
assertEquals(result, expected, "1 day retention calculation failed"); assertEquals(result, expected, "1 day retention calculation failed");
} }
@@ -111,7 +138,7 @@ function testCalculateCutoffTimestamp() {
{ {
const now = dateToTimestamp("2025-12-06T12:00:00Z"); const now = dateToTimestamp("2025-12-06T12:00:00Z");
const result = calculateCutoffTimestampWithNow(365, now); const result = calculateCutoffTimestampWithNow(365, now);
const expected = now - (365 * 24 * 60 * 60); const expected = now - 365 * 24 * 60 * 60;
assertEquals(result, expected, "365 days retention calculation failed"); assertEquals(result, expected, "365 days retention calculation failed");
} }
@@ -125,9 +152,17 @@ function testCalculateCutoffTimestamp() {
const logFromJan2024 = dateToTimestamp("2024-01-01T00:00:00Z"); const logFromJan2024 = dateToTimestamp("2024-01-01T00:00:00Z");
// Log from Dec 2023 should be before cutoff (deleted) // Log from Dec 2023 should be before cutoff (deleted)
assertEquals(logFromDec2023 < cutoff, true, "Log from Dec 2023 should be deleted"); assertEquals(
logFromDec2023 < cutoff,
true,
"Log from Dec 2023 should be deleted"
);
// Log from Jan 2024 should be at or after cutoff (kept) // Log from Jan 2024 should be at or after cutoff (kept)
assertEquals(logFromJan2024 >= cutoff, true, "Log from Jan 2024 should be kept"); assertEquals(
logFromJan2024 >= cutoff,
true,
"Log from Jan 2024 should be kept"
);
} }
// Test 12: Verify 9001 in 2026 - logs from 2024 should now be deleted // Test 12: Verify 9001 in 2026 - logs from 2024 should now be deleted
@@ -138,9 +173,17 @@ function testCalculateCutoffTimestamp() {
const logFromJan2025 = dateToTimestamp("2025-01-01T00:00:00Z"); const logFromJan2025 = dateToTimestamp("2025-01-01T00:00:00Z");
// Log from Dec 2024 should be before cutoff (deleted) // Log from Dec 2024 should be before cutoff (deleted)
assertEquals(logFromDec2024 < cutoff, true, "Log from Dec 2024 should be deleted in 2026"); assertEquals(
logFromDec2024 < cutoff,
true,
"Log from Dec 2024 should be deleted in 2026"
);
// Log from Jan 2025 should be at or after cutoff (kept) // Log from Jan 2025 should be at or after cutoff (kept)
assertEquals(logFromJan2025 >= cutoff, true, "Log from Jan 2025 should be kept in 2026"); assertEquals(
logFromJan2025 >= cutoff,
true,
"Log from Jan 2025 should be kept in 2026"
);
} }
// Test 13: Edge case - exactly at year boundary for 9001 // Test 13: Edge case - exactly at year boundary for 9001
@@ -149,7 +192,11 @@ function testCalculateCutoffTimestamp() {
const now = dateToTimestamp("2025-01-01T00:00:00Z"); const now = dateToTimestamp("2025-01-01T00:00:00Z");
const result = calculateCutoffTimestampWithNow(9001, now); const result = calculateCutoffTimestampWithNow(9001, now);
const expected = dateToTimestamp("2024-01-01T00:00:00Z"); const expected = dateToTimestamp("2024-01-01T00:00:00Z");
assertEquals(result, expected, "9001 retention (Jan 1, 2025 00:00:00) - should cutoff at Jan 1, 2024"); assertEquals(
result,
expected,
"9001 retention (Jan 1, 2025 00:00:00) - should cutoff at Jan 1, 2024"
);
} }
// Test 14: Verify data from 2024 is kept throughout 2025 when using 9001 // Test 14: Verify data from 2024 is kept throughout 2025 when using 9001
@@ -157,18 +204,29 @@ function testCalculateCutoffTimestamp() {
{ {
// Running in June 2025 // Running in June 2025
const nowJune2025 = dateToTimestamp("2025-06-15T12:00:00Z"); const nowJune2025 = dateToTimestamp("2025-06-15T12:00:00Z");
const cutoffJune2025 = calculateCutoffTimestampWithNow(9001, nowJune2025); const cutoffJune2025 = calculateCutoffTimestampWithNow(
9001,
nowJune2025
);
const logFromJuly2024 = dateToTimestamp("2024-07-15T12:00:00Z"); const logFromJuly2024 = dateToTimestamp("2024-07-15T12:00:00Z");
// Log from July 2024 should be KEPT in June 2025 // Log from July 2024 should be KEPT in June 2025
assertEquals(logFromJuly2024 >= cutoffJune2025, true, "Log from July 2024 should be kept in June 2025"); assertEquals(
logFromJuly2024 >= cutoffJune2025,
true,
"Log from July 2024 should be kept in June 2025"
);
// Running in January 2026 // Running in January 2026
const nowJan2026 = dateToTimestamp("2026-01-15T12:00:00Z"); const nowJan2026 = dateToTimestamp("2026-01-15T12:00:00Z");
const cutoffJan2026 = calculateCutoffTimestampWithNow(9001, nowJan2026); const cutoffJan2026 = calculateCutoffTimestampWithNow(9001, nowJan2026);
// Log from July 2024 should be DELETED in January 2026 // Log from July 2024 should be DELETED in January 2026
assertEquals(logFromJuly2024 < cutoffJan2026, true, "Log from July 2024 should be deleted in Jan 2026"); assertEquals(
logFromJuly2024 < cutoffJan2026,
true,
"Log from July 2024 should be deleted in Jan 2026"
);
} }
// Test 15: Verify the exact requirement - data from 2024 must be purged on December 31, 2025 // Test 15: Verify the exact requirement - data from 2024 must be purged on December 31, 2025
@@ -179,13 +237,24 @@ function testCalculateCutoffTimestamp() {
// Dec 31, 2025 23:59:59 - still 2025, log should be kept // Dec 31, 2025 23:59:59 - still 2025, log should be kept
const nowDec31_2025 = dateToTimestamp("2025-12-31T23:59:59Z"); const nowDec31_2025 = dateToTimestamp("2025-12-31T23:59:59Z");
const cutoffDec31 = calculateCutoffTimestampWithNow(9001, nowDec31_2025); const cutoffDec31 = calculateCutoffTimestampWithNow(
assertEquals(logFromMid2024 >= cutoffDec31, true, "Log from mid-2024 should be kept on Dec 31, 2025"); 9001,
nowDec31_2025
);
assertEquals(
logFromMid2024 >= cutoffDec31,
true,
"Log from mid-2024 should be kept on Dec 31, 2025"
);
// Jan 1, 2026 00:00:00 - now 2026, log can be deleted // Jan 1, 2026 00:00:00 - now 2026, log can be deleted
const nowJan1_2026 = dateToTimestamp("2026-01-01T00:00:00Z"); const nowJan1_2026 = dateToTimestamp("2026-01-01T00:00:00Z");
const cutoffJan1 = calculateCutoffTimestampWithNow(9001, nowJan1_2026); const cutoffJan1 = calculateCutoffTimestampWithNow(9001, nowJan1_2026);
assertEquals(logFromMid2024 < cutoffJan1, true, "Log from mid-2024 should be deleted on Jan 1, 2026"); assertEquals(
logFromMid2024 < cutoffJan1,
true,
"Log from mid-2024 should be deleted on Jan 1, 2026"
);
} }
console.log("All calculateCutoffTimestamp tests passed!"); console.log("All calculateCutoffTimestamp tests passed!");

View File

@@ -99,6 +99,10 @@ export class Config {
process.env.MAXMIND_DB_PATH = parsedConfig.server.maxmind_db_path; process.env.MAXMIND_DB_PATH = parsedConfig.server.maxmind_db_path;
} }
if (parsedConfig.server.maxmind_asn_path) {
process.env.MAXMIND_ASN_PATH = parsedConfig.server.maxmind_asn_path;
}
this.rawConfig = parsedConfig; this.rawConfig = parsedConfig;
} }

View File

@@ -2,7 +2,7 @@ import path from "path";
import { fileURLToPath } from "url"; import { fileURLToPath } from "url";
// This is a placeholder value replaced by the build process // This is a placeholder value replaced by the build process
export const APP_VERSION = "1.13.0-rc.0"; export const APP_VERSION = "1.13.1";
export const __FILENAME = fileURLToPath(import.meta.url); export const __FILENAME = fileURLToPath(import.meta.url);
export const __DIRNAME = path.dirname(__FILENAME); export const __DIRNAME = path.dirname(__FILENAME);

View File

@@ -4,14 +4,16 @@ import { eq, and } from "drizzle-orm";
import { subdomainSchema } from "@server/lib/schemas"; import { subdomainSchema } from "@server/lib/schemas";
import { fromError } from "zod-validation-error"; import { fromError } from "zod-validation-error";
export type DomainValidationResult = { export type DomainValidationResult =
success: true; | {
fullDomain: string; success: true;
subdomain: string | null; fullDomain: string;
} | { subdomain: string | null;
success: false; }
error: string; | {
}; success: false;
error: string;
};
/** /**
* Validates a domain and constructs the full domain based on domain type and subdomain. * Validates a domain and constructs the full domain based on domain type and subdomain.
@@ -34,7 +36,10 @@ export async function validateAndConstructDomain(
.where(eq(domains.domainId, domainId)) .where(eq(domains.domainId, domainId))
.leftJoin( .leftJoin(
orgDomains, orgDomains,
and(eq(orgDomains.orgId, orgId), eq(orgDomains.domainId, domainId)) and(
eq(orgDomains.orgId, orgId),
eq(orgDomains.domainId, domainId)
)
); );
// Check if domain exists // Check if domain exists
@@ -106,7 +111,7 @@ export async function validateAndConstructDomain(
} catch (error) { } catch (error) {
return { return {
success: false, success: false,
error: `An error occurred while validating domain: ${error instanceof Error ? error.message : 'Unknown error'}` error: `An error occurred while validating domain: ${error instanceof Error ? error.message : "Unknown error"}`
}; };
} }
} }

View File

@@ -1,39 +1,39 @@
import crypto from 'crypto'; import crypto from "crypto";
export function encryptData(data: string, key: Buffer): string { export function encryptData(data: string, key: Buffer): string {
const algorithm = 'aes-256-gcm'; const algorithm = "aes-256-gcm";
const iv = crypto.randomBytes(16); const iv = crypto.randomBytes(16);
const cipher = crypto.createCipheriv(algorithm, key, iv); const cipher = crypto.createCipheriv(algorithm, key, iv);
let encrypted = cipher.update(data, 'utf8', 'hex'); let encrypted = cipher.update(data, "utf8", "hex");
encrypted += cipher.final('hex'); encrypted += cipher.final("hex");
const authTag = cipher.getAuthTag(); const authTag = cipher.getAuthTag();
// Combine IV, auth tag, and encrypted data // Combine IV, auth tag, and encrypted data
return iv.toString('hex') + ':' + authTag.toString('hex') + ':' + encrypted; return iv.toString("hex") + ":" + authTag.toString("hex") + ":" + encrypted;
} }
// Helper function to decrypt data (you'll need this to read certificates) // Helper function to decrypt data (you'll need this to read certificates)
export function decryptData(encryptedData: string, key: Buffer): string { export function decryptData(encryptedData: string, key: Buffer): string {
const algorithm = 'aes-256-gcm'; const algorithm = "aes-256-gcm";
const parts = encryptedData.split(':'); const parts = encryptedData.split(":");
if (parts.length !== 3) { if (parts.length !== 3) {
throw new Error('Invalid encrypted data format'); throw new Error("Invalid encrypted data format");
} }
const iv = Buffer.from(parts[0], 'hex'); const iv = Buffer.from(parts[0], "hex");
const authTag = Buffer.from(parts[1], 'hex'); const authTag = Buffer.from(parts[1], "hex");
const encrypted = parts[2]; const encrypted = parts[2];
const decipher = crypto.createDecipheriv(algorithm, key, iv); const decipher = crypto.createDecipheriv(algorithm, key, iv);
decipher.setAuthTag(authTag); decipher.setAuthTag(authTag);
let decrypted = decipher.update(encrypted, 'hex', 'utf8'); let decrypted = decipher.update(encrypted, "hex", "utf8");
decrypted += decipher.final('utf8'); decrypted += decipher.final("utf8");
return decrypted; return decrypted;
} }
// openssl rand -hex 32 > config/encryption.key // openssl rand -hex 32 > config/encryption.key

View File

@@ -33,7 +33,11 @@ export async function generateOidcRedirectUrl(
) )
.limit(1); .limit(1);
if (res?.loginPage && res.loginPage.domainId && res.loginPage.fullDomain) { if (
res?.loginPage &&
res.loginPage.domainId &&
res.loginPage.fullDomain
) {
baseUrl = `${method}://${res.loginPage.fullDomain}`; baseUrl = `${method}://${res.loginPage.fullDomain}`;
} }
} }

View File

@@ -23,7 +23,11 @@ function testFindNextAvailableCidr() {
{ {
const existing = ["10.0.0.0/16", "10.2.0.0/16"]; const existing = ["10.0.0.0/16", "10.2.0.0/16"];
const result = findNextAvailableCidr(existing, 16, "10.0.0.0/8"); const result = findNextAvailableCidr(existing, 16, "10.0.0.0/8");
assertEquals(result, "10.1.0.0/16", "Finding gap between allocations failed"); assertEquals(
result,
"10.1.0.0/16",
"Finding gap between allocations failed"
);
} }
// Test 3: No available space // Test 3: No available space

View File

@@ -1,10 +1,4 @@
import { import { db, SiteResource, siteResources, Transaction } from "@server/db";
clientSitesAssociationsCache,
db,
SiteResource,
siteResources,
Transaction
} from "@server/db";
import { clients, orgs, sites } from "@server/db"; import { clients, orgs, sites } from "@server/db";
import { and, eq, isNotNull } from "drizzle-orm"; import { and, eq, isNotNull } from "drizzle-orm";
import config from "@server/lib/config"; import config from "@server/lib/config";
@@ -116,6 +110,70 @@ function bigIntToIp(num: bigint, version: IPVersion): string {
} }
} }
/**
* Parses an endpoint string (ip:port) handling both IPv4 and IPv6 addresses.
* IPv6 addresses may be bracketed like [::1]:8080 or unbracketed like ::1:8080.
* For unbracketed IPv6, the last colon-separated segment is treated as the port.
*
* @param endpoint The endpoint string to parse (e.g., "192.168.1.1:8080" or "[::1]:8080" or "2607:fea8::1:8080")
* @returns An object with ip and port, or null if parsing fails
*/
export function parseEndpoint(
endpoint: string
): { ip: string; port: number } | null {
if (!endpoint) return null;
// Check for bracketed IPv6 format: [ip]:port
const bracketedMatch = endpoint.match(/^\[([^\]]+)\]:(\d+)$/);
if (bracketedMatch) {
const ip = bracketedMatch[1];
const port = parseInt(bracketedMatch[2], 10);
if (isNaN(port)) return null;
return { ip, port };
}
// Check if this looks like IPv6 (contains multiple colons)
const colonCount = (endpoint.match(/:/g) || []).length;
if (colonCount > 1) {
// This is IPv6 - the port is after the last colon
const lastColonIndex = endpoint.lastIndexOf(":");
const ip = endpoint.substring(0, lastColonIndex);
const portStr = endpoint.substring(lastColonIndex + 1);
const port = parseInt(portStr, 10);
if (isNaN(port)) return null;
return { ip, port };
}
// IPv4 format: ip:port
if (colonCount === 1) {
const [ip, portStr] = endpoint.split(":");
const port = parseInt(portStr, 10);
if (isNaN(port)) return null;
return { ip, port };
}
return null;
}
/**
* Formats an IP and port into a consistent endpoint string.
* IPv6 addresses are wrapped in brackets for proper parsing.
*
* @param ip The IP address (IPv4 or IPv6)
* @param port The port number
* @returns Formatted endpoint string
*/
export function formatEndpoint(ip: string, port: number): string {
// Check if this is IPv6 (contains colons)
if (ip.includes(":")) {
// Remove brackets if already present
const cleanIp = ip.replace(/^\[|\]$/g, "");
return `[${cleanIp}]:${port}`;
}
return `${ip}:${port}`;
}
/** /**
* Converts CIDR to IP range * Converts CIDR to IP range
*/ */
@@ -243,10 +301,37 @@ export function isIpInCidr(ip: string, cidr: string): boolean {
return ipBigInt >= range.start && ipBigInt <= range.end; return ipBigInt >= range.start && ipBigInt <= range.end;
} }
/**
 * Determines whether two CIDR blocks share any addresses.
 *
 * @param cidr1 First CIDR string (e.g. "10.0.0.0/8")
 * @param cidr2 Second CIDR string
 * @returns true when the two address ranges intersect, false otherwise
 */
export function doCidrsOverlap(cidr1: string, cidr2: string): boolean {
    // Ranges of differing IP versions (v4 vs v6) can never intersect.
    const v1 = detectIpVersion(cidr1.split("/")[0]);
    const v2 = detectIpVersion(cidr2.split("/")[0]);
    if (v1 !== v2) {
        return false;
    }

    const a = cidrToRange(cidr1);
    const b = cidrToRange(cidr2);

    // Two intervals intersect exactly when each one starts no later than
    // the other one ends.
    return a.start <= b.end && b.start <= a.end;
}
export async function getNextAvailableClientSubnet( export async function getNextAvailableClientSubnet(
orgId: string orgId: string,
transaction: Transaction | typeof db = db
): Promise<string> { ): Promise<string> {
const [org] = await db.select().from(orgs).where(eq(orgs.orgId, orgId)); const [org] = await transaction
.select()
.from(orgs)
.where(eq(orgs.orgId, orgId));
if (!org) { if (!org) {
throw new Error(`Organization with ID ${orgId} not found`); throw new Error(`Organization with ID ${orgId} not found`);
@@ -256,14 +341,14 @@ export async function getNextAvailableClientSubnet(
throw new Error(`Organization with ID ${orgId} has no subnet defined`); throw new Error(`Organization with ID ${orgId} has no subnet defined`);
} }
const existingAddressesSites = await db const existingAddressesSites = await transaction
.select({ .select({
address: sites.address address: sites.address
}) })
.from(sites) .from(sites)
.where(and(isNotNull(sites.address), eq(sites.orgId, orgId))); .where(and(isNotNull(sites.address), eq(sites.orgId, orgId)));
const existingAddressesClients = await db const existingAddressesClients = await transaction
.select({ .select({
address: clients.subnet address: clients.subnet
}) })
@@ -359,10 +444,17 @@ export async function getNextAvailableOrgSubnet(): Promise<string> {
return subnet; return subnet;
} }
export function generateRemoteSubnets(allSiteResources: SiteResource[]): string[] { export function generateRemoteSubnets(
allSiteResources: SiteResource[]
): string[] {
const remoteSubnets = allSiteResources const remoteSubnets = allSiteResources
.filter((sr) => { .filter((sr) => {
if (sr.mode === "cidr") return true; if (sr.mode === "cidr") {
// check if its a valid CIDR using zod
const cidrSchema = z.union([z.cidrv4(), z.cidrv6()]);
const parseResult = cidrSchema.safeParse(sr.destination);
return parseResult.success;
}
if (sr.mode === "host") { if (sr.mode === "host") {
// check if its a valid IP using zod // check if its a valid IP using zod
const ipSchema = z.union([z.ipv4(), z.ipv6()]); const ipSchema = z.union([z.ipv4(), z.ipv6()]);
@@ -386,22 +478,23 @@ export function generateRemoteSubnets(allSiteResources: SiteResource[]): string[
export type Alias = { alias: string | null; aliasAddress: string | null }; export type Alias = { alias: string | null; aliasAddress: string | null };
export function generateAliasConfig(allSiteResources: SiteResource[]): Alias[] { export function generateAliasConfig(allSiteResources: SiteResource[]): Alias[] {
let aliasConfigs = allSiteResources return allSiteResources
.filter((sr) => sr.alias && sr.aliasAddress && sr.mode == "host") .filter((sr) => sr.alias && sr.aliasAddress && sr.mode == "host")
.map((sr) => ({ .map((sr) => ({
alias: sr.alias, alias: sr.alias,
aliasAddress: sr.aliasAddress aliasAddress: sr.aliasAddress
})); }));
return aliasConfigs;
} }
export type SubnetProxyTarget = { export type SubnetProxyTarget = {
sourcePrefix: string; // must be a cidr sourcePrefix: string; // must be a cidr
destPrefix: string; // must be a cidr destPrefix: string; // must be a cidr
disableIcmp?: boolean;
rewriteTo?: string; // must be a cidr rewriteTo?: string; // must be a cidr
portRange?: { portRange?: {
min: number; min: number;
max: number; max: number;
protocol: "tcp" | "udp";
}[]; }[];
}; };
@@ -431,6 +524,11 @@ export function generateSubnetProxyTargets(
} }
const clientPrefix = `${clientSite.subnet.split("/")[0]}/32`; const clientPrefix = `${clientSite.subnet.split("/")[0]}/32`;
const portRange = [
...parsePortRangeString(siteResource.tcpPortRangeString, "tcp"),
...parsePortRangeString(siteResource.udpPortRangeString, "udp")
];
const disableIcmp = siteResource.disableIcmp ?? false;
if (siteResource.mode == "host") { if (siteResource.mode == "host") {
let destination = siteResource.destination; let destination = siteResource.destination;
@@ -441,7 +539,9 @@ export function generateSubnetProxyTargets(
targets.push({ targets.push({
sourcePrefix: clientPrefix, sourcePrefix: clientPrefix,
destPrefix: destination destPrefix: destination,
portRange,
disableIcmp
}); });
} }
@@ -450,13 +550,17 @@ export function generateSubnetProxyTargets(
targets.push({ targets.push({
sourcePrefix: clientPrefix, sourcePrefix: clientPrefix,
destPrefix: `${siteResource.aliasAddress}/32`, destPrefix: `${siteResource.aliasAddress}/32`,
rewriteTo: destination rewriteTo: destination,
portRange,
disableIcmp
}); });
} }
} else if (siteResource.mode == "cidr") { } else if (siteResource.mode == "cidr") {
targets.push({ targets.push({
sourcePrefix: clientPrefix, sourcePrefix: clientPrefix,
destPrefix: siteResource.destination destPrefix: siteResource.destination,
portRange,
disableIcmp
}); });
} }
} }
@@ -468,3 +572,117 @@ export function generateSubnetProxyTargets(
return targets; return targets;
} }
// Custom schema for validating port range strings
// Format: "80,443,8000-9000" or "*" for all ports, or empty string
//
// NOTE: parts are checked with a strict all-digits test before parseInt.
// parseInt alone silently truncates trailing garbage ("80abc" -> 80,
// "1-2-3" -> range 1-2), which previously let malformed strings pass
// validation; those inputs are now rejected outright.
export const portRangeStringSchema = z
    .string()
    .optional()
    .refine(
        (val) => {
            // Empty and wildcard values are always valid.
            if (!val || val.trim() === "" || val.trim() === "*") {
                return true;
            }

            // A valid port token is all digits and within 1-65535.
            const isValidPort = (s: string): boolean => {
                if (!/^\d+$/.test(s)) {
                    return false;
                }
                const port = parseInt(s, 10);
                return port >= 1 && port <= 65535;
            };

            // Split by comma and validate each part
            const parts = val.split(",").map((p) => p.trim());
            for (const part of parts) {
                if (part === "") {
                    return false; // empty parts not allowed
                }
                // Check if it's a range (contains dash)
                if (part.includes("-")) {
                    const segments = part.split("-").map((p) => p.trim());
                    // Exactly one dash: "1-2-3" is malformed.
                    if (segments.length !== 2) {
                        return false;
                    }
                    const [start, end] = segments;
                    // Both endpoints must be valid ports (1-65535).
                    if (!isValidPort(start) || !isValidPort(end)) {
                        return false;
                    }
                    // Start must be <= end
                    if (parseInt(start, 10) > parseInt(end, 10)) {
                        return false;
                    }
                } else {
                    // Single port: must be a valid port (1-65535).
                    if (!isValidPort(part)) {
                        return false;
                    }
                }
            }
            return true;
        },
        {
            message:
                'Port range must be "*" for all ports, or a comma-separated list of ports and ranges (e.g., "80,443,8000-9000"). Ports must be between 1 and 65535, and ranges must have start <= end.'
        }
    );
/**
 * Parses a port range string into an array of port range objects.
 *
 * @param portRangeStr - Port range string (e.g., "80,443,8000-9000", "*", or "")
 * @param protocol - Protocol applied to every produced range (default: "tcp")
 * @returns Array of { min, max, protocol } entries. A "*" input yields an
 *          empty array (all ports allowed); a missing/empty input yields a
 *          single sentinel entry { min: 0, max: 0 }.
 */
export function parsePortRangeString(
    portRangeStr: string | undefined | null,
    protocol: "tcp" | "udp" = "tcp"
): { min: number; max: number; protocol: "tcp" | "udp" }[] {
    const trimmed = portRangeStr?.trim() ?? "";

    // Missing or empty input -> sentinel entry with port 0.
    if (trimmed === "") {
        return [{ min: 0, max: 0, protocol }];
    }

    // Wildcard -> no explicit restrictions (all ports allowed).
    if (trimmed === "*") {
        return [];
    }

    return trimmed.split(",").map((rawPart) => {
        const part = rawPart.trim();
        if (part.includes("-")) {
            // "start-end" range
            const [lo, hi] = part
                .split("-")
                .map((p) => parseInt(p.trim(), 10));
            return { min: lo, max: hi, protocol };
        }
        // Single port -> degenerate range
        const port = parseInt(part, 10);
        return { min: port, max: port, protocol };
    });
}

View File

@@ -14,7 +14,8 @@ export const configSchema = z
.object({ .object({
app: z app: z
.object({ .object({
dashboard_url: z.url() dashboard_url: z
.url()
.pipe(z.url()) .pipe(z.url())
.transform((url) => url.toLowerCase()) .transform((url) => url.toLowerCase())
.optional(), .optional(),
@@ -132,7 +133,8 @@ export const configSchema = z
.optional(), .optional(),
trust_proxy: z.int().gte(0).optional().default(1), trust_proxy: z.int().gte(0).optional().default(1),
secret: z.string().pipe(z.string().min(8)).optional(), secret: z.string().pipe(z.string().min(8)).optional(),
maxmind_db_path: z.string().optional() maxmind_db_path: z.string().optional(),
maxmind_asn_path: z.string().optional()
}) })
.optional() .optional()
.default({ .default({
@@ -254,8 +256,11 @@ export const configSchema = z
orgs: z orgs: z
.object({ .object({
block_size: z.number().positive().gt(0).optional().default(24), block_size: z.number().positive().gt(0).optional().default(24),
subnet_group: z.string().optional().default("100.90.128.0/24"), subnet_group: z.string().optional().default("100.90.128.0/20"),
utility_subnet_group: z.string().optional().default("100.96.128.0/24") //just hardcode this for now as well utility_subnet_group: z
.string()
.optional()
.default("100.96.128.0/20") //just hardcode this for now as well
}) })
.optional() .optional()
.default({ .default({

View File

@@ -24,7 +24,7 @@ import {
deletePeer as newtDeletePeer deletePeer as newtDeletePeer
} from "@server/routers/newt/peers"; } from "@server/routers/newt/peers";
import { import {
initPeerAddHandshake as holepunchSiteAdd, initPeerAddHandshake,
deletePeer as olmDeletePeer deletePeer as olmDeletePeer
} from "@server/routers/olm/peers"; } from "@server/routers/olm/peers";
import { sendToExitNode } from "#dynamic/lib/exitNodes"; import { sendToExitNode } from "#dynamic/lib/exitNodes";
@@ -33,6 +33,8 @@ import {
generateAliasConfig, generateAliasConfig,
generateRemoteSubnets, generateRemoteSubnets,
generateSubnetProxyTargets, generateSubnetProxyTargets,
parseEndpoint,
formatEndpoint
} from "@server/lib/ip"; } from "@server/lib/ip";
import { import {
addPeerData, addPeerData,
@@ -109,21 +111,22 @@ export async function getClientSiteResourceAccess(
const directClientIds = allClientSiteResources.map((row) => row.clientId); const directClientIds = allClientSiteResources.map((row) => row.clientId);
// Get full client details for directly associated clients // Get full client details for directly associated clients
const directClients = directClientIds.length > 0 const directClients =
? await trx directClientIds.length > 0
.select({ ? await trx
clientId: clients.clientId, .select({
pubKey: clients.pubKey, clientId: clients.clientId,
subnet: clients.subnet pubKey: clients.pubKey,
}) subnet: clients.subnet
.from(clients) })
.where( .from(clients)
and( .where(
inArray(clients.clientId, directClientIds), and(
eq(clients.orgId, siteResource.orgId) // filter by org to prevent cross-org associations inArray(clients.clientId, directClientIds),
eq(clients.orgId, siteResource.orgId) // filter by org to prevent cross-org associations
)
) )
) : [];
: [];
// Merge user-based clients with directly associated clients // Merge user-based clients with directly associated clients
const allClientsMap = new Map( const allClientsMap = new Map(
@@ -474,7 +477,7 @@ async function handleMessagesForSiteClients(
} }
if (isAdd) { if (isAdd) {
await holepunchSiteAdd( await initPeerAddHandshake(
// this will kick off the add peer process for the client // this will kick off the add peer process for the client
client.clientId, client.clientId,
{ {
@@ -541,6 +544,17 @@ export async function updateClientSiteDestinations(
continue; continue;
} }
// Parse the endpoint properly for both IPv4 and IPv6
const parsedEndpoint = parseEndpoint(
site.clientSitesAssociationsCache.endpoint
);
if (!parsedEndpoint) {
logger.warn(
`Failed to parse endpoint ${site.clientSitesAssociationsCache.endpoint}, skipping`
);
continue;
}
// find the destinations in the array // find the destinations in the array
let destinations = exitNodeDestinations.find( let destinations = exitNodeDestinations.find(
(d) => d.reachableAt === site.exitNodes?.reachableAt (d) => d.reachableAt === site.exitNodes?.reachableAt
@@ -552,13 +566,8 @@ export async function updateClientSiteDestinations(
exitNodeId: site.exitNodes?.exitNodeId || 0, exitNodeId: site.exitNodes?.exitNodeId || 0,
type: site.exitNodes?.type || "", type: site.exitNodes?.type || "",
name: site.exitNodes?.name || "", name: site.exitNodes?.name || "",
sourceIp: sourceIp: parsedEndpoint.ip,
site.clientSitesAssociationsCache.endpoint.split(":")[0] || sourcePort: parsedEndpoint.port,
"",
sourcePort:
parseInt(
site.clientSitesAssociationsCache.endpoint.split(":")[1]
) || 0,
destinations: [ destinations: [
{ {
destinationIP: site.sites.subnet.split("/")[0], destinationIP: site.sites.subnet.split("/")[0],
@@ -701,11 +710,46 @@ async function handleSubnetProxyTargetUpdates(
} }
for (const client of removedClients) { for (const client of removedClients) {
// Check if this client still has access to another resource on this site with the same destination
const destinationStillInUse = await trx
.select()
.from(siteResources)
.innerJoin(
clientSiteResourcesAssociationsCache,
eq(
clientSiteResourcesAssociationsCache.siteResourceId,
siteResources.siteResourceId
)
)
.where(
and(
eq(
clientSiteResourcesAssociationsCache.clientId,
client.clientId
),
eq(siteResources.siteId, siteResource.siteId),
eq(
siteResources.destination,
siteResource.destination
),
ne(
siteResources.siteResourceId,
siteResource.siteResourceId
)
)
);
// Only remove remote subnet if no other resource uses the same destination
const remoteSubnetsToRemove =
destinationStillInUse.length > 0
? []
: generateRemoteSubnets([siteResource]);
olmJobs.push( olmJobs.push(
removePeerData( removePeerData(
client.clientId, client.clientId,
siteResource.siteId, siteResource.siteId,
generateRemoteSubnets([siteResource]), remoteSubnetsToRemove,
generateAliasConfig([siteResource]) generateAliasConfig([siteResource])
) )
); );
@@ -783,7 +827,10 @@ export async function rebuildClientAssociationsFromClient(
.from(roleSiteResources) .from(roleSiteResources)
.innerJoin( .innerJoin(
siteResources, siteResources,
eq(siteResources.siteResourceId, roleSiteResources.siteResourceId) eq(
siteResources.siteResourceId,
roleSiteResources.siteResourceId
)
) )
.where( .where(
and( and(
@@ -908,28 +955,8 @@ export async function rebuildClientAssociationsFromClient(
/////////// Send messages /////////// /////////// Send messages ///////////
// Get the olm for this client
const [olm] = await trx
.select({ olmId: olms.olmId })
.from(olms)
.where(eq(olms.clientId, client.clientId))
.limit(1);
if (!olm) {
logger.warn(
`Olm not found for client ${client.clientId}, skipping peer updates`
);
return;
}
// Handle messages for sites being added // Handle messages for sites being added
await handleMessagesForClientSites( await handleMessagesForClientSites(client, sitesToAdd, sitesToRemove, trx);
client,
olm.olmId,
sitesToAdd,
sitesToRemove,
trx
);
// Handle subnet proxy target updates for resources // Handle subnet proxy target updates for resources
await handleMessagesForClientResources( await handleMessagesForClientResources(
@@ -949,11 +976,26 @@ async function handleMessagesForClientSites(
userId: string | null; userId: string | null;
orgId: string; orgId: string;
}, },
olmId: string,
sitesToAdd: number[], sitesToAdd: number[],
sitesToRemove: number[], sitesToRemove: number[],
trx: Transaction | typeof db = db trx: Transaction | typeof db = db
): Promise<void> { ): Promise<void> {
// Get the olm for this client
const [olm] = await trx
.select({ olmId: olms.olmId })
.from(olms)
.where(eq(olms.clientId, client.clientId))
.limit(1);
if (!olm) {
logger.warn(
`Olm not found for client ${client.clientId}, skipping peer updates`
);
return;
}
const olmId = olm.olmId;
if (!client.subnet || !client.pubKey) { if (!client.subnet || !client.pubKey) {
logger.warn( logger.warn(
`Client ${client.clientId} missing subnet or pubKey, skipping peer updates` `Client ${client.clientId} missing subnet or pubKey, skipping peer updates`
@@ -974,9 +1016,9 @@ async function handleMessagesForClientSites(
.leftJoin(newts, eq(sites.siteId, newts.siteId)) .leftJoin(newts, eq(sites.siteId, newts.siteId))
.where(inArray(sites.siteId, allSiteIds)); .where(inArray(sites.siteId, allSiteIds));
let newtJobs: Promise<any>[] = []; const newtJobs: Promise<any>[] = [];
let olmJobs: Promise<any>[] = []; const olmJobs: Promise<any>[] = [];
let exitNodeJobs: Promise<any>[] = []; const exitNodeJobs: Promise<any>[] = [];
for (const siteData of sitesData) { for (const siteData of sitesData) {
const site = siteData.sites; const site = siteData.sites;
@@ -1038,7 +1080,7 @@ async function handleMessagesForClientSites(
continue; continue;
} }
await holepunchSiteAdd( await initPeerAddHandshake(
// this will kick off the add peer process for the client // this will kick off the add peer process for the client
client.clientId, client.clientId,
{ {
@@ -1083,18 +1125,8 @@ async function handleMessagesForClientResources(
resourcesToRemove: number[], resourcesToRemove: number[],
trx: Transaction | typeof db = db trx: Transaction | typeof db = db
): Promise<void> { ): Promise<void> {
// Group resources by site const proxyJobs: Promise<any>[] = [];
const resourcesBySite = new Map<number, SiteResource[]>(); const olmJobs: Promise<any>[] = [];
for (const resource of allNewResources) {
if (!resourcesBySite.has(resource.siteId)) {
resourcesBySite.set(resource.siteId, []);
}
resourcesBySite.get(resource.siteId)!.push(resource);
}
let proxyJobs: Promise<any>[] = [];
let olmJobs: Promise<any>[] = [];
// Handle additions // Handle additions
if (resourcesToAdd.length > 0) { if (resourcesToAdd.length > 0) {
@@ -1213,12 +1245,47 @@ async function handleMessagesForClientResources(
} }
try { try {
// Check if this client still has access to another resource on this site with the same destination
const destinationStillInUse = await trx
.select()
.from(siteResources)
.innerJoin(
clientSiteResourcesAssociationsCache,
eq(
clientSiteResourcesAssociationsCache.siteResourceId,
siteResources.siteResourceId
)
)
.where(
and(
eq(
clientSiteResourcesAssociationsCache.clientId,
client.clientId
),
eq(siteResources.siteId, resource.siteId),
eq(
siteResources.destination,
resource.destination
),
ne(
siteResources.siteResourceId,
resource.siteResourceId
)
)
);
// Only remove remote subnet if no other resource uses the same destination
const remoteSubnetsToRemove =
destinationStillInUse.length > 0
? []
: generateRemoteSubnets([resource]);
// Remove peer data from olm // Remove peer data from olm
olmJobs.push( olmJobs.push(
removePeerData( removePeerData(
client.clientId, client.clientId,
resource.siteId, resource.siteId,
generateRemoteSubnets([resource]), remoteSubnetsToRemove,
generateAliasConfig([resource]) generateAliasConfig([resource])
) )
); );

View File

@@ -1,8 +1,8 @@
export enum AudienceIds { export enum AudienceIds {
SignUps = "", SignUps = "",
Subscribed = "", Subscribed = "",
Churned = "", Churned = "",
Newsletter = "" Newsletter = ""
} }
let resend; let resend;

View File

@@ -3,14 +3,14 @@ import { Response } from "express";
export const response = <T>( export const response = <T>(
res: Response, res: Response,
{ data, success, error, message, status }: ResponseT<T>, { data, success, error, message, status }: ResponseT<T>
) => { ) => {
return res.status(status).send({ return res.status(status).send({
data, data,
success, success,
error, error,
message, message,
status, status
}); });
}; };

View File

@@ -1,5 +1,5 @@
import { S3Client } from "@aws-sdk/client-s3"; import { S3Client } from "@aws-sdk/client-s3";
export const s3Client = new S3Client({ export const s3Client = new S3Client({
region: process.env.S3_REGION || "us-east-1", region: process.env.S3_REGION || "us-east-1"
}); });

View File

@@ -6,7 +6,7 @@ let serverIp: string | null = null;
const services = [ const services = [
"https://checkip.amazonaws.com", "https://checkip.amazonaws.com",
"https://ifconfig.io/ip", "https://ifconfig.io/ip",
"https://api.ipify.org", "https://api.ipify.org"
]; ];
export async function fetchServerIp() { export async function fetchServerIp() {
@@ -17,7 +17,9 @@ export async function fetchServerIp() {
logger.debug("Detected public IP: " + serverIp); logger.debug("Detected public IP: " + serverIp);
return; return;
} catch (err: any) { } catch (err: any) {
console.warn(`Failed to fetch server IP from ${url}: ${err.message || err.code}`); console.warn(
`Failed to fetch server IP from ${url}: ${err.message || err.code}`
);
} }
} }

View File

@@ -1,8 +1,7 @@
export default function stoi(val: any) { export default function stoi(val: any) {
if (typeof val === "string") { if (typeof val === "string") {
return parseInt(val); return parseInt(val);
} } else {
else {
return val; return val;
} }
} }

View File

@@ -2,9 +2,9 @@ import { PostHog } from "posthog-node";
import config from "./config"; import config from "./config";
import { getHostMeta } from "./hostMeta"; import { getHostMeta } from "./hostMeta";
import logger from "@server/logger"; import logger from "@server/logger";
import { apiKeys, db, roles } from "@server/db"; import { apiKeys, db, roles, siteResources } from "@server/db";
import { sites, users, orgs, resources, clients, idp } from "@server/db"; import { sites, users, orgs, resources, clients, idp } from "@server/db";
import { eq, count, notInArray, and } from "drizzle-orm"; import { eq, count, notInArray, and, isNotNull, isNull } from "drizzle-orm";
import { APP_VERSION } from "./consts"; import { APP_VERSION } from "./consts";
import crypto from "crypto"; import crypto from "crypto";
import { UserType } from "@server/types/UserTypes"; import { UserType } from "@server/types/UserTypes";
@@ -25,7 +25,7 @@ class TelemetryClient {
return; return;
} }
if (build !== "oss") { if (build === "saas") {
return; return;
} }
@@ -41,14 +41,18 @@ class TelemetryClient {
this.client?.shutdown(); this.client?.shutdown();
}); });
this.sendStartupEvents().catch((err) => { this.sendStartupEvents()
logger.error("Failed to send startup telemetry:", err); .catch((err) => {
}); logger.error("Failed to send startup telemetry:", err);
})
.then(() => {
logger.debug("Successfully sent startup telemetry data");
});
this.startAnalyticsInterval(); this.startAnalyticsInterval();
logger.info( logger.info(
"Pangolin now gathers anonymous usage data to help us better understand how the software is used and guide future improvements and feature development. You can find more details, including instructions for opting out of this anonymous data collection, at: https://docs.pangolin.net/telemetry" "Pangolin gathers anonymous usage data to help us better understand how the software is used and guide future improvements and feature development. You can find more details, including instructions for opting out of this anonymous data collection, at: https://docs.pangolin.net/telemetry"
); );
} else if (!this.enabled) { } else if (!this.enabled) {
logger.info( logger.info(
@@ -60,9 +64,13 @@ class TelemetryClient {
private startAnalyticsInterval() { private startAnalyticsInterval() {
this.intervalId = setInterval( this.intervalId = setInterval(
() => { () => {
this.collectAndSendAnalytics().catch((err) => { this.collectAndSendAnalytics()
logger.error("Failed to collect analytics:", err); .catch((err) => {
}); logger.error("Failed to collect analytics:", err);
})
.then(() => {
logger.debug("Successfully sent analytics data");
});
}, },
48 * 60 * 60 * 1000 48 * 60 * 60 * 1000
); );
@@ -99,9 +107,14 @@ class TelemetryClient {
const [resourcesCount] = await db const [resourcesCount] = await db
.select({ count: count() }) .select({ count: count() })
.from(resources); .from(resources);
const [clientsCount] = await db const [userDevicesCount] = await db
.select({ count: count() }) .select({ count: count() })
.from(clients); .from(clients)
.where(isNotNull(clients.userId));
const [machineClients] = await db
.select({ count: count() })
.from(clients)
.where(isNull(clients.userId));
const [idpCount] = await db.select({ count: count() }).from(idp); const [idpCount] = await db.select({ count: count() }).from(idp);
const [onlineSitesCount] = await db const [onlineSitesCount] = await db
.select({ count: count() }) .select({ count: count() })
@@ -146,6 +159,24 @@ class TelemetryClient {
const supporterKey = config.getSupporterData(); const supporterKey = config.getSupporterData();
const allPrivateResources = await db.select().from(siteResources);
const numPrivResources = allPrivateResources.length;
let numPrivResourceAliases = 0;
let numPrivResourceHosts = 0;
let numPrivResourceCidr = 0;
for (const res of allPrivateResources) {
if (res.mode === "host") {
numPrivResourceHosts += 1;
} else if (res.mode === "cidr") {
numPrivResourceCidr += 1;
}
if (res.alias) {
numPrivResourceAliases += 1;
}
}
return { return {
numSites: sitesCount.count, numSites: sitesCount.count,
numUsers: usersCount.count, numUsers: usersCount.count,
@@ -153,7 +184,11 @@ class TelemetryClient {
numUsersOidc: usersOidcCount.count, numUsersOidc: usersOidcCount.count,
numOrganizations: orgsCount.count, numOrganizations: orgsCount.count,
numResources: resourcesCount.count, numResources: resourcesCount.count,
numClients: clientsCount.count, numPrivateResources: numPrivResources,
numPrivateResourceAliases: numPrivResourceAliases,
numPrivateResourceHosts: numPrivResourceHosts,
numUserDevices: userDevicesCount.count,
numMachineClients: machineClients.count,
numIdentityProviders: idpCount.count, numIdentityProviders: idpCount.count,
numSitesOnline: onlineSitesCount.count, numSitesOnline: onlineSitesCount.count,
resources: resourceDetails, resources: resourceDetails,
@@ -196,7 +231,7 @@ class TelemetryClient {
logger.debug("Sending enterprise startup telemetry payload:", { logger.debug("Sending enterprise startup telemetry payload:", {
payload payload
}); });
// this.client.capture(payload); this.client.capture(payload);
} }
if (build === "oss") { if (build === "oss") {
@@ -246,7 +281,12 @@ class TelemetryClient {
num_users_oidc: stats.numUsersOidc, num_users_oidc: stats.numUsersOidc,
num_organizations: stats.numOrganizations, num_organizations: stats.numOrganizations,
num_resources: stats.numResources, num_resources: stats.numResources,
num_clients: stats.numClients, num_private_resources: stats.numPrivateResources,
num_private_resource_aliases:
stats.numPrivateResourceAliases,
num_private_resource_hosts: stats.numPrivateResourceHosts,
num_user_devices: stats.numUserDevices,
num_machine_clients: stats.numMachineClients,
num_identity_providers: stats.numIdentityProviders, num_identity_providers: stats.numIdentityProviders,
num_sites_online: stats.numSitesOnline, num_sites_online: stats.numSitesOnline,
num_resources_sso_enabled: stats.resources.filter( num_resources_sso_enabled: stats.resources.filter(

View File

@@ -195,7 +195,9 @@ export class TraefikConfigManager {
state.set(domain, { state.set(domain, {
exists: certExists && keyExists, exists: certExists && keyExists,
lastModified: lastModified ? Math.floor(lastModified.getTime() / 1000) : null, lastModified: lastModified
? Math.floor(lastModified.getTime() / 1000)
: null,
expiresAt, expiresAt,
wildcard wildcard
}); });
@@ -464,7 +466,9 @@ export class TraefikConfigManager {
config.getRawConfig().traefik.site_types, config.getRawConfig().traefik.site_types,
build == "oss", // filter out the namespace domains in open source build == "oss", // filter out the namespace domains in open source
build != "oss", // generate the login pages on the cloud and hybrid, build != "oss", // generate the login pages on the cloud and hybrid,
build == "saas" ? false : config.getRawConfig().traefik.allow_raw_resources // dont allow raw resources on saas otherwise use config build == "saas"
? false
: config.getRawConfig().traefik.allow_raw_resources // dont allow raw resources on saas otherwise use config
); );
const domains = new Set<string>(); const domains = new Set<string>();
@@ -786,29 +790,30 @@ export class TraefikConfigManager {
"utf8" "utf8"
); );
// Store the certificate expiry time
if (cert.expiresAt) {
const expiresAtPath = path.join(domainDir, ".expires_at");
fs.writeFileSync(
expiresAtPath,
cert.expiresAt.toString(),
"utf8"
);
}
logger.info( logger.info(
`Certificate updated for domain: ${cert.domain}${cert.wildcard ? " (wildcard)" : ""}` `Certificate updated for domain: ${cert.domain}${cert.wildcard ? " (wildcard)" : ""}`
); );
// Update local state tracking
this.lastLocalCertificateState.set(cert.domain, {
exists: true,
lastModified: Math.floor(Date.now() / 1000),
expiresAt: cert.expiresAt,
wildcard: cert.wildcard
});
} }
// Always update expiry tracking when we fetch a certificate,
// even if the cert content didn't change
if (cert.expiresAt) {
const expiresAtPath = path.join(domainDir, ".expires_at");
fs.writeFileSync(
expiresAtPath,
cert.expiresAt.toString(),
"utf8"
);
}
// Update local state tracking
this.lastLocalCertificateState.set(cert.domain, {
exists: true,
lastModified: Math.floor(Date.now() / 1000),
expiresAt: cert.expiresAt,
wildcard: cert.wildcard
});
// Always ensure the config entry exists and is up to date // Always ensure the config entry exists and is up to date
const certEntry = { const certEntry = {
certFile: certPath, certFile: certPath,

View File

@@ -2,234 +2,249 @@ import { assertEquals } from "@test/assert";
import { isDomainCoveredByWildcard } from "./TraefikConfigManager"; import { isDomainCoveredByWildcard } from "./TraefikConfigManager";
function runTests() { function runTests() {
console.log('Running wildcard domain coverage tests...'); console.log("Running wildcard domain coverage tests...");
// Test case 1: Basic wildcard certificate at example.com // Test case 1: Basic wildcard certificate at example.com
const basicWildcardCerts = new Map([ const basicWildcardCerts = new Map([
['example.com', { exists: true, wildcard: true }] ["example.com", { exists: true, wildcard: true }]
]); ]);
// Should match first-level subdomains // Should match first-level subdomains
assertEquals( assertEquals(
isDomainCoveredByWildcard('level1.example.com', basicWildcardCerts), isDomainCoveredByWildcard("level1.example.com", basicWildcardCerts),
true, true,
'Wildcard cert at example.com should match level1.example.com' "Wildcard cert at example.com should match level1.example.com"
); );
assertEquals( assertEquals(
isDomainCoveredByWildcard('api.example.com', basicWildcardCerts), isDomainCoveredByWildcard("api.example.com", basicWildcardCerts),
true, true,
'Wildcard cert at example.com should match api.example.com' "Wildcard cert at example.com should match api.example.com"
); );
assertEquals( assertEquals(
isDomainCoveredByWildcard('www.example.com', basicWildcardCerts), isDomainCoveredByWildcard("www.example.com", basicWildcardCerts),
true, true,
'Wildcard cert at example.com should match www.example.com' "Wildcard cert at example.com should match www.example.com"
); );
// Should match the root domain (exact match) // Should match the root domain (exact match)
assertEquals( assertEquals(
isDomainCoveredByWildcard('example.com', basicWildcardCerts), isDomainCoveredByWildcard("example.com", basicWildcardCerts),
true, true,
'Wildcard cert at example.com should match example.com itself' "Wildcard cert at example.com should match example.com itself"
); );
// Should NOT match second-level subdomains // Should NOT match second-level subdomains
assertEquals( assertEquals(
isDomainCoveredByWildcard('level2.level1.example.com', basicWildcardCerts), isDomainCoveredByWildcard(
"level2.level1.example.com",
basicWildcardCerts
),
false, false,
'Wildcard cert at example.com should NOT match level2.level1.example.com' "Wildcard cert at example.com should NOT match level2.level1.example.com"
); );
assertEquals( assertEquals(
isDomainCoveredByWildcard('deep.nested.subdomain.example.com', basicWildcardCerts), isDomainCoveredByWildcard(
"deep.nested.subdomain.example.com",
basicWildcardCerts
),
false, false,
'Wildcard cert at example.com should NOT match deep.nested.subdomain.example.com' "Wildcard cert at example.com should NOT match deep.nested.subdomain.example.com"
); );
// Should NOT match different domains // Should NOT match different domains
assertEquals( assertEquals(
isDomainCoveredByWildcard('test.otherdomain.com', basicWildcardCerts), isDomainCoveredByWildcard("test.otherdomain.com", basicWildcardCerts),
false, false,
'Wildcard cert at example.com should NOT match test.otherdomain.com' "Wildcard cert at example.com should NOT match test.otherdomain.com"
); );
assertEquals( assertEquals(
isDomainCoveredByWildcard('notexample.com', basicWildcardCerts), isDomainCoveredByWildcard("notexample.com", basicWildcardCerts),
false, false,
'Wildcard cert at example.com should NOT match notexample.com' "Wildcard cert at example.com should NOT match notexample.com"
); );
// Test case 2: Multiple wildcard certificates // Test case 2: Multiple wildcard certificates
const multipleWildcardCerts = new Map([ const multipleWildcardCerts = new Map([
['example.com', { exists: true, wildcard: true }], ["example.com", { exists: true, wildcard: true }],
['test.org', { exists: true, wildcard: true }], ["test.org", { exists: true, wildcard: true }],
['api.service.net', { exists: true, wildcard: true }] ["api.service.net", { exists: true, wildcard: true }]
]); ]);
assertEquals( assertEquals(
isDomainCoveredByWildcard('app.example.com', multipleWildcardCerts), isDomainCoveredByWildcard("app.example.com", multipleWildcardCerts),
true, true,
'Should match subdomain of first wildcard cert' "Should match subdomain of first wildcard cert"
); );
assertEquals( assertEquals(
isDomainCoveredByWildcard('staging.test.org', multipleWildcardCerts), isDomainCoveredByWildcard("staging.test.org", multipleWildcardCerts),
true, true,
'Should match subdomain of second wildcard cert' "Should match subdomain of second wildcard cert"
); );
assertEquals( assertEquals(
isDomainCoveredByWildcard('v1.api.service.net', multipleWildcardCerts), isDomainCoveredByWildcard("v1.api.service.net", multipleWildcardCerts),
true, true,
'Should match subdomain of third wildcard cert' "Should match subdomain of third wildcard cert"
); );
assertEquals( assertEquals(
isDomainCoveredByWildcard('deep.nested.api.service.net', multipleWildcardCerts), isDomainCoveredByWildcard(
"deep.nested.api.service.net",
multipleWildcardCerts
),
false, false,
'Should NOT match multi-level subdomain of third wildcard cert' "Should NOT match multi-level subdomain of third wildcard cert"
); );
// Test exact domain matches for multiple certs // Test exact domain matches for multiple certs
assertEquals( assertEquals(
isDomainCoveredByWildcard('example.com', multipleWildcardCerts), isDomainCoveredByWildcard("example.com", multipleWildcardCerts),
true, true,
'Should match exact domain of first wildcard cert' "Should match exact domain of first wildcard cert"
); );
assertEquals( assertEquals(
isDomainCoveredByWildcard('test.org', multipleWildcardCerts), isDomainCoveredByWildcard("test.org", multipleWildcardCerts),
true, true,
'Should match exact domain of second wildcard cert' "Should match exact domain of second wildcard cert"
); );
assertEquals( assertEquals(
isDomainCoveredByWildcard('api.service.net', multipleWildcardCerts), isDomainCoveredByWildcard("api.service.net", multipleWildcardCerts),
true, true,
'Should match exact domain of third wildcard cert' "Should match exact domain of third wildcard cert"
); );
// Test case 3: Non-wildcard certificates (should not match anything) // Test case 3: Non-wildcard certificates (should not match anything)
const nonWildcardCerts = new Map([ const nonWildcardCerts = new Map([
['example.com', { exists: true, wildcard: false }], ["example.com", { exists: true, wildcard: false }],
['specific.domain.com', { exists: true, wildcard: false }] ["specific.domain.com", { exists: true, wildcard: false }]
]); ]);
assertEquals( assertEquals(
isDomainCoveredByWildcard('sub.example.com', nonWildcardCerts), isDomainCoveredByWildcard("sub.example.com", nonWildcardCerts),
false, false,
'Non-wildcard cert should not match subdomains' "Non-wildcard cert should not match subdomains"
); );
assertEquals( assertEquals(
isDomainCoveredByWildcard('example.com', nonWildcardCerts), isDomainCoveredByWildcard("example.com", nonWildcardCerts),
false, false,
'Non-wildcard cert should not match even exact domain via this function' "Non-wildcard cert should not match even exact domain via this function"
); );
// Test case 4: Non-existent certificates (should not match) // Test case 4: Non-existent certificates (should not match)
const nonExistentCerts = new Map([ const nonExistentCerts = new Map([
['example.com', { exists: false, wildcard: true }], ["example.com", { exists: false, wildcard: true }],
['missing.com', { exists: false, wildcard: true }] ["missing.com", { exists: false, wildcard: true }]
]); ]);
assertEquals( assertEquals(
isDomainCoveredByWildcard('sub.example.com', nonExistentCerts), isDomainCoveredByWildcard("sub.example.com", nonExistentCerts),
false, false,
'Non-existent wildcard cert should not match' "Non-existent wildcard cert should not match"
); );
// Test case 5: Edge cases with special domain names // Test case 5: Edge cases with special domain names
const specialDomainCerts = new Map([ const specialDomainCerts = new Map([
['localhost', { exists: true, wildcard: true }], ["localhost", { exists: true, wildcard: true }],
['127-0-0-1.nip.io', { exists: true, wildcard: true }], ["127-0-0-1.nip.io", { exists: true, wildcard: true }],
['xn--e1afmkfd.xn--p1ai', { exists: true, wildcard: true }] // IDN domain ["xn--e1afmkfd.xn--p1ai", { exists: true, wildcard: true }] // IDN domain
]); ]);
assertEquals( assertEquals(
isDomainCoveredByWildcard('app.localhost', specialDomainCerts), isDomainCoveredByWildcard("app.localhost", specialDomainCerts),
true, true,
'Should match subdomain of localhost wildcard' "Should match subdomain of localhost wildcard"
); );
assertEquals( assertEquals(
isDomainCoveredByWildcard('test.127-0-0-1.nip.io', specialDomainCerts), isDomainCoveredByWildcard("test.127-0-0-1.nip.io", specialDomainCerts),
true, true,
'Should match subdomain of nip.io wildcard' "Should match subdomain of nip.io wildcard"
); );
assertEquals( assertEquals(
isDomainCoveredByWildcard('sub.xn--e1afmkfd.xn--p1ai', specialDomainCerts), isDomainCoveredByWildcard(
"sub.xn--e1afmkfd.xn--p1ai",
specialDomainCerts
),
true, true,
'Should match subdomain of IDN wildcard' "Should match subdomain of IDN wildcard"
); );
// Test case 6: Empty input and edge cases // Test case 6: Empty input and edge cases
const emptyCerts = new Map(); const emptyCerts = new Map();
assertEquals( assertEquals(
isDomainCoveredByWildcard('any.domain.com', emptyCerts), isDomainCoveredByWildcard("any.domain.com", emptyCerts),
false, false,
'Empty certificate map should not match any domain' "Empty certificate map should not match any domain"
); );
// Test case 7: Domains with single character components // Test case 7: Domains with single character components
const singleCharCerts = new Map([ const singleCharCerts = new Map([
['a.com', { exists: true, wildcard: true }], ["a.com", { exists: true, wildcard: true }],
['x.y.z', { exists: true, wildcard: true }] ["x.y.z", { exists: true, wildcard: true }]
]); ]);
assertEquals( assertEquals(
isDomainCoveredByWildcard('b.a.com', singleCharCerts), isDomainCoveredByWildcard("b.a.com", singleCharCerts),
true, true,
'Should match single character subdomain' "Should match single character subdomain"
); );
assertEquals( assertEquals(
isDomainCoveredByWildcard('w.x.y.z', singleCharCerts), isDomainCoveredByWildcard("w.x.y.z", singleCharCerts),
true, true,
'Should match single character subdomain of multi-part domain' "Should match single character subdomain of multi-part domain"
); );
assertEquals( assertEquals(
isDomainCoveredByWildcard('v.w.x.y.z', singleCharCerts), isDomainCoveredByWildcard("v.w.x.y.z", singleCharCerts),
false, false,
'Should NOT match multi-level subdomain of single char domain' "Should NOT match multi-level subdomain of single char domain"
); );
// Test case 8: Domains with numbers and hyphens // Test case 8: Domains with numbers and hyphens
const numericCerts = new Map([ const numericCerts = new Map([
['api-v2.service-1.com', { exists: true, wildcard: true }], ["api-v2.service-1.com", { exists: true, wildcard: true }],
['123.456.net', { exists: true, wildcard: true }] ["123.456.net", { exists: true, wildcard: true }]
]); ]);
assertEquals( assertEquals(
isDomainCoveredByWildcard('staging.api-v2.service-1.com', numericCerts), isDomainCoveredByWildcard("staging.api-v2.service-1.com", numericCerts),
true, true,
'Should match subdomain with hyphens and numbers' "Should match subdomain with hyphens and numbers"
); );
assertEquals( assertEquals(
isDomainCoveredByWildcard('test.123.456.net', numericCerts), isDomainCoveredByWildcard("test.123.456.net", numericCerts),
true, true,
'Should match subdomain with numeric components' "Should match subdomain with numeric components"
); );
assertEquals( assertEquals(
isDomainCoveredByWildcard('deep.staging.api-v2.service-1.com', numericCerts), isDomainCoveredByWildcard(
"deep.staging.api-v2.service-1.com",
numericCerts
),
false, false,
'Should NOT match multi-level subdomain with hyphens and numbers' "Should NOT match multi-level subdomain with hyphens and numbers"
); );
console.log('All wildcard domain coverage tests passed!'); console.log("All wildcard domain coverage tests passed!");
} }
// Run all tests // Run all tests
try { try {
runTests(); runTests();
} catch (error) { } catch (error) {
console.error('Test failed:', error); console.error("Test failed:", error);
process.exit(1); process.exit(1);
} }

View File

@@ -31,12 +31,17 @@ export function validatePathRewriteConfig(
} }
if (rewritePathType !== "stripPrefix") { if (rewritePathType !== "stripPrefix") {
if ((rewritePath && !rewritePathType) || (!rewritePath && rewritePathType)) { if (
return { isValid: false, error: "Both rewritePath and rewritePathType must be specified together" }; (rewritePath && !rewritePathType) ||
(!rewritePath && rewritePathType)
) {
return {
isValid: false,
error: "Both rewritePath and rewritePathType must be specified together"
};
} }
} }
if (!rewritePath || !rewritePathType) { if (!rewritePath || !rewritePathType) {
return { isValid: true }; return { isValid: true };
} }
@@ -68,14 +73,14 @@ export function validatePathRewriteConfig(
} }
} }
// Additional validation for stripPrefix // Additional validation for stripPrefix
if (rewritePathType === "stripPrefix") { if (rewritePathType === "stripPrefix") {
if (pathMatchType !== "prefix") { if (pathMatchType !== "prefix") {
logger.warn(`stripPrefix rewrite type is most effective with prefix path matching. Current match type: ${pathMatchType}`); logger.warn(
`stripPrefix rewrite type is most effective with prefix path matching. Current match type: ${pathMatchType}`
);
} }
} }
return { isValid: true }; return { isValid: true };
} }

View File

@@ -2,70 +2,246 @@ import { isValidUrlGlobPattern } from "./validators";
import { assertEquals } from "@test/assert"; import { assertEquals } from "@test/assert";
function runTests() { function runTests() {
console.log('Running URL pattern validation tests...'); console.log("Running URL pattern validation tests...");
// Test valid patterns // Test valid patterns
assertEquals(isValidUrlGlobPattern('simple'), true, 'Simple path segment should be valid'); assertEquals(
assertEquals(isValidUrlGlobPattern('simple/path'), true, 'Simple path with slash should be valid'); isValidUrlGlobPattern("simple"),
assertEquals(isValidUrlGlobPattern('/leading/slash'), true, 'Path with leading slash should be valid'); true,
assertEquals(isValidUrlGlobPattern('path/'), true, 'Path with trailing slash should be valid'); "Simple path segment should be valid"
assertEquals(isValidUrlGlobPattern('path/*'), true, 'Path with wildcard segment should be valid'); );
assertEquals(isValidUrlGlobPattern('*'), true, 'Single wildcard should be valid'); assertEquals(
assertEquals(isValidUrlGlobPattern('*/subpath'), true, 'Wildcard with subpath should be valid'); isValidUrlGlobPattern("simple/path"),
assertEquals(isValidUrlGlobPattern('path/*/more'), true, 'Path with wildcard in the middle should be valid'); true,
"Simple path with slash should be valid"
);
assertEquals(
isValidUrlGlobPattern("/leading/slash"),
true,
"Path with leading slash should be valid"
);
assertEquals(
isValidUrlGlobPattern("path/"),
true,
"Path with trailing slash should be valid"
);
assertEquals(
isValidUrlGlobPattern("path/*"),
true,
"Path with wildcard segment should be valid"
);
assertEquals(
isValidUrlGlobPattern("*"),
true,
"Single wildcard should be valid"
);
assertEquals(
isValidUrlGlobPattern("*/subpath"),
true,
"Wildcard with subpath should be valid"
);
assertEquals(
isValidUrlGlobPattern("path/*/more"),
true,
"Path with wildcard in the middle should be valid"
);
// Test with special characters // Test with special characters
assertEquals(isValidUrlGlobPattern('path-with-dash'), true, 'Path with dash should be valid'); assertEquals(
assertEquals(isValidUrlGlobPattern('path_with_underscore'), true, 'Path with underscore should be valid'); isValidUrlGlobPattern("path-with-dash"),
assertEquals(isValidUrlGlobPattern('path.with.dots'), true, 'Path with dots should be valid'); true,
assertEquals(isValidUrlGlobPattern('path~with~tilde'), true, 'Path with tilde should be valid'); "Path with dash should be valid"
assertEquals(isValidUrlGlobPattern('path!with!exclamation'), true, 'Path with exclamation should be valid'); );
assertEquals(isValidUrlGlobPattern('path$with$dollar'), true, 'Path with dollar should be valid'); assertEquals(
assertEquals(isValidUrlGlobPattern('path&with&ampersand'), true, 'Path with ampersand should be valid'); isValidUrlGlobPattern("path_with_underscore"),
assertEquals(isValidUrlGlobPattern("path'with'quote"), true, "Path with quote should be valid"); true,
assertEquals(isValidUrlGlobPattern('path(with)parentheses'), true, 'Path with parentheses should be valid'); "Path with underscore should be valid"
assertEquals(isValidUrlGlobPattern('path+with+plus'), true, 'Path with plus should be valid'); );
assertEquals(isValidUrlGlobPattern('path,with,comma'), true, 'Path with comma should be valid'); assertEquals(
assertEquals(isValidUrlGlobPattern('path;with;semicolon'), true, 'Path with semicolon should be valid'); isValidUrlGlobPattern("path.with.dots"),
assertEquals(isValidUrlGlobPattern('path=with=equals'), true, 'Path with equals should be valid'); true,
assertEquals(isValidUrlGlobPattern('path:with:colon'), true, 'Path with colon should be valid'); "Path with dots should be valid"
assertEquals(isValidUrlGlobPattern('path@with@at'), true, 'Path with at should be valid'); );
assertEquals(
isValidUrlGlobPattern("path~with~tilde"),
true,
"Path with tilde should be valid"
);
assertEquals(
isValidUrlGlobPattern("path!with!exclamation"),
true,
"Path with exclamation should be valid"
);
assertEquals(
isValidUrlGlobPattern("path$with$dollar"),
true,
"Path with dollar should be valid"
);
assertEquals(
isValidUrlGlobPattern("path&with&ampersand"),
true,
"Path with ampersand should be valid"
);
assertEquals(
isValidUrlGlobPattern("path'with'quote"),
true,
"Path with quote should be valid"
);
assertEquals(
isValidUrlGlobPattern("path(with)parentheses"),
true,
"Path with parentheses should be valid"
);
assertEquals(
isValidUrlGlobPattern("path+with+plus"),
true,
"Path with plus should be valid"
);
assertEquals(
isValidUrlGlobPattern("path,with,comma"),
true,
"Path with comma should be valid"
);
assertEquals(
isValidUrlGlobPattern("path;with;semicolon"),
true,
"Path with semicolon should be valid"
);
assertEquals(
isValidUrlGlobPattern("path=with=equals"),
true,
"Path with equals should be valid"
);
assertEquals(
isValidUrlGlobPattern("path:with:colon"),
true,
"Path with colon should be valid"
);
assertEquals(
isValidUrlGlobPattern("path@with@at"),
true,
"Path with at should be valid"
);
// Test with percent encoding // Test with percent encoding
assertEquals(isValidUrlGlobPattern('path%20with%20spaces'), true, 'Path with percent-encoded spaces should be valid'); assertEquals(
assertEquals(isValidUrlGlobPattern('path%2Fwith%2Fencoded%2Fslashes'), true, 'Path with percent-encoded slashes should be valid'); isValidUrlGlobPattern("path%20with%20spaces"),
true,
"Path with percent-encoded spaces should be valid"
);
assertEquals(
isValidUrlGlobPattern("path%2Fwith%2Fencoded%2Fslashes"),
true,
"Path with percent-encoded slashes should be valid"
);
// Test with wildcards in segments (the fixed functionality) // Test with wildcards in segments (the fixed functionality)
assertEquals(isValidUrlGlobPattern('padbootstrap*'), true, 'Path with wildcard at the end of segment should be valid'); assertEquals(
assertEquals(isValidUrlGlobPattern('pad*bootstrap'), true, 'Path with wildcard in the middle of segment should be valid'); isValidUrlGlobPattern("padbootstrap*"),
assertEquals(isValidUrlGlobPattern('*bootstrap'), true, 'Path with wildcard at the start of segment should be valid'); true,
assertEquals(isValidUrlGlobPattern('multiple*wildcards*in*segment'), true, 'Path with multiple wildcards in segment should be valid'); "Path with wildcard at the end of segment should be valid"
assertEquals(isValidUrlGlobPattern('wild*/cards/in*/different/seg*ments'), true, 'Path with wildcards in different segments should be valid'); );
assertEquals(
isValidUrlGlobPattern("pad*bootstrap"),
true,
"Path with wildcard in the middle of segment should be valid"
);
assertEquals(
isValidUrlGlobPattern("*bootstrap"),
true,
"Path with wildcard at the start of segment should be valid"
);
assertEquals(
isValidUrlGlobPattern("multiple*wildcards*in*segment"),
true,
"Path with multiple wildcards in segment should be valid"
);
assertEquals(
isValidUrlGlobPattern("wild*/cards/in*/different/seg*ments"),
true,
"Path with wildcards in different segments should be valid"
);
// Test invalid patterns // Test invalid patterns
assertEquals(isValidUrlGlobPattern(''), false, 'Empty string should be invalid'); assertEquals(
assertEquals(isValidUrlGlobPattern('//double/slash'), false, 'Path with double slash should be invalid'); isValidUrlGlobPattern(""),
assertEquals(isValidUrlGlobPattern('path//end'), false, 'Path with double slash in the middle should be invalid'); false,
assertEquals(isValidUrlGlobPattern('invalid<char>'), false, 'Path with invalid characters should be invalid'); "Empty string should be invalid"
assertEquals(isValidUrlGlobPattern('invalid|char'), false, 'Path with invalid pipe character should be invalid'); );
assertEquals(isValidUrlGlobPattern('invalid"char'), false, 'Path with invalid quote character should be invalid'); assertEquals(
assertEquals(isValidUrlGlobPattern('invalid`char'), false, 'Path with invalid backtick character should be invalid'); isValidUrlGlobPattern("//double/slash"),
assertEquals(isValidUrlGlobPattern('invalid^char'), false, 'Path with invalid caret character should be invalid'); false,
assertEquals(isValidUrlGlobPattern('invalid\\char'), false, 'Path with invalid backslash character should be invalid'); "Path with double slash should be invalid"
assertEquals(isValidUrlGlobPattern('invalid[char]'), false, 'Path with invalid square brackets should be invalid'); );
assertEquals(isValidUrlGlobPattern('invalid{char}'), false, 'Path with invalid curly braces should be invalid'); assertEquals(
isValidUrlGlobPattern("path//end"),
false,
"Path with double slash in the middle should be invalid"
);
assertEquals(
isValidUrlGlobPattern("invalid<char>"),
false,
"Path with invalid characters should be invalid"
);
assertEquals(
isValidUrlGlobPattern("invalid|char"),
false,
"Path with invalid pipe character should be invalid"
);
assertEquals(
isValidUrlGlobPattern('invalid"char'),
false,
"Path with invalid quote character should be invalid"
);
assertEquals(
isValidUrlGlobPattern("invalid`char"),
false,
"Path with invalid backtick character should be invalid"
);
assertEquals(
isValidUrlGlobPattern("invalid^char"),
false,
"Path with invalid caret character should be invalid"
);
assertEquals(
isValidUrlGlobPattern("invalid\\char"),
false,
"Path with invalid backslash character should be invalid"
);
assertEquals(
isValidUrlGlobPattern("invalid[char]"),
false,
"Path with invalid square brackets should be invalid"
);
assertEquals(
isValidUrlGlobPattern("invalid{char}"),
false,
"Path with invalid curly braces should be invalid"
);
// Test invalid percent encoding // Test invalid percent encoding
assertEquals(isValidUrlGlobPattern('invalid%2'), false, 'Path with incomplete percent encoding should be invalid'); assertEquals(
assertEquals(isValidUrlGlobPattern('invalid%GZ'), false, 'Path with invalid hex in percent encoding should be invalid'); isValidUrlGlobPattern("invalid%2"),
assertEquals(isValidUrlGlobPattern('invalid%'), false, 'Path with isolated percent sign should be invalid'); false,
"Path with incomplete percent encoding should be invalid"
);
assertEquals(
isValidUrlGlobPattern("invalid%GZ"),
false,
"Path with invalid hex in percent encoding should be invalid"
);
assertEquals(
isValidUrlGlobPattern("invalid%"),
false,
"Path with isolated percent sign should be invalid"
);
console.log('All tests passed!'); console.log("All tests passed!");
} }
// Run all tests // Run all tests
try { try {
runTests(); runTests();
} catch (error) { } catch (error) {
console.error('Test failed:', error); console.error("Test failed:", error);
} }

View File

@@ -2,7 +2,9 @@ import z from "zod";
import ipaddr from "ipaddr.js"; import ipaddr from "ipaddr.js";
export function isValidCIDR(cidr: string): boolean { export function isValidCIDR(cidr: string): boolean {
return z.cidrv4().safeParse(cidr).success || z.cidrv6().safeParse(cidr).success; return (
z.cidrv4().safeParse(cidr).success || z.cidrv6().safeParse(cidr).success
);
} }
export function isValidIP(ip: string): boolean { export function isValidIP(ip: string): boolean {
@@ -69,11 +71,11 @@ export function isUrlValid(url: string | undefined) {
if (!url) return true; // the link is optional in the schema so if it's empty it's valid if (!url) return true; // the link is optional in the schema so if it's empty it's valid
var pattern = new RegExp( var pattern = new RegExp(
"^(https?:\\/\\/)?" + // protocol "^(https?:\\/\\/)?" + // protocol
"((([a-z\\d]([a-z\\d-]*[a-z\\d])*)\\.)+[a-z]{2,}|" + // domain name "((([a-z\\d]([a-z\\d-]*[a-z\\d])*)\\.)+[a-z]{2,}|" + // domain name
"((\\d{1,3}\\.){3}\\d{1,3}))" + // OR ip (v4) address "((\\d{1,3}\\.){3}\\d{1,3}))" + // OR ip (v4) address
"(\\:\\d+)?(\\/[-a-z\\d%_.~+]*)*" + // port and path "(\\:\\d+)?(\\/[-a-z\\d%_.~+]*)*" + // port and path
"(\\?[;&a-z\\d%_.~+=-]*)?" + // query string "(\\?[;&a-z\\d%_.~+=-]*)?" + // query string
"(\\#[-a-z\\d_]*)?$", "(\\#[-a-z\\d_]*)?$",
"i" "i"
); );
return !!pattern.test(url); return !!pattern.test(url);
@@ -168,14 +170,14 @@ export function validateHeaders(headers: string): boolean {
} }
export function isSecondLevelDomain(domain: string): boolean { export function isSecondLevelDomain(domain: string): boolean {
if (!domain || typeof domain !== 'string') { if (!domain || typeof domain !== "string") {
return false; return false;
} }
const trimmedDomain = domain.trim().toLowerCase(); const trimmedDomain = domain.trim().toLowerCase();
// Split into parts // Split into parts
const parts = trimmedDomain.split('.'); const parts = trimmedDomain.split(".");
// Should have exactly 2 parts for a second-level domain (e.g., "example.com") // Should have exactly 2 parts for a second-level domain (e.g., "example.com")
if (parts.length !== 2) { if (parts.length !== 2) {

View File

@@ -20,6 +20,6 @@ export const errorHandlerMiddleware: ErrorRequestHandler = (
error: true, error: true,
message: error.message || "Internal Server Error", message: error.message || "Internal Server Error",
status: statusCode, status: statusCode,
stack: process.env.ENVIRONMENT === "prod" ? null : error.stack, stack: process.env.ENVIRONMENT === "prod" ? null : error.stack
}); });
}; };

View File

@@ -8,13 +8,13 @@ import HttpCode from "@server/types/HttpCode";
export async function getUserOrgs( export async function getUserOrgs(
req: Request, req: Request,
res: Response, res: Response,
next: NextFunction, next: NextFunction
) { ) {
const userId = req.user?.userId; // Assuming you have user information in the request const userId = req.user?.userId; // Assuming you have user information in the request
if (!userId) { if (!userId) {
return next( return next(
createHttpError(HttpCode.UNAUTHORIZED, "User not authenticated"), createHttpError(HttpCode.UNAUTHORIZED, "User not authenticated")
); );
} }
@@ -22,7 +22,7 @@ export async function getUserOrgs(
const userOrganizations = await db const userOrganizations = await db
.select({ .select({
orgId: userOrgs.orgId, orgId: userOrgs.orgId,
roleId: userOrgs.roleId, roleId: userOrgs.roleId
}) })
.from(userOrgs) .from(userOrgs)
.where(eq(userOrgs.userId, userId)); .where(eq(userOrgs.userId, userId));
@@ -38,8 +38,8 @@ export async function getUserOrgs(
next( next(
createHttpError( createHttpError(
HttpCode.INTERNAL_SERVER_ERROR, HttpCode.INTERNAL_SERVER_ERROR,
"Error retrieving user organizations", "Error retrieving user organizations"
), )
); );
} }
} }

View File

@@ -97,7 +97,6 @@ export async function verifyApiKeyAccessTokenAccess(
); );
} }
return next(); return next();
} catch (e) { } catch (e) {
return next( return next(

View File

@@ -11,7 +11,7 @@ export async function verifyApiKeyApiKeyAccess(
next: NextFunction next: NextFunction
) { ) {
try { try {
const {apiKey: callerApiKey } = req; const { apiKey: callerApiKey } = req;
const apiKeyId = const apiKeyId =
req.params.apiKeyId || req.body.apiKeyId || req.query.apiKeyId; req.params.apiKeyId || req.body.apiKeyId || req.query.apiKeyId;
@@ -44,7 +44,10 @@ export async function verifyApiKeyApiKeyAccess(
.select() .select()
.from(apiKeyOrg) .from(apiKeyOrg)
.where( .where(
and(eq(apiKeys.apiKeyId, callerApiKey.apiKeyId), eq(apiKeyOrg.orgId, orgId)) and(
eq(apiKeys.apiKeyId, callerApiKey.apiKeyId),
eq(apiKeyOrg.orgId, orgId)
)
) )
.limit(1); .limit(1);

View File

@@ -11,9 +11,12 @@ export async function verifyApiKeySetResourceClients(
next: NextFunction next: NextFunction
) { ) {
const apiKey = req.apiKey; const apiKey = req.apiKey;
const singleClientId = req.params.clientId || req.body.clientId || req.query.clientId; const singleClientId =
req.params.clientId || req.body.clientId || req.query.clientId;
const { clientIds } = req.body; const { clientIds } = req.body;
const allClientIds = clientIds || (singleClientId ? [parseInt(singleClientId as string)] : []); const allClientIds =
clientIds ||
(singleClientId ? [parseInt(singleClientId as string)] : []);
if (!apiKey) { if (!apiKey) {
return next( return next(
@@ -70,4 +73,3 @@ export async function verifyApiKeySetResourceClients(
); );
} }
} }

Some files were not shown because too many files have changed in this diff Show More