Compare commits

..

5 Commits

Author SHA1 Message Date
Owen Schwartz
4b3375ab8e Merge pull request #2783 from fosrl/dev
Fix 1.17.0
2026-04-03 22:42:03 -04:00
Owen Schwartz
6ce165bfd5 Merge pull request #2780 from fosrl/dev
1.17.0
2026-04-03 18:19:40 -04:00
Owen Schwartz
035644eaf7 Merge pull request #2778 from fosrl/dev
1.17.0-s.2
2026-04-03 12:35:03 -04:00
Owen Schwartz
16e7233a3e Merge pull request #2777 from fosrl/dev
1.17.0-s.1
2026-04-03 12:19:23 -04:00
Owen Schwartz
1f74e1b320 Merge pull request #2776 from fosrl/dev
1.17.0-s.0
2026-04-03 11:39:35 -04:00
75 changed files with 1600 additions and 4326 deletions

1
.github/CODEOWNERS vendored
View File

@@ -1 +0,0 @@
* @oschwartz10612 @miloschwartz

View File

@@ -1817,11 +1817,6 @@
"editInternalResourceDialogModePort": "Port", "editInternalResourceDialogModePort": "Port",
"editInternalResourceDialogModeHost": "Host", "editInternalResourceDialogModeHost": "Host",
"editInternalResourceDialogModeCidr": "CIDR", "editInternalResourceDialogModeCidr": "CIDR",
"editInternalResourceDialogModeHttp": "HTTP",
"editInternalResourceDialogModeHttps": "HTTPS",
"editInternalResourceDialogScheme": "Scheme",
"editInternalResourceDialogEnableSsl": "Enable SSL",
"editInternalResourceDialogEnableSslDescription": "Enable SSL/TLS encryption for secure HTTPS connections to the destination.",
"editInternalResourceDialogDestination": "Destination", "editInternalResourceDialogDestination": "Destination",
"editInternalResourceDialogDestinationHostDescription": "The IP address or hostname of the resource on the site's network.", "editInternalResourceDialogDestinationHostDescription": "The IP address or hostname of the resource on the site's network.",
"editInternalResourceDialogDestinationIPDescription": "The IP or hostname address of the resource on the site's network.", "editInternalResourceDialogDestinationIPDescription": "The IP or hostname address of the resource on the site's network.",
@@ -1837,7 +1832,6 @@
"createInternalResourceDialogName": "Name", "createInternalResourceDialogName": "Name",
"createInternalResourceDialogSite": "Site", "createInternalResourceDialogSite": "Site",
"selectSite": "Select site...", "selectSite": "Select site...",
"multiSitesSelectorSitesCount": "{count, plural, one {# site} other {# sites}}",
"noSitesFound": "No sites found.", "noSitesFound": "No sites found.",
"createInternalResourceDialogProtocol": "Protocol", "createInternalResourceDialogProtocol": "Protocol",
"createInternalResourceDialogTcp": "TCP", "createInternalResourceDialogTcp": "TCP",
@@ -1866,19 +1860,11 @@
"createInternalResourceDialogModePort": "Port", "createInternalResourceDialogModePort": "Port",
"createInternalResourceDialogModeHost": "Host", "createInternalResourceDialogModeHost": "Host",
"createInternalResourceDialogModeCidr": "CIDR", "createInternalResourceDialogModeCidr": "CIDR",
"createInternalResourceDialogModeHttp": "HTTP",
"createInternalResourceDialogModeHttps": "HTTPS",
"scheme": "Scheme",
"createInternalResourceDialogScheme": "Scheme",
"createInternalResourceDialogEnableSsl": "Enable SSL",
"createInternalResourceDialogEnableSslDescription": "Enable SSL/TLS encryption for secure HTTPS connections to the destination.",
"createInternalResourceDialogDestination": "Destination", "createInternalResourceDialogDestination": "Destination",
"createInternalResourceDialogDestinationHostDescription": "The IP address or hostname of the resource on the site's network.", "createInternalResourceDialogDestinationHostDescription": "The IP address or hostname of the resource on the site's network.",
"createInternalResourceDialogDestinationCidrDescription": "The CIDR range of the resource on the site's network.", "createInternalResourceDialogDestinationCidrDescription": "The CIDR range of the resource on the site's network.",
"createInternalResourceDialogAlias": "Alias", "createInternalResourceDialogAlias": "Alias",
"createInternalResourceDialogAliasDescription": "An optional internal DNS alias for this resource.", "createInternalResourceDialogAliasDescription": "An optional internal DNS alias for this resource.",
"internalResourceDownstreamSchemeRequired": "Scheme is required for HTTP resources",
"internalResourceHttpPortRequired": "Destination port is required for HTTP resources",
"siteConfiguration": "Configuration", "siteConfiguration": "Configuration",
"siteAcceptClientConnections": "Accept Client Connections", "siteAcceptClientConnections": "Accept Client Connections",
"siteAcceptClientConnectionsDescription": "Allow user devices and clients to access resources on this site. This can be changed later.", "siteAcceptClientConnectionsDescription": "Allow user devices and clients to access resources on this site. This can be changed later.",
@@ -2127,11 +2113,9 @@
"addDomainToEnableCustomAuthPages": "Users will be able to access the organization's login page and complete resource authentication using this domain.", "addDomainToEnableCustomAuthPages": "Users will be able to access the organization's login page and complete resource authentication using this domain.",
"selectDomainForOrgAuthPage": "Select a domain for the organization's authentication page", "selectDomainForOrgAuthPage": "Select a domain for the organization's authentication page",
"domainPickerProvidedDomain": "Provided Domain", "domainPickerProvidedDomain": "Provided Domain",
"domainPickerFreeProvidedDomain": "Provided Domain", "domainPickerFreeProvidedDomain": "Free Provided Domain",
"domainPickerFreeDomainsPaidFeature": "Provided domains are a paid feature. Subscribe to get a domain included with your plan — no need to bring your own.",
"domainPickerVerified": "Verified", "domainPickerVerified": "Verified",
"domainPickerUnverified": "Unverified", "domainPickerUnverified": "Unverified",
"domainPickerManual": "Manual",
"domainPickerInvalidSubdomainStructure": "This subdomain contains invalid characters or structure. It will be sanitized automatically when you save.", "domainPickerInvalidSubdomainStructure": "This subdomain contains invalid characters or structure. It will be sanitized automatically when you save.",
"domainPickerError": "Error", "domainPickerError": "Error",
"domainPickerErrorLoadDomains": "Failed to load organization domains", "domainPickerErrorLoadDomains": "Failed to load organization domains",
@@ -2438,7 +2422,6 @@
"validPassword": "Valid Password", "validPassword": "Valid Password",
"validEmail": "Valid email", "validEmail": "Valid email",
"validSSO": "Valid SSO", "validSSO": "Valid SSO",
"connectedClient": "Connected Client",
"resourceBlocked": "Resource Blocked", "resourceBlocked": "Resource Blocked",
"droppedByRule": "Dropped by Rule", "droppedByRule": "Dropped by Rule",
"noSessions": "No Sessions", "noSessions": "No Sessions",
@@ -2676,12 +2659,8 @@
"editInternalResourceDialogAddUsers": "Add Users", "editInternalResourceDialogAddUsers": "Add Users",
"editInternalResourceDialogAddClients": "Add Clients", "editInternalResourceDialogAddClients": "Add Clients",
"editInternalResourceDialogDestinationLabel": "Destination", "editInternalResourceDialogDestinationLabel": "Destination",
"editInternalResourceDialogDestinationDescription": "Choose where this resource runs and how clients reach it. Selecting multiple sites will create a high availability resource that can be accessed from any of the selected sites.", "editInternalResourceDialogDestinationDescription": "Specify the destination address for the internal resource. This can be a hostname, IP address, or CIDR range depending on the selected mode. Optionally set an internal DNS alias for easier identification.",
"editInternalResourceDialogPortRestrictionsDescription": "Restrict access to specific TCP/UDP ports or allow/block all ports.", "editInternalResourceDialogPortRestrictionsDescription": "Restrict access to specific TCP/UDP ports or allow/block all ports.",
"createInternalResourceDialogHttpConfiguration": "HTTP configuration",
"createInternalResourceDialogHttpConfigurationDescription": "Choose the domain clients will use to reach this resource over HTTP or HTTPS.",
"editInternalResourceDialogHttpConfiguration": "HTTP configuration",
"editInternalResourceDialogHttpConfigurationDescription": "Choose the domain clients will use to reach this resource over HTTP or HTTPS.",
"editInternalResourceDialogTcp": "TCP", "editInternalResourceDialogTcp": "TCP",
"editInternalResourceDialogUdp": "UDP", "editInternalResourceDialogUdp": "UDP",
"editInternalResourceDialogIcmp": "ICMP", "editInternalResourceDialogIcmp": "ICMP",
@@ -2720,8 +2699,6 @@
"maintenancePageMessagePlaceholder": "We'll be back soon! Our site is currently undergoing scheduled maintenance.", "maintenancePageMessagePlaceholder": "We'll be back soon! Our site is currently undergoing scheduled maintenance.",
"maintenancePageMessageDescription": "Detailed message explaining the maintenance", "maintenancePageMessageDescription": "Detailed message explaining the maintenance",
"maintenancePageTimeTitle": "Estimated Completion Time (Optional)", "maintenancePageTimeTitle": "Estimated Completion Time (Optional)",
"privateMaintenanceScreenTitle": "Private Placeholder Screen",
"privateMaintenanceScreenMessage": "This domain is being used on a private resource. Please connect using the Pangolin client to access this resource.",
"maintenanceTime": "e.g., 2 hours, Nov 1 at 5:00 PM", "maintenanceTime": "e.g., 2 hours, Nov 1 at 5:00 PM",
"maintenanceEstimatedTimeDescription": "When you expect maintenance to be completed", "maintenanceEstimatedTimeDescription": "When you expect maintenance to be completed",
"editDomain": "Edit Domain", "editDomain": "Edit Domain",

View File

@@ -57,9 +57,7 @@ export const orgs = pgTable("orgs", {
settingsLogRetentionDaysAction: integer("settingsLogRetentionDaysAction") // where 0 = dont keep logs and -1 = keep forever and 9001 = end of the following year settingsLogRetentionDaysAction: integer("settingsLogRetentionDaysAction") // where 0 = dont keep logs and -1 = keep forever and 9001 = end of the following year
.notNull() .notNull()
.default(0), .default(0),
settingsLogRetentionDaysConnection: integer( settingsLogRetentionDaysConnection: integer("settingsLogRetentionDaysConnection") // where 0 = dont keep logs and -1 = keep forever and 9001 = end of the following year
"settingsLogRetentionDaysConnection"
) // where 0 = dont keep logs and -1 = keep forever and 9001 = end of the following year
.notNull() .notNull()
.default(0), .default(0),
sshCaPrivateKey: text("sshCaPrivateKey"), // Encrypted SSH CA private key (PEM format) sshCaPrivateKey: text("sshCaPrivateKey"), // Encrypted SSH CA private key (PEM format)
@@ -103,9 +101,7 @@ export const sites = pgTable("sites", {
lastHolePunch: bigint("lastHolePunch", { mode: "number" }), lastHolePunch: bigint("lastHolePunch", { mode: "number" }),
listenPort: integer("listenPort"), listenPort: integer("listenPort"),
dockerSocketEnabled: boolean("dockerSocketEnabled").notNull().default(true), dockerSocketEnabled: boolean("dockerSocketEnabled").notNull().default(true),
status: varchar("status") status: varchar("status").$type<"pending" | "approved">().default("approved")
.$type<"pending" | "approved">()
.default("approved")
}); });
export const resources = pgTable("resources", { export const resources = pgTable("resources", {
@@ -226,23 +222,16 @@ export const exitNodes = pgTable("exitNodes", {
export const siteResources = pgTable("siteResources", { export const siteResources = pgTable("siteResources", {
// this is for the clients // this is for the clients
siteResourceId: serial("siteResourceId").primaryKey(), siteResourceId: serial("siteResourceId").primaryKey(),
siteId: integer("siteId")
.notNull()
.references(() => sites.siteId, { onDelete: "cascade" }),
orgId: varchar("orgId") orgId: varchar("orgId")
.notNull() .notNull()
.references(() => orgs.orgId, { onDelete: "cascade" }), .references(() => orgs.orgId, { onDelete: "cascade" }),
networkId: integer("networkId").references(() => networks.networkId, {
onDelete: "set null"
}),
defaultNetworkId: integer("defaultNetworkId").references(
() => networks.networkId,
{
onDelete: "restrict"
}
),
niceId: varchar("niceId").notNull(), niceId: varchar("niceId").notNull(),
name: varchar("name").notNull(), name: varchar("name").notNull(),
ssl: boolean("ssl").notNull().default(false), mode: varchar("mode").$type<"host" | "cidr">().notNull(), // "host" | "cidr" | "port"
mode: varchar("mode").$type<"host" | "cidr" | "http">().notNull(), // "host" | "cidr" | "http" protocol: varchar("protocol"), // only for port mode
scheme: varchar("scheme").$type<"http" | "https">(), // only for when we are doing https or http mode
proxyPort: integer("proxyPort"), // only for port mode proxyPort: integer("proxyPort"), // only for port mode
destinationPort: integer("destinationPort"), // only for port mode destinationPort: integer("destinationPort"), // only for port mode
destination: varchar("destination").notNull(), // ip, cidr, hostname; validate against the mode destination: varchar("destination").notNull(), // ip, cidr, hostname; validate against the mode
@@ -255,38 +244,7 @@ export const siteResources = pgTable("siteResources", {
authDaemonPort: integer("authDaemonPort").default(22123), authDaemonPort: integer("authDaemonPort").default(22123),
authDaemonMode: varchar("authDaemonMode", { length: 32 }) authDaemonMode: varchar("authDaemonMode", { length: 32 })
.$type<"site" | "remote">() .$type<"site" | "remote">()
.default("site"), .default("site")
domainId: varchar("domainId").references(() => domains.domainId, {
onDelete: "set null"
}),
subdomain: varchar("subdomain"),
fullDomain: varchar("fullDomain")
});
export const networks = pgTable("networks", {
networkId: serial("networkId").primaryKey(),
niceId: text("niceId"),
name: text("name"),
scope: varchar("scope")
.$type<"global" | "resource">()
.notNull()
.default("global"),
orgId: varchar("orgId")
.references(() => orgs.orgId, {
onDelete: "cascade"
})
.notNull()
});
export const siteNetworks = pgTable("siteNetworks", {
siteId: integer("siteId")
.notNull()
.references(() => sites.siteId, {
onDelete: "cascade"
}),
networkId: integer("networkId")
.notNull()
.references(() => networks.networkId, { onDelete: "cascade" })
}); });
export const clientSiteResources = pgTable("clientSiteResources", { export const clientSiteResources = pgTable("clientSiteResources", {
@@ -1036,7 +994,6 @@ export const requestAuditLog = pgTable(
actor: text("actor"), actor: text("actor"),
actorId: text("actorId"), actorId: text("actorId"),
resourceId: integer("resourceId"), resourceId: integer("resourceId"),
siteResourceId: integer("siteResourceId"),
ip: text("ip"), ip: text("ip"),
location: text("location"), location: text("location"),
userAgent: text("userAgent"), userAgent: text("userAgent"),
@@ -1149,4 +1106,3 @@ export type RequestAuditLog = InferSelectModel<typeof requestAuditLog>;
export type RoundTripMessageTracker = InferSelectModel< export type RoundTripMessageTracker = InferSelectModel<
typeof roundTripMessageTracker typeof roundTripMessageTracker
>; >;
export type Network = InferSelectModel<typeof networks>;

View File

@@ -54,9 +54,7 @@ export const orgs = sqliteTable("orgs", {
settingsLogRetentionDaysAction: integer("settingsLogRetentionDaysAction") // where 0 = dont keep logs and -1 = keep forever and 9001 = end of the following year settingsLogRetentionDaysAction: integer("settingsLogRetentionDaysAction") // where 0 = dont keep logs and -1 = keep forever and 9001 = end of the following year
.notNull() .notNull()
.default(0), .default(0),
settingsLogRetentionDaysConnection: integer( settingsLogRetentionDaysConnection: integer("settingsLogRetentionDaysConnection") // where 0 = dont keep logs and -1 = keep forever and 9001 = end of the following year
"settingsLogRetentionDaysConnection"
) // where 0 = dont keep logs and -1 = keep forever and 9001 = end of the following year
.notNull() .notNull()
.default(0), .default(0),
sshCaPrivateKey: text("sshCaPrivateKey"), // Encrypted SSH CA private key (PEM format) sshCaPrivateKey: text("sshCaPrivateKey"), // Encrypted SSH CA private key (PEM format)
@@ -94,9 +92,6 @@ export const sites = sqliteTable("sites", {
exitNodeId: integer("exitNode").references(() => exitNodes.exitNodeId, { exitNodeId: integer("exitNode").references(() => exitNodes.exitNodeId, {
onDelete: "set null" onDelete: "set null"
}), }),
networkId: integer("networkId").references(() => networks.networkId, {
onDelete: "set null"
}),
name: text("name").notNull(), name: text("name").notNull(),
pubKey: text("pubKey"), pubKey: text("pubKey"),
subnet: text("subnet"), subnet: text("subnet"),
@@ -255,21 +250,16 @@ export const siteResources = sqliteTable("siteResources", {
siteResourceId: integer("siteResourceId").primaryKey({ siteResourceId: integer("siteResourceId").primaryKey({
autoIncrement: true autoIncrement: true
}), }),
siteId: integer("siteId")
.notNull()
.references(() => sites.siteId, { onDelete: "cascade" }),
orgId: text("orgId") orgId: text("orgId")
.notNull() .notNull()
.references(() => orgs.orgId, { onDelete: "cascade" }), .references(() => orgs.orgId, { onDelete: "cascade" }),
networkId: integer("networkId").references(() => networks.networkId, {
onDelete: "set null"
}),
defaultNetworkId: integer("defaultNetworkId").references(
() => networks.networkId,
{ onDelete: "restrict" }
),
niceId: text("niceId").notNull(), niceId: text("niceId").notNull(),
name: text("name").notNull(), name: text("name").notNull(),
ssl: integer("ssl", { mode: "boolean" }).notNull().default(false), mode: text("mode").$type<"host" | "cidr">().notNull(), // "host" | "cidr" | "port"
mode: text("mode").$type<"host" | "cidr" | "http">().notNull(), // "host" | "cidr" | "http" protocol: text("protocol"), // only for port mode
scheme: text("scheme").$type<"http" | "https">(), // only for when we are doing https or http mode
proxyPort: integer("proxyPort"), // only for port mode proxyPort: integer("proxyPort"), // only for port mode
destinationPort: integer("destinationPort"), // only for port mode destinationPort: integer("destinationPort"), // only for port mode
destination: text("destination").notNull(), // ip, cidr, hostname destination: text("destination").notNull(), // ip, cidr, hostname
@@ -284,36 +274,7 @@ export const siteResources = sqliteTable("siteResources", {
authDaemonPort: integer("authDaemonPort").default(22123), authDaemonPort: integer("authDaemonPort").default(22123),
authDaemonMode: text("authDaemonMode") authDaemonMode: text("authDaemonMode")
.$type<"site" | "remote">() .$type<"site" | "remote">()
.default("site"), .default("site")
domainId: text("domainId").references(() => domains.domainId, {
onDelete: "set null"
}),
subdomain: text("subdomain"),
fullDomain: text("fullDomain"),
});
export const networks = sqliteTable("networks", {
networkId: integer("networkId").primaryKey({ autoIncrement: true }),
niceId: text("niceId"),
name: text("name"),
scope: text("scope")
.$type<"global" | "resource">()
.notNull()
.default("global"),
orgId: text("orgId")
.notNull()
.references(() => orgs.orgId, { onDelete: "cascade" })
});
export const siteNetworks = sqliteTable("siteNetworks", {
siteId: integer("siteId")
.notNull()
.references(() => sites.siteId, {
onDelete: "cascade"
}),
networkId: integer("networkId")
.notNull()
.references(() => networks.networkId, { onDelete: "cascade" })
}); });
export const clientSiteResources = sqliteTable("clientSiteResources", { export const clientSiteResources = sqliteTable("clientSiteResources", {
@@ -1135,7 +1096,6 @@ export const requestAuditLog = sqliteTable(
actor: text("actor"), actor: text("actor"),
actorId: text("actorId"), actorId: text("actorId"),
resourceId: integer("resourceId"), resourceId: integer("resourceId"),
siteResourceId: integer("siteResourceId"),
ip: text("ip"), ip: text("ip"),
location: text("location"), location: text("location"),
userAgent: text("userAgent"), userAgent: text("userAgent"),
@@ -1235,7 +1195,6 @@ export type ApiKey = InferSelectModel<typeof apiKeys>;
export type ApiKeyAction = InferSelectModel<typeof apiKeyActions>; export type ApiKeyAction = InferSelectModel<typeof apiKeyActions>;
export type ApiKeyOrg = InferSelectModel<typeof apiKeyOrg>; export type ApiKeyOrg = InferSelectModel<typeof apiKeyOrg>;
export type SiteResource = InferSelectModel<typeof siteResources>; export type SiteResource = InferSelectModel<typeof siteResources>;
export type Network = InferSelectModel<typeof networks>;
export type OrgDomains = InferSelectModel<typeof orgDomains>; export type OrgDomains = InferSelectModel<typeof orgDomains>;
export type SetupToken = InferSelectModel<typeof setupTokens>; export type SetupToken = InferSelectModel<typeof setupTokens>;
export type HostMeta = InferSelectModel<typeof hostMeta>; export type HostMeta = InferSelectModel<typeof hostMeta>;

View File

@@ -22,7 +22,6 @@ import { TraefikConfigManager } from "@server/lib/traefik/TraefikConfigManager";
import { initCleanup } from "#dynamic/cleanup"; import { initCleanup } from "#dynamic/cleanup";
import license from "#dynamic/license/license"; import license from "#dynamic/license/license";
import { initLogCleanupInterval } from "@server/lib/cleanupLogs"; import { initLogCleanupInterval } from "@server/lib/cleanupLogs";
import { initAcmeCertSync } from "#dynamic/lib/acmeCertSync";
import { fetchServerIp } from "@server/lib/serverIpService"; import { fetchServerIp } from "@server/lib/serverIpService";
async function startServers() { async function startServers() {
@@ -40,7 +39,6 @@ async function startServers() {
initTelemetryClient(); initTelemetryClient();
initLogCleanupInterval(); initLogCleanupInterval();
initAcmeCertSync();
// Start all servers // Start all servers
const apiServer = createApiServer(); const apiServer = createApiServer();

View File

@@ -1,3 +0,0 @@
export function initAcmeCertSync(): void {
// stub
}

View File

@@ -19,9 +19,7 @@ export enum TierFeature {
SshPam = "sshPam", SshPam = "sshPam",
FullRbac = "fullRbac", FullRbac = "fullRbac",
SiteProvisioningKeys = "siteProvisioningKeys", // handle downgrade by revoking keys if needed SiteProvisioningKeys = "siteProvisioningKeys", // handle downgrade by revoking keys if needed
SIEM = "siem", // handle downgrade by disabling SIEM integrations SIEM = "siem" // handle downgrade by disabling SIEM integrations
HTTPPrivateResources = "httpPrivateResources", // handle downgrade by disabling HTTP private resources
DomainNamespaces = "domainNamespaces" // handle downgrade by removing custom domain namespaces
} }
export const tierMatrix: Record<TierFeature, Tier[]> = { export const tierMatrix: Record<TierFeature, Tier[]> = {
@@ -58,7 +56,5 @@ export const tierMatrix: Record<TierFeature, Tier[]> = {
[TierFeature.SshPam]: ["tier1", "tier3", "enterprise"], [TierFeature.SshPam]: ["tier1", "tier3", "enterprise"],
[TierFeature.FullRbac]: ["tier1", "tier2", "tier3", "enterprise"], [TierFeature.FullRbac]: ["tier1", "tier2", "tier3", "enterprise"],
[TierFeature.SiteProvisioningKeys]: ["tier3", "enterprise"], [TierFeature.SiteProvisioningKeys]: ["tier3", "enterprise"],
[TierFeature.SIEM]: ["enterprise"], [TierFeature.SIEM]: ["enterprise"]
[TierFeature.HTTPPrivateResources]: ["tier3", "enterprise"],
[TierFeature.DomainNamespaces]: ["tier1", "tier2", "tier3", "enterprise"]
}; };

View File

@@ -121,8 +121,8 @@ export async function applyBlueprint({
for (const result of clientResourcesResults) { for (const result of clientResourcesResults) {
if ( if (
result.oldSiteResource && result.oldSiteResource &&
JSON.stringify(result.newSites?.sort()) !== result.oldSiteResource.siteId !=
JSON.stringify(result.oldSites?.sort()) result.newSiteResource.siteId
) { ) {
// query existing associations // query existing associations
const existingRoleIds = await trx const existingRoleIds = await trx
@@ -222,46 +222,38 @@ export async function applyBlueprint({
trx trx
); );
} else { } else {
let good = true; const [newSite] = await trx
for (const newSite of result.newSites) { .select()
const [site] = await trx .from(sites)
.select() .innerJoin(newts, eq(sites.siteId, newts.siteId))
.from(sites) .where(
.innerJoin(newts, eq(sites.siteId, newts.siteId)) and(
.where( eq(sites.siteId, result.newSiteResource.siteId),
and( eq(sites.orgId, orgId),
eq(sites.siteId, newSite.siteId), eq(sites.type, "newt"),
eq(sites.orgId, orgId), isNotNull(sites.pubKey)
eq(sites.type, "newt"),
isNotNull(sites.pubKey)
)
) )
.limit(1); )
.limit(1);
if (!site) {
logger.debug(
`No newt sites found for client resource ${result.newSiteResource.siteResourceId}, skipping target update`
);
good = false;
break;
}
if (!newSite) {
logger.debug( logger.debug(
`Updating client resource ${result.newSiteResource.siteResourceId} on site ${newSite.siteId}` `No newt site found for client resource ${result.newSiteResource.siteResourceId}, skipping target update`
); );
}
if (!good) {
continue; continue;
} }
logger.debug(
`Updating client resource ${result.newSiteResource.siteResourceId} on site ${newSite.sites.siteId}`
);
await handleMessagingForUpdatedSiteResource( await handleMessagingForUpdatedSiteResource(
result.oldSiteResource, result.oldSiteResource,
result.newSiteResource, result.newSiteResource,
result.newSites.map((site) => ({ {
siteId: site.siteId, siteId: newSite.sites.siteId,
orgId: result.newSiteResource.orgId orgId: newSite.sites.orgId
})), },
trx trx
); );
} }

View File

@@ -1,104 +1,24 @@
import { import {
clients, clients,
clientSiteResources, clientSiteResources,
domains,
orgDomains,
roles, roles,
roleSiteResources, roleSiteResources,
Site,
SiteResource, SiteResource,
siteNetworks,
siteResources, siteResources,
Transaction, Transaction,
userOrgs, userOrgs,
users, users,
userSiteResources, userSiteResources
networks
} from "@server/db"; } from "@server/db";
import { sites } from "@server/db"; import { sites } from "@server/db";
import { eq, and, ne, inArray, or, isNotNull } from "drizzle-orm"; import { eq, and, ne, inArray, or } from "drizzle-orm";
import { Config } from "./types"; import { Config } from "./types";
import logger from "@server/logger"; import logger from "@server/logger";
import { getNextAvailableAliasAddress } from "../ip"; import { getNextAvailableAliasAddress } from "../ip";
import { createCertificate } from "#dynamic/routers/certificates/createCertificate";
async function getDomainForSiteResource(
siteResourceId: number | undefined,
fullDomain: string,
orgId: string,
trx: Transaction
): Promise<{ subdomain: string | null; domainId: string }> {
const [fullDomainExists] = await trx
.select({ siteResourceId: siteResources.siteResourceId })
.from(siteResources)
.where(
and(
eq(siteResources.fullDomain, fullDomain),
eq(siteResources.orgId, orgId),
siteResourceId
? ne(siteResources.siteResourceId, siteResourceId)
: isNotNull(siteResources.siteResourceId)
)
)
.limit(1);
if (fullDomainExists) {
throw new Error(
`Site resource already exists with domain: ${fullDomain} in org ${orgId}`
);
}
const possibleDomains = await trx
.select()
.from(domains)
.innerJoin(orgDomains, eq(domains.domainId, orgDomains.domainId))
.where(and(eq(orgDomains.orgId, orgId), eq(domains.verified, true)))
.execute();
if (possibleDomains.length === 0) {
throw new Error(
`Domain not found for full-domain: ${fullDomain} in org ${orgId}`
);
}
const validDomains = possibleDomains.filter((domain) => {
if (domain.domains.type == "ns" || domain.domains.type == "wildcard") {
return (
fullDomain === domain.domains.baseDomain ||
fullDomain.endsWith(`.${domain.domains.baseDomain}`)
);
} else if (domain.domains.type == "cname") {
return fullDomain === domain.domains.baseDomain;
}
});
if (validDomains.length === 0) {
throw new Error(
`Domain not found for full-domain: ${fullDomain} in org ${orgId}`
);
}
const domainSelection = validDomains[0].domains;
const baseDomain = domainSelection.baseDomain;
let subdomain: string | null = null;
if (fullDomain !== baseDomain) {
subdomain = fullDomain.replace(`.${baseDomain}`, "");
}
await createCertificate(domainSelection.domainId, fullDomain, trx);
return {
subdomain,
domainId: domainSelection.domainId
};
}
export type ClientResourcesResults = { export type ClientResourcesResults = {
newSiteResource: SiteResource; newSiteResource: SiteResource;
oldSiteResource?: SiteResource; oldSiteResource?: SiteResource;
newSites: { siteId: number }[];
oldSites: { siteId: number }[];
}[]; }[];
export async function updateClientResources( export async function updateClientResources(
@@ -123,104 +43,53 @@ export async function updateClientResources(
) )
.limit(1); .limit(1);
const existingSiteIds = existingResource?.networkId const resourceSiteId = resourceData.site;
? await trx let site;
.select({ siteId: sites.siteId })
.from(siteNetworks)
.where(eq(siteNetworks.networkId, existingResource.networkId))
: [];
let allSites: { siteId: number }[] = []; if (resourceSiteId) {
if (resourceData.site) { // Look up site by niceId
let siteSingle; [site] = await trx
const resourceSiteId = resourceData.site; .select({ siteId: sites.siteId })
.from(sites)
if (resourceSiteId) { .where(
// Look up site by niceId and(
[siteSingle] = await trx eq(sites.niceId, resourceSiteId),
.select({ siteId: sites.siteId }) eq(sites.orgId, orgId)
.from(sites)
.where(
and(
eq(sites.niceId, resourceSiteId),
eq(sites.orgId, orgId)
)
) )
.limit(1); )
} else if (siteId) { .limit(1);
// Use the provided siteId directly, but verify it belongs to the org } else if (siteId) {
[siteSingle] = await trx // Use the provided siteId directly, but verify it belongs to the org
.select({ siteId: sites.siteId }) [site] = await trx
.from(sites) .select({ siteId: sites.siteId })
.where( .from(sites)
and(eq(sites.siteId, siteId), eq(sites.orgId, orgId)) .where(and(eq(sites.siteId, siteId), eq(sites.orgId, orgId)))
) .limit(1);
.limit(1); } else {
} else { throw new Error(`Target site is required`);
throw new Error(`Target site is required`);
}
if (!siteSingle) {
throw new Error(
`Site not found: ${resourceSiteId} in org ${orgId}`
);
}
allSites.push(siteSingle);
} }
if (resourceData.sites) { if (!site) {
for (const siteNiceId of resourceData.sites) { throw new Error(
const [site] = await trx `Site not found: ${resourceSiteId} in org ${orgId}`
.select({ siteId: sites.siteId }) );
.from(sites)
.where(
and(
eq(sites.niceId, siteNiceId),
eq(sites.orgId, orgId)
)
)
.limit(1);
if (!site) {
throw new Error(
`Site not found: ${siteId} in org ${orgId}`
);
}
allSites.push(site);
}
} }
if (existingResource) { if (existingResource) {
let domainInfo:
| { subdomain: string | null; domainId: string }
| undefined;
if (resourceData["full-domain"] && resourceData.mode === "http") {
domainInfo = await getDomainForSiteResource(
existingResource.siteResourceId,
resourceData["full-domain"],
orgId,
trx
);
}
// Update existing resource // Update existing resource
const [updatedResource] = await trx const [updatedResource] = await trx
.update(siteResources) .update(siteResources)
.set({ .set({
name: resourceData.name || resourceNiceId, name: resourceData.name || resourceNiceId,
siteId: site.siteId,
mode: resourceData.mode, mode: resourceData.mode,
ssl: resourceData.ssl,
scheme: resourceData.scheme,
destination: resourceData.destination, destination: resourceData.destination,
destinationPort: resourceData["destination-port"],
enabled: true, // hardcoded for now enabled: true, // hardcoded for now
// enabled: resourceData.enabled ?? true, // enabled: resourceData.enabled ?? true,
alias: resourceData.alias || null, alias: resourceData.alias || null,
disableIcmp: resourceData["disable-icmp"], disableIcmp: resourceData["disable-icmp"],
tcpPortRangeString: resourceData["tcp-ports"], tcpPortRangeString: resourceData["tcp-ports"],
udpPortRangeString: resourceData["udp-ports"], udpPortRangeString: resourceData["udp-ports"]
fullDomain: resourceData["full-domain"] || null,
subdomain: domainInfo ? domainInfo.subdomain : null,
domainId: domainInfo ? domainInfo.domainId : null
}) })
.where( .where(
eq( eq(
@@ -231,21 +100,7 @@ export async function updateClientResources(
.returning(); .returning();
const siteResourceId = existingResource.siteResourceId; const siteResourceId = existingResource.siteResourceId;
const orgId = existingResource.orgId;
if (updatedResource.networkId) {
await trx
.delete(siteNetworks)
.where(
eq(siteNetworks.networkId, updatedResource.networkId)
);
for (const site of allSites) {
await trx.insert(siteNetworks).values({
siteId: site.siteId,
networkId: updatedResource.networkId
});
}
}
await trx await trx
.delete(clientSiteResources) .delete(clientSiteResources)
@@ -349,72 +204,37 @@ export async function updateClientResources(
results.push({ results.push({
newSiteResource: updatedResource, newSiteResource: updatedResource,
oldSiteResource: existingResource, oldSiteResource: existingResource
newSites: allSites,
oldSites: existingSiteIds
}); });
} else { } else {
let aliasAddress: string | null = null; let aliasAddress: string | null = null;
if (resourceData.mode === "host" || resourceData.mode === "http") { if (resourceData.mode == "host") {
// we can only have an alias on a host
aliasAddress = await getNextAvailableAliasAddress(orgId); aliasAddress = await getNextAvailableAliasAddress(orgId);
} }
let domainInfo:
| { subdomain: string | null; domainId: string }
| undefined;
if (resourceData["full-domain"] && resourceData.mode === "http") {
domainInfo = await getDomainForSiteResource(
undefined,
resourceData["full-domain"],
orgId,
trx
);
}
const [network] = await trx
.insert(networks)
.values({
scope: "resource",
orgId: orgId
})
.returning();
// Create new resource // Create new resource
const [newResource] = await trx const [newResource] = await trx
.insert(siteResources) .insert(siteResources)
.values({ .values({
orgId: orgId, orgId: orgId,
siteId: site.siteId,
niceId: resourceNiceId, niceId: resourceNiceId,
networkId: network.networkId,
defaultNetworkId: network.networkId,
name: resourceData.name || resourceNiceId, name: resourceData.name || resourceNiceId,
mode: resourceData.mode, mode: resourceData.mode,
ssl: resourceData.ssl,
scheme: resourceData.scheme,
destination: resourceData.destination, destination: resourceData.destination,
destinationPort: resourceData["destination-port"],
enabled: true, // hardcoded for now enabled: true, // hardcoded for now
// enabled: resourceData.enabled ?? true, // enabled: resourceData.enabled ?? true,
alias: resourceData.alias || null, alias: resourceData.alias || null,
aliasAddress: aliasAddress, aliasAddress: aliasAddress,
disableIcmp: resourceData["disable-icmp"], disableIcmp: resourceData["disable-icmp"],
tcpPortRangeString: resourceData["tcp-ports"], tcpPortRangeString: resourceData["tcp-ports"],
udpPortRangeString: resourceData["udp-ports"], udpPortRangeString: resourceData["udp-ports"]
fullDomain: resourceData["full-domain"] || null,
subdomain: domainInfo ? domainInfo.subdomain : null,
domainId: domainInfo ? domainInfo.domainId : null
}) })
.returning(); .returning();
const siteResourceId = newResource.siteResourceId; const siteResourceId = newResource.siteResourceId;
for (const site of allSites) {
await trx.insert(siteNetworks).values({
siteId: site.siteId,
networkId: network.networkId
});
}
const [adminRole] = await trx const [adminRole] = await trx
.select() .select()
.from(roles) .from(roles)
@@ -504,11 +324,7 @@ export async function updateClientResources(
`Created new client resource ${newResource.name} (${newResource.siteResourceId}) for org ${orgId}` `Created new client resource ${newResource.name} (${newResource.siteResourceId}) for org ${orgId}`
); );
results.push({ results.push({ newSiteResource: newResource });
newSiteResource: newResource,
newSites: allSites,
oldSites: existingSiteIds
});
} }
} }

View File

@@ -1100,7 +1100,7 @@ function checkIfTargetChanged(
return false; return false;
} }
export async function getDomain( async function getDomain(
resourceId: number | undefined, resourceId: number | undefined,
fullDomain: string, fullDomain: string,
orgId: string, orgId: string,

View File

@@ -164,7 +164,6 @@ export const ResourceSchema = z
name: z.string().optional(), name: z.string().optional(),
protocol: z.enum(["http", "tcp", "udp"]).optional(), protocol: z.enum(["http", "tcp", "udp"]).optional(),
ssl: z.boolean().optional(), ssl: z.boolean().optional(),
scheme: z.enum(["http", "https"]).optional(),
"full-domain": z.string().optional(), "full-domain": z.string().optional(),
"proxy-port": z.int().min(1).max(65535).optional(), "proxy-port": z.int().min(1).max(65535).optional(),
enabled: z.boolean().optional(), enabled: z.boolean().optional(),
@@ -326,20 +325,16 @@ export function isTargetsOnlyResource(resource: any): boolean {
export const ClientResourceSchema = z export const ClientResourceSchema = z
.object({ .object({
name: z.string().min(1).max(255), name: z.string().min(1).max(255),
mode: z.enum(["host", "cidr", "http"]), mode: z.enum(["host", "cidr"]),
site: z.string(), // DEPRECATED IN FAVOR OF sites site: z.string(),
sites: z.array(z.string()).optional().default([]),
// protocol: z.enum(["tcp", "udp"]).optional(), // protocol: z.enum(["tcp", "udp"]).optional(),
// proxyPort: z.int().positive().optional(), // proxyPort: z.int().positive().optional(),
"destination-port": z.int().positive().optional(), // destinationPort: z.int().positive().optional(),
destination: z.string().min(1), destination: z.string().min(1),
// enabled: z.boolean().default(true), // enabled: z.boolean().default(true),
"tcp-ports": portRangeStringSchema.optional().default("*"), "tcp-ports": portRangeStringSchema.optional().default("*"),
"udp-ports": portRangeStringSchema.optional().default("*"), "udp-ports": portRangeStringSchema.optional().default("*"),
"disable-icmp": z.boolean().optional().default(false), "disable-icmp": z.boolean().optional().default(false),
"full-domain": z.string().optional(),
ssl: z.boolean().optional(),
scheme: z.enum(["http", "https"]).optional().nullable(),
alias: z alias: z
.string() .string()
.regex( .regex(
@@ -482,39 +477,6 @@ export const ConfigSchema = z
}); });
} }
// Enforce the full-domain uniqueness across client-resources in the same stack
const clientFullDomainMap = new Map<string, string[]>();
Object.entries(config["client-resources"]).forEach(
([resourceKey, resource]) => {
const fullDomain = resource["full-domain"];
if (fullDomain) {
if (!clientFullDomainMap.has(fullDomain)) {
clientFullDomainMap.set(fullDomain, []);
}
clientFullDomainMap.get(fullDomain)!.push(resourceKey);
}
}
);
const clientFullDomainDuplicates = Array.from(
clientFullDomainMap.entries()
)
.filter(([_, resourceKeys]) => resourceKeys.length > 1)
.map(
([fullDomain, resourceKeys]) =>
`'${fullDomain}' used by resources: ${resourceKeys.join(", ")}`
)
.join("; ");
if (clientFullDomainDuplicates.length !== 0) {
ctx.addIssue({
code: z.ZodIssueCode.custom,
path: ["client-resources"],
message: `Duplicate 'full-domain' values found: ${clientFullDomainDuplicates}`
});
}
// Enforce proxy-port uniqueness within proxy-resources per protocol // Enforce proxy-port uniqueness within proxy-resources per protocol
const protocolPortMap = new Map<string, string[]>(); const protocolPortMap = new Map<string, string[]>();

39
server/lib/encryption.ts Normal file
View File

@@ -0,0 +1,39 @@
import crypto from "crypto";
export function encryptData(data: string, key: Buffer): string {
const algorithm = "aes-256-gcm";
const iv = crypto.randomBytes(16);
const cipher = crypto.createCipheriv(algorithm, key, iv);
let encrypted = cipher.update(data, "utf8", "hex");
encrypted += cipher.final("hex");
const authTag = cipher.getAuthTag();
// Combine IV, auth tag, and encrypted data
return iv.toString("hex") + ":" + authTag.toString("hex") + ":" + encrypted;
}
// Helper function to decrypt data (you'll need this to read certificates)
export function decryptData(encryptedData: string, key: Buffer): string {
const algorithm = "aes-256-gcm";
const parts = encryptedData.split(":");
if (parts.length !== 3) {
throw new Error("Invalid encrypted data format");
}
const iv = Buffer.from(parts[0], "hex");
const authTag = Buffer.from(parts[1], "hex");
const encrypted = parts[2];
const decipher = crypto.createDecipheriv(algorithm, key, iv);
decipher.setAuthTag(authTag);
let decrypted = decipher.update(encrypted, "hex", "utf8");
decrypted += decipher.final("utf8");
return decrypted;
}
// openssl rand -hex 32 > config/encryption.key

View File

@@ -5,7 +5,6 @@ import config from "@server/lib/config";
import z from "zod"; import z from "zod";
import logger from "@server/logger"; import logger from "@server/logger";
import semver from "semver"; import semver from "semver";
import { getValidCertificatesForDomains } from "#dynamic/lib/certificates";
interface IPRange { interface IPRange {
start: bigint; start: bigint;
@@ -478,9 +477,9 @@ export type Alias = { alias: string | null; aliasAddress: string | null };
export function generateAliasConfig(allSiteResources: SiteResource[]): Alias[] { export function generateAliasConfig(allSiteResources: SiteResource[]): Alias[] {
return allSiteResources return allSiteResources
.filter((sr) => sr.aliasAddress && ((sr.alias && sr.mode == "host") || (sr.fullDomain && sr.mode == "http"))) .filter((sr) => sr.alias && sr.aliasAddress && sr.mode == "host")
.map((sr) => ({ .map((sr) => ({
alias: sr.alias || sr.fullDomain, alias: sr.alias,
aliasAddress: sr.aliasAddress aliasAddress: sr.aliasAddress
})); }));
} }
@@ -583,26 +582,16 @@ export type SubnetProxyTargetV2 = {
protocol: "tcp" | "udp"; protocol: "tcp" | "udp";
}[]; }[];
resourceId?: number; resourceId?: number;
protocol?: "http" | "https"; // if set, this target only applies to the specified protocol
httpTargets?: HTTPTarget[];
tlsCert?: string;
tlsKey?: string;
}; };
export type HTTPTarget = { export function generateSubnetProxyTargetV2(
destAddr: string; // must be an IP or hostname
destPort: number;
scheme: "http" | "https";
};
export async function generateSubnetProxyTargetV2(
siteResource: SiteResource, siteResource: SiteResource,
clients: { clients: {
clientId: number; clientId: number;
pubKey: string | null; pubKey: string | null;
subnet: string | null; subnet: string | null;
}[] }[]
): Promise<SubnetProxyTargetV2 | undefined> { ): SubnetProxyTargetV2 | undefined {
if (clients.length === 0) { if (clients.length === 0) {
logger.debug( logger.debug(
`No clients have access to site resource ${siteResource.siteResourceId}, skipping target generation.` `No clients have access to site resource ${siteResource.siteResourceId}, skipping target generation.`
@@ -630,7 +619,7 @@ export async function generateSubnetProxyTargetV2(
destPrefix: destination, destPrefix: destination,
portRange, portRange,
disableIcmp, disableIcmp,
resourceId: siteResource.siteResourceId resourceId: siteResource.siteResourceId,
}; };
} }
@@ -642,7 +631,7 @@ export async function generateSubnetProxyTargetV2(
rewriteTo: destination, rewriteTo: destination,
portRange, portRange,
disableIcmp, disableIcmp,
resourceId: siteResource.siteResourceId resourceId: siteResource.siteResourceId,
}; };
} }
} else if (siteResource.mode == "cidr") { } else if (siteResource.mode == "cidr") {
@@ -651,68 +640,7 @@ export async function generateSubnetProxyTargetV2(
destPrefix: siteResource.destination, destPrefix: siteResource.destination,
portRange, portRange,
disableIcmp, disableIcmp,
resourceId: siteResource.siteResourceId
};
} else if (siteResource.mode == "http") {
let destination = siteResource.destination;
// check if this is a valid ip
const ipSchema = z.union([z.ipv4(), z.ipv6()]);
if (ipSchema.safeParse(destination).success) {
destination = `${destination}/32`;
}
if (
!siteResource.aliasAddress ||
!siteResource.destinationPort ||
!siteResource.scheme ||
!siteResource.fullDomain
) {
logger.debug(
`Site resource ${siteResource.siteResourceId} is in HTTP mode but is missing alias or alias address or destinationPort or scheme, skipping alias target generation.`
);
return;
}
// also push a match for the alias address
let tlsCert: string | undefined;
let tlsKey: string | undefined;
if (siteResource.ssl && siteResource.fullDomain) {
try {
const certs = await getValidCertificatesForDomains(
new Set([siteResource.fullDomain]),
true
);
if (certs.length > 0 && certs[0].certFile && certs[0].keyFile) {
tlsCert = certs[0].certFile;
tlsKey = certs[0].keyFile;
} else {
logger.warn(
`No valid certificate found for SSL site resource ${siteResource.siteResourceId} with domain ${siteResource.fullDomain}`
);
}
} catch (err) {
logger.error(
`Failed to retrieve certificate for site resource ${siteResource.siteResourceId} domain ${siteResource.fullDomain}: ${err}`
);
}
}
target = {
sourcePrefixes: [],
destPrefix: `${siteResource.aliasAddress}/32`,
rewriteTo: destination,
portRange,
disableIcmp,
resourceId: siteResource.siteResourceId, resourceId: siteResource.siteResourceId,
protocol: siteResource.ssl ? "https" : "http",
httpTargets: [
{
destAddr: siteResource.destination,
destPort: siteResource.destinationPort,
scheme: siteResource.scheme
}
],
...(tlsCert && tlsKey ? { tlsCert, tlsKey } : {})
}; };
} }
@@ -742,31 +670,33 @@ export async function generateSubnetProxyTargetV2(
return target; return target;
} }
/** /**
* Converts a SubnetProxyTargetV2 to an array of SubnetProxyTarget (v1) * Converts a SubnetProxyTargetV2 to an array of SubnetProxyTarget (v1)
* by expanding each source prefix into its own target entry. * by expanding each source prefix into its own target entry.
* @param targetV2 - The v2 target to convert * @param targetV2 - The v2 target to convert
* @returns Array of v1 SubnetProxyTarget objects * @returns Array of v1 SubnetProxyTarget objects
*/ */
export function convertSubnetProxyTargetsV2ToV1( export function convertSubnetProxyTargetsV2ToV1(
targetsV2: SubnetProxyTargetV2[] targetsV2: SubnetProxyTargetV2[]
): SubnetProxyTarget[] { ): SubnetProxyTarget[] {
return targetsV2.flatMap((targetV2) => return targetsV2.flatMap((targetV2) =>
targetV2.sourcePrefixes.map((sourcePrefix) => ({ targetV2.sourcePrefixes.map((sourcePrefix) => ({
sourcePrefix, sourcePrefix,
destPrefix: targetV2.destPrefix, destPrefix: targetV2.destPrefix,
...(targetV2.disableIcmp !== undefined && { ...(targetV2.disableIcmp !== undefined && {
disableIcmp: targetV2.disableIcmp disableIcmp: targetV2.disableIcmp
}), }),
...(targetV2.rewriteTo !== undefined && { ...(targetV2.rewriteTo !== undefined && {
rewriteTo: targetV2.rewriteTo rewriteTo: targetV2.rewriteTo
}), }),
...(targetV2.portRange !== undefined && { ...(targetV2.portRange !== undefined && {
portRange: targetV2.portRange portRange: targetV2.portRange
}) })
})) }))
); );
} }
// Custom schema for validating port range strings // Custom schema for validating port range strings
// Format: "80,443,8000-9000" or "*" for all ports, or empty string // Format: "80,443,8000-9000" or "*" for all ports, or empty string

View File

@@ -11,11 +11,11 @@ import {
roleSiteResources, roleSiteResources,
Site, Site,
SiteResource, SiteResource,
siteNetworks,
siteResources, siteResources,
sites, sites,
Transaction, Transaction,
userOrgRoles, userOrgRoles,
userOrgs,
userSiteResources userSiteResources
} from "@server/db"; } from "@server/db";
import { and, eq, inArray, ne } from "drizzle-orm"; import { and, eq, inArray, ne } from "drizzle-orm";
@@ -48,23 +48,15 @@ export async function getClientSiteResourceAccess(
siteResource: SiteResource, siteResource: SiteResource,
trx: Transaction | typeof db = db trx: Transaction | typeof db = db
) { ) {
// get all sites associated with this siteResource via its network // get the site
const sitesList = siteResource.networkId const [site] = await trx
? await trx .select()
.select() .from(sites)
.from(sites) .where(eq(sites.siteId, siteResource.siteId))
.innerJoin( .limit(1);
siteNetworks,
eq(siteNetworks.siteId, sites.siteId)
)
.where(eq(siteNetworks.networkId, siteResource.networkId))
.then((rows) => rows.map((row) => row.sites))
: [];
if (sitesList.length === 0) { if (!site) {
logger.warn( throw new Error(`Site with ID ${siteResource.siteId} not found`);
`No sites found for siteResource ${siteResource.siteResourceId} with networkId ${siteResource.networkId}`
);
} }
const roleIds = await trx const roleIds = await trx
@@ -145,7 +137,7 @@ export async function getClientSiteResourceAccess(
const mergedAllClientIds = mergedAllClients.map((c) => c.clientId); const mergedAllClientIds = mergedAllClients.map((c) => c.clientId);
return { return {
sitesList, site,
mergedAllClients, mergedAllClients,
mergedAllClientIds mergedAllClientIds
}; };
@@ -161,51 +153,40 @@ export async function rebuildClientAssociationsFromSiteResource(
subnet: string | null; subnet: string | null;
}[]; }[];
}> { }> {
const { sitesList, mergedAllClients, mergedAllClientIds } = const siteId = siteResource.siteId;
const { site, mergedAllClients, mergedAllClientIds } =
await getClientSiteResourceAccess(siteResource, trx); await getClientSiteResourceAccess(siteResource, trx);
/////////// process the client-siteResource associations /////////// /////////// process the client-siteResource associations ///////////
// get all of the clients associated with other resources in the same network, // get all of the clients associated with other resources on this site
// joined through siteNetworks so we know which siteId each client belongs to const allUpdatedClientsFromOtherResourcesOnThisSite = await trx
const allUpdatedClientsFromOtherResourcesOnThisSite = siteResource.networkId .select({
? await trx clientId: clientSiteResourcesAssociationsCache.clientId
.select({ })
clientId: clientSiteResourcesAssociationsCache.clientId, .from(clientSiteResourcesAssociationsCache)
siteId: siteNetworks.siteId .innerJoin(
}) siteResources,
.from(clientSiteResourcesAssociationsCache) eq(
.innerJoin( clientSiteResourcesAssociationsCache.siteResourceId,
siteResources, siteResources.siteResourceId
eq( )
clientSiteResourcesAssociationsCache.siteResourceId, )
siteResources.siteResourceId .where(
) and(
) eq(siteResources.siteId, siteId),
.innerJoin( ne(siteResources.siteResourceId, siteResource.siteResourceId)
siteNetworks, )
eq(siteNetworks.networkId, siteResources.networkId) );
)
.where(
and(
eq(siteResources.networkId, siteResource.networkId),
ne(
siteResources.siteResourceId,
siteResource.siteResourceId
)
)
)
: [];
// Build a per-site map so the loop below can check by siteId rather than const allClientIdsFromOtherResourcesOnThisSite = Array.from(
// across the entire network. new Set(
const clientsFromOtherResourcesBySite = new Map<number, Set<number>>(); allUpdatedClientsFromOtherResourcesOnThisSite.map(
for (const row of allUpdatedClientsFromOtherResourcesOnThisSite) { (row) => row.clientId
if (!clientsFromOtherResourcesBySite.has(row.siteId)) { )
clientsFromOtherResourcesBySite.set(row.siteId, new Set()); )
} );
clientsFromOtherResourcesBySite.get(row.siteId)!.add(row.clientId);
}
const existingClientSiteResources = await trx const existingClientSiteResources = await trx
.select({ .select({
@@ -279,90 +260,82 @@ export async function rebuildClientAssociationsFromSiteResource(
/////////// process the client-site associations /////////// /////////// process the client-site associations ///////////
for (const site of sitesList) { const existingClientSites = await trx
const siteId = site.siteId; .select({
clientId: clientSitesAssociationsCache.clientId
})
.from(clientSitesAssociationsCache)
.where(eq(clientSitesAssociationsCache.siteId, siteResource.siteId));
const existingClientSites = await trx const existingClientSiteIds = existingClientSites.map(
.select({ (row) => row.clientId
clientId: clientSitesAssociationsCache.clientId );
})
.from(clientSitesAssociationsCache)
.where(eq(clientSitesAssociationsCache.siteId, siteId));
const existingClientSiteIds = existingClientSites.map( // Get full client details for existing clients (needed for sending delete messages)
(row) => row.clientId const existingClients = await trx
); .select({
clientId: clients.clientId,
pubKey: clients.pubKey,
subnet: clients.subnet
})
.from(clients)
.where(inArray(clients.clientId, existingClientSiteIds));
// Get full client details for existing clients (needed for sending delete messages) const clientSitesToAdd = mergedAllClientIds.filter(
const existingClients = (clientId) =>
existingClientSiteIds.length > 0 !existingClientSiteIds.includes(clientId) &&
? await trx !allClientIdsFromOtherResourcesOnThisSite.includes(clientId) // dont remove if there is still another connection for another site resource
.select({ );
clientId: clients.clientId,
pubKey: clients.pubKey,
subnet: clients.subnet
})
.from(clients)
.where(inArray(clients.clientId, existingClientSiteIds))
: [];
const otherResourceClientIds = clientsFromOtherResourcesBySite.get(siteId) ?? new Set<number>(); const clientSitesToInsert = clientSitesToAdd.map((clientId) => ({
clientId,
siteId
}));
const clientSitesToAdd = mergedAllClientIds.filter( if (clientSitesToInsert.length > 0) {
(clientId) => await trx
!existingClientSiteIds.includes(clientId) && .insert(clientSitesAssociationsCache)
!otherResourceClientIds.has(clientId) // dont add if already connected via another site resource .values(clientSitesToInsert)
); .returning();
const clientSitesToInsert = clientSitesToAdd.map((clientId) => ({
clientId,
siteId
}));
if (clientSitesToInsert.length > 0) {
await trx
.insert(clientSitesAssociationsCache)
.values(clientSitesToInsert)
.returning();
}
// Now remove any client-site associations that should no longer exist
const clientSitesToRemove = existingClientSiteIds.filter(
(clientId) =>
!mergedAllClientIds.includes(clientId) &&
!otherResourceClientIds.has(clientId) // dont remove if there is still another connection for another site resource
);
if (clientSitesToRemove.length > 0) {
await trx
.delete(clientSitesAssociationsCache)
.where(
and(
eq(clientSitesAssociationsCache.siteId, siteId),
inArray(
clientSitesAssociationsCache.clientId,
clientSitesToRemove
)
)
);
}
// Now handle the messages to add/remove peers on both the newt and olm sides
await handleMessagesForSiteClients(
site,
siteId,
mergedAllClients,
existingClients,
clientSitesToAdd,
clientSitesToRemove,
trx
);
} }
// Now remove any client-site associations that should no longer exist
const clientSitesToRemove = existingClientSiteIds.filter(
(clientId) =>
!mergedAllClientIds.includes(clientId) &&
!allClientIdsFromOtherResourcesOnThisSite.includes(clientId) // dont remove if there is still another connection for another site resource
);
if (clientSitesToRemove.length > 0) {
await trx
.delete(clientSitesAssociationsCache)
.where(
and(
eq(clientSitesAssociationsCache.siteId, siteId),
inArray(
clientSitesAssociationsCache.clientId,
clientSitesToRemove
)
)
);
}
/////////// send the messages ///////////
// Now handle the messages to add/remove peers on both the newt and olm sides
await handleMessagesForSiteClients(
site,
siteId,
mergedAllClients,
existingClients,
clientSitesToAdd,
clientSitesToRemove,
trx
);
// Handle subnet proxy target updates for the resource associations // Handle subnet proxy target updates for the resource associations
await handleSubnetProxyTargetUpdates( await handleSubnetProxyTargetUpdates(
siteResource, siteResource,
sitesList,
mergedAllClients, mergedAllClients,
existingResourceClients, existingResourceClients,
clientSiteResourcesToAdd, clientSiteResourcesToAdd,
@@ -651,7 +624,6 @@ export async function updateClientSiteDestinations(
async function handleSubnetProxyTargetUpdates( async function handleSubnetProxyTargetUpdates(
siteResource: SiteResource, siteResource: SiteResource,
sitesList: Site[],
allClients: { allClients: {
clientId: number; clientId: number;
pubKey: string | null; pubKey: string | null;
@@ -666,138 +638,125 @@ async function handleSubnetProxyTargetUpdates(
clientSiteResourcesToRemove: number[], clientSiteResourcesToRemove: number[],
trx: Transaction | typeof db = db trx: Transaction | typeof db = db
): Promise<void> { ): Promise<void> {
const proxyJobs: Promise<any>[] = []; // Get the newt for this site
const olmJobs: Promise<any>[] = []; const [newt] = await trx
.select()
.from(newts)
.where(eq(newts.siteId, siteResource.siteId))
.limit(1);
for (const siteData of sitesList) { if (!newt) {
const siteId = siteData.siteId; logger.warn(
`Newt not found for site ${siteResource.siteId}, skipping subnet proxy target updates`
);
return;
}
// Get the newt for this site const proxyJobs = [];
const [newt] = await trx const olmJobs = [];
.select() // Generate targets for added associations
.from(newts) if (clientSiteResourcesToAdd.length > 0) {
.where(eq(newts.siteId, siteId)) const addedClients = allClients.filter((client) =>
.limit(1); clientSiteResourcesToAdd.includes(client.clientId)
);
if (!newt) { if (addedClients.length > 0) {
logger.warn( const targetToAdd = generateSubnetProxyTargetV2(
`Newt not found for site ${siteId}, skipping subnet proxy target updates` siteResource,
); addedClients
continue;
}
// Generate targets for added associations
if (clientSiteResourcesToAdd.length > 0) {
const addedClients = allClients.filter((client) =>
clientSiteResourcesToAdd.includes(client.clientId)
); );
if (addedClients.length > 0) { if (targetToAdd) {
const targetToAdd = await generateSubnetProxyTargetV2( proxyJobs.push(
siteResource, addSubnetProxyTargets(
addedClients newt.newtId,
[targetToAdd],
newt.version
)
); );
}
if (targetToAdd) { for (const client of addedClients) {
proxyJobs.push( olmJobs.push(
addSubnetProxyTargets( addPeerData(
newt.newtId, client.clientId,
[targetToAdd], siteResource.siteId,
newt.version generateRemoteSubnets([siteResource]),
) generateAliasConfig([siteResource])
); )
} );
for (const client of addedClients) {
olmJobs.push(
addPeerData(
client.clientId,
siteId,
generateRemoteSubnets([siteResource]),
generateAliasConfig([siteResource])
)
);
}
} }
} }
}
// here we use the existingSiteResource from BEFORE we updated the destination so we dont need to worry about updating destinations here // here we use the existingSiteResource from BEFORE we updated the destination so we dont need to worry about updating destinations here
// Generate targets for removed associations // Generate targets for removed associations
if (clientSiteResourcesToRemove.length > 0) { if (clientSiteResourcesToRemove.length > 0) {
const removedClients = existingClients.filter((client) => const removedClients = existingClients.filter((client) =>
clientSiteResourcesToRemove.includes(client.clientId) clientSiteResourcesToRemove.includes(client.clientId)
);
if (removedClients.length > 0) {
const targetToRemove = generateSubnetProxyTargetV2(
siteResource,
removedClients
); );
if (removedClients.length > 0) { if (targetToRemove) {
const targetToRemove = await generateSubnetProxyTargetV2( proxyJobs.push(
siteResource, removeSubnetProxyTargets(
removedClients newt.newtId,
[targetToRemove],
newt.version
)
); );
}
if (targetToRemove) { for (const client of removedClients) {
proxyJobs.push( // Check if this client still has access to another resource on this site with the same destination
removeSubnetProxyTargets( const destinationStillInUse = await trx
newt.newtId, .select()
[targetToRemove], .from(siteResources)
newt.version .innerJoin(
clientSiteResourcesAssociationsCache,
eq(
clientSiteResourcesAssociationsCache.siteResourceId,
siteResources.siteResourceId
) )
); )
} .where(
and(
for (const client of removedClients) {
// Check if this client still has access to another resource
// on this specific site with the same destination. We scope
// by siteId (via siteNetworks) rather than networkId because
// removePeerData operates per-site — a resource on a different
// site sharing the same network should not block removal here.
const destinationStillInUse = await trx
.select()
.from(siteResources)
.innerJoin(
clientSiteResourcesAssociationsCache,
eq( eq(
clientSiteResourcesAssociationsCache.siteResourceId, clientSiteResourcesAssociationsCache.clientId,
siteResources.siteResourceId client.clientId
),
eq(siteResources.siteId, siteResource.siteId),
eq(
siteResources.destination,
siteResource.destination
),
ne(
siteResources.siteResourceId,
siteResource.siteResourceId
) )
) )
.innerJoin(
siteNetworks,
eq(siteNetworks.networkId, siteResources.networkId)
)
.where(
and(
eq(
clientSiteResourcesAssociationsCache.clientId,
client.clientId
),
eq(siteNetworks.siteId, siteId),
eq(
siteResources.destination,
siteResource.destination
),
ne(
siteResources.siteResourceId,
siteResource.siteResourceId
)
)
);
// Only remove remote subnet if no other resource uses the same destination
const remoteSubnetsToRemove =
destinationStillInUse.length > 0
? []
: generateRemoteSubnets([siteResource]);
olmJobs.push(
removePeerData(
client.clientId,
siteId,
remoteSubnetsToRemove,
generateAliasConfig([siteResource])
)
); );
}
// Only remove remote subnet if no other resource uses the same destination
const remoteSubnetsToRemove =
destinationStillInUse.length > 0
? []
: generateRemoteSubnets([siteResource]);
olmJobs.push(
removePeerData(
client.clientId,
siteResource.siteId,
remoteSubnetsToRemove,
generateAliasConfig([siteResource])
)
);
} }
} }
} }
@@ -904,25 +863,10 @@ export async function rebuildClientAssociationsFromClient(
) )
: []; : [];
// Group by siteId for site-level associations — look up via siteNetworks since // Group by siteId for site-level associations
// siteResources no longer carries a direct siteId column. const newSiteIds = Array.from(
const networkIds = Array.from( new Set(newSiteResources.map((sr) => sr.siteId))
new Set(
newSiteResources
.map((sr) => sr.networkId)
.filter((id): id is number => id !== null)
)
); );
const newSiteIds =
networkIds.length > 0
? await trx
.select({ siteId: siteNetworks.siteId })
.from(siteNetworks)
.where(inArray(siteNetworks.networkId, networkIds))
.then((rows) =>
Array.from(new Set(rows.map((r) => r.siteId)))
)
: [];
/////////// Process client-siteResource associations /////////// /////////// Process client-siteResource associations ///////////
@@ -1195,45 +1139,13 @@ async function handleMessagesForClientResources(
resourcesToAdd.includes(r.siteResourceId) resourcesToAdd.includes(r.siteResourceId)
); );
// Build (resource, siteId) pairs by looking up siteNetworks for each resource's networkId
const addedNetworkIds = Array.from(
new Set(
addedResources
.map((r) => r.networkId)
.filter((id): id is number => id !== null)
)
);
const addedSiteNetworkRows =
addedNetworkIds.length > 0
? await trx
.select({
networkId: siteNetworks.networkId,
siteId: siteNetworks.siteId
})
.from(siteNetworks)
.where(inArray(siteNetworks.networkId, addedNetworkIds))
: [];
const addedNetworkToSites = new Map<number, number[]>();
for (const row of addedSiteNetworkRows) {
if (!addedNetworkToSites.has(row.networkId)) {
addedNetworkToSites.set(row.networkId, []);
}
addedNetworkToSites.get(row.networkId)!.push(row.siteId);
}
// Group by site for proxy updates // Group by site for proxy updates
const addedBySite = new Map<number, SiteResource[]>(); const addedBySite = new Map<number, SiteResource[]>();
for (const resource of addedResources) { for (const resource of addedResources) {
const siteIds = if (!addedBySite.has(resource.siteId)) {
resource.networkId != null addedBySite.set(resource.siteId, []);
? (addedNetworkToSites.get(resource.networkId) ?? [])
: [];
for (const siteId of siteIds) {
if (!addedBySite.has(siteId)) {
addedBySite.set(siteId, []);
}
addedBySite.get(siteId)!.push(resource);
} }
addedBySite.get(resource.siteId)!.push(resource);
} }
// Add subnet proxy targets for each site // Add subnet proxy targets for each site
@@ -1252,7 +1164,7 @@ async function handleMessagesForClientResources(
} }
for (const resource of resources) { for (const resource of resources) {
const target = await generateSubnetProxyTargetV2(resource, [ const target = generateSubnetProxyTargetV2(resource, [
{ {
clientId: client.clientId, clientId: client.clientId,
pubKey: client.pubKey, pubKey: client.pubKey,
@@ -1275,7 +1187,7 @@ async function handleMessagesForClientResources(
olmJobs.push( olmJobs.push(
addPeerData( addPeerData(
client.clientId, client.clientId,
siteId, resource.siteId,
generateRemoteSubnets([resource]), generateRemoteSubnets([resource]),
generateAliasConfig([resource]) generateAliasConfig([resource])
) )
@@ -1287,7 +1199,7 @@ async function handleMessagesForClientResources(
error.message.includes("not found") error.message.includes("not found")
) { ) {
logger.debug( logger.debug(
`Olm data not found for client ${client.clientId} and site ${siteId}, skipping addition` `Olm data not found for client ${client.clientId} and site ${resource.siteId}, skipping removal`
); );
} else { } else {
throw error; throw error;
@@ -1304,45 +1216,13 @@ async function handleMessagesForClientResources(
.from(siteResources) .from(siteResources)
.where(inArray(siteResources.siteResourceId, resourcesToRemove)); .where(inArray(siteResources.siteResourceId, resourcesToRemove));
// Build (resource, siteId) pairs via siteNetworks
const removedNetworkIds = Array.from(
new Set(
removedResources
.map((r) => r.networkId)
.filter((id): id is number => id !== null)
)
);
const removedSiteNetworkRows =
removedNetworkIds.length > 0
? await trx
.select({
networkId: siteNetworks.networkId,
siteId: siteNetworks.siteId
})
.from(siteNetworks)
.where(inArray(siteNetworks.networkId, removedNetworkIds))
: [];
const removedNetworkToSites = new Map<number, number[]>();
for (const row of removedSiteNetworkRows) {
if (!removedNetworkToSites.has(row.networkId)) {
removedNetworkToSites.set(row.networkId, []);
}
removedNetworkToSites.get(row.networkId)!.push(row.siteId);
}
// Group by site for proxy updates // Group by site for proxy updates
const removedBySite = new Map<number, SiteResource[]>(); const removedBySite = new Map<number, SiteResource[]>();
for (const resource of removedResources) { for (const resource of removedResources) {
const siteIds = if (!removedBySite.has(resource.siteId)) {
resource.networkId != null removedBySite.set(resource.siteId, []);
? (removedNetworkToSites.get(resource.networkId) ?? [])
: [];
for (const siteId of siteIds) {
if (!removedBySite.has(siteId)) {
removedBySite.set(siteId, []);
}
removedBySite.get(siteId)!.push(resource);
} }
removedBySite.get(resource.siteId)!.push(resource);
} }
// Remove subnet proxy targets for each site // Remove subnet proxy targets for each site
@@ -1361,7 +1241,7 @@ async function handleMessagesForClientResources(
} }
for (const resource of resources) { for (const resource of resources) {
const target = await generateSubnetProxyTargetV2(resource, [ const target = generateSubnetProxyTargetV2(resource, [
{ {
clientId: client.clientId, clientId: client.clientId,
pubKey: client.pubKey, pubKey: client.pubKey,
@@ -1380,11 +1260,7 @@ async function handleMessagesForClientResources(
} }
try { try {
// Check if this client still has access to another resource // Check if this client still has access to another resource on this site with the same destination
// on this specific site with the same destination. We scope
// by siteId (via siteNetworks) rather than networkId because
// removePeerData operates per-site — a resource on a different
// site sharing the same network should not block removal here.
const destinationStillInUse = await trx const destinationStillInUse = await trx
.select() .select()
.from(siteResources) .from(siteResources)
@@ -1395,17 +1271,13 @@ async function handleMessagesForClientResources(
siteResources.siteResourceId siteResources.siteResourceId
) )
) )
.innerJoin(
siteNetworks,
eq(siteNetworks.networkId, siteResources.networkId)
)
.where( .where(
and( and(
eq( eq(
clientSiteResourcesAssociationsCache.clientId, clientSiteResourcesAssociationsCache.clientId,
client.clientId client.clientId
), ),
eq(siteNetworks.siteId, siteId), eq(siteResources.siteId, resource.siteId),
eq( eq(
siteResources.destination, siteResources.destination,
resource.destination resource.destination
@@ -1427,7 +1299,7 @@ async function handleMessagesForClientResources(
olmJobs.push( olmJobs.push(
removePeerData( removePeerData(
client.clientId, client.clientId,
siteId, resource.siteId,
remoteSubnetsToRemove, remoteSubnetsToRemove,
generateAliasConfig([resource]) generateAliasConfig([resource])
) )
@@ -1439,7 +1311,7 @@ async function handleMessagesForClientResources(
error.message.includes("not found") error.message.includes("not found")
) { ) {
logger.debug( logger.debug(
`Olm data not found for client ${client.clientId} and site ${siteId}, skipping removal` `Olm data not found for client ${client.clientId} and site ${resource.siteId}, skipping removal`
); );
} else { } else {
throw error; throw error;

View File

@@ -1,461 +0,0 @@
/*
* This file is part of a proprietary work.
*
* Copyright (c) 2025 Fossorial, Inc.
* All rights reserved.
*
* This file is licensed under the Fossorial Commercial License.
* You may not use this file except in compliance with the License.
* Unauthorized use, copying, modification, or distribution is strictly prohibited.
*
* This file is not licensed under the AGPLv3.
*/
import fs from "fs";
import crypto from "crypto";
import {
certificates,
clients,
clientSiteResourcesAssociationsCache,
db,
domains,
newts,
siteNetworks,
SiteResource,
siteResources
} from "@server/db";
import { and, eq } from "drizzle-orm";
import { encrypt, decrypt } from "@server/lib/crypto";
import logger from "@server/logger";
import privateConfig from "#private/lib/config";
import config from "@server/lib/config";
import { generateSubnetProxyTargetV2, SubnetProxyTargetV2 } from "@server/lib/ip";
import { updateTargets } from "@server/routers/client/targets";
import cache from "#private/lib/cache";
import { build } from "@server/build";
// Shape of a single certificate entry inside Traefik's acme.json.
// `certificate` and `key` are base64-encoded PEM blobs (decoded with
// Buffer.from(..., "base64") in syncAcmeCerts below).
interface AcmeCert {
    domain: { main: string; sans?: string[] };
    certificate: string;
    key: string;
    Store: string;
}
// Top-level shape of acme.json: certificate lists keyed by resolver name.
interface AcmeJson {
    [resolver: string]: {
        Certificates: AcmeCert[];
    };
}
/**
 * Pushes a changed certificate out to every newt serving resources that
 * use it.
 *
 * Finds SSL-enabled site resources matching the cert's domain (by
 * domainId when known, otherwise by exact fullDomain), regenerates each
 * resource's subnet proxy target so it carries the fresh cert, and sends
 * an old-target/new-target update to the newt of every site attached to
 * the resource's network. Failures for one resource are logged and do
 * not abort the remaining pushes.
 *
 * @param domain     the certificate's main domain (may be a wildcard)
 * @param domainId   matching domain record id, or null when none resolved
 * @param oldCertPem previously stored cert PEM, or null for a new cert
 * @param oldKeyPem  previously stored key PEM, or null for a new cert
 */
async function pushCertUpdateToAffectedNewts(
    domain: string,
    domainId: string | null,
    oldCertPem: string | null,
    oldKeyPem: string | null
): Promise<void> {
    // Find all SSL-enabled HTTP site resources that use this cert's domain
    let affectedResources: SiteResource[] = [];
    if (domainId) {
        affectedResources = await db
            .select()
            .from(siteResources)
            .where(
                and(
                    eq(siteResources.domainId, domainId),
                    eq(siteResources.ssl, true)
                )
            );
    } else {
        // Fallback: match by exact fullDomain when no domainId is available
        affectedResources = await db
            .select()
            .from(siteResources)
            .where(
                and(
                    eq(siteResources.fullDomain, domain),
                    eq(siteResources.ssl, true)
                )
            );
    }
    if (affectedResources.length === 0) {
        logger.debug(
            `acmeCertSync: no affected site resources for cert domain "${domain}"`
        );
        return;
    }
    logger.info(
        `acmeCertSync: pushing cert update to ${affectedResources.length} affected site resource(s) for domain "${domain}"`
    );
    for (const resource of affectedResources) {
        try {
            // Get all sites for this resource via siteNetworks
            // (a resource with no networkId has no sites to push to)
            const resourceSiteRows = resource.networkId
                ? await db
                      .select({ siteId: siteNetworks.siteId })
                      .from(siteNetworks)
                      .where(eq(siteNetworks.networkId, resource.networkId))
                : [];
            if (resourceSiteRows.length === 0) {
                logger.debug(
                    `acmeCertSync: no sites for resource ${resource.siteResourceId}, skipping`
                );
                continue;
            }
            // Get all clients with access to this resource
            const resourceClients = await db
                .select({
                    clientId: clients.clientId,
                    pubKey: clients.pubKey,
                    subnet: clients.subnet
                })
                .from(clients)
                .innerJoin(
                    clientSiteResourcesAssociationsCache,
                    eq(
                        clients.clientId,
                        clientSiteResourcesAssociationsCache.clientId
                    )
                )
                .where(
                    eq(
                        clientSiteResourcesAssociationsCache.siteResourceId,
                        resource.siteResourceId
                    )
                );
            if (resourceClients.length === 0) {
                logger.debug(
                    `acmeCertSync: no clients for resource ${resource.siteResourceId}, skipping`
                );
                continue;
            }
            // Invalidate the cert cache so generateSubnetProxyTargetV2 fetches fresh data
            if (resource.fullDomain) {
                await cache.del(`cert:${resource.fullDomain}`);
            }
            // Generate target once — same cert applies to all sites for this resource
            const newTarget = await generateSubnetProxyTargetV2(
                resource,
                resourceClients
            );
            if (!newTarget) {
                logger.debug(
                    `acmeCertSync: could not generate target for resource ${resource.siteResourceId}, skipping`
                );
                continue;
            }
            // Construct the old target — same routing shape but with the previous cert/key.
            // The newt only uses destPrefix/sourcePrefixes for removal, but we keep the
            // semantics correct so the update message accurately reflects what changed.
            const oldTarget: SubnetProxyTargetV2 = {
                ...newTarget,
                tlsCert: oldCertPem ?? undefined,
                tlsKey: oldKeyPem ?? undefined
            };
            // Push update to each site's newt
            for (const { siteId } of resourceSiteRows) {
                const [newt] = await db
                    .select()
                    .from(newts)
                    .where(eq(newts.siteId, siteId))
                    .limit(1);
                if (!newt) {
                    logger.debug(
                        `acmeCertSync: no newt found for site ${siteId}, skipping resource ${resource.siteResourceId}`
                    );
                    continue;
                }
                await updateTargets(
                    newt.newtId,
                    { oldTargets: [oldTarget], newTargets: [newTarget] },
                    newt.version
                );
                logger.info(
                    `acmeCertSync: pushed cert update to newt for site ${siteId}, resource ${resource.siteResourceId}`
                );
            }
        } catch (err) {
            // Log and continue: one bad resource must not block the rest
            logger.error(
                `acmeCertSync: error pushing cert update for resource ${resource?.siteResourceId}: ${err}`
            );
        }
    }
}
/**
 * Resolves the domain record id that covers a certificate's domain.
 *
 * Tries an exact baseDomain match first, then walks up the label
 * hierarchy looking for a wildcard-type domain whose baseDomain is a
 * suffix of the cert domain (e.g. cert "sub.example.com" matches a
 * wildcard domain with baseDomain "example.com").
 *
 * @param certDomain the certificate's domain, possibly "*."-prefixed
 * @returns the matching domainId, or null when nothing matches
 */
async function findDomainId(certDomain: string): Promise<string | null> {
    // Wildcard certs are looked up by their base name (*.example.com -> example.com)
    const lookupDomain = certDomain.startsWith("*.")
        ? certDomain.slice(2)
        : certDomain;

    // 1. Exact baseDomain match (any domain type)
    const [exact] = await db
        .select({ domainId: domains.domainId })
        .from(domains)
        .where(eq(domains.baseDomain, lookupDomain))
        .limit(1);
    if (exact) {
        return exact.domainId;
    }

    // 2. Climb the label hierarchy, checking each suffix against
    //    wildcard-type domain records.
    const labels = lookupDomain.split(".");
    for (let start = 1; start < labels.length; start++) {
        const suffix = labels.slice(start).join(".");
        if (!suffix) {
            continue;
        }
        const [wildcardHit] = await db
            .select({ domainId: domains.domainId })
            .from(domains)
            .where(
                and(
                    eq(domains.baseDomain, suffix),
                    eq(domains.type, "wildcard")
                )
            )
            .limit(1);
        if (wildcardHit) {
            return wildcardHit.domainId;
        }
    }

    return null;
}
/**
 * Returns the first PEM-encoded certificate found in a bundle, or null
 * when the input contains no certificate markers.
 */
function extractFirstCert(pemBundle: string): string | null {
    const certPattern =
        /-----BEGIN CERTIFICATE-----[\s\S]+?-----END CERTIFICATE-----/;
    const found = certPattern.exec(pemBundle);
    return found !== null ? found[0] : null;
}
/**
 * Reads Traefik's acme.json and mirrors its certificates into the DB.
 *
 * For each certificate under the given resolver: decodes the base64
 * cert/key into PEM, skips entries that are missing, blank, or unchanged
 * versus the stored (encrypted) copy, parses the expiry from the first
 * cert in the bundle, resolves a matching domain record, then inserts or
 * updates the `certificates` row and pushes the change to affected newts.
 * All failures are logged at debug level and skip the entry rather than
 * aborting the sync pass.
 *
 * @param acmeJsonPath path to Traefik's acme.json file
 * @param resolver     name of the ACME resolver whose certs to sync
 */
async function syncAcmeCerts(
    acmeJsonPath: string,
    resolver: string
): Promise<void> {
    let raw: string;
    try {
        raw = fs.readFileSync(acmeJsonPath, "utf8");
    } catch (err) {
        // File may legitimately not exist yet (before first issuance)
        logger.debug(`acmeCertSync: could not read ${acmeJsonPath}: ${err}`);
        return;
    }
    let acmeJson: AcmeJson;
    try {
        acmeJson = JSON.parse(raw);
    } catch (err) {
        logger.debug(`acmeCertSync: could not parse acme.json: ${err}`);
        return;
    }
    const resolverData = acmeJson[resolver];
    if (!resolverData || !Array.isArray(resolverData.Certificates)) {
        logger.debug(
            `acmeCertSync: no certificates found for resolver "${resolver}"`
        );
        return;
    }
    for (const cert of resolverData.Certificates) {
        const domain = cert.domain?.main;
        if (!domain) {
            logger.debug(`acmeCertSync: skipping cert with missing domain`);
            continue;
        }
        if (!cert.certificate || !cert.key) {
            logger.debug(
                `acmeCertSync: skipping cert for ${domain} - empty certificate or key field`
            );
            continue;
        }
        // acme.json stores cert and key base64-encoded; decode to PEM
        const certPem = Buffer.from(cert.certificate, "base64").toString(
            "utf8"
        );
        const keyPem = Buffer.from(cert.key, "base64").toString("utf8");
        if (!certPem.trim() || !keyPem.trim()) {
            logger.debug(
                `acmeCertSync: skipping cert for ${domain} - blank PEM after base64 decode`
            );
            continue;
        }
        // Check if cert already exists in DB
        const existing = await db
            .select()
            .from(certificates)
            .where(eq(certificates.domain, domain))
            .limit(1);
        // Old cert/key are captured (when decryptable) so the newt update
        // message can describe what is being replaced.
        let oldCertPem: string | null = null;
        let oldKeyPem: string | null = null;
        if (existing.length > 0 && existing[0].certFile) {
            try {
                const storedCertPem = decrypt(
                    existing[0].certFile,
                    config.getRawConfig().server.secret!
                );
                if (storedCertPem === certPem) {
                    logger.debug(
                        `acmeCertSync: cert for ${domain} is unchanged, skipping`
                    );
                    continue;
                }
                // Cert has changed; capture old values so we can send a correct
                // update message to the newt after the DB write.
                oldCertPem = storedCertPem;
                if (existing[0].keyFile) {
                    try {
                        oldKeyPem = decrypt(
                            existing[0].keyFile,
                            config.getRawConfig().server.secret!
                        );
                    } catch (keyErr) {
                        logger.debug(
                            `acmeCertSync: could not decrypt stored key for ${domain}: ${keyErr}`
                        );
                    }
                }
            } catch (err) {
                // Decryption failure means we should proceed with the update
                logger.debug(
                    `acmeCertSync: could not decrypt stored cert for ${domain}, will update: ${err}`
                );
            }
        }
        // Parse cert expiry from the first cert in the PEM bundle
        let expiresAt: number | null = null;
        const firstCertPem = extractFirstCert(certPem);
        if (firstCertPem) {
            try {
                const x509 = new crypto.X509Certificate(firstCertPem);
                // validTo parsed to unix seconds
                expiresAt = Math.floor(new Date(x509.validTo).getTime() / 1000);
            } catch (err) {
                logger.debug(
                    `acmeCertSync: could not parse cert expiry for ${domain}: ${err}`
                );
            }
        }
        const wildcard = domain.startsWith("*.");
        // Cert and key are stored encrypted with the server secret
        const encryptedCert = encrypt(certPem, config.getRawConfig().server.secret!);
        const encryptedKey = encrypt(keyPem, config.getRawConfig().server.secret!);
        const now = Math.floor(Date.now() / 1000);
        const domainId = await findDomainId(domain);
        if (domainId) {
            logger.debug(
                `acmeCertSync: resolved domainId "${domainId}" for cert domain "${domain}"`
            );
        } else {
            logger.debug(
                `acmeCertSync: no matching domain record found for cert domain "${domain}"`
            );
        }
        if (existing.length > 0) {
            await db
                .update(certificates)
                .set({
                    certFile: encryptedCert,
                    keyFile: encryptedKey,
                    status: "valid",
                    expiresAt,
                    updatedAt: now,
                    wildcard,
                    // Only overwrite domainId when one was resolved
                    ...(domainId !== null && { domainId })
                })
                .where(eq(certificates.domain, domain));
            logger.info(
                `acmeCertSync: updated certificate for ${domain} (expires ${expiresAt ? new Date(expiresAt * 1000).toISOString() : "unknown"})`
            );
            await pushCertUpdateToAffectedNewts(domain, domainId, oldCertPem, oldKeyPem);
        } else {
            await db.insert(certificates).values({
                domain,
                domainId,
                certFile: encryptedCert,
                keyFile: encryptedKey,
                status: "valid",
                expiresAt,
                createdAt: now,
                updatedAt: now,
                wildcard
            });
            logger.info(
                `acmeCertSync: inserted new certificate for ${domain} (expires ${expiresAt ? new Date(expiresAt * 1000).toISOString() : "unknown"})`
            );
            // For a brand-new cert, push to any SSL resources that were waiting for it
            await pushCertUpdateToAffectedNewts(domain, domainId, null, null);
        }
    }
}
/**
 * Starts the periodic ACME certificate sync loop.
 *
 * Reads Traefik's acme.json on a configurable interval and mirrors any
 * new or changed certificates into the database (see syncAcmeCerts).
 * No-ops in the SaaS build or when disabled/unsupported by config flags.
 */
export function initAcmeCertSync(): void {
    // Use strict equality; cert sync is only for self-hosted builds.
    if (build === "saas") {
        logger.debug(`acmeCertSync: skipping ACME cert sync in SaaS build`);
        return;
    }
    const privateConfigData = privateConfig.getRawPrivateConfig();
    if (!privateConfigData.flags?.enable_acme_cert_sync) {
        logger.debug(`acmeCertSync: ACME cert sync is disabled by config flag, skipping`);
        return;
    }
    // Synced certs are only consumed when pangolin-dns is in use.
    if (!privateConfigData.flags.use_pangolin_dns) {
        logger.debug(`acmeCertSync: ACME cert sync requires use_pangolin_dns flag to be enabled, skipping`);
        return;
    }
    const acmeJsonPath =
        privateConfigData.acme?.acme_json_path ?? "config/letsencrypt/acme.json";
    const resolver = privateConfigData.acme?.resolver ?? "letsencrypt";
    const intervalMs = privateConfigData.acme?.sync_interval_ms ?? 5000;
    logger.info(
        `acmeCertSync: starting ACME cert sync from "${acmeJsonPath}" using resolver "${resolver}" every ${intervalMs}ms`
    );
    // Run immediately on init, then on the configured interval. Rejections
    // are caught and logged so one failing pass never kills the loop.
    syncAcmeCerts(acmeJsonPath, resolver).catch((err) => {
        logger.error(`acmeCertSync: error during initial sync: ${err}`);
    });
    setInterval(() => {
        syncAcmeCerts(acmeJsonPath, resolver).catch((err) => {
            logger.error(`acmeCertSync: error during sync: ${err}`);
        });
    }, intervalMs);
}

View File

@@ -11,15 +11,23 @@
* This file is not licensed under the AGPLv3. * This file is not licensed under the AGPLv3.
*/ */
import privateConfig from "./config"; import config from "./config";
import config from "@server/lib/config";
import { certificates, db } from "@server/db"; import { certificates, db } from "@server/db";
import { and, eq, isNotNull, or, inArray, sql } from "drizzle-orm"; import { and, eq, isNotNull, or, inArray, sql } from "drizzle-orm";
import { decrypt } from "@server/lib/crypto"; import { decryptData } from "@server/lib/encryption";
import logger from "@server/logger"; import logger from "@server/logger";
import cache from "#private/lib/cache"; import cache from "#private/lib/cache";
let encryptionKeyHex = "";
let encryptionKey: Buffer;
function loadEncryptData() {
if (encryptionKey) {
return; // already loaded
}
encryptionKeyHex = config.getRawPrivateConfig().server.encryption_key;
encryptionKey = Buffer.from(encryptionKeyHex, "hex");
}
// Define the return type for clarity and type safety // Define the return type for clarity and type safety
export type CertificateResult = { export type CertificateResult = {
@@ -37,7 +45,7 @@ export async function getValidCertificatesForDomains(
domains: Set<string>, domains: Set<string>,
useCache: boolean = true useCache: boolean = true
): Promise<Array<CertificateResult>> { ): Promise<Array<CertificateResult>> {
loadEncryptData(); // Ensure encryption key is loaded
const finalResults: CertificateResult[] = []; const finalResults: CertificateResult[] = [];
const domainsToQuery = new Set<string>(); const domainsToQuery = new Set<string>();
@@ -60,7 +68,7 @@ export async function getValidCertificatesForDomains(
// 2. If all domains were resolved from the cache, return early // 2. If all domains were resolved from the cache, return early
if (domainsToQuery.size === 0) { if (domainsToQuery.size === 0) {
const decryptedResults = decryptFinalResults(finalResults, config.getRawConfig().server.secret!); const decryptedResults = decryptFinalResults(finalResults);
return decryptedResults; return decryptedResults;
} }
@@ -165,23 +173,22 @@ export async function getValidCertificatesForDomains(
} }
} }
const decryptedResults = decryptFinalResults(finalResults, config.getRawConfig().server.secret!); const decryptedResults = decryptFinalResults(finalResults);
return decryptedResults; return decryptedResults;
} }
function decryptFinalResults( function decryptFinalResults(
finalResults: CertificateResult[], finalResults: CertificateResult[]
secret: string
): CertificateResult[] { ): CertificateResult[] {
const validCertsDecrypted = finalResults.map((cert) => { const validCertsDecrypted = finalResults.map((cert) => {
// Decrypt and save certificate file // Decrypt and save certificate file
const decryptedCert = decrypt( const decryptedCert = decryptData(
cert.certFile!, // is not null from query cert.certFile!, // is not null from query
secret encryptionKey
); );
// Decrypt and save key file // Decrypt and save key file
const decryptedKey = decrypt(cert.keyFile!, secret); const decryptedKey = decryptData(cert.keyFile!, encryptionKey);
// Return only the certificate data without org information // Return only the certificate data without org information
return { return {

View File

@@ -34,6 +34,10 @@ export const privateConfigSchema = z.object({
}), }),
server: z server: z
.object({ .object({
encryption_key: z
.string()
.optional()
.transform(getEnvOrYaml("SERVER_ENCRYPTION_KEY")),
reo_client_id: z reo_client_id: z
.string() .string()
.optional() .optional()
@@ -91,21 +95,10 @@ export const privateConfigSchema = z.object({
.object({ .object({
enable_redis: z.boolean().optional().default(false), enable_redis: z.boolean().optional().default(false),
use_pangolin_dns: z.boolean().optional().default(false), use_pangolin_dns: z.boolean().optional().default(false),
use_org_only_idp: z.boolean().optional(), use_org_only_idp: z.boolean().optional()
enable_acme_cert_sync: z.boolean().optional().default(true)
}) })
.optional() .optional()
.prefault({}), .prefault({}),
acme: z
.object({
acme_json_path: z
.string()
.optional()
.default("config/letsencrypt/acme.json"),
resolver: z.string().optional().default("letsencrypt"),
sync_interval_ms: z.number().optional().default(5000)
})
.optional(),
branding: z branding: z
.object({ .object({
app_name: z.string().optional(), app_name: z.string().optional(),

View File

@@ -33,7 +33,7 @@ import {
} from "drizzle-orm"; } from "drizzle-orm";
import logger from "@server/logger"; import logger from "@server/logger";
import config from "@server/lib/config"; import config from "@server/lib/config";
import { orgs, resources, sites, siteNetworks, siteResources, Target, targets } from "@server/db"; import { orgs, resources, sites, Target, targets } from "@server/db";
import { import {
sanitize, sanitize,
encodePath, encodePath,
@@ -267,35 +267,6 @@ export async function getTraefikConfig(
}); });
}); });
// Query siteResources in HTTP mode with SSL enabled and aliases — cert generation / HTTPS edge
const siteResourcesWithFullDomain = await db
.select({
siteResourceId: siteResources.siteResourceId,
fullDomain: siteResources.fullDomain,
mode: siteResources.mode
})
.from(siteResources)
.innerJoin(siteNetworks, eq(siteResources.networkId, siteNetworks.networkId))
.innerJoin(sites, eq(siteNetworks.siteId, sites.siteId))
.where(
and(
eq(siteResources.enabled, true),
isNotNull(siteResources.fullDomain),
eq(siteResources.mode, "http"),
eq(siteResources.ssl, true),
or(
eq(sites.exitNodeId, exitNodeId),
and(
isNull(sites.exitNodeId),
sql`(${siteTypes.includes("local") ? 1 : 0} = 1)`,
eq(sites.type, "local"),
sql`(${build != "saas" ? 1 : 0} = 1)`
)
),
inArray(sites.type, siteTypes)
)
);
let validCerts: CertificateResult[] = []; let validCerts: CertificateResult[] = [];
if (privateConfig.getRawPrivateConfig().flags.use_pangolin_dns) { if (privateConfig.getRawPrivateConfig().flags.use_pangolin_dns) {
// create a list of all domains to get certs for // create a list of all domains to get certs for
@@ -305,12 +276,6 @@ export async function getTraefikConfig(
domains.add(resource.fullDomain); domains.add(resource.fullDomain);
} }
} }
// Include siteResource aliases so pangolin-dns also fetches certs for them
for (const sr of siteResourcesWithFullDomain) {
if (sr.fullDomain) {
domains.add(sr.fullDomain);
}
}
// get the valid certs for these domains // get the valid certs for these domains
validCerts = await getValidCertificatesForDomains(domains, true); // we are caching here because this is called often validCerts = await getValidCertificatesForDomains(domains, true); // we are caching here because this is called often
// logger.debug(`Valid certs for domains: ${JSON.stringify(validCerts)}`); // logger.debug(`Valid certs for domains: ${JSON.stringify(validCerts)}`);
@@ -902,139 +867,6 @@ export async function getTraefikConfig(
} }
} }
// Add Traefik routes for siteResource aliases (HTTP mode + SSL) so that
// Traefik generates TLS certificates for those domains even when no
// matching resource exists yet.
if (siteResourcesWithFullDomain.length > 0) {
// Build a set of domains already covered by normal resources
const existingFullDomains = new Set<string>();
for (const resource of resourcesMap.values()) {
if (resource.fullDomain) {
existingFullDomains.add(resource.fullDomain);
}
}
for (const sr of siteResourcesWithFullDomain) {
if (!sr.fullDomain) continue;
// Skip if this alias is already handled by a resource router
if (existingFullDomains.has(sr.fullDomain)) continue;
const fullDomain = sr.fullDomain;
const srKey = `site-resource-cert-${sr.siteResourceId}`;
const siteResourceServiceName = `${srKey}-service`;
const siteResourceRouterName = `${srKey}-router`;
const siteResourceRewriteMiddlewareName = `${srKey}-rewrite`;
const maintenancePort = config.getRawConfig().server.next_port;
const maintenanceHost =
config.getRawConfig().server.internal_hostname;
if (!config_output.http.routers) {
config_output.http.routers = {};
}
if (!config_output.http.services) {
config_output.http.services = {};
}
if (!config_output.http.middlewares) {
config_output.http.middlewares = {};
}
// Service pointing at the internal maintenance/Next.js page
config_output.http.services[siteResourceServiceName] = {
loadBalancer: {
servers: [
{
url: `http://${maintenanceHost}:${maintenancePort}`
}
],
passHostHeader: true
}
};
// Middleware that rewrites any path to /maintenance-screen
config_output.http.middlewares[
siteResourceRewriteMiddlewareName
] = {
replacePathRegex: {
regex: "^/(.*)",
replacement: "/private-maintenance-screen"
}
};
// HTTP -> HTTPS redirect so the ACME challenge can be served
config_output.http.routers[
`${siteResourceRouterName}-redirect`
] = {
entryPoints: [
config.getRawConfig().traefik.http_entrypoint
],
middlewares: [redirectHttpsMiddlewareName],
service: siteResourceServiceName,
rule: `Host(\`${fullDomain}\`)`,
priority: 100
};
// Determine TLS / cert-resolver configuration
let tls: any = {};
if (
!privateConfig.getRawPrivateConfig().flags.use_pangolin_dns
) {
const domainParts = fullDomain.split(".");
const wildCard =
domainParts.length <= 2
? `*.${domainParts.join(".")}`
: `*.${domainParts.slice(1).join(".")}`;
const globalDefaultResolver =
config.getRawConfig().traefik.cert_resolver;
const globalDefaultPreferWildcard =
config.getRawConfig().traefik.prefer_wildcard_cert;
tls = {
certResolver: globalDefaultResolver,
...(globalDefaultPreferWildcard
? { domains: [{ main: wildCard }] }
: {})
};
} else {
// pangolin-dns: only add route if we already have a valid cert
const matchingCert = validCerts.find(
(cert) => cert.queriedDomain === fullDomain
);
if (!matchingCert) {
logger.debug(
`No matching certificate found for siteResource alias: ${fullDomain}`
);
continue;
}
}
// HTTPS router — presence of this entry triggers cert generation
config_output.http.routers[siteResourceRouterName] = {
entryPoints: [
config.getRawConfig().traefik.https_entrypoint
],
service: siteResourceServiceName,
middlewares: [siteResourceRewriteMiddlewareName],
rule: `Host(\`${fullDomain}\`)`,
priority: 100,
tls
};
// Assets bypass router — lets Next.js static files load without rewrite
config_output.http.routers[`${siteResourceRouterName}-assets`] = {
entryPoints: [
config.getRawConfig().traefik.https_entrypoint
],
service: siteResourceServiceName,
rule: `Host(\`${fullDomain}\`) && (PathPrefix(\`/_next\`) || PathRegexp(\`^/__nextjs*\`))`,
priority: 101,
tls
};
}
}
if (generateLoginPageRouters) { if (generateLoginPageRouters) {
const exitNodeLoginPages = await db const exitNodeLoginPages = await db
.select({ .select({

View File

@@ -22,15 +22,11 @@ import { OpenAPITags, registry } from "@server/openApi";
import { db, domainNamespaces, resources } from "@server/db"; import { db, domainNamespaces, resources } from "@server/db";
import { inArray } from "drizzle-orm"; import { inArray } from "drizzle-orm";
import { CheckDomainAvailabilityResponse } from "@server/routers/domain/types"; import { CheckDomainAvailabilityResponse } from "@server/routers/domain/types";
import { build } from "@server/build";
import { isSubscribed } from "#private/lib/isSubscribed";
import { tierMatrix } from "@server/lib/billing/tierMatrix";
const paramsSchema = z.strictObject({}); const paramsSchema = z.strictObject({});
const querySchema = z.strictObject({ const querySchema = z.strictObject({
subdomain: z.string(), subdomain: z.string()
// orgId: build === "saas" ? z.string() : z.string().optional() // Required for saas, optional otherwise
}); });
registry.registerPath({ registry.registerPath({
@@ -62,23 +58,6 @@ export async function checkDomainNamespaceAvailability(
} }
const { subdomain } = parsedQuery.data; const { subdomain } = parsedQuery.data;
// if (
// build == "saas" &&
// !isSubscribed(orgId!, tierMatrix.domainNamespaces)
// ) {
// // return not available
// return response<CheckDomainAvailabilityResponse>(res, {
// data: {
// available: false,
// options: []
// },
// success: true,
// error: false,
// message: "Your current subscription does not support custom domain namespaces. Please upgrade to access this feature.",
// status: HttpCode.OK
// });
// }
const namespaces = await db.select().from(domainNamespaces); const namespaces = await db.select().from(domainNamespaces);
let possibleDomains = namespaces.map((ns) => { let possibleDomains = namespaces.map((ns) => {
const desired = `${subdomain}.${ns.domainNamespaceId}`; const desired = `${subdomain}.${ns.domainNamespaceId}`;

View File

@@ -22,9 +22,6 @@ import { eq, sql } from "drizzle-orm";
import logger from "@server/logger"; import logger from "@server/logger";
import { fromError } from "zod-validation-error"; import { fromError } from "zod-validation-error";
import { OpenAPITags, registry } from "@server/openApi"; import { OpenAPITags, registry } from "@server/openApi";
import { isSubscribed } from "#private/lib/isSubscribed";
import { build } from "@server/build";
import { tierMatrix } from "@server/lib/billing/tierMatrix";
const paramsSchema = z.strictObject({}); const paramsSchema = z.strictObject({});
@@ -40,8 +37,7 @@ const querySchema = z.strictObject({
.optional() .optional()
.default("0") .default("0")
.transform(Number) .transform(Number)
.pipe(z.int().nonnegative()), .pipe(z.int().nonnegative())
// orgId: build === "saas" ? z.string() : z.string().optional() // Required for saas, optional otherwise
}); });
async function query(limit: number, offset: number) { async function query(limit: number, offset: number) {
@@ -103,26 +99,6 @@ export async function listDomainNamespaces(
); );
} }
// if (
// build == "saas" &&
// !isSubscribed(orgId!, tierMatrix.domainNamespaces)
// ) {
// return response<ListDomainNamespacesResponse>(res, {
// data: {
// domainNamespaces: [],
// pagination: {
// total: 0,
// limit,
// offset
// }
// },
// success: true,
// error: false,
// message: "No namespaces found. Your current subscription does not support custom domain namespaces. Please upgrade to access this feature.",
// status: HttpCode.OK
// });
// }
const domainNamespacesList = await query(limit, offset); const domainNamespacesList = await query(limit, offset);
const [{ count }] = await db const [{ count }] = await db

View File

@@ -24,8 +24,14 @@ import {
User, User,
certificates, certificates,
exitNodeOrgs, exitNodeOrgs,
RemoteExitNode,
olms,
newts,
clients,
sites,
domains, domains,
orgDomains, orgDomains,
targets,
loginPage, loginPage,
loginPageOrg, loginPageOrg,
LoginPage, LoginPage,
@@ -64,9 +70,12 @@ import {
updateAndGenerateEndpointDestinations, updateAndGenerateEndpointDestinations,
updateSiteBandwidth updateSiteBandwidth
} from "@server/routers/gerbil"; } from "@server/routers/gerbil";
import * as gerbil from "@server/routers/gerbil";
import logger from "@server/logger"; import logger from "@server/logger";
import { decrypt } from "@server/lib/crypto"; import { decryptData } from "@server/lib/encryption";
import config from "@server/lib/config"; import config from "@server/lib/config";
import privateConfig from "#private/lib/config";
import * as fs from "fs";
import { exchangeSession } from "@server/routers/badger"; import { exchangeSession } from "@server/routers/badger";
import { validateResourceSessionToken } from "@server/auth/sessions/resource"; import { validateResourceSessionToken } from "@server/auth/sessions/resource";
import { checkExitNodeOrg, resolveExitNodes } from "#private/lib/exitNodes"; import { checkExitNodeOrg, resolveExitNodes } from "#private/lib/exitNodes";
@@ -289,11 +298,25 @@ hybridRouter.get(
} }
); );
let encryptionKeyHex = "";
let encryptionKey: Buffer;
function loadEncryptData() {
if (encryptionKey) {
return; // already loaded
}
encryptionKeyHex =
privateConfig.getRawPrivateConfig().server.encryption_key;
encryptionKey = Buffer.from(encryptionKeyHex, "hex");
}
// Get valid certificates for given domains (supports wildcard certs) // Get valid certificates for given domains (supports wildcard certs)
hybridRouter.get( hybridRouter.get(
"/certificates/domains", "/certificates/domains",
async (req: Request, res: Response, next: NextFunction) => { async (req: Request, res: Response, next: NextFunction) => {
try { try {
loadEncryptData(); // Ensure encryption key is loaded
const parsed = getCertificatesByDomainsQuerySchema.safeParse( const parsed = getCertificatesByDomainsQuerySchema.safeParse(
req.query req.query
); );
@@ -424,13 +447,13 @@ hybridRouter.get(
const result = filtered.map((cert) => { const result = filtered.map((cert) => {
// Decrypt and save certificate file // Decrypt and save certificate file
const decryptedCert = decrypt( const decryptedCert = decryptData(
cert.certFile!, // is not null from query cert.certFile!, // is not null from query
config.getRawConfig().server.secret! encryptionKey
); );
// Decrypt and save key file // Decrypt and save key file
const decryptedKey = decrypt(cert.keyFile!, config.getRawConfig().server.secret!); const decryptedKey = decryptData(cert.keyFile!, encryptionKey);
// Return only the certificate data without org information // Return only the certificate data without org information
return { return {
@@ -810,12 +833,9 @@ hybridRouter.get(
) )
); );
logger.debug( logger.debug(`User ${userId} has roles in org ${orgId}:`, userOrgRoleRows);
`User ${userId} has roles in org ${orgId}:`,
userOrgRoleRows
);
return response<{ roleId: number; roleName: string }[]>(res, { return response<{ roleId: number, roleName: string }[]>(res, {
data: userOrgRoleRows, data: userOrgRoleRows,
success: true, success: true,
error: false, error: false,

View File

@@ -92,14 +92,9 @@ export const handleConnectionLogMessage: MessageHandler = async (context) => {
return; return;
} }
// Look up the org for this site and check retention settings // Look up the org for this site
const [site] = await db const [site] = await db
.select({ .select({ orgId: sites.orgId, orgSubnet: orgs.subnet })
orgId: sites.orgId,
orgSubnet: orgs.subnet,
settingsLogRetentionDaysConnection:
orgs.settingsLogRetentionDaysConnection
})
.from(sites) .from(sites)
.innerJoin(orgs, eq(sites.orgId, orgs.orgId)) .innerJoin(orgs, eq(sites.orgId, orgs.orgId))
.where(eq(sites.siteId, newt.siteId)); .where(eq(sites.siteId, newt.siteId));
@@ -113,13 +108,6 @@ export const handleConnectionLogMessage: MessageHandler = async (context) => {
const orgId = site.orgId; const orgId = site.orgId;
if (site.settingsLogRetentionDaysConnection === 0) {
logger.debug(
`Connection log retention is disabled for org ${orgId}, skipping`
);
return;
}
// Extract the CIDR suffix (e.g. "/16") from the org subnet so we can // Extract the CIDR suffix (e.g. "/16") from the org subnet so we can
// reconstruct the exact subnet string stored on each client record. // reconstruct the exact subnet string stored on each client record.
const cidrSuffix = site.orgSubnet?.includes("/") const cidrSuffix = site.orgSubnet?.includes("/")

View File

@@ -1,238 +0,0 @@
/*
* This file is part of a proprietary work.
*
* Copyright (c) 2025 Fossorial, Inc.
* All rights reserved.
*
* This file is licensed under the Fossorial Commercial License.
* You may not use this file except in compliance with the License.
* Unauthorized use, copying, modification, or distribution is strictly prohibited.
*
* This file is not licensed under the AGPLv3.
*/
import { db } from "@server/db";
import { MessageHandler } from "@server/routers/ws";
import { sites, Newt, orgs, clients, clientSitesAssociationsCache } from "@server/db";
import { and, eq, inArray } from "drizzle-orm";
import logger from "@server/logger";
import { inflate } from "zlib";
import { promisify } from "util";
import { logRequestAudit } from "@server/routers/badger/logRequestAudit";
import { getCountryCodeForIp } from "@server/lib/geoip";
/**
 * No-op flush: this build buffers no request logs, so there is nothing to
 * persist. Kept so callers can invoke it unconditionally.
 */
export async function flushRequestLogToDb(): Promise<void> {
    // Nothing buffered; resolve immediately.
}
// Promise-returning wrapper around zlib's callback-style inflate.
const zlibInflate = promisify(inflate);

// Shape of one HTTP request log entry as emitted by newt.
interface HTTPRequestLogData {
    requestId: string;
    resourceId: number; // siteResourceId
    timestamp: string; // ISO 8601
    method: string;
    scheme: string; // "http" or "https"
    host: string;
    path: string;
    rawQuery?: string;
    userAgent?: string;
    sourceAddr: string; // ip:port
    tls: boolean;
}

/**
 * Decompress a base64-encoded zlib-compressed string into parsed JSON.
 *
 * @param compressed base64 text wrapping a zlib (deflate) stream of a JSON array
 * @returns the parsed array of request log entries
 * @throws if the payload does not parse to a JSON array (or inflate/parse fails)
 */
async function decompressRequestLog(
    compressed: string
): Promise<HTTPRequestLogData[]> {
    const inflated = await zlibInflate(Buffer.from(compressed, "base64"));
    const payload = JSON.parse(inflated.toString("utf-8"));
    if (!Array.isArray(payload)) {
        throw new Error("Decompressed request log data is not an array");
    }
    return payload;
}
/**
 * WS handler for `newt/request-log` messages.
 *
 * Receives a batch of HTTP request log entries from a newt (site agent) as a
 * base64+zlib-compressed JSON array, resolves each entry's client location via
 * GeoIP, and writes one audit record per entry via `logRequestAudit`.
 *
 * Exits early (with a warn/debug log, no throw) when: the context has no newt,
 * the newt has no siteId, the payload is missing, the site row is absent,
 * request-log retention is disabled for the org, decompression fails, or the
 * batch is empty.
 */
export const handleRequestLogMessage: MessageHandler = async (context) => {
    const { message, client } = context;
    const newt = client as Newt;
    if (!newt) {
        logger.warn("Request log received but no newt client in context");
        return;
    }
    if (!newt.siteId) {
        logger.warn("Request log received but newt has no siteId");
        return;
    }
    if (!message.data?.compressed) {
        logger.warn("Request log message missing compressed data");
        return;
    }
    // Look up the org for this site and check retention settings
    const [site] = await db
        .select({
            orgId: sites.orgId,
            orgSubnet: orgs.subnet,
            settingsLogRetentionDaysRequest:
                orgs.settingsLogRetentionDaysRequest
        })
        .from(sites)
        .innerJoin(orgs, eq(sites.orgId, orgs.orgId))
        .where(eq(sites.siteId, newt.siteId));
    if (!site) {
        logger.warn(
            `Request log received but site ${newt.siteId} not found in database`
        );
        return;
    }
    const orgId = site.orgId;
    // Retention of 0 days means request logging is turned off for the org.
    if (site.settingsLogRetentionDaysRequest === 0) {
        logger.debug(
            `Request log retention is disabled for org ${orgId}, skipping`
        );
        return;
    }
    let entries: HTTPRequestLogData[];
    try {
        entries = await decompressRequestLog(message.data.compressed);
    } catch (error) {
        // Malformed payload: log and drop the whole batch rather than crash.
        logger.error("Failed to decompress request log data:", error);
        return;
    }
    if (entries.length === 0) {
        return;
    }
    logger.debug(`Request log entries: ${JSON.stringify(entries)}`);
    // Build a map from sourceIp → external endpoint string by joining clients
    // with clientSitesAssociationsCache. The endpoint is the real-world IP:port
    // of the client device and is used for GeoIP lookup.
    const ipToEndpoint = new Map<string, string>();
    // Extract the CIDR suffix (e.g. "/16") from the org subnet so that each
    // source IP can be rewritten as the exact subnet string stored on the
    // matching client record (clients.subnet).
    const cidrSuffix = site.orgSubnet?.includes("/")
        ? site.orgSubnet.substring(site.orgSubnet.indexOf("/"))
        : null;
    if (cidrSuffix) {
        // Deduplicate source addresses so the DB lookup below stays small.
        const uniqueSourceAddrs = new Set<string>();
        for (const entry of entries) {
            if (entry.sourceAddr) {
                uniqueSourceAddrs.add(entry.sourceAddr);
            }
        }
        if (uniqueSourceAddrs.size > 0) {
            // Strip the port (if present) and append the org CIDR suffix to
            // form candidate `clients.subnet` values.
            const subnetQueries = Array.from(uniqueSourceAddrs).map((addr) => {
                const ip = addr.includes(":") ? addr.split(":")[0] : addr;
                return `${ip}${cidrSuffix}`;
            });
            const matchedClients = await db
                .select({
                    subnet: clients.subnet,
                    endpoint: clientSitesAssociationsCache.endpoint
                })
                .from(clients)
                .innerJoin(
                    clientSitesAssociationsCache,
                    and(
                        eq(
                            clientSitesAssociationsCache.clientId,
                            clients.clientId
                        ),
                        eq(clientSitesAssociationsCache.siteId, newt.siteId)
                    )
                )
                .where(
                    and(
                        eq(clients.orgId, orgId),
                        inArray(clients.subnet, subnetQueries)
                    )
                );
            for (const c of matchedClients) {
                if (c.endpoint) {
                    // Key by the bare IP portion of the client's subnet.
                    const ip = c.subnet.split("/")[0];
                    ipToEndpoint.set(ip, c.endpoint);
                }
            }
        }
    }
    for (const entry of entries) {
        // Skip entries missing any required field rather than failing the batch.
        if (
            !entry.requestId ||
            !entry.resourceId ||
            !entry.method ||
            !entry.scheme ||
            !entry.host ||
            !entry.path ||
            !entry.sourceAddr
        ) {
            logger.debug(
                `Skipping request log entry with missing required fields: ${JSON.stringify(entry)}`
            );
            continue;
        }
        // Reconstruct the full request URL for the audit record.
        const originalRequestURL =
            entry.scheme +
            "://" +
            entry.host +
            entry.path +
            (entry.rawQuery ? "?" + entry.rawQuery : "");
        // Resolve the client's external endpoint for GeoIP lookup.
        // sourceAddr is the WireGuard IP (possibly ip:port), so strip the port.
        const sourceIp = entry.sourceAddr.includes(":")
            ? entry.sourceAddr.split(":")[0]
            : entry.sourceAddr;
        const endpoint = ipToEndpoint.get(sourceIp);
        let location: string | undefined;
        if (endpoint) {
            const endpointIp = endpoint.includes(":")
                ? endpoint.split(":")[0]
                : endpoint;
            location = await getCountryCodeForIp(endpointIp);
        }
        await logRequestAudit(
            {
                action: true,
                // reason 108 = "Connected Client" (see logRequestAudit reason table)
                reason: 108,
                siteResourceId: entry.resourceId,
                orgId,
                location
            },
            {
                path: entry.path,
                originalRequestURL,
                scheme: entry.scheme,
                host: entry.host,
                method: entry.method,
                tls: entry.tls,
                requestIp: entry.sourceAddr
            }
        );
    }
    logger.debug(
        `Buffered ${entries.length} request log entry/entries from newt ${newt.newtId} (site ${newt.siteId})`
    );
};

View File

@@ -12,4 +12,3 @@
*/ */
export * from "./handleConnectionLogMessage"; export * from "./handleConnectionLogMessage";
export * from "./handleRequestLogMessage";

View File

@@ -21,7 +21,7 @@ import {
roles, roles,
roundTripMessageTracker, roundTripMessageTracker,
siteResources, siteResources,
siteNetworks, sites,
userOrgs userOrgs
} from "@server/db"; } from "@server/db";
import { logAccessAudit } from "#private/lib/logAccessAudit"; import { logAccessAudit } from "#private/lib/logAccessAudit";
@@ -63,12 +63,10 @@ const bodySchema = z
export type SignSshKeyResponse = { export type SignSshKeyResponse = {
certificate: string; certificate: string;
messageIds: number[];
messageId: number; messageId: number;
sshUsername: string; sshUsername: string;
sshHost: string; sshHost: string;
resourceId: number; resourceId: number;
siteIds: number[];
siteId: number; siteId: number;
keyId: string; keyId: string;
validPrincipals: string[]; validPrincipals: string[];
@@ -262,7 +260,10 @@ export async function signSshKey(
.update(userOrgs) .update(userOrgs)
.set({ pamUsername: usernameToUse }) .set({ pamUsername: usernameToUse })
.where( .where(
and(eq(userOrgs.orgId, orgId), eq(userOrgs.userId, userId)) and(
eq(userOrgs.orgId, orgId),
eq(userOrgs.userId, userId)
)
); );
} else { } else {
usernameToUse = userOrg.pamUsername; usernameToUse = userOrg.pamUsername;
@@ -394,12 +395,21 @@ export async function signSshKey(
homedir = roleRows[0].sshCreateHomeDir ?? null; homedir = roleRows[0].sshCreateHomeDir ?? null;
} }
const sites = await db // get the site
.select({ siteId: siteNetworks.siteId }) const [newt] = await db
.from(siteNetworks) .select()
.where(eq(siteNetworks.networkId, resource.networkId!)); .from(newts)
.where(eq(newts.siteId, resource.siteId))
.limit(1);
const siteIds = sites.map((site) => site.siteId); if (!newt) {
return next(
createHttpError(
HttpCode.INTERNAL_SERVER_ERROR,
"Site associated with resource not found"
)
);
}
// Sign the public key // Sign the public key
const now = BigInt(Math.floor(Date.now() / 1000)); const now = BigInt(Math.floor(Date.now() / 1000));
@@ -413,65 +423,44 @@ export async function signSshKey(
validBefore: now + validFor validBefore: now + validFor
}); });
const messageIds: number[] = []; const [message] = await db
for (const siteId of siteIds) { .insert(roundTripMessageTracker)
// get the site .values({
const [newt] = await db wsClientId: newt.newtId,
.select() messageType: `newt/pam/connection`,
.from(newts) sentAt: Math.floor(Date.now() / 1000)
.where(eq(newts.siteId, siteId)) })
.limit(1); .returning();
if (!newt) { if (!message) {
return next( return next(
createHttpError( createHttpError(
HttpCode.INTERNAL_SERVER_ERROR, HttpCode.INTERNAL_SERVER_ERROR,
"Site associated with resource not found" "Failed to create message tracker entry"
) )
); );
}
const [message] = await db
.insert(roundTripMessageTracker)
.values({
wsClientId: newt.newtId,
messageType: `newt/pam/connection`,
sentAt: Math.floor(Date.now() / 1000)
})
.returning();
if (!message) {
return next(
createHttpError(
HttpCode.INTERNAL_SERVER_ERROR,
"Failed to create message tracker entry"
)
);
}
messageIds.push(message.messageId);
await sendToClient(newt.newtId, {
type: `newt/pam/connection`,
data: {
messageId: message.messageId,
orgId: orgId,
agentPort: resource.authDaemonPort ?? 22123,
externalAuthDaemon: resource.authDaemonMode === "remote",
agentHost: resource.destination,
caCert: caKeys.publicKeyOpenSSH,
username: usernameToUse,
niceId: resource.niceId,
metadata: {
sudoMode: sudoMode,
sudoCommands: parsedSudoCommands,
homedir: homedir,
groups: parsedGroups
}
}
});
} }
await sendToClient(newt.newtId, {
type: `newt/pam/connection`,
data: {
messageId: message.messageId,
orgId: orgId,
agentPort: resource.authDaemonPort ?? 22123,
externalAuthDaemon: resource.authDaemonMode === "remote",
agentHost: resource.destination,
caCert: caKeys.publicKeyOpenSSH,
username: usernameToUse,
niceId: resource.niceId,
metadata: {
sudoMode: sudoMode,
sudoCommands: parsedSudoCommands,
homedir: homedir,
groups: parsedGroups
}
}
});
const expiresIn = Number(validFor); // seconds const expiresIn = Number(validFor); // seconds
let sshHost; let sshHost;
@@ -491,7 +480,7 @@ export async function signSshKey(
metadata: JSON.stringify({ metadata: JSON.stringify({
resourceId: resource.siteResourceId, resourceId: resource.siteResourceId,
resource: resource.name, resource: resource.name,
siteIds: siteIds siteId: resource.siteId,
}) })
}); });
@@ -505,7 +494,7 @@ export async function signSshKey(
: undefined, : undefined,
metadata: { metadata: {
resourceName: resource.name, resourceName: resource.name,
siteId: siteIds[0], siteId: resource.siteId,
sshUsername: usernameToUse, sshUsername: usernameToUse,
sshHost: sshHost sshHost: sshHost
}, },
@@ -516,13 +505,11 @@ export async function signSshKey(
return response<SignSshKeyResponse>(res, { return response<SignSshKeyResponse>(res, {
data: { data: {
certificate: cert.certificate, certificate: cert.certificate,
messageIds: messageIds, messageId: message.messageId,
messageId: messageIds[0], // just pick the first one for backward compatibility
sshUsername: usernameToUse, sshUsername: usernameToUse,
sshHost: sshHost, sshHost: sshHost,
resourceId: resource.siteResourceId, resourceId: resource.siteResourceId,
siteIds: siteIds, siteId: resource.siteId,
siteId: siteIds[0], // just pick the first one for backward compatibility
keyId: cert.keyId, keyId: cert.keyId,
validPrincipals: cert.validPrincipals, validPrincipals: cert.validPrincipals,
validAfter: cert.validAfter.toISOString(), validAfter: cert.validAfter.toISOString(),

View File

@@ -18,13 +18,12 @@ import {
} from "#private/routers/remoteExitNode"; } from "#private/routers/remoteExitNode";
import { MessageHandler } from "@server/routers/ws"; import { MessageHandler } from "@server/routers/ws";
import { build } from "@server/build"; import { build } from "@server/build";
import { handleConnectionLogMessage, handleRequestLogMessage } from "#private/routers/newt"; import { handleConnectionLogMessage } from "#private/routers/newt";
export const messageHandlers: Record<string, MessageHandler> = { export const messageHandlers: Record<string, MessageHandler> = {
"remoteExitNode/register": handleRemoteExitNodeRegisterMessage, "remoteExitNode/register": handleRemoteExitNodeRegisterMessage,
"remoteExitNode/ping": handleRemoteExitNodePingMessage, "remoteExitNode/ping": handleRemoteExitNodePingMessage,
"newt/access-log": handleConnectionLogMessage, "newt/access-log": handleConnectionLogMessage,
"newt/request-log": handleRequestLogMessage,
}; };
if (build != "saas") { if (build != "saas") {

View File

@@ -1,8 +1,8 @@
import { logsDb, primaryLogsDb, requestAuditLog, resources, siteResources, db, primaryDb } from "@server/db"; import { logsDb, primaryLogsDb, requestAuditLog, resources, db, primaryDb } from "@server/db";
import { registry } from "@server/openApi"; import { registry } from "@server/openApi";
import { NextFunction } from "express"; import { NextFunction } from "express";
import { Request, Response } from "express"; import { Request, Response } from "express";
import { eq, gt, lt, and, count, desc, inArray, isNull, or } from "drizzle-orm"; import { eq, gt, lt, and, count, desc, inArray } from "drizzle-orm";
import { OpenAPITags } from "@server/openApi"; import { OpenAPITags } from "@server/openApi";
import { z } from "zod"; import { z } from "zod";
import createHttpError from "http-errors"; import createHttpError from "http-errors";
@@ -92,10 +92,7 @@ function getWhere(data: Q) {
lt(requestAuditLog.timestamp, data.timeEnd), lt(requestAuditLog.timestamp, data.timeEnd),
eq(requestAuditLog.orgId, data.orgId), eq(requestAuditLog.orgId, data.orgId),
data.resourceId data.resourceId
? or( ? eq(requestAuditLog.resourceId, data.resourceId)
eq(requestAuditLog.resourceId, data.resourceId),
eq(requestAuditLog.siteResourceId, data.resourceId)
)
: undefined, : undefined,
data.actor ? eq(requestAuditLog.actor, data.actor) : undefined, data.actor ? eq(requestAuditLog.actor, data.actor) : undefined,
data.method ? eq(requestAuditLog.method, data.method) : undefined, data.method ? eq(requestAuditLog.method, data.method) : undefined,
@@ -113,16 +110,15 @@ export function queryRequest(data: Q) {
return primaryLogsDb return primaryLogsDb
.select({ .select({
id: requestAuditLog.id, id: requestAuditLog.id,
timestamp: requestAuditLog.timestamp, timestamp: requestAuditLog.timestamp,
orgId: requestAuditLog.orgId, orgId: requestAuditLog.orgId,
action: requestAuditLog.action, action: requestAuditLog.action,
reason: requestAuditLog.reason, reason: requestAuditLog.reason,
actorType: requestAuditLog.actorType, actorType: requestAuditLog.actorType,
actor: requestAuditLog.actor, actor: requestAuditLog.actor,
actorId: requestAuditLog.actorId, actorId: requestAuditLog.actorId,
resourceId: requestAuditLog.resourceId, resourceId: requestAuditLog.resourceId,
siteResourceId: requestAuditLog.siteResourceId, ip: requestAuditLog.ip,
ip: requestAuditLog.ip,
location: requestAuditLog.location, location: requestAuditLog.location,
userAgent: requestAuditLog.userAgent, userAgent: requestAuditLog.userAgent,
metadata: requestAuditLog.metadata, metadata: requestAuditLog.metadata,
@@ -141,73 +137,37 @@ export function queryRequest(data: Q) {
} }
async function enrichWithResourceDetails(logs: Awaited<ReturnType<typeof queryRequest>>) { async function enrichWithResourceDetails(logs: Awaited<ReturnType<typeof queryRequest>>) {
// If logs database is the same as main database, we can do a join
// Otherwise, we need to fetch resource details separately
const resourceIds = logs const resourceIds = logs
.map(log => log.resourceId) .map(log => log.resourceId)
.filter((id): id is number => id !== null && id !== undefined); .filter((id): id is number => id !== null && id !== undefined);
const siteResourceIds = logs if (resourceIds.length === 0) {
.filter(log => log.resourceId == null && log.siteResourceId != null)
.map(log => log.siteResourceId)
.filter((id): id is number => id !== null && id !== undefined);
if (resourceIds.length === 0 && siteResourceIds.length === 0) {
return logs.map(log => ({ ...log, resourceName: null, resourceNiceId: null })); return logs.map(log => ({ ...log, resourceName: null, resourceNiceId: null }));
} }
const resourceMap = new Map<number, { name: string | null; niceId: string | null }>(); // Fetch resource details from main database
const resourceDetails = await primaryDb
.select({
resourceId: resources.resourceId,
name: resources.name,
niceId: resources.niceId
})
.from(resources)
.where(inArray(resources.resourceId, resourceIds));
if (resourceIds.length > 0) { // Create a map for quick lookup
const resourceDetails = await primaryDb const resourceMap = new Map(
.select({ resourceDetails.map(r => [r.resourceId, { name: r.name, niceId: r.niceId }])
resourceId: resources.resourceId, );
name: resources.name,
niceId: resources.niceId
})
.from(resources)
.where(inArray(resources.resourceId, resourceIds));
for (const r of resourceDetails) {
resourceMap.set(r.resourceId, { name: r.name, niceId: r.niceId });
}
}
const siteResourceMap = new Map<number, { name: string | null; niceId: string | null }>();
if (siteResourceIds.length > 0) {
const siteResourceDetails = await primaryDb
.select({
siteResourceId: siteResources.siteResourceId,
name: siteResources.name,
niceId: siteResources.niceId
})
.from(siteResources)
.where(inArray(siteResources.siteResourceId, siteResourceIds));
for (const r of siteResourceDetails) {
siteResourceMap.set(r.siteResourceId, { name: r.name, niceId: r.niceId });
}
}
// Enrich logs with resource details // Enrich logs with resource details
return logs.map(log => { return logs.map(log => ({
if (log.resourceId != null) { ...log,
const details = resourceMap.get(log.resourceId); resourceName: log.resourceId ? resourceMap.get(log.resourceId)?.name ?? null : null,
return { resourceNiceId: log.resourceId ? resourceMap.get(log.resourceId)?.niceId ?? null : null
...log, }));
resourceName: details?.name ?? null,
resourceNiceId: details?.niceId ?? null
};
} else if (log.siteResourceId != null) {
const details = siteResourceMap.get(log.siteResourceId);
return {
...log,
resourceId: log.siteResourceId,
resourceName: details?.name ?? null,
resourceNiceId: details?.niceId ?? null
};
}
return { ...log, resourceName: null, resourceNiceId: null };
});
} }
export function countRequestQuery(data: Q) { export function countRequestQuery(data: Q) {
@@ -251,8 +211,7 @@ async function queryUniqueFilterAttributes(
uniqueLocations, uniqueLocations,
uniqueHosts, uniqueHosts,
uniquePaths, uniquePaths,
uniqueResources, uniqueResources
uniqueSiteResources
] = await Promise.all([ ] = await Promise.all([
primaryLogsDb primaryLogsDb
.selectDistinct({ actor: requestAuditLog.actor }) .selectDistinct({ actor: requestAuditLog.actor })
@@ -280,13 +239,6 @@ async function queryUniqueFilterAttributes(
}) })
.from(requestAuditLog) .from(requestAuditLog)
.where(baseConditions) .where(baseConditions)
.limit(DISTINCT_LIMIT + 1),
primaryLogsDb
.selectDistinct({
id: requestAuditLog.siteResourceId
})
.from(requestAuditLog)
.where(and(baseConditions, isNull(requestAuditLog.resourceId)))
.limit(DISTINCT_LIMIT + 1) .limit(DISTINCT_LIMIT + 1)
]); ]);
@@ -307,10 +259,6 @@ async function queryUniqueFilterAttributes(
.map(row => row.id) .map(row => row.id)
.filter((id): id is number => id !== null); .filter((id): id is number => id !== null);
const siteResourceIds = uniqueSiteResources
.map(row => row.id)
.filter((id): id is number => id !== null);
let resourcesWithNames: Array<{ id: number; name: string | null }> = []; let resourcesWithNames: Array<{ id: number; name: string | null }> = [];
if (resourceIds.length > 0) { if (resourceIds.length > 0) {
@@ -322,31 +270,10 @@ async function queryUniqueFilterAttributes(
.from(resources) .from(resources)
.where(inArray(resources.resourceId, resourceIds)); .where(inArray(resources.resourceId, resourceIds));
resourcesWithNames = [ resourcesWithNames = resourceDetails.map(r => ({
...resourcesWithNames, id: r.resourceId,
...resourceDetails.map(r => ({ name: r.name
id: r.resourceId, }));
name: r.name
}))
];
}
if (siteResourceIds.length > 0) {
const siteResourceDetails = await primaryDb
.select({
siteResourceId: siteResources.siteResourceId,
name: siteResources.name
})
.from(siteResources)
.where(inArray(siteResources.siteResourceId, siteResourceIds));
resourcesWithNames = [
...resourcesWithNames,
...siteResourceDetails.map(r => ({
id: r.siteResourceId,
name: r.name
}))
];
} }
return { return {

View File

@@ -28,7 +28,6 @@ export type QueryRequestAuditLogResponse = {
actor: string | null; actor: string | null;
actorId: string | null; actorId: string | null;
resourceId: number | null; resourceId: number | null;
siteResourceId: number | null;
resourceNiceId: string | null; resourceNiceId: string | null;
resourceName: string | null; resourceName: string | null;
ip: string | null; ip: string | null;

View File

@@ -18,7 +18,6 @@ Reasons:
105 - Valid Password 105 - Valid Password
106 - Valid email 106 - Valid email
107 - Valid SSO 107 - Valid SSO
108 - Connected Client
201 - Resource Not Found 201 - Resource Not Found
202 - Resource Blocked 202 - Resource Blocked
@@ -39,7 +38,6 @@ const auditLogBuffer: Array<{
metadata: any; metadata: any;
action: boolean; action: boolean;
resourceId?: number; resourceId?: number;
siteResourceId?: number;
reason: number; reason: number;
location?: string; location?: string;
originalRequestURL: string; originalRequestURL: string;
@@ -188,7 +186,6 @@ export async function logRequestAudit(
action: boolean; action: boolean;
reason: number; reason: number;
resourceId?: number; resourceId?: number;
siteResourceId?: number;
orgId?: string; orgId?: string;
location?: string; location?: string;
user?: { username: string; userId: string }; user?: { username: string; userId: string };
@@ -265,7 +262,6 @@ export async function logRequestAudit(
metadata: sanitizeString(metadata), metadata: sanitizeString(metadata),
action: data.action, action: data.action,
resourceId: data.resourceId, resourceId: data.resourceId,
siteResourceId: data.siteResourceId,
reason: data.reason, reason: data.reason,
location: sanitizeString(data.location), location: sanitizeString(data.location),
originalRequestURL: sanitizeString(body.originalRequestURL) ?? "", originalRequestURL: sanitizeString(body.originalRequestURL) ?? "",

View File

@@ -4,10 +4,8 @@ import {
clientSitesAssociationsCache, clientSitesAssociationsCache,
db, db,
ExitNode, ExitNode,
networks,
resources, resources,
Site, Site,
siteNetworks,
siteResources, siteResources,
targetHealthCheck, targetHealthCheck,
targets targets
@@ -139,14 +137,11 @@ export async function buildClientConfigurationForNewtClient(
// Filter out any null values from peers that didn't have an olm // Filter out any null values from peers that didn't have an olm
const validPeers = peers.filter((peer) => peer !== null); const validPeers = peers.filter((peer) => peer !== null);
// Get all enabled site resources for this site by joining through siteNetworks and networks // Get all enabled site resources for this site
const allSiteResources = await db const allSiteResources = await db
.select() .select()
.from(siteResources) .from(siteResources)
.innerJoin(networks, eq(siteResources.networkId, networks.networkId)) .where(eq(siteResources.siteId, siteId));
.innerJoin(siteNetworks, eq(networks.networkId, siteNetworks.networkId))
.where(eq(siteNetworks.siteId, siteId))
.then((rows) => rows.map((r) => r.siteResources));
const targetsToSend: SubnetProxyTargetV2[] = []; const targetsToSend: SubnetProxyTargetV2[] = [];
@@ -173,7 +168,7 @@ export async function buildClientConfigurationForNewtClient(
) )
); );
const resourceTarget = await generateSubnetProxyTargetV2( const resourceTarget = generateSubnetProxyTargetV2(
resource, resource,
resourceClients resourceClients
); );

View File

@@ -56,7 +56,7 @@ export const handleGetConfigMessage: MessageHandler = async (context) => {
if (existingSite.lastHolePunch && now - existingSite.lastHolePunch > 5) { if (existingSite.lastHolePunch && now - existingSite.lastHolePunch > 5) {
logger.warn( logger.warn(
`Site last hole punch is too old; skipping this register. The site is failing to hole punch and identify its network address with the server. Can the site reach the server on UDP port ${config.getRawConfig().gerbil.clients_start_port}?` `Site last hole punch is too old; skipping this register. The site is failing to hole punch and identify its network address with the server. Can the client reach the server on UDP port ${config.getRawConfig().gerbil.clients_start_port}?`
); );
return; return;
} }

View File

@@ -1,9 +0,0 @@
import { MessageHandler } from "@server/routers/ws";
/**
 * No-op flush kept for API compatibility: this build does not buffer
 * request logs, so there is never anything to write out.
 */
export async function flushRequestLogToDb(): Promise<void> {
    // Intentionally empty.
}
/**
 * No-op `newt/request-log` handler kept for API compatibility:
 * this build discards request log messages.
 */
export const handleRequestLogMessage: MessageHandler = async (_context) => {
    // Intentionally empty.
};

View File

@@ -9,5 +9,4 @@ export * from "./handleApplyBlueprintMessage";
export * from "./handleNewtPingMessage"; export * from "./handleNewtPingMessage";
export * from "./handleNewtDisconnectingMessage"; export * from "./handleNewtDisconnectingMessage";
export * from "./handleConnectionLogMessage"; export * from "./handleConnectionLogMessage";
export * from "./handleRequestLogMessage";
export * from "./registerNewt"; export * from "./registerNewt";

View File

@@ -1,6 +1,6 @@
import { db } from "@server/db"; import { db } from "@server/db";
import { sites, clients, olms } from "@server/db"; import { sites, clients, olms } from "@server/db";
import { inArray } from "drizzle-orm"; import { eq, inArray } from "drizzle-orm";
import logger from "@server/logger"; import logger from "@server/logger";
/** /**
@@ -21,7 +21,7 @@ import logger from "@server/logger";
*/ */
const FLUSH_INTERVAL_MS = 10_000; // Flush every 10 seconds const FLUSH_INTERVAL_MS = 10_000; // Flush every 10 seconds
const MAX_RETRIES = 5; const MAX_RETRIES = 2;
const BASE_DELAY_MS = 50; const BASE_DELAY_MS = 50;
// ── Site (newt) pings ────────────────────────────────────────────────── // ── Site (newt) pings ──────────────────────────────────────────────────
@@ -36,14 +36,6 @@ const pendingOlmArchiveResets: Set<string> = new Set();
let flushTimer: NodeJS.Timeout | null = null; let flushTimer: NodeJS.Timeout | null = null;
/**
* Guard that prevents two flush cycles from running concurrently.
* setInterval does not await async callbacks, so without this a slow flush
* (e.g. due to DB latency) would overlap with the next scheduled cycle and
* the two concurrent bulk UPDATEs would deadlock each other.
*/
let isFlushing = false;
// ── Public API ───────────────────────────────────────────────────────── // ── Public API ─────────────────────────────────────────────────────────
/** /**
@@ -80,12 +72,6 @@ export function recordClientPing(
/** /**
* Flush all accumulated site pings to the database. * Flush all accumulated site pings to the database.
*
* Each batch of up to BATCH_SIZE rows is written with a **single** UPDATE
* statement. We use the maximum timestamp across the batch so that `lastPing`
* reflects the most recent ping seen for any site in the group. This avoids
* the multi-statement transaction that previously created additional
* row-lock ordering hazards.
*/ */
async function flushSitePingsToDb(): Promise<void> { async function flushSitePingsToDb(): Promise<void> {
if (pendingSitePings.size === 0) { if (pendingSitePings.size === 0) {
@@ -97,35 +83,55 @@ async function flushSitePingsToDb(): Promise<void> {
const pingsToFlush = new Map(pendingSitePings); const pingsToFlush = new Map(pendingSitePings);
pendingSitePings.clear(); pendingSitePings.clear();
const entries = Array.from(pingsToFlush.entries()); // Sort by siteId for consistent lock ordering (prevents deadlocks)
const sortedEntries = Array.from(pingsToFlush.entries()).sort(
([a], [b]) => a - b
);
const BATCH_SIZE = 50; const BATCH_SIZE = 50;
for (let i = 0; i < entries.length; i += BATCH_SIZE) { for (let i = 0; i < sortedEntries.length; i += BATCH_SIZE) {
const batch = entries.slice(i, i + BATCH_SIZE); const batch = sortedEntries.slice(i, i + BATCH_SIZE);
// Use the latest timestamp in the batch so that `lastPing` always
// moves forward. Using a single timestamp for the whole batch means
// we only ever need one UPDATE statement (no transaction).
const maxTimestamp = Math.max(...batch.map(([, ts]) => ts));
const siteIds = batch.map(([id]) => id);
try { try {
await withRetry(async () => { await withRetry(async () => {
await db // Group by timestamp for efficient bulk updates
.update(sites) const byTimestamp = new Map<number, number[]>();
.set({ for (const [siteId, timestamp] of batch) {
online: true, const group = byTimestamp.get(timestamp) || [];
lastPing: maxTimestamp group.push(siteId);
}) byTimestamp.set(timestamp, group);
.where(inArray(sites.siteId, siteIds)); }
if (byTimestamp.size === 1) {
const [timestamp, siteIds] = Array.from(
byTimestamp.entries()
)[0];
await db
.update(sites)
.set({
online: true,
lastPing: timestamp
})
.where(inArray(sites.siteId, siteIds));
} else {
await db.transaction(async (tx) => {
for (const [timestamp, siteIds] of byTimestamp) {
await tx
.update(sites)
.set({
online: true,
lastPing: timestamp
})
.where(inArray(sites.siteId, siteIds));
}
});
}
}, "flushSitePingsToDb"); }, "flushSitePingsToDb");
} catch (error) { } catch (error) {
logger.error( logger.error(
`Failed to flush site ping batch (${batch.length} sites), re-queuing for next cycle`, `Failed to flush site ping batch (${batch.length} sites), re-queuing for next cycle`,
{ error } { error }
); );
// Re-queue only if the preserved timestamp is newer than any
// update that may have landed since we snapshotted.
for (const [siteId, timestamp] of batch) { for (const [siteId, timestamp] of batch) {
const existing = pendingSitePings.get(siteId); const existing = pendingSitePings.get(siteId);
if (!existing || existing < timestamp) { if (!existing || existing < timestamp) {
@@ -138,8 +144,6 @@ async function flushSitePingsToDb(): Promise<void> {
/** /**
* Flush all accumulated client (OLM) pings to the database. * Flush all accumulated client (OLM) pings to the database.
*
* Same single-UPDATE-per-batch approach as `flushSitePingsToDb`.
*/ */
async function flushClientPingsToDb(): Promise<void> { async function flushClientPingsToDb(): Promise<void> {
if (pendingClientPings.size === 0 && pendingOlmArchiveResets.size === 0) { if (pendingClientPings.size === 0 && pendingOlmArchiveResets.size === 0) {
@@ -155,25 +159,51 @@ async function flushClientPingsToDb(): Promise<void> {
// ── Flush client pings ───────────────────────────────────────────── // ── Flush client pings ─────────────────────────────────────────────
if (pingsToFlush.size > 0) { if (pingsToFlush.size > 0) {
const entries = Array.from(pingsToFlush.entries()); const sortedEntries = Array.from(pingsToFlush.entries()).sort(
([a], [b]) => a - b
);
const BATCH_SIZE = 50; const BATCH_SIZE = 50;
for (let i = 0; i < entries.length; i += BATCH_SIZE) { for (let i = 0; i < sortedEntries.length; i += BATCH_SIZE) {
const batch = entries.slice(i, i + BATCH_SIZE); const batch = sortedEntries.slice(i, i + BATCH_SIZE);
const maxTimestamp = Math.max(...batch.map(([, ts]) => ts));
const clientIds = batch.map(([id]) => id);
try { try {
await withRetry(async () => { await withRetry(async () => {
await db const byTimestamp = new Map<number, number[]>();
.update(clients) for (const [clientId, timestamp] of batch) {
.set({ const group = byTimestamp.get(timestamp) || [];
lastPing: maxTimestamp, group.push(clientId);
online: true, byTimestamp.set(timestamp, group);
archived: false }
})
.where(inArray(clients.clientId, clientIds)); if (byTimestamp.size === 1) {
const [timestamp, clientIds] = Array.from(
byTimestamp.entries()
)[0];
await db
.update(clients)
.set({
lastPing: timestamp,
online: true,
archived: false
})
.where(inArray(clients.clientId, clientIds));
} else {
await db.transaction(async (tx) => {
for (const [timestamp, clientIds] of byTimestamp) {
await tx
.update(clients)
.set({
lastPing: timestamp,
online: true,
archived: false
})
.where(
inArray(clients.clientId, clientIds)
);
}
});
}
}, "flushClientPingsToDb"); }, "flushClientPingsToDb");
} catch (error) { } catch (error) {
logger.error( logger.error(
@@ -230,12 +260,7 @@ export async function flushPingsToDb(): Promise<void> {
/** /**
* Simple retry wrapper with exponential backoff for transient errors * Simple retry wrapper with exponential backoff for transient errors
* (deadlocks, connection timeouts, unexpected disconnects). * (connection timeouts, unexpected disconnects).
*
* PostgreSQL deadlocks (40P01) are always safe to retry: the database
* guarantees exactly one winner per deadlock pair, so the loser just needs
* to try again. MAX_RETRIES is intentionally higher than typical connection
* retry budgets to give deadlock victims enough chances to succeed.
*/ */
async function withRetry<T>( async function withRetry<T>(
operation: () => Promise<T>, operation: () => Promise<T>,
@@ -252,8 +277,7 @@ async function withRetry<T>(
const jitter = Math.random() * baseDelay; const jitter = Math.random() * baseDelay;
const delay = baseDelay + jitter; const delay = baseDelay + jitter;
logger.warn( logger.warn(
`Transient DB error in ${context}, retrying attempt ${attempt}/${MAX_RETRIES} after ${delay.toFixed(0)}ms`, `Transient DB error in ${context}, retrying attempt ${attempt}/${MAX_RETRIES} after ${delay.toFixed(0)}ms`
{ code: error?.code ?? error?.cause?.code }
); );
await new Promise((resolve) => setTimeout(resolve, delay)); await new Promise((resolve) => setTimeout(resolve, delay));
continue; continue;
@@ -264,14 +288,14 @@ async function withRetry<T>(
} }
/** /**
* Detect transient errors that are safe to retry. * Detect transient connection errors that are safe to retry.
*/ */
function isTransientError(error: any): boolean { function isTransientError(error: any): boolean {
if (!error) return false; if (!error) return false;
const message = (error.message || "").toLowerCase(); const message = (error.message || "").toLowerCase();
const causeMessage = (error.cause?.message || "").toLowerCase(); const causeMessage = (error.cause?.message || "").toLowerCase();
const code = error.code || error.cause?.code || ""; const code = error.code || "";
// Connection timeout / terminated // Connection timeout / terminated
if ( if (
@@ -284,17 +308,12 @@ function isTransientError(error: any): boolean {
return true; return true;
} }
// PostgreSQL deadlock detected — always safe to retry (one winner guaranteed) // PostgreSQL deadlock
if (code === "40P01" || message.includes("deadlock")) { if (code === "40P01" || message.includes("deadlock")) {
return true; return true;
} }
// PostgreSQL serialization failure // ECONNRESET, ECONNREFUSED, EPIPE
if (code === "40001") {
return true;
}
// ECONNRESET, ECONNREFUSED, EPIPE, ETIMEDOUT
if ( if (
code === "ECONNRESET" || code === "ECONNRESET" ||
code === "ECONNREFUSED" || code === "ECONNREFUSED" ||
@@ -318,26 +337,12 @@ export function startPingAccumulator(): void {
} }
flushTimer = setInterval(async () => { flushTimer = setInterval(async () => {
// Skip this tick if the previous flush is still in progress.
// setInterval does not await async callbacks, so without this guard
// two flush cycles can run concurrently and deadlock each other on
// overlapping bulk UPDATE statements.
if (isFlushing) {
logger.debug(
"Ping accumulator: previous flush still in progress, skipping cycle"
);
return;
}
isFlushing = true;
try { try {
await flushPingsToDb(); await flushPingsToDb();
} catch (error) { } catch (error) {
logger.error("Unhandled error in ping accumulator flush", { logger.error("Unhandled error in ping accumulator flush", {
error error
}); });
} finally {
isFlushing = false;
} }
}, FLUSH_INTERVAL_MS); }, FLUSH_INTERVAL_MS);
@@ -359,22 +364,7 @@ export async function stopPingAccumulator(): Promise<void> {
flushTimer = null; flushTimer = null;
} }
// Final flush to persist any remaining pings. // Final flush to persist any remaining pings
// Wait for any in-progress flush to finish first so we don't race.
if (isFlushing) {
logger.debug(
"Ping accumulator: waiting for in-progress flush before stopping…"
);
await new Promise<void>((resolve) => {
const poll = setInterval(() => {
if (!isFlushing) {
clearInterval(poll);
resolve();
}
}, 50);
});
}
try { try {
await flushPingsToDb(); await flushPingsToDb();
} catch (error) { } catch (error) {
@@ -389,4 +379,4 @@ export async function stopPingAccumulator(): Promise<void> {
*/ */
export function getPendingPingCount(): number { export function getPendingPingCount(): number {
return pendingSitePings.size + pendingClientPings.size; return pendingSitePings.size + pendingClientPings.size;
} }

View File

@@ -4,8 +4,6 @@ import {
clientSitesAssociationsCache, clientSitesAssociationsCache,
db, db,
exitNodes, exitNodes,
networks,
siteNetworks,
siteResources, siteResources,
sites sites
} from "@server/db"; } from "@server/db";
@@ -61,17 +59,9 @@ export async function buildSiteConfigurationForOlmClient(
clientSiteResourcesAssociationsCache.siteResourceId clientSiteResourcesAssociationsCache.siteResourceId
) )
) )
.innerJoin(
networks,
eq(siteResources.networkId, networks.networkId)
)
.innerJoin(
siteNetworks,
eq(networks.networkId, siteNetworks.networkId)
)
.where( .where(
and( and(
eq(siteNetworks.siteId, site.siteId), eq(siteResources.siteId, site.siteId),
eq( eq(
clientSiteResourcesAssociationsCache.clientId, clientSiteResourcesAssociationsCache.clientId,
client.clientId client.clientId
@@ -79,7 +69,6 @@ export async function buildSiteConfigurationForOlmClient(
) )
); );
if (jitMode) { if (jitMode) {
// Add site configuration to the array // Add site configuration to the array
siteConfigurations.push({ siteConfigurations.push({

View File

@@ -17,6 +17,7 @@ import { getUserDeviceName } from "@server/db/names";
import { buildSiteConfigurationForOlmClient } from "./buildConfiguration"; import { buildSiteConfigurationForOlmClient } from "./buildConfiguration";
import { OlmErrorCodes, sendOlmError } from "./error"; import { OlmErrorCodes, sendOlmError } from "./error";
import { handleFingerprintInsertion } from "./fingerprintingUtils"; import { handleFingerprintInsertion } from "./fingerprintingUtils";
import { Alias } from "@server/lib/ip";
import { build } from "@server/build"; import { build } from "@server/build";
import { canCompress } from "@server/lib/clientVersionChecks"; import { canCompress } from "@server/lib/clientVersionChecks";
import config from "@server/lib/config"; import config from "@server/lib/config";

View File

@@ -4,12 +4,10 @@ import {
db, db,
exitNodes, exitNodes,
Site, Site,
siteNetworks, siteResources
siteResources,
sites
} from "@server/db"; } from "@server/db";
import { MessageHandler } from "@server/routers/ws"; import { MessageHandler } from "@server/routers/ws";
import { clients, Olm } from "@server/db"; import { clients, Olm, sites } from "@server/db";
import { and, eq, or } from "drizzle-orm"; import { and, eq, or } from "drizzle-orm";
import logger from "@server/logger"; import logger from "@server/logger";
import { initPeerAddHandshake } from "./peers"; import { initPeerAddHandshake } from "./peers";
@@ -46,31 +44,20 @@ export const handleOlmServerInitAddPeerHandshake: MessageHandler = async (
const { siteId, resourceId, chainId } = message.data; const { siteId, resourceId, chainId } = message.data;
const sendCancel = async () => { let site: Site | null = null;
await sendToClient(
olm.olmId,
{
type: "olm/wg/peer/chain/cancel",
data: { chainId }
},
{ incrementConfigVersion: false }
).catch((error) => {
logger.warn(`Error sending message:`, error);
});
};
let sitesToProcess: Site[] = [];
if (siteId) { if (siteId) {
// get the site
const [siteRes] = await db const [siteRes] = await db
.select() .select()
.from(sites) .from(sites)
.where(eq(sites.siteId, siteId)) .where(eq(sites.siteId, siteId))
.limit(1); .limit(1);
if (siteRes) { if (siteRes) {
sitesToProcess = [siteRes]; site = siteRes;
} }
} else if (resourceId) { }
if (resourceId && !site) {
const resources = await db const resources = await db
.select() .select()
.from(siteResources) .from(siteResources)
@@ -85,17 +72,27 @@ export const handleOlmServerInitAddPeerHandshake: MessageHandler = async (
); );
if (!resources || resources.length === 0) { if (!resources || resources.length === 0) {
logger.error( logger.error(`handleOlmServerPeerAddMessage: Resource not found`);
`handleOlmServerInitAddPeerHandshake: Resource not found` // cancel the request from the olm side to not keep doing this
); await sendToClient(
await sendCancel(); olm.olmId,
{
type: "olm/wg/peer/chain/cancel",
data: {
chainId
}
},
{ incrementConfigVersion: false }
).catch((error) => {
logger.warn(`Error sending message:`, error);
});
return; return;
} }
if (resources.length > 1) { if (resources.length > 1) {
// error but this should not happen because the nice id cant contain a dot and the alias has to have a dot and both have to be unique within the org so there should never be multiple matches // error but this should not happen because the nice id cant contain a dot and the alias has to have a dot and both have to be unique within the org so there should never be multiple matches
logger.error( logger.error(
`handleOlmServerInitAddPeerHandshake: Multiple resources found matching the criteria` `handleOlmServerPeerAddMessage: Multiple resources found matching the criteria`
); );
return; return;
} }
@@ -120,120 +117,125 @@ export const handleOlmServerInitAddPeerHandshake: MessageHandler = async (
if (currentResourceAssociationCaches.length === 0) { if (currentResourceAssociationCaches.length === 0) {
logger.error( logger.error(
`handleOlmServerInitAddPeerHandshake: Client ${client.clientId} does not have access to resource ${resource.siteResourceId}` `handleOlmServerPeerAddMessage: Client ${client.clientId} does not have access to resource ${resource.siteResourceId}`
); );
await sendCancel(); // cancel the request from the olm side to not keep doing this
await sendToClient(
olm.olmId,
{
type: "olm/wg/peer/chain/cancel",
data: {
chainId
}
},
{ incrementConfigVersion: false }
).catch((error) => {
logger.warn(`Error sending message:`, error);
});
return; return;
} }
if (!resource.networkId) { const siteIdFromResource = resource.siteId;
// get the site
const [siteRes] = await db
.select()
.from(sites)
.where(eq(sites.siteId, siteIdFromResource));
if (!siteRes) {
logger.error( logger.error(
`handleOlmServerInitAddPeerHandshake: Resource ${resource.siteResourceId} has no network` `handleOlmServerPeerAddMessage: Site with ID ${site} not found`
); );
await sendCancel();
return; return;
} }
// Get all sites associated with this resource's network via siteNetworks site = siteRes;
const siteRows = await db
.select({ siteId: siteNetworks.siteId })
.from(siteNetworks)
.where(eq(siteNetworks.networkId, resource.networkId));
if (!siteRows || siteRows.length === 0) {
logger.error(
`handleOlmServerInitAddPeerHandshake: No sites found for resource ${resource.siteResourceId}`
);
await sendCancel();
return;
}
// Fetch full site objects for all network members
const foundSites = await Promise.all(
siteRows.map(async ({ siteId: sid }) => {
const [s] = await db
.select()
.from(sites)
.where(eq(sites.siteId, sid))
.limit(1);
return s ?? null;
})
);
sitesToProcess = foundSites.filter((s): s is Site => s !== null);
} }
if (sitesToProcess.length === 0) { if (!site) {
logger.error( logger.error(`handleOlmServerPeerAddMessage: Site not found`);
`handleOlmServerInitAddPeerHandshake: No sites to process`
);
await sendCancel();
return; return;
} }
let handshakeInitiated = false; // check if the client can access this site using the cache
const currentSiteAssociationCaches = await db
.select()
.from(clientSitesAssociationsCache)
.where(
and(
eq(clientSitesAssociationsCache.clientId, client.clientId),
eq(clientSitesAssociationsCache.siteId, site.siteId)
)
);
for (const site of sitesToProcess) { if (currentSiteAssociationCaches.length === 0) {
// Check if the client can access this site using the cache logger.error(
const currentSiteAssociationCaches = await db `handleOlmServerPeerAddMessage: Client ${client.clientId} does not have access to site ${site.siteId}`
.select() );
.from(clientSitesAssociationsCache) // cancel the request from the olm side to not keep doing this
.where( await sendToClient(
and( olm.olmId,
eq(clientSitesAssociationsCache.clientId, client.clientId),
eq(clientSitesAssociationsCache.siteId, site.siteId)
)
);
if (currentSiteAssociationCaches.length === 0) {
logger.warn(
`handleOlmServerInitAddPeerHandshake: Client ${client.clientId} does not have access to site ${site.siteId}, skipping`
);
continue;
}
if (!site.exitNodeId) {
logger.error(
`handleOlmServerInitAddPeerHandshake: Site ${site.siteId} has no exit node, skipping`
);
continue;
}
const [exitNode] = await db
.select()
.from(exitNodes)
.where(eq(exitNodes.exitNodeId, site.exitNodeId));
if (!exitNode) {
logger.error(
`handleOlmServerInitAddPeerHandshake: Exit node not found for site ${site.siteId}, skipping`
);
continue;
}
// Trigger the peer add handshake — if the peer was already added this will be a no-op
await initPeerAddHandshake(
client.clientId,
{ {
siteId: site.siteId, type: "olm/wg/peer/chain/cancel",
exitNode: { data: {
publicKey: exitNode.publicKey, chainId
endpoint: exitNode.endpoint
} }
}, },
olm.olmId, { incrementConfigVersion: false }
chainId ).catch((error) => {
); logger.warn(`Error sending message:`, error);
});
handshakeInitiated = true; return;
} }
if (!handshakeInitiated) { if (!site.exitNodeId) {
logger.error( logger.error(
`handleOlmServerInitAddPeerHandshake: No accessible sites with valid exit nodes found, cancelling chain` `handleOlmServerPeerAddMessage: Site with ID ${site.siteId} has no exit node`
); );
await sendCancel(); // cancel the request from the olm side to not keep doing this
await sendToClient(
olm.olmId,
{
type: "olm/wg/peer/chain/cancel",
data: {
chainId
}
},
{ incrementConfigVersion: false }
).catch((error) => {
logger.warn(`Error sending message:`, error);
});
return;
} }
// get the exit node from the side
const [exitNode] = await db
.select()
.from(exitNodes)
.where(eq(exitNodes.exitNodeId, site.exitNodeId));
if (!exitNode) {
logger.error(
`handleOlmServerPeerAddMessage: Site with ID ${site.siteId} has no exit node`
);
return;
}
// also trigger the peer add handshake in case the peer was not already added to the olm and we need to hole punch
// if it has already been added this will be a no-op
await initPeerAddHandshake(
// this will kick off the add peer process for the client
client.clientId,
{
siteId: site.siteId,
exitNode: {
publicKey: exitNode.publicKey,
endpoint: exitNode.endpoint
}
},
olm.olmId,
chainId
);
return; return;
}; };

View File

@@ -1,25 +1,43 @@
import { import {
Client,
clientSiteResourcesAssociationsCache, clientSiteResourcesAssociationsCache,
db, db,
networks, ExitNode,
siteNetworks, Org,
orgs,
roleClients,
roles,
siteResources, siteResources,
Transaction,
userClients,
userOrgs,
users
} from "@server/db"; } from "@server/db";
import { MessageHandler } from "@server/routers/ws"; import { MessageHandler } from "@server/routers/ws";
import { import {
clients, clients,
clientSitesAssociationsCache, clientSitesAssociationsCache,
exitNodes,
Olm, Olm,
olms,
sites sites
} from "@server/db"; } from "@server/db";
import { and, eq, inArray, isNotNull, isNull } from "drizzle-orm"; import { and, eq, inArray, isNotNull, isNull } from "drizzle-orm";
import { addPeer, deletePeer } from "../newt/peers";
import logger from "@server/logger"; import logger from "@server/logger";
import { listExitNodes } from "#dynamic/lib/exitNodes";
import { import {
generateAliasConfig, generateAliasConfig,
getNextAvailableClientSubnet
} from "@server/lib/ip"; } from "@server/lib/ip";
import { generateRemoteSubnets } from "@server/lib/ip"; import { generateRemoteSubnets } from "@server/lib/ip";
import { rebuildClientAssociationsFromClient } from "@server/lib/rebuildClientAssociations";
import { checkOrgAccessPolicy } from "#dynamic/lib/checkOrgAccessPolicy";
import { validateSessionToken } from "@server/auth/sessions/app";
import config from "@server/lib/config";
import { import {
addPeer as newtAddPeer, addPeer as newtAddPeer,
deletePeer as newtDeletePeer
} from "@server/routers/newt/peers"; } from "@server/routers/newt/peers";
export const handleOlmServerPeerAddMessage: MessageHandler = async ( export const handleOlmServerPeerAddMessage: MessageHandler = async (
@@ -135,21 +153,13 @@ export const handleOlmServerPeerAddMessage: MessageHandler = async (
clientSiteResourcesAssociationsCache.siteResourceId clientSiteResourcesAssociationsCache.siteResourceId
) )
) )
.innerJoin(
networks,
eq(siteResources.networkId, networks.networkId)
)
.innerJoin(
siteNetworks,
and(
eq(networks.networkId, siteNetworks.networkId),
eq(siteNetworks.siteId, site.siteId)
)
)
.where( .where(
eq( and(
clientSiteResourcesAssociationsCache.clientId, eq(siteResources.siteId, site.siteId),
client.clientId eq(
clientSiteResourcesAssociationsCache.clientId,
client.clientId
)
) )
); );

View File

@@ -1,6 +1,6 @@
import { Request, Response, NextFunction } from "express"; import { Request, Response, NextFunction } from "express";
import { z } from "zod"; import { z } from "zod";
import { db, domainNamespaces, loginPage } from "@server/db"; import { db, loginPage } from "@server/db";
import { import {
domains, domains,
orgDomains, orgDomains,
@@ -24,8 +24,6 @@ import { build } from "@server/build";
import { createCertificate } from "#dynamic/routers/certificates/createCertificate"; import { createCertificate } from "#dynamic/routers/certificates/createCertificate";
import { getUniqueResourceName } from "@server/db/names"; import { getUniqueResourceName } from "@server/db/names";
import { validateAndConstructDomain } from "@server/lib/domainUtils"; import { validateAndConstructDomain } from "@server/lib/domainUtils";
import { isSubscribed } from "#dynamic/lib/isSubscribed";
import { tierMatrix } from "@server/lib/billing/tierMatrix";
const createResourceParamsSchema = z.strictObject({ const createResourceParamsSchema = z.strictObject({
orgId: z.string() orgId: z.string()
@@ -114,10 +112,7 @@ export async function createResource(
const { orgId } = parsedParams.data; const { orgId } = parsedParams.data;
if ( if (req.user && (!req.userOrgRoleIds || req.userOrgRoleIds.length === 0)) {
req.user &&
(!req.userOrgRoleIds || req.userOrgRoleIds.length === 0)
) {
return next( return next(
createHttpError(HttpCode.FORBIDDEN, "User does not have a role") createHttpError(HttpCode.FORBIDDEN, "User does not have a role")
); );
@@ -198,29 +193,6 @@ async function createHttpResource(
const subdomain = parsedBody.data.subdomain; const subdomain = parsedBody.data.subdomain;
const stickySession = parsedBody.data.stickySession; const stickySession = parsedBody.data.stickySession;
if (build == "saas" && !isSubscribed(orgId!, tierMatrix.domainNamespaces)) {
// grandfather in existing users
const lastAllowedDate = new Date("2026-04-12");
const userCreatedDate = new Date(req.user?.dateCreated || new Date());
if (userCreatedDate > lastAllowedDate) {
// check if this domain id is a namespace domain and if so, reject
const domain = await db
.select()
.from(domainNamespaces)
.where(eq(domainNamespaces.domainId, domainId))
.limit(1);
if (domain.length > 0) {
return next(
createHttpError(
HttpCode.BAD_REQUEST,
"Your current subscription does not support custom domain namespaces. Please upgrade to access this feature."
)
);
}
}
}
// Validate domain and construct full domain // Validate domain and construct full domain
const domainResult = await validateAndConstructDomain( const domainResult = await validateAndConstructDomain(
domainId, domainId,

View File

@@ -144,7 +144,7 @@ export async function getUserResources(
name: string; name: string;
destination: string; destination: string;
mode: string; mode: string;
scheme: string | null; protocol: string | null;
enabled: boolean; enabled: boolean;
alias: string | null; alias: string | null;
aliasAddress: string | null; aliasAddress: string | null;
@@ -156,7 +156,7 @@ export async function getUserResources(
name: siteResources.name, name: siteResources.name,
destination: siteResources.destination, destination: siteResources.destination,
mode: siteResources.mode, mode: siteResources.mode,
scheme: siteResources.scheme, protocol: siteResources.protocol,
enabled: siteResources.enabled, enabled: siteResources.enabled,
alias: siteResources.alias, alias: siteResources.alias,
aliasAddress: siteResources.aliasAddress aliasAddress: siteResources.aliasAddress
@@ -240,7 +240,7 @@ export async function getUserResources(
name: siteResource.name, name: siteResource.name,
destination: siteResource.destination, destination: siteResource.destination,
mode: siteResource.mode, mode: siteResource.mode,
protocol: siteResource.scheme, protocol: siteResource.protocol,
enabled: siteResource.enabled, enabled: siteResource.enabled,
alias: siteResource.alias, alias: siteResource.alias,
aliasAddress: siteResource.aliasAddress, aliasAddress: siteResource.aliasAddress,
@@ -289,7 +289,7 @@ export type GetUserResourcesResponse = {
enabled: boolean; enabled: boolean;
alias: string | null; alias: string | null;
aliasAddress: string | null; aliasAddress: string | null;
type: "site"; type: 'site';
}>; }>;
}; };
}; };

View File

@@ -1,6 +1,6 @@
import { Request, Response, NextFunction } from "express"; import { Request, Response, NextFunction } from "express";
import { z } from "zod"; import { z } from "zod";
import { db, domainNamespaces, loginPage } from "@server/db"; import { db, loginPage } from "@server/db";
import { import {
domains, domains,
Org, Org,
@@ -25,7 +25,6 @@ import { validateAndConstructDomain } from "@server/lib/domainUtils";
import { build } from "@server/build"; import { build } from "@server/build";
import { isLicensedOrSubscribed } from "#dynamic/lib/isLicencedOrSubscribed"; import { isLicensedOrSubscribed } from "#dynamic/lib/isLicencedOrSubscribed";
import { tierMatrix } from "@server/lib/billing/tierMatrix"; import { tierMatrix } from "@server/lib/billing/tierMatrix";
import { isSubscribed } from "#dynamic/lib/isSubscribed";
const updateResourceParamsSchema = z.strictObject({ const updateResourceParamsSchema = z.strictObject({
resourceId: z.string().transform(Number).pipe(z.int().positive()) resourceId: z.string().transform(Number).pipe(z.int().positive())
@@ -121,9 +120,7 @@ const updateHttpResourceBodySchema = z
if (data.headers) { if (data.headers) {
// HTTP header values must be visible ASCII or horizontal whitespace, no control chars (RFC 7230) // HTTP header values must be visible ASCII or horizontal whitespace, no control chars (RFC 7230)
const validHeaderValue = /^[\t\x20-\x7E]*$/; const validHeaderValue = /^[\t\x20-\x7E]*$/;
return data.headers.every((h) => return data.headers.every((h) => validHeaderValue.test(h.value));
validHeaderValue.test(h.value)
);
} }
return true; return true;
}, },
@@ -321,34 +318,6 @@ async function updateHttpResource(
if (updateData.domainId) { if (updateData.domainId) {
const domainId = updateData.domainId; const domainId = updateData.domainId;
if (
build == "saas" &&
!isSubscribed(resource.orgId, tierMatrix.domainNamespaces)
) {
// grandfather in existing users
const lastAllowedDate = new Date("2026-04-12");
const userCreatedDate = new Date(
req.user?.dateCreated || new Date()
);
if (userCreatedDate > lastAllowedDate) {
// check if this domain id is a namespace domain and if so, reject
const domain = await db
.select()
.from(domainNamespaces)
.where(eq(domainNamespaces.domainId, domainId))
.limit(1);
if (domain.length > 0) {
return next(
createHttpError(
HttpCode.BAD_REQUEST,
"Your current subscription does not support custom domain namespaces. Please upgrade to access this feature."
)
);
}
}
}
// Validate domain and construct full domain // Validate domain and construct full domain
const domainResult = await validateAndConstructDomain( const domainResult = await validateAndConstructDomain(
domainId, domainId,
@@ -397,7 +366,7 @@ async function updateHttpResource(
); );
} }
} }
if (build != "oss") { if (build != "oss") {
const existingLoginPages = await db const existingLoginPages = await db
.select() .select()

View File

@@ -1,6 +1,6 @@
import { Request, Response, NextFunction } from "express"; import { Request, Response, NextFunction } from "express";
import { z } from "zod"; import { z } from "zod";
import { db, Site, siteNetworks, siteResources } from "@server/db"; import { db, Site, siteResources } from "@server/db";
import { newts, newtSessions, sites } from "@server/db"; import { newts, newtSessions, sites } from "@server/db";
import { eq } from "drizzle-orm"; import { eq } from "drizzle-orm";
import response from "@server/lib/response"; import response from "@server/lib/response";
@@ -71,23 +71,18 @@ export async function deleteSite(
await deletePeer(site.exitNodeId!, site.pubKey); await deletePeer(site.exitNodeId!, site.pubKey);
} }
} else if (site.type == "newt") { } else if (site.type == "newt") {
const networks = await trx // delete all of the site resources on this site
.select({ networkId: siteNetworks.networkId }) const siteResourcesOnSite = trx
.from(siteNetworks) .delete(siteResources)
.where(eq(siteNetworks.siteId, siteId)); .where(eq(siteResources.siteId, siteId))
.returning();
// loop through them // loop through them
for (const network of await networks) { for (const removedSiteResource of await siteResourcesOnSite) {
const [siteResource] = await trx await rebuildClientAssociationsFromSiteResource(
.select() removedSiteResource,
.from(siteResources) trx
.where(eq(siteResources.networkId, network.networkId)); );
if (siteResource) {
await rebuildClientAssociationsFromSiteResource(
siteResource,
trx
);
}
} }
// get the newt on the site by querying the newt table for siteId // get the newt on the site by querying the newt table for siteId

View File

@@ -5,8 +5,6 @@ import {
orgs, orgs,
roles, roles,
roleSiteResources, roleSiteResources,
siteNetworks,
networks,
SiteResource, SiteResource,
siteResources, siteResources,
sites, sites,
@@ -19,18 +17,17 @@ import {
portRangeStringSchema portRangeStringSchema
} from "@server/lib/ip"; } from "@server/lib/ip";
import { isLicensedOrSubscribed } from "#dynamic/lib/isLicencedOrSubscribed"; import { isLicensedOrSubscribed } from "#dynamic/lib/isLicencedOrSubscribed";
import { TierFeature, tierMatrix } from "@server/lib/billing/tierMatrix"; import { tierMatrix } from "@server/lib/billing/tierMatrix";
import { rebuildClientAssociationsFromSiteResource } from "@server/lib/rebuildClientAssociations"; import { rebuildClientAssociationsFromSiteResource } from "@server/lib/rebuildClientAssociations";
import response from "@server/lib/response"; import response from "@server/lib/response";
import logger from "@server/logger"; import logger from "@server/logger";
import { OpenAPITags, registry } from "@server/openApi"; import { OpenAPITags, registry } from "@server/openApi";
import HttpCode from "@server/types/HttpCode"; import HttpCode from "@server/types/HttpCode";
import { and, eq, inArray } from "drizzle-orm"; import { and, eq } from "drizzle-orm";
import { NextFunction, Request, Response } from "express"; import { NextFunction, Request, Response } from "express";
import createHttpError from "http-errors"; import createHttpError from "http-errors";
import { z } from "zod"; import { z } from "zod";
import { fromError } from "zod-validation-error"; import { fromError } from "zod-validation-error";
import { validateAndConstructDomain } from "@server/lib/domainUtils";
const createSiteResourceParamsSchema = z.strictObject({ const createSiteResourceParamsSchema = z.strictObject({
orgId: z.string() orgId: z.string()
@@ -39,12 +36,11 @@ const createSiteResourceParamsSchema = z.strictObject({
const createSiteResourceSchema = z const createSiteResourceSchema = z
.strictObject({ .strictObject({
name: z.string().min(1).max(255), name: z.string().min(1).max(255),
mode: z.enum(["host", "cidr", "http"]), mode: z.enum(["host", "cidr", "port"]),
ssl: z.boolean().optional(), // only used for http mode siteId: z.int(),
scheme: z.enum(["http", "https"]).optional(), // protocol: z.enum(["tcp", "udp"]).optional(),
siteIds: z.array(z.int()),
// proxyPort: z.int().positive().optional(), // proxyPort: z.int().positive().optional(),
destinationPort: z.int().positive().optional(), // destinationPort: z.int().positive().optional(),
destination: z.string().min(1), destination: z.string().min(1),
enabled: z.boolean().default(true), enabled: z.boolean().default(true),
alias: z alias: z
@@ -61,24 +57,20 @@ const createSiteResourceSchema = z
udpPortRangeString: portRangeStringSchema, udpPortRangeString: portRangeStringSchema,
disableIcmp: z.boolean().optional(), disableIcmp: z.boolean().optional(),
authDaemonPort: z.int().positive().optional(), authDaemonPort: z.int().positive().optional(),
authDaemonMode: z.enum(["site", "remote"]).optional(), authDaemonMode: z.enum(["site", "remote"]).optional()
domainId: z.string().optional(), // only used for http mode, we need this to verify the alias is unique within the org
subdomain: z.string().optional() // only used for http mode, we need this to verify the alias is unique within the org
}) })
.strict() .strict()
.refine( .refine(
(data) => { (data) => {
if (data.mode === "host") { if (data.mode === "host") {
if (data.mode == "host") { // Check if it's a valid IP address using zod (v4 or v6)
// Check if it's a valid IP address using zod (v4 or v6) const isValidIP = z
const isValidIP = z // .union([z.ipv4(), z.ipv6()])
// .union([z.ipv4(), z.ipv6()]) .union([z.ipv4()]) // for now lets just do ipv4 until we verify ipv6 works everywhere
.union([z.ipv4()]) // for now lets just do ipv4 until we verify ipv6 works everywhere .safeParse(data.destination).success;
.safeParse(data.destination).success;
if (isValidIP) { if (isValidIP) {
return true; return true;
}
} }
// Check if it's a valid domain (hostname pattern, TLD not required) // Check if it's a valid domain (hostname pattern, TLD not required)
@@ -113,21 +105,6 @@ const createSiteResourceSchema = z
{ {
message: "Destination must be a valid CIDR notation for cidr mode" message: "Destination must be a valid CIDR notation for cidr mode"
} }
)
.refine(
(data) => {
if (data.mode !== "http") return true;
return (
data.scheme !== undefined &&
data.destinationPort !== undefined &&
data.destinationPort >= 1 &&
data.destinationPort <= 65535
);
},
{
message:
"HTTP mode requires scheme (http or https) and a valid destination port"
}
); );
export type CreateSiteResourceBody = z.infer<typeof createSiteResourceSchema>; export type CreateSiteResourceBody = z.infer<typeof createSiteResourceSchema>;
@@ -182,14 +159,13 @@ export async function createSiteResource(
const { orgId } = parsedParams.data; const { orgId } = parsedParams.data;
const { const {
name, name,
siteIds, siteId,
mode, mode,
scheme, // protocol,
// proxyPort, // proxyPort,
destinationPort, // destinationPort,
destination, destination,
enabled, enabled,
ssl,
alias, alias,
userIds, userIds,
roleIds, roleIds,
@@ -198,37 +174,18 @@ export async function createSiteResource(
udpPortRangeString, udpPortRangeString,
disableIcmp, disableIcmp,
authDaemonPort, authDaemonPort,
authDaemonMode, authDaemonMode
domainId,
subdomain
} = parsedBody.data; } = parsedBody.data;
if (mode == "http") {
const hasHttpFeature = await isLicensedOrSubscribed(
orgId,
tierMatrix[TierFeature.HTTPPrivateResources]
);
if (!hasHttpFeature) {
return next(
createHttpError(
HttpCode.FORBIDDEN,
"HTTP private resources are not included in your current plan. Please upgrade."
)
);
}
}
// Verify the site exists and belongs to the org // Verify the site exists and belongs to the org
const sitesToAssign = await db const [site] = await db
.select() .select()
.from(sites) .from(sites)
.where(and(inArray(sites.siteId, siteIds), eq(sites.orgId, orgId))) .where(and(eq(sites.siteId, siteId), eq(sites.orgId, orgId)))
.limit(1); .limit(1);
if (sitesToAssign.length !== siteIds.length) { if (!site) {
return next( return next(createHttpError(HttpCode.NOT_FOUND, "Site not found"));
createHttpError(HttpCode.NOT_FOUND, "Some site not found")
);
} }
const [org] = await db const [org] = await db
@@ -269,50 +226,29 @@ export async function createSiteResource(
); );
} }
if (domainId && alias) { // // check if resource with same protocol and proxy port already exists (only for port mode)
// throw an error because we can only have one or the other // if (mode === "port" && protocol && proxyPort) {
return next( // const [existingResource] = await db
createHttpError( // .select()
HttpCode.BAD_REQUEST, // .from(siteResources)
"Alias and domain cannot both be set. Please choose one or the other." // .where(
) // and(
); // eq(siteResources.siteId, siteId),
} // eq(siteResources.orgId, orgId),
// eq(siteResources.protocol, protocol),
let fullDomain: string | null = null; // eq(siteResources.proxyPort, proxyPort)
let finalSubdomain: string | null = null; // )
if (domainId) { // )
// Validate domain and construct full domain // .limit(1);
const domainResult = await validateAndConstructDomain( // if (existingResource && existingResource.siteResourceId) {
domainId, // return next(
orgId, // createHttpError(
subdomain // HttpCode.CONFLICT,
); // "A resource with the same protocol and proxy port already exists"
// )
if (!domainResult.success) { // );
return next( // }
createHttpError(HttpCode.BAD_REQUEST, domainResult.error) // }
);
}
fullDomain = domainResult.fullDomain;
finalSubdomain = domainResult.subdomain;
// make sure the full domain is unique
const existingResource = await db
.select()
.from(siteResources)
.where(eq(siteResources.fullDomain, fullDomain));
if (existingResource.length > 0) {
return next(
createHttpError(
HttpCode.CONFLICT,
"Resource with that domain already exists"
)
);
}
}
// make sure the alias is unique within the org if provided // make sure the alias is unique within the org if provided
if (alias) { if (alias) {
@@ -344,49 +280,27 @@ export async function createSiteResource(
const niceId = await getUniqueSiteResourceName(orgId); const niceId = await getUniqueSiteResourceName(orgId);
let aliasAddress: string | null = null; let aliasAddress: string | null = null;
if (mode === "host" || mode === "http") { if (mode == "host") {
// we can only have an alias on a host
aliasAddress = await getNextAvailableAliasAddress(orgId); aliasAddress = await getNextAvailableAliasAddress(orgId);
} }
let newSiteResource: SiteResource | undefined; let newSiteResource: SiteResource | undefined;
await db.transaction(async (trx) => { await db.transaction(async (trx) => {
const [network] = await trx
.insert(networks)
.values({
scope: "resource",
orgId: orgId
})
.returning();
if (!network) {
return next(
createHttpError(
HttpCode.INTERNAL_SERVER_ERROR,
`Failed to create network`
)
);
}
// Create the site resource // Create the site resource
const insertValues: typeof siteResources.$inferInsert = { const insertValues: typeof siteResources.$inferInsert = {
siteId,
niceId, niceId,
orgId, orgId,
name, name,
mode, mode: mode as "host" | "cidr",
ssl,
networkId: network.networkId,
destination, destination,
scheme,
destinationPort,
enabled, enabled,
alias: alias ? alias.trim() : null, alias,
aliasAddress, aliasAddress,
tcpPortRangeString, tcpPortRangeString,
udpPortRangeString, udpPortRangeString,
disableIcmp, disableIcmp
domainId,
subdomain: finalSubdomain,
fullDomain
}; };
if (isLicensedSshPam) { if (isLicensedSshPam) {
if (authDaemonPort !== undefined) if (authDaemonPort !== undefined)
@@ -403,13 +317,6 @@ export async function createSiteResource(
//////////////////// update the associations //////////////////// //////////////////// update the associations ////////////////////
for (const siteId of siteIds) {
await trx.insert(siteNetworks).values({
siteId: siteId,
networkId: network.networkId
});
}
const [adminRole] = await trx const [adminRole] = await trx
.select() .select()
.from(roles) .from(roles)
@@ -452,21 +359,16 @@ export async function createSiteResource(
); );
} }
for (const siteToAssign of sitesToAssign) { const [newt] = await trx
const [newt] = await trx .select()
.select() .from(newts)
.from(newts) .where(eq(newts.siteId, site.siteId))
.where(eq(newts.siteId, siteToAssign.siteId)) .limit(1);
.limit(1);
if (!newt) { if (!newt) {
return next( return next(
createHttpError( createHttpError(HttpCode.NOT_FOUND, "Newt not found")
HttpCode.NOT_FOUND, );
`Newt not found for site ${siteToAssign.siteId}`
)
);
}
} }
await rebuildClientAssociationsFromSiteResource( await rebuildClientAssociationsFromSiteResource(
@@ -485,7 +387,7 @@ export async function createSiteResource(
} }
logger.info( logger.info(
`Created site resource ${newSiteResource.siteResourceId} for org ${orgId}` `Created site resource ${newSiteResource.siteResourceId} for site ${siteId}`
); );
return response(res, { return response(res, {

View File

@@ -70,18 +70,17 @@ export async function deleteSiteResource(
.where(and(eq(siteResources.siteResourceId, siteResourceId))) .where(and(eq(siteResources.siteResourceId, siteResourceId)))
.returning(); .returning();
// not sure why this is here... const [newt] = await trx
// const [newt] = await trx .select()
// .select() .from(newts)
// .from(newts) .where(eq(newts.siteId, removedSiteResource.siteId))
// .where(eq(newts.siteId, removedSiteResource.siteId)) .limit(1);
// .limit(1);
// if (!newt) { if (!newt) {
// return next( return next(
// createHttpError(HttpCode.NOT_FOUND, "Newt not found") createHttpError(HttpCode.NOT_FOUND, "Newt not found")
// ); );
// } }
await rebuildClientAssociationsFromSiteResource( await rebuildClientAssociationsFromSiteResource(
removedSiteResource, removedSiteResource,

View File

@@ -17,34 +17,38 @@ const getSiteResourceParamsSchema = z.strictObject({
.transform((val) => (val ? Number(val) : undefined)) .transform((val) => (val ? Number(val) : undefined))
.pipe(z.int().positive().optional()) .pipe(z.int().positive().optional())
.optional(), .optional(),
siteId: z.string().transform(Number).pipe(z.int().positive()),
niceId: z.string().optional(), niceId: z.string().optional(),
orgId: z.string() orgId: z.string()
}); });
async function query( async function query(
siteResourceId?: number, siteResourceId?: number,
siteId?: number,
niceId?: string, niceId?: string,
orgId?: string orgId?: string
) { ) {
if (siteResourceId && orgId) { if (siteResourceId && siteId && orgId) {
const [siteResource] = await db const [siteResource] = await db
.select() .select()
.from(siteResources) .from(siteResources)
.where( .where(
and( and(
eq(siteResources.siteResourceId, siteResourceId), eq(siteResources.siteResourceId, siteResourceId),
eq(siteResources.siteId, siteId),
eq(siteResources.orgId, orgId) eq(siteResources.orgId, orgId)
) )
) )
.limit(1); .limit(1);
return siteResource; return siteResource;
} else if (niceId && orgId) { } else if (niceId && siteId && orgId) {
const [siteResource] = await db const [siteResource] = await db
.select() .select()
.from(siteResources) .from(siteResources)
.where( .where(
and( and(
eq(siteResources.niceId, niceId), eq(siteResources.niceId, niceId),
eq(siteResources.siteId, siteId),
eq(siteResources.orgId, orgId) eq(siteResources.orgId, orgId)
) )
) )
@@ -80,6 +84,7 @@ registry.registerPath({
request: { request: {
params: z.object({ params: z.object({
niceId: z.string(), niceId: z.string(),
siteId: z.number(),
orgId: z.string() orgId: z.string()
}) })
}, },
@@ -102,10 +107,10 @@ export async function getSiteResource(
); );
} }
const { siteResourceId, niceId, orgId } = parsedParams.data; const { siteResourceId, siteId, niceId, orgId } = parsedParams.data;
// Get the site resource // Get the site resource
const siteResource = await query(siteResourceId, niceId, orgId); const siteResource = await query(siteResourceId, siteId, niceId, orgId);
if (!siteResource) { if (!siteResource) {
return next( return next(

View File

@@ -1,4 +1,4 @@
import { db, SiteResource, siteNetworks, siteResources, sites } from "@server/db"; import { db, SiteResource, siteResources, sites } from "@server/db";
import response from "@server/lib/response"; import response from "@server/lib/response";
import logger from "@server/logger"; import logger from "@server/logger";
import { OpenAPITags, registry } from "@server/openApi"; import { OpenAPITags, registry } from "@server/openApi";
@@ -41,12 +41,12 @@ const listAllSiteResourcesByOrgQuerySchema = z.object({
}), }),
query: z.string().optional(), query: z.string().optional(),
mode: z mode: z
.enum(["host", "cidr", "http"]) .enum(["host", "cidr"])
.optional() .optional()
.catch(undefined) .catch(undefined)
.openapi({ .openapi({
type: "string", type: "string",
enum: ["host", "cidr", "http"], enum: ["host", "cidr"],
description: "Filter site resources by mode" description: "Filter site resources by mode"
}), }),
sort_by: z sort_by: z
@@ -73,11 +73,9 @@ const listAllSiteResourcesByOrgQuerySchema = z.object({
export type ListAllSiteResourcesByOrgResponse = PaginatedResponse<{ export type ListAllSiteResourcesByOrgResponse = PaginatedResponse<{
siteResources: (SiteResource & { siteResources: (SiteResource & {
siteOnlines: boolean[]; siteName: string;
siteIds: number[]; siteNiceId: string;
siteNames: string[]; siteAddress: string | null;
siteNiceIds: string[];
siteAddresses: (string | null)[];
})[]; })[];
}>; }>;
@@ -85,12 +83,12 @@ function querySiteResourcesBase() {
return db return db
.select({ .select({
siteResourceId: siteResources.siteResourceId, siteResourceId: siteResources.siteResourceId,
siteId: siteResources.siteId,
orgId: siteResources.orgId, orgId: siteResources.orgId,
niceId: siteResources.niceId, niceId: siteResources.niceId,
name: siteResources.name, name: siteResources.name,
mode: siteResources.mode, mode: siteResources.mode,
ssl: siteResources.ssl, protocol: siteResources.protocol,
scheme: siteResources.scheme,
proxyPort: siteResources.proxyPort, proxyPort: siteResources.proxyPort,
destinationPort: siteResources.destinationPort, destinationPort: siteResources.destinationPort,
destination: siteResources.destination, destination: siteResources.destination,
@@ -102,24 +100,14 @@ function querySiteResourcesBase() {
disableIcmp: siteResources.disableIcmp, disableIcmp: siteResources.disableIcmp,
authDaemonMode: siteResources.authDaemonMode, authDaemonMode: siteResources.authDaemonMode,
authDaemonPort: siteResources.authDaemonPort, authDaemonPort: siteResources.authDaemonPort,
subdomain: siteResources.subdomain, siteName: sites.name,
domainId: siteResources.domainId, siteNiceId: sites.niceId,
fullDomain: siteResources.fullDomain, siteAddress: sites.address
networkId: siteResources.networkId,
defaultNetworkId: siteResources.defaultNetworkId,
siteNames: sql<string[]>`array_agg(${sites.name})`,
siteNiceIds: sql<string[]>`array_agg(${sites.niceId})`,
siteIds: sql<number[]>`array_agg(${sites.siteId})`,
siteAddresses: sql<(string | null)[]>`array_agg(${sites.address})`,
siteOnlines: sql<boolean[]>`array_agg(${sites.online})`
}) })
.from(siteResources) .from(siteResources)
.innerJoin(siteNetworks, eq(siteResources.networkId, siteNetworks.networkId)) .innerJoin(sites, eq(siteResources.siteId, sites.siteId));
.innerJoin(sites, eq(siteNetworks.siteId, sites.siteId))
.groupBy(siteResources.siteResourceId);
} }
registry.registerPath({ registry.registerPath({
method: "get", method: "get",
path: "/org/{orgId}/site-resources", path: "/org/{orgId}/site-resources",
@@ -205,9 +193,7 @@ export async function listAllSiteResourcesByOrg(
const baseQuery = querySiteResourcesBase().where(and(...conditions)); const baseQuery = querySiteResourcesBase().where(and(...conditions));
const countQuery = db.$count( const countQuery = db.$count(
querySiteResourcesBase() querySiteResourcesBase().where(and(...conditions)).as("filtered_site_resources")
.where(and(...conditions))
.as("filtered_site_resources")
); );
const [siteResourcesList, totalCount] = await Promise.all([ const [siteResourcesList, totalCount] = await Promise.all([

View File

@@ -1,6 +1,6 @@
import { Request, Response, NextFunction } from "express"; import { Request, Response, NextFunction } from "express";
import { z } from "zod"; import { z } from "zod";
import { db, networks, siteNetworks } from "@server/db"; import { db } from "@server/db";
import { siteResources, sites, SiteResource } from "@server/db"; import { siteResources, sites, SiteResource } from "@server/db";
import response from "@server/lib/response"; import response from "@server/lib/response";
import HttpCode from "@server/types/HttpCode"; import HttpCode from "@server/types/HttpCode";
@@ -108,21 +108,13 @@ export async function listSiteResources(
return next(createHttpError(HttpCode.NOT_FOUND, "Site not found")); return next(createHttpError(HttpCode.NOT_FOUND, "Site not found"));
} }
// Get site resources by joining networks to siteResources via siteNetworks // Get site resources
const siteResourcesList = await db const siteResourcesList = await db
.select() .select()
.from(siteNetworks) .from(siteResources)
.innerJoin(
networks,
eq(siteNetworks.networkId, networks.networkId)
)
.innerJoin(
siteResources,
eq(siteResources.networkId, networks.networkId)
)
.where( .where(
and( and(
eq(siteNetworks.siteId, siteId), eq(siteResources.siteId, siteId),
eq(siteResources.orgId, orgId) eq(siteResources.orgId, orgId)
) )
) )
@@ -136,7 +128,6 @@ export async function listSiteResources(
.limit(limit) .limit(limit)
.offset(offset); .offset(offset);
return response(res, { return response(res, {
data: { siteResources: siteResourcesList }, data: { siteResources: siteResourcesList },
success: true, success: true,

View File

@@ -1,3 +1,4 @@
import { isLicensedOrSubscribed } from "#dynamic/lib/isLicencedOrSubscribed";
import { import {
clientSiteResources, clientSiteResources,
clientSiteResourcesAssociationsCache, clientSiteResourcesAssociationsCache,
@@ -6,21 +7,13 @@ import {
orgs, orgs,
roles, roles,
roleSiteResources, roleSiteResources,
siteNetworks,
SiteResource, SiteResource,
siteResources, siteResources,
sites, sites,
networks,
Transaction, Transaction,
userSiteResources userSiteResources
} from "@server/db"; } from "@server/db";
import { isLicensedOrSubscribed } from "#dynamic/lib/isLicencedOrSubscribed"; import { tierMatrix } from "@server/lib/billing/tierMatrix";
import { TierFeature, tierMatrix } from "@server/lib/billing/tierMatrix";
import { validateAndConstructDomain } from "@server/lib/domainUtils";
import response from "@server/lib/response";
import { eq, and, ne, inArray } from "drizzle-orm";
import { OpenAPITags, registry } from "@server/openApi";
import { updatePeerData, updateTargets } from "@server/routers/client/targets";
import { import {
generateAliasConfig, generateAliasConfig,
generateRemoteSubnets, generateRemoteSubnets,
@@ -29,8 +22,12 @@ import {
portRangeStringSchema portRangeStringSchema
} from "@server/lib/ip"; } from "@server/lib/ip";
import { rebuildClientAssociationsFromSiteResource } from "@server/lib/rebuildClientAssociations"; import { rebuildClientAssociationsFromSiteResource } from "@server/lib/rebuildClientAssociations";
import response from "@server/lib/response";
import logger from "@server/logger"; import logger from "@server/logger";
import { OpenAPITags, registry } from "@server/openApi";
import { updatePeerData, updateTargets } from "@server/routers/client/targets";
import HttpCode from "@server/types/HttpCode"; import HttpCode from "@server/types/HttpCode";
import { and, eq, ne } from "drizzle-orm";
import { NextFunction, Request, Response } from "express"; import { NextFunction, Request, Response } from "express";
import createHttpError from "http-errors"; import createHttpError from "http-errors";
import { z } from "zod"; import { z } from "zod";
@@ -43,8 +40,7 @@ const updateSiteResourceParamsSchema = z.strictObject({
const updateSiteResourceSchema = z const updateSiteResourceSchema = z
.strictObject({ .strictObject({
name: z.string().min(1).max(255).optional(), name: z.string().min(1).max(255).optional(),
siteIds: z.array(z.int()), siteId: z.int(),
// niceId: z.string().min(1).max(255).regex(/^[a-zA-Z0-9-]+$/, "niceId can only contain letters, numbers, and dashes").optional(),
niceId: z niceId: z
.string() .string()
.min(1) .min(1)
@@ -55,11 +51,10 @@ const updateSiteResourceSchema = z
) )
.optional(), .optional(),
// mode: z.enum(["host", "cidr", "port"]).optional(), // mode: z.enum(["host", "cidr", "port"]).optional(),
mode: z.enum(["host", "cidr", "http"]).optional(), mode: z.enum(["host", "cidr"]).optional(),
ssl: z.boolean().optional(), // protocol: z.enum(["tcp", "udp"]).nullish(),
scheme: z.enum(["http", "https"]).nullish(),
// proxyPort: z.int().positive().nullish(), // proxyPort: z.int().positive().nullish(),
destinationPort: z.int().positive().nullish(), // destinationPort: z.int().positive().nullish(),
destination: z.string().min(1).optional(), destination: z.string().min(1).optional(),
enabled: z.boolean().optional(), enabled: z.boolean().optional(),
alias: z alias: z
@@ -76,9 +71,7 @@ const updateSiteResourceSchema = z
udpPortRangeString: portRangeStringSchema, udpPortRangeString: portRangeStringSchema,
disableIcmp: z.boolean().optional(), disableIcmp: z.boolean().optional(),
authDaemonPort: z.int().positive().nullish(), authDaemonPort: z.int().positive().nullish(),
authDaemonMode: z.enum(["site", "remote"]).optional(), authDaemonMode: z.enum(["site", "remote"]).optional()
domainId: z.string().optional(),
subdomain: z.string().optional()
}) })
.strict() .strict()
.refine( .refine(
@@ -125,23 +118,6 @@ const updateSiteResourceSchema = z
{ {
message: "Destination must be a valid CIDR notation for cidr mode" message: "Destination must be a valid CIDR notation for cidr mode"
} }
)
.refine(
(data) => {
if (data.mode !== "http") return true;
return (
data.scheme !== undefined &&
data.scheme !== null &&
data.destinationPort !== undefined &&
data.destinationPort !== null &&
data.destinationPort >= 1 &&
data.destinationPort <= 65535
);
},
{
message:
"HTTP mode requires scheme (http or https) and a valid destination port"
}
); );
export type UpdateSiteResourceBody = z.infer<typeof updateSiteResourceSchema>; export type UpdateSiteResourceBody = z.infer<typeof updateSiteResourceSchema>;
@@ -196,14 +172,11 @@ export async function updateSiteResource(
const { siteResourceId } = parsedParams.data; const { siteResourceId } = parsedParams.data;
const { const {
name, name,
siteIds, // because it can change siteId, // because it can change
niceId, niceId,
mode, mode,
scheme,
destination, destination,
destinationPort,
alias, alias,
ssl,
enabled, enabled,
userIds, userIds,
roleIds, roleIds,
@@ -212,11 +185,19 @@ export async function updateSiteResource(
udpPortRangeString, udpPortRangeString,
disableIcmp, disableIcmp,
authDaemonPort, authDaemonPort,
authDaemonMode, authDaemonMode
domainId,
subdomain
} = parsedBody.data; } = parsedBody.data;
const [site] = await db
.select()
.from(sites)
.where(eq(sites.siteId, siteId))
.limit(1);
if (!site) {
return next(createHttpError(HttpCode.NOT_FOUND, "Site not found"));
}
// Check if site resource exists // Check if site resource exists
const [existingSiteResource] = await db const [existingSiteResource] = await db
.select() .select()
@@ -230,21 +211,6 @@ export async function updateSiteResource(
); );
} }
if (mode == "http") {
const hasHttpFeature = await isLicensedOrSubscribed(
existingSiteResource.orgId,
tierMatrix[TierFeature.HTTPPrivateResources]
);
if (!hasHttpFeature) {
return next(
createHttpError(
HttpCode.FORBIDDEN,
"HTTP private resources are not included in your current plan. Please upgrade."
)
);
}
}
const isLicensedSshPam = await isLicensedOrSubscribed( const isLicensedSshPam = await isLicensedOrSubscribed(
existingSiteResource.orgId, existingSiteResource.orgId,
tierMatrix.sshPam tierMatrix.sshPam
@@ -271,24 +237,6 @@ export async function updateSiteResource(
); );
} }
// Verify the site exists and belongs to the org
const sitesToAssign = await db
.select()
.from(sites)
.where(
and(
inArray(sites.siteId, siteIds),
eq(sites.orgId, existingSiteResource.orgId)
)
)
.limit(1);
if (sitesToAssign.length !== siteIds.length) {
return next(
createHttpError(HttpCode.NOT_FOUND, "Some site not found")
);
}
// Only check if destination is an IP address // Only check if destination is an IP address
const isIp = z const isIp = z
.union([z.ipv4(), z.ipv6()]) .union([z.ipv4(), z.ipv6()])
@@ -306,60 +254,22 @@ export async function updateSiteResource(
); );
} }
let sitesChanged = false; let existingSite = site;
const existingSiteIds = existingSiteResource.networkId let siteChanged = false;
? await db if (existingSiteResource.siteId !== siteId) {
.select() siteChanged = true;
.from(siteNetworks) // get the existing site
.where( [existingSite] = await db
eq(siteNetworks.networkId, existingSiteResource.networkId)
)
: [];
const existingSiteIdSet = new Set(existingSiteIds.map((s) => s.siteId));
const newSiteIdSet = new Set(siteIds);
if (
existingSiteIdSet.size !== newSiteIdSet.size ||
![...existingSiteIdSet].every((id) => newSiteIdSet.has(id))
) {
sitesChanged = true;
}
let fullDomain: string | null = null;
let finalSubdomain: string | null = null;
if (domainId) {
// Validate domain and construct full domain
const domainResult = await validateAndConstructDomain(
domainId,
org.orgId,
subdomain
);
if (!domainResult.success) {
return next(
createHttpError(HttpCode.BAD_REQUEST, domainResult.error)
);
}
fullDomain = domainResult.fullDomain;
finalSubdomain = domainResult.subdomain;
// make sure the full domain is unique
const [existingDomain] = await db
.select() .select()
.from(siteResources) .from(sites)
.where(eq(siteResources.fullDomain, fullDomain)); .where(eq(sites.siteId, existingSiteResource.siteId))
.limit(1);
if ( if (!existingSite) {
existingDomain &&
existingDomain.siteResourceId !==
existingSiteResource.siteResourceId
) {
return next( return next(
createHttpError( createHttpError(
HttpCode.CONFLICT, HttpCode.NOT_FOUND,
"Resource with that domain already exists" "Existing site not found"
) )
); );
} }
@@ -392,7 +302,7 @@ export async function updateSiteResource(
let updatedSiteResource: SiteResource | undefined; let updatedSiteResource: SiteResource | undefined;
await db.transaction(async (trx) => { await db.transaction(async (trx) => {
// if the site is changed we need to delete and recreate the resource to avoid complications with the rebuild function otherwise we can just update in place // if the site is changed we need to delete and recreate the resource to avoid complications with the rebuild function otherwise we can just update in place
if (sitesChanged) { if (siteChanged) {
// delete the existing site resource // delete the existing site resource
await trx await trx
.delete(siteResources) .delete(siteResources)
@@ -433,20 +343,15 @@ export async function updateSiteResource(
.update(siteResources) .update(siteResources)
.set({ .set({
name, name,
siteId,
niceId, niceId,
mode, mode,
scheme,
ssl,
destination, destination,
destinationPort,
enabled, enabled,
alias: alias ? alias.trim() : null, alias: alias && alias.trim() ? alias : null,
tcpPortRangeString, tcpPortRangeString,
udpPortRangeString, udpPortRangeString,
disableIcmp, disableIcmp,
domainId,
subdomain: finalSubdomain,
fullDomain,
...sshPamSet ...sshPamSet
}) })
.where( .where(
@@ -542,20 +447,14 @@ export async function updateSiteResource(
.update(siteResources) .update(siteResources)
.set({ .set({
name: name, name: name,
niceId: niceId, siteId: siteId,
mode: mode, mode: mode,
scheme,
ssl,
destination: destination, destination: destination,
destinationPort: destinationPort,
enabled: enabled, enabled: enabled,
alias: alias ? alias.trim() : null, alias: alias && alias.trim() ? alias : null,
tcpPortRangeString: tcpPortRangeString, tcpPortRangeString: tcpPortRangeString,
udpPortRangeString: udpPortRangeString, udpPortRangeString: udpPortRangeString,
disableIcmp: disableIcmp, disableIcmp: disableIcmp,
domainId,
subdomain: finalSubdomain,
fullDomain,
...sshPamSet ...sshPamSet
}) })
.where( .where(
@@ -565,23 +464,6 @@ export async function updateSiteResource(
//////////////////// update the associations //////////////////// //////////////////// update the associations ////////////////////
// delete the site - site resources associations
await trx
.delete(siteNetworks)
.where(
eq(
siteNetworks.networkId,
updatedSiteResource.networkId!
)
);
for (const siteId of siteIds) {
await trx.insert(siteNetworks).values({
siteId: siteId,
networkId: updatedSiteResource.networkId!
});
}
await trx await trx
.delete(clientSiteResources) .delete(clientSiteResources)
.where( .where(
@@ -651,15 +533,14 @@ export async function updateSiteResource(
); );
} }
logger.info(`Updated site resource ${siteResourceId}`); logger.info(
`Updated site resource ${siteResourceId} for site ${siteId}`
);
await handleMessagingForUpdatedSiteResource( await handleMessagingForUpdatedSiteResource(
existingSiteResource, existingSiteResource,
updatedSiteResource, updatedSiteResource,
siteIds.map((siteId) => ({ { siteId: site.siteId, orgId: site.orgId },
siteId,
orgId: existingSiteResource.orgId
})),
trx trx
); );
} }
@@ -686,7 +567,7 @@ export async function updateSiteResource(
export async function handleMessagingForUpdatedSiteResource( export async function handleMessagingForUpdatedSiteResource(
existingSiteResource: SiteResource | undefined, existingSiteResource: SiteResource | undefined,
updatedSiteResource: SiteResource, updatedSiteResource: SiteResource,
sites: { siteId: number; orgId: string }[], site: { siteId: number; orgId: string },
trx: Transaction trx: Transaction
) { ) {
logger.debug( logger.debug(
@@ -708,14 +589,9 @@ export async function handleMessagingForUpdatedSiteResource(
const destinationChanged = const destinationChanged =
existingSiteResource && existingSiteResource &&
existingSiteResource.destination !== updatedSiteResource.destination; existingSiteResource.destination !== updatedSiteResource.destination;
const destinationPortChanged =
existingSiteResource &&
existingSiteResource.destinationPort !==
updatedSiteResource.destinationPort;
const aliasChanged = const aliasChanged =
existingSiteResource && existingSiteResource &&
(existingSiteResource.alias !== updatedSiteResource.alias || existingSiteResource.alias !== updatedSiteResource.alias;
existingSiteResource.fullDomain !== updatedSiteResource.fullDomain); // because the full domain gets sent down to the stuff as an alias
const portRangesChanged = const portRangesChanged =
existingSiteResource && existingSiteResource &&
(existingSiteResource.tcpPortRangeString !== (existingSiteResource.tcpPortRangeString !==
@@ -727,113 +603,106 @@ export async function handleMessagingForUpdatedSiteResource(
// if the existingSiteResource is undefined (new resource) we don't need to do anything here, the rebuild above handled it all // if the existingSiteResource is undefined (new resource) we don't need to do anything here, the rebuild above handled it all
if (destinationChanged || aliasChanged || portRangesChanged || destinationPortChanged) { if (destinationChanged || aliasChanged || portRangesChanged) {
for (const site of sites) { const [newt] = await trx
const [newt] = await trx .select()
.select() .from(newts)
.from(newts) .where(eq(newts.siteId, site.siteId))
.where(eq(newts.siteId, site.siteId)) .limit(1);
.limit(1);
if (!newt) { if (!newt) {
throw new Error( throw new Error(
"Newt not found for site during site resource update" "Newt not found for site during site resource update"
); );
}
// Only update targets on newt if destination changed
if (destinationChanged || portRangesChanged || destinationPortChanged) {
const oldTarget = await generateSubnetProxyTargetV2(
existingSiteResource,
mergedAllClients
);
const newTarget = await generateSubnetProxyTargetV2(
updatedSiteResource,
mergedAllClients
);
await updateTargets(
newt.newtId,
{
oldTargets: oldTarget ? [oldTarget] : [],
newTargets: newTarget ? [newTarget] : []
},
newt.version
);
}
const olmJobs: Promise<void>[] = [];
for (const client of mergedAllClients) {
// does this client have access to another resource on this site that has the same destination still? if so we dont want to remove it from their olm yet
// todo: optimize this query if needed
const oldDestinationStillInUseSites = await trx
.select()
.from(siteResources)
.innerJoin(
clientSiteResourcesAssociationsCache,
eq(
clientSiteResourcesAssociationsCache.siteResourceId,
siteResources.siteResourceId
)
)
.innerJoin(
siteNetworks,
eq(siteNetworks.networkId, siteResources.networkId)
)
.where(
and(
eq(
clientSiteResourcesAssociationsCache.clientId,
client.clientId
),
eq(siteNetworks.siteId, site.siteId),
eq(
siteResources.destination,
existingSiteResource.destination
),
ne(
siteResources.siteResourceId,
existingSiteResource.siteResourceId
)
)
);
const oldDestinationStillInUseByASite =
oldDestinationStillInUseSites.length > 0;
// we also need to update the remote subnets on the olms for each client that has access to this site
olmJobs.push(
updatePeerData(
client.clientId,
site.siteId,
destinationChanged
? {
oldRemoteSubnets:
!oldDestinationStillInUseByASite
? generateRemoteSubnets([
existingSiteResource
])
: [],
newRemoteSubnets: generateRemoteSubnets([
updatedSiteResource
])
}
: undefined,
aliasChanged
? {
oldAliases: generateAliasConfig([
existingSiteResource
]),
newAliases: generateAliasConfig([
updatedSiteResource
])
}
: undefined
)
);
}
await Promise.all(olmJobs);
} }
// Only update targets on newt if destination changed
if (destinationChanged || portRangesChanged) {
const oldTarget = generateSubnetProxyTargetV2(
existingSiteResource,
mergedAllClients
);
const newTarget = generateSubnetProxyTargetV2(
updatedSiteResource,
mergedAllClients
);
await updateTargets(
newt.newtId,
{
oldTargets: oldTarget ? [oldTarget] : [],
newTargets: newTarget ? [newTarget] : []
},
newt.version
);
}
const olmJobs: Promise<void>[] = [];
for (const client of mergedAllClients) {
// does this client have access to another resource on this site that has the same destination still? if so we dont want to remove it from their olm yet
// todo: optimize this query if needed
const oldDestinationStillInUseSites = await trx
.select()
.from(siteResources)
.innerJoin(
clientSiteResourcesAssociationsCache,
eq(
clientSiteResourcesAssociationsCache.siteResourceId,
siteResources.siteResourceId
)
)
.where(
and(
eq(
clientSiteResourcesAssociationsCache.clientId,
client.clientId
),
eq(siteResources.siteId, site.siteId),
eq(
siteResources.destination,
existingSiteResource.destination
),
ne(
siteResources.siteResourceId,
existingSiteResource.siteResourceId
)
)
);
const oldDestinationStillInUseByASite =
oldDestinationStillInUseSites.length > 0;
// we also need to update the remote subnets on the olms for each client that has access to this site
olmJobs.push(
updatePeerData(
client.clientId,
updatedSiteResource.siteId,
destinationChanged
? {
oldRemoteSubnets: !oldDestinationStillInUseByASite
? generateRemoteSubnets([
existingSiteResource
])
: [],
newRemoteSubnets: generateRemoteSubnets([
updatedSiteResource
])
}
: undefined,
aliasChanged
? {
oldAliases: generateAliasConfig([
existingSiteResource
]),
newAliases: generateAliasConfig([
updatedSiteResource
])
}
: undefined
)
);
}
await Promise.all(olmJobs);
} }
} }

View File

@@ -1,14 +1,7 @@
import { Request, Response, NextFunction } from "express"; import { Request, Response, NextFunction } from "express";
import { z } from "zod"; import { z } from "zod";
import { db } from "@server/db"; import { db } from "@server/db";
import { import { orgs, roles, userInviteRoles, userInvites, userOrgs, users } from "@server/db";
orgs,
roles,
userInviteRoles,
userInvites,
userOrgs,
users
} from "@server/db";
import { and, eq, inArray } from "drizzle-orm"; import { and, eq, inArray } from "drizzle-orm";
import response from "@server/lib/response"; import response from "@server/lib/response";
import HttpCode from "@server/types/HttpCode"; import HttpCode from "@server/types/HttpCode";
@@ -44,7 +37,8 @@ const inviteUserBodySchema = z
regenerate: z.boolean().optional() regenerate: z.boolean().optional()
}) })
.refine( .refine(
(d) => (d.roleIds != null && d.roleIds.length > 0) || d.roleId != null, (d) =>
(d.roleIds != null && d.roleIds.length > 0) || d.roleId != null,
{ message: "roleIds or roleId is required", path: ["roleIds"] } { message: "roleIds or roleId is required", path: ["roleIds"] }
) )
.transform((data) => ({ .transform((data) => ({
@@ -271,7 +265,7 @@ export async function inviteUser(
) )
); );
const inviteLink = `${config.getRawConfig().app.dashboard_url}/invite?token=${inviteId}-${token}&email=${email}`; const inviteLink = `${config.getRawConfig().app.dashboard_url}/invite?token=${inviteId}-${token}&email=${encodeURIComponent(email)}`;
if (doEmail) { if (doEmail) {
await sendEmail( await sendEmail(
@@ -320,12 +314,12 @@ export async function inviteUser(
expiresAt, expiresAt,
tokenHash tokenHash
}); });
await trx await trx.insert(userInviteRoles).values(
.insert(userInviteRoles) uniqueRoleIds.map((roleId) => ({ inviteId, roleId }))
.values(uniqueRoleIds.map((roleId) => ({ inviteId, roleId }))); );
}); });
const inviteLink = `${config.getRawConfig().app.dashboard_url}/invite?token=${inviteId}-${token}&email=${email}`; const inviteLink = `${config.getRawConfig().app.dashboard_url}/invite?token=${inviteId}-${token}&email=${encodeURIComponent(email)}`;
if (doEmail) { if (doEmail) {
await sendEmail( await sendEmail(

View File

@@ -235,9 +235,7 @@ export default async function migration() {
for (const row of existingUserInviteRoles) { for (const row of existingUserInviteRoles) {
await db.execute(sql` await db.execute(sql`
INSERT INTO "userInviteRoles" ("inviteId", "roleId") INSERT INTO "userInviteRoles" ("inviteId", "roleId")
SELECT ${row.inviteId}, ${row.roleId} VALUES (${row.inviteId}, ${row.roleId})
WHERE EXISTS (SELECT 1 FROM "userInvites" WHERE "inviteId" = ${row.inviteId})
AND EXISTS (SELECT 1 FROM "roles" WHERE "roleId" = ${row.roleId})
ON CONFLICT DO NOTHING ON CONFLICT DO NOTHING
`); `);
} }
@@ -260,10 +258,7 @@ export default async function migration() {
for (const row of existingUserOrgRoles) { for (const row of existingUserOrgRoles) {
await db.execute(sql` await db.execute(sql`
INSERT INTO "userOrgRoles" ("userId", "orgId", "roleId") INSERT INTO "userOrgRoles" ("userId", "orgId", "roleId")
SELECT ${row.userId}, ${row.orgId}, ${row.roleId} VALUES (${row.userId}, ${row.orgId}, ${row.roleId})
WHERE EXISTS (SELECT 1 FROM "user" WHERE "id" = ${row.userId})
AND EXISTS (SELECT 1 FROM "orgs" WHERE "orgId" = ${row.orgId})
AND EXISTS (SELECT 1 FROM "roles" WHERE "roleId" = ${row.roleId})
ON CONFLICT DO NOTHING ON CONFLICT DO NOTHING
`); `);
} }

View File

@@ -145,7 +145,7 @@ export default async function migration() {
).run(); ).run();
db.prepare( db.prepare(
`INSERT INTO '__new_userOrgs'("userId", "orgId", "isOwner", "autoProvisioned", "pamUsername") SELECT "userId", "orgId", "isOwner", "autoProvisioned", "pamUsername" FROM 'userOrgs' WHERE EXISTS (SELECT 1 FROM 'user' WHERE id = userOrgs.userId) AND EXISTS (SELECT 1 FROM 'orgs' WHERE orgId = userOrgs.orgId);` `INSERT INTO '__new_userOrgs'("userId", "orgId", "isOwner", "autoProvisioned", "pamUsername") SELECT "userId", "orgId", "isOwner", "autoProvisioned", "pamUsername" FROM 'userOrgs';`
).run(); ).run();
db.prepare(`DROP TABLE 'userOrgs';`).run(); db.prepare(`DROP TABLE 'userOrgs';`).run();
db.prepare( db.prepare(
@@ -246,15 +246,12 @@ export default async function migration() {
// Re-insert the preserved invite role assignments into the new userInviteRoles table // Re-insert the preserved invite role assignments into the new userInviteRoles table
if (existingUserInviteRoles.length > 0) { if (existingUserInviteRoles.length > 0) {
const insertUserInviteRole = db.prepare( const insertUserInviteRole = db.prepare(
`INSERT OR IGNORE INTO 'userInviteRoles' ("inviteId", "roleId") `INSERT OR IGNORE INTO 'userInviteRoles' ("inviteId", "roleId") VALUES (?, ?)`
SELECT ?, ?
WHERE EXISTS (SELECT 1 FROM 'userInvites' WHERE inviteId = ?)
AND EXISTS (SELECT 1 FROM 'roles' WHERE roleId = ?)`
); );
const insertAll = db.transaction(() => { const insertAll = db.transaction(() => {
for (const row of existingUserInviteRoles) { for (const row of existingUserInviteRoles) {
insertUserInviteRole.run(row.inviteId, row.roleId, row.inviteId, row.roleId); insertUserInviteRole.run(row.inviteId, row.roleId);
} }
}); });
@@ -268,16 +265,12 @@ export default async function migration() {
// Re-insert the preserved role assignments into the new userOrgRoles table // Re-insert the preserved role assignments into the new userOrgRoles table
if (existingUserOrgRoles.length > 0) { if (existingUserOrgRoles.length > 0) {
const insertUserOrgRole = db.prepare( const insertUserOrgRole = db.prepare(
`INSERT OR IGNORE INTO 'userOrgRoles' ("userId", "orgId", "roleId") `INSERT OR IGNORE INTO 'userOrgRoles' ("userId", "orgId", "roleId") VALUES (?, ?, ?)`
SELECT ?, ?, ?
WHERE EXISTS (SELECT 1 FROM 'user' WHERE id = ?)
AND EXISTS (SELECT 1 FROM 'orgs' WHERE orgId = ?)
AND EXISTS (SELECT 1 FROM 'roles' WHERE roleId = ?)`
); );
const insertAll = db.transaction(() => { const insertAll = db.transaction(() => {
for (const row of existingUserOrgRoles) { for (const row of existingUserOrgRoles) {
insertUserOrgRole.run(row.userId, row.orgId, row.roleId, row.userId, row.orgId, row.roleId); insertUserOrgRole.run(row.userId, row.orgId, row.roleId);
} }
}); });

View File

@@ -10,7 +10,6 @@ import { authCookieHeader } from "@app/lib/api/cookies";
import { GetDNSRecordsResponse } from "@server/routers/domain"; import { GetDNSRecordsResponse } from "@server/routers/domain";
import DNSRecordsTable from "@app/components/DNSRecordTable"; import DNSRecordsTable from "@app/components/DNSRecordTable";
import DomainCertForm from "@app/components/DomainCertForm"; import DomainCertForm from "@app/components/DomainCertForm";
import { build } from "@server/build";
interface DomainSettingsPageProps { interface DomainSettingsPageProps {
params: Promise<{ domainId: string; orgId: string }>; params: Promise<{ domainId: string; orgId: string }>;
@@ -66,14 +65,12 @@ export default async function DomainSettingsPage({
)} )}
</div> </div>
<div className="space-y-6"> <div className="space-y-6">
{build != "oss" && env.flags.usePangolinDns ? ( <DomainInfoCard
<DomainInfoCard failed={domain.failed}
failed={domain.failed} verified={domain.verified}
verified={domain.verified} type={domain.type}
type={domain.type} errorMessage={domain.errorMessage}
errorMessage={domain.errorMessage} />
/>
) : null}
<DNSRecordsTable records={dnsRecords} type={domain.type} /> <DNSRecordsTable records={dnsRecords} type={domain.type} />

View File

@@ -471,7 +471,11 @@ export default function GeneralPage() {
: `/${row.original.orgId}/settings/resources/proxy/${row.original.resourceNiceId}` : `/${row.original.orgId}/settings/resources/proxy/${row.original.resourceNiceId}`
} }
> >
<Button variant="outline" size="sm"> <Button
variant="outline"
size="sm"
className="text-xs h-6"
>
{row.original.resourceName} {row.original.resourceName}
<ArrowUpRight className="ml-2 h-3 w-3" /> <ArrowUpRight className="ml-2 h-3 w-3" />
</Button> </Button>

View File

@@ -451,7 +451,11 @@ export default function ConnectionLogsPage() {
<Link <Link
href={`/${row.original.orgId}/settings/resources/client/?query=${row.original.resourceNiceId}`} href={`/${row.original.orgId}/settings/resources/client/?query=${row.original.resourceNiceId}`}
> >
<Button variant="outline" size="sm"> <Button
variant="outline"
size="sm"
className="text-xs h-6"
>
{row.original.resourceName} {row.original.resourceName}
<ArrowUpRight className="ml-2 h-3 w-3" /> <ArrowUpRight className="ml-2 h-3 w-3" />
</Button> </Button>
@@ -493,7 +497,11 @@ export default function ConnectionLogsPage() {
<Link <Link
href={`/${row.original.orgId}/settings/clients/${clientType}/${row.original.clientNiceId}`} href={`/${row.original.orgId}/settings/clients/${clientType}/${row.original.clientNiceId}`}
> >
<Button variant="outline" size="sm"> <Button
variant="outline"
size="sm"
className="text-xs h-6"
>
<Laptop className="mr-1 h-3 w-3" /> <Laptop className="mr-1 h-3 w-3" />
{row.original.clientName} {row.original.clientName}
<ArrowUpRight className="ml-2 h-3 w-3" /> <ArrowUpRight className="ml-2 h-3 w-3" />
@@ -667,7 +675,9 @@ export default function ConnectionLogsPage() {
<div> <div>
<strong>Ended At:</strong>{" "} <strong>Ended At:</strong>{" "}
{row.endedAt {row.endedAt
? new Date(row.endedAt * 1000).toLocaleString() ? new Date(
row.endedAt * 1000
).toLocaleString()
: "Active"} : "Active"}
</div> </div>
<div> <div>

View File

@@ -360,7 +360,6 @@ export default function GeneralPage() {
// 105 - Valid Password // 105 - Valid Password
// 106 - Valid email // 106 - Valid email
// 107 - Valid SSO // 107 - Valid SSO
// 108 - Connected Client
// 201 - Resource Not Found // 201 - Resource Not Found
// 202 - Resource Blocked // 202 - Resource Blocked
@@ -378,7 +377,6 @@ export default function GeneralPage() {
105: t("validPassword"), 105: t("validPassword"),
106: t("validEmail"), 106: t("validEmail"),
107: t("validSSO"), 107: t("validSSO"),
108: t("connectedClient"),
201: t("resourceNotFound"), 201: t("resourceNotFound"),
202: t("resourceBlocked"), 202: t("resourceBlocked"),
203: t("droppedByRule"), 203: t("droppedByRule"),
@@ -512,14 +510,14 @@ export default function GeneralPage() {
cell: ({ row }) => { cell: ({ row }) => {
return ( return (
<Link <Link
href={ href={`/${row.original.orgId}/settings/resources/proxy/${row.original.resourceNiceId}`}
row.original.reason == 108 // for now the client will only have reason 108 so we know where to go
? `/${row.original.orgId}/settings/resources/client?query=${row.original.resourceNiceId}`
: `/${row.original.orgId}/settings/resources/proxy/${row.original.resourceNiceId}`
}
onClick={(e) => e.stopPropagation()} onClick={(e) => e.stopPropagation()}
> >
<Button variant="outline" size="sm"> <Button
variant="outline"
size="sm"
className="text-xs h-6"
>
{row.original.resourceName} {row.original.resourceName}
<ArrowUpRight className="ml-2 h-3 w-3" /> <ArrowUpRight className="ml-2 h-3 w-3" />
</Button> </Button>
@@ -636,7 +634,6 @@ export default function GeneralPage() {
{ value: "105", label: t("validPassword") }, { value: "105", label: t("validPassword") },
{ value: "106", label: t("validEmail") }, { value: "106", label: t("validEmail") },
{ value: "107", label: t("validSSO") }, { value: "107", label: t("validSSO") },
{ value: "108", label: t("connectedClient") },
{ value: "201", label: t("resourceNotFound") }, { value: "201", label: t("resourceNotFound") },
{ value: "202", label: t("resourceBlocked") }, { value: "202", label: t("resourceBlocked") },
{ value: "203", label: t("droppedByRule") }, { value: "203", label: t("droppedByRule") },

View File

@@ -60,34 +60,23 @@ export default async function ClientResourcesPage(
id: siteResource.siteResourceId, id: siteResource.siteResourceId,
name: siteResource.name, name: siteResource.name,
orgId: params.orgId, orgId: params.orgId,
sites: siteResource.siteIds.map((siteId, idx) => ({ siteName: siteResource.siteName,
siteId, siteAddress: siteResource.siteAddress || null,
siteName: siteResource.siteNames[idx], mode: siteResource.mode || ("port" as any),
siteNiceId: siteResource.siteNiceIds[idx],
online: siteResource.siteOnlines[idx]
})),
mode: siteResource.mode,
scheme: siteResource.scheme,
ssl: siteResource.ssl,
siteNames: siteResource.siteNames,
siteAddresses: siteResource.siteAddresses || null,
// protocol: siteResource.protocol, // protocol: siteResource.protocol,
// proxyPort: siteResource.proxyPort, // proxyPort: siteResource.proxyPort,
siteIds: siteResource.siteIds, siteId: siteResource.siteId,
destination: siteResource.destination, destination: siteResource.destination,
httpHttpsPort: siteResource.destinationPort ?? null, // destinationPort: siteResource.destinationPort,
alias: siteResource.alias || null, alias: siteResource.alias || null,
aliasAddress: siteResource.aliasAddress || null, aliasAddress: siteResource.aliasAddress || null,
siteNiceIds: siteResource.siteNiceIds, siteNiceId: siteResource.siteNiceId,
niceId: siteResource.niceId, niceId: siteResource.niceId,
tcpPortRangeString: siteResource.tcpPortRangeString || null, tcpPortRangeString: siteResource.tcpPortRangeString || null,
udpPortRangeString: siteResource.udpPortRangeString || null, udpPortRangeString: siteResource.udpPortRangeString || null,
disableIcmp: siteResource.disableIcmp || false, disableIcmp: siteResource.disableIcmp || false,
authDaemonMode: siteResource.authDaemonMode ?? null, authDaemonMode: siteResource.authDaemonMode ?? null,
authDaemonPort: siteResource.authDaemonPort ?? null, authDaemonPort: siteResource.authDaemonPort ?? null
subdomain: siteResource.subdomain ?? null,
domainId: siteResource.domainId ?? null,
fullDomain: siteResource.fullDomain ?? null
}; };
} }
); );

View File

@@ -678,7 +678,6 @@ function ProxyResourceTargetsForm({
getPaginationRowModel: getPaginationRowModel(), getPaginationRowModel: getPaginationRowModel(),
getSortedRowModel: getSortedRowModel(), getSortedRowModel: getSortedRowModel(),
getFilteredRowModel: getFilteredRowModel(), getFilteredRowModel: getFilteredRowModel(),
getRowId: (row) => String(row.targetId),
state: { state: {
pagination: { pagination: {
pageIndex: 0, pageIndex: 0,

View File

@@ -999,7 +999,6 @@ export default function Page() {
getPaginationRowModel: getPaginationRowModel(), getPaginationRowModel: getPaginationRowModel(),
getSortedRowModel: getSortedRowModel(), getSortedRowModel: getSortedRowModel(),
getFilteredRowModel: getFilteredRowModel(), getFilteredRowModel: getFilteredRowModel(),
getRowId: (row) => String(row.targetId),
state: { state: {
pagination: { pagination: {
pageIndex: 0, pageIndex: 0,

View File

@@ -1,32 +0,0 @@
import { Metadata } from "next";
import { getTranslations } from "next-intl/server";
import {
Card,
CardContent,
CardHeader,
CardTitle
} from "@app/components/ui/card";
export const dynamic = "force-dynamic";
export const metadata: Metadata = {
title: "Private Placeholder"
};
export default async function MaintenanceScreen() {
const t = await getTranslations();
let title = t("privateMaintenanceScreenTitle");
let message = t("privateMaintenanceScreenMessage");
return (
<div className="min-h-screen flex items-center justify-center p-4">
<Card className="w-full max-w-md">
<CardHeader>
<CardTitle>{title}</CardTitle>
</CardHeader>
<CardContent className="space-y-4">{message}</CardContent>
</Card>
</div>
);
}

View File

@@ -21,7 +21,6 @@ import {
ArrowUp10Icon, ArrowUp10Icon,
ArrowUpDown, ArrowUpDown,
ArrowUpRight, ArrowUpRight,
ChevronDown,
ChevronsUpDownIcon, ChevronsUpDownIcon,
MoreHorizontal MoreHorizontal
} from "lucide-react"; } from "lucide-react";
@@ -39,32 +38,21 @@ import { ControlledDataTable } from "./ui/controlled-data-table";
import { useNavigationContext } from "@app/hooks/useNavigationContext"; import { useNavigationContext } from "@app/hooks/useNavigationContext";
import { useDebouncedCallback } from "use-debounce"; import { useDebouncedCallback } from "use-debounce";
import { ColumnFilterButton } from "./ColumnFilterButton"; import { ColumnFilterButton } from "./ColumnFilterButton";
import { cn } from "@app/lib/cn";
export type InternalResourceSiteRow = {
siteId: number;
siteName: string;
siteNiceId: string;
online: boolean;
};
export type InternalResourceRow = { export type InternalResourceRow = {
id: number; id: number;
name: string; name: string;
orgId: string; orgId: string;
sites: InternalResourceSiteRow[]; siteName: string;
siteNames: string[]; siteAddress: string | null;
siteAddresses: (string | null)[];
siteIds: number[];
siteNiceIds: string[];
// mode: "host" | "cidr" | "port"; // mode: "host" | "cidr" | "port";
mode: "host" | "cidr" | "http"; mode: "host" | "cidr";
scheme: "http" | "https" | null;
ssl: boolean;
// protocol: string | null; // protocol: string | null;
// proxyPort: number | null; // proxyPort: number | null;
siteId: number;
siteNiceId: string;
destination: string; destination: string;
httpHttpsPort: number | null; // destinationPort: number | null;
alias: string | null; alias: string | null;
aliasAddress: string | null; aliasAddress: string | null;
niceId: string; niceId: string;
@@ -73,147 +61,8 @@ export type InternalResourceRow = {
disableIcmp: boolean; disableIcmp: boolean;
authDaemonMode?: "site" | "remote" | null; authDaemonMode?: "site" | "remote" | null;
authDaemonPort?: number | null; authDaemonPort?: number | null;
subdomain?: string | null;
domainId?: string | null;
fullDomain?: string | null;
}; };
function resolveHttpHttpsDisplayPort(
mode: "http",
httpHttpsPort: number | null
): number {
if (httpHttpsPort != null) {
return httpHttpsPort;
}
return 80;
}
function formatDestinationDisplay(row: InternalResourceRow): string {
const { mode, destination, httpHttpsPort, scheme } = row;
if (mode !== "http") {
return destination;
}
const port = resolveHttpHttpsDisplayPort(mode, httpHttpsPort);
const downstreamScheme = scheme ?? "http";
const hostPart =
destination.includes(":") && !destination.startsWith("[")
? `[${destination}]`
: destination;
return `${downstreamScheme}://${hostPart}:${port}`;
}
function isSafeUrlForLink(href: string): boolean {
try {
void new URL(href);
return true;
} catch {
return false;
}
}
type AggregateSitesStatus = "allOnline" | "partial" | "allOffline";
function aggregateSitesStatus(
resourceSites: InternalResourceSiteRow[]
): AggregateSitesStatus {
if (resourceSites.length === 0) {
return "allOffline";
}
const onlineCount = resourceSites.filter((rs) => rs.online).length;
if (onlineCount === resourceSites.length) return "allOnline";
if (onlineCount > 0) return "partial";
return "allOffline";
}
function aggregateStatusDotClass(status: AggregateSitesStatus): string {
switch (status) {
case "allOnline":
return "bg-green-500";
case "partial":
return "bg-yellow-500";
case "allOffline":
default:
return "bg-gray-500";
}
}
function ClientResourceSitesStatusCell({
orgId,
resourceSites
}: {
orgId: string;
resourceSites: InternalResourceSiteRow[];
}) {
const t = useTranslations();
if (resourceSites.length === 0) {
return <span>-</span>;
}
const aggregate = aggregateSitesStatus(resourceSites);
const countLabel = t("multiSitesSelectorSitesCount", {
count: resourceSites.length
});
return (
<DropdownMenu>
<DropdownMenuTrigger asChild>
<Button
variant="ghost"
size="sm"
className="flex h-8 items-center gap-2 px-0 font-normal"
>
<div
className={cn(
"h-2 w-2 shrink-0 rounded-full",
aggregateStatusDotClass(aggregate)
)}
/>
<span className="text-sm tabular-nums">{countLabel}</span>
<ChevronDown className="h-3 w-3 shrink-0" />
</Button>
</DropdownMenuTrigger>
<DropdownMenuContent align="start" className="min-w-56">
{resourceSites.map((site) => {
const isOnline = site.online;
return (
<DropdownMenuItem key={site.siteId} asChild>
<Link
href={`/${orgId}/settings/sites/${site.siteNiceId}`}
className="flex cursor-pointer items-center justify-between gap-4"
>
<div className="flex min-w-0 items-center gap-2">
<div
className={cn(
"h-2 w-2 shrink-0 rounded-full",
isOnline
? "bg-green-500"
: "bg-gray-500"
)}
/>
<span className="truncate">
{site.siteName}
</span>
</div>
<span
className={cn(
"shrink-0 capitalize",
isOnline
? "text-green-600"
: "text-muted-foreground"
)}
>
{isOnline ? t("online") : t("offline")}
</span>
</Link>
</DropdownMenuItem>
);
})}
</DropdownMenuContent>
</DropdownMenu>
);
}
type ClientResourcesTableProps = { type ClientResourcesTableProps = {
internalResources: InternalResourceRow[]; internalResources: InternalResourceRow[];
orgId: string; orgId: string;
@@ -248,6 +97,8 @@ export default function ClientResourcesTable({
useState<InternalResourceRow | null>(); useState<InternalResourceRow | null>();
const [isCreateDialogOpen, setIsCreateDialogOpen] = useState(false); const [isCreateDialogOpen, setIsCreateDialogOpen] = useState(false);
const { data: sites = [] } = useQuery(orgQueries.sites({ orgId }));
const [isRefreshing, startTransition] = useTransition(); const [isRefreshing, startTransition] = useTransition();
const refreshData = () => { const refreshData = () => {
@@ -285,60 +136,6 @@ export default function ClientResourcesTable({
} }
}; };
function SiteCell({ resourceRow }: { resourceRow: InternalResourceRow }) {
const { siteNames, siteNiceIds, orgId } = resourceRow;
if (!siteNames || siteNames.length === 0) {
return <span>-</span>;
}
if (siteNames.length === 1) {
return (
<Link
href={`/${orgId}/settings/sites/${siteNiceIds[0]}`}
>
<Button variant="outline">
{siteNames[0]}
<ArrowUpRight className="ml-2 h-4 w-4" />
</Button>
</Link>
);
}
return (
<DropdownMenu>
<DropdownMenuTrigger asChild>
<Button
variant="outline"
size="sm"
className="flex items-center gap-2"
>
<span>
{siteNames.length} {t("sites")}
</span>
<ChevronDown className="h-3 w-3" />
</Button>
</DropdownMenuTrigger>
<DropdownMenuContent align="start">
{siteNames.map((siteName, idx) => (
<DropdownMenuItem
key={siteNiceIds[idx]}
asChild
>
<Link
href={`/${orgId}/settings/sites/${siteNiceIds[idx]}`}
className="flex items-center gap-2 cursor-pointer"
>
{siteName}
<ArrowUpRight className="h-3 w-3" />
</Link>
</DropdownMenuItem>
))}
</DropdownMenuContent>
</DropdownMenu>
);
}
const internalColumns: ExtendedColumnDef<InternalResourceRow>[] = [ const internalColumns: ExtendedColumnDef<InternalResourceRow>[] = [
{ {
accessorKey: "name", accessorKey: "name",
@@ -388,17 +185,20 @@ export default function ClientResourcesTable({
} }
}, },
{ {
id: "sites", accessorKey: "siteName",
accessorFn: (row) => row.sites.map((s) => s.siteName).join(", "), friendlyName: t("site"),
friendlyName: t("sites"), header: () => <span className="p-3">{t("site")}</span>,
header: () => <span className="p-3">{t("sites")}</span>,
cell: ({ row }) => { cell: ({ row }) => {
const resourceRow = row.original; const resourceRow = row.original;
return ( return (
<ClientResourceSitesStatusCell <Link
orgId={resourceRow.orgId} href={`/${resourceRow.orgId}/settings/sites/${resourceRow.siteNiceId}`}
resourceSites={resourceRow.sites} >
/> <Button variant="outline">
{resourceRow.siteName}
<ArrowUpRight className="ml-2 h-4 w-4" />
</Button>
</Link>
); );
} }
}, },
@@ -415,10 +215,6 @@ export default function ClientResourcesTable({
{ {
value: "cidr", value: "cidr",
label: t("editInternalResourceDialogModeCidr") label: t("editInternalResourceDialogModeCidr")
},
{
value: "http",
label: t("editInternalResourceDialogModeHttp")
} }
]} ]}
selectedValue={searchParams.get("mode") ?? undefined} selectedValue={searchParams.get("mode") ?? undefined}
@@ -431,14 +227,10 @@ export default function ClientResourcesTable({
), ),
cell: ({ row }) => { cell: ({ row }) => {
const resourceRow = row.original; const resourceRow = row.original;
const modeLabels: Record< const modeLabels: Record<"host" | "cidr" | "port", string> = {
"host" | "cidr" | "port" | "http",
string
> = {
host: t("editInternalResourceDialogModeHost"), host: t("editInternalResourceDialogModeHost"),
cidr: t("editInternalResourceDialogModeCidr"), cidr: t("editInternalResourceDialogModeCidr"),
port: t("editInternalResourceDialogModePort"), port: t("editInternalResourceDialogModePort")
http: t("editInternalResourceDialogModeHttp")
}; };
return <span>{modeLabels[resourceRow.mode]}</span>; return <span>{modeLabels[resourceRow.mode]}</span>;
} }
@@ -451,12 +243,11 @@ export default function ClientResourcesTable({
), ),
cell: ({ row }) => { cell: ({ row }) => {
const resourceRow = row.original; const resourceRow = row.original;
const display = formatDestinationDisplay(resourceRow);
return ( return (
<CopyToClipboard <CopyToClipboard
text={display} text={resourceRow.destination}
isLink={false} isLink={false}
displayText={display} displayText={resourceRow.destination}
/> />
); );
} }
@@ -469,26 +260,15 @@ export default function ClientResourcesTable({
), ),
cell: ({ row }) => { cell: ({ row }) => {
const resourceRow = row.original; const resourceRow = row.original;
if (resourceRow.mode === "host" && resourceRow.alias) { return resourceRow.mode === "host" && resourceRow.alias ? (
return ( <CopyToClipboard
<CopyToClipboard text={resourceRow.alias}
text={resourceRow.alias} isLink={false}
isLink={false} displayText={resourceRow.alias}
displayText={resourceRow.alias} />
/> ) : (
); <span>-</span>
} );
if (resourceRow.mode === "http") {
const url = `${resourceRow.ssl ? "https" : "http"}://${resourceRow.fullDomain}`;
return (
<CopyToClipboard
text={url}
isLink={isSafeUrlForLink(url)}
displayText={url}
/>
);
}
return <span>-</span>;
} }
}, },
{ {
@@ -619,7 +399,7 @@ export default function ClientResourcesTable({
onConfirm={async () => onConfirm={async () =>
deleteInternalResource( deleteInternalResource(
selectedInternalResource!.id, selectedInternalResource!.id,
selectedInternalResource!.siteIds[0] selectedInternalResource!.siteId
) )
} }
string={selectedInternalResource.name} string={selectedInternalResource.name}
@@ -653,12 +433,9 @@ export default function ClientResourcesTable({
<EditInternalResourceDialog <EditInternalResourceDialog
open={isEditDialogOpen} open={isEditDialogOpen}
setOpen={setIsEditDialogOpen} setOpen={setIsEditDialogOpen}
resource={{ resource={editingResource}
...editingResource,
siteName: editingResource.siteNames[0] ?? "",
siteId: editingResource.siteIds[0]
}}
orgId={orgId} orgId={orgId}
sites={sites}
onSuccess={() => { onSuccess={() => {
// Delay refresh to allow modal to close smoothly // Delay refresh to allow modal to close smoothly
setTimeout(() => { setTimeout(() => {
@@ -673,6 +450,7 @@ export default function ClientResourcesTable({
open={isCreateDialogOpen} open={isCreateDialogOpen}
setOpen={setIsCreateDialogOpen} setOpen={setIsCreateDialogOpen}
orgId={orgId} orgId={orgId}
sites={sites}
onSuccess={() => { onSuccess={() => {
// Delay refresh to allow modal to close smoothly // Delay refresh to allow modal to close smoothly
setTimeout(() => { setTimeout(() => {

View File

@@ -154,7 +154,7 @@ export default function CreateDomainForm({
const punycodePreview = useMemo(() => { const punycodePreview = useMemo(() => {
if (!baseDomain) return ""; if (!baseDomain) return "";
const punycode = toPunycode(baseDomain.toLowerCase()); const punycode = toPunycode(baseDomain);
return punycode !== baseDomain.toLowerCase() ? punycode : ""; return punycode !== baseDomain.toLowerCase() ? punycode : "";
}, [baseDomain]); }, [baseDomain]);
@@ -239,24 +239,21 @@ export default function CreateDomainForm({
className="space-y-4" className="space-y-4"
id="create-domain-form" id="create-domain-form"
> >
{build != "oss" && env.flags.usePangolinDns ? ( <FormField
<FormField control={form.control}
control={form.control} name="type"
name="type" render={({ field }) => (
render={({ field }) => ( <FormItem>
<FormItem> <StrategySelect
<StrategySelect options={domainOptions}
options={domainOptions} defaultValue={field.value}
defaultValue={field.value} onChange={field.onChange}
onChange={field.onChange} cols={1}
cols={1} />
/> <FormMessage />
<FormMessage /> </FormItem>
</FormItem> )}
)} />
/>
) : null}
<FormField <FormField
control={form.control} control={form.control}
name="baseDomain" name="baseDomain"

View File

@@ -31,6 +31,7 @@ type CreateInternalResourceDialogProps = {
open: boolean; open: boolean;
setOpen: (val: boolean) => void; setOpen: (val: boolean) => void;
orgId: string; orgId: string;
sites: Site[];
onSuccess?: () => void; onSuccess?: () => void;
}; };
@@ -38,21 +39,18 @@ export default function CreateInternalResourceDialog({
open, open,
setOpen, setOpen,
orgId, orgId,
sites,
onSuccess onSuccess
}: CreateInternalResourceDialogProps) { }: CreateInternalResourceDialogProps) {
const t = useTranslations(); const t = useTranslations();
const api = createApiClient(useEnvContext()); const api = createApiClient(useEnvContext());
const [isSubmitting, setIsSubmitting] = useState(false); const [isSubmitting, setIsSubmitting] = useState(false);
const [isHttpModeDisabled, setIsHttpModeDisabled] = useState(false);
async function handleSubmit(values: InternalResourceFormValues) { async function handleSubmit(values: InternalResourceFormValues) {
setIsSubmitting(true); setIsSubmitting(true);
try { try {
let data = { ...values }; let data = { ...values };
if ( if (data.mode === "host" && isHostname(data.destination)) {
(data.mode === "host" || data.mode === "http") &&
isHostname(data.destination)
) {
const currentAlias = data.alias?.trim() || ""; const currentAlias = data.alias?.trim() || "";
if (!currentAlias) { if (!currentAlias) {
let aliasValue = data.destination; let aliasValue = data.destination;
@@ -67,56 +65,25 @@ export default function CreateInternalResourceDialog({
`/org/${orgId}/site-resource`, `/org/${orgId}/site-resource`,
{ {
name: data.name, name: data.name,
siteId: data.siteIds[0], siteId: data.siteId,
mode: data.mode, mode: data.mode,
destination: data.destination, destination: data.destination,
enabled: true, enabled: true,
...(data.mode === "http" && { alias: data.alias && typeof data.alias === "string" && data.alias.trim() ? data.alias : undefined,
scheme: data.scheme, tcpPortRangeString: data.tcpPortRangeString,
ssl: data.ssl ?? false, udpPortRangeString: data.udpPortRangeString,
destinationPort: data.httpHttpsPort ?? undefined, disableIcmp: data.disableIcmp ?? false,
domainId: data.httpConfigDomainId ...(data.authDaemonMode != null && { authDaemonMode: data.authDaemonMode }),
? data.httpConfigDomainId ...(data.authDaemonMode === "remote" && data.authDaemonPort != null && { authDaemonPort: data.authDaemonPort }),
: undefined, roleIds: data.roles ? data.roles.map((r) => parseInt(r.id)) : [],
subdomain: data.httpConfigSubdomain
? data.httpConfigSubdomain
: undefined
}),
...(data.mode === "host" && {
alias:
data.alias &&
typeof data.alias === "string" &&
data.alias.trim()
? data.alias
: undefined,
...(data.authDaemonMode != null && {
authDaemonMode: data.authDaemonMode
}),
...(data.authDaemonMode === "remote" &&
data.authDaemonPort != null && {
authDaemonPort: data.authDaemonPort
})
}),
...((data.mode === "host" || data.mode == "cidr") && {
tcpPortRangeString: data.tcpPortRangeString,
udpPortRangeString: data.udpPortRangeString,
disableIcmp: data.disableIcmp ?? false
}),
roleIds: data.roles
? data.roles.map((r) => parseInt(r.id))
: [],
userIds: data.users ? data.users.map((u) => u.id) : [], userIds: data.users ? data.users.map((u) => u.id) : [],
clientIds: data.clients clientIds: data.clients ? data.clients.map((c) => parseInt(c.id)) : []
? data.clients.map((c) => parseInt(c.id))
: []
} }
); );
toast({ toast({
title: t("createInternalResourceDialogSuccess"), title: t("createInternalResourceDialogSuccess"),
description: t( description: t("createInternalResourceDialogInternalResourceCreatedSuccessfully"),
"createInternalResourceDialogInternalResourceCreatedSuccessfully"
),
variant: "default" variant: "default"
}); });
setOpen(false); setOpen(false);
@@ -126,9 +93,7 @@ export default function CreateInternalResourceDialog({
title: t("createInternalResourceDialogError"), title: t("createInternalResourceDialogError"),
description: formatAxiosError( description: formatAxiosError(
error, error,
t( t("createInternalResourceDialogFailedToCreateInternalResource")
"createInternalResourceDialogFailedToCreateInternalResource"
)
), ),
variant: "destructive" variant: "destructive"
}); });
@@ -141,39 +106,31 @@ export default function CreateInternalResourceDialog({
<Credenza open={open} onOpenChange={setOpen}> <Credenza open={open} onOpenChange={setOpen}>
<CredenzaContent className="max-w-3xl"> <CredenzaContent className="max-w-3xl">
<CredenzaHeader> <CredenzaHeader>
<CredenzaTitle> <CredenzaTitle>{t("createInternalResourceDialogCreateClientResource")}</CredenzaTitle>
{t("createInternalResourceDialogCreateClientResource")}
</CredenzaTitle>
<CredenzaDescription> <CredenzaDescription>
{t( {t("createInternalResourceDialogCreateClientResourceDescription")}
"createInternalResourceDialogCreateClientResourceDescription"
)}
</CredenzaDescription> </CredenzaDescription>
</CredenzaHeader> </CredenzaHeader>
<CredenzaBody> <CredenzaBody>
<InternalResourceForm <InternalResourceForm
variant="create" variant="create"
open={open} open={open}
sites={sites}
orgId={orgId} orgId={orgId}
formId="create-internal-resource-form" formId="create-internal-resource-form"
onSubmit={handleSubmit} onSubmit={handleSubmit}
onSubmitDisabledChange={setIsHttpModeDisabled}
/> />
</CredenzaBody> </CredenzaBody>
<CredenzaFooter> <CredenzaFooter>
<CredenzaClose asChild> <CredenzaClose asChild>
<Button <Button variant="outline" onClick={() => setOpen(false)} disabled={isSubmitting}>
variant="outline"
onClick={() => setOpen(false)}
disabled={isSubmitting}
>
{t("createInternalResourceDialogCancel")} {t("createInternalResourceDialogCancel")}
</Button> </Button>
</CredenzaClose> </CredenzaClose>
<Button <Button
type="submit" type="submit"
form="create-internal-resource-form" form="create-internal-resource-form"
disabled={isSubmitting || isHttpModeDisabled} disabled={isSubmitting}
loading={isSubmitting} loading={isSubmitting}
> >
{t("createInternalResourceDialogCreateResource")} {t("createInternalResourceDialogCreateResource")}

View File

@@ -319,7 +319,6 @@ export default function DeviceLoginForm({
<div className="flex justify-center"> <div className="flex justify-center">
<InputOTP <InputOTP
maxLength={9} maxLength={9}
pattern={REGEXP_ONLY_DIGITS_AND_CHARS}
{...field} {...field}
value={field.value value={field.value
.replace(/-/g, "") .replace(/-/g, "")

View File

@@ -2,7 +2,6 @@
import { Alert, AlertDescription } from "@/components/ui/alert"; import { Alert, AlertDescription } from "@/components/ui/alert";
import { Button } from "@/components/ui/button"; import { Button } from "@/components/ui/button";
import { Card, CardContent } from "@/components/ui/card";
import { import {
Command, Command,
CommandEmpty, CommandEmpty,
@@ -41,12 +40,9 @@ import {
Check, Check,
CheckCircle2, CheckCircle2,
ChevronsUpDown, ChevronsUpDown,
KeyRound,
Zap Zap
} from "lucide-react"; } from "lucide-react";
import { useTranslations } from "next-intl"; import { useTranslations } from "next-intl";
import { usePaidStatus } from "@/hooks/usePaidStatus";
import { TierFeature, tierMatrix } from "@server/lib/billing/tierMatrix";
import { toUnicode } from "punycode"; import { toUnicode } from "punycode";
import { useCallback, useEffect, useMemo, useState } from "react"; import { useCallback, useEffect, useMemo, useState } from "react";
@@ -99,7 +95,6 @@ export default function DomainPicker({
const { env } = useEnvContext(); const { env } = useEnvContext();
const api = createApiClient({ env }); const api = createApiClient({ env });
const t = useTranslations(); const t = useTranslations();
const { hasSaasSubscription } = usePaidStatus();
const { data = [], isLoading: loadingDomains } = useQuery( const { data = [], isLoading: loadingDomains } = useQuery(
orgQueries.domains({ orgId }) orgQueries.domains({ orgId })
@@ -168,18 +163,15 @@ export default function DomainPicker({
domainId: firstOrExistingDomain.domainId domainId: firstOrExistingDomain.domainId
}; };
const base = firstOrExistingDomain.baseDomain;
const sub =
firstOrExistingDomain.type !== "cname"
? defaultSubdomain?.trim() || undefined
: undefined;
onDomainChange?.({ onDomainChange?.({
domainId: firstOrExistingDomain.domainId, domainId: firstOrExistingDomain.domainId,
type: "organization", type: "organization",
subdomain: sub, subdomain:
fullDomain: sub ? `${sub}.${base}` : base, firstOrExistingDomain.type !== "cname"
baseDomain: base ? defaultSubdomain || undefined
: undefined,
fullDomain: firstOrExistingDomain.baseDomain,
baseDomain: firstOrExistingDomain.baseDomain
}); });
} }
} }
@@ -517,11 +509,9 @@ export default function DomainPicker({
<span className="truncate"> <span className="truncate">
{selectedBaseDomain.domain} {selectedBaseDomain.domain}
</span> </span>
{selectedBaseDomain.verified && {selectedBaseDomain.verified && (
selectedBaseDomain.domainType !== <CheckCircle2 className="h-3 w-3 text-green-500 shrink-0" />
"wildcard" && ( )}
<CheckCircle2 className="h-3 w-3 text-green-500 shrink-0" />
)}
</div> </div>
) : ( ) : (
t("domainPickerSelectBaseDomain") t("domainPickerSelectBaseDomain")
@@ -584,23 +574,14 @@ export default function DomainPicker({
} }
</span> </span>
<span className="text-xs text-muted-foreground"> <span className="text-xs text-muted-foreground">
{orgDomain.type === {orgDomain.type.toUpperCase()}{" "}
"wildcard" {" "}
{orgDomain.verified
? t( ? t(
"domainPickerManual" "domainPickerVerified"
) )
: ( : t(
<> "domainPickerUnverified"
{orgDomain.type.toUpperCase()}{" "}
{" "}
{orgDomain.verified
? t(
"domainPickerVerified"
)
: t(
"domainPickerUnverified"
)}
</>
)} )}
</span> </span>
</div> </div>
@@ -699,23 +680,6 @@ export default function DomainPicker({
</div> </div>
</div> </div>
{build === "saas" &&
!hasSaasSubscription(
tierMatrix[TierFeature.DomainNamespaces]
) &&
!hideFreeDomain && (
<Card className="mt-3 border-black-500/30 bg-linear-to-br from-black-500/10 via-background to-background overflow-hidden">
<CardContent className="py-3 px-4">
<div className="flex items-center gap-2.5 text-sm text-muted-foreground">
<KeyRound className="size-4 shrink-0 text-black-500" />
<span>
{t("domainPickerFreeDomainsPaidFeature")}
</span>
</div>
</CardContent>
</Card>
)}
{/*showProvidedDomainSearch && build === "saas" && ( {/*showProvidedDomainSearch && build === "saas" && (
<Alert> <Alert>
<AlertCircle className="h-4 w-4" /> <AlertCircle className="h-4 w-4" />

View File

@@ -34,6 +34,7 @@ type EditInternalResourceDialogProps = {
setOpen: (val: boolean) => void; setOpen: (val: boolean) => void;
resource: InternalResourceData; resource: InternalResourceData;
orgId: string; orgId: string;
sites: Site[];
onSuccess?: () => void; onSuccess?: () => void;
}; };
@@ -42,21 +43,18 @@ export default function EditInternalResourceDialog({
setOpen, setOpen,
resource, resource,
orgId, orgId,
sites,
onSuccess onSuccess
}: EditInternalResourceDialogProps) { }: EditInternalResourceDialogProps) {
const t = useTranslations(); const t = useTranslations();
const api = createApiClient(useEnvContext()); const api = createApiClient(useEnvContext());
const queryClient = useQueryClient(); const queryClient = useQueryClient();
const [isSubmitting, startTransition] = useTransition(); const [isSubmitting, startTransition] = useTransition();
const [isHttpModeDisabled, setIsHttpModeDisabled] = useState(false);
async function handleSubmit(values: InternalResourceFormValues) { async function handleSubmit(values: InternalResourceFormValues) {
try { try {
let data = { ...values }; let data = { ...values };
if ( if (data.mode === "host" && isHostname(data.destination)) {
(data.mode === "host" || data.mode === "http") &&
isHostname(data.destination)
) {
const currentAlias = data.alias?.trim() || ""; const currentAlias = data.alias?.trim() || "";
if (!currentAlias) { if (!currentAlias) {
let aliasValue = data.destination; let aliasValue = data.destination;
@@ -69,39 +67,24 @@ export default function EditInternalResourceDialog({
await api.post(`/site-resource/${resource.id}`, { await api.post(`/site-resource/${resource.id}`, {
name: data.name, name: data.name,
siteId: data.siteIds[0], siteId: data.siteId,
mode: data.mode, mode: data.mode,
niceId: data.niceId, niceId: data.niceId,
destination: data.destination, destination: data.destination,
...(data.mode === "http" && { alias:
scheme: data.scheme, data.alias &&
ssl: data.ssl ?? false, typeof data.alias === "string" &&
destinationPort: data.httpHttpsPort ?? null, data.alias.trim()
domainId: data.httpConfigDomainId ? data.alias
? data.httpConfigDomainId : null,
: undefined, tcpPortRangeString: data.tcpPortRangeString,
subdomain: data.httpConfigSubdomain udpPortRangeString: data.udpPortRangeString,
? data.httpConfigSubdomain disableIcmp: data.disableIcmp ?? false,
: undefined ...(data.authDaemonMode != null && {
authDaemonMode: data.authDaemonMode
}), }),
...(data.mode === "host" && { ...(data.authDaemonMode === "remote" && {
alias: authDaemonPort: data.authDaemonPort || null
data.alias &&
typeof data.alias === "string" &&
data.alias.trim()
? data.alias
: null,
...(data.authDaemonMode != null && {
authDaemonMode: data.authDaemonMode
}),
...(data.authDaemonMode === "remote" && {
authDaemonPort: data.authDaemonPort || null
})
}),
...((data.mode === "host" || data.mode === "cidr") && {
tcpPortRangeString: data.tcpPortRangeString,
udpPortRangeString: data.udpPortRangeString,
disableIcmp: data.disableIcmp ?? false
}), }),
roleIds: (data.roles || []).map((r) => parseInt(r.id)), roleIds: (data.roles || []).map((r) => parseInt(r.id)),
userIds: (data.users || []).map((u) => u.id), userIds: (data.users || []).map((u) => u.id),
@@ -173,13 +156,13 @@ export default function EditInternalResourceDialog({
variant="edit" variant="edit"
open={open} open={open}
resource={resource} resource={resource}
sites={sites}
orgId={orgId} orgId={orgId}
siteResourceId={resource.id} siteResourceId={resource.id}
formId="edit-internal-resource-form" formId="edit-internal-resource-form"
onSubmit={(values) => onSubmit={(values) =>
startTransition(() => handleSubmit(values)) startTransition(() => handleSubmit(values))
} }
onSubmitDisabledChange={setIsHttpModeDisabled}
/> />
</CredenzaBody> </CredenzaBody>
<CredenzaFooter> <CredenzaFooter>
@@ -195,7 +178,7 @@ export default function EditInternalResourceDialog({
<Button <Button
type="submit" type="submit"
form="edit-internal-resource-form" form="edit-internal-resource-form"
disabled={isSubmitting || isHttpModeDisabled} disabled={isSubmitting}
loading={isSubmitting} loading={isSubmitting}
> >
{t("editInternalResourceDialogSaveResource")} {t("editInternalResourceDialogSaveResource")}

File diff suppressed because it is too large Load Diff

View File

@@ -39,11 +39,7 @@ export default function InviteStatusCard({
const [loading, setLoading] = useState(true); const [loading, setLoading] = useState(true);
const [error, setError] = useState(""); const [error, setError] = useState("");
const [type, setType] = useState< const [type, setType] = useState<
| "rejected" "rejected" | "wrong_user" | "user_does_not_exist" | "not_logged_in" | "user_limit_exceeded"
| "wrong_user"
| "user_does_not_exist"
| "not_logged_in"
| "user_limit_exceeded"
>("rejected"); >("rejected");
useEffect(() => { useEffect(() => {
@@ -94,12 +90,12 @@ export default function InviteStatusCard({
if (!user && type === "user_does_not_exist") { if (!user && type === "user_does_not_exist") {
const redirectUrl = email const redirectUrl = email
? `/auth/signup?redirect=/invite?token=${tokenParam}&email=${email}` ? `/auth/signup?redirect=/invite?token=${tokenParam}&email=${encodeURIComponent(email)}`
: `/auth/signup?redirect=/invite?token=${tokenParam}`; : `/auth/signup?redirect=/invite?token=${tokenParam}`;
router.push(redirectUrl); router.push(redirectUrl);
} else if (!user && type === "not_logged_in") { } else if (!user && type === "not_logged_in") {
const redirectUrl = email const redirectUrl = email
? `/auth/login?redirect=/invite?token=${tokenParam}&email=${email}` ? `/auth/login?redirect=/invite?token=${tokenParam}&email=${encodeURIComponent(email)}`
: `/auth/login?redirect=/invite?token=${tokenParam}`; : `/auth/login?redirect=/invite?token=${tokenParam}`;
router.push(redirectUrl); router.push(redirectUrl);
} else { } else {
@@ -113,7 +109,7 @@ export default function InviteStatusCard({
async function goToLogin() { async function goToLogin() {
await api.post("/auth/logout", {}); await api.post("/auth/logout", {});
const redirectUrl = email const redirectUrl = email
? `/auth/login?redirect=/invite?token=${tokenParam}&email=${email}` ? `/auth/login?redirect=/invite?token=${tokenParam}&email=${encodeURIComponent(email)}`
: `/auth/login?redirect=/invite?token=${tokenParam}`; : `/auth/login?redirect=/invite?token=${tokenParam}`;
router.push(redirectUrl); router.push(redirectUrl);
} }
@@ -121,7 +117,7 @@ export default function InviteStatusCard({
async function goToSignup() { async function goToSignup() {
await api.post("/auth/logout", {}); await api.post("/auth/logout", {});
const redirectUrl = email const redirectUrl = email
? `/auth/signup?redirect=/invite?token=${tokenParam}&email=${email}` ? `/auth/signup?redirect=/invite?token=${tokenParam}&email=${encodeURIComponent(email)}`
: `/auth/signup?redirect=/invite?token=${tokenParam}`; : `/auth/signup?redirect=/invite?token=${tokenParam}`;
router.push(redirectUrl); router.push(redirectUrl);
} }
@@ -161,9 +157,7 @@ export default function InviteStatusCard({
Cannot Accept Invite Cannot Accept Invite
</p> </p>
<p className="text-center text-sm"> <p className="text-center text-sm">
This organization has reached its user limit. Please This organization has reached its user limit. Please contact the organization administrator to upgrade their plan before accepting this invite.
contact the organization administrator to upgrade their
plan before accepting this invite.
</p> </p>
</div> </div>
); );

View File

@@ -405,11 +405,7 @@ export function LogDataTable<TData, TValue>({
onClick={() => onClick={() =>
!disabled && onExport() !disabled && onExport()
} }
disabled={ disabled={isExporting || disabled || isExportDisabled}
isExporting ||
disabled ||
isExportDisabled
}
> >
{isExporting ? ( {isExporting ? (
<Loader className="mr-2 size-4 animate-spin" /> <Loader className="mr-2 size-4 animate-spin" />

View File

@@ -333,8 +333,7 @@ export default function PendingSitesTable({
"jupiter", "jupiter",
"saturn", "saturn",
"uranus", "uranus",
"neptune", "neptune"
"pluto"
].includes(originalRow.exitNodeName.toLowerCase()); ].includes(originalRow.exitNodeName.toLowerCase());
if (isCloudNode) { if (isCloudNode) {
@@ -353,9 +352,9 @@ export default function PendingSitesTable({
<Link <Link
href={`/${originalRow.orgId}/settings/remote-exit-nodes/${originalRow.remoteExitNodeId}`} href={`/${originalRow.orgId}/settings/remote-exit-nodes/${originalRow.remoteExitNodeId}`}
> >
<Button variant="outline" size="sm"> <Button variant="outline">
{originalRow.exitNodeName} {originalRow.exitNodeName}
<ArrowUpRight className="ml-2 h-3 w-3" /> <ArrowUpRight className="ml-2 h-4 w-4" />
</Button> </Button>
</Link> </Link>
); );

View File

@@ -144,9 +144,9 @@ export default function ShareLinksTable({
<Link <Link
href={`/${orgId}/settings/resources/proxy/${r.resourceNiceId}`} href={`/${orgId}/settings/resources/proxy/${r.resourceNiceId}`}
> >
<Button variant="outline" size="sm"> <Button variant="outline">
{r.resourceName} {r.resourceName}
<ArrowUpRight className="ml-2 h-3 w-3" /> <ArrowUpRight className="ml-2 h-4 w-4" />
</Button> </Button>
</Link> </Link>
); );

View File

@@ -342,8 +342,7 @@ export default function SitesTable({
"jupiter", "jupiter",
"saturn", "saturn",
"uranus", "uranus",
"neptune", "neptune"
"pluto"
].includes(originalRow.exitNodeName.toLowerCase()); ].includes(originalRow.exitNodeName.toLowerCase());
if (isCloudNode) { if (isCloudNode) {
@@ -363,9 +362,9 @@ export default function SitesTable({
<Link <Link
href={`/${originalRow.orgId}/settings/remote-exit-nodes/${originalRow.remoteExitNodeId}`} href={`/${originalRow.orgId}/settings/remote-exit-nodes/${originalRow.remoteExitNodeId}`}
> >
<Button variant="outline" size="sm"> <Button variant="outline">
{originalRow.exitNodeName} {originalRow.exitNodeName}
<ArrowUpRight className="ml-2 h-3 w-3" /> <ArrowUpRight className="ml-2 h-4 w-4" />
</Button> </Button>
</Link> </Link>
); );

View File

@@ -373,12 +373,12 @@ export default function UserDevicesTable({
<Link <Link
href={`/${r.orgId}/settings/access/users/${r.userId}`} href={`/${r.orgId}/settings/access/users/${r.userId}`}
> >
<Button variant="outline" size="sm"> <Button variant="outline">
{getUserDisplayName({ {getUserDisplayName({
email: r.userEmail, email: r.userEmail,
username: r.username username: r.username
}) || r.userId} }) || r.userId}
<ArrowUpRight className="ml-2 h-3 w-3" /> <ArrowUpRight className="ml-2 h-4 w-4" />
</Button> </Button>
</Link> </Link>
) : ( ) : (

View File

@@ -164,7 +164,7 @@ const countryClass = cn(
const highlightedCountryClass = cn( const highlightedCountryClass = cn(
sharedCountryClass, sharedCountryClass,
"stroke-[3]", "stroke-2",
"fill-[#f4f4f5]", "fill-[#f4f4f5]",
"stroke-[#f36117]", "stroke-[#f36117]",
"dark:fill-[#3f3f46]" "dark:fill-[#3f3f46]"
@@ -194,20 +194,11 @@ function drawInteractiveCountries(
const path = setupProjetionPath(); const path = setupProjetionPath();
const data = parseWorldTopoJsonToGeoJsonFeatures(); const data = parseWorldTopoJsonToGeoJsonFeatures();
const svg = d3.select(element); const svg = d3.select(element);
const countriesLayer = svg.append("g");
const hoverLayer = svg.append("g").style("pointer-events", "none");
const hoverPath = hoverLayer
.append("path")
.datum(null)
.attr("class", highlightedCountryClass)
.style("display", "none");
countriesLayer svg.selectAll("path")
.selectAll("path")
.data(data) .data(data)
.enter() .enter()
.append("path") .append("path")
.attr("data-country-path", "true")
.attr("class", countryClass) .attr("class", countryClass)
.attr("d", path as never) .attr("d", path as never)
@@ -218,10 +209,9 @@ function drawInteractiveCountries(
y, y,
hoveredCountryAlpha3Code: country.properties.a3 hoveredCountryAlpha3Code: country.properties.a3
}); });
hoverPath // brings country to front
.datum(country) this.parentNode?.appendChild(this);
.attr("d", path(country as any) as string) d3.select(this).attr("class", highlightedCountryClass);
.style("display", null);
}) })
.on("mousemove", function (event) { .on("mousemove", function (event) {
@@ -231,7 +221,7 @@ function drawInteractiveCountries(
.on("mouseout", function () { .on("mouseout", function () {
setTooltip({ x: 0, y: 0, hoveredCountryAlpha3Code: null }); setTooltip({ x: 0, y: 0, hoveredCountryAlpha3Code: null });
hoverPath.style("display", "none"); d3.select(this).attr("class", countryClass);
}); });
return svg; return svg;
@@ -267,7 +257,7 @@ function colorInCountriesWithValues(
const svg = d3.select(element); const svg = d3.select(element);
return svg return svg
.selectAll('path[data-country-path="true"]') .selectAll("path")
.style("fill", (countryPath) => { .style("fill", (countryPath) => {
const country = getCountryByCountryPath(countryPath); const country = getCountryByCountryPath(countryPath);
if (!country?.count) { if (!country?.count) {

View File

@@ -1,117 +0,0 @@
import { orgQueries } from "@app/lib/queries";
import { useQuery } from "@tanstack/react-query";
import { useMemo, useState } from "react";
import {
Command,
CommandEmpty,
CommandGroup,
CommandInput,
CommandItem,
CommandList
} from "./ui/command";
import { Checkbox } from "./ui/checkbox";
import { useTranslations } from "next-intl";
import { useDebounce } from "use-debounce";
import type { Selectedsite } from "./site-selector";
export type MultiSitesSelectorProps = {
orgId: string;
selectedSites: Selectedsite[];
onSelectionChange: (sites: Selectedsite[]) => void;
filterTypes?: string[];
};
export function formatMultiSitesSelectorLabel(
selectedSites: Selectedsite[],
t: (key: string, values?: { count: number }) => string
): string {
if (selectedSites.length === 0) {
return t("selectSites");
}
if (selectedSites.length === 1) {
return selectedSites[0]!.name;
}
return t("multiSitesSelectorSitesCount", {
count: selectedSites.length
});
}
export function MultiSitesSelector({
orgId,
selectedSites,
onSelectionChange,
filterTypes
}: MultiSitesSelectorProps) {
const t = useTranslations();
const [siteSearchQuery, setSiteSearchQuery] = useState("");
const [debouncedQuery] = useDebounce(siteSearchQuery, 150);
const { data: sites = [] } = useQuery(
orgQueries.sites({
orgId,
query: debouncedQuery,
perPage: 10
})
);
const sitesShown = useMemo(() => {
const base = filterTypes
? sites.filter((s) => filterTypes.includes(s.type))
: [...sites];
if (debouncedQuery.trim().length === 0 && selectedSites.length > 0) {
const selectedNotInBase = selectedSites.filter(
(sel) => !base.some((s) => s.siteId === sel.siteId)
);
return [...selectedNotInBase, ...base];
}
return base;
}, [debouncedQuery, sites, selectedSites, filterTypes]);
const selectedIds = useMemo(
() => new Set(selectedSites.map((s) => s.siteId)),
[selectedSites]
);
const toggleSite = (site: Selectedsite) => {
if (selectedIds.has(site.siteId)) {
onSelectionChange(
selectedSites.filter((s) => s.siteId !== site.siteId)
);
} else {
onSelectionChange([...selectedSites, site]);
}
};
return (
<Command shouldFilter={false}>
<CommandInput
placeholder={t("siteSearch")}
value={siteSearchQuery}
onValueChange={(v) => setSiteSearchQuery(v)}
/>
<CommandList>
<CommandEmpty>{t("siteNotFound")}</CommandEmpty>
<CommandGroup>
{sitesShown.map((site) => (
<CommandItem
key={site.siteId}
value={`${site.siteId}:${site.name}`}
onSelect={() => {
toggleSite(site);
}}
>
<Checkbox
className="pointer-events-none shrink-0"
checked={selectedIds.has(site.siteId)}
onCheckedChange={() => {}}
aria-hidden
tabIndex={-1}
/>
<span className="truncate">{site.name}</span>
</CommandItem>
))}
</CommandGroup>
</CommandList>
</Command>
);
}

View File

@@ -43,8 +43,8 @@ const Checkbox = React.forwardRef<
className={cn(checkboxVariants({ variant }), className)} className={cn(checkboxVariants({ variant }), className)}
{...props} {...props}
> >
<CheckboxPrimitive.Indicator className="flex items-center justify-center"> <CheckboxPrimitive.Indicator className="flex items-center justify-center text-current">
<Check className="h-4 w-4 text-white" /> <Check className="h-4 w-4" />
</CheckboxPrimitive.Indicator> </CheckboxPrimitive.Indicator>
</CheckboxPrimitive.Root> </CheckboxPrimitive.Root>
)); ));