diff --git a/messages/en-US.json b/messages/en-US.json index 4d1bb02be..7a3fde1d4 100644 --- a/messages/en-US.json +++ b/messages/en-US.json @@ -328,6 +328,41 @@ "apiKeysDelete": "Delete API Key", "apiKeysManage": "Manage API Keys", "apiKeysDescription": "API keys are used to authenticate with the integration API", + "provisioningKeysTitle": "Provisioning Key", + "provisioningKeysManage": "Manage Provisioning Keys", + "provisioningKeysDescription": "Provisioning keys are used to authenticate automated site provisioning for your organization.", + "provisioningKeys": "Provisioning Keys", + "searchProvisioningKeys": "Search provisioning keys...", + "provisioningKeysAdd": "Generate Provisioning Key", + "provisioningKeysErrorDelete": "Error deleting provisioning key", + "provisioningKeysErrorDeleteMessage": "Error deleting provisioning key", + "provisioningKeysQuestionRemove": "Are you sure you want to remove this provisioning key from the organization?", + "provisioningKeysMessageRemove": "Once removed, the key can no longer be used for site provisioning.", + "provisioningKeysDeleteConfirm": "Confirm Delete Provisioning Key", + "provisioningKeysDelete": "Delete Provisioning key", + "provisioningKeysCreate": "Generate Provisioning Key", + "provisioningKeysCreateDescription": "Generate a new provisioning key for the organization", + "provisioningKeysSeeAll": "See all provisioning keys", + "provisioningKeysSave": "Save the provisioning key", + "provisioningKeysSaveDescription": "You will only be able to see this once. 
Copy it to a secure place.", + "provisioningKeysErrorCreate": "Error creating provisioning key", + "provisioningKeysList": "New provisioning key", + "provisioningKeysMaxBatchSize": "Max batch size", + "provisioningKeysUnlimitedBatchSize": "Unlimited batch size (no limit)", + "provisioningKeysMaxBatchUnlimited": "Unlimited", + "provisioningKeysMaxBatchSizeInvalid": "Enter a valid max batch size (1–1,000,000).", + "provisioningKeysValidUntil": "Valid until", + "provisioningKeysValidUntilHint": "Leave empty for no expiration.", + "provisioningKeysValidUntilInvalid": "Enter a valid date and time.", + "provisioningKeysNumUsed": "Times used", + "provisioningKeysLastUsed": "Last used", + "provisioningKeysNoExpiry": "No expiration", + "provisioningKeysNeverUsed": "Never", + "provisioningKeysEdit": "Edit Provisioning Key", + "provisioningKeysEditDescription": "Update the max batch size and expiration time for this key.", + "provisioningKeysUpdateError": "Error updating provisioning key", + "provisioningKeysUpdated": "Provisioning key updated", + "provisioningKeysUpdatedDescription": "Your changes have been saved.", "apiKeysSettings": "{apiKeyName} Settings", "userTitle": "Manage All Users", "userDescription": "View and manage all users in the system", @@ -1274,6 +1309,7 @@ "sidebarRoles": "Roles", "sidebarShareableLinks": "Links", "sidebarApiKeys": "API Keys", + "sidebarProvisioning": "Provisioning", "sidebarSettings": "Settings", "sidebarAllUsers": "All Users", "sidebarIdentityProviders": "Identity Providers", @@ -2372,6 +2408,12 @@ "logRetentionEndOfFollowingYear": "End of following year", "actionLogsDescription": "View a history of actions performed in this organization", "accessLogsDescription": "View access auth requests for resources in this organization", + "connectionLogs": "Connection Logs", + "connectionLogsDescription": "View connection logs for tunnels in this organization", + "sidebarLogsConnection": "Connection Logs", + "sourceAddress": "Source Address", + 
"destinationAddress": "Destination Address", + "duration": "Duration", "licenseRequiredToUse": "An Enterprise Edition license or Pangolin Cloud is required to use this feature. Book a demo or POC trial.", "ossEnterpriseEditionRequired": "The Enterprise Edition is required to use this feature. This feature is also available in Pangolin Cloud. Book a demo or POC trial.", "certResolver": "Certificate Resolver", diff --git a/server/auth/actions.ts b/server/auth/actions.ts index ae5136659..fc5daa4f8 100644 --- a/server/auth/actions.ts +++ b/server/auth/actions.ts @@ -112,6 +112,10 @@ export enum ActionsEnum { listApiKeyActions = "listApiKeyActions", listApiKeys = "listApiKeys", getApiKey = "getApiKey", + createSiteProvisioningKey = "createSiteProvisioningKey", + listSiteProvisioningKeys = "listSiteProvisioningKeys", + updateSiteProvisioningKey = "updateSiteProvisioningKey", + deleteSiteProvisioningKey = "deleteSiteProvisioningKey", getCertificate = "getCertificate", restartCertificate = "restartCertificate", billing = "billing", diff --git a/server/cleanup.ts b/server/cleanup.ts index 3c462f3f2..10e9f4cc3 100644 --- a/server/cleanup.ts +++ b/server/cleanup.ts @@ -1,4 +1,5 @@ import { flushBandwidthToDb } from "@server/routers/newt/handleReceiveBandwidthMessage"; +import { flushConnectionLogToDb } from "#dynamic/routers/newt"; import { flushSiteBandwidthToDb } from "@server/routers/gerbil/receiveBandwidth"; import { stopPingAccumulator } from "@server/routers/newt/pingAccumulator"; import { cleanup as wsCleanup } from "#dynamic/routers/ws"; @@ -6,6 +7,7 @@ import { cleanup as wsCleanup } from "#dynamic/routers/ws"; async function cleanup() { await stopPingAccumulator(); await flushBandwidthToDb(); + await flushConnectionLogToDb(); await flushSiteBandwidthToDb(); await wsCleanup(); @@ -16,4 +18,4 @@ export async function initCleanup() { // Handle process termination process.on("SIGTERM", () => cleanup()); process.on("SIGINT", () => cleanup()); -} \ No newline at end of 
file +} diff --git a/server/db/pg/schema/privateSchema.ts b/server/db/pg/schema/privateSchema.ts index c9d7cc907..bb1e866c4 100644 --- a/server/db/pg/schema/privateSchema.ts +++ b/server/db/pg/schema/privateSchema.ts @@ -7,7 +7,8 @@ import { bigint, real, text, - index + index, + primaryKey } from "drizzle-orm/pg-core"; import { InferSelectModel } from "drizzle-orm"; import { @@ -17,7 +18,9 @@ import { users, exitNodes, sessions, - clients + clients, + siteResources, + sites } from "./schema"; export const certificates = pgTable("certificates", { @@ -89,7 +92,9 @@ export const subscriptions = pgTable("subscriptions", { export const subscriptionItems = pgTable("subscriptionItems", { subscriptionItemId: serial("subscriptionItemId").primaryKey(), - stripeSubscriptionItemId: varchar("stripeSubscriptionItemId", { length: 255 }), + stripeSubscriptionItemId: varchar("stripeSubscriptionItemId", { + length: 255 + }), subscriptionId: varchar("subscriptionId", { length: 255 }) .notNull() .references(() => subscriptions.subscriptionId, { @@ -302,6 +307,45 @@ export const accessAuditLog = pgTable( ] ); +export const connectionAuditLog = pgTable( + "connectionAuditLog", + { + id: serial("id").primaryKey(), + sessionId: text("sessionId").notNull(), + siteResourceId: integer("siteResourceId").references( + () => siteResources.siteResourceId, + { onDelete: "cascade" } + ), + orgId: text("orgId").references(() => orgs.orgId, { + onDelete: "cascade" + }), + siteId: integer("siteId").references(() => sites.siteId, { + onDelete: "cascade" + }), + clientId: integer("clientId").references(() => clients.clientId, { + onDelete: "cascade" + }), + userId: text("userId").references(() => users.userId, { + onDelete: "cascade" + }), + sourceAddr: text("sourceAddr").notNull(), + destAddr: text("destAddr").notNull(), + protocol: text("protocol").notNull(), + startedAt: integer("startedAt").notNull(), + endedAt: integer("endedAt"), + bytesTx: integer("bytesTx"), + bytesRx: integer("bytesRx") + }, 
+ (table) => [ + index("idx_connectionAuditLog_startedAt").on(table.startedAt), + index("idx_connectionAuditLog_org_startedAt").on( + table.orgId, + table.startedAt + ), + index("idx_connectionAuditLog_siteResourceId").on(table.siteResourceId) + ] +); + export const approvals = pgTable("approvals", { approvalId: serial("approvalId").primaryKey(), timestamp: integer("timestamp").notNull(), // this is EPOCH time in seconds @@ -329,13 +373,48 @@ }); export const bannedEmails = pgTable("bannedEmails", { - email: varchar("email", { length: 255 }).primaryKey(), + email: varchar("email", { length: 255 }).primaryKey() }); export const bannedIps = pgTable("bannedIps", { - ip: varchar("ip", { length: 255 }).primaryKey(), + ip: varchar("ip", { length: 255 }).primaryKey() }); +export const siteProvisioningKeys = pgTable("siteProvisioningKeys", { + siteProvisioningKeyId: varchar("siteProvisioningKeyId", { + length: 255 + }).primaryKey(), + name: varchar("name", { length: 255 }).notNull(), + siteProvisioningKeyHash: text("siteProvisioningKeyHash").notNull(), + lastChars: varchar("lastChars", { length: 4 }).notNull(), + createdAt: varchar("dateCreated", { length: 255 }).notNull(), + lastUsed: varchar("lastUsed", { length: 255 }), + maxBatchSize: integer("maxBatchSize"), // null = no limit + numUsed: integer("numUsed").notNull().default(0), + validUntil: varchar("validUntil", { length: 255 }) +}); + +export const siteProvisioningKeyOrg = pgTable( + "siteProvisioningKeyOrg", + { + siteProvisioningKeyId: varchar("siteProvisioningKeyId", { + length: 255 + }) + .notNull() + .references(() => siteProvisioningKeys.siteProvisioningKeyId, { + onDelete: "cascade" + }), + orgId: varchar("orgId", { length: 255 }) + .notNull() + .references(() => orgs.orgId, { onDelete: "cascade" }) + }, + (table) => [ + primaryKey({ + columns: [table.siteProvisioningKeyId, table.orgId] + }) + ] +); + export type Approval = InferSelectModel; export type Limit = 
InferSelectModel; export type Account = InferSelectModel; @@ -357,3 +436,4 @@ export type LoginPage = InferSelectModel; export type LoginPageBranding = InferSelectModel; export type ActionAuditLog = InferSelectModel; export type AccessAuditLog = InferSelectModel; +export type ConnectionAuditLog = InferSelectModel; diff --git a/server/db/pg/schema/schema.ts b/server/db/pg/schema/schema.ts index 2bd9624e7..bb05ca358 100644 --- a/server/db/pg/schema/schema.ts +++ b/server/db/pg/schema/schema.ts @@ -57,6 +57,9 @@ export const orgs = pgTable("orgs", { settingsLogRetentionDaysAction: integer("settingsLogRetentionDaysAction") // where 0 = dont keep logs and -1 = keep forever and 9001 = end of the following year .notNull() .default(0), + settingsLogRetentionDaysConnection: integer("settingsLogRetentionDaysConnection") // where 0 = dont keep logs and -1 = keep forever and 9001 = end of the following year + .notNull() + .default(0), sshCaPrivateKey: text("sshCaPrivateKey"), // Encrypted SSH CA private key (PEM format) sshCaPublicKey: text("sshCaPublicKey"), // SSH CA public key (OpenSSH format) isBillingOrg: boolean("isBillingOrg"), diff --git a/server/db/sqlite/schema/privateSchema.ts b/server/db/sqlite/schema/privateSchema.ts index 8baeb5220..5913497b3 100644 --- a/server/db/sqlite/schema/privateSchema.ts +++ b/server/db/sqlite/schema/privateSchema.ts @@ -2,11 +2,12 @@ import { InferSelectModel } from "drizzle-orm"; import { index, integer, + primaryKey, real, sqliteTable, text } from "drizzle-orm/sqlite-core"; -import { clients, domains, exitNodes, orgs, sessions, users } from "./schema"; +import { clients, domains, exitNodes, orgs, sessions, siteResources, sites, users } from "./schema"; export const certificates = sqliteTable("certificates", { certId: integer("certId").primaryKey({ autoIncrement: true }), @@ -294,6 +295,45 @@ export const accessAuditLog = sqliteTable( ] ); +export const connectionAuditLog = sqliteTable( + "connectionAuditLog", + { + id: 
integer("id").primaryKey({ autoIncrement: true }), + sessionId: text("sessionId").notNull(), + siteResourceId: integer("siteResourceId").references( + () => siteResources.siteResourceId, + { onDelete: "cascade" } + ), + orgId: text("orgId").references(() => orgs.orgId, { + onDelete: "cascade" + }), + siteId: integer("siteId").references(() => sites.siteId, { + onDelete: "cascade" + }), + clientId: integer("clientId").references(() => clients.clientId, { + onDelete: "cascade" + }), + userId: text("userId").references(() => users.userId, { + onDelete: "cascade" + }), + sourceAddr: text("sourceAddr").notNull(), + destAddr: text("destAddr").notNull(), + protocol: text("protocol").notNull(), + startedAt: integer("startedAt").notNull(), + endedAt: integer("endedAt"), + bytesTx: integer("bytesTx"), + bytesRx: integer("bytesRx") + }, + (table) => [ + index("idx_connectionAuditLog_startedAt").on(table.startedAt), + index("idx_connectionAuditLog_org_startedAt").on( + table.orgId, + table.startedAt + ), + index("idx_connectionAuditLog_siteResourceId").on(table.siteResourceId) + ] +); + export const approvals = sqliteTable("approvals", { approvalId: integer("approvalId").primaryKey({ autoIncrement: true }), timestamp: integer("timestamp").notNull(), // this is EPOCH time in seconds @@ -318,7 +358,6 @@ .notNull() }); - export const bannedEmails = sqliteTable("bannedEmails", { email: text("email").primaryKey() }); @@ -327,6 +366,37 @@ ip: text("ip").primaryKey() }); +export const siteProvisioningKeys = sqliteTable("siteProvisioningKeys", { + siteProvisioningKeyId: text("siteProvisioningKeyId").primaryKey(), + name: text("name").notNull(), + siteProvisioningKeyHash: text("siteProvisioningKeyHash").notNull(), + lastChars: text("lastChars").notNull(), + createdAt: text("dateCreated").notNull(), + lastUsed: text("lastUsed"), + maxBatchSize: integer("maxBatchSize"), // null = no limit + 
numUsed: integer("numUsed").notNull().default(0), + validUntil: text("validUntil") +}); + +export const siteProvisioningKeyOrg = sqliteTable( + "siteProvisioningKeyOrg", + { + siteProvisioningKeyId: text("siteProvisioningKeyId") + .notNull() + .references(() => siteProvisioningKeys.siteProvisioningKeyId, { + onDelete: "cascade" + }), + orgId: text("orgId") + .notNull() + .references(() => orgs.orgId, { onDelete: "cascade" }) + }, + (table) => [ + primaryKey({ + columns: [table.siteProvisioningKeyId, table.orgId] + }) + ] +); + export type Approval = InferSelectModel; export type Limit = InferSelectModel; export type Account = InferSelectModel; @@ -348,3 +418,4 @@ export type LoginPage = InferSelectModel; export type LoginPageBranding = InferSelectModel; export type ActionAuditLog = InferSelectModel; export type AccessAuditLog = InferSelectModel; +export type ConnectionAuditLog = InferSelectModel; diff --git a/server/db/sqlite/schema/schema.ts b/server/db/sqlite/schema/schema.ts index b43f3b4a6..5d7c01377 100644 --- a/server/db/sqlite/schema/schema.ts +++ b/server/db/sqlite/schema/schema.ts @@ -54,6 +54,9 @@ export const orgs = sqliteTable("orgs", { settingsLogRetentionDaysAction: integer("settingsLogRetentionDaysAction") // where 0 = dont keep logs and -1 = keep forever and 9001 = end of the following year .notNull() .default(0), + settingsLogRetentionDaysConnection: integer("settingsLogRetentionDaysConnection") // where 0 = dont keep logs and -1 = keep forever and 9001 = end of the following year + .notNull() + .default(0), sshCaPrivateKey: text("sshCaPrivateKey"), // Encrypted SSH CA private key (PEM format) sshCaPublicKey: text("sshCaPublicKey"), // SSH CA public key (OpenSSH format) isBillingOrg: integer("isBillingOrg", { mode: "boolean" }), diff --git a/server/lib/billing/tierMatrix.ts b/server/lib/billing/tierMatrix.ts index a66f566a9..2aa38e1ef 100644 --- a/server/lib/billing/tierMatrix.ts +++ b/server/lib/billing/tierMatrix.ts @@ -8,6 +8,7 @@ export enum 
TierFeature { LogExport = "logExport", AccessLogs = "accessLogs", // set the retention period to none on downgrade ActionLogs = "actionLogs", // set the retention period to none on downgrade + ConnectionLogs = "connectionLogs", RotateCredentials = "rotateCredentials", MaintencePage = "maintencePage", // handle downgrade DevicePosture = "devicePosture", @@ -16,7 +17,8 @@ export enum TierFeature { PasswordExpirationPolicies = "passwordExpirationPolicies", // handle downgrade by setting to default duration AutoProvisioning = "autoProvisioning", // handle downgrade by disabling auto provisioning SshPam = "sshPam", - FullRbac = "fullRbac" + FullRbac = "fullRbac", + SiteProvisioningKeys = "siteProvisioningKeys" // handle downgrade by revoking keys if needed } export const tierMatrix: Record = { @@ -27,6 +29,7 @@ export const tierMatrix: Record = { [TierFeature.LogExport]: ["tier3", "enterprise"], [TierFeature.AccessLogs]: ["tier2", "tier3", "enterprise"], [TierFeature.ActionLogs]: ["tier2", "tier3", "enterprise"], + [TierFeature.ConnectionLogs]: ["tier2", "tier3", "enterprise"], [TierFeature.RotateCredentials]: ["tier1", "tier2", "tier3", "enterprise"], [TierFeature.MaintencePage]: ["tier1", "tier2", "tier3", "enterprise"], [TierFeature.DevicePosture]: ["tier2", "tier3", "enterprise"], @@ -50,5 +53,6 @@ export const tierMatrix: Record = { ], [TierFeature.AutoProvisioning]: ["tier1", "tier3", "enterprise"], [TierFeature.SshPam]: ["tier1", "tier3", "enterprise"], - [TierFeature.FullRbac]: ["tier1", "tier2", "tier3", "enterprise"] + [TierFeature.FullRbac]: ["tier1", "tier2", "tier3", "enterprise"], + [TierFeature.SiteProvisioningKeys]: ["enterprise"] }; diff --git a/server/lib/cleanupLogs.ts b/server/lib/cleanupLogs.ts index 8eb4ca77f..f5b6d8b2f 100644 --- a/server/lib/cleanupLogs.ts +++ b/server/lib/cleanupLogs.ts @@ -2,6 +2,7 @@ import { db, orgs } from "@server/db"; import { cleanUpOldLogs as cleanUpOldAccessLogs } from "#dynamic/lib/logAccessAudit"; import { 
cleanUpOldLogs as cleanUpOldActionLogs } from "#dynamic/middlewares/logActionAudit"; import { cleanUpOldLogs as cleanUpOldRequestLogs } from "@server/routers/badger/logRequestAudit"; +import { cleanUpOldLogs as cleanUpOldConnectionLogs } from "#dynamic/routers/newt"; import { gt, or } from "drizzle-orm"; import { cleanUpOldFingerprintSnapshots } from "@server/routers/olm/fingerprintingUtils"; import { build } from "@server/build"; @@ -20,14 +21,17 @@ export function initLogCleanupInterval() { settingsLogRetentionDaysAccess: orgs.settingsLogRetentionDaysAccess, settingsLogRetentionDaysRequest: - orgs.settingsLogRetentionDaysRequest + orgs.settingsLogRetentionDaysRequest, + settingsLogRetentionDaysConnection: + orgs.settingsLogRetentionDaysConnection }) .from(orgs) .where( or( gt(orgs.settingsLogRetentionDaysAction, 0), gt(orgs.settingsLogRetentionDaysAccess, 0), - gt(orgs.settingsLogRetentionDaysRequest, 0) + gt(orgs.settingsLogRetentionDaysRequest, 0), + gt(orgs.settingsLogRetentionDaysConnection, 0) ) ); @@ -37,7 +41,8 @@ export function initLogCleanupInterval() { orgId, settingsLogRetentionDaysAction, settingsLogRetentionDaysAccess, - settingsLogRetentionDaysRequest + settingsLogRetentionDaysRequest, + settingsLogRetentionDaysConnection } = org; if (settingsLogRetentionDaysAction > 0) { @@ -60,6 +65,13 @@ export function initLogCleanupInterval() { settingsLogRetentionDaysRequest ); } + + if (settingsLogRetentionDaysConnection > 0) { + await cleanUpOldConnectionLogs( + orgId, + settingsLogRetentionDaysConnection + ); + } } await cleanUpOldFingerprintSnapshots(365); diff --git a/server/lib/ip.ts b/server/lib/ip.ts index 3a29b8661..7f829bcef 100644 --- a/server/lib/ip.ts +++ b/server/lib/ip.ts @@ -581,6 +581,7 @@ export type SubnetProxyTargetV2 = { max: number; protocol: "tcp" | "udp"; }[]; + resourceId?: number; }; export function generateSubnetProxyTargetV2( @@ -617,7 +618,8 @@ export function generateSubnetProxyTargetV2( sourcePrefixes: [], destPrefix: 
destination, portRange, - disableIcmp + disableIcmp, + resourceId: siteResource.siteResourceId, }; } @@ -628,7 +630,8 @@ export function generateSubnetProxyTargetV2( destPrefix: `${siteResource.aliasAddress}/32`, rewriteTo: destination, portRange, - disableIcmp + disableIcmp, + resourceId: siteResource.siteResourceId, }; } } else if (siteResource.mode == "cidr") { @@ -636,7 +639,8 @@ export function generateSubnetProxyTargetV2( sourcePrefixes: [], destPrefix: siteResource.destination, portRange, - disableIcmp + disableIcmp, + resourceId: siteResource.siteResourceId, }; } diff --git a/server/middlewares/index.ts b/server/middlewares/index.ts index 435ccdb23..48025e8e7 100644 --- a/server/middlewares/index.ts +++ b/server/middlewares/index.ts @@ -25,6 +25,7 @@ export * from "./verifyClientAccess"; export * from "./integration"; export * from "./verifyUserHasAction"; export * from "./verifyApiKeyAccess"; +export * from "./verifySiteProvisioningKeyAccess"; export * from "./verifyDomainAccess"; export * from "./verifyUserIsOrgOwner"; export * from "./verifySiteResourceAccess"; diff --git a/server/middlewares/verifySiteProvisioningKeyAccess.ts b/server/middlewares/verifySiteProvisioningKeyAccess.ts new file mode 100644 index 000000000..e0d446de6 --- /dev/null +++ b/server/middlewares/verifySiteProvisioningKeyAccess.ts @@ -0,0 +1,131 @@ +import { Request, Response, NextFunction } from "express"; +import { db, userOrgs, siteProvisioningKeys, siteProvisioningKeyOrg } from "@server/db"; +import { and, eq } from "drizzle-orm"; +import createHttpError from "http-errors"; +import HttpCode from "@server/types/HttpCode"; +import { checkOrgAccessPolicy } from "#dynamic/lib/checkOrgAccessPolicy"; + +export async function verifySiteProvisioningKeyAccess( + req: Request, + res: Response, + next: NextFunction +) { + try { + const userId = req.user!.userId; + const siteProvisioningKeyId = req.params.siteProvisioningKeyId; + const orgId = req.params.orgId; + + if (!userId) { + return 
next( + createHttpError(HttpCode.UNAUTHORIZED, "User not authenticated") + ); + } + + if (!orgId) { + return next( + createHttpError(HttpCode.BAD_REQUEST, "Invalid organization ID") + ); + } + + if (!siteProvisioningKeyId) { + return next( + createHttpError(HttpCode.BAD_REQUEST, "Invalid key ID") + ); + } + + const [row] = await db + .select() + .from(siteProvisioningKeys) + .innerJoin( + siteProvisioningKeyOrg, + and( + eq( + siteProvisioningKeys.siteProvisioningKeyId, + siteProvisioningKeyOrg.siteProvisioningKeyId + ), + eq(siteProvisioningKeyOrg.orgId, orgId) + ) + ) + .where( + eq( + siteProvisioningKeys.siteProvisioningKeyId, + siteProvisioningKeyId + ) + ) + .limit(1); + + if (!row?.siteProvisioningKeys) { + return next( + createHttpError( + HttpCode.NOT_FOUND, + `Site provisioning key with ID ${siteProvisioningKeyId} not found` + ) + ); + } + + if (!row.siteProvisioningKeyOrg.orgId) { + return next( + createHttpError( + HttpCode.INTERNAL_SERVER_ERROR, + `Site provisioning key with ID ${siteProvisioningKeyId} does not have an organization ID` + ) + ); + } + + if (!req.userOrg) { + const userOrgRole = await db + .select() + .from(userOrgs) + .where( + and( + eq(userOrgs.userId, userId), + eq( + userOrgs.orgId, + row.siteProvisioningKeyOrg.orgId + ) + ) + ) + .limit(1); + req.userOrg = userOrgRole[0]; + } + + if (!req.userOrg) { + return next( + createHttpError( + HttpCode.FORBIDDEN, + "User does not have access to this organization" + ) + ); + } + + if (req.orgPolicyAllowed === undefined && req.userOrg.orgId) { + const policyCheck = await checkOrgAccessPolicy({ + orgId: req.userOrg.orgId, + userId, + session: req.session + }); + req.orgPolicyAllowed = policyCheck.allowed; + if (!policyCheck.allowed || policyCheck.error) { + return next( + createHttpError( + HttpCode.FORBIDDEN, + "Failed organization access policy check: " + + (policyCheck.error || "Unknown error") + ) + ); + } + } + + const userOrgRoleId = req.userOrg.roleId; + req.userOrgRoleId = 
userOrgRoleId; + + return next(); + } catch (error) { + return next( + createHttpError( + HttpCode.INTERNAL_SERVER_ERROR, + "Error verifying site provisioning key access" + ) + ); + } +} diff --git a/server/private/cleanup.ts b/server/private/cleanup.ts index 5321fbc9e..17d823491 100644 --- a/server/private/cleanup.ts +++ b/server/private/cleanup.ts @@ -14,12 +14,14 @@ import { rateLimitService } from "#private/lib/rateLimit"; import { cleanup as wsCleanup } from "#private/routers/ws"; import { flushBandwidthToDb } from "@server/routers/newt/handleReceiveBandwidthMessage"; +import { flushConnectionLogToDb } from "#dynamic/routers/newt"; import { flushSiteBandwidthToDb } from "@server/routers/gerbil/receiveBandwidth"; import { stopPingAccumulator } from "@server/routers/newt/pingAccumulator"; async function cleanup() { await stopPingAccumulator(); await flushBandwidthToDb(); + await flushConnectionLogToDb(); await flushSiteBandwidthToDb(); await rateLimitService.cleanup(); await wsCleanup(); @@ -31,4 +33,4 @@ export async function initCleanup() { // Handle process termination process.on("SIGTERM", () => cleanup()); process.on("SIGINT", () => cleanup()); -} \ No newline at end of file +} diff --git a/server/private/routers/auditLogs/exportConnectionAuditLog.ts b/server/private/routers/auditLogs/exportConnectionAuditLog.ts new file mode 100644 index 000000000..9349528ad --- /dev/null +++ b/server/private/routers/auditLogs/exportConnectionAuditLog.ts @@ -0,0 +1,99 @@ +/* + * This file is part of a proprietary work. + * + * Copyright (c) 2025 Fossorial, Inc. + * All rights reserved. + * + * This file is licensed under the Fossorial Commercial License. + * You may not use this file except in compliance with the License. + * Unauthorized use, copying, modification, or distribution is strictly prohibited. + * + * This file is not licensed under the AGPLv3. 
+ */ + +import { registry } from "@server/openApi"; +import { NextFunction } from "express"; +import { Request, Response } from "express"; +import { OpenAPITags } from "@server/openApi"; +import createHttpError from "http-errors"; +import HttpCode from "@server/types/HttpCode"; +import { fromError } from "zod-validation-error"; +import logger from "@server/logger"; +import { + queryConnectionAuditLogsParams, + queryConnectionAuditLogsQuery, + queryConnection, + countConnectionQuery +} from "./queryConnectionAuditLog"; +import { generateCSV } from "@server/routers/auditLogs/generateCSV"; +import { MAX_EXPORT_LIMIT } from "@server/routers/auditLogs"; + +registry.registerPath({ + method: "get", + path: "/org/{orgId}/logs/connection/export", + description: "Export the connection audit log for an organization as CSV", + tags: [OpenAPITags.Logs], + request: { + query: queryConnectionAuditLogsQuery, + params: queryConnectionAuditLogsParams + }, + responses: {} +}); + +export async function exportConnectionAuditLogs( + req: Request, + res: Response, + next: NextFunction +): Promise { + try { + const parsedQuery = queryConnectionAuditLogsQuery.safeParse(req.query); + if (!parsedQuery.success) { + return next( + createHttpError( + HttpCode.BAD_REQUEST, + fromError(parsedQuery.error) + ) + ); + } + + const parsedParams = queryConnectionAuditLogsParams.safeParse(req.params); + if (!parsedParams.success) { + return next( + createHttpError( + HttpCode.BAD_REQUEST, + fromError(parsedParams.error) + ) + ); + } + + const data = { ...parsedQuery.data, ...parsedParams.data }; + const [{ count }] = await countConnectionQuery(data); + if (count > MAX_EXPORT_LIMIT) { + return next( + createHttpError( + HttpCode.BAD_REQUEST, + `Export limit exceeded. Your selection contains ${count} rows, but the maximum is ${MAX_EXPORT_LIMIT} rows. 
Please select a shorter time range to reduce the data.` + ) + ); + } + + const baseQuery = queryConnection(data); + + const log = await baseQuery.limit(data.limit).offset(data.offset); + + const csvData = generateCSV(log); + + res.setHeader("Content-Type", "text/csv"); + res.setHeader( + "Content-Disposition", + `attachment; filename="connection-audit-logs-${data.orgId}-${Date.now()}.csv"` + ); + + return res.send(csvData); + } catch (error) { + logger.error(error); + return next( + createHttpError(HttpCode.INTERNAL_SERVER_ERROR, "An error occurred") + ); + } +} \ No newline at end of file diff --git a/server/private/routers/auditLogs/index.ts b/server/private/routers/auditLogs/index.ts index e1849a617..122455fea 100644 --- a/server/private/routers/auditLogs/index.ts +++ b/server/private/routers/auditLogs/index.ts @@ -15,3 +15,5 @@ export * from "./queryActionAuditLog"; export * from "./exportActionAuditLog"; export * from "./queryAccessAuditLog"; export * from "./exportAccessAuditLog"; +export * from "./queryConnectionAuditLog"; +export * from "./exportConnectionAuditLog"; diff --git a/server/private/routers/auditLogs/queryConnectionAuditLog.ts b/server/private/routers/auditLogs/queryConnectionAuditLog.ts new file mode 100644 index 000000000..b638ed488 --- /dev/null +++ b/server/private/routers/auditLogs/queryConnectionAuditLog.ts @@ -0,0 +1,524 @@ +/* + * This file is part of a proprietary work. + * + * Copyright (c) 2025 Fossorial, Inc. + * All rights reserved. + * + * This file is licensed under the Fossorial Commercial License. + * You may not use this file except in compliance with the License. + * Unauthorized use, copying, modification, or distribution is strictly prohibited. + * + * This file is not licensed under the AGPLv3. 
+ */ + +import { + connectionAuditLog, + logsDb, + siteResources, + sites, + clients, + users, + primaryDb +} from "@server/db"; +import { registry } from "@server/openApi"; +import { NextFunction } from "express"; +import { Request, Response } from "express"; +import { eq, gt, lt, and, count, desc, inArray } from "drizzle-orm"; +import { OpenAPITags } from "@server/openApi"; +import { z } from "zod"; +import createHttpError from "http-errors"; +import HttpCode from "@server/types/HttpCode"; +import { fromError } from "zod-validation-error"; +import { QueryConnectionAuditLogResponse } from "@server/routers/auditLogs/types"; +import response from "@server/lib/response"; +import logger from "@server/logger"; +import { getSevenDaysAgo } from "@app/lib/getSevenDaysAgo"; + +export const queryConnectionAuditLogsQuery = z.object({ + // iso string just validate its a parseable date + timeStart: z + .string() + .refine((val) => !isNaN(Date.parse(val)), { + error: "timeStart must be a valid ISO date string" + }) + .transform((val) => Math.floor(new Date(val).getTime() / 1000)) + .prefault(() => getSevenDaysAgo().toISOString()) + .openapi({ + type: "string", + format: "date-time", + description: + "Start time as ISO date string (defaults to 7 days ago)" + }), + timeEnd: z + .string() + .refine((val) => !isNaN(Date.parse(val)), { + error: "timeEnd must be a valid ISO date string" + }) + .transform((val) => Math.floor(new Date(val).getTime() / 1000)) + .optional() + .prefault(() => new Date().toISOString()) + .openapi({ + type: "string", + format: "date-time", + description: + "End time as ISO date string (defaults to current time)" + }), + protocol: z.string().optional(), + sourceAddr: z.string().optional(), + destAddr: z.string().optional(), + clientId: z + .string() + .optional() + .transform(Number) + .pipe(z.int().positive()) + .optional(), + siteId: z + .string() + .optional() + .transform(Number) + .pipe(z.int().positive()) + .optional(), + siteResourceId: z + .string() 
+ .optional() + .transform(Number) + .pipe(z.int().positive()) + .optional(), + userId: z.string().optional(), + limit: z + .string() + .optional() + .default("1000") + .transform(Number) + .pipe(z.int().positive()), + offset: z + .string() + .optional() + .default("0") + .transform(Number) + .pipe(z.int().nonnegative()) +}); + +export const queryConnectionAuditLogsParams = z.object({ + orgId: z.string() +}); + +export const queryConnectionAuditLogsCombined = + queryConnectionAuditLogsQuery.merge(queryConnectionAuditLogsParams); +type Q = z.infer; + +function getWhere(data: Q) { + return and( + gt(connectionAuditLog.startedAt, data.timeStart), + lt(connectionAuditLog.startedAt, data.timeEnd), + eq(connectionAuditLog.orgId, data.orgId), + data.protocol + ? eq(connectionAuditLog.protocol, data.protocol) + : undefined, + data.sourceAddr + ? eq(connectionAuditLog.sourceAddr, data.sourceAddr) + : undefined, + data.destAddr + ? eq(connectionAuditLog.destAddr, data.destAddr) + : undefined, + data.clientId + ? eq(connectionAuditLog.clientId, data.clientId) + : undefined, + data.siteId + ? eq(connectionAuditLog.siteId, data.siteId) + : undefined, + data.siteResourceId + ? eq(connectionAuditLog.siteResourceId, data.siteResourceId) + : undefined, + data.userId + ? 
eq(connectionAuditLog.userId, data.userId) + : undefined + ); +} + +export function queryConnection(data: Q) { + return logsDb + .select({ + sessionId: connectionAuditLog.sessionId, + siteResourceId: connectionAuditLog.siteResourceId, + orgId: connectionAuditLog.orgId, + siteId: connectionAuditLog.siteId, + clientId: connectionAuditLog.clientId, + userId: connectionAuditLog.userId, + sourceAddr: connectionAuditLog.sourceAddr, + destAddr: connectionAuditLog.destAddr, + protocol: connectionAuditLog.protocol, + startedAt: connectionAuditLog.startedAt, + endedAt: connectionAuditLog.endedAt, + bytesTx: connectionAuditLog.bytesTx, + bytesRx: connectionAuditLog.bytesRx + }) + .from(connectionAuditLog) + .where(getWhere(data)) + .orderBy( + desc(connectionAuditLog.startedAt), + desc(connectionAuditLog.id) + ); +} + +export function countConnectionQuery(data: Q) { + const countQuery = logsDb + .select({ count: count() }) + .from(connectionAuditLog) + .where(getWhere(data)); + return countQuery; +} + +async function enrichWithDetails( + logs: Awaited> +) { + // Collect unique IDs from logs + const siteResourceIds = [ + ...new Set( + logs + .map((log) => log.siteResourceId) + .filter((id): id is number => id !== null && id !== undefined) + ) + ]; + const siteIds = [ + ...new Set( + logs + .map((log) => log.siteId) + .filter((id): id is number => id !== null && id !== undefined) + ) + ]; + const clientIds = [ + ...new Set( + logs + .map((log) => log.clientId) + .filter((id): id is number => id !== null && id !== undefined) + ) + ]; + const userIds = [ + ...new Set( + logs + .map((log) => log.userId) + .filter((id): id is string => id !== null && id !== undefined) + ) + ]; + + // Fetch resource details from main database + const resourceMap = new Map< + number, + { name: string; niceId: string } + >(); + if (siteResourceIds.length > 0) { + const resourceDetails = await primaryDb + .select({ + siteResourceId: siteResources.siteResourceId, + name: siteResources.name, + niceId: 
siteResources.niceId + }) + .from(siteResources) + .where(inArray(siteResources.siteResourceId, siteResourceIds)); + + for (const r of resourceDetails) { + resourceMap.set(r.siteResourceId, { + name: r.name, + niceId: r.niceId + }); + } + } + + // Fetch site details from main database + const siteMap = new Map(); + if (siteIds.length > 0) { + const siteDetails = await primaryDb + .select({ + siteId: sites.siteId, + name: sites.name, + niceId: sites.niceId + }) + .from(sites) + .where(inArray(sites.siteId, siteIds)); + + for (const s of siteDetails) { + siteMap.set(s.siteId, { name: s.name, niceId: s.niceId }); + } + } + + // Fetch client details from main database + const clientMap = new Map< + number, + { name: string; niceId: string; type: string } + >(); + if (clientIds.length > 0) { + const clientDetails = await primaryDb + .select({ + clientId: clients.clientId, + name: clients.name, + niceId: clients.niceId, + type: clients.type + }) + .from(clients) + .where(inArray(clients.clientId, clientIds)); + + for (const c of clientDetails) { + clientMap.set(c.clientId, { + name: c.name, + niceId: c.niceId, + type: c.type + }); + } + } + + // Fetch user details from main database + const userMap = new Map< + string, + { email: string | null } + >(); + if (userIds.length > 0) { + const userDetails = await primaryDb + .select({ + userId: users.userId, + email: users.email + }) + .from(users) + .where(inArray(users.userId, userIds)); + + for (const u of userDetails) { + userMap.set(u.userId, { email: u.email }); + } + } + + // Enrich logs with details + return logs.map((log) => ({ + ...log, + resourceName: log.siteResourceId + ? resourceMap.get(log.siteResourceId)?.name ?? null + : null, + resourceNiceId: log.siteResourceId + ? resourceMap.get(log.siteResourceId)?.niceId ?? null + : null, + siteName: log.siteId + ? siteMap.get(log.siteId)?.name ?? null + : null, + siteNiceId: log.siteId + ? siteMap.get(log.siteId)?.niceId ?? null + : null, + clientName: log.clientId + ? 
clientMap.get(log.clientId)?.name ?? null + : null, + clientNiceId: log.clientId + ? clientMap.get(log.clientId)?.niceId ?? null + : null, + clientType: log.clientId + ? clientMap.get(log.clientId)?.type ?? null + : null, + userEmail: log.userId + ? userMap.get(log.userId)?.email ?? null + : null + })); +} + +async function queryUniqueFilterAttributes( + timeStart: number, + timeEnd: number, + orgId: string +) { + const baseConditions = and( + gt(connectionAuditLog.startedAt, timeStart), + lt(connectionAuditLog.startedAt, timeEnd), + eq(connectionAuditLog.orgId, orgId) + ); + + // Get unique protocols + const uniqueProtocols = await logsDb + .selectDistinct({ + protocol: connectionAuditLog.protocol + }) + .from(connectionAuditLog) + .where(baseConditions); + + // Get unique destination addresses + const uniqueDestAddrs = await logsDb + .selectDistinct({ + destAddr: connectionAuditLog.destAddr + }) + .from(connectionAuditLog) + .where(baseConditions); + + // Get unique client IDs + const uniqueClients = await logsDb + .selectDistinct({ + clientId: connectionAuditLog.clientId + }) + .from(connectionAuditLog) + .where(baseConditions); + + // Get unique resource IDs + const uniqueResources = await logsDb + .selectDistinct({ + siteResourceId: connectionAuditLog.siteResourceId + }) + .from(connectionAuditLog) + .where(baseConditions); + + // Get unique user IDs + const uniqueUsers = await logsDb + .selectDistinct({ + userId: connectionAuditLog.userId + }) + .from(connectionAuditLog) + .where(baseConditions); + + // Enrich client IDs with names from main database + const clientIds = uniqueClients + .map((row) => row.clientId) + .filter((id): id is number => id !== null); + + let clientsWithNames: Array<{ id: number; name: string }> = []; + if (clientIds.length > 0) { + const clientDetails = await primaryDb + .select({ + clientId: clients.clientId, + name: clients.name + }) + .from(clients) + .where(inArray(clients.clientId, clientIds)); + + clientsWithNames = 
clientDetails.map((c) => ({ + id: c.clientId, + name: c.name + })); + } + + // Enrich resource IDs with names from main database + const resourceIds = uniqueResources + .map((row) => row.siteResourceId) + .filter((id): id is number => id !== null); + + let resourcesWithNames: Array<{ id: number; name: string | null }> = []; + if (resourceIds.length > 0) { + const resourceDetails = await primaryDb + .select({ + siteResourceId: siteResources.siteResourceId, + name: siteResources.name + }) + .from(siteResources) + .where(inArray(siteResources.siteResourceId, resourceIds)); + + resourcesWithNames = resourceDetails.map((r) => ({ + id: r.siteResourceId, + name: r.name + })); + } + + // Enrich user IDs with emails from main database + const userIdsList = uniqueUsers + .map((row) => row.userId) + .filter((id): id is string => id !== null); + + let usersWithEmails: Array<{ id: string; email: string | null }> = []; + if (userIdsList.length > 0) { + const userDetails = await primaryDb + .select({ + userId: users.userId, + email: users.email + }) + .from(users) + .where(inArray(users.userId, userIdsList)); + + usersWithEmails = userDetails.map((u) => ({ + id: u.userId, + email: u.email + })); + } + + return { + protocols: uniqueProtocols + .map((row) => row.protocol) + .filter((protocol): protocol is string => protocol !== null), + destAddrs: uniqueDestAddrs + .map((row) => row.destAddr) + .filter((addr): addr is string => addr !== null), + clients: clientsWithNames, + resources: resourcesWithNames, + users: usersWithEmails + }; +} + +registry.registerPath({ + method: "get", + path: "/org/{orgId}/logs/connection", + description: "Query the connection audit log for an organization", + tags: [OpenAPITags.Logs], + request: { + query: queryConnectionAuditLogsQuery, + params: queryConnectionAuditLogsParams + }, + responses: {} +}); + +export async function queryConnectionAuditLogs( + req: Request, + res: Response, + next: NextFunction +): Promise { + try { + const parsedQuery = 
queryConnectionAuditLogsQuery.safeParse(req.query); + if (!parsedQuery.success) { + return next( + createHttpError( + HttpCode.BAD_REQUEST, + fromError(parsedQuery.error) + ) + ); + } + const parsedParams = queryConnectionAuditLogsParams.safeParse( + req.params + ); + if (!parsedParams.success) { + return next( + createHttpError( + HttpCode.BAD_REQUEST, + fromError(parsedParams.error) + ) + ); + } + + const data = { ...parsedQuery.data, ...parsedParams.data }; + + const baseQuery = queryConnection(data); + + const logsRaw = await baseQuery.limit(data.limit).offset(data.offset); + + // Enrich with resource, site, client, and user details + const log = await enrichWithDetails(logsRaw); + + const totalCountResult = await countConnectionQuery(data); + const totalCount = totalCountResult[0].count; + + const filterAttributes = await queryUniqueFilterAttributes( + data.timeStart, + data.timeEnd, + data.orgId + ); + + return response(res, { + data: { + log: log, + pagination: { + total: totalCount, + limit: data.limit, + offset: data.offset + }, + filterAttributes + }, + success: true, + error: false, + message: "Connection audit logs retrieved successfully", + status: HttpCode.OK + }); + } catch (error) { + logger.error(error); + return next( + createHttpError(HttpCode.INTERNAL_SERVER_ERROR, "An error occurred") + ); + } +} \ No newline at end of file diff --git a/server/private/routers/billing/featureLifecycle.ts b/server/private/routers/billing/featureLifecycle.ts index 330cf6e03..f6f2d513a 100644 --- a/server/private/routers/billing/featureLifecycle.ts +++ b/server/private/routers/billing/featureLifecycle.ts @@ -27,7 +27,9 @@ import { resources, roles, siteResources, - userOrgRoles + userOrgRoles, + siteProvisioningKeyOrg, + siteProvisioningKeys, } from "@server/db"; import { and, eq } from "drizzle-orm"; @@ -296,6 +298,10 @@ async function disableFeature( await disableFullRbac(orgId); break; + case TierFeature.SiteProvisioningKeys: + await 
disableSiteProvisioningKeys(orgId); + break; + default: logger.warn( `Unknown feature ${feature} for org ${orgId}, skipping` @@ -335,6 +341,57 @@ async function disableFullRbac(orgId: string): Promise { logger.info(`Disabled full RBAC for org ${orgId}`); } +async function disableSiteProvisioningKeys(orgId: string): Promise { + const rows = await db + .select({ + siteProvisioningKeyId: + siteProvisioningKeyOrg.siteProvisioningKeyId + }) + .from(siteProvisioningKeyOrg) + .where(eq(siteProvisioningKeyOrg.orgId, orgId)); + + for (const { siteProvisioningKeyId } of rows) { + await db.transaction(async (trx) => { + await trx + .delete(siteProvisioningKeyOrg) + .where( + and( + eq( + siteProvisioningKeyOrg.siteProvisioningKeyId, + siteProvisioningKeyId + ), + eq(siteProvisioningKeyOrg.orgId, orgId) + ) + ); + + const remaining = await trx + .select() + .from(siteProvisioningKeyOrg) + .where( + eq( + siteProvisioningKeyOrg.siteProvisioningKeyId, + siteProvisioningKeyId + ) + ); + + if (remaining.length === 0) { + await trx + .delete(siteProvisioningKeys) + .where( + eq( + siteProvisioningKeys.siteProvisioningKeyId, + siteProvisioningKeyId + ) + ); + } + }); + } + + logger.info( + `Removed site provisioning keys for org ${orgId} after tier downgrade` + ); +} + async function disableLoginPageBranding(orgId: string): Promise { const [existingBranding] = await db .select() diff --git a/server/private/routers/external.ts b/server/private/routers/external.ts index f5749d529..412895a41 100644 --- a/server/private/routers/external.ts +++ b/server/private/routers/external.ts @@ -27,6 +27,7 @@ import * as reKey from "#private/routers/re-key"; import * as approval from "#private/routers/approvals"; import * as ssh from "#private/routers/ssh"; import * as user from "#private/routers/user"; +import * as siteProvisioning from "#private/routers/siteProvisioning"; import { verifyOrgAccess, @@ -37,7 +38,8 @@ import { verifyLimits, verifyRoleAccess, verifyUserAccess, - 
verifyUserCanSetUserOrgRoles + verifyUserCanSetUserOrgRoles, + verifySiteProvisioningKeyAccess } from "@server/middlewares"; import { ActionsEnum } from "@server/auth/actions"; import { @@ -482,6 +484,25 @@ authenticated.get( logs.exportAccessAuditLogs ); +authenticated.get( + "/org/:orgId/logs/connection", + verifyValidLicense, + verifyValidSubscription(tierMatrix.connectionLogs), + verifyOrgAccess, + verifyUserHasAction(ActionsEnum.exportLogs), + logs.queryConnectionAuditLogs +); + +authenticated.get( + "/org/:orgId/logs/connection/export", + verifyValidLicense, + verifyValidSubscription(tierMatrix.logExport), + verifyOrgAccess, + verifyUserHasAction(ActionsEnum.exportLogs), + logActionAudit(ActionsEnum.exportLogs), + logs.exportConnectionAuditLogs +); + authenticated.post( "/re-key/:clientId/regenerate-client-secret", verifyClientAccess, // this is first to set the org id @@ -552,3 +573,45 @@ authenticated.post( logActionAudit(ActionsEnum.setUserOrgRoles), user.setUserOrgRoles ); + +authenticated.put( + "/org/:orgId/site-provisioning-key", + verifyValidLicense, + verifyValidSubscription(tierMatrix.siteProvisioningKeys), + verifyOrgAccess, + verifyLimits, + verifyUserHasAction(ActionsEnum.createSiteProvisioningKey), + logActionAudit(ActionsEnum.createSiteProvisioningKey), + siteProvisioning.createSiteProvisioningKey +); + +authenticated.get( + "/org/:orgId/site-provisioning-keys", + verifyValidLicense, + verifyValidSubscription(tierMatrix.siteProvisioningKeys), + verifyOrgAccess, + verifyUserHasAction(ActionsEnum.listSiteProvisioningKeys), + siteProvisioning.listSiteProvisioningKeys +); + +authenticated.delete( + "/org/:orgId/site-provisioning-key/:siteProvisioningKeyId", + verifyValidLicense, + verifyValidSubscription(tierMatrix.siteProvisioningKeys), + verifyOrgAccess, + verifySiteProvisioningKeyAccess, + verifyUserHasAction(ActionsEnum.deleteSiteProvisioningKey), + logActionAudit(ActionsEnum.deleteSiteProvisioningKey), + 
siteProvisioning.deleteSiteProvisioningKey +); + +authenticated.patch( + "/org/:orgId/site-provisioning-key/:siteProvisioningKeyId", + verifyValidLicense, + verifyValidSubscription(tierMatrix.siteProvisioningKeys), + verifyOrgAccess, + verifySiteProvisioningKeyAccess, + verifyUserHasAction(ActionsEnum.updateSiteProvisioningKey), + logActionAudit(ActionsEnum.updateSiteProvisioningKey), + siteProvisioning.updateSiteProvisioningKey +); diff --git a/server/private/routers/integration.ts b/server/private/routers/integration.ts index f8e6a63f4..40bb2b56c 100644 --- a/server/private/routers/integration.ts +++ b/server/private/routers/integration.ts @@ -94,6 +94,25 @@ authenticated.get( logs.exportAccessAuditLogs ); +authenticated.get( + "/org/:orgId/logs/connection", + verifyValidLicense, + verifyValidSubscription(tierMatrix.connectionLogs), + verifyApiKeyOrgAccess, + verifyApiKeyHasAction(ActionsEnum.exportLogs), + logs.queryConnectionAuditLogs +); + +authenticated.get( + "/org/:orgId/logs/connection/export", + verifyValidLicense, + verifyValidSubscription(tierMatrix.logExport), + verifyApiKeyOrgAccess, + verifyApiKeyHasAction(ActionsEnum.exportLogs), + logActionAudit(ActionsEnum.exportLogs), + logs.exportConnectionAuditLogs +); + authenticated.put( "/org/:orgId/idp/oidc", verifyValidLicense, diff --git a/server/private/routers/newt/handleConnectionLogMessage.ts b/server/private/routers/newt/handleConnectionLogMessage.ts new file mode 100644 index 000000000..2ac7153b5 --- /dev/null +++ b/server/private/routers/newt/handleConnectionLogMessage.ts @@ -0,0 +1,394 @@ +import { db, logsDb } from "@server/db"; +import { MessageHandler } from "@server/routers/ws"; +import { connectionAuditLog, sites, Newt, clients, orgs } from "@server/db"; +import { and, eq, lt, inArray } from "drizzle-orm"; +import logger from "@server/logger"; +import { inflate } from "zlib"; +import { promisify } from "util"; +import { calculateCutoffTimestamp } from "@server/lib/cleanupLogs"; + +const 
zlibInflate = promisify(inflate); + +// Retry configuration for deadlock handling +const MAX_RETRIES = 3; +const BASE_DELAY_MS = 50; + +// How often to flush accumulated connection log data to the database +const FLUSH_INTERVAL_MS = 30_000; // 30 seconds + +// Maximum number of records to buffer before forcing a flush +const MAX_BUFFERED_RECORDS = 500; + +// Maximum number of records to insert in a single batch +const INSERT_BATCH_SIZE = 100; + +interface ConnectionSessionData { + sessionId: string; + resourceId: number; + sourceAddr: string; + destAddr: string; + protocol: string; + startedAt: string; // ISO 8601 timestamp + endedAt?: string; // ISO 8601 timestamp + bytesTx?: number; + bytesRx?: number; +} + +interface ConnectionLogRecord { + sessionId: string; + siteResourceId: number; + orgId: string; + siteId: number; + clientId: number | null; + userId: string | null; + sourceAddr: string; + destAddr: string; + protocol: string; + startedAt: number; // epoch seconds + endedAt: number | null; + bytesTx: number | null; + bytesRx: number | null; +} + +// In-memory buffer of records waiting to be flushed +let buffer: ConnectionLogRecord[] = []; + +/** + * Check if an error is a deadlock error + */ +function isDeadlockError(error: any): boolean { + return ( + error?.code === "40P01" || + error?.cause?.code === "40P01" || + (error?.message && error.message.includes("deadlock")) + ); +} + +/** + * Execute a function with retry logic for deadlock handling + */ +async function withDeadlockRetry( + operation: () => Promise, + context: string +): Promise { + let attempt = 0; + while (true) { + try { + return await operation(); + } catch (error: any) { + if (isDeadlockError(error) && attempt < MAX_RETRIES) { + attempt++; + const baseDelay = Math.pow(2, attempt - 1) * BASE_DELAY_MS; + const jitter = Math.random() * baseDelay; + const delay = baseDelay + jitter; + logger.warn( + `Deadlock detected in ${context}, retrying attempt ${attempt}/${MAX_RETRIES} after 
${delay.toFixed(0)}ms` + ); + await new Promise((resolve) => setTimeout(resolve, delay)); + continue; + } + throw error; + } + } +} + +/** + * Decompress a base64-encoded zlib-compressed string into parsed JSON. + */ +async function decompressConnectionLog( + compressed: string +): Promise { + const compressedBuffer = Buffer.from(compressed, "base64"); + const decompressed = await zlibInflate(compressedBuffer); + const jsonString = decompressed.toString("utf-8"); + const parsed = JSON.parse(jsonString); + + if (!Array.isArray(parsed)) { + throw new Error("Decompressed connection log data is not an array"); + } + + return parsed; +} + +/** + * Convert an ISO 8601 timestamp string to epoch seconds. + * Returns null if the input is falsy. + */ +function toEpochSeconds(isoString: string | undefined | null): number | null { + if (!isoString) { + return null; + } + const ms = new Date(isoString).getTime(); + if (isNaN(ms)) { + return null; + } + return Math.floor(ms / 1000); +} + +/** + * Flush all buffered connection log records to the database. + * + * Swaps out the buffer before writing so that any records added during the + * flush are captured in the new buffer rather than being lost. Entries that + * fail to write are re-queued back into the buffer so they will be retried + * on the next flush. + * + * This function is exported so that the application's graceful-shutdown + * cleanup handler can call it before the process exits. 
+export async function flushConnectionLogToDb(): Promise<void> {
The graceful-shutdown path will call flushConnectionLogToDb() explicitly +// before process.exit(), so no data is lost. +flushTimer.unref(); + +export async function cleanUpOldLogs(orgId: string, retentionDays: number) { + const cutoffTimestamp = calculateCutoffTimestamp(retentionDays); + + try { + await logsDb + .delete(connectionAuditLog) + .where( + and( + lt(connectionAuditLog.startedAt, cutoffTimestamp), + eq(connectionAuditLog.orgId, orgId) + ) + ); + + // logger.debug( + // `Cleaned up connection audit logs older than ${retentionDays} days` + // ); + } catch (error) { + logger.error("Error cleaning up old connection audit logs:", error); + } +} + +export const handleConnectionLogMessage: MessageHandler = async (context) => { + const { message, client } = context; + const newt = client as Newt; + + if (!newt) { + logger.warn("Connection log received but no newt client in context"); + return; + } + + if (!newt.siteId) { + logger.warn("Connection log received but newt has no siteId"); + return; + } + + if (!message.data?.compressed) { + logger.warn("Connection log message missing compressed data"); + return; + } + + // Look up the org for this site + const [site] = await db + .select({ orgId: sites.orgId, orgSubnet: orgs.subnet }) + .from(sites) + .innerJoin(orgs, eq(sites.orgId, orgs.orgId)) + .where(eq(sites.siteId, newt.siteId)); + + if (!site) { + logger.warn( + `Connection log received but site ${newt.siteId} not found in database` + ); + return; + } + + const orgId = site.orgId; + + // Extract the CIDR suffix (e.g. "/16") from the org subnet so we can + // reconstruct the exact subnet string stored on each client record. + const cidrSuffix = site.orgSubnet?.includes("/") + ? 
site.orgSubnet.substring(site.orgSubnet.indexOf("/")) + : null; + + let sessions: ConnectionSessionData[]; + try { + sessions = await decompressConnectionLog(message.data.compressed); + } catch (error) { + logger.error("Failed to decompress connection log data:", error); + return; + } + + if (sessions.length === 0) { + return; + } + + logger.debug(`Sessions: ${JSON.stringify(sessions)}`) + + // Build a map from sourceAddr → { clientId, userId } by querying clients + // whose subnet field matches exactly. Client subnets are stored with the + // org's CIDR suffix (e.g. "100.90.128.5/16"), so we reconstruct that from + // each unique sourceAddr + the org's CIDR suffix and do a targeted IN query. + const ipToClient = new Map(); + + if (cidrSuffix) { + // Collect unique source addresses so we only query for what we need + const uniqueSourceAddrs = new Set(); + for (const session of sessions) { + if (session.sourceAddr) { + uniqueSourceAddrs.add(session.sourceAddr); + } + } + + if (uniqueSourceAddrs.size > 0) { + // Construct the exact subnet strings as stored in the DB + const subnetQueries = Array.from(uniqueSourceAddrs).map( + (addr) => { + // Strip port if present (e.g. "100.90.128.1:38004" → "100.90.128.1") + const ip = addr.includes(":") ? 
addr.split(":")[0] : addr; + return `${ip}${cidrSuffix}`; + } + ); + + logger.debug(`Subnet queries: ${JSON.stringify(subnetQueries)}`); + + const matchedClients = await db + .select({ + clientId: clients.clientId, + userId: clients.userId, + subnet: clients.subnet + }) + .from(clients) + .where( + and( + eq(clients.orgId, orgId), + inArray(clients.subnet, subnetQueries) + ) + ); + + for (const c of matchedClients) { + const ip = c.subnet.split("/")[0]; + logger.debug(`Client ${c.clientId} subnet ${c.subnet} matches ${ip}`); + ipToClient.set(ip, { clientId: c.clientId, userId: c.userId }); + } + } + } + + // Convert to DB records and add to the buffer + for (const session of sessions) { + // Validate required fields + if ( + !session.sessionId || + !session.resourceId || + !session.sourceAddr || + !session.destAddr || + !session.protocol + ) { + logger.debug( + `Skipping connection log session with missing required fields: ${JSON.stringify(session)}` + ); + continue; + } + + const startedAt = toEpochSeconds(session.startedAt); + if (startedAt === null) { + logger.debug( + `Skipping connection log session with invalid startedAt: ${session.startedAt}` + ); + continue; + } + + // Match the source address to a client. The sourceAddr is the + // client's IP on the WireGuard network, which corresponds to the IP + // portion of the client's subnet CIDR (e.g. "100.90.128.5/24"). + // Strip port if present (e.g. "100.90.128.1:38004" → "100.90.128.1") + const sourceIp = session.sourceAddr.includes(":") ? session.sourceAddr.split(":")[0] : session.sourceAddr; + const clientInfo = ipToClient.get(sourceIp) ?? null; + + + buffer.push({ + sessionId: session.sessionId, + siteResourceId: session.resourceId, + orgId, + siteId: newt.siteId, + clientId: clientInfo?.clientId ?? null, + userId: clientInfo?.userId ?? 
null, + sourceAddr: session.sourceAddr, + destAddr: session.destAddr, + protocol: session.protocol, + startedAt, + endedAt: toEpochSeconds(session.endedAt), + bytesTx: session.bytesTx ?? null, + bytesRx: session.bytesRx ?? null + }); + } + + logger.debug( + `Buffered ${sessions.length} connection log session(s) from newt ${newt.newtId} (site ${newt.siteId})` + ); + + // If the buffer has grown large enough, trigger an immediate flush + if (buffer.length >= MAX_BUFFERED_RECORDS) { + // Fire and forget — errors are handled inside flushConnectionLogToDb + flushConnectionLogToDb().catch((error) => { + logger.error( + "Unexpected error during size-triggered connection log flush:", + error + ); + }); + } +}; diff --git a/server/private/routers/newt/index.ts b/server/private/routers/newt/index.ts new file mode 100644 index 000000000..cc182cf7d --- /dev/null +++ b/server/private/routers/newt/index.ts @@ -0,0 +1 @@ +export * from "./handleConnectionLogMessage"; diff --git a/server/private/routers/siteProvisioning/createSiteProvisioningKey.ts b/server/private/routers/siteProvisioning/createSiteProvisioningKey.ts new file mode 100644 index 000000000..abed27550 --- /dev/null +++ b/server/private/routers/siteProvisioning/createSiteProvisioningKey.ts @@ -0,0 +1,146 @@ +/* + * This file is part of a proprietary work. + * + * Copyright (c) 2025 Fossorial, Inc. + * All rights reserved. + * + * This file is licensed under the Fossorial Commercial License. + * You may not use this file except in compliance with the License. + * Unauthorized use, copying, modification, or distribution is strictly prohibited. + * + * This file is not licensed under the AGPLv3. 
+ */ + +import { NextFunction, Request, Response } from "express"; +import { db, siteProvisioningKeyOrg, siteProvisioningKeys } from "@server/db"; +import HttpCode from "@server/types/HttpCode"; +import { z } from "zod"; +import { fromError } from "zod-validation-error"; +import createHttpError from "http-errors"; +import response from "@server/lib/response"; +import moment from "moment"; +import { + generateId, + generateIdFromEntropySize +} from "@server/auth/sessions/app"; +import logger from "@server/logger"; +import { hashPassword } from "@server/auth/password"; +import type { CreateSiteProvisioningKeyResponse } from "@server/routers/siteProvisioning/types"; + +const paramsSchema = z.object({ + orgId: z.string().nonempty() +}); + +const bodySchema = z + .strictObject({ + name: z.string().min(1).max(255), + maxBatchSize: z.union([ + z.null(), + z.coerce.number().int().positive().max(1_000_000) + ]), + validUntil: z.string().max(255).optional() + }) + .superRefine((data, ctx) => { + const v = data.validUntil; + if (v == null || v.trim() === "") { + return; + } + if (Number.isNaN(Date.parse(v))) { + ctx.addIssue({ + code: "custom", + message: "Invalid validUntil", + path: ["validUntil"] + }); + } + }); + +export type CreateSiteProvisioningKeyBody = z.infer; + +export async function createSiteProvisioningKey( + req: Request, + res: Response, + next: NextFunction +): Promise { + const parsedParams = paramsSchema.safeParse(req.params); + if (!parsedParams.success) { + return next( + createHttpError( + HttpCode.BAD_REQUEST, + fromError(parsedParams.error).toString() + ) + ); + } + + const parsedBody = bodySchema.safeParse(req.body); + if (!parsedBody.success) { + return next( + createHttpError( + HttpCode.BAD_REQUEST, + fromError(parsedBody.error).toString() + ) + ); + } + + const { orgId } = parsedParams.data; + const { name, maxBatchSize } = parsedBody.data; + const vuRaw = parsedBody.data.validUntil; + const validUntil = + vuRaw == null || vuRaw.trim() === "" + ? 
null + : new Date(Date.parse(vuRaw)).toISOString(); + + const siteProvisioningKeyId = `spk-${generateId(15)}`; + const siteProvisioningKey = generateIdFromEntropySize(25); + const siteProvisioningKeyHash = await hashPassword(siteProvisioningKey); + const lastChars = siteProvisioningKey.slice(-4); + const createdAt = moment().toISOString(); + const provisioningKey = `${siteProvisioningKeyId}.${siteProvisioningKey}`; + + await db.transaction(async (trx) => { + await trx.insert(siteProvisioningKeys).values({ + siteProvisioningKeyId, + name, + siteProvisioningKeyHash, + createdAt, + lastChars, + lastUsed: null, + maxBatchSize, + numUsed: 0, + validUntil + }); + + await trx.insert(siteProvisioningKeyOrg).values({ + siteProvisioningKeyId, + orgId + }); + }); + + try { + return response(res, { + data: { + siteProvisioningKeyId, + orgId, + name, + siteProvisioningKey: provisioningKey, + lastChars, + createdAt, + lastUsed: null, + maxBatchSize, + numUsed: 0, + validUntil + }, + success: true, + error: false, + message: "Site provisioning key created", + status: HttpCode.CREATED + }); + } catch (e) { + logger.error(e); + return next( + createHttpError( + HttpCode.INTERNAL_SERVER_ERROR, + "Failed to create site provisioning key" + ) + ); + } +} diff --git a/server/private/routers/siteProvisioning/deleteSiteProvisioningKey.ts b/server/private/routers/siteProvisioning/deleteSiteProvisioningKey.ts new file mode 100644 index 000000000..fc8b05e60 --- /dev/null +++ b/server/private/routers/siteProvisioning/deleteSiteProvisioningKey.ts @@ -0,0 +1,129 @@ +/* + * This file is part of a proprietary work. + * + * Copyright (c) 2025 Fossorial, Inc. + * All rights reserved. + * + * This file is licensed under the Fossorial Commercial License. + * You may not use this file except in compliance with the License. + * Unauthorized use, copying, modification, or distribution is strictly prohibited. + * + * This file is not licensed under the AGPLv3. 
+ */ + +import { Request, Response, NextFunction } from "express"; +import { z } from "zod"; +import { + db, + siteProvisioningKeyOrg, + siteProvisioningKeys +} from "@server/db"; +import { and, eq } from "drizzle-orm"; +import response from "@server/lib/response"; +import HttpCode from "@server/types/HttpCode"; +import createHttpError from "http-errors"; +import logger from "@server/logger"; +import { fromError } from "zod-validation-error"; + +const paramsSchema = z.object({ + siteProvisioningKeyId: z.string().nonempty(), + orgId: z.string().nonempty() +}); + +export async function deleteSiteProvisioningKey( + req: Request, + res: Response, + next: NextFunction +): Promise { + try { + const parsedParams = paramsSchema.safeParse(req.params); + if (!parsedParams.success) { + return next( + createHttpError( + HttpCode.BAD_REQUEST, + fromError(parsedParams.error).toString() + ) + ); + } + + const { siteProvisioningKeyId, orgId } = parsedParams.data; + + const [row] = await db + .select() + .from(siteProvisioningKeys) + .where( + eq( + siteProvisioningKeys.siteProvisioningKeyId, + siteProvisioningKeyId + ) + ) + .innerJoin( + siteProvisioningKeyOrg, + and( + eq( + siteProvisioningKeys.siteProvisioningKeyId, + siteProvisioningKeyOrg.siteProvisioningKeyId + ), + eq(siteProvisioningKeyOrg.orgId, orgId) + ) + ) + .limit(1); + + if (!row) { + return next( + createHttpError( + HttpCode.NOT_FOUND, + `Site provisioning key with ID ${siteProvisioningKeyId} not found` + ) + ); + } + + await db.transaction(async (trx) => { + await trx + .delete(siteProvisioningKeyOrg) + .where( + and( + eq( + siteProvisioningKeyOrg.siteProvisioningKeyId, + siteProvisioningKeyId + ), + eq(siteProvisioningKeyOrg.orgId, orgId) + ) + ); + + const siteProvisioningKeyOrgs = await trx + .select() + .from(siteProvisioningKeyOrg) + .where( + eq( + siteProvisioningKeyOrg.siteProvisioningKeyId, + siteProvisioningKeyId + ) + ); + + if (siteProvisioningKeyOrgs.length === 0) { + await trx + 
.delete(siteProvisioningKeys) + .where( + eq( + siteProvisioningKeys.siteProvisioningKeyId, + siteProvisioningKeyId + ) + ); + } + }); + + return response(res, { + data: null, + success: true, + error: false, + message: "Site provisioning key deleted successfully", + status: HttpCode.OK + }); + } catch (error) { + logger.error(error); + return next( + createHttpError(HttpCode.INTERNAL_SERVER_ERROR, "An error occurred") + ); + } +} diff --git a/server/private/routers/siteProvisioning/index.ts b/server/private/routers/siteProvisioning/index.ts new file mode 100644 index 000000000..d143274f6 --- /dev/null +++ b/server/private/routers/siteProvisioning/index.ts @@ -0,0 +1,17 @@ +/* + * This file is part of a proprietary work. + * + * Copyright (c) 2025 Fossorial, Inc. + * All rights reserved. + * + * This file is licensed under the Fossorial Commercial License. + * You may not use this file except in compliance with the License. + * Unauthorized use, copying, modification, or distribution is strictly prohibited. + * + * This file is not licensed under the AGPLv3. + */ + +export * from "./createSiteProvisioningKey"; +export * from "./listSiteProvisioningKeys"; +export * from "./deleteSiteProvisioningKey"; +export * from "./updateSiteProvisioningKey"; diff --git a/server/private/routers/siteProvisioning/listSiteProvisioningKeys.ts b/server/private/routers/siteProvisioning/listSiteProvisioningKeys.ts new file mode 100644 index 000000000..5f7531a2c --- /dev/null +++ b/server/private/routers/siteProvisioning/listSiteProvisioningKeys.ts @@ -0,0 +1,126 @@ +/* + * This file is part of a proprietary work. + * + * Copyright (c) 2025 Fossorial, Inc. + * All rights reserved. + * + * This file is licensed under the Fossorial Commercial License. + * You may not use this file except in compliance with the License. + * Unauthorized use, copying, modification, or distribution is strictly prohibited. + * + * This file is not licensed under the AGPLv3. 
+ */ + +import { + db, + siteProvisioningKeyOrg, + siteProvisioningKeys +} from "@server/db"; +import logger from "@server/logger"; +import HttpCode from "@server/types/HttpCode"; +import response from "@server/lib/response"; +import { NextFunction, Request, Response } from "express"; +import createHttpError from "http-errors"; +import { z } from "zod"; +import { fromError } from "zod-validation-error"; +import { eq } from "drizzle-orm"; +import type { ListSiteProvisioningKeysResponse } from "@server/routers/siteProvisioning/types"; + +const paramsSchema = z.object({ + orgId: z.string().nonempty() +}); + +const querySchema = z.object({ + limit: z + .string() + .optional() + .default("1000") + .transform(Number) + .pipe(z.int().positive()), + offset: z + .string() + .optional() + .default("0") + .transform(Number) + .pipe(z.int().nonnegative()) +}); + +function querySiteProvisioningKeys(orgId: string) { + return db + .select({ + siteProvisioningKeyId: + siteProvisioningKeys.siteProvisioningKeyId, + orgId: siteProvisioningKeyOrg.orgId, + lastChars: siteProvisioningKeys.lastChars, + createdAt: siteProvisioningKeys.createdAt, + name: siteProvisioningKeys.name, + lastUsed: siteProvisioningKeys.lastUsed, + maxBatchSize: siteProvisioningKeys.maxBatchSize, + numUsed: siteProvisioningKeys.numUsed, + validUntil: siteProvisioningKeys.validUntil + }) + .from(siteProvisioningKeyOrg) + .innerJoin( + siteProvisioningKeys, + eq( + siteProvisioningKeys.siteProvisioningKeyId, + siteProvisioningKeyOrg.siteProvisioningKeyId + ) + ) + .where(eq(siteProvisioningKeyOrg.orgId, orgId)); +} + +export async function listSiteProvisioningKeys( + req: Request, + res: Response, + next: NextFunction +): Promise { + try { + const parsedParams = paramsSchema.safeParse(req.params); + if (!parsedParams.success) { + return next( + createHttpError( + HttpCode.BAD_REQUEST, + fromError(parsedParams.error) + ) + ); + } + + const parsedQuery = querySchema.safeParse(req.query); + if (!parsedQuery.success) { 
+ return next( + createHttpError( + HttpCode.BAD_REQUEST, + fromError(parsedQuery.error) + ) + ); + } + + const { orgId } = parsedParams.data; + const { limit, offset } = parsedQuery.data; + + const siteProvisioningKeysList = await querySiteProvisioningKeys(orgId) + .limit(limit) + .offset(offset); + + return response(res, { + data: { + siteProvisioningKeys: siteProvisioningKeysList, + pagination: { + total: siteProvisioningKeysList.length, + limit, + offset + } + }, + success: true, + error: false, + message: "Site provisioning keys retrieved successfully", + status: HttpCode.OK + }); + } catch (error) { + logger.error(error); + return next( + createHttpError(HttpCode.INTERNAL_SERVER_ERROR, "An error occurred") + ); + } +} diff --git a/server/private/routers/siteProvisioning/updateSiteProvisioningKey.ts b/server/private/routers/siteProvisioning/updateSiteProvisioningKey.ts new file mode 100644 index 000000000..526d8bfb8 --- /dev/null +++ b/server/private/routers/siteProvisioning/updateSiteProvisioningKey.ts @@ -0,0 +1,199 @@ +/* + * This file is part of a proprietary work. + * + * Copyright (c) 2025 Fossorial, Inc. + * All rights reserved. + * + * This file is licensed under the Fossorial Commercial License. + * You may not use this file except in compliance with the License. + * Unauthorized use, copying, modification, or distribution is strictly prohibited. + * + * This file is not licensed under the AGPLv3. 
+ */ + +import { Request, Response, NextFunction } from "express"; +import { z } from "zod"; +import { + db, + siteProvisioningKeyOrg, + siteProvisioningKeys +} from "@server/db"; +import { and, eq } from "drizzle-orm"; +import response from "@server/lib/response"; +import HttpCode from "@server/types/HttpCode"; +import createHttpError from "http-errors"; +import logger from "@server/logger"; +import { fromError } from "zod-validation-error"; +import type { UpdateSiteProvisioningKeyResponse } from "@server/routers/siteProvisioning/types"; + +const paramsSchema = z.object({ + siteProvisioningKeyId: z.string().nonempty(), + orgId: z.string().nonempty() +}); + +const bodySchema = z + .strictObject({ + maxBatchSize: z + .union([ + z.null(), + z.coerce.number().int().positive().max(1_000_000) + ]) + .optional(), + validUntil: z.string().max(255).optional() + }) + .superRefine((data, ctx) => { + if ( + data.maxBatchSize === undefined && + data.validUntil === undefined + ) { + ctx.addIssue({ + code: "custom", + message: "Provide maxBatchSize and/or validUntil", + path: ["maxBatchSize"] + }); + } + const v = data.validUntil; + if (v == null || v.trim() === "") { + return; + } + if (Number.isNaN(Date.parse(v))) { + ctx.addIssue({ + code: "custom", + message: "Invalid validUntil", + path: ["validUntil"] + }); + } + }); + +export type UpdateSiteProvisioningKeyBody = z.infer; + +export async function updateSiteProvisioningKey( + req: Request, + res: Response, + next: NextFunction +): Promise { + try { + const parsedParams = paramsSchema.safeParse(req.params); + if (!parsedParams.success) { + return next( + createHttpError( + HttpCode.BAD_REQUEST, + fromError(parsedParams.error).toString() + ) + ); + } + + const parsedBody = bodySchema.safeParse(req.body); + if (!parsedBody.success) { + return next( + createHttpError( + HttpCode.BAD_REQUEST, + fromError(parsedBody.error).toString() + ) + ); + } + + const { siteProvisioningKeyId, orgId } = parsedParams.data; + const body = 
parsedBody.data; + + const [row] = await db + .select() + .from(siteProvisioningKeys) + .where( + eq( + siteProvisioningKeys.siteProvisioningKeyId, + siteProvisioningKeyId + ) + ) + .innerJoin( + siteProvisioningKeyOrg, + and( + eq( + siteProvisioningKeys.siteProvisioningKeyId, + siteProvisioningKeyOrg.siteProvisioningKeyId + ), + eq(siteProvisioningKeyOrg.orgId, orgId) + ) + ) + .limit(1); + + if (!row) { + return next( + createHttpError( + HttpCode.NOT_FOUND, + `Site provisioning key with ID ${siteProvisioningKeyId} not found` + ) + ); + } + + const setValues: { + maxBatchSize?: number | null; + validUntil?: string | null; + } = {}; + if (body.maxBatchSize !== undefined) { + setValues.maxBatchSize = body.maxBatchSize; + } + if (body.validUntil !== undefined) { + setValues.validUntil = + body.validUntil.trim() === "" + ? null + : new Date(Date.parse(body.validUntil)).toISOString(); + } + + await db + .update(siteProvisioningKeys) + .set(setValues) + .where( + eq( + siteProvisioningKeys.siteProvisioningKeyId, + siteProvisioningKeyId + ) + ); + + const [updated] = await db + .select({ + siteProvisioningKeyId: + siteProvisioningKeys.siteProvisioningKeyId, + name: siteProvisioningKeys.name, + lastChars: siteProvisioningKeys.lastChars, + createdAt: siteProvisioningKeys.createdAt, + lastUsed: siteProvisioningKeys.lastUsed, + maxBatchSize: siteProvisioningKeys.maxBatchSize, + numUsed: siteProvisioningKeys.numUsed, + validUntil: siteProvisioningKeys.validUntil + }) + .from(siteProvisioningKeys) + .where( + eq( + siteProvisioningKeys.siteProvisioningKeyId, + siteProvisioningKeyId + ) + ) + .limit(1); + + if (!updated) { + return next( + createHttpError( + HttpCode.INTERNAL_SERVER_ERROR, + "Failed to load updated site provisioning key" + ) + ); + } + + return response(res, { + data: { + ...updated, + orgId + }, + success: true, + error: false, + message: "Site provisioning key updated successfully", + status: HttpCode.OK + }); + } catch (error) { + logger.error(error); + 
return next( + createHttpError(HttpCode.INTERNAL_SERVER_ERROR, "An error occurred") + ); + } +} diff --git a/server/private/routers/ws/messageHandlers.ts b/server/private/routers/ws/messageHandlers.ts index d388ce40a..a3c9c5bdb 100644 --- a/server/private/routers/ws/messageHandlers.ts +++ b/server/private/routers/ws/messageHandlers.ts @@ -18,10 +18,12 @@ import { } from "#private/routers/remoteExitNode"; import { MessageHandler } from "@server/routers/ws"; import { build } from "@server/build"; +import { handleConnectionLogMessage } from "#dynamic/routers/newt"; export const messageHandlers: Record = { "remoteExitNode/register": handleRemoteExitNodeRegisterMessage, - "remoteExitNode/ping": handleRemoteExitNodePingMessage + "remoteExitNode/ping": handleRemoteExitNodePingMessage, + "newt/access-log": handleConnectionLogMessage, }; if (build != "saas") { diff --git a/server/routers/auditLogs/types.ts b/server/routers/auditLogs/types.ts index 474aa9261..4c278cba5 100644 --- a/server/routers/auditLogs/types.ts +++ b/server/routers/auditLogs/types.ts @@ -91,3 +91,50 @@ export type QueryAccessAuditLogResponse = { locations: string[]; }; }; + +export type QueryConnectionAuditLogResponse = { + log: { + sessionId: string; + siteResourceId: number | null; + orgId: string | null; + siteId: number | null; + clientId: number | null; + userId: string | null; + sourceAddr: string; + destAddr: string; + protocol: string; + startedAt: number; + endedAt: number | null; + bytesTx: number | null; + bytesRx: number | null; + resourceName: string | null; + resourceNiceId: string | null; + siteName: string | null; + siteNiceId: string | null; + clientName: string | null; + clientNiceId: string | null; + clientType: string | null; + userEmail: string | null; + }[]; + pagination: { + total: number; + limit: number; + offset: number; + }; + filterAttributes: { + protocols: string[]; + destAddrs: string[]; + clients: { + id: number; + name: string; + }[]; + resources: { + id: number; + name: 
string | null; + }[]; + users: { + id: string; + email: string | null; + }[]; + }; +}; diff --git a/server/routers/external.ts b/server/routers/external.ts index 03d5fa111..177626aa2 100644 --- a/server/routers/external.ts +++ b/server/routers/external.ts @@ -102,6 +102,8 @@ authenticated.put( logActionAudit(ActionsEnum.createSite), site.createSite ); + + authenticated.get( "/org/:orgId/sites", verifyOrgAccess, @@ -1203,6 +1205,22 @@ authRouter.post( }), newt.getNewtToken ); + +authRouter.post( + "/newt/register", + rateLimit({ + windowMs: 15 * 60 * 1000, + max: 30, + keyGenerator: (req) => + `newtRegister:${req.body.provisioningKey?.split(".")[0] || ipKeyGenerator(req.ip || "")}`, + handler: (req, res, next) => { + const message = `You can only register a newt ${30} times every ${15} minutes. Please try again later.`; + return next(createHttpError(HttpCode.TOO_MANY_REQUESTS, message)); + }, + store: createStore() + }), + newt.registerNewt +); authRouter.post( "/olm/get-token", rateLimit({ diff --git a/server/routers/newt/handleConnectionLogMessage.ts b/server/routers/newt/handleConnectionLogMessage.ts new file mode 100644 index 000000000..ca1b129d2 --- /dev/null +++ b/server/routers/newt/handleConnectionLogMessage.ts @@ -0,0 +1,13 @@ +import { MessageHandler } from "@server/routers/ws"; + +export async function flushConnectionLogToDb(): Promise { + return; +} + +export async function cleanUpOldLogs(orgId: string, retentionDays: number) { + return; +} + +export const handleConnectionLogMessage: MessageHandler = async (context) => { + return; +}; diff --git a/server/routers/newt/index.ts b/server/routers/newt/index.ts index f31cd753b..33b5caf7c 100644 --- a/server/routers/newt/index.ts +++ b/server/routers/newt/index.ts @@ -8,3 +8,5 @@ export * from "./handleNewtPingRequestMessage"; export * from "./handleApplyBlueprintMessage"; export * from "./handleNewtPingMessage"; export * from "./handleNewtDisconnectingMessage"; +export * from "./handleConnectionLogMessage"; 
+export * from "./registerNewt";
diff --git a/server/routers/newt/registerNewt.ts b/server/routers/newt/registerNewt.ts
new file mode 100644
index 000000000..427ac173f
--- /dev/null
+++ b/server/routers/newt/registerNewt.ts
@@ -0,0 +1,263 @@
+import { Request, Response, NextFunction } from "express";
+import { z } from "zod";
+import {
+    db,
+    siteProvisioningKeys,
+    siteProvisioningKeyOrg,
+    newts,
+    orgs,
+    roles,
+    roleSites,
+    sites
+} from "@server/db";
+import response from "@server/lib/response";
+import HttpCode from "@server/types/HttpCode";
+import createHttpError from "http-errors";
+import logger from "@server/logger";
+import { eq, and, sql } from "drizzle-orm";
+import { fromError } from "zod-validation-error";
+import { verifyPassword, hashPassword } from "@server/auth/password";
+import {
+    generateId,
+    generateIdFromEntropySize
+} from "@server/auth/sessions/app";
+import { getUniqueSiteName } from "@server/db/names";
+import moment from "moment";
+import { build } from "@server/build";
+import { usageService } from "@server/lib/billing/usageService";
+import { FeatureId } from "@server/lib/billing";
+
+const bodySchema = z.object({
+    provisioningKey: z.string().nonempty()
+});
+
+export type RegisterNewtBody = z.infer<typeof bodySchema>;
+
+export type RegisterNewtResponse = {
+    newtId: string;
+    secret: string;
+};
+
+export async function registerNewt(
+    req: Request,
+    res: Response,
+    next: NextFunction
+): Promise<any> {
+    try {
+        const parsedBody = bodySchema.safeParse(req.body);
+        if (!parsedBody.success) {
+            return next(
+                createHttpError(
+                    HttpCode.BAD_REQUEST,
+                    fromError(parsedBody.error).toString()
+                )
+            );
+        }
+
+        const { provisioningKey } = parsedBody.data;
+
+        // Keys are in the format "siteProvisioningKeyId.secret"
+        const dotIndex = provisioningKey.indexOf(".");
+        if (dotIndex === -1) {
+            return next(
+                createHttpError(
HttpCode.BAD_REQUEST, + "Invalid provisioning key format" + ) + ); + } + + const provisioningKeyId = provisioningKey.substring(0, dotIndex); + const provisioningKeySecret = provisioningKey.substring(dotIndex + 1); + + // Look up the provisioning key by ID, joining to get the orgId + const [keyRecord] = await db + .select({ + siteProvisioningKeyId: + siteProvisioningKeys.siteProvisioningKeyId, + siteProvisioningKeyHash: + siteProvisioningKeys.siteProvisioningKeyHash, + orgId: siteProvisioningKeyOrg.orgId, + maxBatchSize: siteProvisioningKeys.maxBatchSize, + numUsed: siteProvisioningKeys.numUsed, + validUntil: siteProvisioningKeys.validUntil + }) + .from(siteProvisioningKeys) + .innerJoin( + siteProvisioningKeyOrg, + eq( + siteProvisioningKeys.siteProvisioningKeyId, + siteProvisioningKeyOrg.siteProvisioningKeyId + ) + ) + .where( + eq( + siteProvisioningKeys.siteProvisioningKeyId, + provisioningKeyId + ) + ) + .limit(1); + + if (!keyRecord) { + return next( + createHttpError( + HttpCode.UNAUTHORIZED, + "Invalid provisioning key" + ) + ); + } + + // Verify the secret portion against the stored hash + const validSecret = await verifyPassword( + provisioningKeySecret, + keyRecord.siteProvisioningKeyHash + ); + if (!validSecret) { + return next( + createHttpError( + HttpCode.UNAUTHORIZED, + "Invalid provisioning key" + ) + ); + } + + if (keyRecord.maxBatchSize && keyRecord.numUsed >= keyRecord.maxBatchSize) { + return next( + createHttpError( + HttpCode.UNAUTHORIZED, + "Provisioning key has reached its maximum usage" + ) + ); + } + + if (keyRecord.validUntil && new Date(keyRecord.validUntil) < new Date()) { + return next( + createHttpError( + HttpCode.UNAUTHORIZED, + "Provisioning key has expired" + ) + ); + } + + const { orgId } = keyRecord; + + // Verify the org exists + const [org] = await db.select().from(orgs).where(eq(orgs.orgId, orgId)); + if (!org) { + return next( + createHttpError(HttpCode.NOT_FOUND, "Organization not found") + ); + } + + // SaaS billing check 
+ if (build == "saas") { + const usage = await usageService.getUsage(orgId, FeatureId.SITES); + if (!usage) { + return next( + createHttpError( + HttpCode.NOT_FOUND, + "No usage data found for this organization" + ) + ); + } + const rejectSites = await usageService.checkLimitSet( + orgId, + FeatureId.SITES, + { + ...usage, + instantaneousValue: (usage.instantaneousValue || 0) + 1 + } + ); + if (rejectSites) { + return next( + createHttpError( + HttpCode.FORBIDDEN, + "Site limit exceeded. Please upgrade your plan." + ) + ); + } + } + + const niceId = await getUniqueSiteName(orgId); + const newtId = generateId(15); + const newtSecret = generateIdFromEntropySize(25); + const secretHash = await hashPassword(newtSecret); + + let newSiteId: number | undefined; + + await db.transaction(async (trx) => { + // Create the site (type "newt", name = niceId) + const [newSite] = await trx + .insert(sites) + .values({ + orgId, + name: niceId, + niceId, + type: "newt", + dockerSocketEnabled: true + }) + .returning(); + + newSiteId = newSite.siteId; + + // Grant admin role access to the new site + const [adminRole] = await trx + .select() + .from(roles) + .where(and(eq(roles.isAdmin, true), eq(roles.orgId, orgId))) + .limit(1); + + if (!adminRole) { + throw new Error(`Admin role not found for org ${orgId}`); + } + + await trx.insert(roleSites).values({ + roleId: adminRole.roleId, + siteId: newSite.siteId + }); + + // Create the newt for this site + await trx.insert(newts).values({ + newtId, + secretHash, + siteId: newSite.siteId, + dateCreated: moment().toISOString() + }); + + // Consume the provisioning key — cascade removes siteProvisioningKeyOrg + await trx + .update(siteProvisioningKeys) + .set({ + lastUsed: moment().toISOString(), + numUsed: sql`${siteProvisioningKeys.numUsed} + 1` + }) + .where( + eq( + siteProvisioningKeys.siteProvisioningKeyId, + provisioningKeyId + ) + ); + + await usageService.add(orgId, FeatureId.SITES, 1, trx); + }); + + logger.info( + `Provisioned new 
site (ID: ${newSiteId}) and newt (ID: ${newtId}) for org ${orgId} via provisioning key ${provisioningKeyId}` + ); + + return response(res, { + data: { + newtId, + secret: newtSecret + }, + success: true, + error: false, + message: "Newt registered successfully", + status: HttpCode.CREATED + }); + } catch (error) { + logger.error(error); + return next( + createHttpError(HttpCode.INTERNAL_SERVER_ERROR, "An error occurred") + ); + } +} diff --git a/server/routers/siteProvisioning/types.ts b/server/routers/siteProvisioning/types.ts new file mode 100644 index 000000000..d06c1fe26 --- /dev/null +++ b/server/routers/siteProvisioning/types.ts @@ -0,0 +1,41 @@ +export type SiteProvisioningKeyListItem = { + siteProvisioningKeyId: string; + orgId: string; + lastChars: string; + createdAt: string; + name: string; + lastUsed: string | null; + maxBatchSize: number | null; + numUsed: number; + validUntil: string | null; +}; + +export type ListSiteProvisioningKeysResponse = { + siteProvisioningKeys: SiteProvisioningKeyListItem[]; + pagination: { total: number; limit: number; offset: number }; +}; + +export type CreateSiteProvisioningKeyResponse = { + siteProvisioningKeyId: string; + orgId: string; + name: string; + siteProvisioningKey: string; + lastChars: string; + createdAt: string; + lastUsed: string | null; + maxBatchSize: number | null; + numUsed: number; + validUntil: string | null; +}; + +export type UpdateSiteProvisioningKeyResponse = { + siteProvisioningKeyId: string; + orgId: string; + name: string; + lastChars: string; + createdAt: string; + lastUsed: string | null; + maxBatchSize: number | null; + numUsed: number; + validUntil: string | null; +}; diff --git a/src/app/[orgId]/settings/logs/connection/page.tsx b/src/app/[orgId]/settings/logs/connection/page.tsx new file mode 100644 index 000000000..dff42faac --- /dev/null +++ b/src/app/[orgId]/settings/logs/connection/page.tsx @@ -0,0 +1,760 @@ +"use client"; +import { Button } from "@app/components/ui/button"; +import { 
ColumnFilter } from "@app/components/ColumnFilter"; +import { DateTimeValue } from "@app/components/DateTimePicker"; +import { LogDataTable } from "@app/components/LogDataTable"; +import { PaidFeaturesAlert } from "@app/components/PaidFeaturesAlert"; +import SettingsSectionTitle from "@app/components/SettingsSectionTitle"; +import { useEnvContext } from "@app/hooks/useEnvContext"; +import { usePaidStatus } from "@app/hooks/usePaidStatus"; +import { useStoredPageSize } from "@app/hooks/useStoredPageSize"; +import { toast } from "@app/hooks/useToast"; +import { createApiClient } from "@app/lib/api"; +import { getSevenDaysAgo } from "@app/lib/getSevenDaysAgo"; +import { build } from "@server/build"; +import { tierMatrix } from "@server/lib/billing/tierMatrix"; +import { ColumnDef } from "@tanstack/react-table"; +import axios from "axios"; +import { ArrowUpRight, Laptop, User } from "lucide-react"; +import Link from "next/link"; +import { useTranslations } from "next-intl"; +import { useParams, useRouter, useSearchParams } from "next/navigation"; +import { useEffect, useState, useTransition } from "react"; + +function formatBytes(bytes: number | null): string { + if (bytes === null || bytes === undefined) return "—"; + if (bytes === 0) return "0 B"; + const units = ["B", "KB", "MB", "GB", "TB"]; + const i = Math.floor(Math.log(bytes) / Math.log(1024)); + const value = bytes / Math.pow(1024, i); + return `${value.toFixed(i === 0 ? 
0 : 1)} ${units[i]}`; +} + +function formatDuration(startedAt: number, endedAt: number | null): string { + if (endedAt === null || endedAt === undefined) return "Active"; + const durationSec = endedAt - startedAt; + if (durationSec < 0) return "—"; + if (durationSec < 60) return `${durationSec}s`; + if (durationSec < 3600) { + const m = Math.floor(durationSec / 60); + const s = durationSec % 60; + return `${m}m ${s}s`; + } + const h = Math.floor(durationSec / 3600); + const m = Math.floor((durationSec % 3600) / 60); + return `${h}h ${m}m`; +} + +export default function ConnectionLogsPage() { + const router = useRouter(); + const api = createApiClient(useEnvContext()); + const t = useTranslations(); + const { orgId } = useParams(); + const searchParams = useSearchParams(); + + const { isPaidUser } = usePaidStatus(); + + const [rows, setRows] = useState([]); + const [isRefreshing, setIsRefreshing] = useState(false); + const [isExporting, startTransition] = useTransition(); + const [filterAttributes, setFilterAttributes] = useState<{ + protocols: string[]; + destAddrs: string[]; + clients: { id: number; name: string }[]; + resources: { id: number; name: string | null }[]; + users: { id: string; email: string | null }[]; + }>({ + protocols: [], + destAddrs: [], + clients: [], + resources: [], + users: [] + }); + + // Filter states - unified object for all filters + const [filters, setFilters] = useState<{ + protocol?: string; + destAddr?: string; + clientId?: string; + siteResourceId?: string; + userId?: string; + }>({ + protocol: searchParams.get("protocol") || undefined, + destAddr: searchParams.get("destAddr") || undefined, + clientId: searchParams.get("clientId") || undefined, + siteResourceId: searchParams.get("siteResourceId") || undefined, + userId: searchParams.get("userId") || undefined + }); + + // Pagination state + const [totalCount, setTotalCount] = useState(0); + const [currentPage, setCurrentPage] = useState(0); + const [isLoading, setIsLoading] = 
useState(false); + + // Initialize page size from storage or default + const [pageSize, setPageSize] = useStoredPageSize( + "connection-audit-logs", + 20 + ); + + // Set default date range to last 7 days + const getDefaultDateRange = () => { + // if the time is in the url params, use that instead + const startParam = searchParams.get("start"); + const endParam = searchParams.get("end"); + if (startParam && endParam) { + return { + startDate: { + date: new Date(startParam) + }, + endDate: { + date: new Date(endParam) + } + }; + } + + const now = new Date(); + const lastWeek = getSevenDaysAgo(); + + return { + startDate: { + date: lastWeek + }, + endDate: { + date: now + } + }; + }; + + const [dateRange, setDateRange] = useState<{ + startDate: DateTimeValue; + endDate: DateTimeValue; + }>(getDefaultDateRange()); + + // Trigger search with default values on component mount + useEffect(() => { + if (build === "oss") { + return; + } + const defaultRange = getDefaultDateRange(); + queryDateTime( + defaultRange.startDate, + defaultRange.endDate, + 0, + pageSize + ); + }, [orgId]); // Re-run if orgId changes + + const handleDateRangeChange = ( + startDate: DateTimeValue, + endDate: DateTimeValue + ) => { + setDateRange({ startDate, endDate }); + setCurrentPage(0); // Reset to first page when filtering + // put the search params in the url for the time + updateUrlParamsForAllFilters({ + start: startDate.date?.toISOString() || "", + end: endDate.date?.toISOString() || "" + }); + + queryDateTime(startDate, endDate, 0, pageSize); + }; + + // Handle page changes + const handlePageChange = (newPage: number) => { + setCurrentPage(newPage); + queryDateTime( + dateRange.startDate, + dateRange.endDate, + newPage, + pageSize + ); + }; + + // Handle page size changes + const handlePageSizeChange = (newPageSize: number) => { + setPageSize(newPageSize); + setCurrentPage(0); // Reset to first page when changing page size + queryDateTime(dateRange.startDate, dateRange.endDate, 0, 
newPageSize); + }; + + // Handle filter changes generically + const handleFilterChange = ( + filterType: keyof typeof filters, + value: string | undefined + ) => { + // Create new filters object with updated value + const newFilters = { + ...filters, + [filterType]: value + }; + + setFilters(newFilters); + setCurrentPage(0); // Reset to first page when filtering + + // Update URL params + updateUrlParamsForAllFilters(newFilters); + + // Trigger new query with updated filters (pass directly to avoid async state issues) + queryDateTime( + dateRange.startDate, + dateRange.endDate, + 0, + pageSize, + newFilters + ); + }; + + const updateUrlParamsForAllFilters = ( + newFilters: + | typeof filters + | { + start: string; + end: string; + } + ) => { + const params = new URLSearchParams(searchParams); + Object.entries(newFilters).forEach(([key, value]) => { + if (value) { + params.set(key, value); + } else { + params.delete(key); + } + }); + router.replace(`?${params.toString()}`, { scroll: false }); + }; + + const queryDateTime = async ( + startDate: DateTimeValue, + endDate: DateTimeValue, + page: number = currentPage, + size: number = pageSize, + filtersParam?: typeof filters + ) => { + console.log("Date range changed:", { startDate, endDate, page, size }); + if (!isPaidUser(tierMatrix.connectionLogs)) { + console.log( + "Access denied: subscription inactive or license locked" + ); + return; + } + setIsLoading(true); + + try { + // Use the provided filters or fall back to current state + const activeFilters = filtersParam || filters; + + // Convert the date/time values to API parameters + const params: any = { + limit: size, + offset: page * size, + ...activeFilters + }; + + if (startDate?.date) { + const startDateTime = new Date(startDate.date); + if (startDate.time) { + const [hours, minutes, seconds] = startDate.time + .split(":") + .map(Number); + startDateTime.setHours(hours, minutes, seconds || 0); + } + params.timeStart = startDateTime.toISOString(); + } + + if 
(endDate?.date) { + const endDateTime = new Date(endDate.date); + if (endDate.time) { + const [hours, minutes, seconds] = endDate.time + .split(":") + .map(Number); + endDateTime.setHours(hours, minutes, seconds || 0); + } else { + // If no time is specified, set to NOW + const now = new Date(); + endDateTime.setHours( + now.getHours(), + now.getMinutes(), + now.getSeconds(), + now.getMilliseconds() + ); + } + params.timeEnd = endDateTime.toISOString(); + } + + const res = await api.get(`/org/${orgId}/logs/connection`, { + params + }); + if (res.status === 200) { + setRows(res.data.data.log || []); + setTotalCount(res.data.data.pagination?.total || 0); + setFilterAttributes(res.data.data.filterAttributes); + console.log("Fetched connection logs:", res.data); + } + } catch (error) { + toast({ + title: t("error"), + description: t("Failed to filter logs"), + variant: "destructive" + }); + } finally { + setIsLoading(false); + } + }; + + const refreshData = async () => { + console.log("Data refreshed"); + setIsRefreshing(true); + try { + // Refresh data with current date range and pagination + await queryDateTime( + dateRange.startDate, + dateRange.endDate, + currentPage, + pageSize + ); + } catch (error) { + toast({ + title: t("error"), + description: t("refreshError"), + variant: "destructive" + }); + } finally { + setIsRefreshing(false); + } + }; + + const exportData = async () => { + try { + // Prepare query params for export + const params: any = { + timeStart: dateRange.startDate?.date + ? new Date(dateRange.startDate.date).toISOString() + : undefined, + timeEnd: dateRange.endDate?.date + ? 
new Date(dateRange.endDate.date).toISOString() + : undefined, + ...filters + }; + + const response = await api.get( + `/org/${orgId}/logs/connection/export`, + { + responseType: "blob", + params + } + ); + + // Create a URL for the blob and trigger a download + const url = window.URL.createObjectURL(new Blob([response.data])); + const link = document.createElement("a"); + link.href = url; + const epoch = Math.floor(Date.now() / 1000); + link.setAttribute( + "download", + `connection-audit-logs-${orgId}-${epoch}.csv` + ); + document.body.appendChild(link); + link.click(); + link.parentNode?.removeChild(link); + } catch (error) { + let apiErrorMessage: string | null = null; + if (axios.isAxiosError(error) && error.response) { + const data = error.response.data; + + if (data instanceof Blob && data.type === "application/json") { + // Parse the Blob as JSON + const text = await data.text(); + const errorData = JSON.parse(text); + apiErrorMessage = errorData.message; + } + } + toast({ + title: t("error"), + description: apiErrorMessage ?? t("exportError"), + variant: "destructive" + }); + } + }; + + const columns: ColumnDef[] = [ + { + accessorKey: "startedAt", + header: ({ column }) => { + return t("timestamp"); + }, + cell: ({ row }) => { + return ( +
+ {new Date( + row.original.startedAt * 1000 + ).toLocaleString()} +
+ ); + } + }, + { + accessorKey: "protocol", + header: ({ column }) => { + return ( +
+ {t("protocol")} + ({ + label: protocol.toUpperCase(), + value: protocol + }) + )} + selectedValue={filters.protocol} + onValueChange={(value) => + handleFilterChange("protocol", value) + } + searchPlaceholder="Search..." + emptyMessage="None found" + /> +
+ ); + }, + cell: ({ row }) => { + return ( + + {row.original.protocol?.toUpperCase()} + + ); + } + }, + { + accessorKey: "resourceName", + header: ({ column }) => { + return ( +
+ {t("resource")} + ({ + value: res.id.toString(), + label: res.name || "Unnamed Resource" + }))} + selectedValue={filters.siteResourceId} + onValueChange={(value) => + handleFilterChange("siteResourceId", value) + } + searchPlaceholder="Search..." + emptyMessage="None found" + /> +
+ ); + }, + cell: ({ row }) => { + if (row.original.resourceName && row.original.resourceNiceId) { + return ( + + + + ); + } + return ( + + {row.original.resourceName ?? "—"} + + ); + } + }, + { + accessorKey: "clientName", + header: ({ column }) => { + return ( +
+ {t("client")} + ({ + value: c.id.toString(), + label: c.name + }))} + selectedValue={filters.clientId} + onValueChange={(value) => + handleFilterChange("clientId", value) + } + searchPlaceholder="Search..." + emptyMessage="None found" + /> +
+ ); + }, + cell: ({ row }) => { + const clientType = row.original.clientType === "olm" ? "machine" : "user"; + if (row.original.clientName && row.original.clientNiceId) { + return ( + + + + ); + } + return ( + + {row.original.clientName ?? "—"} + + ); + } + }, + { + accessorKey: "userEmail", + header: ({ column }) => { + return ( +
+ {t("user")} + ({ + value: u.id, + label: u.email || u.id + }))} + selectedValue={filters.userId} + onValueChange={(value) => + handleFilterChange("userId", value) + } + searchPlaceholder="Search..." + emptyMessage="None found" + /> +
+ ); + }, + cell: ({ row }) => { + if (row.original.userEmail || row.original.userId) { + return ( + + + {row.original.userEmail ?? row.original.userId} + + ); + } + return ; + } + }, + { + accessorKey: "sourceAddr", + header: ({ column }) => { + return t("sourceAddress"); + }, + cell: ({ row }) => { + return ( + + {row.original.sourceAddr} + + ); + } + }, + { + accessorKey: "destAddr", + header: ({ column }) => { + return ( +
+ {t("destinationAddress")} + ({ + value: addr, + label: addr + }))} + selectedValue={filters.destAddr} + onValueChange={(value) => + handleFilterChange("destAddr", value) + } + searchPlaceholder="Search..." + emptyMessage="None found" + /> +
+ ); + }, + cell: ({ row }) => { + return ( + + {row.original.destAddr} + + ); + } + }, + { + accessorKey: "duration", + header: ({ column }) => { + return t("duration"); + }, + cell: ({ row }) => { + return ( + + {formatDuration( + row.original.startedAt, + row.original.endedAt + )} + + ); + } + } + ]; + + const renderExpandedRow = (row: any) => { + return ( +
+
+
+ {/*
+ Connection Details +
*/} +
+ Session ID:{" "} + + {row.sessionId ?? "—"} + +
+
+ Protocol:{" "} + {row.protocol?.toUpperCase() ?? "—"} +
+
+ Source:{" "} + + {row.sourceAddr ?? "—"} + +
+
+ Destination:{" "} + + {row.destAddr ?? "—"} + +
+
+
+ {/*
+ Resource & Site +
*/} + {/*
+ Resource:{" "} + {row.resourceName ?? "—"} + {row.resourceNiceId && ( + + ({row.resourceNiceId}) + + )} +
*/} +
+ Site: {row.siteName ?? "—"} + {row.siteNiceId && ( + + ({row.siteNiceId}) + + )} +
+
+ Site ID: {row.siteId ?? "—"} +
+
+ Started At:{" "} + {row.startedAt + ? new Date( + row.startedAt * 1000 + ).toLocaleString() + : "—"} +
+
+ Ended At:{" "} + {row.endedAt + ? new Date( + row.endedAt * 1000 + ).toLocaleString() + : "Active"} +
+
+ Duration:{" "} + {formatDuration(row.startedAt, row.endedAt)} +
+ {/*
+ Resource ID:{" "} + {row.siteResourceId ?? "—"} +
*/} +
+
+ {/*
+ Client & Transfer +
*/} + {/*
+ Bytes Sent (TX):{" "} + {formatBytes(row.bytesTx)} +
*/} + {/*
+ Bytes Received (RX):{" "} + {formatBytes(row.bytesRx)} +
*/} + {/*
+ Total Transfer:{" "} + {formatBytes( + (row.bytesTx ?? 0) + (row.bytesRx ?? 0) + )} +
*/} +
+
+
+ ); + }; + + return ( + <> + + + + + startTransition(exportData)} + isExporting={isExporting} + onDateRangeChange={handleDateRangeChange} + dateRange={{ + start: dateRange.startDate, + end: dateRange.endDate + }} + defaultSort={{ + id: "startedAt", + desc: true + }} + // Server-side pagination props + totalCount={totalCount} + currentPage={currentPage} + pageSize={pageSize} + onPageChange={handlePageChange} + onPageSizeChange={handlePageSizeChange} + isLoading={isLoading} + // Row expansion props + expandable={true} + renderExpandedRow={renderExpandedRow} + disabled={ + !isPaidUser(tierMatrix.connectionLogs) || build === "oss" + } + /> + + ); +} diff --git a/src/app/[orgId]/settings/provisioning/page.tsx b/src/app/[orgId]/settings/provisioning/page.tsx new file mode 100644 index 000000000..e8b53104f --- /dev/null +++ b/src/app/[orgId]/settings/provisioning/page.tsx @@ -0,0 +1,60 @@ +import { internal } from "@app/lib/api"; +import { authCookieHeader } from "@app/lib/api/cookies"; +import { AxiosResponse } from "axios"; +import { PaidFeaturesAlert } from "@app/components/PaidFeaturesAlert"; +import SettingsSectionTitle from "@app/components/SettingsSectionTitle"; +import SiteProvisioningKeysTable, { + SiteProvisioningKeyRow +} from "../../../../components/SiteProvisioningKeysTable"; +import { ListSiteProvisioningKeysResponse } from "@server/routers/siteProvisioning/types"; +import { getTranslations } from "next-intl/server"; +import { TierFeature, tierMatrix } from "@server/lib/billing/tierMatrix"; + +type ProvisioningPageProps = { + params: Promise<{ orgId: string }>; +}; + +export const dynamic = "force-dynamic"; + +export default async function ProvisioningPage(props: ProvisioningPageProps) { + const params = await props.params; + const t = await getTranslations(); + + let siteProvisioningKeys: ListSiteProvisioningKeysResponse["siteProvisioningKeys"] = + []; + try { + const res = await internal.get< + AxiosResponse + >( + 
`/org/${params.orgId}/site-provisioning-keys`, + await authCookieHeader() + ); + siteProvisioningKeys = res.data.data.siteProvisioningKeys; + } catch (e) {} + + const rows: SiteProvisioningKeyRow[] = siteProvisioningKeys.map((k) => ({ + name: k.name, + id: k.siteProvisioningKeyId, + key: `${k.siteProvisioningKeyId}••••••••••••••••••••${k.lastChars}`, + createdAt: k.createdAt, + lastUsed: k.lastUsed, + maxBatchSize: k.maxBatchSize, + numUsed: k.numUsed, + validUntil: k.validUntil + })); + + return ( + <> + + + + + + + ); +} diff --git a/src/app/navigation.tsx b/src/app/navigation.tsx index 0066721db..66e6cdad0 100644 --- a/src/app/navigation.tsx +++ b/src/app/navigation.tsx @@ -2,7 +2,9 @@ import { SidebarNavItem } from "@app/components/SidebarNav"; import { Env } from "@app/lib/types/env"; import { build } from "@server/build"; import { + Boxes, Building2, + Cable, ChartLine, Combine, CreditCard, @@ -189,6 +191,11 @@ export const orgNavSections = ( title: "sidebarLogsAction", href: "/{orgId}/settings/logs/action", icon: + }, + { + title: "sidebarLogsConnection", + href: "/{orgId}/settings/logs/connection", + icon: } ] : []) @@ -203,6 +210,11 @@ export const orgNavSections = ( href: "/{orgId}/settings/api-keys", icon: }, + { + title: "sidebarProvisioning", + href: "/{orgId}/settings/provisioning", + icon: + }, { title: "sidebarBluePrints", href: "/{orgId}/settings/blueprints", diff --git a/src/components/CreateSiteProvisioningKeyCredenza.tsx b/src/components/CreateSiteProvisioningKeyCredenza.tsx new file mode 100644 index 000000000..3a1c7c372 --- /dev/null +++ b/src/components/CreateSiteProvisioningKeyCredenza.tsx @@ -0,0 +1,398 @@ +"use client"; + +import { + Credenza, + CredenzaBody, + CredenzaClose, + CredenzaContent, + CredenzaDescription, + CredenzaFooter, + CredenzaHeader, + CredenzaTitle +} from "@app/components/Credenza"; +import { + Form, + FormControl, + FormDescription, + FormField, + FormItem, + FormLabel, + FormMessage +} from "@app/components/ui/form"; 
+import { Button } from "@app/components/ui/button"; +import { Checkbox } from "@app/components/ui/checkbox"; +import { Input } from "@app/components/ui/input"; +import { Alert, AlertDescription, AlertTitle } from "@app/components/ui/alert"; +import { useEnvContext } from "@app/hooks/useEnvContext"; +import { toast } from "@app/hooks/useToast"; +import { createApiClient, formatAxiosError } from "@app/lib/api"; +import { CreateSiteProvisioningKeyResponse } from "@server/routers/siteProvisioning/types"; +import { AxiosResponse } from "axios"; +import { InfoIcon } from "lucide-react"; +import { useTranslations } from "next-intl"; +import { useRouter } from "next/navigation"; +import { useEffect, useState } from "react"; +import { useForm } from "react-hook-form"; +import { z } from "zod"; +import { zodResolver } from "@hookform/resolvers/zod"; +import CopyTextBox from "@app/components/CopyTextBox"; +import { + DateTimePicker, + DateTimeValue +} from "@app/components/DateTimePicker"; + +const FORM_ID = "create-site-provisioning-key-form"; + +type CreateSiteProvisioningKeyCredenzaProps = { + open: boolean; + setOpen: (open: boolean) => void; + orgId: string; +}; + +export default function CreateSiteProvisioningKeyCredenza({ + open, + setOpen, + orgId +}: CreateSiteProvisioningKeyCredenzaProps) { + const t = useTranslations(); + const router = useRouter(); + const api = createApiClient(useEnvContext()); + const [loading, setLoading] = useState(false); + const [created, setCreated] = + useState(null); + + const createFormSchema = z + .object({ + name: z + .string() + .min(1, { + message: t("nameMin", { len: 1 }) + }) + .max(255, { + message: t("nameMax", { len: 255 }) + }), + unlimitedBatchSize: z.boolean(), + maxBatchSize: z + .number() + .int() + .min(1, { message: t("provisioningKeysMaxBatchSizeInvalid") }) + .max(1_000_000, { + message: t("provisioningKeysMaxBatchSizeInvalid") + }), + validUntil: z.string().optional() + }) + .superRefine((data, ctx) => { + const v = 
data.validUntil; + if (v == null || v.trim() === "") { + return; + } + if (Number.isNaN(Date.parse(v))) { + ctx.addIssue({ + code: "custom", + message: t("provisioningKeysValidUntilInvalid"), + path: ["validUntil"] + }); + } + }); + + type CreateFormValues = z.infer; + + const form = useForm({ + resolver: zodResolver(createFormSchema), + defaultValues: { + name: "", + unlimitedBatchSize: false, + maxBatchSize: 100, + validUntil: "" + } + }); + + useEffect(() => { + if (!open) { + setCreated(null); + form.reset({ + name: "", + unlimitedBatchSize: false, + maxBatchSize: 100, + validUntil: "" + }); + } + }, [open, form]); + + async function onSubmit(data: CreateFormValues) { + setLoading(true); + try { + const res = await api + .put< + AxiosResponse + >(`/org/${orgId}/site-provisioning-key`, { + name: data.name, + maxBatchSize: data.unlimitedBatchSize + ? null + : data.maxBatchSize, + validUntil: + data.validUntil == null || data.validUntil.trim() === "" + ? undefined + : data.validUntil + }) + .catch((e) => { + toast({ + variant: "destructive", + title: t("provisioningKeysErrorCreate"), + description: formatAxiosError(e) + }); + }); + + if (res && res.status === 201) { + setCreated(res.data.data); + router.refresh(); + } + } finally { + setLoading(false); + } + } + + const credential = + created && + created.siteProvisioningKey; + + const unlimitedBatchSize = form.watch("unlimitedBatchSize"); + + return ( + + + + + {created + ? t("provisioningKeysList") + : t("provisioningKeysCreate")} + + {!created && ( + + {t("provisioningKeysCreateDescription")} + + )} + + + {!created && ( +
+ + ( + + {t("name")} + + + + + + )} + /> + ( + + + {t( + "provisioningKeysMaxBatchSize" + )} + + + { + const v = + e.target.value; + field.onChange( + v === "" + ? 100 + : Number(v) + ); + }} + value={field.value} + /> + + + + )} + /> + ( + + + + field.onChange( + c === true + ) + } + /> + + + {t( + "provisioningKeysUnlimitedBatchSize" + )} + + + )} + /> + { + const dateTimeValue: DateTimeValue = + (() => { + if (!field.value) return {}; + const d = new Date( + field.value + ); + if (isNaN(d.getTime())) + return {}; + const hours = d + .getHours() + .toString() + .padStart(2, "0"); + const minutes = d + .getMinutes() + .toString() + .padStart(2, "0"); + const seconds = d + .getSeconds() + .toString() + .padStart(2, "0"); + return { + date: d, + time: `${hours}:${minutes}:${seconds}` + }; + })(); + + return ( + + + {t( + "provisioningKeysValidUntil" + )} + + + { + if (!value.date) { + field.onChange( + "" + ); + return; + } + const d = new Date( + value.date + ); + if (value.time) { + const [ + h, + m, + s + ] = + value.time.split( + ":" + ); + d.setHours( + parseInt( + h, + 10 + ), + parseInt( + m, + 10 + ), + parseInt( + s || "0", + 10 + ) + ); + } + field.onChange( + d.toISOString() + ); + }} + /> + + + {t( + "provisioningKeysValidUntilHint" + )} + + + + ); + }} + /> + + + )} + + {created && credential && ( +
+ + + + {t("provisioningKeysSave")} + + + {t("provisioningKeysSaveDescription")} + + + +
+ )} +
+ + {!created ? ( + <> + + + + + + ) : ( + + + + )} + +
+
+ ); +} diff --git a/src/components/EditSiteProvisioningKeyCredenza.tsx b/src/components/EditSiteProvisioningKeyCredenza.tsx new file mode 100644 index 000000000..138190edc --- /dev/null +++ b/src/components/EditSiteProvisioningKeyCredenza.tsx @@ -0,0 +1,348 @@ +"use client"; + +import { + Credenza, + CredenzaBody, + CredenzaClose, + CredenzaContent, + CredenzaDescription, + CredenzaFooter, + CredenzaHeader, + CredenzaTitle +} from "@app/components/Credenza"; +import { + Form, + FormControl, + FormDescription, + FormField, + FormItem, + FormLabel, + FormMessage +} from "@app/components/ui/form"; +import { Button } from "@app/components/ui/button"; +import { Checkbox } from "@app/components/ui/checkbox"; +import { Input } from "@app/components/ui/input"; +import { useEnvContext } from "@app/hooks/useEnvContext"; +import { toast } from "@app/hooks/useToast"; +import { createApiClient, formatAxiosError } from "@app/lib/api"; +import { UpdateSiteProvisioningKeyResponse } from "@server/routers/siteProvisioning/types"; +import { AxiosResponse } from "axios"; +import { useTranslations } from "next-intl"; +import { useRouter } from "next/navigation"; +import { useEffect, useState } from "react"; +import { useForm } from "react-hook-form"; +import { z } from "zod"; +import { zodResolver } from "@hookform/resolvers/zod"; +import { + DateTimePicker, + DateTimeValue +} from "@app/components/DateTimePicker"; + +const FORM_ID = "edit-site-provisioning-key-form"; + +export type EditableSiteProvisioningKey = { + id: string; + name: string; + maxBatchSize: number | null; + validUntil: string | null; +}; + +type EditSiteProvisioningKeyCredenzaProps = { + open: boolean; + setOpen: (open: boolean) => void; + orgId: string; + provisioningKey: EditableSiteProvisioningKey | null; +}; + +export default function EditSiteProvisioningKeyCredenza({ + open, + setOpen, + orgId, + provisioningKey +}: EditSiteProvisioningKeyCredenzaProps) { + const t = useTranslations(); + const router = 
useRouter(); + const api = createApiClient(useEnvContext()); + const [loading, setLoading] = useState(false); + + const editFormSchema = z + .object({ + name: z.string(), + unlimitedBatchSize: z.boolean(), + maxBatchSize: z + .number() + .int() + .min(1, { message: t("provisioningKeysMaxBatchSizeInvalid") }) + .max(1_000_000, { + message: t("provisioningKeysMaxBatchSizeInvalid") + }), + validUntil: z.string().optional() + }) + .superRefine((data, ctx) => { + const v = data.validUntil; + if (v == null || v.trim() === "") { + return; + } + if (Number.isNaN(Date.parse(v))) { + ctx.addIssue({ + code: "custom", + message: t("provisioningKeysValidUntilInvalid"), + path: ["validUntil"] + }); + } + }); + + type EditFormValues = z.infer; + + const form = useForm({ + resolver: zodResolver(editFormSchema), + defaultValues: { + name: "", + unlimitedBatchSize: false, + maxBatchSize: 100, + validUntil: "" + } + }); + + useEffect(() => { + if (!open || !provisioningKey) { + return; + } + form.reset({ + name: provisioningKey.name, + unlimitedBatchSize: provisioningKey.maxBatchSize == null, + maxBatchSize: provisioningKey.maxBatchSize ?? 100, + validUntil: provisioningKey.validUntil ?? "" + }); + }, [open, provisioningKey, form]); + + async function onSubmit(data: EditFormValues) { + if (!provisioningKey) { + return; + } + setLoading(true); + try { + const res = await api + .patch< + AxiosResponse + >( + `/org/${orgId}/site-provisioning-key/${provisioningKey.id}`, + { + maxBatchSize: data.unlimitedBatchSize + ? null + : data.maxBatchSize, + validUntil: + data.validUntil == null || + data.validUntil.trim() === "" + ? 
"" + : data.validUntil + } + ) + .catch((e) => { + toast({ + variant: "destructive", + title: t("provisioningKeysUpdateError"), + description: formatAxiosError(e) + }); + }); + + if (res && res.status === 200) { + toast({ + title: t("provisioningKeysUpdated"), + description: t("provisioningKeysUpdatedDescription") + }); + setOpen(false); + router.refresh(); + } + } finally { + setLoading(false); + } + } + + const unlimitedBatchSize = form.watch("unlimitedBatchSize"); + + if (!provisioningKey) { + return null; + } + + return ( + + + + {t("provisioningKeysEdit")} + + {t("provisioningKeysEditDescription")} + + + +
+ + ( + + {t("name")} + + + + + )} + /> + ( + + + {t("provisioningKeysMaxBatchSize")} + + + { + const v = e.target.value; + field.onChange( + v === "" + ? 100 + : Number(v) + ); + }} + value={field.value} + /> + + + + )} + /> + ( + + + + field.onChange(c === true) + } + /> + + + {t( + "provisioningKeysUnlimitedBatchSize" + )} + + + )} + /> + { + const dateTimeValue: DateTimeValue = + (() => { + if (!field.value) return {}; + const d = new Date(field.value); + if (isNaN(d.getTime())) return {}; + const hours = d + .getHours() + .toString() + .padStart(2, "0"); + const minutes = d + .getMinutes() + .toString() + .padStart(2, "0"); + const seconds = d + .getSeconds() + .toString() + .padStart(2, "0"); + return { + date: d, + time: `${hours}:${minutes}:${seconds}` + }; + })(); + + return ( + + + {t("provisioningKeysValidUntil")} + + + { + if (!value.date) { + field.onChange(""); + return; + } + const d = new Date( + value.date + ); + if (value.time) { + const [h, m, s] = + value.time.split( + ":" + ); + d.setHours( + parseInt(h, 10), + parseInt(m, 10), + parseInt( + s || "0", + 10 + ) + ); + } + field.onChange( + d.toISOString() + ); + }} + /> + + + {t("provisioningKeysValidUntilHint")} + + + + ); + }} + /> + + +
+ + + + + + +
+
+ ); +} diff --git a/src/components/LayoutMobileMenu.tsx b/src/components/LayoutMobileMenu.tsx index e1c883a2b..854cad6db 100644 --- a/src/components/LayoutMobileMenu.tsx +++ b/src/components/LayoutMobileMenu.tsx @@ -93,7 +93,7 @@ export function LayoutMobileMenu({ ) } > - + diff --git a/src/components/LayoutSidebar.tsx b/src/components/LayoutSidebar.tsx index e9e2d61eb..1cd2131f7 100644 --- a/src/components/LayoutSidebar.tsx +++ b/src/components/LayoutSidebar.tsx @@ -169,8 +169,8 @@ export function LayoutSidebar({ > @@ -222,36 +222,34 @@ export function LayoutSidebar({ )} -
- -
+
{canShowProductUpdates && ( -
+
)} {build === "enterprise" && ( -
+
)} {build === "oss" && ( -
+
)} {build === "saas" && ( -
+
)} {!isSidebarCollapsed && ( -
+
{loadFooterLinks() ? ( <> {loadFooterLinks()!.map((link, index) => ( diff --git a/src/components/ProductUpdates.tsx b/src/components/ProductUpdates.tsx index 01689d9d7..76ab0252d 100644 --- a/src/components/ProductUpdates.tsx +++ b/src/components/ProductUpdates.tsx @@ -192,13 +192,13 @@ function ProductUpdatesListPopup({
- +

{t("productUpdateWhatsNew")} @@ -346,13 +346,13 @@ function NewVersionAvailable({ rel="noopener noreferrer" className={cn( "relative z-2 group cursor-pointer block", - "rounded-md border border-primary/30 bg-linear-to-br dark:from-primary/20 from-primary/20 via-background to-background p-2 py-3 w-full flex flex-col gap-2 text-sm", + "rounded-md border bg-secondary p-2 py-3 w-full flex flex-col gap-2 text-sm", "transition duration-300 ease-in-out", "data-closed:opacity-0 data-closed:translate-y-full" )} >

- +

{t("pangolinUpdateAvailable")}

diff --git a/src/components/SiteProvisioningKeysTable.tsx b/src/components/SiteProvisioningKeysTable.tsx new file mode 100644 index 000000000..df7fd241c --- /dev/null +++ b/src/components/SiteProvisioningKeysTable.tsx @@ -0,0 +1,320 @@ +"use client"; + +import { + DataTable, + ExtendedColumnDef +} from "@app/components/ui/data-table"; +import { + DropdownMenu, + DropdownMenuContent, + DropdownMenuItem, + DropdownMenuTrigger +} from "@app/components/ui/dropdown-menu"; +import { Button } from "@app/components/ui/button"; +import { ArrowUpDown, MoreHorizontal } from "lucide-react"; +import { useRouter } from "next/navigation"; +import { useEffect, useState } from "react"; +import CreateSiteProvisioningKeyCredenza from "@app/components/CreateSiteProvisioningKeyCredenza"; +import EditSiteProvisioningKeyCredenza from "@app/components/EditSiteProvisioningKeyCredenza"; +import ConfirmDeleteDialog from "@app/components/ConfirmDeleteDialog"; +import { toast } from "@app/hooks/useToast"; +import { formatAxiosError } from "@app/lib/api"; +import { createApiClient } from "@app/lib/api"; +import { useEnvContext } from "@app/hooks/useEnvContext"; +import { usePaidStatus } from "@app/hooks/usePaidStatus"; +import moment from "moment"; +import { useTranslations } from "next-intl"; +import { build } from "@server/build"; +import { TierFeature, tierMatrix } from "@server/lib/billing/tierMatrix"; + +export type SiteProvisioningKeyRow = { + id: string; + key: string; + name: string; + createdAt: string; + lastUsed: string | null; + maxBatchSize: number | null; + numUsed: number; + validUntil: string | null; +}; + +type SiteProvisioningKeysTableProps = { + keys: SiteProvisioningKeyRow[]; + orgId: string; +}; + +export default function SiteProvisioningKeysTable({ + keys, + orgId +}: SiteProvisioningKeysTableProps) { + const router = useRouter(); + const [isDeleteModalOpen, setIsDeleteModalOpen] = useState(false); + const [selected, setSelected] = useState( + null + ); + const [rows, 
setRows] = useState(keys); + const api = createApiClient(useEnvContext()); + const t = useTranslations(); + const { isPaidUser } = usePaidStatus(); + const canUseSiteProvisioning = + isPaidUser(tierMatrix[TierFeature.SiteProvisioningKeys]) && + build !== "oss"; + const [isRefreshing, setIsRefreshing] = useState(false); + const [createOpen, setCreateOpen] = useState(false); + const [editOpen, setEditOpen] = useState(false); + const [editingKey, setEditingKey] = + useState(null); + + useEffect(() => { + setRows(keys); + }, [keys]); + + const refreshData = async () => { + setIsRefreshing(true); + try { + await new Promise((resolve) => setTimeout(resolve, 200)); + router.refresh(); + } catch (error) { + toast({ + title: t("error"), + description: t("refreshError"), + variant: "destructive" + }); + } finally { + setIsRefreshing(false); + } + }; + + const deleteKey = async (siteProvisioningKeyId: string) => { + try { + await api.delete( + `/org/${orgId}/site-provisioning-key/${siteProvisioningKeyId}` + ); + router.refresh(); + setIsDeleteModalOpen(false); + setSelected(null); + setRows((prev) => prev.filter((row) => row.id !== siteProvisioningKeyId)); + } catch (e) { + console.error(t("provisioningKeysErrorDelete"), e); + toast({ + variant: "destructive", + title: t("provisioningKeysErrorDelete"), + description: formatAxiosError( + e, + t("provisioningKeysErrorDeleteMessage") + ) + }); + throw e; + } + }; + + const columns: ExtendedColumnDef[] = [ + { + accessorKey: "name", + enableHiding: false, + friendlyName: t("name"), + header: ({ column }) => { + return ( + + ); + } + }, + { + accessorKey: "key", + friendlyName: t("key"), + header: () => {t("key")}, + cell: ({ row }) => { + const r = row.original; + return {r.key}; + } + }, + { + accessorKey: "maxBatchSize", + friendlyName: t("provisioningKeysMaxBatchSize"), + header: () => ( + {t("provisioningKeysMaxBatchSize")} + ), + cell: ({ row }) => { + const r = row.original; + return ( + + {r.maxBatchSize == null + ? 
t("provisioningKeysMaxBatchUnlimited") + : r.maxBatchSize} + + ); + } + }, + { + accessorKey: "numUsed", + friendlyName: t("provisioningKeysNumUsed"), + header: () => ( + {t("provisioningKeysNumUsed")} + ), + cell: ({ row }) => { + const r = row.original; + return {r.numUsed}; + } + }, + { + accessorKey: "validUntil", + friendlyName: t("provisioningKeysValidUntil"), + header: () => ( + {t("provisioningKeysValidUntil")} + ), + cell: ({ row }) => { + const r = row.original; + return ( + + {r.validUntil + ? moment(r.validUntil).format("lll") + : t("provisioningKeysNoExpiry")} + + ); + } + }, + { + accessorKey: "lastUsed", + friendlyName: t("provisioningKeysLastUsed"), + header: () => ( + {t("provisioningKeysLastUsed")} + ), + cell: ({ row }) => { + const r = row.original; + return ( + + {r.lastUsed + ? moment(r.lastUsed).format("lll") + : t("provisioningKeysNeverUsed")} + + ); + } + }, + { + accessorKey: "createdAt", + friendlyName: t("createdAt"), + header: () => {t("createdAt")}, + cell: ({ row }) => { + const r = row.original; + return {moment(r.createdAt).format("lll")}; + } + }, + { + id: "actions", + enableHiding: false, + header: () => , + cell: ({ row }) => { + const r = row.original; + return ( +
+ + + + + + { + setEditingKey(r); + setEditOpen(true); + }} + > + {t("edit")} + + { + setSelected(r); + setIsDeleteModalOpen(true); + }} + > + + {t("delete")} + + + + +
+ ); + } + } + ]; + + return ( + <> + + + { + setEditOpen(v); + if (!v) { + setEditingKey(null); + } + }} + orgId={orgId} + provisioningKey={editingKey} + /> + + {selected && ( + { + setIsDeleteModalOpen(val); + if (!val) { + setSelected(null); + } + }} + dialog={ +
+

{t("provisioningKeysQuestionRemove")}

+

{t("provisioningKeysMessageRemove")}

+
+ } + buttonText={t("provisioningKeysDeleteConfirm")} + onConfirm={async () => deleteKey(selected.id)} + string={selected.name} + title={t("provisioningKeysDelete")} + /> + )} + + { + if (canUseSiteProvisioning) { + setCreateOpen(true); + } + }} + addButtonDisabled={!canUseSiteProvisioning} + onRefresh={refreshData} + isRefreshing={isRefreshing} + addButtonText={t("provisioningKeysAdd")} + enableColumnVisibility={true} + stickyLeftColumn="name" + stickyRightColumn="actions" + /> + + ); +} diff --git a/src/components/ui/data-table.tsx b/src/components/ui/data-table.tsx index 834c56e88..a0c11ffdf 100644 --- a/src/components/ui/data-table.tsx +++ b/src/components/ui/data-table.tsx @@ -171,6 +171,7 @@ type DataTableProps = { title?: string; addButtonText?: string; onAdd?: () => void; + addButtonDisabled?: boolean; onRefresh?: () => void; isRefreshing?: boolean; searchPlaceholder?: string; @@ -203,6 +204,7 @@ export function DataTable({ title, addButtonText, onAdd, + addButtonDisabled = false, onRefresh, isRefreshing, searchPlaceholder = "Search...", @@ -635,7 +637,7 @@ export function DataTable({ )} {onAdd && addButtonText && (
-