Merge branch 'dev' into feat-blueprint-ui-on-dashboard

This commit is contained in:
Fred KISSIE
2025-10-29 03:31:51 +01:00
committed by GitHub
169 changed files with 14164 additions and 1207 deletions

View File

@@ -81,6 +81,9 @@ export enum ActionsEnum {
listClients = "listClients",
getClient = "getClient",
listOrgDomains = "listOrgDomains",
getDomain = "getDomain",
updateOrgDomain = "updateOrgDomain",
getDNSRecords = "getDNSRecords",
createNewt = "createNewt",
createIdp = "createIdp",
updateIdp = "updateIdp",
@@ -121,6 +124,9 @@ export enum ActionsEnum {
listBlueprints = "listBlueprints",
getBlueprint = "getBlueprint",
applyBlueprint = "applyBlueprint"
applyBlueprint = "applyBlueprint",
viewLogs = "viewLogs",
exportLogs = "exportLogs"
}
export async function checkUserActionPermission(

View File

@@ -39,7 +39,8 @@ export async function createSession(
const session: Session = {
sessionId: sessionId,
userId,
expiresAt: new Date(Date.now() + SESSION_COOKIE_EXPIRES).getTime()
expiresAt: new Date(Date.now() + SESSION_COOKIE_EXPIRES).getTime(),
issuedAt: new Date().getTime()
};
await db.insert(sessions).values(session);
return session;

View File

@@ -50,7 +50,8 @@ export async function createResourceSession(opts: {
doNotExtend: opts.doNotExtend || false,
accessTokenId: opts.accessTokenId || null,
isRequestToken: opts.isRequestToken || false,
userSessionId: opts.userSessionId || null
userSessionId: opts.userSessionId || null,
issuedAt: new Date().getTime()
};
await db.insert(resourceSessions).values(session);

View File

@@ -6,7 +6,8 @@ import {
integer,
bigint,
real,
text
text,
index
} from "drizzle-orm/pg-core";
import { InferSelectModel } from "drizzle-orm";
import { domains, orgs, targets, users, exitNodes, sessions } from "./schema";
@@ -213,6 +214,43 @@ export const sessionTransferToken = pgTable("sessionTransferToken", {
expiresAt: bigint("expiresAt", { mode: "number" }).notNull()
});
// Action audit log (Postgres dialect): one row per administrative action an
// actor performed in an org (see ActionsEnum for action names).
export const actionAuditLog = pgTable("actionAuditLog", {
    id: serial("id").primaryKey(),
    timestamp: bigint("timestamp", { mode: "number" }).notNull(), // this is EPOCH time in seconds
    // Rows are deleted with their org (cascade).
    orgId: varchar("orgId")
        .notNull()
        .references(() => orgs.orgId, { onDelete: "cascade" }),
    actorType: varchar("actorType", { length: 50 }).notNull(),
    actor: varchar("actor", { length: 255 }).notNull(),
    actorId: varchar("actorId", { length: 255 }).notNull(),
    action: varchar("action", { length: 100 }).notNull(),
    metadata: text("metadata") // free-form JSON blob; schema not enforced here
}, (table) => ([
    // Indexed for time-range scans, globally and per org.
    index("idx_actionAuditLog_timestamp").on(table.timestamp),
    index("idx_actionAuditLog_org_timestamp").on(table.orgId, table.timestamp)
]));
// Access audit log (Postgres dialect): one row per access decision recorded
// for a resource. `action` is the boolean outcome of the decision
// (presumably allowed/denied — confirm against the writer in logAccessAudit).
export const accessAuditLog = pgTable("accessAuditLog", {
    id: serial("id").primaryKey(),
    timestamp: bigint("timestamp", { mode: "number" }).notNull(), // this is EPOCH time in seconds
    // Rows are deleted with their org (cascade).
    orgId: varchar("orgId")
        .notNull()
        .references(() => orgs.orgId, { onDelete: "cascade" }),
    actorType: varchar("actorType", { length: 50 }),
    actor: varchar("actor", { length: 255 }),
    actorId: varchar("actorId", { length: 255 }),
    resourceId: integer("resourceId"),
    ip: varchar("ip", { length: 45 }), // 45 chars fits a full IPv6 address
    type: varchar("type", { length: 100 }).notNull(),
    action: boolean("action").notNull(),
    location: text("location"),
    userAgent: text("userAgent"),
    metadata: text("metadata")
}, (table) => ([
    // FIX: index names previously said "identityAuditLog" (stale copy/paste);
    // renamed to match this table, consistent with actionAuditLog and
    // requestAuditLog index naming (idx_<tableName>_*).
    index("idx_accessAuditLog_timestamp").on(table.timestamp),
    index("idx_accessAuditLog_org_timestamp").on(table.orgId, table.timestamp)
]));
export type Limit = InferSelectModel<typeof limits>;
export type Account = InferSelectModel<typeof account>;
export type Certificate = InferSelectModel<typeof certificates>;
@@ -230,3 +268,5 @@ export type RemoteExitNodeSession = InferSelectModel<
>;
export type ExitNodeOrg = InferSelectModel<typeof exitNodeOrgs>;
export type LoginPage = InferSelectModel<typeof loginPage>;
export type ActionAuditLog = InferSelectModel<typeof actionAuditLog>;
export type AccessAuditLog = InferSelectModel<typeof accessAuditLog>;

View File

@@ -6,7 +6,8 @@ import {
integer,
bigint,
real,
text
text,
index
} from "drizzle-orm/pg-core";
import { InferSelectModel } from "drizzle-orm";
import { randomUUID } from "crypto";
@@ -18,7 +19,22 @@ export const domains = pgTable("domains", {
type: varchar("type"), // "ns", "cname", "wildcard"
verified: boolean("verified").notNull().default(false),
failed: boolean("failed").notNull().default(false),
tries: integer("tries").notNull().default(0)
tries: integer("tries").notNull().default(0),
certResolver: varchar("certResolver"),
customCertResolver: varchar("customCertResolver"),
preferWildcardCert: boolean("preferWildcardCert")
});
// DNS records associated with a domain — presumably the records a user must
// create for verification (served by getDNSRecords); confirm against that
// route. Rows are deleted with their parent domain (cascade).
export const dnsRecords = pgTable("dnsRecords", {
    id: serial("id").primaryKey(),
    domainId: varchar("domainId")
        .notNull()
        .references(() => domains.domainId, { onDelete: "cascade" }),
    recordType: varchar("recordType").notNull(), // "NS" | "CNAME" | "A" | "TXT"
    baseDomain: varchar("baseDomain"),
    value: varchar("value").notNull(), // expected record value (target/content)
    verified: boolean("verified").notNull().default(false),
});
export const orgs = pgTable("orgs", {
@@ -26,7 +42,18 @@ export const orgs = pgTable("orgs", {
name: varchar("name").notNull(),
subnet: varchar("subnet"),
createdAt: text("createdAt"),
settings: text("settings") // JSON blob of org-specific settings
requireTwoFactor: boolean("requireTwoFactor"),
maxSessionLengthHours: integer("maxSessionLengthHours"),
passwordExpiryDays: integer("passwordExpiryDays"),
settingsLogRetentionDaysRequest: integer("settingsLogRetentionDaysRequest") // where 0 = dont keep logs and -1 = keep forever
.notNull()
.default(7),
settingsLogRetentionDaysAccess: integer("settingsLogRetentionDaysAccess")
.notNull()
.default(0),
settingsLogRetentionDaysAction: integer("settingsLogRetentionDaysAction")
.notNull()
.default(0)
});
export const orgDomains = pgTable("orgDomains", {
@@ -100,9 +127,11 @@ export const resources = pgTable("resources", {
setHostHeader: varchar("setHostHeader"),
enableProxy: boolean("enableProxy").default(true),
skipToIdpId: integer("skipToIdpId").references(() => idp.idpId, {
onDelete: "cascade"
onDelete: "set null"
}),
headers: text("headers") // comma-separated list of headers to add to the request
headers: text("headers"), // comma-separated list of headers to add to the request
proxyProtocol: boolean("proxyProtocol").notNull().default(false),
proxyProtocolVersion: integer("proxyProtocolVersion").default(1)
});
export const targets = pgTable("targets", {
@@ -126,7 +155,7 @@ export const targets = pgTable("targets", {
pathMatchType: text("pathMatchType"), // exact, prefix, regex
rewritePath: text("rewritePath"), // if set, rewrites the path to this value before sending to the target
rewritePathType: text("rewritePathType"), // exact, prefix, regex, stripPrefix
priority: integer("priority").default(100)
priority: integer("priority").notNull().default(100)
});
export const targetHealthCheck = pgTable("targetHealthCheck", {
@@ -200,7 +229,8 @@ export const users = pgTable("user", {
dateCreated: varchar("dateCreated").notNull(),
termsAcceptedTimestamp: varchar("termsAcceptedTimestamp"),
termsVersion: varchar("termsVersion"),
serverAdmin: boolean("serverAdmin").notNull().default(false)
serverAdmin: boolean("serverAdmin").notNull().default(false),
lastPasswordChange: bigint("lastPasswordChange", { mode: "number" })
});
export const newts = pgTable("newt", {
@@ -226,7 +256,8 @@ export const sessions = pgTable("session", {
userId: varchar("userId")
.notNull()
.references(() => users.userId, { onDelete: "cascade" }),
expiresAt: bigint("expiresAt", { mode: "number" }).notNull()
expiresAt: bigint("expiresAt", { mode: "number" }).notNull(),
issuedAt: bigint("issuedAt", { mode: "number" })
});
export const newtSessions = pgTable("newtSession", {
@@ -443,7 +474,8 @@ export const resourceSessions = pgTable("resourceSessions", {
{
onDelete: "cascade"
}
)
),
issuedAt: bigint("issuedAt", { mode: "number" })
});
export const resourceWhitelist = pgTable("resourceWhitelist", {
@@ -681,6 +713,41 @@ export const blueprints = pgTable("blueprints", {
contents: text("contents").notNull(),
message: text("message")
});
// Request audit log (Postgres dialect): one row per proxied request decision.
// Captures both the outcome (`action` boolean, numeric `reason` code) and the
// request envelope (URL parts, headers/query as JSON blobs, client info).
export const requestAuditLog = pgTable(
    "requestAuditLog",
    {
        id: serial("id").primaryKey(),
        timestamp: integer("timestamp").notNull(), // this is EPOCH time in seconds
        // orgId is nullable here (unlike action/access logs); rows cascade
        // with their org when set.
        orgId: text("orgId").references(() => orgs.orgId, {
            onDelete: "cascade"
        }),
        action: boolean("action").notNull(),
        reason: integer("reason").notNull(), // numeric reason code — see the writer (logRequestAudit) for meanings
        actorType: text("actorType"),
        actor: text("actor"),
        actorId: text("actorId"),
        resourceId: integer("resourceId"),
        ip: text("ip"),
        location: text("location"),
        userAgent: text("userAgent"),
        metadata: text("metadata"),
        headers: text("headers"), // JSON blob
        query: text("query"), // JSON blob
        originalRequestURL: text("originalRequestURL"),
        scheme: text("scheme"),
        host: text("host"),
        path: text("path"),
        method: text("method"),
        tls: boolean("tls")
    },
    (table) => [
        // Indexed for time-range scans, globally and per org.
        index("idx_requestAuditLog_timestamp").on(table.timestamp),
        index("idx_requestAuditLog_org_timestamp").on(
            table.orgId,
            table.timestamp
        )
    ]
);
export type Org = InferSelectModel<typeof orgs>;
export type User = InferSelectModel<typeof users>;
@@ -734,3 +801,7 @@ export type HostMeta = InferSelectModel<typeof hostMeta>;
export type TargetHealthCheck = InferSelectModel<typeof targetHealthCheck>;
export type IdpOidcConfig = InferSelectModel<typeof idpOidcConfig>;
export type Blueprint = InferSelectModel<typeof blueprints>;
export type LicenseKey = InferSelectModel<typeof licenseKey>;
export type SecurityKey = InferSelectModel<typeof securityKeys>;
export type WebauthnChallenge = InferSelectModel<typeof webauthnChallenge>;
export type RequestAuditLog = InferSelectModel<typeof requestAuditLog>;

View File

@@ -1,4 +1,4 @@
import { db, loginPage, LoginPage, loginPageOrg } from "@server/db";
import { db, loginPage, LoginPage, loginPageOrg, Org, orgs } from "@server/db";
import {
Resource,
ResourcePassword,
@@ -23,6 +23,7 @@ export type ResourceWithAuth = {
pincode: ResourcePincode | null;
password: ResourcePassword | null;
headerAuth: ResourceHeaderAuth | null;
org: Org;
};
export type UserSessionWithUser = {
@@ -51,6 +52,10 @@ export async function getResourceByDomain(
resourceHeaderAuth,
eq(resourceHeaderAuth.resourceId, resources.resourceId)
)
.innerJoin(
orgs,
eq(orgs.orgId, resources.orgId)
)
.where(eq(resources.fullDomain, domain))
.limit(1);
@@ -62,7 +67,8 @@ export async function getResourceByDomain(
resource: result.resources,
pincode: result.resourcePincode,
password: result.resourcePassword,
headerAuth: result.resourceHeaderAuth
headerAuth: result.resourceHeaderAuth,
org: result.orgs
};
}

View File

@@ -2,10 +2,12 @@ import {
sqliteTable,
integer,
text,
real
real,
index
} from "drizzle-orm/sqlite-core";
import { InferSelectModel } from "drizzle-orm";
import { domains, orgs, targets, users, exitNodes, sessions } from "./schema";
import { metadata } from "@app/app/[orgId]/settings/layout";
export const certificates = sqliteTable("certificates", {
certId: integer("certId").primaryKey({ autoIncrement: true }),
@@ -207,6 +209,43 @@ export const sessionTransferToken = sqliteTable("sessionTransferToken", {
expiresAt: integer("expiresAt").notNull()
});
// Action audit log (SQLite dialect): one row per administrative action an
// actor performed in an org. Mirrors the Postgres definition.
export const actionAuditLog = sqliteTable("actionAuditLog", {
    id: integer("id").primaryKey({ autoIncrement: true }),
    timestamp: integer("timestamp").notNull(), // this is EPOCH time in seconds
    // Rows are deleted with their org (cascade).
    orgId: text("orgId")
        .notNull()
        .references(() => orgs.orgId, { onDelete: "cascade" }),
    actorType: text("actorType").notNull(),
    actor: text("actor").notNull(),
    actorId: text("actorId").notNull(),
    action: text("action").notNull(),
    metadata: text("metadata") // free-form JSON blob; schema not enforced here
}, (table) => ([
    // Indexed for time-range scans, globally and per org.
    index("idx_actionAuditLog_timestamp").on(table.timestamp),
    index("idx_actionAuditLog_org_timestamp").on(table.orgId, table.timestamp)
]));
// Access audit log (SQLite dialect): one row per access decision recorded
// for a resource. Mirrors the Postgres definition; `action` is the boolean
// outcome of the decision (presumably allowed/denied — confirm against the
// writer in logAccessAudit).
export const accessAuditLog = sqliteTable("accessAuditLog", {
    id: integer("id").primaryKey({ autoIncrement: true }),
    timestamp: integer("timestamp").notNull(), // this is EPOCH time in seconds
    // Rows are deleted with their org (cascade).
    orgId: text("orgId")
        .notNull()
        .references(() => orgs.orgId, { onDelete: "cascade" }),
    actorType: text("actorType"),
    actor: text("actor"),
    actorId: text("actorId"),
    resourceId: integer("resourceId"),
    ip: text("ip"),
    location: text("location"),
    type: text("type").notNull(),
    action: integer("action", { mode: "boolean" }).notNull(),
    userAgent: text("userAgent"),
    metadata: text("metadata")
}, (table) => ([
    // FIX: index names previously said "identityAuditLog" (stale copy/paste);
    // renamed to match this table, consistent with actionAuditLog and
    // requestAuditLog index naming (idx_<tableName>_*).
    index("idx_accessAuditLog_timestamp").on(table.timestamp),
    index("idx_accessAuditLog_org_timestamp").on(table.orgId, table.timestamp)
]));
export type Limit = InferSelectModel<typeof limits>;
export type Account = InferSelectModel<typeof account>;
export type Certificate = InferSelectModel<typeof certificates>;
@@ -224,3 +263,5 @@ export type RemoteExitNodeSession = InferSelectModel<
>;
export type ExitNodeOrg = InferSelectModel<typeof exitNodeOrgs>;
export type LoginPage = InferSelectModel<typeof loginPage>;
export type ActionAuditLog = InferSelectModel<typeof actionAuditLog>;
export type AccessAuditLog = InferSelectModel<typeof accessAuditLog>;

View File

@@ -1,6 +1,7 @@
import { randomUUID } from "crypto";
import { InferSelectModel } from "drizzle-orm";
import { sqliteTable, text, integer } from "drizzle-orm/sqlite-core";
import { sqliteTable, text, integer, index } from "drizzle-orm/sqlite-core";
import { boolean } from "yargs";
export const domains = sqliteTable("domains", {
domainId: text("domainId").primaryKey(),
@@ -11,15 +12,41 @@ export const domains = sqliteTable("domains", {
type: text("type"), // "ns", "cname", "wildcard"
verified: integer("verified", { mode: "boolean" }).notNull().default(false),
failed: integer("failed", { mode: "boolean" }).notNull().default(false),
tries: integer("tries").notNull().default(0)
tries: integer("tries").notNull().default(0),
certResolver: text("certResolver"),
preferWildcardCert: integer("preferWildcardCert", { mode: "boolean" })
});
// DNS records associated with a domain — presumably the records a user must
// create for verification (served by getDNSRecords); confirm against that
// route. Mirrors the Postgres definition; rows cascade with their domain.
export const dnsRecords = sqliteTable("dnsRecords", {
    id: integer("id").primaryKey({ autoIncrement: true }),
    domainId: text("domainId")
        .notNull()
        .references(() => domains.domainId, { onDelete: "cascade" }),
    recordType: text("recordType").notNull(), // "NS" | "CNAME" | "A" | "TXT"
    baseDomain: text("baseDomain"),
    value: text("value").notNull(), // expected record value (target/content)
    verified: integer("verified", { mode: "boolean" }).notNull().default(false),
});
export const orgs = sqliteTable("orgs", {
orgId: text("orgId").primaryKey(),
name: text("name").notNull(),
subnet: text("subnet"),
createdAt: text("createdAt"),
settings: text("settings") // JSON blob of org-specific settings
requireTwoFactor: integer("requireTwoFactor", { mode: "boolean" }),
maxSessionLengthHours: integer("maxSessionLengthHours"), // hours
passwordExpiryDays: integer("passwordExpiryDays"), // days
settingsLogRetentionDaysRequest: integer("settingsLogRetentionDaysRequest") // where 0 = dont keep logs and -1 = keep forever
.notNull()
.default(7),
settingsLogRetentionDaysAccess: integer("settingsLogRetentionDaysAccess")
.notNull()
.default(0),
settingsLogRetentionDaysAction: integer("settingsLogRetentionDaysAction")
.notNull()
.default(0)
});
export const userDomains = sqliteTable("userDomains", {
@@ -112,9 +139,12 @@ export const resources = sqliteTable("resources", {
setHostHeader: text("setHostHeader"),
enableProxy: integer("enableProxy", { mode: "boolean" }).default(true),
skipToIdpId: integer("skipToIdpId").references(() => idp.idpId, {
onDelete: "cascade"
onDelete: "set null"
}),
headers: text("headers") // comma-separated list of headers to add to the request
headers: text("headers"), // comma-separated list of headers to add to the request
proxyProtocol: integer("proxyProtocol", { mode: "boolean" }).notNull().default(false),
proxyProtocolVersion: integer("proxyProtocolVersion").default(1)
});
export const targets = sqliteTable("targets", {
@@ -138,7 +168,7 @@ export const targets = sqliteTable("targets", {
pathMatchType: text("pathMatchType"), // exact, prefix, regex
rewritePath: text("rewritePath"), // if set, rewrites the path to this value before sending to the target
rewritePathType: text("rewritePathType"), // exact, prefix, regex, stripPrefix
priority: integer("priority").default(100)
priority: integer("priority").notNull().default(100)
});
export const targetHealthCheck = sqliteTable("targetHealthCheck", {
@@ -228,7 +258,8 @@ export const users = sqliteTable("user", {
termsVersion: text("termsVersion"),
serverAdmin: integer("serverAdmin", { mode: "boolean" })
.notNull()
.default(false)
.default(false),
lastPasswordChange: integer("lastPasswordChange")
});
export const securityKeys = sqliteTable("webauthnCredentials", {
@@ -333,7 +364,8 @@ export const sessions = sqliteTable("session", {
userId: text("userId")
.notNull()
.references(() => users.userId, { onDelete: "cascade" }),
expiresAt: integer("expiresAt").notNull()
expiresAt: integer("expiresAt").notNull(),
issuedAt: integer("issuedAt")
});
export const newtSessions = sqliteTable("newtSession", {
@@ -583,7 +615,8 @@ export const resourceSessions = sqliteTable("resourceSessions", {
{
onDelete: "cascade"
}
)
),
issuedAt: integer("issuedAt")
});
export const resourceWhitelist = sqliteTable("resourceWhitelist", {
@@ -733,6 +766,41 @@ export const blueprints = sqliteTable("blueprints", {
contents: text("contents").notNull(),
message: text("message")
});
// Request audit log (SQLite dialect): one row per proxied request decision.
// Mirrors the Postgres definition: outcome (`action` boolean, numeric
// `reason` code) plus the request envelope (URL parts, headers/query JSON).
export const requestAuditLog = sqliteTable(
    "requestAuditLog",
    {
        id: integer("id").primaryKey({ autoIncrement: true }),
        timestamp: integer("timestamp").notNull(), // this is EPOCH time in seconds
        // orgId is nullable here (unlike action/access logs); rows cascade
        // with their org when set.
        orgId: text("orgId").references(() => orgs.orgId, {
            onDelete: "cascade"
        }),
        action: integer("action", { mode: "boolean" }).notNull(),
        reason: integer("reason").notNull(), // numeric reason code — see the writer (logRequestAudit) for meanings
        actorType: text("actorType"),
        actor: text("actor"),
        actorId: text("actorId"),
        resourceId: integer("resourceId"),
        ip: text("ip"),
        location: text("location"),
        userAgent: text("userAgent"),
        metadata: text("metadata"),
        headers: text("headers"), // JSON blob
        query: text("query"), // JSON blob
        originalRequestURL: text("originalRequestURL"),
        scheme: text("scheme"),
        host: text("host"),
        path: text("path"),
        method: text("method"),
        tls: integer("tls", { mode: "boolean" })
    },
    (table) => [
        // Indexed for time-range scans, globally and per org.
        index("idx_requestAuditLog_timestamp").on(table.timestamp),
        index("idx_requestAuditLog_org_timestamp").on(
            table.orgId,
            table.timestamp
        )
    ]
);
export type Org = InferSelectModel<typeof orgs>;
export type User = InferSelectModel<typeof users>;
@@ -770,6 +838,7 @@ export type ResourceWhitelist = InferSelectModel<typeof resourceWhitelist>;
export type VersionMigration = InferSelectModel<typeof versionMigrations>;
export type ResourceRule = InferSelectModel<typeof resourceRules>;
export type Domain = InferSelectModel<typeof domains>;
export type DnsRecord = InferSelectModel<typeof dnsRecords>;
export type Client = InferSelectModel<typeof clients>;
export type ClientSite = InferSelectModel<typeof clientSites>;
export type RoleClient = InferSelectModel<typeof roleClients>;
@@ -786,3 +855,7 @@ export type HostMeta = InferSelectModel<typeof hostMeta>;
export type TargetHealthCheck = InferSelectModel<typeof targetHealthCheck>;
export type IdpOidcConfig = InferSelectModel<typeof idpOidcConfig>;
export type Blueprint = InferSelectModel<typeof blueprints>;
export type LicenseKey = InferSelectModel<typeof licenseKey>;
export type SecurityKey = InferSelectModel<typeof securityKeys>;
export type WebauthnChallenge = InferSelectModel<typeof webauthnChallenge>;
export type RequestAuditLog = InferSelectModel<typeof requestAuditLog>;

View File

@@ -0,0 +1,56 @@
import React from "react";
import { Body, Head, Html, Preview, Tailwind } from "@react-email/components";
import { themeColors } from "./lib/theme";
import {
EmailContainer,
EmailGreeting,
EmailLetterHead,
EmailText
} from "./components/Email";
/** Props for the support-request notification email. */
interface SupportEmailProps {
    email: string;
    username: string;
    subject: string;
    body: string;
}

/**
 * Email delivered to the support inbox when a user submits a support
 * request. Renders the requester's identity, the subject, and the message.
 */
export const SupportEmail = ({
    email,
    username,
    subject,
    body
}: SupportEmailProps) => (
    <Html>
        <Head />
        {/* The inbox preview line mirrors the user's subject. */}
        <Preview>{subject}</Preview>
        <Tailwind config={themeColors}>
            <Body className="font-sans bg-gray-50">
                <EmailContainer>
                    <EmailLetterHead />
                    <EmailGreeting>Hi support,</EmailGreeting>
                    <EmailText>
                        You have received a new support request from{" "}
                        <strong>{username}</strong> ({email}).
                    </EmailText>
                    <EmailText>
                        <strong>Subject:</strong> {subject}
                    </EmailText>
                    <EmailText>
                        <strong>Message:</strong> {body}
                    </EmailText>
                </EmailContainer>
            </Body>
        </Tailwind>
    </Html>
);

export default SupportEmail;

View File

@@ -5,6 +5,7 @@ import { runSetupFunctions } from "./setup";
import { createApiServer } from "./apiServer";
import { createNextServer } from "./nextServer";
import { createInternalServer } from "./internalServer";
import { createIntegrationApiServer } from "./integrationApiServer";
import {
ApiKey,
ApiKeyOrg,
@@ -13,13 +14,14 @@ import {
User,
UserOrg
} from "@server/db";
import { createIntegrationApiServer } from "./integrationApiServer";
import config from "@server/lib/config";
import { setHostMeta } from "@server/lib/hostMeta";
import { initTelemetryClient } from "./lib/telemetry.js";
import { TraefikConfigManager } from "./lib/traefik/TraefikConfigManager.js";
import { initTelemetryClient } from "@server/lib/telemetry";
import { TraefikConfigManager } from "@server/lib/traefik/TraefikConfigManager";
import { initCleanup } from "#dynamic/cleanup";
import license from "#dynamic/license/license";
import { initLogCleanupInterval } from "@server/lib/cleanupLogs";
import { fetchServerIp } from "@server/lib/serverIpService";
async function startServers() {
await setHostMeta();
@@ -31,14 +33,17 @@ async function startServers() {
await runSetupFunctions();
await fetchServerIp();
initTelemetryClient();
initLogCleanupInterval();
// Start all servers
const apiServer = createApiServer();
const internalServer = createInternalServer();
let nextServer;
nextServer = await createNextServer();
const nextServer = await createNextServer();
if (config.getRawConfig().traefik.file_mode) {
const monitor = new TraefikConfigManager();
await monitor.start();

View File

@@ -1,8 +1,8 @@
/**
 * Look up subscription-tier data for an org.
 *
 * Stub implementation: always resolves to `{ tier: null, active: false }`
 * regardless of `orgId` (presumably the non-billing build; confirm against
 * the hosted variant). FIX: the rendered diff left both the old `let` and
 * new `const` declarations in place, creating duplicate identifiers — this
 * collapses the body to the intended post-image.
 *
 * @param orgId - organization identifier (currently unused)
 * @returns the org's tier name (or null) and whether it is active
 */
export async function getOrgTierData(
    orgId: string
): Promise<{ tier: string | null; active: boolean }> {
    return { tier: null, active: false };
}

View File

@@ -1,5 +1,4 @@
import { eq, sql, and } from "drizzle-orm";
import NodeCache from "node-cache";
import { v4 as uuidv4 } from "uuid";
import { PutObjectCommand } from "@aws-sdk/client-s3";
import * as fs from "fs/promises";
@@ -20,6 +19,7 @@ import logger from "@server/logger";
import { sendToClient } from "#dynamic/routers/ws";
import { build } from "@server/build";
import { s3Client } from "@server/lib/s3";
import cache from "@server/lib/cache";
interface StripeEvent {
identifier?: string;
@@ -43,7 +43,6 @@ export function noop() {
}
export class UsageService {
private cache: NodeCache;
private bucketName: string | undefined;
private currentEventFile: string | null = null;
private currentFileStartTime: number = 0;
@@ -51,7 +50,6 @@ export class UsageService {
private uploadingFiles: Set<string> = new Set();
constructor() {
this.cache = new NodeCache({ stdTTL: 300 }); // 5 minute TTL
if (noop()) {
return;
}
@@ -399,7 +397,7 @@ export class UsageService {
featureId: FeatureId
): Promise<string | null> {
const cacheKey = `customer_${orgId}_${featureId}`;
const cached = this.cache.get<string>(cacheKey);
const cached = cache.get<string>(cacheKey);
if (cached) {
return cached;
@@ -422,7 +420,7 @@ export class UsageService {
const customerId = customer.customerId;
// Cache the result
this.cache.set(cacheKey, customerId);
cache.set(cacheKey, customerId, 300); // 5 minute TTL
return customerId;
} catch (error) {
@@ -700,10 +698,6 @@ export class UsageService {
await this.uploadFileToS3();
}
public clearCache(): void {
this.cache.flushAll();
}
/**
* Scan the events directory for files older than 1 minute and upload them if not empty.
*/

View File

@@ -527,7 +527,7 @@ export async function updateProxyResources(
if (
existingRule.action !== getRuleAction(rule.action) ||
existingRule.match !== rule.match.toUpperCase() ||
existingRule.value !== rule.value
existingRule.value !== rule.value.toUpperCase()
) {
validateRule(rule);
await trx
@@ -535,7 +535,7 @@ export async function updateProxyResources(
.set({
action: getRuleAction(rule.action),
match: rule.match.toUpperCase(),
value: rule.value
value: rule.value.toUpperCase(),
})
.where(
eq(resourceRules.ruleId, existingRule.ruleId)
@@ -547,7 +547,7 @@ export async function updateProxyResources(
resourceId: existingResource.resourceId,
action: getRuleAction(rule.action),
match: rule.match.toUpperCase(),
value: rule.value,
value: rule.value.toUpperCase(),
priority: index + 1 // start priorities at 1
});
}
@@ -705,7 +705,7 @@ export async function updateProxyResources(
resourceId: newResource.resourceId,
action: getRuleAction(rule.action),
match: rule.match.toUpperCase(),
value: rule.value,
value: rule.value.toUpperCase(),
priority: index + 1 // start priorities at 1
});
}

View File

@@ -275,24 +275,26 @@ export const ConfigSchema = z
}
)
.refine(
// Enforce proxy-port uniqueness within proxy-resources
// Enforce proxy-port uniqueness within proxy-resources per protocol
(config) => {
const proxyPortMap = new Map<number, string[]>();
const protocolPortMap = new Map<string, string[]>();
Object.entries(config["proxy-resources"]).forEach(
([resourceKey, resource]) => {
const proxyPort = resource["proxy-port"];
if (proxyPort !== undefined) {
if (!proxyPortMap.has(proxyPort)) {
proxyPortMap.set(proxyPort, []);
const protocol = resource.protocol;
if (proxyPort !== undefined && protocol !== undefined) {
const key = `${protocol}:${proxyPort}`;
if (!protocolPortMap.has(key)) {
protocolPortMap.set(key, []);
}
proxyPortMap.get(proxyPort)!.push(resourceKey);
protocolPortMap.get(key)!.push(resourceKey);
}
}
);
// Find duplicates
const duplicates = Array.from(proxyPortMap.entries()).filter(
const duplicates = Array.from(protocolPortMap.entries()).filter(
([_, resourceKeys]) => resourceKeys.length > 1
);
@@ -300,25 +302,29 @@ export const ConfigSchema = z
},
(config) => {
// Extract duplicates for error message
const proxyPortMap = new Map<number, string[]>();
const protocolPortMap = new Map<string, string[]>();
Object.entries(config["proxy-resources"]).forEach(
([resourceKey, resource]) => {
const proxyPort = resource["proxy-port"];
if (proxyPort !== undefined) {
if (!proxyPortMap.has(proxyPort)) {
proxyPortMap.set(proxyPort, []);
const protocol = resource.protocol;
if (proxyPort !== undefined && protocol !== undefined) {
const key = `${protocol}:${proxyPort}`;
if (!protocolPortMap.has(key)) {
protocolPortMap.set(key, []);
}
proxyPortMap.get(proxyPort)!.push(resourceKey);
protocolPortMap.get(key)!.push(resourceKey);
}
}
);
const duplicates = Array.from(proxyPortMap.entries())
const duplicates = Array.from(protocolPortMap.entries())
.filter(([_, resourceKeys]) => resourceKeys.length > 1)
.map(
([proxyPort, resourceKeys]) =>
`port ${proxyPort} used by proxy-resources: ${resourceKeys.join(", ")}`
([protocolPort, resourceKeys]) => {
const [protocol, port] = protocolPort.split(':');
return `${protocol.toUpperCase()} port ${port} used by proxy-resources: ${resourceKeys.join(", ")}`;
}
)
.join("; ");

5
server/lib/cache.ts Normal file
View File

@@ -0,0 +1,5 @@
import NodeCache from "node-cache";

// Shared in-process cache for the whole server: 1-hour default TTL,
// expired keys swept every 120 seconds.
export const cache = new NodeCache({ stdTTL: 3600, checkperiod: 120 });

export default cache;

View File

@@ -0,0 +1,41 @@
import { Org, ResourceSession, Session, User } from "@server/db";
/**
 * Input for checkOrgAccessPolicy. For each entity (org, user, session),
 * callers may pass either the id or the already-loaded row — all fields
 * are optional at the type level.
 */
export type CheckOrgAccessPolicyProps = {
    orgId?: string;
    org?: Org;
    userId?: string;
    user?: User;
    sessionId?: string;
    session?: Session;
};

/**
 * Result of an org access-policy evaluation. When `allowed` is false,
 * `error` and the per-policy compliance details in `policies` describe why.
 */
export type CheckOrgAccessPolicyResult = {
    allowed: boolean;
    error?: string;
    policies?: {
        requiredTwoFactor?: boolean;
        maxSessionLength?: {
            compliant: boolean;
            maxSessionLengthHours: number;
            sessionAgeHours: number;
        };
        passwordAge?: {
            compliant: boolean;
            maxPasswordAgeDays: number;
            passwordAgeDays: number;
        };
    };
};

/**
 * Enforce an org's maximum session length against a resource session.
 * Stub: always reports the session as valid (no enforcement in this build).
 */
export async function enforceResourceSessionLength(
    resourceSession: ResourceSession,
    org: Org
): Promise<{ valid: boolean; error?: string }> {
    return { valid: true };
}

/**
 * Evaluate an org's access policies (2FA requirement, session length,
 * password age) for a user/session.
 * Stub: always allows (no enforcement in this build).
 */
export async function checkOrgAccessPolicy(
    props: CheckOrgAccessPolicyProps
): Promise<CheckOrgAccessPolicyResult> {
    return { allowed: true };
}

62
server/lib/cleanupLogs.ts Normal file
View File

@@ -0,0 +1,62 @@
import { db, orgs } from "@server/db";
import { cleanUpOldLogs as cleanUpOldAccessLogs } from "#dynamic/lib/logAccessAudit";
import { cleanUpOldLogs as cleanUpOldActionLogs } from "#dynamic/middlewares/logActionAudit";
import { cleanUpOldLogs as cleanUpOldRequestLogs } from "@server/routers/badger/logRequestAudit";
import { gt, or } from "drizzle-orm";
/**
 * Start the periodic audit-log cleanup task.
 *
 * Each run selects every org that has a finite, positive retention window
 * for any of the three audit-log types (action / access / request) and
 * purges that org's entries older than the corresponding window.
 * Retention semantics: 0 = keep no logs, -1 = keep forever (neither value
 * triggers cleanup here).
 *
 * @returns the interval handle so callers can clearInterval() it.
 */
export function initLogCleanupInterval() {
    return setInterval(
        async () => {
            // Only orgs with at least one positive retention setting need work.
            const orgsToClean = await db
                .select({
                    orgId: orgs.orgId,
                    settingsLogRetentionDaysAction:
                        orgs.settingsLogRetentionDaysAction,
                    settingsLogRetentionDaysAccess:
                        orgs.settingsLogRetentionDaysAccess,
                    settingsLogRetentionDaysRequest:
                        orgs.settingsLogRetentionDaysRequest
                })
                .from(orgs)
                .where(
                    or(
                        gt(orgs.settingsLogRetentionDaysAction, 0),
                        gt(orgs.settingsLogRetentionDaysAccess, 0),
                        gt(orgs.settingsLogRetentionDaysRequest, 0)
                    )
                );

            for (const org of orgsToClean) {
                const {
                    orgId,
                    settingsLogRetentionDaysAction,
                    settingsLogRetentionDaysAccess,
                    settingsLogRetentionDaysRequest
                } = org;

                // FIX: each log type must be pruned with ITS OWN retention
                // setting; previously all three branches passed the
                // request-log retention value.
                if (settingsLogRetentionDaysAction > 0) {
                    await cleanUpOldActionLogs(
                        orgId,
                        settingsLogRetentionDaysAction
                    );
                }
                if (settingsLogRetentionDaysAccess > 0) {
                    await cleanUpOldAccessLogs(
                        orgId,
                        settingsLogRetentionDaysAccess
                    );
                }
                if (settingsLogRetentionDaysRequest > 0) {
                    await cleanUpOldRequestLogs(
                        orgId,
                        settingsLogRetentionDaysRequest
                    );
                }
            }
        },
        // FIX: restore the intended cadence; 60 * 1000 was a leftover
        // "for testing" value that contradicted the comment.
        3 * 60 * 60 * 1000 // every 3 hours
    );
}

View File

@@ -2,7 +2,7 @@ import path from "path";
import { fileURLToPath } from "url";
// This is a placeholder value replaced by the build process
export const APP_VERSION = "1.11.0";
export const APP_VERSION = "1.12.0";
export const __FILENAME = fileURLToPath(import.meta.url);
export const __DIRNAME = path.dirname(__FILENAME);

View File

@@ -6,7 +6,7 @@ export async function getCountryCodeForIp(
): Promise<string | undefined> {
try {
if (!maxmindLookup) {
logger.warn(
logger.debug(
"MaxMind DB path not configured, cannot perform GeoIP lookup"
);
return;

View File

@@ -0,0 +1,17 @@
/**
 * Delete access-audit rows for an org that are older than the retention
 * window. No-op stub in this build: returns immediately without touching
 * any storage.
 *
 * @param orgId - organization whose logs would be pruned
 * @param retentionDays - keep entries newer than this many days
 */
export async function cleanUpOldLogs(orgId: string, retentionDays: number) {
    return;
}

/**
 * Record an access-audit event (`action` is the boolean decision outcome,
 * `type` categorizes the event). Exactly one of `user`/`apiKey` is
 * presumably set depending on the actor — confirm with callers.
 * No-op stub in this build: the event is discarded.
 */
export async function logAccessAudit(data: {
    action: boolean;
    type: string;
    orgId: string;
    resourceId?: number;
    user?: { username: string; userId: string };
    apiKey?: { name: string | null; apiKeyId: string };
    metadata?: any;
    userAgent?: string;
    requestIp?: string;
}) {
    return;
}

View File

@@ -50,7 +50,7 @@ export const configSchema = z
.string()
.nonempty("base_domain must not be empty")
.transform((url) => url.toLowerCase()),
cert_resolver: z.string().optional().default("letsencrypt"),
cert_resolver: z.string().optional(), // null falls back to traefik.cert_resolver
prefer_wildcard_cert: z.boolean().optional().default(false)
})
)

View File

@@ -1,7 +1,8 @@
export enum AudienceIds {
General = "",
Subscribed = "",
Churned = ""
SignUps = "",
Subscribed = "",
Churned = "",
Newsletter = ""
}
let resend;
@@ -12,4 +13,4 @@ export async function moveEmailToAudience(
audienceId: AudienceIds
) {
return;
}
}

View File

@@ -0,0 +1,29 @@
import logger from "@server/logger";
import axios from "axios";
// Cached public IP of this server; populated by fetchServerIp() at startup.
let serverIp: string | null = null;

// External IP-echo services, tried in order until one responds.
const services = [
    "https://checkip.amazonaws.com",
    "https://ifconfig.io/ip",
    "https://api.ipify.org",
];

/**
 * Resolve the server's public IP by querying external echo services in
 * order, caching the first successful answer. Per-service failures are
 * logged and the next service is tried; if all fail, `serverIp` stays null.
 */
export async function fetchServerIp() {
    for (const url of services) {
        try {
            const response = await axios.get(url, { timeout: 5000 });
            serverIp = response.data.trim();
            logger.debug("Detected public IP: " + serverIp);
            return;
        } catch (err: any) {
            // FIX: use the shared logger (already imported and used above)
            // instead of console.warn, for consistent log routing.
            logger.warn(`Failed to fetch server IP from ${url}: ${err.message || err.code}`);
        }
    }
    // FIX: console.error -> logger.error for the same reason.
    logger.error("All attempts to fetch server IP failed.");
}

/** @returns the cached public IP, or null if detection has not succeeded. */
export function getServerIp() {
    return serverIp;
}

View File

@@ -200,10 +200,7 @@ class TelemetryClient {
event: "supporter_status",
properties: {
valid: stats.supporterStatus.valid,
tier: stats.supporterStatus.tier,
github_username: stats.supporterStatus.githubUsername
? this.anon(stats.supporterStatus.githubUsername)
: "None"
tier: stats.supporterStatus.tier
}
});
}
@@ -217,21 +214,6 @@ class TelemetryClient {
install_timestamp: hostMeta.createdAt
}
});
for (const email of stats.adminUsers) {
// There should only be on admin user, but just in case
if (email) {
this.client.capture({
distinctId: this.anon(email),
event: "admin_user",
properties: {
host_id: hostMeta.hostMetaId,
app_version: stats.appVersion,
hashed_email: this.anon(email)
}
});
}
}
}
private async collectAndSendAnalytics() {
@@ -262,19 +244,38 @@ class TelemetryClient {
num_clients: stats.numClients,
num_identity_providers: stats.numIdentityProviders,
num_sites_online: stats.numSitesOnline,
resources: stats.resources.map((r) => ({
name: this.anon(r.name),
sso_enabled: r.sso,
protocol: r.protocol,
http_enabled: r.http
})),
sites: stats.sites.map((s) => ({
site_name: this.anon(s.siteName),
megabytes_in: s.megabytesIn,
megabytes_out: s.megabytesOut,
type: s.type,
online: s.online
})),
num_resources_sso_enabled: stats.resources.filter(
(r) => r.sso
).length,
num_resources_non_http: stats.resources.filter(
(r) => !r.http
).length,
num_newt_sites: stats.sites.filter((s) => s.type === "newt")
.length,
num_local_sites: stats.sites.filter(
(s) => s.type === "local"
).length,
num_wg_sites: stats.sites.filter(
(s) => s.type === "wireguard"
).length,
avg_megabytes_in:
stats.sites.length > 0
? Math.round(
stats.sites.reduce(
(sum, s) => sum + (s.megabytesIn ?? 0),
0
) / stats.sites.length
)
: 0,
avg_megabytes_out:
stats.sites.length > 0
? Math.round(
stats.sites.reduce(
(sum, s) => sum + (s.megabytesOut ?? 0),
0
) / stats.sites.length
)
: 0,
num_api_keys: stats.numApiKeys,
num_custom_roles: stats.numCustomRoles
}

View File

@@ -309,10 +309,7 @@ export class TraefikConfigManager {
this.lastActiveDomains = new Set(domains);
}
if (
process.env.USE_PANGOLIN_DNS === "true" &&
build != "oss"
) {
if (process.env.USE_PANGOLIN_DNS === "true" && build != "oss") {
// Scan current local certificate state
this.lastLocalCertificateState =
await this.scanLocalCertificateState();
@@ -450,7 +447,8 @@ export class TraefikConfigManager {
currentExitNode,
config.getRawConfig().traefik.site_types,
build == "oss", // filter out the namespace domains in open source
build != "oss" // generate the login pages on the cloud and hybrid
build != "oss", // generate the login pages on the cloud and hybrid,
build == "saas" ? false : config.getRawConfig().traefik.allow_raw_resources // dont allow raw resources on saas otherwise use config
);
const domains = new Set<string>();
@@ -502,6 +500,25 @@ export class TraefikConfigManager {
};
}
// tcp:
// serversTransports:
// pp-transport-v1:
// proxyProtocol:
// version: 1
// pp-transport-v2:
// proxyProtocol:
// version: 2
if (build != "saas") {
// add the serversTransports section if not present
if (traefikConfig.tcp && !traefikConfig.tcp.serversTransports) {
traefikConfig.tcp.serversTransports = {
"pp-transport-v1": { proxyProtocol: { version: 1 } },
"pp-transport-v2": { proxyProtocol: { version: 2 } }
};
}
}
return { domains, traefikConfig };
} catch (error) {
// pull data out of the axios error to log

View File

@@ -1,4 +1,4 @@
import { db, targetHealthCheck } from "@server/db";
import { db, targetHealthCheck, domains } from "@server/db";
import {
and,
eq,
@@ -23,7 +23,8 @@ export async function getTraefikConfig(
exitNodeId: number,
siteTypes: string[],
filterOutNamespaceDomains = false,
generateLoginPageRouters = false
generateLoginPageRouters = false,
allowRawResources = true
): Promise<any> {
// Define extended target type with site information
type TargetWithSite = Target & {
@@ -56,6 +57,8 @@ export async function getTraefikConfig(
setHostHeader: resources.setHostHeader,
enableProxy: resources.enableProxy,
headers: resources.headers,
proxyProtocol: resources.proxyProtocol,
proxyProtocolVersion: resources.proxyProtocolVersion,
// Target fields
targetId: targets.targetId,
targetEnabled: targets.enabled,
@@ -75,11 +78,14 @@ export async function getTraefikConfig(
siteType: sites.type,
siteOnline: sites.online,
subnet: sites.subnet,
exitNodeId: sites.exitNodeId
exitNodeId: sites.exitNodeId,
// Domain cert resolver fields
domainCertResolver: domains.certResolver
})
.from(sites)
.innerJoin(targets, eq(targets.siteId, sites.siteId))
.innerJoin(resources, eq(resources.resourceId, targets.resourceId))
.leftJoin(domains, eq(domains.domainId, resources.domainId))
.leftJoin(
targetHealthCheck,
eq(targetHealthCheck.targetId, targets.targetId)
@@ -88,13 +94,20 @@ export async function getTraefikConfig(
and(
eq(targets.enabled, true),
eq(resources.enabled, true),
eq(sites.exitNodeId, exitNodeId),
or(
eq(sites.exitNodeId, exitNodeId),
and(
isNull(sites.exitNodeId),
sql`(${siteTypes.includes("local") ? 1 : 0} = 1)`, // only allow local sites if "local" is in siteTypes
eq(sites.type, "local")
)
),
or(
ne(targetHealthCheck.hcHealth, "unhealthy"), // Exclude unhealthy targets
isNull(targetHealthCheck.hcHealth) // Include targets with no health check record
),
inArray(sites.type, siteTypes),
config.getRawConfig().traefik.allow_raw_resources
allowRawResources
? isNotNull(resources.http) // ignore the http check if allow_raw_resources is true
: eq(resources.http, true)
)
@@ -157,11 +170,15 @@ export async function getTraefikConfig(
enableProxy: row.enableProxy,
targets: [],
headers: row.headers,
proxyProtocol: row.proxyProtocol,
proxyProtocolVersion: row.proxyProtocolVersion ?? 1,
path: row.path, // the targets will all have the same path
pathMatchType: row.pathMatchType, // the targets will all have the same pathMatchType
rewritePath: row.rewritePath,
rewritePathType: row.rewritePathType,
priority: priority // may be null, we fallback later
priority: priority,
// Store domain cert resolver fields
domainCertResolver: row.domainCertResolver
});
}
@@ -240,30 +257,45 @@ export async function getTraefikConfig(
wildCard = resource.fullDomain;
}
const configDomain = config.getDomain(resource.domainId);
const globalDefaultResolver =
config.getRawConfig().traefik.cert_resolver;
const globalDefaultPreferWildcard =
config.getRawConfig().traefik.prefer_wildcard_cert;
let certResolver: string, preferWildcardCert: boolean;
if (!configDomain) {
certResolver = config.getRawConfig().traefik.cert_resolver;
preferWildcardCert =
config.getRawConfig().traefik.prefer_wildcard_cert;
} else {
certResolver = configDomain.cert_resolver;
preferWildcardCert = configDomain.prefer_wildcard_cert;
}
const domainCertResolver = resource.domainCertResolver;
const preferWildcardCert = resource.preferWildcardCert;
const tls = {
certResolver: certResolver,
...(preferWildcardCert
? {
domains: [
{
main: wildCard
}
]
}
: {})
};
let resolverName: string | undefined;
let preferWildcard: boolean | undefined;
// Handle both letsencrypt & custom cases
if (domainCertResolver) {
resolverName = domainCertResolver.trim();
} else {
resolverName = globalDefaultResolver;
}
if (
preferWildcardCert !== undefined &&
preferWildcardCert !== null
) {
preferWildcard = preferWildcardCert;
} else {
preferWildcard = globalDefaultPreferWildcard;
}
const tls = {
certResolver: resolverName,
...(preferWildcard
? {
domains: [
{
main: wildCard
}
]
}
: {})
};
const additionalMiddlewares =
config.getRawConfig().traefik.additional_middlewares || [];
@@ -502,14 +534,14 @@ export async function getTraefikConfig(
})(),
...(resource.stickySession
? {
sticky: {
cookie: {
name: "p_sticky", // TODO: make this configurable via config.yml like other cookies
secure: resource.ssl,
httpOnly: true
}
}
}
sticky: {
cookie: {
name: "p_sticky", // TODO: make this configurable via config.yml like other cookies
secure: resource.ssl,
httpOnly: true
}
}
}
: {})
}
};
@@ -608,15 +640,20 @@ export async function getTraefikConfig(
}
});
})(),
...(resource.proxyProtocol && protocol == "tcp"
? {
serversTransport: `pp-transport-v${resource.proxyProtocolVersion || 1}`
}
: {}),
...(resource.stickySession
? {
sticky: {
ipStrategy: {
depth: 0,
sourcePort: true
}
}
}
sticky: {
ipStrategy: {
depth: 0,
sourcePort: true
}
}
}
: {})
}
};

View File

@@ -40,6 +40,10 @@ export class License {
public setServerSecret(secret: string) {
this.serverSecret = secret;
}
public async isUnlocked() {
return false;
}
}
await setHostMeta();

View File

@@ -27,3 +27,4 @@ export * from "./verifyDomainAccess";
export * from "./verifyClientsEnabled";
export * from "./verifyUserIsOrgOwner";
export * from "./verifySiteResourceAccess";
export * from "./logActionAudit";

View File

@@ -0,0 +1,16 @@
import { ActionsEnum } from "@server/auth/actions";
import { Request, Response, NextFunction } from "express";
/**
 * Stub audit middleware for this build: records nothing and immediately
 * forwards the request to the next handler.
 */
export function logActionAudit(action: ActionsEnum) {
    return async (
        req: Request,
        res: Response,
        next: NextFunction
    ): Promise<any> => {
        next();
    };
}
/** No-op in this build: old audit-log rows are never purged from here. */
export async function cleanUpOldLogs(orgId: string, retentionDays: number) {}

View File

@@ -1,9 +1,11 @@
import { Request, Response, NextFunction } from "express";
import { db } from "@server/db";
import { db, orgs } from "@server/db";
import { userOrgs } from "@server/db";
import { and, eq } from "drizzle-orm";
import createHttpError from "http-errors";
import HttpCode from "@server/types/HttpCode";
import { checkOrgAccessPolicy } from "#dynamic/lib/checkOrgAccessPolicy";
import logger from "@server/logger";
export async function verifyOrgAccess(
req: Request,
@@ -43,12 +45,30 @@ export async function verifyOrgAccess(
"User does not have access to this organization"
)
);
} else {
// User has access, attach the user's role to the request for potential future use
req.userOrgRoleId = req.userOrg.roleId;
req.userOrgId = orgId;
return next();
}
const policyCheck = await checkOrgAccessPolicy({
orgId,
userId,
session: req.session
});
logger.debug("Org check policy result", { policyCheck });
if (!policyCheck.allowed || policyCheck.error) {
return next(
createHttpError(
HttpCode.FORBIDDEN,
"Failed organization access policy check: " +
(policyCheck.error || "Unknown error")
)
);
}
// User has access, attach the user's role to the request for potential future use
req.userOrgRoleId = req.userOrg.roleId;
req.userOrgId = orgId;
return next();
} catch (e) {
return next(
createHttpError(

View File

@@ -16,8 +16,8 @@ import { certificates, db } from "@server/db";
import { and, eq, isNotNull, or, inArray, sql } from "drizzle-orm";
import { decryptData } from "@server/lib/encryption";
import * as fs from "fs";
import NodeCache from "node-cache";
import logger from "@server/logger";
import cache from "@server/lib/cache";
let encryptionKeyPath = "";
let encryptionKeyHex = "";
@@ -51,9 +51,6 @@ export type CertificateResult = {
updatedAt?: number | null;
};
// --- In-Memory Cache Implementation ---
const certificateCache = new NodeCache({ stdTTL: 180 }); // Cache for 3 minutes (180 seconds)
export async function getValidCertificatesForDomains(
domains: Set<string>,
useCache: boolean = true
@@ -67,7 +64,8 @@ export async function getValidCertificatesForDomains(
// 1. Check cache first if enabled
if (useCache) {
for (const domain of domains) {
const cachedCert = certificateCache.get<CertificateResult>(domain);
const cacheKey = `cert:${domain}`;
const cachedCert = cache.get<CertificateResult>(cacheKey);
if (cachedCert) {
finalResults.push(cachedCert); // Valid cache hit
} else {
@@ -180,7 +178,8 @@ export async function getValidCertificatesForDomains(
// Add to cache for future requests, using the *requested domain* as the key
if (useCache) {
certificateCache.set(domain, resultCert);
const cacheKey = `cert:${domain}`;
cache.set(cacheKey, resultCert, 180);
}
}
}

View File

@@ -0,0 +1,201 @@
/*
* This file is part of a proprietary work.
*
* Copyright (c) 2025 Fossorial, Inc.
* All rights reserved.
*
* This file is licensed under the Fossorial Commercial License.
* You may not use this file except in compliance with the License.
* Unauthorized use, copying, modification, or distribution is strictly prohibited.
*
* This file is not licensed under the AGPLv3.
*/
import { build } from "@server/build";
import {
db,
Org,
orgs,
ResourceSession,
sessions,
users
} from "@server/db";
import { getOrgTierData } from "#private/lib/billing";
import { TierId } from "@server/lib/billing/tiers";
import license from "#private/license/license";
import { eq } from "drizzle-orm";
import {
CheckOrgAccessPolicyProps,
CheckOrgAccessPolicyResult
} from "@server/lib/checkOrgAccessPolicy";
import { UserType } from "@server/types/UserTypes";
/**
 * Enforces the organization's max-session-length policy on a resource session.
 * When the org configures `maxSessionLengthHours`, a session older than that
 * window — or one with no `issuedAt` timestamp at all — is rejected.
 */
export async function enforceResourceSessionLength(
    resourceSession: ResourceSession,
    org: Org
): Promise<{ valid: boolean; error?: string }> {
    const maxSessionLengthHours = org.maxSessionLengthHours;
    if (!maxSessionLengthHours) {
        // No policy configured; nothing to enforce.
        return { valid: true };
    }

    const sessionIssuedAt = resourceSession.issuedAt; // may be null
    if (!sessionIssuedAt) {
        // Sessions without an issue timestamp cannot be aged, so reject them.
        return {
            valid: false,
            error: `Resource session is invalid due to organization policy (max session length: ${maxSessionLengthHours} hours)`
        };
    }

    const sessionAgeMs = Date.now() - sessionIssuedAt;
    if (sessionAgeMs > maxSessionLengthHours * 60 * 60 * 1000) {
        return {
            valid: false,
            error: `Resource session has expired due to organization policy (max session length: ${maxSessionLengthHours} hours)`
        };
    }

    return { valid: true };
}
/**
 * Evaluates an organization's access policies (required 2FA, max session
 * length, password expiry) for a user session.
 *
 * Accepts either raw IDs or pre-fetched records in `props`; any record not
 * supplied is loaded from the DB and memoized back onto `props`. In the
 * enterprise build the checks are skipped (allowed) unless a license is
 * unlocked.
 *
 * @returns `{ allowed, policies }` detailing each policy that applied, or
 *          `{ allowed: false, error }` when inputs are missing or invalid.
 */
export async function checkOrgAccessPolicy(
    props: CheckOrgAccessPolicyProps
): Promise<CheckOrgAccessPolicyResult> {
    // Prefer explicit IDs; fall back to the IDs on the supplied records.
    const userId = props.userId || props.user?.userId;
    const orgId = props.orgId || props.org?.orgId;
    const sessionId = props.sessionId || props.session?.sessionId;
    if (!orgId) {
        return {
            allowed: false,
            error: "Organization ID is required"
        };
    }
    if (!userId) {
        return { allowed: false, error: "User ID is required" };
    }
    if (!sessionId) {
        return { allowed: false, error: "Session ID is required" };
    }
    if (build === "enterprise") {
        const isUnlocked = await license.isUnlocked();
        // if not licensed, don't check the policies
        if (!isUnlocked) {
            return { allowed: true };
        }
    }
    // get the needed data
    if (!props.org) {
        const [orgQuery] = await db
            .select()
            .from(orgs)
            .where(eq(orgs.orgId, orgId));
        props.org = orgQuery;
        if (!props.org) {
            return { allowed: false, error: "Organization not found" };
        }
    }
    if (!props.user) {
        const [userQuery] = await db
            .select()
            .from(users)
            .where(eq(users.userId, userId));
        props.user = userQuery;
        if (!props.user) {
            return { allowed: false, error: "User not found" };
        }
    }
    if (!props.session) {
        const [sessionQuery] = await db
            .select()
            .from(sessions)
            .where(eq(sessions.sessionId, sessionId));
        props.session = sessionQuery;
        if (!props.session) {
            return { allowed: false, error: "Session not found" };
        }
    }
    // Reject mismatched inputs: the session must belong to the given user.
    if (props.session.userId !== props.user.userId) {
        return {
            allowed: false,
            error: "Session does not belong to the user"
        };
    }
    // now check the policies
    const policies: CheckOrgAccessPolicyResult["policies"] = {};
    // only applies to internal users; oidc users 2fa is managed by the IDP
    if (props.user.type === UserType.Internal && props.org.requireTwoFactor) {
        policies.requiredTwoFactor = props.user.twoFactorEnabled || false;
    }
    // applies to all users
    if (props.org.maxSessionLengthHours) {
        const sessionIssuedAt = props.session.issuedAt; // may be null
        const maxSessionLengthHours = props.org.maxSessionLengthHours;
        if (sessionIssuedAt) {
            const maxSessionLengthMs = maxSessionLengthHours * 60 * 60 * 1000;
            const sessionAgeMs = Date.now() - sessionIssuedAt;
            policies.maxSessionLength = {
                compliant: sessionAgeMs <= maxSessionLengthMs,
                maxSessionLengthHours,
                sessionAgeHours: sessionAgeMs / (60 * 60 * 1000)
            };
        } else {
            // Sessions without an issue timestamp are treated as non-compliant.
            policies.maxSessionLength = {
                compliant: false,
                maxSessionLengthHours,
                sessionAgeHours: maxSessionLengthHours
            };
        }
    }
    // only applies to internal users; oidc users don't have passwords
    if (props.user.type === UserType.Internal && props.org.passwordExpiryDays) {
        if (props.user.lastPasswordChange) {
            const passwordExpiryDays = props.org.passwordExpiryDays;
            const passwordAgeMs = Date.now() - props.user.lastPasswordChange;
            const passwordAgeDays = passwordAgeMs / (24 * 60 * 60 * 1000);
            policies.passwordAge = {
                compliant: passwordAgeDays <= passwordExpiryDays,
                maxPasswordAgeDays: passwordExpiryDays,
                passwordAgeDays: passwordAgeDays
            };
        } else {
            policies.passwordAge = {
                compliant: false,
                maxPasswordAgeDays: props.org.passwordExpiryDays,
                passwordAgeDays: props.org.passwordExpiryDays // Treat as expired
            };
        }
    }
    // A single failing policy denies access; policies that did not apply
    // (absent from `policies`) have no effect.
    let allowed = true;
    if (policies.requiredTwoFactor === false) {
        allowed = false;
    }
    if (
        policies.maxSessionLength &&
        policies.maxSessionLength.compliant === false
    ) {
        allowed = false;
    }
    if (policies.passwordAge && policies.passwordAge.compliant === false) {
        allowed = false;
    }
    return {
        allowed,
        policies
    };
}

View File

@@ -0,0 +1,170 @@
/*
* This file is part of a proprietary work.
*
* Copyright (c) 2025 Fossorial, Inc.
* All rights reserved.
*
* This file is licensed under the Fossorial Commercial License.
* You may not use this file except in compliance with the License.
* Unauthorized use, copying, modification, or distribution is strictly prohibited.
*
* This file is not licensed under the AGPLv3.
*/
import { accessAuditLog, db, orgs } from "@server/db";
import { getCountryCodeForIp } from "@server/lib/geoip";
import logger from "@server/logger";
import { and, eq, lt } from "drizzle-orm";
import cache from "@server/lib/cache";
/**
 * Resolves the access-audit-log retention window (in days) for an org,
 * caching the value for 5 minutes to avoid a DB round trip per event.
 * Returns 0 for unknown orgs; callers treat 0 as "do not log".
 */
async function getAccessDays(orgId: string): Promise<number> {
    // check cache first
    const cached = cache.get<number>(`org_${orgId}_accessDays`);
    if (cached !== undefined) {
        return cached;
    }
    const [org] = await db
        .select({
            // NOTE(review): this reads the *Action* retention column, but this
            // module handles the *access* log — confirm the schema doesn't
            // define a separate settingsLogRetentionDaysAccess column that
            // should be used here instead (possible copy-paste from the
            // action-log sibling).
            settingsLogRetentionDaysAction: orgs.settingsLogRetentionDaysAction
        })
        .from(orgs)
        .where(eq(orgs.orgId, orgId))
        .limit(1);
    if (!org) {
        return 0;
    }
    // store the result in cache
    cache.set(
        `org_${orgId}_accessDays`,
        org.settingsLogRetentionDaysAction,
        300
    );
    return org.settingsLogRetentionDaysAction;
}
/**
 * Deletes access audit log rows for `orgId` older than `retentionDays`.
 * Timestamps are stored as unix seconds, so the cutoff is computed in seconds.
 * Errors are logged and swallowed; cleanup must never break the caller.
 */
export async function cleanUpOldLogs(orgId: string, retentionDays: number) {
    const now = Math.floor(Date.now() / 1000);
    const cutoffTimestamp = now - retentionDays * 24 * 60 * 60;

    try {
        const deleteResult = await db
            .delete(accessAuditLog)
            .where(
                and(
                    lt(accessAuditLog.timestamp, cutoffTimestamp),
                    eq(accessAuditLog.orgId, orgId)
                )
            );

        // NOTE(review): `.changes` is a sqlite-style result field; confirm the
        // active driver exposes it (node-postgres reports `rowCount`), else
        // this interpolates `undefined`.
        logger.info(
            `Cleaned up ${deleteResult.changes} access audit logs older than ${retentionDays} days`
        );
    } catch (error) {
        // Fixed: previously said "action audit logs" though this function
        // purges the *access* audit log table.
        logger.error("Error cleaning up old access audit logs:", error);
    }
}
/**
 * Writes one row to the access audit log for `data.orgId`.
 *
 * Skips logging entirely when the org's retention setting is 0. Never throws:
 * failures are logged and swallowed so auditing cannot break the request path.
 */
export async function logAccessAudit(data: {
    action: boolean;
    type: string;
    orgId: string;
    resourceId?: number;
    user?: { username: string; userId: string };
    apiKey?: { name: string | null; apiKeyId: string };
    metadata?: any;
    userAgent?: string;
    requestIp?: string;
}) {
    try {
        const retentionDays = await getAccessDays(data.orgId);
        if (retentionDays === 0) {
            // do not log
            return;
        }

        // Resolve the acting principal; when both are present, the API key
        // assignment runs last and wins (preserves original precedence).
        let actorType: string | undefined;
        let actor: string | undefined;
        let actorId: string | undefined;

        const user = data.user;
        if (user) {
            actorType = "user";
            actor = user.username;
            actorId = user.userId;
        }

        const apiKey = data.apiKey;
        if (apiKey) {
            actorType = "apiKey";
            actor = apiKey.name || apiKey.apiKeyId;
            actorId = apiKey.apiKeyId;
        }

        // if (!actorType || !actor || !actorId) {
        //     logger.warn("logRequestAudit: Incomplete actor information");
        //     return;
        // }

        const timestamp = Math.floor(Date.now() / 1000); // unix seconds

        // BUG FIX: the original tested the freshly-nulled local
        // (`if (metadata)`) instead of the caller's payload, so metadata was
        // never serialized or stored.
        let metadata: string | null = null;
        if (data.metadata) {
            metadata = JSON.stringify(data.metadata);
        }

        // Strip brackets from a bracketed IPv6 literal (e.g. "[::1]:443").
        const clientIp = data.requestIp
            ? (() => {
                  if (
                      data.requestIp.startsWith("[") &&
                      data.requestIp.includes("]")
                  ) {
                      // if brackets are found, extract the IPv6 address from between the brackets
                      const ipv6Match = data.requestIp.match(/\[(.*?)\]/);
                      if (ipv6Match) {
                          return ipv6Match[1];
                      }
                  }
                  return data.requestIp;
              })()
            : undefined;

        const countryCode = data.requestIp
            ? await getCountryCodeFromIp(data.requestIp)
            : undefined;

        await db.insert(accessAuditLog).values({
            timestamp: timestamp,
            orgId: data.orgId,
            actorType,
            actor,
            actorId,
            action: data.action,
            type: data.type,
            metadata,
            resourceId: data.resourceId,
            userAgent: data.userAgent,
            ip: clientIp,
            location: countryCode
        });
    } catch (error) {
        logger.error(error);
    }
}
/**
 * Looks up the country code for an IP via the local GeoIP database,
 * memoizing successful lookups for 5 minutes.
 * Returns undefined when geolocation is unavailable for the address.
 */
async function getCountryCodeFromIp(ip: string): Promise<string | undefined> {
    const geoIpCacheKey = `geoip_access:${ip}`;
    let cachedCountryCode: string | undefined = cache.get(geoIpCacheKey);
    if (!cachedCountryCode) {
        cachedCountryCode = await getCountryCodeForIp(ip); // do it locally
        // Cache for longer since IP geolocation doesn't change frequently.
        // Only cache successful lookups: storing `undefined` was wasted work,
        // as the falsy check above would refetch it anyway.
        if (cachedCountryCode) {
            cache.set(geoIpCacheKey, cachedCountryCode, 300); // 5 minutes
        }
    }
    return cachedCountryCode;
}

View File

@@ -16,9 +16,10 @@ import privateConfig from "#private/lib/config";
import logger from "@server/logger";
export enum AudienceIds {
General = "5cfbf99b-c592-40a9-9b8a-577a4681c158",
Subscribed = "870b43fd-387f-44de-8fc1-707335f30b20",
Churned = "f3ae92bd-2fdb-4d77-8746-2118afd62549"
SignUps = "5cfbf99b-c592-40a9-9b8a-577a4681c158",
Subscribed = "870b43fd-387f-44de-8fc1-707335f30b20",
Churned = "f3ae92bd-2fdb-4d77-8746-2118afd62549",
Newsletter = "5500c431-191c-42f0-a5d4-8b6d445b4ea0"
}
const resend = new Resend(

View File

@@ -15,6 +15,7 @@ import {
certificates,
db,
domainNamespaces,
domains,
exitNodes,
loginPage,
targetHealthCheck
@@ -40,6 +41,7 @@ import {
CertificateResult,
getValidCertificatesForDomains
} from "#private/lib/certificates";
import { build } from "@server/build";
const redirectHttpsMiddlewareName = "redirect-to-https";
const redirectToRootMiddlewareName = "redirect-to-root";
@@ -49,7 +51,8 @@ export async function getTraefikConfig(
exitNodeId: number,
siteTypes: string[],
filterOutNamespaceDomains = false,
generateLoginPageRouters = false
generateLoginPageRouters = false,
allowRawResources = true
): Promise<any> {
// Define extended target type with site information
type TargetWithSite = Target & {
@@ -103,11 +106,16 @@ export async function getTraefikConfig(
subnet: sites.subnet,
exitNodeId: sites.exitNodeId,
// Namespace
domainNamespaceId: domainNamespaces.domainNamespaceId
domainNamespaceId: domainNamespaces.domainNamespaceId,
// Certificate
certificateStatus: certificates.status,
domainCertResolver: domains.certResolver,
})
.from(sites)
.innerJoin(targets, eq(targets.siteId, sites.siteId))
.innerJoin(resources, eq(resources.resourceId, targets.resourceId))
.leftJoin(certificates, eq(certificates.domainId, resources.domainId))
.leftJoin(domains, eq(domains.domainId, resources.domainId))
.leftJoin(
targetHealthCheck,
eq(targetHealthCheck.targetId, targets.targetId)
@@ -120,13 +128,21 @@ export async function getTraefikConfig(
and(
eq(targets.enabled, true),
eq(resources.enabled, true),
eq(sites.exitNodeId, exitNodeId),
or(
eq(sites.exitNodeId, exitNodeId),
and(
isNull(sites.exitNodeId),
sql`(${siteTypes.includes("local") ? 1 : 0} = 1)`, // only allow local sites if "local" is in siteTypes
eq(sites.type, "local"),
sql`(${build != "saas" ? 1 : 0} = 1)` // Dont allow undefined local sites in cloud
)
),
or(
ne(targetHealthCheck.hcHealth, "unhealthy"), // Exclude unhealthy targets
isNull(targetHealthCheck.hcHealth) // Include targets with no health check record
),
inArray(sites.type, siteTypes),
config.getRawConfig().traefik.allow_raw_resources
allowRawResources
? isNotNull(resources.http) // ignore the http check if allow_raw_resources is true
: eq(resources.http, true)
)
@@ -197,7 +213,8 @@ export async function getTraefikConfig(
pathMatchType: row.pathMatchType, // the targets will all have the same pathMatchType
rewritePath: row.rewritePath,
rewritePathType: row.rewritePathType,
priority: priority // may be null, we fallback later
priority: priority, // may be null, we fallback later
domainCertResolver: row.domainCertResolver,
});
}
@@ -285,6 +302,20 @@ export async function getTraefikConfig(
config_output.http.services = {};
}
const domainParts = fullDomain.split(".");
let wildCard;
if (domainParts.length <= 2) {
wildCard = `*.${domainParts.join(".")}`;
} else {
wildCard = `*.${domainParts.slice(1).join(".")}`;
}
if (!resource.subdomain) {
wildCard = resource.fullDomain;
}
const configDomain = config.getDomain(resource.domainId);
let tls = {};
if (!privateConfig.getRawPrivateConfig().flags.use_pangolin_dns) {
const domainParts = fullDomain.split(".");
@@ -315,13 +346,13 @@ export async function getTraefikConfig(
certResolver: certResolver,
...(preferWildcardCert
? {
domains: [
{
main: wildCard
}
]
}
: {})
domains: [
{
main: wildCard,
},
],
}
: {}),
};
} else {
// find a cert that matches the full domain, if not continue
@@ -573,14 +604,14 @@ export async function getTraefikConfig(
})(),
...(resource.stickySession
? {
sticky: {
cookie: {
name: "p_sticky", // TODO: make this configurable via config.yml like other cookies
secure: resource.ssl,
httpOnly: true
}
}
}
sticky: {
cookie: {
name: "p_sticky", // TODO: make this configurable via config.yml like other cookies
secure: resource.ssl,
httpOnly: true
}
}
}
: {})
}
};
@@ -679,15 +710,20 @@ export async function getTraefikConfig(
}
});
})(),
...(resource.proxyProtocol && protocol == "tcp" // proxy protocol only works for tcp
? {
serversTransport: `pp-transport-v${resource.proxyProtocolVersion || 1}`
}
: {}),
...(resource.stickySession
? {
sticky: {
ipStrategy: {
depth: 0,
sourcePort: true
}
}
}
sticky: {
ipStrategy: {
depth: 0,
sourcePort: true
}
}
}
: {})
}
};
@@ -735,10 +771,9 @@ export async function getTraefikConfig(
loadBalancer: {
servers: [
{
url: `http://${
config.getRawConfig().server
url: `http://${config.getRawConfig().server
.internal_hostname
}:${config.getRawConfig().server.next_port}`
}:${config.getRawConfig().server.next_port}`
}
]
}
@@ -754,7 +789,7 @@ export async function getTraefikConfig(
continue;
}
let tls = {};
const tls = {};
if (
!privateConfig.getRawPrivateConfig().flags.use_pangolin_dns
) {

View File

@@ -15,4 +15,5 @@ export * from "./verifyCertificateAccess";
export * from "./verifyRemoteExitNodeAccess";
export * from "./verifyIdpAccess";
export * from "./verifyLoginPageAccess";
export * from "../../lib/corsWithLoginPage";
export * from "./logActionAudit";
export * from "./verifySubscription";

View File

@@ -0,0 +1,145 @@
/*
* This file is part of a proprietary work.
*
* Copyright (c) 2025 Fossorial, Inc.
* All rights reserved.
*
* This file is licensed under the Fossorial Commercial License.
* You may not use this file except in compliance with the License.
* Unauthorized use, copying, modification, or distribution is strictly prohibited.
*
* This file is not licensed under the AGPLv3.
*/
import { ActionsEnum } from "@server/auth/actions";
import { actionAuditLog, db, orgs } from "@server/db";
import logger from "@server/logger";
import HttpCode from "@server/types/HttpCode";
import { Request, Response, NextFunction } from "express";
import createHttpError from "http-errors";
import { and, eq, lt } from "drizzle-orm";
import cache from "@server/lib/cache";
/**
 * Resolves the action-audit-log retention window (in days) for an org,
 * caching the value for 5 minutes to avoid a DB round trip per request.
 * Returns 0 for unknown orgs; callers treat 0 as "do not log".
 */
async function getActionDays(orgId: string): Promise<number> {
    const cacheKey = `org_${orgId}_actionDays`;

    const cached = cache.get<number>(cacheKey);
    if (cached !== undefined) {
        return cached;
    }

    const rows = await db
        .select({
            settingsLogRetentionDaysAction: orgs.settingsLogRetentionDaysAction
        })
        .from(orgs)
        .where(eq(orgs.orgId, orgId))
        .limit(1);

    const org = rows[0];
    if (!org) {
        return 0;
    }

    // Memoize the looked-up setting before returning it.
    cache.set(cacheKey, org.settingsLogRetentionDaysAction, 300);
    return org.settingsLogRetentionDaysAction;
}
/**
 * Purges action audit log rows for `orgId` whose unix-seconds timestamp is
 * older than the retention window. Errors are logged, never propagated.
 */
export async function cleanUpOldLogs(orgId: string, retentionDays: number) {
    const cutoffTimestamp =
        Math.floor(Date.now() / 1000) - retentionDays * 24 * 60 * 60;
    try {
        const deleteResult = await db
            .delete(actionAuditLog)
            .where(
                and(
                    eq(actionAuditLog.orgId, orgId),
                    lt(actionAuditLog.timestamp, cutoffTimestamp)
                )
            );
        logger.info(
            `Cleaned up ${deleteResult.changes} action audit logs older than ${retentionDays} days`
        );
    } catch (error) {
        logger.error("Error cleaning up old action audit logs:", error);
    }
}
/**
 * Express middleware factory that records `action` to the action audit log.
 *
 * The actor is taken from `req.user`/`req.userOrg` or, when present,
 * `req.apiKey`/`req.apiKeyOrg` (the API key branch runs last and wins).
 * Requests with no org context or incomplete actor info pass through without
 * logging. Orgs with a retention setting of 0 are not logged either.
 * A DB failure while inserting the row surfaces as a 500 via `next(err)`.
 */
export function logActionAudit(action: ActionsEnum) {
    return async function (
        req: Request,
        res: Response,
        next: NextFunction
    ): Promise<any> {
        try {
            let orgId;
            let actorType;
            let actor;
            let actorId;
            const user = req.user;
            if (user) {
                const userOrg = req.userOrg;
                orgId = userOrg?.orgId;
                actorType = "user";
                actor = user.username;
                actorId = user.userId;
            }
            const apiKey = req.apiKey;
            if (apiKey) {
                const apiKeyOrg = req.apiKeyOrg;
                orgId = apiKeyOrg?.orgId;
                actorType = "apiKey";
                actor = apiKey.name;
                actorId = apiKey.apiKeyId;
            }
            if (!orgId) {
                // No org context on the request; nothing to attribute the action to.
                logger.warn("logActionAudit: No organization context found");
                return next();
            }
            if (!actorType || !actor || !actorId) {
                logger.warn("logActionAudit: Incomplete actor information");
                return next();
            }
            const retentionDays = await getActionDays(orgId);
            if (retentionDays === 0) {
                // do not log
                return next();
            }
            const timestamp = Math.floor(Date.now() / 1000);
            // Route params (orgId, resourceId, ...) are stored as the metadata payload.
            let metadata = null;
            if (req.params) {
                metadata = JSON.stringify(req.params);
            }
            await db.insert(actionAuditLog).values({
                timestamp,
                orgId,
                actorType,
                actor,
                actorId,
                action,
                metadata
            });
            return next();
        } catch (error) {
            logger.error(error);
            return next(
                createHttpError(
                    HttpCode.INTERNAL_SERVER_ERROR,
                    "Error verifying logging action"
                )
            );
        }
    };
}

View File

@@ -0,0 +1,50 @@
/*
* This file is part of a proprietary work.
*
* Copyright (c) 2025 Fossorial, Inc.
* All rights reserved.
*
* This file is licensed under the Fossorial Commercial License.
* You may not use this file except in compliance with the License.
* Unauthorized use, copying, modification, or distribution is strictly prohibited.
*
* This file is not licensed under the AGPLv3.
*/
import { Request, Response, NextFunction } from "express";
import createHttpError from "http-errors";
import HttpCode from "@server/types/HttpCode";
import { build } from "@server/build";
import { getOrgTierData } from "#private/lib/billing";
/**
 * Middleware: in the SaaS build, blocks requests for organizations without
 * an active subscription; every other build passes through untouched.
 */
export async function verifyValidSubscription(
    req: Request,
    res: Response,
    next: NextFunction
) {
    try {
        // Subscription gating only applies to the hosted (SaaS) build.
        if (build != "saas") {
            return next();
        }

        const { active } = await getOrgTierData(req.params.orgId);
        if (active) {
            return next();
        }

        return next(
            createHttpError(
                HttpCode.FORBIDDEN,
                "Organization does not have an active subscription"
            )
        );
    } catch (e) {
        return next(
            createHttpError(
                HttpCode.INTERNAL_SERVER_ERROR,
                "Error verifying subscription"
            )
        );
    }
}

View File

@@ -0,0 +1,81 @@
/*
* This file is part of a proprietary work.
*
* Copyright (c) 2025 Fossorial, Inc.
* All rights reserved.
*
* This file is licensed under the Fossorial Commercial License.
* You may not use this file except in compliance with the License.
* Unauthorized use, copying, modification, or distribution is strictly prohibited.
*
* This file is not licensed under the AGPLv3.
*/
import { registry } from "@server/openApi";
import { NextFunction } from "express";
import { Request, Response } from "express";
import { OpenAPITags } from "@server/openApi";
import createHttpError from "http-errors";
import HttpCode from "@server/types/HttpCode";
import { fromError } from "zod-validation-error";
import logger from "@server/logger";
import { queryAccessAuditLogsParams, queryAccessAuditLogsQuery, queryAccess } from "./queryAccessAuditLog";
import { generateCSV } from "@server/routers/auditLogs/generateCSV";
registry.registerPath({
method: "get",
path: "/org/{orgId}/logs/access/export",
description: "Export the access audit log for an organization as CSV",
tags: [OpenAPITags.Org],
request: {
query: queryAccessAuditLogsQuery,
params: queryAccessAuditLogsParams
},
responses: {}
});
/**
 * GET handler: streams the organization's access audit log as a CSV
 * attachment. Query and path params are validated with the same zod schemas
 * as the JSON listing endpoint before the query is executed.
 */
export async function exportAccessAuditLogs(
    req: Request,
    res: Response,
    next: NextFunction
): Promise<any> {
    try {
        const query = queryAccessAuditLogsQuery.safeParse(req.query);
        if (!query.success) {
            return next(
                createHttpError(HttpCode.BAD_REQUEST, fromError(query.error))
            );
        }

        const params = queryAccessAuditLogsParams.safeParse(req.params);
        if (!params.success) {
            return next(
                createHttpError(HttpCode.BAD_REQUEST, fromError(params.error))
            );
        }

        const data = { ...query.data, ...params.data };
        const rows = await queryAccess(data)
            .limit(data.limit)
            .offset(data.offset);
        const csvData = generateCSV(rows);

        res.setHeader("Content-Type", "text/csv");
        res.setHeader(
            "Content-Disposition",
            `attachment; filename="access-audit-logs-${data.orgId}-${Date.now()}.csv"`
        );
        return res.send(csvData);
    } catch (error) {
        logger.error(error);
        return next(
            createHttpError(HttpCode.INTERNAL_SERVER_ERROR, "An error occurred")
        );
    }
}

View File

@@ -0,0 +1,81 @@
/*
* This file is part of a proprietary work.
*
* Copyright (c) 2025 Fossorial, Inc.
* All rights reserved.
*
* This file is licensed under the Fossorial Commercial License.
* You may not use this file except in compliance with the License.
* Unauthorized use, copying, modification, or distribution is strictly prohibited.
*
* This file is not licensed under the AGPLv3.
*/
import { registry } from "@server/openApi";
import { NextFunction } from "express";
import { Request, Response } from "express";
import { OpenAPITags } from "@server/openApi";
import createHttpError from "http-errors";
import HttpCode from "@server/types/HttpCode";
import { fromError } from "zod-validation-error";
import logger from "@server/logger";
import { queryActionAuditLogsParams, queryActionAuditLogsQuery, queryAction } from "./queryActionAuditLog";
import { generateCSV } from "@server/routers/auditLogs/generateCSV";
// OpenAPI registration for the action-audit-log CSV export endpoint.
registry.registerPath({
    method: "get",
    path: "/org/{orgId}/logs/action/export",
    description: "Export the action audit log for an organization as CSV",
    tags: [OpenAPITags.Org],
    request: {
        query: queryActionAuditLogsQuery,
        params: queryActionAuditLogsParams
    },
    responses: {}
});
/**
 * GET /org/:orgId/logs/action/export
 *
 * Returns the filtered action audit log as a CSV attachment. Accepts the
 * same query filters and pagination as the query endpoint
 * (queryActionAuditLogsQuery); validation failures return 400.
 */
export async function exportActionAuditLogs(
    req: Request,
    res: Response,
    next: NextFunction
): Promise<any> {
    try {
        const parsedQuery = queryActionAuditLogsQuery.safeParse(req.query);
        if (!parsedQuery.success) {
            return next(
                createHttpError(
                    HttpCode.BAD_REQUEST,
                    fromError(parsedQuery.error)
                )
            );
        }

        const parsedParams = queryActionAuditLogsParams.safeParse(req.params);
        if (!parsedParams.success) {
            return next(
                createHttpError(
                    HttpCode.BAD_REQUEST,
                    fromError(parsedParams.error)
                )
            );
        }

        const data = { ...parsedQuery.data, ...parsedParams.data };

        const baseQuery = queryAction(data);
        const log = await baseQuery.limit(data.limit).offset(data.offset);

        const csvData = generateCSV(log);

        // orgId is caller-supplied; strip anything unsafe inside a quoted
        // filename so quotes/CR/LF can't break or inject response headers.
        const safeOrgId = data.orgId.replace(/[^a-zA-Z0-9._-]/g, "_");

        res.setHeader('Content-Type', 'text/csv');
        res.setHeader(
            'Content-Disposition',
            `attachment; filename="action-audit-logs-${safeOrgId}-${Date.now()}.csv"`
        );
        return res.send(csvData);
    } catch (error) {
        logger.error(error);
        return next(
            createHttpError(HttpCode.INTERNAL_SERVER_ERROR, "An error occurred")
        );
    }
}

View File

@@ -0,0 +1,17 @@
/*
* This file is part of a proprietary work.
*
* Copyright (c) 2025 Fossorial, Inc.
* All rights reserved.
*
* This file is licensed under the Fossorial Commercial License.
* You may not use this file except in compliance with the License.
* Unauthorized use, copying, modification, or distribution is strictly prohibited.
*
* This file is not licensed under the AGPLv3.
*/
export * from "./queryActionAuditLog";
export * from "./exportActionAuditLog";
export * from "./queryAccessAuditLog";
export * from "./exportAccessAuditLog";

View File

@@ -0,0 +1,258 @@
/*
* This file is part of a proprietary work.
*
* Copyright (c) 2025 Fossorial, Inc.
* All rights reserved.
*
* This file is licensed under the Fossorial Commercial License.
* You may not use this file except in compliance with the License.
* Unauthorized use, copying, modification, or distribution is strictly prohibited.
*
* This file is not licensed under the AGPLv3.
*/
import { accessAuditLog, db, resources } from "@server/db";
import { registry } from "@server/openApi";
import { NextFunction } from "express";
import { Request, Response } from "express";
import { eq, gt, lt, and, count } from "drizzle-orm";
import { OpenAPITags } from "@server/openApi";
import { z } from "zod";
import createHttpError from "http-errors";
import HttpCode from "@server/types/HttpCode";
import { fromError } from "zod-validation-error";
import { QueryAccessAuditLogResponse } from "@server/routers/auditLogs/types";
import response from "@server/lib/response";
import logger from "@server/logger";
// Query-string schema for the access audit log endpoints. Times arrive as
// ISO strings and are converted to unix seconds; numeric fields arrive as
// strings (query params) and are coerced.
export const queryAccessAuditLogsQuery = z.object({
    timeStart: z
        .string()
        .refine((val) => !isNaN(Date.parse(val)), {
            message: "timeStart must be a valid ISO date string"
        })
        .transform((val) => Math.floor(new Date(val).getTime() / 1000)),
    timeEnd: z
        .string()
        .refine((val) => !isNaN(Date.parse(val)), {
            message: "timeEnd must be a valid ISO date string"
        })
        .transform((val) => Math.floor(new Date(val).getTime() / 1000))
        .optional()
        // Lazy default: evaluated per parse. A bare `new Date().toISOString()`
        // would be computed once at module load, freezing the "now" upper
        // bound at server start time.
        .default(() => new Date().toISOString()),
    // Accepts a real boolean or the strings "true"/"false" (query params).
    action: z
        .union([z.boolean(), z.string()])
        .transform((val) => (typeof val === "string" ? val === "true" : val))
        .optional(),
    actorType: z.string().optional(),
    actorId: z.string().optional(),
    resourceId: z
        .string()
        .optional()
        .transform(Number)
        .pipe(z.number().int().positive())
        .optional(),
    actor: z.string().optional(),
    type: z.string().optional(),
    location: z.string().optional(),
    limit: z
        .string()
        .optional()
        .default("1000")
        .transform(Number)
        .pipe(z.number().int().positive()),
    offset: z
        .string()
        .optional()
        .default("0")
        .transform(Number)
        .pipe(z.number().int().nonnegative())
});
export const queryAccessAuditLogsParams = z.object({
orgId: z.string()
});
export const queryAccessAuditLogsCombined = queryAccessAuditLogsQuery.merge(
queryAccessAuditLogsParams
);
type Q = z.infer<typeof queryAccessAuditLogsCombined>;
/**
 * Builds the shared WHERE clause for access-audit-log queries. The time
 * window and org are always applied; each optional filter is added only
 * when present in the parsed query.
 */
function getWhere(data: Q) {
    const conditions = [
        gt(accessAuditLog.timestamp, data.timeStart),
        lt(accessAuditLog.timestamp, data.timeEnd),
        eq(accessAuditLog.orgId, data.orgId)
    ];

    if (data.resourceId) {
        conditions.push(eq(accessAuditLog.resourceId, data.resourceId));
    }
    if (data.actor) {
        conditions.push(eq(accessAuditLog.actor, data.actor));
    }
    if (data.actorType) {
        conditions.push(eq(accessAuditLog.actorType, data.actorType));
    }
    if (data.actorId) {
        conditions.push(eq(accessAuditLog.actorId, data.actorId));
    }
    if (data.location) {
        conditions.push(eq(accessAuditLog.location, data.location));
    }
    if (data.type) {
        conditions.push(eq(accessAuditLog.type, data.type));
    }
    // `action` is a boolean filter; false is a meaningful value, so only
    // skip it when it is truly absent.
    if (data.action !== undefined) {
        conditions.push(eq(accessAuditLog.action, data.action));
    }

    return and(...conditions);
}
/**
 * Base SELECT for the access audit log, left-joined to resources so each
 * row carries the resource's display name and niceId (null when no
 * matching resource row exists). Ordered by timestamp ascending; callers
 * apply limit/offset.
 */
export function queryAccess(data: Q) {
    const columns = {
        orgId: accessAuditLog.orgId,
        action: accessAuditLog.action,
        actorType: accessAuditLog.actorType,
        actorId: accessAuditLog.actorId,
        resourceId: accessAuditLog.resourceId,
        resourceName: resources.name,
        resourceNiceId: resources.niceId,
        ip: accessAuditLog.ip,
        location: accessAuditLog.location,
        userAgent: accessAuditLog.userAgent,
        metadata: accessAuditLog.metadata,
        type: accessAuditLog.type,
        timestamp: accessAuditLog.timestamp,
        actor: accessAuditLog.actor
    };

    return db
        .select(columns)
        .from(accessAuditLog)
        .leftJoin(
            resources,
            eq(accessAuditLog.resourceId, resources.resourceId)
        )
        .where(getWhere(data))
        .orderBy(accessAuditLog.timestamp);
}
/** Row count for the same filter set as queryAccess (no join needed). */
export function countAccessQuery(data: Q) {
    return db
        .select({ count: count() })
        .from(accessAuditLog)
        .where(getWhere(data));
}
/**
 * Collects the distinct actors, resources (with names), and locations seen
 * in the access audit log for an org within a time window. Used to drive
 * the filter dropdowns in the UI. Null values are dropped from each list.
 */
async function queryUniqueFilterAttributes(
    timeStart: number,
    timeEnd: number,
    orgId: string
) {
    const baseConditions = and(
        gt(accessAuditLog.timestamp, timeStart),
        lt(accessAuditLog.timestamp, timeEnd),
        eq(accessAuditLog.orgId, orgId)
    );

    // The three DISTINCT scans are independent — run them concurrently
    // instead of awaiting each in sequence.
    const [uniqueActors, uniqueLocations, uniqueResources] = await Promise.all([
        db
            .selectDistinct({
                actor: accessAuditLog.actor
            })
            .from(accessAuditLog)
            .where(baseConditions),
        db
            .selectDistinct({
                locations: accessAuditLog.location
            })
            .from(accessAuditLog)
            .where(baseConditions),
        db
            .selectDistinct({
                id: accessAuditLog.resourceId,
                name: resources.name
            })
            .from(accessAuditLog)
            .leftJoin(
                resources,
                eq(accessAuditLog.resourceId, resources.resourceId)
            )
            .where(baseConditions)
    ]);

    return {
        actors: uniqueActors
            .map((row) => row.actor)
            .filter((actor): actor is string => actor !== null),
        resources: uniqueResources.filter(
            (row): row is { id: number; name: string | null } => row.id !== null
        ),
        locations: uniqueLocations
            .map((row) => row.locations)
            .filter((location): location is string => location !== null)
    };
}
// OpenAPI registration for the access-audit-log query endpoint.
registry.registerPath({
    method: "get",
    path: "/org/{orgId}/logs/access",
    description: "Query the access audit log for an organization",
    tags: [OpenAPITags.Org],
    request: {
        query: queryAccessAuditLogsQuery,
        params: queryAccessAuditLogsParams
    },
    responses: {}
});
/**
 * GET /org/:orgId/logs/access
 *
 * Returns one page of access audit log rows plus the total matching count
 * and the distinct actor/resource/location values usable as filters.
 * Validation failures return 400; unexpected failures return 500.
 */
export async function queryAccessAuditLogs(
    req: Request,
    res: Response,
    next: NextFunction
): Promise<any> {
    try {
        const parsedQuery = queryAccessAuditLogsQuery.safeParse(req.query);
        if (!parsedQuery.success) {
            return next(
                createHttpError(
                    HttpCode.BAD_REQUEST,
                    fromError(parsedQuery.error)
                )
            );
        }

        const parsedParams = queryAccessAuditLogsParams.safeParse(req.params);
        if (!parsedParams.success) {
            return next(
                createHttpError(
                    HttpCode.BAD_REQUEST,
                    fromError(parsedParams.error)
                )
            );
        }

        const data = { ...parsedQuery.data, ...parsedParams.data };

        // The page of rows, the total count, and the distinct filter values
        // are independent queries — run them concurrently.
        const [log, totalCountResult, filterAttributes] = await Promise.all([
            queryAccess(data).limit(data.limit).offset(data.offset),
            countAccessQuery(data),
            queryUniqueFilterAttributes(
                data.timeStart,
                data.timeEnd,
                data.orgId
            )
        ]);
        const totalCount = totalCountResult[0].count;

        return response<QueryAccessAuditLogResponse>(res, {
            data: {
                log: log,
                pagination: {
                    total: totalCount,
                    limit: data.limit,
                    offset: data.offset
                },
                filterAttributes
            },
            success: true,
            error: false,
            message: "Access audit logs retrieved successfully",
            status: HttpCode.OK
        });
    } catch (error) {
        logger.error(error);
        return next(
            createHttpError(HttpCode.INTERNAL_SERVER_ERROR, "An error occurred")
        );
    }
}

View File

@@ -0,0 +1,211 @@
/*
* This file is part of a proprietary work.
*
* Copyright (c) 2025 Fossorial, Inc.
* All rights reserved.
*
* This file is licensed under the Fossorial Commercial License.
* You may not use this file except in compliance with the License.
* Unauthorized use, copying, modification, or distribution is strictly prohibited.
*
* This file is not licensed under the AGPLv3.
*/
import { actionAuditLog, db } from "@server/db";
import { registry } from "@server/openApi";
import { NextFunction } from "express";
import { Request, Response } from "express";
import { eq, gt, lt, and, count } from "drizzle-orm";
import { OpenAPITags } from "@server/openApi";
import { z } from "zod";
import createHttpError from "http-errors";
import HttpCode from "@server/types/HttpCode";
import { fromError } from "zod-validation-error";
import { QueryActionAuditLogResponse } from "@server/routers/auditLogs/types";
import response from "@server/lib/response";
import logger from "@server/logger";
// Query-string schema for the action audit log endpoints. Times arrive as
// ISO strings and are converted to unix seconds; limit/offset arrive as
// strings (query params) and are coerced to integers.
export const queryActionAuditLogsQuery = z.object({
    timeStart: z
        .string()
        .refine((val) => !isNaN(Date.parse(val)), {
            message: "timeStart must be a valid ISO date string"
        })
        .transform((val) => Math.floor(new Date(val).getTime() / 1000)),
    timeEnd: z
        .string()
        .refine((val) => !isNaN(Date.parse(val)), {
            message: "timeEnd must be a valid ISO date string"
        })
        .transform((val) => Math.floor(new Date(val).getTime() / 1000))
        .optional()
        // Lazy default: evaluated per parse. A bare `new Date().toISOString()`
        // would be computed once at module load, freezing the "now" upper
        // bound at server start time.
        .default(() => new Date().toISOString()),
    action: z.string().optional(),
    actorType: z.string().optional(),
    actorId: z.string().optional(),
    actor: z.string().optional(),
    limit: z
        .string()
        .optional()
        .default("1000")
        .transform(Number)
        .pipe(z.number().int().positive()),
    offset: z
        .string()
        .optional()
        .default("0")
        .transform(Number)
        .pipe(z.number().int().nonnegative())
});
export const queryActionAuditLogsParams = z.object({
orgId: z.string()
});
export const queryActionAuditLogsCombined =
queryActionAuditLogsQuery.merge(queryActionAuditLogsParams);
type Q = z.infer<typeof queryActionAuditLogsCombined>;
/**
 * Builds the shared WHERE clause for action-audit-log queries. The time
 * window and org are always applied; each optional filter is added only
 * when present in the parsed query.
 */
function getWhere(data: Q) {
    const conditions = [
        gt(actionAuditLog.timestamp, data.timeStart),
        lt(actionAuditLog.timestamp, data.timeEnd),
        eq(actionAuditLog.orgId, data.orgId)
    ];

    if (data.actor) {
        conditions.push(eq(actionAuditLog.actor, data.actor));
    }
    if (data.actorType) {
        conditions.push(eq(actionAuditLog.actorType, data.actorType));
    }
    if (data.actorId) {
        conditions.push(eq(actionAuditLog.actorId, data.actorId));
    }
    if (data.action) {
        conditions.push(eq(actionAuditLog.action, data.action));
    }

    return and(...conditions);
}
/**
 * Base SELECT for the action audit log. Ordered by timestamp ascending;
 * callers apply limit/offset.
 */
export function queryAction(data: Q) {
    const columns = {
        orgId: actionAuditLog.orgId,
        action: actionAuditLog.action,
        actorType: actionAuditLog.actorType,
        metadata: actionAuditLog.metadata,
        actorId: actionAuditLog.actorId,
        timestamp: actionAuditLog.timestamp,
        actor: actionAuditLog.actor
    };

    return db
        .select(columns)
        .from(actionAuditLog)
        .where(getWhere(data))
        .orderBy(actionAuditLog.timestamp);
}
/** Row count for the same filter set as queryAction. */
export function countActionQuery(data: Q) {
    return db
        .select({ count: count() })
        .from(actionAuditLog)
        .where(getWhere(data));
}
/**
 * Collects the distinct actors and actions seen in the action audit log
 * for an org within a time window. Used to drive the filter dropdowns in
 * the UI. Null values are dropped from each list.
 */
async function queryUniqueFilterAttributes(
    timeStart: number,
    timeEnd: number,
    orgId: string
) {
    const baseConditions = and(
        gt(actionAuditLog.timestamp, timeStart),
        lt(actionAuditLog.timestamp, timeEnd),
        eq(actionAuditLog.orgId, orgId)
    );

    // The two DISTINCT scans are independent — run them concurrently
    // instead of awaiting each in sequence.
    const [uniqueActors, uniqueActions] = await Promise.all([
        db
            .selectDistinct({
                actor: actionAuditLog.actor
            })
            .from(actionAuditLog)
            .where(baseConditions),
        db
            .selectDistinct({
                action: actionAuditLog.action
            })
            .from(actionAuditLog)
            .where(baseConditions)
    ]);

    return {
        actors: uniqueActors
            .map((row) => row.actor)
            .filter((actor): actor is string => actor !== null),
        actions: uniqueActions
            .map((row) => row.action)
            .filter((action): action is string => action !== null)
    };
}
// OpenAPI registration for the action-audit-log query endpoint.
registry.registerPath({
    method: "get",
    path: "/org/{orgId}/logs/action",
    description: "Query the action audit log for an organization",
    tags: [OpenAPITags.Org],
    request: {
        query: queryActionAuditLogsQuery,
        params: queryActionAuditLogsParams
    },
    responses: {}
});
/**
 * GET /org/:orgId/logs/action
 *
 * Returns one page of action audit log rows plus the total matching count
 * and the distinct actor/action values usable as filters. Validation
 * failures return 400; unexpected failures return 500.
 */
export async function queryActionAuditLogs(
    req: Request,
    res: Response,
    next: NextFunction
): Promise<any> {
    try {
        const parsedQuery = queryActionAuditLogsQuery.safeParse(req.query);
        if (!parsedQuery.success) {
            return next(
                createHttpError(
                    HttpCode.BAD_REQUEST,
                    fromError(parsedQuery.error)
                )
            );
        }

        const parsedParams = queryActionAuditLogsParams.safeParse(req.params);
        if (!parsedParams.success) {
            return next(
                createHttpError(
                    HttpCode.BAD_REQUEST,
                    fromError(parsedParams.error)
                )
            );
        }

        const data = { ...parsedQuery.data, ...parsedParams.data };

        // The page of rows, the total count, and the distinct filter values
        // are independent queries — run them concurrently.
        const [log, totalCountResult, filterAttributes] = await Promise.all([
            queryAction(data).limit(data.limit).offset(data.offset),
            countActionQuery(data),
            queryUniqueFilterAttributes(
                data.timeStart,
                data.timeEnd,
                data.orgId
            )
        ]);
        const totalCount = totalCountResult[0].count;

        return response<QueryActionAuditLogResponse>(res, {
            data: {
                log: log,
                pagination: {
                    total: totalCount,
                    limit: data.limit,
                    offset: data.offset
                },
                filterAttributes
            },
            success: true,
            error: false,
            message: "Action audit logs retrieved successfully",
            status: HttpCode.OK
        });
    } catch (error) {
        logger.error(error);
        return next(
            createHttpError(HttpCode.INTERNAL_SERVER_ERROR, "An error occurred")
        );
    }
}

View File

@@ -21,20 +21,22 @@ import * as domain from "#private/routers/domain";
import * as auth from "#private/routers/auth";
import * as license from "#private/routers/license";
import * as generateLicense from "./generatedLicense";
import * as logs from "#private/routers/auditLogs";
import * as misc from "#private/routers/misc";
import { Router } from "express";
import {
verifyOrgAccess,
verifyUserHasAction,
verifyUserIsOrgOwner,
verifyUserIsServerAdmin
} from "@server/middlewares";
import { ActionsEnum } from "@server/auth/actions";
import {
logActionAudit,
verifyCertificateAccess,
verifyIdpAccess,
verifyLoginPageAccess,
verifyRemoteExitNodeAccess
verifyRemoteExitNodeAccess,
verifyValidSubscription
} from "#private/middlewares";
import rateLimit, { ipKeyGenerator } from "express-rate-limit";
import createHttpError from "http-errors";
@@ -72,6 +74,7 @@ authenticated.put(
verifyValidLicense,
verifyOrgAccess,
verifyUserHasAction(ActionsEnum.createIdp),
logActionAudit(ActionsEnum.createIdp),
orgIdp.createOrgOidcIdp
);
@@ -81,6 +84,7 @@ authenticated.post(
verifyOrgAccess,
verifyIdpAccess,
verifyUserHasAction(ActionsEnum.updateIdp),
logActionAudit(ActionsEnum.updateIdp),
orgIdp.updateOrgOidcIdp
);
@@ -90,6 +94,7 @@ authenticated.delete(
verifyOrgAccess,
verifyIdpAccess,
verifyUserHasAction(ActionsEnum.deleteIdp),
logActionAudit(ActionsEnum.deleteIdp),
orgIdp.deleteOrgIdp
);
@@ -127,6 +132,7 @@ authenticated.post(
verifyOrgAccess,
verifyCertificateAccess,
verifyUserHasAction(ActionsEnum.restartCertificate),
logActionAudit(ActionsEnum.restartCertificate),
certificates.restartCertificate
);
@@ -152,6 +158,7 @@ if (build === "saas") {
"/org/:orgId/billing/create-checkout-session",
verifyOrgAccess,
verifyUserHasAction(ActionsEnum.billing),
logActionAudit(ActionsEnum.billing),
billing.createCheckoutSession
);
@@ -159,6 +166,7 @@ if (build === "saas") {
"/org/:orgId/billing/create-portal-session",
verifyOrgAccess,
verifyUserHasAction(ActionsEnum.billing),
logActionAudit(ActionsEnum.billing),
billing.createPortalSession
);
@@ -187,6 +195,24 @@ if (build === "saas") {
verifyOrgAccess,
generateLicense.generateNewLicense
);
authenticated.post(
"/send-support-request",
rateLimit({
windowMs: 15 * 60 * 1000,
max: 3,
keyGenerator: (req) =>
`sendSupportRequest:${req.user?.userId || ipKeyGenerator(req.ip || "")}`,
handler: (req, res, next) => {
const message = `You can only send 3 support requests every 15 minutes. Please try again later.`;
return next(
createHttpError(HttpCode.TOO_MANY_REQUESTS, message)
);
},
store: createStore()
}),
misc.sendSupportEmail
);
}
authenticated.get(
@@ -206,6 +232,7 @@ authenticated.put(
verifyValidLicense,
verifyOrgAccess,
verifyUserHasAction(ActionsEnum.createRemoteExitNode),
logActionAudit(ActionsEnum.createRemoteExitNode),
remoteExitNode.createRemoteExitNode
);
@@ -240,6 +267,7 @@ authenticated.delete(
verifyOrgAccess,
verifyRemoteExitNodeAccess,
verifyUserHasAction(ActionsEnum.deleteRemoteExitNode),
logActionAudit(ActionsEnum.deleteRemoteExitNode),
remoteExitNode.deleteRemoteExitNode
);
@@ -248,6 +276,7 @@ authenticated.put(
verifyValidLicense,
verifyOrgAccess,
verifyUserHasAction(ActionsEnum.createLoginPage),
logActionAudit(ActionsEnum.createLoginPage),
loginPage.createLoginPage
);
@@ -257,6 +286,7 @@ authenticated.post(
verifyOrgAccess,
verifyLoginPageAccess,
verifyUserHasAction(ActionsEnum.updateLoginPage),
logActionAudit(ActionsEnum.updateLoginPage),
loginPage.updateLoginPage
);
@@ -266,6 +296,7 @@ authenticated.delete(
verifyOrgAccess,
verifyLoginPageAccess,
verifyUserHasAction(ActionsEnum.deleteLoginPage),
logActionAudit(ActionsEnum.deleteLoginPage),
loginPage.deleteLoginPage
);
@@ -334,3 +365,41 @@ authenticated.post(
verifyUserIsServerAdmin,
license.recheckStatus
);
// Audit-log endpoints. Read-only queries require viewLogs; CSV exports
// require exportLogs and are themselves recorded in the action audit log.
// (The query routes previously checked exportLogs, which would deny users
// who may only view logs — the ActionsEnum defines the two separately.)
authenticated.get(
    "/org/:orgId/logs/action",
    verifyValidLicense,
    verifyValidSubscription,
    verifyOrgAccess,
    verifyUserHasAction(ActionsEnum.viewLogs),
    logs.queryActionAuditLogs
);

authenticated.get(
    "/org/:orgId/logs/action/export",
    verifyValidLicense,
    verifyValidSubscription,
    verifyOrgAccess,
    verifyUserHasAction(ActionsEnum.exportLogs),
    logActionAudit(ActionsEnum.exportLogs),
    logs.exportActionAuditLogs
);

authenticated.get(
    "/org/:orgId/logs/access",
    verifyValidLicense,
    verifyValidSubscription,
    verifyOrgAccess,
    verifyUserHasAction(ActionsEnum.viewLogs),
    logs.queryAccessAuditLogs
);

authenticated.get(
    "/org/:orgId/logs/access/export",
    verifyValidLicense,
    verifyValidSubscription,
    verifyOrgAccess,
    verifyUserHasAction(ActionsEnum.exportLogs),
    logActionAudit(ActionsEnum.exportLogs),
    logs.exportAccessAuditLogs
);

View File

@@ -16,7 +16,7 @@ import HttpCode from "@server/types/HttpCode";
import createHttpError from "http-errors";
import logger from "@server/logger";
import { response as sendResponse } from "@server/lib/response";
import privateConfig from "@server/private/lib/config";
import privateConfig from "#private/lib/config";
import { GenerateNewLicenseResponse } from "@server/routers/generatedLicense/types";
async function createNewLicense(orgId: string, licenseData: any): Promise<any> {

View File

@@ -16,7 +16,7 @@ import HttpCode from "@server/types/HttpCode";
import createHttpError from "http-errors";
import logger from "@server/logger";
import { response as sendResponse } from "@server/lib/response";
import privateConfig from "@server/private/lib/config";
import privateConfig from "#private/lib/config";
import { GeneratedLicenseKey, ListGeneratedLicenseKeysResponse } from "@server/routers/generatedLicense/types";
async function fetchLicenseKeys(orgId: string): Promise<any> {

View File

@@ -35,7 +35,9 @@ import {
loginPageOrg,
LoginPage,
resourceHeaderAuth,
ResourceHeaderAuth
ResourceHeaderAuth,
orgs,
requestAuditLog
} from "@server/db";
import {
resources,
@@ -73,6 +75,7 @@ import { validateResourceSessionToken } from "@server/auth/sessions/resource";
import { checkExitNodeOrg, resolveExitNodes } from "#private/lib/exitNodes";
import { maxmindLookup } from "@server/db/maxmind";
import { verifyResourceAccessToken } from "@server/auth/verifyResourceAccessToken";
import semver from "semver";
// Zod schemas for request validation
const getResourceByDomainParamsSchema = z
@@ -270,7 +273,8 @@ hybridRouter.get(
remoteExitNode.exitNodeId,
["newt", "local", "wireguard"], // Allow them to use all the site types
true, // But don't allow domain namespace resources
false // Dont include login pages
false, // Dont include login pages,
true // allow raw resources
);
return response(res, {
@@ -300,7 +304,8 @@ function loadEncryptData() {
return; // already loaded
}
encryptionKeyPath = privateConfig.getRawPrivateConfig().server.encryption_key_path;
encryptionKeyPath =
privateConfig.getRawPrivateConfig().server.encryption_key_path;
if (!fs.existsSync(encryptionKeyPath)) {
throw new Error(
@@ -1066,11 +1071,20 @@ hybridRouter.get(
);
}
const rules = await db
let rules = await db
.select()
.from(resourceRules)
.where(eq(resourceRules.resourceId, resourceId));
// backward compatibility: COUNTRY -> GEOIP
if ((remoteExitNode.version && semver.lt(remoteExitNode.version, "1.1.0")) || !remoteExitNode.version) {
for (const rule of rules) {
if (rule.match == "COUNTRY") {
rule.match = "GEOIP";
}
}
}
return response<(typeof resourceRules.$inferSelect)[]>(res, {
data: rules,
success: true,
@@ -1582,3 +1596,193 @@ hybridRouter.post(
}
}
);
// GET /org/:orgId/get-retention-days — called by remote exit nodes to learn
// how long request logs should be retained for an org.
hybridRouter.get(
    "/org/:orgId/get-retention-days",
    async (req: Request, res: Response, next: NextFunction) => {
        try {
            const parsedParams = getOrgLoginPageParamsSchema.safeParse(
                req.params
            );
            if (!parsedParams.success) {
                return next(
                    createHttpError(
                        HttpCode.BAD_REQUEST,
                        fromError(parsedParams.error).toString()
                    )
                );
            }
            const { orgId } = parsedParams.data;

            const remoteExitNode = req.remoteExitNode;
            if (!remoteExitNode || !remoteExitNode.exitNodeId) {
                return next(
                    createHttpError(
                        HttpCode.BAD_REQUEST,
                        "Remote exit node not found"
                    )
                );
            }

            // checkExitNodeOrg is truthy when the node is NOT allowed for
            // this org.
            if (await checkExitNodeOrg(remoteExitNode.exitNodeId, orgId)) {
                return next(
                    createHttpError(
                        HttpCode.FORBIDDEN,
                        "Exit node not allowed for this organization"
                    )
                );
            }

            const [org] = await db
                .select({
                    settingsLogRetentionDaysRequest:
                        orgs.settingsLogRetentionDaysRequest
                })
                .from(orgs)
                .where(eq(orgs.orgId, orgId))
                .limit(1);

            // Guard: an unknown orgId would otherwise throw on the property
            // access below and surface as a 500.
            if (!org) {
                return next(
                    createHttpError(
                        HttpCode.NOT_FOUND,
                        "Organization not found"
                    )
                );
            }

            return response(res, {
                data: {
                    settingsLogRetentionDaysRequest:
                        org.settingsLogRetentionDaysRequest
                },
                success: true,
                error: false,
                message: "Log retention days retrieved successfully",
                status: HttpCode.OK
            });
        } catch (error) {
            logger.error(error);
            return next(
                createHttpError(
                    HttpCode.INTERNAL_SERVER_ERROR,
                    "An error occurred..."
                )
            );
        }
    }
);
// Payload schema for the batched request-audit-log upload sent by remote
// exit nodes; each entry maps onto a row of `requestAuditLog`.
const batchLogsSchema = z.object({
    logs: z.array(
        z.object({
            timestamp: z.number(),
            orgId: z.string().optional(),
            actorType: z.string().optional(),
            actor: z.string().optional(),
            actorId: z.string().optional(),
            metadata: z.string().nullable(),
            // true = allowed, false = denied — presumably mirrors the
            // access-log `action` boolean; confirm against the sender.
            action: z.boolean(),
            resourceId: z.number().optional(),
            reason: z.number(),
            location: z.string().optional(),
            originalRequestURL: z.string(),
            scheme: z.string(),
            host: z.string(),
            path: z.string(),
            method: z.string(),
            ip: z.string().optional(),
            tls: z.boolean()
        })
    )
});
// POST /org/:orgId/logs/batch — remote exit nodes push request audit log
// entries in batches; all rows are inserted in a single statement.
hybridRouter.post(
    "/org/:orgId/logs/batch",
    async (req: Request, res: Response, next: NextFunction) => {
        try {
            const parsedParams = getOrgLoginPageParamsSchema.safeParse(
                req.params
            );
            if (!parsedParams.success) {
                return next(
                    createHttpError(
                        HttpCode.BAD_REQUEST,
                        fromError(parsedParams.error).toString()
                    )
                );
            }
            const { orgId } = parsedParams.data;

            const parsedBody = batchLogsSchema.safeParse(req.body);
            if (!parsedBody.success) {
                return next(
                    createHttpError(
                        HttpCode.BAD_REQUEST,
                        fromError(parsedBody.error).toString()
                    )
                );
            }
            const { logs } = parsedBody.data;

            const remoteExitNode = req.remoteExitNode;
            if (!remoteExitNode || !remoteExitNode.exitNodeId) {
                return next(
                    createHttpError(
                        HttpCode.BAD_REQUEST,
                        "Remote exit node not found"
                    )
                );
            }

            // checkExitNodeOrg is truthy when the node is NOT allowed for
            // this org.
            if (await checkExitNodeOrg(remoteExitNode.exitNodeId, orgId)) {
                return next(
                    createHttpError(
                        HttpCode.FORBIDDEN,
                        "Exit node not allowed for this organization"
                    )
                );
            }

            // Map the payload onto requestAuditLog columns for a single
            // batch insert.
            const logEntries = logs.map((logEntry) => ({
                timestamp: logEntry.timestamp,
                orgId: logEntry.orgId,
                actorType: logEntry.actorType,
                actor: logEntry.actor,
                actorId: logEntry.actorId,
                metadata: logEntry.metadata,
                action: logEntry.action,
                resourceId: logEntry.resourceId,
                reason: logEntry.reason,
                location: logEntry.location,
                // userAgent: data.userAgent, // TODO: add this
                // headers: data.body.headers,
                // query: data.body.query,
                originalRequestURL: logEntry.originalRequestURL,
                scheme: logEntry.scheme,
                host: logEntry.host,
                path: logEntry.path,
                method: logEntry.method,
                ip: logEntry.ip,
                tls: logEntry.tls
            }));

            // drizzle's .values() rejects an empty array — skip the insert
            // when the batch contains no entries.
            if (logEntries.length > 0) {
                await db.insert(requestAuditLog).values(logEntries);
            }

            return response(res, {
                data: null,
                success: true,
                error: false,
                message: "Logs saved successfully",
                status: HttpCode.OK
            });
        } catch (error) {
            logger.error(error);
            return next(
                createHttpError(
                    HttpCode.INTERNAL_SERVER_ERROR,
                    "An error occurred..."
                )
            );
        }
    }
);

View File

@@ -23,6 +23,7 @@ import {
import { ActionsEnum } from "@server/auth/actions";
import { unauthenticated as ua, authenticated as a } from "@server/routers/integration";
import { logActionAudit } from "#private/middlewares";
export const unauthenticated = ua;
export const authenticated = a;
@@ -31,12 +32,14 @@ authenticated.post(
`/org/:orgId/send-usage-notification`,
verifyApiKeyIsRoot, // We are the only ones who can use root key so its fine
verifyApiKeyHasAction(ActionsEnum.sendUsageNotification),
org.sendUsageNotification
logActionAudit(ActionsEnum.sendUsageNotification),
org.sendUsageNotification,
);
authenticated.delete(
"/idp/:idpId",
verifyApiKeyIsRoot,
verifyApiKeyHasAction(ActionsEnum.deleteIdp),
orgIdp.deleteOrgIdp
logActionAudit(ActionsEnum.deleteIdp),
orgIdp.deleteOrgIdp,
);

View File

@@ -25,7 +25,7 @@ import { LoadLoginPageResponse } from "@server/routers/loginPage/types";
const querySchema = z.object({
resourceId: z.coerce.number().int().positive().optional(),
idpId: z.coerce.number().int().positive().optional(),
orgId: z.coerce.number().int().positive().optional(),
orgId: z.string().min(1).optional(),
fullDomain: z.string().min(1)
});
@@ -89,7 +89,7 @@ export async function loadLoginPage(
const { resourceId, idpId, fullDomain } = parsedQuery.data;
let orgId;
let orgId: string | undefined = undefined;
if (resourceId) {
const [resource] = await db
.select()
@@ -118,7 +118,7 @@ export async function loadLoginPage(
orgId = idpOrgLink.orgId;
} else if (parsedQuery.data.orgId) {
orgId = parsedQuery.data.orgId.toString();
orgId = parsedQuery.data.orgId;
}
const loginPage = await query(orgId, fullDomain);

View File

@@ -0,0 +1 @@
export * from "./sendSupportEmail";

View File

@@ -0,0 +1,94 @@
/*
* This file is part of a proprietary work.
*
* Copyright (c) 2025 Fossorial, Inc.
* All rights reserved.
*
* This file is licensed under the Fossorial Commercial License.
* You may not use this file except in compliance with the License.
* Unauthorized use, copying, modification, or distribution is strictly prohibited.
*
* This file is not licensed under the AGPLv3.
*/
import { Request, Response, NextFunction } from "express";
import HttpCode from "@server/types/HttpCode";
import createHttpError from "http-errors";
import logger from "@server/logger";
import { response as sendResponse } from "@server/lib/response";
import { z } from "zod";
import { fromError } from "zod-validation-error";
import { sendEmail } from "@server/emails";
import SupportEmail from "@server/emails/templates/SupportEmail";
import config from "@server/lib/config";
// Support-request payload: a non-empty message body and a bounded subject
// line. `.strict()` rejects any extra fields.
const bodySchema = z
    .object({
        body: z.string().min(1),
        subject: z.string().min(1).max(255)
    })
    .strict();
/**
 * POST /send-support-request handler.
 *
 * Sends a support email (subject + body from the request) to
 * support@pangolin.net on behalf of the authenticated user. Returns 400
 * for invalid payloads or when the user has no email on their account;
 * send failures are logged and returned as 500 with the error text.
 *
 * NOTE(review): the message is sent with `from` set to the user's own
 * address when available — presumably so replies reach the user, but
 * sending as arbitrary from-addresses can fail SPF/DMARC on the receiving
 * side; confirm the mail provider permits this.
 */
export async function sendSupportEmail(
    req: Request,
    res: Response,
    next: NextFunction
): Promise<any> {
    try {
        const parsedBody = bodySchema.safeParse(req.body);
        if (!parsedBody.success) {
            return next(
                createHttpError(
                    HttpCode.BAD_REQUEST,
                    fromError(parsedBody.error).toString()
                )
            );
        }

        const { body, subject } = parsedBody.data;

        const user = req.user!;
        if (!user?.email) {
            return next(
                createHttpError(
                    HttpCode.BAD_REQUEST,
                    "User does not have an email associated with their account"
                )
            );
        }

        // Inner try so a mail-transport failure is reported with its own
        // error text rather than the generic outer 500 message.
        try {
            await sendEmail(
                SupportEmail({
                    username: user.username,
                    email: user.email,
                    subject,
                    body
                }),
                {
                    name: req.user?.email || "Support User",
                    to: "support@pangolin.net",
                    from: req.user?.email || config.getNoReplyEmail(),
                    subject: `Support Request: ${subject}`
                }
            );

            return sendResponse(res, {
                data: {},
                success: true,
                error: false,
                message: "Sent support email successfully",
                status: HttpCode.OK
            });
        } catch (e) {
            logger.error(e);
            return next(
                createHttpError(HttpCode.INTERNAL_SERVER_ERROR, `${e}`)
            );
        }
    } catch (error) {
        logger.error(error);
        return next(
            createHttpError(HttpCode.INTERNAL_SERVER_ERROR, "An error occurred")
        );
    }
}

View File

@@ -63,6 +63,7 @@ function queryAccessTokens(
description: resourceAccessToken.description,
createdAt: resourceAccessToken.createdAt,
resourceName: resources.name,
resourceNiceId: resources.niceId,
siteName: sites.name
};

View File

@@ -0,0 +1,68 @@
import { registry } from "@server/openApi";
import { NextFunction } from "express";
import { Request, Response } from "express";
import { OpenAPITags } from "@server/openApi";
import createHttpError from "http-errors";
import HttpCode from "@server/types/HttpCode";
import { fromError } from "zod-validation-error";
import logger from "@server/logger";
import { queryAccessAuditLogsQuery, queryRequestAuditLogsParams, queryRequest } from "./queryRequstAuditLog";
import { generateCSV } from "./generateCSV";
// OpenAPI registration for the request-audit-log CSV export endpoint.
// (Previously registered with the query endpoint's path/description —
// copy-paste error; the sibling access/action export files register
// their "/export" paths.)
registry.registerPath({
    method: "get",
    path: "/org/{orgId}/logs/request/export",
    description: "Export the request audit log for an organization as CSV",
    tags: [OpenAPITags.Org],
    request: {
        query: queryAccessAuditLogsQuery,
        params: queryRequestAuditLogsParams
    },
    responses: {}
});
/**
 * GET /org/:orgId/logs/request/export
 *
 * Returns the filtered request audit log as a CSV attachment. Accepts the
 * same query filters and pagination as the query endpoint; validation
 * failures return 400.
 */
export async function exportRequestAuditLogs(
    req: Request,
    res: Response,
    next: NextFunction
): Promise<any> {
    try {
        const parsedQuery = queryAccessAuditLogsQuery.safeParse(req.query);
        if (!parsedQuery.success) {
            return next(
                createHttpError(
                    HttpCode.BAD_REQUEST,
                    fromError(parsedQuery.error)
                )
            );
        }

        const parsedParams = queryRequestAuditLogsParams.safeParse(req.params);
        if (!parsedParams.success) {
            return next(
                createHttpError(
                    HttpCode.BAD_REQUEST,
                    fromError(parsedParams.error)
                )
            );
        }

        const data = { ...parsedQuery.data, ...parsedParams.data };

        const baseQuery = queryRequest(data);
        const log = await baseQuery.limit(data.limit).offset(data.offset);

        const csvData = generateCSV(log);

        // orgId is caller-supplied; strip anything unsafe inside a quoted
        // filename so quotes/CR/LF can't break or inject response headers.
        const safeOrgId = data.orgId.replace(/[^a-zA-Z0-9._-]/g, "_");

        res.setHeader('Content-Type', 'text/csv');
        res.setHeader(
            'Content-Disposition',
            `attachment; filename="request-audit-logs-${safeOrgId}-${Date.now()}.csv"`
        );
        return res.send(csvData);
    } catch (error) {
        logger.error(error);
        return next(
            createHttpError(HttpCode.INTERNAL_SERVER_ERROR, "An error occurred")
        );
    }
}

View File

@@ -0,0 +1,16 @@
/**
 * Serializes an array of flat row objects into RFC 4180-style CSV text.
 *
 * Column order is taken from the keys of the first row. Fields containing a
 * comma, double quote, or line break are wrapped in double quotes with inner
 * quotes doubled; null/undefined become empty fields.
 *
 * @param data - rows to serialize; all rows are assumed to share the first row's shape
 * @returns CSV document (header line first); a header-only stub when data is empty
 */
export function generateCSV(data: any[]): string {
    if (data.length === 0) {
        return "orgId,action,actorType,timestamp,actor\n";
    }

    // Quote/escape one field per RFC 4180. The previous version only quoted
    // strings containing commas, so embedded quotes and newlines corrupted
    // the output.
    const escapeField = (value: unknown): string => {
        if (value === null || value === undefined) {
            return "";
        }
        const text = String(value);
        return /[",\n\r]/.test(text)
            ? `"${text.replace(/"/g, '""')}"`
            : text;
    };

    const headers = Object.keys(data[0]).map(escapeField).join(",");
    const rows = data.map((row) =>
        Object.values(row).map(escapeField).join(",")
    );
    return [headers, ...rows].join("\n");
}

View File

@@ -0,0 +1,2 @@
export * from "./queryRequstAuditLog";
export * from "./exportRequstAuditLog";

View File

@@ -0,0 +1,276 @@
import { db, requestAuditLog, resources } from "@server/db";
import { registry } from "@server/openApi";
import { NextFunction } from "express";
import { Request, Response } from "express";
import { eq, gt, lt, and, count } from "drizzle-orm";
import { OpenAPITags } from "@server/openApi";
import { z } from "zod";
import createHttpError from "http-errors";
import HttpCode from "@server/types/HttpCode";
import { fromError } from "zod-validation-error";
import { QueryRequestAuditLogResponse } from "@server/routers/auditLogs/types";
import response from "@server/lib/response";
import logger from "@server/logger";
/**
 * Query-string schema shared by the request-audit-log query and export
 * endpoints. Timestamps arrive as ISO strings and are normalized to unix
 * seconds; numeric filters arrive as strings and are coerced.
 */
export const queryAccessAuditLogsQuery = z.object({
    // ISO string; just validate it's a parseable date, then store unix seconds
    timeStart: z
        .string()
        .refine((val) => !isNaN(Date.parse(val)), {
            message: "timeStart must be a valid ISO date string"
        })
        .transform((val) => Math.floor(new Date(val).getTime() / 1000)),
    timeEnd: z
        .string()
        .refine((val) => !isNaN(Date.parse(val)), {
            message: "timeEnd must be a valid ISO date string"
        })
        .transform((val) => Math.floor(new Date(val).getTime() / 1000))
        .optional()
        // Fixed: the default used to be evaluated once at module load, so
        // "now" was frozen at server start. A lazy function default is
        // evaluated per parse.
        .default(() => new Date().toISOString()),
    action: z
        .union([z.boolean(), z.string()])
        .transform((val) => (typeof val === "string" ? val === "true" : val))
        .optional(),
    method: z.enum(["GET", "POST", "PUT", "DELETE", "PATCH"]).optional(),
    reason: z
        .string()
        .optional()
        .transform(Number)
        .pipe(z.number().int().positive())
        .optional(),
    resourceId: z
        .string()
        .optional()
        .transform(Number)
        .pipe(z.number().int().positive())
        .optional(),
    actor: z.string().optional(),
    location: z.string().optional(),
    host: z.string().optional(),
    path: z.string().optional(),
    limit: z
        .string()
        .optional()
        .default("1000")
        .transform(Number)
        .pipe(z.number().int().positive()),
    offset: z
        .string()
        .optional()
        .default("0")
        .transform(Number)
        .pipe(z.number().int().nonnegative())
});

/** Path params: the organization whose logs are being queried. */
export const queryRequestAuditLogsParams = z.object({
    orgId: z.string()
});

/** Combined (query + params) shape consumed by the query builders below. */
export const queryRequestAuditLogsCombined =
    queryAccessAuditLogsQuery.merge(queryRequestAuditLogsParams);

type Q = z.infer<typeof queryRequestAuditLogsCombined>;
/**
 * Builds the shared WHERE clause for request-audit-log queries: a mandatory
 * time window + orgId, plus one equality condition per optional filter that
 * was supplied.
 */
function getWhere(data: Q) {
    const conditions = [
        gt(requestAuditLog.timestamp, data.timeStart),
        lt(requestAuditLog.timestamp, data.timeEnd),
        eq(requestAuditLog.orgId, data.orgId)
    ];

    if (data.resourceId) {
        conditions.push(eq(requestAuditLog.resourceId, data.resourceId));
    }
    if (data.actor) {
        conditions.push(eq(requestAuditLog.actor, data.actor));
    }
    if (data.method) {
        conditions.push(eq(requestAuditLog.method, data.method));
    }
    if (data.reason) {
        conditions.push(eq(requestAuditLog.reason, data.reason));
    }
    if (data.host) {
        conditions.push(eq(requestAuditLog.host, data.host));
    }
    if (data.location) {
        conditions.push(eq(requestAuditLog.location, data.location));
    }
    if (data.path) {
        conditions.push(eq(requestAuditLog.path, data.path));
    }
    if (data.action !== undefined) {
        conditions.push(eq(requestAuditLog.action, data.action));
    }

    return and(...conditions);
}
/**
 * Base SELECT for request-audit-log rows: every log column plus the joined
 * resource's name/niceId. Callers append .limit()/.offset() for pagination.
 * Ordered oldest-first by timestamp.
 */
export function queryRequest(data: Q) {
    return db
        .select({
            timestamp: requestAuditLog.timestamp,
            orgId: requestAuditLog.orgId,
            action: requestAuditLog.action,
            reason: requestAuditLog.reason,
            actorType: requestAuditLog.actorType,
            actor: requestAuditLog.actor,
            actorId: requestAuditLog.actorId,
            resourceId: requestAuditLog.resourceId,
            ip: requestAuditLog.ip,
            location: requestAuditLog.location,
            userAgent: requestAuditLog.userAgent,
            metadata: requestAuditLog.metadata,
            headers: requestAuditLog.headers,
            query: requestAuditLog.query,
            originalRequestURL: requestAuditLog.originalRequestURL,
            scheme: requestAuditLog.scheme,
            host: requestAuditLog.host,
            path: requestAuditLog.path,
            method: requestAuditLog.method,
            tls: requestAuditLog.tls,
            resourceName: resources.name,
            resourceNiceId: resources.niceId
        })
        .from(requestAuditLog)
        // Left join so log rows survive resource deletion (name comes back null).
        .leftJoin(
            resources,
            eq(requestAuditLog.resourceId, resources.resourceId)
        ) // TODO: Is this efficient?
        .where(getWhere(data))
        .orderBy(requestAuditLog.timestamp);
}
/** Counts the rows matching the same filters as queryRequest (for pagination totals). */
export function countRequestQuery(data: Q) {
    return db
        .select({ count: count() })
        .from(requestAuditLog)
        .where(getWhere(data));
}
// OpenAPI registration for the request-audit-log JSON query endpoint.
registry.registerPath({
    method: "get",
    path: "/org/{orgId}/logs/request",
    description: "Query the request audit log for an organization",
    tags: [OpenAPITags.Org],
    request: {
        query: queryAccessAuditLogsQuery,
        params: queryRequestAuditLogsParams
    },
    responses: {}
});
/**
 * Collects the distinct values present in an org's request audit log within
 * a time window; used to populate the UI filter dropdowns.
 *
 * @returns distinct actors, resources (id + name), locations, hosts, and
 *          paths, with null entries filtered out
 */
async function queryUniqueFilterAttributes(
    timeStart: number,
    timeEnd: number,
    orgId: string
) {
    const baseConditions = and(
        gt(requestAuditLog.timestamp, timeStart),
        lt(requestAuditLog.timestamp, timeEnd),
        eq(requestAuditLog.orgId, orgId)
    );

    // The five DISTINCT queries are independent; run them concurrently
    // instead of awaiting each in sequence.
    const [
        uniqueActors,
        uniqueLocations,
        uniqueHosts,
        uniquePaths,
        uniqueResources
    ] = await Promise.all([
        // Unique actors
        db
            .selectDistinct({ actor: requestAuditLog.actor })
            .from(requestAuditLog)
            .where(baseConditions),
        // Unique locations
        db
            .selectDistinct({ locations: requestAuditLog.location })
            .from(requestAuditLog)
            .where(baseConditions),
        // Unique hosts
        db
            .selectDistinct({ hosts: requestAuditLog.host })
            .from(requestAuditLog)
            .where(baseConditions),
        // Unique paths
        db
            .selectDistinct({ paths: requestAuditLog.path })
            .from(requestAuditLog)
            .where(baseConditions),
        // Unique resources, joined so the UI can show a name next to the id
        db
            .selectDistinct({
                id: requestAuditLog.resourceId,
                name: resources.name
            })
            .from(requestAuditLog)
            .leftJoin(
                resources,
                eq(requestAuditLog.resourceId, resources.resourceId)
            )
            .where(baseConditions)
    ]);

    return {
        actors: uniqueActors
            .map((row) => row.actor)
            .filter((actor): actor is string => actor !== null),
        resources: uniqueResources.filter(
            (row): row is { id: number; name: string | null } =>
                row.id !== null
        ),
        locations: uniqueLocations
            .map((row) => row.locations)
            .filter((location): location is string => location !== null),
        hosts: uniqueHosts
            .map((row) => row.hosts)
            .filter((host): host is string => host !== null),
        paths: uniquePaths
            .map((row) => row.paths)
            .filter((path): path is string => path !== null)
    };
}
/**
 * GET /org/{orgId}/logs/request — returns a paginated, filterable page of
 * the request audit log plus the distinct attribute values available for
 * filtering.
 */
export async function queryRequestAuditLogs(
    req: Request,
    res: Response,
    next: NextFunction
): Promise<any> {
    try {
        const parsedQuery = queryAccessAuditLogsQuery.safeParse(req.query);
        if (!parsedQuery.success) {
            return next(
                createHttpError(
                    HttpCode.BAD_REQUEST,
                    fromError(parsedQuery.error)
                )
            );
        }

        const parsedParams = queryRequestAuditLogsParams.safeParse(req.params);
        if (!parsedParams.success) {
            return next(
                createHttpError(
                    HttpCode.BAD_REQUEST,
                    fromError(parsedParams.error)
                )
            );
        }

        const data = { ...parsedQuery.data, ...parsedParams.data };

        // The page of rows, the total count, and the filter attributes are
        // independent queries — run them concurrently.
        const [log, totalCountResult, filterAttributes] = await Promise.all([
            queryRequest(data).limit(data.limit).offset(data.offset),
            countRequestQuery(data),
            queryUniqueFilterAttributes(
                data.timeStart,
                data.timeEnd,
                data.orgId
            )
        ]);
        const totalCount = totalCountResult[0].count;

        return response<QueryRequestAuditLogResponse>(res, {
            data: {
                log: log,
                pagination: {
                    total: totalCount,
                    limit: data.limit,
                    offset: data.offset
                },
                filterAttributes
            },
            success: true,
            error: false,
            // Fixed: previously said "Action audit logs" on the request-log route.
            message: "Request audit logs retrieved successfully",
            status: HttpCode.OK
        });
    } catch (error) {
        logger.error(error);
        return next(
            createHttpError(HttpCode.INTERNAL_SERVER_ERROR, "An error occurred")
        );
    }
}

View File

@@ -0,0 +1,93 @@
/**
 * Response shape for the action audit log query endpoint: one page of log
 * entries plus pagination info and the distinct actors usable as filters.
 */
export type QueryActionAuditLogResponse = {
    log: {
        orgId: string;
        action: string;
        actorType: string;
        actorId: string;
        metadata: string | null; // JSON-serialized, when present
        timestamp: number;
        actor: string;
    }[];
    pagination: {
        total: number;
        limit: number;
        offset: number;
    };
    filterAttributes: {
        actors: string[];
    };
};
/**
 * Response shape for the request audit log query endpoint (per-request
 * allow/deny decisions), including joined resource names and the distinct
 * values usable as filters.
 */
export type QueryRequestAuditLogResponse = {
    log: {
        timestamp: number; // unix seconds
        action: boolean; // true = request allowed, false = denied
        reason: number; // numeric reason code (see logRequestAudit's table)
        orgId: string | null;
        actorType: string | null; // "user" | "apiKey" when known
        actor: string | null;
        actorId: string | null;
        resourceId: number | null;
        resourceNiceId: string | null; // from the left-joined resources table
        resourceName: string | null; // from the left-joined resources table
        ip: string | null;
        location: string | null; // country code, when resolvable
        userAgent: string | null;
        metadata: string | null; // JSON-serialized, when present
        headers: string | null;
        query: string | null;
        originalRequestURL: string | null;
        scheme: string | null;
        host: string | null;
        path: string | null;
        method: string | null;
        tls: boolean | null;
    }[];
    pagination: {
        total: number;
        limit: number;
        offset: number;
    };
    filterAttributes: {
        actors: string[];
        resources: {
            id: number;
            name: string | null;
        }[];
        locations: string[];
        hosts: string[];
        paths: string[];
    };
};
/**
 * Response shape for the access audit log query endpoint: one page of
 * access events with joined resource names, plus pagination info and the
 * distinct values usable as filters.
 */
export type QueryAccessAuditLogResponse = {
    log: {
        orgId: string;
        action: boolean; // true = access granted, false = denied
        actorType: string | null;
        actorId: string | null;
        resourceId: number | null;
        resourceName: string | null;
        resourceNiceId: string | null;
        ip: string | null;
        location: string | null;
        userAgent: string | null;
        metadata: string | null; // JSON-serialized, when present
        type: string; // event type, e.g. "login"
        timestamp: number;
        actor: string | null;
    }[];
    pagination: {
        total: number;
        limit: number;
        offset: number;
    };
    filterAttributes: {
        actors: string[];
        resources: {
            id: number;
            name: string | null;
        }[];
        locations: string[];
    };
};

View File

@@ -5,7 +5,6 @@ import { fromError } from "zod-validation-error";
import { z } from "zod";
import { db } from "@server/db";
import { User, users } from "@server/db";
import { eq } from "drizzle-orm";
import { response } from "@server/lib/response";
import {
hashPassword,
@@ -15,8 +14,13 @@ import { verifyTotpCode } from "@server/auth/totp";
import logger from "@server/logger";
import { unauthorized } from "@server/auth/unauthorizedResponse";
import { invalidateAllSessions } from "@server/auth/sessions/app";
import { sessions, resourceSessions } from "@server/db";
import { and, eq, ne, inArray } from "drizzle-orm";
import { passwordSchema } from "@server/auth/passwordSchema";
import { UserType } from "@server/types/UserTypes";
import { sendEmail } from "@server/emails";
import ConfirmPasswordReset from "@server/emails/templates/NotifyResetPassword";
import config from "@server/lib/config";
export const changePasswordBody = z
.object({
@@ -32,6 +36,46 @@ export type ChangePasswordResponse = {
codeRequested?: boolean;
};
/**
 * Revokes every session belonging to userId other than currentSessionId,
 * together with the resource sessions that were issued from them.
 * Best-effort: failures are logged, not rethrown.
 */
async function invalidateAllSessionsExceptCurrent(
    userId: string,
    currentSessionId: string
): Promise<void> {
    // All of this user's sessions except the one currently in use.
    const otherSessionsFilter = and(
        eq(sessions.userId, userId),
        ne(sessions.sessionId, currentSessionId)
    );

    try {
        await db.transaction(async (trx) => {
            const doomed = await trx
                .select()
                .from(sessions)
                .where(otherSessionsFilter);

            // Remove resource sessions derived from the sessions being
            // revoked before deleting the sessions themselves.
            const doomedIds = doomed.map((s) => s.sessionId);
            if (doomedIds.length > 0) {
                await trx
                    .delete(resourceSessions)
                    .where(
                        inArray(resourceSessions.userSessionId, doomedIds)
                    );
            }

            await trx.delete(sessions).where(otherSessionsFilter);
        });
    } catch (e) {
        logger.error("Failed to invalidate user sessions except current", e);
    }
}
export async function changePassword(
req: Request,
res: Response,
@@ -109,13 +153,24 @@ export async function changePassword(
await db
.update(users)
.set({
passwordHash: hash
passwordHash: hash,
lastPasswordChange: new Date().getTime()
})
.where(eq(users.userId, user.userId));
await invalidateAllSessions(user.userId);
// Invalidate all sessions except the current one
await invalidateAllSessionsExceptCurrent(user.userId, req.session.sessionId);
// TODO: send email to user confirming password change
try {
const email = user.email!;
await sendEmail(ConfirmPasswordReset({ email }), {
from: config.getNoReplyEmail(),
to: email,
subject: "Password Reset Confirmation"
});
} catch (e) {
logger.error("Failed to send password reset confirmation email", e);
}
return response(res, {
data: null,

View File

@@ -3,7 +3,7 @@ import {
generateSessionToken,
serializeSessionCookie
} from "@server/auth/sessions/app";
import { db } from "@server/db";
import { db, resources } from "@server/db";
import { users, securityKeys } from "@server/db";
import HttpCode from "@server/types/HttpCode";
import response from "@server/lib/response";
@@ -18,12 +18,14 @@ import logger from "@server/logger";
import { verifyPassword } from "@server/auth/password";
import { verifySession } from "@server/auth/sessions/verifySession";
import { UserType } from "@server/types/UserTypes";
import { logAccessAudit } from "#dynamic/lib/logAccessAudit";
export const loginBodySchema = z
.object({
email: z.string().toLowerCase().email(),
password: z.string(),
code: z.string().optional()
code: z.string().optional(),
resourceGuid: z.string().optional()
})
.strict();
@@ -52,7 +54,7 @@ export async function login(
);
}
const { email, password, code } = parsedBody.data;
const { email, password, code, resourceGuid } = parsedBody.data;
try {
const { session: existingSession } = await verifySession(req);
@@ -66,6 +68,28 @@ export async function login(
});
}
let resourceId: number | null = null;
let orgId: string | null = null;
if (resourceGuid) {
const [resource] = await db
.select()
.from(resources)
.where(eq(resources.resourceGuid, resourceGuid))
.limit(1);
if (!resource) {
return next(
createHttpError(
HttpCode.NOT_FOUND,
`Resource with GUID ${resourceGuid} not found`
)
);
}
resourceId = resource.resourceId;
orgId = resource.orgId;
}
const existingUserRes = await db
.select()
.from(users)
@@ -78,6 +102,18 @@ export async function login(
`Username or password incorrect. Email: ${email}. IP: ${req.ip}.`
);
}
if (resourceId && orgId) {
logAccessAudit({
orgId: orgId,
resourceId: resourceId,
action: false,
type: "login",
userAgent: req.headers["user-agent"],
requestIp: req.ip
});
}
return next(
createHttpError(
HttpCode.UNAUTHORIZED,
@@ -98,6 +134,18 @@ export async function login(
`Username or password incorrect. Email: ${email}. IP: ${req.ip}.`
);
}
if (resourceId && orgId) {
logAccessAudit({
orgId: orgId,
resourceId: resourceId,
action: false,
type: "login",
userAgent: req.headers["user-agent"],
requestIp: req.ip
});
}
return next(
createHttpError(
HttpCode.UNAUTHORIZED,
@@ -158,6 +206,18 @@ export async function login(
`Two-factor code incorrect. Email: ${email}. IP: ${req.ip}.`
);
}
if (resourceId && orgId) {
logAccessAudit({
orgId: orgId,
resourceId: resourceId,
action: false,
type: "login",
userAgent: req.headers["user-agent"],
requestIp: req.ip
});
}
return next(
createHttpError(
HttpCode.UNAUTHORIZED,

View File

@@ -19,10 +19,7 @@ import { passwordSchema } from "@server/auth/passwordSchema";
export const resetPasswordBody = z
.object({
email: z
.string()
.toLowerCase()
.email(),
email: z.string().toLowerCase().email(),
token: z.string(), // reset secret code
newPassword: passwordSchema,
code: z.string().optional() // 2fa code
@@ -152,7 +149,7 @@ export async function resetPassword(
await db.transaction(async (trx) => {
await trx
.update(users)
.set({ passwordHash })
.set({ passwordHash, lastPasswordChange: new Date().getTime() })
.where(eq(users.userId, resetRequest[0].userId));
await trx

View File

@@ -98,7 +98,8 @@ export async function setServerAdmin(
passwordHash,
dateCreated: moment().toISOString(),
serverAdmin: true,
emailVerified: true
emailVerified: true,
lastPasswordChange: new Date().getTime()
});
});

View File

@@ -23,10 +23,7 @@ import { passwordSchema } from "@server/auth/passwordSchema";
import { UserType } from "@server/types/UserTypes";
import { createUserAccountOrg } from "@server/lib/createUserAccountOrg";
import { build } from "@server/build";
import resend, {
AudienceIds,
moveEmailToAudience
} from "#dynamic/lib/resend";
import resend, { AudienceIds, moveEmailToAudience } from "#dynamic/lib/resend";
export const signupBodySchema = z.object({
email: z.string().toLowerCase().email(),
@@ -183,7 +180,8 @@ export async function signup(
passwordHash,
dateCreated: moment().toISOString(),
termsAcceptedTimestamp: termsAcceptedTimestamp || null,
termsVersion: "1"
termsVersion: "1",
lastPasswordChange: new Date().getTime()
});
// give the user their default permissions:
@@ -224,7 +222,7 @@ export async function signup(
res.appendHeader("Set-Cookie", cookie);
if (build == "saas") {
moveEmailToAudience(email, AudienceIds.General);
moveEmailToAudience(email, AudienceIds.SignUps);
}
if (config.getRawConfig().flags?.require_email_verification) {

View File

@@ -0,0 +1,191 @@
import { db, orgs, requestAuditLog } from "@server/db";
import logger from "@server/logger";
import { and, eq, lt } from "drizzle-orm";
import cache from "@server/lib/cache";
/**
Reasons:
100 - Allowed by Rule
101 - Allowed No Auth
102 - Valid Access Token
103 - Valid header auth
104 - Valid Pincode
105 - Valid Password
106 - Valid email
107 - Valid SSO
201 - Resource Not Found
202 - Resource Blocked
203 - Dropped by Rule
204 - No Sessions
205 - Temporary Request Token
299 - No More Auth Methods
*/
/**
 * Resolves how many days of request audit logs an org retains, caching the
 * value for 5 minutes. Returns 0 for unknown orgs (callers treat 0 as
 * "do not log").
 */
async function getRetentionDays(orgId: string): Promise<number> {
    const cacheKey = `org_${orgId}_retentionDays`;

    // Serve from cache to avoid a DB round-trip per audited request.
    const cached = cache.get<number>(cacheKey);
    if (cached !== undefined) {
        return cached;
    }

    const rows = await db
        .select({
            settingsLogRetentionDaysRequest:
                orgs.settingsLogRetentionDaysRequest
        })
        .from(orgs)
        .where(eq(orgs.orgId, orgId))
        .limit(1);

    const org = rows[0];
    if (!org) {
        return 0;
    }

    // Remember the value for 5 minutes.
    cache.set(cacheKey, org.settingsLogRetentionDaysRequest, 300);
    return org.settingsLogRetentionDaysRequest;
}
/**
 * Deletes an org's request-audit-log rows older than retentionDays.
 * Best-effort: errors are logged and swallowed.
 */
export async function cleanUpOldLogs(orgId: string, retentionDays: number) {
    // Log timestamps are stored as unix seconds.
    const now = Math.floor(Date.now() / 1000);
    const cutoffTimestamp = now - retentionDays * 24 * 60 * 60;

    try {
        const deleteResult = await db
            .delete(requestAuditLog)
            .where(
                and(
                    lt(requestAuditLog.timestamp, cutoffTimestamp),
                    eq(requestAuditLog.orgId, orgId)
                )
            );
        // NOTE(review): `.changes` is the better-sqlite3 result shape; on a
        // Postgres driver the count is exposed differently (e.g. rowCount)
        // and this would log "undefined" — confirm against the configured
        // drizzle driver.
        logger.info(
            `Cleaned up ${deleteResult.changes} request audit logs older than ${retentionDays} days`
        );
    } catch (error) {
        logger.error("Error cleaning up old request audit logs:", error);
    }
}
/**
 * Persists one entry in the request audit log, unless the org's retention
 * setting is 0 (logging disabled). Never throws — failures are logged.
 *
 * @param data - outcome of the auth decision (action/reason per the reason
 *               code table at the top of this file) plus actor/resource info
 * @param body - request attributes forwarded by the proxy
 */
export async function logRequestAudit(
    data: {
        action: boolean;
        reason: number;
        resourceId?: number;
        orgId?: string;
        location?: string;
        user?: { username: string; userId: string };
        apiKey?: { name: string | null; apiKeyId: string };
        metadata?: any;
        // userAgent?: string;
    },
    body: {
        path: string;
        originalRequestURL: string;
        scheme: string;
        host: string;
        method: string;
        tls: boolean;
        sessions?: Record<string, string>;
        headers?: Record<string, string>;
        query?: Record<string, string>;
        requestIp?: string;
    }
) {
    try {
        // A retention of 0 means the org has request logging turned off.
        if (data.orgId) {
            const retentionDays = await getRetentionDays(data.orgId);
            if (retentionDays == 0) {
                // do not log
                return;
            }
        }

        // Resolve the acting principal; an api key wins over a user when both
        // are supplied (matches the original assignment order).
        let actorType: string | undefined;
        let actor: string | undefined;
        let actorId: string | undefined;
        const user = data.user;
        if (user) {
            actorType = "user";
            actor = user.username;
            actorId = user.userId;
        }
        const apiKey = data.apiKey;
        if (apiKey) {
            actorType = "apiKey";
            actor = apiKey.name || apiKey.apiKeyId;
            actorId = apiKey.apiKeyId;
        }

        const timestamp = Math.floor(Date.now() / 1000);

        // Fixed: a shadowing `let metadata = null` previously meant the
        // caller-supplied metadata was never serialized or stored.
        const metadata =
            data.metadata != null ? JSON.stringify(data.metadata) : null;

        const clientIp = body.requestIp
            ? stripPortFromIp(body.requestIp)
            : undefined;

        await db.insert(requestAuditLog).values({
            timestamp,
            orgId: data.orgId,
            actorType,
            actor,
            actorId,
            metadata,
            action: data.action,
            resourceId: data.resourceId,
            reason: data.reason,
            location: data.location,
            // userAgent: data.userAgent, // TODO: add this
            // headers: data.body.headers,
            // query: data.body.query,
            originalRequestURL: body.originalRequestURL,
            scheme: body.scheme,
            host: body.host,
            path: body.path,
            method: body.method,
            ip: clientIp,
            tls: body.tls
        });
    } catch (error) {
        logger.error(error);
    }
}

/**
 * Normalizes a remote-address string to a bare IP.
 * Handles "[v6addr]:port" (bracketed IPv6) and "v4addr:port"; bare IPv4 and
 * bare IPv6 are returned unchanged. (The previous version stripped
 * everything after the last colon, which corrupted unbracketed IPv6
 * addresses such as "::1".)
 */
function stripPortFromIp(requestIp: string): string {
    // Bracketed IPv6: take whatever is inside the brackets.
    if (requestIp.startsWith("[") && requestIp.includes("]")) {
        const ipv6Match = requestIp.match(/\[(.*?)\]/);
        if (ipv6Match) {
            return ipv6Match[1];
        }
    }
    // IPv4 with a port: drop the ":port" suffix.
    const v4WithPort = requestIp.match(/^(\d{1,3}(?:\.\d{1,3}){3}):\d+$/);
    if (v4WithPort) {
        return v4WithPort[1];
    }
    // Bare IPv4, bare IPv6, or anything unrecognized: leave as-is.
    return requestIp;
}

View File

@@ -1,9 +1,4 @@
import { generateSessionToken } from "@server/auth/sessions/app";
import {
createResourceSession,
serializeResourceSessionCookie,
validateResourceSessionToken
} from "@server/auth/sessions/resource";
import { validateResourceSessionToken } from "@server/auth/sessions/resource";
import { verifyResourceAccessToken } from "@server/auth/verifyResourceAccessToken";
import {
getResourceByDomain,
@@ -16,12 +11,13 @@ import {
} from "@server/db/queries/verifySessionQueries";
import {
LoginPage,
Org,
Resource,
ResourceAccessToken,
ResourceHeaderAuth,
ResourcePassword,
ResourcePincode,
ResourceRule
ResourceRule,
resourceSessions
} from "@server/db";
import config from "@server/lib/config";
import { isIpInCidr } from "@server/lib/ip";
@@ -30,18 +26,18 @@ import logger from "@server/logger";
import HttpCode from "@server/types/HttpCode";
import { NextFunction, Request, Response } from "express";
import createHttpError from "http-errors";
import NodeCache from "node-cache";
import { z } from "zod";
import { fromError } from "zod-validation-error";
import { getCountryCodeForIp } from "@server/lib/geoip";
import { getOrgTierData } from "#dynamic/lib/billing";
import { TierId } from "@server/lib/billing/tiers";
import { verifyPassword } from "@server/auth/password";
// We'll see if this speeds anything up
const cache = new NodeCache({
stdTTL: 5 // seconds
});
import {
checkOrgAccessPolicy,
enforceResourceSessionLength
} from "#dynamic/lib/checkOrgAccessPolicy";
import { logRequestAudit } from "./logRequestAudit";
import cache from "@server/lib/cache";
const verifyResourceSessionSchema = z.object({
sessions: z.record(z.string()).optional(),
@@ -127,6 +123,10 @@ export async function verifyResourceSession(
logger.debug("Client IP:", { clientIp });
const ipCC = clientIp
? await getCountryCodeFromIp(clientIp)
: undefined;
let cleanHost = host;
// if the host ends with :port, strip it
if (cleanHost.match(/:[0-9]{1,5}$/)) {
@@ -141,6 +141,7 @@ export async function verifyResourceSession(
pincode: ResourcePincode | null;
password: ResourcePassword | null;
headerAuth: ResourceHeaderAuth | null;
org: Org;
}
| undefined = cache.get(resourceCacheKey);
@@ -149,17 +150,43 @@ export async function verifyResourceSession(
if (!result) {
logger.debug(`Resource not found ${cleanHost}`);
// TODO: we cant log this for now because we dont know the org
// eventually it would be cool to show this for the server admin
// logRequestAudit(
// {
// action: false,
// reason: 201, //resource not found
// location: ipCC
// },
// parsedBody.data
// );
return notAllowed(res);
}
resourceData = result;
cache.set(resourceCacheKey, resourceData);
cache.set(resourceCacheKey, resourceData, 5);
}
const { resource, pincode, password, headerAuth } = resourceData;
if (!resource) {
logger.debug(`Resource not found ${cleanHost}`);
// TODO: we cant log this for now because we dont know the org
// eventually it would be cool to show this for the server admin
// logRequestAudit(
// {
// action: false,
// reason: 201, //resource not found
// location: ipCC
// },
// parsedBody.data
// );
return notAllowed(res);
}
@@ -167,6 +194,18 @@ export async function verifyResourceSession(
if (blockAccess) {
logger.debug("Resource blocked", host);
logRequestAudit(
{
action: false,
reason: 202, //resource blocked
resourceId: resource.resourceId,
orgId: resource.orgId,
location: ipCC
},
parsedBody.data
);
return notAllowed(res);
}
@@ -175,14 +214,40 @@ export async function verifyResourceSession(
const action = await checkRules(
resource.resourceId,
clientIp,
path
path,
ipCC
);
if (action == "ACCEPT") {
logger.debug("Resource allowed by rule");
logRequestAudit(
{
action: true,
reason: 100, // allowed by rule
resourceId: resource.resourceId,
orgId: resource.orgId,
location: ipCC
},
parsedBody.data
);
return allowed(res);
} else if (action == "DROP") {
logger.debug("Resource denied by rule");
// TODO: add rules type
logRequestAudit(
{
action: false,
reason: 203, // dropped by rules
resourceId: resource.resourceId,
orgId: resource.orgId,
location: ipCC
},
parsedBody.data
);
return notAllowed(res);
} else if (action == "PASS") {
logger.debug(
@@ -203,6 +268,18 @@ export async function verifyResourceSession(
!headerAuth
) {
logger.debug("Resource allowed because no auth");
logRequestAudit(
{
action: true,
reason: 101, // allowed no auth
resourceId: resource.resourceId,
orgId: resource.orgId,
location: ipCC
},
parsedBody.data
);
return allowed(res);
}
@@ -254,6 +331,21 @@ export async function verifyResourceSession(
}
if (valid && tokenItem) {
logRequestAudit(
{
action: true,
reason: 102, // valid access token
resourceId: resource.resourceId,
orgId: resource.orgId,
location: ipCC,
apiKey: {
name: tokenItem.title,
apiKeyId: tokenItem.accessTokenId
}
},
parsedBody.data
);
return allowed(res);
}
}
@@ -290,6 +382,21 @@ export async function verifyResourceSession(
}
if (valid && tokenItem) {
logRequestAudit(
{
action: true,
reason: 102, // valid access token
resourceId: resource.resourceId,
orgId: resource.orgId,
location: ipCC,
apiKey: {
name: tokenItem.title,
apiKeyId: tokenItem.accessTokenId
}
},
parsedBody.data
);
return allowed(res);
}
}
@@ -301,6 +408,18 @@ export async function verifyResourceSession(
logger.debug(
"Resource allowed because header auth is valid (cached)"
);
logRequestAudit(
{
action: true,
reason: 103, // valid header auth
resourceId: resource.resourceId,
orgId: resource.orgId,
location: ipCC
},
parsedBody.data
);
return allowed(res);
} else if (
await verifyPassword(
@@ -308,17 +427,41 @@ export async function verifyResourceSession(
headerAuth.headerAuthHash
)
) {
cache.set(clientHeaderAuthKey, clientHeaderAuth);
cache.set(clientHeaderAuthKey, clientHeaderAuth, 5);
logger.debug("Resource allowed because header auth is valid");
logRequestAudit(
{
action: true,
reason: 103, // valid header auth
resourceId: resource.resourceId,
orgId: resource.orgId,
location: ipCC
},
parsedBody.data
);
return allowed(res);
}
if ( // we dont want to redirect if this is the only auth method and we did not pass here
if (
// we dont want to redirect if this is the only auth method and we did not pass here
!sso &&
!pincode &&
!password &&
!resource.emailWhitelistEnabled
) {
logRequestAudit(
{
action: false,
reason: 299, // no more auth methods
resourceId: resource.resourceId,
orgId: resource.orgId,
location: ipCC
},
parsedBody.data
);
return notAllowed(res);
}
} else if (headerAuth) {
@@ -329,6 +472,17 @@ export async function verifyResourceSession(
!password &&
!resource.emailWhitelistEnabled
) {
logRequestAudit(
{
action: false,
reason: 299, // no more auth methods
resourceId: resource.resourceId,
orgId: resource.orgId,
location: ipCC
},
parsedBody.data
);
return notAllowed(res);
}
}
@@ -341,6 +495,18 @@ export async function verifyResourceSession(
}. IP: ${clientIp}.`
);
}
logRequestAudit(
{
action: false,
reason: 204, // no sessions
resourceId: resource.resourceId,
orgId: resource.orgId,
location: ipCC
},
parsedBody.data
);
return notAllowed(res);
}
@@ -360,7 +526,7 @@ export async function verifyResourceSession(
);
resourceSession = result?.resourceSession;
cache.set(sessionCacheKey, resourceSession);
cache.set(sessionCacheKey, resourceSession, 5);
}
if (resourceSession?.isRequestToken) {
@@ -374,14 +540,52 @@ export async function verifyResourceSession(
}. IP: ${clientIp}.`
);
}
logRequestAudit(
{
action: false,
reason: 205, // temporary request token
resourceId: resource.resourceId,
orgId: resource.orgId,
location: ipCC
},
parsedBody.data
);
return notAllowed(res);
}
if (resourceSession) {
            // only run this check if not an SSO session; SSO session length is checked later
const accessPolicy = await enforceResourceSessionLength(
resourceSession,
resourceData.org
);
if (!accessPolicy.valid) {
logger.debug(
"Resource session invalid due to org policy:",
accessPolicy.error
);
return notAllowed(res, redirectPath, resource.orgId);
}
if (pincode && resourceSession.pincodeId) {
logger.debug(
"Resource allowed because pincode session is valid"
);
logRequestAudit(
{
action: true,
reason: 104, // valid pincode
resourceId: resource.resourceId,
orgId: resource.orgId,
location: ipCC
},
parsedBody.data
);
return allowed(res);
}
@@ -389,6 +593,18 @@ export async function verifyResourceSession(
logger.debug(
"Resource allowed because password session is valid"
);
logRequestAudit(
{
action: true,
reason: 105, // valid password
resourceId: resource.resourceId,
orgId: resource.orgId,
location: ipCC
},
parsedBody.data
);
return allowed(res);
}
@@ -399,6 +615,18 @@ export async function verifyResourceSession(
logger.debug(
"Resource allowed because whitelist session is valid"
);
logRequestAudit(
{
action: true,
reason: 106, // valid email
resourceId: resource.resourceId,
orgId: resource.orgId,
location: ipCC
},
parsedBody.data
);
return allowed(res);
}
@@ -406,6 +634,22 @@ export async function verifyResourceSession(
logger.debug(
"Resource allowed because access token session is valid"
);
logRequestAudit(
{
action: true,
reason: 102, // valid access token
resourceId: resource.resourceId,
orgId: resource.orgId,
location: ipCC,
apiKey: {
name: resourceSession.accessTokenTitle,
apiKeyId: resourceSession.accessTokenId
}
},
parsedBody.data
);
return allowed(res);
}
@@ -420,10 +664,11 @@ export async function verifyResourceSession(
if (allowedUserData === undefined) {
allowedUserData = await isUserAllowedToAccessResource(
resourceSession.userSessionId,
resource
resource,
resourceData.org
);
cache.set(userAccessCacheKey, allowedUserData);
cache.set(userAccessCacheKey, allowedUserData, 5);
}
if (
@@ -433,6 +678,22 @@ export async function verifyResourceSession(
logger.debug(
"Resource allowed because user session is valid"
);
logRequestAudit(
{
action: true,
reason: 107, // valid sso
resourceId: resource.resourceId,
orgId: resource.orgId,
location: ipCC,
user: {
username: allowedUserData.username,
userId: resourceSession.userId
}
},
parsedBody.data
);
return allowed(res, allowedUserData);
}
}
@@ -451,6 +712,17 @@ export async function verifyResourceSession(
logger.debug(`Redirecting to login at ${redirectPath}`);
logRequestAudit(
{
action: false,
reason: 299, // no more auth methods
resourceId: resource.resourceId,
orgId: resource.orgId,
location: ipCC
},
parsedBody.data
);
return notAllowed(res, redirectPath, resource.orgId);
} catch (e) {
console.error(e);
@@ -562,7 +834,8 @@ function allowed(res: Response, userData?: BasicUserData) {
async function isUserAllowedToAccessResource(
userSessionId: string,
resource: Resource
resource: Resource,
org: Org
): Promise<BasicUserData | null> {
const result = await getUserSessionWithUser(userSessionId);
@@ -589,6 +862,18 @@ async function isUserAllowedToAccessResource(
return null;
}
const accessPolicy = await checkOrgAccessPolicy({
org,
user,
session
});
if (!accessPolicy.allowed || accessPolicy.error) {
logger.debug(`User not allowed by org access policy because`, {
accessPolicy
});
return null;
}
const roleResourceAccess = await getRoleResourceAccess(
resource.resourceId,
userOrgRole.roleId
@@ -621,7 +906,8 @@ async function isUserAllowedToAccessResource(
async function checkRules(
resourceId: number,
clientIp: string | undefined,
path: string | undefined
path: string | undefined,
ipCC?: string
): Promise<"ACCEPT" | "DROP" | "PASS" | undefined> {
const ruleCacheKey = `rules:${resourceId}`;
@@ -629,7 +915,7 @@ async function checkRules(
if (!rules) {
rules = await getResourceRules(resourceId);
cache.set(ruleCacheKey, rules);
cache.set(ruleCacheKey, rules, 5);
}
if (rules.length === 0) {
@@ -661,7 +947,7 @@ async function checkRules(
return rule.action as any;
} else if (
clientIp &&
rule.match == "GEOIP" &&
rule.match == "COUNTRY" &&
(await isIpInGeoIP(clientIp, rule.value))
) {
return rule.action as any;
@@ -790,11 +1076,20 @@ export function isPathAllowed(pattern: string, path: string): boolean {
return result;
}
async function isIpInGeoIP(ip: string, countryCode: string): Promise<boolean> {
if (countryCode == "ALL") {
/**
 * Checks a client's resolved country code against a rule's country code.
 *
 * NOTE(review): despite the legacy name, this no longer receives an IP —
 * both arguments are country codes (the caller resolves the IP to a country
 * beforehand via getCountryCodeFromIp).
 *
 * @param ipCountryCode country code resolved from the client IP
 * @param checkCountryCode country code configured on the rule; "ALL" matches everything
 * @returns true when the rule is "ALL" or the codes match case-insensitively
 */
async function isIpInGeoIP(
    ipCountryCode: string,
    checkCountryCode: string
): Promise<boolean> {
    if (checkCountryCode == "ALL") {
        return true;
    }
    // Fixed: the previous message labeled the country code as an "IP",
    // which made the debug output misleading.
    logger.debug(
        `Comparing client country ${ipCountryCode} against rule country: ${checkCountryCode}`
    );
    // Optional chaining kept defensively in case an undefined lookup result
    // reaches here at runtime despite the declared string type.
    return ipCountryCode?.toUpperCase() === checkCountryCode.toUpperCase();
}
async function getCountryCodeFromIp(ip: string): Promise<string | undefined> {
const geoIpCacheKey = `geoip:${ip}`;
let cachedCountryCode: string | undefined = cache.get(geoIpCacheKey);
@@ -805,9 +1100,7 @@ async function isIpInGeoIP(ip: string, countryCode: string): Promise<boolean> {
cache.set(geoIpCacheKey, cachedCountryCode, 300); // 5 minutes
}
logger.debug(`IP ${ip} is in country: ${cachedCountryCode}`);
return cachedCountryCode?.toUpperCase() === countryCode.toUpperCase();
return cachedCountryCode;
}
function extractBasicAuth(

View File

@@ -1,4 +1,4 @@
import { db } from "@server/db";
import { db, olms } from "@server/db";
import {
clients,
orgs,
@@ -16,6 +16,67 @@ import createHttpError from "http-errors";
import { z } from "zod";
import { fromError } from "zod-validation-error";
import { OpenAPITags, registry } from "@server/openApi";
import NodeCache from "node-cache";
import semver from "semver";
const olmVersionCache = new NodeCache({ stdTTL: 3600 });
/**
 * Resolves the newest Olm release tag from the GitHub tags API.
 *
 * Results are cached for an hour (olmVersionCache stdTTL) and every failure
 * path returns null so callers can skip the "update available" decoration
 * instead of failing the request.
 *
 * @returns the latest tag name (e.g. "1.2.3"), or null when it cannot be determined
 */
async function getLatestOlmVersion(): Promise<string | null> {
    try {
        const cachedVersion = olmVersionCache.get<string>("latestOlmVersion");
        if (cachedVersion) {
            return cachedVersion;
        }

        // Abort the request after 1.5s so a slow GitHub API cannot delay the
        // client listing this check decorates.
        const controller = new AbortController();
        const timeoutId = setTimeout(() => controller.abort(), 1500);

        let response: Awaited<ReturnType<typeof fetch>>;
        try {
            response = await fetch(
                "https://api.github.com/repos/fosrl/olm/tags",
                {
                    signal: controller.signal
                }
            );
        } finally {
            // Fixed: clear the timer in finally — previously it leaked (and
            // fired a useless abort) whenever fetch itself threw.
            clearTimeout(timeoutId);
        }

        if (!response.ok) {
            logger.warn(
                `Failed to fetch latest Olm version from GitHub: ${response.status} ${response.statusText}`
            );
            return null;
        }

        const tags = await response.json();
        if (!Array.isArray(tags) || tags.length === 0) {
            logger.warn("No tags found for Olm repository");
            return null;
        }

        // Assumes GitHub returns tags newest-first (as the original did) —
        // TODO confirm. Validate the payload shape before trusting it.
        const latestVersion = tags[0]?.name;
        if (typeof latestVersion !== "string" || latestVersion.length === 0) {
            logger.warn("Unexpected tag payload from Olm repository");
            return null;
        }

        olmVersionCache.set("latestOlmVersion", latestVersion);
        return latestVersion;
    } catch (error: any) {
        if (error.name === "AbortError") {
            logger.warn(
                "Request to fetch latest Olm version timed out (1.5s)"
            );
        } else if (error.cause?.code === "UND_ERR_CONNECT_TIMEOUT") {
            logger.warn(
                "Connection timeout while fetching latest Olm version"
            );
        } else {
            logger.warn(
                "Error fetching latest Olm version:",
                error.message || error
            );
        }
        return null;
    }
}
const listClientsParamsSchema = z
.object({
@@ -50,10 +111,12 @@ function queryClients(orgId: string, accessibleClientIds: number[]) {
megabytesOut: clients.megabytesOut,
orgName: orgs.name,
type: clients.type,
online: clients.online
online: clients.online,
olmVersion: olms.version
})
.from(clients)
.leftJoin(orgs, eq(clients.orgId, orgs.orgId))
.leftJoin(olms, eq(clients.clientId, olms.clientId))
.where(
and(
inArray(clients.clientId, accessibleClientIds),
@@ -77,12 +140,20 @@ async function getSiteAssociations(clientIds: number[]) {
.where(inArray(clientSites.clientId, clientIds));
}
type OlmWithUpdateAvailable = Awaited<ReturnType<typeof queryClients>>[0] & {
olmUpdateAvailable?: boolean;
};
export type ListClientsResponse = {
clients: Array<Awaited<ReturnType<typeof queryClients>>[0] & { sites: Array<{
siteId: number;
siteName: string | null;
siteNiceId: string | null;
}> }>;
clients: Array<Awaited<ReturnType<typeof queryClients>>[0] & {
sites: Array<{
siteId: number;
siteName: string | null;
siteNiceId: string | null;
}>
olmUpdateAvailable?: boolean;
}>;
pagination: { total: number; limit: number; offset: number };
};
@@ -206,6 +277,43 @@ export async function listClients(
sites: sitesByClient[client.clientId] || []
}));
const latestOlVersionPromise = getLatestOlmVersion();
const olmsWithUpdates: OlmWithUpdateAvailable[] = clientsWithSites.map(
(client) => {
const OlmWithUpdate: OlmWithUpdateAvailable = { ...client };
// Initially set to false, will be updated if version check succeeds
OlmWithUpdate.olmUpdateAvailable = false;
return OlmWithUpdate;
}
);
// Try to get the latest version, but don't block if it fails
try {
const latestOlVersion = await latestOlVersionPromise;
if (latestOlVersion) {
olmsWithUpdates.forEach((client) => {
try {
client.olmUpdateAvailable = semver.lt(
client.olmVersion ? client.olmVersion : "",
latestOlVersion
);
} catch (error) {
client.olmUpdateAvailable = false;
}
});
}
} catch (error) {
// Log the error but don't let it block the response
logger.warn(
"Failed to check for OLM updates, continuing without update info:",
error
);
}
return response<ListClientsResponse>(res, {
data: {
clients: clientsWithSites,

View File

@@ -1,6 +1,6 @@
import { Request, Response, NextFunction } from "express";
import { z } from "zod";
import { db, Domain, domains, OrgDomains, orgDomains } from "@server/db";
import { db, Domain, domains, OrgDomains, orgDomains, dnsRecords } from "@server/db";
import response from "@server/lib/response";
import HttpCode from "@server/types/HttpCode";
import createHttpError from "http-errors";
@@ -24,16 +24,21 @@ const paramsSchema = z
const bodySchema = z
.object({
type: z.enum(["ns", "cname", "wildcard"]),
baseDomain: subdomainSchema
baseDomain: subdomainSchema,
certResolver: z.string().optional().nullable(),
preferWildcardCert: z.boolean().optional().nullable() // optional, only for wildcard
})
.strict();
export type CreateDomainResponse = {
domainId: string;
nsRecords?: string[];
cnameRecords?: { baseDomain: string; value: string }[];
aRecords?: { baseDomain: string; value: string }[];
txtRecords?: { baseDomain: string; value: string }[];
certResolver?: string | null;
preferWildcardCert?: boolean | null;
};
// Helper to check if a domain is a subdomain or equal to another domain
@@ -71,7 +76,7 @@ export async function createOrgDomain(
}
const { orgId } = parsedParams.data;
const { type, baseDomain } = parsedBody.data;
const { type, baseDomain, certResolver, preferWildcardCert } = parsedBody.data;
if (build == "oss") {
if (type !== "wildcard") {
@@ -254,7 +259,9 @@ export async function createOrgDomain(
domainId,
baseDomain,
type,
verified: type === "wildcard" ? true : false
verified: type === "wildcard" ? true : false,
certResolver: certResolver || null,
preferWildcardCert: preferWildcardCert || false
})
.returning();
@@ -269,9 +276,23 @@ export async function createOrgDomain(
})
.returning();
// Prepare DNS records to insert
const recordsToInsert = [];
// TODO: This needs to be cross region and not hardcoded
if (type === "ns") {
nsRecords = config.getRawConfig().dns.nameservers as string[];
// Save NS records to database
for (const nsValue of nsRecords) {
recordsToInsert.push({
domainId,
recordType: "NS",
baseDomain: baseDomain,
value: nsValue,
verified: false
});
}
} else if (type === "cname") {
cnameRecords = [
{
@@ -283,6 +304,17 @@ export async function createOrgDomain(
baseDomain: `_acme-challenge.${baseDomain}`
}
];
// Save CNAME records to database
for (const cnameRecord of cnameRecords) {
recordsToInsert.push({
domainId,
recordType: "CNAME",
baseDomain: cnameRecord.baseDomain,
value: cnameRecord.value,
verified: false
});
}
} else if (type === "wildcard") {
aRecords = [
{
@@ -294,6 +326,22 @@ export async function createOrgDomain(
baseDomain: `${baseDomain}`
}
];
// Save A records to database
for (const aRecord of aRecords) {
recordsToInsert.push({
domainId,
recordType: "A",
baseDomain: aRecord.baseDomain,
value: aRecord.value,
verified: true
});
}
}
// Insert all DNS records in batch
if (recordsToInsert.length > 0) {
await trx.insert(dnsRecords).values(recordsToInsert);
}
numOrgDomains = await trx
@@ -325,7 +373,9 @@ export async function createOrgDomain(
cnameRecords,
txtRecords,
nsRecords,
aRecords
aRecords,
certResolver: returned.certResolver,
preferWildcardCert: returned.preferWildcardCert
},
success: true,
error: false,

View File

@@ -0,0 +1,97 @@
import { Request, Response, NextFunction } from "express";
import { z } from "zod";
import { db, dnsRecords } from "@server/db";
import { eq } from "drizzle-orm";
import response from "@server/lib/response";
import HttpCode from "@server/types/HttpCode";
import createHttpError from "http-errors";
import logger from "@server/logger";
import { fromError } from "zod-validation-error";
import { OpenAPITags, registry } from "@server/openApi";
import { getServerIp } from "@server/lib/serverIpService"; // in-memory cache of this server's public IP

const getDNSRecordsSchema = z
    .object({
        domainId: z.string(),
        orgId: z.string()
    })
    .strict();

// Fetches every stored DNS record for a domain.
async function query(domainId: string) {
    const records = await db
        .select()
        .from(dnsRecords)
        .where(eq(dnsRecords.domainId, domainId));
    return records;
}

export type GetDNSRecordsResponse = Awaited<ReturnType<typeof query>>;

registry.registerPath({
    method: "get",
    path: "/org/{orgId}/domain/{domainId}/dns-records",
    description: "Get all DNS records for a domain by domainId.",
    tags: [OpenAPITags.Domain],
    request: {
        params: z.object({
            domainId: z.string(),
            orgId: z.string()
        })
    },
    responses: {}
});

/**
 * GET /org/:orgId/domain/:domainId/dns-records
 *
 * Returns the stored DNS records for a domain, substituting this server's
 * current public IP into address records so the UI shows where the records
 * should point right now.
 *
 * NOTE(review): orgId is validated but not used to scope the query — access
 * is assumed to be enforced by verifyOrgAccess upstream. Consider joining
 * orgDomains here so a domainId from another org cannot be read.
 */
export async function getDNSRecords(
    req: Request,
    res: Response,
    next: NextFunction
): Promise<any> {
    try {
        const parsedParams = getDNSRecordsSchema.safeParse(req.params);
        if (!parsedParams.success) {
            return next(
                createHttpError(
                    HttpCode.BAD_REQUEST,
                    fromError(parsedParams.error).toString()
                )
            );
        }

        const { domainId } = parsedParams.data;

        const records = await query(domainId);

        if (!records || records.length === 0) {
            return next(
                createHttpError(
                    HttpCode.NOT_FOUND,
                    "No DNS records found for this domain"
                )
            );
        }

        const serverIp = getServerIp();

        // Override the value of address records with the live server IP.
        // Fixed: the original tested `baseDomain === "*"`, which never
        // matches rows written by createOrgDomain (wildcard rows are stored
        // as "*.<domain>") — match any "*"-prefixed name instead. TODO:
        // confirm against the record-insertion code.
        const updatedRecords = records.map((record) => {
            if (
                (record.recordType === "A" ||
                    record.baseDomain.startsWith("*")) &&
                serverIp
            ) {
                return { ...record, value: serverIp };
            }
            return record;
        });

        return response<GetDNSRecordsResponse>(res, {
            data: updatedRecords,
            success: true,
            error: false,
            message: "DNS records retrieved successfully",
            status: HttpCode.OK
        });
    } catch (error) {
        logger.error(error);
        return next(
            createHttpError(HttpCode.INTERNAL_SERVER_ERROR, "An error occurred")
        );
    }
}

View File

@@ -0,0 +1,86 @@
import { Request, Response, NextFunction } from "express";
import { z } from "zod";
import { db, domains, orgDomains } from "@server/db";
import { eq, and } from "drizzle-orm";
import response from "@server/lib/response";
import HttpCode from "@server/types/HttpCode";
import createHttpError from "http-errors";
import logger from "@server/logger";
import { fromError } from "zod-validation-error";
import { OpenAPITags, registry } from "@server/openApi";
// Fixed: removed the unused `import { domain } from "zod/v4/core/regexes"`
// — an accidental import of zod internals that shadowed nothing useful.

const getDomainSchema = z
    .object({
        domainId: z.string().optional(),
        orgId: z.string().optional()
    })
    .strict();

/**
 * Looks up a domain row by id, optionally scoped to an org.
 *
 * When orgId is provided the domain must be linked to that org through
 * orgDomains; otherwise undefined is returned so the caller responds 404.
 * Fixed: the original ignored orgId entirely, letting any authenticated org
 * member read any domain in the system by guessing its id.
 */
async function query(domainId?: string, orgId?: string) {
    if (!domainId) {
        return undefined;
    }
    if (orgId) {
        const [link] = await db
            .select()
            .from(orgDomains)
            .where(
                and(
                    eq(orgDomains.domainId, domainId),
                    eq(orgDomains.orgId, orgId)
                )
            )
            .limit(1);
        if (!link) {
            return undefined;
        }
    }
    const [res] = await db
        .select()
        .from(domains)
        .where(eq(domains.domainId, domainId))
        .limit(1);
    return res;
}

export type GetDomainResponse = NonNullable<Awaited<ReturnType<typeof query>>>;

registry.registerPath({
    method: "get",
    path: "/org/{orgId}/domain/{domainId}",
    description: "Get a domain by domainId.",
    tags: [OpenAPITags.Domain],
    request: {
        params: z.object({
            domainId: z.string(),
            orgId: z.string()
        })
    },
    responses: {}
});

/**
 * GET /org/:orgId/domain/:domainId
 *
 * Returns the domain row, or 404 when it does not exist or is not linked to
 * the requesting org.
 */
export async function getDomain(
    req: Request,
    res: Response,
    next: NextFunction
): Promise<any> {
    try {
        const parsedParams = getDomainSchema.safeParse(req.params);
        if (!parsedParams.success) {
            return next(
                createHttpError(
                    HttpCode.BAD_REQUEST,
                    fromError(parsedParams.error).toString()
                )
            );
        }

        const { orgId, domainId } = parsedParams.data;

        const domain = await query(domainId, orgId);

        if (!domain) {
            return next(createHttpError(HttpCode.NOT_FOUND, "Domain not found"));
        }

        return response<GetDomainResponse>(res, {
            data: domain,
            success: true,
            error: false,
            message: "Domain retrieved successfully",
            status: HttpCode.OK
        });
    } catch (error) {
        logger.error(error);
        return next(
            createHttpError(HttpCode.INTERNAL_SERVER_ERROR, "An error occurred")
        );
    }
}

View File

@@ -1,4 +1,7 @@
export * from "./listDomains";
export * from "./createOrgDomain";
export * from "./deleteOrgDomain";
export * from "./restartOrgDomain";
export * from "./restartOrgDomain";
export * from "./getDomain";
export * from "./getDNSRecords";
export * from "./updateDomain";

View File

@@ -42,7 +42,9 @@ async function queryDomains(orgId: string, limit: number, offset: number) {
type: domains.type,
failed: domains.failed,
tries: domains.tries,
configManaged: domains.configManaged
configManaged: domains.configManaged,
certResolver: domains.certResolver,
preferWildcardCert: domains.preferWildcardCert
})
.from(orgDomains)
.where(eq(orgDomains.orgId, orgId))

View File

@@ -0,0 +1,161 @@
import { Request, Response, NextFunction } from "express";
import { z } from "zod";
import { db, domains, orgDomains } from "@server/db";
import response from "@server/lib/response";
import HttpCode from "@server/types/HttpCode";
import createHttpError from "http-errors";
import logger from "@server/logger";
import { fromError } from "zod-validation-error";
import { eq, and } from "drizzle-orm";
import { OpenAPITags, registry } from "@server/openApi";

const paramsSchema = z
    .object({
        orgId: z.string(),
        domainId: z.string()
    })
    .strict();

const bodySchema = z
    .object({
        certResolver: z.string().optional().nullable(),
        preferWildcardCert: z.boolean().optional().nullable()
    })
    .strict();

export type UpdateDomainResponse = {
    domainId: string;
    certResolver: string | null;
    preferWildcardCert: boolean | null;
};

registry.registerPath({
    method: "patch",
    path: "/org/{orgId}/domain/{domainId}",
    description: "Update a domain by domainId.",
    tags: [OpenAPITags.Domain],
    request: {
        params: z.object({
            domainId: z.string(),
            orgId: z.string()
        }),
        // Fixed: the request body schema was missing from the OpenAPI
        // registration, so generated docs showed no editable fields.
        body: {
            content: {
                "application/json": {
                    schema: bodySchema
                }
            }
        }
    },
    responses: {}
});

/**
 * PATCH /org/:orgId/domain/:domainId
 *
 * Updates cert settings (certResolver, preferWildcardCert) on a wildcard
 * domain that belongs to the org. Responds 404 when the domain is missing or
 * not linked to the org, and 400 for non-wildcard domains.
 */
export async function updateOrgDomain(
    req: Request,
    res: Response,
    next: NextFunction
): Promise<any> {
    try {
        const parsedParams = paramsSchema.safeParse(req.params);
        if (!parsedParams.success) {
            return next(
                createHttpError(
                    HttpCode.BAD_REQUEST,
                    fromError(parsedParams.error).toString()
                )
            );
        }

        const parsedBody = bodySchema.safeParse(req.body);
        if (!parsedBody.success) {
            return next(
                createHttpError(
                    HttpCode.BAD_REQUEST,
                    fromError(parsedBody.error).toString()
                )
            );
        }

        const { orgId, domainId } = parsedParams.data;
        const { certResolver, preferWildcardCert } = parsedBody.data;

        // Ensure the domain is linked to the requesting org before touching it.
        const [orgDomain] = await db
            .select()
            .from(orgDomains)
            .where(
                and(
                    eq(orgDomains.orgId, orgId),
                    eq(orgDomains.domainId, domainId)
                )
            );

        if (!orgDomain) {
            return next(
                createHttpError(
                    HttpCode.NOT_FOUND,
                    "Domain not found or does not belong to this organization"
                )
            );
        }

        const [existingDomain] = await db
            .select()
            .from(domains)
            .where(eq(domains.domainId, domainId));

        if (!existingDomain) {
            return next(
                createHttpError(HttpCode.NOT_FOUND, "Domain not found")
            );
        }

        if (existingDomain.type !== "wildcard") {
            return next(
                createHttpError(
                    HttpCode.BAD_REQUEST,
                    "Domain settings can only be updated for wildcard domains"
                )
            );
        }

        const updateData: Partial<{
            certResolver: string | null;
            preferWildcardCert: boolean;
        }> = {};

        // certResolver accepts null to clear the resolver; preferWildcardCert
        // treats null as "leave unchanged" since the column is boolean.
        if (certResolver !== undefined) {
            updateData.certResolver = certResolver;
        }
        if (preferWildcardCert !== undefined && preferWildcardCert !== null) {
            updateData.preferWildcardCert = preferWildcardCert;
        }

        // Fixed: with an empty body (both fields omitted) the original
        // passed {} to drizzle's .set(), which throws and surfaced as a 500.
        // Short-circuit with the current values instead.
        if (Object.keys(updateData).length === 0) {
            return response<UpdateDomainResponse>(res, {
                data: {
                    domainId: existingDomain.domainId,
                    certResolver: existingDomain.certResolver,
                    preferWildcardCert: existingDomain.preferWildcardCert
                },
                success: true,
                error: false,
                message: "No changes provided; domain unchanged",
                status: HttpCode.OK
            });
        }

        const [updatedDomain] = await db
            .update(domains)
            .set(updateData)
            .where(eq(domains.domainId, domainId))
            .returning();

        if (!updatedDomain) {
            return next(
                createHttpError(
                    HttpCode.INTERNAL_SERVER_ERROR,
                    "Failed to update domain"
                )
            );
        }

        return response<UpdateDomainResponse>(res, {
            data: {
                domainId: updatedDomain.domainId,
                certResolver: updatedDomain.certResolver,
                preferWildcardCert: updatedDomain.preferWildcardCert
            },
            success: true,
            error: false,
            message: "Domain updated successfully",
            status: HttpCode.OK
        });
    } catch (error) {
        logger.error(error);
        return next(
            createHttpError(HttpCode.INTERNAL_SERVER_ERROR, "An error occurred")
        );
    }
}

View File

@@ -15,6 +15,7 @@ import * as accessToken from "./accessToken";
import * as idp from "./idp";
import * as blueprints from "./blueprints";
import * as apiKeys from "./apiKeys";
import * as logs from "./auditLogs";
import HttpCode from "@server/types/HttpCode";
import {
verifyAccessTokenAccess,
@@ -45,6 +46,8 @@ import rateLimit, { ipKeyGenerator } from "express-rate-limit";
import createHttpError from "http-errors";
import { build } from "@server/build";
import { createStore } from "#dynamic/lib/rateLimitStore";
import { logActionAudit } from "#dynamic/middlewares";
import { log } from "console";
// Root routes
export const unauthenticated = Router();
@@ -76,7 +79,8 @@ authenticated.post(
"/org/:orgId",
verifyOrgAccess,
verifyUserHasAction(ActionsEnum.updateOrg),
org.updateOrg
logActionAudit(ActionsEnum.updateOrg),
org.updateOrg,
);
if (build !== "saas") {
@@ -85,7 +89,8 @@ if (build !== "saas") {
verifyOrgAccess,
verifyUserIsOrgOwner,
verifyUserHasAction(ActionsEnum.deleteOrg),
org.deleteOrg
logActionAudit(ActionsEnum.deleteOrg),
org.deleteOrg,
);
}
@@ -93,6 +98,7 @@ authenticated.put(
"/org/:orgId/site",
verifyOrgAccess,
verifyUserHasAction(ActionsEnum.createSite),
logActionAudit(ActionsEnum.createSite),
site.createSite
);
authenticated.get(
@@ -150,7 +156,8 @@ authenticated.put(
verifyClientsEnabled,
verifyOrgAccess,
verifyUserHasAction(ActionsEnum.createClient),
client.createClient
logActionAudit(ActionsEnum.createClient),
client.createClient,
);
authenticated.delete(
@@ -158,7 +165,8 @@ authenticated.delete(
verifyClientsEnabled,
verifyClientAccess,
verifyUserHasAction(ActionsEnum.deleteClient),
client.deleteClient
logActionAudit(ActionsEnum.deleteClient),
client.deleteClient,
);
authenticated.post(
@@ -166,7 +174,8 @@ authenticated.post(
verifyClientsEnabled,
verifyClientAccess, // this will check if the user has access to the client
verifyUserHasAction(ActionsEnum.updateClient), // this will check if the user has permission to update the client
client.updateClient
logActionAudit(ActionsEnum.updateClient),
client.updateClient,
);
// authenticated.get(
@@ -179,15 +188,18 @@ authenticated.post(
"/site/:siteId",
verifySiteAccess,
verifyUserHasAction(ActionsEnum.updateSite),
site.updateSite
logActionAudit(ActionsEnum.updateSite),
site.updateSite,
);
authenticated.delete(
"/site/:siteId",
verifySiteAccess,
verifyUserHasAction(ActionsEnum.deleteSite),
site.deleteSite
logActionAudit(ActionsEnum.deleteSite),
site.deleteSite,
);
// TODO: BREAK OUT THESE ACTIONS SO THEY ARE NOT ALL "getSite"
authenticated.get(
"/site/:siteId/docker/status",
verifySiteAccess,
@@ -204,13 +216,13 @@ authenticated.post(
"/site/:siteId/docker/check",
verifySiteAccess,
verifyUserHasAction(ActionsEnum.getSite),
site.checkDockerSocket
site.checkDockerSocket,
);
authenticated.post(
"/site/:siteId/docker/trigger",
verifySiteAccess,
verifyUserHasAction(ActionsEnum.getSite),
site.triggerFetchContainers
site.triggerFetchContainers,
);
authenticated.get(
"/site/:siteId/docker/containers",
@@ -225,7 +237,8 @@ authenticated.put(
verifyOrgAccess,
verifySiteAccess,
verifyUserHasAction(ActionsEnum.createSiteResource),
siteResource.createSiteResource
logActionAudit(ActionsEnum.createSiteResource),
siteResource.createSiteResource,
);
authenticated.get(
@@ -258,7 +271,8 @@ authenticated.post(
verifySiteAccess,
verifySiteResourceAccess,
verifyUserHasAction(ActionsEnum.updateSiteResource),
siteResource.updateSiteResource
logActionAudit(ActionsEnum.updateSiteResource),
siteResource.updateSiteResource,
);
authenticated.delete(
@@ -267,14 +281,16 @@ authenticated.delete(
verifySiteAccess,
verifySiteResourceAccess,
verifyUserHasAction(ActionsEnum.deleteSiteResource),
siteResource.deleteSiteResource
logActionAudit(ActionsEnum.deleteSiteResource),
siteResource.deleteSiteResource,
);
authenticated.put(
"/org/:orgId/resource",
verifyOrgAccess,
verifyUserHasAction(ActionsEnum.createResource),
resource.createResource
logActionAudit(ActionsEnum.createResource),
resource.createResource,
);
authenticated.get(
@@ -303,6 +319,27 @@ authenticated.get(
domain.listDomains
);
authenticated.get(
"/org/:orgId/domain/:domainId",
verifyOrgAccess,
verifyUserHasAction(ActionsEnum.getDomain),
domain.getDomain
);
authenticated.patch(
"/org/:orgId/domain/:domainId",
verifyOrgAccess,
verifyUserHasAction(ActionsEnum.updateOrgDomain),
domain.updateOrgDomain
);
authenticated.get(
"/org/:orgId/domain/:domainId/dns-records",
verifyOrgAccess,
verifyUserHasAction(ActionsEnum.getDNSRecords),
domain.getDNSRecords
);
authenticated.get(
"/org/:orgId/invitations",
verifyOrgAccess,
@@ -314,15 +351,18 @@ authenticated.delete(
"/org/:orgId/invitations/:inviteId",
verifyOrgAccess,
verifyUserHasAction(ActionsEnum.removeInvitation),
user.removeInvitation
logActionAudit(ActionsEnum.removeInvitation),
user.removeInvitation,
);
authenticated.post(
"/org/:orgId/create-invite",
verifyOrgAccess,
verifyUserHasAction(ActionsEnum.inviteUser),
user.inviteUser
logActionAudit(ActionsEnum.inviteUser),
user.inviteUser,
); // maybe make this /invite/create instead
unauthenticated.post("/invite/accept", user.acceptInvite); // this is supposed to be unauthenticated
authenticated.get(
@@ -355,20 +395,23 @@ authenticated.post(
"/resource/:resourceId",
verifyResourceAccess,
verifyUserHasAction(ActionsEnum.updateResource),
resource.updateResource
logActionAudit(ActionsEnum.updateResource),
resource.updateResource,
);
authenticated.delete(
"/resource/:resourceId",
verifyResourceAccess,
verifyUserHasAction(ActionsEnum.deleteResource),
resource.deleteResource
logActionAudit(ActionsEnum.deleteResource),
resource.deleteResource,
);
authenticated.put(
"/resource/:resourceId/target",
verifyResourceAccess,
verifyUserHasAction(ActionsEnum.createTarget),
target.createTarget
logActionAudit(ActionsEnum.createTarget),
target.createTarget,
);
authenticated.get(
"/resource/:resourceId/targets",
@@ -381,7 +424,8 @@ authenticated.put(
"/resource/:resourceId/rule",
verifyResourceAccess,
verifyUserHasAction(ActionsEnum.createResourceRule),
resource.createResourceRule
logActionAudit(ActionsEnum.createResourceRule),
resource.createResourceRule,
);
authenticated.get(
"/resource/:resourceId/rules",
@@ -393,13 +437,15 @@ authenticated.post(
"/resource/:resourceId/rule/:ruleId",
verifyResourceAccess,
verifyUserHasAction(ActionsEnum.updateResourceRule),
resource.updateResourceRule
logActionAudit(ActionsEnum.updateResourceRule),
resource.updateResourceRule,
);
authenticated.delete(
"/resource/:resourceId/rule/:ruleId",
verifyResourceAccess,
verifyUserHasAction(ActionsEnum.deleteResourceRule),
resource.deleteResourceRule
logActionAudit(ActionsEnum.deleteResourceRule),
resource.deleteResourceRule,
);
authenticated.get(
@@ -412,20 +458,23 @@ authenticated.post(
"/target/:targetId",
verifyTargetAccess,
verifyUserHasAction(ActionsEnum.updateTarget),
target.updateTarget
logActionAudit(ActionsEnum.updateTarget),
target.updateTarget,
);
authenticated.delete(
"/target/:targetId",
verifyTargetAccess,
verifyUserHasAction(ActionsEnum.deleteTarget),
target.deleteTarget
logActionAudit(ActionsEnum.deleteTarget),
target.deleteTarget,
);
authenticated.put(
"/org/:orgId/role",
verifyOrgAccess,
verifyUserHasAction(ActionsEnum.createRole),
role.createRole
logActionAudit(ActionsEnum.createRole),
role.createRole,
);
authenticated.get(
"/org/:orgId/roles",
@@ -450,14 +499,16 @@ authenticated.delete(
"/role/:roleId",
verifyRoleAccess,
verifyUserHasAction(ActionsEnum.deleteRole),
role.deleteRole
logActionAudit(ActionsEnum.deleteRole),
role.deleteRole,
);
authenticated.post(
"/role/:roleId/add/:userId",
verifyRoleAccess,
verifyUserAccess,
verifyUserHasAction(ActionsEnum.addUserRole),
user.addUserRole
logActionAudit(ActionsEnum.addUserRole),
user.addUserRole,
);
authenticated.post(
@@ -465,7 +516,8 @@ authenticated.post(
verifyResourceAccess,
verifyRoleAccess,
verifyUserHasAction(ActionsEnum.setResourceRoles),
resource.setResourceRoles
logActionAudit(ActionsEnum.setResourceRoles),
resource.setResourceRoles,
);
authenticated.post(
@@ -473,35 +525,40 @@ authenticated.post(
verifyResourceAccess,
verifySetResourceUsers,
verifyUserHasAction(ActionsEnum.setResourceUsers),
resource.setResourceUsers
logActionAudit(ActionsEnum.setResourceUsers),
resource.setResourceUsers,
);
authenticated.post(
`/resource/:resourceId/password`,
verifyResourceAccess,
verifyUserHasAction(ActionsEnum.setResourcePassword),
resource.setResourcePassword
logActionAudit(ActionsEnum.setResourcePassword),
resource.setResourcePassword,
);
authenticated.post(
`/resource/:resourceId/pincode`,
verifyResourceAccess,
verifyUserHasAction(ActionsEnum.setResourcePincode),
resource.setResourcePincode
logActionAudit(ActionsEnum.setResourcePincode),
resource.setResourcePincode,
);
authenticated.post(
`/resource/:resourceId/header-auth`,
verifyResourceAccess,
verifyUserHasAction(ActionsEnum.setResourceHeaderAuth),
resource.setResourceHeaderAuth
logActionAudit(ActionsEnum.setResourceHeaderAuth),
resource.setResourceHeaderAuth,
);
authenticated.post(
`/resource/:resourceId/whitelist`,
verifyResourceAccess,
verifyUserHasAction(ActionsEnum.setResourceWhitelist),
resource.setResourceWhitelist
logActionAudit(ActionsEnum.setResourceWhitelist),
resource.setResourceWhitelist,
);
authenticated.get(
@@ -515,14 +572,16 @@ authenticated.post(
`/resource/:resourceId/access-token`,
verifyResourceAccess,
verifyUserHasAction(ActionsEnum.generateAccessToken),
accessToken.generateAccessToken
logActionAudit(ActionsEnum.generateAccessToken),
accessToken.generateAccessToken,
);
authenticated.delete(
`/access-token/:accessTokenId`,
verifyAccessTokenAccess,
verifyUserHasAction(ActionsEnum.deleteAcessToken),
accessToken.deleteAccessToken
logActionAudit(ActionsEnum.deleteAcessToken),
accessToken.deleteAccessToken,
);
authenticated.get(
@@ -595,7 +654,8 @@ authenticated.put(
"/org/:orgId/user",
verifyOrgAccess,
verifyUserHasAction(ActionsEnum.createOrgUser),
user.createOrgUser
logActionAudit(ActionsEnum.createOrgUser),
user.createOrgUser,
);
authenticated.post(
@@ -603,10 +663,12 @@ authenticated.post(
verifyOrgAccess,
verifyUserAccess,
verifyUserHasAction(ActionsEnum.updateOrgUser),
user.updateOrgUser
logActionAudit(ActionsEnum.updateOrgUser),
user.updateOrgUser,
);
authenticated.get("/org/:orgId/user/:userId", verifyOrgAccess, user.getOrgUser);
authenticated.get("/org/:orgId/user/:userId/check", org.checkOrgUserAccess);
authenticated.post(
"/user/:userId/2fa",
@@ -625,7 +687,8 @@ authenticated.delete(
verifyOrgAccess,
verifyUserAccess,
verifyUserHasAction(ActionsEnum.removeUser),
user.removeUserOrg
logActionAudit(ActionsEnum.removeUser),
user.removeUserOrg,
);
// authenticated.put(
@@ -755,7 +818,8 @@ authenticated.post(
verifyOrgAccess,
verifyApiKeyAccess,
verifyUserHasAction(ActionsEnum.setApiKeyActions),
apiKeys.setApiKeyActions
logActionAudit(ActionsEnum.setApiKeyActions),
apiKeys.setApiKeyActions,
);
authenticated.get(
@@ -770,7 +834,8 @@ authenticated.put(
`/org/:orgId/api-key`,
verifyOrgAccess,
verifyUserHasAction(ActionsEnum.createApiKey),
apiKeys.createOrgApiKey
logActionAudit(ActionsEnum.createApiKey),
apiKeys.createOrgApiKey,
);
authenticated.delete(
@@ -778,7 +843,8 @@ authenticated.delete(
verifyOrgAccess,
verifyApiKeyAccess,
verifyUserHasAction(ActionsEnum.deleteApiKey),
apiKeys.deleteOrgApiKey
logActionAudit(ActionsEnum.deleteApiKey),
apiKeys.deleteOrgApiKey,
);
authenticated.get(
@@ -793,7 +859,8 @@ authenticated.put(
`/org/:orgId/domain`,
verifyOrgAccess,
verifyUserHasAction(ActionsEnum.createOrgDomain),
domain.createOrgDomain
logActionAudit(ActionsEnum.createOrgDomain),
domain.createOrgDomain,
);
authenticated.post(
@@ -801,7 +868,8 @@ authenticated.post(
verifyOrgAccess,
verifyDomainAccess,
verifyUserHasAction(ActionsEnum.restartOrgDomain),
domain.restartOrgDomain
logActionAudit(ActionsEnum.restartOrgDomain),
domain.restartOrgDomain,
);
authenticated.delete(
@@ -809,7 +877,23 @@ authenticated.delete(
verifyOrgAccess,
verifyDomainAccess,
verifyUserHasAction(ActionsEnum.deleteOrgDomain),
domain.deleteAccountDomain
logActionAudit(ActionsEnum.deleteOrgDomain),
domain.deleteAccountDomain,
);
authenticated.get(
"/org/:orgId/logs/request",
verifyOrgAccess,
verifyUserHasAction(ActionsEnum.viewLogs),
logs.queryRequestAuditLogs
);
authenticated.get(
"/org/:orgId/logs/request/export",
verifyOrgAccess,
verifyUserHasAction(ActionsEnum.exportLogs),
logActionAudit(ActionsEnum.exportLogs),
logs.exportRequestAuditLogs
);
authenticated.get(
@@ -994,11 +1078,11 @@ authRouter.post(
auth.requestEmailVerificationCode
);
// authRouter.post(
// "/change-password",
// verifySessionUserMiddleware,
// auth.changePassword
// );
authRouter.post(
"/change-password",
verifySessionUserMiddleware,
auth.changePassword
);
authRouter.post(
"/reset-password/request",
@@ -1153,4 +1237,4 @@ authRouter.delete(
store: createStore()
}),
auth.deleteSecurityKey
);
);

View File

@@ -30,7 +30,7 @@ import {
import HttpCode from "@server/types/HttpCode";
import { Router } from "express";
import { ActionsEnum } from "@server/auth/actions";
import { build } from "@server/build";
import { logActionAudit } from "#dynamic/middlewares";
export const unauthenticated = Router();
@@ -52,7 +52,8 @@ authenticated.put(
"/org",
verifyApiKeyIsRoot,
verifyApiKeyHasAction(ActionsEnum.createOrg),
org.createOrg
logActionAudit(ActionsEnum.createOrg),
org.createOrg,
);
authenticated.get(
@@ -73,21 +74,24 @@ authenticated.post(
"/org/:orgId",
verifyApiKeyOrgAccess,
verifyApiKeyHasAction(ActionsEnum.updateOrg),
org.updateOrg
logActionAudit(ActionsEnum.updateOrg),
org.updateOrg,
);
authenticated.delete(
"/org/:orgId",
verifyApiKeyIsRoot,
verifyApiKeyHasAction(ActionsEnum.deleteOrg),
org.deleteOrg
logActionAudit(ActionsEnum.deleteOrg),
org.deleteOrg,
);
authenticated.put(
"/org/:orgId/site",
verifyApiKeyOrgAccess,
verifyApiKeyHasAction(ActionsEnum.createSite),
site.createSite
logActionAudit(ActionsEnum.createSite),
site.createSite,
);
authenticated.get(
@@ -122,14 +126,16 @@ authenticated.post(
"/site/:siteId",
verifyApiKeySiteAccess,
verifyApiKeyHasAction(ActionsEnum.updateSite),
site.updateSite
logActionAudit(ActionsEnum.updateSite),
site.updateSite,
);
authenticated.delete(
"/site/:siteId",
verifyApiKeySiteAccess,
verifyApiKeyHasAction(ActionsEnum.deleteSite),
site.deleteSite
logActionAudit(ActionsEnum.deleteSite),
site.deleteSite,
);
authenticated.get(
@@ -143,7 +149,8 @@ authenticated.put(
verifyApiKeyOrgAccess,
verifyApiKeySiteAccess,
verifyApiKeyHasAction(ActionsEnum.createSiteResource),
siteResource.createSiteResource
logActionAudit(ActionsEnum.createSiteResource),
siteResource.createSiteResource,
);
authenticated.get(
@@ -176,7 +183,8 @@ authenticated.post(
verifyApiKeySiteAccess,
verifyApiKeySiteResourceAccess,
verifyApiKeyHasAction(ActionsEnum.updateSiteResource),
siteResource.updateSiteResource
logActionAudit(ActionsEnum.updateSiteResource),
siteResource.updateSiteResource,
);
authenticated.delete(
@@ -185,21 +193,24 @@ authenticated.delete(
verifyApiKeySiteAccess,
verifyApiKeySiteResourceAccess,
verifyApiKeyHasAction(ActionsEnum.deleteSiteResource),
siteResource.deleteSiteResource
logActionAudit(ActionsEnum.deleteSiteResource),
siteResource.deleteSiteResource,
);
authenticated.put(
"/org/:orgId/resource",
verifyApiKeyOrgAccess,
verifyApiKeyHasAction(ActionsEnum.createResource),
resource.createResource
logActionAudit(ActionsEnum.createResource),
resource.createResource,
);
authenticated.put(
"/org/:orgId/site/:siteId/resource",
verifyApiKeyOrgAccess,
verifyApiKeyHasAction(ActionsEnum.createResource),
resource.createResource
logActionAudit(ActionsEnum.createResource),
resource.createResource,
);
authenticated.get(
@@ -234,7 +245,8 @@ authenticated.post(
"/org/:orgId/create-invite",
verifyApiKeyOrgAccess,
verifyApiKeyHasAction(ActionsEnum.inviteUser),
user.inviteUser
logActionAudit(ActionsEnum.inviteUser),
user.inviteUser,
);
authenticated.get(
@@ -262,21 +274,24 @@ authenticated.post(
"/resource/:resourceId",
verifyApiKeyResourceAccess,
verifyApiKeyHasAction(ActionsEnum.updateResource),
resource.updateResource
logActionAudit(ActionsEnum.updateResource),
resource.updateResource,
);
authenticated.delete(
"/resource/:resourceId",
verifyApiKeyResourceAccess,
verifyApiKeyHasAction(ActionsEnum.deleteResource),
resource.deleteResource
logActionAudit(ActionsEnum.deleteResource),
resource.deleteResource,
);
authenticated.put(
"/resource/:resourceId/target",
verifyApiKeyResourceAccess,
verifyApiKeyHasAction(ActionsEnum.createTarget),
target.createTarget
logActionAudit(ActionsEnum.createTarget),
target.createTarget,
);
authenticated.get(
@@ -290,7 +305,8 @@ authenticated.put(
"/resource/:resourceId/rule",
verifyApiKeyResourceAccess,
verifyApiKeyHasAction(ActionsEnum.createResourceRule),
resource.createResourceRule
logActionAudit(ActionsEnum.createResourceRule),
resource.createResourceRule,
);
authenticated.get(
@@ -304,14 +320,16 @@ authenticated.post(
"/resource/:resourceId/rule/:ruleId",
verifyApiKeyResourceAccess,
verifyApiKeyHasAction(ActionsEnum.updateResourceRule),
resource.updateResourceRule
logActionAudit(ActionsEnum.updateResourceRule),
resource.updateResourceRule,
);
authenticated.delete(
"/resource/:resourceId/rule/:ruleId",
verifyApiKeyResourceAccess,
verifyApiKeyHasAction(ActionsEnum.deleteResourceRule),
resource.deleteResourceRule
logActionAudit(ActionsEnum.deleteResourceRule),
resource.deleteResourceRule,
);
authenticated.get(
@@ -325,21 +343,24 @@ authenticated.post(
"/target/:targetId",
verifyApiKeyTargetAccess,
verifyApiKeyHasAction(ActionsEnum.updateTarget),
target.updateTarget
logActionAudit(ActionsEnum.updateTarget),
target.updateTarget,
);
authenticated.delete(
"/target/:targetId",
verifyApiKeyTargetAccess,
verifyApiKeyHasAction(ActionsEnum.deleteTarget),
target.deleteTarget
logActionAudit(ActionsEnum.deleteTarget),
target.deleteTarget,
);
authenticated.put(
"/org/:orgId/role",
verifyApiKeyOrgAccess,
verifyApiKeyHasAction(ActionsEnum.createRole),
role.createRole
logActionAudit(ActionsEnum.createRole),
role.createRole,
);
authenticated.get(
@@ -353,7 +374,8 @@ authenticated.delete(
"/role/:roleId",
verifyApiKeyRoleAccess,
verifyApiKeyHasAction(ActionsEnum.deleteRole),
role.deleteRole
logActionAudit(ActionsEnum.deleteRole),
role.deleteRole,
);
authenticated.get(
@@ -368,7 +390,8 @@ authenticated.post(
verifyApiKeyRoleAccess,
verifyApiKeyUserAccess,
verifyApiKeyHasAction(ActionsEnum.addUserRole),
user.addUserRole
logActionAudit(ActionsEnum.addUserRole),
user.addUserRole,
);
authenticated.post(
@@ -376,7 +399,8 @@ authenticated.post(
verifyApiKeyResourceAccess,
verifyApiKeyRoleAccess,
verifyApiKeyHasAction(ActionsEnum.setResourceRoles),
resource.setResourceRoles
logActionAudit(ActionsEnum.setResourceRoles),
resource.setResourceRoles,
);
authenticated.post(
@@ -384,45 +408,50 @@ authenticated.post(
verifyApiKeyResourceAccess,
verifyApiKeySetResourceUsers,
verifyApiKeyHasAction(ActionsEnum.setResourceUsers),
resource.setResourceUsers
logActionAudit(ActionsEnum.setResourceUsers),
resource.setResourceUsers,
);
authenticated.post(
`/resource/:resourceId/password`,
verifyApiKeyResourceAccess,
verifyApiKeyHasAction(ActionsEnum.setResourcePassword),
resource.setResourcePassword
logActionAudit(ActionsEnum.setResourcePassword),
resource.setResourcePassword,
);
authenticated.post(
`/resource/:resourceId/pincode`,
verifyApiKeyResourceAccess,
verifyApiKeyHasAction(ActionsEnum.setResourcePincode),
resource.setResourcePincode
logActionAudit(ActionsEnum.setResourcePincode),
resource.setResourcePincode,
);
authenticated.post(
`/resource/:resourceId/header-auth`,
verifyApiKeyResourceAccess,
verifyApiKeyHasAction(ActionsEnum.setResourceHeaderAuth),
resource.setResourceHeaderAuth
logActionAudit(ActionsEnum.setResourceHeaderAuth),
resource.setResourceHeaderAuth,
);
authenticated.post(
`/resource/:resourceId/whitelist`,
verifyApiKeyResourceAccess,
verifyApiKeyHasAction(ActionsEnum.setResourceWhitelist),
resource.setResourceWhitelist
logActionAudit(ActionsEnum.setResourceWhitelist),
resource.setResourceWhitelist,
);
authenticated.get(
authenticated.post(
`/resource/:resourceId/whitelist/add`,
verifyApiKeyResourceAccess,
verifyApiKeyHasAction(ActionsEnum.setResourceWhitelist),
resource.addEmailToResourceWhitelist
);
authenticated.get(
authenticated.post(
`/resource/:resourceId/whitelist/remove`,
verifyApiKeyResourceAccess,
verifyApiKeyHasAction(ActionsEnum.setResourceWhitelist),
@@ -440,14 +469,16 @@ authenticated.post(
`/resource/:resourceId/access-token`,
verifyApiKeyResourceAccess,
verifyApiKeyHasAction(ActionsEnum.generateAccessToken),
accessToken.generateAccessToken
logActionAudit(ActionsEnum.generateAccessToken),
accessToken.generateAccessToken,
);
authenticated.delete(
`/access-token/:accessTokenId`,
verifyApiKeyAccessTokenAccess,
verifyApiKeyHasAction(ActionsEnum.deleteAcessToken),
accessToken.deleteAccessToken
logActionAudit(ActionsEnum.deleteAcessToken),
accessToken.deleteAccessToken,
);
authenticated.get(
@@ -475,7 +506,8 @@ authenticated.post(
"/user/:userId/2fa",
verifyApiKeyIsRoot,
verifyApiKeyHasAction(ActionsEnum.updateUser),
user.updateUser2FA
logActionAudit(ActionsEnum.updateUser),
user.updateUser2FA,
);
authenticated.get(
@@ -496,7 +528,8 @@ authenticated.put(
"/org/:orgId/user",
verifyApiKeyOrgAccess,
verifyApiKeyHasAction(ActionsEnum.createOrgUser),
user.createOrgUser
logActionAudit(ActionsEnum.createOrgUser),
user.createOrgUser,
);
authenticated.post(
@@ -504,7 +537,8 @@ authenticated.post(
verifyApiKeyOrgAccess,
verifyApiKeyUserAccess,
verifyApiKeyHasAction(ActionsEnum.updateOrgUser),
user.updateOrgUser
logActionAudit(ActionsEnum.updateOrgUser),
user.updateOrgUser,
);
authenticated.delete(
@@ -512,7 +546,8 @@ authenticated.delete(
verifyApiKeyOrgAccess,
verifyApiKeyUserAccess,
verifyApiKeyHasAction(ActionsEnum.removeUser),
user.removeUserOrg
logActionAudit(ActionsEnum.removeUser),
user.removeUserOrg,
);
// authenticated.put(
@@ -532,7 +567,8 @@ authenticated.post(
`/org/:orgId/api-key/:apiKeyId/actions`,
verifyApiKeyIsRoot,
verifyApiKeyHasAction(ActionsEnum.setApiKeyActions),
apiKeys.setApiKeyActions
logActionAudit(ActionsEnum.setApiKeyActions),
apiKeys.setApiKeyActions,
);
authenticated.get(
@@ -546,28 +582,32 @@ authenticated.put(
`/org/:orgId/api-key`,
verifyApiKeyIsRoot,
verifyApiKeyHasAction(ActionsEnum.createApiKey),
apiKeys.createOrgApiKey
logActionAudit(ActionsEnum.createApiKey),
apiKeys.createOrgApiKey,
);
authenticated.delete(
`/org/:orgId/api-key/:apiKeyId`,
verifyApiKeyIsRoot,
verifyApiKeyHasAction(ActionsEnum.deleteApiKey),
apiKeys.deleteApiKey
logActionAudit(ActionsEnum.deleteApiKey),
apiKeys.deleteApiKey,
);
authenticated.put(
"/idp/oidc",
verifyApiKeyIsRoot,
verifyApiKeyHasAction(ActionsEnum.createIdp),
idp.createOidcIdp
logActionAudit(ActionsEnum.createIdp),
idp.createOidcIdp,
);
authenticated.post(
"/idp/:idpId/oidc",
verifyApiKeyIsRoot,
verifyApiKeyHasAction(ActionsEnum.updateIdp),
idp.updateOidcIdp
logActionAudit(ActionsEnum.updateIdp),
idp.updateOidcIdp,
);
authenticated.get(
@@ -588,21 +628,24 @@ authenticated.put(
"/idp/:idpId/org/:orgId",
verifyApiKeyIsRoot,
verifyApiKeyHasAction(ActionsEnum.createIdpOrg),
idp.createIdpOrgPolicy
logActionAudit(ActionsEnum.createIdpOrg),
idp.createIdpOrgPolicy,
);
authenticated.post(
"/idp/:idpId/org/:orgId",
verifyApiKeyIsRoot,
verifyApiKeyHasAction(ActionsEnum.updateIdpOrg),
idp.updateIdpOrgPolicy
logActionAudit(ActionsEnum.updateIdpOrg),
idp.updateIdpOrgPolicy,
);
authenticated.delete(
"/idp/:idpId/org/:orgId",
verifyApiKeyIsRoot,
verifyApiKeyHasAction(ActionsEnum.deleteIdpOrg),
idp.deleteIdpOrgPolicy
logActionAudit(ActionsEnum.deleteIdpOrg),
idp.deleteIdpOrgPolicy,
);
authenticated.get(
@@ -641,7 +684,8 @@ authenticated.put(
verifyClientsEnabled,
verifyApiKeyOrgAccess,
verifyApiKeyHasAction(ActionsEnum.createClient),
client.createClient
logActionAudit(ActionsEnum.createClient),
client.createClient,
);
authenticated.delete(
@@ -649,7 +693,8 @@ authenticated.delete(
verifyClientsEnabled,
verifyApiKeyClientAccess,
verifyApiKeyHasAction(ActionsEnum.deleteClient),
client.deleteClient
logActionAudit(ActionsEnum.deleteClient),
client.deleteClient,
);
authenticated.post(
@@ -657,7 +702,8 @@ authenticated.post(
verifyClientsEnabled,
verifyApiKeyClientAccess,
verifyApiKeyHasAction(ActionsEnum.updateClient),
client.updateClient
logActionAudit(ActionsEnum.updateClient),
client.updateClient,
);
authenticated.put(
@@ -665,4 +711,6 @@ authenticated.put(
verifyApiKeyOrgAccess,
verifyApiKeyHasAction(ActionsEnum.applyBlueprint),
blueprints.applyJSONBlueprint
logActionAudit(ActionsEnum.applyBlueprint),
org.applyBlueprint,
);

View File

@@ -1,10 +1,5 @@
import NodeCache from "node-cache";
import { sendToClient } from "#dynamic/routers/ws";
export const dockerSocketCache = new NodeCache({
stdTTL: 3600 // seconds
});
export function fetchContainers(newtId: string) {
const payload = {
type: `newt/socket/fetch`,

View File

@@ -1,8 +1,8 @@
import { MessageHandler } from "@server/routers/ws";
import logger from "@server/logger";
import { dockerSocketCache } from "./dockerSocket";
import { Newt } from "@server/db";
import { applyNewtDockerBlueprint } from "@server/lib/blueprints/applyNewtDockerBlueprint";
import cache from "@server/lib/cache";
export const handleDockerStatusMessage: MessageHandler = async (context) => {
const { message, client, sendToClient } = context;
@@ -24,8 +24,8 @@ export const handleDockerStatusMessage: MessageHandler = async (context) => {
if (available) {
logger.info(`Newt ${newt.newtId} has Docker socket access`);
dockerSocketCache.set(`${newt.newtId}:socketPath`, socketPath, 0);
dockerSocketCache.set(`${newt.newtId}:isAvailable`, available, 0);
cache.set(`${newt.newtId}:socketPath`, socketPath, 0);
cache.set(`${newt.newtId}:isAvailable`, available, 0);
} else {
logger.warn(`Newt ${newt.newtId} does not have Docker socket access`);
}
@@ -54,7 +54,7 @@ export const handleDockerContainersMessage: MessageHandler = async (
);
if (containers && containers.length > 0) {
dockerSocketCache.set(`${newt.newtId}:dockerContainers`, containers, 0);
cache.set(`${newt.newtId}:dockerContainers`, containers, 0);
} else {
logger.warn(`Newt ${newt.newtId} does not have Docker containers`);
}

View File

@@ -0,0 +1,136 @@
import { Request, Response, NextFunction } from "express";
import { z } from "zod";
import { db, idp, idpOidcConfig } from "@server/db";
import { roles, userOrgs, users } from "@server/db";
import { and, eq } from "drizzle-orm";
import response from "@server/lib/response";
import HttpCode from "@server/types/HttpCode";
import createHttpError from "http-errors";
import logger from "@server/logger";
import { fromError } from "zod-validation-error";
import { ActionsEnum, checkUserActionPermission } from "@server/auth/actions";
import { OpenAPITags, registry } from "@server/openApi";
import { checkOrgAccessPolicy } from "#dynamic/lib/checkOrgAccessPolicy";
import { CheckOrgAccessPolicyResult } from "@server/lib/checkOrgAccessPolicy";
/**
 * Fetch a single org-membership row for (orgId, userId), joined with the
 * user's role, identity-provider, and OIDC-variant details.
 *
 * Returns the joined row, or `undefined` when the user has no membership
 * in the given organization. Left joins mean the role/idp columns may be
 * null even when a membership row exists.
 */
async function queryUser(orgId: string, userId: string) {
    // Membership is keyed by the (userId, orgId) pair on userOrgs.
    const membershipFilter = and(
        eq(userOrgs.userId, userId),
        eq(userOrgs.orgId, orgId)
    );

    const rows = await db
        .select({
            orgId: userOrgs.orgId,
            userId: users.userId,
            email: users.email,
            username: users.username,
            name: users.name,
            type: users.type,
            roleId: userOrgs.roleId,
            roleName: roles.name,
            isOwner: userOrgs.isOwner,
            isAdmin: roles.isAdmin,
            twoFactorEnabled: users.twoFactorEnabled,
            autoProvisioned: userOrgs.autoProvisioned,
            idpId: users.idpId,
            idpName: idp.name,
            idpType: idp.type,
            idpVariant: idpOidcConfig.variant,
            idpAutoProvision: idp.autoProvision
        })
        .from(userOrgs)
        .leftJoin(roles, eq(userOrgs.roleId, roles.roleId))
        .leftJoin(users, eq(userOrgs.userId, users.userId))
        .leftJoin(idp, eq(users.idpId, idp.idpId))
        .leftJoin(idpOidcConfig, eq(idp.idpId, idpOidcConfig.idpId))
        .where(membershipFilter)
        .limit(1);

    // limit(1) guarantees at most one row; undefined when none matched.
    return rows[0];
}
// Response body is exactly the policy-check result produced by
// checkOrgAccessPolicy (re-exported under a route-specific name).
export type CheckOrgUserAccessResponse = CheckOrgAccessPolicyResult;

// Route parameters for /org/{orgId}/user/{userId}/check.
const paramsSchema = z.object({
    userId: z.string(),
    orgId: z.string()
});

// Register the endpoint with the OpenAPI document generator.
// Note: `responses` is intentionally left empty, matching the file's
// convention for routes whose response schema is not yet documented.
registry.registerPath({
    method: "get",
    path: "/org/{orgId}/user/{userId}/check",
    description: "Check a user's access in an organization.",
    tags: [OpenAPITags.Org, OpenAPITags.User],
    request: {
        params: paramsSchema
    },
    responses: {}
});
/**
 * GET /org/:orgId/user/:userId/check
 *
 * Checks whether the calling user passes the organization's access
 * policies. Only self-checks are allowed: `userId` must match the
 * authenticated session user, otherwise 403.
 *
 * If no membership row is found by user ID, the supplied identifier is
 * retried as an email address — presumably to cover identifiers keyed by
 * email; TODO confirm against callers.
 *
 * Responds 400 on invalid params, 403 when checking another user,
 * 404 when the user has no org membership, 200 with the
 * CheckOrgAccessPolicyResult otherwise.
 */
export async function checkOrgUserAccess(
    req: Request,
    res: Response,
    next: NextFunction
): Promise<any> {
    try {
        const parsedParams = paramsSchema.safeParse(req.params);
        if (!parsedParams.success) {
            return next(
                createHttpError(
                    HttpCode.BAD_REQUEST,
                    fromError(parsedParams.error).toString()
                )
            );
        }

        const { orgId, userId } = parsedParams.data;

        // Users may only query their own access.
        if (userId !== req.user?.userId) {
            return next(
                createHttpError(
                    HttpCode.FORBIDDEN,
                    "You do not have permission to check this user's access"
                )
            );
        }

        // Combined declaration + initialization (was a split
        // `let user; user = await …`).
        let user = await queryUser(orgId, userId);

        if (!user) {
            // Fallback: treat the identifier as an email address and
            // resolve it to a real user ID before retrying the lookup.
            const [fullUser] = await db
                .select()
                .from(users)
                .where(eq(users.email, userId))
                .limit(1);

            if (fullUser) {
                user = await queryUser(orgId, fullUser.userId);
            }
        }

        if (!user) {
            return next(
                createHttpError(
                    HttpCode.NOT_FOUND,
                    `User with ID ${userId} not found in org`
                )
            );
        }

        const policyCheck = await checkOrgAccessPolicy({
            orgId,
            userId,
            session: req.session
        });

        // if we get here, the user has an org join, we just don't know if they pass the policies
        return response<CheckOrgUserAccessResponse>(res, {
            data: policyCheck,
            success: true,
            error: false,
            message: "User access checked successfully",
            status: HttpCode.OK
        });
    } catch (error) {
        logger.error(error);
        return next(
            createHttpError(HttpCode.INTERNAL_SERVER_ERROR, "An error occurred")
        );
    }
}

View File

@@ -17,7 +17,7 @@ const getOrgSchema = z
.strict();
export type GetOrgResponse = {
org: Org & { settings: { } | null };
org: Org;
};
registry.registerPath({
@@ -49,13 +49,13 @@ export async function getOrg(
const { orgId } = parsedParams.data;
const org = await db
const [org] = await db
.select()
.from(orgs)
.where(eq(orgs.orgId, orgId))
.limit(1);
if (org.length === 0) {
if (!org) {
return next(
createHttpError(
HttpCode.NOT_FOUND,
@@ -64,23 +64,9 @@ export async function getOrg(
);
}
// Parse settings from JSON string back to object
let parsedSettings = null;
if (org[0].settings) {
try {
parsedSettings = JSON.parse(org[0].settings);
} catch (error) {
// If parsing fails, keep as string for backward compatibility
parsedSettings = org[0].settings;
}
}
return response<GetOrgResponse>(res, {
data: {
org: {
...org[0],
settings: parsedSettings
}
org
},
success: true,
error: false,

View File

@@ -7,3 +7,5 @@ export * from "./checkId";
export * from "./getOrgOverview";
export * from "./listOrgs";
export * from "./pickOrgDefaults";
export * from "./applyBlueprint";
export * from "./checkOrgUserAccess";

View File

@@ -1,7 +1,7 @@
import { Request, Response, NextFunction } from "express";
import { z } from "zod";
import { db } from "@server/db";
import { orgs } from "@server/db";
import { orgs, users } from "@server/db";
import { eq } from "drizzle-orm";
import response from "@server/lib/response";
import HttpCode from "@server/types/HttpCode";
@@ -9,18 +9,36 @@ import createHttpError from "http-errors";
import logger from "@server/logger";
import { fromError } from "zod-validation-error";
import { OpenAPITags, registry } from "@server/openApi";
import { build } from "@server/build";
import { UserType } from "@server/types/UserTypes";
import license from "#dynamic/license/license";
import { getOrgTierData } from "#dynamic/lib/billing";
import { TierId } from "@server/lib/billing/tiers";
const updateOrgParamsSchema = z
.object({
orgId: z.string(),
orgId: z.string()
})
.strict();
const updateOrgBodySchema = z
.object({
name: z.string().min(1).max(255).optional(),
settings: z.object({
}).optional(),
requireTwoFactor: z.boolean().optional(),
maxSessionLengthHours: z.number().nullable().optional(),
passwordExpiryDays: z.number().nullable().optional(),
settingsLogRetentionDaysRequest: z
.number()
.min(build === "saas" ? 0 : -1)
.optional(),
settingsLogRetentionDaysAccess: z
.number()
.min(build === "saas" ? 0 : -1)
.optional(),
settingsLogRetentionDaysAction: z
.number()
.min(build === "saas" ? 0 : -1)
.optional()
})
.strict()
.refine((data) => Object.keys(data).length > 0, {
@@ -73,13 +91,40 @@ export async function updateOrg(
const { orgId } = parsedParams.data;
const settings = parsedBody.data.settings ? JSON.stringify(parsedBody.data.settings) : undefined;
const isLicensed = await isLicensedOrSubscribed(orgId);
if (!isLicensed) {
parsedBody.data.requireTwoFactor = undefined;
parsedBody.data.maxSessionLengthHours = undefined;
parsedBody.data.passwordExpiryDays = undefined;
}
const { tier } = await getOrgTierData(orgId);
if (
tier != TierId.STANDARD &&
parsedBody.data.settingsLogRetentionDaysRequest &&
parsedBody.data.settingsLogRetentionDaysRequest > 30
) {
return next(
createHttpError(
HttpCode.FORBIDDEN,
"You are not allowed to set log retention days greater than 30 with your current subscription"
)
);
}
const updatedOrg = await db
.update(orgs)
.set({
name: parsedBody.data.name,
settings: settings
requireTwoFactor: parsedBody.data.requireTwoFactor,
maxSessionLengthHours: parsedBody.data.maxSessionLengthHours,
passwordExpiryDays: parsedBody.data.passwordExpiryDays,
settingsLogRetentionDaysRequest:
parsedBody.data.settingsLogRetentionDaysRequest,
settingsLogRetentionDaysAccess:
parsedBody.data.settingsLogRetentionDaysAccess,
settingsLogRetentionDaysAction:
parsedBody.data.settingsLogRetentionDaysAction
})
.where(eq(orgs.orgId, orgId))
.returning();
@@ -107,3 +152,22 @@ export async function updateOrg(
);
}
}
/**
 * Decide whether licensed/subscribed-only org settings may be applied.
 *
 * - enterprise build: requires an unlocked license.
 * - saas build: requires a STANDARD-tier subscription for the org.
 * - any other build: always allowed.
 */
async function isLicensedOrSubscribed(orgId: string): Promise<boolean> {
    // Enterprise: gate on the license being unlocked.
    if (build === "enterprise" && !(await license.isUnlocked())) {
        return false;
    }

    // SaaS: gate on the org's billing tier.
    if (build === "saas") {
        const { tier } = await getOrgTierData(orgId);
        return tier === TierId.STANDARD;
    }

    return true;
}

View File

@@ -10,11 +10,10 @@ import { z } from "zod";
import { fromError } from "zod-validation-error";
import { createResourceSession } from "@server/auth/sessions/resource";
import logger from "@server/logger";
import {
verifyResourceAccessToken
} from "@server/auth/verifyResourceAccessToken";
import { verifyResourceAccessToken } from "@server/auth/verifyResourceAccessToken";
import config from "@server/lib/config";
import stoi from "@server/lib/stoi";
import { logAccessAudit } from "#dynamic/lib/logAccessAudit";
const authWithAccessTokenBodySchema = z
.object({
@@ -131,6 +130,16 @@ export async function authWithAccessToken(
`Resource access token invalid. Resource ID: ${resource.resourceId}. IP: ${req.ip}.`
);
}
logAccessAudit({
orgId: resource.orgId,
resourceId: resource.resourceId,
action: false,
type: "accessToken",
userAgent: req.headers["user-agent"],
requestIp: req.ip
});
return next(
createHttpError(
HttpCode.UNAUTHORIZED,
@@ -150,6 +159,15 @@ export async function authWithAccessToken(
doNotExtend: true
});
logAccessAudit({
orgId: resource.orgId,
resourceId: resource.resourceId,
action: true,
type: "accessToken",
userAgent: req.headers["user-agent"],
requestIp: req.ip
});
return response<AuthWithAccessTokenResponse>(res, {
data: {
session: token,

View File

@@ -13,6 +13,7 @@ import { createResourceSession } from "@server/auth/sessions/resource";
import logger from "@server/logger";
import { verifyPassword } from "@server/auth/password";
import config from "@server/lib/config";
import { logAccessAudit } from "#dynamic/lib/logAccessAudit";
export const authWithPasswordBodySchema = z
.object({
@@ -113,6 +114,16 @@ export async function authWithPassword(
`Resource password incorrect. Resource ID: ${resource.resourceId}. IP: ${req.ip}.`
);
}
logAccessAudit({
orgId: org.orgId,
resourceId: resource.resourceId,
action: false,
type: "password",
userAgent: req.headers["user-agent"],
requestIp: req.ip
});
return next(
createHttpError(HttpCode.UNAUTHORIZED, "Incorrect password")
);
@@ -129,6 +140,15 @@ export async function authWithPassword(
doNotExtend: true
});
logAccessAudit({
orgId: org.orgId,
resourceId: resource.resourceId,
action: true,
type: "password",
userAgent: req.headers["user-agent"],
requestIp: req.ip
});
return response<AuthWithPasswordResponse>(res, {
data: {
session: token

View File

@@ -12,6 +12,7 @@ import { createResourceSession } from "@server/auth/sessions/resource";
import logger from "@server/logger";
import { verifyPassword } from "@server/auth/password";
import config from "@server/lib/config";
import { logAccessAudit } from "#dynamic/lib/logAccessAudit";
export const authWithPincodeBodySchema = z
.object({
@@ -112,6 +113,16 @@ export async function authWithPincode(
`Resource pin code incorrect. Resource ID: ${resource.resourceId}. IP: ${req.ip}.`
);
}
logAccessAudit({
orgId: org.orgId,
resourceId: resource.resourceId,
action: false,
type: "pincode",
userAgent: req.headers["user-agent"],
requestIp: req.ip
});
return next(
createHttpError(HttpCode.UNAUTHORIZED, "Incorrect PIN")
);
@@ -128,6 +139,15 @@ export async function authWithPincode(
doNotExtend: true
});
logAccessAudit({
orgId: org.orgId,
resourceId: resource.resourceId,
action: true,
type: "pincode",
userAgent: req.headers["user-agent"],
requestIp: req.ip
});
return response<AuthWithPincodeResponse>(res, {
data: {
session: token

View File

@@ -1,11 +1,6 @@
import { generateSessionToken } from "@server/auth/sessions/app";
import { db } from "@server/db";
import {
orgs,
resourceOtp,
resources,
resourceWhitelist
} from "@server/db";
import { orgs, resourceOtp, resources, resourceWhitelist } from "@server/db";
import HttpCode from "@server/types/HttpCode";
import response from "@server/lib/response";
import { eq, and } from "drizzle-orm";
@@ -17,13 +12,11 @@ import { createResourceSession } from "@server/auth/sessions/resource";
import { isValidOtp, sendResourceOtpEmail } from "@server/auth/resourceOtp";
import logger from "@server/logger";
import config from "@server/lib/config";
import { logAccessAudit } from "#dynamic/lib/logAccessAudit";
const authWithWhitelistBodySchema = z
.object({
email: z
.string()
.toLowerCase()
.email(),
email: z.string().toLowerCase().email(),
otp: z.string().optional()
})
.strict();
@@ -126,6 +119,19 @@ export async function authWithWhitelist(
`Email is not whitelisted. Email: ${email}. IP: ${req.ip}.`
);
}
if (org && resource) {
logAccessAudit({
orgId: org.orgId,
resourceId: resource.resourceId,
action: false,
type: "whitelistedEmail",
metadata: { email },
userAgent: req.headers["user-agent"],
requestIp: req.ip
});
}
return next(
createHttpError(
HttpCode.UNAUTHORIZED,
@@ -219,6 +225,16 @@ export async function authWithWhitelist(
doNotExtend: true
});
logAccessAudit({
orgId: org.orgId,
resourceId: resource.resourceId,
action: true,
metadata: { email },
type: "whitelistedEmail",
userAgent: req.headers["user-agent"],
requestIp: req.ip
});
return response<AuthWithWhitelistResponse>(res, {
data: {
session: token

View File

@@ -18,7 +18,7 @@ import { OpenAPITags, registry } from "@server/openApi";
const createResourceRuleSchema = z
.object({
action: z.enum(["ACCEPT", "DROP", "PASS"]),
match: z.enum(["CIDR", "IP", "PATH", "GEOIP"]),
match: z.enum(["CIDR", "IP", "PATH", "COUNTRY"]),
value: z.string().min(1),
priority: z.number().int(),
enabled: z.boolean().optional()

View File

@@ -10,11 +10,11 @@ import { fromError } from "zod-validation-error";
import logger from "@server/logger";
import { generateSessionToken } from "@server/auth/sessions/app";
import config from "@server/lib/config";
import {
encodeHexLowerCase
} from "@oslojs/encoding";
import { encodeHexLowerCase } from "@oslojs/encoding";
import { sha256 } from "@oslojs/crypto/sha2";
import { response } from "@server/lib/response";
import { checkOrgAccessPolicy } from "#dynamic/lib/checkOrgAccessPolicy";
import { logAccessAudit } from "#dynamic/lib/logAccessAudit";
const getExchangeTokenParams = z
.object({
@@ -47,13 +47,13 @@ export async function getExchangeToken(
const { resourceId } = parsedParams.data;
const resource = await db
const [resource] = await db
.select()
.from(resources)
.where(eq(resources.resourceId, resourceId))
.limit(1);
if (resource.length === 0) {
if (!resource) {
return next(
createHttpError(
HttpCode.NOT_FOUND,
@@ -74,6 +74,23 @@ export async function getExchangeToken(
);
}
// check org policy here
const hasAccess = await checkOrgAccessPolicy({
orgId: resource.orgId,
userId: req.user!.userId,
session: req.session
});
if (!hasAccess.allowed || hasAccess.error) {
return next(
createHttpError(
HttpCode.FORBIDDEN,
"Failed organization access policy check: " +
(hasAccess.error || "Unknown error")
)
);
}
const sessionId = encodeHexLowerCase(
sha256(new TextEncoder().encode(ssoSession))
);
@@ -89,6 +106,21 @@ export async function getExchangeToken(
doNotExtend: true
});
if (req.user) {
logAccessAudit({
orgId: resource.orgId,
resourceId: resourceId,
user: {
username: req.user.username,
userId: req.user.userId
},
action: true,
type: "login",
userAgent: req.headers["user-agent"],
requestIp: req.ip
});
}
logger.debug("Request token created successfully");
return response<GetExchangeTokenResponse>(res, {

View File

@@ -99,8 +99,9 @@ const updateRawResourceBodySchema = z
name: z.string().min(1).max(255).optional(),
proxyPort: z.number().int().min(1).max(65535).optional(),
stickySession: z.boolean().optional(),
enabled: z.boolean().optional()
// enableProxy: z.boolean().optional() // always true now
enabled: z.boolean().optional(),
proxyProtocol: z.boolean().optional(),
proxyProtocolVersion: z.number().int().min(1).optional()
})
.strict()
.refine((data) => Object.keys(data).length > 0, {

View File

@@ -30,7 +30,7 @@ const updateResourceRuleParamsSchema = z
const updateResourceRuleSchema = z
.object({
action: z.enum(["ACCEPT", "DROP", "PASS"]).optional(),
match: z.enum(["CIDR", "IP", "PATH", "GEOIP"]).optional(),
match: z.enum(["CIDR", "IP", "PATH", "COUNTRY"]).optional(),
value: z.string().min(1).optional(),
priority: z.number().int(),
enabled: z.boolean().optional()

View File

@@ -267,50 +267,10 @@ export async function createSite(
})
.returning();
} else if (type == "local") {
let exitNodeIdToCreate = exitNodeId;
if (!exitNodeIdToCreate) {
if (build == "saas") {
return next(
createHttpError(
HttpCode.BAD_REQUEST,
"Exit node ID of a remote node is required for local sites"
)
);
}
// select the exit node for local sites
// TODO: THIS SHOULD BE CHOSEN IN THE FRONTEND OR SOMETHING BECAUSE
// YOU CAN HAVE MORE THAN ONE NODE IN THE SYSTEM AND YOU SHOULD SELECT
// WHICH GERBIL NODE TO PUT THE SITE ON BUT FOR NOW THIS WILL DO
const [localExitNode] = await trx
.select()
.from(exitNodes)
.where(eq(exitNodes.type, "gerbil"))
.limit(1);
if (!localExitNode) {
return next(
createHttpError(
HttpCode.BAD_REQUEST,
"No gerbil exit node found for organization. Please create a gerbil exit node first."
)
);
}
exitNodeIdToCreate = localExitNode.exitNodeId;
} else {
return next(
createHttpError(
HttpCode.BAD_REQUEST,
"Site type not recognized"
)
);
}
[newSite] = await trx
.insert(sites)
.values({
exitNodeId: exitNodeIdToCreate,
exitNodeId: exitNodeId || null,
orgId,
name,
niceId,
@@ -321,6 +281,13 @@ export async function createSite(
subnet: "0.0.0.0/32"
})
.returning();
} else {
return next(
createHttpError(
HttpCode.BAD_REQUEST,
"Site type not recognized"
)
);
}
const adminRole = await trx

View File

@@ -9,14 +9,12 @@ import createHttpError from "http-errors";
import { z } from "zod";
import { fromError } from "zod-validation-error";
import { OpenAPITags, registry } from "@server/openApi";
import NodeCache from "node-cache";
import semver from "semver";
const newtVersionCache = new NodeCache({ stdTTL: 3600 }); // 1 hours in seconds
import cache from "@server/lib/cache";
async function getLatestNewtVersion(): Promise<string | null> {
try {
const cachedVersion = newtVersionCache.get<string>("latestNewtVersion");
const cachedVersion = cache.get<string>("latestNewtVersion");
if (cachedVersion) {
return cachedVersion;
}
@@ -48,7 +46,7 @@ async function getLatestNewtVersion(): Promise<string | null> {
const latestVersion = tags[0].name;
newtVersionCache.set("latestNewtVersion", latestVersion);
cache.set("latestNewtVersion", latestVersion);
return latestVersion;
} catch (error: any) {

View File

@@ -12,9 +12,9 @@ import stoi from "@server/lib/stoi";
import { sendToClient } from "#dynamic/routers/ws";
import {
fetchContainers,
dockerSocketCache,
dockerSocket
} from "../newt/dockerSocket";
import cache from "@server/lib/cache";
export interface ContainerNetwork {
networkId: string;
@@ -157,7 +157,7 @@ async function triggerFetch(siteId: number) {
// clear the cache for this Newt ID so that the site has to keep asking for the containers
// this is to ensure that the site always gets the latest data
dockerSocketCache.del(`${newt.newtId}:dockerContainers`);
cache.del(`${newt.newtId}:dockerContainers`);
return { siteId, newtId: newt.newtId };
}
@@ -165,7 +165,7 @@ async function triggerFetch(siteId: number) {
async function queryContainers(siteId: number) {
const { newt } = await getSiteAndNewt(siteId);
const result = dockerSocketCache.get(
const result = cache.get(
`${newt.newtId}:dockerContainers`
) as Container[];
if (!result) {
@@ -182,7 +182,7 @@ async function isDockerAvailable(siteId: number): Promise<boolean> {
const { newt } = await getSiteAndNewt(siteId);
const key = `${newt.newtId}:isAvailable`;
const isAvailable = dockerSocketCache.get(key);
const isAvailable = cache.get(key);
return !!isAvailable;
}
@@ -196,8 +196,8 @@ async function getDockerStatus(
const mappedKeys = keys.map((x) => `${newt.newtId}:${x}`);
const result = {
isAvailable: dockerSocketCache.get(mappedKeys[0]) as boolean,
socketPath: dockerSocketCache.get(mappedKeys[1]) as string | undefined
isAvailable: cache.get(mappedKeys[0]) as boolean,
socketPath: cache.get(mappedKeys[1]) as string | undefined
};
return result;

View File

@@ -21,7 +21,8 @@ export async function traefikConfigProvider(
currentExitNodeId,
config.getRawConfig().traefik.site_types,
build == "oss", // filter out the namespace domains in open source
build != "oss" // generate the login pages on the cloud and hybrid
build != "oss", // generate the login pages on the cloud and enterprise,
config.getRawConfig().traefik.allow_raw_resources
);
if (traefikConfig?.http?.middlewares) {

View File

@@ -1,8 +1,7 @@
import NodeCache from "node-cache";
import { Request, Response, NextFunction } from "express";
import { z } from "zod";
import { db } from "@server/db";
import { orgs, userInvites, userOrgs, users } from "@server/db";
import { orgs, roles, userInvites, userOrgs, users } from "@server/db";
import { and, eq } from "drizzle-orm";
import response from "@server/lib/response";
import HttpCode from "@server/types/HttpCode";
@@ -20,8 +19,7 @@ import { UserType } from "@server/types/UserTypes";
import { usageService } from "@server/lib/billing/usageService";
import { FeatureId } from "@server/lib/billing";
import { build } from "@server/build";
const regenerateTracker = new NodeCache({ stdTTL: 3600, checkperiod: 600 });
import cache from "@server/lib/cache";
const inviteUserParamsSchema = z
.object({
@@ -111,6 +109,27 @@ export async function inviteUser(
);
}
// Validate that the roleId belongs to the target organization
const [role] = await db
.select()
.from(roles)
.where(
and(
eq(roles.roleId, roleId),
eq(roles.orgId, orgId)
)
)
.limit(1);
if (!role) {
return next(
createHttpError(
HttpCode.BAD_REQUEST,
"Invalid role ID or role does not belong to this organization"
)
);
}
if (build == "saas") {
const usage = await usageService.getUsage(orgId, FeatureId.USERS);
if (!usage) {
@@ -182,7 +201,7 @@ export async function inviteUser(
}
if (existingInvite.length) {
const attempts = regenerateTracker.get<number>(email) || 0;
const attempts = cache.get<number>(email) || 0;
if (attempts >= 3) {
return next(
createHttpError(
@@ -192,7 +211,7 @@ export async function inviteUser(
);
}
regenerateTracker.set(email, attempts + 1);
cache.set(email, attempts + 1);
const inviteId = existingInvite[0].inviteId; // Retrieve the original inviteId
const token = generateRandomString(

View File

@@ -1,4 +1,4 @@
import { db } from "@server/db";
import { db, dnsRecords } from "@server/db";
import { domains, exitNodes, orgDomains, orgs, resources } from "@server/db";
import config from "@server/lib/config";
import { eq, ne } from "drizzle-orm";
@@ -8,7 +8,10 @@ export async function copyInConfig() {
const endpoint = config.getRawConfig().gerbil.base_endpoint;
const listenPort = config.getRawConfig().gerbil.start_port;
if (!config.getRawConfig().flags?.disable_config_managed_domains && config.getRawConfig().domains) {
if (
!config.getRawConfig().flags?.disable_config_managed_domains &&
config.getRawConfig().domains
) {
await copyInDomains();
}
@@ -37,7 +40,9 @@ async function copyInDomains() {
const configDomains = Object.entries(rawDomains).map(
([key, value]) => ({
domainId: key,
baseDomain: value.base_domain.toLowerCase()
baseDomain: value.base_domain.toLowerCase(),
certResolver: value.cert_resolver || null,
preferWildcardCert: value.prefer_wildcard_cert || null
})
);
@@ -54,29 +59,79 @@ async function copyInDomains() {
if (!configDomainKeys.has(existingDomain.domainId)) {
await trx
.delete(domains)
.where(eq(domains.domainId, existingDomain.domainId))
.execute();
.where(eq(domains.domainId, existingDomain.domainId));
await trx
.delete(dnsRecords)
.where(eq(dnsRecords.domainId, existingDomain.domainId));
}
}
for (const { domainId, baseDomain } of configDomains) {
for (const {
domainId,
baseDomain,
certResolver,
preferWildcardCert
} of configDomains) {
if (existingDomainKeys.has(domainId)) {
await trx
.update(domains)
.set({ baseDomain, verified: true, type: "wildcard" })
.where(eq(domains.domainId, domainId))
.execute();
} else {
await trx
.insert(domains)
.values({
domainId,
.set({
baseDomain,
configManaged: true,
verified: true,
type: "wildcard",
verified: true
certResolver,
preferWildcardCert
})
.execute();
.where(eq(domains.domainId, domainId));
// delete the dns records and add them again to ensure they are correct
await trx
.delete(dnsRecords)
.where(eq(dnsRecords.domainId, domainId));
await trx.insert(dnsRecords).values([
{
domainId,
recordType: "A",
baseDomain,
value: "Server IP Address",
verified: true
},
{
domainId,
recordType: "A",
baseDomain,
value: "Server IP Address",
verified: true
}
]);
} else {
await trx.insert(domains).values({
domainId,
baseDomain,
configManaged: true,
type: "wildcard",
verified: true,
certResolver,
preferWildcardCert
});
await trx.insert(dnsRecords).values([
{
domainId,
recordType: "A",
baseDomain,
value: "Server IP Address",
verified: true
},
{
domainId,
recordType: "A",
baseDomain,
value: "Server IP Address",
verified: true
}
]);
}
}

View File

@@ -13,6 +13,7 @@ import m5 from "./scriptsPg/1.10.0";
import m6 from "./scriptsPg/1.10.2";
import m7 from "./scriptsPg/1.11.0";
import m8 from "./scriptsPg/1.11.1";
import m9 from "./scriptsPg/1.12.0";
// THIS CANNOT IMPORT ANYTHING FROM THE SERVER
// EXCEPT FOR THE DATABASE AND THE SCHEMA
@@ -26,7 +27,8 @@ const migrations = [
{ version: "1.10.0", run: m5 },
{ version: "1.10.2", run: m6 },
{ version: "1.11.0", run: m7 },
{ version: "1.11.1", run: m8 }
{ version: "1.11.1", run: m8 },
{ version: "1.12.0", run: m9 }
// Add new migrations here as they are created
] as {
version: string;

View File

@@ -31,6 +31,7 @@ import m26 from "./scriptsSqlite/1.10.1";
import m27 from "./scriptsSqlite/1.10.2";
import m28 from "./scriptsSqlite/1.11.0";
import m29 from "./scriptsSqlite/1.11.1";
import m30 from "./scriptsSqlite/1.12.0";
// THIS CANNOT IMPORT ANYTHING FROM THE SERVER
// EXCEPT FOR THE DATABASE AND THE SCHEMA
@@ -60,7 +61,8 @@ const migrations = [
{ version: "1.10.1", run: m26 },
{ version: "1.10.2", run: m27 },
{ version: "1.11.0", run: m28 },
{ version: "1.11.1", run: m29 }
{ version: "1.11.1", run: m29 },
{ version: "1.12.0", run: m30 }
// Add new migrations here as they are created
] as const;

View File

@@ -9,31 +9,6 @@ export default async function migration() {
try {
await db.execute(sql`BEGIN`);
// Get the first exit node with type 'gerbil'
const exitNodesQuery = await db.execute(
sql`SELECT * FROM "exitNodes" WHERE "type" = 'gerbil' LIMIT 1`
);
const exitNodes = exitNodesQuery.rows as {
exitNodeId: number;
}[];
const exitNodeId = exitNodes.length > 0 ? exitNodes[0].exitNodeId : null;
// Get all sites with type 'local'
const sitesQuery = await db.execute(
sql`SELECT "siteId" FROM "sites" WHERE "type" = 'local'`
);
const sites = sitesQuery.rows as {
siteId: number;
}[];
// Update sites to use the exit node
for (const site of sites) {
await db.execute(sql`
UPDATE "sites" SET "exitNode" = ${exitNodeId} WHERE "siteId" = ${site.siteId}
`);
}
await db.execute(sql`UPDATE "exitNodes" SET "online" = true`); // Mark exit nodes as online
await db.execute(sql`COMMIT`);

View File

@@ -0,0 +1,120 @@
import { db } from "@server/db/pg/driver";
import { sql } from "drizzle-orm";
const version = "1.12.0";

/**
 * Postgres migration script for 1.12.0.
 *
 * Runs every statement inside a single BEGIN/COMMIT transaction and issues a
 * ROLLBACK (then rethrows) on any failure, so the database is left untouched
 * if anything goes wrong. Steps, in order:
 *  - renames the resourceRules match value 'GEOIP' to 'COUNTRY'
 *  - creates the audit-log tables (accessAuditLog, actionAuditLog,
 *    requestAuditLog) and the dnsRecords table
 *  - adds new columns to domains, orgs, resources, resourceSessions,
 *    session and user
 *  - adds org/domain foreign keys and (orgId, timestamp) indexes for the new
 *    tables, and recreates resources.skipToIdpId with ON DELETE set null
 *  - drops the legacy orgs.settings column
 */
export default async function migration() {
    console.log(`Running setup script ${version}...`);

    try {
        await db.execute(sql`BEGIN`);

        await db.execute(sql`UPDATE "resourceRules" SET "match" = 'COUNTRY' WHERE "match" = 'GEOIP'`);

        await db.execute(sql`
            CREATE TABLE "accessAuditLog" (
                "id" serial PRIMARY KEY NOT NULL,
                "timestamp" bigint NOT NULL,
                "orgId" varchar NOT NULL,
                "actorType" varchar(50),
                "actor" varchar(255),
                "actorId" varchar(255),
                "resourceId" integer,
                "ip" varchar(45),
                "type" varchar(100) NOT NULL,
                "action" boolean NOT NULL,
                "location" text,
                "userAgent" text,
                "metadata" text
            );
        `);
        await db.execute(sql`
            CREATE TABLE "actionAuditLog" (
                "id" serial PRIMARY KEY NOT NULL,
                "timestamp" bigint NOT NULL,
                "orgId" varchar NOT NULL,
                "actorType" varchar(50) NOT NULL,
                "actor" varchar(255) NOT NULL,
                "actorId" varchar(255) NOT NULL,
                "action" varchar(100) NOT NULL,
                "metadata" text
            );
        `);
        await db.execute(sql`
            CREATE TABLE "dnsRecords" (
                "id" serial PRIMARY KEY NOT NULL,
                "domainId" varchar NOT NULL,
                "recordType" varchar NOT NULL,
                "baseDomain" varchar,
                "value" varchar NOT NULL,
                "verified" boolean DEFAULT false NOT NULL
            );
        `);
        // NOTE(review): "timestamp" here is integer while the sibling audit
        // tables use bigint (and the SQLite script uses integer everywhere) —
        // confirm the narrower type is intentional for requestAuditLog.
        await db.execute(sql`
            CREATE TABLE "requestAuditLog" (
                "id" serial PRIMARY KEY NOT NULL,
                "timestamp" integer NOT NULL,
                "orgId" text,
                "action" boolean NOT NULL,
                "reason" integer NOT NULL,
                "actorType" text,
                "actor" text,
                "actorId" text,
                "resourceId" integer,
                "ip" text,
                "location" text,
                "userAgent" text,
                "metadata" text,
                "headers" text,
                "query" text,
                "originalRequestURL" text,
                "scheme" text,
                "host" text,
                "path" text,
                "method" text,
                "tls" boolean
            );
        `);
        // Dropped here and re-added below with ON DELETE set null so that
        // deleting an IdP no longer blocks on (or cascades through) resources.
        await db.execute(sql`ALTER TABLE "resources" DROP CONSTRAINT "resources_skipToIdpId_idp_idpId_fk";`);
        await db.execute(sql`ALTER TABLE "domains" ADD COLUMN "certResolver" varchar;`);
        await db.execute(sql`ALTER TABLE "domains" ADD COLUMN "customCertResolver" varchar;`);
        await db.execute(sql`ALTER TABLE "domains" ADD COLUMN "preferWildcardCert" boolean;`);
        await db.execute(sql`ALTER TABLE "orgs" ADD COLUMN "requireTwoFactor" boolean;`);
        await db.execute(sql`ALTER TABLE "orgs" ADD COLUMN "maxSessionLengthHours" integer;`);
        await db.execute(sql`ALTER TABLE "orgs" ADD COLUMN "passwordExpiryDays" integer;`);
        // Log retention defaults: request logs kept 7 days; access/action
        // retention disabled (0) until explicitly enabled per org.
        await db.execute(sql`ALTER TABLE "orgs" ADD COLUMN "settingsLogRetentionDaysRequest" integer DEFAULT 7 NOT NULL;`);
        await db.execute(sql`ALTER TABLE "orgs" ADD COLUMN "settingsLogRetentionDaysAccess" integer DEFAULT 0 NOT NULL;`);
        await db.execute(sql`ALTER TABLE "orgs" ADD COLUMN "settingsLogRetentionDaysAction" integer DEFAULT 0 NOT NULL;`);
        // Nullable on purpose: pre-existing sessions have no recorded issue time.
        await db.execute(sql`ALTER TABLE "resourceSessions" ADD COLUMN "issuedAt" bigint;`);
        await db.execute(sql`ALTER TABLE "resources" ADD COLUMN "proxyProtocol" boolean DEFAULT false NOT NULL;`);
        await db.execute(sql`ALTER TABLE "resources" ADD COLUMN "proxyProtocolVersion" integer DEFAULT 1;`);
        await db.execute(sql`ALTER TABLE "session" ADD COLUMN "issuedAt" bigint;`);
        await db.execute(sql`ALTER TABLE "user" ADD COLUMN "lastPasswordChange" bigint;`);
        // Audit rows are removed automatically when their org is deleted.
        await db.execute(sql`ALTER TABLE "accessAuditLog" ADD CONSTRAINT "accessAuditLog_orgId_orgs_orgId_fk" FOREIGN KEY ("orgId") REFERENCES "public"."orgs"("orgId") ON DELETE cascade ON UPDATE no action;`);
        await db.execute(sql`ALTER TABLE "actionAuditLog" ADD CONSTRAINT "actionAuditLog_orgId_orgs_orgId_fk" FOREIGN KEY ("orgId") REFERENCES "public"."orgs"("orgId") ON DELETE cascade ON UPDATE no action;`);
        await db.execute(sql`ALTER TABLE "dnsRecords" ADD CONSTRAINT "dnsRecords_domainId_domains_domainId_fk" FOREIGN KEY ("domainId") REFERENCES "public"."domains"("domainId") ON DELETE cascade ON UPDATE no action;`);
        await db.execute(sql`ALTER TABLE "requestAuditLog" ADD CONSTRAINT "requestAuditLog_orgId_orgs_orgId_fk" FOREIGN KEY ("orgId") REFERENCES "public"."orgs"("orgId") ON DELETE cascade ON UPDATE no action;`);
        // NOTE(review): the accessAuditLog indexes are named
        // "idx_identityAuditLog_*" (matches the SQLite script) — presumably a
        // leftover from an earlier table name; renaming would desync the two
        // dialects, so it is kept as-is.
        await db.execute(sql`CREATE INDEX "idx_identityAuditLog_timestamp" ON "accessAuditLog" USING btree ("timestamp");`);
        await db.execute(sql`CREATE INDEX "idx_identityAuditLog_org_timestamp" ON "accessAuditLog" USING btree ("orgId","timestamp");`);
        await db.execute(sql`CREATE INDEX "idx_actionAuditLog_timestamp" ON "actionAuditLog" USING btree ("timestamp");`);
        await db.execute(sql`CREATE INDEX "idx_actionAuditLog_org_timestamp" ON "actionAuditLog" USING btree ("orgId","timestamp");`);
        await db.execute(sql`CREATE INDEX "idx_requestAuditLog_timestamp" ON "requestAuditLog" USING btree ("timestamp");`);
        await db.execute(sql`CREATE INDEX "idx_requestAuditLog_org_timestamp" ON "requestAuditLog" USING btree ("orgId","timestamp");`);
        await db.execute(sql`ALTER TABLE "resources" ADD CONSTRAINT "resources_skipToIdpId_idp_idpId_fk" FOREIGN KEY ("skipToIdpId") REFERENCES "public"."idp"("idpId") ON DELETE set null ON UPDATE no action;`);
        await db.execute(sql`ALTER TABLE "orgs" DROP COLUMN "settings";`);

        await db.execute(sql`COMMIT`);
        console.log("Migrated database");
    } catch (e) {
        await db.execute(sql`ROLLBACK`);
        console.log("Unable to migrate database");
        console.log(e);
        throw e;
    }

    console.log(`${version} migration complete`);
}

View File

@@ -11,32 +11,6 @@ export default async function migration() {
const db = new Database(location);
db.transaction(() => {
const exitNodes = db
.prepare(`SELECT * FROM exitNodes WHERE type = 'gerbil' LIMIT 1`)
.all() as {
exitNodeId: number;
name: string;
}[];
const exitNodeId =
exitNodes.length > 0 ? exitNodes[0].exitNodeId : null;
// get all of the targets
const sites = db
.prepare(`SELECT * FROM sites WHERE type = 'local'`)
.all() as {
siteId: number;
exitNodeId: number | null;
}[];
const defineExitNodeOnSite = db.prepare(
`UPDATE sites SET exitNode = ? WHERE siteId = ?`
);
for (const site of sites) {
defineExitNodeOnSite.run(exitNodeId, site.siteId);
}
db.prepare(`UPDATE exitNodes SET online = 1`).run(); // mark exit nodes as online
})();

View File

@@ -0,0 +1,209 @@
import { APP_PATH } from "@server/lib/consts";
import Database from "better-sqlite3";
import path from "path";
const version = "1.12.0";

/**
 * SQLite migration script for 1.12.0 (counterpart of the Postgres script).
 *
 * Opens the database at APP_PATH/db/db.sqlite directly via better-sqlite3 and
 * applies all DDL/DML inside one synchronous transaction with foreign-key
 * enforcement temporarily disabled (required for the `resources` table
 * rebuild). On failure the error is logged and rethrown; the transaction
 * rolls back automatically. Steps:
 *  - renames the resourceRules match value 'GEOIP' to 'COUNTRY'
 *  - creates accessAuditLog, actionAuditLog, requestAuditLog and dnsRecords
 *    (with their indexes and org/domain foreign keys inline)
 *  - rebuilds `resources` via the copy-rename pattern to change the
 *    skipToIdpId foreign key to ON DELETE set null and to add the
 *    proxyProtocol / proxyProtocolVersion columns
 *  - adds new columns to domains, orgs, resourceSessions, session and user,
 *    and drops the legacy orgs.settings column
 *
 * NOTE(review): the Postgres 1.12.0 script also adds
 * "customCertResolver" to domains; this script does not — confirm the
 * difference is intentional.
 */
export default async function migration() {
    console.log(`Running setup script ${version}...`);

    const location = path.join(APP_PATH, "db", "db.sqlite");
    const db = new Database(location);

    try {
        // FKs must be off while `resources` is dropped and recreated below.
        db.pragma("foreign_keys = OFF");

        db.transaction(() => {
            db.prepare(
                `UPDATE 'resourceRules' SET 'match' = 'COUNTRY' WHERE 'match' = 'GEOIP'`
            ).run();
            db.prepare(
                `
                CREATE TABLE 'accessAuditLog' (
                	'id' integer PRIMARY KEY AUTOINCREMENT NOT NULL,
                	'timestamp' integer NOT NULL,
                	'orgId' text NOT NULL,
                	'actorType' text,
                	'actor' text,
                	'actorId' text,
                	'resourceId' integer,
                	'ip' text,
                	'location' text,
                	'type' text NOT NULL,
                	'action' integer NOT NULL,
                	'userAgent' text,
                	'metadata' text,
                	FOREIGN KEY ('orgId') REFERENCES 'orgs'('orgId') ON UPDATE no action ON DELETE cascade
                );
            `
            ).run();
            // NOTE(review): index names say "identityAuditLog" but target
            // accessAuditLog — same legacy naming as the Postgres script.
            db.prepare(
                `CREATE INDEX 'idx_identityAuditLog_timestamp' ON 'accessAuditLog' ('timestamp');`
            ).run();
            db.prepare(
                `CREATE INDEX 'idx_identityAuditLog_org_timestamp' ON 'accessAuditLog' ('orgId','timestamp');`
            ).run();
            db.prepare(
                `
                CREATE TABLE 'actionAuditLog' (
                	'id' integer PRIMARY KEY AUTOINCREMENT NOT NULL,
                	'timestamp' integer NOT NULL,
                	'orgId' text NOT NULL,
                	'actorType' text NOT NULL,
                	'actor' text NOT NULL,
                	'actorId' text NOT NULL,
                	'action' text NOT NULL,
                	'metadata' text,
                	FOREIGN KEY ('orgId') REFERENCES 'orgs'('orgId') ON UPDATE no action ON DELETE cascade
                );
            `
            ).run();
            db.prepare(
                `CREATE INDEX 'idx_actionAuditLog_timestamp' ON 'actionAuditLog' ('timestamp');`
            ).run();
            db.prepare(
                `CREATE INDEX 'idx_actionAuditLog_org_timestamp' ON 'actionAuditLog' ('orgId','timestamp');`
            ).run();
            db.prepare(
                `
                CREATE TABLE 'dnsRecords' (
                	'id' integer PRIMARY KEY AUTOINCREMENT NOT NULL,
                	'domainId' text NOT NULL,
                	'recordType' text NOT NULL,
                	'baseDomain' text,
                	'value' text NOT NULL,
                	'verified' integer DEFAULT false NOT NULL,
                	FOREIGN KEY ('domainId') REFERENCES 'domains'('domainId') ON UPDATE no action ON DELETE cascade
                );
            `
            ).run();
            db.prepare(
                `
                CREATE TABLE 'requestAuditLog' (
                	'id' integer PRIMARY KEY AUTOINCREMENT NOT NULL,
                	'timestamp' integer NOT NULL,
                	'orgId' text,
                	'action' integer NOT NULL,
                	'reason' integer NOT NULL,
                	'actorType' text,
                	'actor' text,
                	'actorId' text,
                	'resourceId' integer,
                	'ip' text,
                	'location' text,
                	'userAgent' text,
                	'metadata' text,
                	'headers' text,
                	'query' text,
                	'originalRequestURL' text,
                	'scheme' text,
                	'host' text,
                	'path' text,
                	'method' text,
                	'tls' integer,
                	FOREIGN KEY ('orgId') REFERENCES 'orgs'('orgId') ON UPDATE no action ON DELETE cascade
                );
            `
            ).run();
            db.prepare(
                `CREATE INDEX 'idx_requestAuditLog_timestamp' ON 'requestAuditLog' ('timestamp');`
            ).run();
            db.prepare(
                `CREATE INDEX 'idx_requestAuditLog_org_timestamp' ON 'requestAuditLog' ('orgId','timestamp');`
            ).run();
            // SQLite cannot alter a foreign key in place, so `resources` is
            // rebuilt: create __new_resources with the desired schema (FK to
            // idp now ON DELETE set null; new proxyProtocol columns), copy
            // the data, drop the old table and rename.
            db.prepare(
                `
                CREATE TABLE '__new_resources' (
                	'resourceId' integer PRIMARY KEY AUTOINCREMENT NOT NULL,
                	'resourceGuid' text(36) NOT NULL,
                	'orgId' text NOT NULL,
                	'niceId' text NOT NULL,
                	'name' text NOT NULL,
                	'subdomain' text,
                	'fullDomain' text,
                	'domainId' text,
                	'ssl' integer DEFAULT false NOT NULL,
                	'blockAccess' integer DEFAULT false NOT NULL,
                	'sso' integer DEFAULT true NOT NULL,
                	'http' integer DEFAULT true NOT NULL,
                	'protocol' text NOT NULL,
                	'proxyPort' integer,
                	'emailWhitelistEnabled' integer DEFAULT false NOT NULL,
                	'applyRules' integer DEFAULT false NOT NULL,
                	'enabled' integer DEFAULT true NOT NULL,
                	'stickySession' integer DEFAULT false NOT NULL,
                	'tlsServerName' text,
                	'setHostHeader' text,
                	'enableProxy' integer DEFAULT true,
                	'skipToIdpId' integer,
                	'headers' text,
                	'proxyProtocol' integer DEFAULT false NOT NULL,
                	'proxyProtocolVersion' integer DEFAULT 1,
                	FOREIGN KEY ('orgId') REFERENCES 'orgs'('orgId') ON UPDATE no action ON DELETE cascade,
                	FOREIGN KEY ('domainId') REFERENCES 'domains'('domainId') ON UPDATE no action ON DELETE set null,
                	FOREIGN KEY ('skipToIdpId') REFERENCES 'idp'('idpId') ON UPDATE no action ON DELETE set null
                );
            `
            ).run();
            // The copy omits proxyProtocol/proxyProtocolVersion so the new
            // columns take their declared defaults for existing rows.
            db.prepare(
                `INSERT INTO '__new_resources'("resourceId", "resourceGuid", "orgId", "niceId", "name", "subdomain", "fullDomain", "domainId", "ssl", "blockAccess", "sso", "http", "protocol", "proxyPort", "emailWhitelistEnabled", "applyRules", "enabled", "stickySession", "tlsServerName", "setHostHeader", "enableProxy", "skipToIdpId", "headers") SELECT "resourceId", "resourceGuid", "orgId", "niceId", "name", "subdomain", "fullDomain", "domainId", "ssl", "blockAccess", "sso", "http", "protocol", "proxyPort", "emailWhitelistEnabled", "applyRules", "enabled", "stickySession", "tlsServerName", "setHostHeader", "enableProxy", "skipToIdpId", "headers" FROM 'resources';`
            ).run();
            db.prepare(`DROP TABLE 'resources';`).run();
            db.prepare(
                `ALTER TABLE '__new_resources' RENAME TO 'resources';`
            ).run();
            db.prepare(
                `CREATE UNIQUE INDEX 'resources_resourceGuid_unique' ON 'resources' ('resourceGuid');`
            ).run();
            db.prepare(`ALTER TABLE 'domains' ADD 'certResolver' text;`).run();
            db.prepare(
                `ALTER TABLE 'domains' ADD 'preferWildcardCert' integer;`
            ).run();
            db.prepare(
                `ALTER TABLE 'orgs' ADD 'requireTwoFactor' integer;`
            ).run();
            db.prepare(
                `ALTER TABLE 'orgs' ADD 'maxSessionLengthHours' integer;`
            ).run();
            db.prepare(
                `ALTER TABLE 'orgs' ADD 'passwordExpiryDays' integer;`
            ).run();
            // Retention defaults mirror the Postgres script: request logs 7
            // days; access/action retention disabled (0) by default.
            db.prepare(
                `ALTER TABLE 'orgs' ADD 'settingsLogRetentionDaysRequest' integer DEFAULT 7 NOT NULL;`
            ).run();
            db.prepare(
                `ALTER TABLE 'orgs' ADD 'settingsLogRetentionDaysAccess' integer DEFAULT 0 NOT NULL;`
            ).run();
            db.prepare(
                `ALTER TABLE 'orgs' ADD 'settingsLogRetentionDaysAction' integer DEFAULT 0 NOT NULL;`
            ).run();
            db.prepare(`ALTER TABLE 'orgs' DROP COLUMN 'settings';`).run();
            // Nullable: rows created before this migration have no issue time.
            db.prepare(
                `ALTER TABLE 'resourceSessions' ADD 'issuedAt' integer;`
            ).run();
            db.prepare(`ALTER TABLE 'session' ADD 'issuedAt' integer;`).run();
            db.prepare(
                `ALTER TABLE 'user' ADD 'lastPasswordChange' integer;`
            ).run();
        })();

        // NOTE(review): if the transaction throws, this line is skipped and
        // foreign_keys stays OFF for this connection — presumably harmless
        // because the process aborts, but confirm the connection is not reused.
        db.pragma("foreign_keys = ON");

        console.log(`Migrated database`);
    } catch (e) {
        console.log("Failed to migrate db:", e);
        throw e;
    }

    console.log(`${version} migration complete`);
}