Mirror of https://github.com/fosrl/pangolin.git (synced 2026-02-08 05:56:38 +00:00)
Merge branch 'dev' into audit-logs
@@ -81,6 +81,9 @@ export enum ActionsEnum {
    listClients = "listClients",
    getClient = "getClient",
    listOrgDomains = "listOrgDomains",
+   getDomain = "getDomain",
+   updateOrgDomain = "updateOrgDomain",
+   getDNSRecords = "getDNSRecords",
    createNewt = "createNewt",
    createIdp = "createIdp",
    updateIdp = "updateIdp",
@@ -19,7 +19,22 @@ export const domains = pgTable("domains", {
    type: varchar("type"), // "ns", "cname", "wildcard"
    verified: boolean("verified").notNull().default(false),
    failed: boolean("failed").notNull().default(false),
-   tries: integer("tries").notNull().default(0)
+   tries: integer("tries").notNull().default(0),
+   certResolver: varchar("certResolver"),
+   customCertResolver: varchar("customCertResolver"),
+   preferWildcardCert: boolean("preferWildcardCert")
});

+
+export const dnsRecords = pgTable("dnsRecords", {
+   id: varchar("id").primaryKey(),
+   domainId: varchar("domainId")
+       .notNull()
+       .references(() => domains.domainId, { onDelete: "cascade" }),
+   recordType: varchar("recordType").notNull(), // "NS" | "CNAME" | "A" | "TXT"
+   baseDomain: varchar("baseDomain"),
+   value: varchar("value").notNull(),
+   verified: boolean("verified").notNull().default(false),
+});
+
export const orgs = pgTable("orgs", {
@@ -111,7 +126,9 @@ export const resources = pgTable("resources", {
    skipToIdpId: integer("skipToIdpId").references(() => idp.idpId, {
        onDelete: "cascade"
    }),
-   headers: text("headers") // comma-separated list of headers to add to the request
+   headers: text("headers"), // comma-separated list of headers to add to the request
+   proxyProtocol: boolean("proxyProtocol").notNull().default(false),
+   proxyProtocolVersion: integer("proxyProtocolVersion").default(1)
});

export const targets = pgTable("targets", {
@@ -12,9 +12,24 @@ export const domains = sqliteTable("domains", {
    type: text("type"), // "ns", "cname", "wildcard"
    verified: integer("verified", { mode: "boolean" }).notNull().default(false),
    failed: integer("failed", { mode: "boolean" }).notNull().default(false),
-   tries: integer("tries").notNull().default(0)
+   tries: integer("tries").notNull().default(0),
+   certResolver: text("certResolver"),
+   preferWildcardCert: integer("preferWildcardCert", { mode: "boolean" })
});

+export const dnsRecords = sqliteTable("dnsRecords", {
+   id: text("id").primaryKey(),
+   domainId: text("domainId")
+       .notNull()
+       .references(() => domains.domainId, { onDelete: "cascade" }),
+
+   recordType: text("recordType").notNull(), // "NS" | "CNAME" | "A" | "TXT"
+   baseDomain: text("baseDomain"),
+   value: text("value").notNull(),
+   verified: integer("verified", { mode: "boolean" }).notNull().default(false),
+});
+
+
export const orgs = sqliteTable("orgs", {
    orgId: text("orgId").primaryKey(),
    name: text("name").notNull(),
@@ -123,7 +138,10 @@ export const resources = sqliteTable("resources", {
    skipToIdpId: integer("skipToIdpId").references(() => idp.idpId, {
        onDelete: "cascade"
    }),
-   headers: text("headers") // comma-separated list of headers to add to the request
+   headers: text("headers"), // comma-separated list of headers to add to the request
+   proxyProtocol: integer("proxyProtocol", { mode: "boolean" }).notNull().default(false),
+   proxyProtocolVersion: integer("proxyProtocolVersion").default(1)
+
});

export const targets = sqliteTable("targets", {
@@ -797,6 +815,7 @@ export type ResourceWhitelist = InferSelectModel<typeof resourceWhitelist>;
export type VersionMigration = InferSelectModel<typeof versionMigrations>;
export type ResourceRule = InferSelectModel<typeof resourceRules>;
export type Domain = InferSelectModel<typeof domains>;
+export type DnsRecord = InferSelectModel<typeof dnsRecords>;
export type Client = InferSelectModel<typeof clients>;
export type ClientSite = InferSelectModel<typeof clientSites>;
export type RoleClient = InferSelectModel<typeof roleClients>;
@@ -21,6 +21,7 @@ import { TraefikConfigManager } from "@server/lib/traefik/TraefikConfigManager";
import { initCleanup } from "#dynamic/cleanup";
import license from "#dynamic/license/license";
import { initLogCleanupInterval } from "@server/lib/cleanupLogs";
+import { fetchServerIp } from "@server/lib/serverIpService";

async function startServers() {
    await setHostMeta();
@@ -32,6 +33,8 @@ async function startServers() {

    await runSetupFunctions();

+   await fetchServerIp();
+
    initTelemetryClient();

    initLogCleanupInterval();
@@ -527,7 +527,7 @@ export async function updateProxyResources(
                    if (
                        existingRule.action !== getRuleAction(rule.action) ||
                        existingRule.match !== rule.match.toUpperCase() ||
-                       existingRule.value !== rule.value
+                       existingRule.value !== rule.value.toUpperCase()
                    ) {
                        validateRule(rule);
                        await trx
@@ -535,7 +535,7 @@ export async function updateProxyResources(
                            .set({
                                action: getRuleAction(rule.action),
                                match: rule.match.toUpperCase(),
-                               value: rule.value
+                               value: rule.value.toUpperCase(),
                            })
                            .where(
                                eq(resourceRules.ruleId, existingRule.ruleId)
@@ -547,7 +547,7 @@ export async function updateProxyResources(
                            resourceId: existingResource.resourceId,
                            action: getRuleAction(rule.action),
                            match: rule.match.toUpperCase(),
-                           value: rule.value,
+                           value: rule.value.toUpperCase(),
                            priority: index + 1 // start priorities at 1
                        });
                    }
@@ -705,7 +705,7 @@ export async function updateProxyResources(
                        resourceId: newResource.resourceId,
                        action: getRuleAction(rule.action),
                        match: rule.match.toUpperCase(),
-                       value: rule.value,
+                       value: rule.value.toUpperCase(),
                        priority: index + 1 // start priorities at 1
                    });
                }
@@ -275,24 +275,26 @@ export const ConfigSchema = z
                }
            )
            .refine(
-               // Enforce proxy-port uniqueness within proxy-resources
+               // Enforce proxy-port uniqueness within proxy-resources per protocol
                (config) => {
-                   const proxyPortMap = new Map<number, string[]>();
+                   const protocolPortMap = new Map<string, string[]>();

                    Object.entries(config["proxy-resources"]).forEach(
                        ([resourceKey, resource]) => {
                            const proxyPort = resource["proxy-port"];
-                           if (proxyPort !== undefined) {
-                               if (!proxyPortMap.has(proxyPort)) {
-                                   proxyPortMap.set(proxyPort, []);
+                           const protocol = resource.protocol;
+                           if (proxyPort !== undefined && protocol !== undefined) {
+                               const key = `${protocol}:${proxyPort}`;
+                               if (!protocolPortMap.has(key)) {
+                                   protocolPortMap.set(key, []);
                                }
-                               proxyPortMap.get(proxyPort)!.push(resourceKey);
+                               protocolPortMap.get(key)!.push(resourceKey);
                            }
                        }
                    );

                    // Find duplicates
-                   const duplicates = Array.from(proxyPortMap.entries()).filter(
+                   const duplicates = Array.from(protocolPortMap.entries()).filter(
                        ([_, resourceKeys]) => resourceKeys.length > 1
                    );
@@ -300,25 +302,29 @@ export const ConfigSchema = z
                },
                (config) => {
                    // Extract duplicates for error message
-                   const proxyPortMap = new Map<number, string[]>();
+                   const protocolPortMap = new Map<string, string[]>();

                    Object.entries(config["proxy-resources"]).forEach(
                        ([resourceKey, resource]) => {
                            const proxyPort = resource["proxy-port"];
-                           if (proxyPort !== undefined) {
-                               if (!proxyPortMap.has(proxyPort)) {
-                                   proxyPortMap.set(proxyPort, []);
+                           const protocol = resource.protocol;
+                           if (proxyPort !== undefined && protocol !== undefined) {
+                               const key = `${protocol}:${proxyPort}`;
+                               if (!protocolPortMap.has(key)) {
+                                   protocolPortMap.set(key, []);
                                }
-                               proxyPortMap.get(proxyPort)!.push(resourceKey);
+                               protocolPortMap.get(key)!.push(resourceKey);
                            }
                        }
                    );

-                   const duplicates = Array.from(proxyPortMap.entries())
+                   const duplicates = Array.from(protocolPortMap.entries())
                        .filter(([_, resourceKeys]) => resourceKeys.length > 1)
                        .map(
-                           ([proxyPort, resourceKeys]) =>
-                               `port ${proxyPort} used by proxy-resources: ${resourceKeys.join(", ")}`
+                           ([protocolPort, resourceKeys]) => {
+                               const [protocol, port] = protocolPort.split(':');
+                               return `${protocol.toUpperCase()} port ${port} used by proxy-resources: ${resourceKeys.join(", ")}`;
+                           }
                        )
                        .join("; ");
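The two hunks above key the uniqueness check on `${protocol}:${proxyPort}` instead of the bare port, so the same proxy-port may now be reused across protocols. A minimal standalone sketch of that keying (the resource shape here is illustrative, not the real ConfigSchema types):

    // Sketch: same port on different protocols passes, same protocol:port collides.
    type ProxyResource = { protocol?: string; "proxy-port"?: number };

    function findDuplicatePortKeys(resources: Record<string, ProxyResource>): string[] {
        const protocolPortMap = new Map<string, string[]>();
        for (const [key, resource] of Object.entries(resources)) {
            const proxyPort = resource["proxy-port"];
            const protocol = resource.protocol;
            if (proxyPort === undefined || protocol === undefined) continue;
            const mapKey = `${protocol}:${proxyPort}`;
            protocolPortMap.set(mapKey, [...(protocolPortMap.get(mapKey) ?? []), key]);
        }
        return Array.from(protocolPortMap.entries())
            .filter(([, keys]) => keys.length > 1)
            .map(([mapKey]) => mapKey);
    }

    // { a: tcp/8080, b: udp/8080 } -> []           (allowed after this change)
    // { a: tcp/8080, c: tcp/8080 } -> ["tcp:8080"] (still rejected)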
server/lib/serverIpService.ts (new file, 28 lines)
@@ -0,0 +1,28 @@
+import axios from "axios";
+
+let serverIp: string | null = null;
+
+const services = [
+    "https://ifconfig.io/ip",
+    "https://api.ipify.org",
+    "https://checkip.amazonaws.com"
+];
+
+export async function fetchServerIp() {
+    for (const url of services) {
+        try {
+            const response = await axios.get(url, { timeout: 5000 });
+            serverIp = response.data.trim();
+            console.log("Detected public IP:", serverIp);
+            return;
+        } catch (err: any) {
+            console.warn(`Failed to fetch server IP from ${url}: ${err.message || err.code}`);
+        }
+    }
+
+    console.error("All attempts to fetch server IP failed.");
+}
+
+export function getServerIp() {
+    return serverIp;
+}
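For context, a rough sketch of how this module is meant to be consumed: fetchServerIp() is awaited once at startup (see the startServers hunk above) and getServerIp() is read synchronously later, returning null if detection failed. The snippet below is illustrative only; the wrapper function name is made up.

    import { fetchServerIp, getServerIp } from "@server/lib/serverIpService";

    async function example() {
        await fetchServerIp();    // populates the in-memory value at boot
        const ip = getServerIp(); // string | null afterwards
        if (ip) {
            console.log(`A record values can be answered with ${ip}`);
        } else {
            console.log("Public IP unknown; stored record values are returned as-is");
        }
    }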
@@ -309,10 +309,7 @@ export class TraefikConfigManager {
            this.lastActiveDomains = new Set(domains);
        }

-       if (
-           process.env.USE_PANGOLIN_DNS === "true" &&
-           build != "oss"
-       ) {
+       if (process.env.USE_PANGOLIN_DNS === "true" && build != "oss") {
            // Scan current local certificate state
            this.lastLocalCertificateState =
                await this.scanLocalCertificateState();
@@ -450,7 +447,8 @@ export class TraefikConfigManager {
                currentExitNode,
                config.getRawConfig().traefik.site_types,
                build == "oss", // filter out the namespace domains in open source
-               build != "oss" // generate the login pages on the cloud and hybrid
+               build != "oss", // generate the login pages on the cloud and hybrid,
+               build == "saas" ? false : config.getRawConfig().traefik.allow_raw_resources // dont allow raw resources on saas otherwise use config
            );

            const domains = new Set<string>();
@@ -502,6 +500,25 @@ export class TraefikConfigManager {
                };
            }

+           // tcp:
+           //   serversTransports:
+           //     pp-transport-v1:
+           //       proxyProtocol:
+           //         version: 1
+           //     pp-transport-v2:
+           //       proxyProtocol:
+           //         version: 2
+
+           if (build != "saas") {
+               // add the serversTransports section if not present
+               if (traefikConfig.tcp && !traefikConfig.tcp.serversTransports) {
+                   traefikConfig.tcp.serversTransports = {
+                       "pp-transport-v1": { proxyProtocol: { version: 1 } },
+                       "pp-transport-v2": { proxyProtocol: { version: 2 } }
+                   };
+               }
+           }
+
            return { domains, traefikConfig };
        } catch (error) {
            // pull data out of the axios error to log
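The block above injects two static TCP serversTransports into the generated dynamic config; the service side (shown in the later getTraefikConfig hunks) then points at one of them via serversTransport when a resource has proxyProtocol enabled. A hand-written sketch of the resulting object shape, nesting taken from this diff; the service name and backend address are made up:

    const exampleDynamicConfig = {
        tcp: {
            serversTransports: {
                "pp-transport-v1": { proxyProtocol: { version: 1 } },
                "pp-transport-v2": { proxyProtocol: { version: 2 } }
            },
            services: {
                "example-tcp-service": {
                    loadBalancer: {
                        servers: [{ address: "10.0.0.5:5432" }],
                        // only set when resource.proxyProtocol is true and protocol is tcp
                        serversTransport: "pp-transport-v2"
                    }
                }
            }
        }
    };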
@@ -1,4 +1,4 @@
-import { db, targetHealthCheck } from "@server/db";
+import { db, targetHealthCheck, domains } from "@server/db";
import {
    and,
    eq,
@@ -23,7 +23,8 @@ export async function getTraefikConfig(
    exitNodeId: number,
    siteTypes: string[],
    filterOutNamespaceDomains = false,
-   generateLoginPageRouters = false
+   generateLoginPageRouters = false,
+   allowRawResources = true
): Promise<any> {
    // Define extended target type with site information
    type TargetWithSite = Target & {
@@ -56,6 +57,8 @@ export async function getTraefikConfig(
            setHostHeader: resources.setHostHeader,
            enableProxy: resources.enableProxy,
            headers: resources.headers,
+           proxyProtocol: resources.proxyProtocol,
+           proxyProtocolVersion: resources.proxyProtocolVersion,
            // Target fields
            targetId: targets.targetId,
            targetEnabled: targets.enabled,
@@ -75,11 +78,14 @@ export async function getTraefikConfig(
            siteType: sites.type,
            siteOnline: sites.online,
            subnet: sites.subnet,
-           exitNodeId: sites.exitNodeId
+           exitNodeId: sites.exitNodeId,
+           // Domain cert resolver fields
+           domainCertResolver: domains.certResolver
        })
        .from(sites)
        .innerJoin(targets, eq(targets.siteId, sites.siteId))
        .innerJoin(resources, eq(resources.resourceId, targets.resourceId))
+       .leftJoin(domains, eq(domains.domainId, resources.domainId))
        .leftJoin(
            targetHealthCheck,
            eq(targetHealthCheck.targetId, targets.targetId)
@@ -101,7 +107,7 @@ export async function getTraefikConfig(
                isNull(targetHealthCheck.hcHealth) // Include targets with no health check record
            ),
            inArray(sites.type, siteTypes),
-           config.getRawConfig().traefik.allow_raw_resources
+           allowRawResources
                ? isNotNull(resources.http) // ignore the http check if allow_raw_resources is true
                : eq(resources.http, true)
        )
@@ -164,11 +170,15 @@ export async function getTraefikConfig(
                enableProxy: row.enableProxy,
                targets: [],
                headers: row.headers,
+               proxyProtocol: row.proxyProtocol,
+               proxyProtocolVersion: row.proxyProtocolVersion ?? 1,
                path: row.path, // the targets will all have the same path
                pathMatchType: row.pathMatchType, // the targets will all have the same pathMatchType
                rewritePath: row.rewritePath,
                rewritePathType: row.rewritePathType,
-               priority: priority // may be null, we fallback later
+               priority: priority,
+               // Store domain cert resolver fields
+               domainCertResolver: row.domainCertResolver
            });
        }

@@ -247,30 +257,45 @@ export async function getTraefikConfig(
                wildCard = resource.fullDomain;
            }

-           const configDomain = config.getDomain(resource.domainId);
-
-           let certResolver: string, preferWildcardCert: boolean;
-           if (!configDomain) {
-               certResolver = config.getRawConfig().traefik.cert_resolver;
-               preferWildcardCert =
-                   config.getRawConfig().traefik.prefer_wildcard_cert;
-           } else {
-               certResolver = configDomain.cert_resolver;
-               preferWildcardCert = configDomain.prefer_wildcard_cert;
-           }
-
-           const tls = {
-               certResolver: certResolver,
-               ...(preferWildcardCert
-                   ? {
-                         domains: [
-                             {
-                                 main: wildCard
-                             }
-                         ]
-                     }
-                   : {})
-           };
+           const globalDefaultResolver =
+               config.getRawConfig().traefik.cert_resolver;
+           const globalDefaultPreferWildcard =
+               config.getRawConfig().traefik.prefer_wildcard_cert;
+
+           const domainCertResolver = resource.domainCertResolver;
+           const preferWildcardCert = resource.preferWildcardCert;
+
+           let resolverName: string | undefined;
+           let preferWildcard: boolean | undefined;
+           // Handle both letsencrypt & custom cases
+           if (domainCertResolver) {
+               resolverName = domainCertResolver.trim();
+           } else {
+               resolverName = globalDefaultResolver;
+           }
+
+           if (
+               preferWildcardCert !== undefined &&
+               preferWildcardCert !== null
+           ) {
+               preferWildcard = preferWildcardCert;
+           } else {
+               preferWildcard = globalDefaultPreferWildcard;
+           }
+
+           const tls = {
+               certResolver: resolverName,
+               ...(preferWildcard
+                   ? {
+                         domains: [
+                             {
+                                 main: wildCard
+                             }
+                         ]
+                     }
+                   : {})
+           };
+

            const additionalMiddlewares =
                config.getRawConfig().traefik.additional_middlewares || [];
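In plain terms, the hunk above resolves TLS settings per resource with this precedence: a per-domain certResolver stored on the domains row wins, otherwise the global traefik.cert_resolver from config; preferWildcardCert falls back the same way. A small hedged sketch of that fallback, with simplified standalone types rather than the real row shapes:

    interface TlsInputs {
        domainCertResolver?: string | null;  // from the domains table (per-domain override)
        preferWildcardCert?: boolean | null; // per-domain preference
        globalResolver: string;              // config traefik.cert_resolver
        globalPreferWildcard: boolean;       // config traefik.prefer_wildcard_cert
    }

    function resolveTls(i: TlsInputs, wildCard: string) {
        const certResolver = i.domainCertResolver?.trim() || i.globalResolver;
        const preferWildcard = i.preferWildcardCert ?? i.globalPreferWildcard;
        return {
            certResolver,
            ...(preferWildcard ? { domains: [{ main: wildCard }] } : {})
        };
    }

    // resolveTls({ domainCertResolver: "custom", preferWildcardCert: null,
    //              globalResolver: "letsencrypt", globalPreferWildcard: true }, "*.example.com")
    // -> { certResolver: "custom", domains: [{ main: "*.example.com" }] }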
@@ -509,14 +534,14 @@ export async function getTraefikConfig(
                    })(),
                    ...(resource.stickySession
                        ? {
                              sticky: {
                                  cookie: {
                                      name: "p_sticky", // TODO: make this configurable via config.yml like other cookies
                                      secure: resource.ssl,
                                      httpOnly: true
                                  }
                              }
                          }
                        : {})
                }
            };
@@ -615,15 +640,20 @@ export async function getTraefikConfig(
                            }
                        });
                    })(),
+                   ...(resource.proxyProtocol && protocol == "tcp"
+                       ? {
+                             serversTransport: `pp-transport-v${resource.proxyProtocolVersion || 1}`
+                         }
+                       : {}),
                    ...(resource.stickySession
                        ? {
                              sticky: {
                                  ipStrategy: {
                                      depth: 0,
                                      sourcePort: true
                                  }
                              }
                          }
                        : {})
                }
            };
@@ -15,6 +15,7 @@ import {
    certificates,
    db,
    domainNamespaces,
+   domains,
    exitNodes,
    loginPage,
    targetHealthCheck
@@ -50,7 +51,8 @@ export async function getTraefikConfig(
    exitNodeId: number,
    siteTypes: string[],
    filterOutNamespaceDomains = false,
-   generateLoginPageRouters = false
+   generateLoginPageRouters = false,
+   allowRawResources = true
): Promise<any> {
    // Define extended target type with site information
    type TargetWithSite = Target & {
@@ -104,11 +106,16 @@ export async function getTraefikConfig(
            subnet: sites.subnet,
            exitNodeId: sites.exitNodeId,
            // Namespace
-           domainNamespaceId: domainNamespaces.domainNamespaceId
+           domainNamespaceId: domainNamespaces.domainNamespaceId,
+           // Certificate
+           certificateStatus: certificates.status,
+           domainCertResolver: domains.certResolver,
        })
        .from(sites)
        .innerJoin(targets, eq(targets.siteId, sites.siteId))
        .innerJoin(resources, eq(resources.resourceId, targets.resourceId))
+       .leftJoin(certificates, eq(certificates.domainId, resources.domainId))
+       .leftJoin(domains, eq(domains.domainId, resources.domainId))
        .leftJoin(
            targetHealthCheck,
            eq(targetHealthCheck.targetId, targets.targetId)
@@ -135,7 +142,7 @@ export async function getTraefikConfig(
                isNull(targetHealthCheck.hcHealth) // Include targets with no health check record
            ),
            inArray(sites.type, siteTypes),
-           config.getRawConfig().traefik.allow_raw_resources
+           allowRawResources
                ? isNotNull(resources.http) // ignore the http check if allow_raw_resources is true
                : eq(resources.http, true)
        )
@@ -206,7 +213,8 @@ export async function getTraefikConfig(
                pathMatchType: row.pathMatchType, // the targets will all have the same pathMatchType
                rewritePath: row.rewritePath,
                rewritePathType: row.rewritePathType,
-               priority: priority // may be null, we fallback later
+               priority: priority, // may be null, we fallback later
+               domainCertResolver: row.domainCertResolver,
            });
        }

@@ -294,6 +302,20 @@ export async function getTraefikConfig(
            config_output.http.services = {};
        }

+           const domainParts = fullDomain.split(".");
+           let wildCard;
+           if (domainParts.length <= 2) {
+               wildCard = `*.${domainParts.join(".")}`;
+           } else {
+               wildCard = `*.${domainParts.slice(1).join(".")}`;
+           }
+
+           if (!resource.subdomain) {
+               wildCard = resource.fullDomain;
+           }
+
+           const configDomain = config.getDomain(resource.domainId);
+
            let tls = {};
            if (!privateConfig.getRawPrivateConfig().flags.use_pangolin_dns) {
                const domainParts = fullDomain.split(".");
@@ -324,13 +346,13 @@ export async function getTraefikConfig(
                    certResolver: certResolver,
                    ...(preferWildcardCert
                        ? {
-                             domains: [
-                                 {
-                                     main: wildCard
-                                 }
-                             ]
-                         }
-                       : {})
+                             domains: [
+                                 {
+                                     main: wildCard,
+                                 },
+                             ],
+                         }
+                       : {}),
                };
            } else {
                // find a cert that matches the full domain, if not continue
@@ -582,14 +604,14 @@ export async function getTraefikConfig(
                    })(),
                    ...(resource.stickySession
                        ? {
                              sticky: {
                                  cookie: {
                                      name: "p_sticky", // TODO: make this configurable via config.yml like other cookies
                                      secure: resource.ssl,
                                      httpOnly: true
                                  }
                              }
                          }
                        : {})
                }
            };
@@ -688,15 +710,20 @@ export async function getTraefikConfig(
                            }
                        });
                    })(),
+                   ...(resource.proxyProtocol && protocol == "tcp" // proxy protocol only works for tcp
+                       ? {
+                             serversTransport: `pp-transport-v${resource.proxyProtocolVersion || 1}`
+                         }
+                       : {}),
                    ...(resource.stickySession
                        ? {
                              sticky: {
                                  ipStrategy: {
                                      depth: 0,
                                      sourcePort: true
                                  }
                              }
                          }
                        : {})
                }
            };
@@ -744,10 +771,9 @@ export async function getTraefikConfig(
                    loadBalancer: {
                        servers: [
                            {
-                               url: `http://${
-                                   config.getRawConfig().server
+                               url: `http://${config.getRawConfig().server
                                    .internal_hostname
                                }:${config.getRawConfig().server.next_port}`
                            }
                        ]
                    }
@@ -272,7 +272,8 @@ hybridRouter.get(
        remoteExitNode.exitNodeId,
        ["newt", "local", "wireguard"], // Allow them to use all the site types
        true, // But don't allow domain namespace resources
-       false // Dont include login pages
+       false, // Dont include login pages,
+       true // allow raw resources
    );

    return response(res, {
@@ -911,9 +911,9 @@ async function checkRules(
    ) {
        return rule.action as any;
    } else if (
-       ipCC &&
-       rule.match == "GEOIP" &&
-       (await isIpInGeoIP(ipCC, rule.value))
+       clientIp &&
+       rule.match == "COUNTRY" &&
+       (await isIpInGeoIP(clientIp, rule.value))
    ) {
        return rule.action as any;
    }
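After this rename, a country rule is evaluated by passing the raw client IP (rather than a pre-resolved country code) to isIpInGeoIP together with the rule's value. A hypothetical rule object for illustration; the field names mirror the resourceRules usage elsewhere in this diff, and the values are made up:

    const exampleRule = {
        action: "DROP",
        match: "COUNTRY", // formerly "GEOIP"
        value: "RU",      // upper-cased by updateProxyResources before storage
        priority: 1,
        enabled: true
    };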
@@ -1,4 +1,4 @@
-import { db } from "@server/db";
+import { db, olms } from "@server/db";
import {
    clients,
    orgs,
@@ -16,6 +16,67 @@ import createHttpError from "http-errors";
import { z } from "zod";
import { fromError } from "zod-validation-error";
import { OpenAPITags, registry } from "@server/openApi";
+import NodeCache from "node-cache";
+import semver from "semver";
+
+const olmVersionCache = new NodeCache({ stdTTL: 3600 });
+
+async function getLatestOlmVersion(): Promise<string | null> {
+    try {
+        const cachedVersion = olmVersionCache.get<string>("latestOlmVersion");
+        if (cachedVersion) {
+            return cachedVersion;
+        }
+
+        const controller = new AbortController();
+        const timeoutId = setTimeout(() => controller.abort(), 1500);
+
+        const response = await fetch(
+            "https://api.github.com/repos/fosrl/olm/tags",
+            {
+                signal: controller.signal
+            }
+        );
+
+        clearTimeout(timeoutId);
+
+        if (!response.ok) {
+            logger.warn(
+                `Failed to fetch latest Olm version from GitHub: ${response.status} ${response.statusText}`
+            );
+            return null;
+        }
+
+        const tags = await response.json();
+        if (!Array.isArray(tags) || tags.length === 0) {
+            logger.warn("No tags found for Olm repository");
+            return null;
+        }
+
+        const latestVersion = tags[0].name;
+
+        olmVersionCache.set("latestOlmVersion", latestVersion);
+
+        return latestVersion;
+    } catch (error: any) {
+        if (error.name === "AbortError") {
+            logger.warn(
+                "Request to fetch latest Olm version timed out (1.5s)"
+            );
+        } else if (error.cause?.code === "UND_ERR_CONNECT_TIMEOUT") {
+            logger.warn(
+                "Connection timeout while fetching latest Olm version"
+            );
+        } else {
+            logger.warn(
+                "Error fetching latest Olm version:",
+                error.message || error
+            );
+        }
+        return null;
+    }
+}
+

const listClientsParamsSchema = z
    .object({
@@ -50,10 +111,12 @@ function queryClients(orgId: string, accessibleClientIds: number[]) {
            megabytesOut: clients.megabytesOut,
            orgName: orgs.name,
            type: clients.type,
-           online: clients.online
+           online: clients.online,
+           olmVersion: olms.version
        })
        .from(clients)
        .leftJoin(orgs, eq(clients.orgId, orgs.orgId))
+       .leftJoin(olms, eq(clients.clientId, olms.clientId))
        .where(
            and(
                inArray(clients.clientId, accessibleClientIds),
@@ -77,12 +140,20 @@ async function getSiteAssociations(clientIds: number[]) {
        .where(inArray(clientSites.clientId, clientIds));
}

+type OlmWithUpdateAvailable = Awaited<ReturnType<typeof queryClients>>[0] & {
+    olmUpdateAvailable?: boolean;
+};
+
+
export type ListClientsResponse = {
-   clients: Array<Awaited<ReturnType<typeof queryClients>>[0] & { sites: Array<{
-       siteId: number;
-       siteName: string | null;
-       siteNiceId: string | null;
-   }> }>;
+   clients: Array<Awaited<ReturnType<typeof queryClients>>[0] & {
+       sites: Array<{
+           siteId: number;
+           siteName: string | null;
+           siteNiceId: string | null;
+       }>
+       olmUpdateAvailable?: boolean;
+   }>;
    pagination: { total: number; limit: number; offset: number };
};

@@ -206,6 +277,43 @@ export async function listClients(
        sites: sitesByClient[client.clientId] || []
    }));

+   const latestOlVersionPromise = getLatestOlmVersion();
+
+   const olmsWithUpdates: OlmWithUpdateAvailable[] = clientsWithSites.map(
+       (client) => {
+           const OlmWithUpdate: OlmWithUpdateAvailable = { ...client };
+           // Initially set to false, will be updated if version check succeeds
+           OlmWithUpdate.olmUpdateAvailable = false;
+           return OlmWithUpdate;
+       }
+   );
+
+   // Try to get the latest version, but don't block if it fails
+   try {
+       const latestOlVersion = await latestOlVersionPromise;
+
+       if (latestOlVersion) {
+           olmsWithUpdates.forEach((client) => {
+               try {
+                   client.olmUpdateAvailable = semver.lt(
+                       client.olmVersion ? client.olmVersion : "",
+                       latestOlVersion
+                   );
+               } catch (error) {
+                   client.olmUpdateAvailable = false;
+               }
+
+           });
+       }
+   } catch (error) {
+       // Log the error but don't let it block the response
+       logger.warn(
+           "Failed to check for OLM updates, continuing without update info:",
+           error
+       );
+   }
+
+
    return response<ListClientsResponse>(res, {
        data: {
            clients: clientsWithSites,
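A note on the flag computed above: semver.lt throws on an invalid version (including the empty string used when olmVersion is null), and the inner try/catch then leaves olmUpdateAvailable at false. A tiny hedged sketch of that behaviour in isolation:

    import semver from "semver";

    function isUpdateAvailable(current: string | null, latest: string): boolean {
        try {
            return semver.lt(current ?? "", latest); // throws for "" or non-semver input
        } catch {
            return false;                            // unknown version -> no update flag
        }
    }

    // isUpdateAvailable("1.2.0", "1.3.0") -> true
    // isUpdateAvailable(null, "1.3.0")    -> false (invalid input swallowed)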
@@ -1,6 +1,6 @@
import { Request, Response, NextFunction } from "express";
import { z } from "zod";
-import { db, Domain, domains, OrgDomains, orgDomains } from "@server/db";
+import { db, Domain, domains, OrgDomains, orgDomains, dnsRecords } from "@server/db";
import response from "@server/lib/response";
import HttpCode from "@server/types/HttpCode";
import createHttpError from "http-errors";
@@ -24,16 +24,21 @@ const paramsSchema = z
const bodySchema = z
    .object({
        type: z.enum(["ns", "cname", "wildcard"]),
-       baseDomain: subdomainSchema
+       baseDomain: subdomainSchema,
+       certResolver: z.string().optional().nullable(),
+       preferWildcardCert: z.boolean().optional().nullable() // optional, only for wildcard
    })
    .strict();

+
export type CreateDomainResponse = {
    domainId: string;
    nsRecords?: string[];
    cnameRecords?: { baseDomain: string; value: string }[];
    aRecords?: { baseDomain: string; value: string }[];
    txtRecords?: { baseDomain: string; value: string }[];
+   certResolver?: string | null;
+   preferWildcardCert?: boolean | null;
};

// Helper to check if a domain is a subdomain or equal to another domain
@@ -71,7 +76,7 @@ export async function createOrgDomain(
        }

        const { orgId } = parsedParams.data;
-       const { type, baseDomain } = parsedBody.data;
+       const { type, baseDomain, certResolver, preferWildcardCert } = parsedBody.data;

        if (build == "oss") {
            if (type !== "wildcard") {
@@ -254,7 +259,9 @@ export async function createOrgDomain(
                    domainId,
                    baseDomain,
                    type,
-                   verified: type === "wildcard" ? true : false
+                   verified: type === "wildcard" ? true : false,
+                   certResolver: certResolver || null,
+                   preferWildcardCert: preferWildcardCert || false
                })
                .returning();

@@ -269,9 +276,24 @@ export async function createOrgDomain(
                })
                .returning();

+           // Prepare DNS records to insert
+           const recordsToInsert = [];
+
            // TODO: This needs to be cross region and not hardcoded
            if (type === "ns") {
                nsRecords = config.getRawConfig().dns.nameservers as string[];
+
+               // Save NS records to database
+               for (const nsValue of nsRecords) {
+                   recordsToInsert.push({
+                       id: generateId(15),
+                       domainId,
+                       recordType: "NS",
+                       baseDomain: baseDomain,
+                       value: nsValue,
+                       verified: false
+                   });
+               }
            } else if (type === "cname") {
                cnameRecords = [
                    {
@@ -283,6 +305,18 @@ export async function createOrgDomain(
                        baseDomain: `_acme-challenge.${baseDomain}`
                    }
                ];
+
+               // Save CNAME records to database
+               for (const cnameRecord of cnameRecords) {
+                   recordsToInsert.push({
+                       id: generateId(15),
+                       domainId,
+                       recordType: "CNAME",
+                       baseDomain: cnameRecord.baseDomain,
+                       value: cnameRecord.value,
+                       verified: false
+                   });
+               }
            } else if (type === "wildcard") {
                aRecords = [
                    {
@@ -294,6 +328,23 @@ export async function createOrgDomain(
                        baseDomain: `${baseDomain}`
                    }
                ];
+
+               // Save A records to database
+               for (const aRecord of aRecords) {
+                   recordsToInsert.push({
+                       id: generateId(15),
+                       domainId,
+                       recordType: "A",
+                       baseDomain: aRecord.baseDomain,
+                       value: aRecord.value,
+                       verified: true
+                   });
+               }
            }

+           // Insert all DNS records in batch
+           if (recordsToInsert.length > 0) {
+               await trx.insert(dnsRecords).values(recordsToInsert);
+           }
+
            numOrgDomains = await trx
@@ -325,7 +376,9 @@ export async function createOrgDomain(
                cnameRecords,
                txtRecords,
                nsRecords,
-               aRecords
+               aRecords,
+               certResolver: returned.certResolver,
+               preferWildcardCert: returned.preferWildcardCert
            },
            success: true,
            error: false,
server/routers/domain/getDNSRecords.ts (new file, 97 lines)
@@ -0,0 +1,97 @@
+import { Request, Response, NextFunction } from "express";
+import { z } from "zod";
+import { db, dnsRecords } from "@server/db";
+import { eq } from "drizzle-orm";
+import response from "@server/lib/response";
+import HttpCode from "@server/types/HttpCode";
+import createHttpError from "http-errors";
+import logger from "@server/logger";
+import { fromError } from "zod-validation-error";
+import { OpenAPITags, registry } from "@server/openApi";
+import { getServerIp } from "@server/lib/serverIpService"; // your in-memory IP module
+
+const getDNSRecordsSchema = z
+    .object({
+        domainId: z.string(),
+        orgId: z.string()
+    })
+    .strict();
+
+async function query(domainId: string) {
+    const records = await db
+        .select()
+        .from(dnsRecords)
+        .where(eq(dnsRecords.domainId, domainId));
+
+    return records;
+}
+
+export type GetDNSRecordsResponse = Awaited<ReturnType<typeof query>>;
+
+registry.registerPath({
+    method: "get",
+    path: "/org/{orgId}/domain/{domainId}/dns-records",
+    description: "Get all DNS records for a domain by domainId.",
+    tags: [OpenAPITags.Domain],
+    request: {
+        params: z.object({
+            domainId: z.string(),
+            orgId: z.string()
+        })
+    },
+    responses: {}
+});
+
+export async function getDNSRecords(
+    req: Request,
+    res: Response,
+    next: NextFunction
+): Promise<any> {
+    try {
+        const parsedParams = getDNSRecordsSchema.safeParse(req.params);
+        if (!parsedParams.success) {
+            return next(
+                createHttpError(
+                    HttpCode.BAD_REQUEST,
+                    fromError(parsedParams.error).toString()
+                )
+            );
+        }
+
+        const { domainId } = parsedParams.data;
+
+        const records = await query(domainId);
+
+        if (!records || records.length === 0) {
+            return next(
+                createHttpError(
+                    HttpCode.NOT_FOUND,
+                    "No DNS records found for this domain"
+                )
+            );
+        }
+
+        const serverIp = getServerIp();
+
+        // Override value for type A or wildcard records
+        const updatedRecords = records.map(record => {
+            if ((record.recordType === "A" || record.baseDomain === "*") && serverIp) {
+                return { ...record, value: serverIp };
+            }
+            return record;
+        });
+
+        return response<GetDNSRecordsResponse>(res, {
+            data: updatedRecords,
+            success: true,
+            error: false,
+            message: "DNS records retrieved successfully",
+            status: HttpCode.OK
+        });
+    } catch (error) {
+        logger.error(error);
+        return next(
+            createHttpError(HttpCode.INTERNAL_SERVER_ERROR, "An error occurred")
+        );
+    }
+}
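For reference, a hedged sketch of consuming the new endpoint from a client. The route shape comes from the registerPath and router hunks; the "/api/v1" prefix and the fetch wrapper are placeholders, not taken from this commit:

    // Hypothetical client-side call; adjust base URL and auth to the real API client.
    async function fetchDnsRecords(orgId: string, domainId: string) {
        const res = await fetch(`/api/v1/org/${orgId}/domain/${domainId}/dns-records`, {
            credentials: "include"
        });
        if (!res.ok) throw new Error(`Failed to load DNS records: ${res.status}`);
        const body = await res.json();
        // body.data is GetDNSRecordsResponse; A/wildcard values are rewritten to the
        // detected server IP when serverIpService has one cached.
        return body.data;
    }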
server/routers/domain/getDomain.ts (new file, 86 lines)
@@ -0,0 +1,86 @@
+import { Request, Response, NextFunction } from "express";
+import { z } from "zod";
+import { db, domains } from "@server/db";
+import { eq, and } from "drizzle-orm";
+import response from "@server/lib/response";
+import HttpCode from "@server/types/HttpCode";
+import createHttpError from "http-errors";
+import logger from "@server/logger";
+import { fromError } from "zod-validation-error";
+import { OpenAPITags, registry } from "@server/openApi";
+import { domain } from "zod/v4/core/regexes";
+
+const getDomainSchema = z
+    .object({
+        domainId: z
+            .string()
+            .optional(),
+        orgId: z.string().optional()
+    })
+    .strict();
+
+async function query(domainId?: string, orgId?: string) {
+    if (domainId) {
+        const [res] = await db
+            .select()
+            .from(domains)
+            .where(eq(domains.domainId, domainId))
+            .limit(1);
+        return res;
+    }
+}
+
+export type GetDomainResponse = NonNullable<Awaited<ReturnType<typeof query>>>;
+
+registry.registerPath({
+    method: "get",
+    path: "/org/{orgId}/domain/{domainId}",
+    description: "Get a domain by domainId.",
+    tags: [OpenAPITags.Domain],
+    request: {
+        params: z.object({
+            domainId: z.string(),
+            orgId: z.string()
+        })
+    },
+    responses: {}
+});
+
+export async function getDomain(
+    req: Request,
+    res: Response,
+    next: NextFunction
+): Promise<any> {
+    try {
+        const parsedParams = getDomainSchema.safeParse(req.params);
+        if (!parsedParams.success) {
+            return next(
+                createHttpError(
+                    HttpCode.BAD_REQUEST,
+                    fromError(parsedParams.error).toString()
+                )
+            );
+        }
+
+        const { orgId, domainId } = parsedParams.data;
+
+        const domain = await query(domainId, orgId);
+
+        if (!domain) {
+            return next(createHttpError(HttpCode.NOT_FOUND, "Domain not found"));
+        }
+
+        return response<GetDomainResponse>(res, {
+            data: domain,
+            success: true,
+            error: false,
+            message: "Domain retrieved successfully",
+            status: HttpCode.OK
+        });
+    } catch (error) {
+        logger.error(error);
+        return next(
+            createHttpError(HttpCode.INTERNAL_SERVER_ERROR, "An error occurred")
+        );
+    }
+}
@@ -1,4 +1,7 @@
export * from "./listDomains";
export * from "./createOrgDomain";
export * from "./deleteOrgDomain";
export * from "./restartOrgDomain";
+export * from "./getDomain";
+export * from "./getDNSRecords";
+export * from "./updateDomain";
@@ -42,7 +42,9 @@ async function queryDomains(orgId: string, limit: number, offset: number) {
            type: domains.type,
            failed: domains.failed,
            tries: domains.tries,
-           configManaged: domains.configManaged
+           configManaged: domains.configManaged,
+           certResolver: domains.certResolver,
+           preferWildcardCert: domains.preferWildcardCert
        })
        .from(orgDomains)
        .where(eq(orgDomains.orgId, orgId))
server/routers/domain/updateDomain.ts (new file, 161 lines)
@@ -0,0 +1,161 @@
+import { Request, Response, NextFunction } from "express";
+import { z } from "zod";
+import { db, domains, orgDomains } from "@server/db";
+import response from "@server/lib/response";
+import HttpCode from "@server/types/HttpCode";
+import createHttpError from "http-errors";
+import logger from "@server/logger";
+import { fromError } from "zod-validation-error";
+import { eq, and } from "drizzle-orm";
+import { OpenAPITags, registry } from "@server/openApi";
+
+const paramsSchema = z
+    .object({
+        orgId: z.string(),
+        domainId: z.string()
+    })
+    .strict();
+
+const bodySchema = z
+    .object({
+        certResolver: z.string().optional().nullable(),
+        preferWildcardCert: z.boolean().optional().nullable()
+    })
+    .strict();
+
+export type UpdateDomainResponse = {
+    domainId: string;
+    certResolver: string | null;
+    preferWildcardCert: boolean | null;
+};
+
+registry.registerPath({
+    method: "patch",
+    path: "/org/{orgId}/domain/{domainId}",
+    description: "Update a domain by domainId.",
+    tags: [OpenAPITags.Domain],
+    request: {
+        params: z.object({
+            domainId: z.string(),
+            orgId: z.string()
+        })
+    },
+    responses: {}
+});
+
+export async function updateOrgDomain(
+    req: Request,
+    res: Response,
+    next: NextFunction
+): Promise<any> {
+    try {
+        const parsedParams = paramsSchema.safeParse(req.params);
+        if (!parsedParams.success) {
+            return next(
+                createHttpError(
+                    HttpCode.BAD_REQUEST,
+                    fromError(parsedParams.error).toString()
+                )
+            );
+        }
+
+        const parsedBody = bodySchema.safeParse(req.body);
+        if (!parsedBody.success) {
+            return next(
+                createHttpError(
+                    HttpCode.BAD_REQUEST,
+                    fromError(parsedBody.error).toString()
+                )
+            );
+        }
+
+        const { orgId, domainId } = parsedParams.data;
+        const { certResolver, preferWildcardCert } = parsedBody.data;
+
+        const [orgDomain] = await db
+            .select()
+            .from(orgDomains)
+            .where(
+                and(
+                    eq(orgDomains.orgId, orgId),
+                    eq(orgDomains.domainId, domainId)
+                )
+            );
+
+        if (!orgDomain) {
+            return next(
+                createHttpError(
+                    HttpCode.NOT_FOUND,
+                    "Domain not found or does not belong to this organization"
+                )
+            );
+        }
+
+        const [existingDomain] = await db
+            .select()
+            .from(domains)
+            .where(eq(domains.domainId, domainId));
+
+        if (!existingDomain) {
+            return next(
+                createHttpError(HttpCode.NOT_FOUND, "Domain not found")
+            );
+        }
+
+        if (existingDomain.type !== "wildcard") {
+            return next(
+                createHttpError(
+                    HttpCode.BAD_REQUEST,
+                    "Domain settings can only be updated for wildcard domains"
+                )
+            );
+        }
+
+        const updateData: Partial<{
+            certResolver: string | null;
+            preferWildcardCert: boolean;
+        }> = {};
+
+        if (certResolver !== undefined) {
+            updateData.certResolver = certResolver;
+        }
+
+        if (preferWildcardCert !== undefined && preferWildcardCert !== null) {
+            updateData.preferWildcardCert = preferWildcardCert;
+        }
+
+        const [updatedDomain] = await db
+            .update(domains)
+            .set(updateData)
+            .where(eq(domains.domainId, domainId))
+            .returning();
+
+        if (!updatedDomain) {
+            return next(
+                createHttpError(
+                    HttpCode.INTERNAL_SERVER_ERROR,
+                    "Failed to update domain"
+                )
+            );
+        }
+
+        return response<UpdateDomainResponse>(res, {
+            data: {
+                domainId: updatedDomain.domainId,
+                certResolver: updatedDomain.certResolver,
+                preferWildcardCert: updatedDomain.preferWildcardCert
+            },
+            success: true,
+            error: false,
+            message: "Domain updated successfully",
+            status: HttpCode.OK
+        });
+    } catch (error) {
+        logger.error(error);
+        return next(
+            createHttpError(HttpCode.INTERNAL_SERVER_ERROR, "An error occurred")
+        );
+    }
+}
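And a matching hedged sketch for the PATCH route registered below: only certResolver and preferWildcardCert are accepted, and the handler rejects non-wildcard domains. The endpoint prefix is again a placeholder:

    async function updateDomainCertSettings(orgId: string, domainId: string) {
        const res = await fetch(`/api/v1/org/${orgId}/domain/${domainId}`, {
            method: "PATCH",
            headers: { "Content-Type": "application/json" },
            credentials: "include",
            body: JSON.stringify({
                certResolver: "letsencrypt", // or null to clear the override
                preferWildcardCert: true
            })
        });
        if (!res.ok) throw new Error(`Update failed: ${res.status}`);
        return (await res.json()).data; // UpdateDomainResponse
    }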
@@ -318,6 +318,27 @@ authenticated.get(
    domain.listDomains
);

+authenticated.get(
+    "/org/:orgId/domain/:domainId",
+    verifyOrgAccess,
+    verifyUserHasAction(ActionsEnum.getDomain),
+    domain.getDomain
+);
+
+authenticated.patch(
+    "/org/:orgId/domain/:domainId",
+    verifyOrgAccess,
+    verifyUserHasAction(ActionsEnum.updateOrgDomain),
+    domain.updateOrgDomain
+);
+
+authenticated.get(
+    "/org/:orgId/domain/:domainId/dns-records",
+    verifyOrgAccess,
+    verifyUserHasAction(ActionsEnum.getDNSRecords),
+    domain.getDNSRecords
+);
+
authenticated.get(
    "/org/:orgId/invitations",
    verifyOrgAccess,
@@ -18,7 +18,7 @@ import { OpenAPITags, registry } from "@server/openApi";
const createResourceRuleSchema = z
    .object({
        action: z.enum(["ACCEPT", "DROP", "PASS"]),
-       match: z.enum(["CIDR", "IP", "PATH", "GEOIP"]),
+       match: z.enum(["CIDR", "IP", "PATH", "COUNTRY"]),
        value: z.string().min(1),
        priority: z.number().int(),
        enabled: z.boolean().optional()
@@ -99,8 +99,9 @@ const updateRawResourceBodySchema = z
        name: z.string().min(1).max(255).optional(),
        proxyPort: z.number().int().min(1).max(65535).optional(),
        stickySession: z.boolean().optional(),
-       enabled: z.boolean().optional()
-       // enableProxy: z.boolean().optional() // always true now
+       enabled: z.boolean().optional(),
+       proxyProtocol: z.boolean().optional(),
+       proxyProtocolVersion: z.number().int().min(1).optional()
    })
    .strict()
    .refine((data) => Object.keys(data).length > 0, {
@@ -30,7 +30,7 @@ const updateResourceRuleParamsSchema = z
const updateResourceRuleSchema = z
    .object({
        action: z.enum(["ACCEPT", "DROP", "PASS"]).optional(),
-       match: z.enum(["CIDR", "IP", "PATH", "GEOIP"]).optional(),
+       match: z.enum(["CIDR", "IP", "PATH", "COUNTRY"]).optional(),
        value: z.string().min(1).optional(),
        priority: z.number().int(),
        enabled: z.boolean().optional()
@@ -21,7 +21,8 @@ export async function traefikConfigProvider(
        currentExitNodeId,
        config.getRawConfig().traefik.site_types,
        build == "oss", // filter out the namespace domains in open source
-       build != "oss" // generate the login pages on the cloud and hybrid
+       build != "oss", // generate the login pages on the cloud and and enterprise,
+       config.getRawConfig().traefik.allow_raw_resources
    );

    if (traefikConfig?.http?.middlewares) {
@@ -37,7 +37,9 @@ async function copyInDomains() {
    const configDomains = Object.entries(rawDomains).map(
        ([key, value]) => ({
            domainId: key,
-           baseDomain: value.base_domain.toLowerCase()
+           baseDomain: value.base_domain.toLowerCase(),
+           certResolver: value.cert_resolver || null,
+           preferWildcardCert: value.prefer_wildcard_cert || null,
        })
    );

@@ -59,11 +61,11 @@ async function copyInDomains() {
        }
    }

-   for (const { domainId, baseDomain } of configDomains) {
+   for (const { domainId, baseDomain, certResolver, preferWildcardCert } of configDomains) {
        if (existingDomainKeys.has(domainId)) {
            await trx
                .update(domains)
-               .set({ baseDomain, verified: true, type: "wildcard" })
+               .set({ baseDomain, verified: true, type: "wildcard", certResolver, preferWildcardCert })
                .where(eq(domains.domainId, domainId))
                .execute();
        } else {
@@ -74,7 +76,9 @@ async function copyInDomains() {
                    baseDomain,
                    configManaged: true,
                    type: "wildcard",
-                   verified: true
+                   verified: true,
+                   certResolver,
+                   preferWildcardCert
                })
                .execute();
        }
@@ -13,6 +13,7 @@ import m5 from "./scriptsPg/1.10.0";
import m6 from "./scriptsPg/1.10.2";
import m7 from "./scriptsPg/1.11.0";
import m8 from "./scriptsPg/1.11.1";
+import m9 from "./scriptsPg/1.11.2";

// THIS CANNOT IMPORT ANYTHING FROM THE SERVER
// EXCEPT FOR THE DATABASE AND THE SCHEMA
@@ -26,7 +27,8 @@ const migrations = [
    { version: "1.10.0", run: m5 },
    { version: "1.10.2", run: m6 },
    { version: "1.11.0", run: m7 },
-   { version: "1.11.1", run: m8 }
+   { version: "1.11.1", run: m8 },
+   { version: "1.11.2", run: m9 }
    // Add new migrations here as they are created
] as {
    version: string;
@@ -31,6 +31,7 @@ import m26 from "./scriptsSqlite/1.10.1";
import m27 from "./scriptsSqlite/1.10.2";
import m28 from "./scriptsSqlite/1.11.0";
import m29 from "./scriptsSqlite/1.11.1";
+import m30 from "./scriptsSqlite/1.11.2";

// THIS CANNOT IMPORT ANYTHING FROM THE SERVER
// EXCEPT FOR THE DATABASE AND THE SCHEMA
@@ -60,7 +61,8 @@ const migrations = [
    { version: "1.10.1", run: m26 },
    { version: "1.10.2", run: m27 },
    { version: "1.11.0", run: m28 },
-   { version: "1.11.1", run: m29 }
+   { version: "1.11.1", run: m29 },
+   { version: "1.11.2", run: m30 }
    // Add new migrations here as they are created
] as const;

server/setup/scriptsPg/1.11.2.ts (new file, 24 lines)
@@ -0,0 +1,24 @@
+import { db } from "@server/db/pg/driver";
+import { sql } from "drizzle-orm";
+
+const version = "1.11.2";
+
+export default async function migration() {
+    console.log(`Running setup script ${version}...`);
+
+    try {
+        await db.execute(sql`BEGIN`);
+
+        await db.execute(sql`UPDATE "resourceRules" SET "match" = "COUNTRY" WHERE "match" = "GEOIP"`);
+
+        await db.execute(sql`COMMIT`);
+        console.log(`Updated resource rules match value from GEOIP to COUNTRY`);
+    } catch (e) {
+        await db.execute(sql`ROLLBACK`);
+        console.log("Unable to update resource rules match value");
+        console.log(e);
+        throw e;
+    }
+
+    console.log(`${version} migration complete`);
+}
server/setup/scriptsSqlite/1.11.2.ts (new file, 18 lines)
@@ -0,0 +1,18 @@
+import { APP_PATH } from "@server/lib/consts";
+import Database from "better-sqlite3";
+import path from "path";
+
+const version = "1.11.2";
+
+export default async function migration() {
+    console.log(`Running setup script ${version}...`);
+
+    const location = path.join(APP_PATH, "db", "db.sqlite");
+    const db = new Database(location);
+
+    db.transaction(() => {
+        db.prepare(`UPDATE resourceRules SET match = "COUNTRY" WHERE match = "GEOIP"`).run();
+    })();
+
+    console.log(`${version} migration complete`);
+}