Mirror of https://github.com/fosrl/pangolin.git (synced 2026-02-17 02:16:38 +00:00)
Merge dev into fix/log-analytics-adjustments
@@ -11,8 +11,8 @@ import { db } from "@server/db";
import { OpenAPITags, registry } from "@server/openApi";

const deleteAccessTokenParamsSchema = z.strictObject({
accessTokenId: z.string()
});
accessTokenId: z.string()
});

registry.registerPath({
method: "delete",

@@ -25,17 +25,14 @@ import { sha256 } from "@oslojs/crypto/sha2";
import { OpenAPITags, registry } from "@server/openApi";

export const generateAccessTokenBodySchema = z.strictObject({
validForSeconds: z.int().positive().optional(), // seconds
title: z.string().optional(),
description: z.string().optional()
});
validForSeconds: z.int().positive().optional(), // seconds
title: z.string().optional(),
description: z.string().optional()
});

export const generateAccssTokenParamsSchema = z.strictObject({
resourceId: z
.string()
.transform(Number)
.pipe(z.int().positive())
});
resourceId: z.string().transform(Number).pipe(z.int().positive())
});

export type GenerateAccessTokenResponse = Omit<
ResourceAccessToken,

@@ -17,7 +17,8 @@ import stoi from "@server/lib/stoi";
import { fromZodError } from "zod-validation-error";
import { OpenAPITags, registry } from "@server/openApi";

const listAccessTokensParamsSchema = z.strictObject({
const listAccessTokensParamsSchema = z
.strictObject({
resourceId: z
.string()
.optional()

@@ -15,8 +15,8 @@ import logger from "@server/logger";
import { hashPassword } from "@server/auth/password";

const bodySchema = z.strictObject({
name: z.string().min(1).max(255)
});
name: z.string().min(1).max(255)
});

export type CreateRootApiKeyBody = z.infer<typeof bodySchema>;

@@ -47,8 +47,7 @@ export type ListApiKeyActionsResponse = {
registry.registerPath({
method: "get",
path: "/org/{orgId}/api-key/{apiKeyId}/actions",
description:
"List all actions set for an API key.",
description: "List all actions set for an API key.",
tags: [OpenAPITags.Org, OpenAPITags.ApiKey],
request: {
params: paramsSchema,
@@ -11,9 +11,10 @@ import { eq, and, inArray } from "drizzle-orm";
import { OpenAPITags, registry } from "@server/openApi";

const bodySchema = z.strictObject({
actionIds: z.tuple([z.string()], z.string())
.transform((v) => Array.from(new Set(v)))
});
actionIds: z
.tuple([z.string()], z.string())
.transform((v) => Array.from(new Set(v)))
});

const paramsSchema = z.object({
apiKeyId: z.string().nonempty()

@@ -10,9 +10,10 @@ import { fromError } from "zod-validation-error";
import { eq, and, inArray } from "drizzle-orm";

const bodySchema = z.strictObject({
orgIds: z.tuple([z.string()], z.string())
.transform((v) => Array.from(new Set(v)))
});
orgIds: z
.tuple([z.string()], z.string())
.transform((v) => Array.from(new Set(v)))
});

const paramsSchema = z.object({
apiKeyId: z.string().nonempty()

@@ -2,15 +2,17 @@ export function generateCSV(data: any[]): string {
if (data.length === 0) {
return "orgId,action,actorType,timestamp,actor\n";
}

const headers = Object.keys(data[0]).join(",");
const rows = data.map(row =>
Object.values(row).map(value =>
typeof value === 'string' && value.includes(',')
? `"${value.replace(/"/g, '""')}"`
: value
).join(",")
const rows = data.map((row) =>
Object.values(row)
.map((value) =>
typeof value === "string" && value.includes(",")
? `"${value.replace(/"/g, '""')}"`
: value
)
.join(",")
);

return [headers, ...rows].join("\n");
}
}

@@ -90,4 +90,4 @@ export type QueryAccessAuditLogResponse = {
}[];
locations: string[];
};
};
};
@@ -6,10 +6,7 @@ import { z } from "zod";
import { db } from "@server/db";
import { User, users } from "@server/db";
import { response } from "@server/lib/response";
import {
hashPassword,
verifyPassword
} from "@server/auth/password";
import { hashPassword, verifyPassword } from "@server/auth/password";
import { verifyTotpCode } from "@server/auth/totp";
import logger from "@server/logger";
import { unauthorized } from "@server/auth/unauthorizedResponse";

@@ -23,10 +20,10 @@ import ConfirmPasswordReset from "@server/emails/templates/NotifyResetPassword";
import config from "@server/lib/config";

export const changePasswordBody = z.strictObject({
oldPassword: z.string(),
newPassword: passwordSchema,
code: z.string().optional()
});
oldPassword: z.string(),
newPassword: passwordSchema,
code: z.string().optional()
});

export type ChangePasswordBody = z.infer<typeof changePasswordBody>;

@@ -62,12 +59,14 @@ async function invalidateAllSessionsExceptCurrent(
}

// Delete the user sessions (except current)
await trx.delete(sessions).where(
and(
eq(sessions.userId, userId),
ne(sessions.sessionId, currentSessionId)
)
);
await trx
.delete(sessions)
.where(
and(
eq(sessions.userId, userId),
ne(sessions.sessionId, currentSessionId)
)
);
});
} catch (e) {
logger.error("Failed to invalidate user sessions except current", e);

@@ -157,7 +156,10 @@ export async function changePassword(
.where(eq(users.userId, user.userId));

// Invalidate all sessions except the current one
await invalidateAllSessionsExceptCurrent(user.userId, req.session.sessionId);
await invalidateAllSessionsExceptCurrent(
user.userId,
req.session.sessionId
);

try {
const email = user.email!;
@@ -9,7 +9,7 @@ import logger from "@server/logger";

export const params = z.strictObject({
token: z.string(),
resourceId: z.string().transform(Number).pipe(z.int().positive()),
resourceId: z.string().transform(Number).pipe(z.int().positive())
});

export type CheckResourceSessionParams = z.infer<typeof params>;

@@ -21,7 +21,7 @@ export type CheckResourceSessionResponse = {
export async function checkResourceSession(
req: Request,
res: Response,
next: NextFunction,
next: NextFunction
): Promise<any> {
const parsedParams = params.safeParse(req.params);

@@ -29,8 +29,8 @@ export async function checkResourceSession(
return next(
createHttpError(
HttpCode.BAD_REQUEST,
fromError(parsedParams.error).toString(),
),
fromError(parsedParams.error).toString()
)
);
}

@@ -39,7 +39,7 @@ export async function checkResourceSession(
try {
const { resourceSession } = await validateResourceSessionToken(
token,
resourceId,
resourceId
);

let valid = false;

@@ -52,15 +52,15 @@ export async function checkResourceSession(
success: true,
error: false,
message: "Checked validity",
status: HttpCode.OK,
status: HttpCode.OK
});
} catch (e) {
logger.error(e);
return next(
createHttpError(
HttpCode.INTERNAL_SERVER_ERROR,
"Failed to reset password",
),
"Failed to reset password"
)
);
}
}
@@ -17,9 +17,9 @@ import { unauthorized } from "@server/auth/unauthorizedResponse";
import { UserType } from "@server/types/UserTypes";

export const disable2faBody = z.strictObject({
password: z.string(),
code: z.string().optional()
});
password: z.string(),
code: z.string().optional()
});

export type Disable2faBody = z.infer<typeof disable2faBody>;

@@ -56,7 +56,10 @@ export async function disable2fa(
}

try {
const validPassword = await verifyPassword(password, user.passwordHash!);
const validPassword = await verifyPassword(
password,
user.passwordHash!
);
if (!validPassword) {
return next(unauthorized());
}

@@ -16,4 +16,4 @@ export * from "./checkResourceSession";
export * from "./securityKey";
export * from "./startDeviceWebAuth";
export * from "./verifyDeviceWebAuth";
export * from "./pollDeviceWebAuth";
export * from "./pollDeviceWebAuth";

@@ -7,10 +7,7 @@ import logger from "@server/logger";
import { response } from "@server/lib/response";
import { db, deviceWebAuthCodes } from "@server/db";
import { eq, and, gt } from "drizzle-orm";
import {
createSession,
generateSessionToken
} from "@server/auth/sessions/app";
import { createSession, generateSessionToken } from "@server/auth/sessions/app";
import { encodeHexLowerCase } from "@oslojs/encoding";
import { sha256 } from "@oslojs/crypto/sha2";

@@ -22,9 +19,7 @@ export type PollDeviceWebAuthParams = z.infer<typeof paramsSchema>;

// Helper function to hash device code before querying database
function hashDeviceCode(code: string): string {
return encodeHexLowerCase(
sha256(new TextEncoder().encode(code))
);
return encodeHexLowerCase(sha256(new TextEncoder().encode(code)));
}

export type PollDeviceWebAuthResponse = {

@@ -127,7 +122,9 @@ export async function pollDeviceWebAuth(

// Check if userId is set (should be set when verified)
if (!deviceCode.userId) {
logger.error("Device code is verified but userId is missing", { codeId: deviceCode.codeId });
logger.error("Device code is verified but userId is missing", {
codeId: deviceCode.codeId
});
return next(
createHttpError(
HttpCode.INTERNAL_SERVER_ERROR,

@@ -165,4 +162,3 @@ export async function pollDeviceWebAuth(
);
}
}
@@ -18,8 +18,8 @@ import { hashPassword } from "@server/auth/password";
import { UserType } from "@server/types/UserTypes";

export const requestPasswordResetBody = z.strictObject({
email: z.email().toLowerCase()
});
email: z.email().toLowerCase()
});

export type RequestPasswordResetBody = z.infer<typeof requestPasswordResetBody>;

@@ -17,9 +17,9 @@ import { verifySession } from "@server/auth/sessions/verifySession";
import config from "@server/lib/config";

export const requestTotpSecretBody = z.strictObject({
password: z.string(),
email: z.email().optional()
});
password: z.string(),
email: z.email().optional()
});

export type RequestTotpSecretBody = z.infer<typeof requestTotpSecretBody>;

@@ -46,7 +46,8 @@ export async function requestTotpSecret(

const { password, email } = parsedBody.data;

const { user: sessionUser, session: existingSession } = await verifySession(req);
const { user: sessionUser, session: existingSession } =
await verifySession(req);

let user: User | null = sessionUser;
if (!existingSession) {

@@ -112,11 +113,7 @@ export async function requestTotpSecret(

const hex = crypto.getRandomValues(new Uint8Array(20));
const secret = encodeHex(hex);
const uri = createTOTPKeyURI(
appName,
user.email!,
hex
);
const uri = createTOTPKeyURI(appName, user.email!, hex);

await db
.update(users)

@@ -18,11 +18,11 @@ import { sendEmail } from "@server/emails";
import { passwordSchema } from "@server/auth/passwordSchema";

export const resetPasswordBody = z.strictObject({
email: z.email().toLowerCase(),
token: z.string(), // reset secret code
newPassword: passwordSchema,
code: z.string().optional() // 2fa code
});
email: z.email().toLowerCase(),
token: z.string(), // reset secret code
newPassword: passwordSchema,
code: z.string().optional() // 2fa code
});

export type ResetPasswordBody = z.infer<typeof resetPasswordBody>;
@@ -19,9 +19,7 @@ import type {
GenerateAuthenticationOptionsOpts,
AuthenticatorTransportFuture
} from "@simplewebauthn/server";
import {
isoBase64URL
} from '@simplewebauthn/server/helpers';
import { isoBase64URL } from "@simplewebauthn/server/helpers";
import config from "@server/lib/config";
import { UserType } from "@server/types/UserTypes";
import { verifyPassword } from "@server/auth/password";

@@ -30,10 +28,12 @@ import { verifyTotpCode } from "@server/auth/totp";

// The RP ID is the domain name of your application
const rpID = (() => {
const url = config.getRawConfig().app.dashboard_url ? new URL(config.getRawConfig().app.dashboard_url!) : undefined;
const url = config.getRawConfig().app.dashboard_url
? new URL(config.getRawConfig().app.dashboard_url!)
: undefined;
// For localhost, we must use 'localhost' without port
if (url?.hostname === 'localhost' || !url) {
return 'localhost';
if (url?.hostname === "localhost" || !url) {
return "localhost";
}
return url.hostname;
})();

@@ -46,25 +46,38 @@ const origin = config.getRawConfig().app.dashboard_url || "localhost";
// This supports clustered deployments and persists across server restarts

// Clean up expired challenges every 5 minutes
setInterval(async () => {
try {
const now = Date.now();
await db
.delete(webauthnChallenge)
.where(lt(webauthnChallenge.expiresAt, now));
// logger.debug("Cleaned up expired security key challenges");
} catch (error) {
logger.error("Failed to clean up expired security key challenges", error);
}
}, 5 * 60 * 1000);
setInterval(
async () => {
try {
const now = Date.now();
await db
.delete(webauthnChallenge)
.where(lt(webauthnChallenge.expiresAt, now));
// logger.debug("Cleaned up expired security key challenges");
} catch (error) {
logger.error(
"Failed to clean up expired security key challenges",
error
);
}
},
5 * 60 * 1000
);

// Helper functions for challenge management
async function storeChallenge(sessionId: string, challenge: string, securityKeyName?: string, userId?: string) {
const expiresAt = Date.now() + (5 * 60 * 1000); // 5 minutes

async function storeChallenge(
sessionId: string,
challenge: string,
securityKeyName?: string,
userId?: string
) {
const expiresAt = Date.now() + 5 * 60 * 1000; // 5 minutes

// Delete any existing challenge for this session
await db.delete(webauthnChallenge).where(eq(webauthnChallenge.sessionId, sessionId));

await db
.delete(webauthnChallenge)
.where(eq(webauthnChallenge.sessionId, sessionId));

// Insert new challenge
await db.insert(webauthnChallenge).values({
sessionId,
@@ -88,7 +101,9 @@ async function getChallenge(sessionId: string) {

// Check if expired
if (challengeData.expiresAt < Date.now()) {
await db.delete(webauthnChallenge).where(eq(webauthnChallenge.sessionId, sessionId));
await db
.delete(webauthnChallenge)
.where(eq(webauthnChallenge.sessionId, sessionId));
return null;
}

@@ -96,7 +111,9 @@ async function getChallenge(sessionId: string) {
}

async function clearChallenge(sessionId: string) {
await db.delete(webauthnChallenge).where(eq(webauthnChallenge.sessionId, sessionId));
await db
.delete(webauthnChallenge)
.where(eq(webauthnChallenge.sessionId, sessionId));
}

export const registerSecurityKeyBody = z.strictObject({

@@ -153,7 +170,10 @@ export async function startRegistration(

try {
// Verify password
const validPassword = await verifyPassword(password, user.passwordHash!);
const validPassword = await verifyPassword(
password,
user.passwordHash!
);
if (!validPassword) {
return next(unauthorized());
}

@@ -197,9 +217,11 @@ export async function startRegistration(
.from(securityKeys)
.where(eq(securityKeys.userId, user.userId));

const excludeCredentials = existingSecurityKeys.map(key => ({
const excludeCredentials = existingSecurityKeys.map((key) => ({
id: key.credentialId,
transports: key.transports ? JSON.parse(key.transports) as AuthenticatorTransportFuture[] : undefined
transports: key.transports
? (JSON.parse(key.transports) as AuthenticatorTransportFuture[])
: undefined
}));

const options: GenerateRegistrationOptionsOpts = {

@@ -207,18 +229,23 @@ export async function startRegistration(
rpID,
userID: isoBase64URL.toBuffer(user.userId),
userName: user.email || user.username,
attestationType: 'none',
attestationType: "none",
excludeCredentials,
authenticatorSelection: {
residentKey: 'preferred',
userVerification: 'preferred',
residentKey: "preferred",
userVerification: "preferred"
}
};

const registrationOptions = await generateRegistrationOptions(options);

// Store challenge in database
await storeChallenge(req.session.sessionId, registrationOptions.challenge, name, user.userId);
await storeChallenge(
req.session.sessionId,
registrationOptions.challenge,
name,
user.userId
);

return response<typeof registrationOptions>(res, {
data: registrationOptions,
@@ -270,7 +297,7 @@ export async function verifyRegistration(
try {
// Get challenge from database
const challengeData = await getChallenge(req.session.sessionId);

if (!challengeData) {
return next(
createHttpError(

@@ -292,10 +319,7 @@ export async function verifyRegistration(

if (!verified || !registrationInfo) {
return next(
createHttpError(
HttpCode.BAD_REQUEST,
"Verification failed"
)
createHttpError(HttpCode.BAD_REQUEST, "Verification failed")
);
}

@@ -303,9 +327,13 @@ export async function verifyRegistration(
await db.insert(securityKeys).values({
credentialId: registrationInfo.credential.id,
userId: user.userId,
publicKey: isoBase64URL.fromBuffer(registrationInfo.credential.publicKey),
publicKey: isoBase64URL.fromBuffer(
registrationInfo.credential.publicKey
),
signCount: registrationInfo.credential.counter || 0,
transports: registrationInfo.credential.transports ? JSON.stringify(registrationInfo.credential.transports) : null,
transports: registrationInfo.credential.transports
? JSON.stringify(registrationInfo.credential.transports)
: null,
name: challengeData.securityKeyName,
lastUsed: new Date().toISOString(),
dateCreated: new Date().toISOString()

@@ -407,7 +435,10 @@ export async function deleteSecurityKey(

try {
// Verify password
const validPassword = await verifyPassword(password, user.passwordHash!);
const validPassword = await verifyPassword(
password,
user.passwordHash!
);
if (!validPassword) {
return next(unauthorized());
}

@@ -447,10 +478,12 @@ export async function deleteSecurityKey(

await db
.delete(securityKeys)
.where(and(
eq(securityKeys.credentialId, credentialId),
eq(securityKeys.userId, user.userId)
));
.where(
and(
eq(securityKeys.credentialId, credentialId),
eq(securityKeys.userId, user.userId)
)
);

return response<null>(res, {
data: null,
@@ -502,10 +535,7 @@ export async function startAuthentication(

if (!user || user.type !== UserType.Internal) {
return next(
createHttpError(
HttpCode.BAD_REQUEST,
"Invalid credentials"
)
createHttpError(HttpCode.BAD_REQUEST, "Invalid credentials")
);
}

@@ -525,25 +555,37 @@ export async function startAuthentication(
);
}

allowCredentials = userSecurityKeys.map(key => ({
allowCredentials = userSecurityKeys.map((key) => ({
id: key.credentialId,
transports: key.transports ? JSON.parse(key.transports) as AuthenticatorTransportFuture[] : undefined
transports: key.transports
? (JSON.parse(
key.transports
) as AuthenticatorTransportFuture[])
: undefined
}));
}

const options: GenerateAuthenticationOptionsOpts = {
rpID,
allowCredentials,
userVerification: 'preferred',
userVerification: "preferred"
};

const authenticationOptions = await generateAuthenticationOptions(options);
const authenticationOptions =
await generateAuthenticationOptions(options);

// Generate a temporary session ID for unauthenticated users
const tempSessionId = email ? `temp_${email}_${Date.now()}` : `temp_${Date.now()}`;
const tempSessionId = email
? `temp_${email}_${Date.now()}`
: `temp_${Date.now()}`;

// Store challenge in database
await storeChallenge(tempSessionId, authenticationOptions.challenge, undefined, userId);
await storeChallenge(
tempSessionId,
authenticationOptions.challenge,
undefined,
userId
);

return response(res, {
data: { ...authenticationOptions, tempSessionId },

@@ -580,7 +622,7 @@ export async function verifyAuthentication(
}

const { credential } = parsedBody.data;
const tempSessionId = req.headers['x-temp-session-id'] as string;
const tempSessionId = req.headers["x-temp-session-id"] as string;

if (!tempSessionId) {
return next(

@@ -594,7 +636,7 @@ export async function verifyAuthentication(
try {
// Get challenge from database
const challengeData = await getChallenge(tempSessionId);

if (!challengeData) {
return next(
createHttpError(

@@ -646,7 +688,11 @@ export async function verifyAuthentication(
id: securityKey.credentialId,
publicKey: isoBase64URL.toBuffer(securityKey.publicKey),
counter: securityKey.signCount,
transports: securityKey.transports ? JSON.parse(securityKey.transports) as AuthenticatorTransportFuture[] : undefined
transports: securityKey.transports
? (JSON.parse(
securityKey.transports
) as AuthenticatorTransportFuture[])
: undefined
},
requireUserVerification: false
});
@@ -672,7 +718,8 @@ export async function verifyAuthentication(
.where(eq(securityKeys.credentialId, credentialId));

// Create session for the user
const { createSession, generateSessionToken, serializeSessionCookie } = await import("@server/auth/sessions/app");
const { createSession, generateSessionToken, serializeSessionCookie } =
await import("@server/auth/sessions/app");
const token = generateSessionToken();
const session = await createSession(token, user.userId);
const isSecure = req.protocol === "https";

@@ -703,4 +750,4 @@ export async function verifyAuthentication(
)
);
}
}
}

@@ -56,8 +56,14 @@ export async function signup(
);
}

const { email, password, inviteToken, inviteId, termsAcceptedTimestamp, marketingEmailConsent } =
parsedBody.data;
const {
email,
password,
inviteToken,
inviteId,
termsAcceptedTimestamp,
marketingEmailConsent
} = parsedBody.data;

const passwordHash = await hashPassword(password);
const userId = generateId(15);

@@ -222,7 +228,9 @@ export async function signup(
);
res.appendHeader("Set-Cookie", cookie);
if (build == "saas" && marketingEmailConsent) {
logger.debug(`User ${email} opted in to marketing emails during signup.`);
logger.debug(
`User ${email} opted in to marketing emails during signup.`
);
moveEmailToAudience(email, AudienceIds.SignUps);
}
@@ -13,10 +13,12 @@ import { maxmindLookup } from "@server/db/maxmind";
import { encodeHexLowerCase } from "@oslojs/encoding";
import { sha256 } from "@oslojs/crypto/sha2";

const bodySchema = z.object({
deviceName: z.string().optional(),
applicationName: z.string().min(1, "Application name is required")
}).strict();
const bodySchema = z
.object({
deviceName: z.string().optional(),
applicationName: z.string().min(1, "Application name is required")
})
.strict();

export type StartDeviceWebAuthBody = z.infer<typeof bodySchema>;

@@ -34,14 +36,12 @@ function generateDeviceCode(): string {

// Helper function to hash device code before storing in database
function hashDeviceCode(code: string): string {
return encodeHexLowerCase(
sha256(new TextEncoder().encode(code))
);
return encodeHexLowerCase(sha256(new TextEncoder().encode(code)));
}

// Helper function to extract IP from request
function extractIpFromRequest(req: Request): string | undefined {
const ip = req.ip || req.socket.remoteAddress;
const ip = req.ip;
if (!ip) {
return undefined;
}

@@ -75,10 +75,10 @@ async function getCityFromIp(ip: string): Promise<string | undefined> {
return undefined;
}

// MaxMind CountryResponse doesn't include city by default
// If city data is available, it would be in result.city?.names?.en
// But since we're using CountryResponse type, we'll just return undefined
// The user said "don't do this if not easy", so we'll skip city for now
if (result.country) {
return result.country.names?.en || result.country.iso_code;
}

return undefined;
} catch (error) {
logger.debug("Failed to get city from IP", error);

@@ -5,4 +5,4 @@ export type TransferSessionResponse = {

export type GetSessionTransferTokenRenponse = {
token: string;
};
};
@@ -9,8 +9,8 @@ import logger from "@server/logger";
import { fromError } from "zod-validation-error";

const validateSetupTokenSchema = z.strictObject({
token: z.string().min(1, "Token is required")
});
token: z.string().min(1, "Token is required")
});

export type ValidateSetupTokenResponse = {
valid: boolean;

@@ -41,10 +41,7 @@ export async function validateSetupToken(
.select()
.from(setupTokens)
.where(
and(
eq(setupTokens.token, token),
eq(setupTokens.used, false)
)
and(eq(setupTokens.token, token), eq(setupTokens.used, false))
);

if (!setupToken) {

@@ -79,4 +76,4 @@ export async function validateSetupToken(
)
);
}
}
}

@@ -14,8 +14,8 @@ import { freeLimitSet, limitsService } from "@server/lib/billing";
import { build } from "@server/build";

export const verifyEmailBody = z.strictObject({
code: z.string()
});
code: z.string()
});

export type VerifyEmailBody = z.infer<typeof verifyEmailBody>;

@@ -19,10 +19,10 @@ import { verifySession } from "@server/auth/sessions/verifySession";
import { unauthorized } from "@server/auth/unauthorizedResponse";

export const verifyTotpBody = z.strictObject({
email: z.email().optional(),
password: z.string().optional(),
code: z.string()
});
email: z.email().optional(),
password: z.string().optional(),
code: z.string()
});

export type VerifyTotpBody = z.infer<typeof verifyTotpBody>;
@@ -12,7 +12,10 @@ import {
serializeResourceSessionCookie,
validateResourceSessionToken
} from "@server/auth/sessions/resource";
import { generateSessionToken, SESSION_COOKIE_EXPIRES } from "@server/auth/sessions/app";
import {
generateSessionToken,
SESSION_COOKIE_EXPIRES
} from "@server/auth/sessions/app";
import { SESSION_COOKIE_EXPIRES as RESOURCE_SESSION_COOKIE_EXPIRES } from "@server/auth/sessions/resource";
import config from "@server/lib/config";
import { response } from "@server/lib/response";

@@ -55,8 +58,8 @@ export async function exchangeSession(
let cleanHost = host;
// if the host ends with :port
if (cleanHost.match(/:[0-9]{1,5}$/)) {
const matched = ''+cleanHost.match(/:[0-9]{1,5}$/);
cleanHost = cleanHost.slice(0, -1*matched.length);
const matched = "" + cleanHost.match(/:[0-9]{1,5}$/);
cleanHost = cleanHost.slice(0, -1 * matched.length);
}

const clientIp = requestIp?.split(":")[0];

@@ -153,8 +156,8 @@ export async function exchangeSession(
}
} else {
const expires = new Date(
Date.now() + SESSION_COOKIE_EXPIRES
).getTime();
Date.now() + SESSION_COOKIE_EXPIRES
).getTime();
await createResourceSession({
token,
resourceId: resource.resourceId,

@@ -1,13 +1,11 @@
import { assertEquals } from '@test/assert';
import { assertEquals } from "@test/assert";

function isPathAllowed(pattern: string, path: string): boolean {

// Normalize and split paths into segments
const normalize = (p: string) => p.split("/").filter(Boolean);
const patternParts = normalize(pattern);
const pathParts = normalize(path);

// Recursive function to try different wildcard matches
function matchSegments(patternIndex: number, pathIndex: number): boolean {
const indent = " ".repeat(pathIndex); // Indent based on recursion depth

@@ -30,7 +28,6 @@ function isPathAllowed(pattern: string, path: string): boolean {

// For full segment wildcards, try consuming different numbers of path segments
if (currentPatternPart === "*") {

// Try consuming 0 segments (skip the wildcard)
if (matchSegments(patternIndex + 1, pathIndex)) {
return true;
@@ -74,69 +71,213 @@ function isPathAllowed(pattern: string, path: string): boolean {
}

function runTests() {
console.log('Running path matching tests...');
console.log("Running path matching tests...");

// Test exact matching
assertEquals(isPathAllowed('foo', 'foo'), true, 'Exact match should be allowed');
assertEquals(isPathAllowed('foo', 'bar'), false, 'Different segments should not match');
assertEquals(isPathAllowed('foo/bar', 'foo/bar'), true, 'Exact multi-segment match should be allowed');
assertEquals(isPathAllowed('foo/bar', 'foo/baz'), false, 'Partial multi-segment match should not be allowed');
assertEquals(
isPathAllowed("foo", "foo"),
true,
"Exact match should be allowed"
);
assertEquals(
isPathAllowed("foo", "bar"),
false,
"Different segments should not match"
);
assertEquals(
isPathAllowed("foo/bar", "foo/bar"),
true,
"Exact multi-segment match should be allowed"
);
assertEquals(
isPathAllowed("foo/bar", "foo/baz"),
false,
"Partial multi-segment match should not be allowed"
);

// Test with leading and trailing slashes
assertEquals(isPathAllowed('/foo', 'foo'), true, 'Pattern with leading slash should match');
assertEquals(isPathAllowed('foo/', 'foo'), true, 'Pattern with trailing slash should match');
assertEquals(isPathAllowed('/foo/', 'foo'), true, 'Pattern with both leading and trailing slashes should match');
assertEquals(isPathAllowed('foo', '/foo/'), true, 'Path with leading and trailing slashes should match');
assertEquals(
isPathAllowed("/foo", "foo"),
true,
"Pattern with leading slash should match"
);
assertEquals(
isPathAllowed("foo/", "foo"),
true,
"Pattern with trailing slash should match"
);
assertEquals(
isPathAllowed("/foo/", "foo"),
true,
"Pattern with both leading and trailing slashes should match"
);
assertEquals(
isPathAllowed("foo", "/foo/"),
true,
"Path with leading and trailing slashes should match"
);

// Test simple wildcard matching
assertEquals(isPathAllowed('*', 'foo'), true, 'Single wildcard should match any single segment');
assertEquals(isPathAllowed('*', 'foo/bar'), true, 'Single wildcard should match multiple segments');
assertEquals(isPathAllowed('*/bar', 'foo/bar'), true, 'Wildcard prefix should match');
assertEquals(isPathAllowed('foo/*', 'foo/bar'), true, 'Wildcard suffix should match');
assertEquals(isPathAllowed('foo/*/baz', 'foo/bar/baz'), true, 'Wildcard in middle should match');
assertEquals(
isPathAllowed("*", "foo"),
true,
"Single wildcard should match any single segment"
);
assertEquals(
isPathAllowed("*", "foo/bar"),
true,
"Single wildcard should match multiple segments"
);
assertEquals(
isPathAllowed("*/bar", "foo/bar"),
true,
"Wildcard prefix should match"
);
assertEquals(
isPathAllowed("foo/*", "foo/bar"),
true,
"Wildcard suffix should match"
);
assertEquals(
isPathAllowed("foo/*/baz", "foo/bar/baz"),
true,
"Wildcard in middle should match"
);

// Test multiple wildcards
assertEquals(isPathAllowed('*/*', 'foo/bar'), true, 'Multiple wildcards should match corresponding segments');
assertEquals(isPathAllowed('*/*/*', 'foo/bar/baz'), true, 'Three wildcards should match three segments');
assertEquals(isPathAllowed('foo/*/*', 'foo/bar/baz'), true, 'Specific prefix with wildcards should match');
assertEquals(isPathAllowed('*/*/baz', 'foo/bar/baz'), true, 'Wildcards with specific suffix should match');
assertEquals(
isPathAllowed("*/*", "foo/bar"),
true,
"Multiple wildcards should match corresponding segments"
);
assertEquals(
isPathAllowed("*/*/*", "foo/bar/baz"),
true,
"Three wildcards should match three segments"
);
assertEquals(
isPathAllowed("foo/*/*", "foo/bar/baz"),
true,
"Specific prefix with wildcards should match"
);
assertEquals(
isPathAllowed("*/*/baz", "foo/bar/baz"),
true,
"Wildcards with specific suffix should match"
);
// Test wildcard consumption behavior
assertEquals(isPathAllowed('*', ''), true, 'Wildcard should optionally consume segments');
assertEquals(isPathAllowed('foo/*', 'foo'), true, 'Trailing wildcard should be optional');
assertEquals(isPathAllowed('*/*', 'foo'), true, 'Multiple wildcards can match fewer segments');
assertEquals(isPathAllowed('*/*/*', 'foo/bar'), true, 'Extra wildcards can be skipped');
assertEquals(
isPathAllowed("*", ""),
true,
"Wildcard should optionally consume segments"
);
assertEquals(
isPathAllowed("foo/*", "foo"),
true,
"Trailing wildcard should be optional"
);
assertEquals(
isPathAllowed("*/*", "foo"),
true,
"Multiple wildcards can match fewer segments"
);
assertEquals(
isPathAllowed("*/*/*", "foo/bar"),
true,
"Extra wildcards can be skipped"
);

// Test complex nested paths
assertEquals(isPathAllowed('api/*/users', 'api/v1/users'), true, 'API versioning pattern should match');
assertEquals(isPathAllowed('api/*/users/*', 'api/v1/users/123'), true, 'API resource pattern should match');
assertEquals(isPathAllowed('api/*/users/*/profile', 'api/v1/users/123/profile'), true, 'Nested API pattern should match');
assertEquals(
isPathAllowed("api/*/users", "api/v1/users"),
true,
"API versioning pattern should match"
);
assertEquals(
isPathAllowed("api/*/users/*", "api/v1/users/123"),
true,
"API resource pattern should match"
);
assertEquals(
isPathAllowed("api/*/users/*/profile", "api/v1/users/123/profile"),
true,
"Nested API pattern should match"
);

// Test for the requested padbootstrap* pattern
assertEquals(isPathAllowed('padbootstrap*', 'padbootstrap'), true, 'padbootstrap* should match padbootstrap');
assertEquals(isPathAllowed('padbootstrap*', 'padbootstrapv1'), true, 'padbootstrap* should match padbootstrapv1');
assertEquals(isPathAllowed('padbootstrap*', 'padbootstrap/files'), false, 'padbootstrap* should not match padbootstrap/files');
assertEquals(isPathAllowed('padbootstrap*/*', 'padbootstrap/files'), true, 'padbootstrap*/* should match padbootstrap/files');
assertEquals(isPathAllowed('padbootstrap*/files', 'padbootstrapv1/files'), true, 'padbootstrap*/files should not match padbootstrapv1/files (wildcard is segment-based, not partial)');
assertEquals(
isPathAllowed("padbootstrap*", "padbootstrap"),
true,
"padbootstrap* should match padbootstrap"
);
assertEquals(
isPathAllowed("padbootstrap*", "padbootstrapv1"),
true,
"padbootstrap* should match padbootstrapv1"
);
assertEquals(
isPathAllowed("padbootstrap*", "padbootstrap/files"),
false,
"padbootstrap* should not match padbootstrap/files"
);
assertEquals(
isPathAllowed("padbootstrap*/*", "padbootstrap/files"),
true,
"padbootstrap*/* should match padbootstrap/files"
);
assertEquals(
isPathAllowed("padbootstrap*/files", "padbootstrapv1/files"),
true,
"padbootstrap*/files should not match padbootstrapv1/files (wildcard is segment-based, not partial)"
);

// Test wildcard edge cases
assertEquals(isPathAllowed('*/*/*/*/*/*', 'a/b'), true, 'Many wildcards can match few segments');
assertEquals(isPathAllowed('a/*/b/*/c', 'a/anything/b/something/c'), true, 'Multiple wildcards in pattern should match corresponding segments');
assertEquals(
isPathAllowed("*/*/*/*/*/*", "a/b"),
true,
"Many wildcards can match few segments"
);
assertEquals(
isPathAllowed("a/*/b/*/c", "a/anything/b/something/c"),
true,
"Multiple wildcards in pattern should match corresponding segments"
);

// Test patterns with partial segment matches
assertEquals(isPathAllowed('padbootstrap*', 'padbootstrap-123'), true, 'Wildcards in isPathAllowed should be segment-based, not character-based');
assertEquals(isPathAllowed('test*', 'testuser'), true, 'Asterisk as part of segment name is treated as a literal, not a wildcard');
assertEquals(isPathAllowed('my*app', 'myapp'), true, 'Asterisk in middle of segment name is treated as a literal, not a wildcard');
assertEquals(
isPathAllowed("padbootstrap*", "padbootstrap-123"),
true,
"Wildcards in isPathAllowed should be segment-based, not character-based"
);
assertEquals(
isPathAllowed("test*", "testuser"),
true,
"Asterisk as part of segment name is treated as a literal, not a wildcard"
);
assertEquals(
isPathAllowed("my*app", "myapp"),
true,
"Asterisk in middle of segment name is treated as a literal, not a wildcard"
);

assertEquals(isPathAllowed('/', '/'), true, 'Root path should match root path');
assertEquals(isPathAllowed('/', '/test'), false, 'Root path should not match non-root path');
assertEquals(
isPathAllowed("/", "/"),
true,
"Root path should match root path"
);
assertEquals(
isPathAllowed("/", "/test"),
false,
"Root path should not match non-root path"
);

console.log('All tests passed!');
console.log("All tests passed!");
}

// Run all tests
try {
runTests();
} catch (error) {
console.error('Test failed:', error);
console.error("Test failed:", error);
}
@@ -14,4 +14,3 @@ export type GetOrgTierResponse = {
tier: string | null;
active: boolean;
};

@@ -11,4 +11,4 @@ export async function billingWebhookHandler(
return next(
createHttpError(HttpCode.NOT_FOUND, "This endpoint is not in use")
);
}
}

@@ -9,12 +9,12 @@ import { OpenAPITags, registry } from "@server/openApi";
import { applyBlueprint } from "@server/lib/blueprints/applyBlueprint";

const applyBlueprintSchema = z.strictObject({
blueprint: z.string()
});
blueprint: z.string()
});

const applyBlueprintParamsSchema = z.strictObject({
orgId: z.string()
});
orgId: z.string()
});

registry.registerPath({
method: "put",

@@ -13,12 +13,9 @@ import { OpenAPITags, registry } from "@server/openApi";
import { BlueprintData } from "./types";

const getBlueprintSchema = z.strictObject({
blueprintId: z
.string()
.transform(stoi)
.pipe(z.int().positive()),
orgId: z.string()
});
blueprintId: z.string().transform(stoi).pipe(z.int().positive()),
orgId: z.string()
});

async function query(blueprintId: number, orgId: string) {
// Get the client
@@ -11,23 +11,23 @@ import { OpenAPITags, registry } from "@server/openApi";
import { BlueprintData } from "./types";

const listBluePrintsParamsSchema = z.strictObject({
orgId: z.string()
});
orgId: z.string()
});

const listBluePrintsSchema = z.strictObject({
limit: z
.string()
.optional()
.default("1000")
.transform(Number)
.pipe(z.int().nonnegative()),
offset: z
.string()
.optional()
.default("0")
.transform(Number)
.pipe(z.int().nonnegative())
});
limit: z
.string()
.optional()
.default("1000")
.transform(Number)
.pipe(z.int().nonnegative()),
offset: z
.string()
.optional()
.default("0")
.transform(Number)
.pipe(z.int().nonnegative())
});

async function queryBlueprints(orgId: string, limit: number, offset: number) {
const res = await db

@@ -1,5 +1,9 @@
import { db, Transaction } from "@server/db";

export async function createCertificate(domainId: string, domain: string, trx: Transaction | typeof db) {
export async function createCertificate(
domainId: string,
domain: string,
trx: Transaction | typeof db
) {
return;
}
}

@@ -10,4 +10,4 @@ export type GetCertificateResponse = {
updatedAt: string;
errorMessage?: string | null;
renewalCount: number;
}
};
@@ -10,7 +10,16 @@ import {
import logger from "@server/logger";
import HttpCode from "@server/types/HttpCode";
import response from "@server/lib/response";
import { and, count, eq, inArray, isNotNull, isNull, or, sql } from "drizzle-orm";
import {
and,
count,
eq,
inArray,
isNotNull,
isNull,
or,
sql
} from "drizzle-orm";
import { NextFunction, Request, Response } from "express";
import createHttpError from "http-errors";
import { z } from "zod";

@@ -60,13 +69,9 @@ async function getLatestOlmVersion(): Promise<string | null> {
return latestVersion;
} catch (error: any) {
if (error.name === "AbortError") {
logger.warn(
"Request to fetch latest Olm version timed out (1.5s)"
);
logger.warn("Request to fetch latest Olm version timed out (1.5s)");
} else if (error.cause?.code === "UND_ERR_CONNECT_TIMEOUT") {
logger.warn(
"Connection timeout while fetching latest Olm version"
);
logger.warn("Connection timeout while fetching latest Olm version");
} else {
logger.warn(
"Error fetching latest Olm version:",

@@ -77,10 +82,9 @@ async function getLatestOlmVersion(): Promise<string | null> {
}
}

const listClientsParamsSchema = z.strictObject({
orgId: z.string()
});
orgId: z.string()
});

const listClientsSchema = z.object({
limit: z

@@ -95,12 +99,14 @@ const listClientsSchema = z.object({
.default("0")
.transform(Number)
.pipe(z.int().nonnegative()),
filter: z
.enum(["user", "machine"])
.optional()
filter: z.enum(["user", "machine"]).optional()
});

function queryClients(orgId: string, accessibleClientIds: number[], filter?: "user" | "machine") {
function queryClients(
orgId: string,
accessibleClientIds: number[],
filter?: "user" | "machine"
) {
const conditions = [
inArray(clients.clientId, accessibleClientIds),
eq(clients.orgId, orgId)
@@ -158,16 +164,17 @@ type OlmWithUpdateAvailable = Awaited<ReturnType<typeof queryClients>>[0] & {
olmUpdateAvailable?: boolean;
};

export type ListClientsResponse = {
clients: Array<Awaited<ReturnType<typeof queryClients>>[0] & {
sites: Array<{
siteId: number;
siteName: string | null;
siteNiceId: string | null;
}>
olmUpdateAvailable?: boolean;
}>;
clients: Array<
Awaited<ReturnType<typeof queryClients>>[0] & {
sites: Array<{
siteId: number;
siteName: string | null;
siteNiceId: string | null;
}>;
olmUpdateAvailable?: boolean;
}
>;
pagination: { total: number; limit: number; offset: number };
};

@@ -271,28 +278,34 @@ export async function listClients(
const totalCount = totalCountResult[0].count;

// Get associated sites for all clients
const clientIds = clientsList.map(client => client.clientId);
const clientIds = clientsList.map((client) => client.clientId);
const siteAssociations = await getSiteAssociations(clientIds);

// Group site associations by client ID
const sitesByClient = siteAssociations.reduce((acc, association) => {
if (!acc[association.clientId]) {
acc[association.clientId] = [];
}
acc[association.clientId].push({
siteId: association.siteId,
siteName: association.siteName,
siteNiceId: association.siteNiceId
});
return acc;
}, {} as Record<number, Array<{
siteId: number;
siteName: string | null;
siteNiceId: string | null;
}>>);
const sitesByClient = siteAssociations.reduce(
(acc, association) => {
if (!acc[association.clientId]) {
acc[association.clientId] = [];
}
acc[association.clientId].push({
siteId: association.siteId,
siteName: association.siteName,
siteNiceId: association.siteNiceId
});
return acc;
},
{} as Record<
number,
Array<{
siteId: number;
siteName: string | null;
siteNiceId: string | null;
}>
>
);

// Merge clients with their site associations
const clientsWithSites = clientsList.map(client => ({
const clientsWithSites = clientsList.map((client) => ({
...client,
sites: sitesByClient[client.clientId] || []
}));
@@ -322,7 +335,6 @@ export async function listClients(
} catch (error) {
client.olmUpdateAvailable = false;
}

});
}
} catch (error) {

@@ -333,7 +345,6 @@ export async function listClients(
);
}

return response<ListClientsResponse>(res, {
data: {
clients: clientsWithSites,

@@ -16,8 +16,8 @@ export type PickClientDefaultsResponse = {
};

const pickClientDefaultsSchema = z.strictObject({
orgId: z.string()
});
orgId: z.string()
});

registry.registerPath({
method: "get",

@@ -1,5 +1,5 @@
import { sendToClient } from "#dynamic/routers/ws";
import { db, olms } from "@server/db";
import { db, olms, Transaction } from "@server/db";
import { Alias, SubnetProxyTarget } from "@server/lib/ip";
import logger from "@server/logger";
import { eq } from "drizzle-orm";

@@ -101,14 +101,18 @@ export async function removePeerData(
export async function updatePeerData(
clientId: number,
siteId: number,
remoteSubnets: {
oldRemoteSubnets: string[];
newRemoteSubnets: string[];
} | undefined,
aliases: {
oldAliases: Alias[];
newAliases: Alias[];
} | undefined,
remoteSubnets:
| {
oldRemoteSubnets: string[];
newRemoteSubnets: string[];
}
| undefined,
aliases:
| {
oldAliases: Alias[];
newAliases: Alias[];
}
| undefined,
olmId?: string
) {
if (!olmId) {

@@ -2,7 +2,10 @@ import { sendToClient } from "#dynamic/routers/ws";
import { db, olms } from "@server/db";
import { eq } from "drizzle-orm";

export async function sendTerminateClient(clientId: number, olmId?: string | null) {
export async function sendTerminateClient(
clientId: number,
olmId?: string | null
) {
if (!olmId) {
const [olm] = await db
.select()
@@ -1,6 +1,13 @@
import { Request, Response, NextFunction } from "express";
import { z } from "zod";
import { db, Domain, domains, OrgDomains, orgDomains, dnsRecords } from "@server/db";
import {
db,
Domain,
domains,
OrgDomains,
orgDomains,
dnsRecords
} from "@server/db";
import response from "@server/lib/response";
import HttpCode from "@server/types/HttpCode";
import createHttpError from "http-errors";

@@ -16,16 +23,15 @@ import { build } from "@server/build";
import config from "@server/lib/config";

const paramsSchema = z.strictObject({
orgId: z.string()
});
orgId: z.string()
});

const bodySchema = z.strictObject({
type: z.enum(["ns", "cname", "wildcard"]),
baseDomain: subdomainSchema,
certResolver: z.string().optional().nullable(),
preferWildcardCert: z.boolean().optional().nullable() // optional, only for wildcard
});

type: z.enum(["ns", "cname", "wildcard"]),
baseDomain: subdomainSchema,
certResolver: z.string().optional().nullable(),
preferWildcardCert: z.boolean().optional().nullable() // optional, only for wildcard
});

export type CreateDomainResponse = {
domainId: string;

@@ -72,7 +78,8 @@ export async function createOrgDomain(
}

const { orgId } = parsedParams.data;
const { type, baseDomain, certResolver, preferWildcardCert } = parsedBody.data;
const { type, baseDomain, certResolver, preferWildcardCert } =
parsedBody.data;

if (build == "oss") {
if (type !== "wildcard") {

@@ -278,7 +285,7 @@ export async function createOrgDomain(
// TODO: This needs to be cross region and not hardcoded
if (type === "ns") {
nsRecords = config.getRawConfig().dns.nameservers as string[];

// Save NS records to database
for (const nsValue of nsRecords) {
recordsToInsert.push({

@@ -300,7 +307,7 @@ export async function createOrgDomain(
baseDomain: `_acme-challenge.${baseDomain}`
}
];

// Save CNAME records to database
for (const cnameRecord of cnameRecords) {
recordsToInsert.push({

@@ -322,7 +329,7 @@ export async function createOrgDomain(
baseDomain: `${baseDomain}`
}
];

// Save A records to database
for (const aRecord of aRecords) {
recordsToInsert.push({
@@ -11,9 +11,9 @@ import { usageService } from "@server/lib/billing/usageService";
import { FeatureId } from "@server/lib/billing";

const paramsSchema = z.strictObject({
domainId: z.string(),
orgId: z.string()
});
domainId: z.string(),
orgId: z.string()
});

export type DeleteAccountDomainResponse = {
success: boolean;

@@ -48,10 +48,7 @@ export async function deleteAccountDomain(
eq(orgDomains.domainId, domainId)
)
)
.innerJoin(
domains,
eq(orgDomains.domainId, domains.domainId)
);
.innerJoin(domains, eq(orgDomains.domainId, domains.domainId));

if (!existing) {
return next(

@@ -11,16 +11,16 @@ import { OpenAPITags, registry } from "@server/openApi";
import { getServerIp } from "@server/lib/serverIpService"; // your in-memory IP module

const getDNSRecordsSchema = z.strictObject({
domainId: z.string(),
orgId: z.string()
});
domainId: z.string(),
orgId: z.string()
});

async function query(domainId: string) {
const records = await db
.select()
.from(dnsRecords)
.where(eq(dnsRecords.domainId, domainId));

return records;
}

@@ -72,8 +72,11 @@ export async function getDNSRecords(
const serverIp = getServerIp();

// Override value for type A or wildcard records
const updatedRecords = records.map(record => {
if ((record.recordType === "A" || record.baseDomain === "*") && serverIp) {
const updatedRecords = records.map((record) => {
if (
(record.recordType === "A" || record.baseDomain === "*") &&
serverIp
) {
return { ...record, value: serverIp };
}
return record;

@@ -92,4 +95,4 @@ export async function getDNSRecords(
createHttpError(HttpCode.INTERNAL_SERVER_ERROR, "An error occurred")
);
}
}
}
@@ -11,11 +11,9 @@ import { OpenAPITags, registry } from "@server/openApi";
import { domain } from "zod/v4/core/regexes";

const getDomainSchema = z.strictObject({
domainId: z
.string()
.optional(),
orgId: z.string().optional()
});
domainId: z.string().optional(),
orgId: z.string().optional()
});

async function query(domainId?: string, orgId?: string) {
if (domainId) {
@@ -65,7 +63,9 @@ export async function getDomain(
const domain = await query(domainId, orgId);

if (!domain) {
return next(createHttpError(HttpCode.NOT_FOUND, "Domain not found"));
return next(
createHttpError(HttpCode.NOT_FOUND, "Domain not found")
);
}

return response<GetDomainResponse>(res, {

@@ -4,4 +4,4 @@ export * from "./deleteOrgDomain";
|
||||
export * from "./restartOrgDomain";
|
||||
export * from "./getDomain";
|
||||
export * from "./getDNSRecords";
|
||||
export * from "./updateDomain";
|
||||
export * from "./updateDomain";
|
||||
|
||||
@@ -11,23 +11,23 @@ import { fromError } from "zod-validation-error";
import { OpenAPITags, registry } from "@server/openApi";

const listDomainsParamsSchema = z.strictObject({
orgId: z.string()
});
orgId: z.string()
});

const listDomainsSchema = z.strictObject({
limit: z
.string()
.optional()
.default("1000")
.transform(Number)
.pipe(z.int().nonnegative()),
offset: z
.string()
.optional()
.default("0")
.transform(Number)
.pipe(z.int().nonnegative())
});
limit: z
.string()
.optional()
.default("1000")
.transform(Number)
.pipe(z.int().nonnegative()),
offset: z
.string()
.optional()
.default("0")
.transform(Number)
.pipe(z.int().nonnegative())
});

async function queryDomains(orgId: string, limit: number, offset: number) {
const res = await db

@@ -9,9 +9,9 @@ import { fromError } from "zod-validation-error";
import { and, eq } from "drizzle-orm";

const paramsSchema = z.strictObject({
domainId: z.string(),
orgId: z.string()
});
domainId: z.string(),
orgId: z.string()
});

export type RestartOrgDomainResponse = {
success: boolean;

@@ -5,4 +5,4 @@ export type CheckDomainAvailabilityResponse = {
domainId: string;
fullDomain: string;
}[];
};
};

@@ -10,14 +10,14 @@ import { eq, and } from "drizzle-orm";
import { OpenAPITags, registry } from "@server/openApi";

const paramsSchema = z.strictObject({
orgId: z.string(),
domainId: z.string()
});
orgId: z.string(),
domainId: z.string()
});

const bodySchema = z.strictObject({
certResolver: z.string().optional().nullable(),
preferWildcardCert: z.boolean().optional().nullable()
});
certResolver: z.string().optional().nullable(),
preferWildcardCert: z.boolean().optional().nullable()
});

export type UpdateDomainResponse = {
domainId: string;
@@ -25,7 +25,6 @@ export type UpdateDomainResponse = {
preferWildcardCert: boolean | null;
};

registry.registerPath({
method: "patch",
path: "/org/{orgId}/domain/{domainId}",
@@ -88,7 +87,6 @@ export async function updateOrgDomain(
);
}

const [existingDomain] = await db
.select()
.from(domains)
@@ -154,4 +152,4 @@ export async function updateOrgDomain(
createHttpError(HttpCode.INTERNAL_SERVER_ERROR, "An error occurred")
);
}
}
}

@@ -318,7 +318,7 @@ authenticated.post(
verifyRoleAccess,
verifyUserHasAction(ActionsEnum.setResourceRoles),
logActionAudit(ActionsEnum.setResourceRoles),
siteResource.setSiteResourceRoles,
siteResource.setSiteResourceRoles
);

authenticated.post(
@@ -327,7 +327,7 @@ authenticated.post(
verifySetResourceUsers,
verifyUserHasAction(ActionsEnum.setResourceUsers),
logActionAudit(ActionsEnum.setResourceUsers),
siteResource.setSiteResourceUsers,
siteResource.setSiteResourceUsers
);

authenticated.post(
@@ -336,7 +336,7 @@ authenticated.post(
verifySetResourceClients,
verifyUserHasAction(ActionsEnum.setResourceUsers),
logActionAudit(ActionsEnum.setResourceUsers),
siteResource.setSiteResourceClients,
siteResource.setSiteResourceClients
);

authenticated.post(
@@ -345,7 +345,7 @@ authenticated.post(
verifySetResourceClients,
verifyUserHasAction(ActionsEnum.setResourceUsers),
logActionAudit(ActionsEnum.setResourceUsers),
siteResource.addClientToSiteResource,
siteResource.addClientToSiteResource
);

authenticated.post(
@@ -354,7 +354,7 @@ authenticated.post(
verifySetResourceClients,
verifyUserHasAction(ActionsEnum.setResourceUsers),
logActionAudit(ActionsEnum.setResourceUsers),
siteResource.removeClientFromSiteResource,
siteResource.removeClientFromSiteResource
);

authenticated.put(
@@ -812,17 +812,9 @@ authenticated.delete(
// createNewt
// );

authenticated.put(
"/user/:userId/olm",
verifyIsLoggedInUser,
olm.createUserOlm
);
authenticated.put("/user/:userId/olm", verifyIsLoggedInUser, olm.createUserOlm);

authenticated.get(
"/user/:userId/olms",
verifyIsLoggedInUser,
olm.listUserOlms
);
authenticated.get("/user/:userId/olms", verifyIsLoggedInUser, olm.listUserOlms);

authenticated.delete(
"/user/:userId/olm/:olmId",

@@ -27,4 +27,4 @@ export type NewLicenseKey = {
};
};

export type GenerateNewLicenseResponse = NewLicenseKey;
export type GenerateNewLicenseResponse = NewLicenseKey;

@@ -5,7 +5,10 @@ import { getNextAvailableSubnet } from "@server/lib/exitNodes";
import logger from "@server/logger";
import { eq } from "drizzle-orm";

export async function createExitNode(publicKey: string, reachableAt: string | undefined) {
export async function createExitNode(
publicKey: string,
reachableAt: string | undefined
) {
// Fetch exit node
const [exitNodeQuery] = await db.select().from(exitNodes).limit(1);
let exitNode: ExitNode;

@@ -117,4 +117,4 @@ export async function generateGerbilConfig(exitNode: ExitNode) {
};

return configResponse;
}
}

@@ -2,4 +2,4 @@ export * from "./getConfig";
export * from "./receiveBandwidth";
export * from "./updateHolePunch";
export * from "./getAllRelays";
export * from "./getResolvedHostname";
export * from "./getResolvedHostname";

@@ -14,12 +14,55 @@ import { build } from "@server/build";
|
||||
// Track sites that are already offline to avoid unnecessary queries
|
||||
const offlineSites = new Set<string>();
|
||||
|
||||
// Retry configuration for deadlock handling
|
||||
const MAX_RETRIES = 3;
|
||||
const BASE_DELAY_MS = 50;
|
||||
|
||||
interface PeerBandwidth {
|
||||
publicKey: string;
|
||||
bytesIn: number;
|
||||
bytesOut: number;
|
||||
}
|
||||
|
||||
/**
|
||||
* Check if an error is a deadlock error
|
||||
*/
|
||||
function isDeadlockError(error: any): boolean {
|
||||
return (
|
||||
error?.code === "40P01" ||
|
||||
error?.cause?.code === "40P01" ||
|
||||
(error?.message && error.message.includes("deadlock"))
|
||||
);
|
||||
}
|
||||
|
||||
/**
|
||||
* Execute a function with retry logic for deadlock handling
|
||||
*/
|
||||
async function withDeadlockRetry<T>(
|
||||
operation: () => Promise<T>,
|
||||
context: string
|
||||
): Promise<T> {
|
||||
let attempt = 0;
|
||||
while (true) {
|
||||
try {
|
||||
return await operation();
|
||||
} catch (error: any) {
|
||||
if (isDeadlockError(error) && attempt < MAX_RETRIES) {
|
||||
attempt++;
|
||||
const baseDelay = Math.pow(2, attempt - 1) * BASE_DELAY_MS;
|
||||
const jitter = Math.random() * baseDelay;
|
||||
const delay = baseDelay + jitter;
|
||||
logger.warn(
|
||||
`Deadlock detected in ${context}, retrying attempt ${attempt}/${MAX_RETRIES} after ${delay.toFixed(0)}ms`
|
||||
);
|
||||
await new Promise((resolve) => setTimeout(resolve, delay));
|
||||
continue;
|
||||
}
|
||||
throw error;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
export const receiveBandwidth = async (
|
||||
req: Request,
|
||||
res: Response,
|
||||
@@ -60,201 +103,215 @@ export async function updateSiteBandwidth(
|
||||
const currentTime = new Date();
|
||||
const oneMinuteAgo = new Date(currentTime.getTime() - 60000); // 1 minute ago
|
||||
|
||||
// logger.debug(`Received data: ${JSON.stringify(bandwidthData)}`);
|
||||
// Sort bandwidth data by publicKey to ensure consistent lock ordering across all instances
|
||||
// This is critical for preventing deadlocks when multiple instances update the same sites
|
||||
const sortedBandwidthData = [...bandwidthData].sort((a, b) =>
|
||||
a.publicKey.localeCompare(b.publicKey)
|
||||
);
|
||||
|
||||
await db.transaction(async (trx) => {
|
||||
// First, handle sites that are actively reporting bandwidth
|
||||
const activePeers = bandwidthData.filter((peer) => peer.bytesIn > 0); // Bytesout will have data as it tries to send keep alive messages
|
||||
// First, handle sites that are actively reporting bandwidth
|
||||
const activePeers = sortedBandwidthData.filter((peer) => peer.bytesIn > 0);
|
||||
|
||||
if (activePeers.length > 0) {
|
||||
// Remove any active peers from offline tracking since they're sending data
|
||||
activePeers.forEach((peer) => offlineSites.delete(peer.publicKey));
|
||||
// Aggregate usage data by organization (collected outside transaction)
|
||||
const orgUsageMap = new Map<string, number>();
|
||||
const orgUptimeMap = new Map<string, number>();
|
||||
|
||||
// Aggregate usage data by organization
|
||||
const orgUsageMap = new Map<string, number>();
|
||||
const orgUptimeMap = new Map<string, number>();
|
||||
if (activePeers.length > 0) {
|
||||
// Remove any active peers from offline tracking since they're sending data
|
||||
activePeers.forEach((peer) => offlineSites.delete(peer.publicKey));
|
||||
|
||||
// Update all active sites with bandwidth data and get the site data in one operation
|
||||
const updatedSites = [];
|
||||
for (const peer of activePeers) {
|
||||
const [updatedSite] = await trx
|
||||
.update(sites)
|
||||
.set({
|
||||
megabytesOut: sql`${sites.megabytesOut} + ${peer.bytesIn}`,
|
||||
megabytesIn: sql`${sites.megabytesIn} + ${peer.bytesOut}`,
|
||||
lastBandwidthUpdate: currentTime.toISOString(),
|
||||
online: true
|
||||
})
|
||||
.where(eq(sites.pubKey, peer.publicKey))
|
||||
.returning({
|
||||
online: sites.online,
|
||||
orgId: sites.orgId,
|
||||
siteId: sites.siteId,
|
||||
lastBandwidthUpdate: sites.lastBandwidthUpdate
|
||||
});
|
||||
// Update each active site individually with retry logic
|
||||
// This reduces transaction scope and allows retries per-site
|
||||
for (const peer of activePeers) {
|
||||
try {
|
||||
const updatedSite = await withDeadlockRetry(async () => {
|
||||
const [result] = await db
|
||||
.update(sites)
|
||||
.set({
|
||||
megabytesOut: sql`${sites.megabytesOut} + ${peer.bytesIn}`,
|
||||
megabytesIn: sql`${sites.megabytesIn} + ${peer.bytesOut}`,
|
||||
lastBandwidthUpdate: currentTime.toISOString(),
|
||||
online: true
|
||||
})
|
||||
.where(eq(sites.pubKey, peer.publicKey))
|
||||
.returning({
|
||||
online: sites.online,
|
||||
orgId: sites.orgId,
|
||||
siteId: sites.siteId,
|
||||
lastBandwidthUpdate: sites.lastBandwidthUpdate
|
||||
});
|
||||
return result;
|
||||
}, `update active site ${peer.publicKey}`);
|
||||
|
||||
if (updatedSite) {
|
||||
if (exitNodeId) {
|
||||
if (
|
||||
await checkExitNodeOrg(
|
||||
exitNodeId,
|
||||
updatedSite.orgId,
|
||||
trx
|
||||
)
|
||||
) {
|
||||
// not allowed
|
||||
const notAllowed = await checkExitNodeOrg(
|
||||
exitNodeId,
|
||||
updatedSite.orgId
|
||||
);
|
||||
if (notAllowed) {
|
||||
logger.warn(
|
||||
`Exit node ${exitNodeId} is not allowed for org ${updatedSite.orgId}`
|
||||
);
|
||||
// THIS SHOULD TRIGGER THE TRANSACTION TO FAIL?
|
||||
throw new Error("Exit node not allowed");
|
||||
// Skip this site but continue processing others
|
||||
continue;
|
||||
}
|
||||
}
|
||||
|
||||
updatedSites.push({ ...updatedSite, peer });
|
||||
}
|
||||
}
|
||||
|
||||
// Calculate org usage aggregations using the updated site data
|
||||
for (const { peer, ...site } of updatedSites) {
|
||||
// Aggregate bandwidth usage for the org
|
||||
const totalBandwidth = peer.bytesIn + peer.bytesOut;
|
||||
const currentOrgUsage = orgUsageMap.get(site.orgId) || 0;
|
||||
orgUsageMap.set(site.orgId, currentOrgUsage + totalBandwidth);
|
||||
|
||||
// Add 10 seconds of uptime for each active site
|
||||
const currentOrgUptime = orgUptimeMap.get(site.orgId) || 0;
|
||||
orgUptimeMap.set(site.orgId, currentOrgUptime + 10 / 60); // Store in minutes and jut add 10 seconds
|
||||
}
|
||||
|
||||
if (calcUsageAndLimits) {
|
||||
// REMOTE EXIT NODES DO NOT COUNT TOWARDS USAGE
|
||||
// Process all usage updates sequentially by organization to reduce deadlock risk
|
||||
const allOrgIds = new Set([...orgUsageMap.keys(), ...orgUptimeMap.keys()]);
|
||||
|
||||
for (const orgId of allOrgIds) {
|
||||
try {
|
||||
// Process bandwidth usage for this org
|
||||
const totalBandwidth = orgUsageMap.get(orgId);
|
||||
if (totalBandwidth) {
|
||||
const bandwidthUsage = await usageService.add(
|
||||
orgId,
|
||||
FeatureId.EGRESS_DATA_MB,
|
||||
totalBandwidth,
|
||||
trx
|
||||
);
|
||||
if (bandwidthUsage) {
|
||||
usageService
|
||||
.checkLimitSet(
|
||||
orgId,
|
||||
true,
|
||||
FeatureId.EGRESS_DATA_MB,
|
||||
bandwidthUsage,
|
||||
trx
|
||||
)
|
||||
.catch((error: any) => {
|
||||
logger.error(
|
||||
`Error checking bandwidth limits for org ${orgId}:`,
|
||||
error
|
||||
);
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
// Process uptime usage for this org
|
||||
const totalUptime = orgUptimeMap.get(orgId);
|
||||
if (totalUptime) {
|
||||
const uptimeUsage = await usageService.add(
|
||||
orgId,
|
||||
FeatureId.SITE_UPTIME,
|
||||
totalUptime,
|
||||
trx
|
||||
);
|
||||
if (uptimeUsage) {
|
||||
usageService
|
||||
.checkLimitSet(
|
||||
orgId,
|
||||
true,
|
||||
FeatureId.SITE_UPTIME,
|
||||
uptimeUsage,
|
||||
trx
|
||||
)
|
||||
.catch((error: any) => {
|
||||
logger.error(
|
||||
`Error checking uptime limits for org ${orgId}:`,
|
||||
error
|
||||
);
|
||||
});
|
||||
}
|
||||
}
|
||||
} catch (error) {
|
||||
logger.error(
|
||||
`Error processing usage for org ${orgId}:`,
|
||||
error
|
||||
);
|
||||
// Don't break the loop, continue with other orgs
|
||||
}
|
||||
// Aggregate bandwidth usage for the org
|
||||
const totalBandwidth = peer.bytesIn + peer.bytesOut;
|
||||
const currentOrgUsage =
|
||||
orgUsageMap.get(updatedSite.orgId) || 0;
|
||||
orgUsageMap.set(
|
||||
updatedSite.orgId,
|
||||
currentOrgUsage + totalBandwidth
|
||||
);
|
||||
|
||||
// Add 10 seconds of uptime for each active site
|
||||
const currentOrgUptime =
|
||||
orgUptimeMap.get(updatedSite.orgId) || 0;
|
||||
orgUptimeMap.set(
|
||||
updatedSite.orgId,
|
||||
currentOrgUptime + 10 / 60
|
||||
);
|
||||
}
|
||||
} catch (error) {
|
||||
logger.error(
|
||||
`Failed to update bandwidth for site ${peer.publicKey}:`,
|
||||
error
|
||||
);
|
||||
// Continue with other sites
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Handle sites that reported zero bandwidth but need online status updated
|
||||
const zeroBandwidthPeers = bandwidthData.filter(
|
||||
(peer) => peer.bytesIn === 0 && !offlineSites.has(peer.publicKey) // Bytesout will have data as it tries to send keep alive messages
|
||||
);
|
||||
// Process usage updates outside of site update transactions
|
||||
// This separates the concerns and reduces lock contention
|
||||
if (calcUsageAndLimits && (orgUsageMap.size > 0 || orgUptimeMap.size > 0)) {
|
||||
// Sort org IDs to ensure consistent lock ordering
|
||||
const allOrgIds = [
|
||||
...new Set([...orgUsageMap.keys(), ...orgUptimeMap.keys()])
|
||||
].sort();
|
||||
|
||||
if (zeroBandwidthPeers.length > 0) {
|
||||
const zeroBandwidthSites = await trx
|
||||
.select()
|
||||
.from(sites)
|
||||
.where(
|
||||
inArray(
|
||||
sites.pubKey,
|
||||
zeroBandwidthPeers.map((p) => p.publicKey)
|
||||
)
|
||||
);
|
||||
|
||||
for (const site of zeroBandwidthSites) {
|
||||
let newOnlineStatus = site.online;
|
||||
|
||||
// Check if site should go offline based on last bandwidth update WITH DATA
|
||||
if (site.lastBandwidthUpdate) {
|
||||
const lastUpdateWithData = new Date(
|
||||
site.lastBandwidthUpdate
|
||||
for (const orgId of allOrgIds) {
|
||||
try {
|
||||
// Process bandwidth usage for this org
|
||||
const totalBandwidth = orgUsageMap.get(orgId);
|
||||
if (totalBandwidth) {
|
||||
const bandwidthUsage = await usageService.add(
|
||||
orgId,
|
||||
FeatureId.EGRESS_DATA_MB,
|
||||
totalBandwidth
|
||||
);
|
||||
if (lastUpdateWithData < oneMinuteAgo) {
|
||||
newOnlineStatus = false;
|
||||
if (bandwidthUsage) {
|
||||
// Fire and forget - don't block on limit checking
|
||||
usageService
|
||||
.checkLimitSet(
|
||||
orgId,
|
||||
true,
|
||||
FeatureId.EGRESS_DATA_MB,
|
||||
bandwidthUsage
|
||||
)
|
||||
.catch((error: any) => {
|
||||
logger.error(
|
||||
`Error checking bandwidth limits for org ${orgId}:`,
|
||||
error
|
||||
);
|
||||
});
|
||||
}
|
||||
} else {
|
||||
// No previous data update recorded, set to offline
|
||||
newOnlineStatus = false;
|
||||
}
|
||||
|
||||
// Always update lastBandwidthUpdate to show this instance is receiving reports
|
||||
// Only update online status if it changed
|
||||
if (site.online !== newOnlineStatus) {
|
||||
const [updatedSite] = await trx
|
||||
.update(sites)
|
||||
.set({
|
||||
online: newOnlineStatus
|
||||
})
|
||||
.where(eq(sites.siteId, site.siteId))
|
||||
.returning();
|
||||
// Process uptime usage for this org
|
||||
const totalUptime = orgUptimeMap.get(orgId);
|
||||
if (totalUptime) {
|
||||
const uptimeUsage = await usageService.add(
|
||||
orgId,
|
||||
FeatureId.SITE_UPTIME,
|
||||
totalUptime
|
||||
);
|
||||
if (uptimeUsage) {
|
||||
// Fire and forget - don't block on limit checking
|
||||
usageService
|
||||
.checkLimitSet(
|
||||
orgId,
|
||||
true,
|
||||
FeatureId.SITE_UPTIME,
|
||||
uptimeUsage
|
||||
)
|
||||
.catch((error: any) => {
|
||||
logger.error(
|
||||
`Error checking uptime limits for org ${orgId}:`,
|
||||
error
|
||||
);
|
||||
});
|
||||
}
|
||||
}
|
||||
} catch (error) {
|
||||
logger.error(`Error processing usage for org ${orgId}:`, error);
|
||||
// Continue with other orgs
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Handle sites that reported zero bandwidth but need online status updated
|
||||
const zeroBandwidthPeers = sortedBandwidthData.filter(
|
||||
(peer) => peer.bytesIn === 0 && !offlineSites.has(peer.publicKey)
|
||||
);
|
||||
|
||||
if (zeroBandwidthPeers.length > 0) {
|
||||
// Fetch all zero bandwidth sites in one query
|
||||
const zeroBandwidthSites = await db
|
||||
.select()
|
||||
.from(sites)
|
||||
.where(
|
||||
inArray(
|
||||
sites.pubKey,
|
||||
zeroBandwidthPeers.map((p) => p.publicKey)
|
||||
)
|
||||
);
|
||||
|
||||
// Sort by siteId to ensure consistent lock ordering
|
||||
const sortedZeroBandwidthSites = zeroBandwidthSites.sort(
|
||||
(a, b) => a.siteId - b.siteId
|
||||
);
|
||||
|
||||
for (const site of sortedZeroBandwidthSites) {
|
||||
let newOnlineStatus = site.online;
|
||||
|
||||
// Check if site should go offline based on last bandwidth update WITH DATA
|
||||
if (site.lastBandwidthUpdate) {
|
||||
const lastUpdateWithData = new Date(site.lastBandwidthUpdate);
|
||||
if (lastUpdateWithData < oneMinuteAgo) {
|
||||
newOnlineStatus = false;
|
||||
}
|
||||
} else {
|
||||
// No previous data update recorded, set to offline
|
||||
newOnlineStatus = false;
|
||||
}
|
||||
|
||||
// Only update online status if it changed
|
||||
if (site.online !== newOnlineStatus) {
|
||||
try {
|
||||
const updatedSite = await withDeadlockRetry(async () => {
|
||||
const [result] = await db
|
||||
.update(sites)
|
||||
.set({
|
||||
online: newOnlineStatus
|
||||
})
|
||||
.where(eq(sites.siteId, site.siteId))
|
||||
.returning();
|
||||
return result;
|
||||
}, `update offline status for site ${site.siteId}`);
|
||||
|
||||
if (updatedSite && exitNodeId) {
|
||||
if (
|
||||
await checkExitNodeOrg(
|
||||
exitNodeId,
|
||||
updatedSite.orgId,
|
||||
trx
|
||||
)
|
||||
) {
|
||||
// not allowed
|
||||
const notAllowed = await checkExitNodeOrg(
|
||||
exitNodeId,
|
||||
updatedSite.orgId
|
||||
);
|
||||
if (notAllowed) {
|
||||
logger.warn(
|
||||
`Exit node ${exitNodeId} is not allowed for org ${updatedSite.orgId}`
|
||||
);
|
||||
// THIS SHOULD TRIGGER THE TRANSACTION TO FAIL?
|
||||
throw new Error("Exit node not allowed");
|
||||
}
|
||||
}
|
||||
|
||||
@@ -262,8 +319,14 @@ export async function updateSiteBandwidth(
|
||||
if (!newOnlineStatus && site.pubKey) {
|
||||
offlineSites.add(site.pubKey);
|
||||
}
|
||||
} catch (error) {
|
||||
logger.error(
|
||||
`Failed to update offline status for site ${site.siteId}:`,
|
||||
error
|
||||
);
|
||||
// Continue with other sites
|
||||
}
|
||||
}
|
||||
}
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
@@ -21,6 +21,7 @@ import { validateOlmSessionToken } from "@server/auth/sessions/olm";
|
||||
import { checkExitNodeOrg } from "#dynamic/lib/exitNodes";
|
||||
import { updatePeer as updateOlmPeer } from "../olm/peers";
|
||||
import { updatePeer as updateNewtPeer } from "../newt/peers";
|
||||
import { formatEndpoint } from "@server/lib/ip";
|
||||
|
||||
// Define Zod schema for request validation
|
||||
const updateHolePunchSchema = z.object({
|
||||
@@ -207,9 +208,12 @@ export async function updateAndGenerateEndpointDestinations(
|
||||
// `Updating site ${site.siteId} on exit node ${exitNode.exitNodeId}`
|
||||
// );
|
||||
|
||||
// Format the endpoint properly for both IPv4 and IPv6
|
||||
const formattedEndpoint = formatEndpoint(ip, port);
|
||||
|
||||
// if the public key or endpoint has changed, update it otherwise continue
|
||||
if (
|
||||
site.endpoint === `${ip}:${port}` &&
|
||||
site.endpoint === formattedEndpoint &&
|
||||
site.publicKey === publicKey
|
||||
) {
|
||||
continue;
|
||||
@@ -218,7 +222,7 @@ export async function updateAndGenerateEndpointDestinations(
|
||||
const [updatedClientSitesAssociationsCache] = await db
|
||||
.update(clientSitesAssociationsCache)
|
||||
.set({
|
||||
endpoint: `${ip}:${port}`,
|
||||
endpoint: formattedEndpoint,
|
||||
publicKey: publicKey
|
||||
})
|
||||
.where(
|
||||
@@ -310,11 +314,14 @@ export async function updateAndGenerateEndpointDestinations(
|
||||
|
||||
currentSiteId = newt.siteId;
|
||||
|
||||
// Format the endpoint properly for both IPv4 and IPv6
|
||||
const formattedSiteEndpoint = formatEndpoint(ip, port);
|
||||
|
||||
// Update the current site with the new endpoint
|
||||
const [updatedSite] = await db
|
||||
.update(sites)
|
||||
.set({
|
||||
endpoint: `${ip}:${port}`,
|
||||
endpoint: formattedSiteEndpoint,
|
||||
lastHolePunch: timestamp
|
||||
})
|
||||
.where(eq(sites.siteId, newt.siteId))
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
import { Router } from "express";
|
||||
|
||||
// Root routes
|
||||
export const hybridRouter = Router();
|
||||
export const hybridRouter = Router();
|
||||
|
||||
@@ -12,14 +12,14 @@ import { eq, and } from "drizzle-orm";
|
||||
import { idp, idpOrg } from "@server/db";
|
||||
|
||||
const paramsSchema = z.strictObject({
|
||||
idpId: z.coerce.number<number>(),
|
||||
orgId: z.string()
|
||||
});
|
||||
idpId: z.coerce.number<number>(),
|
||||
orgId: z.string()
|
||||
});
|
||||
|
||||
const bodySchema = z.strictObject({
|
||||
roleMapping: z.string().optional(),
|
||||
orgMapping: z.string().optional()
|
||||
});
|
||||
roleMapping: z.string().optional(),
|
||||
orgMapping: z.string().optional()
|
||||
});
|
||||
|
||||
export type CreateIdpOrgPolicyResponse = {};
|
||||
|
||||
|
||||
@@ -15,17 +15,17 @@ import config from "@server/lib/config";
|
||||
const paramsSchema = z.strictObject({});
|
||||
|
||||
const bodySchema = z.strictObject({
|
||||
name: z.string().nonempty(),
|
||||
clientId: z.string().nonempty(),
|
||||
clientSecret: z.string().nonempty(),
|
||||
authUrl: z.url(),
|
||||
tokenUrl: z.url(),
|
||||
identifierPath: z.string().nonempty(),
|
||||
emailPath: z.string().optional(),
|
||||
namePath: z.string().optional(),
|
||||
scopes: z.string().nonempty(),
|
||||
autoProvision: z.boolean().optional()
|
||||
});
|
||||
name: z.string().nonempty(),
|
||||
clientId: z.string().nonempty(),
|
||||
clientSecret: z.string().nonempty(),
|
||||
authUrl: z.url(),
|
||||
tokenUrl: z.url(),
|
||||
identifierPath: z.string().nonempty(),
|
||||
emailPath: z.string().optional(),
|
||||
namePath: z.string().optional(),
|
||||
scopes: z.string().nonempty(),
|
||||
autoProvision: z.boolean().optional()
|
||||
});
|
||||
|
||||
export type CreateIdpResponse = {
|
||||
idpId: number;
|
||||
|
||||
@@ -53,12 +53,7 @@ export async function deleteIdp(
|
||||
.where(eq(idp.idpId, idpId));
|
||||
|
||||
if (!existingIdp) {
|
||||
return next(
|
||||
createHttpError(
|
||||
HttpCode.NOT_FOUND,
|
||||
"IdP not found"
|
||||
)
|
||||
);
|
||||
return next(createHttpError(HttpCode.NOT_FOUND, "IdP not found"));
|
||||
}
|
||||
|
||||
// Delete the IDP and its related records in a transaction
|
||||
@@ -69,14 +64,10 @@ export async function deleteIdp(
|
||||
.where(eq(idpOidcConfig.idpId, idpId));
|
||||
|
||||
// Delete IDP-org mappings
|
||||
await trx
|
||||
.delete(idpOrg)
|
||||
.where(eq(idpOrg.idpId, idpId));
|
||||
await trx.delete(idpOrg).where(eq(idpOrg.idpId, idpId));
|
||||
|
||||
// Delete the IDP itself
|
||||
await trx
|
||||
.delete(idp)
|
||||
.where(eq(idp.idpId, idpId));
|
||||
await trx.delete(idp).where(eq(idp.idpId, idpId));
|
||||
});
|
||||
|
||||
return response<null>(res, {
|
||||
|
||||
@@ -11,9 +11,9 @@ import { eq, and } from "drizzle-orm";
|
||||
import { OpenAPITags, registry } from "@server/openApi";
|
||||
|
||||
const paramsSchema = z.strictObject({
|
||||
idpId: z.coerce.number<number>(),
|
||||
orgId: z.string()
|
||||
});
|
||||
idpId: z.coerce.number<number>(),
|
||||
orgId: z.string()
|
||||
});
|
||||
|
||||
registry.registerPath({
|
||||
method: "delete",
|
||||
|
||||
@@ -24,8 +24,8 @@ const paramsSchema = z
|
||||
.strict();
|
||||
|
||||
const bodySchema = z.strictObject({
|
||||
redirectUrl: z.string()
|
||||
});
|
||||
redirectUrl: z.string()
|
||||
});
|
||||
|
||||
const querySchema = z.object({
|
||||
orgId: z.string().optional() // check what actuall calls it
|
||||
|
||||
@@ -71,14 +71,8 @@ export async function getIdp(
|
||||
const clientSecret = idpRes.idpOidcConfig!.clientSecret;
|
||||
const clientId = idpRes.idpOidcConfig!.clientId;
|
||||
|
||||
idpRes.idpOidcConfig!.clientSecret = decrypt(
|
||||
clientSecret,
|
||||
key
|
||||
);
|
||||
idpRes.idpOidcConfig!.clientId = decrypt(
|
||||
clientId,
|
||||
key
|
||||
);
|
||||
idpRes.idpOidcConfig!.clientSecret = decrypt(clientSecret, key);
|
||||
idpRes.idpOidcConfig!.clientId = decrypt(clientId, key);
|
||||
}
|
||||
|
||||
return response<GetIdpResponse>(res, {
|
||||
|
||||
@@ -8,4 +8,4 @@ export * from "./getIdp";
|
||||
export * from "./createIdpOrgPolicy";
|
||||
export * from "./deleteIdpOrgPolicy";
|
||||
export * from "./listIdpOrgPolicies";
|
||||
export * from "./updateIdpOrgPolicy";
|
||||
export * from "./updateIdpOrgPolicy";
|
||||
|
||||
@@ -15,19 +15,19 @@ const paramsSchema = z.object({
|
||||
});
|
||||
|
||||
const querySchema = z.strictObject({
|
||||
limit: z
|
||||
.string()
|
||||
.optional()
|
||||
.default("1000")
|
||||
.transform(Number)
|
||||
.pipe(z.int().nonnegative()),
|
||||
offset: z
|
||||
.string()
|
||||
.optional()
|
||||
.default("0")
|
||||
.transform(Number)
|
||||
.pipe(z.int().nonnegative())
|
||||
});
|
||||
limit: z
|
||||
.string()
|
||||
.optional()
|
||||
.default("1000")
|
||||
.transform(Number)
|
||||
.pipe(z.int().nonnegative()),
|
||||
offset: z
|
||||
.string()
|
||||
.optional()
|
||||
.default("0")
|
||||
.transform(Number)
|
||||
.pipe(z.int().nonnegative())
|
||||
});
|
||||
|
||||
async function query(idpId: number, limit: number, offset: number) {
|
||||
const res = await db
|
||||
|
||||
@@ -11,19 +11,19 @@ import { fromError } from "zod-validation-error";
|
||||
import { OpenAPITags, registry } from "@server/openApi";
|
||||
|
||||
const querySchema = z.strictObject({
|
||||
limit: z
|
||||
.string()
|
||||
.optional()
|
||||
.default("1000")
|
||||
.transform(Number)
|
||||
.pipe(z.int().nonnegative()),
|
||||
offset: z
|
||||
.string()
|
||||
.optional()
|
||||
.default("0")
|
||||
.transform(Number)
|
||||
.pipe(z.int().nonnegative())
|
||||
});
|
||||
limit: z
|
||||
.string()
|
||||
.optional()
|
||||
.default("1000")
|
||||
.transform(Number)
|
||||
.pipe(z.int().nonnegative()),
|
||||
offset: z
|
||||
.string()
|
||||
.optional()
|
||||
.default("0")
|
||||
.transform(Number)
|
||||
.pipe(z.int().nonnegative())
|
||||
});
|
||||
|
||||
async function query(limit: number, offset: number) {
|
||||
const res = await db
|
||||
|
||||
@@ -11,14 +11,14 @@ import { eq, and } from "drizzle-orm";
|
||||
import { idp, idpOrg } from "@server/db";
|
||||
|
||||
const paramsSchema = z.strictObject({
|
||||
idpId: z.coerce.number<number>(),
|
||||
orgId: z.string()
|
||||
});
|
||||
idpId: z.coerce.number<number>(),
|
||||
orgId: z.string()
|
||||
});
|
||||
|
||||
const bodySchema = z.strictObject({
|
||||
roleMapping: z.string().optional(),
|
||||
orgMapping: z.string().optional()
|
||||
});
|
||||
roleMapping: z.string().optional(),
|
||||
orgMapping: z.string().optional()
|
||||
});
|
||||
|
||||
export type UpdateIdpOrgPolicyResponse = {};
|
||||
|
||||
|
||||
@@ -19,19 +19,19 @@ const paramsSchema = z
|
||||
.strict();
|
||||
|
||||
const bodySchema = z.strictObject({
|
||||
name: z.string().optional(),
|
||||
clientId: z.string().optional(),
|
||||
clientSecret: z.string().optional(),
|
||||
authUrl: z.string().optional(),
|
||||
tokenUrl: z.string().optional(),
|
||||
identifierPath: z.string().optional(),
|
||||
emailPath: z.string().optional(),
|
||||
namePath: z.string().optional(),
|
||||
scopes: z.string().optional(),
|
||||
autoProvision: z.boolean().optional(),
|
||||
defaultRoleMapping: z.string().optional(),
|
||||
defaultOrgMapping: z.string().optional()
|
||||
});
|
||||
name: z.string().optional(),
|
||||
clientId: z.string().optional(),
|
||||
clientSecret: z.string().optional(),
|
||||
authUrl: z.string().optional(),
|
||||
tokenUrl: z.string().optional(),
|
||||
identifierPath: z.string().optional(),
|
||||
emailPath: z.string().optional(),
|
||||
namePath: z.string().optional(),
|
||||
scopes: z.string().optional(),
|
||||
autoProvision: z.boolean().optional(),
|
||||
defaultRoleMapping: z.string().optional(),
|
||||
defaultOrgMapping: z.string().optional()
|
||||
});
|
||||
|
||||
export type UpdateIdpResponse = {
|
||||
idpId: number;
|
||||
|
||||
@@ -8,4 +8,4 @@ export type GetLicenseStatusResponse = LicenseStatus;
|
||||
|
||||
export type ListLicenseKeysResponse = LicenseKeyCache[];
|
||||
|
||||
export type RecheckStatusResponse = LicenseStatus;
|
||||
export type RecheckStatusResponse = LicenseStatus;
|
||||
|
||||
@@ -8,4 +8,4 @@ export type GetLoginPageResponse = LoginPage;
|
||||
|
||||
export type UpdateLoginPageResponse = LoginPage;
|
||||
|
||||
export type LoadLoginPageResponse = LoginPage & { orgId: string };
|
||||
export type LoadLoginPageResponse = LoginPage & { orgId: string };
|
||||
|
||||
@@ -24,9 +24,9 @@ export type CreateNewtResponse = {
|
||||
};
|
||||
|
||||
const createNewtSchema = z.strictObject({
|
||||
newtId: z.string(),
|
||||
secret: z.string()
|
||||
});
|
||||
newtId: z.string(),
|
||||
secret: z.string()
|
||||
});
|
||||
|
||||
export async function createNewt(
|
||||
req: Request,
|
||||
@@ -34,7 +34,6 @@ export async function createNewt(
|
||||
next: NextFunction
|
||||
): Promise<any> {
|
||||
try {
|
||||
|
||||
const parsedBody = createNewtSchema.safeParse(req.body);
|
||||
if (!parsedBody.success) {
|
||||
return next(
|
||||
@@ -58,7 +57,7 @@ export async function createNewt(
|
||||
await db.insert(newts).values({
|
||||
newtId: newtId,
|
||||
secretHash,
|
||||
dateCreated: moment().toISOString(),
|
||||
dateCreated: moment().toISOString()
|
||||
});
|
||||
|
||||
// give the newt their default permissions:
|
||||
@@ -75,12 +74,12 @@ export async function createNewt(
|
||||
data: {
|
||||
newtId,
|
||||
secret,
|
||||
token,
|
||||
token
|
||||
},
|
||||
success: true,
|
||||
error: false,
|
||||
message: "Newt created successfully",
|
||||
status: HttpCode.OK,
|
||||
status: HttpCode.OK
|
||||
});
|
||||
} catch (e) {
|
||||
if (e instanceof SqliteError && e.code === "SQLITE_CONSTRAINT_UNIQUE") {
|
||||
|
||||
@@ -15,6 +15,7 @@ import {
|
||||
import { verifyPassword } from "@server/auth/password";
|
||||
import logger from "@server/logger";
|
||||
import config from "@server/lib/config";
|
||||
import { APP_VERSION } from "@server/lib/consts";
|
||||
|
||||
export const newtGetTokenBodySchema = z.object({
|
||||
newtId: z.string(),
|
||||
@@ -94,9 +95,10 @@ export async function getNewtToken(
|
||||
const resToken = generateSessionToken();
|
||||
await createNewtSession(resToken, existingNewt.newtId);
|
||||
|
||||
return response<{ token: string }>(res, {
|
||||
return response<{ token: string; serverVersion: string }>(res, {
|
||||
data: {
|
||||
token: resToken
|
||||
token: resToken,
|
||||
serverVersion: APP_VERSION
|
||||
},
|
||||
success: true,
|
||||
error: false,
|
||||
|
||||
@@ -35,7 +35,11 @@ export const handleNewtPingRequestMessage: MessageHandler = async (context) => {
|
||||
|
||||
const { noCloud } = message.data;
|
||||
|
||||
const exitNodesList = await listExitNodes(site.orgId, true, noCloud || false); // filter for only the online ones
|
||||
const exitNodesList = await listExitNodes(
|
||||
site.orgId,
|
||||
true,
|
||||
noCloud || false
|
||||
); // filter for only the online ones
|
||||
|
||||
let lastExitNodeId = null;
|
||||
if (newt.siteId) {
|
||||
|
||||
@@ -255,7 +255,7 @@ export const handleNewtRegisterMessage: MessageHandler = async (context) => {
|
||||
hcTimeout: targetHealthCheck.hcTimeout,
|
||||
hcHeaders: targetHealthCheck.hcHeaders,
|
||||
hcMethod: targetHealthCheck.hcMethod,
|
||||
hcTlsServerName: targetHealthCheck.hcTlsServerName,
|
||||
hcTlsServerName: targetHealthCheck.hcTlsServerName
|
||||
})
|
||||
.from(targets)
|
||||
.innerJoin(resources, eq(targets.resourceId, resources.resourceId))
|
||||
@@ -328,7 +328,7 @@ export const handleNewtRegisterMessage: MessageHandler = async (context) => {
|
||||
hcTimeout: target.hcTimeout, // in seconds
|
||||
hcHeaders: hcHeadersSend,
|
||||
hcMethod: target.hcMethod,
|
||||
hcTlsServerName: target.hcTlsServerName,
|
||||
hcTlsServerName: target.hcTlsServerName
|
||||
};
|
||||
});
|
||||
|
||||
@@ -366,7 +366,7 @@ async function getUniqueSubnetForSite(
|
||||
trx: Transaction | typeof db = db
|
||||
): Promise<string | null> {
|
||||
const lockKey = `subnet-allocation:${exitNode.exitNodeId}`;
|
||||
|
||||
|
||||
return await lockManager.withLock(
|
||||
lockKey,
|
||||
async () => {
|
||||
@@ -382,7 +382,8 @@ async function getUniqueSubnetForSite(
|
||||
.map((site) => site.subnet)
|
||||
.filter(
|
||||
(subnet) =>
|
||||
subnet && /^(\d{1,3}\.){3}\d{1,3}\/\d{1,2}$/.test(subnet)
|
||||
subnet &&
|
||||
/^(\d{1,3}\.){3}\d{1,3}\/\d{1,2}$/.test(subnet)
|
||||
)
|
||||
.filter((subnet) => subnet !== null);
|
||||
subnets.push(exitNode.address.replace(/\/\d+$/, `/${blockSize}`));
|
||||
|
||||
@@ -10,7 +10,9 @@ interface PeerBandwidth {
|
||||
bytesOut: number;
|
||||
}
|
||||
|
||||
export const handleReceiveBandwidthMessage: MessageHandler = async (context) => {
|
||||
export const handleReceiveBandwidthMessage: MessageHandler = async (
|
||||
context
|
||||
) => {
|
||||
const { message, client, sendToClient } = context;
|
||||
|
||||
if (!message.data.bandwidthData) {
|
||||
@@ -44,7 +46,7 @@ export const handleReceiveBandwidthMessage: MessageHandler = async (context) =>
|
||||
.set({
|
||||
megabytesOut: (client.megabytesIn || 0) + bytesIn,
|
||||
megabytesIn: (client.megabytesOut || 0) + bytesOut,
|
||||
lastBandwidthUpdate: new Date().toISOString(),
|
||||
lastBandwidthUpdate: new Date().toISOString()
|
||||
})
|
||||
.where(eq(clients.clientId, client.clientId));
|
||||
}
|
||||
|
||||
@@ -64,9 +64,5 @@ export const handleDockerContainersMessage: MessageHandler = async (
|
||||
return;
|
||||
}
|
||||
|
||||
await applyNewtDockerBlueprint(
|
||||
newt.siteId,
|
||||
newt.newtId,
|
||||
containers
|
||||
);
|
||||
await applyNewtDockerBlueprint(newt.siteId, newt.newtId, containers);
|
||||
};
|
||||
|
||||
@@ -5,4 +5,4 @@ export * from "./handleReceiveBandwidthMessage";
|
||||
export * from "./handleGetConfigMessage";
|
||||
export * from "./handleSocketMessages";
|
||||
export * from "./handleNewtPingRequestMessage";
|
||||
export * from "./handleApplyBlueprintMessage";
|
||||
export * from "./handleApplyBlueprintMessage";
|
||||
|
||||
@@ -48,7 +48,11 @@ export async function addPeer(
|
||||
return site;
|
||||
}
|
||||
|
||||
export async function deletePeer(siteId: number, publicKey: string, newtId?: string) {
|
||||
export async function deletePeer(
|
||||
siteId: number,
|
||||
publicKey: string,
|
||||
newtId?: string
|
||||
) {
|
||||
let site: Site | null = null;
|
||||
if (!newtId) {
|
||||
[site] = await db
|
||||
|
||||
@@ -26,22 +26,32 @@ export async function addTargets(
|
||||
|
||||
// Create a map for quick lookup
|
||||
const healthCheckMap = new Map<number, TargetHealthCheck>();
|
||||
healthCheckData.forEach(hc => {
|
||||
healthCheckData.forEach((hc) => {
|
||||
healthCheckMap.set(hc.targetId, hc);
|
||||
});
|
||||
|
||||
const healthCheckTargets = targets.map((target) => {
|
||||
const hc = healthCheckMap.get(target.targetId);
|
||||
|
||||
|
||||
// If no health check data found, skip this target
|
||||
if (!hc) {
|
||||
logger.warn(`No health check configuration found for target ${target.targetId}`);
|
||||
logger.warn(
|
||||
`No health check configuration found for target ${target.targetId}`
|
||||
);
|
||||
return null;
|
||||
}
|
||||
|
||||
// Ensure all necessary fields are present
|
||||
if (!hc.hcPath || !hc.hcHostname || !hc.hcPort || !hc.hcInterval || !hc.hcMethod) {
|
||||
logger.debug(`Skipping target ${target.targetId} due to missing health check fields`);
|
||||
if (
|
||||
!hc.hcPath ||
|
||||
!hc.hcHostname ||
|
||||
!hc.hcPort ||
|
||||
!hc.hcInterval ||
|
||||
!hc.hcMethod
|
||||
) {
|
||||
logger.debug(
|
||||
`Skipping target ${target.targetId} due to missing health check fields`
|
||||
);
|
||||
return null; // Skip targets with missing health check fields
|
||||
}
|
||||
|
||||
@@ -49,9 +59,11 @@ export async function addTargets(
|
||||
const hcHeadersSend: { [key: string]: string } = {};
|
||||
if (hcHeadersParse) {
|
||||
// transform
|
||||
hcHeadersParse.forEach((header: { name: string; value: string }) => {
|
||||
hcHeadersSend[header.name] = header.value;
|
||||
});
|
||||
hcHeadersParse.forEach(
|
||||
(header: { name: string; value: string }) => {
|
||||
hcHeadersSend[header.name] = header.value;
|
||||
}
|
||||
);
|
||||
}
|
||||
|
||||
// try to parse the hcStatus into a int and if not possible set to undefined
|
||||
@@ -77,12 +89,14 @@ export async function addTargets(
|
||||
hcHeaders: hcHeadersSend,
|
||||
hcMethod: hc.hcMethod,
|
||||
hcStatus: hcStatus,
|
||||
hcTlsServerName: hc.hcTlsServerName,
|
||||
hcTlsServerName: hc.hcTlsServerName
|
||||
};
|
||||
});
|
||||
|
||||
// Filter out any null values from health check targets
|
||||
const validHealthCheckTargets = healthCheckTargets.filter((target) => target !== null);
|
||||
const validHealthCheckTargets = healthCheckTargets.filter(
|
||||
(target) => target !== null
|
||||
);
|
||||
|
||||
await sendToClient(newtId, {
|
||||
type: `newt/healthcheck/add`,
|
||||
|
||||
@@ -24,9 +24,9 @@ export type CreateNewtResponse = {
|
||||
};
|
||||
|
||||
const createNewtSchema = z.strictObject({
|
||||
newtId: z.string(),
|
||||
secret: z.string()
|
||||
});
|
||||
newtId: z.string(),
|
||||
secret: z.string()
|
||||
});
|
||||
|
||||
export async function createNewt(
|
||||
req: Request,
|
||||
@@ -34,7 +34,6 @@ export async function createNewt(
|
||||
next: NextFunction
|
||||
): Promise<any> {
|
||||
try {
|
||||
|
||||
const parsedBody = createNewtSchema.safeParse(req.body);
|
||||
if (!parsedBody.success) {
|
||||
return next(
|
||||
@@ -58,7 +57,7 @@ export async function createNewt(
|
||||
await db.insert(newts).values({
|
||||
newtId: newtId,
|
||||
secretHash,
|
||||
dateCreated: moment().toISOString(),
|
||||
dateCreated: moment().toISOString()
|
||||
});
|
||||
|
||||
// give the newt their default permissions:
|
||||
@@ -75,12 +74,12 @@ export async function createNewt(
|
||||
data: {
|
||||
newtId,
|
||||
secret,
|
||||
token,
|
||||
token
|
||||
},
|
||||
success: true,
|
||||
error: false,
|
||||
message: "Newt created successfully",
|
||||
status: HttpCode.OK,
|
||||
status: HttpCode.OK
|
||||
});
|
||||
} catch (e) {
|
||||
if (e instanceof SqliteError && e.code === "SQLITE_CONSTRAINT_UNIQUE") {
|
||||
|
||||
@@ -22,6 +22,7 @@ import {
|
||||
import { verifyPassword } from "@server/auth/password";
|
||||
import logger from "@server/logger";
|
||||
import config from "@server/lib/config";
|
||||
import { APP_VERSION } from "@server/lib/consts";
|
||||
|
||||
export const olmGetTokenBodySchema = z.object({
|
||||
olmId: z.string(),
|
||||
@@ -205,10 +206,12 @@ export async function getOlmToken(
|
||||
return response<{
|
||||
token: string;
|
||||
exitNodes: { publicKey: string; endpoint: string }[];
|
||||
serverVersion: string;
|
||||
}>(res, {
|
||||
data: {
|
||||
token: resToken,
|
||||
exitNodes: exitNodesHpData
|
||||
exitNodes: exitNodesHpData,
|
||||
serverVersion: APP_VERSION
|
||||
},
|
||||
success: true,
|
||||
error: false,
|
||||
|
||||
@@ -61,9 +61,12 @@ export const startOlmOfflineChecker = (): void => {
|
||||
|
||||
// Send a disconnect message to the client if connected
|
||||
try {
|
||||
await sendTerminateClient(offlineClient.clientId, offlineClient.olmId); // terminate first
|
||||
await sendTerminateClient(
|
||||
offlineClient.clientId,
|
||||
offlineClient.olmId
|
||||
); // terminate first
|
||||
// wait a moment to ensure the message is sent
|
||||
await new Promise(resolve => setTimeout(resolve, 1000));
|
||||
await new Promise((resolve) => setTimeout(resolve, 1000));
|
||||
await disconnectClient(offlineClient.olmId);
|
||||
} catch (error) {
|
||||
logger.error(
|
||||
|
||||
@@ -113,14 +113,14 @@ export const handleOlmServerPeerAddMessage: MessageHandler = async (
|
||||
.select()
|
||||
.from(clientSitesAssociationsCache)
|
||||
.where(
|
||||
and(
|
||||
and(
|
||||
eq(clientSitesAssociationsCache.clientId, client.clientId),
|
||||
isNotNull(clientSitesAssociationsCache.endpoint),
|
||||
eq(clientSitesAssociationsCache.publicKey, client.pubKey) // limit it to the current session its connected with otherwise the endpoint could be stale
|
||||
)
|
||||
);
|
||||
|
||||
// pick an endpoint
|
||||
// pick an endpoint
|
||||
for (const assoc of currentSessionSiteAssociationCaches) {
|
||||
if (assoc.endpoint) {
|
||||
endpoint = assoc.endpoint;
|
||||
|
||||
@@ -8,4 +8,4 @@ export * from "./listUserOlms";
|
||||
export * from "./deleteUserOlm";
|
||||
export * from "./getUserOlm";
|
||||
export * from "./handleOlmServerPeerAddMessage";
|
||||
export * from "./handleOlmUnRelayMessage";
|
||||
export * from "./handleOlmUnRelayMessage";
|
||||
|
||||
@@ -10,8 +10,8 @@ import logger from "@server/logger";
|
||||
import { fromError } from "zod-validation-error";
|
||||
|
||||
const getOrgSchema = z.strictObject({
|
||||
orgId: z.string()
|
||||
});
|
||||
orgId: z.string()
|
||||
});
|
||||
|
||||
export async function checkId(
|
||||
req: Request,
|
||||
|
||||
@@ -11,8 +11,8 @@ import { fromZodError } from "zod-validation-error";
|
||||
import { OpenAPITags, registry } from "@server/openApi";
|
||||
|
||||
const getOrgSchema = z.strictObject({
|
||||
orgId: z.string()
|
||||
});
|
||||
orgId: z.string()
|
||||
});
|
||||
|
||||
export type GetOrgResponse = {
|
||||
org: Org;
|
||||
|
||||
@@ -19,8 +19,8 @@ import logger from "@server/logger";
|
||||
import { fromZodError } from "zod-validation-error";
|
||||
|
||||
const getOrgParamsSchema = z.strictObject({
|
||||
orgId: z.string()
|
||||
});
|
||||
orgId: z.string()
|
||||
});
|
||||
|
||||
export type GetOrgOverviewResponse = {
|
||||
orgName: string;
|
||||
|
||||
@@ -16,10 +16,11 @@ import { TierId } from "@server/lib/billing/tiers";
|
||||
import { cache } from "@server/lib/cache";
|
||||
|
||||
const updateOrgParamsSchema = z.strictObject({
|
||||
orgId: z.string()
|
||||
});
|
||||
orgId: z.string()
|
||||
});
|
||||
|
||||
const updateOrgBodySchema = z.strictObject({
|
||||
const updateOrgBodySchema = z
|
||||
.strictObject({
|
||||
name: z.string().min(1).max(255).optional(),
|
||||
requireTwoFactor: z.boolean().optional(),
|
||||
maxSessionLengthHours: z.number().nullable().optional(),
|
||||
|
||||
@@ -6,10 +6,10 @@ export type CreateOrgIdpResponse = {
|
||||
};
|
||||
|
||||
export type GetOrgIdpResponse = {
|
||||
idp: Idp,
|
||||
idpOidcConfig: IdpOidcConfig | null,
|
||||
redirectUrl: string
|
||||
}
|
||||
idp: Idp;
|
||||
idpOidcConfig: IdpOidcConfig | null;
|
||||
redirectUrl: string;
|
||||
};
|
||||
|
||||
export type ListOrgIdpsResponse = {
|
||||
idps: {
|
||||
@@ -18,7 +18,7 @@ export type ListOrgIdpsResponse = {
|
||||
name: string;
|
||||
type: string;
|
||||
variant: string;
|
||||
}[],
|
||||
}[];
|
||||
pagination: {
|
||||
total: number;
|
||||
limit: number;
|
||||
|
||||
@@ -31,4 +31,14 @@ export type ListRemoteExitNodesResponse = {
|
||||
pagination: { total: number; limit: number; offset: number };
|
||||
};
|
||||
|
||||
export type GetRemoteExitNodeResponse = { remoteExitNodeId: string; dateCreated: string; version: string | null; exitNodeId: number | null; name: string; address: string; endpoint: string; online: boolean; type: string | null; }
|
||||
export type GetRemoteExitNodeResponse = {
|
||||
remoteExitNodeId: string;
|
||||
dateCreated: string;
|
||||
version: string | null;
|
||||
exitNodeId: number | null;
|
||||
name: string;
|
||||
address: string;
|
||||
endpoint: string;
|
||||
online: boolean;
|
||||
type: string | null;
|
||||
};
|
||||
|
||||
@@ -11,21 +11,19 @@ import { and, eq } from "drizzle-orm";
|
||||
import { OpenAPITags, registry } from "@server/openApi";
|
||||
|
||||
const addEmailToResourceWhitelistBodySchema = z.strictObject({
|
||||
email: z.email()
|
||||
.or(
|
||||
z.string().regex(/^\*@[\w.-]+\.[a-zA-Z]{2,}$/, {
|
||||
error: "Invalid email address. Wildcard (*) must be the entire local part."
|
||||
})
|
||||
)
|
||||
.transform((v) => v.toLowerCase())
|
||||
});
|
||||
email: z
|
||||
.email()
|
||||
.or(
|
||||
z.string().regex(/^\*@[\w.-]+\.[a-zA-Z]{2,}$/, {
|
||||
error: "Invalid email address. Wildcard (*) must be the entire local part."
|
||||
})
|
||||
)
|
||||
.transform((v) => v.toLowerCase())
|
||||
});
|
||||
|
||||
const addEmailToResourceWhitelistParamsSchema = z.strictObject({
|
||||
resourceId: z
|
||||
.string()
|
||||
.transform(Number)
|
||||
.pipe(z.int().positive())
|
||||
});
|
||||
resourceId: z.string().transform(Number).pipe(z.int().positive())
|
||||
});
|
||||
|
||||
registry.registerPath({
|
||||
method: "post",
|
||||
|
||||
@@ -93,10 +93,7 @@ export async function addRoleToResource(
|
||||
.select()
|
||||
.from(roles)
|
||||
.where(
|
||||
and(
|
||||
eq(roles.roleId, roleId),
|
||||
eq(roles.orgId, resource.orgId)
|
||||
)
|
||||
and(eq(roles.roleId, roleId), eq(roles.orgId, resource.orgId))
|
||||
)
|
||||
.limit(1);
|
||||
|
||||
@@ -158,4 +155,3 @@ export async function addRoleToResource(
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
@@ -127,4 +127,3 @@ export async function addUserToResource(
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
@@ -16,17 +16,17 @@ import stoi from "@server/lib/stoi";
|
||||
import { logAccessAudit } from "#dynamic/lib/logAccessAudit";
|
||||
|
||||
const authWithAccessTokenBodySchema = z.strictObject({
|
||||
accessToken: z.string(),
|
||||
accessTokenId: z.string().optional()
|
||||
});
|
||||
accessToken: z.string(),
|
||||
accessTokenId: z.string().optional()
|
||||
});
|
||||
|
||||
const authWithAccessTokenParamsSchema = z.strictObject({
|
||||
resourceId: z
|
||||
.string()
|
||||
.optional()
|
||||
.transform(stoi)
|
||||
.pipe(z.int().positive().optional())
|
||||
});
|
||||
resourceId: z
|
||||
.string()
|
||||
.optional()
|
||||
.transform(stoi)
|
||||
.pipe(z.int().positive().optional())
|
||||
});
|
||||
|
||||
export type AuthWithAccessTokenResponse = {
|
||||
session?: string;
|
||||
|
||||
@@ -16,15 +16,12 @@ import config from "@server/lib/config";
|
||||
import { logAccessAudit } from "#dynamic/lib/logAccessAudit";
|
||||
|
||||
export const authWithPasswordBodySchema = z.strictObject({
|
||||
password: z.string()
|
||||
});
|
||||
password: z.string()
|
||||
});
|
||||
|
||||
export const authWithPasswordParamsSchema = z.strictObject({
|
||||
resourceId: z
|
||||
.string()
|
||||
.transform(Number)
|
||||
.pipe(z.int().positive())
|
||||
});
|
||||
resourceId: z.string().transform(Number).pipe(z.int().positive())
|
||||
});
|
||||
|
||||
export type AuthWithPasswordResponse = {
|
||||
session?: string;
|
||||
|
||||
@@ -15,15 +15,12 @@ import config from "@server/lib/config";
|
||||
import { logAccessAudit } from "#dynamic/lib/logAccessAudit";
|
||||
|
||||
export const authWithPincodeBodySchema = z.strictObject({
|
||||
pincode: z.string()
|
||||
});
|
||||
pincode: z.string()
|
||||
});
|
||||
|
||||
export const authWithPincodeParamsSchema = z.strictObject({
|
||||
resourceId: z
|
||||
.string()
|
||||
.transform(Number)
|
||||
.pipe(z.int().positive())
|
||||
});
|
||||
resourceId: z.string().transform(Number).pipe(z.int().positive())
|
||||
});
|
||||
|
||||
export type AuthWithPincodeResponse = {
|
||||
session?: string;
|
||||
|
||||
@@ -15,16 +15,13 @@ import config from "@server/lib/config";
|
||||
import { logAccessAudit } from "#dynamic/lib/logAccessAudit";
|
||||
|
||||
const authWithWhitelistBodySchema = z.strictObject({
|
||||
email: z.email().toLowerCase(),
|
||||
otp: z.string().optional()
|
||||
});
|
||||
email: z.email().toLowerCase(),
|
||||
otp: z.string().optional()
|
||||
});
|
||||
|
||||
const authWithWhitelistParamsSchema = z.strictObject({
|
||||
resourceId: z
|
||||
.string()
|
||||
.transform(Number)
|
||||
.pipe(z.int().positive())
|
||||
});
|
||||
resourceId: z.string().transform(Number).pipe(z.int().positive())
|
||||
});
|
||||
|
||||
export type AuthWithWhitelistResponse = {
|
||||
otpSent?: boolean;
|
||||
|
||||
@@ -26,16 +26,17 @@ import { getUniqueResourceName } from "@server/db/names";
|
||||
import { validateAndConstructDomain } from "@server/lib/domainUtils";
|
||||
|
||||
const createResourceParamsSchema = z.strictObject({
|
||||
orgId: z.string()
|
||||
});
|
||||
orgId: z.string()
|
||||
});
|
||||
|
||||
const createHttpResourceSchema = z.strictObject({
|
||||
const createHttpResourceSchema = z
|
||||
.strictObject({
|
||||
name: z.string().min(1).max(255),
|
||||
subdomain: z.string().nullable().optional(),
|
||||
http: z.boolean(),
|
||||
protocol: z.enum(["tcp", "udp"]),
|
||||
domainId: z.string(),
|
||||
stickySession: z.boolean().optional(),
|
||||
stickySession: z.boolean().optional()
|
||||
})
|
||||
.refine(
|
||||
(data) => {
|
||||
@@ -49,7 +50,8 @@ const createHttpResourceSchema = z.strictObject({
|
||||
}
|
||||
);
|
||||
|
||||
const createRawResourceSchema = z.strictObject({
|
||||
const createRawResourceSchema = z
|
||||
.strictObject({
|
||||
name: z.string().min(1).max(255),
|
||||
http: z.boolean(),
|
||||
protocol: z.enum(["tcp", "udp"]),
|
||||
@@ -188,7 +190,7 @@ async function createHttpResource(
|
||||
|
||||
const { name, domainId } = parsedBody.data;
|
||||
const subdomain = parsedBody.data.subdomain;
|
||||
const stickySession=parsedBody.data.stickySession;
|
||||
const stickySession = parsedBody.data.stickySession;
|
||||
|
||||
// Validate domain and construct full domain
|
||||
const domainResult = await validateAndConstructDomain(
|
||||
|
||||
@@ -16,19 +16,16 @@ import {
|
||||
import { OpenAPITags, registry } from "@server/openApi";
|
||||
|
||||
const createResourceRuleSchema = z.strictObject({
|
||||
action: z.enum(["ACCEPT", "DROP", "PASS"]),
|
||||
match: z.enum(["CIDR", "IP", "PATH", "COUNTRY"]),
|
||||
value: z.string().min(1),
|
||||
priority: z.int(),
|
||||
enabled: z.boolean().optional()
|
||||
});
|
||||
action: z.enum(["ACCEPT", "DROP", "PASS"]),
|
||||
match: z.enum(["CIDR", "IP", "PATH", "COUNTRY"]),
|
||||
value: z.string().min(1),
|
||||
priority: z.int(),
|
||||
enabled: z.boolean().optional()
|
||||
});
|
||||
|
||||
const createResourceRuleParamsSchema = z.strictObject({
|
||||
resourceId: z
|
||||
.string()
|
||||
.transform(Number)
|
||||
.pipe(z.int().positive())
|
||||
});
|
||||
resourceId: z.string().transform(Number).pipe(z.int().positive())
|
||||
});
|
||||
|
||||
registry.registerPath({
|
||||
method: "put",
|
||||
|
||||
@@ -15,11 +15,8 @@ import { OpenAPITags, registry } from "@server/openApi";
|
||||
|
||||
// Define Zod schema for request parameters validation
|
||||
const deleteResourceSchema = z.strictObject({
|
||||
resourceId: z
|
||||
.string()
|
||||
.transform(Number)
|
||||
.pipe(z.int().positive())
|
||||
});
|
||||
resourceId: z.string().transform(Number).pipe(z.int().positive())
|
||||
});
|
||||
|
||||
registry.registerPath({
|
||||
method: "delete",
|
||||
|
||||
@@ -11,12 +11,9 @@ import { fromError } from "zod-validation-error";
|
||||
import { OpenAPITags, registry } from "@server/openApi";
|
||||
|
||||
const deleteResourceRuleSchema = z.strictObject({
|
||||
ruleId: z.string().transform(Number).pipe(z.int().positive()),
|
||||
resourceId: z
|
||||
.string()
|
||||
.transform(Number)
|
||||
.pipe(z.int().positive())
|
||||
});
|
||||
ruleId: z.string().transform(Number).pipe(z.int().positive()),
|
||||
resourceId: z.string().transform(Number).pipe(z.int().positive())
|
||||
});
|
||||
|
||||
registry.registerPath({
|
||||
method: "delete",
|
||||
|
||||
@@ -17,11 +17,8 @@ import { checkOrgAccessPolicy } from "#dynamic/lib/checkOrgAccessPolicy";
|
||||
import { logAccessAudit } from "#dynamic/lib/logAccessAudit";
|
||||
|
||||
const getExchangeTokenParams = z.strictObject({
|
||||
resourceId: z
|
||||
.string()
|
||||
.transform(Number)
|
||||
.pipe(z.int().positive())
|
||||
});
|
||||
resourceId: z.string().transform(Number).pipe(z.int().positive())
|
||||
});
|
||||
|
||||
export type GetExchangeTokenResponse = {
|
||||
requestToken: string;
|
||||
|
||||
@@ -12,15 +12,15 @@ import stoi from "@server/lib/stoi";
|
||||
import { OpenAPITags, registry } from "@server/openApi";
|
||||
|
||||
const getResourceSchema = z.strictObject({
|
||||
resourceId: z
|
||||
.string()
|
||||
.optional()
|
||||
.transform(stoi)
|
||||
.pipe(z.int().positive().optional())
|
||||
.optional(),
|
||||
niceId: z.string().optional(),
|
||||
orgId: z.string().optional()
|
||||
});
|
||||
resourceId: z
|
||||
.string()
|
||||
.optional()
|
||||
.transform(stoi)
|
||||
.pipe(z.int().positive().optional())
|
||||
.optional(),
|
||||
niceId: z.string().optional(),
|
||||
orgId: z.string().optional()
|
||||
});
|
||||
|
||||
async function query(resourceId?: number, niceId?: string, orgId?: string) {
|
||||
if (resourceId) {
|
||||
@@ -34,13 +34,18 @@ async function query(resourceId?: number, niceId?: string, orgId?: string) {
|
||||
const [res] = await db
|
||||
.select()
|
||||
.from(resources)
|
||||
.where(and(eq(resources.niceId, niceId), eq(resources.orgId, orgId)))
|
||||
.where(
|
||||
and(eq(resources.niceId, niceId), eq(resources.orgId, orgId))
|
||||
)
|
||||
.limit(1);
|
||||
return res;
|
||||
}
|
||||
}
|
||||
|
||||
export type GetResourceResponse = Omit<NonNullable<Awaited<ReturnType<typeof query>>>, 'headers'> & {
|
||||
export type GetResourceResponse = Omit<
|
||||
NonNullable<Awaited<ReturnType<typeof query>>>,
|
||||
"headers"
|
||||
> & {
|
||||
headers: { name: string; value: string }[] | null;
|
||||
};
|
||||
|
||||
@@ -101,7 +106,9 @@ export async function getResource(
|
||||
return response<GetResourceResponse>(res, {
|
||||
data: {
|
||||
...resource,
|
||||
headers: resource.headers ? JSON.parse(resource.headers) : resource.headers
|
||||
headers: resource.headers
|
||||
? JSON.parse(resource.headers)
|
||||
: resource.headers
|
||||
},
|
||||
success: true,
|
||||
error: false,
Some files were not shown because too many files have changed in this diff