Mirror of https://github.com/fosrl/pangolin.git (synced 2026-02-07 21:46:38 +00:00)

Merge branch 'dev' into distribution

Changed file: .github/workflows/dev-image.yml (vendored, 75 lines)
@@ -1,75 +0,0 @@
name: Create Dev-Image

on:
    pull_request:
        branches:
            - main
            - dev
        types:
            - opened
            - synchronize
            - reopened

jobs:
    docker:
        runs-on: ubuntu-latest

        env:
            TAG_URL: https://hub.docker.com/r/${{ vars.DOCKER_HUB_REPO }}/tags
            TAG: ${{ vars.DOCKER_HUB_REPO }}:dev-pr${{ github.event.pull_request.number }}
            TAG_PG: ${{ vars.DOCKER_HUB_REPO }}:postgresql-dev-pr${{ github.event.pull_request.number }}

        steps:
            - name: Login to Docker Hub
              uses: docker/login-action@v3
              with:
                  username: ${{ secrets.DOCKER_HUB_USERNAME }}
                  password: ${{ secrets.DOCKER_HUB_ACCESS_TOKEN }}

            - name: Set up Docker Buildx
              uses: docker/setup-buildx-action@v3

            - name: Build and push Docker image SQLITE
              uses: docker/build-push-action@v6
              with:
                  platforms: linux/amd64
                  push: true
                  tags: ${{ env.TAG }}
                  cache-from: type=registry,ref=${{ vars.DOCKER_HUB_REPO }}:buildcache
                  cache-to: type=registry,ref=${{ vars.DOCKER_HUB_REPO }}:buildcache,mode=max
                  build-args: DATABASE=sqlite

            - name: Build and push Docker image PG
              uses: docker/build-push-action@v6
              with:
                  platforms: linux/amd64
                  push: true
                  tags: ${{ env.TAG_PG }}
                  cache-from: type=registry,ref=${{ vars.DOCKER_HUB_REPO }}:buildcache-pg
                  cache-to: type=registry,ref=${{ vars.DOCKER_HUB_REPO }}:buildcache-pg,mode=max
                  build-args: DATABASE=pg

            - uses: actions/github-script@v8
              with:
                  script: |
                      const repoUrl = process.env.TAG_URL;
                      const tag = process.env.TAG;
                      const tagPg = process.env.TAG_PG;
                      github.rest.issues.createComment({
                          issue_number: context.issue.number,
                          owner: context.repo.owner,
                          repo: context.repo.repo,
                          body: `👋 Thanks for your PR!
                      Dev images for this PR are now available on [docker hub](${repoUrl}):

                      **SQLITE Image:**
                      \`\`\`
                      ${tag}
                      \`\`\`

                      **Postgresql Image:**
                      \`\`\`
                      ${tagPg}
                      \`\`\``
                      })
@@ -1541,8 +1541,8 @@
    "autoLoginError": "Auto Login Error",
    "autoLoginErrorNoRedirectUrl": "No redirect URL received from the identity provider.",
    "autoLoginErrorGeneratingUrl": "Failed to generate authentication URL.",
    "remoteExitNodeManageRemoteExitNodes": "Managed Nodes",
    "remoteExitNodeDescription": "Self-host one or more nodes for tunnel exit servers",
    "remoteExitNodeManageRemoteExitNodes": "Remote Nodes",
    "remoteExitNodeDescription": "Self-host one or more remote nodes for tunnel exit servers",
    "remoteExitNodes": "Nodes",
    "searchRemoteExitNodes": "Search nodes...",
    "remoteExitNodeAdd": "Add Node",
@@ -1552,7 +1552,7 @@
    "remoteExitNodeMessageConfirm": "To confirm, please type the name of the node below.",
    "remoteExitNodeConfirmDelete": "Confirm Delete Node",
    "remoteExitNodeDelete": "Delete Node",
    "sidebarRemoteExitNodes": "Nodes",
    "sidebarRemoteExitNodes": "Remote Nodes",
    "remoteExitNodeCreate": {
        "title": "Create Node",
        "description": "Create a new node to extend your network connectivity",
@@ -44,27 +44,25 @@ export function createApiServer() {
    }

    const corsConfig = config.getRawConfig().server.cors;
    const options = {
        ...(corsConfig?.origins
            ? { origin: corsConfig.origins }
            : {
                  origin: (origin: any, callback: any) => {
                      callback(null, true);
                  }
              }),
        ...(corsConfig?.methods && { methods: corsConfig.methods }),
        ...(corsConfig?.allowed_headers && {
            allowedHeaders: corsConfig.allowed_headers
        }),
        credentials: !(corsConfig?.credentials === false)
    };

    if (build == "oss") {
    const options = {
        ...(corsConfig?.origins
            ? { origin: corsConfig.origins }
            : {
                  origin: (origin: any, callback: any) => {
                      callback(null, true);
                  }
              }),
        ...(corsConfig?.methods && { methods: corsConfig.methods }),
        ...(corsConfig?.allowed_headers && {
            allowedHeaders: corsConfig.allowed_headers
        }),
        credentials: !(corsConfig?.credentials === false)
    };

    if (build == "oss" || !corsConfig) {
        logger.debug("Using CORS options", options);
        apiServer.use(cors(options));
    } else {
    } else if (corsConfig) {
        // Use the custom CORS middleware with loginPage support
        apiServer.use(corsWithLoginPageSupport(corsConfig));
    }
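As a rough illustration of what the conditional spreads in this hunk resolve to (a sketch, not the project's exact code): when a `cors` section is present in the config, its origins, methods, and headers are passed through to the middleware; when it is absent, every origin is allowed and credentials default to on. The `CorsConfig` shape below is an assumption for illustration.

```typescript
// Hypothetical config shape, assumed for this sketch only.
interface CorsConfig {
    origins?: string[];
    methods?: string[];
    allowed_headers?: string[];
    credentials?: boolean;
}

// Mirrors the conditional-spread pattern above: each spread only contributes
// a key when the corresponding config value exists.
function buildCorsOptions(corsConfig?: CorsConfig) {
    return {
        ...(corsConfig?.origins
            ? { origin: corsConfig.origins }
            : {
                  // No origins configured: allow any origin.
                  origin: (_origin: any, callback: any) => callback(null, true)
              }),
        ...(corsConfig?.methods && { methods: corsConfig.methods }),
        ...(corsConfig?.allowed_headers && {
            allowedHeaders: corsConfig.allowed_headers
        }),
        // Credentials are enabled unless explicitly set to false.
        credentials: !(corsConfig?.credentials === false)
    };
}

// No cors section -> permissive defaults; configured section -> restricted options.
const permissive = buildCorsOptions(undefined);
const restricted = buildCorsOptions({ origins: ["https://app.example.com"] });
```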
@@ -4,9 +4,6 @@ import { resourceSessions, ResourceSession } from "@server/db";
import { db } from "@server/db";
import { eq, and } from "drizzle-orm";
import config from "@server/lib/config";
import axios from "axios";
import logger from "@server/logger";
import { tokenManager } from "@server/lib/tokenManager";

export const SESSION_COOKIE_NAME =
    config.getRawConfig().server.session_cookie_name;
@@ -65,29 +62,6 @@ export async function validateResourceSessionToken(
    token: string,
    resourceId: number
): Promise<ResourceSessionValidationResult> {
    if (config.isManagedMode()) {
        try {
            const response = await axios.post(`${config.getRawConfig().managed?.endpoint}/api/v1/hybrid/resource/${resourceId}/session/validate`, {
                token: token
            }, await tokenManager.getAuthHeader());
            return response.data.data;
        } catch (error) {
            if (axios.isAxiosError(error)) {
                logger.error("Error validating resource session token in hybrid mode:", {
                    message: error.message,
                    code: error.code,
                    status: error.response?.status,
                    statusText: error.response?.statusText,
                    url: error.config?.url,
                    method: error.config?.method
                });
            } else {
                logger.error("Error validating resource session token in hybrid mode:", error);
            }
            return { resourceSession: null };
        }
    }

    const sessionId = encodeHexLowerCase(
        sha256(new TextEncoder().encode(token))
    );
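This pattern repeats throughout the hybrid code paths in this commit: in managed mode the lookup is delegated to the managed endpoint over HTTP and falls back to a "not found" result on error; otherwise the local database is queried. A minimal sketch of that shape (names simplified; not the exact project helpers):

```typescript
import axios from "axios";
import config from "@server/lib/config";
import logger from "@server/logger";
import { tokenManager } from "@server/lib/tokenManager";

// Generic helper illustrating the managed-mode delegation pattern:
// POST to the managed endpoint and unwrap `data.data`, or return a fallback on failure.
async function managedLookup<T>(path: string, body: unknown, fallback: T): Promise<T> {
    const endpoint = config.getRawConfig().managed?.endpoint;
    try {
        const response = await axios.post(
            `${endpoint}/api/v1/hybrid${path}`,
            body,
            await tokenManager.getAuthHeader()
        );
        return response.data.data as T;
    } catch (error) {
        if (axios.isAxiosError(error)) {
            logger.error("Managed lookup failed:", {
                message: error.message,
                status: error.response?.status,
                url: error.config?.url
            });
        } else {
            logger.error("Managed lookup failed:", error);
        }
        return fallback;
    }
}

// Usage analogous to validateResourceSessionToken above:
// const result = await managedLookup(`/resource/${resourceId}/session/validate`, { token }, { resourceSession: null });
```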
@@ -721,3 +721,4 @@ export type SiteResource = InferSelectModel<typeof siteResources>;
export type SetupToken = InferSelectModel<typeof setupTokens>;
export type HostMeta = InferSelectModel<typeof hostMeta>;
export type TargetHealthCheck = InferSelectModel<typeof targetHealthCheck>;
export type IdpOidcConfig = InferSelectModel<typeof idpOidcConfig>;
@@ -17,10 +17,6 @@ import {
|
||||
users
|
||||
} from "@server/db";
|
||||
import { and, eq } from "drizzle-orm";
|
||||
import axios from "axios";
|
||||
import config from "@server/lib/config";
|
||||
import logger from "@server/logger";
|
||||
import { tokenManager } from "@server/lib/tokenManager";
|
||||
|
||||
export type ResourceWithAuth = {
|
||||
resource: Resource | null;
|
||||
@@ -40,30 +36,6 @@ export type UserSessionWithUser = {
|
||||
export async function getResourceByDomain(
|
||||
domain: string
|
||||
): Promise<ResourceWithAuth | null> {
|
||||
if (config.isManagedMode()) {
|
||||
try {
|
||||
const response = await axios.get(
|
||||
`${config.getRawConfig().managed?.endpoint}/api/v1/hybrid/resource/domain/${domain}`,
|
||||
await tokenManager.getAuthHeader()
|
||||
);
|
||||
return response.data.data;
|
||||
} catch (error) {
|
||||
if (axios.isAxiosError(error)) {
|
||||
logger.error("Error fetching config in verify session:", {
|
||||
message: error.message,
|
||||
code: error.code,
|
||||
status: error.response?.status,
|
||||
statusText: error.response?.statusText,
|
||||
url: error.config?.url,
|
||||
method: error.config?.method
|
||||
});
|
||||
} else {
|
||||
logger.error("Error fetching config in verify session:", error);
|
||||
}
|
||||
return null;
|
||||
}
|
||||
}
|
||||
|
||||
const [result] = await db
|
||||
.select()
|
||||
.from(resources)
|
||||
@@ -100,30 +72,6 @@ export async function getResourceByDomain(
|
||||
export async function getUserSessionWithUser(
|
||||
userSessionId: string
|
||||
): Promise<UserSessionWithUser | null> {
|
||||
if (config.isManagedMode()) {
|
||||
try {
|
||||
const response = await axios.get(
|
||||
`${config.getRawConfig().managed?.endpoint}/api/v1/hybrid/session/${userSessionId}`,
|
||||
await tokenManager.getAuthHeader()
|
||||
);
|
||||
return response.data.data;
|
||||
} catch (error) {
|
||||
if (axios.isAxiosError(error)) {
|
||||
logger.error("Error fetching config in verify session:", {
|
||||
message: error.message,
|
||||
code: error.code,
|
||||
status: error.response?.status,
|
||||
statusText: error.response?.statusText,
|
||||
url: error.config?.url,
|
||||
method: error.config?.method
|
||||
});
|
||||
} else {
|
||||
logger.error("Error fetching config in verify session:", error);
|
||||
}
|
||||
return null;
|
||||
}
|
||||
}
|
||||
|
||||
const [res] = await db
|
||||
.select()
|
||||
.from(sessions)
|
||||
@@ -144,30 +92,6 @@ export async function getUserSessionWithUser(
|
||||
* Get user organization role
|
||||
*/
|
||||
export async function getUserOrgRole(userId: string, orgId: string) {
|
||||
if (config.isManagedMode()) {
|
||||
try {
|
||||
const response = await axios.get(
|
||||
`${config.getRawConfig().managed?.endpoint}/api/v1/hybrid/user/${userId}/org/${orgId}/role`,
|
||||
await tokenManager.getAuthHeader()
|
||||
);
|
||||
return response.data.data;
|
||||
} catch (error) {
|
||||
if (axios.isAxiosError(error)) {
|
||||
logger.error("Error fetching config in verify session:", {
|
||||
message: error.message,
|
||||
code: error.code,
|
||||
status: error.response?.status,
|
||||
statusText: error.response?.statusText,
|
||||
url: error.config?.url,
|
||||
method: error.config?.method
|
||||
});
|
||||
} else {
|
||||
logger.error("Error fetching config in verify session:", error);
|
||||
}
|
||||
return null;
|
||||
}
|
||||
}
|
||||
|
||||
const userOrgRole = await db
|
||||
.select()
|
||||
.from(userOrgs)
|
||||
@@ -184,30 +108,6 @@ export async function getRoleResourceAccess(
|
||||
resourceId: number,
|
||||
roleId: number
|
||||
) {
|
||||
if (config.isManagedMode()) {
|
||||
try {
|
||||
const response = await axios.get(
|
||||
`${config.getRawConfig().managed?.endpoint}/api/v1/hybrid/role/${roleId}/resource/${resourceId}/access`,
|
||||
await tokenManager.getAuthHeader()
|
||||
);
|
||||
return response.data.data;
|
||||
} catch (error) {
|
||||
if (axios.isAxiosError(error)) {
|
||||
logger.error("Error fetching config in verify session:", {
|
||||
message: error.message,
|
||||
code: error.code,
|
||||
status: error.response?.status,
|
||||
statusText: error.response?.statusText,
|
||||
url: error.config?.url,
|
||||
method: error.config?.method
|
||||
});
|
||||
} else {
|
||||
logger.error("Error fetching config in verify session:", error);
|
||||
}
|
||||
return null;
|
||||
}
|
||||
}
|
||||
|
||||
const roleResourceAccess = await db
|
||||
.select()
|
||||
.from(roleResources)
|
||||
@@ -229,30 +129,6 @@ export async function getUserResourceAccess(
|
||||
userId: string,
|
||||
resourceId: number
|
||||
) {
|
||||
if (config.isManagedMode()) {
|
||||
try {
|
||||
const response = await axios.get(
|
||||
`${config.getRawConfig().managed?.endpoint}/api/v1/hybrid/user/${userId}/resource/${resourceId}/access`,
|
||||
await tokenManager.getAuthHeader()
|
||||
);
|
||||
return response.data.data;
|
||||
} catch (error) {
|
||||
if (axios.isAxiosError(error)) {
|
||||
logger.error("Error fetching config in verify session:", {
|
||||
message: error.message,
|
||||
code: error.code,
|
||||
status: error.response?.status,
|
||||
statusText: error.response?.statusText,
|
||||
url: error.config?.url,
|
||||
method: error.config?.method
|
||||
});
|
||||
} else {
|
||||
logger.error("Error fetching config in verify session:", error);
|
||||
}
|
||||
return null;
|
||||
}
|
||||
}
|
||||
|
||||
const userResourceAccess = await db
|
||||
.select()
|
||||
.from(userResources)
|
||||
@@ -273,30 +149,6 @@ export async function getUserResourceAccess(
|
||||
export async function getResourceRules(
|
||||
resourceId: number
|
||||
): Promise<ResourceRule[]> {
|
||||
if (config.isManagedMode()) {
|
||||
try {
|
||||
const response = await axios.get(
|
||||
`${config.getRawConfig().managed?.endpoint}/api/v1/hybrid/resource/${resourceId}/rules`,
|
||||
await tokenManager.getAuthHeader()
|
||||
);
|
||||
return response.data.data;
|
||||
} catch (error) {
|
||||
if (axios.isAxiosError(error)) {
|
||||
logger.error("Error fetching config in verify session:", {
|
||||
message: error.message,
|
||||
code: error.code,
|
||||
status: error.response?.status,
|
||||
statusText: error.response?.statusText,
|
||||
url: error.config?.url,
|
||||
method: error.config?.method
|
||||
});
|
||||
} else {
|
||||
logger.error("Error fetching config in verify session:", error);
|
||||
}
|
||||
return [];
|
||||
}
|
||||
}
|
||||
|
||||
const rules = await db
|
||||
.select()
|
||||
.from(resourceRules)
|
||||
@@ -311,30 +163,6 @@ export async function getResourceRules(
|
||||
export async function getOrgLoginPage(
|
||||
orgId: string
|
||||
): Promise<LoginPage | null> {
|
||||
if (config.isManagedMode()) {
|
||||
try {
|
||||
const response = await axios.get(
|
||||
`${config.getRawConfig().managed?.endpoint}/api/v1/hybrid/org/${orgId}/login-page`,
|
||||
await tokenManager.getAuthHeader()
|
||||
);
|
||||
return response.data.data;
|
||||
} catch (error) {
|
||||
if (axios.isAxiosError(error)) {
|
||||
logger.error("Error fetching config in verify session:", {
|
||||
message: error.message,
|
||||
code: error.code,
|
||||
status: error.response?.status,
|
||||
statusText: error.response?.statusText,
|
||||
url: error.config?.url,
|
||||
method: error.config?.method
|
||||
});
|
||||
} else {
|
||||
logger.error("Error fetching config in verify session:", error);
|
||||
}
|
||||
return null;
|
||||
}
|
||||
}
|
||||
|
||||
const [result] = await db
|
||||
.select()
|
||||
.from(loginPageOrg)
|
||||
|
||||
@@ -760,3 +760,4 @@ export type OrgDomains = InferSelectModel<typeof orgDomains>;
export type SetupToken = InferSelectModel<typeof setupTokens>;
export type HostMeta = InferSelectModel<typeof hostMeta>;
export type TargetHealthCheck = InferSelectModel<typeof targetHealthCheck>;
export type IdpOidcConfig = InferSelectModel<typeof idpOidcConfig>;
|
||||
@@ -6,11 +6,6 @@ import logger from "@server/logger";
import SMTPTransport from "nodemailer/lib/smtp-transport";

function createEmailClient() {
    if (config.isManagedMode()) {
        // LETS NOT WORRY ABOUT EMAILS IN HYBRID
        return;
    }

    const emailConfig = config.getRawConfig().email;
    if (!emailConfig) {
        logger.warn(
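For context, a minimal sketch of how a guard like this typically plays out with nodemailer: managed/hybrid mode builds no local client, and a missing `email` section only produces a warning. The SMTP field names (`smtp_host`, `smtp_port`, `smtp_user`, `smtp_pass`) are assumptions for illustration, not taken from this diff.

```typescript
import nodemailer from "nodemailer";
import config from "@server/lib/config";
import logger from "@server/logger";

// Hypothetical shape of the email section of the raw config (assumed).
interface EmailConfig {
    smtp_host: string;
    smtp_port: number;
    smtp_user?: string;
    smtp_pass?: string;
}

function createEmailClientSketch() {
    // In managed/hybrid mode the cloud side owns email, so no local client is built.
    if (config.isManagedMode()) {
        return undefined;
    }

    const emailConfig = config.getRawConfig().email as EmailConfig | undefined;
    if (!emailConfig) {
        logger.warn("Email is not configured; transactional emails will be skipped.");
        return undefined;
    }

    // nodemailer.createTransport accepts plain SMTP connection options.
    return nodemailer.createTransport({
        host: emailConfig.smtp_host,
        port: emailConfig.smtp_port,
        auth: emailConfig.smtp_user
            ? { user: emailConfig.smtp_user, pass: emailConfig.smtp_pass }
            : undefined
    });
}
```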
|
||||
@@ -1,151 +0,0 @@
|
||||
import logger from "@server/logger";
|
||||
import config from "@server/lib/config";
|
||||
import { createWebSocketClient } from "./routers/ws/client";
|
||||
import { addPeer, deletePeer } from "./routers/gerbil/peers";
|
||||
import { db, exitNodes } from "./db";
|
||||
import { TraefikConfigManager } from "./lib/traefik/TraefikConfigManager";
|
||||
import { tokenManager } from "./lib/tokenManager";
|
||||
import { APP_VERSION } from "./lib/consts";
|
||||
import axios from "axios";
|
||||
|
||||
export async function createHybridClientServer() {
|
||||
logger.info("Starting hybrid client server...");
|
||||
|
||||
// Start the token manager
|
||||
await tokenManager.start();
|
||||
|
||||
const token = await tokenManager.getToken();
|
||||
|
||||
const monitor = new TraefikConfigManager();
|
||||
|
||||
await monitor.start();
|
||||
|
||||
// Create client
|
||||
const client = createWebSocketClient(
|
||||
token,
|
||||
config.getRawConfig().managed!.endpoint!,
|
||||
{
|
||||
reconnectInterval: 5000,
|
||||
pingInterval: 30000,
|
||||
pingTimeout: 10000
|
||||
}
|
||||
);
|
||||
|
||||
// Register message handlers
|
||||
client.registerHandler("remoteExitNode/peers/add", async (message) => {
|
||||
const { publicKey, allowedIps } = message.data;
|
||||
|
||||
// TODO: we are getting the exit node twice here
|
||||
// NOTE: there should only be one gerbil registered so...
|
||||
const [exitNode] = await db.select().from(exitNodes).limit(1);
|
||||
await addPeer(exitNode.exitNodeId, {
|
||||
publicKey: publicKey,
|
||||
allowedIps: allowedIps || []
|
||||
});
|
||||
});
|
||||
|
||||
client.registerHandler("remoteExitNode/peers/remove", async (message) => {
|
||||
const { publicKey } = message.data;
|
||||
|
||||
// TODO: we are getting the exit node twice here
|
||||
// NOTE: there should only be one gerbil registered so...
|
||||
const [exitNode] = await db.select().from(exitNodes).limit(1);
|
||||
await deletePeer(exitNode.exitNodeId, publicKey);
|
||||
});
|
||||
|
||||
// /update-proxy-mapping
|
||||
client.registerHandler("remoteExitNode/update-proxy-mapping", async (message) => {
|
||||
try {
|
||||
const [exitNode] = await db.select().from(exitNodes).limit(1);
|
||||
if (!exitNode) {
|
||||
logger.error("No exit node found for proxy mapping update");
|
||||
return;
|
||||
}
|
||||
|
||||
const response = await axios.post(`${exitNode.endpoint}/update-proxy-mapping`, message.data);
|
||||
logger.info(`Successfully updated proxy mapping: ${response.status}`);
|
||||
} catch (error) {
|
||||
// pull data out of the axios error to log
|
||||
if (axios.isAxiosError(error)) {
|
||||
logger.error("Error updating proxy mapping:", {
|
||||
message: error.message,
|
||||
code: error.code,
|
||||
status: error.response?.status,
|
||||
statusText: error.response?.statusText,
|
||||
url: error.config?.url,
|
||||
method: error.config?.method
|
||||
});
|
||||
} else {
|
||||
logger.error("Error updating proxy mapping:", error);
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
// /update-destinations
|
||||
client.registerHandler("remoteExitNode/update-destinations", async (message) => {
|
||||
try {
|
||||
const [exitNode] = await db.select().from(exitNodes).limit(1);
|
||||
if (!exitNode) {
|
||||
logger.error("No exit node found for destinations update");
|
||||
return;
|
||||
}
|
||||
|
||||
const response = await axios.post(`${exitNode.endpoint}/update-destinations`, message.data);
|
||||
logger.info(`Successfully updated destinations: ${response.status}`);
|
||||
} catch (error) {
|
||||
// pull data out of the axios error to log
|
||||
if (axios.isAxiosError(error)) {
|
||||
logger.error("Error updating destinations:", {
|
||||
message: error.message,
|
||||
code: error.code,
|
||||
status: error.response?.status,
|
||||
statusText: error.response?.statusText,
|
||||
url: error.config?.url,
|
||||
method: error.config?.method
|
||||
});
|
||||
} else {
|
||||
logger.error("Error updating destinations:", error);
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
client.registerHandler("remoteExitNode/traefik/reload", async (message) => {
|
||||
await monitor.HandleTraefikConfig();
|
||||
});
|
||||
|
||||
// Listen to connection events
|
||||
client.on("connect", () => {
|
||||
logger.info("Connected to WebSocket server");
|
||||
client.sendMessage("remoteExitNode/register", {
|
||||
remoteExitNodeVersion: APP_VERSION
|
||||
});
|
||||
});
|
||||
|
||||
client.on("disconnect", () => {
|
||||
logger.info("Disconnected from WebSocket server");
|
||||
});
|
||||
|
||||
client.on("message", (message) => {
|
||||
logger.info(
|
||||
`Received message: ${message.type} ${JSON.stringify(message.data)}`
|
||||
);
|
||||
});
|
||||
|
||||
// Connect to the server
|
||||
try {
|
||||
await client.connect();
|
||||
logger.info("Connection initiated");
|
||||
} catch (error) {
|
||||
logger.error("Failed to connect:", error);
|
||||
}
|
||||
|
||||
// Store the ping interval stop function for cleanup if needed
|
||||
const stopPingInterval = client.sendMessageInterval(
|
||||
"remoteExitNode/ping",
|
||||
{ timestamp: Date.now() / 1000 },
|
||||
60000
|
||||
); // send every minute
|
||||
|
||||
// Return client and cleanup function for potential use
|
||||
return { client, stopPingInterval };
|
||||
}
|
||||
@@ -5,9 +5,15 @@ import { runSetupFunctions } from "./setup";
|
||||
import { createApiServer } from "./apiServer";
|
||||
import { createNextServer } from "./nextServer";
|
||||
import { createInternalServer } from "./internalServer";
|
||||
import { ApiKey, ApiKeyOrg, RemoteExitNode, Session, User, UserOrg } from "@server/db";
|
||||
import {
|
||||
ApiKey,
|
||||
ApiKeyOrg,
|
||||
RemoteExitNode,
|
||||
Session,
|
||||
User,
|
||||
UserOrg
|
||||
} from "@server/db";
|
||||
import { createIntegrationApiServer } from "./integrationApiServer";
|
||||
import { createHybridClientServer } from "./hybridServer";
|
||||
import config from "@server/lib/config";
|
||||
import { setHostMeta } from "@server/lib/hostMeta";
|
||||
import { initTelemetryClient } from "./lib/telemetry.js";
|
||||
@@ -26,16 +32,11 @@ async function startServers() {
|
||||
const apiServer = createApiServer();
|
||||
const internalServer = createInternalServer();
|
||||
|
||||
let hybridClientServer;
|
||||
let nextServer;
|
||||
if (config.isManagedMode()) {
|
||||
hybridClientServer = await createHybridClientServer();
|
||||
} else {
|
||||
nextServer = await createNextServer();
|
||||
if (config.getRawConfig().traefik.file_mode) {
|
||||
const monitor = new TraefikConfigManager();
|
||||
await monitor.start();
|
||||
}
|
||||
nextServer = await createNextServer();
|
||||
if (config.getRawConfig().traefik.file_mode) {
|
||||
const monitor = new TraefikConfigManager();
|
||||
await monitor.start();
|
||||
}
|
||||
|
||||
let integrationServer;
|
||||
@@ -49,8 +50,7 @@ async function startServers() {
|
||||
apiServer,
|
||||
nextServer,
|
||||
internalServer,
|
||||
integrationServer,
|
||||
hybridClientServer
|
||||
integrationServer
|
||||
};
|
||||
}
|
||||
|
||||
|
||||
@@ -31,6 +31,17 @@ interface StripeEvent {
|
||||
};
|
||||
}
|
||||
|
||||
export function noop() {
|
||||
if (
|
||||
build !== "saas" ||
|
||||
!process.env.S3_BUCKET ||
|
||||
!process.env.LOCAL_FILE_PATH
|
||||
) {
|
||||
return true;
|
||||
}
|
||||
return false;
|
||||
}
|
||||
|
||||
export class UsageService {
|
||||
private cache: NodeCache;
|
||||
private bucketName: string | undefined;
|
||||
@@ -41,7 +52,7 @@ export class UsageService {
|
||||
|
||||
constructor() {
|
||||
this.cache = new NodeCache({ stdTTL: 300 }); // 5 minute TTL
|
||||
if (build !== "saas") {
|
||||
if (noop()) {
|
||||
return;
|
||||
}
|
||||
// this.bucketName = privateConfig.getRawPrivateConfig().stripe?.s3Bucket;
|
||||
@@ -71,7 +82,9 @@ export class UsageService {
|
||||
|
||||
private async initializeEventsDirectory(): Promise<void> {
|
||||
if (!this.eventsDir) {
|
||||
logger.warn("Stripe local file path is not configured, skipping events directory initialization.");
|
||||
logger.warn(
|
||||
"Stripe local file path is not configured, skipping events directory initialization."
|
||||
);
|
||||
return;
|
||||
}
|
||||
try {
|
||||
@@ -83,7 +96,9 @@ export class UsageService {
|
||||
|
||||
private async uploadPendingEventFilesOnStartup(): Promise<void> {
|
||||
if (!this.eventsDir || !this.bucketName) {
|
||||
logger.warn("Stripe local file path or bucket name is not configured, skipping leftover event file upload.");
|
||||
logger.warn(
|
||||
"Stripe local file path or bucket name is not configured, skipping leftover event file upload."
|
||||
);
|
||||
return;
|
||||
}
|
||||
try {
|
||||
@@ -106,15 +121,17 @@ export class UsageService {
|
||||
ContentType: "application/json"
|
||||
});
|
||||
await s3Client.send(uploadCommand);
|
||||
|
||||
|
||||
// Check if file still exists before unlinking
|
||||
try {
|
||||
await fs.access(filePath);
|
||||
await fs.unlink(filePath);
|
||||
} catch (unlinkError) {
|
||||
logger.debug(`Startup file ${file} was already deleted`);
|
||||
logger.debug(
|
||||
`Startup file ${file} was already deleted`
|
||||
);
|
||||
}
|
||||
|
||||
|
||||
logger.info(
|
||||
`Uploaded leftover event file ${file} to S3 with ${events.length} events`
|
||||
);
|
||||
@@ -124,7 +141,9 @@ export class UsageService {
|
||||
await fs.access(filePath);
|
||||
await fs.unlink(filePath);
|
||||
} catch (unlinkError) {
|
||||
logger.debug(`Empty startup file ${file} was already deleted`);
|
||||
logger.debug(
|
||||
`Empty startup file ${file} was already deleted`
|
||||
);
|
||||
}
|
||||
}
|
||||
} catch (err) {
|
||||
@@ -135,8 +154,8 @@ export class UsageService {
|
||||
}
|
||||
}
|
||||
}
|
||||
} catch (err) {
|
||||
logger.error("Failed to scan for leftover event files:", err);
|
||||
} catch (error) {
|
||||
logger.error("Failed to scan for leftover event files");
|
||||
}
|
||||
}
|
||||
|
||||
@@ -146,17 +165,17 @@ export class UsageService {
|
||||
value: number,
|
||||
transaction: any = null
|
||||
): Promise<Usage | null> {
|
||||
if (build !== "saas") {
|
||||
if (noop()) {
|
||||
return null;
|
||||
}
|
||||
|
||||
|
||||
// Truncate value to 11 decimal places
|
||||
value = this.truncateValue(value);
|
||||
|
||||
|
||||
// Implement retry logic for deadlock handling
|
||||
const maxRetries = 3;
|
||||
let attempt = 0;
|
||||
|
||||
|
||||
while (attempt <= maxRetries) {
|
||||
try {
|
||||
// Get subscription data for this org (with caching)
|
||||
@@ -179,7 +198,12 @@ export class UsageService {
|
||||
);
|
||||
} else {
|
||||
await db.transaction(async (trx) => {
|
||||
usage = await this.internalAddUsage(orgId, featureId, value, trx);
|
||||
usage = await this.internalAddUsage(
|
||||
orgId,
|
||||
featureId,
|
||||
value,
|
||||
trx
|
||||
);
|
||||
});
|
||||
}
|
||||
|
||||
@@ -189,25 +213,26 @@ export class UsageService {
|
||||
return usage || null;
|
||||
} catch (error: any) {
|
||||
// Check if this is a deadlock error
|
||||
const isDeadlock = error?.code === '40P01' ||
|
||||
error?.cause?.code === '40P01' ||
|
||||
(error?.message && error.message.includes('deadlock'));
|
||||
|
||||
const isDeadlock =
|
||||
error?.code === "40P01" ||
|
||||
error?.cause?.code === "40P01" ||
|
||||
(error?.message && error.message.includes("deadlock"));
|
||||
|
||||
if (isDeadlock && attempt < maxRetries) {
|
||||
attempt++;
|
||||
// Exponential backoff with jitter: 50-150ms, 100-300ms, 200-600ms
|
||||
const baseDelay = Math.pow(2, attempt - 1) * 50;
|
||||
const jitter = Math.random() * baseDelay;
|
||||
const delay = baseDelay + jitter;
|
||||
|
||||
|
||||
logger.warn(
|
||||
`Deadlock detected for ${orgId}/${featureId}, retrying attempt ${attempt}/${maxRetries} after ${delay.toFixed(0)}ms`
|
||||
);
|
||||
|
||||
await new Promise(resolve => setTimeout(resolve, delay));
|
||||
|
||||
await new Promise((resolve) => setTimeout(resolve, delay));
|
||||
continue;
|
||||
}
|
||||
|
||||
|
||||
logger.error(
|
||||
`Failed to add usage for ${orgId}/${featureId} after ${attempt} attempts:`,
|
||||
error
|
||||
@@ -227,10 +252,10 @@ export class UsageService {
|
||||
): Promise<Usage> {
|
||||
// Truncate value to 11 decimal places
|
||||
value = this.truncateValue(value);
|
||||
|
||||
|
||||
const usageId = `${orgId}-${featureId}`;
|
||||
const meterId = getFeatureMeterId(featureId);
|
||||
|
||||
|
||||
// Use upsert: insert if not exists, otherwise increment
|
||||
const [returnUsage] = await trx
|
||||
.insert(usage)
|
||||
@@ -247,7 +272,8 @@ export class UsageService {
|
||||
set: {
|
||||
latestValue: sql`${usage.latestValue} + ${value}`
|
||||
}
|
||||
}).returning();
|
||||
})
|
||||
.returning();
|
||||
|
||||
return returnUsage;
|
||||
}
|
||||
@@ -268,7 +294,7 @@ export class UsageService {
|
||||
value?: number,
|
||||
customerId?: string
|
||||
): Promise<void> {
|
||||
if (build !== "saas") {
|
||||
if (noop()) {
|
||||
return;
|
||||
}
|
||||
try {
|
||||
@@ -339,7 +365,7 @@ export class UsageService {
|
||||
.set({
|
||||
latestValue: newRunningTotal,
|
||||
instantaneousValue: value,
|
||||
updatedAt: Math.floor(Date.now() / 1000)
|
||||
updatedAt: Math.floor(Date.now() / 1000)
|
||||
})
|
||||
.where(eq(usage.usageId, usageId));
|
||||
}
|
||||
@@ -354,7 +380,7 @@ export class UsageService {
|
||||
meterId,
|
||||
instantaneousValue: truncatedValue,
|
||||
latestValue: truncatedValue,
|
||||
updatedAt: Math.floor(Date.now() / 1000)
|
||||
updatedAt: Math.floor(Date.now() / 1000)
|
||||
});
|
||||
}
|
||||
});
|
||||
@@ -415,7 +441,7 @@ export class UsageService {
|
||||
): Promise<void> {
|
||||
// Truncate value to 11 decimal places before sending to Stripe
|
||||
const truncatedValue = this.truncateValue(value);
|
||||
|
||||
|
||||
const event: StripeEvent = {
|
||||
identifier: uuidv4(),
|
||||
timestamp: Math.floor(new Date().getTime() / 1000),
|
||||
@@ -432,7 +458,9 @@ export class UsageService {
|
||||
|
||||
private async writeEventToFile(event: StripeEvent): Promise<void> {
|
||||
if (!this.eventsDir || !this.bucketName) {
|
||||
logger.warn("Stripe local file path or bucket name is not configured, skipping event file write.");
|
||||
logger.warn(
|
||||
"Stripe local file path or bucket name is not configured, skipping event file write."
|
||||
);
|
||||
return;
|
||||
}
|
||||
if (!this.currentEventFile) {
|
||||
@@ -481,7 +509,9 @@ export class UsageService {
|
||||
|
||||
private async uploadFileToS3(): Promise<void> {
|
||||
if (!this.bucketName || !this.eventsDir) {
|
||||
logger.warn("Stripe local file path or bucket name is not configured, skipping S3 upload.");
|
||||
logger.warn(
|
||||
"Stripe local file path or bucket name is not configured, skipping S3 upload."
|
||||
);
|
||||
return;
|
||||
}
|
||||
if (!this.currentEventFile) {
|
||||
@@ -493,7 +523,9 @@ export class UsageService {
|
||||
|
||||
// Check if this file is already being uploaded
|
||||
if (this.uploadingFiles.has(fileName)) {
|
||||
logger.debug(`File ${fileName} is already being uploaded, skipping`);
|
||||
logger.debug(
|
||||
`File ${fileName} is already being uploaded, skipping`
|
||||
);
|
||||
return;
|
||||
}
|
||||
|
||||
@@ -505,7 +537,9 @@ export class UsageService {
|
||||
try {
|
||||
await fs.access(filePath);
|
||||
} catch (error) {
|
||||
logger.debug(`File ${fileName} does not exist, may have been already processed`);
|
||||
logger.debug(
|
||||
`File ${fileName} does not exist, may have been already processed`
|
||||
);
|
||||
this.uploadingFiles.delete(fileName);
|
||||
// Reset current file if it was this file
|
||||
if (this.currentEventFile === fileName) {
|
||||
@@ -525,7 +559,9 @@ export class UsageService {
|
||||
await fs.unlink(filePath);
|
||||
} catch (unlinkError) {
|
||||
// File may have been already deleted
|
||||
logger.debug(`File ${fileName} was already deleted during cleanup`);
|
||||
logger.debug(
|
||||
`File ${fileName} was already deleted during cleanup`
|
||||
);
|
||||
}
|
||||
this.currentEventFile = null;
|
||||
this.uploadingFiles.delete(fileName);
|
||||
@@ -548,7 +584,9 @@ export class UsageService {
|
||||
await fs.unlink(filePath);
|
||||
} catch (unlinkError) {
|
||||
// File may have been already deleted by another process
|
||||
logger.debug(`File ${fileName} was already deleted during upload`);
|
||||
logger.debug(
|
||||
`File ${fileName} was already deleted during upload`
|
||||
);
|
||||
}
|
||||
|
||||
logger.info(
|
||||
@@ -559,10 +597,7 @@ export class UsageService {
|
||||
this.currentEventFile = null;
|
||||
this.currentFileStartTime = 0;
|
||||
} catch (error) {
|
||||
logger.error(
|
||||
`Failed to upload ${fileName} to S3:`,
|
||||
error
|
||||
);
|
||||
logger.error(`Failed to upload ${fileName} to S3:`, error);
|
||||
} finally {
|
||||
// Always remove from uploading set
|
||||
this.uploadingFiles.delete(fileName);
|
||||
@@ -579,7 +614,7 @@ export class UsageService {
|
||||
orgId: string,
|
||||
featureId: FeatureId
|
||||
): Promise<Usage | null> {
|
||||
if (build !== "saas") {
|
||||
if (noop()) {
|
||||
return null;
|
||||
}
|
||||
|
||||
@@ -598,7 +633,7 @@ export class UsageService {
|
||||
`Creating new usage record for ${orgId}/${featureId}`
|
||||
);
|
||||
const meterId = getFeatureMeterId(featureId);
|
||||
|
||||
|
||||
try {
|
||||
const [newUsage] = await db
|
||||
.insert(usage)
|
||||
@@ -653,7 +688,7 @@ export class UsageService {
|
||||
orgId: string,
|
||||
featureId: FeatureId
|
||||
): Promise<Usage | null> {
|
||||
if (build !== "saas") {
|
||||
if (noop()) {
|
||||
return null;
|
||||
}
|
||||
await this.updateDaily(orgId, featureId); // Ensure daily usage is updated
|
||||
@@ -673,7 +708,9 @@ export class UsageService {
|
||||
*/
|
||||
private async uploadOldEventFiles(): Promise<void> {
|
||||
if (!this.eventsDir || !this.bucketName) {
|
||||
logger.warn("Stripe local file path or bucket name is not configured, skipping old event file upload.");
|
||||
logger.warn(
|
||||
"Stripe local file path or bucket name is not configured, skipping old event file upload."
|
||||
);
|
||||
return;
|
||||
}
|
||||
try {
|
||||
@@ -681,15 +718,17 @@ export class UsageService {
|
||||
const now = Date.now();
|
||||
for (const file of files) {
|
||||
if (!file.endsWith(".json")) continue;
|
||||
|
||||
|
||||
// Skip files that are already being uploaded
|
||||
if (this.uploadingFiles.has(file)) {
|
||||
logger.debug(`Skipping file ${file} as it's already being uploaded`);
|
||||
logger.debug(
|
||||
`Skipping file ${file} as it's already being uploaded`
|
||||
);
|
||||
continue;
|
||||
}
|
||||
|
||||
const filePath = path.join(this.eventsDir, file);
|
||||
|
||||
|
||||
try {
|
||||
// Check if file still exists before processing
|
||||
try {
|
||||
@@ -704,7 +743,7 @@ export class UsageService {
|
||||
if (age >= 90000) {
|
||||
// 1.5 minutes - Mark as being uploaded
|
||||
this.uploadingFiles.add(file);
|
||||
|
||||
|
||||
try {
|
||||
const fileContent = await fs.readFile(
|
||||
filePath,
|
||||
@@ -720,15 +759,17 @@ export class UsageService {
|
||||
ContentType: "application/json"
|
||||
});
|
||||
await s3Client.send(uploadCommand);
|
||||
|
||||
|
||||
// Check if file still exists before unlinking
|
||||
try {
|
||||
await fs.access(filePath);
|
||||
await fs.unlink(filePath);
|
||||
} catch (unlinkError) {
|
||||
logger.debug(`File ${file} was already deleted during interval upload`);
|
||||
logger.debug(
|
||||
`File ${file} was already deleted during interval upload`
|
||||
);
|
||||
}
|
||||
|
||||
|
||||
logger.info(
|
||||
`Interval: Uploaded event file ${file} to S3 with ${events.length} events`
|
||||
);
|
||||
@@ -743,7 +784,9 @@ export class UsageService {
|
||||
await fs.access(filePath);
|
||||
await fs.unlink(filePath);
|
||||
} catch (unlinkError) {
|
||||
logger.debug(`Empty file ${file} was already deleted`);
|
||||
logger.debug(
|
||||
`Empty file ${file} was already deleted`
|
||||
);
|
||||
}
|
||||
}
|
||||
} finally {
|
||||
@@ -765,12 +808,17 @@ export class UsageService {
|
||||
}
|
||||
}
|
||||
|
||||
public async checkLimitSet(orgId: string, kickSites = false, featureId?: FeatureId, usage?: Usage): Promise<boolean> {
|
||||
if (build !== "saas") {
|
||||
public async checkLimitSet(
|
||||
orgId: string,
|
||||
kickSites = false,
|
||||
featureId?: FeatureId,
|
||||
usage?: Usage
|
||||
): Promise<boolean> {
|
||||
if (noop()) {
|
||||
return false;
|
||||
}
|
||||
// This method should check the current usage against the limits set for the organization
|
||||
// and kick out all of the sites on the org
|
||||
// and kick out all of the sites on the org
|
||||
let hasExceededLimits = false;
|
||||
|
||||
try {
|
||||
@@ -805,16 +853,30 @@ export class UsageService {
|
||||
if (usage) {
|
||||
currentUsage = usage;
|
||||
} else {
|
||||
currentUsage = await this.getUsage(orgId, limit.featureId as FeatureId);
|
||||
currentUsage = await this.getUsage(
|
||||
orgId,
|
||||
limit.featureId as FeatureId
|
||||
);
|
||||
}
|
||||
|
||||
const usageValue = currentUsage?.instantaneousValue || currentUsage?.latestValue || 0;
|
||||
logger.debug(`Current usage for org ${orgId} on feature ${limit.featureId}: ${usageValue}`);
|
||||
logger.debug(`Limit for org ${orgId} on feature ${limit.featureId}: ${limit.value}`);
|
||||
if (currentUsage && limit.value !== null && usageValue > limit.value) {
|
||||
const usageValue =
|
||||
currentUsage?.instantaneousValue ||
|
||||
currentUsage?.latestValue ||
|
||||
0;
|
||||
logger.debug(
|
||||
`Current usage for org ${orgId} on feature ${limit.featureId}: ${usageValue}`
|
||||
);
|
||||
logger.debug(
|
||||
`Limit for org ${orgId} on feature ${limit.featureId}: ${limit.value}`
|
||||
);
|
||||
if (
|
||||
currentUsage &&
|
||||
limit.value !== null &&
|
||||
usageValue > limit.value
|
||||
) {
|
||||
logger.debug(
|
||||
`Org ${orgId} has exceeded limit for ${limit.featureId}: ` +
|
||||
`${usageValue} > ${limit.value}`
|
||||
`${usageValue} > ${limit.value}`
|
||||
);
|
||||
hasExceededLimits = true;
|
||||
break; // Exit early if any limit is exceeded
|
||||
@@ -823,7 +885,9 @@ export class UsageService {
|
||||
|
||||
// If any limits are exceeded, disconnect all sites for this organization
|
||||
if (hasExceededLimits && kickSites) {
|
||||
logger.warn(`Disconnecting all sites for org ${orgId} due to exceeded limits`);
|
||||
logger.warn(
|
||||
`Disconnecting all sites for org ${orgId} due to exceeded limits`
|
||||
);
|
||||
|
||||
// Get all sites for this organization
|
||||
const orgSites = await db
|
||||
@@ -832,7 +896,7 @@ export class UsageService {
|
||||
.where(eq(sites.orgId, orgId));
|
||||
|
||||
// Mark all sites as offline and send termination messages
|
||||
const siteUpdates = orgSites.map(site => site.siteId);
|
||||
const siteUpdates = orgSites.map((site) => site.siteId);
|
||||
|
||||
if (siteUpdates.length > 0) {
|
||||
// Send termination messages to newt sites
|
||||
@@ -853,17 +917,21 @@ export class UsageService {
|
||||
};
|
||||
|
||||
// Don't await to prevent blocking
|
||||
sendToClient(newt.newtId, payload).catch((error: any) => {
|
||||
logger.error(
|
||||
`Failed to send termination message to newt ${newt.newtId}:`,
|
||||
error
|
||||
);
|
||||
});
|
||||
sendToClient(newt.newtId, payload).catch(
|
||||
(error: any) => {
|
||||
logger.error(
|
||||
`Failed to send termination message to newt ${newt.newtId}:`,
|
||||
error
|
||||
);
|
||||
}
|
||||
);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
logger.info(`Disconnected ${orgSites.length} sites for org ${orgId} due to exceeded limits`);
|
||||
logger.info(
|
||||
`Disconnected ${orgSites.length} sites for org ${orgId} due to exceeded limits`
|
||||
);
|
||||
}
|
||||
}
|
||||
} catch (error) {
|
||||
|
||||
@@ -1,70 +1,3 @@
|
||||
import axios from "axios";
|
||||
import { tokenManager } from "./tokenManager";
|
||||
import logger from "@server/logger";
|
||||
import config from "./config";
|
||||
|
||||
/**
|
||||
* Get valid certificates for the specified domains
|
||||
*/
|
||||
export async function getValidCertificatesForDomainsHybrid(domains: Set<string>): Promise<
|
||||
Array<{
|
||||
id: number;
|
||||
domain: string;
|
||||
wildcard: boolean | null;
|
||||
certFile: string | null;
|
||||
keyFile: string | null;
|
||||
expiresAt: number | null;
|
||||
updatedAt?: number | null;
|
||||
}>
|
||||
> {
|
||||
if (domains.size === 0) {
|
||||
return [];
|
||||
}
|
||||
|
||||
const domainArray = Array.from(domains);
|
||||
|
||||
try {
|
||||
const response = await axios.get(
|
||||
`${config.getRawConfig().managed?.endpoint}/api/v1/hybrid/certificates/domains`,
|
||||
{
|
||||
params: {
|
||||
domains: domainArray
|
||||
},
|
||||
headers: (await tokenManager.getAuthHeader()).headers
|
||||
}
|
||||
);
|
||||
|
||||
if (response.status !== 200) {
|
||||
logger.error(
|
||||
`Failed to fetch certificates for domains: ${response.status} ${response.statusText}`,
|
||||
{ responseData: response.data, domains: domainArray }
|
||||
);
|
||||
return [];
|
||||
}
|
||||
|
||||
// logger.debug(
|
||||
// `Successfully retrieved ${response.data.data?.length || 0} certificates for ${domainArray.length} domains`
|
||||
// );
|
||||
|
||||
return response.data.data;
|
||||
} catch (error) {
|
||||
// pull data out of the axios error to log
|
||||
if (axios.isAxiosError(error)) {
|
||||
logger.error("Error getting certificates:", {
|
||||
message: error.message,
|
||||
code: error.code,
|
||||
status: error.response?.status,
|
||||
statusText: error.response?.statusText,
|
||||
url: error.config?.url,
|
||||
method: error.config?.method
|
||||
});
|
||||
} else {
|
||||
logger.error("Error getting certificates:", error);
|
||||
}
|
||||
return [];
|
||||
}
|
||||
}
|
||||
|
||||
export async function getValidCertificatesForDomains(domains: Set<string>): Promise<
|
||||
Array<{
|
||||
id: number;
|
||||
|
||||
@@ -102,10 +102,7 @@ export class Config {
        if (!this.rawConfig) {
            throw new Error("Config not loaded. Call load() first.");
        }
        if (this.rawConfig.managed) {
            // LETS NOT WORRY ABOUT THE SERVER SECRET WHEN MANAGED
            return;
        }

        license.setServerSecret(this.rawConfig.server.secret!);

        await this.checkKeyStatus();
@@ -158,10 +155,6 @@ export class Config {
        return false;
    }

    public isManagedMode() {
        return typeof this.rawConfig?.managed === "object";
    }

    public async checkSupporterKey() {
        const [key] = await db.select().from(supporterKey).limit(1);
@@ -15,8 +15,7 @@ import {
} from "@server/db";
import { eq } from "drizzle-orm";
import { defaultRoleAllowedActions } from "@server/routers/role";
import { FeatureId, limitsService, sandboxLimitSet } from "@server/lib/billing";
import { createCustomer } from "@server/private/lib/billing/createCustomer";
import { FeatureId, limitsService, sandboxLimitSet, createCustomer } from "@server/lib/billing";
import { usageService } from "@server/lib/billing/usageService";

export async function createUserAccountOrg(
|
||||
@@ -1,8 +1,5 @@
import logger from "@server/logger";
import { maxmindLookup } from "@server/db/maxmind";
import axios from "axios";
import config from "./config";
import { tokenManager } from "./tokenManager";

export async function getCountryCodeForIp(
    ip: string
@@ -33,32 +30,4 @@
    }

    return;
}

export async function remoteGetCountryCodeForIp(
    ip: string
): Promise<string | undefined> {
    try {
        const response = await axios.get(
            `${config.getRawConfig().managed?.endpoint}/api/v1/hybrid/geoip/${ip}`,
            await tokenManager.getAuthHeader()
        );

        return response.data.data.countryCode;
    } catch (error) {
        if (axios.isAxiosError(error)) {
            logger.error("Error fetching config in verify session:", {
                message: error.message,
                code: error.code,
                status: error.response?.status,
                statusText: error.response?.statusText,
                url: error.config?.url,
                method: error.config?.method
            });
        } else {
            logger.error("Error fetching config in verify session:", error);
        }
    }

    return;
}
}
@@ -42,18 +42,6 @@ export const configSchema = z
            anonymous_usage: true
        }
    }),
    managed: z
        .object({
            name: z.string().optional(),
            id: z.string().optional(),
            secret: z.string().optional(),
            endpoint: z
                .string()
                .optional()
                .default("https://pangolin.fossorial.io"),
            redirect_endpoint: z.string().optional()
        })
        .optional(),
    domains: z
        .record(
            z.string(),
@@ -346,10 +334,7 @@ export const configSchema = z
        if (data.flags?.disable_config_managed_domains) {
            return true;
        }
        // If hybrid is defined, domains are not required
        if (data.managed) {
            return true;
        }

        if (keys.length === 0) {
            return false;
        }
@@ -361,10 +346,6 @@ export const configSchema = z
    )
    .refine(
        (data) => {
            // If hybrid is defined, server secret is not required
            if (data.managed) {
                return true;
            }
            // If hybrid is not defined, server secret must be defined. If its not defined already then pull it from env
            if (data.server?.secret === undefined) {
                data.server.secret = process.env.SERVER_SECRET;
@@ -380,10 +361,6 @@ export const configSchema = z
    )
    .refine(
        (data) => {
            // If hybrid is defined, dashboard_url is not required
            if (data.managed) {
                return true;
            }
            // If hybrid is not defined, dashboard_url must be defined
            return (
                data.app.dashboard_url !== undefined &&
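To make the intent of these refinements concrete, here is a minimal, self-contained sketch (not the project's actual schema) of how a `managed` block can exempt other fields from being required in a zod schema:

```typescript
import { z } from "zod";

// Simplified stand-in for the real configSchema: when `managed` is present,
// `server.secret` and `app.dashboard_url` are no longer required.
const miniConfigSchema = z
    .object({
        managed: z.object({ endpoint: z.string() }).optional(),
        server: z.object({ secret: z.string().optional() }),
        app: z.object({ dashboard_url: z.string().optional() })
    })
    .refine((data) => (data.managed ? true : data.server.secret !== undefined), {
        message: "server.secret is required unless managed mode is configured"
    })
    .refine((data) => (data.managed ? true : data.app.dashboard_url !== undefined), {
        message: "app.dashboard_url is required unless managed mode is configured"
    });

// Passes: managed mode supplies these values from the cloud side.
miniConfigSchema.parse({
    managed: { endpoint: "https://pangolin.fossorial.io" },
    server: {},
    app: {}
});

// Would throw: self-hosted (non-managed) configs must provide a secret and dashboard_url.
// miniConfigSchema.parse({ server: {}, app: {} });
```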
|
||||
@@ -1,73 +0,0 @@
|
||||
import { Request, Response, NextFunction } from "express";
|
||||
import { Router } from "express";
|
||||
import axios from "axios";
|
||||
import HttpCode from "@server/types/HttpCode";
|
||||
import createHttpError from "http-errors";
|
||||
import logger from "@server/logger";
|
||||
import config from "@server/lib/config";
|
||||
import { tokenManager } from "./tokenManager";
|
||||
|
||||
/**
|
||||
* Proxy function that forwards requests to the remote cloud server
|
||||
*/
|
||||
|
||||
export const proxyToRemote = async (
|
||||
req: Request,
|
||||
res: Response,
|
||||
next: NextFunction,
|
||||
endpoint: string
|
||||
): Promise<any> => {
|
||||
try {
|
||||
const remoteUrl = `${config.getRawConfig().managed?.endpoint?.replace(/\/$/, '')}/api/v1/${endpoint}`;
|
||||
|
||||
logger.debug(`Proxying request to remote server: ${remoteUrl}`);
|
||||
|
||||
// Forward the request to the remote server
|
||||
const response = await axios({
|
||||
method: req.method as any,
|
||||
url: remoteUrl,
|
||||
data: req.body,
|
||||
headers: {
|
||||
'Content-Type': 'application/json',
|
||||
...(await tokenManager.getAuthHeader()).headers
|
||||
},
|
||||
params: req.query,
|
||||
timeout: 30000, // 30 second timeout
|
||||
validateStatus: () => true // Don't throw on non-2xx status codes
|
||||
});
|
||||
|
||||
logger.debug(`Proxy response: ${JSON.stringify(response.data)}`);
|
||||
|
||||
// Forward the response status and data
|
||||
return res.status(response.status).json(response.data);
|
||||
|
||||
} catch (error) {
|
||||
logger.error("Error proxying request to remote server:", error);
|
||||
|
||||
if (axios.isAxiosError(error)) {
|
||||
if (error.code === 'ECONNREFUSED' || error.code === 'ENOTFOUND') {
|
||||
return next(
|
||||
createHttpError(
|
||||
HttpCode.SERVICE_UNAVAILABLE,
|
||||
"Remote server is unavailable"
|
||||
)
|
||||
);
|
||||
}
|
||||
if (error.code === 'ECONNABORTED') {
|
||||
return next(
|
||||
createHttpError(
|
||||
HttpCode.REQUEST_TIMEOUT,
|
||||
"Request to remote server timed out"
|
||||
)
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
return next(
|
||||
createHttpError(
|
||||
HttpCode.INTERNAL_SERVER_ERROR,
|
||||
"Error communicating with remote server"
|
||||
)
|
||||
);
|
||||
}
|
||||
};
|
||||
@@ -1,274 +0,0 @@
|
||||
import axios from "axios";
|
||||
import config from "@server/lib/config";
|
||||
import logger from "@server/logger";
|
||||
|
||||
export interface TokenResponse {
|
||||
success: boolean;
|
||||
message?: string;
|
||||
data: {
|
||||
token: string;
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* Token Manager - Handles automatic token refresh for hybrid server authentication
|
||||
*
|
||||
* Usage throughout the application:
|
||||
* ```typescript
|
||||
* import { tokenManager } from "@server/lib/tokenManager";
|
||||
*
|
||||
* // Get the current valid token
|
||||
* const token = await tokenManager.getToken();
|
||||
*
|
||||
* // Force refresh if needed
|
||||
* await tokenManager.refreshToken();
|
||||
* ```
|
||||
*
|
||||
* The token manager automatically refreshes tokens every 24 hours by default
|
||||
* and is started once in the privateHybridServer.ts file.
|
||||
*/
|
||||
|
||||
export class TokenManager {
|
||||
private token: string | null = null;
|
||||
private refreshInterval: NodeJS.Timeout | null = null;
|
||||
private isRefreshing: boolean = false;
|
||||
private refreshIntervalMs: number;
|
||||
private retryInterval: NodeJS.Timeout | null = null;
|
||||
private retryIntervalMs: number;
|
||||
private tokenAvailablePromise: Promise<void> | null = null;
|
||||
private tokenAvailableResolve: (() => void) | null = null;
|
||||
|
||||
constructor(refreshIntervalMs: number = 24 * 60 * 60 * 1000, retryIntervalMs: number = 5000) {
|
||||
// Default to 24 hours for refresh, 5 seconds for retry
|
||||
this.refreshIntervalMs = refreshIntervalMs;
|
||||
this.retryIntervalMs = retryIntervalMs;
|
||||
this.setupTokenAvailablePromise();
|
||||
}
|
||||
|
||||
/**
|
||||
* Set up promise that resolves when token becomes available
|
||||
*/
|
||||
private setupTokenAvailablePromise(): void {
|
||||
this.tokenAvailablePromise = new Promise((resolve) => {
|
||||
this.tokenAvailableResolve = resolve;
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* Resolve the token available promise
|
||||
*/
|
||||
private resolveTokenAvailable(): void {
|
||||
if (this.tokenAvailableResolve) {
|
||||
this.tokenAvailableResolve();
|
||||
this.tokenAvailableResolve = null;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Start the token manager - gets initial token and sets up refresh interval
|
||||
* If initial token fetch fails, keeps retrying every few seconds until successful
|
||||
*/
|
||||
async start(): Promise<void> {
|
||||
logger.info("Starting token manager...");
|
||||
|
||||
try {
|
||||
await this.refreshToken();
|
||||
this.setupRefreshInterval();
|
||||
this.resolveTokenAvailable();
|
||||
logger.info("Token manager started successfully");
|
||||
} catch (error) {
|
||||
logger.warn(`Failed to get initial token, will retry in ${this.retryIntervalMs / 1000} seconds:`, error);
|
||||
this.setupRetryInterval();
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Set up retry interval for initial token acquisition
|
||||
*/
|
||||
private setupRetryInterval(): void {
|
||||
if (this.retryInterval) {
|
||||
clearInterval(this.retryInterval);
|
||||
}
|
||||
|
||||
this.retryInterval = setInterval(async () => {
|
||||
try {
|
||||
logger.debug("Retrying initial token acquisition");
|
||||
await this.refreshToken();
|
||||
this.setupRefreshInterval();
|
||||
this.clearRetryInterval();
|
||||
this.resolveTokenAvailable();
|
||||
logger.info("Token manager started successfully after retry");
|
||||
} catch (error) {
|
||||
logger.debug("Token acquisition retry failed, will try again");
|
||||
}
|
||||
}, this.retryIntervalMs);
|
||||
}
|
||||
|
||||
/**
|
||||
* Clear retry interval
|
||||
*/
|
||||
private clearRetryInterval(): void {
|
||||
if (this.retryInterval) {
|
||||
clearInterval(this.retryInterval);
|
||||
this.retryInterval = null;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Stop the token manager and clear all intervals
|
||||
*/
|
||||
stop(): void {
|
||||
if (this.refreshInterval) {
|
||||
clearInterval(this.refreshInterval);
|
||||
this.refreshInterval = null;
|
||||
}
|
||||
this.clearRetryInterval();
|
||||
logger.info("Token manager stopped");
|
||||
}
|
||||
|
||||
/**
|
||||
* Get the current valid token
|
||||
*/
|
||||
|
||||
// TODO: WE SHOULD NOT BE GETTING A TOKEN EVERY TIME WE REQUEST IT
|
||||
async getToken(): Promise<string> {
|
||||
// If we don't have a token yet, wait for it to become available
|
||||
if (!this.token && this.tokenAvailablePromise) {
|
||||
await this.tokenAvailablePromise;
|
||||
}
|
||||
|
||||
if (!this.token) {
|
||||
if (this.isRefreshing) {
|
||||
// Wait for current refresh to complete
|
||||
await this.waitForRefresh();
|
||||
} else {
|
||||
throw new Error("No valid token available");
|
||||
}
|
||||
}
|
||||
|
||||
if (!this.token) {
|
||||
throw new Error("No valid token available");
|
||||
}
|
||||
|
||||
return this.token;
|
||||
}
|
||||
|
||||
async getAuthHeader() {
|
||||
return {
|
||||
headers: {
|
||||
Authorization: `Bearer ${await this.getToken()}`,
|
||||
"X-CSRF-Token": "x-csrf-protection",
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* Force refresh the token
|
||||
*/
|
||||
async refreshToken(): Promise<void> {
|
||||
if (this.isRefreshing) {
|
||||
await this.waitForRefresh();
|
||||
return;
|
||||
}
|
||||
|
||||
this.isRefreshing = true;
|
||||
|
||||
try {
|
||||
const hybridConfig = config.getRawConfig().managed;
|
||||
|
||||
if (
|
||||
!hybridConfig?.id ||
|
||||
!hybridConfig?.secret ||
|
||||
!hybridConfig?.endpoint
|
||||
) {
|
||||
throw new Error("Hybrid configuration is not defined");
|
||||
}
|
||||
|
||||
const tokenEndpoint = `${hybridConfig.endpoint}/api/v1/auth/remoteExitNode/get-token`;
|
||||
|
||||
const tokenData = {
|
||||
remoteExitNodeId: hybridConfig.id,
|
||||
secret: hybridConfig.secret
|
||||
};
|
||||
|
||||
logger.debug("Requesting new token from server");
|
||||
|
||||
const response = await axios.post<TokenResponse>(
|
||||
tokenEndpoint,
|
||||
tokenData,
|
||||
{
|
||||
headers: {
|
||||
"Content-Type": "application/json",
|
||||
"X-CSRF-Token": "x-csrf-protection"
|
||||
},
|
||||
timeout: 10000 // 10 second timeout
|
||||
}
|
||||
);
|
||||
|
||||
if (!response.data.success) {
|
||||
throw new Error(
|
||||
`Failed to get token: ${response.data.message}`
|
||||
);
|
||||
}
|
||||
|
||||
if (!response.data.data.token) {
|
||||
throw new Error("Received empty token from server");
|
||||
}
|
||||
|
||||
this.token = response.data.data.token;
|
||||
logger.debug("Token refreshed successfully");
|
||||
} catch (error) {
|
||||
if (axios.isAxiosError(error)) {
|
||||
logger.error("Error updating proxy mapping:", {
|
||||
message: error.message,
|
||||
code: error.code,
|
||||
status: error.response?.status,
|
||||
statusText: error.response?.statusText,
|
||||
url: error.config?.url,
|
||||
method: error.config?.method
|
||||
});
|
||||
} else {
|
||||
logger.error("Error updating proxy mapping:", error);
|
||||
}
|
||||
|
||||
throw new Error("Failed to refresh token");
|
||||
} finally {
|
||||
this.isRefreshing = false;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Set up automatic token refresh interval
|
||||
*/
|
||||
private setupRefreshInterval(): void {
|
||||
if (this.refreshInterval) {
|
||||
clearInterval(this.refreshInterval);
|
||||
}
|
||||
|
||||
this.refreshInterval = setInterval(async () => {
|
||||
try {
|
||||
logger.debug("Auto-refreshing token");
|
||||
await this.refreshToken();
|
||||
} catch (error) {
|
||||
logger.error("Failed to auto-refresh token:", error);
|
||||
}
|
||||
}, this.refreshIntervalMs);
|
||||
}
|
||||
|
||||
/**
|
||||
* Wait for current refresh operation to complete
|
||||
*/
|
||||
private async waitForRefresh(): Promise<void> {
|
||||
return new Promise((resolve) => {
|
||||
const checkInterval = setInterval(() => {
|
||||
if (!this.isRefreshing) {
|
||||
clearInterval(checkInterval);
|
||||
resolve();
|
||||
}
|
||||
}, 100);
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
// Export a singleton instance for use throughout the application
|
||||
export const tokenManager = new TokenManager();
|
||||
@@ -6,13 +6,9 @@ import * as yaml from "js-yaml";
|
||||
import axios from "axios";
|
||||
import { db, exitNodes } from "@server/db";
|
||||
import { eq } from "drizzle-orm";
|
||||
import { tokenManager } from "../tokenManager";
|
||||
import { getCurrentExitNodeId } from "@server/lib/exitNodes";
|
||||
import { getTraefikConfig } from "#dynamic/lib/traefik";
|
||||
import {
|
||||
getValidCertificatesForDomains,
|
||||
getValidCertificatesForDomainsHybrid
|
||||
} from "#dynamic/lib/certificates";
|
||||
import { getValidCertificatesForDomains } from "#dynamic/lib/certificates";
|
||||
import { sendToExitNode } from "#dynamic/lib/exitNodes";
|
||||
import { build } from "@server/build";
|
||||
|
||||
@@ -313,93 +309,92 @@ export class TraefikConfigManager {
|
||||
this.lastActiveDomains = new Set(domains);
|
||||
}
|
||||
|
||||
// Scan current local certificate state
|
||||
this.lastLocalCertificateState =
|
||||
await this.scanLocalCertificateState();
|
||||
if (
|
||||
process.env.GENERATE_OWN_CERTIFICATES === "true" &&
|
||||
build != "oss"
|
||||
) {
|
||||
// Scan current local certificate state
|
||||
this.lastLocalCertificateState =
|
||||
await this.scanLocalCertificateState();
|
||||
|
||||
// Only fetch certificates if needed (domain changes, missing certs, or daily renewal check)
|
||||
let validCertificates: Array<{
|
||||
id: number;
|
||||
domain: string;
|
||||
wildcard: boolean | null;
|
||||
certFile: string | null;
|
||||
keyFile: string | null;
|
||||
expiresAt: number | null;
|
||||
updatedAt?: number | null;
|
||||
}> = [];
|
||||
// Only fetch certificates if needed (domain changes, missing certs, or daily renewal check)
|
||||
let validCertificates: Array<{
|
||||
id: number;
|
||||
domain: string;
|
||||
wildcard: boolean | null;
|
||||
certFile: string | null;
|
||||
keyFile: string | null;
|
||||
expiresAt: number | null;
|
||||
updatedAt?: number | null;
|
||||
}> = [];
|
||||
|
||||
if (this.shouldFetchCertificates(domains)) {
|
||||
// Filter out domains that are already covered by wildcard certificates
|
||||
const domainsToFetch = new Set<string>();
|
||||
for (const domain of domains) {
|
||||
if (
|
||||
!isDomainCoveredByWildcard(
|
||||
domain,
|
||||
this.lastLocalCertificateState
|
||||
)
|
||||
) {
|
||||
domainsToFetch.add(domain);
|
||||
} else {
|
||||
logger.debug(
|
||||
`Domain ${domain} is covered by existing wildcard certificate, skipping fetch`
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
if (domainsToFetch.size > 0) {
|
||||
// Get valid certificates for domains not covered by wildcards
|
||||
if (config.isManagedMode()) {
|
||||
validCertificates =
|
||||
await getValidCertificatesForDomainsHybrid(
|
||||
domainsToFetch
|
||||
if (this.shouldFetchCertificates(domains)) {
|
||||
// Filter out domains that are already covered by wildcard certificates
|
||||
const domainsToFetch = new Set<string>();
|
||||
for (const domain of domains) {
|
||||
if (
|
||||
!isDomainCoveredByWildcard(
|
||||
domain,
|
||||
this.lastLocalCertificateState
|
||||
)
|
||||
) {
|
||||
domainsToFetch.add(domain);
|
||||
} else {
|
||||
logger.debug(
|
||||
`Domain ${domain} is covered by existing wildcard certificate, skipping fetch`
|
||||
);
|
||||
} else {
|
||||
}
|
||||
}
|
||||
|
||||
if (domainsToFetch.size > 0) {
|
||||
// Get valid certificates for domains not covered by wildcards
|
||||
validCertificates =
|
||||
await getValidCertificatesForDomains(
|
||||
domainsToFetch
|
||||
);
|
||||
this.lastCertificateFetch = new Date();
|
||||
this.lastKnownDomains = new Set(domains);
|
||||
|
||||
logger.info(
|
||||
`Fetched ${validCertificates.length} certificates from remote (${domains.size - domainsToFetch.size} domains covered by wildcards)`
|
||||
);
|
||||
|
||||
// Download and decrypt new certificates
|
||||
await this.processValidCertificates(validCertificates);
|
||||
} else {
|
||||
logger.info(
|
||||
"All domains are covered by existing wildcard certificates, no fetch needed"
|
||||
);
|
||||
this.lastCertificateFetch = new Date();
|
||||
this.lastKnownDomains = new Set(domains);
|
||||
}
|
||||
this.lastCertificateFetch = new Date();
|
||||
this.lastKnownDomains = new Set(domains);
|
||||
|
||||
logger.info(
|
||||
`Fetched ${validCertificates.length} certificates from remote (${domains.size - domainsToFetch.size} domains covered by wildcards)`
|
||||
);
|
||||
|
||||
// Download and decrypt new certificates
|
||||
await this.processValidCertificates(validCertificates);
|
||||
// Always ensure all existing certificates (including wildcards) are in the config
|
||||
await this.updateDynamicConfigFromLocalCerts(domains);
|
||||
} else {
|
||||
logger.info(
|
||||
"All domains are covered by existing wildcard certificates, no fetch needed"
|
||||
);
|
||||
this.lastCertificateFetch = new Date();
|
||||
this.lastKnownDomains = new Set(domains);
|
||||
const timeSinceLastFetch = this.lastCertificateFetch
|
||||
? Math.round(
|
||||
(Date.now() -
|
||||
this.lastCertificateFetch.getTime()) /
|
||||
(1000 * 60)
|
||||
)
|
||||
: 0;
|
||||
|
||||
// logger.debug(
|
||||
// `Skipping certificate fetch - no changes detected and within 24-hour window (last fetch: ${timeSinceLastFetch} minutes ago)`
|
||||
// );
|
||||
|
||||
// Still need to ensure config is up to date with existing certificates
|
||||
await this.updateDynamicConfigFromLocalCerts(domains);
|
||||
}
|
||||
|
||||
// Always ensure all existing certificates (including wildcards) are in the config
|
||||
await this.updateDynamicConfigFromLocalCerts(domains);
|
||||
} else {
|
||||
const timeSinceLastFetch = this.lastCertificateFetch
|
||||
? Math.round(
|
||||
(Date.now() - this.lastCertificateFetch.getTime()) /
|
||||
(1000 * 60)
|
||||
)
|
||||
: 0;
|
||||
// Clean up certificates for domains no longer in use
|
||||
await this.cleanupUnusedCertificates(domains);
|
||||
|
||||
// logger.debug(
|
||||
// `Skipping certificate fetch - no changes detected and within 24-hour window (last fetch: ${timeSinceLastFetch} minutes ago)`
|
||||
// );
|
||||
|
||||
// Still need to ensure config is up to date with existing certificates
|
||||
await this.updateDynamicConfigFromLocalCerts(domains);
|
||||
// wait briefly (500 ms) for traefik to pick up the new certificates
|
||||
await new Promise((resolve) => setTimeout(resolve, 500));
|
||||
}
|
||||
|
||||
// Clean up certificates for domains no longer in use
|
||||
await this.cleanupUnusedCertificates(domains);
|
||||
|
||||
// wait briefly (500 ms) for traefik to pick up the new certificates
|
||||
await new Promise((resolve) => setTimeout(resolve, 500));
|
||||
|
||||
// Write traefik config as YAML to a second dynamic config file if changed
|
||||
await this.writeTraefikDynamicConfig(traefikConfig);
|
||||
|
||||
@@ -448,32 +443,15 @@ export class TraefikConfigManager {
|
||||
} | null> {
|
||||
let traefikConfig;
|
||||
try {
|
||||
if (config.isManagedMode()) {
|
||||
const resp = await axios.get(
|
||||
`${config.getRawConfig().managed?.endpoint}/api/v1/hybrid/traefik-config`,
|
||||
await tokenManager.getAuthHeader()
|
||||
);
|
||||
|
||||
if (resp.status !== 200) {
|
||||
logger.error(
|
||||
`Failed to fetch traefik config: ${resp.status} ${resp.statusText}`,
|
||||
{ responseData: resp.data }
|
||||
);
|
||||
return null;
|
||||
}
|
||||
|
||||
traefikConfig = resp.data.data;
|
||||
} else {
|
||||
const currentExitNode = await getCurrentExitNodeId();
|
||||
// logger.debug(`Fetching traefik config for exit node: ${currentExitNode}`);
|
||||
traefikConfig = await getTraefikConfig(
|
||||
// this is called by the local exit node to get its own config
|
||||
currentExitNode,
|
||||
config.getRawConfig().traefik.site_types,
|
||||
build == "oss", // filter out the namespace domains in open source
|
||||
build != "oss" // generate the login pages on the cloud and hybrid
|
||||
);
|
||||
}
|
||||
const currentExitNode = await getCurrentExitNodeId();
|
||||
// logger.debug(`Fetching traefik config for exit node: ${currentExitNode}`);
|
||||
traefikConfig = await getTraefikConfig(
|
||||
// this is called by the local exit node to get its own config
|
||||
currentExitNode,
|
||||
config.getRawConfig().traefik.site_types,
|
||||
build == "oss", // filter out the namespace domains in open source
|
||||
build != "oss" // generate the login pages on the cloud and hybrid
|
||||
);
|
||||
|
||||
const domains = new Set<string>();
|
||||
|
||||
@@ -718,7 +696,12 @@ export class TraefikConfigManager {
|
||||
|
||||
for (const cert of validCertificates) {
|
||||
try {
|
||||
if (!cert.certFile || !cert.keyFile) {
|
||||
if (
|
||||
!cert.certFile ||
|
||||
!cert.keyFile ||
|
||||
cert.certFile.length === 0 ||
|
||||
cert.keyFile.length === 0
|
||||
) {
|
||||
logger.warn(
|
||||
`Certificate for domain ${cert.domain} is missing cert or key file`
|
||||
);
|
||||
@@ -842,7 +825,9 @@ export class TraefikConfigManager {
|
||||
const lastUpdateStr = fs
|
||||
.readFileSync(lastUpdatePath, "utf8")
|
||||
.trim();
|
||||
lastUpdateTime = Math.floor(new Date(lastUpdateStr).getTime() / 1000);
|
||||
lastUpdateTime = Math.floor(
|
||||
new Date(lastUpdateStr).getTime() / 1000
|
||||
);
|
||||
} catch {
|
||||
lastUpdateTime = null;
|
||||
}
|
||||
|
||||
@@ -105,7 +105,12 @@ export async function getTraefikConfig(
|
||||
const priority = row.priority ?? 100;
|
||||
|
||||
// Create a unique key combining resourceId, path config, and rewrite config
|
||||
const pathKey = [targetPath, pathMatchType, rewritePath, rewritePathType]
|
||||
const pathKey = [
|
||||
targetPath,
|
||||
pathMatchType,
|
||||
rewritePath,
|
||||
rewritePathType
|
||||
]
|
||||
.filter(Boolean)
|
||||
.join("-");
|
||||
const mapKey = [resourceId, pathKey].filter(Boolean).join("-");
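To make the key construction above concrete, here is a small trace with hypothetical values (the literals are made up; only the join logic is taken from the code):

```typescript
// Hypothetical example values for illustration only.
const resourceId = 42;
const targetPath = "/api";
const pathMatchType = "prefix";
const rewritePath = "/";
const rewritePathType = "stripPrefix";

const pathKey = [targetPath, pathMatchType, rewritePath, rewritePathType]
    .filter(Boolean)
    .join("-"); // "/api-prefix-/-stripPrefix"

const mapKey = [resourceId, pathKey].filter(Boolean).join("-"); // "42-/api-prefix-/-stripPrefix"
```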
|
||||
@@ -120,13 +125,15 @@ export async function getTraefikConfig(
|
||||
);
|
||||
|
||||
if (!validation.isValid) {
|
||||
logger.error(`Invalid path rewrite configuration for resource ${resourceId}: ${validation.error}`);
|
||||
logger.error(
|
||||
`Invalid path rewrite configuration for resource ${resourceId}: ${validation.error}`
|
||||
);
|
||||
return;
|
||||
}
|
||||
|
||||
resourcesMap.set(key, {
|
||||
resourceId: row.resourceId,
|
||||
name: resourceName,
|
||||
name: resourceName,
|
||||
fullDomain: row.fullDomain,
|
||||
ssl: row.ssl,
|
||||
http: row.http,
|
||||
@@ -239,21 +246,18 @@ export async function getTraefikConfig(
|
||||
preferWildcardCert = configDomain.prefer_wildcard_cert;
|
||||
}
|
||||
|
||||
let tls = {};
|
||||
if (build == "oss") {
|
||||
tls = {
|
||||
certResolver: certResolver,
|
||||
...(preferWildcardCert
|
||||
? {
|
||||
domains: [
|
||||
{
|
||||
main: wildCard
|
||||
}
|
||||
]
|
||||
}
|
||||
: {})
|
||||
};
|
||||
}
|
||||
const tls = {
|
||||
certResolver: certResolver,
|
||||
...(preferWildcardCert
|
||||
? {
|
||||
domains: [
|
||||
{
|
||||
main: wildCard
|
||||
}
|
||||
]
|
||||
}
|
||||
: {})
|
||||
};
|
||||
|
||||
const additionalMiddlewares =
|
||||
config.getRawConfig().traefik.additional_middlewares || [];
|
||||
@@ -264,11 +268,12 @@ export async function getTraefikConfig(
|
||||
];
|
||||
|
||||
// Handle path rewriting middleware
|
||||
if (resource.rewritePath &&
|
||||
if (
|
||||
resource.rewritePath &&
|
||||
resource.path &&
|
||||
resource.pathMatchType &&
|
||||
resource.rewritePathType) {
|
||||
|
||||
resource.rewritePathType
|
||||
) {
|
||||
// Create a unique middleware name
|
||||
const rewriteMiddlewareName = `rewrite-r${resource.resourceId}-${key}`;
|
||||
|
||||
@@ -287,7 +292,10 @@ export async function getTraefikConfig(
|
||||
}
|
||||
|
||||
// Add the middleware to the config
|
||||
Object.assign(config_output.http.middlewares, rewriteResult.middlewares);
|
||||
Object.assign(
|
||||
config_output.http.middlewares,
|
||||
rewriteResult.middlewares
|
||||
);
|
||||
|
||||
// Add the middlewares to the router middleware chain
|
||||
if (rewriteResult.chain) {
|
||||
@@ -298,9 +306,13 @@ export async function getTraefikConfig(
|
||||
routerMiddlewares.push(rewriteMiddlewareName);
|
||||
}
|
||||
|
||||
logger.debug(`Created path rewrite middleware ${rewriteMiddlewareName}: ${resource.pathMatchType}(${resource.path}) -> ${resource.rewritePathType}(${resource.rewritePath})`);
|
||||
logger.debug(
|
||||
`Created path rewrite middleware ${rewriteMiddlewareName}: ${resource.pathMatchType}(${resource.path}) -> ${resource.rewritePathType}(${resource.rewritePath})`
|
||||
);
|
||||
} catch (error) {
|
||||
logger.error(`Failed to create path rewrite middleware for resource ${resource.resourceId}: ${error}`);
|
||||
logger.error(
|
||||
`Failed to create path rewrite middleware for resource ${resource.resourceId}: ${error}`
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -316,7 +328,9 @@ export async function getTraefikConfig(
|
||||
value: string;
|
||||
}[];
|
||||
} catch (e) {
|
||||
logger.warn(`Failed to parse headers for resource ${resource.resourceId}: ${e}`);
|
||||
logger.warn(
|
||||
`Failed to parse headers for resource ${resource.resourceId}: ${e}`
|
||||
);
|
||||
}
|
||||
|
||||
headersArr.forEach((header) => {
|
||||
@@ -482,14 +496,14 @@ export async function getTraefikConfig(
|
||||
})(),
|
||||
...(resource.stickySession
|
||||
? {
|
||||
sticky: {
|
||||
cookie: {
|
||||
name: "p_sticky", // TODO: make this configurable via config.yml like other cookies
|
||||
secure: resource.ssl,
|
||||
httpOnly: true
|
||||
}
|
||||
}
|
||||
}
|
||||
sticky: {
|
||||
cookie: {
|
||||
name: "p_sticky", // TODO: make this configurable via config.yml like other cookies
|
||||
secure: resource.ssl,
|
||||
httpOnly: true
|
||||
}
|
||||
}
|
||||
}
|
||||
: {})
|
||||
}
|
||||
};
|
||||
@@ -590,13 +604,13 @@ export async function getTraefikConfig(
|
||||
})(),
|
||||
...(resource.stickySession
|
||||
? {
|
||||
sticky: {
|
||||
ipStrategy: {
|
||||
depth: 0,
|
||||
sourcePort: true
|
||||
}
|
||||
}
|
||||
}
|
||||
sticky: {
|
||||
ipStrategy: {
|
||||
depth: 0,
|
||||
sourcePort: true
|
||||
}
|
||||
}
|
||||
}
|
||||
: {})
|
||||
}
|
||||
};
|
||||
|
||||
@@ -1,3 +1,16 @@
|
||||
/*
|
||||
* This file is part of a proprietary work.
|
||||
*
|
||||
* Copyright (c) 2025 Fossorial, Inc.
|
||||
* All rights reserved.
|
||||
*
|
||||
* This file is licensed under the Fossorial Commercial License.
|
||||
* You may not use this file except in compliance with the License.
|
||||
* Unauthorized use, copying, modification, or distribution is strictly prohibited.
|
||||
*
|
||||
* This file is not licensed under the AGPLv3.
|
||||
*/
|
||||
|
||||
import { rateLimitService } from "#private/lib/rateLimit";
|
||||
import { cleanup as wsCleanup } from "#private/routers/ws";
|
||||
|
||||
|
||||
@@ -97,20 +97,4 @@ export async function getValidCertificatesForDomains(
|
||||
});
|
||||
|
||||
return validCertsDecrypted;
|
||||
}
|
||||
|
||||
export async function getValidCertificatesForDomainsHybrid(
|
||||
domains: Set<string>
|
||||
): Promise<
|
||||
Array<{
|
||||
id: number;
|
||||
domain: string;
|
||||
wildcard: boolean | null;
|
||||
certFile: string | null;
|
||||
keyFile: string | null;
|
||||
expiresAt: number | null;
|
||||
updatedAt?: number | null;
|
||||
}>
|
||||
> {
|
||||
return []; // stub
|
||||
}
|
||||
}
|
||||
@@ -146,6 +146,10 @@ export class PrivateConfig {
|
||||
if (parsedPrivateConfig.stripe?.s3Region) {
|
||||
process.env.S3_REGION = parsedPrivateConfig.stripe.s3Region;
|
||||
}
|
||||
if (parsedPrivateConfig.flags?.generate_own_certificates) {
|
||||
process.env.GENERATE_OWN_CERTIFICATES =
|
||||
parsedPrivateConfig.flags.generate_own_certificates.toString();
|
||||
}
|
||||
}
|
||||
|
||||
this.rawPrivateConfig = parsedPrivateConfig;
|
||||
|
||||
@@ -12,7 +12,7 @@
|
||||
*/
|
||||
|
||||
import logger from "@server/logger";
|
||||
import redisManager from "@server/private/lib/redis";
|
||||
import redisManager from "#private/lib/redis";
|
||||
import { build } from "@server/build";
|
||||
|
||||
// Rate limiting configuration
|
||||
|
||||
@@ -17,7 +17,7 @@ import { MemoryStore, Store } from "express-rate-limit";
|
||||
import RedisStore from "#private/lib/redisStore";
|
||||
|
||||
export function createStore(): Store {
|
||||
if (build != "oss" && privateConfig.getRawPrivateConfig().flags?.enable_redis) {
|
||||
if (build != "oss" && privateConfig.getRawPrivateConfig().flags.enable_redis) {
|
||||
const rateLimitStore: Store = new RedisStore({
|
||||
prefix: "api-rate-limit", // Optional: customize Redis key prefix
|
||||
skipFailedRequests: true, // Don't count failed requests
|
||||
|
||||
@@ -20,15 +20,18 @@ import { build } from "@server/build";
|
||||
|
||||
const portSchema = z.number().positive().gt(0).lte(65535);
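A quick note on the shared `portSchema` above: it accepts any positive number up to 65535 (an integer check is not applied). A hedged illustration using plain Zod semantics:

```typescript
// Illustrative checks against the port schema defined above.
portSchema.parse(443); // ok
portSchema.parse(65535); // ok
portSchema.safeParse(0).success; // false (must be positive)
portSchema.safeParse(70000).success; // false (must be <= 65535)
```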
|
||||
|
||||
export const privateConfigSchema = z
|
||||
.object({
|
||||
app: z.object({
|
||||
export const privateConfigSchema = z.object({
|
||||
app: z
|
||||
.object({
|
||||
region: z.string().optional().default("default"),
|
||||
base_domain: z.string().optional()
|
||||
}).optional().default({
|
||||
})
|
||||
.optional()
|
||||
.default({
|
||||
region: "default"
|
||||
}),
|
||||
server: z.object({
|
||||
server: z
|
||||
.object({
|
||||
encryption_key_path: z
|
||||
.string()
|
||||
.optional()
|
||||
@@ -37,125 +40,132 @@ export const privateConfigSchema = z
|
||||
resend_api_key: z.string().optional(),
|
||||
reo_client_id: z.string().optional(),
|
||||
fossorial_api_key: z.string().optional()
|
||||
}).optional().default({
|
||||
})
|
||||
.optional()
|
||||
.default({
|
||||
encryption_key_path: "./config/encryption.pem"
|
||||
}),
|
||||
redis: z
|
||||
.object({
|
||||
host: z.string(),
|
||||
port: portSchema,
|
||||
password: z.string().optional(),
|
||||
db: z.number().int().nonnegative().optional().default(0),
|
||||
replicas: z
|
||||
.array(
|
||||
z.object({
|
||||
host: z.string(),
|
||||
port: portSchema,
|
||||
password: z.string().optional(),
|
||||
db: z.number().int().nonnegative().optional().default(0)
|
||||
redis: z
|
||||
.object({
|
||||
host: z.string(),
|
||||
port: portSchema,
|
||||
password: z.string().optional(),
|
||||
db: z.number().int().nonnegative().optional().default(0),
|
||||
replicas: z
|
||||
.array(
|
||||
z.object({
|
||||
host: z.string(),
|
||||
port: portSchema,
|
||||
password: z.string().optional(),
|
||||
db: z.number().int().nonnegative().optional().default(0)
|
||||
})
|
||||
)
|
||||
.optional()
|
||||
// tls: z
|
||||
// .object({
|
||||
// reject_unauthorized: z
|
||||
// .boolean()
|
||||
// .optional()
|
||||
// .default(true)
|
||||
// })
|
||||
// .optional()
|
||||
})
|
||||
.optional(),
|
||||
gerbil: z
|
||||
.object({
|
||||
local_exit_node_reachable_at: z
|
||||
.string()
|
||||
.optional()
|
||||
.default("http://gerbil:3003")
|
||||
})
|
||||
.optional()
|
||||
.default({}),
|
||||
flags: z
|
||||
.object({
|
||||
enable_redis: z.boolean().optional().default(false),
|
||||
generate_own_certificates: z.boolean().optional().default(false)
|
||||
})
|
||||
.optional()
|
||||
.default({}),
|
||||
branding: z
|
||||
.object({
|
||||
app_name: z.string().optional(),
|
||||
background_image_path: z.string().optional(),
|
||||
colors: z
|
||||
.object({
|
||||
light: colorsSchema.optional(),
|
||||
dark: colorsSchema.optional()
|
||||
})
|
||||
.optional(),
|
||||
logo: z
|
||||
.object({
|
||||
light_path: z.string().optional(),
|
||||
dark_path: z.string().optional(),
|
||||
auth_page: z
|
||||
.object({
|
||||
width: z.number().optional(),
|
||||
height: z.number().optional()
|
||||
})
|
||||
)
|
||||
.optional()
|
||||
// tls: z
|
||||
// .object({
|
||||
// reject_unauthorized: z
|
||||
// .boolean()
|
||||
// .optional()
|
||||
// .default(true)
|
||||
// })
|
||||
// .optional()
|
||||
})
|
||||
.optional(),
|
||||
gerbil: z
|
||||
.object({
|
||||
local_exit_node_reachable_at: z.string().optional().default("http://gerbil:3003")
|
||||
})
|
||||
.optional()
|
||||
.default({}),
|
||||
flags: z
|
||||
.object({
|
||||
enable_redis: z.boolean().optional(),
|
||||
})
|
||||
.optional(),
|
||||
branding: z
|
||||
.object({
|
||||
app_name: z.string().optional(),
|
||||
background_image_path: z.string().optional(),
|
||||
colors: z
|
||||
.object({
|
||||
light: colorsSchema.optional(),
|
||||
dark: colorsSchema.optional()
|
||||
})
|
||||
.optional(),
|
||||
logo: z
|
||||
.object({
|
||||
light_path: z.string().optional(),
|
||||
dark_path: z.string().optional(),
|
||||
auth_page: z
|
||||
.object({
|
||||
width: z.number().optional(),
|
||||
height: z.number().optional()
|
||||
})
|
||||
.optional(),
|
||||
navbar: z
|
||||
.object({
|
||||
width: z.number().optional(),
|
||||
height: z.number().optional()
|
||||
})
|
||||
.optional()
|
||||
})
|
||||
.optional(),
|
||||
favicon_path: z.string().optional(),
|
||||
footer: z
|
||||
.array(
|
||||
z.object({
|
||||
text: z.string(),
|
||||
href: z.string().optional()
|
||||
.optional(),
|
||||
navbar: z
|
||||
.object({
|
||||
width: z.number().optional(),
|
||||
height: z.number().optional()
|
||||
})
|
||||
)
|
||||
.optional(),
|
||||
login_page: z
|
||||
.object({
|
||||
subtitle_text: z.string().optional(),
|
||||
title_text: z.string().optional()
|
||||
.optional()
|
||||
})
|
||||
.optional(),
|
||||
favicon_path: z.string().optional(),
|
||||
footer: z
|
||||
.array(
|
||||
z.object({
|
||||
text: z.string(),
|
||||
href: z.string().optional()
|
||||
})
|
||||
.optional(),
|
||||
signup_page: z
|
||||
.object({
|
||||
subtitle_text: z.string().optional(),
|
||||
title_text: z.string().optional()
|
||||
})
|
||||
.optional(),
|
||||
resource_auth_page: z
|
||||
.object({
|
||||
show_logo: z.boolean().optional(),
|
||||
hide_powered_by: z.boolean().optional(),
|
||||
title_text: z.string().optional(),
|
||||
subtitle_text: z.string().optional()
|
||||
})
|
||||
.optional(),
|
||||
emails: z
|
||||
.object({
|
||||
signature: z.string().optional(),
|
||||
colors: z
|
||||
.object({
|
||||
primary: z.string().optional()
|
||||
})
|
||||
.optional()
|
||||
})
|
||||
.optional()
|
||||
})
|
||||
.optional(),
|
||||
stripe: z
|
||||
.object({
|
||||
secret_key: z.string(),
|
||||
webhook_secret: z.string(),
|
||||
s3Bucket: z.string(),
|
||||
s3Region: z.string().default("us-east-1"),
|
||||
localFilePath: z.string()
|
||||
})
|
||||
.optional(),
|
||||
});
|
||||
)
|
||||
.optional(),
|
||||
login_page: z
|
||||
.object({
|
||||
subtitle_text: z.string().optional(),
|
||||
title_text: z.string().optional()
|
||||
})
|
||||
.optional(),
|
||||
signup_page: z
|
||||
.object({
|
||||
subtitle_text: z.string().optional(),
|
||||
title_text: z.string().optional()
|
||||
})
|
||||
.optional(),
|
||||
resource_auth_page: z
|
||||
.object({
|
||||
show_logo: z.boolean().optional(),
|
||||
hide_powered_by: z.boolean().optional(),
|
||||
title_text: z.string().optional(),
|
||||
subtitle_text: z.string().optional()
|
||||
})
|
||||
.optional(),
|
||||
emails: z
|
||||
.object({
|
||||
signature: z.string().optional(),
|
||||
colors: z
|
||||
.object({
|
||||
primary: z.string().optional()
|
||||
})
|
||||
.optional()
|
||||
})
|
||||
.optional()
|
||||
})
|
||||
.optional(),
|
||||
stripe: z
|
||||
.object({
|
||||
secret_key: z.string(),
|
||||
webhook_secret: z.string(),
|
||||
s3Bucket: z.string(),
|
||||
s3Region: z.string().default("us-east-1"),
|
||||
localFilePath: z.string()
|
||||
})
|
||||
.optional()
|
||||
});
|
||||
|
||||
export function readPrivateConfigFile() {
|
||||
if (build == "oss") {
|
||||
@@ -186,9 +196,7 @@ export function readPrivateConfigFile() {
|
||||
}
|
||||
|
||||
if (!environment) {
|
||||
throw new Error(
|
||||
"No private configuration file found."
|
||||
);
|
||||
throw new Error("No private configuration file found.");
|
||||
}
|
||||
|
||||
return environment;
|
||||
|
||||
@@ -46,7 +46,7 @@ class RedisManager {
|
||||
this.isEnabled = false;
|
||||
return;
|
||||
}
|
||||
this.isEnabled = privateConfig.getRawPrivateConfig().flags?.enable_redis || false;
|
||||
this.isEnabled = privateConfig.getRawPrivateConfig().flags.enable_redis || false;
|
||||
if (this.isEnabled) {
|
||||
this.initializeClients();
|
||||
}
|
||||
|
||||
@@ -14,10 +14,10 @@
|
||||
import Stripe from "stripe";
|
||||
import privateConfig from "#private/lib/config";
|
||||
import logger from "@server/logger";
|
||||
import { build } from "@server/build";
|
||||
import { noop } from "@server/lib/billing/usageService";
|
||||
|
||||
let stripe: Stripe | undefined = undefined;
|
||||
if (build == "saas") {
|
||||
if (!noop()) {
|
||||
const stripeApiKey = privateConfig.getRawPrivateConfig().stripe?.secret_key;
|
||||
if (!stripeApiKey) {
|
||||
logger.error("Stripe secret key is not configured");
|
||||
|
||||
@@ -21,11 +21,10 @@ import {
|
||||
} from "@server/db";
|
||||
import { and, eq, inArray, or, isNull, ne, isNotNull, desc } from "drizzle-orm";
|
||||
import logger from "@server/logger";
|
||||
import HttpCode from "@server/types/HttpCode";
|
||||
import config from "@server/lib/config";
|
||||
import { orgs, resources, sites, Target, targets } from "@server/db";
|
||||
import { build } from "@server/build";
|
||||
import { sanitize } from "@server/lib/traefik/utils";
|
||||
import privateConfig from "#private/lib/config";
|
||||
|
||||
const redirectHttpsMiddlewareName = "redirect-to-https";
|
||||
const redirectToRootMiddlewareName = "redirect-to-root";
|
||||
@@ -79,7 +78,7 @@ export async function getTraefikConfig(
|
||||
path: targets.path,
|
||||
pathMatchType: targets.pathMatchType,
|
||||
priority: targets.priority,
|
||||
|
||||
|
||||
// Site fields
|
||||
siteId: sites.siteId,
|
||||
siteType: sites.type,
|
||||
@@ -234,12 +233,13 @@ export async function getTraefikConfig(
|
||||
continue;
|
||||
}
|
||||
|
||||
if (resource.certificateStatus !== "valid") {
|
||||
logger.debug(
|
||||
`Resource ${resource.resourceId} has certificate status ${resource.certificateStatus}`
|
||||
);
|
||||
continue;
|
||||
}
|
||||
// TODO: for now don't filter it out because if you have multiple domain IDs and one has failed it causes all of them to fail
|
||||
// if (resource.certificateStatus !== "valid" && privateConfig.getRawPrivateConfig().flags.generate_own_certificates) {
|
||||
// logger.debug(
|
||||
// `Resource ${resource.resourceId} has certificate status ${resource.certificateStatus}`
|
||||
// );
|
||||
// continue;
|
||||
// }
|
||||
|
||||
// add routers and services empty objects if they don't exist
|
||||
if (!config_output.http.routers) {
|
||||
@@ -264,18 +264,21 @@ export async function getTraefikConfig(
|
||||
|
||||
const configDomain = config.getDomain(resource.domainId);
|
||||
|
||||
let certResolver: string, preferWildcardCert: boolean;
|
||||
if (!configDomain) {
|
||||
certResolver = config.getRawConfig().traefik.cert_resolver;
|
||||
preferWildcardCert =
|
||||
config.getRawConfig().traefik.prefer_wildcard_cert;
|
||||
} else {
|
||||
certResolver = configDomain.cert_resolver;
|
||||
preferWildcardCert = configDomain.prefer_wildcard_cert;
|
||||
}
|
||||
|
||||
let tls = {};
|
||||
if (build == "oss") {
|
||||
if (
|
||||
!privateConfig.getRawPrivateConfig().flags
|
||||
.generate_own_certificates
|
||||
) {
|
||||
let certResolver: string, preferWildcardCert: boolean;
|
||||
if (!configDomain) {
|
||||
certResolver = config.getRawConfig().traefik.cert_resolver;
|
||||
preferWildcardCert =
|
||||
config.getRawConfig().traefik.prefer_wildcard_cert;
|
||||
} else {
|
||||
certResolver = configDomain.cert_resolver;
|
||||
preferWildcardCert = configDomain.prefer_wildcard_cert;
|
||||
}
|
||||
|
||||
tls = {
|
||||
certResolver: certResolver,
|
||||
...(preferWildcardCert
|
||||
@@ -419,7 +422,7 @@ export async function getTraefikConfig(
|
||||
|
||||
return (
|
||||
(targets as TargetWithSite[])
|
||||
.filter((target: TargetWithSite) => {
|
||||
.filter((target: TargetWithSite) => {
|
||||
if (!target.enabled) {
|
||||
return false;
|
||||
}
|
||||
@@ -440,7 +443,7 @@ export async function getTraefikConfig(
|
||||
) {
|
||||
return false;
|
||||
}
|
||||
} else if (target.site.type === "newt") {
|
||||
} else if (target.site.type === "newt") {
|
||||
if (
|
||||
!target.internalPort ||
|
||||
!target.method ||
|
||||
@@ -448,10 +451,10 @@ export async function getTraefikConfig(
|
||||
) {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
return true;
|
||||
})
|
||||
.map((target: TargetWithSite) => {
|
||||
}
|
||||
return true;
|
||||
})
|
||||
.map((target: TargetWithSite) => {
|
||||
if (
|
||||
target.site.type === "local" ||
|
||||
target.site.type === "wireguard"
|
||||
@@ -459,14 +462,14 @@ export async function getTraefikConfig(
|
||||
return {
|
||||
url: `${target.method}://${target.ip}:${target.port}`
|
||||
};
|
||||
} else if (target.site.type === "newt") {
|
||||
} else if (target.site.type === "newt") {
|
||||
const ip =
|
||||
target.site.subnet!.split("/")[0];
|
||||
return {
|
||||
url: `${target.method}://${ip}:${target.internalPort}`
|
||||
};
|
||||
}
|
||||
})
|
||||
}
|
||||
})
|
||||
// filter out duplicates
|
||||
.filter(
|
||||
(v, i, a) =>
|
||||
@@ -709,4 +712,4 @@ export async function getTraefikConfig(
|
||||
}
|
||||
|
||||
return config_output;
|
||||
}
|
||||
}
|
||||
|
||||
@@ -26,6 +26,7 @@ import { sha256 } from "@oslojs/crypto/sha2";
|
||||
import { serializeSessionCookie } from "@server/auth/sessions/app";
|
||||
import { decrypt } from "@server/lib/crypto";
|
||||
import config from "@server/lib/config";
|
||||
import { TransferSessionResponse } from "@server/routers/auth/types";
|
||||
|
||||
const bodySchema = z.object({
|
||||
token: z.string()
|
||||
@@ -33,11 +34,6 @@ const bodySchema = z.object({
|
||||
|
||||
export type TransferSessionBodySchema = z.infer<typeof bodySchema>;
|
||||
|
||||
export type TransferSessionResponse = {
|
||||
valid: boolean;
|
||||
cookie?: string;
|
||||
};
|
||||
|
||||
export async function transferSession(
|
||||
req: Request,
|
||||
res: Response,
|
||||
|
||||
@@ -22,6 +22,8 @@ import createHttpError from "http-errors";
|
||||
import logger from "@server/logger";
|
||||
import { fromZodError } from "zod-validation-error";
|
||||
import { OpenAPITags, registry } from "@server/openApi";
|
||||
import { GetOrgSubscriptionResponse } from "@server/routers/billing/types";
|
||||
|
||||
// Import tables for billing
|
||||
import {
|
||||
customers,
|
||||
@@ -37,11 +39,6 @@ const getOrgSchema = z
|
||||
})
|
||||
.strict();
|
||||
|
||||
export type GetOrgSubscriptionResponse = {
|
||||
subscription: Subscription | null;
|
||||
items: SubscriptionItem[];
|
||||
};
|
||||
|
||||
registry.registerPath({
|
||||
method: "get",
|
||||
path: "/org/{orgId}/billing/subscription",
|
||||
|
||||
@@ -25,6 +25,7 @@ import { OpenAPITags, registry } from "@server/openApi";
|
||||
import { Limit, limits, Usage, usage } from "@server/db";
|
||||
import { usageService } from "@server/lib/billing/usageService";
|
||||
import { FeatureId } from "@server/lib/billing";
|
||||
import { GetOrgUsageResponse } from "@server/routers/billing/types";
|
||||
|
||||
const getOrgSchema = z
|
||||
.object({
|
||||
@@ -32,11 +33,6 @@ const getOrgSchema = z
|
||||
})
|
||||
.strict();
|
||||
|
||||
export type GetOrgUsageResponse = {
|
||||
usage: Usage[];
|
||||
limits: Limit[];
|
||||
};
|
||||
|
||||
registry.registerPath({
|
||||
method: "get",
|
||||
path: "/org/{orgId}/billing/usage",
|
||||
|
||||
@@ -19,6 +19,7 @@ import createHttpError from "http-errors";
|
||||
import logger from "@server/logger";
|
||||
import { fromZodError } from "zod-validation-error";
|
||||
import { getOrgTierData } from "#private/lib/billing";
|
||||
import { GetOrgTierResponse } from "@server/routers/billing/types";
|
||||
|
||||
const getOrgSchema = z
|
||||
.object({
|
||||
@@ -26,11 +27,6 @@ const getOrgSchema = z
|
||||
})
|
||||
.strict();
|
||||
|
||||
export type GetOrgTierResponse = {
|
||||
tier: string | null;
|
||||
active: boolean;
|
||||
};
|
||||
|
||||
export async function getOrgTier(
|
||||
req: Request,
|
||||
res: Response,
|
||||
|
||||
@@ -15,15 +15,19 @@ import { Certificate, certificates, db, domains } from "@server/db";
|
||||
import logger from "@server/logger";
|
||||
import { Transaction } from "@server/db";
|
||||
import { eq, or, and, like } from "drizzle-orm";
|
||||
import { build } from "@server/build";
|
||||
import privateConfig from "#private/lib/config";
|
||||
|
||||
/**
|
||||
* Checks if a certificate exists for the given domain.
|
||||
* If not, creates a new certificate in 'pending' state.
|
||||
* Wildcard certs cover subdomains.
|
||||
*/
|
||||
export async function createCertificate(domainId: string, domain: string, trx: Transaction | typeof db) {
|
||||
if (build !== "saas") {
|
||||
export async function createCertificate(
|
||||
domainId: string,
|
||||
domain: string,
|
||||
trx: Transaction | typeof db
|
||||
) {
|
||||
if (!privateConfig.getRawPrivateConfig().flags.generate_own_certificates) {
|
||||
return;
|
||||
}
|
||||
|
||||
@@ -39,7 +43,7 @@ export async function createCertificate(domainId: string, domain: string, trx: T
|
||||
|
||||
let existing: Certificate[] = [];
|
||||
if (domainRecord.type == "ns") {
|
||||
const domainLevelDown = domain.split('.').slice(1).join('.');
|
||||
const domainLevelDown = domain.split(".").slice(1).join(".");
|
||||
existing = await trx
|
||||
.select()
|
||||
.from(certificates)
|
||||
@@ -49,7 +53,7 @@ export async function createCertificate(domainId: string, domain: string, trx: T
|
||||
eq(certificates.wildcard, true), // only NS domains can have wildcard certs
|
||||
or(
|
||||
eq(certificates.domain, domain),
|
||||
eq(certificates.domain, domainLevelDown),
|
||||
eq(certificates.domain, domainLevelDown)
|
||||
)
|
||||
)
|
||||
);
|
||||
@@ -67,9 +71,7 @@ export async function createCertificate(domainId: string, domain: string, trx: T
|
||||
}
|
||||
|
||||
if (existing.length > 0) {
|
||||
logger.info(
|
||||
`Certificate already exists for domain ${domain}`
|
||||
);
|
||||
logger.info(`Certificate already exists for domain ${domain}`);
|
||||
return;
|
||||
}
|
||||
|
||||
|
||||
@@ -21,6 +21,7 @@ import createHttpError from "http-errors";
|
||||
import logger from "@server/logger";
|
||||
import { fromError } from "zod-validation-error";
|
||||
import { registry } from "@server/openApi";
|
||||
import { GetCertificateResponse } from "@server/routers/certificates/types";
|
||||
|
||||
const getCertificateSchema = z
|
||||
.object({
|
||||
@@ -96,20 +97,6 @@ async function query(domainId: string, domain: string) {
|
||||
return existing.length > 0 ? existing[0] : null;
|
||||
}
|
||||
|
||||
export type GetCertificateResponse = {
|
||||
certId: number;
|
||||
domain: string;
|
||||
domainId: string;
|
||||
wildcard: boolean;
|
||||
status: string; // pending, requested, valid, expired, failed
|
||||
expiresAt: string | null;
|
||||
lastRenewalAttempt: Date | null;
|
||||
createdAt: string;
|
||||
updatedAt: string;
|
||||
errorMessage?: string | null;
|
||||
renewalCount: number;
|
||||
}
|
||||
|
||||
registry.registerPath({
|
||||
method: "get",
|
||||
path: "/org/{orgId}/certificate/{domainId}/{domain}",
|
||||
|
||||
@@ -21,6 +21,7 @@ import { fromError } from "zod-validation-error";
|
||||
import { OpenAPITags, registry } from "@server/openApi";
|
||||
import { db, domainNamespaces, resources } from "@server/db";
|
||||
import { inArray } from "drizzle-orm";
|
||||
import { CheckDomainAvailabilityResponse } from "@server/routers/domain/types";
|
||||
|
||||
const paramsSchema = z.object({}).strict();
|
||||
|
||||
@@ -30,15 +31,6 @@ const querySchema = z
|
||||
})
|
||||
.strict();
|
||||
|
||||
export type CheckDomainAvailabilityResponse = {
|
||||
available: boolean;
|
||||
options: {
|
||||
domainNamespaceId: string;
|
||||
domainId: string;
|
||||
fullDomain: string;
|
||||
}[];
|
||||
};
|
||||
|
||||
registry.registerPath({
|
||||
method: "get",
|
||||
path: "/domain/check-namespace-availability",
|
||||
|
||||
@@ -39,16 +39,17 @@ import {
|
||||
import rateLimit, { ipKeyGenerator } from "express-rate-limit";
|
||||
import createHttpError from "http-errors";
|
||||
import HttpCode from "@server/types/HttpCode";
|
||||
|
||||
import {
|
||||
unauthenticated as ua,
|
||||
authenticated as a
|
||||
} from "@server/routers/external";
|
||||
import { verifyValidLicense } from "../middlewares/verifyValidLicense";
|
||||
import { build } from "@server/build";
|
||||
import {
|
||||
unauthenticated as ua,
|
||||
authenticated as a,
|
||||
authRouter as aa
|
||||
} from "@server/routers/external";
|
||||
|
||||
export const authenticated = a;
|
||||
export const unauthenticated = ua;
|
||||
export const authRouter = aa;
|
||||
|
||||
unauthenticated.post(
|
||||
"/remote-exit-node/quick-start",
|
||||
@@ -276,8 +277,6 @@ authenticated.get(
|
||||
loginPage.getLoginPage
|
||||
);
|
||||
|
||||
export const authRouter = Router();
|
||||
|
||||
authRouter.post(
|
||||
"/remoteExitNode/get-token",
|
||||
verifyValidLicense,
|
||||
|
||||
@@ -68,10 +68,11 @@ import { decryptData } from "@server/lib/encryption";
|
||||
import config from "@server/lib/config";
|
||||
import privateConfig from "#private/lib/config";
|
||||
import * as fs from "fs";
|
||||
import { exchangeSession } from "@server/routers/badger";
|
||||
import { exchangeSession } from "@server/routers/badger";
|
||||
import { validateResourceSessionToken } from "@server/auth/sessions/resource";
|
||||
import { checkExitNodeOrg, resolveExitNodes } from "#private/lib/exitNodes";
|
||||
import { maxmindLookup } from "@server/db/maxmind";
|
||||
import { verifyResourceAccessToken } from "@server/auth/verifyResourceAccessToken";
|
||||
|
||||
// Zod schemas for request validation
|
||||
const getResourceByDomainParamsSchema = z
|
||||
@@ -162,6 +163,14 @@ const validateResourceSessionTokenBodySchema = z
|
||||
})
|
||||
.strict();
|
||||
|
||||
const validateResourceAccessTokenBodySchema = z
|
||||
.object({
|
||||
accessTokenId: z.string().optional(),
|
||||
resourceId: z.number().optional(),
|
||||
accessToken: z.string()
|
||||
})
|
||||
.strict();
|
||||
|
||||
// Certificates by domains query validation
|
||||
const getCertificatesByDomainsQuerySchema = z
|
||||
.object({
|
||||
@@ -215,6 +224,33 @@ export type UserSessionWithUser = {
|
||||
export const hybridRouter = Router();
|
||||
hybridRouter.use(verifySessionRemoteExitNodeMiddleware);
|
||||
|
||||
hybridRouter.get(
|
||||
"/general-config",
|
||||
async (req: Request, res: Response, next: NextFunction) => {
|
||||
return response(res, {
|
||||
data: {
|
||||
resource_session_request_param:
|
||||
config.getRawConfig().server.resource_session_request_param,
|
||||
resource_access_token_headers:
|
||||
config.getRawConfig().server.resource_access_token_headers,
|
||||
resource_access_token_param:
|
||||
config.getRawConfig().server.resource_access_token_param,
|
||||
session_cookie_name:
|
||||
config.getRawConfig().server.session_cookie_name,
|
||||
require_email_verification:
|
||||
config.getRawConfig().flags?.require_email_verification ||
|
||||
false,
|
||||
resource_session_length_hours:
|
||||
config.getRawConfig().server.resource_session_length_hours
|
||||
},
|
||||
success: true,
|
||||
error: false,
|
||||
message: "General config retrieved successfully",
|
||||
status: HttpCode.OK
|
||||
});
|
||||
}
|
||||
);
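A remote exit node consuming this route would receive a payload shaped roughly like the following (field values are illustrative, not actual defaults; the keys mirror the handler above):

```typescript
// Illustrative response body for GET /api/v1/hybrid/general-config.
const exampleGeneralConfig = {
    data: {
        resource_session_request_param: "p_session_request", // illustrative value
        resource_access_token_headers: { id: "P-Access-Token-Id", token: "P-Access-Token" },
        resource_access_token_param: "p_token",
        session_cookie_name: "p_session",
        require_email_verification: false,
        resource_session_length_hours: 720
    },
    success: true,
    error: false,
    message: "General config retrieved successfully",
    status: 200
};
```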
|
||||
|
||||
hybridRouter.get(
|
||||
"/traefik-config",
|
||||
async (req: Request, res: Response, next: NextFunction) => {
|
||||
@@ -1101,6 +1137,52 @@ hybridRouter.post(
|
||||
}
|
||||
);
|
||||
|
||||
// Validate resource session token
|
||||
hybridRouter.post(
|
||||
"/resource/:resourceId/access-token/verify",
|
||||
async (req: Request, res: Response, next: NextFunction) => {
|
||||
try {
|
||||
const parsedBody = validateResourceAccessTokenBodySchema.safeParse(
|
||||
req.body
|
||||
);
|
||||
if (!parsedBody.success) {
|
||||
return next(
|
||||
createHttpError(
|
||||
HttpCode.BAD_REQUEST,
|
||||
fromError(parsedBody.error).toString()
|
||||
)
|
||||
);
|
||||
}
|
||||
|
||||
const { accessToken, resourceId, accessTokenId } = parsedBody.data;
|
||||
|
||||
const result = await verifyResourceAccessToken({
|
||||
accessTokenId,
|
||||
accessToken,
|
||||
resourceId
|
||||
});
|
||||
|
||||
return response(res, {
|
||||
data: result,
|
||||
success: true,
|
||||
error: false,
|
||||
message: result.valid
|
||||
? "Resource access token is valid"
|
||||
: "Resource access token is invalid or expired",
|
||||
status: HttpCode.OK
|
||||
});
|
||||
} catch (error) {
|
||||
logger.error(error);
|
||||
return next(
|
||||
createHttpError(
|
||||
HttpCode.INTERNAL_SERVER_ERROR,
|
||||
"Failed to validate resource session token"
|
||||
)
|
||||
);
|
||||
}
|
||||
}
|
||||
);
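From the remote exit node side, exercising this verification route might look like the sketch below, reusing the token manager shown earlier. It assumes the hybrid router is mounted at `/api/v1/hybrid`, as the traefik-config fetch above suggests, and the IDs are placeholders:

```typescript
// Hedged sketch: verifying a resource access token against the hybrid API.
import axios from "axios";
import { tokenManager } from "../tokenManager";

async function verifyAccessToken(
    endpoint: string,
    resourceId: number,
    accessTokenId: string,
    accessToken: string
) {
    const resp = await axios.post(
        `${endpoint}/api/v1/hybrid/resource/${resourceId}/access-token/verify`,
        { accessTokenId, resourceId, accessToken },
        await tokenManager.getAuthHeader()
    );
    return resp.data?.data; // { valid, error?, tokenItem? } from verifyResourceAccessToken
}
```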
|
||||
|
||||
const geoIpLookupParamsSchema = z.object({
|
||||
ip: z.string().ip()
|
||||
});
|
||||
@@ -1489,4 +1571,4 @@ hybridRouter.post(
|
||||
);
|
||||
}
|
||||
}
|
||||
);
|
||||
);
|
||||
|
||||
@@ -33,6 +33,7 @@ import { createCertificate } from "#private/routers/certificates/createCertifica
|
||||
import { getOrgTierData } from "#private/lib/billing";
|
||||
import { TierId } from "@server/lib/billing/tiers";
|
||||
import { build } from "@server/build";
|
||||
import { CreateLoginPageResponse } from "@server/routers/loginPage/types";
|
||||
|
||||
const paramsSchema = z
|
||||
.object({
|
||||
@@ -49,8 +50,6 @@ const bodySchema = z
|
||||
|
||||
export type CreateLoginPageBody = z.infer<typeof bodySchema>;
|
||||
|
||||
export type CreateLoginPageResponse = LoginPage;
|
||||
|
||||
export async function createLoginPage(
|
||||
req: Request,
|
||||
res: Response,
|
||||
|
||||
@@ -20,6 +20,7 @@ import createHttpError from "http-errors";
|
||||
import logger from "@server/logger";
|
||||
import { fromError } from "zod-validation-error";
|
||||
import { eq, and } from "drizzle-orm";
|
||||
import { DeleteLoginPageResponse } from "@server/routers/loginPage/types";
|
||||
|
||||
const paramsSchema = z
|
||||
.object({
|
||||
@@ -28,8 +29,6 @@ const paramsSchema = z
|
||||
})
|
||||
.strict();
|
||||
|
||||
export type DeleteLoginPageResponse = LoginPage;
|
||||
|
||||
export async function deleteLoginPage(
|
||||
req: Request,
|
||||
res: Response,
|
||||
|
||||
@@ -20,6 +20,7 @@ import HttpCode from "@server/types/HttpCode";
|
||||
import createHttpError from "http-errors";
|
||||
import logger from "@server/logger";
|
||||
import { fromError } from "zod-validation-error";
|
||||
import { GetLoginPageResponse } from "@server/routers/loginPage/types";
|
||||
|
||||
const paramsSchema = z
|
||||
.object({
|
||||
@@ -40,10 +41,6 @@ async function query(orgId: string) {
|
||||
return res?.loginPage;
|
||||
}
|
||||
|
||||
export type GetLoginPageResponse = NonNullable<
|
||||
Awaited<ReturnType<typeof query>>
|
||||
>;
|
||||
|
||||
export async function getLoginPage(
|
||||
req: Request,
|
||||
res: Response,
|
||||
|
||||
@@ -20,6 +20,7 @@ import HttpCode from "@server/types/HttpCode";
|
||||
import createHttpError from "http-errors";
|
||||
import logger from "@server/logger";
|
||||
import { fromError } from "zod-validation-error";
|
||||
import { LoadLoginPageResponse } from "@server/routers/loginPage/types";
|
||||
|
||||
const querySchema = z.object({
|
||||
resourceId: z.coerce.number().int().positive().optional(),
|
||||
@@ -70,10 +71,6 @@ async function query(orgId: string | undefined, fullDomain: string) {
|
||||
};
|
||||
}
|
||||
|
||||
export type LoadLoginPageResponse = NonNullable<
|
||||
Awaited<ReturnType<typeof query>>
|
||||
> & { orgId: string };
|
||||
|
||||
export async function loadLoginPage(
|
||||
req: Request,
|
||||
res: Response,
|
||||
|
||||
@@ -26,6 +26,7 @@ import { createCertificate } from "#private/routers/certificates/createCertifica
|
||||
import { getOrgTierData } from "#private/lib/billing";
|
||||
import { TierId } from "@server/lib/billing/tiers";
|
||||
import { build } from "@server/build";
|
||||
import { UpdateLoginPageResponse } from "@server/routers/loginPage/types";
|
||||
|
||||
const paramsSchema = z
|
||||
.object({
|
||||
@@ -55,8 +56,6 @@ const bodySchema = z
|
||||
|
||||
export type UpdateLoginPageBody = z.infer<typeof bodySchema>;
|
||||
|
||||
export type UpdateLoginPageResponse = LoginPage;
|
||||
|
||||
export async function updateLoginPage(
|
||||
req: Request,
|
||||
res: Response,
|
||||
|
||||
@@ -27,6 +27,7 @@ import config from "@server/lib/config";
|
||||
import { build } from "@server/build";
|
||||
import { getOrgTierData } from "#private/lib/billing";
|
||||
import { TierId } from "@server/lib/billing/tiers";
|
||||
import { CreateOrgIdpResponse } from "@server/routers/orgIdp/types";
|
||||
|
||||
const paramsSchema = z.object({ orgId: z.string().nonempty() }).strict();
|
||||
|
||||
@@ -47,11 +48,6 @@ const bodySchema = z
|
||||
})
|
||||
.strict();
|
||||
|
||||
export type CreateOrgIdpResponse = {
|
||||
idpId: number;
|
||||
redirectUrl: string;
|
||||
};
|
||||
|
||||
// registry.registerPath({
|
||||
// method: "put",
|
||||
// path: "/idp/oidc",
|
||||
|
||||
@@ -25,6 +25,7 @@ import { OpenAPITags, registry } from "@server/openApi";
|
||||
import config from "@server/lib/config";
|
||||
import { decrypt } from "@server/lib/crypto";
|
||||
import { generateOidcRedirectUrl } from "@server/lib/idp/generateRedirectUrl";
|
||||
import { GetOrgIdpResponse } from "@server/routers/orgIdp/types";
|
||||
|
||||
const paramsSchema = z
|
||||
.object({
|
||||
@@ -47,10 +48,6 @@ async function query(idpId: number, orgId: string) {
|
||||
return res;
|
||||
}
|
||||
|
||||
export type GetOrgIdpResponse = NonNullable<
|
||||
Awaited<ReturnType<typeof query>>
|
||||
> & { redirectUrl: string };
|
||||
|
||||
// registry.registerPath({
|
||||
// method: "get",
|
||||
// path: "/idp/{idpId}",
|
||||
|
||||
@@ -22,6 +22,7 @@ import { eq, sql } from "drizzle-orm";
|
||||
import logger from "@server/logger";
|
||||
import { fromError } from "zod-validation-error";
|
||||
import { OpenAPITags, registry } from "@server/openApi";
|
||||
import { ListOrgIdpsResponse } from "@server/routers/orgIdp/types";
|
||||
|
||||
const querySchema = z
|
||||
.object({
|
||||
@@ -65,15 +66,6 @@ async function query(orgId: string, limit: number, offset: number) {
|
||||
return res;
|
||||
}
|
||||
|
||||
export type ListOrgIdpsResponse = {
|
||||
idps: Awaited<ReturnType<typeof query>>;
|
||||
pagination: {
|
||||
total: number;
|
||||
limit: number;
|
||||
offset: number;
|
||||
};
|
||||
};
|
||||
|
||||
// registry.registerPath({
|
||||
// method: "get",
|
||||
// path: "/idp",
|
||||
|
||||
@@ -29,17 +29,12 @@ import { and, eq } from "drizzle-orm";
|
||||
import { getNextAvailableSubnet } from "@server/lib/exitNodes";
|
||||
import { usageService } from "@server/lib/billing/usageService";
|
||||
import { FeatureId } from "@server/lib/billing";
|
||||
import { CreateRemoteExitNodeResponse } from "@server/routers/remoteExitNode/types";
|
||||
|
||||
export const paramsSchema = z.object({
|
||||
orgId: z.string()
|
||||
});
|
||||
|
||||
export type CreateRemoteExitNodeResponse = {
|
||||
token: string;
|
||||
remoteExitNodeId: string;
|
||||
secret: string;
|
||||
};
|
||||
|
||||
const bodySchema = z
|
||||
.object({
|
||||
remoteExitNodeId: z.string().length(15),
|
||||
@@ -89,30 +84,25 @@ export async function createRemoteExitNode(
|
||||
orgId,
|
||||
FeatureId.REMOTE_EXIT_NODES
|
||||
);
|
||||
if (!usage) {
|
||||
return next(
|
||||
createHttpError(
|
||||
HttpCode.NOT_FOUND,
|
||||
"No usage data found for this organization"
|
||||
)
|
||||
);
|
||||
}
|
||||
const rejectRemoteExitNodes = await usageService.checkLimitSet(
|
||||
orgId,
|
||||
false,
|
||||
FeatureId.REMOTE_EXIT_NODES,
|
||||
{
|
||||
...usage,
|
||||
instantaneousValue: (usage.instantaneousValue || 0) + 1
|
||||
} // We need to add one to know if we are violating the limit
|
||||
);
|
||||
if (rejectRemoteExitNodes) {
|
||||
return next(
|
||||
createHttpError(
|
||||
HttpCode.FORBIDDEN,
|
||||
"Remote exit node limit exceeded. Please upgrade your plan or contact us at support@fossorial.io"
|
||||
)
|
||||
if (usage) {
|
||||
const rejectRemoteExitNodes = await usageService.checkLimitSet(
|
||||
orgId,
|
||||
false,
|
||||
FeatureId.REMOTE_EXIT_NODES,
|
||||
{
|
||||
...usage,
|
||||
instantaneousValue: (usage.instantaneousValue || 0) + 1
|
||||
} // We need to add one to know if we are violating the limit
|
||||
);
|
||||
|
||||
if (rejectRemoteExitNodes) {
|
||||
return next(
|
||||
createHttpError(
|
||||
HttpCode.FORBIDDEN,
|
||||
"Remote exit node limit exceeded. Please upgrade your plan or contact us at support@fossorial.io"
|
||||
)
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
const secretHash = await hashPassword(secret);
|
||||
|
||||
@@ -21,6 +21,7 @@ import HttpCode from "@server/types/HttpCode";
|
||||
import createHttpError from "http-errors";
|
||||
import logger from "@server/logger";
|
||||
import { fromError } from "zod-validation-error";
|
||||
import { GetRemoteExitNodeResponse } from "@server/routers/remoteExitNode/types";
|
||||
|
||||
const getRemoteExitNodeSchema = z
|
||||
.object({
|
||||
@@ -52,8 +53,6 @@ async function query(remoteExitNodeId: string) {
|
||||
return remoteExitNode;
|
||||
}
|
||||
|
||||
export type GetRemoteExitNodeResponse = Awaited<ReturnType<typeof query>>;
|
||||
|
||||
export async function getRemoteExitNode(
|
||||
req: Request,
|
||||
res: Response,
|
||||
|
||||
@@ -35,8 +35,6 @@ export const remoteExitNodeGetTokenBodySchema = z.object({
|
||||
token: z.string().optional()
|
||||
});
|
||||
|
||||
export type RemoteExitNodeGetTokenBody = z.infer<typeof remoteExitNodeGetTokenBodySchema>;
|
||||
|
||||
export async function getRemoteExitNodeToken(
|
||||
req: Request,
|
||||
res: Response,
|
||||
|
||||
@@ -21,6 +21,7 @@ import HttpCode from "@server/types/HttpCode";
|
||||
import createHttpError from "http-errors";
|
||||
import logger from "@server/logger";
|
||||
import { fromError } from "zod-validation-error";
|
||||
import { ListRemoteExitNodesResponse } from "@server/routers/remoteExitNode/types";
|
||||
|
||||
const listRemoteExitNodesParamsSchema = z
|
||||
.object({
|
||||
@@ -43,7 +44,7 @@ const listRemoteExitNodesSchema = z.object({
|
||||
.pipe(z.number().int().nonnegative())
|
||||
});
|
||||
|
||||
function queryRemoteExitNodes(orgId: string) {
|
||||
export function queryRemoteExitNodes(orgId: string) {
|
||||
return db
|
||||
.select({
|
||||
remoteExitNodeId: remoteExitNodes.remoteExitNodeId,
|
||||
@@ -65,11 +66,6 @@ function queryRemoteExitNodes(orgId: string) {
|
||||
);
|
||||
}
|
||||
|
||||
export type ListRemoteExitNodesResponse = {
|
||||
remoteExitNodes: Awaited<ReturnType<typeof queryRemoteExitNodes>>;
|
||||
pagination: { total: number; limit: number; offset: number };
|
||||
};
|
||||
|
||||
export async function listRemoteExitNodes(
|
||||
req: Request,
|
||||
res: Response,
|
||||
|
||||
@@ -19,11 +19,7 @@ import logger from "@server/logger";
|
||||
import { generateId } from "@server/auth/sessions/app";
|
||||
import { fromError } from "zod-validation-error";
|
||||
import { z } from "zod";
|
||||
|
||||
export type PickRemoteExitNodeDefaultsResponse = {
|
||||
remoteExitNodeId: string;
|
||||
secret: string;
|
||||
};
|
||||
import { PickRemoteExitNodeDefaultsResponse } from "@server/routers/remoteExitNode/types";
|
||||
|
||||
const paramsSchema = z
|
||||
.object({
|
||||
|
||||
@@ -12,7 +12,7 @@
|
||||
*/
|
||||
|
||||
import { NextFunction, Request, Response } from "express";
|
||||
import { db, exitNodes, exitNodeOrgs } from "@server/db";
|
||||
import { db } from "@server/db";
|
||||
import HttpCode from "@server/types/HttpCode";
|
||||
import { remoteExitNodes } from "@server/db";
|
||||
import createHttpError from "http-errors";
|
||||
@@ -24,11 +24,7 @@ import { hashPassword } from "@server/auth/password";
|
||||
import logger from "@server/logger";
|
||||
import z from "zod";
|
||||
import { fromError } from "zod-validation-error";
|
||||
|
||||
export type QuickStartRemoteExitNodeResponse = {
|
||||
remoteExitNodeId: string;
|
||||
secret: string;
|
||||
};
|
||||
import { QuickStartRemoteExitNodeResponse } from "@server/routers/remoteExitNode/types";
|
||||
|
||||
const INSTALLER_KEY = "af4e4785-7e09-11f0-b93a-74563c4e2a7e";
|
||||
|
||||
|
||||
8
server/routers/auth/types.ts
Normal file
@@ -0,0 +1,8 @@
|
||||
export type TransferSessionResponse = {
|
||||
valid: boolean;
|
||||
cookie?: string;
|
||||
};
|
||||
|
||||
export type GetSessionTransferTokenRenponse = {
|
||||
token: string;
|
||||
};
|
||||
@@ -33,7 +33,9 @@ import createHttpError from "http-errors";
|
||||
import NodeCache from "node-cache";
|
||||
import { z } from "zod";
|
||||
import { fromError } from "zod-validation-error";
|
||||
import { getCountryCodeForIp, remoteGetCountryCodeForIp } from "@server/lib/geoip";
|
||||
import {
|
||||
getCountryCodeForIp,
|
||||
} from "@server/lib/geoip";
|
||||
import { getOrgTierData } from "#dynamic/lib/billing";
|
||||
import { TierId } from "@server/lib/billing/tiers";
|
||||
import { verifyPassword } from "@server/auth/password";
|
||||
@@ -106,23 +108,23 @@ export async function verifyResourceSession(
|
||||
|
||||
const clientIp = requestIp
|
||||
? (() => {
|
||||
logger.debug("Request IP:", { requestIp });
|
||||
if (requestIp.startsWith("[") && requestIp.includes("]")) {
|
||||
// if brackets are found, extract the IPv6 address from between the brackets
|
||||
const ipv6Match = requestIp.match(/\[(.*?)\]/);
|
||||
if (ipv6Match) {
|
||||
return ipv6Match[1];
|
||||
}
|
||||
}
|
||||
logger.debug("Request IP:", { requestIp });
|
||||
if (requestIp.startsWith("[") && requestIp.includes("]")) {
|
||||
// if brackets are found, extract the IPv6 address from between the brackets
|
||||
const ipv6Match = requestIp.match(/\[(.*?)\]/);
|
||||
if (ipv6Match) {
|
||||
return ipv6Match[1];
|
||||
}
|
||||
}
|
||||
|
||||
// ipv4
|
||||
// split at last colon
|
||||
const lastColonIndex = requestIp.lastIndexOf(":");
|
||||
if (lastColonIndex !== -1) {
|
||||
return requestIp.substring(0, lastColonIndex);
|
||||
}
|
||||
return requestIp;
|
||||
})()
|
||||
// ipv4
|
||||
// split at last colon
|
||||
const lastColonIndex = requestIp.lastIndexOf(":");
|
||||
if (lastColonIndex !== -1) {
|
||||
return requestIp.substring(0, lastColonIndex);
|
||||
}
|
||||
return requestIp;
|
||||
})()
|
||||
: undefined;
|
||||
|
||||
logger.debug("Client IP:", { clientIp });
|
||||
@@ -137,11 +139,11 @@ export async function verifyResourceSession(
|
||||
const resourceCacheKey = `resource:${cleanHost}`;
|
||||
let resourceData:
|
||||
| {
|
||||
resource: Resource | null;
|
||||
pincode: ResourcePincode | null;
|
||||
password: ResourcePassword | null;
|
||||
headerAuth: ResourceHeaderAuth | null;
|
||||
}
|
||||
resource: Resource | null;
|
||||
pincode: ResourcePincode | null;
|
||||
password: ResourcePassword | null;
|
||||
headerAuth: ResourceHeaderAuth | null;
|
||||
}
|
||||
| undefined = cache.get(resourceCacheKey);
|
||||
|
||||
if (!resourceData) {
|
||||
@@ -213,21 +215,21 @@ export async function verifyResourceSession(
|
||||
headers &&
|
||||
headers[
|
||||
config.getRawConfig().server.resource_access_token_headers.id
|
||||
] &&
|
||||
] &&
|
||||
headers[
|
||||
config.getRawConfig().server.resource_access_token_headers.token
|
||||
]
|
||||
]
|
||||
) {
|
||||
const accessTokenId =
|
||||
headers[
|
||||
config.getRawConfig().server.resource_access_token_headers
|
||||
.id
|
||||
];
|
||||
];
|
||||
const accessToken =
|
||||
headers[
|
||||
config.getRawConfig().server.resource_access_token_headers
|
||||
.token
|
||||
];
|
||||
];
|
||||
|
||||
const { valid, error, tokenItem } = await verifyResourceAccessToken(
|
||||
{
|
||||
@@ -294,10 +296,17 @@ export async function verifyResourceSession(
|
||||
|
||||
// check for HTTP Basic Auth header
|
||||
if (headerAuth && clientHeaderAuth) {
|
||||
if(cache.get(clientHeaderAuth)) {
|
||||
logger.debug("Resource allowed because header auth is valid (cached)");
|
||||
if (cache.get(clientHeaderAuth)) {
|
||||
logger.debug(
|
||||
"Resource allowed because header auth is valid (cached)"
|
||||
);
|
||||
return allowed(res);
|
||||
}else if(await verifyPassword(clientHeaderAuth, headerAuth.headerAuthHash)){
|
||||
} else if (
|
||||
await verifyPassword(
|
||||
clientHeaderAuth,
|
||||
headerAuth.headerAuthHash
|
||||
)
|
||||
) {
|
||||
cache.set(clientHeaderAuth, clientHeaderAuth);
|
||||
logger.debug("Resource allowed because header auth is valid");
|
||||
return allowed(res);
|
||||
@@ -477,7 +486,11 @@ function extractResourceSessionToken(
|
||||
return latest.token;
|
||||
}
|
||||
|
||||
async function notAllowed(res: Response, redirectPath?: string, orgId?: string) {
|
||||
async function notAllowed(
|
||||
res: Response,
|
||||
redirectPath?: string,
|
||||
orgId?: string
|
||||
) {
|
||||
let loginPage: LoginPage | null = null;
|
||||
if (orgId) {
|
||||
const { tier } = await getOrgTierData(orgId); // returns null in oss
|
||||
@@ -491,14 +504,11 @@ async function notAllowed(res: Response, redirectPath?: string, orgId?: string)
|
||||
let endpoint: string;
|
||||
|
||||
if (loginPage && loginPage.domainId && loginPage.fullDomain) {
|
||||
const secure = config.getRawConfig().app.dashboard_url?.startsWith("https");
|
||||
const secure = config
|
||||
.getRawConfig()
|
||||
.app.dashboard_url?.startsWith("https");
|
||||
const method = secure ? "https" : "http";
|
||||
endpoint = `${method}://${loginPage.fullDomain}`;
|
||||
} else if (config.isManagedMode()) {
|
||||
endpoint =
|
||||
config.getRawConfig().managed?.redirect_endpoint ||
|
||||
config.getRawConfig().managed?.endpoint ||
|
||||
"";
|
||||
} else {
|
||||
endpoint = config.getRawConfig().app.dashboard_url!;
|
||||
}
|
||||
@@ -803,11 +813,7 @@ async function isIpInGeoIP(ip: string, countryCode: string): Promise<boolean> {
|
||||
let cachedCountryCode: string | undefined = cache.get(geoIpCacheKey);
|
||||
|
||||
if (!cachedCountryCode) {
|
||||
if (config.isManagedMode()) {
|
||||
cachedCountryCode = await remoteGetCountryCodeForIp(ip);
|
||||
} else {
|
||||
cachedCountryCode = await getCountryCodeForIp(ip); // do it locally
|
||||
}
|
||||
cachedCountryCode = await getCountryCodeForIp(ip); // do it locally
|
||||
// Cache for longer since IP geolocation doesn't change frequently
|
||||
cache.set(geoIpCacheKey, cachedCountryCode, 300); // 5 minutes
|
||||
}
|
||||
@@ -817,7 +823,9 @@ async function isIpInGeoIP(ip: string, countryCode: string): Promise<boolean> {
|
||||
return cachedCountryCode?.toUpperCase() === countryCode.toUpperCase();
|
||||
}
|
||||
|
||||
function extractBasicAuth(headers: Record<string, string> | undefined): string | undefined {
|
||||
function extractBasicAuth(
|
||||
headers: Record<string, string> | undefined
|
||||
): string | undefined {
|
||||
if (!headers || (!headers.authorization && !headers.Authorization)) {
|
||||
return;
|
||||
}
|
||||
@@ -833,8 +841,9 @@ function extractBasicAuth(headers: Record<string, string> | undefined): string |
|
||||
try {
|
||||
// Extract the base64 encoded credentials
|
||||
return authHeader.slice("Basic ".length);
|
||||
|
||||
} catch (error) {
|
||||
logger.debug("Basic Auth: Failed to decode credentials", { error: error instanceof Error ? error.message : "Unknown error" });
|
||||
logger.debug("Basic Auth: Failed to decode credentials", {
|
||||
error: error instanceof Error ? error.message : "Unknown error"
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
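Note (not part of the diff): a minimal sketch of the client side of the Basic Auth path handled above, assuming a Node 18+ runtime with global fetch; the URL and credentials are made up. The server code slices off the "Basic " prefix and checks the remaining base64 value with verifyPassword against headerAuth.headerAuthHash, caching it on success.

// Hypothetical caller: send the resource's header auth as a standard
// HTTP Basic Authorization header.
const credentials = Buffer.from("user:secret").toString("base64");
const response = await fetch("https://resource.example.com/", {
    headers: { Authorization: `Basic ${credentials}` }
});
// The middleware treats the request as allowed when verification succeeds.
console.log(response.status);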
17
server/routers/billing/types.ts
Normal file
@@ -0,0 +1,17 @@
import { Limit, Subscription, SubscriptionItem, Usage } from "@server/db";

export type GetOrgSubscriptionResponse = {
subscription: Subscription | null;
items: SubscriptionItem[];
};

export type GetOrgUsageResponse = {
usage: Usage[];
limits: Limit[];
};

export type GetOrgTierResponse = {
tier: string | null;
active: boolean;
};
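Usage note (not part of the diff): client pages elsewhere in this commit import these shared types to annotate axios responses. A minimal sketch, with an illustrative endpoint path and a structurally-typed api client:

import { AxiosResponse } from "axios";
import { GetOrgSubscriptionResponse } from "@server/routers/billing/types";

// Hypothetical helper: the endpoint path below is illustrative only.
async function fetchOrgSubscription(
    api: { get<T>(url: string): Promise<AxiosResponse<T>> },
    orgId: string
) {
    const res = await api.get<GetOrgSubscriptionResponse>(`/org/${orgId}/billing/subscription`);
    return res.data.subscription; // null when the org has no subscription
}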
13
server/routers/certificates/types.ts
Normal file
@@ -0,0 +1,13 @@
export type GetCertificateResponse = {
certId: number;
domain: string;
domainId: string;
wildcard: boolean;
status: string; // pending, requested, valid, expired, failed
expiresAt: string | null;
lastRenewalAttempt: Date | null;
createdAt: string;
updatedAt: string;
errorMessage?: string | null;
renewalCount: number;
}
8
server/routers/domain/types.ts
Normal file
@@ -0,0 +1,8 @@
export type CheckDomainAvailabilityResponse = {
available: boolean;
options: {
domainNamespaceId: string;
domainId: string;
fullDomain: string;
}[];
};
@@ -9,7 +9,6 @@ import logger from "@server/logger";
import config from "@server/lib/config";
import { fromError } from "zod-validation-error";
import { getAllowedIps } from "../target/helpers";
import { proxyToRemote } from "@server/lib/remoteProxy";
import { createExitNode } from "#dynamic/routers/gerbil/createExitNode";

// Define Zod schema for request validation

@@ -63,16 +62,6 @@ export async function getConfig(
);
}

// STOP HERE IN HYBRID MODE
if (config.isManagedMode()) {
req.body = {
...req.body,
endpoint: exitNode.endpoint,
listenPort: exitNode.listenPort
};
return proxyToRemote(req, res, next, "hybrid/gerbil/get-config");
}

const configResponse = await generateGerbilConfig(exitNode);

logger.debug("Sending config: ", configResponse);
@@ -6,8 +6,6 @@ import * as badger from "./badger";
import * as auth from "@server/routers/auth";
import * as supporterKey from "@server/routers/supporterKey";
import * as idp from "@server/routers/idp";
import { proxyToRemote } from "@server/lib/remoteProxy";
import config from "@server/lib/config";
import HttpCode from "@server/types/HttpCode";
import {
verifyResourceAccess,

@@ -48,34 +46,11 @@ internalRouter.get("/idp/:idpId", idp.getIdp);
const gerbilRouter = Router();
internalRouter.use("/gerbil", gerbilRouter);

if (config.isManagedMode()) {
// Use proxy router to forward requests to remote cloud server
// Proxy endpoints for each gerbil route
gerbilRouter.post("/receive-bandwidth", (req, res, next) =>
proxyToRemote(req, res, next, "hybrid/gerbil/receive-bandwidth")
);

gerbilRouter.post("/update-hole-punch", (req, res, next) =>
proxyToRemote(req, res, next, "hybrid/gerbil/update-hole-punch")
);

gerbilRouter.post("/get-all-relays", (req, res, next) =>
proxyToRemote(req, res, next, "hybrid/gerbil/get-all-relays")
);

gerbilRouter.post("/get-resolved-hostname", (req, res, next) =>
proxyToRemote(req, res, next, `hybrid/gerbil/get-resolved-hostname`)
);

// GET CONFIG IS HANDLED IN THE ORIGINAL HANDLER
// SO IT CAN REGISTER THE LOCAL EXIT NODE
} else {
// Use local gerbil endpoints
gerbilRouter.post("/receive-bandwidth", gerbil.receiveBandwidth);
gerbilRouter.post("/update-hole-punch", gerbil.updateHolePunch);
gerbilRouter.post("/get-all-relays", gerbil.getAllRelays);
gerbilRouter.post("/get-resolved-hostname", gerbil.getResolvedHostname);
}
// Use local gerbil endpoints
gerbilRouter.post("/receive-bandwidth", gerbil.receiveBandwidth);
gerbilRouter.post("/update-hole-punch", gerbil.updateHolePunch);
gerbilRouter.post("/get-all-relays", gerbil.getAllRelays);
gerbilRouter.post("/get-resolved-hostname", gerbil.getResolvedHostname);

// WE HANDLE THE PROXY INSIDE OF THIS FUNCTION
// SO IT REGISTERS THE EXIT NODE LOCALLY AS WELL

@@ -87,10 +62,4 @@ internalRouter.use("/badger", badgerRouter);

badgerRouter.post("/verify-session", badger.verifyResourceSession);

if (config.isManagedMode()) {
badgerRouter.post("/exchange-session", (req, res, next) =>
proxyToRemote(req, res, next, "hybrid/badger/exchange-session")
);
} else {
badgerRouter.post("/exchange-session", badger.exchangeSession);
}
badgerRouter.post("/exchange-session", badger.exchangeSession);
11
server/routers/loginPage/types.ts
Normal file
@@ -0,0 +1,11 @@
import { LoginPage } from "@server/db";

export type CreateLoginPageResponse = LoginPage;

export type DeleteLoginPageResponse = LoginPage;

export type GetLoginPageResponse = LoginPage;

export type UpdateLoginPageResponse = LoginPage;

export type LoadLoginPageResponse = LoginPage & { orgId: string };
27
server/routers/orgIdp/types.ts
Normal file
@@ -0,0 +1,27 @@
import { Idp, IdpOidcConfig } from "@server/db";

export type CreateOrgIdpResponse = {
idpId: number;
redirectUrl: string;
};

export type GetOrgIdpResponse = {
idp: Idp,
idpOidcConfig: IdpOidcConfig | null,
redirectUrl: string
}

export type ListOrgIdpsResponse = {
idps: {
idpId: number;
orgId: string;
name: string;
type: string;
variant: string;
}[],
pagination: {
total: number;
limit: number;
offset: number;
};
};
34
server/routers/remoteExitNode/types.ts
Normal file
@@ -0,0 +1,34 @@
import { RemoteExitNode } from "@server/db";

export type CreateRemoteExitNodeResponse = {
token: string;
remoteExitNodeId: string;
secret: string;
};

export type PickRemoteExitNodeDefaultsResponse = {
remoteExitNodeId: string;
secret: string;
};

export type QuickStartRemoteExitNodeResponse = {
remoteExitNodeId: string;
secret: string;
};

export type ListRemoteExitNodesResponse = {
remoteExitNodes: {
remoteExitNodeId: string;
dateCreated: string;
version: string | null;
exitNodeId: number | null;
name: string;
address: string;
endpoint: string;
online: boolean;
type: string | null;
}[];
pagination: { total: number; limit: number; offset: number };
};

export type GetRemoteExitNodeResponse = { remoteExitNodeId: string; dateCreated: string; version: string | null; exitNodeId: number | null; name: string; address: string; endpoint: string; online: boolean; type: string | null; }
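Another hedged sketch (not part of the diff): deriving a row shape from ListRemoteExitNodesResponse for a table such as ExitNodesTable; the mapping and field choice here are illustrative only.

import { ListRemoteExitNodesResponse } from "@server/routers/remoteExitNode/types";

// Element type of the list, derived for row rendering.
type RemoteExitNodeListItem = ListRemoteExitNodesResponse["remoteExitNodes"][number];

function toRows(data: ListRemoteExitNodesResponse): { id: string; label: string; online: boolean }[] {
    // Hypothetical mapping: pick a few fields for display.
    return data.remoteExitNodes.map((n: RemoteExitNodeListItem) => ({
        id: n.remoteExitNodeId,
        label: `${n.name} (${n.endpoint})`,
        online: n.online
    }));
}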
@@ -3,7 +3,6 @@ import { eq } from "drizzle-orm";
import { generateRandomString, RandomReader } from "@oslojs/crypto/random";
import moment from "moment";
import logger from "@server/logger";
import config from "@server/lib/config";

const random: RandomReader = {
read(bytes: Uint8Array): void {

@@ -23,11 +22,6 @@ function generateId(length: number): string {
}

export async function ensureSetupToken() {
if (config.isManagedMode()) {
// LETS NOT WORRY ABOUT THE SERVER SECRET WHEN HYBRID
return;
}

try {
// Check if a server admin already exists
const [existingAdmin] = await db
@@ -1,8 +1,6 @@
import { internal } from "@app/lib/api";
import { authCookieHeader } from "@app/lib/api/cookies";
import ProfileIcon from "@app/components/ProfileIcon";
import { verifySession } from "@app/lib/auth/verifySession";
import UserProvider from "@app/providers/UserProvider";
import { GetOrgResponse } from "@server/routers/org";
import { GetOrgUserResponse } from "@server/routers/user";
import { AxiosResponse } from "axios";

@@ -10,7 +8,7 @@ import { redirect } from "next/navigation";
import { cache } from "react";
import SetLastOrgCookie from "@app/components/SetLastOrgCookie";
import SubscriptionStatusProvider from "@app/providers/SubscriptionStatusProvider";
import { GetOrgSubscriptionResponse } from "#private/routers/billing/getOrgSubscription";
import { GetOrgSubscriptionResponse } from "@server/routers/billing/types";
import { pullEnv } from "@app/lib/pullEnv";
import { build } from "@server/build";
@@ -37,7 +37,7 @@ import { InfoPopup } from "@/components/ui/info-popup";
import {
GetOrgSubscriptionResponse,
GetOrgUsageResponse
} from "#private/routers/billing";
} from "@server/routers/billing/types";
import { useTranslations } from "use-intl";
import Link from "next/link";
@@ -9,7 +9,7 @@ import { cache } from "react";
import {
GetOrgSubscriptionResponse,
GetOrgTierResponse
} from "#private/routers/billing";
} from "@server/routers/billing/types";
import { TierId } from "@server/lib/billing/tiers";
import { build } from "@server/build";
@@ -1,5 +1,5 @@
import { internal } from "@app/lib/api";
import { GetRemoteExitNodeResponse } from "#private/routers/remoteExitNode";
import { GetRemoteExitNodeResponse } from "@server/routers/remoteExitNode/types";
import { AxiosResponse } from "axios";
import { redirect } from "next/navigation";
import { authCookieHeader } from "@app/lib/api/cookies";
@@ -30,7 +30,7 @@ import { useEnvContext } from "@app/hooks/useEnvContext";
import {
QuickStartRemoteExitNodeResponse,
PickRemoteExitNodeDefaultsResponse
} from "#private/routers/remoteExitNode";
} from "@server/routers/remoteExitNode/types";
import { toast } from "@app/hooks/useToast";
import { AxiosResponse } from "axios";
import { useParams, useRouter, useSearchParams } from "next/navigation";
@@ -1,6 +1,6 @@
import { internal } from "@app/lib/api";
import { authCookieHeader } from "@app/lib/api/cookies";
import { ListRemoteExitNodesResponse } from "#private/routers/remoteExitNode";
import { ListRemoteExitNodesResponse } from "@server/routers/remoteExitNode/types";
import { AxiosResponse } from "axios";
import ExitNodesTable, { RemoteExitNodeRow } from "./ExitNodesTable";
import SettingsSectionTitle from "@app/components/SettingsSectionTitle";
@@ -117,8 +117,8 @@ export default function ResourceRules(props: {
const [openAddRuleCountrySelect, setOpenAddRuleCountrySelect] = useState(false);
const router = useRouter();
const t = useTranslations();
const env = useEnvContext();
const isMaxmindAvailable = env.env.server.maxmind_db_path && env.env.server.maxmind_db_path.length > 0;
const { env } = useEnvContext();
const isMaxmindAvailable = env.server.maxmind_db_path && env.server.maxmind_db_path.length > 0;

const RuleAction = {
ACCEPT: t('alwaysAllow'),
@@ -53,7 +53,7 @@ import {
CreateSiteResponse,
PickSiteDefaultsResponse
} from "@server/routers/site";
import { ListRemoteExitNodesResponse } from "#private/routers/remoteExitNode";
import { ListRemoteExitNodesResponse } from "@server/routers/remoteExitNode/types";
import { toast } from "@app/hooks/useToast";
import { AxiosResponse } from "axios";
import { useParams, useRouter } from "next/navigation";
@@ -6,13 +6,12 @@ import { verifySession } from "@app/lib/auth/verifySession";
import { redirect } from "next/navigation";
import { pullEnv } from "@app/lib/pullEnv";
import { LoginFormIDP } from "@app/components/LoginForm";
import { ListOrgIdpsResponse } from "#private/routers/orgIdp";
import { ListOrgIdpsResponse } from "@server/routers/orgIdp/types";
import { build } from "@server/build";
import { headers } from "next/headers";
import {
GetLoginPageResponse,
LoadLoginPageResponse
} from "#private/routers/loginPage";
} from "@server/routers/loginPage/types";
import IdpLoginButtons from "@app/components/private/IdpLoginButtons";
import {
Card,

@@ -24,9 +23,9 @@ import {
import { Button } from "@app/components/ui/button";
import Link from "next/link";
import { getTranslations } from "next-intl/server";
import { GetSessionTransferTokenRenponse } from "#private/routers/auth/getSessionTransferToken";
import { GetSessionTransferTokenRenponse } from "@server/routers/auth/types";
import ValidateSessionTransferToken from "@app/components/private/ValidateSessionTransferToken";
import { GetOrgTierResponse } from "#private/routers/billing";
import { GetOrgTierResponse } from "@server/routers/billing/types";
import { TierId } from "@server/lib/billing/tiers";

export const dynamic = "force-dynamic";
@@ -6,7 +6,7 @@ import { AxiosResponse } from "axios";
import { GetIdpResponse } from "@server/routers/idp";
import { getTranslations } from "next-intl/server";
import { pullEnv } from "@app/lib/pullEnv";
import { LoadLoginPageResponse } from "#private/routers/loginPage";
import { LoadLoginPageResponse } from "@server/routers/loginPage/types";
import { redirect } from "next/navigation";

export const dynamic = "force-dynamic";
@@ -15,12 +15,12 @@ import AccessToken from "@app/components/AccessToken";
import { pullEnv } from "@app/lib/pullEnv";
import { LoginFormIDP } from "@app/components/LoginForm";
import { ListIdpsResponse } from "@server/routers/idp";
import { ListOrgIdpsResponse } from "#private/routers/orgIdp";
import { ListOrgIdpsResponse } from "@server/routers/orgIdp/types";
import AutoLoginHandler from "@app/components/AutoLoginHandler";
import { build } from "@server/build";
import { headers } from "next/headers";
import { GetLoginPageResponse } from "#private/routers/loginPage";
import { GetOrgTierResponse } from "#private/routers/billing";
import { GetLoginPageResponse } from "@server/routers/loginPage/types";
import { GetOrgTierResponse } from "@server/routers/billing/types";
import { TierId } from "@server/lib/billing/tiers";

export const dynamic = "force-dynamic";
@@ -32,7 +32,7 @@ import { createApiClient, formatAxiosError } from "@/lib/api";
import { useEnvContext } from "@/hooks/useEnvContext";
import { toast } from "@/hooks/useToast";
import { ListDomainsResponse } from "@server/routers/domain/listDomains";
import { CheckDomainAvailabilityResponse } from "#private/routers/domain/checkDomainNamespaceAvailability";
import { CheckDomainAvailabilityResponse } from "@server/routers/domain/types";
import { AxiosResponse } from "axios";
import { cn } from "@/lib/cn";
import { useTranslations } from "next-intl";
@@ -13,12 +13,14 @@ import {
import { useTranslations } from "next-intl";
import { build } from "@server/build";
import CertificateStatus from "@app/components/private/CertificateStatus";
import { toUnicode } from 'punycode';
import { toUnicode } from "punycode";
import { useEnvContext } from "@app/hooks/useEnvContext";

type ResourceInfoBoxType = {};

export default function ResourceInfoBox({ }: ResourceInfoBoxType) {
export default function ResourceInfoBox({}: ResourceInfoBoxType) {
const { resource, authInfo } = useResourceContext();
const { env } = useEnvContext();

const t = useTranslations();

@@ -28,7 +30,13 @@ export default function ResourceInfoBox({ }: ResourceInfoBoxType) {
<Alert>
<AlertDescription>
{/* 4 cols because of the certs */}
<InfoSections cols={resource.http && build != "oss" ? 4 : 3}>
<InfoSections
cols={
resource.http && env.flags.generateOwnCertificates
? 4
: 3
}
>
{resource.http ? (
<>
<InfoSection>

@@ -37,9 +45,9 @@ export default function ResourceInfoBox({ }: ResourceInfoBoxType) {
</InfoSectionTitle>
<InfoSectionContent>
{authInfo.password ||
authInfo.pincode ||
authInfo.sso ||
authInfo.whitelist ? (
authInfo.pincode ||
authInfo.sso ||
authInfo.whitelist ? (
<div className="flex items-start space-x-2 text-green-500">
<ShieldCheck className="w-4 h-4 mt-0.5" />
<span>{t("protected")}</span>

@@ -126,25 +134,28 @@ export default function ResourceInfoBox({ }: ResourceInfoBoxType) {
{/* </InfoSectionContent> */}
{/* </InfoSection> */}
{/* Certificate Status Column */}
{resource.http && resource.domainId && resource.fullDomain && build != "oss" && (
<InfoSection>
<InfoSectionTitle>
{t("certificateStatus", {
defaultValue: "Certificate"
})}
</InfoSectionTitle>
<InfoSectionContent>
<CertificateStatus
orgId={resource.orgId}
domainId={resource.domainId}
fullDomain={resource.fullDomain}
autoFetch={true}
showLabel={false}
polling={true}
/>
</InfoSectionContent>
</InfoSection>
)}
{resource.http &&
resource.domainId &&
resource.fullDomain &&
build != "oss" && (
<InfoSection>
<InfoSectionTitle>
{t("certificateStatus", {
defaultValue: "Certificate"
})}
</InfoSectionTitle>
<InfoSectionContent>
<CertificateStatus
orgId={resource.orgId}
domainId={resource.domainId}
fullDomain={resource.fullDomain}
autoFetch={true}
showLabel={false}
polling={true}
/>
</InfoSectionContent>
</InfoSection>
)}
<InfoSection>
<InfoSectionTitle>{t("visibility")}</InfoSectionTitle>
<InfoSectionContent>
@@ -1,4 +1,5 @@
"use client";

import { Button } from "@app/components/ui/button";
import { useOrgContext } from "@app/hooks/useOrgContext";
import { toast } from "@app/hooks/useToast";

@@ -30,7 +31,7 @@ import {
SettingsSectionForm
} from "@app/components/Settings";
import { useTranslations } from "next-intl";
import { GetLoginPageResponse } from "#private/routers/loginPage";
import { GetLoginPageResponse } from "@server/routers/loginPage/types";
import { ListDomainsResponse } from "@server/routers/domain";
import { DomainRow } from "@app/components/DomainsTable";
import { toUnicode } from "punycode";

@@ -78,6 +79,7 @@ const AuthPageSettings = forwardRef<AuthPageSettingsRef, AuthPageSettingsProps>(
const api = createApiClient(useEnvContext());
const router = useRouter();
const t = useTranslations();
const { env } = useEnvContext();

const subscription = useSubscriptionStatusContext();

@@ -447,10 +449,21 @@ const AuthPageSettings = forwardRef<AuthPageSettingsRef, AuthPageSettingsProps>(
</div>
</div>

{/* Certificate Status */}
{(build === "enterprise" ||
(build === "saas" &&
subscription?.subscribed)) &&
{!form.watch(
"authPageDomainId"
) && (
<div className="text-sm text-muted-foreground">
{t(
"addDomainToEnableCustomAuthPages"
)}
</div>
)}

{env.flags
.generateOwnCertificates &&
(build === "enterprise" ||
(build === "saas" &&
subscription?.subscribed)) &&
loginPage?.domainId &&
loginPage?.fullDomain &&
!hasUnsavedChanges && (

@@ -469,16 +482,6 @@ const AuthPageSettings = forwardRef<AuthPageSettingsRef, AuthPageSettingsProps>(
polling={true}
/>
)}

{!form.watch(
"authPageDomainId"
) && (
<div className="text-sm text-muted-foreground">
{t(
"addDomainToEnableCustomAuthPages"
)}
</div>
)}
</div>
</form>
</Form>
@@ -8,7 +8,7 @@ import { useEffect, useState } from "react";
import { Alert, AlertDescription } from "@/components/ui/alert";
import { AlertCircle } from "lucide-react";
import { useTranslations } from "next-intl";
import { TransferSessionResponse } from "#private/routers/auth/transferSession";
import { TransferSessionResponse } from "@server/routers/auth/types";

type ValidateSessionTransferTokenParams = {
token: string;
@@ -1,4 +1,4 @@
import { GetRemoteExitNodeResponse } from "#private/routers/remoteExitNode";
import { GetRemoteExitNodeResponse } from "@server/routers/remoteExitNode/types";
import { createContext } from "react";

type RemoteExitNodeContextType = {
@@ -1,4 +1,4 @@
import { GetOrgSubscriptionResponse } from "#private/routers/billing";
import { GetOrgSubscriptionResponse } from "@server/routers/billing/types";
import { createContext } from "react";

type SubscriptionStatusContextType = {
@@ -2,7 +2,7 @@

import { useState, useCallback, useEffect } from "react";
import { AxiosResponse } from "axios";
import { GetCertificateResponse } from "#private/routers/certificates";
import { GetCertificateResponse } from "@server/routers/certificates/types";
import { createApiClient } from "@app/lib/api";
import { useEnvContext } from "@app/hooks/useEnvContext";
@@ -48,7 +48,11 @@ export function pullEnv(): Env {
enableClients:
process.env.FLAGS_ENABLE_CLIENTS === "true" ? true : false,
hideSupporterKey:
process.env.HIDE_SUPPORTER_KEY === "true" ? true : false
process.env.HIDE_SUPPORTER_KEY === "true" ? true : false,
generateOwnCertificates:
process.env.GENERATE_OWN_CERTIFICATES === "true"
? true
: false
},

branding: {
@@ -28,6 +28,7 @@ export type Env = {
disableBasicWireguardSites: boolean;
enableClients: boolean;
hideSupporterKey: boolean;
generateOwnCertificates: boolean;
},
branding: {
appName?: string;
@@ -1,7 +1,7 @@
"use client";

import RemoteExitNodeContext from "@app/contexts/remoteExitNodeContext";
import { GetRemoteExitNodeResponse } from "#private/routers/remoteExitNode";
import { GetRemoteExitNodeResponse } from "@server/routers/remoteExitNode/types";
import { useState } from "react";
import { useTranslations } from "next-intl";
@@ -2,7 +2,7 @@

import SubscriptionStatusContext from "@app/contexts/subscriptionStatusContext";
import { getTierPriceSet, TierId } from "@server/lib/billing/tiers";
import { GetOrgSubscriptionResponse } from "#private/routers/billing";
import { GetOrgSubscriptionResponse } from "@server/routers/billing/types";
import { useState } from "react";
import { build } from "@server/build";