Owen
2025-10-04 18:36:44 -07:00
parent 3123f858bb
commit c2c907852d
320 changed files with 35785 additions and 2984 deletions


@@ -0,0 +1,85 @@
/*
* This file is part of a proprietary work.
*
* Copyright (c) 2025 Fossorial, Inc.
* All rights reserved.
*
* This file is licensed under the Fossorial Commercial License.
* You may not use this file except in compliance with the License.
* Unauthorized use, copying, modification, or distribution is strictly prohibited.
*
* This file is not licensed under the AGPLv3.
*/
import Stripe from "stripe";
export enum FeatureId {
SITE_UPTIME = "siteUptime",
USERS = "users",
EGRESS_DATA_MB = "egressDataMb",
DOMAINS = "domains",
REMOTE_EXIT_NODES = "remoteExitNodes"
}
export const FeatureMeterIds: Record<FeatureId, string> = {
[FeatureId.SITE_UPTIME]: "mtr_61Srrej5wUJuiTWgo41D3Ee2Ir7WmDLU",
[FeatureId.USERS]: "mtr_61SrreISyIWpwUNGR41D3Ee2Ir7WmQro",
[FeatureId.EGRESS_DATA_MB]: "mtr_61Srreh9eWrExDSCe41D3Ee2Ir7Wm5YW",
[FeatureId.DOMAINS]: "mtr_61Ss9nIKDNMw0LDRU41D3Ee2Ir7WmRPU",
[FeatureId.REMOTE_EXIT_NODES]: "mtr_61T86UXnfxTVXy9sD41D3Ee2Ir7WmFTE"
};
export const FeatureMeterIdsSandbox: Record<FeatureId, string> = {
[FeatureId.SITE_UPTIME]: "mtr_test_61Snh3cees4w60gv841DCpkOb237BDEu",
[FeatureId.USERS]: "mtr_test_61Sn5fLtq1gSfRkyA41DCpkOb237B6au",
[FeatureId.EGRESS_DATA_MB]: "mtr_test_61Snh2a2m6qome5Kv41DCpkOb237B3dQ",
[FeatureId.DOMAINS]: "mtr_test_61SsA8qrdAlgPpFRQ41DCpkOb237BGts",
[FeatureId.REMOTE_EXIT_NODES]: "mtr_test_61T86Vqmwa3D9ra3341DCpkOb237B94K"
};
export function getFeatureMeterId(featureId: FeatureId): string {
if (process.env.ENVIRONMENT === "prod" && process.env.SANDBOX_MODE !== "true") {
return FeatureMeterIds[featureId];
} else {
return FeatureMeterIdsSandbox[featureId];
}
}
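// Note: this looks up production meter IDs only; sandbox meter IDs are not searched.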
export function getFeatureIdByMetricId(metricId: string): FeatureId | undefined {
return (Object.entries(FeatureMeterIds) as [FeatureId, string][])
.find(([_, v]) => v === metricId)?.[0];
}
export type FeaturePriceSet = {
[key in FeatureId]: string;
};
export const standardFeaturePriceSet: FeaturePriceSet = { // Free tier matches the freeLimitSet
[FeatureId.SITE_UPTIME]: "price_1RrQc4D3Ee2Ir7WmaJGZ3MtF",
[FeatureId.USERS]: "price_1RrQeJD3Ee2Ir7WmgveP3xea",
[FeatureId.EGRESS_DATA_MB]: "price_1RrQXFD3Ee2Ir7WmvGDlgxQk",
[FeatureId.DOMAINS]: "price_1Rz3tMD3Ee2Ir7Wm5qLeASzC",
[FeatureId.REMOTE_EXIT_NODES]: "price_1S46weD3Ee2Ir7Wm94KEHI4h"
};
export const standardFeaturePriceSetSandbox: FeaturePriceSet = { // Free tier matches the freeLimitSet
[FeatureId.SITE_UPTIME]: "price_1RefFBDCpkOb237BPrKZ8IEU",
[FeatureId.USERS]: "price_1ReNa4DCpkOb237Bc67G5muF",
[FeatureId.EGRESS_DATA_MB]: "price_1Rfp9LDCpkOb237BwuN5Oiu0",
[FeatureId.DOMAINS]: "price_1Ryi88DCpkOb237B2D6DM80b",
[FeatureId.REMOTE_EXIT_NODES]: "price_1RyiZvDCpkOb237BXpmoIYJL"
};
export function getStandardFeaturePriceSet(): FeaturePriceSet {
if (process.env.ENVIRONMENT === "prod" && process.env.SANDBOX_MODE !== "true") {
return standardFeaturePriceSet;
} else {
return standardFeaturePriceSetSandbox;
}
}
export function getLineItems(featurePriceSet: FeaturePriceSet): Stripe.Checkout.SessionCreateParams.LineItem[] {
return Object.values(featurePriceSet).map((priceId) => ({
price: priceId
}));
}
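A minimal usage sketch, not part of the commit: the per-feature prices become metered line items on a Stripe Checkout session. Assumes an initialized Stripe client and a placeholder customer ID; run inside an async function.

    // `stripeClient` and the customer ID are illustrative assumptions.
    const session = await stripeClient.checkout.sessions.create({
        mode: "subscription",
        customer: "cus_placeholder123",
        // Metered prices take no quantity, so getLineItems only sets `price`.
        line_items: getLineItems(getStandardFeaturePriceSet()),
        success_url: "https://example.com/billing/success",
        cancel_url: "https://example.com/billing/cancel"
    });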


@@ -0,0 +1,16 @@
/*
* This file is part of a proprietary work.
*
* Copyright (c) 2025 Fossorial, Inc.
* All rights reserved.
*
* This file is licensed under the Fossorial Commercial License.
* You may not use this file except in compliance with the License.
* Unauthorized use, copying, modification, or distribution is strictly prohibited.
*
* This file is not licensed under the AGPLv3.
*/
export * from "./limitSet";
export * from "./features";
export * from "./limitsService";


@@ -0,0 +1,63 @@
/*
* This file is part of a proprietary work.
*
* Copyright (c) 2025 Fossorial, Inc.
* All rights reserved.
*
* This file is licensed under the Fossorial Commercial License.
* You may not use this file except in compliance with the License.
* Unauthorized use, copying, modification, or distribution is strictly prohibited.
*
* This file is not licensed under the AGPLv3.
*/
import { FeatureId } from "./features";
export type LimitSet = {
[key in FeatureId]: {
value: number | null; // null indicates no limit
description?: string;
};
};
export const sandboxLimitSet: LimitSet = {
[FeatureId.SITE_UPTIME]: { value: 2880, description: "Sandbox limit" }, // 1 site up for 2 days
[FeatureId.USERS]: { value: 1, description: "Sandbox limit" },
[FeatureId.EGRESS_DATA_MB]: { value: 1000, description: "Sandbox limit" }, // 1 GB
[FeatureId.DOMAINS]: { value: 0, description: "Sandbox limit" },
[FeatureId.REMOTE_EXIT_NODES]: { value: 0, description: "Sandbox limit" },
};
export const freeLimitSet: LimitSet = {
[FeatureId.SITE_UPTIME]: { value: 46080, description: "Free tier limit" }, // 1 site up for 32 days
[FeatureId.USERS]: { value: 3, description: "Free tier limit" },
[FeatureId.EGRESS_DATA_MB]: {
value: 25000,
description: "Free tier limit"
}, // 25 GB
[FeatureId.DOMAINS]: { value: 3, description: "Free tier limit" },
[FeatureId.REMOTE_EXIT_NODES]: { value: 1, description: "Free tier limit" }
};
export const subscribedLimitSet: LimitSet = {
[FeatureId.SITE_UPTIME]: {
value: 2232000,
description: "Contact us to increase soft limit.",
}, // 50 sites up for 31 days
[FeatureId.USERS]: {
value: 150,
description: "Contact us to increase soft limit."
},
[FeatureId.EGRESS_DATA_MB]: {
value: 12000000,
description: "Contact us to increase soft limit."
}, // 12000 GB
[FeatureId.DOMAINS]: {
value: 20,
description: "Contact us to increase soft limit."
},
[FeatureId.REMOTE_EXIT_NODES]: {
value: 5,
description: "Contact us to increase soft limit."
}
};
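Judging by the inline comments, the uptime values are site-minutes (sites × days × 1440) and egress values are MB. A quick sanity check of the arithmetic behind the comments above, as a sketch:

    // Assuming site uptime is tracked in site-minutes:
    const MINUTES_PER_DAY = 24 * 60; // 1440
    console.log(1 * 2 * MINUTES_PER_DAY === 2880);       // sandbox: 1 site for 2 days
    console.log(1 * 32 * MINUTES_PER_DAY === 46080);     // free: 1 site for 32 days
    console.log(50 * 31 * MINUTES_PER_DAY === 2232000);  // subscribed: 50 sites for 31 days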


@@ -0,0 +1,51 @@
/*
* This file is part of a proprietary work.
*
* Copyright (c) 2025 Fossorial, Inc.
* All rights reserved.
*
* This file is licensed under the Fossorial Commercial License.
* You may not use this file except in compliance with the License.
* Unauthorized use, copying, modification, or distribution is strictly prohibited.
*
* This file is not licensed under the AGPLv3.
*/
import { db, limits } from "@server/db";
import { and, eq } from "drizzle-orm";
import { LimitSet } from "./limitSet";
import { FeatureId } from "./features";
class LimitService {
async applyLimitSetToOrg(orgId: string, limitSet: LimitSet): Promise<void> {
const limitEntries = Object.entries(limitSet);
// delete existing limits for the org
await db.transaction(async (trx) => {
await trx.delete(limits).where(eq(limits.orgId, orgId));
for (const [featureId, entry] of limitEntries) {
const limitId = `${orgId}-${featureId}`;
const { value, description } = entry;
await trx
.insert(limits)
.values({ limitId, orgId, featureId, value, description });
}
});
}
async getOrgLimit(
orgId: string,
featureId: FeatureId
): Promise<number | null> {
const limitId = `${orgId}-${featureId}`;
const [limit] = await db
.select()
.from(limits)
.where(eq(limits.limitId, limitId))
.limit(1);
return limit ? limit.value : null;
}
}
export const limitsService = new LimitService();
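A hedged usage sketch (the org ID is hypothetical): apply the free tier, then read one limit back.

    // Replaces any existing limit rows for the org, then reads a single value.
    await limitsService.applyLimitSetToOrg("org_abc123", freeLimitSet);
    // Returns 3 for FeatureId.USERS under freeLimitSet.
    const userLimit = await limitsService.getOrgLimit("org_abc123", FeatureId.USERS);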


@@ -0,0 +1,37 @@
/*
* This file is part of a proprietary work.
*
* Copyright (c) 2025 Fossorial, Inc.
* All rights reserved.
*
* This file is licensed under the Fossorial Commercial License.
* You may not use this file except in compliance with the License.
* Unauthorized use, copying, modification, or distribution is strictly prohibited.
*
* This file is not licensed under the AGPLv3.
*/
export enum TierId {
STANDARD = "standard",
}
export type TierPriceSet = {
[key in TierId]: string;
};
export const tierPriceSet: TierPriceSet = { // Free tier matches the freeLimitSet
[TierId.STANDARD]: "price_1RrQ9cD3Ee2Ir7Wmqdy3KBa0",
};
export const tierPriceSetSandbox: TierPriceSet = { // Free tier matches the freeLimitSet
// When matching a tier, keys closer to index 0 are checked first, so list tiers in descending order of value.
[TierId.STANDARD]: "price_1RrAYJDCpkOb237By2s1P32m",
};
export function getTierPriceSet(environment?: string, sandboxMode?: boolean): TierPriceSet {
// This module is loaded both client side and server side, so check the
// explicit arguments in addition to the server environment variables.
const isProd =
(process.env.ENVIRONMENT === "prod" && process.env.SANDBOX_MODE !== "true") ||
(environment === "prod" && sandboxMode !== true);
return isProd ? tierPriceSet : tierPriceSetSandbox;
}
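Since this runs on both client and server (per the comment above), server code can rely on the environment variables while client code passes the values explicitly. A sketch with hypothetical values:

    // Server side: environment variables decide prod vs sandbox.
    const serverPrices = getTierPriceSet();
    // Client side: pass values rendered into the page by the server.
    const clientPrices = getTierPriceSet("prod", false);
    const standardPriceId = clientPrices[TierId.STANDARD];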


@@ -0,0 +1,889 @@
/*
* This file is part of a proprietary work.
*
* Copyright (c) 2025 Fossorial, Inc.
* All rights reserved.
*
* This file is licensed under the Fossorial Commercial License.
* You may not use this file except in compliance with the License.
* Unauthorized use, copying, modification, or distribution is strictly prohibited.
*
* This file is not licensed under the AGPLv3.
*/
import { eq, sql, and } from "drizzle-orm";
import NodeCache from "node-cache";
import { v4 as uuidv4 } from "uuid";
import { PutObjectCommand } from "@aws-sdk/client-s3";
import { s3Client } from "../s3";
import * as fs from "fs/promises";
import * as path from "path";
import {
db,
usage,
customers,
sites,
newts,
limits,
Usage,
Limit,
Transaction
} from "@server/db";
import { FeatureId, getFeatureMeterId } from "./features";
import config from "@server/lib/config";
import logger from "@server/logger";
import { sendToClient } from "@server/routers/ws";
import { build } from "@server/build";
interface StripeEvent {
identifier?: string;
timestamp: number;
event_name: string;
payload: {
value: number;
stripe_customer_id: string;
};
}
export class UsageService {
private cache: NodeCache;
private bucketName: string | undefined;
private currentEventFile: string | null = null;
private currentFileStartTime: number = 0;
private eventsDir: string | undefined;
private uploadingFiles: Set<string> = new Set();
constructor() {
this.cache = new NodeCache({ stdTTL: 300 }); // 5 minute TTL
if (build !== "saas") {
return;
}
this.bucketName = config.getRawPrivateConfig().stripe?.s3Bucket;
this.eventsDir = config.getRawPrivateConfig().stripe?.localFilePath;
// Ensure events directory exists
this.initializeEventsDirectory().then(() => {
this.uploadPendingEventFilesOnStartup();
});
// Periodically check for old event files to upload
setInterval(() => {
this.uploadOldEventFiles().catch((err) => {
logger.error("Error in periodic event file upload:", err);
});
}, 30000); // every 30 seconds
}
/**
* Round a number to 11 decimal places to avoid floating-point precision
* issues (this rounds rather than truncates, despite the method name)
*/
private truncateValue(value: number): number {
return Math.round(value * 100000000000) / 100000000000; // 11 decimal places
}
private async initializeEventsDirectory(): Promise<void> {
if (!this.eventsDir) {
logger.warn("Stripe local file path is not configured, skipping events directory initialization.");
return;
}
try {
await fs.mkdir(this.eventsDir, { recursive: true });
} catch (error) {
logger.error("Failed to create events directory:", error);
}
}
private async uploadPendingEventFilesOnStartup(): Promise<void> {
if (!this.eventsDir || !this.bucketName) {
logger.warn("Stripe local file path or bucket name is not configured, skipping leftover event file upload.");
return;
}
try {
const files = await fs.readdir(this.eventsDir);
for (const file of files) {
if (file.endsWith(".json")) {
const filePath = path.join(this.eventsDir, file);
try {
const fileContent = await fs.readFile(
filePath,
"utf-8"
);
const events = JSON.parse(fileContent);
if (Array.isArray(events) && events.length > 0) {
// Upload to S3
const uploadCommand = new PutObjectCommand({
Bucket: this.bucketName,
Key: file,
Body: fileContent,
ContentType: "application/json"
});
await s3Client.send(uploadCommand);
// Check if file still exists before unlinking
try {
await fs.access(filePath);
await fs.unlink(filePath);
} catch (unlinkError) {
logger.debug(`Startup file ${file} was already deleted`);
}
logger.info(
`Uploaded leftover event file ${file} to S3 with ${events.length} events`
);
} else {
// Remove empty file
try {
await fs.access(filePath);
await fs.unlink(filePath);
} catch (unlinkError) {
logger.debug(`Empty startup file ${file} was already deleted`);
}
}
} catch (err) {
logger.error(
`Error processing leftover event file ${file}:`,
err
);
}
}
}
} catch (err) {
logger.error("Failed to scan for leftover event files:", err);
}
}
public async add(
orgId: string,
featureId: FeatureId,
value: number,
transaction: Transaction | null = null
): Promise<Usage | null> {
if (build !== "saas") {
return null;
}
// Truncate value to 11 decimal places
value = this.truncateValue(value);
// Implement retry logic for deadlock handling
const maxRetries = 3;
let attempt = 0;
while (attempt <= maxRetries) {
try {
// Get subscription data for this org (with caching)
const customerId = await this.getCustomerId(orgId, featureId);
if (!customerId) {
logger.warn(
`No subscription data found for org ${orgId} and feature ${featureId}`
);
return null;
}
// Use a distinct name so we don't shadow the imported `usage` table
let newUsage: Usage | undefined;
if (transaction) {
newUsage = await this.internalAddUsage(
orgId,
featureId,
value,
transaction
);
} else {
await db.transaction(async (trx) => {
newUsage = await this.internalAddUsage(orgId, featureId, value, trx);
});
}
// Log event for Stripe
await this.logStripeEvent(featureId, value, customerId);
return newUsage || null;
} catch (error: any) {
// Check if this is a deadlock error
const isDeadlock = error?.code === '40P01' ||
error?.cause?.code === '40P01' ||
(error?.message && error.message.includes('deadlock'));
if (isDeadlock && attempt < maxRetries) {
attempt++;
// Exponential backoff with jitter: 50-100ms, 100-200ms, 200-400ms
const baseDelay = Math.pow(2, attempt - 1) * 50;
const jitter = Math.random() * baseDelay;
const delay = baseDelay + jitter;
logger.warn(
`Deadlock detected for ${orgId}/${featureId}, retrying attempt ${attempt}/${maxRetries} after ${delay.toFixed(0)}ms`
);
await new Promise(resolve => setTimeout(resolve, delay));
continue;
}
logger.error(
`Failed to add usage for ${orgId}/${featureId} after ${attempt} attempts:`,
error
);
break;
}
}
return null;
}
private async internalAddUsage(
orgId: string,
featureId: FeatureId,
value: number,
trx: Transaction
): Promise<Usage> {
// Truncate value to 11 decimal places
value = this.truncateValue(value);
const usageId = `${orgId}-${featureId}`;
const meterId = getFeatureMeterId(featureId);
// Use upsert: insert if not exists, otherwise increment
const [returnUsage] = await trx
.insert(usage)
.values({
usageId,
featureId,
orgId,
meterId,
latestValue: value,
updatedAt: Math.floor(Date.now() / 1000)
})
.onConflictDoUpdate({
target: usage.usageId,
set: {
latestValue: sql`${usage.latestValue} + ${value}`
}
}).returning();
return returnUsage;
}
// Helper function to get today's date as string (YYYY-MM-DD)
getTodayDateString(): string {
return new Date().toISOString().split("T")[0];
}
// Helper function to get a date string (YYYY-MM-DD) from a unix timestamp in seconds
getDateString(date: number): string {
return new Date(date * 1000).toISOString().split("T")[0];
}
async updateDaily(
orgId: string,
featureId: FeatureId,
value?: number,
customerId?: string
): Promise<void> {
if (build !== "saas") {
return;
}
try {
if (!customerId) {
customerId =
(await this.getCustomerId(orgId, featureId)) || undefined;
if (!customerId) {
logger.warn(
`No subscription data found for org ${orgId} and feature ${featureId}`
);
return;
}
}
// Truncate value to 11 decimal places if provided
if (value !== undefined && value !== null) {
value = this.truncateValue(value);
}
const today = this.getTodayDateString();
let currentUsage: Usage | null = null;
await db.transaction(async (trx) => {
// Get existing meter record
const usageId = `${orgId}-${featureId}`;
// Get current usage record
[currentUsage] = await trx
.select()
.from(usage)
.where(eq(usage.usageId, usageId))
.limit(1);
if (currentUsage) {
const lastUpdateDate = this.getDateString(
currentUsage.updatedAt
);
const currentRunningTotal = currentUsage.latestValue;
const lastDailyValue = currentUsage.instantaneousValue || 0;
if (value === undefined || value === null) {
value = currentUsage.instantaneousValue || 0;
}
// Same-day update: swap the previous daily value out of the running
// total. New day: add the new daily value on top of the running total.
const newRunningTotal = this.truncateValue(
lastUpdateDate === today
? currentRunningTotal - lastDailyValue + value
: currentRunningTotal + value
);
await trx
.update(usage)
.set({
latestValue: newRunningTotal,
instantaneousValue: value,
updatedAt: Math.floor(Date.now() / 1000)
})
.where(eq(usage.usageId, usageId));
} else {
// First record for this meter
const meterId = getFeatureMeterId(featureId);
const truncatedValue = this.truncateValue(value || 0);
await trx.insert(usage).values({
usageId,
featureId,
orgId,
meterId,
instantaneousValue: truncatedValue,
latestValue: truncatedValue,
updatedAt: Math.floor(Date.now() / 1000)
});
}
});
await this.logStripeEvent(featureId, value || 0, customerId);
} catch (error) {
logger.error(
`Failed to update daily usage for ${orgId}/${featureId}:`,
error
);
}
}
private async getCustomerId(
orgId: string,
featureId: FeatureId
): Promise<string | null> {
const cacheKey = `customer_${orgId}_${featureId}`;
const cached = this.cache.get<string>(cacheKey);
if (cached) {
return cached;
}
try {
// Query subscription data
const [customer] = await db
.select({
customerId: customers.customerId
})
.from(customers)
.where(eq(customers.orgId, orgId))
.limit(1);
if (!customer) {
return null;
}
const customerId = customer.customerId;
// Cache the result
this.cache.set(cacheKey, customerId);
return customerId;
} catch (error) {
logger.error(
`Failed to get subscription data for ${orgId}/${featureId}:`,
error
);
return null;
}
}
private async logStripeEvent(
featureId: FeatureId,
value: number,
customerId: string
): Promise<void> {
// Truncate value to 11 decimal places before sending to Stripe
const truncatedValue = this.truncateValue(value);
const event: StripeEvent = {
identifier: uuidv4(),
timestamp: Math.floor(new Date().getTime() / 1000),
event_name: featureId,
payload: {
value: truncatedValue,
stripe_customer_id: customerId
}
};
await this.writeEventToFile(event);
await this.checkAndUploadFile();
}
private async writeEventToFile(event: StripeEvent): Promise<void> {
if (!this.eventsDir || !this.bucketName) {
logger.warn("Stripe local file path or bucket name is not configured, skipping event file write.");
return;
}
if (!this.currentEventFile) {
this.currentEventFile = this.generateEventFileName();
this.currentFileStartTime = Date.now();
}
const filePath = path.join(this.eventsDir, this.currentEventFile);
try {
let events: StripeEvent[] = [];
// Try to read existing file
try {
const fileContent = await fs.readFile(filePath, "utf-8");
events = JSON.parse(fileContent);
} catch (error) {
// File doesn't exist or is empty, start with empty array
events = [];
}
// Add new event
events.push(event);
// Write back to file
await fs.writeFile(filePath, JSON.stringify(events, null, 2));
} catch (error) {
logger.error("Failed to write event to file:", error);
}
}
private async checkAndUploadFile(): Promise<void> {
if (!this.currentEventFile) {
return;
}
const now = Date.now();
const fileAge = now - this.currentFileStartTime;
// Upload once the current file is at least 60 seconds old
if (fileAge >= 60000) {
await this.uploadFileToS3();
}
}
private async uploadFileToS3(): Promise<void> {
if (!this.bucketName || !this.eventsDir) {
logger.warn("Stripe local file path or bucket name is not configured, skipping S3 upload.");
return;
}
if (!this.currentEventFile) {
return;
}
const fileName = this.currentEventFile;
const filePath = path.join(this.eventsDir, fileName);
// Check if this file is already being uploaded
if (this.uploadingFiles.has(fileName)) {
logger.debug(`File ${fileName} is already being uploaded, skipping`);
return;
}
// Mark file as being uploaded
this.uploadingFiles.add(fileName);
try {
// Check if file exists before trying to read it
try {
await fs.access(filePath);
} catch (error) {
logger.debug(`File ${fileName} does not exist, may have been already processed`);
this.uploadingFiles.delete(fileName);
// Reset current file if it was this file
if (this.currentEventFile === fileName) {
this.currentEventFile = null;
this.currentFileStartTime = 0;
}
return;
}
// Check if file exists and has content
const fileContent = await fs.readFile(filePath, "utf-8");
const events = JSON.parse(fileContent);
if (events.length === 0) {
// No events to upload, just clean up
try {
await fs.unlink(filePath);
} catch (unlinkError) {
// File may have been already deleted
logger.debug(`File ${fileName} was already deleted during cleanup`);
}
this.currentEventFile = null;
this.uploadingFiles.delete(fileName);
return;
}
// Upload to S3
const uploadCommand = new PutObjectCommand({
Bucket: this.bucketName,
Key: fileName,
Body: fileContent,
ContentType: "application/json"
});
await s3Client.send(uploadCommand);
// Clean up local file - check if it still exists before unlinking
try {
await fs.access(filePath);
await fs.unlink(filePath);
} catch (unlinkError) {
// File may have been already deleted by another process
logger.debug(`File ${fileName} was already deleted during upload`);
}
logger.info(
`Uploaded ${fileName} to S3 with ${events.length} events`
);
// Reset for next file
this.currentEventFile = null;
this.currentFileStartTime = 0;
} catch (error) {
logger.error(
`Failed to upload ${fileName} to S3:`,
error
);
} finally {
// Always remove from uploading set
this.uploadingFiles.delete(fileName);
}
}
private generateEventFileName(): string {
const timestamp = new Date().toISOString().replace(/[:.]/g, "-");
const uuid = uuidv4().substring(0, 8);
return `events-${timestamp}-${uuid}.json`;
}
public async getUsage(
orgId: string,
featureId: FeatureId
): Promise<Usage | null> {
if (build !== "saas") {
return null;
}
const usageId = `${orgId}-${featureId}`;
try {
const [result] = await db
.select()
.from(usage)
.where(eq(usage.usageId, usageId))
.limit(1);
if (!result) {
// Create one if it doesn't exist, using an upsert to handle race conditions
logger.info(
`Creating new usage record for ${orgId}/${featureId}`
);
const meterId = getFeatureMeterId(featureId);
try {
const [newUsage] = await db
.insert(usage)
.values({
usageId,
featureId,
orgId,
meterId,
latestValue: 0,
updatedAt: Math.floor(Date.now() / 1000)
})
.onConflictDoNothing()
.returning();
if (newUsage) {
return newUsage;
} else {
// Record was created by another process, fetch it
const [existingUsage] = await db
.select()
.from(usage)
.where(eq(usage.usageId, usageId))
.limit(1);
return existingUsage || null;
}
} catch (insertError) {
// Fallback: try to fetch existing record in case of any insert issues
logger.warn(
`Insert failed for ${orgId}/${featureId}, attempting to fetch existing record:`,
insertError
);
const [existingUsage] = await db
.select()
.from(usage)
.where(eq(usage.usageId, usageId))
.limit(1);
return existingUsage || null;
}
}
return result;
} catch (error) {
logger.error(
`Failed to get usage for ${orgId}/${featureId}:`,
error
);
throw error;
}
}
public async getUsageDaily(
orgId: string,
featureId: FeatureId
): Promise<Usage | null> {
if (build !== "saas") {
return null;
}
await this.updateDaily(orgId, featureId); // Ensure daily usage is updated
return this.getUsage(orgId, featureId);
}
public async forceUpload(): Promise<void> {
await this.uploadFileToS3();
}
public clearCache(): void {
this.cache.flushAll();
}
/**
* Scan the events directory for files older than 90 seconds and upload any that are not empty.
*/
private async uploadOldEventFiles(): Promise<void> {
if (!this.eventsDir || !this.bucketName) {
logger.warn("Stripe local file path or bucket name is not configured, skipping old event file upload.");
return;
}
try {
const files = await fs.readdir(this.eventsDir);
const now = Date.now();
for (const file of files) {
if (!file.endsWith(".json")) continue;
// Skip files that are already being uploaded
if (this.uploadingFiles.has(file)) {
logger.debug(`Skipping file ${file} as it's already being uploaded`);
continue;
}
const filePath = path.join(this.eventsDir, file);
try {
// Check if file still exists before processing
try {
await fs.access(filePath);
} catch (accessError) {
logger.debug(`File ${file} does not exist, skipping`);
continue;
}
const stat = await fs.stat(filePath);
const age = now - stat.mtimeMs;
if (age >= 90000) {
// File is at least 90 seconds old; mark it as being uploaded
this.uploadingFiles.add(file);
try {
const fileContent = await fs.readFile(
filePath,
"utf-8"
);
const events = JSON.parse(fileContent);
if (Array.isArray(events) && events.length > 0) {
// Upload to S3
const uploadCommand = new PutObjectCommand({
Bucket: this.bucketName,
Key: file,
Body: fileContent,
ContentType: "application/json"
});
await s3Client.send(uploadCommand);
// Check if file still exists before unlinking
try {
await fs.access(filePath);
await fs.unlink(filePath);
} catch (unlinkError) {
logger.debug(`File ${file} was already deleted during interval upload`);
}
logger.info(
`Interval: Uploaded event file ${file} to S3 with ${events.length} events`
);
// If this was the current event file, reset it
if (this.currentEventFile === file) {
this.currentEventFile = null;
this.currentFileStartTime = 0;
}
} else {
// Remove empty file
try {
await fs.access(filePath);
await fs.unlink(filePath);
} catch (unlinkError) {
logger.debug(`Empty file ${file} was already deleted`);
}
}
} finally {
// Always remove from uploading set
this.uploadingFiles.delete(file);
}
}
} catch (err) {
logger.error(
`Interval: Error processing event file ${file}:`,
err
);
// Remove from uploading set on error
this.uploadingFiles.delete(file);
}
}
} catch (err) {
logger.error("Interval: Failed to scan for event files:", err);
}
}
public async checkLimitSet(orgId: string, kickSites = false, featureId?: FeatureId, usage?: Usage): Promise<boolean> {
if (build !== "saas") {
return false;
}
// Check current usage against the limits set for the organization and,
// when kickSites is true, disconnect all of the org's sites on any breach
let hasExceededLimits = false;
try {
let orgLimits: Limit[] = [];
if (featureId) {
// Get all limits set for this organization
orgLimits = await db
.select()
.from(limits)
.where(
and(
eq(limits.orgId, orgId),
eq(limits.featureId, featureId)
)
);
} else {
// Get all limits set for this organization
orgLimits = await db
.select()
.from(limits)
.where(eq(limits.orgId, orgId));
}
if (orgLimits.length === 0) {
logger.debug(`No limits set for org ${orgId}`);
return false;
}
// Check each limit against current usage
for (const limit of orgLimits) {
let currentUsage: Usage | null;
if (usage) {
currentUsage = usage;
} else {
currentUsage = await this.getUsage(orgId, limit.featureId as FeatureId);
}
const usageValue = currentUsage?.instantaneousValue || currentUsage?.latestValue || 0;
logger.debug(`Current usage for org ${orgId} on feature ${limit.featureId}: ${usageValue}`);
logger.debug(`Limit for org ${orgId} on feature ${limit.featureId}: ${limit.value}`);
if (currentUsage && limit.value !== null && usageValue > limit.value) {
logger.debug(
`Org ${orgId} has exceeded limit for ${limit.featureId}: ` +
`${usageValue} > ${limit.value}`
);
hasExceededLimits = true;
break; // Exit early if any limit is exceeded
}
}
// If any limits are exceeded, disconnect all sites for this organization
if (hasExceededLimits && kickSites) {
logger.warn(`Disconnecting all sites for org ${orgId} due to exceeded limits`);
// Get all sites for this organization
const orgSites = await db
.select()
.from(sites)
.where(eq(sites.orgId, orgId));
// Send termination messages to the org's newt sites
const siteIds = orgSites.map(site => site.siteId);
if (siteIds.length > 0) {
// Send termination messages to newt sites
for (const site of orgSites) {
if (site.type === "newt") {
const [newt] = await db
.select()
.from(newts)
.where(eq(newts.siteId, site.siteId))
.limit(1);
if (newt) {
const payload = {
type: "newt/wg/terminate",
data: {
reason: "Usage limits exceeded"
}
};
// Don't await to prevent blocking
sendToClient(newt.newtId, payload).catch((error: any) => {
logger.error(
`Failed to send termination message to newt ${newt.newtId}:`,
error
);
});
}
}
}
logger.info(`Disconnected ${orgSites.length} sites for org ${orgId} due to exceeded limits`);
}
}
} catch (error) {
logger.error(`Error checking limits for org ${orgId}:`, error);
}
return hasExceededLimits;
}
}
export const usageService = new UsageService();
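A usage sketch tying the service's main entry points together (org ID and values hypothetical):

    // Meter 512 MB of egress; deadlocks are retried internally.
    await usageService.add("org_abc123", FeatureId.EGRESS_DATA_MB, 512);
    // Report today's instantaneous user count (same-day calls replace the value).
    await usageService.updateDaily("org_abc123", FeatureId.USERS, 5);
    // Check usage against limits; kickSites=true disconnects sites on any breach.
    const exceeded = await usageService.checkLimitSet("org_abc123", true);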


@@ -0,0 +1,206 @@
/*
* This file is part of a proprietary work.
*
* Copyright (c) 2025 Fossorial, Inc.
* All rights reserved.
*
* This file is licensed under the Fossorial Commercial License.
* You may not use this file except in compliance with the License.
* Unauthorized use, copying, modification, or distribution is strictly prohibited.
*
* This file is not licensed under the AGPLv3.
*/
import { isValidCIDR } from "@server/lib/validators";
import { getNextAvailableOrgSubnet } from "@server/lib/ip";
import {
actions,
apiKeyOrg,
apiKeys,
db,
domains,
Org,
orgDomains,
orgs,
roleActions,
roles,
userOrgs
} from "@server/db";
import { eq } from "drizzle-orm";
import { defaultRoleAllowedActions } from "@server/routers/role";
import { FeatureId, limitsService, sandboxLimitSet } from "@server/lib/private/billing";
import { createCustomer } from "@server/routers/private/billing/createCustomer";
import { usageService } from "@server/lib/private/billing/usageService";
export async function createUserAccountOrg(
userId: string,
userEmail: string
): Promise<{
success: boolean;
org?: {
orgId: string;
name: string;
subnet: string;
};
error?: string;
}> {
// const subnet = await getNextAvailableOrgSubnet();
const orgId = "org_" + userId;
const name = `${userEmail}'s Organization`;
// if (!isValidCIDR(subnet)) {
// return {
// success: false,
// error: "Invalid subnet format. Please provide a valid CIDR notation."
// };
// }
// // make sure the subnet is unique
// const subnetExists = await db
// .select()
// .from(orgs)
// .where(eq(orgs.subnet, subnet))
// .limit(1);
// if (subnetExists.length > 0) {
// return { success: false, error: `Subnet ${subnet} already exists` };
// }
// make sure the orgId is unique
const orgExists = await db
.select()
.from(orgs)
.where(eq(orgs.orgId, orgId))
.limit(1);
if (orgExists.length > 0) {
return {
success: false,
error: `Organization with ID ${orgId} already exists`
};
}
let error = "";
let org: Org | null = null;
await db.transaction(async (trx) => {
const allDomains = await trx
.select()
.from(domains)
.where(eq(domains.configManaged, true));
const newOrg = await trx
.insert(orgs)
.values({
orgId,
name,
// subnet
subnet: "100.90.128.0/24", // TODO: this should not be hardcoded - or can it be the same in all orgs?
createdAt: new Date().toISOString()
})
.returning();
if (newOrg.length === 0) {
error = "Failed to create organization";
trx.rollback();
return;
}
org = newOrg[0];
// Create admin role within the same transaction
const [insertedRole] = await trx
.insert(roles)
.values({
orgId: newOrg[0].orgId,
isAdmin: true,
name: "Admin",
description: "Admin role with the most permissions"
})
.returning({ roleId: roles.roleId });
if (!insertedRole || !insertedRole.roleId) {
error = "Failed to create Admin role";
trx.rollback();
return;
}
const roleId = insertedRole.roleId;
// Get all actions and create role actions
const actionIds = await trx.select().from(actions).execute();
if (actionIds.length > 0) {
await trx.insert(roleActions).values(
actionIds.map((action) => ({
roleId,
actionId: action.actionId,
orgId: newOrg[0].orgId
}))
);
}
if (allDomains.length) {
await trx.insert(orgDomains).values(
allDomains.map((domain) => ({
orgId: newOrg[0].orgId,
domainId: domain.domainId
}))
);
}
await trx.insert(userOrgs).values({
userId,
orgId: newOrg[0].orgId,
roleId: roleId,
isOwner: true
});
const memberRole = await trx
.insert(roles)
.values({
name: "Member",
description: "Members can only view resources",
orgId
})
.returning();
await trx.insert(roleActions).values(
defaultRoleAllowedActions.map((action) => ({
roleId: memberRole[0].roleId,
actionId: action,
orgId
}))
);
});
await limitsService.applyLimitSetToOrg(orgId, sandboxLimitSet);
if (!org) {
return { success: false, error: "Failed to create org" };
}
if (error) {
return {
success: false,
error: `Failed to create org: ${error}`
};
}
// make sure we have the stripe customer
const customerId = await createCustomer(orgId, userEmail);
if (customerId) {
await usageService.updateDaily(orgId, FeatureId.USERS, 1, customerId); // Only 1 because we are creating the org
}
return {
org: {
orgId,
name,
// subnet
subnet: "100.90.128.0/24"
},
success: true
};
}
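A call-site sketch (IDs hypothetical):

    const result = await createUserAccountOrg("user_abc123", "owner@example.com");
    if (!result.success) {
        throw new Error(result.error ?? "Unknown error creating org");
    }
    // result.org is set on success.
    console.log(`Created ${result.org!.orgId} on subnet ${result.org!.subnet}`);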


@@ -0,0 +1,25 @@
/*
* This file is part of a proprietary work.
*
* Copyright (c) 2025 Fossorial, Inc.
* All rights reserved.
*
* This file is licensed under the Fossorial Commercial License.
* You may not use this file except in compliance with the License.
* Unauthorized use, copying, modification, or distribution is strictly prohibited.
*
* This file is not licensed under the AGPLv3.
*/
import RedisStore from "@server/db/private/redisStore";
import { Store } from "express-rate-limit";
export function createStore(): Store {
const rateLimitStore: Store = new RedisStore({
prefix: "api-rate-limit", // Redis key prefix for rate-limit entries
skipFailedRequests: true, // Don't count failed requests
skipSuccessfulRequests: false, // Count successful requests
});
return rateLimitStore;
}
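A sketch of wiring this store into express-rate-limit middleware; the window, limit, and import path are illustrative assumptions:

    import { rateLimit } from "express-rate-limit";
    import { createStore } from "./rateLimitStore"; // hypothetical module path

    const apiLimiter = rateLimit({
        windowMs: 15 * 60 * 1000, // 15-minute window (illustrative)
        limit: 100, // requests per window per key (illustrative)
        standardHeaders: true,
        store: createStore() // counts live in Redis, shared across instances
    });
    // app.use("/api", apiLimiter);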


@@ -0,0 +1,192 @@
/*
* This file is part of a proprietary work.
*
* Copyright (c) 2025 Fossorial, Inc.
* All rights reserved.
*
* This file is licensed under the Fossorial Commercial License.
* You may not use this file except in compliance with the License.
* Unauthorized use, copying, modification, or distribution is strictly prohibited.
*
* This file is not licensed under the AGPLv3.
*/
import fs from "fs";
import yaml from "js-yaml";
import { privateConfigFilePath1 } from "@server/lib/consts";
import { z } from "zod";
import { colorsSchema } from "@server/lib/colorsSchema";
import { build } from "@server/build";
const portSchema = z.number().positive().lte(65535);
export const privateConfigSchema = z
.object({
app: z.object({
region: z.string().optional().default("default"),
base_domain: z.string().optional()
}).optional().default({
region: "default"
}),
server: z.object({
encryption_key_path: z
.string()
.optional()
.default("./config/encryption.pem")
.pipe(z.string().min(8)),
resend_api_key: z.string().optional(),
reo_client_id: z.string().optional(),
}).optional().default({
encryption_key_path: "./config/encryption.pem"
}),
redis: z
.object({
host: z.string(),
port: portSchema,
password: z.string().optional(),
db: z.number().int().nonnegative().optional().default(0),
replicas: z
.array(
z.object({
host: z.string(),
port: portSchema,
password: z.string().optional(),
db: z.number().int().nonnegative().optional().default(0)
})
)
.optional()
// tls: z
// .object({
// reject_unauthorized: z
// .boolean()
// .optional()
// .default(true)
// })
// .optional()
})
.optional(),
gerbil: z
.object({
local_exit_node_reachable_at: z.string().optional().default("http://gerbil:3003")
})
.optional()
.default({}),
flags: z
.object({
enable_redis: z.boolean().optional(),
hide_supporter_key: z.boolean().optional()
})
.optional(),
branding: z
.object({
app_name: z.string().optional(),
background_image_path: z.string().optional(),
colors: z
.object({
light: colorsSchema.optional(),
dark: colorsSchema.optional()
})
.optional(),
logo: z
.object({
light_path: z.string().optional(),
dark_path: z.string().optional(),
auth_page: z
.object({
width: z.number().optional(),
height: z.number().optional()
})
.optional(),
navbar: z
.object({
width: z.number().optional(),
height: z.number().optional()
})
.optional()
})
.optional(),
favicon_path: z.string().optional(),
footer: z
.array(
z.object({
text: z.string(),
href: z.string().optional()
})
)
.optional(),
login_page: z
.object({
subtitle_text: z.string().optional(),
title_text: z.string().optional()
})
.optional(),
signup_page: z
.object({
subtitle_text: z.string().optional(),
title_text: z.string().optional()
})
.optional(),
resource_auth_page: z
.object({
show_logo: z.boolean().optional(),
hide_powered_by: z.boolean().optional(),
title_text: z.string().optional(),
subtitle_text: z.string().optional()
})
.optional(),
emails: z
.object({
signature: z.string().optional(),
colors: z
.object({
primary: z.string().optional()
})
.optional()
})
.optional()
})
.optional(),
stripe: z
.object({
secret_key: z.string(),
webhook_secret: z.string(),
s3Bucket: z.string(),
s3Region: z.string().default("us-east-1"),
localFilePath: z.string()
})
.optional(),
});
export function readPrivateConfigFile() {
if (build == "oss") {
return {};
}
const loadConfig = (configPath: string) => {
try {
const yamlContent = fs.readFileSync(configPath, "utf8");
const config = yaml.load(yamlContent);
return config;
} catch (error) {
if (error instanceof Error) {
throw new Error(
`Error loading configuration file: ${error.message}`
);
}
throw error;
}
};
let environment: any;
if (fs.existsSync(privateConfigFilePath1)) {
environment = loadConfig(privateConfigFilePath1);
}
if (!environment) {
throw new Error(
"No private configuration file found."
);
}
return environment;
}
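A sketch of how the loader and schema likely compose in the config module (the parse call is an assumption, not shown in this commit):

    // Read the YAML file, then validate and apply defaults with zod.
    const rawPrivateConfig = readPrivateConfigFile();
    const privateConfig = privateConfigSchema.parse(rawPrivateConfig);
    // e.g. privateConfig.app.region === "default" when app.region is omitted.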


@@ -0,0 +1,124 @@
/*
* This file is part of a proprietary work.
*
* Copyright (c) 2025 Fossorial, Inc.
* All rights reserved.
*
* This file is licensed under the Fossorial Commercial License.
* You may not use this file except in compliance with the License.
* Unauthorized use, copying, modification, or distribution is strictly prohibited.
*
* This file is not licensed under the AGPLv3.
*/
import { Resend } from "resend";
import config from "../config";
import logger from "@server/logger";
export enum AudienceIds {
General = "5cfbf99b-c592-40a9-9b8a-577a4681c158",
Subscribed = "870b43fd-387f-44de-8fc1-707335f30b20",
Churned = "f3ae92bd-2fdb-4d77-8746-2118afd62549"
}
const resend = new Resend(
config.getRawPrivateConfig().server.resend_api_key || "missing"
);
export default resend;
export async function moveEmailToAudience(
email: string,
audienceId: AudienceIds
) {
if (process.env.ENVIRONMENT !== "prod") {
logger.debug(`Skipping moving email ${email} to audience ${audienceId} in non-prod environment`);
return;
}
let contactId: string | undefined;
try {
const { data } = await retryWithBackoff(async () => {
const { data, error } = await resend.contacts.create({
email,
unsubscribed: false,
audienceId
});
if (error) {
throw new Error(
`Error adding email ${email} to audience ${audienceId}: ${error.message}`
);
}
return { data };
});
contactId = data?.id;
} catch (error) {
// retryWithBackoff rethrows once its retries are exhausted
logger.error(
`Error adding email ${email} to audience ${audienceId}: ${error}`
);
return;
}
if (contactId) {
logger.debug(
`Added email ${email} to audience ${audienceId} with contact ID ${contactId}`
);
}
const otherAudiences = Object.values(AudienceIds).filter(
(id) => id !== audienceId
);
for (const otherAudienceId of otherAudiences) {
try {
const { data } = await retryWithBackoff(async () => {
const { data, error } = await resend.contacts.remove({
email,
audienceId: otherAudienceId
});
if (error) {
throw new Error(
`Error removing email ${email} from audience ${otherAudienceId}: ${error.message}`
);
}
return { data };
});
if (data) {
logger.info(
`Removed email ${email} from audience ${otherAudienceId}`
);
}
} catch (error) {
logger.error(
`Error removing email ${email} from audience ${otherAudienceId}: ${error}`
);
}
}
}
type RetryOptions = {
retries?: number;
initialDelayMs?: number;
factor?: number;
};
export async function retryWithBackoff<T>(
fn: () => Promise<T>,
options: RetryOptions = {}
): Promise<T> {
const { retries = 5, initialDelayMs = 500, factor = 2 } = options;
let attempt = 0;
let delay = initialDelayMs;
while (true) {
try {
return await fn();
} catch (err) {
attempt++;
if (attempt > retries) throw err;
await new Promise((resolve) => setTimeout(resolve, delay));
delay *= factor;
}
}
}
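retryWithBackoff is generic; a minimal sketch with illustrative options:

    // Waits 500ms, 1000ms, then 2000ms between attempts before giving up.
    const { data } = await retryWithBackoff(
        () => resend.contacts.list({ audienceId: AudienceIds.General }),
        { retries: 3, initialDelayMs: 500, factor: 2 }
    );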

server/lib/private/s3.ts

@@ -0,0 +1,19 @@
/*
* This file is part of a proprietary work.
*
* Copyright (c) 2025 Fossorial, Inc.
* All rights reserved.
*
* This file is licensed under the Fossorial Commercial License.
* You may not use this file except in compliance with the License.
* Unauthorized use, copying, modification, or distribution is strictly prohibited.
*
* This file is not licensed under the AGPLv3.
*/
import { S3Client } from "@aws-sdk/client-s3";
import config from "@server/lib/config";
export const s3Client = new S3Client({
region: config.getRawPrivateConfig().stripe?.s3Region || "us-east-1",
});


@@ -0,0 +1,28 @@
/*
* This file is part of a proprietary work.
*
* Copyright (c) 2025 Fossorial, Inc.
* All rights reserved.
*
* This file is licensed under the Fossorial Commercial License.
* You may not use this file except in compliance with the License.
* Unauthorized use, copying, modification, or distribution is strictly prohibited.
*
* This file is not licensed under the AGPLv3.
*/
import Stripe from "stripe";
import config from "@server/lib/config";
import logger from "@server/logger";
import { build } from "@server/build";
let stripe: Stripe | undefined = undefined;
if (build == "saas") {
const stripeApiKey = config.getRawPrivateConfig().stripe?.secret_key;
if (!stripeApiKey) {
logger.error("Stripe secret key is not configured");
}
stripe = new Stripe(stripeApiKey!);
}
export default stripe;
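Because the export stays undefined outside the saas build, call sites must narrow it first; a sketch of a hypothetical call site:

    // Narrow the possibly-undefined export before use.
    if (!stripe) {
        throw new Error("Stripe is only initialized in the saas build");
    }
    const customer = await stripe.customers.create({ email: "owner@example.com" });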