Mirror of https://github.com/fosrl/pangolin.git (synced 2026-02-22 12:56:37 +00:00)

Compare commits: logs-datab ... k8s

4 Commits:
- b786497299
- 874794c996
- 5e37c4e85f
- 4e7eac368f
.github/workflows/saas.yml (vendored): 35 lines changed

@@ -56,6 +56,41 @@ jobs:
       - name: Checkout code
         uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1

+      - name: Download MaxMind GeoLite2 databases
+        env:
+          MAXMIND_LICENSE_KEY: ${{ secrets.MAXMIND_LICENSE_KEY }}
+        run: |
+          echo "Downloading MaxMind GeoLite2 databases..."
+
+          # Download GeoLite2-Country
+          curl -L "https://download.maxmind.com/app/geoip_download?edition_id=GeoLite2-Country&license_key=${MAXMIND_LICENSE_KEY}&suffix=tar.gz" \
+            -o GeoLite2-Country.tar.gz
+
+          # Download GeoLite2-ASN
+          curl -L "https://download.maxmind.com/app/geoip_download?edition_id=GeoLite2-ASN&license_key=${MAXMIND_LICENSE_KEY}&suffix=tar.gz" \
+            -o GeoLite2-ASN.tar.gz
+
+          # Extract the .mmdb files
+          tar -xzf GeoLite2-Country.tar.gz --strip-components=1 --wildcards '*.mmdb'
+          tar -xzf GeoLite2-ASN.tar.gz --strip-components=1 --wildcards '*.mmdb'
+
+          # Verify files exist
+          if [ ! -f "GeoLite2-Country.mmdb" ]; then
+              echo "ERROR: Failed to download GeoLite2-Country.mmdb"
+              exit 1
+          fi
+
+          if [ ! -f "GeoLite2-ASN.mmdb" ]; then
+              echo "ERROR: Failed to download GeoLite2-ASN.mmdb"
+              exit 1
+          fi
+
+          # Clean up tar files
+          rm -f GeoLite2-Country.tar.gz GeoLite2-ASN.tar.gz
+
+          echo "MaxMind databases downloaded successfully"
+          ls -lh GeoLite2-*.mmdb
+
       - name: Monitor storage space
         run: |
           THRESHOLD=75
@@ -49,6 +49,14 @@ COPY server/db/ios_models.json ./dist/ios_models.json
 COPY server/db/mac_models.json ./dist/mac_models.json
 COPY public ./public

+# Copy MaxMind databases for SaaS builds
+ARG BUILD=oss
+RUN mkdir -p ./maxmind
+
+# This is only for saas
+COPY --from=builder-dev /app/GeoLite2-Country.mmdb ./maxmind/GeoLite2-Country.mmdb
+COPY --from=builder-dev /app/GeoLite2-ASN.mmdb ./maxmind/GeoLite2-ASN.mmdb
+
 # OCI Image Labels - Build Args for dynamic values
 ARG VERSION="dev"
 ARG REVISION=""
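For orientation, here is a minimal sketch of how the bundled databases could be read at runtime. It assumes the `maxmind` npm package and the `./maxmind/*.mmdb` paths created by the Dockerfile step above; the repository's actual lookup helper (for example `getCountryCodeForIp`) may be implemented differently.

```ts
import maxmind, { AsnResponse, CountryResponse } from "maxmind";

// Paths match the Dockerfile COPY destinations above; cwd is assumed to be the app root.
const COUNTRY_DB = "./maxmind/GeoLite2-Country.mmdb";
const ASN_DB = "./maxmind/GeoLite2-ASN.mmdb";

export async function lookupIp(ip: string) {
    // maxmind.open() loads the .mmdb file and returns a Reader for it.
    const countryReader = await maxmind.open<CountryResponse>(COUNTRY_DB);
    const asnReader = await maxmind.open<AsnResponse>(ASN_DB);

    return {
        countryCode: countryReader.get(ip)?.country?.iso_code ?? null,
        asn: asnReader.get(ip)?.autonomous_system_number ?? null
    };
}
```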
@@ -1,5 +1,4 @@
 export * from "./driver";
-export * from "./logsDriver";
 export * from "./schema/schema";
 export * from "./schema/privateSchema";
 export * from "./migrate";
@@ -1,89 +0,0 @@
-import { drizzle as DrizzlePostgres } from "drizzle-orm/node-postgres";
-import { Pool } from "pg";
-import { readConfigFile } from "@server/lib/readConfigFile";
-import { readPrivateConfigFile } from "@server/private/lib/readConfigFile";
-import { withReplicas } from "drizzle-orm/pg-core";
-import { build } from "@server/build";
-import { db as mainDb, primaryDb as mainPrimaryDb } from "./driver";
-
-function createLogsDb() {
-    // Only use separate logs database in SaaS builds
-    if (build !== "saas") {
-        return mainDb;
-    }
-
-    const config = readConfigFile();
-    const privateConfig = readPrivateConfigFile();
-
-    // Merge configs, prioritizing private config
-    const logsConfig = privateConfig.postgres_logs || config.postgres_logs;
-
-    // Check environment variable first
-    let connectionString = process.env.POSTGRES_LOGS_CONNECTION_STRING;
-    let replicaConnections: Array<{ connection_string: string }> = [];
-
-    if (!connectionString && logsConfig) {
-        connectionString = logsConfig.connection_string;
-        replicaConnections = logsConfig.replicas || [];
-    }
-
-    // If POSTGRES_LOGS_REPLICA_CONNECTION_STRINGS is set, use it
-    if (process.env.POSTGRES_LOGS_REPLICA_CONNECTION_STRINGS) {
-        replicaConnections =
-            process.env.POSTGRES_LOGS_REPLICA_CONNECTION_STRINGS.split(",").map(
-                (conn) => ({
-                    connection_string: conn.trim()
-                })
-            );
-    }
-
-    // If no logs database is configured, fall back to main database
-    if (!connectionString) {
-        return mainDb;
-    }
-
-    // Create separate connection pool for logs database
-    const poolConfig = logsConfig?.pool || config.postgres?.pool;
-    const primaryPool = new Pool({
-        connectionString,
-        max: poolConfig?.max_connections || 20,
-        idleTimeoutMillis: poolConfig?.idle_timeout_ms || 30000,
-        connectionTimeoutMillis: poolConfig?.connection_timeout_ms || 5000
-    });
-
-    const replicas = [];
-
-    if (!replicaConnections.length) {
-        replicas.push(
-            DrizzlePostgres(primaryPool, {
-                logger: process.env.QUERY_LOGGING == "true"
-            })
-        );
-    } else {
-        for (const conn of replicaConnections) {
-            const replicaPool = new Pool({
-                connectionString: conn.connection_string,
-                max: poolConfig?.max_replica_connections || 20,
-                idleTimeoutMillis: poolConfig?.idle_timeout_ms || 30000,
-                connectionTimeoutMillis:
-                    poolConfig?.connection_timeout_ms || 5000
-            });
-            replicas.push(
-                DrizzlePostgres(replicaPool, {
-                    logger: process.env.QUERY_LOGGING == "true"
-                })
-            );
-        }
-    }
-
-    return withReplicas(
-        DrizzlePostgres(primaryPool, {
-            logger: process.env.QUERY_LOGGING == "true"
-        }),
-        replicas as any
-    );
-}
-
-export const logsDb = createLogsDb();
-export default logsDb;
-export const primaryLogsDb = logsDb.$primary;
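The deleted driver above is essentially a second copy of the primary/replica wiring, pointed at a dedicated logs database. The underlying pattern is drizzle's `withReplicas` helper; a minimal standalone sketch (connection strings are placeholders):

```ts
import { drizzle } from "drizzle-orm/node-postgres";
import { withReplicas } from "drizzle-orm/pg-core";
import { Pool } from "pg";

// Placeholder connection strings, for illustration only.
const primary = drizzle(new Pool({ connectionString: "postgres://primary-host/app" }));
const replica = drizzle(new Pool({ connectionString: "postgres://replica-host/app" }));

// Reads are routed to a replica; writes and db.$primary go to the primary.
export const db = withReplicas(primary, [replica]);
```

With this file removed, audit-log code simply reuses the main `db`/`primaryDb` exports, which the later hunks switch over to.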
@@ -1,5 +1,4 @@
 export * from "./driver";
-export * from "./logsDriver";
 export * from "./schema/schema";
 export * from "./schema/privateSchema";
 export * from "./migrate";
@@ -1,7 +0,0 @@
-import { db as mainDb } from "./driver";
-
-// SQLite doesn't support separate databases for logs in the same way as Postgres
-// Always use the main database connection for SQLite
-export const logsDb = mainDb;
-export default logsDb;
-export const primaryLogsDb = logsDb;
@@ -46,6 +46,8 @@ export class UsageService {
             return null;
         }

+        let orgIdToUse = await this.getBillingOrg(orgId, transaction);
+
         // Truncate value to 11 decimal places
         value = this.truncateValue(value);

@@ -57,7 +59,6 @@ export class UsageService {
         try {
             let usage;
             if (transaction) {
-                const orgIdToUse = await this.getBillingOrg(orgId, transaction);
                 usage = await this.internalAddUsage(
                     orgIdToUse,
                     featureId,
@@ -66,7 +67,6 @@ export class UsageService {
                 );
             } else {
                 await db.transaction(async (trx) => {
-                    const orgIdToUse = await this.getBillingOrg(orgId, trx);
                     usage = await this.internalAddUsage(
                         orgIdToUse,
                         featureId,
@@ -92,7 +92,7 @@ export class UsageService {
                     const delay = baseDelay + jitter;

                     logger.warn(
-                        `Deadlock detected for ${orgId}/${featureId}, retrying attempt ${attempt}/${maxRetries} after ${delay.toFixed(0)}ms`
+                        `Deadlock detected for ${orgIdToUse}/${featureId}, retrying attempt ${attempt}/${maxRetries} after ${delay.toFixed(0)}ms`
                     );

                     await new Promise((resolve) => setTimeout(resolve, delay));
@@ -100,7 +100,7 @@ export class UsageService {
         }

         logger.error(
-            `Failed to add usage for ${orgId}/${featureId} after ${attempt} attempts:`,
+            `Failed to add usage for ${orgIdToUse}/${featureId} after ${attempt} attempts:`,
             error
         );
         break;
@@ -169,7 +169,7 @@ export class UsageService {
             return;
         }

-        const orgIdToUse = await this.getBillingOrg(orgId);
+        let orgIdToUse = await this.getBillingOrg(orgId);

         try {
             // Truncate value to 11 decimal places if provided
@@ -227,7 +227,7 @@ export class UsageService {
         orgId: string,
         featureId: FeatureId
     ): Promise<string | null> {
-        const orgIdToUse = await this.getBillingOrg(orgId);
+        let orgIdToUse = await this.getBillingOrg(orgId);

         const cacheKey = `customer_${orgIdToUse}_${featureId}`;
         const cached = cache.get<string>(cacheKey);
@@ -274,7 +274,7 @@ export class UsageService {
             return null;
         }

-        const orgIdToUse = await this.getBillingOrg(orgId, trx);
+        let orgIdToUse = await this.getBillingOrg(orgId, trx);

         const usageId = `${orgIdToUse}-${featureId}`;

@@ -382,7 +382,7 @@ export class UsageService {
             return false;
         }

-        const orgIdToUse = await this.getBillingOrg(orgId, trx);
+        let orgIdToUse = await this.getBillingOrg(orgId, trx);

         // This method should check the current usage against the limits set for the organization
         // and kick out all of the sites on the org
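The deadlock-retry path shown above waits `baseDelay + jitter` between attempts; a condensed, self-contained sketch of that backoff pattern (constants and names are illustrative, not the service's exact values):

```ts
// Illustrative retry loop with exponential backoff plus jitter.
async function withDeadlockRetry<T>(fn: () => Promise<T>, maxRetries = 5): Promise<T> {
    for (let attempt = 1; attempt <= maxRetries; attempt++) {
        try {
            return await fn();
        } catch (error) {
            if (attempt === maxRetries) throw error;
            const baseDelay = 50 * 2 ** attempt;      // grows each attempt
            const jitter = Math.random() * baseDelay; // desynchronizes competing writers
            const delay = baseDelay + jitter;
            await new Promise((resolve) => setTimeout(resolve, delay));
        }
    }
    throw new Error("unreachable");
}
```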
@@ -189,46 +189,6 @@ export const configSchema = z
                     .prefault({})
             })
             .optional(),
-        postgres_logs: z
-            .object({
-                connection_string: z
-                    .string()
-                    .optional()
-                    .transform(getEnvOrYaml("POSTGRES_LOGS_CONNECTION_STRING")),
-                replicas: z
-                    .array(
-                        z.object({
-                            connection_string: z.string()
-                        })
-                    )
-                    .optional(),
-                pool: z
-                    .object({
-                        max_connections: z
-                            .number()
-                            .positive()
-                            .optional()
-                            .default(20),
-                        max_replica_connections: z
-                            .number()
-                            .positive()
-                            .optional()
-                            .default(10),
-                        idle_timeout_ms: z
-                            .number()
-                            .positive()
-                            .optional()
-                            .default(30000),
-                        connection_timeout_ms: z
-                            .number()
-                            .positive()
-                            .optional()
-                            .default(5000)
-                    })
-                    .optional()
-                    .prefault({})
-            })
-            .optional(),
         traefik: z
             .object({
                 http_entrypoint: z.string().optional().default("web"),
@@ -23,9 +23,14 @@ export async function verifyApiKeyRoleAccess(
         );
     }

-    const { roleIds } = req.body;
-    const allRoleIds =
-        roleIds || (isNaN(singleRoleId) ? [] : [singleRoleId]);
+    let allRoleIds: number[] = [];
+    if (!isNaN(singleRoleId)) {
+        // If roleId is provided in URL params, query params, or body (single), use it exclusively
+        allRoleIds = [singleRoleId];
+    } else if (req.body?.roleIds) {
+        // Only use body.roleIds if no single roleId was provided
+        allRoleIds = req.body.roleIds;
+    }

     if (allRoleIds.length === 0) {
         return next();
@@ -23,8 +23,14 @@ export async function verifyRoleAccess(
         );
     }

-    const roleIds = req.body?.roleIds;
-    const allRoleIds = roleIds || (isNaN(singleRoleId) ? [] : [singleRoleId]);
+    let allRoleIds: number[] = [];
+    if (!isNaN(singleRoleId)) {
+        // If roleId is provided in URL params, query params, or body (single), use it exclusively
+        allRoleIds = [singleRoleId];
+    } else if (req.body?.roleIds) {
+        // Only use body.roleIds if no single roleId was provided
+        allRoleIds = req.body.roleIds;
+    }

     if (allRoleIds.length === 0) {
         return next();
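Both middlewares now resolve the role list the same way: a single `roleId` (from URL params, query params, or body) wins, and `body.roleIds` is only used when no single id is present. A condensed illustration of that precedence (hypothetical helper, not an export of the repo):

```ts
// Hypothetical helper mirroring the precedence rule above.
function resolveRoleIds(singleRoleId: number, bodyRoleIds?: number[]): number[] {
    if (!isNaN(singleRoleId)) {
        return [singleRoleId]; // an explicit single roleId is used exclusively
    }
    return bodyRoleIds ?? []; // otherwise fall back to body.roleIds, if any
}

// resolveRoleIds(7, [1, 2])   -> [7]
// resolveRoleIds(NaN, [1, 2]) -> [1, 2]
// resolveRoleIds(NaN)         -> []  (the middleware then calls next() without role checks)
```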
@@ -78,8 +78,7 @@ export async function getOrgTierData(
         if (
             subscription.type === "tier1" ||
             subscription.type === "tier2" ||
-            subscription.type === "tier3" ||
-            subscription.type === "enterprise"
+            subscription.type === "tier3"
         ) {
             tier = subscription.type;
             active = true;
@@ -11,7 +11,7 @@
  * This file is not licensed under the AGPLv3.
  */

-import { accessAuditLog, logsDb, db, orgs } from "@server/db";
+import { accessAuditLog, db, orgs } from "@server/db";
 import { getCountryCodeForIp } from "@server/lib/geoip";
 import logger from "@server/logger";
 import { and, eq, lt } from "drizzle-orm";
@@ -52,7 +52,7 @@ export async function cleanUpOldLogs(orgId: string, retentionDays: number) {
     const cutoffTimestamp = calculateCutoffTimestamp(retentionDays);

     try {
-        await logsDb
+        await db
             .delete(accessAuditLog)
             .where(
                 and(
@@ -124,7 +124,7 @@ export async function logAccessAudit(data: {
         ? await getCountryCodeFromIp(data.requestIp)
         : undefined;

-    await logsDb.insert(accessAuditLog).values({
+    await db.insert(accessAuditLog).values({
         timestamp: timestamp,
         orgId: data.orgId,
         actorType,
@@ -72,55 +72,15 @@ export const privateConfigSchema = z.object({
                         db: z.int().nonnegative().optional().default(0)
                     })
                 )
-                    .optional()
-                // tls: z
-                //     .object({
-                //         reject_unauthorized: z
-                //             .boolean()
-                //             .optional()
-                //             .default(true)
-                //     })
-                //     .optional()
-            })
                 .optional(),
-            postgres_logs: z
+            tls: z
                 .object({
-                    connection_string: z
-                        .string()
+                    rejectUnauthorized: z
+                        .boolean()
                         .optional()
-                        .transform(getEnvOrYaml("POSTGRES_LOGS_CONNECTION_STRING")),
-                    replicas: z
-                        .array(
-                            z.object({
-                                connection_string: z.string()
-                            })
-                        )
-                        .optional(),
-                    pool: z
-                        .object({
-                            max_connections: z
-                                .number()
-                                .positive()
-                                .optional()
-                                .default(20),
-                            max_replica_connections: z
-                                .number()
-                                .positive()
-                                .optional()
-                                .default(10),
-                            idle_timeout_ms: z
-                                .number()
-                                .positive()
-                                .optional()
-                                .default(30000),
-                            connection_timeout_ms: z
-                                .number()
-                                .positive()
-                                .optional()
-                                .default(5000)
-                        })
-                        .optional()
-                        .prefault({})
+                        .default(true)
                 })
                 .optional()
-                .prefault({})
             })
             .optional(),
         gerbil: z
@@ -108,11 +108,15 @@ class RedisManager {
             port: redisConfig.port!,
             password: redisConfig.password,
             db: redisConfig.db
-            // tls: {
-            //     rejectUnauthorized:
-            //         redisConfig.tls?.reject_unauthorized || false
-            // }
         };

+        // Enable TLS if configured (required for AWS ElastiCache in-transit encryption)
+        if (redisConfig.tls) {
+            opts.tls = {
+                rejectUnauthorized: redisConfig.tls.rejectUnauthorized ?? true
+            };
+        }
+
         return opts;
     }

@@ -130,11 +134,15 @@ class RedisManager {
             port: replica.port!,
             password: replica.password,
             db: replica.db || redisConfig.db
-            // tls: {
-            //     rejectUnauthorized:
-            //         replica.tls?.reject_unauthorized || false
-            // }
         };

+        // Enable TLS if configured (required for AWS ElastiCache in-transit encryption)
+        if (redisConfig.tls) {
+            opts.tls = {
+                rejectUnauthorized: redisConfig.tls.rejectUnauthorized ?? true
+            };
+        }
+
         return opts;
     }
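Setting an `opts.tls` object is what switches the Redis client to a TLS connection (needed, for example, for AWS ElastiCache with in-transit encryption). A minimal sketch assuming ioredis; the option names here illustrate the pattern rather than the repo's full option set:

```ts
import Redis from "ioredis";

// Passing a `tls` object makes the client connect over TLS;
// rejectUnauthorized: true keeps server-certificate verification on (the new default above).
const redis = new Redis({
    host: "redis.example.internal", // placeholder host
    port: 6379,
    password: process.env.REDIS_PASSWORD,
    tls: {
        rejectUnauthorized: true
    }
});
```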
@@ -12,7 +12,7 @@
  */

 import { ActionsEnum } from "@server/auth/actions";
-import { actionAuditLog, logsDb, db, orgs } from "@server/db";
+import { actionAuditLog, db, orgs } from "@server/db";
 import logger from "@server/logger";
 import HttpCode from "@server/types/HttpCode";
 import { Request, Response, NextFunction } from "express";
@@ -54,7 +54,7 @@ export async function cleanUpOldLogs(orgId: string, retentionDays: number) {
     const cutoffTimestamp = calculateCutoffTimestamp(retentionDays);

     try {
-        await logsDb
+        await db
             .delete(actionAuditLog)
             .where(
                 and(
@@ -123,7 +123,7 @@ export function logActionAudit(action: ActionsEnum) {
             metadata = JSON.stringify(req.params);
         }

-        await logsDb.insert(actionAuditLog).values({
+        await db.insert(actionAuditLog).values({
             timestamp,
             orgId,
             actorType,
@@ -11,11 +11,11 @@
  * This file is not licensed under the AGPLv3.
  */

-import { accessAuditLog, logsDb, resources, db, primaryDb } from "@server/db";
+import { accessAuditLog, db, resources } from "@server/db";
 import { registry } from "@server/openApi";
 import { NextFunction } from "express";
 import { Request, Response } from "express";
-import { eq, gt, lt, and, count, desc, inArray } from "drizzle-orm";
+import { eq, gt, lt, and, count, desc } from "drizzle-orm";
 import { OpenAPITags } from "@server/openApi";
 import { z } from "zod";
 import createHttpError from "http-errors";
@@ -115,7 +115,7 @@ function getWhere(data: Q) {
 }

 export function queryAccess(data: Q) {
-    return logsDb
+    return db
         .select({
             orgId: accessAuditLog.orgId,
             action: accessAuditLog.action,
@@ -133,46 +133,16 @@ export function queryAccess(data: Q) {
             actor: accessAuditLog.actor
         })
         .from(accessAuditLog)
+        .leftJoin(
+            resources,
+            eq(accessAuditLog.resourceId, resources.resourceId)
+        )
         .where(getWhere(data))
         .orderBy(desc(accessAuditLog.timestamp), desc(accessAuditLog.id));
 }

-async function enrichWithResourceDetails(logs: Awaited<ReturnType<typeof queryAccess>>) {
-    // If logs database is the same as main database, we can do a join
-    // Otherwise, we need to fetch resource details separately
-    const resourceIds = logs
-        .map(log => log.resourceId)
-        .filter((id): id is number => id !== null && id !== undefined);
-
-    if (resourceIds.length === 0) {
-        return logs.map(log => ({ ...log, resourceName: null, resourceNiceId: null }));
-    }
-
-    // Fetch resource details from main database
-    const resourceDetails = await primaryDb
-        .select({
-            resourceId: resources.resourceId,
-            name: resources.name,
-            niceId: resources.niceId
-        })
-        .from(resources)
-        .where(inArray(resources.resourceId, resourceIds));
-
-    // Create a map for quick lookup
-    const resourceMap = new Map(
-        resourceDetails.map(r => [r.resourceId, { name: r.name, niceId: r.niceId }])
-    );
-
-    // Enrich logs with resource details
-    return logs.map(log => ({
-        ...log,
-        resourceName: log.resourceId ? resourceMap.get(log.resourceId)?.name ?? null : null,
-        resourceNiceId: log.resourceId ? resourceMap.get(log.resourceId)?.niceId ?? null : null
-    }));
-}
-
 export function countAccessQuery(data: Q) {
-    const countQuery = logsDb
+    const countQuery = db
         .select({ count: count() })
         .from(accessAuditLog)
         .where(getWhere(data));
@@ -191,7 +161,7 @@ async function queryUniqueFilterAttributes(
     );

     // Get unique actors
-    const uniqueActors = await logsDb
+    const uniqueActors = await db
         .selectDistinct({
             actor: accessAuditLog.actor
         })
@@ -199,7 +169,7 @@ async function queryUniqueFilterAttributes(
         .where(baseConditions);

     // Get unique locations
-    const uniqueLocations = await logsDb
+    const uniqueLocations = await db
         .selectDistinct({
             locations: accessAuditLog.location
         })
@@ -207,40 +177,25 @@ async function queryUniqueFilterAttributes(
         .where(baseConditions);

     // Get unique resources with names
-    const uniqueResources = await logsDb
+    const uniqueResources = await db
         .selectDistinct({
-            id: accessAuditLog.resourceId
-        })
-        .from(accessAuditLog)
-        .where(baseConditions);
-
-    // Fetch resource names from main database for the unique resource IDs
-    const resourceIds = uniqueResources
-        .map(row => row.id)
-        .filter((id): id is number => id !== null);
-
-    let resourcesWithNames: Array<{ id: number; name: string | null }> = [];
-
-    if (resourceIds.length > 0) {
-        const resourceDetails = await primaryDb
-            .select({
-                resourceId: resources.resourceId,
+            id: accessAuditLog.resourceId,
             name: resources.name
         })
-        .from(resources)
-        .where(inArray(resources.resourceId, resourceIds));
-
-        resourcesWithNames = resourceDetails.map(r => ({
-            id: r.resourceId,
-            name: r.name
-        }));
-    }
+        .from(accessAuditLog)
+        .leftJoin(
+            resources,
+            eq(accessAuditLog.resourceId, resources.resourceId)
+        )
+        .where(baseConditions);

     return {
         actors: uniqueActors
             .map((row) => row.actor)
             .filter((actor): actor is string => actor !== null),
-        resources: resourcesWithNames,
+        resources: uniqueResources.filter(
+            (row): row is { id: number; name: string | null } => row.id !== null
+        ),
         locations: uniqueLocations
             .map((row) => row.locations)
             .filter((location): location is string => location !== null)
@@ -288,10 +243,7 @@ export async function queryAccessAuditLogs(

     const baseQuery = queryAccess(data);

-    const logsRaw = await baseQuery.limit(data.limit).offset(data.offset);
-
-    // Enrich with resource details (handles cross-database scenario)
-    const log = await enrichWithResourceDetails(logsRaw);
+    const log = await baseQuery.limit(data.limit).offset(data.offset);

     const totalCountResult = await countAccessQuery(data);
     const totalCount = totalCountResult[0].count;
@@ -11,7 +11,7 @@
  * This file is not licensed under the AGPLv3.
  */

-import { actionAuditLog, logsDb } from "@server/db";
+import { actionAuditLog, db } from "@server/db";
 import { registry } from "@server/openApi";
 import { NextFunction } from "express";
 import { Request, Response } from "express";
@@ -97,7 +97,7 @@ function getWhere(data: Q) {
 }

 export function queryAction(data: Q) {
-    return logsDb
+    return db
         .select({
             orgId: actionAuditLog.orgId,
             action: actionAuditLog.action,
@@ -113,7 +113,7 @@ export function queryAction(data: Q) {
 }

 export function countActionQuery(data: Q) {
-    const countQuery = logsDb
+    const countQuery = db
         .select({ count: count() })
         .from(actionAuditLog)
         .where(getWhere(data));
@@ -132,14 +132,14 @@ async function queryUniqueFilterAttributes(
     );

     // Get unique actors
-    const uniqueActors = await logsDb
+    const uniqueActors = await db
         .selectDistinct({
             actor: actionAuditLog.actor
         })
         .from(actionAuditLog)
         .where(baseConditions);

-    const uniqueActions = await logsDb
+    const uniqueActions = await db
         .selectDistinct({
             action: actionAuditLog.action
         })
@@ -139,7 +139,7 @@ export async function signSshKey(
     if (!userOrg.pamUsername) {
         if (req.user?.email) {
             // Extract username from email (first part before @)
-            usernameToUse = req.user?.email.split("@")[0];
+            usernameToUse = req.user?.email.split("@")[0].replace(/[^a-zA-Z0-9_-]/g, "");
             if (!usernameToUse) {
                 return next(
                     createHttpError(
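The added `.replace()` strips every character that is not a letter, digit, underscore, or dash from the email-derived username before it is used further. A small illustration with made-up inputs:

```ts
// Same expression as in the change above, applied to sample emails (hypothetical values).
const toPamUsername = (email: string) =>
    email.split("@")[0].replace(/[^a-zA-Z0-9_-]/g, "");

toPamUsername("john.doe+ssh@example.com"); // -> "johndoessh"
toPamUsername("alice_smith@example.com");  // -> "alice_smith"
toPamUsername("++@example.com");           // -> "" (caught by the !usernameToUse check above)
```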
@@ -1,4 +1,4 @@
-import { logsDb, requestAuditLog, driver, primaryLogsDb } from "@server/db";
+import { db, requestAuditLog, driver, primaryDb } from "@server/db";
 import { registry } from "@server/openApi";
 import { NextFunction } from "express";
 import { Request, Response } from "express";
@@ -74,12 +74,12 @@ async function query(query: Q) {
         );
     }

-    const [all] = await primaryLogsDb
+    const [all] = await primaryDb
         .select({ total: count() })
         .from(requestAuditLog)
         .where(baseConditions);

-    const [blocked] = await primaryLogsDb
+    const [blocked] = await primaryDb
         .select({ total: count() })
         .from(requestAuditLog)
         .where(and(baseConditions, eq(requestAuditLog.action, false)));
@@ -90,7 +90,7 @@ async function query(query: Q) {

     const DISTINCT_LIMIT = 500;

-    const requestsPerCountry = await primaryLogsDb
+    const requestsPerCountry = await primaryDb
         .selectDistinct({
             code: requestAuditLog.location,
             count: totalQ
@@ -118,7 +118,7 @@ async function query(query: Q) {
     const booleanTrue = driver === "pg" ? sql`true` : sql`1`;
     const booleanFalse = driver === "pg" ? sql`false` : sql`0`;

-    const requestsPerDay = await primaryLogsDb
+    const requestsPerDay = await primaryDb
         .select({
             day: groupByDayFunction.as("day"),
             allowedCount:
@@ -1,8 +1,8 @@
-import { logsDb, primaryLogsDb, requestAuditLog, resources, db, primaryDb } from "@server/db";
+import { db, primaryDb, requestAuditLog, resources } from "@server/db";
 import { registry } from "@server/openApi";
 import { NextFunction } from "express";
 import { Request, Response } from "express";
-import { eq, gt, lt, and, count, desc, inArray } from "drizzle-orm";
+import { eq, gt, lt, and, count, desc } from "drizzle-orm";
 import { OpenAPITags } from "@server/openApi";
 import { z } from "zod";
 import createHttpError from "http-errors";
@@ -107,7 +107,7 @@ function getWhere(data: Q) {
 }

 export function queryRequest(data: Q) {
-    return primaryLogsDb
+    return primaryDb
         .select({
             id: requestAuditLog.id,
             timestamp: requestAuditLog.timestamp,
@@ -129,49 +129,21 @@ export function queryRequest(data: Q) {
             host: requestAuditLog.host,
             path: requestAuditLog.path,
             method: requestAuditLog.method,
-            tls: requestAuditLog.tls
+            tls: requestAuditLog.tls,
+            resourceName: resources.name,
+            resourceNiceId: resources.niceId
         })
         .from(requestAuditLog)
+        .leftJoin(
+            resources,
+            eq(requestAuditLog.resourceId, resources.resourceId)
+        ) // TODO: Is this efficient?
         .where(getWhere(data))
         .orderBy(desc(requestAuditLog.timestamp));
 }

-async function enrichWithResourceDetails(logs: Awaited<ReturnType<typeof queryRequest>>) {
-    // If logs database is the same as main database, we can do a join
-    // Otherwise, we need to fetch resource details separately
-    const resourceIds = logs
-        .map(log => log.resourceId)
-        .filter((id): id is number => id !== null && id !== undefined);
-
-    if (resourceIds.length === 0) {
-        return logs.map(log => ({ ...log, resourceName: null, resourceNiceId: null }));
-    }
-
-    // Fetch resource details from main database
-    const resourceDetails = await primaryDb
-        .select({
-            resourceId: resources.resourceId,
-            name: resources.name,
-            niceId: resources.niceId
-        })
-        .from(resources)
-        .where(inArray(resources.resourceId, resourceIds));
-
-    // Create a map for quick lookup
-    const resourceMap = new Map(
-        resourceDetails.map(r => [r.resourceId, { name: r.name, niceId: r.niceId }])
-    );
-
-    // Enrich logs with resource details
-    return logs.map(log => ({
-        ...log,
-        resourceName: log.resourceId ? resourceMap.get(log.resourceId)?.name ?? null : null,
-        resourceNiceId: log.resourceId ? resourceMap.get(log.resourceId)?.niceId ?? null : null
-    }));
-}
-
 export function countRequestQuery(data: Q) {
-    const countQuery = primaryLogsDb
+    const countQuery = primaryDb
         .select({ count: count() })
         .from(requestAuditLog)
         .where(getWhere(data));
@@ -213,31 +185,36 @@ async function queryUniqueFilterAttributes(
         uniquePaths,
         uniqueResources
     ] = await Promise.all([
-        primaryLogsDb
+        primaryDb
             .selectDistinct({ actor: requestAuditLog.actor })
             .from(requestAuditLog)
             .where(baseConditions)
             .limit(DISTINCT_LIMIT + 1),
-        primaryLogsDb
+        primaryDb
             .selectDistinct({ locations: requestAuditLog.location })
             .from(requestAuditLog)
             .where(baseConditions)
             .limit(DISTINCT_LIMIT + 1),
-        primaryLogsDb
+        primaryDb
             .selectDistinct({ hosts: requestAuditLog.host })
             .from(requestAuditLog)
             .where(baseConditions)
             .limit(DISTINCT_LIMIT + 1),
-        primaryLogsDb
+        primaryDb
             .selectDistinct({ paths: requestAuditLog.path })
             .from(requestAuditLog)
             .where(baseConditions)
             .limit(DISTINCT_LIMIT + 1),
-        primaryLogsDb
+        primaryDb
             .selectDistinct({
-                id: requestAuditLog.resourceId
+                id: requestAuditLog.resourceId,
+                name: resources.name
             })
             .from(requestAuditLog)
+            .leftJoin(
+                resources,
+                eq(requestAuditLog.resourceId, resources.resourceId)
+            )
             .where(baseConditions)
             .limit(DISTINCT_LIMIT + 1)
     ]);
@@ -254,33 +231,13 @@ async function queryUniqueFilterAttributes(
     // throw new Error("Too many distinct filter attributes to retrieve. Please refine your time range.");
     // }

-    // Fetch resource names from main database for the unique resource IDs
-    const resourceIds = uniqueResources
-        .map(row => row.id)
-        .filter((id): id is number => id !== null);
-
-    let resourcesWithNames: Array<{ id: number; name: string | null }> = [];
-
-    if (resourceIds.length > 0) {
-        const resourceDetails = await primaryDb
-            .select({
-                resourceId: resources.resourceId,
-                name: resources.name
-            })
-            .from(resources)
-            .where(inArray(resources.resourceId, resourceIds));
-
-        resourcesWithNames = resourceDetails.map(r => ({
-            id: r.resourceId,
-            name: r.name
-        }));
-    }
-
     return {
         actors: uniqueActors
             .map((row) => row.actor)
             .filter((actor): actor is string => actor !== null),
-        resources: resourcesWithNames,
+        resources: uniqueResources.filter(
+            (row): row is { id: number; name: string | null } => row.id !== null
+        ),
         locations: uniqueLocations
             .map((row) => row.locations)
             .filter((location): location is string => location !== null),
@@ -323,10 +280,7 @@ export async function queryRequestAuditLogs(

     const baseQuery = queryRequest(data);

-    const logsRaw = await baseQuery.limit(data.limit).offset(data.offset);
-
-    // Enrich with resource details (handles cross-database scenario)
-    const log = await enrichWithResourceDetails(logsRaw);
+    const log = await baseQuery.limit(data.limit).offset(data.offset);

     const totalCountResult = await countRequestQuery(data);
     const totalCount = totalCountResult[0].count;
@@ -1,4 +1,4 @@
-import { logsDb, primaryLogsDb, db, orgs, requestAuditLog } from "@server/db";
+import { db, orgs, requestAuditLog } from "@server/db";
 import logger from "@server/logger";
 import { and, eq, lt, sql } from "drizzle-orm";
 import cache from "@server/lib/cache";
@@ -69,7 +69,7 @@ async function flushAuditLogs() {
     try {
         // Use a transaction to ensure all inserts succeed or fail together
         // This prevents index corruption from partial writes
-        await logsDb.transaction(async (tx) => {
+        await db.transaction(async (tx) => {
             // Batch insert logs in groups of 25 to avoid overwhelming the database
             const BATCH_DB_SIZE = 25;
             for (let i = 0; i < logsToWrite.length; i += BATCH_DB_SIZE) {
@@ -162,7 +162,7 @@ export async function cleanUpOldLogs(orgId: string, retentionDays: number) {
     const cutoffTimestamp = calculateCutoffTimestamp(retentionDays);

     try {
-        await logsDb
+        await db
             .delete(requestAuditLog)
             .where(
                 and(
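The flush path keeps its existing shape (one transaction, inserts in batches of 25) and only swaps `logsDb` for `db`. A condensed sketch of that chunked-insert pattern, with `db`, `requestAuditLog`, and `logsToWrite` standing in for the real module-level values:

```ts
// Sketch: insert buffered audit rows in fixed-size batches inside one transaction,
// so a failure mid-flush cannot leave a partially written run of rows.
const BATCH_DB_SIZE = 25;

async function flushSketch(logsToWrite: (typeof requestAuditLog.$inferInsert)[]) {
    await db.transaction(async (tx) => {
        for (let i = 0; i < logsToWrite.length; i += BATCH_DB_SIZE) {
            const batch = logsToWrite.slice(i, i + BATCH_DB_SIZE);
            await tx.insert(requestAuditLog).values(batch);
        }
    });
}
```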
@@ -6,7 +6,7 @@ import response from "@server/lib/response";
 import HttpCode from "@server/types/HttpCode";
 import createHttpError from "http-errors";
 import logger from "@server/logger";
-import { eq, and } from "drizzle-orm";
+import { eq, and, ne } from "drizzle-orm";
 import { fromError } from "zod-validation-error";
 import { OpenAPITags, registry } from "@server/openApi";

@@ -93,7 +93,8 @@ export async function updateClient(
         .where(
             and(
                 eq(clients.niceId, niceId),
-                eq(clients.orgId, clients.orgId)
+                eq(clients.orgId, clients.orgId),
+                ne(clients.clientId, clientId)
             )
         )
         .limit(1);
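The same adjustment recurs in the resource and site update handlers below: the niceId-uniqueness lookup now excludes the row being updated with `ne(...)` in the query itself rather than filtering the result afterwards. A generic sketch of that query shape (table and column names are placeholders):

```ts
import { and, eq, ne } from "drizzle-orm";

// Sketch: does any *other* row in this org already use the requested niceId?
async function niceIdTaken(orgId: string, niceId: string, currentSiteId: number) {
    const [conflict] = await db
        .select()
        .from(sites) // placeholder table
        .where(
            and(
                eq(sites.niceId, niceId),
                eq(sites.orgId, orgId),
                ne(sites.siteId, currentSiteId) // exclude the row being updated
            )
        )
        .limit(1);
    return !!conflict;
}
```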
@@ -197,6 +197,7 @@ export async function updateSiteBandwidth(
         usageService
             .checkLimitSet(
                 orgId,
+
                 FeatureId.EGRESS_DATA_MB,
                 bandwidthUsage
             )
@@ -9,7 +9,7 @@ import {
     Resource,
     resources
 } from "@server/db";
-import { eq, and } from "drizzle-orm";
+import { eq, and, ne } from "drizzle-orm";
 import response from "@server/lib/response";
 import HttpCode from "@server/types/HttpCode";
 import createHttpError from "http-errors";
@@ -33,7 +33,15 @@ const updateResourceParamsSchema = z.strictObject({
 const updateHttpResourceBodySchema = z
     .strictObject({
         name: z.string().min(1).max(255).optional(),
-        niceId: z.string().min(1).max(255).regex(/^[a-zA-Z0-9-]+$/, "niceId can only contain letters, numbers, and dashes").optional(),
+        niceId: z
+            .string()
+            .min(1)
+            .max(255)
+            .regex(
+                /^[a-zA-Z0-9-]+$/,
+                "niceId can only contain letters, numbers, and dashes"
+            )
+            .optional(),
         subdomain: subdomainSchema.nullable().optional(),
         ssl: z.boolean().optional(),
         sso: z.boolean().optional(),
@@ -248,14 +256,13 @@ async function updateHttpResource(
             .where(
                 and(
                     eq(resources.niceId, updateData.niceId),
-                    eq(resources.orgId, resource.orgId)
+                    eq(resources.orgId, resource.orgId),
+                    ne(resources.resourceId, resource.resourceId) // exclude the current resource from the search
                 )
-            );
+            )
+            .limit(1);

-        if (
-            existingResource &&
-            existingResource.resourceId !== resource.resourceId
-        ) {
+        if (existingResource) {
             return next(
                 createHttpError(
                     HttpCode.CONFLICT,
@@ -343,7 +350,10 @@ async function updateHttpResource(
             headers = null;
         }

-        const isLicensed = await isLicensedOrSubscribed(resource.orgId, tierMatrix.maintencePage);
+        const isLicensed = await isLicensedOrSubscribed(
+            resource.orgId,
+            tierMatrix.maintencePage
+        );
         if (!isLicensed) {
             updateData.maintenanceModeEnabled = undefined;
             updateData.maintenanceModeType = undefined;
@@ -2,7 +2,7 @@ import { Request, Response, NextFunction } from "express";
 import { z } from "zod";
 import { db } from "@server/db";
 import { sites } from "@server/db";
-import { eq, and } from "drizzle-orm";
+import { eq, and, ne } from "drizzle-orm";
 import response from "@server/lib/response";
 import HttpCode from "@server/types/HttpCode";
 import createHttpError from "http-errors";
@@ -19,8 +19,8 @@ const updateSiteBodySchema = z
     .strictObject({
         name: z.string().min(1).max(255).optional(),
         niceId: z.string().min(1).max(255).optional(),
-        dockerSocketEnabled: z.boolean().optional(),
-        remoteSubnets: z.string().optional()
+        dockerSocketEnabled: z.boolean().optional()
+        // remoteSubnets: z.string().optional()
         // subdomain: z
         //     .string()
         //     .min(1)
@@ -86,18 +86,19 @@ export async function updateSite(

     // if niceId is provided, check if it's already in use by another site
     if (updateData.niceId) {
-        const existingSite = await db
+        const [existingSite] = await db
             .select()
             .from(sites)
             .where(
                 and(
                     eq(sites.niceId, updateData.niceId),
-                    eq(sites.orgId, sites.orgId)
+                    eq(sites.orgId, sites.orgId),
+                    ne(sites.siteId, siteId)
                 )
             )
             .limit(1);

-        if (existingSite.length > 0 && existingSite[0].siteId !== siteId) {
+        if (existingSite) {
             return next(
                 createHttpError(
                     HttpCode.CONFLICT,
@@ -107,22 +108,22 @@ export async function updateSite(
             }
         }

-        // if remoteSubnets is provided, ensure it's a valid comma-separated list of cidrs
-        if (updateData.remoteSubnets) {
-            const subnets = updateData.remoteSubnets
-                .split(",")
-                .map((s) => s.trim());
-            for (const subnet of subnets) {
-                if (!isValidCIDR(subnet)) {
-                    return next(
-                        createHttpError(
-                            HttpCode.BAD_REQUEST,
-                            `Invalid CIDR format: ${subnet}`
-                        )
-                    );
-                }
-            }
-        }
+        // // if remoteSubnets is provided, ensure it's a valid comma-separated list of cidrs
+        // if (updateData.remoteSubnets) {
+        //     const subnets = updateData.remoteSubnets
+        //         .split(",")
+        //         .map((s) => s.trim());
+        //     for (const subnet of subnets) {
+        //         if (!isValidCIDR(subnet)) {
+        //             return next(
+        //                 createHttpError(
+        //                     HttpCode.BAD_REQUEST,
+        //                     `Invalid CIDR format: ${subnet}`
+        //                 )
+        //             );
+        //         }
+        //     }
+        // }

         const updatedSite = await db
             .update(sites)
@@ -82,13 +82,13 @@ export default async function RootLayout({
         <body className={`${font.className} h-screen-safe overflow-hidden`}>
             <StoreInternalRedirect />
             <TopLoader />
-            {/* build === "saas" && (
+            {build === "saas" && (
                 <Script
                     src="https://rybbit.fossorial.io/api/script.js"
                     data-site-id="fe1ff2a33287"
                     strategy="afterInteractive"
                 />
-            )*/}
+            )}
             <ViewportHeightFix />
             <NextIntlClientProvider>
                 <ThemeProvider
@@ -20,7 +20,7 @@ export const isOrgSubscribed = cache(async (orgId: string) => {
     try {
         const subRes = await getCachedSubscription(orgId);
         subscribed =
-            (subRes.data.data.tier == "tier1" || subRes.data.data.tier == "tier2" || subRes.data.data.tier == "tier3" || subRes.data.data.tier == "enterprise") &&
+            (subRes.data.data.tier == "tier1" || subRes.data.data.tier == "tier2" || subRes.data.data.tier == "tier3") &&
             subRes.data.data.active;
     } catch {}
 }
@@ -42,8 +42,7 @@ export function SubscriptionStatusProvider({
         if (
             subscription.type == "tier1" ||
             subscription.type == "tier2" ||
-            subscription.type == "tier3" ||
-            subscription.type == "enterprise"
+            subscription.type == "tier3"
         ) {
             return {
                 tier: subscription.type,
@@ -62,7 +61,7 @@ export function SubscriptionStatusProvider({
     const isSubscribed = () => {
         const { tier, active } = getTier();
         return (
-            (tier == "tier1" || tier == "tier2" || tier == "tier3" || tier == "enterprise") &&
+            (tier == "tier1" || tier == "tier2" || tier == "tier3") &&
             active
         );
     };