Support PostgreSQL as a database option

miloschwartz
2025-06-04 12:02:07 -04:00
parent 62a0104e70
commit 2cca561e51
218 changed files with 1417 additions and 713 deletions
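
The migration scripts in this diff now import their drizzle instance from ../../db/sqlite rather than a shared db module, leaving room for a PostgreSQL-backed counterpart. Below is a minimal sketch of how the two drivers could be constructed and selected; the DB_TYPE switch, the DATABASE_URL variable, and the connection settings are illustrative assumptions, not code from this commit.

import Database from "better-sqlite3";
import { drizzle as drizzleSqlite } from "drizzle-orm/better-sqlite3";
import { drizzle as drizzlePg } from "drizzle-orm/node-postgres";
import { Pool } from "pg";

// Hypothetical selector: choose the driver from an assumed DB_TYPE environment variable.
export const db =
    process.env.DB_TYPE === "postgres"
        ? drizzlePg(new Pool({ connectionString: process.env.DATABASE_URL }))
        : drizzleSqlite(new Database("config/db/db.sqlite"));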

View File

@@ -0,0 +1,5 @@
export default async function migration() {
console.log("Running setup script 1.0.0-beta.1...");
// SQL operations would go here in ts format
console.log("Done.");
}
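
The placeholder comment above refers to the pattern the later setup scripts in this diff use: drizzle statements run inside a transaction. A minimal sketch of such a TS-format operation (table and column names are illustrative only):

import { db } from "../../db/sqlite";
import { sql } from "drizzle-orm";

// Illustrative only: mirrors the trx.run(sql`...`) pattern of the later setup scripts.
db.transaction((trx) => {
    trx.run(sql`ALTER TABLE 'exampleTable' ADD 'exampleColumn' integer;`);
});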

View File

@@ -0,0 +1,45 @@
import { configFilePath1, configFilePath2 } from "@server/lib/consts";
import fs from "fs";
import yaml from "js-yaml";
export default async function migration() {
console.log("Running setup script 1.0.0-beta.10...");
try {
// Determine which config file exists
const filePaths = [configFilePath1, configFilePath2];
let filePath = "";
for (const path of filePaths) {
if (fs.existsSync(path)) {
filePath = path;
break;
}
}
if (!filePath) {
throw new Error(
`No config file found (expected config.yml or config.yaml).`
);
}
// Read and parse the YAML file
let rawConfig: any;
const fileContents = fs.readFileSync(filePath, "utf8");
rawConfig = yaml.load(fileContents);
delete rawConfig.server.secure_cookies;
// Write the updated YAML back to the file
const updatedYaml = yaml.dump(rawConfig);
fs.writeFileSync(filePath, updatedYaml, "utf8");
console.log(`Removed deprecated config option: secure_cookies.`);
} catch (e) {
console.log(
`Unable to remove deprecated config option: secure_cookies. Error: ${e}`
);
return;
}
console.log("Done.");
}

View File

@@ -0,0 +1,62 @@
import { db } from "../../db/sqlite";
import { configFilePath1, configFilePath2 } from "@server/lib/consts";
import { sql } from "drizzle-orm";
import fs from "fs";
import yaml from "js-yaml";
export default async function migration() {
console.log("Running setup script 1.0.0-beta.12...");
try {
// Determine which config file exists
const filePaths = [configFilePath1, configFilePath2];
let filePath = "";
for (const path of filePaths) {
if (fs.existsSync(path)) {
filePath = path;
break;
}
}
if (!filePath) {
throw new Error(
`No config file found (expected config.yml or config.yaml).`
);
}
// Read and parse the YAML file
let rawConfig: any;
const fileContents = fs.readFileSync(filePath, "utf8");
rawConfig = yaml.load(fileContents);
if (!rawConfig.flags) {
rawConfig.flags = {};
}
rawConfig.flags.allow_base_domain_resources = true;
// Write the updated YAML back to the file
const updatedYaml = yaml.dump(rawConfig);
fs.writeFileSync(filePath, updatedYaml, "utf8");
console.log(`Added new config option: allow_base_domain_resources`);
} catch (e) {
console.log(
`Unable to add new config option: allow_base_domain_resources. This is not critical.`
);
console.error(e);
}
try {
db.transaction((trx) => {
trx.run(sql`ALTER TABLE 'resources' ADD 'isBaseDomain' integer;`);
});
console.log(`Added new column: isBaseDomain`);
} catch (e) {
console.log("Unable to add new column: isBaseDomain");
throw e;
}
console.log("Done.");
}

View File

@@ -0,0 +1,33 @@
import { db } from "../../db/sqlite";
import { sql } from "drizzle-orm";
const version = "1.0.0-beta.13";
export default async function migration() {
console.log(`Running setup script ${version}...`);
try {
db.transaction((trx) => {
trx.run(sql`CREATE TABLE resourceRules (
ruleId integer PRIMARY KEY AUTOINCREMENT NOT NULL,
resourceId integer NOT NULL,
priority integer NOT NULL,
enabled integer DEFAULT true NOT NULL,
action text NOT NULL,
match text NOT NULL,
value text NOT NULL,
FOREIGN KEY (resourceId) REFERENCES resources(resourceId) ON UPDATE no action ON DELETE cascade
);`);
trx.run(
sql`ALTER TABLE resources ADD applyRules integer DEFAULT false NOT NULL;`
);
});
console.log(`Added new table and column: resourceRules, applyRules`);
} catch (e) {
console.log("Unable to add new table and column: resourceRules, applyRules");
throw e;
}
console.log(`${version} migration complete`);
}

View File

@@ -0,0 +1,129 @@
import { db } from "../../db/sqlite";
import { configFilePath1, configFilePath2 } from "@server/lib/consts";
import fs from "fs";
import yaml from "js-yaml";
import { sql } from "drizzle-orm";
import { domains, orgDomains, resources } from "@server/db";
const version = "1.0.0-beta.15";
export default async function migration() {
console.log(`Running setup script ${version}...`);
let domain = "";
try {
// Determine which config file exists
const filePaths = [configFilePath1, configFilePath2];
let filePath = "";
for (const path of filePaths) {
if (fs.existsSync(path)) {
filePath = path;
break;
}
}
if (!filePath) {
throw new Error(
`No config file found (expected config.yml or config.yaml).`
);
}
// Read and parse the YAML file
let rawConfig: any;
const fileContents = fs.readFileSync(filePath, "utf8");
rawConfig = yaml.load(fileContents);
const baseDomain = rawConfig.app.base_domain;
const certResolver = rawConfig.traefik.cert_resolver;
const preferWildcardCert = rawConfig.traefik.prefer_wildcard_cert;
delete rawConfig.traefik.prefer_wildcard_cert;
delete rawConfig.traefik.cert_resolver;
delete rawConfig.app.base_domain;
rawConfig.domains = {
domain1: {
base_domain: baseDomain
}
};
if (certResolver) {
rawConfig.domains.domain1.cert_resolver = certResolver;
}
if (preferWildcardCert) {
rawConfig.domains.domain1.prefer_wildcard_cert = preferWildcardCert;
}
// Write the updated YAML back to the file
const updatedYaml = yaml.dump(rawConfig);
fs.writeFileSync(filePath, updatedYaml, "utf8");
domain = baseDomain;
console.log(`Moved base_domain to new domains section`);
} catch (e) {
console.log(
`Unable to migrate config file and move base_domain to domains. Error: ${e}`
);
throw e;
}
try {
db.transaction((trx) => {
trx.run(sql`CREATE TABLE 'domains' (
'domainId' text PRIMARY KEY NOT NULL,
'baseDomain' text NOT NULL,
'configManaged' integer DEFAULT false NOT NULL
);`);
trx.run(sql`CREATE TABLE 'orgDomains' (
'orgId' text NOT NULL,
'domainId' text NOT NULL,
FOREIGN KEY ('orgId') REFERENCES 'orgs'('orgId') ON UPDATE no action ON DELETE cascade,
FOREIGN KEY ('domainId') REFERENCES 'domains'('domainId') ON UPDATE no action ON DELETE cascade
);`);
trx.run(
sql`ALTER TABLE 'resources' ADD 'domainId' text REFERENCES domains(domainId);`
);
trx.run(sql`ALTER TABLE 'orgs' DROP COLUMN 'domain';`);
});
console.log(`Migrated database schema`);
} catch (e) {
console.log("Unable to migrate database schema");
throw e;
}
try {
await db.transaction(async (trx) => {
await trx
.insert(domains)
.values({
domainId: "domain1",
baseDomain: domain,
configManaged: true
})
.execute();
await trx.update(resources).set({ domainId: "domain1" });
const existingOrgDomains = await trx.select().from(orgDomains);
for (const orgDomain of existingOrgDomains) {
await trx
.insert(orgDomains)
.values({ orgId: orgDomain.orgId, domainId: "domain1" })
.execute();
}
});
console.log(`Updated resources table with new domainId`);
} catch (e) {
console.log(
`Unable to update resources table with new domainId. Error: ${e}`
);
return;
}
console.log(`${version} migration complete`);
}

View File

@@ -0,0 +1,59 @@
import { configFilePath1, configFilePath2 } from "@server/lib/consts";
import fs from "fs";
import yaml from "js-yaml";
export default async function migration() {
console.log("Running setup script 1.0.0-beta.2...");
// Determine which config file exists
const filePaths = [configFilePath1, configFilePath2];
let filePath = "";
for (const path of filePaths) {
if (fs.existsSync(path)) {
filePath = path;
break;
}
}
if (!filePath) {
throw new Error(
`No config file found (expected config.yml or config.yaml).`
);
}
// Read and parse the YAML file
let rawConfig: any;
const fileContents = fs.readFileSync(filePath, "utf8");
rawConfig = yaml.load(fileContents);
// Validate the structure
if (!rawConfig.app || !rawConfig.app.base_url) {
throw new Error(`Invalid config file: app.base_url is missing.`);
}
// Move base_url to dashboard_url and calculate base_domain
const baseUrl = rawConfig.app.base_url;
rawConfig.app.dashboard_url = baseUrl;
rawConfig.app.base_domain = getBaseDomain(baseUrl);
// Remove the old base_url
delete rawConfig.app.base_url;
// Write the updated YAML back to the file
const updatedYaml = yaml.dump(rawConfig);
fs.writeFileSync(filePath, updatedYaml, "utf8");
console.log("Done.");
}
function getBaseDomain(url: string): string {
const newUrl = new URL(url);
const hostname = newUrl.hostname;
const parts = hostname.split(".");
if (parts.length <= 2) {
return parts.join(".");
}
return parts.slice(-2).join(".");
}
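
getBaseDomain keeps only the last two labels of the hostname, so multi-part public suffixes are not special-cased:

getBaseDomain("https://app.example.com");     // "example.com"
getBaseDomain("https://example.com");         // "example.com"
getBaseDomain("https://proxy.example.co.uk"); // "co.uk" (naive two-label split)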

View File

@@ -0,0 +1,42 @@
import { configFilePath1, configFilePath2 } from "@server/lib/consts";
import fs from "fs";
import yaml from "js-yaml";
export default async function migration() {
console.log("Running setup script 1.0.0-beta.3...");
// Determine which config file exists
const filePaths = [configFilePath1, configFilePath2];
let filePath = "";
for (const path of filePaths) {
if (fs.existsSync(path)) {
filePath = path;
break;
}
}
if (!filePath) {
throw new Error(
`No config file found (expected config.yml or config.yaml).`
);
}
// Read and parse the YAML file
let rawConfig: any;
const fileContents = fs.readFileSync(filePath, "utf8");
rawConfig = yaml.load(fileContents);
// Validate the structure
if (!rawConfig.gerbil) {
throw new Error(`Invalid config file: gerbil is missing.`);
}
// Update the config
rawConfig.gerbil.site_block_size = 29;
// Write the updated YAML back to the file
const updatedYaml = yaml.dump(rawConfig);
fs.writeFileSync(filePath, updatedYaml, "utf8");
console.log("Done.");
}

View File

@@ -0,0 +1,101 @@
import { APP_PATH, configFilePath1, configFilePath2 } from "@server/lib/consts";
import fs from "fs";
import yaml from "js-yaml";
import path from "path";
import { z } from "zod";
import { fromZodError } from "zod-validation-error";
export default async function migration() {
console.log("Running setup script 1.0.0-beta.5...");
// Determine which config file exists
const filePaths = [configFilePath1, configFilePath2];
let filePath = "";
for (const path of filePaths) {
if (fs.existsSync(path)) {
filePath = path;
break;
}
}
if (!filePath) {
throw new Error(
`No config file found (expected config.yml or config.yaml).`
);
}
// Read and parse the YAML file
let rawConfig: any;
const fileContents = fs.readFileSync(filePath, "utf8");
rawConfig = yaml.load(fileContents);
// Validate the structure
if (!rawConfig.server) {
throw new Error(`Invalid config file: server is missing.`);
}
// Update the config
rawConfig.server.resource_access_token_param = "p_token";
// Write the updated YAML back to the file
const updatedYaml = yaml.dump(rawConfig);
fs.writeFileSync(filePath, updatedYaml, "utf8");
// then try to update badger in traefik config
try {
const traefikPath = path.join(
APP_PATH,
"traefik",
"traefik_config.yml"
);
// read the traefik file
// look for the badger middleware
// set the version to v1.0.0-beta.2
/*
experimental:
plugins:
badger:
moduleName: "github.com/fosrl/badger"
version: "v1.0.0-beta.2"
*/
const schema = z.object({
experimental: z.object({
plugins: z.object({
badger: z.object({
moduleName: z.string(),
version: z.string()
})
})
})
});
const traefikFileContents = fs.readFileSync(traefikPath, "utf8");
const traefikConfig = yaml.load(traefikFileContents) as any;
const parsedConfig = schema.safeParse(traefikConfig);
if (!parsedConfig.success) {
throw new Error(fromZodError(parsedConfig.error).toString());
}
traefikConfig.experimental.plugins.badger.version = "v1.0.0-beta.2";
const updatedTraefikYaml = yaml.dump(traefikConfig);
fs.writeFileSync(traefikPath, updatedTraefikYaml, "utf8");
console.log(
"Updated the version of Badger in your Traefik configuration to v1.0.0-beta.2."
);
} catch (e) {
console.log(
"We were unable to update the version of Badger in your Traefik configuration. Please update it manually."
);
console.error(e);
}
console.log("Done.");
}

View File

@@ -0,0 +1,52 @@
import { configFilePath1, configFilePath2 } from "@server/lib/consts";
import fs from "fs";
import yaml from "js-yaml";
export default async function migration() {
console.log("Running setup script 1.0.0-beta.6...");
try {
// Determine which config file exists
const filePaths = [configFilePath1, configFilePath2];
let filePath = "";
for (const path of filePaths) {
if (fs.existsSync(path)) {
filePath = path;
break;
}
}
if (!filePath) {
throw new Error(
`No config file found (expected config.yml or config.yaml).`
);
}
// Read and parse the YAML file
let rawConfig: any;
const fileContents = fs.readFileSync(filePath, "utf8");
rawConfig = yaml.load(fileContents);
// Validate the structure
if (!rawConfig.server) {
throw new Error(`Invalid config file: server is missing.`);
}
// Update the config
rawConfig.server.cors = {
origins: [rawConfig.app.dashboard_url],
methods: ["GET", "POST", "PUT", "DELETE", "PATCH"],
headers: ["X-CSRF-Token", "Content-Type"],
credentials: false
};
// Write the updated YAML back to the file
const updatedYaml = yaml.dump(rawConfig);
fs.writeFileSync(filePath, updatedYaml, "utf8");
} catch (error) {
console.log("We were unable to add CORS to your config file. Please add it manually.")
console.error(error)
}
console.log("Done.");
}

View File

@@ -0,0 +1,291 @@
import { db } from "../../db/sqlite";
import {
emailVerificationCodes,
passwordResetTokens,
resourceOtp,
resources,
resourceWhitelist,
targets,
userInvites,
users
} from "../../db/sqlite";
import { APP_PATH, configFilePath1, configFilePath2 } from "@server/lib/consts";
import { eq, sql } from "drizzle-orm";
import fs from "fs";
import yaml from "js-yaml";
import path from "path";
import { z } from "zod";
import { fromZodError } from "zod-validation-error";
export default async function migration() {
console.log("Running setup script 1.0.0-beta.9...");
// make dir config/db/backups
const appPath = APP_PATH;
const dbDir = path.join(appPath, "db");
const backupsDir = path.join(dbDir, "backups");
// check if the backups directory exists and create it if it doesn't
if (!fs.existsSync(backupsDir)) {
fs.mkdirSync(backupsDir, { recursive: true });
}
// copy the db.sqlite file to backups
// add the date to the filename
const date = new Date();
const dateString = `${date.getFullYear()}-${date.getMonth()}-${date.getDate()}_${date.getHours()}-${date.getMinutes()}-${date.getSeconds()}`;
const dbPath = path.join(dbDir, "db.sqlite");
const backupPath = path.join(backupsDir, `db_${dateString}.sqlite`);
fs.copyFileSync(dbPath, backupPath);
await db.transaction(async (trx) => {
try {
// Determine which config file exists
const filePaths = [configFilePath1, configFilePath2];
let filePath = "";
for (const path of filePaths) {
if (fs.existsSync(path)) {
filePath = path;
break;
}
}
if (!filePath) {
throw new Error(
`No config file found (expected config.yml or config.yaml).`
);
}
// Read and parse the YAML file
let rawConfig: any;
const fileContents = fs.readFileSync(filePath, "utf8");
rawConfig = yaml.load(fileContents);
rawConfig.server.resource_session_request_param =
"p_session_request";
rawConfig.server.session_cookie_name = "p_session_token"; // rename to prevent conflicts
delete rawConfig.server.resource_session_cookie_name;
if (!rawConfig.flags) {
rawConfig.flags = {};
}
rawConfig.flags.allow_raw_resources = true;
// Write the updated YAML back to the file
const updatedYaml = yaml.dump(rawConfig);
fs.writeFileSync(filePath, updatedYaml, "utf8");
} catch (e) {
console.log(
`Failed to add resource_session_request_param to config. Please add it manually. https://docs.fossorial.io/Pangolin/Configuration/config`
);
trx.rollback();
return;
}
try {
const traefikPath = path.join(
APP_PATH,
"traefik",
"traefik_config.yml"
);
// Define schema for traefik config validation
const schema = z.object({
entryPoints: z
.object({
websecure: z
.object({
address: z.string(),
transport: z
.object({
respondingTimeouts: z.object({
readTimeout: z.string()
})
})
.optional()
})
.optional()
})
.optional(),
experimental: z.object({
plugins: z.object({
badger: z.object({
moduleName: z.string(),
version: z.string()
})
})
})
});
const traefikFileContents = fs.readFileSync(traefikPath, "utf8");
const traefikConfig = yaml.load(traefikFileContents) as any;
let parsedConfig: any = schema.safeParse(traefikConfig);
if (parsedConfig.success) {
// Ensure websecure entrypoint exists
if (traefikConfig.entryPoints?.websecure) {
// Add transport configuration
traefikConfig.entryPoints.websecure.transport = {
respondingTimeouts: {
readTimeout: "30m"
}
};
}
traefikConfig.experimental.plugins.badger.version =
"v1.0.0-beta.3";
const updatedTraefikYaml = yaml.dump(traefikConfig);
fs.writeFileSync(traefikPath, updatedTraefikYaml, "utf8");
console.log("Updated Badger version in Traefik config.");
} else {
console.log(fromZodError(parsedConfig.error));
console.log(
"We were unable to update the version of Badger in your Traefik configuration. Please update it manually to at least v1.0.0-beta.3. https://github.com/fosrl/badger"
);
}
} catch (e) {
console.log(
"We were unable to update the version of Badger in your Traefik configuration. Please update it manually to at least v1.0.0-beta.3. https://github.com/fosrl/badger"
);
trx.rollback();
return;
}
try {
const traefikPath = path.join(
APP_PATH,
"traefik",
"dynamic_config.yml"
);
const schema = z.object({
http: z.object({
middlewares: z.object({
"redirect-to-https": z.object({
redirectScheme: z.object({
scheme: z.string(),
permanent: z.boolean()
})
})
})
})
});
const traefikFileContents = fs.readFileSync(traefikPath, "utf8");
const traefikConfig = yaml.load(traefikFileContents) as any;
let parsedConfig: any = schema.safeParse(traefikConfig);
if (parsedConfig.success) {
// delete permanent from redirect-to-https middleware
delete traefikConfig.http.middlewares["redirect-to-https"].redirectScheme.permanent;
const updatedTraefikYaml = yaml.dump(traefikConfig);
fs.writeFileSync(traefikPath, updatedTraefikYaml, "utf8");
console.log("Deleted permanent from redirect-to-https middleware.");
} else {
console.log(fromZodError(parsedConfig.error));
console.log(
"We were unable to delete the permanent field from the redirect-to-https middleware in your Traefik configuration. Please delete it manually."
);
}
} catch (e) {
console.log(
"We were unable to delete the permanent field from the redirect-to-https middleware in your Traefik configuration. Please delete it manually. Note that this is not a critical change but recommended."
);
}
trx.run(sql`UPDATE ${users} SET email = LOWER(email);`);
trx.run(
sql`UPDATE ${emailVerificationCodes} SET email = LOWER(email);`
);
trx.run(sql`UPDATE ${passwordResetTokens} SET email = LOWER(email);`);
trx.run(sql`UPDATE ${userInvites} SET email = LOWER(email);`);
trx.run(sql`UPDATE ${resourceWhitelist} SET email = LOWER(email);`);
trx.run(sql`UPDATE ${resourceOtp} SET email = LOWER(email);`);
const resourcesAll = await trx
.select({
resourceId: resources.resourceId,
fullDomain: resources.fullDomain,
subdomain: resources.subdomain
})
.from(resources);
trx.run(sql`DROP INDEX resources_fullDomain_unique;`);
trx.run(sql`ALTER TABLE resources
DROP COLUMN fullDomain;
`);
trx.run(sql`ALTER TABLE resources
DROP COLUMN subdomain;
`);
trx.run(sql`ALTER TABLE resources
ADD COLUMN fullDomain TEXT;
`);
trx.run(sql`ALTER TABLE resources
ADD COLUMN subdomain TEXT;
`);
trx.run(sql`ALTER TABLE resources
ADD COLUMN http INTEGER DEFAULT true NOT NULL;
`);
trx.run(sql`ALTER TABLE resources
ADD COLUMN protocol TEXT DEFAULT 'tcp' NOT NULL;
`);
trx.run(sql`ALTER TABLE resources
ADD COLUMN proxyPort INTEGER;
`);
// write the new fullDomain and subdomain values back to the database
for (const resource of resourcesAll) {
await trx
.update(resources)
.set({
fullDomain: resource.fullDomain,
subdomain: resource.subdomain
})
.where(eq(resources.resourceId, resource.resourceId));
}
const targetsAll = await trx
.select({
targetId: targets.targetId,
method: targets.method
})
.from(targets);
trx.run(sql`ALTER TABLE targets
DROP COLUMN method;
`);
trx.run(sql`ALTER TABLE targets
DROP COLUMN protocol;
`);
trx.run(sql`ALTER TABLE targets
ADD COLUMN method TEXT;
`);
// write the new method and protocol values back to the database
for (const target of targetsAll) {
await trx
.update(targets)
.set({
method: target.method
})
.where(eq(targets.targetId, target.targetId));
}
trx.run(
sql`ALTER TABLE 'resourceSessions' ADD 'isRequestToken' integer;`
);
trx.run(
sql`ALTER TABLE 'resourceSessions' ADD 'userSessionId' text REFERENCES session(id);`
);
});
console.log("Done.");
}

View File

@@ -0,0 +1,57 @@
import { APP_PATH } from "@server/lib/consts";
import fs from "fs";
import yaml from "js-yaml";
import path from "path";
import { z } from "zod";
import { fromZodError } from "zod-validation-error";
const version = "1.0.0";
export default async function migration() {
console.log(`Running setup script ${version}...`);
try {
const traefikPath = path.join(
APP_PATH,
"traefik",
"traefik_config.yml"
);
const schema = z.object({
experimental: z.object({
plugins: z.object({
badger: z.object({
moduleName: z.string(),
version: z.string()
})
})
})
});
const traefikFileContents = fs.readFileSync(traefikPath, "utf8");
const traefikConfig = yaml.load(traefikFileContents) as any;
const parsedConfig = schema.safeParse(traefikConfig);
if (!parsedConfig.success) {
throw new Error(fromZodError(parsedConfig.error).toString());
}
traefikConfig.experimental.plugins.badger.version = "v1.0.0";
const updatedTraefikYaml = yaml.dump(traefikConfig);
fs.writeFileSync(traefikPath, updatedTraefikYaml, "utf8");
console.log(
"Updated the version of Badger in your Traefik configuration to 1.0.0"
);
} catch (e) {
console.log(
"We were unable to update the version of Badger in your Traefik configuration. Please update it manually."
);
console.error(e);
}
console.log(`${version} migration complete`);
}

View File

@@ -0,0 +1,28 @@
import { db } from "../../db/sqlite";
import { sql } from "drizzle-orm";
const version = "1.1.0";
export default async function migration() {
console.log(`Running setup script ${version}...`);
try {
db.transaction((trx) => {
trx.run(sql`CREATE TABLE 'supporterKey' (
'keyId' integer PRIMARY KEY AUTOINCREMENT NOT NULL,
'key' text NOT NULL,
'githubUsername' text NOT NULL,
'phrase' text,
'tier' text,
'valid' integer DEFAULT false NOT NULL
);`);
});
console.log(`Migrated database schema`);
} catch (e) {
console.log("Unable to migrate database schema");
throw e;
}
console.log(`${version} migration complete`);
}

View File

@@ -0,0 +1,115 @@
import { db } from "../../db/sqlite";
import { APP_PATH, configFilePath1, configFilePath2 } from "@server/lib/consts";
import { sql } from "drizzle-orm";
import fs from "fs";
import yaml from "js-yaml";
import path from "path";
import { z } from "zod";
import { fromZodError } from "zod-validation-error";
const version = "1.2.0";
export default async function migration() {
console.log(`Running setup script ${version}...`);
try {
db.transaction((trx) => {
trx.run(
sql`ALTER TABLE 'resources' ADD 'enabled' integer DEFAULT true NOT NULL;`
);
});
console.log(`Migrated database schema`);
} catch (e) {
console.log("Unable to migrate database schema");
throw e;
}
try {
// Determine which config file exists
const filePaths = [configFilePath1, configFilePath2];
let filePath = "";
for (const path of filePaths) {
if (fs.existsSync(path)) {
filePath = path;
break;
}
}
if (!filePath) {
throw new Error(
`No config file found (expected config.yml or config.yaml).`
);
}
// Read and parse the YAML file
let rawConfig: any;
const fileContents = fs.readFileSync(filePath, "utf8");
rawConfig = yaml.load(fileContents);
if (!rawConfig.flags) {
rawConfig.flags = {};
}
rawConfig.server.resource_access_token_headers = {
id: "P-Access-Token-Id",
token: "P-Access-Token"
};
// Write the updated YAML back to the file
const updatedYaml = yaml.dump(rawConfig);
fs.writeFileSync(filePath, updatedYaml, "utf8");
console.log(`Added new config option: resource_access_token_headers`);
} catch (e) {
console.log(
`Unable to add new config option: resource_access_token_headers. Please add it manually. https://docs.fossorial.io/Pangolin/Configuration/config`
);
console.error(e);
}
try {
const traefikPath = path.join(
APP_PATH,
"traefik",
"traefik_config.yml"
);
const schema = z.object({
experimental: z.object({
plugins: z.object({
badger: z.object({
moduleName: z.string(),
version: z.string()
})
})
})
});
const traefikFileContents = fs.readFileSync(traefikPath, "utf8");
const traefikConfig = yaml.load(traefikFileContents) as any;
const parsedConfig = schema.safeParse(traefikConfig);
if (!parsedConfig.success) {
throw new Error(fromZodError(parsedConfig.error).toString());
}
traefikConfig.experimental.plugins.badger.version = "v1.1.0";
const updatedTraefikYaml = yaml.dump(traefikConfig);
fs.writeFileSync(traefikPath, updatedTraefikYaml, "utf8");
console.log(
"Updated the version of Badger in your Traefik configuration to v1.1.0"
);
} catch (e) {
console.log(
"We were unable to update the version of Badger in your Traefik configuration. Please update it manually. Check the release notes for this version for more information."
);
console.error(e);
}
console.log(`${version} migration complete`);
}

View File

@@ -0,0 +1,203 @@
import Database from "better-sqlite3";
import path from "path";
import fs from "fs";
import yaml from "js-yaml";
import { encodeBase32LowerCaseNoPadding } from "@oslojs/encoding";
import { APP_PATH, configFilePath1, configFilePath2 } from "@server/lib/consts";
const version = "1.3.0";
const location = path.join(APP_PATH, "db", "db.sqlite");
export default async function migration() {
console.log(`Running setup script ${version}...`);
const db = new Database(location);
try {
db.pragma("foreign_keys = OFF");
db.transaction(() => {
db.exec(`
CREATE TABLE 'apiKeyActions' (
'apiKeyId' text NOT NULL,
'actionId' text NOT NULL,
FOREIGN KEY ('apiKeyId') REFERENCES 'apiKeys'('apiKeyId') ON UPDATE no action ON DELETE cascade,
FOREIGN KEY ('actionId') REFERENCES 'actions'('actionId') ON UPDATE no action ON DELETE cascade
);
CREATE TABLE 'apiKeyOrg' (
'apiKeyId' text NOT NULL,
'orgId' text NOT NULL,
FOREIGN KEY ('apiKeyId') REFERENCES 'apiKeys'('apiKeyId') ON UPDATE no action ON DELETE cascade,
FOREIGN KEY ('orgId') REFERENCES 'orgs'('orgId') ON UPDATE no action ON DELETE cascade
);
CREATE TABLE 'apiKeys' (
'apiKeyId' text PRIMARY KEY NOT NULL,
'name' text NOT NULL,
'apiKeyHash' text NOT NULL,
'lastChars' text NOT NULL,
'dateCreated' text NOT NULL,
'isRoot' integer DEFAULT false NOT NULL
);
CREATE TABLE 'hostMeta' (
'hostMetaId' text PRIMARY KEY NOT NULL,
'createdAt' integer NOT NULL
);
CREATE TABLE 'idp' (
'idpId' integer PRIMARY KEY AUTOINCREMENT NOT NULL,
'name' text NOT NULL,
'type' text NOT NULL,
'defaultRoleMapping' text,
'defaultOrgMapping' text,
'autoProvision' integer DEFAULT false NOT NULL
);
CREATE TABLE 'idpOidcConfig' (
'idpOauthConfigId' integer PRIMARY KEY AUTOINCREMENT NOT NULL,
'idpId' integer NOT NULL,
'clientId' text NOT NULL,
'clientSecret' text NOT NULL,
'authUrl' text NOT NULL,
'tokenUrl' text NOT NULL,
'identifierPath' text NOT NULL,
'emailPath' text,
'namePath' text,
'scopes' text NOT NULL,
FOREIGN KEY ('idpId') REFERENCES 'idp'('idpId') ON UPDATE no action ON DELETE cascade
);
CREATE TABLE 'idpOrg' (
'idpId' integer NOT NULL,
'orgId' text NOT NULL,
'roleMapping' text,
'orgMapping' text,
FOREIGN KEY ('idpId') REFERENCES 'idp'('idpId') ON UPDATE no action ON DELETE cascade,
FOREIGN KEY ('orgId') REFERENCES 'orgs'('orgId') ON UPDATE no action ON DELETE cascade
);
CREATE TABLE 'licenseKey' (
'licenseKeyId' text PRIMARY KEY NOT NULL,
'instanceId' text NOT NULL,
'token' text NOT NULL
);
CREATE TABLE '__new_user' (
'id' text PRIMARY KEY NOT NULL,
'email' text,
'username' text NOT NULL,
'name' text,
'type' text NOT NULL,
'idpId' integer,
'passwordHash' text,
'twoFactorEnabled' integer DEFAULT false NOT NULL,
'twoFactorSecret' text,
'emailVerified' integer DEFAULT false NOT NULL,
'dateCreated' text NOT NULL,
'serverAdmin' integer DEFAULT false NOT NULL,
FOREIGN KEY ('idpId') REFERENCES 'idp'('idpId') ON UPDATE no action ON DELETE cascade
);
INSERT INTO '__new_user'(
"id", "email", "username", "name", "type", "idpId", "passwordHash",
"twoFactorEnabled", "twoFactorSecret", "emailVerified", "dateCreated", "serverAdmin"
)
SELECT
"id",
"email",
COALESCE("email", 'unknown'),
NULL,
'internal',
NULL,
"passwordHash",
"twoFactorEnabled",
"twoFactorSecret",
"emailVerified",
"dateCreated",
"serverAdmin"
FROM 'user';
DROP TABLE 'user';
ALTER TABLE '__new_user' RENAME TO 'user';
ALTER TABLE 'resources' ADD 'stickySession' integer DEFAULT false NOT NULL;
ALTER TABLE 'resources' ADD 'tlsServerName' text;
ALTER TABLE 'resources' ADD 'setHostHeader' text;
CREATE TABLE 'exitNodes_new' (
'exitNodeId' integer PRIMARY KEY AUTOINCREMENT NOT NULL,
'name' text NOT NULL,
'address' text NOT NULL,
'endpoint' text NOT NULL,
'publicKey' text NOT NULL,
'listenPort' integer NOT NULL,
'reachableAt' text
);
INSERT INTO 'exitNodes_new' (
'exitNodeId', 'name', 'address', 'endpoint', 'publicKey', 'listenPort', 'reachableAt'
)
SELECT
exitNodeId,
name,
address,
endpoint,
pubicKey,
listenPort,
reachableAt
FROM exitNodes;
DROP TABLE 'exitNodes';
ALTER TABLE 'exitNodes_new' RENAME TO 'exitNodes';
`);
})(); // <-- executes the transaction immediately
db.pragma("foreign_keys = ON");
console.log(`Migrated database schema`);
} catch (e) {
console.log("Unable to migrate database schema");
throw e;
}
// Update config file
try {
const filePaths = [configFilePath1, configFilePath2];
let filePath = "";
for (const path of filePaths) {
if (fs.existsSync(path)) {
filePath = path;
break;
}
}
if (!filePath) {
throw new Error(
`No config file found (expected config.yml or config.yaml).`
);
}
const fileContents = fs.readFileSync(filePath, "utf8");
let rawConfig: any = yaml.load(fileContents);
if (!rawConfig.server.secret) {
rawConfig.server.secret = generateIdFromEntropySize(32);
}
const updatedYaml = yaml.dump(rawConfig);
fs.writeFileSync(filePath, updatedYaml, "utf8");
console.log(`Added new config option: server.secret`);
} catch (e) {
console.log(
`Unable to add new config option: server.secret. Please add it manually.`
);
console.error(e);
}
console.log(`${version} migration complete`);
}
function generateIdFromEntropySize(size: number): string {
const buffer = crypto.getRandomValues(new Uint8Array(size));
return encodeBase32LowerCaseNoPadding(buffer);
}
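
generateIdFromEntropySize(32) draws 32 random bytes from the Web Crypto global (available by default in recent Node versions) and base32-encodes them, so the generated server.secret is a 52-character lowercase string:

// Example usage; the value differs per run, the length does not.
const secret = generateIdFromEntropySize(32);
console.log(secret.length); // 52 (256 bits / 5 bits per base32 character, no padding)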