Mirror of https://github.com/fosrl/pangolin.git
Store headers as json
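Note (not part of the commit; a sketch of the format change the hunks below make, with made-up header values): the resources.headers column moves from a single comma-separated string to a JSON-encoded array of { name, value } objects.

// Hypothetical before/after of a stored headers value.
// Old format: comma-separated "Name: value" pairs in one string.
const oldHeaders = "X-Custom-Header: value1, X-Other-Header: value2";
// New format: a JSON array of { name, value } objects, written with JSON.stringify.
const newHeaders = JSON.stringify([
    { name: "X-Custom-Header", value: "value1" },
    { name: "X-Other-Header", value: "value2" }
]);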
@@ -42,7 +42,9 @@ async function query(resourceId?: number, niceId?: string, orgId?: string) {
     }
 }
 
-export type GetResourceResponse = NonNullable<Awaited<ReturnType<typeof query>>>;
+export type GetResourceResponse = Omit<NonNullable<Awaited<ReturnType<typeof query>>>, 'headers'> & {
+    headers: { name: string; value: string }[] | null;
+};
 
 registry.registerPath({
     method: "get",
@@ -99,7 +101,10 @@ export async function getResource(
     }
 
     return response<GetResourceResponse>(res, {
-        data: resource,
+        data: {
+            ...resource,
+            headers: resource.headers ? JSON.parse(resource.headers) : resource.headers
+        },
         success: true,
         error: false,
         message: "Resource retrieved successfully",
@@ -47,7 +47,7 @@ const updateHttpResourceBodySchema = z
         tlsServerName: z.string().nullable().optional(),
         setHostHeader: z.string().nullable().optional(),
         skipToIdpId: z.number().int().positive().nullable().optional(),
-        headers: z.string().nullable().optional()
+        headers: z.array(z.object({ name: z.string(), value: z.string() })).optional(),
     })
     .strict()
     .refine((data) => Object.keys(data).length > 0, {
@@ -86,18 +86,6 @@ const updateHttpResourceBodySchema = z
                 "Invalid custom Host Header value. Use domain name format, or save empty to unset custom Host Header."
         }
     )
-    .refine(
-        (data) => {
-            if (data.headers) {
-                return validateHeaders(data.headers);
-            }
-            return true;
-        },
-        {
-            message:
-                "Invalid headers format. Use comma-separated format: 'Header-Name: value, Another-Header: another-value'. Header values cannot contain colons."
-        }
-    );
 
 export type UpdateResourceResponse = Resource;
 
@@ -292,9 +280,14 @@ async function updateHttpResource(
         updateData.subdomain = finalSubdomain;
     }
 
+    let headers = null;
+    if (updateData.headers) {
+        headers = JSON.stringify(updateData.headers);
+    }
+
     const updatedResource = await db
         .update(resources)
-        .set({ ...updateData })
+        .set({ ...updateData, headers })
         .where(eq(resources.resourceId, resource.resourceId))
         .returning();
 
@@ -306,22 +306,25 @@ export async function getTraefikConfig(
         ...additionalMiddlewares
     ];
 
-    if (
-        resource.headers ||
-        resource.setHostHeader
-    ) {
+    if (resource.headers || resource.setHostHeader) {
         // if there are headers, parse them into an object
         const headersObj: { [key: string]: string } = {};
-        const headersArr = resource.headers?.split(",");
-        if (headersArr && headersArr.length > 0) {
-            for (const header of headersArr) {
-                const [key, value] = header
-                    .split(":")
-                    .map((s: string) => s.trim());
-                if (key && value) {
-                    headersObj[key] = value;
-                }
-            }
-        }
+        if (resource.headers) {
+            let headersArr: { name: string; value: string }[] = [];
+            try {
+                headersArr = JSON.parse(resource.headers) as {
+                    name: string;
+                    value: string;
+                }[];
+            } catch (e) {
+                logger.warn(
+                    `Failed to parse headers for resource ${resource.resourceId}: ${e}`
+                );
+            }
+
+            headersArr.forEach((header) => {
+                headersObj[header.name] = header.value;
+            });
+        }
 
         if (resource.setHostHeader) {
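For illustration only (not in the diff; values are hypothetical): the new Traefik path expects the stored value to be a JSON array and flattens it into a name-to-value map.

// A stored value as written by the new update/migration code.
const stored = '[{"name":"X-Frame-Options","value":"DENY"},{"name":"X-Robots-Tag","value":"noindex"}]';

const headersObj: { [key: string]: string } = {};
const parsed = JSON.parse(stored) as { name: string; value: string }[];
parsed.forEach((header) => {
    headersObj[header.name] = header.value;
});
// headersObj is now { "X-Frame-Options": "DENY", "X-Robots-Tag": "noindex" }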

server/setup/scriptsPg/1.10.1.ts (new file, 49 lines)
@@ -0,0 +1,49 @@
import { db } from "@server/db/pg/driver";
import { sql } from "drizzle-orm";
import { __DIRNAME, APP_PATH } from "@server/lib/consts";
import { readFileSync } from "fs";
import path, { join } from "path";

const version = "1.10.1";

export default async function migration() {
    console.log(`Running setup script ${version}...`);

    try {
        const resources = await db.execute(sql`
            SELECT * FROM "resources"
        `);

        await db.execute(sql`BEGIN`);

        for (const resource of resources.rows) {
            const headers = resource.headers as string | null;
            if (headers && headers !== "") {
                // let's convert it to JSON
                // first split at commas
                const headersArray = headers
                    .split(",")
                    .map((header: string) => {
                        const [name, ...valueParts] = header.split(":");
                        const value = valueParts.join(":").trim();
                        return { name: name.trim(), value };
                    });

                await db.execute(sql`
                    UPDATE "resources" SET "headers" = ${JSON.stringify(headersArray)} WHERE "resourceId" = ${resource.resourceId}
                `);

                console.log(
                    `Updated resource ${resource.resourceId} headers to JSON format`
                );
            }
        }

        await db.execute(sql`COMMIT`);
        console.log(`Migrated database`);
    } catch (e) {
        await db.execute(sql`ROLLBACK`);
        console.log("Failed to migrate db:", e);
        throw e;
    }
}
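A quick sanity check on the conversion above (illustrative values, not from the commit): only the first ":" in each entry separates name from value, so colons inside a value survive the migration.

const legacy = "X-Custom: hello, Authorization: Bearer abc:def";
const converted = legacy.split(",").map((header) => {
    const [name, ...valueParts] = header.split(":");
    return { name: name.trim(), value: valueParts.join(":").trim() };
});
// JSON.stringify(converted) ===
// '[{"name":"X-Custom","value":"hello"},{"name":"Authorization","value":"Bearer abc:def"}]'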

@@ -5,16 +5,16 @@ import path from "path";
 const version = "1.10.1";
 
 export default async function migration() {
     console.log(`Running setup script ${version}...`);
 
     const location = path.join(APP_PATH, "db", "db.sqlite");
     const db = new Database(location);
 
     try {
         db.pragma("foreign_keys = OFF");
 
         db.transaction(() => {
             db.exec(`ALTER TABLE "targets" RENAME TO "targets_old";
 --> statement-breakpoint
 CREATE TABLE "targets" (
     "targetId" INTEGER PRIMARY KEY AUTOINCREMENT,
@@ -57,13 +57,43 @@ SELECT
 FROM "targets_old";
 --> statement-breakpoint
 DROP TABLE "targets_old";`);
         })();
 
         db.pragma("foreign_keys = ON");
 
+        const resources = db.prepare("SELECT * FROM resources").all() as Array<{
+            resourceId: number;
+            headers: string | null;
+        }>;
+
+        for (const resource of resources) {
+            const headers = resource.headers;
+            if (headers && headers !== "") {
+                // let's convert it to JSON
+                // first split at commas
+                const headersArray = headers
+                    .split(",")
+                    .map((header: string) => {
+                        const [name, ...valueParts] = header.split(":");
+                        const value = valueParts.join(":").trim();
+                        return { name: name.trim(), value };
+                    });
+
+                db.prepare(
+                    `
+                    UPDATE "resources" SET "headers" = ? WHERE "resourceId" = ?
+                    `
+                ).run(JSON.stringify(headersArray), resource.resourceId);
+
+                console.log(
+                    `Updated resource ${resource.resourceId} headers to JSON format`
+                );
+            }
+        }
+
         console.log(`Migrated database`);
     } catch (e) {
         console.log("Failed to migrate db:", e);
         throw e;
     }
 }