Mirror of https://github.com/ajayyy/SponsorBlockServer.git (synced 2025-12-09 04:57:04 +03:00)
better db dump system
@@ -6,7 +6,7 @@ RUN npm ci && npm run tsc
 FROM node:16-alpine as app
 WORKDIR /usr/src/app
-RUN apk add git
+RUN apk add git postgresql-client
 COPY --from=builder ./node_modules ./node_modules
 COPY --from=builder ./dist ./dist
 COPY ./.git ./.git
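The added postgresql-client package is what puts the psql binary in the runtime image; the dump code further down shells out to it instead of issuing server-side COPY statements. A minimal sketch, assuming a hypothetical startup check that is not part of this commit, to fail fast if the binary is missing:

import { exec } from "child_process";

// Hypothetical check: confirm the psql client installed by postgresql-client
// is on PATH before the dump job relies on it.
exec("psql --version", (error, stdout) => {
    if (error) {
        console.error("psql not found; is postgresql-client installed in the image?");
        process.exit(1);
    }
    console.log(`Database dumps will use ${stdout.trim()}`);
});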
@@ -27,7 +27,7 @@ import { loggerMiddleware } from "./middleware/logger";
 import { corsMiddleware } from "./middleware/cors";
 import { apiCspMiddleware } from "./middleware/apiCsp";
 import { rateLimitMiddleware } from "./middleware/requestRateLimit";
-import dumpDatabase, { appExportPath, redirectLink } from "./routes/dumpDatabase";
+import dumpDatabase, { appExportPath, downloadFile } from "./routes/dumpDatabase";
 import { endpoint as getSegmentInfo } from "./routes/getSegmentInfo";
 import { postClearCache } from "./routes/postClearCache";
 import { addUnlistedVideo } from "./routes/addUnlistedVideo";
@@ -205,7 +205,7 @@ function setupRoutes(router: Router) {
     if (config.postgres?.enabled) {
         router.get("/database", (req, res) => dumpDatabase(req, res, true));
         router.get("/database.json", (req, res) => dumpDatabase(req, res, false));
-        router.get("/database/*", redirectLink);
+        router.get("/database/*", downloadFile);
         router.use("/download", express.static(appExportPath));
     } else {
         router.get("/database.db", function (req: Request, res: Response) {
@@ -82,7 +82,6 @@ addDefaults(config, {
         enabled: false,
         minTimeBetweenMs: 180000,
         appExportPath: "./docker/database-export",
-        postgresExportPath: "/opt/exports",
         tables: [{
             name: "sponsorTimes",
             order: "timeSubmitted"
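With postgresExportPath dropped from the defaults, the dump job only needs a directory the app container itself can write to. A sketch of the resulting dumpDatabase block, using the same default values shown above:

// Sketch: shape of the defaulted dumpDatabase config after this change.
const dumpDatabaseDefaults = {
    enabled: false,
    minTimeBetweenMs: 180000,
    appExportPath: "./docker/database-export",
    tables: [
        { name: "sponsorTimes", order: "timeSubmitted" },
        // ...remaining tables omitted, as in the excerpt above
    ],
};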
@@ -72,7 +72,7 @@ async function initDb(): Promise<void> {
         const tables = config?.dumpDatabase?.tables ?? [];
         const tableNames = tables.map(table => table.name);
         for (const table of tableNames) {
-            const filePath = `${config?.dumpDatabase?.postgresExportPath}/${table}.csv`;
+            const filePath = `${config?.dumpDatabase?.appExportPath}/${table}.csv`;
             await db.prepare("run", `COPY "${table}" FROM '${filePath}' WITH (FORMAT CSV, HEADER true);`);
         }
     }
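Since initDb now reads the seed CSVs from appExportPath, the statement issued for each table interpolates that directory directly. A small sketch of a hypothetical helper (not in the repo) showing the SQL produced for the default config:

// Hypothetical helper illustrating the statement built inside the loop above.
const buildCopyFrom = (table: string, dir: string): string =>
    `COPY "${table}" FROM '${dir}/${table}.csv' WITH (FORMAT CSV, HEADER true);`;

// buildCopyFrom("sponsorTimes", "./docker/database-export") yields:
// COPY "sponsorTimes" FROM './docker/database-export/sponsorTimes.csv' WITH (FORMAT CSV, HEADER true);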
@@ -5,6 +5,7 @@ import { config } from "../config";
 import util from "util";
 import fs from "fs";
 import path from "path";
+import { ChildProcess, exec, ExecOptions, spawn } from "child_process";
 const unlink = util.promisify(fs.unlink);
 
 const ONE_MINUTE = 1000 * 60;
@@ -32,9 +33,19 @@ const licenseHeader = `<p>The API and database follow <a href="https://creativec
 const tables = config?.dumpDatabase?.tables ?? [];
 const MILLISECONDS_BETWEEN_DUMPS = config?.dumpDatabase?.minTimeBetweenMs ?? ONE_MINUTE;
 export const appExportPath = config?.dumpDatabase?.appExportPath ?? "./docker/database-export";
-const postgresExportPath = config?.dumpDatabase?.postgresExportPath ?? "/opt/exports";
 const tableNames = tables.map(table => table.name);
 
+const credentials: ExecOptions = {
+    env: {
+        ...process.env,
+        PGHOST: config.postgres.host,
+        PGPORT: String(config.postgres.port),
+        PGUSER: config.postgres.user,
+        PGPASSWORD: String(config.postgres.password),
+        PGDATABASE: "sponsorTimes",
+    }
+}
+
 interface TableDumpList {
     fileName: string;
     tableName: string;
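Supplying PGHOST, PGPORT, PGUSER, PGPASSWORD, and PGDATABASE through ExecOptions.env lets the spawned psql pick up its connection settings from the standard libpq environment variables, so no flags or connection strings appear on the command line. A minimal sketch of the same credentials object driving a hypothetical connectivity check (not part of this commit):

// Hypothetical smoke test: psql reads its connection settings from the
// environment passed via the credentials ExecOptions above.
exec(`psql -c "SELECT 1;"`, credentials, (error, _stdout, stderr) => {
    if (error) {
        Logger.error(`[dumpDatabase] psql connectivity check failed: ${stderr}`);
    }
});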
@@ -170,7 +181,7 @@ async function getDbVersion(): Promise<number> {
     return row.value;
 }
 
-export async function redirectLink(req: Request, res: Response): Promise<void> {
+export async function downloadFile(req: Request, res: Response): Promise<void> {
     if (!config?.dumpDatabase?.enabled) {
         res.status(404).send("Database dump is disabled");
         return;
@@ -183,7 +194,7 @@ export async function redirectLink(req: Request, res: Response): Promise<void> {
     const file = latestDumpFiles.find((value) => `/database/${value.tableName}.csv` === req.path);
 
     if (file) {
-        res.redirect(`/download/${file.fileName}`);
+        res.sendFile(file.fileName, { root: appExportPath });
     } else {
         res.sendStatus(404);
     }
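res.sendFile with the root option streams the CSV from appExportPath in a single response, where the old handler answered with a redirect into the static /download mount. A sketch of the same call with an explicit error callback, assuming Express's standard sendFile signature, so a missing dump file surfaces as a 404:

// Sketch: same call as above, with an error callback for missing or unreadable files.
res.sendFile(file.fileName, { root: appExportPath }, (err) => {
    if (err && !res.headersSent) res.sendStatus(404);
});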
@@ -210,9 +221,19 @@ async function queueDump(): Promise<void> {
 
     for (const table of tables) {
         const fileName = `${table.name}_${startTime}.csv`;
-        const file = `${postgresExportPath}/${fileName}`;
-        await db.prepare("run", `COPY (SELECT * FROM "${table.name}"${table.order ? ` ORDER BY "${table.order}"` : ``})
-            TO '${file}' WITH (FORMAT CSV, HEADER true);`);
+        const file = `${appExportPath}/${fileName}`;
+        await new Promise<string>((resolve) => {
+            exec(`psql -c "\\copy (SELECT * FROM \\"${table.name}\\"${table.order ? ` ORDER BY \\"${table.order}\\"` : ``})`
+                + ` TO '${file}' WITH (FORMAT CSV, HEADER true);"`, credentials, (error, stdout, stderr) => {
+                if (error) {
+                    Logger.error(`[dumpDatabase] Failed to dump ${table.name} to ${file} due to ${stderr}`);
+                }
+
+                resolve(error ? stderr : stdout);
+            });
+        })
+
         dumpFiles.push({
             fileName,
             tableName: table.name,
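Because \copy is executed by the psql client rather than by the Postgres server, the CSV is written on the app container's filesystem, which is why the target path moved from postgresExportPath to appExportPath. A minimal sketch of the same invocation using util.promisify(exec), under the assumption that rejecting on a non-zero exit code (instead of logging and resolving with stderr, as above) is acceptable:

import util from "util";
import { exec } from "child_process";

const execAsync = util.promisify(exec);

// Sketch: equivalent client-side \copy dump, promisified; rejects on failure.
async function dumpTable(tableName: string, order: string | undefined, file: string): Promise<void> {
    const orderBy = order ? ` ORDER BY \\"${order}\\"` : "";
    await execAsync(
        `psql -c "\\copy (SELECT * FROM \\"${tableName}\\"${orderBy}) TO '${file}' WITH (FORMAT CSV, HEADER true);"`,
        credentials
    );
}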
@@ -84,7 +84,6 @@ export interface DumpDatabase {
     enabled: boolean;
     minTimeBetweenMs: number;
     appExportPath: string;
-    postgresExportPath: string;
     tables: DumpDatabaseTable[];
 }