diff --git a/config.json.example b/config.json.example
index f8548c1..580b8fb 100644
--- a/config.json.example
+++ b/config.json.example
@@ -38,5 +38,31 @@
"max": 20, // 20 requests in 15min time window
"statusCode": 200
}
+ },
+ "dumpDatabase": {
+ "enabled": true,
+ "minTimeBetweenMs": 60000, // 1 minute between dumps
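+        // appExportPath: directory this app reads dumps from and garbage-collects,
+        // postgresExportPath: the same directory as seen by the Postgres server
+        // (the two differ when Postgres runs in a container with its own mount point)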
+ "appExportPath": "/opt/exports",
+ "postgresExportPath": "/opt/exports",
+ "tables": [{
+ "name": "sponsorTimes",
+ "order": "timeSubmitted"
+ },
+ {
+ "name": "userNames"
+ },
+ {
+ "name": "categoryVotes"
+ },
+ {
+ "name": "noSegments"
+ },
+ {
+ "name": "warnings",
+ "order": "issueTime"
+ },
+ {
+ "name": "vipUsers"
+ }]
}
}
diff --git a/src/config.ts b/src/config.ts
index 7b15987..4ec51bf 100644
--- a/src/config.ts
+++ b/src/config.ts
@@ -45,7 +45,33 @@ addDefaults(config, {
},
userCounterURL: null,
youtubeAPIKey: null,
- postgres: null
+ postgres: null,
+ dumpDatabase: {
+ enabled: true,
+ minTimeBetweenMs: 60000,
+ appExportPath: './docker/database-export',
+ postgresExportPath: '/opt/exports',
+ tables: [{
+ name: "sponsorTimes",
+ order: "timeSubmitted"
+ },
+ {
+ name: "userNames"
+ },
+ {
+ name: "categoryVotes"
+ },
+ {
+ name: "noSegments",
+ },
+ {
+ name: "warnings",
+ order: "issueTime"
+ },
+ {
+ name: "vipUsers"
+ }]
+ }
});
// Add defaults
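
The block above relies on addDefaults only filling in keys the loaded config leaves unset, so an operator's explicit dumpDatabase settings always win. A minimal sketch of such a merger, assuming a shallow merge (the repo's actual addDefaults helper may differ):

    function addDefaults(config: Record<string, any>, defaults: Record<string, any>): void {
        // Copy a default only when the loaded config does not already define the key.
        for (const key in defaults) {
            if (!Object.prototype.hasOwnProperty.call(config, key)) {
                config[key] = defaults[key];
            }
        }
    }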
diff --git a/src/routes/dumpDatabase.ts b/src/routes/dumpDatabase.ts
index d687f07..8e33440 100644
--- a/src/routes/dumpDatabase.ts
+++ b/src/routes/dumpDatabase.ts
@@ -2,51 +2,110 @@ import {db} from '../databases/databases';
import {Logger} from '../utils/logger';
import {Request, Response} from 'express';
import { config } from '../config';
+import util from 'util';
+import fs from 'fs';
+import path from 'path';
+const unlink = util.promisify(fs.unlink);
const ONE_MINUTE = 1000 * 60;
-const styleHeader = `<style>body{font-family: sans-serif}</style>`
+const styleHeader = `<style>body{font-family: sans-serif} table th, table td{padding: 6px; text-align: left}</style>`
const licenseHeader = `
<p>The API and database follow CC BY-NC-SA 4.0 unless you have explicit permission.</p>
<p>Attribution Template</p>
<p>If you need to use the database or API in a way that violates this license, contact me with your reason and I may grant you access under a different license.</p>
`;
-const tables = [{
- name: "sponsorTimes",
- order: "timeSubmitted"
-},
-{
- name: "userNames"
-},
-{
- name: "categoryVotes"
-},
-{
- name: "noSegments",
-},
-{
- name: "warnings",
- order: "issueTime"
-},
-{
- name: "vipUsers"
-}];
+const tables = config?.dumpDatabase?.tables ?? [];
+const MILLISECONDS_BETWEEN_DUMPS = config?.dumpDatabase?.minTimeBetweenMs ?? ONE_MINUTE;
+const appExportPath = config?.dumpDatabase?.appExportPath ?? './docker/database-export';
+const postgresExportPath = config?.dumpDatabase?.postgresExportPath ?? '/opt/exports';
+const tableNames = tables.map(table => table.name);
-const links: string[] = tables.map((table) => `/database/${table.name}.csv`);
+interface TableDumpList {
+ fileName: string;
+ tableName: string;
+};
+let latestDumpFiles: TableDumpList[] = [];
-const linksHTML: string = tables.map((table) => `<p><a href="/database/${table.name}.csv">${table.name}.csv</a></p>`)
-    .reduce((acc, url) => acc + url, "");
+if (tables.length === 0) {
+ Logger.warn('[dumpDatabase] No tables configured');
+}
let lastUpdate = 0;
-export default function dumpDatabase(req: Request, res: Response, showPage: boolean) {
+function removeOutdatedDumps(exportPath: string): Promise<void> {
+ return new Promise((resolve, reject) => {
+        // Map each configured table name to the dump files found for it
+ const tableFiles = tableNames.reduce((obj: any, tableName) => {
+ obj[tableName] = [];
+ return obj;
+ }, {});
+
+ // read files in export directory
+        fs.readdir(exportPath, (err: any, files: string[]) => {
+            if (err) {
+                Logger.error(err);
+                return resolve();
+            }
+
+ files.forEach(file => {
+                // we only care about files that start with the table name and end with ".csv"
+ tableNames.forEach(tableName => {
+ if (file.startsWith(`${tableName}_`) && file.endsWith('.csv')) {
+ // extract the timestamp from the filename
+ // we could also use the fs.stat mtime
+ const timestamp = Number(file.split('_')[1].replace('.csv', ''));
+ tableFiles[tableName].push({
+ file: path.join(exportPath, file),
+ timestamp,
+ });
+ }
+ });
+ });
+
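+            // Anything older than 1.5 dump intervals is from a previous run and safe to delete.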
+ const outdatedTime = Math.floor(Date.now() - (MILLISECONDS_BETWEEN_DUMPS * 1.5));
+            for (const tableName in tableFiles) {
+ const files = tableFiles[tableName];
+ files.forEach(async (item: any) => {
+ if (item.timestamp < outdatedTime) {
+ // remove old file
+ await unlink(item.file).catch((error: any) => {
+ Logger.error(`[dumpDatabase] Garbage collection failed ${error}`);
+ });
+ }
+ });
+ }
+
+ resolve();
+ });
+ });
+}
+
+export default async function dumpDatabase(req: Request, res: Response, showPage: boolean) {
+ if (!config?.dumpDatabase?.enabled) {
+ res.status(404).send("Database dump is disabled");
+ return;
+ }
if (!config.postgres) {
res.status(404).send("Not supported on this instance");
return;
}
const now = Date.now();
- const updateQueued = now - lastUpdate > ONE_MINUTE;
+ const updateQueued = now - lastUpdate > MILLISECONDS_BETWEEN_DUMPS;
res.status(200)
@@ -57,22 +116,58 @@ export default function dumpDatabase(req: Request, res: Response, showPage: bool
<p>Send a request to https://sponsor.ajay.app/database.json, or visit this page to trigger the database dump to run.</p>
<p>Then, you can download the csv files below, or use the links returned from the JSON request.</p>
<h3>Links</h3>
-    ${linksHTML}
+    <table>
+        <thead>
+            <tr>
+                <th>Table</th>
+                <th>CSV</th>
+            </tr>
+        </thead>
+        <tbody>
+            ${latestDumpFiles.map((item: any) => {
+                return `<tr>
+                    <td>${item.tableName}</td>
+                    <td><a href="/database/${item.fileName}">${item.fileName}</a></td>
+                </tr>`;
+            }).join('')}
+            ${latestDumpFiles.length === 0 ? `<tr><td colspan="2">Please wait: Generating files</td></tr>` : ``}
+        </tbody>
+    </table>
${updateQueued ? `Update queued.` : ``} Last updated: ${lastUpdate ? new Date(lastUpdate).toUTCString() : `Unknown`}`);
} else {
res.send({
lastUpdated: lastUpdate,
updateQueued,
- links
+            links: latestDumpFiles.map((item: any) => {
+                return {
+                    table: item.tableName,
+                    url: `/database/${item.fileName}`,
+                };
+            }),
})
}
if (updateQueued) {
lastUpdate = Date.now();
+
+ await removeOutdatedDumps(appExportPath);
+
+ const dumpFiles = [];
for (const table of tables) {
- db.prepare('run', `COPY (SELECT * FROM "${table.name}"${table.order ? ` ORDER BY "${table.order}"` : ``})
- TO '/opt/exports/${table.name}.csv' WITH (FORMAT CSV, HEADER true);`);
+ const fileName = `${table.name}_${lastUpdate}.csv`;
+ const file = `${postgresExportPath}/${fileName}`;
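+        // The timestamp in the filename keeps the previous dump downloadable while this one is written.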
+ await db.prepare('run', `COPY (SELECT * FROM "${table.name}"${table.order ? ` ORDER BY "${table.order}"` : ``})
+ TO '${file}' WITH (FORMAT CSV, HEADER true);`);
+ dumpFiles.push({
+ fileName,
+ tableName: table.name,
+ });
}
+ latestDumpFiles = [...dumpFiles];
}
-}
\ No newline at end of file
+}
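
For reference, a hedged sketch of a client for the JSON variant of this route, assuming it is mounted at /database.json and a global fetch is available (Node 18+); the DumpLink shape mirrors the object built in res.send above:

    interface DumpLink {
        table: string;
        url: string;
    }

    // Hits the endpoint, which also queues a fresh dump if enough time has passed.
    async function fetchDumpLinks(base: string): Promise<DumpLink[]> {
        const res = await fetch(`${base}/database.json`);
        const body = await res.json() as
            { lastUpdated: number, updateQueued: boolean, links: DumpLink[] };
        // When updateQueued is true, the listed files come from the previous run;
        // poll again after minTimeBetweenMs for the fresh ones.
        return body.links;
    }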
diff --git a/src/types/config.model.ts b/src/types/config.model.ts
index b27cfaa..f46cc17 100644
--- a/src/types/config.model.ts
+++ b/src/types/config.model.ts
@@ -38,6 +38,7 @@ export interface SBSConfig {
maximumPrefix?: string;
redis?: redis.ClientOpts;
postgres?: PoolConfig;
+ dumpDatabase?: DumpDatabase;
}
export interface WebhookConfig {
@@ -61,4 +62,17 @@ export interface PostgresConfig {
createDbIfNotExists: boolean;
enableWalCheckpointNumber: boolean;
postgres: PoolConfig;
-}
\ No newline at end of file
+}
+
+export interface DumpDatabase {
+ enabled: boolean;
+ minTimeBetweenMs: number;
+ appExportPath: string;
+ postgresExportPath: string;
+ tables: DumpDatabaseTable[];
+}
+
+export interface DumpDatabaseTable {
+ name: string;
+ order?: string;
+}
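
For illustration, a value using the interfaces above (mirroring the defaults added in src/config.ts; order is optional and only controls the ORDER BY in the COPY statement):

    const example: DumpDatabase = {
        enabled: true,
        minTimeBetweenMs: 60000,
        appExportPath: './docker/database-export',
        postgresExportPath: '/opt/exports',
        tables: [
            { name: "sponsorTimes", order: "timeSubmitted" }, // dumped in submission order
            { name: "userNames" },                            // no ORDER BY clause
        ],
    };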