diff --git a/config.json.example b/config.json.example
index 65f9b42..580b8fb 100644
--- a/config.json.example
+++ b/config.json.example
@@ -42,7 +42,8 @@
     "dumpDatabase": {
         "enabled": true,
         "minTimeBetweenMs": 60000, // 1 minute between dumps
-        "exportPath": "/opt/exports",
+        "appExportPath": "/opt/exports",
+        "postgresExportPath": "/opt/exports",
         "tables": [{
             "name": "sponsorTimes",
             "order": "timeSubmitted"
diff --git a/src/config.ts b/src/config.ts
index 477aa72..0e985dc 100644
--- a/src/config.ts
+++ b/src/config.ts
@@ -49,7 +49,8 @@ addDefaults(config, {
     dumpDatabase: {
         enabled: true,
         minTimeBetweenMs: 60000,
-        exportPath: '/opt/exports',
+        appExportPath: '/opt/exports',
+        postgresExportPath: '/opt/exports',
         tables: [{
             name: "sponsorTimes",
             order: "timeSubmitted"
diff --git a/src/routes/dumpDatabase.ts b/src/routes/dumpDatabase.ts
index ec3e801..b7f2726 100644
--- a/src/routes/dumpDatabase.ts
+++ b/src/routes/dumpDatabase.ts
@@ -2,10 +2,29 @@ import {db} from '../databases/databases';
 import {Logger} from '../utils/logger';
 import {Request, Response} from 'express';
 import { config } from '../config';
+const util = require('util');
+const fs = require('fs');
+const path = require('path');
+const unlink = util.promisify(fs.unlink);
+const fstat = util.promisify(fs.fstat);
 
 const ONE_MINUTE = 1000 * 60;
 
-const styleHeader = ``
+const styleHeader = ``
 
 const licenseHeader = `
 The API and database follow CC BY-NC-SA 4.0 unless you have explicit permission.
 Attribution Template
@@ -13,7 +32,15 @@ const licenseHeader = `

 The API and database follow CC BY-NC-SA 4.0 unless you have explicit permission.
+function removeOutdatedDumps(exportPath: string): Promise<void> {
+    return new Promise((resolve, reject) => {
+        // Get list of table names
+        // Create array for each table
+        const tableFiles = tableNames.reduce((obj: any, tableName) => {
+            obj[tableName] = [];
+            return obj;
+        }, {});
+
+        // read files in the export directory
+        fs.readdir(exportPath, (err: any, files: string[]) => {
+            if (err) Logger.error(err);
+            if (err) return resolve();
+
+            files.forEach(file => {
+                // we only care about files named "<tableName>_<timestamp>.csv"
+                tableNames.forEach(tableName => {
+                    if (file.startsWith(`${tableName}_`) && file.endsWith('.csv')) {
+                        // extract the timestamp from the filename
+                        // we could also use the fs.stat mtime
+                        const timestamp = Number(file.split('_')[1].replace('.csv', ''));
+                        tableFiles[tableName].push({
+                            file: path.join(exportPath, file),
+                            timestamp,
+                        });
+                    }
+                });
+            });
+
+            const outdatedTime = Math.floor(Date.now() - (MILLISECONDS_BETWEEN_DUMPS * 1.5));
+            for (let tableName in tableFiles) {
+                const files = tableFiles[tableName];
+                files.forEach(async (item: any) => {
+                    if (item.timestamp < outdatedTime) {
+                        // remove old file
+                        await unlink(item.file).catch((error: any) => {
+                            Logger.error(`[dumpDatabase] Garbage collection failed ${error}`);
+                        });
+                    }
+                });
+            }
+
+            resolve();
+        });
+    });
+}
+
-export default function dumpDatabase(req: Request, res: Response, showPage: boolean) {
+export default async function dumpDatabase(req: Request, res: Response, showPage: boolean) {
     if (config?.dumpDatabase?.enabled === false) {
         res.status(404).send("Database dump is disabled");
         return;
@@ -48,22 +118,58 @@ export default function dumpDatabase(req: Request, res: Response, showPage: boolean) {
             Send a request to https://sponsor.ajay.app/database.json, or visit this page to trigger the database dump to run.
             Then, you can download the csv files below, or use the links returned from the JSON request.
 
             Links
 
-            ${linksHTML}
+            <table>
+                <tr>
+                    <th>Table</th>
+                    <th>CSV</th>
+                </tr>
+                ${latestDumpFiles.map((item: any) => {
+                    return `
+                        <tr>
+                            <td>${item.tableName}</td>
+                            <td><a href="/download/${item.fileName}">${item.fileName}</a></td>
+                        </tr>
+                    `;
+                }).join('')}
+                ${latestDumpFiles.length === 0 ? '<tr><td>Please wait: Generating files</td></tr>' : ''}
+            </table>
+
             ${updateQueued ? `Update queued.` : ``} Last updated: ${lastUpdate ? new Date(lastUpdate).toUTCString() : `Unknown`}`);
     } else {
         res.send({
             lastUpdated: lastUpdate,
             updateQueued,
-            links
+            links: latestDumpFiles.map((item: any) => {
+                return {
+                    table: item.tableName,
+                    url: `/download/${item.fileName}`,
+                    size: item.fileSize,
+                };
+            }),
         })
     }
 
     if (updateQueued) {
         lastUpdate = Date.now();
+
+        await removeOutdatedDumps(appExportPath);
+
+        const dumpFiles = [];
         for (const table of tables) {
-            db.prepare('run', `COPY (SELECT * FROM "${table.name}"${table.order ? ` ORDER BY "${table.order}"` : ``})
-                TO '${exportPath}/${table.name}.csv' WITH (FORMAT CSV, HEADER true);`);
+            const fileName = `${table.name}_${lastUpdate}.csv`;
+            const file = `${postgresExportPath}/${fileName}`;
+            await db.prepare('run', `COPY (SELECT * FROM "${table.name}"${table.order ? ` ORDER BY "${table.order}"` : ``})
+                TO '${file}' WITH (FORMAT CSV, HEADER true);`);
+
+            dumpFiles.push({
+                fileName,
+                tableName: table.name,
+            });
         }
+        latestDumpFiles = [...dumpFiles];
     }
 }
diff --git a/src/types/config.model.ts b/src/types/config.model.ts
index c0611b7..f46cc17 100644
--- a/src/types/config.model.ts
+++ b/src/types/config.model.ts
@@ -67,7 +67,8 @@ export interface PostgresConfig {
 export interface DumpDatabase {
     enabled: boolean;
     minTimeBetweenMs: number;
-    exportPath: string;
+    appExportPath: string;
+    postgresExportPath: string;
     tables: DumpDatabaseTable[];
 }
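
Why the single exportPath is split in two: the COPY ... TO statement above is executed by the PostgreSQL server process, so postgresExportPath must be a path that the database server can write to, while removeOutdatedDumps cleans the directory with fs.readdir/unlink from the Node process, which is what appExportPath covers. In the defaults both point at /opt/exports; they only need to differ when the app and the database see the shared dump directory at different mount points. A minimal config sketch for that case, assuming a hypothetical containerized setup (the /exports mount point below is illustrative and not part of this change):

    "dumpDatabase": {
        "enabled": true,
        "minTimeBetweenMs": 60000,
        "appExportPath": "/opt/exports",   // dump directory as seen by the Node app (fs.readdir/unlink)
        "postgresExportPath": "/exports",  // the same directory as seen by the PostgreSQL server (COPY ... TO)
        "tables": [{
            "name": "sponsorTimes",
            "order": "timeSubmitted"
        }]
    }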