Prepare dockerfile for use, allow configuring via env vars

Author: Ajay
Date:   2022-05-03 22:08:44 -04:00
parent a66588619a
commit 5b177a3e53
13 changed files with 93 additions and 46 deletions

File: Dockerfile

@@ -1,10 +1,10 @@
-FROM node:14-alpine as builder
+FROM node:16-alpine as builder
 RUN apk add --no-cache --virtual .build-deps python make g++
 COPY package.json package-lock.json tsconfig.json entrypoint.sh ./
 COPY src src
 RUN npm ci && npm run tsc
 
-FROM node:14-alpine as app
+FROM node:16-alpine as app
 WORKDIR /usr/src/app
 COPY --from=builder node_modules .
 COPY --from=builder dist ./dist

File: entrypoint.sh

@@ -2,25 +2,11 @@
 set -e
 echo 'Entrypoint script'
 cd /usr/src/app
+# blank config, use defaults
 cp /etc/sponsorblock/config.json . || cat <<EOF > config.json
 {
-  "port": 8080,
-  "globalSalt": "[CHANGE THIS]",
-  "adminUserID": "[CHANGE THIS]",
-  "youtubeAPIKey": null,
-  "discordReportChannelWebhookURL": null,
-  "discordFirstTimeSubmissionsWebhookURL": null,
-  "discordAutoModWebhookURL": null,
-  "proxySubmission": null,
-  "behindProxy": "X-Forwarded-For",
-  "db": "./databases/sponsorTimes.db",
-  "privateDB": "./databases/private.db",
-  "createDatabaseIfNotExist": true,
-  "schemaFolder": "./databases",
-  "dbSchema": "./databases/_sponsorTimes.db.sql",
-  "privateDBSchema": "./databases/_private.db.sql",
-  "mode": "development",
-  "readOnly": false
 }
 EOF
 node dist/index.js
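With the heredoc reduced to an empty object, a container without a mounted /etc/sponsorblock/config.json now starts from `{}` and relies entirely on the defaults applied in src/config.ts plus environment-variable overrides (see the config.ts changes below) — which is also why the default port moves from 80 to 8080 there, matching the value the old heredoc hard-coded. A minimal sketch of the shallow-merge behavior this depends on; `addDefaults` is assumed here, since its body is not part of this diff:

    // Hypothetical sketch of the addDefaults pattern the blank config relies on:
    // any key missing from the user's config.json is filled from the defaults.
    function addDefaults(config: Record<string, unknown>, defaults: Record<string, unknown>): void {
        for (const key of Object.keys(defaults)) {
            if (config[key] === undefined) {
                config[key] = defaults[key];
            }
        }
    }

    // A blank config written by entrypoint.sh ends up fully populated:
    const config: Record<string, unknown> = JSON.parse("{}");
    addDefaults(config, { port: 8080, behindProxy: "X-Forwarded-For" });
    console.log(config.port); // 8080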

File: src/app.ts

@@ -202,7 +202,7 @@ function setupRoutes(router: Router) {
     router.post("/api/ratings/rate", postRateEndpoints);
     router.post("/api/ratings/clearCache", ratingPostClearCache);
-    if (config.postgres) {
+    if (config.postgres?.enabled) {
         router.get("/database", (req, res) => dumpDatabase(req, res, true));
         router.get("/database.json", (req, res) => dumpDatabase(req, res, false));
         router.get("/database/*", redirectLink);

File: src/config.ts

@@ -1,6 +1,7 @@
 import fs from "fs";
 import { SBSConfig } from "./types/config.model";
 import packageJson from "../package.json";
+import { isBoolean, isNumber } from "lodash";
 
 const isTestMode = process.env.npm_lifecycle_script === packageJson.scripts.test;
 const configFile = process.env.TEST_POSTGRES ? "ci.json"
@@ -8,9 +9,10 @@ const configFile = process.env.TEST_POSTGRES ? "ci.json"
     : "config.json";
 
 export const config: SBSConfig = JSON.parse(fs.readFileSync(configFile).toString("utf8"));
+loadFromEnv(config);
 migrate(config);
 addDefaults(config, {
-    port: 80,
+    port: 8080,
     behindProxy: "X-Forwarded-For",
     db: "./databases/sponsorTimes.db",
     privateDB: "./databases/private.db",
@@ -20,7 +22,7 @@ addDefaults(config, {
     privateDBSchema: "./databases/_private.db.sql",
     readOnly: false,
     webhooks: [],
-    categoryList: ["sponsor", "selfpromo", "exclusive_access", "interaction", "intro", "outro", "preview", "music_offtopic", "filler", "poi_highlight", "chapter"],
+    categoryList: ["sponsor", "selfpromo", "exclusive_access", "interaction", "intro", "outro", "preview", "music_offtopic", "filler", "poi_highlight"],
     categorySupport: {
         sponsor: ["skip", "mute", "full"],
         selfpromo: ["skip", "mute", "full"],
@@ -35,14 +37,14 @@ addDefaults(config, {
         chapter: ["chapter"]
     },
     maxNumberOfActiveWarnings: 1,
-    hoursAfterWarningExpires: 24,
+    hoursAfterWarningExpires: 16300000,
     adminUserID: "",
     discordCompletelyIncorrectReportWebhookURL: null,
     discordFirstTimeSubmissionsWebhookURL: null,
     discordNeuralBlockRejectWebhookURL: null,
     discordFailedReportChannelWebhookURL: null,
     discordReportChannelWebhookURL: null,
-    getTopUsersCacheTimeMinutes: 0,
+    getTopUsersCacheTimeMinutes: 240,
     globalSalt: null,
     mode: "",
     neuralBlockURL: null,
@@ -50,15 +52,15 @@ addDefaults(config, {
     rateLimit: {
         vote: {
             windowMs: 900000,
-            max: 20,
-            message: "Too many votes, please try again later",
-            statusCode: 429,
+            max: 15,
+            message: "OK",
+            statusCode: 200,
         },
         view: {
             windowMs: 900000,
-            max: 20,
+            max: 10,
             statusCode: 200,
-            message: "Too many views, please try again later",
+            message: "OK",
         },
         rate: {
             windowMs: 900000,
@@ -71,10 +73,16 @@ addDefaults(config, {
     newLeafURLs: null,
     maxRewardTimePerSegmentInSeconds: 600,
     poiMinimumStartTime: 2,
-    postgres: null,
+    postgres: {
+        enabled: null,
+        user: "",
+        host: "",
+        password: "",
+        port: 5432
+    },
     dumpDatabase: {
         enabled: false,
-        minTimeBetweenMs: 60000,
+        minTimeBetweenMs: 180000,
         appExportPath: "./docker/database-export",
         postgresExportPath: "/opt/exports",
         tables: [{
@@ -96,10 +104,29 @@ addDefaults(config, {
         },
         {
             name: "vipUsers"
+        },
+        {
+            name: "unlistedVideos"
+        },
+        {
+            name: "videoInfo"
+        },
+        {
+            name: "ratings"
         }]
     },
-    diskCache: null,
-    crons: null
+    diskCache: {
+        max: 10737418240
+    },
+    crons: null,
+    redis: {
+        enabled: null,
+        socket: {
+            host: "",
+            port: 0
+        },
+        disableOfflineQueue: true
+    }
 });
 
 // Add defaults
@@ -125,5 +152,30 @@ function migrate(config: SBSConfig) {
         if (redisConfig.enable_offline_queue !== undefined) {
             config.disableOfflineQueue = !redisConfig.enable_offline_queue;
         }
+        if (redisConfig.socket.host && redisConfig.enabled === null) {
+            redisConfig.enabled = true;
+        }
     }
+
+    if (config.postgres && config.postgres.user && config.postgres.enabled === null) {
+        config.postgres.enabled = true;
+    }
 }
+
+function loadFromEnv(config: SBSConfig, prefix = "") {
+    for (const key in config) {
+        if (typeof config[key] === "object") {
+            loadFromEnv(config[key], (prefix ? `${prefix}.` : "") + key);
+        } else if (process.env[key]) {
+            const value = process.env[key];
+            if (isNumber(value)) {
+                config[key] = parseInt(value, 10);
+            } else if (isBoolean(value)) {
+                config[key] = value === "true";
+            } else {
+                config[key] = value;
+            }
+        }
+    }
+}
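Two caveats on loadFromEnv as committed: process.env values are always strings, and lodash's isNumber/isBoolean test the runtime type rather than the contents, so the parseInt and boolean branches never fire and every override is stored as a string; also, the dotted prefix is built up but the lookup uses the bare leaf key, so nested keys such as postgres.port and redis.socket.port would all answer to an environment variable named port. A hedged sketch of content-based coercion, as the parseInt/boolean branches appear to intend — this is not the committed code:

    // Sketch (assumption, not the committed code): coerce env strings by content.
    function coerceEnvValue(value: string): string | number | boolean {
        if (/^-?\d+$/.test(value)) return parseInt(value, 10);         // "5432" -> 5432
        if (value === "true" || value === "false") return value === "true";
        return value;                                                   // anything else stays a string
    }

    console.log(coerceEnvValue("8080"));            // 8080 (number)
    console.log(coerceEnvValue("false"));           // false (boolean)
    console.log(coerceEnvValue("X-Forwarded-For")); // unchanged string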

File: src/databases/databases.ts

@@ -9,7 +9,7 @@ let privateDB: IDatabase;
 if (config.mysql) {
     db = new Mysql(config.mysql);
     privateDB = new Mysql(config.privateMysql);
-} else if (config.postgres) {
+} else if (config.postgres?.enabled) {
     db = new Postgres({
         dbSchemaFileName: config.dbSchema,
         dbSchemaFolder: config.schemaFolder,

File: src/middleware/requestRateLimit.ts

@@ -28,7 +28,7 @@ export function rateLimitMiddleware(limitConfig: RateLimitConfig, getUserID?: (r
                 return next();
             }
         },
-        store: config.redis ? new RedisStore({
+        store: config.redis?.enabled ? new RedisStore({
             sendCommand: (...args: string[]) => redis.sendCommand(args),
         }) : null,
     });
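When Redis is not enabled, the store option falls back to null and express-rate-limit uses its default in-memory store, so limits are tracked per process rather than shared across instances. Combined with the new rateLimit defaults above (statusCode 200, message "OK"), an over-limit request is now silently dropped with a success response instead of a 429. A reduced sketch of that option combination, assuming the express-rate-limit options used in this file:

    import rateLimit from "express-rate-limit";

    // Reduced sketch of the default vote limiter after this commit:
    // over-limit callers receive 200 "OK" rather than 429, so clients
    // treat the dropped request as a success.
    const voteLimiter = rateLimit({
        windowMs: 900000, // 15 minutes
        max: 15,          // requests per window per key
        statusCode: 200,
        message: "OK",
        // no `store`: express-rate-limit falls back to its in-memory store
    });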

File: src/routes/dumpDatabase.ts

@@ -100,7 +100,7 @@ export default async function dumpDatabase(req: Request, res: Response, showPage
         res.status(404).send("Database dump is disabled");
         return;
     }
-    if (!config.postgres) {
+    if (!config.postgres?.enabled) {
         res.status(404).send("Not supported on this instance");
         return;
     }
@@ -175,7 +175,7 @@ export async function redirectLink(req: Request, res: Response): Promise<void> {
         res.status(404).send("Database dump is disabled");
         return;
     }
-    if (!config.postgres) {
+    if (!config.postgres?.enabled) {
         res.status(404).send("Not supported on this instance");
         return;
     }

File: src/types/config.model.ts

@@ -2,6 +2,14 @@ import { PoolConfig } from "pg";
 import * as redis from "redis";
 import { CacheOptions } from "@ajayyy/lru-diskcache";
 
+interface RedisConfig extends redis.RedisClientOptions {
+    enabled: boolean;
+}
+
+interface CustomPostgresConfig extends PoolConfig {
+    enabled: boolean;
+}
+
 export interface SBSConfig {
     [index: string]: any
     port: number;
@@ -41,9 +49,9 @@ export interface SBSConfig {
     privateMysql?: any;
     minimumPrefix?: string;
     maximumPrefix?: string;
-    redis?: redis.RedisClientOptions;
+    redis?: RedisConfig;
     maxRewardTimePerSegmentInSeconds?: number;
-    postgres?: PoolConfig;
+    postgres?: CustomPostgresConfig;
     dumpDatabase?: DumpDatabase;
     diskCache: CacheOptions;
     crons: CronJobOptions;
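The new enabled field exists because addDefaults now always materializes postgres and redis objects (so their nested keys can be targeted by environment variables); plain truthiness checks like if (config.postgres) would then always pass, which is why every call site in this commit switches to config.postgres?.enabled. A small self-contained sketch of the gate, with stand-in types (assumptions) in place of the pg and redis option interfaces:

    // Stand-in types (assumptions) in place of PoolConfig / RedisClientOptions.
    interface GatedPostgresConfig { enabled: boolean | null; host?: string; user?: string }
    interface AppConfig { postgres?: GatedPostgresConfig }

    function postgresEnabled(config: AppConfig): boolean {
        // The object always exists after defaults, so only the explicit flag
        // (set in config.json, by migrate(), or via env var) turns it on.
        return Boolean(config.postgres?.enabled);
    }

    console.log(postgresEnabled({ postgres: { enabled: null } }));             // false: defaults only
    console.log(postgresEnabled({ postgres: { enabled: true, host: "db" } })); // true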

File: src/utils/redis.ts

@@ -25,7 +25,7 @@ let exportClient: RedisSB = {
     quit: () => new Promise((resolve, reject) => reject()),
 };
 
-if (config.redis) {
+if (config.redis?.enabled) {
     Logger.info("Connected to redis");
     const client = createClient(config.redis);
     client.connect();

File: test.json

@@ -16,6 +16,7 @@
     "schemaFolder": "./databases",
     "dbSchema": "./databases/_sponsorTimes.db.sql",
     "privateDBSchema": "./databases/_private.db.sql",
+    "categoryList": ["sponsor", "selfpromo", "exclusive_access", "interaction", "intro", "outro", "preview", "music_offtopic", "filler", "poi_highlight", "chapter"],
     "mode": "test",
     "readOnly": false,
     "webhooks": [

File: test/cases/getStatus.ts

@@ -89,7 +89,7 @@ describe("getStatus", () => {
     });
     it("Should be able to get statusRequests only", function (done) {
-        if (!config.redis) this.skip();
+        if (!config.redis?.enabled) this.skip();
         client.get(`${endpoint}/statusRequests`)
             .then(res => {
                 assert.strictEqual(res.status, 200);
@@ -100,7 +100,7 @@ describe("getStatus", () => {
     });
     it("Should be able to get status with statusRequests", function (done) {
-        if (!config.redis) this.skip();
+        if (!config.redis?.enabled) this.skip();
         client.get(endpoint)
             .then(res => {
                 assert.strictEqual(res.status, 200);

File: test/cases/redisTest.ts

@@ -11,7 +11,7 @@ const randKey2 = genRandom(16);
 
 describe("redis test", function() {
     before(async function() {
-        if (!config.redis) this.skip();
+        if (!config.redis?.enabled) this.skip();
         await redis.set(randKey1, randValue1);
     });
     it("Should get stored value", (done) => {

File: test/cases/tempVip.ts

@@ -63,7 +63,7 @@ const checkUserVIP = async (publicID: HashedUserID) => {
 
 describe("tempVIP test", function() {
     before(async function() {
-        if (!config.redis) this.skip();
+        if (!config.redis?.enabled) this.skip();
         const insertSponsorTimeQuery = 'INSERT INTO "sponsorTimes" ("videoID", "startTime", "endTime", "votes", "locked", "UUID", "userID", "timeSubmitted", "views", "category", "shadowHidden") VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)';
         await db.prepare("run", insertSponsorTimeQuery, ["channelid-convert", 0, 1, 0, 0, UUID0, "testman", 0, 50, "sponsor", 0]);