Prepare Dockerfile for use; allow configuring via env vars

This commit is contained in:
Ajay
2022-05-03 22:08:44 -04:00
parent a66588619a
commit 5b177a3e53
13 changed files with 93 additions and 46 deletions

View File

@@ -1,14 +1,14 @@
FROM node:14-alpine as builder
FROM node:16-alpine as builder
RUN apk add --no-cache --virtual .build-deps python make g++
COPY package.json package-lock.json tsconfig.json entrypoint.sh ./
COPY src src
RUN npm ci && npm run tsc
FROM node:14-alpine as app
FROM node:16-alpine as app
WORKDIR /usr/src/app
COPY --from=builder node_modules .
COPY --from=builder dist ./dist
COPY entrypoint.sh .
COPY databases/*.sql databases/
EXPOSE 8080
CMD ./entrypoint.sh
CMD ./entrypoint.sh

View File

@@ -2,25 +2,11 @@
set -e
echo 'Entrypoint script'
cd /usr/src/app
# blank config, use defaults
cp /etc/sponsorblock/config.json . || cat <<EOF > config.json
{
"port": 8080,
"globalSalt": "[CHANGE THIS]",
"adminUserID": "[CHANGE THIS]",
"youtubeAPIKey": null,
"discordReportChannelWebhookURL": null,
"discordFirstTimeSubmissionsWebhookURL": null,
"discordAutoModWebhookURL": null,
"proxySubmission": null,
"behindProxy": "X-Forwarded-For",
"db": "./databases/sponsorTimes.db",
"privateDB": "./databases/private.db",
"createDatabaseIfNotExist": true,
"schemaFolder": "./databases",
"dbSchema": "./databases/_sponsorTimes.db.sql",
"privateDBSchema": "./databases/_private.db.sql",
"mode": "development",
"readOnly": false
}
EOF
node dist/index.js
node dist/index.js

View File

@@ -202,7 +202,7 @@ function setupRoutes(router: Router) {
router.post("/api/ratings/rate", postRateEndpoints);
router.post("/api/ratings/clearCache", ratingPostClearCache);
if (config.postgres) {
if (config.postgres?.enabled) {
router.get("/database", (req, res) => dumpDatabase(req, res, true));
router.get("/database.json", (req, res) => dumpDatabase(req, res, false));
router.get("/database/*", redirectLink);

View File

@@ -1,6 +1,7 @@
import fs from "fs";
import { SBSConfig } from "./types/config.model";
import packageJson from "../package.json";
import { isBoolean, isNumber } from "lodash";
const isTestMode = process.env.npm_lifecycle_script === packageJson.scripts.test;
const configFile = process.env.TEST_POSTGRES ? "ci.json"
@@ -8,9 +9,10 @@ const configFile = process.env.TEST_POSTGRES ? "ci.json"
: "config.json";
export const config: SBSConfig = JSON.parse(fs.readFileSync(configFile).toString("utf8"));
loadFromEnv(config);
migrate(config);
addDefaults(config, {
port: 80,
port: 8080,
behindProxy: "X-Forwarded-For",
db: "./databases/sponsorTimes.db",
privateDB: "./databases/private.db",
@@ -20,7 +22,7 @@ addDefaults(config, {
privateDBSchema: "./databases/_private.db.sql",
readOnly: false,
webhooks: [],
categoryList: ["sponsor", "selfpromo", "exclusive_access", "interaction", "intro", "outro", "preview", "music_offtopic", "filler", "poi_highlight", "chapter"],
categoryList: ["sponsor", "selfpromo", "exclusive_access", "interaction", "intro", "outro", "preview", "music_offtopic", "filler", "poi_highlight"],
categorySupport: {
sponsor: ["skip", "mute", "full"],
selfpromo: ["skip", "mute", "full"],
@@ -35,14 +37,14 @@ addDefaults(config, {
chapter: ["chapter"]
},
maxNumberOfActiveWarnings: 1,
hoursAfterWarningExpires: 24,
hoursAfterWarningExpires: 16300000,
adminUserID: "",
discordCompletelyIncorrectReportWebhookURL: null,
discordFirstTimeSubmissionsWebhookURL: null,
discordNeuralBlockRejectWebhookURL: null,
discordFailedReportChannelWebhookURL: null,
discordReportChannelWebhookURL: null,
getTopUsersCacheTimeMinutes: 0,
getTopUsersCacheTimeMinutes: 240,
globalSalt: null,
mode: "",
neuralBlockURL: null,
@@ -50,15 +52,15 @@ addDefaults(config, {
rateLimit: {
vote: {
windowMs: 900000,
max: 20,
message: "Too many votes, please try again later",
statusCode: 429,
max: 15,
message: "OK",
statusCode: 200,
},
view: {
windowMs: 900000,
max: 20,
max: 10,
statusCode: 200,
message: "Too many views, please try again later",
message: "OK",
},
rate: {
windowMs: 900000,
@@ -71,10 +73,16 @@ addDefaults(config, {
newLeafURLs: null,
maxRewardTimePerSegmentInSeconds: 600,
poiMinimumStartTime: 2,
postgres: null,
postgres: {
enabled: null,
user: "",
host: "",
password: "",
port: 5432
},
dumpDatabase: {
enabled: false,
minTimeBetweenMs: 60000,
minTimeBetweenMs: 180000,
appExportPath: "./docker/database-export",
postgresExportPath: "/opt/exports",
tables: [{
@@ -96,10 +104,29 @@ addDefaults(config, {
},
{
name: "vipUsers"
},
{
name: "unlistedVideos"
},
{
name: "videoInfo"
},
{
name: "ratings"
}]
},
diskCache: null,
crons: null
diskCache: {
max: 10737418240
},
crons: null,
redis: {
enabled: null,
socket: {
host: "",
port: 0
},
disableOfflineQueue: true
}
});
// Add defaults
@@ -125,5 +152,30 @@ function migrate(config: SBSConfig) {
if (redisConfig.enable_offline_queue !== undefined) {
config.disableOfflineQueue = !redisConfig.enable_offline_queue;
}
if (redisConfig.socket.host && redisConfig.enabled === null) {
redisConfig.enabled = true;
}
}
if (config.postgres && config.postgres.user && config.postgres.enabled === null) {
config.postgres.enabled = true;
}
}
/**
 * Overrides config values in place from environment variables of the same name.
 * Recurses into nested objects so leaf keys anywhere in the config tree can be
 * overridden. NOTE(review): `prefix` is accumulated for recursion but never used
 * in the env lookup, so identically named keys in different sections collide on
 * the same env var — confirm this is intended.
 *
 * Environment values are always strings (Node.js `process.env`), so they are
 * coerced here: integer-looking strings become numbers, "true"/"false" become
 * booleans, everything else stays a string. Empty-string env vars are ignored
 * (truthiness guard below).
 */
function loadFromEnv(config: SBSConfig, prefix = "") {
    for (const key in config) {
        // typeof null === "object", so exclude null explicitly — otherwise
        // null-defaulted keys (e.g. youtubeAPIKey) recurse into null and can
        // never be overridden from the environment.
        if (config[key] !== null && typeof config[key] === "object") {
            loadFromEnv(config[key], (prefix ? `${prefix}.` : "") + key);
        } else if (process.env[key]) {
            const value = process.env[key] as string; // guard above ensures defined
            // process.env values are strings; lodash isNumber/isBoolean on a
            // string are always false, so inspect the string itself instead.
            if (/^-?\d+$/.test(value)) {
                config[key] = parseInt(value, 10);
            } else if (value === "true" || value === "false") {
                config[key] = value === "true";
            } else {
                config[key] = value;
            }
        }
    }
}

View File

@@ -9,7 +9,7 @@ let privateDB: IDatabase;
if (config.mysql) {
db = new Mysql(config.mysql);
privateDB = new Mysql(config.privateMysql);
} else if (config.postgres) {
} else if (config.postgres?.enabled) {
db = new Postgres({
dbSchemaFileName: config.dbSchema,
dbSchemaFolder: config.schemaFolder,

View File

@@ -28,7 +28,7 @@ export function rateLimitMiddleware(limitConfig: RateLimitConfig, getUserID?: (r
return next();
}
},
store: config.redis ? new RedisStore({
store: config.redis?.enabled ? new RedisStore({
sendCommand: (...args: string[]) => redis.sendCommand(args),
}) : null,
});

View File

@@ -100,7 +100,7 @@ export default async function dumpDatabase(req: Request, res: Response, showPage
res.status(404).send("Database dump is disabled");
return;
}
if (!config.postgres) {
if (!config.postgres?.enabled) {
res.status(404).send("Not supported on this instance");
return;
}
@@ -175,7 +175,7 @@ export async function redirectLink(req: Request, res: Response): Promise<void> {
res.status(404).send("Database dump is disabled");
return;
}
if (!config.postgres) {
if (!config.postgres?.enabled) {
res.status(404).send("Not supported on this instance");
return;
}

View File

@@ -2,6 +2,14 @@ import { PoolConfig } from "pg";
import * as redis from "redis";
import { CacheOptions } from "@ajayyy/lru-diskcache";
// Redis client options extended with an explicit on/off switch, so merely
// having a redis section in the config defaults no longer implies redis is
// in use — call sites check `config.redis?.enabled`.
interface RedisConfig extends redis.RedisClientOptions {
enabled: boolean;
}
// pg PoolConfig extended with the same explicit flag; call sites check
// `config.postgres?.enabled` instead of truthiness of the section itself.
interface CustomPostgresConfig extends PoolConfig {
enabled: boolean;
}
export interface SBSConfig {
[index: string]: any
port: number;
@@ -41,9 +49,9 @@ export interface SBSConfig {
privateMysql?: any;
minimumPrefix?: string;
maximumPrefix?: string;
redis?: redis.RedisClientOptions;
redis?: RedisConfig;
maxRewardTimePerSegmentInSeconds?: number;
postgres?: PoolConfig;
postgres?: CustomPostgresConfig;
dumpDatabase?: DumpDatabase;
diskCache: CacheOptions;
crons: CronJobOptions;

View File

@@ -25,7 +25,7 @@ let exportClient: RedisSB = {
quit: () => new Promise((resolve, reject) => reject()),
};
if (config.redis) {
if (config.redis?.enabled) {
Logger.info("Connected to redis");
const client = createClient(config.redis);
client.connect();

View File

@@ -16,6 +16,7 @@
"schemaFolder": "./databases",
"dbSchema": "./databases/_sponsorTimes.db.sql",
"privateDBSchema": "./databases/_private.db.sql",
"categoryList": ["sponsor", "selfpromo", "exclusive_access", "interaction", "intro", "outro", "preview", "music_offtopic", "filler", "poi_highlight", "chapter"],
"mode": "test",
"readOnly": false,
"webhooks": [

View File

@@ -89,7 +89,7 @@ describe("getStatus", () => {
});
it("Should be able to get statusRequests only", function (done) {
if (!config.redis) this.skip();
if (!config.redis?.enabled) this.skip();
client.get(`${endpoint}/statusRequests`)
.then(res => {
assert.strictEqual(res.status, 200);
@@ -100,7 +100,7 @@ describe("getStatus", () => {
});
it("Should be able to get status with statusRequests", function (done) {
if (!config.redis) this.skip();
if (!config.redis?.enabled) this.skip();
client.get(endpoint)
.then(res => {
assert.strictEqual(res.status, 200);

View File

@@ -11,7 +11,7 @@ const randKey2 = genRandom(16);
describe("redis test", function() {
before(async function() {
if (!config.redis) this.skip();
if (!config.redis?.enabled) this.skip();
await redis.set(randKey1, randValue1);
});
it("Should get stored value", (done) => {

View File

@@ -63,7 +63,7 @@ const checkUserVIP = async (publicID: HashedUserID) => {
describe("tempVIP test", function() {
before(async function() {
if (!config.redis) this.skip();
if (!config.redis?.enabled) this.skip();
const insertSponsorTimeQuery = 'INSERT INTO "sponsorTimes" ("videoID", "startTime", "endTime", "votes", "locked", "UUID", "userID", "timeSubmitted", "views", "category", "shadowHidden") VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)';
await db.prepare("run", insertSponsorTimeQuery, ["channelid-convert", 0, 1, 0, 0, UUID0, "testman", 0, 50, "sponsor", 0]);