From 76cc603a3f52b89ebfc325f68e56e97a8922c6cd Mon Sep 17 00:00:00 2001
From: Michael C
Date: Thu, 31 Mar 2022 16:02:50 -0400
Subject: [PATCH 01/38] update automod check

- remove NB code
- reduce complexity + unnecessary iterations
- use client duration if given
---
 src/routes/postSkipSegments.ts | 193 +++++++--------------------------
 test/cases/postSkipSegments.ts |  16 ---
 2 files changed, 42 insertions(+), 167 deletions(-)

diff --git a/src/routes/postSkipSegments.ts b/src/routes/postSkipSegments.ts
index d704c85..5ed0efc 100644
--- a/src/routes/postSkipSegments.ts
+++ b/src/routes/postSkipSegments.ts
@@ -112,55 +112,6 @@ async function sendWebhooks(apiVideoInfo: APIVideoInfo, userID: string, videoID:
     }
 }
 
-async function sendWebhooksNB(userID: string, videoID: string, UUID: string, startTime: number, endTime: number, category: string, probability: number, ytData: any) {
-    const submissionInfoRow = await db.prepare("get", `SELECT
-        (select count(1) from "sponsorTimes" where "userID" = ?) count,
-        (select count(1) from "sponsorTimes" where "userID" = ? and "votes" <= -2) disregarded,
-        coalesce((select "userName" FROM "userNames" WHERE "userID" = ?), ?) "userName"`,
-    [userID, userID, userID, userID]);
-
-    let submittedBy: string;
-    // If a userName was created then show both
-    if (submissionInfoRow.userName !== userID) {
-        submittedBy = `${submissionInfoRow.userName}\n${userID}`;
-    } else {
-        submittedBy = userID;
-    }
-
-    // Send discord message
-    if (config.discordNeuralBlockRejectWebhookURL === null) return;
-
-    axios.post(config.discordNeuralBlockRejectWebhookURL, {
-        "embeds": [{
-            "title": ytData.items[0].snippet.title,
-            "url": `https://www.youtube.com/watch?v=${videoID}&t=${(parseFloat(startTime.toFixed(0)) - 2)}`,
-            "description": `**Submission ID:** ${UUID}\
-                \n**Timestamp:** ${getFormattedTime(startTime)} to ${getFormattedTime(endTime)}\
-                \n**Predicted Probability:** ${probability}\
-                \n**Category:** ${category}\
-                \n**Submitted by:** ${submittedBy}\
-                \n**Total User Submissions:** ${submissionInfoRow.count}\
-                \n**Ignored User Submissions:** ${submissionInfoRow.disregarded}`,
-            "color": 10813440,
-            "thumbnail": {
-                "url": ytData.items[0].snippet.thumbnails.maxres ? ytData.items[0].snippet.thumbnails.maxres.url : "",
-            },
-        }]
-    })
-        .then(res => {
-            if (res.status >= 400) {
-                Logger.error("Error sending NeuralBlock Discord hook");
-                Logger.error(JSON.stringify(res));
-                Logger.error("\n");
-            }
-        })
-        .catch(err => {
-            Logger.error("Failed to send NeuralBlock Discord hook.");
-            Logger.error(JSON.stringify(err));
-            Logger.error("\n");
-        });
-}
-
 // callback: function(reject: "String containing reason the submission was rejected")
 // returns: string when an error, false otherwise
 
@@ -168,98 +119,47 @@ async function sendWebhooksNB(userID: string, videoID: string, UUID: string, sta
 // false for a pass - it was confusing and led to this bug - any use of this function in
 // the future could have the same problem.
async function autoModerateSubmission(apiVideoInfo: APIVideoInfo, - submission: { videoID: VideoID; userID: UserID; segments: IncomingSegment[], service: Service }) { - if (apiVideoInfo) { + submission: { videoID: VideoID; userID: UserID; segments: IncomingSegment[], service: Service, videoDuration: number }) { + + const apiVideoDuration = (apiVideoInfo: APIVideoInfo) => { + if (!apiVideoInfo) return undefined; const { err, data } = apiVideoInfo; - if (err) return false; + // return undefined if API error + if (err) return undefined; + return data?.lengthSeconds; + }; + // get duration from API + const apiDuration = apiVideoDuration(apiVideoInfo); + // if API fail or returns 0, get duration from client + const duration = apiDuration || submission.videoDuration; + // return false on undefined or 0 + if (!duration) return false; - const duration = apiVideoInfo?.data?.lengthSeconds; - const segments = submission.segments; - let nbString = ""; - for (let i = 0; i < segments.length; i++) { - if (duration == 0) { - // Allow submission if the duration is 0 (bug in youtube api) - return false; - } else { - if (segments[i].category === "sponsor") { - //Prepare timestamps to send to NB all at once - nbString = `${nbString}${segments[i].segment[0]},${segments[i].segment[1]};`; - } - } - } + const segments = submission.segments; + // map all times to float array + const allSegmentTimes = segments.map(segment => [parseFloat(segment.segment[0]), parseFloat(segment.segment[1])]); - // Get all submissions for this user - const allSubmittedByUser = await db.prepare("all", `SELECT "startTime", "endTime" FROM "sponsorTimes" WHERE "userID" = ? and "videoID" = ? and "votes" > -1`, [submission.userID, submission.videoID]); - const allSegmentTimes = []; - if (allSubmittedByUser !== undefined) { - //add segments the user has previously submitted - for (const segmentInfo of allSubmittedByUser) { - allSegmentTimes.push([parseFloat(segmentInfo.startTime), parseFloat(segmentInfo.endTime)]); - } - } + // add previous submissions by this user + const allSubmittedByUser = await db.prepare("all", `SELECT "startTime", "endTime" FROM "sponsorTimes" WHERE "userID" = ? and "videoID" = ? 
and "votes" > -1`, [submission.userID, submission.videoID]); - //add segments they are trying to add in this submission - for (let i = 0; i < segments.length; i++) { - const startTime = parseFloat(segments[i].segment[0]); - const endTime = parseFloat(segments[i].segment[1]); - allSegmentTimes.push([startTime, endTime]); - } - - //merge all the times into non-overlapping arrays - const allSegmentsSorted = mergeTimeSegments(allSegmentTimes.sort(function (a, b) { - return a[0] - b[0] || a[1] - b[1]; - })); - - const videoDuration = data?.lengthSeconds; - if (videoDuration != 0) { - let allSegmentDuration = 0; - //sum all segment times together - allSegmentsSorted.forEach(segmentInfo => allSegmentDuration += segmentInfo[1] - segmentInfo[0]); - if (allSegmentDuration > (videoDuration / 100) * 80) { - // Reject submission if all segments combine are over 80% of the video - return "Total length of your submitted segments are over 80% of the video."; - } - } - - // Check NeuralBlock - const neuralBlockURL = config.neuralBlockURL; - if (!neuralBlockURL) return false; - const response = await axios.get(`${neuralBlockURL}/api/checkSponsorSegments?vid=${submission.videoID} - &segments=${nbString.substring(0, nbString.length - 1)}`, { validateStatus: () => true }); - if (response.status !== 200) return false; - - const nbPredictions = response.data; - let nbDecision = false; - let predictionIdx = 0; //Keep track because only sponsor categories were submitted - for (let i = 0; i < segments.length; i++) { - if (segments[i].category === "sponsor") { - if (nbPredictions.probabilities[predictionIdx] < 0.70) { - nbDecision = true; // At least one bad entry - const startTime = parseFloat(segments[i].segment[0]); - const endTime = parseFloat(segments[i].segment[1]); - - const UUID = getSubmissionUUID(submission.videoID, segments[i].category, segments[i].actionType, submission.userID, startTime, endTime, submission.service); - // Send to Discord - // Note, if this is too spammy. Consider sending all the segments as one Webhook - sendWebhooksNB(submission.userID, submission.videoID, UUID, startTime, endTime, segments[i].category, nbPredictions.probabilities[predictionIdx], data); - } - predictionIdx++; - } - - } - - if (nbDecision) { - return "Rejected based on NeuralBlock predictions."; - } else { - return false; - } - } else { - Logger.debug("Skipped YouTube API"); - - // Can't moderate the submission without calling the youtube API - // so allow by default. 
- return false; + if (allSubmittedByUser) { + //add segments the user has previously submitted + const allSubmittedTimes = allSubmittedByUser.map((segment: { startTime: string, endTime: string }) => [parseFloat(segment.startTime), parseFloat(segment.endTime)]); + allSegmentTimes.push(...allSubmittedTimes); } + + //merge all the times into non-overlapping arrays + const allSegmentsSorted = mergeTimeSegments(allSegmentTimes.sort((a, b) => a[0] - b[0] || a[1] - b[1])); + + let allSegmentDuration = 0; + //sum all segment times together + allSegmentsSorted.forEach(segmentInfo => allSegmentDuration += segmentInfo[1] - segmentInfo[0]); + + if (allSegmentDuration > (duration / 100) * 80) { + // Reject submission if all segments combine are over 80% of the video + return "Total length of your submitted segments are over 80% of the video."; + } + return false; } function getYouTubeVideoInfo(videoID: VideoID, ignoreCache = false): Promise { @@ -310,7 +210,7 @@ function checkInvalidFields(videoID: VideoID, userID: UserID, segments: Incoming invalidFields.push("userID"); if (userID?.length < 30) errors.push(`userID must be at least 30 characters long`); } - if (!Array.isArray(segments) || segments.length < 1) { + if (!Array.isArray(segments) || segments.length == 0) { invalidFields.push("segments"); } // validate start and end times (no : marks) @@ -323,7 +223,7 @@ function checkInvalidFields(videoID: VideoID, userID: UserID, segments: Incoming } if (typeof segmentPair.description !== "string" - || (segmentPair.description.length > 60 && segmentPair.actionType === ActionType.Chapter) + || (segmentPair.actionType === ActionType.Chapter && segmentPair.description.length > 60 ) || (segmentPair.description.length !== 0 && segmentPair.actionType !== ActionType.Chapter)) { invalidFields.push("segment description"); } @@ -425,19 +325,11 @@ async function checkEachSegmentValid(rawIP: IPAddress, paramUserID: UserID, user return CHECK_PASS; } -async function checkByAutoModerator(videoID: any, userID: any, segments: Array, isVIP: boolean, service:string, apiVideoInfo: APIVideoInfo, decreaseVotes: number): Promise { +async function checkByAutoModerator(videoID: any, userID: any, segments: Array, isVIP: boolean, service:string, apiVideoInfo: APIVideoInfo, decreaseVotes: number, videoDuration: number): Promise { // Auto moderator check if (!isVIP && service == Service.YouTube) { - const autoModerateResult = await autoModerateSubmission(apiVideoInfo, { userID, videoID, segments, service });//startTime, endTime, category: segments[i].category}); - - if (autoModerateResult == "Rejected based on NeuralBlock predictions.") { - // If NB automod rejects, the submission will start with -2 votes. - // Note, if one submission is bad all submissions will be affected. - // However, this behavior is consistent with other automod functions - // already in place. 
- //decreaseVotes = -2; //Disable for now - } else if (autoModerateResult) { - //Normal automod behavior + const autoModerateResult = await autoModerateSubmission(apiVideoInfo, { userID, videoID, segments, service, videoDuration });//startTime, endTime, category: segments[i].category}); + if (autoModerateResult) { return { pass: false, errorCode: 403, @@ -619,8 +511,7 @@ export async function postSkipSegments(req: Request, res: Response): Promise { .catch(err => done(err)); }); - it("Should be rejected if NB's predicted probability is <70%.", (done) => { - const videoID = "LevkAjUE6d4"; - postSkipSegmentParam({ - videoID, - startTime: 40, - endTime: 60, - userID: submitUserTwo, - category: "sponsor" - }) - .then(res => { - assert.strictEqual(res.status, 200); - done(); - }) - .catch(err => done(err)); - }); - it("Should be rejected with custom message if user has to many active warnings", (done) => { postSkipSegmentJSON({ userID: warnUser01, From 6b5dc54cc7c5ad84946f1ccfce348c3e16f00216 Mon Sep 17 00:00:00 2001 From: mini-bomba <55105495+mini-bomba@users.noreply.github.com> Date: Thu, 31 Mar 2022 22:41:09 +0200 Subject: [PATCH 02/38] voteOnSponsorTime.ts: don't do database queries for vote eligibility on locked segments --- src/routes/voteOnSponsorTime.ts | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/src/routes/voteOnSponsorTime.ts b/src/routes/voteOnSponsorTime.ts index 469df87..fba1403 100644 --- a/src/routes/voteOnSponsorTime.ts +++ b/src/routes/voteOnSponsorTime.ts @@ -476,11 +476,11 @@ export async function vote(ip: IPAddress, UUID: SegmentUUID, paramUserID: UserID // Only change the database if they have made a submission before and haven't voted recently const userAbleToVote = (!(isOwnSubmission && incrementAmount > 0 && oldIncrementAmount >= 0) + && !finalResponse.blockVote + && finalResponse.finalStatus === 200 && (await db.prepare("get", `SELECT "userID" FROM "sponsorTimes" WHERE "userID" = ?`, [nonAnonUserID])) !== undefined && (await db.prepare("get", `SELECT "userID" FROM "shadowBannedUsers" WHERE "userID" = ?`, [nonAnonUserID])) === undefined - && (await privateDB.prepare("get", `SELECT "UUID" FROM "votes" WHERE "UUID" = ? AND "hashedIP" = ? AND "userID" != ?`, [UUID, hashedIP, userID])) === undefined) - && !finalResponse.blockVote - && finalResponse.finalStatus === 200; + && (await privateDB.prepare("get", `SELECT "UUID" FROM "votes" WHERE "UUID" = ? AND "hashedIP" = ? AND "userID" != ?`, [UUID, hashedIP, userID])) === undefined); const ableToVote = isVIP || isTempVIP || userAbleToVote; @@ -534,4 +534,4 @@ export async function vote(ip: IPAddress, UUID: SegmentUUID, paramUserID: UserID Logger.error(err as string); return { status: 500, message: finalResponse.finalMessage ?? 
undefined, json: { error: "Internal error creating segment vote" } }; } -} \ No newline at end of file +} From d02d78f325f2ea5f7056c4cadf681302f9be3a3f Mon Sep 17 00:00:00 2001 From: Michael C Date: Thu, 31 Mar 2022 16:43:10 -0400 Subject: [PATCH 03/38] add 80% tempVIP - move isUserTempVIP to own file - reduce allSegmentDuration instead of forEach - don't return decreaseVotes from autoModerator - completely skip autoModCheck if VIP --- src/routes/postSkipSegments.ts | 48 +++++++++++++++------------------ src/routes/voteOnSponsorTime.ts | 13 ++------- src/utils/isUserTempVIP.ts | 19 +++++++++++++ 3 files changed, 43 insertions(+), 37 deletions(-) create mode 100644 src/utils/isUserTempVIP.ts diff --git a/src/routes/postSkipSegments.ts b/src/routes/postSkipSegments.ts index 5ed0efc..2d89516 100644 --- a/src/routes/postSkipSegments.ts +++ b/src/routes/postSkipSegments.ts @@ -16,6 +16,7 @@ import { getReputation } from "../utils/reputation"; import { APIVideoData, APIVideoInfo } from "../types/youtubeApi.model"; import { HashedUserID, UserID } from "../types/user.model"; import { isUserVIP } from "../utils/isUserVIP"; +import { isUserTempVIP } from "../utils/isUserTempVIP"; import { parseUserAgent } from "../utils/userAgent"; import { getService } from "../utils/getService"; import axios from "axios"; @@ -81,19 +82,19 @@ async function sendWebhooks(apiVideoInfo: APIVideoInfo, userID: string, videoID: if (config.discordFirstTimeSubmissionsWebhookURL === null || userSubmissionCountRow.submissionCount > 1) return; axios.post(config.discordFirstTimeSubmissionsWebhookURL, { - "embeds": [{ - "title": data?.title, - "url": `https://www.youtube.com/watch?v=${videoID}&t=${(parseInt(startTime.toFixed(0)) - 2)}s#requiredSegment=${UUID}`, - "description": `Submission ID: ${UUID}\ + embeds: [{ + title: data?.title, + url: `https://www.youtube.com/watch?v=${videoID}&t=${(parseInt(startTime.toFixed(0)) - 2)}s#requiredSegment=${UUID}`, + description: `Submission ID: ${UUID}\ \n\nTimestamp: \ ${getFormattedTime(startTime)} to ${getFormattedTime(endTime)}\ \n\nCategory: ${segmentInfo.category}`, - "color": 10813440, - "author": { - "name": userID, + color: 10813440, + author: { + name: userID, }, - "thumbnail": { - "url": getMaxResThumbnail(data) || "", + thumbnail: { + url: getMaxResThumbnail(data) || "", }, }], }) @@ -151,9 +152,8 @@ async function autoModerateSubmission(apiVideoInfo: APIVideoInfo, //merge all the times into non-overlapping arrays const allSegmentsSorted = mergeTimeSegments(allSegmentTimes.sort((a, b) => a[0] - b[0] || a[1] - b[1])); - let allSegmentDuration = 0; //sum all segment times together - allSegmentsSorted.forEach(segmentInfo => allSegmentDuration += segmentInfo[1] - segmentInfo[0]); + const allSegmentDuration = allSegmentsSorted.reduce((acc, curr) => acc + (curr[1] - curr[0]), 0); if (allSegmentDuration > (duration / 100) * 80) { // Reject submission if all segments combine are over 80% of the video @@ -325,24 +325,20 @@ async function checkEachSegmentValid(rawIP: IPAddress, paramUserID: UserID, user return CHECK_PASS; } -async function checkByAutoModerator(videoID: any, userID: any, segments: Array, isVIP: boolean, service:string, apiVideoInfo: APIVideoInfo, decreaseVotes: number, videoDuration: number): Promise { +async function checkByAutoModerator(videoID: any, userID: any, segments: Array, service:string, apiVideoInfo: APIVideoInfo, videoDuration: number): Promise { // Auto moderator check - if (!isVIP && service == Service.YouTube) { + if (service == Service.YouTube) { const 
autoModerateResult = await autoModerateSubmission(apiVideoInfo, { userID, videoID, segments, service, videoDuration });//startTime, endTime, category: segments[i].category}); if (autoModerateResult) { return { pass: false, errorCode: 403, - errorMessage: `Request rejected by auto moderator: ${autoModerateResult} If this is an issue, send a message on Discord.`, - decreaseVotes + errorMessage: `Request rejected by auto moderator: ${autoModerateResult} If this is an issue, send a message on Discord.` }; } } - return { - ...CHECK_PASS, - decreaseVotes - }; + return CHECK_PASS; } async function updateDataIfVideoDurationChange(videoID: VideoID, service: Service, videoDuration: VideoDuration, videoDurationParam: VideoDuration) { @@ -498,6 +494,7 @@ export async function postSkipSegments(req: Request, res: Response): Promise => { - const apiVideoInfo = await getYouTubeVideoInfo(videoID); - const channelID = apiVideoInfo?.data?.authorId; - const { err, reply } = await redis.getAsync(tempVIPKey(nonAnonUserID)); - - return err || !reply ? false : (reply == channelID); -}; - const videoDurationChanged = (segmentDuration: number, APIDuration: number) => (APIDuration > 0 && Math.abs(segmentDuration - APIDuration) > 2); async function updateSegmentVideoDuration(UUID: SegmentUUID) { diff --git a/src/utils/isUserTempVIP.ts b/src/utils/isUserTempVIP.ts new file mode 100644 index 0000000..dc098e0 --- /dev/null +++ b/src/utils/isUserTempVIP.ts @@ -0,0 +1,19 @@ +import redis from "../utils/redis"; +import { tempVIPKey } from "../utils/redisKeys"; +import { HashedUserID } from "../types/user.model"; +import { YouTubeAPI } from "../utils/youtubeApi"; +import { APIVideoInfo } from "../types/youtubeApi.model"; +import { VideoID } from "../types/segments.model"; +import { config } from "../config"; + +function getYouTubeVideoInfo(videoID: VideoID, ignoreCache = false): Promise { + return config.newLeafURLs ? YouTubeAPI.listVideos(videoID, ignoreCache) : null; +} + +export const isUserTempVIP = async (hashedUserID: HashedUserID, videoID: VideoID): Promise => { + const apiVideoInfo = await getYouTubeVideoInfo(videoID); + const channelID = apiVideoInfo?.data?.authorId; + const { err, reply } = await redis.getAsync(tempVIPKey(hashedUserID)); + + return err || !reply ? 
false : (reply == channelID); +}; \ No newline at end of file From d392b1c8fcb7638fa9768863d86535b5e72d32b1 Mon Sep 17 00:00:00 2001 From: Michael C Date: Thu, 31 Mar 2022 16:52:05 -0400 Subject: [PATCH 04/38] remove outdated comments & unnecessary space --- src/routes/postSkipSegments.ts | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/src/routes/postSkipSegments.ts b/src/routes/postSkipSegments.ts index 2d89516..3e6a8c0 100644 --- a/src/routes/postSkipSegments.ts +++ b/src/routes/postSkipSegments.ts @@ -328,7 +328,7 @@ async function checkEachSegmentValid(rawIP: IPAddress, paramUserID: UserID, user async function checkByAutoModerator(videoID: any, userID: any, segments: Array, service:string, apiVideoInfo: APIVideoInfo, videoDuration: number): Promise { // Auto moderator check if (service == Service.YouTube) { - const autoModerateResult = await autoModerateSubmission(apiVideoInfo, { userID, videoID, segments, service, videoDuration });//startTime, endTime, category: segments[i].category}); + const autoModerateResult = await autoModerateSubmission(apiVideoInfo, { userID, videoID, segments, service, videoDuration }); if (autoModerateResult) { return { pass: false, @@ -337,7 +337,6 @@ async function checkByAutoModerator(videoID: any, userID: any, segments: Array Date: Thu, 31 Mar 2022 18:03:42 -0400 Subject: [PATCH 05/38] merged changes by @mini-bomba --- src/routes/postSkipSegments.ts | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/src/routes/postSkipSegments.ts b/src/routes/postSkipSegments.ts index 3e6a8c0..669bb54 100644 --- a/src/routes/postSkipSegments.ts +++ b/src/routes/postSkipSegments.ts @@ -141,7 +141,7 @@ async function autoModerateSubmission(apiVideoInfo: APIVideoInfo, const allSegmentTimes = segments.map(segment => [parseFloat(segment.segment[0]), parseFloat(segment.segment[1])]); // add previous submissions by this user - const allSubmittedByUser = await db.prepare("all", `SELECT "startTime", "endTime" FROM "sponsorTimes" WHERE "userID" = ? and "videoID" = ? and "votes" > -1`, [submission.userID, submission.videoID]); + const allSubmittedByUser = await db.prepare("all", `SELECT "startTime", "endTime" FROM "sponsorTimes" WHERE "userID" = ? AND "videoID" = ? AND "votes" > -1 AND "hidden" = 0`, [submission.userID, submission.videoID]); if (allSubmittedByUser) { //add segments the user has previously submitted @@ -302,7 +302,7 @@ async function checkEachSegmentValid(rawIP: IPAddress, paramUserID: UserID, user } if (!isVIP && segments[i].category === "sponsor" - && segments[i].actionType !== ActionType.Full && Math.abs(startTime - endTime) < 1) { + && segments[i].actionType !== ActionType.Full && (endTime - startTime) < 1) { // Too short return { pass: false, errorMessage: "Segments must be longer than 1 second long", errorCode: 400 }; } @@ -488,7 +488,7 @@ export async function postSkipSegments(req: Request, res: Response): Promise((prev, val) => `${prev} ${val.category}`, "")}', times: ${segments.reduce((prev, val) => `${prev} ${val.segment}`, "")}`); + Logger.warn(`Caught a submission for a warned user. 
userID: '${userID}', videoID: '${videoID}', category: '${segments.reduce((prev, val) => `${prev} ${val.category}`, "")}', times: ${segments.reduce((prev, val) => `${prev} ${val.segment}`, "")}`); return res.status(userWarningCheckResult.errorCode).send(userWarningCheckResult.errorMessage); } From d5611fb023f3bbbed6fa923a4c4886a778e6470a Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Sat, 9 Apr 2022 11:08:19 +0000 Subject: [PATCH 06/38] Bump moment from 2.29.1 to 2.29.2 Bumps [moment](https://github.com/moment/moment) from 2.29.1 to 2.29.2. - [Release notes](https://github.com/moment/moment/releases) - [Changelog](https://github.com/moment/moment/blob/develop/CHANGELOG.md) - [Commits](https://github.com/moment/moment/compare/2.29.1...2.29.2) --- updated-dependencies: - dependency-name: moment dependency-type: indirect ... Signed-off-by: dependabot[bot] --- package-lock.json | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/package-lock.json b/package-lock.json index e48a32b..f433169 100644 --- a/package-lock.json +++ b/package-lock.json @@ -3140,9 +3140,9 @@ } }, "node_modules/moment": { - "version": "2.29.1", - "resolved": "https://registry.npmjs.org/moment/-/moment-2.29.1.tgz", - "integrity": "sha512-kHmoybcPV8Sqy59DwNDY3Jefr64lK/by/da0ViFcuA4DH0vQg5Q6Ze5VimxkfQNSC+Mls/Kx53s7TjP1RhFEDQ==", + "version": "2.29.2", + "resolved": "https://registry.npmjs.org/moment/-/moment-2.29.2.tgz", + "integrity": "sha512-UgzG4rvxYpN15jgCmVJwac49h9ly9NurikMWGPdVxm8GZD6XjkKPxDTjQQ43gtGgnV3X0cAyWDdP2Wexoquifg==", "engines": { "node": "*" } @@ -7502,9 +7502,9 @@ } }, "moment": { - "version": "2.29.1", - "resolved": "https://registry.npmjs.org/moment/-/moment-2.29.1.tgz", - "integrity": "sha512-kHmoybcPV8Sqy59DwNDY3Jefr64lK/by/da0ViFcuA4DH0vQg5Q6Ze5VimxkfQNSC+Mls/Kx53s7TjP1RhFEDQ==" + "version": "2.29.2", + "resolved": "https://registry.npmjs.org/moment/-/moment-2.29.2.tgz", + "integrity": "sha512-UgzG4rvxYpN15jgCmVJwac49h9ly9NurikMWGPdVxm8GZD6XjkKPxDTjQQ43gtGgnV3X0cAyWDdP2Wexoquifg==" }, "moment-timezone": { "version": "0.5.34", From b09ed1cbe22e06d53af1ad825c5539b7753a9dec Mon Sep 17 00:00:00 2001 From: "Michael M. 
Chang" Date: Mon, 11 Apr 2022 01:54:28 -0400 Subject: [PATCH 07/38] Update src/routes/postSkipSegments.ts Co-authored-by: Ajay Ramachandran --- src/routes/postSkipSegments.ts | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/routes/postSkipSegments.ts b/src/routes/postSkipSegments.ts index 669bb54..8ccee51 100644 --- a/src/routes/postSkipSegments.ts +++ b/src/routes/postSkipSegments.ts @@ -506,7 +506,7 @@ export async function postSkipSegments(req: Request, res: Response): Promise Date: Mon, 11 Apr 2022 23:48:52 -0400 Subject: [PATCH 08/38] Log locked downvoted segment --- src/routes/voteOnSponsorTime.ts | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/src/routes/voteOnSponsorTime.ts b/src/routes/voteOnSponsorTime.ts index 469df87..c56256f 100644 --- a/src/routes/voteOnSponsorTime.ts +++ b/src/routes/voteOnSponsorTime.ts @@ -44,6 +44,7 @@ interface VoteData { row: { votes: number; views: number; + locked: boolean; }; category: string; incrementAmount: number; @@ -178,7 +179,7 @@ async function sendWebhooks(voteData: VoteData) { "url": `https://www.youtube.com/watch?v=${submissionInfoRow.videoID}&t=${(submissionInfoRow.startTime.toFixed(0) - 2)}s#requiredSegment=${voteData.UUID}`, "description": `**${voteData.row.votes} Votes Prior | \ ${(voteData.row.votes + voteData.incrementAmount - voteData.oldIncrementAmount)} Votes Now | ${voteData.row.views} \ - Views**\n\n**Submission ID:** ${voteData.UUID}\ + Views**\n\n**Locked**: ${voteData.row.locked}\n\n**Submission ID:** ${voteData.UUID}\ \n**Category:** ${submissionInfoRow.category}\ \n\n**Submitted by:** ${submissionInfoRow.userName}\n${submissionInfoRow.userID}\ \n\n**Total User Submissions:** ${submissionInfoRow.count}\ @@ -189,7 +190,7 @@ async function sendWebhooks(voteData: VoteData) { "author": { "name": voteData.finalResponse?.webhookMessage ?? voteData.finalResponse?.finalMessage ?? - getVoteAuthor(userSubmissionCountRow.submissionCount, voteData.isTempVIP, voteData.isVIP, voteData.isOwnSubmission), + `${getVoteAuthor(userSubmissionCountRow.submissionCount, voteData.isTempVIP, voteData.isVIP, voteData.isOwnSubmission)}${voteData.row.locked ? 
" (Locked)" : ""}`, }, "thumbnail": { "url": getMaxResThumbnail(data) || "", From 41c92da37e46eeca3a4ca5e1b2a58e4789103d65 Mon Sep 17 00:00:00 2001 From: Ajay Date: Wed, 13 Apr 2022 13:50:30 -0400 Subject: [PATCH 09/38] Upgrade express-rate-limit --- package-lock.json | 40 +++++++++++------------------- package.json | 3 +-- src/middleware/requestRateLimit.ts | 7 +++--- 3 files changed, 19 insertions(+), 31 deletions(-) diff --git a/package-lock.json b/package-lock.json index f433169..37f5178 100644 --- a/package-lock.json +++ b/package-lock.json @@ -15,7 +15,7 @@ "cron": "^1.8.2", "express": "^4.17.1", "express-promise-router": "^4.1.1", - "express-rate-limit": "^5.5.1", + "express-rate-limit": "^6.3.0", "lodash": "^4.17.21", "pg": "^8.7.1", "redis": "^3.1.2", @@ -25,7 +25,6 @@ "@types/better-sqlite3": "^7.4.1", "@types/cron": "^1.7.3", "@types/express": "^4.17.13", - "@types/express-rate-limit": "^5.1.3", "@types/lodash": "^4.14.178", "@types/mocha": "^9.0.0", "@types/node": "^16.11.11", @@ -293,15 +292,6 @@ "@types/serve-static": "*" } }, - "node_modules/@types/express-rate-limit": { - "version": "5.1.3", - "resolved": "https://registry.npmjs.org/@types/express-rate-limit/-/express-rate-limit-5.1.3.tgz", - "integrity": "sha512-H+TYy3K53uPU2TqPGFYaiWc2xJV6+bIFkDd/Ma2/h67Pa6ARk9kWE0p/K9OH1Okm0et9Sfm66fmXoAxsH2PHXg==", - "dev": true, - "dependencies": { - "@types/express": "*" - } - }, "node_modules/@types/express-serve-static-core": { "version": "4.17.26", "resolved": "https://registry.npmjs.org/@types/express-serve-static-core/-/express-serve-static-core-4.17.26.tgz", @@ -1889,9 +1879,15 @@ } }, "node_modules/express-rate-limit": { - "version": "5.5.1", - "resolved": "https://registry.npmjs.org/express-rate-limit/-/express-rate-limit-5.5.1.tgz", - "integrity": "sha512-MTjE2eIbHv5DyfuFz4zLYWxpqVhEhkTiwFGuB74Q9CSou2WHO52nlE5y3Zlg6SIsiYUIPj6ifFxnkPz6O3sIUg==" + "version": "6.3.0", + "resolved": "https://registry.npmjs.org/express-rate-limit/-/express-rate-limit-6.3.0.tgz", + "integrity": "sha512-932Io1VGKjM3ppi7xW9sb1J5nVkEJSUiOtHw2oE+JyHks1e+AXuOBSXbJKM0mcXwEnW1TibJibQ455Ow1YFjfg==", + "engines": { + "node": ">= 12.9.0" + }, + "peerDependencies": { + "express": "^4" + } }, "node_modules/express/node_modules/debug": { "version": "2.6.9", @@ -5319,15 +5315,6 @@ "@types/serve-static": "*" } }, - "@types/express-rate-limit": { - "version": "5.1.3", - "resolved": "https://registry.npmjs.org/@types/express-rate-limit/-/express-rate-limit-5.1.3.tgz", - "integrity": "sha512-H+TYy3K53uPU2TqPGFYaiWc2xJV6+bIFkDd/Ma2/h67Pa6ARk9kWE0p/K9OH1Okm0et9Sfm66fmXoAxsH2PHXg==", - "dev": true, - "requires": { - "@types/express": "*" - } - }, "@types/express-serve-static-core": { "version": "4.17.26", "resolved": "https://registry.npmjs.org/@types/express-serve-static-core/-/express-serve-static-core-4.17.26.tgz", @@ -6557,9 +6544,10 @@ } }, "express-rate-limit": { - "version": "5.5.1", - "resolved": "https://registry.npmjs.org/express-rate-limit/-/express-rate-limit-5.5.1.tgz", - "integrity": "sha512-MTjE2eIbHv5DyfuFz4zLYWxpqVhEhkTiwFGuB74Q9CSou2WHO52nlE5y3Zlg6SIsiYUIPj6ifFxnkPz6O3sIUg==" + "version": "6.3.0", + "resolved": "https://registry.npmjs.org/express-rate-limit/-/express-rate-limit-6.3.0.tgz", + "integrity": "sha512-932Io1VGKjM3ppi7xW9sb1J5nVkEJSUiOtHw2oE+JyHks1e+AXuOBSXbJKM0mcXwEnW1TibJibQ455Ow1YFjfg==", + "requires": {} }, "fast-deep-equal": { "version": "3.1.3", diff --git a/package.json b/package.json index efee6ac..d727b11 100644 --- a/package.json +++ b/package.json @@ -23,7 +23,7 @@ "cron": 
"^1.8.2", "express": "^4.17.1", "express-promise-router": "^4.1.1", - "express-rate-limit": "^5.5.1", + "express-rate-limit": "^6.3.0", "lodash": "^4.17.21", "pg": "^8.7.1", "redis": "^3.1.2", @@ -33,7 +33,6 @@ "@types/better-sqlite3": "^7.4.1", "@types/cron": "^1.7.3", "@types/express": "^4.17.13", - "@types/express-rate-limit": "^5.1.3", "@types/lodash": "^4.14.178", "@types/mocha": "^9.0.0", "@types/node": "^16.11.11", diff --git a/src/middleware/requestRateLimit.ts b/src/middleware/requestRateLimit.ts index 71f8bd9..63b65c9 100644 --- a/src/middleware/requestRateLimit.ts +++ b/src/middleware/requestRateLimit.ts @@ -1,19 +1,20 @@ import { getIP } from "../utils/getIP"; import { getHash } from "../utils/getHash"; import { getHashCache } from "../utils/getHashCache"; -import rateLimit from "express-rate-limit"; +import rateLimit, { RateLimitRequestHandler } from "express-rate-limit"; import { RateLimitConfig } from "../types/config.model"; import { Request } from "express"; import { isUserVIP } from "../utils/isUserVIP"; import { UserID } from "../types/user.model"; -export function rateLimitMiddleware(limitConfig: RateLimitConfig, getUserID?: (req: Request) => UserID): rateLimit.RateLimit { +export function rateLimitMiddleware(limitConfig: RateLimitConfig, getUserID?: (req: Request) => UserID): RateLimitRequestHandler { return rateLimit({ windowMs: limitConfig.windowMs, max: limitConfig.max, message: limitConfig.message, statusCode: limitConfig.statusCode, - headers: false, + legacyHeaders: false, + standardHeaders: false, keyGenerator: (req) => { return getHash(getIP(req), 1); }, From 8dc87da462292062650a1f8729e6a049e027fb76 Mon Sep 17 00:00:00 2001 From: Ajay Date: Wed, 13 Apr 2022 17:36:07 -0400 Subject: [PATCH 10/38] Back rate limit by redia and upgrade node-redis --- ci.json | 6 +- package-lock.json | 234 +++++++++++++++++++++-------- package.json | 4 +- src/middleware/requestRateLimit.ts | 8 +- src/routes/addUserAsTempVIP.ts | 25 ++- src/routes/getStatus.ts | 8 +- src/types/config.model.ts | 2 +- src/utils/getHashCache.ts | 17 ++- src/utils/isUserTempVIP.ts | 11 +- src/utils/queryCacher.ts | 36 ++--- src/utils/redis.ts | 70 +++++---- test/cases/redisTest.ts | 15 +- test/cases/tempVip.ts | 12 +- test/test.ts | 2 +- 14 files changed, 292 insertions(+), 158 deletions(-) diff --git a/ci.json b/ci.json index 68262cb..86c106a 100644 --- a/ci.json +++ b/ci.json @@ -17,8 +17,10 @@ "port": 5432 }, "redis": { - "host": "localhost", - "port": 6379 + "socket": { + "host": "localhost", + "port": 6379 + } }, "createDatabaseIfNotExist": true, "schemaFolder": "./databases", diff --git a/package-lock.json b/package-lock.json index 37f5178..ebf1447 100644 --- a/package-lock.json +++ b/package-lock.json @@ -18,7 +18,8 @@ "express-rate-limit": "^6.3.0", "lodash": "^4.17.21", "pg": "^8.7.1", - "redis": "^3.1.2", + "rate-limit-redis": "^3.0.1", + "redis": "^4.0.6", "sync-mysql": "^3.0.1" }, "devDependencies": { @@ -29,7 +30,6 @@ "@types/mocha": "^9.0.0", "@types/node": "^16.11.11", "@types/pg": "^8.6.1", - "@types/redis": "^2.8.32", "@typescript-eslint/eslint-plugin": "^5.5.0", "@typescript-eslint/parser": "^5.5.0", "eslint": "^8.3.0", @@ -127,6 +127,65 @@ "integrity": "sha512-ZnQMnLV4e7hDlUvw8H+U8ASL02SS2Gn6+9Ac3wGGLIe7+je2AeAOxPY+izIPJDfFDb7eDjev0Us8MO1iFRN8hA==", "dev": true }, + "node_modules/@node-redis/bloom": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/@node-redis/bloom/-/bloom-1.0.1.tgz", + "integrity": 
"sha512-mXEBvEIgF4tUzdIN89LiYsbi6//EdpFA7L8M+DHCvePXg+bfHWi+ct5VI6nHUFQE5+ohm/9wmgihCH3HSkeKsw==", + "peerDependencies": { + "@node-redis/client": "^1.0.0" + } + }, + "node_modules/@node-redis/client": { + "version": "1.0.5", + "resolved": "https://registry.npmjs.org/@node-redis/client/-/client-1.0.5.tgz", + "integrity": "sha512-ESZ3bd1f+od62h4MaBLKum+klVJfA4wAeLHcVQBkoXa1l0viFesOWnakLQqKg+UyrlJhZmXJWtu0Y9v7iTMrig==", + "dependencies": { + "cluster-key-slot": "1.1.0", + "generic-pool": "3.8.2", + "redis-parser": "3.0.0", + "yallist": "4.0.0" + }, + "engines": { + "node": ">=12" + } + }, + "node_modules/@node-redis/client/node_modules/yallist": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/yallist/-/yallist-4.0.0.tgz", + "integrity": "sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A==" + }, + "node_modules/@node-redis/graph": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/@node-redis/graph/-/graph-1.0.0.tgz", + "integrity": "sha512-mRSo8jEGC0cf+Rm7q8mWMKKKqkn6EAnA9IA2S3JvUv/gaWW/73vil7GLNwion2ihTptAm05I9LkepzfIXUKX5g==", + "peerDependencies": { + "@node-redis/client": "^1.0.0" + } + }, + "node_modules/@node-redis/json": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/@node-redis/json/-/json-1.0.2.tgz", + "integrity": "sha512-qVRgn8WfG46QQ08CghSbY4VhHFgaTY71WjpwRBGEuqGPfWwfRcIf3OqSpR7Q/45X+v3xd8mvYjywqh0wqJ8T+g==", + "peerDependencies": { + "@node-redis/client": "^1.0.0" + } + }, + "node_modules/@node-redis/search": { + "version": "1.0.5", + "resolved": "https://registry.npmjs.org/@node-redis/search/-/search-1.0.5.tgz", + "integrity": "sha512-MCOL8iCKq4v+3HgEQv8zGlSkZyXSXtERgrAJ4TSryIG/eLFy84b57KmNNa/V7M1Q2Wd2hgn2nPCGNcQtk1R1OQ==", + "peerDependencies": { + "@node-redis/client": "^1.0.0" + } + }, + "node_modules/@node-redis/time-series": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/@node-redis/time-series/-/time-series-1.0.2.tgz", + "integrity": "sha512-HGQ8YooJ8Mx7l28tD7XjtB3ImLEjlUxG1wC1PAjxu6hPJqjPshUZxAICzDqDjtIbhDTf48WXXUcx8TQJB1XTKA==", + "peerDependencies": { + "@node-redis/client": "^1.0.0" + } + }, "node_modules/@nodelib/fs.scandir": { "version": "2.1.5", "resolved": "https://registry.npmjs.org/@nodelib/fs.scandir/-/fs.scandir-2.1.5.tgz", @@ -356,15 +415,6 @@ "integrity": "sha512-EEhsLsD6UsDM1yFhAvy0Cjr6VwmpMWqFBCb9w07wVugF7w9nfajxLuVmngTIpgS6svCnm6Vaw+MZhoDCKnOfsw==", "devOptional": true }, - "node_modules/@types/redis": { - "version": "2.8.32", - "resolved": "https://registry.npmjs.org/@types/redis/-/redis-2.8.32.tgz", - "integrity": "sha512-7jkMKxcGq9p242exlbsVzuJb57KqHRhNl4dHoQu2Y5v9bCAbtIXXH0R3HleSQW4CTOqpHIYUW3t6tpUj4BVQ+w==", - "dev": true, - "dependencies": { - "@types/node": "*" - } - }, "node_modules/@types/serve-static": { "version": "1.13.10", "resolved": "https://registry.npmjs.org/@types/serve-static/-/serve-static-1.13.10.tgz", @@ -1204,6 +1254,14 @@ "node": ">=4" } }, + "node_modules/cluster-key-slot": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/cluster-key-slot/-/cluster-key-slot-1.1.0.tgz", + "integrity": "sha512-2Nii8p3RwAPiFwsnZvukotvow2rIHM+yQ6ZcBXGHdniadkYGZYiGmkHJIbZPIV9nfv7m/U1IPMVVcAhoWFeklw==", + "engines": { + "node": ">=0.10.0" + } + }, "node_modules/code-point-at": { "version": "1.1.0", "resolved": "https://registry.npmjs.org/code-point-at/-/code-point-at-1.1.0.tgz", @@ -1434,14 +1492,6 @@ "resolved": "https://registry.npmjs.org/delegates/-/delegates-1.0.0.tgz", "integrity": "sha1-hMbhWbgZBP3KWaDvRM2HDTElD5o=" 
}, - "node_modules/denque": { - "version": "1.5.1", - "resolved": "https://registry.npmjs.org/denque/-/denque-1.5.1.tgz", - "integrity": "sha512-XwE+iZ4D6ZUB7mfYRMb5wByE8L74HCn30FBN7sWnXksWc1LO1bPDl67pBR9o/kC4z/xSNAwkMYcGgqDV3BE3Hw==", - "engines": { - "node": ">=0.10" - } - }, "node_modules/depd": { "version": "1.1.2", "resolved": "https://registry.npmjs.org/depd/-/depd-1.1.2.tgz", @@ -2201,6 +2251,14 @@ "node": ">=0.10.0" } }, + "node_modules/generic-pool": { + "version": "3.8.2", + "resolved": "https://registry.npmjs.org/generic-pool/-/generic-pool-3.8.2.tgz", + "integrity": "sha512-nGToKy6p3PAbYQ7p1UlWl6vSPwfwU6TMSWK7TTu+WUY4ZjyZQGniGGt2oNVvyNSpyZYSB43zMXVLcBm08MTMkg==", + "engines": { + "node": ">= 4" + } + }, "node_modules/get-caller-file": { "version": "2.0.5", "resolved": "https://registry.npmjs.org/get-caller-file/-/get-caller-file-2.0.5.tgz", @@ -3872,6 +3930,17 @@ "node": ">= 0.6" } }, + "node_modules/rate-limit-redis": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/rate-limit-redis/-/rate-limit-redis-3.0.1.tgz", + "integrity": "sha512-L6yhOUBrAZ8VEMX9DwlM3X6hfm8yq+gBO4LoOW7+JgmNq59zE7QmLz4v5VnwYPvLeSh/e7PDcrzUI3UumJw1iw==", + "engines": { + "node": ">= 14.5.0" + }, + "peerDependencies": { + "express-rate-limit": "^6" + } + }, "node_modules/raw-body": { "version": "2.4.0", "resolved": "https://registry.npmjs.org/raw-body/-/raw-body-2.4.0.tgz", @@ -3927,28 +3996,18 @@ } }, "node_modules/redis": { - "version": "3.1.2", - "resolved": "https://registry.npmjs.org/redis/-/redis-3.1.2.tgz", - "integrity": "sha512-grn5KoZLr/qrRQVwoSkmzdbw6pwF+/rwODtrOr6vuBRiR/f3rjSTGupbF90Zpqm2oenix8Do6RV7pYEkGwlKkw==", + "version": "4.0.6", + "resolved": "https://registry.npmjs.org/redis/-/redis-4.0.6.tgz", + "integrity": "sha512-IaPAxgF5dV0jx+A9l6yd6R9/PAChZIoAskDVRzUODeLDNhsMlq7OLLTmu0AwAr0xjrJ1bibW5xdpRwqIQ8Q0Xg==", "dependencies": { - "denque": "^1.5.0", - "redis-commands": "^1.7.0", - "redis-errors": "^1.2.0", - "redis-parser": "^3.0.0" - }, - "engines": { - "node": ">=10" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/node-redis" + "@node-redis/bloom": "1.0.1", + "@node-redis/client": "1.0.5", + "@node-redis/graph": "1.0.0", + "@node-redis/json": "1.0.2", + "@node-redis/search": "1.0.5", + "@node-redis/time-series": "1.0.2" } }, - "node_modules/redis-commands": { - "version": "1.7.0", - "resolved": "https://registry.npmjs.org/redis-commands/-/redis-commands-1.7.0.tgz", - "integrity": "sha512-nJWqw3bTFy21hX/CPKHth6sfhZbdiHP6bTawSgQBlKOVRG7EZkfHbbHwQJnrE4vsQf0CMNE+3gJ4Fmm16vdVlQ==" - }, "node_modules/redis-errors": { "version": "1.2.0", "resolved": "https://registry.npmjs.org/redis-errors/-/redis-errors-1.2.0.tgz", @@ -5165,6 +5224,54 @@ "integrity": "sha512-ZnQMnLV4e7hDlUvw8H+U8ASL02SS2Gn6+9Ac3wGGLIe7+je2AeAOxPY+izIPJDfFDb7eDjev0Us8MO1iFRN8hA==", "dev": true }, + "@node-redis/bloom": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/@node-redis/bloom/-/bloom-1.0.1.tgz", + "integrity": "sha512-mXEBvEIgF4tUzdIN89LiYsbi6//EdpFA7L8M+DHCvePXg+bfHWi+ct5VI6nHUFQE5+ohm/9wmgihCH3HSkeKsw==", + "requires": {} + }, + "@node-redis/client": { + "version": "1.0.5", + "resolved": "https://registry.npmjs.org/@node-redis/client/-/client-1.0.5.tgz", + "integrity": "sha512-ESZ3bd1f+od62h4MaBLKum+klVJfA4wAeLHcVQBkoXa1l0viFesOWnakLQqKg+UyrlJhZmXJWtu0Y9v7iTMrig==", + "requires": { + "cluster-key-slot": "1.1.0", + "generic-pool": "3.8.2", + "redis-parser": "3.0.0", + "yallist": "4.0.0" + }, + "dependencies": { + "yallist": { + "version": "4.0.0", + 
"resolved": "https://registry.npmjs.org/yallist/-/yallist-4.0.0.tgz", + "integrity": "sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A==" + } + } + }, + "@node-redis/graph": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/@node-redis/graph/-/graph-1.0.0.tgz", + "integrity": "sha512-mRSo8jEGC0cf+Rm7q8mWMKKKqkn6EAnA9IA2S3JvUv/gaWW/73vil7GLNwion2ihTptAm05I9LkepzfIXUKX5g==", + "requires": {} + }, + "@node-redis/json": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/@node-redis/json/-/json-1.0.2.tgz", + "integrity": "sha512-qVRgn8WfG46QQ08CghSbY4VhHFgaTY71WjpwRBGEuqGPfWwfRcIf3OqSpR7Q/45X+v3xd8mvYjywqh0wqJ8T+g==", + "requires": {} + }, + "@node-redis/search": { + "version": "1.0.5", + "resolved": "https://registry.npmjs.org/@node-redis/search/-/search-1.0.5.tgz", + "integrity": "sha512-MCOL8iCKq4v+3HgEQv8zGlSkZyXSXtERgrAJ4TSryIG/eLFy84b57KmNNa/V7M1Q2Wd2hgn2nPCGNcQtk1R1OQ==", + "requires": {} + }, + "@node-redis/time-series": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/@node-redis/time-series/-/time-series-1.0.2.tgz", + "integrity": "sha512-HGQ8YooJ8Mx7l28tD7XjtB3ImLEjlUxG1wC1PAjxu6hPJqjPshUZxAICzDqDjtIbhDTf48WXXUcx8TQJB1XTKA==", + "requires": {} + }, "@nodelib/fs.scandir": { "version": "2.1.5", "resolved": "https://registry.npmjs.org/@nodelib/fs.scandir/-/fs.scandir-2.1.5.tgz", @@ -5379,15 +5486,6 @@ "integrity": "sha512-EEhsLsD6UsDM1yFhAvy0Cjr6VwmpMWqFBCb9w07wVugF7w9nfajxLuVmngTIpgS6svCnm6Vaw+MZhoDCKnOfsw==", "devOptional": true }, - "@types/redis": { - "version": "2.8.32", - "resolved": "https://registry.npmjs.org/@types/redis/-/redis-2.8.32.tgz", - "integrity": "sha512-7jkMKxcGq9p242exlbsVzuJb57KqHRhNl4dHoQu2Y5v9bCAbtIXXH0R3HleSQW4CTOqpHIYUW3t6tpUj4BVQ+w==", - "dev": true, - "requires": { - "@types/node": "*" - } - }, "@types/serve-static": { "version": "1.13.10", "resolved": "https://registry.npmjs.org/@types/serve-static/-/serve-static-1.13.10.tgz", @@ -6016,6 +6114,11 @@ } } }, + "cluster-key-slot": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/cluster-key-slot/-/cluster-key-slot-1.1.0.tgz", + "integrity": "sha512-2Nii8p3RwAPiFwsnZvukotvow2rIHM+yQ6ZcBXGHdniadkYGZYiGmkHJIbZPIV9nfv7m/U1IPMVVcAhoWFeklw==" + }, "code-point-at": { "version": "1.1.0", "resolved": "https://registry.npmjs.org/code-point-at/-/code-point-at-1.1.0.tgz", @@ -6196,11 +6299,6 @@ "resolved": "https://registry.npmjs.org/delegates/-/delegates-1.0.0.tgz", "integrity": "sha1-hMbhWbgZBP3KWaDvRM2HDTElD5o=" }, - "denque": { - "version": "1.5.1", - "resolved": "https://registry.npmjs.org/denque/-/denque-1.5.1.tgz", - "integrity": "sha512-XwE+iZ4D6ZUB7mfYRMb5wByE8L74HCn30FBN7sWnXksWc1LO1bPDl67pBR9o/kC4z/xSNAwkMYcGgqDV3BE3Hw==" - }, "depd": { "version": "1.1.2", "resolved": "https://registry.npmjs.org/depd/-/depd-1.1.2.tgz", @@ -6790,6 +6888,11 @@ } } }, + "generic-pool": { + "version": "3.8.2", + "resolved": "https://registry.npmjs.org/generic-pool/-/generic-pool-3.8.2.tgz", + "integrity": "sha512-nGToKy6p3PAbYQ7p1UlWl6vSPwfwU6TMSWK7TTu+WUY4ZjyZQGniGGt2oNVvyNSpyZYSB43zMXVLcBm08MTMkg==" + }, "get-caller-file": { "version": "2.0.5", "resolved": "https://registry.npmjs.org/get-caller-file/-/get-caller-file-2.0.5.tgz", @@ -8039,6 +8142,12 @@ "resolved": "https://registry.npmjs.org/range-parser/-/range-parser-1.2.1.tgz", "integrity": "sha512-Hrgsx+orqoygnmhFbKaHE6c296J+HTAQXoxEF6gNupROmmGJRoyzfG3ccAveqCBrwr/2yxQ5BVd/GTl5agOwSg==" }, + "rate-limit-redis": { + "version": "3.0.1", + "resolved": 
"https://registry.npmjs.org/rate-limit-redis/-/rate-limit-redis-3.0.1.tgz", + "integrity": "sha512-L6yhOUBrAZ8VEMX9DwlM3X6hfm8yq+gBO4LoOW7+JgmNq59zE7QmLz4v5VnwYPvLeSh/e7PDcrzUI3UumJw1iw==", + "requires": {} + }, "raw-body": { "version": "2.4.0", "resolved": "https://registry.npmjs.org/raw-body/-/raw-body-2.4.0.tgz", @@ -8082,21 +8191,18 @@ } }, "redis": { - "version": "3.1.2", - "resolved": "https://registry.npmjs.org/redis/-/redis-3.1.2.tgz", - "integrity": "sha512-grn5KoZLr/qrRQVwoSkmzdbw6pwF+/rwODtrOr6vuBRiR/f3rjSTGupbF90Zpqm2oenix8Do6RV7pYEkGwlKkw==", + "version": "4.0.6", + "resolved": "https://registry.npmjs.org/redis/-/redis-4.0.6.tgz", + "integrity": "sha512-IaPAxgF5dV0jx+A9l6yd6R9/PAChZIoAskDVRzUODeLDNhsMlq7OLLTmu0AwAr0xjrJ1bibW5xdpRwqIQ8Q0Xg==", "requires": { - "denque": "^1.5.0", - "redis-commands": "^1.7.0", - "redis-errors": "^1.2.0", - "redis-parser": "^3.0.0" + "@node-redis/bloom": "1.0.1", + "@node-redis/client": "1.0.5", + "@node-redis/graph": "1.0.0", + "@node-redis/json": "1.0.2", + "@node-redis/search": "1.0.5", + "@node-redis/time-series": "1.0.2" } }, - "redis-commands": { - "version": "1.7.0", - "resolved": "https://registry.npmjs.org/redis-commands/-/redis-commands-1.7.0.tgz", - "integrity": "sha512-nJWqw3bTFy21hX/CPKHth6sfhZbdiHP6bTawSgQBlKOVRG7EZkfHbbHwQJnrE4vsQf0CMNE+3gJ4Fmm16vdVlQ==" - }, "redis-errors": { "version": "1.2.0", "resolved": "https://registry.npmjs.org/redis-errors/-/redis-errors-1.2.0.tgz", diff --git a/package.json b/package.json index d727b11..4ddd8dc 100644 --- a/package.json +++ b/package.json @@ -26,7 +26,8 @@ "express-rate-limit": "^6.3.0", "lodash": "^4.17.21", "pg": "^8.7.1", - "redis": "^3.1.2", + "rate-limit-redis": "^3.0.1", + "redis": "^4.0.6", "sync-mysql": "^3.0.1" }, "devDependencies": { @@ -37,7 +38,6 @@ "@types/mocha": "^9.0.0", "@types/node": "^16.11.11", "@types/pg": "^8.6.1", - "@types/redis": "^2.8.32", "@typescript-eslint/eslint-plugin": "^5.5.0", "@typescript-eslint/parser": "^5.5.0", "eslint": "^8.3.0", diff --git a/src/middleware/requestRateLimit.ts b/src/middleware/requestRateLimit.ts index 63b65c9..a188011 100644 --- a/src/middleware/requestRateLimit.ts +++ b/src/middleware/requestRateLimit.ts @@ -6,6 +6,9 @@ import { RateLimitConfig } from "../types/config.model"; import { Request } from "express"; import { isUserVIP } from "../utils/isUserVIP"; import { UserID } from "../types/user.model"; +import RedisStore from "rate-limit-redis"; +import redis from "../utils/redis"; +import { config } from "../config"; export function rateLimitMiddleware(limitConfig: RateLimitConfig, getUserID?: (req: Request) => UserID): RateLimitRequestHandler { return rateLimit({ @@ -24,6 +27,9 @@ export function rateLimitMiddleware(limitConfig: RateLimitConfig, getUserID?: (r } else { return next(); } - } + }, + store: config.redis ? 
new RedisStore({ + sendCommand: (...args: string[]) => redis.sendCommand(args), + }) : null, }); } diff --git a/src/routes/addUserAsTempVIP.ts b/src/routes/addUserAsTempVIP.ts index fb8d482..3001a0e 100644 --- a/src/routes/addUserAsTempVIP.ts +++ b/src/routes/addUserAsTempVIP.ts @@ -9,6 +9,7 @@ import { isUserVIP } from "../utils/isUserVIP"; import { HashedUserID } from "../types/user.model"; import redis from "../utils/redis"; import { tempVIPKey } from "../utils/redisKeys"; +import { Logger } from "../utils/logger"; interface AddUserAsTempVIPRequest extends Request { query: { @@ -65,12 +66,22 @@ export async function addUserAsTempVIP(req: AddUserAsTempVIPRequest, res: Respon if (!channelInfo?.id) { return res.status(404).send(`No channel found for videoID ${channelVideoID}`); } - await redis.setAsyncEx(tempVIPKey(userID), channelInfo?.id, dayInSeconds); - await privateDB.prepare("run", `INSERT INTO "tempVipLog" VALUES (?, ?, ?, ?)`, [adminUserID, userID, + enabled, startTime]); - return res.status(200).send(`Temp VIP added on channel ${channelInfo?.name}`); - } - await redis.delAsync(tempVIPKey(userID)); - await privateDB.prepare("run", `INSERT INTO "tempVipLog" VALUES (?, ?, ?, ?)`, [adminUserID, userID, + enabled, startTime]); - return res.status(200).send(`Temp VIP removed`); + try { + await redis.setEx(tempVIPKey(userID), dayInSeconds, channelInfo?.id); + await privateDB.prepare("run", `INSERT INTO "tempVipLog" VALUES (?, ?, ?, ?)`, [adminUserID, userID, + enabled, startTime]); + return res.status(200).send(`Temp VIP added on channel ${channelInfo?.name}`); + } catch (e) { + Logger.error(e as string); + return res.status(500).send(); + } + } + try { + await redis.del(tempVIPKey(userID)); + await privateDB.prepare("run", `INSERT INTO "tempVipLog" VALUES (?, ?, ?, ?)`, [adminUserID, userID, + enabled, startTime]); + return res.status(200).send(`Temp VIP removed`); + } catch (e) { + Logger.error(e as string); + return res.status(500).send(); + } } \ No newline at end of file diff --git a/src/routes/getStatus.ts b/src/routes/getStatus.ts index bc855c2..770ca25 100644 --- a/src/routes/getStatus.ts +++ b/src/routes/getStatus.ts @@ -10,8 +10,12 @@ export async function getStatus(req: Request, res: Response): Promise value = Array.isArray(value) ? 
value[0] : value; try { const dbVersion = (await db.prepare("get", "SELECT key, value FROM config where key = ?", ["version"])).value; - const numberRequests = await redis.increment("statusRequest"); - const statusRequests = numberRequests?.replies?.[0]; + let statusRequests: unknown = 0; + try { + const numberRequests = await redis.increment("statusRequest"); + statusRequests = numberRequests?.[0]; + } catch (error) { } // eslint-disable-line no-empty + const statusValues: Record = { uptime: process.uptime(), commit: (global as any).HEADCOMMIT || "unknown", diff --git a/src/types/config.model.ts b/src/types/config.model.ts index 1a5a22f..52ba2f2 100644 --- a/src/types/config.model.ts +++ b/src/types/config.model.ts @@ -41,7 +41,7 @@ export interface SBSConfig { privateMysql?: any; minimumPrefix?: string; maximumPrefix?: string; - redis?: redis.ClientOpts; + redis?: redis.RedisClientOptions; maxRewardTimePerSegmentInSeconds?: number; postgres?: PoolConfig; dumpDatabase?: DumpDatabase; diff --git a/src/utils/getHashCache.ts b/src/utils/getHashCache.ts index 14d30c6..2936947 100644 --- a/src/utils/getHashCache.ts +++ b/src/utils/getHashCache.ts @@ -18,18 +18,19 @@ export async function getHashCache(value: T, times = defaulted async function getFromRedis(key: HashedValue): Promise { const redisKey = shaHashKey(key); - const { err, reply } = await redis.getAsync(redisKey); - if (!err && reply) { - try { + try { + const reply = await redis.get(redisKey);; + + if (reply) { Logger.debug(`Got data from redis: ${reply}`); return reply as T & HashedValue; - } catch (e) { - // If all else, continue on hashing } - } - const data = getHash(key, cachedHashTimes); + } catch (e) {} // eslint-disable-line no-empty + + // Otherwise, calculate it + const data = getHash(key, cachedHashTimes); + redis.set(key, data); - redis.setAsync(key, data); return data as T & HashedValue; } \ No newline at end of file diff --git a/src/utils/isUserTempVIP.ts b/src/utils/isUserTempVIP.ts index dc098e0..a2758bc 100644 --- a/src/utils/isUserTempVIP.ts +++ b/src/utils/isUserTempVIP.ts @@ -5,6 +5,7 @@ import { YouTubeAPI } from "../utils/youtubeApi"; import { APIVideoInfo } from "../types/youtubeApi.model"; import { VideoID } from "../types/segments.model"; import { config } from "../config"; +import { Logger } from "./logger"; function getYouTubeVideoInfo(videoID: VideoID, ignoreCache = false): Promise { return config.newLeafURLs ? YouTubeAPI.listVideos(videoID, ignoreCache) : null; @@ -13,7 +14,11 @@ function getYouTubeVideoInfo(videoID: VideoID, ignoreCache = false): Promise => { const apiVideoInfo = await getYouTubeVideoInfo(videoID); const channelID = apiVideoInfo?.data?.authorId; - const { err, reply } = await redis.getAsync(tempVIPKey(hashedUserID)); - - return err || !reply ? 
false : (reply == channelID); + try { + const reply = await redis.get(tempVIPKey(hashedUserID)); + return reply && reply == channelID; + } catch (e) { + Logger.error(e as string); + return false; + } }; \ No newline at end of file diff --git a/src/utils/queryCacher.ts b/src/utils/queryCacher.ts index 57af472..bd6e630 100644 --- a/src/utils/queryCacher.ts +++ b/src/utils/queryCacher.ts @@ -5,21 +5,18 @@ import { Service, VideoID, VideoIDHash } from "../types/segments.model"; import { UserID } from "../types/user.model"; async function get(fetchFromDB: () => Promise, key: string): Promise { - const { err, reply } = await redis.getAsync(key); - - if (!err && reply) { - try { + try { + const reply = await redis.get(key); + if (reply) { Logger.debug(`Got data from redis: ${reply}`); return JSON.parse(reply); - } catch (e) { - // If all else, continue on to fetching from the database } - } + } catch (e) { Logger.error(e as string)} //eslint-disable-line no-empty const data = await fetchFromDB(); - redis.setAsync(key, JSON.stringify(data)); + redis.set(key, JSON.stringify(data)); return data; } @@ -30,18 +27,17 @@ async function get(fetchFromDB: () => Promise, key: string): Promise { async function getAndSplit(fetchFromDB: (values: U[]) => Promise>, keyGenerator: (value: U) => string, splitKey: string, values: U[]): Promise> { const cachedValues = await Promise.all(values.map(async (value) => { const key = keyGenerator(value); - const { err, reply } = await redis.getAsync(key); - - if (!err && reply) { - try { + try { + const reply = await redis.get(key); + if (reply) { Logger.debug(`Got data from redis: ${reply}`); return { value, result: JSON.parse(reply) }; - } catch (e) { } //eslint-disable-line no-empty - } + } + } catch (e) { } //eslint-disable-line no-empty return { value, @@ -71,7 +67,7 @@ async function getAndSplit(fetchFromDB: (values: U[]) => Pr } for (const key in newResults) { - redis.setAsync(key, JSON.stringify(newResults[key])); + redis.set(key, JSON.stringify(newResults[key])); } }); } @@ -81,16 +77,16 @@ async function getAndSplit(fetchFromDB: (values: U[]) => Pr function clearSegmentCache(videoInfo: { videoID: VideoID; hashedVideoID: VideoIDHash; service: Service; userID?: UserID; }): void { if (videoInfo) { - redis.delAsync(skipSegmentsKey(videoInfo.videoID, videoInfo.service)); - redis.delAsync(skipSegmentGroupsKey(videoInfo.videoID, videoInfo.service)); - redis.delAsync(skipSegmentsHashKey(videoInfo.hashedVideoID, videoInfo.service)); - if (videoInfo.userID) redis.delAsync(reputationKey(videoInfo.userID)); + redis.del(skipSegmentsKey(videoInfo.videoID, videoInfo.service)); + redis.del(skipSegmentGroupsKey(videoInfo.videoID, videoInfo.service)); + redis.del(skipSegmentsHashKey(videoInfo.hashedVideoID, videoInfo.service)); + if (videoInfo.userID) redis.del(reputationKey(videoInfo.userID)); } } function clearRatingCache(videoInfo: { hashedVideoID: VideoIDHash; service: Service;}): void { if (videoInfo) { - redis.delAsync(ratingHashKey(videoInfo.hashedVideoID, videoInfo.service)); + redis.del(ratingHashKey(videoInfo.hashedVideoID, videoInfo.service)); } } diff --git a/src/utils/redis.ts b/src/utils/redis.ts index 7faefe6..6ac3174 100644 --- a/src/utils/redis.ts +++ b/src/utils/redis.ts @@ -1,59 +1,57 @@ import { config } from "../config"; import { Logger } from "./logger"; -import redis, { Callback } from "redis"; +import { createClient } from "redis"; +import { RedisClientType } from "@node-redis/client"; +import { RedisCommandArgument, RedisCommandArguments, 
RedisCommandRawReply } from "@node-redis/client/dist/lib/commands"; +import { ClientCommandOptions } from "@node-redis/client/dist/lib/client"; +import { RedisReply } from "rate-limit-redis"; interface RedisSB { - get(key: string, callback?: Callback): void; - getAsync?(key: string): Promise<{err: Error | null, reply: string | null}>; - set(key: string, value: string, callback?: Callback): void; - setAsync?(key: string, value: string): Promise<{err: Error | null, reply: string | null}>; - setAsyncEx?(key: string, value: string, seconds: number): Promise<{err: Error | null, reply: string | null}>; - delAsync?(...keys: [string]): Promise; - close?(flush?: boolean): void; - increment?(key: string): Promise<{err: Error| null, replies: any[] | null}>; + get(key: RedisCommandArgument): Promise; + set(key: RedisCommandArgument, value: RedisCommandArgument): Promise; + setEx(key: RedisCommandArgument, seconds: number, value: RedisCommandArgument): Promise; + del(...keys: [RedisCommandArgument]): Promise; + increment?(key: RedisCommandArgument): Promise; + sendCommand(args: RedisCommandArguments, options?: ClientCommandOptions): Promise; + quit(): Promise; } -let exportObject: RedisSB = { - get: (key, callback?) => callback(null, undefined), - getAsync: () => - new Promise((resolve) => resolve({ err: null, reply: undefined })), - set: (key, value, callback) => callback(null, undefined), - setAsync: () => - new Promise((resolve) => resolve({ err: null, reply: undefined })), - setAsyncEx: () => - new Promise((resolve) => resolve({ err: null, reply: undefined })), - delAsync: () => - new Promise((resolve) => resolve(null)), - increment: () => - new Promise((resolve) => resolve({ err: null, replies: undefined })), +let exportClient: RedisSB = { + get: (key) => new Promise((resolve, reject) => reject()), + set: (key, value) => new Promise((resolve, reject) => reject()), + setEx: (key, value, seconds) => new Promise((resolve, reject) => reject()), + del: (...keys) => new Promise((resolve, reject) => reject()), + increment: (key) => new Promise((resolve, reject) => reject()), + sendCommand: (command, args) => new Promise((resolve, reject) => reject()), + quit: () => new Promise((resolve, reject) => reject()), }; if (config.redis) { Logger.info("Connected to redis"); - const client = redis.createClient(config.redis); - exportObject = client; + const client = createClient(config.redis); + client.connect(); + exportClient = client; const timeoutDuration = 200; - exportObject.getAsync = (key) => new Promise((resolve) => { - const timeout = setTimeout(() => resolve({ err: null, reply: undefined }), timeoutDuration); - client.get(key, (err, reply) => { + const get = client.get.bind(client); + exportClient.get = (key) => new Promise((resolve, reject) => { + const timeout = setTimeout(() => reject(), timeoutDuration); + get(key).then((reply) => { clearTimeout(timeout); - resolve({ err, reply }); - }); + resolve(reply); + }).catch((err) => reject(err)); }); - exportObject.setAsync = (key, value) => new Promise((resolve) => client.set(key, value, (err, reply) => resolve({ err, reply }))); - exportObject.setAsyncEx = (key, value, seconds) => new Promise((resolve) => client.setex(key, seconds, value, (err, reply) => resolve({ err, reply }))); - exportObject.delAsync = (...keys) => new Promise((resolve) => client.del(keys, (err) => resolve(err))); - exportObject.close = (flush) => client.end(flush); - exportObject.increment = (key) => new Promise((resolve) => + exportClient.increment = (key) => new Promise((resolve, 
reject) => client.multi() .incr(key) .expire(key, 60) - .exec((err, replies) => resolve({ err, replies })) + .exec() + .then((reply) => resolve(reply)) + .catch((err) => reject(err)) ); client.on("error", function(error) { Logger.error(error); }); } -export default exportObject; +export default exportClient; diff --git a/test/cases/redisTest.ts b/test/cases/redisTest.ts index 3ab238d..e86f123 100644 --- a/test/cases/redisTest.ts +++ b/test/cases/redisTest.ts @@ -12,21 +12,20 @@ const randKey2 = genRandom(16); describe("redis test", function() { before(async function() { if (!config.redis) this.skip(); - await redis.setAsync(randKey1, randValue1); + await redis.set(randKey1, randValue1); }); it("Should get stored value", (done) => { - redis.getAsync(randKey1) + redis.get(randKey1) .then(res => { - if (res.err) assert.fail(res.err); - assert.strictEqual(res.reply, randValue1); + assert.strictEqual(res, randValue1); done(); - }); + }).catch(err => assert.fail(err)); }); it("Should not be able to get not stored value", (done) => { - redis.getAsync(randKey2) + redis.get(randKey2) .then(res => { - if (res.reply || res.err ) assert.fail("Value should not be found"); + if (res) assert.fail("Value should not be found"); done(); - }); + }).catch(err => assert.fail(err)); }); }); \ No newline at end of file diff --git a/test/cases/tempVip.ts b/test/cases/tempVip.ts index c2fbc10..3051e0c 100644 --- a/test/cases/tempVip.ts +++ b/test/cases/tempVip.ts @@ -6,6 +6,7 @@ import { client } from "../utils/httpClient"; import { db, privateDB } from "../../src/databases/databases"; import redis from "../../src/utils/redis"; import assert from "assert"; +import { Logger } from "../../src/utils/logger"; // helpers const getSegment = (UUID: string) => db.prepare("get", `SELECT "votes", "locked", "category" FROM "sponsorTimes" WHERE "UUID" = ?`, [UUID]); @@ -51,8 +52,13 @@ const postVoteCategory = (userID: string, UUID: string, category: string) => cli } }); const checkUserVIP = async (publicID: HashedUserID) => { - const { reply } = await redis.getAsync(tempVIPKey(publicID)); - return reply; + try { + const reply = await redis.get(tempVIPKey(publicID)); + return reply; + } catch (e) { + Logger.error(e as string); + return false; + } }; describe("tempVIP test", function() { @@ -67,7 +73,7 @@ describe("tempVIP test", function() { await db.prepare("run", 'INSERT INTO "vipUsers" ("userID") VALUES (?)', [publicPermVIP1]); await db.prepare("run", 'INSERT INTO "vipUsers" ("userID") VALUES (?)', [publicPermVIP2]); // clear redis if running consecutive tests - await redis.delAsync(tempVIPKey(publicTempVIPOne)); + await redis.del(tempVIPKey(publicTempVIPOne)); }); it("Should update db version when starting the application", () => { diff --git a/test/test.ts b/test/test.ts index 3d72c34..e13f4ba 100644 --- a/test/test.ts +++ b/test/test.ts @@ -57,7 +57,7 @@ async function init() { mocha.run((failures) => { mockServer.close(); server.close(); - redis.close(true); + redis.quit(); process.exitCode = failures ? 
1 : 0; // exit with non-zero status if there were failures }); }); From 146ba4ff93267a1682ec2ecabe4d35b134ff014d Mon Sep 17 00:00:00 2001 From: Ajay Date: Wed, 13 Apr 2022 17:41:57 -0400 Subject: [PATCH 11/38] Migrate breaking config changes --- src/config.ts | 19 +++++++++++++++++++ 1 file changed, 19 insertions(+) diff --git a/src/config.ts b/src/config.ts index f4e6175..e5e694a 100644 --- a/src/config.ts +++ b/src/config.ts @@ -1,6 +1,7 @@ import fs from "fs"; import { SBSConfig } from "./types/config.model"; import packageJson from "../package.json"; +import { RedisSearchLanguages } from "@node-redis/search/dist/commands"; const isTestMode = process.env.npm_lifecycle_script === packageJson.scripts.test; const configFile = process.env.TEST_POSTGRES ? "ci.json" @@ -8,6 +9,7 @@ const configFile = process.env.TEST_POSTGRES ? "ci.json" : "config.json"; export const config: SBSConfig = JSON.parse(fs.readFileSync(configFile).toString("utf8")); +migrate(config); addDefaults(config, { port: 80, behindProxy: "X-Forwarded-For", @@ -109,3 +111,20 @@ function addDefaults(config: SBSConfig, defaults: SBSConfig) { } } } + +function migrate(config: SBSConfig) { + // Redis change + if (config.redis) { + const redisConfig = config.redis as any; + if (redisConfig["host"] || redisConfig["port"]) { + config.redis.socket = { + host: redisConfig["host"], + port: redisConfig["port"] + }; + } + + if (redisConfig["enable_offline_queue"] !== undefined) { + config.disableOfflineQueue = !redisConfig["enable_offline_queue"]; + } + } +} \ No newline at end of file From 1df811710551a0d4b92f477eff75786662435f70 Mon Sep 17 00:00:00 2001 From: Ajay Date: Wed, 13 Apr 2022 17:43:38 -0400 Subject: [PATCH 12/38] Fix warnings and errors --- src/config.ts | 1 - src/utils/getHashCache.ts | 2 +- src/utils/queryCacher.ts | 2 +- src/utils/redis.ts | 13 ++++++------- 4 files changed, 8 insertions(+), 10 deletions(-) diff --git a/src/config.ts b/src/config.ts index e5e694a..5822800 100644 --- a/src/config.ts +++ b/src/config.ts @@ -1,7 +1,6 @@ import fs from "fs"; import { SBSConfig } from "./types/config.model"; import packageJson from "../package.json"; -import { RedisSearchLanguages } from "@node-redis/search/dist/commands"; const isTestMode = process.env.npm_lifecycle_script === packageJson.scripts.test; const configFile = process.env.TEST_POSTGRES ? 
"ci.json" diff --git a/src/utils/getHashCache.ts b/src/utils/getHashCache.ts index 2936947..f801545 100644 --- a/src/utils/getHashCache.ts +++ b/src/utils/getHashCache.ts @@ -20,7 +20,7 @@ async function getFromRedis(key: HashedValue): Promise(fetchFromDB: () => Promise, key: string): Promise { return JSON.parse(reply); } - } catch (e) { Logger.error(e as string)} //eslint-disable-line no-empty + } catch (e) { } //eslint-disable-line no-empty const data = await fetchFromDB(); diff --git a/src/utils/redis.ts b/src/utils/redis.ts index 6ac3174..3c3123e 100644 --- a/src/utils/redis.ts +++ b/src/utils/redis.ts @@ -1,7 +1,6 @@ import { config } from "../config"; import { Logger } from "./logger"; import { createClient } from "redis"; -import { RedisClientType } from "@node-redis/client"; import { RedisCommandArgument, RedisCommandArguments, RedisCommandRawReply } from "@node-redis/client/dist/lib/commands"; import { ClientCommandOptions } from "@node-redis/client/dist/lib/client"; import { RedisReply } from "rate-limit-redis"; @@ -17,12 +16,12 @@ interface RedisSB { } let exportClient: RedisSB = { - get: (key) => new Promise((resolve, reject) => reject()), - set: (key, value) => new Promise((resolve, reject) => reject()), - setEx: (key, value, seconds) => new Promise((resolve, reject) => reject()), - del: (...keys) => new Promise((resolve, reject) => reject()), - increment: (key) => new Promise((resolve, reject) => reject()), - sendCommand: (command, args) => new Promise((resolve, reject) => reject()), + get: () => new Promise((resolve, reject) => reject()), + set: () => new Promise((resolve, reject) => reject()), + setEx: () => new Promise((resolve, reject) => reject()), + del: () => new Promise((resolve, reject) => reject()), + increment: () => new Promise((resolve, reject) => reject()), + sendCommand: () => new Promise((resolve, reject) => reject()), quit: () => new Promise((resolve, reject) => reject()), }; From 929856fd3f894c5ef4c56e6fb6d1f96af13a9d35 Mon Sep 17 00:00:00 2001 From: Ajay Date: Wed, 13 Apr 2022 17:45:41 -0400 Subject: [PATCH 13/38] No ugly json access --- src/config.ts | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/src/config.ts b/src/config.ts index 5822800..d6f59ca 100644 --- a/src/config.ts +++ b/src/config.ts @@ -115,15 +115,15 @@ function migrate(config: SBSConfig) { // Redis change if (config.redis) { const redisConfig = config.redis as any; - if (redisConfig["host"] || redisConfig["port"]) { + if (redisConfig.host || redisConfig.port) { config.redis.socket = { - host: redisConfig["host"], - port: redisConfig["port"] + host: redisConfig.host, + port: redisConfig.port }; } - if (redisConfig["enable_offline_queue"] !== undefined) { - config.disableOfflineQueue = !redisConfig["enable_offline_queue"]; + if (redisConfig.enable_offline_queue !== undefined) { + config.disableOfflineQueue = !redisConfig.enable_offline_queue; } } } \ No newline at end of file From 8a9c7c869b64969962a56c8fa22101b67508757c Mon Sep 17 00:00:00 2001 From: Ajay Ramachandran Date: Thu, 14 Apr 2022 01:18:48 -0400 Subject: [PATCH 14/38] new server --- nginx/nginx.conf | 3 +++ 1 file changed, 3 insertions(+) diff --git a/nginx/nginx.conf b/nginx/nginx.conf index 69526ec..2b4596d 100644 --- a/nginx/nginx.conf +++ b/nginx/nginx.conf @@ -72,6 +72,9 @@ http { server 10.0.0.13:4441 max_fails=25 fail_timeout=20s; server 10.0.0.13:4442 max_fails=25 fail_timeout=20s; + server 10.0.0.14:4441 max_fails=25 fail_timeout=20s; + server 10.0.0.14:4442 max_fails=25 fail_timeout=20s; + 
server 10.0.0.11:4441 max_fails=25 fail_timeout=20s; server 10.0.0.11:4442 max_fails=25 fail_timeout=20s; From c7905d40624ab14cb3e41e8adebe673c8b6b8bbc Mon Sep 17 00:00:00 2001 From: Ajay Date: Thu, 14 Apr 2022 01:54:26 -0400 Subject: [PATCH 15/38] Don't break for no response --- src/routes/getTopCategoryUsers.ts | 13 ++++++++----- 1 file changed, 8 insertions(+), 5 deletions(-) diff --git a/src/routes/getTopCategoryUsers.ts b/src/routes/getTopCategoryUsers.ts index f22fbc1..b9558dd 100644 --- a/src/routes/getTopCategoryUsers.ts +++ b/src/routes/getTopCategoryUsers.ts @@ -28,13 +28,16 @@ async function generateTopCategoryUsersStats(sortBy: string, category: string) { GROUP BY COALESCE("userName", "sponsorTimes"."userID") HAVING SUM("votes") > 20 ORDER BY "${sortBy}" DESC LIMIT 100`, [maxRewardTimePerSegmentInSeconds, maxRewardTimePerSegmentInSeconds, category]); - for (const row of rows) { - userNames.push(row.userName); - viewCounts.push(row.viewCount); - totalSubmissions.push(row.totalSubmissions); - minutesSaved.push(row.minutesSaved); + if (rows) { + for (const row of rows) { + userNames.push(row.userName); + viewCounts.push(row.viewCount); + totalSubmissions.push(row.totalSubmissions); + minutesSaved.push(row.minutesSaved); + } } + return { userNames, viewCounts, From e74b985304443b17b429c5c82696c7a03e78a166 Mon Sep 17 00:00:00 2001 From: Ajay Ramachandran Date: Thu, 14 Apr 2022 01:57:23 -0400 Subject: [PATCH 16/38] more writes --- nginx/nginx.conf | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/nginx/nginx.conf b/nginx/nginx.conf index 2b4596d..50cc509 100644 --- a/nginx/nginx.conf +++ b/nginx/nginx.conf @@ -49,7 +49,7 @@ http { #server localhost:4446; #server localhost:4447; #server localhost:4448; - server 10.0.0.4:4441 max_fails=25 fail_timeout=20s; + #server 10.0.0.4:4441 max_fails=25 fail_timeout=20s; #server 10.0.0.3:4441 max_fails=25 fail_timeout=20s; #server 10.0.0.3:4442 max_fails=25 fail_timeout=20s; @@ -87,6 +87,7 @@ http { #server localhost:4441; #server localhost:4442; server 10.0.0.3:4441 max_fails=25 fail_timeout=15s; + server 10.0.0.4:4441 max_fails=25 fail_timeout=15s; #server 10.0.0.3:4442; } upstream backend_db { From 98494aec4a9c1902ad2e5fb10c68f4e41e6dc591 Mon Sep 17 00:00:00 2001 From: Ajay Ramachandran Date: Wed, 27 Apr 2022 14:19:14 -0400 Subject: [PATCH 17/38] more server --- nginx/nginx.conf | 3 +++ 1 file changed, 3 insertions(+) diff --git a/nginx/nginx.conf b/nginx/nginx.conf index 50cc509..22d9a38 100644 --- a/nginx/nginx.conf +++ b/nginx/nginx.conf @@ -77,6 +77,9 @@ http { server 10.0.0.11:4441 max_fails=25 fail_timeout=20s; server 10.0.0.11:4442 max_fails=25 fail_timeout=20s; + + server 10.0.0.16:4441 max_fails=25 fail_timeout=20s; + server 10.0.0.16:4442 max_fails=25 fail_timeout=20s; #server 134.209.69.251:80 backup; From a66588619a640c9a1804bd8a5a62c34429a80542 Mon Sep 17 00:00:00 2001 From: Ajay Date: Tue, 3 May 2022 15:27:23 -0400 Subject: [PATCH 18/38] Fix rep hurting negatively voted segments --- src/routes/getSkipSegments.ts | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/src/routes/getSkipSegments.ts b/src/routes/getSkipSegments.ts index 237a3be..a9500f8 100644 --- a/src/routes/getSkipSegments.ts +++ b/src/routes/getSkipSegments.ts @@ -223,7 +223,8 @@ function getWeightedRandomChoice(choices: T[], amountOf //The 3 makes -2 the minimum votes before being ignored completely //this can be changed if this system increases in popularity. 
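//Worked example (illustrative numbers, boost omitted): under the old formula a segment at
//votes = -1 from a reputation = 2 submitter weighed e^(-1 * 3 + 3) = 1, while the same votes
//from a reputation = 0 submitter gave e^(-1 * 1 + 3) = e^2, so higher reputation made a
//downvoted segment less likely to be served. The change below applies the reputation
//multiplier only when votes are positive, so both cases now weigh e^2.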
- const weight = Math.exp(choice.votes * Math.max(1, choice.reputation + 1) + 3 + boost); + const repFactor = choice.votes > 0 ? Math.max(1, choice.reputation + 1) : 1; + const weight = Math.exp(choice.votes * repFactor + 3 + boost); totalWeight += Math.max(weight, 0); return { ...choice, weight }; From 5b177a3e53e061f1fad4b9bc0925fd0af29edec4 Mon Sep 17 00:00:00 2001 From: Ajay Date: Tue, 3 May 2022 22:08:44 -0400 Subject: [PATCH 19/38] Prepare dockerfile for use, allow configuring via env vars --- Dockerfile | 6 +-- entrypoint.sh | 22 ++------- src/app.ts | 2 +- src/config.ts | 78 +++++++++++++++++++++++++----- src/databases/databases.ts | 2 +- src/middleware/requestRateLimit.ts | 2 +- src/routes/dumpDatabase.ts | 4 +- src/types/config.model.ts | 12 ++++- src/utils/redis.ts | 2 +- test.json | 1 + test/cases/getStatus.ts | 4 +- test/cases/redisTest.ts | 2 +- test/cases/tempVip.ts | 2 +- 13 files changed, 93 insertions(+), 46 deletions(-) diff --git a/Dockerfile b/Dockerfile index c39d92e..357fced 100644 --- a/Dockerfile +++ b/Dockerfile @@ -1,14 +1,14 @@ -FROM node:14-alpine as builder +FROM node:16-alpine as builder RUN apk add --no-cache --virtual .build-deps python make g++ COPY package.json package-lock.json tsconfig.json entrypoint.sh ./ COPY src src RUN npm ci && npm run tsc -FROM node:14-alpine as app +FROM node:16-alpine as app WORKDIR /usr/src/app COPY --from=builder node_modules . COPY --from=builder dist ./dist COPY entrypoint.sh . COPY databases/*.sql databases/ EXPOSE 8080 -CMD ./entrypoint.sh +CMD ./entrypoint.sh \ No newline at end of file diff --git a/entrypoint.sh b/entrypoint.sh index 9055d2c..09b2035 100755 --- a/entrypoint.sh +++ b/entrypoint.sh @@ -2,25 +2,11 @@ set -e echo 'Entrypoint script' cd /usr/src/app + +# blank config, use defaults cp /etc/sponsorblock/config.json . || cat < config.json { - "port": 8080, - "globalSalt": "[CHANGE THIS]", - "adminUserID": "[CHANGE THIS]", - "youtubeAPIKey": null, - "discordReportChannelWebhookURL": null, - "discordFirstTimeSubmissionsWebhookURL": null, - "discordAutoModWebhookURL": null, - "proxySubmission": null, - "behindProxy": "X-Forwarded-For", - "db": "./databases/sponsorTimes.db", - "privateDB": "./databases/private.db", - "createDatabaseIfNotExist": true, - "schemaFolder": "./databases", - "dbSchema": "./databases/_sponsorTimes.db.sql", - "privateDBSchema": "./databases/_private.db.sql", - "mode": "development", - "readOnly": false } EOF -node dist/index.js + +node dist/index.js \ No newline at end of file diff --git a/src/app.ts b/src/app.ts index 267e44d..9522c87 100644 --- a/src/app.ts +++ b/src/app.ts @@ -202,7 +202,7 @@ function setupRoutes(router: Router) { router.post("/api/ratings/rate", postRateEndpoints); router.post("/api/ratings/clearCache", ratingPostClearCache); - if (config.postgres) { + if (config.postgres?.enabled) { router.get("/database", (req, res) => dumpDatabase(req, res, true)); router.get("/database.json", (req, res) => dumpDatabase(req, res, false)); router.get("/database/*", redirectLink); diff --git a/src/config.ts b/src/config.ts index d6f59ca..d42520c 100644 --- a/src/config.ts +++ b/src/config.ts @@ -1,6 +1,7 @@ import fs from "fs"; import { SBSConfig } from "./types/config.model"; import packageJson from "../package.json"; +import { isBoolean, isNumber } from "lodash"; const isTestMode = process.env.npm_lifecycle_script === packageJson.scripts.test; const configFile = process.env.TEST_POSTGRES ? "ci.json" @@ -8,9 +9,10 @@ const configFile = process.env.TEST_POSTGRES ? 
"ci.json" : "config.json"; export const config: SBSConfig = JSON.parse(fs.readFileSync(configFile).toString("utf8")); +loadFromEnv(config); migrate(config); addDefaults(config, { - port: 80, + port: 8080, behindProxy: "X-Forwarded-For", db: "./databases/sponsorTimes.db", privateDB: "./databases/private.db", @@ -20,7 +22,7 @@ addDefaults(config, { privateDBSchema: "./databases/_private.db.sql", readOnly: false, webhooks: [], - categoryList: ["sponsor", "selfpromo", "exclusive_access", "interaction", "intro", "outro", "preview", "music_offtopic", "filler", "poi_highlight", "chapter"], + categoryList: ["sponsor", "selfpromo", "exclusive_access", "interaction", "intro", "outro", "preview", "music_offtopic", "filler", "poi_highlight"], categorySupport: { sponsor: ["skip", "mute", "full"], selfpromo: ["skip", "mute", "full"], @@ -35,14 +37,14 @@ addDefaults(config, { chapter: ["chapter"] }, maxNumberOfActiveWarnings: 1, - hoursAfterWarningExpires: 24, + hoursAfterWarningExpires: 16300000, adminUserID: "", discordCompletelyIncorrectReportWebhookURL: null, discordFirstTimeSubmissionsWebhookURL: null, discordNeuralBlockRejectWebhookURL: null, discordFailedReportChannelWebhookURL: null, discordReportChannelWebhookURL: null, - getTopUsersCacheTimeMinutes: 0, + getTopUsersCacheTimeMinutes: 240, globalSalt: null, mode: "", neuralBlockURL: null, @@ -50,15 +52,15 @@ addDefaults(config, { rateLimit: { vote: { windowMs: 900000, - max: 20, - message: "Too many votes, please try again later", - statusCode: 429, + max: 15, + message: "OK", + statusCode: 200, }, view: { windowMs: 900000, - max: 20, + max: 10, statusCode: 200, - message: "Too many views, please try again later", + message: "OK", }, rate: { windowMs: 900000, @@ -71,10 +73,16 @@ addDefaults(config, { newLeafURLs: null, maxRewardTimePerSegmentInSeconds: 600, poiMinimumStartTime: 2, - postgres: null, + postgres: { + enabled: null, + user: "", + host: "", + password: "", + port: 5432 + }, dumpDatabase: { enabled: false, - minTimeBetweenMs: 60000, + minTimeBetweenMs: 180000, appExportPath: "./docker/database-export", postgresExportPath: "/opt/exports", tables: [{ @@ -96,10 +104,29 @@ addDefaults(config, { }, { name: "vipUsers" + }, + { + name: "unlistedVideos" + }, + { + name: "videoInfo" + }, + { + name: "ratings" }] }, - diskCache: null, - crons: null + diskCache: { + max: 10737418240 + }, + crons: null, + redis: { + enabled: null, + socket: { + host: "", + port: 0 + }, + disableOfflineQueue: true + } }); // Add defaults @@ -125,5 +152,30 @@ function migrate(config: SBSConfig) { if (redisConfig.enable_offline_queue !== undefined) { config.disableOfflineQueue = !redisConfig.enable_offline_queue; } + + if (redisConfig.socket.host && redisConfig.enabled === null) { + redisConfig.enabled = true; + } + } + + if (config.postgres && config.postgres.user && config.postgres.enabled === null) { + config.postgres.enabled = true; + } +} + +function loadFromEnv(config: SBSConfig, prefix = "") { + for (const key in config) { + if (typeof config[key] === "object") { + loadFromEnv(config[key], (prefix ? 
`${prefix}.` : "") + key); + } else if (process.env[key]) { + const value = process.env[key]; + if (isNumber(value)) { + config[key] = parseInt(value, 10); + } else if (isBoolean(value)) { + config[key] = value === "true"; + } else { + config[key] = value; + } + } } } \ No newline at end of file diff --git a/src/databases/databases.ts b/src/databases/databases.ts index 5e2ae0e..9ff8742 100644 --- a/src/databases/databases.ts +++ b/src/databases/databases.ts @@ -9,7 +9,7 @@ let privateDB: IDatabase; if (config.mysql) { db = new Mysql(config.mysql); privateDB = new Mysql(config.privateMysql); -} else if (config.postgres) { +} else if (config.postgres?.enabled) { db = new Postgres({ dbSchemaFileName: config.dbSchema, dbSchemaFolder: config.schemaFolder, diff --git a/src/middleware/requestRateLimit.ts b/src/middleware/requestRateLimit.ts index a188011..06fde41 100644 --- a/src/middleware/requestRateLimit.ts +++ b/src/middleware/requestRateLimit.ts @@ -28,7 +28,7 @@ export function rateLimitMiddleware(limitConfig: RateLimitConfig, getUserID?: (r return next(); } }, - store: config.redis ? new RedisStore({ + store: config.redis?.enabled ? new RedisStore({ sendCommand: (...args: string[]) => redis.sendCommand(args), }) : null, }); diff --git a/src/routes/dumpDatabase.ts b/src/routes/dumpDatabase.ts index 3fb82e3..3c39857 100644 --- a/src/routes/dumpDatabase.ts +++ b/src/routes/dumpDatabase.ts @@ -100,7 +100,7 @@ export default async function dumpDatabase(req: Request, res: Response, showPage res.status(404).send("Database dump is disabled"); return; } - if (!config.postgres) { + if (!config.postgres?.enabled) { res.status(404).send("Not supported on this instance"); return; } @@ -175,7 +175,7 @@ export async function redirectLink(req: Request, res: Response): Promise { res.status(404).send("Database dump is disabled"); return; } - if (!config.postgres) { + if (!config.postgres?.enabled) { res.status(404).send("Not supported on this instance"); return; } diff --git a/src/types/config.model.ts b/src/types/config.model.ts index 52ba2f2..0b22508 100644 --- a/src/types/config.model.ts +++ b/src/types/config.model.ts @@ -2,6 +2,14 @@ import { PoolConfig } from "pg"; import * as redis from "redis"; import { CacheOptions } from "@ajayyy/lru-diskcache"; +interface RedisConfig extends redis.RedisClientOptions { + enabled: boolean; +} + +interface CustomPostgresConfig extends PoolConfig { + enabled: boolean; +} + export interface SBSConfig { [index: string]: any port: number; @@ -41,9 +49,9 @@ export interface SBSConfig { privateMysql?: any; minimumPrefix?: string; maximumPrefix?: string; - redis?: redis.RedisClientOptions; + redis?: RedisConfig; maxRewardTimePerSegmentInSeconds?: number; - postgres?: PoolConfig; + postgres?: CustomPostgresConfig; dumpDatabase?: DumpDatabase; diskCache: CacheOptions; crons: CronJobOptions; diff --git a/src/utils/redis.ts b/src/utils/redis.ts index 3c3123e..5f70e35 100644 --- a/src/utils/redis.ts +++ b/src/utils/redis.ts @@ -25,7 +25,7 @@ let exportClient: RedisSB = { quit: () => new Promise((resolve, reject) => reject()), }; -if (config.redis) { +if (config.redis?.enabled) { Logger.info("Connected to redis"); const client = createClient(config.redis); client.connect(); diff --git a/test.json b/test.json index e10ff44..95d2d79 100644 --- a/test.json +++ b/test.json @@ -16,6 +16,7 @@ "schemaFolder": "./databases", "dbSchema": "./databases/_sponsorTimes.db.sql", "privateDBSchema": "./databases/_private.db.sql", + "categoryList": ["sponsor", "selfpromo", "exclusive_access", 
"interaction", "intro", "outro", "preview", "music_offtopic", "filler", "poi_highlight", "chapter"], "mode": "test", "readOnly": false, "webhooks": [ diff --git a/test/cases/getStatus.ts b/test/cases/getStatus.ts index 970ebed..abf22b3 100644 --- a/test/cases/getStatus.ts +++ b/test/cases/getStatus.ts @@ -89,7 +89,7 @@ describe("getStatus", () => { }); it("Should be able to get statusRequests only", function (done) { - if (!config.redis) this.skip(); + if (!config.redis?.enabled) this.skip(); client.get(`${endpoint}/statusRequests`) .then(res => { assert.strictEqual(res.status, 200); @@ -100,7 +100,7 @@ describe("getStatus", () => { }); it("Should be able to get status with statusRequests", function (done) { - if (!config.redis) this.skip(); + if (!config.redis?.enabled) this.skip(); client.get(endpoint) .then(res => { assert.strictEqual(res.status, 200); diff --git a/test/cases/redisTest.ts b/test/cases/redisTest.ts index e86f123..a6d21bb 100644 --- a/test/cases/redisTest.ts +++ b/test/cases/redisTest.ts @@ -11,7 +11,7 @@ const randKey2 = genRandom(16); describe("redis test", function() { before(async function() { - if (!config.redis) this.skip(); + if (!config.redis?.enabled) this.skip(); await redis.set(randKey1, randValue1); }); it("Should get stored value", (done) => { diff --git a/test/cases/tempVip.ts b/test/cases/tempVip.ts index 3051e0c..6f23913 100644 --- a/test/cases/tempVip.ts +++ b/test/cases/tempVip.ts @@ -63,7 +63,7 @@ const checkUserVIP = async (publicID: HashedUserID) => { describe("tempVIP test", function() { before(async function() { - if (!config.redis) this.skip(); + if (!config.redis?.enabled) this.skip(); const insertSponsorTimeQuery = 'INSERT INTO "sponsorTimes" ("videoID", "startTime", "endTime", "votes", "locked", "UUID", "userID", "timeSubmitted", "views", "category", "shadowHidden") VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)'; await db.prepare("run", insertSponsorTimeQuery, ["channelid-convert", 0, 1, 0, 0, UUID0, "testman", 0, 50, "sponsor", 0]); From 5f53859c94651a086c091213cdd0653840e6c52c Mon Sep 17 00:00:00 2001 From: Ajay Date: Wed, 4 May 2022 01:46:41 -0400 Subject: [PATCH 20/38] Fix docker container --- Dockerfile | 8 +++++--- entrypoint.sh | 4 ++-- src/config.ts | 22 ++++++++++++---------- src/utils/redis.ts | 14 +++++++------- 4 files changed, 26 insertions(+), 22 deletions(-) diff --git a/Dockerfile b/Dockerfile index 357fced..2fc664d 100644 --- a/Dockerfile +++ b/Dockerfile @@ -1,13 +1,15 @@ FROM node:16-alpine as builder -RUN apk add --no-cache --virtual .build-deps python make g++ +RUN apk add --no-cache --virtual .build-deps python3 make g++ COPY package.json package-lock.json tsconfig.json entrypoint.sh ./ COPY src src RUN npm ci && npm run tsc FROM node:16-alpine as app WORKDIR /usr/src/app -COPY --from=builder node_modules . -COPY --from=builder dist ./dist +RUN apk add git +COPY --from=builder ./node_modules ./node_modules +COPY --from=builder ./dist ./dist +COPY ./.git ./.git COPY entrypoint.sh . COPY databases/*.sql databases/ EXPOSE 8080 diff --git a/entrypoint.sh b/entrypoint.sh index 09b2035..9b73c6b 100755 --- a/entrypoint.sh +++ b/entrypoint.sh @@ -4,9 +4,9 @@ echo 'Entrypoint script' cd /usr/src/app # blank config, use defaults -cp /etc/sponsorblock/config.json . 
|| cat < config.json +cat < config.json { } EOF -node dist/index.js \ No newline at end of file +node dist/src/index.js \ No newline at end of file diff --git a/src/config.ts b/src/config.ts index d42520c..4b4ab32 100644 --- a/src/config.ts +++ b/src/config.ts @@ -9,8 +9,6 @@ const configFile = process.env.TEST_POSTGRES ? "ci.json" : "config.json"; export const config: SBSConfig = JSON.parse(fs.readFileSync(configFile).toString("utf8")); -loadFromEnv(config); -migrate(config); addDefaults(config, { port: 8080, behindProxy: "X-Forwarded-For", @@ -74,7 +72,7 @@ addDefaults(config, { maxRewardTimePerSegmentInSeconds: 600, poiMinimumStartTime: 2, postgres: { - enabled: null, + enabled: false, user: "", host: "", password: "", @@ -120,7 +118,7 @@ addDefaults(config, { }, crons: null, redis: { - enabled: null, + enabled: false, socket: { host: "", port: 0 @@ -128,6 +126,8 @@ addDefaults(config, { disableOfflineQueue: true } }); +loadFromEnv(config); +migrate(config); // Add defaults function addDefaults(config: SBSConfig, defaults: SBSConfig) { @@ -153,25 +153,27 @@ function migrate(config: SBSConfig) { config.disableOfflineQueue = !redisConfig.enable_offline_queue; } - if (redisConfig.socket.host && redisConfig.enabled === null) { + if (redisConfig.socket?.host && redisConfig.enabled === undefined) { redisConfig.enabled = true; } } - if (config.postgres && config.postgres.user && config.postgres.enabled === null) { + if (config.postgres && config.postgres.user && config.postgres.enabled === undefined) { config.postgres.enabled = true; } } function loadFromEnv(config: SBSConfig, prefix = "") { for (const key in config) { + const fullKey = (prefix ? `${prefix}_` : "") + key; + if (typeof config[key] === "object") { - loadFromEnv(config[key], (prefix ? `${prefix}.` : "") + key); - } else if (process.env[key]) { - const value = process.env[key]; + loadFromEnv(config[key], fullKey); + } else if (process.env[fullKey]) { + const value = process.env[fullKey]; if (isNumber(value)) { config[key] = parseInt(value, 10); - } else if (isBoolean(value)) { + } else if (value.toLowerCase() === "true" || value.toLowerCase() === "false") { config[key] = value === "true"; } else { config[key] = value; diff --git a/src/utils/redis.ts b/src/utils/redis.ts index 5f70e35..e1bc2e9 100644 --- a/src/utils/redis.ts +++ b/src/utils/redis.ts @@ -16,13 +16,13 @@ interface RedisSB { } let exportClient: RedisSB = { - get: () => new Promise((resolve, reject) => reject()), - set: () => new Promise((resolve, reject) => reject()), - setEx: () => new Promise((resolve, reject) => reject()), - del: () => new Promise((resolve, reject) => reject()), - increment: () => new Promise((resolve, reject) => reject()), - sendCommand: () => new Promise((resolve, reject) => reject()), - quit: () => new Promise((resolve, reject) => reject()), + get: () => new Promise((resolve) => resolve(null)), + set: () => new Promise((resolve) => resolve(null)), + setEx: () => new Promise((resolve) => resolve(null)), + del: () => new Promise((resolve) => resolve(null)), + increment: () => new Promise((resolve) => resolve(null)), + sendCommand: () => new Promise((resolve) => resolve(null)), + quit: () => new Promise((resolve) => resolve(null)), }; if (config.redis?.enabled) { From 0fe85b97607f74dfe3c13cedfc254210ef01ec5b Mon Sep 17 00:00:00 2001 From: Ajay Date: Wed, 4 May 2022 01:51:38 -0400 Subject: [PATCH 21/38] Auto build container --- .github/workflows/docker-build.yml | 50 ++++++++++++++++++++++++++++++ .github/workflows/sb-container.yml | 15 +++++++++ 2 
files changed, 65 insertions(+)
 create mode 100644 .github/workflows/docker-build.yml
 create mode 100644 .github/workflows/sb-container.yml

diff --git a/.github/workflows/docker-build.yml b/.github/workflows/docker-build.yml
new file mode 100644
index 0000000..eda5429
--- /dev/null
+++ b/.github/workflows/docker-build.yml
@@ -0,0 +1,50 @@
+# Based on https://github.com/ajayyy/sb-mirror/blob/main/.github/workflows/docker-build.yml
+name: multi-build-docker
+on:
+  workflow_call:
+    inputs:
+      name:
+        required: true
+        type: string
+      username:
+        required: true
+        type: string
+    secrets:
+      GH_TOKEN:
+        required: true
+
+jobs:
+  build_container:
+    runs-on: ubuntu-latest
+    steps:
+      - name: Checkout
+        uses: actions/checkout@v2
+      - name: Docker meta
+        id: meta
+        uses: docker/metadata-action@v3
+        with:
+          images: |
+            ghcr.io/${{ inputs.username }}/${{ inputs.name }}
+          tags: |
+            type=raw,value=alpine
+          flavor: |
+            latest=true
+      - name: Login to GHCR
+        uses: docker/login-action@v1
+        with:
+          registry: ghcr.io
+          username: ${{ github.repository_owner }}
+          password: ${{ secrets.GH_TOKEN }}
+      - name: Set up QEMU
+        uses: docker/setup-qemu-action@v1
+        with:
+          platforms: arm,arm64
+      - name: Set up buildx
+        uses: docker/setup-buildx-action@v1
+      - name: push
+        uses: docker/build-push-action@v2
+        with:
+          context: ./build/${{ inputs.name }}
+          platforms: linux/amd64,linux/arm64,linux/arm/v7,linux/arm/v6
+          push: true
+          tags: ${{ steps.meta.outputs.tags }}
\ No newline at end of file
diff --git a/.github/workflows/sb-container.yml b/.github/workflows/sb-container.yml
new file mode 100644
index 0000000..8aae0ba
--- /dev/null
+++ b/.github/workflows/sb-container.yml
@@ -0,0 +1,15 @@
+name: sb-runner
+on:
+  push:
+    branches:
+      - master
+  workflow_dispatch:
+
+jobs:
+  build:
+    uses: ./.github/workflows/docker-build.yml
+    with:
+      name: "sb-container"
+      username: "ajayyy"
+    secrets:
+      GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
\ No newline at end of file

From 5e3e02c6741858401a7e89a292a2edfc71cf75ab Mon Sep 17 00:00:00 2001
From: Ajay
Date: Wed, 4 May 2022 01:54:32 -0400
Subject: [PATCH 22/38] Fix context

---
 .github/workflows/docker-build.yml | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/.github/workflows/docker-build.yml b/.github/workflows/docker-build.yml
index eda5429..91716f5 100644
--- a/.github/workflows/docker-build.yml
+++ b/.github/workflows/docker-build.yml
@@ -44,7 +44,7 @@ jobs:
       - name: push
         uses: docker/build-push-action@v2
         with:
-          context: ./build/${{ inputs.name }}
+          context: . 
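          # The ./build/<name> context came from the sb-mirror workflow this file is based
          # on; this repo's Dockerfile sits at the repository root, so the context becomes
          # "." (patch 33 later adds a folder input to cover the rsync image).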
platforms: linux/amd64,linux/arm64,linux/arm/v7,linux/arm/v6 push: true tags: ${{ steps.meta.outputs.tags }} \ No newline at end of file From c5d2cacae2b1ee51e0b217515049d2c2e6f908b8 Mon Sep 17 00:00:00 2001 From: Ajay Date: Wed, 4 May 2022 02:07:22 -0400 Subject: [PATCH 23/38] Fix postgres tests --- ci.json | 1 + 1 file changed, 1 insertion(+) diff --git a/ci.json b/ci.json index 86c106a..18fb97c 100644 --- a/ci.json +++ b/ci.json @@ -26,6 +26,7 @@ "schemaFolder": "./databases", "dbSchema": "./databases/_sponsorTimes.db.sql", "privateDBSchema": "./databases/_private.db.sql", + "categoryList": ["sponsor", "selfpromo", "exclusive_access", "interaction", "intro", "outro", "preview", "music_offtopic", "filler", "poi_highlight", "chapter"], "mode": "test", "readOnly": false, "webhooks": [ From c5f163e41e80f168d56890016a5432e6081349dd Mon Sep 17 00:00:00 2001 From: Ajay Date: Wed, 4 May 2022 02:25:29 -0400 Subject: [PATCH 24/38] rename container --- .github/workflows/{sb-container.yml => sb-server.yml} | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) rename .github/workflows/{sb-container.yml => sb-server.yml} (89%) diff --git a/.github/workflows/sb-container.yml b/.github/workflows/sb-server.yml similarity index 89% rename from .github/workflows/sb-container.yml rename to .github/workflows/sb-server.yml index 8aae0ba..9f674bb 100644 --- a/.github/workflows/sb-container.yml +++ b/.github/workflows/sb-server.yml @@ -9,7 +9,7 @@ jobs: build: uses: ./.github/workflows/docker-build.yml with: - name: "sb-container" + name: "sb-server" username: "ajayyy" secrets: GH_TOKEN: ${{ secrets.GITHUB_TOKEN }} \ No newline at end of file From 2e4b7a0c9c5e7b66e2d94f10323484d66a27a38f Mon Sep 17 00:00:00 2001 From: Ajay Date: Wed, 4 May 2022 15:36:51 -0400 Subject: [PATCH 25/38] Don't force encoding --- src/databases/Postgres.ts | 3 --- 1 file changed, 3 deletions(-) diff --git a/src/databases/Postgres.ts b/src/databases/Postgres.ts index 79d2adb..3f69a59 100644 --- a/src/databases/Postgres.ts +++ b/src/databases/Postgres.ts @@ -94,9 +94,6 @@ export class Postgres implements IDatabase { await client.query(`CREATE DATABASE "${this.config.postgres.database}" WITH OWNER = ${this.config.postgres.user} - ENCODING = 'UTF8' - LC_COLLATE = 'en_US.utf8' - LC_CTYPE = 'en_US.utf8' TABLESPACE = pg_default CONNECTION LIMIT = -1;` ); From b0bcf2b6848ce78306c5b3c0f1d5d8cc24ffc56b Mon Sep 17 00:00:00 2001 From: Ajay Date: Wed, 4 May 2022 16:03:56 -0400 Subject: [PATCH 26/38] don't specify tablespace --- src/databases/Postgres.ts | 1 - 1 file changed, 1 deletion(-) diff --git a/src/databases/Postgres.ts b/src/databases/Postgres.ts index 3f69a59..a6e855e 100644 --- a/src/databases/Postgres.ts +++ b/src/databases/Postgres.ts @@ -94,7 +94,6 @@ export class Postgres implements IDatabase { await client.query(`CREATE DATABASE "${this.config.postgres.database}" WITH OWNER = ${this.config.postgres.user} - TABLESPACE = pg_default CONNECTION LIMIT = -1;` ); } From 591e3a0051a2d6fa5c6932266d2146acb02eefee Mon Sep 17 00:00:00 2001 From: Ajay Ramachandran Date: Thu, 5 May 2022 09:12:12 -0400 Subject: [PATCH 27/38] another server --- nginx/nginx.conf | 3 +++ 1 file changed, 3 insertions(+) diff --git a/nginx/nginx.conf b/nginx/nginx.conf index 22d9a38..38447d6 100644 --- a/nginx/nginx.conf +++ b/nginx/nginx.conf @@ -80,6 +80,9 @@ http { server 10.0.0.16:4441 max_fails=25 fail_timeout=20s; server 10.0.0.16:4442 max_fails=25 fail_timeout=20s; + + server 10.0.0.17:4441 max_fails=25 fail_timeout=20s; + server 10.0.0.17:4442 max_fails=25 
fail_timeout=20s; #server 134.209.69.251:80 backup; From db700cd7e8fb9afbc9d3272e4c5523fa0e2a45ae Mon Sep 17 00:00:00 2001 From: Ajay Date: Thu, 5 May 2022 20:52:44 -0400 Subject: [PATCH 28/38] Allow single newleaf url --- src/config.ts | 2 ++ 1 file changed, 2 insertions(+) diff --git a/src/config.ts b/src/config.ts index 4b4ab32..cbfbf6b 100644 --- a/src/config.ts +++ b/src/config.ts @@ -175,6 +175,8 @@ function loadFromEnv(config: SBSConfig, prefix = "") { config[key] = parseInt(value, 10); } else if (value.toLowerCase() === "true" || value.toLowerCase() === "false") { config[key] = value === "true"; + } else if (key === "newLeafURLs") { + config[key] = [value]; } else { config[key] = value; } From d75226bde52484db94b9cdcf0599e73163c6fb56 Mon Sep 17 00:00:00 2001 From: Ajay Date: Thu, 5 May 2022 22:50:07 -0400 Subject: [PATCH 29/38] better db dump system --- Dockerfile | 2 +- src/app.ts | 4 ++-- src/config.ts | 1 - src/databases/databases.ts | 2 +- src/routes/dumpDatabase.ts | 33 +++++++++++++++++++++++++++------ src/types/config.model.ts | 1 - 6 files changed, 31 insertions(+), 12 deletions(-) diff --git a/Dockerfile b/Dockerfile index 2fc664d..26b77cb 100644 --- a/Dockerfile +++ b/Dockerfile @@ -6,7 +6,7 @@ RUN npm ci && npm run tsc FROM node:16-alpine as app WORKDIR /usr/src/app -RUN apk add git +RUN apk add git postgresql-client COPY --from=builder ./node_modules ./node_modules COPY --from=builder ./dist ./dist COPY ./.git ./.git diff --git a/src/app.ts b/src/app.ts index 9522c87..77f1c2b 100644 --- a/src/app.ts +++ b/src/app.ts @@ -27,7 +27,7 @@ import { loggerMiddleware } from "./middleware/logger"; import { corsMiddleware } from "./middleware/cors"; import { apiCspMiddleware } from "./middleware/apiCsp"; import { rateLimitMiddleware } from "./middleware/requestRateLimit"; -import dumpDatabase, { appExportPath, redirectLink } from "./routes/dumpDatabase"; +import dumpDatabase, { appExportPath, downloadFile } from "./routes/dumpDatabase"; import { endpoint as getSegmentInfo } from "./routes/getSegmentInfo"; import { postClearCache } from "./routes/postClearCache"; import { addUnlistedVideo } from "./routes/addUnlistedVideo"; @@ -205,7 +205,7 @@ function setupRoutes(router: Router) { if (config.postgres?.enabled) { router.get("/database", (req, res) => dumpDatabase(req, res, true)); router.get("/database.json", (req, res) => dumpDatabase(req, res, false)); - router.get("/database/*", redirectLink); + router.get("/database/*", downloadFile); router.use("/download", express.static(appExportPath)); } else { router.get("/database.db", function (req: Request, res: Response) { diff --git a/src/config.ts b/src/config.ts index cbfbf6b..5e51e81 100644 --- a/src/config.ts +++ b/src/config.ts @@ -82,7 +82,6 @@ addDefaults(config, { enabled: false, minTimeBetweenMs: 180000, appExportPath: "./docker/database-export", - postgresExportPath: "/opt/exports", tables: [{ name: "sponsorTimes", order: "timeSubmitted" diff --git a/src/databases/databases.ts b/src/databases/databases.ts index 9ff8742..52f357c 100644 --- a/src/databases/databases.ts +++ b/src/databases/databases.ts @@ -72,7 +72,7 @@ async function initDb(): Promise { const tables = config?.dumpDatabase?.tables ?? 
[]; const tableNames = tables.map(table => table.name); for (const table of tableNames) { - const filePath = `${config?.dumpDatabase?.postgresExportPath}/${table}.csv`; + const filePath = `${config?.dumpDatabase?.appExportPath}/${table}.csv`; await db.prepare("run", `COPY "${table}" FROM '${filePath}' WITH (FORMAT CSV, HEADER true);`); } } diff --git a/src/routes/dumpDatabase.ts b/src/routes/dumpDatabase.ts index 3c39857..73557ad 100644 --- a/src/routes/dumpDatabase.ts +++ b/src/routes/dumpDatabase.ts @@ -5,6 +5,7 @@ import { config } from "../config"; import util from "util"; import fs from "fs"; import path from "path"; +import { ChildProcess, exec, ExecOptions, spawn } from "child_process"; const unlink = util.promisify(fs.unlink); const ONE_MINUTE = 1000 * 60; @@ -32,9 +33,19 @@ const licenseHeader = `

The API and database follow table.name); +const credentials: ExecOptions = { + env: { + ...process.env, + PGHOST: config.postgres.host, + PGPORT: String(config.postgres.port), + PGUSER: config.postgres.user, + PGPASSWORD: String(config.postgres.password), + PGDATABASE: "sponsorTimes", + } +} + interface TableDumpList { fileName: string; tableName: string; @@ -170,7 +181,7 @@ async function getDbVersion(): Promise { return row.value; } -export async function redirectLink(req: Request, res: Response): Promise { +export async function downloadFile(req: Request, res: Response): Promise { if (!config?.dumpDatabase?.enabled) { res.status(404).send("Database dump is disabled"); return; @@ -183,7 +194,7 @@ export async function redirectLink(req: Request, res: Response): Promise { const file = latestDumpFiles.find((value) => `/database/${value.tableName}.csv` === req.path); if (file) { - res.redirect(`/download/${file.fileName}`); + res.sendFile(file.fileName, { root: appExportPath }); } else { res.sendStatus(404); } @@ -210,9 +221,19 @@ async function queueDump(): Promise { for (const table of tables) { const fileName = `${table.name}_${startTime}.csv`; - const file = `${postgresExportPath}/${fileName}`; - await db.prepare("run", `COPY (SELECT * FROM "${table.name}"${table.order ? ` ORDER BY "${table.order}"` : ``}) - TO '${file}' WITH (FORMAT CSV, HEADER true);`); + const file = `${appExportPath}/${fileName}`; + + await new Promise((resolve) => { + exec(`psql -c "\\copy (SELECT * FROM \\"${table.name}\\"${table.order ? ` ORDER BY \\"${table.order}\\"` : ``})` + + ` TO '${file}' WITH (FORMAT CSV, HEADER true);"`, credentials, (error, stdout, stderr) => { + if (error) { + Logger.error(`[dumpDatabase] Failed to dump ${table.name} to ${file} due to ${stderr}`); + } + + resolve(error ? stderr : stdout); + }); + }) + dumpFiles.push({ fileName, tableName: table.name, diff --git a/src/types/config.model.ts b/src/types/config.model.ts index 0b22508..2fe08f6 100644 --- a/src/types/config.model.ts +++ b/src/types/config.model.ts @@ -84,7 +84,6 @@ export interface DumpDatabase { enabled: boolean; minTimeBetweenMs: number; appExportPath: string; - postgresExportPath: string; tables: DumpDatabaseTable[]; } From b4b7ccec20b4b6c762f3f1feb8c2dd83294348fa Mon Sep 17 00:00:00 2001 From: Ajay Date: Fri, 6 May 2022 00:03:01 -0400 Subject: [PATCH 30/38] remove old volume --- docker/docker-compose.yml | 1 - 1 file changed, 1 deletion(-) diff --git a/docker/docker-compose.yml b/docker/docker-compose.yml index e5e10e0..7b6e1b5 100644 --- a/docker/docker-compose.yml +++ b/docker/docker-compose.yml @@ -7,7 +7,6 @@ services: - database.env volumes: - database-data:/var/lib/postgresql/data - - ./database-export/:/opt/exports # To make this work, run chmod 777 ./database-exports ports: - 5432:5432 restart: always From d8395163b9e511aa1505c7dd79ed28b9cc751033 Mon Sep 17 00:00:00 2001 From: Ajay Date: Fri, 6 May 2022 01:53:44 -0400 Subject: [PATCH 31/38] fix newleafurls --- .github/workflows/docker-build.yml | 2 +- src/config.ts | 5 +++-- 2 files changed, 4 insertions(+), 3 deletions(-) diff --git a/.github/workflows/docker-build.yml b/.github/workflows/docker-build.yml index 91716f5..a76c01e 100644 --- a/.github/workflows/docker-build.yml +++ b/.github/workflows/docker-build.yml @@ -45,6 +45,6 @@ jobs: uses: docker/build-push-action@v2 with: context: . 
- platforms: linux/amd64,linux/arm64,linux/arm/v7,linux/arm/v6 + platforms: linux/amd64 push: true tags: ${{ steps.meta.outputs.tags }} \ No newline at end of file diff --git a/src/config.ts b/src/config.ts index 5e51e81..acd473e 100644 --- a/src/config.ts +++ b/src/config.ts @@ -165,9 +165,10 @@ function migrate(config: SBSConfig) { function loadFromEnv(config: SBSConfig, prefix = "") { for (const key in config) { const fullKey = (prefix ? `${prefix}_` : "") + key; + const data = config[key]; - if (typeof config[key] === "object") { - loadFromEnv(config[key], fullKey); + if (typeof data === "object" && !Array.isArray(data)) { + loadFromEnv(data, fullKey); } else if (process.env[fullKey]) { const value = process.env[fullKey]; if (isNumber(value)) { From c0dc174f42ffd5dd63327f99a684fd6a6b387098 Mon Sep 17 00:00:00 2001 From: Michael C Date: Fri, 6 May 2022 13:01:53 -0400 Subject: [PATCH 32/38] docker build without platforms + bumped actions --- .github/workflows/docker-build.yml | 15 ++++----------- 1 file changed, 4 insertions(+), 11 deletions(-) diff --git a/.github/workflows/docker-build.yml b/.github/workflows/docker-build.yml index a76c01e..378afc8 100644 --- a/.github/workflows/docker-build.yml +++ b/.github/workflows/docker-build.yml @@ -18,10 +18,10 @@ jobs: runs-on: ubuntu-latest steps: - name: Checkout - uses: actions/checkout@v2 + uses: actions/checkout@v3 - name: Docker meta id: meta - uses: docker/metadata-action@v3 + uses: docker/metadata-action@v4 with: images: | ghcr.io/${{ inputs.username }}/${{ inputs.name }} @@ -30,21 +30,14 @@ jobs: flavor: | latest=true - name: Login to GHCR - uses: docker/login-action@v1 + uses: docker/login-action@v2 with: registry: ghcr.io username: ${{ github.repository_owner }} password: ${{ secrets.GH_TOKEN }} - - name: Set up QEMU - uses: docker/setup-qemu-action@v1 - with: - platforms: arm,arm64 - - name: Set up buildx - uses: docker/setup-buildx-action@v1 - name: push - uses: docker/build-push-action@v2 + uses: docker/build-push-action@v3 with: context: . - platforms: linux/amd64 push: true tags: ${{ steps.meta.outputs.tags }} \ No newline at end of file From ff197d2985419207291b79a267ecb0c23652797c Mon Sep 17 00:00:00 2001 From: Ajay Date: Fri, 6 May 2022 14:34:11 -0400 Subject: [PATCH 33/38] Add rsync docker image --- .github/workflows/docker-build.yml | 5 ++++- .github/workflows/sb-server.yml | 8 ++++++++ rsync/Dockerfile | 5 +++++ 3 files changed, 17 insertions(+), 1 deletion(-) create mode 100644 rsync/Dockerfile diff --git a/.github/workflows/docker-build.yml b/.github/workflows/docker-build.yml index a76c01e..c5ef476 100644 --- a/.github/workflows/docker-build.yml +++ b/.github/workflows/docker-build.yml @@ -9,6 +9,9 @@ on: username: required: true type: string + folder: + required: true + type: string secrets: GH_TOKEN: required: true @@ -44,7 +47,7 @@ jobs: - name: push uses: docker/build-push-action@v2 with: - context: . + context: ${{ inputs.folder }} platforms: linux/amd64 push: true tags: ${{ steps.meta.outputs.tags }} \ No newline at end of file diff --git a/.github/workflows/sb-server.yml b/.github/workflows/sb-server.yml index 9f674bb..12b89e7 100644 --- a/.github/workflows/sb-server.yml +++ b/.github/workflows/sb-server.yml @@ -11,5 +11,13 @@ jobs: with: name: "sb-server" username: "ajayyy" + folder: "." 
+ secrets: + GH_TOKEN: ${{ secrets.GITHUB_TOKEN }} + uses: ./.github/workflows/docker-build.yml + with: + name: "rsync-host" + username: "ajayyy" + folder: "./rsync" secrets: GH_TOKEN: ${{ secrets.GITHUB_TOKEN }} \ No newline at end of file diff --git a/rsync/Dockerfile b/rsync/Dockerfile new file mode 100644 index 0000000..41a2e34 --- /dev/null +++ b/rsync/Dockerfile @@ -0,0 +1,5 @@ +FROM ghcr.io/ajayyy/sb-server:latest +EXPOSE 873/tcp +RUN apk add rsync>3.2.4-r0 + +CMD rsync --no-detach --daemon \ No newline at end of file From 3341500fdf4ccd3eedc97ec704391fc787dd13b0 Mon Sep 17 00:00:00 2001 From: Ajay Date: Fri, 6 May 2022 14:42:52 -0400 Subject: [PATCH 34/38] Fix docker building workflow --- .github/workflows/sb-server.yml | 34 ++++++++++++++++++--------------- 1 file changed, 19 insertions(+), 15 deletions(-) diff --git a/.github/workflows/sb-server.yml b/.github/workflows/sb-server.yml index 12b89e7..cd8595b 100644 --- a/.github/workflows/sb-server.yml +++ b/.github/workflows/sb-server.yml @@ -1,4 +1,4 @@ -name: sb-runner +name: Docker image builds on: push: branches: @@ -7,17 +7,21 @@ on: jobs: build: - uses: ./.github/workflows/docker-build.yml - with: - name: "sb-server" - username: "ajayyy" - folder: "." - secrets: - GH_TOKEN: ${{ secrets.GITHUB_TOKEN }} - uses: ./.github/workflows/docker-build.yml - with: - name: "rsync-host" - username: "ajayyy" - folder: "./rsync" - secrets: - GH_TOKEN: ${{ secrets.GITHUB_TOKEN }} \ No newline at end of file + name: Build docker images + runs-on: ubuntu-latest + + steps: + - uses: ./.github/workflows/docker-build.yml + with: + name: "sb-server" + username: "ajayyy" + folder: "." + secrets: + GH_TOKEN: ${{ secrets.GITHUB_TOKEN }} + - uses: ./.github/workflows/docker-build.yml + with: + name: "rsync-host" + username: "ajayyy" + folder: "./rsync" + secrets: + GH_TOKEN: ${{ secrets.GITHUB_TOKEN }} \ No newline at end of file From 1146aac3c256c9eeff01f8b9fd8aeab1304c7e4a Mon Sep 17 00:00:00 2001 From: Ajay Date: Fri, 6 May 2022 15:25:24 -0400 Subject: [PATCH 35/38] Run docker build as two jobs --- .github/workflows/sb-server.yml | 35 +++++++++++++++------------------ 1 file changed, 16 insertions(+), 19 deletions(-) diff --git a/.github/workflows/sb-server.yml b/.github/workflows/sb-server.yml index cd8595b..18c3372 100644 --- a/.github/workflows/sb-server.yml +++ b/.github/workflows/sb-server.yml @@ -6,22 +6,19 @@ on: workflow_dispatch: jobs: - build: - name: Build docker images - runs-on: ubuntu-latest - - steps: - - uses: ./.github/workflows/docker-build.yml - with: - name: "sb-server" - username: "ajayyy" - folder: "." - secrets: - GH_TOKEN: ${{ secrets.GITHUB_TOKEN }} - - uses: ./.github/workflows/docker-build.yml - with: - name: "rsync-host" - username: "ajayyy" - folder: "./rsync" - secrets: - GH_TOKEN: ${{ secrets.GITHUB_TOKEN }} \ No newline at end of file + sb-server: + uses: ./.github/workflows/docker-build.yml + with: + name: "sb-server" + username: "ajayyy" + folder: "." 
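    # Note: a reusable workflow can only be called through a job-level `uses:`, one per
    # job (not from within `steps:`), which is why each image now gets its own job.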
+ secrets: + GH_TOKEN: ${{ secrets.GITHUB_TOKEN }} + sb-client: + uses: ./.github/workflows/docker-build.yml + with: + name: "rsync-host" + username: "ajayyy" + folder: "./rsync" + secrets: + GH_TOKEN: ${{ secrets.GITHUB_TOKEN }} \ No newline at end of file From fc99c42e02a46cffde6b27f1b750fbbecb53bfc9 Mon Sep 17 00:00:00 2001 From: Ajay Date: Fri, 6 May 2022 15:27:44 -0400 Subject: [PATCH 36/38] Run rsync build after sb server --- .github/workflows/sb-server.yml | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/.github/workflows/sb-server.yml b/.github/workflows/sb-server.yml index 18c3372..1e46555 100644 --- a/.github/workflows/sb-server.yml +++ b/.github/workflows/sb-server.yml @@ -14,7 +14,8 @@ jobs: folder: "." secrets: GH_TOKEN: ${{ secrets.GITHUB_TOKEN }} - sb-client: + rsync-host: + needs: sb-server uses: ./.github/workflows/docker-build.yml with: name: "rsync-host" From a89abd5dd8a8ab10c419cc416b348486628f6078 Mon Sep 17 00:00:00 2001 From: Ajay Date: Fri, 6 May 2022 16:06:56 -0400 Subject: [PATCH 37/38] Rsync should also run node --- rsync/Dockerfile | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/rsync/Dockerfile b/rsync/Dockerfile index 41a2e34..b895940 100644 --- a/rsync/Dockerfile +++ b/rsync/Dockerfile @@ -2,4 +2,4 @@ FROM ghcr.io/ajayyy/sb-server:latest EXPOSE 873/tcp RUN apk add rsync>3.2.4-r0 -CMD rsync --no-detach --daemon \ No newline at end of file +CMD rsync --no-detach --daemon & ./entrypoint.sh \ No newline at end of file From 60a3c017e5eeb23db7967a7748da979c7922ffe0 Mon Sep 17 00:00:00 2001 From: Ajay Date: Fri, 6 May 2022 16:21:49 -0400 Subject: [PATCH 38/38] Make missing folder --- rsync/Dockerfile | 1 + 1 file changed, 1 insertion(+) diff --git a/rsync/Dockerfile b/rsync/Dockerfile index b895940..c5093da 100644 --- a/rsync/Dockerfile +++ b/rsync/Dockerfile @@ -1,5 +1,6 @@ FROM ghcr.io/ajayyy/sb-server:latest EXPOSE 873/tcp RUN apk add rsync>3.2.4-r0 +RUN mkdir /usr/src/app/database-export CMD rsync --no-detach --daemon & ./entrypoint.sh \ No newline at end of file
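
Usage note on the environment-variable configuration built up across patches 19, 20, 28 and 31: loadFromEnv() walks the parsed config object and, for every leaf key, checks an environment variable named by joining the nested keys with underscores, so config.redis.socket.host can be overridden by setting redis_socket_host. The TypeScript sketch below re-creates only the value-coercion step as it stands after patch 20; resolveEnvValue is a hypothetical helper written for illustration and is not a function in the codebase.

import { isNumber } from "lodash";

// Hypothetical re-creation of the coercion branch inside loadFromEnv().
function resolveEnvValue(raw: string): string | number | boolean {
    // lodash's isNumber() checks the runtime type, so a string read from
    // process.env never takes this branch; numeric values stay strings.
    if (isNumber(raw)) return parseInt(raw, 10);
    // "true"/"false" in any casing become real booleans (patch 20).
    if (raw.toLowerCase() === "true" || raw.toLowerCase() === "false") return raw === "true";
    // Everything else is passed through verbatim; newLeafURLs is additionally
    // special-cased into a one-element array (patch 28).
    return raw;
}

// e.g. docker run -e redis_enabled=true -e redis_socket_host=10.0.0.5 ghcr.io/ajayyy/sb-server
const asBoolean = resolveEnvValue("true"); // true (boolean)
const asString = resolveEnvValue("6379");  // "6379" (still a string; see the isNumber comment above)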