Merge branch 'master' into fix/general-fixes

This commit is contained in:
Ajay Ramachandran
2021-11-16 19:18:16 -05:00
committed by GitHub
39 changed files with 1086 additions and 79 deletions

View File

@@ -42,6 +42,10 @@ import { getUserStats } from "./routes/getUserStats";
import ExpressPromiseRouter from "express-promise-router";
import { Server } from "http";
import { youtubeApiProxy } from "./routes/youtubeApiProxy";
import { getChapterNames } from "./routes/getChapterNames";
import { postRating } from "./routes/ratings/postRating";
import { getRating } from "./routes/ratings/getRating";
import { postClearCache as ratingPostClearCache } from "./routes/ratings/postClearCache";
export function createServer(callback: () => void): Server {
// Create a service (the app object is just a callback).
@@ -73,9 +77,11 @@ function setupRoutes(router: Router) {
// Rate limit endpoint lists
const voteEndpoints: RequestHandler[] = [voteOnSponsorTime];
const viewEndpoints: RequestHandler[] = [viewedVideoSponsorTime];
const postRateEndpoints: RequestHandler[] = [postRating];
if (config.rateLimit) {
if (config.rateLimit.vote) voteEndpoints.unshift(rateLimitMiddleware(config.rateLimit.vote, voteGetUserID));
if (config.rateLimit.view) viewEndpoints.unshift(rateLimitMiddleware(config.rateLimit.view));
if (config.rateLimit.rate) postRateEndpoints.unshift(rateLimitMiddleware(config.rateLimit.rate));
}
//add the get function
@@ -172,6 +178,9 @@ function setupRoutes(router: Router) {
// get all segments that match a search
router.get("/api/searchSegments", getSearchSegments);
// autocomplete chapter names
router.get("/api/chapterNames", getChapterNames);
// get status
router.get("/api/status/:value", getStatus);
router.get("/api/status", getStatus);
@@ -182,6 +191,11 @@ function setupRoutes(router: Router) {
router.get("/api/lockReason", getLockReason);
// ratings
router.get("/api/ratings/rate/:prefix", getRating);
router.post("/api/ratings/rate", postRateEndpoints);
router.post("/api/ratings/clearCache", ratingPostClearCache);
if (config.postgres) {
router.get("/database", (req, res) => dumpDatabase(req, res, true));
router.get("/database.json", (req, res) => dumpDatabase(req, res, false));

View File

@@ -19,16 +19,18 @@ addDefaults(config, {
privateDBSchema: "./databases/_private.db.sql",
readOnly: false,
webhooks: [],
categoryList: ["sponsor", "selfpromo", "interaction", "intro", "outro", "preview", "music_offtopic", "poi_highlight"],
categoryList: ["sponsor", "selfpromo", "interaction", "intro", "outro", "preview", "music_offtopic", "filler", "poi_highlight", "chapter"],
categorySupport: {
sponsor: ["skip", "mute"],
selfpromo: ["skip", "mute"],
interaction: ["skip", "mute"],
intro: ["skip"],
outro: ["skip"],
preview: ["skip"],
intro: ["skip", "mute"],
outro: ["skip", "mute"],
preview: ["skip", "mute"],
filler: ["skip", "mute"],
music_offtopic: ["skip"],
poi_highlight: ["skip"],
chapter: ["chapter"]
},
maxNumberOfActiveWarnings: 1,
hoursAfterWarningExpires: 24,
@@ -56,6 +58,12 @@ addDefaults(config, {
statusCode: 200,
message: "Too many views, please try again later",
},
rate: {
windowMs: 900000,
max: 20,
statusCode: 200,
message: "Success",
}
},
userCounterURL: null,
newLeafURLs: null,

View File

@@ -68,7 +68,7 @@ export class Postgres implements IDatabase {
}
case "all": {
const values = queryResult.rows;
Logger.debug(`result (postgres): ${values}`);
Logger.debug(`result (postgres): ${JSON.stringify(values)}`);
return values;
}
case "run": {

View File

@@ -0,0 +1,46 @@
import { Logger } from "../utils/logger";
import { Request, Response } from "express";
import { db } from "../databases/databases";
import { Postgres } from "../databases/Postgres";
export async function getChapterNames(req: Request, res: Response): Promise<Response> {
    // Autocomplete endpoint for chapter names: returns up to 5 popular segment
    // descriptions on a channel, ranked by total votes and trigram similarity
    // to the partial description the user has typed so far.
    const description = req.query.description as string;
    const channelID = req.query.channelID as string;
    if (!description || typeof description !== "string"
            || !channelID || typeof channelID !== "string") {
        return res.sendStatus(400);
    }

    // similarity() is a Postgres (pg_trgm) function, so this endpoint is
    // unavailable on SQLite-backed instances.
    if (!(db instanceof Postgres)) {
        // Bug fix: res.sendStatus(500) sends and ends the response immediately,
        // so the chained .json() body was never delivered. Use status().json().
        return res.status(500).json({
            message: "Not supported on this instance"
        });
    }

    try {
        const descriptions = await db.prepare("all", `
            SELECT "description"
            FROM "sponsorTimes"
            WHERE ("votes" > 0 OR ("views" > 100 AND "votes" >= 0)) AND "videoID" IN (
                SELECT "videoID"
                FROM "videoInfo"
                WHERE "channelID" = ?
            ) AND "description" != ''
            GROUP BY "description"
            ORDER BY SUM("votes"), similarity("description", ?) DESC
            LIMIT 5;`
        , [channelID, description]) as { description: string }[];

        if (descriptions?.length > 0) {
            return res.status(200).json(descriptions.map(d => ({
                description: d.description
            })));
        }
    } catch (err) {
        Logger.error(err as string);
        return res.sendStatus(500);
    }

    // No matching descriptions found.
    return res.status(404).json([]);
}

View File

@@ -45,7 +45,7 @@ async function prepareCategorySegments(req: Request, videoID: VideoID, category:
const filteredSegments = segments.filter((_, index) => shouldFilter[index]);
const maxSegments = getCategoryActionType(category) === CategoryActionType.Skippable ? 32 : 1;
const maxSegments = getCategoryActionType(category) === CategoryActionType.Skippable ? Infinity : 1;
return (await chooseSegments(filteredSegments, maxSegments)).map((chosenSegment) => ({
category: chosenSegment.category,
actionType: chosenSegment.actionType,
@@ -53,7 +53,9 @@ async function prepareCategorySegments(req: Request, videoID: VideoID, category:
UUID: chosenSegment.UUID,
locked: chosenSegment.locked,
votes: chosenSegment.votes,
videoDuration: chosenSegment.videoDuration
videoDuration: chosenSegment.videoDuration,
userID: chosenSegment.userID,
description: chosenSegment.description
}));
}
@@ -138,7 +140,7 @@ async function getSegmentsFromDBByHash(hashedVideoIDPrefix: VideoIDHash, service
const fetchFromDB = () => db
.prepare(
"all",
`SELECT "videoID", "startTime", "endTime", "votes", "locked", "UUID", "userID", "category", "actionType", "videoDuration", "reputation", "shadowHidden", "hashedVideoID", "timeSubmitted" FROM "sponsorTimes"
`SELECT "videoID", "startTime", "endTime", "votes", "locked", "UUID", "userID", "category", "actionType", "videoDuration", "reputation", "shadowHidden", "hashedVideoID", "timeSubmitted", "description" FROM "sponsorTimes"
WHERE "hashedVideoID" LIKE ? AND "service" = ? AND "hidden" = 0 ORDER BY "startTime"`,
[`${hashedVideoIDPrefix}%`, service]
) as Promise<DBSegment[]>;
@@ -154,7 +156,7 @@ async function getSegmentsFromDBByVideoID(videoID: VideoID, service: Service): P
const fetchFromDB = () => db
.prepare(
"all",
`SELECT "startTime", "endTime", "votes", "locked", "UUID", "userID", "category", "actionType", "videoDuration", "reputation", "shadowHidden", "timeSubmitted" FROM "sponsorTimes"
`SELECT "startTime", "endTime", "votes", "locked", "UUID", "userID", "category", "actionType", "videoDuration", "reputation", "shadowHidden", "timeSubmitted", "description" FROM "sponsorTimes"
WHERE "videoID" = ? AND "service" = ? AND "hidden" = 0 ORDER BY "startTime"`,
[videoID, service]
) as Promise<DBSegment[]>;
@@ -218,7 +220,7 @@ async function chooseSegments(segments: DBSegment[], max: number): Promise<DBSeg
//1. As long as the segments' startTime fall inside the currentGroup, we keep adding them to that group
//2. If a segment starts after the end of the currentGroup (> cursor), no other segment will ever fall
// inside that group (because they're sorted) so we can create a new one
const overlappingSegmentsGroups: OverlappingSegmentGroup[] = [];
let overlappingSegmentsGroups: OverlappingSegmentGroup[] = [];
let currentGroup: OverlappingSegmentGroup;
let cursor = -1; //-1 to make sure that, even if the 1st segment starts at 0, a new group is created
for (const segment of segments) {
@@ -260,6 +262,8 @@ async function chooseSegments(segments: DBSegment[], max: number): Promise<DBSeg
group.reputation = group.reputation / group.segments.length;
});
overlappingSegmentsGroups = splitPercentOverlap(overlappingSegmentsGroups);
//if there are too many groups, find the best ones
return getWeightedRandomChoice(overlappingSegmentsGroups, max).map(
//randomly choose 1 good segment per group and return them
@@ -267,6 +271,37 @@ async function chooseSegments(segments: DBSegment[], max: number): Promise<DBSeg
);
}
function splitPercentOverlap(groups: OverlappingSegmentGroup[]): OverlappingSegmentGroup[] {
    // Re-partition each group so that only segments with sufficient time
    // overlap (or a shared non-chapter action type) remain grouped together.
    return groups.flatMap((group) => {
        const splitGroups: OverlappingSegmentGroup[] = [];

        for (const segment of group.segments) {
            // A segment joins an existing sub-group when at least one member
            // overlaps it enough, or shares its (non-chapter) action type.
            const match = splitGroups.find((candidate) =>
                candidate.segments.some((other) => {
                    const overlap = Math.min(segment.endTime, other.endTime)
                        - Math.max(segment.startTime, other.startTime);
                    const span = Math.max(segment.endTime, other.endTime)
                        - Math.min(segment.startTime, other.startTime);
                    const overlapPercent = overlap / span;

                    const sameNonChapterType = overlapPercent > 0
                        && segment.actionType === other.actionType
                        && segment.actionType !== ActionType.Chapter;
                    const bothChapters = segment.actionType === ActionType.Chapter
                        && other.actionType === ActionType.Chapter;

                    return sameNonChapterType
                        || overlapPercent >= 0.6
                        || (overlapPercent >= 0.8 && bothChapters);
                })
            );

            if (match) {
                // Fold the segment into the sub-group, accumulating its stats.
                match.segments.push(segment);
                match.votes += segment.votes;
                match.reputation += segment.reputation;
                match.locked ||= segment.locked;
                match.required ||= segment.required;
            } else {
                // No compatible sub-group: start a fresh one seeded by this segment.
                splitGroups.push({
                    segments: [segment],
                    votes: segment.votes,
                    reputation: segment.reputation,
                    locked: segment.locked,
                    required: segment.required
                });
            }
        }

        return splitGroups;
    });
}
/**
*
* Returns what would be sent to the client.

View File

@@ -39,7 +39,7 @@ export async function postClearCache(req: Request, res: Response): Promise<Respo
}
try {
QueryCacher.clearVideoCache({
QueryCacher.clearSegmentCache({
videoID,
hashedVideoID,
service

View File

@@ -31,7 +31,7 @@ export async function postPurgeAllSegments(req: Request, res: Response): Promise
await db.prepare("run", `UPDATE "sponsorTimes" SET "hidden" = 1 WHERE "videoID" = ?`, [videoID]);
const hashedVideoID: VideoIDHash = getHash(videoID, 1);
QueryCacher.clearVideoCache({
QueryCacher.clearSegmentCache({
videoID,
hashedVideoID,
service

View File

@@ -299,7 +299,7 @@ async function checkUserActiveWarning(userID: string): Promise<CheckResult> {
return CHECK_PASS;
}
function checkInvalidFields(videoID: any, userID: any, segments: Array<any>): CheckResult {
function checkInvalidFields(videoID: VideoID, userID: UserID, segments: IncomingSegment[]): CheckResult {
const invalidFields = [];
const errors = [];
if (typeof videoID !== "string") {
@@ -320,6 +320,12 @@ function checkInvalidFields(videoID: any, userID: any, segments: Array<any>): Ch
(typeof endTime === "string" && endTime.includes(":"))) {
invalidFields.push("segment time");
}
if (typeof segmentPair.description !== "string"
|| (segmentPair.description.length > 60 && segmentPair.actionType === ActionType.Chapter)
|| (segmentPair.description.length !== 0 && segmentPair.actionType !== ActionType.Chapter)) {
invalidFields.push("segment description");
}
}
if (invalidFields.length !== 0) {
@@ -541,7 +547,8 @@ function preprocessInput(req: Request) {
segments = [{
segment: [req.query.startTime as string, req.query.endTime as string],
category: req.query.category as Category,
actionType: (req.query.actionType as ActionType) ?? ActionType.Skip
actionType: (req.query.actionType as ActionType) ?? ActionType.Skip,
description: req.query.description as string || "",
}];
}
// Add default action type
@@ -550,6 +557,7 @@ function preprocessInput(req: Request) {
segment.actionType = ActionType.Skip;
}
segment.description ??= "";
segment.segment = segment.segment.map((time) => typeof segment.segment[0] === "string" ? time?.replace(",", ".") : time);
});
@@ -620,7 +628,6 @@ export async function postSkipSegments(req: Request, res: Response): Promise<Res
//check to see if this user is shadowbanned
const shadowBanRow = await db.prepare("get", `SELECT count(*) as "userCount" FROM "shadowBannedUsers" WHERE "userID" = ? LIMIT 1`, [userID]);
const startingVotes = 0 + decreaseVotes;
const reputation = await getReputation(userID);
@@ -634,9 +641,10 @@ export async function postSkipSegments(req: Request, res: Response): Promise<Res
const startingLocked = isVIP ? 1 : 0;
try {
await db.prepare("run", `INSERT INTO "sponsorTimes"
("videoID", "startTime", "endTime", "votes", "locked", "UUID", "userID", "timeSubmitted", "views", "category", "actionType", "service", "videoDuration", "reputation", "shadowHidden", "hashedVideoID", "userAgent")
VALUES(?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)`, [
videoID, segmentInfo.segment[0], segmentInfo.segment[1], startingVotes, startingLocked, UUID, userID, timeSubmitted, 0, segmentInfo.category, segmentInfo.actionType, service, videoDuration, reputation, 0, hashedVideoID, userAgent
("videoID", "startTime", "endTime", "votes", "locked", "UUID", "userID", "timeSubmitted", "views", "category", "actionType", "service", "videoDuration", "reputation", "shadowHidden", "hashedVideoID", "userAgent", "description")
VALUES(?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)`, [
videoID, segmentInfo.segment[0], segmentInfo.segment[1], startingVotes, startingLocked, UUID, userID, timeSubmitted, 0
, segmentInfo.category, segmentInfo.actionType, service, videoDuration, reputation, shadowBanRow.userCount, hashedVideoID, userAgent, segmentInfo.description
],
);
@@ -649,7 +657,7 @@ export async function postSkipSegments(req: Request, res: Response): Promise<Res
videoID, apiVideoInfo?.data?.authorId || "", apiVideoInfo?.data?.title || "", apiVideoInfo?.data?.published || 0, apiVideoInfo?.data?.genreUrl || "", videoID]);
// Clear redis cache for this video
QueryCacher.clearVideoCache({
QueryCacher.clearSegmentCache({
videoID,
hashedVideoID,
service,

View File

@@ -0,0 +1,75 @@
import { Request, Response } from "express";
import { db } from "../../databases/databases";
import { RatingType } from "../../types/ratings.model";
import { Service, VideoID, VideoIDHash } from "../../types/segments.model";
import { getService } from "../../utils/getService";
import { hashPrefixTester } from "../../utils/hashPrefixTester";
import { Logger } from "../../utils/logger";
import { QueryCacher } from "../../utils/queryCacher";
import { ratingHashKey } from "../../utils/redisKeys";
interface DBRating {
videoID: VideoID,
hashedVideoID: VideoIDHash,
service: Service,
type: RatingType,
count: number
}
export async function getRating(req: Request, res: Response): Promise<Response> {
    // Look up aggregate ratings for all videos whose hashed ID starts with
    // the given prefix, optionally filtered to specific rating types.
    const rawPrefix = req.params.prefix as VideoIDHash;
    if (!rawPrefix || !hashPrefixTester(rawPrefix)) {
        // Exit early on a faulty prefix.
        return res.status(400).send("Hash prefix does not match format requirements.");
    }
    const hashPrefix = rawPrefix.toLowerCase() as VideoIDHash;

    // Accepted forms: ?types=<JSON array>, ?type=<single or repeated>,
    // or neither (defaults to both upvotes and downvotes).
    let types: RatingType[] = [];
    try {
        if (req.query.types) {
            types = JSON.parse(req.query.types as string);
        } else if (req.query.type) {
            types = (Array.isArray(req.query.type)
                ? req.query.type
                : [req.query.type]) as RatingType[];
        } else {
            types = [RatingType.Upvote, RatingType.Downvote];
        }

        if (!Array.isArray(types)) {
            return res.status(400).send("Types parameter does not match format requirements.");
        }
        // Query values arrive as strings; normalize to numeric rating codes.
        types = types.map((type) => parseInt(type as unknown as string, 10));
    } catch (error) {
        return res.status(400).send("Bad parameter: types (invalid JSON)");
    }

    const service: Service = getService(req.query.service, req.body.service);

    try {
        const allRatings = await getRatings(hashPrefix, service);
        const ratings = allRatings
            .filter((rating) => types.includes(rating.type))
            .map((rating) => ({
                videoID: rating.videoID,
                hash: rating.hashedVideoID,
                service: rating.service,
                type: rating.type,
                count: rating.count
            }));

        // 404 with an empty array when nothing matched.
        return res.status(ratings.length ? 200 : 404)
            .send(ratings);
    } catch (err) {
        Logger.error(err as string);
        return res.sendStatus(500);
    }
}
function getRatings(hashPrefix: VideoIDHash, service: Service): Promise<DBRating[]> {
    // Fetch all rating rows whose hashed video ID starts with the given prefix.
    // Bug fix: the "service" column was missing from the SELECT list even
    // though DBRating declares it and the caller (getRating) reads
    // rating.service — every result row had service === undefined.
    const fetchFromDB = () => db
        .prepare(
            "all",
            `SELECT "videoID", "hashedVideoID", "service", "type", "count" FROM "ratings" WHERE "hashedVideoID" LIKE ? AND "service" = ? ORDER BY "hashedVideoID"`,
            [`${hashPrefix}%`, service]
        ) as Promise<DBRating[]>;

    // Only 4-character prefixes are cached; other lengths go straight to the DB.
    return (hashPrefix.length === 4)
        ? QueryCacher.get(fetchFromDB, ratingHashKey(hashPrefix, service))
        : fetchFromDB();
}

View File

@@ -0,0 +1,52 @@
import { Logger } from "../../utils/logger";
import { HashedUserID, UserID } from "../../types/user.model";
import { getHash } from "../../utils/getHash";
import { Request, Response } from "express";
import { Service, VideoID } from "../../types/segments.model";
import { QueryCacher } from "../../utils/queryCacher";
import { isUserVIP } from "../../utils/isUserVIP";
import { VideoIDHash } from "../../types/segments.model";
import { getService } from "../..//utils/getService";
export async function postClearCache(req: Request, res: Response): Promise<Response> {
    // VIP-only endpoint that evicts the cached ratings for a single video.
    const videoID = req.query.videoID as VideoID;
    const userID = req.query.userID as UserID;
    const service = getService(req.query.service as Service);

    const invalidFields = [];
    if (typeof videoID !== "string") {
        invalidFields.push("videoID");
    }
    if (typeof userID !== "string") {
        invalidFields.push("userID");
    }

    if (invalidFields.length !== 0) {
        // Invalid request; report which fields were missing/malformed.
        return res.status(400).send(`No valid ${invalidFields.join(", ")} field(s) provided`);
    }

    // Hash the userID as early as possible so the private ID never lingers.
    const hashedUserID: HashedUserID = getHash(userID);
    // The rating cache is keyed by hashed video ID.
    const hashedVideoID: VideoIDHash = getHash(videoID, 1);

    // Ensure user is a VIP.
    if (!(await isUserVIP(hashedUserID))) {
        Logger.warn(`Permission violation: User ${hashedUserID} attempted to clear cache for video ${videoID}.`);
        return res.status(403).json({ "message": "Not a VIP" });
    }

    try {
        QueryCacher.clearRatingCache({
            hashedVideoID,
            service
        });
        return res.status(200).json({
            message: `Cache cleared on video ${videoID}`
        });
    } catch (err) {
        // Previously the error was swallowed silently; log it for operators,
        // matching the error-handling style of the other route handlers.
        Logger.error(err as string);
        return res.sendStatus(500);
    }
}

View File

@@ -0,0 +1,63 @@
import { db, privateDB } from "../../databases/databases";
import { getHash } from "../../utils/getHash";
import { Logger } from "../../utils/logger";
import { Request, Response } from "express";
import { HashedUserID, UserID } from "../../types/user.model";
import { HashedIP, IPAddress, VideoID } from "../../types/segments.model";
import { getIP } from "../../utils/getIP";
import { getService } from "../../utils/getService";
import { RatingType, RatingTypes } from "../../types/ratings.model";
import { config } from "../../config";
import { QueryCacher } from "../../utils/queryCacher";
export async function postRating(req: Request, res: Response): Promise<Response> {
    // Cast or retract a rating (up/downvote) on a video.
    // Aggregate counts live in the public DB ("ratings" table); the per-user
    // record (hashed user, hashed IP, timestamp) lives in the private DB so
    // individual users cannot be publicly tied to their ratings.
    const privateUserID = req.body.userID as UserID;
    const videoID = req.body.videoID as VideoID;
    const service = getService(req.query.service, req.body.service);
    const type = req.body.type as RatingType;
    // enabled=false means "remove my rating"; omitted defaults to casting one.
    const enabled = req.body.enabled ?? true;

    if (privateUserID == undefined || videoID == undefined || service == undefined || type == undefined
        || (typeof privateUserID !== "string") || (typeof videoID !== "string") || (typeof service !== "string")
        || (typeof type !== "number") || (enabled && (typeof enabled !== "boolean")) || !RatingTypes.includes(type)) {
        // invalid request
        // NOTE(review): a falsy non-boolean `enabled` (e.g. 0, "") slips past this
        // check and is treated as a retraction — confirm that leniency is intended.
        return res.sendStatus(400);
    }

    // Salted IP hash provides basic duplicate-vote protection across accounts.
    const hashedIP: HashedIP = getHash(getIP(req) + config.globalSalt as IPAddress, 1);
    const hashedUserID: HashedUserID = getHash(privateUserID);
    const hashedVideoID = getHash(videoID, 1);

    try {
        // Check if this user has voted before.
        const existingVote = await privateDB.prepare("get", `SELECT count(*) as "count" FROM "ratings" WHERE "videoID" = ? AND "service" = ? AND "type" = ? AND "userID" = ?`, [videoID, service, type, hashedUserID]);

        if (existingVote.count > 0 && !enabled) {
            // Undo the vote: decrement the public counter and delete the private record.
            await db.prepare("run", `UPDATE "ratings" SET "count" = "count" - 1 WHERE "videoID" = ? AND "service" = ? AND type = ?`, [videoID, service, type]);
            await privateDB.prepare("run", `DELETE FROM "ratings" WHERE "videoID" = ? AND "service" = ? AND "type" = ? AND "userID" = ?`, [videoID, service, type, hashedUserID]);
        } else if (existingVote.count === 0 && enabled) {
            // Make sure there hasn't been another vote from this IP.
            const existingIPVote = (await privateDB.prepare("get", `SELECT count(*) as "count" FROM "ratings" WHERE "videoID" = ? AND "service" = ? AND "type" = ? AND "hashedIP" = ?`, [videoID, service, type, hashedIP]))
                .count > 0;
            if (existingIPVote) { // if existing vote, exit early instead
                return res.sendStatus(200);
            }

            // Check if a general rating row already exists; if so increase it,
            // otherwise insert a fresh row with count 1.
            const rating = await db.prepare("get", `SELECT count(*) as "count" FROM "ratings" WHERE "videoID" = ? AND "service" = ? AND type = ?`, [videoID, service, type]);
            if (rating.count > 0) {
                await db.prepare("run", `UPDATE "ratings" SET "count" = "count" + 1 WHERE "videoID" = ? AND "service" = ? AND type = ?`, [videoID, service, type]);
            } else {
                await db.prepare("run", `INSERT INTO "ratings" ("videoID", "service", "type", "count", "hashedVideoID") VALUES (?, ?, ?, 1, ?)`, [videoID, service, type, hashedVideoID]);
            }

            // Create the per-user entry in the private DB.
            await privateDB.prepare("run", `INSERT INTO "ratings" ("videoID", "service", "type", "userID", "timeSubmitted", "hashedIP") VALUES (?, ?, ?, ?, ?, ?)`, [videoID, service, type, hashedUserID, Date.now(), hashedIP]);
        }
        // A request matching the current state (already voted + enabled, or
        // never voted + disabled) falls through both branches as a no-op.

        // Clear the rating cache so readers see the new count immediately.
        QueryCacher.clearRatingCache({ hashedVideoID, service });

        return res.sendStatus(200);
    } catch (err) {
        Logger.error(err as string);
        return res.sendStatus(500);
    }
}

View File

@@ -68,7 +68,7 @@ export async function shadowBanUser(req: Request, res: Response): Promise<Respon
// collect list for unshadowbanning
(await db.prepare("all", `SELECT "videoID", "hashedVideoID", "service", "votes", "views", "userID" FROM "sponsorTimes" WHERE "UUID" = ? AND "shadowHidden" = 1 AND "category" in (${categories.map((c) => `'${c}'`).join(",")})`, [UUID]))
.forEach((videoInfo: {category: Category, videoID: VideoID, hashedVideoID: VideoIDHash, service: Service, userID: UserID}) => {
QueryCacher.clearVideoCache(videoInfo);
QueryCacher.clearSegmentCache(videoInfo);
}
);
@@ -125,6 +125,6 @@ async function unHideSubmissions(categories: string[], userID: UserID) {
// clear cache for all old videos
(await db.prepare("all", `SELECT "videoID", "hashedVideoID", "service", "votes", "views" FROM "sponsorTimes" WHERE "userID" = ?`, [userID]))
.forEach((videoInfo: { category: Category; videoID: VideoID; hashedVideoID: VideoIDHash; service: Service; userID: UserID; }) => {
QueryCacher.clearVideoCache(videoInfo);
QueryCacher.clearSegmentCache(videoInfo);
});
}

View File

@@ -267,7 +267,7 @@ async function categoryVote(UUID: SegmentUUID, userID: UserID, isVIP: boolean, i
}
}
QueryCacher.clearVideoCache(videoInfo);
QueryCacher.clearSegmentCache(videoInfo);
return res.sendStatus(finalResponse.finalStatus);
}
@@ -473,7 +473,7 @@ export async function voteOnSponsorTime(req: Request, res: Response): Promise<Re
await db.prepare("run", 'UPDATE "sponsorTimes" SET locked = 0 WHERE "UUID" = ?', [UUID]);
}
QueryCacher.clearVideoCache(videoInfo);
QueryCacher.clearSegmentCache(videoInfo);
}
if (incrementAmount - oldIncrementAmount !== 0) {
sendWebhooks({

View File

@@ -34,6 +34,7 @@ export interface SBSConfig {
rateLimit: {
vote: RateLimitConfig;
view: RateLimitConfig;
rate: RateLimitConfig;
};
mysql?: any;
privateMysql?: any;

View File

@@ -0,0 +1,6 @@
// Numeric codes stored in the "ratings" tables' "type" column and sent by
// clients in the rating API's `type`/`types` parameters.
export enum RatingType {
    Downvote = 0,
    Upvote = 1
}

// Every valid rating type; used for request validation (e.g. in postRating).
export const RatingTypes = [RatingType.Downvote, RatingType.Upvote];

View File

@@ -5,7 +5,7 @@ import { UserID } from "./user.model";
export type SegmentUUID = string & { __segmentUUIDBrand: unknown };
export type VideoID = string & { __videoIDBrand: unknown };
export type VideoDuration = number & { __videoDurationBrand: unknown };
export type Category = ("sponsor" | "selfpromo" | "interaction" | "intro" | "outro" | "preview" | "music_offtopic" | "poi_highlight") & { __categoryBrand: unknown };
export type Category = ("sponsor" | "selfpromo" | "interaction" | "intro" | "outro" | "preview" | "music_offtopic" | "poi_highlight" | "chapter") & { __categoryBrand: unknown };
export type VideoIDHash = VideoID & HashedValue;
export type IPAddress = string & { __ipAddressBrand: unknown };
export type HashedIP = IPAddress & HashedValue;
@@ -13,6 +13,7 @@ export type HashedIP = IPAddress & HashedValue;
export enum ActionType {
Skip = "skip",
Mute = "mute",
Chapter = "chapter"
}
// Uncomment as needed
@@ -30,6 +31,7 @@ export interface IncomingSegment {
category: Category;
actionType: ActionType;
segment: string[];
description?: string;
}
export interface Segment {
@@ -65,6 +67,7 @@ export interface DBSegment {
timeSubmitted: number;
userAgent: string;
service: Service;
description: string;
}
export interface OverlappingSegmentGroup {

View File

@@ -15,6 +15,6 @@ export function getIP(req: Request): IPAddress {
case "X-Real-IP":
return req.headers["x-real-ip"] as IPAddress;
default:
return req.connection.remoteAddress as IPAddress;
return (req.connection?.remoteAddress || req.socket?.remoteAddress) as IPAddress;
}
}

View File

@@ -1,6 +1,6 @@
import redis from "../utils/redis";
import { Logger } from "../utils/logger";
import { skipSegmentsHashKey, skipSegmentsKey, reputationKey } from "./redisKeys";
import { skipSegmentsHashKey, skipSegmentsKey, reputationKey, ratingHashKey } from "./redisKeys";
import { Service, VideoID, VideoIDHash } from "../types/segments.model";
import { UserID } from "../types/user.model";
@@ -22,7 +22,7 @@ async function get<T>(fetchFromDB: () => Promise<T>, key: string): Promise<T> {
return data;
}
function clearVideoCache(videoInfo: { videoID: VideoID; hashedVideoID: VideoIDHash; service: Service; userID?: UserID; }): void {
function clearSegmentCache(videoInfo: { videoID: VideoID; hashedVideoID: VideoIDHash; service: Service; userID?: UserID; }): void {
if (videoInfo) {
redis.delAsync(skipSegmentsKey(videoInfo.videoID, videoInfo.service));
redis.delAsync(skipSegmentsHashKey(videoInfo.hashedVideoID, videoInfo.service));
@@ -30,7 +30,14 @@ function clearVideoCache(videoInfo: { videoID: VideoID; hashedVideoID: VideoIDHa
}
}
function clearRatingCache(videoInfo: { hashedVideoID: VideoIDHash; service: Service;}): void {
if (videoInfo) {
redis.delAsync(ratingHashKey(videoInfo.hashedVideoID, videoInfo.service));
}
}
export const QueryCacher = {
get,
clearVideoCache
clearSegmentCache,
clearRatingCache
};

View File

@@ -16,3 +16,10 @@ export function skipSegmentsHashKey(hashedVideoIDPrefix: VideoIDHash, service: S
export function reputationKey(userID: UserID): string {
return `reputation.user.${userID}`;
}
/** Redis key for the cached ratings of a 4-character video-hash prefix. */
export function ratingHashKey(hashPrefix: VideoIDHash, service: Service): string {
    // Rating cache entries are keyed by the first 4 characters of the hash;
    // anything shorter indicates a caller bug, so log a warning.
    const prefix = hashPrefix.substring(0, 4) as VideoIDHash;
    if (prefix.length !== 4) Logger.warn(`Redis rating hash-prefix key is not length 4! ${prefix}`);

    return `rating.${service}.${prefix}`;
}