Merge branch 'master' into feat/limit-reward-time-per-segment

Ajay Ramachandran
2021-04-18 15:47:42 -04:00
committed by GitHub
29 changed files with 1314 additions and 345 deletions


@@ -5,7 +5,7 @@ import {oldGetVideoSponsorTimes} from './routes/oldGetVideoSponsorTimes';
import {postSegmentShift} from './routes/postSegmentShift';
import {postWarning} from './routes/postWarning';
import {getIsUserVIP} from './routes/getIsUserVIP';
import {deleteNoSegments} from './routes/deleteNoSegments';
import {deleteNoSegmentsEndpoint} from './routes/deleteNoSegments';
import {postNoSegments} from './routes/postNoSegments';
import {getUserInfo} from './routes/getUserInfo';
import {getDaysSavedFormatted} from './routes/getDaysSavedFormatted';
@@ -26,6 +26,7 @@ import {userCounter} from './middleware/userCounter';
import {loggerMiddleware} from './middleware/logger';
import {corsMiddleware} from './middleware/cors';
import {rateLimitMiddleware} from './middleware/requestRateLimit';
import dumpDatabase from './routes/dumpDatabase';
export function createServer(callback: () => void) {
@@ -116,7 +117,7 @@ function setupRoutes(app: Express) {
//submit video containing no segments
app.post('/api/noSegments', postNoSegments);
app.delete('/api/noSegments', deleteNoSegments);
app.delete('/api/noSegments', deleteNoSegmentsEndpoint);
//get if user is a vip
app.get('/api/isUserVIP', getIsUserVIP);
@@ -127,7 +128,12 @@ function setupRoutes(app: Express) {
//get if user is a vip
app.post('/api/segmentShift', postSegmentShift);
app.get('/database.db', function (req: Request, res: Response) {
res.sendFile("./databases/sponsorTimes.db", {root: "./"});
});
if (config.postgres) {
app.get('/database', (req, res) => dumpDatabase(req, res, true));
app.get('/database.json', (req, res) => dumpDatabase(req, res, false));
} else {
app.get('/database.db', function (req: Request, res: Response) {
res.sendFile("./databases/sponsorTimes.db", {root: "./"});
});
}
}
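As a rough illustration of the new branch above: a Postgres-backed instance serves the dump endpoints, while other instances keep exposing the raw SQLite file. The snippet below is a client-side sketch only; the public host name is an assumption, and the response fields come from dumpDatabase.ts later in this diff.

// Sketch: querying the new dump endpoints registered in setupRoutes above.
fetch('https://sponsor.ajay.app/database.json')              // JSON metadata about the latest dump
    .then((response) => response.json())
    .then((dump) => console.log(dump.lastUpdated, dump.links));
// '/database' returns the same information rendered as an HTML page instead.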


@@ -16,7 +16,7 @@ addDefaults(config, {
privateDBSchema: "./databases/_private.db.sql",
readOnly: false,
webhooks: [],
categoryList: ["sponsor", "intro", "outro", "interaction", "selfpromo", "music_offtopic"],
categoryList: ["sponsor", "selfpromo", "interaction", "intro", "outro", "preview", "music_offtopic"],
maxNumberOfActiveWarnings: 3,
hoursAfterWarningExpires: 24,
adminUserID: "",
@@ -46,7 +46,33 @@ addDefaults(config, {
userCounterURL: null,
youtubeAPIKey: null,
maxRewardTimePerSegmentInSeconds: 86400,
postgres: null
postgres: null,
dumpDatabase: {
enabled: true,
minTimeBetweenMs: 60000,
appExportPath: './docker/database-export',
postgresExportPath: '/opt/exports',
tables: [{
name: "sponsorTimes",
order: "timeSubmitted"
},
{
name: "userNames"
},
{
name: "categoryVotes"
},
{
name: "noSegments",
},
{
name: "warnings",
order: "issueTime"
},
{
name: "vipUsers"
}]
}
});
// Add defaults
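For reference, a sketch of how the new dumpDatabase block could look when expressed against the DumpDatabase interface added in config.model.ts further down; the values are the defaults shown above, trimmed to a few tables for brevity, not requirements.

// Sketch: the default dump settings as a typed object.
const dumpDatabaseConfig: DumpDatabase = {
    enabled: true,
    minTimeBetweenMs: 60000,                      // at most one dump per minute
    appExportPath: './docker/database-export',    // where the app looks for the generated CSVs
    postgresExportPath: '/opt/exports',           // where Postgres writes them, as seen by the DB server
    tables: [
        { name: 'sponsorTimes', order: 'timeSubmitted' },
        { name: 'userNames' },
        { name: 'vipUsers' },
    ],
};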


@@ -23,6 +23,13 @@ export class Postgres implements IDatabase {
// Upgrade database if required
await this.upgradeDB(this.config.fileNamePrefix, this.config.dbSchemaFolder);
try {
await this.applyIndexes(this.config.fileNamePrefix, this.config.dbSchemaFolder);
} catch (e) {
Logger.warn("Applying indexes failed. See https://github.com/ajayyy/SponsorBlockServer/wiki/Postgres-Extensions for more information.");
Logger.warn(e);
}
}
}
@@ -118,9 +125,18 @@ export class Postgres implements IDatabase {
Logger.debug('db update: no file ' + path);
}
private async applyIndexes(fileNamePrefix: string, schemaFolder: string) {
const path = schemaFolder + "/_" + fileNamePrefix + "_indexes.sql";
if (fs.existsSync(path)) {
await this.pool.query(fs.readFileSync(path).toString());
} else {
Logger.debug('failed to apply indexes to ' + fileNamePrefix);
}
}
private processUpgradeQuery(query: string): string {
let result = query;
result = result.replace(/sha256\((.*?)\)/gm, "digest($1, 'sha256')");
result = result.replace(/sha256\((.*?)\)/gm, "encode(digest($1, 'sha256'), 'hex')");
result = result.replace(/integer/gmi, "NUMERIC");
return result;
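A quick sketch of what the updated rewrite in processUpgradeQuery does to an SQLite-flavoured upgrade script; the input lines are invented for illustration, the two replacements are exactly the ones shown above.

// Sketch: SQLite-style schema text is rewritten into Postgres-compatible SQL.
const input = `ALTER TABLE "sponsorTimes" ADD "videoDuration" integer default 0;
UPDATE "sponsorTimes" SET "hashedVideoID" = sha256("videoID");`;
const output = input
    .replace(/sha256\((.*?)\)/gm, "encode(digest($1, 'sha256'), 'hex')")
    .replace(/integer/gmi, "NUMERIC");
// => ALTER TABLE "sponsorTimes" ADD "videoDuration" NUMERIC default 0;
//    UPDATE "sponsorTimes" SET "hashedVideoID" = encode(digest("videoID", 'sha256'), 'hex');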


@@ -1,5 +1,13 @@
import { Category, VideoID } from "../types/segments.model";
import { Service, VideoID, VideoIDHash } from "../types/segments.model";
import { Logger } from "../utils/logger";
export function skipSegmentsKey(videoID: VideoID): string {
return "segments-" + videoID;
}
export function skipSegmentsHashKey(hashedVideoIDPrefix: VideoIDHash, service: Service): string {
hashedVideoIDPrefix = hashedVideoIDPrefix.substring(0, 4) as VideoIDHash;
if (hashedVideoIDPrefix.length !== 4) Logger.warn("Redis skip segment hash-prefix key is not length 4! " + hashedVideoIDPrefix);
return "segments." + service + "." + hashedVideoIDPrefix;
}
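A small sketch of the resulting key formats; the hash value is made up, and the casts are just the compile-time brands from segments.model.ts.

// Sketch: cache keys produced by the helpers above.
skipSegmentsKey('dQw4w9WgXcQ' as VideoID);                        // "segments-dQw4w9WgXcQ"
skipSegmentsHashKey('3272fa85' as VideoIDHash, Service.YouTube);  // trimmed to "segments.YouTube.3272"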


@@ -2,12 +2,14 @@ import {Request, Response} from 'express';
import {isUserVIP} from '../utils/isUserVIP';
import {getHash} from '../utils/getHash';
import {db} from '../databases/databases';
import { Category, VideoID } from '../types/segments.model';
import { UserID } from '../types/user.model';
export async function deleteNoSegments(req: Request, res: Response) {
export async function deleteNoSegmentsEndpoint(req: Request, res: Response) {
// Collect user input data
const videoID = req.body.videoID;
let userID = req.body.userID;
const categories = req.body.categories;
const videoID = req.body.videoID as VideoID;
const userID = req.body.userID as UserID;
const categories = req.body.categories as Category[];
// Check input data is valid
if (!videoID
@@ -23,8 +25,8 @@ export async function deleteNoSegments(req: Request, res: Response) {
}
// Check if user is VIP
userID = getHash(userID);
const userIsVIP = await isUserVIP(userID);
const hashedUserID = getHash(userID);
const userIsVIP = await isUserVIP(hashedUserID);
if (!userIsVIP) {
res.status(403).json({
@@ -33,13 +35,22 @@ export async function deleteNoSegments(req: Request, res: Response) {
return;
}
deleteNoSegments(videoID, categories);
res.status(200).json({message: 'Removed no segments entrys for video ' + videoID});
}
/**
*
* @param videoID
* @param categories If null, will remove all
*/
export async function deleteNoSegments(videoID: VideoID, categories: Category[]): Promise<void> {
const entries = (await db.prepare("all", 'SELECT * FROM "noSegments" WHERE "videoID" = ?', [videoID])).filter((entry: any) => {
return (categories.indexOf(entry.category) !== -1);
return categories === null || categories.indexOf(entry.category) !== -1;
});
for (const entry of entries) {
await db.prepare('run', 'DELETE FROM "noSegments" WHERE "videoID" = ? AND "category" = ?', [videoID, entry.category]);
}
res.status(200).json({message: 'Removed no segments entrys for video ' + videoID});
}
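Splitting the endpoint from the helper lets other routes reuse the deletion logic directly. A sketch of both calls (inside an async context, with an example video ID):

// Sketch: remove the no-segments locks for two categories...
await deleteNoSegments('dQw4w9WgXcQ' as VideoID, ['sponsor', 'intro'] as Category[]);
// ...or pass null to clear every category, as postSkipSegments does when a video's duration changes.
await deleteNoSegments('dQw4w9WgXcQ' as VideoID, null);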

src/routes/dumpDatabase.ts (new file, 173 additions)

@@ -0,0 +1,173 @@
import {db} from '../databases/databases';
import {Logger} from '../utils/logger';
import {Request, Response} from 'express';
import { config } from '../config';
import util from 'util';
import fs from 'fs';
import path from 'path';
const unlink = util.promisify(fs.unlink);
const ONE_MINUTE = 1000 * 60;
const styleHeader = `<style>
body {
font-family: sans-serif
}
table th,
table td {
padding: 7px;
}
table th {
text-align: left;
}
table tbody tr:nth-child(odd) {
background: #efefef;
}
</style>`
const licenseHeader = `<p>The API and database follow <a href="https://creativecommons.org/licenses/by-nc-sa/4.0/" rel="nofollow">CC BY-NC-SA 4.0</a> unless you have explicit permission.</p>
<p><a href="https://gist.github.com/ajayyy/4b27dfc66e33941a45aeaadccb51de71">Attribution Template</a></p>
<p>If you need to use the database or API in a way that violates this license, contact me with your reason and I may grant you access under a different license.</p></a></p>`;
const tables = config?.dumpDatabase?.tables ?? [];
const MILLISECONDS_BETWEEN_DUMPS = config?.dumpDatabase?.minTimeBetweenMs ?? ONE_MINUTE;
const appExportPath = config?.dumpDatabase?.appExportPath ?? './docker/database-export';
const postgresExportPath = config?.dumpDatabase?.postgresExportPath ?? '/opt/exports';
const tableNames = tables.map(table => table.name);
interface TableDumpList {
fileName: string;
tableName: string;
};
let latestDumpFiles: TableDumpList[] = [];
interface TableFile {
file: string,
timestamp: number
};
if (tables.length === 0) {
Logger.warn('[dumpDatabase] No tables configured');
}
let lastUpdate = 0;
function removeOutdatedDumps(exportPath: string): Promise<void> {
return new Promise((resolve, reject) => {
// Get list of table names
// Create array for each table
const tableFiles: Record<string, TableFile[]> = tableNames.reduce((obj: any, tableName) => {
obj[tableName] = [];
return obj;
}, {});
// read files in export directory
fs.readdir(exportPath, async (err: any, files: string[]) => {
if (err) Logger.error(err);
if (err) return resolve();
files.forEach(file => {
// we only care about files that start with "<tablename>_" and end with .csv
tableNames.forEach(tableName => {
if (file.startsWith(`${tableName}`) && file.endsWith('.csv')) {
const filePath = path.join(exportPath, file);
tableFiles[tableName].push({
file: filePath,
timestamp: fs.statSync(filePath).mtime.getTime()
});
}
});
});
for (let tableName in tableFiles) {
const files = tableFiles[tableName].sort((a, b) => b.timestamp - a.timestamp);
for (let i = 2; i < files.length; i++) {
// remove old file
await unlink(files[i].file).catch((error: any) => {
Logger.error(`[dumpDatabase] Garbage collection failed ${error}`);
});
}
}
resolve();
});
});
}
export default async function dumpDatabase(req: Request, res: Response, showPage: boolean) {
if (!config?.dumpDatabase?.enabled) {
res.status(404).send("Database dump is disabled");
return;
}
if (!config.postgres) {
res.status(404).send("Not supported on this instance");
return;
}
const now = Date.now();
const updateQueued = now - lastUpdate > MILLISECONDS_BETWEEN_DUMPS;
res.status(200)
if (showPage) {
res.send(`${styleHeader}
<h1>SponsorBlock database dumps</h1>${licenseHeader}
<h3>How this works</h3>
Send a request to <code>https://sponsor.ajay.app/database.json</code>, or visit this page to trigger the database dump to run.
Then, you can download the csv files below, or use the links returned from the JSON request.
<h3>Links</h3>
<table>
<thead>
<tr>
<th>Table</th>
<th>CSV</th>
</tr>
</thead>
<tbody>
${latestDumpFiles.map((item:any) => {
return `
<tr>
<td>${item.tableName}</td>
<td><a href="/database/${item.fileName}">${item.fileName}</a></td>
</tr>
`;
}).join('')}
${latestDumpFiles.length === 0 ? '<tr><td colspan="2">Please wait: Generating files</td></tr>' : ''}
</tbody>
</table>
<hr/>
${updateQueued ? `Update queued.` : ``} Last updated: ${lastUpdate ? new Date(lastUpdate).toUTCString() : `Unknown`}`);
} else {
res.send({
lastUpdated: lastUpdate,
updateQueued,
links: latestDumpFiles.map((item:any) => {
return {
table: item.tableName,
url: `/database/${item.fileName}`,
size: item.fileSize,
};
}),
})
}
if (updateQueued) {
lastUpdate = Date.now();
await removeOutdatedDumps(appExportPath);
const dumpFiles = [];
for (const table of tables) {
const fileName = `${table.name}_${lastUpdate}.csv`;
const file = `${postgresExportPath}/${fileName}`;
await db.prepare('run', `COPY (SELECT * FROM "${table.name}"${table.order ? ` ORDER BY "${table.order}"` : ``})
TO '${file}' WITH (FORMAT CSV, HEADER true);`);
dumpFiles.push({
fileName,
tableName: table.name,
});
}
latestDumpFiles = [...dumpFiles];
}
}
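To make the dump step above concrete, here is roughly what one iteration of the table loop produces for the default sponsorTimes entry; the timestamp is an arbitrary example, and the export path is the default postgresExportPath.

// Sketch: generated file name and COPY statement for { name: "sponsorTimes", order: "timeSubmitted" }.
const lastUpdateExample = 1618775262000;
const fileNameExample = `sponsorTimes_${lastUpdateExample}.csv`;   // sponsorTimes_1618775262000.csv
const copyExample = `COPY (SELECT * FROM "sponsorTimes" ORDER BY "timeSubmitted")
    TO '/opt/exports/${fileNameExample}' WITH (FORMAT CSV, HEADER true);`;
// The generated CSV is then linked from the /database page as /database/sponsorTimes_1618775262000.csv
// (how that static path is actually served is outside this diff).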


@@ -2,9 +2,9 @@ import { Request, Response } from 'express';
import { RedisClient } from 'redis';
import { config } from '../config';
import { db, privateDB } from '../databases/databases';
import { skipSegmentsKey } from '../middleware/redisKeys';
import { skipSegmentsHashKey, skipSegmentsKey } from '../middleware/redisKeys';
import { SBRecord } from '../types/lib.model';
import { Category, DBSegment, HashedIP, IPAddress, OverlappingSegmentGroup, Segment, SegmentCache, VideoData, VideoID, VideoIDHash, Visibility, VotableObject } from "../types/segments.model";
import { Category, DBSegment, HashedIP, IPAddress, OverlappingSegmentGroup, Segment, SegmentCache, Service, VideoData, VideoID, VideoIDHash, Visibility, VotableObject } from "../types/segments.model";
import { getHash } from '../utils/getHash';
import { getIP } from '../utils/getIP';
import { Logger } from '../utils/logger';
@@ -43,11 +43,12 @@ async function prepareCategorySegments(req: Request, videoID: VideoID, category:
return chooseSegments(filteredSegments).map((chosenSegment) => ({
category,
segment: [chosenSegment.startTime, chosenSegment.endTime],
UUID: chosenSegment.UUID
UUID: chosenSegment.UUID,
videoDuration: chosenSegment.videoDuration
}));
}
async function getSegmentsByVideoID(req: Request, videoID: string, categories: Category[]): Promise<Segment[]> {
async function getSegmentsByVideoID(req: Request, videoID: string, categories: Category[], service: Service): Promise<Segment[]> {
const cache: SegmentCache = {shadowHiddenSegmentIPs: {}};
const segments: Segment[] = [];
@@ -58,9 +59,9 @@ async function getSegmentsByVideoID(req: Request, videoID: string, categories: C
const segmentsByCategory: SBRecord<Category, DBSegment[]> = (await db
.prepare(
'all',
`SELECT "startTime", "endTime", "votes", "locked", "UUID", "category", "shadowHidden" FROM "sponsorTimes"
WHERE "videoID" = ? AND "category" IN (${categories.map((c) => "'" + c + "'")}) ORDER BY "startTime"`,
[videoID]
`SELECT "startTime", "endTime", "votes", "locked", "UUID", "category", "videoDuration", "shadowHidden" FROM "sponsorTimes"
WHERE "videoID" = ? AND "category" IN (${categories.map((c) => "'" + c + "'")}) AND "service" = ? AND "hidden" = 0 ORDER BY "startTime"`,
[videoID, service]
)).reduce((acc: SBRecord<Category, DBSegment[]>, segment: DBSegment) => {
acc[segment.category] = acc[segment.category] || [];
acc[segment.category].push(segment);
@@ -81,7 +82,7 @@ async function getSegmentsByVideoID(req: Request, videoID: string, categories: C
}
}
async function getSegmentsByHash(req: Request, hashedVideoIDPrefix: VideoIDHash, categories: Category[]): Promise<SBRecord<VideoID, VideoData>> {
async function getSegmentsByHash(req: Request, hashedVideoIDPrefix: VideoIDHash, categories: Category[], service: Service): Promise<SBRecord<VideoID, VideoData>> {
const cache: SegmentCache = {shadowHiddenSegmentIPs: {}};
const segments: SBRecord<VideoID, VideoData> = {};
@@ -91,13 +92,9 @@ async function getSegmentsByHash(req: Request, hashedVideoIDPrefix: VideoIDHash,
categories = categories.filter((category) => !(/[^a-z|_|-]/.test(category)));
if (categories.length === 0) return null;
const segmentPerVideoID: SegmentWithHashPerVideoID = (await db
.prepare(
'all',
`SELECT "videoID", "startTime", "endTime", "votes", "locked", "UUID", "category", "shadowHidden", "hashedVideoID" FROM "sponsorTimes"
WHERE "hashedVideoID" LIKE ? AND "category" IN (${categories.map((c) => "'" + c + "'")}) ORDER BY "startTime"`,
[hashedVideoIDPrefix + '%']
)).reduce((acc: SegmentWithHashPerVideoID, segment: DBSegment) => {
const segmentPerVideoID: SegmentWithHashPerVideoID = (await getSegmentsFromDB(hashedVideoIDPrefix, service))
.filter((segment: DBSegment) => categories.includes(segment?.category))
.reduce((acc: SegmentWithHashPerVideoID, segment: DBSegment) => {
acc[segment.videoID] = acc[segment.videoID] || {
hash: segment.hashedVideoID,
segmentPerCategory: {},
@@ -130,6 +127,37 @@ async function getSegmentsByHash(req: Request, hashedVideoIDPrefix: VideoIDHash,
}
}
async function getSegmentsFromDB(hashedVideoIDPrefix: VideoIDHash, service: Service): Promise<DBSegment[]> {
const fetchFromDB = () => db
.prepare(
'all',
`SELECT "videoID", "startTime", "endTime", "votes", "locked", "UUID", "category", "videoDuration", "shadowHidden", "hashedVideoID" FROM "sponsorTimes"
WHERE "hashedVideoID" LIKE ? AND "service" = ? AND "hidden" = 0 ORDER BY "startTime"`,
[hashedVideoIDPrefix + '%', service]
);
if (hashedVideoIDPrefix.length === 4) {
const key = skipSegmentsHashKey(hashedVideoIDPrefix, service);
const {err, reply} = await redis.getAsync(key);
if (!err && reply) {
try {
Logger.debug("Got data from redis: " + reply);
return JSON.parse(reply);
} catch (e) {
// If all else fails, continue on to fetching from the database
}
}
const data = await fetchFromDB();
redis.setAsync(key, JSON.stringify(data));
return data;
}
return await fetchFromDB();
}
//gets a weighted random choice from the choices array based on their `votes` property.
//amountOfChoices specifies the maximum amount of choices to return, 1 or more.
//choices are unique
@@ -239,6 +267,11 @@ async function handleGetSegments(req: Request, res: Response): Promise<Segment[]
? [req.query.category]
: ['sponsor'];
let service: Service = req.query.service ?? req.body.service ?? Service.YouTube;
if (!Object.values(Service).some((val) => val == service)) {
service = Service.YouTube;
}
// Only 404s are cached at the moment
const redisResult = await redis.getAsync(skipSegmentsKey(videoID));
@@ -251,7 +284,7 @@ async function handleGetSegments(req: Request, res: Response): Promise<Segment[]
}
}
const segments = await getSegmentsByVideoID(req, videoID, categories);
const segments = await getSegmentsByVideoID(req, videoID, categories, service);
if (segments === null || segments === undefined) {
res.sendStatus(500);
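From the client side, the new service parameter and the returned videoDuration look roughly like this; the host name is assumed to be the public instance and the video ID is an example.

// Sketch: unknown service values fall back to YouTube, as handled in handleGetSegments above.
fetch('https://sponsor.ajay.app/api/skipSegments?videoID=dQw4w9WgXcQ&category=sponsor&service=PeerTube')
    .then((res) => res.json())
    .then((segments) => {
        // each entry: { category, segment: [start, end], UUID, videoDuration }
        console.log(segments);
    });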


@@ -1,7 +1,7 @@
import {hashPrefixTester} from '../utils/hashPrefixTester';
import {getSegmentsByHash} from './getSkipSegments';
import {Request, Response} from 'express';
import { Category, VideoIDHash } from '../types/segments.model';
import { Category, Service, VideoIDHash } from '../types/segments.model';
export async function getSkipSegmentsByHash(req: Request, res: Response) {
let hashPrefix = req.params.prefix as VideoIDHash;
@@ -25,12 +25,17 @@ export async function getSkipSegmentsByHash(req: Request, res: Response) {
catch(error) {
return res.status(400).send("Bad parameter: categories (invalid JSON)");
}
let service: Service = req.query.service ?? req.body.service ?? Service.YouTube;
if (!Object.values(Service).some((val) => val == service)) {
service = Service.YouTube;
}
// filter out none string elements, only flat array with strings is valid
categories = categories.filter((item: any) => typeof item === "string");
// Get all video id's that match hash prefix
const segments = await getSegmentsByHash(req, hashPrefix, categories);
const segments = await getSegmentsByHash(req, hashPrefix, categories, service);
if (!segments) return res.status(404).json([]);
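For completeness, a sketch of how a client could build the 4-character prefix this endpoint expects, with the new service parameter; it assumes the prefix is the start of a single SHA-256 hash of the video ID, which matches the getHash(videoID, 1) call in postSkipSegments below.

import crypto from 'crypto';

// Sketch: privacy-preserving lookup by hash prefix.
const hashedVideoID = crypto.createHash('sha256').update('dQw4w9WgXcQ').digest('hex');
const prefix = hashedVideoID.substring(0, 4);
fetch(`https://sponsor.ajay.app/api/skipSegments/${prefix}?service=YouTube`)
    .then((res) => res.json())
    .then((videos) => console.log(videos)); // segments for every video whose hash starts with the prefix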


@@ -11,9 +11,15 @@ import {getFormattedTime} from '../utils/getFormattedTime';
import {isUserTrustworthy} from '../utils/isUserTrustworthy';
import {dispatchEvent} from '../utils/webhookUtils';
import {Request, Response} from 'express';
import { skipSegmentsKey } from '../middleware/redisKeys';
import { skipSegmentsHashKey, skipSegmentsKey } from '../middleware/redisKeys';
import redis from '../utils/redis';
import { Category, IncomingSegment, Segment, SegmentUUID, Service, VideoDuration, VideoID } from '../types/segments.model';
import { deleteNoSegments } from './deleteNoSegments';
interface APIVideoInfo {
err: string | boolean,
data: any
}
async function sendWebhookNotification(userID: string, videoID: string, UUID: string, submissionCount: number, youtubeData: any, {submissionStart, submissionEnd}: { submissionStart: number; submissionEnd: number; }, segmentInfo: any) {
const row = await db.prepare('get', `SELECT "userName" FROM "userNames" WHERE "userID" = ?`, [userID]);
@@ -45,62 +51,58 @@ async function sendWebhookNotification(userID: string, videoID: string, UUID: st
});
}
async function sendWebhooks(userID: string, videoID: string, UUID: string, segmentInfo: any) {
if (config.youtubeAPIKey !== null) {
async function sendWebhooks(apiVideoInfo: APIVideoInfo, userID: string, videoID: string, UUID: string, segmentInfo: any, service: Service) {
if (apiVideoInfo && service == Service.YouTube) {
const userSubmissionCountRow = await db.prepare('get', `SELECT count(*) as "submissionCount" FROM "sponsorTimes" WHERE "userID" = ?`, [userID]);
YouTubeAPI.listVideos(videoID, (err: any, data: any) => {
if (err || data.items.length === 0) {
err && Logger.error(err);
return;
const {data, err} = apiVideoInfo;
if (err) return;
const startTime = parseFloat(segmentInfo.segment[0]);
const endTime = parseFloat(segmentInfo.segment[1]);
sendWebhookNotification(userID, videoID, UUID, userSubmissionCountRow.submissionCount, data, {
submissionStart: startTime,
submissionEnd: endTime,
}, segmentInfo);
// If it is a first time submission
// Then send a notification to discord
if (config.discordFirstTimeSubmissionsWebhookURL === null || userSubmissionCountRow.submissionCount > 1) return;
fetch(config.discordFirstTimeSubmissionsWebhookURL, {
method: 'POST',
body: JSON.stringify({
"embeds": [{
"title": data.items[0].snippet.title,
"url": "https://www.youtube.com/watch?v=" + videoID + "&t=" + (parseInt(startTime.toFixed(0)) - 2),
"description": "Submission ID: " + UUID +
"\n\nTimestamp: " +
getFormattedTime(startTime) + " to " + getFormattedTime(endTime) +
"\n\nCategory: " + segmentInfo.category,
"color": 10813440,
"author": {
"name": userID,
},
"thumbnail": {
"url": data.items[0].snippet.thumbnails.maxres ? data.items[0].snippet.thumbnails.maxres.url : "",
},
}],
}),
headers: {
'Content-Type': 'application/json'
}
const startTime = parseFloat(segmentInfo.segment[0]);
const endTime = parseFloat(segmentInfo.segment[1]);
sendWebhookNotification(userID, videoID, UUID, userSubmissionCountRow.submissionCount, data, {
submissionStart: startTime,
submissionEnd: endTime,
}, segmentInfo);
// If it is a first time submission
// Then send a notification to discord
if (config.discordFirstTimeSubmissionsWebhookURL === null || userSubmissionCountRow.submissionCount > 1) return;
fetch(config.discordFirstTimeSubmissionsWebhookURL, {
method: 'POST',
body: JSON.stringify({
"embeds": [{
"title": data.items[0].snippet.title,
"url": "https://www.youtube.com/watch?v=" + videoID + "&t=" + (parseInt(startTime.toFixed(0)) - 2),
"description": "Submission ID: " + UUID +
"\n\nTimestamp: " +
getFormattedTime(startTime) + " to " + getFormattedTime(endTime) +
"\n\nCategory: " + segmentInfo.category,
"color": 10813440,
"author": {
"name": userID,
},
"thumbnail": {
"url": data.items[0].snippet.thumbnails.maxres ? data.items[0].snippet.thumbnails.maxres.url : "",
},
}],
}),
headers: {
'Content-Type': 'application/json'
}
})
.then(res => {
if (res.status >= 400) {
Logger.error("Error sending first time submission Discord hook");
Logger.error(JSON.stringify(res));
Logger.error("\n");
}
})
.catch(err => {
Logger.error("Failed to send first time submission Discord hook.");
Logger.error(JSON.stringify(err));
})
.then(res => {
if (res.status >= 400) {
Logger.error("Error sending first time submission Discord hook");
Logger.error(JSON.stringify(res));
Logger.error("\n");
});
}
})
.catch(err => {
Logger.error("Failed to send first time submission Discord hook.");
Logger.error(JSON.stringify(err));
Logger.error("\n");
});
}
}
@@ -166,73 +168,98 @@ async function sendWebhooksNB(userID: string, videoID: string, UUID: string, sta
// Looks like this was broken for no defined youtube key - fixed but IMO we shouldn't return
// false for a pass - it was confusing and led to this bug - any use of this function in
// the future could have the same problem.
async function autoModerateSubmission(submission: { videoID: any; userID: any; segments: any }) {
// Get the video information from the youtube API
if (config.youtubeAPIKey !== null) {
const {err, data} = await new Promise((resolve) => {
YouTubeAPI.listVideos(submission.videoID, (err: any, data: any) => resolve({err, data}));
});
async function autoModerateSubmission(apiVideoInfo: APIVideoInfo,
submission: { videoID: any; userID: any; segments: any }) {
if (apiVideoInfo) {
const {err, data} = apiVideoInfo;
if (err) return false;
if (err) {
return false;
} else {
// Check to see if video exists
if (data.pageInfo.totalResults === 0) {
return "No video exists with id " + submission.videoID;
// Check to see if video exists
if (data.pageInfo.totalResults === 0) return "No video exists with id " + submission.videoID;
const duration = getYouTubeVideoDuration(apiVideoInfo);
const segments = submission.segments;
let nbString = "";
for (let i = 0; i < segments.length; i++) {
const startTime = parseFloat(segments[i].segment[0]);
const endTime = parseFloat(segments[i].segment[1]);
if (duration == 0) {
// Allow submission if the duration is 0 (bug in youtube api)
return false;
} else {
const segments = submission.segments;
let nbString = "";
for (let i = 0; i < segments.length; i++) {
if (segments[i].category === "sponsor") {
//Prepare timestamps to send to NB all at once
nbString = nbString + segments[i].segment[0] + "," + segments[i].segment[1] + ";";
}
}
}
// Get all submissions for this user
const allSubmittedByUser = await db.prepare('all', `SELECT "startTime", "endTime" FROM "sponsorTimes" WHERE "userID" = ? and "videoID" = ? and "votes" > -1`, [submission.userID, submission.videoID]);
const allSegmentTimes = [];
if (allSubmittedByUser !== undefined) {
//add segments the user has previously submitted
for (const segmentInfo of allSubmittedByUser) {
allSegmentTimes.push([parseFloat(segmentInfo.startTime), parseFloat(segmentInfo.endTime)]);
}
}
//add segments they are trying to add in this submission
for (let i = 0; i < segments.length; i++) {
let startTime = parseFloat(segments[i].segment[0]);
let endTime = parseFloat(segments[i].segment[1]);
allSegmentTimes.push([startTime, endTime]);
}
//merge all the times into non-overlapping arrays
const allSegmentsSorted = mergeTimeSegments(allSegmentTimes.sort(function (a, b) {
return a[0] - b[0] || a[1] - b[1];
}));
let videoDuration = data.items[0].contentDetails.duration;
videoDuration = isoDurations.toSeconds(isoDurations.parse(videoDuration));
if (videoDuration != 0) {
let allSegmentDuration = 0;
//sum all segment times together
allSegmentsSorted.forEach(segmentInfo => allSegmentDuration += segmentInfo[1] - segmentInfo[0]);
if (allSegmentDuration > (videoDuration / 100) * 80) {
// Reject submission if all segments combine are over 80% of the video
return "Total length of your submitted segments are over 80% of the video.";
}
}
// Check NeuralBlock
const neuralBlockURL = config.neuralBlockURL;
if (!neuralBlockURL) return false;
const response = await fetch(neuralBlockURL + "/api/checkSponsorSegments?vid=" + submission.videoID +
"&segments=" + nbString.substring(0, nbString.length - 1));
if (!response.ok) return false;
const nbPredictions = await response.json();
let nbDecision = false;
let predictionIdx = 0; //Keep track because only sponsor categories were submitted
for (let i = 0; i < segments.length; i++) {
if (segments[i].category === "sponsor") {
if (nbPredictions.probabilities[predictionIdx] < 0.70) {
nbDecision = true; // At least one bad entry
const startTime = parseFloat(segments[i].segment[0]);
const endTime = parseFloat(segments[i].segment[1]);
let duration = data.items[0].contentDetails.duration;
duration = isoDurations.toSeconds(isoDurations.parse(duration));
if (duration == 0) {
// Allow submission if the duration is 0 (bug in youtube api)
return false;
} else if ((endTime - startTime) > (duration / 100) * 80) {
// Reject submission if over 80% of the video
return "One of your submitted segments is over 80% of the video.";
} else {
if (segments[i].category === "sponsor") {
//Prepare timestamps to send to NB all at once
nbString = nbString + segments[i].segment[0] + "," + segments[i].segment[1] + ";";
}
}
}
// Check NeuralBlock
const neuralBlockURL = config.neuralBlockURL;
if (!neuralBlockURL) return false;
const response = await fetch(neuralBlockURL + "/api/checkSponsorSegments?vid=" + submission.videoID +
"&segments=" + nbString.substring(0, nbString.length - 1));
if (!response.ok) return false;
const nbPredictions = await response.json();
let nbDecision = false;
let predictionIdx = 0; //Keep track because only sponsor categories were submitted
for (let i = 0; i < segments.length; i++) {
if (segments[i].category === "sponsor") {
if (nbPredictions.probabilities[predictionIdx] < 0.70) {
nbDecision = true; // At least one bad entry
const startTime = parseFloat(segments[i].segment[0]);
const endTime = parseFloat(segments[i].segment[1]);
const UUID = getSubmissionUUID(submission.videoID, segments[i].category, submission.userID, startTime, endTime);
// Send to Discord
// Note, if this is too spammy. Consider sending all the segments as one Webhook
sendWebhooksNB(submission.userID, submission.videoID, UUID, startTime, endTime, segments[i].category, nbPredictions.probabilities[predictionIdx], data);
}
predictionIdx++;
}
}
if (nbDecision) {
return "Rejected based on NeuralBlock predictions.";
} else {
return false;
const UUID = getSubmissionUUID(submission.videoID, segments[i].category, submission.userID, startTime, endTime);
// Send to Discord
// Note, if this is too spammy. Consider sending all the segments as one Webhook
sendWebhooksNB(submission.userID, submission.videoID, UUID, startTime, endTime, segments[i].category, nbPredictions.probabilities[predictionIdx], data);
}
predictionIdx++;
}
}
if (nbDecision) {
return "Rejected based on NeuralBlock predictions.";
} else {
return false;
}
} else {
Logger.debug("Skipped YouTube API");
@@ -243,6 +270,21 @@ async function autoModerateSubmission(submission: { videoID: any; userID: any; s
}
}
function getYouTubeVideoDuration(apiVideoInfo: APIVideoInfo): VideoDuration {
const duration = apiVideoInfo?.data?.items[0]?.contentDetails?.duration;
return duration ? isoDurations.toSeconds(isoDurations.parse(duration)) as VideoDuration : null;
}
async function getYouTubeVideoInfo(videoID: VideoID): Promise<APIVideoInfo> {
if (config.youtubeAPIKey !== null) {
return new Promise((resolve) => {
YouTubeAPI.listVideos(videoID, (err: any, data: any) => resolve({err, data}));
});
} else {
return null;
}
}
function proxySubmission(req: Request) {
fetch(config.proxySubmission + '/api/skipSegments?userID=' + req.query.userID + '&videoID=' + req.query.videoID, {
method: 'POST',
@@ -267,14 +309,18 @@ export async function postSkipSegments(req: Request, res: Response) {
const videoID = req.query.videoID || req.body.videoID;
let userID = req.query.userID || req.body.userID;
let service: Service = req.query.service ?? req.body.service ?? Service.YouTube;
if (!Object.values(Service).some((val) => val == service)) {
service = Service.YouTube;
}
let videoDuration: VideoDuration = (parseFloat(req.query.videoDuration || req.body.videoDuration) || 0) as VideoDuration;
let segments = req.body.segments;
let segments = req.body.segments as IncomingSegment[];
if (segments === undefined) {
// Use query instead
segments = [{
segment: [req.query.startTime, req.query.endTime],
category: req.query.category,
segment: [req.query.startTime as string, req.query.endTime as string],
category: req.query.category as Category
}];
}
@@ -312,7 +358,7 @@ export async function postSkipSegments(req: Request, res: Response) {
return res.status(403).send('Submission rejected due to a warning from a moderator. This means that we noticed you were making some common mistakes that are not malicious, and we just want to clarify the rules. Could you please send a message in Discord or Matrix so we can further help you?');
}
const noSegmentList = (await db.prepare('all', 'SELECT category from "noSegments" where "videoID" = ?', [videoID])).map((list: any) => {
let noSegmentList = (await db.prepare('all', 'SELECT category from "noSegments" where "videoID" = ?', [videoID])).map((list: any) => {
return list.category;
});
@@ -321,6 +367,32 @@ export async function postSkipSegments(req: Request, res: Response) {
const decreaseVotes = 0;
let apiVideoInfo: APIVideoInfo = null;
if (service == Service.YouTube) {
apiVideoInfo = await getYouTubeVideoInfo(videoID);
}
const apiVideoDuration = getYouTubeVideoDuration(apiVideoInfo);
if (!videoDuration || (apiVideoDuration && Math.abs(videoDuration - apiVideoDuration) > 2)) {
// If api duration is far off, take that one instead (it is only precise to seconds, not millis)
videoDuration = apiVideoDuration || 0 as VideoDuration;
}
const previousSubmissions = await db.prepare('all', `SELECT "videoDuration", "UUID" FROM "sponsorTimes" WHERE "videoID" = ? AND "service" = ? AND "hidden" = 0
AND "shadowHidden" = 0 AND "votes" >= 0 AND "videoDuration" != 0`, [videoID, service]) as
{videoDuration: VideoDuration, UUID: SegmentUUID}[];
// If the video's duration is changed, then the video should be unlocked and old submissions should be hidden
const videoDurationChanged = previousSubmissions.length > 0 && !previousSubmissions.some((e) => Math.abs(videoDuration - e.videoDuration) < 2);
if (videoDurationChanged) {
// Hide all previous submissions
for (const submission of previousSubmissions) {
await db.prepare('run', `UPDATE "sponsorTimes" SET "hidden" = 1 WHERE "UUID" = ?`, [submission.UUID]);
}
// Reset no segments
noSegmentList = [];
deleteNoSegments(videoID, null);
}
// Check if all submissions are correct
for (let i = 0; i < segments.length; i++) {
if (segments[i] === undefined || segments[i].segment === undefined || segments[i].category === undefined) {
@@ -343,7 +415,7 @@ export async function postSkipSegments(req: Request, res: Response) {
+ segments[i].category + "'. A moderator has decided that no new segments are needed and that all current segments of this category are timed perfectly.\n\n "
+ (segments[i].category === "sponsor" ? "Maybe the segment you are submitting is a different category that you have not enabled and is not a sponsor. " +
"Categories that aren't sponsor, such as self-promotion can be enabled in the options.\n\n " : "")
+ "If you believe this is incorrect, please contact someone on Discord.",
+ "If you believe this is incorrect, please contact someone on discord.gg/SponsorBlock or matrix.to/#/+sponsorblock:ajay.app",
);
return;
}
@@ -367,7 +439,7 @@ export async function postSkipSegments(req: Request, res: Response) {
//check if this info has already been submitted before
const duplicateCheck2Row = await db.prepare('get', `SELECT COUNT(*) as count FROM "sponsorTimes" WHERE "startTime" = ?
and "endTime" = ? and "category" = ? and "videoID" = ?`, [startTime, endTime, segments[i].category, videoID]);
and "endTime" = ? and "category" = ? and "videoID" = ? and "service" = ?`, [startTime, endTime, segments[i].category, videoID, service]);
if (duplicateCheck2Row.count > 0) {
res.sendStatus(409);
return;
@@ -375,8 +447,8 @@ export async function postSkipSegments(req: Request, res: Response) {
}
// Auto moderator check
if (!isVIP) {
const autoModerateResult = await autoModerateSubmission({userID, videoID, segments});//startTime, endTime, category: segments[i].category});
if (!isVIP && service == Service.YouTube) {
const autoModerateResult = await autoModerateSubmission(apiVideoInfo, {userID, videoID, segments});//startTime, endTime, category: segments[i].category});
if (autoModerateResult == "Rejected based on NeuralBlock predictions.") {
// If NB automod rejects, the submission will start with -2 votes.
// Note, if one submission is bad all submissions will be affected.
@@ -437,63 +509,19 @@ export async function postSkipSegments(req: Request, res: Response) {
let startingVotes = 0 + decreaseVotes;
if (config.youtubeAPIKey !== null) {
let {err, data} = await new Promise((resolve) => {
YouTubeAPI.listVideos(videoID, (err: any, data: any) => resolve({err, data}));
});
if (err) {
Logger.error("Error while submitting when connecting to YouTube API: " + err);
} else {
//get all segments for this video and user
const allSubmittedByUser = await db.prepare('all', `SELECT "startTime", "endTime" FROM "sponsorTimes" WHERE "userID" = ? and "videoID" = ? and "votes" > -1`, [userID, videoID]);
const allSegmentTimes = [];
if (allSubmittedByUser !== undefined) {
//add segments the user has previously submitted
for (const segmentInfo of allSubmittedByUser) {
allSegmentTimes.push([parseFloat(segmentInfo.startTime), parseFloat(segmentInfo.endTime)]);
}
}
//add segments they are trying to add in this submission
for (let i = 0; i < segments.length; i++) {
let startTime = parseFloat(segments[i].segment[0]);
let endTime = parseFloat(segments[i].segment[1]);
allSegmentTimes.push([startTime, endTime]);
}
//merge all the times into non-overlapping arrays
const allSegmentsSorted = mergeTimeSegments(allSegmentTimes.sort(function (a, b) {
return a[0] - b[0] || a[1] - b[1];
}));
let videoDuration = data.items[0].contentDetails.duration;
videoDuration = isoDurations.toSeconds(isoDurations.parse(videoDuration));
if (videoDuration != 0) {
let allSegmentDuration = 0;
//sum all segment times together
allSegmentsSorted.forEach(segmentInfo => allSegmentDuration += segmentInfo[1] - segmentInfo[0]);
if (allSegmentDuration > (videoDuration / 100) * 80) {
// Reject submission if all segments combine are over 80% of the video
res.status(400).send("Total length of your submitted segments are over 80% of the video.");
return;
}
}
}
}
for (const segmentInfo of segments) {
//this can just be a hash of the data
//it's better than generating an actual UUID like what was used before
//also better for duplication checking
const UUID = getSubmissionUUID(videoID, segmentInfo.category, userID, segmentInfo.segment[0], segmentInfo.segment[1]);
const UUID = getSubmissionUUID(videoID, segmentInfo.category, userID, parseFloat(segmentInfo.segment[0]), parseFloat(segmentInfo.segment[1]));
const hashedVideoID = getHash(videoID, 1);
const startingLocked = isVIP ? 1 : 0;
try {
await db.prepare('run', `INSERT INTO "sponsorTimes"
("videoID", "startTime", "endTime", "votes", "locked", "UUID", "userID", "timeSubmitted", "views", "category", "shadowHidden", "hashedVideoID")
VALUES(?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)`, [
videoID, segmentInfo.segment[0], segmentInfo.segment[1], startingVotes, startingLocked, UUID, userID, timeSubmitted, 0, segmentInfo.category, shadowBanned, getHash(videoID, 1),
("videoID", "startTime", "endTime", "votes", "locked", "UUID", "userID", "timeSubmitted", "views", "category", "service", "videoDuration", "shadowHidden", "hashedVideoID")
VALUES(?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)`, [
videoID, segmentInfo.segment[0], segmentInfo.segment[1], startingVotes, startingLocked, UUID, userID, timeSubmitted, 0, segmentInfo.category, service, videoDuration, shadowBanned, hashedVideoID,
],
);
@@ -502,9 +530,10 @@ export async function postSkipSegments(req: Request, res: Response) {
// Clear redis cache for this video
redis.delAsync(skipSegmentsKey(videoID));
redis.delAsync(skipSegmentsHashKey(hashedVideoID, service));
} catch (err) {
//a DB change probably occurred
res.sendStatus(502);
res.sendStatus(500);
Logger.error("Error when putting sponsorTime in the DB: " + videoID + ", " + segmentInfo.segment[0] + ", " +
segmentInfo.segment[1] + ", " + userID + ", " + segmentInfo.category + ". " + err);
@@ -529,7 +558,7 @@ export async function postSkipSegments(req: Request, res: Response) {
res.json(newSegments);
for (let i = 0; i < segments.length; i++) {
sendWebhooks(userID, videoID, UUIDs[i], segments[i]);
sendWebhooks(apiVideoInfo, userID, videoID, UUIDs[i], segments[i], service);
}
}
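A sketch of a submission that exercises the new fields handled above; userID and videoID are example values, and the segment times are strings because IncomingSegment.segment is typed as string[].

// Sketch: POST /api/skipSegments with the new service and videoDuration fields.
fetch('https://sponsor.ajay.app/api/skipSegments', {
    method: 'POST',
    headers: { 'Content-Type': 'application/json' },
    body: JSON.stringify({
        userID: 'someLocalUserID',
        videoID: 'dQw4w9WgXcQ',
        service: 'YouTube',             // invalid values fall back to YouTube
        videoDuration: 212.06,          // overridden if the YouTube API disagrees by more than 2 seconds
        segments: [{ segment: ['71.3', '86.9'], category: 'sponsor' }],
    }),
});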


@@ -12,8 +12,8 @@ import {getHash} from '../utils/getHash';
import {config} from '../config';
import { UserID } from '../types/user.model';
import redis from '../utils/redis';
import { skipSegmentsKey } from '../middleware/redisKeys';
import { VideoID } from '../types/segments.model';
import { skipSegmentsHashKey, skipSegmentsKey } from '../middleware/redisKeys';
import { Category, HashedIP, IPAddress, SegmentUUID, Service, VideoID, VideoIDHash } from '../types/segments.model';
const voteTypes = {
normal: 0,
@@ -147,8 +147,8 @@ async function sendWebhooks(voteData: VoteData) {
}
}
async function categoryVote(UUID: string, userID: string, isVIP: boolean, isOwnSubmission: boolean, category: string
, hashedIP: string, finalResponse: FinalResponse, res: Response) {
async function categoryVote(UUID: SegmentUUID, userID: UserID, isVIP: boolean, isOwnSubmission: boolean, category: Category
, hashedIP: HashedIP, finalResponse: FinalResponse, res: Response) {
// Check if they've already made a vote
const usersLastVoteInfo = await privateDB.prepare('get', `select count(*) as votes, category from "categoryVotes" where "UUID" = ? and "userID" = ? group by category`, [UUID, userID]);
@@ -158,8 +158,9 @@ async function categoryVote(UUID: string, userID: string, isVIP: boolean, isOwnS
return;
}
const currentCategory = await db.prepare('get', `select category from "sponsorTimes" where "UUID" = ?`, [UUID]);
if (!currentCategory) {
const videoInfo = (await db.prepare('get', `SELECT "category", "videoID", "hashedVideoID", "service" FROM "sponsorTimes" WHERE "UUID" = ?`,
[UUID])) as {category: Category, videoID: VideoID, hashedVideoID: VideoIDHash, service: Service};
if (!videoInfo) {
// Submission doesn't exist
res.status(400).send("Submission doesn't exist.");
return;
@@ -175,52 +176,57 @@ async function categoryVote(UUID: string, userID: string, isVIP: boolean, isOwnS
const timeSubmitted = Date.now();
const voteAmount = isVIP ? 500 : 1;
const ableToVote = isVIP || finalResponse.finalStatus === 200 || true;
// Add the vote
if ((await db.prepare('get', `select count(*) as count from "categoryVotes" where "UUID" = ? and category = ?`, [UUID, category])).count > 0) {
// Update the already existing db entry
await db.prepare('run', `update "categoryVotes" set "votes" = "votes" + ? where "UUID" = ? and "category" = ?`, [voteAmount, UUID, category]);
} else {
// Add a db entry
await db.prepare('run', `insert into "categoryVotes" ("UUID", "category", "votes") values (?, ?, ?)`, [UUID, category, voteAmount]);
if (ableToVote) {
// Add the vote
if ((await db.prepare('get', `select count(*) as count from "categoryVotes" where "UUID" = ? and category = ?`, [UUID, category])).count > 0) {
// Update the already existing db entry
await db.prepare('run', `update "categoryVotes" set "votes" = "votes" + ? where "UUID" = ? and "category" = ?`, [voteAmount, UUID, category]);
} else {
// Add a db entry
await db.prepare('run', `insert into "categoryVotes" ("UUID", "category", "votes") values (?, ?, ?)`, [UUID, category, voteAmount]);
}
// Add the info into the private db
if (usersLastVoteInfo?.votes > 0) {
// Reverse the previous vote
await db.prepare('run', `update "categoryVotes" set "votes" = "votes" - ? where "UUID" = ? and "category" = ?`, [voteAmount, UUID, usersLastVoteInfo.category]);
await privateDB.prepare('run', `update "categoryVotes" set "category" = ?, "timeSubmitted" = ?, "hashedIP" = ? where "userID" = ? and "UUID" = ?`, [category, timeSubmitted, hashedIP, userID, UUID]);
} else {
await privateDB.prepare('run', `insert into "categoryVotes" ("UUID", "userID", "hashedIP", "category", "timeSubmitted") values (?, ?, ?, ?, ?)`, [UUID, userID, hashedIP, category, timeSubmitted]);
}
// See if the submissions category is ready to change
const currentCategoryInfo = await db.prepare("get", `select votes from "categoryVotes" where "UUID" = ? and category = ?`, [UUID, videoInfo.category]);
const submissionInfo = await db.prepare("get", `SELECT "userID", "timeSubmitted", "votes" FROM "sponsorTimes" WHERE "UUID" = ?`, [UUID]);
const isSubmissionVIP = submissionInfo && await isUserVIP(submissionInfo.userID);
const startingVotes = isSubmissionVIP ? 10000 : 1;
// Change this value from 1 in the future to make it harder to change categories
// Done this way without ORs in case the value is zero
const currentCategoryCount = (currentCategoryInfo === undefined || currentCategoryInfo === null) ? startingVotes : currentCategoryInfo.votes;
// Add submission as vote
if (!currentCategoryInfo && submissionInfo) {
await db.prepare("run", `insert into "categoryVotes" ("UUID", "category", "votes") values (?, ?, ?)`, [UUID, videoInfo.category, currentCategoryCount]);
await privateDB.prepare("run", `insert into "categoryVotes" ("UUID", "userID", "hashedIP", "category", "timeSubmitted") values (?, ?, ?, ?, ?)`, [UUID, submissionInfo.userID, "unknown", videoInfo.category, submissionInfo.timeSubmitted]);
}
const nextCategoryCount = (nextCategoryInfo?.votes || 0) + voteAmount;
//TODO: In the future, raise this number from zero to make it harder to change categories
// VIPs change it every time
if (nextCategoryCount - currentCategoryCount >= Math.max(Math.ceil(submissionInfo?.votes / 2), 2) || isVIP || isOwnSubmission) {
// Replace the category
await db.prepare('run', `update "sponsorTimes" set "category" = ? where "UUID" = ?`, [category, UUID]);
}
}
// Add the info into the private db
if (usersLastVoteInfo?.votes > 0) {
// Reverse the previous vote
await db.prepare('run', `update "categoryVotes" set "votes" = "votes" - ? where "UUID" = ? and "category" = ?`, [voteAmount, UUID, usersLastVoteInfo.category]);
await privateDB.prepare('run', `update "categoryVotes" set "category" = ?, "timeSubmitted" = ?, "hashedIP" = ? where "userID" = ? and "UUID" = ?`, [category, timeSubmitted, hashedIP, userID, UUID]);
} else {
await privateDB.prepare('run', `insert into "categoryVotes" ("UUID", "userID", "hashedIP", "category", "timeSubmitted") values (?, ?, ?, ?, ?)`, [UUID, userID, hashedIP, category, timeSubmitted]);
}
// See if the submissions category is ready to change
const currentCategoryInfo = await db.prepare("get", `select votes from "categoryVotes" where "UUID" = ? and category = ?`, [UUID, currentCategory.category]);
const submissionInfo = await db.prepare("get", `SELECT "userID", "timeSubmitted", "votes" FROM "sponsorTimes" WHERE "UUID" = ?`, [UUID]);
const isSubmissionVIP = submissionInfo && await isUserVIP(submissionInfo.userID);
const startingVotes = isSubmissionVIP ? 10000 : 1;
// Change this value from 1 in the future to make it harder to change categories
// Done this way without ORs in case the value is zero
const currentCategoryCount = (currentCategoryInfo === undefined || currentCategoryInfo === null) ? startingVotes : currentCategoryInfo.votes;
// Add submission as vote
if (!currentCategoryInfo && submissionInfo) {
await db.prepare("run", `insert into "categoryVotes" ("UUID", "category", "votes") values (?, ?, ?)`, [UUID, currentCategory.category, currentCategoryCount]);
await privateDB.prepare("run", `insert into "categoryVotes" ("UUID", "userID", "hashedIP", "category", "timeSubmitted") values (?, ?, ?, ?, ?)`, [UUID, submissionInfo.userID, "unknown", currentCategory.category, submissionInfo.timeSubmitted]);
}
const nextCategoryCount = (nextCategoryInfo?.votes || 0) + voteAmount;
//TODO: In the future, raise this number from zero to make it harder to change categories
// VIPs change it every time
if (nextCategoryCount - currentCategoryCount >= Math.max(Math.ceil(submissionInfo?.votes / 2), 2) || isVIP || isOwnSubmission) {
// Replace the category
await db.prepare('run', `update "sponsorTimes" set "category" = ? where "UUID" = ?`, [category, UUID]);
}
clearRedisCache(videoInfo);
res.sendStatus(finalResponse.finalStatus);
}
@@ -230,10 +236,10 @@ export function getUserID(req: Request): UserID {
}
export async function voteOnSponsorTime(req: Request, res: Response) {
const UUID = req.query.UUID as string;
const UUID = req.query.UUID as SegmentUUID;
const paramUserID = getUserID(req);
let type = req.query.type !== undefined ? parseInt(req.query.type as string) : undefined;
const category = req.query.category as string;
const category = req.query.category as Category;
if (UUID === undefined || paramUserID === undefined || (type === undefined && category === undefined)) {
//invalid request
@@ -255,7 +261,7 @@ export async function voteOnSponsorTime(req: Request, res: Response) {
const ip = getIP(req);
//hash the ip 5000 times so no one can get it from the database
const hashedIP = getHash(ip + config.globalSalt);
const hashedIP: HashedIP = getHash((ip + config.globalSalt) as IPAddress);
//check if this user is on the vip list
const isVIP = (await db.prepare('get', `SELECT count(*) as "userCount" FROM "vipUsers" WHERE "userID" = ?`, [nonAnonUserID])).userCount > 0;
@@ -280,13 +286,19 @@ export async function voteOnSponsorTime(req: Request, res: Response) {
return categoryVote(UUID, nonAnonUserID, isVIP, isOwnSubmission, category, hashedIP, finalResponse, res);
}
if (type == 1 && !isVIP && !isOwnSubmission) {
if (type !== undefined && !isVIP && !isOwnSubmission) {
// Check if upvoting hidden segment
const voteInfo = await db.prepare('get', `SELECT votes FROM "sponsorTimes" WHERE "UUID" = ?`, [UUID]);
if (voteInfo && voteInfo.votes <= -2) {
res.status(403).send("Not allowed to upvote segment with too many downvotes unless you are VIP.");
return;
if (type == 1) {
res.status(403).send("Not allowed to upvote segment with too many downvotes unless you are VIP.");
return;
} else if (type == 0) {
// Already downvoted enough, ignore
res.status(200).send();
return;
}
}
}
@@ -350,13 +362,13 @@ export async function voteOnSponsorTime(req: Request, res: Response) {
}
//check if the increment amount should be multiplied (downvotes have more power if there have been many views)
const row = await db.prepare('get', `SELECT "videoID", votes, views FROM "sponsorTimes" WHERE "UUID" = ?`, [UUID]) as
{videoID: VideoID, votes: number, views: number};
const videoInfo = await db.prepare('get', `SELECT "videoID", "hashedVideoID", "service", "votes", "views" FROM "sponsorTimes" WHERE "UUID" = ?`, [UUID]) as
{videoID: VideoID, hashedVideoID: VideoIDHash, service: Service, votes: number, views: number};
if (voteTypeEnum === voteTypes.normal) {
if ((isVIP || isOwnSubmission) && incrementAmount < 0) {
//this user is a vip and a downvote
incrementAmount = -(row.votes + 2 - oldIncrementAmount);
incrementAmount = -(videoInfo.votes + 2 - oldIncrementAmount);
type = incrementAmount;
}
} else if (voteTypeEnum == voteTypes.incorrect) {
@@ -371,7 +383,8 @@ export async function voteOnSponsorTime(req: Request, res: Response) {
const ableToVote = isVIP
|| ((await db.prepare("get", `SELECT "userID" FROM "sponsorTimes" WHERE "userID" = ?`, [nonAnonUserID])) !== undefined
&& (await privateDB.prepare("get", `SELECT "userID" FROM "shadowBannedUsers" WHERE "userID" = ?`, [nonAnonUserID])) === undefined
&& (await privateDB.prepare("get", `SELECT "UUID" FROM "votes" WHERE "UUID" = ? AND "hashedIP" = ? AND "userID" != ?`, [UUID, hashedIP, userID])) === undefined);
&& (await privateDB.prepare("get", `SELECT "UUID" FROM "votes" WHERE "UUID" = ? AND "hashedIP" = ? AND "userID" != ?`, [UUID, hashedIP, userID])) === undefined)
&& finalResponse.finalStatus === 200;
if (ableToVote) {
//update the votes table
@@ -399,8 +412,7 @@ export async function voteOnSponsorTime(req: Request, res: Response) {
await db.prepare('run', 'UPDATE "sponsorTimes" SET locked = 0 WHERE "UUID" = ?', [UUID]);
}
// Clear redis cache for this video
redis.delAsync(skipSegmentsKey(row?.videoID));
clearRedisCache(videoInfo);
//for each positive vote, see if a hidden submission can be shown again
if (incrementAmount > 0 && voteTypeEnum === voteTypes.normal) {
@@ -437,7 +449,7 @@ export async function voteOnSponsorTime(req: Request, res: Response) {
voteTypeEnum,
isVIP,
isOwnSubmission,
row,
row: videoInfo,
category,
incrementAmount,
oldIncrementAmount,
@@ -449,4 +461,11 @@ export async function voteOnSponsorTime(req: Request, res: Response) {
res.status(500).json({error: 'Internal error creating segment vote'});
}
}
}
function clearRedisCache(videoInfo: { videoID: VideoID; hashedVideoID: VideoIDHash; service: Service; }) {
if (videoInfo) {
redis.delAsync(skipSegmentsKey(videoInfo.videoID));
redis.delAsync(skipSegmentsHashKey(videoInfo.hashedVideoID, videoInfo.service));
}
}
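A sketch of a category vote against the reworked flow above; the UUID and userID are placeholders, the query parameters follow what voteOnSponsorTime reads, and sending it as a POST matches the public API docs.

// Sketch: vote to re-label a segment as selfpromo. With the guard above, an upvote (type=1)
// on a segment at -2 votes or below is rejected, while a downvote (type=0) returns 200 and is ignored.
fetch('https://sponsor.ajay.app/api/voteOnSponsorTime?UUID=someSegmentUUID&userID=someLocalUserID&category=selfpromo', {
    method: 'POST',
});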


@@ -39,6 +39,7 @@ export interface SBSConfig {
redis?: redis.ClientOpts;
maxRewardTimePerSegmentInSeconds?: number;
postgres?: PoolConfig;
dumpDatabase?: DumpDatabase;
}
export interface WebhookConfig {
@@ -62,4 +63,17 @@ export interface PostgresConfig {
createDbIfNotExists: boolean;
enableWalCheckpointNumber: boolean;
postgres: PoolConfig;
}
}
export interface DumpDatabase {
enabled: boolean;
minTimeBetweenMs: number;
appExportPath: string;
postgresExportPath: string;
tables: DumpDatabaseTable[];
}
export interface DumpDatabaseTable {
name: string;
order?: string;
}


@@ -3,15 +3,33 @@ import { SBRecord } from "./lib.model";
export type SegmentUUID = string & { __segmentUUIDBrand: unknown };
export type VideoID = string & { __videoIDBrand: unknown };
export type VideoDuration = number & { __videoDurationBrand: unknown };
export type Category = string & { __categoryBrand: unknown };
export type VideoIDHash = VideoID & HashedValue;
export type IPAddress = string & { __ipAddressBrand: unknown };
export type HashedIP = IPAddress & HashedValue;
// Uncomment as needed
export enum Service {
YouTube = 'YouTube',
PeerTube = 'PeerTube',
// Twitch = 'Twitch',
// Nebula = 'Nebula',
// RSS = 'RSS',
// Corridor = 'Corridor',
// Lbry = 'Lbry'
}
export interface IncomingSegment {
category: Category;
segment: string[];
}
export interface Segment {
category: Category;
segment: number[];
UUID: SegmentUUID;
videoDuration: VideoDuration;
}
export enum Visibility {
@@ -28,6 +46,7 @@ export interface DBSegment {
locked: boolean;
shadowHidden: Visibility;
videoID: VideoID;
videoDuration: VideoDuration;
hashedVideoID: VideoIDHash;
}
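Since the brands only exist at compile time, values are plain strings that get cast. A short sketch of how the routes above normalize an incoming service value against the new enum:

// Sketch: validating a raw query value against the Service enum (mirrors the checks in the routes above).
const raw: string = 'PeerTube';
const service: Service = Object.values(Service).some((val) => val === raw)
    ? raw as Service
    : Service.YouTube;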