Ajay Ramachandran
2021-12-22 23:26:37 +01:00
26 changed files with 564 additions and 118 deletions

View File

@@ -1,9 +1,13 @@
name: SQLite CI
on: [push, pull_request]
on:
push:
branches:
- master
pull_request:
jobs:
build:
test:
name: Run Tests with SQLite
runs-on: ubuntu-latest
@@ -12,7 +16,6 @@ jobs:
- uses: actions/checkout@v2
- uses: actions/setup-node@v2
- run: npm install
- name: Run Tests
timeout-minutes: 5
run: npm test

View File

@@ -1,9 +1,13 @@
name: Linting
on: [push, pull_request]
on:
push:
branches:
- master
pull_request:
jobs:
build:
lint:
name: Lint with ESLint
runs-on: ubuntu-latest
@@ -12,7 +16,6 @@ jobs:
- uses: actions/checkout@v2
- uses: actions/setup-node@v2
- run: npm install
- name: Run Tests
timeout-minutes: 5
run: npm run lint

View File

@@ -1,12 +1,17 @@
name: create-sqlite-base
on:
push:
branches:
- master
paths:
- databases/**
jobs:
make-base-db:
name: Generate SQLite base .db
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v2
- uses: actions/setup-node@v2

View File

@@ -1,10 +1,14 @@
name: PostgreSQL CI
name: PostgreSQL + Redis CI
on: [push, pull_request]
on:
push:
branches:
- master
pull_request:
jobs:
build:
name: Run Tests with PostgreSQL
test:
name: Run Tests with PostgreSQL and Redis
runs-on: ubuntu-latest
steps:

View File

@@ -1,14 +1,12 @@
# .github/workflows/take.yml
name: Assign issue to contributor
on:
issue_comment:
on: [issue_comment]
jobs:
assign:
name: Take an issue
runs-on: ubuntu-latest
steps:
- name: take the issue
uses: bdougie/take-action@main
env:
GITHUB_TOKEN: ${{ github.token }}
- name: take the issue
uses: bdougie/take-action@main
env:
GITHUB_TOKEN: ${{ github.token }}

View File

@@ -16,6 +16,10 @@
"host": "localhost",
"port": 5432
},
"redis": {
"host": "localhost",
"port": 6379
},
"createDatabaseIfNotExist": true,
"schemaFolder": "./databases",
"dbSchema": "./databases/_sponsorTimes.db.sql",

View File

@@ -6,4 +6,8 @@ services:
- POSTGRES_USER=${PG_USER}
- POSTGRES_PASSWORD=${PG_PASS}
ports:
- 5432:5432
- 5432:5432
redis:
image: redis:alpine
ports:
- 6379:6379

package-lock.json (generated)
View File

@@ -16,6 +16,7 @@
"express": "^4.17.1",
"express-promise-router": "^4.1.1",
"express-rate-limit": "^5.5.1",
"lodash": "^4.17.21",
"pg": "^8.7.1",
"redis": "^3.1.2",
"sync-mysql": "^3.0.1"
@@ -25,6 +26,7 @@
"@types/cron": "^1.7.3",
"@types/express": "^4.17.13",
"@types/express-rate-limit": "^5.1.3",
"@types/lodash": "^4.14.178",
"@types/mocha": "^9.0.0",
"@types/node": "^16.11.11",
"@types/pg": "^8.6.1",
@@ -317,6 +319,12 @@
"integrity": "sha512-qcUXuemtEu+E5wZSJHNxUXeCZhAfXKQ41D+duX+VYPde7xyEVZci+/oXKJL13tnRs9lR2pr4fod59GT6/X1/yQ==",
"dev": true
},
"node_modules/@types/lodash": {
"version": "4.14.178",
"resolved": "https://registry.npmjs.org/@types/lodash/-/lodash-4.14.178.tgz",
"integrity": "sha512-0d5Wd09ItQWH1qFbEyQ7oTQ3GZrMfth5JkbN3EvTKLXcHLRDSXeLnlvlOn0wvxVIwK5o2M8JzP/OWz7T3NRsbw==",
"dev": true
},
"node_modules/@types/mime": {
"version": "1.3.2",
"resolved": "https://registry.npmjs.org/@types/mime/-/mime-1.3.2.tgz",
@@ -5337,6 +5345,12 @@
"integrity": "sha512-qcUXuemtEu+E5wZSJHNxUXeCZhAfXKQ41D+duX+VYPde7xyEVZci+/oXKJL13tnRs9lR2pr4fod59GT6/X1/yQ==",
"dev": true
},
"@types/lodash": {
"version": "4.14.178",
"resolved": "https://registry.npmjs.org/@types/lodash/-/lodash-4.14.178.tgz",
"integrity": "sha512-0d5Wd09ItQWH1qFbEyQ7oTQ3GZrMfth5JkbN3EvTKLXcHLRDSXeLnlvlOn0wvxVIwK5o2M8JzP/OWz7T3NRsbw==",
"dev": true
},
"@types/mime": {
"version": "1.3.2",
"resolved": "https://registry.npmjs.org/@types/mime/-/mime-1.3.2.tgz",

View File

@@ -8,6 +8,7 @@
"dev": "nodemon",
"dev:bash": "nodemon -x 'npm test ; npm start'",
"postgres:docker": "docker run --rm -p 5432:5432 -e POSTGRES_USER=ci_db_user -e POSTGRES_PASSWORD=ci_db_pass postgres:alpine",
"redis:docker": "docker run --rm -p 6379:6379 redis:alpine",
"start": "ts-node src/index.ts",
"tsc": "tsc -p tsconfig.json",
"lint": "eslint src test",
@@ -23,6 +24,7 @@
"express": "^4.17.1",
"express-promise-router": "^4.1.1",
"express-rate-limit": "^5.5.1",
"lodash": "^4.17.21",
"pg": "^8.7.1",
"redis": "^3.1.2",
"sync-mysql": "^3.0.1"
@@ -32,6 +34,7 @@
"@types/cron": "^1.7.3",
"@types/express": "^4.17.13",
"@types/express-rate-limit": "^5.1.3",
"@types/lodash": "^4.14.178",
"@types/mocha": "^9.0.0",
"@types/node": "^16.11.11",
"@types/pg": "^8.6.1",

View File

@@ -46,7 +46,6 @@ import { getChapterNames } from "./routes/getChapterNames";
import { postRating } from "./routes/ratings/postRating";
import { getRating } from "./routes/ratings/getRating";
import { postClearCache as ratingPostClearCache } from "./routes/ratings/postClearCache";
import path from "path";
export function createServer(callback: () => void): Server {
// Create a service (the app object is just a callback).

View File

@@ -1,8 +1,9 @@
import { Request, Response } from "express";
import { db } from "../databases/databases";
import { ActionType, Category, DBSegment, Service, VideoID } from "../types/segments.model";
import { ActionType, Category, DBSegment, Service, VideoID, SortableFields } from "../types/segments.model";
import { getService } from "../utils/getService";
const segmentsPerPage = 10;
const maxSegmentsPerPage = 100;
const defaultSegmentsPerPage = 10;
type searchSegmentResponse = {
segmentCount: number,
@@ -19,6 +20,42 @@ function getSegmentsFromDBByVideoID(videoID: VideoID, service: Service): Promise
) as Promise<DBSegment[]>;
}
function getSortField<T extends string>(...value: T[]): SortableFields {
const fieldByName = Object.values(SortableFields).reduce((acc, fieldName) => {
acc[fieldName.toLowerCase()] = fieldName;
return acc;
}, {} as Record<string, SortableFields>);
for (const name of value) {
if (name?.trim()?.toLowerCase() in fieldByName) {
return fieldByName[name.trim().toLowerCase()];
}
}
return SortableFields.timeSubmitted;
}
function getLimit<T extends string>(value: T): number {
const limit = Number(value);
if (Number.isInteger(limit)
&& limit >= 1
&& limit <= maxSegmentsPerPage) {
return limit;
}
return defaultSegmentsPerPage;
}
function getPage<T extends string>(value: T): number {
const page = Number(value);
if (Number.isInteger(page) && page >= 0) {
return page;
}
return 0;
}
/**
*
* Returns what would be sent to the client.
@@ -62,8 +99,10 @@ async function handleGetSegments(req: Request, res: Response): Promise<searchSeg
const service = getService(req.query.service, req.body.service);
let page: number = req.query.page ?? req.body.page ?? 0;
page = Number(page);
const page: number = getPage(req.query.page ?? req.body.page);
const limit: number = getLimit(req.query.limit ?? req.body.limit);
const sortBy: SortableFields = getSortField(req.query.sortBy, req.body.sortBy);
const sortDir: string = req.query.sortDir ?? req.body.sortDir ?? "asc";
const minVotes: number = req.query.minVotes ?? req.body.minVotes ?? -3;
const maxVotes: number = req.query.maxVotes ?? req.body.maxVotes ?? Infinity;
@@ -99,11 +138,11 @@ async function handleGetSegments(req: Request, res: Response): Promise<searchSeg
return false;
}
return filterSegments(segments, page, filters);
return filterSegments(segments, filters, page, limit, sortBy, sortDir);
}
function filterSegments(segments: DBSegment[], page: number, filters: Record<string, any>) {
const startIndex = 0+(page*segmentsPerPage);
const endIndex = segmentsPerPage+(page*segmentsPerPage);
function filterSegments(segments: DBSegment[], filters: Record<string, any>, page: number, limit: number, sortBy: SortableFields, sortDir: string) {
const startIndex = 0+(page*limit);
const endIndex = limit+(page*limit);
const filteredSegments = segments.filter((segment) =>
!((segment.votes < filters.minVotes || segment.votes > filters.maxVotes)
|| (segment.views < filters.minViews || segment.views > filters.maxViews)
@@ -114,10 +153,27 @@ function filterSegments(segments: DBSegment[], page: number, filters: Record<str
// return false if any of the conditions are met
// return true if none of the conditions are met
);
if (sortBy !== SortableFields.timeSubmitted) {
filteredSegments.sort((a,b) => {
const key = sortDir === "desc" ? 1 : -1;
if (a[sortBy] < b[sortBy]) {
return key;
}
if (a[sortBy] > b[sortBy]) {
return -key;
}
return 0;
});
}
return {
segmentCount: filteredSegments.length,
page,
segments: filteredSegments.slice(startIndex, endIndex)
};
}
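Taken together, the new helpers clamp untrusted query input before pagination. A standalone restatement for illustration, using the constants defined above:

```ts
// Restated from getSearchSegments.ts for illustration.
const maxSegmentsPerPage = 100;
const defaultSegmentsPerPage = 10;

function getLimit(value: string): number {
    const limit = Number(value);
    return Number.isInteger(limit) && limit >= 1 && limit <= maxSegmentsPerPage
        ? limit
        : defaultSegmentsPerPage;
}

function getPage(value: string): number {
    const page = Number(value);
    return Number.isInteger(page) && page >= 0 ? page : 0;
}

console.log(getLimit("2"));    // 2
console.log(getLimit("0"));    // 10 (below minimum -> default)
console.log(getLimit("2000")); // 10 (above maximum -> default)
console.log(getPage("hello")); // 0  (NaN -> first page)
// filterSegments then slices items [page * limit, page * limit + limit).
```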

View File

@@ -1,7 +1,8 @@
import { Request, Response } from "express";
import { partition } from "lodash"
import { config } from "../config";
import { db, privateDB } from "../databases/databases";
import { skipSegmentsHashKey, skipSegmentsKey } from "../utils/redisKeys";
import { skipSegmentsHashKey, skipSegmentsKey, skipSegmentGroupsKey } from "../utils/redisKeys";
import { SBRecord } from "../types/lib.model";
import { ActionType, Category, CategoryActionType, DBSegment, HashedIP, IPAddress, OverlappingSegmentGroup, Segment, SegmentCache, SegmentUUID, Service, VideoData, VideoID, VideoIDHash, Visibility, VotableObject } from "../types/segments.model";
import { getCategoryActionType } from "../utils/categoryInfo";
@@ -13,7 +14,7 @@ import { getReputation } from "../utils/reputation";
import { getService } from "../utils/getService";
async function prepareCategorySegments(req: Request, videoID: VideoID, category: Category, segments: DBSegment[], cache: SegmentCache = { shadowHiddenSegmentIPs: {} }): Promise<Segment[]> {
async function prepareCategorySegments(req: Request, videoID: VideoID, service: Service, segments: DBSegment[], cache: SegmentCache = { shadowHiddenSegmentIPs: {} }, useCache: boolean): Promise<Segment[]> {
const shouldFilter: boolean[] = await Promise.all(segments.map(async (segment) => {
if (segment.votes < -1 && !segment.required) {
return false; //too untrustworthy, just ignore it
@@ -39,14 +40,16 @@ async function prepareCategorySegments(req: Request, videoID: VideoID, category:
cache.userHashedIP = await getHashCache((getIP(req) + config.globalSalt) as IPAddress);
}
//if this isn't their ip, don't send it to them
return cache.shadowHiddenSegmentIPs[videoID][segment.timeSubmitted]?.some(
const shouldShadowHide = cache.shadowHiddenSegmentIPs[videoID][segment.timeSubmitted]?.some(
(shadowHiddenSegment) => shadowHiddenSegment.hashedIP === cache.userHashedIP) ?? false;
if (shouldShadowHide) useCache = false;
return shouldShadowHide;
}));
const filteredSegments = segments.filter((_, index) => shouldFilter[index]);
const maxSegments = getCategoryActionType(category) === CategoryActionType.Skippable ? Infinity : 1;
return (await chooseSegments(filteredSegments, maxSegments)).map((chosenSegment) => ({
return (await chooseSegments(videoID, service, filteredSegments, useCache)).map((chosenSegment) => ({
category: chosenSegment.category,
actionType: chosenSegment.actionType,
segment: [chosenSegment.startTime, chosenSegment.endTime],
@@ -62,28 +65,21 @@ async function prepareCategorySegments(req: Request, videoID: VideoID, category:
async function getSegmentsByVideoID(req: Request, videoID: VideoID, categories: Category[],
actionTypes: ActionType[], requiredSegments: SegmentUUID[], service: Service): Promise<Segment[]> {
const cache: SegmentCache = { shadowHiddenSegmentIPs: {} };
const segments: Segment[] = [];
try {
categories = categories.filter((category) => !/[^a-z|_|-]/.test(category));
if (categories.length === 0) return null;
const segmentsByCategory: SBRecord<Category, DBSegment[]> = (await getSegmentsFromDBByVideoID(videoID, service))
.filter((segment: DBSegment) => categories.includes(segment?.category) && actionTypes.includes(segment?.actionType))
.reduce((acc: SBRecord<Category, DBSegment[]>, segment: DBSegment) => {
const segments: DBSegment[] = (await getSegmentsFromDBByVideoID(videoID, service))
.map((segment: DBSegment) => {
if (filterRequiredSegments(segment.UUID, requiredSegments)) segment.required = true;
acc[segment.category] ??= [];
acc[segment.category].push(segment);
return acc;
return segment;
}, {});
for (const [category, categorySegments] of Object.entries(segmentsByCategory)) {
segments.push(...(await prepareCategorySegments(req, videoID, category as Category, categorySegments, cache)));
}
const canUseCache = requiredSegments.length === 0;
const processedSegments: Segment[] = await prepareCategorySegments(req, videoID, service, segments, cache, canUseCache);
return segments;
return processedSegments.filter((segment: Segment) => categories.includes(segment?.category) && actionTypes.includes(segment?.actionType));
} catch (err) {
if (err) {
Logger.error(err as string);
@@ -98,34 +94,37 @@ async function getSegmentsByHash(req: Request, hashedVideoIDPrefix: VideoIDHash,
const segments: SBRecord<VideoID, VideoData> = {};
try {
type SegmentWithHashPerVideoID = SBRecord<VideoID, {hash: VideoIDHash, segmentPerCategory: SBRecord<Category, DBSegment[]>}>;
type SegmentWithHashPerVideoID = SBRecord<VideoID, { hash: VideoIDHash, segments: DBSegment[] }>;
categories = categories.filter((category) => !(/[^a-z|_|-]/.test(category)));
if (categories.length === 0) return null;
const segmentPerVideoID: SegmentWithHashPerVideoID = (await getSegmentsFromDBByHash(hashedVideoIDPrefix, service))
.filter((segment: DBSegment) => categories.includes(segment?.category) && actionTypes.includes(segment?.actionType))
.reduce((acc: SegmentWithHashPerVideoID, segment: DBSegment) => {
acc[segment.videoID] = acc[segment.videoID] || {
hash: segment.hashedVideoID,
segmentPerCategory: {}
segments: []
};
if (filterRequiredSegments(segment.UUID, requiredSegments)) segment.required = true;
acc[segment.videoID].segmentPerCategory[segment.category] ??= [];
acc[segment.videoID].segmentPerCategory[segment.category].push(segment);
acc[segment.videoID].segments ??= [];
acc[segment.videoID].segments.push(segment);
return acc;
}, {});
for (const [videoID, videoData] of Object.entries(segmentPerVideoID)) {
segments[videoID] = {
const data: VideoData = {
hash: videoData.hash,
segments: [],
};
for (const [category, segmentPerCategory] of Object.entries(videoData.segmentPerCategory)) {
segments[videoID].segments.push(...(await prepareCategorySegments(req, videoID as VideoID, category as Category, segmentPerCategory, cache)));
const canUseCache = requiredSegments.length === 0;
data.segments = (await prepareCategorySegments(req, videoID as VideoID, service, videoData.segments, cache, canUseCache))
.filter((segment: Segment) => categories.includes(segment?.category) && actionTypes.includes(segment?.actionType));
if (data.segments.length > 0) {
segments[videoID] = data;
}
}
@@ -164,10 +163,11 @@ async function getSegmentsFromDBByVideoID(videoID: VideoID, service: Service): P
return await QueryCacher.get(fetchFromDB, skipSegmentsKey(videoID, service));
}
//gets a weighted random choice from the choices array based on their `votes` property.
//amountOfChoices specifies the maximum amount of choices to return, 1 or more.
//choices are unique
function getWeightedRandomChoice<T extends VotableObject>(choices: T[], amountOfChoices: number): T[] {
// Gets a weighted random choice from the choices array based on their `votes` property.
// amountOfChoices specifies the maximum amount of choices to return, 1 or more.
// Choices are unique
// If a predicate is given, only the choices matching it go through the weighted draw; the rest are always included
function getWeightedRandomChoice<T extends VotableObject>(choices: T[], amountOfChoices: number, predicate?: (choice: T) => void): T[] {
//trivial case: no need to go through the whole process
if (amountOfChoices >= choices.length) {
return choices;
@@ -177,9 +177,17 @@ function getWeightedRandomChoice<T extends VotableObject>(choices: T[], amountOf
weight: number
}
let forceIncludedChoices: T[] = [];
let filteredChoices = choices;
if (predicate) {
const splitArray = partition(choices, predicate);
filteredChoices = splitArray[0];
forceIncludedChoices = splitArray[1];
}
//assign a weight to each choice
let totalWeight = 0;
const choicesWithWeights: TWithWeight[] = choices.map(choice => {
const choicesWithWeights: TWithWeight[] = filteredChoices.map(choice => {
const boost = Math.min(choice.reputation, 4);
//The 3 makes -2 the minimum votes before being ignored completely
@@ -190,8 +198,13 @@ function getWeightedRandomChoice<T extends VotableObject>(choices: T[], amountOf
return { ...choice, weight };
});
// Nothing to filter for
if (amountOfChoices >= choicesWithWeights.length) {
return choices;
}
//iterate and find amountOfChoices choices
const chosen = [];
const chosen = [...forceIncludedChoices];
while (amountOfChoices-- > 0) {
//weighted random draw of one element of choices
const randomNumber = Math.random() * totalWeight;
@@ -210,11 +223,25 @@ function getWeightedRandomChoice<T extends VotableObject>(choices: T[], amountOf
return chosen;
}
async function chooseSegments(videoID: VideoID, service: Service, segments: DBSegment[], useCache: boolean): Promise<DBSegment[]> {
const fetchData = async () => await buildSegmentGroups(segments);
const groups = useCache
? await QueryCacher.get(fetchData, skipSegmentGroupsKey(videoID, service))
: await fetchData();
// Filter for only 1 item for POI categories
return getWeightedRandomChoice(groups, 1, (choice) => getCategoryActionType(choice.segments[0].category) === CategoryActionType.POI)
.map(//randomly choose 1 good segment per group and return them
group => getWeightedRandomChoice(group.segments, 1)[0]
);
}
//This function will find segments that are contained inside of each other, called similar segments
//Only one similar time will be returned, randomly generated based on the sqrt of votes.
//This allows new less voted items to still sometimes appear to give them a chance at getting votes.
//Segments with less than -1 votes are already ignored before this function is called
async function chooseSegments(segments: DBSegment[], max: number): Promise<DBSegment[]> {
async function buildSegmentGroups(segments: DBSegment[]): Promise<OverlappingSegmentGroup[]> {
//Create groups of segments that are similar to each other
//Segments must be sorted by their startTime so that we can build groups chronologically:
//1. As long as the segments' startTime fall inside the currentGroup, we keep adding them to that group
@@ -251,6 +278,7 @@ async function chooseSegments(segments: DBSegment[], max: number): Promise<DBSeg
cursor = Math.max(cursor, segment.endTime);
}
overlappingSegmentsGroups = splitPercentOverlap(overlappingSegmentsGroups);
overlappingSegmentsGroups.forEach((group) => {
if (group.required) {
// Required beats locked
@@ -262,13 +290,8 @@ async function chooseSegments(segments: DBSegment[], max: number): Promise<DBSeg
group.reputation = group.reputation / group.segments.length;
});
overlappingSegmentsGroups = splitPercentOverlap(overlappingSegmentsGroups);
//if there are too many groups, find the best ones
return getWeightedRandomChoice(overlappingSegmentsGroups, max).map(
//randomly choose 1 good segment per group and return them
group => getWeightedRandomChoice(group.segments, 1)[0],
);
return overlappingSegmentsGroups;
}
function splitPercentOverlap(groups: OverlappingSegmentGroup[]): OverlappingSegmentGroup[] {
@@ -277,12 +300,15 @@ function splitPercentOverlap(groups: OverlappingSegmentGroup[]): OverlappingSegm
group.segments.forEach((segment) => {
const bestGroup = result.find((group) => {
// At least one segment in the group must have high % overlap or the same action type
// Since POI segments will always have 0 overlap, they will always be in their own groups
return group.segments.some((compareSegment) => {
const overlap = Math.min(segment.endTime, compareSegment.endTime) - Math.max(segment.startTime, compareSegment.startTime);
const overallDuration = Math.max(segment.endTime, compareSegment.endTime) - Math.min(segment.startTime, compareSegment.startTime);
const overlapPercent = overlap / overallDuration;
return (overlapPercent > 0 && segment.actionType === compareSegment.actionType && segment.actionType !== ActionType.Chapter)
|| overlapPercent >= 0.6
return (overlapPercent > 0 && segment.actionType === compareSegment.actionType && segment.category === compareSegment.category && segment.actionType !== ActionType.Chapter)
|| (overlapPercent >= 0.6 && segment.actionType !== compareSegment.actionType && segment.category === compareSegment.category)
|| (overlapPercent >= 0.8 && segment.actionType === compareSegment.actionType && segment.category !== compareSegment.category
&& segment.category !== "music_offtopic" && compareSegment.category !== "music_offtopic")
|| (overlapPercent >= 0.8 && segment.actionType === ActionType.Chapter && compareSegment.actionType === ActionType.Chapter);
});
});
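The POI special-casing above leans on lodash's `partition`, which splits an array into `[matching, rest]`. A small self-contained sketch of the force-include pattern used in `getWeightedRandomChoice`, with invented sample data:

```ts
import { partition } from "lodash";

// partition returns [elements matching the predicate, all remaining elements].
const groups = [
    { category: "poi_highlight", votes: 5 },
    { category: "sponsor", votes: 2 },
    { category: "poi_highlight", votes: 1 },
];

const [poiGroups, otherGroups] = partition(groups, (g) => g.category === "poi_highlight");
// poiGroups   -> the two poi_highlight entries (the weighted draw picks 1 of them)
// otherGroups -> [{ category: "sponsor", ... }] (force-included unchanged)
console.log(poiGroups.length, otherGroups.length); // 2 1
```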

View File

@@ -16,14 +16,16 @@ async function generateTopUsersStats(sortBy: string, categoryStatsEnabled = fals
let additionalFields = "";
if (categoryStatsEnabled) {
additionalFields += `SUM(CASE WHEN category = 'sponsor' THEN 1 ELSE 0 END) as "categorySponsor",
additionalFields += `
SUM(CASE WHEN category = 'sponsor' THEN 1 ELSE 0 END) as "categorySumSponsor",
SUM(CASE WHEN category = 'intro' THEN 1 ELSE 0 END) as "categorySumIntro",
SUM(CASE WHEN category = 'outro' THEN 1 ELSE 0 END) as "categorySumOutro",
SUM(CASE WHEN category = 'interaction' THEN 1 ELSE 0 END) as "categorySumInteraction",
SUM(CASE WHEN category = 'selfpromo' THEN 1 ELSE 0 END) as "categorySelfpromo",
SUM(CASE WHEN category = 'music_offtopic' THEN 1 ELSE 0 END) as "categoryMusicOfftopic",
SUM(CASE WHEN category = 'selfpromo' THEN 1 ELSE 0 END) as "categorySumSelfpromo",
SUM(CASE WHEN category = 'music_offtopic' THEN 1 ELSE 0 END) as "categorySumMusicOfftopic",
SUM(CASE WHEN category = 'preview' THEN 1 ELSE 0 END) as "categorySumPreview",
SUM(CASE WHEN category = 'poi_highlight' THEN 1 ELSE 0 END) as "categorySumHighlight", `;
SUM(CASE WHEN category = 'poi_highlight' THEN 1 ELSE 0 END) as "categorySumHighlight",
SUM(CASE WHEN category = 'filler' THEN 1 ELSE 0 END) as "categorySumFiller",`;
}
const rows = await db.prepare("all", `SELECT COUNT(*) as "totalSubmissions", SUM(views) as "viewCount",
@@ -42,14 +44,15 @@ async function generateTopUsersStats(sortBy: string, categoryStatsEnabled = fals
minutesSaved[i] = rows[i].minutesSaved;
if (categoryStatsEnabled) {
categoryStats[i] = [
rows[i].categorySponsor,
rows[i].categorySumSponsor,
rows[i].categorySumIntro,
rows[i].categorySumOutro,
rows[i].categorySumInteraction,
rows[i].categorySelfpromo,
rows[i].categoryMusicOfftopic,
rows[i].categorySumSelfpromo,
rows[i].categorySumMusicOfftopic,
rows[i].categorySumPreview,
rows[i].categorySumHighlight
rows[i].categorySumHighlight,
rows[i].categorySumFiller
];
}
}

View File

@@ -15,10 +15,11 @@ async function dbGetUserSummary(userID: HashedUserID, fetchCategoryStats: boolea
SUM(CASE WHEN "category" = 'intro' THEN 1 ELSE 0 END) as "categorySumIntro",
SUM(CASE WHEN "category" = 'outro' THEN 1 ELSE 0 END) as "categorySumOutro",
SUM(CASE WHEN "category" = 'interaction' THEN 1 ELSE 0 END) as "categorySumInteraction",
SUM(CASE WHEN "category" = 'selfpromo' THEN 1 ELSE 0 END) as "categorySelfpromo",
SUM(CASE WHEN "category" = 'music_offtopic' THEN 1 ELSE 0 END) as "categoryMusicOfftopic",
SUM(CASE WHEN "category" = 'selfpromo' THEN 1 ELSE 0 END) as "categorySumSelfpromo",
SUM(CASE WHEN "category" = 'music_offtopic' THEN 1 ELSE 0 END) as "categorySumMusicOfftopic",
SUM(CASE WHEN "category" = 'preview' THEN 1 ELSE 0 END) as "categorySumPreview",
SUM(CASE WHEN "category" = 'poi_highlight' THEN 1 ELSE 0 END) as "categorySumHighlight",`;
SUM(CASE WHEN "category" = 'poi_highlight' THEN 1 ELSE 0 END) as "categorySumHighlight",
SUM(CASE WHEN "category" = 'filler' THEN 1 ELSE 0 END) as "categorySumFiller",`;
}
if (fetchActionTypeStats) {
additionalQuery += `
@@ -48,10 +49,11 @@ async function dbGetUserSummary(userID: HashedUserID, fetchCategoryStats: boolea
intro: proxy.categorySumIntro,
outro: proxy.categorySumOutro,
interaction: proxy.categorySumInteraction,
selfpromo: proxy.categorySelfpromo,
music_offtopic: proxy.categoryMusicOfftopic,
selfpromo: proxy.categorySumSelfpromo,
music_offtopic: proxy.categorySumMusicOfftopic,
preview: proxy.categorySumPreview,
poi_highlight: proxy.categorySumHighlight,
filler: proxy.categorySumFiller,
};
}
if (fetchActionTypeStats) {

View File

@@ -107,4 +107,12 @@ export interface LockCategory {
reason: string,
videoID: VideoID,
userID: UserID
}
}
export enum SortableFields {
timeSubmitted = "timeSubmitted",
startTime = "startTime",
endTime = "endTime",
votes = "votes",
views = "views",
}
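`getSortField` in getSearchSegments.ts resolves user input against this enum case-insensitively by building a lowercase lookup from `Object.values(SortableFields)`. A standalone sketch:

```ts
enum SortableFields {
    timeSubmitted = "timeSubmitted",
    startTime = "startTime",
    endTime = "endTime",
    votes = "votes",
    views = "views",
}

// Lowercase lookup table, as built in getSortField.
const fieldByName = Object.values(SortableFields).reduce((acc, fieldName) => {
    acc[fieldName.toLowerCase()] = fieldName;
    return acc;
}, {} as Record<string, SortableFields>);

console.log(fieldByName["endtime"]); // "endTime"
console.log(fieldByName["missing"]); // undefined -> caller falls back to timeSubmitted
```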

View File

@@ -1,6 +1,6 @@
import redis from "../utils/redis";
import { Logger } from "../utils/logger";
import { skipSegmentsHashKey, skipSegmentsKey, reputationKey, ratingHashKey } from "./redisKeys";
import { skipSegmentsHashKey, skipSegmentsKey, reputationKey, ratingHashKey, skipSegmentGroupsKey } from "./redisKeys";
import { Service, VideoID, VideoIDHash } from "../types/segments.model";
import { UserID } from "../types/user.model";
@@ -82,6 +82,7 @@ async function getAndSplit<T, U extends string>(fetchFromDB: (values: U[]) => Pr
function clearSegmentCache(videoInfo: { videoID: VideoID; hashedVideoID: VideoIDHash; service: Service; userID?: UserID; }): void {
if (videoInfo) {
redis.delAsync(skipSegmentsKey(videoInfo.videoID, videoInfo.service));
redis.delAsync(skipSegmentGroupsKey(videoInfo.videoID, videoInfo.service));
redis.delAsync(skipSegmentsHashKey(videoInfo.hashedVideoID, videoInfo.service));
if (videoInfo.userID) redis.delAsync(reputationKey(videoInfo.userID));
}

View File

@@ -8,6 +8,7 @@ interface RedisSB {
set(key: string, value: string, callback?: Callback<string | null>): void;
setAsync?(key: string, value: string): Promise<{err: Error | null, reply: string | null}>;
delAsync?(...keys: [string]): Promise<Error | null>;
close?(flush?: boolean): void;
}
let exportObject: RedisSB = {
@@ -29,6 +30,7 @@ if (config.redis) {
exportObject.getAsync = (key) => new Promise((resolve) => client.get(key, (err, reply) => resolve({ err, reply })));
exportObject.setAsync = (key, value) => new Promise((resolve) => client.set(key, value, (err, reply) => resolve({ err, reply })));
exportObject.delAsync = (...keys) => new Promise((resolve) => client.del(keys, (err) => resolve(err)));
exportObject.close = (flush) => client.end(flush);
client.on("error", function(error) {
Logger.error(error);

View File

@@ -7,6 +7,10 @@ export function skipSegmentsKey(videoID: VideoID, service: Service): string {
return `segments.v2.${service}.videoID.${videoID}`;
}
export function skipSegmentGroupsKey(videoID: VideoID, service: Service): string {
return `segments.groups.${service}.videoID.${videoID}`;
}
export function skipSegmentsHashKey(hashedVideoIDPrefix: VideoIDHash, service: Service): string {
hashedVideoIDPrefix = hashedVideoIDPrefix.substring(0, 4) as VideoIDHash;
if (hashedVideoIDPrefix.length !== 4) Logger.warn(`Redis skip segment hash-prefix key is not length 4! ${hashedVideoIDPrefix}`);
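For reference, a sketch of the group-cache key this helper produces, with an invented video ID; `clearSegmentCache` in queryCacher.ts now deletes this key alongside the existing segment keys:

```ts
// Hypothetical inputs, showing the resulting key layout.
const videoID = "dQw4w9WgXcQ";
const service = "YouTube";

const groupsKey = `segments.groups.${service}.videoID.${videoID}`;
console.log(groupsKey); // "segments.groups.YouTube.videoID.dQw4w9WgXcQ"
```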

View File

@@ -281,4 +281,222 @@ describe("getSearchSegments", () => {
})
.catch(err => done(err));
});
it("Should be able to get with custom limit", (done) => {
client.get(endpoint, { params: { videoID: "searchTest4", limit: 2 } })
.then(res => {
assert.strictEqual(res.status, 200);
const data = res.data;
const segments = data.segments;
assert.strictEqual(data.segmentCount, 12);
assert.strictEqual(data.page, 0);
assert.strictEqual(segments.length, 2);
done();
})
.catch(err => done(err));
});
it("Should be able to get with custom limit(2) and page(2)", (done) => {
client.get(endpoint, { params: { videoID: "searchTest4", limit: 2, page: 2 } })
.then(res => {
assert.strictEqual(res.status, 200);
const data = res.data;
const segments = data.segments;
assert.strictEqual(data.segmentCount, 12);
assert.strictEqual(data.page, 2);
assert.strictEqual(segments.length, 2);
assert.strictEqual(segments[0].UUID, "search-page1-5");
assert.strictEqual(segments[1].UUID, "search-page1-6");
done();
})
.catch(err => done(err));
});
it("Should be able to get with over range page", (done) => {
client.get(endpoint, { params: { videoID: "searchTest4", limit: 2, page: 2000 } })
.then(res => {
assert.strictEqual(res.status, 200);
const data = res.data;
const segments = data.segments;
assert.strictEqual(data.segmentCount, 12);
assert.strictEqual(data.page, 2000);
assert.strictEqual(segments.length, 0);
done();
})
.catch(err => done(err));
});
it("Should be able to get with invalid page (=-100)", (done) => {
client.get(endpoint, { params: { videoID: "searchTest4", page: -100 } })
.then(res => {
assert.strictEqual(res.status, 200);
const data = res.data;
const segments = data.segments;
assert.strictEqual(data.segmentCount, 12);
assert.strictEqual(data.page, 0);
assert.strictEqual(segments.length, 10);
done();
})
.catch(err => done(err));
});
it("Should be able to get with invalid page (=text)", (done) => {
client.get(endpoint, { params: { videoID: "searchTest4", page: "hello" } })
.then(res => {
assert.strictEqual(res.status, 200);
const data = res.data;
const segments = data.segments;
assert.strictEqual(data.segmentCount, 12);
assert.strictEqual(data.page, 0);
assert.strictEqual(segments.length, 10);
done();
})
.catch(err => done(err));
});
it("Should be use default limit if invalid limit query (=0)", (done) => {
client.get(endpoint, { params: { videoID: "searchTest4", limit: 0 } })
.then(res => {
assert.strictEqual(res.status, 200);
const data = res.data;
const segments = data.segments;
assert.strictEqual(data.segmentCount, 12);
assert.strictEqual(data.page, 0);
assert.strictEqual(segments[0].UUID, "search-page1-1");
assert.strictEqual(segments[1].UUID, "search-page1-2");
assert.strictEqual(segments[2].UUID, "search-page1-3");
assert.strictEqual(segments[3].UUID, "search-page1-4");
assert.strictEqual(segments[4].UUID, "search-page1-5");
assert.strictEqual(segments[5].UUID, "search-page1-6");
assert.strictEqual(segments[6].UUID, "search-page1-7");
assert.strictEqual(segments[7].UUID, "search-page1-8");
assert.strictEqual(segments[8].UUID, "search-page1-9");
assert.strictEqual(segments[9].UUID, "search-page1-10");
done();
})
.catch(err => done(err));
});
it("Should be use default limit if invalid limit query (=-100)", (done) => {
client.get(endpoint, { params: { videoID: "searchTest4", limit: -100 } })
.then(res => {
assert.strictEqual(res.status, 200);
const data = res.data;
const segments = data.segments;
assert.strictEqual(data.segmentCount, 12);
assert.strictEqual(data.page, 0);
assert.strictEqual(segments[0].UUID, "search-page1-1");
assert.strictEqual(segments[1].UUID, "search-page1-2");
assert.strictEqual(segments[2].UUID, "search-page1-3");
assert.strictEqual(segments[3].UUID, "search-page1-4");
assert.strictEqual(segments[4].UUID, "search-page1-5");
assert.strictEqual(segments[5].UUID, "search-page1-6");
assert.strictEqual(segments[6].UUID, "search-page1-7");
assert.strictEqual(segments[7].UUID, "search-page1-8");
assert.strictEqual(segments[8].UUID, "search-page1-9");
assert.strictEqual(segments[9].UUID, "search-page1-10");
done();
})
.catch(err => done(err));
});
it("Should be use default limit if invalid limit query (=text)", (done) => {
client.get(endpoint, { params: { videoID: "searchTest4", limit: "hello" } })
.then(res => {
assert.strictEqual(res.status, 200);
const data = res.data;
const segments = data.segments;
assert.strictEqual(data.segmentCount, 12);
assert.strictEqual(data.page, 0);
assert.strictEqual(segments[0].UUID, "search-page1-1");
assert.strictEqual(segments[1].UUID, "search-page1-2");
assert.strictEqual(segments[2].UUID, "search-page1-3");
assert.strictEqual(segments[3].UUID, "search-page1-4");
assert.strictEqual(segments[4].UUID, "search-page1-5");
assert.strictEqual(segments[5].UUID, "search-page1-6");
assert.strictEqual(segments[6].UUID, "search-page1-7");
assert.strictEqual(segments[7].UUID, "search-page1-8");
assert.strictEqual(segments[8].UUID, "search-page1-9");
assert.strictEqual(segments[9].UUID, "search-page1-10");
done();
})
.catch(err => done(err));
});
it("Should be use default limit if invalid limit query (=2000)", (done) => {
client.get(endpoint, { params: { videoID: "searchTest4", limit: 2000 } })
.then(res => {
assert.strictEqual(res.status, 200);
const data = res.data;
const segments = data.segments;
assert.strictEqual(data.segmentCount, 12);
assert.strictEqual(data.page, 0);
assert.strictEqual(segments[0].UUID, "search-page1-1");
assert.strictEqual(segments[1].UUID, "search-page1-2");
assert.strictEqual(segments[2].UUID, "search-page1-3");
assert.strictEqual(segments[3].UUID, "search-page1-4");
assert.strictEqual(segments[4].UUID, "search-page1-5");
assert.strictEqual(segments[5].UUID, "search-page1-6");
assert.strictEqual(segments[6].UUID, "search-page1-7");
assert.strictEqual(segments[7].UUID, "search-page1-8");
assert.strictEqual(segments[8].UUID, "search-page1-9");
assert.strictEqual(segments[9].UUID, "search-page1-10");
done();
})
.catch(err => done(err));
});
it("Should be able to get sorted result (desc)", (done) => {
client.get(endpoint, { params: { videoID: "searchTest4", sortBy: "endTime", sortDir: "desc" } })
.then(res => {
assert.strictEqual(res.status, 200);
const data = res.data;
const segments = data.segments;
assert.strictEqual(data.segmentCount, 12);
assert.strictEqual(data.page, 0);
assert.strictEqual(segments[0].UUID, "search-page2-2");
assert.strictEqual(segments[1].UUID, "search-page2-1");
assert.strictEqual(segments[2].UUID, "search-page1-10");
assert.strictEqual(segments[3].UUID, "search-page1-9");
assert.strictEqual(segments[4].UUID, "search-page1-8");
done();
})
.catch(err => done(err));
});
it("Should be able to get sorted result (asc)", (done) => {
client.get(endpoint, { params: { videoID: "searchTest4", sortBy: "endTime" } })
.then(res => {
assert.strictEqual(res.status, 200);
const data = res.data;
const segments = data.segments;
assert.strictEqual(data.segmentCount, 12);
assert.strictEqual(data.page, 0);
assert.strictEqual(segments[0].UUID, "search-page1-1");
assert.strictEqual(segments[1].UUID, "search-page1-2");
assert.strictEqual(segments[2].UUID, "search-page1-3");
assert.strictEqual(segments[3].UUID, "search-page1-4");
assert.strictEqual(segments[4].UUID, "search-page1-5");
done();
})
.catch(err => done(err));
});
it("Should be use default sorted if invalid sort field", (done) => {
client.get(endpoint, { params: { videoID: "searchTest4", sortBy: "not exist", sortDir: "desc" } })
.then(res => {
assert.strictEqual(res.status, 200);
const data = res.data;
const segments = data.segments;
assert.strictEqual(data.segmentCount, 12);
assert.strictEqual(data.page, 0);
assert.strictEqual(segments[0].UUID, "search-page1-1");
assert.strictEqual(segments[1].UUID, "search-page1-2");
assert.strictEqual(segments[2].UUID, "search-page1-3");
assert.strictEqual(segments[3].UUID, "search-page1-4");
assert.strictEqual(segments[4].UUID, "search-page1-5");
done();
})
.catch(err => done(err));
});
});

View File

@@ -25,7 +25,7 @@ describe("getSkipSegments", () => {
await db.prepare("run", query, ["requiredSegmentVid", 80, 90, 2, 0, "requiredSegmentVid4", "testman", 0, 50, "sponsor", "skip", "YouTube", 0, 0, 0, ""]);
await db.prepare("run", query, ["chapterVid", 60, 80, 2, 0, "chapterVid-1", "testman", 0, 50, "chapter", "chapter", "YouTube", 0, 0, 0, "Chapter 1"]);
await db.prepare("run", query, ["chapterVid", 70, 75, 2, 0, "chapterVid-2", "testman", 0, 50, "chapter", "chapter", "YouTube", 0, 0, 0, "Chapter 2"]);
await db.prepare("run", query, ["chapterVid", 71, 76, 2, 0, "chapterVid-3", "testman", 0, 50, "chapter", "chapter", "YouTube", 0, 0, 0, "Chapter 3"]);
await db.prepare("run", query, ["chapterVid", 71, 75, 2, 0, "chapterVid-3", "testman", 0, 50, "chapter", "chapter", "YouTube", 0, 0, 0, "Chapter 3"]);
await db.prepare("run", query, ["requiredSegmentHashVid", 10, 20, -2, 0, "1d04b98f48e8f8bcc15c6ae5ac050801cd6dcfd428fb5f9e65c4e16e7807340fa", "testman", 0, 50, "sponsor", "skip", "YouTube", 0, 0, 0, ""]);
await db.prepare("run", query, ["requiredSegmentHashVid", 20, 30, -2, 0, "1ebde8e8ae03096b6c866aa2c8cc7ee1d720ca1fca27bea3f39a6a1b876577e71", "testman", 0, 50, "sponsor", "skip", "YouTube", 0, 0, 0, ""]);
return;

View File

@@ -1,5 +1,5 @@
import { db } from "../../src/databases/databases";
import { partialDeepEquals } from "../utils/partialDeepEquals";
import { partialDeepEquals, arrayPartialDeepEquals } from "../utils/partialDeepEquals";
import { getHash } from "../../src/utils/getHash";
import { ImportMock, } from "ts-mock-imports";
import * as YouTubeAPIModule from "../../src/utils/youtubeApi";
@@ -16,34 +16,40 @@ describe("getSkipSegmentsByHash", () => {
const getSegmentsByHash0Hash = "fdaff4dee1043451faa7398324fb63d8618ebcd11bddfe0491c488db12c6c910";
const requiredSegmentVidHash = "d51822c3f681e07aef15a8855f52ad12db9eb9cf059e65b16b64c43359557f61";
const requiredSegmentHashVidHash = "17bf8d9090e050257772f8bff277293c29c7ce3b25eb969a8fae111a2434504d";
const differentCategoryVidHash = "7fac44d1ee3257ec7f18953e2b5f991828de6854ad57193d1027c530981a89c0";
const nonMusicOverlapVidHash = "306151f778f9bfd19872b3ccfc83cbab37c4f370717436bfd85e0a624cd8ba3c";
before(async () => {
const query = 'INSERT INTO "sponsorTimes" ("videoID", "startTime", "endTime", "votes", "UUID", "userID", "timeSubmitted", views, category, "actionType", "service", "hidden", "shadowHidden", "hashedVideoID", "description") VALUES(?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)';
await db.prepare("run", query, ["getSegmentsByHash-0", 1, 10, 2, "getSegmentsByHash-01", "testman", 0, 50, "sponsor", "skip", "YouTube", 0, 0, getSegmentsByHash0Hash, ""]);
await db.prepare("run", query, ["getSegmentsByHash-0", 1, 10, 2, "getSegmentsByHash-02", "testman", 0, 50, "sponsor", "skip", "PeerTube", 0, 0, getSegmentsByHash0Hash, ""]);
await db.prepare("run", query, ["getSegmentsByHash-0", 20, 30, 2, "getSegmentsByHash-03", "testman", 100, 150, "intro", "skip", "YouTube", 0, 0, getSegmentsByHash0Hash, ""]);
await db.prepare("run", query, ["getSegmentsByHash-0", 40, 50, 2, "getSegmentsByHash-04", "testman", 0, 50, "sponsor", "mute", "YouTube", 0, 0, getSegmentsByHash0Hash, ""]);
await db.prepare("run", query, ["getSegmentsByHash-noMatchHash", 40, 50, 2, "getSegmentsByHash-noMatchHash", "testman", 0, 50, "sponsor", "skip", "YouTube", 0, 0, "fdaffnoMatchHash", ""]);
await db.prepare("run", query, ["getSegmentsByHash-1", 60, 70, 2, "getSegmentsByHash-1", "testman", 0, 50, "sponsor", "skip", "YouTube", 0, 0, "3272fa85ee0927f6073ef6f07ad5f3146047c1abba794cfa364d65ab9921692b", ""]);
await db.prepare("run", query, ["onlyHidden", 60, 70, 2, "onlyHidden", "testman", 0, 50, "sponsor", "skip", "YouTube", 1, 0, "f3a199e1af001d716cdc6599360e2b062c2d2b3fa2885f6d9d2fd741166cbbd3", ""]);
await db.prepare("run", query, ["highlightVid", 60, 60, 2, "highlightVid-1", "testman", 0, 50, "poi_highlight", "skip", "YouTube", 0, 0, getHash("highlightVid", 1), ""]);
await db.prepare("run", query, ["highlightVid", 70, 70, 2, "highlightVid-2", "testman", 0, 50, "poi_highlight", "skip", "YouTube", 0, 0, getHash("highlightVid", 1), ""]);
await db.prepare("run", query, ["requiredSegmentVid", 60, 70, 2, "requiredSegmentVid-1", "testman", 0, 50, "sponsor", "skip", "YouTube", 0, 0, requiredSegmentVidHash, ""]);
await db.prepare("run", query, ["requiredSegmentVid", 60, 70, -2, "requiredSegmentVid-2", "testman", 0, 50, "sponsor", "skip", "YouTube", 0, 0, requiredSegmentVidHash, ""]);
await db.prepare("run", query, ["requiredSegmentVid", 80, 90, -2, "requiredSegmentVid-3", "testman", 0, 50, "sponsor", "skip", "YouTube", 0, 0, requiredSegmentVidHash, ""]);
await db.prepare("run", query, ["requiredSegmentVid", 80, 90, 2, "requiredSegmentVid-4", "testman", 0, 50, "sponsor", "skip", "YouTube", 0, 0, requiredSegmentVidHash, ""]);
await db.prepare("run", query, ["chapterVid-hash", 60, 80, 2, "chapterVid-hash-1", "testman", 0, 50, "chapter", "chapter", "YouTube", 0, 0, getHash("chapterVid-hash", 1), "Chapter 1"]); //7258
await db.prepare("run", query, ["chapterVid-hash", 70, 75, 2, "chapterVid-hash-2", "testman", 0, 50, "chapter", "chapter", "YouTube", 0, 0, getHash("chapterVid-hash", 1), "Chapter 2"]); //7258
await db.prepare("run", query, ["chapterVid-hash", 71, 76, 2, "chapterVid-hash-3", "testman", 0, 50, "chapter", "chapter", "YouTube", 0, 0, getHash("chapterVid-hash", 1), "Chapter 3"]); //7258
await db.prepare("run", query, ["longMuteVid-hash", 40, 45, 2, "longMuteVid-hash-1", "testman", 0, 50, "sponsor", "skip", "YouTube", 0, 0, getHash("longMuteVid-hash", 1), ""]); //6613
await db.prepare("run", query, ["longMuteVid-hash", 30, 35, 2, "longMuteVid-hash-2", "testman", 0, 50, "sponsor", "skip", "YouTube", 0, 0, getHash("longMuteVid-hash", 1), ""]); //6613
await db.prepare("run", query, ["longMuteVid-hash", 2, 80, 2, "longMuteVid-hash-3", "testman", 0, 50, "sponsor", "mute", "YouTube", 0, 0, getHash("longMuteVid-hash", 1), ""]); //6613
await db.prepare("run", query, ["longMuteVid-hash", 3, 78, 2, "longMuteVid-hash-4", "testman", 0, 50, "sponsor", "mute", "YouTube", 0, 0, getHash("longMuteVid-hash", 1), ""]); //6613
await db.prepare("run", query, ["longMuteVid-2-hash", 1, 15, 2, "longMuteVid-2-hash-1", "testman", 0, 50, "sponsor", "mute", "YouTube", 0, 0, getHash("longMuteVid-2-hash", 1), ""]); //ab0c
await db.prepare("run", query, ["longMuteVid-2-hash", 30, 35, 2, "longMuteVid-2-hash-2", "testman", 0, 50, "sponsor", "skip", "YouTube", 0, 0, getHash("longMuteVid-2-hash", 1), ""]); //ab0c
await db.prepare("run", query, ["longMuteVid-2-hash", 2, 80, 2, "longMuteVid-2-hash-3", "testman", 0, 50, "sponsor", "mute", "YouTube", 0, 0, getHash("longMuteVid-2-hash", 1), ""]); //ab0c
await db.prepare("run", query, ["longMuteVid-2-hash", 3, 78, 2, "longMuteVid-2-hash-4", "testman", 0, 50, "sponsor", "mute", "YouTube", 0, 0, getHash("longMuteVid-2-hash", 1), ""]); //ab0c
await db.prepare("run", query, ["requiredSegmentHashVid", 10, 20, -2, "fbf0af454059733c8822f6a4ac8ec568e0787f8c0a5ee915dd5b05e0d7a9a388", "testman", 0, 50, "sponsor", "skip", "YouTube", 0, 0, requiredSegmentHashVidHash, ""]);
await db.prepare("run", query, ["requiredSegmentHashVid", 20, 30, -2, "7e1ebc5194551d2d0a606d64f675e5a14952e4576b2959f8c9d51e316c14f8da", "testman", 0, 50, "sponsor", "skip", "YouTube", 0, 0, requiredSegmentHashVidHash, ""]);
const query = 'INSERT INTO "sponsorTimes" ("videoID", "startTime", "endTime", "votes", "locked", "UUID", "userID", "timeSubmitted", views, category, "actionType", "service", "hidden", "shadowHidden", "hashedVideoID", "description") VALUES(?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)';
await db.prepare("run", query, ["getSegmentsByHash-0", 1, 10, 2, 0, "getSegmentsByHash-01", "testman", 0, 50, "sponsor", "skip", "YouTube", 0, 0, getSegmentsByHash0Hash, ""]);
await db.prepare("run", query, ["getSegmentsByHash-0", 1, 10, 2, 0, "getSegmentsByHash-02", "testman", 0, 50, "sponsor", "skip", "PeerTube", 0, 0, getSegmentsByHash0Hash, ""]);
await db.prepare("run", query, ["getSegmentsByHash-0", 20, 30, 2, 0, "getSegmentsByHash-03", "testman", 100, 150, "intro", "skip", "YouTube", 0, 0, getSegmentsByHash0Hash, ""]);
await db.prepare("run", query, ["getSegmentsByHash-0", 40, 50, 2, 0, "getSegmentsByHash-04", "testman", 0, 50, "sponsor", "mute", "YouTube", 0, 0, getSegmentsByHash0Hash, ""]);
await db.prepare("run", query, ["getSegmentsByHash-noMatchHash", 40, 50, 2, 0, "getSegmentsByHash-noMatchHash", "testman", 0, 50, "sponsor", "skip", "YouTube", 0, 0, "fdaffnoMatchHash", ""]);
await db.prepare("run", query, ["getSegmentsByHash-1", 60, 70, 2, 0, "getSegmentsByHash-1", "testman", 0, 50, "sponsor", "skip", "YouTube", 0, 0, "3272fa85ee0927f6073ef6f07ad5f3146047c1abba794cfa364d65ab9921692b", ""]);
await db.prepare("run", query, ["onlyHidden", 60, 70, 2, 0, "onlyHidden", "testman", 0, 50, "sponsor", "skip", "YouTube", 1, 0, "f3a199e1af001d716cdc6599360e2b062c2d2b3fa2885f6d9d2fd741166cbbd3", ""]);
await db.prepare("run", query, ["highlightVid", 60, 60, 2, 0, "highlightVid-1", "testman", 0, 50, "poi_highlight", "skip", "YouTube", 0, 0, getHash("highlightVid", 1), ""]);
await db.prepare("run", query, ["highlightVid", 70, 70, 2, 0, "highlightVid-2", "testman", 0, 50, "poi_highlight", "skip", "YouTube", 0, 0, getHash("highlightVid", 1), ""]);
await db.prepare("run", query, ["requiredSegmentVid", 60, 70, 2, 0, "requiredSegmentVid-1", "testman", 0, 50, "sponsor", "skip", "YouTube", 0, 0, requiredSegmentVidHash, ""]);
await db.prepare("run", query, ["requiredSegmentVid", 60, 70, -2, 0, "requiredSegmentVid-2", "testman", 0, 50, "sponsor", "skip", "YouTube", 0, 0, requiredSegmentVidHash, ""]);
await db.prepare("run", query, ["requiredSegmentVid", 80, 90, -2, 0, "requiredSegmentVid-3", "testman", 0, 50, "sponsor", "skip", "YouTube", 0, 0, requiredSegmentVidHash, ""]);
await db.prepare("run", query, ["requiredSegmentVid", 80, 90, 2, 0, "requiredSegmentVid-4", "testman", 0, 50, "sponsor", "skip", "YouTube", 0, 0, requiredSegmentVidHash, ""]);
await db.prepare("run", query, ["chapterVid-hash", 60, 80, 2, 0, "chapterVid-hash-1", "testman", 0, 50, "chapter", "chapter", "YouTube", 0, 0, getHash("chapterVid-hash", 1), "Chapter 1"]); //7258
await db.prepare("run", query, ["chapterVid-hash", 70, 75, 2, 0, "chapterVid-hash-2", "testman", 0, 50, "chapter", "chapter", "YouTube", 0, 0, getHash("chapterVid-hash", 1), "Chapter 2"]); //7258
await db.prepare("run", query, ["chapterVid-hash", 71, 75, 2, 0, "chapterVid-hash-3", "testman", 0, 50, "chapter", "chapter", "YouTube", 0, 0, getHash("chapterVid-hash", 1), "Chapter 3"]); //7258
await db.prepare("run", query, ["longMuteVid-hash", 40, 45, 2, 0, "longMuteVid-hash-1", "testman", 0, 50, "sponsor", "skip", "YouTube", 0, 0, getHash("longMuteVid-hash", 1), ""]); //6613
await db.prepare("run", query, ["longMuteVid-hash", 30, 35, 2, 0, "longMuteVid-hash-2", "testman", 0, 50, "sponsor", "skip", "YouTube", 0, 0, getHash("longMuteVid-hash", 1), ""]); //6613
await db.prepare("run", query, ["longMuteVid-hash", 2, 80, 2, 0, "longMuteVid-hash-3", "testman", 0, 50, "sponsor", "mute", "YouTube", 0, 0, getHash("longMuteVid-hash", 1), ""]); //6613
await db.prepare("run", query, ["longMuteVid-hash", 3, 78, 2, 0, "longMuteVid-hash-4", "testman", 0, 50, "sponsor", "mute", "YouTube", 0, 0, getHash("longMuteVid-hash", 1), ""]); //6613
await db.prepare("run", query, ["longMuteVid-2-hash", 1, 15, 2, 0, "longMuteVid-2-hash-1", "testman", 0, 50, "sponsor", "mute", "YouTube", 0, 0, getHash("longMuteVid-2-hash", 1), ""]); //ab0c
await db.prepare("run", query, ["longMuteVid-2-hash", 30, 35, 2, 0, "longMuteVid-2-hash-2", "testman", 0, 50, "sponsor", "skip", "YouTube", 0, 0, getHash("longMuteVid-2-hash", 1), ""]); //ab0c
await db.prepare("run", query, ["longMuteVid-2-hash", 2, 80, 2, 0, "longMuteVid-2-hash-3", "testman", 0, 50, "sponsor", "mute", "YouTube", 0, 0, getHash("longMuteVid-2-hash", 1), ""]); //ab0c
await db.prepare("run", query, ["longMuteVid-2-hash", 3, 78, 2, 0, "longMuteVid-2-hash-4", "testman", 0, 50, "sponsor", "mute", "YouTube", 0, 0, getHash("longMuteVid-2-hash", 1), ""]); //ab0c
await db.prepare("run", query, ["requiredSegmentHashVid", 10, 20, -2, 0, "fbf0af454059733c8822f6a4ac8ec568e0787f8c0a5ee915dd5b05e0d7a9a388", "testman", 0, 50, "sponsor", "skip", "YouTube", 0, 0, requiredSegmentHashVidHash, ""]);
await db.prepare("run", query, ["requiredSegmentHashVid", 20, 30, -2, 0, "7e1ebc5194551d2d0a606d64f675e5a14952e4576b2959f8c9d51e316c14f8da", "testman", 0, 50, "sponsor", "skip", "YouTube", 0, 0, requiredSegmentHashVidHash, ""]);
await db.prepare("run", query, ["differentCategoryVid", 60, 70, 2, 0, "differentCategoryVid-1", "testman", 0, 50, "sponsor", "skip", "YouTube", 0, 0, differentCategoryVidHash, ""]);
await db.prepare("run", query, ["differentCategoryVid", 61, 70, 2, 1, "differentCategoryVid-2", "testman", 0, 50, "intro", "skip", "YouTube", 0, 0, differentCategoryVidHash, ""]);
await db.prepare("run", query, ["nonMusicOverlapVid", 60, 70, 2, 0, "nonMusicOverlapVid-1", "testman", 0, 50, "sponsor", "skip", "YouTube", 0, 0, nonMusicOverlapVidHash, ""]);
await db.prepare("run", query, ["nonMusicOverlapVid", 61, 70, 2, 1, "nonMusicOverlapVid-2", "testman", 0, 50, "music_offtopic", "skip", "YouTube", 0, 0, nonMusicOverlapVidHash, ""]);
});
it("Should be able to get a 200", (done) => {
@@ -434,13 +440,51 @@ describe("getSkipSegmentsByHash", () => {
}]
}];
assert.ok(partialDeepEquals(data, expected, false) || partialDeepEquals(data, expected2, false));
assert.ok(arrayPartialDeepEquals(data, expected) || arrayPartialDeepEquals(data, expected2));
assert.strictEqual(data[0].segments.length, 3);
done();
})
.catch(err => done(err));
});
it("Should be able to get only one segment when two categories are at the same time", (done) => {
client.get(`${endpoint}/7fac?categories=["sponsor","intro"]`)
.then(res => {
assert.strictEqual(res.status, 200);
const data = res.data;
assert.strictEqual(data.length, 1);
const expected = [{
segments: [{
category: "intro"
}]
}];
assert.ok(partialDeepEquals(data, expected));
assert.strictEqual(data[0].segments.length, 1);
done();
})
.catch(err => done(err));
});
it("Should be able to get overlapping segments where one is non music and one is other", (done) => {
client.get(`${endpoint}/3061?categories=["sponsor","music_offtopic"]`)
.then(res => {
assert.strictEqual(res.status, 200);
const data = res.data;
assert.strictEqual(data.length, 1);
const expected = [{
segments: [{
category: "sponsor"
}, {
category: "music_offtopic"
}]
}];
assert.ok(partialDeepEquals(data, expected));
assert.strictEqual(data[0].segments.length, 2);
done();
})
.catch(err => done(err));
});
it("Should be able to get mute segment with small skip segment in middle (2)", (done) => {
client.get(`${endpoint}/ab0c?actionType=skip&actionType=mute`)
.then(res => {

View File

@@ -20,6 +20,7 @@ describe("getUserStats", () => {
await db.prepare("run", sponsorTimesQuery, ["getuserstats1", 0, 60, 0, "getuserstatsuuid7", getHash("getuserstats_user_01"), 7, 7, "music_offtopic", 0]);
await db.prepare("run", sponsorTimesQuery, ["getuserstats1", 11, 11, 0, "getuserstatsuuid8", getHash("getuserstats_user_01"), 8, 8, "poi_highlight", 0]);
await db.prepare("run", sponsorTimesQuery, ["getuserstats1", 0, 60, -2, "getuserstatsuuid9", getHash("getuserstats_user_02"), 8, 2, "sponsor", 0]);
await db.prepare("run", sponsorTimesQuery, ["getuserstats1", 0, 60, 0, "getuserstatsuuid10", getHash("getuserstats_user_01"), 8, 2, "filler", 0]);
});
@@ -48,14 +49,15 @@ describe("getUserStats", () => {
preview: 1,
music_offtopic: 1,
poi_highlight: 1,
filler: 1
},
actionTypeCount: {
mute: 0,
skip: 8
skip: 9
},
overallStats: {
minutesSaved: 28,
segmentCount: 8
minutesSaved: 30,
segmentCount: 9
}
};
assert.ok(partialDeepEquals(res.data, expected));

View File

@@ -3,7 +3,7 @@ import { getHash } from "../../../src/utils/getHash";
import assert from "assert";
import { client } from "../../utils/httpClient";
import { AxiosResponse } from "axios";
import { partialDeepEquals } from "../../utils/partialDeepEquals";
import { partialDeepEquals, arrayPartialDeepEquals } from "../../utils/partialDeepEquals";
const endpoint = "/api/ratings/rate";
const getRating = (hash: string, params?: unknown): Promise<AxiosResponse> => client.get(`${endpoint}/${hash}`, { params });
@@ -58,6 +58,9 @@ describe("getRating", () => {
.catch(err => done(err));
});
/*
This test will fail if tests have already been run with redis.
*/
it("Should be able to bulk fetch", (done) => {
getBulkRating([videoOnePartialHash, videoTwoPartialHash])
.then(res => {
@@ -80,7 +83,7 @@ describe("getRating", () => {
count: 10,
hash: videoOneIDHash,
}];
assert.ok(partialDeepEquals(res.data, expected));
assert.ok(arrayPartialDeepEquals(res.data, expected));
done();
})
.catch(err => done(err));

test/cases/redisTest.ts (new file)
View File

@@ -0,0 +1,32 @@
import { config } from "../../src/config";
import redis from "../../src/utils/redis";
import crypto from "crypto";
import assert from "assert";
const genRandom = (bytes=8) => crypto.pseudoRandomBytes(bytes).toString("hex");
const randKey1 = genRandom();
const randValue1 = genRandom();
const randKey2 = genRandom(16);
describe("redis test", function() {
before(async function() {
if (!config.redis) this.skip();
await redis.setAsync(randKey1, randValue1);
});
it("Should get stored value", (done) => {
redis.getAsync(randKey1)
.then(res => {
if (res.err) assert.fail(res.err);
assert.strictEqual(res.reply, randValue1);
done();
});
});
it("Should not be able to get not stored value", (done) => {
redis.getAsync(randKey2)
.then(res => {
if (res.reply || res.err) assert.fail("Value should not be found");
done();
});
});
});

View File

@@ -9,6 +9,7 @@ import { initDb } from "../src/databases/databases";
import { ImportMock } from "ts-mock-imports";
import * as rateLimitMiddlewareModule from "../src/middleware/requestRateLimit";
import rateLimit from "express-rate-limit";
import redis from "../src/utils/redis";
async function init() {
ImportMock.mockFunction(rateLimitMiddlewareModule, "rateLimitMiddleware", rateLimit({
@@ -56,6 +57,7 @@ async function init() {
mocha.run((failures) => {
mockServer.close();
server.close();
redis.close(true);
process.exitCode = failures ? 1 : 0; // exit with non-zero status if there were failures
});
});

View File

@@ -22,6 +22,12 @@ export const partialDeepEquals = (actual: Record<string, any>, expected: Record<
return true;
};
export const arrayPartialDeepEquals = (actual: Array<any>, expected: Array<any>): boolean => {
for (const value of expected)
if (!actual.some(a => partialDeepEquals(a, value, false))) return false;
return true;
};
export const arrayDeepEquals = (actual: Record<string, any>, expected: Record<string, any>, print = true): boolean => {
if (actual.length !== expected.length) return false;
let flag = true;
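`arrayPartialDeepEquals` makes the bulk-rating and by-hash assertions order-insensitive: each expected element only needs to partially match some actual element. A simplified, self-contained sketch (the real `partialDeepEquals` also logs mismatches; the `Lite` names are invented here):

```ts
// Simplified re-implementation for illustration only.
const partialDeepEqualsLite = (actual: any, expected: any): boolean =>
    Object.keys(expected).every((key) =>
        typeof expected[key] === "object" && expected[key] !== null
            ? partialDeepEqualsLite(actual?.[key], expected[key])
            : actual?.[key] === expected[key]);

const arrayPartialDeepEqualsLite = (actual: any[], expected: any[]): boolean =>
    expected.every((value) => actual.some((a) => partialDeepEqualsLite(a, value)));

const actual = [{ hash: "abc", count: 10, extra: "ignored" }, { hash: "def", count: 5 }];
console.log(arrayPartialDeepEqualsLite(actual, [{ hash: "def" }, { hash: "abc", count: 10 }])); // true
console.log(arrayPartialDeepEqualsLite(actual, [{ hash: "abc", count: 99 }]));                  // false
```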