From ec73ae309e44d8199b4f92725616aae37f20d71c Mon Sep 17 00:00:00 2001 From: Haidang666 Date: Fri, 17 Dec 2021 15:08:51 +0700 Subject: [PATCH 01/14] add custom pagination in getSearchSegments --- src/routes/getSearchSegments.ts | 49 +++++++++++++++++++++--- src/types/segments.model.ts | 10 ++++- test/cases/getSearchSegments.ts | 66 +++++++++++++++++++++++++++++++++ 3 files changed, 118 insertions(+), 7 deletions(-) diff --git a/src/routes/getSearchSegments.ts b/src/routes/getSearchSegments.ts index cbfb1f7..e3a6d27 100644 --- a/src/routes/getSearchSegments.ts +++ b/src/routes/getSearchSegments.ts @@ -1,8 +1,8 @@ import { Request, Response } from "express"; import { db } from "../databases/databases"; -import { ActionType, Category, DBSegment, Service, VideoID } from "../types/segments.model"; +import { ActionType, Category, DBSegment, Service, VideoID, SortableFields } from "../types/segments.model"; import { getService } from "../utils/getService"; -const segmentsPerPage = 10; +const maxSegmentsPerPage = 100; type searchSegmentResponse = { segmentCount: number, @@ -19,6 +19,22 @@ function getSegmentsFromDBByVideoID(videoID: VideoID, service: Service): Promise ) as Promise; } +function getSortField(...value: T[]): SortableFields { + const fieldByName = Object.values(SortableFields).reduce((acc, fieldName) => { + acc[fieldName.toLowerCase()] = fieldName; + + return acc; + }, {} as Record); + + for (const name of value) { + if (name?.trim().toLowerCase() in fieldByName) { + return fieldByName[name.trim().toLowerCase()]; + } + } + + return SortableFields.timeSubmitted; +} + /** * * Returns what would be sent to the client. @@ -64,6 +80,10 @@ async function handleGetSegments(req: Request, res: Response): Promise maxSegmentsPerPage ? maxSegmentsPerPage : Number(limit); + const sortBy: SortableFields = getSortField(req.query.sortBy, req.body.sortBy); + const sortDir: string = req.query.sortDir ?? req.body.sortDir ?? "asc"; const minVotes: number = req.query.minVotes ?? req.body.minVotes ?? -3; const maxVotes: number = req.query.maxVotes ?? req.body.maxVotes ?? Infinity; @@ -99,11 +119,11 @@ async function handleGetSegments(req: Request, res: Response): Promise) { - const startIndex = 0+(page*segmentsPerPage); - const endIndex = segmentsPerPage+(page*segmentsPerPage); +function filterSegments(segments: DBSegment[], filters: Record, page: number, limit: number, sortBy: SortableFields, sortDir: string) { + const startIndex = 0+(page*limit); + const endIndex = limit+(page*limit); const filteredSegments = segments.filter((segment) => !((segment.votes < filters.minVotes || segment.votes > filters.maxVotes) || (segment.views < filters.minViews || segment.views > filters.maxViews) @@ -114,10 +134,27 @@ function filterSegments(segments: DBSegment[], page: number, filters: Record { + const key = sortDir === "desc" ? 
1 : -1; + if (a[sortBy] < b[sortBy]) { + return key; + } + + if (a[sortBy] > b[sortBy]) { + return -key; + } + + return 0; + }); + } + return { segmentCount: filteredSegments.length, page, segments: filteredSegments.slice(startIndex, endIndex) + }; } diff --git a/src/types/segments.model.ts b/src/types/segments.model.ts index a94bd8e..ba613c4 100644 --- a/src/types/segments.model.ts +++ b/src/types/segments.model.ts @@ -107,4 +107,12 @@ export interface LockCategory { reason: string, videoID: VideoID, userID: UserID -} \ No newline at end of file +} + +export enum SortableFields { + timeSubmitted = "timeSubmitted", + startTime = "startTime", + endTime = "endTime", + votes = "votes", + views = "views", +} diff --git a/test/cases/getSearchSegments.ts b/test/cases/getSearchSegments.ts index d330f55..dc8a3d8 100644 --- a/test/cases/getSearchSegments.ts +++ b/test/cases/getSearchSegments.ts @@ -282,4 +282,70 @@ describe("getSearchSegments", () => { }) .catch(err => done(err)); }); + + it("Should be able to get with custom limit", (done) => { + client.get(endpoint, { params: { videoID: "searchTest4", limit: 2 } }) + .then(res => { + assert.strictEqual(res.status, 200); + const data = res.data; + const segments = data.segments; + assert.strictEqual(data.segmentCount, 12); + assert.strictEqual(data.page, 0); + assert.strictEqual(segments.length, 2); + done(); + }) + .catch(err => done(err)); + }); + + it("Should be able to get with custom limit(2) and page(2)", (done) => { + client.get(endpoint, { params: { videoID: "searchTest4", limit: 2, page: 2 } }) + .then(res => { + assert.strictEqual(res.status, 200); + const data = res.data; + const segments = data.segments; + assert.strictEqual(data.segmentCount, 12); + assert.strictEqual(data.page, 2); + assert.strictEqual(segments.length, 2); + assert.strictEqual(segments[0].UUID, "search-page1-5"); + assert.strictEqual(segments[1].UUID, "search-page1-6"); + done(); + }) + .catch(err => done(err)); + }); + + it("Should be able to get sorted result (desc)", (done) => { + client.get(endpoint, { params: { videoID: "searchTest4", sortBy: "endTime", sortDir: "desc" } }) + .then(res => { + assert.strictEqual(res.status, 200); + const data = res.data; + const segments = data.segments; + assert.strictEqual(data.segmentCount, 12); + assert.strictEqual(data.page, 0); + assert.strictEqual(segments[0].UUID, "search-page2-2"); + assert.strictEqual(segments[1].UUID, "search-page2-1"); + assert.strictEqual(segments[2].UUID, "search-page1-10"); + assert.strictEqual(segments[3].UUID, "search-page1-9"); + assert.strictEqual(segments[4].UUID, "search-page1-8"); + done(); + }) + .catch(err => done(err)); + }); + + it("Should be able to get sorted result (asc)", (done) => { + client.get(endpoint, { params: { videoID: "searchTest4", sortBy: "endTime" } }) + .then(res => { + assert.strictEqual(res.status, 200); + const data = res.data; + const segments = data.segments; + assert.strictEqual(data.segmentCount, 12); + assert.strictEqual(data.page, 0); + assert.strictEqual(segments[0].UUID, "search-page1-1"); + assert.strictEqual(segments[1].UUID, "search-page1-2"); + assert.strictEqual(segments[2].UUID, "search-page1-3"); + assert.strictEqual(segments[3].UUID, "search-page1-4"); + assert.strictEqual(segments[4].UUID, "search-page1-5"); + done(); + }) + .catch(err => done(err)); + }); }); From 89a83f78cc90dcfb74ac0d317b11e964d656073a Mon Sep 17 00:00:00 2001 From: Haidang666 Date: Sun, 19 Dec 2021 10:13:06 +0700 Subject: [PATCH 02/14] add more test on limit, page --- 
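The new getLimit/getPage helpers below replace the earlier inline Number(limit) clamp with explicit validation that falls back to safe defaults (10 segments per page, page 0) whenever the query value is not a positive integer in range. A rough sketch of the mapping they implement, derived from the diff below:

    getPage(-100)     // -> 0   (negative pages rejected)
    getPage("hello")  // -> 0   (Number("hello") is NaN, not an integer)
    getLimit(0)       // -> 10  (below the minimum of 1)
    getLimit(2000)    // -> 10  (above maxSegmentsPerPage = 100)
    getLimit(2)       // -> 2   (valid values pass through unchanged)
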
src/routes/getSearchSegments.ts | 27 +++++- test/cases/getSearchSegments.ts | 152 ++++++++++++++++++++++++++++++++ 2 files changed, 175 insertions(+), 4 deletions(-) diff --git a/src/routes/getSearchSegments.ts b/src/routes/getSearchSegments.ts index e3a6d27..55bbb5e 100644 --- a/src/routes/getSearchSegments.ts +++ b/src/routes/getSearchSegments.ts @@ -3,6 +3,7 @@ import { db } from "../databases/databases"; import { ActionType, Category, DBSegment, Service, VideoID, SortableFields } from "../types/segments.model"; import { getService } from "../utils/getService"; const maxSegmentsPerPage = 100; +const defaultSegmentsPerPage = 10; type searchSegmentResponse = { segmentCount: number, @@ -35,6 +36,26 @@ function getSortField(...value: T[]): SortableFields { return SortableFields.timeSubmitted; } +function getLimit(value: T): number { + const limit = Number(value); + if (Number.isInteger(limit) + && limit >= 1 + && limit <= maxSegmentsPerPage) { + return limit; + } + + return defaultSegmentsPerPage; +} + +function getPage(value: T): number { + const page = Number(value); + if (Number.isInteger(page) && page >= 0) { + return page; + } + + return 0; +} + /** * * Returns what would be sent to the client. @@ -78,10 +99,8 @@ async function handleGetSegments(req: Request, res: Response): Promise maxSegmentsPerPage ? maxSegmentsPerPage : Number(limit); + const page: number = getPage(req.query.page ?? req.body.page); + const limit: number = getLimit(req.query.limit ?? req.body.limit); const sortBy: SortableFields = getSortField(req.query.sortBy, req.body.sortBy); const sortDir: string = req.query.sortDir ?? req.body.sortDir ?? "asc"; diff --git a/test/cases/getSearchSegments.ts b/test/cases/getSearchSegments.ts index dc8a3d8..a0a478c 100644 --- a/test/cases/getSearchSegments.ts +++ b/test/cases/getSearchSegments.ts @@ -313,6 +313,140 @@ describe("getSearchSegments", () => { .catch(err => done(err)); }); + it("Should be able to get with over range page", (done) => { + client.get(endpoint, { params: { videoID: "searchTest4", limit: 2, page: 2000 } }) + .then(res => { + assert.strictEqual(res.status, 200); + const data = res.data; + const segments = data.segments; + assert.strictEqual(data.segmentCount, 12); + assert.strictEqual(data.page, 2000); + assert.strictEqual(segments.length, 0); + done(); + }) + .catch(err => done(err)); + }); + + it("Should be able to get with invalid page (=-100)", (done) => { + client.get(endpoint, { params: { videoID: "searchTest4", page: -100 } }) + .then(res => { + assert.strictEqual(res.status, 200); + const data = res.data; + const segments = data.segments; + assert.strictEqual(data.segmentCount, 12); + assert.strictEqual(data.page, 0); + assert.strictEqual(segments.length, 10); + done(); + }) + .catch(err => done(err)); + }); + + it("Should be able to get with invalid page (=text)", (done) => { + client.get(endpoint, { params: { videoID: "searchTest4", page: "hello" } }) + .then(res => { + assert.strictEqual(res.status, 200); + const data = res.data; + const segments = data.segments; + assert.strictEqual(data.segmentCount, 12); + assert.strictEqual(data.page, 0); + assert.strictEqual(segments.length, 10); + done(); + }) + .catch(err => done(err)); + }); + + it("Should be use default limit if invalid limit query (=0)", (done) => { + client.get(endpoint, { params: { videoID: "searchTest4", limit: 0 } }) + .then(res => { + assert.strictEqual(res.status, 200); + const data = res.data; + const segments = data.segments; + assert.strictEqual(data.segmentCount, 12); + 
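                // Note: segmentCount always reports the total number of matching
                // segments (filterSegments returns filteredSegments.length before
                // slicing the page window), so it stays at 12 regardless of the
                // limit/page requested.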
assert.strictEqual(data.page, 0); + assert.strictEqual(segments[0].UUID, "search-page1-1"); + assert.strictEqual(segments[1].UUID, "search-page1-2"); + assert.strictEqual(segments[2].UUID, "search-page1-3"); + assert.strictEqual(segments[3].UUID, "search-page1-4"); + assert.strictEqual(segments[4].UUID, "search-page1-5"); + assert.strictEqual(segments[5].UUID, "search-page1-6"); + assert.strictEqual(segments[6].UUID, "search-page1-7"); + assert.strictEqual(segments[7].UUID, "search-page1-8"); + assert.strictEqual(segments[8].UUID, "search-page1-9"); + assert.strictEqual(segments[9].UUID, "search-page1-10"); + done(); + }) + .catch(err => done(err)); + }); + + it("Should be use default limit if invalid limit query (=-100)", (done) => { + client.get(endpoint, { params: { videoID: "searchTest4", limit: -100 } }) + .then(res => { + assert.strictEqual(res.status, 200); + const data = res.data; + const segments = data.segments; + assert.strictEqual(data.segmentCount, 12); + assert.strictEqual(data.page, 0); + assert.strictEqual(segments[0].UUID, "search-page1-1"); + assert.strictEqual(segments[1].UUID, "search-page1-2"); + assert.strictEqual(segments[2].UUID, "search-page1-3"); + assert.strictEqual(segments[3].UUID, "search-page1-4"); + assert.strictEqual(segments[4].UUID, "search-page1-5"); + assert.strictEqual(segments[5].UUID, "search-page1-6"); + assert.strictEqual(segments[6].UUID, "search-page1-7"); + assert.strictEqual(segments[7].UUID, "search-page1-8"); + assert.strictEqual(segments[8].UUID, "search-page1-9"); + assert.strictEqual(segments[9].UUID, "search-page1-10"); + done(); + }) + .catch(err => done(err)); + }); + + it("Should be use default limit if invalid limit query (=text)", (done) => { + client.get(endpoint, { params: { videoID: "searchTest4", limit: "hello" } }) + .then(res => { + assert.strictEqual(res.status, 200); + const data = res.data; + const segments = data.segments; + assert.strictEqual(data.segmentCount, 12); + assert.strictEqual(data.page, 0); + assert.strictEqual(segments[0].UUID, "search-page1-1"); + assert.strictEqual(segments[1].UUID, "search-page1-2"); + assert.strictEqual(segments[2].UUID, "search-page1-3"); + assert.strictEqual(segments[3].UUID, "search-page1-4"); + assert.strictEqual(segments[4].UUID, "search-page1-5"); + assert.strictEqual(segments[5].UUID, "search-page1-6"); + assert.strictEqual(segments[6].UUID, "search-page1-7"); + assert.strictEqual(segments[7].UUID, "search-page1-8"); + assert.strictEqual(segments[8].UUID, "search-page1-9"); + assert.strictEqual(segments[9].UUID, "search-page1-10"); + done(); + }) + .catch(err => done(err)); + }); + + it("Should be use default limit if invalid limit query (=2000)", (done) => { + client.get(endpoint, { params: { videoID: "searchTest4", limit: 2000 } }) + .then(res => { + assert.strictEqual(res.status, 200); + const data = res.data; + const segments = data.segments; + assert.strictEqual(data.segmentCount, 12); + assert.strictEqual(data.page, 0); + assert.strictEqual(segments[0].UUID, "search-page1-1"); + assert.strictEqual(segments[1].UUID, "search-page1-2"); + assert.strictEqual(segments[2].UUID, "search-page1-3"); + assert.strictEqual(segments[3].UUID, "search-page1-4"); + assert.strictEqual(segments[4].UUID, "search-page1-5"); + assert.strictEqual(segments[5].UUID, "search-page1-6"); + assert.strictEqual(segments[6].UUID, "search-page1-7"); + assert.strictEqual(segments[7].UUID, "search-page1-8"); + assert.strictEqual(segments[8].UUID, "search-page1-9"); + assert.strictEqual(segments[9].UUID, 
"search-page1-10"); + done(); + }) + .catch(err => done(err)); + }); + it("Should be able to get sorted result (desc)", (done) => { client.get(endpoint, { params: { videoID: "searchTest4", sortBy: "endTime", sortDir: "desc" } }) .then(res => { @@ -348,4 +482,22 @@ describe("getSearchSegments", () => { }) .catch(err => done(err)); }); + + it("Should be use default sorted if invalid sort field", (done) => { + client.get(endpoint, { params: { videoID: "searchTest4", sortBy: "not exist", sortDir: "desc" } }) + .then(res => { + assert.strictEqual(res.status, 200); + const data = res.data; + const segments = data.segments; + assert.strictEqual(data.segmentCount, 12); + assert.strictEqual(data.page, 0); + assert.strictEqual(segments[0].UUID, "search-page1-1"); + assert.strictEqual(segments[1].UUID, "search-page1-2"); + assert.strictEqual(segments[2].UUID, "search-page1-3"); + assert.strictEqual(segments[3].UUID, "search-page1-4"); + assert.strictEqual(segments[4].UUID, "search-page1-5"); + done(); + }) + .catch(err => done(err)); + }); }); From 68bc6469ced729abc19d80a381fd9819ec9b6225 Mon Sep 17 00:00:00 2001 From: Michael C Date: Sun, 19 Dec 2021 02:03:50 -0500 Subject: [PATCH 03/14] add redis tests --- ci.json | 4 ++++ docker/docker-compose-ci.yml | 6 +++++- package.json | 1 + src/utils/redis.ts | 2 ++ test/cases/redisTest.ts | 21 +++++++++++++++++++++ test/test.ts | 2 ++ 6 files changed, 35 insertions(+), 1 deletion(-) create mode 100644 test/cases/redisTest.ts diff --git a/ci.json b/ci.json index 2f948b7..68262cb 100644 --- a/ci.json +++ b/ci.json @@ -16,6 +16,10 @@ "host": "localhost", "port": 5432 }, + "redis": { + "host": "localhost", + "port": 6379 + }, "createDatabaseIfNotExist": true, "schemaFolder": "./databases", "dbSchema": "./databases/_sponsorTimes.db.sql", diff --git a/docker/docker-compose-ci.yml b/docker/docker-compose-ci.yml index acfd112..7e64c76 100644 --- a/docker/docker-compose-ci.yml +++ b/docker/docker-compose-ci.yml @@ -6,4 +6,8 @@ services: - POSTGRES_USER=${PG_USER} - POSTGRES_PASSWORD=${PG_PASS} ports: - - 5432:5432 \ No newline at end of file + - 5432:5432 + redis: + image: redis:alpine + ports: + - 6379:6379 \ No newline at end of file diff --git a/package.json b/package.json index 4355380..30d912f 100644 --- a/package.json +++ b/package.json @@ -8,6 +8,7 @@ "dev": "nodemon", "dev:bash": "nodemon -x 'npm test ; npm start'", "postgres:docker": "docker run --rm -p 5432:5432 -e POSTGRES_USER=ci_db_user -e POSTGRES_PASSWORD=ci_db_pass postgres:alpine", + "redis:docker": "docker run --rm -p 6379:6379 redis:alpine", "start": "ts-node src/index.ts", "tsc": "tsc -p tsconfig.json", "lint": "eslint src test", diff --git a/src/utils/redis.ts b/src/utils/redis.ts index ef2a640..8bdea23 100644 --- a/src/utils/redis.ts +++ b/src/utils/redis.ts @@ -8,6 +8,7 @@ interface RedisSB { set(key: string, value: string, callback?: Callback): void; setAsync?(key: string, value: string): Promise<{err: Error | null, reply: string | null}>; delAsync?(...keys: [string]): Promise; + close?(flush?: boolean): void; } let exportObject: RedisSB = { @@ -29,6 +30,7 @@ if (config.redis) { exportObject.getAsync = (key) => new Promise((resolve) => client.get(key, (err, reply) => resolve({ err, reply }))); exportObject.setAsync = (key, value) => new Promise((resolve) => client.set(key, value, (err, reply) => resolve({ err, reply }))); exportObject.delAsync = (...keys) => new Promise((resolve) => client.del(keys, (err) => resolve(err))); + exportObject.close = (flush) => client.end(flush); 
client.on("error", function(error) { Logger.error(error); diff --git a/test/cases/redisTest.ts b/test/cases/redisTest.ts new file mode 100644 index 0000000..b5452a3 --- /dev/null +++ b/test/cases/redisTest.ts @@ -0,0 +1,21 @@ +import { config } from "../../src/config"; +import redis from "../../src/utils/redis"; +import crypto from "crypto"; +import assert from "assert"; + +const randomID = crypto.pseudoRandomBytes(8).toString("hex"); + +describe("redis test", function() { + before(async function() { + if (!config.redis) this.skip(); + await redis.setAsync(randomID, "test"); + }); + it("Should get stored value", (done) => { + redis.getAsync(randomID) + .then(res => { + if (res.err) assert.fail(res.err); + assert.strictEqual(res.reply, "test"); + done(); + }); + }); +}); \ No newline at end of file diff --git a/test/test.ts b/test/test.ts index 3484add..b53879b 100644 --- a/test/test.ts +++ b/test/test.ts @@ -9,6 +9,7 @@ import { initDb } from "../src/databases/databases"; import { ImportMock } from "ts-mock-imports"; import * as rateLimitMiddlewareModule from "../src/middleware/requestRateLimit"; import rateLimit from "express-rate-limit"; +import redis from "../src/utils/redis"; async function init() { ImportMock.mockFunction(rateLimitMiddlewareModule, "rateLimitMiddleware", rateLimit({ @@ -56,6 +57,7 @@ async function init() { mocha.run((failures) => { mockServer.close(); server.close(); + redis.close(true); process.exitCode = failures ? 1 : 0; // exit with non-zero status if there were failures }); }); From 1f9dc92074d6f3765dceb609b9b3416424dfdfaf Mon Sep 17 00:00:00 2001 From: Michael C Date: Sun, 19 Dec 2021 19:37:22 -0500 Subject: [PATCH 04/14] add arrayDeepPartialEquals --- test/cases/getSkipSegmentsByHash.ts | 4 ++-- test/cases/ratings/getRating.ts | 7 +++++-- test/cases/redisTest.ts | 19 +++++++++++++++---- test/utils/partialDeepEquals.ts | 6 ++++++ 4 files changed, 28 insertions(+), 8 deletions(-) diff --git a/test/cases/getSkipSegmentsByHash.ts b/test/cases/getSkipSegmentsByHash.ts index 6716989..6763651 100644 --- a/test/cases/getSkipSegmentsByHash.ts +++ b/test/cases/getSkipSegmentsByHash.ts @@ -1,5 +1,5 @@ import { db } from "../../src/databases/databases"; -import { partialDeepEquals } from "../utils/partialDeepEquals"; +import { partialDeepEquals, arrayPartialDeepEquals } from "../utils/partialDeepEquals"; import { getHash } from "../../src/utils/getHash"; import { ImportMock, } from "ts-mock-imports"; import * as YouTubeAPIModule from "../../src/utils/youtubeApi"; @@ -434,7 +434,7 @@ describe("getSkipSegmentsByHash", () => { }] }]; - assert.ok(partialDeepEquals(data, expected, false) || partialDeepEquals(data, expected2)); + assert.ok(arrayPartialDeepEquals(data, expected) || arrayPartialDeepEquals(data, expected2)); assert.strictEqual(data[0].segments.length, 3); done(); }) diff --git a/test/cases/ratings/getRating.ts b/test/cases/ratings/getRating.ts index 3ba3909..1ab3737 100644 --- a/test/cases/ratings/getRating.ts +++ b/test/cases/ratings/getRating.ts @@ -3,7 +3,7 @@ import { getHash } from "../../../src/utils/getHash"; import assert from "assert"; import { client } from "../../utils/httpClient"; import { AxiosResponse } from "axios"; -import { partialDeepEquals } from "../../utils/partialDeepEquals"; +import { partialDeepEquals, arrayPartialDeepEquals } from "../../utils/partialDeepEquals"; const endpoint = "/api/ratings/rate"; const getRating = (hash: string, params?: unknown): Promise => client.get(`${endpoint}/${hash}`, { params }); @@ -58,6 +58,9 @@ 
describe("getRating", () => { .catch(err => done(err)); }); + /* + This test will fail if tests are already ran with redis. + */ it("Should be able to bulk fetch", (done) => { getBulkRating([videoOnePartialHash, videoTwoPartialHash]) .then(res => { @@ -80,7 +83,7 @@ describe("getRating", () => { count: 10, hash: videoOneIDHash, }]; - assert.ok(partialDeepEquals(res.data, expected)); + assert.ok(arrayPartialDeepEquals(res.data, expected)); done(); }) .catch(err => done(err)); diff --git a/test/cases/redisTest.ts b/test/cases/redisTest.ts index b5452a3..cc99edc 100644 --- a/test/cases/redisTest.ts +++ b/test/cases/redisTest.ts @@ -3,19 +3,30 @@ import redis from "../../src/utils/redis"; import crypto from "crypto"; import assert from "assert"; -const randomID = crypto.pseudoRandomBytes(8).toString("hex"); +const genRandom = (bytes=8) => crypto.pseudoRandomBytes(bytes).toString("hex"); + +const randKey1 = genRandom(); +const randValue1 = genRandom(); +const randKey2 = genRandom(16); describe("redis test", function() { before(async function() { if (!config.redis) this.skip(); - await redis.setAsync(randomID, "test"); + await redis.setAsync(randKey1, randValue1); }); it("Should get stored value", (done) => { - redis.getAsync(randomID) + redis.getAsync(randKey1) .then(res => { if (res.err) assert.fail(res.err); - assert.strictEqual(res.reply, "test"); + assert.strictEqual(res.reply, randValue1); done(); }); }); + it("Should not be able to get not stored value", (done) => { + redis.getAsync(randKey2) + .then(res => { + if (res.reply || res.err ) assert.fail("Value should not be found") + done(); + }); + }) }); \ No newline at end of file diff --git a/test/utils/partialDeepEquals.ts b/test/utils/partialDeepEquals.ts index df651d6..b2832f6 100644 --- a/test/utils/partialDeepEquals.ts +++ b/test/utils/partialDeepEquals.ts @@ -22,6 +22,12 @@ export const partialDeepEquals = (actual: Record, expected: Record< return true; }; +export const arrayPartialDeepEquals = (actual: Array, expected: Array): boolean => { + for (const value of expected) + if (!actual.some(a => partialDeepEquals(a, value, false))) return false; + return true; +}; + export const arrayDeepEquals = (actual: Record, expected: Record, print = true): boolean => { if (actual.length !== expected.length) return false; let flag = true; From d1ed4376ef79275e664a276799d7690786fc7c35 Mon Sep 17 00:00:00 2001 From: Michael C Date: Mon, 20 Dec 2021 00:19:14 -0500 Subject: [PATCH 05/14] add filler to userStats and getTopUsers --- src/routes/getTopUsers.ts | 19 +++++++++++-------- src/routes/getUserStats.ts | 12 +++++++----- 2 files changed, 18 insertions(+), 13 deletions(-) diff --git a/src/routes/getTopUsers.ts b/src/routes/getTopUsers.ts index f063c6f..bc2b67b 100644 --- a/src/routes/getTopUsers.ts +++ b/src/routes/getTopUsers.ts @@ -16,14 +16,16 @@ async function generateTopUsersStats(sortBy: string, categoryStatsEnabled = fals let additionalFields = ""; if (categoryStatsEnabled) { - additionalFields += `SUM(CASE WHEN category = 'sponsor' THEN 1 ELSE 0 END) as "categorySponsor", + additionalFields += ` + SUM(CASE WHEN category = 'sponsor' THEN 1 ELSE 0 END) as "categorySumSponsor", SUM(CASE WHEN category = 'intro' THEN 1 ELSE 0 END) as "categorySumIntro", SUM(CASE WHEN category = 'outro' THEN 1 ELSE 0 END) as "categorySumOutro", SUM(CASE WHEN category = 'interaction' THEN 1 ELSE 0 END) as "categorySumInteraction", - SUM(CASE WHEN category = 'selfpromo' THEN 1 ELSE 0 END) as "categorySelfpromo", - SUM(CASE WHEN category = 'music_offtopic' THEN 1 
ELSE 0 END) as "categoryMusicOfftopic", + SUM(CASE WHEN category = 'selfpromo' THEN 1 ELSE 0 END) as "categorySumSelfpromo", + SUM(CASE WHEN category = 'music_offtopic' THEN 1 ELSE 0 END) as "categorySumMusicOfftopic", SUM(CASE WHEN category = 'preview' THEN 1 ELSE 0 END) as "categorySumPreview", - SUM(CASE WHEN category = 'poi_highlight' THEN 1 ELSE 0 END) as "categorySumHighlight", `; + SUM(CASE WHEN category = 'poi_highlight' THEN 1 ELSE 0 END) as "categorySumHighlight", + SUM(CASE WHEN category = 'filler' THEN 1 ELSE 0 END) as "categorySumFiller",`; } const rows = await db.prepare("all", `SELECT COUNT(*) as "totalSubmissions", SUM(views) as "viewCount", @@ -42,14 +44,15 @@ async function generateTopUsersStats(sortBy: string, categoryStatsEnabled = fals minutesSaved[i] = rows[i].minutesSaved; if (categoryStatsEnabled) { categoryStats[i] = [ - rows[i].categorySponsor, + rows[i].categorySumSponsor, rows[i].categorySumIntro, rows[i].categorySumOutro, rows[i].categorySumInteraction, - rows[i].categorySelfpromo, - rows[i].categoryMusicOfftopic, + rows[i].categorySumSelfpromo, + rows[i].categorySumMusicOfftopic, rows[i].categorySumPreview, - rows[i].categorySumHighlight + rows[i].categorySumHighlight, + rows[i].categorySumFiller ]; } } diff --git a/src/routes/getUserStats.ts b/src/routes/getUserStats.ts index b58c611..2bd9b65 100644 --- a/src/routes/getUserStats.ts +++ b/src/routes/getUserStats.ts @@ -15,10 +15,11 @@ async function dbGetUserSummary(userID: HashedUserID, fetchCategoryStats: boolea SUM(CASE WHEN "category" = 'intro' THEN 1 ELSE 0 END) as "categorySumIntro", SUM(CASE WHEN "category" = 'outro' THEN 1 ELSE 0 END) as "categorySumOutro", SUM(CASE WHEN "category" = 'interaction' THEN 1 ELSE 0 END) as "categorySumInteraction", - SUM(CASE WHEN "category" = 'selfpromo' THEN 1 ELSE 0 END) as "categorySelfpromo", - SUM(CASE WHEN "category" = 'music_offtopic' THEN 1 ELSE 0 END) as "categoryMusicOfftopic", + SUM(CASE WHEN "category" = 'selfpromo' THEN 1 ELSE 0 END) as "categorySumSelfpromo", + SUM(CASE WHEN "category" = 'music_offtopic' THEN 1 ELSE 0 END) as "categorySumMusicOfftopic", SUM(CASE WHEN "category" = 'preview' THEN 1 ELSE 0 END) as "categorySumPreview", - SUM(CASE WHEN "category" = 'poi_highlight' THEN 1 ELSE 0 END) as "categorySumHighlight",`; + SUM(CASE WHEN "category" = 'poi_highlight' THEN 1 ELSE 0 END) as "categorySumHighlight", + SUM(CASE WHEN "category" = 'filler' THEN 1 ELSE 0 END) as "categorySumFiller",`; } if (fetchActionTypeStats) { additionalQuery += ` @@ -48,10 +49,11 @@ async function dbGetUserSummary(userID: HashedUserID, fetchCategoryStats: boolea intro: proxy.categorySumIntro, outro: proxy.categorySumOutro, interaction: proxy.categorySumInteraction, - selfpromo: proxy.categorySelfpromo, - music_offtopic: proxy.categoryMusicOfftopic, + selfpromo: proxy.categorySumSelfpromo, + music_offtopic: proxy.categorySumMusicOfftopic, preview: proxy.categorySumPreview, poi_highlight: proxy.categorySumHighlight, + filler: proxy.categorySumFiller, }; } if (fetchActionTypeStats) { From 2ee7c82760f33b187a9e30258087fa0ad8a355d6 Mon Sep 17 00:00:00 2001 From: Michael C Date: Mon, 20 Dec 2021 00:27:38 -0500 Subject: [PATCH 06/14] add test for filer --- test/cases/getUserStats.ts | 8 +++++--- 1 file changed, 5 insertions(+), 3 deletions(-) diff --git a/test/cases/getUserStats.ts b/test/cases/getUserStats.ts index 900a595..7704b15 100644 --- a/test/cases/getUserStats.ts +++ b/test/cases/getUserStats.ts @@ -20,6 +20,7 @@ describe("getUserStats", () => { await db.prepare("run", 
sponsorTimesQuery, ["getuserstats1", 0, 60, 0, "getuserstatsuuid7", getHash("getuserstats_user_01"), 7, 7, "music_offtopic", 0]); await db.prepare("run", sponsorTimesQuery, ["getuserstats1", 11, 11, 0, "getuserstatsuuid8", getHash("getuserstats_user_01"), 8, 8, "poi_highlight", 0]); await db.prepare("run", sponsorTimesQuery, ["getuserstats1", 0, 60, -2, "getuserstatsuuid9", getHash("getuserstats_user_02"), 8, 2, "sponsor", 0]); + await db.prepare("run", sponsorTimesQuery, ["getuserstats1", 0, 60, 0, "getuserstatsuuid10", getHash("getuserstats_user_01"), 8, 2, "filler", 0]); }); @@ -48,14 +49,15 @@ describe("getUserStats", () => { preview: 1, music_offtopic: 1, poi_highlight: 1, + filler: 1 }, actionTypeCount: { mute: 0, - skip: 8 + skip: 9 }, overallStats: { - minutesSaved: 28, - segmentCount: 8 + minutesSaved: 30, + segmentCount: 9 } }; assert.ok(partialDeepEquals(res.data, expected)); From a137f8a434ed185dd7692ff93664979ca24274d1 Mon Sep 17 00:00:00 2001 From: Ajay Date: Mon, 20 Dec 2021 22:27:35 -0500 Subject: [PATCH 07/14] Add redis to ci name --- .github/workflows/{postgres-ci.yml => postgres-redis-ci.yml} | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) rename .github/workflows/{postgres-ci.yml => postgres-redis-ci.yml} (87%) diff --git a/.github/workflows/postgres-ci.yml b/.github/workflows/postgres-redis-ci.yml similarity index 87% rename from .github/workflows/postgres-ci.yml rename to .github/workflows/postgres-redis-ci.yml index e0f1519..28a19f2 100644 --- a/.github/workflows/postgres-ci.yml +++ b/.github/workflows/postgres-redis-ci.yml @@ -1,10 +1,10 @@ -name: PostgreSQL CI +name: PostgreSQL + Redis CI on: [push, pull_request] jobs: build: - name: Run Tests with PostgreSQL + name: Run Tests with PostgreSQL and Redis runs-on: ubuntu-latest steps: From 7caaf833ddb5bebadea2c1ffea893e62b60af288 Mon Sep 17 00:00:00 2001 From: Ajay Ramachandran Date: Mon, 20 Dec 2021 22:29:48 -0500 Subject: [PATCH 08/14] Fix safe navigation --- src/routes/getSearchSegments.ts | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/routes/getSearchSegments.ts b/src/routes/getSearchSegments.ts index 55bbb5e..51e4740 100644 --- a/src/routes/getSearchSegments.ts +++ b/src/routes/getSearchSegments.ts @@ -28,7 +28,7 @@ function getSortField(...value: T[]): SortableFields { }, {} as Record); for (const name of value) { - if (name?.trim().toLowerCase() in fieldByName) { + if (name?.trim()?.toLowerCase() in fieldByName) { return fieldByName[name.trim().toLowerCase()]; } } From 5ebb6389253860986989d2b0c79ab31f0cbbc348 Mon Sep 17 00:00:00 2001 From: Ajay Date: Mon, 20 Dec 2021 22:22:45 -0500 Subject: [PATCH 09/14] Add overlapping group caching --- package-lock.json | 14 ++++ package.json | 2 + src/routes/getSkipSegments.ts | 103 +++++++++++++++++----------- src/utils/queryCacher.ts | 3 +- src/utils/redisKeys.ts | 4 ++ test/cases/getSkipSegments.ts | 2 +- test/cases/getSkipSegmentsByHash.ts | 2 +- 7 files changed, 88 insertions(+), 42 deletions(-) diff --git a/package-lock.json b/package-lock.json index a6c71de..beb6b65 100644 --- a/package-lock.json +++ b/package-lock.json @@ -16,6 +16,7 @@ "express": "^4.17.1", "express-promise-router": "^4.1.1", "express-rate-limit": "^5.5.1", + "lodash": "^4.17.21", "pg": "^8.7.1", "redis": "^3.1.2", "sync-mysql": "^3.0.1" @@ -25,6 +26,7 @@ "@types/cron": "^1.7.3", "@types/express": "^4.17.13", "@types/express-rate-limit": "^5.1.3", + "@types/lodash": "^4.14.178", "@types/mocha": "^9.0.0", "@types/node": "^16.11.11", "@types/pg": "^8.6.1", @@ -317,6 
+319,12 @@ "integrity": "sha512-qcUXuemtEu+E5wZSJHNxUXeCZhAfXKQ41D+duX+VYPde7xyEVZci+/oXKJL13tnRs9lR2pr4fod59GT6/X1/yQ==", "dev": true }, + "node_modules/@types/lodash": { + "version": "4.14.178", + "resolved": "https://registry.npmjs.org/@types/lodash/-/lodash-4.14.178.tgz", + "integrity": "sha512-0d5Wd09ItQWH1qFbEyQ7oTQ3GZrMfth5JkbN3EvTKLXcHLRDSXeLnlvlOn0wvxVIwK5o2M8JzP/OWz7T3NRsbw==", + "dev": true + }, "node_modules/@types/mime": { "version": "1.3.2", "resolved": "https://registry.npmjs.org/@types/mime/-/mime-1.3.2.tgz", @@ -5337,6 +5345,12 @@ "integrity": "sha512-qcUXuemtEu+E5wZSJHNxUXeCZhAfXKQ41D+duX+VYPde7xyEVZci+/oXKJL13tnRs9lR2pr4fod59GT6/X1/yQ==", "dev": true }, + "@types/lodash": { + "version": "4.14.178", + "resolved": "https://registry.npmjs.org/@types/lodash/-/lodash-4.14.178.tgz", + "integrity": "sha512-0d5Wd09ItQWH1qFbEyQ7oTQ3GZrMfth5JkbN3EvTKLXcHLRDSXeLnlvlOn0wvxVIwK5o2M8JzP/OWz7T3NRsbw==", + "dev": true + }, "@types/mime": { "version": "1.3.2", "resolved": "https://registry.npmjs.org/@types/mime/-/mime-1.3.2.tgz", diff --git a/package.json b/package.json index 30d912f..efee6ac 100644 --- a/package.json +++ b/package.json @@ -24,6 +24,7 @@ "express": "^4.17.1", "express-promise-router": "^4.1.1", "express-rate-limit": "^5.5.1", + "lodash": "^4.17.21", "pg": "^8.7.1", "redis": "^3.1.2", "sync-mysql": "^3.0.1" @@ -33,6 +34,7 @@ "@types/cron": "^1.7.3", "@types/express": "^4.17.13", "@types/express-rate-limit": "^5.1.3", + "@types/lodash": "^4.14.178", "@types/mocha": "^9.0.0", "@types/node": "^16.11.11", "@types/pg": "^8.6.1", diff --git a/src/routes/getSkipSegments.ts b/src/routes/getSkipSegments.ts index 6c7c587..0a38b06 100644 --- a/src/routes/getSkipSegments.ts +++ b/src/routes/getSkipSegments.ts @@ -1,7 +1,8 @@ import { Request, Response } from "express"; +import { partition } from "lodash" import { config } from "../config"; import { db, privateDB } from "../databases/databases"; -import { skipSegmentsHashKey, skipSegmentsKey } from "../utils/redisKeys"; +import { skipSegmentsHashKey, skipSegmentsKey, skipSegmentGroupsKey } from "../utils/redisKeys"; import { SBRecord } from "../types/lib.model"; import { ActionType, Category, CategoryActionType, DBSegment, HashedIP, IPAddress, OverlappingSegmentGroup, Segment, SegmentCache, SegmentUUID, Service, VideoData, VideoID, VideoIDHash, Visibility, VotableObject } from "../types/segments.model"; import { getCategoryActionType } from "../utils/categoryInfo"; @@ -13,7 +14,7 @@ import { getReputation } from "../utils/reputation"; import { getService } from "../utils/getService"; -async function prepareCategorySegments(req: Request, videoID: VideoID, category: Category, segments: DBSegment[], cache: SegmentCache = { shadowHiddenSegmentIPs: {} }): Promise { +async function prepareCategorySegments(req: Request, videoID: VideoID, service: Service, segments: DBSegment[], cache: SegmentCache = { shadowHiddenSegmentIPs: {} }, useCache: boolean): Promise { const shouldFilter: boolean[] = await Promise.all(segments.map(async (segment) => { if (segment.votes < -1 && !segment.required) { return false; //too untrustworthy, just ignore it @@ -39,14 +40,16 @@ async function prepareCategorySegments(req: Request, videoID: VideoID, category: cache.userHashedIP = await getHashCache((getIP(req) + config.globalSalt) as IPAddress); } //if this isn't their ip, don't send it to them - return cache.shadowHiddenSegmentIPs[videoID][segment.timeSubmitted]?.some( + const shouldShadowHide = cache.shadowHiddenSegmentIPs[videoID][segment.timeSubmitted]?.some( 
(shadowHiddenSegment) => shadowHiddenSegment.hashedIP === cache.userHashedIP) ?? false; + + if (shouldShadowHide) useCache = false; + return shouldShadowHide; })); const filteredSegments = segments.filter((_, index) => shouldFilter[index]); - const maxSegments = getCategoryActionType(category) === CategoryActionType.Skippable ? Infinity : 1; - return (await chooseSegments(filteredSegments, maxSegments)).map((chosenSegment) => ({ + return (await chooseSegments(videoID, service, filteredSegments, useCache)).map((chosenSegment) => ({ category: chosenSegment.category, actionType: chosenSegment.actionType, segment: [chosenSegment.startTime, chosenSegment.endTime], @@ -62,28 +65,21 @@ async function prepareCategorySegments(req: Request, videoID: VideoID, category: async function getSegmentsByVideoID(req: Request, videoID: VideoID, categories: Category[], actionTypes: ActionType[], requiredSegments: SegmentUUID[], service: Service): Promise { const cache: SegmentCache = { shadowHiddenSegmentIPs: {} }; - const segments: Segment[] = []; try { categories = categories.filter((category) => !/[^a-z|_|-]/.test(category)); if (categories.length === 0) return null; - const segmentsByCategory: SBRecord = (await getSegmentsFromDBByVideoID(videoID, service)) - .filter((segment: DBSegment) => categories.includes(segment?.category) && actionTypes.includes(segment?.actionType)) - .reduce((acc: SBRecord, segment: DBSegment) => { + const segments: DBSegment[] = (await getSegmentsFromDBByVideoID(videoID, service)) + .map((segment: DBSegment) => { if (filterRequiredSegments(segment.UUID, requiredSegments)) segment.required = true; - - acc[segment.category] ??= []; - acc[segment.category].push(segment); - - return acc; + return segment; }, {}); - for (const [category, categorySegments] of Object.entries(segmentsByCategory)) { - segments.push(...(await prepareCategorySegments(req, videoID, category as Category, categorySegments, cache))); - } + const canUseCache = requiredSegments.length === 0; + const processedSegments: Segment[] = await prepareCategorySegments(req, videoID, service, segments, cache, canUseCache); - return segments; + return processedSegments.filter((segment: Segment) => categories.includes(segment?.category) && actionTypes.includes(segment?.actionType)); } catch (err) { if (err) { Logger.error(err as string); @@ -98,34 +94,37 @@ async function getSegmentsByHash(req: Request, hashedVideoIDPrefix: VideoIDHash, const segments: SBRecord = {}; try { - type SegmentWithHashPerVideoID = SBRecord}>; + type SegmentWithHashPerVideoID = SBRecord; categories = categories.filter((category) => !(/[^a-z|_|-]/.test(category))); if (categories.length === 0) return null; const segmentPerVideoID: SegmentWithHashPerVideoID = (await getSegmentsFromDBByHash(hashedVideoIDPrefix, service)) - .filter((segment: DBSegment) => categories.includes(segment?.category) && actionTypes.includes(segment?.actionType)) .reduce((acc: SegmentWithHashPerVideoID, segment: DBSegment) => { acc[segment.videoID] = acc[segment.videoID] || { hash: segment.hashedVideoID, - segmentPerCategory: {} + segments: [] }; if (filterRequiredSegments(segment.UUID, requiredSegments)) segment.required = true; - acc[segment.videoID].segmentPerCategory[segment.category] ??= []; - acc[segment.videoID].segmentPerCategory[segment.category].push(segment); + acc[segment.videoID].segments ??= []; + acc[segment.videoID].segments.push(segment); return acc; }, {}); for (const [videoID, videoData] of Object.entries(segmentPerVideoID)) { - segments[videoID] = { + const 
data: VideoData = { hash: videoData.hash, segments: [], }; - for (const [category, segmentPerCategory] of Object.entries(videoData.segmentPerCategory)) { - segments[videoID].segments.push(...(await prepareCategorySegments(req, videoID as VideoID, category as Category, segmentPerCategory, cache))); + const canUseCache = requiredSegments.length === 0; + data.segments = (await prepareCategorySegments(req, videoID as VideoID, service, videoData.segments, cache, canUseCache)) + .filter((segment: Segment) => categories.includes(segment?.category) && actionTypes.includes(segment?.actionType)); + + if (data.segments.length > 0) { + segments[videoID] = data; } } @@ -164,10 +163,11 @@ async function getSegmentsFromDBByVideoID(videoID: VideoID, service: Service): P return await QueryCacher.get(fetchFromDB, skipSegmentsKey(videoID, service)); } -//gets a weighted random choice from the choices array based on their `votes` property. -//amountOfChoices specifies the maximum amount of choices to return, 1 or more. -//choices are unique -function getWeightedRandomChoice(choices: T[], amountOfChoices: number): T[] { +// Gets a weighted random choice from the choices array based on their `votes` property. +// amountOfChoices specifies the maximum amount of choices to return, 1 or more. +// Choices are unique +// If a predicate is given, it will only filter choices following it, and will leave the rest in the list +function getWeightedRandomChoice(choices: T[], amountOfChoices: number, predicate?: (choice: T) => void): T[] { //trivial case: no need to go through the whole process if (amountOfChoices >= choices.length) { return choices; @@ -179,7 +179,7 @@ function getWeightedRandomChoice(choices: T[], amountOf //assign a weight to each choice let totalWeight = 0; - const choicesWithWeights: TWithWeight[] = choices.map(choice => { + let choicesWithWeights: TWithWeight[] = choices.map(choice => { const boost = Math.min(choice.reputation, 4); //The 3 makes -2 the minimum votes before being ignored completely @@ -190,8 +190,20 @@ function getWeightedRandomChoice(choices: T[], amountOf return { ...choice, weight }; }); + let forceIncludedChoices: T[] = []; + if (predicate) { + const splitArray = partition(choicesWithWeights, predicate); + choicesWithWeights = splitArray[0]; + forceIncludedChoices = splitArray[1]; + } + + // Nothing to filter for + if (amountOfChoices >= choicesWithWeights.length) { + return choices; + } + //iterate and find amountOfChoices choices - const chosen = []; + const chosen = [...forceIncludedChoices]; while (amountOfChoices-- > 0) { //weighted random draw of one element of choices const randomNumber = Math.random() * totalWeight; @@ -210,11 +222,25 @@ function getWeightedRandomChoice(choices: T[], amountOf return chosen; } +async function chooseSegments(videoID: VideoID, service: Service, segments: DBSegment[], useCache: boolean): Promise { + const fetchData = async () => await buildSegmentGroups(segments); + + const groups = useCache + ? 
await QueryCacher.get(fetchData, skipSegmentGroupsKey(videoID, service)) + : await fetchData(); + + // Filter for only 1 item for POI categories + return getWeightedRandomChoice(groups, 1, (choice) => getCategoryActionType(choice.segments[0].category) === CategoryActionType.POI) + .map(//randomly choose 1 good segment per group and return them + group => getWeightedRandomChoice(group.segments, 1)[0] + ); +} + //This function will find segments that are contained inside of eachother, called similar segments //Only one similar time will be returned, randomly generated based on the sqrt of votes. //This allows new less voted items to still sometimes appear to give them a chance at getting votes. //Segments with less than -1 votes are already ignored before this function is called -async function chooseSegments(segments: DBSegment[], max: number): Promise { +async function buildSegmentGroups(segments: DBSegment[]): Promise { //Create groups of segments that are similar to eachother //Segments must be sorted by their startTime so that we can build groups chronologically: //1. As long as the segments' startTime fall inside the currentGroup, we keep adding them to that group @@ -265,10 +291,7 @@ async function chooseSegments(segments: DBSegment[], max: number): Promise getWeightedRandomChoice(group.segments, 1)[0], - ); + return overlappingSegmentsGroups; } function splitPercentOverlap(groups: OverlappingSegmentGroup[]): OverlappingSegmentGroup[] { @@ -277,12 +300,14 @@ function splitPercentOverlap(groups: OverlappingSegmentGroup[]): OverlappingSegm group.segments.forEach((segment) => { const bestGroup = result.find((group) => { // At least one segment in the group must have high % overlap or the same action type + // Since POI segments will always have 0 overlap, they will always be in their own groups return group.segments.some((compareSegment) => { const overlap = Math.min(segment.endTime, compareSegment.endTime) - Math.max(segment.startTime, compareSegment.startTime); const overallDuration = Math.max(segment.endTime, compareSegment.endTime) - Math.min(segment.startTime, compareSegment.startTime); const overlapPercent = overlap / overallDuration; - return (overlapPercent > 0 && segment.actionType === compareSegment.actionType && segment.actionType !== ActionType.Chapter) - || overlapPercent >= 0.6 + return (overlapPercent > 0 && segment.actionType === compareSegment.actionType && segment.category == compareSegment.category && segment.actionType !== ActionType.Chapter) + || (overlapPercent >= 0.6 && segment.actionType !== compareSegment.actionType && segment.category === compareSegment.category) + || (overlapPercent >= 0.8 && segment.actionType === compareSegment.actionType && segment.category !== compareSegment.category) || (overlapPercent >= 0.8 && segment.actionType === ActionType.Chapter && compareSegment.actionType === ActionType.Chapter); }); }); diff --git a/src/utils/queryCacher.ts b/src/utils/queryCacher.ts index 128f3a2..57af472 100644 --- a/src/utils/queryCacher.ts +++ b/src/utils/queryCacher.ts @@ -1,6 +1,6 @@ import redis from "../utils/redis"; import { Logger } from "../utils/logger"; -import { skipSegmentsHashKey, skipSegmentsKey, reputationKey, ratingHashKey } from "./redisKeys"; +import { skipSegmentsHashKey, skipSegmentsKey, reputationKey, ratingHashKey, skipSegmentGroupsKey } from "./redisKeys"; import { Service, VideoID, VideoIDHash } from "../types/segments.model"; import { UserID } from "../types/user.model"; @@ -82,6 +82,7 @@ async function getAndSplit(fetchFromDB: 
(values: U[]) => Pr function clearSegmentCache(videoInfo: { videoID: VideoID; hashedVideoID: VideoIDHash; service: Service; userID?: UserID; }): void { if (videoInfo) { redis.delAsync(skipSegmentsKey(videoInfo.videoID, videoInfo.service)); + redis.delAsync(skipSegmentGroupsKey(videoInfo.videoID, videoInfo.service)); redis.delAsync(skipSegmentsHashKey(videoInfo.hashedVideoID, videoInfo.service)); if (videoInfo.userID) redis.delAsync(reputationKey(videoInfo.userID)); } diff --git a/src/utils/redisKeys.ts b/src/utils/redisKeys.ts index 2245d2e..65b12ff 100644 --- a/src/utils/redisKeys.ts +++ b/src/utils/redisKeys.ts @@ -7,6 +7,10 @@ export function skipSegmentsKey(videoID: VideoID, service: Service): string { return `segments.v2.${service}.videoID.${videoID}`; } +export function skipSegmentGroupsKey(videoID: VideoID, service: Service): string { + return `segments.groups.${service}.videoID.${videoID}`; +} + export function skipSegmentsHashKey(hashedVideoIDPrefix: VideoIDHash, service: Service): string { hashedVideoIDPrefix = hashedVideoIDPrefix.substring(0, 4) as VideoIDHash; if (hashedVideoIDPrefix.length !== 4) Logger.warn(`Redis skip segment hash-prefix key is not length 4! ${hashedVideoIDPrefix}`); diff --git a/test/cases/getSkipSegments.ts b/test/cases/getSkipSegments.ts index cf47098..780107f 100644 --- a/test/cases/getSkipSegments.ts +++ b/test/cases/getSkipSegments.ts @@ -25,7 +25,7 @@ describe("getSkipSegments", () => { await db.prepare("run", query, ["requiredSegmentVid", 80, 90, 2, 0, "requiredSegmentVid4", "testman", 0, 50, "sponsor", "skip", "YouTube", 0, 0, 0, ""]); await db.prepare("run", query, ["chapterVid", 60, 80, 2, 0, "chapterVid-1", "testman", 0, 50, "chapter", "chapter", "YouTube", 0, 0, 0, "Chapter 1"]); await db.prepare("run", query, ["chapterVid", 70, 75, 2, 0, "chapterVid-2", "testman", 0, 50, "chapter", "chapter", "YouTube", 0, 0, 0, "Chapter 2"]); - await db.prepare("run", query, ["chapterVid", 71, 76, 2, 0, "chapterVid-3", "testman", 0, 50, "chapter", "chapter", "YouTube", 0, 0, 0, "Chapter 3"]); + await db.prepare("run", query, ["chapterVid", 71, 75, 2, 0, "chapterVid-3", "testman", 0, 50, "chapter", "chapter", "YouTube", 0, 0, 0, "Chapter 3"]); await db.prepare("run", query, ["requiredSegmentHashVid", 10, 20, -2, 0, "1d04b98f48e8f8bcc15c6ae5ac050801cd6dcfd428fb5f9e65c4e16e7807340fa", "testman", 0, 50, "sponsor", "skip", "YouTube", 0, 0, 0, ""]); await db.prepare("run", query, ["requiredSegmentHashVid", 20, 30, -2, 0, "1ebde8e8ae03096b6c866aa2c8cc7ee1d720ca1fca27bea3f39a6a1b876577e71", "testman", 0, 50, "sponsor", "skip", "YouTube", 0, 0, 0, ""]); return; diff --git a/test/cases/getSkipSegmentsByHash.ts b/test/cases/getSkipSegmentsByHash.ts index 6763651..d526f2d 100644 --- a/test/cases/getSkipSegmentsByHash.ts +++ b/test/cases/getSkipSegmentsByHash.ts @@ -33,7 +33,7 @@ describe("getSkipSegmentsByHash", () => { await db.prepare("run", query, ["requiredSegmentVid", 80, 90, 2, "requiredSegmentVid-4", "testman", 0, 50, "sponsor", "skip", "YouTube", 0, 0, requiredSegmentVidHash, ""]); await db.prepare("run", query, ["chapterVid-hash", 60, 80, 2, "chapterVid-hash-1", "testman", 0, 50, "chapter", "chapter", "YouTube", 0, 0, getHash("chapterVid-hash", 1), "Chapter 1"]); //7258 await db.prepare("run", query, ["chapterVid-hash", 70, 75, 2, "chapterVid-hash-2", "testman", 0, 50, "chapter", "chapter", "YouTube", 0, 0, getHash("chapterVid-hash", 1), "Chapter 2"]); //7258 - await db.prepare("run", query, ["chapterVid-hash", 71, 76, 2, "chapterVid-hash-3", "testman", 0, 50, 
"chapter", "chapter", "YouTube", 0, 0, getHash("chapterVid-hash", 1), "Chapter 3"]); //7258 + await db.prepare("run", query, ["chapterVid-hash", 71, 75, 2, "chapterVid-hash-3", "testman", 0, 50, "chapter", "chapter", "YouTube", 0, 0, getHash("chapterVid-hash", 1), "Chapter 3"]); //7258 await db.prepare("run", query, ["longMuteVid-hash", 40, 45, 2, "longMuteVid-hash-1", "testman", 0, 50, "sponsor", "skip", "YouTube", 0, 0, getHash("longMuteVid-hash", 1), ""]); //6613 await db.prepare("run", query, ["longMuteVid-hash", 30, 35, 2, "longMuteVid-hash-2", "testman", 0, 50, "sponsor", "skip", "YouTube", 0, 0, getHash("longMuteVid-hash", 1), ""]); //6613 await db.prepare("run", query, ["longMuteVid-hash", 2, 80, 2, "longMuteVid-hash-3", "testman", 0, 50, "sponsor", "mute", "YouTube", 0, 0, getHash("longMuteVid-hash", 1), ""]); //6613 From 3dd9024cc7663532223322a589691455e84f1cbb Mon Sep 17 00:00:00 2001 From: Michael C Date: Mon, 20 Dec 2021 23:04:41 -0500 Subject: [PATCH 10/14] eslint + workflow formatting - appropriate job names - only trigger on pushes to master branch & PRs - conform to formatting --- .github/workflows/ci.yml | 9 ++++++--- .github/workflows/eslint.yml | 9 ++++++--- .github/workflows/generate-sqlite-base.yml | 5 +++++ .github/workflows/postgres-redis-ci.yml | 8 ++++++-- .github/workflows/take-action.yml | 12 +++++------- src/app.ts | 1 - test/cases/ratings/getRating.ts | 2 +- test/cases/redisTest.ts | 4 ++-- 8 files changed, 31 insertions(+), 19 deletions(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index d938520..619bb9c 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -1,9 +1,13 @@ name: SQLite CI -on: [push, pull_request] +on: + push: + branches: + - master + pull_request: jobs: - build: + test: name: Run Tests with SQLite runs-on: ubuntu-latest @@ -12,7 +16,6 @@ jobs: - uses: actions/checkout@v2 - uses: actions/setup-node@v2 - run: npm install - - name: Run Tests timeout-minutes: 5 run: npm test diff --git a/.github/workflows/eslint.yml b/.github/workflows/eslint.yml index 38c42c4..fab273b 100644 --- a/.github/workflows/eslint.yml +++ b/.github/workflows/eslint.yml @@ -1,9 +1,13 @@ name: Linting -on: [push, pull_request] +on: + push: + branches: + - master + pull_request: jobs: - build: + lint: name: Lint with ESLint runs-on: ubuntu-latest @@ -12,7 +16,6 @@ jobs: - uses: actions/checkout@v2 - uses: actions/setup-node@v2 - run: npm install - - name: Run Tests timeout-minutes: 5 run: npm run lint \ No newline at end of file diff --git a/.github/workflows/generate-sqlite-base.yml b/.github/workflows/generate-sqlite-base.yml index 9f1a68e..53f1703 100644 --- a/.github/workflows/generate-sqlite-base.yml +++ b/.github/workflows/generate-sqlite-base.yml @@ -1,12 +1,17 @@ name: create-sqlite-base + on: push: + branches: + - master paths: - databases/** jobs: make-base-db: + name: Generate SQLite base .db runs-on: ubuntu-latest + steps: - uses: actions/checkout@v2 - uses: actions/setup-node@v2 diff --git a/.github/workflows/postgres-redis-ci.yml b/.github/workflows/postgres-redis-ci.yml index 28a19f2..44500f8 100644 --- a/.github/workflows/postgres-redis-ci.yml +++ b/.github/workflows/postgres-redis-ci.yml @@ -1,9 +1,13 @@ name: PostgreSQL + Redis CI -on: [push, pull_request] +on: + push: + branches: + - master + pull_request: jobs: - build: + test: name: Run Tests with PostgreSQL and Redis runs-on: ubuntu-latest diff --git a/.github/workflows/take-action.yml b/.github/workflows/take-action.yml index c5b6f1f..4529ffa 100644 --- 
a/.github/workflows/take-action.yml +++ b/.github/workflows/take-action.yml @@ -1,14 +1,12 @@ -# .github/workflows/take.yml name: Assign issue to contributor -on: - issue_comment: +on: [issue_comment] jobs: assign: name: Take an issue runs-on: ubuntu-latest steps: - - name: take the issue - uses: bdougie/take-action@main - env: - GITHUB_TOKEN: ${{ github.token }} + - name: take the issue + uses: bdougie/take-action@main + env: + GITHUB_TOKEN: ${{ github.token }} diff --git a/src/app.ts b/src/app.ts index 4f6e1f7..23ed171 100644 --- a/src/app.ts +++ b/src/app.ts @@ -46,7 +46,6 @@ import { getChapterNames } from "./routes/getChapterNames"; import { postRating } from "./routes/ratings/postRating"; import { getRating } from "./routes/ratings/getRating"; import { postClearCache as ratingPostClearCache } from "./routes/ratings/postClearCache"; -import path from "path"; export function createServer(callback: () => void): Server { // Create a service (the app object is just a callback). diff --git a/test/cases/ratings/getRating.ts b/test/cases/ratings/getRating.ts index 1ab3737..0f63ba7 100644 --- a/test/cases/ratings/getRating.ts +++ b/test/cases/ratings/getRating.ts @@ -59,7 +59,7 @@ describe("getRating", () => { }); /* - This test will fail if tests are already ran with redis. + This test will fail if tests are already ran with redis. */ it("Should be able to bulk fetch", (done) => { getBulkRating([videoOnePartialHash, videoTwoPartialHash]) diff --git a/test/cases/redisTest.ts b/test/cases/redisTest.ts index cc99edc..3ab238d 100644 --- a/test/cases/redisTest.ts +++ b/test/cases/redisTest.ts @@ -25,8 +25,8 @@ describe("redis test", function() { it("Should not be able to get not stored value", (done) => { redis.getAsync(randKey2) .then(res => { - if (res.reply || res.err ) assert.fail("Value should not be found") + if (res.reply || res.err ) assert.fail("Value should not be found"); done(); }); - }) + }); }); \ No newline at end of file From 66af4f60c85f5ae00c1a6026ebe4d1e76c6833b5 Mon Sep 17 00:00:00 2001 From: Ajay Date: Mon, 20 Dec 2021 23:07:12 -0500 Subject: [PATCH 11/14] Add test for different categories at same time --- src/routes/getSkipSegments.ts | 2 +- test/cases/getSkipSegmentsByHash.ts | 75 ++++++++++++++++++----------- 2 files changed, 49 insertions(+), 28 deletions(-) diff --git a/src/routes/getSkipSegments.ts b/src/routes/getSkipSegments.ts index 0a38b06..eeec7b4 100644 --- a/src/routes/getSkipSegments.ts +++ b/src/routes/getSkipSegments.ts @@ -307,7 +307,7 @@ function splitPercentOverlap(groups: OverlappingSegmentGroup[]): OverlappingSegm const overlapPercent = overlap / overallDuration; return (overlapPercent > 0 && segment.actionType === compareSegment.actionType && segment.category == compareSegment.category && segment.actionType !== ActionType.Chapter) || (overlapPercent >= 0.6 && segment.actionType !== compareSegment.actionType && segment.category === compareSegment.category) - || (overlapPercent >= 0.8 && segment.actionType === compareSegment.actionType && segment.category !== compareSegment.category) + || (overlapPercent >= 0.9 && segment.actionType === compareSegment.actionType && segment.category !== compareSegment.category) || (overlapPercent >= 0.8 && segment.actionType === ActionType.Chapter && compareSegment.actionType === ActionType.Chapter); }); }); diff --git a/test/cases/getSkipSegmentsByHash.ts b/test/cases/getSkipSegmentsByHash.ts index d526f2d..7f89973 100644 --- a/test/cases/getSkipSegmentsByHash.ts +++ b/test/cases/getSkipSegmentsByHash.ts @@ -16,34 +16,37 
@@ describe("getSkipSegmentsByHash", () => { const getSegmentsByHash0Hash = "fdaff4dee1043451faa7398324fb63d8618ebcd11bddfe0491c488db12c6c910"; const requiredSegmentVidHash = "d51822c3f681e07aef15a8855f52ad12db9eb9cf059e65b16b64c43359557f61"; const requiredSegmentHashVidHash = "17bf8d9090e050257772f8bff277293c29c7ce3b25eb969a8fae111a2434504d"; + const differentCategoryVidHash = "7fac44d1ee3257ec7f18953e2b5f991828de6854ad57193d1027c530981a89c0"; before(async () => { - const query = 'INSERT INTO "sponsorTimes" ("videoID", "startTime", "endTime", "votes", "UUID", "userID", "timeSubmitted", views, category, "actionType", "service", "hidden", "shadowHidden", "hashedVideoID", "description") VALUES(?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)'; - await db.prepare("run", query, ["getSegmentsByHash-0", 1, 10, 2, "getSegmentsByHash-01", "testman", 0, 50, "sponsor", "skip", "YouTube", 0, 0, getSegmentsByHash0Hash, ""]); - await db.prepare("run", query, ["getSegmentsByHash-0", 1, 10, 2, "getSegmentsByHash-02", "testman", 0, 50, "sponsor", "skip", "PeerTube", 0, 0, getSegmentsByHash0Hash, ""]); - await db.prepare("run", query, ["getSegmentsByHash-0", 20, 30, 2, "getSegmentsByHash-03", "testman", 100, 150, "intro", "skip", "YouTube", 0, 0, getSegmentsByHash0Hash, ""]); - await db.prepare("run", query, ["getSegmentsByHash-0", 40, 50, 2, "getSegmentsByHash-04", "testman", 0, 50, "sponsor", "mute", "YouTube", 0, 0, getSegmentsByHash0Hash, ""]); - await db.prepare("run", query, ["getSegmentsByHash-noMatchHash", 40, 50, 2, "getSegmentsByHash-noMatchHash", "testman", 0, 50, "sponsor", "skip", "YouTube", 0, 0, "fdaffnoMatchHash", ""]); - await db.prepare("run", query, ["getSegmentsByHash-1", 60, 70, 2, "getSegmentsByHash-1", "testman", 0, 50, "sponsor", "skip", "YouTube", 0, 0, "3272fa85ee0927f6073ef6f07ad5f3146047c1abba794cfa364d65ab9921692b", ""]); - await db.prepare("run", query, ["onlyHidden", 60, 70, 2, "onlyHidden", "testman", 0, 50, "sponsor", "skip", "YouTube", 1, 0, "f3a199e1af001d716cdc6599360e2b062c2d2b3fa2885f6d9d2fd741166cbbd3", ""]); - await db.prepare("run", query, ["highlightVid", 60, 60, 2, "highlightVid-1", "testman", 0, 50, "poi_highlight", "skip", "YouTube", 0, 0, getHash("highlightVid", 1), ""]); - await db.prepare("run", query, ["highlightVid", 70, 70, 2, "highlightVid-2", "testman", 0, 50, "poi_highlight", "skip", "YouTube", 0, 0, getHash("highlightVid", 1), ""]); - await db.prepare("run", query, ["requiredSegmentVid", 60, 70, 2, "requiredSegmentVid-1", "testman", 0, 50, "sponsor", "skip", "YouTube", 0, 0, requiredSegmentVidHash, ""]); - await db.prepare("run", query, ["requiredSegmentVid", 60, 70, -2, "requiredSegmentVid-2", "testman", 0, 50, "sponsor", "skip", "YouTube", 0, 0, requiredSegmentVidHash, ""]); - await db.prepare("run", query, ["requiredSegmentVid", 80, 90, -2, "requiredSegmentVid-3", "testman", 0, 50, "sponsor", "skip", "YouTube", 0, 0, requiredSegmentVidHash, ""]); - await db.prepare("run", query, ["requiredSegmentVid", 80, 90, 2, "requiredSegmentVid-4", "testman", 0, 50, "sponsor", "skip", "YouTube", 0, 0, requiredSegmentVidHash, ""]); - await db.prepare("run", query, ["chapterVid-hash", 60, 80, 2, "chapterVid-hash-1", "testman", 0, 50, "chapter", "chapter", "YouTube", 0, 0, getHash("chapterVid-hash", 1), "Chapter 1"]); //7258 - await db.prepare("run", query, ["chapterVid-hash", 70, 75, 2, "chapterVid-hash-2", "testman", 0, 50, "chapter", "chapter", "YouTube", 0, 0, getHash("chapterVid-hash", 1), "Chapter 2"]); //7258 - await db.prepare("run", query, ["chapterVid-hash", 
71, 75, 2, "chapterVid-hash-3", "testman", 0, 50, "chapter", "chapter", "YouTube", 0, 0, getHash("chapterVid-hash", 1), "Chapter 3"]); //7258 - await db.prepare("run", query, ["longMuteVid-hash", 40, 45, 2, "longMuteVid-hash-1", "testman", 0, 50, "sponsor", "skip", "YouTube", 0, 0, getHash("longMuteVid-hash", 1), ""]); //6613 - await db.prepare("run", query, ["longMuteVid-hash", 30, 35, 2, "longMuteVid-hash-2", "testman", 0, 50, "sponsor", "skip", "YouTube", 0, 0, getHash("longMuteVid-hash", 1), ""]); //6613 - await db.prepare("run", query, ["longMuteVid-hash", 2, 80, 2, "longMuteVid-hash-3", "testman", 0, 50, "sponsor", "mute", "YouTube", 0, 0, getHash("longMuteVid-hash", 1), ""]); //6613 - await db.prepare("run", query, ["longMuteVid-hash", 3, 78, 2, "longMuteVid-hash-4", "testman", 0, 50, "sponsor", "mute", "YouTube", 0, 0, getHash("longMuteVid-hash", 1), ""]); //6613 - await db.prepare("run", query, ["longMuteVid-2-hash", 1, 15, 2, "longMuteVid-2-hash-1", "testman", 0, 50, "sponsor", "mute", "YouTube", 0, 0, getHash("longMuteVid-2-hash", 1), ""]); //ab0c - await db.prepare("run", query, ["longMuteVid-2-hash", 30, 35, 2, "longMuteVid-2-hash-2", "testman", 0, 50, "sponsor", "skip", "YouTube", 0, 0, getHash("longMuteVid-2-hash", 1), ""]); //ab0c - await db.prepare("run", query, ["longMuteVid-2-hash", 2, 80, 2, "longMuteVid-2-hash-3", "testman", 0, 50, "sponsor", "mute", "YouTube", 0, 0, getHash("longMuteVid-2-hash", 1), ""]); //ab0c - await db.prepare("run", query, ["longMuteVid-2-hash", 3, 78, 2, "longMuteVid-2-hash-4", "testman", 0, 50, "sponsor", "mute", "YouTube", 0, 0, getHash("longMuteVid-2-hash", 1), ""]); //ab0c - await db.prepare("run", query, ["requiredSegmentHashVid", 10, 20, -2, "fbf0af454059733c8822f6a4ac8ec568e0787f8c0a5ee915dd5b05e0d7a9a388", "testman", 0, 50, "sponsor", "skip", "YouTube", 0, 0, requiredSegmentHashVidHash, ""]); - await db.prepare("run", query, ["requiredSegmentHashVid", 20, 30, -2, "7e1ebc5194551d2d0a606d64f675e5a14952e4576b2959f8c9d51e316c14f8da", "testman", 0, 50, "sponsor", "skip", "YouTube", 0, 0, requiredSegmentHashVidHash, ""]); + const query = 'INSERT INTO "sponsorTimes" ("videoID", "startTime", "endTime", "votes", "locked", "UUID", "userID", "timeSubmitted", views, category, "actionType", "service", "hidden", "shadowHidden", "hashedVideoID", "description") VALUES(?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)'; + await db.prepare("run", query, ["getSegmentsByHash-0", 1, 10, 2, 0, "getSegmentsByHash-01", "testman", 0, 50, "sponsor", "skip", "YouTube", 0, 0, getSegmentsByHash0Hash, ""]); + await db.prepare("run", query, ["getSegmentsByHash-0", 1, 10, 2, 0, "getSegmentsByHash-02", "testman", 0, 50, "sponsor", "skip", "PeerTube", 0, 0, getSegmentsByHash0Hash, ""]); + await db.prepare("run", query, ["getSegmentsByHash-0", 20, 30, 2, 0, "getSegmentsByHash-03", "testman", 100, 150, "intro", "skip", "YouTube", 0, 0, getSegmentsByHash0Hash, ""]); + await db.prepare("run", query, ["getSegmentsByHash-0", 40, 50, 2, 0, "getSegmentsByHash-04", "testman", 0, 50, "sponsor", "mute", "YouTube", 0, 0, getSegmentsByHash0Hash, ""]); + await db.prepare("run", query, ["getSegmentsByHash-noMatchHash", 40, 50, 2, 0, "getSegmentsByHash-noMatchHash", "testman", 0, 50, "sponsor", "skip", "YouTube", 0, 0, "fdaffnoMatchHash", ""]); + await db.prepare("run", query, ["getSegmentsByHash-1", 60, 70, 2, 0, "getSegmentsByHash-1", "testman", 0, 50, "sponsor", "skip", "YouTube", 0, 0, "3272fa85ee0927f6073ef6f07ad5f3146047c1abba794cfa364d65ab9921692b", ""]); + await db.prepare("run", 
query, ["onlyHidden", 60, 70, 2, 0, "onlyHidden", "testman", 0, 50, "sponsor", "skip", "YouTube", 1, 0, "f3a199e1af001d716cdc6599360e2b062c2d2b3fa2885f6d9d2fd741166cbbd3", ""]); + await db.prepare("run", query, ["highlightVid", 60, 60, 2, 0, "highlightVid-1", "testman", 0, 50, "poi_highlight", "skip", "YouTube", 0, 0, getHash("highlightVid", 1), ""]); + await db.prepare("run", query, ["highlightVid", 70, 70, 2, 0, "highlightVid-2", "testman", 0, 50, "poi_highlight", "skip", "YouTube", 0, 0, getHash("highlightVid", 1), ""]); + await db.prepare("run", query, ["requiredSegmentVid", 60, 70, 2, 0, "requiredSegmentVid-1", "testman", 0, 50, "sponsor", "skip", "YouTube", 0, 0, requiredSegmentVidHash, ""]); + await db.prepare("run", query, ["requiredSegmentVid", 60, 70, -2, 0, "requiredSegmentVid-2", "testman", 0, 50, "sponsor", "skip", "YouTube", 0, 0, requiredSegmentVidHash, ""]); + await db.prepare("run", query, ["requiredSegmentVid", 80, 90, -2, 0, "requiredSegmentVid-3", "testman", 0, 50, "sponsor", "skip", "YouTube", 0, 0, requiredSegmentVidHash, ""]); + await db.prepare("run", query, ["requiredSegmentVid", 80, 90, 2, 0, "requiredSegmentVid-4", "testman", 0, 50, "sponsor", "skip", "YouTube", 0, 0, requiredSegmentVidHash, ""]); + await db.prepare("run", query, ["chapterVid-hash", 60, 80, 2, 0, "chapterVid-hash-1", "testman", 0, 50, "chapter", "chapter", "YouTube", 0, 0, getHash("chapterVid-hash", 1), "Chapter 1"]); //7258 + await db.prepare("run", query, ["chapterVid-hash", 70, 75, 2, 0, "chapterVid-hash-2", "testman", 0, 50, "chapter", "chapter", "YouTube", 0, 0, getHash("chapterVid-hash", 1), "Chapter 2"]); //7258 + await db.prepare("run", query, ["chapterVid-hash", 71, 75, 2, 0, "chapterVid-hash-3", "testman", 0, 50, "chapter", "chapter", "YouTube", 0, 0, getHash("chapterVid-hash", 1), "Chapter 3"]); //7258 + await db.prepare("run", query, ["longMuteVid-hash", 40, 45, 2, 0, "longMuteVid-hash-1", "testman", 0, 50, "sponsor", "skip", "YouTube", 0, 0, getHash("longMuteVid-hash", 1), ""]); //6613 + await db.prepare("run", query, ["longMuteVid-hash", 30, 35, 2, 0, "longMuteVid-hash-2", "testman", 0, 50, "sponsor", "skip", "YouTube", 0, 0, getHash("longMuteVid-hash", 1), ""]); //6613 + await db.prepare("run", query, ["longMuteVid-hash", 2, 80, 2, 0, "longMuteVid-hash-3", "testman", 0, 50, "sponsor", "mute", "YouTube", 0, 0, getHash("longMuteVid-hash", 1), ""]); //6613 + await db.prepare("run", query, ["longMuteVid-hash", 3, 78, 2, 0, "longMuteVid-hash-4", "testman", 0, 50, "sponsor", "mute", "YouTube", 0, 0, getHash("longMuteVid-hash", 1), ""]); //6613 + await db.prepare("run", query, ["longMuteVid-2-hash", 1, 15, 2, 0, "longMuteVid-2-hash-1", "testman", 0, 50, "sponsor", "mute", "YouTube", 0, 0, getHash("longMuteVid-2-hash", 1), ""]); //ab0c + await db.prepare("run", query, ["longMuteVid-2-hash", 30, 35, 2, 0, "longMuteVid-2-hash-2", "testman", 0, 50, "sponsor", "skip", "YouTube", 0, 0, getHash("longMuteVid-2-hash", 1), ""]); //ab0c + await db.prepare("run", query, ["longMuteVid-2-hash", 2, 80, 2, 0, "longMuteVid-2-hash-3", "testman", 0, 50, "sponsor", "mute", "YouTube", 0, 0, getHash("longMuteVid-2-hash", 1), ""]); //ab0c + await db.prepare("run", query, ["longMuteVid-2-hash", 3, 78, 2, 0, "longMuteVid-2-hash-4", "testman", 0, 50, "sponsor", "mute", "YouTube", 0, 0, getHash("longMuteVid-2-hash", 1), ""]); //ab0c + await db.prepare("run", query, ["requiredSegmentHashVid", 10, 20, -2, 0, "fbf0af454059733c8822f6a4ac8ec568e0787f8c0a5ee915dd5b05e0d7a9a388", "testman", 0, 50, "sponsor", "skip", 
"YouTube", 0, 0, requiredSegmentHashVidHash, ""]); + await db.prepare("run", query, ["requiredSegmentHashVid", 20, 30, -2, 0, "7e1ebc5194551d2d0a606d64f675e5a14952e4576b2959f8c9d51e316c14f8da", "testman", 0, 50, "sponsor", "skip", "YouTube", 0, 0, requiredSegmentHashVidHash, ""]); + await db.prepare("run", query, ["differentCategoryVid", 60, 70, 2, 0, "differentCategoryVid-1", "testman", 0, 50, "sponsor", "skip", "YouTube", 0, 0, differentCategoryVidHash, ""]); + await db.prepare("run", query, ["differentCategoryVid", 61, 70, 2, 1, "differentCategoryVid-2", "testman", 0, 50, "intro", "skip", "YouTube", 0, 0, differentCategoryVidHash, ""]); }); it("Should be able to get a 200", (done) => { @@ -441,6 +444,24 @@ describe("getSkipSegmentsByHash", () => { .catch(err => done(err)); }); + it("Should be able to get only one segment when two categories are at the same time", (done) => { + client.get(`${endpoint}/7fac?categories=["sponsor","intro"]`) + .then(res => { + assert.strictEqual(res.status, 200); + const data = res.data; + assert.strictEqual(data.length, 1); + const expected = [{ + segments: [{ + category: "intro" + }] + }]; + assert.ok(partialDeepEquals(data, expected)); + assert.strictEqual(data[0].segments.length, 1); + done(); + }) + .catch(err => done(err)); + }); + it("Should be able to get mute segment with small skip segment in middle (2)", (done) => { client.get(`${endpoint}/ab0c?actionType=skip&actionType=mute`) .then(res => { From 873551e1c40ed9dc0a7a31b8faadb3ae32fd25d2 Mon Sep 17 00:00:00 2001 From: Ajay Date: Mon, 20 Dec 2021 23:39:58 -0500 Subject: [PATCH 12/14] formatting fix --- src/routes/getSkipSegments.ts | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/src/routes/getSkipSegments.ts b/src/routes/getSkipSegments.ts index eeec7b4..d1d543d 100644 --- a/src/routes/getSkipSegments.ts +++ b/src/routes/getSkipSegments.ts @@ -226,7 +226,7 @@ async function chooseSegments(videoID: VideoID, service: Service, segments: DBSe const fetchData = async () => await buildSegmentGroups(segments); const groups = useCache - ? await QueryCacher.get(fetchData, skipSegmentGroupsKey(videoID, service)) + ? 

From 873551e1c40ed9dc0a7a31b8faadb3ae32fd25d2 Mon Sep 17 00:00:00 2001
From: Ajay
Date: Mon, 20 Dec 2021 23:39:58 -0500
Subject: [PATCH 12/14] formatting fix

---
 src/routes/getSkipSegments.ts | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/src/routes/getSkipSegments.ts b/src/routes/getSkipSegments.ts
index eeec7b4..d1d543d 100644
--- a/src/routes/getSkipSegments.ts
+++ b/src/routes/getSkipSegments.ts
@@ -226,7 +226,7 @@ async function chooseSegments(videoID: VideoID, service: Service, segments: DBSe
     const fetchData = async () => await buildSegmentGroups(segments);
 
     const groups = useCache
-        ?  await QueryCacher.get(fetchData, skipSegmentGroupsKey(videoID, service))
+        ? await QueryCacher.get(fetchData, skipSegmentGroupsKey(videoID, service))
         : await fetchData();
 
     // Filter for only 1 item for POI categories
@@ -307,7 +307,7 @@ function splitPercentOverlap(groups: OverlappingSegmentGroup[]): OverlappingSegm
             const overlapPercent = overlap / overallDuration;
             return (overlapPercent > 0 && segment.actionType === compareSegment.actionType && segment.category == compareSegment.category && segment.actionType !== ActionType.Chapter)
                 || (overlapPercent >= 0.6 && segment.actionType !== compareSegment.actionType && segment.category === compareSegment.category)
-                || (overlapPercent >= 0.9 && segment.actionType === compareSegment.actionType && segment.category !== compareSegment.category)
+                || (overlapPercent >= 0.8 && segment.actionType === compareSegment.actionType && segment.category !== compareSegment.category)
                 || (overlapPercent >= 0.8 && segment.actionType === ActionType.Chapter && compareSegment.actionType === ActionType.Chapter);
         });
     });

From 7b2d9365a04f0191e35f981d0c7298b54e172afb Mon Sep 17 00:00:00 2001
From: Ajay
Date: Mon, 20 Dec 2021 23:52:24 -0500
Subject: [PATCH 13/14] Fix undefined issue

---
 src/routes/getSkipSegments.ts | 17 +++++++++--------
 1 file changed, 9 insertions(+), 8 deletions(-)

diff --git a/src/routes/getSkipSegments.ts b/src/routes/getSkipSegments.ts
index d1d543d..93812d2 100644
--- a/src/routes/getSkipSegments.ts
+++ b/src/routes/getSkipSegments.ts
@@ -177,9 +177,17 @@ function getWeightedRandomChoice(choices: T[], amountOf
         weight: number
     }
 
+    let forceIncludedChoices: T[] = [];
+    let filteredChoices = choices;
+    if (predicate) {
+        const splitArray = partition(choices, predicate);
+        filteredChoices = splitArray[0];
+        forceIncludedChoices = splitArray[1];
+    }
+
     //assign a weight to each choice
     let totalWeight = 0;
-    let choicesWithWeights: TWithWeight[] = choices.map(choice => {
+    const choicesWithWeights: TWithWeight[] = filteredChoices.map(choice => {
         const boost = Math.min(choice.reputation, 4);
 
         //The 3 makes -2 the minimum votes before being ignored completely
@@ -190,13 +198,6 @@ function getWeightedRandomChoice(choices: T[], amountOf
         return { ...choice, weight };
     });
 
-    let forceIncludedChoices: T[] = [];
-    if (predicate) {
-        const splitArray = partition(choicesWithWeights, predicate);
-        choicesWithWeights = splitArray[0];
-        forceIncludedChoices = splitArray[1];
-    }
-
     // Nothing to filter for
     if (amountOfChoices >= choicesWithWeights.length) {
        return choices;
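The fix above moves the predicate split ahead of the weighting pass, so choicesWithWeights is only ever built from choices that actually go through the vote and reputation math; previously the force-included entries were carved out of the already-weighted array, which appears to be the source of the undefined issue the subject line mentions. A rough sketch of the partition helper this code relies on, assuming it returns the predicate-true bucket first (the project's own util may order its tuple differently):

    // Hedged sketch: split an array into [pass, fail] by a predicate in one pass.
    function partition<T>(array: T[], predicate: (value: T) => boolean): [T[], T[]] {
        const pass: T[] = [];
        const fail: T[] = [];
        for (const value of array) {
            (predicate(value) ? pass : fail).push(value);
        }
        return [pass, fail];
    }

Switching choicesWithWeights from let to const also falls out naturally: once the split happens first, the weighted array is never reassigned.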
0.6 && segment.actionType !== compareSegment.actionType && segment.category === compareSegment.category) - || (overlapPercent >= 0.8 && segment.actionType === compareSegment.actionType && segment.category !== compareSegment.category) + || (overlapPercent >= 0.8 && segment.actionType === compareSegment.actionType && segment.category !== compareSegment.category + && segment.category !== "music_offtopic" && compareSegment.category !== "music_offtopic") || (overlapPercent >= 0.8 && segment.actionType === ActionType.Chapter && compareSegment.actionType === ActionType.Chapter); }); }); diff --git a/test/cases/getSkipSegmentsByHash.ts b/test/cases/getSkipSegmentsByHash.ts index 7f89973..e889238 100644 --- a/test/cases/getSkipSegmentsByHash.ts +++ b/test/cases/getSkipSegmentsByHash.ts @@ -17,6 +17,7 @@ describe("getSkipSegmentsByHash", () => { const requiredSegmentVidHash = "d51822c3f681e07aef15a8855f52ad12db9eb9cf059e65b16b64c43359557f61"; const requiredSegmentHashVidHash = "17bf8d9090e050257772f8bff277293c29c7ce3b25eb969a8fae111a2434504d"; const differentCategoryVidHash = "7fac44d1ee3257ec7f18953e2b5f991828de6854ad57193d1027c530981a89c0"; + const nonMusicOverlapVidHash = "306151f778f9bfd19872b3ccfc83cbab37c4f370717436bfd85e0a624cd8ba3c"; before(async () => { const query = 'INSERT INTO "sponsorTimes" ("videoID", "startTime", "endTime", "votes", "locked", "UUID", "userID", "timeSubmitted", views, category, "actionType", "service", "hidden", "shadowHidden", "hashedVideoID", "description") VALUES(?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)'; await db.prepare("run", query, ["getSegmentsByHash-0", 1, 10, 2, 0, "getSegmentsByHash-01", "testman", 0, 50, "sponsor", "skip", "YouTube", 0, 0, getSegmentsByHash0Hash, ""]); @@ -47,6 +48,8 @@ describe("getSkipSegmentsByHash", () => { await db.prepare("run", query, ["requiredSegmentHashVid", 20, 30, -2, 0, "7e1ebc5194551d2d0a606d64f675e5a14952e4576b2959f8c9d51e316c14f8da", "testman", 0, 50, "sponsor", "skip", "YouTube", 0, 0, requiredSegmentHashVidHash, ""]); await db.prepare("run", query, ["differentCategoryVid", 60, 70, 2, 0, "differentCategoryVid-1", "testman", 0, 50, "sponsor", "skip", "YouTube", 0, 0, differentCategoryVidHash, ""]); await db.prepare("run", query, ["differentCategoryVid", 61, 70, 2, 1, "differentCategoryVid-2", "testman", 0, 50, "intro", "skip", "YouTube", 0, 0, differentCategoryVidHash, ""]); + await db.prepare("run", query, ["nonMusicOverlapVid", 60, 70, 2, 0, "nonMusicOverlapVid-1", "testman", 0, 50, "sponsor", "skip", "YouTube", 0, 0, nonMusicOverlapVidHash, ""]); + await db.prepare("run", query, ["nonMusicOverlapVid", 61, 70, 2, 1, "nonMusicOverlapVid-2", "testman", 0, 50, "music_offtopic", "skip", "YouTube", 0, 0, nonMusicOverlapVidHash, ""]); }); it("Should be able to get a 200", (done) => { @@ -462,6 +465,26 @@ describe("getSkipSegmentsByHash", () => { .catch(err => done(err)); }); + it("Should be able to get overlapping segments where one is non music and one is other", (done) => { + client.get(`${endpoint}/3061?categories=["sponsor","music_offtopic"]`) + .then(res => { + assert.strictEqual(res.status, 200); + const data = res.data; + assert.strictEqual(data.length, 1); + const expected = [{ + segments: [{ + category: "sponsor" + }, { + category: "music_offtopic" + }] + }]; + assert.ok(partialDeepEquals(data, expected)); + assert.strictEqual(data[0].segments.length, 2); + done(); + }) + .catch(err => done(err)); + }); + it("Should be able to get mute segment with small skip segment in middle (2)", (done) => { 

diff --git a/test/cases/getSkipSegmentsByHash.ts b/test/cases/getSkipSegmentsByHash.ts
index 7f89973..e889238 100644
--- a/test/cases/getSkipSegmentsByHash.ts
+++ b/test/cases/getSkipSegmentsByHash.ts
@@ -17,6 +17,7 @@ describe("getSkipSegmentsByHash", () => {
     const requiredSegmentVidHash = "d51822c3f681e07aef15a8855f52ad12db9eb9cf059e65b16b64c43359557f61";
     const requiredSegmentHashVidHash = "17bf8d9090e050257772f8bff277293c29c7ce3b25eb969a8fae111a2434504d";
     const differentCategoryVidHash = "7fac44d1ee3257ec7f18953e2b5f991828de6854ad57193d1027c530981a89c0";
+    const nonMusicOverlapVidHash = "306151f778f9bfd19872b3ccfc83cbab37c4f370717436bfd85e0a624cd8ba3c";
 
     before(async () => {
         const query = 'INSERT INTO "sponsorTimes" ("videoID", "startTime", "endTime", "votes", "locked", "UUID", "userID", "timeSubmitted", views, category, "actionType", "service", "hidden", "shadowHidden", "hashedVideoID", "description") VALUES(?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)';
         await db.prepare("run", query, ["getSegmentsByHash-0", 1, 10, 2, 0, "getSegmentsByHash-01", "testman", 0, 50, "sponsor", "skip", "YouTube", 0, 0, getSegmentsByHash0Hash, ""]);
@@ -47,6 +48,8 @@ describe("getSkipSegmentsByHash", () => {
         await db.prepare("run", query, ["requiredSegmentHashVid", 20, 30, -2, 0, "7e1ebc5194551d2d0a606d64f675e5a14952e4576b2959f8c9d51e316c14f8da", "testman", 0, 50, "sponsor", "skip", "YouTube", 0, 0, requiredSegmentHashVidHash, ""]);
         await db.prepare("run", query, ["differentCategoryVid", 60, 70, 2, 0, "differentCategoryVid-1", "testman", 0, 50, "sponsor", "skip", "YouTube", 0, 0, differentCategoryVidHash, ""]);
         await db.prepare("run", query, ["differentCategoryVid", 61, 70, 2, 1, "differentCategoryVid-2", "testman", 0, 50, "intro", "skip", "YouTube", 0, 0, differentCategoryVidHash, ""]);
+        await db.prepare("run", query, ["nonMusicOverlapVid", 60, 70, 2, 0, "nonMusicOverlapVid-1", "testman", 0, 50, "sponsor", "skip", "YouTube", 0, 0, nonMusicOverlapVidHash, ""]);
+        await db.prepare("run", query, ["nonMusicOverlapVid", 61, 70, 2, 1, "nonMusicOverlapVid-2", "testman", 0, 50, "music_offtopic", "skip", "YouTube", 0, 0, nonMusicOverlapVidHash, ""]);
     });
 
     it("Should be able to get a 200", (done) => {
@@ -462,6 +465,26 @@ describe("getSkipSegmentsByHash", () => {
             .catch(err => done(err));
     });
 
+    it("Should be able to get overlapping segments where one is music_offtopic and the other is a different category", (done) => {
+        client.get(`${endpoint}/3061?categories=["sponsor","music_offtopic"]`)
+            .then(res => {
+                assert.strictEqual(res.status, 200);
+                const data = res.data;
+                assert.strictEqual(data.length, 1);
+                const expected = [{
+                    segments: [{
+                        category: "sponsor"
+                    }, {
+                        category: "music_offtopic"
+                    }]
+                }];
+                assert.ok(partialDeepEquals(data, expected));
+                assert.strictEqual(data[0].segments.length, 2);
+                done();
+            })
+            .catch(err => done(err));
+    });
+
     it("Should be able to get mute segment with small skip segment in middle (2)", (done) => {
         client.get(`${endpoint}/ab0c?actionType=skip&actionType=mute`)
             .then(res => {