82 Commits

Author SHA1 Message Date
Ajay
5c61f1d366 Fix inspect command 2023-04-16 21:34:34 -04:00
Ajay
36352b0c8e Build debug 2023-04-16 21:23:01 -04:00
Ajay
8db898fab5 Add pool status to branding header 2023-04-10 18:21:52 -04:00
Ajay
940cacfb0a Add pool details to postgres stats 2023-04-07 15:55:29 -04:00
Ajay
69a54f64b4 More specific redis error 2023-04-07 14:39:38 -04:00
Ajay
9600f56830 Fix 500 in tracing 2023-04-07 14:38:57 -04:00
Ajay
4e59526b4d Add missing semicolon 2023-04-07 14:31:15 -04:00
Ajay
42f2eb5eae Add traces to branding endpoint 2023-04-07 14:31:08 -04:00
Ajay
e1a607ba39 Add sb server host as response header 2023-04-07 12:47:08 -04:00
Ajay
8a236d66ed Fix video labels hash key error 2023-04-05 18:02:47 -04:00
Ajay
4a10faaea3 Add cb tables to db dump 2023-03-31 17:57:56 -04:00
Ajay
589b7d4e3e Disable branding locks for now 2023-03-31 15:08:28 -04:00
Ajay
a52feee25e Fix unlocking all submissions 2023-03-31 15:07:34 -04:00
Ajay Ramachandran
d6a12a5e99 Merge pull request #545 from ajayyy/clickbait
Clear query cache for branding
2023-03-29 00:33:27 -04:00
Ajay Ramachandran
bffc10e38f Merge pull request #534 from ajayyy/clickbait
Clickbait
2023-03-27 00:54:52 -04:00
Ajay
dde12bcc43 Fix wrong branch for github workflow 2023-03-19 00:41:29 -04:00
Ajay
7921bc4582 Remove chapters free test 2023-03-19 00:25:59 -04:00
Ajay
0b830610e9 Fix chapters duplicate check 2023-03-19 00:25:22 -04:00
Ajay
b6e9f92da8 Free chapters 2023-03-18 19:15:50 -04:00
Ajay
d4c4dbffcc Change video label cache to 3 chars 2023-03-18 00:36:05 -04:00
Ajay Ramachandran
5c549b5295 Merge pull request #544 from mchangrh/token-test
increase coverage for tokenUtils
2023-03-12 18:50:11 -04:00
Michael C
acae9da06c increase coverage for tokenUtils 2023-03-12 16:30:08 -04:00
Ajay Ramachandran
1f939116a4 Merge pull request #543 from mchangrh/unban-fix
fix unban query hitting limit, use number as type
2023-02-25 14:11:17 -05:00
Michael C
8495a9d6c0 fix unban query hitting limit, use number as type 2023-02-25 00:04:15 -05:00
Ajay Ramachandran
d76ee7cd22 Merge pull request #541 from mchangrh/etagTest
add etag and other tests
2023-02-22 01:38:41 -05:00
Michael C
436e75e3e6 add explicit test checks in reset 2023-02-22 01:15:49 -05:00
Michael C
7ba654e476 remove tests for maxNumberOfActiveWarnings 2023-02-22 00:35:58 -05:00
Ajay Ramachandran
f4286b15a1 Merge pull request #542 from mchangrh/warning-webhook
add warning webhook
2023-02-22 00:12:30 -05:00
Michael C
76ce1017ea add warning webhook 2023-02-22 00:08:27 -05:00
Michael C
780555e9df add ignores for getTotalStats 2023-02-21 20:16:25 -05:00
Michael C
79b7b6ea4c add highLoad tests 2023-02-21 20:10:01 -05:00
Michael C
80de71a68f add misc 400 tests 2023-02-21 20:09:57 -05:00
Michael C
4b8bc418ba clean up getCWSUsers 2023-02-21 20:09:38 -05:00
Michael C
f4537160de split postSkipSegments into stubbed 400 2023-02-21 20:09:13 -05:00
Michael C
900fa9f64e add test ignores 2023-02-21 17:00:53 -05:00
Michael C
e6f54f11f0 decompose postSkipSegments more 2023-02-21 17:00:23 -05:00
Michael C
6296761fe4 postSkipSegments improvements
- fix 80% check from same user
- split test cases into multiple files for easier viewing
2023-02-21 03:25:46 -05:00
Michael C
820a7eb02f simplify postWarning 2023-02-21 03:25:02 -05:00
Michael C
c6795a783d fix getCWS util 2023-02-21 03:24:32 -05:00
Michael C
37a1c7e88d used shared random gen for tests 2023-02-21 03:24:07 -05:00
Michael C
31a460e750 fix countcontributingusers 2023-02-20 22:56:38 -05:00
Michael C
1bda331b0c add new CWS user parsing method 2023-02-20 22:22:33 -05:00
Michael C
72fb4eb6ec add more tests for coverage 2023-02-20 22:22:33 -05:00
Michael C
d04230a1c4 reset postgres and redis between test runs 2023-02-20 22:22:33 -05:00
Michael C
f70a26009c add ignore clauses to tests 2023-02-20 22:22:33 -05:00
Michael C
c84eb839a0 clean up immediate cache after every run
run after codecov

confirm cache deletion

remove cache cleanup
2023-02-20 22:22:32 -05:00
Ajay Ramachandran
df279cf48a Adjust ip logging fix threshold 2023-02-20 18:57:19 -05:00
Michael C
cdc080b58b skip etag tests if no redis 2023-02-20 16:03:46 -05:00
Michael C
c586c9a7e7 add etag tests
- add shadowban self test
- add init and -it to docker runs
2023-02-20 15:56:08 -05:00
Ajay Ramachandran
81b0c27180 Merge pull request #540 from mchangrh/workflow
simplify workflows, use cache
2023-02-18 22:33:00 -05:00
Michael C
bff05dccaa simplify workflows, use cache 2023-02-18 03:12:50 -05:00
Ajay Ramachandran
9c438602f8 Add codecov config 2023-02-18 02:42:08 -05:00
Ajay Ramachandran
f48fb6c3f6 Merge pull request #539 from mchangrh/nyc
run nyc for codecov
2023-02-18 00:48:39 -05:00
Michael C
4a90ba8992 run sqlite and postgres coverage 2023-02-18 00:36:45 -05:00
Michael C
efc911a229 fix codecov 2023-02-18 00:30:21 -05:00
Ajay Ramachandran
2e3f4f8c70 Add codecov 2023-02-18 00:16:18 -05:00
Ajay Ramachandran
b95b6d8efe Merge pull request #538 from mchangrh/less-tempvip-perms
give less permissions to tempVIPs
2023-02-18 00:10:04 -05:00
Ajay Ramachandran
7985d131ef Merge pull request #537 from mchangrh/lenientSetUsername
make privateIDUsername check more lenient #532
2023-02-18 00:09:37 -05:00
Michael C
a384079562 more lenient privateIDUsername checks
- disallow username = privateID
- disallow username = other privateID on username table if length > minLength
2023-02-17 22:28:23 -05:00
Michael M. Chang
3281954019 Update src/routes/postSkipSegments.ts
Co-authored-by: mini-bomba <55105495+mini-bomba@users.noreply.github.com>
2023-02-08 12:21:58 -05:00
Michael C
aece615992 give less permissions to tempVIPs 2023-02-08 01:54:20 -05:00
Ajay
6448fbfbd8 Fix logger calls where this will be wrong 2023-02-05 13:27:18 -05:00
Ajay
c5426e5fc4 Fix reputation test 2023-02-04 18:43:08 -05:00
Ajay
3894d453a5 Improve performance of reputation query 2023-02-04 18:22:31 -05:00
Ajay
280e6684af Fix reputation using wrong date 2023-02-04 18:22:19 -05:00
Ajay
7361c7056b Print active db requests in redis errors 2023-02-04 15:12:22 -05:00
Ajay
2d751a0b21 Log response time limits 2023-02-04 14:13:49 -05:00
Ajay
399bda869f Add more detailed redis error message 2023-02-04 14:07:09 -05:00
Ajay Ramachandran
663bd96130 Fix forget sh file missing 2023-02-03 20:06:14 -05:00
Ajay
5b3f4b476e Update redis config 2023-02-03 12:31:30 -05:00
Michael C
b855eea349 make privateIDUsername check more lenient #532 2023-01-29 16:10:59 -05:00
Ajay
51d25cfc68 Ban users submitting from banned IPs 2023-01-29 13:53:08 -05:00
Ajay
f8f02d86d5 Fix shadow hidden requiring type 1 2023-01-29 13:15:05 -05:00
Ajay
cb7492628c Names for docker compose ci 2023-01-28 14:48:20 -05:00
Ajay
e69b61fb4c Fix shadow ban exiting early 2023-01-28 14:42:16 -05:00
Ajay
5c1b502a15 Fix ban users var not used 2023-01-28 14:04:14 -05:00
Ajay
d5c544f1ee Allow auto banning ips 2023-01-28 14:00:12 -05:00
Ajay
5426ae826e Add IP banning 2023-01-28 13:09:04 -05:00
Ajay Ramachandran
7911819cab Merge pull request #536 from mchangrh/sharedParser
use shared parseParams helper
2023-01-28 12:20:50 -05:00
Ajay Ramachandran
d8e0eac61b Use includes and check in sql usage 2023-01-28 12:18:34 -05:00
Michael C
74c0ba37e2 remove debug statements, fix shadowBanUser tests 2023-01-28 02:54:01 -05:00
Michael C
a64b8f99b7 use shared parseParams helper 2023-01-28 02:40:09 -05:00
87 changed files with 2490 additions and 1515 deletions

View File

@@ -1,23 +0,0 @@
name: SQLite CI
on:
push:
branches:
- master
pull_request:
jobs:
test:
name: Run Tests with SQLite
runs-on: ubuntu-latest
steps:
# Initialization
- uses: actions/checkout@v3
- uses: actions/setup-node@v3
with:
node-version: 18
- run: npm install
- name: Run Tests
timeout-minutes: 5
run: npm test

View File

@@ -22,8 +22,7 @@ jobs:
permissions:
packages: write
steps:
- name: Checkout
uses: actions/checkout@v3
- uses: actions/checkout@v3
- name: Docker meta
id: meta
uses: docker/metadata-action@v4

View File

@@ -1,23 +0,0 @@
name: Linting
on:
push:
branches:
- master
pull_request:
jobs:
lint:
name: Lint with ESLint
runs-on: ubuntu-latest
steps:
# Initialization
- uses: actions/checkout@v3
- uses: actions/setup-node@v3
with:
node-version: 18
- run: npm install
- name: Run Tests
timeout-minutes: 5
run: npm run lint

View File

@@ -18,7 +18,8 @@ jobs:
- uses: actions/setup-node@v3
with:
node-version: 18
- run: npm install
cache: npm
- run: npm ci
- name: Set config
run: |
echo '{"mode": "init-db-and-exit"}' > config.json

View File

@@ -1,33 +0,0 @@
name: PostgreSQL + Redis CI
on:
push:
branches:
- master
pull_request:
jobs:
test:
name: Run Tests with PostgreSQL and Redis
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v3
- name: Build the docker-compose stack
env:
PG_USER: ci_db_user
PG_PASS: ci_db_pass
run: docker-compose -f docker/docker-compose-ci.yml up -d
- name: Check running containers
run: docker ps
- uses: actions/setup-node@v3
with:
node-version: 18
- run: npm install
- name: Run Tests
env:
TEST_POSTGRES: true
timeout-minutes: 5
run: npx nyc --silent npm test
- name: Generate coverage report
run: npm run cover:report

View File

@@ -2,24 +2,15 @@ name: Docker image builds
on:
push:
branches:
- master
- debug
workflow_dispatch:
jobs:
sb-server:
uses: ./.github/workflows/docker-build.yml
with:
name: "sb-server"
name: "sb-server-debug"
username: "ajayyy"
folder: "."
secrets:
GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
rsync-host:
needs: sb-server
uses: ./.github/workflows/docker-build.yml
with:
name: "rsync-host"
username: "ajayyy"
folder: "./containers/rsync"
secrets:
GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}

116
.github/workflows/test.yaml vendored Normal file
View File

@@ -0,0 +1,116 @@
name: Tests
on:
push:
branches:
- master
pull_request:
workflow_dispatch:
jobs:
lint-build:
name: Lint with ESLint and build
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v3
- uses: actions/setup-node@v3
with:
node-version: 18
cache: npm
- run: npm ci
- run: npm run lint
- run: npm run tsc
- name: cache dist build
uses: actions/cache/save@v3
with:
key: dist-${{ github.sha }}
path: |
${{ github.workspace }}/dist
${{ github.workspace }}/node_modules
test-sqlite:
name: Run Tests with SQLite
runs-on: ubuntu-latest
needs: lint-build
steps:
- uses: actions/checkout@v3
- uses: actions/setup-node@v3
with:
node-version: 18
cache: npm
- id: cache
uses: actions/cache/restore@v3
with:
key: dist-${{ github.sha }}
path: |
${{ github.workspace }}/dist
${{ github.workspace }}/node_modules
- if: steps.cache.outputs.cache-hit != 'true'
run: npm ci
- name: Run SQLite Tests
timeout-minutes: 5
run: npx nyc --silent npm test
- name: cache nyc output
uses: actions/cache/save@v3
with:
key: nyc-sqlite-${{ github.sha }}
path: ${{ github.workspace }}/.nyc_output
test-postgres:
name: Run Tests with PostgreSQL and Redis
runs-on: ubuntu-latest
needs: lint-build
steps:
- uses: actions/checkout@v3
- name: Build the docker-compose stack
env:
PG_USER: ci_db_user
PG_PASS: ci_db_pass
run: docker-compose -f docker/docker-compose-ci.yml up -d
- name: Check running containers
run: docker ps
- uses: actions/setup-node@v3
with:
node-version: 18
cache: npm
- id: cache
uses: actions/cache/restore@v3
with:
key: dist-${{ github.sha }}
path: |
${{ github.workspace }}/dist
${{ github.workspace }}/node_modules
- if: steps.cache.outputs.cache-hit != 'true'
run: npm ci
- name: Run Postgres Tests
env:
TEST_POSTGRES: true
timeout-minutes: 5
run: npx nyc --silent npm test
- name: cache nyc output
uses: actions/cache/save@v3
with:
key: nyc-postgres-${{ github.sha }}
path: ${{ github.workspace }}/.nyc_output
codecov:
needs: [test-sqlite, test-postgres]
name: Run Codecov
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v3
- uses: actions/setup-node@v3
with:
node-version: 18
cache: npm
- run: npm ci
- name: restore postgres nyc output
uses: actions/cache/restore@v3
with:
key: nyc-postgres-${{ github.sha }}
path: ${{ github.workspace }}/.nyc_output
- name: restore sqlite nyc output
uses: actions/cache/restore@v3
with:
key: nyc-sqlite-${{ github.sha }}
path: ${{ github.workspace }}/.nyc_output
- run: npx nyc report --reporter=lcov
- name: Upload coverage reports to Codecov
uses: codecov/codecov-action@v3

View File

@@ -56,7 +56,6 @@
]
}
],
"maxNumberOfActiveWarnings": 3,
"hoursAfterWarningExpires": 24,
"rateLimit": {
"vote": {

1
codecov.yml Normal file
View File

@@ -0,0 +1 @@
comment: false

View File

@@ -4,10 +4,10 @@ RUN apk add restic --repository http://dl-cdn.alpinelinux.org/alpine/latest-stab
COPY ./backup.sh /usr/src/app/backup.sh
RUN chmod +x /usr/src/app/backup.sh
COPY ./backup.sh /usr/src/app/forget.sh
COPY ./forget.sh /usr/src/app/forget.sh
RUN chmod +x /usr/src/app/forget.sh
RUN echo '30 * * * * /usr/src/app/backup.sh' >> /etc/crontabs/root
RUN echo '10 0 * * 1 /usr/src/app/forget.sh' >> /etc/crontabs/root
CMD crond -l 2 -f
CMD crond -l 2 -f

View File

@@ -32,6 +32,10 @@ CREATE TABLE IF NOT EXISTS "categoryVotes" (
"votes" INTEGER NOT NULL default 0
);
CREATE TABLE IF NOT EXISTS "shadowBannedIPs" (
"hashedIP" TEXT NOT NULL PRIMARY KEY
);
CREATE TABLE IF NOT EXISTS "config" (
"key" TEXT NOT NULL UNIQUE,
"value" TEXT NOT NULL

View File

@@ -1,6 +1,7 @@
version: '3'
services:
postgres:
container_name: database-co
image: postgres:alpine
environment:
- POSTGRES_USER=${PG_USER}
@@ -8,6 +9,7 @@ services:
ports:
- 5432:5432
redis:
container_name: redis-ci
image: redis:alpine
ports:
- 6379:6379

View File

@@ -18,6 +18,9 @@ services:
- ./redis/redis.conf:/usr/local/etc/redis/redis.conf
ports:
- 32773:6379
sysctls:
- net.core.somaxconn=324000
- net.ipv4.tcp_max_syn_backlog=3240000
restart: always
newleaf:
image: abeltramo/newleaf:latest

View File

@@ -1,4 +1,5 @@
maxmemory-policy allkeys-lru
maxmemory 6500mb
maxmemory 6000mb
appendonly no
save ""

View File

@@ -9,4 +9,4 @@ test -e config.json || cat <<EOF > config.json
}
EOF
node dist/src/index.js
node --inspect dist/src/index.js

View File

@@ -9,8 +9,8 @@
"cover:report": "nyc report",
"dev": "nodemon",
"dev:bash": "nodemon -x 'npm test ; npm start'",
"postgres:docker": "docker run --rm -p 5432:5432 -e POSTGRES_USER=ci_db_user -e POSTGRES_PASSWORD=ci_db_pass postgres:14-alpine",
"redis:docker": "docker run --rm -p 6379:6379 redis:7-alpine --save '' --appendonly no",
"postgres:docker": "docker run --init -it --rm -p 5432:5432 -e POSTGRES_USER=ci_db_user -e POSTGRES_PASSWORD=ci_db_pass postgres:14-alpine",
"redis:docker": "docker run --init -it --rm -p 6379:6379 redis:7-alpine --save '' --appendonly no",
"start": "ts-node src/index.ts",
"tsc": "tsc -p tsconfig.json",
"lint": "eslint src test",

View File

@@ -52,6 +52,7 @@ import { verifyTokenRequest } from "./routes/verifyToken";
import { getBranding, getBrandingByHashEndpoint } from "./routes/getBranding";
import { postBranding } from "./routes/postBranding";
import { cacheMiddlware } from "./middleware/etag";
import { hostHeader } from "./middleware/hostHeader";
export function createServer(callback: () => void): Server {
// Create a service (the app object is just a callback).
@@ -65,6 +66,7 @@ export function createServer(callback: () => void): Server {
router.use(corsMiddleware);
router.use(loggerMiddleware);
router.use("/api/", apiCspMiddleware);
router.use(hostHeader);
router.use(cacheMiddlware);
router.use(express.json());

View File

@@ -128,6 +128,21 @@ addDefaults(config, {
},
{
name: "ratings"
},
{
name: "titles"
},
{
name: "titleVotes"
},
{
name: "thumbnails"
},
{
name: "thumbnailTimestamps"
},
{
name: "thumbnailVotes"
}]
},
diskCacheURL: null,

View File

@@ -21,6 +21,11 @@ interface PostgresStats {
avgReadTime: number;
avgWriteTime: number;
avgFailedTime: number;
pool: {
total: number;
idle: number;
waiting: number;
}
}
export interface DatabaseConfig {
@@ -266,7 +271,12 @@ export class Postgres implements IDatabase {
activeRequests: this.activePostgresRequests,
avgReadTime: this.readResponseTime.length > 0 ? this.readResponseTime.reduce((a, b) => a + b, 0) / this.readResponseTime.length : 0,
avgWriteTime: this.writeResponseTime.length > 0 ? this.writeResponseTime.reduce((a, b) => a + b, 0) / this.writeResponseTime.length : 0,
avgFailedTime: this.failedResponseTime.length > 0 ? this.failedResponseTime.reduce((a, b) => a + b, 0) / this.failedResponseTime.length : 0
avgFailedTime: this.failedResponseTime.length > 0 ? this.failedResponseTime.reduce((a, b) => a + b, 0) / this.failedResponseTime.length : 0,
pool: {
total: this.pool.totalCount,
idle: this.pool.idleCount,
waiting: this.pool.waitingCount
}
};
}

View File

@@ -0,0 +1,7 @@
import { NextFunction, Request, Response } from "express";
import os from "os";
export function hostHeader(req: Request, res: Response, next: NextFunction): void {
res.header("SBSERVER-HOST", os.hostname());
next();
}

View File

@@ -14,7 +14,7 @@ export function userCounter(req: Request, res: Response, next: NextFunction): vo
method: "post",
url: `${config.userCounterURL}/api/v1/addIP?hashedIP=${getIP(req)}`,
httpAgent
}).catch(() => Logger.debug(`Failing to connect to user counter at: ${config.userCounterURL}`));
}).catch(() => /* instanbul skip next */ Logger.debug(`Failing to connect to user counter at: ${config.userCounterURL}`));
}
}

View File

@@ -2,6 +2,7 @@ import { Request, Response } from "express";
import { isEmpty } from "lodash";
import { config } from "../config";
import { db, privateDB } from "../databases/databases";
import { Postgres } from "../databases/Postgres";
import { BrandingDBSubmission, BrandingHashDBResult, BrandingResult, ThumbnailDBResult, ThumbnailResult, TitleDBResult, TitleResult } from "../types/branding.model";
import { HashedIP, IPAddress, Service, VideoID, VideoIDHash, Visibility } from "../types/segments.model";
import { shuffleArray } from "../utils/array";
@@ -19,7 +20,7 @@ enum BrandingSubmissionType {
Thumbnail = "thumbnail"
}
export async function getVideoBranding(videoID: VideoID, service: Service, ip: IPAddress): Promise<BrandingResult> {
export async function getVideoBranding(res: Response, videoID: VideoID, service: Service, ip: IPAddress): Promise<BrandingResult> {
const getTitles = () => db.prepare(
"all",
`SELECT "titles"."title", "titles"."original", "titleVotes"."votes", "titleVotes"."locked", "titleVotes"."shadowHidden", "titles"."UUID", "titles"."videoID", "titles"."hashedVideoID"
@@ -43,7 +44,19 @@ export async function getVideoBranding(videoID: VideoID, service: Service, ip: I
thumbnails: await getThumbnails()
});
const branding = await QueryCacher.get(getBranding, brandingKey(videoID, service));
const brandingTrace = await QueryCacher.getTraced(getBranding, brandingKey(videoID, service));
const branding = brandingTrace.data;
// Add trace info to request for debugging purposes
res.setHeader("X-Start-Time", brandingTrace.startTime);
if (brandingTrace.dbStartTime) res.setHeader("X-DB-Start-Time", brandingTrace.dbStartTime);
res.setHeader("X-End-Time", brandingTrace.endTime);
const stats = (db as Postgres)?.getStats?.();
if (stats) {
res.setHeader("X-DB-Pool-Total", stats.pool.total);
res.setHeader("X-DB-Pool-Idle", stats.pool.idle);
res.setHeader("X-DB-Pool-Waiting", stats.pool.waiting);
}
const cache = {
currentIP: null as Promise<HashedIP> | null
@@ -177,7 +190,7 @@ export async function getBranding(req: Request, res: Response) {
const ip = getIP(req);
try {
const result = await getVideoBranding(videoID, service, ip);
const result = await getVideoBranding(res, videoID, service, ip);
const status = result.titles.length > 0 || result.thumbnails.length > 0 ? 200 : 404;
return res.status(status).json(result);

View File

@@ -3,17 +3,12 @@ import { Logger } from "../utils/logger";
import { Request, Response } from "express";
import { ActionType, Category, VideoID } from "../types/segments.model";
import { getService } from "../utils/getService";
import { parseActionTypes } from "../utils/parseParams";
export async function getLockCategories(req: Request, res: Response): Promise<Response> {
const videoID = req.query.videoID as VideoID;
const service = getService(req.query.service as string);
const actionTypes: ActionType[] = req.query.actionTypes
? JSON.parse(req.query.actionTypes as string)
: req.query.actionType
? Array.isArray(req.query.actionType)
? req.query.actionType
: [req.query.actionType]
: [ActionType.Skip, ActionType.Mute];
const actionTypes: ActionType[] = parseActionTypes(req, [ActionType.Skip, ActionType.Mute]);
if (!videoID || !Array.isArray(actionTypes)) {
//invalid request
return res.sendStatus(400);

View File

@@ -3,6 +3,7 @@ import { Logger } from "../utils/logger";
import { Request, Response } from "express";
import { hashPrefixTester } from "../utils/hashPrefixTester";
import { ActionType, Category, VideoID, VideoIDHash } from "../types/segments.model";
import { parseActionTypes } from "../utils/parseParams";
interface LockResultByHash {
videoID: VideoID,
@@ -44,25 +45,13 @@ const mergeLocks = (source: DBLock[], actionTypes: ActionType[]): LockResultByHa
export async function getLockCategoriesByHash(req: Request, res: Response): Promise<Response> {
let hashPrefix = req.params.prefix as VideoIDHash;
let actionTypes: ActionType[] = [];
try {
actionTypes = req.query.actionTypes
? JSON.parse(req.query.actionTypes as string)
: req.query.actionType
? Array.isArray(req.query.actionType)
? req.query.actionType
: [req.query.actionType]
: [ActionType.Skip, ActionType.Mute];
if (!Array.isArray(actionTypes)) {
//invalid request
return res.sendStatus(400);
}
} catch (err) {
const actionTypes: ActionType[] = parseActionTypes(req, [ActionType.Skip, ActionType.Mute]);
if (!Array.isArray(actionTypes)) {
//invalid request
return res.status(400).send("Invalid request: JSON parse error (actionTypes)");
return res.sendStatus(400);
}
if (!hashPrefixTester(req.params.prefix)) {
if (!hashPrefixTester(req.params.prefix)) {
return res.status(400).send("Hash prefix does not match format requirements."); // Exit early on faulty prefix
}
hashPrefix = hashPrefix.toLowerCase() as VideoIDHash;

View File

@@ -2,9 +2,8 @@ import { db } from "../databases/databases";
import { Logger } from "../utils/logger";
import { Request, Response } from "express";
import { Category, VideoID, ActionType } from "../types/segments.model";
import { config } from "../config";
import { filterInvalidCategoryActionType, parseActionTypes, parseCategories } from "../utils/parseParams";
const categorySupportList = config.categorySupport;
interface lockArray {
category: Category;
locked: number,
@@ -13,62 +12,19 @@ interface lockArray {
userName: string,
}
const filterActionType = (actionTypes: ActionType[]) => {
const filterCategories = new Set();
for (const [key, value] of Object.entries(categorySupportList)) {
for (const type of actionTypes) {
if (value.includes(type)) {
filterCategories.add(key as Category);
}
}
}
return [...filterCategories];
};
export async function getLockReason(req: Request, res: Response): Promise<Response> {
const videoID = req.query.videoID as VideoID;
if (!videoID) {
// invalid request
return res.status(400).send("No videoID provided");
}
let categories: Category[] = [];
let actionTypes: ActionType[] = [];
try {
actionTypes = req.query.actionTypes
? JSON.parse(req.query.actionTypes as string)
: req.query.actionType
? Array.isArray(req.query.actionType)
? req.query.actionType
: [req.query.actionType]
: [ActionType.Skip, ActionType.Mute];
if (!Array.isArray(actionTypes)) {
//invalid request
return res.status(400).send("actionTypes parameter does not match format requirements");
}
} catch (error) {
return res.status(400).send("Bad parameter: actionTypes (invalid JSON)");
}
const possibleCategories = filterActionType(actionTypes);
const actionTypes = parseActionTypes(req, [ActionType.Skip, ActionType.Mute]);
const categories = parseCategories(req, []);
try {
categories = req.query.categories
? JSON.parse(req.query.categories as string)
: req.query.category
? Array.isArray(req.query.category)
? req.query.category
: [req.query.category]
: []; // default to empty, will be set to all
if (!Array.isArray(categories)) {
return res.status(400).send("Categories parameter does not match format requirements.");
}
} catch(error) {
return res.status(400).send("Bad parameter: categories (invalid JSON)");
}
// invalid requests
const errors = [];
if (!videoID) errors.push("No videoID provided");
if (!Array.isArray(actionTypes)) errors.push("actionTypes parameter does not match format requirements");
if (!Array.isArray(categories)) errors.push("Categories parameter does not match format requirements.");
if (errors.length) return res.status(400).send(errors.join(", "));
// only take valid categories
const searchCategories = (categories.length === 0 )
? possibleCategories
: categories.filter(x =>
possibleCategories.includes(x));
const searchCategories = filterInvalidCategoryActionType(categories, actionTypes);
try {
// Get existing lock categories markers

View File

@@ -2,6 +2,8 @@ import { Request, Response } from "express";
import { db } from "../databases/databases";
import { ActionType, Category, DBSegment, Service, VideoID, SortableFields } from "../types/segments.model";
import { getService } from "../utils/getService";
import { parseActionTypes, parseCategories } from "../utils/parseParams";
const maxSegmentsPerPage = 100;
const defaultSegmentsPerPage = 10;
@@ -73,25 +75,13 @@ async function handleGetSegments(req: Request, res: Response): Promise<searchSeg
return false;
}
// Default to sponsor
const categories: Category[] = req.query.categories
? JSON.parse(req.query.categories as string)
: req.query.category
? Array.isArray(req.query.category)
? req.query.category
: [req.query.category]
: [];
const categories: Category[] = parseCategories(req, []);
if (!Array.isArray(categories)) {
res.status(400).send("Categories parameter does not match format requirements.");
return false;
}
const actionTypes: ActionType[] = req.query.actionTypes
? JSON.parse(req.query.actionTypes as string)
: req.query.actionType
? Array.isArray(req.query.actionType)
? req.query.actionType
: [req.query.actionType]
: [ActionType.Skip];
const actionTypes: ActionType[] = parseActionTypes(req, [ActionType.Skip]);
if (!Array.isArray(actionTypes)) {
res.status(400).send("actionTypes parameter does not match format requirements.");
return false;

View File

@@ -34,11 +34,11 @@ async function handleGetSegmentInfo(req: Request, res: Response): Promise<DBSegm
// deduplicate with set
UUIDs = [ ...new Set(UUIDs)];
// if more than 10 entries, slice
if (UUIDs.length > 10) UUIDs = UUIDs.slice(0, 10);
if (!Array.isArray(UUIDs) || !UUIDs) {
if (!Array.isArray(UUIDs) || !UUIDs?.length) {
res.status(400).send("UUIDs parameter does not match format requirements.");
return;
}
if (UUIDs.length > 10) UUIDs = UUIDs.slice(0, 10);
const DBSegments = await getSegmentsByUUID(UUIDs);
// all uuids failed lookup
if (!DBSegments?.length) {

View File

@@ -27,7 +27,7 @@ async function prepareCategorySegments(req: Request, videoID: VideoID, service:
//check if shadowHidden
//this means it is hidden to everyone but the original ip that submitted it
if (segment.shadowHidden != Visibility.HIDDEN) {
if (segment.shadowHidden === Visibility.VISIBLE) {
return true;
}

View File

@@ -25,13 +25,13 @@ export async function getSkipSegmentsByHash(req: Request, res: Response): Promis
try {
await getEtag("skipSegmentsHash", hashPrefix, service)
.then(etag => res.set("ETag", etag))
.catch(() => null);
.catch(/* istanbul ignore next */ () => null);
const output = Object.entries(segments).map(([videoID, data]) => ({
videoID,
segments: data.segments,
}));
return res.status(output.length === 0 ? 404 : 200).json(output);
} catch(e) {
} catch (e) /* istanbul ignore next */ {
Logger.error(`skip segments by hash error: ${e}`);
return res.status(500).send("Internal server error");

View File

@@ -2,10 +2,12 @@ import { db } from "../databases/databases";
import { createMemoryCache } from "../utils/createMemoryCache";
import { config } from "../config";
import { Request, Response } from "express";
import { validateCategories } from "../utils/parseParams";
const MILLISECONDS_IN_MINUTE = 60000;
// eslint-disable-next-line @typescript-eslint/no-misused-promises
const getTopCategoryUsersWithCache = createMemoryCache(generateTopCategoryUsersStats, config.getTopUsersCacheTimeMinutes * MILLISECONDS_IN_MINUTE);
/* istanbul ignore next */
const maxRewardTimePerSegmentInSeconds = config.maxRewardTimePerSegmentInSeconds ?? 86400;
interface DBSegment {
@@ -38,7 +40,6 @@ async function generateTopCategoryUsersStats(sortBy: string, category: string) {
}
}
return {
userNames,
viewCounts,
@@ -51,7 +52,7 @@ export async function getTopCategoryUsers(req: Request, res: Response): Promise<
const sortType = parseInt(req.query.sortType as string);
const category = req.query.category as string;
if (sortType == undefined || !config.categoryList.includes(category) ) {
if (sortType == undefined || !validateCategories([category]) ) {
//invalid request
return res.sendStatus(400);
}

View File

@@ -3,6 +3,7 @@ import { config } from "../config";
import { Request, Response } from "express";
import axios from "axios";
import { Logger } from "../utils/logger";
import { getCWSUsers } from "../utils/getCWSUsers";
// A cache of the number of chrome web store users
let chromeUsersCache = 0;
@@ -29,30 +30,30 @@ let lastFetch: DBStatsData = {
updateExtensionUsers();
export async function getTotalStats(req: Request, res: Response): Promise<void> {
const row = await getStats(!!req.query.countContributingUsers);
const countContributingUsers = Boolean(req.query?.countContributingUsers == "true");
const row = await getStats(countContributingUsers);
lastFetch = row;
if (row !== undefined) {
const extensionUsers = chromeUsersCache + firefoxUsersCache;
/* istanbul ignore if */
if (!row) res.sendStatus(500);
const extensionUsers = chromeUsersCache + firefoxUsersCache;
//send this result
res.send({
userCount: row.userCount,
activeUsers: extensionUsers,
apiUsers: Math.max(apiUsersCache, extensionUsers),
viewCount: row.viewCount,
totalSubmissions: row.totalSubmissions,
minutesSaved: row.minutesSaved,
});
//send this result
res.send({
userCount: row.userCount ?? 0,
activeUsers: extensionUsers,
apiUsers: Math.max(apiUsersCache, extensionUsers),
viewCount: row.viewCount,
totalSubmissions: row.totalSubmissions,
minutesSaved: row.minutesSaved,
});
// Check if the cache should be updated (every ~14 hours)
const now = Date.now();
if (now - lastUserCountCheck > 5000000) {
lastUserCountCheck = now;
// Check if the cache should be updated (every ~14 hours)
const now = Date.now();
if (now - lastUserCountCheck > 5000000) {
lastUserCountCheck = now;
updateExtensionUsers();
}
updateExtensionUsers();
}
}
@@ -67,42 +68,53 @@ function getStats(countContributingUsers: boolean): Promise<DBStatsData> {
}
}
function updateExtensionUsers() {
/* istanbul ignore else */
if (config.userCounterURL) {
axios.get(`${config.userCounterURL}/api/v1/userCount`)
.then(res => {
apiUsersCache = Math.max(apiUsersCache, res.data.userCount);
})
.catch(() => Logger.debug(`Failing to connect to user counter at: ${config.userCounterURL}`));
.then(res => apiUsersCache = Math.max(apiUsersCache, res.data.userCount))
.catch( /* istanbul ignore next */ () => Logger.debug(`Failing to connect to user counter at: ${config.userCounterURL}`));
}
const mozillaAddonsUrl = "https://addons.mozilla.org/api/v3/addons/addon/sponsorblock/";
const chromeExtensionUrl = "https://chrome.google.com/webstore/detail/sponsorblock-for-youtube/mnjggcdmjocbbbhaepdhchncahnbgone";
const chromeExtId = "mnjggcdmjocbbbhaepdhchncahnbgone";
axios.get(mozillaAddonsUrl)
.then(res => {
firefoxUsersCache = res.data.average_daily_users;
axios.get(chromeExtensionUrl)
.then(res => {
const body = res.data;
// 2021-01-05
// [...]<span><meta itemprop="interactionCount" content="UserDownloads:100.000+"/><meta itemprop="opera[...]
const matchingString = '"UserDownloads:';
const matchingStringLen = matchingString.length;
const userDownloadsStartIndex = body.indexOf(matchingString);
if (userDownloadsStartIndex >= 0) {
const closingQuoteIndex = body.indexOf('"', userDownloadsStartIndex + matchingStringLen);
const userDownloadsStr = body.substr(userDownloadsStartIndex + matchingStringLen, closingQuoteIndex - userDownloadsStartIndex).replace(",", "").replace(".", "");
chromeUsersCache = parseInt(userDownloadsStr);
}
else {
lastUserCountCheck = 0;
}
})
.catch(() => Logger.debug(`Failing to connect to ${chromeExtensionUrl}`));
})
.catch(() => {
.then(res => firefoxUsersCache = res.data.average_daily_users )
.catch( /* istanbul ignore next */ () => {
Logger.debug(`Failing to connect to ${mozillaAddonsUrl}`);
return 0;
});
getCWSUsers(chromeExtId)
.then(res => chromeUsersCache = res)
.catch(/* istanbul ignore next */ () =>
getChromeUsers(chromeExtensionUrl)
.then(res => chromeUsersCache = res)
);
}
/* istanbul ignore next */
/**
 * Scrape the Chrome Web Store listing page for the extension's user count.
 * Used as a fallback when the ajax endpoint (getCWSUsers) fails.
 *
 * @param chromeExtensionUrl - full URL of the Web Store listing page.
 * @returns the parsed user count, 0 when the page cannot be fetched, or
 *          undefined when the marker string is missing (in that case the
 *          module-level `lastUserCountCheck` is reset to force a retry).
 */
function getChromeUsers(chromeExtensionUrl: string): Promise<number> {
    return axios.get(chromeExtensionUrl)
        .then(res => {
            const body = res.data;
            // 2021-01-05
            // [...]<span><meta itemprop="interactionCount" content="UserDownloads:100.000+"/><meta itemprop="opera[...]
            const matchingString = '"UserDownloads:';
            const matchingStringLen = matchingString.length;
            const userDownloadsStartIndex = body.indexOf(matchingString);
            /* istanbul ignore else */
            if (userDownloadsStartIndex >= 0) {
                const countStartIndex = userDownloadsStartIndex + matchingStringLen;
                const closingQuoteIndex = body.indexOf('"', countStartIndex);
                // Cut exactly at the closing quote (the old substr length overshot by
                // matchingStringLen) and strip EVERY thousands separator — the old
                // .replace(",", "").replace(".", "") only removed the first of each,
                // so counts like "10,000,000+" parsed as 10000.
                const userDownloadsStr = body.substring(countStartIndex, closingQuoteIndex).replace(/[.,]/g, "");
                return parseInt(userDownloadsStr, 10);
            } else {
                lastUserCountCheck = 0;
            }
        })
        .catch(/* istanbul ignore next */ () => {
            Logger.debug(`Failing to connect to ${chromeExtensionUrl}`);
            return 0;
        });
}

View File

@@ -8,7 +8,6 @@ import { getReputation } from "../utils/reputation";
import { Category, SegmentUUID } from "../types/segments.model";
import { config } from "../config";
import { canSubmit } from "../utils/permissions";
import { oneOf } from "../utils/promise";
const maxRewardTime = config.maxRewardTimePerSegmentInSeconds;
async function dbGetSubmittedSegmentSummary(userID: HashedUserID): Promise<{ minutesSaved: number, segmentCount: number }> {
@@ -116,12 +115,6 @@ async function getPermissions(userID: HashedUserID): Promise<Record<string, bool
return result;
}
async function getFreeChaptersAccess(userID: HashedUserID): Promise<boolean> {
return await oneOf([isUserVIP(userID),
(async () => !!(await db.prepare("get", `SELECT "timeSubmitted" FROM "sponsorTimes" WHERE "timeSubmitted" < 1666126187000 AND "userID" = ? LIMIT 1`, [userID], { useReplica: true })))()
]);
}
type cases = Record<string, any>
const executeIfFunction = (f: any) =>
@@ -147,7 +140,7 @@ const dbGetValue = (userID: HashedUserID, property: string): Promise<string|Segm
vip: () => isUserVIP(userID),
lastSegmentID: () => dbGetLastSegmentForUser(userID),
permissions: () => getPermissions(userID),
freeChaptersAccess: () => getFreeChaptersAccess(userID)
freeChaptersAccess: () => true
})("")(property);
};

View File

@@ -78,7 +78,7 @@ async function getSegmentsFromDBByHash(hashedVideoIDPrefix: VideoIDHash, service
[`${hashedVideoIDPrefix}%`, service]
) as Promise<DBSegment[]>;
if (hashedVideoIDPrefix.length === 4) {
if (hashedVideoIDPrefix.length === 3) {
return await QueryCacher.get(fetchFromDB, videoLabelsHashKey(hashedVideoIDPrefix, service));
}

View File

@@ -38,7 +38,8 @@ export async function postBranding(req: Request, res: Response) {
try {
const hashedUserID = await getHashCache(userID);
const isVip = await isUserVIP(hashedUserID);
// const isVip = await isUserVIP(hashedUserID);
const isVip = false; // TODO: In future, reenable locks
const hashedVideoID = await getHashCache(videoID, 1);
const hashedIP = await getHashCache(getIP(req) + config.globalSalt as IPAddress);
@@ -63,7 +64,7 @@ export async function postBranding(req: Request, res: Response) {
if (isVip) {
// unlock all other titles
await db.prepare("run", `UPDATE "titleVotes" SET "locked" = 0 WHERE "UUID" != ?`, [UUID]);
await db.prepare("run", `UPDATE "titleVotes" SET "locked" = 0 WHERE "UUID" != ? AND "videoID" = ?`, [UUID, videoID]);
}
}
})(), (async () => {
@@ -91,7 +92,7 @@ export async function postBranding(req: Request, res: Response) {
if (isVip) {
// unlock all other titles
await db.prepare("run", `UPDATE "thumbnailVotes" SET "locked" = 0 WHERE "UUID" != ?`, [UUID]);
await db.prepare("run", `UPDATE "thumbnailVotes" SET "locked" = 0 WHERE "UUID" != ? AND "videoID" = ?`, [UUID, videoID]);
}
}
}

View File

@@ -23,6 +23,7 @@ import { vote } from "./voteOnSponsorTime";
import { canSubmit } from "../utils/permissions";
import { getVideoDetails, videoDetails } from "../utils/getVideoDetails";
import * as youtubeID from "../utils/youtubeID";
import { banUser } from "./shadowBanUser";
type CheckResult = {
pass: boolean,
@@ -74,7 +75,7 @@ async function sendWebhooks(apiVideoDetails: videoDetails, userID: string, video
sendWebhookNotification(userID, videoID, UUID, userSubmissionCountRow.submissionCount, apiVideoDetails, {
submissionStart: startTime,
submissionEnd: endTime,
}, segmentInfo).catch(Logger.error);
}, segmentInfo).catch((e) => Logger.error(`sending webhooks: ${e}`));
// If it is a first time submission
// Then send a notification to discord
@@ -119,7 +120,7 @@ async function sendWebhooks(apiVideoDetails: videoDetails, userID: string, video
// false for a pass - it was confusing and lead to this bug - any use of this function in
// the future could have the same problem.
async function autoModerateSubmission(apiVideoDetails: videoDetails,
submission: { videoID: VideoID; userID: UserID; segments: IncomingSegment[], service: Service, videoDuration: number }) {
submission: { videoID: VideoID; userID: HashedUserID; segments: IncomingSegment[], service: Service, videoDuration: number }) {
// get duration from API
const apiDuration = apiVideoDetails.duration;
// if API fail or returns 0, get duration from client
@@ -155,7 +156,7 @@ async function autoModerateSubmission(apiVideoDetails: videoDetails,
return false;
}
async function checkUserActiveWarning(userID: string): Promise<CheckResult> {
async function checkUserActiveWarning(userID: HashedUserID): Promise<CheckResult> {
const MILLISECONDS_IN_HOUR = 3600000;
const now = Date.now();
const warnings = (await db.prepare("all",
@@ -248,7 +249,7 @@ async function checkInvalidFields(videoID: VideoID, userID: UserID, hashedUserID
}
async function checkEachSegmentValid(rawIP: IPAddress, paramUserID: UserID, userID: HashedUserID, videoID: VideoID,
segments: IncomingSegment[], service: Service, isVIP: boolean, lockedCategoryList: Array<any>): Promise<CheckResult> {
segments: IncomingSegment[], service: Service, isVIP: boolean, isTempVIP: boolean, lockedCategoryList: Array<any>): Promise<CheckResult> {
for (let i = 0; i < segments.length; i++) {
if (segments[i] === undefined || segments[i].segment === undefined || segments[i].category === undefined) {
@@ -308,11 +309,11 @@ async function checkEachSegmentValid(rawIP: IPAddress, paramUserID: UserID, user
}
// Check for POI segments before some seconds
if (!isVIP && segments[i].actionType === ActionType.Poi && startTime < config.poiMinimumStartTime) {
if (!(isVIP || isTempVIP) && segments[i].actionType === ActionType.Poi && startTime < config.poiMinimumStartTime) {
return { pass: false, errorMessage: `POI cannot be that early`, errorCode: 400 };
}
if (!isVIP && segments[i].category === "sponsor"
if (!(isVIP || isTempVIP) && segments[i].category === "sponsor"
&& segments[i].actionType !== ActionType.Full && (endTime - startTime) < 1) {
// Too short
return { pass: false, errorMessage: "Segments must be longer than 1 second long", errorCode: 400 };
@@ -320,7 +321,7 @@ async function checkEachSegmentValid(rawIP: IPAddress, paramUserID: UserID, user
//check if this info has already been submitted before
const duplicateCheck2Row = await db.prepare("get", `SELECT "UUID" FROM "sponsorTimes" WHERE "startTime" = ?
and "endTime" = ? and "category" = ? and "actionType" = ? and "videoID" = ? and "service" = ?`, [startTime, endTime, segments[i].category, segments[i].actionType, videoID, service]);
and "endTime" = ? and "category" = ? and "actionType" = ? and "description" = ? and "videoID" = ? and "service" = ?`, [startTime, endTime, segments[i].category, segments[i].actionType, segments[i].description, videoID, service]);
if (duplicateCheck2Row) {
if (segments[i].actionType === ActionType.Full) {
// Forward as vote
@@ -336,10 +337,10 @@ async function checkEachSegmentValid(rawIP: IPAddress, paramUserID: UserID, user
return CHECK_PASS;
}
async function checkByAutoModerator(videoID: any, userID: any, segments: Array<any>, service:string, apiVideoDetails: videoDetails, videoDuration: number): Promise<CheckResult> {
async function checkByAutoModerator(videoID: VideoID, userID: HashedUserID, segments: IncomingSegment[], service: Service, apiVideoDetails: videoDetails, videoDuration: number): Promise<CheckResult> {
// Auto moderator check
if (service == Service.YouTube) {
const autoModerateResult = await autoModerateSubmission(apiVideoDetails, { userID, videoID, segments, service, videoDuration });
const autoModerateResult = await autoModerateSubmission(apiVideoDetails, { videoID, userID, segments, service, videoDuration });
if (autoModerateResult) {
return {
pass: false,
@@ -387,7 +388,7 @@ async function updateDataIfVideoDurationChange(videoID: VideoID, service: Servic
await db.prepare("run", `UPDATE "sponsorTimes" SET "hidden" = 1 WHERE "UUID" = ?`, [submission.UUID]);
}
lockedCategoryList = [];
deleteLockCategories(videoID, null, null, service).catch(Logger.error);
deleteLockCategories(videoID, null, null, service).catch((e) => Logger.error(`deleting lock categories: ${e}`));
}
return {
@@ -491,7 +492,10 @@ export async function postSkipSegments(req: Request, res: Response): Promise<Res
let { videoID, userID: paramUserID, service, videoDuration, videoDurationParam, segments, userAgent } = preprocessInput(req);
//hash the userID
const userID = await getHashCache(paramUserID || "");
if (!paramUserID) {
return res.status(400).send("No userID provided");
}
const userID: HashedUserID = await getHashCache(paramUserID);
const invalidCheckResult = await checkInvalidFields(videoID, paramUserID, userID, segments, videoDurationParam, userAgent, service);
if (!invalidCheckResult.pass) {
@@ -504,7 +508,8 @@ export async function postSkipSegments(req: Request, res: Response): Promise<Res
return res.status(userWarningCheckResult.errorCode).send(userWarningCheckResult.errorMessage);
}
const isVIP = (await isUserVIP(userID)) || (await isUserTempVIP(userID, videoID));
const isVIP = (await isUserVIP(userID));
const isTempVIP = (await isUserTempVIP(userID, videoID));
const rawIP = getIP(req);
const newData = await updateDataIfVideoDurationChange(videoID, service, videoDuration, videoDurationParam);
@@ -512,12 +517,12 @@ export async function postSkipSegments(req: Request, res: Response): Promise<Res
const { lockedCategoryList, apiVideoDetails } = newData;
// Check if all submissions are correct
const segmentCheckResult = await checkEachSegmentValid(rawIP, paramUserID, userID, videoID, segments, service, isVIP, lockedCategoryList);
const segmentCheckResult = await checkEachSegmentValid(rawIP, paramUserID, userID, videoID, segments, service, isVIP, isTempVIP, lockedCategoryList);
if (!segmentCheckResult.pass) {
return res.status(segmentCheckResult.errorCode).send(segmentCheckResult.errorMessage);
}
if (!isVIP) {
if (!(isVIP || isTempVIP)) {
const autoModerateCheckResult = await checkByAutoModerator(videoID, userID, segments, service, apiVideoDetails, videoDurationParam);
if (!autoModerateCheckResult.pass) {
return res.status(autoModerateCheckResult.errorCode).send(autoModerateCheckResult.errorMessage);
@@ -541,14 +546,22 @@ export async function postSkipSegments(req: Request, res: Response): Promise<Res
// }
//check to see if this user is shadowbanned
const shadowBanRow = await db.prepare("get", `SELECT count(*) as "userCount" FROM "shadowBannedUsers" WHERE "userID" = ? LIMIT 1`, [userID]);
const userBanCount = (await db.prepare("get", `SELECT count(*) as "userCount" FROM "shadowBannedUsers" WHERE "userID" = ? LIMIT 1`, [userID]))?.userCount;
const ipBanCount = (await db.prepare("get", `SELECT count(*) as "userCount" FROM "shadowBannedIPs" WHERE "hashedIP" = ? LIMIT 1`, [hashedIP]))?.userCount;
const shadowBanCount = userBanCount || ipBanCount;
const startingVotes = 0;
const reputation = await getReputation(userID);
if (!userBanCount && ipBanCount) {
// Make sure the whole user is banned
banUser(userID, true, true, 1, config.categoryList as Category[])
.catch((e) => Logger.error(`Error banning user after submitting from a banned IP: ${e}`));
}
for (const segmentInfo of segments) {
// Full segments are always rejected since there can only be one, so shadow hide wouldn't work
if (segmentInfo.ignoreSegment
|| (shadowBanRow.userCount && segmentInfo.actionType === ActionType.Full)) {
|| (shadowBanCount && segmentInfo.actionType === ActionType.Full)) {
continue;
}
@@ -565,7 +578,7 @@ export async function postSkipSegments(req: Request, res: Response): Promise<Res
("videoID", "startTime", "endTime", "votes", "locked", "UUID", "userID", "timeSubmitted", "views", "category", "actionType", "service", "videoDuration", "reputation", "shadowHidden", "hashedVideoID", "userAgent", "description")
VALUES(?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)`, [
videoID, segmentInfo.segment[0], segmentInfo.segment[1], startingVotes, startingLocked, UUID, userID, timeSubmitted, 0
, segmentInfo.category, segmentInfo.actionType, service, videoDuration, reputation, shadowBanRow.userCount, hashedVideoID, userAgent, segmentInfo.description
, segmentInfo.category, segmentInfo.actionType, service, videoDuration, reputation, shadowBanCount, hashedVideoID, userAgent, segmentInfo.description
],
);
@@ -603,7 +616,7 @@ export async function postSkipSegments(req: Request, res: Response): Promise<Res
}
for (let i = 0; i < segments.length; i++) {
sendWebhooks(apiVideoDetails, userID, videoID, UUIDs[i], segments[i], service).catch(Logger.error);
sendWebhooks(apiVideoDetails, userID, videoID, UUIDs[i], segments[i], service).catch((e) => Logger.error(`call send webhooks ${e}`));
}
return res.json(newSegments);
}

View File

@@ -5,6 +5,7 @@ import { isUserVIP } from "../utils/isUserVIP";
import { getHashCache } from "../utils/getHashCache";
import { HashedUserID, UserID } from "../types/user.model";
import { config } from "../config";
import { generateWarningDiscord, warningData, dispatchEvent } from "../utils/webhookUtils";
type warningEntry = {
userID: HashedUserID,
@@ -21,6 +22,8 @@ function checkExpiredWarning(warning: warningEntry): boolean {
return warning.issueTime > expiry && !warning.enabled;
}
const getUsername = (userID: HashedUserID) => db.prepare("get", `SELECT "userName" FROM "userNames" WHERE "userID" = ?`, [userID], { useReplica: true });
export async function postWarning(req: Request, res: Response): Promise<Response> {
if (!req.body.userID) return res.status(400).json({ "message": "Missing parameters" });
@@ -62,6 +65,27 @@ export async function postWarning(req: Request, res: Response): Promise<Response
resultStatus = "removed from";
}
const targetUsername = await getUsername(userID) ?? null;
const issuerUsername = await getUsername(issuerUserID) ?? null;
const webhookData = {
target: {
userID,
username: targetUsername
},
issuer: {
userID: issuerUserID,
username: issuerUsername
},
reason
} as warningData;
try {
const warning = generateWarningDiscord(webhookData);
dispatchEvent("warning", warning);
} catch /* istanbul ignore next */ (err) {
Logger.error(`Error sending warning to Discord ${err}`);
}
return res.status(200).json({
message: `Warning ${resultStatus} user '${userID}'.`,
});

View File

@@ -95,11 +95,10 @@ export async function setUsername(req: Request, res: Response): Promise<Response
}
async function checkPrivateUsername(username: string, userID: string): Promise<boolean> {
const userIDHash = await getHashCache(userID);
if (username == userID) return false;
if (username.length <= config.minUserIDLength) return true; // don't check for cross matches <= 30 characters
const userNameHash = await getHashCache(username);
if (userIDHash == userNameHash) return false;
const sponsorTimeRow = await db.prepare("get", `SELECT "userID" FROM "sponsorTimes" WHERE "userID" = ? LIMIT 1`, [userNameHash]);
const userNameRow = await db.prepare("get", `SELECT "userID" FROM "userNames" WHERE "userID" = ? LIMIT 1`, [userNameHash]);
if ((sponsorTimeRow || userNameRow)?.userID) return false;
if (userNameRow?.userID) return false;
return true;
}

View File

@@ -1,28 +1,36 @@
import { db } from "../databases/databases";
import { db, privateDB } from "../databases/databases";
import { getHashCache } from "../utils/getHashCache";
import { Request, Response } from "express";
import { config } from "../config";
import { Category, Service, VideoID, VideoIDHash } from "../types/segments.model";
import { Category, HashedIP, Service, VideoID, VideoIDHash } from "../types/segments.model";
import { UserID } from "../types/user.model";
import { QueryCacher } from "../utils/queryCacher";
import { isUserVIP } from "../utils/isUserVIP";
import { parseCategories } from "../utils/parseParams";
export async function shadowBanUser(req: Request, res: Response): Promise<Response> {
const userID = req.query.userID as UserID;
const hashedIP = req.query.hashedIP as string;
const hashedIP = req.query.hashedIP as HashedIP;
const adminUserIDInput = req.query.adminUserID as UserID;
const type = Number.parseInt(req.query.type as string ?? "1");
if (isNaN(type)) {
return res.sendStatus(400);
}
const enabled = req.query.enabled === undefined
? true
: req.query.enabled === "true";
const lookForIPs = req.query.lookForIPs === "true";
const banUsers = req.query.banUsers === undefined
? true
: req.query.banUsers === "true";
//if enabled is false and the old submissions should be made visible again
const unHideOldSubmissions = req.query.unHideOldSubmissions !== "false";
const categories: string[] = req.query.categories ? JSON.parse(req.query.categories as string) : config.categoryList;
categories.filter((category) => typeof category === "string" && !(/[^a-z|_|-]/.test(category)));
const categories: Category[] = parseCategories(req, config.categoryList as Category[]);
if (adminUserIDInput == undefined || (userID == undefined && hashedIP == undefined)) {
if (adminUserIDInput == undefined || (userID == undefined && hashedIP == undefined || type <= 0)) {
//invalid request
return res.sendStatus(400);
}
@@ -37,88 +45,111 @@ export async function shadowBanUser(req: Request, res: Response): Promise<Respon
}
if (userID) {
//check to see if this user is already shadowbanned
const row = await db.prepare("get", `SELECT count(*) as "userCount" FROM "shadowBannedUsers" WHERE "userID" = ?`, [userID]);
const result = await banUser(userID, enabled, unHideOldSubmissions, type, categories);
if (enabled && row.userCount == 0) {
//add them to the shadow ban list
if (enabled && lookForIPs) {
const ipLoggingFixedTime = 1675295716000;
const timeSubmitted = (await db.prepare("all", `SELECT "timeSubmitted" FROM "sponsorTimes" WHERE "timeSubmitted" > ? AND "userID" = ?`, [ipLoggingFixedTime, userID])) as { timeSubmitted: number }[];
const ips = (await Promise.all(timeSubmitted.map((s) => {
return privateDB.prepare("all", `SELECT "hashedIP" FROM "sponsorTimes" WHERE "timeSubmitted" = ?`, [s.timeSubmitted]) as Promise<{ hashedIP: HashedIP }[]>;
}))).flat();
//add it to the table
await db.prepare("run", `INSERT INTO "shadowBannedUsers" VALUES(?)`, [userID]);
await Promise.all([...new Set(ips.map((ip) => ip.hashedIP))].map((ip) => {
return banIP(ip, enabled, unHideOldSubmissions, type, categories, true);
}));
}
//find all previous submissions and hide them
if (unHideOldSubmissions) {
await unHideSubmissions(categories, userID);
}
} else if (!enabled && row.userCount > 0) {
//remove them from the shadow ban list
await db.prepare("run", `DELETE FROM "shadowBannedUsers" WHERE "userID" = ?`, [userID]);
//find all previous submissions and unhide them
if (unHideOldSubmissions) {
const segmentsToIgnore = (await db.prepare("all", `SELECT "UUID" FROM "sponsorTimes" st
JOIN "lockCategories" ns on "st"."videoID" = "ns"."videoID" AND st.category = ns.category AND "st"."service" = "ns"."service" WHERE "st"."userID" = ?`
, [userID])).map((item: {UUID: string}) => item.UUID);
const allSegments = (await db.prepare("all", `SELECT "UUID" FROM "sponsorTimes" st WHERE "st"."userID" = ?`, [userID]))
.map((item: {UUID: string}) => item.UUID);
await Promise.all(allSegments.filter((item: {uuid: string}) => {
return segmentsToIgnore.indexOf(item) === -1;
}).map(async (UUID: string) => {
// collect list for unshadowbanning
(await db.prepare("all", `SELECT "videoID", "hashedVideoID", "service", "votes", "views", "userID" FROM "sponsorTimes" WHERE "UUID" = ? AND "shadowHidden" = 1 AND "category" in (${categories.map((c) => `'${c}'`).join(",")})`, [UUID]))
.forEach((videoInfo: {category: Category, videoID: VideoID, hashedVideoID: VideoIDHash, service: Service, userID: UserID}) => {
QueryCacher.clearSegmentCache(videoInfo);
}
);
return db.prepare("run", `UPDATE "sponsorTimes" SET "shadowHidden" = 0 WHERE "UUID" = ? AND "category" in (${categories.map((c) => `'${c}'`).join(",")})`, [UUID]);
}));
}
// already shadowbanned
} else if (enabled && row.userCount > 0) {
// apply unHideOldSubmissions if applicable
if (unHideOldSubmissions) {
await unHideSubmissions(categories, userID);
return res.sendStatus(200);
}
// otherwise ban already exists, send 409
return res.sendStatus(409);
if (result) {
res.sendStatus(result);
return;
}
} else if (hashedIP) {
//check to see if this user is already shadowbanned
// let row = await privateDB.prepare('get', "SELECT count(*) as userCount FROM shadowBannedIPs WHERE hashedIP = ?", [hashedIP]);
// if (enabled && row.userCount == 0) {
if (enabled) {
//add them to the shadow ban list
//add it to the table
// await privateDB.prepare('run', "INSERT INTO shadowBannedIPs VALUES(?)", [hashedIP]);
//find all previous submissions and hide them
if (unHideOldSubmissions) {
await db.prepare("run", `UPDATE "sponsorTimes" SET "shadowHidden" = 1 WHERE "timeSubmitted" IN
(SELECT "privateDB"."timeSubmitted" FROM "sponsorTimes" LEFT JOIN "privateDB"."sponsorTimes" as "privateDB" ON "sponsorTimes"."timeSubmitted"="privateDB"."timeSubmitted"
WHERE "privateDB"."hashedIP" = ?)`, [hashedIP]);
}
} /*else if (!enabled && row.userCount > 0) {
// //remove them from the shadow ban list
// await db.prepare('run', "DELETE FROM shadowBannedUsers WHERE userID = ?", [userID]);
// //find all previous submissions and unhide them
// if (unHideOldSubmissions) {
// await db.prepare('run', "UPDATE sponsorTimes SET shadowHidden = 0 WHERE userID = ?", [userID]);
// }
}*/
const result = await banIP(hashedIP, enabled, unHideOldSubmissions, type, categories, banUsers);
if (result) {
res.sendStatus(result);
return;
}
}
return res.sendStatus(200);
}
async function unHideSubmissions(categories: string[], userID: UserID) {
await db.prepare("run", `UPDATE "sponsorTimes" SET "shadowHidden" = 1 WHERE "userID" = ? AND "category" in (${categories.map((c) => `'${c}'`).join(",")})
/**
 * Add or remove a user from the "shadowBannedUsers" list, optionally
 * (re-)hiding or un-hiding their existing submissions.
 *
 * @param userID - user to (un)ban, as stored in "shadowBannedUsers"
 *        (NOTE(review): typed UserID here, but the postSkipSegments caller
 *        passes a HashedUserID — confirm which form the table holds).
 * @param enabled - true to ban, false to unban.
 * @param unHideOldSubmissions - when true, also update "shadowHidden" on the
 *        user's previous submissions (set to `type` when banning, 0 when unbanning).
 * @param type - value written to "shadowHidden" when hiding.
 * @param categories - only submissions in these categories are touched.
 * @returns HTTP-style status: 200 on success, 409 when the ban already exists
 *          and no re-hide was requested, 400 when unbanning a user who is not banned.
 */
export async function banUser(userID: UserID, enabled: boolean, unHideOldSubmissions: boolean, type: number, categories: Category[]): Promise<number> {
    //check to see if this user is already shadowbanned
    const row = await db.prepare("get", `SELECT count(*) as "userCount" FROM "shadowBannedUsers" WHERE "userID" = ?`, [userID]);
    if (enabled && row.userCount == 0) {
        //add them to the shadow ban list
        //add it to the table
        await db.prepare("run", `INSERT INTO "shadowBannedUsers" VALUES(?)`, [userID]);
        //find all previous submissions and hide them
        if (unHideOldSubmissions) {
            await unHideSubmissionsByUser(categories, userID, type);
        }
    } else if (enabled && row.userCount > 0) {
        // already banned: re-apply hiding if requested (lets a caller change
        // the hidden `type` or category set of an existing ban)
        if (unHideOldSubmissions) {
            await unHideSubmissionsByUser(categories, userID, type);
        } else {
            // otherwise ban already exists, send 409
            return 409;
        }
    } else if (!enabled && row.userCount > 0) {
        //find all previous submissions and unhide them (shadowHidden = 0)
        if (unHideOldSubmissions) {
            await unHideSubmissionsByUser(categories, userID, 0);
        }
        //remove them from the shadow ban list
        await db.prepare("run", `DELETE FROM "shadowBannedUsers" WHERE "userID" = ?`, [userID]);
    } else if (row.userCount == 0) {
        // asked to unban a user who was never banned — nothing to do
        return 400;
    }
    return 200;
}
/**
 * Add or remove a hashed IP from the "shadowBannedIPs" list, optionally
 * (un)hiding every submission made from that IP and, when `banUsers` is set,
 * cascading a user-level ban onto every account that submitted from it.
 *
 * @param hashedIP - the hashed IP to (un)ban.
 * @param enabled - true to ban, false to unban.
 * @param unHideOldSubmissions - when true, update "shadowHidden" on past
 *        submissions from this IP (`type` when banning, 0 when unbanning).
 * @param type - value written to "shadowHidden" when hiding.
 * @param categories - only submissions in these categories are touched.
 * @param banUsers - when banning with re-hide, also ban each user found
 *        submitting from this IP (via banUser).
 * @returns HTTP-style status: 200 on success, 409 when the IP ban already
 *          exists and no re-hide was requested.
 */
export async function banIP(hashedIP: HashedIP, enabled: boolean, unHideOldSubmissions: boolean, type: number, categories: Category[], banUsers: boolean): Promise<number> {
    //check to see if this IP is already shadowbanned
    const row = await db.prepare("get", `SELECT count(*) as "userCount" FROM "shadowBannedIPs" WHERE "hashedIP" = ?`, [hashedIP]);
    if (enabled) {
        if (row.userCount == 0) {
            await db.prepare("run", `INSERT INTO "shadowBannedIPs" VALUES(?)`, [hashedIP]);
        }
        //find all previous submissions and hide them
        if (unHideOldSubmissions) {
            const users = await unHideSubmissionsByIP(categories, hashedIP, type);
            if (banUsers) {
                await Promise.all([...users].map((user) => {
                    return banUser(user, enabled, unHideOldSubmissions, type, categories);
                }));
            }
        // NOTE: this else-if chains on the unHideOldSubmissions guard, not on
        // `enabled` — 409 is only returned when the ban already existed AND no
        // re-hide was requested.
        } else if (row.userCount > 0) {
            // Nothing to do, and already added
            return 409;
        }
    } else if (!enabled) {
        if (row.userCount > 0) {
            //remove them from the shadow ban list
            await db.prepare("run", `DELETE FROM "shadowBannedIPs" WHERE "hashedIP" = ?`, [hashedIP]);
        }
        //find all previous submissions and unhide them
        if (unHideOldSubmissions) {
            await unHideSubmissionsByIP(categories, hashedIP, 0);
        }
    }
    return 200;
}
async function unHideSubmissionsByUser(categories: string[], userID: UserID, type = 1) {
await db.prepare("run", `UPDATE "sponsorTimes" SET "shadowHidden" = '${type}' WHERE "userID" = ? AND "category" in (${categories.map((c) => `'${c}'`).join(",")})
AND NOT EXISTS ( SELECT "videoID", "category" FROM "lockCategories" WHERE
"sponsorTimes"."videoID" = "lockCategories"."videoID" AND "sponsorTimes"."service" = "lockCategories"."service" AND "sponsorTimes"."category" = "lockCategories"."category")`, [userID]);
@@ -128,3 +159,23 @@ async function unHideSubmissions(categories: string[], userID: UserID) {
QueryCacher.clearSegmentCache(videoInfo);
});
}
/**
 * Set "shadowHidden" = `type` on every submission made from `hashedIP`
 * (matched through the private DB's per-submission IP log via "timeSubmitted"),
 * clear the query cache for each affected video, and collect the userIDs that
 * made those submissions so the caller can cascade user bans.
 *
 * Submissions whose video/category pair appears in "lockCategories" are left
 * untouched by the UPDATE (but their userIDs are still collected from the SELECT).
 *
 * NOTE(review): `type` and the category list are interpolated directly into
 * the SQL string; both are validated upstream (parseInt / parseCategories),
 * but binding them as parameters would be safer — confirm before reuse.
 *
 * @returns the set of userIDs whose submissions matched.
 */
async function unHideSubmissionsByIP(categories: string[], hashedIP: HashedIP, type = 1): Promise<Set<UserID>> {
    const submissions = await privateDB.prepare("all", `SELECT "timeSubmitted" FROM "sponsorTimes" WHERE "hashedIP" = ?`, [hashedIP]) as { timeSubmitted: number }[];
    const users: Set<UserID> = new Set();
    await Promise.all(submissions.map(async (submission) => {
        // Gather affected videos to invalidate their cached segment responses
        (await db.prepare("all", `SELECT "videoID", "hashedVideoID", "service", "votes", "views", "userID" FROM "sponsorTimes" WHERE "timeSubmitted" = ? AND "category" in (${categories.map((c) => `'${c}'`).join(",")})`, [submission.timeSubmitted]))
            .forEach((videoInfo: { category: Category, videoID: VideoID, hashedVideoID: VideoIDHash, service: Service, userID: UserID }) => {
                QueryCacher.clearSegmentCache(videoInfo);
                users.add(videoInfo.userID);
            }
            );
        await db.prepare("run", `UPDATE "sponsorTimes" SET "shadowHidden" = ${type} WHERE "timeSubmitted" = ? AND "category" in (${categories.map((c) => `'${c}'`).join(",")})
            AND NOT EXISTS ( SELECT "videoID", "category" FROM "lockCategories" WHERE
            "sponsorTimes"."videoID" = "lockCategories"."videoID" AND "sponsorTimes"."service" = "lockCategories"."service" AND "sponsorTimes"."category" = "lockCategories"."category")`, [submission.timeSubmitted]);
    }));
    return users;
}

View File

@@ -32,7 +32,7 @@ export async function verifyTokenRequest(req: VerifyTokenRequest, res: Response)
const identity = await getPatreonIdentity(tokens.accessToken);
if (tokens.expiresIn < 15 * 24 * 60 * 60) {
refreshToken(TokenType.patreon, licenseKey, tokens.refreshToken).catch(Logger.error);
refreshToken(TokenType.patreon, licenseKey, tokens.refreshToken).catch((e) => Logger.error(`refresh token: ${e}`));
}
/* istanbul ignore else */

View File

@@ -96,7 +96,7 @@ async function checkVideoDuration(UUID: SegmentUUID) {
AND "hidden" = 0 AND "shadowHidden" = 0 AND
"actionType" != 'full' AND "votes" > -2`,
[videoID, service, latestSubmission.timeSubmitted]);
deleteLockCategories(videoID, null, null, service).catch(Logger.error);
deleteLockCategories(videoID, null, null, service).catch((e) => Logger.error(`delete lock categories after vote: ${e}`));
}
}
@@ -411,7 +411,7 @@ export async function vote(ip: IPAddress, UUID: SegmentUUID, paramUserID: UserID
// no restrictions on checkDuration
// check duration of all submissions on this video
if (type <= 0) {
checkVideoDuration(UUID).catch(Logger.error);
checkVideoDuration(UUID).catch((e) => Logger.error(`checkVideoDuration: ${e}`));
}
try {
@@ -524,7 +524,7 @@ export async function vote(ip: IPAddress, UUID: SegmentUUID, paramUserID: UserID
incrementAmount,
oldIncrementAmount,
finalResponse
}).catch(Logger.error);
}).catch((e) => Logger.error(`Sending vote webhook: ${e}`));
}
return { status: finalResponse.finalStatus, message: finalResponse.finalMessage ?? undefined };
} catch (err) {

View File

@@ -52,7 +52,8 @@ export interface Segment {
export enum Visibility {
VISIBLE = 0,
HIDDEN = 1
HIDDEN = 1,
MORE_HIDDEN = 2
}
export interface DBSegment {

View File

@@ -1,4 +1,5 @@
export function createMemoryCache(memoryFn: (...args: any[]) => void, cacheTimeMs: number): any {
/* istanbul ignore if */
if (isNaN(cacheTimeMs)) cacheTimeMs = 0;
// holds the promise results

13
src/utils/getCWSUsers.ts Normal file
View File

@@ -0,0 +1,13 @@
import axios from "axios";
import { Logger } from "../utils/logger";
/**
 * Fetch the user count of a Chrome Web Store extension via the (undocumented)
 * ajax detail endpoint. Resolves to 0 when the request fails or the response
 * does not match the expected shape.
 */
export const getCWSUsers = async (extID: string): Promise<number | undefined> => {
    try {
        const res = await axios.post(`https://chrome.google.com/webstore/ajax/detail?pv=20210820&id=${extID}`);
        // The response body is junk lines followed by JSON; the payload sits on the third line.
        const payload = JSON.parse(res.data.split("\n")[2]);
        // payload[1][1][0][23] holds the user count as a display string such as "1,000,000+".
        const countText = (payload[1][1][0][23]).replaceAll(/,|\+/g, "");
        return parseInt(countText);
    } catch (err) {
        Logger.error(`Error getting chrome users - ${err}`);
        return 0;
    }
};

View File

@@ -28,7 +28,7 @@ async function getFromRedis<T extends string>(key: HashedValue): Promise<T & Has
Logger.debug(`Got data from redis: ${reply}`);
return reply as T & HashedValue;
}
} catch (err) {
} catch (err) /* istanbul ignore next */ {
Logger.error(err as string);
}
}
@@ -37,7 +37,7 @@ async function getFromRedis<T extends string>(key: HashedValue): Promise<T & Has
const data = getHash(key, cachedHashTimes);
if (!config.redis?.disableHashCache) {
redis.set(redisKey, data).catch((err) => Logger.error(err));
redis.set(redisKey, data).catch(/* istanbul ignore next */ (err) => Logger.error(err));
}
return data as T & HashedValue;

View File

@@ -70,15 +70,16 @@ export async function getPlayerData (videoID: string, ignoreCache = false): Prom
}
try {
const data = await getFromITube(videoID)
.catch(err => {
.catch(/* istanbul ignore next */ err => {
Logger.warn(`InnerTube API Error for ${videoID}: ${err}`);
return Promise.reject(err);
});
DiskCache.set(cacheKey, data)
.then(() => Logger.debug(`InnerTube API: video information cache set for: ${videoID}`))
.catch((err: any) => Logger.warn(err));
.catch(/* istanbul ignore next */ (err: any) => Logger.warn(err));
return data;
} catch (err) {
/* istanbul ignore next */
return Promise.reject(err);
}
}

View File

@@ -11,7 +11,7 @@ export const isUserTempVIP = async (hashedUserID: HashedUserID, videoID: VideoID
try {
const reply = await redis.get(tempVIPKey(hashedUserID));
return reply && reply == channelID;
} catch (e) {
} catch (e) /* istanbul ignore next */ {
Logger.error(e as string);
return false;
}

View File

@@ -45,6 +45,7 @@ class Logger {
};
constructor() {
/* istanbul ignore if */
if (config.mode === "development") {
this._settings.INFO = true;
this._settings.DEBUG = true;
@@ -73,9 +74,11 @@ class Logger {
let color = colors.Bright;
if (level === LogLevel.ERROR) color = colors.FgRed;
/* istanbul ignore if */
if (level === LogLevel.WARN) color = colors.FgYellow;
let levelStr = level.toString();
/* istanbul ignore if */
if (levelStr.length === 4) {
levelStr += " "; // ensure logs are aligned
}

78
src/utils/parseParams.ts Normal file
View File

@@ -0,0 +1,78 @@
import { Request } from "express";
import { ActionType, SegmentUUID, Category } from "../types/segments.model";
import { config } from "../config";
type fn = (req: Request, fallback: any) => any[];
// Runs a query-parsing function, mapping any thrown error (typically a
// SyntaxError from JSON.parse on a malformed query param) to undefined.
const syntaxErrorWrapper = (fn: fn, req: Request, fallback: any) => {
    try {
        return fn(req, fallback);
    } catch {
        return undefined;
    }
};
// Reads the requested categories from the query string.
// Priority: JSON-encoded "categories" list, then single/repeated "category"
// params, then the caller-supplied fallback. JSON.parse may throw; callers
// route through syntaxErrorWrapper to absorb that.
const getCategories = (req: Request, fallback: Category[] ): string[] | Category[] => {
    if (req.query.categories) {
        return JSON.parse(req.query.categories as string);
    }
    const single = req.query.category;
    if (single) {
        return Array.isArray(single) ? single : [single];
    }
    return fallback;
};
// Keeps only string entries consisting solely of lowercase letters,
// underscores and hyphens. Returns undefined for non-array input so callers
// can distinguish "invalid parameter" from "no matches".
// Fix: the previous pattern /[^a-z|_|-]/ placed literal "|" characters inside
// the character class, unintentionally whitelisting the pipe character.
const validateString = (array: any[]): any[] => {
    if (!Array.isArray(array)) return undefined;
    return array
        .filter((item: any) => typeof item === "string")
        .filter((item: string) => !(/[^a-z_-]/.test(item)));
};
// Returns the (deduplicated) list of categories that support at least one of
// the given action types, according to config.categorySupport.
const filterActionType = (actionTypes: ActionType[]) => {
    const matching = new Set();
    for (const [category, supportedTypes] of Object.entries(config.categorySupport)) {
        if (actionTypes.some((type) => supportedTypes.includes(type))) {
            matching.add(category as Category);
        }
    }
    return [...matching];
};
// Drops categories that do not support any of the requested action types.
// Fix: hoist the loop-invariant filterActionType(actionTypes) out of the
// filter callback — it was recomputed for every category (accidental O(n·m)).
export const filterInvalidCategoryActionType = (categories: Category[], actionTypes: ActionType[]): Category[] => {
    const supported = filterActionType(actionTypes);
    return categories.filter((category: Category) => supported.includes(category));
};
// Reads the requested action types from the query string.
// Priority: JSON-encoded "actionTypes" list, then single/repeated
// "actionType" params, then the caller-supplied fallback. JSON.parse may
// throw; callers route through syntaxErrorWrapper to absorb that.
const getActionTypes = (req: Request, fallback: ActionType[]): ActionType[] => {
    if (req.query.actionTypes) {
        return JSON.parse(req.query.actionTypes as string);
    }
    const single = req.query.actionType;
    if (single) {
        return (Array.isArray(single) ? single : [single]) as ActionType[];
    }
    return fallback;
};
// fallback to empty array
// Reads required segment UUIDs from the query string; always falls back to
// an empty array (requiring no segments). JSON.parse may throw; callers
// route through syntaxErrorWrapper to absorb that.
const getRequiredSegments = (req: Request): SegmentUUID[] => {
    if (req.query.requiredSegments) {
        return JSON.parse(req.query.requiredSegments as string);
    }
    const single = req.query.requiredSegment;
    if (single) {
        return (Array.isArray(single) ? single : [single]) as SegmentUUID[];
    }
    return [];
};
export const parseCategories = (req: Request, fallback: Category[]): Category[] => {
const categories = syntaxErrorWrapper(getCategories, req, fallback);
return categories ? validateString(categories) : undefined;
};
export const parseActionTypes = (req: Request, fallback: ActionType[]): ActionType[] => {
const actionTypes = syntaxErrorWrapper(getActionTypes, req, fallback);
return actionTypes ? validateString(actionTypes) : undefined;
};
// Parses required segment UUIDs; undefined signals a malformed parameter.
// The fallback argument is unused by getRequiredSegments, which always
// defaults to [] — required segments never fall back to anything else.
export const parseRequiredSegments = (req: Request): SegmentUUID[] | undefined => {
    return syntaxErrorWrapper(getRequiredSegments, req, []);
};
// True when every supplied category is one the server knows about
// (present in config.categoryList).
export const validateCategories = (categories: string[]): boolean => {
    const known = config.categoryList;
    return categories.every((category: string) => known.includes(category));
};

View File

@@ -2,42 +2,7 @@ import { Request } from "express";
import { ActionType, SegmentUUID, Category, Service } from "../types/segments.model";
import { getService } from "./getService";
type fn = (req: Request) => any[];
const syntaxErrorWrapper = (fn: fn, req: Request) => {
try { return fn(req); }
catch (e) { return undefined; }
};
// Default to sponsor
const getCategories = (req: Request): Category[] =>
req.query.categories
? JSON.parse(req.query.categories as string)
: req.query.category
? Array.isArray(req.query.category)
? req.query.category
: [req.query.category]
: ["sponsor"];
// Default to skip
const getActionTypes = (req: Request): ActionType[] =>
req.query.actionTypes
? JSON.parse(req.query.actionTypes as string)
: req.query.actionType
? Array.isArray(req.query.actionType)
? req.query.actionType
: [req.query.actionType]
: [ActionType.Skip];
// Default to empty array
const getRequiredSegments = (req: Request): SegmentUUID[] =>
req.query.requiredSegments
? JSON.parse(req.query.requiredSegments as string)
: req.query.requiredSegment
? Array.isArray(req.query.requiredSegment)
? req.query.requiredSegment
: [req.query.requiredSegment]
: [];
import { parseCategories, parseActionTypes, parseRequiredSegments } from "./parseParams";
const errorMessage = (parameter: string) => `${parameter} parameter does not match format requirements.`;
@@ -48,20 +13,14 @@ export function parseSkipSegments(req: Request): {
service: Service;
errors: string[];
} {
let categories: Category[] = syntaxErrorWrapper(getCategories, req);
const actionTypes: ActionType[] = syntaxErrorWrapper(getActionTypes, req);
const requiredSegments: SegmentUUID[] = syntaxErrorWrapper(getRequiredSegments, req);
const categories: Category[] = parseCategories(req, [ "sponsor" as Category ]);
const actionTypes: ActionType[] = parseActionTypes(req, [ActionType.Skip]);
const requiredSegments: SegmentUUID[] = parseRequiredSegments(req);
const service: Service = getService(req.query.service, req.body.services);
const errors: string[] = [];
if (!Array.isArray(categories)) errors.push(errorMessage("categories"));
else {
// check category names for invalid characters
// and none string elements
categories = categories
.filter((item: any) => typeof item === "string")
.filter((category) => !(/[^a-z|_|-]/.test(category)));
if (categories.length === 0) errors.push("No valid categories provided.");
}
else if (categories.length === 0) errors.push("No valid categories provided.");
if (!Array.isArray(actionTypes)) errors.push(errorMessage("actionTypes"));
if (!Array.isArray(requiredSegments)) errors.push(errorMessage("requiredSegments"));
// finished parsing

View File

@@ -16,7 +16,7 @@ async function lowDownvotes(userID: HashedUserID): Promise<boolean> {
const result = await db.prepare("get", `SELECT count(*) as "submissionCount", SUM(CASE WHEN "votes" < 0 AND "views" > 5 THEN 1 ELSE 0 END) AS "downvotedSubmissions" FROM "sponsorTimes" WHERE "userID" = ?`
, [userID], { useReplica: true });
return result.submissionCount > 100 && result.downvotedSubmissions / result.submissionCount < 0.15;
return result.submissionCount > 10 && result.downvotedSubmissions / result.submissionCount < 0.15;
}
export async function canSubmit(userID: HashedUserID, category: Category): Promise<CanSubmitResult> {

View File

@@ -22,6 +22,41 @@ async function get<T>(fetchFromDB: () => Promise<T>, key: string): Promise<T> {
return data;
}
// Timing-instrumented variant of get(): returns the cached/fetched value
// together with when the lookup started, when the DB fallback began
// (cache misses only), and when it finished.
async function getTraced<T>(fetchFromDB: () => Promise<T>, key: string): Promise<{
    data: T;
    startTime: number;
    dbStartTime?: number;
    endTime: number;
}> {
    const startTime = Date.now();

    try {
        const cached = await redis.get(key);
        if (cached) {
            Logger.debug(`Got data from redis: ${cached}`);
            return {
                data: JSON.parse(cached),
                startTime,
                endTime: Date.now()
            };
        }
    } catch (e) { } //eslint-disable-line no-empty

    // Cache miss (or redis failure): hit the database and backfill the cache
    // in the background; a failed backfill is only logged.
    const dbStartTime = Date.now();
    const data = await fetchFromDB();
    redis.setEx(key, config.redis?.expiryTime, JSON.stringify(data)).catch((err) => Logger.error(err));
    return {
        data,
        startTime,
        dbStartTime,
        endTime: Date.now()
    };
}
/**
* Gets from redis for all specified values and splits the result before adding it to redis cache
*/
@@ -117,6 +152,7 @@ function clearFeatureCache(userID: HashedUserID, feature: Feature): void {
export const QueryCacher = {
get,
getTraced,
getAndSplit,
clearSegmentCache,
clearBrandingCache,

View File

@@ -4,6 +4,8 @@ import { createClient } from "redis";
import { RedisCommandArgument, RedisCommandArguments, RedisCommandRawReply } from "@redis/client/dist/lib/commands";
import { RedisClientOptions } from "@redis/client/dist/lib/client";
import { RedisReply } from "rate-limit-redis";
import { db } from "../databases/databases";
import { Postgres } from "../databases/Postgres";
export interface RedisStats {
activeRequests: number;
@@ -59,7 +61,7 @@ if (config.redis?.enabled) {
const getRead = readClient?.get?.bind(readClient);
exportClient.get = (key) => new Promise((resolve, reject) => {
if (config.redis.maxConnections && activeRequests > config.redis.maxConnections) {
reject("Too many active requests");
reject("Too many active requests in general");
return;
}
@@ -79,6 +81,7 @@ if (config.redis?.enabled) {
if (readResponseTime.length > maxStoredTimes) readResponseTime.shift();
if (config.redis.stopWritingAfterResponseTime
&& responseTime > config.redis.stopWritingAfterResponseTime) {
Logger.error(`Hit response time limit at ${responseTime}ms`);
lastResponseTimeLimit = Date.now();
}
}).catch((err) => {
@@ -98,7 +101,7 @@ if (config.redis?.enabled) {
if ((config.redis.maxWriteConnections && activeRequests > config.redis.maxWriteConnections)
|| (config.redis.responseTimePause
&& Date.now() - lastResponseTimeLimit < config.redis.responseTimePause)) {
reject("Too many active requests to write");
reject(`Too many active requests to write due to ${activeRequests} requests and ${Date.now() - lastResponseTimeLimit}ms since last limit. ${(db as Postgres)?.getStats?.()?.activeRequests} active db requests with ${(db as Postgres)?.getStats?.()?.avgReadTime}ms`);
return;
}
@@ -132,17 +135,21 @@ if (config.redis?.enabled) {
.then((reply) => resolve(reply))
.catch((err) => reject(err))
);
/* istanbul ignore next */
client.on("error", function(error) {
lastClientFail = Date.now();
Logger.error(`Redis Error: ${error}`);
});
/* istanbul ignore next */
client.on("reconnect", () => {
Logger.info("Redis: trying to reconnect");
});
/* istanbul ignore next */
readClient?.on("error", function(error) {
lastReadFail = Date.now();
Logger.error(`Redis Read-Only Error: ${error}`);
});
/* istanbul ignore next */
readClient?.on("reconnect", () => {
Logger.info("Redis Read-Only: trying to reconnect");
});

View File

@@ -57,8 +57,8 @@ export const videoLabelsKey = (videoID: VideoID, service: Service): string =>
`labels.v1.${service}.videoID.${videoID}`;
export function videoLabelsHashKey(hashedVideoIDPrefix: VideoIDHash, service: Service): string {
hashedVideoIDPrefix = hashedVideoIDPrefix.substring(0, 4) as VideoIDHash;
if (hashedVideoIDPrefix.length !== 4) Logger.warn(`Redis skip segment hash-prefix key is not length 4! ${hashedVideoIDPrefix}`);
hashedVideoIDPrefix = hashedVideoIDPrefix.substring(0, 3) as VideoIDHash;
if (hashedVideoIDPrefix.length !== 3) Logger.warn(`Redis video labels hash-prefix key is not length 3! ${hashedVideoIDPrefix}`);
return `labels.v1.${service}.${hashedVideoIDPrefix}`;
}

View File

@@ -15,7 +15,7 @@ interface ReputationDBResult {
}
export async function getReputation(userID: UserID): Promise<number> {
const weekAgo = Date.now() - 1000 * 60 * 60 * 24 * 45; // 45 days ago
const weekAgo = Date.now() - 1000 * 60 * 60 * 24 * 7; // 45 days ago
const pastDate = Date.now() - 1000 * 60 * 60 * 24 * 45; // 45 days ago
// 1596240000000 is August 1st 2020, a little after auto upvote was disabled
const fetchFromDB = () => db.prepare("get",
@@ -31,14 +31,14 @@ export async function getReputation(userID: UserID): Promise<number> {
SUM(CASE WHEN "timeSubmitted" < ? AND "timeSubmitted" > 1596240000000 AND "votes" > 0 THEN 1 ELSE 0 END) AS "semiOldUpvotedSubmissions",
SUM(CASE WHEN "timeSubmitted" < ? AND "timeSubmitted" > 1596240000000 AND "votes" > 0 THEN 1 ELSE 0 END) AS "oldUpvotedSubmissions",
SUM(CASE WHEN "votes" > 0
AND NOT EXISTS (
SELECT * FROM "sponsorTimes" as c
WHERE (c."votes" > "a"."votes" OR c."locked" > "a"."locked") AND
c."videoID" = "a"."videoID" AND
c."category" = "a"."category" LIMIT 1)
AND EXISTS (
SELECT * FROM "lockCategories" as l
WHERE l."videoID" = "a"."videoID" AND l."service" = "a"."service" AND l."category" = "a"."category" LIMIT 1)
AND ("locked" > 0 OR NOT EXISTS (
SELECT * FROM "sponsorTimes" as c
WHERE (c."votes" > "a"."votes" OR c."locked" > "a"."locked") AND
c."videoID" = "a"."videoID" AND
c."category" = "a"."category" LIMIT 1) )
THEN 1 ELSE 0 END) AS "mostUpvotedInLockedVideoSum"
FROM "sponsorTimes" as "a" WHERE "userID" = ? AND "actionType" != 'full'`, [userID, weekAgo, pastDate, userID], { useReplica: true }) as Promise<ReputationDBResult>;

View File

@@ -1,6 +1,7 @@
import { config } from "../config";
import { Logger } from "../utils/logger";
import axios from "axios";
import { HashedUserID } from "../types/user.model";
function getVoteAuthorRaw(submissionCount: number, isTempVIP: boolean, isVIP: boolean, isOwnSubmission: boolean): string {
if (isOwnSubmission) {
@@ -57,8 +58,35 @@ function dispatchEvent(scope: string, data: Record<string, unknown>): void {
}
}
// Payload shape for "warning"-scope webhook notifications.
interface warningData {
    // The user receiving the warning.
    target: {
        userID: HashedUserID
        username: string | null
    },
    // The user who issued the warning.
    issuer: {
        userID: HashedUserID,
        username: string | null
    },
    // Free-text reason shown in the Discord embed.
    reason: string
}
// Builds the Discord webhook payload (embed) announcing a user warning.
function generateWarningDiscord(data: warningData) {
    const { target, issuer, reason } = data;
    const description = `**User:** ${target.username} (${target.userID})\n**Issuer:** ${issuer.username} (${issuer.userID})\n**Reason:** ${reason}`;
    return {
        embeds: [{
            title: "Warning",
            description,
            color: 0xff0000, // red
            timestamp: new Date().toISOString()
        }]
    };
}
export {
getVoteAuthorRaw,
getVoteAuthor,
dispatchEvent,
generateWarningDiscord,
warningData
};

View File

@@ -42,9 +42,14 @@
"vote.up",
"vote.down"
]
}, {
"url": "http://127.0.0.1:8081/WarningWebhook",
"key": "superSecretKey",
"scopes": [
"warning"
]
}
],
"maxNumberOfActiveWarnings": 3,
"hoursAfterWarningExpires": 24,
"rateLimit": {
"vote": {

65
test/cases/eTag.ts Normal file
View File

@@ -0,0 +1,65 @@
import assert from "assert";
import { client } from "../utils/httpClient";
import redis from "../../src/utils/redis";
import { config } from "../../src/config";
import { genRandom } from "../utils/getRandom";
// Two etags match when their hash type, hash key and service components
// agree; the trailing timestamp component is deliberately ignored.
const validateEtag = (expected: string, actual: string): boolean => {
    const [expType, expKey, expService] = expected.split(";");
    const [actType, actKey, actService] = actual.split(";");
    return expType === actType && expKey === actKey && expService === actService;
};
describe("eTag", () => {
    // The etag endpoint is backed by redis; skip the suite when disabled.
    before(function() {
        if (!config.redis?.enabled) this.skip();
    });

    const endpoint = "/etag";

    it("Should reject weak etag", async () => {
        const etagKey = `W/test-etag-${genRandom()}`;
        const res = await client.get(endpoint, { headers: { "If-None-Match": etagKey } });
        assert.strictEqual(res.status, 404);
    });
});
// Validates server-side etag revalidation: a stored etag of a known hash
// type within its TTL yields 304; unknown or expired etags fall through.
describe("304 etag validation", () => {
    before(function() {
        if (!config.redis?.enabled) this.skip();
    });
    const endpoint = "/etag";

    for (const hashType of ["skipSegments", "skipSegmentsHash", "videoLabel", "videoLabelHash"]) {
        it(`${hashType} etag should return 304`, (done) => {
            // Fix: genRandom must be CALLED — interpolating the bare function
            // embedded its source text in the key instead of a random string.
            const etagKey = `${hashType};${genRandom()};YouTube;${Date.now()}`;
            redis.setEx(etagKey, 8400, "test").then(() =>
                client.get(endpoint, { headers: { "If-None-Match": etagKey } }).then(res => {
                    assert.strictEqual(res.status, 304);
                    const etag = res.headers?.etag ?? "";
                    assert.ok(validateEtag(etagKey, etag));
                    done();
                }).catch(err => done(err))
            );
        });
    }

    it(`other etag type should not return 304`, (done) => {
        const etagKey = `invalidHashType;${genRandom()};YouTube;${Date.now()}`;
        client.get(endpoint, { headers: { "If-None-Match": etagKey } }).then(res => {
            assert.strictEqual(res.status, 404);
            done();
        }).catch(err => done(err));
    });

    it(`outdated etag type should not return 304`, (done) => {
        // Timestamp far in the past, so the stored etag is treated as stale.
        const etagKey = `skipSegments;${genRandom()};YouTube;5000`;
        client.get(endpoint, { headers: { "If-None-Match": etagKey } }).then(res => {
            assert.strictEqual(res.status, 404);
            done();
        }).catch(err => done(err));
    });
});

View File

@@ -3,11 +3,9 @@ import { getHashCache } from "../../src/utils/getHashCache";
import { shaHashKey } from "../../src/utils/redisKeys";
import { getHash } from "../../src/utils/getHash";
import redis from "../../src/utils/redis";
import crypto from "crypto";
import assert from "assert";
import { setTimeout } from "timers/promises";
const genRandom = (bytes=8) => crypto.pseudoRandomBytes(bytes).toString("hex");
import { genRandom } from "../utils/getRandom";
const rand1Hash = genRandom(24);
const rand1Hash_Key = getHash(rand1Hash, 1);

View File

@@ -338,4 +338,13 @@ describe("getSegmentInfo", () => {
})
.catch(err => done(err));
});
it("Should return 400 if no UUIDs not sent", (done) => {
client.get(endpoint)
.then(res => {
if (res.status !== 400) done(`non 400 response code: ${res.status}`);
else done(); // pass
})
.catch(err => done(err));
});
});

View File

@@ -486,4 +486,13 @@ describe("getSkipSegments", () => {
})
.catch(err => done(err));
});
it("Should get 400 for invalid category type", (done) => {
client.get(endpoint, { params: { videoID: "getSkipSegmentID0", category: 1 } })
.then(res => {
assert.strictEqual(res.status, 400);
done();
})
.catch(err => done(err));
});
});

View File

@@ -29,7 +29,7 @@ describe("getTopCategoryUsers", () => {
.catch(err => done(err));
});
it("Should return 400 if invalid sortType provided", (done) => {
it("Should return 400 if invalid type of sortType provided", (done) => {
client.get(endpoint, { params: { sortType: "a" } })
.then(res => {
assert.strictEqual(res.status, 400);
@@ -38,6 +38,15 @@ describe("getTopCategoryUsers", () => {
.catch(err => done(err));
});
it("Should return 400 if invalid sortType number provided", (done) => {
client.get(endpoint, { params: { sortType: 15, category: "sponsor" } })
.then(res => {
assert.strictEqual(res.status, 400);
done();
})
.catch(err => done(err));
});
it("Should return 400 if invalid category provided", (done) => {
client.get(endpoint, { params: { sortType: 1, category: "never_valid_category" } })
.then(res => {
@@ -121,4 +130,16 @@ describe("getTopCategoryUsers", () => {
})
.catch(err => done(err));
});
it("Should return no time saved for chapters", (done) => {
client.get(endpoint, { params: { sortType: 2, category: "chapter" } })
.then(res => {
assert.strictEqual(res.status, 200);
for (const timeSaved of res.data.minutesSaved) {
assert.strictEqual(timeSaved, 0, "Time saved should be 0");
}
done();
})
.catch(err => done(err));
});
});

View File

@@ -81,4 +81,14 @@ describe("getTopUsers", () => {
})
.catch(err => done(err));
});
it("Should be able to get cached result", (done) => {
client.get(endpoint, { params: { sortType: 0 } })// minutesSaved
.then(res => {
assert.strictEqual(res.status, 200);
assert.ok(res.data.userNames.indexOf(user1) < res.data.userNames.indexOf(user2), `Actual Order: ${res.data.userNames}`);
done();
})
.catch(err => done(err));
});
});

View File

@@ -7,7 +7,29 @@ describe("getTotalStats", () => {
it("Can get total stats", async () => {
const result = await client({ url: endpoint });
const data = result.data;
assert.ok(data?.userCount ?? true);
assert.strictEqual(data.userCount, 0, "User count should default false");
assert.ok(data.activeUsers >= 0);
assert.ok(data.apiUsers >= 0);
assert.ok(data.viewCount >= 0);
assert.ok(data.totalSubmissions >= 0);
assert.ok(data.minutesSaved >= 0);
});
it("Can get total stats without contributing users", async () => {
const result = await client({ url: `${endpoint}?countContributingUsers=false` });
const data = result.data;
assert.strictEqual(data.userCount, 0);
assert.ok(data.activeUsers >= 0);
assert.ok(data.apiUsers >= 0);
assert.ok(data.viewCount >= 0);
assert.ok(data.totalSubmissions >= 0);
assert.ok(data.minutesSaved >= 0);
});
it("Can get total stats with contributing users", async () => {
const result = await client({ url: `${endpoint}?countContributingUsers=true` });
const data = result.data;
assert.ok(data.userCount >= 0);
assert.ok(data.activeUsers >= 0);
assert.ok(data.apiUsers >= 0);
assert.ok(data.viewCount >= 0);

View File

@@ -10,7 +10,6 @@ describe("getUserInfo Free Chapters", () => {
const vipQualifyUserID = "getUserInfo-Free-VIP";
const repQualifyUserID = "getUserInfo-Free-RepQualify";
const oldQualifyUserID = "getUserInfo-Free-OldQualify";
const newNoQualityUserID = "getUserInfo-Free-newNoQualify";
const postOldQualify = 1600000000000;
before(async () => {
@@ -24,16 +23,6 @@ describe("getUserInfo Free Chapters", () => {
const getUserInfo = (userID: string) => client.get(endpoint, { params: { userID, value: "freeChaptersAccess" } });
it("Should not get free access under new rule (newNoQualify)", (done) => {
getUserInfo(newNoQualityUserID)
.then(res => {
assert.strictEqual(res.status, 200);
assert.strictEqual(res.data.freeChaptersAccess, false);
done();
})
.catch(err => done(err));
});
it("Should get free access under new rule (newQualify)", (done) => {
getUserInfo(newQualifyUserID)
.then(res => {

36
test/cases/highLoad.ts Normal file
View File

@@ -0,0 +1,36 @@
import sinon from "sinon";
import { db } from "../../src/databases/databases";
import assert from "assert";
import { client } from "../utils/httpClient";
// Accept all non-5xx-gateway statuses so assertions see the raw code.
client.defaults.validateStatus = (status) => status < 600;

describe("High load test", () => {
    // Force the db wrapper to report high load for the whole suite.
    before(() => {
        sinon.stub(db, "highLoad").returns(true);
    });
    after(() => {
        sinon.restore();
    });

    it("Should return 503 on getTopUsers", async () => {
        const res = await client.get("/api/getTopUsers?sortType=0");
        assert.strictEqual(res.status, 503);
    });

    it("Should return 503 on getTopCategoryUsers", async () => {
        const res = await client.get("/api/getTopCategoryUsers?sortType=0&category=sponsor");
        assert.strictEqual(res.status, 503);
    });

    it("Should return 0 on getTotalStats", async () => {
        // NOTE(review): only the status code is asserted; the "return 0" in
        // the title is not actually checked against the payload — confirm.
        const res = await client.get("/api/getTotalStats");
        assert.strictEqual(res.status, 200);
    });
});

File diff suppressed because it is too large Load Diff

View File

@@ -0,0 +1,316 @@
import assert from "assert";
import { postSkipSegmentJSON, postSkipSegmentParam } from "./postSkipSegments";
const videoID = "postSkipSegments-404-video";
const userID = "postSkipSegments-404-user";
// Each submission must carry a videoID plus at least one well-formed
// segment ([start, end] pair with a category); these tests drop one piece
// at a time and expect 400.
describe("postSkipSegments 400 - missing params", () => {
    it("Should return 400 for missing params (JSON method) 1", (done) => {
        // Missing videoID.
        postSkipSegmentJSON({
            userID,
            segments: [{
                segment: [9, 10],
                category: "sponsor",
            }, {
                segment: [31, 60],
                category: "intro",
            }],
        })
            .then(res => {
                assert.strictEqual(res.status, 400);
                done();
            })
            .catch(err => done(err));
    });

    it("Should return 400 for missing params (JSON method) 2", (done) => {
        // Missing segments entirely.
        postSkipSegmentJSON({
            userID,
            videoID,
        })
            .then(res => {
                assert.strictEqual(res.status, 400);
                done();
            })
            .catch(err => done(err));
    });

    it("Should return 400 for missing params (JSON method) 3", (done) => {
        // First segment has only a start time.
        postSkipSegmentJSON({
            userID,
            videoID,
            segments: [{
                segment: [0],
                category: "sponsor",
            }, {
                segment: [31, 60],
                category: "intro",
            }],
        })
            .then(res => {
                assert.strictEqual(res.status, 400);
                done();
            })
            .catch(err => done(err));
    });

    it("Should return 400 for missing params (JSON method) 4", (done) => {
        // First segment has no category.
        postSkipSegmentJSON({
            userID,
            videoID,
            segments: [{
                segment: [9, 10],
            }, {
                segment: [31, 60],
                category: "intro",
            }],
        })
            .then(res => {
                assert.strictEqual(res.status, 400);
                done();
            })
            .catch(err => done(err));
    });

    it("Should return 400 for missing params (JSON method) 5", (done) => {
        // NOTE(review): same payload as case 2 — kept for suite stability,
        // but one of the two is redundant.
        postSkipSegmentJSON({
            userID,
            videoID,
        })
            .then(res => {
                assert.strictEqual(res.status, 400);
                done();
            })
            .catch(err => done(err));
    });

    it("Should return 400 for missing multiple params (Params method)", (done) => {
        // Missing videoID and category.
        postSkipSegmentParam({
            startTime: 9,
            endTime: 10,
            userID
        })
            .then(res => {
                // Fix: previously failed with done(true), which mocha reports
                // poorly (non-Error); assert like the rest of the suite.
                assert.strictEqual(res.status, 400);
                done();
            })
            .catch(err => done(err));
    });

    it("Should return 400 if videoID is empty", (done) => {
        const videoID = null as unknown as string;
        postSkipSegmentParam({
            videoID,
            startTime: 1,
            endTime: 5,
            category: "sponsor",
            userID
        })
            .then(res => {
                assert.strictEqual(res.status, 400);
                done();
            })
            .catch(err => done(err));
    });

    it("Should return 400 if no segments provided", (done) => {
        // Empty segments array on the JSON route.
        postSkipSegmentJSON({
            videoID,
            segments: [],
            category: "sponsor",
            userID
        })
            .then(res => {
                assert.strictEqual(res.status, 400);
                done();
            })
            .catch(err => done(err));
    });
});
// Chapter submissions carry a free-text description; the server enforces a
// length cap on it.
describe("postSkipSegments 400 - Chapters", () => {
    const actionType = "chapter";
    const category = actionType; // chapters use the matching "chapter" category

    it("Should not be able to submit a chapter name that is too long", (done) => {
        postSkipSegmentParam({
            videoID,
            startTime: 1,
            endTime: 5,
            category,
            actionType,
            // 256-char description — assumed to exceed the server's cap (TODO confirm limit).
            description: "a".repeat(256),
            userID
        })
            .then(res => {
                assert.strictEqual(res.status, 400);
                done();
            })
            .catch(err => done(err));
    });
});
// POI highlights are zero-length markers; both rules below are exercised.
describe("postSkipSegments 400 - POI", () => {
    const category = "poi_highlight";

    it("Should be rejected if a POI is at less than 1 second", (done) => {
        postSkipSegmentParam({
            videoID,
            startTime: 0.5,
            endTime: 0.5,
            category,
            userID
        })
            .then(res => {
                assert.strictEqual(res.status, 400);
                done();
            })
            .catch(err => done(err));
    });

    it("Should be rejected if highlight segment doesn't start and end at the same time", (done) => {
        // Non-zero-length POI (30 -> 30.5) must be rejected.
        postSkipSegmentParam({
            videoID,
            startTime: 30,
            endTime: 30.5,
            category,
            userID
        })
            .then(res => {
                assert.strictEqual(res.status, 400);
                done();
            })
            .catch(err => done(err));
    });
});
// Automatic moderation rules on single submissions.
// NOTE(review): the first case expects 403 despite the describe title saying
// 400 — the over-80% rule responds 403 elsewhere in the suite too.
describe("postSkipSegments 400 - Automod", () => {
    it("Should be rejected if over 80% of the video", (done) => {
        postSkipSegmentParam({
            videoID,
            startTime: 30,
            endTime: 1000000, // far past any plausible video duration
            userID,
            category: "sponsor"
        })
            .then(res => {
                assert.strictEqual(res.status, 403);
                done();
            })
            .catch(err => done(err));
    });

    it("Should be rejected if a sponsor is less than 1 second", (done) => {
        postSkipSegmentParam({
            videoID,
            category: "sponsor",
            startTime: 30,
            endTime: 30.5,
            userID
        })
            .then(res => {
                assert.strictEqual(res.status, 400);
                done();
            })
            .catch(err => done(err));
    });

    it("Should be rejected if non-POI segment starts and ends at the same time", (done) => {
        postSkipSegmentParam({
            videoID,
            startTime: 90,
            endTime: 90,
            userID,
            category: "intro"
        })
            .then(res => {
                assert.strictEqual(res.status, 400);
                done();
            })
            .catch(err => done(err));
    });

    it("Should not allow submitting full video not at zero seconds", (done) => {
        // "full" action type must span the whole video, i.e. start at 0 with 0 length.
        postSkipSegmentParam({
            videoID,
            startTime: 0,
            endTime: 1,
            category: "sponsor",
            actionType: "full",
            userID
        })
            .then(res => {
                assert.strictEqual(res.status, 400);
                done();
            })
            .catch(err => done(err));
    });

    it("Should not be able to submit an music_offtopic with mute action type (JSON method)", (done) => {
        // music_offtopic does not support "mute".
        postSkipSegmentJSON({
            userID,
            videoID,
            segments: [{
                segment: [0, 10],
                category: "music_offtopic",
                actionType: "mute"
            }],
        })
            .then(res => {
                assert.strictEqual(res.status, 400);
                done();
            })
            .catch(err => done(err));
    });
});
// Category / action-type combinations that do not exist or do not match.
describe("postSkipSegments 400 - Mismatched Types", () => {
    it("Should not be able to submit with a category that does not exist", (done) => {
        postSkipSegmentParam({
            videoID,
            startTime: 1,
            endTime: 5,
            category: "this-category-will-never-exist",
            userID
        })
            .then(res => {
                assert.strictEqual(res.status, 400);
                done();
            })
            .catch(err => done(err));
    });

    it("Should not be able to submit a chapter with skip action type (JSON method)", (done) => {
        // Chapters only support the "chapter" action type.
        postSkipSegmentJSON({
            userID,
            videoID,
            segments: [{
                segment: [0, 10],
                category: "chapter",
                actionType: "skip"
            }],
        })
            .then(res => {
                assert.strictEqual(res.status, 400);
                done();
            })
            .catch(err => done(err));
    });

    it("Should not be able to submit a sponsor with a description (JSON method)", (done) => {
        // Descriptions are reserved for chapter submissions.
        const videoID = "postSkipChapter5";
        postSkipSegmentJSON({
            userID,
            videoID,
            segments: [{
                segment: [0, 10],
                category: "sponsor",
                description: "This is a sponsor"
            }],
        })
            .then(res => {
                assert.strictEqual(res.status, 400);
                done();
            })
            .catch(err => done(err));
    });
});

View File

@@ -0,0 +1,32 @@
import assert from "assert";
import { postSkipSegmentParam } from "./postSkipSegments";
import { config } from "../../src/config";
import sinon from "sinon";
const videoID = "postSkipSegments-404-video";
describe("postSkipSegments 400 - stubbed config", () => {
    const USERID_LIMIT = 30;

    // Temporarily raise the minimum userID length so a short ID is rejected.
    before(() => {
        sinon.stub(config, "minUserIDLength").value(USERID_LIMIT);
    });
    after(() => {
        sinon.restore();
    });

    it("Should return 400 if userID is too short", async () => {
        const userID = "a".repeat(USERID_LIMIT - 10); // 10 chars under the stubbed minimum
        const res = await postSkipSegmentParam({
            videoID,
            startTime: 1,
            endTime: 5,
            category: "sponsor",
            userID
        });
        assert.strictEqual(res.status, 400);
    });
});

View File

@@ -0,0 +1,121 @@
import { getHash } from "../../src/utils/getHash";
import { db } from "../../src/databases/databases";
import assert from "assert";
import { arrayDeepEquals } from "../utils/partialDeepEquals";
import { postSkipSegmentJSON, convertMultipleToDBFormat } from "./postSkipSegments";
import { YouTubeApiMock } from "../mocks/youtubeMock";
import { ImportMock } from "ts-mock-imports";
import * as YouTubeAPIModule from "../../src/utils/youtubeApi";
const mockManager = ImportMock.mockStaticClass(YouTubeAPIModule, "YouTubeAPI");
const sinonStub = mockManager.mock("listVideos");
sinonStub.callsFake(YouTubeApiMock.listVideos);
// Exercises the rule rejecting submissions once skipped time would cover
// more than 80% of the video, including time from prior submissions.
describe("postSkipSegments - Automod 80%", () => {
    const userID = "postSkipSegments-automodSubmit";
    const userIDHash = getHash(userID);
    const over80VideoID = "80percent_video";
    // Only segments that have not been removed by downvotes count.
    const queryDatabaseCategory = (videoID: string) => db.prepare("all", `SELECT "startTime", "endTime", "category" FROM "sponsorTimes" WHERE "videoID" = ? and "votes" > -1`, [videoID]);

    before(async () => {
        const insertSponsorTimeQuery = 'INSERT INTO "sponsorTimes" ("videoID", "startTime", "endTime", "votes", "UUID", "userID", "timeSubmitted", views, category, "actionType", "videoDuration", "shadowHidden", "hashedVideoID") VALUES(?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)';
        // Fix: await each insert so the fixtures are guaranteed to exist
        // before the tests run (these promises were previously fire-and-forget).
        await db.prepare("run", insertSponsorTimeQuery, [over80VideoID, 0, 1000, 0, "80percent-uuid-0", userIDHash, 0, 0, "interaction", "skip", 0, 0, over80VideoID]);
        await db.prepare("run", insertSponsorTimeQuery, [over80VideoID, 1001, 1005, 0, "80percent-uuid-1", userIDHash, 0, 0, "interaction", "skip", 0, 0, over80VideoID]);
        await db.prepare("run", insertSponsorTimeQuery, [over80VideoID, 0, 5000, -2, "80percent-uuid-2", userIDHash, 0, 0, "interaction", "skip", 0, 0, over80VideoID]);
    });

    it("Should allow multiple times if total is under 80% of video (JSON method)", (done) => {
        const videoID = "postSkipSegments_80percent_video_blank1";
        const segments = [{
            segment: [3, 3000],
            category: "sponsor",
        }, {
            segment: [3002, 3050],
            category: "intro",
        }, {
            segment: [45, 100],
            category: "interaction",
        }, {
            segment: [99, 170],
            category: "sponsor",
        }];
        postSkipSegmentJSON({
            userID,
            videoID,
            segments
        })
            .then(async res => {
                assert.strictEqual(res.status, 200);
                const rows = await queryDatabaseCategory(videoID);
                const expected = convertMultipleToDBFormat(segments);
                assert.ok(arrayDeepEquals(rows, expected));
                done();
            })
            .catch(err => done(err));
    }).timeout(5000);

    it("Should reject multiple times if total is over 80% of video (JSON method)", (done) => {
        const videoID = "postSkipSegments_80percent_video_blank2";
        const segments = [{
            segment: [0, 2000],
            category: "interaction",
        }, {
            segment: [3000, 4000],
            category: "sponsor",
        }, {
            segment: [1500, 2750],
            category: "sponsor",
        }, {
            segment: [4050, 4750],
            category: "intro",
        }];
        postSkipSegmentJSON({
            userID,
            videoID,
            segments
        })
            .then(async res => {
                assert.strictEqual(res.status, 403);
                // Nothing may be persisted on rejection.
                const rows = await queryDatabaseCategory(videoID);
                assert.deepStrictEqual(rows, []);
                done();
            })
            .catch(err => done(err));
    }).timeout(5000);

    // Typo fix in the test title: "previosuly" -> "previously".
    it("Should reject multiple times if total is over 80% of video including previously submitted times (JSON method)", (done) => {
        const segments = [{
            segment: [2000, 4000], // adds 2000
            category: "sponsor",
        }, {
            segment: [1500, 2750], // adds 500
            category: "sponsor",
        }, {
            segment: [4050, 4570], // adds 520
            category: "sponsor",
        }];
        // Only the pre-seeded fixtures should remain in the database.
        const expected = [{
            startTime: 0,
            endTime: 1000,
            category: "interaction"
        }, {
            startTime: 1001,
            endTime: 1005,
            category: "interaction"
        }];
        postSkipSegmentJSON({
            userID,
            videoID: over80VideoID,
            segments: segments
        })
            .then(async res => {
                assert.strictEqual(res.status, 403);
                const rows = await queryDatabaseCategory(over80VideoID);
                assert.ok(arrayDeepEquals(rows, expected, true));
                done();
            })
            .catch(err => done(err));
    }).timeout(5000);
});

View File

@@ -0,0 +1,205 @@
import assert from "assert";
import { postSkipSegmentJSON, postSkipSegmentParam } from "./postSkipSegments";
import { getHash } from "../../src/utils/getHash";
import { partialDeepEquals } from "../utils/partialDeepEquals";
import { db } from "../../src/databases/databases";
import { ImportMock } from "ts-mock-imports";
import * as YouTubeAPIModule from "../../src/utils/youtubeApi";
import { YouTubeApiMock } from "../mocks/youtubeMock";
import { convertSingleToDBFormat } from "./postSkipSegments";
// Replace the real YouTube API with a deterministic mock so tests never hit
// the network.
const mockManager = ImportMock.mockStaticClass(YouTubeAPIModule, "YouTubeAPI");
const sinonStub = mockManager.mock("listVideos");
sinonStub.callsFake(YouTubeApiMock.listVideos);
// Tests covering how postSkipSegments handles the videoDuration field.
describe("postSkipSegments - duration", () => {
const userIDOne = "postSkip-DurationUserOne";
const userIDTwo = "postSkip-DurationUserTwo";
const videoID = "postSkip-DurationVideo";
const noDurationVideoID = "noDuration";
const userID = userIDOne;
// Fetch the single stored row (including videoDuration) for a video.
const queryDatabaseDuration = (videoID: string) => db.prepare("get", `SELECT "startTime", "endTime", "locked", "category", "videoDuration" FROM "sponsorTimes" WHERE "videoID" = ?`, [videoID]);
before(() => {
// Seed one "full" video label and one "skip" segment (both with duration
// 123) for the "not hide full video segments" test below.
const insertSponsorTimeQuery = 'INSERT INTO "sponsorTimes" ("videoID", "startTime", "endTime", "votes", "UUID", "userID", "timeSubmitted", views, category, "actionType", "videoDuration", "shadowHidden", "hashedVideoID") VALUES(?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)';
db.prepare("run", insertSponsorTimeQuery, ["full_video_duration_segment", 0, 0, 0, "full-video-duration-uuid-0", userIDTwo, 0, 0, "sponsor", "full", 123, 0, "full_video_duration_segment"]);
db.prepare("run", insertSponsorTimeQuery, ["full_video_duration_segment", 25, 30, 0, "full-video-duration-uuid-1", userIDTwo, 0, 0, "sponsor", "skip", 123, 0, "full_video_duration_segment"]);
});
// A submitted duration close to the (mocked) YouTube API's value is expected
// to be persisted as submitted.
it("Should be able to submit a single time with a precise duration close to the one from the YouTube API (JSON method)", (done) => {
    const submittedDuration = 4980.20;
    const sponsorSegment = {
        segment: [1, 10],
        category: "sponsor",
    };
    postSkipSegmentJSON({
        userID,
        videoID,
        videoDuration: submittedDuration,
        segments: [sponsorSegment],
    })
        .then(async (response) => {
            assert.strictEqual(response.status, 200);
            const storedRow = await queryDatabaseDuration(videoID);
            assert.ok(partialDeepEquals(storedRow, {
                ...convertSingleToDBFormat(sponsorSegment),
                locked: 0,
                videoDuration: submittedDuration,
            }));
            done();
        })
        .catch((err) => done(err));
});
// For the "noDuration" video, the duration supplied in the request body is
// expected to be stored.
it("Should be able to submit a single time with a duration in the body (JSON method)", (done) => {
    const videoID = "noDuration";
    const bodyDuration = 100;
    const sponsorSegment = {
        segment: [0, 10],
        category: "sponsor",
    };
    postSkipSegmentJSON({
        userID,
        videoID,
        videoDuration: bodyDuration,
        segments: [sponsorSegment],
    })
        .then(async (response) => {
            assert.strictEqual(response.status, 200);
            const storedRow = await queryDatabaseDuration(videoID);
            assert.ok(partialDeepEquals(storedRow, {
                ...convertSingleToDBFormat(sponsorSegment),
                locked: 0,
                videoDuration: bodyDuration,
            }));
            done();
        })
        .catch((err) => done(err));
});
// Submitting with a changed video duration should hide outdated segments and
// clear category locks for the video.
// FIX: the original wrapped the body in `try { ... } catch (e) { return e; }`,
// which swallowed assertion failures — the test could never fail. Let errors
// propagate; Mocha fails an async test on rejection.
it("Should be able to submit with a new duration, and hide old submissions and remove segment locks", async () => {
    const videoID = "noDuration";
    const segment = {
        segment: [1, 10],
        category: "sponsor",
    };
    // Insert a lock that the new submission is expected to remove.
    await db.prepare("run", `INSERT INTO "lockCategories" ("userID", "videoID", "category")
VALUES(?, ?, ?)`, [getHash("generic-VIP"), videoID, "sponsor"]);
    const res = await postSkipSegmentJSON({
        userID,
        videoID,
        videoDuration: 100,
        segments: [segment],
    });
    assert.strictEqual(res.status, 200);
    const lockCategoriesRow = await db.prepare("get", `SELECT * from "lockCategories" WHERE videoID = ?`, [videoID]);
    const videoRows = await db.prepare("all", `SELECT "startTime", "endTime", "locked", "category", "videoDuration"
FROM "sponsorTimes" WHERE "videoID" = ? AND hidden = 0`, [videoID]);
    const hiddenVideoRows = await db.prepare("all", `SELECT "startTime", "endTime", "locked", "category", "videoDuration"
FROM "sponsorTimes" WHERE "videoID" = ? AND hidden = 1`, [videoID]);
    // The lock inserted above must be gone after the submission.
    assert.ok(!lockCategoriesRow);
    const expected = {
        ...convertSingleToDBFormat(segment),
        locked: 0,
        videoDuration: 100,
    };
    // Only the new segment is visible; the previous one is now hidden.
    assert.ok(partialDeepEquals(videoRows[0], expected));
    assert.strictEqual(videoRows.length, 1);
    assert.strictEqual(hiddenVideoRows.length, 1);
});
// An overlong segment must still be rejected (403) for this video even with
// a duration supplied in the body.
it("Should still not be allowed if youtube thinks duration is 0", (done) => {
    const overlongSegment = {
        segment: [30, 10000],
        category: "sponsor",
    };
    postSkipSegmentJSON({
        userID,
        videoID: noDurationVideoID,
        videoDuration: 100,
        segments: [overlongSegment],
    })
        .then((response) => {
            assert.strictEqual(response.status, 403);
            done();
        })
        .catch((err) => done(err));
});
// Submitting with a new video duration hides the outdated "skip" segment
// (seeded with duration 123 in before()), but the seeded "full" video label
// must remain un-hidden alongside the new segment.
it("Should be able to submit with a new duration, and not hide full video segments", async () => {
const videoID = "full_video_duration_segment";
const segment = {
segment: [20, 30],
category: "sponsor",
};
const res = await postSkipSegmentJSON({
userID,
videoID,
videoDuration: 100,
segments: [segment],
});
assert.strictEqual(res.status, 200);
const videoRows = await db.prepare("all", `SELECT "startTime", "endTime", "locked", "category", "actionType", "videoDuration"
FROM "sponsorTimes" WHERE "videoID" = ? AND hidden = 0`, [videoID]);
const hiddenVideoRows = await db.prepare("all", `SELECT "startTime", "endTime", "locked", "category", "videoDuration"
FROM "sponsorTimes" WHERE "videoID" = ? AND hidden = 1`, [videoID]);
// Visible rows: the pre-seeded "full" label plus the newly submitted segment.
assert.strictEqual(videoRows.length, 2);
const expected = {
...convertSingleToDBFormat(segment),
locked: 0,
videoDuration: 100
};
const fullExpected = {
category: "sponsor",
actionType: "full"
};
// Row order is not guaranteed, so accept either ordering of the two rows.
assert.ok((partialDeepEquals(videoRows[0], fullExpected) && partialDeepEquals(videoRows[1], expected))
|| (partialDeepEquals(videoRows[1], fullExpected) && partialDeepEquals(videoRows[0], expected)));
// The old "skip" segment should now be hidden.
assert.strictEqual(hiddenVideoRows.length, 1);
});
// When the body duration (100) disagrees with the mocked YouTube API, the
// stored duration is expected to be the API's value (4980).
it("Should be able to submit a single time with a duration from the YouTube API (JSON method)", (done) => {
    const videoID = "postDuration-ytjson";
    const sponsorSegment = {
        segment: [0, 10],
        category: "sponsor",
    };
    postSkipSegmentJSON({
        userID,
        videoID,
        videoDuration: 100,
        segments: [sponsorSegment],
    })
        .then(async (response) => {
            assert.strictEqual(response.status, 200);
            const storedRow = await queryDatabaseDuration(videoID);
            assert.ok(partialDeepEquals(storedRow, {
                ...convertSingleToDBFormat(sponsorSegment),
                videoDuration: 4980,
            }));
            done();
        })
        .catch((err) => done(err));
});
// Submissions to a private video are expected to be accepted.
it("Should successfully submit if video is private", (done) => {
    postSkipSegmentParam({
        videoID: "private-video",
        startTime: 1,
        endTime: 5,
        category: "sponsor",
        userID
    })
        .then((response) => {
            assert.strictEqual(response.status, 200);
            done();
        })
        .catch((err) => done(err));
});
});

View File

@@ -0,0 +1,84 @@
import { getHash } from "../../src/utils/getHash";
import { db } from "../../src/databases/databases";
import assert from "assert";
import { partialDeepEquals } from "../utils/partialDeepEquals";
import { genRandom } from "../utils/getRandom";
import { Feature } from "../../src/types/user.model";
import { Segment, postSkipSegmentJSON, convertSingleToDBFormat } from "./postSkipSegments";
// Tests for who is allowed to submit chapter segments.
describe("postSkipSegments Features - Chapters", () => {
const submitUser_noPermissions = "postSkipSegments-chapters-noperm";
const submitUser_reputation = "postSkipSegments-chapters-reputation";
const submitUser_feature = "postSkipSegments-chapters-feature";
// Fetch the stored chapter row (including description) for a video.
const queryDatabaseChapter = (videoID: string) => db.prepare("get", `SELECT "startTime", "endTime", "category", "actionType", "description" FROM "sponsorTimes" WHERE "videoID" = ?`, [videoID]);
// Build a chapter segment with a fresh random description for each test.
function createSegment(): Segment {
return {
segment: [0, 10],
category: "chapter",
actionType: "chapter",
description: genRandom()
};
}
before(() => {
// Seed 10 locked, upvoted submissions so submitUser_reputation qualifies
// to submit chapters via reputation.
const submitNumberOfTimes = 10;
const submitUser_reputationHash = getHash(submitUser_reputation);
const insertSponsorTimeQuery = 'INSERT INTO "sponsorTimes" ("videoID", "startTime", "endTime", "votes", "locked", "UUID", "userID", "timeSubmitted", views, category, "actionType", "shadowHidden") VALUES(?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)';
for (let i = 0; i < submitNumberOfTimes; i++) {
const uuid = `post_reputation_uuid-${i}`;
const videoID = `post_reputation_video-${i}`;
db.prepare("run", insertSponsorTimeQuery, [videoID, 1, 11, 5, 1, uuid, submitUser_reputationHash, 1597240000000, 50, "sponsor", "skip", 0]);
}
// Grant submitUser_feature the ChapterSubmitter feature directly.
db.prepare("run", `INSERT INTO "userFeatures" ("userID", "feature", "issuerUserID", "timeSubmitted") VALUES(?, ?, ?, ?)`, [getHash(submitUser_feature), Feature.ChapterSubmitter, "generic-VIP", 0]);
});
// Users with sufficient reputation (seeded in before()) may submit chapters.
it("Should be able to submit a single chapter due to reputation (JSON method)", (done) => {
    const videoID = "postSkipSegments-chapter-reputation";
    const chapter = createSegment();
    postSkipSegmentJSON({
        userID: submitUser_reputation,
        videoID,
        segments: [chapter]
    })
        .then(async (response) => {
            assert.strictEqual(response.status, 200);
            const storedRow = await queryDatabaseChapter(videoID);
            assert.ok(partialDeepEquals(storedRow, convertSingleToDBFormat(chapter)));
            done();
        })
        .catch((err) => done(err));
});
// Users granted the ChapterSubmitter feature may submit chapters.
it("Should be able to submit a single chapter due to user feature (JSON method)", (done) => {
    const videoID = "postSkipSegments-chapter-feature";
    const chapter = createSegment();
    postSkipSegmentJSON({
        userID: submitUser_feature,
        videoID,
        segments: [chapter]
    })
        .then(async (response) => {
            assert.strictEqual(response.status, 200);
            const storedRow = await queryDatabaseChapter(videoID);
            assert.ok(partialDeepEquals(storedRow, convertSingleToDBFormat(chapter)));
            done();
        })
        .catch((err) => done(err));
});
// Without reputation or the ChapterSubmitter feature, chapter submissions
// are rejected with 400.
it("Should not be able to submit a chapter without permission (JSON method)", (done) => {
    postSkipSegmentJSON({
        userID: submitUser_noPermissions,
        videoID: "postSkipSegments-chapter-submit",
        segments: [createSegment()]
    })
        .then((response) => {
            assert.strictEqual(response.status, 400);
            done();
        })
        .catch((err) => done(err));
});
});

View File

@@ -0,0 +1,70 @@
import assert from "assert";
import { postSkipSegmentJSON } from "./postSkipSegments";
import { getHash } from "../../src/utils/getHash";
import { db } from "../../src/databases/databases";
// Tests for submitting to videos whose categories a VIP has locked.
// FIX: corrected garbled test titles ("submiting" -> "submitting",
// "Should return not be 403" -> "Should not return 403").
describe("postSkipSegments - LockedVideos", () => {
    const userIDOne = "postSkip-DurationUserOne";
    const VIPLockUser = "VIPUser-lockCategories";
    const videoID = "lockedVideo";
    const userID = userIDOne;
    before(() => {
        // Lock "sponsor" with a custom reason and "intro" with an empty reason.
        const insertLockCategoriesQuery = `INSERT INTO "lockCategories" ("userID", "videoID", "category", "reason") VALUES(?, ?, ?, ?)`;
        db.prepare("run", insertLockCategoriesQuery, [getHash(VIPLockUser), videoID, "sponsor", "Custom Reason"]);
        db.prepare("run", insertLockCategoriesQuery, [getHash(VIPLockUser), videoID, "intro", ""]);
    });
    it("Should return 403 and custom reason for submitting in lockedCategory", (done) => {
        postSkipSegmentJSON({
            userID,
            videoID,
            segments: [{
                segment: [1, 10],
                category: "sponsor",
            }],
        })
            .then(res => {
                assert.strictEqual(res.status, 403);
                // The VIP's custom reason must be surfaced to the submitter.
                assert.match(res.data, /Reason: /);
                assert.match(res.data, /Custom Reason/);
                done();
            })
            .catch(err => done(err));
    });
    it("Should not return 403 when submitting with locked category but unlocked actionType", (done) => {
        postSkipSegmentJSON({
            userID,
            videoID,
            segments: [{
                segment: [1, 10],
                category: "sponsor",
                actionType: "mute"
            }],
        })
            .then(res => {
                assert.strictEqual(res.status, 200);
                done();
            })
            .catch(err => done(err));
    });
    it("Should return 403 for submitting in lockedCategory", (done) => {
        postSkipSegmentJSON({
            userID,
            videoID,
            segments: [{
                segment: [1, 10],
                category: "intro",
            }],
        })
            .then(res => {
                assert.strictEqual(res.status, 403);
                // The "intro" lock has an empty reason, so no reason text appears.
                assert.doesNotMatch(res.data, /Lock reason: /);
                assert.doesNotMatch(res.data, /Custom Reason/);
                done();
            })
            .catch(err => done(err));
    });
});

View File

@@ -0,0 +1,68 @@
import assert from "assert";
import { postSkipSegmentParam } from "./postSkipSegments";
import { getHash } from "../../src/utils/getHash";
import { db } from "../../src/databases/databases";
import { ImportMock } from "ts-mock-imports";
import * as YouTubeAPIModule from "../../src/utils/youtubeApi";
import { YouTubeApiMock } from "../mocks/youtubeMock";
// Replace the real YouTube API with a mock so tests never hit the network.
const mockManager = ImportMock.mockStaticClass(YouTubeAPIModule, "YouTubeAPI");
const sinonStub = mockManager.mock("listVideos");
sinonStub.callsFake(YouTubeApiMock.listVideos);
// Tests for how postSkipSegments treats submissions from shadowbanned users.
describe("postSkipSegments - shadowban", () => {
const banUser01 = "postSkip-banUser01";
const banUser01Hash = getHash(banUser01);
const shadowBanVideoID1 = "postSkipBan1";
const shadowBanVideoID2 = "postSkipBan2";
// Fetch the stored row (including the shadowHidden flag) for a video.
const queryDatabaseShadowhidden = (videoID: string) => db.prepare("get", `SELECT "startTime", "endTime", "shadowHidden", "userID" FROM "sponsorTimes" WHERE "videoID" = ?`, [videoID]);
before(() => {
// Mark the test user as shadowbanned before any submissions run.
db.prepare("run", `INSERT INTO "shadowBannedUsers" ("userID") VALUES(?)`, [banUser01Hash]);
});
// A banned user's submission is accepted (200) but stored with shadowHidden = 1.
it("Should automatically shadowban segments if user is banned", (done) => {
    const videoID = shadowBanVideoID1;
    postSkipSegmentParam({
        videoID,
        startTime: 0,
        endTime: 10,
        category: "sponsor",
        userID: banUser01
    })
        .then(async (response) => {
            assert.strictEqual(response.status, 200);
            const storedRow = await queryDatabaseShadowhidden(videoID);
            assert.deepStrictEqual(storedRow, {
                startTime: 0,
                endTime: 10,
                shadowHidden: 1,
                userID: banUser01Hash
            });
            done();
        })
        .catch((err) => done(err));
});
// A "full" video label from a banned user should not be stored at all
// (the endpoint still reports 200).
// FIX: corrected test-title typo "if user if shadowbanned".
it("Should not add full segments to database if user is shadowbanned", (done) => {
    const videoID = shadowBanVideoID2;
    postSkipSegmentParam({
        videoID,
        startTime: 0,
        endTime: 0,
        category: "sponsor",
        actionType: "full",
        userID: banUser01
    })
        .then(async res => {
            assert.strictEqual(res.status, 200);
            const row = await queryDatabaseShadowhidden(videoID);
            // No row at all — not even a shadow-hidden one.
            assert.strictEqual(row, undefined);
            done();
        })
        .catch(err => done(err));
});
});

View File

@@ -0,0 +1,104 @@
import assert from "assert";
import { convertSingleToDBFormat } from "./postSkipSegments";
import { getHash } from "../../src/utils/getHash";
import { db } from "../../src/databases/databases";
import { partialDeepEquals } from "../utils/partialDeepEquals";
import { client } from "../utils/httpClient";
const endpoint = "/api/skipSegments";
// Fetch the stored row (including userAgent) for a video.
const queryUseragent = (videoID: string) => db.prepare("get", `SELECT "startTime", "endTime", "locked", "category", "userAgent" FROM "sponsorTimes" WHERE "videoID" = ?`, [videoID]);
// Tests that the submitter's user agent (body field or HTTP header) is stored.
describe("postSkipSegments - userAgent", () => {
const userIDOne = "postSkip-DurationUserOne";
const VIPLockUser = "VIPUser-lockCategories";
const videoID = "lockedVideo";
const userID = userIDOne;
const segment = {
segment: [0, 10],
category: "sponsor",
};
const dbFormatSegment = convertSingleToDBFormat(segment);
// NOTE(review): this lockCategories setup targets "lockedVideo", which none
// of the userAgent tests below submit to — looks copy-pasted; confirm it is
// actually needed here.
before(() => {
const insertLockCategoriesQuery = `INSERT INTO "lockCategories" ("userID", "videoID", "category", "reason") VALUES(?, ?, ?, ?)`;
db.prepare("run", insertLockCategoriesQuery, [getHash(VIPLockUser), videoID, "sponsor", "Custom Reason"]);
db.prepare("run", insertLockCategoriesQuery, [getHash(VIPLockUser), videoID, "intro", ""]);
});
// An explicitly empty userAgent in the body is stored as the empty string.
it("Should be able to submit with empty user-agent", (done) => {
    const videoID = "userAgent-3";
    const requestBody = {
        userID,
        videoID,
        segments: [segment],
        userAgent: "",
    };
    client(endpoint, { method: "POST", data: requestBody })
        .then(async (response) => {
            assert.strictEqual(response.status, 200);
            const storedRow = await queryUseragent(videoID);
            assert.ok(partialDeepEquals(storedRow, {
                ...dbFormatSegment,
                userAgent: "",
            }));
            done();
        })
        .catch((err) => done(err));
});
// A userAgent supplied in the body is stored verbatim.
it("Should be able to submit with custom userAgent in body", (done) => {
    const videoID = "userAgent-4";
    const requestBody = {
        userID,
        videoID,
        segments: [segment],
        userAgent: "MeaBot/5.0"
    };
    client(endpoint, { method: "POST", data: requestBody })
        .then(async (response) => {
            assert.strictEqual(response.status, 200);
            const storedRow = await queryUseragent(videoID);
            assert.ok(partialDeepEquals(storedRow, {
                ...dbFormatSegment,
                userAgent: "MeaBot/5.0",
            }));
            done();
        })
        .catch((err) => done(err));
});
// A User-Agent HTTP header is expected to be normalized before storage:
// "com.google.android.youtube/5.0" is stored as "Vanced/5.0".
it("Should be able to submit with custom user-agent 1", (done) => {
    const videoID = "userAgent-1";
    client(endpoint, {
        method: "POST",
        headers: {
            "Content-Type": "application/json",
            "User-Agent": "com.google.android.youtube/5.0"
        },
        data: {
            userID,
            videoID,
            segments: [segment],
        }
    })
        .then(async (response) => {
            assert.strictEqual(response.status, 200);
            const storedRow = await queryUseragent(videoID);
            assert.ok(partialDeepEquals(storedRow, {
                ...dbFormatSegment,
                userAgent: "Vanced/5.0",
            }));
            done();
        })
        .catch((err) => done(err));
});
});

View File

@@ -0,0 +1,127 @@
import { config } from "../../src/config";
import { getHash } from "../../src/utils/getHash";
import { db } from "../../src/databases/databases";
import assert from "assert";
import { client } from "../utils/httpClient";
// Tests that active moderator warnings block segment submissions.
describe("postSkipSegments Warnings", () => {
// Constant and helpers
const warnUser01 = "warn-user01";
const warnUser01Hash = getHash(warnUser01);
const warnUser02 = "warn-user02";
const warnUser02Hash = getHash(warnUser02);
const warnUser03 = "warn-user03";
const warnUser03Hash = getHash(warnUser03);
const warnUser04 = "warn-user04";
const warnUser04Hash = getHash(warnUser04);
const warnVideoID = "postSkipSegments-warn-video";
const endpoint = "/api/skipSegments";
// Thin wrapper around the HTTP client for JSON submissions.
const postSkipSegmentJSON = (data: Record<string, any>) => client({
method: "POST",
url: endpoint,
data
});
before(() => {
const now = Date.now();
const warnVip01Hash = getHash("postSkipSegmentsWarnVIP");
const reason01 = "Reason01";
const reason02 = "";
const reason03 = "Reason03";
const MILLISECONDS_IN_HOUR = 3600000;
// Warnings older than this are treated as expired (see the "expired
// warning" test below).
const WARNING_EXPIRATION_TIME = config.hoursAfterWarningExpires * MILLISECONDS_IN_HOUR;
const insertWarningQuery = 'INSERT INTO warnings ("userID", "issuerUserID", "enabled", "reason", "issueTime") VALUES(?, ?, ?, ?, ?)';
// User 1 | 1 active | custom reason
db.prepare("run", insertWarningQuery, [warnUser01Hash, warnVip01Hash, 1, reason01, now]);
// User 2 | 1 inactive | default reason
db.prepare("run", insertWarningQuery, [warnUser02Hash, warnVip01Hash, 0, reason02, now]);
// User 3 | 1 expired, active | custom reason
db.prepare("run", insertWarningQuery, [warnUser03Hash, warnVip01Hash, 1, reason03, (now - WARNING_EXPIRATION_TIME - 1000)]);
// User 4 | 1 active | default reason
db.prepare("run", insertWarningQuery, [warnUser04Hash, warnVip01Hash, 1, reason02, now]);
});
// An active warning with a custom reason blocks the submission and echoes
// the reason back in the rejection message.
it("Should be rejected with custom message if user has active warnings", (done) => {
postSkipSegmentJSON({
userID: warnUser01,
videoID: warnVideoID,
segments: [{
segment: [0, 10],
category: "sponsor",
}],
})
.then(res => {
assert.strictEqual(res.status, 403);
const errorMessage = res.data;
const reason = "Reason01";
// Must match the server's rejection text exactly, including the
// "Warning reason" suffix built from the stored reason.
const expected = "Submission rejected due to a warning from a moderator. This means that we noticed you were making some common mistakes"
+ " that are not malicious, and we just want to clarify the rules. "
+ "Could you please send a message in discord.gg/SponsorBlock or matrix.to/#/#sponsor:ajay.app so we can further help you? "
+ `Your userID is ${warnUser01Hash}.\n\nWarning reason: '${reason}'`;
assert.strictEqual(errorMessage, expected);
done();
})
.catch(err => done(err));
});
// An inactive (disabled) warning must not block submissions.
it("Should be accepted if user has inactive warning", (done) => {
    postSkipSegmentJSON({
        userID: warnUser02,
        videoID: warnVideoID,
        segments: [{
            segment: [50, 60],
            category: "sponsor",
        }],
    })
        .then((response) => {
            // Include status and body in the failure message for debugging.
            assert.ok(response.status === 200, `Status code was ${response.status} ${response.data}`);
            done();
        })
        .catch((err) => done(err));
});
// A warning issued before the expiration window must not block submissions.
it("Should be accepted if user has expired warning", (done) => {
    postSkipSegmentJSON({
        userID: warnUser03,
        videoID: warnVideoID,
        segments: [{
            segment: [53, 60],
            category: "sponsor",
        }],
    })
        .then((response) => {
            // Include status and body in the failure message for debugging.
            assert.ok(response.status === 200, `Status code was ${response.status} ${response.data}`);
            done();
        })
        .catch((err) => done(err));
});
// An active warning with an empty reason still blocks the submission, using
// the default message (no "Warning reason" suffix).
it("Should be rejected with default message if user has active warning", (done) => {
postSkipSegmentJSON({
userID: warnUser04,
videoID: warnVideoID,
segments: [{
segment: [0, 10],
category: "sponsor",
}],
})
.then(res => {
assert.strictEqual(res.status, 403);
const errorMessage = res.data;
// Must match the server's rejection text exactly.
const expected = "Submission rejected due to a warning from a moderator. This means that we noticed you were making some common mistakes"
+ " that are not malicious, and we just want to clarify the rules. "
+ "Could you please send a message in discord.gg/SponsorBlock or matrix.to/#/#sponsor:ajay.app so we can further help you? "
+ `Your userID is ${warnUser04Hash}.`;
assert.strictEqual(errorMessage, expected);
done();
})
.catch(err => done(err));
});
});

View File

@@ -9,16 +9,21 @@ describe("postWarning", () => {
const endpoint = "/api/warnUser";
const getWarning = (userID: string) => db.prepare("get", `SELECT "userID", "issueTime", "issuerUserID", enabled, "reason" FROM warnings WHERE "userID" = ?`, [userID]);
const warnedUser = getHash("warning-0");
const warneduserID = "warning-0";
const warnedUserPublicID = getHash(warneduserID);
const warningVipOne = "warning-vip-1";
const warningVipTwo = "warning-vip-2";
const nonVipUser = "warning-non-vip";
before(async () => {
await db.prepare("run", `INSERT INTO "vipUsers" ("userID") VALUES (?)`, [getHash("warning-vip")]);
await db.prepare("run", `INSERT INTO "vipUsers" ("userID") VALUES (?)`, [getHash(warningVipOne)]);
await db.prepare("run", `INSERT INTO "vipUsers" ("userID") VALUES (?)`, [getHash(warningVipTwo)]);
});
it("Should be able to create warning if vip (exp 200)", (done) => {
const json = {
issuerUserID: "warning-vip",
userID: warnedUser,
issuerUserID: warningVipOne,
userID: warnedUserPublicID,
reason: "warning-reason-0"
};
client.post(endpoint, json)
@@ -38,8 +43,8 @@ describe("postWarning", () => {
it("Should be not be able to create a duplicate warning if vip", (done) => {
const json = {
issuerUserID: "warning-vip",
userID: warnedUser,
issuerUserID: warningVipOne,
userID: warnedUserPublicID,
};
client.post(endpoint, json)
@@ -58,8 +63,8 @@ describe("postWarning", () => {
it("Should be able to remove warning if vip", (done) => {
const json = {
issuerUserID: "warning-vip",
userID: warnedUser,
issuerUserID: warningVipOne,
userID: warnedUserPublicID,
enabled: false
};
@@ -78,8 +83,8 @@ describe("postWarning", () => {
it("Should not be able to create warning if not vip (exp 403)", (done) => {
const json = {
issuerUserID: "warning-not-vip",
userID: "warning-1",
issuerUserID: nonVipUser,
userID: warnedUserPublicID,
};
client.post(endpoint, json)
@@ -101,8 +106,8 @@ describe("postWarning", () => {
it("Should re-enable disabled warning", (done) => {
const json = {
issuerUserID: "warning-vip",
userID: warnedUser,
issuerUserID: warningVipOne,
userID: warnedUserPublicID,
enabled: true
};
@@ -121,14 +126,14 @@ describe("postWarning", () => {
it("Should be able to remove your own warning", (done) => {
const json = {
userID: "warning-0",
userID: warneduserID,
enabled: false
};
client.post(endpoint, json)
.then(async res => {
assert.strictEqual(res.status, 200);
const data = await getWarning(warnedUser);
const data = await getWarning(warnedUserPublicID);
const expected = {
enabled: 0
};
@@ -138,15 +143,16 @@ describe("postWarning", () => {
.catch(err => done(err));
});
it("Should be able to add your own warning", (done) => {
it("Should not be able to add your own warning", (done) => {
const json = {
userID: "warning-0"
userID: warneduserID,
enabled: true
};
client.post(endpoint, json)
.then(async res => {
assert.strictEqual(res.status, 403);
const data = await getWarning(warnedUser);
const data = await getWarning(warnedUserPublicID);
const expected = {
enabled: 0
};

View File

@@ -1,9 +1,7 @@
import { config } from "../../src/config";
import redis from "../../src/utils/redis";
import crypto from "crypto";
import assert from "assert";
const genRandom = (bytes=8) => crypto.pseudoRandomBytes(bytes).toString("hex");
import { genRandom } from "../utils/getRandom";
const randKey1 = genRandom();
const randValue1 = genRandom();

View File

@@ -208,7 +208,7 @@ describe("reputation", () => {
assert.strictEqual(data, 0.19310344827586207);
});
it("user with high reputation and locked segments", async () => {
it("user with high reputation and locked segmentsssss", async () => {
const metrics = {
totalSubmissions: 8,
downvotedSubmissions: 1,
@@ -217,11 +217,11 @@ describe("reputation", () => {
lockedSum: 4,
semiOldUpvotedSubmissions: 5,
oldUpvotedSubmissions: 5,
mostUpvotedInLockedVideoSum: 0
mostUpvotedInLockedVideoSum: 4
};
const data = await getReputation(userHashHighAndLocked);
assert.strictEqual(data, calculateReputationFromMetrics(metrics));
assert.strictEqual(data, 1.793103448275862);
assert.strictEqual(data, 3.393103448275862);
});
it("user with most upvoted segments in locked video", async () => {

View File

@@ -22,16 +22,6 @@ const user07PrivateUserID = "setUsername_07";
const username07 = "Username 07";
const user08PrivateUserID = "setUsername_08";
// private = public cases
// user09 - username === privateID
const user09PrivateUserID = "setUsername_09";
// user 10/11 - user 11 username === user 10 privateID
const user10PrivateUserID = "setUsername_10_collision";
const username10 = "setUsername_10";
const user11PrivateUserID = "setUsername_11";
const user12PrivateUserID = "setUsername_12";
const username12 = "Username 12";
async function addUsername(userID: string, userName: string, locked = 0) {
await db.prepare("run", 'INSERT INTO "userNames" ("userID", "userName", "locked") VALUES(?, ?, ?)', [userID, userName, locked]);
await addLogUserNameChange(userID, userName);
@@ -40,7 +30,7 @@ async function addUsername(userID: string, userName: string, locked = 0) {
async function getUsernameInfo(userID: string): Promise<{ userName: string, locked: string}> {
const row = await db.prepare("get", 'SELECT "userName", "locked" FROM "userNames" WHERE "userID" = ?', [userID]);
if (!row) {
return null;
throw new Error("No username found");
}
return row;
}
@@ -98,9 +88,6 @@ describe("setUsername", () => {
await addUsername(getHash(user05PrivateUserID), username05, 0);
await addUsername(getHash(user06PrivateUserID), username06, 0);
await addUsername(getHash(user07PrivateUserID), username07, 1);
await addUsername(getHash(user10PrivateUserID), username10, 0);
// user11 skipped
await addUsername(getHash(user12PrivateUserID), username12, 0);
});
it("Should be able to set username that has never been set", (done) => {
@@ -249,47 +236,10 @@ describe("setUsername", () => {
it("Should delete row if new username is same as publicID", (done) => {
const publicID = getHash(user08PrivateUserID);
postSetUserName(getHash(user08PrivateUserID), publicID)
.then(async () => {
const usernameInfo = await getUsernameInfo(getHash(user08PrivateUserID));
assert.strictEqual(usernameInfo, null);
done();
})
.catch((err) => done(err));
});
it("Should return error if trying to set username to privateID", (done) => {
const privateID = user09PrivateUserID;
postSetUserName(privateID, privateID)
.then(async (res) => {
assert.strictEqual(res.status, 400);
const usernameInfo = await getUsernameInfo(getHash(privateID));
assert.strictEqual(usernameInfo, null);
done();
})
.catch((err) => done(err));
});
it("Should return error if trying to set username to someone else's privateID", (done) => {
const privateID = user11PrivateUserID;
postSetUserName(privateID, user10PrivateUserID)
.then(async (res) => {
assert.strictEqual(res.status, 400);
const usernameInfo = await getUsernameInfo(getHash(privateID)); // user 10's privateID
assert.strictEqual(usernameInfo, null);
done();
})
.catch((err) => done(err));
});
it("Should not return error if trying to set username to someone else's publicID", (done) => {
const privateID = user12PrivateUserID;
const user10PublicID = getHash(user10PrivateUserID);
postSetUserName(privateID, user10PublicID)
.then(async (res) => {
assert.strictEqual(res.status, 200);
const usernameInfo = await getUsernameInfo(getHash(privateID)); // user 10's publicID
assert.strictEqual(usernameInfo.userName, user10PublicID);
done();
.then(() => {
getUsernameInfo(getHash(user08PrivateUserID))
.then(usernameinfo => done(`Username should be deleted - ${usernameinfo})`))
.catch(() => done());
})
.catch((err) => done(err));
});

View File

@@ -0,0 +1,117 @@
import { db } from "../../src/databases/databases";
import { getHash } from "../../src/utils/getHash";
import assert from "assert";
import { client } from "../utils/httpClient";
import { config } from "../../src/config";
import sinon from "sinon";
// Stubbed in as config.minUserIDLength inside the describe below so the
// length threshold is predictable in these tests.
const USERID_LIMIT = 30;
// preexisting username with userid < Limit
const preExisting_underLimit = "preExisting_under";
// preexisting username with userid > Limit
const preExisting_overLimit = `preExisting_over${"*".repeat(USERID_LIMIT)}`;
// new username to privateID < Limit
const newUser_underLimit = "newUser_under";
// new username to privateID > Limit
const newUser_overLimit = `newUser_over${"*".repeat(USERID_LIMIT)}`;
// new username to someone else's privateID
const otherUser = `otherUser${"*".repeat(USERID_LIMIT)}`;
// Insert a username row directly, bypassing the endpoint.
const addUsername = async (userID: string, userName: string, locked = 0) =>
await db.prepare("run", 'INSERT INTO "userNames" ("userID", "userName", "locked") VALUES(?, ?, ?)', [userID, userName, locked]);
// True when the given (hashed) user has a row in "userNames".
async function hasSetUsername(userID: string): Promise<boolean> {
const row = await db.prepare("get", 'SELECT "userName", "locked" FROM "userNames" WHERE "userID" = ?', [userID]);
return Boolean(row);
}
const endpoint = "/api/setUsername";
// POST a username change via query parameters.
const postSetUserName = (userID: string, username: string) => client({
method: "POST",
url: endpoint,
params: {
userID,
username,
}
});
// FIXES: two tests shared the identical title "Should return error if trying
// to set username = other privateID over limit" (the first actually used the
// user's OWN privateID); the first test's title said "userID = username"
// instead of describing the own-privateID case; "retreive" typo; and the
// first two tests had no .catch, so an assertion failure surfaced as a
// Mocha timeout instead of a failure.
describe("setUsernamePrivate tests", () => {
    // add pre-existing usernames
    before(async () => {
        await addUsername(getHash(preExisting_underLimit), preExisting_underLimit, 0);
        await addUsername(getHash(preExisting_overLimit), preExisting_overLimit, 0);
    });
    // stub minUserIDLength so the limit is predictable
    before(() => sinon.stub(config, "minUserIDLength").value(USERID_LIMIT));
    after(() => sinon.restore());
    it("Existing privateID = username under Limit should retrieve successfully", (done) => {
        const privateID = preExisting_underLimit;
        hasSetUsername(getHash(privateID))
            .then((usernameInfo) => {
                assert.ok(usernameInfo);
                done();
            })
            .catch((err) => done(err));
    });
    it("Existing privateID = username over Limit should retrieve successfully", (done) => {
        const privateID = preExisting_overLimit;
        hasSetUsername(getHash(privateID))
            .then((usernameInfo) => {
                assert.ok(usernameInfo);
                done();
            })
            .catch((err) => done(err));
    });
    it("Should return error if trying to set username = own privateID under limit", (done) => {
        const privateID = newUser_underLimit;
        postSetUserName(privateID, privateID)
            .then(async (res) => {
                assert.strictEqual(res.status, 400);
                const usernameInfo = await hasSetUsername(getHash(privateID));
                assert.ok(!usernameInfo);
                done();
            })
            .catch((err) => done(err));
    });
    it("Should return error if trying to set username = own privateID over limit", (done) => {
        const privateID = newUser_overLimit;
        postSetUserName(privateID, privateID)
            .then(async (res) => {
                assert.strictEqual(res.status, 400);
                const usernameInfo = await hasSetUsername(getHash(privateID));
                assert.ok(!usernameInfo);
                done();
            })
            .catch((err) => done(err));
    });
    it("Should return error if trying to set username = other privateID over limit", (done) => {
        const privateID = otherUser;
        const otherUserPrivate = preExisting_overLimit;
        postSetUserName(privateID, otherUserPrivate)
            .then(async (res) => {
                assert.strictEqual(res.status, 400);
                const usernameInfo = await hasSetUsername(getHash(privateID));
                assert.ok(!usernameInfo);
                done();
            })
            .catch((err) => done(err));
    });
    it("Should not return error if trying to set username = other privateID under limit", (done) => {
        const privateID = otherUser;
        const otherUserPrivate = preExisting_underLimit;
        postSetUserName(privateID, otherUserPrivate)
            .then(async (res) => {
                assert.strictEqual(res.status, 200);
                const usernameInfo = await hasSetUsername(getHash(privateID));
                assert.ok(usernameInfo);
                done();
            })
            .catch((err) => done(err));
    });
});

View File

@@ -1,4 +1,4 @@
import { db } from "../../src/databases/databases";
import { db, privateDB } from "../../src/databases/databases";
import { getHash } from "../../src/utils/getHash";
import assert from "assert";
import { Category } from "../../src/types/segments.model";
@@ -9,29 +9,64 @@ describe("shadowBanUser", () => {
// DB helpers for the shadowBanUser suite.
// Segments a user has at a given "shadowHidden" status.
const getShadowBanSegments = (userID: string, status: number) => db.prepare("all", `SELECT "shadowHidden" FROM "sponsorTimes" WHERE "userID" = ? AND "shadowHidden" = ?`, [userID, status]);
// Same query, but also returning each segment's category.
const getShadowBanSegmentCategory = (userID: string, status: number): Promise<{shadowHidden: number, category: Category}[]> => db.prepare("all", `SELECT "shadowHidden", "category" FROM "sponsorTimes" WHERE "userID" = ? AND "shadowHidden" = ?`, [userID, status]);
// IP-level ban row for a hashed IP, if one exists.
const getIPShadowBan = (hashedIP: string) => db.prepare("get", `SELECT * FROM "shadowBannedIPs" WHERE "hashedIP" = ?`, [hashedIP]);
const endpoint = "/api/shadowBanUser";
const VIPuserID = "shadow-ban-vip";
// Most fixture segments share a single videoID / hashed videoID.
const video = "shadowBanVideo";
const videohash = getHash(video, 1);
// Seed fixture rows for the shadow-ban tests.
// NOTE(review): this region appears to contain interleaved removed+added
// lines from a rendered diff — `insertQuery` is declared twice with
// different column lists, which is not valid TypeScript. Reconcile against
// the repository before relying on this transcript.
before(async () => {
const insertQuery = `INSERT INTO "sponsorTimes" ("videoID", "startTime", "endTime", "votes", "locked", "UUID", "userID", "timeSubmitted", "views", "category", "service", "videoDuration", "hidden", "shadowHidden", "hashedVideoID") VALUES(?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)`;
// NOTE(review): pre-diff rows (15-column insert) — likely replaced below.
await db.prepare("run", insertQuery, ["testtesttest", 1, 11, 2, 0, "shadow-1-uuid-0", "shadowBanned", 0, 50, "sponsor", "YouTube", 100, 0, 0, getHash("testtesttest", 1)]);
await db.prepare("run", insertQuery, ["testtesttest2", 1, 11, 2, 0, "shadow-1-uuid-0-1", "shadowBanned", 0, 50, "sponsor", "PeerTube", 120, 0, 0, getHash("testtesttest2", 1)]);
await db.prepare("run", insertQuery, ["testtesttest", 20, 33, 2, 0, "shadow-1-uuid-2", "shadowBanned", 0, 50, "intro", "YouTube", 101, 0, 0, getHash("testtesttest", 1)]);
// NOTE(review): post-diff 13-column insert (duplicate declaration — diff artifact).
const insertQuery = `INSERT INTO "sponsorTimes" ("videoID", "startTime", "endTime", "votes", "locked", "UUID", "userID", "timeSubmitted", "views", "category", "service", "shadowHidden", "hashedVideoID") VALUES(?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)`;
// shadowBanned: three visible segments (shadowHidden = 0).
await db.prepare("run", insertQuery, [video, 1, 11, 2, 0, "shadow-10", "shadowBanned", 0, 50, "sponsor", "YouTube", 0, videohash]);
await db.prepare("run", insertQuery, [video, 1, 11, 2, 0, "shadow-11", "shadowBanned", 0, 50, "sponsor", "PeerTube", 0, videohash]);
await db.prepare("run", insertQuery, [video, 20, 33, 2, 0, "shadow-12", "shadowBanned", 0, 50, "intro", "YouTube", 0, videohash]);
await db.prepare("run", insertQuery, ["testtesttest", 1, 11, 2, 0, "shadow-2-uuid-0", "shadowBanned2", 0, 50, "sponsor", "YouTube", 100, 0, 0, getHash("testtesttest", 1)]);
await db.prepare("run", insertQuery, ["testtesttest2", 1, 11, 2, 0, "shadow-2-uuid-0-1", "shadowBanned2", 0, 50, "sponsor", "PeerTube", 120, 0, 0, getHash("testtesttest2", 1)]);
await db.prepare("run", insertQuery, ["testtesttest", 20, 33, 2, 0, "shadow-2-uuid-2", "shadowBanned2", 0, 50, "intro", "YouTube", 101, 0, 0, getHash("testtesttest", 1)]);
// shadowBanned2: three visible segments.
await db.prepare("run", insertQuery, [video, 1, 11, 2, 0, "shadow-20", "shadowBanned2", 0, 50, "sponsor", "YouTube", 0, videohash]);
await db.prepare("run", insertQuery, [video, 1, 11, 2, 0, "shadow-21", "shadowBanned2", 0, 50, "sponsor", "PeerTube", 0, videohash]);
await db.prepare("run", insertQuery, [video, 20, 33, 2, 0, "shadow-22", "shadowBanned2", 0, 50, "intro", "YouTube", 0, videohash]);
await db.prepare("run", insertQuery, ["testtesttest", 1, 11, 2, 0, "shadow-3-uuid-0", "shadowBanned3", 0, 50, "sponsor", "YouTube", 100, 0, 1, getHash("testtesttest", 1)]);
await db.prepare("run", insertQuery, ["testtesttest2", 1, 11, 2, 0, "shadow-3-uuid-0-1", "shadowBanned3", 0, 50, "sponsor", "PeerTube", 120, 0, 1, getHash("testtesttest2", 1)]);
await db.prepare("run", insertQuery, ["testtesttest", 20, 33, 2, 0, "shadow-3-uuid-2", "shadowBanned3", 0, 50, "intro", "YouTube", 101, 0, 1, getHash("testtesttest", 1)]);
// shadowBanned3: already hidden (shadowHidden = 1) and listed in shadowBannedUsers below.
await db.prepare("run", insertQuery, [video, 1, 11, 2, 0, "shadow-30", "shadowBanned3", 0, 50, "sponsor", "YouTube", 1, videohash]);
await db.prepare("run", insertQuery, [video, 1, 11, 2, 0, "shadow-31", "shadowBanned3", 0, 50, "sponsor", "PeerTube", 1, videohash]);
await db.prepare("run", insertQuery, [video, 20, 33, 2, 0, "shadow-32", "shadowBanned3", 0, 50, "intro", "YouTube", 1, videohash]);
await db.prepare("run", insertQuery, ["testtesttest", 21, 34, 2, 0, "shadow-4-uuid-1", "shadowBanned4", 0, 50, "sponsor", "YouTube", 101, 0, 0, getHash("testtesttest", 1)]);
await db.prepare("run", insertQuery, [video, 21, 34, 2, 0, "shadow-40", "shadowBanned4", 0, 50, "sponsor", "YouTube", 0, videohash]);
await db.prepare("run", insertQuery, [video, 20, 10, 2, 0, "shadow-50", "shadowBanned5", 0, 50, "sponsor", "YouTube", 0, videohash]);
// shadowBanned6: locked segments (locked = 1), one on a lock-categoried video.
await db.prepare("run", insertQuery, [video, 10, 10, 2, 1, "shadow-60", "shadowBanned6", 0, 50, "sponsor", "YouTube", 0, videohash]);
await db.prepare("run", insertQuery, ["lockedVideo", 10, 10, 2, 1, "shadow-61", "shadowBanned6", 0, 50, "sponsor", "YouTube", 0, getHash("lockedVideo", 1)]);
// shadowBanned7/8/9: timeSubmitted values pair up with the private-DB rows below for IP lookups.
await db.prepare("run", insertQuery, [video, 20, 10, 2, 0, "shadow-70", "shadowBanned7", 383848, 50, "sponsor", "YouTube", 0, videohash]);
await db.prepare("run", insertQuery, [video, 20, 10, 2, 0, "shadow-71", "shadowBanned7", 2332, 50, "intro", "YouTube", 0, videohash]);
await db.prepare("run", insertQuery, [video, 20, 10, 2, 0, "shadow-72", "shadowBanned7", 4923, 50, "interaction", "YouTube", 0, videohash]);
await db.prepare("run", insertQuery, [video, 20, 10, 2, 0, "shadow-80", "shadowBanned8", 1674590916068933, 50, "sponsor", "YouTube", 0, videohash]);
await db.prepare("run", insertQuery, [video, 20, 10, 2, 0, "shadow-81", "shadowBanned8", 1674590916062936, 50, "intro", "YouTube", 0, videohash]);
await db.prepare("run", insertQuery, [video, 20, 10, 2, 0, "shadow-82", "shadowBanned8", 1674590916064324, 50, "interaction", "YouTube", 0, videohash]);
await db.prepare("run", insertQuery, [video, 20, 10, 2, 0, "shadow-90", "shadowBanned9", 1674590916062443, 50, "sponsor", "YouTube", 0, videohash]);
await db.prepare("run", insertQuery, [video, 20, 10, 2, 0, "shadow-91", "shadowBanned9", 1674590916062342, 50, "intro", "YouTube", 0, videohash]);
await db.prepare("run", insertQuery, [video, 20, 10, 2, 0, "shadow-92", "shadowBanned9", 1674590916069491, 50, "interaction", "YouTube", 0, videohash]);
// Pre-existing bans and lock/VIP rows.
await db.prepare("run", `INSERT INTO "shadowBannedUsers" ("userID") VALUES(?)`, ["shadowBanned3"]);
await db.prepare("run", `INSERT INTO "shadowBannedUsers" ("userID") VALUES(?)`, ["shadowBanned4"]);
await db.prepare("run", `INSERT INTO "lockCategories" ("userID", "videoID", "actionType", "category", "service") VALUES (?, ?, ?, ?, ?)`,
[getHash("shadow-ban-vip", 1), "lockedVideo", "skip", "sponsor", "YouTube"]);
await db.prepare("run", `INSERT INTO "vipUsers" ("userID") VALUES(?)`, [getHash(VIPuserID)]);
// Private DB rows linking hashed IPs to the submissions above (matched by timeSubmitted).
const privateInsertQuery = `INSERT INTO "sponsorTimes" ("videoID", "hashedIP", "timeSubmitted", "service") VALUES(?, ?, ?, ?)`;
await privateDB.prepare("run", privateInsertQuery, [video, "shadowBannedIP7", 383848, "YouTube"]);
await privateDB.prepare("run", privateInsertQuery, [video, "shadowBannedIP7", 2332, "YouTube"]);
await privateDB.prepare("run", privateInsertQuery, [video, "shadowBannedIP7", 4923, "YouTube"]);
await privateDB.prepare("run", privateInsertQuery, [video, "shadowBannedIP8", 1674590916068933, "YouTube"]);
await privateDB.prepare("run", privateInsertQuery, [video, "shadowBannedIP8", 1674590916062936, "YouTube"]);
await privateDB.prepare("run", privateInsertQuery, [video, "shadowBannedIP8", 1674590916064324, "YouTube"]);
await privateDB.prepare("run", privateInsertQuery, [video, "shadowBannedIP8", 1674590916062443, "YouTube"]);
await privateDB.prepare("run", privateInsertQuery, [video, "shadowBannedIP8", 1674590916062342, "YouTube"]);
await privateDB.prepare("run", privateInsertQuery, [video, "shadowBannedIP8", 1674590916069491, "YouTube"]);
});
it("Should be able to ban user and hide submissions", (done) => {
@@ -220,4 +255,212 @@ describe("shadowBanUser", () => {
})
.catch(err => done(err));
});
// Banning with type "2" should move submissions to shadowHidden = 2 and
// leave nothing at statuses 0 or 1.
it("Should be able to shadowban user with different type", (done) => {
    const userID = "shadowBanned5";
    client({
        method: "POST",
        url: endpoint,
        params: {
            userID,
            adminUserID: VIPuserID,
            enabled: true,
            categories: `["sponsor"]`,
            unHideOldSubmissions: true,
            type: "2"
        }
    })
        .then(async (res) => {
            assert.strictEqual(res.status, 200);
            // Fetch segment counts for each status in parallel.
            const [type2Videos, type1Videos, type0Videos, shadowRow] = await Promise.all([
                getShadowBanSegmentCategory(userID, 2),
                getShadowBanSegmentCategory(userID, 1),
                getShadowBanSegmentCategory(userID, 0),
                getShadowBan(userID)
            ]);
            assert.ok(shadowRow); // ban still exists
            assert.ok(type2Videos.length > 0); // videos at type 2
            assert.strictEqual(type1Videos.length, 0); // no videos at type 1
            assert.strictEqual(type0Videos.length, 0); // no videos at type 0
            done();
        })
        .catch(done);
});
it("Should not be able to shadowban user with invalid type", (done) => {
const userID = "shadowBanned5";
client({
method: "POST",
url: endpoint,
params: {
userID,
adminUserID: VIPuserID,
enabled: true,
categories: `["sponsor"]`,
unHideOldSubmissions: true,
type: "bad"
}
})
.then(res => {
assert.strictEqual(res.status, 400);
done();
})
.catch(err => done(err));
});
// Banning with unHideOldSubmissions should not touch locked segments.
it("Should exclude locked segments when shadowbanning and removing segments", (done) => {
    const userID = "shadowBanned6";
    client({
        method: "POST",
        url: endpoint,
        params: {
            userID,
            adminUserID: VIPuserID,
            enabled: true,
            categories: `["sponsor"]`,
            unHideOldSubmissions: true
        }
    })
        .then(async res => {
            assert.strictEqual(res.status, 200);
            // Renamed from the misleading `type1Videos`: this queries
            // shadowHidden = 2, not 1.
            const type2Videos = await getShadowBanSegmentCategory(userID, 2);
            const type0Videos = await getShadowBanSegmentCategory(userID, 0);
            const shadowRow = await getShadowBan(userID);
            assert.ok(shadowRow); // ban exists
            assert.strictEqual(type2Videos.length, 0); // no segments hidden at status 2
            assert.strictEqual(type0Videos.length, 1); // locked segment still visible
            done();
        })
        .catch(err => done(err));
});
// Banning by hashed IP should also ban the matching user and hide the
// listed categories.
it("Should be able to ban user by IP and hide submissions of a specific category", (done) => {
    const hashedIP = "shadowBannedIP7";
    const userID = "shadowBanned7";
    client({
        method: "POST",
        url: endpoint,
        params: {
            hashedIP,
            categories: `["sponsor", "intro"]`,
            adminUserID: VIPuserID,
        }
    })
        .then(async (res) => {
            assert.strictEqual(res.status, 200);
            const [hiddenSegments, userBan, ipBan] = await Promise.all([
                getShadowBanSegments(userID, 1),
                getShadowBan(userID),
                getIPShadowBan(hashedIP)
            ]);
            assert.ok(ipBan); // IP-level ban recorded
            assert.ok(userBan); // user-level ban recorded
            assert.strictEqual(hiddenSegments.length, 2); // both listed categories hidden
            done();
        })
        .catch(done);
});
// Unbanning the IP without unhiding must remove only the IP-ban row.
it("Should be able to unban user by IP", (done) => {
    const hashedIP = "shadowBannedIP7";
    const userID = "shadowBanned7";
    client({
        method: "POST",
        url: endpoint,
        params: {
            hashedIP,
            enabled: false,
            unHideOldSubmissions: false,
            adminUserID: VIPuserID,
        }
    })
        .then(async (res) => {
            assert.strictEqual(res.status, 200);
            const [hiddenSegments, userBan, ipBan] = await Promise.all([
                getShadowBanSegments(userID, 1),
                getShadowBan(userID),
                getIPShadowBan(hashedIP)
            ]);
            assert.ok(!ipBan); // IP ban row removed
            assert.ok(userBan); // user-level ban untouched
            assert.strictEqual(hiddenSegments.length, 2); // segments stay hidden (unHideOldSubmissions: false)
            done();
        })
        .catch(done);
});
// Unbanning the IP while unhiding only "sponsor" should leave one
// segment (the other category) still hidden.
it("Should be able to unban user by IP and unhide specific category", (done) => {
    const hashedIP = "shadowBannedIP7";
    const userID = "shadowBanned7";
    client({
        method: "POST",
        url: endpoint,
        params: {
            hashedIP,
            enabled: false,
            categories: `["sponsor"]`,
            unHideOldSubmissions: true,
            adminUserID: VIPuserID,
        }
    })
        .then(async (res) => {
            assert.strictEqual(res.status, 200);
            const [hiddenSegments, userBan, ipBan] = await Promise.all([
                getShadowBanSegments(userID, 1),
                getShadowBan(userID),
                getIPShadowBan(hashedIP)
            ]);
            assert.ok(!ipBan); // IP ban row removed
            assert.ok(userBan); // user-level ban untouched
            assert.strictEqual(hiddenSegments.length, 1); // only the non-sponsor segment stays hidden
            done();
        })
        .catch(done);
});
it("Should be possible to ban self", (done) => {
const userID = VIPuserID;
const hashUserID = getHash(userID);
client({
method: "POST",
url: endpoint,
params: {
enabled: true,
userID: hashUserID,
categories: `["sponsor"]`,
unHideOldSubmissions: true,
adminUserID: userID,
}
})
.then(res => {
assert.strictEqual(res.status, 200);
done();
})
.catch(err => done(err));
});
// With lookForIPs, banning one user should also ban other users who
// submitted from the same hashed IP.
it("Should be able to ban user by userID and other users who used that IP and hide specific category", (done) => {
    const hashedIP = "shadowBannedIP8";
    const userID = "shadowBanned8";
    const userID2 = "shadowBanned9";
    client({
        method: "POST",
        url: endpoint,
        params: {
            userID,
            enabled: true,
            categories: `["sponsor", "intro"]`,
            unHideOldSubmissions: true,
            adminUserID: VIPuserID,
            lookForIPs: true
        }
    })
        .then(async (res) => {
            assert.strictEqual(res.status, 200);
            const [hidden1, hidden2, ban1, ban2, ipBan] = await Promise.all([
                getShadowBanSegments(userID, 1),
                getShadowBanSegments(userID2, 1),
                getShadowBan(userID),
                getShadowBan(userID2),
                getIPShadowBan(hashedIP)
            ]);
            assert.ok(ipBan); // shared IP banned
            assert.ok(ban1); // requested user banned
            assert.ok(ban2); // co-user of the IP banned too
            assert.strictEqual(hidden1.length, 2);
            assert.strictEqual(hidden2.length, 2);
            done();
        })
        .catch(done);
});
});

View File

@@ -44,6 +44,31 @@ describe("tokenUtils test", function() {
});
});
after(function () {
mock.restore();
});
});
describe("tokenUtils failing tests", function() {
// Stub the Patreon OAuth endpoints with 204 responses so that token
// creation is expected to fail in the tests below.
before(function() {
    mock = new MockAdapter(axios, { onNoMatch: "throwException" });
    mock.onPost("https://www.patreon.com/api/oauth2/token").reply(204, patreon.fakeOauth);
    mock.onGet(/identity/).reply(204, patreon.activeIdentity);
});
// A 204 (instead of 200) from the stubbed OAuth endpoint makes token
// creation resolve with null.
it("Should fail if patreon is not correctly stubbed", function (done) {
    tokenUtils.createAndSaveToken(tokenUtils.TokenType.patreon, "test_code").then((licenseKey) => {
        assert.strictEqual(licenseKey, null);
        done();
    }).catch((err) => done(err)); // previously missing: a rejection would time the test out silently
});
// An unknown token type should resolve with null rather than throwing.
it("Should fail if token type is invalid", (done) => {
    tokenUtils.createAndSaveToken("invalidTokenType" as tokenUtils.TokenType).then((licenseKey) => {
        assert.strictEqual(licenseKey, null);
        done();
    }).catch((err) => done(err)); // previously missing: a rejection would time the test out silently
});
// Remove the axios mock so later suites hit the real adapter again.
after(function () {
    mock.restore();
});

View File

@@ -2,6 +2,7 @@ import axios from "axios";
import assert from "assert";
import { config } from "../../src/config";
import { getHash } from "../../src/utils/getHash";
import { client } from "../utils/httpClient";
describe("userCounter", () => {
it("Should return 200", function (done) {
@@ -20,4 +21,13 @@ describe("userCounter", () => {
})
.catch(err => done(err));
});
// Fixed typo in the title ("incremeent") and the floating promise: done
// is now called after the request chain settles, and rejections are
// reported instead of being unhandled.
it("Should not increment counter on OPTIONS", function (done) {
    /* cannot spy test */
    if (!config.userCounterURL) this.skip(); // skip if no userCounterURL is set
    //const spy = sinon.spy(UserCounter);
    client({ method: "OPTIONS", url: "/api/status" })
        .then(() => client({ method: "GET", url: "/api/status" }))
        //assert.strictEqual(spy.callCount, 1);
        .then(() => done())
        .catch((err) => done(err));
});
});

View File

@@ -13,6 +13,10 @@ app.post("/webhook/FirstTimeSubmissions", (req, res) => {
res.sendStatus(200);
});
// Stub webhook receivers used by the test suite; they only need to
// acknowledge delivery with a 200.
app.post("/webhook/WarningWebhook", (req, res) => {
    res.sendStatus(200);
});
app.post("/webhook/CompletelyIncorrectReport", (req, res) => {
    res.sendStatus(200);
});

View File

@@ -10,6 +10,7 @@ import { ImportMock } from "ts-mock-imports";
import * as rateLimitMiddlewareModule from "../src/middleware/requestRateLimit";
import rateLimit from "express-rate-limit";
import redis from "../src/utils/redis";
import { resetRedis, resetPostgres } from "./utils/reset";
async function init() {
ImportMock.mockFunction(rateLimitMiddlewareModule, "rateLimitMiddleware", rateLimit({
@@ -19,6 +20,8 @@ async function init() {
// delete old test database
if (fs.existsSync(config.db)) fs.unlinkSync(config.db);
if (fs.existsSync(config.privateDB)) fs.unlinkSync(config.privateDB);
if (config?.redis?.enabled) await resetRedis();
if (config?.postgres) await resetPostgres();
await initDb();
@@ -59,6 +62,7 @@ async function init() {
server.close();
redis.quit();
process.exitCode = failures ? 1 : 0; // exit with non-zero status if there were failures
process.exit();
});
});
});

3
test/utils/getRandom.ts Normal file
View File

@@ -0,0 +1,3 @@
import crypto from "crypto";

/**
 * Generate a random lowercase-hex string for test fixtures.
 * Uses crypto.randomBytes: pseudoRandomBytes is a deprecated alias.
 * @param {number} [bytes=8] number of random bytes (hex output is twice as long)
 * @returns {string} hex string of length `bytes * 2`
 */
export const genRandom = (bytes = 8) => crypto.randomBytes(bytes).toString("hex");

22
test/utils/reset.ts Normal file
View File

@@ -0,0 +1,22 @@
// drop postgres tables
// reset redis cache
import { config } from "../../src/config";
import { createClient } from "redis";
import { Pool } from "pg";
import { Logger } from "../../src/utils/logger";
/**
 * Flush the entire redis cache; guarded so it only runs for a test-mode
 * config with redis enabled.
 */
export async function resetRedis() {
    if (config?.redis?.enabled && config.mode === "test") {
        const client = createClient(config.redis);
        await client.connect();
        try {
            await client.flushAll();
        } finally {
            // Previously the connection was leaked, which can keep the
            // process alive after the reset.
            await client.quit().catch(() => null);
        }
    }
}
/**
 * Drop both test databases; triple-guarded (env flag, test mode, postgres
 * config present) so it can never run against a non-test database.
 */
export async function resetPostgres() {
    // `===` for consistency with resetRedis (was `==`).
    if (process.env.TEST_POSTGRES && config.mode === "test" && config.postgres) {
        const pool = new Pool({ ...config.postgres });
        try {
            await pool.query(`DROP DATABASE IF EXISTS "sponsorTimes"`);
            await pool.query(`DROP DATABASE IF EXISTS "privateDB"`);
        } finally {
            // Always close the pool, even if a DROP fails.
            await pool.end().catch(err => Logger.error(`closing db (postgres): ${err}`));
        }
    }
}