Merge branch 'master' of https://github.com/ajayyy/SponsorBlockServer into fullVideoLabels

This commit is contained in:
Michael C
2022-09-30 19:21:54 -04:00
108 changed files with 5844 additions and 4299 deletions

View File

@@ -29,4 +29,18 @@ module.exports = {
"semi": "warn",
"no-console": "warn"
},
overrides: [
{
files: ["src/**/*.ts"],
parserOptions: {
project: ["./tsconfig.json"],
},
rules: {
"@typescript-eslint/no-misused-promises": "warn",
"@typescript-eslint/no-floating-promises" : "warn"
}
},
],
};

18
.github/workflows/db-backup.yml vendored Normal file
View File

@@ -0,0 +1,18 @@
name: Docker image builds
on:
push:
branches:
- master
paths:
- containers/backup-db/**
workflow_dispatch:
jobs:
backup-db:
uses: ./.github/workflows/docker-build.yml
with:
name: "db-backup"
username: "ajayyy"
folder: "./containers/backup-db"
secrets:
GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}

View File

@@ -19,6 +19,8 @@ on:
jobs:
build_container:
runs-on: ubuntu-latest
permissions:
packages: write
steps:
- name: Checkout
uses: actions/checkout@v3

View File

@@ -6,6 +6,7 @@ on:
- master
paths:
- databases/**
workflow_dispatch:
jobs:
make-base-db:
@@ -26,3 +27,12 @@ jobs:
with:
name: SponsorTimesDB.db
path: databases/sponsorTimes.db
- uses: mchangrh/s3cmd-sync@f4f36b9705bdd9af7ac91964136989ac17e3b513
with:
args: --acl-public
env:
S3_ENDPOINT: ${{ secrets.S3_ENDPOINT }}
S3_BUCKET: ${{ secrets.S3_BUCKET }}
S3_ACCESS_KEY_ID: ${{ secrets.S3_ACCESS_KEY_ID }}
S3_ACCESS_KEY_SECRET: ${{ secrets.S3_ACCESS_KEY_SECRET }}
SOURCE_DIR: 'databases/sponsorTimes.db'

View File

@@ -20,6 +20,6 @@ jobs:
with:
name: "rsync-host"
username: "ajayyy"
folder: "./rsync"
folder: "./containers/rsync"
secrets:
GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}

View File

@@ -7,6 +7,6 @@ jobs:
runs-on: ubuntu-latest
steps:
- name: take the issue
uses: bdougie/take-action@main
uses: bdougie/take-action@28b86cd8d25593f037406ecbf96082db2836e928
env:
GITHUB_TOKEN: ${{ github.token }}

3
.gitignore vendored
View File

@@ -45,3 +45,6 @@ working
.DS_Store
/.idea/
/dist/
# nyc coverage output
.nyc_output/

5
.nycrc.json Normal file
View File

@@ -0,0 +1,5 @@
{
"exclude": [
"src/routes/addUnlitedVideo.ts"
]
}

View File

@@ -3,7 +3,6 @@
[vipUsers](#vipUsers)
[sponsorTimes](#sponsorTimes)
[userNames](#userNames)
[userNameLogs](#userNameLogs)
[categoryVotes](#categoryVotes)
[lockCategories](#lockCategories)
[warnings](#warnings)
@@ -51,7 +50,7 @@
| sponsorTime_timeSubmitted | timeSubmitted |
| sponsorTime_userID | userID |
| sponsorTimes_UUID | UUID |
| sponsorTimes_hashedVideoID_gin| hashedVideoID, category |
| sponsorTimes_hashedVideoID | hashedVideoID, category |
| sponsorTimes_videoID | videoID, service, category, timeSubmitted |
### userNames
@@ -66,16 +65,6 @@
| -- | :--: |
| userNames_userID | userID |
### userNameLogs
| Name | Type | |
| -- | :--: | -- |
| userID | TEXT | not null |
| newUserName | TEXT | not null |
| oldUserName | TEXT | not null |
| updatedByAdmin | BOOLEAN | not null |
| updatedAt | INTEGER | not null |
### categoryVotes
| Name | Type | |
@@ -137,7 +126,6 @@
| channelID | TEXT | not null |
| title | TEXT | not null |
| published | REAL | not null |
| genreUrl | TEXT | not null |
| index | field |
| -- | :--: |
@@ -204,13 +192,15 @@
# Private
[vote](#vote)
[votes](#votes)
[categoryVotes](#categoryVotes)
[sponsorTimes](#sponsorTimes)
[config](#config)
[ratings](#ratings)
[tempVipLog](#tempVipLog)
[userNameLogs](#userNameLogs)
### vote
### votes
| Name | Type | |
| -- | :--: | -- |
@@ -218,6 +208,7 @@
| userID | TEXT | not null |
| hashedIP | TEXT | not null |
| type | INTEGER | not null |
| originalVoteType | INTEGER | not null | # Since type was reused to also specify the number of votes removed when less than 0, this is being used for the actual type
| index | field |
| -- | :--: |
@@ -280,3 +271,13 @@
| targetUserID | TEXT | not null |
| enabled | BOOLEAN | not null |
| updatedAt | INTEGER | not null |
### userNameLogs
| Name | Type | |
| -- | :--: | -- |
| userID | TEXT | not null |
| newUserName | TEXT | not null |
| oldUserName | TEXT | not null |
| updatedByAdmin | BOOLEAN | not null |
| updatedAt | INTEGER | not null |

View File

@@ -17,10 +17,12 @@
"port": 5432
},
"redis": {
"enabled": true,
"socket": {
"host": "localhost",
"port": 6379
}
},
"expiryTime": 86400
},
"createDatabaseIfNotExist": true,
"schemaFolder": "./databases",
@@ -67,5 +69,6 @@
"max": 20,
"statusCode": 200
}
}
},
"minReputationToSubmitFiller": -1
}

View File

@@ -0,0 +1,13 @@
FROM alpine
RUN apk add postgresql-client
RUN apk add restic --repository http://dl-cdn.alpinelinux.org/alpine/latest-stable/community/
COPY ./backup.sh /usr/src/app/backup.sh
RUN chmod +x /usr/src/app/backup.sh
COPY ./backup.sh /usr/src/app/forget.sh
RUN chmod +x /usr/src/app/forget.sh
RUN echo '30 * * * * /usr/src/app/backup.sh' >> /etc/crontabs/root
RUN echo '10 0 * * 1 /usr/src/app/forget.sh' >> /etc/crontabs/root
CMD crond -l 2 -f

View File

@@ -0,0 +1,6 @@
mkdir ./dump
pg_dump -f ./dump/sponsorTimes.dump sponsorTimes
pg_dump -f ./dump/privateDB.dump privateDB
restic backup ./dump

View File

@@ -0,0 +1 @@
restic forget --prune --keep-last 48 --keep-daily 7 --keep-weekly 8

View File

@@ -1,9 +1,8 @@
-- sponsorTimes
CREATE INDEX IF NOT EXISTS "privateDB_sponsorTimes_v3"
CREATE INDEX IF NOT EXISTS "privateDB_sponsorTimes_v4"
ON public."sponsorTimes" USING btree
("hashedIP" COLLATE pg_catalog."default" ASC NULLS LAST, "videoID" ASC NULLS LAST, service COLLATE pg_catalog."default" ASC NULLS LAST)
;
("videoID" ASC NULLS LAST, service COLLATE pg_catalog."default" ASC NULLS LAST, "timeSubmitted" ASC NULLS LAST);
-- votes

View File

@@ -15,14 +15,19 @@ CREATE INDEX IF NOT EXISTS "sponsorTimes_UUID"
("UUID" COLLATE pg_catalog."default" ASC NULLS LAST)
TABLESPACE pg_default;
CREATE INDEX IF NOT EXISTS "sponsorTimes_hashedVideoID_gin"
ON public."sponsorTimes" USING gin
("hashedVideoID" COLLATE pg_catalog."default" gin_trgm_ops, category COLLATE pg_catalog."default" gin_trgm_ops)
CREATE INDEX IF NOT EXISTS "sponsorTimes_hashedVideoID"
ON public."sponsorTimes" USING btree
(service COLLATE pg_catalog."default" ASC NULLS LAST, "hashedVideoID" text_pattern_ops ASC NULLS LAST, "startTime" ASC NULLS LAST)
TABLESPACE pg_default;
CREATE INDEX IF NOT EXISTS "sponsorTimes_videoID"
ON public."sponsorTimes" USING btree
("videoID" COLLATE pg_catalog."default" ASC NULLS LAST, service COLLATE pg_catalog."default" ASC NULLS LAST, category COLLATE pg_catalog."default" ASC NULLS LAST, "timeSubmitted" ASC NULLS LAST)
(service COLLATE pg_catalog."default" ASC NULLS LAST, "videoID" COLLATE pg_catalog."default" ASC NULLS LAST, "startTime" ASC NULLS LAST)
TABLESPACE pg_default;
CREATE INDEX IF NOT EXISTS "sponsorTimes_videoID_category"
ON public."sponsorTimes" USING btree
("videoID" COLLATE pg_catalog."default" ASC NULLS LAST, "category" COLLATE pg_catalog."default" ASC NULLS LAST)
TABLESPACE pg_default;
CREATE INDEX IF NOT EXISTS "sponsorTimes_description_gin"
@@ -104,3 +109,10 @@ CREATE INDEX IF NOT EXISTS "ratings_videoID"
ON public."ratings" USING btree
("videoID" COLLATE pg_catalog."default" ASC NULLS LAST, service COLLATE pg_catalog."default" ASC NULLS LAST)
TABLESPACE pg_default;
--- userFeatures
CREATE INDEX IF NOT EXISTS "userFeatures_userID"
ON public."userFeatures" USING btree
("userID" COLLATE pg_catalog."default" ASC NULLS LAST, "feature" ASC NULLS LAST)
TABLESPACE pg_default;

View File

@@ -0,0 +1,9 @@
BEGIN TRANSACTION;
-- Add primary keys
ALTER TABLE "votes" ADD "originalType" INTEGER NOT NULL default -1;
UPDATE "config" SET value = 10 WHERE key = 'version';
COMMIT;

View File

@@ -0,0 +1,18 @@
BEGIN TRANSACTION;
CREATE TABLE IF NOT EXISTS "licenseKeys" (
"licenseKey" TEXT NOT NULL PRIMARY KEY,
"time" INTEGER NOT NULL,
"type" TEXT NOT NULL
);
CREATE TABLE IF NOT EXISTS "oauthLicenseKeys" (
"licenseKey" TEXT NOT NULL PRIMARY KEY,
"accessToken" TEXT NOT NULL,
"refreshToken" TEXT NOT NULL,
"expiresIn" INTEGER NOT NULL
);
UPDATE "config" SET value = 11 WHERE key = 'version';
COMMIT;

View File

@@ -0,0 +1,15 @@
BEGIN TRANSACTION;
-- Add primary keys
ALTER TABLE "userNameLogs" ADD "id" SERIAL PRIMARY KEY; --!sqlite-ignore
ALTER TABLE "categoryVotes" ADD "id" SERIAL PRIMARY KEY; --!sqlite-ignore
ALTER TABLE "sponsorTimes" ADD "id" SERIAL PRIMARY KEY; --!sqlite-ignore
ALTER TABLE "config" ADD PRIMARY KEY ("key"); --!sqlite-ignore
ALTER TABLE "ratings" ADD "id" SERIAL PRIMARY KEY; --!sqlite-ignore
ALTER TABLE "tempVipLog" ADD "id" SERIAL PRIMARY KEY; --!sqlite-ignore
ALTER TABLE "votes" ADD "id" SERIAL PRIMARY KEY; --!sqlite-ignore
UPDATE "config" SET value = 8 WHERE key = 'version';
COMMIT;

View File

@@ -0,0 +1,9 @@
BEGIN TRANSACTION;
-- Add primary keys
DROP INDEX IF EXISTS "privateDB_sponsorTimes_v3"; --!sqlite-ignore
UPDATE "config" SET value = 9 WHERE key = 'version';
COMMIT;

View File

@@ -0,0 +1,19 @@
BEGIN TRANSACTION;
-- Add primary keys
ALTER TABLE "sponsorTimes" ADD PRIMARY KEY ("UUID"); --!sqlite-ignore
ALTER TABLE "vipUsers" ADD PRIMARY KEY ("userID"); --!sqlite-ignore
ALTER TABLE "userNames" ADD PRIMARY KEY ("userID"); --!sqlite-ignore
ALTER TABLE "categoryVotes" ADD "id" SERIAL PRIMARY KEY; --!sqlite-ignore
ALTER TABLE "lockCategories" ADD "id" SERIAL PRIMARY KEY; --!sqlite-ignore
ALTER TABLE "warnings" ADD PRIMARY KEY ("userID", "issueTime"); --!sqlite-ignore
ALTER TABLE "shadowBannedUsers" ADD PRIMARY KEY ("userID"); --!sqlite-ignore
ALTER TABLE "unlistedVideos" ADD "id" SERIAL PRIMARY KEY; --!sqlite-ignore
ALTER TABLE "config" ADD PRIMARY KEY ("key"); --!sqlite-ignore
ALTER TABLE "archivedSponsorTimes" ADD PRIMARY KEY ("UUID"); --!sqlite-ignore
ALTER TABLE "ratings" ADD "id" SERIAL PRIMARY KEY; --!sqlite-ignore
UPDATE "config" SET value = 32 WHERE key = 'version';
COMMIT;

View File

@@ -0,0 +1,13 @@
BEGIN TRANSACTION;
CREATE TABLE IF NOT EXISTS "userFeatures" (
"userID" TEXT NOT NULL,
"feature" INTEGER NOT NULL,
"issuerUserID" TEXT NOT NULL,
"timeSubmitted" INTEGER NOT NULL,
PRIMARY KEY ("userID", "feature")
);
UPDATE "config" SET value = 33 WHERE key = 'version';
COMMIT;

View File

@@ -0,0 +1,7 @@
BEGIN TRANSACTION;
ALTER TABLE "videoInfo" DROP COLUMN "genreUrl";
UPDATE "config" SET value = 34 WHERE key = 'version';
COMMIT;

View File

@@ -2,7 +2,7 @@ version: '3'
services:
database:
container_name: database
image: postgres:13
image: postgres:14
env_file:
- database.env
volumes:
@@ -12,7 +12,7 @@ services:
restart: always
redis:
container_name: redis
image: redis:6.0
image: redis:7.0
command: /usr/local/etc/redis/redis.conf
volumes:
- ./redis/redis.conf:/usr/local/etc/redis/redis.conf

View File

@@ -1,11 +0,0 @@
if ($request_method = 'OPTIONS') {
add_header 'Access-Control-Allow-Origin' '*';
add_header 'Access-Control-Allow-Methods' 'GET, POST, DELETE';
add_header 'Access-Control-Allow-Headers' 'Content-Type';
# cache CORS for 24 hours
add_header 'Access-Control-Max-Age' 86400;
# return empty response for preflight
add_header 'Content-Type' 'text/plain; charset=UTF-8';
add_header 'Content-Length' 0;
return 204;
}

View File

@@ -1,7 +0,0 @@
error_page 400 401 402 403 404 405 406 407 408 409 410 411 412 413 414 415 416 417 418 421 422 423 424 425 426 428 429 431 451 500 501 502 503 504 505 506 507 508 510 511 /error.html;
location = /error.html {
ssi on;
internal;
root /etc/nginx/error;
}

View File

@@ -1 +0,0 @@
<!--# echo var="status"--> <!--# echo var="status_text"--> https://status.sponsor.ajay.app

View File

@@ -1,15 +0,0 @@
map $status $status_text {
400 'Bad Request';
401 'Unauthorized';
403 'Forbidden';
404 'Not Found';
405 'Method Not Allowed';
408 'Request Timeout';
409 'Conflict';
429 'Too Many Requests';
500 'Internal Server Error';
502 'Bad Gateway';
503 'Service Unavailable';
504 'Gateway Timeout';
505 'HTTP Version Not Supported';
}

View File

@@ -1,317 +0,0 @@
worker_processes 2;
worker_rlimit_nofile 500000;
worker_shutdown_timeout 10;
events {
worker_connections 100000; # Default: 1024
#use epoll;
#multi_accept on;
}
http {
log_format no_ip '$remote_user [$time_local] '
'"$request" $status $body_bytes_sent '
'"$http_referer" "$http_user_agent" "$gzip_ratio"';
log_format user_agent '[$time_local] '
'"$http_referer" "$http_user_agent" "$gzip_ratio"';
#limit_req_zone $binary_remote_addr zone=mylimit:10m rate=10r/s;
limit_req_log_level warn;
include /etc/nginx/mime.types;
include /etc/nginx/proxy.conf;
# error_map has to be at http level
include /etc/nginx/error_map.conf;
# Custom MIME definition
types {
text/csv csv;
}
# keepalive settings
#keepalive_requests 10;
keepalive_timeout 10s;
http2_idle_timeout 20s; # replaced by keepalive_timeout in 1.19.7
access_log off;
#error_log /etc/nginx/logs/error.log warn;
error_log /dev/null crit;
upstream backend_GET {
least_conn;
#keepalive 5;
#server localhost:4441;
#server localhost:4442;
#server localhost:4443;
#server localhost:4444;
#server localhost:4445;
#server localhost:4446;
#server localhost:4447;
#server localhost:4448;
#server 10.0.0.4:4441 max_fails=25 fail_timeout=20s;
#server 10.0.0.3:4441 max_fails=25 fail_timeout=20s;
#server 10.0.0.3:4442 max_fails=25 fail_timeout=20s;
server 10.0.0.5:4441 max_fails=25 fail_timeout=20s;
server 10.0.0.5:4442 max_fails=25 fail_timeout=20s;
server 10.0.0.6:4441 max_fails=25 fail_timeout=20s;
server 10.0.0.6:4442 max_fails=25 fail_timeout=20s;
server 10.0.0.9:4441 max_fails=25 fail_timeout=20s;
server 10.0.0.9:4442 max_fails=25 fail_timeout=20s;
server 10.0.0.12:4441 max_fails=25 fail_timeout=20s;
server 10.0.0.12:4442 max_fails=25 fail_timeout=20s;
server 10.0.0.10:4441 max_fails=25 fail_timeout=20s;
server 10.0.0.10:4442 max_fails=25 fail_timeout=20s;
server 10.0.0.13:4441 max_fails=25 fail_timeout=20s;
server 10.0.0.13:4442 max_fails=25 fail_timeout=20s;
server 10.0.0.14:4441 max_fails=25 fail_timeout=20s;
server 10.0.0.14:4442 max_fails=25 fail_timeout=20s;
server 10.0.0.11:4441 max_fails=25 fail_timeout=20s;
server 10.0.0.11:4442 max_fails=25 fail_timeout=20s;
server 10.0.0.16:4441 max_fails=25 fail_timeout=20s;
server 10.0.0.16:4442 max_fails=25 fail_timeout=20s;
server 10.0.0.17:4441 max_fails=25 fail_timeout=20s;
server 10.0.0.17:4442 max_fails=25 fail_timeout=20s;
#server 134.209.69.251:80 backup;
#server 116.203.32.253:80 backup;
#server 116.203.32.253:80;
}
upstream backend_POST {
#server localhost:4441;
#server localhost:4442;
server 10.0.0.3:4441 max_fails=25 fail_timeout=15s;
server 10.0.0.4:4441 max_fails=25 fail_timeout=15s;
#server 10.0.0.3:4442;
}
upstream backend_db {
server 10.0.0.4:4441 max_fails=1 fail_timeout=3s;
#server 10.0.0.3:4441;
#server 10.0.0.4;
}
upstream backend_db_dl {
server 10.0.0.4;
}
proxy_cache_path /var/cache/nginx levels=1:2 keys_zone=CACHEZONE:10m inactive=60m max_size=400m;
proxy_cache_key "$scheme$request_method$host$request_uri";
add_header X-Cache $upstream_cache_status;
server {
server_name sponsor.ajay.app api.sponsor.ajay.app;
include /etc/nginx/error.conf;
set_real_ip_from 10.0.0.0/24;
real_ip_header proxy_protocol;
location /news {
return 301 https://blog.ajay.app/sponsorblock;
}
location /viewer {
return 301 https://sb.ltn.fi;
}
location /test/ {
# return 404 "";
proxy_pass http://10.0.0.4:4445/;
#proxy_pass https://sbtest.etcinit.com/;
}
#access_log /etc/nginx/logs/requests.log no_ip buffer=64k;
location /api/skipSegments {
include /etc/nginx/cors.conf;
#return 200 "[]";
proxy_pass http://backend_$request_method;
#proxy_cache CACHEZONE;
#proxy_cache_valid 10s;
#limit_req zone=mylimit;
#access_log /etc/nginx/logs/download.log no_ip;
gzip on;
if ($request_method = POST) {
access_log /etc/nginx/logs/submissions.log user_agent buffer=64k;
}
#proxy_read_timeout 6s;
#proxy_next_upstream error timeout http_500 http_502;
}
location /api/getTopUsers {
include /etc/nginx/cors.conf;
proxy_pass http://backend_GET;
proxy_cache CACHEZONE;
proxy_cache_valid 20m;
}
location /api/getTotalStats {
include /etc/nginx/cors.conf;
proxy_pass http://backend_POST;
proxy_cache CACHEZONE;
proxy_cache_valid 20m;
#return 204;
}
location /api/getTopCategoryUsers {
include /etc/nginx/cors.conf;
proxy_pass http://backend_POST;
proxy_cache CACHEZONE;
proxy_cache_valid 20m;
}
location /api/getVideoSponsorTimes {
include /etc/nginx/cors.conf;
proxy_pass http://backend_GET;
}
location /api/isUserVIP {
include /etc/nginx/cors.conf;
proxy_pass http://backend_GET;
}
location /download/ {
#access_log /etc/nginx/logs/download.log no_ip buffer=64k;
gzip on;
proxy_max_temp_file_size 0;
#proxy_cache CACHEZONE;
#proxy_cache_valid 20m;
#proxy_http_version 1.0;
#gzip_types text/csv;
#gzip_comp_level 1;
#proxy_buffering off;
proxy_pass http://backend_db;
#alias /home/sbadmin/sponsor/docker/database-export/;
#return 307 https://rsync.sponsor.ajay.app$request_uri;
}
location /database {
proxy_pass http://backend_db;
#return 200 "Disabled for load reasons";
}
location = /database.db {
return 404 "Sqlite database has been replaced with csv exports at https://sponsor.ajay.app/database. Sqlite exports might come back soon, but exported at longer intervals.";
#alias /home/sbadmin/sponsor/databases/sponsorTimes.db;
#alias /home/sbadmin/test-db/database.db;
}
#location = /database/sponsorTimes.csv {
# alias /home/sbadmin/sponsorTimes.csv;
#}
#location /api/voteOnSponsorTime {
# return 200 "Success";
#}
#location /api/viewedVideoSponsorTime {
# return 200 "Success";
#}
location /api {
include /etc/nginx/cors.conf;
proxy_pass http://backend_POST;
}
location / {
root /home/sbadmin/SponsorBlockSite/public-prod;
error_page 404 /404.html;
}
listen [::]:443 default_server ssl http2 ipv6only=on backlog=323999;
listen 443 default_server ssl http2 reuseport backlog=3000999; # managed by Certbot
listen 4443 default_server ssl http2 proxy_protocol reuseport backlog=3000999;
#listen 443 http3 reuseport;
#ssl_protocols TLSv1.2 TLSv1.3;
listen 8081 proxy_protocol;
port_in_redirect off;
ssl_certificate /home/sbadmin/certs/cert.pem;
ssl_certificate_key /home/sbadmin/certs/key.pem;
#ssl_certificate /etc/letsencrypt/live/sponsor.ajay.app-0001/fullchain.pem; # managed by Certbot
#ssl_certificate_key /etc/letsencrypt/live/sponsor.ajay.app-0001/privkey.pem; # managed by Certbot
include /etc/letsencrypt/options-ssl-nginx.conf; # managed by Certbot
ssl_dhparam /etc/letsencrypt/ssl-dhparams.pem; # managed by Certbot
}
server {
server_name cdnsponsor.ajay.app;
error_page 404 /404.html;
#location /database/ {
# alias /home/sbadmin/sponsor/docker/database-export/;
#}
#location /download/ {
# alias /home/sbadmin/sponsor/docker/database-export/;
#}
location / {
root /home/sbadmin/SponsorBlockSite/public-prod;
}
listen 443 ssl; # managed by Certbot
ssl_certificate /home/sbadmin/certs/cert.pem;
ssl_certificate_key /home/sbadmin/certs/key.pem;
#ssl_certificate /etc/letsencrypt/live/sponsor.ajay.app-0001/fullchain.pem; # managed by Certbot
#ssl_certificate_key /etc/letsencrypt/live/sponsor.ajay.app-0001/privkey.pem; # managed by Certbot
include /etc/letsencrypt/options-ssl-nginx.conf; # managed by Certbot
ssl_dhparam /etc/letsencrypt/ssl-dhparams.pem; # managed by Certbot
}
server {
access_log off;
return 301 https://$host$request_uri;
listen [::]:80 ipv6only=on;
listen 8080 proxy_protocol;
listen 80;
server_name sponsor.ajay.app api.sponsor.ajay.app, cdnsponsor.ajay.app, wiki.sponsor.ajay.app;
return 404; # managed by Certbot
}
server {
server_name wiki.sponsor.ajay.app; # managed by Certbot
location /.well-known/ {
root /home/sbadmin/SponsorBlockSite/public-prod;
}
location ~* ^/index.php/(?<pagename>.*)$ {
return 301 /w/$pagename;
}
location / {
proxy_pass http://10.0.0.3:8080;
}
port_in_redirect off;
listen [::]:443 ssl http2;
listen 443 ssl http2; # managed by Certbot
listen 8081 proxy_protocol;
#listen 443 http3 reuseport;
#ssl_protocols TLSv1.2 TLSv1.3;
#listen 80;
ssl_certificate /home/sbadmin/certs/cert.pem;
ssl_certificate_key /home/sbadmin/certs/key.pem;
#ssl_certificate /etc/letsencrypt/live/sponsor.ajay.app-0001/fullchain.pem; # managed by Certbot
#ssl_certificate_key /etc/letsencrypt/live/sponsor.ajay.app-0001/privkey.pem; # managed by Certbot
include /etc/letsencrypt/options-ssl-nginx.conf; # managed by Certbot
ssl_dhparam /etc/letsencrypt/ssl-dhparams.pem; # managed by Certbot
}
}

View File

@@ -1,12 +0,0 @@
proxy_redirect off;
proxy_set_header Host $host;
proxy_set_header X-Real-IP $remote_addr;
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
proxy_set_header Connection "";
client_max_body_size 10m;
client_body_buffer_size 128k;
proxy_connect_timeout 5s;
#proxy_send_timeout 10;
proxy_read_timeout 30s;
proxy_buffers 32 4k;
proxy_http_version 1.1;

6687
package-lock.json generated

File diff suppressed because it is too large Load Diff

View File

@@ -5,6 +5,7 @@
"main": "src/index.ts",
"scripts": {
"test": "npm run tsc && ts-node test/test.ts",
"test:coverage": "nyc npm run test",
"dev": "nodemon",
"dev:bash": "nodemon -x 'npm test ; npm start'",
"postgres:docker": "docker run --rm -p 5432:5432 -e POSTGRES_USER=ci_db_user -e POSTGRES_PASSWORD=ci_db_pass postgres:alpine",
@@ -17,38 +18,39 @@
"author": "Ajay Ramachandran",
"license": "MIT",
"dependencies": {
"@ajayyy/lru-diskcache": "^2.0.0",
"axios": "^0.24.0",
"better-sqlite3": "^7.4.5",
"cron": "^1.8.2",
"express": "^4.17.1",
"axios": "^0.27.2",
"better-sqlite3": "^7.6.0",
"cron": "^2.0.0",
"express": "^4.18.1",
"express-promise-router": "^4.1.1",
"express-rate-limit": "^6.3.0",
"express-rate-limit": "^6.4.0",
"form-data": "^4.0.0",
"lodash": "^4.17.21",
"pg": "^8.7.1",
"pg": "^8.7.3",
"rate-limit-redis": "^3.0.1",
"redis": "^4.0.6",
"redis": "^4.2.0",
"sync-mysql": "^3.0.1"
},
"devDependencies": {
"@types/better-sqlite3": "^7.4.1",
"@types/cron": "^1.7.3",
"@types/better-sqlite3": "^7.5.0",
"@types/cron": "^2.0.0",
"@types/express": "^4.17.13",
"@types/lodash": "^4.14.178",
"@types/mocha": "^9.0.0",
"@types/node": "^16.11.11",
"@types/pg": "^8.6.1",
"@typescript-eslint/eslint-plugin": "^5.5.0",
"@typescript-eslint/parser": "^5.5.0",
"eslint": "^8.3.0",
"mocha": "^9.1.3",
"nodemon": "^2.0.15",
"sinon": "^12.0.1",
"@types/lodash": "^4.14.182",
"@types/mocha": "^9.1.1",
"@types/node": "^18.0.3",
"@types/pg": "^8.6.5",
"@typescript-eslint/eslint-plugin": "^5.30.6",
"@typescript-eslint/parser": "^5.30.6",
"eslint": "^8.19.0",
"mocha": "^10.0.0",
"nodemon": "^2.0.19",
"nyc": "^15.1.0",
"sinon": "^14.0.0",
"ts-mock-imports": "^1.3.8",
"ts-node": "^10.4.0",
"typescript": "^4.5.2"
"ts-node": "^10.8.2",
"typescript": "^4.7.4"
},
"engines": {
"node": ">=10"
"node": ">=16"
}
}

View File

@@ -43,13 +43,13 @@ import ExpressPromiseRouter from "express-promise-router";
import { Server } from "http";
import { youtubeApiProxy } from "./routes/youtubeApiProxy";
import { getChapterNames } from "./routes/getChapterNames";
import { postRating } from "./routes/ratings/postRating";
import { getRating } from "./routes/ratings/getRating";
import { postClearCache as ratingPostClearCache } from "./routes/ratings/postClearCache";
import { getTopCategoryUsers } from "./routes/getTopCategoryUsers";
import { addUserAsTempVIP } from "./routes/addUserAsTempVIP";
import { endpoint as getVideoLabels } from "./routes/getVideoLabel";
import { getVideoLabelsByHash } from "./routes/getVideoLabelByHash";
import { addFeature } from "./routes/addFeature";
import { generateTokenRequest } from "./routes/generateToken";
import { verifyTokenRequest } from "./routes/verifyToken";
export function createServer(callback: () => void): Server {
// Create a service (the app object is just a callback).
@@ -77,15 +77,14 @@ export function createServer(callback: () => void): Server {
return app.listen(config.port, callback);
}
/* eslint-disable @typescript-eslint/no-misused-promises */
function setupRoutes(router: Router) {
// Rate limit endpoint lists
const voteEndpoints: RequestHandler[] = [voteOnSponsorTime];
const viewEndpoints: RequestHandler[] = [viewedVideoSponsorTime];
const postRateEndpoints: RequestHandler[] = [postRating];
if (config.rateLimit) {
if (config.rateLimit.vote) voteEndpoints.unshift(rateLimitMiddleware(config.rateLimit.vote, voteGetUserID));
if (config.rateLimit.view) viewEndpoints.unshift(rateLimitMiddleware(config.rateLimit.view));
if (config.rateLimit.rate) postRateEndpoints.unshift(rateLimitMiddleware(config.rateLimit.rate));
}
//add the get function
@@ -198,11 +197,10 @@ function setupRoutes(router: Router) {
router.get("/api/lockReason", getLockReason);
// ratings
router.get("/api/ratings/rate/:prefix", getRating);
router.get("/api/ratings/rate", getRating);
router.post("/api/ratings/rate", postRateEndpoints);
router.post("/api/ratings/clearCache", ratingPostClearCache);
router.post("/api/feature", addFeature);
router.get("/api/generateToken/:type", generateTokenRequest);
router.get("/api/verifyToken", verifyTokenRequest);
// labels
router.get("/api/videoLabels", getVideoLabels);
@@ -219,3 +217,4 @@ function setupRoutes(router: Router) {
});
}
}
/* eslint-enable @typescript-eslint/no-misused-promises */

View File

@@ -1,7 +1,7 @@
import fs from "fs";
import { SBSConfig } from "./types/config.model";
import packageJson from "../package.json";
import { isBoolean, isNumber } from "lodash";
import { isNumber } from "lodash";
const isTestMode = process.env.npm_lifecycle_script === packageJson.scripts.test;
const configFile = process.env.TEST_POSTGRES ? "ci.json"
@@ -20,7 +20,7 @@ addDefaults(config, {
privateDBSchema: "./databases/_private.db.sql",
readOnly: false,
webhooks: [],
categoryList: ["sponsor", "selfpromo", "exclusive_access", "interaction", "intro", "outro", "preview", "music_offtopic", "filler", "poi_highlight"],
categoryList: ["sponsor", "selfpromo", "exclusive_access", "interaction", "intro", "outro", "preview", "music_offtopic", "filler", "poi_highlight", "chapter"],
categorySupport: {
sponsor: ["skip", "mute", "full"],
selfpromo: ["skip", "mute", "full"],
@@ -42,6 +42,9 @@ addDefaults(config, {
discordNeuralBlockRejectWebhookURL: null,
discordFailedReportChannelWebhookURL: null,
discordReportChannelWebhookURL: null,
discordMaliciousReportWebhookURL: null,
minReputationToSubmitChapter: 0,
minReputationToSubmitFiller: 0,
getTopUsersCacheTimeMinutes: 240,
globalSalt: null,
mode: "",
@@ -59,12 +62,6 @@ addDefaults(config, {
max: 10,
statusCode: 200,
message: "OK",
},
rate: {
windowMs: 900000,
max: 20,
statusCode: 200,
message: "Success",
}
},
userCounterURL: null,
@@ -76,7 +73,25 @@ addDefaults(config, {
user: "",
host: "",
password: "",
port: 5432
port: 5432,
max: 10,
idleTimeoutMillis: 10000,
maxTries: 3,
maxConcurrentRequests: 3500
},
postgresReadOnly: {
enabled: false,
weight: 1,
user: "",
host: "",
password: "",
port: 5432,
readTimeout: 250,
max: 10,
idleTimeoutMillis: 10000,
maxTries: 3,
fallbackOnFail: true,
maxConcurrentRequests: 3500
},
dumpDatabase: {
enabled: false,
@@ -112,9 +127,7 @@ addDefaults(config, {
name: "ratings"
}]
},
diskCache: {
max: 10737418240
},
diskCacheURL: null,
crons: null,
redis: {
enabled: false,
@@ -122,7 +135,18 @@ addDefaults(config, {
host: "",
port: 0
},
disableOfflineQueue: true
disableOfflineQueue: true,
expiryTime: 24 * 60 * 60,
getTimeout: 40
},
patreon: {
clientId: "",
clientSecret: "",
minPrice: 0,
redirectUri: "https://sponsor.ajay.app/api/generateToken/patreon"
},
gumroad: {
productPermalinks: ["sponsorblock"]
}
});
loadFromEnv(config);
@@ -167,12 +191,12 @@ function loadFromEnv(config: SBSConfig, prefix = "") {
const fullKey = (prefix ? `${prefix}_` : "") + key;
const data = config[key];
if (typeof data === "object" && !Array.isArray(data)) {
if (data && typeof data === "object" && !Array.isArray(data)) {
loadFromEnv(data, fullKey);
} else if (process.env[fullKey]) {
const value = process.env[fullKey];
if (isNumber(value)) {
config[key] = parseInt(value, 10);
config[key] = parseFloat(value);
} else if (value.toLowerCase() === "true" || value.toLowerCase() === "false") {
config[key] = value === "true";
} else if (key === "newLeafURLs") {

View File

@@ -57,7 +57,7 @@ export const archiveDownvoteSegment = async (dayLimit: number, voteLimit: number
const DownvoteSegmentArchiveJob = new CronJob(
jobConfig?.schedule || "0 0 * * * 0",
() => archiveDownvoteSegment(jobConfig?.timeThresholdInDays, jobConfig?.voteThreshold)
() => void archiveDownvoteSegment(jobConfig?.timeThresholdInDays, jobConfig?.voteThreshold)
);
if (serverConfig?.crons?.enabled && jobConfig && !jobConfig.schedule) {

View File

@@ -1,7 +1,12 @@
export interface QueryOption {
useReplica?: boolean;
forceReplica?: boolean;
}
export interface IDatabase {
init(): Promise<void>;
prepare(type: QueryType, query: string, params?: any[]): Promise<any | any[] | void>;
prepare(type: QueryType, query: string, params?: any[], options?: QueryOption): Promise<any | any[] | void>;
}
export type QueryType = "get" | "all" | "run";

View File

@@ -1,8 +1,10 @@
import { Logger } from "../utils/logger";
import { IDatabase, QueryType } from "./IDatabase";
import { Client, Pool, PoolClient, types } from "pg";
import { IDatabase, QueryOption, QueryType } from "./IDatabase";
import { Client, Pool, QueryResult, types } from "pg";
import fs from "fs";
import { CustomPostgresConfig, CustomPostgresReadOnlyConfig } from "../types/config.model";
import { timeoutPomise, PromiseWithState, savePromiseState, nextFulfilment } from "../utils/promise";
// return numeric (pg_type oid=1700) as float
types.setTypeParser(1700, function(val) {
@@ -14,13 +16,58 @@ types.setTypeParser(20, function(val) {
return parseInt(val, 10);
});
export interface DatabaseConfig {
dbSchemaFileName: string,
dbSchemaFolder: string,
fileNamePrefix: string,
readOnly: boolean,
createDbIfNotExists: boolean,
postgres: CustomPostgresConfig,
postgresReadOnly: CustomPostgresReadOnlyConfig
}
export class Postgres implements IDatabase {
private pool: Pool;
private lastPoolFail = 0;
constructor(private config: Record<string, any>) {}
private poolRead: Pool;
private lastPoolReadFail = 0;
private concurrentRequests = 0;
private concurrentReadRequests = 0;
constructor(private config: DatabaseConfig) {}
async init(): Promise<void> {
this.pool = new Pool(this.config.postgres);
this.pool = new Pool({
...this.config.postgres
});
this.pool.on("error", (err, client) => {
Logger.error(err.stack);
this.lastPoolFail = Date.now();
try {
client.release(true);
} catch (err) {
Logger.error(`pool (postgres): ${err}`);
}
});
if (this.config.postgresReadOnly && this.config.postgresReadOnly.enabled) {
this.poolRead = new Pool({
...this.config.postgresReadOnly
});
this.poolRead.on("error", (err, client) => {
Logger.error(err.stack);
this.lastPoolReadFail = Date.now();
try {
client.release(true);
} catch (err) {
Logger.error(`poolRead (postgres): ${err}`);
}
});
}
if (!this.config.readOnly) {
if (this.config.createDbIfNotExists) {
@@ -43,7 +90,7 @@ export class Postgres implements IDatabase {
}
}
async prepare(type: QueryType, query: string, params?: any[]): Promise<any[]> {
async prepare(type: QueryType, query: string, params?: any[], options: QueryOption = {}): Promise<any[]> {
// Convert query to use numbered parameters
let count = 1;
for (let char = 0; char < query.length; char++) {
@@ -55,31 +102,96 @@ export class Postgres implements IDatabase {
Logger.debug(`prepare (postgres): type: ${type}, query: ${query}, params: ${params}`);
let client: PoolClient;
try {
client = await this.pool.connect();
const queryResult = await client.query({ text: query, values: params });
switch (type) {
case "get": {
const value = queryResult.rows[0];
Logger.debug(`result (postgres): ${JSON.stringify(value)}`);
return value;
}
case "all": {
const values = queryResult.rows;
Logger.debug(`result (postgres): ${JSON.stringify(values)}`);
return values;
}
case "run": {
break;
}
if (this.config.readOnly) {
if (this.concurrentReadRequests > this.config.postgresReadOnly?.maxConcurrentRequests) {
Logger.error(`prepare (postgres): cancelling read query because too many concurrent requests, query: ${query}`);
throw new Error("Too many concurrent requests");
}
} catch (err) {
Logger.error(`prepare (postgres): ${err}`);
} finally {
client?.release();
this.concurrentReadRequests++;
} else {
if (this.concurrentRequests > this.config.postgres.maxConcurrentRequests) {
Logger.error(`prepare (postgres): cancelling query because too many concurrent requests, query: ${query}`);
throw new Error("Too many concurrent requests");
}
this.concurrentRequests++;
}
const pendingQueries: PromiseWithState<QueryResult<any>>[] = [];
let tries = 0;
let lastPool: Pool = null;
const maxTries = () => (lastPool === this.pool
? this.config.postgres.maxTries : this.config.postgresReadOnly.maxTries);
do {
tries++;
try {
lastPool = this.getPool(type, options);
pendingQueries.push(savePromiseState(lastPool.query({ text: query, values: params })));
const currentPromises = [...pendingQueries];
if (options.useReplica && maxTries() - tries > 1) currentPromises.push(savePromiseState(timeoutPomise(this.config.postgresReadOnly.readTimeout)));
const queryResult = await nextFulfilment(currentPromises);
if (this.config.readOnly) {
this.concurrentReadRequests--;
} else {
this.concurrentRequests--;
}
switch (type) {
case "get": {
const value = queryResult.rows[0];
Logger.debug(`result (postgres): ${JSON.stringify(value)}`);
return value;
}
case "all": {
const values = queryResult.rows;
Logger.debug(`result (postgres): ${JSON.stringify(values)}`);
return values;
}
case "run": {
return;
}
}
} catch (err) {
if (lastPool === this.pool) {
// Only applies if it is get or all request
options.forceReplica = true;
} else if (lastPool === this.poolRead && maxTries() - tries <= 1) {
options.useReplica = false;
}
Logger.error(`prepare (postgres) try ${tries}: ${err}`);
}
} while (this.isReadQuery(type) && tries < maxTries());
if (this.config.readOnly) {
this.concurrentReadRequests--;
} else {
this.concurrentRequests--;
}
throw new Error(`prepare (postgres): ${type} ${query} failed after ${tries} tries`);
}
/**
 * Chooses which connection pool a query should run on.
 *
 * The read replica is only eligible for read queries ("get"/"all") when it is
 * configured and the caller opted in via options.useReplica. Among eligible
 * requests, traffic is split by postgresReadOnly.weight, with two overrides:
 * if the replica itself failed within the last 30s it is avoided, and if the
 * primary failed within the last 30s the replica is preferred.
 *
 * Bug fixes vs original: the replica config was dereferenced unconditionally
 * (crashing every query when postgresReadOnly is null — the database setup
 * passes null when no replica is configured), and the local flag name typo
 * "ignrore" is corrected.
 */
private getPool(type: string, options: QueryOption): Pool {
    const readAvailable = this.poolRead && options.useReplica && this.isReadQuery(type);
    // Avoid the replica if it failed recently (within the last 30 seconds)
    const ignoreReadDueToFailure = this.config.postgresReadOnly?.fallbackOnFail
        && this.lastPoolReadFail > Date.now() - 1000 * 30;
    // Prefer the replica if the primary failed recently
    const readDueToFailure = this.config.postgresReadOnly?.fallbackOnFail
        && this.lastPoolFail > Date.now() - 1000 * 30;
    if (readAvailable && !ignoreReadDueToFailure && (options.forceReplica || readDueToFailure ||
            Math.random() > 1 / (this.config.postgresReadOnly.weight + 1))) {
        return this.poolRead;
    } else {
        return this.pool;
    }
}
// A query is a read when it fetches rows ("get"/"all") rather than mutating ("run").
private isReadQuery(type: string): boolean {
    return ["get", "all"].includes(type);
}
private async createDB() {
@@ -98,7 +210,7 @@ export class Postgres implements IDatabase {
);
}
client.end();
client.end().catch(err => Logger.error(`closing db (postgres): ${err}`));
}
private async upgradeDB(fileNamePrefix: string, schemaFolder: string) {

View File

@@ -93,9 +93,7 @@ export class Sqlite implements IDatabase {
}
private static processUpgradeQuery(query: string): string {
const result = query.replace(/^.*--!sqlite-ignore/gm, "");
return result;
return query.replace(/^.*--!sqlite-ignore/gm, "");
}
}

View File

@@ -17,12 +17,13 @@ if (config.mysql) {
readOnly: config.readOnly,
createDbIfNotExists: config.createDatabaseIfNotExist,
postgres: {
user: config.postgres?.user,
host: config.postgres?.host,
...config.postgres,
database: "sponsorTimes",
password: config.postgres?.password,
port: config.postgres?.port,
}
},
postgresReadOnly: config.postgresReadOnly ? {
...config.postgresReadOnly,
database: "sponsorTimes"
} : null
});
privateDB = new Postgres({
@@ -32,12 +33,13 @@ if (config.mysql) {
readOnly: config.readOnly,
createDbIfNotExists: config.createDatabaseIfNotExist,
postgres: {
user: config.postgres?.user,
host: config.postgres?.host,
database: "privateDB",
password: config.postgres?.password,
port: config.postgres?.port,
}
...config.postgres,
database: "privateDB"
},
postgresReadOnly: config.postgresReadOnly ? {
...config.postgresReadOnly,
database: "privateDB"
} : null
});
} else {
db = new Sqlite({

View File

@@ -27,4 +27,4 @@ async function init() {
}).setTimeout(15000);
}
init();
init().catch((err) => Logger.error(err));

View File

@@ -1,35 +1,42 @@
import { getIP } from "../utils/getIP";
import { getHash } from "../utils/getHash";
import { getHashCache } from "../utils/getHashCache";
import rateLimit, { RateLimitRequestHandler } from "express-rate-limit";
import rateLimit from "express-rate-limit";
import { RateLimitConfig } from "../types/config.model";
import { Request } from "express";
import { Request, RequestHandler } from "express";
import { isUserVIP } from "../utils/isUserVIP";
import { UserID } from "../types/user.model";
import RedisStore from "rate-limit-redis";
import RedisStore, { RedisReply } from "rate-limit-redis";
import redis from "../utils/redis";
import { config } from "../config";
import { Logger } from "../utils/logger";
export function rateLimitMiddleware(limitConfig: RateLimitConfig, getUserID?: (req: Request) => UserID): RateLimitRequestHandler {
return rateLimit({
windowMs: limitConfig.windowMs,
max: limitConfig.max,
message: limitConfig.message,
statusCode: limitConfig.statusCode,
legacyHeaders: false,
standardHeaders: false,
keyGenerator: (req) => {
return getHash(getIP(req), 1);
},
handler: async (req, res, next) => {
if (getUserID === undefined || !await isUserVIP(await getHashCache(getUserID(req)))) {
return res.status(limitConfig.statusCode).send(limitConfig.message);
} else {
return next();
}
},
store: config.redis?.enabled ? new RedisStore({
sendCommand: (...args: string[]) => redis.sendCommand(args),
}) : null,
});
/**
 * Builds an express rate-limiting middleware for the given limit config.
 *
 * Keys requests by a hashed client IP. When the limit is hit, the handler
 * still lets the request through if the (optional) getUserID extractor is
 * provided and the user turns out to be a VIP; everyone else receives the
 * configured status code and message. Counts are stored in Redis when it is
 * enabled, otherwise in memory. If the limiter cannot be constructed at all,
 * a pass-through middleware is returned so the route keeps working.
 */
export function rateLimitMiddleware(limitConfig: RateLimitConfig, getUserID?: (req: Request) => UserID): RequestHandler {
    try {
        return rateLimit({
            windowMs: limitConfig.windowMs,
            max: limitConfig.max,
            message: limitConfig.message,
            statusCode: limitConfig.statusCode,
            legacyHeaders: false,
            standardHeaders: false,
            keyGenerator: (req) => getHash(getIP(req), 1),
            // eslint-disable-next-line @typescript-eslint/no-misused-promises
            handler: async (req, res, next) => {
                // VIPs are exempt from rate limiting when we can identify them
                const isVipUser = getUserID !== undefined
                    && await isUserVIP(await getHashCache(getUserID(req)));
                return isVipUser
                    ? next()
                    : res.status(limitConfig.statusCode).send(limitConfig.message);
            },
            store: config.redis?.enabled ? new RedisStore({
                sendCommand: (...args: string[]) => redis.sendCommand(args).catch((err) => Logger.error(err)) as Promise<RedisReply>,
            }) : null,
        });
    } catch (e) {
        Logger.error(`Rate limit error: ${e}`);
        return (req, res, next) => next();
    }
}

View File

@@ -6,8 +6,10 @@ import { getHash } from "../utils/getHash";
import { NextFunction, Request, Response } from "express";
export function userCounter(req: Request, res: Response, next: NextFunction): void {
axios.post(`${config.userCounterURL}/api/v1/addIP?hashedIP=${getHash(getIP(req), 1)}`)
.catch(() => Logger.debug(`Failing to connect to user counter at: ${config.userCounterURL}`));
if (req.method !== "OPTIONS") {
axios.post(`${config.userCounterURL}/api/v1/addIP?hashedIP=${getHash(getIP(req), 1)}`)
.catch(() => Logger.debug(`Failing to connect to user counter at: ${config.userCounterURL}`));
}
next();
}

74
src/routes/addFeature.ts Normal file
View File

@@ -0,0 +1,74 @@
import { getHashCache } from "../utils/getHashCache";
import { db } from "../databases/databases";
import { config } from "../config";
import { Request, Response } from "express";
import { isUserVIP } from "../utils/isUserVIP";
import { Feature, HashedUserID, UserID } from "../types/user.model";
import { Logger } from "../utils/logger";
import { QueryCacher } from "../utils/queryCacher";
// Expected request body for the addFeature endpoint.
interface AddFeatureRequest extends Request {
    body: {
        userID: HashedUserID; // user being granted/revoked the feature (already hashed)
        adminUserID: string; // private ID of the issuing VIP/admin; hashed before checks
        feature: string; // numeric Feature value, arrives as a string and is parseInt-ed
        enabled: string; // "false" revokes the feature; any other value grants it
    }
}
// Features each privilege level is permitted to grant.
// VIPs and admins can currently grant the same set.
const grantableFeatures = [
    Feature.ChapterSubmitter,
    Feature.FillerSubmitter
];
const allowedFeatures = {
    vip: [...grantableFeatures],
    admin: [...grantableFeatures]
};
/**
 * Grants or revokes a user feature flag.
 *
 * Requires both a target userID and the issuer's adminUserID (400 otherwise).
 * The issuer must be a VIP or the configured admin (403 otherwise), and the
 * requested feature must be in that issuer's allowed set (400 otherwise).
 * Granting is idempotent: a row is only inserted if one doesn't exist yet.
 * The feature cache for the user is cleared on success.
 */
export async function addFeature(req: AddFeatureRequest, res: Response): Promise<Response> {
    const { userID, adminUserID } = req.body;
    const feature = parseInt(req.body.feature) as Feature;
    // anything except the literal string "false" means "grant"
    const enabled = req.body?.enabled !== "false";

    // both the target and the issuer must be supplied
    if (!userID || !adminUserID) {
        return res.sendStatus(400);
    }

    // identify the issuer by their hashed ID
    const hashedAdminID = await getHashCache(adminUserID as UserID);
    const isAdmin = hashedAdminID === config.adminUserID;
    const isVIP = (await isUserVIP(hashedAdminID)) || isAdmin;
    if (!isVIP) {
        // not authorized
        return res.sendStatus(403);
    }

    try {
        const permittedFeatures = isAdmin ? allowedFeatures.admin : allowedFeatures.vip;
        if (!permittedFeatures.includes(feature)) {
            return res.status(400).send("Invalid feature");
        }

        if (enabled) {
            // insert only if the user doesn't already have this feature
            const existingFeature = await db.prepare("get", 'SELECT "feature" from "userFeatures" WHERE "userID" = ? AND "feature" = ?', [userID, feature]);
            if (!existingFeature) {
                await db.prepare("run", 'INSERT INTO "userFeatures" ("userID", "feature", "issuerUserID", "timeSubmitted") VALUES(?, ?, ?, ?)'
                    , [userID, feature, adminUserID, Date.now()]);
            }
        } else {
            await db.prepare("run", 'DELETE FROM "userFeatures" WHERE "userID" = ? AND "feature" = ?', [userID, feature]);
        }
        QueryCacher.clearFeatureCache(userID, feature);

        return res.sendStatus(200);
    } catch (e) {
        Logger.error(e as string);
        return res.sendStatus(500);
    }
}

View File

@@ -1,7 +1,5 @@
import { VideoID } from "../types/segments.model";
import { YouTubeAPI } from "../utils/youtubeApi";
import { APIVideoInfo } from "../types/youtubeApi.model";
import { config } from "../config";
import { getVideoDetails } from "../utils/getVideoDetails";
import { getHashCache } from "../utils/getHashCache";
import { privateDB } from "../databases/databases";
import { Request, Response } from "express";
@@ -20,15 +18,11 @@ interface AddUserAsTempVIPRequest extends Request {
}
}
function getYouTubeVideoInfo(videoID: VideoID, ignoreCache = false): Promise<APIVideoInfo> {
return (config.newLeafURLs) ? YouTubeAPI.listVideos(videoID, ignoreCache) : null;
}
const getChannelInfo = async (videoID: VideoID): Promise<{id: string | null, name: string | null }> => {
const videoInfo = await getYouTubeVideoInfo(videoID);
const videoInfo = await getVideoDetails(videoID);
return {
id: videoInfo?.data?.authorId,
name: videoInfo?.data?.author
id: videoInfo?.authorId,
name: videoInfo?.authorName
};
};

View File

@@ -5,7 +5,7 @@ import { config } from "../config";
import util from "util";
import fs from "fs";
import path from "path";
import { ChildProcess, exec, ExecOptions, spawn } from "child_process";
import { exec, ExecOptions } from "child_process";
const unlink = util.promisify(fs.unlink);
const ONE_MINUTE = 1000 * 60;
@@ -44,7 +44,7 @@ const credentials: ExecOptions = {
PGPASSWORD: String(config.postgres.password),
PGDATABASE: "sponsorTimes",
}
}
};
interface TableDumpList {
fileName: string;
@@ -75,6 +75,7 @@ function removeOutdatedDumps(exportPath: string): Promise<void> {
}, {});
// read files in export directory
// eslint-disable-next-line @typescript-eslint/no-misused-promises
fs.readdir(exportPath, async (err: any, files: string[]) => {
if (err) Logger.error(err);
if (err) return resolve();
@@ -232,7 +233,7 @@ async function queueDump(): Promise<void> {
resolve(error ? stderr : stdout);
});
})
});
dumpFiles.push({
fileName,

View File

@@ -0,0 +1,48 @@
import { Request, Response } from "express";
import { config } from "../config";
import { createAndSaveToken, TokenType } from "../utils/tokenUtils";
// Expected query string and route params for the generateToken endpoint.
interface GenerateTokenRequest extends Request {
    query: {
        code: string; // code exchanged via createAndSaveToken for a license key
        adminUserID?: string; // must equal config.adminUserID when requesting a "local" token
    },
    params: {
        type: TokenType; // token kind, e.g. TokenType.patreon or TokenType.local
    }
}
/**
 * Exchanges a code for a license key and renders it as a small HTML page.
 *
 * Only patreon tokens, or local tokens requested with the configured
 * adminUserID, may be generated. Returns 400 for missing inputs, 401 when
 * token creation fails, 403 for unauthorized token types.
 *
 * Bug fixes vs original: an unauthorized type previously fell through with
 * no response at all (hanging the request and letting the declared
 * Promise<Response> resolve to undefined); the "an license key" typo in the
 * user-facing failure page is also corrected.
 */
export async function generateTokenRequest(req: GenerateTokenRequest, res: Response): Promise<Response> {
    const { query: { code, adminUserID }, params: { type } } = req;

    if (!code || !type) {
        return res.status(400).send("Invalid request");
    }

    if (type === TokenType.patreon || (type === TokenType.local && adminUserID === config.adminUserID)) {
        const licenseKey = await createAndSaveToken(type, code);

        if (licenseKey) {
            return res.status(200).send(`
                <h1>
                    Your license key:
                </h1>
                <p>
                    <b>
                        ${licenseKey}
                    </b>
                </p>
                <p>
                    Copy this into the textbox in the other tab
                </p>
            `);
        } else {
            return res.status(401).send(`
                <h1>
                    Failed to generate a license key
                </h1>
            `);
        }
    }

    // unauthorized token type — previously no response was sent here
    return res.status(403).send("Invalid request");
}

View File

@@ -22,7 +22,7 @@ export async function getChapterNames(req: Request, res: Response): Promise<Resp
const descriptions = await db.prepare("all", `
SELECT "description"
FROM "sponsorTimes"
WHERE ("votes" > 0 OR ("views" > 100 AND "votes" >= 0)) AND "videoID" IN (
WHERE ("locked" = 1 OR "votes" > 0 OR ("views" > 25 AND "votes" >= 0)) AND "videoID" IN (
SELECT "videoID"
FROM "videoInfo"
WHERE "channelID" = ?

View File

@@ -18,7 +18,7 @@ export async function getSavedTimeForUser(req: Request, res: Response): Promise<
userID = await getHashCache(userID);
try {
const row = await db.prepare("get", 'SELECT SUM(((CASE WHEN "endTime" - "startTime" > ? THEN ? ELSE "endTime" - "startTime" END) / 60) * "views") as "minutesSaved" FROM "sponsorTimes" WHERE "userID" = ? AND "votes" > -1 AND "shadowHidden" != 1 ', [maxRewardTimePerSegmentInSeconds, maxRewardTimePerSegmentInSeconds, userID]);
const row = await db.prepare("get", 'SELECT SUM(((CASE WHEN "endTime" - "startTime" > ? THEN ? ELSE "endTime" - "startTime" END) / 60) * "views") as "minutesSaved" FROM "sponsorTimes" WHERE "userID" = ? AND "votes" > -1 AND "shadowHidden" != 1 ', [maxRewardTimePerSegmentInSeconds, maxRewardTimePerSegmentInSeconds, userID], { useReplica: true });
if (row.minutesSaved != null) {
return res.send({

View File

@@ -11,11 +11,16 @@ import { Logger } from "../utils/logger";
import { QueryCacher } from "../utils/queryCacher";
import { getReputation } from "../utils/reputation";
import { getService } from "../utils/getService";
import { promiseOrTimeout } from "../utils/promise";
async function prepareCategorySegments(req: Request, videoID: VideoID, service: Service, segments: DBSegment[], cache: SegmentCache = { shadowHiddenSegmentIPs: {} }, useCache: boolean): Promise<Segment[]> {
const shouldFilter: boolean[] = await Promise.all(segments.map(async (segment) => {
if (segment.votes < -1 && !segment.required) {
if (segment.required) {
return true; //required - always send
}
if (segment.hidden || segment.votes < -1) {
return false; //too untrustworthy, just ignore it
}
@@ -27,17 +32,25 @@ async function prepareCategorySegments(req: Request, videoID: VideoID, service:
if (cache.shadowHiddenSegmentIPs[videoID] === undefined) cache.shadowHiddenSegmentIPs[videoID] = {};
if (cache.shadowHiddenSegmentIPs[videoID][segment.timeSubmitted] === undefined) {
if (cache.userHashedIP === undefined && cache.userHashedIPPromise === undefined) {
cache.userHashedIPPromise = getHashCache((getIP(req) + config.globalSalt) as IPAddress);
}
const service = getService(req?.query?.service as string);
const fetchData = () => privateDB.prepare("all", 'SELECT "hashedIP" FROM "sponsorTimes" WHERE "videoID" = ? AND "timeSubmitted" = ? AND "service" = ?',
[videoID, segment.timeSubmitted, service]) as Promise<{ hashedIP: HashedIP }[]>;
cache.shadowHiddenSegmentIPs[videoID][segment.timeSubmitted] = await QueryCacher.get(fetchData, shadowHiddenIPKey(videoID, segment.timeSubmitted, service));
[videoID, segment.timeSubmitted, service], { useReplica: true }) as Promise<{ hashedIP: HashedIP }[]>;
try {
cache.shadowHiddenSegmentIPs[videoID][segment.timeSubmitted] = await promiseOrTimeout(QueryCacher.get(fetchData, shadowHiddenIPKey(videoID, segment.timeSubmitted, service)), 150);
} catch (e) {
// give up on shadowhide for now
cache.shadowHiddenSegmentIPs[videoID][segment.timeSubmitted] = null;
}
}
const ipList = cache.shadowHiddenSegmentIPs[videoID][segment.timeSubmitted];
if (ipList?.length > 0 && cache.userHashedIP === undefined) {
//hash the IP only if it's strictly necessary
cache.userHashedIP = await getHashCache((getIP(req) + config.globalSalt) as IPAddress);
cache.userHashedIP = await cache.userHashedIPPromise;
}
//if this isn't their ip, don't send it to them
const shouldShadowHide = cache.shadowHiddenSegmentIPs[videoID][segment.timeSubmitted]?.some(
@@ -133,7 +146,7 @@ async function getSegmentsByHash(req: Request, hashedVideoIDPrefix: VideoIDHash,
return acc;
}, {});
for (const [videoID, videoData] of Object.entries(segmentPerVideoID)) {
await Promise.all(Object.entries(segmentPerVideoID).map(async ([videoID, videoData]) => {
const data: VideoData = {
hash: videoData.hash,
segments: [],
@@ -153,7 +166,7 @@ async function getSegmentsByHash(req: Request, hashedVideoIDPrefix: VideoIDHash,
if (data.segments.length > 0) {
segments[videoID] = data;
}
}
}));
return segments;
} catch (err) {
@@ -166,9 +179,10 @@ async function getSegmentsFromDBByHash(hashedVideoIDPrefix: VideoIDHash, service
const fetchFromDB = () => db
.prepare(
"all",
`SELECT "videoID", "startTime", "endTime", "votes", "locked", "UUID", "userID", "category", "actionType", "videoDuration", "reputation", "shadowHidden", "hashedVideoID", "timeSubmitted", "description" FROM "sponsorTimes"
WHERE "hashedVideoID" LIKE ? AND "service" = ? AND "hidden" = 0 ORDER BY "startTime"`,
[`${hashedVideoIDPrefix}%`, service]
`SELECT "videoID", "startTime", "endTime", "votes", "locked", "UUID", "userID", "category", "actionType", "videoDuration", "hidden", "reputation", "shadowHidden", "hashedVideoID", "timeSubmitted", "description" FROM "sponsorTimes"
WHERE "hashedVideoID" LIKE ? AND "service" = ? ORDER BY "startTime"`,
[`${hashedVideoIDPrefix}%`, service],
{ useReplica: true }
) as Promise<DBSegment[]>;
if (hashedVideoIDPrefix.length === 4) {
@@ -182,9 +196,10 @@ async function getSegmentsFromDBByVideoID(videoID: VideoID, service: Service): P
const fetchFromDB = () => db
.prepare(
"all",
`SELECT "startTime", "endTime", "votes", "locked", "UUID", "userID", "category", "actionType", "videoDuration", "reputation", "shadowHidden", "timeSubmitted", "description" FROM "sponsorTimes"
WHERE "videoID" = ? AND "service" = ? AND "hidden" = 0 ORDER BY "startTime"`,
[videoID, service]
`SELECT "startTime", "endTime", "votes", "locked", "UUID", "userID", "category", "actionType", "videoDuration", "hidden", "reputation", "shadowHidden", "timeSubmitted", "description" FROM "sponsorTimes"
WHERE "videoID" = ? AND "service" = ? ORDER BY "startTime"`,
[videoID, service],
{ useReplica: true }
) as Promise<DBSegment[]>;
return await QueryCacher.get(fetchFromDB, skipSegmentsKey(videoID, service));
@@ -275,6 +290,9 @@ async function chooseSegments(videoID: VideoID, service: Service, segments: DBSe
//This allows new less voted items to still sometimes appear to give them a chance at getting votes.
//Segments with less than -1 votes are already ignored before this function is called
async function buildSegmentGroups(segments: DBSegment[]): Promise<OverlappingSegmentGroup[]> {
const reputationPromises = segments.map(segment =>
segment.userID ? getReputation(segment.userID).catch((e) => Logger.error(e)) : null);
//Create groups of segments that are similar to eachother
//Segments must be sorted by their startTime so that we can build groups chronologically:
//1. As long as the segments' startTime fall inside the currentGroup, we keep adding them to that group
@@ -283,7 +301,8 @@ async function buildSegmentGroups(segments: DBSegment[]): Promise<OverlappingSeg
let overlappingSegmentsGroups: OverlappingSegmentGroup[] = [];
let currentGroup: OverlappingSegmentGroup;
let cursor = -1; //-1 to make sure that, even if the 1st segment starts at 0, a new group is created
for (const segment of segments) {
for (let i = 0; i < segments.length; i++) {
const segment = segments[i];
if (segment.startTime >= cursor) {
currentGroup = { segments: [], votes: 0, reputation: 0, locked: false, required: false };
overlappingSegmentsGroups.push(currentGroup);
@@ -295,7 +314,7 @@ async function buildSegmentGroups(segments: DBSegment[]): Promise<OverlappingSeg
currentGroup.votes += segment.votes;
}
if (segment.userID) segment.reputation = Math.min(segment.reputation, await getReputation(segment.userID));
if (segment.userID) segment.reputation = Math.min(segment.reputation, (await reputationPromises[i]) || Infinity);
if (segment.reputation > 0) {
currentGroup.reputation += segment.reputation;
}

View File

@@ -3,27 +3,44 @@ import { Logger } from "../utils/logger";
import { Request, Response } from "express";
import os from "os";
import redis from "../utils/redis";
import { promiseOrTimeout } from "../utils/promise";
export async function getStatus(req: Request, res: Response): Promise<Response> {
const startTime = Date.now();
let value = req.params.value as string[] | string;
value = Array.isArray(value) ? value[0] : value;
let processTime, redisProcessTime = -1;
try {
const dbVersion = (await db.prepare("get", "SELECT key, value FROM config where key = ?", ["version"])).value;
const dbVersion = await promiseOrTimeout(db.prepare("get", "SELECT key, value FROM config where key = ?", ["version"]), 5000)
.then(e => {
processTime = Date.now() - startTime;
return e.value;
})
.catch(e => {
Logger.error(`status: SQL query timed out: ${e}`);
return -1;
});
let statusRequests: unknown = 0;
try {
const numberRequests = await redis.increment("statusRequest");
statusRequests = numberRequests?.[0];
} catch (error) { } // eslint-disable-line no-empty
const numberRequests = await promiseOrTimeout(redis.increment("statusRequest"), 5000)
.then(e => {
redisProcessTime = Date.now() - startTime;
return e;
}).catch(e => {
Logger.error(`status: redis increment timed out ${e}`);
return [-1];
});
statusRequests = numberRequests?.[0];
const statusValues: Record<string, any> = {
uptime: process.uptime(),
commit: (global as any).HEADCOMMIT || "unknown",
db: Number(dbVersion),
startTime,
processTime: Date.now() - startTime,
processTime,
redisProcessTime,
loadavg: os.loadavg().slice(1), // only return 5 & 15 minute load average
statusRequests
statusRequests,
hostname: os.hostname()
};
return value ? res.send(JSON.stringify(statusValues[value])) : res.send(statusValues);
} catch (err) {

View File

@@ -4,6 +4,7 @@ import { config } from "../config";
import { Request, Response } from "express";
const MILLISECONDS_IN_MINUTE = 60000;
// eslint-disable-next-line @typescript-eslint/no-misused-promises
const getTopCategoryUsersWithCache = createMemoryCache(generateTopCategoryUsersStats, config.getTopUsersCacheTimeMinutes * MILLISECONDS_IN_MINUTE);
const maxRewardTimePerSegmentInSeconds = config.maxRewardTimePerSegmentInSeconds ?? 86400;

View File

@@ -4,6 +4,7 @@ import { config } from "../config";
import { Request, Response } from "express";
const MILLISECONDS_IN_MINUTE = 60000;
// eslint-disable-next-line @typescript-eslint/no-misused-promises
const getTopUsersWithCache = createMemoryCache(generateTopUsersStats, config.getTopUsersCacheTimeMinutes * MILLISECONDS_IN_MINUTE);
const maxRewardTimePerSegmentInSeconds = config.maxRewardTimePerSegmentInSeconds ?? 86400;
@@ -34,7 +35,7 @@ async function generateTopUsersStats(sortBy: string, categoryStatsEnabled = fals
SUM(((CASE WHEN "sponsorTimes"."endTime" - "sponsorTimes"."startTime" > ? THEN ? ELSE "sponsorTimes"."endTime" - "sponsorTimes"."startTime" END) / 60) * "sponsorTimes"."views") as "minutesSaved",
SUM("votes") as "userVotes", ${additionalFields} COALESCE("userNames"."userName", "sponsorTimes"."userID") as "userName" FROM "sponsorTimes" LEFT JOIN "userNames" ON "sponsorTimes"."userID"="userNames"."userID"
LEFT JOIN "shadowBannedUsers" ON "sponsorTimes"."userID"="shadowBannedUsers"."userID"
WHERE "sponsorTimes"."votes" > -1 AND "sponsorTimes"."shadowHidden" != 1 AND "shadowBannedUsers"."userID" IS NULL
WHERE "sponsorTimes"."votes" > -1 AND "sponsorTimes"."shadowHidden" != 1 AND "sponsorTimes"."actionType" != 'chapter' AND "shadowBannedUsers"."userID" IS NULL
GROUP BY COALESCE("userName", "sponsorTimes"."userID") HAVING SUM("votes") > 20
ORDER BY "${sortBy}" DESC LIMIT 100`, [maxRewardTimePerSegmentInSeconds, maxRewardTimePerSegmentInSeconds]);

View File

@@ -10,14 +10,15 @@ let firefoxUsersCache = 0;
// By the privacy friendly user counter
let apiUsersCache = 0;
let lastUserCountCheck = 0;
updateExtensionUsers();
export async function getTotalStats(req: Request, res: Response): Promise<void> {
const userCountQuery = `(SELECT COUNT(*) FROM (SELECT DISTINCT "userID" from "sponsorTimes") t) "userCount",`;
const row = await db.prepare("get", `SELECT ${req.query.countContributingUsers ? userCountQuery : ""} COUNT(*) as "totalSubmissions",
SUM("views") as "viewCount", SUM(("endTime" - "startTime") / 60 * "views") as "minutesSaved" FROM "sponsorTimes" WHERE "shadowHidden" != 1 AND "votes" >= 0`, []);
SUM("views") as "viewCount", SUM(("endTime" - "startTime") / 60 * "views") as "minutesSaved" FROM "sponsorTimes" WHERE "shadowHidden" != 1 AND "votes" >= 0 AND "actionType" != 'chapter'`, []);
if (row !== undefined) {
const extensionUsers = chromeUsersCache + firefoxUsersCache;

View File

@@ -5,16 +5,18 @@ import { Request, Response } from "express";
import { Logger } from "../utils/logger";
import { HashedUserID, UserID } from "../types/user.model";
import { getReputation } from "../utils/reputation";
import { SegmentUUID } from "../types/segments.model";
import { Category, SegmentUUID } from "../types/segments.model";
import { config } from "../config";
import { canSubmit } from "../utils/permissions";
import { oneOf } from "../utils/promise";
const maxRewardTime = config.maxRewardTimePerSegmentInSeconds;
async function dbGetSubmittedSegmentSummary(userID: HashedUserID): Promise<{ minutesSaved: number, segmentCount: number }> {
try {
const row = await db.prepare("get",
`SELECT SUM(((CASE WHEN "endTime" - "startTime" > ? THEN ? ELSE "endTime" - "startTime" END) / 60) * "views") as "minutesSaved",
`SELECT SUM(CASE WHEN "actionType" = 'chapter' THEN 0 ELSE ((CASE WHEN "endTime" - "startTime" > ? THEN ? ELSE "endTime" - "startTime" END) / 60) * "views" END) as "minutesSaved",
count(*) as "segmentCount" FROM "sponsorTimes"
WHERE "userID" = ? AND "votes" > -2 AND "shadowHidden" != 1`, [maxRewardTime, maxRewardTime, userID]);
WHERE "userID" = ? AND "votes" > -2 AND "shadowHidden" != 1`, [maxRewardTime, maxRewardTime, userID], { useReplica: true });
if (row.minutesSaved != null) {
return {
minutesSaved: row.minutesSaved,
@@ -33,7 +35,7 @@ async function dbGetSubmittedSegmentSummary(userID: HashedUserID): Promise<{ min
async function dbGetIgnoredSegmentCount(userID: HashedUserID): Promise<number> {
try {
const row = await db.prepare("get", `SELECT COUNT(*) as "ignoredSegmentCount" FROM "sponsorTimes" WHERE "userID" = ? AND ( "votes" <= -2 OR "shadowHidden" = 1 )`, [userID]);
const row = await db.prepare("get", `SELECT COUNT(*) as "ignoredSegmentCount" FROM "sponsorTimes" WHERE "userID" = ? AND ( "votes" <= -2 OR "shadowHidden" = 1 )`, [userID], { useReplica: true });
return row?.ignoredSegmentCount ?? 0;
} catch (err) {
return null;
@@ -51,7 +53,7 @@ async function dbGetUsername(userID: HashedUserID) {
async function dbGetViewsForUser(userID: HashedUserID) {
try {
const row = await db.prepare("get", `SELECT SUM("views") as "viewCount" FROM "sponsorTimes" WHERE "userID" = ? AND "votes" > -2 AND "shadowHidden" != 1`, [userID]);
const row = await db.prepare("get", `SELECT SUM("views") as "viewCount" FROM "sponsorTimes" WHERE "userID" = ? AND "votes" > -2 AND "shadowHidden" != 1`, [userID], { useReplica: true });
return row?.viewCount ?? 0;
} catch (err) {
return false;
@@ -60,7 +62,7 @@ async function dbGetViewsForUser(userID: HashedUserID) {
async function dbGetIgnoredViewsForUser(userID: HashedUserID) {
try {
const row = await db.prepare("get", `SELECT SUM("views") as "ignoredViewCount" FROM "sponsorTimes" WHERE "userID" = ? AND ( "votes" <= -2 OR "shadowHidden" = 1 )`, [userID]);
const row = await db.prepare("get", `SELECT SUM("views") as "ignoredViewCount" FROM "sponsorTimes" WHERE "userID" = ? AND ( "votes" <= -2 OR "shadowHidden" = 1 )`, [userID], { useReplica: true });
return row?.ignoredViewCount ?? 0;
} catch (err) {
return false;
@@ -69,7 +71,7 @@ async function dbGetIgnoredViewsForUser(userID: HashedUserID) {
async function dbGetWarningsForUser(userID: HashedUserID): Promise<number> {
try {
const row = await db.prepare("get", `SELECT COUNT(*) as total FROM "warnings" WHERE "userID" = ? AND "enabled" = 1`, [userID]);
const row = await db.prepare("get", `SELECT COUNT(*) as total FROM "warnings" WHERE "userID" = ? AND "enabled" = 1`, [userID], { useReplica: true });
return row?.total ?? 0;
} catch (err) {
Logger.error(`Couldn't get warnings for user ${userID}. returning 0`);
@@ -79,7 +81,7 @@ async function dbGetWarningsForUser(userID: HashedUserID): Promise<number> {
async function dbGetLastSegmentForUser(userID: HashedUserID): Promise<SegmentUUID> {
try {
const row = await db.prepare("get", `SELECT "UUID" FROM "sponsorTimes" WHERE "userID" = ? ORDER BY "timeSubmitted" DESC LIMIT 1`, [userID]);
const row = await db.prepare("get", `SELECT "UUID" FROM "sponsorTimes" WHERE "userID" = ? ORDER BY "timeSubmitted" DESC LIMIT 1`, [userID], { useReplica: true });
return row?.UUID ?? null;
} catch (err) {
return null;
@@ -88,7 +90,7 @@ async function dbGetLastSegmentForUser(userID: HashedUserID): Promise<SegmentUUI
async function dbGetActiveWarningReasonForUser(userID: HashedUserID): Promise<string> {
try {
const row = await db.prepare("get", `SELECT reason FROM "warnings" WHERE "userID" = ? AND "enabled" = 1 ORDER BY "issueTime" DESC LIMIT 1`, [userID]);
const row = await db.prepare("get", `SELECT reason FROM "warnings" WHERE "userID" = ? AND "enabled" = 1 ORDER BY "issueTime" DESC LIMIT 1`, [userID], { useReplica: true });
return row?.reason ?? "";
} catch (err) {
Logger.error(`Couldn't get reason for user ${userID}. returning blank`);
@@ -98,13 +100,29 @@ async function dbGetActiveWarningReasonForUser(userID: HashedUserID): Promise<st
async function dbGetBanned(userID: HashedUserID): Promise<boolean> {
try {
const row = await db.prepare("get", `SELECT count(*) as "userCount" FROM "shadowBannedUsers" WHERE "userID" = ? LIMIT 1`, [userID]);
const row = await db.prepare("get", `SELECT count(*) as "userCount" FROM "shadowBannedUsers" WHERE "userID" = ? LIMIT 1`, [userID], { useReplica: true });
return row?.userCount > 0 ?? false;
} catch (err) {
return false;
}
}
/**
 * Computes, for every configured category, whether the user may submit to it.
 *
 * Improvement vs original: the canSubmit checks were awaited one-by-one in a
 * loop; they are independent read-only lookups, so they now run in parallel
 * via Promise.all. Result shape is unchanged: { [category]: canSubmit }.
 */
async function getPermissions(userID: HashedUserID): Promise<Record<string, boolean>> {
    const entries = await Promise.all(config.categoryList.map(async (category) =>
        [category, (await canSubmit(userID, category as Category)).canSubmit] as const
    ));
    return Object.fromEntries(entries);
}
// Whether the user gets free chapter access. Granted when any of these holds
// (combined via oneOf — presumably resolves as soon as one check succeeds;
// see utils/promise): the user is a VIP, they have a positive-reputation
// submission older than the 1663872563000 cutoff (~2022-09-22), or they have
// any submission older than 1590969600000 (2020-06-01 UTC).
async function getFreeChaptersAccess(userID: HashedUserID): Promise<boolean> {
    const hasReputableOldSubmission = async (): Promise<boolean> =>
        !!(await db.prepare("get", `SELECT "timeSubmitted" FROM "sponsorTimes" WHERE "reputation" > 0 AND "timeSubmitted" < 1663872563000 AND "userID" = ? LIMIT 1`, [userID], { useReplica: true }));
    const hasAncientSubmission = async (): Promise<boolean> =>
        !!(await db.prepare("get", `SELECT "timeSubmitted" FROM "sponsorTimes" WHERE "timeSubmitted" < 1590969600000 AND "userID" = ? LIMIT 1`, [userID], { useReplica: true }));

    return oneOf([isUserVIP(userID), hasReputableOldSubmission(), hasAncientSubmission()]);
}
type cases = Record<string, any>
const executeIfFunction = (f: any) =>
@@ -119,16 +137,18 @@ const functionSwitch = (cases: cases) => (defaultCase: string) => (key: string)
const dbGetValue = (userID: HashedUserID, property: string): Promise<string|SegmentUUID|number> => {
return functionSwitch({
userID,
userName: dbGetUsername(userID),
ignoredSegmentCount: dbGetIgnoredSegmentCount(userID),
viewCount: dbGetViewsForUser(userID),
ignoredViewCount: dbGetIgnoredViewsForUser(userID),
warnings: dbGetWarningsForUser(userID),
warningReason: dbGetActiveWarningReasonForUser(userID),
banned: dbGetBanned(userID),
reputation: getReputation(userID),
vip: isUserVIP(userID),
lastSegmentID: dbGetLastSegmentForUser(userID),
userName: () => dbGetUsername(userID),
ignoredSegmentCount: () => dbGetIgnoredSegmentCount(userID),
viewCount: () => dbGetViewsForUser(userID),
ignoredViewCount: () => dbGetIgnoredViewsForUser(userID),
warnings: () => dbGetWarningsForUser(userID),
warningReason: () => dbGetActiveWarningReasonForUser(userID),
banned: () => dbGetBanned(userID),
reputation: () => getReputation(userID),
vip: () => isUserVIP(userID),
lastSegmentID: () => dbGetLastSegmentForUser(userID),
permissions: () => getPermissions(userID),
freeChaptersAccess: () => getFreeChaptersAccess(userID)
})("")(property);
};
@@ -138,7 +158,7 @@ async function getUserInfo(req: Request, res: Response): Promise<Response> {
const defaultProperties: string[] = ["userID", "userName", "minutesSaved", "segmentCount", "ignoredSegmentCount",
"viewCount", "ignoredViewCount", "warnings", "warningReason", "reputation",
"vip", "lastSegmentID"];
const allProperties: string[] = [...defaultProperties, "banned"];
const allProperties: string[] = [...defaultProperties, "banned", "permissions", "freeChaptersAccess"];
let paramValues: string[] = req.query.values
? JSON.parse(req.query.values as string)
: req.query.value

View File

@@ -21,6 +21,7 @@ async function dbGetUserSummary(userID: HashedUserID, fetchCategoryStats: boolea
SUM(CASE WHEN "category" = 'poi_highlight' THEN 1 ELSE 0 END) as "categorySumHighlight",
SUM(CASE WHEN "category" = 'filler' THEN 1 ELSE 0 END) as "categorySumFiller",
SUM(CASE WHEN "category" = 'exclusive_access' THEN 1 ELSE 0 END) as "categorySumExclusiveAccess",
SUM(CASE WHEN "category" = 'chapter' THEN 1 ELSE 0 END) as "categorySumChapter",
`;
}
if (fetchActionTypeStats) {
@@ -29,15 +30,16 @@ async function dbGetUserSummary(userID: HashedUserID, fetchCategoryStats: boolea
SUM(CASE WHEN "actionType" = 'mute' THEN 1 ELSE 0 END) as "typeSumMute",
SUM(CASE WHEN "actionType" = 'full' THEN 1 ELSE 0 END) as "typeSumFull",
SUM(CASE WHEN "actionType" = 'poi' THEN 1 ELSE 0 END) as "typeSumPoi",
SUM(CASE WHEN "actionType" = 'chapter' THEN 1 ELSE 0 END) as "typeSumChapter",
`;
}
try {
const row = await db.prepare("get", `
SELECT SUM(((CASE WHEN "endTime" - "startTime" > ? THEN ? ELSE "endTime" - "startTime" END) / 60) * "views") as "minutesSaved",
SELECT SUM(CASE WHEN "actionType" = 'chapter' THEN 0 ELSE ((CASE WHEN "endTime" - "startTime" > ? THEN ? ELSE "endTime" - "startTime" END) / 60) * "views" END) as "minutesSaved",
${additionalQuery}
count(*) as "segmentCount"
FROM "sponsorTimes"
WHERE "userID" = ? AND "votes" > -2 AND "shadowHidden" !=1`,
WHERE "userID" = ? AND "votes" > -2 AND "shadowHidden" != 1`,
[maxRewardTimePerSegmentInSeconds, maxRewardTimePerSegmentInSeconds, userID]);
const source = (row.minutesSaved != null) ? row : {};
const handler = { get: (target: Record<string, any>, name: string) => target?.[name] || 0 };
@@ -60,6 +62,7 @@ async function dbGetUserSummary(userID: HashedUserID, fetchCategoryStats: boolea
poi_highlight: proxy.categorySumHighlight,
filler: proxy.categorySumFiller,
exclusive_access: proxy.categorySumExclusiveAccess,
chapter: proxy.categorySumChapter,
};
}
if (fetchActionTypeStats) {
@@ -67,7 +70,8 @@ async function dbGetUserSummary(userID: HashedUserID, fetchCategoryStats: boolea
skip: proxy.typeSumSkip,
mute: proxy.typeSumMute,
full: proxy.typeSumFull,
poi: proxy.typeSumPoi
poi: proxy.typeSumPoi,
chapter: proxy.typeSumChapter,
};
}
return result;

View File

@@ -15,7 +15,7 @@ export async function getUsername(req: Request, res: Response): Promise<Response
userID = await getHashCache(userID);
try {
const row = await db.prepare("get", `SELECT "userName" FROM "userNames" WHERE "userID" = ?`, [userID]);
const row = await db.prepare("get", `SELECT "userName" FROM "userNames" WHERE "userID" = ?`, [userID], { useReplica: true });
if (row !== undefined) {
return res.send({

View File

@@ -15,7 +15,7 @@ export async function getViewsForUser(req: Request, res: Response): Promise<Resp
userID = await getHashCache(userID);
try {
const row = await db.prepare("get", `SELECT SUM("views") as "viewCount" FROM "sponsorTimes" WHERE "userID" = ?`, [userID]);
const row = await db.prepare("get", `SELECT SUM("views") as "viewCount" FROM "sponsorTimes" WHERE "userID" = ?`, [userID], { useReplica: true });
//increase the view count by one
if (row.viewCount != null) {

View File

@@ -1,7 +1,7 @@
import { config } from "../config";
import { Logger } from "../utils/logger";
import { db, privateDB } from "../databases/databases";
import { getMaxResThumbnail, YouTubeAPI } from "../utils/youtubeApi";
import { getMaxResThumbnail } from "../utils/youtubeApi";
import { getSubmissionUUID } from "../utils/getSubmissionUUID";
import { getHash } from "../utils/getHash";
import { getHashCache } from "../utils/getHashCache";
@@ -13,7 +13,6 @@ import { ActionType, Category, IncomingSegment, IPAddress, SegmentUUID, Service,
import { deleteLockCategories } from "./deleteLockCategories";
import { QueryCacher } from "../utils/queryCacher";
import { getReputation } from "../utils/reputation";
import { APIVideoData, APIVideoInfo } from "../types/youtubeApi.model";
import { HashedUserID, UserID } from "../types/user.model";
import { isUserVIP } from "../utils/isUserVIP";
import { isUserTempVIP } from "../utils/isUserTempVIP";
@@ -21,6 +20,8 @@ import { parseUserAgent } from "../utils/userAgent";
import { getService } from "../utils/getService";
import axios from "axios";
import { vote } from "./voteOnSponsorTime";
import { canSubmit } from "../utils/permissions";
import { getVideoDetails, videoDetails } from "../utils/getVideoDetails";
type CheckResult = {
pass: boolean,
@@ -34,7 +35,7 @@ const CHECK_PASS: CheckResult = {
errorCode: 0
};
async function sendWebhookNotification(userID: string, videoID: string, UUID: string, submissionCount: number, youtubeData: APIVideoData, { submissionStart, submissionEnd }: { submissionStart: number; submissionEnd: number; }, segmentInfo: any) {
async function sendWebhookNotification(userID: string, videoID: string, UUID: string, submissionCount: number, youtubeData: videoDetails, { submissionStart, submissionEnd }: { submissionStart: number; submissionEnd: number; }, segmentInfo: any) {
const row = await db.prepare("get", `SELECT "userName" FROM "userNames" WHERE "userID" = ?`, [userID]);
const userName = row !== undefined ? row.userName : null;
@@ -47,7 +48,7 @@ async function sendWebhookNotification(userID: string, videoID: string, UUID: st
"video": {
"id": videoID,
"title": youtubeData?.title,
"thumbnail": getMaxResThumbnail(youtubeData) || null,
"thumbnail": getMaxResThumbnail(videoID),
"url": `https://www.youtube.com/watch?v=${videoID}`,
},
"submission": {
@@ -63,19 +64,16 @@ async function sendWebhookNotification(userID: string, videoID: string, UUID: st
});
}
async function sendWebhooks(apiVideoInfo: APIVideoInfo, userID: string, videoID: string, UUID: string, segmentInfo: any, service: Service) {
if (apiVideoInfo && service == Service.YouTube) {
async function sendWebhooks(apiVideoDetails: videoDetails, userID: string, videoID: string, UUID: string, segmentInfo: any, service: Service) {
if (apiVideoDetails && service == Service.YouTube) {
const userSubmissionCountRow = await db.prepare("get", `SELECT count(*) as "submissionCount" FROM "sponsorTimes" WHERE "userID" = ?`, [userID]);
const { data, err } = apiVideoInfo;
if (err) return;
const startTime = parseFloat(segmentInfo.segment[0]);
const endTime = parseFloat(segmentInfo.segment[1]);
sendWebhookNotification(userID, videoID, UUID, userSubmissionCountRow.submissionCount, data, {
sendWebhookNotification(userID, videoID, UUID, userSubmissionCountRow.submissionCount, apiVideoDetails, {
submissionStart: startTime,
submissionEnd: endTime,
}, segmentInfo);
}, segmentInfo).catch(Logger.error);
// If it is a first time submission
// Then send a notification to discord
@@ -83,7 +81,7 @@ async function sendWebhooks(apiVideoInfo: APIVideoInfo, userID: string, videoID:
axios.post(config.discordFirstTimeSubmissionsWebhookURL, {
embeds: [{
title: data?.title,
title: apiVideoDetails.title,
url: `https://www.youtube.com/watch?v=${videoID}&t=${(parseInt(startTime.toFixed(0)) - 2)}s#requiredSegment=${UUID}`,
description: `Submission ID: ${UUID}\
\n\nTimestamp: \
@@ -94,7 +92,7 @@ async function sendWebhooks(apiVideoInfo: APIVideoInfo, userID: string, videoID:
name: userID,
},
thumbnail: {
url: getMaxResThumbnail(data) || "",
url: getMaxResThumbnail(videoID),
},
}],
})
@@ -119,18 +117,10 @@ async function sendWebhooks(apiVideoInfo: APIVideoInfo, userID: string, videoID:
// Looks like this was broken for no defined youtube key - fixed but IMO we shouldn't return
// false for a pass - it was confusing and lead to this bug - any use of this function in
// the future could have the same problem.
async function autoModerateSubmission(apiVideoInfo: APIVideoInfo,
async function autoModerateSubmission(apiVideoDetails: videoDetails,
submission: { videoID: VideoID; userID: UserID; segments: IncomingSegment[], service: Service, videoDuration: number }) {
const apiVideoDuration = (apiVideoInfo: APIVideoInfo) => {
if (!apiVideoInfo) return undefined;
const { err, data } = apiVideoInfo;
// return undefined if API error
if (err) return undefined;
return data?.lengthSeconds;
};
// get duration from API
const apiDuration = apiVideoDuration(apiVideoInfo);
const apiDuration = apiVideoDetails.duration;
// if API fail or returns 0, get duration from client
const duration = apiDuration || submission.videoDuration;
// return false on undefined or 0
@@ -138,14 +128,16 @@ async function autoModerateSubmission(apiVideoInfo: APIVideoInfo,
const segments = submission.segments;
// map all times to float array
const allSegmentTimes = segments.map(segment => [parseFloat(segment.segment[0]), parseFloat(segment.segment[1])]);
const allSegmentTimes = segments.filter((s) => s.actionType !== ActionType.Chapter)
.map(segment => [parseFloat(segment.segment[0]), parseFloat(segment.segment[1])]);
// add previous submissions by this user
const allSubmittedByUser = await db.prepare("all", `SELECT "startTime", "endTime" FROM "sponsorTimes" WHERE "userID" = ? AND "videoID" = ? AND "votes" > -1 AND "hidden" = 0`, [submission.userID, submission.videoID]);
const allSubmittedByUser = await db.prepare("all", `SELECT "startTime", "endTime" FROM "sponsorTimes" WHERE "userID" = ? AND "videoID" = ? AND "votes" > -1 AND "actionType" != 'chapter' AND "hidden" = 0`
, [submission.userID, submission.videoID]) as { startTime: string, endTime: string }[];
if (allSubmittedByUser) {
//add segments the user has previously submitted
const allSubmittedTimes = allSubmittedByUser.map((segment: { startTime: string, endTime: string }) => [parseFloat(segment.startTime), parseFloat(segment.endTime)]);
const allSubmittedTimes = allSubmittedByUser.map((segment) => [parseFloat(segment.startTime), parseFloat(segment.endTime)]);
allSegmentTimes.push(...allSubmittedTimes);
}
@@ -162,14 +154,6 @@ async function autoModerateSubmission(apiVideoInfo: APIVideoInfo,
return false;
}
function getYouTubeVideoInfo(videoID: VideoID, ignoreCache = false): Promise<APIVideoInfo> {
if (config.newLeafURLs !== null) {
return YouTubeAPI.listVideos(videoID, ignoreCache);
} else {
return null;
}
}
async function checkUserActiveWarning(userID: string): Promise<CheckResult> {
const MILLISECONDS_IN_HOUR = 3600000;
const now = Date.now();
@@ -200,7 +184,8 @@ async function checkUserActiveWarning(userID: string): Promise<CheckResult> {
return CHECK_PASS;
}
function checkInvalidFields(videoID: VideoID, userID: UserID, segments: IncomingSegment[]): CheckResult {
async function checkInvalidFields(videoID: VideoID, userID: UserID, hashedUserID: HashedUserID
, segments: IncomingSegment[], videoDurationParam: number, userAgent: string): Promise<CheckResult> {
const invalidFields = [];
const errors = [];
if (typeof videoID !== "string" || videoID?.length == 0) {
@@ -223,10 +208,20 @@ function checkInvalidFields(videoID: VideoID, userID: UserID, segments: Incoming
}
if (typeof segmentPair.description !== "string"
|| (segmentPair.actionType === ActionType.Chapter && segmentPair.description.length > 60 )
|| (segmentPair.description.length !== 0 && segmentPair.actionType !== ActionType.Chapter)) {
invalidFields.push("segment description");
}
if (segmentPair.actionType === ActionType.Chapter && segmentPair.description.length > 200) {
invalidFields.push("chapter name (too long)");
}
const permission = await canSubmit(hashedUserID, segmentPair.category);
if (!permission.canSubmit) {
Logger.warn(`Rejecting submission due to lack of permissions for category ${segmentPair.category}: ${segmentPair.segment} ${hashedUserID} ${videoID} ${videoDurationParam} ${userAgent}`);
invalidFields.push(`permission to submit ${segmentPair.category}`);
errors.push(permission.reason);
}
}
if (invalidFields.length !== 0) {
@@ -235,7 +230,7 @@ function checkInvalidFields(videoID: VideoID, userID: UserID, segments: Incoming
const formattedErrors = errors.reduce((p, c, i) => p + (i !== 0 ? ". " : " ") + c, "");
return {
pass: false,
errorMessage: `No valid ${formattedFields} field(s) provided.${formattedErrors}`,
errorMessage: `No valid ${formattedFields}.${formattedErrors}`,
errorCode: 400
};
}
@@ -244,7 +239,7 @@ function checkInvalidFields(videoID: VideoID, userID: UserID, segments: Incoming
}
async function checkEachSegmentValid(rawIP: IPAddress, paramUserID: UserID, userID: HashedUserID, videoID: VideoID,
segments: IncomingSegment[], service: string, isVIP: boolean, lockedCategoryList: Array<any>): Promise<CheckResult> {
segments: IncomingSegment[], service: Service, isVIP: boolean, lockedCategoryList: Array<any>): Promise<CheckResult> {
for (let i = 0; i < segments.length; i++) {
if (segments[i] === undefined || segments[i].segment === undefined || segments[i].category === undefined) {
@@ -259,7 +254,13 @@ async function checkEachSegmentValid(rawIP: IPAddress, paramUserID: UserID, user
// Reject segment if it's in the locked categories list
const lockIndex = lockedCategoryList.findIndex(c => segments[i].category === c.category && segments[i].actionType === c.actionType);
if (!isVIP && lockIndex !== -1) {
// TODO: Do something about the fradulent submission
QueryCacher.clearSegmentCache({
videoID,
hashedVideoID: await getHashCache(videoID, 1),
service,
userID
});
Logger.warn(`Caught a submission for a locked category. userID: '${userID}', videoID: '${videoID}', category: '${segments[i].category}', times: ${segments[i].segment}`);
return {
pass: false,
@@ -325,10 +326,10 @@ async function checkEachSegmentValid(rawIP: IPAddress, paramUserID: UserID, user
return CHECK_PASS;
}
async function checkByAutoModerator(videoID: any, userID: any, segments: Array<any>, service:string, apiVideoInfo: APIVideoInfo, videoDuration: number): Promise<CheckResult> {
async function checkByAutoModerator(videoID: any, userID: any, segments: Array<any>, service:string, apiVideoDetails: videoDetails, videoDuration: number): Promise<CheckResult> {
// Auto moderator check
if (service == Service.YouTube) {
const autoModerateResult = await autoModerateSubmission(apiVideoInfo, { userID, videoID, segments, service, videoDuration });
const autoModerateResult = await autoModerateSubmission(apiVideoDetails, { userID, videoID, segments, service, videoDuration });
if (autoModerateResult) {
return {
pass: false,
@@ -357,12 +358,13 @@ async function updateDataIfVideoDurationChange(videoID: VideoID, service: Servic
const videoDurationChanged = (videoDuration: number) => videoDuration != 0
&& previousSubmissions.length > 0 && !previousSubmissions.some((e) => Math.abs(videoDuration - e.videoDuration) < 2);
let apiVideoInfo: APIVideoInfo = null;
let apiVideoDetails: videoDetails = null;
if (service == Service.YouTube) {
// Don't use cache if we don't know the video duration, or the client claims that it has changed
apiVideoInfo = await getYouTubeVideoInfo(videoID, !videoDurationParam || previousSubmissions.length === 0 || videoDurationChanged(videoDurationParam));
const ignoreCache = !videoDurationParam || previousSubmissions.length === 0 || videoDurationChanged(videoDurationParam);
apiVideoDetails = await getVideoDetails(videoID, ignoreCache);
}
const apiVideoDuration = apiVideoInfo?.data?.lengthSeconds as VideoDuration;
const apiVideoDuration = apiVideoDetails?.duration as VideoDuration;
if (!videoDurationParam || (apiVideoDuration && Math.abs(videoDurationParam - apiVideoDuration) > 2)) {
// If api duration is far off, take that one instead (it is only precise to seconds, not millis)
videoDuration = apiVideoDuration || 0 as VideoDuration;
@@ -375,12 +377,12 @@ async function updateDataIfVideoDurationChange(videoID: VideoID, service: Servic
await db.prepare("run", `UPDATE "sponsorTimes" SET "hidden" = 1 WHERE "UUID" = ?`, [submission.UUID]);
}
lockedCategoryList = [];
deleteLockCategories(videoID, null, null, service);
deleteLockCategories(videoID, null, null, service).catch(Logger.error);
}
return {
videoDuration,
apiVideoInfo,
apiVideoDetails,
lockedCategoryList
};
}
@@ -478,27 +480,26 @@ export async function postSkipSegments(req: Request, res: Response): Promise<Res
// eslint-disable-next-line prefer-const
let { videoID, userID: paramUserID, service, videoDuration, videoDurationParam, segments, userAgent } = preprocessInput(req);
const invalidCheckResult = checkInvalidFields(videoID, paramUserID, segments);
//hash the userID
const userID = await getHashCache(paramUserID || "");
const invalidCheckResult = await checkInvalidFields(videoID, paramUserID, userID, segments, videoDurationParam, userAgent);
if (!invalidCheckResult.pass) {
return res.status(invalidCheckResult.errorCode).send(invalidCheckResult.errorMessage);
}
//hash the userID
const userID = await getHashCache(paramUserID);
const userWarningCheckResult = await checkUserActiveWarning(userID);
if (!userWarningCheckResult.pass) {
Logger.warn(`Caught a submission for a warned user. userID: '${userID}', videoID: '${videoID}', category: '${segments.reduce<string>((prev, val) => `${prev} ${val.category}`, "")}', times: ${segments.reduce<string>((prev, val) => `${prev} ${val.segment}`, "")}`);
return res.status(userWarningCheckResult.errorCode).send(userWarningCheckResult.errorMessage);
}
const isVIP = await isUserVIP(userID);
const isTempVIP = await isUserTempVIP(userID, videoID);
const isVIP = (await isUserVIP(userID)) || (await isUserTempVIP(userID, videoID));
const rawIP = getIP(req);
const newData = await updateDataIfVideoDurationChange(videoID, service, videoDuration, videoDurationParam);
videoDuration = newData.videoDuration;
const { lockedCategoryList, apiVideoInfo } = newData;
const { lockedCategoryList, apiVideoDetails } = newData;
// Check if all submissions are correct
const segmentCheckResult = await checkEachSegmentValid(rawIP, paramUserID, userID, videoID, segments, service, isVIP, lockedCategoryList);
@@ -506,8 +507,8 @@ export async function postSkipSegments(req: Request, res: Response): Promise<Res
return res.status(segmentCheckResult.errorCode).send(segmentCheckResult.errorMessage);
}
if (!isVIP && !isTempVIP) {
const autoModerateCheckResult = await checkByAutoModerator(videoID, userID, segments, service, apiVideoInfo, videoDurationParam);
if (!isVIP) {
const autoModerateCheckResult = await checkByAutoModerator(videoID, userID, segments, service, apiVideoDetails, videoDurationParam);
if (!autoModerateCheckResult.pass) {
return res.status(autoModerateCheckResult.errorCode).send(autoModerateCheckResult.errorMessage);
}
@@ -560,10 +561,10 @@ export async function postSkipSegments(req: Request, res: Response): Promise<Res
//add to private db as well
await privateDB.prepare("run", `INSERT INTO "sponsorTimes" VALUES(?, ?, ?, ?)`, [videoID, hashedIP, timeSubmitted, service]);
await db.prepare("run", `INSERT INTO "videoInfo" ("videoID", "channelID", "title", "published", "genreUrl")
SELECT ?, ?, ?, ?, ?
await db.prepare("run", `INSERT INTO "videoInfo" ("videoID", "channelID", "title", "published")
SELECT ?, ?, ?, ?
WHERE NOT EXISTS (SELECT 1 FROM "videoInfo" WHERE "videoID" = ?)`, [
videoID, apiVideoInfo?.data?.authorId || "", apiVideoInfo?.data?.title || "", apiVideoInfo?.data?.published || 0, apiVideoInfo?.data?.genreUrl || "", videoID]);
videoID, apiVideoDetails?.authorId || "", apiVideoDetails?.title || "", apiVideoDetails?.published || 0, videoID]);
// Clear redis cache for this video
QueryCacher.clearSegmentCache({
@@ -591,7 +592,7 @@ export async function postSkipSegments(req: Request, res: Response): Promise<Res
}
for (let i = 0; i < segments.length; i++) {
sendWebhooks(apiVideoInfo, userID, videoID, UUIDs[i], segments[i], service);
sendWebhooks(apiVideoDetails, userID, videoID, UUIDs[i], segments[i], service).catch(Logger.error);
}
return res.json(newSegments);
}

View File

@@ -22,17 +22,15 @@ function checkExpiredWarning(warning: warningEntry): boolean {
}
export async function postWarning(req: Request, res: Response): Promise<Response> {
// exit early if no body passed in
if (!req.body.userID && !req.body.issuerUserID) return res.status(400).json({ "message": "Missing parameters" });
// Collect user input data
const issuerUserID: HashedUserID = await getHashCache(<UserID> req.body.issuerUserID);
const userID: UserID = req.body.userID;
if (!req.body.userID) return res.status(400).json({ "message": "Missing parameters" });
const issuerUserID: HashedUserID = req.body.issuerUserID ? await getHashCache(req.body.issuerUserID as UserID) : null;
const userID: HashedUserID = issuerUserID ? req.body.userID : await getHashCache(req.body.userID as UserID);
const issueTime = new Date().getTime();
const enabled: boolean = req.body.enabled ?? true;
const reason: string = req.body.reason ?? "";
// Ensure user is a VIP
if (!await isUserVIP(issuerUserID)) {
if ((!issuerUserID && enabled) || (issuerUserID && !await isUserVIP(issuerUserID))) {
Logger.warn(`Permission violation: User ${issuerUserID} attempted to warn user ${userID}.`);
return res.status(403).json({ "message": "Not a VIP" });
}
@@ -52,8 +50,8 @@ export async function postWarning(req: Request, res: Response): Promise<Response
// check if warning is still within issue time and warning is not enabled
} else if (checkExpiredWarning(previousWarning) ) {
await db.prepare(
"run", 'UPDATE "warnings" SET "enabled" = 1 WHERE "userID" = ? AND "issueTime" = ?',
[userID, previousWarning.issueTime]
"run", 'UPDATE "warnings" SET "enabled" = 1, "reason" = ? WHERE "userID" = ? AND "issueTime" = ?',
[reason, userID, previousWarning.issueTime]
);
resultStatus = "re-enabled";
} else {

View File

@@ -1,91 +0,0 @@
import { Request, Response } from "express";
import { db } from "../../databases/databases";
import { RatingType } from "../../types/ratings.model";
import { Service, VideoID, VideoIDHash } from "../../types/segments.model";
import { getService } from "../../utils/getService";
import { hashPrefixTester } from "../../utils/hashPrefixTester";
import { Logger } from "../../utils/logger";
import { QueryCacher } from "../../utils/queryCacher";
import { ratingHashKey } from "../../utils/redisKeys";
// Shape of a row returned from the "ratings" table queries in this module.
interface DBRating {
    videoID: VideoID,
    hashedVideoID: VideoIDHash,
    service: Service,
    type: RatingType,
    // aggregate number of votes of this type for the video
    count: number
}
// Returns rating counts for videos whose hashed ID starts with one of the
// requested prefixes. Prefixes may arrive as a JSON array (?hashPrefixes=),
// repeated ?prefix= params, a single ?prefix=, or the /:prefix route param.
// Responds 400 on malformed input, 404 with [] when nothing matched.
export async function getRating(req: Request, res: Response): Promise<Response> {
    let hashPrefixes: VideoIDHash[] = [];
    try {
        hashPrefixes = req.query.hashPrefixes
            ? JSON.parse(req.query.hashPrefixes as string)
            : Array.isArray(req.query.prefix)
                ? req.query.prefix
                : [req.query.prefix ?? req.params.prefix];
        if (!Array.isArray(hashPrefixes)) {
            return res.status(400).send("hashPrefixes parameter does not match format requirements.");
        }
        // Fix: the result of .map() was previously discarded, so uppercase
        // prefixes were never normalized before validation and querying.
        hashPrefixes = hashPrefixes.map((hashPrefix) => hashPrefix?.toLowerCase() as VideoIDHash);
    } catch(error) {
        return res.status(400).send("Bad parameter: hashPrefixes (invalid JSON)");
    }

    if (hashPrefixes.length === 0 || hashPrefixes.length > 75
        || hashPrefixes.some((hashPrefix) => !hashPrefix || !hashPrefixTester(hashPrefix))) {
        return res.status(400).send("Hash prefix does not match format requirements."); // Exit early on faulty prefix
    }

    // Rating types to include; defaults to both upvotes and downvotes.
    let types: RatingType[] = [];
    try {
        types = req.query.types
            ? JSON.parse(req.query.types as string)
            : req.query.type
                ? Array.isArray(req.query.type)
                    ? req.query.type
                    : [req.query.type]
                : [RatingType.Upvote, RatingType.Downvote];
        if (!Array.isArray(types)) {
            return res.status(400).send("Types parameter does not match format requirements.");
        }
        // Query values arrive as strings; coerce to the numeric enum values.
        types = types.map((type) => parseInt(type as unknown as string, 10));
    } catch(error) {
        return res.status(400).send("Bad parameter: types (invalid JSON)");
    }

    const service: Service = getService(req.query.service, req.body.service);

    try {
        const ratings = (await getRatings(hashPrefixes, service))
            .filter((rating) => types.includes(rating.type))
            .map((rating) => ({
                videoID: rating.videoID,
                hash: rating.hashedVideoID,
                service: rating.service,
                type: rating.type,
                count: rating.count
            }));
        // `ratings` is always an array here, so send it directly
        // (the previous `ratings ?? []` fallback was dead code).
        return res.status((ratings.length) ? 200 : 404)
            .send(ratings);
    } catch (err) {
        Logger.error(err as string);
        return res.sendStatus(500);
    }
}
// Fetches rating rows whose hashedVideoID starts with any of the given
// prefixes. When every prefix has the standard length of 4, results are
// served through the query cache (split per prefix); otherwise the database
// is queried directly with a combined regex prefix match.
function getRatings(hashPrefixes: VideoIDHash[], service: Service): Promise<DBRating[]> {
    const fetchFromDB = (prefixes: VideoIDHash[]): Promise<DBRating[]> => {
        const prefixPattern = `^(?:${prefixes.join("|")})`;
        return db.prepare(
            "all",
            `SELECT "videoID", "hashedVideoID", "type", "count" FROM "ratings" WHERE "hashedVideoID" ~* ? AND "service" = ? ORDER BY "hashedVideoID"`,
            [prefixPattern, service]
        ) as Promise<DBRating[]>;
    };

    const allStandardLength = hashPrefixes.every((hashPrefix) => hashPrefix.length === 4);
    if (allStandardLength) {
        return QueryCacher.getAndSplit(fetchFromDB, (prefix) => ratingHashKey(prefix, service), "hashedVideoID", hashPrefixes);
    }
    return fetchFromDB(hashPrefixes);
}

View File

@@ -1,53 +0,0 @@
import { Logger } from "../../utils/logger";
import { HashedUserID, UserID } from "../../types/user.model";
import { getHash } from "../../utils/getHash";
import { getHashCache } from "../../utils/getHashCache";
import { Request, Response } from "express";
import { Service, VideoID } from "../../types/segments.model";
import { QueryCacher } from "../../utils/queryCacher";
import { isUserVIP } from "../../utils/isUserVIP";
import { VideoIDHash } from "../../types/segments.model";
import { getService } from "../..//utils/getService";
// VIP-only endpoint: clears the cached ratings entry for a single video.
// Responds 400 on missing params, 403 for non-VIPs, 200 on success.
export async function postClearCache(req: Request, res: Response): Promise<Response> {
    const videoID = req.query.videoID as VideoID;
    const userID = req.query.userID as UserID;
    const service = getService(req.query.service as Service);

    const invalidFields = [];
    if (typeof videoID !== "string") {
        invalidFields.push("videoID");
    }
    if (typeof userID !== "string") {
        invalidFields.push("userID");
    }

    if (invalidFields.length !== 0) {
        // invalid request
        const fields = invalidFields.join(", ");
        return res.status(400).send(`No valid ${fields} field(s) provided`);
    }

    // hash the userID as early as possible so the raw ID is never used below
    const hashedUserID: HashedUserID = await getHashCache(userID);
    // hash videoID (cache entries are keyed by the hashed ID)
    const hashedVideoID: VideoIDHash = getHash(videoID, 1);

    // Ensure user is a VIP
    if (!(await isUserVIP(hashedUserID))){
        Logger.warn(`Permission violation: User ${hashedUserID} attempted to clear cache for video ${videoID}.`);
        return res.status(403).json({ "message": "Not a VIP" });
    }

    try {
        QueryCacher.clearRatingCache({
            hashedVideoID,
            service
        });
        return res.status(200).json({
            message: `Cache cleared on video ${videoID}`
        });
    } catch(err) {
        // Fix: log the failure instead of silently swallowing the error
        Logger.error(err as string);
        return res.sendStatus(500);
    }
}

View File

@@ -1,65 +0,0 @@
import { db, privateDB } from "../../databases/databases";
import { getHash } from "../../utils/getHash";
import { getHashCache } from "../../utils/getHashCache";
import { Logger } from "../../utils/logger";
import { Request, Response } from "express";
import { HashedUserID, UserID } from "../../types/user.model";
import { HashedIP, IPAddress, VideoID } from "../../types/segments.model";
import { getIP } from "../../utils/getIP";
import { getService } from "../../utils/getService";
import { RatingType, RatingTypes } from "../../types/ratings.model";
import { config } from "../../config";
import { QueryCacher } from "../../utils/queryCacher";
// Records or removes a user's rating of a video.
// Each vote is stored twice: the individual vote (with hashed user ID and
// salted hashed IP) in the private DB, and an aggregate per-video count in
// the public DB. Votes are deduplicated per user and per IP.
export async function postRating(req: Request, res: Response): Promise<Response> {
    const privateUserID = req.body.userID as UserID;
    const videoID = req.body.videoID as VideoID;
    const service = getService(req.query.service, req.body.service);
    const type = req.body.type as RatingType;
    // enabled = false means "undo my existing rating"
    const enabled = req.body.enabled ?? true;
    if (privateUserID == undefined || videoID == undefined || service == undefined || type == undefined
        || (typeof privateUserID !== "string") || (typeof videoID !== "string") || (typeof service !== "string")
        || (typeof type !== "number") || (enabled && (typeof enabled !== "boolean")) || !RatingTypes.includes(type)) {
        //invalid request
        // NOTE(review): a falsy non-boolean `enabled` (0, "") skips the type
        // check and is treated as an undo — confirm this is intended
        return res.sendStatus(400);
    }
    // Hash identifiers so raw user IDs / IPs are never persisted
    const hashedIP: HashedIP = getHash(getIP(req) + config.globalSalt as IPAddress, 1);
    const hashedUserID: HashedUserID = await getHashCache(privateUserID);
    const hashedVideoID = getHash(videoID, 1);
    try {
        // Check if this user has voted before
        const existingVote = await privateDB.prepare("get", `SELECT count(*) as "count" FROM "ratings" WHERE "videoID" = ? AND "service" = ? AND "type" = ? AND "userID" = ?`, [videoID, service, type, hashedUserID]);
        if (existingVote.count > 0 && !enabled) {
            // Undo the vote: drop the private record, decrement the aggregate
            await privateDB.prepare("run", `DELETE FROM "ratings" WHERE "videoID" = ? AND "service" = ? AND "type" = ? AND "userID" = ?`, [videoID, service, type, hashedUserID]);
            await db.prepare("run", `UPDATE "ratings" SET "count" = "count" - 1 WHERE "videoID" = ? AND "service" = ? AND type = ?`, [videoID, service, type]);
        } else if (existingVote.count === 0 && enabled) {
            // Make sure there hasn't been another vote from this IP
            const existingIPVote = (await privateDB.prepare("get", `SELECT count(*) as "count" FROM "ratings" WHERE "videoID" = ? AND "service" = ? AND "type" = ? AND "hashedIP" = ?`, [videoID, service, type, hashedIP]))
                .count > 0;
            if (existingIPVote) { // if exisiting vote, exit early instead
                return res.sendStatus(200);
            }
            // Create entry in privateDB
            await privateDB.prepare("run", `INSERT INTO "ratings" ("videoID", "service", "type", "userID", "timeSubmitted", "hashedIP") VALUES (?, ?, ?, ?, ?, ?)`, [videoID, service, type, hashedUserID, Date.now(), hashedIP]);
            // Check if general rating already exists, if so increase it
            const rating = await db.prepare("get", `SELECT count(*) as "count" FROM "ratings" WHERE "videoID" = ? AND "service" = ? AND type = ?`, [videoID, service, type]);
            if (rating.count > 0) {
                await db.prepare("run", `UPDATE "ratings" SET "count" = "count" + 1 WHERE "videoID" = ? AND "service" = ? AND type = ?`, [videoID, service, type]);
            } else {
                await db.prepare("run", `INSERT INTO "ratings" ("videoID", "service", "type", "count", "hashedVideoID") VALUES (?, ?, ?, 1, ?)`, [videoID, service, type, hashedVideoID]);
            }
        }
        // clear rating cache so readers see the updated count
        QueryCacher.clearRatingCache({ hashedVideoID, service });
        return res.sendStatus(200);
    } catch (err) {
        Logger.error(err as string);
        return res.sendStatus(500);
    }
}

87
src/routes/verifyToken.ts Normal file
View File

@@ -0,0 +1,87 @@
import axios from "axios";
import { Request, Response } from "express";
import { config } from "../config";
import { privateDB } from "../databases/databases";
import { Logger } from "../utils/logger";
import { getPatreonIdentity, PatronStatus, refreshToken, TokenType } from "../utils/tokenUtils";
import FormData from "form-data";
// Request carrying the single required "licenseKey" query parameter.
interface VerifyTokenRequest extends Request {
    query: {
        licenseKey: string;
    }
}
/**
 * Checks whether a license key grants access.
 * Resolution order: Patreon OAuth tokens stored for the key, then the local
 * "licenseKeys" table, then Gumroad product license verification.
 * Responds with { allowed: boolean } on success, 400 on a missing key.
 */
export async function verifyTokenRequest(req: VerifyTokenRequest, res: Response): Promise<Response> {
    const { query: { licenseKey } } = req;
    if (!licenseKey) {
        return res.status(400).send("Invalid request");
    }
    // 40 alphanumeric chars (Patreon-style) or 35 chars of A-Z/0-9/dash (Gumroad-style).
    // NOTE(review): pattern is unanchored, so any string *containing* such a run passes — confirm intended.
    const licenseRegex = /[a-zA-Z0-9]{40}|[A-Z0-9-]{35}/;
    if (!licenseRegex.test(licenseKey)) {
        return res.status(200).send({
            allowed: false
        });
    }

    const tokens = (await privateDB.prepare("get", `SELECT "accessToken", "refreshToken", "expiresIn" from "oauthLicenseKeys" WHERE "licenseKey" = ?`
        , [licenseKey])) as {accessToken: string, refreshToken: string, expiresIn: number};
    if (tokens) {
        const identity = await getPatreonIdentity(tokens.accessToken);
        // Refresh tokens that expire within 15 days; fire-and-forget.
        if (tokens.expiresIn < 15 * 24 * 60 * 60) {
            refreshToken(TokenType.patreon, licenseKey, tokens.refreshToken).catch(Logger.error);
        }

        if (identity) {
            const membership = identity.included?.[0]?.attributes;
            const allowed = !!membership && ((membership.patron_status === PatronStatus.active && membership.currently_entitled_amount_cents > 0)
                || (membership.patron_status === PatronStatus.former && membership.campaign_lifetime_support_cents > 300));

            return res.status(200).send({
                allowed
            });
        } else {
            // Bug fix: `res.status(500)` alone only sets the code and never sends
            // the response, leaving the request hanging until the client times out.
            return res.status(500).send();
        }
    } else {
        // Check local license keys table.
        const result = await privateDB.prepare("get", `SELECT "licenseKey" from "licenseKeys" WHERE "licenseKey" = ?`, [licenseKey]);
        if (result) {
            return res.status(200).send({
                allowed: true
            });
        } else {
            // Fall back to Gumroad product verification.
            return res.status(200).send({
                allowed: await checkAllGumroadProducts(licenseKey)
            });
        }
    }
}
/**
 * Verifies a license key against every configured Gumroad product,
 * returning as soon as one product reports the key as valid.
 */
async function checkAllGumroadProducts(licenseKey: string): Promise<boolean> {
    for (const permalink of config.gumroad.productPermalinks) {
        try {
            const form = new FormData();
            form.append("product_permalink", permalink);
            form.append("license_key", licenseKey);

            const response = await axios.request({
                url: "https://api.gumroad.com/v2/licenses/verify",
                data: form,
                method: "POST",
                headers: form.getHeaders()
            });

            const allowed = response.status === 200 && response.data?.success;
            if (allowed) return allowed;
        } catch (e) {
            // A failure for one product should not stop checking the rest.
            Logger.error(`Gumroad fetch for ${permalink} failed: ${e}`);
        }
    }

    return false;
}

View File

@@ -3,7 +3,6 @@ import { Logger } from "../utils/logger";
import { isUserVIP } from "../utils/isUserVIP";
import { isUserTempVIP } from "../utils/isUserTempVIP";
import { getMaxResThumbnail, YouTubeAPI } from "../utils/youtubeApi";
import { APIVideoInfo } from "../types/youtubeApi.model";
import { db, privateDB } from "../databases/databases";
import { dispatchEvent, getVoteAuthor, getVoteAuthorRaw } from "../utils/webhookUtils";
import { getFormattedTime } from "../utils/getFormattedTime";
@@ -11,9 +10,10 @@ import { getIP } from "../utils/getIP";
import { getHashCache } from "../utils/getHashCache";
import { config } from "../config";
import { UserID } from "../types/user.model";
import { DBSegment, Category, HashedIP, IPAddress, SegmentUUID, Service, VideoID, VideoIDHash, VideoDuration, ActionType } from "../types/segments.model";
import { DBSegment, Category, HashedIP, IPAddress, SegmentUUID, Service, VideoID, VideoIDHash, VideoDuration, ActionType, VoteType } from "../types/segments.model";
import { QueryCacher } from "../utils/queryCacher";
import axios from "axios";
import { getVideoDetails, videoDetails } from "../utils/getVideoDetails";
const voteTypes = {
normal: 0,
@@ -36,6 +36,7 @@ interface FinalResponse {
interface VoteData {
UUID: string;
nonAnonUserID: string;
originalType: VoteType;
voteTypeEnum: number;
isTempVIP: boolean;
isVIP: boolean;
@@ -51,20 +52,16 @@ interface VoteData {
finalResponse: FinalResponse;
}
function getYouTubeVideoInfo(videoID: VideoID, ignoreCache = false): Promise<APIVideoInfo> {
return config.newLeafURLs ? YouTubeAPI.listVideos(videoID, ignoreCache) : null;
}
const videoDurationChanged = (segmentDuration: number, APIDuration: number) => (APIDuration > 0 && Math.abs(segmentDuration - APIDuration) > 2);
async function updateSegmentVideoDuration(UUID: SegmentUUID) {
const { videoDuration, videoID, service } = await db.prepare("get", `select "videoDuration", "videoID", "service" from "sponsorTimes" where "UUID" = ?`, [UUID]);
let apiVideoInfo: APIVideoInfo = null;
let apiVideoDetails: videoDetails = null;
if (service == Service.YouTube) {
// don't use cache since we have no information about the video length
apiVideoInfo = await getYouTubeVideoInfo(videoID);
apiVideoDetails = await getVideoDetails(videoID);
}
const apiVideoDuration = apiVideoInfo?.data?.lengthSeconds as VideoDuration;
const apiVideoDuration = apiVideoDetails?.duration as VideoDuration;
if (videoDurationChanged(videoDuration, apiVideoDuration)) {
Logger.info(`Video duration changed for ${videoID} from ${videoDuration} to ${apiVideoDuration}`);
await db.prepare("run", `UPDATE "sponsorTimes" SET "videoDuration" = ? WHERE "UUID" = ?`, [apiVideoDuration, UUID]);
@@ -73,12 +70,12 @@ async function updateSegmentVideoDuration(UUID: SegmentUUID) {
async function checkVideoDuration(UUID: SegmentUUID) {
const { videoID, service } = await db.prepare("get", `select "videoID", "service" from "sponsorTimes" where "UUID" = ?`, [UUID]);
let apiVideoInfo: APIVideoInfo = null;
let apiVideoDetails: videoDetails = null;
if (service == Service.YouTube) {
// don't use cache since we have no information about the video length
apiVideoInfo = await getYouTubeVideoInfo(videoID, true);
apiVideoDetails = await getVideoDetails(videoID, true);
}
const apiVideoDuration = apiVideoInfo?.data?.lengthSeconds as VideoDuration;
const apiVideoDuration = apiVideoDetails?.duration as VideoDuration;
// if no videoDuration return early
if (isNaN(apiVideoDuration)) return;
// fetch latest submission
@@ -112,7 +109,9 @@ async function sendWebhooks(voteData: VoteData) {
if (submissionInfoRow !== undefined && userSubmissionCountRow != undefined) {
let webhookURL: string = null;
if (voteData.voteTypeEnum === voteTypes.normal) {
if (voteData.originalType === VoteType.Malicious) {
webhookURL = config.discordMaliciousReportWebhookURL;
} else if (voteData.voteTypeEnum === voteTypes.normal) {
switch (voteData.finalResponse.webhookType) {
case VoteWebhookType.Normal:
webhookURL = config.discordReportChannelWebhookURL;
@@ -126,7 +125,8 @@ async function sendWebhooks(voteData: VoteData) {
}
if (config.newLeafURLs !== null) {
const { err, data } = await YouTubeAPI.listVideos(submissionInfoRow.videoID);
const videoID = submissionInfoRow.videoID;
const { err, data } = await YouTubeAPI.listVideos(videoID);
if (err) return;
const isUpvote = voteData.incrementAmount > 0;
@@ -138,8 +138,8 @@ async function sendWebhooks(voteData: VoteData) {
"video": {
"id": submissionInfoRow.videoID,
"title": data?.title,
"url": `https://www.youtube.com/watch?v=${submissionInfoRow.videoID}`,
"thumbnail": getMaxResThumbnail(data) || null,
"url": `https://www.youtube.com/watch?v=${videoID}`,
"thumbnail": getMaxResThumbnail(videoID),
},
"submission": {
"UUID": voteData.UUID,
@@ -184,7 +184,7 @@ async function sendWebhooks(voteData: VoteData) {
`${getVoteAuthor(userSubmissionCountRow.submissionCount, voteData.isTempVIP, voteData.isVIP, voteData.isOwnSubmission)}${voteData.row.locked ? " (Locked)" : ""}`,
},
"thumbnail": {
"url": getMaxResThumbnail(data) || "",
"url": getMaxResThumbnail(videoID),
},
}],
})
@@ -208,7 +208,7 @@ async function sendWebhooks(voteData: VoteData) {
async function categoryVote(UUID: SegmentUUID, userID: UserID, isVIP: boolean, isTempVIP: boolean, isOwnSubmission: boolean, category: Category
, hashedIP: HashedIP, finalResponse: FinalResponse): Promise<{ status: number, message?: string }> {
// Check if they've already made a vote
const usersLastVoteInfo = await privateDB.prepare("get", `select count(*) as votes, category from "categoryVotes" where "UUID" = ? and "userID" = ? group by category`, [UUID, userID]);
const usersLastVoteInfo = await privateDB.prepare("get", `select count(*) as votes, category from "categoryVotes" where "UUID" = ? and "userID" = ? group by category`, [UUID, userID], { useReplica: true });
if (usersLastVoteInfo?.category === category) {
// Double vote, ignore
@@ -216,20 +216,17 @@ async function categoryVote(UUID: SegmentUUID, userID: UserID, isVIP: boolean, i
}
const segmentInfo = (await db.prepare("get", `SELECT "category", "actionType", "videoID", "hashedVideoID", "service", "userID", "locked" FROM "sponsorTimes" WHERE "UUID" = ?`,
[UUID])) as {category: Category, actionType: ActionType, videoID: VideoID, hashedVideoID: VideoIDHash, service: Service, userID: UserID, locked: number};
[UUID], { useReplica: true })) as {category: Category, actionType: ActionType, videoID: VideoID, hashedVideoID: VideoIDHash, service: Service, userID: UserID, locked: number};
if (segmentInfo.actionType === ActionType.Full) {
return { status: 400, message: "Not allowed to change category of a full video segment" };
}
if (segmentInfo.actionType === ActionType.Poi || category === "poi_highlight") {
return { status: 400, message: "Not allowed to change category for single point segments" };
if (!config.categorySupport[category]?.includes(segmentInfo.actionType) || segmentInfo.actionType === ActionType.Full) {
return { status: 400, message: `Not allowed to change to ${category} when for segment of type ${segmentInfo.actionType}`};
}
if (!config.categoryList.includes(category)) {
return { status: 400, message: "Category doesn't exist." };
}
// Ignore vote if the next category is locked
const nextCategoryLocked = await db.prepare("get", `SELECT "videoID", "category" FROM "lockCategories" WHERE "videoID" = ? AND "service" = ? AND "category" = ?`, [segmentInfo.videoID, segmentInfo.service, category]);
const nextCategoryLocked = await db.prepare("get", `SELECT "videoID", "category" FROM "lockCategories" WHERE "videoID" = ? AND "service" = ? AND "category" = ?`, [segmentInfo.videoID, segmentInfo.service, category], { useReplica: true });
if (nextCategoryLocked && !isVIP) {
return { status: 200 };
}
@@ -239,12 +236,13 @@ async function categoryVote(UUID: SegmentUUID, userID: UserID, isVIP: boolean, i
return { status: 200 };
}
const nextCategoryInfo = await db.prepare("get", `select votes from "categoryVotes" where "UUID" = ? and category = ?`, [UUID, category]);
const nextCategoryInfo = await db.prepare("get", `select votes from "categoryVotes" where "UUID" = ? and category = ?`, [UUID, category], { useReplica: true });
const timeSubmitted = Date.now();
const voteAmount = (isVIP || isTempVIP) ? 500 : 1;
const ableToVote = isVIP || isTempVIP || finalResponse.finalStatus === 200 || true;
const ableToVote = finalResponse.finalStatus === 200
&& (await db.prepare("get", `SELECT "userID" FROM "shadowBannedUsers" WHERE "userID" = ?`, [userID], { useReplica: true })) === undefined;
if (ableToVote) {
// Add the vote
@@ -267,15 +265,15 @@ async function categoryVote(UUID: SegmentUUID, userID: UserID, isVIP: boolean, i
}
// See if the submissions category is ready to change
const currentCategoryInfo = await db.prepare("get", `select votes from "categoryVotes" where "UUID" = ? and category = ?`, [UUID, segmentInfo.category]);
const currentCategoryInfo = await db.prepare("get", `select votes from "categoryVotes" where "UUID" = ? and category = ?`, [UUID, segmentInfo.category], { useReplica: true });
const submissionInfo = await db.prepare("get", `SELECT "userID", "timeSubmitted", "votes" FROM "sponsorTimes" WHERE "UUID" = ?`, [UUID]);
const submissionInfo = await db.prepare("get", `SELECT "userID", "timeSubmitted", "votes" FROM "sponsorTimes" WHERE "UUID" = ?`, [UUID], { useReplica: true });
const isSubmissionVIP = submissionInfo && await isUserVIP(submissionInfo.userID);
const startingVotes = isSubmissionVIP ? 10000 : 1;
// Change this value from 1 in the future to make it harder to change categories
// Done this way without ORs incase the value is zero
const currentCategoryCount = (currentCategoryInfo === undefined || currentCategoryInfo === null) ? startingVotes : currentCategoryInfo.votes;
const currentCategoryCount = currentCategoryInfo?.votes ?? startingVotes;
// Add submission as vote
if (!currentCategoryInfo && submissionInfo) {
@@ -287,7 +285,7 @@ async function categoryVote(UUID: SegmentUUID, userID: UserID, isVIP: boolean, i
//TODO: In the future, raise this number from zero to make it harder to change categories
// VIPs change it every time
if (nextCategoryCount - currentCategoryCount >= Math.max(Math.ceil(submissionInfo?.votes / 2), 2) || isVIP || isTempVIP || isOwnSubmission) {
if (isVIP || isTempVIP || isOwnSubmission || nextCategoryCount - currentCategoryCount >= Math.max(Math.ceil(submissionInfo?.votes / 2), 2)) {
// Replace the category
await db.prepare("run", `update "sponsorTimes" set "category" = ? where "UUID" = ?`, [category, UUID]);
}
@@ -329,6 +327,8 @@ export async function vote(ip: IPAddress, UUID: SegmentUUID, paramUserID: UserID
return { status: 200 };
}
const originalType = type;
//hash the userID
const nonAnonUserID = await getHashCache(paramUserID);
const userID = await getHashCache(paramUserID + UUID);
@@ -362,6 +362,19 @@ export async function vote(ip: IPAddress, UUID: SegmentUUID, paramUserID: UserID
return { status: 400 };
}
const MILLISECONDS_IN_HOUR = 3600000;
const now = Date.now();
const warnings = (await db.prepare("all", `SELECT "reason" FROM warnings WHERE "userID" = ? AND "issueTime" > ? AND enabled = 1`,
[nonAnonUserID, Math.floor(now - (config.hoursAfterWarningExpires * MILLISECONDS_IN_HOUR))],
));
if (warnings.length >= config.maxNumberOfActiveWarnings) {
const warningReason = warnings[0]?.reason;
return { status: 403, message: "Vote rejected due to a warning from a moderator. This means that we noticed you were making some common mistakes that are not malicious, and we just want to clarify the rules. " +
"Could you please send a message in Discord or Matrix so we can further help you?" +
`${(warningReason.length > 0 ? ` Warning reason: '${warningReason}'` : "")}` };
}
// no type but has category, categoryVote
if (!type && category) {
return categoryVote(UUID, nonAnonUserID, isVIP, isTempVIP, isOwnSubmission, category, hashedIP, finalResponse);
@@ -372,7 +385,7 @@ export async function vote(ip: IPAddress, UUID: SegmentUUID, paramUserID: UserID
const isSegmentLocked = segmentInfo.locked;
const isVideoLocked = async () => !!(await db.prepare("get", `SELECT "category" FROM "lockCategories" WHERE
"videoID" = ? AND "service" = ? AND "category" = ? AND "actionType" = ?`,
[segmentInfo.videoID, segmentInfo.service, segmentInfo.category, segmentInfo.actionType]));
[segmentInfo.videoID, segmentInfo.service, segmentInfo.category, segmentInfo.actionType], { useReplica: true }));
if (isSegmentLocked || await isVideoLocked()) {
finalResponse.blockVote = true;
finalResponse.webhookType = VoteWebhookType.Rejected;
@@ -391,43 +404,30 @@ export async function vote(ip: IPAddress, UUID: SegmentUUID, paramUserID: UserID
}
}
const MILLISECONDS_IN_HOUR = 3600000;
const now = Date.now();
const warnings = (await db.prepare("all", `SELECT "reason" FROM warnings WHERE "userID" = ? AND "issueTime" > ? AND enabled = 1`,
[nonAnonUserID, Math.floor(now - (config.hoursAfterWarningExpires * MILLISECONDS_IN_HOUR))],
));
if (warnings.length >= config.maxNumberOfActiveWarnings) {
const warningReason = warnings[0]?.reason;
return { status: 403, message: "Vote rejected due to a warning from a moderator. This means that we noticed you were making some common mistakes that are not malicious, and we just want to clarify the rules. " +
"Could you please send a message in Discord or Matrix so we can further help you?" +
`${(warningReason.length > 0 ? ` Warning reason: '${warningReason}'` : "")}` };
}
const voteTypeEnum = (type == 0 || type == 1 || type == 20) ? voteTypes.normal : voteTypes.incorrect;
// no restrictions on checkDuration
// check duration of all submissions on this video
if (type <= 0) {
checkVideoDuration(UUID);
checkVideoDuration(UUID).catch(Logger.error);
}
try {
// check if vote has already happened
const votesRow = await privateDB.prepare("get", `SELECT "type" FROM "votes" WHERE "userID" = ? AND "UUID" = ?`, [userID, UUID]);
const votesRow = await privateDB.prepare("get", `SELECT "type" FROM "votes" WHERE "userID" = ? AND "UUID" = ?`, [userID, UUID], { useReplica: true });
// -1 for downvote, 1 for upvote. Maybe more depending on reputation in the future
// oldIncrementAmount will be zero if row is null
let incrementAmount = 0;
let oldIncrementAmount = 0;
if (type == 1) {
if (type == VoteType.Upvote) {
//upvote
incrementAmount = 1;
} else if (type == 0) {
} else if (type === VoteType.Downvote || type === VoteType.Malicious) {
//downvote
incrementAmount = -1;
} else if (type == 20) {
} else if (type == VoteType.Undo) {
//undo/cancel vote
incrementAmount = 0;
} else {
@@ -435,17 +435,13 @@ export async function vote(ip: IPAddress, UUID: SegmentUUID, paramUserID: UserID
return { status: 400 };
}
if (votesRow) {
if (votesRow.type === 1) {
//upvote
if (votesRow.type === VoteType.Upvote) {
oldIncrementAmount = 1;
} else if (votesRow.type === 0) {
//downvote
} else if (votesRow.type === VoteType.Downvote) {
oldIncrementAmount = -1;
} else if (votesRow.type === 2) {
//extra downvote
} else if (votesRow.type === VoteType.ExtraDownvote) {
oldIncrementAmount = -4;
} else if (votesRow.type === 20) {
//undo/cancel vote
} else if (votesRow.type === VoteType.Undo) {
oldIncrementAmount = 0;
} else if (votesRow.type < 0) {
//vip downvote
@@ -466,13 +462,19 @@ export async function vote(ip: IPAddress, UUID: SegmentUUID, paramUserID: UserID
type = incrementAmount;
}
if (type === VoteType.Malicious) {
incrementAmount = -Math.min(segmentInfo.votes + 2 - oldIncrementAmount, 5);
type = incrementAmount;
}
// Only change the database if they have made a submission before and haven't voted recently
const userAbleToVote = (!(isOwnSubmission && incrementAmount > 0 && oldIncrementAmount >= 0)
&& !(originalType === VoteType.Malicious && segmentInfo.actionType !== ActionType.Chapter)
&& !finalResponse.blockVote
&& finalResponse.finalStatus === 200
&& (await db.prepare("get", `SELECT "userID" FROM "sponsorTimes" WHERE "userID" = ?`, [nonAnonUserID])) !== undefined
&& (await db.prepare("get", `SELECT "userID" FROM "shadowBannedUsers" WHERE "userID" = ?`, [nonAnonUserID])) === undefined
&& (await privateDB.prepare("get", `SELECT "UUID" FROM "votes" WHERE "UUID" = ? AND "hashedIP" = ? AND "userID" != ?`, [UUID, hashedIP, userID])) === undefined);
&& (await db.prepare("get", `SELECT "userID" FROM "sponsorTimes" WHERE "userID" = ?`, [nonAnonUserID], { useReplica: true })) !== undefined
&& (await db.prepare("get", `SELECT "userID" FROM "shadowBannedUsers" WHERE "userID" = ?`, [nonAnonUserID], { useReplica: true })) === undefined
&& (await privateDB.prepare("get", `SELECT "UUID" FROM "votes" WHERE "UUID" = ? AND "hashedIP" = ? AND "userID" != ?`, [UUID, hashedIP, userID], { useReplica: true })) === undefined);
const ableToVote = isVIP || isTempVIP || userAbleToVote;
@@ -480,9 +482,9 @@ export async function vote(ip: IPAddress, UUID: SegmentUUID, paramUserID: UserID
if (ableToVote) {
//update the votes table
if (votesRow) {
await privateDB.prepare("run", `UPDATE "votes" SET "type" = ? WHERE "userID" = ? AND "UUID" = ?`, [type, userID, UUID]);
await privateDB.prepare("run", `UPDATE "votes" SET "type" = ?, "originalType" = ? WHERE "userID" = ? AND "UUID" = ?`, [type, originalType, userID, UUID]);
} else {
await privateDB.prepare("run", `INSERT INTO "votes" VALUES(?, ?, ?, ?, ?)`, [UUID, userID, hashedIP, type, nonAnonUserID]);
await privateDB.prepare("run", `INSERT INTO "votes" ("UUID", "userID", "hashedIP", "type", "normalUserID", "originalType") VALUES(?, ?, ?, ?, ?, ?)`, [UUID, userID, hashedIP, type, nonAnonUserID, originalType]);
}
// update the vote count on this sponsorTime
@@ -510,6 +512,7 @@ export async function vote(ip: IPAddress, UUID: SegmentUUID, paramUserID: UserID
sendWebhooks({
UUID,
nonAnonUserID,
originalType,
voteTypeEnum,
isTempVIP,
isVIP,
@@ -519,7 +522,7 @@ export async function vote(ip: IPAddress, UUID: SegmentUUID, paramUserID: UserID
incrementAmount,
oldIncrementAmount,
finalResponse
});
}).catch(Logger.error);
}
return { status: finalResponse.finalStatus, message: finalResponse.finalMessage ?? undefined };
} catch (err) {

View File

@@ -1,13 +1,22 @@
import { PoolConfig } from "pg";
import * as redis from "redis";
import { CacheOptions } from "@ajayyy/lru-diskcache";
interface RedisConfig extends redis.RedisClientOptions {
enabled: boolean;
expiryTime: number;
getTimeout: number;
}
interface CustomPostgresConfig extends PoolConfig {
export interface CustomPostgresConfig extends PoolConfig {
enabled: boolean;
maxTries: number;
maxConcurrentRequests: number;
}
export interface CustomPostgresReadOnlyConfig extends CustomPostgresConfig {
weight: number;
readTimeout: number;
fallbackOnFail: boolean;
}
export interface SBSConfig {
@@ -21,8 +30,11 @@ export interface SBSConfig {
discordFailedReportChannelWebhookURL?: string;
discordFirstTimeSubmissionsWebhookURL?: string;
discordCompletelyIncorrectReportWebhookURL?: string;
discordMaliciousReportWebhookURL?: string;
neuralBlockURL?: string;
discordNeuralBlockRejectWebhookURL?: string;
minReputationToSubmitChapter: number;
minReputationToSubmitFiller: number;
userCounterURL?: string;
proxySubmission?: string;
behindProxy: string | boolean;
@@ -43,7 +55,6 @@ export interface SBSConfig {
rateLimit: {
vote: RateLimitConfig;
view: RateLimitConfig;
rate: RateLimitConfig;
};
mysql?: any;
privateMysql?: any;
@@ -52,9 +63,19 @@ export interface SBSConfig {
redis?: RedisConfig;
maxRewardTimePerSegmentInSeconds?: number;
postgres?: CustomPostgresConfig;
postgresReadOnly?: CustomPostgresReadOnlyConfig;
dumpDatabase?: DumpDatabase;
diskCache: CacheOptions;
diskCacheURL: string;
crons: CronJobOptions;
patreon: {
clientId: string,
clientSecret: string,
minPrice: number,
redirectUri: string
}
gumroad: {
productPermalinks: string[],
}
}
export interface WebhookConfig {

View File

@@ -0,0 +1,24 @@
// Subset of the "videoDetails" object returned by YouTube's InnerTube
// /youtubei/v1/player endpoint.
export interface innerTubeVideoDetails {
    "videoId": string,
    "title": string,
    "lengthSeconds": string, // yes, don't ask. (InnerTube serializes the duration as a string)
    "channelId": string,
    "isOwnerViewing": boolean,
    "shortDescription": string,
    "isCrawlable": boolean,
    "thumbnail": {
        "thumbnails": [{
            "url": string,
            "width": number,
            "height": number
        }
        ]
    },
    "allowRatings": boolean,
    "viewCount": string, // yes, don't ask (also serialized as a string)
    "author": string,
    "isPrivate": boolean,
    "isUnpluggedCorpus": boolean,
    "isLiveContent": boolean,
    "publishDate": string
}

View File

@@ -101,6 +101,7 @@ export interface VideoData {
export interface SegmentCache {
shadowHiddenSegmentIPs: SBRecord<VideoID, SBRecord<string, {hashedIP: HashedIP}[]>>,
userHashedIP?: HashedIP
userHashedIPPromise?: Promise<HashedIP>;
}
export interface DBLock {
@@ -120,3 +121,12 @@ export enum SortableFields {
votes = "votes",
views = "views",
}
// Numeric wire values accepted for a vote's "type".
export enum VoteType {
    Downvote = 0,
    Upvote = 1,
    ExtraDownvote = 2,
    // Cancels a previous vote.
    Undo = 20,
    // Report of a malicious segment.
    Malicious = 30
}

View File

@@ -2,3 +2,8 @@ import { HashedValue } from "./hash.model";
export type UserID = string & { __userIDBrand: unknown };
export type HashedUserID = UserID & HashedValue;
// Per-user feature grants stored in the "userFeatures" table.
export enum Feature {
    ChapterSubmitter = 0,
    FillerSubmitter = 1
}

View File

@@ -1,34 +1,45 @@
import LRU from "@ajayyy/lru-diskcache";
import axios, { AxiosError } from "axios";
import { config } from "../config";
import { Logger } from "./logger";
let DiskCache: LRU<string, string>;
class DiskCache {
async set(key: string, value: unknown): Promise<boolean> {
if (!config.diskCacheURL) return false;
if (config.diskCache) {
DiskCache = new LRU("./databases/cache", config.diskCache);
DiskCache.init();
} else {
DiskCache = {
/* eslint-disable @typescript-eslint/no-unused-vars */
// constructor(rootPath, options): {};
try {
const result = await axios.post(`${config.diskCacheURL}/api/v1/item`, {
key,
value
});
init(): void { return; },
return result.status === 200;
} catch (err) {
const response = (err as AxiosError).response;
if (!response || response.status !== 404) {
Logger.error(`DiskCache: Error setting key ${key}: ${err}`);
}
reset(): void { return; },
return false;
}
}
has(key: string): boolean { return false; },
async get(key: string): Promise<unknown> {
if (!config.diskCacheURL) return null;
get(key: string, opts?: {encoding?: string}): string { return null; },
try {
const result = await axios.get(`${config.diskCacheURL}/api/v1/item?key=${key}`, { timeout: 500 });
// Returns size
set(key: string, dataOrSteam: string): Promise<number> { return new Promise(() => 0); },
return result.status === 200 ? result.data : null;
} catch (err) {
const response = (err as AxiosError).response;
if (!response || response.status !== 404) {
Logger.error(`DiskCache: Error getting key ${key}: ${err}`);
}
del(key: string): void { return; },
size(): number { return 0; },
prune(): void {return; },
/* eslint-enable @typescript-eslint/no-unused-vars */
};
return null;
}
}
}
export default DiskCache;
const diskCache = new DiskCache();
export default diskCache;

11
src/utils/features.ts Normal file
View File

@@ -0,0 +1,11 @@
import { db } from "../databases/databases";
import { Feature, HashedUserID } from "../types/user.model";
import { QueryCacher } from "./queryCacher";
import { userFeatureKey } from "./redisKeys";
/**
 * Checks (via cache) whether a user has been granted the given feature.
 */
export async function hasFeature(userID: HashedUserID, feature: Feature): Promise<boolean> {
    const fetchFromDB = async () => {
        const row = await db.prepare("get", 'SELECT "feature" from "userFeatures" WHERE "userID" = ? AND "feature" = ?', [userID, feature], { useReplica: true });
        return !!row;
    };
    return QueryCacher.get(fetchFromDB, userFeatureKey(userID, feature));
}

View File

@@ -26,11 +26,13 @@ async function getFromRedis<T extends string>(key: HashedValue): Promise<T & Has
Logger.debug(`Got data from redis: ${reply}`);
return reply as T & HashedValue;
}
} catch (e) {} // eslint-disable-line no-empty
} catch (err) {
Logger.error(err as string);
}
// Otherwise, calculate it
const data = getHash(key, cachedHashTimes);
redis.set(key, data);
redis.set(redisKey, data).catch((err) => Logger.error(err));
return data as T & HashedValue;
}

View File

@@ -0,0 +1,59 @@
import { config } from "../config";
import { innerTubeVideoDetails } from "../types/innerTubeApi.model";
import { APIVideoData } from "../types/youtubeApi.model";
import { YouTubeAPI } from "../utils/youtubeApi";
import { getPlayerData } from "../utils/innerTubeAPI";
// Normalized video metadata shared by the newLeaf and InnerTube backends.
export interface videoDetails {
    videoId: string,
    // Length in seconds.
    duration: number,
    authorId: string,
    authorName: string,
    title: string,
    // Unix timestamp in seconds.
    published: number,
    thumbnails: {
        url: string,
        width: number,
        height: number,
    }[]
}
// Maps an InnerTube payload onto the normalized videoDetails shape.
// Note: InnerTube returns lengthSeconds as a string, and publishDate as a
// date string that is converted to a Unix timestamp in seconds here.
const convertFromInnerTube = (input: innerTubeVideoDetails): videoDetails => {
    const publishedSeconds = new Date(input.publishDate).getTime() / 1000;
    return {
        videoId: input.videoId,
        duration: Number(input.lengthSeconds),
        authorId: input.channelId,
        authorName: input.author,
        title: input.title,
        published: publishedSeconds,
        thumbnails: input.thumbnail.thumbnails
    };
};
// Maps a newLeaf/Invidious-style payload onto the normalized videoDetails shape.
const convertFromNewLeaf = (input: APIVideoData): videoDetails => {
    const { videoId, lengthSeconds, authorId, author, title, published, videoThumbnails } = input;
    return {
        videoId,
        duration: lengthSeconds,
        authorId,
        authorName: author,
        title,
        published,
        thumbnails: videoThumbnails
    };
};
// Fetches from the newLeaf proxy; rejects (with no reason) when it returns no data,
// so Promise.any in getVideoDetails can fall through to the other source.
async function newLeafWrapper(videoId: string, ignoreCache: boolean) {
    const response = await YouTubeAPI.listVideos(videoId, ignoreCache);
    if (response?.data == null) {
        return Promise.reject();
    }
    return response.data;
}
/**
 * Fetches normalized video details, racing the newLeaf proxy against the
 * InnerTube API when both are configured. Resolves null when every source fails.
 */
export async function getVideoDetails(videoId: string, ignoreCache = false): Promise<videoDetails> {
    // Without newLeaf proxies configured, InnerTube is the only source;
    // its errors propagate to the caller (no null fallback on this path).
    if (!config.newLeafURLs) {
        const data = await getPlayerData(videoId, ignoreCache);
        return convertFromInnerTube(data);
    }
    try {
        return await Promise.any([
            newLeafWrapper(videoId, ignoreCache).then((videoData) => convertFromNewLeaf(videoData)),
            getPlayerData(videoId, ignoreCache).then((data) => convertFromInnerTube(data))
        ]);
    } catch {
        // Both sources failed (AggregateError) — report "no details".
        return null;
    }
}

58
src/utils/innerTubeAPI.ts Normal file
View File

@@ -0,0 +1,58 @@
import axios from "axios";
import { Logger } from "./logger";
import { innerTubeVideoDetails } from "../types/innerTubeApi.model";
import DiskCache from "./diskCache";
// Queries the InnerTube player endpoint directly, impersonating the web client.
async function getFromITube (videoID: string): Promise<innerTubeVideoDetails> {
    const payload = {
        context: {
            client: {
                clientName: "WEB",
                clientVersion: "2.20211129.09.00"
            }
        },
        videoId: videoID
    };
    const response = await axios.post("https://www.youtube.com/youtubei/v1/player", payload, {
        timeout: 3500
    });
    if (response.status !== 200) {
        return Promise.reject(response.status);
    }
    return response.data.videoDetails;
}
/**
 * Fetches InnerTube video details for a video, using the disk cache unless
 * ignoreCache is set. Rejects with "Invalid video ID" for malformed IDs and
 * with the upstream error when the InnerTube request itself fails.
 */
export async function getPlayerData (videoID: string, ignoreCache = false): Promise<innerTubeVideoDetails> {
    // YouTube video IDs are always 11 characters; "." additionally guards the cache key.
    if (!videoID || videoID.length !== 11 || videoID.includes(".")) {
        return Promise.reject("Invalid video ID");
    }

    const cacheKey = `yt.itube.video.${videoID}`;
    if (!ignoreCache) {
        try {
            const cached = await DiskCache.get(cacheKey);
            if (cached) {
                Logger.debug(`InnerTube API: cache used for video information: ${videoID}`);
                return cached as innerTubeVideoDetails;
            }
        } catch (err) {
            // Fix: a broken cache read previously rejected the whole lookup even
            // though the API was still reachable — log and fall through instead.
            Logger.warn(`InnerTube API: cache read failed for ${videoID}: ${err}`);
        }
    }

    try {
        const data = await getFromITube(videoID);
        // Cache population is fire-and-forget; a failed write only costs a warning.
        DiskCache.set(cacheKey, data)
            .then(() => Logger.debug(`InnerTube API: video information cache set for: ${videoID}`))
            .catch((err: any) => Logger.warn(err));
        return data;
    } catch (err) {
        Logger.warn(`InnerTube API Error for ${videoID}: ${err}`);
        return Promise.reject(err);
    }
}

View File

@@ -1,19 +1,13 @@
import redis from "../utils/redis";
import { tempVIPKey } from "../utils/redisKeys";
import { HashedUserID } from "../types/user.model";
import { YouTubeAPI } from "../utils/youtubeApi";
import { APIVideoInfo } from "../types/youtubeApi.model";
import { VideoID } from "../types/segments.model";
import { config } from "../config";
import { Logger } from "./logger";
function getYouTubeVideoInfo(videoID: VideoID, ignoreCache = false): Promise<APIVideoInfo> {
return config.newLeafURLs ? YouTubeAPI.listVideos(videoID, ignoreCache) : null;
}
import { getVideoDetails } from "./getVideoDetails";
export const isUserTempVIP = async (hashedUserID: HashedUserID, videoID: VideoID): Promise<boolean> => {
const apiVideoInfo = await getYouTubeVideoInfo(videoID);
const channelID = apiVideoInfo?.data?.authorId;
const apiVideoDetails = await getVideoDetails(videoID);
const channelID = apiVideoDetails?.authorId;
try {
const reply = await redis.get(tempVIPKey(hashedUserID));
return reply && reply == channelID;

View File

@@ -2,5 +2,6 @@ import { db } from "../databases/databases";
import { HashedUserID } from "../types/user.model";
export async function isUserVIP(userID: HashedUserID): Promise<boolean> {
return (await db.prepare("get", `SELECT count(*) as "userCount" FROM "vipUsers" WHERE "userID" = ? LIMIT 1`, [userID])).userCount > 0;
return (await db.prepare("get", `SELECT count(*) as "userCount" FROM "vipUsers" WHERE "userID" = ? LIMIT 1`,
[userID]))?.userCount > 0;
}

37
src/utils/permissions.ts Normal file
View File

@@ -0,0 +1,37 @@
import { config } from "../config";
import { db } from "../databases/databases";
import { Category } from "../types/segments.model";
import { Feature, HashedUserID } from "../types/user.model";
import { hasFeature } from "./features";
import { isUserVIP } from "./isUserVIP";
import { oneOf } from "./promise";
import { getReputation } from "./reputation";
// Result of a submission-permission check; "reason" optionally explains a denial.
interface CanSubmitResult {
    canSubmit: boolean;
    reason?: string;
}
// True for established submitters: more than 100 submissions, with under 15%
// of them downvoted (only submissions with more than 5 views count as downvoted).
async function lowDownvotes(userID: HashedUserID): Promise<boolean> {
    const stats = await db.prepare(
        "get",
        `SELECT count(*) as "submissionCount", SUM(CASE WHEN "votes" < 0 AND "views" > 5 THEN 1 ELSE 0 END) AS "downvotedSubmissions" FROM "sponsorTimes" WHERE "userID" = ?`,
        [userID],
        { useReplica: true }
    );

    const downvotedRatio = stats.downvotedSubmissions / stats.submissionCount;
    return stats.submissionCount > 100 && downvotedRatio < 0.15;
}
/**
 * Decides whether a user may submit a segment of the given category.
 * Chapters are restricted: the user must be VIP, an established submitter,
 * above the reputation threshold, or explicitly granted the feature.
 * All other categories are unrestricted.
 */
export async function canSubmit(userID: HashedUserID, category: Category): Promise<CanSubmitResult> {
    if (category === "chapter") {
        const checks = [
            isUserVIP(userID),
            lowDownvotes(userID),
            getReputation(userID).then((reputation) => reputation > config.minReputationToSubmitChapter),
            hasFeature(userID, Feature.ChapterSubmitter)
        ];
        return { canSubmit: await oneOf(checks) };
    }

    return { canSubmit: true };
}

75
src/utils/promise.ts Normal file
View File

@@ -0,0 +1,75 @@
import { Logger } from "./logger";
/** Error used to signal that a promise exceeded its allotted time. */
export class PromiseTimeoutError<T> extends Error {
    promise?: Promise<T>;

    constructor(promise?: Promise<T>) {
        super("Promise timed out");
        this.promise = promise;
    }
}

/** A promise augmented with externally-readable settlement flags. */
export interface PromiseWithState<T> extends Promise<T> {
    isResolved: boolean;
    isRejected: boolean;
}

/**
 * Resolves/rejects with the given promise, unless the timeout (ms) elapses
 * first, in which case it rejects with PromiseTimeoutError.
 * With no timeout the promise is returned effectively unchanged.
 */
export function promiseOrTimeout<T>(promise: Promise<T>, timeout?: number): Promise<T> {
    return Promise.race([timeoutPomise<T>(timeout), promise]);
}

/**
 * A promise that rejects with PromiseTimeoutError after `timeout` ms,
 * or never settles when no timeout is given. (Name typo is kept: exported API.)
 */
export function timeoutPomise<T>(timeout?: number): Promise<T> {
    if (!timeout) {
        // No limit requested: stay pending forever so Promise.race ignores it.
        return new Promise<T>(() => { /* intentionally never settles */ });
    }
    return new Promise<T>((_, reject) => {
        setTimeout(() => {
            reject(new PromiseTimeoutError());
        }, timeout);
    });
}
/**
 * Attaches isResolved/isRejected flags to a promise so callers can inspect
 * its settlement state synchronously. Mutates and returns the same promise.
 */
export function savePromiseState<T>(promise: Promise<T>): PromiseWithState<T> {
    const tracked = promise as PromiseWithState<T>;
    tracked.isResolved = false;
    tracked.isRejected = false;

    tracked.then(
        () => { tracked.isResolved = true; },
        () => { tracked.isRejected = true; }
    );

    return tracked;
}
/**
 * Races the given promises, excluding any that have already rejected.
 * Allows rejection or resolve; past resolves still win the race, but
 * past rejections are filtered out up front.
 */
export function nextFulfilment<T>(promises: PromiseWithState<T>[]): Promise<T> {
    const stillViable = promises.filter((promise) => !promise.isRejected);
    return Promise.race(stillViable);
}
/**
 * Resolves with the first truthy result from the given promises.
 * If no promise yields a truthy value, resolves with the final falsy result
 * once every promise has settled; rejects only when the very last settlement
 * is a rejection. Earlier rejections are logged and otherwise ignored.
 */
export function oneOf<T>(promises: Promise<T>[]): Promise<T> {
    return new Promise((resolve, reject) => {
        let settledCount = 0;
        const total = promises.length;

        promises.forEach((promise) => {
            promise.then((value) => {
                settledCount++;
                // A truthy value wins immediately; otherwise wait until all settle.
                if (value || settledCount === total) {
                    resolve(value);
                }
            }).catch((err) => {
                settledCount++;
                if (settledCount === total) {
                    reject(err);
                } else {
                    Logger.error(`oneOf ignore error (promise): ${err}`);
                }
            });
        });
    });
}

View File

@@ -1,8 +1,9 @@
import redis from "../utils/redis";
import { Logger } from "../utils/logger";
import { skipSegmentsHashKey, skipSegmentsKey, reputationKey, ratingHashKey, skipSegmentGroupsKey } from "./redisKeys";
import { skipSegmentsHashKey, skipSegmentsKey, reputationKey, ratingHashKey, skipSegmentGroupsKey, userFeatureKey } from "./redisKeys";
import { Service, VideoID, VideoIDHash } from "../types/segments.model";
import { UserID } from "../types/user.model";
import { Feature, HashedUserID, UserID } from "../types/user.model";
import { config } from "../config";
async function get<T>(fetchFromDB: () => Promise<T>, key: string): Promise<T> {
try {
@@ -16,7 +17,7 @@ async function get<T>(fetchFromDB: () => Promise<T>, key: string): Promise<T> {
const data = await fetchFromDB();
redis.set(key, JSON.stringify(data));
redis.setEx(key, config.redis?.expiryTime, JSON.stringify(data)).catch((err) => Logger.error(err));
return data;
}
@@ -52,7 +53,7 @@ async function getAndSplit<T, U extends string>(fetchFromDB: (values: U[]) => Pr
if (valuesToBeFetched.length > 0) {
data = await fetchFromDB(valuesToBeFetched);
new Promise(() => {
void new Promise(() => {
const newResults: Record<string, T[]> = {};
for (const item of data) {
const splitValue = (item as unknown as Record<string, string>)[splitKey];
@@ -67,7 +68,7 @@ async function getAndSplit<T, U extends string>(fetchFromDB: (values: U[]) => Pr
}
for (const key in newResults) {
redis.set(key, JSON.stringify(newResults[key]));
redis.setEx(key, config.redis?.expiryTime, JSON.stringify(newResults[key])).catch((err) => Logger.error(err));
}
});
}
@@ -77,22 +78,27 @@ async function getAndSplit<T, U extends string>(fetchFromDB: (values: U[]) => Pr
function clearSegmentCache(videoInfo: { videoID: VideoID; hashedVideoID: VideoIDHash; service: Service; userID?: UserID; }): void {
if (videoInfo) {
redis.del(skipSegmentsKey(videoInfo.videoID, videoInfo.service));
redis.del(skipSegmentGroupsKey(videoInfo.videoID, videoInfo.service));
redis.del(skipSegmentsHashKey(videoInfo.hashedVideoID, videoInfo.service));
if (videoInfo.userID) redis.del(reputationKey(videoInfo.userID));
redis.del(skipSegmentsKey(videoInfo.videoID, videoInfo.service)).catch((err) => Logger.error(err));
redis.del(skipSegmentGroupsKey(videoInfo.videoID, videoInfo.service)).catch((err) => Logger.error(err));
redis.del(skipSegmentsHashKey(videoInfo.hashedVideoID, videoInfo.service)).catch((err) => Logger.error(err));
if (videoInfo.userID) redis.del(reputationKey(videoInfo.userID)).catch((err) => Logger.error(err));
}
}
function clearRatingCache(videoInfo: { hashedVideoID: VideoIDHash; service: Service;}): void {
if (videoInfo) {
redis.del(ratingHashKey(videoInfo.hashedVideoID, videoInfo.service));
redis.del(ratingHashKey(videoInfo.hashedVideoID, videoInfo.service)).catch((err) => Logger.error(err));
}
}
function clearFeatureCache(userID: HashedUserID, feature: Feature): void {
redis.del(userFeatureKey(userID, feature)).catch((err) => Logger.error(err));
}
export const QueryCacher = {
get,
getAndSplit,
clearSegmentCache,
clearRatingCache
clearRatingCache,
clearFeatureCache
};

View File

@@ -1,8 +1,8 @@
import { config } from "../config";
import { Logger } from "./logger";
import { createClient } from "redis";
import { RedisCommandArgument, RedisCommandArguments, RedisCommandRawReply } from "@node-redis/client/dist/lib/commands";
import { ClientCommandOptions } from "@node-redis/client/dist/lib/client";
import { RedisCommandArgument, RedisCommandArguments, RedisCommandRawReply } from "@redis/client/dist/lib/commands";
import { RedisClientOptions } from "@redis/client/dist/lib/client";
import { RedisReply } from "rate-limit-redis";
interface RedisSB {
@@ -11,7 +11,7 @@ interface RedisSB {
setEx(key: RedisCommandArgument, seconds: number, value: RedisCommandArgument): Promise<string>;
del(...keys: [RedisCommandArgument]): Promise<number>;
increment?(key: RedisCommandArgument): Promise<RedisCommandRawReply[]>;
sendCommand(args: RedisCommandArguments, options?: ClientCommandOptions): Promise<RedisReply>;
sendCommand(args: RedisCommandArguments, options?: RedisClientOptions): Promise<RedisReply>;
quit(): Promise<void>;
}
@@ -28,20 +28,19 @@ let exportClient: RedisSB = {
if (config.redis?.enabled) {
Logger.info("Connected to redis");
const client = createClient(config.redis);
client.connect();
exportClient = client;
void client.connect(); // void as we don't care about the promise
exportClient = client as RedisSB;
const timeoutDuration = 200;
const get = client.get.bind(client);
exportClient.get = (key) => new Promise((resolve, reject) => {
const timeout = setTimeout(() => reject(), timeoutDuration);
const timeout = config.redis.getTimeout ? setTimeout(() => reject(), config.redis.getTimeout) : null;
get(key).then((reply) => {
clearTimeout(timeout);
if (timeout !== null) clearTimeout(timeout);
resolve(reply);
}).catch((err) => reject(err));
});
exportClient.increment = (key) => new Promise((resolve, reject) =>
client.multi()
void client.multi()
.incr(key)
.expire(key, 60)
.exec()
@@ -49,7 +48,10 @@ if (config.redis?.enabled) {
.catch((err) => reject(err))
);
client.on("error", function(error) {
Logger.error(error);
Logger.error(`Redis Error: ${error}`);
});
client.on("reconnect", () => {
Logger.info("Redis: trying to reconnect");
});
}

View File

@@ -1,5 +1,5 @@
import { Service, VideoID, VideoIDHash } from "../types/segments.model";
import { HashedUserID, UserID } from "../types/user.model";
import { Feature, HashedUserID, UserID } from "../types/user.model";
import { HashedValue } from "../types/hash.model";
import { Logger } from "./logger";
@@ -47,3 +47,7 @@ export function videoLabelsHashKey(hashedVideoIDPrefix: VideoIDHash, service: Se
return `labels.v1.${service}.${hashedVideoIDPrefix}`;
}
export function userFeatureKey (userID: HashedUserID, feature: Feature): string {
return `user.${userID}.feature.${feature}`;
}

View File

@@ -28,9 +28,9 @@ export async function getReputation(userID: UserID): Promise<number> {
THEN 1 ELSE 0 END) AS "nonSelfDownvotedSubmissions",
SUM(CASE WHEN "timeSubmitted" > 1596240000000 THEN "votes" ELSE 0 END) AS "votedSum",
SUM(locked) AS "lockedSum",
SUM(CASE WHEN "timeSubmitted" < ? AND "timeSubmitted" > 1596240000000 AND "actionType" != 'full' AND "votes" > 0 THEN 1 ELSE 0 END) AS "semiOldUpvotedSubmissions",
SUM(CASE WHEN "timeSubmitted" < ? AND "timeSubmitted" > 1596240000000 AND "actionType" != 'full' AND "votes" > 0 THEN 1 ELSE 0 END) AS "oldUpvotedSubmissions",
SUM(CASE WHEN "votes" > 0 AND "actionType" != 'full'
SUM(CASE WHEN "timeSubmitted" < ? AND "timeSubmitted" > 1596240000000 AND "votes" > 0 THEN 1 ELSE 0 END) AS "semiOldUpvotedSubmissions",
SUM(CASE WHEN "timeSubmitted" < ? AND "timeSubmitted" > 1596240000000 AND "votes" > 0 THEN 1 ELSE 0 END) AS "oldUpvotedSubmissions",
SUM(CASE WHEN "votes" > 0
AND NOT EXISTS (
SELECT * FROM "sponsorTimes" as c
WHERE (c."votes" > "a"."votes" OR c."locked" > "a"."locked") AND
@@ -40,7 +40,7 @@ export async function getReputation(userID: UserID): Promise<number> {
SELECT * FROM "lockCategories" as l
WHERE l."videoID" = "a"."videoID" AND l."service" = "a"."service" AND l."category" = "a"."category" LIMIT 1)
THEN 1 ELSE 0 END) AS "mostUpvotedInLockedVideoSum"
FROM "sponsorTimes" as "a" WHERE "userID" = ?`, [userID, weekAgo, pastDate, userID]) as Promise<ReputationDBResult>;
FROM "sponsorTimes" as "a" WHERE "userID" = ? AND "actionType" != 'full'`, [userID, weekAgo, pastDate, userID], { useReplica: true }) as Promise<ReputationDBResult>;
const result = await QueryCacher.get(fetchFromDB, reputationKey(userID));
@@ -55,19 +55,21 @@ function convertRange(value: number, currentMin: number, currentMax: number, tar
}
export function calculateReputationFromMetrics(metrics: ReputationDBResult): number {
if (!metrics) return 0;
// Grace period
if (metrics.totalSubmissions < 5) {
return 0;
}
const downvoteRatio = metrics.downvotedSubmissions / metrics.totalSubmissions;
if (downvoteRatio > 0.3) {
return convertRange(Math.min(downvoteRatio, 0.7), 0.3, 0.7, -0.5, -2.5);
if (downvoteRatio > 0.5) {
return convertRange(Math.min(downvoteRatio, 0.7), 0.5, 0.7, -0.5, -2.5);
}
const nonSelfDownvoteRatio = metrics.nonSelfDownvotedSubmissions / metrics.totalSubmissions;
if (nonSelfDownvoteRatio > 0.05) {
return convertRange(Math.min(nonSelfDownvoteRatio, 0.4), 0.05, 0.4, -0.5, -2.5);
if (nonSelfDownvoteRatio > 0.3) {
return convertRange(Math.min(nonSelfDownvoteRatio, 0.4), 0.3, 0.4, -0.5, -2.5);
}
if (metrics.votedSum < 5) {

144
src/utils/tokenUtils.ts Normal file
View File

@@ -0,0 +1,144 @@
import axios from "axios";
import { config } from "../config";
import { privateDB } from "../databases/databases";
import { Logger } from "./logger";
import FormData from "form-data";
import { randomInt } from "node:crypto";
// Provenance of a license key: purchased via Patreon OAuth, issued
// locally, or sold through Gumroad.
export enum TokenType {
    patreon = "patreon",
    local = "local",
    gumroad = "gumroad"
}
// Patreon "patron_status" values as returned by the identity API.
export enum PatronStatus {
    active = "active_patron",
    declined = "declined_patron",
    former = "former_patron",
}
/**
 * The slice of Patreon's identity response this server consumes:
 * memberships included alongside the user, with pledge amounts and
 * patron status per campaign.
 */
export interface PatreonIdentityData {
    included: Array<{
        attributes: {
            currently_entitled_amount_cents: number,
            campaign_lifetime_support_cents: number,
            pledge_relationship_start: number,
            patron_status: PatronStatus,
        }
    }>
}
/**
 * Creates a new license key and persists it to the private database.
 *
 * For TokenType.patreon, exchanges the provided OAuth authorization `code`
 * for access/refresh tokens at Patreon and stores them alongside the key.
 * For TokenType.local, simply generates and stores a key.
 *
 * @param type which kind of token to create
 * @param code OAuth authorization code (required for the patreon flow)
 * @returns the new license key, or null on failure / unsupported type
 */
export async function createAndSaveToken(type: TokenType, code?: string): Promise<string> {
    switch(type) {
        case TokenType.patreon: {
            const domain = "https://www.patreon.com";
            try {
                // Standard OAuth2 authorization-code exchange, sent as multipart form data.
                const formData = new FormData();
                formData.append("code", code);
                formData.append("client_id", config.patreon.clientId);
                formData.append("client_secret", config.patreon.clientSecret);
                formData.append("grant_type", "authorization_code");
                formData.append("redirect_uri", config.patreon.redirectUri);
                const result = await axios.request({
                    url: `${domain}/api/oauth2/token`,
                    data: formData,
                    method: "POST",
                    headers: formData.getHeaders()
                });
                if (result.status === 200) {
                    const licenseKey = generateToken();
                    const time = Date.now();
                    // Record the key itself, then the OAuth credentials tied to it.
                    await privateDB.prepare("run", `INSERT INTO "licenseKeys"("licenseKey", "time", "type") VALUES(?, ?, ?)`, [licenseKey, time, type]);
                    await privateDB.prepare("run", `INSERT INTO "oauthLicenseKeys"("licenseKey", "accessToken", "refreshToken", "expiresIn") VALUES(?, ?, ?, ?)`
                        , [licenseKey, result.data.access_token, result.data.refresh_token, result.data.expires_in]);
                    return licenseKey;
                }
            } catch (e) {
                Logger.error(`token creation: ${e}`);
                return null;
            }
            // Non-200 response: fall through to the null return below.
            break;
        }
        case TokenType.local: {
            // No external exchange needed; just mint and record a key.
            const licenseKey = generateToken();
            const time = Date.now();
            await privateDB.prepare("run", `INSERT INTO "licenseKeys"("licenseKey", "time", "type") VALUES(?, ?, ?)`, [licenseKey, time, type]);
            return licenseKey;
        }
    }
    return null;
}
/**
 * Refreshes the stored OAuth credentials for a license key.
 *
 * For TokenType.patreon, performs an OAuth2 refresh_token grant against
 * Patreon and persists the new access/refresh tokens and expiry.
 *
 * @param type token provenance (only patreon is refreshable)
 * @param licenseKey the license key whose credentials should be updated
 * @param refreshToken the current OAuth refresh token
 * @returns true when the refresh succeeded and was persisted
 */
export async function refreshToken(type: TokenType, licenseKey: string, refreshToken: string): Promise<boolean> {
    switch(type) {
        case TokenType.patreon: {
            try {
                const formData = new FormData();
                // OAuth2 (RFC 6749 §6) names this field "refresh_token";
                // the previous camelCase "refreshToken" field would not be
                // recognized by the token endpoint.
                formData.append("refresh_token", refreshToken);
                formData.append("client_id", config.patreon.clientId);
                formData.append("client_secret", config.patreon.clientSecret);
                formData.append("grant_type", "refresh_token");

                const domain = "https://www.patreon.com";
                const result = await axios.request({
                    url: `${domain}/api/oauth2/token`,
                    data: formData,
                    method: "POST",
                    headers: formData.getHeaders()
                });

                if (result.status === 200) {
                    await privateDB.prepare("run", `UPDATE "oauthLicenseKeys" SET "accessToken" = ?, "refreshToken" = ?, "expiresIn" = ? WHERE "licenseKey" = ?`
                        , [result.data.access_token, result.data.refresh_token, result.data.expires_in, licenseKey]);
                    return true;
                }
            } catch (e) {
                Logger.error(`token refresh: ${e}`);
                return false;
            }
            // Non-200 response: fall through to the false return below.
            break;
        }
    }
    return false;
}
/**
 * Generates a cryptographically random alphanumeric token.
 * Each character is drawn uniformly (via crypto.randomInt) from [A-Za-z0-9].
 *
 * @param length number of characters in the token (default 40)
 */
function generateToken(length = 40): string {
    const charset = "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789";
    return Array.from({ length }, () => charset[randomInt(charset.length)]).join("");
}
/**
 * Fetches membership/pledge details for the user owning `accessToken`
 * from Patreon's v2 identity endpoint.
 *
 * @returns the identity payload, or null when the request fails or
 *          returns a non-200 status
 */
export async function getPatreonIdentity(accessToken: string): Promise<PatreonIdentityData> {
    const url = `https://www.patreon.com/api/oauth2/v2/identity?include=memberships&fields%5Bmember%5D=patron_status,currently_entitled_amount_cents,campaign_lifetime_support_cents,pledge_relationship_start`;
    try {
        const response = await axios.get(url, {
            headers: {
                Authorization: `Bearer ${accessToken}`
            }
        });
        if (response.status === 200) return response.data;
    } catch (e) {
        Logger.error(`identity request: ${e}`);
    }
    return null;
}

View File

@@ -17,7 +17,7 @@ export class YouTubeAPI {
if (data) {
Logger.debug(`YouTube API: cache used for video information: ${videoID}`);
return { err: null, data: JSON.parse(data) };
return { err: null, data: data as APIVideoData };
}
} catch (err) {
return { err: err as string | boolean, data: null };
@@ -38,9 +38,9 @@ export class YouTubeAPI {
return { err: data.error, data: null };
}
const apiResult = data as APIVideoData;
DiskCache.set(cacheKey, JSON.stringify(apiResult))
.catch((err: any) => Logger.warn(err))
.then(() => Logger.debug(`YouTube API: video information cache set for: ${videoID}`));
DiskCache.set(cacheKey, apiResult)
.then(() => Logger.debug(`YouTube API: video information cache set for: ${videoID}`))
.catch((err: any) => Logger.warn(err));
return { err: false, data: apiResult };
} else {
@@ -52,6 +52,5 @@ export class YouTubeAPI {
}
}
export function getMaxResThumbnail(apiInfo: APIVideoData): string | void {
return apiInfo?.videoThumbnails?.find((elem) => elem.quality === "maxres")?.second__originalUrl;
}
export const getMaxResThumbnail = (videoID: string): string =>
`https://i.ytimg.com/vi/${videoID}/maxresdefault.jpg`;

View File

@@ -57,5 +57,6 @@
"max": 20,
"statusCode": 200
}
}
},
"minReputationToSubmitFiller": -1
}

68
test/cases/addFeatures.ts Normal file
View File

@@ -0,0 +1,68 @@
import assert from "assert";
import { db } from "../../src/databases/databases";
import { Feature, HashedUserID } from "../../src/types/user.model";
import { hasFeature } from "../../src/utils/features";
import { getHash } from "../../src/utils/getHash";
import { client } from "../utils/httpClient";
// Endpoint under test.
const endpoint = "/api/feature";

// Issues a POST toggling `feature` for `userID`, authorized by `adminUserID`.
const postAddFeatures = (userID: string, adminUserID: string, feature: Feature, enabled: string) => {
    return client({
        method: "POST",
        url: endpoint,
        data: { userID, feature, enabled, adminUserID }
    });
};
// VIP issuing the feature changes (the private ID is sent; its hash is stored).
const privateVipUserID = "VIPUser-addFeatures";
const vipUserID = getHash(privateVipUserID);
// Target users: user1 starts without the feature; user2 and user3 are pre-seeded with it.
const hashedUserID1 = "user1-addFeatures" as HashedUserID;
const hashedUserID2 = "user2-addFeatures" as HashedUserID;
const hashedUserID3 = "user3-addFeatures" as HashedUserID;
// Features the "can add" test iterates over.
const validFeatures = [Feature.ChapterSubmitter];
describe("addFeatures", () => {
    // Seed a VIP (authorized to grant features) and give users 2 and 3 the
    // ChapterSubmitter feature so the remove/update paths can be exercised.
    before(() => {
        const userFeatureQuery = `INSERT INTO "userFeatures" ("userID", "feature", "issuerUserID", "timeSubmitted") VALUES(?, ?, ?, ?)`;
        return Promise.all([
            db.prepare("run", `INSERT INTO "vipUsers" ("userID") VALUES (?)`, [vipUserID]),
            db.prepare("run", userFeatureQuery, [hashedUserID2, Feature.ChapterSubmitter, "some-user", 0]),
            db.prepare("run", userFeatureQuery, [hashedUserID3, Feature.ChapterSubmitter, "some-user", 0])
        ]);
    });
    // Grant each valid feature to a user who lacks it and verify it takes effect.
    it("can add features", async () => {
        for (const feature of validFeatures) {
            const result = await postAddFeatures(hashedUserID1, privateVipUserID, feature, "true");
            assert.strictEqual(result.status, 200);
            assert.strictEqual(await hasFeature(hashedUserID1, feature), true);
        }
    });
    // Disabling an existing feature should remove it.
    it("can remove features", async () => {
        const feature = Feature.ChapterSubmitter;
        const result = await postAddFeatures(hashedUserID2, privateVipUserID, feature, "false");
        assert.strictEqual(result.status, 200);
        assert.strictEqual(await hasFeature(hashedUserID2, feature), false);
    });
    // Re-enabling an already-granted feature should succeed (idempotent update).
    it("can update features", async () => {
        const feature = Feature.ChapterSubmitter;
        const result = await postAddFeatures(hashedUserID3, privateVipUserID, feature, "true");
        assert.strictEqual(result.status, 200);
        assert.strictEqual(await hasFeature(hashedUserID3, feature), true);
    });
});

View File

@@ -19,9 +19,9 @@ if (db instanceof Postgres) {
await db.prepare("run", query, [chapterNamesVid1, 70, 75, 2, 0, "chapterNamesVid-2", "testman", 0, 50, "chapter", "chapter", "YouTube", 0, 0, 0, "A different one"]);
await db.prepare("run", query, [chapterNamesVid1, 71, 76, 2, 0, "chapterNamesVid-3", "testman", 0, 50, "chapter", "chapter", "YouTube", 0, 0, 0, "Something else"]);
await db.prepare("run", `INSERT INTO "videoInfo" ("videoID", "channelID", "title", "published", "genreUrl")
SELECT ?, ?, ?, ?, ?`, [
chapterNamesVid1, chapterChannelID, "", 0, ""
await db.prepare("run", `INSERT INTO "videoInfo" ("videoID", "channelID", "title", "published")
SELECT ?, ?, ?, ?`, [
chapterNamesVid1, chapterChannelID, "", 0
]);
});

View File

@@ -0,0 +1,31 @@
import { config } from "../../src/config";
import { getHashCache } from "../../src/utils/getHashCache";
import { shaHashKey } from "../../src/utils/redisKeys";
import { getHash } from "../../src/utils/getHash";
import redis from "../../src/utils/redis";
import crypto from "crypto";
import assert from "assert";
import { setTimeout } from "timers/promises";
// Random hex string: `bytes` bytes => 2*bytes hex characters.
const genRandom = (bytes=8) => crypto.pseudoRandomBytes(bytes).toString("hex");
// Random input plus its single-round hash (the cache key) and
// full hash (the value expected to be cached).
const rand1Hash = genRandom(24);
const rand1Hash_Key = getHash(rand1Hash, 1);
const rand1Hash_Result = getHash(rand1Hash);
describe("getHashCache test", function() {
    // These tests only make sense when a redis cache is configured.
    before(function() {
        if (!config.redis?.enabled) this.skip();
    });
    // getHashCache should write the computed hash to redis under the sha key.
    it("Should set hashKey and be able to retreive", (done) => {
        const redisKey = shaHashKey(rand1Hash_Key);
        getHashCache(rand1Hash)
            .then(() => setTimeout(50)) // add timeout for redis to complete async
            .then(() => redis.get(redisKey))
            .then(result => {
                assert.strictEqual(result, rand1Hash_Result);
                done();
            })
            // the redis get timeout rejects with no value; map that to a readable failure
            .catch(err => done(err === undefined ? "no set value" : err));
    }).timeout(5000);
});

View File

@@ -31,7 +31,7 @@ describe("getLockReason", () => {
});
after(async () => {
const deleteUserNameQuery = 'DELETE FROM "userNames" WHERE "userID" = ? AND "userName" = ? LIMIT 1';
const deleteUserNameQuery = 'DELETE FROM "userNames" WHERE "userID" = ? AND "userName" = ?';
await db.prepare("run", deleteUserNameQuery, [vipUserID1, vipUserName1]);
await db.prepare("run", deleteUserNameQuery, [vipUserID2, vipUserName2]);
});

View File

@@ -23,6 +23,8 @@ describe("getSkipSegments", () => {
await db.prepare("run", query, ["requiredSegmentVid", 60, 70, -2, 0, "requiredSegmentVid2", "testman", 0, 50, "sponsor", "skip", "YouTube", 0, 0, 0, ""]);
await db.prepare("run", query, ["requiredSegmentVid", 80, 90, -2, 0, "requiredSegmentVid3", "testman", 0, 50, "sponsor", "skip", "YouTube", 0, 0, 0, ""]);
await db.prepare("run", query, ["requiredSegmentVid", 80, 90, 2, 0, "requiredSegmentVid4", "testman", 0, 50, "sponsor", "skip", "YouTube", 0, 0, 0, ""]);
await db.prepare("run", query, ["requiredSegmentVid", 60, 70, 0, 0, "requiredSegmentVid-hidden", "testman", 0, 50, "sponsor", "skip", "YouTube", 0, 1, 0, ""]);
await db.prepare("run", query, ["requiredSegmentVid", 80, 90, 0, 0, "requiredSegmentVid-shadowhidden", "testman", 0, 50, "sponsor", "skip", "YouTube", 0, 0, 1, ""]);
await db.prepare("run", query, ["chapterVid", 60, 80, 2, 0, "chapterVid-1", "testman", 0, 50, "chapter", "chapter", "YouTube", 0, 0, 0, "Chapter 1"]);
await db.prepare("run", query, ["chapterVid", 70, 75, 2, 0, "chapterVid-2", "testman", 0, 50, "chapter", "chapter", "YouTube", 0, 0, 0, "Chapter 2"]);
await db.prepare("run", query, ["chapterVid", 71, 75, 2, 0, "chapterVid-3", "testman", 0, 50, "chapter", "chapter", "YouTube", 0, 0, 0, "Chapter 3"]);
@@ -447,4 +449,42 @@ describe("getSkipSegments", () => {
})
.catch(err => done(err));
});
it("Should be able to get hidden segments with requiredSegments", (done) => {
const required3 = "requiredSegmentVid3";
const requiredHidden = "requiredSegmentVid-hidden";
client.get(endpoint, { params: { videoID: "requiredSegmentVid", requiredSegments: `["${requiredHidden}","${required3}"]` } })
.then(res => {
assert.strictEqual(res.status, 200);
const data = res.data;
assert.strictEqual(data.length, 2);
const expected = [{
UUID: requiredHidden,
}, {
UUID: required3,
}];
assert.ok(partialDeepEquals(data, expected));
done();
})
.catch(err => done(err));
});
it("Should be able to get shadowhidden segments with requiredSegments", (done) => {
const required2 = "requiredSegmentVid2";
const requiredShadowHidden = "requiredSegmentVid-shadowhidden";
client.get(endpoint, { params: { videoID: "requiredSegmentVid", requiredSegments: `["${required2}","${requiredShadowHidden}"]` } })
.then(res => {
assert.strictEqual(res.status, 200);
const data = res.data;
assert.strictEqual(data.length, 2);
const expected = [{
UUID: required2,
}, {
UUID: requiredShadowHidden,
}];
assert.ok(partialDeepEquals(data, expected));
done();
})
.catch(err => done(err));
});
});

View File

@@ -150,7 +150,7 @@ describe("getSkipSegmentsByHash", () => {
client.get(`${endpoint}/fdaf`, { params: { categories: `["sponsor","intro"]` } })
.then(res => {
assert.strictEqual(res.status, 200);
const data = res.data;
const data = (res.data as Array<any>).sort((a, b) => a.videoID.localeCompare(b.videoID));
assert.strictEqual(data.length, 2);
assert.strictEqual(data[0].segments.length, 2);
assert.strictEqual(data[1].segments.length, 1);
@@ -163,15 +163,15 @@ describe("getSkipSegmentsByHash", () => {
client.get(`${endpoint}/fdaf`)
.then(res => {
assert.strictEqual(res.status, 200);
const data = res.data;
const data = (res.data as Array<any>).sort((a, b) => a.videoID.localeCompare(b.videoID));
const expected = [{
segments: [{
category: "sponsor",
UUID: "getSegmentsByHash-01",
UUID: "getSegmentsByHash-01"
}]
}, {
segments: [{
category: "sponsor",
category: "sponsor"
}]
}];
assert.strictEqual(data.length, 2);
@@ -187,7 +187,7 @@ describe("getSkipSegmentsByHash", () => {
client.get(`${endpoint}/fdaf`, { params: { actionType: "skip" } })
.then(res => {
assert.strictEqual(res.status, 200);
const data = res.data;
const data = (res.data as Array<any>).sort((a, b) => a.videoID.localeCompare(b.videoID));
assert.strictEqual(data.length, 2);
assert.strictEqual(data[0].segments.length, 1);
assert.strictEqual(data[1].segments.length, 1);
@@ -211,7 +211,7 @@ describe("getSkipSegmentsByHash", () => {
client.get(`${endpoint}/fdaf?actionType=skip&actionType=mute`)
.then(res => {
assert.strictEqual(res.status, 200);
const data = res.data;
const data = (res.data as Array<any>).sort((a, b) => a.videoID.localeCompare(b.videoID));
assert.strictEqual(data.length, 2);
assert.strictEqual(data[0].segments.length, 2);
assert.strictEqual(data[1].segments.length, 1);
@@ -237,7 +237,7 @@ describe("getSkipSegmentsByHash", () => {
client.get(`${endpoint}/fdaf?actionTypes=["skip","mute"]`)
.then(res => {
assert.strictEqual(res.status, 200);
const data = res.data;
const data = (res.data as Array<any>).sort((a, b) => a.videoID.localeCompare(b.videoID));
assert.strictEqual(data.length, 2);
const expected = [{
segments: [{
@@ -261,7 +261,7 @@ describe("getSkipSegmentsByHash", () => {
client.get(`${endpoint}/fdaf`, { params: { service: "PeerTube" } })
.then(res => {
assert.strictEqual(res.status, 200);
const data = res.data;
const data = (res.data as Array<any>).sort((a, b) => a.videoID.localeCompare(b.videoID));
assert.strictEqual(data.length, 1);
const expected = [{
segments: [{
@@ -279,7 +279,7 @@ describe("getSkipSegmentsByHash", () => {
client.get(`${endpoint}/c962`, { params: { category: "poi_highlight", actionType: "poi" } })
.then(res => {
assert.strictEqual(res.status, 200);
const data = res.data;
const data = (res.data as Array<any>).sort((a, b) => a.videoID.localeCompare(b.videoID));
assert.strictEqual(data.length, 1);
assert.strictEqual(data[0].segments.length, 1);
assert.strictEqual(data[0].segments[0].category, "poi_highlight");
@@ -293,7 +293,7 @@ describe("getSkipSegmentsByHash", () => {
client.get(`${endpoint}/c962`, { params: { category: "poi_highlight" } })
.then(res => {
assert.strictEqual(res.status, 200);
const data = res.data;
const data = (res.data as Array<any>).sort((a, b) => a.videoID.localeCompare(b.videoID));
assert.strictEqual(data.length, 1);
assert.strictEqual(data[0].segments.length, 1);
assert.strictEqual(data[0].segments[0].category, "poi_highlight");
@@ -317,7 +317,7 @@ describe("getSkipSegmentsByHash", () => {
client.get(`${endpoint}/${getHash(testID, 1).substring(0, 3)}`)
.then(res => {
assert.strictEqual(res.status, 200);
const data = res.data;
const data = (res.data as Array<any>).sort((a, b) => a.videoID.localeCompare(b.videoID));
assert.strictEqual(data.length, 1);
const expected = [{
segments: [{
@@ -337,7 +337,7 @@ describe("getSkipSegmentsByHash", () => {
client.get(`${endpoint}/fdaff4?&category=sponsor&category=intro`)
.then(res => {
assert.strictEqual(res.status, 200);
const data = res.data;
const data = (res.data as Array<any>).sort((a, b) => a.videoID.localeCompare(b.videoID));
assert.strictEqual(data.length, 1);
const expected = [{
segments: [{
@@ -360,7 +360,7 @@ describe("getSkipSegmentsByHash", () => {
client.get(`${endpoint}/d518?requiredSegments=["requiredSegmentVid-2","requiredSegmentVid-3"]`)
.then(res => {
assert.strictEqual(res.status, 200);
const data = res.data;
const data = (res.data as Array<any>).sort((a, b) => a.videoID.localeCompare(b.videoID));
assert.strictEqual(data.length, 1);
const expected = [{
segments: [{
@@ -380,7 +380,7 @@ describe("getSkipSegmentsByHash", () => {
client.get(`${endpoint}/d518?requiredSegment=requiredSegmentVid-2&requiredSegment=requiredSegmentVid-3`)
.then(res => {
assert.strictEqual(res.status, 200);
const data = res.data;
const data = (res.data as Array<any>).sort((a, b) => a.videoID.localeCompare(b.videoID));
assert.strictEqual(data.length, 1);
assert.strictEqual(data[0].segments.length, 2);
const expected = [{
@@ -400,7 +400,7 @@ describe("getSkipSegmentsByHash", () => {
client.get(`${endpoint}/7258?category=chapter&actionType=chapter`)
.then(res => {
assert.strictEqual(res.status, 200);
const data = res.data;
const data = (res.data as Array<any>).sort((a, b) => a.videoID.localeCompare(b.videoID));
assert.strictEqual(data.length, 1);
const expected = [{
segments: [{
@@ -432,7 +432,7 @@ describe("getSkipSegmentsByHash", () => {
client.get(`${endpoint}/6613?actionType=skip&actionType=mute`)
.then(res => {
assert.strictEqual(res.status, 200);
const data = res.data;
const data = (res.data as Array<any>).sort((a, b) => a.videoID.localeCompare(b.videoID));
assert.strictEqual(data.length, 1);
const expected = [{
segments: [{
@@ -490,7 +490,7 @@ describe("getSkipSegmentsByHash", () => {
client.get(`${endpoint}/3061?categories=["sponsor","music_offtopic"]`)
.then(res => {
assert.strictEqual(res.status, 200);
const data = res.data;
const data = (res.data as Array<any>).sort((a, b) => a.videoID.localeCompare(b.videoID));
assert.strictEqual(data.length, 1);
const expected = [{
segments: [{
@@ -510,7 +510,7 @@ describe("getSkipSegmentsByHash", () => {
client.get(`${endpoint}/ab0c?actionType=skip&actionType=mute`)
.then(res => {
assert.strictEqual(res.status, 200);
const data = res.data;
const data = (res.data as Array<any>).sort((a, b) => a.videoID.localeCompare(b.videoID));
assert.strictEqual(data.length, 1);
const expected = [{
segments: [{
@@ -551,7 +551,7 @@ describe("getSkipSegmentsByHash", () => {
client.get(`${endpoint}/278f`, { params: { category: ["sponsor", "selfpromo"], actionType: "full" } })
.then(res => {
assert.strictEqual(res.status, 200);
const data = res.data;
const data = (res.data as Array<any>).sort((a, b) => a.videoID.localeCompare(b.videoID));
assert.strictEqual(data.length, 1);
assert.strictEqual(data[0].segments.length, 1);
assert.strictEqual(data[0].segments[0].category, "selfpromo");
@@ -566,7 +566,7 @@ describe("getSkipSegmentsByHash", () => {
client.get(`${endpoint}/17bf?requiredSegments=["${requiredSegment1.slice(0,8)}","${requiredSegment2.slice(0,8)}"]`)
.then(res => {
assert.strictEqual(res.status, 200);
const data = res.data;
const data = (res.data as Array<any>).sort((a, b) => a.videoID.localeCompare(b.videoID));
assert.strictEqual(data.length, 1);
const expected = [{
segments: [{

View File

@@ -110,4 +110,16 @@ describe("getStatus", () => {
})
.catch(err => done(err));
});
it("Should be able to get redis latency", function (done) {
if (!config.redis?.enabled) this.skip();
client.get(endpoint)
.then(res => {
assert.strictEqual(res.status, 200);
const data = res.data;
assert.ok(data.redisProcessTime >= 0);
done();
})
.catch(err => done(err));
});
});

View File

@@ -10,16 +10,17 @@ describe("getUserInfo", () => {
const insertUserNameQuery = 'INSERT INTO "userNames" ("userID", "userName") VALUES(?, ?)';
await db.prepare("run", insertUserNameQuery, [getHash("getuserinfo_user_01"), "Username user 01"]);
const sponsorTimesQuery = 'INSERT INTO "sponsorTimes" ("videoID", "startTime", "endTime", "votes", "UUID", "userID", "timeSubmitted", views, category, "shadowHidden") VALUES(?, ?, ?, ?, ?, ?, ?, ?, ?, ?)';
await db.prepare("run", sponsorTimesQuery, ["getUserInfo0", 1, 11, 2, "uuid000001", getHash("getuserinfo_user_01"), 1, 10, "sponsor", 0]);
await db.prepare("run", sponsorTimesQuery, ["getUserInfo0", 1, 11, 2, "uuid000002", getHash("getuserinfo_user_01"), 2, 10, "sponsor", 0]);
await db.prepare("run", sponsorTimesQuery, ["getUserInfo1", 1, 11, -1, "uuid000003", getHash("getuserinfo_user_01"), 3, 10, "sponsor", 0]);
await db.prepare("run", sponsorTimesQuery, ["getUserInfo1", 1, 11, -2, "uuid000004", getHash("getuserinfo_user_01"), 4, 10, "sponsor", 1]);
await db.prepare("run", sponsorTimesQuery, ["getUserInfo2", 1, 11, -5, "uuid000005", getHash("getuserinfo_user_01"), 5, 10, "sponsor", 1]);
await db.prepare("run", sponsorTimesQuery, ["getUserInfo0", 1, 11, 2, "uuid000007", getHash("getuserinfo_user_02"), 7, 10, "sponsor", 1]);
await db.prepare("run", sponsorTimesQuery, ["getUserInfo0", 1, 11, 2, "uuid000008", getHash("getuserinfo_user_02"), 8, 10, "sponsor", 1]);
await db.prepare("run", sponsorTimesQuery, ["getUserInfo0", 0, 36000, 2,"uuid000009", getHash("getuserinfo_user_03"), 8, 10, "sponsor", 0]);
await db.prepare("run", sponsorTimesQuery, ["getUserInfo3", 1, 11, 2, "uuid000006", getHash("getuserinfo_user_02"), 6, 10, "sponsor", 0]);
const sponsorTimesQuery = 'INSERT INTO "sponsorTimes" ("videoID", "startTime", "endTime", "votes", "UUID", "userID", "timeSubmitted", views, category, "actionType", "shadowHidden") VALUES(?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)';
await db.prepare("run", sponsorTimesQuery, ["getUserInfo0", 1, 11, 2, "uuid000001", getHash("getuserinfo_user_01"), 1, 10, "sponsor", "skip", 0]);
await db.prepare("run", sponsorTimesQuery, ["getUserInfo0", 1, 11, 2, "uuid000002", getHash("getuserinfo_user_01"), 2, 10, "sponsor", "skip", 0]);
await db.prepare("run", sponsorTimesQuery, ["getUserInfo1", 1, 11, -1, "uuid000003", getHash("getuserinfo_user_01"), 3, 10, "sponsor", "skip", 0]);
await db.prepare("run", sponsorTimesQuery, ["getUserInfo1", 1, 11, -2, "uuid000004", getHash("getuserinfo_user_01"), 4, 10, "sponsor", "skip", 1]);
await db.prepare("run", sponsorTimesQuery, ["getUserInfo2", 1, 11, -5, "uuid000005", getHash("getuserinfo_user_01"), 5, 10, "sponsor", "skip", 1]);
await db.prepare("run", sponsorTimesQuery, ["getUserInfo0", 1, 11, 2, "uuid000007", getHash("getuserinfo_user_02"), 7, 10, "sponsor", "skip", 1]);
await db.prepare("run", sponsorTimesQuery, ["getUserInfo0", 1, 11, 2, "uuid000008", getHash("getuserinfo_user_02"), 8, 10, "sponsor", "skip", 1]);
await db.prepare("run", sponsorTimesQuery, ["getUserInfo0", 0, 36000, 2,"uuid000009", getHash("getuserinfo_user_03"), 8, 10, "sponsor", "skip", 0]);
await db.prepare("run", sponsorTimesQuery, ["getUserInfo3", 1, 11, 2, "uuid000006", getHash("getuserinfo_user_02"), 6, 10, "sponsor", "skip", 0]);
await db.prepare("run", sponsorTimesQuery, ["getUserInfo4", 1, 11, 2, "uuid000010", getHash("getuserinfo_user_04"), 9, 10, "chapter", "chapter", 0]);
const insertWarningQuery = 'INSERT INTO warnings ("userID", "issueTime", "issuerUserID", "enabled", "reason") VALUES (?, ?, ?, ?, ?)';
@@ -64,7 +65,7 @@ describe("getUserInfo", () => {
ignoredViewCount: 20,
segmentCount: 3,
ignoredSegmentCount: 2,
reputation: -2,
reputation: -1.5,
lastSegmentID: "uuid000005",
vip: false,
warnings: 0,
@@ -307,4 +308,28 @@ describe("getUserInfo", () => {
})
.catch(err => done(err));
});
it("Should ignore chapters for saved time calculations", (done) => {
client.get(endpoint, { params: { userID: "getuserinfo_user_04" } })
.then(res => {
assert.strictEqual(res.status, 200);
const expected = {
userName: "f187933817e7b0211a3f6f7d542a63ca9cc289d6cc8a8a79669d69a313671ccf",
userID: "f187933817e7b0211a3f6f7d542a63ca9cc289d6cc8a8a79669d69a313671ccf",
minutesSaved: 0,
viewCount: 10,
ignoredViewCount: 0,
segmentCount: 1,
ignoredSegmentCount: 0,
reputation: 0,
lastSegmentID: "uuid000010",
vip: false,
warnings: 0,
warningReason: ""
};
assert.deepStrictEqual(res.data, expected);
done();
})
.catch(err => done(err));
});
});

View File

@@ -22,8 +22,7 @@ describe("getUserStats", () => {
await db.prepare("run", sponsorTimesQuery, ["getuserstats1", 0, 60, -2, "skip", "getuserstatsuuid9", getHash("getuserstats_user_02"), 8, 2, "sponsor", 0]);
await db.prepare("run", sponsorTimesQuery, ["getuserstats1", 0, 60, 0, "skip", "getuserstatsuuid10", getHash("getuserstats_user_01"), 8, 2, "filler", 0]);
await db.prepare("run", sponsorTimesQuery, ["getuserstats1", 0, 0, 0, "full", "getuserstatsuuid11", getHash("getuserstats_user_01"), 8, 2, "exclusive_access", 0]);
await db.prepare("run", sponsorTimesQuery, ["getuserstats1", 0, 60, 0, "chapter", "getuserstatsuuid12", getHash("getuserstats_user_01"), 9, 2, "chapter", 0]);
});
it("Should be able to get a 400 (No userID parameter)", (done) => {
@@ -52,17 +51,19 @@ describe("getUserStats", () => {
music_offtopic: 1,
poi_highlight: 1,
filler: 1,
exclusive_access: 1
exclusive_access: 1,
chapter: 1,
},
actionTypeCount: {
mute: 0,
skip: 8,
full: 1,
poi: 1
poi: 1,
chapter: 1,
},
overallStats: {
minutesSaved: 30,
segmentCount: 10
segmentCount: 11
}
};
assert.ok(partialDeepEquals(res.data, expected));

View File

@@ -0,0 +1,49 @@
import { config } from "../../src/config";
import assert from "assert";
import { YouTubeAPI } from "../../src/utils/youtubeApi";
import * as innerTube from "../../src/utils/innerTubeAPI";
import { partialDeepEquals } from "../utils/partialDeepEquals";
import { getVideoDetails } from "../../src/utils/getVideoDetails";
const videoID = "BaW_jenozKc";
const expectedInnerTube = { // partial type of innerTubeVideoDetails
videoId: videoID,
title: "youtube-dl test video \"'/\\ä↭𝕐",
lengthSeconds: "10",
channelId: "UCLqxVugv74EIW3VWh2NOa3Q",
isOwnerViewing: false,
isCrawlable: true,
allowRatings: true,
author: "Philipp Hagemeister",
isPrivate: false,
isUnpluggedCorpus: false,
isLiveContent: false
};
const currentViews = 49816;
describe("innertube API test", function() {
it("should be able to get innerTube details", async () => {
const result = await innerTube.getPlayerData(videoID, true);
assert.ok(partialDeepEquals(result, expectedInnerTube));
});
it("Should have more views than current", async () => {
const result = await innerTube.getPlayerData(videoID, true);
assert.ok(Number(result.viewCount) >= currentViews);
});
it("Should have equivalent response from NewLeaf", async function () {
if (!config.newLeafURLs || config.newLeafURLs.length <= 0 || config.newLeafURLs[0] == "placeholder") this.skip();
const itResponse = await innerTube.getPlayerData(videoID, true);
const newLeafResponse = await YouTubeAPI.listVideos(videoID, true);
// validate videoID
assert.strictEqual(itResponse.videoId, videoID);
assert.strictEqual(newLeafResponse.data?.videoId, videoID);
// validate description
assert.strictEqual(itResponse.shortDescription, newLeafResponse.data?.description);
// validate authorId
assert.strictEqual(itResponse.channelId, newLeafResponse.data?.authorId);
});
it("Should return data from generic endpoint", async function () {
const videoDetail = await getVideoDetails(videoID);
assert.ok(videoDetail);
});
});

View File

@@ -54,6 +54,7 @@ describe("lockCategoriesRecords", () => {
await db.prepare("run", insertLockCategoryQuery, [lockVIPUserHash, "delete-record-1", "mute", "sponsor", "reason-5", "YouTube"]);
await db.prepare("run", insertLockCategoryQuery, [lockVIPUserHash, "delete-record-1", "skip", "intro", "reason-5", "YouTube"]);
await db.prepare("run", insertLockCategoryQuery, [lockVIPUserHash, "delete-record-1", "mute", "intro", "reason-5", "YouTube"]);
await db.prepare("run", insertLockCategoryQuery, [lockVIPUserHash, "delete-record-poi", "poi", "poi_highlight", "reason-6", "YouTube"]);
});
it("Should update the database version when starting the application", async () => {
@@ -519,4 +520,44 @@ describe("lockCategoriesRecords", () => {
})
.catch(err => done(err));
});
it("should be able to delete poi type category by type poi", (done) => {
const videoID = "delete-record-poi";
const json = {
videoID,
userID: lockVIPUser,
categories: [
"poi_highlight",
],
actionTypes: ["poi"]
};
client.delete(endpoint, { data: json })
.then(async res => {
assert.strictEqual(res.status, 200);
const result = await checkLockCategories(videoID);
assert.strictEqual(result.length, 0);
done();
})
.catch(err => done(err));
});
it("should be able to delete poi type category by type poi", (done) => {
const videoID = "delete-record-poi";
const json = {
videoID,
userID: lockVIPUser,
categories: [
"poi_highlight",
],
actionTypes: ["poi"]
};
client.delete(endpoint, { data: json })
.then(async res => {
assert.strictEqual(res.status, 200);
const result = await checkLockCategories(videoID);
assert.strictEqual(result.length, 0);
done();
})
.catch(err => done(err));
});
});

View File

@@ -44,8 +44,17 @@ describe("postVideoSponsorTime (Old submission method)", () => {
.catch(err => done(err));
});
it("Should return 400 for missing params", (done) => {
client.post(endpoint, { params: { startTime: 1, endTime: 10, userID } })
it("Should return 400 for missing video", (done) => {
client.get(endpoint, { params: { startTime: 1, endTime: 10, userID } })
.then(res => {
assert.strictEqual(res.status, 400);
done();
})
.catch(err => done(err));
});
it("Should return 400 for missing userID", (done) => {
client.get(endpoint, { params: { videoID: videoID1, startTime: 1, endTime: 10 } })
.then(res => {
assert.strictEqual(res.status, 400);
done();

View File

@@ -7,6 +7,7 @@ import * as YouTubeAPIModule from "../../src/utils/youtubeApi";
import { YouTubeApiMock } from "../youtubeMock";
import assert from "assert";
import { client } from "../utils/httpClient";
import { Feature } from "../../src/types/user.model";
const mockManager = ImportMock.mockStaticClass(YouTubeAPIModule, "YouTubeAPI");
const sinonStub = mockManager.mock("listVideos");
@@ -15,6 +16,7 @@ sinonStub.callsFake(YouTubeApiMock.listVideos);
describe("postSkipSegments", () => {
// Constant and helpers
const submitUserOne = `PostSkipUser1${".".repeat(18)}`;
const submitUserOneHash = getHash(submitUserOne);
const submitUserTwo = `PostSkipUser2${".".repeat(18)}`;
const submitUserTwoHash = getHash(submitUserTwo);
const submitUserThree = `PostSkipUser3${".".repeat(18)}`;
@@ -30,7 +32,6 @@ describe("postSkipSegments", () => {
const banUser01 = "ban-user01-loremipsumdolorsitametconsectetur";
const banUser01Hash = getHash(banUser01);
const submitUserOneHash = getHash(submitUserOne);
const submitVIPuser = `VIPPostSkipUser${".".repeat(16)}`;
const warnVideoID = "postSkip2";
const badInputVideoID = "dQw4w9WgXcQ";
@@ -66,6 +67,15 @@ describe("postSkipSegments", () => {
db.prepare("run", insertSponsorTimeQuery, ["full_video_duration_segment", 0, 0, 0, "full-video-duration-uuid-0", submitUserTwoHash, 0, 0, "sponsor", "full", 123, 0, "full_video_duration_segment"]);
db.prepare("run", insertSponsorTimeQuery, ["full_video_duration_segment", 25, 30, 0, "full-video-duration-uuid-1", submitUserTwoHash, 0, 0, "sponsor", "skip", 123, 0, "full_video_duration_segment"]);
const reputationVideoID = "post_reputation_video";
db.prepare("run", insertSponsorTimeQuery, [reputationVideoID, 1, 11, 2,"post_reputation-5-uuid-0", submitUserOneHash, 1606240000000, 50, "sponsor", "skip", 0, 0, reputationVideoID]);
db.prepare("run", insertSponsorTimeQuery, [reputationVideoID, 1, 11, 2,"post_reputation-5-uuid-1", submitUserOneHash, 1606240000000, 50, "sponsor", "skip", 0, 0, reputationVideoID]);
db.prepare("run", insertSponsorTimeQuery, [reputationVideoID, 1, 11, 2,"post_reputation-5-uuid-2", submitUserOneHash, 1606240000000, 50, "sponsor", "skip", 0, 0, reputationVideoID]);
db.prepare("run", insertSponsorTimeQuery, [reputationVideoID, 1, 11, 2,"post_reputation-5-uuid-3", submitUserOneHash, 1606240000000, 50, "sponsor", "skip", 0, 0, reputationVideoID]);
db.prepare("run", insertSponsorTimeQuery, [reputationVideoID, 1, 11, 2,"post_reputation-5-uuid-4", submitUserOneHash, 1606240000000, 50, "sponsor", "skip", 0, 0, reputationVideoID]);
db.prepare("run", insertSponsorTimeQuery, [reputationVideoID, 1, 11, 0,"post_reputation-5-uuid-6", submitUserOneHash, 1606240000000, 50, "sponsor", "skip", 0, 0, reputationVideoID]);
db.prepare("run", insertSponsorTimeQuery, [reputationVideoID, 1, 11, 0,"post_reputation-5-uuid-7", submitUserOneHash, 1606240000000, 50, "sponsor", "skip", 0, 0, reputationVideoID]);
const now = Date.now();
const warnVip01Hash = getHash("warn-vip01-qwertyuiopasdfghjklzxcvbnm");
const reason01 = "Reason01";
@@ -83,7 +93,6 @@ describe("postSkipSegments", () => {
db.prepare("run", insertWarningQuery, [warnUser01Hash, warnVip01Hash, 1, reason01, (now - 3601000)]);
// User 2
db.prepare("run", insertWarningQuery, [warnUser02Hash, warnVip01Hash, 1, reason02, now]);
db.prepare("run", insertWarningQuery, [warnUser02Hash, warnVip01Hash, 1, reason02, now]);
db.prepare("run", insertWarningQuery, [warnUser02Hash, warnVip01Hash, 1, reason02, (now - (warningExpireTime + 1000))]);
db.prepare("run", insertWarningQuery, [warnUser02Hash, warnVip01Hash, 1, reason02, (now - (warningExpireTime + 2000))]);
// User 3
@@ -102,6 +111,9 @@ describe("postSkipSegments", () => {
// ban user
db.prepare("run", `INSERT INTO "shadowBannedUsers" ("userID") VALUES(?)`, [banUser01Hash]);
// user feature
db.prepare("run", `INSERT INTO "userFeatures" ("userID", "feature", "issuerUserID", "timeSubmitted") VALUES(?, ?, ?, ?)`, [submitUserTwoHash, Feature.ChapterSubmitter, "some-user", 0]);
});
it("Should be able to submit a single time (Params method)", (done) => {
@@ -129,7 +141,6 @@ describe("postSkipSegments", () => {
title: "Example Title",
channelID: "ExampleChannel",
published: 123,
genreUrl: ""
};
assert.ok(partialDeepEquals(videoInfo, expectedVideoInfo));
@@ -189,7 +200,7 @@ describe("postSkipSegments", () => {
.catch(err => done(err));
});
it("Should be able to submit a single chapter (JSON method)", (done) => {
it("Should be able to submit a single chapter due to reputation (JSON method)", (done) => {
const videoID = "postSkipChapter1";
postSkipSegmentJSON({
userID: submitUserOne,
@@ -217,6 +228,34 @@ describe("postSkipSegments", () => {
.catch(err => done(err));
});
it("Should be able to submit a single chapter due to user feature (JSON method)", (done) => {
const videoID = "postSkipChapter2";
postSkipSegmentJSON({
userID: submitUserTwo,
videoID,
segments: [{
segment: [0, 10],
category: "chapter",
actionType: "chapter",
description: "This is a chapter"
}],
})
.then(async res => {
assert.strictEqual(res.status, 200);
const row = await queryDatabaseChapter(videoID);
const expected = {
startTime: 0,
endTime: 10,
category: "chapter",
actionType: "chapter",
description: "This is a chapter"
};
assert.ok(partialDeepEquals(row, expected));
done();
})
.catch(err => done(err));
});
it("Should not be able to submit an music_offtopic with mute action type (JSON method)", (done) => {
const videoID = "postSkip4";
postSkipSegmentJSON({
@@ -237,8 +276,27 @@ describe("postSkipSegments", () => {
.catch(err => done(err));
});
it("Should not be able to submit a chapter without permission (JSON method)", (done) => {
const videoID = "postSkipChapter3";
postSkipSegmentJSON({
userID: submitUserThree,
videoID,
segments: [{
segment: [0, 10],
category: "chapter",
actionType: "chapter",
description: "This is a chapter"
}],
})
.then(res => {
assert.strictEqual(res.status, 400);
done();
})
.catch(err => done(err));
});
it("Should not be able to submit a chapter with skip action type (JSON method)", (done) => {
const videoID = "postSkipChapter2";
const videoID = "postSkipChapter4";
postSkipSegmentJSON({
userID: submitUserOne,
videoID,
@@ -258,7 +316,7 @@ describe("postSkipSegments", () => {
});
it("Should not be able to submit a sponsor with a description (JSON method)", (done) => {
const videoID = "postSkipChapter3";
const videoID = "postSkipChapter5";
postSkipSegmentJSON({
userID: submitUserOne,
videoID,
@@ -1220,7 +1278,7 @@ describe("postSkipSegments", () => {
});
it("Should return 400 if videoID is empty", (done) => {
const videoID = null as string;
const videoID = null as unknown as string;
postSkipSegmentParam({
videoID,
startTime: 1,

View File

@@ -9,6 +9,8 @@ describe("postWarning", () => {
const endpoint = "/api/warnUser";
const getWarning = (userID: string) => db.prepare("get", `SELECT "userID", "issueTime", "issuerUserID", enabled, "reason" FROM warnings WHERE "userID" = ?`, [userID]);
const warnedUser = getHash("warning-0");
before(async () => {
await db.prepare("run", `INSERT INTO "vipUsers" ("userID") VALUES (?)`, [getHash("warning-vip")]);
});
@@ -16,7 +18,7 @@ describe("postWarning", () => {
it("Should be able to create warning if vip (exp 200)", (done) => {
const json = {
issuerUserID: "warning-vip",
userID: "warning-0",
userID: warnedUser,
reason: "warning-reason-0"
};
client.post(endpoint, json)
@@ -37,7 +39,7 @@ describe("postWarning", () => {
it("Should be not be able to create a duplicate warning if vip", (done) => {
const json = {
issuerUserID: "warning-vip",
userID: "warning-0",
userID: warnedUser,
};
client.post(endpoint, json)
@@ -57,7 +59,7 @@ describe("postWarning", () => {
it("Should be able to remove warning if vip", (done) => {
const json = {
issuerUserID: "warning-vip",
userID: "warning-0",
userID: warnedUser,
enabled: false
};
@@ -100,7 +102,7 @@ describe("postWarning", () => {
it("Should re-enable disabled warning", (done) => {
const json = {
issuerUserID: "warning-vip",
userID: "warning-0",
userID: warnedUser,
enabled: true
};
@@ -116,4 +118,41 @@ describe("postWarning", () => {
})
.catch(err => done(err));
});
it("Should be able to remove your own warning", (done) => {
const json = {
userID: "warning-0",
enabled: false
};
client.post(endpoint, json)
.then(async res => {
assert.strictEqual(res.status, 200);
const data = await getWarning(warnedUser);
const expected = {
enabled: 0
};
assert.ok(partialDeepEquals(data, expected));
done();
})
.catch(err => done(err));
});
it("Should be able to add your own warning", (done) => {
const json = {
userID: "warning-0"
};
client.post(endpoint, json)
.then(async res => {
assert.strictEqual(res.status, 403);
const data = await getWarning(warnedUser);
const expected = {
enabled: 0
};
assert.ok(partialDeepEquals(data, expected));
done();
})
.catch(err => done(err));
});
});

View File

@@ -1,118 +0,0 @@
import { db } from "../../../src/databases/databases";
import { getHash } from "../../../src/utils/getHash";
import assert from "assert";
import { client } from "../../utils/httpClient";
import { AxiosResponse } from "axios";
import { partialDeepEquals, arrayPartialDeepEquals } from "../../utils/partialDeepEquals";
const endpoint = "/api/ratings/rate";
const getRating = (hash: string, params?: unknown): Promise<AxiosResponse> => client.get(`${endpoint}/${hash}`, { params });
const getBulkRating = (hashes: string[], params?: any): Promise<AxiosResponse> => client.get(endpoint, { params: { ...params, prefix: hashes } });
const videoOneID = "some-likes-and-dislikes";
const videoOneIDHash = getHash(videoOneID, 1);
const videoOnePartialHash = videoOneIDHash.substr(0, 4);
const videoTwoID = "some-likes-and-dislikes-2";
const videoTwoIDHash = getHash(videoTwoID, 1);
const videoTwoPartialHash = videoTwoIDHash.substr(0, 4);
describe("getRating", () => {
before(async () => {
const insertUserNameQuery = 'INSERT INTO "ratings" ("videoID", "service", "type", "count", "hashedVideoID") VALUES (?, ?, ?, ?, ?)';
await db.prepare("run", insertUserNameQuery, [videoOneID, "YouTube", 0, 5, videoOneIDHash]);
await db.prepare("run", insertUserNameQuery, [videoOneID, "YouTube", 1, 10, videoOneIDHash]);
await db.prepare("run", insertUserNameQuery, [videoTwoID, "YouTube", 0, 20, videoTwoIDHash]);
await db.prepare("run", insertUserNameQuery, [videoTwoID, "YouTube", 1, 30, videoTwoIDHash]);
});
it("Should be able to get dislikes and likes by default", (done) => {
getRating(videoOnePartialHash)
.then(res => {
assert.strictEqual(res.status, 200);
const expected = [{
type: 0,
count: 5,
}, {
type: 1,
count: 10,
}];
assert.ok(partialDeepEquals(res.data, expected));
done();
})
.catch(err => done(err));
});
it("Should be able to filter for only dislikes", (done) => {
getRating(videoOnePartialHash, { type: 0 })
.then(res => {
assert.strictEqual(res.status, 200);
const expected = [{
type: 0,
count: 5,
}];
assert.ok(partialDeepEquals(res.data, expected));
done();
})
.catch(err => done(err));
});
/*
This test will fail if tests are already ran with redis.
*/
it("Should be able to bulk fetch", (done) => {
getBulkRating([videoOnePartialHash, videoTwoPartialHash])
.then(res => {
assert.strictEqual(res.status, 200);
const expected = [{
type: 0,
count: 20,
hash: videoTwoIDHash,
},
{
type: 1,
count: 30,
hash: videoTwoIDHash,
}, {
type: 0,
count: 5,
hash: videoOneIDHash,
}, {
type: 1,
count: 10,
hash: videoOneIDHash,
}];
assert.ok(arrayPartialDeepEquals(res.data, expected));
done();
})
.catch(err => done(err));
});
it("Should return 400 for invalid hash", (done) => {
getRating("a")
.then(res => {
assert.strictEqual(res.status, 400);
done();
})
.catch(err => done(err));
});
it("Should return 404 for nonexitent type", (done) => {
getRating(videoOnePartialHash, { type: 100 })
.then(res => {
assert.strictEqual(res.status, 404);
done();
})
.catch(err => done(err));
});
it("Should return 404 for nonexistent videoID", (done) => {
getRating("aaaa")
.then(res => {
assert.strictEqual(res.status, 404);
done();
})
.catch(err => done(err));
});
});

View File

@@ -1,51 +0,0 @@
import { db } from "../../../src/databases/databases";
import { getHash } from "../../../src/utils/getHash";
import assert from "assert";
import { client } from "../../utils/httpClient";
const VIPUser = "clearCacheVIP";
const regularUser = "regular-user";
const endpoint = "/api/ratings/clearCache";
const postClearCache = (userID: string, videoID: string) => client({ method: "post", url: endpoint, params: { userID, videoID } });
describe("ratings postClearCache", () => {
before(async () => {
await db.prepare("run", `INSERT INTO "vipUsers" ("userID") VALUES ('${getHash(VIPUser)}')`);
});
it("Should be able to clear cache amy video", (done) => {
postClearCache(VIPUser, "dne-video")
.then(res => {
assert.strictEqual(res.status, 200);
done();
})
.catch(err => done(err));
});
it("Should get 403 as non-vip", (done) => {
postClearCache(regularUser, "clear-test")
.then(res => {
assert.strictEqual(res.status, 403);
done();
})
.catch(err => done(err));
});
it("Should give 400 with missing videoID", (done) => {
client.post(endpoint, { params: { userID: VIPUser } })
.then(res => {
assert.strictEqual(res.status, 400);
done();
})
.catch(err => done(err));
});
it("Should give 400 with missing userID", (done) => {
client.post(endpoint, { params: { videoID: "clear-test" } })
.then(res => {
assert.strictEqual(res.status, 400);
done();
})
.catch(err => done(err));
});
});

Some files were not shown because too many files have changed in this diff Show More