diff --git a/.gitignore b/.gitignore
index 3cff924..dd78e49 100644
--- a/.gitignore
+++ b/.gitignore
@@ -99,6 +99,7 @@ test/databases/sponsorTimes.db
test/databases/sponsorTimes.db-shm
test/databases/sponsorTimes.db-wal
test/databases/private.db
+docker/database-export
# Config files
config.json
diff --git a/README.MD b/README.MD
index 8f61c57..e309e54 100644
--- a/README.MD
+++ b/README.MD
@@ -8,7 +8,7 @@ This is the server backend for it
This uses a Postgres or Sqlite database to hold all the timing data.
-To make sure that this project doesn't die, I have made the database publicly downloadable at https://sponsor.ajay.app/database.db. You can download a backup or get archive.org to take a backup if you do desire. The database is under [this license](https://creativecommons.org/licenses/by-nc-sa/4.0/) unless you get explicit permission from me.
+To make sure that this project doesn't die, I have made the database publicly downloadable at https://sponsor.ajay.app/database. You can download a backup or get archive.org to take a backup if you so desire. The database is under [this license](https://creativecommons.org/licenses/by-nc-sa/4.0/) unless you get explicit permission from me.
Hopefully this project can be combined with projects like [this](https://github.com/Sponsoff/sponsorship_remover) and use this data to create a neural network to predict when sponsored segments happen. That project is sadly abandoned now, so I have decided to attempt to revive this idea.
diff --git a/config.json.example b/config.json.example
index 075872a..f1846f9 100644
--- a/config.json.example
+++ b/config.json.example
@@ -39,5 +39,31 @@
"statusCode": 200
}
},
- "maxRewardTimePerSegmentInSeconds": 86400 // maximum time a user get rewarded in the leaderboard for a single segment
+    "maxRewardTimePerSegmentInSeconds": 86400, // maximum time a user gets rewarded in the leaderboard for a single segment
+ "dumpDatabase": {
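+    // Periodically dumps the public tables to CSV files that are served at /database; only used with the postgres backend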
+ "enabled": true,
+ "minTimeBetweenMs": 60000, // 1 minute between dumps
+ "appExportPath": "./docker/database-export",
+ "postgresExportPath": "/opt/exports",
+ "tables": [{
+ "name": "sponsorTimes",
+ "order": "timeSubmitted"
+ },
+ {
+ "name": "userNames"
+ },
+ {
+ "name": "categoryVotes"
+ },
+ {
+ "name": "noSegments"
+ },
+ {
+ "name": "warnings",
+ "order": "issueTime"
+ },
+ {
+ "name": "vipUsers"
+ }]
+ }
}
diff --git a/databases/_private_indexes.sql b/databases/_private_indexes.sql
new file mode 100644
index 0000000..2b70fa7
--- /dev/null
+++ b/databases/_private_indexes.sql
@@ -0,0 +1,32 @@
+-- sponsorTimes
+
+CREATE INDEX IF NOT EXISTS "sponsorTimes_hashedIP"
+ ON public."sponsorTimes" USING btree
+ ("hashedIP" COLLATE pg_catalog."default" ASC NULLS LAST)
+ TABLESPACE pg_default;
+
+CREATE INDEX IF NOT EXISTS "privateDB_sponsorTimes_videoID"
+ ON public."sponsorTimes" USING btree
+ ("videoID" ASC NULLS LAST)
+;
+
+-- votes
+
+CREATE INDEX IF NOT EXISTS "votes_userID"
+ ON public.votes USING btree
+ ("UUID" COLLATE pg_catalog."default" ASC NULLS LAST)
+ TABLESPACE pg_default;
+
+-- shadowBannedUsers
+
+CREATE INDEX IF NOT EXISTS "shadowBannedUsers_index"
+ ON public."shadowBannedUsers" USING btree
+ ("userID" COLLATE pg_catalog."default" ASC NULLS LAST)
+ TABLESPACE pg_default;
+
+-- categoryVotes
+
+CREATE INDEX IF NOT EXISTS "categoryVotes_UUID"
+ ON public."categoryVotes" USING btree
+ ("UUID" COLLATE pg_catalog."default" ASC NULLS LAST, "userID" COLLATE pg_catalog."default" ASC NULLS LAST, "hashedIP" COLLATE pg_catalog."default" ASC NULLS LAST, category COLLATE pg_catalog."default" ASC NULLS LAST)
+ TABLESPACE pg_default;
\ No newline at end of file
diff --git a/databases/_sponsorTimes_indexes.sql b/databases/_sponsorTimes_indexes.sql
new file mode 100644
index 0000000..c90ef51
--- /dev/null
+++ b/databases/_sponsorTimes_indexes.sql
@@ -0,0 +1,66 @@
+-- sponsorTimes
+
+CREATE INDEX IF NOT EXISTS "sponsorTime_timeSubmitted"
+ ON public."sponsorTimes" USING btree
+ ("timeSubmitted" ASC NULLS LAST)
+ TABLESPACE pg_default;
+
+CREATE INDEX IF NOT EXISTS "sponsorTime_userID"
+ ON public."sponsorTimes" USING btree
+ ("userID" COLLATE pg_catalog."default" ASC NULLS LAST)
+ TABLESPACE pg_default;
+
+CREATE INDEX IF NOT EXISTS "sponsorTimes_UUID"
+ ON public."sponsorTimes" USING btree
+ ("UUID" COLLATE pg_catalog."default" ASC NULLS LAST)
+ TABLESPACE pg_default;
+
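+-- Note: gin_trgm_ops below requires the pg_trgm extension; see the Postgres-Extensions wiki page referenced in src/databases/Postgres.ts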
+CREATE INDEX IF NOT EXISTS "sponsorTimes_hashedVideoID_gin"
+ ON public."sponsorTimes" USING gin
+ ("hashedVideoID" COLLATE pg_catalog."default" gin_trgm_ops, category COLLATE pg_catalog."default" gin_trgm_ops)
+ TABLESPACE pg_default;
+
+CREATE INDEX IF NOT EXISTS "sponsorTimes_videoID"
+ ON public."sponsorTimes" USING btree
+ ("videoID" COLLATE pg_catalog."default" ASC NULLS LAST, service COLLATE pg_catalog."default" ASC NULLS LAST, category COLLATE pg_catalog."default" ASC NULLS LAST, "timeSubmitted" ASC NULLS LAST)
+ TABLESPACE pg_default;
+
+-- userNames
+
+CREATE INDEX IF NOT EXISTS "userNames_userID"
+ ON public."userNames" USING btree
+ ("userID" COLLATE pg_catalog."default" ASC NULLS LAST)
+ TABLESPACE pg_default;
+
+-- vipUsers
+
+CREATE INDEX IF NOT EXISTS "vipUsers_index"
+ ON public."vipUsers" USING btree
+ ("userID" COLLATE pg_catalog."default" ASC NULLS LAST)
+ TABLESPACE pg_default;
+
+-- warnings
+
+CREATE INDEX IF NOT EXISTS "warnings_index"
+ ON public.warnings USING btree
+ ("userID" COLLATE pg_catalog."default" ASC NULLS LAST, "issueTime" DESC NULLS LAST, enabled DESC NULLS LAST)
+ TABLESPACE pg_default;
+
+CREATE INDEX IF NOT EXISTS "warnings_issueTime"
+ ON public.warnings USING btree
+ ("issueTime" ASC NULLS LAST)
+ TABLESPACE pg_default;
+
+-- noSegments
+
+CREATE INDEX IF NOT EXISTS "noSegments_videoID"
+ ON public."noSegments" USING btree
+ ("videoID" COLLATE pg_catalog."default" ASC NULLS LAST, category COLLATE pg_catalog."default" ASC NULLS LAST)
+ TABLESPACE pg_default;
+
+-- categoryVotes
+
+CREATE INDEX IF NOT EXISTS "categoryVotes_UUID_public"
+ ON public."categoryVotes" USING btree
+ ("UUID" COLLATE pg_catalog."default" ASC NULLS LAST, category COLLATE pg_catalog."default" ASC NULLS LAST)
+ TABLESPACE pg_default;
\ No newline at end of file
diff --git a/databases/_upgrade_sponsorTimes_10.sql b/databases/_upgrade_sponsorTimes_10.sql
new file mode 100644
index 0000000..174ceaf
--- /dev/null
+++ b/databases/_upgrade_sponsorTimes_10.sql
@@ -0,0 +1,30 @@
+BEGIN TRANSACTION;
+
+/* Add Hidden field */
+CREATE TABLE "sqlb_temp_table_10" (
+ "videoID" TEXT NOT NULL,
+ "startTime" REAL NOT NULL,
+ "endTime" REAL NOT NULL,
+ "votes" INTEGER NOT NULL,
+ "locked" INTEGER NOT NULL default '0',
+ "incorrectVotes" INTEGER NOT NULL default '1',
+ "UUID" TEXT NOT NULL UNIQUE,
+ "userID" TEXT NOT NULL,
+ "timeSubmitted" INTEGER NOT NULL,
+ "views" INTEGER NOT NULL,
+ "category" TEXT NOT NULL DEFAULT 'sponsor',
+ "service" TEXT NOT NULL DEFAULT 'YouTube',
+ "videoDuration" REAL NOT NULL DEFAULT '0',
+ "hidden" INTEGER NOT NULL DEFAULT '0',
+ "shadowHidden" INTEGER NOT NULL,
+ "hashedVideoID" TEXT NOT NULL default ''
+);
+
+INSERT INTO sqlb_temp_table_10 SELECT "videoID","startTime","endTime","votes","locked","incorrectVotes","UUID","userID","timeSubmitted","views","category","service","videoDuration",0,"shadowHidden","hashedVideoID" FROM "sponsorTimes";
+
+DROP TABLE "sponsorTimes";
+ALTER TABLE sqlb_temp_table_10 RENAME TO "sponsorTimes";
+
+UPDATE "config" SET value = 10 WHERE key = 'version';
+
+COMMIT;
\ No newline at end of file
diff --git a/databases/_upgrade_sponsorTimes_7.sql b/databases/_upgrade_sponsorTimes_7.sql
new file mode 100644
index 0000000..8f6060b
--- /dev/null
+++ b/databases/_upgrade_sponsorTimes_7.sql
@@ -0,0 +1,28 @@
+BEGIN TRANSACTION;
+
+/* Add Service field */
+CREATE TABLE "sqlb_temp_table_7" (
+ "videoID" TEXT NOT NULL,
+ "startTime" REAL NOT NULL,
+ "endTime" REAL NOT NULL,
+ "votes" INTEGER NOT NULL,
+ "locked" INTEGER NOT NULL default '0',
+ "incorrectVotes" INTEGER NOT NULL default '1',
+ "UUID" TEXT NOT NULL UNIQUE,
+ "userID" TEXT NOT NULL,
+ "timeSubmitted" INTEGER NOT NULL,
+ "views" INTEGER NOT NULL,
+ "category" TEXT NOT NULL DEFAULT 'sponsor',
+ "service" TEXT NOT NULL DEFAULT 'YouTube',
+ "shadowHidden" INTEGER NOT NULL,
+ "hashedVideoID" TEXT NOT NULL default ''
+);
+
+INSERT INTO sqlb_temp_table_7 SELECT "videoID","startTime","endTime","votes","locked","incorrectVotes","UUID","userID","timeSubmitted","views","category",'YouTube', "shadowHidden","hashedVideoID" FROM "sponsorTimes";
+
+DROP TABLE "sponsorTimes";
+ALTER TABLE sqlb_temp_table_7 RENAME TO "sponsorTimes";
+
+UPDATE "config" SET value = 7 WHERE key = 'version';
+
+COMMIT;
\ No newline at end of file
diff --git a/databases/_upgrade_sponsorTimes_8.sql b/databases/_upgrade_sponsorTimes_8.sql
new file mode 100644
index 0000000..ccc2ec9
--- /dev/null
+++ b/databases/_upgrade_sponsorTimes_8.sql
@@ -0,0 +1,29 @@
+BEGIN TRANSACTION;
+
+/* Add Service field */
+CREATE TABLE "sqlb_temp_table_8" (
+ "videoID" TEXT NOT NULL,
+ "startTime" REAL NOT NULL,
+ "endTime" REAL NOT NULL,
+ "votes" INTEGER NOT NULL,
+ "locked" INTEGER NOT NULL default '0',
+ "incorrectVotes" INTEGER NOT NULL default '1',
+ "UUID" TEXT NOT NULL UNIQUE,
+ "userID" TEXT NOT NULL,
+ "timeSubmitted" INTEGER NOT NULL,
+ "views" INTEGER NOT NULL,
+ "category" TEXT NOT NULL DEFAULT 'sponsor',
+ "service" TEXT NOT NULL DEFAULT 'YouTube',
+ "videoDuration" INTEGER NOT NULL DEFAULT '0',
+ "shadowHidden" INTEGER NOT NULL,
+ "hashedVideoID" TEXT NOT NULL default ''
+);
+
+INSERT INTO sqlb_temp_table_8 SELECT "videoID","startTime","endTime","votes","locked","incorrectVotes","UUID","userID","timeSubmitted","views","category","service",'0', "shadowHidden","hashedVideoID" FROM "sponsorTimes";
+
+DROP TABLE "sponsorTimes";
+ALTER TABLE sqlb_temp_table_8 RENAME TO "sponsorTimes";
+
+UPDATE "config" SET value = 8 WHERE key = 'version';
+
+COMMIT;
\ No newline at end of file
diff --git a/databases/_upgrade_sponsorTimes_9.sql b/databases/_upgrade_sponsorTimes_9.sql
new file mode 100644
index 0000000..5015a4a
--- /dev/null
+++ b/databases/_upgrade_sponsorTimes_9.sql
@@ -0,0 +1,29 @@
+BEGIN TRANSACTION;
+
+/* Add Service field */
+CREATE TABLE "sqlb_temp_table_9" (
+ "videoID" TEXT NOT NULL,
+ "startTime" REAL NOT NULL,
+ "endTime" REAL NOT NULL,
+ "votes" INTEGER NOT NULL,
+ "locked" INTEGER NOT NULL default '0',
+ "incorrectVotes" INTEGER NOT NULL default '1',
+ "UUID" TEXT NOT NULL UNIQUE,
+ "userID" TEXT NOT NULL,
+ "timeSubmitted" INTEGER NOT NULL,
+ "views" INTEGER NOT NULL,
+ "category" TEXT NOT NULL DEFAULT 'sponsor',
+ "service" TEXT NOT NULL DEFAULT 'YouTube',
+ "videoDuration" REAL NOT NULL DEFAULT '0',
+ "shadowHidden" INTEGER NOT NULL,
+ "hashedVideoID" TEXT NOT NULL default ''
+);
+
+INSERT INTO sqlb_temp_table_9 SELECT "videoID","startTime","endTime","votes","locked","incorrectVotes","UUID","userID","timeSubmitted","views","category","service",'0', "shadowHidden","hashedVideoID" FROM "sponsorTimes";
+
+DROP TABLE "sponsorTimes";
+ALTER TABLE sqlb_temp_table_9 RENAME TO "sponsorTimes";
+
+UPDATE "config" SET value = 9 WHERE key = 'version';
+
+COMMIT;
\ No newline at end of file
diff --git a/docker/docker-compose.yml b/docker/docker-compose.yml
index a54a3c0..f7408d3 100644
--- a/docker/docker-compose.yml
+++ b/docker/docker-compose.yml
@@ -7,8 +7,9 @@ services:
- database.env
volumes:
- database-data:/var/lib/postgresql/data
+      - ./database-export/:/opt/exports # To make this work, run chmod 777 ./database-export
ports:
- - 127.0.0.1:5432:5432
+ - 5432:5432
redis:
container_name: redis
image: redis
@@ -16,7 +17,7 @@ services:
volumes:
- ./redis/redis.conf:/usr/local/etc/redis/redis.conf
ports:
- - 127.0.0.1:32773:6379
+ - 32773:6379
volumes:
- database-data:
\ No newline at end of file
+ database-data:
diff --git a/nginx/nginx.conf b/nginx/nginx.conf
index daee81f..d420877 100644
--- a/nginx/nginx.conf
+++ b/nginx/nginx.conf
@@ -2,7 +2,7 @@ worker_processes 8;
worker_rlimit_nofile 8192;
events {
- worker_connections 32768; ## Default: 1024
+ worker_connections 132768; ## Default: 1024
}
http {
@@ -12,28 +12,41 @@ http {
upstream backend_GET {
least_conn;
- server localhost:4442;
- server localhost:4443;
- server localhost:4444;
- server localhost:4445;
- server localhost:4446;
+ server localhost:4441;
+ server localhost:4442;
+ #server localhost:4443;
+ #server localhost:4444;
+ #server localhost:4445;
+ #server localhost:4446;
#server localhost:4447;
#server localhost:4448;
+
+ server 10.0.0.3:4441;
+ server 10.0.0.3:4442;
+
+ #server 134.209.69.251:80 backup;
+
+ server 116.203.32.253:80 backup;
+ #server 116.203.32.253:80;
}
upstream backend_POST {
+ #server localhost:4441;
+ #server localhost:4442;
+ server 10.0.0.3:4441;
+ #server 10.0.0.3:4442;
+ }
+ upstream backend_db {
server localhost:4441;
+ #server 10.0.0.3:4441;
}
- proxy_cache_path /var/cache/nginx levels=1:2 keys_zone=CACHEZONE:10m inactive=60m max_size=40m;
+ proxy_cache_path /var/cache/nginx levels=1:2 keys_zone=CACHEZONE:10m inactive=60m max_size=400m;
proxy_cache_key "$scheme$request_method$host$request_uri";
add_header X-Cache $upstream_cache_status;
server {
server_name sponsor.ajay.app api.sponsor.ajay.app;
- access_log off;
- error_log /dev/null;
-
error_page 404 /404.html;
error_page 500 @myerrordirective_500;
error_page 502 @myerrordirective_502;
@@ -43,14 +56,16 @@ http {
# internal;
#}
+ #proxy_send_timeout 120s;
+
location @myerrordirective_500 {
- return 502 "Internal Server Error";
+ return 400 "Internal Server Error";
}
location @myerrordirective_502 {
- return 502 "Bad Gateway";
+ return 400 "Bad Gateway";
}
location @myerrordirective_504 {
- return 502 "Gateway Timeout";
+ return 400 "Gateway Timeout";
}
@@ -62,17 +77,16 @@ http {
return 301 https://sb.ltn.fi;
}
- location /invidious/ {
- proxy_pass https://invidious.fdn.fr/;
- }
-
location /test/ {
proxy_pass http://localhost:4440/;
#proxy_pass https://sbtest.etcinit.com/;
}
location /api/skipSegments {
- proxy_pass http://backend_$request_method;
+ #return 200 "[]";
+ proxy_pass http://backend_$request_method;
+ #proxy_cache CACHEZONE;
+ #proxy_cache_valid 2m;
}
location /api/getTopUsers {
@@ -83,24 +97,47 @@ http {
location /api/getTotalStats {
proxy_pass http://backend_GET;
- }
+ #return 200 "";
+ }
location /api/getVideoSponsorTimes {
proxy_pass http://backend_GET;
}
-
- location = /database.db {
- alias /home/sbadmin/sponsor/databases/sponsorTimes.db;
+
+ location /database/ {
+ return 307 https://cdnsponsor.ajay.app$request_uri;
+ }
+ location /database {
+ proxy_pass http://backend_db;
}
-
+
+ location = /database.db {
+ #return 404 "Sqlite database has been replaced with csv exports at https://sponsor.ajay.app/database. Sqlite exports might come back soon, but exported at longer intervals.";
+ #alias /home/sbadmin/sponsor/databases/sponsorTimes.db;
+ alias /home/sbadmin/test-db/database.db;
+ }
+
+ #location = /database/sponsorTimes.csv {
+ # alias /home/sbadmin/sponsorTimes.csv;
+ #}
+
+
+ #location /api/voteOnSponsorTime {
+ # return 200 "Success";
+ #}
+
+ #location /api/viewedVideoSponsorTime {
+ # return 200 "Success";
+ #}
+
location /api {
proxy_pass http://backend_POST;
}
location / {
- root /home/sbadmin/caddy/SponsorBlockSite/public-prod;
-
+ root /home/sbadmin/SponsorBlockSite/public-prod;
+
### CORS
if ($request_method = 'OPTIONS') {
add_header 'Access-Control-Allow-Origin' '*';
@@ -132,14 +169,74 @@ http {
}
- listen 443 ssl; # managed by Certbot
- ssl_certificate /etc/letsencrypt/live/sponsor.ajay.app/fullchain.pem; # managed by Certbot
- ssl_certificate_key /etc/letsencrypt/live/sponsor.ajay.app/privkey.pem; # managed by Certbot
+ listen 443 default_server ssl http2; # managed by Certbot
+ #listen 443 http3 reuseport;
+ ssl_protocols TLSv1.2 TLSv1.3;
+ #listen 80;
+ ssl_certificate /etc/letsencrypt/live/sponsor.ajay.app-0001/fullchain.pem; # managed by Certbot
+ ssl_certificate_key /etc/letsencrypt/live/sponsor.ajay.app-0001/privkey.pem; # managed by Certbot
include /etc/letsencrypt/options-ssl-nginx.conf; # managed by Certbot
ssl_dhparam /etc/letsencrypt/ssl-dhparams.pem; # managed by Certbot
+
+
+
+
+
+
}
+ server {
+ server_name cdnsponsor.ajay.app;
+
+ error_page 404 /404.html;
+
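+        # Serves the CSV dumps that the API writes to appExportPath (./docker/database-export)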
+ location /database/ {
+ alias /home/sbadmin/sponsor/docker/database-export/;
+ }
+
+ location / {
+ root /home/sbadmin/SponsorBlockSite/public-prod;
+
+ ### CORS
+ if ($request_method = 'OPTIONS') {
+ add_header 'Access-Control-Allow-Origin' '*';
+ add_header 'Access-Control-Allow-Methods' 'GET, POST, OPTIONS';
+ #
+ # Custom headers and headers various browsers *should* be OK with but aren't
+ #
+ add_header 'Access-Control-Allow-Headers' 'DNT,User-Agent,X-Requested-With,If-Modified-Since,Cache-Control,Content-Type,Range';
+ #
+ # Tell client that this pre-flight info is valid for 20 days
+ #
+ add_header 'Access-Control-Max-Age' 1728000;
+ add_header 'Content-Type' 'text/plain; charset=utf-8';
+ add_header 'Content-Length' 0;
+ return 204;
+ }
+ if ($request_method = 'POST') {
+ add_header 'Access-Control-Allow-Origin' '*';
+ add_header 'Access-Control-Allow-Methods' 'GET, POST, OPTIONS';
+ add_header 'Access-Control-Allow-Headers' 'DNT,User-Agent,X-Requested-With,If-Modified-Since,Cache-Control,Content-Type,Range';
+ add_header 'Access-Control-Expose-Headers' 'Content-Length,Content-Range';
+ }
+ if ($request_method = 'GET') {
+ add_header 'Access-Control-Allow-Origin' '*';
+ add_header 'Access-Control-Allow-Methods' 'GET, POST, OPTIONS';
+ add_header 'Access-Control-Allow-Headers' 'DNT,User-Agent,X-Requested-With,If-Modified-Since,Cache-Control,Content-Type,Range';
+ add_header 'Access-Control-Expose-Headers' 'Content-Length,Content-Range';
+ }
+ }
+
+
+ listen 443 ssl; # managed by Certbot
+ ssl_certificate /etc/letsencrypt/live/sponsor.ajay.app-0001/fullchain.pem; # managed by Certbot
+ ssl_certificate_key /etc/letsencrypt/live/sponsor.ajay.app-0001/privkey.pem; # managed by Certbot
+ include /etc/letsencrypt/options-ssl-nginx.conf; # managed by Certbot
+ ssl_dhparam /etc/letsencrypt/ssl-dhparams.pem; # managed by Certbot
+
+
+}
@@ -148,6 +245,7 @@ http {
access_log off;
error_log /dev/null;
+
if ($host = api.sponsor.ajay.app) {
return 301 https://$host$request_uri;
@@ -166,4 +264,17 @@ http {
+}
+
+ server {
+ if ($host = cdnsponsor.ajay.app) {
+ return 301 https://$host$request_uri;
+ } # managed by Certbot
+
+
+ server_name cdnsponsor.ajay.app;
+ listen 80;
+ return 404; # managed by Certbot
+
+
}}
diff --git a/package-lock.json b/package-lock.json
index dd69f2e..f17b109 100644
--- a/package-lock.json
+++ b/package-lock.json
@@ -2989,9 +2989,9 @@
"integrity": "sha512-LKYU1iAXJXUgAXn9URjiu+MWhyUXHsvfp7mcuYm9dSUKK0/CjtrUwFAxD82/mCWbtLsGjFIad0wIsod4zrTAEQ=="
},
"y18n": {
- "version": "4.0.0",
- "resolved": "https://registry.npmjs.org/y18n/-/y18n-4.0.0.tgz",
- "integrity": "sha512-r9S/ZyXu/Xu9q1tYlpsLIsa3EeLXXk0VwlxqTcFRfg9EhMW+17kbt9G0NrgCmhGb5vT2hyhJZLfDGx+7+5Uj/w==",
+ "version": "4.0.1",
+ "resolved": "https://registry.npmjs.org/y18n/-/y18n-4.0.1.tgz",
+ "integrity": "sha512-wNcy4NvjMYL8gogWWYAO7ZFWFfHcbdbE57tZO8e4cbpj8tfUcwrwqSl3ad8HxpYWCdXcJUCeKKZS62Av1affwQ==",
"dev": true
},
"yallist": {
diff --git a/src/app.ts b/src/app.ts
index 098e5f3..2af8835 100644
--- a/src/app.ts
+++ b/src/app.ts
@@ -5,7 +5,7 @@ import {oldGetVideoSponsorTimes} from './routes/oldGetVideoSponsorTimes';
import {postSegmentShift} from './routes/postSegmentShift';
import {postWarning} from './routes/postWarning';
import {getIsUserVIP} from './routes/getIsUserVIP';
-import {deleteNoSegments} from './routes/deleteNoSegments';
+import {deleteNoSegmentsEndpoint} from './routes/deleteNoSegments';
import {postNoSegments} from './routes/postNoSegments';
import {getUserInfo} from './routes/getUserInfo';
import {getDaysSavedFormatted} from './routes/getDaysSavedFormatted';
@@ -26,6 +26,7 @@ import {userCounter} from './middleware/userCounter';
import {loggerMiddleware} from './middleware/logger';
import {corsMiddleware} from './middleware/cors';
import {rateLimitMiddleware} from './middleware/requestRateLimit';
+import dumpDatabase from './routes/dumpDatabase';
export function createServer(callback: () => void) {
@@ -116,7 +117,7 @@ function setupRoutes(app: Express) {
//submit video containing no segments
app.post('/api/noSegments', postNoSegments);
- app.delete('/api/noSegments', deleteNoSegments);
+ app.delete('/api/noSegments', deleteNoSegmentsEndpoint);
//get if user is a vip
app.get('/api/isUserVIP', getIsUserVIP);
@@ -127,7 +128,12 @@ function setupRoutes(app: Express) {
//get if user is a vip
app.post('/api/segmentShift', postSegmentShift);
- app.get('/database.db', function (req: Request, res: Response) {
- res.sendFile("./databases/sponsorTimes.db", {root: "./"});
- });
+ if (config.postgres) {
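+        // CSV database dumps are only generated on the postgres backend; sqlite installs keep the old database.db download below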
+ app.get('/database', (req, res) => dumpDatabase(req, res, true));
+ app.get('/database.json', (req, res) => dumpDatabase(req, res, false));
+ } else {
+ app.get('/database.db', function (req: Request, res: Response) {
+ res.sendFile("./databases/sponsorTimes.db", {root: "./"});
+ });
+ }
}
diff --git a/src/config.ts b/src/config.ts
index 87043d1..34301ca 100644
--- a/src/config.ts
+++ b/src/config.ts
@@ -16,7 +16,7 @@ addDefaults(config, {
privateDBSchema: "./databases/_private.db.sql",
readOnly: false,
webhooks: [],
- categoryList: ["sponsor", "intro", "outro", "interaction", "selfpromo", "music_offtopic"],
+ categoryList: ["sponsor", "selfpromo", "interaction", "intro", "outro", "preview", "music_offtopic"],
maxNumberOfActiveWarnings: 3,
hoursAfterWarningExpires: 24,
adminUserID: "",
@@ -46,7 +46,33 @@ addDefaults(config, {
userCounterURL: null,
youtubeAPIKey: null,
maxRewardTimePerSegmentInSeconds: 86400,
- postgres: null
+ postgres: null,
+ dumpDatabase: {
+ enabled: true,
+ minTimeBetweenMs: 60000,
+ appExportPath: './docker/database-export',
+ postgresExportPath: '/opt/exports',
+ tables: [{
+ name: "sponsorTimes",
+ order: "timeSubmitted"
+ },
+ {
+ name: "userNames"
+ },
+ {
+ name: "categoryVotes"
+ },
+ {
+ name: "noSegments",
+ },
+ {
+ name: "warnings",
+ order: "issueTime"
+ },
+ {
+ name: "vipUsers"
+ }]
+ }
});
// Add defaults
diff --git a/src/databases/Postgres.ts b/src/databases/Postgres.ts
index bae2da6..779ebf2 100644
--- a/src/databases/Postgres.ts
+++ b/src/databases/Postgres.ts
@@ -23,6 +23,13 @@ export class Postgres implements IDatabase {
// Upgrade database if required
await this.upgradeDB(this.config.fileNamePrefix, this.config.dbSchemaFolder);
+
+ try {
+ await this.applyIndexes(this.config.fileNamePrefix, this.config.dbSchemaFolder);
+ } catch (e) {
+ Logger.warn("Applying indexes failed. See https://github.com/ajayyy/SponsorBlockServer/wiki/Postgres-Extensions for more information.");
+ Logger.warn(e);
+ }
}
}
@@ -118,9 +125,18 @@ export class Postgres implements IDatabase {
Logger.debug('db update: no file ' + path);
}
+ private async applyIndexes(fileNamePrefix: string, schemaFolder: string) {
+ const path = schemaFolder + "/_" + fileNamePrefix + "_indexes.sql";
+ if (fs.existsSync(path)) {
+ await this.pool.query(fs.readFileSync(path).toString());
+ } else {
+ Logger.debug('failed to apply indexes to ' + fileNamePrefix);
+ }
+ }
+
private processUpgradeQuery(query: string): string {
let result = query;
- result = result.replace(/sha256\((.*?)\)/gm, "digest($1, 'sha256')");
+ result = result.replace(/sha256\((.*?)\)/gm, "encode(digest($1, 'sha256'), 'hex')");
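+        // encode(digest(...), 'hex') yields a hex string rather than bytea, presumably to match the hex output of the schema's sha256() calls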
result = result.replace(/integer/gmi, "NUMERIC");
return result;
diff --git a/src/middleware/redisKeys.ts b/src/middleware/redisKeys.ts
index 1335e15..56b1aab 100644
--- a/src/middleware/redisKeys.ts
+++ b/src/middleware/redisKeys.ts
@@ -1,5 +1,13 @@
-import { Category, VideoID } from "../types/segments.model";
+import { Service, VideoID, VideoIDHash } from "../types/segments.model";
+import { Logger } from "../utils/logger";
export function skipSegmentsKey(videoID: VideoID): string {
return "segments-" + videoID;
}
+
+export function skipSegmentsHashKey(hashedVideoIDPrefix: VideoIDHash, service: Service): string {
+ hashedVideoIDPrefix = hashedVideoIDPrefix.substring(0, 4) as VideoIDHash;
+ if (hashedVideoIDPrefix.length !== 4) Logger.warn("Redis skip segment hash-prefix key is not length 4! " + hashedVideoIDPrefix);
+
+ return "segments." + service + "." + hashedVideoIDPrefix;
+}
\ No newline at end of file
diff --git a/src/routes/deleteNoSegments.ts b/src/routes/deleteNoSegments.ts
index cfa6c78..5584c5a 100644
--- a/src/routes/deleteNoSegments.ts
+++ b/src/routes/deleteNoSegments.ts
@@ -2,12 +2,14 @@ import {Request, Response} from 'express';
import {isUserVIP} from '../utils/isUserVIP';
import {getHash} from '../utils/getHash';
import {db} from '../databases/databases';
+import { Category, VideoID } from '../types/segments.model';
+import { UserID } from '../types/user.model';
-export async function deleteNoSegments(req: Request, res: Response) {
+export async function deleteNoSegmentsEndpoint(req: Request, res: Response) {
// Collect user input data
- const videoID = req.body.videoID;
- let userID = req.body.userID;
- const categories = req.body.categories;
+ const videoID = req.body.videoID as VideoID;
+ const userID = req.body.userID as UserID;
+ const categories = req.body.categories as Category[];
// Check input data is valid
if (!videoID
@@ -23,8 +25,8 @@ export async function deleteNoSegments(req: Request, res: Response) {
}
// Check if user is VIP
- userID = getHash(userID);
- const userIsVIP = await isUserVIP(userID);
+ const hashedUserID = getHash(userID);
+ const userIsVIP = await isUserVIP(hashedUserID);
if (!userIsVIP) {
res.status(403).json({
@@ -33,13 +35,22 @@ export async function deleteNoSegments(req: Request, res: Response) {
return;
}
+ deleteNoSegments(videoID, categories);
+
+    res.status(200).json({message: 'Removed no segments entries for video ' + videoID});
+}
+
+/**
+ *
+ * @param videoID
+ * @param categories If null, will remove all
+ */
+export async function deleteNoSegments(videoID: VideoID, categories: Category[]): Promise<void> {
const entries = (await db.prepare("all", 'SELECT * FROM "noSegments" WHERE "videoID" = ?', [videoID])).filter((entry: any) => {
- return (categories.indexOf(entry.category) !== -1);
+ return categories === null || categories.indexOf(entry.category) !== -1;
});
for (const entry of entries) {
await db.prepare('run', 'DELETE FROM "noSegments" WHERE "videoID" = ? AND "category" = ?', [videoID, entry.category]);
}
-
- res.status(200).json({message: 'Removed no segments entrys for video ' + videoID});
}
diff --git a/src/routes/dumpDatabase.ts b/src/routes/dumpDatabase.ts
new file mode 100644
index 0000000..7610589
--- /dev/null
+++ b/src/routes/dumpDatabase.ts
@@ -0,0 +1,173 @@
+import {db} from '../databases/databases';
+import {Logger} from '../utils/logger';
+import {Request, Response} from 'express';
+import { config } from '../config';
+import util from 'util';
+import fs from 'fs';
+import path from 'path';
+const unlink = util.promisify(fs.unlink);
+
+const ONE_MINUTE = 1000 * 60;
+
+const styleHeader = ``
+
+const licenseHeader = `<p>The API and database follow <a href="https://creativecommons.org/licenses/by-nc-sa/4.0/">CC BY-NC-SA 4.0</a> unless you have explicit permission.</p>
+<p>Attribution Template</p>
+<p>If you need to use the database or API in a way that violates this license, contact me with your reason and I may grant you access under a different license.</p>`;
+
+const tables = config?.dumpDatabase?.tables ?? [];
+const MILLISECONDS_BETWEEN_DUMPS = config?.dumpDatabase?.minTimeBetweenMs ?? ONE_MINUTE;
+const appExportPath = config?.dumpDatabase?.appExportPath ?? './docker/database-export';
+const postgresExportPath = config?.dumpDatabase?.postgresExportPath ?? '/opt/exports';
+const tableNames = tables.map(table => table.name);
+
+interface TableDumpList {
+ fileName: string;
+ tableName: string;
+};
+let latestDumpFiles: TableDumpList[] = [];
+
+interface TableFile {
+ file: string,
+ timestamp: number
+};
+
+if (tables.length === 0) {
+ Logger.warn('[dumpDatabase] No tables configured');
+}
+
+let lastUpdate = 0;
+
+function removeOutdatedDumps(exportPath: string): Promise<void> {
+ return new Promise((resolve, reject) => {
+ // Get list of table names
+ // Create array for each table
+        const tableFiles: Record<string, TableFile[]> = tableNames.reduce((obj: any, tableName) => {
+ obj[tableName] = [];
+ return obj;
+ }, {});
+
+ // read files in export directory
+ fs.readdir(exportPath, async (err: any, files: string[]) => {
+ if (err) Logger.error(err);
+ if (err) return resolve();
+
+ files.forEach(file => {
+                // we only care about files that start with the table name and end with .csv
+ tableNames.forEach(tableName => {
+ if (file.startsWith(`${tableName}`) && file.endsWith('.csv')) {
+ const filePath = path.join(exportPath, file);
+ tableFiles[tableName].push({
+ file: filePath,
+ timestamp: fs.statSync(filePath).mtime.getTime()
+ });
+ }
+ });
+ });
+
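+            // Keep the two newest dumps for each table and delete the rest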
+ for (let tableName in tableFiles) {
+ const files = tableFiles[tableName].sort((a, b) => b.timestamp - a.timestamp);
+ for (let i = 2; i < files.length; i++) {
+ // remove old file
+ await unlink(files[i].file).catch((error: any) => {
+ Logger.error(`[dumpDatabase] Garbage collection failed ${error}`);
+ });
+ }
+ }
+
+ resolve();
+ });
+ });
+}
+
+export default async function dumpDatabase(req: Request, res: Response, showPage: boolean) {
+ if (!config?.dumpDatabase?.enabled) {
+ res.status(404).send("Database dump is disabled");
+ return;
+ }
+ if (!config.postgres) {
+ res.status(404).send("Not supported on this instance");
+ return;
+ }
+
+ const now = Date.now();
+ const updateQueued = now - lastUpdate > MILLISECONDS_BETWEEN_DUMPS;
+
+ res.status(200)
+
+ if (showPage) {
+        res.send(`${styleHeader}
+            <h1>SponsorBlock database dumps</h1>
+            ${licenseHeader}
+            <h3>How this works</h3>
+            Send a request to https://sponsor.ajay.app/database.json, or visit this page to trigger the database dump to run.
+            Then, you can download the csv files below, or use the links returned from the JSON request.
+            <h3>Links</h3>
+            <table>
+                <thead>
+                    <tr>
+                        <th>Table</th>
+                        <th>CSV</th>
+                    </tr>
+                </thead>
+                <tbody>
+                    ${latestDumpFiles.map((item: any) => {
+                        return `
+                            <tr>
+                                <td>${item.tableName}</td>
+                                <td><a href="/database/${item.fileName}">${item.fileName}</a></td>
+                            </tr>
+                        `;
+                    }).join('')}
+                    ${latestDumpFiles.length === 0 ? `<tr><td>Please wait: Generating files</td></tr>` : ''}
+                </tbody>
+            </table>
+ ${updateQueued ? `Update queued.` : ``} Last updated: ${lastUpdate ? new Date(lastUpdate).toUTCString() : `Unknown`}`);
+ } else {
+ res.send({
+ lastUpdated: lastUpdate,
+ updateQueued,
+ links: latestDumpFiles.map((item:any) => {
+ return {
+ table: item.tableName,
+ url: `/database/${item.fileName}`,
+ size: item.fileSize,
+ };
+ }),
+ })
+ }
+
+ if (updateQueued) {
+ lastUpdate = Date.now();
+
+ await removeOutdatedDumps(appExportPath);
+
+ const dumpFiles = [];
+
+ for (const table of tables) {
+ const fileName = `${table.name}_${lastUpdate}.csv`;
+ const file = `${postgresExportPath}/${fileName}`;
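+            // COPY ... TO writes on the postgres server itself, so postgresExportPath must be writable inside the database container (docker-compose mounts it to ./database-export on the app side)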
+ await db.prepare('run', `COPY (SELECT * FROM "${table.name}"${table.order ? ` ORDER BY "${table.order}"` : ``})
+ TO '${file}' WITH (FORMAT CSV, HEADER true);`);
+ dumpFiles.push({
+ fileName,
+ tableName: table.name,
+ });
+ }
+ latestDumpFiles = [...dumpFiles];
+ }
+}
diff --git a/src/routes/getSkipSegments.ts b/src/routes/getSkipSegments.ts
index 65ab4f1..d36a91a 100644
--- a/src/routes/getSkipSegments.ts
+++ b/src/routes/getSkipSegments.ts
@@ -2,9 +2,9 @@ import { Request, Response } from 'express';
import { RedisClient } from 'redis';
import { config } from '../config';
import { db, privateDB } from '../databases/databases';
-import { skipSegmentsKey } from '../middleware/redisKeys';
+import { skipSegmentsHashKey, skipSegmentsKey } from '../middleware/redisKeys';
import { SBRecord } from '../types/lib.model';
-import { Category, DBSegment, HashedIP, IPAddress, OverlappingSegmentGroup, Segment, SegmentCache, VideoData, VideoID, VideoIDHash, Visibility, VotableObject } from "../types/segments.model";
+import { Category, DBSegment, HashedIP, IPAddress, OverlappingSegmentGroup, Segment, SegmentCache, Service, VideoData, VideoID, VideoIDHash, Visibility, VotableObject } from "../types/segments.model";
import { getHash } from '../utils/getHash';
import { getIP } from '../utils/getIP';
import { Logger } from '../utils/logger';
@@ -43,11 +43,12 @@ async function prepareCategorySegments(req: Request, videoID: VideoID, category:
return chooseSegments(filteredSegments).map((chosenSegment) => ({
category,
segment: [chosenSegment.startTime, chosenSegment.endTime],
- UUID: chosenSegment.UUID
+ UUID: chosenSegment.UUID,
+ videoDuration: chosenSegment.videoDuration
}));
}
-async function getSegmentsByVideoID(req: Request, videoID: string, categories: Category[]): Promise<Segment[]> {
+async function getSegmentsByVideoID(req: Request, videoID: string, categories: Category[], service: Service): Promise<Segment[]> {
const cache: SegmentCache = {shadowHiddenSegmentIPs: {}};
const segments: Segment[] = [];
@@ -58,9 +59,9 @@ async function getSegmentsByVideoID(req: Request, videoID: string, categories: C
     const segmentsByCategory: SBRecord<Category, DBSegment[]> = (await db
.prepare(
'all',
- `SELECT "startTime", "endTime", "votes", "locked", "UUID", "category", "shadowHidden" FROM "sponsorTimes"
- WHERE "videoID" = ? AND "category" IN (${categories.map((c) => "'" + c + "'")}) ORDER BY "startTime"`,
- [videoID]
+ `SELECT "startTime", "endTime", "votes", "locked", "UUID", "category", "videoDuration", "shadowHidden" FROM "sponsorTimes"
+ WHERE "videoID" = ? AND "category" IN (${categories.map((c) => "'" + c + "'")}) AND "service" = ? AND "hidden" = 0 ORDER BY "startTime"`,
+ [videoID, service]
     )).reduce((acc: SBRecord<Category, DBSegment[]>, segment: DBSegment) => {
acc[segment.category] = acc[segment.category] || [];
acc[segment.category].push(segment);
@@ -81,7 +82,7 @@ async function getSegmentsByVideoID(req: Request, videoID: string, categories: C
}
}
-async function getSegmentsByHash(req: Request, hashedVideoIDPrefix: VideoIDHash, categories: Category[]): Promise<SBRecord<VideoID, VideoData>> {
+async function getSegmentsByHash(req: Request, hashedVideoIDPrefix: VideoIDHash, categories: Category[], service: Service): Promise<SBRecord<VideoID, VideoData>> {
const cache: SegmentCache = {shadowHiddenSegmentIPs: {}};
     const segments: SBRecord<VideoID, VideoData> = {};
@@ -91,13 +92,9 @@ async function getSegmentsByHash(req: Request, hashedVideoIDPrefix: VideoIDHash,
categories = categories.filter((category) => !(/[^a-z|_|-]/.test(category)));
if (categories.length === 0) return null;
- const segmentPerVideoID: SegmentWithHashPerVideoID = (await db
- .prepare(
- 'all',
- `SELECT "videoID", "startTime", "endTime", "votes", "locked", "UUID", "category", "shadowHidden", "hashedVideoID" FROM "sponsorTimes"
- WHERE "hashedVideoID" LIKE ? AND "category" IN (${categories.map((c) => "'" + c + "'")}) ORDER BY "startTime"`,
- [hashedVideoIDPrefix + '%']
- )).reduce((acc: SegmentWithHashPerVideoID, segment: DBSegment) => {
+ const segmentPerVideoID: SegmentWithHashPerVideoID = (await getSegmentsFromDB(hashedVideoIDPrefix, service))
+ .filter((segment: DBSegment) => categories.includes(segment?.category))
+ .reduce((acc: SegmentWithHashPerVideoID, segment: DBSegment) => {
acc[segment.videoID] = acc[segment.videoID] || {
hash: segment.hashedVideoID,
segmentPerCategory: {},
@@ -130,6 +127,37 @@ async function getSegmentsByHash(req: Request, hashedVideoIDPrefix: VideoIDHash,
}
}
+async function getSegmentsFromDB(hashedVideoIDPrefix: VideoIDHash, service: Service): Promise<DBSegment[]> {
+ const fetchFromDB = () => db
+ .prepare(
+ 'all',
+ `SELECT "videoID", "startTime", "endTime", "votes", "locked", "UUID", "category", "videoDuration", "shadowHidden", "hashedVideoID" FROM "sponsorTimes"
+ WHERE "hashedVideoID" LIKE ? AND "service" = ? AND "hidden" = 0 ORDER BY "startTime"`,
+ [hashedVideoIDPrefix + '%', service]
+ );
+
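+    // Only full 4-character prefixes are cached in redis, matching the key format from skipSegmentsHashKey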
+ if (hashedVideoIDPrefix.length === 4) {
+ const key = skipSegmentsHashKey(hashedVideoIDPrefix, service);
+ const {err, reply} = await redis.getAsync(key);
+
+ if (!err && reply) {
+ try {
+ Logger.debug("Got data from redis: " + reply);
+ return JSON.parse(reply);
+ } catch (e) {
+ // If all else, continue on to fetching from the database
+ }
+ }
+
+ const data = await fetchFromDB();
+
+ redis.setAsync(key, JSON.stringify(data));
+ return data;
+ }
+
+ return await fetchFromDB();
+}
+
//gets a weighted random choice from the choices array based on their `votes` property.
//amountOfChoices specifies the maximum amount of choices to return, 1 or more.
//choices are unique
@@ -239,6 +267,11 @@ async function handleGetSegments(req: Request, res: Response): Promise<Segment[]> {
+    let service: Service = req.query.service ?? req.body.service ?? Service.YouTube;
+    if (!Object.values(Service).some((val) => val == service)) {
+        service = Service.YouTube;
+    }
+
+
// Only 404s are cached at the moment
const redisResult = await redis.getAsync(skipSegmentsKey(videoID));
@@ -251,7 +284,7 @@ async function handleGetSegments(req: Request, res: Response): Promise<Segment[]> {
+    let service: Service = req.query.service ?? req.body.service ?? Service.YouTube;
+    if (!Object.values(Service).some((val) => val == service)) {
+        service = Service.YouTube;
+    }
// filter out none string elements, only flat array with strings is valid
categories = categories.filter((item: any) => typeof item === "string");
// Get all video id's that match hash prefix
- const segments = await getSegmentsByHash(req, hashPrefix, categories);
+ const segments = await getSegmentsByHash(req, hashPrefix, categories, service);
if (!segments) return res.status(404).json([]);
diff --git a/src/routes/postSkipSegments.ts b/src/routes/postSkipSegments.ts
index 6a9c781..1f380da 100644
--- a/src/routes/postSkipSegments.ts
+++ b/src/routes/postSkipSegments.ts
@@ -11,9 +11,15 @@ import {getFormattedTime} from '../utils/getFormattedTime';
import {isUserTrustworthy} from '../utils/isUserTrustworthy';
import {dispatchEvent} from '../utils/webhookUtils';
import {Request, Response} from 'express';
-import { skipSegmentsKey } from '../middleware/redisKeys';
+import { skipSegmentsHashKey, skipSegmentsKey } from '../middleware/redisKeys';
import redis from '../utils/redis';
+import { Category, IncomingSegment, Segment, SegmentUUID, Service, VideoDuration, VideoID } from '../types/segments.model';
+import { deleteNoSegments } from './deleteNoSegments';
+interface APIVideoInfo {
+ err: string | boolean,
+ data: any
+}
async function sendWebhookNotification(userID: string, videoID: string, UUID: string, submissionCount: number, youtubeData: any, {submissionStart, submissionEnd}: { submissionStart: number; submissionEnd: number; }, segmentInfo: any) {
const row = await db.prepare('get', `SELECT "userName" FROM "userNames" WHERE "userID" = ?`, [userID]);
@@ -45,62 +51,58 @@ async function sendWebhookNotification(userID: string, videoID: string, UUID: st
});
}
-async function sendWebhooks(userID: string, videoID: string, UUID: string, segmentInfo: any) {
- if (config.youtubeAPIKey !== null) {
+async function sendWebhooks(apiVideoInfo: APIVideoInfo, userID: string, videoID: string, UUID: string, segmentInfo: any, service: Service) {
+ if (apiVideoInfo && service == Service.YouTube) {
const userSubmissionCountRow = await db.prepare('get', `SELECT count(*) as "submissionCount" FROM "sponsorTimes" WHERE "userID" = ?`, [userID]);
- YouTubeAPI.listVideos(videoID, (err: any, data: any) => {
- if (err || data.items.length === 0) {
- err && Logger.error(err);
- return;
+ const {data, err} = apiVideoInfo;
+ if (err) return;
+
+ const startTime = parseFloat(segmentInfo.segment[0]);
+ const endTime = parseFloat(segmentInfo.segment[1]);
+ sendWebhookNotification(userID, videoID, UUID, userSubmissionCountRow.submissionCount, data, {
+ submissionStart: startTime,
+ submissionEnd: endTime,
+ }, segmentInfo);
+
+ // If it is a first time submission
+ // Then send a notification to discord
+ if (config.discordFirstTimeSubmissionsWebhookURL === null || userSubmissionCountRow.submissionCount > 1) return;
+
+ fetch(config.discordFirstTimeSubmissionsWebhookURL, {
+ method: 'POST',
+ body: JSON.stringify({
+ "embeds": [{
+ "title": data.items[0].snippet.title,
+ "url": "https://www.youtube.com/watch?v=" + videoID + "&t=" + (parseInt(startTime.toFixed(0)) - 2),
+ "description": "Submission ID: " + UUID +
+ "\n\nTimestamp: " +
+ getFormattedTime(startTime) + " to " + getFormattedTime(endTime) +
+ "\n\nCategory: " + segmentInfo.category,
+ "color": 10813440,
+ "author": {
+ "name": userID,
+ },
+ "thumbnail": {
+ "url": data.items[0].snippet.thumbnails.maxres ? data.items[0].snippet.thumbnails.maxres.url : "",
+ },
+ }],
+ }),
+ headers: {
+ 'Content-Type': 'application/json'
}
-
- const startTime = parseFloat(segmentInfo.segment[0]);
- const endTime = parseFloat(segmentInfo.segment[1]);
- sendWebhookNotification(userID, videoID, UUID, userSubmissionCountRow.submissionCount, data, {
- submissionStart: startTime,
- submissionEnd: endTime,
- }, segmentInfo);
-
- // If it is a first time submission
- // Then send a notification to discord
- if (config.discordFirstTimeSubmissionsWebhookURL === null || userSubmissionCountRow.submissionCount > 1) return;
-
- fetch(config.discordFirstTimeSubmissionsWebhookURL, {
- method: 'POST',
- body: JSON.stringify({
- "embeds": [{
- "title": data.items[0].snippet.title,
- "url": "https://www.youtube.com/watch?v=" + videoID + "&t=" + (parseInt(startTime.toFixed(0)) - 2),
- "description": "Submission ID: " + UUID +
- "\n\nTimestamp: " +
- getFormattedTime(startTime) + " to " + getFormattedTime(endTime) +
- "\n\nCategory: " + segmentInfo.category,
- "color": 10813440,
- "author": {
- "name": userID,
- },
- "thumbnail": {
- "url": data.items[0].snippet.thumbnails.maxres ? data.items[0].snippet.thumbnails.maxres.url : "",
- },
- }],
- }),
- headers: {
- 'Content-Type': 'application/json'
- }
- })
- .then(res => {
- if (res.status >= 400) {
- Logger.error("Error sending first time submission Discord hook");
- Logger.error(JSON.stringify(res));
- Logger.error("\n");
- }
- })
- .catch(err => {
- Logger.error("Failed to send first time submission Discord hook.");
- Logger.error(JSON.stringify(err));
+ })
+ .then(res => {
+ if (res.status >= 400) {
+ Logger.error("Error sending first time submission Discord hook");
+ Logger.error(JSON.stringify(res));
Logger.error("\n");
- });
+ }
+ })
+ .catch(err => {
+ Logger.error("Failed to send first time submission Discord hook.");
+ Logger.error(JSON.stringify(err));
+ Logger.error("\n");
});
}
}
@@ -166,73 +168,98 @@ async function sendWebhooksNB(userID: string, videoID: string, UUID: string, sta
// Looks like this was broken for no defined youtube key - fixed but IMO we shouldn't return
// false for a pass - it was confusing and lead to this bug - any use of this function in
// the future could have the same problem.
-async function autoModerateSubmission(submission: { videoID: any; userID: any; segments: any }) {
- // Get the video information from the youtube API
- if (config.youtubeAPIKey !== null) {
- const {err, data} = await new Promise((resolve) => {
- YouTubeAPI.listVideos(submission.videoID, (err: any, data: any) => resolve({err, data}));
- });
+async function autoModerateSubmission(apiVideoInfo: APIVideoInfo,
+ submission: { videoID: any; userID: any; segments: any }) {
+ if (apiVideoInfo) {
+ const {err, data} = apiVideoInfo;
+ if (err) return false;
- if (err) {
- return false;
- } else {
- // Check to see if video exists
- if (data.pageInfo.totalResults === 0) {
- return "No video exists with id " + submission.videoID;
+ // Check to see if video exists
+ if (data.pageInfo.totalResults === 0) return "No video exists with id " + submission.videoID;
+
+ const duration = getYouTubeVideoDuration(apiVideoInfo);
+ const segments = submission.segments;
+ let nbString = "";
+ for (let i = 0; i < segments.length; i++) {
+ const startTime = parseFloat(segments[i].segment[0]);
+ const endTime = parseFloat(segments[i].segment[1]);
+
+ if (duration == 0) {
+ // Allow submission if the duration is 0 (bug in youtube api)
+ return false;
} else {
- const segments = submission.segments;
- let nbString = "";
- for (let i = 0; i < segments.length; i++) {
+ if (segments[i].category === "sponsor") {
+ //Prepare timestamps to send to NB all at once
+ nbString = nbString + segments[i].segment[0] + "," + segments[i].segment[1] + ";";
+ }
+ }
+ }
+
+ // Get all submissions for this user
+ const allSubmittedByUser = await db.prepare('all', `SELECT "startTime", "endTime" FROM "sponsorTimes" WHERE "userID" = ? and "videoID" = ? and "votes" > -1`, [submission.userID, submission.videoID]);
+ const allSegmentTimes = [];
+ if (allSubmittedByUser !== undefined) {
+ //add segments the user has previously submitted
+ for (const segmentInfo of allSubmittedByUser) {
+ allSegmentTimes.push([parseFloat(segmentInfo.startTime), parseFloat(segmentInfo.endTime)]);
+ }
+ }
+
+ //add segments they are trying to add in this submission
+ for (let i = 0; i < segments.length; i++) {
+ let startTime = parseFloat(segments[i].segment[0]);
+ let endTime = parseFloat(segments[i].segment[1]);
+ allSegmentTimes.push([startTime, endTime]);
+ }
+
+ //merge all the times into non-overlapping arrays
+ const allSegmentsSorted = mergeTimeSegments(allSegmentTimes.sort(function (a, b) {
+ return a[0] - b[0] || a[1] - b[1];
+ }));
+
+ let videoDuration = data.items[0].contentDetails.duration;
+ videoDuration = isoDurations.toSeconds(isoDurations.parse(videoDuration));
+ if (videoDuration != 0) {
+ let allSegmentDuration = 0;
+ //sum all segment times together
+ allSegmentsSorted.forEach(segmentInfo => allSegmentDuration += segmentInfo[1] - segmentInfo[0]);
+ if (allSegmentDuration > (videoDuration / 100) * 80) {
+ // Reject submission if all segments combine are over 80% of the video
+ return "Total length of your submitted segments are over 80% of the video.";
+ }
+ }
+
+ // Check NeuralBlock
+ const neuralBlockURL = config.neuralBlockURL;
+ if (!neuralBlockURL) return false;
+ const response = await fetch(neuralBlockURL + "/api/checkSponsorSegments?vid=" + submission.videoID +
+ "&segments=" + nbString.substring(0, nbString.length - 1));
+ if (!response.ok) return false;
+
+ const nbPredictions = await response.json();
+ let nbDecision = false;
+ let predictionIdx = 0; //Keep track because only sponsor categories were submitted
+ for (let i = 0; i < segments.length; i++) {
+ if (segments[i].category === "sponsor") {
+ if (nbPredictions.probabilities[predictionIdx] < 0.70) {
+ nbDecision = true; // At least one bad entry
const startTime = parseFloat(segments[i].segment[0]);
const endTime = parseFloat(segments[i].segment[1]);
- let duration = data.items[0].contentDetails.duration;
- duration = isoDurations.toSeconds(isoDurations.parse(duration));
- if (duration == 0) {
- // Allow submission if the duration is 0 (bug in youtube api)
- return false;
- } else if ((endTime - startTime) > (duration / 100) * 80) {
- // Reject submission if over 80% of the video
- return "One of your submitted segments is over 80% of the video.";
- } else {
- if (segments[i].category === "sponsor") {
- //Prepare timestamps to send to NB all at once
- nbString = nbString + segments[i].segment[0] + "," + segments[i].segment[1] + ";";
- }
- }
- }
- // Check NeuralBlock
- const neuralBlockURL = config.neuralBlockURL;
- if (!neuralBlockURL) return false;
- const response = await fetch(neuralBlockURL + "/api/checkSponsorSegments?vid=" + submission.videoID +
- "&segments=" + nbString.substring(0, nbString.length - 1));
- if (!response.ok) return false;
-
- const nbPredictions = await response.json();
- let nbDecision = false;
- let predictionIdx = 0; //Keep track because only sponsor categories were submitted
- for (let i = 0; i < segments.length; i++) {
- if (segments[i].category === "sponsor") {
- if (nbPredictions.probabilities[predictionIdx] < 0.70) {
- nbDecision = true; // At least one bad entry
- const startTime = parseFloat(segments[i].segment[0]);
- const endTime = parseFloat(segments[i].segment[1]);
-
- const UUID = getSubmissionUUID(submission.videoID, segments[i].category, submission.userID, startTime, endTime);
- // Send to Discord
- // Note, if this is too spammy. Consider sending all the segments as one Webhook
- sendWebhooksNB(submission.userID, submission.videoID, UUID, startTime, endTime, segments[i].category, nbPredictions.probabilities[predictionIdx], data);
- }
- predictionIdx++;
- }
-
- }
- if (nbDecision) {
- return "Rejected based on NeuralBlock predictions.";
- } else {
- return false;
+ const UUID = getSubmissionUUID(submission.videoID, segments[i].category, submission.userID, startTime, endTime);
+ // Send to Discord
+ // Note, if this is too spammy. Consider sending all the segments as one Webhook
+ sendWebhooksNB(submission.userID, submission.videoID, UUID, startTime, endTime, segments[i].category, nbPredictions.probabilities[predictionIdx], data);
}
+ predictionIdx++;
}
+
+ }
+
+ if (nbDecision) {
+ return "Rejected based on NeuralBlock predictions.";
+ } else {
+ return false;
}
} else {
Logger.debug("Skipped YouTube API");
@@ -243,6 +270,21 @@ async function autoModerateSubmission(submission: { videoID: any; userID: any; s
}
}
+function getYouTubeVideoDuration(apiVideoInfo: APIVideoInfo): VideoDuration {
+ const duration = apiVideoInfo?.data?.items[0]?.contentDetails?.duration;
+ return duration ? isoDurations.toSeconds(isoDurations.parse(duration)) as VideoDuration : null;
+}
+
+async function getYouTubeVideoInfo(videoID: VideoID): Promise<APIVideoInfo> {
+ if (config.youtubeAPIKey !== null) {
+ return new Promise((resolve) => {
+ YouTubeAPI.listVideos(videoID, (err: any, data: any) => resolve({err, data}));
+ });
+ } else {
+ return null;
+ }
+}
+
function proxySubmission(req: Request) {
fetch(config.proxySubmission + '/api/skipSegments?userID=' + req.query.userID + '&videoID=' + req.query.videoID, {
method: 'POST',
@@ -267,14 +309,18 @@ export async function postSkipSegments(req: Request, res: Response) {
const videoID = req.query.videoID || req.body.videoID;
let userID = req.query.userID || req.body.userID;
+ let service: Service = req.query.service ?? req.body.service ?? Service.YouTube;
+ if (!Object.values(Service).some((val) => val == service)) {
+ service = Service.YouTube;
+ }
+ let videoDuration: VideoDuration = (parseFloat(req.query.videoDuration || req.body.videoDuration) || 0) as VideoDuration;
-
- let segments = req.body.segments;
+ let segments = req.body.segments as IncomingSegment[];
if (segments === undefined) {
// Use query instead
segments = [{
- segment: [req.query.startTime, req.query.endTime],
- category: req.query.category,
+ segment: [req.query.startTime as string, req.query.endTime as string],
+ category: req.query.category as Category
}];
}
@@ -312,7 +358,7 @@ export async function postSkipSegments(req: Request, res: Response) {
return res.status(403).send('Submission rejected due to a warning from a moderator. This means that we noticed you were making some common mistakes that are not malicious, and we just want to clarify the rules. Could you please send a message in Discord or Matrix so we can further help you?');
}
- const noSegmentList = (await db.prepare('all', 'SELECT category from "noSegments" where "videoID" = ?', [videoID])).map((list: any) => {
+ let noSegmentList = (await db.prepare('all', 'SELECT category from "noSegments" where "videoID" = ?', [videoID])).map((list: any) => {
return list.category;
});
@@ -321,6 +367,32 @@ export async function postSkipSegments(req: Request, res: Response) {
const decreaseVotes = 0;
+ let apiVideoInfo: APIVideoInfo = null;
+ if (service == Service.YouTube) {
+ apiVideoInfo = await getYouTubeVideoInfo(videoID);
+ }
+ const apiVideoDuration = getYouTubeVideoDuration(apiVideoInfo);
+ if (!videoDuration || (apiVideoDuration && Math.abs(videoDuration - apiVideoDuration) > 2)) {
+ // If api duration is far off, take that one instead (it is only precise to seconds, not millis)
+ videoDuration = apiVideoDuration || 0 as VideoDuration;
+ }
+
+ const previousSubmissions = await db.prepare('all', `SELECT "videoDuration", "UUID" FROM "sponsorTimes" WHERE "videoID" = ? AND "service" = ? AND "hidden" = 0
+ AND "shadowHidden" = 0 AND "votes" >= 0 AND "videoDuration" != 0`, [videoID, service]) as
+ {videoDuration: VideoDuration, UUID: SegmentUUID}[];
+ // If the video's duration is changed, then the video should be unlocked and old submissions should be hidden
+ const videoDurationChanged = previousSubmissions.length > 0 && !previousSubmissions.some((e) => Math.abs(videoDuration - e.videoDuration) < 2);
+ if (videoDurationChanged) {
+ // Hide all previous submissions
+ for (const submission of previousSubmissions) {
+ await db.prepare('run', `UPDATE "sponsorTimes" SET "hidden" = 1 WHERE "UUID" = ?`, [submission.UUID]);
+ }
+
+ // Reset no segments
+ noSegmentList = [];
+ deleteNoSegments(videoID, null);
+ }
+
// Check if all submissions are correct
for (let i = 0; i < segments.length; i++) {
if (segments[i] === undefined || segments[i].segment === undefined || segments[i].category === undefined) {
@@ -343,7 +415,7 @@ export async function postSkipSegments(req: Request, res: Response) {
+ segments[i].category + "'. A moderator has decided that no new segments are needed and that all current segments of this category are timed perfectly.\n\n "
+ (segments[i].category === "sponsor" ? "Maybe the segment you are submitting is a different category that you have not enabled and is not a sponsor. " +
"Categories that aren't sponsor, such as self-promotion can be enabled in the options.\n\n " : "")
- + "If you believe this is incorrect, please contact someone on Discord.",
+ + "If you believe this is incorrect, please contact someone on discord.gg/SponsorBlock or matrix.to/#/+sponsorblock:ajay.app",
);
return;
}
@@ -367,7 +439,7 @@ export async function postSkipSegments(req: Request, res: Response) {
//check if this info has already been submitted before
const duplicateCheck2Row = await db.prepare('get', `SELECT COUNT(*) as count FROM "sponsorTimes" WHERE "startTime" = ?
- and "endTime" = ? and "category" = ? and "videoID" = ?`, [startTime, endTime, segments[i].category, videoID]);
+ and "endTime" = ? and "category" = ? and "videoID" = ? and "service" = ?`, [startTime, endTime, segments[i].category, videoID, service]);
if (duplicateCheck2Row.count > 0) {
res.sendStatus(409);
return;
@@ -375,8 +447,8 @@ export async function postSkipSegments(req: Request, res: Response) {
}
// Auto moderator check
- if (!isVIP) {
- const autoModerateResult = await autoModerateSubmission({userID, videoID, segments});//startTime, endTime, category: segments[i].category});
+ if (!isVIP && service == Service.YouTube) {
+ const autoModerateResult = await autoModerateSubmission(apiVideoInfo, {userID, videoID, segments});//startTime, endTime, category: segments[i].category});
if (autoModerateResult == "Rejected based on NeuralBlock predictions.") {
// If NB automod rejects, the submission will start with -2 votes.
// Note, if one submission is bad all submissions will be affected.
@@ -437,63 +509,19 @@ export async function postSkipSegments(req: Request, res: Response) {
let startingVotes = 0 + decreaseVotes;
- if (config.youtubeAPIKey !== null) {
- let {err, data} = await new Promise((resolve) => {
- YouTubeAPI.listVideos(videoID, (err: any, data: any) => resolve({err, data}));
- });
-
- if (err) {
- Logger.error("Error while submitting when connecting to YouTube API: " + err);
- } else {
- //get all segments for this video and user
- const allSubmittedByUser = await db.prepare('all', `SELECT "startTime", "endTime" FROM "sponsorTimes" WHERE "userID" = ? and "videoID" = ? and "votes" > -1`, [userID, videoID]);
- const allSegmentTimes = [];
- if (allSubmittedByUser !== undefined) {
- //add segments the user has previously submitted
- for (const segmentInfo of allSubmittedByUser) {
- allSegmentTimes.push([parseFloat(segmentInfo.startTime), parseFloat(segmentInfo.endTime)]);
- }
- }
-
- //add segments they are trying to add in this submission
- for (let i = 0; i < segments.length; i++) {
- let startTime = parseFloat(segments[i].segment[0]);
- let endTime = parseFloat(segments[i].segment[1]);
- allSegmentTimes.push([startTime, endTime]);
- }
-
- //merge all the times into non-overlapping arrays
- const allSegmentsSorted = mergeTimeSegments(allSegmentTimes.sort(function (a, b) {
- return a[0] - b[0] || a[1] - b[1];
- }));
-
- let videoDuration = data.items[0].contentDetails.duration;
- videoDuration = isoDurations.toSeconds(isoDurations.parse(videoDuration));
- if (videoDuration != 0) {
- let allSegmentDuration = 0;
- //sum all segment times together
- allSegmentsSorted.forEach(segmentInfo => allSegmentDuration += segmentInfo[1] - segmentInfo[0]);
- if (allSegmentDuration > (videoDuration / 100) * 80) {
- // Reject submission if all segments combine are over 80% of the video
- res.status(400).send("Total length of your submitted segments are over 80% of the video.");
- return;
- }
- }
- }
- }
-
for (const segmentInfo of segments) {
//this can just be a hash of the data
//it's better than generating an actual UUID like what was used before
//also better for duplication checking
- const UUID = getSubmissionUUID(videoID, segmentInfo.category, userID, segmentInfo.segment[0], segmentInfo.segment[1]);
+ const UUID = getSubmissionUUID(videoID, segmentInfo.category, userID, parseFloat(segmentInfo.segment[0]), parseFloat(segmentInfo.segment[1]));
+ const hashedVideoID = getHash(videoID, 1);
const startingLocked = isVIP ? 1 : 0;
try {
await db.prepare('run', `INSERT INTO "sponsorTimes"
- ("videoID", "startTime", "endTime", "votes", "locked", "UUID", "userID", "timeSubmitted", "views", "category", "shadowHidden", "hashedVideoID")
- VALUES(?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)`, [
- videoID, segmentInfo.segment[0], segmentInfo.segment[1], startingVotes, startingLocked, UUID, userID, timeSubmitted, 0, segmentInfo.category, shadowBanned, getHash(videoID, 1),
+ ("videoID", "startTime", "endTime", "votes", "locked", "UUID", "userID", "timeSubmitted", "views", "category", "service", "videoDuration", "shadowHidden", "hashedVideoID")
+ VALUES(?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)`, [
+ videoID, segmentInfo.segment[0], segmentInfo.segment[1], startingVotes, startingLocked, UUID, userID, timeSubmitted, 0, segmentInfo.category, service, videoDuration, shadowBanned, hashedVideoID,
],
);
@@ -502,9 +530,10 @@ export async function postSkipSegments(req: Request, res: Response) {
// Clear redis cache for this video
redis.delAsync(skipSegmentsKey(videoID));
+ redis.delAsync(skipSegmentsHashKey(hashedVideoID, service));
} catch (err) {
//a DB change probably occurred
- res.sendStatus(502);
+ res.sendStatus(500);
Logger.error("Error when putting sponsorTime in the DB: " + videoID + ", " + segmentInfo.segment[0] + ", " +
segmentInfo.segment[1] + ", " + userID + ", " + segmentInfo.category + ". " + err);
@@ -529,7 +558,7 @@ export async function postSkipSegments(req: Request, res: Response) {
res.json(newSegments);
for (let i = 0; i < segments.length; i++) {
- sendWebhooks(userID, videoID, UUIDs[i], segments[i]);
+ sendWebhooks(apiVideoInfo, userID, videoID, UUIDs[i], segments[i], service);
}
}
diff --git a/src/routes/voteOnSponsorTime.ts b/src/routes/voteOnSponsorTime.ts
index 29f0e66..4cb7690 100644
--- a/src/routes/voteOnSponsorTime.ts
+++ b/src/routes/voteOnSponsorTime.ts
@@ -12,8 +12,8 @@ import {getHash} from '../utils/getHash';
import {config} from '../config';
import { UserID } from '../types/user.model';
import redis from '../utils/redis';
-import { skipSegmentsKey } from '../middleware/redisKeys';
-import { VideoID } from '../types/segments.model';
+import { skipSegmentsHashKey, skipSegmentsKey } from '../middleware/redisKeys';
+import { Category, HashedIP, IPAddress, SegmentUUID, Service, VideoID, VideoIDHash } from '../types/segments.model';
const voteTypes = {
normal: 0,
@@ -147,8 +147,8 @@ async function sendWebhooks(voteData: VoteData) {
}
}
-async function categoryVote(UUID: string, userID: string, isVIP: boolean, isOwnSubmission: boolean, category: string
- , hashedIP: string, finalResponse: FinalResponse, res: Response) {
+async function categoryVote(UUID: SegmentUUID, userID: UserID, isVIP: boolean, isOwnSubmission: boolean, category: Category
+ , hashedIP: HashedIP, finalResponse: FinalResponse, res: Response) {
// Check if they've already made a vote
const usersLastVoteInfo = await privateDB.prepare('get', `select count(*) as votes, category from "categoryVotes" where "UUID" = ? and "userID" = ? group by category`, [UUID, userID]);
@@ -158,8 +158,9 @@ async function categoryVote(UUID: string, userID: string, isVIP: boolean, isOwnS
return;
}
- const currentCategory = await db.prepare('get', `select category from "sponsorTimes" where "UUID" = ?`, [UUID]);
- if (!currentCategory) {
+ const videoInfo = (await db.prepare('get', `SELECT "category", "videoID", "hashedVideoID", "service" FROM "sponsorTimes" WHERE "UUID" = ?`,
+ [UUID])) as {category: Category, videoID: VideoID, hashedVideoID: VideoIDHash, service: Service};
+ if (!videoInfo) {
// Submission doesn't exist
res.status(400).send("Submission doesn't exist.");
return;
@@ -175,52 +176,57 @@ async function categoryVote(UUID: string, userID: string, isVIP: boolean, isOwnS
const timeSubmitted = Date.now();
const voteAmount = isVIP ? 500 : 1;
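+ // Whether this request is allowed to have its category vote recorded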
+ const ableToVote = isVIP || finalResponse.finalStatus === 200;
- // Add the vote
- if ((await db.prepare('get', `select count(*) as count from "categoryVotes" where "UUID" = ? and category = ?`, [UUID, category])).count > 0) {
- // Update the already existing db entry
- await db.prepare('run', `update "categoryVotes" set "votes" = "votes" + ? where "UUID" = ? and "category" = ?`, [voteAmount, UUID, category]);
- } else {
- // Add a db entry
- await db.prepare('run', `insert into "categoryVotes" ("UUID", "category", "votes") values (?, ?, ?)`, [UUID, category, voteAmount]);
+ if (ableToVote) {
+ // Add the vote
+ if ((await db.prepare('get', `select count(*) as count from "categoryVotes" where "UUID" = ? and category = ?`, [UUID, category])).count > 0) {
+ // Update the already existing db entry
+ await db.prepare('run', `update "categoryVotes" set "votes" = "votes" + ? where "UUID" = ? and "category" = ?`, [voteAmount, UUID, category]);
+ } else {
+ // Add a db entry
+ await db.prepare('run', `insert into "categoryVotes" ("UUID", "category", "votes") values (?, ?, ?)`, [UUID, category, voteAmount]);
+ }
+
+ // Add the info into the private db
+ if (usersLastVoteInfo?.votes > 0) {
+ // Reverse the previous vote
+ await db.prepare('run', `update "categoryVotes" set "votes" = "votes" - ? where "UUID" = ? and "category" = ?`, [voteAmount, UUID, usersLastVoteInfo.category]);
+
+ await privateDB.prepare('run', `update "categoryVotes" set "category" = ?, "timeSubmitted" = ?, "hashedIP" = ? where "userID" = ? and "UUID" = ?`, [category, timeSubmitted, hashedIP, userID, UUID]);
+ } else {
+ await privateDB.prepare('run', `insert into "categoryVotes" ("UUID", "userID", "hashedIP", "category", "timeSubmitted") values (?, ?, ?, ?, ?)`, [UUID, userID, hashedIP, category, timeSubmitted]);
+ }
+
+ // See if the submission's category is ready to change
+ const currentCategoryInfo = await db.prepare("get", `select votes from "categoryVotes" where "UUID" = ? and category = ?`, [UUID, videoInfo.category]);
+
+ const submissionInfo = await db.prepare("get", `SELECT "userID", "timeSubmitted", "votes" FROM "sponsorTimes" WHERE "UUID" = ?`, [UUID]);
+ const isSubmissionVIP = submissionInfo && await isUserVIP(submissionInfo.userID);
+ const startingVotes = isSubmissionVIP ? 10000 : 1;
+
+ // Change this value from 1 in the future to make it harder to change categories
+ // Done this way without ORs in case the value is zero
+ const currentCategoryCount = (currentCategoryInfo === undefined || currentCategoryInfo === null) ? startingVotes : currentCategoryInfo.votes;
+
+ // Add submission as vote
+ if (!currentCategoryInfo && submissionInfo) {
+ await db.prepare("run", `insert into "categoryVotes" ("UUID", "category", "votes") values (?, ?, ?)`, [UUID, videoInfo.category, currentCategoryCount]);
+
+ await privateDB.prepare("run", `insert into "categoryVotes" ("UUID", "userID", "hashedIP", "category", "timeSubmitted") values (?, ?, ?, ?, ?)`, [UUID, submissionInfo.userID, "unknown", videoInfo.category, submissionInfo.timeSubmitted]);
+ }
+
+ const nextCategoryCount = (nextCategoryInfo?.votes || 0) + voteAmount;
+
+ //TODO: In the future, raise this number from zero to make it harder to change categories
+ // VIPs change it every time
+ if (nextCategoryCount - currentCategoryCount >= Math.max(Math.ceil(submissionInfo?.votes / 2), 2) || isVIP || isOwnSubmission) {
+ // Replace the category
+ await db.prepare('run', `update "sponsorTimes" set "category" = ? where "UUID" = ?`, [category, UUID]);
+ }
}
- // Add the info into the private db
- if (usersLastVoteInfo?.votes > 0) {
- // Reverse the previous vote
- await db.prepare('run', `update "categoryVotes" set "votes" = "votes" - ? where "UUID" = ? and "category" = ?`, [voteAmount, UUID, usersLastVoteInfo.category]);
-
- await privateDB.prepare('run', `update "categoryVotes" set "category" = ?, "timeSubmitted" = ?, "hashedIP" = ? where "userID" = ? and "UUID" = ?`, [category, timeSubmitted, hashedIP, userID, UUID]);
- } else {
- await privateDB.prepare('run', `insert into "categoryVotes" ("UUID", "userID", "hashedIP", "category", "timeSubmitted") values (?, ?, ?, ?, ?)`, [UUID, userID, hashedIP, category, timeSubmitted]);
- }
-
- // See if the submissions category is ready to change
- const currentCategoryInfo = await db.prepare("get", `select votes from "categoryVotes" where "UUID" = ? and category = ?`, [UUID, currentCategory.category]);
-
- const submissionInfo = await db.prepare("get", `SELECT "userID", "timeSubmitted", "votes" FROM "sponsorTimes" WHERE "UUID" = ?`, [UUID]);
- const isSubmissionVIP = submissionInfo && await isUserVIP(submissionInfo.userID);
- const startingVotes = isSubmissionVIP ? 10000 : 1;
-
- // Change this value from 1 in the future to make it harder to change categories
- // Done this way without ORs incase the value is zero
- const currentCategoryCount = (currentCategoryInfo === undefined || currentCategoryInfo === null) ? startingVotes : currentCategoryInfo.votes;
-
- // Add submission as vote
- if (!currentCategoryInfo && submissionInfo) {
- await db.prepare("run", `insert into "categoryVotes" ("UUID", "category", "votes") values (?, ?, ?)`, [UUID, currentCategory.category, currentCategoryCount]);
-
- await privateDB.prepare("run", `insert into "categoryVotes" ("UUID", "userID", "hashedIP", "category", "timeSubmitted") values (?, ?, ?, ?, ?)`, [UUID, submissionInfo.userID, "unknown", currentCategory.category, submissionInfo.timeSubmitted]);
- }
-
- const nextCategoryCount = (nextCategoryInfo?.votes || 0) + voteAmount;
-
- //TODO: In the future, raise this number from zero to make it harder to change categories
- // VIPs change it every time
- if (nextCategoryCount - currentCategoryCount >= Math.max(Math.ceil(submissionInfo?.votes / 2), 2) || isVIP || isOwnSubmission) {
- // Replace the category
- await db.prepare('run', `update "sponsorTimes" set "category" = ? where "UUID" = ?`, [category, UUID]);
- }
+ clearRedisCache(videoInfo);
res.sendStatus(finalResponse.finalStatus);
}
@@ -230,10 +236,10 @@ export function getUserID(req: Request): UserID {
}
export async function voteOnSponsorTime(req: Request, res: Response) {
- const UUID = req.query.UUID as string;
+ const UUID = req.query.UUID as SegmentUUID;
const paramUserID = getUserID(req);
let type = req.query.type !== undefined ? parseInt(req.query.type as string) : undefined;
- const category = req.query.category as string;
+ const category = req.query.category as Category;
if (UUID === undefined || paramUserID === undefined || (type === undefined && category === undefined)) {
//invalid request
@@ -255,7 +261,7 @@ export async function voteOnSponsorTime(req: Request, res: Response) {
const ip = getIP(req);
//hash the ip 5000 times so no one can get it from the database
- const hashedIP = getHash(ip + config.globalSalt);
+ const hashedIP: HashedIP = getHash((ip + config.globalSalt) as IPAddress);
//check if this user is on the vip list
const isVIP = (await db.prepare('get', `SELECT count(*) as "userCount" FROM "vipUsers" WHERE "userID" = ?`, [nonAnonUserID])).userCount > 0;
@@ -280,13 +286,19 @@ export async function voteOnSponsorTime(req: Request, res: Response) {
return categoryVote(UUID, nonAnonUserID, isVIP, isOwnSubmission, category, hashedIP, finalResponse, res);
}
- if (type == 1 && !isVIP && !isOwnSubmission) {
+ if (type !== undefined && !isVIP && !isOwnSubmission) {
// Check if upvoting hidden segment
const voteInfo = await db.prepare('get', `SELECT votes FROM "sponsorTimes" WHERE "UUID" = ?`, [UUID]);
if (voteInfo && voteInfo.votes <= -2) {
- res.status(403).send("Not allowed to upvote segment with too many downvotes unless you are VIP.");
- return;
+ if (type == 1) {
+ res.status(403).send("Not allowed to upvote segment with too many downvotes unless you are VIP.");
+ return;
+ } else if (type == 0) {
+ // Already downvoted enough, ignore
+ res.status(200).send();
+ return;
+ }
}
}
@@ -350,13 +362,13 @@ export async function voteOnSponsorTime(req: Request, res: Response) {
}
//check if the increment amount should be multiplied (downvotes have more power if there have been many views)
- const row = await db.prepare('get', `SELECT "videoID", votes, views FROM "sponsorTimes" WHERE "UUID" = ?`, [UUID]) as
- {videoID: VideoID, votes: number, views: number};
+ const videoInfo = await db.prepare('get', `SELECT "videoID", "hashedVideoID", "service", "votes", "views" FROM "sponsorTimes" WHERE "UUID" = ?`, [UUID]) as
+ {videoID: VideoID, hashedVideoID: VideoIDHash, service: Service, votes: number, views: number};
if (voteTypeEnum === voteTypes.normal) {
if ((isVIP || isOwnSubmission) && incrementAmount < 0) {
//this user is a vip and a downvote
- incrementAmount = -(row.votes + 2 - oldIncrementAmount);
+ incrementAmount = -(videoInfo.votes + 2 - oldIncrementAmount);
type = incrementAmount;
}
} else if (voteTypeEnum == voteTypes.incorrect) {
@@ -371,7 +383,8 @@ export async function voteOnSponsorTime(req: Request, res: Response) {
const ableToVote = isVIP
|| ((await db.prepare("get", `SELECT "userID" FROM "sponsorTimes" WHERE "userID" = ?`, [nonAnonUserID])) !== undefined
&& (await privateDB.prepare("get", `SELECT "userID" FROM "shadowBannedUsers" WHERE "userID" = ?`, [nonAnonUserID])) === undefined
- && (await privateDB.prepare("get", `SELECT "UUID" FROM "votes" WHERE "UUID" = ? AND "hashedIP" = ? AND "userID" != ?`, [UUID, hashedIP, userID])) === undefined);
+ && (await privateDB.prepare("get", `SELECT "UUID" FROM "votes" WHERE "UUID" = ? AND "hashedIP" = ? AND "userID" != ?`, [UUID, hashedIP, userID])) === undefined)
+ && finalResponse.finalStatus === 200;
if (ableToVote) {
//update the votes table
@@ -399,8 +412,7 @@ export async function voteOnSponsorTime(req: Request, res: Response) {
await db.prepare('run', 'UPDATE "sponsorTimes" SET locked = 0 WHERE "UUID" = ?', [UUID]);
}
- // Clear redis cache for this video
- redis.delAsync(skipSegmentsKey(row?.videoID));
+ clearRedisCache(videoInfo);
//for each positive vote, see if a hidden submission can be shown again
if (incrementAmount > 0 && voteTypeEnum === voteTypes.normal) {
@@ -437,7 +449,7 @@ export async function voteOnSponsorTime(req: Request, res: Response) {
voteTypeEnum,
isVIP,
isOwnSubmission,
- row,
+ row: videoInfo,
category,
incrementAmount,
oldIncrementAmount,
@@ -449,4 +461,11 @@ export async function voteOnSponsorTime(req: Request, res: Response) {
res.status(500).json({error: 'Internal error creating segment vote'});
}
-}
\ No newline at end of file
+}
+
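+// Clears both cached skip-segment lookups for a video: the plain videoID key and the hashed videoID + service key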
+function clearRedisCache(videoInfo: { videoID: VideoID; hashedVideoID: VideoIDHash; service: Service; }) {
+ if (videoInfo) {
+ redis.delAsync(skipSegmentsKey(videoInfo.videoID));
+ redis.delAsync(skipSegmentsHashKey(videoInfo.hashedVideoID, videoInfo.service));
+ }
+}
diff --git a/src/types/config.model.ts b/src/types/config.model.ts
index 16c6b3a..117c732 100644
--- a/src/types/config.model.ts
+++ b/src/types/config.model.ts
@@ -39,6 +39,7 @@ export interface SBSConfig {
redis?: redis.ClientOpts;
maxRewardTimePerSegmentInSeconds?: number;
postgres?: PoolConfig;
+ dumpDatabase?: DumpDatabase;
}
export interface WebhookConfig {
@@ -62,4 +63,17 @@ export interface PostgresConfig {
createDbIfNotExists: boolean;
enableWalCheckpointNumber: boolean;
postgres: PoolConfig;
-}
\ No newline at end of file
+}
+
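+// Settings for the periodic public database dump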
+export interface DumpDatabase {
+ enabled: boolean;
+ minTimeBetweenMs: number;
+ appExportPath: string;
+ postgresExportPath: string;
+ tables: DumpDatabaseTable[];
+}
+
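+// A table included in the dump; "order" is likely the column used to sort the exported rows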
+export interface DumpDatabaseTable {
+ name: string;
+ order?: string;
+}
diff --git a/src/types/segments.model.ts b/src/types/segments.model.ts
index e7b6eaf..a95bac3 100644
--- a/src/types/segments.model.ts
+++ b/src/types/segments.model.ts
@@ -3,15 +3,33 @@ import { SBRecord } from "./lib.model";
export type SegmentUUID = string & { __segmentUUIDBrand: unknown };
export type VideoID = string & { __videoIDBrand: unknown };
+export type VideoDuration = number & { __videoDurationBrand: unknown };
export type Category = string & { __categoryBrand: unknown };
export type VideoIDHash = VideoID & HashedValue;
export type IPAddress = string & { __ipAddressBrand: unknown };
export type HashedIP = IPAddress & HashedValue;
+// Uncomment as needed
+export enum Service {
+ YouTube = 'YouTube',
+ PeerTube = 'PeerTube',
+ // Twitch = 'Twitch',
+ // Nebula = 'Nebula',
+ // RSS = 'RSS',
+ // Corridor = 'Corridor',
+ // Lbry = 'Lbry'
+}
+
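+// A segment as received from the client; segment times arrive as strings and are parsed on submission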
+export interface IncomingSegment {
+ category: Category;
+ segment: string[];
+}
+
export interface Segment {
category: Category;
segment: number[];
UUID: SegmentUUID;
+ videoDuration: VideoDuration;
}
export enum Visibility {
@@ -28,6 +46,7 @@ export interface DBSegment {
locked: boolean;
shadowHidden: Visibility;
videoID: VideoID;
+ videoDuration: VideoDuration;
hashedVideoID: VideoIDHash;
}
diff --git a/test.json b/test.json
index add6ebf..58eb72a 100644
--- a/test.json
+++ b/test.json
@@ -49,7 +49,7 @@
]
}
],
- "categoryList": ["sponsor", "intro", "outro", "interaction", "selfpromo", "music_offtopic"],
+ "categoryList": ["sponsor", "selfpromo", "interaction", "intro", "outro", "preview", "music_offtopic"],
"maxNumberOfActiveWarnings": 3,
"hoursAfterWarningExpires": 24,
"rateLimit": {
diff --git a/test/cases/getSkipSegments.ts b/test/cases/getSkipSegments.ts
index 85dcfbb..f40203c 100644
--- a/test/cases/getSkipSegments.ts
+++ b/test/cases/getSkipSegments.ts
@@ -5,17 +5,19 @@ import {getHash} from '../../src/utils/getHash';
describe('getSkipSegments', () => {
before(async () => {
- let startOfQuery = 'INSERT INTO "sponsorTimes" ("videoID", "startTime", "endTime", "votes", "locked", "UUID", "userID", "timeSubmitted", views, category, "shadowHidden", "hashedVideoID") VALUES';
- await db.prepare("run", startOfQuery + "('testtesttest', 1, 11, 2, 0, '1-uuid-0', 'testman', 0, 50, 'sponsor', 0, '" + getHash('testtesttest', 1) + "')");
- await db.prepare("run", startOfQuery + "('testtesttest', 20, 33, 2, 0, '1-uuid-2', 'testman', 0, 50, 'intro', 0, '" + getHash('testtesttest', 1) + "')");
- await db.prepare("run", startOfQuery + "('testtesttest,test', 1, 11, 2, 0, '1-uuid-1', 'testman', 0, 50, 'sponsor', 0, '" + getHash('testtesttest,test', 1) + "')");
- await db.prepare("run", startOfQuery + "('test3', 1, 11, 2, 0, '1-uuid-4', 'testman', 0, 50, 'sponsor', 0, '" + getHash('test3', 1) + "')");
- await db.prepare("run", startOfQuery + "('test3', 7, 22, -3, 0, '1-uuid-5', 'testman', 0, 50, 'sponsor', 0, '" + getHash('test3', 1) + "')");
- await db.prepare("run", startOfQuery + "('multiple', 1, 11, 2, 0, '1-uuid-6', 'testman', 0, 50, 'intro', 0, '" + getHash('multiple', 1) + "')");
- await db.prepare("run", startOfQuery + "('multiple', 20, 33, 2, 0, '1-uuid-7', 'testman', 0, 50, 'intro', 0, '" + getHash('multiple', 1) + "')");
- await db.prepare("run", startOfQuery + "('locked', 20, 33, 2, 1, '1-uuid-locked-8', 'testman', 0, 50, 'intro', 0, '" + getHash('locked', 1) + "')");
- await db.prepare("run", startOfQuery + "('locked', 20, 34, 100000, 0, '1-uuid-9', 'testman', 0, 50, 'intro', 0, '" + getHash('locked', 1) + "')");
-
+ let startOfQuery = 'INSERT INTO "sponsorTimes" ("videoID", "startTime", "endTime", "votes", "locked", "UUID", "userID", "timeSubmitted", views, category, "service", "videoDuration", "hidden", "shadowHidden", "hashedVideoID") VALUES';
+ await db.prepare("run", startOfQuery + "('testtesttest', 1, 11, 2, 0, '1-uuid-0', 'testman', 0, 50, 'sponsor', 'YouTube', 100, 0, 0, '" + getHash('testtesttest', 1) + "')");
+ await db.prepare("run", startOfQuery + "('testtesttest2', 1, 11, 2, 0, '1-uuid-0-1', 'testman', 0, 50, 'sponsor', 'PeerTube', 120, 0, 0, '" + getHash('testtesttest2', 1) + "')");
+ await db.prepare("run", startOfQuery + "('testtesttest', 20, 33, 2, 0, '1-uuid-2', 'testman', 0, 50, 'intro', 'YouTube', 101, 0, 0, '" + getHash('testtesttest', 1) + "')");
+ await db.prepare("run", startOfQuery + "('testtesttest,test', 1, 11, 2, 0, '1-uuid-1', 'testman', 0, 50, 'sponsor', 'YouTube', 140, 0, 0, '" + getHash('testtesttest,test', 1) + "')");
+ await db.prepare("run", startOfQuery + "('test3', 1, 11, 2, 0, '1-uuid-4', 'testman', 0, 50, 'sponsor', 'YouTube', 200, 0, 0, '" + getHash('test3', 1) + "')");
+ await db.prepare("run", startOfQuery + "('test3', 7, 22, -3, 0, '1-uuid-5', 'testman', 0, 50, 'sponsor', 'YouTube', 300, 0, 0, '" + getHash('test3', 1) + "')");
+ await db.prepare("run", startOfQuery + "('multiple', 1, 11, 2, 0, '1-uuid-6', 'testman', 0, 50, 'intro', 'YouTube', 400, 0, 0, '" + getHash('multiple', 1) + "')");
+ await db.prepare("run", startOfQuery + "('multiple', 20, 33, 2, 0, '1-uuid-7', 'testman', 0, 50, 'intro', 'YouTube', 500, 0, 0, '" + getHash('multiple', 1) + "')");
+ await db.prepare("run", startOfQuery + "('locked', 20, 33, 2, 1, '1-uuid-locked-8', 'testman', 0, 50, 'intro', 'YouTube', 230, 0, 0, '" + getHash('locked', 1) + "')");
+ await db.prepare("run", startOfQuery + "('locked', 20, 34, 100000, 0, '1-uuid-9', 'testman', 0, 50, 'intro', 'YouTube', 190, 0, 0, '" + getHash('locked', 1) + "')");
+ await db.prepare("run", startOfQuery + "('onlyHiddenSegments', 20, 34, 100000, 0, 'onlyHiddenSegments', 'testman', 0, 50, 'sponsor', 'YouTube', 190, 1, 0, '" + getHash('onlyHiddenSegments', 1) + "')");
+
return;
});
@@ -27,7 +29,24 @@ describe('getSkipSegments', () => {
else {
const data = await res.json();
if (data.length === 1 && data[0].segment[0] === 1 && data[0].segment[1] === 11
- && data[0].category === "sponsor" && data[0].UUID === "1-uuid-0") {
+ && data[0].category === "sponsor" && data[0].UUID === "1-uuid-0" && data[0].videoDuration === 100) {
+ return;
+ } else {
+ return ("Received incorrect body: " + (await res.text()));
+ }
+ }
+ })
+ .catch(err => "Couldn't call endpoint");
+ });
+
+ it('Should be able to get a time by category for a different service 1', () => {
+ fetch(getbaseURL() + "/api/skipSegments?videoID=testtesttest2&category=sponsor&service=PeerTube")
+ .then(async res => {
+ if (res.status !== 200) return ("Status code was: " + res.status);
+ else {
+ const data = await res.json();
+ if (data.length === 1 && data[0].segment[0] === 1 && data[0].segment[1] === 11
+ && data[0].category === "sponsor" && data[0].UUID === "1-uuid-0-1" && data[0].videoDuration === 120) {
return;
} else {
return ("Received incorrect body: " + (await res.text()));
@@ -61,7 +80,7 @@ describe('getSkipSegments', () => {
else {
const data = await res.json();
if (data.length === 1 && data[0].segment[0] === 1 && data[0].segment[1] === 11
- && data[0].category === "sponsor" && data[0].UUID === "1-uuid-0") {
+ && data[0].category === "sponsor" && data[0].UUID === "1-uuid-0" && data[0].videoDuration === 100) {
return;
} else {
return ("Received incorrect body: " + (await res.text()));
@@ -78,7 +97,23 @@ describe('getSkipSegments', () => {
else {
const data = await res.json();
if (data.length === 1 && data[0].segment[0] === 20 && data[0].segment[1] === 33
- && data[0].category === "intro" && data[0].UUID === "1-uuid-2") {
+ && data[0].category === "intro" && data[0].UUID === "1-uuid-2" && data[0].videoDuration === 101) {
+ return;
+ } else {
+ return ("Received incorrect body: " + (await res.text()));
+ }
+ }
+ })
+ .catch(err => ("Couldn't call endpoint"));
+ });
+
+ it('Should be empty if all submissions are hidden', () => {
+ fetch(getbaseURL() + "/api/skipSegments?videoID=onlyHiddenSegments")
+ .then(async res => {
+ if (res.status !== 200) return ("Status code was: " + res.status);
+ else {
+ const data = await res.json();
+ if (data.length === 0) {
return;
} else {
return ("Received incorrect body: " + (await res.text()));
@@ -99,9 +134,9 @@ describe('getSkipSegments', () => {
let success = true;
for (const segment of data) {
if ((segment.segment[0] !== 20 || segment.segment[1] !== 33
- || segment.category !== "intro" || segment.UUID !== "1-uuid-7") &&
+ || segment.category !== "intro" || segment.UUID !== "1-uuid-7" || segment.videoDuration === 500) &&
(segment.segment[0] !== 1 || segment.segment[1] !== 11
- || segment.category !== "intro" || segment.UUID !== "1-uuid-6")) {
+ || segment.category !== "intro" || segment.UUID !== "1-uuid-6" || segment.videoDuration === 400)) {
success = false;
break;
}
diff --git a/test/cases/getSegmentsByHash.ts b/test/cases/getSkipSegmentsByHash.ts
similarity index 74%
rename from test/cases/getSegmentsByHash.ts
rename to test/cases/getSkipSegmentsByHash.ts
index d957e2d..23d9d49 100644
--- a/test/cases/getSegmentsByHash.ts
+++ b/test/cases/getSkipSegmentsByHash.ts
@@ -12,11 +12,13 @@ sinonStub.callsFake(YouTubeApiMock.listVideos);
describe('getSegmentsByHash', () => {
before(async () => {
- let startOfQuery = 'INSERT INTO "sponsorTimes" ("videoID", "startTime", "endTime", "votes", "UUID", "userID", "timeSubmitted", views, category, "shadowHidden", "hashedVideoID") VALUES';
- await db.prepare("run", startOfQuery + "('getSegmentsByHash-0', 1, 10, 2, 'getSegmentsByHash-0-0', 'testman', 0, 50, 'sponsor', 0, '" + getHash('getSegmentsByHash-0', 1) + "')"); // hash = fdaff4dee1043451faa7398324fb63d8618ebcd11bddfe0491c488db12c6c910
- await db.prepare("run", startOfQuery + "('getSegmentsByHash-0', 20, 30, 2, 'getSegmentsByHash-0-1', 'testman', 100, 150, 'intro', 0, '" + getHash('getSegmentsByHash-0', 1) + "')"); // hash = fdaff4dee1043451faa7398324fb63d8618ebcd11bddfe0491c488db12c6c910
- await db.prepare("run", startOfQuery + "('getSegmentsByHash-noMatchHash', 40, 50, 2, 'getSegmentsByHash-noMatchHash', 'testman', 0, 50, 'sponsor', 0, 'fdaffnoMatchHash')"); // hash = fdaff4dee1043451faa7398324fb63d8618ebcd11bddfe0491c488db12c6c910
- await db.prepare("run", startOfQuery + "('getSegmentsByHash-1', 60, 70, 2, 'getSegmentsByHash-1', 'testman', 0, 50, 'sponsor', 0, '" + getHash('getSegmentsByHash-1', 1) + "')"); // hash = 3272fa85ee0927f6073ef6f07ad5f3146047c1abba794cfa364d65ab9921692b
+ let startOfQuery = 'INSERT INTO "sponsorTimes" ("videoID", "startTime", "endTime", "votes", "UUID", "userID", "timeSubmitted", views, category, "service", "hidden", "shadowHidden", "hashedVideoID") VALUES';
+ await db.prepare("run", startOfQuery + "('getSegmentsByHash-0', 1, 10, 2, 'getSegmentsByHash-0-0', 'testman', 0, 50, 'sponsor', 'YouTube', 0, 0, '" + getHash('getSegmentsByHash-0', 1) + "')"); // hash = fdaff4dee1043451faa7398324fb63d8618ebcd11bddfe0491c488db12c6c910
+ await db.prepare("run", startOfQuery + "('getSegmentsByHash-0', 1, 10, 2, 'getSegmentsByHash-0-0-1', 'testman', 0, 50, 'sponsor', 'PeerTube', 0, 0, '" + getHash('getSegmentsByHash-0', 1) + "')"); // hash = fdaff4dee1043451faa7398324fb63d8618ebcd11bddfe0491c488db12c6c910
+ await db.prepare("run", startOfQuery + "('getSegmentsByHash-0', 20, 30, 2, 'getSegmentsByHash-0-1', 'testman', 100, 150, 'intro', 'YouTube', 0, 0, '" + getHash('getSegmentsByHash-0', 1) + "')"); // hash = fdaff4dee1043451faa7398324fb63d8618ebcd11bddfe0491c488db12c6c910
+ await db.prepare("run", startOfQuery + "('getSegmentsByHash-noMatchHash', 40, 50, 2, 'getSegmentsByHash-noMatchHash', 'testman', 0, 50, 'sponsor', 'YouTube', 0, 0, 'fdaffnoMatchHash')"); // hash = fdaff4dee1043451faa7398324fb63d8618ebcd11bddfe0491c488db12c6c910
+ await db.prepare("run", startOfQuery + "('getSegmentsByHash-1', 60, 70, 2, 'getSegmentsByHash-1', 'testman', 0, 50, 'sponsor', 'YouTube', 0, 0, '" + getHash('getSegmentsByHash-1', 1) + "')"); // hash = 3272fa85ee0927f6073ef6f07ad5f3146047c1abba794cfa364d65ab9921692b
+ await db.prepare("run", startOfQuery + "('onlyHidden', 60, 70, 2, 'onlyHidden', 'testman', 0, 50, 'sponsor', 'YouTube', 1, 0, '" + getHash('onlyHidden', 1) + "')"); // hash = f3a199e1af001d716cdc6599360e2b062c2d2b3fa2885f6d9d2fd741166cbbd3
});
it('Should be able to get a 200', (done: Done) => {
@@ -54,6 +56,19 @@ describe('getSegmentsByHash', () => {
.catch(err => done("Couldn't call endpoint"));
});
+ it('Should be able to get an empty array if only hidden videos', (done: Done) => {
+ fetch(getbaseURL() + '/api/skipSegments/f3a1?categories=["sponsor"]')
+ .then(async res => {
+ if (res.status !== 404) done("non 404 status code, was " + res.status);
+ else {
+ const body = await res.text();
+ if (JSON.parse(body).length === 0 && body === '[]') done(); // pass
+ else done("non empty array returned");
+ }
+ })
+ .catch(err => done("Couldn't call endpoint"));
+ });
+
it('Should return 400 prefix too short', (done: Done) => {
fetch(getbaseURL() + '/api/skipSegments/11?categories=["shilling"]')
.then(res => {
@@ -128,7 +143,24 @@ describe('getSegmentsByHash', () => {
if (body.length !== 2) done("expected 2 videos, got " + body.length);
else if (body[0].segments.length !== 1) done("expected 1 segments for first video, got " + body[0].segments.length);
else if (body[1].segments.length !== 1) done("expected 1 segments for second video, got " + body[1].segments.length);
- else if (body[0].segments[0].category !== 'sponsor' || body[1].segments[0].category !== 'sponsor') done("both segments are not sponsor");
+ else if (body[0].segments[0].category !== 'sponsor'
+ || body[0].segments[0].UUID !== 'getSegmentsByHash-0-0'
+ || body[1].segments[0].category !== 'sponsor') done("both segments are not sponsor");
+ else done();
+ }
+ })
+ .catch(err => done("Couldn't call endpoint"));
+ });
+
+ it('Should be able to get 200 for no categories (default sponsor) for a non-YouTube service', (done: Done) => {
+ fetch(getbaseURL() + '/api/skipSegments/fdaf?service=PeerTube')
+ .then(async res => {
+ if (res.status !== 200) done("non 200 status code, was " + res.status);
+ else {
+ const body = await res.json();
+ if (body.length !== 1) done("expected 1 video, got " + body.length);
+ else if (body[0].segments.length !== 1) done("expected 1 segment for the video, got " + body[0].segments.length);
+ else if (body[0].segments[0].UUID !== 'getSegmentsByHash-0-0-1') done("did not get the expected PeerTube segment");
else done();
}
})
diff --git a/test/cases/postSkipSegments.ts b/test/cases/postSkipSegments.ts
index e31326a..512c2ee 100644
--- a/test/cases/postSkipSegments.ts
+++ b/test/cases/postSkipSegments.ts
@@ -6,6 +6,7 @@ import {db} from '../../src/databases/databases';
import {ImportMock} from 'ts-mock-imports';
import * as YouTubeAPIModule from '../../src/utils/youtubeApi';
import {YouTubeApiMock} from '../youtubeMock';
const mockManager = ImportMock.mockStaticClass(YouTubeAPIModule, 'YouTubeAPI');
const sinonStub = mockManager.mock('listVideos');
@@ -97,6 +98,177 @@ describe('postSkipSegments', () => {
.catch(err => done(err));
});
+ it('Should be able to submit a single time with a duration from the YouTube API (JSON method)', (done: Done) => {
+ fetch(getbaseURL()
+ + "/api/postVideoSponsorTimes", {
+ method: 'POST',
+ headers: {
+ 'Content-Type': 'application/json',
+ },
+ body: JSON.stringify({
+ userID: "test",
+ videoID: "dQw4w9WgXZX",
+ videoDuration: 100,
+ segments: [{
+ segment: [0, 10],
+ category: "sponsor",
+ }],
+ }),
+ })
+ .then(async res => {
+ if (res.status === 200) {
+ const row = await db.prepare('get', `SELECT "startTime", "endTime", "locked", "category", "videoDuration" FROM "sponsorTimes" WHERE "videoID" = ?`, ["dQw4w9WgXZX"]);
+ if (row.startTime === 0 && row.endTime === 10 && row.locked === 0 && row.category === "sponsor" && row.videoDuration === 5010) {
+ done();
+ } else {
+ done("Submitted times were not saved. Actual submission: " + JSON.stringify(row));
+ }
+ } else {
+ done("Status code was " + res.status);
+ }
+ })
+ .catch(err => done(err));
+ });
+
+ it('Should be able to submit a single time with a precise duration close to the one from the YouTube API (JSON method)', (done: Done) => {
+ fetch(getbaseURL()
+ + "/api/postVideoSponsorTimes", {
+ method: 'POST',
+ headers: {
+ 'Content-Type': 'application/json',
+ },
+ body: JSON.stringify({
+ userID: "test",
+ videoID: "dQw4w9WgXZH",
+ videoDuration: 5010.20,
+ segments: [{
+ segment: [1, 10],
+ category: "sponsor",
+ }],
+ }),
+ })
+ .then(async res => {
+ if (res.status === 200) {
+ const row = await db.prepare('get', `SELECT "startTime", "endTime", "locked", "category", "videoDuration" FROM "sponsorTimes" WHERE "videoID" = ?`, ["dQw4w9WgXZH"]);
+ if (row.startTime === 1 && row.endTime === 10 && row.locked === 0 && row.category === "sponsor" && row.videoDuration === 5010.20) {
+ done();
+ } else {
+ done("Submitted times were not saved. Actual submission: " + JSON.stringify(row));
+ }
+ } else {
+ done("Status code was " + res.status);
+ }
+ })
+ .catch(err => done(err));
+ });
+
+ it('Should be able to submit a single time with a duration in the body (JSON method)', (done: Done) => {
+ fetch(getbaseURL()
+ + "/api/postVideoSponsorTimes", {
+ method: 'POST',
+ headers: {
+ 'Content-Type': 'application/json',
+ },
+ body: JSON.stringify({
+ userID: "test",
+ videoID: "noDuration",
+ videoDuration: 100,
+ segments: [{
+ segment: [0, 10],
+ category: "sponsor",
+ }],
+ }),
+ })
+ .then(async res => {
+ if (res.status === 200) {
+ const row = await db.prepare('get', `SELECT "startTime", "endTime", "locked", "category", "videoDuration" FROM "sponsorTimes" WHERE "videoID" = ?`, ["noDuration"]);
+ if (row.startTime === 0 && row.endTime === 10 && row.locked === 0 && row.category === "sponsor" && row.videoDuration === 100) {
+ done();
+ } else {
+ done("Submitted times were not saved. Actual submission: " + JSON.stringify(row));
+ }
+ } else {
+ done("Status code was " + res.status);
+ }
+ })
+ .catch(err => done(err));
+ });
+
+ it('Should be able to submit with a new duration, and hide old submissions and remove segment locks', async () => {
+ await db.prepare("run", `INSERT INTO "noSegments" ("userID", "videoID", "category")
+ VALUES ('` + getHash("VIPUser-noSegments") + "', 'noDuration', 'sponsor')");
+
+ try {
+ const res = await fetch(getbaseURL()
+ + "/api/postVideoSponsorTimes", {
+ method: 'POST',
+ headers: {
+ 'Content-Type': 'application/json',
+ },
+ body: JSON.stringify({
+ userID: "test",
+ videoID: "noDuration",
+ videoDuration: 100,
+ segments: [{
+ segment: [1, 10],
+ category: "sponsor",
+ }],
+ }),
+ });
+
+ if (res.status === 200) {
+ const noSegmentsRow = await db.prepare('get', `SELECT * FROM "noSegments" WHERE "videoID" = ?`, ["noDuration"]);
+ const videoRows = await db.prepare('all', `SELECT "startTime", "endTime", "locked", "category", "videoDuration"
+ FROM "sponsorTimes" WHERE "videoID" = ? AND hidden = 0`, ["noDuration"]);
+ const videoRow = videoRows[0];
+ const hiddenVideoRows = await db.prepare('all', `SELECT "startTime", "endTime", "locked", "category", "videoDuration"
+ FROM "sponsorTimes" WHERE "videoID" = ? AND hidden = 1`, ["noDuration"]);
+ if (noSegmentsRow === undefined && videoRows.length === 1 && hiddenVideoRows.length === 1 && videoRow.startTime === 1 && videoRow.endTime === 10
+ && videoRow.locked === 0 && videoRow.category === "sponsor" && videoRow.videoDuration === 100) {
+ return;
+ } else {
+ return "Submitted times were not saved. Actual submission: " + JSON.stringify(videoRow);
+ }
+ } else {
+ return "Status code was " + res.status;
+ }
+ } catch (e) {
+ return e;
+ }
+ });
+
+ it('Should be able to submit a single time under a different service (JSON method)', (done: Done) => {
+ fetch(getbaseURL()
+ + "/api/postVideoSponsorTimes", {
+ method: 'POST',
+ headers: {
+ 'Content-Type': 'application/json',
+ },
+ body: JSON.stringify({
+ userID: "test",
+ videoID: "dQw4w9WgXcG",
+ service: "PeerTube",
+ segments: [{
+ segment: [0, 10],
+ category: "sponsor",
+ }],
+ }),
+ })
+ .then(async res => {
+ if (res.status === 200) {
+ const row = await db.prepare('get', `SELECT "startTime", "endTime", "locked", "category", "service" FROM "sponsorTimes" WHERE "videoID" = ?`, ["dQw4w9WgXcG"]);
+ if (row.startTime === 0 && row.endTime === 10 && row.locked === 0 && row.category === "sponsor" && row.service === "PeerTube") {
+ done();
+ } else {
+ done("Submitted times were not saved. Actual submission: " + JSON.stringify(row));
+ }
+ } else {
+ done("Status code was " + res.status);
+ }
+ })
+ .catch(err => done(err));
+ });
+
it('VIP submission should start locked', (done: Done) => {
fetch(getbaseURL()
+ "/api/postVideoSponsorTimes", {
@@ -244,7 +416,7 @@ describe('postSkipSegments', () => {
}),
})
.then(async res => {
- if (res.status === 400) {
+ if (res.status === 403) {
const rows = await db.prepare('all', `SELECT "startTime", "endTime", "category" FROM "sponsorTimes" WHERE "videoID" = ? and "votes" > -1`, ["n9rIGdXnSJc"]);
let success = true;
if (rows.length === 4) {
@@ -292,7 +464,7 @@ describe('postSkipSegments', () => {
}),
})
.then(async res => {
- if (res.status === 400) {
+ if (res.status === 403) {
const rows = await db.prepare('all', `SELECT "startTime", "endTime", "category" FROM "sponsorTimes" WHERE "videoID" = ? and "votes" > -1`, ["80percent_video"]);
let success = rows.length == 2;
for (const row of rows) {
diff --git a/test/cases/voteOnSponsorTime.ts b/test/cases/voteOnSponsorTime.ts
index 47a1f19..86c3233 100644
--- a/test/cases/voteOnSponsorTime.ts
+++ b/test/cases/voteOnSponsorTime.ts
@@ -368,10 +368,25 @@ describe('voteOnSponsorTime', () => {
fetch(getbaseURL()
+ "/api/voteOnSponsorTime?userID=randomID2&UUID=vote-uuid-5&type=1")
.then(async res => {
- if (res.status === 403) {
+ let row = await db.prepare('get', `SELECT "votes" FROM "sponsorTimes" WHERE "UUID" = ?`, ["vote-uuid-5"]);
+ if (res.status === 403 && row.votes === -3) {
done();
} else {
- done("Status code was " + res.status + " instead of 403");
+ done("Status code was " + res.status + ", row is " + JSON.stringify(row));
+ }
+ })
+ .catch(err => done(err));
+ });
+
+ it('Non-VIP should not be able to downvote "dead" submission', (done: Done) => {
+ fetch(getbaseURL()
+ + "/api/voteOnSponsorTime?userID=randomID2&UUID=vote-uuid-5&type=0")
+ .then(async res => {
+ let row = await db.prepare('get', `SELECT "votes" FROM "sponsorTimes" WHERE "UUID" = ?`, ["vote-uuid-5"]);
+ if (res.status === 200 && row.votes === -3) {
+ done();
+ } else {
+ done("Status code was " + res.status + ", row is " + JSON.stringify(row));
}
})
.catch(err => done(err));
@@ -410,12 +425,13 @@ describe('voteOnSponsorTime', () => {
it('Non-VIP should not be able to downvote on a segment with no-segments category', (done: Done) => {
fetch(getbaseURL()
- + "/api/voteOnSponsorTime?userID=no-segments-voter&UUID=no-sponsor-segments-uuid-0&type=0")
+ + "/api/voteOnSponsorTime?userID=randomID&UUID=no-sponsor-segments-uuid-0&type=0")
.then(async res => {
- if (res.status === 403) {
+ let row = await db.prepare('get', `SELECT "votes" FROM "sponsorTimes" WHERE "UUID" = ?`, ["no-sponsor-segments-uuid-0"]);
+ if (res.status === 403 && row.votes === 2) {
done();
} else {
- done("Status code was " + res.status + " instead of 403");
+ done("Status code was " + res.status + " instead of 403, row was " + JSON.stringify(row));
}
})
.catch(err => done(err));
@@ -423,12 +439,13 @@ describe('voteOnSponsorTime', () => {
it('Non-VIP should be able to upvote on a segment with no-segments category', (done: Done) => {
fetch(getbaseURL()
- + "/api/voteOnSponsorTime?userID=no-segments-voter&UUID=no-sponsor-segments-uuid-0&type=1")
+ + "/api/voteOnSponsorTime?userID=randomID&UUID=no-sponsor-segments-uuid-0&type=1")
.then(async res => {
- if (res.status === 200) {
+ let row = await db.prepare('get', `SELECT "votes" FROM "sponsorTimes" WHERE "UUID" = ?`, ["no-sponsor-segments-uuid-0"]);
+ if (res.status === 200 && row.votes === 3) {
done();
} else {
- done("Status code was " + res.status + " instead of 200");
+ done("Status code was " + res.status + " instead of 403, row was " + JSON.stringify(row));
}
})
.catch(err => done(err));
@@ -436,12 +453,13 @@ describe('voteOnSponsorTime', () => {
it('Non-VIP should not be able to category vote on a segment with no-segments category', (done: Done) => {
fetch(getbaseURL()
- + "/api/voteOnSponsorTime?userID=no-segments-voter&UUID=no-sponsor-segments-uuid-0&category=outro")
+ + "/api/voteOnSponsorTime?userID=randomID&UUID=no-sponsor-segments-uuid-0&category=outro")
.then(async res => {
- if (res.status === 403) {
+ let row = await db.prepare('get', `SELECT "category" FROM "sponsorTimes" WHERE "UUID" = ?`, ["no-sponsor-segments-uuid-0"]);
+ if (res.status === 403 && row.category === "sponsor") {
done();
} else {
- done("Status code was " + res.status + " instead of 403");
+ done("Status code was " + res.status + " instead of 403, row was " + JSON.stringify(row));
}
})
.catch(err => done(err));