2 Commits

Author SHA1 Message Date
Ajay
5c61f1d366 Fix inspect command 2023-04-16 21:34:34 -04:00
Ajay
36352b0c8e Build debug 2023-04-16 21:23:01 -04:00
168 changed files with 4932 additions and 10038 deletions

View File

@@ -31,15 +31,15 @@ module.exports = {
},
overrides: [
{
files: ["**/*.ts"],
files: ["src/**/*.ts"],
parserOptions: {
project: ["./tsconfig.eslint.json"],
project: ["./tsconfig.json"],
},
rules: {
"@typescript-eslint/no-misused-promises": "error",
"@typescript-eslint/no-floating-promises" : "error"
"@typescript-eslint/no-misused-promises": "warn",
"@typescript-eslint/no-floating-promises" : "warn"
}
},
],

View File

@@ -22,10 +22,10 @@ jobs:
permissions:
packages: write
steps:
- uses: actions/checkout@v4
- uses: actions/checkout@v3
- name: Docker meta
id: meta
uses: docker/metadata-action@v5
uses: docker/metadata-action@v4
with:
images: |
ghcr.io/${{ inputs.username }}/${{ inputs.name }}
@@ -34,21 +34,14 @@ jobs:
flavor: |
latest=true
- name: Login to GHCR
uses: docker/login-action@v3
uses: docker/login-action@v2
with:
registry: ghcr.io
username: ${{ github.repository_owner }}
password: ${{ secrets.GH_TOKEN }}
- name: Set up QEMU
uses: docker/setup-qemu-action@v3
with:
platforms: arm,arm64
- name: Set up buildx
uses: docker/setup-buildx-action@v3
- name: push
uses: docker/build-push-action@v6
uses: docker/build-push-action@v3
with:
context: ${{ inputs.folder }}
platforms: linux/amd64,linux/arm64
push: true
tags: ${{ steps.meta.outputs.tags }}

View File

@@ -1,16 +0,0 @@
name: Docker image builds
on:
push:
branches:
- master
workflow_dispatch:
jobs:
error-server:
uses: ./.github/workflows/docker-build.yml
with:
name: "error-server"
username: "ajayyy"
folder: "./containers/error-server"
secrets:
GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}

View File

@@ -14,8 +14,8 @@ jobs:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
- uses: actions/setup-node@v4
- uses: actions/checkout@v3
- uses: actions/setup-node@v3
with:
node-version: 18
cache: npm
@@ -26,7 +26,7 @@ jobs:
- name: Run Server
timeout-minutes: 10
run: npm start
- uses: actions/upload-artifact@v4
- uses: actions/upload-artifact@v3
with:
name: SponsorTimesDB.db
path: databases/sponsorTimes.db

View File

@@ -2,24 +2,15 @@ name: Docker image builds
on:
push:
branches:
- master
- debug
workflow_dispatch:
jobs:
sb-server:
uses: ./.github/workflows/docker-build.yml
with:
name: "sb-server"
name: "sb-server-debug"
username: "ajayyy"
folder: "."
secrets:
GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
rsync-host:
needs: sb-server
uses: ./.github/workflows/docker-build.yml
with:
name: "rsync-host"
username: "ajayyy"
folder: "./containers/rsync"
secrets:
GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}

View File

@@ -12,8 +12,8 @@ jobs:
name: Lint with ESLint and build
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
- uses: actions/setup-node@v4
- uses: actions/checkout@v3
- uses: actions/setup-node@v3
with:
node-version: 18
cache: npm
@@ -21,7 +21,7 @@ jobs:
- run: npm run lint
- run: npm run tsc
- name: cache dist build
uses: actions/cache/save@v4
uses: actions/cache/save@v3
with:
key: dist-${{ github.sha }}
path: |
@@ -32,13 +32,13 @@ jobs:
runs-on: ubuntu-latest
needs: lint-build
steps:
- uses: actions/checkout@v4
- uses: actions/setup-node@v4
- uses: actions/checkout@v3
- uses: actions/setup-node@v3
with:
node-version: 18
cache: npm
- id: cache
uses: actions/cache/restore@v4
uses: actions/cache/restore@v3
with:
key: dist-${{ github.sha }}
path: |
@@ -46,14 +46,11 @@ jobs:
${{ github.workspace }}/node_modules
- if: steps.cache.outputs.cache-hit != 'true'
run: npm ci
env:
youTubeKeys_visitorData: ${{ secrets.YOUTUBEKEYS_VISITORDATA }}
youTubeKeys_poToken: ${{ secrets.YOUTUBEKEYS_POTOKEN }}
- name: Run SQLite Tests
timeout-minutes: 5
run: npx nyc --silent npm test
- name: cache nyc output
uses: actions/cache/save@v4
uses: actions/cache/save@v3
with:
key: nyc-sqlite-${{ github.sha }}
path: ${{ github.workspace }}/.nyc_output
@@ -62,20 +59,20 @@ jobs:
runs-on: ubuntu-latest
needs: lint-build
steps:
- uses: actions/checkout@v4
- uses: actions/checkout@v3
- name: Build the docker-compose stack
env:
PG_USER: ci_db_user
PG_PASS: ci_db_pass
run: docker compose -f docker/docker-compose-ci.yml up -d
run: docker-compose -f docker/docker-compose-ci.yml up -d
- name: Check running containers
run: docker ps
- uses: actions/setup-node@v4
- uses: actions/setup-node@v3
with:
node-version: 18
cache: npm
- id: cache
uses: actions/cache/restore@v4
uses: actions/cache/restore@v3
with:
key: dist-${{ github.sha }}
path: |
@@ -86,12 +83,10 @@ jobs:
- name: Run Postgres Tests
env:
TEST_POSTGRES: true
youTubeKeys_visitorData: ${{ secrets.YOUTUBEKEYS_VISITORDATA }}
youTubeKeys_poToken: ${{ secrets.YOUTUBEKEYS_POTOKEN }}
timeout-minutes: 5
run: npx nyc --silent npm test
- name: cache nyc output
uses: actions/cache/save@v4
uses: actions/cache/save@v3
with:
key: nyc-postgres-${{ github.sha }}
path: ${{ github.workspace }}/.nyc_output
@@ -100,22 +95,22 @@ jobs:
name: Run Codecov
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
- uses: actions/setup-node@v4
- uses: actions/checkout@v3
- uses: actions/setup-node@v3
with:
node-version: 18
cache: npm
- run: npm ci
- name: restore postgres nyc output
uses: actions/cache/restore@v4
uses: actions/cache/restore@v3
with:
key: nyc-postgres-${{ github.sha }}
path: ${{ github.workspace }}/.nyc_output
- name: restore sqlite nyc output
uses: actions/cache/restore@v4
uses: actions/cache/restore@v3
with:
key: nyc-sqlite-${{ github.sha }}
path: ${{ github.workspace }}/.nyc_output
- run: npx nyc report --reporter=lcov
- name: Upload coverage reports to Codecov
uses: codecov/codecov-action@v4
uses: codecov/codecov-action@v3

4
.gitignore vendored
View File

@@ -48,6 +48,4 @@ working
# nyc coverage output
.nyc_output/
coverage/
.vscode
coverage/

View File

@@ -1,29 +1,20 @@
# SponsorTimesDB
- [vipUsers](#vipusers)
- [sponsorTimes](#sponsortimes)
- [userNames](#usernames)
- [categoryVotes](#categoryvotes)
- [lockCategories](#lockcategories)
- [warnings](#warnings)
- [shadowBannedUsers](#shadowbannedusers)
- [videoInfo](#videoinfo)
- [unlistedVideos](#unlistedvideos)
- [config](#config)
- [archivedSponsorTimes](#archivedsponsortimes)
- [ratings](#ratings)
- [userFeatures](#userFeatures)
- [shadowBannedIPs](#shadowBannedIPs)
- [titles](#titles)
- [titleVotes](#titleVotes)
- [thumbnails](#thumbnails)
- [thumbnailTimestamps](#thumbnailTimestamps)
- [thumbnailVotes](#thumbnailVotes)
[vipUsers](#vipUsers)
[sponsorTimes](#sponsorTimes)
[userNames](#userNames)
[categoryVotes](#categoryVotes)
[lockCategories](#lockCategories)
[warnings](#warnings)
[shadowBannedUsers](#shadowBannedUsers)
[unlistedVideos](#unlistedVideos)
[config](#config)
[archivedSponsorTimes](#archivedSponsorTimes)
### vipUsers
| Name | Type | |
| -- | :--: | -- |
| userID | TEXT | not null, primary key |
| userID | TEXT | not null |
| index | field |
| -- | :--: |
@@ -39,7 +30,7 @@
| votes | INTEGER | not null |
| locked | INTEGER | not null, default '0' |
| incorrectVotes | INTEGER | not null, default 1 |
| UUID | TEXT | not null, unique, primary key |
| UUID | TEXT | not null, unique |
| userID | TEXT | not null |
| timeSubmitted | INTEGER | not null |
| views | INTEGER | not null |
@@ -59,16 +50,14 @@
| sponsorTime_timeSubmitted | timeSubmitted |
| sponsorTime_userID | userID |
| sponsorTimes_UUID | UUID |
| sponsorTimes_hashedVideoID | service, hashedVideoID, startTime |
| sponsorTimes_videoID | service, videoID, startTime |
| sponsorTimes_videoID_category | videoID, category |
| sponsorTimes_description_gin | description, category |
| sponsorTimes_hashedVideoID | hashedVideoID, category |
| sponsorTimes_videoID | videoID, service, category, timeSubmitted |
### userNames
| Name | Type | |
| -- | :--: | -- |
| userID | TEXT | not null, primary key |
| userID | TEXT | not null |
| userName | TEXT | not null |
| locked | INTEGER | not nul, default '0' |
@@ -83,7 +72,6 @@
| UUID | TEXT | not null |
| category | TEXT | not null |
| votes | INTEGER | not null, default 0 |
| id | SERIAL | primary key
| index | field |
| -- | :--: |
@@ -100,7 +88,6 @@
| hashedVideoID | TEXT | not null, default '' |
| reason | TEXT | not null, default '' |
| service | TEXT | not null, default 'YouTube' |
| id | SERIAL | primary key
| index | field |
| -- | :--: |
@@ -115,22 +102,17 @@
| issuerUserID | TEXT | not null |
| enabled | INTEGER | not null |
| reason | TEXT | not null, default '' |
| type | INTEGER | default 0 |
| constraint | field |
| -- | :--: |
| PRIMARY KEY | userID, issueTime |
| index | field |
| -- | :--: |
| warnings_index | userID, issueTime, enabled |
| warnings_index | userID |
| warnings_issueTime | issueTime |
### shadowBannedUsers
| Name | Type | |
| -- | :--: | -- |
| userID | TEXT | not null, primary key |
| userID | TEXT | not null |
| index | field |
| -- | :--: |
@@ -147,8 +129,8 @@
| index | field |
| -- | :--: |
| videoInfo_videoID | videoID |
| videoInfo_channelID | channelID |
| videoInfo_videoID | timeSubmitted |
| videoInfo_channelID | userID |
### unlistedVideos
@@ -160,13 +142,12 @@
| channelID | TEXT | not null |
| timeSubmitted | INTEGER | not null |
| service | TEXT | not null, default 'YouTube' |
| id | SERIAL | primary key
### config
| Name | Type | |
| -- | :--: | -- |
| key | TEXT | not null, unique, primary key |
| key | TEXT | not null, unique |
| value | TEXT | not null |
### archivedSponsorTimes
@@ -179,7 +160,7 @@
| votes | INTEGER | not null |
| locked | INTEGER | not null, default '0' |
| incorrectVotes | INTEGER | not null, default 1 |
| UUID | TEXT | not null, unique, primary key |
| UUID | TEXT | not null, unique |
| userID | TEXT | not null |
| timeSubmitted | INTEGER | not null |
| views | INTEGER | not null |
@@ -192,7 +173,6 @@
| shadowHidden | INTEGER | not null |
| hashedVideoID | TEXT | not null, default '', sha256 |
| userAgent | TEXT | not null, default '' |
| description | TEXT | not null, default '' |
### ratings
@@ -203,7 +183,6 @@
| type | INTEGER | not null |
| count | INTEGER | not null |
| hashedVideoID | TEXT | not null |
| id | SERIAL | primary key
| index | field |
| -- | :--: |
@@ -211,125 +190,15 @@
| ratings_hashedVideoID | hashedVideoID, service |
| ratings_videoID | videoID, service |
### userFeatures
| Name | Type | |
| -- | :--: | -- |
| userID | TEXT | not null |
| feature | INTEGER | not null |
| issuerUserID | TEXT | not null |
| timeSubmitted | INTEGER | not null |
| constraint | field |
| -- | :--: |
| primary key | userID, feature |
| index | field |
| -- | :--: |
| userFeatures_userID | userID, feature |
### shadowBannedIPs
| Name | Type | |
| -- | :--: | -- |
| hashedIP | TEXT | not null, primary key |
### titles
| Name | Type | |
| -- | :--: | -- |
| videoID | TEXT | not null |
| title | TEXT | not null |
| original | INTEGER | default 0 |
| userID | TEXT | not null
| service | TEXT | not null |
| hashedVideoID | TEXT | not null |
| timeSubmitted | INTEGER | not null |
| UUID | TEXT | not null, primary key
| index | field |
| -- | :--: |
| titles_timeSubmitted | timeSubmitted |
| titles_userID_timeSubmitted | videoID, service, userID, timeSubmitted |
| titles_videoID | videoID, service |
| titles_hashedVideoID_2 | service, hashedVideoID, timeSubmitted |
### titleVotes
| Name | Type | |
| -- | :--: | -- |
| UUID | TEXT | not null, primary key |
| votes | INTEGER | not null, default 0 |
| locked | INTEGER | not null, default 0 |
| shadowHidden | INTEGER | not null, default 0 |
| verification | INTEGER | default 0 |
| downvotes | INTEGER | default 0 |
| removed | INTEGER | default 0 |
| constraint | field |
| -- | :--: |
| foreign key | UUID references "titles"("UUID")
| index | field |
| -- | :--: |
| titleVotes_votes | UUID, votes
### thumbnails
| Name | Type | |
| -- | :--: | -- |
| original | INTEGER | default 0 |
| userID | TEXT | not null |
| service | TEXT | not null |
| hashedVideoID | TEXT | not null |
| timeSubmitted | INTEGER | not null |
| UUID | TEXT | not null, primary key |
| index | field |
| -- | :--: |
| thumbnails_timeSubmitted | timeSubmitted |
| thumbnails_votes_timeSubmitted | videoID, service, userID, timeSubmitted |
| thumbnails_videoID | videoID, service |
| thumbnails_hashedVideoID_2 | service, hashedVideoID, timeSubmitted |
### thumbnailTimestamps
| index | field |
| -- | :--: |
| UUID | TEXT | not null, primary key
| timestamp | INTEGER | not null, default 0
| constraint | field |
| -- | :--: |
| foreign key | UUID references "thumbnails"("UUID")
### thumbnailVotes
| Name | Type | |
| -- | :--: | -- |
| UUID | TEXT | not null, primary key |
| votes | INTEGER | not null, default 0 |
| locked | INTEGER |not null, default 0 |
| shadowHidden | INTEGER | not null, default 0 |
| downvotes | INTEGER | default 0 |
| removed | INTEGER | default 0 |
| constraint | field |
| -- | :--: |
| foreign key | UUID references "thumbnails"("UUID")
| index | field |
| -- | :--: |
| thumbnailVotes_votes | UUID, votes
# Private
- [votes](#votes)
- [categoryVotes](#categoryVotes)
- [sponsorTimes](#sponsorTimes)
- [config](#config)
- [ratings](#ratings)
- [tempVipLog](#tempVipLog)
- [userNameLogs](#userNameLogs)
[votes](#votes)
[categoryVotes](#categoryVotes)
[sponsorTimes](#sponsorTimes)
[config](#config)
[ratings](#ratings)
[tempVipLog](#tempVipLog)
[userNameLogs](#userNameLogs)
### votes
@@ -340,7 +209,6 @@
| hashedIP | TEXT | not null |
| type | INTEGER | not null |
| originalVoteType | INTEGER | not null | # Since type was reused to also specify the number of votes removed when less than 0, this is being used for the actual type
| id | SERIAL | primary key |
| index | field |
| -- | :--: |
@@ -355,11 +223,10 @@
| hashedIP | TEXT | not null |
| category | TEXT | not null |
| timeSubmitted | INTEGER | not null |
| id | SERIAL | primary key |
| index | field |
| -- | :--: |
| categoryVotes_UUID | UUID, userID, hashedIP, category |
| categoryVotes_UUID | UUID, userID, hasedIP, category |
### sponsorTimes
@@ -369,17 +236,17 @@
| hashedIP | TEXT | not null |
| timeSubmitted | INTEGER | not null |
| service | TEXT | not null, default 'YouTube' |
| id | SERIAL | primary key |
| index | field |
| -- | :--: |
| privateDB_sponsorTimes_v4 | videoID, service, timeSubmitted |
| sponsorTimes_hashedIP | hashedIP |
| privateDB_sponsorTimes_videoID_v2 | videoID, service |
### config
| Name | Type | |
| -- | :--: | -- |
| key | TEXT | not null, primary key |
| key | TEXT | not null |
| value | TEXT | not null |
### ratings
@@ -392,7 +259,6 @@
| type | INTEGER | not null |
| timeSubmitted | INTEGER | not null |
| hashedIP | TEXT | not null |
| id | SERIAL | primary key |
| index | field |
| -- | :--: |
@@ -405,7 +271,6 @@
| targetUserID | TEXT | not null |
| enabled | BOOLEAN | not null |
| updatedAt | INTEGER | not null |
| id | SERIAL | primary key |
### userNameLogs
@@ -416,4 +281,3 @@
| oldUserName | TEXT | not null |
| updatedByAdmin | BOOLEAN | not null |
| updatedAt | INTEGER | not null |
| id | SERIAL | primary key |

View File

@@ -9,7 +9,7 @@ WORKDIR /usr/src/app
RUN apk add --no-cache git postgresql-client
COPY --from=builder ./node_modules ./node_modules
COPY --from=builder ./dist ./dist
COPY ./.git/ ./.git
COPY ./.git ./.git
COPY entrypoint.sh .
COPY databases/*.sql databases/
EXPOSE 8080

View File

@@ -56,6 +56,7 @@
]
}
],
"hoursAfterWarningExpires": 24,
"rateLimit": {
"vote": {
"windowMs": 900000,

View File

@@ -1,9 +1 @@
comment: false
coverage:
status:
project:
default:
informational: true
patch:
default:
informational: true

View File

@@ -25,6 +25,8 @@
"webhooks": [],
"categoryList": ["sponsor", "intro", "outro", "interaction", "selfpromo", "preview", "music_offtopic", "poi_highlight"], // List of supported categories any other category will be rejected
"getTopUsersCacheTimeMinutes": 5, // cacheTime for getTopUsers result in minutes
"maxNumberOfActiveWarnings": 3, // Users with this number of warnings will be blocked until warnings expire
"hoursAfterWarningExpire": 24,
"rateLimit": {
"vote": {
"windowMs": 900000, // 15 minutes

View File

@@ -1,13 +1,13 @@
FROM alpine as builder
WORKDIR /scripts
COPY ./backup.sh ./backup.sh
COPY ./forget.sh ./forget.sh
FROM alpine
RUN apk add --no-cache postgresql-client restic
COPY --from=builder --chmod=755 /scripts /usr/src/app/
RUN apk add postgresql-client
RUN apk add restic --repository http://dl-cdn.alpinelinux.org/alpine/latest-stable/community/
COPY ./backup.sh /usr/src/app/backup.sh
RUN chmod +x /usr/src/app/backup.sh
COPY ./forget.sh /usr/src/app/forget.sh
RUN chmod +x /usr/src/app/forget.sh
RUN echo '30 * * * * /usr/src/app/backup.sh' >> /etc/crontabs/root
RUN echo '10 0 * * */2 /usr/src/app/forget.sh' >> /etc/crontabs/root
RUN echo '10 0 * * 1 /usr/src/app/forget.sh' >> /etc/crontabs/root
CMD crond -l 2 -f

View File

@@ -1 +1 @@
restic forget --prune --keep-hourly 24 --keep-daily 7 --keep-weekly 8
restic forget --prune --keep-last 48 --keep-daily 7 --keep-weekly 8

View File

@@ -1,4 +0,0 @@
FROM nginx as app
EXPOSE 80
COPY nginx.conf /etc/nginx/nginx.conf
COPY default.conf /etc/nginx/conf.d/default.conf

View File

@@ -1,9 +0,0 @@
server {
listen 80;
listen [::]:80;
server_name localhost;
location / {
return 503;
}
}

View File

@@ -1,19 +0,0 @@
user nginx;
worker_processes auto;
error_log /var/log/nginx/error.log notice;
pid /var/run/nginx.pid;
events {
worker_connections 4096;
}
http {
include /etc/nginx/mime.types;
default_type application/octet-stream;
access_log off;
error_log /dev/null crit;
include /etc/nginx/conf.d/*.conf;
}

View File

@@ -1,6 +1,6 @@
FROM ghcr.io/ajayyy/sb-server:latest
EXPOSE 873/tcp
RUN apk add rsync>3.4.1-r0
RUN apk add rsync>3.2.4-r0
RUN mkdir /usr/src/app/database-export
CMD rsync --no-detach --daemon & ./entrypoint.sh
CMD rsync --no-detach --daemon & ./entrypoint.sh

View File

@@ -44,15 +44,4 @@ CREATE TABLE IF NOT EXISTS "thumbnailVotes" (
"type" INTEGER NOT NULL
);
CREATE TABLE IF NOT EXISTS "casualVotes" (
"UUID" SERIAL PRIMARY KEY,
"videoID" TEXT NOT NULL,
"service" TEXT NOT NULL,
"userID" TEXT NOT NULL,
"hashedIP" TEXT NOT NULL,
"category" TEXT NOT NULL,
"type" INTEGER NOT NULL,
"timeSubmitted" INTEGER NOT NULL
);
COMMIT;

View File

@@ -23,16 +23,4 @@ CREATE INDEX IF NOT EXISTS "categoryVotes_UUID"
CREATE INDEX IF NOT EXISTS "ratings_videoID"
ON public."ratings" USING btree
("videoID" COLLATE pg_catalog."default" ASC NULLS LAST, service COLLATE pg_catalog."default" ASC NULLS LAST, "userID" COLLATE pg_catalog."default" ASC NULLS LAST, "timeSubmitted" ASC NULLS LAST)
TABLESPACE pg_default;
-- casualVotes
CREATE INDEX IF NOT EXISTS "casualVotes_videoID"
ON public."casualVotes" USING btree
("videoID" COLLATE pg_catalog."default" ASC NULLS LAST, "service" COLLATE pg_catalog."default" ASC NULLS LAST, "userID" COLLATE pg_catalog."default" ASC NULLS LAST)
TABLESPACE pg_default;
CREATE INDEX IF NOT EXISTS "casualVotes_userID"
ON public."casualVotes" USING btree
("userID" COLLATE pg_catalog."default" ASC NULLS LAST)
TABLESPACE pg_default;

View File

@@ -84,26 +84,6 @@ CREATE TABLE IF NOT EXISTS "thumbnailVotes" (
FOREIGN KEY("UUID") REFERENCES "thumbnails"("UUID")
);
CREATE TABLE IF NOT EXISTS "casualVotes" (
"UUID" TEXT PRIMARY KEY,
"videoID" TEXT NOT NULL,
"service" TEXT NOT NULL,
"hashedVideoID" TEXT NOT NULL,
"category" TEXT NOT NULL,
"upvotes" INTEGER NOT NULL default 0,
"downvotes" INTEGER NOT NULL default 0,
"timeSubmitted" INTEGER NOT NULL
);
CREATE TABLE IF NOT EXISTS "casualVoteTitles" (
"videoID" TEXT NOT NULL,
"service" TEXT NOT NULL,
"id" INTEGER NOT NULL,
"hashedVideoID" TEXT NOT NULL,
"title" TEXT NOT NULL,
PRIMARY KEY("videoID", "service", "id")
);
CREATE EXTENSION IF NOT EXISTS pgcrypto; --!sqlite-ignore
CREATE EXTENSION IF NOT EXISTS pg_trgm; --!sqlite-ignore

View File

@@ -124,26 +124,14 @@ CREATE INDEX IF NOT EXISTS "titles_timeSubmitted"
("timeSubmitted" ASC NULLS LAST)
TABLESPACE pg_default;
CREATE INDEX IF NOT EXISTS "titles_userID_timeSubmitted"
ON public."titles" USING btree
("videoID" COLLATE pg_catalog."default" ASC NULLS LAST, "service" COLLATE pg_catalog."default" ASC NULLS LAST, "userID" COLLATE pg_catalog."default" DESC NULLS LAST, "timeSubmitted" DESC NULLS LAST)
TABLESPACE pg_default;
CREATE INDEX IF NOT EXISTS "titles_videoID"
ON public."titles" USING btree
("videoID" COLLATE pg_catalog."default" ASC NULLS LAST, "service" COLLATE pg_catalog."default" ASC NULLS LAST)
TABLESPACE pg_default;
CREATE INDEX IF NOT EXISTS "titles_hashedVideoID_2"
CREATE INDEX IF NOT EXISTS "titles_hashedVideoID"
ON public."titles" USING btree
(service COLLATE pg_catalog."default" ASC NULLS LAST, "hashedVideoID" text_pattern_ops ASC NULLS LAST, "timeSubmitted" ASC NULLS LAST)
TABLESPACE pg_default;
-- titleVotes
CREATE INDEX IF NOT EXISTS "titleVotes_votes"
ON public."titleVotes" USING btree
("UUID" COLLATE pg_catalog."default" ASC NULLS LAST, "votes" DESC NULLS LAST)
("hashedVideoID" COLLATE pg_catalog."default" ASC NULLS LAST, "service" COLLATE pg_catalog."default" ASC NULLS LAST)
TABLESPACE pg_default;
-- thumbnails
@@ -153,46 +141,12 @@ CREATE INDEX IF NOT EXISTS "thumbnails_timeSubmitted"
("timeSubmitted" ASC NULLS LAST)
TABLESPACE pg_default;
CREATE INDEX IF NOT EXISTS "thumbnails_votes_timeSubmitted"
ON public."thumbnails" USING btree
("videoID" COLLATE pg_catalog."default" ASC NULLS LAST, "service" COLLATE pg_catalog."default" ASC NULLS LAST, "userID" COLLATE pg_catalog."default" DESC NULLS LAST, "timeSubmitted" DESC NULLS LAST)
TABLESPACE pg_default;
CREATE INDEX IF NOT EXISTS "thumbnails_videoID"
ON public."thumbnails" USING btree
("videoID" COLLATE pg_catalog."default" ASC NULLS LAST, "service" COLLATE pg_catalog."default" ASC NULLS LAST)
TABLESPACE pg_default;
CREATE INDEX IF NOT EXISTS "thumbnails_hashedVideoID_2"
CREATE INDEX IF NOT EXISTS "thumbnails_hashedVideoID"
ON public."thumbnails" USING btree
(service COLLATE pg_catalog."default" ASC NULLS LAST, "hashedVideoID" text_pattern_ops ASC NULLS LAST, "timeSubmitted" ASC NULLS LAST)
TABLESPACE pg_default;
-- thumbnailVotes
CREATE INDEX IF NOT EXISTS "thumbnailVotes_votes"
ON public."thumbnailVotes" USING btree
("UUID" COLLATE pg_catalog."default" ASC NULLS LAST, "votes" DESC NULLS LAST)
TABLESPACE pg_default;
-- casualVotes
CREATE INDEX IF NOT EXISTS "casualVotes_timeSubmitted"
ON public."casualVotes" USING btree
("timeSubmitted" ASC NULLS LAST)
TABLESPACE pg_default;
CREATE INDEX IF NOT EXISTS "casualVotes_userID_timeSubmitted"
ON public."casualVotes" USING btree
("videoID" COLLATE pg_catalog."default" ASC NULLS LAST, "service" COLLATE pg_catalog."default" ASC NULLS LAST, "timeSubmitted" DESC NULLS LAST)
TABLESPACE pg_default;
CREATE INDEX IF NOT EXISTS "casualVotes_videoID"
ON public."casualVotes" USING btree
("videoID" COLLATE pg_catalog."default" ASC NULLS LAST, "service" COLLATE pg_catalog."default" ASC NULLS LAST)
TABLESPACE pg_default;
CREATE INDEX IF NOT EXISTS "casualVotes_hashedVideoID_2"
ON public."casualVotes" USING btree
(service COLLATE pg_catalog."default" ASC NULLS LAST, "hashedVideoID" text_pattern_ops ASC NULLS LAST, "timeSubmitted" ASC NULLS LAST)
("hashedVideoID" COLLATE pg_catalog."default" ASC NULLS LAST, "service" COLLATE pg_catalog."default" ASC NULLS LAST)
TABLESPACE pg_default;

View File

@@ -1,7 +0,0 @@
BEGIN TRANSACTION;
ALTER TABLE "casualVotes" DROP COLUMN "type";
UPDATE "config" SET value = 12 WHERE key = 'version';
COMMIT;

View File

@@ -1,7 +0,0 @@
BEGIN TRANSACTION;
ALTER TABLE "casualVotes" ADD "titleID" INTEGER default 0;
UPDATE "config" SET value = 13 WHERE key = 'version';
COMMIT;

View File

@@ -1,7 +0,0 @@
BEGIN TRANSACTION;
ALTER TABLE "titleVotes" ADD "verification" INTEGER default 0;
UPDATE "config" SET value = 35 WHERE key = 'version';
COMMIT;

View File

@@ -1,7 +0,0 @@
BEGIN TRANSACTION;
ALTER TABLE "warnings" ADD "type" INTEGER default 0;
UPDATE "config" SET value = 36 WHERE key = 'version';
COMMIT;

View File

@@ -1,7 +0,0 @@
BEGIN TRANSACTION;
ALTER TABLE "titles" ADD UNIQUE ("videoID", "title"); --!sqlite-ignore
UPDATE "config" SET value = 37 WHERE key = 'version';
COMMIT;

View File

@@ -1,11 +0,0 @@
BEGIN TRANSACTION;
UPDATE "titleVotes" SET "shadowHidden" = 1
WHERE "UUID" IN (SELECT "UUID" FROM "titles" INNER JOIN "shadowBannedUsers" "bans" ON "titles"."userID" = "bans"."userID");
UPDATE "thumbnailVotes" SET "shadowHidden" = 1
WHERE "UUID" IN (SELECT "UUID" FROM "thumbnails" INNER JOIN "shadowBannedUsers" "bans" ON "thumbnails"."userID" = "bans"."userID");
UPDATE "config" SET value = 38 WHERE key = 'version';
COMMIT;

View File

@@ -1,11 +0,0 @@
BEGIN TRANSACTION;
ALTER TABLE "titleVotes" ADD "downvotes" INTEGER default 0;
ALTER TABLE "titleVotes" ADD "removed" INTEGER default 0;
ALTER TABLE "thumbnailVotes" ADD "downvotes" INTEGER default 0;
ALTER TABLE "thumbnailVotes" ADD "removed" INTEGER default 0;
UPDATE "config" SET value = 39 WHERE key = 'version';
COMMIT;

View File

@@ -1,8 +0,0 @@
BEGIN TRANSACTION;
DROP INDEX IF EXISTS "titles_hashedVideoID";
DROP INDEX IF EXISTS "thumbnails_hashedVideoID";
UPDATE "config" SET value = 40 WHERE key = 'version';
COMMIT;

View File

@@ -1,8 +0,0 @@
BEGIN TRANSACTION;
ALTER TABLE "titles" ADD "casualMode" INTEGER default 0;
ALTER TABLE "thumbnails" ADD "casualMode" INTEGER default 0;
UPDATE "config" SET value = 41 WHERE key = 'version';
COMMIT;

View File

@@ -1,7 +0,0 @@
BEGIN TRANSACTION;
ALTER TABLE "casualVotes" DROP COLUMN "downvotes";
UPDATE "config" SET value = 42 WHERE key = 'version';
COMMIT;

View File

@@ -1,7 +0,0 @@
BEGIN TRANSACTION;
ALTER TABLE "casualVotes" ADD "titleID" INTEGER default 0;
UPDATE "config" SET value = 43 WHERE key = 'version';
COMMIT;

View File

@@ -1,8 +0,0 @@
BEGIN TRANSACTION;
ALTER TABLE "titles" ADD "userAgent" TEXT NOT NULL default '';
ALTER TABLE "thumbnails" ADD "userAgent" TEXT NOT NULL default '';
UPDATE "config" SET value = 44 WHERE key = 'version';
COMMIT;

View File

@@ -1,7 +0,0 @@
BEGIN TRANSACTION;
ALTER TABLE "warnings" ADD "disableTime" INTEGER NULL;
UPDATE "config" SET value = 45 WHERE key = 'version';
COMMIT;

View File

@@ -1,6 +1,6 @@
BEGIN TRANSACTION;
/* Add 'locked' field */
/* Add new voting field */
CREATE TABLE "sqlb_temp_table_6" (
"videoID" TEXT NOT NULL,
"startTime" REAL NOT NULL,

View File

@@ -1,6 +1,6 @@
BEGIN TRANSACTION;
/* Add 'videoDuration' field */
/* Add Service field */
CREATE TABLE "sqlb_temp_table_8" (
"videoID" TEXT NOT NULL,
"startTime" REAL NOT NULL,

View File

@@ -1,6 +1,6 @@
BEGIN TRANSACTION;
/* Change 'videoDuration' field from INTEGER to REAL */
/* Add Service field */
CREATE TABLE "sqlb_temp_table_9" (
"videoID" TEXT NOT NULL,
"startTime" REAL NOT NULL,

View File

@@ -9,4 +9,4 @@ test -e config.json || cat <<EOF > config.json
}
EOF
node dist/src/index.js
node --inspect dist/src/index.js

2556
package-lock.json generated

File diff suppressed because it is too large Load Diff

View File

@@ -19,20 +19,18 @@
"author": "Ajay Ramachandran",
"license": "AGPL-3.0-only",
"dependencies": {
"axios": "^1.12.1",
"better-sqlite3": "^11.2.1",
"axios": "^1.1.3",
"better-sqlite3": "^8.0.1",
"cron": "^2.1.0",
"express": "^4.21.2",
"express": "^4.18.2",
"express-promise-router": "^4.1.1",
"express-rate-limit": "^6.7.0",
"form-data": "^4.0.4",
"form-data": "^4.0.0",
"lodash": "^4.17.21",
"lru-cache": "^10.2.0",
"lz4-napi": "^2.2.0",
"pg": "^8.8.0",
"rate-limit-redis": "^3.0.1",
"redis": "^4.6.13",
"seedrandom": "^3.0.5"
"redis": "^4.5.0",
"sync-mysql": "^3.0.1"
},
"devDependencies": {
"@istanbuljs/nyc-config-typescript": "^1.0.2",
@@ -43,14 +41,13 @@
"@types/mocha": "^10.0.0",
"@types/node": "^18.11.9",
"@types/pg": "^8.6.5",
"@types/seedrandom": "^3.0.5",
"@types/sinon": "^10.0.13",
"@typescript-eslint/eslint-plugin": "^5.44.0",
"@typescript-eslint/parser": "^5.44.0",
"axios-mock-adapter": "^1.21.2",
"eslint": "^8.28.0",
"mocha": "^10.8.2",
"nodemon": "^3.1.9",
"mocha": "^10.1.0",
"nodemon": "^2.0.20",
"nyc": "^15.1.0",
"sinon": "^14.0.2",
"ts-mock-imports": "^1.3.8",

View File

@@ -53,15 +53,6 @@ import { getBranding, getBrandingByHashEndpoint } from "./routes/getBranding";
import { postBranding } from "./routes/postBranding";
import { cacheMiddlware } from "./middleware/etag";
import { hostHeader } from "./middleware/hostHeader";
import { getBrandingStats } from "./routes/getBrandingStats";
import { getTopBrandingUsers } from "./routes/getTopBrandingUsers";
import { getFeatureFlag } from "./routes/getFeatureFlag";
import { getReady } from "./routes/getReady";
import { getMetrics } from "./routes/getMetrics";
import { getSegmentID } from "./routes/getSegmentID";
import { postCasual } from "./routes/postCasual";
import { getConfigEndpoint } from "./routes/getConfig";
import { setConfig } from "./routes/setConfig";
export function createServer(callback: () => void): Server {
// Create a service (the app object is just a callback).
@@ -87,15 +78,13 @@ export function createServer(callback: () => void): Server {
// Set production mode
app.set("env", config.mode || "production");
const server = app.listen(config.port, callback);
setupRoutes(router);
setupRoutes(router, server);
return server;
return app.listen(config.port, callback);
}
/* eslint-disable @typescript-eslint/no-misused-promises */
function setupRoutes(router: Router, server: Server) {
function setupRoutes(router: Router) {
// Rate limit endpoint lists
const voteEndpoints: RequestHandler[] = [voteOnSponsorTime];
const viewEndpoints: RequestHandler[] = [viewedVideoSponsorTime];
@@ -126,8 +115,6 @@ function setupRoutes(router: Router, server: Server) {
router.get("/api/viewedVideoSponsorTime", ...viewEndpoints);
router.post("/api/viewedVideoSponsorTime", ...viewEndpoints);
router.get("/api/segmentID", getSegmentID);
//To set your username for the stats view
router.post("/api/setUsername", setUsername);
@@ -153,14 +140,11 @@ function setupRoutes(router: Router, server: Server) {
router.get("/api/getTopUsers", getTopUsers);
router.get("/api/getTopCategoryUsers", getTopCategoryUsers);
router.get("/api/getTopBrandingUsers", getTopBrandingUsers);
//send out totals
//send the total submissions, total views and total minutes saved
router.get("/api/getTotalStats", getTotalStats);
router.get("/api/brandingStats", getBrandingStats);
router.get("/api/getUserInfo", getUserInfo);
router.get("/api/userInfo", getUserInfo);
@@ -210,11 +194,8 @@ function setupRoutes(router: Router, server: Server) {
router.get("/api/chapterNames", getChapterNames);
// get status
router.get("/api/status/:value", (req, res) => getStatus(req, res, server));
router.get("/api/status", (req, res) => getStatus(req, res, server));
router.get("/metrics", (req, res) => getMetrics(req, res, server));
router.get("/api/ready", (req, res) => getReady(req, res, server));
router.get("/api/status/:value", getStatus);
router.get("/api/status", getStatus);
router.get("/api/youtubeApiProxy", youtubeApiProxy);
// get user category stats
@@ -224,8 +205,6 @@ function setupRoutes(router: Router, server: Server) {
router.post("/api/feature", addFeature);
router.get("/api/featureFlag/:name", getFeatureFlag);
router.get("/api/generateToken/:type", generateTokenRequest);
router.get("/api/verifyToken", verifyTokenRequest);
@@ -237,11 +216,6 @@ function setupRoutes(router: Router, server: Server) {
router.get("/api/branding/:prefix", getBrandingByHashEndpoint);
router.post("/api/branding", postBranding);
router.get("/api/config", getConfigEndpoint);
router.post("/api/config", setConfig);
router.post("/api/casual", postCasual);
/* istanbul ignore next */
if (config.postgres?.enabled) {
router.get("/database", (req, res) => dumpDatabase(req, res, true));

View File

@@ -1,6 +1,7 @@
import fs from "fs";
import { SBSConfig } from "./types/config.model";
import packageJson from "../package.json";
import { isNumber } from "lodash";
const isTestMode = process.env.npm_lifecycle_script === packageJson.scripts.test;
const configFile = process.env.TEST_POSTGRES ? "ci.json"
@@ -19,8 +20,7 @@ addDefaults(config, {
privateDBSchema: "./databases/_private.db.sql",
readOnly: false,
webhooks: [],
categoryList: ["sponsor", "selfpromo", "exclusive_access", "interaction", "intro", "outro", "preview", "hook", "music_offtopic", "filler", "poi_highlight", "chapter"],
casualCategoryList: ["funny", "creative", "clever", "descriptive", "other"],
categoryList: ["sponsor", "selfpromo", "exclusive_access", "interaction", "intro", "outro", "preview", "music_offtopic", "filler", "poi_highlight", "chapter"],
categorySupport: {
sponsor: ["skip", "mute", "full"],
selfpromo: ["skip", "mute", "full"],
@@ -29,14 +29,13 @@ addDefaults(config, {
intro: ["skip", "mute"],
outro: ["skip", "mute"],
preview: ["skip", "mute"],
hook: ["skip", "mute"],
filler: ["skip", "mute"],
music_offtopic: ["skip"],
poi_highlight: ["poi"],
chapter: ["chapter"]
},
deArrowTypes: ["title", "thumbnail"],
maxTitleLength: 110,
maxNumberOfActiveWarnings: 1,
hoursAfterWarningExpires: 16300000,
adminUserID: "",
discordCompletelyIncorrectReportWebhookURL: null,
discordFirstTimeSubmissionsWebhookURL: null,
@@ -44,10 +43,6 @@ addDefaults(config, {
discordFailedReportChannelWebhookURL: null,
discordReportChannelWebhookURL: null,
discordMaliciousReportWebhookURL: null,
discordDeArrowLockedWebhookURL: null,
discordDeArrowWarnedWebhookURL: null,
discordNewUserWebhookURL: null,
discordRejectedNewUserWebhookURL: null,
minReputationToSubmitChapter: 0,
minReputationToSubmitFiller: 0,
getTopUsersCacheTimeMinutes: 240,
@@ -69,7 +64,6 @@ addDefaults(config, {
message: "OK",
}
},
requestValidatorRules: [],
userCounterURL: null,
userCounterRatio: 10,
newLeafURLs: null,
@@ -86,8 +80,7 @@ addDefaults(config, {
maxTries: 3,
maxActiveRequests: 0,
timeout: 60000,
highLoadThreshold: 10,
redisTimeoutThreshold: 1000
highLoadThreshold: 10
},
postgresReadOnly: {
enabled: false,
@@ -103,7 +96,6 @@ addDefaults(config, {
fallbackOnFail: true,
stopRetryThreshold: 800
},
postgresPrivateMax: 10,
dumpDatabase: {
enabled: false,
minTimeBetweenMs: 180000,
@@ -151,13 +143,6 @@ addDefaults(config, {
},
{
name: "thumbnailVotes"
},
{
name: "casualVotes",
order: "timeSubmitted"
},
{
name: "casualVoteTitles"
}]
},
diskCacheURL: null,
@@ -176,11 +161,7 @@ addDefaults(config, {
commandsQueueMaxLength: 3000,
stopWritingAfterResponseTime: 50,
responseTimePause: 1000,
maxReadResponseTime: 500,
disableHashCache: false,
clientCacheSize: 2000,
useCompression: false,
dragonflyMode: false
disableHashCache: false
},
redisRead: {
enabled: false,
@@ -201,20 +182,7 @@ addDefaults(config, {
gumroad: {
productPermalinks: ["sponsorblock"]
},
tokenSeed: "",
minUserIDLength: 30,
deArrowPaywall: false,
useCacheForSegmentGroups: false,
maxConnections: 100,
maxResponseTime: 1000,
maxResponseTimeWhileLoadingCache: 2000,
etagExpiry: 5000,
youTubeKeys: {
visitorData: null,
poToken: null,
floatieUrl: null,
floatieAuth: null
}
minUserIDLength: 30
});
loadFromEnv(config);
migrate(config);
@@ -262,17 +230,15 @@ function loadFromEnv(config: SBSConfig, prefix = "") {
loadFromEnv(data, fullKey);
} else if (process.env[fullKey]) {
const value = process.env[fullKey];
if (value !== "" && !isNaN(value as unknown as number)) {
if (isNumber(value)) {
config[key] = parseFloat(value);
} else if (value.toLowerCase() === "true" || value.toLowerCase() === "false") {
config[key] = value === "true";
} else if (key === "newLeafURLs") {
config[key] = [value];
} else if (key === "requestValidatorRules") {
config[key] = JSON.parse(value) ?? [];
} else {
config[key] = value;
}
}
}
}
}

View File

@@ -3,6 +3,7 @@ import { CronJob } from "cron";
import { config as serverConfig } from "../config";
import { Logger } from "../utils/logger";
import { db } from "../databases/databases";
import { DBSegment } from "../types/segments.model";
const jobConfig = serverConfig?.crons?.downvoteSegmentArchive;
@@ -13,18 +14,18 @@ export const archiveDownvoteSegment = async (dayLimit: number, voteLimit: number
Logger.info(`DownvoteSegmentArchiveJob starts at ${timeNow}`);
try {
// insert into archive sponsorTime
await db.prepare(
"run",
`INSERT INTO "archivedSponsorTimes"
SELECT *
FROM "sponsorTimes"
WHERE "votes" < ? AND (? - "timeSubmitted") > ?`,
[
voteLimit,
timeNow,
threshold
]
);
await db.prepare(
"run",
`INSERT INTO "archivedSponsorTimes"
SELECT *
FROM "sponsorTimes"
WHERE "votes" < ? AND (? - "timeSubmitted") > ?`,
[
voteLimit,
timeNow,
threshold
]
) as DBSegment[];
} catch (err) {
Logger.error("Execption when insert segment in archivedSponsorTimes");
@@ -34,15 +35,15 @@ export const archiveDownvoteSegment = async (dayLimit: number, voteLimit: number
// remove from sponsorTime
try {
await db.prepare(
"run",
'DELETE FROM "sponsorTimes" WHERE "votes" < ? AND (? - "timeSubmitted") > ?',
[
voteLimit,
timeNow,
threshold
]
);
await db.prepare(
"run",
'DELETE FROM "sponsorTimes" WHERE "votes" < ? AND (? - "timeSubmitted") > ?',
[
voteLimit,
timeNow,
threshold
]
) as DBSegment[];
} catch (err) {
Logger.error("Execption when deleting segment in sponsorTimes");

View File

@@ -6,14 +6,9 @@ export interface QueryOption {
export interface IDatabase {
init(): Promise<void>;
prepare(type: "run", query: string, params?: any[], options?: QueryOption): Promise<void>;
prepare(type: "get", query: string, params?: any[], options?: QueryOption): Promise<any>;
prepare(type: "all", query: string, params?: any[], options?: QueryOption): Promise<any[]>;
prepare(type: QueryType, query: string, params?: any[], options?: QueryOption): Promise<any>;
prepare(type: QueryType, query: string, params?: any[], options?: QueryOption): Promise<any | any[] | void>;
highLoad(): boolean;
shouldUseRedisTimeout(): boolean;
}
export type QueryType = "get" | "all" | "run";
export type QueryType = "get" | "all" | "run";

39
src/databases/Mysql.ts Normal file
View File

@@ -0,0 +1,39 @@
import { Logger } from "../utils/logger";
import { IDatabase, QueryType } from "./IDatabase";
// eslint-disable-next-line @typescript-eslint/ban-ts-comment
// @ts-ignore
import MysqlInterface from "sync-mysql";
export class Mysql implements IDatabase {
private connection: any;
constructor(private config: unknown) {
}
// eslint-disable-next-line require-await
async init(): Promise<void> {
this.connection = new MysqlInterface(this.config);
}
prepare(type: QueryType, query: string, params?: any[]): Promise<any[]> {
Logger.debug(`prepare (mysql): type: ${type}, query: ${query}, params: ${params}`);
const queryResult = this.connection.query(query, params);
switch (type) {
case "get": {
return queryResult[0];
}
case "all": {
return queryResult;
}
case "run": {
break;
}
}
}
highLoad() {
return false;
}
}

View File

@@ -109,7 +109,7 @@ export class Postgres implements IDatabase {
}
}
async prepare(type: QueryType, query: string, params?: any[], options: QueryOption = {}): Promise<any> {
async prepare(type: QueryType, query: string, params?: any[], options: QueryOption = {}): Promise<any[]> {
// Convert query to use numbered parameters
let count = 1;
for (let char = 0; char < query.length; char++) {
@@ -283,8 +283,4 @@ export class Postgres implements IDatabase {
highLoad() {
return this.activePostgresRequests > this.config.postgres.highLoadThreshold;
}
shouldUseRedisTimeout() {
return this.activePostgresRequests < this.config.postgres.redisTimeoutThreshold;
}
}

View File

@@ -13,7 +13,7 @@ export class Sqlite implements IDatabase {
}
// eslint-disable-next-line require-await
async prepare(type: QueryType, query: string, params: any[] = []): Promise<any> {
async prepare(type: QueryType, query: string, params: any[] = []): Promise<any[]> {
// Logger.debug(`prepare (sqlite): type: ${type}, query: ${query}, params: ${params}`);
const preparedQuery = this.db.prepare(Sqlite.processQuery(query));
@@ -72,15 +72,6 @@ export class Sqlite implements IDatabase {
}
private static processQuery(query: string): string {
if (query.includes("DISTINCT ON")) {
const column = query.match(/DISTINCT ON \((.*)\) (.*)/)[1];
query = query.replace(/DISTINCT ON \((.*)\)/g, "");
const parts = query.split("ORDER BY");
query = `${parts[0]} GROUP BY ${column} ORDER BY ${parts[1]}`;
}
return query.replace(/ ~\* /g, " REGEXP ");
}
@@ -102,18 +93,12 @@ export class Sqlite implements IDatabase {
}
private static processUpgradeQuery(query: string): string {
return query
.replace(/SERIAL PRIMARY KEY/gi, "INTEGER PRIMARY KEY AUTOINCREMENT")
.replace(/^.*--!sqlite-ignore/gm, "");
return query.replace(/^.*--!sqlite-ignore/gm, "");
}
highLoad() {
return false;
}
shouldUseRedisTimeout() {
return false;
}
}
export interface SqliteConfig {

View File

@@ -1,11 +1,15 @@
import { config } from "../config";
import { Sqlite } from "./Sqlite";
import { Mysql } from "./Mysql";
import { Postgres } from "./Postgres";
import { IDatabase } from "./IDatabase";
let db: IDatabase;
let privateDB: IDatabase;
if (config.postgres?.enabled) {
if (config.mysql) {
db = new Mysql(config.mysql);
privateDB = new Mysql(config.privateMysql);
} else if (config.postgres?.enabled) {
db = new Postgres({
dbSchemaFileName: config.dbSchema,
dbSchemaFolder: config.schemaFolder,
@@ -30,7 +34,6 @@ if (config.postgres?.enabled) {
createDbIfNotExists: config.createDatabaseIfNotExist,
postgres: {
...config.postgres,
max: config.postgresPrivateMax ?? config.postgres.max,
database: "privateDB"
},
postgresReadOnly: config.postgresReadOnly ? {

View File

@@ -10,11 +10,7 @@ async function init() {
process.on("unhandledRejection", (error: any) => {
// eslint-disable-next-line no-console
console.dir(error?.stack);
});
process.on("uncaughtExceptions", (error: any) => {
// eslint-disable-next-line no-console
console.dir(error?.stack);
process.exit(1);
});
try {

View File

@@ -3,6 +3,6 @@ import { NextFunction, Request, Response } from "express";
export function corsMiddleware(req: Request, res: Response, next: NextFunction): void {
res.header("Access-Control-Allow-Origin", "*");
res.header("Access-Control-Allow-Methods", "GET, POST, OPTIONS, DELETE");
res.header("Access-Control-Allow-Headers", "Content-Type, If-None-Match, x-client-name");
res.header("Access-Control-Allow-Headers", "Content-Type, If-None-Match");
next();
}

View File

@@ -1,10 +1,10 @@
import { NextFunction, Request, Response } from "express";
import { VideoID, VideoIDHash, Service } from "../types/segments.model";
import { QueryCacher } from "../utils/queryCacher";
import { brandingHashKey, brandingKey, skipSegmentsHashKey, skipSegmentsKey, skipSegmentsLargerHashKey, videoLabelsHashKey, videoLabelsKey, videoLabelsLargerHashKey } from "../utils/redisKeys";
import { skipSegmentsHashKey, skipSegmentsKey, videoLabelsHashKey, videoLabelsKey } from "../utils/redisKeys";
type hashType = "skipSegments" | "skipSegmentsHash" | "skipSegmentsLargerHash" | "videoLabel" | "videoLabelHash" | "videoLabelsLargerHash" | "branding" | "brandingHash";
type ETag = `"${hashType};${VideoIDHash};${Service};${number}"`;
type hashType = "skipSegments" | "skipSegmentsHash" | "videoLabel" | "videoLabelHash";
type ETag = `${hashType};${VideoIDHash};${Service};${number}`;
type hashKey = string | VideoID | VideoIDHash;
export function cacheMiddlware(req: Request, res: Response, next: NextFunction): void {
@@ -12,13 +12,13 @@ export function cacheMiddlware(req: Request, res: Response, next: NextFunction):
// if weak etag, do not handle
if (!reqEtag || reqEtag.startsWith("W/")) return next();
// split into components
const [hashType, hashKey, service, lastModified] = reqEtag.replace(/^"|"$/g, "").split(";");
const [hashType, hashKey, service, lastModified] = reqEtag.split(";");
// fetch last-modified
getLastModified(hashType as hashType, hashKey as VideoIDHash, service as Service)
.then(redisLastModified => {
if (redisLastModified <= new Date(Number(lastModified) + 1000)) {
// match cache, generate etag
const etag = `"${hashType};${hashKey};${service};${redisLastModified.getTime()}"` as ETag;
const etag = `${hashType};${hashKey};${service};${redisLastModified.getTime()}` as ETag;
res.status(304).set("etag", etag).send();
}
else next();
@@ -30,19 +30,15 @@ function getLastModified(hashType: hashType, hashKey: hashKey, service: Service)
let redisKey: string | null;
if (hashType === "skipSegments") redisKey = skipSegmentsKey(hashKey as VideoID, service);
else if (hashType === "skipSegmentsHash") redisKey = skipSegmentsHashKey(hashKey as VideoIDHash, service);
else if (hashType === "skipSegmentsLargerHash") redisKey = skipSegmentsLargerHashKey(hashKey as VideoIDHash, service);
else if (hashType === "videoLabel") redisKey = videoLabelsKey(hashKey as VideoID, service);
else if (hashType === "videoLabelHash") redisKey = videoLabelsHashKey(hashKey as VideoIDHash, service);
else if (hashType === "videoLabelsLargerHash") redisKey = videoLabelsLargerHashKey(hashKey as VideoIDHash, service);
else if (hashType === "branding") redisKey = brandingKey(hashKey as VideoID, service);
else if (hashType === "brandingHash") redisKey = brandingHashKey(hashKey as VideoIDHash, service);
else return Promise.reject();
return QueryCacher.getKeyLastModified(redisKey);
}
export async function getEtag(hashType: hashType, hashKey: hashKey, service: Service): Promise<ETag> {
const lastModified = await getLastModified(hashType, hashKey, service);
return `"${hashType};${hashKey};${service};${lastModified.getTime()}"` as ETag;
return `${hashType};${hashKey};${service};${lastModified.getTime()}` as ETag;
}
/* example usage

View File

@@ -6,7 +6,6 @@ import { isUserVIP } from "../utils/isUserVIP";
import { Feature, HashedUserID, UserID } from "../types/user.model";
import { Logger } from "../utils/logger";
import { QueryCacher } from "../utils/queryCacher";
import { getVerificationValue, verifyOldSubmissions } from "./postBranding";
interface AddFeatureRequest extends Request {
body: {
@@ -20,13 +19,11 @@ interface AddFeatureRequest extends Request {
const allowedFeatures = {
vip: [
Feature.ChapterSubmitter,
Feature.FillerSubmitter,
Feature.DeArrowTitleSubmitter,
Feature.FillerSubmitter
],
admin: [
Feature.ChapterSubmitter,
Feature.FillerSubmitter,
Feature.DeArrowTitleSubmitter,
Feature.FillerSubmitter
]
};
@@ -59,10 +56,6 @@ export async function addFeature(req: AddFeatureRequest, res: Response): Promise
await db.prepare("run", 'INSERT INTO "userFeatures" ("userID", "feature", "issuerUserID", "timeSubmitted") VALUES(?, ?, ?, ?)'
, [userID, feature, adminUserID, Date.now()]);
}
if (feature === Feature.DeArrowTitleSubmitter) {
await verifyOldSubmissions(userID, await getVerificationValue(userID, false));
}
} else {
await db.prepare("run", 'DELETE FROM "userFeatures" WHERE "userID" = ? AND "feature" = ?', [userID, feature]);
}

View File

@@ -4,7 +4,6 @@ import { config } from "../config";
import { Request, Response } from "express";
import { isUserVIP } from "../utils/isUserVIP";
import { HashedUserID } from "../types/user.model";
import { Logger } from "../utils/logger";
interface AddUserAsVIPRequest extends Request {
query: {
@@ -35,21 +34,15 @@ export async function addUserAsVIP(req: AddUserAsVIPRequest, res: Response): Pro
// check to see if this user is already a vip
const userIsVIP = await isUserVIP(userID);
try {
if (enabled && !userIsVIP) {
// add them to the vip list
await db.prepare("run", 'INSERT INTO "vipUsers" VALUES(?)', [userID]);
}
if (!enabled && userIsVIP) {
//remove them from the shadow ban list
await db.prepare("run", 'DELETE FROM "vipUsers" WHERE "userID" = ?', [userID]);
}
return res.sendStatus(200);
} catch (e) {
Logger.error(e as string);
return res.sendStatus(500);
if (enabled && !userIsVIP) {
// add them to the vip list
await db.prepare("run", 'INSERT INTO "vipUsers" VALUES(?)', [userID]);
}
if (!enabled && userIsVIP) {
//remove them from the shadow ban list
await db.prepare("run", 'DELETE FROM "vipUsers" WHERE "userID" = ?', [userID]);
}
return res.sendStatus(200);
}

View File

@@ -6,7 +6,6 @@ import { ActionType, Category, Service, VideoID } from "../types/segments.model"
import { UserID } from "../types/user.model";
import { getService } from "../utils/getService";
import { config } from "../config";
import { Logger } from "../utils/logger";
interface DeleteLockCategoriesRequest extends Request {
body: {
@@ -54,12 +53,7 @@ export async function deleteLockCategoriesEndpoint(req: DeleteLockCategoriesRequ
});
}
try {
await deleteLockCategories(videoID, categories, actionTypes, getService(service));
} catch (e) {
Logger.error(e as string);
return res.status(500);
}
await deleteLockCategories(videoID, categories, actionTypes, getService(service));
return res.status(200).json({ message: `Removed lock categories entries for video ${videoID}` });
}

View File

@@ -96,12 +96,10 @@ function removeOutdatedDumps(exportPath: string): Promise<void> {
for (const tableName in tableFiles) {
const files = tableFiles[tableName].sort((a, b) => b.timestamp - a.timestamp);
for (let i = 2; i < files.length; i++) {
if (!latestDumpFiles.some((file) => file.fileName === files[i].file.match(/[^/]+$/)[0])) {
// remove old file
await unlink(files[i].file).catch((error: any) => {
Logger.error(`[dumpDatabase] Garbage collection failed ${error}`);
});
}
// remove old file
await unlink(files[i].file).catch((error: any) => {
Logger.error(`[dumpDatabase] Garbage collection failed ${error}`);
});
}
}
resolve();
@@ -166,23 +164,18 @@ export default async function dumpDatabase(req: Request, res: Response, showPage
<hr/>
${updateQueued ? `Update queued.` : ``} Last updated: ${lastUpdate ? new Date(lastUpdate).toUTCString() : `Unknown`}`);
} else {
try {
res.send({
dbVersion: await getDbVersion(),
lastUpdated: lastUpdate,
updateQueued,
links: latestDumpFiles.map((item:any) => {
return {
table: item.tableName,
url: `/database/${item.tableName}.csv`,
size: item.fileSize,
};
}),
});
} catch (e) {
Logger.error(e as string);
res.sendStatus(500);
}
res.send({
dbVersion: await getDbVersion(),
lastUpdated: lastUpdate,
updateQueued,
links: latestDumpFiles.map((item:any) => {
return {
table: item.tableName,
url: `/database/${item.tableName}.csv`,
size: item.fileSize,
};
}),
});
}
await queueDump();
@@ -236,11 +229,11 @@ async function queueDump(): Promise<void> {
const fileName = `${table.name}_${startTime}.csv`;
const file = `${appExportPath}/${fileName}`;
await new Promise<string>((resolve, reject) => {
await new Promise<string>((resolve) => {
exec(`psql -c "\\copy (SELECT * FROM \\"${table.name}\\"${table.order ? ` ORDER BY \\"${table.order}\\"` : ``})`
+ ` TO '${file}' WITH (FORMAT CSV, HEADER true);"`, credentials, (error, stdout, stderr) => {
if (error) {
reject(`[dumpDatabase] Failed to dump ${table.name} to ${file} due to ${stderr}`);
Logger.error(`[dumpDatabase] Failed to dump ${table.name} to ${file} due to ${stderr}`);
}
resolve(error ? stderr : stdout);
@@ -255,10 +248,10 @@ async function queueDump(): Promise<void> {
latestDumpFiles = [...dumpFiles];
lastUpdate = startTime;
updateQueued = false;
} catch(e) {
Logger.error(e as string);
} finally {
updateQueued = false;
updateRunning = false;
}
}

View File

@@ -7,8 +7,6 @@ interface GenerateTokenRequest extends Request {
query: {
code: string;
adminUserID?: string;
total?: string;
key?: string;
},
params: {
type: TokenType;
@@ -16,45 +14,31 @@ interface GenerateTokenRequest extends Request {
}
export async function generateTokenRequest(req: GenerateTokenRequest, res: Response): Promise<Response> {
const { query: { code, adminUserID, total, key }, params: { type } } = req;
const { query: { code, adminUserID }, params: { type } } = req;
const adminUserIDHash = adminUserID ? (await getHashCache(adminUserID)) : null;
if (!type || (!code && type === TokenType.patreon)) {
if (!code || !type) {
return res.status(400).send("Invalid request");
}
if (type === TokenType.free && (!key || Math.abs(Date.now() - parseInt(key)) > 1000 * 60 * 60 * 24)) {
return res.status(400).send("Invalid request");
}
if (type === TokenType.patreon
|| ([TokenType.local, TokenType.gift].includes(type) && adminUserIDHash === config.adminUserID)
|| type === TokenType.free) {
const licenseKeys = await createAndSaveToken(type, code, adminUserIDHash === config.adminUserID ? parseInt(total) : 1);
if (type === TokenType.patreon || (type === TokenType.local && adminUserIDHash === config.adminUserID)) {
const licenseKey = await createAndSaveToken(type, code);
/* istanbul ignore else */
if (licenseKeys) {
if (type === TokenType.patreon) {
return res.status(200).send(`
<h1>
Your license key:
</h1>
<p>
<b>
${licenseKeys[0]}
</b>
</p>
<p>
Copy this into the textbox in the other tab
</p>
`);
} else if (type === TokenType.free) {
return res.status(200).send({
licenseKey: licenseKeys[0]
});
} else {
return res.status(200).send(licenseKeys.join("<br/>"));
}
if (licenseKey) {
return res.status(200).send(`
<h1>
Your license key:
</h1>
<p>
<b>
${licenseKey}
</b>
</p>
<p>
Copy this into the textbox in the other tab
</p>
`);
} else {
return res.status(401).send(`
<h1>

View File

@@ -3,7 +3,7 @@ import { isEmpty } from "lodash";
import { config } from "../config";
import { db, privateDB } from "../databases/databases";
import { Postgres } from "../databases/Postgres";
import { BrandingDBSubmission, BrandingDBSubmissionData, BrandingHashDBResult, BrandingResult, BrandingSegmentDBResult, BrandingSegmentHashDBResult, CasualVoteDBResult, CasualVoteHashDBResult, ThumbnailDBResult, ThumbnailResult, TitleDBResult, TitleResult } from "../types/branding.model";
import { BrandingDBSubmission, BrandingHashDBResult, BrandingResult, ThumbnailDBResult, ThumbnailResult, TitleDBResult, TitleResult } from "../types/branding.model";
import { HashedIP, IPAddress, Service, VideoID, VideoIDHash, Visibility } from "../types/segments.model";
import { shuffleArray } from "../utils/array";
import { getHashCache } from "../utils/getHashCache";
@@ -14,70 +14,35 @@ import { Logger } from "../utils/logger";
import { promiseOrTimeout } from "../utils/promise";
import { QueryCacher } from "../utils/queryCacher";
import { brandingHashKey, brandingIPKey, brandingKey } from "../utils/redisKeys";
import * as SeedRandom from "seedrandom";
import { getEtag } from "../middleware/etag";
enum BrandingSubmissionType {
Title = "title",
Thumbnail = "thumbnail"
}
export async function getVideoBranding(res: Response, videoID: VideoID, service: Service, ip: IPAddress, returnUserID: boolean, fetchAll: boolean): Promise<BrandingResult> {
export async function getVideoBranding(res: Response, videoID: VideoID, service: Service, ip: IPAddress): Promise<BrandingResult> {
const getTitles = () => db.prepare(
"all",
`SELECT "titles"."title", "titles"."original", "titleVotes"."votes", "titleVotes"."downvotes", "titleVotes"."locked", "titleVotes"."shadowHidden", "titles"."UUID", "titles"."videoID", "titles"."hashedVideoID", "titleVotes"."verification", "titles"."userID"
`SELECT "titles"."title", "titles"."original", "titleVotes"."votes", "titleVotes"."locked", "titleVotes"."shadowHidden", "titles"."UUID", "titles"."videoID", "titles"."hashedVideoID"
FROM "titles" JOIN "titleVotes" ON "titles"."UUID" = "titleVotes"."UUID"
WHERE "titles"."videoID" = ? AND "titles"."service" = ? AND "titleVotes"."votes" > -1 AND "titleVotes"."votes" - "titleVotes"."downvotes" > -2 AND "titleVotes"."removed" = 0`,
WHERE "titles"."videoID" = ? AND "titles"."service" = ? AND "titleVotes"."votes" > -2`,
[videoID, service],
{ useReplica: true }
) as Promise<TitleDBResult[]>;
const getThumbnails = () => db.prepare(
"all",
`SELECT "thumbnailTimestamps"."timestamp", "thumbnails"."original", "thumbnailVotes"."votes", "thumbnailVotes"."downvotes", "thumbnailVotes"."locked", "thumbnailVotes"."shadowHidden", "thumbnails"."UUID", "thumbnails"."videoID", "thumbnails"."hashedVideoID", "thumbnails"."userID"
`SELECT "thumbnailTimestamps"."timestamp", "thumbnails"."original", "thumbnailVotes"."votes", "thumbnailVotes"."locked", "thumbnailVotes"."shadowHidden", "thumbnails"."UUID", "thumbnails"."videoID", "thumbnails"."hashedVideoID"
FROM "thumbnails" LEFT JOIN "thumbnailVotes" ON "thumbnails"."UUID" = "thumbnailVotes"."UUID" LEFT JOIN "thumbnailTimestamps" ON "thumbnails"."UUID" = "thumbnailTimestamps"."UUID"
WHERE "thumbnails"."videoID" = ? AND "thumbnails"."service" = ? AND "thumbnailVotes"."votes" - "thumbnailVotes"."downvotes" > -2 AND "thumbnailVotes"."removed" = 0
ORDER BY "thumbnails"."timeSubmitted" ASC`,
WHERE "thumbnails"."videoID" = ? AND "thumbnails"."service" = ? AND "thumbnailVotes"."votes" > -2`,
[videoID, service],
{ useReplica: true }
) as Promise<ThumbnailDBResult[]>;
const getSegments = () => db.prepare(
"all",
`SELECT "startTime", "endTime", "category", "videoDuration" FROM "sponsorTimes"
WHERE "votes" > -2 AND "shadowHidden" = 0 AND "hidden" = 0 AND "actionType" = 'skip' AND "videoID" = ? AND "service" = ?
ORDER BY "timeSubmitted" ASC`,
[videoID, service],
{ useReplica: true }
) as Promise<BrandingSegmentDBResult[]>;
const getCasualVotes = () => db.prepare(
"all",
`SELECT "casualVotes"."category", "casualVotes"."upvotes", "casualVoteTitles"."title"
FROM "casualVotes" LEFT JOIN "casualVoteTitles" ON "casualVotes"."videoID" = "casualVoteTitles"."videoID" AND "casualVotes"."service" = "casualVoteTitles"."service" AND "casualVotes"."titleID" = "casualVoteTitles"."id"
WHERE "casualVotes"."videoID" = ? AND "casualVotes"."service" = ?
ORDER BY "casualVotes"."timeSubmitted" ASC`,
[videoID, service],
{ useReplica: true }
) as Promise<CasualVoteDBResult[]>;
const getBranding = async () => {
const titles = getTitles();
const thumbnails = getThumbnails();
const segments = getSegments();
const casualVotes = getCasualVotes();
for (const title of await titles) {
title.title = title.title.replaceAll("<", "");
}
return {
titles: await titles,
thumbnails: await thumbnails,
segments: await segments,
casualVotes: await casualVotes
};
};
const getBranding = async () => ({
titles: await getTitles(),
thumbnails: await getThumbnails()
});
const brandingTrace = await QueryCacher.getTraced(getBranding, brandingKey(videoID, service));
const branding = brandingTrace.data;
@@ -97,89 +62,52 @@ export async function getVideoBranding(res: Response, videoID: VideoID, service:
currentIP: null as Promise<HashedIP> | null
};
return filterAndSortBranding(videoID, returnUserID, fetchAll, branding.titles,
branding.thumbnails, branding.segments, branding.casualVotes, ip, cache);
return filterAndSortBranding(branding.titles, branding.thumbnails, ip, cache);
}
export async function getVideoBrandingByHash(videoHashPrefix: VideoIDHash, service: Service, ip: IPAddress, returnUserID: boolean, fetchAll: boolean): Promise<Record<VideoID, BrandingResult>> {
export async function getVideoBrandingByHash(videoHashPrefix: VideoIDHash, service: Service, ip: IPAddress): Promise<Record<VideoID, BrandingResult>> {
const getTitles = () => db.prepare(
"all",
`SELECT "titles"."title", "titles"."original", "titleVotes"."votes", "titleVotes"."downvotes", "titleVotes"."locked", "titleVotes"."shadowHidden", "titles"."UUID", "titles"."videoID", "titles"."hashedVideoID", "titleVotes"."verification"
`SELECT "titles"."title", "titles"."original", "titleVotes"."votes", "titleVotes"."locked", "titleVotes"."shadowHidden", "titles"."UUID", "titles"."videoID", "titles"."hashedVideoID"
FROM "titles" JOIN "titleVotes" ON "titles"."UUID" = "titleVotes"."UUID"
WHERE "titles"."hashedVideoID" LIKE ? AND "titles"."service" = ? AND "titleVotes"."votes" > -1 AND "titleVotes"."votes" - "titleVotes"."downvotes" > -2 AND "titleVotes"."removed" = 0`,
WHERE "titles"."hashedVideoID" LIKE ? AND "titles"."service" = ? AND "titleVotes"."votes" > -2`,
[`${videoHashPrefix}%`, service],
{ useReplica: true }
) as Promise<TitleDBResult[]>;
const getThumbnails = () => db.prepare(
"all",
`SELECT "thumbnailTimestamps"."timestamp", "thumbnails"."original", "thumbnailVotes"."votes", "thumbnailVotes"."downvotes", "thumbnailVotes"."locked", "thumbnailVotes"."shadowHidden", "thumbnails"."UUID", "thumbnails"."videoID", "thumbnails"."hashedVideoID"
`SELECT "thumbnailTimestamps"."timestamp", "thumbnails"."original", "thumbnailVotes"."votes", "thumbnailVotes"."locked", "thumbnailVotes"."shadowHidden", "thumbnails"."UUID", "thumbnails"."videoID", "thumbnails"."hashedVideoID"
FROM "thumbnails" LEFT JOIN "thumbnailVotes" ON "thumbnails"."UUID" = "thumbnailVotes"."UUID" LEFT JOIN "thumbnailTimestamps" ON "thumbnails"."UUID" = "thumbnailTimestamps"."UUID"
WHERE "thumbnails"."hashedVideoID" LIKE ? AND "thumbnails"."service" = ? AND "thumbnailVotes"."votes" - "thumbnailVotes"."downvotes" > -2 AND "thumbnailVotes"."removed" = 0
ORDER BY "thumbnails"."timeSubmitted" ASC`,
WHERE "thumbnails"."hashedVideoID" LIKE ? AND "thumbnails"."service" = ? AND "thumbnailVotes"."votes" > -2`,
[`${videoHashPrefix}%`, service],
{ useReplica: true }
) as Promise<ThumbnailDBResult[]>;
const getSegments = () => db.prepare(
"all",
`SELECT "videoID", "startTime", "endTime", "category", "videoDuration" FROM "sponsorTimes"
WHERE "votes" > -2 AND "shadowHidden" = 0 AND "hidden" = 0 AND "actionType" = 'skip' AND "hashedVideoID" LIKE ? AND "service" = ?
ORDER BY "timeSubmitted" ASC`,
[`${videoHashPrefix}%`, service],
{ useReplica: true }
) as Promise<BrandingSegmentHashDBResult[]>;
const getCasualVotes = () => db.prepare(
"all",
`SELECT "casualVotes"."videoID", "casualVotes"."category", "casualVotes"."upvotes", "casualVoteTitles"."title"
FROM "casualVotes" LEFT JOIN "casualVoteTitles" ON "casualVotes"."videoID" = "casualVoteTitles"."videoID" AND "casualVotes"."service" = "casualVoteTitles"."service" AND "casualVotes"."titleID" = "casualVoteTitles"."id"
WHERE "casualVotes"."hashedVideoID" LIKE ? AND "casualVotes"."service" = ?
ORDER BY "casualVotes"."timeSubmitted" ASC`,
[`${videoHashPrefix}%`, service],
{ useReplica: true }
) as Promise<CasualVoteHashDBResult[]>;
const branding = await QueryCacher.get(async () => {
// Make sure they are both called in parallel
const branding = {
titles: getTitles(),
thumbnails: getThumbnails(),
segments: getSegments(),
casualVotes: getCasualVotes()
thumbnails: getThumbnails()
};
const dbResult: Record<VideoID, BrandingHashDBResult> = {};
const initResult = (submission: BrandingDBSubmissionData) => {
const initResult = (submission: BrandingDBSubmission) => {
dbResult[submission.videoID] = dbResult[submission.videoID] || {
titles: [],
thumbnails: [],
segments: [],
casualVotes: []
thumbnails: []
};
};
(await branding.titles).forEach((title) => {
title.title = title.title.replaceAll("<", "");
(await branding.titles).map((title) => {
initResult(title);
dbResult[title.videoID].titles.push(title);
});
(await branding.thumbnails).forEach((thumbnail) => {
(await branding.thumbnails).map((thumbnail) => {
initResult(thumbnail);
dbResult[thumbnail.videoID].thumbnails.push(thumbnail);
});
(await branding.segments).forEach((segment) => {
initResult(segment);
dbResult[segment.videoID].segments.push(segment);
});
(await branding.casualVotes).forEach((casualVote) => {
initResult(casualVote);
dbResult[casualVote.videoID].casualVotes.push(casualVote);
});
return dbResult;
}, brandingHashKey(videoHashPrefix, service));
@@ -191,62 +119,41 @@ export async function getVideoBrandingByHash(videoHashPrefix: VideoIDHash, servi
const processedResult: Record<VideoID, BrandingResult> = {};
await Promise.all(Object.keys(branding).map(async (key) => {
const castedKey = key as VideoID;
processedResult[castedKey] = await filterAndSortBranding(castedKey, returnUserID, fetchAll, branding[castedKey].titles,
branding[castedKey].thumbnails, branding[castedKey].segments, branding[castedKey].casualVotes, ip, cache);
processedResult[castedKey] = await filterAndSortBranding(branding[castedKey].titles, branding[castedKey].thumbnails, ip, cache);
}));
return processedResult;
}
async function filterAndSortBranding(videoID: VideoID, returnUserID: boolean, fetchAll: boolean, dbTitles: TitleDBResult[],
dbThumbnails: ThumbnailDBResult[], dbSegments: BrandingSegmentDBResult[], dbCasualVotes: CasualVoteDBResult[],
ip: IPAddress, cache: { currentIP: Promise<HashedIP> | null }): Promise<BrandingResult> {
async function filterAndSortBranding(dbTitles: TitleDBResult[], dbThumbnails: ThumbnailDBResult[], ip: IPAddress, cache: { currentIP: Promise<HashedIP> | null }): Promise<BrandingResult> {
const shouldKeepTitles = shouldKeepSubmission(dbTitles, BrandingSubmissionType.Title, ip, cache);
const shouldKeepThumbnails = shouldKeepSubmission(dbThumbnails, BrandingSubmissionType.Thumbnail, ip, cache);
const titles = shuffleArray(dbTitles.filter(await shouldKeepTitles))
.sort((a, b) => b.votes - a.votes)
.sort((a, b) => b.locked - a.locked)
.map((r) => ({
title: r.title,
original: r.original === 1,
votes: r.votes + r.verification - r.downvotes,
votes: r.votes,
locked: r.locked === 1,
UUID: r.UUID,
userID: returnUserID ? r.userID : undefined
}))
.filter((a) => fetchAll || a.votes >= 0 || a.locked)
.sort((a, b) => b.votes - a.votes)
.sort((a, b) => +b.locked - +a.locked) as TitleResult[];
})) as TitleResult[];
const thumbnails = dbThumbnails.filter(await shouldKeepThumbnails)
.sort((a, b) => +a.original - +b.original)
const thumbnails = shuffleArray(dbThumbnails.filter(await shouldKeepThumbnails))
.sort((a, b) => b.votes - a.votes)
.sort((a, b) => b.locked - a.locked)
.map((r) => ({
timestamp: r.timestamp,
original: r.original === 1,
votes: r.votes - r.downvotes,
votes: r.votes,
locked: r.locked === 1,
UUID: r.UUID,
userID: returnUserID ? r.userID : undefined
}))
.filter((a) => (fetchAll && !a.original) || a.votes >= 1 || (a.votes >= 0 && !a.original) || a.locked) as ThumbnailResult[];
const casualDownvotes = dbCasualVotes.filter((r) => r.category === "downvote")[0];
const casualVotes = dbCasualVotes.filter((r) => r.category !== "downvote").map((r) => ({
id: r.category,
count: r.upvotes - (casualDownvotes?.upvotes ?? 0),
title: r.title
})).filter((a) => a.count > 0);
const videoDuration = dbSegments.filter(s => s.videoDuration !== 0)[0]?.videoDuration ?? null;
UUID: r.UUID
})) as ThumbnailResult[];
return {
titles,
thumbnails,
casualVotes,
randomTime: findRandomTime(videoID, dbSegments, videoDuration),
videoDuration: videoDuration,
thumbnails
};
}
@@ -254,7 +161,7 @@ async function shouldKeepSubmission(submissions: BrandingDBSubmission[], type: B
cache: { currentIP: Promise<HashedIP> | null }): Promise<(_: unknown, index: number) => boolean> {
const shouldKeep = await Promise.all(submissions.map(async (s) => {
if (s.shadowHidden === Visibility.VISIBLE) return true;
if (s.shadowHidden != Visibility.HIDDEN) return true;
const table = type === BrandingSubmissionType.Title ? "titleVotes" : "thumbnailVotes";
const fetchData = () => privateDB.prepare("get", `SELECT "hashedIP" FROM "${table}" WHERE "UUID" = ?`,
[s.UUID], { useReplica: true }) as Promise<{ hashedIP: HashedIP }>;
@@ -263,11 +170,9 @@ async function shouldKeepSubmission(submissions: BrandingDBSubmission[], type: B
if (cache.currentIP === null) cache.currentIP = getHashCache((ip + config.globalSalt) as IPAddress);
const hashedIP = await cache.currentIP;
return submitterIP?.hashedIP === hashedIP;
return submitterIP.hashedIP !== hashedIP;
} catch (e) {
// give up on shadow hide for now
Logger.error(`getBranding: Error while trying to find IP: ${e}`);
return false;
}
}));
@@ -275,59 +180,9 @@ async function shouldKeepSubmission(submissions: BrandingDBSubmission[], type: B
return (_, index) => shouldKeep[index];
}
export function findRandomTime(videoID: VideoID, segments: BrandingSegmentDBResult[], videoDuration: number): number {
let randomTime = SeedRandom.alea(videoID)();
// Don't allow random times past 90% of the video if no endcard
if (!segments.some((s) => s.category === "outro") && randomTime > 0.9) {
randomTime -= 0.9;
}
if (segments.length === 0) return randomTime;
videoDuration ||= Math.max(...segments.map((s) => s.endTime)); // use highest end time as a fallback here
// There are segments, treat this as a relative time in the chopped up video
const sorted = segments.sort((a, b) => a.startTime - b.startTime);
const emptySegments: [number, number][] = [];
let totalTime = 0;
let nextEndTime = 0;
for (const segment of sorted) {
if (segment.startTime > nextEndTime) {
emptySegments.push([nextEndTime, segment.startTime]);
totalTime += segment.startTime - nextEndTime;
}
nextEndTime = Math.max(segment.endTime, nextEndTime);
}
if (nextEndTime < videoDuration) {
emptySegments.push([nextEndTime, videoDuration]);
totalTime += videoDuration - nextEndTime;
}
let cursor = 0;
for (const segment of emptySegments) {
const duration = segment[1] - segment[0];
if (cursor + duration >= randomTime * totalTime) {
// Found it
return (segment[0] + (randomTime * totalTime - cursor)) / videoDuration;
}
cursor += duration;
}
// Fallback to just the random time
return randomTime;
}
export async function getBranding(req: Request, res: Response) {
const videoID: VideoID = req.query.videoID as VideoID;
const service: Service = getService(req.query.service as string);
const returnUserID = req.query.returnUserID === "true";
const fetchAll = req.query.fetchAll === "true";
if (!videoID) {
return res.status(400).send("Missing parameter: videoID");
@@ -335,13 +190,9 @@ export async function getBranding(req: Request, res: Response) {
const ip = getIP(req);
try {
const result = await getVideoBranding(res, videoID, service, ip, returnUserID, fetchAll);
const result = await getVideoBranding(res, videoID, service, ip);
await getEtag("branding", (videoID as string), service)
.then(etag => res.set("ETag", etag))
.catch(() => null);
const status = result.titles.length > 0 || result.thumbnails.length > 0 || result.casualVotes.length > 0 ? 200 : 404;
const status = result.titles.length > 0 || result.thumbnails.length > 0 ? 200 : 404;
return res.status(status).json(result);
} catch (e) {
Logger.error(e as string);
@@ -358,15 +209,9 @@ export async function getBrandingByHashEndpoint(req: Request, res: Response) {
const service: Service = getService(req.query.service as string);
const ip = getIP(req);
const returnUserID = req.query.returnUserID === "true";
const fetchAll = req.query.fetchAll === "true";
try {
const result = await getVideoBrandingByHash(hashPrefix, service, ip, returnUserID, fetchAll);
await getEtag("brandingHash", (hashPrefix as string), service)
.then(etag => res.set("ETag", etag))
.catch(() => null);
const result = await getVideoBrandingByHash(hashPrefix, service, ip);
const status = !isEmpty(result) ? 200 : 404;
return res.status(status).json(result);
@@ -374,4 +219,4 @@ export async function getBrandingByHashEndpoint(req: Request, res: Response) {
Logger.error(e as string);
return res.status(500).send([]);
}
}
}

View File

@@ -1,82 +0,0 @@
/* istanbul ignore file */
import { db } from "../databases/databases";
import { Request, Response } from "express";
import axios from "axios";
import { Logger } from "../utils/logger";
import { getCWSUsers, getChromeUsers } from "../utils/getCWSUsers";
// A cache of the number of chrome web store users
let chromeUsersCache = 30000;
let firefoxUsersCache = 0;
interface DBStatsData {
userCount: number,
titles: number,
thumbnails: number,
}
let lastFetch: DBStatsData = {
userCount: 0,
titles: 0,
thumbnails: 0
};
updateExtensionUsers();
export async function getBrandingStats(req: Request, res: Response): Promise<void> {
try {
const row = await getStats();
lastFetch = row;
/* istanbul ignore if */
if (!row) res.sendStatus(500);
const extensionUsers = chromeUsersCache + firefoxUsersCache;
//send this result
res.send({
userCount: row.userCount ?? 0,
activeUsers: extensionUsers,
titles: row.titles,
thumbnails: row.thumbnails,
});
} catch (e) {
Logger.error(e as string);
res.sendStatus(500);
}
}
async function getStats(): Promise<DBStatsData> {
if (db.highLoad()) {
return Promise.resolve(lastFetch);
} else {
const userCount = (await db.prepare("get", `SELECT COUNT(DISTINCT "userID") as "userCount" FROM titles`, []))?.userCount;
const titles = (await db.prepare("get", `SELECT COUNT(*) as "titles" FROM titles`, []))?.titles;
const thumbnails = (await db.prepare("get", `SELECT COUNT(*) as "thumbnails" FROM thumbnails`, []))?.thumbnails;
return {
userCount: userCount ?? 0,
titles: titles ?? 0,
thumbnails: thumbnails ?? 0
};
}
}
function updateExtensionUsers() {
const mozillaAddonsUrl = "https://addons.mozilla.org/api/v3/addons/addon/dearrow/";
const chromeExtensionUrl = "https://chromewebstore.google.com/detail/dearrow-better-titles-and/enamippconapkdmgfgjchkhakpfinmaj";
const chromeExtId = "enamippconapkdmgfgjchkhakpfinmaj";
axios.get(mozillaAddonsUrl)
.then(res => firefoxUsersCache = res.data.average_daily_users )
.catch( /* istanbul ignore next */ () => {
Logger.debug(`Failing to connect to ${mozillaAddonsUrl}`);
return 0;
});
getCWSUsers(chromeExtId)
.then(res => chromeUsersCache = res)
.catch(/* istanbul ignore next */ () =>
getChromeUsers(chromeExtensionUrl)
.then(res => chromeUsersCache = res)
);
}

View File

@@ -22,16 +22,15 @@ export async function getChapterNames(req: Request, res: Response): Promise<Resp
const descriptions = await db.prepare("all", `
SELECT "description"
FROM "sponsorTimes"
WHERE ("locked" = 1 OR "votes" >= 0) AND "videoID" IN (
WHERE ("locked" = 1 OR "votes" > 0 OR ("views" > 25 AND "votes" >= 0)) AND "videoID" IN (
SELECT "videoID"
FROM "videoInfo"
WHERE "channelID" = ?
) AND "description" != ''
AND similarity("description", ?) >= 0.1
GROUP BY "description"
ORDER BY SUM("votes"), similarity("description", ?) DESC
LIMIT 5;`
, [channelID, description, description]) as { description: string }[];
, [channelID, description]) as { description: string }[];
if (descriptions?.length > 0) {
return res.status(200).json(descriptions.map(d => ({

View File

@@ -1,35 +0,0 @@
import { getHashCache } from "../utils/getHashCache";
import { Request, Response } from "express";
import { isUserVIP } from "../utils/isUserVIP";
import { UserID } from "../types/user.model";
import { Logger } from "../utils/logger";
import { getServerConfig } from "../utils/serverConfig";
export async function getConfigEndpoint(req: Request, res: Response): Promise<Response> {
const userID = req.query.userID as string;
const key = req.query.key as string;
if (!userID || !key) {
// invalid request
return res.sendStatus(400);
}
// hash the userID
const hashedUserID = await getHashCache(userID as UserID);
const isVIP = (await isUserVIP(hashedUserID));
if (!isVIP) {
// not authorized
return res.sendStatus(403);
}
try {
return res.status(200).json({
value: await getServerConfig(key)
});
} catch (e) {
Logger.error(e as string);
return res.sendStatus(500);
}
}

View File

@@ -1,23 +1,17 @@
import { db } from "../databases/databases";
import { Request, Response } from "express";
import { Logger } from "../utils/logger";
export async function getDaysSavedFormatted(req: Request, res: Response): Promise<Response> {
try {
const row = await db.prepare("get", 'SELECT SUM(("endTime" - "startTime") / 60 / 60 / 24 * "views") as "daysSaved" from "sponsorTimes" where "shadowHidden" != 1', []);
const row = await db.prepare("get", 'SELECT SUM(("endTime" - "startTime") / 60 / 60 / 24 * "views") as "daysSaved" from "sponsorTimes" where "shadowHidden" != 1', []);
if (row !== undefined) {
//send this result
return res.send({
daysSaved: row.daysSaved?.toFixed(2) ?? "0",
});
} else {
return res.send({
daysSaved: 0
});
}
} catch (err) {
Logger.error(err as string);
return res.sendStatus(500);
if (row !== undefined) {
//send this result
return res.send({
daysSaved: row.daysSaved?.toFixed(2) ?? "0",
});
} else {
return res.send({
daysSaved: 0
});
}
}

View File

@@ -1,15 +0,0 @@
import { config } from "../config";
import { Request, Response } from "express";
export function getFeatureFlag(req: Request, res: Response): Response {
const { params: { name } } = req;
switch (name) {
case "deArrowPaywall":
return res.status(200).json({
enabled: config.deArrowPaywall,
});
}
return res.status(404).json();
}

View File

@@ -1,106 +0,0 @@
import { db, privateDB } from "../databases/databases";
import { Request, Response } from "express";
import os from "os";
import redis, { getRedisStats } from "../utils/redis";
import { Postgres } from "../databases/Postgres";
import { Server } from "http";
export async function getMetrics(req: Request, res: Response, server: Server): Promise<Response> {
const redisStats = getRedisStats();
return res.type("text").send([
`# HELP sb_uptime Uptime of this instance`,
`# TYPE sb_uptime counter`,
`sb_uptime ${process.uptime()}`,
`# HELP sb_db_version The version of the database`,
`# TYPE sb_db_version counter`,
`sb_db_version ${await db.prepare("get", "SELECT key, value FROM config where key = ?", ["version"]).then(e => e.value).catch(() => -1)}`,
`# HELP sb_start_time The time this instance was started`,
`# TYPE sb_start_time gauge`,
`sb_start_time ${Date.now()}`,
`# HELP sb_loadavg_5 The 5 minute load average of the system`,
`# TYPE sb_loadavg_5 gauge`,
`sb_loadavg_5 ${os.loadavg()[0]}`,
`# HELP sb_loadavg_15 The 15 minute load average of the system`,
`# TYPE sb_loadavg_15 gauge`,
`sb_loadavg_15 ${os.loadavg()[1]}`,
`# HELP sb_connections The number of connections to this instance`,
`# TYPE sb_connections gauge`,
`sb_connections ${await new Promise((resolve) => server.getConnections((_, count) => resolve(count)) as any)}`,
`# HELP sb_status_requests The number of status requests made to this instance`,
`# TYPE sb_status_requests gauge`,
`sb_status_requests ${await redis.increment("statusRequest").then(e => e[0]).catch(() => -1)}`,
`# HELP sb_postgres_active_requests The number of active requests to the postgres database`,
`# TYPE sb_postgres_active_requests gauge`,
`sb_postgres_active_requests ${(db as Postgres)?.getStats?.()?.activeRequests ?? -1}`,
`# HELP sb_postgres_avg_read_time The average read time of the postgres database`,
`# TYPE sb_postgres_avg_read_time gauge`,
`sb_postgres_avg_read_time ${(db as Postgres)?.getStats?.()?.avgReadTime ?? -1}`,
`# HELP sb_postgres_avg_write_time The average write time of the postgres database`,
`# TYPE sb_postgres_avg_write_time gauge`,
`sb_postgres_avg_write_time ${(db as Postgres)?.getStats?.()?.avgWriteTime ?? -1}`,
`# HELP sb_postgres_avg_failed_time The average failed time of the postgres database`,
`# TYPE sb_postgres_avg_failed_time gauge`,
`sb_postgres_avg_failed_time ${(db as Postgres)?.getStats?.()?.avgFailedTime ?? -1}`,
`# HELP sb_postgres_pool_total The total number of connections in the postgres pool`,
`# TYPE sb_postgres_pool_total gauge`,
`sb_postgres_pool_total ${(db as Postgres)?.getStats?.()?.pool?.total ?? -1}`,
`# HELP sb_postgres_pool_idle The number of idle connections in the postgres pool`,
`# TYPE sb_postgres_pool_idle gauge`,
`sb_postgres_pool_idle ${(db as Postgres)?.getStats?.()?.pool?.idle ?? -1}`,
`# HELP sb_postgres_pool_waiting The number of connections waiting in the postgres pool`,
`# TYPE sb_postgres_pool_waiting gauge`,
`sb_postgres_pool_waiting ${(db as Postgres)?.getStats?.()?.pool?.waiting ?? -1}`,
`# HELP sb_postgres_private_active_requests The number of active requests to the private postgres database`,
`# TYPE sb_postgres_private_active_requests gauge`,
`sb_postgres_private_active_requests ${(privateDB as Postgres)?.getStats?.()?.activeRequests ?? -1}`,
`# HELP sb_postgres_private_avg_read_time The average read time of the private postgres database`,
`# TYPE sb_postgres_private_avg_read_time gauge`,
`sb_postgres_private_avg_read_time ${(privateDB as Postgres)?.getStats?.()?.avgReadTime ?? -1}`,
`# HELP sb_postgres_private_avg_write_time The average write time of the private postgres database`,
`# TYPE sb_postgres_private_avg_write_time gauge`,
`sb_postgres_private_avg_write_time ${(privateDB as Postgres)?.getStats?.()?.avgWriteTime ?? -1}`,
`# HELP sb_postgres_private_avg_failed_time The average failed time of the private postgres database`,
`# TYPE sb_postgres_private_avg_failed_time gauge`,
`sb_postgres_private_avg_failed_time ${(privateDB as Postgres)?.getStats?.()?.avgFailedTime ?? -1}`,
`# HELP sb_postgres_private_pool_total The total number of connections in the private postgres pool`,
`# TYPE sb_postgres_private_pool_total gauge`,
`sb_postgres_private_pool_total ${(privateDB as Postgres)?.getStats?.()?.pool?.total ?? -1}`,
`# HELP sb_postgres_private_pool_idle The number of idle connections in the private postgres pool`,
`# TYPE sb_postgres_private_pool_idle gauge`,
`sb_postgres_private_pool_idle ${(privateDB as Postgres)?.getStats?.()?.pool?.idle ?? -1}`,
`# HELP sb_postgres_private_pool_waiting The number of connections waiting in the private postgres pool`,
`# TYPE sb_postgres_private_pool_waiting gauge`,
`sb_postgres_private_pool_waiting ${(privateDB as Postgres)?.getStats?.()?.pool?.waiting ?? -1}`,
`# HELP sb_redis_active_requests The number of active requests to redis`,
`# TYPE sb_redis_active_requests gauge`,
`sb_redis_active_requests ${redisStats.activeRequests}`,
`# HELP sb_redis_write_requests The number of write requests to redis`,
`# TYPE sb_redis_write_requests gauge`,
`sb_redis_write_requests ${redisStats.writeRequests}`,
`# HELP sb_redis_avg_read_time The average read time of redis`,
`# TYPE sb_redis_avg_read_time gauge`,
`sb_redis_avg_read_time ${redisStats?.avgReadTime}`,
`# HELP sb_redis_avg_write_time The average write time of redis`,
`# TYPE sb_redis_avg_write_time gauge`,
`sb_redis_avg_write_time ${redisStats.avgWriteTime}`,
`# HELP sb_redis_memory_cache_hits The cache hit ratio in redis`,
`# TYPE sb_redis_memory_cache_hits gauge`,
`sb_redis_memory_cache_hits ${redisStats.memoryCacheHits}`,
`# HELP sb_redis_memory_cache_total_hits The cache hit ratio in redis including uncached items`,
`# TYPE sb_redis_memory_cache_total_hits gauge`,
`sb_redis_memory_cache_total_hits ${redisStats.memoryCacheTotalHits}`,
`# HELP sb_redis_memory_cache_length The length of the memory cache in redis`,
`# TYPE sb_redis_memory_cache_length gauge`,
`sb_redis_memory_cache_length ${redisStats.memoryCacheLength}`,
`# HELP sb_redis_memory_cache_size The size of the memory cache in redis`,
`# TYPE sb_redis_memory_cache_size gauge`,
`sb_redis_memory_cache_size ${redisStats.memoryCacheSize}`,
`# HELP sb_redis_last_invalidation The time of the last successful invalidation in redis`,
`# TYPE sb_redis_last_invalidation gauge`,
`sb_redis_last_invalidation ${redisStats.lastInvalidation}`,
`# HELP sb_redis_last_invalidation_message The time of the last invalidation message in redis`,
`# TYPE sb_redis_last_invalidation_message gauge`,
`sb_redis_last_invalidation_message ${redisStats.lastInvalidationMessage}`,
].join("\n"));
}

View File

@@ -1,26 +0,0 @@
import { Request, Response } from "express";
import { Server } from "http";
import { config } from "../config";
import { getRedisStats } from "../utils/redis";
import { Postgres } from "../databases/Postgres";
import { db } from "../databases/databases";
export async function getReady(req: Request, res: Response, server: Server): Promise<Response> {
const connections = await new Promise((resolve) => server.getConnections((_, count) => resolve(count))) as number;
const redisStats = getRedisStats();
const postgresStats = (db as Postgres).getStats?.();
if (!connections
|| (connections < config.maxConnections
&& (!config.redis || redisStats.activeRequests < config.redis.maxConnections * 0.8)
&& (!config.redis || redisStats.activeRequests < 1 || redisStats.avgReadTime < config.maxResponseTime
|| (redisStats.memoryCacheSize < config.redis.clientCacheSize * 0.8 && redisStats.avgReadTime < config.maxResponseTimeWhileLoadingCache))
&& (!config.postgres || postgresStats.activeRequests < config.postgres.maxActiveRequests * 0.8)
&& (!config.postgres || postgresStats.avgReadTime < config.maxResponseTime
|| (redisStats.memoryCacheSize < config.redis.clientCacheSize * 0.8 && postgresStats.avgReadTime < config.maxResponseTimeWhileLoadingCache)))) {
return res.sendStatus(200);
} else {
return res.sendStatus(500);
}
}

View File

@@ -1,22 +0,0 @@
import { db } from "../databases/databases";
import { Request, Response } from "express";
import { getService } from "../utils/getService";
export async function getSegmentID(req: Request, res: Response): Promise<Response> {
const partialUUID = req.query?.UUID;
const videoID = req.query?.videoID;
const service = getService(req.query?.service as string);
if (!partialUUID || !videoID) {
//invalid request
return res.sendStatus(400);
}
const data = await db.prepare("get", `SELECT "UUID" from "sponsorTimes" WHERE "UUID" LIKE ? AND "videoID" = ? AND "service" = ?`, [`${partialUUID}%`, videoID, service]);
if (data) {
return res.status(200).send(data.UUID);
} else {
return res.sendStatus(404);
}
}

View File

@@ -2,7 +2,7 @@ import { Request, Response } from "express";
import { partition } from "lodash";
import { config } from "../config";
import { db, privateDB } from "../databases/databases";
import { skipSegmentsHashKey, skipSegmentsKey, skipSegmentGroupsKey, shadowHiddenIPKey, skipSegmentsLargerHashKey } from "../utils/redisKeys";
import { skipSegmentsHashKey, skipSegmentsKey, skipSegmentGroupsKey, shadowHiddenIPKey } from "../utils/redisKeys";
import { SBRecord } from "../types/lib.model";
import { ActionType, Category, DBSegment, HashedIP, IPAddress, OverlappingSegmentGroup, Segment, SegmentCache, SegmentUUID, Service, VideoData, VideoID, VideoIDHash, Visibility, VotableObject } from "../types/segments.model";
import { getHashCache } from "../utils/getHashCache";
@@ -14,9 +14,6 @@ import { getService } from "../utils/getService";
import { promiseOrTimeout } from "../utils/promise";
import { parseSkipSegments } from "../utils/parseSkipSegments";
import { getEtag } from "../middleware/etag";
import { shuffleArray } from "../utils/array";
import { Postgres } from "../databases/Postgres";
import { getRedisStats } from "../utils/redis";
async function prepareCategorySegments(req: Request, videoID: VideoID, service: Service, segments: DBSegment[], cache: SegmentCache = { shadowHiddenSegmentIPs: {} }, useCache: boolean): Promise<Segment[]> {
const shouldFilter: boolean[] = await Promise.all(segments.map(async (segment) => {
@@ -24,9 +21,7 @@ async function prepareCategorySegments(req: Request, videoID: VideoID, service:
return true; //required - always send
}
if (segment.hidden
|| segment.votes < -1
|| segment.shadowHidden === Visibility.MORE_HIDDEN) {
if (segment.hidden || segment.votes < -1) {
return false; //too untrustworthy, just ignore it
}
@@ -46,41 +41,20 @@ async function prepareCategorySegments(req: Request, videoID: VideoID, service:
const fetchData = () => privateDB.prepare("all", 'SELECT "hashedIP" FROM "sponsorTimes" WHERE "videoID" = ? AND "timeSubmitted" = ? AND "service" = ?',
[videoID, segment.timeSubmitted, service], { useReplica: true }) as Promise<{ hashedIP: HashedIP }[]>;
try {
if (db.highLoad() || privateDB.highLoad()) {
Logger.error("High load, not handling shadowhide");
if (db instanceof Postgres && privateDB instanceof Postgres) {
Logger.error(`Postgres stats: ${JSON.stringify(db.getStats())}`);
Logger.error(`Postgres private stats: ${JSON.stringify(privateDB.getStats())}`);
}
Logger.error(`Redis stats: ${JSON.stringify(getRedisStats())}`);
return false;
}
cache.shadowHiddenSegmentIPs[videoID][segment.timeSubmitted] = promiseOrTimeout(QueryCacher.get(fetchData, shadowHiddenIPKey(videoID, segment.timeSubmitted, service)), 150);
cache.shadowHiddenSegmentIPs[videoID][segment.timeSubmitted] = await promiseOrTimeout(QueryCacher.get(fetchData, shadowHiddenIPKey(videoID, segment.timeSubmitted, service)), 150);
} catch (e) {
// give up on shadowhide for now
cache.shadowHiddenSegmentIPs[videoID][segment.timeSubmitted] = null;
}
}
let ipList = [];
try {
ipList = await cache.shadowHiddenSegmentIPs[videoID][segment.timeSubmitted];
} catch (e) {
Logger.error(`skipSegments: Error while trying to find IP: ${e}`);
if (db instanceof Postgres && privateDB instanceof Postgres) {
Logger.error(`Postgres stats: ${JSON.stringify(db.getStats())}`);
Logger.error(`Postgres private stats: ${JSON.stringify(privateDB.getStats())}`);
}
return false;
}
const ipList = cache.shadowHiddenSegmentIPs[videoID][segment.timeSubmitted];
if (ipList?.length > 0 && cache.userHashedIP === undefined) {
cache.userHashedIP = await cache.userHashedIPPromise;
}
//if this isn't their ip, don't send it to them
const shouldShadowHide = ipList?.some(
const shouldShadowHide = cache.shadowHiddenSegmentIPs[videoID][segment.timeSubmitted]?.some(
(shadowHiddenSegment) => shadowHiddenSegment.hashedIP === cache.userHashedIP) ?? false;
if (shouldShadowHide) useCache = false;
@@ -150,7 +124,7 @@ async function getSegmentsByVideoID(req: Request, videoID: VideoID, categories:
}
async function getSegmentsByHash(req: Request, hashedVideoIDPrefix: VideoIDHash, categories: Category[],
actionTypes: ActionType[], trimUUIDs: number, requiredSegments: SegmentUUID[], service: Service): Promise<SBRecord<VideoID, VideoData>> {
actionTypes: ActionType[], requiredSegments: SegmentUUID[], service: Service): Promise<SBRecord<VideoID, VideoData>> {
const cache: SegmentCache = { shadowHiddenSegmentIPs: {} };
const segments: SBRecord<VideoID, VideoData> = {};
@@ -182,32 +156,13 @@ async function getSegmentsByHash(req: Request, hashedVideoIDPrefix: VideoIDHash,
};
const canUseCache = requiredSegments.length === 0;
const filteredSegments = (await prepareCategorySegments(req, videoID as VideoID, service, videoData.segments, cache, canUseCache))
.filter((segment: Segment) => categories.includes(segment?.category) && actionTypes.includes(segment?.actionType));
// Make sure no hash duplicates exist
if (trimUUIDs) {
const seen = new Set<string>();
for (const segment of filteredSegments) {
const shortUUID = segment.UUID.substring(0, trimUUIDs);
if (seen.has(shortUUID)) {
// Duplicate found, disable trimming
trimUUIDs = undefined;
break;
}
seen.add(shortUUID);
}
seen.clear();
}
data.segments = filteredSegments
data.segments = (await prepareCategorySegments(req, videoID as VideoID, service, videoData.segments, cache, canUseCache))
.filter((segment: Segment) => categories.includes(segment?.category) && actionTypes.includes(segment?.actionType))
.map((segment) => ({
category: segment.category,
actionType: segment.actionType,
segment: segment.segment,
UUID: trimUUIDs ? segment.UUID.substring(0, trimUUIDs) as SegmentUUID : segment.UUID,
UUID: segment.UUID,
videoDuration: segment.videoDuration,
locked: segment.locked,
votes: segment.votes,
@@ -228,7 +183,7 @@ async function getSegmentsByHash(req: Request, hashedVideoIDPrefix: VideoIDHash,
return segments;
} catch (err) /* istanbul ignore next */ {
Logger.error(`get segments by hash error: ${err}`);
Logger.error(err as string);
return null;
}
}
@@ -245,8 +200,6 @@ async function getSegmentsFromDBByHash(hashedVideoIDPrefix: VideoIDHash, service
if (hashedVideoIDPrefix.length === 4) {
return await QueryCacher.get(fetchFromDB, skipSegmentsHashKey(hashedVideoIDPrefix, service));
} else if (hashedVideoIDPrefix.length === 5) {
return await QueryCacher.get(fetchFromDB, skipSegmentsLargerHashKey(hashedVideoIDPrefix, service));
}
return await fetchFromDB();
@@ -265,11 +218,11 @@ async function getSegmentsFromDBByVideoID(videoID: VideoID, service: Service): P
return await QueryCacher.get(fetchFromDB, skipSegmentsKey(videoID, service));
}
// Gets the best choice from the choices array based on their `votes` property.
// Gets a weighted random choice from the choices array based on their `votes` property.
// amountOfChoices specifies the maximum amount of choices to return, 1 or more.
// Choices are unique
// If a predicate is given, it will only filter choices following it, and will leave the rest in the list
function getBestChoice<T extends VotableObject>(choices: T[], amountOfChoices: number, filterLocked = false, predicate?: (choice: T) => void): T[] {
function getWeightedRandomChoice<T extends VotableObject>(choices: T[], amountOfChoices: number, filterLocked = false, predicate?: (choice: T) => void): T[] {
//trivial case: no need to go through the whole process
if (amountOfChoices >= choices.length) {
return choices;
@@ -292,22 +245,39 @@ function getBestChoice<T extends VotableObject>(choices: T[], amountOfChoices: n
}
//assign a weight to each choice
const choicesWithWeights: TWithWeight[] = shuffleArray(filteredChoices.map(choice => {
const boost = choice.reputation;
let totalWeight = 0;
const choicesWithWeights: TWithWeight[] = filteredChoices.map(choice => {
const boost = Math.min(choice.reputation, 4);
//The 3 makes -2 the minimum votes before being ignored completely
//this can be changed if this system increases in popularity.
const repFactor = choice.votes > 0 ? Math.max(1, choice.reputation + 1) : 1;
const weight = Math.exp(choice.votes * repFactor + 3 + boost);
totalWeight += Math.max(weight, 0);
const weight = choice.votes + boost;
return { ...choice, weight };
})).sort((a, b) => b.weight - a.weight);
});
// Nothing to filter for
if (amountOfChoices >= choicesWithWeights.length) {
return [...forceIncludedChoices, ...filteredChoices];
}
// Pick the top options
//iterate and find amountOfChoices choices
const chosen = [...forceIncludedChoices];
for (let i = 0; i < amountOfChoices; i++) {
while (amountOfChoices-- > 0) {
//weighted random draw of one element of choices
const randomNumber = Math.random() * totalWeight;
let stackWeight = choicesWithWeights[0].weight;
let i = 0;
while (stackWeight < randomNumber) {
stackWeight += choicesWithWeights[++i].weight;
}
//add it to the chosen ones and remove it from the choices before the next iteration
chosen.push(choicesWithWeights[i]);
totalWeight -= choicesWithWeights[i].weight;
choicesWithWeights.splice(i, 1);
}
return chosen;
@@ -316,20 +286,20 @@ function getBestChoice<T extends VotableObject>(choices: T[], amountOfChoices: n
async function chooseSegments(videoID: VideoID, service: Service, segments: DBSegment[], useCache: boolean): Promise<DBSegment[]> {
const fetchData = async () => await buildSegmentGroups(segments);
const groups = useCache && config.useCacheForSegmentGroups
const groups = useCache
? await QueryCacher.get(fetchData, skipSegmentGroupsKey(videoID, service))
: await fetchData();
// Filter for only 1 item for POI categories and Full video
let chosenGroups = getBestChoice(groups, 1, true, (choice) => choice.segments[0].actionType === ActionType.Full);
chosenGroups = getBestChoice(chosenGroups, 1, true, (choice) => choice.segments[0].actionType === ActionType.Poi);
return chosenGroups.map(// choose 1 good segment per group and return them
group => getBestChoice(group.segments, 1)[0]
let chosenGroups = getWeightedRandomChoice(groups, 1, true, (choice) => choice.segments[0].actionType === ActionType.Full);
chosenGroups = getWeightedRandomChoice(chosenGroups, 1, true, (choice) => choice.segments[0].actionType === ActionType.Poi);
return chosenGroups.map(//randomly choose 1 good segment per group and return them
group => getWeightedRandomChoice(group.segments, 1)[0]
);
}
//This function will find segments that are contained inside of eachother, called similar segments
//Only one similar time will be returned, based on its score
//Only one similar time will be returned, randomly generated based on the sqrt of votes.
//This allows new less voted items to still sometimes appear to give them a chance at getting votes.
//Segments with less than -1 votes are already ignored before this function is called
async function buildSegmentGroups(segments: DBSegment[]): Promise<OverlappingSegmentGroup[]> {
@@ -443,7 +413,7 @@ async function getSkipSegments(req: Request, res: Response): Promise<Response> {
await getEtag("skipSegments", (videoID as string), service)
.then(etag => res.set("ETag", etag))
.catch(() => ({}));
.catch(() => null);
return res.send(segments);
}

View File

@@ -17,14 +17,13 @@ export async function getSkipSegmentsByHash(req: Request, res: Response): Promis
if (parseResult.errors.length > 0) {
return res.status(400).send(parseResult.errors);
}
const { categories, actionTypes, trimUUIDs, requiredSegments, service } = parseResult;
const { categories, actionTypes, requiredSegments, service } = parseResult;
// Get all video id's that match hash prefix
const segments = await getSegmentsByHash(req, hashPrefix, categories, actionTypes, trimUUIDs, requiredSegments, service);
const segments = await getSegmentsByHash(req, hashPrefix, categories, actionTypes, requiredSegments, service);
try {
const hashKey = hashPrefix.length === 4 ? "skipSegmentsHash" : "skipSegmentsLargerHash";
await getEtag(hashKey, hashPrefix, service)
await getEtag("skipSegmentsHash", hashPrefix, service)
.then(etag => res.set("ETag", etag))
.catch(/* istanbul ignore next */ () => null);
const output = Object.entries(segments).map(([videoID, data]) => ({

View File

@@ -1,20 +1,19 @@
import { db, privateDB } from "../databases/databases";
import { db } from "../databases/databases";
import { Logger } from "../utils/logger";
import { Request, Response } from "express";
import os from "os";
import redis, { getRedisStats } from "../utils/redis";
import { promiseOrTimeout } from "../utils/promise";
import { Postgres } from "../databases/Postgres";
import { Server } from "http";
export async function getStatus(req: Request, res: Response, server: Server): Promise<Response> {
export async function getStatus(req: Request, res: Response): Promise<Response> {
const startTime = Date.now();
let value = req.params.value as string[] | string;
value = Array.isArray(value) ? value[0] : value;
let processTime, redisProcessTime = -1;
try {
const dbStartTime = Date.now();
const dbVersion = await promiseOrTimeout(db.prepare("get", "SELECT key, value FROM config where key = ?", ["version"]), 1000)
const dbVersion = await promiseOrTimeout(db.prepare("get", "SELECT key, value FROM config where key = ?", ["version"]), 5000)
.then(e => {
processTime = Date.now() - dbStartTime;
return e.value;
@@ -25,12 +24,12 @@ export async function getStatus(req: Request, res: Response, server: Server): Pr
});
let statusRequests: unknown = 0;
const redisStartTime = Date.now();
const numberRequests = await promiseOrTimeout(redis.increment("statusRequest"), 1000)
const numberRequests = await promiseOrTimeout(redis.increment("statusRequest"), 5000)
.then(e => {
redisProcessTime = Date.now() - redisStartTime;
return e;
}).catch(e => /* istanbul ignore next */ {
Logger.error(`status: redis increment timed out ${e}\nload: ${os.loadavg().slice(1)} with ${JSON.stringify(getRedisStats())}\n${JSON.stringify((db as Postgres)?.getStats?.())}`);
Logger.error(`status: redis increment timed out ${e}`);
return [-1];
});
statusRequests = numberRequests?.[0];
@@ -43,11 +42,9 @@ export async function getStatus(req: Request, res: Response, server: Server): Pr
processTime,
redisProcessTime,
loadavg: os.loadavg().slice(1), // only return 5 & 15 minute load average
connections: await new Promise((resolve) => server.getConnections((_, count) => resolve(count))),
statusRequests,
hostname: os.hostname(),
postgresStats: (db as Postgres)?.getStats?.(),
postgresPrivateStats: (privateDB as Postgres)?.getStats?.(),
redisStats: getRedisStats(),
};
return value ? res.send(JSON.stringify(statusValues[value])) : res.send(statusValues);

View File

@@ -1,49 +0,0 @@
import { db } from "../databases/databases";
import { Request, Response } from "express";
import { Logger } from "../utils/logger";
// Builds the DeArrow "top users" leaderboard: one row per user with the number
// of visible titles and thumbnails they have submitted.
// NOTE: `sortBy` is interpolated directly into the SQL, so it must be a trusted
// column name ("titleCount" or "thumbnailCount") validated by the caller —
// never raw user input.
async function generateTopUsersStats(sortBy: string) {
    const query = `SELECT COUNT(distinct "titles"."UUID") as "titleCount", COUNT(distinct "thumbnails"."UUID") as "thumbnailCount", COALESCE("userName", "titles"."userID") as "userName"
FROM "titles"
LEFT JOIN "titleVotes" ON "titles"."UUID" = "titleVotes"."UUID"
LEFT JOIN "userNames" ON "titles"."userID"="userNames"."userID"
LEFT JOIN "thumbnails" ON "titles"."userID" = "thumbnails"."userID"
LEFT JOIN "thumbnailVotes" ON "thumbnails"."UUID" = "thumbnailVotes"."UUID"
WHERE "titleVotes"."votes" > -1 AND "titleVotes"."shadowHidden" != 1
GROUP BY COALESCE("userName", "titles"."userID") HAVING SUM("titleVotes"."votes") > 2 OR SUM("thumbnailVotes"."votes") > 2
ORDER BY "${sortBy}" DESC LIMIT 100`;
    const rows = await db.prepare("all", query, []) as { titleCount: number, thumbnailCount: number, userName: string }[];

    // Reshape the DB rows into the public response format.
    const stats: { userName: string, titles: number, thumbnails: number }[] = [];
    for (const entry of rows) {
        stats.push({
            userName: entry.userName,
            titles: entry.titleCount,
            thumbnails: entry.thumbnailCount
        });
    }
    return stats;
}
/**
 * GET handler for the DeArrow top-contributors leaderboard.
 *
 * Query params:
 *   sortType — 0 to rank by title count, 1 to rank by thumbnail count.
 *
 * Responses: 200 with the stats array, 400 for an unknown sortType,
 * 503 when the database reports high load, 500 on query failure.
 */
export async function getTopBrandingUsers(req: Request, res: Response): Promise<Response> {
    // Explicit radix; NaN (missing/garbage param) falls through to the 400 branch.
    const sortType = parseInt(req.query.sortType as string, 10);

    let sortBy = "";
    if (sortType === 0) {
        sortBy = "titleCount";
    } else if (sortType === 1) {
        sortBy = "thumbnailCount";
    } else {
        // invalid request
        return res.sendStatus(400);
    }

    // Shed this expensive aggregate query when the database is under heavy load.
    if (db.highLoad()) {
        return res.status(503).send("Disabled for load reasons");
    }

    try {
        const stats = await generateTopUsersStats(sortBy);
        // send this result
        return res.send(stats);
    } catch (e) {
        Logger.error(e as string);
        return res.sendStatus(500);
    }
}

View File

@@ -3,7 +3,6 @@ import { createMemoryCache } from "../utils/createMemoryCache";
import { config } from "../config";
import { Request, Response } from "express";
import { validateCategories } from "../utils/parseParams";
import { Logger } from "../utils/logger";
const MILLISECONDS_IN_MINUTE = 60000;
// eslint-disable-next-line @typescript-eslint/no-misused-promises
@@ -75,13 +74,8 @@ export async function getTopCategoryUsers(req: Request, res: Response): Promise<
return res.sendStatus(400);
}
try {
const stats = await getTopCategoryUsersWithCache(sortBy, category);
const stats = await getTopCategoryUsersWithCache(sortBy, category);
//send this result
return res.send(stats);
} catch (e) {
Logger.error(e as string);
return res.sendStatus(500);
}
//send this result
return res.send(stats);
}

View File

@@ -2,7 +2,6 @@ import { db } from "../databases/databases";
import { createMemoryCache } from "../utils/createMemoryCache";
import { config } from "../config";
import { Request, Response } from "express";
import { Logger } from "../utils/logger";
const MILLISECONDS_IN_MINUTE = 60000;
// eslint-disable-next-line @typescript-eslint/no-misused-promises
@@ -93,13 +92,8 @@ export async function getTopUsers(req: Request, res: Response): Promise<Response
return res.status(503).send("Disabled for load reasons");
}
try {
const stats = await getTopUsersWithCache(sortBy, categoryStatsEnabled);
const stats = await getTopUsersWithCache(sortBy, categoryStatsEnabled);
//send this result
return res.send(stats);
} catch (e) {
Logger.error(e as string);
return res.sendStatus(500);
}
//send this result
return res.send(stats);
}

View File

@@ -3,7 +3,7 @@ import { config } from "../config";
import { Request, Response } from "express";
import axios from "axios";
import { Logger } from "../utils/logger";
import { getCWSUsers, getChromeUsers } from "../utils/getCWSUsers";
import { getCWSUsers } from "../utils/getCWSUsers";
// A cache of the number of chrome web store users
let chromeUsersCache = 0;
@@ -30,35 +30,30 @@ let lastFetch: DBStatsData = {
updateExtensionUsers();
export async function getTotalStats(req: Request, res: Response): Promise<void> {
try {
const countContributingUsers = Boolean(req.query?.countContributingUsers == "true");
const row = await getStats(countContributingUsers);
lastFetch = row;
const countContributingUsers = Boolean(req.query?.countContributingUsers == "true");
const row = await getStats(countContributingUsers);
lastFetch = row;
/* istanbul ignore if */
if (!row) res.sendStatus(500);
const extensionUsers = chromeUsersCache + firefoxUsersCache;
/* istanbul ignore if */
if (!row) res.sendStatus(500);
const extensionUsers = chromeUsersCache + firefoxUsersCache;
//send this result
res.send({
userCount: row.userCount ?? 0,
activeUsers: extensionUsers,
apiUsers: Math.max(apiUsersCache, extensionUsers),
viewCount: row.viewCount,
totalSubmissions: row.totalSubmissions,
minutesSaved: row.minutesSaved,
});
//send this result
res.send({
userCount: row.userCount ?? 0,
activeUsers: extensionUsers,
apiUsers: Math.max(apiUsersCache, extensionUsers),
viewCount: row.viewCount,
totalSubmissions: row.totalSubmissions,
minutesSaved: row.minutesSaved,
});
// Check if the cache should be updated (every ~14 hours)
const now = Date.now();
if (now - lastUserCountCheck > 5000000) {
lastUserCountCheck = now;
// Check if the cache should be updated (every ~14 hours)
const now = Date.now();
if (now - lastUserCountCheck > 5000000) {
lastUserCountCheck = now;
updateExtensionUsers();
}
} catch (e) {
Logger.error(e as string);
res.sendStatus(500);
updateExtensionUsers();
}
}
@@ -97,4 +92,29 @@ function updateExtensionUsers() {
getChromeUsers(chromeExtensionUrl)
.then(res => chromeUsersCache = res)
);
}
/* istanbul ignore next */
// Scrapes the Chrome Web Store listing page for the extension's install count.
// Resolves to the parsed count, or 0 when the page cannot be fetched or parsed
// (previously the unparseable branch resolved to an implicit `undefined`,
// which poisoned the `chromeUsersCache` sum with NaN).
function getChromeUsers(chromeExtensionUrl: string): Promise<number> {
    return axios.get(chromeExtensionUrl)
        .then(res => {
            const body = res.data;
            // 2021-01-05 markup being scraped:
            // [...]<span><meta itemprop="interactionCount" content="UserDownloads:100.000+"/><meta itemprop="opera[...]
            const matchingString = '"UserDownloads:';
            const userDownloadsStartIndex = body.indexOf(matchingString);
            /* istanbul ignore else */
            if (userDownloadsStartIndex >= 0) {
                const countStart = userDownloadsStartIndex + matchingString.length;
                const closingQuoteIndex = body.indexOf('"', countStart);
                // Strip EVERY thousands separator (store locale may use "," or ".")
                // before parsing; a single replace() only removed the first one,
                // so "1,000,000+" used to parse as 1000.
                const userDownloadsStr = body.substring(countStart, closingQuoteIndex).replace(/[,.]/g, "");
                const count = parseInt(userDownloadsStr, 10);
                return Number.isNaN(count) ? 0 : count;
            } else {
                // Page layout changed: force a refetch on the next status check.
                lastUserCountCheck = 0;
                return 0;
            }
        })
        .catch(/* istanbul ignore next */ () => {
            Logger.debug(`Failing to connect to ${chromeExtensionUrl}`);
            return 0;
        });
}

View File

@@ -1,7 +1,6 @@
import { db } from "../databases/databases";
import { Request, Response } from "express";
import { UserID } from "../types/user.model";
import { Logger } from "../utils/logger";
function getFuzzyUserID(userName: string): Promise<{userName: string, userID: UserID }[]> {
// escape [_ % \] to avoid ReDOS
@@ -38,22 +37,16 @@ export async function getUserID(req: Request, res: Response): Promise<Response>
// invalid request
return res.sendStatus(400);
}
const results = exactSearch
? await getExactUserID(userName)
: await getFuzzyUserID(userName);
try {
const results = exactSearch
? await getExactUserID(userName)
: await getFuzzyUserID(userName);
if (results === undefined || results === null) {
/* istanbul ignore next */
return res.sendStatus(500);
} else if (results.length === 0) {
return res.sendStatus(404);
} else {
return res.send(results);
}
} catch (e) {
Logger.error(e as string);
if (results === undefined || results === null) {
/* istanbul ignore next */
return res.sendStatus(500);
} else if (results.length === 0) {
return res.sendStatus(404);
} else {
return res.send(results);
}
}

View File

@@ -1,4 +1,4 @@
import { db, privateDB } from "../databases/databases";
import { db } from "../databases/databases";
import { getHashCache } from "../utils/getHashCache";
import { isUserVIP } from "../utils/isUserVIP";
import { Request, Response } from "express";
@@ -8,16 +8,14 @@ import { getReputation } from "../utils/reputation";
import { Category, SegmentUUID } from "../types/segments.model";
import { config } from "../config";
import { canSubmit } from "../utils/permissions";
import { isUserBanned } from "../utils/checkBan";
const maxRewardTime = config.maxRewardTimePerSegmentInSeconds;
async function dbGetSubmittedSegmentSummary(userID: HashedUserID): Promise<{ minutesSaved: number, segmentCount: number }> {
try {
const countShadowHidden = await isUserBanned(userID) ? 2 : 1; // if shadowbanned, count shadowhidden as well
const row = await db.prepare("get",
`SELECT SUM(CASE WHEN "actionType" = 'chapter' THEN 0 ELSE ((CASE WHEN "endTime" - "startTime" > ? THEN ? ELSE "endTime" - "startTime" END) / 60) * "views" END) as "minutesSaved",
count(*) as "segmentCount" FROM "sponsorTimes"
WHERE "userID" = ? AND "votes" > -2 AND "shadowHidden" != ?`, [maxRewardTime, maxRewardTime, userID, countShadowHidden], { useReplica: true });
WHERE "userID" = ? AND "votes" > -2 AND "shadowHidden" != 1`, [maxRewardTime, maxRewardTime, userID], { useReplica: true });
if (row.minutesSaved != null) {
return {
minutesSaved: row.minutesSaved,
@@ -72,7 +70,7 @@ async function dbGetIgnoredViewsForUser(userID: HashedUserID) {
async function dbGetWarningsForUser(userID: HashedUserID): Promise<number> {
try {
const row = await db.prepare("get", `SELECT COUNT(*) as total FROM "warnings" WHERE "userID" = ? AND "enabled" = 1 AND "type" = 0`, [userID], { useReplica: true });
const row = await db.prepare("get", `SELECT COUNT(*) as total FROM "warnings" WHERE "userID" = ? AND "enabled" = 1`, [userID], { useReplica: true });
return row?.total ?? 0;
} catch (err) /* istanbul ignore next */ {
Logger.error(`Couldn't get warnings for user ${userID}. returning 0`);
@@ -80,16 +78,6 @@ async function dbGetWarningsForUser(userID: HashedUserID): Promise<number> {
}
}
async function dbGetDeArrowWarningReasonForUser(userID: HashedUserID): Promise<number> {
try {
const row = await db.prepare("get", `SELECT reason FROM "warnings" WHERE "userID" = ? AND "enabled" = 1 AND "type" = 1`, [userID], { useReplica: true });
return row?.reason ?? 0;
} catch (err) /* istanbul ignore next */ {
Logger.error(`Couldn't get warnings for user ${userID}. returning 0`);
return 0;
}
}
async function dbGetLastSegmentForUser(userID: HashedUserID): Promise<SegmentUUID> {
try {
const row = await db.prepare("get", `SELECT "UUID" FROM "sponsorTimes" WHERE "userID" = ? ORDER BY "timeSubmitted" DESC LIMIT 1`, [userID], { useReplica: true });
@@ -101,7 +89,7 @@ async function dbGetLastSegmentForUser(userID: HashedUserID): Promise<SegmentUUI
async function dbGetActiveWarningReasonForUser(userID: HashedUserID): Promise<string> {
try {
const row = await db.prepare("get", `SELECT reason FROM "warnings" WHERE "userID" = ? AND "enabled" = 1 AND "type" = 0 ORDER BY "issueTime" DESC LIMIT 1`, [userID], { useReplica: true });
const row = await db.prepare("get", `SELECT reason FROM "warnings" WHERE "userID" = ? AND "enabled" = 1 ORDER BY "issueTime" DESC LIMIT 1`, [userID], { useReplica: true });
return row?.reason ?? "";
} catch (err) /* istanbul ignore next */ {
Logger.error(`Couldn't get reason for user ${userID}. returning blank`);
@@ -111,7 +99,8 @@ async function dbGetActiveWarningReasonForUser(userID: HashedUserID): Promise<st
async function dbGetBanned(userID: HashedUserID): Promise<boolean> {
try {
return await isUserBanned(userID);
const row = await db.prepare("get", `SELECT count(*) as "userCount" FROM "shadowBannedUsers" WHERE "userID" = ? LIMIT 1`, [userID], { useReplica: true });
return row?.userCount > 0 ?? false;
} catch (err) /* istanbul ignore next */ {
return false;
}
@@ -126,34 +115,6 @@ async function getPermissions(userID: HashedUserID): Promise<Record<string, bool
return result;
}
// Counts this user's DeArrow title submissions that have a non-negative vote
// total. Resolves to 0 when the user has none; resolves to null if the query
// itself fails (callers surface it as-is).
async function getTitleSubmissionCount(userID: HashedUserID): Promise<number> {
    const query = `SELECT COUNT(*) as "titleSubmissionCount" FROM "titles" JOIN "titleVotes" ON "titles"."UUID" = "titleVotes"."UUID" WHERE "titles"."userID" = ? AND "titleVotes"."votes" >= 0`;
    try {
        const result = await db.prepare("get", query, [userID], { useReplica: true });
        return result?.titleSubmissionCount ?? 0;
    } catch (err) /* istanbul ignore next */ {
        return null;
    }
}
// Counts this user's DeArrow thumbnail submissions that have a non-negative
// vote total. Resolves to 0 when the user has none; resolves to null if the
// query itself fails (callers surface it as-is).
async function getThumbnailSubmissionCount(userID: HashedUserID): Promise<number> {
    const query = `SELECT COUNT(*) as "thumbnailSubmissionCount" FROM "thumbnails" JOIN "thumbnailVotes" ON "thumbnails"."UUID" = "thumbnailVotes"."UUID" WHERE "thumbnails"."userID" = ? AND "thumbnailVotes"."votes" >= 0`;
    try {
        const result = await db.prepare("get", query, [userID], { useReplica: true });
        return result?.thumbnailSubmissionCount ?? 0;
    } catch (err) /* istanbul ignore next */ {
        return null;
    }
}
// Counts the distinct videos this user has cast casual votes on. Reads from
// the private database (casual votes are not in the public DB). Resolves to 0
// when the user has none; resolves to null if the query itself fails.
async function getCasualSubmissionCount(userID: HashedUserID): Promise<number> {
    const query = `SELECT COUNT(DISTINCT "videoID") as "casualSubmissionCount" FROM "casualVotes" WHERE "userID" = ?`;
    try {
        const result = await privateDB.prepare("get", query, [userID], { useReplica: true });
        return result?.casualSubmissionCount ?? 0;
    } catch (err) /* istanbul ignore next */ {
        return null;
    }
}
type cases = Record<string, any>
const executeIfFunction = (f: any) =>
@@ -174,16 +135,12 @@ const dbGetValue = (userID: HashedUserID, property: string): Promise<string|Segm
ignoredViewCount: () => dbGetIgnoredViewsForUser(userID),
warnings: () => dbGetWarningsForUser(userID),
warningReason: () => dbGetActiveWarningReasonForUser(userID),
deArrowWarningReason: () => dbGetDeArrowWarningReasonForUser(userID),
banned: () => dbGetBanned(userID),
reputation: () => getReputation(userID),
vip: () => isUserVIP(userID),
lastSegmentID: () => dbGetLastSegmentForUser(userID),
permissions: () => getPermissions(userID),
freeChaptersAccess: () => true,
titleSubmissionCount: () => getTitleSubmissionCount(userID),
thumbnailSubmissionCount: () => getThumbnailSubmissionCount(userID),
casualSubmissionCount: () => getCasualSubmissionCount(userID),
freeChaptersAccess: () => true
})("")(property);
};
@@ -193,8 +150,7 @@ async function getUserInfo(req: Request, res: Response): Promise<Response> {
const defaultProperties: string[] = ["userID", "userName", "minutesSaved", "segmentCount", "ignoredSegmentCount",
"viewCount", "ignoredViewCount", "warnings", "warningReason", "reputation",
"vip", "lastSegmentID"];
const allProperties: string[] = [...defaultProperties, "banned", "permissions", "freeChaptersAccess",
"ignoredSegmentCount", "titleSubmissionCount", "thumbnailSubmissionCount", "casualSubmissionCount", "deArrowWarningReason"];
const allProperties: string[] = [...defaultProperties, "banned", "permissions", "freeChaptersAccess"];
let paramValues: string[] = req.query.values
? JSON.parse(req.query.values as string)
: req.query.value
@@ -217,24 +173,15 @@ async function getUserInfo(req: Request, res: Response): Promise<Response> {
return res.status(400).send("Invalid userID or publicUserID parameter");
}
try {
const responseObj = {} as Record<string, string|SegmentUUID|number>;
for (const property of paramValues) {
responseObj[property] = await dbGetValue(hashedUserID, property);
}
// add minutesSaved and segmentCount after to avoid getting overwritten
if (paramValues.includes("minutesSaved") || paramValues.includes("segmentCount")) {
const segmentsSummary = await dbGetSubmittedSegmentSummary(hashedUserID);
responseObj["minutesSaved"] = segmentsSummary.minutesSaved;
responseObj["segmentCount"] = segmentsSummary.segmentCount;
}
return res.send(responseObj);
} catch (err) {
Logger.error(err as string);
return res.sendStatus(500);
const segmentsSummary = await dbGetSubmittedSegmentSummary(hashedUserID);
const responseObj = {} as Record<string, string|SegmentUUID|number>;
for (const property of paramValues) {
responseObj[property] = await dbGetValue(hashedUserID, property);
}
// add minutesSaved and segmentCount after to avoid getting overwritten
if (paramValues.includes("minutesSaved")) responseObj["minutesSaved"] = segmentsSummary.minutesSaved;
if (paramValues.includes("segmentCount")) responseObj["segmentCount"] = segmentsSummary.segmentCount;
return res.send(responseObj);
}
export async function endpoint(req: Request, res: Response): Promise<Response> {

View File

@@ -4,7 +4,6 @@ import { Request, Response } from "express";
import { HashedUserID, UserID } from "../types/user.model";
import { config } from "../config";
import { Logger } from "../utils/logger";
import { isUserBanned } from "../utils/checkBan";
type nestedObj = Record<string, Record<string, number>>;
const maxRewardTimePerSegmentInSeconds = config.maxRewardTimePerSegmentInSeconds ?? 86400;
@@ -35,14 +34,13 @@ async function dbGetUserSummary(userID: HashedUserID, fetchCategoryStats: boolea
`;
}
try {
const countShadowHidden = await isUserBanned(userID) ? 2 : 1; // if shadowbanned, count shadowhidden as well
const row = await db.prepare("get", `
SELECT SUM(CASE WHEN "actionType" = 'chapter' THEN 0 ELSE ((CASE WHEN "endTime" - "startTime" > ? THEN ? ELSE "endTime" - "startTime" END) / 60) * "views" END) as "minutesSaved",
${additionalQuery}
count(*) as "segmentCount"
FROM "sponsorTimes"
WHERE "userID" = ? AND "votes" > -2 AND "shadowHidden" != ?`,
[maxRewardTimePerSegmentInSeconds, maxRewardTimePerSegmentInSeconds, userID, countShadowHidden]);
WHERE "userID" = ? AND "votes" > -2 AND "shadowHidden" != 1`,
[maxRewardTimePerSegmentInSeconds, maxRewardTimePerSegmentInSeconds, userID]);
const source = (row.minutesSaved != null) ? row : {};
const handler = { get: (target: Record<string, any>, name: string) => target?.[name] || 0 };
const proxy = new Proxy(source, handler);

View File

@@ -1,28 +1,27 @@
import { Request, Response } from "express";
import { db } from "../databases/databases";
import { videoLabelsHashKey, videoLabelsKey, videoLabelsLargerHashKey } from "../utils/redisKeys";
import { videoLabelsHashKey, videoLabelsKey } from "../utils/redisKeys";
import { SBRecord } from "../types/lib.model";
import { ActionType, Category, DBSegment, Service, VideoID, VideoIDHash } from "../types/segments.model";
import { DBSegment, Segment, Service, VideoData, VideoID, VideoIDHash } from "../types/segments.model";
import { Logger } from "../utils/logger";
import { QueryCacher } from "../utils/queryCacher";
import { getService } from "../utils/getService";
interface FullVideoSegment {
category: Category;
}
interface FullVideoSegmentVideoData {
segments: FullVideoSegment[];
hasStartSegment: boolean;
}
function transformDBSegments(segments: DBSegment[]): FullVideoSegment[] {
function transformDBSegments(segments: DBSegment[]): Segment[] {
return segments.map((chosenSegment) => ({
category: chosenSegment.category
category: chosenSegment.category,
actionType: chosenSegment.actionType,
segment: [chosenSegment.startTime, chosenSegment.endTime],
UUID: chosenSegment.UUID,
locked: chosenSegment.locked,
votes: chosenSegment.votes,
videoDuration: chosenSegment.videoDuration,
userID: chosenSegment.userID,
description: chosenSegment.description
}));
}
async function getLabelsByVideoID(videoID: VideoID, service: Service): Promise<FullVideoSegmentVideoData> {
async function getLabelsByVideoID(videoID: VideoID, service: Service): Promise<Segment[]> {
try {
const segments: DBSegment[] = await getSegmentsFromDBByVideoID(videoID, service);
return chooseSegment(segments);
@@ -34,8 +33,8 @@ async function getLabelsByVideoID(videoID: VideoID, service: Service): Promise<F
}
}
async function getLabelsByHash(hashedVideoIDPrefix: VideoIDHash, service: Service, checkHasStartSegment: boolean): Promise<SBRecord<VideoID, FullVideoSegmentVideoData>> {
const segments: SBRecord<VideoID, FullVideoSegmentVideoData> = {};
async function getLabelsByHash(hashedVideoIDPrefix: VideoIDHash, service: Service): Promise<SBRecord<VideoID, VideoData>> {
const segments: SBRecord<VideoID, VideoData> = {};
try {
type SegmentWithHashPerVideoID = SBRecord<VideoID, { hash: VideoIDHash, segments: DBSegment[] }>;
@@ -54,13 +53,11 @@ async function getLabelsByHash(hashedVideoIDPrefix: VideoIDHash, service: Servic
}, {});
for (const [videoID, videoData] of Object.entries(segmentPerVideoID)) {
const result = chooseSegment(videoData.segments);
const data: FullVideoSegmentVideoData = {
segments: result.segments,
hasStartSegment: checkHasStartSegment ? result.hasStartSegment : undefined
const data: VideoData = {
segments: chooseSegment(videoData.segments),
};
if (data.segments.length > 0 || (data.hasStartSegment && checkHasStartSegment)) {
if (data.segments.length > 0) {
segments[videoID] = data;
}
}
@@ -77,14 +74,12 @@ async function getSegmentsFromDBByHash(hashedVideoIDPrefix: VideoIDHash, service
.prepare(
"all",
`SELECT "startTime", "endTime", "videoID", "votes", "locked", "UUID", "userID", "category", "actionType", "hashedVideoID", "description" FROM "sponsorTimes"
WHERE "hashedVideoID" LIKE ? AND "service" = ? AND "hidden" = 0 AND "shadowHidden" = 0`,
WHERE "hashedVideoID" LIKE ? AND "service" = ? AND "actionType" = 'full' AND "hidden" = 0 AND "shadowHidden" = 0`,
[`${hashedVideoIDPrefix}%`, service]
) as Promise<DBSegment[]>;
if (hashedVideoIDPrefix.length === 3) {
return await QueryCacher.get(fetchFromDB, videoLabelsHashKey(hashedVideoIDPrefix, service));
} else if (hashedVideoIDPrefix.length === 4) {
return await QueryCacher.get(fetchFromDB, videoLabelsLargerHashKey(hashedVideoIDPrefix, service));
}
return await fetchFromDB();
@@ -95,34 +90,22 @@ async function getSegmentsFromDBByVideoID(videoID: VideoID, service: Service): P
.prepare(
"all",
`SELECT "startTime", "endTime", "votes", "locked", "UUID", "userID", "category", "actionType", "description" FROM "sponsorTimes"
WHERE "videoID" = ? AND "service" = ? AND "hidden" = 0 AND "shadowHidden" = 0`,
WHERE "videoID" = ? AND "service" = ? AND "actionType" = 'full' AND "hidden" = 0 AND "shadowHidden" = 0`,
[videoID, service]
) as Promise<DBSegment[]>;
return await QueryCacher.get(fetchFromDB, videoLabelsKey(videoID, service));
}
function chooseSegment<T extends DBSegment>(choices: T[]): FullVideoSegmentVideoData {
function chooseSegment<T extends DBSegment>(choices: T[]): Segment[] {
// filter out -2 segments
choices = choices.filter((segment) => segment.votes > -2);
const hasStartSegment = !!choices.some((segment) => segment.startTime < 5
&& (segment.actionType === ActionType.Skip || segment.actionType === ActionType.Mute));
choices = choices.filter((segment) => segment.actionType === ActionType.Full);
const results = [];
// trivial decisions
if (choices.length === 0) {
return {
segments: [],
hasStartSegment
};
return [];
} else if (choices.length === 1) {
return {
segments: transformDBSegments(choices),
hasStartSegment
};
return transformDBSegments(choices);
}
// if locked, only choose from locked
const locked = choices.filter((segment) => segment.locked);
@@ -131,10 +114,7 @@ function chooseSegment<T extends DBSegment>(choices: T[]): FullVideoSegmentVideo
}
//no need to filter, just one label
if (choices.length === 1) {
return {
segments: transformDBSegments(choices),
hasStartSegment
};
return transformDBSegments(choices);
}
// sponsor > exclusive > selfpromo
const findCategory = (category: string) => choices.find((segment) => segment.category === category);
@@ -142,36 +122,25 @@ function chooseSegment<T extends DBSegment>(choices: T[]): FullVideoSegmentVideo
const categoryResult = findCategory("sponsor") ?? findCategory("exclusive_access") ?? findCategory("selfpromo");
if (categoryResult) results.push(categoryResult);
return {
segments: transformDBSegments(results),
hasStartSegment
};
return transformDBSegments(results);
}
async function handleGetLabel(req: Request, res: Response): Promise<FullVideoSegmentVideoData | FullVideoSegment[] | false> {
async function handleGetLabel(req: Request, res: Response): Promise<Segment[] | false> {
const videoID = req.query.videoID as VideoID;
if (!videoID) {
res.status(400).send("videoID not specified");
return false;
}
const hasStartSegment = req.query.hasStartSegment === "true";
const service = getService(req.query.service, req.body.service);
const segmentData = await getLabelsByVideoID(videoID, service);
const segments = segmentData.segments;
const segments = await getLabelsByVideoID(videoID, service);
if (!segments || segments.length === 0) {
res.sendStatus(404);
return false;
}
if (hasStartSegment) {
return segmentData;
} else {
return segments;
}
return segments;
}
async function endpoint(req: Request, res: Response): Promise<Response> {

View File

@@ -11,19 +11,16 @@ export async function getVideoLabelsByHash(req: Request, res: Response): Promise
}
hashPrefix = hashPrefix.toLowerCase() as VideoIDHash;
const checkHasStartSegment = req.query.hasStartSegment === "true";
const service: Service = getService(req.query.service, req.body.service);
// Get all video id's that match hash prefix
const segments = await getLabelsByHash(hashPrefix, service, checkHasStartSegment);
const segments = await getLabelsByHash(hashPrefix, service);
if (!segments) return res.status(404).json([]);
const output = Object.entries(segments).map(([videoID, data]) => ({
videoID,
segments: data.segments,
hasStartSegment: data.hasStartSegment
}));
return res.status(output.length === 0 ? 404 : 200).json(output);
}

View File

@@ -2,9 +2,9 @@ import { Request, Response } from "express";
import { config } from "../config";
import { db, privateDB } from "../databases/databases";
import { BrandingSubmission, BrandingUUID, TimeThumbnailSubmission, TitleSubmission } from "../types/branding.model";
import { BrandingSubmission, BrandingUUID, TimeThumbnailSubmission } from "../types/branding.model";
import { HashedIP, IPAddress, VideoID } from "../types/segments.model";
import { Feature, HashedUserID } from "../types/user.model";
import { HashedUserID } from "../types/user.model";
import { getHashCache } from "../utils/getHashCache";
import { getIP } from "../utils/getIP";
import { getService } from "../utils/getService";
@@ -12,26 +12,12 @@ import { isUserVIP } from "../utils/isUserVIP";
import { Logger } from "../utils/logger";
import crypto from "crypto";
import { QueryCacher } from "../utils/queryCacher";
import { acquireLock } from "../utils/redisLock";
import { hasFeature } from "../utils/features";
import { checkBanStatus } from "../utils/checkBan";
import axios from "axios";
import { getMaxResThumbnail } from "../utils/youtubeApi";
import { getVideoDetails } from "../utils/getVideoDetails";
import { canSubmitDeArrow } from "../utils/permissions";
import { parseUserAgent } from "../utils/userAgent";
import { isRequestInvalid } from "../utils/requestValidator";
enum BrandingType {
Title,
Thumbnail
}
enum BrandingVoteType {
Upvote = 1,
Downvote = 2
}
interface ExistingVote {
UUID: BrandingUUID;
type: number;
@@ -39,454 +25,125 @@ interface ExistingVote {
}
export async function postBranding(req: Request, res: Response) {
const { videoID, userID, title, thumbnail, autoLock, downvote, videoDuration, wasWarned, casualMode } = req.body as BrandingSubmission;
const { videoID, userID, title, thumbnail } = req.body as BrandingSubmission;
const service = getService(req.body.service);
const userAgent = req.body.userAgent ?? parseUserAgent(req.get("user-agent")) ?? "";
if (!videoID || !userID || userID.length < 30 || !service
|| ((!title || !title.title)
&& (!thumbnail || thumbnail.original == null
|| (!thumbnail.original && (thumbnail as TimeThumbnailSubmission).timestamp) == null))) {
|| (!thumbnail.original && !(thumbnail as TimeThumbnailSubmission).timestamp)))) {
res.status(400).send("Bad Request");
return;
}
try {
const hashedUserID = await getHashCache(userID);
const isVip = await isUserVIP(hashedUserID);
const shouldLock = isVip && autoLock !== false;
// const isVip = await isUserVIP(hashedUserID);
const isVip = false; // TODO: In future, reenable locks
const hashedVideoID = await getHashCache(videoID, 1);
const hashedIP = await getHashCache(getIP(req) + config.globalSalt as IPAddress);
const isBanned = await checkBanStatus(hashedUserID, hashedIP);
const matchedRule = isRequestInvalid({
userAgent,
userAgentHeader: req.headers["user-agent"],
videoDuration,
videoID,
userID,
service,
dearrow: {
title,
thumbnail,
downvote,
},
endpoint: "dearrow-postBranding",
});
if (matchedRule !== null) {
sendNewUserWebhook(config.discordRejectedNewUserWebhookURL, hashedUserID, videoID, userAgent, req, videoDuration, title, `Caught by rule: ${matchedRule}`);
Logger.warn(`Dearrow submission rejected by request validator: ${hashedUserID} ${videoID} ${videoDuration} ${userAgent} ${req.headers["user-agent"]} ${title.title} ${thumbnail.timestamp}`);
res.status(200).send("OK");
return;
}
// treat banned users as existing users who "can submit" for the purposes of these checks
// this is to avoid their titles from being logged and them taking up "new user" slots with every submission
const permission = isBanned ? {
canSubmit: true,
newUser: false,
reason: "",
} : await canSubmitDeArrow(hashedUserID);
if (!permission.canSubmit) {
Logger.warn(`New user trying to submit dearrow: ${hashedUserID} ${videoID} ${videoDuration} ${Object.keys(req.body)} ${userAgent} ${title?.title} ${req.headers["user-agent"]}`);
res.status(403).send(permission.reason);
return;
} else if (permission.newUser) {
sendNewUserWebhook(config.discordNewUserWebhookURL, hashedUserID, videoID, userAgent, req, videoDuration, title, undefined);
}
if (videoDuration && thumbnail && await checkForWrongVideoDuration(videoID, videoDuration)) {
res.status(403).send("YouTube is currently testing a new anti-adblock technique called server-side ad-injection. This causes skips and submissions to be offset by the duration of the ad. It seems that you are affected by this A/B test, so until a fix is developed, we cannot accept submissions from your device due to them potentially being inaccurate.");
return;
}
const lock = await acquireLock(`postBranding:${videoID}.${hashedUserID}`);
if (!lock.status) {
res.status(429).send("Vote already in progress");
return;
}
const now = Date.now();
const voteType: BrandingVoteType = downvote ? BrandingVoteType.Downvote : BrandingVoteType.Upvote;
if (title && !isVip && title.title.length > config.maxTitleLength) {
lock.unlock();
res.status(400).send("Your title is too long. Please keep titles concise.");
return;
}
let errorCode = 0;
const voteType = 1;
await Promise.all([(async () => {
if (title) {
// ignore original submissions from banned users - hiding those would cause issues
if (title.original && isBanned) return;
const existingUUID = (await db.prepare("get", `SELECT "UUID" from "titles" where "videoID" = ? AND "title" = ?`, [videoID, title.title]))?.UUID;
const existingIsLocked = !!existingUUID && (await db.prepare("get", `SELECT "locked" from "titleVotes" where "UUID" = ?`, [existingUUID]))?.locked;
if (existingUUID != undefined && isBanned) return; // ignore votes on existing details from banned users
if (downvote && existingIsLocked && !isVip) {
if (!isBanned) sendWebhooks(videoID, existingUUID, voteType, wasWarned, shouldLock).catch((e) => Logger.error(e));
errorCode = 403;
return;
}
const UUID = existingUUID || crypto.randomUUID();
await handleExistingVotes(BrandingType.Title, videoID, hashedUserID, UUID, hashedIP, voteType);
const existingVote = await handleExistingVotes(BrandingType.Title, videoID, hashedUserID, UUID, hashedIP, voteType);
if (existingUUID) {
await updateVoteTotals(BrandingType.Title, UUID, hashedUserID, shouldLock, !!downvote);
await updateVoteTotals(BrandingType.Title, existingVote, UUID, isVip);
} else {
if (downvote) {
throw new Error("Title submission doesn't exist");
}
await db.prepare("run", `INSERT INTO "titles" ("videoID", "title", "original", "userID", "service", "hashedVideoID", "timeSubmitted", "UUID") VALUES (?, ?, ?, ?, ?, ?, ?, ?)`,
[videoID, title.title, title.original ? 1 : 0, hashedUserID, service, hashedVideoID, now, UUID]);
await db.prepare("run", `INSERT INTO "titles" ("videoID", "title", "original", "userID", "service", "hashedVideoID", "timeSubmitted", "UUID", "casualMode", "userAgent") VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?)`,
[videoID, title.title, title.original ? 1 : 0, hashedUserID, service, hashedVideoID, now, UUID, casualMode ? 1 : 0, userAgent]);
const verificationValue = await getVerificationValue(hashedUserID, isVip);
await db.prepare("run", `INSERT INTO "titleVotes" ("UUID", "votes", "locked", "shadowHidden", "verification") VALUES (?, 0, ?, ?, ?);`,
[UUID, shouldLock ? 1 : 0, isBanned ? 1 : 0, verificationValue]);
await verifyOldSubmissions(hashedUserID, verificationValue);
await db.prepare("run", `INSERT INTO "titleVotes" ("UUID", "votes", "locked", "shadowHidden") VALUES (?, 0, ?, 0);`,
[UUID, isVip ? 1 : 0]);
}
if (isVip && !downvote && shouldLock) {
if (isVip) {
// unlock all other titles
await db.prepare("run", `UPDATE "titleVotes" as tv SET "locked" = 0 FROM "titles" t WHERE tv."UUID" = t."UUID" AND tv."UUID" != ? AND t."videoID" = ?`, [UUID, videoID]);
await db.prepare("run", `UPDATE "titleVotes" SET "locked" = 0 WHERE "UUID" != ? AND "videoID" = ?`, [UUID, videoID]);
}
if (!isBanned) sendWebhooks(videoID, UUID, voteType, wasWarned, shouldLock).catch((e) => Logger.error(e));
}
})(), (async () => {
if (thumbnail) {
// ignore original submissions from banned users - hiding those would cause issues
if (thumbnail.original && (isBanned || !await canSubmitOriginal(hashedUserID, isVip))) return;
const existingUUID = thumbnail.original
? (await db.prepare("get", `SELECT "UUID" from "thumbnails" where "videoID" = ? AND "original" = 1`, [videoID]))?.UUID
: (await db.prepare("get", `SELECT "thumbnails"."UUID" from "thumbnailTimestamps" JOIN "thumbnails" ON "thumbnails"."UUID" = "thumbnailTimestamps"."UUID"
WHERE "thumbnailTimestamps"."timestamp" = ? AND "thumbnails"."videoID" = ?`, [(thumbnail as TimeThumbnailSubmission).timestamp, videoID]))?.UUID;
const existingIsLocked = !!existingUUID && (await db.prepare("get", `SELECT "locked" from "thumbnailVotes" where "UUID" = ?`, [existingUUID]))?.locked;
if (existingUUID != undefined && isBanned) return; // ignore votes on existing details from banned users
if (downvote && existingIsLocked && !isVip) {
errorCode = 403;
return;
}
const UUID = existingUUID || crypto.randomUUID();
await handleExistingVotes(BrandingType.Thumbnail, videoID, hashedUserID, UUID, hashedIP, voteType);
const existingVote = await handleExistingVotes(BrandingType.Thumbnail, videoID, hashedUserID, UUID, hashedIP, voteType);
if (existingUUID) {
await updateVoteTotals(BrandingType.Thumbnail, UUID, hashedUserID, shouldLock, !!downvote);
await updateVoteTotals(BrandingType.Thumbnail, existingVote, UUID, isVip);
} else {
if (downvote) {
throw new Error("Thumbnail submission doesn't exist");
}
await db.prepare("run", `INSERT INTO "thumbnails" ("videoID", "original", "userID", "service", "hashedVideoID", "timeSubmitted", "UUID") VALUES (?, ?, ?, ?, ?, ?, ?)`,
[videoID, thumbnail.original ? 1 : 0, hashedUserID, service, hashedVideoID, now, UUID]);
await db.prepare("run", `INSERT INTO "thumbnails" ("videoID", "original", "userID", "service", "hashedVideoID", "timeSubmitted", "UUID", "casualMode", "userAgent") VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?)`,
[videoID, thumbnail.original ? 1 : 0, hashedUserID, service, hashedVideoID, now, UUID, casualMode ? 1 : 0, userAgent]);
await db.prepare("run", `INSERT INTO "thumbnailVotes" ("UUID", "votes", "locked", "shadowHidden") VALUES (?, 0, ?, ?)`,
[UUID, shouldLock ? 1 : 0, isBanned ? 1 : 0]);
await db.prepare("run", `INSERT INTO "thumbnailVotes" ("UUID", "votes", "locked", "shadowHidden") VALUES (?, 0, ?, 0)`,
[UUID, isVip ? 1 : 0]);
if (!thumbnail.original) {
await db.prepare("run", `INSERT INTO "thumbnailTimestamps" ("UUID", "timestamp") VALUES (?, ?)`,
[UUID, (thumbnail as TimeThumbnailSubmission).timestamp]);
}
}
if (isVip && !downvote && shouldLock) {
// unlock all other titles
await db.prepare("run", `UPDATE "thumbnailVotes" as tv SET "locked" = 0 FROM "thumbnails" t WHERE tv."UUID" = t."UUID" AND tv."UUID" != ? AND t."videoID" = ?`, [UUID, videoID]);
if (isVip) {
// unlock all other titles
await db.prepare("run", `UPDATE "thumbnailVotes" SET "locked" = 0 WHERE "UUID" != ? AND "videoID" = ?`, [UUID, videoID]);
}
}
}
})()]);
QueryCacher.clearBrandingCache({ videoID, hashedVideoID, service });
if (errorCode) {
res.status(errorCode).send();
} else {
res.status(200).send("OK");
}
lock.unlock();
res.status(200).send("OK");
} catch (e) {
Logger.error(e as string);
res.status(500).send("Internal Server Error");
}
}
/**
 * Fire-and-forget Discord embed announcing a new (or rejected) DeArrow submitter.
 * Does nothing when the webhook URL is not configured; delivery failures are
 * logged, never thrown.
 *
 * @param webhookUrl Discord webhook endpoint; falsy disables the notification.
 * @param hashedUserID Hashed submitter ID, used as the embed title.
 * @param videoID Video being branded; linked and thumbnailed in the embed.
 * @param userAgent Parsed user agent string (kept as `any` to match existing callers).
 * @param req Incoming request — raw body/header user agents are included for comparison.
 * @param videoDuration Client-reported video duration.
 * @param title The submitted title, if any.
 * @param footerText Optional footer (e.g. rejection reason); `undefined` omits the footer.
 */
function sendNewUserWebhook(webhookUrl: string, hashedUserID: HashedUserID, videoID: VideoID, userAgent: any, req: Request, videoDuration: number, title: TitleSubmission, footerText: string | undefined) {
    if (!webhookUrl) return;

    // Intentionally not awaited — webhook delivery must not block the HTTP response.
    // `void` marks the promise as deliberately unhandled (satisfies the
    // @typescript-eslint/no-floating-promises rule this repo enables); errors land in .catch.
    void axios.post(webhookUrl, {
        "embeds": [{
            "title": hashedUserID,
            "url": `https://www.youtube.com/watch?v=${videoID}`,
            "description": `**User Agent**: ${userAgent}\
\n**Sent User Agent**: ${req.body.userAgent}\
\n**Real User Agent**: ${req.headers["user-agent"]}\
\n**Video Duration**: ${videoDuration}\
\n**Title**: ${title?.title}`,
            "color": 1184701,
            "thumbnail": {
                "url": getMaxResThumbnail(videoID),
            },
            // Discord drops the footer entirely when it is null.
            "footer": footerText === undefined ? null : {
                "text": footerText,
            },
        }],
    })
        .then(res => {
            if (res.status >= 400) {
                Logger.error("Error sending reported submission Discord hook");
                Logger.error(JSON.stringify((res.data)));
                Logger.error("\n");
            }
        })
        .catch(err => {
            Logger.error("Failed to send reported submission Discord hook.");
            Logger.error(JSON.stringify(err));
            Logger.error("\n");
        });
}
/**
 * Finds the user's existing vote, if any; when it targets a different submission,
 * undoes it and re-points it at the new submission. If no vote exists, adds one.
 */
// NOTE(review): this span appears to be diff residue — two function signatures occur
// back-to-back and two implementations are interleaved below (a loop that undoes every
// matching prior vote, and a single-row "re-point the vote" variant). It does not parse
// as-is; reconcile against version control rather than editing it in place.
async function handleExistingVotes(type: BrandingType, videoID: VideoID,
// (older signature variant — voteType typed as BrandingVoteType, no declared return type)
hashedUserID: HashedUserID, UUID: BrandingUUID, hashedIP: HashedIP, voteType: BrandingVoteType) {
// (newer signature variant — voteType widened to number, returns the prior vote row)
hashedUserID: HashedUserID, UUID: BrandingUUID, hashedIP: HashedIP, voteType: number): Promise<ExistingVote> {
// Title votes and thumbnail votes live in separate private-DB tables.
const table = type === BrandingType.Title ? `"titleVotes"` : `"thumbnailVotes"`;
const idsDealtWith: BrandingUUID[] = [];
// Either votes of the same type, or on the same submission (undo a downvote)
const existingVotes = await privateDB.prepare("all", `SELECT "id", "UUID", "type" from ${table} where "videoID" = ? AND "userID" = ? AND ("type" = ? OR "UUID" = ?)`, [videoID, hashedUserID, voteType, UUID]) as ExistingVote[];
if (existingVotes.length > 0) {
// Only one upvote per video
for (const existingVote of existingVotes) {
// For downvotes, only undo for this specific submission (multiple downvotes on one submission not allowed)
if (voteType === BrandingVoteType.Downvote && existingVote.UUID !== UUID) continue;
switch (existingVote.type) {
case BrandingVoteType.Upvote:
// Old case where there are duplicate rows in private db
if (!idsDealtWith.includes(existingVote.UUID)) {
idsDealtWith.push(existingVote.UUID);
await db.prepare("run", `UPDATE ${table} SET "votes" = "votes" - 1 WHERE "UUID" = ?`, [existingVote.UUID]);
}
await privateDB.prepare("run", `DELETE FROM ${table} WHERE "id" = ?`, [existingVote.id]);
break;
case BrandingVoteType.Downvote: {
// Undo the public downvote tally, then drop the private-DB vote row.
await db.prepare("run", `UPDATE ${table} SET "downvotes" = "downvotes" - 1 WHERE "UUID" = ?`, [existingVote.UUID]);
await privateDB.prepare("run", `DELETE FROM ${table} WHERE "id" = ?`, [existingVote.id]);
break;
}
}
// (from here on: the single-row variant of the same logic, from the other diff side)
const existingVote = await privateDB.prepare("get", `SELECT "id", "UUID", "type" from ${table} where "videoID" = ? AND "userID" = ?`, [videoID, hashedUserID]);
if (existingVote && existingVote.UUID !== UUID) {
// A prior vote on a different submission: first undo its public upvote (type 1)...
if (existingVote.type === 1) {
await db.prepare("run", `UPDATE ${table} SET "votes" = "votes" - 1 WHERE "UUID" = ?`, [existingVote.UUID]);
}
// ...then re-point the private-DB row at the new submission and vote type.
await privateDB.prepare("run", `UPDATE ${table} SET "type" = ?, "UUID" = ? WHERE "id" = ?`, [voteType, UUID, existingVote.id]);
} else if (!existingVote) {
// First vote by this user on this video: record it.
await privateDB.prepare("run", `INSERT INTO ${table} ("videoID", "UUID", "userID", "hashedIP", "type") VALUES (?, ?, ?, ?, ?)`,
[videoID, UUID, hashedUserID, hashedIP, voteType]);
}
// (duplicate INSERT — the same statement from the other side of the diff)
await privateDB.prepare("run", `INSERT INTO ${table} ("videoID", "UUID", "userID", "hashedIP", "type") VALUES (?, ?, ?, ?, ?)`,
[videoID, UUID, hashedUserID, hashedIP, voteType]);
return existingVote;
}
/**
* Only called if an existing vote exists.
* Will update public vote totals and locked status.
*/
// NOTE(review): diff residue again — two signatures appear back-to-back and the bodies of
// both versions are interleaved (old: shouldLock/downvote parameters; new: existingVote/isVip).
// This span does not parse as-is; reconcile against version control before editing.
async function updateVoteTotals(type: BrandingType, UUID: BrandingUUID, userID: HashedUserID, shouldLock: boolean, downvote: boolean): Promise<void> {
async function updateVoteTotals(type: BrandingType, existingVote: ExistingVote, UUID: BrandingUUID, isVip: boolean): Promise<void> {
// Newer variant only adjusts totals when a prior vote row was actually found.
if (!existingVote) return;
// Title and thumbnail votes/submissions live in parallel table pairs.
const table = type === BrandingType.Title ? `"titleVotes"` : `"thumbnailVotes"`;
const table2 = type === BrandingType.Title ? `"titles"` : `"thumbnails"`;
if (downvote) {
// Only downvote if it is not their submission
const isUsersSubmission = (await db.prepare("get", `SELECT "userID" FROM ${table2} WHERE "UUID" = ?`, [UUID]))?.userID === userID;
if (!isUsersSubmission) {
await db.prepare("run", `UPDATE ${table} SET "downvotes" = "downvotes" + 1 WHERE "UUID" = ?`, [UUID]);
}
} else {
// Don't upvote if we vote on the same submission
if (!existingVote || existingVote.UUID !== UUID) {
await db.prepare("run", `UPDATE ${table} SET "votes" = "votes" + 1 WHERE "UUID" = ?`, [UUID]);
// An upvote on a title can push its submitter over the verification threshold.
if (type === BrandingType.Title) {
const votedSubmitterUserID = (await db.prepare("get", `SELECT "userID" FROM ${table2} WHERE "UUID" = ?`, [UUID]))?.userID;
if (votedSubmitterUserID) {
await verifyOldSubmissions(votedSubmitterUserID, await getVerificationValue(votedSubmitterUserID, await isUserVIP(votedSubmitterUserID)));
}
}
}
if (shouldLock) {
// VIP downvote removes the submission outright; VIP upvote locks it in place.
if (downvote) {
await db.prepare("run", `UPDATE ${table} SET "removed" = 1 WHERE "UUID" = ?`, [UUID]);
} else {
await db.prepare("run", `UPDATE ${table} SET "locked" = 1, "removed" = 0 WHERE "UUID" = ?`, [UUID]);
}
// (older variant of the lock update, from the other diff side)
if (isVip) {
await db.prepare("run", `UPDATE ${table} SET "locked" = 1 WHERE "UUID" = ?`, [UUID]);
}
}
/**
 * Computes a user's title-verification level: 0 (verified) when they have at least
 * one video whose best title reached a net vote of 1, are a VIP, or hold the
 * DeArrowTitleSubmitter feature; otherwise -1 (unverified).
 */
export async function getVerificationValue(hashedUserID: HashedUserID, isVip: boolean): Promise<number> {
    // Sum, per video, the highest vote count among this user's titles.
    const row = await db.prepare("get", `SELECT SUM("maxVotes") as "voteSum" FROM (SELECT MAX("votes") as "maxVotes" from "titles" JOIN "titleVotes" ON "titles"."UUID" = "titleVotes"."UUID" WHERE "titles"."userID" = ? GROUP BY "titles"."videoID") t`, [hashedUserID]);

    const verified = row.voteSum >= 1
        || isVip
        || await hasFeature(hashedUserID, Feature.DeArrowTitleSubmitter);
    return verified ? 0 : -1;
}
/**
 * Promotes a user's previously-unverified title submissions to the given
 * verification level and invalidates the branding cache for each affected video.
 * Negative verification levels are left untouched.
 */
export async function verifyOldSubmissions(hashedUserID: HashedUserID, verification: number): Promise<void> {
    if (verification < 0) return;

    const unverifiedSubmissions = await db.prepare("all", `SELECT "videoID", "hashedVideoID", "service" FROM "titles" JOIN "titleVotes" ON "titles"."UUID" = "titleVotes"."UUID" WHERE "titles"."userID" = ? AND "titleVotes"."verification" < ? GROUP BY "videoID", "hashedVideoID", "service"`, [hashedUserID, verification]);
    if (unverifiedSubmissions.length === 0) return;

    // Drop cached branding for every video whose titles are about to change.
    for (const { videoID, hashedVideoID, service } of unverifiedSubmissions) {
        QueryCacher.clearBrandingCache({
            videoID,
            hashedVideoID,
            service
        });
    }

    await db.prepare("run", `UPDATE "titleVotes" as tv SET "verification" = ? FROM "titles" WHERE "titles"."UUID" = tv."UUID" AND "titles"."userID" = ? AND tv."verification" < ?`, [verification, hashedUserID, verification]);
}
/**
 * Whether this user may submit "original thumbnail" entries. VIPs always may;
 * everyone else needs a track record: more than one upvoted custom thumbnail,
 * more than one custom thumbnail overall, and originals amounting to less than
 * 40% of their custom-thumbnail count.
 */
async function canSubmitOriginal(hashedUserID: HashedUserID, isVip: boolean): Promise<boolean> {
    const upvoted = (await db.prepare("get", `SELECT count(*) as "upvotedThumbs" FROM "thumbnails" JOIN "thumbnailVotes" ON "thumbnails"."UUID" = "thumbnailVotes"."UUID" WHERE "thumbnailVotes"."votes" > 0 AND "thumbnails"."original" = 0 AND "thumbnails"."userID" = ?`, [hashedUserID])).upvotedThumbs;
    const custom = (await db.prepare("get", `SELECT count(*) as "customThumbs" FROM "thumbnails" JOIN "thumbnailVotes" ON "thumbnails"."UUID" = "thumbnailVotes"."UUID" WHERE "thumbnailVotes"."votes" >= 0 AND "thumbnails"."original" = 0 AND "thumbnails"."userID" = ?`, [hashedUserID])).customThumbs;
    const original = (await db.prepare("get", `SELECT count(*) as "originalThumbs" FROM "thumbnails" JOIN "thumbnailVotes" ON "thumbnails"."UUID" = "thumbnailVotes"."UUID" WHERE "thumbnailVotes"."votes" >= 0 AND "thumbnails"."original" = 1 AND "thumbnails"."userID" = ?`, [hashedUserID])).originalThumbs;

    // Division by zero yields Infinity/NaN, which fails the < 0.4 test — same
    // outcome as the original short-circuit since trackRecord is false then too.
    const trackRecord = upvoted > 1 && custom > 1;
    const mostlyCustom = original / custom < 0.4;
    return isVip || (trackRecord && mostlyCustom);
}
/** POSTs a single embed to a Discord webhook; failures are logged, never thrown. */
function sendDeArrowDiscordEmbed(webhookUrl: string, embed: Record<string, unknown>): void {
    // Fire-and-forget by design: `void` marks the promise as intentionally unhandled
    // (matches this repo's no-floating-promises lint rule).
    void axios.post(webhookUrl, {
        "embeds": [embed],
    })
        .then(res => {
            if (res.status >= 400) {
                Logger.error("Error sending reported submission Discord hook");
                Logger.error(JSON.stringify((res.data)));
                Logger.error("\n");
            }
        })
        .catch(err => {
            Logger.error("Failed to send reported submission Discord hook.");
            Logger.error(JSON.stringify(err));
            Logger.error("\n");
        });
}

/**
 * Sends Discord moderation webhooks after a DeArrow title vote:
 *  - an upvote from a previously-warned user,
 *  - an unlocked title overtaking a locked one by score,
 *  - a downvote (or VIP removal) of a locked title.
 * The three formerly-duplicated axios post/then/catch blocks are consolidated
 * into sendDeArrowDiscordEmbed above; behavior and payloads are unchanged.
 */
async function sendWebhooks(videoID: VideoID, UUID: BrandingUUID, voteType: BrandingVoteType, wasWarned: boolean, vipAction: boolean) {
    const currentSubmission = await db.prepare(
        "get",
        `SELECT
            "titles"."title",
            "titleVotes"."locked",
            "titles"."userID",
            "titleVotes"."votes"-"titleVotes"."downvotes"+"titleVotes"."verification" AS "score"
        FROM "titles" JOIN "titleVotes" ON "titles"."UUID" = "titleVotes"."UUID"
        WHERE "titles"."UUID" = ?`,
        [UUID]);

    // Upvote from a user who has an active warning — surface it to moderators.
    if (wasWarned && voteType === BrandingVoteType.Upvote) {
        const data = await getVideoDetails(videoID);
        sendDeArrowDiscordEmbed(config.discordDeArrowWarnedWebhookURL, {
            "title": data?.title,
            "url": `https://www.youtube.com/watch?v=${videoID}`,
            "description": `**Submitted title:** ${currentSubmission.title}\
\n\n**Submitted by:** ${currentSubmission.userID}`,
            "color": 10813440,
            "thumbnail": {
                "url": getMaxResThumbnail(videoID),
            },
        });
    }

    // Unlocked title getting more upvotes than the locked one
    if (voteType === BrandingVoteType.Upvote) {
        const lockedSubmission = await db.prepare(
            "get",
            `SELECT
                "titles"."title",
                "titles"."userID",
                "titleVotes"."votes"-"titleVotes"."downvotes"+"titleVotes"."verification" AS "score"
            FROM "titles" JOIN "titleVotes" ON "titles"."UUID" = "titleVotes"."UUID"
            WHERE "titles"."videoID" = ?
                AND "titles"."UUID" != ?
                AND "titleVotes"."locked" = 1`,
            [videoID, UUID]);

        // Time to warn that there may be an issue
        if (lockedSubmission && currentSubmission.score - lockedSubmission.score > 2) {
            const usernameRow = await db.prepare("get", `SELECT "userName" FROM "userNames" WHERE "userID" = ?`, [lockedSubmission.userID]);
            const data = await getVideoDetails(videoID);
            sendDeArrowDiscordEmbed(config.discordDeArrowLockedWebhookURL, {
                "title": data?.title,
                "url": `https://www.youtube.com/watch?v=${videoID}`,
                "description": `**${lockedSubmission.score}** score vs **${currentSubmission.score}**\
\n\n**Locked title:** ${lockedSubmission.title}\
\n**New title:** ${currentSubmission.title}\
\n\n**Submitted by:** ${usernameRow?.userName ?? ""}\n${lockedSubmission.userID}`,
                "color": 10813440,
                "thumbnail": {
                    "url": getMaxResThumbnail(videoID),
                },
            });
        }
    }

    // Downvotes on locked title
    if (voteType === BrandingVoteType.Downvote && currentSubmission.locked === 1) {
        const usernameRow = await db.prepare("get", `SELECT "userName" FROM "userNames" WHERE "userID" = ?`, [currentSubmission.userID]);
        const data = await getVideoDetails(videoID);
        sendDeArrowDiscordEmbed(config.discordDeArrowLockedWebhookURL, {
            "title": data?.title,
            "url": `https://www.youtube.com/watch?v=${videoID}`,
            "description": `Locked title ${vipAction ? "was removed by a VIP" : `with **${currentSubmission.score}** score received a downvote`}\
\n\n**Locked title:** ${currentSubmission.title}\
\n**Submitted by:** ${usernameRow?.userName ?? ""}\n${currentSubmission.userID}`,
            "color": 10813440,
            "thumbnail": {
                "url": getMaxResThumbnail(videoID),
            },
        });
    }
}
/**
 * Heuristic for YouTube server-side ad injection: flags a submission when both
 * the API-reported duration and the client-reported duration look plausible
 * (> 2s) but disagree by more than 3 seconds.
 */
async function checkForWrongVideoDuration(videoID: VideoID, duration: number): Promise<boolean> {
    const apiVideoDetails = await getVideoDetails(videoID, true);
    const apiDuration = apiVideoDetails?.duration;

    // Return a real boolean: the original returned the raw `&&` chain, which
    // evaluates to `undefined` (not `false`) when apiDuration is missing and
    // violates the declared Promise<boolean> contract under strict checks.
    return apiDuration != null && apiDuration > 2 && duration > 2 && Math.abs(apiDuration - duration) > 3;
}
}

View File

@@ -1,153 +0,0 @@
import { Request, Response } from "express";
import { config } from "../config";
import { db, privateDB } from "../databases/databases";
import { BrandingUUID, CasualCategory, CasualVoteSubmission } from "../types/branding.model";
import { HashedIP, IPAddress, Service, VideoID } from "../types/segments.model";
import { HashedUserID } from "../types/user.model";
import { getHashCache } from "../utils/getHashCache";
import { getIP } from "../utils/getIP";
import { getService } from "../utils/getService";
import { Logger } from "../utils/logger";
import crypto from "crypto";
import { QueryCacher } from "../utils/queryCacher";
import { acquireLock } from "../utils/redisLock";
import { checkBanStatus } from "../utils/checkBan";
import { canSubmitDeArrow } from "../utils/permissions";
import { isRequestInvalid } from "../utils/requestValidator";
import { parseUserAgent } from "../utils/userAgent";
/** A prior vote row fetched from the private database's "casualVotes" table. */
interface ExistingVote {
// Public identifier of the submission the vote targets.
UUID: BrandingUUID;
// Numeric vote-type flag — NOTE(review): some lookups below select only "UUID",
// so this field may be absent on rows cast to this shape; verify before relying on it.
type: number;
}
/**
 * POST handler for casual-mode category votes (or a downvote) on a video.
 * Responds 400 on malformed input, 403 when the user may not submit, 429 when a
 * vote for the same video/user is already in flight, otherwise 200 "OK"
 * (including fake successes for shadow-banned users and validator rejections).
 */
export async function postCasual(req: Request, res: Response) {
    const { videoID, userID, downvote } = req.body as CasualVoteSubmission;
    const userAgent = req.body.userAgent ?? parseUserAgent(req.get("user-agent")) ?? "";
    let categories = req.body.categories as CasualCategory[];
    const title = (req.body.title as string)?.toLowerCase();
    const service = getService(req.body.service);

    if (downvote) {
        // A downvote ignores any sent categories and is stored under its own category.
        categories = ["downvote" as CasualCategory];
    } else if (!Array.isArray(categories) || !categories.every((c) => config.casualCategoryList.includes(c))) {
        // Check Array.isArray before .every(): the original dereferenced a possibly
        // missing/non-array `categories` here and crashed (500) instead of returning 400.
        return res.status(400).send("Invalid category");
    }

    if (!videoID || !userID || userID.length < 30 || !service || !categories || !Array.isArray(categories)) {
        return res.status(400).send("Bad Request");
    }

    if (isRequestInvalid({
        userID,
        videoID,
        userAgent,
        userAgentHeader: req.headers["user-agent"],
        casualCategories: categories,
        service,
        endpoint: "dearrow-postCasual",
    })) {
        Logger.warn(`Casual vote rejected by request validator: ${userAgent} ${req.headers["user-agent"]} ${categories} ${service} ${videoID}`);
        // Deliberately report success so abusive clients cannot probe the validator.
        return res.status(200).send("OK");
    }

    try {
        const hashedUserID = await getHashCache(userID);
        const hashedVideoID = await getHashCache(videoID, 1);
        const hashedIP = await getHashCache(getIP(req) + config.globalSalt as IPAddress);
        const isBanned = await checkBanStatus(hashedUserID, hashedIP);

        const permission = await canSubmitDeArrow(hashedUserID);
        if (!permission.canSubmit) {
            res.status(403).send(permission.reason);
            return;
        }

        const lock = await acquireLock(`postCasual:${videoID}.${hashedUserID}`);
        if (!lock.status) {
            res.status(429).send("Vote already in progress");
            return;
        }

        if (isBanned) {
            // Shadow-banned users get a fake success. Release the lock first — the
            // original returned here without unlocking, leaking the lock until expiry.
            lock.unlock();
            return res.status(200).send("OK");
        }

        // Resolve the (videoID, title) pair to a stable titleID, creating a new
        // casualVoteTitles row when this exact title has not been seen before.
        let titleID = 0;
        if (title) {
            // See if title needs to be added
            const titles = await db.prepare("all", `SELECT "title", "id" from "casualVoteTitles" WHERE "videoID" = ? AND "service" = ? ORDER BY "id"`, [videoID, service]) as { title: string, id: number }[];
            if (titles.length > 0) {
                const existingTitle = titles.find((t) => t.title === title);
                if (existingTitle) {
                    titleID = existingTitle.id;
                } else {
                    titleID = titles[titles.length - 1].id + 1;
                    await db.prepare("run", `INSERT INTO "casualVoteTitles" ("videoID", "service", "hashedVideoID", "id", "title") VALUES (?, ?, ?, ?, ?)`, [videoID, service, hashedVideoID, titleID, title]);
                }
            } else {
                await db.prepare("run", `INSERT INTO "casualVoteTitles" ("videoID", "service", "hashedVideoID", "id", "title") VALUES (?, ?, ?, ?, ?)`, [videoID, service, hashedVideoID, titleID, title]);
            }
        } else {
            // No title sent: attach the vote to the most recent known title, if any.
            const titles = await db.prepare("all", `SELECT "title", "id" from "casualVoteTitles" WHERE "videoID" = ? AND "service" = ? ORDER BY "id"`, [videoID, service]) as { title: string, id: number }[];
            if (titles.length > 0) {
                titleID = titles[titles.length - 1].id;
            }
        }

        const now = Date.now();
        for (const category of categories) {
            const existingUUID = (await db.prepare("get", `SELECT "UUID" from "casualVotes" where "videoID" = ? AND "service" = ? AND "titleID" = ? AND "category" = ?`, [videoID, service, titleID, category]))?.UUID;
            const UUID = existingUUID || crypto.randomUUID();

            // True when this user already cast this exact vote — no tally change then.
            const alreadyVotedTheSame = await handleExistingVotes(videoID, service, titleID, hashedUserID, hashedIP, category, downvote, now);
            if (existingUUID) {
                if (!alreadyVotedTheSame) {
                    await db.prepare("run", `UPDATE "casualVotes" SET "upvotes" = "upvotes" + 1 WHERE "UUID" = ?`, [UUID]);
                }
            } else {
                await db.prepare("run", `INSERT INTO "casualVotes" ("videoID", "service", "titleID", "hashedVideoID", "timeSubmitted", "UUID", "category", "upvotes") VALUES (?, ?, ?, ?, ?, ?, ?, ?)`,
                    [videoID, service, titleID, hashedVideoID, now, UUID, category, 1]);
            }
        }

        QueryCacher.clearBrandingCache({ videoID, hashedVideoID, service });

        res.status(200).send("OK");
        lock.unlock();
    } catch (e) {
        Logger.error(e as string);
        res.status(500).send("Internal Server Error");
    }
}
/**
 * Reconciles a new casual vote with the user's prior votes on this title.
 * Returns true when the user already cast this exact vote (totals unchanged);
 * otherwise undoes any conflicting prior votes, records the new one in the
 * private DB, and returns false.
 */
async function handleExistingVotes(videoID: VideoID, service: Service, titleID: number,
    hashedUserID: HashedUserID, hashedIP: HashedIP, category: CasualCategory, downvote: boolean, now: number): Promise<boolean> {
    const priorVote = await privateDB.prepare("get", `SELECT "UUID" from "casualVotes" WHERE "videoID" = ? AND "service" = ? AND "titleID" = ? AND "userID" = ? AND "category" = ?`, [videoID, service, titleID, hashedUserID, category]) as ExistingVote;
    if (priorVote) return true;

    if (downvote) {
        // A downvote withdraws every category upvote this user placed on the title.
        const priorUpvotes = await privateDB.prepare("all", `SELECT "category" from "casualVotes" WHERE "category" != 'downvote' AND "videoID" = ? AND "service" = ? AND "titleID" = ? AND "userID" = ?`, [videoID, service, titleID, hashedUserID]);
        for (const upvote of priorUpvotes) {
            await db.prepare("run", `UPDATE "casualVotes" SET "upvotes" = "upvotes" - 1 WHERE "videoID" = ? AND "service" = ? AND "titleID" = ? AND "category" = ?`, [videoID, service, titleID, upvote.category]);
            await privateDB.prepare("run", `DELETE FROM "casualVotes" WHERE "videoID" = ? AND "service" = ? AND "titleID" = ? AND "userID" = ? AND "category" = ?`, [videoID, service, titleID, hashedUserID, upvote.category]);
        }
    } else {
        // An upvote withdraws the user's standing downvote on the title, if any.
        const priorDownvote = await privateDB.prepare("get", `SELECT "UUID" from "casualVotes" WHERE "category" = 'downvote' AND "videoID" = ? AND "service" = ? AND "titleID" = ? AND "userID" = ?`, [videoID, service, titleID, hashedUserID]) as ExistingVote;
        if (priorDownvote) {
            await db.prepare("run", `UPDATE "casualVotes" SET "upvotes" = "upvotes" - 1 WHERE "category" = 'downvote' AND "videoID" = ? AND "service" = ? AND "titleID" = ?`, [videoID, service, titleID]);
            await privateDB.prepare("run", `DELETE FROM "casualVotes" WHERE "category" = 'downvote' AND "videoID" = ? AND "service" = ? AND "titleID" = ? AND "userID" = ?`, [videoID, service, titleID, hashedUserID]);
        }
    }

    await privateDB.prepare("run", `INSERT INTO "casualVotes" ("videoID", "service", "titleID", "userID", "hashedIP", "category", "timeSubmitted") VALUES (?, ?, ?, ?, ?, ?, ?)`,
        [videoID, service, titleID, hashedUserID, hashedIP, category, now]);
    return false;
}

View File

@@ -23,7 +23,7 @@ export async function postClearCache(req: Request, res: Response): Promise<Respo
if (invalidFields.length !== 0) {
// invalid request
const fields = invalidFields.join(", ");
const fields = invalidFields.reduce((p, c, i) => p + (i !== 0 ? ", " : "") + c, "");
return res.status(400).send(`No valid ${fields} field(s) provided`);
}

View File

@@ -9,7 +9,7 @@ import { getIP } from "../utils/getIP";
import { getFormattedTime } from "../utils/getFormattedTime";
import { dispatchEvent } from "../utils/webhookUtils";
import { Request, Response } from "express";
import { ActionType, Category, HashedIP, IncomingSegment, IPAddress, SegmentUUID, Service, VideoDuration, VideoID } from "../types/segments.model";
import { ActionType, Category, IncomingSegment, IPAddress, SegmentUUID, Service, VideoDuration, VideoID } from "../types/segments.model";
import { deleteLockCategories } from "./deleteLockCategories";
import { QueryCacher } from "../utils/queryCacher";
import { getReputation } from "../utils/reputation";
@@ -20,12 +20,10 @@ import { parseUserAgent } from "../utils/userAgent";
import { getService } from "../utils/getService";
import axios from "axios";
import { vote } from "./voteOnSponsorTime";
import { canSubmit, canSubmitGlobal } from "../utils/permissions";
import { canSubmit } from "../utils/permissions";
import { getVideoDetails, videoDetails } from "../utils/getVideoDetails";
import * as youtubeID from "../utils/youtubeID";
import { acquireLock } from "../utils/redisLock";
import { checkBanStatus } from "../utils/checkBan";
import { isRequestInvalid } from "../utils/requestValidator";
import { banUser } from "./shadowBanUser";
type CheckResult = {
pass: boolean,
@@ -130,19 +128,14 @@ async function autoModerateSubmission(apiVideoDetails: videoDetails,
// return false on undefined or 0
if (!duration) return false;
if (apiDuration && apiDuration > 2 && duration && duration > 2 && Math.abs(apiDuration - duration) > 3) {
// YouTube server-side ad injection might be active, reject
return "YouTube is currently testing a new anti-adblock technique called server-side ad-injection. This causes skips and submissions to be offset by the duration of the ad. It seems that you are affected by this A/B test, so until a fix is developed, we cannot accept submissions from your device due to them potentially being inaccurate.";
}
const segments = submission.segments;
// map all times to float array
const allSegmentTimes = segments.filter((s) => s.actionType !== ActionType.Chapter)
.map(segment => [parseFloat(segment.segment[0]), parseFloat(segment.segment[1])]);
// add previous submissions by this user
const allSubmittedByUser = await db.prepare("all", `SELECT "startTime", "endTime" FROM "sponsorTimes" WHERE "userID" = ? AND "videoID" = ? AND "service" = ? AND "votes" > -1 AND "actionType" != 'chapter' AND "hidden" = 0`
, [submission.userID, submission.videoID, submission.service]) as { startTime: string, endTime: string }[];
const allSubmittedByUser = await db.prepare("all", `SELECT "startTime", "endTime" FROM "sponsorTimes" WHERE "userID" = ? AND "videoID" = ? AND "votes" > -1 AND "actionType" != 'chapter' AND "hidden" = 0`
, [submission.userID, submission.videoID]) as { startTime: string, endTime: string }[];
if (allSubmittedByUser) {
//add segments the user has previously submitted
@@ -164,23 +157,28 @@ async function autoModerateSubmission(apiVideoDetails: videoDetails,
}
async function checkUserActiveWarning(userID: HashedUserID): Promise<CheckResult> {
const warning = await db.prepare("get",
const MILLISECONDS_IN_HOUR = 3600000;
const now = Date.now();
const warnings = (await db.prepare("all",
`SELECT "reason"
FROM warnings
WHERE "userID" = ? AND enabled = 1 AND type = 0
WHERE "userID" = ? AND "issueTime" > ? AND enabled = 1
ORDER BY "issueTime" DESC`,
[userID],
) as {reason: string};
[
userID,
Math.floor(now - (config.hoursAfterWarningExpires * MILLISECONDS_IN_HOUR))
],
) as {reason: string}[]).sort((a, b) => (b?.reason?.length ?? 0) - (a?.reason?.length ?? 0));
if (warning != null) {
const defaultMessage = "Submission rejected due to a tip from a moderator. This means that we noticed you were making some common mistakes"
if (warnings?.length >= config.maxNumberOfActiveWarnings) {
const defaultMessage = "Submission rejected due to a warning from a moderator. This means that we noticed you were making some common mistakes"
+ " that are not malicious, and we just want to clarify the rules. "
+ "Could you please send a message in discord.gg/SponsorBlock or matrix.to/#/#sponsor:ajay.app so we can further help you? "
+ `Your userID is ${userID}.`;
return {
pass: false,
errorMessage: defaultMessage + (warning.reason?.length > 0 ? `\n\nTip message: '${warning.reason}'` : ""),
errorMessage: defaultMessage + (warnings[0]?.reason?.length > 0 ? `\n\nWarning reason: '${warnings[0].reason}'` : ""),
errorCode: 403
};
}
@@ -195,17 +193,12 @@ async function checkInvalidFields(videoID: VideoID, userID: UserID, hashedUserID
if (typeof videoID !== "string" || videoID?.length == 0) {
invalidFields.push("videoID");
}
if (service === Service.YouTube) {
if (config.mode !== "test") {
const sanitizedVideoID = youtubeID.validate(videoID) ? videoID : youtubeID.sanitize(videoID);
if (!youtubeID.validate(sanitizedVideoID)) {
invalidFields.push("videoID");
errors.push("YouTube videoID could not be extracted");
}
if (service === Service.YouTube && config.mode !== "test") {
const sanitizedVideoID = youtubeID.validate(videoID) ? videoID : youtubeID.sanitize(videoID);
if (!youtubeID.validate(sanitizedVideoID)) {
invalidFields.push("videoID");
errors.push("YouTube videoID could not be extracted");
}
} else if (service !== Service.Spotify) {
invalidFields.push("service");
errors.push("Service is not supported");
}
const minLength = config.minUserIDLength;
if (typeof userID !== "string" || userID?.length < minLength) {
@@ -243,11 +236,11 @@ async function checkInvalidFields(videoID: VideoID, userID: UserID, hashedUserID
if (invalidFields.length !== 0) {
// invalid request
const formattedFields = invalidFields.join(", ");
const formattedErrors = errors.join(". ");
const formattedFields = invalidFields.reduce((p, c, i) => p + (i !== 0 ? ", " : "") + c, "");
const formattedErrors = errors.reduce((p, c, i) => p + (i !== 0 ? ". " : " ") + c, "");
return {
pass: false,
errorMessage: `No valid ${formattedFields}. ${formattedErrors}`,
errorMessage: `No valid ${formattedFields}.${formattedErrors}`,
errorCode: 400
};
}
@@ -285,7 +278,7 @@ async function checkEachSegmentValid(rawIP: IPAddress, paramUserID: UserID, user
errorMessage:
`Users have voted that all the segments required for this video have already been submitted for the following category: ` +
`'${segments[i].category}'\n` +
`${lockedCategoryList[lockIndex].reason?.length !== 0 ? `\nReason: '${lockedCategoryList[lockIndex].reason}'\n` : ""}` +
`${lockedCategoryList[lockIndex].reason?.length !== 0 ? `\nReason: '${lockedCategoryList[lockIndex].reason}\n'` : ""}` +
`You may need to refresh if you don't see the segments.\n` +
`${(segments[i].category === "sponsor" ? "\nMaybe the segment you are submitting is a different category that you have not enabled and is not a sponsor. " +
"Categories that aren't sponsor, such as self-promotion can be enabled in the options.\n" : "")}` +
@@ -321,7 +314,7 @@ async function checkEachSegmentValid(rawIP: IPAddress, paramUserID: UserID, user
}
if (!(isVIP || isTempVIP) && segments[i].category === "sponsor"
&& segments[i].actionType === ActionType.Skip && (endTime - startTime) < 1) {
&& segments[i].actionType !== ActionType.Full && (endTime - startTime) < 1) {
// Too short
return { pass: false, errorMessage: "Segments must be longer than 1 second long", errorCode: 400 };
}
@@ -330,26 +323,23 @@ async function checkEachSegmentValid(rawIP: IPAddress, paramUserID: UserID, user
const duplicateCheck2Row = await db.prepare("get", `SELECT "UUID" FROM "sponsorTimes" WHERE "startTime" = ?
and "endTime" = ? and "category" = ? and "actionType" = ? and "description" = ? and "videoID" = ? and "service" = ?`, [startTime, endTime, segments[i].category, segments[i].actionType, segments[i].description, videoID, service]);
if (duplicateCheck2Row) {
segments[i].ignoreSegment = true;
if (segments[i].actionType === ActionType.Full) {
// Forward as vote
await vote(rawIP, duplicateCheck2Row.UUID, paramUserID, 1);
segments[i].ignoreSegment = true;
continue;
} else {
return { pass: false, errorMessage: "Segment has already been submitted before.", errorCode: 409 };
}
}
}
if (segments.every((s) => s.ignoreSegment && s.actionType !== ActionType.Full)) {
return { pass: false, errorMessage: "Segment has already been submitted before.", errorCode: 409 };
}
return CHECK_PASS;
}
async function checkByAutoModerator(videoID: VideoID, userID: HashedUserID, segments: IncomingSegment[], service: Service, apiVideoDetails: videoDetails, videoDuration: number): Promise<CheckResult> {
// Auto moderator check
if (service == Service.YouTube && apiVideoDetails) {
if (service == Service.YouTube) {
const autoModerateResult = await autoModerateSubmission(apiVideoDetails, { videoID, userID, segments, service, videoDuration });
if (autoModerateResult) {
return {
@@ -365,15 +355,6 @@ async function checkByAutoModerator(videoID: VideoID, userID: HashedUserID, segm
async function updateDataIfVideoDurationChange(videoID: VideoID, service: Service, videoDuration: VideoDuration, videoDurationParam: VideoDuration) {
let lockedCategoryList = await db.prepare("all", 'SELECT category, "actionType", reason from "lockCategories" where "videoID" = ? AND "service" = ?', [videoID, service]);
if (service === Service.Spotify) {
// Don't handle changed durations
return {
videoDuration,
apiVideoDetails: null,
lockedCategoryList
};
}
const previousSubmissions = await db.prepare("all",
`SELECT "videoDuration", "UUID"
FROM "sponsorTimes"
@@ -403,12 +384,9 @@ async function updateDataIfVideoDurationChange(videoID: VideoID, service: Servic
// Only treat as difference if both the api duration and submitted duration have changed
if (videoDurationChanged(videoDuration) && (!videoDurationParam || videoDurationChanged(videoDurationParam))) {
// Hide all previous submissions
await db.prepare("run", `UPDATE "sponsorTimes" SET "hidden" = 1
WHERE "videoID" = ? AND "service" = ? AND "videoDuration" != ?
AND "hidden" = 0 AND "shadowHidden" = 0 AND
"actionType" != 'full' AND "votes" > -2`,
[videoID, service, videoDuration]);
for (const submission of previousSubmissions) {
await db.prepare("run", `UPDATE "sponsorTimes" SET "hidden" = 1 WHERE "UUID" = ?`, [submission.UUID]);
}
lockedCategoryList = [];
deleteLockCategories(videoID, null, null, service).catch((e) => Logger.error(`deleting lock categories: ${e}`));
}
@@ -519,22 +497,6 @@ export async function postSkipSegments(req: Request, res: Response): Promise<Res
}
const userID: HashedUserID = await getHashCache(paramUserID);
const matchedRule = isRequestInvalid({
userAgent,
userAgentHeader: req.headers["user-agent"],
videoDuration,
videoID,
userID: paramUserID,
service,
segments,
endpoint: "sponsorblock-postSkipSegments"
});
if (matchedRule !== null) {
sendNewUserWebhook(config.discordRejectedNewUserWebhookURL, userID, videoID, userAgent, req, videoDurationParam, matchedRule);
Logger.warn(`Sponsorblock submission rejected by request validator: ${userID} ${videoID} ${videoDurationParam} ${userAgent} ${req.headers["user-agent"]}`);
return res.status(200).send("OK");
}
const invalidCheckResult = await checkInvalidFields(videoID, paramUserID, userID, segments, videoDurationParam, userAgent, service);
if (!invalidCheckResult.pass) {
return res.status(invalidCheckResult.errorCode).send(invalidCheckResult.errorMessage);
@@ -546,52 +508,36 @@ export async function postSkipSegments(req: Request, res: Response): Promise<Res
return res.status(userWarningCheckResult.errorCode).send(userWarningCheckResult.errorMessage);
}
const lock = await acquireLock(`postSkipSegment:${videoID}.${userID}`);
if (!lock.status) {
res.status(429).send("Submission already in progress");
return;
const isVIP = (await isUserVIP(userID));
const isTempVIP = (await isUserTempVIP(userID, videoID));
const rawIP = getIP(req);
const newData = await updateDataIfVideoDurationChange(videoID, service, videoDuration, videoDurationParam);
videoDuration = newData.videoDuration;
const { lockedCategoryList, apiVideoDetails } = newData;
// Check if all submissions are correct
const segmentCheckResult = await checkEachSegmentValid(rawIP, paramUserID, userID, videoID, segments, service, isVIP, isTempVIP, lockedCategoryList);
if (!segmentCheckResult.pass) {
return res.status(segmentCheckResult.errorCode).send(segmentCheckResult.errorMessage);
}
if (!(isVIP || isTempVIP)) {
const autoModerateCheckResult = await checkByAutoModerator(videoID, userID, segments, service, apiVideoDetails, videoDurationParam);
if (!autoModerateCheckResult.pass) {
return res.status(autoModerateCheckResult.errorCode).send(autoModerateCheckResult.errorMessage);
}
}
// Will be filled when submitting
const UUIDs = [];
const newSegments = [];
//hash the ip 5000 times so no one can get it from the database
const hashedIP = await getHashCache(rawIP + config.globalSalt);
try {
const isVIP = (await isUserVIP(userID));
const isTempVIP = (await isUserTempVIP(userID, videoID));
const rawIP = getIP(req);
const newData = await updateDataIfVideoDurationChange(videoID, service, videoDuration, videoDurationParam);
videoDuration = newData.videoDuration;
const { lockedCategoryList, apiVideoDetails } = newData;
// Check if all submissions are correct
const segmentCheckResult = await checkEachSegmentValid(rawIP, paramUserID, userID, videoID, segments, service, isVIP, isTempVIP, lockedCategoryList);
if (!segmentCheckResult.pass) {
lock.unlock();
return res.status(segmentCheckResult.errorCode).send(segmentCheckResult.errorMessage);
}
if (!(isVIP || isTempVIP)) {
const autoModerateCheckResult = await checkByAutoModerator(videoID, userID, segments, service, apiVideoDetails, videoDurationParam);
if (!autoModerateCheckResult.pass) {
return res.status(autoModerateCheckResult.errorCode).send(autoModerateCheckResult.errorMessage);
}
}
const permission = await canSubmitGlobal(userID);
if (!permission.canSubmit) {
lock.unlock();
Logger.warn(`New user trying to submit: ${userID} ${videoID} ${Object.keys(segments?.[0] ?? {})} ${Object.keys(req.query)} ${videoDurationParam} ${userAgent} ${req.headers["user-agent"]}`);
return res.status(403).send(permission.reason);
} else if (permission.newUser) {
sendNewUserWebhook(config.discordNewUserWebhookURL, userID, videoID, userAgent, req, videoDurationParam, undefined);
}
// Will be filled when submitting
const UUIDs = [];
const newSegments = [];
//hash the ip 5000 times so no one can get it from the database
const hashedIP = await getHashCache(rawIP + config.globalSalt) as HashedIP;
//get current time
const timeSubmitted = Date.now();
// const rateLimitCheckResult = checkRateLimit(userID, videoID, service, timeSubmitted, hashedIP);
@@ -600,14 +546,22 @@ export async function postSkipSegments(req: Request, res: Response): Promise<Res
// }
//check to see if this user is shadowbanned
const isBanned = await checkBanStatus(userID, hashedIP);
const userBanCount = (await db.prepare("get", `SELECT count(*) as "userCount" FROM "shadowBannedUsers" WHERE "userID" = ? LIMIT 1`, [userID]))?.userCount;
const ipBanCount = (await db.prepare("get", `SELECT count(*) as "userCount" FROM "shadowBannedIPs" WHERE "hashedIP" = ? LIMIT 1`, [hashedIP]))?.userCount;
const shadowBanCount = userBanCount || ipBanCount;
const startingVotes = 0;
const reputation = await getReputation(userID);
if (!userBanCount && ipBanCount) {
// Make sure the whole user is banned
banUser(userID, true, true, 1, config.categoryList as Category[])
.catch((e) => Logger.error(`Error banning user after submitting from a banned IP: ${e}`));
}
for (const segmentInfo of segments) {
// Full segments are always rejected since there can only be one, so shadow hide wouldn't work
if (segmentInfo.ignoreSegment
|| (isBanned && segmentInfo.actionType === ActionType.Full)) {
|| (shadowBanCount && segmentInfo.actionType === ActionType.Full)) {
continue;
}
@@ -624,19 +578,17 @@ export async function postSkipSegments(req: Request, res: Response): Promise<Res
("videoID", "startTime", "endTime", "votes", "locked", "UUID", "userID", "timeSubmitted", "views", "category", "actionType", "service", "videoDuration", "reputation", "shadowHidden", "hashedVideoID", "userAgent", "description")
VALUES(?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)`, [
videoID, segmentInfo.segment[0], segmentInfo.segment[1], startingVotes, startingLocked, UUID, userID, timeSubmitted, 0
, segmentInfo.category, segmentInfo.actionType, service, videoDuration, reputation, isBanned ? 1 : 0, hashedVideoID, userAgent, segmentInfo.description
, segmentInfo.category, segmentInfo.actionType, service, videoDuration, reputation, shadowBanCount, hashedVideoID, userAgent, segmentInfo.description
],
);
//add to private db as well
await privateDB.prepare("run", `INSERT INTO "sponsorTimes" VALUES(?, ?, ?, ?)`, [videoID, hashedIP, timeSubmitted, service]);
if (service === Service.YouTube) {
await db.prepare("run", `INSERT INTO "videoInfo" ("videoID", "channelID", "title", "published")
SELECT ?, ?, ?, ?
WHERE NOT EXISTS (SELECT 1 FROM "videoInfo" WHERE "videoID" = ?)`, [
videoID, apiVideoDetails?.authorId || "", apiVideoDetails?.title || "", apiVideoDetails?.published || 0, videoID]);
}
await db.prepare("run", `INSERT INTO "videoInfo" ("videoID", "channelID", "title", "published")
SELECT ?, ?, ?, ?
WHERE NOT EXISTS (SELECT 1 FROM "videoInfo" WHERE "videoID" = ?)`, [
videoID, apiVideoDetails?.authorId || "", apiVideoDetails?.title || "", apiVideoDetails?.published || 0, videoID]);
// Clear redis cache for this video
QueryCacher.clearSegmentCache({
@@ -648,7 +600,6 @@ export async function postSkipSegments(req: Request, res: Response): Promise<Res
} catch (err) {
//a DB change probably occurred
Logger.error(`Error when putting sponsorTime in the DB: ${videoID}, ${segmentInfo.segment[0]}, ${segmentInfo.segment[1]}, ${userID}, ${segmentInfo.category}. ${err}`);
lock.unlock();
return res.sendStatus(500);
}
@@ -659,52 +610,15 @@ export async function postSkipSegments(req: Request, res: Response): Promise<Res
segment: segmentInfo.segment,
});
}
for (let i = 0; i < segments.length; i++) {
sendWebhooks(apiVideoDetails, userID, videoID, UUIDs[i], segments[i], service).catch((e) => Logger.error(`call send webhooks ${e}`));
}
return res.json(newSegments);
} catch (err) {
Logger.error(err as string);
return res.sendStatus(500);
} finally {
lock.unlock();
}
}
function sendNewUserWebhook(webhookUrl: string, userID: HashedUserID, videoID: any, userAgent: any, req: Request, videoDurationParam: VideoDuration, ruleName: string | undefined) {
if (!webhookUrl) return;
axios.post(webhookUrl, {
"embeds": [{
"title": userID,
"url": `https://www.youtube.com/watch?v=${videoID}`,
"description": `**User Agent**: ${userAgent}\
\n**Sent User Agent**: ${req.query.userAgent ?? req.body.userAgent}\
\n**Real User Agent**: ${req.headers["user-agent"]}\
\n**Video Duration**: ${videoDurationParam}`,
"color": 10813440,
"thumbnail": {
"url": getMaxResThumbnail(videoID),
},
"footer": {
"text": ruleName === undefined ? "Caught by permission check" : `Caught by rule '${ruleName}'`,
},
}],
})
.then(res => {
if (res.status >= 400) {
Logger.error("Error sending reported submission Discord hook");
Logger.error(JSON.stringify((res.data)));
Logger.error("\n");
}
})
.catch(err => {
Logger.error("Failed to send reported submission Discord hook.");
Logger.error(JSON.stringify(err));
Logger.error("\n");
});
for (let i = 0; i < segments.length; i++) {
sendWebhooks(apiVideoDetails, userID, videoID, UUIDs[i], segments[i], service).catch((e) => Logger.error(`call send webhooks ${e}`));
}
return res.json(newSegments);
}
// Takes an array of arrays:

View File

@@ -4,8 +4,8 @@ import { db } from "../databases/databases";
import { isUserVIP } from "../utils/isUserVIP";
import { getHashCache } from "../utils/getHashCache";
import { HashedUserID, UserID } from "../types/user.model";
import { config } from "../config";
import { generateWarningDiscord, warningData, dispatchEvent } from "../utils/webhookUtils";
import { WarningType } from "../types/warning.model";
type warningEntry = {
userID: HashedUserID,
@@ -15,7 +15,12 @@ type warningEntry = {
reason: string
}
// 15 mins, in milliseconds — presumably the window during which a just-issued warning may still be edited; TODO confirm against callers
const MAX_EDIT_DELAY = 900000; // 15 mins
/**
 * Decides whether a previously issued warning is eligible to be re-enabled:
 * it must currently be disabled AND have been issued more recently than the
 * configured expiry window (config.hoursAfterWarningExpires hours ago).
 */
function checkExpiredWarning(warning: warningEntry): boolean {
    const msPerHour = 3600000;
    // Cutoff timestamp: anything issued before this has expired
    const expiryCutoff = Math.floor(Date.now() - config.hoursAfterWarningExpires * msPerHour);
    return !warning.enabled && warning.issueTime > expiryCutoff;
}
// Fetch the stored userName row for a hashed userID (read-only, so served from a replica)
const getUsername = (userID: HashedUserID) => db.prepare("get", `SELECT "userName" FROM "userNames" WHERE "userID" = ?`, [userID], { useReplica: true });
@@ -27,7 +32,6 @@ export async function postWarning(req: Request, res: Response): Promise<Response
const issueTime = new Date().getTime();
const enabled: boolean = req.body.enabled ?? true;
const reason: string = req.body.reason ?? "";
const type: WarningType = req.body.type ?? WarningType.SponsorBlock;
if ((!issuerUserID && enabled) || (issuerUserID && !await isUserVIP(issuerUserID))) {
Logger.warn(`Permission violation: User ${issuerUserID} attempted to warn user ${userID}.`);
@@ -36,61 +40,53 @@ export async function postWarning(req: Request, res: Response): Promise<Response
let resultStatus = "";
try {
if (enabled) {
if (!reason) {
return res.status(400).json({ "message": "Missing warning reason" });
}
const previousWarning = await db.prepare("get", 'SELECT * FROM "warnings" WHERE "userID" = ? AND "type" = ? AND "enabled" = 1', [userID, type]) as warningEntry;
if (enabled) {
const previousWarning = await db.prepare("get", 'SELECT * FROM "warnings" WHERE "userID" = ? AND "issuerUserID" = ?', [userID, issuerUserID]) as warningEntry;
if (!previousWarning) {
await db.prepare(
"run",
'INSERT INTO "warnings" ("userID", "issueTime", "issuerUserID", "enabled", "reason", "type") VALUES (?, ?, ?, 1, ?, ?)',
[userID, issueTime, issuerUserID, reason, type]
);
resultStatus = "issued to";
// allow a warning to be edited by the same vip within 15 mins of issuing
} else if (issuerUserID === previousWarning.issuerUserID && (Date.now() - MAX_EDIT_DELAY) < previousWarning.issueTime) {
await db.prepare(
"run", 'UPDATE "warnings" SET "reason" = ? WHERE "userID" = ? AND "issueTime" = ?',
[reason, userID, previousWarning.issueTime]
);
resultStatus = "edited for";
} else {
return res.sendStatus(409);
}
if (!previousWarning) {
await db.prepare(
"run",
'INSERT INTO "warnings" ("userID", "issueTime", "issuerUserID", "enabled", "reason") VALUES (?, ?, ?, 1, ?)',
[userID, issueTime, issuerUserID, reason]
);
resultStatus = "issued to";
// check if warning is still within issue time and warning is not enabled
} else if (checkExpiredWarning(previousWarning) ) {
await db.prepare(
"run", 'UPDATE "warnings" SET "enabled" = 1, "reason" = ? WHERE "userID" = ? AND "issueTime" = ?',
[reason, userID, previousWarning.issueTime]
);
resultStatus = "re-enabled";
} else {
await db.prepare("run", 'UPDATE "warnings" SET "enabled" = 0, "disableTime" = ? WHERE "userID" = ? AND "type" = ? AND "enabled" = 1', [issueTime, userID, type]);
resultStatus = "removed from";
return res.sendStatus(409);
}
const targetUsername = await getUsername(userID) ?? null;
const issuerUsername = await getUsername(issuerUserID) ?? null;
const webhookData = {
target: {
userID,
username: targetUsername
},
issuer: {
userID: issuerUserID,
username: issuerUsername
},
reason
} as warningData;
try {
const warning = generateWarningDiscord(webhookData);
dispatchEvent("warning", warning);
} catch /* istanbul ignore next */ (err) {
Logger.error(`Error sending warning to Discord ${err}`);
}
return res.status(200).json({
message: `Tip ${resultStatus} user '${userID}'.`,
});
} catch (e) {
Logger.error(e as string);
return res.sendStatus(500);
} else {
await db.prepare("run", 'UPDATE "warnings" SET "enabled" = 0 WHERE "userID" = ?', [userID]);
resultStatus = "removed from";
}
const targetUsername = await getUsername(userID) ?? null;
const issuerUsername = await getUsername(issuerUserID) ?? null;
const webhookData = {
target: {
userID,
username: targetUsername
},
issuer: {
userID: issuerUserID,
username: issuerUsername
},
reason
} as warningData;
try {
const warning = generateWarningDiscord(webhookData);
dispatchEvent("warning", warning);
} catch /* istanbul ignore next */ (err) {
Logger.error(`Error sending warning to Discord ${err}`);
}
return res.status(200).json({
message: `Warning ${resultStatus} user '${userID}'.`,
});
}

View File

@@ -1,48 +0,0 @@
import { getHashCache } from "../utils/getHashCache";
import { db } from "../databases/databases";
import { Request, Response } from "express";
import { isUserVIP } from "../utils/isUserVIP";
import { UserID } from "../types/user.model";
import { Logger } from "../utils/logger";
// Expected request body for the setConfig endpoint
interface SetConfigRequest extends Request {
    body: {
        userID: UserID; // caller's private userID; hashed before the VIP check in setConfig
        key: string;    // config key to write; must appear in allowedConfigs
        value: string;  // value stored verbatim in the "config" table
    }
}
// Whitelist of config keys that may be written through the setConfig endpoint;
// any other key is rejected with a 400 before authorization is checked
const allowedConfigs = [
    "old-submitter-block-date",
    "max-users-per-minute",
    "max-users-per-minute-dearrow"
];
/**
 * VIP-only endpoint that upserts a single key/value pair into the "config" table.
 *
 * Responses:
 *   400 — missing userID or key not in allowedConfigs
 *   403 — caller is not a VIP
 *   200 — value written
 *   500 — database error (logged)
 */
export async function setConfig(req: SetConfigRequest, res: Response): Promise<Response> {
    const { userID, key, value } = req.body;

    // Reject malformed requests before doing any work
    if (!userID || !allowedConfigs.includes(key)) {
        return res.sendStatus(400);
    }

    // Authorization: hash the private userID and require VIP status
    const hashedUserID = await getHashCache(userID as UserID);
    if (!(await isUserVIP(hashedUserID))) {
        return res.sendStatus(403);
    }

    try {
        // Upsert: insert the key, or overwrite its value if it already exists
        await db.prepare("run", `INSERT INTO "config" ("key", "value") VALUES(?, ?) ON CONFLICT ("key") DO UPDATE SET "value" = ?`, [key, value, value]);
        return res.sendStatus(200);
    } catch (e) {
        Logger.error(e as string);
        return res.sendStatus(500);
    }
}

View File

@@ -3,11 +3,8 @@ import { Logger } from "../utils/logger";
import { db, privateDB } from "../databases/databases";
import { getHashCache } from "../utils/getHashCache";
import { Request, Response } from "express";
import { isUserBanned } from "../utils/checkBan";
import { HashedUserID } from "../types/user.model";
import { isRequestInvalid } from "../utils/requestValidator";
function logUserNameChange(userID: string, newUserName: string, oldUserName: string, updatedByAdmin: boolean): Promise<void> {
function logUserNameChange(userID: string, newUserName: string, oldUserName: string, updatedByAdmin: boolean): Promise<Response> {
return privateDB.prepare("run",
`INSERT INTO "userNameLogs"("userID", "newUserName", "oldUserName", "updatedByAdmin", "updatedAt") VALUES(?, ?, ?, ?, ?)`,
[userID, newUserName, oldUserName, + updatedByAdmin, new Date().getTime()]
@@ -15,12 +12,12 @@ function logUserNameChange(userID: string, newUserName: string, oldUserName: str
}
export async function setUsername(req: Request, res: Response): Promise<Response> {
const userIDInput = req.query.userID as string;
const adminUserIDInput = req.query.adminUserID as string | undefined;
let userID = req.query.userID as string;
let userName = req.query.username as string;
let hashedUserID: HashedUserID;
if (userIDInput == undefined || userName == undefined || userIDInput === "undefined" || userName.length > 64) {
let adminUserIDInput = req.query.adminUserID as string;
if (userID == undefined || userName == undefined || userID === "undefined" || userName.length > 64) {
//invalid request
return res.sendStatus(400);
}
@@ -35,41 +32,33 @@ export async function setUsername(req: Request, res: Response): Promise<Response
// eslint-disable-next-line no-control-regex
userName = userName.replace(/[\u0000-\u001F\u007F-\u009F]/g, "");
if (isRequestInvalid({
userAgentHeader: req.headers["user-agent"],
userID: adminUserIDInput ?? userIDInput,
newUsername: userName,
endpoint: "setUsername",
})) {
Logger.warn(`Username change rejected by request validator: ${userName} ${req.headers["user-agent"]}`);
return res.sendStatus(200);
// check privateID against publicID
if (!await checkPrivateUsername(userName, userID)) {
return res.sendStatus(400);
}
if (adminUserIDInput != undefined) {
//this is the admin controlling the other users account, don't hash the controling account's ID
adminUserIDInput = await getHashCache(adminUserIDInput);
if (adminUserIDInput != config.adminUserID) {
//they aren't the admin
return res.sendStatus(403);
}
} else {
//hash the userID
userID = await getHashCache(userID);
}
try {
if (adminUserIDInput != undefined) {
//this is the admin controlling the other users account, don't hash the controling account's ID
hashedUserID = userIDInput as HashedUserID;
const row = await db.prepare("get", `SELECT count(*) as "userCount" FROM "userNames" WHERE "userID" = ? AND "locked" = 1`, [userID]);
if (adminUserIDInput === undefined && row.userCount > 0) {
return res.sendStatus(200);
}
if (await getHashCache(adminUserIDInput) != config.adminUserID) {
//they aren't the admin
return res.sendStatus(403);
}
} else {
// check privateID against publicID
if (!await checkPrivateUsername(userName, userIDInput)) {
return res.sendStatus(400);
}
//hash the userID
hashedUserID = await getHashCache(userIDInput) as HashedUserID;
const row = await db.prepare("get", `SELECT count(*) as "userCount" FROM "userNames" WHERE "userID" = ? AND "locked" = 1`, [hashedUserID]);
if (row.userCount > 0) {
return res.sendStatus(200);
}
if (await isUserBanned(hashedUserID)) {
return res.sendStatus(200);
}
const shadowBanRow = await db.prepare("get", `SELECT count(*) as "userCount" FROM "shadowBannedUsers" WHERE "userID" = ? LIMIT 1`, [userID]);
if (adminUserIDInput === undefined && shadowBanRow.userCount > 0) {
return res.sendStatus(200);
}
}
catch (error) /* istanbul ignore next */ {
@@ -79,26 +68,24 @@ export async function setUsername(req: Request, res: Response): Promise<Response
try {
//check if username is already set
const row = await db.prepare("get", `SELECT "userName" FROM "userNames" WHERE "userID" = ? LIMIT 1`, [hashedUserID]);
const row = await db.prepare("get", `SELECT "userName" FROM "userNames" WHERE "userID" = ? LIMIT 1`, [userID]);
const locked = adminUserIDInput === undefined ? 0 : 1;
let oldUserName = "";
if (row?.userName !== undefined) {
//already exists, update this row
oldUserName = row.userName;
if (userName == hashedUserID && !locked) {
await db.prepare("run", `DELETE FROM "userNames" WHERE "userID" = ?`, [hashedUserID]);
if (userName == userID && !locked) {
await db.prepare("run", `DELETE FROM "userNames" WHERE "userID" = ?`, [userID]);
} else {
await db.prepare("run", `UPDATE "userNames" SET "userName" = ?, "locked" = ? WHERE "userID" = ?`, [userName, locked, hashedUserID]);
await db.prepare("run", `UPDATE "userNames" SET "userName" = ?, "locked" = ? WHERE "userID" = ?`, [userName, locked, userID]);
}
} else if (userName === hashedUserID) {
return res.sendStatus(200);
} else {
//add to the db
await db.prepare("run", `INSERT INTO "userNames"("userID", "userName", "locked") VALUES(?, ?, ?)`, [hashedUserID, userName, locked]);
await db.prepare("run", `INSERT INTO "userNames"("userID", "userName", "locked") VALUES(?, ?, ?)`, [userID, userName, locked]);
}
await logUserNameChange(hashedUserID, userName, oldUserName, adminUserIDInput !== undefined);
await logUserNameChange(userID, userName, oldUserName, adminUserIDInput !== undefined);
return res.sendStatus(200);
} catch (err) /* istanbul ignore next */ {
@@ -114,4 +101,4 @@ async function checkPrivateUsername(username: string, userID: string): Promise<b
const userNameRow = await db.prepare("get", `SELECT "userID" FROM "userNames" WHERE "userID" = ? LIMIT 1`, [userNameHash]);
if (userNameRow?.userID) return false;
return true;
}
}

View File

@@ -1,16 +1,16 @@
import { db } from "../databases/databases";
import { db, privateDB } from "../databases/databases";
import { getHashCache } from "../utils/getHashCache";
import { Request, Response } from "express";
import { config } from "../config";
import { Category, DeArrowType, Service, VideoID, VideoIDHash } from "../types/segments.model";
import { Category, HashedIP, Service, VideoID, VideoIDHash } from "../types/segments.model";
import { UserID } from "../types/user.model";
import { QueryCacher } from "../utils/queryCacher";
import { isUserVIP } from "../utils/isUserVIP";
import { parseCategories, parseDeArrowTypes } from "../utils/parseParams";
import { Logger } from "../utils/logger";
import { parseCategories } from "../utils/parseParams";
export async function shadowBanUser(req: Request, res: Response): Promise<Response> {
const userID = req.query.userID as UserID;
const hashedIP = req.query.hashedIP as HashedIP;
const adminUserIDInput = req.query.adminUserID as UserID;
const type = Number.parseInt(req.query.type as string ?? "1");
if (isNaN(type)) {
@@ -20,41 +20,60 @@ export async function shadowBanUser(req: Request, res: Response): Promise<Respon
const enabled = req.query.enabled === undefined
? true
: req.query.enabled === "true";
const lookForIPs = req.query.lookForIPs === "true";
const banUsers = req.query.banUsers === undefined
? true
: req.query.banUsers === "true";
//if enabled is false and the old submissions should be made visible again
const unHideOldSubmissions = req.query.unHideOldSubmissions !== "false";
const categories: Category[] = parseCategories(req, config.categoryList as Category[]);
const deArrowTypes: DeArrowType[] = parseDeArrowTypes(req, config.deArrowTypes);
if (adminUserIDInput == undefined || (userID == undefined || type <= 0)) {
if (adminUserIDInput == undefined || (userID == undefined && hashedIP == undefined || type <= 0)) {
//invalid request
return res.sendStatus(400);
}
try {
//hash the userID
const adminUserID = await getHashCache(adminUserIDInput);
//hash the userID
const adminUserID = await getHashCache(adminUserIDInput);
const isVIP = await isUserVIP(adminUserID);
if (!isVIP) {
//not authorized
return res.sendStatus(403);
const isVIP = await isUserVIP(adminUserID);
if (!isVIP) {
//not authorized
return res.sendStatus(403);
}
if (userID) {
const result = await banUser(userID, enabled, unHideOldSubmissions, type, categories);
if (enabled && lookForIPs) {
const ipLoggingFixedTime = 1675295716000;
const timeSubmitted = (await db.prepare("all", `SELECT "timeSubmitted" FROM "sponsorTimes" WHERE "timeSubmitted" > ? AND "userID" = ?`, [ipLoggingFixedTime, userID])) as { timeSubmitted: number }[];
const ips = (await Promise.all(timeSubmitted.map((s) => {
return privateDB.prepare("all", `SELECT "hashedIP" FROM "sponsorTimes" WHERE "timeSubmitted" = ?`, [s.timeSubmitted]) as Promise<{ hashedIP: HashedIP }[]>;
}))).flat();
await Promise.all([...new Set(ips.map((ip) => ip.hashedIP))].map((ip) => {
return banIP(ip, enabled, unHideOldSubmissions, type, categories, true);
}));
}
const result = await banUser(userID, enabled, unHideOldSubmissions, type, categories, deArrowTypes);
if (result) {
res.sendStatus(result);
return;
}
} else if (hashedIP) {
const result = await banIP(hashedIP, enabled, unHideOldSubmissions, type, categories, banUsers);
if (result) {
res.sendStatus(result);
return;
}
return res.sendStatus(200);
} catch (e) {
Logger.error(e as string);
return res.sendStatus(500);
}
return res.sendStatus(200);
}
export async function banUser(userID: UserID, enabled: boolean, unHideOldSubmissions: boolean,
type: number, categories: Category[], deArrowTypes: DeArrowType[]): Promise<number> {
export async function banUser(userID: UserID, enabled: boolean, unHideOldSubmissions: boolean, type: number, categories: Category[]): Promise<number> {
//check to see if this user is already shadowbanned
const row = await db.prepare("get", `SELECT count(*) as "userCount" FROM "shadowBannedUsers" WHERE "userID" = ?`, [userID]);
@@ -66,12 +85,12 @@ export async function banUser(userID: UserID, enabled: boolean, unHideOldSubmiss
//find all previous submissions and hide them
if (unHideOldSubmissions) {
await unHideSubmissionsByUser(categories, deArrowTypes, userID, type);
await unHideSubmissionsByUser(categories, userID, type);
}
} else if (enabled && row.userCount > 0) {
// apply unHideOldSubmissions if applicable
if (unHideOldSubmissions) {
await unHideSubmissionsByUser(categories, deArrowTypes, userID, type);
await unHideSubmissionsByUser(categories, userID, type);
} else {
// otherwise ban already exists, send 409
return 409;
@@ -79,7 +98,7 @@ export async function banUser(userID: UserID, enabled: boolean, unHideOldSubmiss
} else if (!enabled && row.userCount > 0) {
//find all previous submissions and unhide them
if (unHideOldSubmissions) {
await unHideSubmissionsByUser(categories, deArrowTypes, userID, 0);
await unHideSubmissionsByUser(categories, userID, 0);
}
//remove them from the shadow ban list
@@ -88,40 +107,75 @@ export async function banUser(userID: UserID, enabled: boolean, unHideOldSubmiss
// already not shadowbanned
return 400;
}
return 200;
}
async function unHideSubmissionsByUser(categories: string[], deArrowTypes: DeArrowType[],
userID: UserID, type = 1) {
export async function banIP(hashedIP: HashedIP, enabled: boolean, unHideOldSubmissions: boolean, type: number, categories: Category[], banUsers: boolean): Promise<number> {
//check to see if this user is already shadowbanned
const row = await db.prepare("get", `SELECT count(*) as "userCount" FROM "shadowBannedIPs" WHERE "hashedIP" = ?`, [hashedIP]);
if (categories.length) {
await db.prepare("run", `UPDATE "sponsorTimes" SET "shadowHidden" = '${type}' WHERE "userID" = ? AND "category" in (${categories.map((c) => `'${c}'`).join(",")})
AND NOT EXISTS ( SELECT "videoID", "category" FROM "lockCategories" WHERE
"sponsorTimes"."videoID" = "lockCategories"."videoID" AND "sponsorTimes"."service" = "lockCategories"."service" AND "sponsorTimes"."category" = "lockCategories"."category")`, [userID]);
if (enabled) {
if (row.userCount == 0) {
await db.prepare("run", `INSERT INTO "shadowBannedIPs" VALUES(?)`, [hashedIP]);
}
//find all previous submissions and hide them
if (unHideOldSubmissions) {
const users = await unHideSubmissionsByIP(categories, hashedIP, type);
if (banUsers) {
await Promise.all([...users].map((user) => {
return banUser(user, enabled, unHideOldSubmissions, type, categories);
}));
}
} else if (row.userCount > 0) {
// Nothing to do, and already added
return 409;
}
} else if (!enabled) {
if (row.userCount > 0) {
//remove them from the shadow ban list
await db.prepare("run", `DELETE FROM "shadowBannedIPs" WHERE "hashedIP" = ?`, [hashedIP]);
}
//find all previous submissions and unhide them
if (unHideOldSubmissions) {
await unHideSubmissionsByIP(categories, hashedIP, 0);
}
}
return 200;
}
async function unHideSubmissionsByUser(categories: string[], userID: UserID, type = 1) {
await db.prepare("run", `UPDATE "sponsorTimes" SET "shadowHidden" = '${type}' WHERE "userID" = ? AND "category" in (${categories.map((c) => `'${c}'`).join(",")})
AND NOT EXISTS ( SELECT "videoID", "category" FROM "lockCategories" WHERE
"sponsorTimes"."videoID" = "lockCategories"."videoID" AND "sponsorTimes"."service" = "lockCategories"."service" AND "sponsorTimes"."category" = "lockCategories"."category")`, [userID]);
// clear cache for all old videos
(await db.prepare("all", `SELECT "category", "videoID", "hashedVideoID", "service", "userID" FROM "sponsorTimes" WHERE "userID" = ?`, [userID]))
(await db.prepare("all", `SELECT "videoID", "hashedVideoID", "service", "votes", "views" FROM "sponsorTimes" WHERE "userID" = ?`, [userID]))
.forEach((videoInfo: { category: Category; videoID: VideoID; hashedVideoID: VideoIDHash; service: Service; userID: UserID; }) => {
QueryCacher.clearSegmentCache(videoInfo);
});
}
if (deArrowTypes.includes("title")) {
await db.prepare("run", `UPDATE "titleVotes" as tv SET "shadowHidden" = ${type} FROM "titles" t WHERE tv."UUID" = t."UUID" AND t."userID" = ?`,
[userID]);
}
async function unHideSubmissionsByIP(categories: string[], hashedIP: HashedIP, type = 1): Promise<Set<UserID>> {
const submissions = await privateDB.prepare("all", `SELECT "timeSubmitted" FROM "sponsorTimes" WHERE "hashedIP" = ?`, [hashedIP]) as { timeSubmitted: number }[];
if (deArrowTypes.includes("thumbnail")) {
await db.prepare("run", `UPDATE "thumbnailVotes" as tv SET "shadowHidden" = ${type} FROM "thumbnails" t WHERE tv."UUID" = t."UUID" AND t."userID" = ?`,
[userID]);
}
const users: Set<UserID> = new Set();
await Promise.all(submissions.map(async (submission) => {
(await db.prepare("all", `SELECT "videoID", "hashedVideoID", "service", "votes", "views", "userID" FROM "sponsorTimes" WHERE "timeSubmitted" = ? AND "category" in (${categories.map((c) => `'${c}'`).join(",")})`, [submission.timeSubmitted]))
.forEach((videoInfo: { category: Category, videoID: VideoID, hashedVideoID: VideoIDHash, service: Service, userID: UserID }) => {
QueryCacher.clearSegmentCache(videoInfo);
users.add(videoInfo.userID);
}
);
(await db.prepare("all", `SELECT "videoID", "hashedVideoID", "service" FROM "titles" WHERE "userID" = ?`, [userID]))
.forEach((videoInfo: { videoID: VideoID; hashedVideoID: VideoIDHash; service: Service; }) => {
QueryCacher.clearBrandingCache(videoInfo);
});
(await db.prepare("all", `SELECT "videoID", "hashedVideoID", "service" FROM "thumbnails" WHERE "userID" = ?`, [userID]))
.forEach((videoInfo: { videoID: VideoID; hashedVideoID: VideoIDHash; service: Service; }) => {
QueryCacher.clearBrandingCache(videoInfo);
});
}
await db.prepare("run", `UPDATE "sponsorTimes" SET "shadowHidden" = ${type} WHERE "timeSubmitted" = ? AND "category" in (${categories.map((c) => `'${c}'`).join(",")})
AND NOT EXISTS ( SELECT "videoID", "category" FROM "lockCategories" WHERE
"sponsorTimes"."videoID" = "lockCategories"."videoID" AND "sponsorTimes"."service" = "lockCategories"."service" AND "sponsorTimes"."category" = "lockCategories"."category")`, [submission.timeSubmitted]);
}));
return users;
}

View File

@@ -4,7 +4,6 @@ import { config } from "../config";
import { privateDB } from "../databases/databases";
import { Logger } from "../utils/logger";
import { getPatreonIdentity, PatronStatus, refreshToken, TokenType } from "../utils/tokenUtils";
import { getHash } from "../utils/getHash";
interface VerifyTokenRequest extends Request {
query: {
@@ -13,74 +12,55 @@ interface VerifyTokenRequest extends Request {
}
export const validateLicenseKeyRegex = (token: string) =>
new RegExp(/[A-Za-z0-9]{40}|[A-Za-z0-9-]{35}|[A-Za-z0-9-]{5}-[A-Za-z0-9-]{5}/).test(token);
const isLocalLicenseKey = (token: string) => /[A-Za-z0-9]{5}-[A-Za-z0-9]{5}/.test(token);
new RegExp(/[A-Za-z0-9]{40}|[A-Za-z0-9-]{35}/).test(token);
export async function verifyTokenRequest(req: VerifyTokenRequest, res: Response): Promise<Response> {
const { query: { licenseKey } } = req;
try {
if (!licenseKey) {
return res.status(400).send("Invalid request");
} else if (!validateLicenseKeyRegex(licenseKey)) {
// fast check for invalid licence key
if (!licenseKey) {
return res.status(400).send("Invalid request");
} else if (!validateLicenseKeyRegex(licenseKey)) {
// fast check for invalid licence key
return res.status(200).send({
allowed: false
});
}
const tokens = (await privateDB.prepare("get", `SELECT "accessToken", "refreshToken", "expiresIn" from "oauthLicenseKeys" WHERE "licenseKey" = ?`
, [licenseKey])) as {accessToken: string, refreshToken: string, expiresIn: number};
if (tokens) {
const identity = await getPatreonIdentity(tokens.accessToken);
if (tokens.expiresIn < 15 * 24 * 60 * 60) {
refreshToken(TokenType.patreon, licenseKey, tokens.refreshToken).catch((e) => Logger.error(`refresh token: ${e}`));
}
/* istanbul ignore else */
if (identity) {
const membership = identity.included?.[0]?.attributes;
const allowed = !!membership && ((membership.patron_status === PatronStatus.active && membership.currently_entitled_amount_cents > 0)
|| (membership.patron_status === PatronStatus.former && membership.campaign_lifetime_support_cents > 300));
return res.status(200).send({
allowed: false
allowed
});
} else {
return res.status(500);
}
} else {
// Check Local
const result = await privateDB.prepare("get", `SELECT "licenseKey" from "licenseKeys" WHERE "licenseKey" = ?`, [licenseKey]);
if (result) {
return res.status(200).send({
allowed: true
});
} else {
// Gumroad
return res.status(200).send({
allowed: await checkAllGumroadProducts(licenseKey)
});
}
if (isLocalLicenseKey(licenseKey) && !licenseKey.startsWith("P")) {
const parts = licenseKey.split("-");
const code = parts[0];
const givenResult = parts[1];
if (getHash(config.tokenSeed + code, 1).startsWith(givenResult)) {
return res.status(200).send({
allowed: true
});
}
}
const tokens = (await privateDB.prepare("get", `SELECT "accessToken", "refreshToken", "expiresIn" from "oauthLicenseKeys" WHERE "licenseKey" = ?`
, [licenseKey])) as {accessToken: string, refreshToken: string, expiresIn: number};
if (tokens) {
const identity = await getPatreonIdentity(tokens.accessToken);
if (tokens.expiresIn < 15 * 24 * 60 * 60) {
refreshToken(TokenType.patreon, licenseKey, tokens.refreshToken).catch((e) => Logger.error(`refresh token: ${e}`));
}
/* istanbul ignore else */
if (identity) {
const membership = identity.included?.[0]?.attributes;
const allowed = !!membership && ((membership.patron_status === PatronStatus.active && membership.currently_entitled_amount_cents > 0)
|| (membership.patron_status === PatronStatus.former && membership.campaign_lifetime_support_cents > 300));
return res.status(200).send({
allowed
});
} else {
return res.status(500);
}
} else {
// Check Local
const result = await privateDB.prepare("get", `SELECT "licenseKey" from "licenseKeys" WHERE "licenseKey" = ?`, [licenseKey]);
if (result) {
return res.status(200).send({
allowed: true
});
} else {
// Gumroad
return res.status(200).send({
allowed: await checkAllGumroadProducts(licenseKey)
});
}
}
} catch (e) {
Logger.error(e as string);
return res.status(500);
}
}

View File

@@ -3,18 +3,14 @@ import { Request, Response } from "express";
export async function viewedVideoSponsorTime(req: Request, res: Response): Promise<Response> {
const UUID = req.query?.UUID;
const videoID = req.query?.videoID;
if (!UUID) {
//invalid request
return res.sendStatus(400);
}
if (!videoID) {
await db.prepare("run", `UPDATE "sponsorTimes" SET views = views + 1 WHERE "UUID" = ?`, [UUID]);
} else {
await db.prepare("run", `UPDATE "sponsorTimes" SET views = views + 1 WHERE "UUID" LIKE ? AND "videoID" = ?`, [`${UUID}%`, videoID]);
}
//up the view count by one
await db.prepare("run", `UPDATE "sponsorTimes" SET views = views + 1 WHERE "UUID" = ?`, [UUID]);
return res.sendStatus(200);
}

View File

@@ -2,21 +2,19 @@ import { Request, Response } from "express";
import { Logger } from "../utils/logger";
import { isUserVIP } from "../utils/isUserVIP";
import { isUserTempVIP } from "../utils/isUserTempVIP";
import { getMaxResThumbnail } from "../utils/youtubeApi";
import { getMaxResThumbnail, YouTubeAPI } from "../utils/youtubeApi";
import { db, privateDB } from "../databases/databases";
import { dispatchEvent, getVoteAuthor, getVoteAuthorRaw } from "../utils/webhookUtils";
import { getFormattedTime } from "../utils/getFormattedTime";
import { getIP } from "../utils/getIP";
import { getHashCache } from "../utils/getHashCache";
import { config } from "../config";
import { HashedUserID, UserID } from "../types/user.model";
import { UserID } from "../types/user.model";
import { DBSegment, Category, HashedIP, IPAddress, SegmentUUID, Service, VideoID, VideoIDHash, VideoDuration, ActionType, VoteType } from "../types/segments.model";
import { QueryCacher } from "../utils/queryCacher";
import axios from "axios";
import { getVideoDetails, videoDetails } from "../utils/getVideoDetails";
import { deleteLockCategories } from "./deleteLockCategories";
import { acquireLock } from "../utils/redisLock";
import { checkBanStatus } from "../utils/checkBan";
const voteTypes = {
normal: 0,
@@ -128,85 +126,88 @@ async function sendWebhooks(voteData: VoteData) {
webhookURL = config.discordCompletelyIncorrectReportWebhookURL;
}
const videoID = submissionInfoRow.videoID;
const data = await getVideoDetails(videoID);
if (config.newLeafURLs !== null) {
const videoID = submissionInfoRow.videoID;
const { err, data } = await YouTubeAPI.listVideos(videoID);
if (err) return;
const isUpvote = voteData.incrementAmount > 0;
// Send custom webhooks
dispatchEvent(isUpvote ? "vote.up" : "vote.down", {
"user": {
"status": getVoteAuthorRaw(userSubmissionCountRow.submissionCount, voteData.isTempVIP, voteData.isVIP, voteData.isOwnSubmission),
},
"video": {
"id": submissionInfoRow.videoID,
"title": data?.title,
"url": `https://www.youtube.com/watch?v=${videoID}`,
"thumbnail": getMaxResThumbnail(videoID),
},
"submission": {
"UUID": voteData.UUID,
"views": voteData.row.views,
"category": voteData.category,
"startTime": submissionInfoRow.startTime,
"endTime": submissionInfoRow.endTime,
const isUpvote = voteData.incrementAmount > 0;
// Send custom webhooks
dispatchEvent(isUpvote ? "vote.up" : "vote.down", {
"user": {
"UUID": submissionInfoRow.userID,
"username": submissionInfoRow.userName,
"submissions": {
"total": submissionInfoRow.count,
"ignored": submissionInfoRow.disregarded,
"status": getVoteAuthorRaw(userSubmissionCountRow.submissionCount, voteData.isTempVIP, voteData.isVIP, voteData.isOwnSubmission),
},
"video": {
"id": submissionInfoRow.videoID,
"title": data?.title,
"url": `https://www.youtube.com/watch?v=${videoID}`,
"thumbnail": getMaxResThumbnail(videoID),
},
"submission": {
"UUID": voteData.UUID,
"views": voteData.row.views,
"category": voteData.category,
"startTime": submissionInfoRow.startTime,
"endTime": submissionInfoRow.endTime,
"user": {
"UUID": submissionInfoRow.userID,
"username": submissionInfoRow.userName,
"submissions": {
"total": submissionInfoRow.count,
"ignored": submissionInfoRow.disregarded,
},
},
},
},
"votes": {
"before": voteData.row.votes,
"after": (voteData.row.votes + voteData.incrementAmount - voteData.oldIncrementAmount),
},
});
"votes": {
"before": voteData.row.votes,
"after": (voteData.row.votes + voteData.incrementAmount - voteData.oldIncrementAmount),
},
});
// Send discord message
if (webhookURL !== null && !isUpvote) {
axios.post(webhookURL, {
"embeds": [{
"title": data?.title,
"url": `https://www.youtube.com/watch?v=${submissionInfoRow.videoID}&t=${(submissionInfoRow.startTime.toFixed(0) - 2)}s#requiredSegment=${voteData.UUID}`,
"description": `**${voteData.row.votes} Votes Prior | \
${(voteData.row.votes + voteData.incrementAmount - voteData.oldIncrementAmount)} Votes Now | ${voteData.row.views} \
Views**\n\n**Locked**: ${voteData.row.locked}\n\n**Submission ID:** ${voteData.UUID}\
\n**Category:** ${submissionInfoRow.category}\
\n\n**Submitted by:** ${submissionInfoRow.userName}\n${submissionInfoRow.userID}\
\n\n**Total User Submissions:** ${submissionInfoRow.count}\
\n**Ignored User Submissions:** ${submissionInfoRow.disregarded}\
\n\n**Timestamp:** \
${getFormattedTime(submissionInfoRow.startTime)} to ${getFormattedTime(submissionInfoRow.endTime)}`,
"color": 10813440,
"author": {
"name": voteData.finalResponse?.webhookMessage ??
voteData.finalResponse?.finalMessage ??
`${getVoteAuthor(userSubmissionCountRow.submissionCount, voteData.isTempVIP, voteData.isVIP, voteData.isOwnSubmission)}${voteData.row.locked ? " (Locked)" : ""}`,
},
"thumbnail": {
"url": getMaxResThumbnail(videoID),
},
}],
})
.then(res => {
if (res.status >= 400) {
Logger.error("Error sending reported submission Discord hook");
Logger.error(JSON.stringify((res.data)));
Logger.error("\n");
}
// Send discord message
if (webhookURL !== null && !isUpvote) {
axios.post(webhookURL, {
"embeds": [{
"title": data?.title,
"url": `https://www.youtube.com/watch?v=${submissionInfoRow.videoID}&t=${(submissionInfoRow.startTime.toFixed(0) - 2)}s#requiredSegment=${voteData.UUID}`,
"description": `**${voteData.row.votes} Votes Prior | \
${(voteData.row.votes + voteData.incrementAmount - voteData.oldIncrementAmount)} Votes Now | ${voteData.row.views} \
Views**\n\n**Locked**: ${voteData.row.locked}\n\n**Submission ID:** ${voteData.UUID}\
\n**Category:** ${submissionInfoRow.category}\
\n\n**Submitted by:** ${submissionInfoRow.userName}\n${submissionInfoRow.userID}\
\n\n**Total User Submissions:** ${submissionInfoRow.count}\
\n**Ignored User Submissions:** ${submissionInfoRow.disregarded}\
\n\n**Timestamp:** \
${getFormattedTime(submissionInfoRow.startTime)} to ${getFormattedTime(submissionInfoRow.endTime)}`,
"color": 10813440,
"author": {
"name": voteData.finalResponse?.webhookMessage ??
voteData.finalResponse?.finalMessage ??
`${getVoteAuthor(userSubmissionCountRow.submissionCount, voteData.isTempVIP, voteData.isVIP, voteData.isOwnSubmission)}${voteData.row.locked ? " (Locked)" : ""}`,
},
"thumbnail": {
"url": getMaxResThumbnail(videoID),
},
}],
})
.catch(err => {
Logger.error("Failed to send reported submission Discord hook.");
Logger.error(JSON.stringify(err));
Logger.error("\n");
});
.then(res => {
if (res.status >= 400) {
Logger.error("Error sending reported submission Discord hook");
Logger.error(JSON.stringify((res.data)));
Logger.error("\n");
}
})
.catch(err => {
Logger.error("Failed to send reported submission Discord hook.");
Logger.error(JSON.stringify(err));
Logger.error("\n");
});
}
}
}
}
async function categoryVote(UUID: SegmentUUID, userID: HashedUserID, isVIP: boolean, isTempVIP: boolean, isOwnSubmission: boolean, category: Category
async function categoryVote(UUID: SegmentUUID, userID: UserID, isVIP: boolean, isTempVIP: boolean, isOwnSubmission: boolean, category: Category
, hashedIP: HashedIP, finalResponse: FinalResponse): Promise<{ status: number, message?: string }> {
// Check if they've already made a vote
const usersLastVoteInfo = await privateDB.prepare("get", `select count(*) as votes, category from "categoryVotes" where "UUID" = ? and "userID" = ? group by category`, [UUID, userID], { useReplica: true });
@@ -242,7 +243,8 @@ async function categoryVote(UUID: SegmentUUID, userID: HashedUserID, isVIP: bool
const timeSubmitted = Date.now();
const voteAmount = (isVIP || isTempVIP) ? 500 : 1;
const ableToVote = finalResponse.finalStatus === 200; // ban status checks handled by vote() (caller function)
const ableToVote = finalResponse.finalStatus === 200
&& (await db.prepare("get", `SELECT "userID" FROM "shadowBannedUsers" WHERE "userID" = ?`, [userID], { useReplica: true })) === undefined;
if (ableToVote) {
// Add the vote
@@ -303,10 +305,9 @@ export async function voteOnSponsorTime(req: Request, res: Response): Promise<Re
const paramUserID = getUserID(req);
const type = req.query.type !== undefined ? parseInt(req.query.type as string) : undefined;
const category = req.query.category as Category;
const videoID = req.query.videoID as VideoID;
const ip = getIP(req);
const result = await vote(ip, UUID, paramUserID, type, videoID, category);
const result = await vote(ip, UUID, paramUserID, type, category);
const response = res.status(result.status);
if (result.message) {
@@ -318,7 +319,7 @@ export async function voteOnSponsorTime(req: Request, res: Response): Promise<Re
}
}
export async function vote(ip: IPAddress, UUID: SegmentUUID, paramUserID: UserID, type: number, videoID?: VideoID, category?: Category): Promise<{ status: number, message?: string, json?: unknown }> {
export async function vote(ip: IPAddress, UUID: SegmentUUID, paramUserID: UserID, type: number, category?: Category): Promise<{ status: number, message?: string, json?: unknown }> {
// missing key parameters
if (!UUID || !paramUserID || !(type !== undefined || category)) {
return { status: 400 };
@@ -328,28 +329,12 @@ export async function vote(ip: IPAddress, UUID: SegmentUUID, paramUserID: UserID
return { status: 200 };
}
if (videoID && UUID.length < 60) {
// Get the full UUID
const segmentInfo: DBSegment = await db.prepare("get", `SELECT "UUID" from "sponsorTimes" WHERE "UUID" LIKE ? AND "videoID" = ?`, [`${UUID}%`, videoID]);
if (segmentInfo) {
UUID = segmentInfo.UUID;
}
}
const originalType = type;
//hash the userID
const nonAnonUserID = await getHashCache(paramUserID);
const userID = await getHashCache(paramUserID + UUID);
//hash the ip 5000 times so no one can get it from the database
const hashedIP: HashedIP = await getHashCache((ip + config.globalSalt) as IPAddress);
const lock = await acquireLock(`voteOnSponsorTime:${UUID}.${paramUserID}`);
if (!lock.status) {
return { status: 429, message: "Vote already in progress" };
}
// To force a non 200, change this early
const finalResponse: FinalResponse = {
blockVote: false,
@@ -359,51 +344,42 @@ export async function vote(ip: IPAddress, UUID: SegmentUUID, paramUserID: UserID
webhookMessage: null
};
//hash the ip 5000 times so no one can get it from the database
const hashedIP: HashedIP = await getHashCache((ip + config.globalSalt) as IPAddress);
const segmentInfo: DBSegment = await db.prepare("get", `SELECT * from "sponsorTimes" WHERE "UUID" = ?`, [UUID]);
// segment doesnt exist
if (!segmentInfo) {
lock.unlock();
return { status: 404 };
}
const isTempVIP = await isUserTempVIP(nonAnonUserID, segmentInfo.videoID);
const isVIP = await isUserVIP(nonAnonUserID);
const isBanned = await checkBanStatus(nonAnonUserID, hashedIP); // propagates IP bans
//check if user voting on own submission
const isOwnSubmission = nonAnonUserID === segmentInfo.userID;
// disallow vote types 10/11
if (type === 10 || type === 11) {
lock.unlock();
return { status: 400 };
}
const warning = (await db.prepare("get", `SELECT "reason" FROM warnings WHERE "userID" = ? AND enabled = 1 AND type = 0`,
[nonAnonUserID],
const MILLISECONDS_IN_HOUR = 3600000;
const now = Date.now();
const warnings = (await db.prepare("all", `SELECT "reason" FROM warnings WHERE "userID" = ? AND "issueTime" > ? AND enabled = 1`,
[nonAnonUserID, Math.floor(now - (config.hoursAfterWarningExpires * MILLISECONDS_IN_HOUR))],
));
if (warning != null) {
const warningReason = warning.reason;
lock.unlock();
return { status: 403, message: "Vote rejected due to a tip from a moderator. This means that we noticed you were making some common mistakes that are not malicious, and we just want to clarify the rules. " +
if (warnings.length >= config.maxNumberOfActiveWarnings) {
const warningReason = warnings[0]?.reason;
return { status: 403, message: "Vote rejected due to a warning from a moderator. This means that we noticed you were making some common mistakes that are not malicious, and we just want to clarify the rules. " +
"Could you please send a message in Discord or Matrix so we can further help you?" +
`${(warningReason.length > 0 ? ` Tip message: '${warningReason}'` : "")}` };
}
// we can return out of the function early if the user is banned after warning checks
// returning before warning checks would make them not appear on vote if the user is also banned
if (isBanned) {
lock.unlock();
return { status: 200 };
`${(warningReason.length > 0 ? ` Warning reason: '${warningReason}'` : "")}` };
}
// no type but has category, categoryVote
if (!type && category) {
const result = categoryVote(UUID, nonAnonUserID, isVIP, isTempVIP, isOwnSubmission, category, hashedIP, finalResponse);
lock.unlock();
return result;
return categoryVote(UUID, nonAnonUserID, isVIP, isTempVIP, isOwnSubmission, category, hashedIP, finalResponse);
}
// If not upvote, or an upvote on a dead segment (for ActionType.Full)
@@ -423,11 +399,8 @@ export async function vote(ip: IPAddress, UUID: SegmentUUID, paramUserID: UserID
if (!isNaN(type) && segmentInfo.votes <= -2 && segmentInfo.actionType !== ActionType.Full &&
!(isVIP || isTempVIP || isOwnSubmission)) {
if (type == 1) {
lock.unlock();
return { status: 403, message: "Not allowed to upvote segment with too many downvotes unless you are VIP." };
} else if (type == 0) {
lock.unlock();
// Already downvoted enough, ignore
return { status: 200 };
}
@@ -460,8 +433,6 @@ export async function vote(ip: IPAddress, UUID: SegmentUUID, paramUserID: UserID
//undo/cancel vote
incrementAmount = 0;
} else {
lock.unlock();
//unrecongnised type of vote
return { status: 400 };
}
@@ -499,15 +470,16 @@ export async function vote(ip: IPAddress, UUID: SegmentUUID, paramUserID: UserID
}
// Only change the database if they have made a submission before and haven't voted recently
// ban status check was handled earlier (w/ early return)
const ableToVote = isVIP || isTempVIP || (
(!(isOwnSubmission && incrementAmount > 0 && oldIncrementAmount >= 0)
&& !(originalType === VoteType.Malicious && segmentInfo.actionType !== ActionType.Chapter)
&& !finalResponse.blockVote
&& finalResponse.finalStatus === 200
&& (await db.prepare("get", `SELECT "userID" FROM "sponsorTimes" WHERE "userID" = ? AND "category" = ? AND "votes" > -2 AND "hidden" = 0 AND "shadowHidden" = 0 LIMIT 1`, [nonAnonUserID, segmentInfo.category], { useReplica: true }) !== undefined)
&& (await privateDB.prepare("get", `SELECT "UUID" FROM "votes" WHERE "UUID" = ? AND "hashedIP" = ? AND "userID" != ?`, [UUID, hashedIP, userID], { useReplica: true })) === undefined)
);
const userAbleToVote = (!(isOwnSubmission && incrementAmount > 0 && oldIncrementAmount >= 0)
&& !(originalType === VoteType.Malicious && segmentInfo.actionType !== ActionType.Chapter)
&& !finalResponse.blockVote
&& finalResponse.finalStatus === 200
&& (await db.prepare("get", `SELECT "userID" FROM "sponsorTimes" WHERE "userID" = ?`, [nonAnonUserID], { useReplica: true })) !== undefined
&& (await db.prepare("get", `SELECT "userID" FROM "shadowBannedUsers" WHERE "userID" = ?`, [nonAnonUserID], { useReplica: true })) === undefined
&& (await privateDB.prepare("get", `SELECT "UUID" FROM "votes" WHERE "UUID" = ? AND "hashedIP" = ? AND "userID" != ?`, [UUID, hashedIP, userID], { useReplica: true })) === undefined);
const ableToVote = isVIP || isTempVIP || userAbleToVote;
if (ableToVote) {
//update the votes table
@@ -554,13 +526,8 @@ export async function vote(ip: IPAddress, UUID: SegmentUUID, paramUserID: UserID
finalResponse
}).catch((e) => Logger.error(`Sending vote webhook: ${e}`));
}
lock.unlock();
return { status: finalResponse.finalStatus, message: finalResponse.finalMessage ?? undefined };
} catch (err) {
lock.unlock();
Logger.error(err as string);
return { status: 500, message: finalResponse.finalMessage ?? undefined, json: { error: "Internal error creating segment vote" } };
}

View File

@@ -1,17 +1,12 @@
import { Category, Service, VideoID, VideoIDHash } from "./segments.model";
import { Service, VideoID, VideoIDHash } from "./segments.model";
import { UserID } from "./user.model";
export type BrandingUUID = string & { readonly __brandingUUID: unique symbol };
export type CasualCategory = ("funny" | "creative" | "clever" | "descriptive" | "other" | "downvote") & { __casualCategoryBrand: unknown };
export interface BrandingDBSubmissionData {
videoID: VideoID,
}
export interface BrandingDBSubmission extends BrandingDBSubmissionData {
export interface BrandingDBSubmission {
shadowHidden: number,
UUID: BrandingUUID,
videoID: VideoID,
hashedVideoID: VideoIDHash
}
@@ -19,10 +14,7 @@ export interface TitleDBResult extends BrandingDBSubmission {
title: string,
original: number,
votes: number,
downvotes: number,
locked: number,
verification: number,
userID: UserID
locked: number
}
export interface TitleResult {
@@ -30,17 +22,14 @@ export interface TitleResult {
original: boolean,
votes: number,
locked: boolean,
UUID: BrandingUUID,
userID?: UserID
UUID: BrandingUUID
}
export interface ThumbnailDBResult extends BrandingDBSubmission {
timestamp?: number,
original: number,
votes: number,
downvotes: number,
locked: number,
userID: UserID
locked: number
}
export interface ThumbnailResult {
@@ -48,33 +37,20 @@ export interface ThumbnailResult {
original: boolean,
votes: number,
locked: boolean,
UUID: BrandingUUID,
userID?: UserID
}
export interface CasualVote {
id: string,
count: number,
title: string | null
UUID: BrandingUUID
}
export interface BrandingResult {
titles: TitleResult[],
thumbnails: ThumbnailResult[],
casualVotes: CasualVote[],
randomTime: number,
videoDuration: number | null
thumbnails: ThumbnailResult[]
}
export interface BrandingHashDBResult {
titles: TitleDBResult[];
thumbnails: ThumbnailDBResult[];
segments: BrandingSegmentDBResult[];
casualVotes: CasualVoteDBResult[];
titles: TitleDBResult[],
thumbnails: ThumbnailDBResult[]
}
export interface OriginalThumbnailSubmission {
timestamp?: undefined | null;
original: true;
}
@@ -96,45 +72,4 @@ export interface BrandingSubmission {
videoID: VideoID;
userID: UserID;
service: Service;
autoLock: boolean | undefined;
downvote: boolean | undefined;
videoDuration: number | undefined;
wasWarned: boolean | undefined;
casualMode: boolean | undefined;
}
export interface CasualVoteSubmission {
videoID: VideoID;
userID: UserID;
service: Service;
downvote: boolean | undefined;
categories: CasualCategory[];
title?: string;
}
export interface BrandingSegmentDBResult {
startTime: number;
endTime: number;
category: Category;
videoDuration: number;
}
export interface CasualVoteDBResult {
category: CasualCategory;
upvotes: number;
downvotes: number;
title?: string;
}
export interface BrandingSegmentHashDBResult extends BrandingDBSubmissionData {
startTime: number;
endTime: number;
category: Category;
videoDuration: number;
}
export interface CasualVoteHashDBResult extends BrandingDBSubmissionData {
category: CasualCategory;
upvotes: number;
downvotes: number;
}
}

View File

@@ -1,6 +1,5 @@
import { PoolConfig } from "pg";
import * as redis from "redis";
import { DeArrowType } from "./segments.model";
interface RedisConfig extends redis.RedisClientOptions {
enabled: boolean;
@@ -10,11 +9,7 @@ interface RedisConfig extends redis.RedisClientOptions {
maxWriteConnections: number;
stopWritingAfterResponseTime: number;
responseTimePause: number;
maxReadResponseTime: number;
disableHashCache: boolean;
clientCacheSize: number;
useCompression: boolean;
dragonflyMode: boolean;
}
interface RedisReadOnlyConfig extends redis.RedisClientOptions {
@@ -31,7 +26,6 @@ export interface CustomWritePostgresConfig extends CustomPostgresConfig {
maxActiveRequests: number;
timeout: number;
highLoadThreshold: number;
redisTimeoutThreshold: number;
}
export interface CustomPostgresReadOnlyConfig extends CustomPostgresConfig {
@@ -41,35 +35,6 @@ export interface CustomPostgresReadOnlyConfig extends CustomPostgresConfig {
stopRetryThreshold: number;
}
export type ValidatorPattern = string | [string, string];
export interface RequestValidatorRule {
ruleName?: string;
// mostly universal
userAgent?: ValidatorPattern;
userAgentHeader?: ValidatorPattern;
videoDuration?: ValidatorPattern;
videoID?: ValidatorPattern;
userID?: ValidatorPattern;
service?: ValidatorPattern;
endpoint?: ValidatorPattern;
// sb postSkipSegments
startTime?: ValidatorPattern;
endTime?: ValidatorPattern;
category?: ValidatorPattern;
actionType?: ValidatorPattern;
description?: ValidatorPattern;
// dearrow postBranding
title?: ValidatorPattern;
titleOriginal?: boolean;
thumbnailTimestamp?: ValidatorPattern;
thumbnailOriginal?: boolean;
dearrowDownvote?: boolean;
// postCasual
casualCategory?: ValidatorPattern;
// setUsername
newUsername?: ValidatorPattern;
}
export interface SBSConfig {
[index: string]: any
port: number;
@@ -82,12 +47,8 @@ export interface SBSConfig {
discordFirstTimeSubmissionsWebhookURL?: string;
discordCompletelyIncorrectReportWebhookURL?: string;
discordMaliciousReportWebhookURL?: string;
discordDeArrowLockedWebhookURL?: string,
discordDeArrowWarnedWebhookURL?: string,
discordNewUserWebhookURL?: string;
neuralBlockURL?: string;
discordNeuralBlockRejectWebhookURL?: string;
discordRejectedNewUserWebhookURL?: string;
minReputationToSubmitChapter: number;
minReputationToSubmitFiller: number;
userCounterURL?: string;
@@ -104,16 +65,16 @@ export interface SBSConfig {
readOnly: boolean;
webhooks: WebhookConfig[];
categoryList: string[];
casualCategoryList: string[];
deArrowTypes: DeArrowType[];
categorySupport: Record<string, string[]>;
maxTitleLength: number;
getTopUsersCacheTimeMinutes: number;
maxNumberOfActiveWarnings: number;
hoursAfterWarningExpires: number;
rateLimit: {
vote: RateLimitConfig;
view: RateLimitConfig;
};
requestValidatorRules: RequestValidatorRule[];
mysql?: any;
privateMysql?: any;
minimumPrefix?: string;
maximumPrefix?: string;
redis?: RedisConfig;
@@ -122,7 +83,6 @@ export interface SBSConfig {
maxRewardTimePerSegmentInSeconds?: number;
postgres?: CustomWritePostgresConfig;
postgresReadOnly?: CustomPostgresReadOnlyConfig;
postgresPrivateMax?: number;
dumpDatabase?: DumpDatabase;
diskCacheURL: string;
crons: CronJobOptions;
@@ -135,20 +95,7 @@ export interface SBSConfig {
gumroad: {
productPermalinks: string[],
},
tokenSeed: string,
minUserIDLength: number,
deArrowPaywall: boolean,
useCacheForSegmentGroups: boolean
maxConnections: number;
maxResponseTime: number;
maxResponseTimeWhileLoadingCache: number;
etagExpiry: number;
youTubeKeys: {
visitorData: string | null;
poToken: string | null;
floatieUrl: string | null;
floatieAuth: string | null;
}
minUserIDLength: number
}
export interface WebhookConfig {
@@ -198,4 +145,4 @@ export interface CronJobOptions {
export interface DownvoteSegmentArchiveCron {
voteThreshold: number;
timeThresholdInDays: number;
}
}

View File

@@ -6,7 +6,6 @@ export type SegmentUUID = string & { __segmentUUIDBrand: unknown };
export type VideoID = string & { __videoIDBrand: unknown };
export type VideoDuration = number & { __videoDurationBrand: unknown };
export type Category = ("sponsor" | "selfpromo" | "interaction" | "intro" | "outro" | "preview" | "music_offtopic" | "poi_highlight" | "chapter" | "filler" | "exclusive_access") & { __categoryBrand: unknown };
export type DeArrowType = "title" | "thumbnail";
export type VideoIDHash = VideoID & HashedValue;
export type IPAddress = string & { __ipAddressBrand: unknown };
export type HashedIP = IPAddress & HashedValue;
@@ -22,8 +21,7 @@ export enum ActionType {
// Uncomment as needed
export enum Service {
YouTube = "YouTube",
Spotify = "Spotify",
PeerTube = "PeerTube"
PeerTube = "PeerTube",
// Twitch = 'Twitch',
// Nebula = 'Nebula',
// RSS = 'RSS',
@@ -104,7 +102,7 @@ export interface VideoData {
}
export interface SegmentCache {
shadowHiddenSegmentIPs: SBRecord<VideoID, SBRecord<string, Promise<{hashedIP: HashedIP}[] | null>>>,
shadowHiddenSegmentIPs: SBRecord<VideoID, SBRecord<string, {hashedIP: HashedIP}[]>>,
userHashedIP?: HashedIP
userHashedIPPromise?: Promise<HashedIP>;
}

View File

@@ -5,6 +5,5 @@ export type HashedUserID = UserID & HashedValue;
export enum Feature {
ChapterSubmitter = 0,
FillerSubmitter = 1,
DeArrowTitleSubmitter = 2,
FillerSubmitter = 1
}

View File

@@ -1,4 +0,0 @@
export enum WarningType {
SponsorBlock = 0,
DeArrow = 1
}

View File

@@ -1,26 +0,0 @@
import { HashedUserID } from "../types/user.model";
import { db } from "../databases/databases";
import { Category, HashedIP } from "../types/segments.model";
import { banUser } from "../routes/shadowBanUser";
import { config } from "../config";
import { Logger } from "./logger";
export async function isUserBanned(userID: HashedUserID): Promise<boolean> {
return (await db.prepare("get", `SELECT 1 FROM "shadowBannedUsers" WHERE "userID" = ? LIMIT 1`, [userID], { useReplica: true })) !== undefined;
}
export async function isIPBanned(ip: HashedIP): Promise<boolean> {
return (await db.prepare("get", `SELECT 1 FROM "shadowBannedIPs" WHERE "hashedIP" = ? LIMIT 1`, [ip], { useReplica: true })) !== undefined;
}
// NOTE: this function will propagate IP bans
export async function checkBanStatus(userID: HashedUserID, ip: HashedIP): Promise<boolean> {
const [userBanStatus, ipBanStatus] = await Promise.all([isUserBanned(userID), isIPBanned(ip)]);
if (!userBanStatus && ipBanStatus) {
// Make sure the whole user is banned
banUser(userID, true, true, 1, config.categoryList as Category[], config.deArrowTypes)
.catch((e) => Logger.error(`Error banning user after submitting from a banned IP: ${e}`));
}
return userBanStatus || ipBanStatus;
}

View File

@@ -23,7 +23,9 @@ export function createMemoryCache(memoryFn: (...args: any[]) => void, cacheTimeM
}
}
// create new promise
const promise = Promise.resolve(memoryFn(...args));
const promise = new Promise((resolve) => {
resolve(memoryFn(...args));
});
// store promise reference until fulfilled
promiseMemory.set(cacheKey, promise);
return promise.then(result => {

View File

@@ -2,7 +2,6 @@ import axios from "axios";
import { Logger } from "../utils/logger";
export const getCWSUsers = (extID: string): Promise<number | undefined> =>
axios.post(`https://chrome.google.com/webstore/ajax/detail?pv=20210820&id=${extID}`)
.then(res => res.data.split("\n")[2])
.then(data => JSON.parse(data))
@@ -11,22 +10,4 @@ export const getCWSUsers = (extID: string): Promise<number | undefined> =>
.catch((err) => {
Logger.error(`Error getting chrome users - ${err}`);
return 0;
});
/* istanbul ignore next */
export function getChromeUsers(chromeExtensionUrl: string): Promise<number> {
return axios.get(chromeExtensionUrl)
.then(res => {
const body = res.data;
// 2024-02-09
// >20,000 users<
const match = body.match(/>([\d,]+) users</)?.[1];
if (match) {
return parseInt(match.replace(/,/g, ""));
}
})
.catch(/* istanbul ignore next */ () => {
Logger.debug(`Failing to connect to ${chromeExtensionUrl}`);
return 0;
});
}
});

View File

@@ -1,14 +1,14 @@
import { getHash } from "./getHash";
import { HashedValue } from "../types/hash.model";
import { ActionType, VideoID, Service, Category } from "../types/segments.model";
import { HashedUserID } from "../types/user.model";
import { UserID } from "../types/user.model";
export function getSubmissionUUID(
videoID: VideoID,
category: Category,
actionType: ActionType,
description: string,
userID: HashedUserID,
userID: UserID,
startTime: number,
endTime: number,
service: Service

View File

@@ -46,10 +46,7 @@ async function newLeafWrapper(videoId: string, ignoreCache: boolean) {
export function getVideoDetails(videoId: string, ignoreCache = false): Promise<videoDetails> {
if (!config.newLeafURLs) {
return getPlayerData(videoId, ignoreCache)
.then(data => convertFromInnerTube(data))
.catch(() => {
return null;
});
.then(data => convertFromInnerTube(data));
}
return Promise.any([
newLeafWrapper(videoId, ignoreCache)

View File

@@ -1,12 +1,11 @@
import axios, { AxiosError } from "axios";
import axios from "axios";
import { Logger } from "./logger";
import { innerTubeVideoDetails } from "../types/innerTubeApi.model";
import DiskCache from "./diskCache";
import { config } from "../config";
const privateResponse = (videoId: string, reason: string): innerTubeVideoDetails => ({
const privateResponse = (videoId: string): innerTubeVideoDetails => ({
videoId,
title: reason,
title: "",
channelId: "",
// exclude video duration
isOwnerViewing: false,
@@ -28,58 +27,24 @@ const privateResponse = (videoId: string, reason: string): innerTubeVideoDetails
publishDate: ""
});
export async function getFromITube (videoID: string): Promise<innerTubeVideoDetails> {
if (config.youTubeKeys.floatieUrl) {
try {
const result = await axios.get(config.youTubeKeys.floatieUrl, {
params: {
videoID,
auth: config.youTubeKeys.floatieAuth
}
});
if (result.status === 200) {
return result.data?.videoDetails ?? privateResponse(videoID, result.data?.playabilityStatus?.reason ?? "Bad response");
} else {
return Promise.reject(`Floatie returned non-200 response: ${result.status}`);
}
} catch (e) {
if (e instanceof AxiosError) {
const result = e.response;
if (result && result.status === 500) {
return privateResponse(videoID, result.data ?? "Bad response");
} else {
return Promise.reject(`Floatie returned non-200 response: ${result?.status}`);
}
}
}
}
async function getFromITube (videoID: string): Promise<innerTubeVideoDetails> {
// start subrequest
const url = "https://www.youtube.com/youtubei/v1/player";
const data = {
context: {
client: {
clientName: "WEB",
clientVersion: "2.20221215.04.01",
visitorData: config.youTubeKeys.visitorData
clientVersion: "2.20221215.04.01"
}
},
videoId: videoID,
serviceIntegrityDimensions: {
poToken: config.youTubeKeys.poToken
}
videoId: videoID
};
const result = await axios.post(url, data, {
timeout: 3500,
headers: {
"X-Goog-Visitor-Id": config.youTubeKeys.visitorData
}
timeout: 3500
});
/* istanbul ignore else */
if (result.status === 200) {
return result.data?.videoDetails ?? privateResponse(videoID, result.data?.playabilityStatus?.reason ?? "Bad response");
return result.data?.videoDetails ?? privateResponse(videoID);
} else {
return Promise.reject(`Innertube returned non-200 response: ${result.status}`);
}

Some files were not shown because too many files have changed in this diff Show More