Merge branch 'master' into master

SashaXser authored on 2024-01-20 07:07:03 +04:00; committed by GitHub
6 changed files with 376 additions and 22 deletions

View File

@@ -7,7 +7,7 @@ import { config } from "../config";
async function get<T>(fetchFromDB: () => Promise<T>, key: string): Promise<T> {
try {
- const reply = await redis.get(key);
+ const reply = await redis.getCompressed(key);
if (reply) {
Logger.debug(`Got data from redis: ${reply}`);
@@ -21,7 +21,7 @@ async function get<T>(fetchFromDB: () => Promise<T>, key: string): Promise<T> {
const data = await fetchFromDB();
- redis.setEx(key, config.redis?.expiryTime, JSON.stringify(data)).catch((err) => Logger.error(err));
+ redis.setExCompressed(key, config.redis?.expiryTime, JSON.stringify(data)).catch((err) => Logger.error(err));
return data;
}
@@ -36,7 +36,7 @@ async function getTraced<T>(fetchFromDB: () => Promise<T>, key: string): Promise
const startTime = Date.now();
try {
- const reply = await redis.get(key);
+ const reply = await redis.getCompressed(key);
if (reply) {
Logger.debug(`Got data from redis: ${reply}`);
@@ -55,7 +55,7 @@ async function getTraced<T>(fetchFromDB: () => Promise<T>, key: string): Promise
const dbStartTime = Date.now();
const data = await fetchFromDB();
- redis.setEx(key, config.redis?.expiryTime, JSON.stringify(data)).catch((err) => Logger.error(err));
+ redis.setExCompressed(key, config.redis?.expiryTime, JSON.stringify(data)).catch((err) => Logger.error(err));
return {
data,
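
The caching helpers above now go through redis.getCompressed and redis.setExCompressed instead of plain get/setEx. Their implementation is not part of this excerpt; the sketch below is only an assumption of what such wrappers could look like, using node-redis v4 and Node's built-in zlib (gzip) — the names, compression choice, and error handling are illustrative, not the project's actual code.

import { createClient, commandOptions } from "redis";
import { gzipSync, gunzipSync } from "zlib";

// (client.connect() must have been awaited before these wrappers are used)
const client = createClient();

// Hypothetical wrapper: read raw bytes and gunzip them back into the JSON string
async function getCompressed(key: string): Promise<string | null> {
    const raw = await client.get(commandOptions({ returnBuffers: true }), key);
    return raw ? gunzipSync(raw).toString("utf-8") : null;
}

// Hypothetical wrapper: gzip the JSON string, then store it with the usual TTL
async function setExCompressed(key: string, seconds: number, value: string): Promise<void> {
    await client.setEx(key, seconds, gzipSync(value));
}

If this is roughly what the real helpers do, the calling code changes only in the method names, since both wrappers still exchange JSON strings at their boundaries.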

View File

@@ -53,7 +53,7 @@ const writeResponseTime: number[] = [];
let lastResponseTimeLimit = 0;
const maxStoredTimes = 200;
- export class TooManyActiveConnectionsError extends Error { }
+ export class TooManyActiveConnectionsError extends Error {}
export let connectionPromise = Promise.resolve();
@@ -106,7 +106,7 @@ if (config.redis?.enabled) {
readResponseTime.push(responseTime);
if (readResponseTime.length > maxStoredTimes) readResponseTime.shift();
if (config.redis.stopWritingAfterResponseTime
- && responseTime > config.redis.stopWritingAfterResponseTime) {
+ && responseTime > config.redis.stopWritingAfterResponseTime) {
Logger.error(`Hit response time limit at ${responseTime}ms`);
lastResponseTimeLimit = Date.now();
}
@@ -126,7 +126,7 @@ if (config.redis?.enabled) {
new Promise((resolve, reject) => {
if ((config.redis.maxWriteConnections && activeRequests > config.redis.maxWriteConnections)
|| (config.redis.responseTimePause
- && Date.now() - lastResponseTimeLimit < config.redis.responseTimePause)) {
+ && Date.now() - lastResponseTimeLimit < config.redis.responseTimePause)) {
reject(`Too many active requests to write due to ${activeRequests} requests and ${Date.now() - lastResponseTimeLimit}ms since last limit. ${(db as Postgres)?.getStats?.()?.activeRequests} active db requests with ${(db as Postgres)?.getStats?.()?.avgReadTime}ms`);
return;
}
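
The two hunks above appear to touch only indentation, but the condition they format is this file's write back-pressure gate: a read that exceeds config.redis.stopWritingAfterResponseTime stamps lastResponseTimeLimit, and every subsequent write is rejected while the number of in-flight requests exceeds maxWriteConnections or while that stamp is younger than responseTimePause. A condensed restatement of the gate, keeping the config fields and variable names from the diff and simplifying everything else:

// Illustration only — mirrors the condition shown in the diff, not the actual module.
interface WriteGateConfig {
    maxWriteConnections?: number;
    responseTimePause?: number;
}

function canWrite(activeRequests: number, lastResponseTimeLimit: number, config: WriteGateConfig): boolean {
    const tooManyConnections = !!config.maxWriteConnections
        && activeRequests > config.maxWriteConnections;
    // A recent slow read puts writes into a cool-down window
    const coolingDown = !!config.responseTimePause
        && Date.now() - lastResponseTimeLimit < config.responseTimePause;
    return !tooManyConnections && !coolingDown;
}
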
@@ -162,7 +162,7 @@ if (config.redis?.enabled) {
.catch((err) => reject(err))
);
/* istanbul ignore next */
- client.on("error", function (error) {
+ client.on("error", function(error) {
lastClientFail = Date.now();
Logger.error(`Redis Error: ${error}`);
});
@@ -171,7 +171,7 @@ if (config.redis?.enabled) {
Logger.info("Redis: trying to reconnect");
});
/* istanbul ignore next */
- readClient?.on("error", function (error) {
+ readClient?.on("error", function(error) {
lastReadFail = Date.now();
Logger.error(`Redis Read-Only Error: ${error}`);
});
@@ -186,7 +186,7 @@ function pickChoice<T>(client: T, readClient: T): T {
const ignoreReadDueToFailure = lastReadFail > Date.now() - 1000 * 30;
const readDueToFailure = lastClientFail > Date.now() - 1000 * 30;
if (readAvailable && !ignoreReadDueToFailure && (readDueToFailure ||
- Math.random() > 1 / (config.redisRead?.weight + 1))) {
+ Math.random() > 1 / (config.redisRead?.weight + 1))) {
return readClient;
} else {
return client;
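
For the read/write split in pickChoice, the weighting works out as follows: a healthy replica receives a read with probability 1 - 1/(weight + 1), i.e. weight/(weight + 1) — with config.redisRead.weight = 2 that is about two out of three reads — while a recent failure on either side (within the last 30 seconds) pins traffic to the other client. A standalone sketch of just the weighting, useful for sanity-checking the distribution:

// Illustration only: the probability behind pickChoice's replica weighting.
function useReadReplica(weight: number): boolean {
    return Math.random() > 1 / (weight + 1);
}

// Quick empirical check (not part of the server): with weight = 2 the share
// of reads sent to the replica converges on roughly 0.67.
let replicaHits = 0;
const samples = 100_000;
for (let i = 0; i < samples; i++) {
    if (useReadReplica(2)) replicaHits++;
}
console.log(`replica share ~ ${(replicaHits / samples).toFixed(2)}`);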

View File

@@ -5,43 +5,43 @@ import { Logger } from "./logger";
import { BrandingUUID } from "../types/branding.model";
export const skipSegmentsKey = (videoID: VideoID, service: Service): string =>
- `segments.v4.${service}.videoID.${videoID}`;
+ `segments.v6.${service}.videoID.${videoID}`;
export const skipSegmentGroupsKey = (videoID: VideoID, service: Service): string =>
- `segments.groups.v3.${service}.videoID.${videoID}`;
+ `segments.groups.v5.${service}.videoID.${videoID}`;
export function skipSegmentsHashKey(hashedVideoIDPrefix: VideoIDHash, service: Service): string {
hashedVideoIDPrefix = hashedVideoIDPrefix.substring(0, 4) as VideoIDHash;
if (hashedVideoIDPrefix.length !== 4) Logger.warn(`Redis skip segment hash-prefix key is not length 4! ${hashedVideoIDPrefix}`);
- return `segments.v4.${service}.${hashedVideoIDPrefix}`;
+ return `segments.v6.${service}.${hashedVideoIDPrefix}`;
}
export const brandingKey = (videoID: VideoID, service: Service): string =>
- `branding.v2.${service}.videoID.${videoID}`;
+ `branding.v4.${service}.videoID.${videoID}`;
export function brandingHashKey(hashedVideoIDPrefix: VideoIDHash, service: Service): string {
hashedVideoIDPrefix = hashedVideoIDPrefix.substring(0, 4) as VideoIDHash;
if (hashedVideoIDPrefix.length !== 4) Logger.warn(`Redis skip segment hash-prefix key is not length 4! ${hashedVideoIDPrefix}`);
- return `branding.v2.${service}.${hashedVideoIDPrefix}`;
+ return `branding.v4.${service}.${hashedVideoIDPrefix}`;
}
export const brandingIPKey = (uuid: BrandingUUID): string =>
- `branding.shadow.${uuid}`;
+ `branding.v2.shadow.${uuid}`;
export const shadowHiddenIPKey = (videoID: VideoID, timeSubmitted: number, service: Service): string =>
- `segments.${service}.videoID.${videoID}.shadow.${timeSubmitted}`;
+ `segments.v2.${service}.videoID.${videoID}.shadow.${timeSubmitted}`;
export const reputationKey = (userID: UserID): string =>
- `reputation.user.${userID}`;
+ `reputation.v2.user.${userID}`;
export function ratingHashKey(hashPrefix: VideoIDHash, service: Service): string {
hashPrefix = hashPrefix.substring(0, 4) as VideoIDHash;
if (hashPrefix.length !== 4) Logger.warn(`Redis rating hash-prefix key is not length 4! ${hashPrefix}`);
- return `rating.${service}.${hashPrefix}`;
+ return `rating.v2.${service}.${hashPrefix}`;
}
export function shaHashKey(singleIter: HashedValue): string {
@@ -54,15 +54,15 @@ export const tempVIPKey = (userID: HashedUserID): string =>
`vip.temp.${userID}`;
export const videoLabelsKey = (videoID: VideoID, service: Service): string =>
- `labels.v1.${service}.videoID.${videoID}`;
+ `labels.v3.${service}.videoID.${videoID}`;
export function videoLabelsHashKey(hashedVideoIDPrefix: VideoIDHash, service: Service): string {
hashedVideoIDPrefix = hashedVideoIDPrefix.substring(0, 3) as VideoIDHash;
if (hashedVideoIDPrefix.length !== 3) Logger.warn(`Redis video labels hash-prefix key is not length 3! ${hashedVideoIDPrefix}`);
- return `labels.v1.${service}.${hashedVideoIDPrefix}`;
+ return `labels.v3.${service}.${hashedVideoIDPrefix}`;
}
export function userFeatureKey (userID: HashedUserID, feature: Feature): string {
- return `user.${userID}.feature.${feature}`;
+ return `user.v2.${userID}.feature.${feature}`;
}
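
Every key builder in this file gets its version segment bumped (v4 to v6, v3 to v5, and so on) or gains one where it was missing. Presumably this pairs with the switch to compressed values above: entries written under the old key names would not decode through the new code path, so instead of deleting them the server simply stops looking them up — a different key is an ordinary cache miss, and the stale entries age out through the TTL passed to setEx. A minimal sketch of the pattern (names here are placeholders, not the project's helpers):

// Bumping the version constant is the whole invalidation mechanism:
const CACHE_VERSION = "v6"; // was "v4" before a format change

const segmentsKey = (service: string, videoID: string): string =>
    `segments.${CACHE_VERSION}.${service}.videoID.${videoID}`;

// Readers only ever construct keys with the current version, so anything still
// stored under "segments.v4...." is unreachable and expires on its own — no
// explicit DEL or FLUSHDB is needed.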

View File

@@ -37,6 +37,12 @@ export async function acquireLock(key: string, timeout = defaultTimeout): Promis
}
} catch (e) {
Logger.error(e as string);
+ // Fallback to allowing
+ return {
+ status: true,
+ unlock: () => void 0
+ };
}
return {