Catch redis exceptions

This commit is contained in:
Ajay
2022-05-23 18:32:40 -04:00
parent ed221c8599
commit 55ff3230ed
3 changed files with 11 additions and 10 deletions

View File

@@ -6,9 +6,10 @@ import { RateLimitConfig } from "../types/config.model";
 import { Request } from "express";
 import { isUserVIP } from "../utils/isUserVIP";
 import { UserID } from "../types/user.model";
-import RedisStore from "rate-limit-redis";
+import RedisStore, { RedisReply } from "rate-limit-redis";
 import redis from "../utils/redis";
 import { config } from "../config";
+import { Logger } from "../utils/logger";
 export function rateLimitMiddleware(limitConfig: RateLimitConfig, getUserID?: (req: Request) => UserID): RateLimitRequestHandler {
     return rateLimit({
@@ -29,7 +30,7 @@ export function rateLimitMiddleware(limitConfig: RateLimitConfig, getUserID?: (r
         }
     },
     store: config.redis?.enabled ? new RedisStore({
-        sendCommand: (...args: string[]) => redis.sendCommand(args),
+        sendCommand: (...args: string[]) => redis.sendCommand(args).catch((err) => Logger.error(err)) as Promise<RedisReply>,
     }) : null,
     });
 }

View File

@@ -30,7 +30,7 @@ async function getFromRedis<T extends string>(key: HashedValue): Promise<T & Has
     // Otherwise, calculate it
     const data = getHash(key, cachedHashTimes);
-    redis.set(key, data);
+    redis.set(key, data).catch((err) => Logger.error(err));
     return data as T & HashedValue;
 }

View File

@@ -16,7 +16,7 @@ async function get<T>(fetchFromDB: () => Promise<T>, key: string): Promise<T> {
     const data = await fetchFromDB();
-    redis.set(key, JSON.stringify(data));
+    redis.set(key, JSON.stringify(data)).catch((err) => Logger.error(err));
     return data;
 }
@@ -67,7 +67,7 @@ async function getAndSplit<T, U extends string>(fetchFromDB: (values: U[]) => Pr
         }
         for (const key in newResults) {
-            redis.set(key, JSON.stringify(newResults[key]));
+            redis.set(key, JSON.stringify(newResults[key])).catch((err) => Logger.error(err));
         }
     });
 }
@@ -77,16 +77,16 @@ async function getAndSplit<T, U extends string>(fetchFromDB: (values: U[]) => Pr
 function clearSegmentCache(videoInfo: { videoID: VideoID; hashedVideoID: VideoIDHash; service: Service; userID?: UserID; }): void {
     if (videoInfo) {
-        redis.del(skipSegmentsKey(videoInfo.videoID, videoInfo.service));
+        redis.del(skipSegmentsKey(videoInfo.videoID, videoInfo.service)).catch((err) => Logger.error(err));
-        redis.del(skipSegmentGroupsKey(videoInfo.videoID, videoInfo.service));
+        redis.del(skipSegmentGroupsKey(videoInfo.videoID, videoInfo.service)).catch((err) => Logger.error(err));
-        redis.del(skipSegmentsHashKey(videoInfo.hashedVideoID, videoInfo.service));
+        redis.del(skipSegmentsHashKey(videoInfo.hashedVideoID, videoInfo.service)).catch((err) => Logger.error(err));
-        if (videoInfo.userID) redis.del(reputationKey(videoInfo.userID));
+        if (videoInfo.userID) redis.del(reputationKey(videoInfo.userID)).catch((err) => Logger.error(err));
     }
 }
 function clearRatingCache(videoInfo: { hashedVideoID: VideoIDHash; service: Service;}): void {
     if (videoInfo) {
-        redis.del(ratingHashKey(videoInfo.hashedVideoID, videoInfo.service));
+        redis.del(ratingHashKey(videoInfo.hashedVideoID, videoInfo.service)).catch((err) => Logger.error(err));
     }
 }