feat: migrate from Redis to SQLite for event handling and notifications

Arunavo Ray
2025-05-20 19:09:48 +05:30
parent c179953649
commit 6d13ff29ca
17 changed files with 470 additions and 202 deletions


@@ -66,6 +66,18 @@ export const users = sqliteTable("users", {
    .default(new Date()),
});

// New table for event notifications (replacing Redis pub/sub)
export const events = sqliteTable("events", {
  id: text("id").primaryKey(),
  userId: text("user_id").notNull().references(() => users.id),
  channel: text("channel").notNull(),
  payload: text("payload", { mode: "json" }).notNull(),
  read: integer("read", { mode: "boolean" }).notNull().default(false),
  createdAt: integer("created_at", { mode: "timestamp" })
    .notNull()
    .default(new Date()),
});
const githubSchema = configSchema.shape.githubConfig;
const giteaSchema = configSchema.shape.giteaConfig;
const scheduleSchema = configSchema.shape.scheduleConfig;
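
For reference, a minimal sketch of reading the new table directly through Drizzle; the user id is a placeholder, and the column modes mean read comes back as a JS boolean and createdAt as a Date:

import { and, eq } from "drizzle-orm";
import { db, events } from "./db"; // assumes db re-exports the schema, as src/lib/events.ts below does

// Fetch unread events for one user, oldest first.
const unread = await db
  .select()
  .from(events)
  .where(and(eq(events.userId, "some-user-id"), eq(events.read, false)))
  .orderBy(events.createdAt);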


@@ -140,3 +140,15 @@ export const organizationSchema = z.object({
});
export type Organization = z.infer<typeof organizationSchema>;
// Event schema (for SQLite-based pub/sub)
export const eventSchema = z.object({
  id: z.string().uuid().optional(),
  userId: z.string().uuid(),
  channel: z.string().min(1),
  payload: z.any(),
  read: z.boolean().default(false),
  createdAt: z.date().default(() => new Date()),
});
export type Event = z.infer<typeof eventSchema>;
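
As a usage sketch (the import path and the payload fields are placeholders, not part of this commit), validating an event before it is handed to the SQLite layer:

// Import path is a placeholder for wherever this schema module lives.
import { eventSchema, type Event } from "@/types/events";

// id, read, and createdAt fall back to the schema defaults.
const event: Event = eventSchema.parse({
  userId: "0f8fad5b-d9cb-469f-a165-70867728950e", // must be a UUID
  channel: "mirror-status:0f8fad5b-d9cb-469f-a165-70867728950e",
  payload: { repositoryId: "repo-1", status: "mirrored" }, // placeholder payload
});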

src/lib/events.ts (new file, 130 lines)

@@ -0,0 +1,130 @@
import { v4 as uuidv4 } from "uuid";
import { db, events } from "./db";
import { eq, and, gt, lt, inArray } from "drizzle-orm";
/**
 * Publishes an event to a specific channel for a user.
 * This replaces Redis pub/sub with SQLite storage.
 */
export async function publishEvent({
  userId,
  channel,
  payload,
}: {
  userId: string;
  channel: string;
  payload: any;
}): Promise<string> {
  try {
    const eventId = uuidv4();
    console.log(`Publishing event to channel ${channel} for user ${userId}`);

    // Insert the event into the SQLite database
    await db.insert(events).values({
      id: eventId,
      userId,
      channel,
      payload: JSON.stringify(payload),
      createdAt: new Date(),
    });

    console.log(`Event published successfully with ID ${eventId}`);
    return eventId;
  } catch (error) {
    console.error("Error publishing event:", error);
    throw new Error("Failed to publish event");
  }
}
/**
 * Gets new events for a specific user and channel.
 * This replaces Redis subscribe with SQLite polling.
 */
export async function getNewEvents({
  userId,
  channel,
  lastEventTime,
}: {
  userId: string;
  channel: string;
  lastEventTime?: Date;
}): Promise<any[]> {
  try {
    console.log(`Getting new events for user ${userId} in channel ${channel}`);
    if (lastEventTime) {
      console.log(`Looking for events after ${lastEventTime.toISOString()}`);
    }

    // Build the filter, adding the time bound only when a cursor is provided
    const conditions = [
      eq(events.userId, userId),
      eq(events.channel, channel),
      eq(events.read, false),
    ];
    if (lastEventTime) {
      conditions.push(gt(events.createdAt, lastEventTime));
    }

    // Execute the query
    const newEvents = await db
      .select()
      .from(events)
      .where(and(...conditions))
      .orderBy(events.createdAt);

    console.log(`Found ${newEvents.length} new events`);

    // Mark only the returned events as read, so events that arrive between
    // the select and the update are not silently lost
    if (newEvents.length > 0) {
      console.log(`Marking ${newEvents.length} events as read`);
      await db
        .update(events)
        .set({ read: true })
        .where(inArray(events.id, newEvents.map((event) => event.id)));
    }

    // Parse the payloads
    return newEvents.map((event) => ({
      ...event,
      payload: JSON.parse(event.payload as string),
    }));
  } catch (error) {
    console.error("Error getting new events:", error);
    return [];
  }
}
/**
 * Cleans up old events to prevent the database from growing too large.
 * Should be called periodically (e.g., daily via a cron job).
 */
export async function cleanupOldEvents(maxAgeInDays: number = 7): Promise<number> {
  try {
    const cutoffDate = new Date();
    cutoffDate.setDate(cutoffDate.getDate() - maxAgeInDays);

    // Delete read events older than the cutoff date
    const result = await db
      .delete(events)
      .where(
        and(
          eq(events.read, true),
          lt(events.createdAt, cutoffDate)
        )
      );

    return result.changes || 0;
  } catch (error) {
    console.error("Error cleaning up old events:", error);
    return 0;
  }
}
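
Nothing in this commit wires the cleanup to a schedule; a minimal sketch of running it from the long-lived server process (the daily interval and the call site are assumptions, and an external cron job would work just as well):

import { cleanupOldEvents } from "./events";

// Run the cleanup roughly once a day.
const ONE_DAY_MS = 24 * 60 * 60 * 1000;

setInterval(async () => {
  const removed = await cleanupOldEvents(7); // keep a week of read events
  console.log(`Event cleanup removed ${removed} old events`);
}, ONE_DAY_MS);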


@@ -1,7 +1,7 @@
import type { RepoStatus } from "@/types/Repository";
import { db, mirrorJobs } from "./db";
import { v4 as uuidv4 } from "uuid";
import { redisPublisher } from "./redis";
import { publishEvent } from "./events";
export async function createMirrorJob({
  userId,
@@ -40,10 +40,16 @@ export async function createMirrorJob({
  };

  try {
    // Insert the job into the database
    await db.insert(mirrorJobs).values(job);

    // Publish the event using SQLite instead of Redis
    const channel = `mirror-status:${userId}`;
    await redisPublisher.publish(channel, JSON.stringify(job));
    await publishEvent({
      userId,
      channel,
      payload: job
    });

    return jobId;
  } catch (error) {
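
On the consuming side, what a redisSubscriber.subscribe on this channel used to deliver now has to be polled via getNewEvents; a hedged sketch (the 5-second interval and the handler callback are assumptions, not part of this commit):

import { getNewEvents } from "./events";

// Poll the SQLite-backed channel instead of holding a Redis subscription open.
function watchMirrorStatus(userId: string, onJob: (job: unknown) => void) {
  const channel = `mirror-status:${userId}`;

  setInterval(async () => {
    const newEvents = await getNewEvents({ userId, channel });
    for (const event of newEvents) {
      onJob(event.payload); // the mirror job object published above
    }
  }, 5000); // polling interval is an assumption
}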


@@ -1,39 +0,0 @@
import { RedisClient } from "bun";
// Connect to Redis using the REDIS_URL environment variable, falling back to
// redis://localhost:6379; when running with Docker Compose, REDIS_URL should
// point at the redis service (e.g. redis://redis:6379)
const redisUrl = process.env.REDIS_URL ?? "redis://localhost:6379";

console.log(`Connecting to Redis at: ${redisUrl}`);

// Configure Redis client with connection options and retry logic
function createClient() {
  const client = new RedisClient(redisUrl, {
    autoReconnect: true,
    connectTimeout: 30000, // Increase timeout to 30 seconds
    retryStrategy: (attempt: number) => {
      // Exponential backoff, capped at 10 seconds
      const delay = Math.min(Math.pow(2, attempt) * 100, 10000);
      console.log(`Redis connection attempt ${attempt}, retrying in ${delay}ms`);
      return delay;
    },
  });

  // Set up event handlers
  client.onconnect = () => console.log("Redis client connected successfully");
  client.onclose = (err: Error | null) => {
    if (err) {
      console.error("Redis connection error:", err);
      console.log("Redis will attempt to reconnect automatically");
    } else {
      console.log("Redis connection closed");
    }
  };

  return client;
}

// Create Redis clients with improved error handling
export const redis = createClient();
export const redisPublisher = createClient();
export const redisSubscriber = createClient();