mirror of
https://github.com/RayLabsHQ/gitea-mirror.git
synced 2025-12-14 07:26:44 +03:00
feat: migrate from Redis to SQLite for event handling and notifications
This commit is contained in:
@@ -66,6 +66,18 @@ export const users = sqliteTable("users", {
|
||||
.default(new Date()),
|
||||
});
|
||||
|
||||
// New table for event notifications (replacing Redis pub/sub)
|
||||
export const events = sqliteTable("events", {
|
||||
id: text("id").primaryKey(),
|
||||
userId: text("user_id").notNull().references(() => users.id),
|
||||
channel: text("channel").notNull(),
|
||||
payload: text("payload", { mode: "json" }).notNull(),
|
||||
read: integer("read", { mode: "boolean" }).notNull().default(false),
|
||||
createdAt: integer("created_at", { mode: "timestamp" })
|
||||
.notNull()
|
||||
.default(new Date()),
|
||||
});
|
||||
|
||||
const githubSchema = configSchema.shape.githubConfig;
|
||||
const giteaSchema = configSchema.shape.giteaConfig;
|
||||
const scheduleSchema = configSchema.shape.scheduleConfig;
|
||||
|
||||
@@ -140,3 +140,15 @@ export const organizationSchema = z.object({
|
||||
});
|
||||
|
||||
export type Organization = z.infer<typeof organizationSchema>;
|
||||
|
||||
// Event schema (for SQLite-based pub/sub)
// Mirrors the `events` table; validates events crossing the API boundary.
export const eventSchema = z.object({
  // Generated at publish time when omitted.
  id: z.string().uuid().optional(),
  // Owner of the event; the table column references users.id.
  userId: z.string().uuid(),
  // Logical channel name, e.g. `mirror-status:<userId>`.
  channel: z.string().min(1),
  // Arbitrary JSON payload; stored as text in SQLite.
  payload: z.any(),
  // Delivery flag: events are marked read once polled.
  read: z.boolean().default(false),
  // Lazy default so each parse stamps the current time, not module-load time.
  createdAt: z.date().default(() => new Date()),
});

export type Event = z.infer<typeof eventSchema>;
|
||||
|
||||
130
src/lib/events.ts
Normal file
130
src/lib/events.ts
Normal file
@@ -0,0 +1,130 @@
|
||||
import { v4 as uuidv4 } from "uuid";
import { db, events } from "./db";
import { and, eq, gt, inArray, lt } from "drizzle-orm";
|
||||
|
||||
/**
|
||||
* Publishes an event to a specific channel for a user
|
||||
* This replaces Redis pub/sub with SQLite storage
|
||||
*/
|
||||
export async function publishEvent({
|
||||
userId,
|
||||
channel,
|
||||
payload,
|
||||
}: {
|
||||
userId: string;
|
||||
channel: string;
|
||||
payload: any;
|
||||
}): Promise<string> {
|
||||
try {
|
||||
const eventId = uuidv4();
|
||||
console.log(`Publishing event to channel ${channel} for user ${userId}`);
|
||||
|
||||
// Insert the event into the SQLite database
|
||||
await db.insert(events).values({
|
||||
id: eventId,
|
||||
userId,
|
||||
channel,
|
||||
payload: JSON.stringify(payload),
|
||||
createdAt: new Date(),
|
||||
});
|
||||
|
||||
console.log(`Event published successfully with ID ${eventId}`);
|
||||
return eventId;
|
||||
} catch (error) {
|
||||
console.error("Error publishing event:", error);
|
||||
throw new Error("Failed to publish event");
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Gets new events for a specific user and channel
|
||||
* This replaces Redis subscribe with SQLite polling
|
||||
*/
|
||||
export async function getNewEvents({
|
||||
userId,
|
||||
channel,
|
||||
lastEventTime,
|
||||
}: {
|
||||
userId: string;
|
||||
channel: string;
|
||||
lastEventTime?: Date;
|
||||
}): Promise<any[]> {
|
||||
try {
|
||||
console.log(`Getting new events for user ${userId} in channel ${channel}`);
|
||||
if (lastEventTime) {
|
||||
console.log(`Looking for events after ${lastEventTime.toISOString()}`);
|
||||
}
|
||||
|
||||
// Build the query
|
||||
let query = db
|
||||
.select()
|
||||
.from(events)
|
||||
.where(
|
||||
and(
|
||||
eq(events.userId, userId),
|
||||
eq(events.channel, channel),
|
||||
eq(events.read, false)
|
||||
)
|
||||
)
|
||||
.orderBy(events.createdAt);
|
||||
|
||||
// Add time filter if provided
|
||||
if (lastEventTime) {
|
||||
query = query.where(gt(events.createdAt, lastEventTime));
|
||||
}
|
||||
|
||||
// Execute the query
|
||||
const newEvents = await query;
|
||||
console.log(`Found ${newEvents.length} new events`);
|
||||
|
||||
// Mark events as read
|
||||
if (newEvents.length > 0) {
|
||||
console.log(`Marking ${newEvents.length} events as read`);
|
||||
await db
|
||||
.update(events)
|
||||
.set({ read: true })
|
||||
.where(
|
||||
and(
|
||||
eq(events.userId, userId),
|
||||
eq(events.channel, channel),
|
||||
eq(events.read, false)
|
||||
)
|
||||
);
|
||||
}
|
||||
|
||||
// Parse the payloads
|
||||
return newEvents.map(event => ({
|
||||
...event,
|
||||
payload: JSON.parse(event.payload as string),
|
||||
}));
|
||||
} catch (error) {
|
||||
console.error("Error getting new events:", error);
|
||||
return [];
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Cleans up old events to prevent the database from growing too large
|
||||
* Should be called periodically (e.g., daily via a cron job)
|
||||
*/
|
||||
export async function cleanupOldEvents(maxAgeInDays: number = 7): Promise<number> {
|
||||
try {
|
||||
const cutoffDate = new Date();
|
||||
cutoffDate.setDate(cutoffDate.getDate() - maxAgeInDays);
|
||||
|
||||
// Delete events older than the cutoff date
|
||||
const result = await db
|
||||
.delete(events)
|
||||
.where(
|
||||
and(
|
||||
eq(events.read, true),
|
||||
gt(cutoffDate, events.createdAt)
|
||||
)
|
||||
);
|
||||
|
||||
return result.changes || 0;
|
||||
} catch (error) {
|
||||
console.error("Error cleaning up old events:", error);
|
||||
return 0;
|
||||
}
|
||||
}
|
||||
@@ -1,7 +1,7 @@
|
||||
import type { RepoStatus } from "@/types/Repository";
|
||||
import { db, mirrorJobs } from "./db";
|
||||
import { v4 as uuidv4 } from "uuid";
|
||||
import { redisPublisher } from "./redis";
|
||||
import { publishEvent } from "./events";
|
||||
|
||||
export async function createMirrorJob({
|
||||
userId,
|
||||
@@ -40,10 +40,16 @@ export async function createMirrorJob({
|
||||
};
|
||||
|
||||
try {
|
||||
// Insert the job into the database
|
||||
await db.insert(mirrorJobs).values(job);
|
||||
|
||||
// Publish the event using SQLite instead of Redis
|
||||
const channel = `mirror-status:${userId}`;
|
||||
await redisPublisher.publish(channel, JSON.stringify(job));
|
||||
await publishEvent({
|
||||
userId,
|
||||
channel,
|
||||
payload: job
|
||||
});
|
||||
|
||||
return jobId;
|
||||
} catch (error) {
|
||||
|
||||
@@ -1,39 +0,0 @@
|
||||
import { RedisClient } from "bun";
|
||||
|
||||
// Connect to Redis using REDIS_URL environment variable or default to redis://redis:6379
|
||||
// This ensures we have a fallback URL when running with Docker Compose
|
||||
const redisUrl = process.env.REDIS_URL ?? "redis://localhost:6379";
|
||||
|
||||
console.log(`Connecting to Redis at: ${redisUrl}`);
|
||||
|
||||
// Configure Redis client with connection options and retry logic
|
||||
function createClient() {
|
||||
const client = new RedisClient(redisUrl, {
|
||||
autoReconnect: true,
|
||||
connectTimeout: 30000, // Increase timeout to 30 seconds
|
||||
retryStrategy: (attempt: number) => {
|
||||
// Exponential backoff with jitter
|
||||
const delay = Math.min(Math.pow(2, attempt) * 100, 10000);
|
||||
console.log(`Redis connection attempt ${attempt}, retrying in ${delay}ms`);
|
||||
return delay;
|
||||
},
|
||||
});
|
||||
|
||||
// Set up event handlers
|
||||
client.onconnect = () => console.log("Redis client connected successfully");
|
||||
client.onclose = (err: Error | null) => {
|
||||
if (err) {
|
||||
console.error("Redis connection error:", err);
|
||||
console.log("Redis will attempt to reconnect automatically");
|
||||
} else {
|
||||
console.log("Redis connection closed");
|
||||
}
|
||||
};
|
||||
|
||||
return client;
|
||||
}
|
||||
|
||||
// Create Redis clients with improved error handling
|
||||
export const redis = createClient();
|
||||
export const redisPublisher = createClient();
|
||||
export const redisSubscriber = createClient();
|
||||
@@ -1,5 +1,5 @@
|
||||
import type { APIRoute } from "astro";
|
||||
import { redisSubscriber } from "@/lib/redis";
|
||||
import { getNewEvents } from "@/lib/events";
|
||||
|
||||
export const GET: APIRoute = async ({ request }) => {
|
||||
const url = new URL(request.url);
|
||||
@@ -11,13 +11,13 @@ export const GET: APIRoute = async ({ request }) => {
|
||||
|
||||
const channel = `mirror-status:${userId}`;
|
||||
let isClosed = false;
|
||||
let connectionAttempts = 0;
|
||||
const MAX_ATTEMPTS = 5;
|
||||
const RETRY_DELAY = 1000; // 1 second
|
||||
const POLL_INTERVAL = 2000; // Poll every 2 seconds
|
||||
|
||||
const stream = new ReadableStream({
|
||||
start(controller) {
|
||||
const encoder = new TextEncoder();
|
||||
let lastEventTime: Date | undefined = undefined;
|
||||
let pollIntervalId: ReturnType<typeof setInterval> | null = null;
|
||||
|
||||
// Function to send a message to the client
|
||||
const sendMessage = (message: string) => {
|
||||
@@ -29,98 +29,63 @@ export const GET: APIRoute = async ({ request }) => {
|
||||
}
|
||||
};
|
||||
|
||||
// Function to handle Redis connection and subscription
|
||||
const connectToRedis = () => {
|
||||
// Function to poll for new events
|
||||
const pollForEvents = async () => {
|
||||
if (isClosed) return;
|
||||
|
||||
try {
|
||||
// Set up message handler for Bun's Redis client
|
||||
redisSubscriber.onmessage = (message, channelName) => {
|
||||
if (isClosed || channelName !== channel) return;
|
||||
sendMessage(`data: ${message}\n\n`);
|
||||
};
|
||||
console.log(`Polling for events for user ${userId} in channel ${channel}`);
|
||||
|
||||
// Send initial connection message
|
||||
sendMessage(": connecting to Redis...\n\n");
|
||||
// Get new events from SQLite
|
||||
const events = await getNewEvents({
|
||||
userId,
|
||||
channel,
|
||||
lastEventTime,
|
||||
});
|
||||
|
||||
// Use a try-catch block specifically for the subscribe operation
|
||||
let subscribed = false;
|
||||
try {
|
||||
// Bun's Redis client expects a string for the channel
|
||||
// We need to wrap this in a try-catch because it can throw if Redis is down
|
||||
subscribed = redisSubscriber.subscribe(channel);
|
||||
console.log(`Found ${events.length} new events`);
|
||||
|
||||
if (subscribed) {
|
||||
// If we get here, subscription was successful
|
||||
sendMessage(": connected\n\n");
|
||||
// Send events to client
|
||||
if (events.length > 0) {
|
||||
// Update last event time
|
||||
lastEventTime = events[events.length - 1].createdAt;
|
||||
|
||||
// Reset connection attempts on successful connection
|
||||
connectionAttempts = 0;
|
||||
|
||||
// Send a heartbeat every 30 seconds to keep the connection alive
|
||||
const heartbeatInterval = setInterval(() => {
|
||||
if (!isClosed) {
|
||||
sendMessage(": heartbeat\n\n");
|
||||
} else {
|
||||
clearInterval(heartbeatInterval);
|
||||
}
|
||||
}, 30000);
|
||||
} else {
|
||||
throw new Error("Failed to subscribe to Redis channel");
|
||||
}
|
||||
|
||||
} catch (subscribeErr) {
|
||||
// Handle subscription error
|
||||
console.error("Redis subscribe error:", subscribeErr);
|
||||
|
||||
// Retry connection if we haven't exceeded max attempts
|
||||
if (connectionAttempts < MAX_ATTEMPTS) {
|
||||
connectionAttempts++;
|
||||
const nextRetryDelay = RETRY_DELAY * Math.pow(2, connectionAttempts - 1);
|
||||
console.log(`Retrying Redis connection (attempt ${connectionAttempts}/${MAX_ATTEMPTS}) in ${nextRetryDelay}ms...`);
|
||||
|
||||
// Send retry message to client
|
||||
sendMessage(`: retrying connection (${connectionAttempts}/${MAX_ATTEMPTS}) in ${nextRetryDelay}ms...\n\n`);
|
||||
|
||||
// Wait before retrying
|
||||
setTimeout(connectToRedis, nextRetryDelay);
|
||||
} else {
|
||||
// Max retries exceeded, send error but keep the connection open
|
||||
console.error("Max Redis connection attempts exceeded");
|
||||
sendMessage(`data: {"error": "Redis connection failed after ${MAX_ATTEMPTS} attempts"}\n\n`);
|
||||
|
||||
// Set up a longer retry after max attempts
|
||||
setTimeout(() => {
|
||||
connectionAttempts = 0; // Reset counter for a fresh start
|
||||
sendMessage(": attempting to reconnect after cooling period...\n\n");
|
||||
connectToRedis();
|
||||
}, 30000); // Try again after 30 seconds
|
||||
// Send each event to the client
|
||||
for (const event of events) {
|
||||
console.log(`Sending event: ${JSON.stringify(event.payload)}`);
|
||||
sendMessage(`data: ${JSON.stringify(event.payload)}\n\n`);
|
||||
}
|
||||
}
|
||||
} catch (err) {
|
||||
// This catches any other errors outside the subscribe operation
|
||||
console.error("Redis connection error:", err);
|
||||
sendMessage(`data: {"error": "Redis connection error"}\n\n`);
|
||||
|
||||
// Still attempt to retry
|
||||
if (connectionAttempts < MAX_ATTEMPTS) {
|
||||
connectionAttempts++;
|
||||
setTimeout(connectToRedis, RETRY_DELAY * Math.pow(2, connectionAttempts - 1));
|
||||
}
|
||||
console.error("Error polling for events:", err);
|
||||
sendMessage(`data: {"error": "Error polling for events"}\n\n`);
|
||||
}
|
||||
};
|
||||
|
||||
// Start the initial connection
|
||||
connectToRedis();
|
||||
// Send initial connection message
|
||||
sendMessage(": connected\n\n");
|
||||
|
||||
// Start polling for events
|
||||
pollForEvents();
|
||||
|
||||
// Set up polling interval
|
||||
pollIntervalId = setInterval(pollForEvents, POLL_INTERVAL);
|
||||
|
||||
// Send a heartbeat every 30 seconds to keep the connection alive
|
||||
const heartbeatInterval = setInterval(() => {
|
||||
if (!isClosed) {
|
||||
sendMessage(": heartbeat\n\n");
|
||||
} else {
|
||||
clearInterval(heartbeatInterval);
|
||||
}
|
||||
}, 30000);
|
||||
|
||||
// Handle client disconnection
|
||||
request.signal?.addEventListener("abort", () => {
|
||||
if (!isClosed) {
|
||||
isClosed = true;
|
||||
try {
|
||||
redisSubscriber.unsubscribe(channel);
|
||||
} catch (err) {
|
||||
console.error("Error unsubscribing from Redis:", err);
|
||||
if (pollIntervalId) {
|
||||
clearInterval(pollIntervalId);
|
||||
}
|
||||
controller.close();
|
||||
}
|
||||
@@ -128,14 +93,7 @@ export const GET: APIRoute = async ({ request }) => {
|
||||
},
|
||||
cancel() {
|
||||
// Extra safety in case cancel is triggered
|
||||
if (!isClosed) {
|
||||
isClosed = true;
|
||||
try {
|
||||
redisSubscriber.unsubscribe(channel);
|
||||
} catch (err) {
|
||||
console.error("Error unsubscribing from Redis:", err);
|
||||
}
|
||||
}
|
||||
isClosed = true;
|
||||
},
|
||||
});
|
||||
|
||||
|
||||
56
src/pages/api/test-event.ts
Normal file
56
src/pages/api/test-event.ts
Normal file
@@ -0,0 +1,56 @@
|
||||
import type { APIRoute } from "astro";
|
||||
import { publishEvent } from "@/lib/events";
|
||||
import { v4 as uuidv4 } from "uuid";
|
||||
|
||||
export const POST: APIRoute = async ({ request }) => {
|
||||
try {
|
||||
const body = await request.json();
|
||||
const { userId, message, status } = body;
|
||||
|
||||
if (!userId || !message || !status) {
|
||||
return new Response(
|
||||
JSON.stringify({
|
||||
error: "Missing required fields: userId, message, status",
|
||||
}),
|
||||
{ status: 400 }
|
||||
);
|
||||
}
|
||||
|
||||
// Create a test event
|
||||
const eventData = {
|
||||
id: uuidv4(),
|
||||
userId,
|
||||
repositoryId: uuidv4(),
|
||||
repositoryName: "test-repo",
|
||||
message,
|
||||
status,
|
||||
timestamp: new Date(),
|
||||
};
|
||||
|
||||
// Publish the event
|
||||
const channel = `mirror-status:${userId}`;
|
||||
await publishEvent({
|
||||
userId,
|
||||
channel,
|
||||
payload: eventData,
|
||||
});
|
||||
|
||||
return new Response(
|
||||
JSON.stringify({
|
||||
success: true,
|
||||
message: "Event published successfully",
|
||||
event: eventData,
|
||||
}),
|
||||
{ status: 200 }
|
||||
);
|
||||
} catch (error) {
|
||||
console.error("Error publishing test event:", error);
|
||||
return new Response(
|
||||
JSON.stringify({
|
||||
error: "Failed to publish event",
|
||||
details: error instanceof Error ? error.message : String(error),
|
||||
}),
|
||||
{ status: 500 }
|
||||
);
|
||||
}
|
||||
};
|
||||
Reference in New Issue
Block a user