Merge pull request #9 from arunavo4/bun

Migrate to Bun
This commit is contained in:
ARUNAVO RAY
2025-05-21 11:23:08 +05:30
committed by GitHub
41 changed files with 3161 additions and 8426 deletions

View File

@@ -5,10 +5,10 @@
# Node.js # Node.js
node_modules node_modules
# We don't exclude bun.lock* as it's needed for the build
npm-debug.log npm-debug.log
yarn-debug.log yarn-debug.log
yarn-error.log yarn-error.log
pnpm-debug.log
# Build outputs # Build outputs
dist dist
@@ -62,4 +62,3 @@ logs
# Cache # Cache
.cache .cache
.npm .npm
.pnpm-store

View File

@@ -8,6 +8,7 @@ NODE_ENV=production
HOST=0.0.0.0 HOST=0.0.0.0
PORT=4321 PORT=4321
DATABASE_URL=sqlite://data/gitea-mirror.db DATABASE_URL=sqlite://data/gitea-mirror.db
# Note: Redis is no longer required as SQLite is used for all functionality
# Security # Security
JWT_SECRET=change-this-to-a-secure-random-string-in-production JWT_SECRET=change-this-to-a-secure-random-string-in-production

View File

@@ -24,8 +24,7 @@ This workflow runs on all branches and pull requests. It:
- On push to any branch (except changes to README.md and docs) - On push to any branch (except changes to README.md and docs)
- On pull requests to any branch (except changes to README.md and docs) - On pull requests to any branch (except changes to README.md and docs)
**Key features:** - Uses Bun for dependency installation
- Uses pnpm for faster dependency installation
- Caches dependencies to speed up builds - Caches dependencies to speed up builds
- Uploads build artifacts for 7 days - Uploads build artifacts for 7 days

View File

@@ -21,26 +21,27 @@ jobs:
- name: Checkout repository - name: Checkout repository
uses: actions/checkout@v4 uses: actions/checkout@v4
- name: Install pnpm - name: Setup Bun
uses: pnpm/action-setup@v3 uses: oven-sh/setup-bun@v1
with: with:
version: 10 bun-version: '1.2.9'
run_install: false
- name: Setup Node.js - name: Check lockfile and install dependencies
uses: actions/setup-node@v4 run: |
with: # Check if bun.lock exists, if not check for bun.lockb
node-version: 'lts/*' if [ -f "bun.lock" ]; then
cache: 'pnpm' echo "Using existing bun.lock file"
elif [ -f "bun.lockb" ]; then
- name: Install dependencies echo "Found bun.lockb, creating symlink to bun.lock"
run: pnpm install ln -s bun.lockb bun.lock
fi
bun install
- name: Run tests - name: Run tests
run: pnpm test run: bunx vitest run
- name: Build Astro project - name: Build Astro project
run: pnpm build run: bunx astro build
- name: Upload build artifacts - name: Upload build artifacts
uses: actions/upload-artifact@v4 uses: actions/upload-artifact@v4

View File

@@ -7,14 +7,14 @@ on:
- 'Dockerfile' - 'Dockerfile'
- '.dockerignore' - '.dockerignore'
- 'package.json' - 'package.json'
- 'pnpm-lock.yaml' - 'bun.lock*'
pull_request: pull_request:
branches: [ main ] branches: [ main ]
paths: paths:
- 'Dockerfile' - 'Dockerfile'
- '.dockerignore' - '.dockerignore'
- 'package.json' - 'package.json'
- 'pnpm-lock.yaml' - 'bun.lock*'
schedule: schedule:
- cron: '0 0 * * 0' # Run weekly on Sunday at midnight - cron: '0 0 * * 0' # Run weekly on Sunday at midnight

View File

@@ -1,83 +1,48 @@
# syntax=docker/dockerfile:1.4 # syntax=docker/dockerfile:1.4
FROM node:lts-alpine AS base FROM oven/bun:1.2.9-alpine AS base
ENV PNPM_HOME=/usr/local/bin WORKDIR /app
ENV PATH=$PNPM_HOME:$PATH RUN apk add --no-cache libc6-compat python3 make g++ gcc wget sqlite
RUN apk add --no-cache libc6-compat
# ----------------------------------- # ----------------------------
FROM base AS deps FROM base AS deps
WORKDIR /app COPY package.json ./
RUN apk add --no-cache python3 make g++ gcc COPY bun.lock* ./
RUN bun install --frozen-lockfile
RUN --mount=type=cache,target=/root/.npm \ # ----------------------------
corepack enable && corepack prepare pnpm@latest --activate FROM deps AS builder
COPY package.json pnpm-lock.yaml* ./
# Full dev install
RUN --mount=type=cache,target=/root/.local/share/pnpm/store \
pnpm install --frozen-lockfile
# -----------------------------------
FROM base AS builder
WORKDIR /app
RUN apk add --no-cache python3 make g++ gcc
RUN --mount=type=cache,target=/root/.npm \
corepack enable && corepack prepare pnpm@latest --activate
COPY --from=deps /app/node_modules ./node_modules
COPY . . COPY . .
RUN bun run build
RUN pnpm build
# Compile TypeScript scripts to JavaScript
RUN mkdir -p dist/scripts && \ RUN mkdir -p dist/scripts && \
for script in scripts/*.ts; do \ for script in scripts/*.ts; do \
node_modules/.bin/tsc --outDir dist/scripts --module commonjs --target es2020 --esModuleInterop $script || true; \ bun build "$script" --target=bun --outfile=dist/scripts/$(basename "${script%.ts}.js"); \
done done
# ----------------------------------- # ----------------------------
FROM deps AS pruner FROM deps AS pruner
WORKDIR /app RUN bun install --production --frozen-lockfile
# Prune dev dependencies and just keep the production bits # ----------------------------
RUN --mount=type=cache,target=/root/.local/share/pnpm/store \
pnpm prune --prod
# -----------------------------------
FROM base AS runner FROM base AS runner
WORKDIR /app WORKDIR /app
# Only copy production node_modules and built output
COPY --from=pruner /app/node_modules ./node_modules COPY --from=pruner /app/node_modules ./node_modules
COPY --from=builder /app/dist ./dist COPY --from=builder /app/dist ./dist
COPY --from=builder /app/package.json ./package.json COPY --from=builder /app/package.json ./package.json
COPY --from=builder /app/docker-entrypoint.sh ./docker-entrypoint.sh COPY --from=builder /app/docker-entrypoint.sh ./docker-entrypoint.sh
COPY --from=builder /app/scripts ./scripts COPY --from=builder /app/scripts ./scripts
COPY --from=builder /app/data ./data
ENV NODE_ENV=production ENV NODE_ENV=production
ENV HOST=0.0.0.0 ENV HOST=0.0.0.0
ENV PORT=4321 ENV PORT=4321
ENV DATABASE_URL=file:data/gitea-mirror.db ENV DATABASE_URL=file:data/gitea-mirror.db
# Make entrypoint executable RUN chmod +x ./docker-entrypoint.sh && \
RUN chmod +x /app/docker-entrypoint.sh
ENTRYPOINT ["/app/docker-entrypoint.sh"]
RUN apk add --no-cache wget sqlite && \
mkdir -p /app/data && \ mkdir -p /app/data && \
addgroup --system --gid 1001 nodejs && \ addgroup --system --gid 1001 nodejs && \
adduser --system --uid 1001 gitea-mirror && \ adduser --system --uid 1001 gitea-mirror && \
chown -R gitea-mirror:nodejs /app/data chown -R gitea-mirror:nodejs /app/data
COPY --from=builder --chown=gitea-mirror:nodejs /app/dist ./dist
COPY --from=pruner --chown=gitea-mirror:nodejs /app/node_modules ./node_modules
COPY --from=builder --chown=gitea-mirror:nodejs /app/package.json ./package.json
COPY --from=builder --chown=gitea-mirror:nodejs /app/scripts ./scripts
USER gitea-mirror USER gitea-mirror
VOLUME /app/data VOLUME /app/data
@@ -86,8 +51,4 @@ EXPOSE 4321
HEALTHCHECK --interval=30s --timeout=5s --start-period=5s --retries=3 \ HEALTHCHECK --interval=30s --timeout=5s --start-period=5s --retries=3 \
CMD wget --no-verbose --tries=1 --spider http://localhost:4321/ || exit 1 CMD wget --no-verbose --tries=1 --spider http://localhost:4321/ || exit 1
# Create a startup script that initializes the database before starting the application ENTRYPOINT ["./docker-entrypoint.sh"]
COPY --from=builder --chown=gitea-mirror:nodejs /app/docker-entrypoint.sh ./docker-entrypoint.sh
RUN chmod +x ./docker-entrypoint.sh
CMD ["./docker-entrypoint.sh"]

153
README.md
View File

@@ -18,7 +18,7 @@
```bash ```bash
docker compose --profile production up -d docker compose --profile production up -d
# or # or
pnpm setup && pnpm dev bun run setup && bun run dev
``` ```
<p align="center"> <p align="center">
@@ -63,9 +63,9 @@ Easily configure your GitHub and Gitea connections, set up automatic mirroring s
See the [Quick Start Guide](docs/quickstart.md) for detailed instructions on getting up and running quickly. See the [Quick Start Guide](docs/quickstart.md) for detailed instructions on getting up and running quickly.
### Prerequisites -### Prerequisites
- Node.js 22 or later - Bun 1.2.9 or later
- A GitHub account with a personal access token - A GitHub account with a personal access token
- A Gitea instance with an access token - A Gitea instance with an access token
@@ -92,7 +92,7 @@ Before running the application in production mode for the first time, you need t
```bash ```bash
# Initialize the database for production mode # Initialize the database for production mode
pnpm setup bun run setup
``` ```
This will create the necessary tables. On first launch, you'll be guided through creating your admin account with a secure password. This will create the necessary tables. On first launch, you'll be guided through creating your admin account with a secure password.
@@ -110,7 +110,7 @@ Gitea Mirror provides multi-architecture Docker images that work on both ARM64 (
docker compose --profile production up -d docker compose --profile production up -d
# For development mode (requires configuration) # For development mode (requires configuration)
# Ensure you have run pnpm setup first # Ensure you have run bun run setup first
docker compose -f docker-compose.dev.yml up -d docker compose -f docker-compose.dev.yml up -d
``` ```
@@ -124,19 +124,15 @@ docker compose -f docker-compose.dev.yml up -d
##### Using Pre-built Images from GitHub Container Registry ##### Using Pre-built Images from GitHub Container Registry
If you want to run the container directly without Docker Compose, you'll need to set up a Redis instance separately: If you want to run the container directly without Docker Compose:
```bash ```bash
# First, start a Redis container
docker run -d --name gitea-mirror-redis redis:alpine
# Pull the latest multi-architecture image # Pull the latest multi-architecture image
docker pull ghcr.io/arunavo4/gitea-mirror:latest docker pull ghcr.io/arunavo4/gitea-mirror:latest
# Run the application with a link to the Redis container # Run the application with a volume for persistent data
# Note: The REDIS_URL environment variable is required and must point to the Redis container docker run -d -p 4321:4321 \
docker run -d -p 4321:4321 --link gitea-mirror-redis:redis \ -v gitea-mirror-data:/app/data \
-e REDIS_URL=redis://redis:6379 \
ghcr.io/arunavo4/gitea-mirror:latest ghcr.io/arunavo4/gitea-mirror:latest
``` ```
@@ -206,40 +202,40 @@ git clone https://github.com/arunavo4/gitea-mirror.git
cd gitea-mirror cd gitea-mirror
# Quick setup (installs dependencies and initializes the database) # Quick setup (installs dependencies and initializes the database)
pnpm setup bun run setup
# Development Mode Options # Development Mode Options
# Run in development mode # Run in development mode
pnpm dev bun run dev
# Run in development mode with clean database (removes existing DB first) # Run in development mode with clean database (removes existing DB first)
pnpm dev:clean bun run dev:clean
# Production Mode Options # Production Mode Options
# Build the application # Build the application
pnpm build bun run build
# Preview the production build # Preview the production build
pnpm preview bun run preview
# Start the production server (default) # Start the production server (default)
pnpm start bun run start
# Start the production server with a clean setup # Start the production server with a clean setup
pnpm start:fresh bun run start:fresh
# Database Management # Database Management
# Initialize the database # Initialize the database
pnpm init-db bun run init-db
# Reset users for testing first-time signup # Reset users for testing first-time signup
pnpm reset-users bun run reset-users
# Check database status # Check database status
pnpm check-db bun run check-db
``` ```
### Configuration ### Configuration
@@ -254,7 +250,7 @@ Key configuration options include:
- Scheduling options for automatic mirroring - Scheduling options for automatic mirroring
> [!IMPORTANT] > [!IMPORTANT]
> **Redis is a required component for Gitea Mirror** as it's used for job queuing and caching. > **SQLite is the only database required for Gitea Mirror**, handling both data storage and real-time event notifications.
## 🚀 Development ## 🚀 Development
@@ -262,10 +258,10 @@ Key configuration options include:
```bash ```bash
# Install dependencies # Install dependencies
pnpm setup bun run setup
# Start the development server # Start the development server
pnpm dev bun run dev
``` ```
@@ -359,9 +355,8 @@ docker compose -f docker-compose.dev.yml up -d
## Technologies Used ## Technologies Used
- **Frontend**: Astro, React, Shadcn UI, Tailwind CSS v4 - **Frontend**: Astro, React, Shadcn UI, Tailwind CSS v4
- **Backend**: Node.js - **Backend**: Bun
- **Database**: SQLite (default) or PostgreSQL - **Database**: SQLite (handles both data storage and event notifications)
- **Caching/Queue**: Redis
- **API Integration**: GitHub API (Octokit), Gitea API - **API Integration**: GitHub API (Octokit), Gitea API
## Contributing ## Contributing
@@ -439,62 +434,60 @@ Try the following steps:
> external: true > external: true
> ``` > ```
### Redis Connection Issues ### Database Persistence
> [!CAUTION]
> If the application fails to connect to Redis with errors like `ECONNREFUSED 127.0.0.1:6379`, ensure:
>
> 1. The Redis container is running:
> ```bash
> docker ps | grep redis
> ```
> 2. The `REDIS_URL` environment variable is correctly set to `redis://redis:6379` in your Docker Compose file.
> 3. Both the application and Redis containers are on the same Docker network.
> 4. If running without Docker Compose, ensure you've started a Redis container and linked it properly:
> ```bash
> # Start Redis container
> docker run -d --name gitea-mirror-redis redis:alpine
> # Run application with link to Redis
> docker run -d -p 4321:4321 --link gitea-mirror-redis:redis \
> -e REDIS_URL=redis://redis:6379 \
> ghcr.io/arunavo4/gitea-mirror:latest
> ```
#### Improving Redis Connection Resilience
> [!TIP] > [!TIP]
> For better Redis connection handling, you can modify the `src/lib/redis.ts` file to include retry logic and better error handling: > The application uses SQLite for all data storage and event notifications. Make sure the database file is properly mounted when using Docker:
>
> ```bash
> # Run with a volume for persistent data storage
> docker run -d -p 4321:4321 \
> -v gitea-mirror-data:/app/data \
> ghcr.io/arunavo4/gitea-mirror:latest
> ```
>
> For homelab/self-hosted setups, you can use the provided Docker Compose file with automatic event cleanup:
>
> ```bash
> # Clone the repository
> git clone https://github.com/arunavo4/gitea-mirror.git
> cd gitea-mirror
>
> # Start the application with Docker Compose
> docker-compose -f docker-compose.homelab.yml up -d
> ```
>
> This setup includes a cron job that runs daily to clean up old events and prevent the database from growing too large.
```typescript
import Redis from "ioredis";
// Connect to Redis using REDIS_URL environment variable or default to redis://redis:6379 #### Database Maintenance
const redisUrl = process.env.REDIS_URL ?? 'redis://redis:6379';
console.log(`Connecting to Redis at: ${redisUrl}`); > [!TIP]
> For database maintenance, you can use the provided scripts:
// Configure Redis client with connection options >
const redisOptions = { > ```bash
retryStrategy: (times) => { > # Check database integrity
// Retry with exponential backoff up to 30 seconds > bun run check-db
const delay = Math.min(times * 100, 3000); >
console.log(`Redis connection attempt ${times} failed. Retrying in ${delay}ms...`); > # Fix database issues
return delay; > bun run fix-db
}, >
maxRetriesPerRequest: 5, > # Reset user accounts (for development)
enableReadyCheck: true, > bun run reset-users
connectTimeout: 10000, >
}; > # Clean up old events (keeps last 7 days by default)
> bun run cleanup-events
export const redis = new Redis(redisUrl, redisOptions); >
export const redisPublisher = new Redis(redisUrl, redisOptions); > # Clean up old events with custom retention period (e.g., 30 days)
export const redisSubscriber = new Redis(redisUrl, redisOptions); > bun run cleanup-events 30
> ```
// Log connection events >
redis.on('connect', () => console.log('Redis client connected')); > For automated maintenance, consider setting up a cron job to run the cleanup script periodically:
redis.on('error', (err) => console.error('Redis client error:', err)); >
``` > ```bash
> # Add this to your crontab (runs daily at 2 AM)
> 0 2 * * * cd /path/to/gitea-mirror && bun run cleanup-events
> ```
> [!NOTE] > [!NOTE]

View File

@@ -11,7 +11,12 @@ export default defineConfig({
mode: 'standalone', mode: 'standalone',
}), }),
vite: { vite: {
plugins: [tailwindcss()] plugins: [tailwindcss()],
build: {
rollupOptions: {
external: ['bun']
}
}
}, },
integrations: [react()] integrations: [react()]
}); });

1794
bun.lock Normal file

File diff suppressed because it is too large Load Diff

4
crontab Normal file
View File

@@ -0,0 +1,4 @@
# Run event cleanup daily at 2 AM
0 2 * * * cd /app && bun run cleanup-events 30 >> /app/data/cleanup-events.log 2>&1
# Empty line at the end is required for cron to work properly

View File

@@ -51,7 +51,6 @@ services:
- gitea-mirror-data:/app/data - gitea-mirror-data:/app/data
depends_on: depends_on:
- gitea - gitea
- redis
environment: environment:
- NODE_ENV=development - NODE_ENV=development
- DATABASE_URL=file:data/gitea-mirror.db - DATABASE_URL=file:data/gitea-mirror.db
@@ -75,7 +74,6 @@ services:
- GITEA_ORGANIZATION=${GITEA_ORGANIZATION:-github-mirrors} - GITEA_ORGANIZATION=${GITEA_ORGANIZATION:-github-mirrors}
- GITEA_ORG_VISIBILITY=${GITEA_ORG_VISIBILITY:-public} - GITEA_ORG_VISIBILITY=${GITEA_ORG_VISIBILITY:-public}
- DELAY=${DELAY:-3600} - DELAY=${DELAY:-3600}
- REDIS_URL=redis://redis:6379
healthcheck: healthcheck:
test: ["CMD", "wget", "--no-verbose", "--tries=1", "--spider", "http://localhost:4321/"] test: ["CMD", "wget", "--no-verbose", "--tries=1", "--spider", "http://localhost:4321/"]
interval: 30s interval: 30s
@@ -85,16 +83,7 @@ services:
networks: networks:
- gitea-network - gitea-network
redis:
image: redis:7-alpine
container_name: redis
restart: unless-stopped
ports:
- "6379:6379"
volumes:
- redis-data:/data
networks:
- gitea-network
# Define named volumes for data persistence # Define named volumes for data persistence
volumes: volumes:
@@ -102,8 +91,6 @@ volumes:
gitea-config: # Gitea config volume gitea-config: # Gitea config volume
gitea-mirror-data: # Gitea Mirror database volume gitea-mirror-data: # Gitea Mirror database volume
redis-data:
# Define networks # Define networks
networks: networks:
gitea-network: gitea-network:

View File

@@ -0,0 +1,38 @@
version: '3.8'
services:
gitea-mirror:
image: ghcr.io/arunavo4/gitea-mirror:latest
container_name: gitea-mirror
restart: unless-stopped
ports:
- "4321:4321"
volumes:
- gitea-mirror-data:/app/data
# Mount the crontab file
- ./crontab:/etc/cron.d/gitea-mirror-cron
environment:
- NODE_ENV=production
- HOST=0.0.0.0
- PORT=4321
- DATABASE_URL=sqlite://data/gitea-mirror.db
- DELAY=${DELAY:-3600}
healthcheck:
test: ["CMD", "curl", "-f", "http://localhost:4321/health"]
interval: 1m
timeout: 10s
retries: 3
start_period: 30s
# Install cron in the container and set up the cron job
command: >
sh -c "
apt-get update && apt-get install -y cron curl &&
chmod 0644 /etc/cron.d/gitea-mirror-cron &&
crontab /etc/cron.d/gitea-mirror-cron &&
service cron start &&
bun dist/server/entry.mjs
"
# Define named volumes for database persistence
volumes:
gitea-mirror-data: # Database volume

View File

@@ -19,8 +19,6 @@ services:
- "4321:4321" - "4321:4321"
volumes: volumes:
- gitea-mirror-data:/app/data - gitea-mirror-data:/app/data
depends_on:
- redis
environment: environment:
- NODE_ENV=production - NODE_ENV=production
- DATABASE_URL=file:data/gitea-mirror.db - DATABASE_URL=file:data/gitea-mirror.db
@@ -44,7 +42,6 @@ services:
- GITEA_ORGANIZATION=${GITEA_ORGANIZATION:-github-mirrors} - GITEA_ORGANIZATION=${GITEA_ORGANIZATION:-github-mirrors}
- GITEA_ORG_VISIBILITY=${GITEA_ORG_VISIBILITY:-public} - GITEA_ORG_VISIBILITY=${GITEA_ORG_VISIBILITY:-public}
- DELAY=${DELAY:-3600} - DELAY=${DELAY:-3600}
- REDIS_URL=redis://redis:6379
healthcheck: healthcheck:
test: ["CMD", "wget", "--no-verbose", "--tries=3", "--spider", "http://localhost:4321/"] test: ["CMD", "wget", "--no-verbose", "--tries=3", "--spider", "http://localhost:4321/"]
interval: 30s interval: 30s
@@ -53,16 +50,6 @@ services:
start_period: 15s start_period: 15s
profiles: ["production"] profiles: ["production"]
redis:
image: redis:7-alpine
container_name: redis
restart: unless-stopped
ports:
- "6379:6379"
volumes:
- redis-data:/data
# Define named volumes for database persistence # Define named volumes for database persistence
volumes: volumes:
gitea-mirror-data: # Database volume gitea-mirror-data: # Database volume
redis-data:

View File

@@ -5,19 +5,19 @@ set -e
# Ensure data directory exists # Ensure data directory exists
mkdir -p /app/data mkdir -p /app/data
# If pnpm is available, run setup (for dev images), else run node init directly # If bun is available, run setup (for dev images)
if command -v pnpm >/dev/null 2>&1; then if command -v bun >/dev/null 2>&1; then
echo "Running pnpm setup (if needed)..." echo "Running bun setup (if needed)..."
pnpm setup || true bun run setup || true
fi fi
# Initialize the database if it doesn't exist # Initialize the database if it doesn't exist
if [ ! -f "/app/data/gitea-mirror.db" ]; then if [ ! -f "/app/data/gitea-mirror.db" ]; then
echo "Initializing database..." echo "Initializing database..."
if [ -f "dist/scripts/init-db.js" ]; then if [ -f "dist/scripts/init-db.js" ]; then
node dist/scripts/init-db.js bun dist/scripts/init-db.js
elif [ -f "dist/scripts/manage-db.js" ]; then elif [ -f "dist/scripts/manage-db.js" ]; then
node dist/scripts/manage-db.js init bun dist/scripts/manage-db.js init
else else
echo "Warning: Could not find database initialization scripts in dist/scripts." echo "Warning: Could not find database initialization scripts in dist/scripts."
echo "Creating and initializing database manually..." echo "Creating and initializing database manually..."
@@ -113,15 +113,29 @@ if [ ! -f "/app/data/gitea-mirror.db" ]; then
timestamp TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP, timestamp TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP,
FOREIGN KEY (user_id) REFERENCES users(id) FOREIGN KEY (user_id) REFERENCES users(id)
); );
CREATE TABLE IF NOT EXISTS events (
id TEXT PRIMARY KEY,
user_id TEXT NOT NULL,
channel TEXT NOT NULL,
payload TEXT NOT NULL,
read INTEGER NOT NULL DEFAULT 0,
created_at INTEGER NOT NULL DEFAULT (strftime('%s','now')),
FOREIGN KEY (user_id) REFERENCES users(id)
);
CREATE INDEX IF NOT EXISTS idx_events_user_channel ON events(user_id, channel);
CREATE INDEX IF NOT EXISTS idx_events_created_at ON events(created_at);
CREATE INDEX IF NOT EXISTS idx_events_read ON events(read);
EOF EOF
echo "Database initialized with required tables." echo "Database initialized with required tables."
fi fi
else else
echo "Database already exists, checking for issues..." echo "Database already exists, checking for issues..."
if [ -f "dist/scripts/fix-db-issues.js" ]; then if [ -f "dist/scripts/fix-db-issues.js" ]; then
node dist/scripts/fix-db-issues.js bun dist/scripts/fix-db-issues.js
elif [ -f "dist/scripts/manage-db.js" ]; then elif [ -f "dist/scripts/manage-db.js" ]; then
node dist/scripts/manage-db.js fix bun dist/scripts/manage-db.js fix
fi fi
# Since the application is not used by anyone yet, we've removed the schema updates and migrations # Since the application is not used by anyone yet, we've removed the schema updates and migrations
@@ -130,4 +144,4 @@ fi
# Start the application # Start the application
echo "Starting Gitea Mirror..." echo "Starting Gitea Mirror..."
exec node ./dist/server/entry.mjs exec bun ./dist/server/entry.mjs

View File

@@ -3,85 +3,85 @@
"type": "module", "type": "module",
"version": "1.0.0", "version": "1.0.0",
"engines": { "engines": {
"node": ">=22.0.0" "bun": ">=1.2.9"
}, },
"scripts": { "scripts": {
"setup": "pnpm install && pnpm manage-db init", "setup": "bun install && bun run manage-db init",
"dev": "astro dev", "dev": "bunx --bun astro dev",
"dev:clean": "pnpm cleanup-db && pnpm manage-db init && astro dev", "dev:clean": "bun run cleanup-db && bun run manage-db init && bunx --bun astro dev",
"build": "astro build", "build": "bunx --bun astro build",
"cleanup-db": "rm -f gitea-mirror.db data/gitea-mirror.db", "cleanup-db": "rm -f gitea-mirror.db data/gitea-mirror.db",
"manage-db": "tsx scripts/manage-db.ts", "manage-db": "bun scripts/manage-db.ts",
"init-db": "tsx scripts/manage-db.ts init", "init-db": "bun scripts/manage-db.ts init",
"check-db": "tsx scripts/manage-db.ts check", "check-db": "bun scripts/manage-db.ts check",
"fix-db": "tsx scripts/manage-db.ts fix", "fix-db": "bun scripts/manage-db.ts fix",
"reset-users": "tsx scripts/manage-db.ts reset-users", "reset-users": "bun scripts/manage-db.ts reset-users",
"preview": "astro preview", "migrate-db": "bun scripts/migrate-db.ts",
"start": "node dist/server/entry.mjs", "cleanup-redis": "bun scripts/cleanup-redis.ts",
"start:fresh": "pnpm cleanup-db && pnpm manage-db init && node dist/server/entry.mjs", "cleanup-events": "bun scripts/cleanup-events.ts",
"test": "vitest run", "preview": "bunx --bun astro preview",
"test:watch": "vitest", "start": "bun dist/server/entry.mjs",
"astro": "astro" "start:fresh": "bun run cleanup-db && bun run manage-db init && bun dist/server/entry.mjs",
"test": "bunx --bun vitest run",
"test:watch": "bunx --bun vitest",
"astro": "bunx --bun astro"
}, },
"dependencies": { "dependencies": {
"@astrojs/mdx": "^4.2.6", "@astrojs/mdx": "^4.2.6",
"@astrojs/node": "^9.2.1", "@astrojs/node": "^9.2.1",
"@astrojs/react": "^4.2.7", "@astrojs/react": "^4.2.7",
"@libsql/client": "^0.15.4",
"@octokit/rest": "^21.1.1", "@octokit/rest": "^21.1.1",
"@radix-ui/react-avatar": "^1.1.4", "@radix-ui/react-avatar": "^1.1.9",
"@radix-ui/react-checkbox": "^1.1.5", "@radix-ui/react-checkbox": "^1.3.1",
"@radix-ui/react-dialog": "^1.1.7", "@radix-ui/react-dialog": "^1.1.13",
"@radix-ui/react-dropdown-menu": "^2.1.7", "@radix-ui/react-dropdown-menu": "^2.1.14",
"@radix-ui/react-label": "^2.1.6", "@radix-ui/react-label": "^2.1.6",
"@radix-ui/react-popover": "^1.1.13", "@radix-ui/react-popover": "^1.1.13",
"@radix-ui/react-radio-group": "^1.3.6", "@radix-ui/react-radio-group": "^1.3.6",
"@radix-ui/react-select": "^2.1.7", "@radix-ui/react-select": "^2.2.4",
"@radix-ui/react-slot": "^1.2.0", "@radix-ui/react-slot": "^1.2.2",
"@radix-ui/react-tabs": "^1.1.4", "@radix-ui/react-tabs": "^1.1.11",
"@radix-ui/react-tooltip": "^1.2.6", "@radix-ui/react-tooltip": "^1.2.6",
"@tailwindcss/vite": "^4.1.3", "@tailwindcss/vite": "^4.1.7",
"@tanstack/react-virtual": "^3.13.8", "@tanstack/react-virtual": "^3.13.8",
"@types/canvas-confetti": "^1.9.0", "@types/canvas-confetti": "^1.9.0",
"@types/react": "^19.1.2", "@types/react": "^19.1.4",
"@types/react-dom": "^19.1.2", "@types/react-dom": "^19.1.5",
"astro": "^5.7.10", "astro": "^5.7.13",
"axios": "^1.8.4", "axios": "^1.9.0",
"bcryptjs": "^3.0.2", "bcryptjs": "^3.0.2",
"canvas-confetti": "^1.9.3", "canvas-confetti": "^1.9.3",
"class-variance-authority": "^0.7.1", "class-variance-authority": "^0.7.1",
"clsx": "^2.1.1", "clsx": "^2.1.1",
"cmdk": "^1.1.1", "cmdk": "^1.1.1",
"drizzle-orm": "^0.41.0", "drizzle-orm": "^0.43.1",
"fuse.js": "^7.1.0", "fuse.js": "^7.1.0",
"ioredis": "^5.6.1",
"jsonwebtoken": "^9.0.2", "jsonwebtoken": "^9.0.2",
"lucide-react": "^0.488.0", "lucide-react": "^0.511.0",
"next-themes": "^0.4.6", "next-themes": "^0.4.6",
"react": "^19.1.0", "react": "^19.1.0",
"react-dom": "^19.1.0", "react-dom": "^19.1.0",
"react-icons": "^5.5.0", "react-icons": "^5.5.0",
"sonner": "^2.0.3", "sonner": "^2.0.3",
"superagent": "^10.2.0", "sqlite3": "^5.1.7",
"tailwind-merge": "^3.2.0", "superagent": "^10.2.1",
"tailwindcss": "^4.1.3", "tailwind-merge": "^3.3.0",
"tw-animate-css": "^1.2.5", "tailwindcss": "^4.1.7",
"tw-animate-css": "^1.3.0",
"uuid": "^11.1.0", "uuid": "^11.1.0",
"zod": "^3.24.2" "zod": "^3.25.7"
}, },
"devDependencies": { "devDependencies": {
"@testing-library/jest-dom": "^6.6.3", "@testing-library/jest-dom": "^6.6.3",
"@testing-library/react": "^16.3.0", "@testing-library/react": "^16.3.0",
"@types/bcryptjs": "^3.0.0", "@types/bcryptjs": "^3.0.0",
"@types/better-sqlite3": "^7.6.13",
"@types/jsonwebtoken": "^9.0.9", "@types/jsonwebtoken": "^9.0.9",
"@types/superagent": "^8.1.9", "@types/superagent": "^8.1.9",
"@types/uuid": "^10.0.0", "@types/uuid": "^10.0.0",
"@vitejs/plugin-react": "^4.4.0", "@vitejs/plugin-react": "^4.4.1",
"better-sqlite3": "^9.6.0",
"jsdom": "^26.1.0", "jsdom": "^26.1.0",
"tsx": "^4.19.3", "tsx": "^4.19.4",
"vitest": "^3.1.1" "vitest": "^3.1.4"
}, },
"packageManager": "pnpm@10.10.0" "packageManager": "bun@1.2.9"
} }

7713
pnpm-lock.yaml generated

File diff suppressed because it is too large Load Diff

View File

@@ -43,7 +43,7 @@ The script uses environment variables from the `.env` file in the project root:
3. Using with docker-compose: 3. Using with docker-compose:
```bash ```bash
# Ensure dependencies are installed and database is initialized # Ensure dependencies are installed and database is initialized
pnpm setup bun run setup
# First build the image # First build the image
./scripts/build-docker.sh --load ./scripts/build-docker.sh --load

View File

@@ -19,38 +19,38 @@ This is a consolidated database management tool that handles all database-relate
You can execute the database management tool using your package manager with various commands: You can execute the database management tool using your package manager with various commands:
```bash ```bash
# Checks database status (default action if no command is specified, equivalent to 'pnpm check-db') # Checks database status (default action if no command is specified, equivalent to 'bun run check-db')
pnpm manage-db bun run manage-db
# Check database status # Check database status
pnpm check-db bun run check-db
# Initialize the database (only if it doesn't exist) # Initialize the database (only if it doesn't exist)
pnpm init-db bun run init-db
# Fix database location issues # Fix database location issues
pnpm fix-db bun run fix-db
# Automatic check, fix, and initialize if needed # Automatic check, fix, and initialize if needed
pnpm db-auto bun run db-auto
# Reset all users (for testing signup flow) # Reset all users (for testing signup flow)
pnpm reset-users bun run reset-users
# Update the database schema to the latest version # Update the database schema to the latest version
pnpm update-schema bun run update-schema
# Remove database files completely # Remove database files completely
pnpm cleanup-db bun run cleanup-db
# Complete setup (install dependencies and initialize database) # Complete setup (install dependencies and initialize database)
pnpm setup bun run setup
# Start development server with a fresh database # Start development server with a fresh database
pnpm dev:clean bun run dev:clean
# Start production server with a fresh database # Start production server with a fresh database
pnpm start:fresh bun run start:fresh
``` ```
## Database File Location ## Database File Location

38
scripts/check-events.ts Normal file
View File

@@ -0,0 +1,38 @@
#!/usr/bin/env bun
/**
* Script to check events in the database
*/
import { Database } from "bun:sqlite";
import path from "path";
import fs from "fs";
// Define the database path
const dataDir = path.join(process.cwd(), "data");
if (!fs.existsSync(dataDir)) {
console.error("Data directory not found:", dataDir);
process.exit(1);
}
const dbPath = path.join(dataDir, "gitea-mirror.db");
if (!fs.existsSync(dbPath)) {
console.error("Database file not found:", dbPath);
process.exit(1);
}
// Open the database
const db = new Database(dbPath);
// Check if the events table exists
const tableExists = db.query("SELECT name FROM sqlite_master WHERE type='table' AND name='events'").get();
if (!tableExists) {
console.error("Events table does not exist");
process.exit(1);
}
// Get all events
const events = db.query("SELECT * FROM events").all();
console.log("Events in the database:");
console.log(JSON.stringify(events, null, 2));

43
scripts/cleanup-events.ts Normal file
View File

@@ -0,0 +1,43 @@
#!/usr/bin/env bun
/**
* Script to clean up old events from the database
* This script should be run periodically (e.g., daily) to prevent the events table from growing too large
*
* Usage:
* bun scripts/cleanup-events.ts [days]
*
* Where [days] is the number of days to keep events (default: 7)
*/
import { cleanupOldEvents } from "../src/lib/events";
// Parse command line arguments
const args = process.argv.slice(2);
const daysToKeep = args.length > 0 ? parseInt(args[0], 10) : 7;
if (isNaN(daysToKeep) || daysToKeep < 1) {
console.error("Error: Days to keep must be a positive number");
process.exit(1);
}
async function runCleanup() {
try {
console.log(`Starting event cleanup (retention: ${daysToKeep} days)...`);
// Call the cleanupOldEvents function from the events module
const result = await cleanupOldEvents(daysToKeep);
console.log(`Cleanup summary:`);
console.log(`- Read events deleted: ${result.readEventsDeleted}`);
console.log(`- Unread events deleted: ${result.unreadEventsDeleted}`);
console.log(`- Total events deleted: ${result.readEventsDeleted + result.unreadEventsDeleted}`);
console.log("Event cleanup completed successfully");
} catch (error) {
console.error("Error running event cleanup:", error);
process.exit(1);
}
}
// Run the cleanup
runCleanup();

33
scripts/cleanup-redis.ts Normal file
View File

@@ -0,0 +1,33 @@
#!/usr/bin/env bun
/**
* Cleanup script to remove Redis-related files and code
* This script should be run when migrating from Redis to SQLite
*/
import fs from "fs";
import path from "path";
// Files to remove
const filesToRemove = [
"src/lib/redis.ts"
];
// Remove files
console.log("Removing Redis-related files...");
for (const file of filesToRemove) {
const filePath = path.join(process.cwd(), file);
if (fs.existsSync(filePath)) {
fs.unlinkSync(filePath);
console.log(`Removed: ${file}`);
} else {
console.log(`File not found: ${file}`);
}
}
console.log("\nRedis cleanup completed successfully");
console.log("\nReminder: You should also remove Redis from your Docker Compose files and environment variables.");
console.log("The following files have been updated to use SQLite instead of Redis:");
console.log("- src/lib/helpers.ts");
console.log("- src/pages/api/sse/index.ts");
console.log("\nNew files created:");
console.log("- src/lib/events.ts");

4
scripts/docker-diagnostics.sh Executable file → Normal file
View File

@@ -105,12 +105,12 @@ echo -e "${BLUE} Recommendations ${NC}"
echo -e "${BLUE}=====================================================${NC}" echo -e "${BLUE}=====================================================${NC}"
echo -e "\n${YELLOW}For local development:${NC}" echo -e "\n${YELLOW}For local development:${NC}"
echo -e "1. ${GREEN}pnpm setup${NC} (initialize database and install dependencies)" echo -e "1. ${GREEN}bun run setup${NC} (initialize database and install dependencies)"
echo -e "2. ${GREEN}./scripts/build-docker.sh --load${NC} (build and load into Docker)" echo -e "2. ${GREEN}./scripts/build-docker.sh --load${NC} (build and load into Docker)"
echo -e "3. ${GREEN}docker-compose -f docker-compose.dev.yml up -d${NC} (start the development container)" echo -e "3. ${GREEN}docker-compose -f docker-compose.dev.yml up -d${NC} (start the development container)"
echo -e "\n${YELLOW}For production deployment (using Docker Compose):${NC}" echo -e "\n${YELLOW}For production deployment (using Docker Compose):${NC}"
echo -e "1. ${GREEN}pnpm setup${NC} (if not already done, to ensure database schema is ready)" echo -e "1. ${GREEN}bun run setup${NC} (if not already done, to ensure database schema is ready)"
echo -e "2. ${GREEN}docker-compose --profile production up -d${NC} (start the production container)" echo -e "2. ${GREEN}docker-compose --profile production up -d${NC} (start the production container)"
echo -e "\n${YELLOW}For CI/CD builds:${NC}" echo -e "\n${YELLOW}For CI/CD builds:${NC}"

View File

@@ -0,0 +1,29 @@
#!/usr/bin/env bun
/**
* Script to make events appear older for testing cleanup
*/
import { db, events } from "../src/lib/db";
async function makeEventsOld() {
try {
console.log("Making events appear older...");
// Calculate a timestamp from 2 days ago
const oldDate = new Date();
oldDate.setDate(oldDate.getDate() - 2);
// Update all events to have an older timestamp
const result = await db
.update(events)
.set({ createdAt: oldDate });
console.log(`Updated ${result.changes || 0} events to appear older`);
} catch (error) {
console.error("Error updating event timestamps:", error);
process.exit(1);
}
}
// Run the function
makeEventsOld();

View File

@@ -1,7 +1,6 @@
import fs from "fs"; import fs from "fs";
import path from "path"; import path from "path";
import { client, db } from "../src/lib/db"; import { Database } from "bun:sqlite";
import { configs } from "../src/lib/db";
import { v4 as uuidv4 } from "uuid"; import { v4 as uuidv4 } from "uuid";
// Command line arguments // Command line arguments
@@ -21,61 +20,66 @@ const dataDbFile = path.join(dataDir, "gitea-mirror.db");
const dataDevDbFile = path.join(dataDir, "gitea-mirror-dev.db"); const dataDevDbFile = path.join(dataDir, "gitea-mirror-dev.db");
// Database path - ensure we use absolute path // Database path - ensure we use absolute path
const dbPath = const dbPath = path.join(dataDir, "gitea-mirror.db");
process.env.DATABASE_URL || `file:${path.join(dataDir, "gitea-mirror.db")}`;
/** /**
* Ensure all required tables exist * Ensure all required tables exist
*/ */
async function ensureTablesExist() { async function ensureTablesExist() {
// Create or open the database
const db = new Database(dbPath);
const requiredTables = [ const requiredTables = [
"users", "users",
"configs", "configs",
"repositories", "repositories",
"organizations", "organizations",
"mirror_jobs", "mirror_jobs",
"events",
]; ];
for (const table of requiredTables) { for (const table of requiredTables) {
try { try {
await client.execute(`SELECT 1 FROM ${table} LIMIT 1`); // Check if table exists
} catch (error) { const result = db.query(`SELECT name FROM sqlite_master WHERE type='table' AND name='${table}'`).get();
if (error instanceof Error && error.message.includes("SQLITE_ERROR")) {
if (!result) {
console.warn(`⚠️ Table '${table}' is missing. Creating it now...`); console.warn(`⚠️ Table '${table}' is missing. Creating it now...`);
switch (table) { switch (table) {
case "users": case "users":
await client.execute( db.exec(`
`CREATE TABLE users ( CREATE TABLE users (
id TEXT PRIMARY KEY, id TEXT PRIMARY KEY,
username TEXT NOT NULL, username TEXT NOT NULL,
password TEXT NOT NULL, password TEXT NOT NULL,
email TEXT NOT NULL, email TEXT NOT NULL,
created_at INTEGER NOT NULL, created_at INTEGER NOT NULL,
updated_at INTEGER NOT NULL updated_at INTEGER NOT NULL
)` )
); `);
break; break;
case "configs": case "configs":
await client.execute( db.exec(`
`CREATE TABLE configs ( CREATE TABLE configs (
id TEXT PRIMARY KEY, id TEXT PRIMARY KEY,
user_id TEXT NOT NULL, user_id TEXT NOT NULL,
name TEXT NOT NULL, name TEXT NOT NULL,
is_active INTEGER NOT NULL DEFAULT 1, is_active INTEGER NOT NULL DEFAULT 1,
github_config TEXT NOT NULL, github_config TEXT NOT NULL,
gitea_config TEXT NOT NULL, gitea_config TEXT NOT NULL,
include TEXT NOT NULL DEFAULT '[]', include TEXT NOT NULL DEFAULT '["*"]',
exclude TEXT NOT NULL DEFAULT '[]', exclude TEXT NOT NULL DEFAULT '[]',
schedule_config TEXT NOT NULL, schedule_config TEXT NOT NULL,
created_at INTEGER NOT NULL DEFAULT (strftime('%s','now')), created_at INTEGER NOT NULL DEFAULT (strftime('%s','now')),
updated_at INTEGER NOT NULL DEFAULT (strftime('%s','now')), updated_at INTEGER NOT NULL DEFAULT (strftime('%s','now')),
FOREIGN KEY (user_id) REFERENCES users(id) FOREIGN KEY (user_id) REFERENCES users(id)
)` )
); `);
break; break;
case "repositories": case "repositories":
await client.execute( db.exec(`
`CREATE TABLE repositories ( CREATE TABLE repositories (
id TEXT PRIMARY KEY, id TEXT PRIMARY KEY,
user_id TEXT NOT NULL, user_id TEXT NOT NULL,
config_id TEXT NOT NULL, config_id TEXT NOT NULL,
@@ -104,12 +108,12 @@ async function ensureTablesExist() {
updated_at INTEGER NOT NULL DEFAULT (strftime('%s','now')), updated_at INTEGER NOT NULL DEFAULT (strftime('%s','now')),
FOREIGN KEY (user_id) REFERENCES users(id), FOREIGN KEY (user_id) REFERENCES users(id),
FOREIGN KEY (config_id) REFERENCES configs(id) FOREIGN KEY (config_id) REFERENCES configs(id)
)` )
); `);
break; break;
case "organizations": case "organizations":
await client.execute( db.exec(`
`CREATE TABLE organizations ( CREATE TABLE organizations (
id TEXT PRIMARY KEY, id TEXT PRIMARY KEY,
user_id TEXT NOT NULL, user_id TEXT NOT NULL,
config_id TEXT NOT NULL, config_id TEXT NOT NULL,
@@ -125,12 +129,12 @@ async function ensureTablesExist() {
updated_at INTEGER NOT NULL DEFAULT (strftime('%s','now')), updated_at INTEGER NOT NULL DEFAULT (strftime('%s','now')),
FOREIGN KEY (user_id) REFERENCES users(id), FOREIGN KEY (user_id) REFERENCES users(id),
FOREIGN KEY (config_id) REFERENCES configs(id) FOREIGN KEY (config_id) REFERENCES configs(id)
)` )
); `);
break; break;
case "mirror_jobs": case "mirror_jobs":
await client.execute( db.exec(`
`CREATE TABLE mirror_jobs ( CREATE TABLE mirror_jobs (
id TEXT PRIMARY KEY, id TEXT PRIMARY KEY,
user_id TEXT NOT NULL, user_id TEXT NOT NULL,
repository_id TEXT, repository_id TEXT,
@@ -142,17 +146,35 @@ async function ensureTablesExist() {
message TEXT NOT NULL, message TEXT NOT NULL,
timestamp TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP, timestamp TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP,
FOREIGN KEY (user_id) REFERENCES users(id) FOREIGN KEY (user_id) REFERENCES users(id)
)` )
); `);
break;
case "events":
db.exec(`
CREATE TABLE events (
id TEXT PRIMARY KEY,
user_id TEXT NOT NULL,
channel TEXT NOT NULL,
payload TEXT NOT NULL,
read INTEGER NOT NULL DEFAULT 0,
created_at INTEGER NOT NULL DEFAULT (strftime('%s','now')),
FOREIGN KEY (user_id) REFERENCES users(id)
)
`);
db.exec(`
CREATE INDEX idx_events_user_channel ON events(user_id, channel);
CREATE INDEX idx_events_created_at ON events(created_at);
CREATE INDEX idx_events_read ON events(read);
`);
break; break;
} }
console.log(`✅ Table '${table}' created successfully.`); console.log(`✅ Table '${table}' created successfully.`);
} else { }
} catch (error) {
console.error(`❌ Error checking table '${table}':`, error); console.error(`❌ Error checking table '${table}':`, error);
process.exit(1); process.exit(1);
} }
} }
}
} }
/** /**
@@ -168,7 +190,7 @@ async function checkDatabase() {
); );
console.warn("This file should be in the data directory."); console.warn("This file should be in the data directory.");
console.warn( console.warn(
'Run "pnpm manage-db fix" to fix this issue or "pnpm cleanup-db" to remove it.' 'Run "bun run manage-db fix" to fix this issue or "bun run cleanup-db" to remove it.'
); );
} }
@@ -180,10 +202,11 @@ async function checkDatabase() {
// Check for users // Check for users
try { try {
const userCountResult = await client.execute( const db = new Database(dbPath);
`SELECT COUNT(*) as count FROM users`
); // Check for users
const userCount = userCountResult.rows[0].count; const userCountResult = db.query(`SELECT COUNT(*) as count FROM users`).get();
const userCount = userCountResult?.count || 0;
if (userCount === 0) { if (userCount === 0) {
console.log(" No users found in the database."); console.log(" No users found in the database.");
@@ -197,10 +220,8 @@ async function checkDatabase() {
} }
// Check for configurations // Check for configurations
const configCountResult = await client.execute( const configCountResult = db.query(`SELECT COUNT(*) as count FROM configs`).get();
`SELECT COUNT(*) as count FROM configs` const configCount = configCountResult?.count || 0;
);
const configCount = configCountResult.rows[0].count;
if (configCount === 0) { if (configCount === 0) {
console.log(" No configurations found in the database."); console.log(" No configurations found in the database.");
@@ -215,12 +236,12 @@ async function checkDatabase() {
} catch (error) { } catch (error) {
console.error("❌ Error connecting to the database:", error); console.error("❌ Error connecting to the database:", error);
console.warn( console.warn(
'The database file might be corrupted. Consider running "pnpm manage-db init" to recreate it.' 'The database file might be corrupted. Consider running "bun run manage-db init" to recreate it.'
); );
} }
} else { } else {
console.warn("⚠️ WARNING: Database file not found in data directory."); console.warn("⚠️ WARNING: Database file not found in data directory.");
console.warn('Run "pnpm manage-db init" to create it.'); console.warn('Run "bun run manage-db init" to create it.');
} }
} }
@@ -235,15 +256,16 @@ async function initializeDatabase() {
if (fs.existsSync(dataDbFile)) { if (fs.existsSync(dataDbFile)) {
console.log("⚠️ Database already exists at data/gitea-mirror.db"); console.log("⚠️ Database already exists at data/gitea-mirror.db");
console.log( console.log(
'If you want to recreate the database, run "pnpm cleanup-db" first.' 'If you want to recreate the database, run "bun run cleanup-db" first.'
); );
console.log( console.log(
'Or use "pnpm manage-db reset-users" to just remove users without recreating tables.' 'Or use "bun run manage-db reset-users" to just remove users without recreating tables.'
); );
// Check if we can connect to it // Check if we can connect to it
try { try {
await client.execute(`SELECT COUNT(*) as count FROM users`); const db = new Database(dbPath);
db.query(`SELECT COUNT(*) as count FROM users`).get();
console.log("✅ Database is valid and accessible."); console.log("✅ Database is valid and accessible.");
return; return;
} catch (error) { } catch (error) {
@@ -257,22 +279,24 @@ async function initializeDatabase() {
console.log(`Initializing database at ${dbPath}...`); console.log(`Initializing database at ${dbPath}...`);
try { try {
const db = new Database(dbPath);
// Create tables if they don't exist // Create tables if they don't exist
await client.execute( db.exec(`
`CREATE TABLE IF NOT EXISTS users ( CREATE TABLE IF NOT EXISTS users (
id TEXT PRIMARY KEY, id TEXT PRIMARY KEY,
username TEXT NOT NULL, username TEXT NOT NULL,
password TEXT NOT NULL, password TEXT NOT NULL,
email TEXT NOT NULL, email TEXT NOT NULL,
created_at INTEGER NOT NULL, created_at INTEGER NOT NULL,
updated_at INTEGER NOT NULL updated_at INTEGER NOT NULL
)` )
); `);
// NOTE: We no longer create a default admin user - user will create one via signup page // NOTE: We no longer create a default admin user - user will create one via signup page
await client.execute( db.exec(`
`CREATE TABLE IF NOT EXISTS configs ( CREATE TABLE IF NOT EXISTS configs (
id TEXT PRIMARY KEY, id TEXT PRIMARY KEY,
user_id TEXT NOT NULL, user_id TEXT NOT NULL,
name TEXT NOT NULL, name TEXT NOT NULL,
@@ -285,12 +309,11 @@ async function initializeDatabase() {
created_at INTEGER NOT NULL DEFAULT (strftime('%s','now')), created_at INTEGER NOT NULL DEFAULT (strftime('%s','now')),
updated_at INTEGER NOT NULL DEFAULT (strftime('%s','now')), updated_at INTEGER NOT NULL DEFAULT (strftime('%s','now')),
FOREIGN KEY (user_id) REFERENCES users(id) FOREIGN KEY (user_id) REFERENCES users(id)
); )
` `);
);
await client.execute( db.exec(`
`CREATE TABLE IF NOT EXISTS repositories ( CREATE TABLE IF NOT EXISTS repositories (
id TEXT PRIMARY KEY, id TEXT PRIMARY KEY,
user_id TEXT NOT NULL, user_id TEXT NOT NULL,
config_id TEXT NOT NULL, config_id TEXT NOT NULL,
@@ -301,64 +324,49 @@ async function initializeDatabase() {
owner TEXT NOT NULL, owner TEXT NOT NULL,
organization TEXT, organization TEXT,
mirrored_location TEXT DEFAULT '', mirrored_location TEXT DEFAULT '',
is_private INTEGER NOT NULL DEFAULT 0, is_private INTEGER NOT NULL DEFAULT 0,
is_fork INTEGER NOT NULL DEFAULT 0, is_fork INTEGER NOT NULL DEFAULT 0,
forked_from TEXT, forked_from TEXT,
has_issues INTEGER NOT NULL DEFAULT 0, has_issues INTEGER NOT NULL DEFAULT 0,
is_starred INTEGER NOT NULL DEFAULT 0, is_starred INTEGER NOT NULL DEFAULT 0,
is_archived INTEGER NOT NULL DEFAULT 0, is_archived INTEGER NOT NULL DEFAULT 0,
size INTEGER NOT NULL DEFAULT 0, size INTEGER NOT NULL DEFAULT 0,
has_lfs INTEGER NOT NULL DEFAULT 0, has_lfs INTEGER NOT NULL DEFAULT 0,
has_submodules INTEGER NOT NULL DEFAULT 0, has_submodules INTEGER NOT NULL DEFAULT 0,
default_branch TEXT NOT NULL, default_branch TEXT NOT NULL,
visibility TEXT NOT NULL DEFAULT 'public', visibility TEXT NOT NULL DEFAULT 'public',
status TEXT NOT NULL DEFAULT 'imported', status TEXT NOT NULL DEFAULT 'imported',
last_mirrored INTEGER, last_mirrored INTEGER,
error_message TEXT, error_message TEXT,
created_at INTEGER NOT NULL DEFAULT (strftime('%s','now')), created_at INTEGER NOT NULL DEFAULT (strftime('%s','now')),
updated_at INTEGER NOT NULL DEFAULT (strftime('%s','now')), updated_at INTEGER NOT NULL DEFAULT (strftime('%s','now')),
FOREIGN KEY (user_id) REFERENCES users(id), FOREIGN KEY (user_id) REFERENCES users(id),
FOREIGN KEY (config_id) REFERENCES configs(id) FOREIGN KEY (config_id) REFERENCES configs(id)
); )
` `);
);
await client.execute( db.exec(`
`CREATE TABLE IF NOT EXISTS organizations ( CREATE TABLE IF NOT EXISTS organizations (
id TEXT PRIMARY KEY, id TEXT PRIMARY KEY,
user_id TEXT NOT NULL, user_id TEXT NOT NULL,
config_id TEXT NOT NULL, config_id TEXT NOT NULL,
name TEXT NOT NULL, name TEXT NOT NULL,
avatar_url TEXT NOT NULL, avatar_url TEXT NOT NULL,
membership_role TEXT NOT NULL DEFAULT 'member', membership_role TEXT NOT NULL DEFAULT 'member',
is_included INTEGER NOT NULL DEFAULT 1, is_included INTEGER NOT NULL DEFAULT 1,
status TEXT NOT NULL DEFAULT 'imported', status TEXT NOT NULL DEFAULT 'imported',
last_mirrored INTEGER, last_mirrored INTEGER,
error_message TEXT, error_message TEXT,
repository_count INTEGER NOT NULL DEFAULT 0, repository_count INTEGER NOT NULL DEFAULT 0,
created_at INTEGER NOT NULL DEFAULT (strftime('%s','now')), created_at INTEGER NOT NULL DEFAULT (strftime('%s','now')),
updated_at INTEGER NOT NULL DEFAULT (strftime('%s','now')), updated_at INTEGER NOT NULL DEFAULT (strftime('%s','now')),
FOREIGN KEY (user_id) REFERENCES users(id), FOREIGN KEY (user_id) REFERENCES users(id),
FOREIGN KEY (config_id) REFERENCES configs(id) FOREIGN KEY (config_id) REFERENCES configs(id)
); )
` `);
);
await client.execute( db.exec(`
`CREATE TABLE IF NOT EXISTS mirror_jobs ( CREATE TABLE IF NOT EXISTS mirror_jobs (
id TEXT PRIMARY KEY, id TEXT PRIMARY KEY,
user_id TEXT NOT NULL, user_id TEXT NOT NULL,
repository_id TEXT, repository_id TEXT,
@@ -370,22 +378,37 @@ async function initializeDatabase() {
message TEXT NOT NULL, message TEXT NOT NULL,
timestamp TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP, timestamp TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP,
FOREIGN KEY (user_id) REFERENCES users(id) FOREIGN KEY (user_id) REFERENCES users(id)
); )
` `);
);
db.exec(`
CREATE TABLE IF NOT EXISTS events (
id TEXT PRIMARY KEY,
user_id TEXT NOT NULL,
channel TEXT NOT NULL,
payload TEXT NOT NULL,
read INTEGER NOT NULL DEFAULT 0,
created_at INTEGER NOT NULL DEFAULT (strftime('%s','now')),
FOREIGN KEY (user_id) REFERENCES users(id)
)
`);
db.exec(`
CREATE INDEX IF NOT EXISTS idx_events_user_channel ON events(user_id, channel);
CREATE INDEX IF NOT EXISTS idx_events_created_at ON events(created_at);
CREATE INDEX IF NOT EXISTS idx_events_read ON events(read);
`);
// Insert default config if none exists // Insert default config if none exists
const configCountResult = await client.execute( const configCountResult = db.query(`SELECT COUNT(*) as count FROM configs`).get();
`SELECT COUNT(*) as count FROM configs` const configCount = configCountResult?.count || 0;
);
const configCount = configCountResult.rows[0].count;
if (configCount === 0) { if (configCount === 0) {
// Get the first user // Get the first user
const firstUserResult = await client.execute( const firstUserResult = db.query(`SELECT id FROM users LIMIT 1`).get();
`SELECT id FROM users LIMIT 1`
); if (firstUserResult) {
if (firstUserResult.rows.length > 0) { const userId = firstUserResult.id;
const userId = firstUserResult.rows[0].id;
const configId = uuidv4(); const configId = uuidv4();
const githubConfig = JSON.stringify({ const githubConfig = JSON.stringify({
username: process.env.GITHUB_USERNAME || "", username: process.env.GITHUB_USERNAME || "",
@@ -415,12 +438,12 @@ async function initializeDatabase() {
nextRun: null, nextRun: null,
}); });
await client.execute( const stmt = db.prepare(`
`
INSERT INTO configs (id, user_id, name, is_active, github_config, gitea_config, include, exclude, schedule_config, created_at, updated_at) INSERT INTO configs (id, user_id, name, is_active, github_config, gitea_config, include, exclude, schedule_config, created_at, updated_at)
VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
`, `);
[
stmt.run(
configId, configId,
userId, userId,
"Default Configuration", "Default Configuration",
@@ -431,8 +454,7 @@ async function initializeDatabase() {
exclude, exclude,
scheduleConfig, scheduleConfig,
Date.now(), Date.now(),
Date.now(), Date.now()
]
); );
} }
} }
@@ -452,21 +474,20 @@ async function resetUsers() {
try { try {
// Check if the database exists // Check if the database exists
const dbFilePath = dbPath.replace("file:", ""); const doesDbExist = fs.existsSync(dbPath);
const doesDbExist = fs.existsSync(dbFilePath);
if (!doesDbExist) { if (!doesDbExist) {
console.log( console.log(
"❌ Database file doesn't exist. Run 'pnpm manage-db init' first to create it." "❌ Database file doesn't exist. Run 'bun run manage-db init' first to create it."
); );
return; return;
} }
const db = new Database(dbPath);
// Count existing users // Count existing users
const userCountResult = await client.execute( const userCountResult = db.query(`SELECT COUNT(*) as count FROM users`).get();
`SELECT COUNT(*) as count FROM users` const userCount = userCountResult?.count || 0;
);
const userCount = userCountResult.rows[0].count;
if (userCount === 0) { if (userCount === 0) {
console.log(" No users found in the database. Nothing to reset."); console.log(" No users found in the database. Nothing to reset.");
@@ -474,63 +495,43 @@ async function resetUsers() {
} }
// Delete all users // Delete all users
await client.execute(`DELETE FROM users`); db.exec(`DELETE FROM users`);
console.log(`✅ Deleted ${userCount} users from the database.`); console.log(`✅ Deleted ${userCount} users from the database.`);
// Check dependent configurations that need to be removed // Check dependent configurations that need to be removed
const configCount = await client.execute( const configCountResult = db.query(`SELECT COUNT(*) as count FROM configs`).get();
`SELECT COUNT(*) as count FROM configs` const configCount = configCountResult?.count || 0;
);
if ( if (configCount > 0) {
configCount.rows && db.exec(`DELETE FROM configs`);
configCount.rows[0] && console.log(`✅ Deleted ${configCount} configurations.`);
Number(configCount.rows[0].count) > 0
) {
await client.execute(`DELETE FROM configs`);
console.log(`✅ Deleted ${configCount.rows[0].count} configurations.`);
} }
// Check for dependent repositories // Check for dependent repositories
const repoCount = await client.execute( const repoCountResult = db.query(`SELECT COUNT(*) as count FROM repositories`).get();
`SELECT COUNT(*) as count FROM repositories` const repoCount = repoCountResult?.count || 0;
);
if ( if (repoCount > 0) {
repoCount.rows && db.exec(`DELETE FROM repositories`);
repoCount.rows[0] && console.log(`✅ Deleted ${repoCount} repositories.`);
Number(repoCount.rows[0].count) > 0
) {
await client.execute(`DELETE FROM repositories`);
console.log(`✅ Deleted ${repoCount.rows[0].count} repositories.`);
} }
// Check for dependent organizations // Check for dependent organizations
const orgCount = await client.execute( const orgCountResult = db.query(`SELECT COUNT(*) as count FROM organizations`).get();
`SELECT COUNT(*) as count FROM organizations` const orgCount = orgCountResult?.count || 0;
);
if ( if (orgCount > 0) {
orgCount.rows && db.exec(`DELETE FROM organizations`);
orgCount.rows[0] && console.log(`✅ Deleted ${orgCount} organizations.`);
Number(orgCount.rows[0].count) > 0
) {
await client.execute(`DELETE FROM organizations`);
console.log(`✅ Deleted ${orgCount.rows[0].count} organizations.`);
} }
// Check for dependent mirror jobs // Check for dependent mirror jobs
const jobCount = await client.execute( const jobCountResult = db.query(`SELECT COUNT(*) as count FROM mirror_jobs`).get();
`SELECT COUNT(*) as count FROM mirror_jobs` const jobCount = jobCountResult?.count || 0;
);
if ( if (jobCount > 0) {
jobCount.rows && db.exec(`DELETE FROM mirror_jobs`);
jobCount.rows[0] && console.log(`✅ Deleted ${jobCount} mirror jobs.`);
Number(jobCount.rows[0].count) > 0
) {
await client.execute(`DELETE FROM mirror_jobs`);
console.log(`✅ Deleted ${jobCount.rows[0].count} mirror jobs.`);
} }
console.log( console.log(
@@ -629,19 +630,20 @@ async function fixDatabaseIssues() {
console.warn( console.warn(
"⚠️ WARNING: Production database file not found in data directory." "⚠️ WARNING: Production database file not found in data directory."
); );
console.warn('Run "pnpm manage-db init" to create it.'); console.warn('Run "bun run manage-db init" to create it.');
} else { } else {
console.log("✅ Production database file found in data directory."); console.log("✅ Production database file found in data directory.");
// Check if we can connect to the database // Check if we can connect to the database
try { try {
// Try to query the database // Try to query the database
await db.select().from(configs).limit(1); const db = new Database(dbPath);
db.query(`SELECT 1 FROM sqlite_master LIMIT 1`).get();
console.log(`✅ Successfully connected to the database.`); console.log(`✅ Successfully connected to the database.`);
} catch (error) { } catch (error) {
console.error("❌ Error connecting to the database:", error); console.error("❌ Error connecting to the database:", error);
console.warn( console.warn(
'The database file might be corrupted. Consider running "pnpm manage-db init" to recreate it.' 'The database file might be corrupted. Consider running "bun run manage-db init" to recreate it.'
); );
} }
} }
@@ -692,7 +694,7 @@ Available commands:
reset-users - Remove all users and their data reset-users - Remove all users and their data
auto - Automatic mode: check, fix, and initialize if needed auto - Automatic mode: check, fix, and initialize if needed
Usage: pnpm manage-db [command] Usage: bun run manage-db [command]
`); `);
} }
} }

View File

@@ -0,0 +1,27 @@
#!/usr/bin/env bun
/**
* Script to mark all events as read
*/
import { db, events } from "../src/lib/db";
import { eq } from "drizzle-orm";
async function markEventsAsRead() {
try {
console.log("Marking all events as read...");
// Update all events to mark them as read
const result = await db
.update(events)
.set({ read: true })
.where(eq(events.read, false));
console.log(`Marked ${result.changes || 0} events as read`);
} catch (error) {
console.error("Error marking events as read:", error);
process.exit(1);
}
}
// Run the function
markEventsAsRead();

53
scripts/migrate-db.ts Normal file
View File

@@ -0,0 +1,53 @@
#!/usr/bin/env bun
/**
* Database migration script to add the events table
* This script should be run when upgrading from a version that used Redis
*/
import { Database } from "bun:sqlite";
import fs from "fs";
import path from "path";
// Define the database path
const dataDir = path.join(process.cwd(), "data");
if (!fs.existsSync(dataDir)) {
fs.mkdirSync(dataDir, { recursive: true });
}
const dbPath = path.join(dataDir, "gitea-mirror.db");
if (!fs.existsSync(dbPath)) {
console.error("Database file not found:", dbPath);
process.exit(1);
}
// Open the database
const db = new Database(dbPath);
// Check if the events table already exists
const tableExists = db.query("SELECT name FROM sqlite_master WHERE type='table' AND name='events'").get();
if (tableExists) {
console.log("Events table already exists, skipping migration");
process.exit(0);
}
// Create the events table
console.log("Creating events table...");
db.exec(`
CREATE TABLE events (
id TEXT PRIMARY KEY,
user_id TEXT NOT NULL,
channel TEXT NOT NULL,
payload TEXT NOT NULL,
read INTEGER NOT NULL DEFAULT 0,
created_at INTEGER NOT NULL DEFAULT (unixepoch()),
FOREIGN KEY (user_id) REFERENCES users(id)
);
-- Create indexes for efficient querying
CREATE INDEX idx_events_user_channel ON events(user_id, channel);
CREATE INDEX idx_events_created_at ON events(created_at);
CREATE INDEX idx_events_read ON events(read);
`);
console.log("Migration completed successfully");

View File

@@ -1,14 +1,14 @@
import { useEffect, useState } from "react"; import { useEffect, useState } from 'react';
import { import {
Card, Card,
CardContent, CardContent,
CardDescription, CardDescription,
CardHeader, CardHeader,
CardTitle, CardTitle,
} from "@/components/ui/card"; } from '@/components/ui/card';
import { GitHubConfigForm } from "./GitHubConfigForm"; import { GitHubConfigForm } from './GitHubConfigForm';
import { GiteaConfigForm } from "./GiteaConfigForm"; import { GiteaConfigForm } from './GiteaConfigForm';
import { ScheduleConfigForm } from "./ScheduleConfigForm"; import { ScheduleConfigForm } from './ScheduleConfigForm';
import type { import type {
ConfigApiResponse, ConfigApiResponse,
GiteaConfig, GiteaConfig,
@@ -16,12 +16,13 @@ import type {
SaveConfigApiRequest, SaveConfigApiRequest,
SaveConfigApiResponse, SaveConfigApiResponse,
ScheduleConfig, ScheduleConfig,
} from "@/types/config"; } from '@/types/config';
import { Button } from "../ui/button"; import { Button } from '../ui/button';
import { useAuth } from "@/hooks/useAuth"; import { useAuth } from '@/hooks/useAuth';
import { apiRequest } from "@/lib/utils"; import { apiRequest } from '@/lib/utils';
import { Copy, CopyCheck, RefreshCw } from "lucide-react"; import { Copy, CopyCheck, RefreshCw } from 'lucide-react';
import { toast } from "sonner"; import { toast } from 'sonner';
import { Skeleton } from '@/components/ui/skeleton';
type ConfigState = { type ConfigState = {
githubConfig: GitHubConfig; githubConfig: GitHubConfig;
@@ -32,8 +33,8 @@ type ConfigState = {
export function ConfigTabs() { export function ConfigTabs() {
const [config, setConfig] = useState<ConfigState>({ const [config, setConfig] = useState<ConfigState>({
githubConfig: { githubConfig: {
username: "", username: '',
token: "", token: '',
skipForks: false, skipForks: false,
privateRepositories: false, privateRepositories: false,
mirrorIssues: false, mirrorIssues: false,
@@ -41,16 +42,14 @@ export function ConfigTabs() {
preserveOrgStructure: false, preserveOrgStructure: false,
skipStarredIssues: false, skipStarredIssues: false,
}, },
giteaConfig: { giteaConfig: {
url: "", url: '',
username: "", username: '',
token: "", token: '',
organization: "github-mirrors", organization: 'github-mirrors',
visibility: "public", visibility: 'public',
starredReposOrg: "github", starredReposOrg: 'github',
}, },
scheduleConfig: { scheduleConfig: {
enabled: false, enabled: false,
interval: 3600, interval: 3600,
@@ -58,27 +57,21 @@ export function ConfigTabs() {
}); });
const { user, refreshUser } = useAuth(); const { user, refreshUser } = useAuth();
const [isLoading, setIsLoading] = useState(true); const [isLoading, setIsLoading] = useState(true);
const [dockerCode, setDockerCode] = useState<string>(""); const [dockerCode, setDockerCode] = useState<string>('');
const [isCopied, setIsCopied] = useState<boolean>(false); const [isCopied, setIsCopied] = useState<boolean>(false);
const [isSyncing, setIsSyncing] = useState<boolean>(false); const [isSyncing, setIsSyncing] = useState<boolean>(false);
const [isConfigSaved, setIsConfigSaved] = useState<boolean>(false); const [isConfigSaved, setIsConfigSaved] = useState<boolean>(false);
// Check if all required fields are filled to enable the Save Configuration button
const isConfigFormValid = (): boolean => { const isConfigFormValid = (): boolean => {
const { githubConfig, giteaConfig } = config; const { githubConfig, giteaConfig } = config;
// Check GitHub required fields
const isGitHubValid = !!( const isGitHubValid = !!(
githubConfig.username?.trim() && githubConfig.token?.trim() githubConfig.username.trim() && githubConfig.token.trim()
); );
// Check Gitea required fields
const isGiteaValid = !!( const isGiteaValid = !!(
giteaConfig.url?.trim() && giteaConfig.url.trim() &&
giteaConfig.username?.trim() && giteaConfig.username.trim() &&
giteaConfig.token?.trim() giteaConfig.token.trim()
); );
return isGitHubValid && isGiteaValid; return isGitHubValid && isGiteaValid;
}; };
@@ -86,11 +79,12 @@ export function ConfigTabs() {
const updateLastAndNextRun = () => { const updateLastAndNextRun = () => {
const lastRun = config.scheduleConfig.lastRun const lastRun = config.scheduleConfig.lastRun
? new Date(config.scheduleConfig.lastRun) ? new Date(config.scheduleConfig.lastRun)
: new Date(); // fallback to now if lastRun is null : new Date();
const intervalInSeconds = config.scheduleConfig.interval; const intervalInSeconds = config.scheduleConfig.interval;
const nextRun = new Date(lastRun.getTime() + intervalInSeconds * 1000); const nextRun = new Date(
lastRun.getTime() + intervalInSeconds * 1000,
setConfig((prev) => ({ );
setConfig(prev => ({
...prev, ...prev,
scheduleConfig: { scheduleConfig: {
...prev.scheduleConfig, ...prev.scheduleConfig,
@@ -99,37 +93,31 @@ export function ConfigTabs() {
}, },
})); }));
}; };
updateLastAndNextRun(); updateLastAndNextRun();
}, [config.scheduleConfig.interval]); }, [config.scheduleConfig.interval]);
const handleImportGitHubData = async () => { const handleImportGitHubData = async () => {
try {
if (!user?.id) return; if (!user?.id) return;
setIsSyncing(true); setIsSyncing(true);
try {
const result = await apiRequest<{ success: boolean; message?: string }>( const result = await apiRequest<{ success: boolean; message?: string }>(
`/sync?userId=${user.id}`, `/sync?userId=${user.id}`,
{ { method: 'POST' },
method: "POST",
}
); );
result.success
if (result.success) { ? toast.success(
toast.success( 'GitHub data imported successfully! Head to the Dashboard to start mirroring repositories.',
"GitHub data imported successfully! Head to the Dashboard to start mirroring repositories." )
: toast.error(
`Failed to import GitHub data: ${
result.message || 'Unknown error'
}`,
); );
} else {
toast.error(
`Failed to import GitHub data: ${result.message || "Unknown error"}`
);
}
} catch (error) { } catch (error) {
toast.error( toast.error(
`Error importing GitHub data: ${ `Error importing GitHub data: ${
error instanceof Error ? error.message : String(error) error instanceof Error ? error.message : String(error)
}` }`,
); );
} finally { } finally {
setIsSyncing(false); setIsSyncing(false);
@@ -137,94 +125,76 @@ export function ConfigTabs() {
}; };
const handleSaveConfig = async () => { const handleSaveConfig = async () => {
try { if (!user?.id) return;
if (!user || !user.id) { const reqPayload: SaveConfigApiRequest = {
return;
}
const reqPyload: SaveConfigApiRequest = {
userId: user.id, userId: user.id,
githubConfig: config.githubConfig, githubConfig: config.githubConfig,
giteaConfig: config.giteaConfig, giteaConfig: config.giteaConfig,
scheduleConfig: config.scheduleConfig, scheduleConfig: config.scheduleConfig,
}; };
const response = await fetch("/api/config", { try {
method: "POST", const response = await fetch('/api/config', {
headers: { method: 'POST',
"Content-Type": "application/json", headers: { 'Content-Type': 'application/json' },
}, body: JSON.stringify(reqPayload),
body: JSON.stringify(reqPyload),
}); });
const result: SaveConfigApiResponse = await response.json(); const result: SaveConfigApiResponse = await response.json();
if (result.success) { if (result.success) {
await refreshUser(); await refreshUser();
setIsConfigSaved(true); setIsConfigSaved(true);
toast.success( toast.success(
"Configuration saved successfully! Now import your GitHub data to begin." 'Configuration saved successfully! Now import your GitHub data to begin.',
); );
} else { } else {
toast.error( toast.error(
`Failed to save configuration: ${result.message || "Unknown error"}` `Failed to save configuration: ${result.message || 'Unknown error'}`,
); );
} }
} catch (error) { } catch (error) {
toast.error( toast.error(
`An error occurred while saving the configuration: ${ `An error occurred while saving the configuration: ${
error instanceof Error ? error.message : String(error) error instanceof Error ? error.message : String(error)
}` }`,
); );
} }
}; };
useEffect(() => { useEffect(() => {
if (!user) return;
const fetchConfig = async () => { const fetchConfig = async () => {
try {
if (!user) {
return;
}
setIsLoading(true); setIsLoading(true);
try {
const response = await apiRequest<ConfigApiResponse>( const response = await apiRequest<ConfigApiResponse>(
`/config?userId=${user.id}`, `/config?userId=${user.id}`,
{ { method: 'GET' },
method: "GET",
}
); );
// Check if we have a valid config response
if (response && !response.error) { if (response && !response.error) {
setConfig({ setConfig({
githubConfig: response.githubConfig || config.githubConfig, githubConfig:
giteaConfig: response.giteaConfig || config.giteaConfig, response.githubConfig || config.githubConfig,
scheduleConfig: response.scheduleConfig || config.scheduleConfig, giteaConfig:
response.giteaConfig || config.giteaConfig,
scheduleConfig:
response.scheduleConfig || config.scheduleConfig,
}); });
if (response.id) setIsConfigSaved(true);
// If we got a valid config from the server, it means it was previously saved
if (response.id) {
setIsConfigSaved(true);
} }
}
// If there's an error, we'll just use the default config defined in state
setIsLoading(false);
} catch (error) { } catch (error) {
// Don't show error for first-time users, just use the default config console.warn(
console.warn("Could not fetch configuration, using defaults:", error); 'Could not fetch configuration, using defaults:',
} finally { error,
setIsLoading(false); );
} }
setIsLoading(false);
}; };
fetchConfig(); fetchConfig();
}, [user]); }, [user]);
useEffect(() => { useEffect(() => {
const generateDockerCode = () => { const generateDockerCode = () => `
return `services: services:
gitea-mirror: gitea-mirror:
image: arunavo4/gitea-mirror:latest image: arunavo4/gitea-mirror:latest
restart: unless-stopped restart: unless-stopped
@@ -243,27 +213,93 @@ export function ConfigTabs() {
- GITEA_ORGANIZATION=${config.giteaConfig.organization} - GITEA_ORGANIZATION=${config.giteaConfig.organization}
- GITEA_ORG_VISIBILITY=${config.giteaConfig.visibility} - GITEA_ORG_VISIBILITY=${config.giteaConfig.visibility}
- DELAY=${config.scheduleConfig.interval}`; - DELAY=${config.scheduleConfig.interval}`;
}; setDockerCode(generateDockerCode());
const code = generateDockerCode();
setDockerCode(code);
}, [config]); }, [config]);
const handleCopyToClipboard = (text: string) => { const handleCopyToClipboard = (text: string) => {
navigator.clipboard.writeText(text).then( navigator.clipboard.writeText(text).then(
() => { () => {
setIsCopied(true); setIsCopied(true);
toast.success("Docker configuration copied to clipboard!"); toast.success('Docker configuration copied to clipboard!');
setTimeout(() => setIsCopied(false), 2000); setTimeout(() => setIsCopied(false), 2000);
}, },
(err) => { () => toast.error('Could not copy text to clipboard.'),
toast.error("Could not copy text to clipboard.");
}
); );
}; };
// Skeleton placeholder that mirrors the configuration card layout while the
// saved config is being fetched: header (title/description + two action
// buttons), two side-by-side form panels, and a schedule section below.
function ConfigCardSkeleton() {
  return (
    <Card>
      <CardHeader className="flex-row justify-between">
        <div className="flex flex-col gap-y-1.5 m-0">
          <Skeleton className="h-6 w-48" />
          <Skeleton className="h-4 w-72" />
        </div>
        <div className="flex gap-x-4">
          <Skeleton className="h-10 w-36" />
          <Skeleton className="h-10 w-36" />
        </div>
      </CardHeader>
      <CardContent>
        <div className="flex flex-col gap-y-4">
          <div className="flex gap-x-4">
            {/* Left panel: stands in for the GitHub config form */}
            <div className="w-1/2 border rounded-lg p-4">
              <div className="flex justify-between items-center mb-4">
                <Skeleton className="h-6 w-40" />
                <Skeleton className="h-9 w-32" />
              </div>
              <div className="space-y-4">
                <Skeleton className="h-20 w-full" />
                <Skeleton className="h-20 w-full" />
                <Skeleton className="h-32 w-full" />
              </div>
            </div>
            {/* Right panel: stands in for the Gitea config form */}
            <div className="w-1/2 border rounded-lg p-4">
              <div className="flex justify-between items-center mb-4">
                <Skeleton className="h-6 w-40" />
                <Skeleton className="h-9 w-32" />
              </div>
              <div className="space-y-4">
                <Skeleton className="h-20 w-full" />
                <Skeleton className="h-20 w-full" />
                <Skeleton className="h-20 w-full" />
                <Skeleton className="h-20 w-full" />
              </div>
            </div>
          </div>
          {/* Bottom section: stands in for the schedule config form */}
          <div className="border rounded-lg p-4">
            <div className="space-y-4">
              <Skeleton className="h-8 w-48" />
              <Skeleton className="h-16 w-full" />
              <Skeleton className="h-8 w-32" />
            </div>
          </div>
        </div>
      </CardContent>
    </Card>
  );
}
// Skeleton placeholder for the "Docker Configuration" card: header text,
// the floating copy button, and the code <pre> area.
function DockerConfigSkeleton() {
  return (
    <Card>
      <CardHeader>
        <Skeleton className="h-6 w-40" />
        <Skeleton className="h-4 w-64" />
      </CardHeader>
      <CardContent className="relative">
        {/* Matches the absolutely-positioned copy-to-clipboard button */}
        <Skeleton className="h-8 w-8 absolute top-4 right-10 rounded-md" />
        <Skeleton className="h-48 w-full rounded-md" />
      </CardContent>
    </Card>
  );
}
return isLoading ? ( return isLoading ? (
<div>loading...</div> <div className="flex flex-col gap-y-6">
<ConfigCardSkeleton />
<DockerConfigSkeleton />
</div>
) : ( ) : (
<div className="flex flex-col gap-y-6"> <div className="flex flex-col gap-y-6">
<Card> <Card>
@@ -275,17 +311,16 @@ export function ConfigTabs() {
mirroring. mirroring.
</CardDescription> </CardDescription>
</div> </div>
<div className="flex gap-x-4"> <div className="flex gap-x-4">
<Button <Button
onClick={handleImportGitHubData} onClick={handleImportGitHubData}
disabled={isSyncing || !isConfigSaved} disabled={isSyncing || !isConfigSaved}
title={ title={
!isConfigSaved !isConfigSaved
? "Save configuration first" ? 'Save configuration first'
: isSyncing : isSyncing
? "Import in progress" ? 'Import in progress'
: "Import GitHub Data" : 'Import GitHub Data'
} }
> >
{isSyncing ? ( {isSyncing ? (
@@ -305,66 +340,57 @@ export function ConfigTabs() {
disabled={!isConfigFormValid()} disabled={!isConfigFormValid()}
title={ title={
!isConfigFormValid() !isConfigFormValid()
? "Please fill all required fields" ? 'Please fill all required fields'
: "Save Configuration" : 'Save Configuration'
} }
> >
Save Configuration Save Configuration
</Button> </Button>
</div> </div>
</CardHeader> </CardHeader>
<CardContent> <CardContent>
<div className="flex flex-col gap-y-4"> <div className="flex flex-col gap-y-4">
<div className="flex gap-x-4"> <div className="flex gap-x-4">
<GitHubConfigForm <GitHubConfigForm
config={config.githubConfig} config={config.githubConfig}
setConfig={(update) => setConfig={update =>
setConfig((prev) => ({ setConfig(prev => ({
...prev, ...prev,
githubConfig: githubConfig:
typeof update === "function" typeof update === 'function'
? update(prev.githubConfig) ? update(prev.githubConfig)
: update, : update,
})) }))
} }
/> />
<GiteaConfigForm <GiteaConfigForm
config={config?.giteaConfig ?? ({} as GiteaConfig)} config={config.giteaConfig}
setConfig={(update) => setConfig={update =>
setConfig((prev) => ({ setConfig(prev => ({
...prev, ...prev,
giteaConfig: giteaConfig:
typeof update === "function" typeof update === 'function'
? update(prev.giteaConfig) ? update(prev.giteaConfig)
: update, : update,
githubConfig: prev?.githubConfig ?? ({} as GitHubConfig),
scheduleConfig:
prev?.scheduleConfig ?? ({} as ScheduleConfig),
})) }))
} }
/> />
</div> </div>
<ScheduleConfigForm <ScheduleConfigForm
config={config?.scheduleConfig ?? ({} as ScheduleConfig)} config={config.scheduleConfig}
setConfig={(update) => setConfig={update =>
setConfig((prev) => ({ setConfig(prev => ({
...prev, ...prev,
scheduleConfig: scheduleConfig:
typeof update === "function" typeof update === 'function'
? update(prev.scheduleConfig) ? update(prev.scheduleConfig)
: update, : update,
githubConfig: prev?.githubConfig ?? ({} as GitHubConfig),
giteaConfig: prev?.giteaConfig ?? ({} as GiteaConfig),
})) }))
} }
/> />
</div> </div>
</CardContent> </CardContent>
</Card> </Card>
<Card> <Card>
<CardHeader> <CardHeader>
<CardTitle>Docker Configuration</CardTitle> <CardTitle>Docker Configuration</CardTitle>
@@ -372,7 +398,6 @@ export function ConfigTabs() {
Equivalent Docker configuration for your current settings. Equivalent Docker configuration for your current settings.
</CardDescription> </CardDescription>
</CardHeader> </CardHeader>
<CardContent className="relative"> <CardContent className="relative">
<Button <Button
variant="outline" variant="outline"
@@ -386,7 +411,6 @@ export function ConfigTabs() {
<Copy className="text-muted-foreground" /> <Copy className="text-muted-foreground" />
)} )}
</Button> </Button>
<pre className="bg-muted p-4 rounded-md overflow-auto text-sm"> <pre className="bg-muted p-4 rounded-md overflow-auto text-sm">
{dockerCode} {dockerCode}
</pre> </pre>

View File

@@ -9,6 +9,8 @@ import { apiRequest } from "@/lib/utils";
import type { DashboardApiResponse } from "@/types/dashboard"; import type { DashboardApiResponse } from "@/types/dashboard";
import { useSSE } from "@/hooks/useSEE"; import { useSSE } from "@/hooks/useSEE";
import { toast } from "sonner"; import { toast } from "sonner";
import { Skeleton } from "@/components/ui/skeleton";
import { Card, CardContent, CardHeader, CardTitle } from "@/components/ui/card";
export function Dashboard() { export function Dashboard() {
const { user } = useAuth(); const { user } = useAuth();
@@ -59,8 +61,6 @@ export function Dashboard() {
return; return;
} }
setIsLoading(false);
const response = await apiRequest<DashboardApiResponse>( const response = await apiRequest<DashboardApiResponse>(
`/dashboard?userId=${user.id}`, `/dashboard?userId=${user.id}`,
{ {
@@ -93,8 +93,61 @@ export function Dashboard() {
fetchDashboardData(); fetchDashboardData();
}, [user]); }, [user]);
// Status Card Skeleton component — placeholder matching one dashboard status
// card (small title + icon in the header, large figure + caption in the body)
// shown while dashboard data loads or the SSE connection is established.
function StatusCardSkeleton() {
  return (
    <Card className="overflow-hidden">
      <CardHeader className="flex flex-row items-center justify-between pb-2 space-y-0">
        <CardTitle className="text-sm font-medium">
          <Skeleton className="h-4 w-24" />
        </CardTitle>
        <Skeleton className="h-4 w-4 rounded-full" />
      </CardHeader>
      <CardContent>
        <Skeleton className="h-8 w-16 mb-1" />
        <Skeleton className="h-3 w-32" />
      </CardContent>
    </Card>
  );
}
return isLoading || !connected ? ( return isLoading || !connected ? (
<div>loading...</div> <div className="flex flex-col gap-y-6">
<div className="grid grid-cols-1 md:grid-cols-2 lg:grid-cols-4 gap-6">
<StatusCardSkeleton />
<StatusCardSkeleton />
<StatusCardSkeleton />
<StatusCardSkeleton />
</div>
<div className="flex gap-x-6 items-start">
{/* Repository List Skeleton */}
<div className="w-1/2 border rounded-lg p-4">
<div className="flex justify-between items-center mb-4">
<Skeleton className="h-6 w-32" />
<Skeleton className="h-9 w-24" />
</div>
<div className="space-y-3">
{Array.from({ length: 3 }).map((_, i) => (
<Skeleton key={i} className="h-16 w-full" />
))}
</div>
</div>
{/* Recent Activity Skeleton */}
<div className="w-1/2 border rounded-lg p-4">
<div className="flex justify-between items-center mb-4">
<Skeleton className="h-6 w-32" />
<Skeleton className="h-9 w-24" />
</div>
<div className="space-y-3">
{Array.from({ length: 3 }).map((_, i) => (
<Skeleton key={i} className="h-16 w-full" />
))}
</div>
</div>
</div>
</div>
) : ( ) : (
<div className="flex flex-col gap-y-6"> <div className="flex flex-col gap-y-6">
<div className="grid grid-cols-1 md:grid-cols-2 lg:grid-cols-4 gap-6"> <div className="grid grid-cols-1 md:grid-cols-2 lg:grid-cols-4 gap-6">

View File

@@ -4,9 +4,10 @@ import { SiGitea } from "react-icons/si";
import { ModeToggle } from "@/components/theme/ModeToggle"; import { ModeToggle } from "@/components/theme/ModeToggle";
import { Avatar, AvatarFallback, AvatarImage } from "../ui/avatar"; import { Avatar, AvatarFallback, AvatarImage } from "../ui/avatar";
import { toast } from "sonner"; import { toast } from "sonner";
import { Skeleton } from "@/components/ui/skeleton";
export function Header() { export function Header() {
const { user, logout } = useAuth(); const { user, logout, isLoading } = useAuth();
const handleLogout = async () => { const handleLogout = async () => {
toast.success("Logged out successfully"); toast.success("Logged out successfully");
@@ -15,6 +16,16 @@ export function Header() {
logout(); logout();
}; };
// Auth buttons skeleton loader — shown in place of the avatar + logout
// button while auth state is resolving, avoiding a flash of logged-out UI.
function AuthButtonsSkeleton() {
  return (
    <>
      <Skeleton className="h-10 w-10 rounded-full" /> {/* Avatar placeholder */}
      <Skeleton className="h-10 w-24" /> {/* Button placeholder */}
    </>
  );
}
return ( return (
<header className="border-b bg-background"> <header className="border-b bg-background">
<div className="flex h-[4.5rem] items-center justify-between px-6"> <div className="flex h-[4.5rem] items-center justify-between px-6">
@@ -25,7 +36,10 @@ export function Header() {
<div className="flex items-center gap-4"> <div className="flex items-center gap-4">
<ModeToggle /> <ModeToggle />
{user ? (
{isLoading ? (
<AuthButtonsSkeleton />
) : user ? (
<> <>
<Avatar> <Avatar>
<AvatarImage src="" alt="@shadcn" /> <AvatarImage src="" alt="@shadcn" />

View File

@@ -22,7 +22,7 @@ The application is built using:
- <span class="font-semibold text-foreground">React</span>: Component library for interactive UI elements - <span class="font-semibold text-foreground">React</span>: Component library for interactive UI elements
- <span class="font-semibold text-foreground">Shadcn UI</span>: UI component library built on Tailwind CSS - <span class="font-semibold text-foreground">Shadcn UI</span>: UI component library built on Tailwind CSS
- <span class="font-semibold text-foreground">SQLite</span>: Database for storing configuration and state - <span class="font-semibold text-foreground">SQLite</span>: Database for storing configuration and state
- <span class="font-semibold text-foreground">Node.js</span>: Runtime environment for the backend - <span class="font-semibold text-foreground">Bun</span>: Runtime environment for the backend
## Architecture Diagram ## Architecture Diagram
@@ -30,7 +30,7 @@ The application is built using:
graph TD graph TD
subgraph "Gitea Mirror" subgraph "Gitea Mirror"
Frontend["Frontend<br/>(Astro)"] Frontend["Frontend<br/>(Astro)"]
Backend["Backend<br/>(Node.js)"] Backend["Backend<br/>(Bun)"]
Database["Database<br/>(SQLite)"] Database["Database<br/>(SQLite)"]
Frontend <--> Backend Frontend <--> Backend
@@ -60,9 +60,9 @@ Key frontend components:
- **Configuration**: Settings for GitHub and Gitea connections - **Configuration**: Settings for GitHub and Gitea connections
- **Activity Log**: Detailed log of mirroring operations - **Activity Log**: Detailed log of mirroring operations
### Backend (Node.js) ### Backend (Bun)
The backend is built with Node.js and provides API endpoints for the frontend to interact with. It handles: The backend is built with Bun and provides API endpoints for the frontend to interact with. It handles:
- Authentication and user management - Authentication and user management
- GitHub API integration - GitHub API integration

View File

@@ -23,7 +23,7 @@ The following environment variables can be used to configure Gitea Mirror:
| Variable | Description | Default Value | Example | | Variable | Description | Default Value | Example |
|----------|-------------|---------------|---------| |----------|-------------|---------------|---------|
| `NODE_ENV` | Node environment (development, production, test) | `development` | `production` | | `NODE_ENV` | Runtime environment (development, production, test) | `development` | `production` |
| `DATABASE_URL` | SQLite database URL | `sqlite://data/gitea-mirror.db` | `sqlite://path/to/your/database.db` | | `DATABASE_URL` | SQLite database URL | `sqlite://data/gitea-mirror.db` | `sqlite://path/to/your/database.db` |
| `JWT_SECRET` | Secret key for JWT authentication | `your-secret-key-change-this-in-production` | `your-secure-random-string` | | `JWT_SECRET` | Secret key for JWT authentication | `your-secret-key-change-this-in-production` | `your-secure-random-string` |
| `HOST` | Server host | `localhost` | `0.0.0.0` | | `HOST` | Server host | `localhost` | `0.0.0.0` |

View File

@@ -16,7 +16,7 @@ Before you begin, make sure you have:
1. <span class="font-semibold text-foreground">A GitHub account with a personal access token</span> 1. <span class="font-semibold text-foreground">A GitHub account with a personal access token</span>
2. <span class="font-semibold text-foreground">A Gitea instance with an access token</span> 2. <span class="font-semibold text-foreground">A Gitea instance with an access token</span>
3. <span class="font-semibold text-foreground">Docker and docker-compose (recommended) or Node.js 18+ installed</span> 3. <span class="font-semibold text-foreground">Docker and docker-compose (recommended) or Bun 1.2.9+ installed</span>
## Installation Options ## Installation Options
@@ -51,7 +51,7 @@ If you prefer to run the application directly on your system:
2. Run the quick setup script: 2. Run the quick setup script:
```bash ```bash
pnpm setup bun run setup
``` ```
This installs dependencies and initializes the database. This installs dependencies and initializes the database.
@@ -59,13 +59,13 @@ If you prefer to run the application directly on your system:
**Development Mode:** **Development Mode:**
```bash ```bash
pnpm dev bun run dev
``` ```
**Production Mode:** **Production Mode:**
```bash ```bash
pnpm build bun run build
pnpm start bun run start
``` ```
4. Access the application at [http://localhost:4321](http://localhost:4321) 4. Access the application at [http://localhost:4321](http://localhost:4321)

View File

@@ -1,34 +1,61 @@
import { useEffect, useState, useRef } from "react"; import { useEffect, useState, useRef, useCallback } from "react";
import type { MirrorJob } from "@/lib/db/schema"; import type { MirrorJob } from "@/lib/db/schema";
interface UseSSEOptions { interface UseSSEOptions {
userId?: string; userId?: string;
onMessage: (data: MirrorJob) => void; onMessage: (data: MirrorJob) => void;
maxReconnectAttempts?: number;
reconnectDelay?: number;
} }
export const useSSE = ({ userId, onMessage }: UseSSEOptions) => { export const useSSE = ({
userId,
onMessage,
maxReconnectAttempts = 5,
reconnectDelay = 3000
}: UseSSEOptions) => {
const [connected, setConnected] = useState<boolean>(false); const [connected, setConnected] = useState<boolean>(false);
const [reconnectCount, setReconnectCount] = useState<number>(0);
const onMessageRef = useRef(onMessage); const onMessageRef = useRef(onMessage);
const eventSourceRef = useRef<EventSource | null>(null);
const reconnectTimeoutRef = useRef<number | null>(null);
// Update the ref when onMessage changes // Update the ref when onMessage changes
useEffect(() => { useEffect(() => {
onMessageRef.current = onMessage; onMessageRef.current = onMessage;
}, [onMessage]); }, [onMessage]);
useEffect(() => { // Create a stable connect function that can be called for reconnection
const connect = useCallback(() => {
if (!userId) return; if (!userId) return;
// Clean up any existing connection
if (eventSourceRef.current) {
eventSourceRef.current.close();
}
// Clear any pending reconnect timeout
if (reconnectTimeoutRef.current) {
window.clearTimeout(reconnectTimeoutRef.current);
reconnectTimeoutRef.current = null;
}
// Create new EventSource connection
const eventSource = new EventSource(`/api/sse?userId=${userId}`); const eventSource = new EventSource(`/api/sse?userId=${userId}`);
eventSourceRef.current = eventSource;
const handleMessage = (event: MessageEvent) => { const handleMessage = (event: MessageEvent) => {
try { try {
// Check if this is an error message from our server
if (event.data.startsWith('{"error":')) {
console.warn("SSE server error:", event.data);
return;
}
const parsedMessage: MirrorJob = JSON.parse(event.data); const parsedMessage: MirrorJob = JSON.parse(event.data);
onMessageRef.current(parsedMessage);
// console.log("Received new log:", parsedMessage);
onMessageRef.current(parsedMessage); // Use ref instead of prop directly
} catch (error) { } catch (error) {
console.error("Error parsing message:", error); console.error("Error parsing SSE message:", error);
} }
}; };
@@ -36,19 +63,50 @@ export const useSSE = ({ userId, onMessage }: UseSSEOptions) => {
eventSource.onopen = () => { eventSource.onopen = () => {
setConnected(true); setConnected(true);
setReconnectCount(0); // Reset reconnect counter on successful connection
console.log(`Connected to SSE for user: ${userId}`); console.log(`Connected to SSE for user: ${userId}`);
}; };
eventSource.onerror = () => { eventSource.onerror = (error) => {
console.error("SSE connection error"); console.error("SSE connection error:", error);
setConnected(false); setConnected(false);
eventSource.close(); eventSource.close();
}; eventSourceRef.current = null;
return () => { // Attempt to reconnect if we haven't exceeded max attempts
eventSource.close(); if (reconnectCount < maxReconnectAttempts) {
const nextReconnectDelay = Math.min(reconnectDelay * Math.pow(1.5, reconnectCount), 30000);
console.log(`Attempting to reconnect in ${nextReconnectDelay}ms (attempt ${reconnectCount + 1}/${maxReconnectAttempts})`);
reconnectTimeoutRef.current = window.setTimeout(() => {
setReconnectCount(prev => prev + 1);
connect();
}, nextReconnectDelay);
} else {
console.error(`Failed to reconnect after ${maxReconnectAttempts} attempts`);
}
}; };
}, [userId]); // Only depends on userId now }, [userId, maxReconnectAttempts, reconnectDelay, reconnectCount]);
// Set up the connection
useEffect(() => {
if (!userId) return;
connect();
// Cleanup function
return () => {
if (eventSourceRef.current) {
eventSourceRef.current.close();
eventSourceRef.current = null;
}
if (reconnectTimeoutRef.current) {
window.clearTimeout(reconnectTimeoutRef.current);
reconnectTimeoutRef.current = null;
}
};
}, [userId, connect]);
return { connected }; return { connected };
}; };

View File

@@ -4,7 +4,7 @@
// Environment variables // Environment variables
export const ENV = { export const ENV = {
// Node environment (development, production, test) // Runtime environment (development, production, test)
NODE_ENV: process.env.NODE_ENV || "development", NODE_ENV: process.env.NODE_ENV || "development",
// Database URL - use SQLite by default // Database URL - use SQLite by default

View File

@@ -1,21 +1,56 @@
import { z } from "zod"; import { z } from "zod";
import { createClient } from "@libsql/client";
import { drizzle } from "drizzle-orm/libsql";
import { sqliteTable, text, integer } from "drizzle-orm/sqlite-core"; import { sqliteTable, text, integer } from "drizzle-orm/sqlite-core";
import { Database } from "bun:sqlite";
import { drizzle } from "drizzle-orm/bun-sqlite";
import fs from "fs";
import path from "path"; import path from "path";
import { configSchema } from "./schema"; import { configSchema } from "./schema";
// Define the database URL - for development we'll use a local SQLite file // Define the database URL - for development we'll use a local SQLite file
const dataDir = path.join(process.cwd(), "data"); const dataDir = path.join(process.cwd(), "data");
const dbUrl = // Ensure data directory exists
process.env.DATABASE_URL || `file:${path.join(dataDir, "gitea-mirror.db")}`; if (!fs.existsSync(dataDir)) {
fs.mkdirSync(dataDir, { recursive: true });
}
// Create a client connection to the database const dbPath = path.join(dataDir, "gitea-mirror.db");
export const client = createClient({ url: dbUrl });
// Create a drizzle instance // Create an empty database file if it doesn't exist
export const db = drizzle(client); if (!fs.existsSync(dbPath)) {
fs.writeFileSync(dbPath, "");
}
// Create SQLite database instance using Bun's native driver
let sqlite: Database;
try {
sqlite = new Database(dbPath);
console.log("Successfully connected to SQLite database using Bun's native driver");
} catch (error) {
console.error("Error opening database:", error);
throw error;
}
// Create drizzle instance with the SQLite client
export const db = drizzle({ client: sqlite });
// Simple async wrapper around SQLite API for compatibility
// This maintains backward compatibility with existing code that expects a
// libsql-style client: execute(sql, params) resolving to { rows }.
export const client = {
  async execute(sql: string, params?: any[]) {
    try {
      const stmt = sqlite.query(sql);
      // Row-returning vs. side-effect statements are distinguished only by a
      // leading "select" keyword.
      // NOTE(review): statements that return rows without starting with
      // SELECT (e.g. "WITH ... SELECT", "INSERT ... RETURNING", PRAGMA)
      // would be routed to run() and yield no rows — confirm callers never
      // issue such statements through this shim.
      if (/^\s*select/i.test(sql)) {
        // NOTE(review): assumes bun:sqlite accepts positional bindings as a
        // single array argument to all()/run() — verify against the Bun
        // SQLite API docs.
        const rows = stmt.all(params ?? []);
        return { rows } as { rows: any[] };
      }
      stmt.run(params ?? []);
      // Non-SELECT statements report an empty row set for interface parity.
      return { rows: [] } as { rows: any[] };
    } catch (error) {
      console.error(`Error executing SQL: ${sql}`, error);
      throw error;
    }
  },
};
// Define the tables // Define the tables
export const users = sqliteTable("users", { export const users = sqliteTable("users", {
@@ -31,6 +66,18 @@ export const users = sqliteTable("users", {
.default(new Date()), .default(new Date()),
}); });
// New table for event notifications (replacing Redis pub/sub).
// Producers insert unread rows; consumers poll per (userId, channel) and
// flip `read` once an event has been delivered.
export const events = sqliteTable("events", {
  id: text("id").primaryKey(),
  userId: text("user_id").notNull().references(() => users.id),
  channel: text("channel").notNull(),
  // JSON payload; json mode (de)serializes the column automatically.
  payload: text("payload", { mode: "json" }).notNull(),
  // Delivery flag — false until a consumer has received the event.
  read: integer("read", { mode: "boolean" }).notNull().default(false),
  // NOTE(review): .default(new Date()) is evaluated once at module load, so
  // rows inserted without an explicit createdAt share the process start time
  // (same pattern as the users table above). Harmless today because
  // publishEvent always sets createdAt explicitly — but confirm before
  // relying on the default.
  createdAt: integer("created_at", { mode: "timestamp" })
    .notNull()
    .default(new Date()),
});
const githubSchema = configSchema.shape.githubConfig; const githubSchema = configSchema.shape.githubConfig;
const giteaSchema = configSchema.shape.giteaConfig; const giteaSchema = configSchema.shape.giteaConfig;
const scheduleSchema = configSchema.shape.scheduleConfig; const scheduleSchema = configSchema.shape.scheduleConfig;

View File

@@ -140,3 +140,15 @@ export const organizationSchema = z.object({
}); });
export type Organization = z.infer<typeof organizationSchema>; export type Organization = z.infer<typeof organizationSchema>;
// Event schema (for SQLite-based pub/sub) — zod validator mirroring the
// shape of the `events` table.
export const eventSchema = z.object({
  id: z.string().uuid().optional(), // generated (uuid v4) when omitted
  userId: z.string().uuid(),
  channel: z.string().min(1),
  payload: z.any(), // arbitrary JSON-serializable payload
  read: z.boolean().default(false), // unread until delivered to a consumer
  createdAt: z.date().default(() => new Date()),
});

export type Event = z.infer<typeof eventSchema>;

161
src/lib/events.ts Normal file
View File

@@ -0,0 +1,161 @@
import { and, eq, gt, inArray, lt } from "drizzle-orm";
import { v4 as uuidv4 } from "uuid";

import { db, events } from "./db";
/**
 * Publishes an event to a specific channel for a user.
 *
 * This replaces Redis pub/sub with SQLite storage: the event is inserted as
 * an unread row and later picked up by pollers (see getNewEvents).
 *
 * @returns the generated id of the stored event
 * @throws Error when the insert fails (original cause is logged)
 */
export async function publishEvent({
  userId,
  channel,
  payload,
}: {
  userId: string;
  channel: string;
  payload: any;
}): Promise<string> {
  try {
    const eventId = uuidv4();
    console.log(`Publishing event to channel ${channel} for user ${userId}`);

    // Serialize the payload up front; the read side parses it back with
    // JSON.parse, so the two must stay symmetric.
    const row = {
      id: eventId,
      userId,
      channel,
      payload: JSON.stringify(payload),
      createdAt: new Date(),
    };
    await db.insert(events).values(row);

    console.log(`Event published successfully with ID ${eventId}`);
    return eventId;
  } catch (error) {
    console.error("Error publishing event:", error);
    throw new Error("Failed to publish event");
  }
}
/**
* Gets new events for a specific user and channel
* This replaces Redis subscribe with SQLite polling
*/
export async function getNewEvents({
userId,
channel,
lastEventTime,
}: {
userId: string;
channel: string;
lastEventTime?: Date;
}): Promise<any[]> {
try {
console.log(`Getting new events for user ${userId} in channel ${channel}`);
if (lastEventTime) {
console.log(`Looking for events after ${lastEventTime.toISOString()}`);
}
// Build the query
let query = db
.select()
.from(events)
.where(
and(
eq(events.userId, userId),
eq(events.channel, channel),
eq(events.read, false)
)
)
.orderBy(events.createdAt);
// Add time filter if provided
if (lastEventTime) {
query = query.where(gt(events.createdAt, lastEventTime));
}
// Execute the query
const newEvents = await query;
console.log(`Found ${newEvents.length} new events`);
// Mark events as read
if (newEvents.length > 0) {
console.log(`Marking ${newEvents.length} events as read`);
await db
.update(events)
.set({ read: true })
.where(
and(
eq(events.userId, userId),
eq(events.channel, channel),
eq(events.read, false)
)
);
}
// Parse the payloads
return newEvents.map(event => ({
...event,
payload: JSON.parse(event.payload as string),
}));
} catch (error) {
console.error("Error getting new events:", error);
return [];
}
}
/**
* Cleans up old events to prevent the database from growing too large
* Should be called periodically (e.g., daily via a cron job)
*
* @param maxAgeInDays Number of days to keep events (default: 7)
* @param cleanupUnreadAfterDays Number of days after which to clean up unread events (default: 2x maxAgeInDays)
* @returns Object containing the number of read and unread events deleted
*/
/**
 * Cleans up old events to prevent the database from growing too large.
 * Should be called periodically (e.g., daily via a cron job).
 *
 * Read events are deleted after `maxAgeInDays`; unread events are kept
 * longer (default 2x the retention period) in case a subscriber is lagging.
 *
 * Fix: the unread retention used `cleanupUnreadAfterDays || (maxAgeInDays * 2)`,
 * so an explicit `0` (purge unread immediately) was clobbered by `||` and
 * silently replaced with the default. `??` only substitutes for null/undefined.
 *
 * @param maxAgeInDays Number of days to keep read events (default: 7)
 * @param cleanupUnreadAfterDays Number of days after which to clean up unread events (default: 2x maxAgeInDays)
 * @returns Object containing the number of read and unread events deleted
 */
export async function cleanupOldEvents(
  maxAgeInDays: number = 7,
  cleanupUnreadAfterDays?: number
): Promise<{ readEventsDeleted: number; unreadEventsDeleted: number }> {
  try {
    console.log(`Cleaning up events older than ${maxAgeInDays} days...`);

    // Returns the Date exactly `days` days before now.
    const cutoff = (days: number): Date => {
      const d = new Date();
      d.setDate(d.getDate() - days);
      return d;
    };

    // Delete read events older than the retention period.
    const readResult = await db
      .delete(events)
      .where(and(eq(events.read, true), lt(events.createdAt, cutoff(maxAgeInDays))));
    const readEventsDeleted = readResult.changes || 0;
    console.log(`Deleted ${readEventsDeleted} read events`);

    // Delete unread events once they are significantly older; `??` preserves
    // an explicit 0 passed by the caller.
    const unreadMaxAge = cleanupUnreadAfterDays ?? maxAgeInDays * 2;
    const unreadResult = await db
      .delete(events)
      .where(and(eq(events.read, false), lt(events.createdAt, cutoff(unreadMaxAge))));
    const unreadEventsDeleted = unreadResult.changes || 0;
    console.log(`Deleted ${unreadEventsDeleted} unread events`);

    return { readEventsDeleted, unreadEventsDeleted };
  } catch (error) {
    // Cleanup is non-critical maintenance: report zero deletions on failure
    // rather than propagating the error to the scheduler.
    console.error("Error cleaning up old events:", error);
    return { readEventsDeleted: 0, unreadEventsDeleted: 0 };
  }
}

View File

@@ -1,7 +1,7 @@
import type { RepoStatus } from "@/types/Repository"; import type { RepoStatus } from "@/types/Repository";
import { db, mirrorJobs } from "./db"; import { db, mirrorJobs } from "./db";
import { v4 as uuidv4 } from "uuid"; import { v4 as uuidv4 } from "uuid";
import { redisPublisher } from "./redis"; import { publishEvent } from "./events";
export async function createMirrorJob({ export async function createMirrorJob({
userId, userId,
@@ -40,10 +40,16 @@ export async function createMirrorJob({
}; };
try { try {
// Insert the job into the database
await db.insert(mirrorJobs).values(job); await db.insert(mirrorJobs).values(job);
// Publish the event using SQLite instead of Redis
const channel = `mirror-status:${userId}`; const channel = `mirror-status:${userId}`;
await redisPublisher.publish(channel, JSON.stringify(job)); await publishEvent({
userId,
channel,
payload: job
});
return jobId; return jobId;
} catch (error) { } catch (error) {

View File

@@ -1,30 +0,0 @@
import Redis from "ioredis";

// Connect to Redis using the REDIS_URL environment variable, defaulting to
// redis://redis:6379 — the service hostname used by Docker Compose — so the
// app still starts when the variable is unset.
const redisUrl = process.env.REDIS_URL ?? 'redis://redis:6379';
console.log(`Connecting to Redis at: ${redisUrl}`);

// Shared connection options for all three clients below.
const redisOptions = {
  retryStrategy: (times: number) => {
    // Linear backoff (100 ms per attempt), capped at 3 seconds between retries.
    const delay = Math.min(times * 100, 3000);
    console.log(`Redis connection attempt ${times} failed. Retrying in ${delay}ms...`);
    return delay;
  },
  maxRetriesPerRequest: 5,
  enableReadyCheck: true,
  connectTimeout: 10000,
};

// Separate clients: a subscribed ioredis connection cannot issue regular
// commands, so publishing and subscribing need their own connections.
export const redis = new Redis(redisUrl, redisOptions);
export const redisPublisher = new Redis(redisUrl, redisOptions); // For publishing
export const redisSubscriber = new Redis(redisUrl, redisOptions); // For subscribing

// Log connection lifecycle events (general-purpose client only).
redis.on('connect', () => console.log('Redis client connected'));
redis.on('error', (err) => console.error('Redis client error:', err));
redis.on('ready', () => console.log('Redis client ready'));
redis.on('reconnecting', () => console.log('Redis client reconnecting...'));

View File

@@ -1,5 +1,5 @@
import type { APIRoute } from "astro"; import type { APIRoute } from "astro";
import { redisSubscriber } from "@/lib/redis"; import { getNewEvents } from "@/lib/events";
export const GET: APIRoute = async ({ request }) => { export const GET: APIRoute = async ({ request }) => {
const url = new URL(request.url); const url = new URL(request.url);
@@ -11,50 +11,89 @@ export const GET: APIRoute = async ({ request }) => {
const channel = `mirror-status:${userId}`; const channel = `mirror-status:${userId}`;
let isClosed = false; let isClosed = false;
const POLL_INTERVAL = 5000; // Poll every 5 seconds (increased from 2 seconds to reduce load for low-traffic usage)
const stream = new ReadableStream({ const stream = new ReadableStream({
start(controller) { start(controller) {
const encoder = new TextEncoder(); const encoder = new TextEncoder();
let lastEventTime: Date | undefined = undefined;
let pollIntervalId: ReturnType<typeof setInterval> | null = null;
const handleMessage = (ch: string, message: string) => { // Function to send a message to the client
if (isClosed || ch !== channel) return; const sendMessage = (message: string) => {
if (isClosed) return;
try { try {
controller.enqueue(encoder.encode(`data: ${message}\n\n`)); controller.enqueue(encoder.encode(message));
} catch (err) { } catch (err) {
console.error("Stream enqueue error:", err); console.error("Stream enqueue error:", err);
} }
}; };
redisSubscriber.subscribe(channel, (err) => { // Function to poll for new events
if (err) { const pollForEvents = async () => {
isClosed = true; if (isClosed) return;
controller.error(err);
}
});
redisSubscriber.on("message", handleMessage);
try { try {
controller.enqueue(encoder.encode(": connected\n\n")); console.log(`Polling for events for user ${userId} in channel ${channel}`);
} catch (err) {
console.error("Initial enqueue error:", err);
}
// Get new events from SQLite
const events = await getNewEvents({
userId,
channel,
lastEventTime,
});
console.log(`Found ${events.length} new events`);
// Send events to client
if (events.length > 0) {
// Update last event time
lastEventTime = events[events.length - 1].createdAt;
// Send each event to the client
for (const event of events) {
console.log(`Sending event: ${JSON.stringify(event.payload)}`);
sendMessage(`data: ${JSON.stringify(event.payload)}\n\n`);
}
}
} catch (err) {
console.error("Error polling for events:", err);
sendMessage(`data: {"error": "Error polling for events"}\n\n`);
}
};
// Send initial connection message
sendMessage(": connected\n\n");
// Start polling for events
pollForEvents();
// Set up polling interval
pollIntervalId = setInterval(pollForEvents, POLL_INTERVAL);
// Send a heartbeat every 30 seconds to keep the connection alive
const heartbeatInterval = setInterval(() => {
if (!isClosed) {
sendMessage(": heartbeat\n\n");
} else {
clearInterval(heartbeatInterval);
}
}, 30000);
// Handle client disconnection
request.signal?.addEventListener("abort", () => { request.signal?.addEventListener("abort", () => {
if (!isClosed) { if (!isClosed) {
isClosed = true; isClosed = true;
redisSubscriber.off("message", handleMessage); if (pollIntervalId) {
redisSubscriber.unsubscribe(channel); clearInterval(pollIntervalId);
}
controller.close(); controller.close();
} }
}); });
}, },
cancel() { cancel() {
// extra safety in case cancel is triggered // Extra safety in case cancel is triggered
if (!isClosed) {
isClosed = true; isClosed = true;
redisSubscriber.unsubscribe(channel);
}
}, },
}); });

View File

@@ -0,0 +1,56 @@
import type { APIRoute } from "astro";
import { publishEvent } from "@/lib/events";
import { v4 as uuidv4 } from "uuid";
/**
 * Test endpoint: publishes a synthetic mirror-status event for a user.
 * Expects a JSON body with `userId`, `message`, and `status`; responds with
 * the fabricated event on success.
 */
export const POST: APIRoute = async ({ request }) => {
  try {
    const { userId, message, status } = await request.json();

    // All three fields are mandatory for a test event.
    if (!userId || !message || !status) {
      return new Response(
        JSON.stringify({
          error: "Missing required fields: userId, message, status",
        }),
        { status: 400 }
      );
    }

    // Fabricate an event shaped like a real mirror job status update.
    const eventData = {
      id: uuidv4(),
      userId,
      repositoryId: uuidv4(),
      repositoryName: "test-repo",
      message,
      status,
      timestamp: new Date(),
    };

    // Publish on the same channel the SSE endpoint polls for this user.
    await publishEvent({
      userId,
      channel: `mirror-status:${userId}`,
      payload: eventData,
    });

    return new Response(
      JSON.stringify({
        success: true,
        message: "Event published successfully",
        event: eventData,
      }),
      { status: 200 }
    );
  } catch (error) {
    console.error("Error publishing test event:", error);
    return new Response(
      JSON.stringify({
        error: "Failed to publish event",
        details: error instanceof Error ? error.message : String(error),
      }),
      { status: 500 }
    );
  }
};