Compare commits

55 Commits

| SHA1 |
|---|
| c4b9a82806 |
| 38e0fb33b9 |
| 22a4b71653 |
| 52568eda36 |
| a84191f0a5 |
| 33829eda20 |
| 1e63fd2278 |
| daf4ab6a93 |
| 4404af7d40 |
| 97ff8d190d |
| 3ff86de67d |
| 3d8bdff9af |
| a28a766f8b |
| 7afe364a24 |
| a4e771d3bd |
| 703156b15c |
| 20a771f340 |
| d925b3c155 |
| 47e1c7b493 |
| d7ce2a6908 |
| 4efe741c64 |
| 773842fa72 |
| 90944a40c6 |
| f436737efb |
| a4e4afdaaf |
| cbef04d4b4 |
| a988be1028 |
| 98610482ae |
| 546db472e5 |
| 70b3e412ad |
| a3ac31795c |
| f41fb9b91f |
| 0b568a3b37 |
| a1da82a718 |
| 645d495e80 |
| 0890ed0bb8 |
| fc985f29df |
| 7d32112369 |
| 3bb85a4cdb |
| 30182544ba |
| fb73f33aeb |
| 48f63bdfc8 |
| e2506a874e |
| b67473ec7e |
| 4ca4356ad1 |
| 3136a2120d |
| 615ebd5079 |
| 6e48d3f86c |
| c5de7e616d |
| 309f8c4341 |
| 0c596ac241 |
| 894be88a28 |
| 6ab7f0a5a0 |
| abe3113755 |
| f4bc28e6c2 |
@@ -30,3 +30,9 @@ JWT_SECRET=change-this-to-a-secure-random-string-in-production
 # GITEA_ORGANIZATION=github-mirrors
 # GITEA_ORG_VISIBILITY=public
 # DELAY=3600
+
+# Optional Database Cleanup Configuration (configured via web UI)
+# These environment variables are optional and only used as defaults
+# Users can configure cleanup settings through the web interface
+# CLEANUP_ENABLED=false
+# CLEANUP_RETENTION_DAYS=7
BIN  .github/assets/activity.png  vendored
Before size: 140 KiB | After size: 816 KiB

BIN  .github/assets/configuration.png  vendored
Before size: 164 KiB | After size: 945 KiB

BIN  .github/assets/dashboard.png  vendored
Before size: 194 KiB | After size: 943 KiB

BIN  .github/assets/organisations.png  vendored
Before size: 88 KiB | After size: 784 KiB

BIN  .github/assets/repositories.png  vendored
Before size: 170 KiB | After size: 970 KiB
4  .github/workflows/astro-build-test.yml  vendored
@@ -38,10 +38,10 @@ jobs:
         bun install

       - name: Run tests
-        run: bunx vitest run
+        run: bun test --coverage

       - name: Build Astro project
-        run: bunx astro build
+        run: bunx --bun astro build

       - name: Upload build artifacts
         uses: actions/upload-artifact@v4
42  CHANGELOG.md  Normal file
@@ -0,0 +1,42 @@
# Changelog

All notable changes to the Gitea Mirror project will be documented in this file.

The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/),
and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html).

## [2.5.3] - 2025-05-22

### Added
- Enhanced JWT_SECRET handling with auto-generation and persistence for improved security
- Updated Proxmox LXC deployment instructions and replaced deprecated script

## [2.5.2] - 2024-11-22

### Fixed
- Fixed version information in health API for Docker deployments by setting npm_package_version environment variable in entrypoint script

## [2.5.1] - 2024-10-01

### Fixed
- Fixed Docker entrypoint script to prevent unnecessary `bun install` on container startup
- Removed redundant dependency installation in Docker containers for pre-built images
- Fixed "PathAlreadyExists" errors during container initialization

### Changed
- Improved database initialization in Docker entrypoint script
- Added additional checks for TypeScript versions of database management scripts

## [2.5.0] - 2024-09-15

Initial public release with core functionality:

### Added
- GitHub to Gitea repository mirroring
- User authentication and management
- Dashboard with mirroring statistics
- Configuration management for mirroring settings
- Support for organization mirroring
- Automated mirroring with configurable schedules
- Docker multi-architecture support (amd64, arm64)
- LXC container deployment scripts
@@ -1,8 +1,8 @@
 # syntax=docker/dockerfile:1.4

-FROM oven/bun:1.2.9-alpine AS base
+FROM oven/bun:1.2.14-alpine AS base
 WORKDIR /app
-RUN apk add --no-cache libc6-compat python3 make g++ gcc wget sqlite
+RUN apk add --no-cache libc6-compat python3 make g++ gcc wget sqlite openssl

 # ----------------------------
 FROM base AS deps
82  README.md
@@ -14,14 +14,14 @@
 ```bash
 # Using Docker (recommended)
-docker compose --profile production up -d
+docker compose up -d

 # Using Bun
 bun run setup && bun run dev

 # Using LXC Containers
-# For Proxmox VE (online)
-curl -fsSL https://raw.githubusercontent.com/arunavo4/gitea-mirror/main/scripts/gitea-mirror-lxc-proxmox.sh | bash
+# For Proxmox VE (online) - Community script by Tobias ([CrazyWolf13](https://github.com/CrazyWolf13))
+curl -fsSL https://raw.githubusercontent.com/community-scripts/ProxmoxVED/main/install/gitea-mirror-install.sh | bash

 # For local testing (offline-friendly)
 sudo LOCAL_REPO_DIR=~/Development/gitea-mirror ./scripts/gitea-mirror-lxc-local.sh
@@ -30,7 +30,7 @@ sudo LOCAL_REPO_DIR=~/Development/gitea-mirror ./scripts/gitea-mirror-lxc-local.
 See the [LXC Container Deployment Guide](scripts/README-lxc.md).

 <p align="center">
-  <img src=".github/assets/dashboard.png" alt="Dashboard" width="80%"/>
+  <img src=".github/assets/dashboard.png" alt="Dashboard" width="full"/>
 </p>

 ## ✨ Features
@@ -50,12 +50,12 @@ See the [LXC Container Deployment Guide](scripts/README-lxc.md).
 ## 📸 Screenshots

 <p align="center">
-  <img src=".github/assets/repositories.png" width="45%"/>
-  <img src=".github/assets/organisations.png" width="45%"/>
+  <img src=".github/assets/repositories.png" width="49%"/>
+  <img src=".github/assets/organisations.png" width="49%"/>
 </p>
 <p align="center">
-  <img src=".github/assets/configuration.png" width="45%"/>
-  <img src=".github/assets/activity.png" width="45%"/>
+  <img src=".github/assets/configuration.png" width="49%"/>
+  <img src=".github/assets/activity.png" width="49%"/>
 </p>

 ### Dashboard
@@ -69,7 +69,7 @@ Easily configure your GitHub and Gitea connections, set up automatic mirroring s
 ## Getting Started

-See the [Quick Start Guide](docs/quickstart.md) for detailed instructions on getting up and running quickly.
+See the [Quick Start Guide](src/content/docs/quickstart.md) for detailed instructions on getting up and running quickly.

 ### Prerequisites
@@ -115,7 +115,7 @@ Gitea Mirror provides multi-architecture Docker images that work on both ARM64 (
 ```bash
 # Start the application using Docker Compose
-docker compose --profile production up -d
+docker compose up -d

 # For development mode (requires configuration)
 # Ensure you have run bun run setup first
@@ -162,7 +162,7 @@ cp .env.example .env
 ./scripts/build-docker.sh --push

 # Then run with Docker Compose
-docker compose --profile production up -d
+docker compose up -d
 ```

 See [Docker build documentation](./scripts/README-docker.md) for more details.
@@ -175,8 +175,9 @@ Gitea Mirror offers two deployment options for LXC containers:
 ```bash
 # One-command installation on Proxmox VE
-# Optional env overrides: CTID HOSTNAME STORAGE DISK_SIZE CORES MEMORY BRIDGE IP_CONF
-curl -fsSL https://raw.githubusercontent.com/arunavo4/gitea-mirror/main/scripts/gitea-mirror-lxc-proxmox.sh | bash
+# Uses the community-maintained script by Tobias ([CrazyWolf13](https://github.com/CrazyWolf13))
+# at [community-scripts/ProxmoxVED](https://github.com/community-scripts/ProxmoxVED)
+curl -fsSL https://raw.githubusercontent.com/community-scripts/ProxmoxVED/main/install/gitea-mirror-install.sh | bash
 ```

 **2. Local testing (offline-friendly, works on developer laptops)**
@@ -232,8 +233,10 @@ The Docker container can be configured with the following environment variables:
 - `DATABASE_URL`: SQLite database URL (default: `file:data/gitea-mirror.db`)
 - `HOST`: Host to bind to (default: `0.0.0.0`)
 - `PORT`: Port to listen on (default: `4321`)
-- `JWT_SECRET`: Secret key for JWT token generation (important for security)
+- `JWT_SECRET`: Secret key for JWT token generation (auto-generated if not provided)
+
+> [!TIP]
+> For security, Gitea Mirror will automatically generate a secure random JWT secret on first run if one isn't provided or if the default value is used. This generated secret is stored in the data directory for persistence across container restarts.

 #### Manual Installation
@@ -279,9 +282,39 @@
 bun run reset-users
 bun run check-db
 ```

+##### Database Permissions for Direct Installation
+
+> [!IMPORTANT]
+> **If you're running the application directly** (not using Docker), you may encounter SQLite permission errors. This is because SQLite requires both read/write access to the database file and write access to the directory containing the database.
+
+**Common Error:**
+```
+Error: [ERROR] SQLiteError: attempt to write a readonly database
+```
+
+**Solution:**
+```bash
+# Ensure the data directory exists and has proper permissions
+mkdir -p data
+chmod 755 data
+
+# If the database file already exists, ensure it's writable
+chmod 644 data/gitea-mirror.db
+
+# Make sure the user running the application owns the data directory
+chown -R $(whoami) data/
+```
+
+**Why Docker doesn't have this issue:**
+- Docker containers run with a dedicated user (`gitea-mirror`) that owns the `/app/data` directory
+- The container setup ensures proper permissions are set during image build
+- Volume mounts are handled by Docker with appropriate permissions
+
+**Recommended approach:** Use Docker or Docker Compose for deployment to avoid permission issues entirely.
+
 ### Configuration

-Gitea Mirror can be configured through environment variables or through the web UI. See the [Configuration Guide](docs/configuration.md) for more details.
+Gitea Mirror can be configured through environment variables or through the web UI. See the [Configuration Guide](src/content/docs/configuration.md) for more details.

 Key configuration options include:
@@ -467,7 +500,7 @@ Try the following steps:
 > ghcr.io/arunavo4/gitea-mirror:latest
 > ```
 >
-> For homelab/self-hosted setups, you can use the provided Docker Compose file with automatic event cleanup:
+> For homelab/self-hosted setups, you can use the standard Docker Compose file which includes automatic database cleanup:
 >
 > ```bash
 > # Clone the repository
@@ -475,10 +508,10 @@ Try the following steps:
 > cd gitea-mirror
 >
 > # Start the application with Docker Compose
-> docker-compose -f docker-compose.homelab.yml up -d
+> docker compose up -d
 > ```
 >
-> This setup includes a cron job that runs daily to clean up old events and prevent the database from growing too large.
+> This setup provides a complete containerized deployment for the Gitea Mirror application.

 #### Database Maintenance
@@ -495,20 +528,9 @@ Try the following steps:
 >
 > # Reset user accounts (for development)
 > bun run reset-users
->
-> # Clean up old events (keeps last 7 days by default)
-> bun run cleanup-events
->
-> # Clean up old events with custom retention period (e.g., 30 days)
-> bun run cleanup-events 30
 > ```
 >
-> For automated maintenance, consider setting up a cron job to run the cleanup script periodically:
->
-> ```bash
-> # Add this to your crontab (runs daily at 2 AM)
-> 0 2 * * * cd /path/to/gitea-mirror && bun run cleanup-events
-> ```
+> **Note:** For cleaning up old activities and events, use the cleanup button in the Activity Log page of the web interface.

 > [!NOTE]
4  crontab
@@ -1,4 +0,0 @@
-# Run event cleanup daily at 2 AM
-0 2 * * * cd /app && bun run cleanup-events 30 >> /app/data/cleanup-events.log 2>&1
-
-# Empty line at the end is required for cron to work properly
@@ -28,7 +28,7 @@ services:
     networks:
       - gitea-network
     healthcheck:
-      test: ["CMD", "wget", "--no-verbose", "--tries=1", "--spider", "http://localhost:3000/"]
+      test: ["CMD", "wget", "--no-verbose", "--tries=1", "--spider", "http://localhost:3000/api/healthz"]
       interval: 30s
       timeout: 5s
       retries: 3
@@ -75,7 +75,7 @@ services:
       - GITEA_ORG_VISIBILITY=${GITEA_ORG_VISIBILITY:-public}
       - DELAY=${DELAY:-3600}
     healthcheck:
-      test: ["CMD", "wget", "--no-verbose", "--tries=1", "--spider", "http://localhost:4321/"]
+      test: ["CMD", "wget", "--no-verbose", "--tries=1", "--spider", "http://localhost:4321/api/health"]
       interval: 30s
       timeout: 5s
       retries: 3
@@ -1,38 +0,0 @@
-version: '3.8'
-
-services:
-  gitea-mirror:
-    image: ghcr.io/arunavo4/gitea-mirror:latest
-    container_name: gitea-mirror
-    restart: unless-stopped
-    ports:
-      - "4321:4321"
-    volumes:
-      - gitea-mirror-data:/app/data
-      # Mount the crontab file
-      - ./crontab:/etc/cron.d/gitea-mirror-cron
-    environment:
-      - NODE_ENV=production
-      - HOST=0.0.0.0
-      - PORT=4321
-      - DATABASE_URL=sqlite://data/gitea-mirror.db
-      - DELAY=${DELAY:-3600}
-    healthcheck:
-      test: ["CMD", "curl", "-f", "http://localhost:4321/api/health"]
-      interval: 1m
-      timeout: 10s
-      retries: 3
-      start_period: 30s
-    # Install cron in the container and set up the cron job
-    command: >
-      sh -c "
-        apt-get update && apt-get install -y cron curl &&
-        chmod 0644 /etc/cron.d/gitea-mirror-cron &&
-        crontab /etc/cron.d/gitea-mirror-cron &&
-        service cron start &&
-        bun dist/server/entry.mjs
-      "
-
-# Define named volumes for database persistence
-volumes:
-  gitea-mirror-data: # Database volume
@@ -1,8 +1,7 @@
 # Gitea Mirror deployment configuration
-# - production: Standard deployment with real data
+# Standard deployment with automatic database maintenance

 services:
-  # Production service with real data
   gitea-mirror:
     image: ${DOCKER_REGISTRY:-ghcr.io}/${DOCKER_IMAGE:-arunavo4/gitea-mirror}:${DOCKER_TAG:-latest}
     build:
@@ -43,12 +42,11 @@ services:
       - GITEA_ORG_VISIBILITY=${GITEA_ORG_VISIBILITY:-public}
       - DELAY=${DELAY:-3600}
     healthcheck:
-      test: ["CMD", "wget", "--no-verbose", "--tries=3", "--spider", "http://localhost:4321/"]
+      test: ["CMD", "wget", "--no-verbose", "--tries=3", "--spider", "http://localhost:4321/api/health"]
       interval: 30s
       timeout: 10s
       retries: 5
       start_period: 15s
-    profiles: ["production"]

 # Define named volumes for database persistence
 volumes:
@@ -5,12 +5,36 @@ set -e
 # Ensure data directory exists
 mkdir -p /app/data

-# If bun is available, run setup (for dev images)
-if command -v bun >/dev/null 2>&1; then
-  echo "Running bun setup (if needed)..."
-  bun run setup || true
+# Generate a secure JWT secret if one isn't provided or is using the default value
+JWT_SECRET_FILE="/app/data/.jwt_secret"
+if [ "$JWT_SECRET" = "your-secret-key-change-this-in-production" ] || [ -z "$JWT_SECRET" ]; then
+  # Check if we have a previously generated secret
+  if [ -f "$JWT_SECRET_FILE" ]; then
+    echo "Using previously generated JWT secret"
+    export JWT_SECRET=$(cat "$JWT_SECRET_FILE")
+  else
+    echo "Generating a secure random JWT secret"
+    # Try to generate a secure random string using OpenSSL
+    if command -v openssl >/dev/null 2>&1; then
+      GENERATED_SECRET=$(openssl rand -hex 32)
+    else
+      # Fallback to using /dev/urandom if openssl is not available
+      echo "OpenSSL not found, using fallback method for random generation"
+      GENERATED_SECRET=$(head -c 32 /dev/urandom | sha256sum | cut -d' ' -f1)
+    fi
+    export JWT_SECRET="$GENERATED_SECRET"
+    # Save the secret to a file for persistence across container restarts
+    echo "$GENERATED_SECRET" > "$JWT_SECRET_FILE"
+    chmod 600 "$JWT_SECRET_FILE"
+  fi
+  echo "JWT_SECRET has been set to a secure random value"
 fi

+# Skip dependency installation entirely for pre-built images
+# Dependencies are already installed during the Docker build process
+
 # Initialize the database if it doesn't exist
 if [ ! -f "/app/data/gitea-mirror.db" ]; then
   echo "Initializing database..."
@@ -18,6 +42,8 @@ if [ ! -f "/app/data/gitea-mirror.db" ]; then
   bun dist/scripts/init-db.js
 elif [ -f "dist/scripts/manage-db.js" ]; then
   bun dist/scripts/manage-db.js init
+elif [ -f "scripts/manage-db.ts" ]; then
+  bun scripts/manage-db.ts init
 else
   echo "Warning: Could not find database initialization scripts in dist/scripts."
   echo "Creating and initializing database manually..."
@@ -111,9 +137,28 @@ if [ ! -f "/app/data/gitea-mirror.db" ]; then
     status TEXT NOT NULL DEFAULT 'imported',
     message TEXT NOT NULL,
     timestamp TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP,
+
+    -- New fields for job resilience
+    job_type TEXT NOT NULL DEFAULT 'mirror',
+    batch_id TEXT,
+    total_items INTEGER,
+    completed_items INTEGER DEFAULT 0,
+    item_ids TEXT, -- JSON array as text
+    completed_item_ids TEXT DEFAULT '[]', -- JSON array as text
+    in_progress INTEGER NOT NULL DEFAULT 0, -- Boolean as integer
+    started_at TIMESTAMP,
+    completed_at TIMESTAMP,
+    last_checkpoint TIMESTAMP,
+
     FOREIGN KEY (user_id) REFERENCES users(id)
   );

   CREATE INDEX IF NOT EXISTS idx_mirror_jobs_user_id ON mirror_jobs(user_id);
+  CREATE INDEX IF NOT EXISTS idx_mirror_jobs_batch_id ON mirror_jobs(batch_id);
+  CREATE INDEX IF NOT EXISTS idx_mirror_jobs_in_progress ON mirror_jobs(in_progress);
+  CREATE INDEX IF NOT EXISTS idx_mirror_jobs_job_type ON mirror_jobs(job_type);
+  CREATE INDEX IF NOT EXISTS idx_mirror_jobs_timestamp ON mirror_jobs(timestamp);

   CREATE TABLE IF NOT EXISTS events (
     id TEXT PRIMARY KEY,
     user_id TEXT NOT NULL,
@@ -136,12 +181,74 @@ else
   bun dist/scripts/fix-db-issues.js
 elif [ -f "dist/scripts/manage-db.js" ]; then
   bun dist/scripts/manage-db.js fix
+elif [ -f "scripts/manage-db.ts" ]; then
+  bun scripts/manage-db.ts fix
 fi

-# Since the application is not used by anyone yet, we've removed the schema updates and migrations
-echo "Database already exists, no migrations needed."
+# Run database migrations
+echo "Running database migrations..."
+
+# Update mirror_jobs table with new columns for resilience
+if [ -f "dist/scripts/update-mirror-jobs-table.js" ]; then
+  echo "Updating mirror_jobs table..."
+  bun dist/scripts/update-mirror-jobs-table.js
+elif [ -f "scripts/update-mirror-jobs-table.ts" ]; then
+  echo "Updating mirror_jobs table using TypeScript script..."
+  bun scripts/update-mirror-jobs-table.ts
+else
+  echo "Warning: Could not find mirror_jobs table update script."
+fi
 fi

+# Extract version from package.json and set as environment variable
+if [ -f "package.json" ]; then
+  export npm_package_version=$(grep -o '"version": *"[^"]*"' package.json | cut -d'"' -f4)
+  echo "Setting application version: $npm_package_version"
+fi
+
+# Run startup recovery to handle any interrupted jobs
+echo "Running startup recovery..."
+if [ -f "dist/scripts/startup-recovery.js" ]; then
+  echo "Running startup recovery using compiled script..."
+  bun dist/scripts/startup-recovery.js --timeout=30000
+  RECOVERY_EXIT_CODE=$?
+elif [ -f "scripts/startup-recovery.ts" ]; then
+  echo "Running startup recovery using TypeScript script..."
+  bun scripts/startup-recovery.ts --timeout=30000
+  RECOVERY_EXIT_CODE=$?
+else
+  echo "Warning: Startup recovery script not found. Skipping recovery."
+  RECOVERY_EXIT_CODE=0
+fi
+
+# Log recovery result
+if [ $RECOVERY_EXIT_CODE -eq 0 ]; then
+  echo "✅ Startup recovery completed successfully"
+elif [ $RECOVERY_EXIT_CODE -eq 1 ]; then
+  echo "⚠️ Startup recovery completed with warnings"
+else
+  echo "❌ Startup recovery failed with exit code $RECOVERY_EXIT_CODE"
+fi
+
+# Function to handle shutdown signals
+shutdown_handler() {
+  echo "🛑 Received shutdown signal, forwarding to application..."
+  if [ ! -z "$APP_PID" ]; then
+    kill -TERM "$APP_PID"
+    wait "$APP_PID"
+  fi
+  exit 0
+}
+
+# Set up signal handlers
+trap 'shutdown_handler' TERM INT HUP
+
 # Start the application
 echo "Starting Gitea Mirror..."
-exec bun ./dist/server/entry.mjs
+bun ./dist/server/entry.mjs &
+APP_PID=$!
+
+# Wait for the application to finish
+wait "$APP_PID"
249  docs/GRACEFUL_SHUTDOWN.md  Normal file
@@ -0,0 +1,249 @@
# Graceful Shutdown and Enhanced Job Recovery

This document describes the graceful shutdown and enhanced job recovery capabilities implemented in gitea-mirror v2.8.0+.

## Overview

The gitea-mirror application now includes comprehensive graceful shutdown handling and enhanced job recovery mechanisms designed specifically for containerized environments. These features ensure:

- **No data loss** during container restarts or shutdowns
- **Automatic job resumption** after application restarts
- **Clean termination** of all active processes and connections
- **Container-aware design** optimized for Docker/LXC deployments

## Features

### 1. Graceful Shutdown Manager

The shutdown manager (`src/lib/shutdown-manager.ts`) provides centralized coordination of application termination:

#### Key Capabilities:
- **Active Job Tracking**: Monitors all running mirroring/sync jobs
- **State Persistence**: Saves job progress to database before shutdown
- **Callback System**: Allows services to register cleanup functions (see the sketch below)
- **Timeout Protection**: Prevents hanging shutdowns with configurable timeouts
- **Signal Coordination**: Works with signal handlers for proper container lifecycle

#### Configuration:
- **Shutdown Timeout**: 30 seconds maximum (configurable)
- **Job Save Timeout**: 10 seconds per job (configurable)
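To make the callback and job-tracking capabilities concrete, here is a minimal usage sketch. The method names (`registerShutdownCallback`, `trackJob`, `untrackJob`) are illustrative assumptions, not the exact exports of `src/lib/shutdown-manager.ts`:

```typescript
// Hypothetical usage sketch; method names are assumptions, not the
// actual exports of src/lib/shutdown-manager.ts.
import { shutdownManager } from "@/lib/shutdown-manager";

// 1. A background service registers a cleanup function that the manager
//    runs during Step 2 of the shutdown flow.
const stopCleanupService = async () => {
  console.log("Stopping cleanup service...");
};
shutdownManager.registerShutdownCallback(stopCleanupService);

// 2. A mirroring job registers itself so its progress can be saved on
//    shutdown, and deregisters when it finishes normally.
async function runTrackedJob(jobId: string, work: () => Promise<void>) {
  shutdownManager.trackJob(jobId);
  try {
    await work();
  } finally {
    shutdownManager.untrackJob(jobId);
  }
}
```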
### 2. Signal Handlers

The signal handler system (`src/lib/signal-handlers.ts`) ensures proper response to container lifecycle events (a wiring sketch follows the list):

#### Supported Signals:
- **SIGTERM**: Docker stop, Kubernetes pod termination
- **SIGINT**: Ctrl+C, manual interruption
- **SIGHUP**: Terminal hangup, service reload
- **Uncaught Exceptions**: Emergency shutdown on critical errors
- **Unhandled Rejections**: Graceful handling of promise failures
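A registration along these lines shows how the listed events can all be routed into the shutdown manager. This is a simplified sketch, assuming standard Node/Bun `process` events; the real module may differ:

```typescript
// Simplified sketch of the signal wiring; the actual module may differ.
function setupSignalHandlers(shutdown: (reason: string) => Promise<void>) {
  for (const signal of ["SIGTERM", "SIGINT", "SIGHUP"] as const) {
    process.on(signal, () => void shutdown(`signal: ${signal}`));
  }
  process.on("uncaughtException", (err) => {
    console.error("Uncaught exception, emergency shutdown:", err);
    void shutdown("uncaughtException");
  });
  process.on("unhandledRejection", (reason) => {
    console.error("Unhandled promise rejection:", reason);
    void shutdown("unhandledRejection");
  });
}
```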
### 3. Enhanced Job Recovery

Building on the existing recovery system, new enhancements include:

#### Shutdown-Aware Processing:
- Jobs check for shutdown signals during execution
- Automatic state saving when shutdown is detected
- Proper job status management (interrupted vs failed)

#### Container Integration:
- Docker entrypoint script forwards signals correctly
- Startup recovery runs before main application
- Recovery timeouts prevent startup delays

## Usage

### Basic Operation

The graceful shutdown system is automatically initialized when the application starts. No manual configuration is required for basic operation.

### Testing

Test the graceful shutdown functionality:

```bash
# Run the integration test
bun run test-shutdown

# Clean up test data
bun run test-shutdown-cleanup

# Run unit tests
bun test src/lib/shutdown-manager.test.ts
bun test src/lib/signal-handlers.test.ts
```

### Manual Testing

1. **Start the application**:
   ```bash
   bun run dev
   # or in production
   bun run start
   ```

2. **Start a mirroring job** through the web interface

3. **Send shutdown signal**:
   ```bash
   # Send SIGTERM (recommended)
   kill -TERM <process_id>

   # Or use Ctrl+C for SIGINT
   ```

4. **Verify job state** is saved and can be resumed on restart

### Container Testing

Test with Docker:

```bash
# Build and run container
docker build -t gitea-mirror .
docker run -d --name test-shutdown gitea-mirror

# Start a job, then stop container
docker stop test-shutdown

# Restart and verify recovery
docker start test-shutdown
docker logs test-shutdown
```

## Implementation Details

### Shutdown Flow

1. **Signal Reception**: Signal handlers detect termination request
2. **Shutdown Initiation**: Shutdown manager begins graceful termination
3. **Job State Saving**: All active jobs save current progress to database
4. **Service Cleanup**: Registered callbacks stop background services
5. **Connection Cleanup**: Database connections and resources are released
6. **Process Termination**: Application exits with appropriate code

### Job State Management

During shutdown, active jobs are updated with the following fields (a sketch of this update follows the list):
- `inProgress: false` - Mark as not currently running
- `lastCheckpoint: <timestamp>` - Record shutdown time
- `message: "Job interrupted by application shutdown - will resume on restart"`
- Status remains as `"imported"` (not `"failed"`) to enable recovery
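A Drizzle-style sketch of what this update might look like. The `db` and `mirrorJobs` bindings and their import path are assumptions based on the `mirror_jobs` columns described elsewhere in this changeset:

```typescript
// Hypothetical sketch; `db` and `mirrorJobs` are assumed Drizzle bindings
// matching the mirror_jobs columns created by the Docker entrypoint.
import { eq } from "drizzle-orm";
import { db, mirrorJobs } from "@/lib/db"; // assumed import path

async function saveJobStateOnShutdown(jobId: string) {
  await db
    .update(mirrorJobs)
    .set({
      inProgress: false, // no longer running
      lastCheckpoint: new Date(), // record shutdown time
      message:
        "Job interrupted by application shutdown - will resume on restart",
      // status is intentionally left as "imported" so recovery picks it up
    })
    .where(eq(mirrorJobs.id, jobId));
}
```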
### Recovery Integration

The existing recovery system automatically detects and resumes interrupted jobs:
- Jobs with `inProgress: false` and incomplete status are candidates for recovery
- Recovery runs during application startup (before serving requests)
- Jobs resume from their last checkpoint with remaining items

## Configuration

### Environment Variables

```bash
# Optional: Adjust shutdown timeout (default: 30000ms)
SHUTDOWN_TIMEOUT=30000

# Optional: Adjust job save timeout (default: 10000ms)
JOB_SAVE_TIMEOUT=10000
```
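Inside the application, these variables would presumably be read with fallbacks to the documented defaults, along these lines (a sketch, not the verbatim implementation):

```typescript
// Sketch: parse timeout overrides from the environment with safe defaults.
const SHUTDOWN_TIMEOUT = Number(process.env.SHUTDOWN_TIMEOUT ?? 30_000); // 30s max
const JOB_SAVE_TIMEOUT = Number(process.env.JOB_SAVE_TIMEOUT ?? 10_000); // 10s per job

if (!Number.isFinite(SHUTDOWN_TIMEOUT) || SHUTDOWN_TIMEOUT <= 0) {
  throw new Error("SHUTDOWN_TIMEOUT must be a positive number of milliseconds");
}
```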
### Docker Configuration

The Docker entrypoint script includes proper signal handling:

```dockerfile
# Signals are forwarded to the application process
# SIGTERM is handled gracefully with 30-second timeout
# Container stops cleanly without force-killing processes
```

### Kubernetes Configuration

For Kubernetes deployments, configure appropriate termination grace period:

```yaml
apiVersion: v1
kind: Pod
spec:
  terminationGracePeriodSeconds: 45 # Allow time for graceful shutdown
  containers:
    - name: gitea-mirror
      # ... other configuration
```

## Monitoring and Debugging

### Logs

The application provides detailed logging during shutdown:

```
🛑 Graceful shutdown initiated by signal: SIGTERM
📊 Shutdown status: 2 active jobs, 1 callbacks
📝 Step 1: Saving active job states...
   Saving state for job abc-123...
   ✅ Saved state for job abc-123
🔧 Step 2: Executing shutdown callbacks...
   ✅ Shutdown callback 1 completed
💾 Step 3: Closing database connections...
✅ Graceful shutdown completed successfully
```

### Status Endpoints

Check shutdown manager status via API:

```bash
# Get current status (if application is running)
curl http://localhost:4321/api/health
```

### Troubleshooting

**Problem**: Jobs not resuming after restart
- **Check**: Startup recovery logs for errors
- **Verify**: Database contains interrupted jobs with correct status
- **Test**: Run `bun run startup-recovery` manually

**Problem**: Shutdown timeout reached
- **Check**: Job complexity and database performance
- **Adjust**: Increase `SHUTDOWN_TIMEOUT` environment variable
- **Monitor**: Database connection and disk I/O during shutdown

**Problem**: Container force-killed
- **Check**: Container orchestrator termination grace period
- **Adjust**: Increase grace period to allow shutdown completion
- **Monitor**: Application shutdown logs for timing issues

## Best Practices

### Development
- Always test graceful shutdown during development
- Use the provided test scripts to verify functionality
- Monitor logs for shutdown timing and job state persistence

### Production
- Set appropriate container termination grace periods
- Monitor shutdown logs for performance issues
- Use health checks to verify application readiness after restart
- Consider job complexity when planning maintenance windows

### Monitoring
- Track job recovery success rates
- Monitor shutdown duration metrics
- Alert on forced terminations or recovery failures
- Log analysis for shutdown pattern optimization

## Future Enhancements

Planned improvements for future versions:

1. **Configurable Timeouts**: Environment variable configuration for all timeouts
2. **Shutdown Metrics**: Prometheus metrics for shutdown performance
3. **Progressive Shutdown**: Graceful degradation of service capabilities
4. **Job Prioritization**: Priority-based job saving during shutdown
5. **Health Check Integration**: Readiness probes during shutdown process
170  docs/RECOVERY_IMPROVEMENTS.md  Normal file
@@ -0,0 +1,170 @@
# Job Recovery and Resume Process Improvements

This document outlines the comprehensive improvements made to the job recovery and resume process to make it more robust to application restarts, container restarts, and application crashes.

## Problems Addressed

The original recovery system had several critical issues:

1. **Middleware-based initialization**: Recovery only ran when the first request came in
2. **Database connection issues**: No validation of database connectivity before recovery attempts
3. **Limited error handling**: Insufficient error handling for various failure scenarios
4. **No startup recovery**: No mechanism to handle recovery before serving requests
5. **Incomplete job state management**: Jobs could remain in inconsistent states
6. **No retry mechanisms**: Single-attempt recovery with no fallback strategies

## Improvements Implemented

### 1. Enhanced Recovery System (`src/lib/recovery.ts`)

#### New Features:
- **Database connection validation** before attempting recovery
- **Stale job cleanup** for jobs older than 24 hours
- **Retry mechanisms** with configurable attempts and delays (sketched after the function list below)
- **Individual job error handling** to prevent one failed job from stopping recovery
- **Recovery state tracking** to prevent concurrent recovery attempts
- **Enhanced logging** with detailed job information

#### Key Functions:
- `initializeRecovery()` - Main recovery function with enhanced error handling
- `validateDatabaseConnection()` - Ensures database is accessible
- `cleanupStaleJobs()` - Removes jobs that are too old to recover
- `getRecoveryStatus()` - Returns current recovery system status
- `forceRecovery()` - Bypasses recent attempt checks
- `hasJobsNeedingRecovery()` - Checks if recovery is needed
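The retry behaviour can be pictured as a small loop around the recovery attempt. This is an illustrative sketch of the mechanism, not the verbatim body of `initializeRecovery()`; `resumeInterruptedJobs` is an assumed internal helper:

```typescript
// Sketch of the retry loop; option names match the "Configuration Options"
// section below. `resumeInterruptedJobs` is an assumed internal helper.
import { validateDatabaseConnection, cleanupStaleJobs } from "@/lib/recovery";

declare function resumeInterruptedJobs(): Promise<void>; // assumed helper

async function recoverWithRetries(maxRetries = 3, retryDelay = 5000) {
  for (let attempt = 1; attempt <= maxRetries; attempt++) {
    if (await validateDatabaseConnection()) {
      try {
        await cleanupStaleJobs(); // drop jobs older than 24 hours
        await resumeInterruptedJobs(); // per-job error handling inside
        return true;
      } catch (err) {
        console.error(`Recovery attempt ${attempt} failed:`, err);
      }
    } else {
      console.warn(`Attempt ${attempt}: database not reachable yet`);
    }
    await new Promise((resolve) => setTimeout(resolve, retryDelay));
  }
  return false;
}
```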
### 2. Startup Recovery Script (`scripts/startup-recovery.ts`)

A dedicated script that runs recovery before the application starts serving requests:

#### Features:
- **Timeout protection** (default: 30 seconds; see the sketch after the usage block)
- **Force recovery option** to bypass recent attempt checks
- **Graceful signal handling** (SIGINT, SIGTERM)
- **Detailed logging** with progress indicators
- **Exit codes** for different scenarios (success, warnings, errors)

#### Usage:
```bash
bun scripts/startup-recovery.ts [--force] [--timeout=30000]
```
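The timeout protection can be implemented by racing the recovery promise against a timer, roughly as follows. The exit-code mapping here is illustrative, under the assumption that the script wraps `initializeRecovery()` this way:

```typescript
// Sketch: bound the recovery run with a timeout, mapping outcomes to
// illustrative exit codes (0 success, 1 warnings, 2 failure).
import { initializeRecovery } from "@/lib/recovery";

async function runWithTimeout(timeoutMs = 30_000): Promise<number> {
  const timeout = new Promise<"timeout">((resolve) =>
    setTimeout(() => resolve("timeout"), timeoutMs),
  );
  const result = await Promise.race([initializeRecovery(), timeout]);
  if (result === "timeout") {
    console.warn(`Recovery did not finish within ${timeoutMs}ms`);
    return 1; // completed with warnings
  }
  return result ? 0 : 2;
}
```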
### 3. Improved Middleware (`src/middleware.ts`)

The middleware now serves as a fallback recovery mechanism:

#### Changes:
- **Checks if recovery is needed** before attempting
- **Shorter timeout** (15 seconds) for request-time recovery
- **Better error handling** with status logging
- **Prevents multiple attempts** with proper state tracking

### 4. Enhanced Database Queries (`src/lib/helpers.ts`)

#### Improvements:
- **Proper Drizzle ORM syntax** for all database queries
- **Enhanced interrupted job detection** with multiple criteria (a query sketch follows this section):
  - Jobs with no recent checkpoint (10+ minutes)
  - Jobs running too long (2+ hours)
- **Detailed logging** of found interrupted jobs
- **Better error handling** for database operations
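A Drizzle-style query implementing those two detection criteria might look like this. The `db` and `mirrorJobs` bindings are assumptions based on the schema shown in the Docker entrypoint, not the exact exports of `src/lib/helpers.ts`:

```typescript
// Sketch: find jobs that look interrupted. `db` and `mirrorJobs` are
// assumed Drizzle bindings, not the actual exports of src/lib/helpers.ts.
import { and, eq, lt, or } from "drizzle-orm";
import { db, mirrorJobs } from "@/lib/db"; // assumed import path

async function findInterruptedJobs() {
  const now = Date.now();
  const staleCheckpoint = new Date(now - 10 * 60 * 1000); // 10+ minutes old
  const runningTooLong = new Date(now - 2 * 60 * 60 * 1000); // 2+ hours old

  return db
    .select()
    .from(mirrorJobs)
    .where(
      and(
        eq(mirrorJobs.inProgress, true),
        or(
          lt(mirrorJobs.lastCheckpoint, staleCheckpoint),
          lt(mirrorJobs.startedAt, runningTooLong),
        ),
      ),
    );
}
```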
### 5. Docker Integration (`docker-entrypoint.sh`)

#### Changes:
- **Automatic startup recovery** runs before application start
- **Exit code handling** with appropriate logging
- **Fallback mechanisms** if recovery script is not found
- **Non-blocking execution** - application starts even if recovery fails

### 6. Health Check Integration (`src/pages/api/health.ts`)

#### New Features:
- **Recovery system status** in health endpoint
- **Job recovery metrics** (jobs needing recovery, recovery in progress)
- **Overall health status** considers recovery state
- **Detailed recovery information** for monitoring (an example payload is sketched below)
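A health endpoint that folds in recovery state could assemble its response roughly like this. The exact JSON shape returned by `/api/health` is not specified here, so this is only an assumed example:

```typescript
// Sketch of a health payload that includes recovery metrics.
// The exact response shape of /api/health is an assumption.
import { getRecoveryStatus, hasJobsNeedingRecovery } from "@/lib/recovery";

export async function GET() {
  const recovery = getRecoveryStatus(); // assumed synchronous status object
  const jobsNeedingRecovery = await hasJobsNeedingRecovery();

  const body = {
    status: jobsNeedingRecovery ? "degraded" : "ok",
    version: process.env.npm_package_version,
    recovery: { ...recovery, jobsNeedingRecovery },
  };
  return new Response(JSON.stringify(body), {
    headers: { "Content-Type": "application/json" },
  });
}
```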
### 7. Testing Infrastructure (`scripts/test-recovery.ts`)

A comprehensive test script to verify recovery functionality:

#### Features:
- **Creates test interrupted jobs** with realistic scenarios
- **Verifies recovery detection** and execution
- **Checks final job states** after recovery
- **Cleanup functionality** for test data
- **Comprehensive logging** of test progress

## Configuration Options

### Recovery System Options:
- `maxRetries`: Number of recovery attempts (default: 3)
- `retryDelay`: Delay between attempts in ms (default: 5000)
- `skipIfRecentAttempt`: Skip if recent attempt made (default: true)

### Startup Recovery Options:
- `--force`: Force recovery even if recent attempt was made
- `--timeout`: Maximum time to wait for recovery (default: 30000ms)

## Usage Examples

### Manual Recovery:
```bash
# Run startup recovery
bun run startup-recovery

# Force recovery
bun run startup-recovery-force

# Test recovery system
bun run test-recovery

# Clean up test data
bun run test-recovery-cleanup
```

### Programmatic Usage:
```typescript
import { initializeRecovery, hasJobsNeedingRecovery } from '@/lib/recovery';

// Check if recovery is needed
const needsRecovery = await hasJobsNeedingRecovery();

// Run recovery with custom options
const success = await initializeRecovery({
  maxRetries: 5,
  retryDelay: 3000,
  skipIfRecentAttempt: false
});
```

## Monitoring and Observability

### Health Check Endpoint:
- **URL**: `/api/health`
- **Recovery Status**: Included in response
- **Monitoring**: Can be used with external monitoring systems

### Log Messages:
- **Startup**: Clear indicators of recovery attempts and results
- **Progress**: Detailed logging of recovery steps
- **Errors**: Comprehensive error information for debugging

## Benefits

1. **Reliability**: Jobs are automatically recovered after application restarts
2. **Resilience**: Multiple retry mechanisms and fallback strategies
3. **Observability**: Comprehensive logging and health check integration
4. **Performance**: Efficient detection and processing of interrupted jobs
5. **Maintainability**: Clear separation of concerns and modular design
6. **Testing**: Built-in testing infrastructure for verification

## Migration Notes

- **Backward Compatible**: All existing functionality is preserved
- **Automatic**: Recovery runs automatically on startup
- **Configurable**: All timeouts and retry counts can be adjusted
- **Monitoring**: Health checks now include recovery status

This comprehensive improvement ensures that the gitea-mirror application can reliably handle job recovery in all deployment scenarios, from development to production container environments.
236  docs/SHUTDOWN_PROCESS.md  Normal file
@@ -0,0 +1,236 @@
# Graceful Shutdown Process

This document details how the gitea-mirror application handles graceful shutdown during active mirroring operations, with specific focus on job interruption and recovery.

## Overview

The graceful shutdown system is designed for **fast, clean termination** without waiting for long-running jobs to complete. It prioritizes **quick shutdown times** (under 30 seconds) while **preserving all progress** for seamless recovery.

## Key Principle

**The application does NOT wait for jobs to finish before shutting down.** Instead, it saves the current state and resumes after restart.

## Shutdown Scenario Example

### Initial State
- **Job**: Mirror 500 repositories
- **Progress**: 200 repositories completed
- **Remaining**: 300 repositories pending
- **Action**: User initiates shutdown (SIGTERM, Ctrl+C, Docker stop)

### Shutdown Process (Under 30 seconds)

#### Step 1: Signal Detection (Immediate)
```
📡 Received SIGTERM signal
🛑 Graceful shutdown initiated by signal: SIGTERM
📊 Shutdown status: 1 active jobs, 2 callbacks
```

#### Step 2: Job State Saving (1-10 seconds)
```
📝 Step 1: Saving active job states...
   Saving state for job abc-123...
   ✅ Saved state for job abc-123
```

**What gets saved:**
- `inProgress: false` - Mark job as not currently running
- `completedItems: 200` - Number of repos successfully mirrored
- `totalItems: 500` - Total repos in the job
- `completedItemIds: [repo1, repo2, ..., repo200]` - List of completed repos
- `itemIds: [repo1, repo2, ..., repo500]` - Full list of repos
- `lastCheckpoint: 2025-05-24T17:30:00Z` - Exact shutdown time
- `message: "Job interrupted by application shutdown - will resume on restart"`
- `status: "imported"` - Keeps status as resumable (not "failed")

#### Step 3: Service Cleanup (1-5 seconds)
```
🔧 Step 2: Executing shutdown callbacks...
   🛑 Shutting down cleanup service...
   ✅ Cleanup service stopped
   ✅ Shutdown callback 1 completed
```

#### Step 4: Clean Exit (Immediate)
```
💾 Step 3: Closing database connections...
✅ Graceful shutdown completed successfully
```

**Total shutdown time: ~15 seconds** (well under the 30-second limit)

## What Happens to the Remaining 300 Repos?

### During Shutdown
- **NOT processed** - The remaining 300 repos are not mirrored
- **NOT lost** - Their IDs are preserved in the job state
- **NOT marked as failed** - Job status remains "imported" for recovery

### After Restart
The recovery system automatically (the remaining-work calculation is sketched after this list):

1. **Detects interrupted job** during startup
2. **Calculates remaining work**: 500 - 200 = 300 repos
3. **Extracts remaining repo IDs**: repos 201-500 from the original list
4. **Resumes processing** from exactly where it left off
5. **Continues until completion** of all 500 repos
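The remaining-work calculation reduces to a set difference over the stored ID arrays. A minimal sketch, with field names following the `mirror_jobs` schema shown later in this document:

```typescript
// Sketch: derive the items still to process from the persisted job state.
// Field names follow the mirror_jobs schema shown below.
interface PersistedJob {
  itemIds: string[]; // all 500 repo IDs
  completedItemIds: string[]; // the 200 already mirrored
}

function remainingItems(job: PersistedJob): string[] {
  const done = new Set(job.completedItemIds);
  return job.itemIds.filter((id) => !done.has(id)); // the 300 left over
}
```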
## Timeout Configuration

### Shutdown Timeouts
```typescript
const SHUTDOWN_TIMEOUT = 30000; // 30 seconds max shutdown time
const JOB_SAVE_TIMEOUT = 10000; // 10 seconds to save job state
```

### Timeout Behavior
- **Normal case**: Shutdown completes in 10-20 seconds
- **Slow database**: Up to 30 seconds allowed
- **Timeout exceeded**: Force exit with code 1
- **Container kill**: Orchestrator should allow 45+ seconds grace period

## Job State Persistence

### Database Schema
The `mirror_jobs` table stores complete job state:

```sql
-- Job identification
id TEXT PRIMARY KEY,
user_id TEXT NOT NULL,
job_type TEXT NOT NULL DEFAULT 'mirror',

-- Progress tracking
total_items INTEGER,
completed_items INTEGER DEFAULT 0,
item_ids TEXT, -- JSON array of all repo IDs
completed_item_ids TEXT DEFAULT '[]', -- JSON array of completed repo IDs

-- State management
in_progress INTEGER NOT NULL DEFAULT 0, -- Boolean: currently running
started_at TIMESTAMP,
completed_at TIMESTAMP,
last_checkpoint TIMESTAMP, -- Last progress save

-- Status and messaging
status TEXT NOT NULL DEFAULT 'imported',
message TEXT NOT NULL
```

### Recovery Query
The recovery system finds interrupted jobs:

```sql
SELECT * FROM mirror_jobs
WHERE in_progress = 0
  AND status = 'imported'
  AND completed_at IS NULL
  AND total_items > completed_items;
```

## Shutdown-Aware Processing

### Concurrency Check
During job execution, the worker checks for shutdown before processing each repository:

```typescript
// Before processing each repository
if (isShuttingDown()) {
  throw new Error('Processing interrupted by application shutdown');
}
```

### Checkpoint Intervals
Jobs save progress periodically (every 10 repos by default):

```typescript
checkpointInterval: 10, // Save progress every 10 repositories
```

This ensures minimal work loss even if shutdown occurs between checkpoints. A sketch of a processing loop combining the shutdown check with checkpointing follows.
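An illustrative sketch of such a loop. `isShuttingDown` is shown above; `PersistedJob` and `remainingItems` come from the earlier sketch; the import path and the worker callbacks are assumptions:

```typescript
// Sketch: process items with a shutdown check and periodic checkpoints.
import { isShuttingDown } from "@/lib/shutdown-manager"; // assumed path

async function processJob(
  job: PersistedJob,
  mirrorOne: (id: string) => Promise<void>, // assumed per-repo worker
  saveCheckpoint: (job: PersistedJob) => Promise<void>, // assumed persister
  checkpointInterval = 10,
) {
  let sinceCheckpoint = 0;
  for (const id of remainingItems(job)) {
    if (isShuttingDown()) {
      // Bail out; the shutdown manager persists current progress.
      throw new Error("Processing interrupted by application shutdown");
    }
    await mirrorOne(id);
    job.completedItemIds.push(id);

    if (++sinceCheckpoint >= checkpointInterval) {
      await saveCheckpoint(job); // write completed_item_ids, last_checkpoint
      sinceCheckpoint = 0;
    }
  }
  await saveCheckpoint(job); // final checkpoint on completion
}
```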
## Container Integration

### Docker Entrypoint
The Docker entrypoint properly forwards signals:

```bash
# Set up signal handlers
trap 'shutdown_handler' TERM INT HUP

# Start application in background
bun ./dist/server/entry.mjs &
APP_PID=$!

# Wait for application to finish
wait "$APP_PID"
```

### Kubernetes Configuration
Recommended pod configuration:

```yaml
apiVersion: v1
kind: Pod
spec:
  terminationGracePeriodSeconds: 45 # Allow time for graceful shutdown
  containers:
    - name: gitea-mirror
      # ... other configuration
```

## Monitoring and Logging

### Shutdown Logs
```
🛑 Graceful shutdown initiated by signal: SIGTERM
📊 Shutdown status: 1 active jobs, 2 callbacks
📝 Step 1: Saving active job states...
   Saving state for 1 active jobs...
   ✅ Completed saving all active jobs
🔧 Step 2: Executing shutdown callbacks...
   ✅ Completed all shutdown callbacks
💾 Step 3: Closing database connections...
✅ Graceful shutdown completed successfully
```

### Recovery Logs
```
⚠️ Jobs found that need recovery. Starting recovery process...
   Resuming job abc-123 with 300 remaining items...
✅ Recovery completed successfully
```

## Best Practices

### For Operations
1. **Monitor shutdown times** - Should complete under 30 seconds
2. **Check recovery logs** - Verify jobs resume correctly after restart
3. **Set appropriate grace periods** - Allow 45+ seconds in orchestrators
4. **Plan maintenance windows** - Jobs will resume but may take time to complete

### For Development
1. **Test shutdown scenarios** - Use `bun run test-shutdown`
2. **Monitor job progress** - Check checkpoint frequency and timing
3. **Verify recovery** - Ensure interrupted jobs resume correctly
4. **Handle edge cases** - Test shutdown during different job phases

## Troubleshooting

### Shutdown Takes Too Long
- **Check**: Database performance during job state saving
- **Solution**: Increase `SHUTDOWN_TIMEOUT` environment variable
- **Monitor**: Job complexity and checkpoint frequency

### Jobs Don't Resume
- **Check**: Recovery logs for errors during startup
- **Verify**: Database contains interrupted jobs with correct status
- **Test**: Run `bun run startup-recovery` manually

### Container Force-Killed
- **Check**: Container orchestrator termination grace period
- **Increase**: Grace period to 45+ seconds
- **Monitor**: Application shutdown completion time

This design ensures **production-ready graceful shutdown** with **zero data loss** and **fast recovery times** suitable for modern containerized deployments.
127  docs/testing.md  Normal file
@@ -0,0 +1,127 @@
# Testing in Gitea Mirror

This document provides guidance on testing in the Gitea Mirror project.

## Current Status

The project now uses Bun's built-in test runner, which is Jest-compatible and provides a fast, reliable testing experience. We've migrated away from Vitest due to compatibility issues with Bun.

## Running Tests

To run tests, use the following commands:

```bash
# Run all tests
bun test

# Run tests in watch mode (automatically re-run when files change)
bun test --watch

# Run tests with coverage reporting
bun test --coverage
```

## Test File Naming Conventions

Bun's test runner automatically discovers test files that match the following patterns:

- `*.test.{js|jsx|ts|tsx}`
- `*_test.{js|jsx|ts|tsx}`
- `*.spec.{js|jsx|ts|tsx}`
- `*_spec.{js|jsx|ts|tsx}`

## Writing Tests

The project uses Bun's test runner with a Jest-compatible API. Here's an example test:

```typescript
// example.test.ts
import { describe, test, expect } from "bun:test";

describe("Example Test", () => {
  test("should pass", () => {
    expect(true).toBe(true);
  });
});
```

### Testing React Components

For testing React components, we use React Testing Library:

```typescript
// component.test.tsx
import { describe, test, expect } from "bun:test";
import { render, screen } from "@testing-library/react";
import MyComponent from "../components/MyComponent";

describe("MyComponent", () => {
  test("renders correctly", () => {
    render(<MyComponent />);
    expect(screen.getByText("Hello World")).toBeInTheDocument();
  });
});
```

## Test Setup

The test setup is defined in `src/tests/setup.bun.ts` and includes:

- Automatic cleanup after each test
- Setup for any global test environment needs

## Mocking

Bun's test runner provides built-in mocking capabilities:

```typescript
import { test, expect, mock } from "bun:test";

// Create a mock function
const mockFn = mock(() => "mocked value");

test("mock function", () => {
  const result = mockFn();
  expect(result).toBe("mocked value");
  expect(mockFn).toHaveBeenCalled();
});

// Mock a module
mock.module("./some-module", () => {
  return {
    someFunction: () => "mocked module function"
  };
});
```

## CI Integration

The CI workflow has been updated to use Bun's test runner. Tests are automatically run as part of the CI pipeline.

## Test Coverage

To generate test coverage reports, run:

```bash
bun test --coverage
```

This will generate a coverage report in the `coverage` directory.

## Types of Tests

The project includes several types of tests:

1. **Unit Tests**: Testing individual functions and utilities
2. **API Tests**: Testing API endpoints
3. **Component Tests**: Testing React components
4. **Integration Tests**: Testing how components work together

## Future Improvements

When expanding the test suite, consider:

1. Adding more comprehensive API endpoint tests
2. Increasing component test coverage
3. Setting up end-to-end tests with a tool like Playwright
4. Adding performance tests for critical paths
19  package.json
@@ -1,7 +1,7 @@
 {
   "name": "gitea-mirror",
   "type": "module",
-  "version": "2.1.0",
+  "version": "2.9.1",
   "engines": {
     "bun": ">=1.2.9"
   },
@@ -16,12 +16,19 @@
     "check-db": "bun scripts/manage-db.ts check",
     "fix-db": "bun scripts/manage-db.ts fix",
     "reset-users": "bun scripts/manage-db.ts reset-users",
     "cleanup-events": "bun scripts/cleanup-events.ts",
+    "startup-recovery": "bun scripts/startup-recovery.ts",
+    "startup-recovery-force": "bun scripts/startup-recovery.ts --force",
+    "test-recovery": "bun scripts/test-recovery.ts",
+    "test-recovery-cleanup": "bun scripts/test-recovery.ts --cleanup",
+    "test-shutdown": "bun scripts/test-graceful-shutdown.ts",
+    "test-shutdown-cleanup": "bun scripts/test-graceful-shutdown.ts --cleanup",
     "preview": "bunx --bun astro preview",
     "start": "bun dist/server/entry.mjs",
     "start:fresh": "bun run cleanup-db && bun run manage-db init && bun dist/server/entry.mjs",
-    "test": "bunx --bun vitest run",
-    "test:watch": "bunx --bun vitest",
+    "test": "bun test",
+    "test:watch": "bun test --watch",
+    "test:coverage": "bun test --coverage",
     "astro": "bunx --bun astro"
   },
   "dependencies": {
@@ -38,6 +45,7 @@
     "@radix-ui/react-radio-group": "^1.3.6",
     "@radix-ui/react-select": "^2.2.4",
     "@radix-ui/react-slot": "^1.2.2",
+    "@radix-ui/react-switch": "^1.2.5",
     "@radix-ui/react-tabs": "^1.1.11",
     "@radix-ui/react-tooltip": "^1.2.6",
     "@tailwindcss/vite": "^4.1.7",
@@ -46,7 +54,6 @@
     "@types/react": "^19.1.4",
     "@types/react-dom": "^19.1.5",
     "astro": "^5.7.13",
-    "axios": "^1.9.0",
     "bcryptjs": "^3.0.2",
     "canvas-confetti": "^1.9.3",
     "class-variance-authority": "^0.7.1",
@@ -61,7 +68,6 @@
     "react-dom": "^19.1.0",
     "react-icons": "^5.5.0",
     "sonner": "^2.0.3",
-    "superagent": "^10.2.1",
     "tailwind-merge": "^3.3.0",
     "tailwindcss": "^4.1.7",
     "tw-animate-css": "^1.3.0",
@@ -73,7 +79,6 @@
     "@testing-library/react": "^16.3.0",
     "@types/bcryptjs": "^3.0.0",
     "@types/jsonwebtoken": "^9.0.9",
-    "@types/superagent": "^8.1.9",
     "@types/uuid": "^10.0.0",
     "@vitejs/plugin-react": "^4.4.1",
     "jsdom": "^26.1.0",
@@ -47,12 +47,12 @@ The script uses environment variables from the `.env` file in the project root:

 # First build the image
 ./scripts/build-docker.sh --load

 # Then run using docker-compose for development
 docker-compose -f ../docker-compose.dev.yml up -d

 # Or for production
-docker-compose --profile production up -d
+docker compose up -d
 ```

 ## Diagnostics Script
@@ -18,17 +18,18 @@ Run **Gitea Mirror** in an isolated LXC container, either:
 ### One-command install

 ```bash
-# optional env overrides: CTID HOSTNAME STORAGE DISK_SIZE CORES MEMORY BRIDGE IP_CONF
-sudo bash -c "$(curl -fsSL https://raw.githubusercontent.com/arunavo4/gitea-mirror/main/scripts/gitea-mirror-lxc-proxmox.sh)"
+# Community-maintained script for Proxmox VE by Tobias ([CrazyWolf13](https://github.com/CrazyWolf13))
+# at [community-scripts/ProxmoxVED](https://github.com/community-scripts/ProxmoxVED)
+sudo bash -c "$(curl -fsSL https://raw.githubusercontent.com/community-scripts/ProxmoxVED/main/install/gitea-mirror-install.sh)"
 ```

 What it does:

-* Creates **privileged** CT `$CTID` with nesting enabled
-* Installs curl / git / Bun (official installer)
+* Uses the community-maintained script from ProxmoxVED
+* Installs dependencies and Bun runtime
 * Clones & builds `arunavo4/gitea-mirror`
-* Writes a root-run systemd service and starts it
-* Prints the container IP + random `JWT_SECRET`
+* Creates a systemd service and starts it
+* Sets up a random `JWT_SECRET` for security

 Browse to:
@@ -60,43 +60,60 @@ The database file should be located in the `./data/gitea-mirror.db` directory. I

The following scripts help manage events in the SQLite database:

### Event Inspection (check-events.ts)
> **Note**: For a more user-friendly approach, you can use the cleanup button in the Activity Log page of the web interface to delete all activities with a single click.

Displays all events currently stored in the database.

### Remove Duplicate Events (remove-duplicate-events.ts)

Specifically removes duplicate events based on deduplication keys without affecting old events.

```bash
bun scripts/check-events.ts
# Remove duplicate events for all users
bun scripts/remove-duplicate-events.ts

# Remove duplicate events for a specific user
bun scripts/remove-duplicate-events.ts <userId>
```

### Event Cleanup (cleanup-events.ts)

Removes old events from the database to prevent it from growing too large.

### Fix Interrupted Jobs (fix-interrupted-jobs.ts)

Fixes interrupted jobs that might be preventing cleanup by marking them as failed.

```bash
# Remove events older than 7 days (default)
bun scripts/cleanup-events.ts
# Fix all interrupted jobs
bun scripts/fix-interrupted-jobs.ts

# Remove events older than X days
bun scripts/cleanup-events.ts 14
# Fix interrupted jobs for a specific user
bun scripts/fix-interrupted-jobs.ts <userId>
```

This script can be scheduled to run periodically (e.g., daily) using cron or another scheduler.
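A minimal crontab sketch for a nightly run, assuming the repository lives at `/opt/gitea-mirror` and `bun` is on the crontab user's PATH (adjust both paths and the log location to your install):

```bash
# Hypothetical crontab entry: prune events older than 14 days every night at 03:00
0 3 * * * cd /opt/gitea-mirror && bun scripts/cleanup-events.ts 14 >> /var/log/gitea-mirror-cleanup.log 2>&1
```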
Use this script if you're having trouble cleaning up activities due to "interrupted" jobs that won't delete.
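If you want to see what is stuck before running it, one option is to query the database directly with the `sqlite3` CLI; a sketch assuming the default `data/gitea-mirror.db` path and the `mirror_jobs` schema shown later in this diff:

```bash
# List jobs still flagged as running (in_progress is stored as an integer boolean)
sqlite3 data/gitea-mirror.db \
  "SELECT id, status, message FROM mirror_jobs WHERE in_progress = 1;"
```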
### Mark Events as Read (mark-events-read.ts)
### Startup Recovery (startup-recovery.ts)

Marks all unread events as read.
Runs job recovery during application startup to handle any interrupted jobs from previous runs.

```bash
bun scripts/mark-events-read.ts
# Run startup recovery (normal mode)
bun scripts/startup-recovery.ts

# Force recovery even if recent attempt was made
bun scripts/startup-recovery.ts --force

# Set custom timeout (default: 30000ms)
bun scripts/startup-recovery.ts --timeout=60000

# Using npm scripts
bun run startup-recovery
bun run startup-recovery-force
```

### Make Events Appear Older (make-events-old.ts)

For testing purposes, this script modifies event timestamps to make them appear older.

```bash
bun scripts/make-events-old.ts
```
This script is automatically run by the Docker entrypoint during container startup. It ensures that any jobs interrupted by container restarts or application crashes are properly recovered or marked as failed.
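As a rough sketch of what that entrypoint step amounts to (a hypothetical excerpt, not the literal entrypoint script, which may differ):

```bash
# Run recovery first; the script exits non-zero on failure or timeout,
# but startup should proceed either way, so only log a warning.
bun scripts/startup-recovery.ts || echo "warning: startup recovery reported failures"
exec bun dist/server/entry.mjs
```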
## Deployment Scripts

@@ -107,9 +124,11 @@ bun scripts/make-events-old.ts

### LXC Container Deployment

Two scripts are provided for deploying Gitea Mirror in LXC containers:
Two deployment options are available for LXC containers:

1. **gitea-mirror-lxc-proxmox.sh**: For online deployment on a Proxmox VE host
1. **Proxmox VE (online)**: Using the community-maintained script by Tobias ([CrazyWolf13](https://github.com/CrazyWolf13))
   - Author: Tobias ([CrazyWolf13](https://github.com/CrazyWolf13))
   - Available at: [community-scripts/ProxmoxVED](https://github.com/community-scripts/ProxmoxVED/blob/main/install/gitea-mirror-install.sh)
   - Pulls everything from GitHub
   - Creates a privileged container with the application
   - Sets up systemd service
@@ -1,38 +0,0 @@
#!/usr/bin/env bun
/**
 * Script to check events in the database
 */

import { Database } from "bun:sqlite";
import path from "path";
import fs from "fs";

// Define the database path
const dataDir = path.join(process.cwd(), "data");
if (!fs.existsSync(dataDir)) {
  console.error("Data directory not found:", dataDir);
  process.exit(1);
}

const dbPath = path.join(dataDir, "gitea-mirror.db");
if (!fs.existsSync(dbPath)) {
  console.error("Database file not found:", dbPath);
  process.exit(1);
}

// Open the database
const db = new Database(dbPath);

// Check if the events table exists
const tableExists = db.query("SELECT name FROM sqlite_master WHERE type='table' AND name='events'").get();

if (!tableExists) {
  console.error("Events table does not exist");
  process.exit(1);
}

// Get all events
const events = db.query("SELECT * FROM events").all();

console.log("Events in the database:");
console.log(JSON.stringify(events, null, 2));
@@ -1,43 +0,0 @@
#!/usr/bin/env bun
/**
 * Script to clean up old events from the database
 * This script should be run periodically (e.g., daily) to prevent the events table from growing too large
 *
 * Usage:
 *   bun scripts/cleanup-events.ts [days]
 *
 * Where [days] is the number of days to keep events (default: 7)
 */

import { cleanupOldEvents } from "../src/lib/events";

// Parse command line arguments
const args = process.argv.slice(2);
const daysToKeep = args.length > 0 ? parseInt(args[0], 10) : 7;

if (isNaN(daysToKeep) || daysToKeep < 1) {
  console.error("Error: Days to keep must be a positive number");
  process.exit(1);
}

async function runCleanup() {
  try {
    console.log(`Starting event cleanup (retention: ${daysToKeep} days)...`);

    // Call the cleanupOldEvents function from the events module
    const result = await cleanupOldEvents(daysToKeep);

    console.log(`Cleanup summary:`);
    console.log(`- Read events deleted: ${result.readEventsDeleted}`);
    console.log(`- Unread events deleted: ${result.unreadEventsDeleted}`);
    console.log(`- Total events deleted: ${result.readEventsDeleted + result.unreadEventsDeleted}`);

    console.log("Event cleanup completed successfully");
  } catch (error) {
    console.error("Error running event cleanup:", error);
    process.exit(1);
  }
}

// Run the cleanup
runCleanup();
74
scripts/fix-interrupted-jobs.ts
Normal file
@@ -0,0 +1,74 @@
#!/usr/bin/env bun
/**
 * Script to fix interrupted jobs that might be preventing cleanup
 * This script marks all in-progress jobs as failed to allow them to be deleted
 *
 * Usage:
 *   bun scripts/fix-interrupted-jobs.ts [userId]
 *
 * Where [userId] is optional - if provided, only fixes jobs for that user
 */

import { db, mirrorJobs } from "../src/lib/db";
import { eq } from "drizzle-orm";

// Parse command line arguments
const args = process.argv.slice(2);
const userId = args.length > 0 ? args[0] : undefined;

async function fixInterruptedJobs() {
  try {
    console.log("Checking for interrupted jobs...");

    // Build the query
    let query = db
      .select()
      .from(mirrorJobs)
      .where(eq(mirrorJobs.inProgress, true));

    if (userId) {
      console.log(`Filtering for user: ${userId}`);
      query = query.where(eq(mirrorJobs.userId, userId));
    }

    // Find all in-progress jobs
    const inProgressJobs = await query;

    if (inProgressJobs.length === 0) {
      console.log("No interrupted jobs found.");
      return;
    }

    console.log(`Found ${inProgressJobs.length} interrupted jobs:`);
    inProgressJobs.forEach(job => {
      console.log(`- Job ${job.id}: ${job.message} (${job.repositoryName || job.organizationName || 'Unknown'})`);
    });

    // Mark all in-progress jobs as failed
    let updateQuery = db
      .update(mirrorJobs)
      .set({
        inProgress: false,
        completedAt: new Date(),
        status: "failed",
        message: "Job interrupted and marked as failed by cleanup script"
      })
      .where(eq(mirrorJobs.inProgress, true));

    if (userId) {
      updateQuery = updateQuery.where(eq(mirrorJobs.userId, userId));
    }

    await updateQuery;

    console.log(`✅ Successfully marked ${inProgressJobs.length} interrupted jobs as failed.`);
    console.log("These jobs can now be deleted through the normal cleanup process.");

  } catch (error) {
    console.error("Error fixing interrupted jobs:", error);
    process.exit(1);
  }
}

// Run the fix
fixInterruptedJobs();
@@ -1,97 +0,0 @@
#!/usr/bin/env bash
# gitea-mirror-lxc-proxmox.sh
# Fully online installer for a Proxmox LXC guest running Gitea Mirror + Bun.

set -euo pipefail

# ────── adjustable defaults ──────────────────────────────────────────────
CTID=${CTID:-106}                  # container ID
HOSTNAME=${HOSTNAME:-gitea-mirror}
STORAGE=${STORAGE:-local-lvm}      # where rootfs lives
DISK_SIZE=${DISK_SIZE:-8G}
CORES=${CORES:-2}
MEMORY=${MEMORY:-2048}             # MiB
BRIDGE=${BRIDGE:-vmbr0}
IP_CONF=${IP_CONF:-dhcp}           # or "192.168.1.240/24,gw=192.168.1.1"

PORT=4321
JWT_SECRET=$(openssl rand -hex 32)

REPO="https://github.com/arunavo4/gitea-mirror.git"
# ─────────────────────────────────────────────────────────────────────────

TEMPLATE='ubuntu-22.04-standard_22.04-1_amd64.tar.zst'
TEMPLATE_PATH="/var/lib/vz/template/cache/${TEMPLATE}"

echo "▶️ Ensuring template exists…"
if [[ ! -f $TEMPLATE_PATH ]]; then
  pveam update >/dev/null
  pveam download "$STORAGE" "$TEMPLATE"
fi

echo "▶️ Creating container $CTID (if missing)…"
if ! pct status "$CTID" &>/dev/null; then
  pct create "$CTID" "$TEMPLATE_PATH" \
    --rootfs "$STORAGE:$DISK_SIZE" \
    --hostname "$HOSTNAME" \
    --cores "$CORES" --memory "$MEMORY" \
    --net0 "name=eth0,bridge=$BRIDGE,ip=$IP_CONF" \
    --features nesting=1 \
    --unprivileged 0
fi

pct start "$CTID"

echo "▶️ Installing base packages inside CT $CTID…"
pct exec "$CTID" -- bash -c 'apt update && apt install -y curl git build-essential openssl sqlite3 unzip'

echo "▶️ Installing Bun runtime…"
pct exec "$CTID" -- bash -c '
  export BUN_INSTALL=/opt/bun
  curl -fsSL https://bun.sh/install | bash -s -- --yes
  ln -sf /opt/bun/bin/bun /usr/local/bin/bun
  ln -sf /opt/bun/bin/bun /usr/local/bin/bunx
  bun --version
'

echo "▶️ Cloning & building Gitea Mirror…"
pct exec "$CTID" -- bash -c "
  git clone --depth=1 '$REPO' /opt/gitea-mirror || (cd /opt/gitea-mirror && git pull)
  cd /opt/gitea-mirror
  bun install
  bun run build
  bun run manage-db init
"

echo "▶️ Creating systemd service…"
pct exec "$CTID" -- bash -c "
cat >/etc/systemd/system/gitea-mirror.service <<SERVICE
[Unit]
Description=Gitea Mirror
After=network.target
[Service]
Type=simple
WorkingDirectory=/opt/gitea-mirror
ExecStart=/usr/local/bin/bun dist/server/entry.mjs
Restart=on-failure
RestartSec=10
Environment=NODE_ENV=production
Environment=HOST=0.0.0.0
Environment=PORT=$PORT
Environment=DATABASE_URL=file:data/gitea-mirror.db
Environment=JWT_SECRET=$JWT_SECRET
[Install]
WantedBy=multi-user.target
SERVICE
systemctl daemon-reload
systemctl enable gitea-mirror
systemctl restart gitea-mirror
"

echo -e "\n🔍 Service status:"
pct exec "$CTID" -- systemctl status gitea-mirror --no-pager | head -n15

GUEST_IP=$(pct exec "$CTID" -- hostname -I | awk '{print $1}')
echo -e "\n🌐 Browse to: http://$GUEST_IP:$PORT\n"
echo "🗝️ JWT_SECRET = $JWT_SECRET"
echo -e "\n✅ Done – Gitea Mirror is running in CT $CTID."
@@ -1,29 +0,0 @@
#!/usr/bin/env bun
/**
 * Script to make events appear older for testing cleanup
 */

import { db, events } from "../src/lib/db";

async function makeEventsOld() {
  try {
    console.log("Making events appear older...");

    // Calculate a timestamp from 2 days ago
    const oldDate = new Date();
    oldDate.setDate(oldDate.getDate() - 2);

    // Update all events to have an older timestamp
    const result = await db
      .update(events)
      .set({ createdAt: oldDate });

    console.log(`Updated ${result.changes || 0} events to appear older`);
  } catch (error) {
    console.error("Error updating event timestamps:", error);
    process.exit(1);
  }
}

// Run the function
makeEventsOld();
@@ -145,9 +145,31 @@ async function ensureTablesExist() {
          status TEXT NOT NULL DEFAULT 'imported',
          message TEXT NOT NULL,
          timestamp TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP,

          -- New fields for job resilience
          job_type TEXT NOT NULL DEFAULT 'mirror',
          batch_id TEXT,
          total_items INTEGER,
          completed_items INTEGER DEFAULT 0,
          item_ids TEXT, -- JSON array as text
          completed_item_ids TEXT DEFAULT '[]', -- JSON array as text
          in_progress INTEGER NOT NULL DEFAULT 0, -- Boolean as integer
          started_at TIMESTAMP,
          completed_at TIMESTAMP,
          last_checkpoint TIMESTAMP,

          FOREIGN KEY (user_id) REFERENCES users(id)
        )
      `);

      // Create indexes for better performance
      db.exec(`
        CREATE INDEX IF NOT EXISTS idx_mirror_jobs_user_id ON mirror_jobs(user_id);
        CREATE INDEX IF NOT EXISTS idx_mirror_jobs_batch_id ON mirror_jobs(batch_id);
        CREATE INDEX IF NOT EXISTS idx_mirror_jobs_in_progress ON mirror_jobs(in_progress);
        CREATE INDEX IF NOT EXISTS idx_mirror_jobs_job_type ON mirror_jobs(job_type);
        CREATE INDEX IF NOT EXISTS idx_mirror_jobs_timestamp ON mirror_jobs(timestamp);
      `);
      break;
    case "events":
      db.exec(`
@@ -175,6 +197,33 @@ async function ensureTablesExist() {
    process.exit(1);
  }
}

  // Migration: Add cleanup_config column to existing configs table
  try {
    const db = new Database(dbPath);

    // Check if cleanup_config column exists
    const tableInfo = db.query(`PRAGMA table_info(configs)`).all();
    const hasCleanupConfig = tableInfo.some((column: any) => column.name === 'cleanup_config');

    if (!hasCleanupConfig) {
      console.log("Adding cleanup_config column to configs table...");

      // Add the column with a default value
      const defaultCleanupConfig = JSON.stringify({
        enabled: false,
        retentionDays: 7,
        lastRun: null,
        nextRun: null,
      });

      db.exec(`ALTER TABLE configs ADD COLUMN cleanup_config TEXT NOT NULL DEFAULT '${defaultCleanupConfig}'`);
      console.log("✅ cleanup_config column added successfully.");
    }
  } catch (error) {
    console.error("❌ Error during cleanup_config migration:", error);
    // Don't exit here as this is not critical for basic functionality
  }
}

/**
@@ -306,6 +355,7 @@ async function initializeDatabase() {
        include TEXT NOT NULL DEFAULT '["*"]',
        exclude TEXT NOT NULL DEFAULT '[]',
        schedule_config TEXT NOT NULL,
        cleanup_config TEXT NOT NULL,
        created_at INTEGER NOT NULL DEFAULT (strftime('%s','now')),
        updated_at INTEGER NOT NULL DEFAULT (strftime('%s','now')),
        FOREIGN KEY (user_id) REFERENCES users(id)
@@ -437,10 +487,16 @@ async function initializeDatabase() {
      lastRun: null,
      nextRun: null,
    });
    const cleanupConfig = JSON.stringify({
      enabled: false,
      retentionDays: 7,
      lastRun: null,
      nextRun: null,
    });

    const stmt = db.prepare(`
      INSERT INTO configs (id, user_id, name, is_active, github_config, gitea_config, include, exclude, schedule_config, created_at, updated_at)
      VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
      INSERT INTO configs (id, user_id, name, is_active, github_config, gitea_config, include, exclude, schedule_config, cleanup_config, created_at, updated_at)
      VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
    `);

    stmt.run(
@@ -453,6 +509,7 @@ async function initializeDatabase() {
      include,
      exclude,
      scheduleConfig,
      cleanupConfig,
      Date.now(),
      Date.now()
    );
@@ -1,27 +0,0 @@
#!/usr/bin/env bun
/**
 * Script to mark all events as read
 */

import { db, events } from "../src/lib/db";
import { eq } from "drizzle-orm";

async function markEventsAsRead() {
  try {
    console.log("Marking all events as read...");

    // Update all events to mark them as read
    const result = await db
      .update(events)
      .set({ read: true })
      .where(eq(events.read, false));

    console.log(`Marked ${result.changes || 0} events as read`);
  } catch (error) {
    console.error("Error marking events as read:", error);
    process.exit(1);
  }
}

// Run the function
markEventsAsRead();
44
scripts/remove-duplicate-events.ts
Normal file
@@ -0,0 +1,44 @@
#!/usr/bin/env bun
/**
 * Script to remove duplicate events from the database
 * This script identifies and removes events with duplicate deduplication keys
 *
 * Usage:
 *   bun scripts/remove-duplicate-events.ts [userId]
 *
 * Where [userId] is optional - if provided, only removes duplicates for that user
 */

import { removeDuplicateEvents } from "../src/lib/events";

// Parse command line arguments
const args = process.argv.slice(2);
const userId = args.length > 0 ? args[0] : undefined;

async function runDuplicateRemoval() {
  try {
    if (userId) {
      console.log(`Starting duplicate event removal for user: ${userId}...`);
    } else {
      console.log("Starting duplicate event removal for all users...");
    }

    // Call the removeDuplicateEvents function
    const result = await removeDuplicateEvents(userId);

    console.log(`Duplicate removal summary:`);
    console.log(`- Duplicate events removed: ${result.duplicatesRemoved}`);

    if (result.duplicatesRemoved > 0) {
      console.log("Duplicate event removal completed successfully");
    } else {
      console.log("No duplicate events found");
    }
  } catch (error) {
    console.error("Error running duplicate event removal:", error);
    process.exit(1);
  }
}

// Run the duplicate removal
runDuplicateRemoval();
113
scripts/startup-recovery.ts
Normal file
@@ -0,0 +1,113 @@
#!/usr/bin/env bun
/**
 * Startup recovery script
 * This script runs job recovery before the application starts serving requests
 * It ensures that any interrupted jobs from previous runs are properly handled
 *
 * Usage:
 *   bun scripts/startup-recovery.ts [--force] [--timeout=30000]
 *
 * Options:
 *   --force: Force recovery even if a recent attempt was made
 *   --timeout: Maximum time to wait for recovery (in milliseconds, default: 30000)
 */

import { initializeRecovery, hasJobsNeedingRecovery, getRecoveryStatus } from "../src/lib/recovery";

// Parse command line arguments
const args = process.argv.slice(2);
const forceRecovery = args.includes('--force');
const timeoutArg = args.find(arg => arg.startsWith('--timeout='));
const timeout = timeoutArg ? parseInt(timeoutArg.split('=')[1], 10) : 30000;

if (isNaN(timeout) || timeout < 1000) {
  console.error("Error: Timeout must be at least 1000ms");
  process.exit(1);
}

async function runStartupRecovery() {
  console.log('=== Gitea Mirror Startup Recovery ===');
  console.log(`Timeout: ${timeout}ms`);
  console.log(`Force recovery: ${forceRecovery}`);
  console.log('');

  const startTime = Date.now();

  try {
    // Set up timeout
    const timeoutPromise = new Promise<never>((_, reject) => {
      setTimeout(() => {
        reject(new Error(`Recovery timeout after ${timeout}ms`));
      }, timeout);
    });

    // Check if recovery is needed first
    console.log('Checking if recovery is needed...');
    const needsRecovery = await hasJobsNeedingRecovery();

    if (!needsRecovery) {
      console.log('✅ No jobs need recovery. Startup can proceed.');
      process.exit(0);
    }

    console.log('⚠️ Jobs found that need recovery. Starting recovery process...');

    // Run recovery with timeout
    const recoveryPromise = initializeRecovery({
      skipIfRecentAttempt: !forceRecovery,
      maxRetries: 3,
      retryDelay: 5000,
    });

    const recoveryResult = await Promise.race([recoveryPromise, timeoutPromise]);

    const endTime = Date.now();
    const duration = endTime - startTime;

    if (recoveryResult) {
      console.log(`✅ Recovery completed successfully in ${duration}ms`);
      console.log('Application startup can proceed.');
      process.exit(0);
    } else {
      console.log(`⚠️ Recovery completed with some failures in ${duration}ms`);
      console.log('Application startup can proceed, but some jobs may have failed.');
      process.exit(0);
    }

  } catch (error) {
    const endTime = Date.now();
    const duration = endTime - startTime;

    if (error instanceof Error && error.message.includes('timeout')) {
      console.error(`❌ Recovery timed out after ${duration}ms`);
      console.error('Application will start anyway, but some jobs may remain interrupted.');

      // Get current recovery status
      const status = getRecoveryStatus();
      console.log('Recovery status:', status);

      // Exit with warning code but allow startup to continue
      process.exit(1);
    } else {
      console.error(`❌ Recovery failed after ${duration}ms:`, error);
      console.error('Application will start anyway, but recovery was unsuccessful.');

      // Exit with error code but allow startup to continue
      process.exit(1);
    }
  }
}

// Handle process signals gracefully
process.on('SIGINT', () => {
  console.log('\n⚠️ Recovery interrupted by SIGINT');
  process.exit(130);
});

process.on('SIGTERM', () => {
  console.log('\n⚠️ Recovery interrupted by SIGTERM');
  process.exit(143);
});

// Run the startup recovery
runStartupRecovery();
238
scripts/test-graceful-shutdown.ts
Normal file
@@ -0,0 +1,238 @@
#!/usr/bin/env bun
/**
 * Integration test for graceful shutdown functionality
 *
 * This script tests the complete graceful shutdown flow:
 * 1. Starts a mock job
 * 2. Initiates shutdown
 * 3. Verifies job state is saved correctly
 * 4. Tests recovery after restart
 *
 * Usage:
 *   bun scripts/test-graceful-shutdown.ts [--cleanup]
 */

import { db, mirrorJobs } from "../src/lib/db";
import { eq } from "drizzle-orm";
import {
  initializeShutdownManager,
  registerActiveJob,
  unregisterActiveJob,
  gracefulShutdown,
  getShutdownStatus,
  registerShutdownCallback
} from "../src/lib/shutdown-manager";
import { setupSignalHandlers, removeSignalHandlers } from "../src/lib/signal-handlers";
import { createMirrorJob } from "../src/lib/helpers";

// Test configuration
const TEST_USER_ID = "test-user-shutdown";
const TEST_JOB_PREFIX = "test-shutdown-job";

// Parse command line arguments
const args = process.argv.slice(2);
const shouldCleanup = args.includes('--cleanup');

/**
 * Create a test job for shutdown testing
 */
async function createTestJob(): Promise<string> {
  console.log('📝 Creating test job...');

  const jobId = await createMirrorJob({
    userId: TEST_USER_ID,
    message: 'Test job for graceful shutdown testing',
    details: 'This job simulates a long-running mirroring operation',
    status: "mirroring",
    jobType: "mirror",
    totalItems: 10,
    itemIds: ['item-1', 'item-2', 'item-3', 'item-4', 'item-5'],
    completedItemIds: ['item-1', 'item-2'], // Simulate partial completion
    inProgress: true,
  });

  console.log(`✅ Created test job: ${jobId}`);
  return jobId;
}

/**
 * Verify that job state was saved correctly during shutdown
 */
async function verifyJobState(jobId: string): Promise<boolean> {
  console.log(`🔍 Verifying job state for ${jobId}...`);

  const jobs = await db
    .select()
    .from(mirrorJobs)
    .where(eq(mirrorJobs.id, jobId));

  if (jobs.length === 0) {
    console.error(`❌ Job ${jobId} not found in database`);
    return false;
  }

  const job = jobs[0];

  // Check that the job was marked as interrupted
  if (job.inProgress) {
    console.error(`❌ Job ${jobId} is still marked as in progress`);
    return false;
  }

  if (!job.message?.includes('interrupted by application shutdown')) {
    console.error(`❌ Job ${jobId} does not have shutdown message. Message: ${job.message}`);
    return false;
  }

  if (!job.lastCheckpoint) {
    console.error(`❌ Job ${jobId} does not have a checkpoint timestamp`);
    return false;
  }

  console.log(`✅ Job ${jobId} state verified correctly`);
  console.log(`   - In Progress: ${job.inProgress}`);
  console.log(`   - Message: ${job.message}`);
  console.log(`   - Last Checkpoint: ${job.lastCheckpoint}`);

  return true;
}

/**
 * Test the graceful shutdown process
 */
async function testGracefulShutdown(): Promise<void> {
  console.log('\n🧪 Testing Graceful Shutdown Process');
  console.log('=====================================\n');

  try {
    // Step 1: Initialize shutdown manager
    console.log('Step 1: Initializing shutdown manager...');
    initializeShutdownManager();
    setupSignalHandlers();

    // Step 2: Create and register a test job
    console.log('\nStep 2: Creating and registering test job...');
    const jobId = await createTestJob();
    registerActiveJob(jobId);

    // Step 3: Register a test shutdown callback
    console.log('\nStep 3: Registering shutdown callback...');
    let callbackExecuted = false;
    registerShutdownCallback(async () => {
      console.log('🔧 Test shutdown callback executed');
      callbackExecuted = true;
    });

    // Step 4: Check initial status
    console.log('\nStep 4: Checking initial status...');
    const initialStatus = getShutdownStatus();
    console.log(`   - Active jobs: ${initialStatus.activeJobs.length}`);
    console.log(`   - Registered callbacks: ${initialStatus.registeredCallbacks}`);
    console.log(`   - Shutdown in progress: ${initialStatus.inProgress}`);

    // Step 5: Simulate graceful shutdown
    console.log('\nStep 5: Simulating graceful shutdown...');

    // Override process.exit to prevent actual exit during test
    const originalExit = process.exit;
    let exitCode: number | undefined;
    process.exit = ((code?: number) => {
      exitCode = code;
      console.log(`🚪 Process.exit called with code: ${code}`);
      // Don't actually exit during test
    }) as any;

    try {
      // This should save job state and execute callbacks
      await gracefulShutdown('TEST_SIGNAL');
    } catch (error) {
      // Expected since we're not actually exiting
      console.log(`⚠️ Graceful shutdown completed (exit intercepted)`);
    }

    // Restore original process.exit
    process.exit = originalExit;

    // Step 6: Verify job state was saved
    console.log('\nStep 6: Verifying job state was saved...');
    const jobStateValid = await verifyJobState(jobId);

    // Step 7: Verify callback was executed
    console.log('\nStep 7: Verifying callback execution...');
    if (callbackExecuted) {
      console.log('✅ Shutdown callback was executed');
    } else {
      console.error('❌ Shutdown callback was not executed');
    }

    // Step 8: Test results
    console.log('\n📊 Test Results:');
    console.log(`   - Job state saved correctly: ${jobStateValid ? '✅' : '❌'}`);
    console.log(`   - Shutdown callback executed: ${callbackExecuted ? '✅' : '❌'}`);
    console.log(`   - Exit code: ${exitCode}`);

    if (jobStateValid && callbackExecuted) {
      console.log('\n🎉 All tests passed! Graceful shutdown is working correctly.');
    } else {
      console.error('\n❌ Some tests failed. Please check the implementation.');
      process.exit(1);
    }

  } catch (error) {
    console.error('\n💥 Test failed with error:', error);
    process.exit(1);
  } finally {
    // Clean up signal handlers
    removeSignalHandlers();
  }
}

/**
 * Clean up test data
 */
async function cleanupTestData(): Promise<void> {
  console.log('🧹 Cleaning up test data...');

  const result = await db
    .delete(mirrorJobs)
    .where(eq(mirrorJobs.userId, TEST_USER_ID));

  console.log('✅ Test data cleaned up');
}

/**
 * Main test runner
 */
async function runTest(): Promise<void> {
  console.log('🧪 Graceful Shutdown Integration Test');
  console.log('====================================\n');

  if (shouldCleanup) {
    await cleanupTestData();
    console.log('✅ Cleanup completed');
    return;
  }

  try {
    await testGracefulShutdown();
  } finally {
    // Always clean up test data
    await cleanupTestData();
  }
}

// Handle process signals gracefully during testing
process.on('SIGINT', async () => {
  console.log('\n⚠️ Test interrupted by SIGINT');
  await cleanupTestData();
  process.exit(130);
});

process.on('SIGTERM', async () => {
  console.log('\n⚠️ Test interrupted by SIGTERM');
  await cleanupTestData();
  process.exit(143);
});

// Run the test
runTest();
183
scripts/test-recovery.ts
Normal file
@@ -0,0 +1,183 @@
#!/usr/bin/env bun
/**
 * Test script for the recovery system
 * This script creates test jobs and verifies that the recovery system can handle them
 *
 * Usage:
 *   bun scripts/test-recovery.ts [--cleanup]
 *
 * Options:
 *   --cleanup: Clean up test jobs after testing
 */

import { db, mirrorJobs } from "../src/lib/db";
import { createMirrorJob } from "../src/lib/helpers";
import { initializeRecovery, hasJobsNeedingRecovery, getRecoveryStatus } from "../src/lib/recovery";
import { eq } from "drizzle-orm";
import { v4 as uuidv4 } from "uuid";

// Parse command line arguments
const args = process.argv.slice(2);
const cleanup = args.includes('--cleanup');

// Test configuration
const TEST_USER_ID = "test-user-recovery";
const TEST_BATCH_ID = "test-batch-recovery";

async function runRecoveryTest() {
  console.log('=== Recovery System Test ===');
  console.log(`Cleanup mode: ${cleanup}`);
  console.log('');

  try {
    if (cleanup) {
      await cleanupTestJobs();
      return;
    }

    // Step 1: Create test jobs that simulate interrupted state
    console.log('Step 1: Creating test interrupted jobs...');
    await createTestInterruptedJobs();

    // Step 2: Check if recovery system detects them
    console.log('Step 2: Checking if recovery system detects interrupted jobs...');
    const needsRecovery = await hasJobsNeedingRecovery();
    console.log(`Jobs needing recovery: ${needsRecovery}`);

    if (!needsRecovery) {
      console.log('❌ Recovery system did not detect interrupted jobs');
      return;
    }

    // Step 3: Get recovery status
    console.log('Step 3: Getting recovery status...');
    const status = getRecoveryStatus();
    console.log('Recovery status:', status);

    // Step 4: Run recovery
    console.log('Step 4: Running recovery...');
    const recoveryResult = await initializeRecovery({
      skipIfRecentAttempt: false,
      maxRetries: 2,
      retryDelay: 2000,
    });

    console.log(`Recovery result: ${recoveryResult}`);

    // Step 5: Verify recovery completed
    console.log('Step 5: Verifying recovery completed...');
    const stillNeedsRecovery = await hasJobsNeedingRecovery();
    console.log(`Jobs still needing recovery: ${stillNeedsRecovery}`);

    // Step 6: Check final job states
    console.log('Step 6: Checking final job states...');
    await checkTestJobStates();

    console.log('');
    console.log('✅ Recovery test completed successfully!');
    console.log('Run with --cleanup to remove test jobs');

  } catch (error) {
    console.error('❌ Recovery test failed:', error);
    process.exit(1);
  }
}

/**
 * Create test jobs that simulate interrupted state
 */
async function createTestInterruptedJobs() {
  const testJobs = [
    {
      repositoryId: uuidv4(),
      repositoryName: "test-repo-1",
      message: "Test mirror job 1",
      status: "mirroring" as const,
      jobType: "mirror" as const,
    },
    {
      repositoryId: uuidv4(),
      repositoryName: "test-repo-2",
      message: "Test sync job 2",
      status: "syncing" as const,
      jobType: "sync" as const,
    },
  ];

  for (const job of testJobs) {
    const jobId = await createMirrorJob({
      userId: TEST_USER_ID,
      repositoryId: job.repositoryId,
      repositoryName: job.repositoryName,
      message: job.message,
      status: job.status,
      jobType: job.jobType,
      batchId: TEST_BATCH_ID,
      totalItems: 5,
      itemIds: [job.repositoryId, uuidv4(), uuidv4(), uuidv4(), uuidv4()],
      inProgress: true,
      skipDuplicateEvent: true,
    });

    // Manually set the job to look interrupted (old timestamp)
    const oldTimestamp = new Date();
    oldTimestamp.setMinutes(oldTimestamp.getMinutes() - 15); // 15 minutes ago

    await db
      .update(mirrorJobs)
      .set({
        startedAt: oldTimestamp,
        lastCheckpoint: oldTimestamp,
      })
      .where(eq(mirrorJobs.id, jobId));

    console.log(`Created test job: ${jobId} (${job.repositoryName})`);
  }
}

/**
 * Check the final states of test jobs
 */
async function checkTestJobStates() {
  const testJobs = await db
    .select()
    .from(mirrorJobs)
    .where(eq(mirrorJobs.userId, TEST_USER_ID));

  console.log(`Found ${testJobs.length} test jobs:`);

  for (const job of testJobs) {
    console.log(`- Job ${job.id}: ${job.status} (inProgress: ${job.inProgress})`);
    console.log(`  Message: ${job.message}`);
    console.log(`  Started: ${job.startedAt ? new Date(job.startedAt).toISOString() : 'never'}`);
    console.log(`  Completed: ${job.completedAt ? new Date(job.completedAt).toISOString() : 'never'}`);
    console.log('');
  }
}

/**
 * Clean up test jobs
 */
async function cleanupTestJobs() {
  console.log('Cleaning up test jobs...');

  const result = await db
    .delete(mirrorJobs)
    .where(eq(mirrorJobs.userId, TEST_USER_ID));

  console.log('✅ Test jobs cleaned up successfully');
}

// Handle process signals gracefully
process.on('SIGINT', () => {
  console.log('\n⚠️ Test interrupted by SIGINT');
  process.exit(130);
});

process.on('SIGTERM', () => {
  console.log('\n⚠️ Test interrupted by SIGTERM');
  process.exit(143);
});

// Run the test
runRecoveryTest();
@@ -1,17 +1,20 @@
import { useMemo, useRef, useState, useEffect } from "react";
import { useVirtualizer } from "@tanstack/react-virtual";
import type { MirrorJob } from "@/lib/db/schema";
import Fuse from "fuse.js";
import { Button } from "../ui/button";
import { RefreshCw } from "lucide-react";
import { Card } from "../ui/card";
import { formatDate, getStatusColor } from "@/lib/utils";
import { Skeleton } from "../ui/skeleton";
import type { FilterParams } from "@/types/filter";
import { useEffect, useMemo, useRef, useState } from 'react';
import { useVirtualizer } from '@tanstack/react-virtual';
import type { MirrorJob } from '@/lib/db/schema';
import Fuse from 'fuse.js';
import { Button } from '../ui/button';
import { RefreshCw } from 'lucide-react';
import { Card } from '../ui/card';
import { formatDate, getStatusColor } from '@/lib/utils';
import { Skeleton } from '../ui/skeleton';
import type { FilterParams } from '@/types/filter';

type MirrorJobWithKey = MirrorJob & { _rowKey: string };

interface ActivityListProps {
  activities: MirrorJob[];
  activities: MirrorJobWithKey[];
  isLoading: boolean;
  isLiveActive?: boolean;
  filter: FilterParams;
  setFilter: (filter: FilterParams) => void;
}
@@ -19,41 +22,48 @@ interface ActivityListProps {
export default function ActivityList({
  activities,
  isLoading,
  isLiveActive = false,
  filter,
  setFilter,
}: ActivityListProps) {
  const [expandedItems, setExpandedItems] = useState<Set<string>>(new Set());
  const [expandedItems, setExpandedItems] = useState<Set<string>>(
    () => new Set(),
  );

  const parentRef = useRef<HTMLDivElement>(null);
  const rowRefs = useRef<Map<string, HTMLDivElement | null>>(new Map());
  // We keep the ref only for possible future scroll-to-row logic.
  const rowRefs = useRef<Map<string, HTMLDivElement | null>>(new Map()); // eslint-disable-line @typescript-eslint/no-unused-vars

  const filteredActivities = useMemo(() => {
    let result = activities;

    if (filter.status) {
      result = result.filter((activity) => activity.status === filter.status);
      result = result.filter((a) => a.status === filter.status);
    }

    if (filter.type) {
      if (filter.type === 'repository') {
        result = result.filter((activity) => !!activity.repositoryId);
      } else if (filter.type === 'organization') {
        result = result.filter((activity) => !!activity.organizationId);
      }
      result =
        filter.type === 'repository'
          ? result.filter((a) => !!a.repositoryId)
          : filter.type === 'organization'
            ? result.filter((a) => !!a.organizationId)
            : result;
    }

    if (filter.name) {
      result = result.filter((activity) =>
        activity.repositoryName === filter.name ||
        activity.organizationName === filter.name
      result = result.filter(
        (a) =>
          a.repositoryName === filter.name ||
          a.organizationName === filter.name,
      );
    }

    if (filter.searchTerm) {
      const fuse = new Fuse(result, {
        keys: ["message", "details", "organizationName", "repositoryName"],
        keys: ['message', 'details', 'organizationName', 'repositoryName'],
        threshold: 0.3,
      });
      result = fuse.search(filter.searchTerm).map((res) => res.item);
      result = fuse.search(filter.searchTerm).map((r) => r.item);
    }

    return result;
@@ -62,10 +72,8 @@ export default function ActivityList({
  const virtualizer = useVirtualizer({
    count: filteredActivities.length,
    getScrollElement: () => parentRef.current,
    estimateSize: (index) => {
      const activity = filteredActivities[index];
      return expandedItems.has(activity.id || "") ? 217 : 120;
    },
    estimateSize: (idx) =>
      expandedItems.has(filteredActivities[idx]._rowKey) ? 217 : 120,
    overscan: 5,
    measureElement: (el) => el.getBoundingClientRect().height + 8,
  });
@@ -74,118 +82,128 @@
    virtualizer.measure();
  }, [expandedItems, virtualizer]);

  return isLoading ? (
    <div className="flex flex-col gap-y-4">
      {Array.from({ length: 5 }, (_, index) => (
        <Skeleton key={index} className="h-28 w-full rounded-md" />
      ))}
    </div>
  ) : filteredActivities.length === 0 ? (
    <div className="flex flex-col items-center justify-center py-12 text-center">
      <RefreshCw className="h-12 w-12 text-muted-foreground mb-4" />
      <h3 className="text-lg font-medium">No activities found</h3>
      <p className="text-sm text-muted-foreground mt-1 mb-4 max-w-md">
        {filter.searchTerm || filter.status || filter.type || filter.name
          ? "Try adjusting your search or filter criteria."
          : "No mirroring activities have been recorded yet."}
      </p>
      {filter.searchTerm || filter.status || filter.type || filter.name ? (
        <Button
          variant="outline"
          onClick={() => {
            setFilter({ searchTerm: "", status: "", type: "", name: "" });
  /* ------------------------------ render ------------------------------ */

  if (isLoading) {
    return (
      <div className='flex flex-col gap-y-4'>
        {Array.from({ length: 5 }, (_, i) => (
          <Skeleton key={i} className='h-28 w-full rounded-md' />
        ))}
      </div>
    );
  }

  if (filteredActivities.length === 0) {
    const hasFilter =
      filter.searchTerm || filter.status || filter.type || filter.name;

    return (
      <div className='flex flex-col items-center justify-center py-12 text-center'>
        <RefreshCw className='mb-4 h-12 w-12 text-muted-foreground' />
        <h3 className='text-lg font-medium'>No activities found</h3>
        <p className='mt-1 mb-4 max-w-md text-sm text-muted-foreground'>
          {hasFilter
            ? 'Try adjusting your search or filter criteria.'
            : 'No mirroring activities have been recorded yet.'}
        </p>
        {hasFilter && (
          <Button
            variant='outline'
            onClick={() =>
              setFilter({ searchTerm: '', status: '', type: '', name: '' })
            }
          >
            Clear Filters
          </Button>
        )}
      </div>
    );
  }

  return (
    <div className="flex flex-col border rounded-md">
      <Card
        ref={parentRef}
        className='relative max-h-[calc(100dvh-231px)] overflow-y-auto rounded-none border-0'
      >
        <div
          style={{
            height: virtualizer.getTotalSize(),
            position: 'relative',
            width: '100%',
          }}
        >
          Clear Filters
        </Button>
      ) : (
        <Button>
          <RefreshCw className="h-4 w-4 mr-2" />
          Refresh
        </Button>
      )}
    </div>
  ) : (
    <Card
      className="border rounded-md max-h-[calc(100dvh-191px)] overflow-y-auto relative"
      ref={parentRef}
    >
      <div
        style={{
          height: virtualizer.getTotalSize(),
          position: "relative",
          width: "100%",
        }}
      >
        {virtualizer.getVirtualItems().map((virtualRow) => {
          const activity = filteredActivities[virtualRow.index];
          const isExpanded = expandedItems.has(activity.id || "");
          const key = activity.id || String(virtualRow.index);
          {virtualizer.getVirtualItems().map((vRow) => {
            const activity = filteredActivities[vRow.index];
            const isExpanded = expandedItems.has(activity._rowKey);

          return (
            <div
              key={key}
              key={activity._rowKey}
              ref={(node) => {
                if (node) {
                  rowRefs.current.set(key, node);
                  virtualizer.measureElement(node);
                }
                rowRefs.current.set(activity._rowKey, node);
                if (node) virtualizer.measureElement(node);
              }}
              style={{
                position: "absolute",
                position: 'absolute',
                top: 0,
                left: 0,
                width: "100%",
                transform: `translateY(${virtualRow.start}px)`,
                paddingBottom: "8px",
                width: '100%',
                transform: `translateY(${vRow.start}px)`,
                paddingBottom: '8px',
              }}
              className="border-b px-4 pt-4"
              className='border-b px-4 pt-4'
            >
              <div className="flex items-start gap-4">
                <div className="relative mt-2">
              <div className='flex items-start gap-4'>
                <div className='relative mt-2'>
                  <div
                    className={`h-2 w-2 rounded-full ${getStatusColor(
                      activity.status
                      activity.status,
                    )}`}
                  />
                </div>
                <div className="flex-1">
                  <div className="flex flex-col sm:flex-row sm:items-center sm:justify-between mb-1">
                    <p className="font-medium">{activity.message}</p>
                    <p className="text-sm text-muted-foreground">

                <div className='flex-1'>
                  <div className='mb-1 flex flex-col sm:flex-row sm:items-center sm:justify-between'>
                    <p className='font-medium'>{activity.message}</p>
                    <p className='text-sm text-muted-foreground'>
                      {formatDate(activity.timestamp)}
                    </p>
                  </div>

                  {activity.repositoryName && (
                    <p className="text-sm text-muted-foreground mb-2">
                    <p className='mb-2 text-sm text-muted-foreground'>
                      Repository: {activity.repositoryName}
                    </p>
                  )}

                  {activity.organizationName && (
                    <p className="text-sm text-muted-foreground mb-2">
                    <p className='mb-2 text-sm text-muted-foreground'>
                      Organization: {activity.organizationName}
                    </p>
                  )}

                  {activity.details && (
                    <div className="mt-2">
                    <div className='mt-2'>
                      <Button
                        variant="ghost"
                        onClick={() => {
                          const newSet = new Set(expandedItems);
                          const id = activity.id || "";
                          newSet.has(id) ? newSet.delete(id) : newSet.add(id);
                          setExpandedItems(newSet);
                        }}
                        className="text-xs h-7 px-2"
                        variant='ghost'
                        className='h-7 px-2 text-xs'
                        onClick={() =>
                          setExpandedItems((prev) => {
                            const next = new Set(prev);
                            next.has(activity._rowKey)
                              ? next.delete(activity._rowKey)
                              : next.add(activity._rowKey);
                            return next;
                          })
                        }
                      >
                        {isExpanded ? "Hide Details" : "Show Details"}
                        {isExpanded ? 'Hide Details' : 'Show Details'}
                      </Button>

                      {isExpanded && (
                        <pre className="mt-2 p-3 bg-muted rounded-md text-xs overflow-auto whitespace-pre-wrap min-h-[100px]">
                        <pre className='mt-2 min-h-[100px] whitespace-pre-wrap overflow-auto rounded-md bg-muted p-3 text-xs'>
                          {activity.details}
                        </pre>
                      )}
@@ -198,5 +216,44 @@ export default function ActivityList({
        })}
      </div>
    </Card>

    {/* Status Bar */}
    <div className="h-[40px] flex items-center justify-between border-t bg-muted/30 px-3 relative">
      <div className="flex items-center gap-2">
        <div className={`h-1.5 w-1.5 rounded-full ${isLiveActive ? 'bg-emerald-500' : 'bg-primary'}`} />
        <span className="text-sm font-medium text-foreground">
          {filteredActivities.length} {filteredActivities.length === 1 ? 'activity' : 'activities'} total
        </span>
      </div>

      {/* Center - Live active indicator */}
      {isLiveActive && (
        <div className="flex items-center gap-1.5 absolute left-1/2 transform -translate-x-1/2">
          <div
            className="h-1 w-1 rounded-full bg-emerald-500"
            style={{
              animation: 'pulse 2s ease-in-out infinite'
            }}
          />
          <span className="text-xs text-emerald-600 dark:text-emerald-400 font-medium">
            Live active
          </span>
          <div
            className="h-1 w-1 rounded-full bg-emerald-500"
            style={{
              animation: 'pulse 2s ease-in-out infinite',
              animationDelay: '1s'
            }}
          />
        </div>
      )}

      {(filter.searchTerm || filter.status || filter.type || filter.name) && (
        <span className="text-xs text-muted-foreground">
          Filters applied
        </span>
      )}
    </div>
  </div>
  );
}
@@ -1,289 +1,427 @@
|
||||
import { useCallback, useEffect, useState } from "react";
|
||||
import { Button } from "@/components/ui/button";
|
||||
import { Search, Download, RefreshCw, ChevronDown } from "lucide-react";
|
||||
import { useCallback, useEffect, useState, useRef } from 'react';
|
||||
import { Button } from '@/components/ui/button';
|
||||
import { ChevronDown, Download, RefreshCw, Search, Trash2 } from 'lucide-react';
|
||||
import {
|
||||
DropdownMenu,
|
||||
DropdownMenuContent,
|
||||
DropdownMenuItem,
|
||||
DropdownMenuTrigger,
|
||||
} from "../ui/dropdown-menu";
|
||||
import { apiRequest, formatDate } from "@/lib/utils";
|
||||
import { useAuth } from "@/hooks/useAuth";
|
||||
import type { MirrorJob } from "@/lib/db/schema";
|
||||
import type { ActivityApiResponse } from "@/types/activities";
|
||||
} from '../ui/dropdown-menu';
|
||||
import {
|
||||
Dialog,
|
||||
DialogContent,
|
||||
DialogDescription,
|
||||
DialogFooter,
|
||||
DialogHeader,
|
||||
DialogTitle,
|
||||
DialogTrigger,
|
||||
} from '../ui/dialog';
|
||||
import { apiRequest, formatDate } from '@/lib/utils';
|
||||
import { useAuth } from '@/hooks/useAuth';
|
||||
import type { MirrorJob } from '@/lib/db/schema';
|
||||
import type { ActivityApiResponse } from '@/types/activities';
|
||||
import {
|
||||
Select,
|
||||
SelectContent,
|
||||
SelectItem,
|
||||
SelectTrigger,
|
||||
SelectValue,
|
||||
} from "../ui/select";
|
||||
import { repoStatusEnum, type RepoStatus } from "@/types/Repository";
|
||||
import ActivityList from "./ActivityList";
|
||||
import { ActivityNameCombobox } from "./ActivityNameCombobox";
|
||||
import { useSSE } from "@/hooks/useSEE";
|
||||
import { useFilterParams } from "@/hooks/useFilterParams";
|
||||
import { toast } from "sonner";
|
||||
} from '../ui/select';
|
||||
import { repoStatusEnum, type RepoStatus } from '@/types/Repository';
|
||||
import ActivityList from './ActivityList';
|
||||
import { ActivityNameCombobox } from './ActivityNameCombobox';
|
||||
import { useSSE } from '@/hooks/useSEE';
|
||||
import { useFilterParams } from '@/hooks/useFilterParams';
|
||||
import { toast } from 'sonner';
|
||||
import { useLiveRefresh } from '@/hooks/useLiveRefresh';
|
||||
import { useConfigStatus } from '@/hooks/useConfigStatus';
|
||||
import { useNavigation } from '@/components/layout/MainLayout';
|
||||
|
||||
type MirrorJobWithKey = MirrorJob & { _rowKey: string };
|
||||
|
||||
// Maximum number of activities to keep in memory to prevent performance issues
|
||||
const MAX_ACTIVITIES = 1000;
|
||||
|
||||
// More robust key generation to prevent collisions
|
||||
function genKey(job: MirrorJob, index?: number): string {
|
||||
const baseId = job.id || `temp-${Date.now()}-${Math.random().toString(36).slice(2)}`;
|
||||
const timestamp = job.timestamp instanceof Date ? job.timestamp.getTime() : new Date(job.timestamp).getTime();
|
||||
const indexSuffix = index !== undefined ? `-${index}` : '';
|
||||
return `${baseId}-${timestamp}${indexSuffix}`;
|
||||
}
|
||||
|
||||
// Create a deep clone without structuredClone for better browser compatibility
|
||||
function deepClone<T>(obj: T): T {
|
||||
if (obj === null || typeof obj !== 'object') return obj;
|
||||
if (obj instanceof Date) return new Date(obj.getTime()) as T;
|
||||
if (Array.isArray(obj)) return obj.map(item => deepClone(item)) as T;
|
||||
|
||||
const cloned = {} as T;
|
||||
for (const key in obj) {
|
||||
if (Object.prototype.hasOwnProperty.call(obj, key)) {
|
||||
cloned[key] = deepClone(obj[key]);
|
||||
}
|
||||
}
|
||||
return cloned;
|
||||
}
|
||||
|
||||
export function ActivityLog() {
|
||||
const { user } = useAuth();
|
||||
const [activities, setActivities] = useState<MirrorJob[]>([]);
|
||||
const [isLoading, setIsLoading] = useState<boolean>(false);
|
||||
const { filter, setFilter } = useFilterParams({
|
||||
searchTerm: "",
|
||||
status: "",
|
||||
type: "",
|
||||
name: "",
|
||||
});
|
||||
const { registerRefreshCallback, isLiveEnabled } = useLiveRefresh();
|
||||
const { isFullyConfigured } = useConfigStatus();
|
||||
const { navigationKey } = useNavigation();
|
||||
|
||||
const handleNewMessage = useCallback((data: MirrorJob) => {
|
||||
setActivities((prevActivities) => [data, ...prevActivities]);
|
||||
const [activities, setActivities] = useState<MirrorJobWithKey[]>([]);
|
||||
const [isInitialLoading, setIsInitialLoading] = useState(false);
|
||||
const [showCleanupDialog, setShowCleanupDialog] = useState(false);
|
||||
|
||||
console.log("Received new log:", data);
|
||||
// Ref to track if component is mounted to prevent state updates after unmount
|
||||
const isMountedRef = useRef(true);
|
||||
|
||||
useEffect(() => {
|
||||
return () => {
|
||||
isMountedRef.current = false;
|
||||
};
|
||||
}, []);
|
||||
|
||||
const { filter, setFilter } = useFilterParams({
|
||||
searchTerm: '',
|
||||
status: '',
|
||||
type: '',
|
||||
name: '',
|
||||
});
|
||||
|
||||
/* ----------------------------- SSE hook ----------------------------- */
|
||||
|
||||
const handleNewMessage = useCallback((data: MirrorJob) => {
|
||||
if (!isMountedRef.current) return;
|
||||
|
||||
setActivities((prev) => {
|
||||
// Create a deep clone of the new activity
|
||||
const clonedData = deepClone(data);
|
||||
|
||||
// Check if this activity already exists to prevent duplicates
|
||||
const existingIndex = prev.findIndex(activity =>
|
||||
activity.id === clonedData.id ||
|
||||
(activity.repositoryId === clonedData.repositoryId &&
|
||||
activity.organizationId === clonedData.organizationId &&
|
||||
activity.message === clonedData.message &&
|
||||
Math.abs(new Date(activity.timestamp).getTime() - new Date(clonedData.timestamp).getTime()) < 1000)
|
||||
);
|
||||
|
||||
if (existingIndex !== -1) {
|
||||
// Update existing activity instead of adding duplicate
|
||||
const updated = [...prev];
|
||||
updated[existingIndex] = {
|
||||
...clonedData,
|
||||
_rowKey: prev[existingIndex]._rowKey, // Keep the same key
|
||||
};
|
||||
return updated;
|
||||
}
|
||||
|
||||
// Add new activity with unique key
|
||||
const withKey: MirrorJobWithKey = {
|
||||
...clonedData,
|
||||
_rowKey: genKey(clonedData, prev.length),
|
||||
};
|
||||
|
||||
// Limit the number of activities to prevent memory issues
|
||||
const newActivities = [withKey, ...prev];
|
||||
return newActivities.slice(0, MAX_ACTIVITIES);
|
||||
});
|
||||
}, []);
|
||||
|
||||
// Use the SSE hook
|
||||
const { connected } = useSSE({
|
||||
userId: user?.id,
|
||||
onMessage: handleNewMessage,
|
||||
});
|
||||
|
||||
  /* ------------------------- initial fetch --------------------------- */

  const fetchActivities = useCallback(async (isLiveRefresh = false) => {
    if (!user?.id) return false;

    try {
      // Set appropriate loading state based on refresh type
      if (!isLiveRefresh) {
        setIsInitialLoading(true);
      }

      const res = await apiRequest<ActivityApiResponse>(
        `/activities?userId=${user.id}`,
        { method: 'GET' },
      );

      if (!res.success) {
        // Only show error toast for manual refreshes to avoid spam during live updates
        if (!isLiveRefresh) {
          toast.error(res.message ?? 'Failed to fetch activities.');
        }
        return false;
      }

      // Process activities with robust cloning and unique keys
      const data: MirrorJobWithKey[] = res.activities.map((activity, index) => {
        const clonedActivity = deepClone(activity);
        return {
          ...clonedActivity,
          _rowKey: genKey(clonedActivity, index),
        };
      });

      // Sort by timestamp (newest first) to ensure consistent ordering
      data.sort((a, b) => {
        const timeA = new Date(a.timestamp).getTime();
        const timeB = new Date(b.timestamp).getTime();
        return timeB - timeA;
      });

      if (isMountedRef.current) {
        setActivities(data);
      }

      return true;
    } catch (err) {
      if (isMountedRef.current) {
        // Only show error toast for manual refreshes to avoid spam during live updates
        if (!isLiveRefresh) {
          toast.error(
            err instanceof Error ? err.message : 'Failed to fetch activities.',
          );
        }
      }
      return false;
    } finally {
      if (isMountedRef.current && !isLiveRefresh) {
        setIsInitialLoading(false);
      }
    }
  }, [user?.id]); // Only depend on user.id, not entire user object

  useEffect(() => {
    // Reset loading state when component becomes active
    setIsInitialLoading(true);
    fetchActivities(false); // Manual refresh, not live
  }, [fetchActivities, navigationKey]); // Include navigationKey to trigger on navigation

  // Register with global live refresh system
  useEffect(() => {
    // Only register for live refresh if configuration is complete
    // Activity logs can exist from previous runs, but new activities won't be generated without config
    if (!isFullyConfigured) {
      return;
    }

    const unregister = registerRefreshCallback(() => {
      fetchActivities(true); // Live refresh
    });

    return unregister;
  }, [registerRefreshCallback, fetchActivities, isFullyConfigured]);

  /* ---------------------- filtering + exporting ---------------------- */

  const applyLightFilter = (list: MirrorJobWithKey[]) => {
    return list.filter((a) => {
      if (filter.status && a.status !== filter.status) return false;

      if (filter.type === 'repository' && !a.repositoryId) return false;
      if (filter.type === 'organization' && !a.organizationId) return false;

      if (
        filter.name &&
        a.repositoryName !== filter.name &&
        a.organizationName !== filter.name
      ) {
        return false;
      }

      // Note: We're not applying the search term filter here as that would require
      // re-implementing the Fuse.js search logic

      return true;
    });
  };

  // Function to export activities as CSV
  const exportAsCSV = () => {
    const rows = applyLightFilter(activities);
    if (!rows.length) return toast.error('No activities to export.');

    const headers = [
      'Timestamp',
      'Message',
      'Status',
      'Repository',
      'Organization',
      'Details',
    ];

    const escape = (v: string | null | undefined) =>
      v && /[,\"\n]/.test(v) ? `"${v.replace(/"/g, '""')}"` : v ?? '';

    const csv = [
      headers.join(','),
      ...rows.map((a) =>
        [
          formatDate(a.timestamp),
          escape(a.message),
          a.status,
          escape(a.repositoryName),
          escape(a.organizationName),
          escape(a.details),
        ].join(','),
      ),
    ].join('\n');

    downloadFile(csv, 'text/csv;charset=utf-8;', 'activity_log_export.csv');
    toast.success('CSV exported.');
  };

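For reference, the `escape` helper above quotes a field only when it contains a comma, quote, or newline, and doubles any embedded quotes, which matches RFC 4180 CSV quoting. A few worked cases, using a standalone copy of the helper for illustration:

const escapeField = (v: string | null | undefined) =>
  v && /[,"\n]/.test(v) ? `"${v.replace(/"/g, '""')}"` : v ?? '';

console.log(escapeField('plain'));    // plain
console.log(escapeField('a,b'));      // "a,b"
console.log(escapeField('say "hi"')); // "say ""hi"""
console.log(escapeField(null));       // (empty string)
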
  // Function to export activities as JSON
  const exportAsJSON = () => {
    const rows = applyLightFilter(activities);
    if (!rows.length) return toast.error('No activities to export.');

    const json = JSON.stringify(
      rows.map((a) => ({
        ...a,
        formattedTime: formatDate(a.timestamp),
      })),
      null,
      2,
    );

    downloadFile(json, 'application/json', 'activity_log_export.json');
    toast.success('JSON exported.');
  };

  // Generic function to download a file
  const downloadFile = (
    content: string,
    mime: string,
    filename: string,
  ): void => {
    const date = new Date().toISOString().slice(0, 10); // yyyy-mm-dd
    const link = document.createElement('a');

    link.href = URL.createObjectURL(new Blob([content], { type: mime }));
    link.download = filename.replace('.', `_${date}.`);
    document.body.appendChild(link);
    link.click();
    document.body.removeChild(link);
  };

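The only non-obvious step in `downloadFile` is the filename stamping: `filename.replace('.', `_${date}.`)` splices the date before the first dot. Extracted as a pure function for clarity (a sketch, not code from the diff):

function datedFilename(filename: string): string {
  const date = new Date().toISOString().slice(0, 10); // yyyy-mm-dd
  return filename.replace('.', `_${date}.`);
}
// datedFilename('activity_log_export.csv')
//   -> 'activity_log_export_2025-05-22.csv' (when run on that date)

Note that `replace` targets only the first dot, which is fine for the single-extension names used here.
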
  const handleCleanupClick = () => {
    setShowCleanupDialog(true);
  };

  const confirmCleanup = async () => {
    if (!user?.id) return;

    try {
      setIsInitialLoading(true);
      setShowCleanupDialog(false);

      const response = await fetch('/api/activities/cleanup', {
        method: 'POST',
        headers: { 'Content-Type': 'application/json' },
        body: JSON.stringify({ userId: user.id }),
      });

      if (!response.ok) {
        const errorData = await response.json().catch(() => ({ error: 'Unknown error occurred' }));
        throw new Error(errorData.error || `HTTP ${response.status}: ${response.statusText}`);
      }

      const res = await response.json();

      if (res.success) {
        // Clear the activities from the UI
        setActivities([]);
        toast.success(`All activities cleaned up successfully. Deleted ${res.result.mirrorJobsDeleted} mirror jobs and ${res.result.eventsDeleted} events.`);
      } else {
        toast.error(res.error || 'Failed to cleanup activities.');
      }
    } catch (error) {
      console.error('Error cleaning up activities:', error);
      toast.error(error instanceof Error ? error.message : 'Failed to cleanup activities.');
    } finally {
      setIsInitialLoading(false);
    }
  };

  const cancelCleanup = () => {
    setShowCleanupDialog(false);
  };

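From the way `confirmCleanup` reads the response, the `/api/activities/cleanup` payload can be inferred to look roughly like this. This is an assumption based solely on the fields accessed above, not a documented contract:

interface CleanupApiResponse {
  success: boolean;
  error?: string;              // read on the failure path
  result?: {
    mirrorJobsDeleted: number; // interpolated into the success toast
    eventsDeleted: number;
  };
}
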
  /* ------------------------------ UI ------------------------------ */

  return (
    <div className='flex flex-col gap-y-8'>
      <div className='flex w-full flex-row items-center gap-4'>
        {/* search input */}
        <div className='relative flex-1'>
          <Search className='absolute left-2 top-2.5 h-4 w-4 text-muted-foreground' />
          <input
            type='text'
            placeholder='Search activities...'
            className='h-9 w-full rounded-md border border-input bg-background px-3 py-1 pl-8 text-sm shadow-sm transition-colors placeholder:text-muted-foreground focus-visible:outline-none focus-visible:ring-1 focus-visible:ring-ring'
            value={filter.searchTerm}
            onChange={(e) =>
              setFilter((prev) => ({
                ...prev,
                searchTerm: e.target.value,
              }))
            }
          />
        </div>

        {/* status select */}
        <Select
          value={filter.status || 'all'}
          onValueChange={(v) =>
            setFilter((p) => ({
              ...p,
              status: v === 'all' ? '' : (v as RepoStatus),
            }))
          }
        >
          <SelectTrigger className='h-9 w-[140px] max-h-9'>
            <SelectValue placeholder='All Status' />
          </SelectTrigger>
          <SelectContent>
            {['all', ...repoStatusEnum.options].map((s) => (
              <SelectItem key={s} value={s}>
                {s === 'all' ? 'All Status' : s[0].toUpperCase() + s.slice(1)}
              </SelectItem>
            ))}
          </SelectContent>
        </Select>

        {/* repo/org name combobox */}
        <ActivityNameCombobox
          activities={activities}
          value={filter.name || ''}
          onChange={(name) => setFilter((p) => ({ ...p, name }))}
        />

        {/* type select */}
        <Select
          value={filter.type || 'all'}
          onValueChange={(v) =>
            setFilter((p) => ({ ...p, type: v === 'all' ? '' : v }))
          }
        >
          <SelectTrigger className='h-9 w-[140px] max-h-9'>
            <SelectValue placeholder='All Types' />
          </SelectTrigger>
          <SelectContent>
            {['all', 'repository', 'organization'].map((t) => (
              <SelectItem key={t} value={t}>
                {t === 'all' ? 'All Types' : t[0].toUpperCase() + t.slice(1)}
              </SelectItem>
            ))}
          </SelectContent>
        </Select>

        {/* export dropdown */}
        <DropdownMenu>
          <DropdownMenuTrigger asChild>
            <Button variant='outline' className='flex items-center gap-1'>
              <Download className='mr-1 h-4 w-4' />
              Export
              <ChevronDown className='ml-1 h-4 w-4' />
            </Button>
          </DropdownMenuTrigger>
          <DropdownMenuContent>
@@ -295,19 +433,61 @@ export function ActivityLog() {
            </DropdownMenuItem>
          </DropdownMenuContent>
        </DropdownMenu>

        {/* refresh */}
        <Button
          variant="outline"
          size="icon"
          onClick={() => fetchActivities(false)} // Manual refresh, show loading skeleton
          title="Refresh activity log"
        >
          <RefreshCw className='h-4 w-4' />
        </Button>

        {/* cleanup all activities */}
        <Button
          variant="outline"
          size="icon"
          onClick={handleCleanupClick}
          title="Delete all activities"
          className="text-destructive hover:text-destructive"
        >
          <Trash2 className='h-4 w-4' />
        </Button>
      </div>

      {/* activity list */}
      <ActivityList
        activities={applyLightFilter(activities)}
        isLoading={isInitialLoading || !connected}
        isLiveActive={isLiveEnabled && isFullyConfigured}
        filter={filter}
        setFilter={setFilter}
      />

      {/* cleanup confirmation dialog */}
      <Dialog open={showCleanupDialog} onOpenChange={setShowCleanupDialog}>
        <DialogContent>
          <DialogHeader>
            <DialogTitle>Delete All Activities</DialogTitle>
            <DialogDescription>
              Are you sure you want to delete ALL activities? This action cannot be undone and will remove all mirror jobs and events from the database.
            </DialogDescription>
          </DialogHeader>
          <DialogFooter>
            <Button variant="outline" onClick={cancelCleanup}>
              Cancel
            </Button>
            <Button
              variant="destructive"
              onClick={confirmCleanup}
              disabled={isInitialLoading}
            >
              {isInitialLoading ? 'Deleting...' : 'Delete All Activities'}
            </Button>
          </DialogFooter>
        </DialogContent>
      </Dialog>
    </div>
  );
}

@@ -1,14 +1,8 @@
import { useEffect, useState, useCallback, useRef } from 'react';
import { GitHubConfigForm } from './GitHubConfigForm';
import { GiteaConfigForm } from './GiteaConfigForm';
import { ScheduleConfigForm } from './ScheduleConfigForm';
import { DatabaseCleanupConfigForm } from './DatabaseCleanupConfigForm';
import type {
  ConfigApiResponse,
  GiteaConfig,
@@ -16,18 +10,21 @@ import type {
  SaveConfigApiRequest,
  SaveConfigApiResponse,
  ScheduleConfig,
  DatabaseCleanupConfig,
} from '@/types/config';
import { Button } from '../ui/button';
import { useAuth } from '@/hooks/useAuth';
import { apiRequest } from '@/lib/utils';
import { RefreshCw } from 'lucide-react';
import { toast } from 'sonner';
import { Skeleton } from '@/components/ui/skeleton';
import { invalidateConfigCache } from '@/hooks/useConfigStatus';

type ConfigState = {
  githubConfig: GitHubConfig;
  giteaConfig: GiteaConfig;
  scheduleConfig: ScheduleConfig;
  cleanupConfig: DatabaseCleanupConfig;
};

export function ConfigTabs() {
@@ -54,13 +51,19 @@ export function ConfigTabs() {
      enabled: false,
      interval: 3600,
    },
    cleanupConfig: {
      enabled: false,
      retentionDays: 604800, // 7 days in seconds
    },
  });
  const { user, refreshUser } = useAuth();
  const [isLoading, setIsLoading] = useState(true);
  const [isSyncing, setIsSyncing] = useState<boolean>(false);
  const [isConfigSaved, setIsConfigSaved] = useState<boolean>(false);
  const [isAutoSavingSchedule, setIsAutoSavingSchedule] = useState<boolean>(false);
  const [isAutoSavingCleanup, setIsAutoSavingCleanup] = useState<boolean>(false);
  const autoSaveScheduleTimeoutRef = useRef<NodeJS.Timeout | null>(null);
  const autoSaveCleanupTimeoutRef = useRef<NodeJS.Timeout | null>(null);

  const isConfigFormValid = (): boolean => {
    const { githubConfig, giteaConfig } = config;
@@ -75,26 +78,8 @@ export function ConfigTabs() {
    return isGitHubValid && isGiteaValid;
  };

  // Removed the problematic useEffect that was causing circular dependencies
  // The lastRun and nextRun should be managed by the backend and fetched via API

  const handleImportGitHubData = async () => {
    if (!user?.id) return;
@@ -106,7 +91,7 @@ export function ConfigTabs() {
    );
    result.success
      ? toast.success(
          'GitHub data imported successfully! Head to the Repositories page to start mirroring.',
        )
      : toast.error(
          `Failed to import GitHub data: ${
@@ -131,6 +116,7 @@ export function ConfigTabs() {
      githubConfig: config.githubConfig,
      giteaConfig: config.giteaConfig,
      scheduleConfig: config.scheduleConfig,
      cleanupConfig: config.cleanupConfig,
    };
    try {
      const response = await fetch('/api/config', {
@@ -142,6 +128,8 @@ export function ConfigTabs() {
      if (result.success) {
        await refreshUser();
        setIsConfigSaved(true);
        // Invalidate config cache so other components get fresh data
        invalidateConfigCache();
        toast.success(
          'Configuration saved successfully! Now import your GitHub data to begin.',
        );
@@ -159,8 +147,157 @@ export function ConfigTabs() {
    }
  };

  // Auto-save function specifically for schedule config changes
  const autoSaveScheduleConfig = useCallback(async (scheduleConfig: ScheduleConfig) => {
    if (!user?.id || !isConfigSaved) return; // Only auto-save if config was previously saved

    // Clear any existing timeout
    if (autoSaveScheduleTimeoutRef.current) {
      clearTimeout(autoSaveScheduleTimeoutRef.current);
    }

    // Debounce the auto-save to prevent excessive API calls
    autoSaveScheduleTimeoutRef.current = setTimeout(async () => {
      setIsAutoSavingSchedule(true);

      const reqPayload: SaveConfigApiRequest = {
        userId: user.id!,
        githubConfig: config.githubConfig,
        giteaConfig: config.giteaConfig,
        scheduleConfig: scheduleConfig,
        cleanupConfig: config.cleanupConfig,
      };

      try {
        const response = await fetch('/api/config', {
          method: 'POST',
          headers: { 'Content-Type': 'application/json' },
          body: JSON.stringify(reqPayload),
        });
        const result: SaveConfigApiResponse = await response.json();

        if (result.success) {
          // Silent success - no toast for auto-save
          // Removed refreshUser() call to prevent page reload
          // Invalidate config cache so other components get fresh data
          invalidateConfigCache();

          // Fetch updated config to get the recalculated nextRun time
          try {
            const updatedResponse = await apiRequest<ConfigApiResponse>(
              `/config?userId=${user.id}`,
              { method: 'GET' },
            );
            if (updatedResponse && !updatedResponse.error) {
              setConfig(prev => ({
                ...prev,
                scheduleConfig: updatedResponse.scheduleConfig || prev.scheduleConfig,
              }));
            }
          } catch (fetchError) {
            console.warn('Failed to fetch updated config after auto-save:', fetchError);
          }
        } else {
          toast.error(
            `Auto-save failed: ${result.message || 'Unknown error'}`,
            { duration: 3000 }
          );
        }
      } catch (error) {
        toast.error(
          `Auto-save error: ${
            error instanceof Error ? error.message : String(error)
          }`,
          { duration: 3000 }
        );
      } finally {
        setIsAutoSavingSchedule(false);
      }
    }, 500); // 500ms debounce
  }, [user?.id, isConfigSaved, config.githubConfig, config.giteaConfig, config.cleanupConfig]);

  // Auto-save function specifically for cleanup config changes
  const autoSaveCleanupConfig = useCallback(async (cleanupConfig: DatabaseCleanupConfig) => {
    if (!user?.id || !isConfigSaved) return; // Only auto-save if config was previously saved

    // Clear any existing timeout
    if (autoSaveCleanupTimeoutRef.current) {
      clearTimeout(autoSaveCleanupTimeoutRef.current);
    }

    // Debounce the auto-save to prevent excessive API calls
    autoSaveCleanupTimeoutRef.current = setTimeout(async () => {
      setIsAutoSavingCleanup(true);

      const reqPayload: SaveConfigApiRequest = {
        userId: user.id!,
        githubConfig: config.githubConfig,
        giteaConfig: config.giteaConfig,
        scheduleConfig: config.scheduleConfig,
        cleanupConfig: cleanupConfig,
      };

      try {
        const response = await fetch('/api/config', {
          method: 'POST',
          headers: { 'Content-Type': 'application/json' },
          body: JSON.stringify(reqPayload),
        });
        const result: SaveConfigApiResponse = await response.json();

        if (result.success) {
          // Silent success - no toast for auto-save
          // Invalidate config cache so other components get fresh data
          invalidateConfigCache();

          // Fetch updated config to get the recalculated nextRun time
          try {
            const updatedResponse = await apiRequest<ConfigApiResponse>(
              `/config?userId=${user.id}`,
              { method: 'GET' },
            );
            if (updatedResponse && !updatedResponse.error) {
              setConfig(prev => ({
                ...prev,
                cleanupConfig: updatedResponse.cleanupConfig || prev.cleanupConfig,
              }));
            }
          } catch (fetchError) {
            console.warn('Failed to fetch updated config after auto-save:', fetchError);
          }
        } else {
          toast.error(
            `Auto-save failed: ${result.message || 'Unknown error'}`,
            { duration: 3000 }
          );
        }
      } catch (error) {
        toast.error(
          `Auto-save error: ${
            error instanceof Error ? error.message : String(error)
          }`,
          { duration: 3000 }
        );
      } finally {
        setIsAutoSavingCleanup(false);
      }
    }, 500); // 500ms debounce
  }, [user?.id, isConfigSaved, config.githubConfig, config.giteaConfig, config.scheduleConfig]);

  // Cleanup timeouts on unmount
  useEffect(() => {
    return () => {
      if (autoSaveScheduleTimeoutRef.current) {
        clearTimeout(autoSaveScheduleTimeoutRef.current);
      }
      if (autoSaveCleanupTimeoutRef.current) {
        clearTimeout(autoSaveCleanupTimeoutRef.current);
      }
    };
  }, []);

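Both auto-save callbacks implement the same debounce idiom: cancel the pending timer, then schedule the real save 500 ms out, so a burst of checkbox and dropdown changes collapses into a single request. A generic sketch of the idiom (illustrative only; the component keeps a separate ref per config section so schedule and cleanup saves cannot cancel each other):

function makeDebounced<T>(save: (value: T) => void | Promise<void>, delayMs = 500) {
  let handle: ReturnType<typeof setTimeout> | null = null;
  return (value: T) => {
    if (handle) clearTimeout(handle); // drop the previously queued save
    handle = setTimeout(() => save(value), delayMs);
  };
}
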
  useEffect(() => {
    if (!user?.id) return;

    const fetchConfig = async () => {
      setIsLoading(true);
@@ -177,6 +314,8 @@ export function ConfigTabs() {
            response.giteaConfig || config.giteaConfig,
          scheduleConfig:
            response.scheduleConfig || config.scheduleConfig,
          cleanupConfig:
            response.cleanupConfig || config.cleanupConfig,
        });
        if (response.id) setIsConfigSaved(true);
      }
@@ -190,84 +329,59 @@ export function ConfigTabs() {
    };

    fetchConfig();
  }, [user?.id]); // Only depend on user.id, not the entire user object

  function ConfigCardSkeleton() {
    return (
      <div className="space-y-6">
        {/* Header section */}
        <div className="flex flex-row justify-between items-start">
          <div className="flex flex-col gap-y-1.5">
            <Skeleton className="h-8 w-48" />
            <Skeleton className="h-4 w-72" />
          </div>
          <div className="flex gap-x-4">
            <Skeleton className="h-10 w-36" />
            <Skeleton className="h-10 w-36" />
          </div>
        </div>

        {/* Content section */}
        <div className="flex flex-col gap-y-4">
          <div className="flex gap-x-4">
            <div className="w-1/2 border rounded-lg p-4">
              <div className="flex justify-between items-center mb-4">
                <Skeleton className="h-6 w-40" />
                <Skeleton className="h-9 w-32" />
              </div>
              <div className="space-y-4">
                <Skeleton className="h-20 w-full" />
                <Skeleton className="h-20 w-full" />
                <Skeleton className="h-20 w-full" />
                <Skeleton className="h-20 w-full" />
              </div>
            </div>
            <div className="w-1/2 border rounded-lg p-4">
              <div className="flex justify-between items-center mb-4">
                <Skeleton className="h-6 w-40" />
                <Skeleton className="h-9 w-32" />
              </div>
              <div className="space-y-4">
                <Skeleton className="h-20 w-full" />
                <Skeleton className="h-20 w-full" />
                <Skeleton className="h-32 w-full" />
              </div>
            </div>
          </div>
          <div className="flex gap-x-4">
            <div className="w-1/2 border rounded-lg p-4">
              <div className="space-y-4">
                <Skeleton className="h-8 w-48" />
                <Skeleton className="h-16 w-full" />
                <Skeleton className="h-8 w-32" />
              </div>
            </div>
            <div className="w-1/2 border rounded-lg p-4">
              <div className="space-y-4">
                <Skeleton className="h-8 w-48" />
                <Skeleton className="h-16 w-full" />
@@ -275,107 +389,96 @@ services:
              </div>
            </div>
          </div>
        </div>
      </div>
    );
  }

  return isLoading ? (
    <div className="space-y-6">
      <ConfigCardSkeleton />
    </div>
  ) : (
    <div className="space-y-6">
      {/* Header section */}
      <div className="flex flex-row justify-between items-start">
        <div className="flex flex-col gap-y-1.5">
          <h1 className="text-2xl font-semibold leading-none tracking-tight">
            Configuration Settings
          </h1>
          <p className="text-sm text-muted-foreground">
            Configure your GitHub and Gitea connections, and set up automatic
            mirroring.
          </p>
        </div>
        <div className="flex gap-x-4">
          <Button
            onClick={handleImportGitHubData}
            disabled={isSyncing || !isConfigSaved}
            title={
              !isConfigSaved
                ? 'Save configuration first'
                : isSyncing
                ? 'Import in progress'
                : 'Import GitHub Data'
            }
          >
            {isSyncing ? (
              <>
                <RefreshCw className="h-4 w-4 animate-spin mr-1" />
                Import GitHub Data
              </>
            ) : (
              <>
                <RefreshCw className="h-4 w-4 mr-1" />
                Import GitHub Data
              </>
            )}
          </Button>
          <Button
            onClick={handleSaveConfig}
            disabled={!isConfigFormValid()}
            title={
              !isConfigFormValid()
                ? 'Please fill all required fields'
                : 'Save Configuration'
            }
          >
            Save Configuration
          </Button>
        </div>
      </div>

      {/* Content section */}
      <div className="flex flex-col gap-y-4">
        <div className="flex gap-x-4">
          <GitHubConfigForm
            config={config.githubConfig}
            setConfig={update =>
              setConfig(prev => ({
                ...prev,
                githubConfig:
                  typeof update === 'function'
                    ? update(prev.githubConfig)
                    : update,
              }))
            }
          />
          <GiteaConfigForm
            config={config.giteaConfig}
            setConfig={update =>
              setConfig(prev => ({
                ...prev,
                giteaConfig:
                  typeof update === 'function'
                    ? update(prev.giteaConfig)
                    : update,
              }))
            }
          />
        </div>
        <div className="flex gap-x-4">
          <div className="w-1/2">
            <ScheduleConfigForm
              config={config.scheduleConfig}
              setConfig={update =>
@@ -387,35 +490,28 @@ services:
                    : update,
                }))
              }
              onAutoSave={autoSaveScheduleConfig}
              isAutoSaving={isAutoSavingSchedule}
            />
          </div>
          <div className="w-1/2">
            <DatabaseCleanupConfigForm
              config={config.cleanupConfig}
              setConfig={update =>
                setConfig(prev => ({
                  ...prev,
                  cleanupConfig:
                    typeof update === 'function'
                      ? update(prev.cleanupConfig)
                      : update,
                }))
              }
              onAutoSave={autoSaveCleanupConfig}
              isAutoSaving={isAutoSavingCleanup}
            />
          </div>
        </div>
      </div>
    </div>
  );
}

src/components/config/DatabaseCleanupConfigForm.tsx (new file, 201 lines)
@@ -0,0 +1,201 @@
import { Card, CardContent } from "@/components/ui/card";
import { Checkbox } from "../ui/checkbox";
import type { DatabaseCleanupConfig } from "@/types/config";
import { formatDate } from "@/lib/utils";
import {
  Select,
  SelectContent,
  SelectItem,
  SelectTrigger,
  SelectValue,
} from "../ui/select";
import { RefreshCw, Database } from "lucide-react";

interface DatabaseCleanupConfigFormProps {
  config: DatabaseCleanupConfig;
  setConfig: React.Dispatch<React.SetStateAction<DatabaseCleanupConfig>>;
  onAutoSave?: (config: DatabaseCleanupConfig) => void;
  isAutoSaving?: boolean;
}

// Helper to calculate cleanup interval in hours (should match backend logic)
function calculateCleanupInterval(retentionSeconds: number): number {
  const retentionDays = retentionSeconds / (24 * 60 * 60);
  if (retentionDays <= 1) {
    return 6;
  } else if (retentionDays <= 3) {
    return 12;
  } else if (retentionDays <= 7) {
    return 24;
  } else if (retentionDays <= 30) {
    return 48;
  } else {
    return 168;
  }
}

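Worked examples for the mapping above, using the retention options the form offers (retention in seconds in, cleanup cadence in hours out):

calculateCleanupInterval(86_400);    // 1 day retention  -> 6   (cleanup every 6 h)
calculateCleanupInterval(259_200);   // 3 days           -> 12
calculateCleanupInterval(604_800);   // 7 days           -> 24  (daily)
calculateCleanupInterval(2_592_000); // 30 days          -> 48  (every 2 days)
calculateCleanupInterval(7_776_000); // 90 days          -> 168 (weekly)
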
export function DatabaseCleanupConfigForm({
  config,
  setConfig,
  onAutoSave,
  isAutoSaving = false,
}: DatabaseCleanupConfigFormProps) {
  // Optimistically update nextRun when enabled or retention changes
  const handleChange = (
    e: React.ChangeEvent<HTMLInputElement | HTMLSelectElement>
  ) => {
    const { name, value, type } = e.target;
    let newConfig = {
      ...config,
      [name]: type === "checkbox" ? (e.target as HTMLInputElement).checked : value,
    };

    // If enabling or changing retention, recalculate nextRun
    if (
      (name === "enabled" && (e.target as HTMLInputElement).checked) ||
      (name === "retentionDays" && config.enabled)
    ) {
      const now = new Date();
      const retentionSeconds =
        name === "retentionDays"
          ? Number(value)
          : Number(newConfig.retentionDays);
      const intervalHours = calculateCleanupInterval(retentionSeconds);
      const nextRun = new Date(now.getTime() + intervalHours * 60 * 60 * 1000);
      newConfig = {
        ...newConfig,
        nextRun,
      };
    }
    // If disabling, clear nextRun
    if (name === "enabled" && !(e.target as HTMLInputElement).checked) {
      newConfig = {
        ...newConfig,
        nextRun: undefined,
      };
    }

    setConfig(newConfig);
    if (onAutoSave) {
      onAutoSave(newConfig);
    }
  };

  // Predefined retention periods (in seconds, like schedule intervals)
  const retentionOptions: { value: number; label: string }[] = [
    { value: 86400, label: "1 day" },      // 24 * 60 * 60
    { value: 259200, label: "3 days" },    // 3 * 24 * 60 * 60
    { value: 604800, label: "7 days" },    // 7 * 24 * 60 * 60
    { value: 1209600, label: "14 days" },  // 14 * 24 * 60 * 60
    { value: 2592000, label: "30 days" },  // 30 * 24 * 60 * 60
    { value: 5184000, label: "60 days" },  // 60 * 24 * 60 * 60
    { value: 7776000, label: "90 days" },  // 90 * 24 * 60 * 60
  ];

  return (
    <Card>
      <CardContent className="pt-6 relative">
        {isAutoSaving && (
          <div className="absolute top-4 right-4 flex items-center text-sm text-muted-foreground">
            <RefreshCw className="h-3 w-3 animate-spin mr-1" />
            <span className="text-xs">Auto-saving...</span>
          </div>
        )}
        <div className="flex flex-col gap-y-4">
          <div className="flex items-center">
            <Checkbox
              id="cleanup-enabled"
              name="enabled"
              checked={config.enabled}
              onCheckedChange={(checked) =>
                handleChange({
                  target: {
                    name: "enabled",
                    type: "checkbox",
                    checked: Boolean(checked),
                    value: "",
                  },
                } as React.ChangeEvent<HTMLInputElement>)
              }
            />
            <label
              htmlFor="cleanup-enabled"
              className="select-none ml-2 block text-sm font-medium"
            >
              <div className="flex items-center gap-2">
                <Database className="h-4 w-4" />
                Enable Automatic Database Cleanup
              </div>
            </label>
          </div>

          {config.enabled && (
            <div>
              <label className="block text-sm font-medium mb-2">
                Data Retention Period
              </label>

              <Select
                name="retentionDays"
                value={String(config.retentionDays)}
                onValueChange={(value) =>
                  handleChange({
                    target: { name: "retentionDays", value },
                  } as React.ChangeEvent<HTMLInputElement>)
                }
              >
                <SelectTrigger className="w-full border border-input dark:bg-background dark:hover:bg-background">
                  <SelectValue placeholder="Select retention period" />
                </SelectTrigger>
                <SelectContent className="bg-background text-foreground border border-input shadow-sm">
                  {retentionOptions.map((option) => (
                    <SelectItem
                      key={option.value}
                      value={option.value.toString()}
                      className="cursor-pointer text-sm px-3 py-2 hover:bg-accent focus:bg-accent focus:text-accent-foreground"
                    >
                      {option.label}
                    </SelectItem>
                  ))}
                </SelectContent>
              </Select>

              <p className="text-xs text-muted-foreground mt-1">
                Activities and events older than this period will be automatically deleted.
              </p>
              <div className="mt-2 p-2 bg-muted/50 rounded-md">
                <p className="text-xs text-muted-foreground">
                  <strong>Cleanup Frequency:</strong> The cleanup process runs automatically at optimal intervals:
                  shorter retention periods trigger more frequent cleanups, longer periods trigger less frequent cleanups.
                </p>
              </div>
            </div>
          )}

          <div className="flex gap-x-4">
            <div className="flex-1">
              <label className="block text-sm font-medium mb-1">Last Cleanup</label>
              <div className="text-sm">
                {config.lastRun ? formatDate(config.lastRun) : "Never"}
              </div>
            </div>

            {config.enabled && (
              <div className="flex-1">
                <label className="block text-sm font-medium mb-1">Next Cleanup</label>
                <div className="text-sm">
                  {config.nextRun
                    ? formatDate(config.nextRun)
                    : config.enabled
                    ? "Calculating..."
                    : "Never"}
                </div>
              </div>
            )}
          </div>
        </div>
      </CardContent>
    </Card>
  );
}
@@ -9,40 +9,40 @@ import {
  SelectTrigger,
  SelectValue,
} from "../ui/select";
import { RefreshCw } from "lucide-react";

interface ScheduleConfigFormProps {
  config: ScheduleConfig;
  setConfig: React.Dispatch<React.SetStateAction<ScheduleConfig>>;
  onAutoSave?: (config: ScheduleConfig) => void;
  isAutoSaving?: boolean;
}

export function ScheduleConfigForm({
  config,
  setConfig,
  onAutoSave,
  isAutoSaving = false,
}: ScheduleConfigFormProps) {
  const handleChange = (
    e: React.ChangeEvent<HTMLInputElement | HTMLSelectElement>
  ) => {
    const { name, value, type } = e.target;
    const newConfig = {
      ...config,
      [name]:
        type === "checkbox" ? (e.target as HTMLInputElement).checked : value,
    };
    setConfig(newConfig);

    // Trigger auto-save for schedule config changes
    if (onAutoSave) {
      onAutoSave(newConfig);
    }
  };

  // Predefined intervals
  const intervals: { value: number; label: string }[] = [
    // { value: 120, label: "2 minutes" }, //for testing
    { value: 900, label: "15 minutes" },
    { value: 1800, label: "30 minutes" },
    { value: 3600, label: "1 hour" },
    { value: 7200, label: "2 hours" },
    { value: 14400, label: "4 hours" },
@@ -55,7 +55,13 @@ export function ScheduleConfigForm({

  return (
    <Card>
      <CardContent className="pt-6 relative">
        {isAutoSaving && (
          <div className="absolute top-4 right-4 flex items-center text-sm text-muted-foreground">
            <RefreshCw className="h-3 w-3 animate-spin mr-1" />
            <span className="text-xs">Auto-saving...</span>
          </div>
        )}
        <div className="flex flex-col gap-y-4">
          <div className="flex items-center">
            <Checkbox
@@ -81,57 +87,69 @@ export function ScheduleConfigForm({
            </label>
          </div>

          {config.enabled && (
            <div>
              <label
                htmlFor="interval"
                className="block text-sm font-medium mb-1.5"
              >
                Mirroring Interval
              </label>

              <Select
                name="interval"
                value={String(config.interval)}
                onValueChange={(value) =>
                  handleChange({
                    target: { name: "interval", value },
                  } as React.ChangeEvent<HTMLInputElement>)
                }
              >
                <SelectTrigger className="w-full border border-input dark:bg-background dark:hover:bg-background">
                  <SelectValue placeholder="Select interval" />
                </SelectTrigger>
                <SelectContent className="bg-background text-foreground border border-input shadow-sm">
                  {intervals.map((interval) => (
                    <SelectItem
                      key={interval.value}
                      value={interval.value.toString()}
                      className="cursor-pointer text-sm px-3 py-2 hover:bg-accent focus:bg-accent focus:text-accent-foreground"
                    >
                      {interval.label}
                    </SelectItem>
                  ))}
                </SelectContent>
              </Select>

              <p className="text-xs text-muted-foreground mt-1">
                How often the mirroring process should run.
              </p>
              <div className="mt-2 p-2 bg-muted/50 rounded-md">
                <p className="text-xs text-muted-foreground">
                  <strong>Sync Schedule:</strong> Repositories will be synchronized at the specified interval.
                  Choose shorter intervals for frequently updated repositories, longer intervals for stable ones.
                </p>
              </div>
            </div>
          )}

          <div className="flex gap-x-4">
            <div className="flex-1">
              <label className="block text-sm font-medium mb-1">Last Sync</label>
              <div className="text-sm">
                {config.lastRun ? formatDate(config.lastRun) : "Never"}
              </div>
            </div>

            {config.enabled && (
              <div className="flex-1">
                <label className="block text-sm font-medium mb-1">Next Sync</label>
                <div className="text-sm">
                  {config.nextRun ? formatDate(config.nextRun) : "Never"}
                </div>
              </div>
            )}
          </div>
        </div>
      </CardContent>
    </Card>

@@ -2,7 +2,7 @@ import { StatusCard } from "./StatusCard";
import { RecentActivity } from "./RecentActivity";
import { RepositoryList } from "./RepositoryList";
import { GitFork, Clock, FlipHorizontal, Building2 } from "lucide-react";
import { useCallback, useEffect, useRef, useState } from "react";
import type { MirrorJob, Organization, Repository } from "@/lib/db/schema";
import { useAuth } from "@/hooks/useAuth";
import { apiRequest } from "@/lib/utils";
@@ -11,9 +11,18 @@ import { useSSE } from "@/hooks/useSEE";
import { toast } from "sonner";
import { Skeleton } from "@/components/ui/skeleton";
import { Card, CardContent, CardHeader, CardTitle } from "@/components/ui/card";
import { useLiveRefresh } from "@/hooks/useLiveRefresh";
import { usePageVisibility } from "@/hooks/usePageVisibility";
import { useConfigStatus } from "@/hooks/useConfigStatus";
import { useNavigation } from "@/components/layout/MainLayout";

export function Dashboard() {
  const { user } = useAuth();
  const { registerRefreshCallback } = useLiveRefresh();
  const isPageVisible = usePageVisibility();
  const { isFullyConfigured } = useConfigStatus();
  const { navigationKey } = useNavigation();

  const [repositories, setRepositories] = useState<Repository[]>([]);
  const [organizations, setOrganizations] = useState<Organization[]>([]);
  const [activities, setActivities] = useState<MirrorJob[]>([]);
@@ -23,6 +32,10 @@ export function Dashboard() {
  const [mirroredCount, setMirroredCount] = useState<number>(0);
  const [lastSync, setLastSync] = useState<Date | null>(null);

  // Dashboard auto-refresh timer (30 seconds)
  const dashboardTimerRef = useRef<NodeJS.Timeout | null>(null);
  const DASHBOARD_REFRESH_INTERVAL = 30000; // 30 seconds

  // Create a stable callback using useCallback
  const handleNewMessage = useCallback((data: MirrorJob) => {
    if (data.repositoryId) {
@@ -54,44 +67,101 @@ export function Dashboard() {
    onMessage: handleNewMessage,
  });

  // Extract fetchDashboardData as a stable callback
  const fetchDashboardData = useCallback(async (showToast = false) => {
    try {
      if (!user?.id) {
        return false;
      }

      // Don't fetch data if configuration is not complete
      if (!isFullyConfigured) {
        if (showToast) {
          toast.info("Please configure GitHub and Gitea settings first");
        }
        return false;
      }

      const response = await apiRequest<DashboardApiResponse>(
        `/dashboard?userId=${user.id}`,
        {
          method: "GET",
        }
      );

      if (response.success) {
        setRepositories(response.repositories);
        setOrganizations(response.organizations);
        setActivities(response.activities);
        setRepoCount(response.repoCount);
        setOrgCount(response.orgCount);
        setMirroredCount(response.mirroredCount);
        setLastSync(response.lastSync);

        if (showToast) {
          toast.success("Dashboard data refreshed successfully");
        }
        return true;
      } else {
        toast.error(response.error || "Error fetching dashboard data");
        return false;
      }
    } catch (error) {
      toast.error(
        error instanceof Error
          ? error.message
          : "Error fetching dashboard data"
      );
      return false;
    } finally {
      setIsLoading(false);
    }
  }, [user?.id, isFullyConfigured]); // Only depend on user.id, not entire user object

  // Initial data fetch and reset loading state when component becomes active
  useEffect(() => {
    // Reset loading state when component mounts or becomes active
    setIsLoading(true);
    fetchDashboardData();
  }, [fetchDashboardData, navigationKey]); // Include navigationKey to trigger on navigation

  // Setup dashboard auto-refresh (30 seconds) and register with live refresh
  useEffect(() => {
    // Clear any existing timer
    if (dashboardTimerRef.current) {
      clearInterval(dashboardTimerRef.current);
      dashboardTimerRef.current = null;
    }

    // Set up 30-second auto-refresh only when page is visible and configuration is complete
    if (isPageVisible && isFullyConfigured) {
      dashboardTimerRef.current = setInterval(() => {
        fetchDashboardData();
      }, DASHBOARD_REFRESH_INTERVAL);
    }

    // Cleanup on unmount or when page becomes invisible
    return () => {
      if (dashboardTimerRef.current) {
        clearInterval(dashboardTimerRef.current);
        dashboardTimerRef.current = null;
      }
    };
  }, [isPageVisible, isFullyConfigured, fetchDashboardData]);

  // Register with global live refresh system
  useEffect(() => {
    // Only register if configuration is complete
    if (!isFullyConfigured) {
      return;
    }

    const unregister = registerRefreshCallback(() => {
      fetchDashboardData();
    });

    return unregister;
  }, [registerRefreshCallback, fetchDashboardData, isFullyConfigured]);

// Status Card Skeleton component
|
||||
function StatusCardSkeleton() {
|
||||
@@ -150,6 +220,7 @@ export function Dashboard() {
|
||||
</div>
|
||||
) : (
|
||||
<div className="flex flex-col gap-y-6">
|
||||
|
||||
<div className="grid grid-cols-1 md:grid-cols-2 lg:grid-cols-4 gap-6">
|
||||
<StatusCard
|
||||
title="Total Repositories"
|
||||
|
||||
@@ -1,15 +1,50 @@
|
||||
import { Card, CardContent, CardHeader, CardTitle } from "@/components/ui/card";
|
||||
import { Button } from "@/components/ui/button";
|
||||
import { GitFork } from "lucide-react";
|
||||
import { SiGithub } from "react-icons/si";
|
||||
import { SiGithub, SiGitea } from "react-icons/si";
|
||||
import type { Repository } from "@/lib/db/schema";
|
||||
import { getStatusColor } from "@/lib/utils";
|
||||
import { useGiteaConfig } from "@/hooks/useGiteaConfig";
|
||||
|
||||
interface RepositoryListProps {
|
||||
repositories: Repository[];
|
||||
}
|
||||
|
||||
export function RepositoryList({ repositories }: RepositoryListProps) {
|
||||
const { giteaConfig } = useGiteaConfig();
|
||||
|
||||
// Helper function to construct Gitea repository URL
|
||||
const getGiteaRepoUrl = (repository: Repository): string | null => {
|
||||
if (!giteaConfig?.url) {
|
||||
return null;
|
||||
}
|
||||
|
||||
// Only provide Gitea links for repositories that have been or are being mirrored
|
||||
const validStatuses = ['mirroring', 'mirrored', 'syncing', 'synced'];
|
||||
if (!validStatuses.includes(repository.status)) {
|
||||
return null;
|
||||
}
|
||||
|
||||
// Use mirroredLocation if available, otherwise construct from repository data
|
||||
let repoPath: string;
|
||||
if (repository.mirroredLocation) {
|
||||
repoPath = repository.mirroredLocation;
|
||||
} else {
|
||||
// Fallback: construct the path based on repository data
|
||||
// If repository has organization and preserveOrgStructure would be true, use org
|
||||
// Otherwise use the repository owner
|
||||
const owner = repository.organization || repository.owner;
|
||||
repoPath = `${owner}/${repository.name}`;
|
||||
}
|
||||
|
||||
// Ensure the base URL doesn't have a trailing slash
|
||||
const baseUrl = giteaConfig.url.endsWith('/')
|
||||
? giteaConfig.url.slice(0, -1)
|
||||
: giteaConfig.url;
|
||||
|
||||
return `${baseUrl}/${repoPath}`;
|
||||
};
|
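The URL construction above is worth tracing once in isolation. A standalone sketch of the same logic, with made-up sample values (the instance URL and repository are hypothetical, not from any real configuration):

```ts
// Sketch of getGiteaRepoUrl above; values are illustrative only.
const giteaBase = "https://gitea.example.com/";
const repo = { organization: "github-mirrors", owner: "alice", name: "widget", mirroredLocation: "" };

// mirroredLocation wins when set; otherwise organization falls back to owner.
const repoPath = repo.mirroredLocation || `${repo.organization || repo.owner}/${repo.name}`;

// Strip a trailing slash so the join never yields "//".
const baseUrl = giteaBase.endsWith("/") ? giteaBase.slice(0, -1) : giteaBase;

console.log(`${baseUrl}/${repoPath}`); // https://gitea.example.com/github-mirrors/widget
```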
||||
|
||||
return (
|
||||
<Card className="w-full">
|
||||
{/* calculating the max height based on the other elements and sizing styles */}
|
||||
@@ -69,14 +104,48 @@ export function RepositoryList({ repositories }: RepositoryListProps) {
|
||||
{/* setting the minimum width to 3rem corresponding to the largest status (mirrored) so that all are left aligned */}
|
||||
{repo.status}
|
||||
</span>
|
||||
<Button variant="ghost" size="icon">
|
||||
<GitFork className="h-4 w-4" />
|
||||
</Button>
|
||||
{(() => {
|
||||
const giteaUrl = getGiteaRepoUrl(repo);
|
||||
|
||||
// Determine tooltip based on status and configuration
|
||||
let tooltip: string;
|
||||
if (!giteaConfig?.url) {
|
||||
tooltip = "Gitea not configured";
|
||||
} else if (repo.status === 'imported') {
|
||||
tooltip = "Repository not yet mirrored to Gitea";
|
||||
} else if (repo.status === 'failed') {
|
||||
tooltip = "Repository mirroring failed";
|
||||
} else if (repo.status === 'mirroring') {
|
||||
tooltip = "Repository is being mirrored to Gitea";
|
||||
} else if (giteaUrl) {
|
||||
tooltip = "View on Gitea";
|
||||
} else {
|
||||
tooltip = "Gitea repository not available";
|
||||
}
|
||||
|
||||
return giteaUrl ? (
|
||||
<Button variant="ghost" size="icon" asChild>
|
||||
<a
|
||||
href={giteaUrl}
|
||||
target="_blank"
|
||||
rel="noopener noreferrer"
|
||||
title={tooltip}
|
||||
>
|
||||
<SiGitea className="h-4 w-4" />
|
||||
</a>
|
||||
</Button>
|
||||
) : (
|
||||
<Button variant="ghost" size="icon" disabled title={tooltip}>
|
||||
<SiGitea className="h-4 w-4" />
|
||||
</Button>
|
||||
);
|
||||
})()}
|
||||
<Button variant="ghost" size="icon" asChild>
|
||||
<a
|
||||
href={repo.url}
|
||||
target="_blank"
|
||||
rel="noopener noreferrer"
|
||||
title="View on GitHub"
|
||||
>
|
||||
<SiGithub className="h-4 w-4" />
|
||||
</a>
|
||||
|
||||
@@ -5,9 +5,35 @@ import { ModeToggle } from "@/components/theme/ModeToggle";
|
||||
import { Avatar, AvatarFallback, AvatarImage } from "../ui/avatar";
|
||||
import { toast } from "sonner";
|
||||
import { Skeleton } from "@/components/ui/skeleton";
|
||||
import { useLiveRefresh } from "@/hooks/useLiveRefresh";
|
||||
import { useConfigStatus } from "@/hooks/useConfigStatus";
|
||||
|
||||
export function Header() {
|
||||
interface HeaderProps {
|
||||
currentPage?: "dashboard" | "repositories" | "organizations" | "configuration" | "activity-log";
|
||||
onNavigate?: (page: string) => void;
|
||||
}
|
||||
|
||||
export function Header({ currentPage, onNavigate }: HeaderProps) {
|
||||
const { user, logout, isLoading } = useAuth();
|
||||
const { isLiveEnabled, toggleLive } = useLiveRefresh();
|
||||
const { isFullyConfigured, isLoading: configLoading } = useConfigStatus();
|
||||
|
||||
// Show Live button on all pages except configuration
|
||||
const showLiveButton = currentPage && currentPage !== "configuration";
|
||||
|
||||
// Determine button state and tooltip
|
||||
const isLiveActive = isLiveEnabled && isFullyConfigured;
|
||||
const getTooltip = () => {
|
||||
if (configLoading) {
|
||||
return 'Loading configuration...';
|
||||
}
|
||||
if (!isFullyConfigured) {
|
||||
return isLiveEnabled
|
||||
? 'Live refresh enabled but requires GitHub and Gitea configuration to function'
|
||||
: 'Enable live refresh (requires GitHub and Gitea configuration)';
|
||||
}
|
||||
return isLiveEnabled ? 'Disable live refresh' : 'Enable live refresh';
|
||||
};
|
||||
|
||||
const handleLogout = async () => {
|
||||
toast.success("Logged out successfully");
|
||||
@@ -29,12 +55,41 @@ export function Header() {
|
||||
return (
|
||||
<header className="border-b bg-background">
|
||||
<div className="flex h-[4.5rem] items-center justify-between px-6">
|
||||
<a href="/" className="flex items-center gap-2 py-1">
|
||||
<button
|
||||
onClick={() => {
|
||||
if (currentPage !== 'dashboard') {
|
||||
window.history.pushState({}, '', '/');
|
||||
onNavigate?.('dashboard');
|
||||
}
|
||||
}}
|
||||
className="flex items-center gap-2 py-1 hover:opacity-80 transition-opacity"
|
||||
>
|
||||
<SiGitea className="h-6 w-6" />
|
||||
<span className="text-xl font-bold">Gitea Mirror</span>
|
||||
</a>
|
||||
</button>
|
||||
|
||||
<div className="flex items-center gap-4">
|
||||
{showLiveButton && (
|
||||
<Button
|
||||
variant="outline"
|
||||
size="lg"
|
||||
className="flex items-center gap-2"
|
||||
onClick={toggleLive}
|
||||
title={getTooltip()}
|
||||
>
|
||||
<div className={`w-3 h-3 rounded-full ${
|
||||
configLoading
|
||||
? 'bg-yellow-400 animate-pulse'
|
||||
: isLiveActive
|
||||
? 'bg-emerald-400 animate-pulse'
|
||||
: isLiveEnabled
|
||||
? 'bg-orange-400'
|
||||
: 'bg-gray-500'
|
||||
}`} />
|
||||
<span>LIVE</span>
|
||||
</Button>
|
||||
)}
|
||||
|
||||
<ModeToggle />
|
||||
|
||||
{isLoading ? (
|
||||
|
||||
@@ -1,3 +1,4 @@
|
||||
import { useState, useEffect, createContext, useContext } from "react";
|
||||
import { Header } from "./Header";
|
||||
import { Sidebar } from "./Sidebar";
|
||||
import { Dashboard } from "@/components/dashboard/Dashboard";
|
||||
@@ -9,6 +10,12 @@ import { Organization } from "../organizations/Organization";
|
||||
import { Toaster } from "@/components/ui/sonner";
|
||||
import { useAuth } from "@/hooks/useAuth";
|
||||
import { useRepoSync } from "@/hooks/useSyncRepo";
|
||||
import { useConfigStatus } from "@/hooks/useConfigStatus";
|
||||
|
||||
// Navigation context to signal when navigation happens
|
||||
const NavigationContext = createContext<{ navigationKey: number }>({ navigationKey: 0 });
|
||||
|
||||
export const useNavigation = () => useContext(NavigationContext);
|
||||
|
||||
interface AppProps {
|
||||
page:
|
||||
@@ -32,8 +39,12 @@ export default function App({ page }: AppProps) {
|
||||
);
|
||||
}
|
||||
|
||||
function AppWithProviders({ page }: AppProps) {
|
||||
const { user } = useAuth();
|
||||
function AppWithProviders({ page: initialPage }: AppProps) {
|
||||
const { user, isLoading: authLoading } = useAuth();
|
||||
const { isLoading: configLoading } = useConfigStatus();
|
||||
const [currentPage, setCurrentPage] = useState<AppProps['page']>(initialPage);
|
||||
const [navigationKey, setNavigationKey] = useState(0);
|
||||
|
||||
useRepoSync({
|
||||
userId: user?.id,
|
||||
enabled: user?.syncEnabled,
|
||||
@@ -42,20 +53,65 @@ function AppWithProviders({ page }: AppProps) {
|
||||
nextSync: user?.nextSync,
|
||||
});
|
||||
|
||||
// Handle navigation from sidebar
|
||||
const handleNavigation = (pageName: string) => {
|
||||
setCurrentPage(pageName as AppProps['page']);
|
||||
// Increment navigation key to force components to refresh their loading state
|
||||
setNavigationKey(prev => prev + 1);
|
||||
};
|
||||
|
||||
// Handle browser back/forward navigation
|
||||
useEffect(() => {
|
||||
const handlePopState = () => {
|
||||
const path = window.location.pathname;
|
||||
const pageMap: Record<string, AppProps['page']> = {
|
||||
'/': 'dashboard',
|
||||
'/repositories': 'repositories',
|
||||
'/organizations': 'organizations',
|
||||
'/config': 'configuration',
|
||||
'/activity': 'activity-log'
|
||||
};
|
||||
|
||||
const pageName = pageMap[path] || 'dashboard';
|
||||
setCurrentPage(pageName);
|
||||
// Also increment navigation key for browser navigation to trigger loading states
|
||||
setNavigationKey(prev => prev + 1);
|
||||
};
|
||||
|
||||
window.addEventListener('popstate', handlePopState);
|
||||
return () => window.removeEventListener('popstate', handlePopState);
|
||||
}, []);
|
||||
|
||||
// Show loading state only during initial auth/config loading
|
||||
const isInitialLoading = authLoading || (configLoading && !user);
|
||||
|
||||
if (isInitialLoading) {
|
||||
return (
|
||||
<main className="flex min-h-screen flex-col items-center justify-center">
|
||||
<div className="flex flex-col items-center gap-4">
|
||||
<div className="animate-spin rounded-full h-8 w-8 border-b-2 border-primary"></div>
|
||||
<p className="text-sm text-muted-foreground">Loading...</p>
|
||||
</div>
|
||||
</main>
|
||||
);
|
||||
}
|
||||
|
||||
return (
|
||||
<main className="flex min-h-screen flex-col">
|
||||
<Header />
|
||||
<div className="flex flex-1">
|
||||
<Sidebar />
|
||||
<section className="flex-1 p-6 overflow-y-auto h-[calc(100dvh-4.55rem)]">
|
||||
{page === "dashboard" && <Dashboard />}
|
||||
{page === "repositories" && <Repository />}
|
||||
{page === "organizations" && <Organization />}
|
||||
{page === "configuration" && <ConfigTabs />}
|
||||
{page === "activity-log" && <ActivityLog />}
|
||||
</section>
|
||||
</div>
|
||||
<Toaster />
|
||||
</main>
|
||||
<NavigationContext.Provider value={{ navigationKey }}>
|
||||
<main className="flex min-h-screen flex-col">
|
||||
<Header currentPage={currentPage} onNavigate={handleNavigation} />
|
||||
<div className="flex flex-1">
|
||||
<Sidebar onNavigate={handleNavigation} />
|
||||
<section className="flex-1 p-6 overflow-y-auto h-[calc(100dvh-4.55rem)]">
|
||||
{currentPage === "dashboard" && <Dashboard />}
|
||||
{currentPage === "repositories" && <Repository />}
|
||||
{currentPage === "organizations" && <Organization />}
|
||||
{currentPage === "configuration" && <ConfigTabs />}
|
||||
{currentPage === "activity-log" && <ActivityLog />}
|
||||
</section>
|
||||
</div>
|
||||
<Toaster />
|
||||
</main>
|
||||
</NavigationContext.Provider>
|
||||
);
|
||||
}
|
||||
|
||||
@@ -1,13 +1,16 @@
|
||||
import * as React from "react";
|
||||
import { AuthProvider } from "@/hooks/useAuth";
|
||||
import { TooltipProvider } from "@/components/ui/tooltip";
|
||||
import { LiveRefreshProvider } from "@/hooks/useLiveRefresh";
|
||||
|
||||
export default function Providers({ children }: { children: React.ReactNode }) {
|
||||
return (
|
||||
<AuthProvider>
|
||||
<TooltipProvider>
|
||||
{children}
|
||||
</TooltipProvider>
|
||||
<LiveRefreshProvider>
|
||||
<TooltipProvider>
|
||||
{children}
|
||||
</TooltipProvider>
|
||||
</LiveRefreshProvider>
|
||||
</AuthProvider>
|
||||
);
|
||||
}
|
||||
|
||||
@@ -2,12 +2,14 @@ import { useEffect, useState } from "react";
|
||||
import { cn } from "@/lib/utils";
|
||||
import { ExternalLink } from "lucide-react";
|
||||
import { links } from "@/data/Sidebar";
|
||||
import { VersionInfo } from "./VersionInfo";
|
||||
|
||||
interface SidebarProps {
|
||||
className?: string;
|
||||
onNavigate?: (page: string) => void;
|
||||
}
|
||||
|
||||
export function Sidebar({ className }: SidebarProps) {
|
||||
export function Sidebar({ className, onNavigate }: SidebarProps) {
|
||||
const [currentPath, setCurrentPath] = useState<string>("");
|
||||
|
||||
useEffect(() => {
|
||||
@@ -17,20 +19,53 @@ export function Sidebar({ className }: SidebarProps) {
|
||||
console.log("Hydrated path:", path); // Debug: confirm the path is available after hydration
|
||||
}, []);
|
||||
|
||||
// Listen for URL changes (browser back/forward)
|
||||
useEffect(() => {
|
||||
const handlePopState = () => {
|
||||
setCurrentPath(window.location.pathname);
|
||||
};
|
||||
|
||||
window.addEventListener('popstate', handlePopState);
|
||||
return () => window.removeEventListener('popstate', handlePopState);
|
||||
}, []);
|
||||
|
||||
const handleNavigation = (href: string, event: React.MouseEvent) => {
|
||||
event.preventDefault();
|
||||
|
||||
// Don't navigate if already on the same page
|
||||
if (currentPath === href) return;
|
||||
|
||||
// Update URL without page reload
|
||||
window.history.pushState({}, '', href);
|
||||
setCurrentPath(href);
|
||||
|
||||
// Map href to page name for the parent component
|
||||
const pageMap: Record<string, string> = {
|
||||
'/': 'dashboard',
|
||||
'/repositories': 'repositories',
|
||||
'/organizations': 'organizations',
|
||||
'/config': 'configuration',
|
||||
'/activity': 'activity-log'
|
||||
};
|
||||
|
||||
const pageName = pageMap[href] || 'dashboard';
|
||||
onNavigate?.(pageName);
|
||||
};
|
||||
|
||||
return (
|
||||
<aside className={cn("w-64 border-r bg-background", className)}>
|
||||
<div className="flex flex-col h-full py-4">
|
||||
<div className="flex flex-col h-full pt-4">
|
||||
<nav className="flex flex-col gap-y-1 pl-2 pr-3">
|
||||
{links.map((link, index) => {
|
||||
const isActive = currentPath === link.href;
|
||||
const Icon = link.icon;
|
||||
|
||||
return (
|
||||
<a
|
||||
<button
|
||||
key={index}
|
||||
href={link.href}
|
||||
onClick={(e) => handleNavigation(link.href, e)}
|
||||
className={cn(
|
||||
"flex items-center gap-3 rounded-md px-3 py-2 text-sm font-medium transition-colors",
|
||||
"flex items-center gap-3 rounded-md px-3 py-2 text-sm font-medium transition-colors w-full text-left",
|
||||
isActive
|
||||
? "bg-primary text-primary-foreground"
|
||||
: "text-muted-foreground hover:bg-accent hover:text-accent-foreground"
|
||||
@@ -38,7 +73,7 @@ export function Sidebar({ className }: SidebarProps) {
|
||||
>
|
||||
<Icon className="h-4 w-4" />
|
||||
{link.label}
|
||||
</a>
|
||||
</button>
|
||||
);
|
||||
})}
|
||||
</nav>
|
||||
@@ -59,6 +94,7 @@ export function Sidebar({ className }: SidebarProps) {
|
||||
<ExternalLink className="h-3 w-3" />
|
||||
</a>
|
||||
</div>
|
||||
<VersionInfo />
|
||||
</div>
|
||||
</div>
|
||||
</aside>
|
||||
|
||||
src/components/layout/VersionInfo.tsx (new file, 49 lines)
@@ -0,0 +1,49 @@
|
||||
import { useEffect, useState } from "react";
|
||||
import { healthApi } from "@/lib/api";
|
||||
|
||||
export function VersionInfo() {
|
||||
const [versionInfo, setVersionInfo] = useState<{
|
||||
current: string;
|
||||
latest: string;
|
||||
updateAvailable: boolean;
|
||||
}>({
|
||||
current: "loading...",
|
||||
latest: "",
|
||||
updateAvailable: false
|
||||
});
|
||||
|
||||
useEffect(() => {
|
||||
const fetchVersion = async () => {
|
||||
try {
|
||||
const healthData = await healthApi.check();
|
||||
setVersionInfo({
|
||||
current: healthData.version || "unknown",
|
||||
latest: healthData.latestVersion || "unknown",
|
||||
updateAvailable: healthData.updateAvailable || false
|
||||
});
|
||||
} catch (error) {
|
||||
console.error("Failed to fetch version:", error);
|
||||
setVersionInfo({
|
||||
current: "unknown",
|
||||
latest: "",
|
||||
updateAvailable: false
|
||||
});
|
||||
}
|
||||
};
|
||||
|
||||
fetchVersion();
|
||||
}, []);
|
||||
|
||||
return (
|
||||
<div className="text-xs text-muted-foreground text-center pt-2 pb-3 border-t border-border mt-2">
|
||||
{versionInfo.updateAvailable ? (
|
||||
<div className="flex flex-col">
|
||||
<span>v{versionInfo.current}</span>
|
||||
<span className="text-primary">v{versionInfo.latest} available</span>
|
||||
</div>
|
||||
) : (
|
||||
<span>v{versionInfo.current}</span>
|
||||
)}
|
||||
</div>
|
||||
);
|
||||
}
|
||||
@@ -24,12 +24,16 @@ import type { MirrorOrgRequest, MirrorOrgResponse } from "@/types/mirror";
|
||||
import { useSSE } from "@/hooks/useSEE";
|
||||
import { useFilterParams } from "@/hooks/useFilterParams";
|
||||
import { toast } from "sonner";
|
||||
import { useConfigStatus } from "@/hooks/useConfigStatus";
|
||||
import { useNavigation } from "@/components/layout/MainLayout";
|
||||
|
||||
export function Organization() {
|
||||
const [organizations, setOrganizations] = useState<Organization[]>([]);
|
||||
const [isLoading, setIsLoading] = useState<boolean>(true);
|
||||
const [isDialogOpen, setIsDialogOpen] = useState<boolean>(false);
|
||||
const { user } = useAuth();
|
||||
const { isGitHubConfigured } = useConfigStatus();
|
||||
const { navigationKey } = useNavigation();
|
||||
const { filter, setFilter } = useFilterParams({
|
||||
searchTerm: "",
|
||||
membershipRole: "",
|
||||
@@ -59,7 +63,13 @@ export function Organization() {
|
||||
});
|
||||
|
||||
const fetchOrganizations = useCallback(async () => {
|
||||
if (!user || !user.id) {
|
||||
if (!user?.id) {
|
||||
return false;
|
||||
}
|
||||
|
||||
// Don't fetch organizations if GitHub is not configured
|
||||
if (!isGitHubConfigured) {
|
||||
setIsLoading(false);
|
||||
return false;
|
||||
}
|
||||
|
||||
@@ -88,11 +98,13 @@ export function Organization() {
|
||||
} finally {
|
||||
setIsLoading(false);
|
||||
}
|
||||
}, [user]);
|
||||
}, [user?.id, isGitHubConfigured]); // Depend on user.id and the GitHub config flag, not the entire user object
|
||||
|
||||
useEffect(() => {
|
||||
// Reset loading state when component becomes active
|
||||
setIsLoading(true);
|
||||
fetchOrganizations();
|
||||
}, [fetchOrganizations]);
|
||||
}, [fetchOrganizations, navigationKey]); // Include navigationKey to trigger on navigation
|
||||
|
||||
const handleRefresh = async () => {
|
||||
const success = await fetchOrganizations();
|
||||
@@ -342,9 +354,13 @@ export function Organization() {
|
||||
</SelectContent>
|
||||
</Select>
|
||||
|
||||
<Button variant="default" onClick={handleRefresh}>
|
||||
<RefreshCw className="h-4 w-4 mr-2" />
|
||||
Refresh
|
||||
<Button
|
||||
variant="outline"
|
||||
size="icon"
|
||||
onClick={handleRefresh}
|
||||
title="Refresh organizations"
|
||||
>
|
||||
<RefreshCw className="h-4 w-4" />
|
||||
</Button>
|
||||
|
||||
<Button
|
||||
|
||||
@@ -27,13 +27,18 @@ import type { SyncRepoRequest, SyncRepoResponse } from "@/types/sync";
|
||||
import { OwnerCombobox, OrganizationCombobox } from "./RepositoryComboboxes";
|
||||
import type { RetryRepoRequest, RetryRepoResponse } from "@/types/retry";
|
||||
import AddRepositoryDialog from "./AddRepositoryDialog";
|
||||
import type { ConfigApiResponse } from "@/types/config";
|
||||
|
||||
import { useLiveRefresh } from "@/hooks/useLiveRefresh";
|
||||
import { useConfigStatus } from "@/hooks/useConfigStatus";
|
||||
import { useNavigation } from "@/components/layout/MainLayout";
|
||||
|
||||
export default function Repository() {
|
||||
const [repositories, setRepositories] = useState<Repository[]>([]);
|
||||
const [isLoading, setIsLoading] = useState(true);
|
||||
const [isGitHubConfigured, setIsGitHubConfigured] = useState<boolean>(true);
|
||||
const [isInitialLoading, setIsInitialLoading] = useState(true);
|
||||
const { user } = useAuth();
|
||||
const { registerRefreshCallback, isLiveEnabled } = useLiveRefresh();
|
||||
const { isGitHubConfigured, isFullyConfigured } = useConfigStatus();
|
||||
const { navigationKey } = useNavigation();
|
||||
const { filter, setFilter } = useFilterParams({
|
||||
searchTerm: "",
|
||||
status: "",
|
||||
@@ -75,30 +80,21 @@ export default function Repository() {
|
||||
onMessage: handleNewMessage,
|
||||
});
|
||||
|
||||
const fetchRepositories = useCallback(async () => {
|
||||
if (!user) return;
|
||||
const fetchRepositories = useCallback(async (isLiveRefresh = false) => {
|
||||
if (!user?.id) return;
|
||||
|
||||
// Don't fetch repositories if GitHub is not configured or still loading config
|
||||
if (!isGitHubConfigured) {
|
||||
setIsInitialLoading(false);
|
||||
return false;
|
||||
}
|
||||
|
||||
// First, check if GitHub is configured by fetching the user's config
|
||||
try {
|
||||
const configResponse = await apiRequest<ConfigApiResponse>(
|
||||
`/config?userId=${user.id}`,
|
||||
{
|
||||
method: "GET",
|
||||
}
|
||||
);
|
||||
|
||||
// Check if GitHub credentials are configured
|
||||
if (!configResponse?.githubConfig?.username || !configResponse?.githubConfig?.token) {
|
||||
setIsLoading(false);
|
||||
setIsGitHubConfigured(false);
|
||||
// Don't show error toast for unconfigured GitHub - just return silently
|
||||
return false;
|
||||
// Set appropriate loading state based on refresh type
|
||||
if (!isLiveRefresh) {
|
||||
setIsInitialLoading(true);
|
||||
}
|
||||
|
||||
// GitHub is configured
|
||||
setIsGitHubConfigured(true);
|
||||
setIsLoading(true);
|
||||
|
||||
const response = await apiRequest<RepositoryApiResponse>(
|
||||
`/github/repositories?userId=${user.id}`,
|
||||
{
|
||||
@@ -110,25 +106,49 @@ export default function Repository() {
|
||||
setRepositories(response.repositories);
|
||||
return true;
|
||||
} else {
|
||||
toast.error(response.error || "Error fetching repositories");
|
||||
// Only show error toast for manual refreshes to avoid spam during live updates
|
||||
if (!isLiveRefresh) {
|
||||
toast.error(response.error || "Error fetching repositories");
|
||||
}
|
||||
return false;
|
||||
}
|
||||
} catch (error) {
|
||||
toast.error(
|
||||
error instanceof Error ? error.message : "Error fetching repositories"
|
||||
);
|
||||
// Only show error toast for manual refreshes to avoid spam during live updates
|
||||
if (!isLiveRefresh) {
|
||||
toast.error(
|
||||
error instanceof Error ? error.message : "Error fetching repositories"
|
||||
);
|
||||
}
|
||||
return false;
|
||||
} finally {
|
||||
setIsLoading(false);
|
||||
if (!isLiveRefresh) {
|
||||
setIsInitialLoading(false);
|
||||
}
|
||||
}
|
||||
}, [user]);
|
||||
}, [user?.id, isGitHubConfigured]); // Depend on user.id and the GitHub config flag, not the entire user object
|
||||
|
||||
useEffect(() => {
|
||||
fetchRepositories();
|
||||
}, [fetchRepositories]);
|
||||
// Reset loading state when component becomes active
|
||||
setIsInitialLoading(true);
|
||||
fetchRepositories(false); // Manual refresh, not live
|
||||
}, [fetchRepositories, navigationKey]); // Include navigationKey to trigger on navigation
|
||||
|
||||
// Register with global live refresh system
|
||||
useEffect(() => {
|
||||
// Only register for live refresh if GitHub is configured
|
||||
if (!isGitHubConfigured) {
|
||||
return;
|
||||
}
|
||||
|
||||
const unregister = registerRefreshCallback(() => {
|
||||
fetchRepositories(true); // Live refresh
|
||||
});
|
||||
|
||||
return unregister;
|
||||
}, [registerRefreshCallback, fetchRepositories, isGitHubConfigured]);
|
||||
|
||||
const handleRefresh = async () => {
|
||||
const success = await fetchRepositories();
|
||||
const success = await fetchRepositories(false); // Manual refresh, show loading skeleton
|
||||
if (success) {
|
||||
toast.success("Repositories refreshed successfully.");
|
||||
}
|
||||
@@ -354,7 +374,7 @@ export default function Repository() {
|
||||
toast.success(`Repository added successfully`);
|
||||
setRepositories((prevRepos) => [...prevRepos, response.repository]);
|
||||
|
||||
await fetchRepositories();
|
||||
await fetchRepositories(false); // Manual refresh after adding repository
|
||||
|
||||
setFilter((prev) => ({
|
||||
...prev,
|
||||
@@ -442,15 +462,19 @@ export default function Repository() {
|
||||
</SelectContent>
|
||||
</Select>
|
||||
|
||||
<Button variant="default" onClick={handleRefresh}>
|
||||
<RefreshCw className="h-4 w-4 mr-2" />
|
||||
Refresh
|
||||
<Button
|
||||
variant="outline"
|
||||
size="icon"
|
||||
onClick={handleRefresh}
|
||||
title="Refresh repositories"
|
||||
>
|
||||
<RefreshCw className="h-4 w-4" />
|
||||
</Button>
|
||||
|
||||
<Button
|
||||
variant="default"
|
||||
onClick={handleMirrorAllRepos}
|
||||
disabled={isLoading || loadingRepoIds.size > 0}
|
||||
disabled={isInitialLoading || loadingRepoIds.size > 0}
|
||||
>
|
||||
<FlipHorizontal className="h-4 w-4 mr-2" />
|
||||
Mirror All
|
||||
@@ -465,7 +489,11 @@ export default function Repository() {
|
||||
</p>
|
||||
<Button
|
||||
variant="default"
|
||||
onClick={() => window.location.href = "/config"}
|
||||
onClick={() => {
|
||||
window.history.pushState({}, '', '/config');
|
||||
// We need to trigger a page change event for the navigation system
|
||||
window.dispatchEvent(new PopStateEvent('popstate'));
|
||||
}}
|
||||
>
|
||||
Go to Configuration
|
||||
</Button>
|
||||
@@ -473,7 +501,8 @@ export default function Repository() {
|
||||
) : (
|
||||
<RepositoryTable
|
||||
repositories={repositories}
|
||||
isLoading={isLoading || !connected}
|
||||
isLoading={isInitialLoading || !connected}
|
||||
isLiveActive={isLiveEnabled && isFullyConfigured}
|
||||
filter={filter}
|
||||
setFilter={setFilter}
|
||||
onMirror={handleMirrorRepo}
|
||||
|
||||
@@ -1,17 +1,19 @@
|
||||
import { useMemo, useRef } from "react";
|
||||
import Fuse from "fuse.js";
|
||||
import { useVirtualizer } from "@tanstack/react-virtual";
|
||||
import { GitFork, RefreshCw, RotateCcw } from "lucide-react";
|
||||
import { SiGithub } from "react-icons/si";
|
||||
import { FlipHorizontal, GitFork, RefreshCw, RotateCcw } from "lucide-react";
|
||||
import { SiGithub, SiGitea } from "react-icons/si";
|
||||
import type { Repository } from "@/lib/db/schema";
|
||||
import { Button } from "@/components/ui/button";
|
||||
import { formatDate, getStatusColor } from "@/lib/utils";
|
||||
import type { FilterParams } from "@/types/filter";
|
||||
import { Skeleton } from "@/components/ui/skeleton";
|
||||
import { useGiteaConfig } from "@/hooks/useGiteaConfig";
|
||||
|
||||
interface RepositoryTableProps {
|
||||
repositories: Repository[];
|
||||
isLoading: boolean;
|
||||
isLiveActive?: boolean;
|
||||
filter: FilterParams;
|
||||
setFilter: (filter: FilterParams) => void;
|
||||
onMirror: ({ repoId }: { repoId: string }) => Promise<void>;
|
||||
@@ -23,6 +25,7 @@ interface RepositoryTableProps {
|
||||
export default function RepositoryTable({
|
||||
repositories,
|
||||
isLoading,
|
||||
isLiveActive = false,
|
||||
filter,
|
||||
setFilter,
|
||||
onMirror,
|
||||
@@ -31,6 +34,37 @@ export default function RepositoryTable({
|
||||
loadingRepoIds,
|
||||
}: RepositoryTableProps) {
|
||||
const tableParentRef = useRef<HTMLDivElement>(null);
|
||||
const { giteaConfig } = useGiteaConfig();
|
||||
|
||||
// Helper function to construct Gitea repository URL
|
||||
const getGiteaRepoUrl = (repository: Repository): string | null => {
|
||||
if (!giteaConfig?.url) {
|
||||
return null;
|
||||
}
|
||||
|
||||
// Only provide Gitea links for repositories that have been or are being mirrored
|
||||
const validStatuses = ['mirroring', 'mirrored', 'syncing', 'synced'];
|
||||
if (!validStatuses.includes(repository.status)) {
|
||||
return null;
|
||||
}
|
||||
|
||||
// Use mirroredLocation if available, otherwise construct from repository data
|
||||
let repoPath: string;
|
||||
if (repository.mirroredLocation) {
|
||||
repoPath = repository.mirroredLocation;
|
||||
} else {
|
||||
// Fallback: construct the path based on repository data
|
||||
const owner = repository.organization || repository.owner;
|
||||
repoPath = `${owner}/${repository.name}`;
|
||||
}
|
||||
|
||||
// Ensure the base URL doesn't have a trailing slash
|
||||
const baseUrl = giteaConfig.url.endsWith('/')
|
||||
? giteaConfig.url.slice(0, -1)
|
||||
: giteaConfig.url;
|
||||
|
||||
return `${baseUrl}/${repoPath}`;
|
||||
};
|
||||
|
||||
const hasAnyFilter = Object.values(filter).some(
|
||||
(val) => val?.toString().trim() !== ""
|
||||
@@ -85,9 +119,12 @@ export default function RepositoryTable({
|
||||
Last Mirrored
|
||||
</div>
|
||||
<div className="h-full p-3 text-sm font-medium flex-[1]">Status</div>
|
||||
<div className="h-full p-3 text-sm font-medium flex-[1] text-right">
|
||||
<div className="h-full p-3 text-sm font-medium flex-[1]">
|
||||
Actions
|
||||
</div>
|
||||
<div className="h-full p-3 text-sm font-medium flex-[0.8] text-center">
|
||||
Links
|
||||
</div>
|
||||
</div>
|
||||
|
||||
{Array.from({ length: 5 }).map((_, i) => (
|
||||
@@ -110,7 +147,10 @@ export default function RepositoryTable({
|
||||
<div className="h-full p-3 text-sm font-medium flex-[1]">
|
||||
<Skeleton className="h-full w-full" />
|
||||
</div>
|
||||
<div className="h-full p-3 text-sm font-medium flex-[1] text-right">
|
||||
<div className="h-full p-3 text-sm font-medium flex-[1]">
|
||||
<Skeleton className="h-full w-full" />
|
||||
</div>
|
||||
<div className="h-full p-3 text-sm font-medium flex-[0.8] text-center">
|
||||
<Skeleton className="h-full w-full" />
|
||||
</div>
|
||||
</div>
|
||||
@@ -158,15 +198,18 @@ export default function RepositoryTable({
|
||||
Last Mirrored
|
||||
</div>
|
||||
<div className="h-full p-3 text-sm font-medium flex-[1]">Status</div>
|
||||
<div className="h-full p-3 text-sm font-medium flex-[1] text-right">
|
||||
<div className="h-full p-3 text-sm font-medium flex-[1]">
|
||||
Actions
|
||||
</div>
|
||||
<div className="h-full p-3 text-sm font-medium flex-[0.8] text-center">
|
||||
Links
|
||||
</div>
|
||||
</div>
|
||||
|
||||
{/* table body wrapper (for a parent in virtualization) */}
|
||||
<div
|
||||
ref={tableParentRef}
|
||||
className="flex flex-col max-h-[calc(100dvh-236px)] overflow-y-auto" // the height is set according to the other contents
|
||||
className="flex flex-col max-h-[calc(100dvh-276px)] overflow-y-auto" // height adjusted to account for the status bar
|
||||
>
|
||||
<div
|
||||
style={{
|
||||
@@ -238,61 +281,60 @@ export default function RepositoryTable({
|
||||
</div>
|
||||
|
||||
{/* Actions */}
|
||||
<div className="h-full p-3 flex items-center justify-end gap-x-2 flex-[1]">
|
||||
{/* {repo.status === "mirrored" ||
|
||||
repo.status === "syncing" ||
|
||||
repo.status === "synced" ? (
|
||||
<Button
|
||||
variant="ghost"
|
||||
disabled={repo.status === "syncing" || isLoading}
|
||||
onClick={() => onSync({ repoId: repo.id ?? "" })}
|
||||
>
|
||||
{isLoading ? (
|
||||
<>
|
||||
<RefreshCw className="h-4 w-4 animate-spin mr-1" />
|
||||
Sync
|
||||
</>
|
||||
) : (
|
||||
<>
|
||||
<RefreshCw className="h-4 w-4 mr-1" />
|
||||
Sync
|
||||
</>
|
||||
)}
|
||||
</Button>
|
||||
) : (
|
||||
<Button
|
||||
variant="ghost"
|
||||
disabled={repo.status === "mirroring" || isLoading}
|
||||
onClick={() => onMirror({ repoId: repo.id ?? "" })}
|
||||
>
|
||||
{isLoading ? (
|
||||
<>
|
||||
<RefreshCw className="h-4 w-4 animate-spin mr-1" />
|
||||
Mirror
|
||||
</>
|
||||
) : (
|
||||
<>
|
||||
<RefreshCw className="h-4 w-4 mr-1" />
|
||||
Mirror
|
||||
</>
|
||||
)}
|
||||
</Button>
|
||||
)} */}
|
||||
|
||||
<div className="h-full p-3 flex items-center justify-start flex-[1]">
|
||||
<RepoActionButton
|
||||
repo={{ id: repo.id ?? "", status: repo.status }}
|
||||
isLoading={isLoading}
|
||||
onMirror={({ repoId }) =>
|
||||
onMirror({ repoId: repo.id ?? "" })
|
||||
}
|
||||
onSync={({ repoId }) => onSync({ repoId: repo.id ?? "" })}
|
||||
onRetry={({ repoId }) => onRetry({ repoId: repo.id ?? "" })}
|
||||
onMirror={() => onMirror({ repoId: repo.id ?? "" })}
|
||||
onSync={() => onSync({ repoId: repo.id ?? "" })}
|
||||
onRetry={() => onRetry({ repoId: repo.id ?? "" })}
|
||||
/>
|
||||
</div>
|
||||
|
||||
{/* Links */}
|
||||
<div className="h-full p-3 flex items-center justify-center gap-x-2 flex-[0.8]">
|
||||
{(() => {
|
||||
const giteaUrl = getGiteaRepoUrl(repo);
|
||||
|
||||
// Determine tooltip based on status and configuration
|
||||
let tooltip: string;
|
||||
if (!giteaConfig?.url) {
|
||||
tooltip = "Gitea not configured";
|
||||
} else if (repo.status === 'imported') {
|
||||
tooltip = "Repository not yet mirrored to Gitea";
|
||||
} else if (repo.status === 'failed') {
|
||||
tooltip = "Repository mirroring failed";
|
||||
} else if (repo.status === 'mirroring') {
|
||||
tooltip = "Repository is being mirrored to Gitea";
|
||||
} else if (giteaUrl) {
|
||||
tooltip = "View on Gitea";
|
||||
} else {
|
||||
tooltip = "Gitea repository not available";
|
||||
}
|
||||
|
||||
return giteaUrl ? (
|
||||
<Button variant="ghost" size="icon" asChild>
|
||||
<a
|
||||
href={giteaUrl}
|
||||
target="_blank"
|
||||
rel="noopener noreferrer"
|
||||
title={tooltip}
|
||||
>
|
||||
<SiGitea className="h-4 w-4" />
|
||||
</a>
|
||||
</Button>
|
||||
) : (
|
||||
<Button variant="ghost" size="icon" disabled title={tooltip}>
|
||||
<SiGitea className="h-4 w-4" />
|
||||
</Button>
|
||||
);
|
||||
})()}
|
||||
<Button variant="ghost" size="icon" asChild>
|
||||
<a
|
||||
href={repo.url}
|
||||
target="_blank"
|
||||
rel="noopener noreferrer"
|
||||
title="View on GitHub"
|
||||
>
|
||||
<SiGithub className="h-4 w-4" />
|
||||
</a>
|
||||
@@ -303,6 +345,46 @@ export default function RepositoryTable({
|
||||
})}
|
||||
</div>
|
||||
</div>
|
||||
|
||||
{/* Status Bar */}
|
||||
<div className="h-[40px] flex items-center justify-between border-t bg-muted/30 px-3 relative">
|
||||
<div className="flex items-center gap-2">
|
||||
<div className={`h-1.5 w-1.5 rounded-full ${isLiveActive ? 'bg-emerald-500' : 'bg-primary'}`} />
|
||||
<span className="text-sm font-medium text-foreground">
|
||||
{hasAnyFilter
|
||||
? `Showing ${filteredRepositories.length} of ${repositories.length} repositories`
|
||||
: `${repositories.length} ${repositories.length === 1 ? 'repository' : 'repositories'} total`}
|
||||
</span>
|
||||
</div>
|
||||
|
||||
{/* Center - Live active indicator */}
|
||||
{isLiveActive && (
|
||||
<div className="flex items-center gap-1.5 absolute left-1/2 transform -translate-x-1/2">
|
||||
<div
|
||||
className="h-1 w-1 rounded-full bg-emerald-500"
|
||||
style={{
|
||||
animation: 'pulse 2s ease-in-out infinite'
|
||||
}}
|
||||
/>
|
||||
<span className="text-xs text-emerald-600 dark:text-emerald-400 font-medium">
|
||||
Live active
|
||||
</span>
|
||||
<div
|
||||
className="h-1 w-1 rounded-full bg-emerald-500"
|
||||
style={{
|
||||
animation: 'pulse 2s ease-in-out infinite',
|
||||
animationDelay: '1s'
|
||||
}}
|
||||
/>
|
||||
</div>
|
||||
)}
|
||||
|
||||
{hasAnyFilter && (
|
||||
<span className="text-xs text-muted-foreground">
|
||||
Filters applied
|
||||
</span>
|
||||
)}
|
||||
</div>
|
||||
</div>
|
||||
);
|
||||
}
|
||||
@@ -316,12 +398,10 @@ function RepoActionButton({
|
||||
}: {
|
||||
repo: { id: string; status: string };
|
||||
isLoading: boolean;
|
||||
onMirror: ({ repoId }: { repoId: string }) => void;
|
||||
onSync: ({ repoId }: { repoId: string }) => void;
|
||||
onRetry: ({ repoId }: { repoId: string }) => void;
|
||||
onMirror: () => void;
|
||||
onSync: () => void;
|
||||
onRetry: () => void;
|
||||
}) {
|
||||
const repoId = repo.id ?? "";
|
||||
|
||||
let label = "";
|
||||
let icon = <></>;
|
||||
let onClick = () => {};
|
||||
@@ -330,23 +410,28 @@ function RepoActionButton({
|
||||
if (repo.status === "failed") {
|
||||
label = "Retry";
|
||||
icon = <RotateCcw className="h-4 w-4 mr-1" />;
|
||||
onClick = () => onRetry({ repoId });
|
||||
onClick = onRetry;
|
||||
} else if (["mirrored", "synced", "syncing"].includes(repo.status)) {
|
||||
label = "Sync";
|
||||
icon = <RefreshCw className="h-4 w-4 mr-1" />;
|
||||
onClick = () => onSync({ repoId });
|
||||
onClick = onSync;
|
||||
disabled ||= repo.status === "syncing";
|
||||
} else if (["imported", "mirroring"].includes(repo.status)) {
|
||||
label = "Mirror";
|
||||
icon = <RefreshCw className="h-4 w-4 mr-1" />;
|
||||
onClick = () => onMirror({ repoId });
|
||||
icon = <FlipHorizontal className="h-4 w-4 mr-1" />; // Don't change this icon to GitFork.
|
||||
onClick = onMirror;
|
||||
disabled ||= repo.status === "mirroring";
|
||||
} else {
|
||||
return null; // unsupported status
|
||||
}
|
||||
|
||||
return (
|
||||
<Button variant="ghost" disabled={disabled} onClick={onClick}>
|
||||
<Button
|
||||
variant="ghost"
|
||||
disabled={disabled}
|
||||
onClick={onClick}
|
||||
className="min-w-[80px] justify-start"
|
||||
>
|
||||
{isLoading ? (
|
||||
<>
|
||||
<RefreshCw className="h-4 w-4 animate-spin mr-1" />
|
||||
|
||||
src/components/ui/switch.tsx (new file, 29 lines)
@@ -0,0 +1,29 @@
|
||||
import * as React from "react"
|
||||
import * as SwitchPrimitive from "@radix-ui/react-switch"
|
||||
|
||||
import { cn } from "@/lib/utils"
|
||||
|
||||
function Switch({
|
||||
className,
|
||||
...props
|
||||
}: React.ComponentProps<typeof SwitchPrimitive.Root>) {
|
||||
return (
|
||||
<SwitchPrimitive.Root
|
||||
data-slot="switch"
|
||||
className={cn(
|
||||
"peer data-[state=checked]:bg-primary data-[state=unchecked]:bg-input focus-visible:border-ring focus-visible:ring-ring/50 dark:data-[state=unchecked]:bg-input/80 inline-flex h-[1.15rem] w-8 shrink-0 items-center rounded-full border border-transparent shadow-xs transition-all outline-none focus-visible:ring-[3px] disabled:cursor-not-allowed disabled:opacity-50",
|
||||
className
|
||||
)}
|
||||
{...props}
|
||||
>
|
||||
<SwitchPrimitive.Thumb
|
||||
data-slot="switch-thumb"
|
||||
className={cn(
|
||||
"bg-background dark:data-[state=unchecked]:bg-foreground dark:data-[state=checked]:bg-primary-foreground pointer-events-none block size-4 rounded-full ring-0 transition-transform data-[state=checked]:translate-x-[calc(100%-2px)] data-[state=unchecked]:translate-x-0"
|
||||
)}
|
||||
/>
|
||||
</SwitchPrimitive.Root>
|
||||
)
|
||||
}
|
||||
|
||||
export { Switch }
|
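Since the new Switch forwards its props to Radix UI's primitive, it is consumed as a controlled component via `checked` and `onCheckedChange`. A minimal usage sketch (the cleanup-toggle naming is hypothetical, not from this diff):

```tsx
import { useState } from "react";
import { Switch } from "@/components/ui/switch";

// Illustrative only: a controlled toggle, e.g. for a cleanup-enabled setting.
export function CleanupToggle() {
  const [enabled, setEnabled] = useState(false);
  return <Switch checked={enabled} onCheckedChange={setEnabled} />;
}
```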
||||
@@ -104,7 +104,6 @@ gitea-mirror/
|
||||
├── data/ # Database and persistent data
|
||||
├── docker/ # Docker configuration
|
||||
└── scripts/ # Utility scripts for deployment and maintenance
|
||||
├── gitea-mirror-lxc-proxmox.sh # Proxmox LXC deployment script
|
||||
├── gitea-mirror-lxc-local.sh # Local LXC deployment script
|
||||
└── manage-db.ts # Database management tool
|
||||
```
|
||||
@@ -114,7 +113,7 @@ gitea-mirror/
|
||||
Gitea Mirror supports multiple deployment options:
|
||||
|
||||
1. **Docker**: Run as a containerized application using Docker and docker-compose
|
||||
2. **LXC Containers**: Deploy in Linux Containers (LXC) on Proxmox VE or local workstations
|
||||
2. **LXC Containers**: Deploy in Linux Containers (LXC) on Proxmox VE (using community script by [Tobias/CrazyWolf13](https://github.com/CrazyWolf13)) or local workstations
|
||||
3. **Native**: Run directly on the host system using Bun runtime
|
||||
|
||||
Each deployment method has its own advantages:
|
||||
|
||||
@@ -25,13 +25,15 @@ The following environment variables can be used to configure Gitea Mirror:
|
||||
|----------|-------------|---------------|---------|
|
||||
| `NODE_ENV` | Runtime environment (development, production, test) | `development` | `production` |
|
||||
| `DATABASE_URL` | SQLite database URL | `file:data/gitea-mirror.db` | `file:path/to/your/database.db` |
|
||||
| `JWT_SECRET` | Secret key for JWT authentication | `your-secret-key-change-this-in-production` | `your-secure-random-string` |
|
||||
| `JWT_SECRET` | Secret key for JWT authentication | Auto-generated secure random string | `your-secure-random-string` |
|
||||
| `HOST` | Server host | `localhost` | `0.0.0.0` |
|
||||
| `PORT` | Server port | `4321` | `8080` |
|
||||
|
||||
### Important Security Note
|
||||
|
||||
In production environments, you should always set a strong, unique `JWT_SECRET` to ensure secure authentication.
|
||||
The application will automatically generate a secure random `JWT_SECRET` on first run if one isn't provided or if the default value is used. This generated secret is stored in the data directory for persistence across container restarts.
|
||||
|
||||
While this auto-generation feature provides good security by default, you can still explicitly set your own `JWT_SECRET` for complete control over your deployment.
|
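If you prefer to set the secret yourself rather than rely on auto-generation, any sufficiently long random string works. One way to produce one, as a generic Node/Bun one-off (not a script shipped with this project):

```ts
// Generate a 64-character hex string suitable for JWT_SECRET.
import { randomBytes } from "node:crypto";

console.log(randomBytes(32).toString("hex"));
```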
||||
|
||||
## Web UI Configuration
|
||||
|
||||
@@ -148,13 +150,12 @@ Events in Gitea Mirror (such as repository mirroring operations) are stored in t
|
||||
# View all events in the database
|
||||
bun scripts/check-events.ts
|
||||
|
||||
# Clean up old events (default: older than 7 days)
|
||||
bun scripts/cleanup-events.ts
|
||||
|
||||
# Mark all events as read
|
||||
bun scripts/mark-events-read.ts
|
||||
```
|
||||
|
||||
For cleaning up old activities and events, use the cleanup button in the Activity Log page of the web interface.
|
||||
|
||||
### Health Check Endpoint
|
||||
|
||||
Gitea Mirror includes a built-in health check endpoint at `/api/health` that provides:
|
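As a quick illustration, the endpoint can be probed from a script; the field names below follow the `HealthResponse` interface extended later in this diff, and the host/port are assumptions for a default local deployment:

```ts
// Minimal health probe; adjust the host/port to your deployment.
const res = await fetch("http://localhost:4321/api/health");
const health = await res.json();

console.log(health.status);          // "ok" or "error"
console.log(health.version);         // running version
console.log(health.updateAvailable); // true when latestVersion is newer
```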
||||
|
||||
@@ -37,7 +37,7 @@ Docker provides the easiest way to get started with minimal configuration.
|
||||
|
||||
2. Start the application in production mode:
|
||||
```bash
|
||||
docker-compose --profile production up -d
|
||||
docker compose up -d
|
||||
```
|
||||
|
||||
3. Access the application at [http://localhost:4321](http://localhost:4321)
|
||||
@@ -179,4 +179,4 @@ After your initial setup:
|
||||
- Check out the [Configuration Guide](/configuration) for advanced settings
|
||||
- Review the [Architecture Documentation](/architecture) to understand the system
|
||||
- For server deployments, set up monitoring using the health check endpoint
|
||||
- Consider setting up a cron job to clean up old events: `bun scripts/cleanup-events.ts`
|
||||
- Use the cleanup button in the Activity Log page to manage old events and activities
|
||||
|
||||
src/hooks/useConfigStatus.ts (new file, 154 lines)
@@ -0,0 +1,154 @@
|
||||
import { useCallback, useEffect, useState, useRef } from 'react';
|
||||
import { useAuth } from './useAuth';
|
||||
import { apiRequest } from '@/lib/utils';
|
||||
import type { ConfigApiResponse } from '@/types/config';
|
||||
|
||||
interface ConfigStatus {
|
||||
isGitHubConfigured: boolean;
|
||||
isGiteaConfigured: boolean;
|
||||
isFullyConfigured: boolean;
|
||||
isLoading: boolean;
|
||||
error: string | null;
|
||||
}
|
||||
|
||||
// Cache to prevent duplicate API calls across components
|
||||
let configCache: { data: ConfigApiResponse | null; timestamp: number; userId: string | null } = {
|
||||
data: null,
|
||||
timestamp: 0,
|
||||
userId: null
|
||||
};
|
||||
|
||||
const CACHE_DURATION = 30000; // 30 seconds cache
|
||||
|
||||
/**
|
||||
* Hook to check if GitHub and Gitea are properly configured
|
||||
* Returns configuration status and prevents unnecessary API calls when not configured
|
||||
* Uses caching to prevent duplicate API calls across components
|
||||
*/
|
||||
export function useConfigStatus(): ConfigStatus {
|
||||
const { user } = useAuth();
|
||||
const [configStatus, setConfigStatus] = useState<ConfigStatus>({
|
||||
isGitHubConfigured: false,
|
||||
isGiteaConfigured: false,
|
||||
isFullyConfigured: false,
|
||||
isLoading: true,
|
||||
error: null,
|
||||
});
|
||||
|
||||
// Track if this hook has already checked config to prevent multiple calls
|
||||
const hasCheckedRef = useRef(false);
|
||||
|
||||
const checkConfiguration = useCallback(async () => {
|
||||
if (!user?.id) {
|
||||
setConfigStatus({
|
||||
isGitHubConfigured: false,
|
||||
isGiteaConfigured: false,
|
||||
isFullyConfigured: false,
|
||||
isLoading: false,
|
||||
error: 'No user found',
|
||||
});
|
||||
return;
|
||||
}
|
||||
|
||||
// Check cache first
|
||||
const now = Date.now();
|
||||
const isCacheValid = configCache.data &&
|
||||
configCache.userId === user.id &&
|
||||
(now - configCache.timestamp) < CACHE_DURATION;
|
||||
|
||||
if (isCacheValid && hasCheckedRef.current) {
|
||||
const configResponse = configCache.data!;
|
||||
|
||||
const isGitHubConfigured = !!(
|
||||
configResponse?.githubConfig?.username &&
|
||||
configResponse?.githubConfig?.token
|
||||
);
|
||||
|
||||
const isGiteaConfigured = !!(
|
||||
configResponse?.giteaConfig?.url &&
|
||||
configResponse?.giteaConfig?.username &&
|
||||
configResponse?.giteaConfig?.token
|
||||
);
|
||||
|
||||
const isFullyConfigured = isGitHubConfigured && isGiteaConfigured;
|
||||
|
||||
setConfigStatus({
|
||||
isGitHubConfigured,
|
||||
isGiteaConfigured,
|
||||
isFullyConfigured,
|
||||
isLoading: false,
|
||||
error: null,
|
||||
});
|
||||
return;
|
||||
}
|
||||
|
||||
try {
|
||||
// Only show loading if we haven't checked before or cache is invalid
|
||||
if (!hasCheckedRef.current) {
|
||||
setConfigStatus(prev => ({ ...prev, isLoading: true, error: null }));
|
||||
}
|
||||
|
||||
const configResponse = await apiRequest<ConfigApiResponse>(
|
||||
`/config?userId=${user.id}`,
|
||||
{ method: 'GET' }
|
||||
);
|
||||
|
||||
// Update cache
|
||||
configCache = {
|
||||
data: configResponse,
|
||||
timestamp: now,
|
||||
userId: user.id
|
||||
};
|
||||
|
||||
const isGitHubConfigured = !!(
|
||||
configResponse?.githubConfig?.username &&
|
||||
configResponse?.githubConfig?.token
|
||||
);
|
||||
|
||||
const isGiteaConfigured = !!(
|
||||
configResponse?.giteaConfig?.url &&
|
||||
configResponse?.giteaConfig?.username &&
|
||||
configResponse?.giteaConfig?.token
|
||||
);
|
||||
|
||||
const isFullyConfigured = isGitHubConfigured && isGiteaConfigured;
|
||||
|
||||
setConfigStatus({
|
||||
isGitHubConfigured,
|
||||
isGiteaConfigured,
|
||||
isFullyConfigured,
|
||||
isLoading: false,
|
||||
error: null,
|
||||
});
|
||||
|
||||
hasCheckedRef.current = true;
|
||||
} catch (error) {
|
||||
setConfigStatus({
|
||||
isGitHubConfigured: false,
|
||||
isGiteaConfigured: false,
|
||||
isFullyConfigured: false,
|
||||
isLoading: false,
|
||||
error: error instanceof Error ? error.message : 'Failed to check configuration',
|
||||
});
|
||||
hasCheckedRef.current = true;
|
||||
}
|
||||
}, [user?.id]);
|
||||
|
||||
useEffect(() => {
|
||||
checkConfiguration();
|
||||
}, [checkConfiguration]);
|
||||
|
||||
return configStatus;
|
||||
}
|
||||
|
||||
// Export function to invalidate cache when config is updated
|
||||
export function invalidateConfigCache() {
|
||||
configCache = { data: null, timestamp: 0, userId: null };
|
||||
}
|
||||
|
||||
// Export function to get cached config data for other hooks
|
||||
export function getCachedConfig(): ConfigApiResponse | null {
|
||||
const now = Date.now();
|
||||
const isCacheValid = configCache.data && (now - configCache.timestamp) < CACHE_DURATION;
|
||||
return isCacheValid ? configCache.data : null;
|
||||
}
|
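To make the caching behaviour concrete, here is a sketch of how a consumer and a settings form might use this hook together (the component and handler names are hypothetical):

```tsx
import { useConfigStatus, invalidateConfigCache } from "@/hooks/useConfigStatus";

// Hypothetical consumer: gate an action on full configuration.
function MirrorControls() {
  const { isFullyConfigured, isLoading } = useConfigStatus();
  if (isLoading) return <p>Checking configuration…</p>;
  return <button disabled={!isFullyConfigured}>Mirror now</button>;
}

// After saving new GitHub/Gitea settings, drop the 30-second cache so the
// next check refetches instead of serving stale data.
function onConfigSaved() {
  invalidateConfigCache();
}
```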
||||
src/hooks/useGiteaConfig.ts (new file, 73 lines)
@@ -0,0 +1,73 @@
|
||||
import { useCallback, useEffect, useState } from 'react';
|
||||
import { useAuth } from './useAuth';
|
||||
import { apiRequest } from '@/lib/utils';
|
||||
import type { ConfigApiResponse, GiteaConfig } from '@/types/config';
|
||||
import { getCachedConfig } from './useConfigStatus';
|
||||
|
||||
interface GiteaConfigHook {
|
||||
giteaConfig: GiteaConfig | null;
|
||||
isLoading: boolean;
|
||||
error: string | null;
|
||||
}
|
||||
|
||||
/**
|
||||
* Hook to get Gitea configuration data
|
||||
* Uses the same cache as useConfigStatus to prevent duplicate API calls
|
||||
*/
|
||||
export function useGiteaConfig(): GiteaConfigHook {
|
||||
const { user } = useAuth();
|
||||
const [giteaConfigState, setGiteaConfigState] = useState<GiteaConfigHook>({
|
||||
giteaConfig: null,
|
||||
isLoading: true,
|
||||
error: null,
|
||||
});
|
||||
|
||||
const fetchGiteaConfig = useCallback(async () => {
|
||||
if (!user?.id) {
|
||||
setGiteaConfigState({
|
||||
giteaConfig: null,
|
||||
isLoading: false,
|
||||
error: 'User not authenticated',
|
||||
});
|
||||
return;
|
||||
}
|
||||
|
||||
// Try to get from cache first
|
||||
const cachedConfig = getCachedConfig();
|
||||
if (cachedConfig) {
|
||||
setGiteaConfigState({
|
||||
giteaConfig: cachedConfig.giteaConfig || null,
|
||||
isLoading: false,
|
||||
error: null,
|
||||
});
|
||||
return;
|
||||
}
|
||||
|
||||
try {
|
||||
setGiteaConfigState(prev => ({ ...prev, isLoading: true, error: null }));
|
||||
|
||||
const configResponse = await apiRequest<ConfigApiResponse>(
|
||||
`/config?userId=${user.id}`,
|
||||
{ method: 'GET' }
|
||||
);
|
||||
|
||||
setGiteaConfigState({
|
||||
giteaConfig: configResponse?.giteaConfig || null,
|
||||
isLoading: false,
|
||||
error: null,
|
||||
});
|
||||
} catch (error) {
|
||||
setGiteaConfigState({
|
||||
giteaConfig: null,
|
||||
isLoading: false,
|
||||
error: error instanceof Error ? error.message : 'Failed to fetch Gitea configuration',
|
||||
});
|
||||
}
|
||||
}, [user?.id]);
|
||||
|
||||
useEffect(() => {
|
||||
fetchGiteaConfig();
|
||||
}, [fetchGiteaConfig]);
|
||||
|
||||
return giteaConfigState;
|
||||
}
|
||||
src/hooks/useLiveRefresh.ts (new file, 102 lines)
@@ -0,0 +1,102 @@
|
||||
import * as React from "react";
|
||||
import { useState, useEffect, createContext, useContext, useCallback, useRef } from "react";
|
||||
import { usePageVisibility } from "./usePageVisibility";
|
||||
import { useConfigStatus } from "./useConfigStatus";
|
||||
|
||||
interface LiveRefreshContextType {
|
||||
isLiveEnabled: boolean;
|
||||
toggleLive: () => void;
|
||||
registerRefreshCallback: (callback: () => void) => () => void;
|
||||
}
|
||||
|
||||
const LiveRefreshContext = createContext<LiveRefreshContextType | undefined>(undefined);
|
||||
|
||||
const LIVE_REFRESH_INTERVAL = 3000; // 3 seconds
|
||||
const SESSION_STORAGE_KEY = 'gitea-mirror-live-refresh';
|
||||
|
||||
export function LiveRefreshProvider({ children }: { children: React.ReactNode }) {
|
||||
const [isLiveEnabled, setIsLiveEnabled] = useState<boolean>(false);
|
||||
const isPageVisible = usePageVisibility();
|
||||
const { isFullyConfigured } = useConfigStatus();
|
||||
const refreshCallbacksRef = useRef<Set<() => void>>(new Set());
|
||||
const intervalRef = useRef<NodeJS.Timeout | null>(null);
|
||||
|
||||
// Load initial state from session storage
|
||||
useEffect(() => {
|
||||
const savedState = sessionStorage.getItem(SESSION_STORAGE_KEY);
|
||||
if (savedState === 'true') {
|
||||
setIsLiveEnabled(true);
|
||||
}
|
||||
}, []);
|
||||
|
||||
// Save state to session storage whenever it changes
|
||||
useEffect(() => {
|
||||
sessionStorage.setItem(SESSION_STORAGE_KEY, isLiveEnabled.toString());
|
||||
}, [isLiveEnabled]);
|
||||
|
||||
// Execute all registered refresh callbacks
|
||||
const executeRefreshCallbacks = useCallback(() => {
|
||||
refreshCallbacksRef.current.forEach(callback => {
|
||||
try {
|
||||
callback();
|
||||
} catch (error) {
|
||||
console.error('Error executing refresh callback:', error);
|
||||
}
|
||||
});
|
||||
}, []);
|
||||
|
||||
// Setup/cleanup the refresh interval
|
||||
useEffect(() => {
|
||||
// Clear existing interval
|
||||
if (intervalRef.current) {
|
||||
clearInterval(intervalRef.current);
|
||||
intervalRef.current = null;
|
||||
}
|
||||
|
||||
// Only set up interval if live is enabled, page is visible, and configuration is complete
|
||||
if (isLiveEnabled && isPageVisible && isFullyConfigured) {
|
||||
intervalRef.current = setInterval(executeRefreshCallbacks, LIVE_REFRESH_INTERVAL);
|
||||
}
|
||||
|
||||
// Cleanup on unmount
|
||||
return () => {
|
||||
if (intervalRef.current) {
|
||||
clearInterval(intervalRef.current);
|
||||
intervalRef.current = null;
|
||||
}
|
||||
};
|
||||
}, [isLiveEnabled, isPageVisible, isFullyConfigured, executeRefreshCallbacks]);
|
||||
|
||||
const toggleLive = useCallback(() => {
|
||||
setIsLiveEnabled(prev => !prev);
|
||||
}, []);
|
||||
|
||||
const registerRefreshCallback = useCallback((callback: () => void) => {
|
||||
refreshCallbacksRef.current.add(callback);
|
||||
|
||||
// Return cleanup function
|
||||
return () => {
|
||||
refreshCallbacksRef.current.delete(callback);
|
||||
};
|
||||
}, []);
|
||||
|
||||
const contextValue = {
|
||||
isLiveEnabled,
|
||||
toggleLive,
|
||||
registerRefreshCallback,
|
||||
};
|
||||
|
||||
return React.createElement(
|
||||
LiveRefreshContext.Provider,
|
||||
{ value: contextValue },
|
||||
children
|
||||
);
|
||||
}
|
||||
|
||||
export function useLiveRefresh() {
|
||||
const context = useContext(LiveRefreshContext);
|
||||
if (context === undefined) {
|
||||
throw new Error("useLiveRefresh must be used within a LiveRefreshProvider");
|
||||
}
|
||||
return context;
|
||||
}
|
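The register/unregister contract matters here: a component hands the provider a callback and returns the unregister function from its effect, so the 3-second interval never fires into unmounted components. A reduced sketch (the fetchData prop is a placeholder):

```tsx
import { useEffect } from "react";
import { useLiveRefresh } from "@/hooks/useLiveRefresh";

// Hypothetical component that re-fetches while live refresh is active.
function LiveList({ fetchData }: { fetchData: () => void }) {
  const { registerRefreshCallback } = useLiveRefresh();

  useEffect(() => {
    // registerRefreshCallback returns an unregister function,
    // which doubles as the effect cleanup.
    return registerRefreshCallback(fetchData);
  }, [registerRefreshCallback, fetchData]);

  return null;
}
```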
||||
src/hooks/usePageVisibility.ts (new file, 28 lines)
@@ -0,0 +1,28 @@
|
||||
import { useEffect, useState } from 'react';
|
||||
|
||||
/**
|
||||
* Hook to detect if the page/tab is currently visible
|
||||
* Returns false when user switches to another tab or minimizes the window
|
||||
*/
|
||||
export function usePageVisibility(): boolean {
|
||||
const [isVisible, setIsVisible] = useState<boolean>(true);
|
||||
|
||||
useEffect(() => {
|
||||
const handleVisibilityChange = () => {
|
||||
setIsVisible(!document.hidden);
|
||||
};
|
||||
|
||||
// Set initial state
|
||||
setIsVisible(!document.hidden);
|
||||
|
||||
// Listen for visibility changes
|
||||
document.addEventListener('visibilitychange', handleVisibilityChange);
|
||||
|
||||
// Cleanup
|
||||
return () => {
|
||||
document.removeEventListener('visibilitychange', handleVisibilityChange);
|
||||
};
|
||||
}, []);
|
||||
|
||||
return isVisible;
|
||||
}
|
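Pairing this hook with a timer is the intended pattern, as the Dashboard changes above demonstrate. A minimal sketch of a poller that pauses while the tab is hidden (the poll prop is a placeholder):

```tsx
import { useEffect } from "react";
import { usePageVisibility } from "@/hooks/usePageVisibility";

// Hypothetical poller gated on tab visibility.
function Poller({ poll }: { poll: () => void }) {
  const isPageVisible = usePageVisibility();

  useEffect(() => {
    if (!isPageVisible) return; // no interval while hidden
    const id = setInterval(poll, 30000);
    return () => clearInterval(id);
  }, [isPageVisible, poll]);

  return null;
}
```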
||||
@@ -94,6 +94,8 @@ export interface HealthResponse {
  status: "ok" | "error";
  timestamp: string;
  version: string;
  latestVersion: string;
  updateAvailable: boolean;
  database: {
    connected: boolean;
    message: string;
@@ -147,6 +149,8 @@ export const healthApi = {
      timestamp: new Date().toISOString(),
      error: error instanceof Error ? error.message : "Unknown error checking health",
      version: "unknown",
      latestVersion: "unknown",
      updateAvailable: false,
      database: { connected: false, message: "Failed to connect to API" },
      system: {
        uptime: { startTime: "", uptimeMs: 0, formatted: "N/A" },
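
Illustrative shape of a successful response including the two new fields (all values made up):

{
  "status": "ok",
  "timestamp": "2025-01-01T00:00:00.000Z",
  "version": "1.0.0",
  "latestVersion": "1.1.0",
  "updateAvailable": true,
  "database": { "connected": true, "message": "..." }
}
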
258
src/lib/cleanup-service.ts
Normal file
@@ -0,0 +1,258 @@
/**
 * Background cleanup service for automatic database maintenance
 * This service runs periodically to clean up old events and mirror jobs
 * based on user configuration settings
 */

import { db, configs, events, mirrorJobs } from "@/lib/db";
import { eq, lt, and } from "drizzle-orm";

interface CleanupResult {
  userId: string;
  eventsDeleted: number;
  mirrorJobsDeleted: number;
  error?: string;
}

/**
 * Calculate cleanup interval in hours based on retention period
 * For shorter retention periods, run more frequently
 * For longer retention periods, run less frequently
 * @param retentionSeconds - Retention period in seconds
 */
export function calculateCleanupInterval(retentionSeconds: number): number {
  const retentionDays = retentionSeconds / (24 * 60 * 60); // Convert seconds to days

  if (retentionDays <= 1) {
    return 6; // Every 6 hours for 1 day retention
  } else if (retentionDays <= 3) {
    return 12; // Every 12 hours for 1-3 days retention
  } else if (retentionDays <= 7) {
    return 24; // Daily for 4-7 days retention
  } else if (retentionDays <= 30) {
    return 48; // Every 2 days for 8-30 days retention
  } else {
    return 168; // Weekly for 30+ days retention
  }
}

/**
 * Clean up old events and mirror jobs for a specific user
 * @param retentionSeconds - Retention period in seconds
 */
async function cleanupForUser(userId: string, retentionSeconds: number): Promise<CleanupResult> {
  try {
    const retentionDays = retentionSeconds / (24 * 60 * 60); // Convert to days for logging
    console.log(`Running cleanup for user ${userId} with ${retentionDays} days retention (${retentionSeconds} seconds)`);

    // Calculate cutoff date using seconds
    const cutoffDate = new Date();
    cutoffDate.setTime(cutoffDate.getTime() - retentionSeconds * 1000);

    let eventsDeleted = 0;
    let mirrorJobsDeleted = 0;

    // Clean up old events
    const eventsResult = await db
      .delete(events)
      .where(
        and(
          eq(events.userId, userId),
          lt(events.createdAt, cutoffDate)
        )
      );
    eventsDeleted = eventsResult.changes || 0;

    // Clean up old mirror jobs (only completed ones)
    const jobsResult = await db
      .delete(mirrorJobs)
      .where(
        and(
          eq(mirrorJobs.userId, userId),
          eq(mirrorJobs.inProgress, false),
          lt(mirrorJobs.timestamp, cutoffDate)
        )
      );
    mirrorJobsDeleted = jobsResult.changes || 0;

    console.log(`Cleanup completed for user ${userId}: ${eventsDeleted} events, ${mirrorJobsDeleted} jobs deleted`);

    return {
      userId,
      eventsDeleted,
      mirrorJobsDeleted,
    };
  } catch (error) {
    console.error(`Error during cleanup for user ${userId}:`, error);
    return {
      userId,
      eventsDeleted: 0,
      mirrorJobsDeleted: 0,
      error: error instanceof Error ? error.message : 'Unknown error',
    };
  }
}

/**
 * Update the cleanup configuration with last run time and calculate next run
 */
async function updateCleanupConfig(userId: string, cleanupConfig: any) {
  try {
    const now = new Date();
    const retentionSeconds = cleanupConfig.retentionDays || 604800; // Default 7 days in seconds
    const cleanupIntervalHours = calculateCleanupInterval(retentionSeconds);
    const nextRun = new Date(now.getTime() + cleanupIntervalHours * 60 * 60 * 1000);

    const updatedConfig = {
      ...cleanupConfig,
      lastRun: now,
      nextRun: nextRun,
    };

    await db
      .update(configs)
      .set({
        cleanupConfig: updatedConfig,
        updatedAt: now,
      })
      .where(eq(configs.userId, userId));

    const retentionDays = retentionSeconds / (24 * 60 * 60);
    console.log(`Updated cleanup config for user ${userId}, next run: ${nextRun.toISOString()} (${cleanupIntervalHours}h interval for ${retentionDays}d retention)`);
  } catch (error) {
    console.error(`Error updating cleanup config for user ${userId}:`, error);
  }
}

/**
 * Run automatic cleanup for all users with cleanup enabled
 */
export async function runAutomaticCleanup(): Promise<CleanupResult[]> {
  try {
    console.log('Starting automatic cleanup service...');

    // Get all users with cleanup enabled
    const userConfigs = await db
      .select()
      .from(configs)
      .where(eq(configs.isActive, true));

    const results: CleanupResult[] = [];
    const now = new Date();

    for (const config of userConfigs) {
      try {
        const cleanupConfig = config.cleanupConfig;

        // Skip if cleanup is not enabled
        if (!cleanupConfig?.enabled) {
          continue;
        }

        // Check if it's time to run cleanup
        const nextRun = cleanupConfig.nextRun ? new Date(cleanupConfig.nextRun) : null;

        // If nextRun is null or in the past, run cleanup
        if (!nextRun || now >= nextRun) {
          const result = await cleanupForUser(config.userId, cleanupConfig.retentionDays || 604800);
          results.push(result);

          // Update the cleanup config with new run times
          await updateCleanupConfig(config.userId, cleanupConfig);
        } else {
          console.log(`Skipping cleanup for user ${config.userId}, next run: ${nextRun.toISOString()}`);
        }
      } catch (error) {
        console.error(`Error processing cleanup for user ${config.userId}:`, error);
        results.push({
          userId: config.userId,
          eventsDeleted: 0,
          mirrorJobsDeleted: 0,
          error: error instanceof Error ? error.message : 'Unknown error',
        });
      }
    }

    console.log(`Automatic cleanup completed. Processed ${results.length} users.`);
    return results;
  } catch (error) {
    console.error('Error in automatic cleanup service:', error);
    return [];
  }
}

// Service state tracking
let cleanupIntervalId: NodeJS.Timeout | null = null;
let initialCleanupTimeoutId: NodeJS.Timeout | null = null;
let cleanupServiceRunning = false;

/**
 * Start the cleanup service with periodic execution
 * This should be called when the application starts
 */
export function startCleanupService() {
  if (cleanupServiceRunning) {
    console.log('⚠️ Cleanup service already running, skipping start');
    return;
  }

  console.log('Starting background cleanup service...');

  // Run cleanup every hour
  const CLEANUP_INTERVAL = 60 * 60 * 1000; // 1 hour in milliseconds

  // Run initial cleanup after 5 minutes to allow app to fully start
  initialCleanupTimeoutId = setTimeout(() => {
    runAutomaticCleanup().catch(error => {
      console.error('Error in initial cleanup run:', error);
    });
  }, 5 * 60 * 1000); // 5 minutes

  // Set up periodic cleanup
  cleanupIntervalId = setInterval(() => {
    runAutomaticCleanup().catch(error => {
      console.error('Error in periodic cleanup run:', error);
    });
  }, CLEANUP_INTERVAL);

  cleanupServiceRunning = true;
  console.log(`✅ Cleanup service started. Will run every ${CLEANUP_INTERVAL / 1000 / 60} minutes.`);
}

/**
 * Stop the cleanup service (for testing or shutdown)
 */
export function stopCleanupService() {
  if (!cleanupServiceRunning) {
    console.log('Cleanup service is not running');
    return;
  }

  console.log('🛑 Stopping cleanup service...');

  // Clear the periodic interval
  if (cleanupIntervalId) {
    clearInterval(cleanupIntervalId);
    cleanupIntervalId = null;
  }

  // Clear the initial timeout
  if (initialCleanupTimeoutId) {
    clearTimeout(initialCleanupTimeoutId);
    initialCleanupTimeoutId = null;
  }

  cleanupServiceRunning = false;
  console.log('✅ Cleanup service stopped');
}

/**
 * Get cleanup service status
 */
export function getCleanupServiceStatus() {
  return {
    running: cleanupServiceRunning,
    hasInterval: cleanupIntervalId !== null,
    hasInitialTimeout: initialCleanupTimeoutId !== null,
  };
}
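
The tiering in `calculateCleanupInterval` is easiest to read as concrete values (retention passed in seconds, interval returned in hours):

import { calculateCleanupInterval } from "@/lib/cleanup-service";

calculateCleanupInterval(1 * 86400);  // 6   (1 day retention)
calculateCleanupInterval(3 * 86400);  // 12  (1-3 days)
calculateCleanupInterval(7 * 86400);  // 24  (4-7 days)
calculateCleanupInterval(30 * 86400); // 48  (8-30 days)
calculateCleanupInterval(90 * 86400); // 168 (weekly beyond 30 days)
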
42
src/lib/db/index.test.ts
Normal file
@@ -0,0 +1,42 @@
import { describe, test, expect, mock, beforeAll, afterAll } from "bun:test";
import { drizzle } from "drizzle-orm/bun-sqlite";

// Silence console logs during tests
let originalConsoleLog: typeof console.log;

beforeAll(() => {
  // Save original console.log
  originalConsoleLog = console.log;
  // Replace with no-op function
  console.log = () => {};
});

afterAll(() => {
  // Restore original console.log
  console.log = originalConsoleLog;
});

// Mock the database module
mock.module("bun:sqlite", () => {
  return {
    Database: mock(function() {
      return {
        query: mock(() => ({
          all: mock(() => []),
          run: mock(() => ({}))
        }))
      };
    })
  };
});

// Mock the database tables
describe("Database Schema", () => {
  test("database connection can be created", async () => {
    // Import the db from the module
    const { db } = await import("./index");

    // Check that db is defined
    expect(db).toBeDefined();
  });
});
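
Like the other new test files in this change, this one can be run in isolation with Bun's test runner:

bun test src/lib/db/index.test.ts
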
@@ -81,6 +81,7 @@ export const events = sqliteTable("events", {
const githubSchema = configSchema.shape.githubConfig;
const giteaSchema = configSchema.shape.giteaConfig;
const scheduleSchema = configSchema.shape.scheduleConfig;
const cleanupSchema = configSchema.shape.cleanupConfig;

export const configs = sqliteTable("configs", {
  id: text("id").primaryKey(),
@@ -112,6 +113,10 @@ export const configs = sqliteTable("configs", {
    .$type<z.infer<typeof scheduleSchema>>()
    .notNull(),

  cleanupConfig: text("cleanup_config", { mode: "json" })
    .$type<z.infer<typeof cleanupSchema>>()
    .notNull(),

  createdAt: integer("created_at", { mode: "timestamp" })
    .notNull()
    .default(new Date()),
@@ -189,6 +194,18 @@ export const mirrorJobs = sqliteTable("mirror_jobs", {
  timestamp: integer("timestamp", { mode: "timestamp" })
    .notNull()
    .default(new Date()),

  // New fields for job resilience
  jobType: text("job_type").notNull().default("mirror"),
  batchId: text("batch_id"),
  totalItems: integer("total_items"),
  completedItems: integer("completed_items").default(0),
  itemIds: text("item_ids", { mode: "json" }).$type<string[]>(),
  completedItemIds: text("completed_item_ids", { mode: "json" }).$type<string[]>().default([]),
  inProgress: integer("in_progress", { mode: "boolean" }).notNull().default(false),
  startedAt: integer("started_at", { mode: "timestamp" }),
  completedAt: integer("completed_at", { mode: "timestamp" }),
  lastCheckpoint: integer("last_checkpoint", { mode: "timestamp" }),
});

export const organizations = sqliteTable("organizations", {

@@ -52,6 +52,12 @@ export const configSchema = z.object({
    lastRun: z.date().optional(),
    nextRun: z.date().optional(),
  }),
  cleanupConfig: z.object({
    enabled: z.boolean().default(false),
    retentionDays: z.number().min(1).default(604800), // in seconds (default: 7 days)
    lastRun: z.date().optional(),
    nextRun: z.date().optional(),
  }),
  createdAt: z.date().default(() => new Date()),
  updatedAt: z.date().default(() => new Date()),
});
@@ -111,6 +117,18 @@ export const mirrorJobSchema = z.object({
  status: repoStatusEnum.default("imported"),
  message: z.string(),
  timestamp: z.date().default(() => new Date()),

  // New fields for job resilience
  jobType: z.enum(["mirror", "sync", "retry"]).default("mirror"),
  batchId: z.string().uuid().optional(), // Group related jobs together
  totalItems: z.number().optional(), // Total number of items to process
  completedItems: z.number().optional(), // Number of items completed
  itemIds: z.array(z.string()).optional(), // IDs of items to process
  completedItemIds: z.array(z.string()).optional(), // IDs of completed items
  inProgress: z.boolean().default(false), // Whether the job is currently running
  startedAt: z.date().optional(), // When the job started
  completedAt: z.date().optional(), // When the job completed
  lastCheckpoint: z.date().optional(), // Last time progress was saved
});

export type MirrorJob = z.infer<typeof mirrorJobSchema>;
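
Note the naming quirk the inline comment calls out: `retentionDays` is stored in seconds. A serialized `cleanupConfig` value would therefore look roughly like this (dates illustrative):

{
  "enabled": true,
  "retentionDays": 604800,
  "lastRun": "2025-01-01T00:00:00.000Z",
  "nextRun": "2025-01-02T00:00:00.000Z"
}
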
@@ -1,6 +1,6 @@
import { v4 as uuidv4 } from "uuid";
import { db, events } from "./db";
import { eq, and, gt, lt } from "drizzle-orm";
import { eq, and, gt, lt, inArray } from "drizzle-orm";

/**
 * Publishes an event to a specific channel for a user
@@ -10,21 +10,58 @@ export async function publishEvent({
  userId,
  channel,
  payload,
  deduplicationKey,
}: {
  userId: string;
  channel: string;
  payload: any;
  deduplicationKey?: string; // Optional key to prevent duplicate events
}): Promise<string> {
  try {
    const eventId = uuidv4();
    console.log(`Publishing event to channel ${channel} for user ${userId}`);

    // Check for duplicate events if deduplication key is provided
    if (deduplicationKey) {
      const existingEvent = await db
        .select()
        .from(events)
        .where(
          and(
            eq(events.userId, userId),
            eq(events.channel, channel),
            eq(events.read, false)
          )
        )
        .limit(10); // Check recent unread events

      // Check if any existing event has the same deduplication key in payload
      const isDuplicate = existingEvent.some(event => {
        try {
          const eventPayload = JSON.parse(event.payload as string);
          return eventPayload.deduplicationKey === deduplicationKey;
        } catch {
          return false;
        }
      });

      if (isDuplicate) {
        console.log(`Skipping duplicate event with key: ${deduplicationKey}`);
        return eventId; // Return a valid ID but don't create the event
      }
    }

    // Add deduplication key to payload if provided
    const eventPayload = deduplicationKey
      ? { ...payload, deduplicationKey }
      : payload;

    // Insert the event into the SQLite database
    await db.insert(events).values({
      id: eventId,
      userId,
      channel,
      payload: JSON.stringify(payload),
      payload: JSON.stringify(eventPayload),
      createdAt: new Date(),
    });

@@ -50,36 +87,27 @@ export async function getNewEvents({
  lastEventTime?: Date;
}): Promise<any[]> {
  try {
    console.log(`Getting new events for user ${userId} in channel ${channel}`);
    if (lastEventTime) {
      console.log(`Looking for events after ${lastEventTime.toISOString()}`);
    }

    // Build the query
    let query = db
      .select()
      .from(events)
      .where(
        and(
          eq(events.userId, userId),
          eq(events.channel, channel),
          eq(events.read, false)
        )
      )
      .orderBy(events.createdAt);
    // Build the query conditions
    const conditions = [
      eq(events.userId, userId),
      eq(events.channel, channel),
      eq(events.read, false)
    ];

    // Add time filter if provided
    if (lastEventTime) {
      query = query.where(gt(events.createdAt, lastEventTime));
      conditions.push(gt(events.createdAt, lastEventTime));
    }

    // Execute the query
    const newEvents = await query;
    console.log(`Found ${newEvents.length} new events`);
    const newEvents = await db
      .select()
      .from(events)
      .where(and(...conditions))
      .orderBy(events.createdAt);

    // Mark events as read
    if (newEvents.length > 0) {
      console.log(`Marking ${newEvents.length} events as read`);
      await db
        .update(events)
        .set({ read: true })
@@ -103,9 +131,78 @@ export async function getNewEvents({
  }
}

/**
 * Removes duplicate events based on deduplication keys
 * This can be called periodically to clean up any duplicates that may have slipped through
 */
export async function removeDuplicateEvents(userId?: string): Promise<{ duplicatesRemoved: number }> {
  try {
    console.log("Removing duplicate events...");

    // Build the base query
    const allEvents = userId
      ? await db.select().from(events).where(eq(events.userId, userId))
      : await db.select().from(events);

    const duplicateIds: string[] = [];

    // Group events by user and channel, then check for duplicates
    const eventsByUserChannel = new Map<string, typeof allEvents>();

    for (const event of allEvents) {
      const key = `${event.userId}-${event.channel}`;
      if (!eventsByUserChannel.has(key)) {
        eventsByUserChannel.set(key, []);
      }
      eventsByUserChannel.get(key)!.push(event);
    }

    // Check each group for duplicates
    for (const [, events] of eventsByUserChannel) {
      const channelSeenKeys = new Set<string>();

      // Sort by creation time (keep the earliest)
      events.sort((a, b) => new Date(a.createdAt).getTime() - new Date(b.createdAt).getTime());

      for (const event of events) {
        try {
          const payload = JSON.parse(event.payload as string);
          if (payload.deduplicationKey) {
            if (channelSeenKeys.has(payload.deduplicationKey)) {
              duplicateIds.push(event.id);
            } else {
              channelSeenKeys.add(payload.deduplicationKey);
            }
          }
        } catch {
          // Skip events with invalid JSON
        }
      }
    }

    // Remove duplicates
    if (duplicateIds.length > 0) {
      console.log(`Removing ${duplicateIds.length} duplicate events`);

      // Delete in batches to avoid query size limits
      const batchSize = 100;
      for (let i = 0; i < duplicateIds.length; i += batchSize) {
        const batch = duplicateIds.slice(i, i + batchSize);
        await db.delete(events).where(inArray(events.id, batch));
      }
    }

    console.log(`Removed ${duplicateIds.length} duplicate events`);
    return { duplicatesRemoved: duplicateIds.length };
  } catch (error) {
    console.error("Error removing duplicate events:", error);
    return { duplicatesRemoved: 0 };
  }
}

/**
 * Cleans up old events to prevent the database from growing too large
 * Should be called periodically (e.g., daily via a cron job)
 * This function is used by the cleanup button in the Activity Log page
 *
 * @param maxAgeInDays Number of days to keep events (default: 7)
 * @param cleanupUnreadAfterDays Number of days after which to clean up unread events (default: 2x maxAgeInDays)
@@ -132,7 +229,7 @@ export async function cleanupOldEvents(
      )
    );

    const readEventsDeleted = readResult.changes || 0;
    const readEventsDeleted = (readResult as any).changes || 0;
    console.log(`Deleted ${readEventsDeleted} read events`);

    // Calculate the cutoff date for unread events (default to 2x the retention period)
@@ -150,7 +247,7 @@ export async function cleanupOldEvents(
      )
    );

    const unreadEventsDeleted = unreadResult.changes || 0;
    const unreadEventsDeleted = (unreadResult as any).changes || 0;
    console.log(`Deleted ${unreadEventsDeleted} unread events`);

    return { readEventsDeleted, unreadEventsDeleted };
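
Callers opt into deduplication by passing a stable key, which is what `createMirrorJob` does further down in this change. A minimal sketch against the new signature:

import { publishEvent } from "@/lib/events";

await publishEvent({
  userId: "user-id",
  channel: "mirror-status:user-id",
  payload: { repositoryId: "repo-id", status: "mirrored" },
  // while an unread event with this key exists, the publish is skipped
  deduplicationKey: "repo-repo-id-mirrored",
});
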
207
src/lib/gitea.test.ts
Normal file
@@ -0,0 +1,207 @@
import { describe, test, expect, mock, beforeEach, afterEach } from "bun:test";
import { Octokit } from "@octokit/rest";
import { repoStatusEnum } from "@/types/Repository";
import { getOrCreateGiteaOrg } from "./gitea";

// Mock the isRepoPresentInGitea function
const mockIsRepoPresentInGitea = mock(() => Promise.resolve(false));

// Mock the database module
mock.module("@/lib/db", () => {
  return {
    db: {
      update: () => ({
        set: () => ({
          where: () => Promise.resolve()
        })
      })
    },
    repositories: {},
    organizations: {}
  };
});

// Mock the helpers module
mock.module("@/lib/helpers", () => {
  return {
    createMirrorJob: mock(() => Promise.resolve("job-id"))
  };
});

// Mock http-client
mock.module("@/lib/http-client", () => {
  return {
    httpPost: mock(() => Promise.resolve({ data: { id: 123 }, status: 200, statusText: 'OK', headers: new Headers() })),
    httpGet: mock(() => Promise.resolve({ data: [], status: 200, statusText: 'OK', headers: new Headers() })),
    HttpError: class MockHttpError extends Error {
      constructor(message: string, public status: number, public statusText: string, public response?: string) {
        super(message);
        this.name = 'HttpError';
      }
    }
  };
});

// Mock the gitea module itself
mock.module("./gitea", () => {
  return {
    isRepoPresentInGitea: mockIsRepoPresentInGitea,
    mirrorGithubRepoToGitea: mock(async () => {}),
    mirrorGitHubOrgRepoToGiteaOrg: mock(async () => {})
  };
});

describe("Gitea Repository Mirroring", () => {
  // Mock console.log and console.error to prevent test output noise
  let originalConsoleLog: typeof console.log;
  let originalConsoleError: typeof console.error;

  beforeEach(() => {
    originalConsoleLog = console.log;
    originalConsoleError = console.error;
    console.log = mock(() => {});
    console.error = mock(() => {});
  });

  afterEach(() => {
    console.log = originalConsoleLog;
    console.error = originalConsoleError;
  });

  test("mirrorGithubRepoToGitea handles private repositories correctly", async () => {
    // Import the mocked function
    const { mirrorGithubRepoToGitea } = await import("./gitea");

    // Create mock Octokit instance
    const octokit = {} as Octokit;

    // Create mock repository (private)
    const repository = {
      id: "repo-id",
      name: "test-repo",
      fullName: "testuser/test-repo",
      url: "https://github.com/testuser/test-repo",
      cloneUrl: "https://github.com/testuser/test-repo.git",
      owner: "testuser",
      isPrivate: true,
      status: repoStatusEnum.parse("imported")
    };

    // Create mock config
    const config = {
      id: "config-id",
      userId: "user-id",
      githubConfig: {
        token: "github-token",
        mirrorIssues: false
      },
      giteaConfig: {
        url: "https://gitea.example.com",
        token: "gitea-token",
        username: "giteauser"
      }
    };

    // Call the function
    await mirrorGithubRepoToGitea({
      octokit,
      repository: repository as any,
      config
    });

    // Check that the function was called
    expect(mirrorGithubRepoToGitea).toHaveBeenCalled();
  });

  test("getOrCreateGiteaOrg handles JSON parsing errors gracefully", async () => {
    // Mock fetch to return invalid JSON
    const originalFetch = global.fetch;
    global.fetch = mock(async (url: string) => {
      if (url.includes("/api/v1/orgs/")) {
        // Mock response that looks successful but has invalid JSON
        return {
          ok: true,
          status: 200,
          headers: {
            get: (name: string) => name === "content-type" ? "application/json" : null
          },
          json: () => Promise.reject(new Error("Unexpected token in JSON")),
          text: () => Promise.resolve("Invalid JSON response"),
          clone: function() {
            return {
              text: () => Promise.resolve("Invalid JSON response")
            };
          }
        } as any;
      }
      return originalFetch(url);
    });

    const config = {
      userId: "user-id",
      giteaConfig: {
        url: "https://gitea.example.com",
        token: "gitea-token"
      }
    };

    try {
      await getOrCreateGiteaOrg({
        orgName: "test-org",
        config
      });
      // Should not reach here
      expect(true).toBe(false);
    } catch (error) {
      // Should catch the JSON parsing error with a descriptive message
      expect(error).toBeInstanceOf(Error);
      expect((error as Error).message).toContain("Failed to parse JSON response from Gitea API");
    } finally {
      // Restore original fetch
      global.fetch = originalFetch;
    }
  });

  test("getOrCreateGiteaOrg handles non-JSON content-type gracefully", async () => {
    // Mock fetch to return HTML instead of JSON
    const originalFetch = global.fetch;
    global.fetch = mock(async (url: string) => {
      if (url.includes("/api/v1/orgs/")) {
        return {
          ok: true,
          status: 200,
          headers: {
            get: (name: string) => name === "content-type" ? "text/html" : null
          },
          text: () => Promise.resolve("<html><body>Error page</body></html>")
        } as any;
      }
      return originalFetch(url);
    });

    const config = {
      userId: "user-id",
      giteaConfig: {
        url: "https://gitea.example.com",
        token: "gitea-token"
      }
    };

    try {
      await getOrCreateGiteaOrg({
        orgName: "test-org",
        config
      });
      // Should not reach here
      expect(true).toBe(false);
    } catch (error) {
      // Should catch the content-type error
      expect(error).toBeInstanceOf(Error);
      expect((error as Error).message).toContain("Invalid response format from Gitea API");
      expect((error as Error).message).toContain("text/html");
    } finally {
      // Restore original fetch
      global.fetch = originalFetch;
    }
  });
});
406
src/lib/gitea.ts
@@ -6,7 +6,7 @@ import {
import { Octokit } from "@octokit/rest";
import type { Config } from "@/types/config";
import type { Organization, Repository } from "./db/schema";
import superagent from "superagent";
import { httpPost, httpGet } from "./http-client";
import { createMirrorJob } from "./helpers";
import { db, organizations, repositories } from "./db";
import { eq } from "drizzle-orm";
@@ -181,19 +181,17 @@ export const mirrorGithubRepoToGitea = async ({

    const apiUrl = `${config.giteaConfig.url}/api/v1/repos/migrate`;

    const response = await superagent
      .post(apiUrl)
      .set("Authorization", `token ${config.giteaConfig.token}`)
      .set("Content-Type", "application/json")
      .send({
        clone_addr: cloneAddress,
        repo_name: repository.name,
        mirror: true,
        private: repository.isPrivate,
        repo_owner: config.giteaConfig.username,
        description: "",
        service: "git",
      });
    const response = await httpPost(apiUrl, {
      clone_addr: cloneAddress,
      repo_name: repository.name,
      mirror: true,
      private: repository.isPrivate,
      repo_owner: config.giteaConfig.username,
      description: "",
      service: "git",
    }, {
      "Authorization": `token ${config.giteaConfig.token}`,
    });

    // clone issues
    if (config.githubConfig.mirrorIssues) {
@@ -229,7 +227,7 @@ export const mirrorGithubRepoToGitea = async ({
      status: "mirrored",
    });

    return response.body;
    return response.data;
  } catch (error) {
    console.error(
      `Error while mirroring repository ${repository.name}: ${
@@ -283,6 +281,8 @@ export async function getOrCreateGiteaOrg({
  }

  try {
    console.log(`Attempting to get or create Gitea organization: ${orgName}`);

    const orgRes = await fetch(
      `${config.giteaConfig.url}/api/v1/orgs/${orgName}`,
      {
@@ -293,20 +293,36 @@ export async function getOrCreateGiteaOrg({
      }
    );

    if (orgRes.ok) {
      const org = await orgRes.json();
    console.log(`Get org response status: ${orgRes.status} for org: ${orgName}`);

      await createMirrorJob({
        userId: config.userId,
        organizationId: org.id,
        organizationName: orgName,
        status: "imported",
        message: `Organization ${orgName} fetched successfully`,
        details: `Organization ${orgName} was fetched from GitHub`,
      });
      return org.id;
    if (orgRes.ok) {
      // Check if response is actually JSON
      const contentType = orgRes.headers.get("content-type");
      if (!contentType || !contentType.includes("application/json")) {
        console.warn(`Expected JSON response but got content-type: ${contentType}`);
        const responseText = await orgRes.text();
        console.warn(`Response body: ${responseText}`);
        throw new Error(`Invalid response format from Gitea API. Expected JSON but got: ${contentType}`);
      }

      // Clone the response to handle potential JSON parsing errors
      const orgResClone = orgRes.clone();

      try {
        const org = await orgRes.json();
        console.log(`Successfully retrieved existing org: ${orgName} with ID: ${org.id}`);
        // Note: Organization events are handled by the main mirroring process
        // to avoid duplicate events
        return org.id;
      } catch (jsonError) {
        const responseText = await orgResClone.text();
        console.error(`Failed to parse JSON response for existing org: ${responseText}`);
        throw new Error(`Failed to parse JSON response from Gitea API: ${jsonError instanceof Error ? jsonError.message : String(jsonError)}`);
      }
    }

    console.log(`Organization ${orgName} not found, attempting to create it`);

    const createRes = await fetch(`${config.giteaConfig.url}/api/v1/orgs`, {
      method: "POST",
      headers: {
@@ -321,26 +337,46 @@ export async function getOrCreateGiteaOrg({
      }),
    });

    console.log(`Create org response status: ${createRes.status} for org: ${orgName}`);

    if (!createRes.ok) {
      throw new Error(`Failed to create Gitea org: ${await createRes.text()}`);
      const errorText = await createRes.text();
      console.error(`Failed to create org ${orgName}. Status: ${createRes.status}, Response: ${errorText}`);
      throw new Error(`Failed to create Gitea org: ${errorText}`);
    }

    await createMirrorJob({
      userId: config.userId,
      organizationName: orgName,
      status: "imported",
      message: `Organization ${orgName} created successfully`,
      details: `Organization ${orgName} was created in Gitea`,
    });
    // Check if response is actually JSON
    const createContentType = createRes.headers.get("content-type");
    if (!createContentType || !createContentType.includes("application/json")) {
      console.warn(`Expected JSON response but got content-type: ${createContentType}`);
      const responseText = await createRes.text();
      console.warn(`Response body: ${responseText}`);
      throw new Error(`Invalid response format from Gitea API. Expected JSON but got: ${createContentType}`);
    }

    const newOrg = await createRes.json();
    return newOrg.id;
    // Note: Organization creation events are handled by the main mirroring process
    // to avoid duplicate events

    // Clone the response to handle potential JSON parsing errors
    const createResClone = createRes.clone();

    try {
      const newOrg = await createRes.json();
      console.log(`Successfully created new org: ${orgName} with ID: ${newOrg.id}`);
      return newOrg.id;
    } catch (jsonError) {
      const responseText = await createResClone.text();
      console.error(`Failed to parse JSON response for new org: ${responseText}`);
      throw new Error(`Failed to parse JSON response from Gitea API: ${jsonError instanceof Error ? jsonError.message : String(jsonError)}`);
    }
  } catch (error) {
    const errorMessage =
      error instanceof Error
        ? error.message
        : "Unknown error occurred in getOrCreateGiteaOrg.";

    console.error(`Error in getOrCreateGiteaOrg for ${orgName}: ${errorMessage}`);

    await createMirrorJob({
      userId: config.userId,
      organizationId: orgId,
@@ -417,29 +453,20 @@ export async function mirrorGitHubRepoToGiteaOrg({
      })
      .where(eq(repositories.id, repository.id!));

    // Append log for "mirroring" status
    await createMirrorJob({
      userId: config.userId,
      repositoryId: repository.id,
      repositoryName: repository.name,
      message: `Started mirroring repository: ${repository.name}`,
      details: `Repository ${repository.name} is now in the mirroring state.`,
      status: "mirroring",
    });
    // Note: "mirroring" status events are handled by the concurrency system
    // to avoid duplicate events during batch operations

    const apiUrl = `${config.giteaConfig.url}/api/v1/repos/migrate`;

    const migrateRes = await superagent
      .post(apiUrl)
      .set("Authorization", `token ${config.giteaConfig.token}`)
      .set("Content-Type", "application/json")
      .send({
        clone_addr: cloneAddress,
        uid: giteaOrgId,
        repo_name: repository.name,
        mirror: true,
        private: repository.isPrivate,
      });
    const migrateRes = await httpPost(apiUrl, {
      clone_addr: cloneAddress,
      uid: giteaOrgId,
      repo_name: repository.name,
      mirror: true,
      private: repository.isPrivate,
    }, {
      "Authorization": `token ${config.giteaConfig.token}`,
    });

    // Clone issues
    if (config.githubConfig?.mirrorIssues) {
@@ -477,7 +504,7 @@ export async function mirrorGitHubRepoToGiteaOrg({
      status: "mirrored",
    });

    return migrateRes.body;
    return migrateRes.data;
  } catch (error) {
    console.error(
      `Error while mirroring repository ${repository.name}: ${
@@ -601,11 +628,22 @@ export async function mirrorGitHubOrgToGitea({
    .from(repositories)
    .where(eq(repositories.organization, organization.name));

  for (const repo of orgRepos) {
    await mirrorGitHubRepoToGiteaOrg({
      octokit,
      config,
      repository: {
  if (orgRepos.length === 0) {
    console.log(`No repositories found for organization ${organization.name}`);
    return;
  }

  console.log(`Mirroring ${orgRepos.length} repositories for organization ${organization.name}`);

  // Import the processWithRetry function
  const { processWithRetry } = await import("@/lib/utils/concurrency");

  // Process repositories in parallel with concurrency control
  await processWithRetry(
    orgRepos,
    async (repo) => {
      // Prepare repository data
      const repoData = {
        ...repo,
        status: repo.status as RepoStatus,
        visibility: repo.visibility as RepositoryVisibility,
@@ -614,11 +652,37 @@ export async function mirrorGitHubOrgToGitea({
        organization: repo.organization ?? undefined,
        forkedFrom: repo.forkedFrom ?? undefined,
        mirroredLocation: repo.mirroredLocation || "",
      };

      // Log the start of mirroring
      console.log(`Starting mirror for repository: ${repo.name} in organization ${organization.name}`);

      // Mirror the repository
      await mirrorGitHubRepoToGiteaOrg({
        octokit,
        config,
        repository: repoData,
        giteaOrgId,
        orgName: organization.name,
      });

      return repo;
    },
    {
      concurrencyLimit: 3, // Process 3 repositories at a time
      maxRetries: 2,
      retryDelay: 2000,
      onProgress: (completed, total, result) => {
        const percentComplete = Math.round((completed / total) * 100);
        if (result) {
          console.log(`Mirrored repository "${result.name}" in organization ${organization.name} (${completed}/${total}, ${percentComplete}%)`);
        }
      },
      giteaOrgId,
      orgName: organization.name,
    });
  }
      onRetry: (repo, error, attempt) => {
        console.log(`Retrying repository ${repo.name} in organization ${organization.name} (attempt ${attempt}): ${error.message}`);
      }
    }
  );

  console.log(`Organization ${organization.name} mirrored successfully`);

@@ -733,9 +797,9 @@ export const syncGiteaRepo = async ({
    // Use the actual owner where the repo was found
    const apiUrl = `${config.giteaConfig.url}/api/v1/repos/${actualOwner}/${repository.name}/mirror-sync`;

    const response = await superagent
      .post(apiUrl)
      .set("Authorization", `token ${config.giteaConfig.token}`);
    const response = await httpPost(apiUrl, undefined, {
      "Authorization": `token ${config.giteaConfig.token}`,
    });

    // Mark repo as "synced" in DB
    await db
@@ -761,7 +825,7 @@ export const syncGiteaRepo = async ({

    console.log(`Repository ${repository.name} synced successfully`);

    return response.body;
    return response.data;
  } catch (error) {
    console.error(
      `Error while syncing repository ${repository.name}: ${
@@ -837,78 +901,91 @@ export const mirrorGitRepoIssuesToGitea = async ({
    (res) => res.data
  );

  console.log(`Mirroring ${issues.length} issues from ${repository.fullName}`);
  // Filter out pull requests
  const filteredIssues = issues.filter(issue => !(issue as any).pull_request);

  console.log(`Mirroring ${filteredIssues.length} issues from ${repository.fullName}`);

  if (filteredIssues.length === 0) {
    console.log(`No issues to mirror for ${repository.fullName}`);
    return;
  }

  // Get existing labels from Gitea
  const giteaLabelsRes = await superagent
    .get(
      `${config.giteaConfig.url}/api/v1/repos/${repoOrigin}/${repository.name}/labels`
    )
    .set("Authorization", `token ${config.giteaConfig.token}`);
  const giteaLabelsRes = await httpGet(
    `${config.giteaConfig.url}/api/v1/repos/${repoOrigin}/${repository.name}/labels`,
    {
      "Authorization": `token ${config.giteaConfig.token}`,
    }
  );

  const giteaLabels = giteaLabelsRes.body;
  const giteaLabels = giteaLabelsRes.data;
  const labelMap = new Map<string, number>(
    giteaLabels.map((label: any) => [label.name, label.id])
  );

  for (const issue of issues) {
    if ((issue as any).pull_request) {
      continue;
    }
  // Import the processWithRetry function
  const { processWithRetry } = await import("@/lib/utils/concurrency");

    const githubLabelNames =
      issue.labels
        ?.map((l) => (typeof l === "string" ? l : l.name))
        .filter((l): l is string => !!l) || [];
  // Process issues in parallel with concurrency control
  await processWithRetry(
    filteredIssues,
    async (issue) => {
      const githubLabelNames =
        issue.labels
          ?.map((l) => (typeof l === "string" ? l : l.name))
          .filter((l): l is string => !!l) || [];

    const giteaLabelIds: number[] = [];
      const giteaLabelIds: number[] = [];

    // Resolve or create labels in Gitea
    for (const name of githubLabelNames) {
      if (labelMap.has(name)) {
        giteaLabelIds.push(labelMap.get(name)!);
      } else {
        try {
          const created = await superagent
            .post(
              `${config.giteaConfig.url}/api/v1/repos/${repoOrigin}/${repository.name}/labels`
            )
            .set("Authorization", `token ${config.giteaConfig.token}`)
            .send({ name, color: "#ededed" }); // Default color
      // Resolve or create labels in Gitea
      for (const name of githubLabelNames) {
        if (labelMap.has(name)) {
          giteaLabelIds.push(labelMap.get(name)!);
        } else {
          try {
            const created = await httpPost(
              `${config.giteaConfig!.url}/api/v1/repos/${repoOrigin}/${repository.name}/labels`,
              { name, color: "#ededed" }, // Default color
              {
                "Authorization": `token ${config.giteaConfig!.token}`,
              }
            );

          labelMap.set(name, created.body.id);
          giteaLabelIds.push(created.body.id);
        } catch (labelErr) {
          console.error(
            `Failed to create label "${name}" in Gitea: ${labelErr}`
          );
            labelMap.set(name, created.data.id);
            giteaLabelIds.push(created.data.id);
          } catch (labelErr) {
            console.error(
              `Failed to create label "${name}" in Gitea: ${labelErr}`
            );
          }
        }
      }
    }

    const originalAssignees =
      issue.assignees && issue.assignees.length > 0
        ? `\n\nOriginally assigned to: ${issue.assignees
            .map((a) => `@${a.login}`)
            .join(", ")} on GitHub.`
        : "";
      const originalAssignees =
        issue.assignees && issue.assignees.length > 0
          ? `\n\nOriginally assigned to: ${issue.assignees
              .map((a) => `@${a.login}`)
              .join(", ")} on GitHub.`
          : "";

    const issuePayload: any = {
      title: issue.title,
      body: `Originally created by @${
        issue.user?.login
      } on GitHub.${originalAssignees}\n\n${issue.body || ""}`,
      closed: issue.state === "closed",
      labels: giteaLabelIds,
    };
      const issuePayload: any = {
        title: issue.title,
        body: `Originally created by @${
          issue.user?.login
        } on GitHub.${originalAssignees}\n\n${issue.body || ""}`,
        closed: issue.state === "closed",
        labels: giteaLabelIds,
      };

      try {
        const createdIssue = await superagent
          .post(
            `${config.giteaConfig.url}/api/v1/repos/${repoOrigin}/${repository.name}/issues`
          )
          .set("Authorization", `token ${config.giteaConfig.token}`)
          .send(issuePayload);
        // Create the issue in Gitea
        const createdIssue = await httpPost(
          `${config.giteaConfig!.url}/api/v1/repos/${repoOrigin}/${repository.name}/issues`,
          issuePayload,
          {
            "Authorization": `token ${config.giteaConfig!.token}`,
          }
        );

        // Clone comments
        const comments = await octokit.paginate(
@@ -922,41 +999,50 @@ export const mirrorGitRepoIssuesToGitea = async ({
          (res) => res.data
        );

        for (const comment of comments) {
          try {
            await superagent
              .post(
                `${config.giteaConfig.url}/api/v1/repos/${repoOrigin}/${repository.name}/issues/${createdIssue.body.number}/comments`
              )
              .set("Authorization", `token ${config.giteaConfig.token}`)
              .send({
                body: `@${comment.user?.login} commented on GitHub:\n\n${comment.body}`,
              });
          } catch (commentErr) {
            console.error(
              `Failed to copy comment to Gitea for issue "${issue.title}": ${
                commentErr instanceof Error
                  ? commentErr.message
                  : String(commentErr)
              }`
            );
          }
        // Process comments in parallel with concurrency control
        if (comments.length > 0) {
          await processWithRetry(
            comments,
            async (comment) => {
              await httpPost(
                `${config.giteaConfig!.url}/api/v1/repos/${repoOrigin}/${repository.name}/issues/${createdIssue.data.number}/comments`,
                {
                  body: `@${comment.user?.login} commented on GitHub:\n\n${comment.body}`,
                },
                {
                  "Authorization": `token ${config.giteaConfig!.token}`,
                }
              );
              return comment;
            },
            {
              concurrencyLimit: 5,
              maxRetries: 2,
              retryDelay: 1000,
              onRetry: (_comment, error, attempt) => {
                console.log(`Retrying comment (attempt ${attempt}): ${error.message}`);
              }
            }
          );
        }
      } catch (err) {
        if (err instanceof Error && (err as any).response) {
          console.error(
            `Failed to create issue "${issue.title}" in Gitea: ${err.message}`
          );
          console.error(
            `Response body: ${JSON.stringify((err as any).response.body)}`
          );
        } else {
          console.error(
            `Failed to create issue "${issue.title}" in Gitea: ${
              err instanceof Error ? err.message : String(err)
            }`
          );

      return issue;
    },
    {
      concurrencyLimit: 3, // Process 3 issues at a time
      maxRetries: 2,
      retryDelay: 2000,
      onProgress: (completed, total, result) => {
        const percentComplete = Math.round((completed / total) * 100);
        if (result) {
          console.log(`Mirrored issue "${result.title}" (${completed}/${total}, ${percentComplete}%)`);
        }
      },
      onRetry: (issue, error, attempt) => {
        console.log(`Retrying issue "${issue.title}" (attempt ${attempt}): ${error.message}`);
      }
    }
  }
  );

  console.log(`Completed mirroring ${filteredIssues.length} issues for ${repository.fullName}`);
};
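
`processWithRetry` appears here only through its call sites; from those, its shape is roughly the following (a signature sketch inferred from usage, not the actual implementation in `@/lib/utils/concurrency`):

declare function processWithRetry<T, R>(
  items: T[],
  worker: (item: T) => Promise<R>,
  options: {
    concurrencyLimit: number;
    maxRetries: number;
    retryDelay: number; // milliseconds between attempts
    onProgress?: (completed: number, total: number, result?: R) => void;
    onRetry?: (item: T, error: Error, attempt: number) => void;
  }
): Promise<R[]>;
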
@@ -1,5 +1,6 @@
import type { RepoStatus } from "@/types/Repository";
import { db, mirrorJobs } from "./db";
import { eq, and, or, lt, isNull } from "drizzle-orm";
import { v4 as uuidv4 } from "uuid";
import { publishEvent } from "./events";

@@ -12,6 +13,12 @@ export async function createMirrorJob({
  message,
  status,
  details,
  jobType,
  batchId,
  totalItems,
  itemIds,
  inProgress,
  skipDuplicateEvent,
}: {
  userId: string;
  organizationId?: string;
@@ -21,6 +28,12 @@ export async function createMirrorJob({
  details?: string;
  message: string;
  status: RepoStatus;
  jobType?: "mirror" | "sync" | "retry";
  batchId?: string;
  totalItems?: number;
  itemIds?: string[];
  inProgress?: boolean;
  skipDuplicateEvent?: boolean; // Option to skip event publishing for internal operations
}) {
  const jobId = uuidv4();
  const currentTimestamp = new Date();
@@ -32,24 +45,49 @@ export async function createMirrorJob({
    repositoryName,
    organizationId,
    organizationName,
    configId: uuidv4(),
    details,
    message: message,
    status: status,
    timestamp: currentTimestamp,

    // New resilience fields
    jobType: jobType || "mirror",
    batchId: batchId || undefined,
    totalItems: totalItems || undefined,
    completedItems: 0,
    itemIds: itemIds || undefined,
    completedItemIds: [],
    inProgress: inProgress !== undefined ? inProgress : false,
    startedAt: inProgress ? currentTimestamp : undefined,
    completedAt: undefined,
    lastCheckpoint: undefined,
  };

  try {
    // Insert the job into the database
    await db.insert(mirrorJobs).values(job);

    // Publish the event using SQLite instead of Redis
    const channel = `mirror-status:${userId}`;
    await publishEvent({
      userId,
      channel,
      payload: job
    });
    // Publish the event using SQLite instead of Redis (unless skipped)
    if (!skipDuplicateEvent) {
      const channel = `mirror-status:${userId}`;

      // Create deduplication key based on the operation
      let deduplicationKey: string | undefined;
      if (repositoryId && status) {
        deduplicationKey = `repo-${repositoryId}-${status}`;
      } else if (organizationId && status) {
        deduplicationKey = `org-${organizationId}-${status}`;
      } else if (batchId) {
        deduplicationKey = `batch-${batchId}-${status}`;
      }

      await publishEvent({
        userId,
        channel,
        payload: job,
        deduplicationKey
      });
    }

    return jobId;
  } catch (error) {
@@ -57,3 +95,217 @@ export async function createMirrorJob({
    throw new Error("Error creating mirror job");
  }
}

/**
 * Updates the progress of a mirror job
 */
export async function updateMirrorJobProgress({
  jobId,
  completedItemId,
  status,
  message,
  details,
  inProgress,
  isCompleted,
}: {
  jobId: string;
  completedItemId?: string;
  status?: RepoStatus;
  message?: string;
  details?: string;
  inProgress?: boolean;
  isCompleted?: boolean;
}) {
  try {
    // Get the current job
    const [job] = await db
      .select()
      .from(mirrorJobs)
      .where(eq(mirrorJobs.id, jobId));

    if (!job) {
      throw new Error(`Mirror job with ID ${jobId} not found`);
    }

    // Update the job with new progress
    const updates: Record<string, any> = {
      lastCheckpoint: new Date(),
    };

    // Add completed item if provided
    if (completedItemId) {
      const completedItemIds = job.completedItemIds || [];
      if (!completedItemIds.includes(completedItemId)) {
        updates.completedItemIds = [...completedItemIds, completedItemId];
        updates.completedItems = (job.completedItems || 0) + 1;
      }
    }

    // Update status if provided
    if (status) {
      updates.status = status;
    }

    // Update message if provided
    if (message) {
      updates.message = message;
    }

    // Update details if provided
    if (details) {
      updates.details = details;
    }

    // Update in-progress status if provided
    if (inProgress !== undefined) {
      updates.inProgress = inProgress;
    }

    // Mark as completed if specified
    if (isCompleted) {
      updates.inProgress = false;
      updates.completedAt = new Date();
    }

    // Update the job in the database
    await db
      .update(mirrorJobs)
      .set(updates)
      .where(eq(mirrorJobs.id, jobId));

    // Publish the event with deduplication
    const updatedJob = {
      ...job,
      ...updates,
    };

    // Create deduplication key for progress updates
    let deduplicationKey: string | undefined;
    if (completedItemId) {
      deduplicationKey = `progress-${jobId}-${completedItemId}`;
    } else if (isCompleted) {
      deduplicationKey = `completed-${jobId}`;
    } else {
      deduplicationKey = `update-${jobId}-${Date.now()}`;
    }

    await publishEvent({
      userId: job.userId,
      channel: `mirror-status:${job.userId}`,
      payload: updatedJob,
      deduplicationKey
    });

    return updatedJob;
  } catch (error) {
    console.error("Error updating mirror job progress:", error);
    throw new Error("Error updating mirror job progress");
  }
}

/**
 * Finds interrupted jobs that need to be resumed with enhanced criteria
 */
export async function findInterruptedJobs() {
  try {
    // Find jobs that are marked as in-progress but haven't been updated recently
    const cutoffTime = new Date();
    cutoffTime.setMinutes(cutoffTime.getMinutes() - 10); // Consider jobs inactive after 10 minutes without updates

    // Also check for jobs that have been running for too long (over 2 hours)
    const staleCutoffTime = new Date();
    staleCutoffTime.setHours(staleCutoffTime.getHours() - 2);

    const interruptedJobs = await db
      .select()
      .from(mirrorJobs)
      .where(
        and(
          eq(mirrorJobs.inProgress, true),
          or(
            // Jobs with no recent checkpoint
            or(isNull(mirrorJobs.lastCheckpoint), lt(mirrorJobs.lastCheckpoint, cutoffTime)),
            // Jobs that started too long ago (likely stale)
            lt(mirrorJobs.startedAt, staleCutoffTime)
          )
        )
      );

    // Log details about found jobs for debugging
    if (interruptedJobs.length > 0) {
      console.log(`Found ${interruptedJobs.length} interrupted jobs:`);
      interruptedJobs.forEach(job => {
        const lastCheckpoint = job.lastCheckpoint ? new Date(job.lastCheckpoint).toISOString() : 'never';
        const startedAt = job.startedAt ? new Date(job.startedAt).toISOString() : 'unknown';
        console.log(`- Job ${job.id}: ${job.jobType} (started: ${startedAt}, last checkpoint: ${lastCheckpoint})`);
      });
    }

    return interruptedJobs;
  } catch (error) {
    console.error("Error finding interrupted jobs:", error);
    return [];
  }
}

/**
 * Resumes an interrupted job
 */
export async function resumeInterruptedJob(job: any) {
  try {
    console.log(`Resuming interrupted job: ${job.id}`);

    // Skip if job doesn't have the necessary data to resume
    if (!job.itemIds || !job.completedItemIds) {
      console.log(`Cannot resume job ${job.id}: missing item data`);

      // Mark the job as failed
      await updateMirrorJobProgress({
        jobId: job.id,
        status: "failed",
        message: "Job interrupted and could not be resumed",
        details: "The job was interrupted and did not have enough information to resume",
        inProgress: false,
        isCompleted: true,
      });

      return null;
    }

    // Calculate remaining items
    const remainingItemIds = job.itemIds.filter(
      (id: string) => !job.completedItemIds.includes(id)
    );

    if (remainingItemIds.length === 0) {
      console.log(`Job ${job.id} has no remaining items, marking as completed`);

      // Mark the job as completed
      await updateMirrorJobProgress({
        jobId: job.id,
        status: "mirrored",
        message: "Job completed after resuming",
        inProgress: false,
        isCompleted: true,
      });

      return null;
    }

    // Update the job to show it's being resumed
    await updateMirrorJobProgress({
      jobId: job.id,
      message: `Resuming job with ${remainingItemIds.length} remaining items`,
      details: `Job was interrupted and is being resumed. ${job.completedItemIds.length} of ${job.itemIds.length} items were already processed.`,
      inProgress: true,
    });

    return {
      job,
      remainingItemIds,
    };
  } catch (error) {
    console.error(`Error resuming job ${job.id}:`, error);
    return null;
  }
}
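
At startup the two helpers compose into a recovery pass. A sketch of plausible wiring (the surrounding startup hook is hypothetical):

import { findInterruptedJobs, resumeInterruptedJob } from "@/lib/helpers";

export async function recoverJobsOnStartup() {
  for (const job of await findInterruptedJobs()) {
    const resumable = await resumeInterruptedJob(job);
    if (resumable) {
      // resumable.remainingItemIds lists the work still outstanding for resumable.job
      console.log(`Job ${resumable.job.id}: ${resumable.remainingItemIds.length} items left`);
    }
  }
}
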
194 src/lib/http-client.ts Normal file
@@ -0,0 +1,194 @@
/**
 * HTTP client utility functions using fetch() for consistent error handling
 */

export interface HttpResponse<T = any> {
  data: T;
  status: number;
  statusText: string;
  headers: Headers;
}

export class HttpError extends Error {
  constructor(
    message: string,
    public status: number,
    public statusText: string,
    public response?: string
  ) {
    super(message);
    this.name = 'HttpError';
  }
}

/**
 * Enhanced fetch with consistent error handling and JSON parsing
 */
export async function httpRequest<T = any>(
  url: string,
  options: RequestInit = {}
): Promise<HttpResponse<T>> {
  try {
    const response = await fetch(url, {
      ...options,
      headers: {
        'Content-Type': 'application/json',
        ...options.headers,
      },
    });

    // Clone the response so the body can still be read for error reporting
    const responseClone = response.clone();

    if (!response.ok) {
      let errorMessage = `HTTP ${response.status}: ${response.statusText}`;
      let responseText = '';

      try {
        responseText = await responseClone.text();
        if (responseText) {
          errorMessage += ` - ${responseText}`;
        }
      } catch {
        // Ignore text parsing errors
      }

      throw new HttpError(
        errorMessage,
        response.status,
        response.statusText,
        responseText
      );
    }

    // Check the content type for JSON responses
    const contentType = response.headers.get('content-type');
    let data: T;

    if (contentType && contentType.includes('application/json')) {
      try {
        data = await response.json();
      } catch (jsonError) {
        const responseText = await responseClone.text();
        console.error(`Failed to parse JSON response: ${responseText}`);
        throw new HttpError(
          `Failed to parse JSON response: ${jsonError instanceof Error ? jsonError.message : String(jsonError)}`,
          response.status,
          response.statusText,
          responseText
        );
      }
    } else {
      // For non-JSON responses, return the text as data
      data = (await response.text()) as unknown as T;
    }

    return {
      data,
      status: response.status,
      statusText: response.statusText,
      headers: response.headers,
    };
  } catch (error) {
    if (error instanceof HttpError) {
      throw error;
    }

    // Handle network errors, DNS failures, etc.
    throw new HttpError(
      `Network error: ${error instanceof Error ? error.message : String(error)}`,
      0,
      'Network Error'
    );
  }
}

/**
 * GET request
 */
export async function httpGet<T = any>(
  url: string,
  headers?: Record<string, string>
): Promise<HttpResponse<T>> {
  return httpRequest<T>(url, {
    method: 'GET',
    headers,
  });
}

/**
 * POST request
 */
export async function httpPost<T = any>(
  url: string,
  body?: any,
  headers?: Record<string, string>
): Promise<HttpResponse<T>> {
  return httpRequest<T>(url, {
    method: 'POST',
    headers,
    body: body ? JSON.stringify(body) : undefined,
  });
}

/**
 * PUT request
 */
export async function httpPut<T = any>(
  url: string,
  body?: any,
  headers?: Record<string, string>
): Promise<HttpResponse<T>> {
  return httpRequest<T>(url, {
    method: 'PUT',
    headers,
    body: body ? JSON.stringify(body) : undefined,
  });
}

/**
 * DELETE request
 */
export async function httpDelete<T = any>(
  url: string,
  headers?: Record<string, string>
): Promise<HttpResponse<T>> {
  return httpRequest<T>(url, {
    method: 'DELETE',
    headers,
  });
}

/**
 * Gitea-specific HTTP client with authentication
 */
export class GiteaHttpClient {
  constructor(
    private baseUrl: string,
    private token: string
  ) {}

  private getHeaders(additionalHeaders?: Record<string, string>): Record<string, string> {
    return {
      'Authorization': `token ${this.token}`,
      'Content-Type': 'application/json',
      ...additionalHeaders,
    };
  }

  async get<T = any>(endpoint: string): Promise<HttpResponse<T>> {
    return httpGet<T>(`${this.baseUrl}${endpoint}`, this.getHeaders());
  }

  async post<T = any>(endpoint: string, body?: any): Promise<HttpResponse<T>> {
    return httpPost<T>(`${this.baseUrl}${endpoint}`, body, this.getHeaders());
  }

  async put<T = any>(endpoint: string, body?: any): Promise<HttpResponse<T>> {
    return httpPut<T>(`${this.baseUrl}${endpoint}`, body, this.getHeaders());
  }

  async delete<T = any>(endpoint: string): Promise<HttpResponse<T>> {
    return httpDelete<T>(`${this.baseUrl}${endpoint}`, this.getHeaders());
  }
}
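To make the intended call pattern concrete, here is a minimal usage sketch of the client above. The base URL, token source, and the /user/repos endpoint are illustrative placeholders, not values taken from this change:

// Sketch: fetching the authenticated user's repositories via GiteaHttpClient.
import { GiteaHttpClient, HttpError } from '@/lib/http-client';

const gitea = new GiteaHttpClient(
  'https://gitea.example.com/api/v1', // placeholder base URL
  process.env.GITEA_TOKEN ?? ''       // placeholder token source
);

try {
  const { data, status } = await gitea.get<Array<{ name: string }>>('/user/repos');
  console.log(`Fetched ${data.length} repositories (HTTP ${status})`);
} catch (error) {
  if (error instanceof HttpError) {
    // HttpError carries the status and raw response body for debugging
    console.error(`Gitea request failed: ${error.status} ${error.statusText}`, error.response);
  } else {
    throw error;
  }
}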
504 src/lib/recovery.ts Normal file
@@ -0,0 +1,504 @@
/**
 * Recovery mechanism for interrupted jobs
 * This module handles detecting and resuming jobs that were interrupted by container restarts
 */

import { findInterruptedJobs, resumeInterruptedJob } from './helpers';
import { db, repositories, organizations, mirrorJobs } from './db';
import { eq, and, lt, inArray } from 'drizzle-orm';
import { mirrorGithubRepoToGitea, mirrorGitHubOrgRepoToGiteaOrg, syncGiteaRepo } from './gitea';
import { createGitHubClient } from './github';
import { processWithResilience } from './utils/concurrency';
import { repositoryVisibilityEnum, repoStatusEnum } from '@/types/Repository';
import type { Repository } from './db/schema';

// Recovery state tracking
let recoveryInProgress = false;
let lastRecoveryAttempt: Date | null = null;

/**
 * Validates the database connection before attempting recovery
 */
async function validateDatabaseConnection(): Promise<boolean> {
  try {
    // Simple query to test database connectivity
    await db.select().from(mirrorJobs).limit(1);
    return true;
  } catch (error) {
    console.error('Database connection validation failed:', error);
    return false;
  }
}

/**
 * Cleans up stale jobs that are too old to recover
 */
async function cleanupStaleJobs(): Promise<void> {
  try {
    const staleThreshold = new Date();
    staleThreshold.setHours(staleThreshold.getHours() - 24); // Jobs older than 24 hours

    const staleJobs = await db
      .select()
      .from(mirrorJobs)
      .where(
        and(
          eq(mirrorJobs.inProgress, true),
          lt(mirrorJobs.startedAt, staleThreshold)
        )
      );

    if (staleJobs.length > 0) {
      console.log(`Found ${staleJobs.length} stale jobs to clean up`);

      // Mark stale jobs as failed
      await db
        .update(mirrorJobs)
        .set({
          inProgress: false,
          completedAt: new Date(),
          status: "failed",
          message: "Job marked as failed due to being stale (older than 24 hours)"
        })
        .where(
          and(
            eq(mirrorJobs.inProgress, true),
            lt(mirrorJobs.startedAt, staleThreshold)
          )
        );

      console.log(`Cleaned up ${staleJobs.length} stale jobs`);
    }
  } catch (error) {
    console.error('Error cleaning up stale jobs:', error);
  }
}

/**
 * Initialize the recovery system with enhanced error handling and resilience
 * This should be called when the application starts
 */
export async function initializeRecovery(options: {
  maxRetries?: number;
  retryDelay?: number;
  skipIfRecentAttempt?: boolean;
} = {}): Promise<boolean> {
  const { maxRetries = 3, retryDelay = 5000, skipIfRecentAttempt = true } = options;

  // Prevent concurrent recovery attempts
  if (recoveryInProgress) {
    console.log('Recovery already in progress, skipping...');
    return false;
  }

  // Skip if there was a recent attempt (within the last 5 minutes) unless forced
  if (skipIfRecentAttempt && lastRecoveryAttempt) {
    const timeSinceLastAttempt = Date.now() - lastRecoveryAttempt.getTime();
    if (timeSinceLastAttempt < 5 * 60 * 1000) {
      console.log('Recent recovery attempt detected, skipping...');
      return false;
    }
  }

  recoveryInProgress = true;
  lastRecoveryAttempt = new Date();

  console.log('Initializing recovery system...');

  let attempt = 0;
  while (attempt < maxRetries) {
    try {
      attempt++;
      console.log(`Recovery attempt ${attempt}/${maxRetries}`);

      // Validate the database connection first
      const dbConnected = await validateDatabaseConnection();
      if (!dbConnected) {
        throw new Error('Database connection validation failed');
      }

      // Clean up stale jobs first
      await cleanupStaleJobs();

      // Find interrupted jobs
      const interruptedJobs = await findInterruptedJobs();

      if (interruptedJobs.length === 0) {
        console.log('No interrupted jobs found.');
        recoveryInProgress = false;
        return true;
      }

      console.log(`Found ${interruptedJobs.length} interrupted jobs. Starting recovery...`);

      // Process each interrupted job with individual error handling
      let successCount = 0;
      let failureCount = 0;

      for (const job of interruptedJobs) {
        try {
          const resumeData = await resumeInterruptedJob(job);

          if (!resumeData) {
            console.log(`Job ${job.id} could not be resumed.`);
            failureCount++;
            continue;
          }

          const { job: updatedJob, remainingItemIds } = resumeData;

          // Handle the different job types
          switch (updatedJob.jobType) {
            case 'mirror':
              await recoverMirrorJob(updatedJob, remainingItemIds);
              break;
            case 'sync':
              await recoverSyncJob(updatedJob, remainingItemIds);
              break;
            case 'retry':
              await recoverRetryJob(updatedJob, remainingItemIds);
              break;
            default:
              console.log(`Unknown job type: ${updatedJob.jobType}`);
              failureCount++;
              continue;
          }

          successCount++;
        } catch (jobError) {
          console.error(`Error recovering individual job ${job.id}:`, jobError);
          failureCount++;

          // Mark the job as failed if recovery fails
          try {
            await db
              .update(mirrorJobs)
              .set({
                inProgress: false,
                completedAt: new Date(),
                status: "failed",
                message: `Job recovery failed: ${jobError instanceof Error ? jobError.message : String(jobError)}`
              })
              .where(eq(mirrorJobs.id, job.id));
          } catch (updateError) {
            console.error(`Failed to mark job ${job.id} as failed:`, updateError);
          }
        }
      }

      console.log(`Recovery process completed. Success: ${successCount}, Failures: ${failureCount}`);
      recoveryInProgress = false;
      return true;

    } catch (error) {
      console.error(`Recovery attempt ${attempt} failed:`, error);

      if (attempt < maxRetries) {
        console.log(`Retrying in ${retryDelay}ms...`);
        await new Promise(resolve => setTimeout(resolve, retryDelay));
      } else {
        console.error('All recovery attempts failed');
        recoveryInProgress = false;
        return false;
      }
    }
  }

  recoveryInProgress = false;
  return false;
}

/**
 * Recover a mirror job with enhanced error handling
 */
async function recoverMirrorJob(job: any, remainingItemIds: string[]) {
  console.log(`Recovering mirror job ${job.id} with ${remainingItemIds.length} remaining items`);

  try {
    // Get the config for this user with better error handling
    const configs = await db
      .select()
      .from(repositories)
      .where(eq(repositories.userId, job.userId))
      .limit(1);

    if (configs.length === 0) {
      throw new Error(`No configuration found for user ${job.userId}`);
    }

    const config = configs[0];
    if (!config.configId) {
      throw new Error(`Configuration missing configId for user ${job.userId}`);
    }

    // Get the repositories to process, with validation
    const repos = await db
      .select()
      .from(repositories)
      .where(inArray(repositories.id, remainingItemIds));

    if (repos.length === 0) {
      console.warn(`No repositories found for remaining item IDs: ${remainingItemIds.join(', ')}`);
      // Mark the job as completed since there's nothing to process
      await db
        .update(mirrorJobs)
        .set({
          inProgress: false,
          completedAt: new Date(),
          status: "mirrored",
          message: "Job completed - no repositories found to process"
        })
        .where(eq(mirrorJobs.id, job.id));
      return;
    }

    console.log(`Found ${repos.length} repositories to process for recovery`);

    // Validate the GitHub configuration before creating a client
    if (!config.githubConfig?.token) {
      throw new Error('GitHub token not found in configuration');
    }

    // Create a GitHub client with error handling
    let octokit;
    try {
      octokit = createGitHubClient(config.githubConfig.token);
    } catch (error) {
      throw new Error(`Failed to create GitHub client: ${error instanceof Error ? error.message : String(error)}`);
    }

    // Process the repositories with resilience and reduced concurrency for recovery
    await processWithResilience(
      repos,
      async (repo) => {
        // Prepare the repository data with validation
        const repoData = {
          ...repo,
          status: repoStatusEnum.parse("imported"),
          organization: repo.organization ?? undefined,
          lastMirrored: repo.lastMirrored ?? undefined,
          errorMessage: repo.errorMessage ?? undefined,
          forkedFrom: repo.forkedFrom ?? undefined,
          visibility: repositoryVisibilityEnum.parse(repo.visibility || "public"),
          mirroredLocation: repo.mirroredLocation || "",
        };

        // Mirror the repository based on whether it belongs to an organization
        if (repo.organization && config.githubConfig.preserveOrgStructure) {
          await mirrorGitHubOrgRepoToGiteaOrg({
            config,
            octokit,
            orgName: repo.organization,
            repository: repoData,
          });
        } else {
          await mirrorGithubRepoToGitea({
            octokit,
            repository: repoData,
            config,
          });
        }

        return repo;
      },
      {
        userId: job.userId,
        jobType: 'mirror',
        getItemId: (repo) => repo.id,
        getItemName: (repo) => repo.name,
        resumeFromJobId: job.id,
        concurrencyLimit: 2, // Reduced concurrency so recovery stays stable
        maxRetries: 3, // Increased retries for recovery
        retryDelay: 3000, // Longer delay for recovery
      }
    );

    console.log(`Successfully recovered mirror job ${job.id}`);
  } catch (error) {
    console.error(`Error recovering mirror job ${job.id}:`, error);

    // Mark the job as failed
    try {
      await db
        .update(mirrorJobs)
        .set({
          inProgress: false,
          completedAt: new Date(),
          status: "failed",
          message: `Mirror job recovery failed: ${error instanceof Error ? error.message : String(error)}`
        })
        .where(eq(mirrorJobs.id, job.id));
    } catch (updateError) {
      console.error(`Failed to mark mirror job ${job.id} as failed:`, updateError);
    }

    throw error; // Re-throw to be handled by the caller
  }
}

/**
 * Recover a sync job with enhanced error handling
 */
async function recoverSyncJob(job: any, remainingItemIds: string[]) {
  console.log(`Recovering sync job ${job.id} with ${remainingItemIds.length} remaining items`);

  try {
    // Get the config for this user with better error handling
    const configs = await db
      .select()
      .from(repositories)
      .where(eq(repositories.userId, job.userId))
      .limit(1);

    if (configs.length === 0) {
      throw new Error(`No configuration found for user ${job.userId}`);
    }

    const config = configs[0];
    if (!config.configId) {
      throw new Error(`Configuration missing configId for user ${job.userId}`);
    }

    // Get the repositories to process, with validation
    const repos = await db
      .select()
      .from(repositories)
      .where(inArray(repositories.id, remainingItemIds));

    if (repos.length === 0) {
      console.warn(`No repositories found for remaining item IDs: ${remainingItemIds.join(', ')}`);
      // Mark the job as completed since there's nothing to process
      await db
        .update(mirrorJobs)
        .set({
          inProgress: false,
          completedAt: new Date(),
          status: "mirrored",
          message: "Job completed - no repositories found to process"
        })
        .where(eq(mirrorJobs.id, job.id));
      return;
    }

    console.log(`Found ${repos.length} repositories to process for sync recovery`);

    // Process the repositories with resilience and reduced concurrency for recovery
    await processWithResilience(
      repos,
      async (repo) => {
        // Prepare the repository data with validation
        const repoData = {
          ...repo,
          status: repoStatusEnum.parse(repo.status || "imported"),
          organization: repo.organization ?? undefined,
          lastMirrored: repo.lastMirrored ?? undefined,
          errorMessage: repo.errorMessage ?? undefined,
          forkedFrom: repo.forkedFrom ?? undefined,
          visibility: repositoryVisibilityEnum.parse(repo.visibility || "public"),
        };

        // Sync the repository
        await syncGiteaRepo({
          config,
          repository: repoData,
        });

        return repo;
      },
      {
        userId: job.userId,
        jobType: 'sync',
        getItemId: (repo) => repo.id,
        getItemName: (repo) => repo.name,
        resumeFromJobId: job.id,
        concurrencyLimit: 3, // Reduced concurrency for recovery
        maxRetries: 3, // Increased retries for recovery
        retryDelay: 3000, // Longer delay for recovery
      }
    );

    console.log(`Successfully recovered sync job ${job.id}`);
  } catch (error) {
    console.error(`Error recovering sync job ${job.id}:`, error);

    // Mark the job as failed
    try {
      await db
        .update(mirrorJobs)
        .set({
          inProgress: false,
          completedAt: new Date(),
          status: "failed",
          message: `Sync job recovery failed: ${error instanceof Error ? error.message : String(error)}`
        })
        .where(eq(mirrorJobs.id, job.id));
    } catch (updateError) {
      console.error(`Failed to mark sync job ${job.id} as failed:`, updateError);
    }

    throw error; // Re-throw to be handled by the caller
  }
}

/**
 * Recover a retry job with enhanced error handling
 */
async function recoverRetryJob(job: any, remainingItemIds: string[]) {
  console.log(`Recovering retry job ${job.id} with ${remainingItemIds.length} remaining items`);

  try {
    // For now, retry jobs are treated the same way as mirror jobs;
    // in the future this could carry retry-specific logic
    await recoverMirrorJob(job, remainingItemIds);
    console.log(`Successfully recovered retry job ${job.id}`);
  } catch (error) {
    console.error(`Error recovering retry job ${job.id}:`, error);

    // Mark the job as failed
    try {
      await db
        .update(mirrorJobs)
        .set({
          inProgress: false,
          completedAt: new Date(),
          status: "failed",
          message: `Retry job recovery failed: ${error instanceof Error ? error.message : String(error)}`
        })
        .where(eq(mirrorJobs.id, job.id));
    } catch (updateError) {
      console.error(`Failed to mark retry job ${job.id} as failed:`, updateError);
    }

    throw error; // Re-throw to be handled by the caller
  }
}

/**
 * Get the recovery system status
 */
export function getRecoveryStatus() {
  return {
    inProgress: recoveryInProgress,
    lastAttempt: lastRecoveryAttempt,
  };
}

/**
 * Force recovery to run (bypassing the recent-attempt check)
 */
export async function forceRecovery(): Promise<boolean> {
  return initializeRecovery({ skipIfRecentAttempt: false });
}

/**
 * Check whether any jobs need recovery
 */
export async function hasJobsNeedingRecovery(): Promise<boolean> {
  try {
    const interruptedJobs = await findInterruptedJobs();
    return interruptedJobs.length > 0;
  } catch (error) {
    console.error('Error checking for jobs needing recovery:', error);
    return false;
  }
}
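As a rough sketch of how these exports are meant to be driven from a startup script (the middleware below does something similar), assuming the same module paths; the surrounding script itself is illustrative:

// Sketch: one-shot recovery pass at process startup.
import { hasJobsNeedingRecovery, initializeRecovery, getRecoveryStatus } from '@/lib/recovery';

if (await hasJobsNeedingRecovery()) {
  const ok = await initializeRecovery({ maxRetries: 3, retryDelay: 5000 });
  if (!ok) {
    // Recovery is best-effort; the middleware fallback can retry later
    console.warn('Startup recovery incomplete:', getRecoveryStatus());
  }
}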
240 src/lib/shutdown-manager.ts Normal file
@@ -0,0 +1,240 @@
/**
 * Shutdown Manager for Graceful Application Termination
 *
 * This module provides centralized shutdown coordination for the gitea-mirror application.
 * It ensures that:
 * - In-progress jobs are properly saved to the database
 * - Database connections are closed cleanly
 * - Background services are stopped gracefully
 * - No data loss occurs during container restarts
 */

import { db, mirrorJobs } from './db';
import { eq, and } from 'drizzle-orm';
import type { MirrorJob } from './db/schema';

// Shutdown state tracking
let shutdownInProgress = false;
let shutdownStartTime: Date | null = null;
let shutdownCallbacks: Array<() => Promise<void>> = [];
let activeJobs = new Set<string>();
let shutdownTimeout: NodeJS.Timeout | null = null;

// Configuration
const SHUTDOWN_TIMEOUT = 30000; // 30 seconds max shutdown time
const JOB_SAVE_TIMEOUT = 10000; // 10 seconds to save job state

/**
 * Register a callback to be executed during shutdown
 */
export function registerShutdownCallback(callback: () => Promise<void>): void {
  shutdownCallbacks.push(callback);
}

/**
 * Register an active job that needs to be tracked during shutdown
 */
export function registerActiveJob(jobId: string): void {
  activeJobs.add(jobId);
  console.log(`Registered active job: ${jobId} (${activeJobs.size} total active jobs)`);
}

/**
 * Unregister a job when it completes normally
 */
export function unregisterActiveJob(jobId: string): void {
  activeJobs.delete(jobId);
  console.log(`Unregistered job: ${jobId} (${activeJobs.size} remaining active jobs)`);
}

/**
 * Check if shutdown is currently in progress
 */
export function isShuttingDown(): boolean {
  return shutdownInProgress;
}

/**
 * Get shutdown status information
 */
export function getShutdownStatus() {
  return {
    inProgress: shutdownInProgress,
    startTime: shutdownStartTime,
    activeJobs: Array.from(activeJobs),
    registeredCallbacks: shutdownCallbacks.length,
  };
}

/**
 * Save the current state of an active job to the database
 */
async function saveJobState(jobId: string): Promise<void> {
  try {
    console.log(`Saving state for job ${jobId}...`);

    // Update the job to mark it as interrupted, but not failed
    await db
      .update(mirrorJobs)
      .set({
        inProgress: false,
        lastCheckpoint: new Date(),
        message: 'Job interrupted by application shutdown - will resume on restart',
      })
      .where(eq(mirrorJobs.id, jobId));

    console.log(`✅ Saved state for job ${jobId}`);
  } catch (error) {
    console.error(`❌ Failed to save state for job ${jobId}:`, error);
    throw error;
  }
}

/**
 * Save all active jobs to the database
 */
async function saveAllActiveJobs(): Promise<void> {
  if (activeJobs.size === 0) {
    console.log('No active jobs to save');
    return;
  }

  console.log(`Saving state for ${activeJobs.size} active jobs...`);

  const savePromises = Array.from(activeJobs).map(async (jobId) => {
    try {
      await Promise.race([
        saveJobState(jobId),
        new Promise<never>((_, reject) => {
          setTimeout(() => reject(new Error(`Timeout saving job ${jobId}`)), JOB_SAVE_TIMEOUT);
        })
      ]);
    } catch (error) {
      console.error(`Failed to save job ${jobId} within timeout:`, error);
      // Continue with the other jobs even if one fails
    }
  });

  await Promise.allSettled(savePromises);
  console.log('✅ Completed saving all active jobs');
}

/**
 * Execute all registered shutdown callbacks
 */
async function executeShutdownCallbacks(): Promise<void> {
  if (shutdownCallbacks.length === 0) {
    console.log('No shutdown callbacks to execute');
    return;
  }

  console.log(`Executing ${shutdownCallbacks.length} shutdown callbacks...`);

  const callbackPromises = shutdownCallbacks.map(async (callback, index) => {
    try {
      await callback();
      console.log(`✅ Shutdown callback ${index + 1} completed`);
    } catch (error) {
      console.error(`❌ Shutdown callback ${index + 1} failed:`, error);
      // Continue with the other callbacks even if one fails
    }
  });

  await Promise.allSettled(callbackPromises);
  console.log('✅ Completed all shutdown callbacks');
}

/**
 * Perform a graceful shutdown of the application
 */
export async function gracefulShutdown(signal: string = 'UNKNOWN'): Promise<void> {
  if (shutdownInProgress) {
    console.log('⚠️ Shutdown already in progress, ignoring additional signal');
    return;
  }

  shutdownInProgress = true;
  shutdownStartTime = new Date();

  console.log(`\n🛑 Graceful shutdown initiated by signal: ${signal}`);
  console.log(`📊 Shutdown status: ${activeJobs.size} active jobs, ${shutdownCallbacks.length} callbacks`);

  // Set up the shutdown timeout
  shutdownTimeout = setTimeout(() => {
    console.error(`❌ Shutdown timeout reached (${SHUTDOWN_TIMEOUT}ms), forcing exit`);
    process.exit(1);
  }, SHUTDOWN_TIMEOUT);

  try {
    // Step 1: Save all active job states
    console.log('\n📝 Step 1: Saving active job states...');
    await saveAllActiveJobs();

    // Step 2: Execute shutdown callbacks (stop services, close connections, etc.)
    console.log('\n🔧 Step 2: Executing shutdown callbacks...');
    await executeShutdownCallbacks();

    // Step 3: Close database connections
    console.log('\n💾 Step 3: Closing database connections...');
    // Note: Drizzle with bun:sqlite doesn't require explicit connection closing,
    // but this step is kept for completeness and future database changes

    console.log('\n✅ Graceful shutdown completed successfully');

    // Clear the timeout since shutdown completed successfully
    if (shutdownTimeout) {
      clearTimeout(shutdownTimeout);
      shutdownTimeout = null;
    }

    // Exit with a success code
    process.exit(0);

  } catch (error) {
    console.error('\n❌ Error during graceful shutdown:', error);

    // Clear the timeout
    if (shutdownTimeout) {
      clearTimeout(shutdownTimeout);
      shutdownTimeout = null;
    }

    // Exit with an error code
    process.exit(1);
  }
}

/**
 * Initialize the shutdown manager
 * This should be called early in the application lifecycle
 */
export function initializeShutdownManager(): void {
  console.log('🔧 Initializing shutdown manager...');

  // Reset state in case of re-initialization
  shutdownInProgress = false;
  shutdownStartTime = null;
  activeJobs.clear();
  shutdownCallbacks = []; // Reset the callbacks too

  // Clear any existing timeout
  if (shutdownTimeout) {
    clearTimeout(shutdownTimeout);
    shutdownTimeout = null;
  }

  console.log('✅ Shutdown manager initialized');
}

/**
 * Force an immediate shutdown (for emergencies)
 */
export function forceShutdown(exitCode: number = 1): void {
  console.error('🚨 Force shutdown requested');

  if (shutdownTimeout) {
    clearTimeout(shutdownTimeout);
  }

  process.exit(exitCode);
}
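A sketch of how a worker cooperates with the shutdown manager; the job id, item list, and processItem function are placeholders, not names from this change:

// Sketch: a long-running task that stops cleanly on SIGTERM.
import {
  registerShutdownCallback,
  registerActiveJob,
  unregisterActiveJob,
  isShuttingDown,
} from '@/lib/shutdown-manager';

// Stop background services when a shutdown begins
registerShutdownCallback(async () => {
  console.log('Stopping background services...');
});

const jobId = 'example-job-id';       // placeholder
const items = ['repo-a', 'repo-b'];   // placeholder work items

async function processItem(item: string): Promise<void> {
  // placeholder unit of work
}

registerActiveJob(jobId);
try {
  for (const item of items) {
    if (isShuttingDown()) break; // gracefulShutdown() checkpoints the job
    await processItem(item);
  }
} finally {
  unregisterActiveJob(jobId);
}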
141 src/lib/signal-handlers.ts Normal file
@@ -0,0 +1,141 @@
/**
 * Signal Handlers for Graceful Shutdown
 *
 * This module sets up proper signal handling for container environments.
 * It ensures the application responds correctly to SIGTERM, SIGINT, and other signals.
 */

import { gracefulShutdown, isShuttingDown } from './shutdown-manager';

// Track whether the signal handlers have been registered
let signalHandlersRegistered = false;

/**
 * Set up signal handlers for graceful shutdown
 * This should be called early in the application lifecycle
 */
export function setupSignalHandlers(): void {
  if (signalHandlersRegistered) {
    console.log('⚠️ Signal handlers already registered, skipping');
    return;
  }

  console.log('🔧 Setting up signal handlers for graceful shutdown...');

  // Handle SIGTERM (Docker stop, Kubernetes termination)
  process.on('SIGTERM', () => {
    console.log('\n📡 Received SIGTERM signal');
    if (!isShuttingDown()) {
      gracefulShutdown('SIGTERM').catch((error) => {
        console.error('Error during SIGTERM shutdown:', error);
        process.exit(1);
      });
    }
  });

  // Handle SIGINT (Ctrl+C)
  process.on('SIGINT', () => {
    console.log('\n📡 Received SIGINT signal');
    if (!isShuttingDown()) {
      gracefulShutdown('SIGINT').catch((error) => {
        console.error('Error during SIGINT shutdown:', error);
        process.exit(1);
      });
    }
  });

  // Handle SIGHUP (terminal hangup)
  process.on('SIGHUP', () => {
    console.log('\n📡 Received SIGHUP signal');
    if (!isShuttingDown()) {
      gracefulShutdown('SIGHUP').catch((error) => {
        console.error('Error during SIGHUP shutdown:', error);
        process.exit(1);
      });
    }
  });

  // Handle uncaught exceptions
  process.on('uncaughtException', (error) => {
    console.error('\n💥 Uncaught Exception:', error);
    console.error('Stack trace:', error.stack);

    if (!isShuttingDown()) {
      console.log('Initiating emergency shutdown due to uncaught exception...');
      gracefulShutdown('UNCAUGHT_EXCEPTION').catch((shutdownError) => {
        console.error('Error during emergency shutdown:', shutdownError);
        process.exit(1);
      });
    } else {
      // If already shutting down, force an exit
      console.error('Uncaught exception during shutdown, forcing exit');
      process.exit(1);
    }
  });

  // Handle unhandled promise rejections
  process.on('unhandledRejection', (reason, promise) => {
    console.error('\n💥 Unhandled Promise Rejection at:', promise);
    console.error('Reason:', reason);

    if (!isShuttingDown()) {
      console.log('Initiating emergency shutdown due to unhandled rejection...');
      gracefulShutdown('UNHANDLED_REJECTION').catch((shutdownError) => {
        console.error('Error during emergency shutdown:', shutdownError);
        process.exit(1);
      });
    } else {
      // If already shutting down, force an exit
      console.error('Unhandled rejection during shutdown, forcing exit');
      process.exit(1);
    }
  });

  // Handle process warnings (for debugging)
  process.on('warning', (warning) => {
    console.warn('⚠️ Process Warning:', warning.name);
    console.warn('Message:', warning.message);
    if (warning.stack) {
      console.warn('Stack:', warning.stack);
    }
  });

  signalHandlersRegistered = true;
  console.log('✅ Signal handlers registered successfully');
}

/**
 * Remove the signal handlers (for testing)
 */
export function removeSignalHandlers(): void {
  if (!signalHandlersRegistered) {
    return;
  }

  console.log('🔧 Removing signal handlers...');

  process.removeAllListeners('SIGTERM');
  process.removeAllListeners('SIGINT');
  process.removeAllListeners('SIGHUP');
  process.removeAllListeners('uncaughtException');
  process.removeAllListeners('unhandledRejection');
  process.removeAllListeners('warning');

  signalHandlersRegistered = false;
  console.log('✅ Signal handlers removed');
}

/**
 * Check whether the signal handlers are registered
 */
export function areSignalHandlersRegistered(): boolean {
  return signalHandlersRegistered;
}

/**
 * Send a test signal to the current process (for testing)
 */
export function sendTestSignal(signal: NodeJS.Signals = 'SIGTERM'): void {
  console.log(`🧪 Sending test signal: ${signal}`);
  process.kill(process.pid, signal);
}
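Wiring it all together looks roughly like this in a server entrypoint; the file itself is illustrative, since in this PR the same wiring happens in src/middleware.ts below:

// Sketch: entrypoint wiring for graceful container shutdown.
import { initializeShutdownManager } from '@/lib/shutdown-manager';
import { setupSignalHandlers } from '@/lib/signal-handlers';

initializeShutdownManager();
setupSignalHandlers();

// From here on, `docker stop` (SIGTERM) or Ctrl+C (SIGINT) triggers
// gracefulShutdown() instead of killing the process mid-job.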
110 src/lib/utils.test.ts Normal file
@@ -0,0 +1,110 @@
import { describe, test, expect } from "bun:test";
import { jsonResponse, formatDate, truncate, safeParse } from "./utils";

describe("jsonResponse", () => {
  test("creates a Response with JSON content", () => {
    const data = { message: "Hello, world!" };
    const response = jsonResponse({ data });

    expect(response).toBeInstanceOf(Response);
    expect(response.status).toBe(200);
    expect(response.headers.get("Content-Type")).toBe("application/json");
  });

  test("uses the provided status code", () => {
    const data = { error: "Not found" };
    const response = jsonResponse({ data, status: 404 });

    expect(response.status).toBe(404);
  });

  test("correctly serializes complex objects", async () => {
    const now = new Date();
    const data = {
      message: "Complex object",
      date: now,
      nested: { foo: "bar" },
      array: [1, 2, 3]
    };

    const response = jsonResponse({ data });
    const responseBody = await response.json();

    expect(responseBody).toEqual({
      message: "Complex object",
      date: now.toISOString(),
      nested: { foo: "bar" },
      array: [1, 2, 3]
    });
  });
});

describe("formatDate", () => {
  test("formats a date object", () => {
    const date = new Date("2023-01-15T12:30:45Z");
    const formatted = formatDate(date);

    // The exact format can depend on the locale, so check for the parts
    expect(formatted).toContain("2023");
    expect(formatted).toContain("January");
    expect(formatted).toContain("15");
  });

  test("formats a date string", () => {
    const dateStr = "2023-01-15T12:30:45Z";
    const formatted = formatDate(dateStr);

    expect(formatted).toContain("2023");
    expect(formatted).toContain("January");
    expect(formatted).toContain("15");
  });

  test("returns 'Never' for null or undefined", () => {
    expect(formatDate(null)).toBe("Never");
    expect(formatDate(undefined)).toBe("Never");
  });
});

describe("truncate", () => {
  test("truncates a string that exceeds the length", () => {
    const str = "This is a long string that needs truncation";
    const truncated = truncate(str, 10);

    expect(truncated).toBe("This is a ...");
    expect(truncated.length).toBe(13); // 10 chars + "..."
  });

  test("does not truncate a string that is shorter than the length", () => {
    const str = "Short";
    const truncated = truncate(str, 10);

    expect(truncated).toBe("Short");
  });

  test("handles empty strings", () => {
    expect(truncate("", 10)).toBe("");
  });
});

describe("safeParse", () => {
  test("parses valid JSON strings", () => {
    const jsonStr = '{"name":"John","age":30}';
    const parsed = safeParse(jsonStr);

    expect(parsed).toEqual({ name: "John", age: 30 });
  });

  test("returns undefined for invalid JSON strings", () => {
    const invalidJson = '{"name":"John",age:30}'; // Missing quotes around the age key
    const parsed = safeParse(invalidJson);

    expect(parsed).toBeUndefined();
  });

  test("returns the original value for non-string inputs", () => {
    const obj = { name: "John", age: 30 };
    const parsed = safeParse(obj);

    expect(parsed).toBe(obj);
  });
});
src/lib/utils.ts
@@ -1,7 +1,6 @@
 import { clsx, type ClassValue } from "clsx";
 import { twMerge } from "tailwind-merge";
-import axios from "axios";
-import type { AxiosError, AxiosRequestConfig } from "axios";
+import { httpRequest, HttpError } from "@/lib/http-client";
 import type { RepoStatus } from "@/types/Repository";

 export const API_BASE = "/api";
@@ -41,10 +40,10 @@ export function safeParse<T>(value: unknown): T | undefined {

 export async function apiRequest<T>(
   endpoint: string,
-  options: AxiosRequestConfig = {}
+  options: RequestInit = {}
 ): Promise<T> {
   try {
-    const response = await axios<T>(`${API_BASE}${endpoint}`, {
+    const response = await httpRequest<T>(`${API_BASE}${endpoint}`, {
       headers: {
         "Content-Type": "application/json",
         ...(options.headers || {}),
@@ -54,10 +53,10 @@ export async function apiRequest<T>(

     return response.data;
   } catch (err) {
-    const error = err as AxiosError<{ message?: string }>;
+    const error = err as HttpError;

     const message =
-      error.response?.data?.message ||
+      error.response ||
       error.message ||
       "An unknown error occurred";

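For reference, a hedged sketch of calling the rewritten apiRequest; the /health endpoint, response shape, and import path are placeholders:

// Sketch: a typed API call through the fetch-based apiRequest.
import { apiRequest } from '@/lib/utils';

interface HealthResponse {
  status: string;
}

const health = await apiRequest<HealthResponse>('/health', { method: 'GET' });
console.log(health.status);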
167 src/lib/utils/concurrency.test.ts Normal file
@@ -0,0 +1,167 @@
import { describe, test, expect, mock } from "bun:test";
import { processInParallel, processWithRetry } from "./concurrency";

describe("processInParallel", () => {
  test("processes items in parallel with concurrency control", async () => {
    // Create an array of numbers to process
    const items = [1, 2, 3, 4, 5, 6, 7, 8, 9, 10];

    // Create a mock function to track execution
    const processItem = mock(async (item: number) => {
      // Simulate async work
      await new Promise(resolve => setTimeout(resolve, 10));
      return item * 2;
    });

    // Create a mock progress callback
    const onProgress = mock((completed: number, total: number, result?: number) => {
      // Progress tracking
    });

    // Process the items with a concurrency limit of 3
    const results = await processInParallel(
      items,
      processItem,
      3,
      onProgress
    );

    // Verify the results
    expect(results).toEqual([2, 4, 6, 8, 10, 12, 14, 16, 18, 20]);

    // Verify that processItem was called for each item
    expect(processItem).toHaveBeenCalledTimes(10);

    // Verify that onProgress was called for each item
    expect(onProgress).toHaveBeenCalledTimes(10);

    // Verify the last call to onProgress had the correct completed/total values
    expect(onProgress.mock.calls[9][0]).toBe(10); // completed
    expect(onProgress.mock.calls[9][1]).toBe(10); // total
  });

  test("handles errors in processing", async () => {
    // Create an array of numbers to process
    const items = [1, 2, 3, 4, 5];

    // Create a mock function that throws an error for item 3
    const processItem = mock(async (item: number) => {
      if (item === 3) {
        throw new Error("Test error");
      }
      return item * 2;
    });

    // Create a spy for console.error
    const originalConsoleError = console.error;
    const consoleErrorMock = mock(() => {});
    console.error = consoleErrorMock;

    try {
      // Process the items
      const results = await processInParallel(items, processItem);

      // Verify the results (4 items remain; the one that errored is missing)
      expect(results).toEqual([2, 4, 8, 10]);

      // Verify that processItem was called for each item
      expect(processItem).toHaveBeenCalledTimes(5);

      // Verify that console.error was called once
      expect(consoleErrorMock).toHaveBeenCalledTimes(1);
    } finally {
      // Restore console.error
      console.error = originalConsoleError;
    }
  });
});

describe("processWithRetry", () => {
  test("retries failed operations", async () => {
    // Create an array of numbers to process
    const items = [1, 2, 3];

    // Create a counter to track retry attempts
    const attemptCounts: Record<number, number> = { 1: 0, 2: 0, 3: 0 };

    // Create a mock function that fails on the first attempt for item 2
    const processItem = mock(async (item: number) => {
      attemptCounts[item]++;

      if (item === 2 && attemptCounts[item] === 1) {
        throw new Error("Temporary error");
      }

      return item * 2;
    });

    // Create a mock for the onRetry callback
    const onRetry = mock((item: number, error: Error, attempt: number) => {
      // Retry tracking
    });

    // Process the items with retry
    const results = await processWithRetry(items, processItem, {
      maxRetries: 2,
      retryDelay: 10,
      onRetry,
    });

    // Verify the results
    expect(results).toEqual([2, 4, 6]);

    // Verify that item 2 was retried once
    expect(attemptCounts[1]).toBe(1); // No retries
    expect(attemptCounts[2]).toBe(2); // One retry
    expect(attemptCounts[3]).toBe(1); // No retries

    // Verify that onRetry was called once
    expect(onRetry).toHaveBeenCalledTimes(1);
    expect(onRetry.mock.calls[0][0]).toBe(2); // item
    expect(onRetry.mock.calls[0][2]).toBe(1); // attempt
  });

  test("gives up after max retries", async () => {
    // Create an array of numbers to process
    const items = [1, 2];

    // Create a mock function that always fails for item 2
    const processItem = mock(async (item: number) => {
      if (item === 2) {
        throw new Error("Persistent error");
      }
      return item * 2;
    });

    // Create a mock for the onRetry callback
    const onRetry = mock((item: number, error: Error, attempt: number) => {
      // Retry tracking
    });

    // Create a spy for console.error
    const originalConsoleError = console.error;
    const consoleErrorMock = mock(() => {});
    console.error = consoleErrorMock;

    try {
      // Process the items with retry
      const results = await processWithRetry(items, processItem, {
        maxRetries: 2,
        retryDelay: 10,
        onRetry,
      });

      // Verify the results (1 item remains; the one that kept failing is missing)
      expect(results).toEqual([2]);

      // Verify that onRetry was called twice (for 2 retry attempts)
      expect(onRetry).toHaveBeenCalledTimes(2);

      // Verify that console.error was called once
      expect(consoleErrorMock).toHaveBeenCalledTimes(1);
    } finally {
      // Restore console.error
      console.error = originalConsoleError;
    }
  });
});
336
src/lib/utils/concurrency.ts
Normal file
@@ -0,0 +1,336 @@
|
||||
/**
|
||||
* Utility for processing items in parallel with concurrency control
|
||||
*
|
||||
* @param items Array of items to process
|
||||
* @param processItem Function to process each item
|
||||
* @param concurrencyLimit Maximum number of concurrent operations
|
||||
* @param onProgress Optional callback for progress updates
|
||||
* @returns Promise that resolves when all items are processed
|
||||
*/
|
||||
export async function processInParallel<T, R>(
|
||||
items: T[],
|
||||
processItem: (item: T) => Promise<R>,
|
||||
concurrencyLimit: number = 5,
|
||||
onProgress?: (completed: number, total: number, result?: R) => void
|
||||
): Promise<R[]> {
|
||||
const results: R[] = [];
|
||||
let completed = 0;
|
||||
const total = items.length;
|
||||
|
||||
// Process items in batches to control concurrency
|
||||
for (let i = 0; i < total; i += concurrencyLimit) {
|
||||
const batch = items.slice(i, i + concurrencyLimit);
|
||||
|
||||
const batchPromises = batch.map(async (item) => {
|
||||
try {
|
||||
const result = await processItem(item);
|
||||
completed++;
|
||||
|
||||
if (onProgress) {
|
||||
onProgress(completed, total, result);
|
||||
}
|
||||
|
||||
return result;
|
||||
} catch (error) {
|
||||
completed++;
|
||||
|
||||
if (onProgress) {
|
||||
onProgress(completed, total);
|
||||
}
|
||||
|
||||
throw error;
|
||||
}
|
||||
});
|
||||
|
||||
// Wait for the current batch to complete before starting the next batch
|
||||
const batchResults = await Promise.allSettled(batchPromises);
|
||||
|
||||
// Process results and handle errors
|
||||
for (const result of batchResults) {
|
||||
if (result.status === 'fulfilled') {
|
||||
results.push(result.value);
|
||||
} else {
|
||||
console.error('Error processing item:', result.reason);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return results;
|
||||
}
|
||||
|
||||
/**
|
||||
* Utility for processing items in parallel with automatic retry for failed operations
|
||||
*
|
||||
* @param items Array of items to process
|
||||
* @param processItem Function to process each item
|
||||
* @param options Configuration options
|
||||
* @returns Promise that resolves when all items are processed
|
||||
*/
|
||||
export async function processWithRetry<T, R>(
|
||||
items: T[],
|
||||
processItem: (item: T) => Promise<R>,
|
||||
options: {
|
||||
concurrencyLimit?: number;
|
||||
maxRetries?: number;
|
||||
retryDelay?: number;
|
||||
onProgress?: (completed: number, total: number, result?: R) => void;
|
||||
onRetry?: (item: T, error: Error, attempt: number) => void;
|
||||
jobId?: string; // Optional job ID for checkpointing
|
||||
getItemId?: (item: T) => string; // Function to get a unique ID for each item
|
||||
onCheckpoint?: (jobId: string, completedItemId: string) => Promise<void>; // Callback for checkpointing
|
||||
checkpointInterval?: number; // How many items to process before checkpointing
|
||||
} = {}
|
||||
): Promise<R[]> {
|
||||
const {
|
||||
concurrencyLimit = 5,
|
||||
maxRetries = 3,
|
||||
retryDelay = 1000,
|
||||
onProgress,
|
||||
onRetry,
|
||||
jobId,
|
||||
getItemId,
|
||||
onCheckpoint,
|
||||
checkpointInterval = 1 // Default to checkpointing after each item
|
||||
} = options;
|
||||
|
||||
// Track checkpoint counter
|
||||
let itemsProcessedSinceLastCheckpoint = 0;
|
||||
|
||||
// Wrap the process function with retry logic
|
||||
const processWithRetryLogic = async (item: T): Promise<R> => {
|
||||
let lastError: Error | null = null;
|
||||
|
||||
for (let attempt = 1; attempt <= maxRetries + 1; attempt++) {
|
||||
try {
|
||||
// Check for shutdown before processing each item (only in production)
|
||||
try {
|
||||
const { isShuttingDown } = await import('@/lib/shutdown-manager');
|
||||
if (isShuttingDown()) {
|
||||
throw new Error('Processing interrupted by application shutdown');
|
||||
}
|
||||
} catch (importError) {
|
||||
// Ignore import errors during testing
|
||||
}
|
||||
|
||||
const result = await processItem(item);
|
||||
|
||||
// Handle checkpointing if enabled
|
||||
if (jobId && getItemId && onCheckpoint) {
|
||||
const itemId = getItemId(item);
|
||||
itemsProcessedSinceLastCheckpoint++;
|
||||
|
||||
// Checkpoint based on the interval
|
||||
if (itemsProcessedSinceLastCheckpoint >= checkpointInterval) {
|
||||
await onCheckpoint(jobId, itemId);
|
||||
itemsProcessedSinceLastCheckpoint = 0;
|
||||
}
|
||||
}
|
||||
|
||||
return result;
|
||||
} catch (error) {
|
||||
lastError = error instanceof Error ? error : new Error(String(error));
|
||||
|
||||
if (attempt <= maxRetries) {
|
||||
if (onRetry) {
|
||||
onRetry(item, lastError, attempt);
|
||||
}
|
||||
|
||||
// Exponential backoff
|
||||
const delay = retryDelay * Math.pow(2, attempt - 1);
|
||||
await new Promise(resolve => setTimeout(resolve, delay));
|
||||
} else {
|
||||
throw lastError;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// This should never be reached due to the throw in the catch block
|
||||
throw lastError || new Error('Unknown error occurred');
|
||||
};
|
||||
|
||||
const results = await processInParallel(
|
||||
items,
|
||||
processWithRetryLogic,
|
||||
concurrencyLimit,
|
||||
onProgress
|
||||
);
|
||||
|
||||
// Final checkpoint if there are remaining items since the last checkpoint
|
||||
if (jobId && getItemId && onCheckpoint && itemsProcessedSinceLastCheckpoint > 0) {
|
||||
// We don't have a specific item ID for the final checkpoint, so we'll use a placeholder
|
||||
await onCheckpoint(jobId, 'final');
|
||||
}
|
||||
|
||||
return results;
|
||||
}
|
||||
|
||||
/**
|
||||
* Process items in parallel with resilience to container restarts
|
||||
* This version supports resuming from a previous checkpoint
|
||||
*/
|
||||
export async function processWithResilience<T, R>(
|
||||
items: T[],
|
||||
processItem: (item: T) => Promise<R>,
|
||||
options: {
|
||||
concurrencyLimit?: number;
|
||||
maxRetries?: number;
|
||||
retryDelay?: number;
|
||||
onProgress?: (completed: number, total: number, result?: R) => void;
|
||||
onRetry?: (item: T, error: Error, attempt: number) => void;
|
||||
userId: string; // Required for creating mirror jobs
|
||||
jobType: "mirror" | "sync" | "retry";
|
||||
getItemId: (item: T) => string; // Required function to get a unique ID for each item
|
||||
getItemName: (item: T) => string; // Required function to get a display name for each item
|
||||
checkpointInterval?: number;
|
||||
resumeFromJobId?: string; // Optional job ID to resume from
|
||||
}
|
||||
): Promise<R[]> {
|
||||
const {
|
||||
userId,
|
||||
jobType,
|
||||
getItemId,
|
||||
getItemName,
|
||||
resumeFromJobId,
|
||||
checkpointInterval = 10, // Increased from 5 to reduce event frequency
|
||||
...otherOptions
|
||||
} = options;
|
||||
|
||||
// Import helpers for job management and shutdown handling
|
||||
const { createMirrorJob, updateMirrorJobProgress } = await import('@/lib/helpers');
|
||||
|
||||
// Import shutdown manager (with fallback for testing)
|
||||
let registerActiveJob: (jobId: string) => void = () => {};
|
||||
let unregisterActiveJob: (jobId: string) => void = () => {};
|
||||
let isShuttingDown: () => boolean = () => false;
|
||||
|
||||
try {
|
||||
const shutdownManager = await import('@/lib/shutdown-manager');
|
||||
registerActiveJob = shutdownManager.registerActiveJob;
|
||||
unregisterActiveJob = shutdownManager.unregisterActiveJob;
|
||||
isShuttingDown = shutdownManager.isShuttingDown;
|
||||
} catch (importError) {
|
||||
// Use fallback functions during testing
|
||||
    console.log('Using fallback shutdown manager functions (testing mode)');
  }

  // Get item IDs for all items
  const allItemIds = items.map(getItemId);

  // Create or resume a job
  let jobId: string;
  let completedItemIds: string[] = [];
  let itemsToProcess = [...items];

  if (resumeFromJobId) {
    // We're resuming an existing job
    jobId = resumeFromJobId;

    // Get the job from the database to find completed items
    const { db, mirrorJobs } = await import('@/lib/db');
    const { eq } = await import('drizzle-orm');
    const [job] = await db
      .select()
      .from(mirrorJobs)
      .where(eq(mirrorJobs.id, resumeFromJobId));

    if (job && job.completedItemIds) {
      completedItemIds = job.completedItemIds;

      // Filter out already completed items
      itemsToProcess = items.filter(item => !completedItemIds.includes(getItemId(item)));

      console.log(`Resuming job ${jobId} with ${itemsToProcess.length} remaining items`);

      // Update the job to show it's being resumed
      await updateMirrorJobProgress({
        jobId,
        message: `Resuming job with ${itemsToProcess.length} remaining items`,
        details: `Job is being resumed. ${completedItemIds.length} of ${items.length} items were already processed.`,
        inProgress: true,
      });
    }
  } else {
    // Create a new job
    jobId = await createMirrorJob({
      userId,
      message: `Started ${jobType} job with ${items.length} items`,
      details: `Processing ${items.length} items in parallel with checkpointing`,
      status: "mirroring",
      jobType,
      totalItems: items.length,
      itemIds: allItemIds,
      inProgress: true,
    });

    console.log(`Created new job ${jobId} with ${items.length} items`);
  }

  // Register the job with the shutdown manager
  registerActiveJob(jobId);

  // Define the checkpoint function
  const onCheckpoint = async (jobId: string, completedItemId: string) => {
    const completedItem = items.find(item => getItemId(item) === completedItemId);
    const itemName = completedItem ? getItemName(completedItem) : 'unknown';

    await updateMirrorJobProgress({
      jobId,
      completedItemId,
      message: `Processed item: ${itemName}`,
    });
  };

  try {
    // Check if shutdown is in progress before starting
    if (isShuttingDown()) {
      console.log(`⚠️ Shutdown in progress, aborting job ${jobId}`);
      throw new Error('Job aborted due to application shutdown');
    }

    // Process the items with checkpointing
    const results = await processWithRetry(
      itemsToProcess,
      processItem,
      {
        ...otherOptions,
        jobId,
        getItemId,
        onCheckpoint,
        checkpointInterval,
      }
    );

    // Mark the job as completed
    await updateMirrorJobProgress({
      jobId,
      status: "mirrored",
      message: `Completed ${jobType} job with ${items.length} items`,
      inProgress: false,
      isCompleted: true,
    });

    // Unregister the job from the shutdown manager
    unregisterActiveJob(jobId);

    return results;
  } catch (error) {
    // Mark the job as failed (unless it was interrupted by shutdown)
    const isShutdownError = error instanceof Error && error.message.includes('shutdown');

    await updateMirrorJobProgress({
      jobId,
      status: isShutdownError ? "imported" : "failed", // Keep as "imported" if shutdown interrupted
      message: isShutdownError
        ? 'Job interrupted by application shutdown - will resume on restart'
        : `Failed ${jobType} job: ${error instanceof Error ? error.message : String(error)}`,
      inProgress: false,
      isCompleted: !isShutdownError, // Don't mark as completed if shutdown interrupted
    });

    // Unregister the job from the shutdown manager
    unregisterActiveJob(jobId);

    throw error;
  }
}
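Read from the call side, the checkpointing machinery above is driven entirely by the options object. A minimal caller sketch, assuming the surrounding function is the `processWithResilience` export used by the job endpoints later in this changeset (the `resumeFromJobId` option is inferred from the resume branch above, so treat it as an assumption):

```ts
import { processWithResilience } from "@/lib/utils/concurrency";

// Hypothetical item shape, for illustration only.
interface Repo { id: string; name: string; }

async function mirrorAll(userId: string, repos: Repo[], resumeFromJobId?: string) {
  return processWithResilience(
    repos,
    async (repo) => {
      // ...do the actual mirroring work for one item here...
      return repo;
    },
    {
      userId,
      jobType: "mirror",
      getItemId: (r) => r.id,
      getItemName: (r) => r.name,
      concurrencyLimit: 3,
      checkpointInterval: 5,
      // Assumption: passing a previous job id resumes it, skipping items
      // already recorded in the job's completedItemIds (see the branch above).
      resumeFromJobId,
    }
  );
}
```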
src/middleware.ts (new file, 96 lines)
@@ -0,0 +1,96 @@
import { defineMiddleware } from 'astro:middleware';
import { initializeRecovery, hasJobsNeedingRecovery, getRecoveryStatus } from './lib/recovery';
import { startCleanupService, stopCleanupService } from './lib/cleanup-service';
import { initializeShutdownManager, registerShutdownCallback } from './lib/shutdown-manager';
import { setupSignalHandlers } from './lib/signal-handlers';

// Flags to track one-time initialization
let recoveryInitialized = false;
let recoveryAttempted = false;
let cleanupServiceStarted = false;
let shutdownManagerInitialized = false;

export const onRequest = defineMiddleware(async (context, next) => {
  // Initialize the shutdown manager and signal handlers first
  if (!shutdownManagerInitialized) {
    try {
      console.log('🔧 Initializing shutdown manager and signal handlers...');
      initializeShutdownManager();
      setupSignalHandlers();
      shutdownManagerInitialized = true;
      console.log('✅ Shutdown manager and signal handlers initialized');
    } catch (error) {
      console.error('❌ Failed to initialize shutdown manager:', error);
      // Continue anyway - this shouldn't block the application
    }
  }

  // Initialize the recovery system only once when the server starts.
  // This is a fallback in case the startup script didn't run.
  if (!recoveryInitialized && !recoveryAttempted) {
    recoveryAttempted = true;

    try {
      // Check if recovery is actually needed before attempting it
      const needsRecovery = await hasJobsNeedingRecovery();

      if (needsRecovery) {
        console.log('⚠️ Middleware detected jobs needing recovery (startup script may not have run)');
        console.log('Attempting recovery from middleware...');

        // Run recovery with a shorter timeout since this is during request handling
        const recoveryResult = await Promise.race([
          initializeRecovery({
            skipIfRecentAttempt: true,
            maxRetries: 2,
            retryDelay: 3000,
          }),
          new Promise<boolean>((_, reject) => {
            setTimeout(() => reject(new Error('Middleware recovery timeout')), 15000);
          })
        ]);

        if (recoveryResult) {
          console.log('✅ Middleware recovery completed successfully');
        } else {
          console.log('⚠️ Middleware recovery completed with some issues');
        }
      } else {
        console.log('✅ No recovery needed (startup script likely handled it)');
      }

      recoveryInitialized = true;
    } catch (error) {
      console.error('⚠️ Middleware recovery failed or timed out:', error);
      console.log('Application will continue, but some jobs may remain interrupted');

      // Log recovery status for debugging
      const status = getRecoveryStatus();
      console.log('Recovery status:', status);

      recoveryInitialized = true; // Mark as attempted to avoid retries
    }
  }

  // Start the cleanup service only once, after recovery is complete
  if (recoveryInitialized && !cleanupServiceStarted) {
    try {
      console.log('Starting automatic database cleanup service...');
      startCleanupService();

      // Register the cleanup service shutdown callback
      registerShutdownCallback(async () => {
        console.log('🛑 Shutting down cleanup service...');
        stopCleanupService();
      });

      cleanupServiceStarted = true;
    } catch (error) {
      console.error('Failed to start cleanup service:', error);
      // Don't fail the request if the cleanup service fails to start
    }
  }

  // Continue with the request
  return next();
});
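The middleware only consumes a small contract from the modules it imports: `setupSignalHandlers` is expected to route SIGTERM/SIGINT into the shutdown manager, which then drains the callbacks registered via `registerShutdownCallback`. A rough sketch of that wiring, offered purely as an assumption about `src/lib/signal-handlers` (that file is not part of this changeset):

```ts
// Sketch only - the real src/lib/signal-handlers.ts is not shown here.
import { runShutdownCallbacks } from "@/lib/shutdown-manager"; // assumed export

export function setupSignalHandlers(): void {
  for (const signal of ["SIGTERM", "SIGINT"] as const) {
    process.on(signal, async () => {
      console.log(`Received ${signal}, starting graceful shutdown...`);
      await runShutdownCallbacks(); // e.g. stops the cleanup service registered above
      process.exit(0);
    });
  }
}
```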
src/pages/api/activities/cleanup.ts (new file, 115 lines)
@@ -0,0 +1,115 @@
import type { APIRoute } from "astro";
import { db, mirrorJobs, events } from "@/lib/db";
import { eq, count } from "drizzle-orm";

export const POST: APIRoute = async ({ request }) => {
  try {
    let body;
    try {
      body = await request.json();
    } catch (jsonError) {
      console.error("Invalid JSON in request body:", jsonError);
      return new Response(
        JSON.stringify({ error: "Invalid JSON in request body." }),
        { status: 400, headers: { "Content-Type": "application/json" } }
      );
    }

    const { userId } = body || {};

    if (!userId) {
      return new Response(
        JSON.stringify({ error: "Missing 'userId' in request body." }),
        { status: 400, headers: { "Content-Type": "application/json" } }
      );
    }

    // Start a transaction to ensure all operations succeed or fail together
    const result = await db.transaction(async (tx) => {
      // Count activities before deletion
      const mirrorJobsCountResult = await tx
        .select({ count: count() })
        .from(mirrorJobs)
        .where(eq(mirrorJobs.userId, userId));

      const eventsCountResult = await tx
        .select({ count: count() })
        .from(events)
        .where(eq(events.userId, userId));

      const totalMirrorJobs = mirrorJobsCountResult[0]?.count || 0;
      const totalEvents = eventsCountResult[0]?.count || 0;

      console.log(`Found ${totalMirrorJobs} mirror jobs and ${totalEvents} events to delete for user ${userId}`);

      // First, mark all in-progress jobs as completed/failed to allow deletion
      await tx
        .update(mirrorJobs)
        .set({
          inProgress: false,
          completedAt: new Date(),
          status: "failed",
          message: "Job interrupted and cleaned up by user"
        })
        .where(eq(mirrorJobs.userId, userId));

      console.log(`Updated in-progress jobs to allow deletion`);

      // Delete all mirror jobs for the user (now that none are in progress)
      await tx
        .delete(mirrorJobs)
        .where(eq(mirrorJobs.userId, userId));

      // Delete all events for the user
      await tx
        .delete(events)
        .where(eq(events.userId, userId));

      return {
        mirrorJobsDeleted: totalMirrorJobs,
        eventsDeleted: totalEvents,
        totalMirrorJobs,
        totalEvents,
      };
    });

    console.log(`Cleaned up activities for user ${userId}:`, result);

    return new Response(
      JSON.stringify({
        success: true,
        message: "All activities cleaned up successfully.",
        result: {
          mirrorJobsDeleted: result.mirrorJobsDeleted,
          eventsDeleted: result.eventsDeleted,
        },
      }),
      { status: 200, headers: { "Content-Type": "application/json" } }
    );
  } catch (error) {
    console.error("Error cleaning up activities:", error);

    // Provide more specific error messages
    let errorMessage = "An unknown error occurred.";
    if (error instanceof Error) {
      errorMessage = error.message;

      // Check for common database errors
      if (error.message.includes("FOREIGN KEY constraint failed")) {
        errorMessage = "Cannot delete activities due to database constraints. Some jobs may still be referenced by other records.";
      } else if (error.message.includes("database is locked")) {
        errorMessage = "Database is currently locked. Please try again in a moment.";
      } else if (error.message.includes("no such table")) {
        errorMessage = "Database tables are missing. Please check your database setup.";
      }
    }

    return new Response(
      JSON.stringify({
        success: false,
        error: errorMessage,
      }),
      { status: 500, headers: { "Content-Type": "application/json" } }
    );
  }
};
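The endpoint is simple to exercise from the client side; a minimal sketch (the userId is a placeholder):

```ts
// POST /api/activities/cleanup - deletes all mirror jobs and events for one user.
const res = await fetch("/api/activities/cleanup", {
  method: "POST",
  headers: { "Content-Type": "application/json" },
  body: JSON.stringify({ userId: "00000000-0000-0000-0000-000000000000" }),
});

const payload = await res.json();
if (payload.success) {
  console.log(
    `Deleted ${payload.result.mirrorJobsDeleted} jobs and ${payload.result.eventsDeleted} events`
  );
}
```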
src/pages/api/cleanup/auto.ts (new file, 72 lines)
@@ -0,0 +1,72 @@
/**
 * API endpoint to manually trigger automatic cleanup
 * This is useful for testing and debugging the cleanup service
 */

import type { APIRoute } from 'astro';
import { runAutomaticCleanup } from '@/lib/cleanup-service';

export const POST: APIRoute = async ({ request }) => {
  try {
    console.log('Manual cleanup trigger requested');

    // Run the automatic cleanup
    const results = await runAutomaticCleanup();

    // Calculate totals
    const totalEventsDeleted = results.reduce((sum, result) => sum + result.eventsDeleted, 0);
    const totalJobsDeleted = results.reduce((sum, result) => sum + result.mirrorJobsDeleted, 0);
    const errors = results.filter(result => result.error);

    return new Response(
      JSON.stringify({
        success: true,
        message: 'Automatic cleanup completed',
        results: {
          usersProcessed: results.length,
          totalEventsDeleted,
          totalJobsDeleted,
          errors: errors.length,
          details: results,
        },
      }),
      {
        status: 200,
        headers: {
          'Content-Type': 'application/json',
        },
      }
    );
  } catch (error) {
    console.error('Error in manual cleanup trigger:', error);

    return new Response(
      JSON.stringify({
        success: false,
        message: 'Failed to run automatic cleanup',
        error: error instanceof Error ? error.message : 'Unknown error',
      }),
      {
        status: 500,
        headers: {
          'Content-Type': 'application/json',
        },
      }
    );
  }
};

export const GET: APIRoute = async () => {
  return new Response(
    JSON.stringify({
      success: false,
      message: 'Use POST method to trigger cleanup',
    }),
    {
      status: 405,
      headers: {
        'Content-Type': 'application/json',
      },
    }
  );
};
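As the doc comment says, this endpoint exists mainly for testing and debugging; it can be hit directly:

```ts
// Trigger an automatic cleanup pass by hand (GET is rejected with 405).
const res = await fetch("/api/cleanup/auto", { method: "POST" });
const { success, results } = await res.json();
console.log(success, results.usersProcessed, results.totalEventsDeleted, results.totalJobsDeleted);
```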
@@ -2,18 +2,19 @@ import type { APIRoute } from "astro";
 import { db, configs, users } from "@/lib/db";
 import { v4 as uuidv4 } from "uuid";
 import { eq } from "drizzle-orm";
+import { calculateCleanupInterval } from "@/lib/cleanup-service";

 export const POST: APIRoute = async ({ request }) => {
   try {
     const body = await request.json();
-    const { userId, githubConfig, giteaConfig, scheduleConfig } = body;
+    const { userId, githubConfig, giteaConfig, scheduleConfig, cleanupConfig } = body;

-    if (!userId || !githubConfig || !giteaConfig || !scheduleConfig) {
+    if (!userId || !githubConfig || !giteaConfig || !scheduleConfig || !cleanupConfig) {
       return new Response(
         JSON.stringify({
           success: false,
           message:
-            "userId, githubConfig, giteaConfig, and scheduleConfig are required.",
+            "userId, githubConfig, giteaConfig, scheduleConfig, and cleanupConfig are required.",
         }),
         {
           status: 400,
@@ -56,6 +57,63 @@ export const POST: APIRoute = async ({ request }) => {
       }
     }

+    // Process schedule config - set/update nextRun if enabled, clear if disabled
+    const processedScheduleConfig = { ...scheduleConfig };
+    if (scheduleConfig.enabled) {
+      const now = new Date();
+      const interval = scheduleConfig.interval || 3600; // Default to 1 hour
+
+      // Check if we need to recalculate nextRun
+      // Recalculate if: no nextRun exists, or interval changed from existing config
+      let shouldRecalculate = !scheduleConfig.nextRun;
+
+      if (existingConfig && existingConfig.scheduleConfig) {
+        const existingScheduleConfig = existingConfig.scheduleConfig;
+        const existingInterval = existingScheduleConfig.interval || 3600;
+
+        // If interval changed, recalculate nextRun
+        if (interval !== existingInterval) {
+          shouldRecalculate = true;
+        }
+      }
+
+      if (shouldRecalculate) {
+        processedScheduleConfig.nextRun = new Date(now.getTime() + interval * 1000);
+      }
+    } else {
+      // Clear nextRun when disabled
+      processedScheduleConfig.nextRun = null;
+    }
+
+    // Process cleanup config - set/update nextRun if enabled, clear if disabled
+    const processedCleanupConfig = { ...cleanupConfig };
+    if (cleanupConfig.enabled) {
+      const now = new Date();
+      const retentionSeconds = cleanupConfig.retentionDays || 604800; // Default 7 days in seconds
+      const cleanupIntervalHours = calculateCleanupInterval(retentionSeconds);
+
+      // Check if we need to recalculate nextRun
+      // Recalculate if: no nextRun exists, or retention period changed from existing config
+      let shouldRecalculate = !cleanupConfig.nextRun;
+
+      if (existingConfig && existingConfig.cleanupConfig) {
+        const existingCleanupConfig = existingConfig.cleanupConfig;
+        const existingRetentionSeconds = existingCleanupConfig.retentionDays || 604800;
+
+        // If retention period changed, recalculate nextRun
+        if (retentionSeconds !== existingRetentionSeconds) {
+          shouldRecalculate = true;
+        }
+      }
+
+      if (shouldRecalculate) {
+        processedCleanupConfig.nextRun = new Date(now.getTime() + cleanupIntervalHours * 60 * 60 * 1000);
+      }
+    } else {
+      // Clear nextRun when disabled
+      processedCleanupConfig.nextRun = null;
+    }
+
     if (existingConfig) {
       // Update path
       await db
@@ -63,7 +121,8 @@ export const POST: APIRoute = async ({ request }) => {
         .set({
           githubConfig,
           giteaConfig,
-          scheduleConfig,
+          scheduleConfig: processedScheduleConfig,
+          cleanupConfig: processedCleanupConfig,
           updatedAt: new Date(),
         })
         .where(eq(configs.id, existingConfig.id));
@@ -112,7 +171,8 @@ export const POST: APIRoute = async ({ request }) => {
         giteaConfig,
         include: [],
         exclude: [],
-        scheduleConfig,
+        scheduleConfig: processedScheduleConfig,
+        cleanupConfig: processedCleanupConfig,
         createdAt: new Date(),
         updatedAt: new Date(),
       });
@@ -197,6 +257,12 @@ export const GET: APIRoute = async ({ request }) => {
           lastRun: null,
           nextRun: null,
         },
+        cleanupConfig: {
+          enabled: false,
+          retentionDays: 604800, // 7 days in seconds
+          lastRun: null,
+          nextRun: null,
+        },
       }),
       {
         status: 200,
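The unit handling here is easy to trip over: the `retentionDays` field actually stores seconds (see the `DatabaseCleanupConfig` comment later in this changeset), while `calculateCleanupInterval` returns hours. A worked example, assuming the interval function maps a 7-day retention to a 24-hour cleanup cadence (the real mapping lives in `@/lib/cleanup-service` and is not shown here):

```ts
const retentionSeconds = 604800;   // the "retentionDays" field: 7 days, stored as seconds
const cleanupIntervalHours = 24;   // assumed result of calculateCleanupInterval(604800)
const now = new Date("2025-05-22T00:00:00Z");

// nextRun = now + interval, converting hours to milliseconds
const nextRun = new Date(now.getTime() + cleanupIntervalHours * 60 * 60 * 1000);
console.log(nextRun.toISOString()); // "2025-05-23T00:00:00.000Z"
```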
src/pages/api/gitea/test-connection.test.ts (new file, 186 lines)
@@ -0,0 +1,186 @@
import { describe, test, expect, mock, beforeEach, afterEach } from "bun:test";

// Mock the POST function
const mockPOST = mock(async ({ request }) => {
  const body = await request.json();

  // Check for missing URL or token
  if (!body.url || !body.token) {
    return new Response(
      JSON.stringify({
        success: false,
        message: "Gitea URL and token are required"
      }),
      { status: 400 }
    );
  }

  // Check for username mismatch
  if (body.username && body.username !== "giteauser") {
    return new Response(
      JSON.stringify({
        success: false,
        message: "Token belongs to giteauser, not " + body.username
      }),
      { status: 400 }
    );
  }

  // Handle invalid token
  if (body.token === "invalid-token") {
    return new Response(
      JSON.stringify({
        success: false,
        message: "Invalid Gitea token"
      }),
      { status: 401 }
    );
  }

  // Success case
  return new Response(
    JSON.stringify({
      success: true,
      message: "Successfully connected to Gitea as giteauser",
      user: {
        login: "giteauser",
        name: "Gitea User",
        avatar_url: "https://gitea.example.com/avatar.png"
      }
    }),
    { status: 200 }
  );
});

// Mock the module
mock.module("./test-connection", () => {
  return {
    POST: mockPOST
  };
});

// Import after mocking
import { POST } from "./test-connection";

describe("Gitea Test Connection API", () => {
  // Mock console.error to prevent test output noise
  let originalConsoleError: typeof console.error;

  beforeEach(() => {
    originalConsoleError = console.error;
    console.error = mock(() => {});
  });

  afterEach(() => {
    console.error = originalConsoleError;
  });

  test("returns 400 if url or token is missing", async () => {
    // Test missing URL
    const requestMissingUrl = new Request("http://localhost/api/gitea/test-connection", {
      method: "POST",
      headers: {
        "Content-Type": "application/json"
      },
      body: JSON.stringify({
        token: "valid-token"
      })
    });

    const responseMissingUrl = await POST({ request: requestMissingUrl } as any);

    expect(responseMissingUrl.status).toBe(400);

    const dataMissingUrl = await responseMissingUrl.json();
    expect(dataMissingUrl.success).toBe(false);
    expect(dataMissingUrl.message).toBe("Gitea URL and token are required");

    // Test missing token
    const requestMissingToken = new Request("http://localhost/api/gitea/test-connection", {
      method: "POST",
      headers: {
        "Content-Type": "application/json"
      },
      body: JSON.stringify({
        url: "https://gitea.example.com"
      })
    });

    const responseMissingToken = await POST({ request: requestMissingToken } as any);

    expect(responseMissingToken.status).toBe(400);

    const dataMissingToken = await responseMissingToken.json();
    expect(dataMissingToken.success).toBe(false);
    expect(dataMissingToken.message).toBe("Gitea URL and token are required");
  });

  test("returns 200 with user data on successful connection", async () => {
    const request = new Request("http://localhost/api/gitea/test-connection", {
      method: "POST",
      headers: {
        "Content-Type": "application/json"
      },
      body: JSON.stringify({
        url: "https://gitea.example.com",
        token: "valid-token"
      })
    });

    const response = await POST({ request } as any);

    expect(response.status).toBe(200);

    const data = await response.json();
    expect(data.success).toBe(true);
    expect(data.message).toBe("Successfully connected to Gitea as giteauser");
    expect(data.user).toEqual({
      login: "giteauser",
      name: "Gitea User",
      avatar_url: "https://gitea.example.com/avatar.png"
    });
  });

  test("returns 400 if username doesn't match authenticated user", async () => {
    const request = new Request("http://localhost/api/gitea/test-connection", {
      method: "POST",
      headers: {
        "Content-Type": "application/json"
      },
      body: JSON.stringify({
        url: "https://gitea.example.com",
        token: "valid-token",
        username: "differentuser"
      })
    });

    const response = await POST({ request } as any);

    expect(response.status).toBe(400);

    const data = await response.json();
    expect(data.success).toBe(false);
    expect(data.message).toBe("Token belongs to giteauser, not differentuser");
  });

  test("handles authentication errors", async () => {
    const request = new Request("http://localhost/api/gitea/test-connection", {
      method: "POST",
      headers: {
        "Content-Type": "application/json"
      },
      body: JSON.stringify({
        url: "https://gitea.example.com",
        token: "invalid-token"
      })
    });

    const response = await POST({ request } as any);

    expect(response.status).toBe(401);

    const data = await response.json();
    expect(data.success).toBe(false);
    expect(data.message).toBe("Invalid Gitea token");
  });
});
@@ -1,5 +1,5 @@
 import type { APIRoute } from 'astro';
-import axios from 'axios';
+import { httpGet, HttpError } from '@/lib/http-client';

 export const POST: APIRoute = async ({ request }) => {
   try {
@@ -25,11 +25,9 @@ export const POST: APIRoute = async ({ request }) => {
     const baseUrl = url.endsWith('/') ? url.slice(0, -1) : url;

     // Test the connection by fetching the authenticated user
-    const response = await axios.get(`${baseUrl}/api/v1/user`, {
-      headers: {
-        'Authorization': `token ${token}`,
-        'Accept': 'application/json',
-      },
+    const response = await httpGet(`${baseUrl}/api/v1/user`, {
+      'Authorization': `token ${token}`,
+      'Accept': 'application/json',
     });

     const data = response.data;
@@ -72,8 +70,8 @@ export const POST: APIRoute = async ({ request }) => {
     console.error('Gitea connection test failed:', error);

     // Handle specific error types
-    if (axios.isAxiosError(error) && error.response) {
-      if (error.response.status === 401) {
+    if (error instanceof HttpError) {
+      if (error.status === 401) {
         return new Response(
           JSON.stringify({
             success: false,
@@ -86,7 +84,7 @@ export const POST: APIRoute = async ({ request }) => {
           },
         }
       );
-      } else if (error.response.status === 404) {
+      } else if (error.status === 404) {
         return new Response(
           JSON.stringify({
             success: false,
@@ -99,25 +97,23 @@ export const POST: APIRoute = async ({ request }) => {
           },
         }
       );
+      } else if (error.status === 0) {
+        // Network error
+        return new Response(
+          JSON.stringify({
+            success: false,
+            message: 'Could not connect to Gitea server. Please check the URL.',
+          }),
+          {
+            status: 500,
+            headers: {
+              'Content-Type': 'application/json',
+            },
+          }
+        );
+      }
     }

-    // Handle connection errors
-    if (axios.isAxiosError(error) && (error.code === 'ECONNREFUSED' || error.code === 'ENOTFOUND')) {
-      return new Response(
-        JSON.stringify({
-          success: false,
-          message: 'Could not connect to Gitea server. Please check the URL.',
-        }),
-        {
-          status: 500,
-          headers: {
-            'Content-Type': 'application/json',
-          },
-        }
-      );
-    }
-
     // Generic error response
     return new Response(
       JSON.stringify({
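The axios-to-http-client swap assumes a small wrapper surface. Inferred from the call sites in this changeset (the actual `src/lib/http-client` is not shown): `httpGet(url, headers)` resolves to an object with a `data` field, and failures throw an `HttpError` carrying a `status`, with `0` reserved for network-level errors. Roughly:

```ts
// Sketch of the surface inferred from call sites; the real module may differ.
export class HttpError extends Error {
  constructor(public status: number, message: string) {
    super(message);
  }
}

export async function httpGet<T = any>(
  url: string,
  headers: Record<string, string> = {}
): Promise<{ data: T }> {
  let response: Response;
  try {
    response = await fetch(url, { headers });
  } catch (err) {
    // Network-level failure (DNS, connection refused, ...)
    throw new HttpError(0, err instanceof Error ? err.message : "Network error");
  }
  if (!response.ok) {
    throw new HttpError(response.status, `HTTP ${response.status}`);
  }
  return { data: (await response.json()) as T };
}
```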
src/pages/api/github/test-connection.test.ts (new file, 133 lines)
@@ -0,0 +1,133 @@
import { describe, test, expect, mock, beforeEach, afterEach } from "bun:test";
import { POST } from "./test-connection";
import { Octokit } from "@octokit/rest";

// Mock the Octokit class
mock.module("@octokit/rest", () => {
  return {
    Octokit: mock(function() {
      return {
        users: {
          getAuthenticated: mock(() => Promise.resolve({
            data: {
              login: "testuser",
              name: "Test User",
              avatar_url: "https://example.com/avatar.png"
            }
          }))
        }
      };
    })
  };
});

describe("GitHub Test Connection API", () => {
  // Mock console.error to prevent test output noise
  let originalConsoleError: typeof console.error;

  beforeEach(() => {
    originalConsoleError = console.error;
    console.error = mock(() => {});
  });

  afterEach(() => {
    console.error = originalConsoleError;
  });

  test("returns 400 if token is missing", async () => {
    const request = new Request("http://localhost/api/github/test-connection", {
      method: "POST",
      headers: {
        "Content-Type": "application/json"
      },
      body: JSON.stringify({})
    });

    const response = await POST({ request } as any);

    expect(response.status).toBe(400);

    const data = await response.json();
    expect(data.success).toBe(false);
    expect(data.message).toBe("GitHub token is required");
  });

  test("returns 200 with user data on successful connection", async () => {
    const request = new Request("http://localhost/api/github/test-connection", {
      method: "POST",
      headers: {
        "Content-Type": "application/json"
      },
      body: JSON.stringify({
        token: "valid-token"
      })
    });

    const response = await POST({ request } as any);

    expect(response.status).toBe(200);

    const data = await response.json();
    expect(data.success).toBe(true);
    expect(data.message).toBe("Successfully connected to GitHub as testuser");
    expect(data.user).toEqual({
      login: "testuser",
      name: "Test User",
      avatar_url: "https://example.com/avatar.png"
    });
  });

  test("returns 400 if username doesn't match authenticated user", async () => {
    const request = new Request("http://localhost/api/github/test-connection", {
      method: "POST",
      headers: {
        "Content-Type": "application/json"
      },
      body: JSON.stringify({
        token: "valid-token",
        username: "differentuser"
      })
    });

    const response = await POST({ request } as any);

    expect(response.status).toBe(400);

    const data = await response.json();
    expect(data.success).toBe(false);
    expect(data.message).toBe("Token belongs to testuser, not differentuser");
  });

  test("handles authentication errors", async () => {
    // Mock Octokit to throw an error
    mock.module("@octokit/rest", () => {
      return {
        Octokit: mock(function() {
          return {
            users: {
              getAuthenticated: mock(() => Promise.reject(new Error("Bad credentials")))
            }
          };
        })
      };
    });

    const request = new Request("http://localhost/api/github/test-connection", {
      method: "POST",
      headers: {
        "Content-Type": "application/json"
      },
      body: JSON.stringify({
        token: "invalid-token"
      })
    });

    const response = await POST({ request } as any);

    expect(response.status).toBe(500);

    const data = await response.json();
    expect(data.success).toBe(false);
    expect(data.message).toContain("Bad credentials");
  });
});
@@ -2,16 +2,27 @@ import type { APIRoute } from "astro";
 import { jsonResponse } from "@/lib/utils";
 import { db } from "@/lib/db";
 import { ENV } from "@/lib/config";
+import { getRecoveryStatus, hasJobsNeedingRecovery } from "@/lib/recovery";
 import os from "os";
+import { httpGet } from "@/lib/http-client";

 // Track when the server started
 const serverStartTime = new Date();

+// Cache for the latest version to avoid frequent GitHub API calls
+interface VersionCache {
+  latestVersion: string;
+  timestamp: number;
+}
+
+let versionCache: VersionCache | null = null;
+const CACHE_TTL = 3600000; // 1 hour in milliseconds
+
 export const GET: APIRoute = async () => {
   try {
     // Check database connection by running a simple query
     const dbStatus = await checkDatabaseConnection();

     // Get system information
     const systemInfo = {
       uptime: getUptime(),
@@ -23,28 +34,51 @@ export const GET: APIRoute = async () => {
       },
       env: ENV.NODE_ENV,
     };

+    // Get current and latest versions
+    const currentVersion = process.env.npm_package_version || "unknown";
+    const latestVersion = await checkLatestVersion();
+
+    // Get recovery system status
+    const recoveryStatus = await getRecoverySystemStatus();
+
+    // Determine overall health status
+    let overallStatus = "ok";
+    if (!dbStatus.connected) {
+      overallStatus = "error";
+    } else if (recoveryStatus.jobsNeedingRecovery > 0 && !recoveryStatus.inProgress) {
+      overallStatus = "degraded";
+    }
+
     // Build response
     const healthData = {
-      status: "ok",
+      status: overallStatus,
       timestamp: new Date().toISOString(),
-      version: process.env.npm_package_version || "unknown",
+      version: currentVersion,
+      latestVersion: latestVersion,
+      updateAvailable: latestVersion !== "unknown" &&
+        currentVersion !== "unknown" &&
+        latestVersion !== currentVersion,
       database: dbStatus,
+      recovery: recoveryStatus,
       system: systemInfo,
     };

     return jsonResponse({
       data: healthData,
       status: 200,
     });
   } catch (error) {
     console.error("Health check failed:", error);

     return jsonResponse({
       data: {
         status: "error",
         timestamp: new Date().toISOString(),
         error: error instanceof Error ? error.message : "Unknown error",
         version: process.env.npm_package_version || "unknown",
+        latestVersion: "unknown",
+        updateAvailable: false,
       },
       status: 503, // Service Unavailable
     });
@@ -58,14 +92,14 @@ async function checkDatabaseConnection() {
   try {
     // Run a simple query to check if the database is accessible
     const result = await db.select({ test: sql`1` }).from(sql`sqlite_master`).limit(1);

     return {
       connected: true,
       message: "Database connection successful",
     };
   } catch (error) {
     console.error("Database connection check failed:", error);

     return {
       connected: false,
       message: error instanceof Error ? error.message : "Database connection failed",
@@ -73,19 +107,49 @@ async function checkDatabaseConnection() {
   }
 }

+/**
+ * Get recovery system status
+ */
+async function getRecoverySystemStatus() {
+  try {
+    const recoveryStatus = getRecoveryStatus();
+    const needsRecovery = await hasJobsNeedingRecovery();
+
+    return {
+      status: needsRecovery ? 'jobs-pending' : 'healthy',
+      inProgress: recoveryStatus.inProgress,
+      lastAttempt: recoveryStatus.lastAttempt?.toISOString() || null,
+      jobsNeedingRecovery: needsRecovery ? 1 : 0, // Simplified count for health check
+      message: needsRecovery
+        ? 'Jobs found that need recovery'
+        : 'No jobs need recovery',
+    };
+  } catch (error) {
+    console.error('Recovery system status check failed:', error);
+
+    return {
+      status: 'error',
+      inProgress: false,
+      lastAttempt: null,
+      jobsNeedingRecovery: -1,
+      message: error instanceof Error ? error.message : 'Recovery status check failed',
+    };
+  }
+}
+
 /**
  * Get server uptime information
  */
 function getUptime() {
   const now = new Date();
   const uptimeMs = now.getTime() - serverStartTime.getTime();

   // Convert to human-readable format
   const seconds = Math.floor(uptimeMs / 1000);
   const minutes = Math.floor(seconds / 60);
   const hours = Math.floor(minutes / 60);
   const days = Math.floor(hours / 24);

   return {
     startTime: serverStartTime.toISOString(),
     uptimeMs,
@@ -98,7 +162,7 @@ function getUptime() {
  */
 function getMemoryUsage() {
   const memoryUsage = process.memoryUsage();

   return {
     rss: formatBytes(memoryUsage.rss),
     heapTotal: formatBytes(memoryUsage.heapTotal),
@@ -114,13 +178,45 @@ function getMemoryUsage() {
  */
 function formatBytes(bytes: number): string {
   if (bytes === 0) return '0 Bytes';

   const k = 1024;
   const sizes = ['Bytes', 'KB', 'MB', 'GB', 'TB'];
   const i = Math.floor(Math.log(bytes) / Math.log(k));

   return parseFloat((bytes / Math.pow(k, i)).toFixed(2)) + ' ' + sizes[i];
 }

+/**
+ * Check for the latest version from GitHub releases
+ */
+async function checkLatestVersion(): Promise<string> {
+  // Return cached version if available and not expired
+  if (versionCache && (Date.now() - versionCache.timestamp) < CACHE_TTL) {
+    return versionCache.latestVersion;
+  }
+
+  try {
+    // Fetch the latest release from GitHub
+    const response = await httpGet(
+      'https://api.github.com/repos/arunavo4/gitea-mirror/releases/latest',
+      { 'Accept': 'application/vnd.github.v3+json' }
+    );
+
+    // Extract version from tag_name (remove 'v' prefix if present)
+    const latestVersion = response.data.tag_name.replace(/^v/, '');
+
+    // Update cache
+    versionCache = {
+      latestVersion,
+      timestamp: Date.now()
+    };
+
+    return latestVersion;
+  } catch (error) {
+    console.error('Failed to check for latest version:', error);
+    return 'unknown';
+  }
+}
+
 // Import sql tag for raw SQL queries
 import { sql } from "drizzle-orm";
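Put together, a healthy response from the endpoint now looks roughly like this (values are illustrative; field names follow the `healthData` object above):

```ts
const exampleHealthResponse = {
  status: "ok",                      // "ok" | "degraded" | "error"
  timestamp: "2025-05-22T12:00:00.000Z",
  version: "2.5.3",
  latestVersion: "2.5.3",
  updateAvailable: false,
  database: { connected: true, message: "Database connection successful" },
  recovery: {
    status: "healthy",               // or "jobs-pending" / "error"
    inProgress: false,
    lastAttempt: null,
    jobsNeedingRecovery: 0,
    message: "No jobs need recovery",
  },
  system: { uptime: {/* ... */}, memory: {/* ... */}, env: "production" },
};
```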
src/pages/api/job/mirror-org.test.ts (new file, 109 lines)
@@ -0,0 +1,109 @@
import { describe, test, expect, mock, beforeEach, afterEach } from "bun:test";

// Create a mock POST function
const mockPOST = mock(async ({ request }) => {
  const body = await request.json();

  // Check for missing userId or organizationIds
  if (!body.userId || !body.organizationIds) {
    return new Response(
      JSON.stringify({
        error: "Missing userId or organizationIds."
      }),
      { status: 400 }
    );
  }

  // Success case
  return new Response(
    JSON.stringify({
      success: true,
      message: "Organization mirroring started",
      batchId: "test-batch-id"
    }),
    { status: 200 }
  );
});

// Create a mock module
const mockModule = {
  POST: mockPOST
};

describe("Organization Mirroring API", () => {
  // Mock console.log and console.error to prevent test output noise
  let originalConsoleLog: typeof console.log;
  let originalConsoleError: typeof console.error;

  beforeEach(() => {
    originalConsoleLog = console.log;
    originalConsoleError = console.error;
    console.log = mock(() => {});
    console.error = mock(() => {});
  });

  afterEach(() => {
    console.log = originalConsoleLog;
    console.error = originalConsoleError;
  });

  test("returns 400 if userId is missing", async () => {
    const request = new Request("http://localhost/api/job/mirror-org", {
      method: "POST",
      headers: {
        "Content-Type": "application/json"
      },
      body: JSON.stringify({
        organizationIds: ["org-id-1", "org-id-2"]
      })
    });

    const response = await mockModule.POST({ request } as any);

    expect(response.status).toBe(400);

    const data = await response.json();
    expect(data.error).toBe("Missing userId or organizationIds.");
  });

  test("returns 400 if organizationIds is missing", async () => {
    const request = new Request("http://localhost/api/job/mirror-org", {
      method: "POST",
      headers: {
        "Content-Type": "application/json"
      },
      body: JSON.stringify({
        userId: "user-id"
      })
    });

    const response = await mockModule.POST({ request } as any);

    expect(response.status).toBe(400);

    const data = await response.json();
    expect(data.error).toBe("Missing userId or organizationIds.");
  });

  test("returns 200 and starts mirroring organizations", async () => {
    const request = new Request("http://localhost/api/job/mirror-org", {
      method: "POST",
      headers: {
        "Content-Type": "application/json"
      },
      body: JSON.stringify({
        userId: "user-id",
        organizationIds: ["org-id-1", "org-id-2"]
      })
    });

    const response = await mockModule.POST({ request } as any);

    expect(response.status).toBe(200);

    const data = await response.json();
    expect(data.success).toBe(true);
    expect(data.message).toBe("Organization mirroring started");
    expect(data.batchId).toBe("test-batch-id");
  });
});
@@ -6,6 +6,8 @@ import { createGitHubClient } from "@/lib/github";
 import { mirrorGitHubOrgToGitea } from "@/lib/gitea";
 import { repoStatusEnum } from "@/types/Repository";
 import { type MembershipRole } from "@/types/organizations";
+import { processWithResilience } from "@/lib/utils/concurrency";
+import { v4 as uuidv4 } from "uuid";

 export const POST: APIRoute = async ({ request }) => {
   try {
@@ -61,31 +63,72 @@ export const POST: APIRoute = async ({ request }) => {
       );
     }

-    // Fire async mirroring without blocking response
+    // Fire async mirroring without blocking response, using parallel processing with resilience
     setTimeout(async () => {
-      for (const org of orgs) {
-        if (!config.githubConfig.token) {
-          throw new Error("GitHub token is missing in config.");
-        }
+      if (!config.githubConfig.token) {
+        throw new Error("GitHub token is missing in config.");
+      }

-        const octokit = createGitHubClient(config.githubConfig.token);
+      // Create a single Octokit instance to be reused
+      const octokit = createGitHubClient(config.githubConfig.token);

-        try {
+      // Define the concurrency limit - adjust based on API rate limits
+      // Using a lower concurrency for organizations since each org might contain many repos
+      const CONCURRENCY_LIMIT = 2;
+
+      // Generate a batch ID to group related organizations
+      const batchId = uuidv4();
+
+      // Process organizations in parallel with resilience to container restarts
+      await processWithResilience(
+        orgs,
+        async (org) => {
+          // Prepare organization data
+          const orgData = {
+            ...org,
+            status: repoStatusEnum.parse("imported"),
+            membershipRole: org.membershipRole as MembershipRole,
+            lastMirrored: org.lastMirrored ?? undefined,
+            errorMessage: org.errorMessage ?? undefined,
+          };
+
+          // Log the start of mirroring
+          console.log(`Starting mirror for organization: ${org.name}`);
+
+          // Mirror the organization
           await mirrorGitHubOrgToGitea({
             config,
             octokit,
-            organization: {
-              ...org,
-              status: repoStatusEnum.parse("imported"),
-              membershipRole: org.membershipRole as MembershipRole,
-              lastMirrored: org.lastMirrored ?? undefined,
-              errorMessage: org.errorMessage ?? undefined,
-            },
+            organization: orgData,
           });
-        } catch (error) {
-          console.error(`Mirror failed for organization ${org.name}:`, error);
-        }
-      }
+
+          return org;
+        },
+        {
+          userId: config.userId || "",
+          jobType: "mirror",
+          batchId,
+          getItemId: (org) => org.id,
+          getItemName: (org) => org.name,
+          concurrencyLimit: CONCURRENCY_LIMIT,
+          maxRetries: 2,
+          retryDelay: 3000,
+          checkpointInterval: 1, // Checkpoint after each organization
+          onProgress: (completed, total, result) => {
+            const percentComplete = Math.round((completed / total) * 100);
+            console.log(`Organization mirroring progress: ${percentComplete}% (${completed}/${total})`);
+
+            if (result) {
+              console.log(`Successfully mirrored organization: ${result.name}`);
+            }
+          },
+          onRetry: (org, error, attempt) => {
+            console.log(`Retrying organization ${org.name} (attempt ${attempt}): ${error.message}`);
+          }
+        }
+      );
+
+      console.log("All organization mirroring tasks completed");
     }, 0);

     const responsePayload: MirrorOrgResponse = {
src/pages/api/job/mirror-repo.test.ts (new file, 109 lines)
@@ -0,0 +1,109 @@
import { describe, test, expect, mock, beforeEach, afterEach } from "bun:test";

// Create a mock POST function
const mockPOST = mock(async ({ request }) => {
  const body = await request.json();

  // Check for missing userId or repositoryIds
  if (!body.userId || !body.repositoryIds) {
    return new Response(
      JSON.stringify({
        error: "Missing userId or repositoryIds."
      }),
      { status: 400 }
    );
  }

  // Success case
  return new Response(
    JSON.stringify({
      success: true,
      message: "Repository mirroring started",
      batchId: "test-batch-id"
    }),
    { status: 200 }
  );
});

// Create a mock module
const mockModule = {
  POST: mockPOST
};

describe("Repository Mirroring API", () => {
  // Mock console.log and console.error to prevent test output noise
  let originalConsoleLog: typeof console.log;
  let originalConsoleError: typeof console.error;

  beforeEach(() => {
    originalConsoleLog = console.log;
    originalConsoleError = console.error;
    console.log = mock(() => {});
    console.error = mock(() => {});
  });

  afterEach(() => {
    console.log = originalConsoleLog;
    console.error = originalConsoleError;
  });

  test("returns 400 if userId is missing", async () => {
    const request = new Request("http://localhost/api/job/mirror-repo", {
      method: "POST",
      headers: {
        "Content-Type": "application/json"
      },
      body: JSON.stringify({
        repositoryIds: ["repo-id-1", "repo-id-2"]
      })
    });

    const response = await mockModule.POST({ request } as any);

    expect(response.status).toBe(400);

    const data = await response.json();
    expect(data.error).toBe("Missing userId or repositoryIds.");
  });

  test("returns 400 if repositoryIds is missing", async () => {
    const request = new Request("http://localhost/api/job/mirror-repo", {
      method: "POST",
      headers: {
        "Content-Type": "application/json"
      },
      body: JSON.stringify({
        userId: "user-id"
      })
    });

    const response = await mockModule.POST({ request } as any);

    expect(response.status).toBe(400);

    const data = await response.json();
    expect(data.error).toBe("Missing userId or repositoryIds.");
  });

  test("returns 200 and starts mirroring repositories", async () => {
    const request = new Request("http://localhost/api/job/mirror-repo", {
      method: "POST",
      headers: {
        "Content-Type": "application/json"
      },
      body: JSON.stringify({
        userId: "user-id",
        repositoryIds: ["repo-id-1", "repo-id-2"]
      })
    });

    const response = await mockModule.POST({ request } as any);

    expect(response.status).toBe(200);

    const data = await response.json();
    expect(data.success).toBe(true);
    expect(data.message).toBe("Repository mirroring started");
    expect(data.batchId).toBe("test-batch-id");
  });
});
@@ -8,6 +8,8 @@ import {
   mirrorGitHubOrgRepoToGiteaOrg,
 } from "@/lib/gitea";
 import { createGitHubClient } from "@/lib/github";
+import { processWithResilience } from "@/lib/utils/concurrency";
+import { v4 as uuidv4 } from "uuid";

 export const POST: APIRoute = async ({ request }) => {
   try {
@@ -63,52 +65,83 @@ export const POST: APIRoute = async ({ request }) => {
       );
     }

-    // Start async mirroring in background
+    // Start async mirroring in background with parallel processing and resilience
     setTimeout(async () => {
-      for (const repo of repos) {
-        if (!config.githubConfig.token) {
-          throw new Error("GitHub token is missing.");
-        }
+      if (!config.githubConfig.token) {
+        throw new Error("GitHub token is missing.");
+      }

-        const octokit = createGitHubClient(config.githubConfig.token);
+      // Create a single Octokit instance to be reused
+      const octokit = createGitHubClient(config.githubConfig.token);

-        try {
+      // Define the concurrency limit - adjust based on API rate limits
+      const CONCURRENCY_LIMIT = 3;
+
+      // Generate a batch ID to group related repositories
+      const batchId = uuidv4();
+
+      // Process repositories in parallel with resilience to container restarts
+      await processWithResilience(
+        repos,
+        async (repo) => {
+          // Prepare repository data
+          const repoData = {
+            ...repo,
+            status: repoStatusEnum.parse("imported"),
+            organization: repo.organization ?? undefined,
+            lastMirrored: repo.lastMirrored ?? undefined,
+            errorMessage: repo.errorMessage ?? undefined,
+            forkedFrom: repo.forkedFrom ?? undefined,
+            visibility: repositoryVisibilityEnum.parse(repo.visibility),
+            mirroredLocation: repo.mirroredLocation || "",
+          };
+
+          // Log the start of mirroring
+          console.log(`Starting mirror for repository: ${repo.name}`);
+
+          // Mirror the repository based on whether it's in an organization
           if (repo.organization && config.githubConfig.preserveOrgStructure) {
             await mirrorGitHubOrgRepoToGiteaOrg({
               config,
               octokit,
               orgName: repo.organization,
-              repository: {
-                ...repo,
-                status: repoStatusEnum.parse("imported"),
-                organization: repo.organization ?? undefined,
-                lastMirrored: repo.lastMirrored ?? undefined,
-                errorMessage: repo.errorMessage ?? undefined,
-                forkedFrom: repo.forkedFrom ?? undefined,
-                visibility: repositoryVisibilityEnum.parse(repo.visibility),
-                mirroredLocation: repo.mirroredLocation || "",
-              },
+              repository: repoData,
             });
           } else {
             await mirrorGithubRepoToGitea({
               octokit,
-              repository: {
-                ...repo,
-                status: repoStatusEnum.parse("imported"),
-                organization: repo.organization ?? undefined,
-                lastMirrored: repo.lastMirrored ?? undefined,
-                errorMessage: repo.errorMessage ?? undefined,
-                forkedFrom: repo.forkedFrom ?? undefined,
-                visibility: repositoryVisibilityEnum.parse(repo.visibility),
-                mirroredLocation: repo.mirroredLocation || "",
-              },
+              repository: repoData,
               config,
             });
           }
-        } catch (error) {
-          console.error(`Mirror failed for repo ${repo.name}:`, error);
-        }
-      }
+
+          return repo;
+        },
+        {
+          userId: config.userId || "",
+          jobType: "mirror",
+          batchId,
+          getItemId: (repo) => repo.id,
+          getItemName: (repo) => repo.name,
+          concurrencyLimit: CONCURRENCY_LIMIT,
+          maxRetries: 2,
+          retryDelay: 2000,
+          checkpointInterval: 5, // Checkpoint every 5 repositories to reduce event frequency
+          onProgress: (completed, total, result) => {
+            const percentComplete = Math.round((completed / total) * 100);
+            console.log(`Mirroring progress: ${percentComplete}% (${completed}/${total})`);

+            if (result) {
+              console.log(`Successfully mirrored repository: ${result.name}`);
+            }
+          },
+          onRetry: (repo, error, attempt) => {
+            console.log(`Retrying repository ${repo.name} (attempt ${attempt}): ${error.message}`);
+          }
+        }
+      );
+
+      console.log("All repository mirroring tasks completed");
     }, 0);

     const responsePayload: MirrorRepoResponse = {
@@ -10,6 +10,8 @@ import {
 import { createGitHubClient } from "@/lib/github";
 import { repoStatusEnum, repositoryVisibilityEnum } from "@/types/Repository";
 import type { RetryRepoRequest, RetryRepoResponse } from "@/types/retry";
+import { processWithRetry } from "@/lib/utils/concurrency";
+import { createMirrorJob } from "@/lib/helpers";

 export const POST: APIRoute = async ({ request }) => {
   try {
@@ -65,10 +67,21 @@ export const POST: APIRoute = async ({ request }) => {
       );
     }

-    // Start background retry
+    // Start background retry with parallel processing
     setTimeout(async () => {
-      for (const repo of repos) {
-        try {
+      // Create a single Octokit instance to be reused if needed
+      const octokit = config.githubConfig.token
+        ? createGitHubClient(config.githubConfig.token)
+        : null;
+
+      // Define the concurrency limit - adjust based on API rate limits
+      const CONCURRENCY_LIMIT = 3;
+
+      // Process repositories in parallel with retry capability
+      await processWithRetry(
+        repos,
+        async (repo) => {
           // Prepare repository data
           const visibility = repositoryVisibilityEnum.parse(repo.visibility);
           const status = repoStatusEnum.parse(repo.status);
           const repoData = {
@@ -81,6 +94,20 @@ export const POST: APIRoute = async ({ request }) => {
             forkedFrom: repo.forkedFrom ?? undefined,
           };

+          // Log the start of retry operation
+          console.log(`Starting retry for repository: ${repo.name}`);
+
+          // Create a mirror job entry to track progress
+          await createMirrorJob({
+            userId: config.userId || "",
+            repositoryId: repo.id,
+            repositoryName: repo.name,
+            message: `Started retry operation for repository: ${repo.name}`,
+            details: `Repository ${repo.name} is now in the retry queue.`,
+            status: "imported",
+          });
+
           // Determine if the repository exists in Gitea
           let owner = getGiteaRepoOwner({
             config,
             repository: repoData,
@@ -93,16 +120,21 @@ export const POST: APIRoute = async ({ request }) => {
           });

           if (present) {
+            // If the repository exists, sync it
             await syncGiteaRepo({ config, repository: repoData });
             console.log(`Synced existing repo: ${repo.name}`);
           } else {
+            // If the repository doesn't exist, mirror it
             if (!config.githubConfig.token) {
               throw new Error("GitHub token is missing.");
             }

+            if (!octokit) {
+              throw new Error("Octokit client is not initialized.");
+            }
+
             console.log(`Importing repo: ${repo.name} ${owner}`);

-            const octokit = createGitHubClient(config.githubConfig.token);
             if (repo.organization && config.githubConfig.preserveOrgStructure) {
               await mirrorGitHubOrgRepoToGiteaOrg({
                 config,
@@ -124,10 +156,28 @@ export const POST: APIRoute = async ({ request }) => {
               });
             }
           }
-        } catch (err) {
-          console.error(`Failed to retry repo ${repo.name}:`, err);
-        }
-      }
+
+          return repo;
+        },
+        {
+          concurrencyLimit: CONCURRENCY_LIMIT,
+          maxRetries: 2,
+          retryDelay: 2000,
+          onProgress: (completed, total, result) => {
+            const percentComplete = Math.round((completed / total) * 100);
+            console.log(`Retry progress: ${percentComplete}% (${completed}/${total})`);

+            if (result) {
+              console.log(`Successfully processed repository: ${result.name}`);
+            }
+          },
+          onRetry: (repo, error, attempt) => {
+            console.log(`Retrying repository ${repo.name} (attempt ${attempt}): ${error.message}`);
+          }
+        }
+      );
+
+      console.log("All repository retry tasks completed");
    }, 0);

     const responsePayload: RetryRepoResponse = {
@@ -5,6 +5,8 @@ import { eq, inArray } from "drizzle-orm";
 import { repositoryVisibilityEnum, repoStatusEnum } from "@/types/Repository";
 import { syncGiteaRepo } from "@/lib/gitea";
 import type { SyncRepoResponse } from "@/types/sync";
+import { processWithResilience } from "@/lib/utils/concurrency";
+import { v4 as uuidv4 } from "uuid";

 export const POST: APIRoute = async ({ request }) => {
   try {
@@ -60,26 +62,65 @@ export const POST: APIRoute = async ({ request }) => {
       );
     }

-    // Start async mirroring in background
+    // Start async mirroring in background with parallel processing and resilience
     setTimeout(async () => {
-      for (const repo of repos) {
-        try {
+      // Define the concurrency limit - adjust based on API rate limits
+      const CONCURRENCY_LIMIT = 5;
+
+      // Generate a batch ID to group related repositories
+      const batchId = uuidv4();
+
+      // Process repositories in parallel with resilience to container restarts
+      await processWithResilience(
+        repos,
+        async (repo) => {
+          // Prepare repository data
+          const repoData = {
+            ...repo,
+            status: repoStatusEnum.parse(repo.status),
+            organization: repo.organization ?? undefined,
+            lastMirrored: repo.lastMirrored ?? undefined,
+            errorMessage: repo.errorMessage ?? undefined,
+            forkedFrom: repo.forkedFrom ?? undefined,
+            visibility: repositoryVisibilityEnum.parse(repo.visibility),
+          };
+
+          // Log the start of syncing
+          console.log(`Starting sync for repository: ${repo.name}`);
+
+          // Sync the repository
           await syncGiteaRepo({
             config,
-            repository: {
-              ...repo,
-              status: repoStatusEnum.parse(repo.status),
-              organization: repo.organization ?? undefined,
-              lastMirrored: repo.lastMirrored ?? undefined,
-              errorMessage: repo.errorMessage ?? undefined,
-              forkedFrom: repo.forkedFrom ?? undefined,
-              visibility: repositoryVisibilityEnum.parse(repo.visibility),
-            },
+            repository: repoData,
           });
-        } catch (error) {
-          console.error(`Sync failed for repo ${repo.name}:`, error);
-        }
-      }
+
+          return repo;
+        },
+        {
+          userId: config.userId || "",
+          jobType: "sync",
+          batchId,
+          getItemId: (repo) => repo.id,
+          getItemName: (repo) => repo.name,
+          concurrencyLimit: CONCURRENCY_LIMIT,
+          maxRetries: 2,
+          retryDelay: 2000,
+          checkpointInterval: 1, // Checkpoint after each repository
+          onProgress: (completed, total, result) => {
+            const percentComplete = Math.round((completed / total) * 100);
+            console.log(`Syncing progress: ${percentComplete}% (${completed}/${total})`);

+            if (result) {
+              console.log(`Successfully synced repository: ${result.name}`);
+            }
+          },
+          onRetry: (repo, error, attempt) => {
+            console.log(`Retrying sync for repository ${repo.name} (attempt ${attempt}): ${error.message}`);
+          }
+        }
+      );
+
+      console.log("All repository syncing tasks completed");
     }, 0);

     const responsePayload: SyncRepoResponse = {
@@ -34,8 +34,6 @@ export const GET: APIRoute = async ({ request }) => {
       if (isClosed) return;

       try {
-        console.log(`Polling for events for user ${userId} in channel ${channel}`);
-
         // Get new events from SQLite
         const events = await getNewEvents({
           userId,
@@ -43,8 +41,6 @@ export const GET: APIRoute = async ({ request }) => {
           lastEventTime,
         });

-        console.log(`Found ${events.length} new events`);
-
         // Send events to client
         if (events.length > 0) {
           // Update last event time
@@ -52,7 +48,6 @@ export const GET: APIRoute = async ({ request }) => {

           // Send each event to the client
           for (const event of events) {
-            console.log(`Sending event: ${JSON.stringify(event.payload)}`);
             sendMessage(`data: ${JSON.stringify(event.payload)}\n\n`);
           }
         }
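On the client, those `data:` frames are consumed with a standard EventSource. A minimal sketch (the route and query-parameter names are assumptions based on the handler's inputs - it reads a userId and a channel, and tracks lastEventTime server-side):

```ts
// Browser-side consumer for the SSE endpoint above.
const userId = "user-id"; // placeholder
const source = new EventSource(`/api/sse?userId=${userId}&channel=mirror`);

source.onmessage = (event) => {
  const payload = JSON.parse(event.data); // one mirror-job event payload
  console.log("mirror event:", payload);
};

source.onerror = () => {
  // EventSource reconnects automatically; close explicitly when leaving the page.
  source.close();
};
```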
src/tests/setup.bun.ts (new file, 20 lines)
@@ -0,0 +1,20 @@
/**
 * Bun test setup file
 * This file is automatically loaded before running tests
 */

import { afterEach, beforeEach } from "bun:test";

// Clean up after each test
afterEach(() => {
  // Add any cleanup logic here
});

// Setup before each test
beforeEach(() => {
  // Add any setup logic here
});

// Add DOM testing support if needed
// import { DOMParser } from "linkedom";
// global.DOMParser = DOMParser;
@@ -18,6 +18,13 @@ export interface ScheduleConfig {
   nextRun?: Date;
 }

+export interface DatabaseCleanupConfig {
+  enabled: boolean;
+  retentionDays: number; // Actually stores seconds, but keeping the name for compatibility
+  lastRun?: Date;
+  nextRun?: Date;
+}
+
 export interface GitHubConfig {
   username: string;
   token: string;
@@ -34,6 +41,7 @@ export interface SaveConfigApiRequest {
   githubConfig: GitHubConfig;
   giteaConfig: GiteaConfig;
   scheduleConfig: ScheduleConfig;
+  cleanupConfig: DatabaseCleanupConfig;
 }

 export interface SaveConfigApiResponse {
@@ -55,6 +63,7 @@ export interface ConfigApiResponse {
   githubConfig: GitHubConfig;
   giteaConfig: GiteaConfig;
   scheduleConfig: ScheduleConfig;
+  cleanupConfig: DatabaseCleanupConfig;
   include: string[];
   exclude: string[];
   createdAt: Date;
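With the new field, a save-config request body carries four config sections. An illustrative fragment (values and the omitted GitHub/Gitea shapes are placeholders; only the fields visible in this changeset are used):

```ts
// Illustrative request body for the config save endpoint shown earlier.
const body = {
  userId: "user-id",
  scheduleConfig: { enabled: true, interval: 3600 },        // mirror every hour (seconds)
  cleanupConfig: { enabled: true, retentionDays: 604800 },  // keep 7 days of activity (seconds)
  // githubConfig and giteaConfig omitted; their full interfaces are defined above.
};
```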
||||