mirror of
https://github.com/RayLabsHQ/gitea-mirror.git
synced 2025-12-06 19:46:44 +03:00
Compare commits
17 Commits
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
3bb85a4cdb | ||
|
|
30182544ba | ||
|
|
fb73f33aeb | ||
|
|
48f63bdfc8 | ||
|
|
e2506a874e | ||
|
|
b67473ec7e | ||
|
|
4ca4356ad1 | ||
|
|
3136a2120d | ||
|
|
615ebd5079 | ||
|
|
6e48d3f86c | ||
|
|
c5de7e616d | ||
|
|
309f8c4341 | ||
|
|
0c596ac241 | ||
|
|
894be88a28 | ||
|
|
6ab7f0a5a0 | ||
|
|
abe3113755 | ||
|
|
f4bc28e6c2 |
4
.github/workflows/astro-build-test.yml
vendored
4
.github/workflows/astro-build-test.yml
vendored
@@ -38,10 +38,10 @@ jobs:
|
||||
bun install
|
||||
|
||||
- name: Run tests
|
||||
run: bunx vitest run
|
||||
run: bun test --coverage
|
||||
|
||||
- name: Build Astro project
|
||||
run: bunx astro build
|
||||
run: bunx --bun astro build
|
||||
|
||||
- name: Upload build artifacts
|
||||
uses: actions/upload-artifact@v4
|
||||
|
||||
42
CHANGELOG.md
Normal file
42
CHANGELOG.md
Normal file
@@ -0,0 +1,42 @@
|
||||
# Changelog
|
||||
|
||||
All notable changes to the Gitea Mirror project will be documented in this file.
|
||||
|
||||
The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/),
|
||||
and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html).
|
||||
|
||||
## [2.5.3] - 2025-05-22
|
||||
|
||||
### Added
|
||||
- Enhanced JWT_SECRET handling with auto-generation and persistence for improved security
|
||||
- Updated Proxmox LXC deployment instructions and replaced deprecated script
|
||||
|
||||
## [2.5.2] - 2024-11-22
|
||||
|
||||
### Fixed
|
||||
- Fixed version information in health API for Docker deployments by setting npm_package_version environment variable in entrypoint script
|
||||
|
||||
## [2.5.1] - 2024-10-01
|
||||
|
||||
### Fixed
|
||||
- Fixed Docker entrypoint script to prevent unnecessary `bun install` on container startup
|
||||
- Removed redundant dependency installation in Docker containers for pre-built images
|
||||
- Fixed "PathAlreadyExists" errors during container initialization
|
||||
|
||||
### Changed
|
||||
- Improved database initialization in Docker entrypoint script
|
||||
- Added additional checks for TypeScript versions of database management scripts
|
||||
|
||||
## [2.5.0] - 2024-09-15
|
||||
|
||||
Initial public release with core functionality:
|
||||
|
||||
### Added
|
||||
- GitHub to Gitea repository mirroring
|
||||
- User authentication and management
|
||||
- Dashboard with mirroring statistics
|
||||
- Configuration management for mirroring settings
|
||||
- Support for organization mirroring
|
||||
- Automated mirroring with configurable schedules
|
||||
- Docker multi-architecture support (amd64, arm64)
|
||||
- LXC container deployment scripts
|
||||
@@ -2,7 +2,7 @@
|
||||
|
||||
FROM oven/bun:1.2.9-alpine AS base
|
||||
WORKDIR /app
|
||||
RUN apk add --no-cache libc6-compat python3 make g++ gcc wget sqlite
|
||||
RUN apk add --no-cache libc6-compat python3 make g++ gcc wget sqlite openssl
|
||||
|
||||
# ----------------------------
|
||||
FROM base AS deps
|
||||
|
||||
13
README.md
13
README.md
@@ -20,8 +20,8 @@ docker compose --profile production up -d
|
||||
bun run setup && bun run dev
|
||||
|
||||
# Using LXC Containers
|
||||
# For Proxmox VE (online)
|
||||
curl -fsSL https://raw.githubusercontent.com/arunavo4/gitea-mirror/main/scripts/gitea-mirror-lxc-proxmox.sh | bash
|
||||
# For Proxmox VE (online) - Community script by Tobias ([CrazyWolf13](https://github.com/CrazyWolf13))
|
||||
curl -fsSL https://raw.githubusercontent.com/community-scripts/ProxmoxVED/main/install/gitea-mirror-install.sh | bash
|
||||
|
||||
# For local testing (offline-friendly)
|
||||
sudo LOCAL_REPO_DIR=~/Development/gitea-mirror ./scripts/gitea-mirror-lxc-local.sh
|
||||
@@ -175,8 +175,9 @@ Gitea Mirror offers two deployment options for LXC containers:
|
||||
|
||||
```bash
|
||||
# One-command installation on Proxmox VE
|
||||
# Optional env overrides: CTID HOSTNAME STORAGE DISK_SIZE CORES MEMORY BRIDGE IP_CONF
|
||||
curl -fsSL https://raw.githubusercontent.com/arunavo4/gitea-mirror/main/scripts/gitea-mirror-lxc-proxmox.sh | bash
|
||||
# Uses the community-maintained script by Tobias ([CrazyWolf13](https://github.com/CrazyWolf13))
|
||||
# at [community-scripts/ProxmoxVED](https://github.com/community-scripts/ProxmoxVED)
|
||||
curl -fsSL https://raw.githubusercontent.com/community-scripts/ProxmoxVED/main/install/gitea-mirror-install.sh | bash
|
||||
```
|
||||
|
||||
**2. Local testing (offline-friendly, works on developer laptops)**
|
||||
@@ -232,8 +233,10 @@ The Docker container can be configured with the following environment variables:
|
||||
- `DATABASE_URL`: SQLite database URL (default: `file:data/gitea-mirror.db`)
|
||||
- `HOST`: Host to bind to (default: `0.0.0.0`)
|
||||
- `PORT`: Port to listen on (default: `4321`)
|
||||
- `JWT_SECRET`: Secret key for JWT token generation (important for security)
|
||||
- `JWT_SECRET`: Secret key for JWT token generation (auto-generated if not provided)
|
||||
|
||||
> [!TIP]
|
||||
> For security, Gitea Mirror will automatically generate a secure random JWT secret on first run if one isn't provided or if the default value is used. This generated secret is stored in the data directory for persistence across container restarts.
|
||||
|
||||
#### Manual Installation
|
||||
|
||||
|
||||
@@ -28,7 +28,7 @@ services:
|
||||
networks:
|
||||
- gitea-network
|
||||
healthcheck:
|
||||
test: ["CMD", "wget", "--no-verbose", "--tries=1", "--spider", "http://localhost:3000/"]
|
||||
test: ["CMD", "wget", "--no-verbose", "--tries=1", "--spider", "http://localhost:3000/api/healthz"]
|
||||
interval: 30s
|
||||
timeout: 5s
|
||||
retries: 3
|
||||
@@ -75,7 +75,7 @@ services:
|
||||
- GITEA_ORG_VISIBILITY=${GITEA_ORG_VISIBILITY:-public}
|
||||
- DELAY=${DELAY:-3600}
|
||||
healthcheck:
|
||||
test: ["CMD", "wget", "--no-verbose", "--tries=1", "--spider", "http://localhost:4321/"]
|
||||
test: ["CMD", "wget", "--no-verbose", "--tries=1", "--spider", "http://localhost:4321/api/health"]
|
||||
interval: 30s
|
||||
timeout: 5s
|
||||
retries: 3
|
||||
|
||||
@@ -43,7 +43,7 @@ services:
|
||||
- GITEA_ORG_VISIBILITY=${GITEA_ORG_VISIBILITY:-public}
|
||||
- DELAY=${DELAY:-3600}
|
||||
healthcheck:
|
||||
test: ["CMD", "wget", "--no-verbose", "--tries=3", "--spider", "http://localhost:4321/"]
|
||||
test: ["CMD", "wget", "--no-verbose", "--tries=3", "--spider", "http://localhost:4321/api/health"]
|
||||
interval: 30s
|
||||
timeout: 10s
|
||||
retries: 5
|
||||
|
||||
@@ -5,12 +5,34 @@ set -e
|
||||
# Ensure data directory exists
|
||||
mkdir -p /app/data
|
||||
|
||||
# If bun is available, run setup (for dev images)
|
||||
if command -v bun >/dev/null 2>&1; then
|
||||
echo "Running bun setup (if needed)..."
|
||||
bun run setup || true
|
||||
# Generate a secure JWT secret if one isn't provided or is using the default value
|
||||
JWT_SECRET_FILE="/app/data/.jwt_secret"
|
||||
if [ "$JWT_SECRET" = "your-secret-key-change-this-in-production" ] || [ -z "$JWT_SECRET" ]; then
|
||||
# Check if we have a previously generated secret
|
||||
if [ -f "$JWT_SECRET_FILE" ]; then
|
||||
echo "Using previously generated JWT secret"
|
||||
export JWT_SECRET=$(cat "$JWT_SECRET_FILE")
|
||||
else
|
||||
echo "Generating a secure random JWT secret"
|
||||
# Try to generate a secure random string using OpenSSL
|
||||
if command -v openssl >/dev/null 2>&1; then
|
||||
GENERATED_SECRET=$(openssl rand -hex 32)
|
||||
else
|
||||
# Fallback to using /dev/urandom if openssl is not available
|
||||
echo "OpenSSL not found, using fallback method for random generation"
|
||||
GENERATED_SECRET=$(head -c 32 /dev/urandom | sha256sum | cut -d' ' -f1)
|
||||
fi
|
||||
export JWT_SECRET="$GENERATED_SECRET"
|
||||
# Save the secret to a file for persistence across container restarts
|
||||
echo "$GENERATED_SECRET" > "$JWT_SECRET_FILE"
|
||||
chmod 600 "$JWT_SECRET_FILE"
|
||||
fi
|
||||
echo "JWT_SECRET has been set to a secure random value"
|
||||
fi
|
||||
|
||||
# Skip dependency installation entirely for pre-built images
|
||||
# Dependencies are already installed during the Docker build process
|
||||
|
||||
# Initialize the database if it doesn't exist
|
||||
if [ ! -f "/app/data/gitea-mirror.db" ]; then
|
||||
echo "Initializing database..."
|
||||
@@ -18,6 +40,8 @@ if [ ! -f "/app/data/gitea-mirror.db" ]; then
|
||||
bun dist/scripts/init-db.js
|
||||
elif [ -f "dist/scripts/manage-db.js" ]; then
|
||||
bun dist/scripts/manage-db.js init
|
||||
elif [ -f "scripts/manage-db.ts" ]; then
|
||||
bun scripts/manage-db.ts init
|
||||
else
|
||||
echo "Warning: Could not find database initialization scripts in dist/scripts."
|
||||
echo "Creating and initializing database manually..."
|
||||
@@ -111,9 +135,28 @@ if [ ! -f "/app/data/gitea-mirror.db" ]; then
|
||||
status TEXT NOT NULL DEFAULT 'imported',
|
||||
message TEXT NOT NULL,
|
||||
timestamp TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP,
|
||||
|
||||
-- New fields for job resilience
|
||||
job_type TEXT NOT NULL DEFAULT 'mirror',
|
||||
batch_id TEXT,
|
||||
total_items INTEGER,
|
||||
completed_items INTEGER DEFAULT 0,
|
||||
item_ids TEXT, -- JSON array as text
|
||||
completed_item_ids TEXT DEFAULT '[]', -- JSON array as text
|
||||
in_progress INTEGER NOT NULL DEFAULT 0, -- Boolean as integer
|
||||
started_at TIMESTAMP,
|
||||
completed_at TIMESTAMP,
|
||||
last_checkpoint TIMESTAMP,
|
||||
|
||||
FOREIGN KEY (user_id) REFERENCES users(id)
|
||||
);
|
||||
|
||||
CREATE INDEX IF NOT EXISTS idx_mirror_jobs_user_id ON mirror_jobs(user_id);
|
||||
CREATE INDEX IF NOT EXISTS idx_mirror_jobs_batch_id ON mirror_jobs(batch_id);
|
||||
CREATE INDEX IF NOT EXISTS idx_mirror_jobs_in_progress ON mirror_jobs(in_progress);
|
||||
CREATE INDEX IF NOT EXISTS idx_mirror_jobs_job_type ON mirror_jobs(job_type);
|
||||
CREATE INDEX IF NOT EXISTS idx_mirror_jobs_timestamp ON mirror_jobs(timestamp);
|
||||
|
||||
CREATE TABLE IF NOT EXISTS events (
|
||||
id TEXT PRIMARY KEY,
|
||||
user_id TEXT NOT NULL,
|
||||
@@ -136,10 +179,29 @@ else
|
||||
bun dist/scripts/fix-db-issues.js
|
||||
elif [ -f "dist/scripts/manage-db.js" ]; then
|
||||
bun dist/scripts/manage-db.js fix
|
||||
elif [ -f "scripts/manage-db.ts" ]; then
|
||||
bun scripts/manage-db.ts fix
|
||||
fi
|
||||
|
||||
# Since the application is not used by anyone yet, we've removed the schema updates and migrations
|
||||
echo "Database already exists, no migrations needed."
|
||||
# Run database migrations
|
||||
echo "Running database migrations..."
|
||||
|
||||
# Update mirror_jobs table with new columns for resilience
|
||||
if [ -f "dist/scripts/update-mirror-jobs-table.js" ]; then
|
||||
echo "Updating mirror_jobs table..."
|
||||
bun dist/scripts/update-mirror-jobs-table.js
|
||||
elif [ -f "scripts/update-mirror-jobs-table.ts" ]; then
|
||||
echo "Updating mirror_jobs table using TypeScript script..."
|
||||
bun scripts/update-mirror-jobs-table.ts
|
||||
else
|
||||
echo "Warning: Could not find mirror_jobs table update script."
|
||||
fi
|
||||
fi
|
||||
|
||||
# Extract version from package.json and set as environment variable
|
||||
if [ -f "package.json" ]; then
|
||||
export npm_package_version=$(grep -o '"version": *"[^"]*"' package.json | cut -d'"' -f4)
|
||||
echo "Setting application version: $npm_package_version"
|
||||
fi
|
||||
|
||||
# Start the application
|
||||
|
||||
127
docs/testing.md
Normal file
127
docs/testing.md
Normal file
@@ -0,0 +1,127 @@
|
||||
# Testing in Gitea Mirror
|
||||
|
||||
This document provides guidance on testing in the Gitea Mirror project.
|
||||
|
||||
## Current Status
|
||||
|
||||
The project now uses Bun's built-in test runner, which is Jest-compatible and provides a fast, reliable testing experience. We've migrated away from Vitest due to compatibility issues with Bun.
|
||||
|
||||
## Running Tests
|
||||
|
||||
To run tests, use the following commands:
|
||||
|
||||
```bash
|
||||
# Run all tests
|
||||
bun test
|
||||
|
||||
# Run tests in watch mode (automatically re-run when files change)
|
||||
bun test --watch
|
||||
|
||||
# Run tests with coverage reporting
|
||||
bun test --coverage
|
||||
```
|
||||
|
||||
## Test File Naming Conventions
|
||||
|
||||
Bun's test runner automatically discovers test files that match the following patterns:
|
||||
|
||||
- `*.test.{js|jsx|ts|tsx}`
|
||||
- `*_test.{js|jsx|ts|tsx}`
|
||||
- `*.spec.{js|jsx|ts|tsx}`
|
||||
- `*_spec.{js|jsx|ts|tsx}`
|
||||
|
||||
## Writing Tests
|
||||
|
||||
The project uses Bun's test runner with a Jest-compatible API. Here's an example test:
|
||||
|
||||
```typescript
|
||||
// example.test.ts
|
||||
import { describe, test, expect } from "bun:test";
|
||||
|
||||
describe("Example Test", () => {
|
||||
test("should pass", () => {
|
||||
expect(true).toBe(true);
|
||||
});
|
||||
});
|
||||
```
|
||||
|
||||
### Testing React Components
|
||||
|
||||
For testing React components, we use React Testing Library:
|
||||
|
||||
```typescript
|
||||
// component.test.tsx
|
||||
import { describe, test, expect } from "bun:test";
|
||||
import { render, screen } from "@testing-library/react";
|
||||
import MyComponent from "../components/MyComponent";
|
||||
|
||||
describe("MyComponent", () => {
|
||||
test("renders correctly", () => {
|
||||
render(<MyComponent />);
|
||||
expect(screen.getByText("Hello World")).toBeInTheDocument();
|
||||
});
|
||||
});
|
||||
```
|
||||
|
||||
## Test Setup
|
||||
|
||||
The test setup is defined in `src/tests/setup.bun.ts` and includes:
|
||||
|
||||
- Automatic cleanup after each test
|
||||
- Setup for any global test environment needs
|
||||
|
||||
## Mocking
|
||||
|
||||
Bun's test runner provides built-in mocking capabilities:
|
||||
|
||||
```typescript
|
||||
import { test, expect, mock } from "bun:test";
|
||||
|
||||
// Create a mock function
|
||||
const mockFn = mock(() => "mocked value");
|
||||
|
||||
test("mock function", () => {
|
||||
const result = mockFn();
|
||||
expect(result).toBe("mocked value");
|
||||
expect(mockFn).toHaveBeenCalled();
|
||||
});
|
||||
|
||||
// Mock a module
|
||||
mock.module("./some-module", () => {
|
||||
return {
|
||||
someFunction: () => "mocked module function"
|
||||
};
|
||||
});
|
||||
```
|
||||
|
||||
## CI Integration
|
||||
|
||||
The CI workflow has been updated to use Bun's test runner. Tests are automatically run as part of the CI pipeline.
|
||||
|
||||
## Test Coverage
|
||||
|
||||
To generate test coverage reports, run:
|
||||
|
||||
```bash
|
||||
bun test --coverage
|
||||
```
|
||||
|
||||
This will generate a coverage report in the `coverage` directory.
|
||||
|
||||
## Types of Tests
|
||||
|
||||
The project includes several types of tests:
|
||||
|
||||
1. **Unit Tests**: Testing individual functions and utilities
|
||||
2. **API Tests**: Testing API endpoints
|
||||
3. **Component Tests**: Testing React components
|
||||
4. **Integration Tests**: Testing how components work together
|
||||
|
||||
## Future Improvements
|
||||
|
||||
When expanding the test suite, consider:
|
||||
|
||||
1. Adding more comprehensive API endpoint tests
|
||||
2. Increasing component test coverage
|
||||
3. Setting up end-to-end tests with a tool like Playwright
|
||||
4. Adding performance tests for critical paths
|
||||
14
package.json
14
package.json
@@ -1,27 +1,29 @@
|
||||
{
|
||||
"name": "gitea-mirror",
|
||||
"type": "module",
|
||||
"version": "2.1.0",
|
||||
"version": "2.5.4",
|
||||
"engines": {
|
||||
"bun": ">=1.2.9"
|
||||
},
|
||||
"scripts": {
|
||||
"setup": "bun install && bun run manage-db init",
|
||||
"setup": "bun install && bun run manage-db init && bun run update-db",
|
||||
"dev": "bunx --bun astro dev",
|
||||
"dev:clean": "bun run cleanup-db && bun run manage-db init && bunx --bun astro dev",
|
||||
"dev:clean": "bun run cleanup-db && bun run manage-db init && bun run update-db && bunx --bun astro dev",
|
||||
"build": "bunx --bun astro build",
|
||||
"cleanup-db": "rm -f gitea-mirror.db data/gitea-mirror.db",
|
||||
"manage-db": "bun scripts/manage-db.ts",
|
||||
"init-db": "bun scripts/manage-db.ts init",
|
||||
"update-db": "bun scripts/update-mirror-jobs-table.ts",
|
||||
"check-db": "bun scripts/manage-db.ts check",
|
||||
"fix-db": "bun scripts/manage-db.ts fix",
|
||||
"reset-users": "bun scripts/manage-db.ts reset-users",
|
||||
"cleanup-events": "bun scripts/cleanup-events.ts",
|
||||
"preview": "bunx --bun astro preview",
|
||||
"start": "bun dist/server/entry.mjs",
|
||||
"start:fresh": "bun run cleanup-db && bun run manage-db init && bun dist/server/entry.mjs",
|
||||
"test": "bunx --bun vitest run",
|
||||
"test:watch": "bunx --bun vitest",
|
||||
"start:fresh": "bun run cleanup-db && bun run manage-db init && bun run update-db && bun dist/server/entry.mjs",
|
||||
"test": "bun test",
|
||||
"test:watch": "bun test --watch",
|
||||
"test:coverage": "bun test --coverage",
|
||||
"astro": "bunx --bun astro"
|
||||
},
|
||||
"dependencies": {
|
||||
|
||||
@@ -18,17 +18,18 @@ Run **Gitea Mirror** in an isolated LXC container, either:
|
||||
### One-command install
|
||||
|
||||
```bash
|
||||
# optional env overrides: CTID HOSTNAME STORAGE DISK_SIZE CORES MEMORY BRIDGE IP_CONF
|
||||
sudo bash -c "$(curl -fsSL https://raw.githubusercontent.com/arunavo4/gitea-mirror/main/scripts/gitea-mirror-lxc-proxmox.sh)"
|
||||
# Community-maintained script for Proxmox VE by Tobias ([CrazyWolf13](https://github.com/CrazyWolf13))
|
||||
# at [community-scripts/ProxmoxVED](https://github.com/community-scripts/ProxmoxVED)
|
||||
sudo bash -c "$(curl -fsSL https://raw.githubusercontent.com/community-scripts/ProxmoxVED/main/install/gitea-mirror-install.sh)"
|
||||
```
|
||||
|
||||
What it does:
|
||||
|
||||
* Creates **privileged** CT `$CTID` with nesting enabled
|
||||
* Installs curl / git / Bun (official installer)
|
||||
* Uses the community-maintained script from ProxmoxVED
|
||||
* Installs dependencies and Bun runtime
|
||||
* Clones & builds `arunavo4/gitea-mirror`
|
||||
* Writes a root-run systemd service and starts it
|
||||
* Prints the container IP + random `JWT_SECRET`
|
||||
* Creates a systemd service and starts it
|
||||
* Sets up a random `JWT_SECRET` for security
|
||||
|
||||
Browse to:
|
||||
|
||||
|
||||
@@ -107,9 +107,11 @@ bun scripts/make-events-old.ts
|
||||
|
||||
### LXC Container Deployment
|
||||
|
||||
Two scripts are provided for deploying Gitea Mirror in LXC containers:
|
||||
Two deployment options are available for LXC containers:
|
||||
|
||||
1. **gitea-mirror-lxc-proxmox.sh**: For online deployment on a Proxmox VE host
|
||||
1. **Proxmox VE (online)**: Using the community-maintained script by Tobias ([CrazyWolf13](https://github.com/CrazyWolf13))
|
||||
- Author: Tobias ([CrazyWolf13](https://github.com/CrazyWolf13))
|
||||
- Available at: [community-scripts/ProxmoxVED](https://github.com/community-scripts/ProxmoxVED/blob/main/install/gitea-mirror-install.sh)
|
||||
- Pulls everything from GitHub
|
||||
- Creates a privileged container with the application
|
||||
- Sets up systemd service
|
||||
|
||||
@@ -1,97 +0,0 @@
|
||||
#!/usr/bin/env bash
|
||||
# gitea-mirror-lxc-proxmox.sh
|
||||
# Fully online installer for a Proxmox LXC guest running Gitea Mirror + Bun.
|
||||
|
||||
set -euo pipefail
|
||||
|
||||
# ────── adjustable defaults ──────────────────────────────────────────────
|
||||
CTID=${CTID:-106} # container ID
|
||||
HOSTNAME=${HOSTNAME:-gitea-mirror}
|
||||
STORAGE=${STORAGE:-local-lvm} # where rootfs lives
|
||||
DISK_SIZE=${DISK_SIZE:-8G}
|
||||
CORES=${CORES:-2}
|
||||
MEMORY=${MEMORY:-2048} # MiB
|
||||
BRIDGE=${BRIDGE:-vmbr0}
|
||||
IP_CONF=${IP_CONF:-dhcp} # or "192.168.1.240/24,gw=192.168.1.1"
|
||||
|
||||
PORT=4321
|
||||
JWT_SECRET=$(openssl rand -hex 32)
|
||||
|
||||
REPO="https://github.com/arunavo4/gitea-mirror.git"
|
||||
# ─────────────────────────────────────────────────────────────────────────
|
||||
|
||||
TEMPLATE='ubuntu-22.04-standard_22.04-1_amd64.tar.zst'
|
||||
TEMPLATE_PATH="/var/lib/vz/template/cache/${TEMPLATE}"
|
||||
|
||||
echo "▶️ Ensuring template exists…"
|
||||
if [[ ! -f $TEMPLATE_PATH ]]; then
|
||||
pveam update >/dev/null
|
||||
pveam download "$STORAGE" "$TEMPLATE"
|
||||
fi
|
||||
|
||||
echo "▶️ Creating container $CTID (if missing)…"
|
||||
if ! pct status "$CTID" &>/dev/null; then
|
||||
pct create "$CTID" "$TEMPLATE_PATH" \
|
||||
--rootfs "$STORAGE:$DISK_SIZE" \
|
||||
--hostname "$HOSTNAME" \
|
||||
--cores "$CORES" --memory "$MEMORY" \
|
||||
--net0 "name=eth0,bridge=$BRIDGE,ip=$IP_CONF" \
|
||||
--features nesting=1 \
|
||||
--unprivileged 0
|
||||
fi
|
||||
|
||||
pct start "$CTID"
|
||||
|
||||
echo "▶️ Installing base packages inside CT $CTID…"
|
||||
pct exec "$CTID" -- bash -c 'apt update && apt install -y curl git build-essential openssl sqlite3 unzip'
|
||||
|
||||
echo "▶️ Installing Bun runtime…"
|
||||
pct exec "$CTID" -- bash -c '
|
||||
export BUN_INSTALL=/opt/bun
|
||||
curl -fsSL https://bun.sh/install | bash -s -- --yes
|
||||
ln -sf /opt/bun/bin/bun /usr/local/bin/bun
|
||||
ln -sf /opt/bun/bin/bun /usr/local/bin/bunx
|
||||
bun --version
|
||||
'
|
||||
|
||||
echo "▶️ Cloning & building Gitea Mirror…"
|
||||
pct exec "$CTID" -- bash -c "
|
||||
git clone --depth=1 '$REPO' /opt/gitea-mirror || (cd /opt/gitea-mirror && git pull)
|
||||
cd /opt/gitea-mirror
|
||||
bun install
|
||||
bun run build
|
||||
bun run manage-db init
|
||||
"
|
||||
|
||||
echo "▶️ Creating systemd service…"
|
||||
pct exec "$CTID" -- bash -c "
|
||||
cat >/etc/systemd/system/gitea-mirror.service <<SERVICE
|
||||
[Unit]
|
||||
Description=Gitea Mirror
|
||||
After=network.target
|
||||
[Service]
|
||||
Type=simple
|
||||
WorkingDirectory=/opt/gitea-mirror
|
||||
ExecStart=/usr/local/bin/bun dist/server/entry.mjs
|
||||
Restart=on-failure
|
||||
RestartSec=10
|
||||
Environment=NODE_ENV=production
|
||||
Environment=HOST=0.0.0.0
|
||||
Environment=PORT=$PORT
|
||||
Environment=DATABASE_URL=file:data/gitea-mirror.db
|
||||
Environment=JWT_SECRET=$JWT_SECRET
|
||||
[Install]
|
||||
WantedBy=multi-user.target
|
||||
SERVICE
|
||||
systemctl daemon-reload
|
||||
systemctl enable gitea-mirror
|
||||
systemctl restart gitea-mirror
|
||||
"
|
||||
|
||||
echo -e "\n🔍 Service status:"
|
||||
pct exec "$CTID" -- systemctl status gitea-mirror --no-pager | head -n15
|
||||
|
||||
GUEST_IP=$(pct exec "$CTID" -- hostname -I | awk '{print $1}')
|
||||
echo -e "\n🌐 Browse to: http://$GUEST_IP:$PORT\n"
|
||||
echo "🗝️ JWT_SECRET = $JWT_SECRET"
|
||||
echo -e "\n✅ Done – Gitea Mirror is running in CT $CTID."
|
||||
@@ -145,9 +145,31 @@ async function ensureTablesExist() {
|
||||
status TEXT NOT NULL DEFAULT 'imported',
|
||||
message TEXT NOT NULL,
|
||||
timestamp TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP,
|
||||
|
||||
-- New fields for job resilience
|
||||
job_type TEXT NOT NULL DEFAULT 'mirror',
|
||||
batch_id TEXT,
|
||||
total_items INTEGER,
|
||||
completed_items INTEGER DEFAULT 0,
|
||||
item_ids TEXT, -- JSON array as text
|
||||
completed_item_ids TEXT DEFAULT '[]', -- JSON array as text
|
||||
in_progress INTEGER NOT NULL DEFAULT 0, -- Boolean as integer
|
||||
started_at TIMESTAMP,
|
||||
completed_at TIMESTAMP,
|
||||
last_checkpoint TIMESTAMP,
|
||||
|
||||
FOREIGN KEY (user_id) REFERENCES users(id)
|
||||
)
|
||||
`);
|
||||
|
||||
// Create indexes for better performance
|
||||
db.exec(`
|
||||
CREATE INDEX IF NOT EXISTS idx_mirror_jobs_user_id ON mirror_jobs(user_id);
|
||||
CREATE INDEX IF NOT EXISTS idx_mirror_jobs_batch_id ON mirror_jobs(batch_id);
|
||||
CREATE INDEX IF NOT EXISTS idx_mirror_jobs_in_progress ON mirror_jobs(in_progress);
|
||||
CREATE INDEX IF NOT EXISTS idx_mirror_jobs_job_type ON mirror_jobs(job_type);
|
||||
CREATE INDEX IF NOT EXISTS idx_mirror_jobs_timestamp ON mirror_jobs(timestamp);
|
||||
`);
|
||||
break;
|
||||
case "events":
|
||||
db.exec(`
|
||||
|
||||
133
scripts/update-mirror-jobs-table.ts
Normal file
133
scripts/update-mirror-jobs-table.ts
Normal file
@@ -0,0 +1,133 @@
|
||||
#!/usr/bin/env bun
|
||||
/**
|
||||
* Script to update the mirror_jobs table with new columns for resilience
|
||||
*/
|
||||
|
||||
import { Database } from "bun:sqlite";
|
||||
import fs from "fs";
|
||||
import path from "path";
|
||||
|
||||
// Define the database paths
|
||||
const dataDir = path.join(process.cwd(), "data");
|
||||
const dbPath = path.join(dataDir, "gitea-mirror.db");
|
||||
|
||||
// Ensure data directory exists
|
||||
if (!fs.existsSync(dataDir)) {
|
||||
fs.mkdirSync(dataDir, { recursive: true });
|
||||
console.log(`Created data directory at ${dataDir}`);
|
||||
}
|
||||
|
||||
// Check if database exists
|
||||
if (!fs.existsSync(dbPath)) {
|
||||
console.error(`Database file not found at ${dbPath}`);
|
||||
console.error("Please run 'bun run init-db' first to create the database.");
|
||||
process.exit(1);
|
||||
}
|
||||
|
||||
// Connect to the database
|
||||
const db = new Database(dbPath);
|
||||
|
||||
// Enable foreign keys
|
||||
db.exec("PRAGMA foreign_keys = ON;");
|
||||
|
||||
// Function to check if a column exists in a table
|
||||
function columnExists(tableName: string, columnName: string): boolean {
|
||||
const result = db.query(
|
||||
`PRAGMA table_info(${tableName})`
|
||||
).all() as { name: string }[];
|
||||
|
||||
return result.some(column => column.name === columnName);
|
||||
}
|
||||
|
||||
// Main function to update the mirror_jobs table
|
||||
async function updateMirrorJobsTable() {
|
||||
console.log("Checking mirror_jobs table for missing columns...");
|
||||
|
||||
// Start a transaction
|
||||
db.exec("BEGIN TRANSACTION;");
|
||||
|
||||
try {
|
||||
// Check and add each new column if it doesn't exist
|
||||
const columnsToAdd = [
|
||||
{ name: "job_type", definition: "TEXT NOT NULL DEFAULT 'mirror'" },
|
||||
{ name: "batch_id", definition: "TEXT" },
|
||||
{ name: "total_items", definition: "INTEGER" },
|
||||
{ name: "completed_items", definition: "INTEGER DEFAULT 0" },
|
||||
{ name: "item_ids", definition: "TEXT" }, // JSON array as text
|
||||
{ name: "completed_item_ids", definition: "TEXT DEFAULT '[]'" }, // JSON array as text
|
||||
{ name: "in_progress", definition: "INTEGER NOT NULL DEFAULT 0" }, // Boolean as integer
|
||||
{ name: "started_at", definition: "TIMESTAMP" },
|
||||
{ name: "completed_at", definition: "TIMESTAMP" },
|
||||
{ name: "last_checkpoint", definition: "TIMESTAMP" }
|
||||
];
|
||||
|
||||
let columnsAdded = 0;
|
||||
|
||||
for (const column of columnsToAdd) {
|
||||
if (!columnExists("mirror_jobs", column.name)) {
|
||||
console.log(`Adding column '${column.name}' to mirror_jobs table...`);
|
||||
db.exec(`ALTER TABLE mirror_jobs ADD COLUMN ${column.name} ${column.definition};`);
|
||||
columnsAdded++;
|
||||
}
|
||||
}
|
||||
|
||||
// Commit the transaction
|
||||
db.exec("COMMIT;");
|
||||
|
||||
if (columnsAdded > 0) {
|
||||
console.log(`✅ Added ${columnsAdded} new columns to mirror_jobs table.`);
|
||||
} else {
|
||||
console.log("✅ All required columns already exist in mirror_jobs table.");
|
||||
}
|
||||
|
||||
// Create indexes for better performance
|
||||
console.log("Creating indexes for mirror_jobs table...");
|
||||
|
||||
// Only create indexes if they don't exist
|
||||
const indexesResult = db.query(
|
||||
`SELECT name FROM sqlite_master WHERE type='index' AND tbl_name='mirror_jobs'`
|
||||
).all() as { name: string }[];
|
||||
|
||||
const existingIndexes = indexesResult.map(idx => idx.name);
|
||||
|
||||
const indexesToCreate = [
|
||||
{ name: "idx_mirror_jobs_user_id", columns: "user_id" },
|
||||
{ name: "idx_mirror_jobs_batch_id", columns: "batch_id" },
|
||||
{ name: "idx_mirror_jobs_in_progress", columns: "in_progress" },
|
||||
{ name: "idx_mirror_jobs_job_type", columns: "job_type" },
|
||||
{ name: "idx_mirror_jobs_timestamp", columns: "timestamp" }
|
||||
];
|
||||
|
||||
let indexesCreated = 0;
|
||||
|
||||
for (const index of indexesToCreate) {
|
||||
if (!existingIndexes.includes(index.name)) {
|
||||
console.log(`Creating index '${index.name}'...`);
|
||||
db.exec(`CREATE INDEX ${index.name} ON mirror_jobs(${index.columns});`);
|
||||
indexesCreated++;
|
||||
}
|
||||
}
|
||||
|
||||
if (indexesCreated > 0) {
|
||||
console.log(`✅ Created ${indexesCreated} new indexes for mirror_jobs table.`);
|
||||
} else {
|
||||
console.log("✅ All required indexes already exist for mirror_jobs table.");
|
||||
}
|
||||
|
||||
console.log("Mirror jobs table update completed successfully.");
|
||||
} catch (error) {
|
||||
// Rollback the transaction in case of error
|
||||
db.exec("ROLLBACK;");
|
||||
console.error("❌ Error updating mirror_jobs table:", error);
|
||||
process.exit(1);
|
||||
} finally {
|
||||
// Close the database connection
|
||||
db.close();
|
||||
}
|
||||
}
|
||||
|
||||
// Run the update function
|
||||
updateMirrorJobsTable().catch(error => {
|
||||
console.error("Unhandled error:", error);
|
||||
process.exit(1);
|
||||
});
|
||||
@@ -1,16 +1,18 @@
|
||||
import { useMemo, useRef, useState, useEffect } from "react";
|
||||
import { useVirtualizer } from "@tanstack/react-virtual";
|
||||
import type { MirrorJob } from "@/lib/db/schema";
|
||||
import Fuse from "fuse.js";
|
||||
import { Button } from "../ui/button";
|
||||
import { RefreshCw } from "lucide-react";
|
||||
import { Card } from "../ui/card";
|
||||
import { formatDate, getStatusColor } from "@/lib/utils";
|
||||
import { Skeleton } from "../ui/skeleton";
|
||||
import type { FilterParams } from "@/types/filter";
|
||||
import { useEffect, useMemo, useRef, useState } from 'react';
|
||||
import { useVirtualizer } from '@tanstack/react-virtual';
|
||||
import type { MirrorJob } from '@/lib/db/schema';
|
||||
import Fuse from 'fuse.js';
|
||||
import { Button } from '../ui/button';
|
||||
import { RefreshCw } from 'lucide-react';
|
||||
import { Card } from '../ui/card';
|
||||
import { formatDate, getStatusColor } from '@/lib/utils';
|
||||
import { Skeleton } from '../ui/skeleton';
|
||||
import type { FilterParams } from '@/types/filter';
|
||||
|
||||
type MirrorJobWithKey = MirrorJob & { _rowKey: string };
|
||||
|
||||
interface ActivityListProps {
|
||||
activities: MirrorJob[];
|
||||
activities: MirrorJobWithKey[];
|
||||
isLoading: boolean;
|
||||
filter: FilterParams;
|
||||
setFilter: (filter: FilterParams) => void;
|
||||
@@ -22,38 +24,44 @@ export default function ActivityList({
|
||||
filter,
|
||||
setFilter,
|
||||
}: ActivityListProps) {
|
||||
const [expandedItems, setExpandedItems] = useState<Set<string>>(new Set());
|
||||
const [expandedItems, setExpandedItems] = useState<Set<string>>(
|
||||
() => new Set(),
|
||||
);
|
||||
|
||||
const parentRef = useRef<HTMLDivElement>(null);
|
||||
const rowRefs = useRef<Map<string, HTMLDivElement | null>>(new Map());
|
||||
// We keep the ref only for possible future scroll-to-row logic.
|
||||
const rowRefs = useRef<Map<string, HTMLDivElement | null>>(new Map()); // eslint-disable-line @typescript-eslint/no-unused-vars
|
||||
|
||||
const filteredActivities = useMemo(() => {
|
||||
let result = activities;
|
||||
|
||||
if (filter.status) {
|
||||
result = result.filter((activity) => activity.status === filter.status);
|
||||
result = result.filter((a) => a.status === filter.status);
|
||||
}
|
||||
|
||||
if (filter.type) {
|
||||
if (filter.type === 'repository') {
|
||||
result = result.filter((activity) => !!activity.repositoryId);
|
||||
} else if (filter.type === 'organization') {
|
||||
result = result.filter((activity) => !!activity.organizationId);
|
||||
}
|
||||
result =
|
||||
filter.type === 'repository'
|
||||
? result.filter((a) => !!a.repositoryId)
|
||||
: filter.type === 'organization'
|
||||
? result.filter((a) => !!a.organizationId)
|
||||
: result;
|
||||
}
|
||||
|
||||
if (filter.name) {
|
||||
result = result.filter((activity) =>
|
||||
activity.repositoryName === filter.name ||
|
||||
activity.organizationName === filter.name
|
||||
result = result.filter(
|
||||
(a) =>
|
||||
a.repositoryName === filter.name ||
|
||||
a.organizationName === filter.name,
|
||||
);
|
||||
}
|
||||
|
||||
if (filter.searchTerm) {
|
||||
const fuse = new Fuse(result, {
|
||||
keys: ["message", "details", "organizationName", "repositoryName"],
|
||||
keys: ['message', 'details', 'organizationName', 'repositoryName'],
|
||||
threshold: 0.3,
|
||||
});
|
||||
result = fuse.search(filter.searchTerm).map((res) => res.item);
|
||||
result = fuse.search(filter.searchTerm).map((r) => r.item);
|
||||
}
|
||||
|
||||
return result;
|
||||
@@ -62,10 +70,8 @@ export default function ActivityList({
|
||||
const virtualizer = useVirtualizer({
|
||||
count: filteredActivities.length,
|
||||
getScrollElement: () => parentRef.current,
|
||||
estimateSize: (index) => {
|
||||
const activity = filteredActivities[index];
|
||||
return expandedItems.has(activity.id || "") ? 217 : 120;
|
||||
},
|
||||
estimateSize: (idx) =>
|
||||
expandedItems.has(filteredActivities[idx]._rowKey) ? 217 : 120,
|
||||
overscan: 5,
|
||||
measureElement: (el) => el.getBoundingClientRect().height + 8,
|
||||
});
|
||||
@@ -74,118 +80,132 @@ export default function ActivityList({
|
||||
virtualizer.measure();
|
||||
}, [expandedItems, virtualizer]);
|
||||
|
||||
return isLoading ? (
|
||||
<div className="flex flex-col gap-y-4">
|
||||
{Array.from({ length: 5 }, (_, index) => (
|
||||
<Skeleton key={index} className="h-28 w-full rounded-md" />
|
||||
))}
|
||||
</div>
|
||||
) : filteredActivities.length === 0 ? (
|
||||
<div className="flex flex-col items-center justify-center py-12 text-center">
|
||||
<RefreshCw className="h-12 w-12 text-muted-foreground mb-4" />
|
||||
<h3 className="text-lg font-medium">No activities found</h3>
|
||||
<p className="text-sm text-muted-foreground mt-1 mb-4 max-w-md">
|
||||
{filter.searchTerm || filter.status || filter.type || filter.name
|
||||
? "Try adjusting your search or filter criteria."
|
||||
: "No mirroring activities have been recorded yet."}
|
||||
</p>
|
||||
{filter.searchTerm || filter.status || filter.type || filter.name ? (
|
||||
<Button
|
||||
variant="outline"
|
||||
onClick={() => {
|
||||
setFilter({ searchTerm: "", status: "", type: "", name: "" });
|
||||
}}
|
||||
>
|
||||
Clear Filters
|
||||
</Button>
|
||||
) : (
|
||||
<Button>
|
||||
<RefreshCw className="h-4 w-4 mr-2" />
|
||||
Refresh
|
||||
</Button>
|
||||
)}
|
||||
</div>
|
||||
) : (
|
||||
/* ------------------------------ render ------------------------------ */
|
||||
|
||||
if (isLoading) {
|
||||
return (
|
||||
<div className='flex flex-col gap-y-4'>
|
||||
{Array.from({ length: 5 }, (_, i) => (
|
||||
<Skeleton key={i} className='h-28 w-full rounded-md' />
|
||||
))}
|
||||
</div>
|
||||
);
|
||||
}
|
||||
|
||||
if (filteredActivities.length === 0) {
|
||||
const hasFilter =
|
||||
filter.searchTerm || filter.status || filter.type || filter.name;
|
||||
|
||||
return (
|
||||
<div className='flex flex-col items-center justify-center py-12 text-center'>
|
||||
<RefreshCw className='mb-4 h-12 w-12 text-muted-foreground' />
|
||||
<h3 className='text-lg font-medium'>No activities found</h3>
|
||||
<p className='mt-1 mb-4 max-w-md text-sm text-muted-foreground'>
|
||||
{hasFilter
|
||||
? 'Try adjusting your search or filter criteria.'
|
||||
: 'No mirroring activities have been recorded yet.'}
|
||||
</p>
|
||||
{hasFilter ? (
|
||||
<Button
|
||||
variant='outline'
|
||||
onClick={() =>
|
||||
setFilter({ searchTerm: '', status: '', type: '', name: '' })
|
||||
}
|
||||
>
|
||||
Clear Filters
|
||||
</Button>
|
||||
) : (
|
||||
<Button>
|
||||
<RefreshCw className='mr-2 h-4 w-4' />
|
||||
Refresh
|
||||
</Button>
|
||||
)}
|
||||
</div>
|
||||
);
|
||||
}
|
||||
|
||||
return (
|
||||
<Card
|
||||
className="border rounded-md max-h-[calc(100dvh-191px)] overflow-y-auto relative"
|
||||
ref={parentRef}
|
||||
className='relative max-h-[calc(100dvh-191px)] overflow-y-auto rounded-md border'
|
||||
>
|
||||
<div
|
||||
style={{
|
||||
height: virtualizer.getTotalSize(),
|
||||
position: "relative",
|
||||
width: "100%",
|
||||
position: 'relative',
|
||||
width: '100%',
|
||||
}}
|
||||
>
|
||||
{virtualizer.getVirtualItems().map((virtualRow) => {
|
||||
const activity = filteredActivities[virtualRow.index];
|
||||
const isExpanded = expandedItems.has(activity.id || "");
|
||||
const key = activity.id || String(virtualRow.index);
|
||||
{virtualizer.getVirtualItems().map((vRow) => {
|
||||
const activity = filteredActivities[vRow.index];
|
||||
const isExpanded = expandedItems.has(activity._rowKey);
|
||||
|
||||
return (
|
||||
<div
|
||||
key={key}
|
||||
key={activity._rowKey}
|
||||
ref={(node) => {
|
||||
if (node) {
|
||||
rowRefs.current.set(key, node);
|
||||
virtualizer.measureElement(node);
|
||||
}
|
||||
rowRefs.current.set(activity._rowKey, node);
|
||||
if (node) virtualizer.measureElement(node);
|
||||
}}
|
||||
style={{
|
||||
position: "absolute",
|
||||
position: 'absolute',
|
||||
top: 0,
|
||||
left: 0,
|
||||
width: "100%",
|
||||
transform: `translateY(${virtualRow.start}px)`,
|
||||
paddingBottom: "8px",
|
||||
width: '100%',
|
||||
transform: `translateY(${vRow.start}px)`,
|
||||
paddingBottom: '8px',
|
||||
}}
|
||||
className="border-b px-4 pt-4"
|
||||
className='border-b px-4 pt-4'
|
||||
>
|
||||
<div className="flex items-start gap-4">
|
||||
<div className="relative mt-2">
|
||||
<div className='flex items-start gap-4'>
|
||||
<div className='relative mt-2'>
|
||||
<div
|
||||
className={`h-2 w-2 rounded-full ${getStatusColor(
|
||||
activity.status
|
||||
activity.status,
|
||||
)}`}
|
||||
/>
|
||||
</div>
|
||||
<div className="flex-1">
|
||||
<div className="flex flex-col sm:flex-row sm:items-center sm:justify-between mb-1">
|
||||
<p className="font-medium">{activity.message}</p>
|
||||
<p className="text-sm text-muted-foreground">
|
||||
|
||||
<div className='flex-1'>
|
||||
<div className='mb-1 flex flex-col sm:flex-row sm:items-center sm:justify-between'>
|
||||
<p className='font-medium'>{activity.message}</p>
|
||||
<p className='text-sm text-muted-foreground'>
|
||||
{formatDate(activity.timestamp)}
|
||||
</p>
|
||||
</div>
|
||||
|
||||
{activity.repositoryName && (
|
||||
<p className="text-sm text-muted-foreground mb-2">
|
||||
<p className='mb-2 text-sm text-muted-foreground'>
|
||||
Repository: {activity.repositoryName}
|
||||
</p>
|
||||
)}
|
||||
|
||||
{activity.organizationName && (
|
||||
<p className="text-sm text-muted-foreground mb-2">
|
||||
<p className='mb-2 text-sm text-muted-foreground'>
|
||||
Organization: {activity.organizationName}
|
||||
</p>
|
||||
)}
|
||||
|
||||
{activity.details && (
|
||||
<div className="mt-2">
|
||||
<div className='mt-2'>
|
||||
<Button
|
||||
variant="ghost"
|
||||
onClick={() => {
|
||||
const newSet = new Set(expandedItems);
|
||||
const id = activity.id || "";
|
||||
newSet.has(id) ? newSet.delete(id) : newSet.add(id);
|
||||
setExpandedItems(newSet);
|
||||
}}
|
||||
className="text-xs h-7 px-2"
|
||||
variant='ghost'
|
||||
className='h-7 px-2 text-xs'
|
||||
onClick={() =>
|
||||
setExpandedItems((prev) => {
|
||||
const next = new Set(prev);
|
||||
next.has(activity._rowKey)
|
||||
? next.delete(activity._rowKey)
|
||||
: next.add(activity._rowKey);
|
||||
return next;
|
||||
})
|
||||
}
|
||||
>
|
||||
{isExpanded ? "Hide Details" : "Show Details"}
|
||||
{isExpanded ? 'Hide Details' : 'Show Details'}
|
||||
</Button>
|
||||
|
||||
{isExpanded && (
|
||||
<pre className="mt-2 p-3 bg-muted rounded-md text-xs overflow-auto whitespace-pre-wrap min-h-[100px]">
|
||||
<pre className='mt-2 min-h-[100px] whitespace-pre-wrap overflow-auto rounded-md bg-muted p-3 text-xs'>
|
||||
{activity.details}
|
||||
</pre>
|
||||
)}
|
||||
|
||||
@@ -1,76 +1,97 @@
|
||||
import { useCallback, useEffect, useState } from "react";
|
||||
import { Button } from "@/components/ui/button";
|
||||
import { Search, Download, RefreshCw, ChevronDown } from "lucide-react";
|
||||
import { useCallback, useEffect, useState } from 'react';
|
||||
import { Button } from '@/components/ui/button';
|
||||
import { ChevronDown, Download, RefreshCw, Search } from 'lucide-react';
|
||||
import {
|
||||
DropdownMenu,
|
||||
DropdownMenuContent,
|
||||
DropdownMenuItem,
|
||||
DropdownMenuTrigger,
|
||||
} from "../ui/dropdown-menu";
|
||||
import { apiRequest, formatDate } from "@/lib/utils";
|
||||
import { useAuth } from "@/hooks/useAuth";
|
||||
import type { MirrorJob } from "@/lib/db/schema";
|
||||
import type { ActivityApiResponse } from "@/types/activities";
|
||||
} from '../ui/dropdown-menu';
|
||||
import { apiRequest, formatDate } from '@/lib/utils';
|
||||
import { useAuth } from '@/hooks/useAuth';
|
||||
import type { MirrorJob } from '@/lib/db/schema';
|
||||
import type { ActivityApiResponse } from '@/types/activities';
|
||||
import {
|
||||
Select,
|
||||
SelectContent,
|
||||
SelectItem,
|
||||
SelectTrigger,
|
||||
SelectValue,
|
||||
} from "../ui/select";
|
||||
import { repoStatusEnum, type RepoStatus } from "@/types/Repository";
|
||||
import ActivityList from "./ActivityList";
|
||||
import { ActivityNameCombobox } from "./ActivityNameCombobox";
|
||||
import { useSSE } from "@/hooks/useSEE";
|
||||
import { useFilterParams } from "@/hooks/useFilterParams";
|
||||
import { toast } from "sonner";
|
||||
} from '../ui/select';
|
||||
import { repoStatusEnum, type RepoStatus } from '@/types/Repository';
|
||||
import ActivityList from './ActivityList';
|
||||
import { ActivityNameCombobox } from './ActivityNameCombobox';
|
||||
import { useSSE } from '@/hooks/useSEE';
|
||||
import { useFilterParams } from '@/hooks/useFilterParams';
|
||||
import { toast } from 'sonner';
|
||||
|
||||
type MirrorJobWithKey = MirrorJob & { _rowKey: string };
|
||||
|
||||
function genKey(job: MirrorJob): string {
|
||||
return `${
|
||||
job.id ?? (typeof crypto !== 'undefined'
|
||||
? crypto.randomUUID()
|
||||
: Math.random().toString(36).slice(2))
|
||||
}-${job.timestamp}`;
|
||||
}
|
||||
|
||||
export function ActivityLog() {
|
||||
const { user } = useAuth();
|
||||
const [activities, setActivities] = useState<MirrorJob[]>([]);
|
||||
const [isLoading, setIsLoading] = useState<boolean>(false);
|
||||
|
||||
const [activities, setActivities] = useState<MirrorJobWithKey[]>([]);
|
||||
const [isLoading, setIsLoading] = useState(false);
|
||||
|
||||
const { filter, setFilter } = useFilterParams({
|
||||
searchTerm: "",
|
||||
status: "",
|
||||
type: "",
|
||||
name: "",
|
||||
searchTerm: '',
|
||||
status: '',
|
||||
type: '',
|
||||
name: '',
|
||||
});
|
||||
|
||||
const handleNewMessage = useCallback((data: MirrorJob) => {
|
||||
setActivities((prevActivities) => [data, ...prevActivities]);
|
||||
/* ----------------------------- SSE hook ----------------------------- */
|
||||
|
||||
console.log("Received new log:", data);
|
||||
const handleNewMessage = useCallback((data: MirrorJob) => {
|
||||
const withKey: MirrorJobWithKey = {
|
||||
...structuredClone(data),
|
||||
_rowKey: genKey(data),
|
||||
};
|
||||
|
||||
setActivities((prev) => [withKey, ...prev]);
|
||||
}, []);
|
||||
|
||||
// Use the SSE hook
|
||||
const { connected } = useSSE({
|
||||
userId: user?.id,
|
||||
onMessage: handleNewMessage,
|
||||
});
|
||||
|
||||
/* ------------------------- initial fetch --------------------------- */
|
||||
|
||||
const fetchActivities = useCallback(async () => {
|
||||
if (!user) return false;
|
||||
|
||||
try {
|
||||
setIsLoading(true);
|
||||
|
||||
const response = await apiRequest<ActivityApiResponse>(
|
||||
const res = await apiRequest<ActivityApiResponse>(
|
||||
`/activities?userId=${user.id}`,
|
||||
{
|
||||
method: "GET",
|
||||
}
|
||||
{ method: 'GET' },
|
||||
);
|
||||
|
||||
if (response.success) {
|
||||
setActivities(response.activities);
|
||||
return true;
|
||||
} else {
|
||||
toast.error(response.message || "Failed to fetch activities.");
|
||||
if (!res.success) {
|
||||
toast.error(res.message ?? 'Failed to fetch activities.');
|
||||
return false;
|
||||
}
|
||||
} catch (error) {
|
||||
|
||||
const data: MirrorJobWithKey[] = res.activities.map((a) => ({
|
||||
...structuredClone(a),
|
||||
_rowKey: genKey(a),
|
||||
}));
|
||||
|
||||
setActivities(data);
|
||||
return true;
|
||||
} catch (err) {
|
||||
toast.error(
|
||||
error instanceof Error ? error.message : "Failed to fetch activities."
|
||||
err instanceof Error ? err.message : 'Failed to fetch activities.',
|
||||
);
|
||||
return false;
|
||||
} finally {
|
||||
@@ -82,208 +103,167 @@ export function ActivityLog() {
|
||||
fetchActivities();
|
||||
}, [fetchActivities]);
|
||||
|
||||
const handleRefreshActivities = async () => {
|
||||
const success = await fetchActivities();
|
||||
if (success) {
|
||||
toast.success("Activities refreshed successfully.");
|
||||
}
|
||||
};
|
||||
/* ---------------------- filtering + exporting ---------------------- */
|
||||
|
||||
// Get the currently filtered activities
|
||||
const getFilteredActivities = () => {
|
||||
return activities.filter(activity => {
|
||||
let isIncluded = true;
|
||||
const applyLightFilter = (list: MirrorJobWithKey[]) => {
|
||||
return list.filter((a) => {
|
||||
if (filter.status && a.status !== filter.status) return false;
|
||||
|
||||
if (filter.status) {
|
||||
isIncluded = isIncluded && activity.status === filter.status;
|
||||
if (filter.type === 'repository' && !a.repositoryId) return false;
|
||||
if (filter.type === 'organization' && !a.organizationId) return false;
|
||||
|
||||
if (
|
||||
filter.name &&
|
||||
a.repositoryName !== filter.name &&
|
||||
a.organizationName !== filter.name
|
||||
) {
|
||||
return false;
|
||||
}
|
||||
|
||||
if (filter.type) {
|
||||
if (filter.type === 'repository') {
|
||||
isIncluded = isIncluded && !!activity.repositoryId;
|
||||
} else if (filter.type === 'organization') {
|
||||
isIncluded = isIncluded && !!activity.organizationId;
|
||||
}
|
||||
}
|
||||
|
||||
if (filter.name) {
|
||||
isIncluded = isIncluded && (
|
||||
activity.repositoryName === filter.name ||
|
||||
activity.organizationName === filter.name
|
||||
);
|
||||
}
|
||||
|
||||
// Note: We're not applying the search term filter here as that would require
|
||||
// re-implementing the Fuse.js search logic
|
||||
|
||||
return isIncluded;
|
||||
return true;
|
||||
});
|
||||
};
|
||||
|
||||
// Function to export activities as CSV
|
||||
const exportAsCSV = () => {
|
||||
const filteredActivities = getFilteredActivities();
|
||||
const rows = applyLightFilter(activities);
|
||||
if (!rows.length) return toast.error('No activities to export.');
|
||||
|
||||
if (filteredActivities.length === 0) {
|
||||
toast.error("No activities to export.");
|
||||
return;
|
||||
}
|
||||
|
||||
// Create CSV content
|
||||
const headers = ["Timestamp", "Message", "Status", "Repository", "Organization", "Details"];
|
||||
const csvRows = [
|
||||
headers.join(","),
|
||||
...filteredActivities.map(activity => {
|
||||
const formattedDate = formatDate(activity.timestamp);
|
||||
// Escape fields that might contain commas or quotes
|
||||
const escapeCsvField = (field: string | null | undefined) => {
|
||||
if (!field) return '';
|
||||
if (field.includes(',') || field.includes('"') || field.includes('\n')) {
|
||||
return `"${field.replace(/"/g, '""')}"`;
|
||||
}
|
||||
return field;
|
||||
};
|
||||
|
||||
return [
|
||||
formattedDate,
|
||||
escapeCsvField(activity.message),
|
||||
activity.status,
|
||||
escapeCsvField(activity.repositoryName || ''),
|
||||
escapeCsvField(activity.organizationName || ''),
|
||||
escapeCsvField(activity.details || '')
|
||||
].join(',');
|
||||
})
|
||||
const headers = [
|
||||
'Timestamp',
|
||||
'Message',
|
||||
'Status',
|
||||
'Repository',
|
||||
'Organization',
|
||||
'Details',
|
||||
];
|
||||
|
||||
const csvContent = csvRows.join('\n');
|
||||
const escape = (v: string | null | undefined) =>
|
||||
v && /[,\"\n]/.test(v) ? `"${v.replace(/"/g, '""')}"` : v ?? '';
|
||||
|
||||
// Download the CSV file
|
||||
downloadFile(csvContent, 'text/csv;charset=utf-8;', 'activity_log_export.csv');
|
||||
const csv = [
|
||||
headers.join(','),
|
||||
...rows.map((a) =>
|
||||
[
|
||||
formatDate(a.timestamp),
|
||||
escape(a.message),
|
||||
a.status,
|
||||
escape(a.repositoryName),
|
||||
escape(a.organizationName),
|
||||
escape(a.details),
|
||||
].join(','),
|
||||
),
|
||||
].join('\n');
|
||||
|
||||
toast.success("Activity log exported as CSV successfully.");
|
||||
downloadFile(csv, 'text/csv;charset=utf-8;', 'activity_log_export.csv');
|
||||
toast.success('CSV exported.');
|
||||
};
|
||||
|
||||
// Function to export activities as JSON
|
||||
const exportAsJSON = () => {
|
||||
const filteredActivities = getFilteredActivities();
|
||||
const rows = applyLightFilter(activities);
|
||||
if (!rows.length) return toast.error('No activities to export.');
|
||||
|
||||
if (filteredActivities.length === 0) {
|
||||
toast.error("No activities to export.");
|
||||
return;
|
||||
}
|
||||
const json = JSON.stringify(
|
||||
rows.map((a) => ({
|
||||
...a,
|
||||
formattedTime: formatDate(a.timestamp),
|
||||
})),
|
||||
null,
|
||||
2,
|
||||
);
|
||||
|
||||
// Format the activities for export (removing any sensitive or unnecessary fields if needed)
|
||||
const activitiesForExport = filteredActivities.map(activity => ({
|
||||
id: activity.id,
|
||||
timestamp: activity.timestamp,
|
||||
formattedTime: formatDate(activity.timestamp),
|
||||
message: activity.message,
|
||||
status: activity.status,
|
||||
repositoryId: activity.repositoryId,
|
||||
repositoryName: activity.repositoryName,
|
||||
organizationId: activity.organizationId,
|
||||
organizationName: activity.organizationName,
|
||||
details: activity.details
|
||||
}));
|
||||
|
||||
const jsonContent = JSON.stringify(activitiesForExport, null, 2);
|
||||
|
||||
// Download the JSON file
|
||||
downloadFile(jsonContent, 'application/json', 'activity_log_export.json');
|
||||
|
||||
toast.success("Activity log exported as JSON successfully.");
|
||||
downloadFile(json, 'application/json', 'activity_log_export.json');
|
||||
toast.success('JSON exported.');
|
||||
};
|
||||
|
||||
// Generic function to download a file
|
||||
const downloadFile = (content: string, mimeType: string, filename: string) => {
|
||||
// Add date to filename
|
||||
const date = new Date();
|
||||
const dateStr = `${date.getFullYear()}-${String(date.getMonth() + 1).padStart(2, '0')}-${String(date.getDate()).padStart(2, '0')}`;
|
||||
const filenameWithDate = filename.replace('.', `_${dateStr}.`);
|
||||
|
||||
// Create a download link
|
||||
const blob = new Blob([content], { type: mimeType });
|
||||
const url = URL.createObjectURL(blob);
|
||||
const downloadFile = (
|
||||
content: string,
|
||||
mime: string,
|
||||
filename: string,
|
||||
): void => {
|
||||
const date = new Date().toISOString().slice(0, 10); // yyyy-mm-dd
|
||||
const link = document.createElement('a');
|
||||
|
||||
link.href = url;
|
||||
link.setAttribute('download', filenameWithDate);
|
||||
document.body.appendChild(link);
|
||||
link.href = URL.createObjectURL(new Blob([content], { type: mime }));
|
||||
link.download = filename.replace('.', `_${date}.`);
|
||||
link.click();
|
||||
document.body.removeChild(link);
|
||||
};
|
||||
|
||||
/* ------------------------------ UI ------------------------------ */
|
||||
|
||||
return (
|
||||
<div className="flex flex-col gap-y-8">
|
||||
<div className="flex flex-row items-center gap-4 w-full">
|
||||
<div className="relative flex-1">
|
||||
<Search className="absolute left-2 top-2.5 h-4 w-4 text-muted-foreground" />
|
||||
<div className='flex flex-col gap-y-8'>
|
||||
<div className='flex w-full flex-row items-center gap-4'>
|
||||
{/* search input */}
|
||||
<div className='relative flex-1'>
|
||||
<Search className='absolute left-2 top-2.5 h-4 w-4 text-muted-foreground' />
|
||||
<input
|
||||
type="text"
|
||||
placeholder="Search activities..."
|
||||
className="pl-8 h-9 w-full rounded-md border border-input bg-background px-3 py-1 text-sm shadow-sm transition-colors placeholder:text-muted-foreground focus-visible:outline-none focus-visible:ring-1 focus-visible:ring-ring"
|
||||
type='text'
|
||||
placeholder='Search activities...'
|
||||
className='h-9 w-full rounded-md border border-input bg-background px-3 py-1 pl-8 text-sm shadow-sm transition-colors placeholder:text-muted-foreground focus-visible:outline-none focus-visible:ring-1 focus-visible:ring-ring'
|
||||
value={filter.searchTerm}
|
||||
onChange={(e) =>
|
||||
setFilter((prev) => ({ ...prev, searchTerm: e.target.value }))
|
||||
setFilter((prev) => ({
|
||||
...prev,
|
||||
searchTerm: e.target.value,
|
||||
}))
|
||||
}
|
||||
/>
|
||||
</div>
|
||||
|
||||
{/* status select */}
|
||||
<Select
|
||||
value={filter.status || "all"}
|
||||
onValueChange={(value) =>
|
||||
setFilter((prev) => ({
|
||||
...prev,
|
||||
status: value === "all" ? "" : (value as RepoStatus),
|
||||
value={filter.status || 'all'}
|
||||
onValueChange={(v) =>
|
||||
setFilter((p) => ({
|
||||
...p,
|
||||
status: v === 'all' ? '' : (v as RepoStatus),
|
||||
}))
|
||||
}
|
||||
>
|
||||
<SelectTrigger className="w-[140px] h-9 max-h-9">
|
||||
<SelectValue placeholder="All Status" />
|
||||
<SelectTrigger className='h-9 w-[140px] max-h-9'>
|
||||
<SelectValue placeholder='All Status' />
|
||||
</SelectTrigger>
|
||||
<SelectContent>
|
||||
{["all", ...repoStatusEnum.options].map((status) => (
|
||||
<SelectItem key={status} value={status}>
|
||||
{status === "all"
|
||||
? "All Status"
|
||||
: status.charAt(0).toUpperCase() + status.slice(1)}
|
||||
{['all', ...repoStatusEnum.options].map((s) => (
|
||||
<SelectItem key={s} value={s}>
|
||||
{s === 'all' ? 'All Status' : s[0].toUpperCase() + s.slice(1)}
|
||||
</SelectItem>
|
||||
))}
|
||||
</SelectContent>
|
||||
</Select>
|
||||
|
||||
{/* Repository/Organization Name Combobox */}
|
||||
{/* repo/org name combobox */}
|
||||
<ActivityNameCombobox
|
||||
activities={activities}
|
||||
value={filter.name || ""}
|
||||
onChange={(name: string) => setFilter((prev) => ({ ...prev, name }))}
|
||||
value={filter.name || ''}
|
||||
onChange={(name) => setFilter((p) => ({ ...p, name }))}
|
||||
/>
|
||||
{/* Filter by type: repository/org/all */}
|
||||
|
||||
{/* type select */}
|
||||
<Select
|
||||
value={filter.type || "all"}
|
||||
onValueChange={(value) =>
|
||||
setFilter((prev) => ({
|
||||
...prev,
|
||||
type: value === "all" ? "" : value,
|
||||
}))
|
||||
value={filter.type || 'all'}
|
||||
onValueChange={(v) =>
|
||||
setFilter((p) => ({ ...p, type: v === 'all' ? '' : v }))
|
||||
}
|
||||
>
|
||||
<SelectTrigger className="w-[140px] h-9 max-h-9">
|
||||
<SelectValue placeholder="All Types" />
|
||||
<SelectTrigger className='h-9 w-[140px] max-h-9'>
|
||||
<SelectValue placeholder='All Types' />
|
||||
</SelectTrigger>
|
||||
<SelectContent>
|
||||
{['all', 'repository', 'organization'].map((type) => (
|
||||
<SelectItem key={type} value={type}>
|
||||
{type === 'all' ? 'All Types' : type.charAt(0).toUpperCase() + type.slice(1)}
|
||||
{['all', 'repository', 'organization'].map((t) => (
|
||||
<SelectItem key={t} value={t}>
|
||||
{t === 'all' ? 'All Types' : t[0].toUpperCase() + t.slice(1)}
|
||||
</SelectItem>
|
||||
))}
|
||||
</SelectContent>
|
||||
</Select>
|
||||
|
||||
{/* export dropdown */}
|
||||
<DropdownMenu>
|
||||
<DropdownMenuTrigger asChild>
|
||||
<Button variant="outline" className="flex items-center gap-1">
|
||||
<Download className="h-4 w-4 mr-1" />
|
||||
<Button variant='outline' className='flex items-center gap-1'>
|
||||
<Download className='mr-1 h-4 w-4' />
|
||||
Export
|
||||
<ChevronDown className="h-4 w-4 ml-1" />
|
||||
<ChevronDown className='ml-1 h-4 w-4' />
|
||||
</Button>
|
||||
</DropdownMenuTrigger>
|
||||
<DropdownMenuContent>
|
||||
@@ -295,19 +275,21 @@ export function ActivityLog() {
|
||||
</DropdownMenuItem>
|
||||
</DropdownMenuContent>
|
||||
</DropdownMenu>
|
||||
<Button onClick={handleRefreshActivities}>
|
||||
<RefreshCw className="h-4 w-4 mr-2" />
|
||||
|
||||
{/* refresh */}
|
||||
<Button onClick={() => fetchActivities()}>
|
||||
<RefreshCw className='mr-2 h-4 w-4' />
|
||||
Refresh
|
||||
</Button>
|
||||
</div>
|
||||
<div className="flex flex-col gap-y-6">
|
||||
<ActivityList
|
||||
activities={activities}
|
||||
isLoading={isLoading || !connected}
|
||||
filter={filter}
|
||||
setFilter={setFilter}
|
||||
/>
|
||||
</div>
|
||||
|
||||
{/* activity list */}
|
||||
<ActivityList
|
||||
activities={applyLightFilter(activities)}
|
||||
isLoading={isLoading || !connected}
|
||||
filter={filter}
|
||||
setFilter={setFilter}
|
||||
/>
|
||||
</div>
|
||||
);
|
||||
}
|
||||
|
||||
@@ -2,6 +2,7 @@ import { useEffect, useState } from "react";
|
||||
import { cn } from "@/lib/utils";
|
||||
import { ExternalLink } from "lucide-react";
|
||||
import { links } from "@/data/Sidebar";
|
||||
import { VersionInfo } from "./VersionInfo";
|
||||
|
||||
interface SidebarProps {
|
||||
className?: string;
|
||||
@@ -19,7 +20,7 @@ export function Sidebar({ className }: SidebarProps) {
|
||||
|
||||
return (
|
||||
<aside className={cn("w-64 border-r bg-background", className)}>
|
||||
<div className="flex flex-col h-full py-4">
|
||||
<div className="flex flex-col h-full pt-4">
|
||||
<nav className="flex flex-col gap-y-1 pl-2 pr-3">
|
||||
{links.map((link, index) => {
|
||||
const isActive = currentPath === link.href;
|
||||
@@ -59,6 +60,7 @@ export function Sidebar({ className }: SidebarProps) {
|
||||
<ExternalLink className="h-3 w-3" />
|
||||
</a>
|
||||
</div>
|
||||
<VersionInfo />
|
||||
</div>
|
||||
</div>
|
||||
</aside>
|
||||
|
||||
49
src/components/layout/VersionInfo.tsx
Normal file
49
src/components/layout/VersionInfo.tsx
Normal file
@@ -0,0 +1,49 @@
|
||||
import { useEffect, useState } from "react";
|
||||
import { healthApi } from "@/lib/api";
|
||||
|
||||
export function VersionInfo() {
|
||||
const [versionInfo, setVersionInfo] = useState<{
|
||||
current: string;
|
||||
latest: string;
|
||||
updateAvailable: boolean;
|
||||
}>({
|
||||
current: "loading...",
|
||||
latest: "",
|
||||
updateAvailable: false
|
||||
});
|
||||
|
||||
useEffect(() => {
|
||||
const fetchVersion = async () => {
|
||||
try {
|
||||
const healthData = await healthApi.check();
|
||||
setVersionInfo({
|
||||
current: healthData.version || "unknown",
|
||||
latest: healthData.latestVersion || "unknown",
|
||||
updateAvailable: healthData.updateAvailable || false
|
||||
});
|
||||
} catch (error) {
|
||||
console.error("Failed to fetch version:", error);
|
||||
setVersionInfo({
|
||||
current: "unknown",
|
||||
latest: "",
|
||||
updateAvailable: false
|
||||
});
|
||||
}
|
||||
};
|
||||
|
||||
fetchVersion();
|
||||
}, []);
|
||||
|
||||
return (
|
||||
<div className="text-xs text-muted-foreground text-center pt-2 pb-3 border-t border-border mt-2">
|
||||
{versionInfo.updateAvailable ? (
|
||||
<div className="flex flex-col">
|
||||
<span>v{versionInfo.current}</span>
|
||||
<span className="text-primary">v{versionInfo.latest} available</span>
|
||||
</div>
|
||||
) : (
|
||||
<span>v{versionInfo.current}</span>
|
||||
)}
|
||||
</div>
|
||||
);
|
||||
}
|
||||
@@ -104,7 +104,6 @@ gitea-mirror/
|
||||
├── data/ # Database and persistent data
|
||||
├── docker/ # Docker configuration
|
||||
└── scripts/ # Utility scripts for deployment and maintenance
|
||||
├── gitea-mirror-lxc-proxmox.sh # Proxmox LXC deployment script
|
||||
├── gitea-mirror-lxc-local.sh # Local LXC deployment script
|
||||
└── manage-db.ts # Database management tool
|
||||
```
|
||||
@@ -114,7 +113,7 @@ gitea-mirror/
|
||||
Gitea Mirror supports multiple deployment options:
|
||||
|
||||
1. **Docker**: Run as a containerized application using Docker and docker-compose
|
||||
2. **LXC Containers**: Deploy in Linux Containers (LXC) on Proxmox VE or local workstations
|
||||
2. **LXC Containers**: Deploy in Linux Containers (LXC) on Proxmox VE (using community script by [Tobias/CrazyWolf13](https://github.com/CrazyWolf13)) or local workstations
|
||||
3. **Native**: Run directly on the host system using Bun runtime
|
||||
|
||||
Each deployment method has its own advantages:
|
||||
|
||||
@@ -25,13 +25,15 @@ The following environment variables can be used to configure Gitea Mirror:
|
||||
|----------|-------------|---------------|---------|
|
||||
| `NODE_ENV` | Runtime environment (development, production, test) | `development` | `production` |
|
||||
| `DATABASE_URL` | SQLite database URL | `file:data/gitea-mirror.db` | `file:path/to/your/database.db` |
|
||||
| `JWT_SECRET` | Secret key for JWT authentication | `your-secret-key-change-this-in-production` | `your-secure-random-string` |
|
||||
| `JWT_SECRET` | Secret key for JWT authentication | Auto-generated secure random string | `your-secure-random-string` |
|
||||
| `HOST` | Server host | `localhost` | `0.0.0.0` |
|
||||
| `PORT` | Server port | `4321` | `8080` |
|
||||
|
||||
### Important Security Note
|
||||
|
||||
In production environments, you should always set a strong, unique `JWT_SECRET` to ensure secure authentication.
|
||||
The application will automatically generate a secure random `JWT_SECRET` on first run if one isn't provided or if the default value is used. This generated secret is stored in the data directory for persistence across container restarts.
|
||||
|
||||
While this auto-generation feature provides good security by default, you can still explicitly set your own `JWT_SECRET` for complete control over your deployment.
|
||||
|
||||
## Web UI Configuration
|
||||
|
||||
|
||||
@@ -94,6 +94,8 @@ export interface HealthResponse {
|
||||
status: "ok" | "error";
|
||||
timestamp: string;
|
||||
version: string;
|
||||
latestVersion: string;
|
||||
updateAvailable: boolean;
|
||||
database: {
|
||||
connected: boolean;
|
||||
message: string;
|
||||
@@ -147,6 +149,8 @@ export const healthApi = {
|
||||
timestamp: new Date().toISOString(),
|
||||
error: error instanceof Error ? error.message : "Unknown error checking health",
|
||||
version: "unknown",
|
||||
latestVersion: "unknown",
|
||||
updateAvailable: false,
|
||||
database: { connected: false, message: "Failed to connect to API" },
|
||||
system: {
|
||||
uptime: { startTime: "", uptimeMs: 0, formatted: "N/A" },
|
||||
|
||||
42
src/lib/db/index.test.ts
Normal file
42
src/lib/db/index.test.ts
Normal file
@@ -0,0 +1,42 @@
|
||||
import { describe, test, expect, mock, beforeAll, afterAll } from "bun:test";
|
||||
import { drizzle } from "drizzle-orm/bun-sqlite";
|
||||
|
||||
// Silence console logs during tests
|
||||
let originalConsoleLog: typeof console.log;
|
||||
|
||||
beforeAll(() => {
|
||||
// Save original console.log
|
||||
originalConsoleLog = console.log;
|
||||
// Replace with no-op function
|
||||
console.log = () => {};
|
||||
});
|
||||
|
||||
afterAll(() => {
|
||||
// Restore original console.log
|
||||
console.log = originalConsoleLog;
|
||||
});
|
||||
|
||||
// Mock the database module
|
||||
mock.module("bun:sqlite", () => {
|
||||
return {
|
||||
Database: mock(function() {
|
||||
return {
|
||||
query: mock(() => ({
|
||||
all: mock(() => []),
|
||||
run: mock(() => ({}))
|
||||
}))
|
||||
};
|
||||
})
|
||||
};
|
||||
});
|
||||
|
||||
// Mock the database tables
|
||||
describe("Database Schema", () => {
|
||||
test("database connection can be created", async () => {
|
||||
// Import the db from the module
|
||||
const { db } = await import("./index");
|
||||
|
||||
// Check that db is defined
|
||||
expect(db).toBeDefined();
|
||||
});
|
||||
});
|
||||
@@ -189,6 +189,18 @@ export const mirrorJobs = sqliteTable("mirror_jobs", {
|
||||
timestamp: integer("timestamp", { mode: "timestamp" })
|
||||
.notNull()
|
||||
.default(new Date()),
|
||||
|
||||
// New fields for job resilience
|
||||
jobType: text("job_type").notNull().default("mirror"),
|
||||
batchId: text("batch_id"),
|
||||
totalItems: integer("total_items"),
|
||||
completedItems: integer("completed_items").default(0),
|
||||
itemIds: text("item_ids", { mode: "json" }).$type<string[]>(),
|
||||
completedItemIds: text("completed_item_ids", { mode: "json" }).$type<string[]>().default([]),
|
||||
inProgress: integer("in_progress", { mode: "boolean" }).notNull().default(false),
|
||||
startedAt: integer("started_at", { mode: "timestamp" }),
|
||||
completedAt: integer("completed_at", { mode: "timestamp" }),
|
||||
lastCheckpoint: integer("last_checkpoint", { mode: "timestamp" }),
|
||||
});
|
||||
|
||||
export const organizations = sqliteTable("organizations", {
|
||||
|
||||
@@ -111,6 +111,18 @@ export const mirrorJobSchema = z.object({
|
||||
status: repoStatusEnum.default("imported"),
|
||||
message: z.string(),
|
||||
timestamp: z.date().default(() => new Date()),
|
||||
|
||||
// New fields for job resilience
|
||||
jobType: z.enum(["mirror", "sync", "retry"]).default("mirror"),
|
||||
batchId: z.string().uuid().optional(), // Group related jobs together
|
||||
totalItems: z.number().optional(), // Total number of items to process
|
||||
completedItems: z.number().optional(), // Number of items completed
|
||||
itemIds: z.array(z.string()).optional(), // IDs of items to process
|
||||
completedItemIds: z.array(z.string()).optional(), // IDs of completed items
|
||||
inProgress: z.boolean().default(false), // Whether the job is currently running
|
||||
startedAt: z.date().optional(), // When the job started
|
||||
completedAt: z.date().optional(), // When the job completed
|
||||
lastCheckpoint: z.date().optional(), // Last time progress was saved
|
||||
});
|
||||
|
||||
export type MirrorJob = z.infer<typeof mirrorJobSchema>;
|
||||
|
||||
120
src/lib/gitea.test.ts
Normal file
120
src/lib/gitea.test.ts
Normal file
@@ -0,0 +1,120 @@
|
||||
import { describe, test, expect, mock, beforeEach, afterEach } from "bun:test";
|
||||
import { Octokit } from "@octokit/rest";
|
||||
import { repoStatusEnum } from "@/types/Repository";
|
||||
|
||||
// Mock the isRepoPresentInGitea function
|
||||
const mockIsRepoPresentInGitea = mock(() => Promise.resolve(false));
|
||||
|
||||
// Mock the database module
|
||||
mock.module("@/lib/db", () => {
|
||||
return {
|
||||
db: {
|
||||
update: () => ({
|
||||
set: () => ({
|
||||
where: () => Promise.resolve()
|
||||
})
|
||||
})
|
||||
},
|
||||
repositories: {},
|
||||
organizations: {}
|
||||
};
|
||||
});
|
||||
|
||||
// Mock the helpers module
|
||||
mock.module("@/lib/helpers", () => {
|
||||
return {
|
||||
createMirrorJob: mock(() => Promise.resolve("job-id"))
|
||||
};
|
||||
});
|
||||
|
||||
// Mock superagent
|
||||
mock.module("superagent", () => {
|
||||
const mockPost = mock(() => ({
|
||||
set: () => ({
|
||||
set: () => ({
|
||||
send: () => Promise.resolve({ body: { id: 123 } })
|
||||
})
|
||||
})
|
||||
}));
|
||||
|
||||
const mockGet = mock(() => ({
|
||||
set: () => Promise.resolve({ body: [] })
|
||||
}));
|
||||
|
||||
return {
|
||||
post: mockPost,
|
||||
get: mockGet
|
||||
};
|
||||
});
|
||||
|
||||
// Mock the gitea module itself
|
||||
mock.module("./gitea", () => {
|
||||
return {
|
||||
isRepoPresentInGitea: mockIsRepoPresentInGitea,
|
||||
mirrorGithubRepoToGitea: mock(async () => {}),
|
||||
mirrorGitHubOrgRepoToGiteaOrg: mock(async () => {})
|
||||
};
|
||||
});
|
||||
|
||||
describe("Gitea Repository Mirroring", () => {
|
||||
// Mock console.log and console.error to prevent test output noise
|
||||
let originalConsoleLog: typeof console.log;
|
||||
let originalConsoleError: typeof console.error;
|
||||
|
||||
beforeEach(() => {
|
||||
originalConsoleLog = console.log;
|
||||
originalConsoleError = console.error;
|
||||
console.log = mock(() => {});
|
||||
console.error = mock(() => {});
|
||||
});
|
||||
|
||||
afterEach(() => {
|
||||
console.log = originalConsoleLog;
|
||||
console.error = originalConsoleError;
|
||||
});
|
||||
|
||||
test("mirrorGithubRepoToGitea handles private repositories correctly", async () => {
|
||||
// Import the mocked function
|
||||
const { mirrorGithubRepoToGitea } = await import("./gitea");
|
||||
|
||||
// Create mock Octokit instance
|
||||
const octokit = {} as Octokit;
|
||||
|
||||
// Create mock repository (private)
|
||||
const repository = {
|
||||
id: "repo-id",
|
||||
name: "test-repo",
|
||||
fullName: "testuser/test-repo",
|
||||
url: "https://github.com/testuser/test-repo",
|
||||
cloneUrl: "https://github.com/testuser/test-repo.git",
|
||||
owner: "testuser",
|
||||
isPrivate: true,
|
||||
status: repoStatusEnum.parse("imported")
|
||||
};
|
||||
|
||||
// Create mock config
|
||||
const config = {
|
||||
id: "config-id",
|
||||
userId: "user-id",
|
||||
githubConfig: {
|
||||
token: "github-token",
|
||||
mirrorIssues: false
|
||||
},
|
||||
giteaConfig: {
|
||||
url: "https://gitea.example.com",
|
||||
token: "gitea-token",
|
||||
username: "giteauser"
|
||||
}
|
||||
};
|
||||
|
||||
// Call the function
|
||||
await mirrorGithubRepoToGitea({
|
||||
octokit,
|
||||
repository: repository as any,
|
||||
config
|
||||
});
|
||||
|
||||
// Check that the function was called
|
||||
expect(mirrorGithubRepoToGitea).toHaveBeenCalled();
|
||||
});
|
||||
});
|
||||
229
src/lib/gitea.ts
229
src/lib/gitea.ts
@@ -601,11 +601,22 @@ export async function mirrorGitHubOrgToGitea({
|
||||
.from(repositories)
|
||||
.where(eq(repositories.organization, organization.name));
|
||||
|
||||
for (const repo of orgRepos) {
|
||||
await mirrorGitHubRepoToGiteaOrg({
|
||||
octokit,
|
||||
config,
|
||||
repository: {
|
||||
if (orgRepos.length === 0) {
|
||||
console.log(`No repositories found for organization ${organization.name}`);
|
||||
return;
|
||||
}
|
||||
|
||||
console.log(`Mirroring ${orgRepos.length} repositories for organization ${organization.name}`);
|
||||
|
||||
// Import the processWithRetry function
|
||||
const { processWithRetry } = await import("@/lib/utils/concurrency");
|
||||
|
||||
// Process repositories in parallel with concurrency control
|
||||
await processWithRetry(
|
||||
orgRepos,
|
||||
async (repo) => {
|
||||
// Prepare repository data
|
||||
const repoData = {
|
||||
...repo,
|
||||
status: repo.status as RepoStatus,
|
||||
visibility: repo.visibility as RepositoryVisibility,
|
||||
@@ -614,11 +625,37 @@ export async function mirrorGitHubOrgToGitea({
|
||||
organization: repo.organization ?? undefined,
|
||||
forkedFrom: repo.forkedFrom ?? undefined,
|
||||
mirroredLocation: repo.mirroredLocation || "",
|
||||
};
|
||||
|
||||
// Log the start of mirroring
|
||||
console.log(`Starting mirror for repository: ${repo.name} in organization ${organization.name}`);
|
||||
|
||||
// Mirror the repository
|
||||
await mirrorGitHubRepoToGiteaOrg({
|
||||
octokit,
|
||||
config,
|
||||
repository: repoData,
|
||||
giteaOrgId,
|
||||
orgName: organization.name,
|
||||
});
|
||||
|
||||
return repo;
|
||||
},
|
||||
{
|
||||
concurrencyLimit: 3, // Process 3 repositories at a time
|
||||
maxRetries: 2,
|
||||
retryDelay: 2000,
|
||||
onProgress: (completed, total, result) => {
|
||||
const percentComplete = Math.round((completed / total) * 100);
|
||||
if (result) {
|
||||
console.log(`Mirrored repository "${result.name}" in organization ${organization.name} (${completed}/${total}, ${percentComplete}%)`);
|
||||
}
|
||||
},
|
||||
giteaOrgId,
|
||||
orgName: organization.name,
|
||||
});
|
||||
}
|
||||
onRetry: (repo, error, attempt) => {
|
||||
console.log(`Retrying repository ${repo.name} in organization ${organization.name} (attempt ${attempt}): ${error.message}`);
|
||||
}
|
||||
}
|
||||
);
|
||||
|
||||
console.log(`Organization ${organization.name} mirrored successfully`);
|
||||
|
||||
@@ -837,7 +874,15 @@ export const mirrorGitRepoIssuesToGitea = async ({
|
||||
(res) => res.data
|
||||
);
|
||||
|
||||
console.log(`Mirroring ${issues.length} issues from ${repository.fullName}`);
|
||||
// Filter out pull requests
|
||||
const filteredIssues = issues.filter(issue => !(issue as any).pull_request);
|
||||
|
||||
console.log(`Mirroring ${filteredIssues.length} issues from ${repository.fullName}`);
|
||||
|
||||
if (filteredIssues.length === 0) {
|
||||
console.log(`No issues to mirror for ${repository.fullName}`);
|
||||
return;
|
||||
}
|
||||
|
||||
// Get existing labels from Gitea
|
||||
const giteaLabelsRes = await superagent
|
||||
@@ -851,58 +896,60 @@ export const mirrorGitRepoIssuesToGitea = async ({
|
||||
giteaLabels.map((label: any) => [label.name, label.id])
|
||||
);
|
||||
|
||||
for (const issue of issues) {
|
||||
if ((issue as any).pull_request) {
|
||||
continue;
|
||||
}
|
||||
// Import the processWithRetry function
|
||||
const { processWithRetry } = await import("@/lib/utils/concurrency");
|
||||
|
||||
const githubLabelNames =
|
||||
issue.labels
|
||||
?.map((l) => (typeof l === "string" ? l : l.name))
|
||||
.filter((l): l is string => !!l) || [];
|
||||
// Process issues in parallel with concurrency control
|
||||
await processWithRetry(
|
||||
filteredIssues,
|
||||
async (issue) => {
|
||||
const githubLabelNames =
|
||||
issue.labels
|
||||
?.map((l) => (typeof l === "string" ? l : l.name))
|
||||
.filter((l): l is string => !!l) || [];
|
||||
|
||||
const giteaLabelIds: number[] = [];
|
||||
const giteaLabelIds: number[] = [];
|
||||
|
||||
// Resolve or create labels in Gitea
|
||||
for (const name of githubLabelNames) {
|
||||
if (labelMap.has(name)) {
|
||||
giteaLabelIds.push(labelMap.get(name)!);
|
||||
} else {
|
||||
try {
|
||||
const created = await superagent
|
||||
.post(
|
||||
`${config.giteaConfig.url}/api/v1/repos/${repoOrigin}/${repository.name}/labels`
|
||||
)
|
||||
.set("Authorization", `token ${config.giteaConfig.token}`)
|
||||
.send({ name, color: "#ededed" }); // Default color
|
||||
// Resolve or create labels in Gitea
|
||||
for (const name of githubLabelNames) {
|
||||
if (labelMap.has(name)) {
|
||||
giteaLabelIds.push(labelMap.get(name)!);
|
||||
} else {
|
||||
try {
|
||||
const created = await superagent
|
||||
.post(
|
||||
`${config.giteaConfig.url}/api/v1/repos/${repoOrigin}/${repository.name}/labels`
|
||||
)
|
||||
.set("Authorization", `token ${config.giteaConfig.token}`)
|
||||
.send({ name, color: "#ededed" }); // Default color
|
||||
|
||||
labelMap.set(name, created.body.id);
|
||||
giteaLabelIds.push(created.body.id);
|
||||
} catch (labelErr) {
|
||||
console.error(
|
||||
`Failed to create label "${name}" in Gitea: ${labelErr}`
|
||||
);
|
||||
labelMap.set(name, created.body.id);
|
||||
giteaLabelIds.push(created.body.id);
|
||||
} catch (labelErr) {
|
||||
console.error(
|
||||
`Failed to create label "${name}" in Gitea: ${labelErr}`
|
||||
);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
const originalAssignees =
|
||||
issue.assignees && issue.assignees.length > 0
|
||||
? `\n\nOriginally assigned to: ${issue.assignees
|
||||
.map((a) => `@${a.login}`)
|
||||
.join(", ")} on GitHub.`
|
||||
: "";
|
||||
const originalAssignees =
|
||||
issue.assignees && issue.assignees.length > 0
|
||||
? `\n\nOriginally assigned to: ${issue.assignees
|
||||
.map((a) => `@${a.login}`)
|
||||
.join(", ")} on GitHub.`
|
||||
: "";
|
||||
|
||||
const issuePayload: any = {
|
||||
title: issue.title,
|
||||
body: `Originally created by @${
|
||||
issue.user?.login
|
||||
} on GitHub.${originalAssignees}\n\n${issue.body || ""}`,
|
||||
closed: issue.state === "closed",
|
||||
labels: giteaLabelIds,
|
||||
};
|
||||
const issuePayload: any = {
|
||||
title: issue.title,
|
||||
body: `Originally created by @${
|
||||
issue.user?.login
|
||||
} on GitHub.${originalAssignees}\n\n${issue.body || ""}`,
|
||||
closed: issue.state === "closed",
|
||||
labels: giteaLabelIds,
|
||||
};
|
||||
|
||||
try {
|
||||
// Create the issue in Gitea
|
||||
const createdIssue = await superagent
|
||||
.post(
|
||||
`${config.giteaConfig.url}/api/v1/repos/${repoOrigin}/${repository.name}/issues`
|
||||
@@ -922,41 +969,49 @@ export const mirrorGitRepoIssuesToGitea = async ({
|
||||
(res) => res.data
|
||||
);
|
||||
|
||||
for (const comment of comments) {
|
||||
try {
|
||||
await superagent
|
||||
.post(
|
||||
`${config.giteaConfig.url}/api/v1/repos/${repoOrigin}/${repository.name}/issues/${createdIssue.body.number}/comments`
|
||||
)
|
||||
.set("Authorization", `token ${config.giteaConfig.token}`)
|
||||
.send({
|
||||
body: `@${comment.user?.login} commented on GitHub:\n\n${comment.body}`,
|
||||
});
|
||||
} catch (commentErr) {
|
||||
console.error(
|
||||
`Failed to copy comment to Gitea for issue "${issue.title}": ${
|
||||
commentErr instanceof Error
|
||||
? commentErr.message
|
||||
: String(commentErr)
|
||||
}`
|
||||
);
|
||||
}
|
||||
// Process comments in parallel with concurrency control
|
||||
if (comments.length > 0) {
|
||||
await processWithRetry(
|
||||
comments,
|
||||
async (comment) => {
|
||||
await superagent
|
||||
.post(
|
||||
`${config.giteaConfig.url}/api/v1/repos/${repoOrigin}/${repository.name}/issues/${createdIssue.body.number}/comments`
|
||||
)
|
||||
.set("Authorization", `token ${config.giteaConfig.token}`)
|
||||
.send({
|
||||
body: `@${comment.user?.login} commented on GitHub:\n\n${comment.body}`,
|
||||
});
|
||||
return comment;
|
||||
},
|
||||
{
|
||||
concurrencyLimit: 5,
|
||||
maxRetries: 2,
|
||||
retryDelay: 1000,
|
||||
onRetry: (comment, error, attempt) => {
|
||||
console.log(`Retrying comment (attempt ${attempt}): ${error.message}`);
|
||||
}
|
||||
}
|
||||
);
|
||||
}
|
||||
} catch (err) {
|
||||
if (err instanceof Error && (err as any).response) {
|
||||
console.error(
|
||||
`Failed to create issue "${issue.title}" in Gitea: ${err.message}`
|
||||
);
|
||||
console.error(
|
||||
`Response body: ${JSON.stringify((err as any).response.body)}`
|
||||
);
|
||||
} else {
|
||||
console.error(
|
||||
`Failed to create issue "${issue.title}" in Gitea: ${
|
||||
err instanceof Error ? err.message : String(err)
|
||||
}`
|
||||
);
|
||||
|
||||
return issue;
|
||||
},
|
||||
{
|
||||
concurrencyLimit: 3, // Process 3 issues at a time
|
||||
maxRetries: 2,
|
||||
retryDelay: 2000,
|
||||
onProgress: (completed, total, result) => {
|
||||
const percentComplete = Math.round((completed / total) * 100);
|
||||
if (result) {
|
||||
console.log(`Mirrored issue "${result.title}" (${completed}/${total}, ${percentComplete}%)`);
|
||||
}
|
||||
},
|
||||
onRetry: (issue, error, attempt) => {
|
||||
console.log(`Retrying issue "${issue.title}" (attempt ${attempt}): ${error.message}`);
|
||||
}
|
||||
}
|
||||
}
|
||||
);
|
||||
|
||||
console.log(`Completed mirroring ${filteredIssues.length} issues for ${repository.fullName}`);
|
||||
};
|
||||
|
||||
@@ -12,6 +12,11 @@ export async function createMirrorJob({
|
||||
message,
|
||||
status,
|
||||
details,
|
||||
jobType,
|
||||
batchId,
|
||||
totalItems,
|
||||
itemIds,
|
||||
inProgress,
|
||||
}: {
|
||||
userId: string;
|
||||
organizationId?: string;
|
||||
@@ -21,6 +26,11 @@ export async function createMirrorJob({
|
||||
details?: string;
|
||||
message: string;
|
||||
status: RepoStatus;
|
||||
jobType?: "mirror" | "sync" | "retry";
|
||||
batchId?: string;
|
||||
totalItems?: number;
|
||||
itemIds?: string[];
|
||||
inProgress?: boolean;
|
||||
}) {
|
||||
const jobId = uuidv4();
|
||||
const currentTimestamp = new Date();
|
||||
@@ -32,11 +42,22 @@ export async function createMirrorJob({
|
||||
repositoryName,
|
||||
organizationId,
|
||||
organizationName,
|
||||
configId: uuidv4(),
|
||||
details,
|
||||
message: message,
|
||||
status: status,
|
||||
timestamp: currentTimestamp,
|
||||
|
||||
// New resilience fields
|
||||
jobType: jobType || "mirror",
|
||||
batchId: batchId || undefined,
|
||||
totalItems: totalItems || undefined,
|
||||
completedItems: 0,
|
||||
itemIds: itemIds || undefined,
|
||||
completedItemIds: [],
|
||||
inProgress: inProgress !== undefined ? inProgress : false,
|
||||
startedAt: inProgress ? currentTimestamp : undefined,
|
||||
completedAt: undefined,
|
||||
lastCheckpoint: undefined,
|
||||
};
|
||||
|
||||
try {
|
||||
@@ -57,3 +78,186 @@ export async function createMirrorJob({
|
||||
throw new Error("Error creating mirror job");
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Updates the progress of a mirror job
|
||||
*/
|
||||
export async function updateMirrorJobProgress({
|
||||
jobId,
|
||||
completedItemId,
|
||||
status,
|
||||
message,
|
||||
details,
|
||||
inProgress,
|
||||
isCompleted,
|
||||
}: {
|
||||
jobId: string;
|
||||
completedItemId?: string;
|
||||
status?: RepoStatus;
|
||||
message?: string;
|
||||
details?: string;
|
||||
inProgress?: boolean;
|
||||
isCompleted?: boolean;
|
||||
}) {
|
||||
try {
|
||||
// Get the current job
|
||||
const [job] = await db
|
||||
.select()
|
||||
.from(mirrorJobs)
|
||||
.where(mirrorJobs.id === jobId);
|
||||
|
||||
if (!job) {
|
||||
throw new Error(`Mirror job with ID ${jobId} not found`);
|
||||
}
|
||||
|
||||
// Update the job with new progress
|
||||
const updates: Record<string, any> = {
|
||||
lastCheckpoint: new Date(),
|
||||
};
|
||||
|
||||
// Add completed item if provided
|
||||
if (completedItemId) {
|
||||
const completedItemIds = job.completedItemIds || [];
|
||||
if (!completedItemIds.includes(completedItemId)) {
|
||||
updates.completedItemIds = [...completedItemIds, completedItemId];
|
||||
updates.completedItems = (job.completedItems || 0) + 1;
|
||||
}
|
||||
}
|
||||
|
||||
// Update status if provided
|
||||
if (status) {
|
||||
updates.status = status;
|
||||
}
|
||||
|
||||
// Update message if provided
|
||||
if (message) {
|
||||
updates.message = message;
|
||||
}
|
||||
|
||||
// Update details if provided
|
||||
if (details) {
|
||||
updates.details = details;
|
||||
}
|
||||
|
||||
// Update in-progress status if provided
|
||||
if (inProgress !== undefined) {
|
||||
updates.inProgress = inProgress;
|
||||
}
|
||||
|
||||
// Mark as completed if specified
|
||||
if (isCompleted) {
|
||||
updates.inProgress = false;
|
||||
updates.completedAt = new Date();
|
||||
}
|
||||
|
||||
// Update the job in the database
|
||||
await db
|
||||
.update(mirrorJobs)
|
||||
.set(updates)
|
||||
.where(mirrorJobs.id === jobId);
|
||||
|
||||
// Publish the event
|
||||
const updatedJob = {
|
||||
...job,
|
||||
...updates,
|
||||
};
|
||||
|
||||
await publishEvent({
|
||||
userId: job.userId,
|
||||
channel: `mirror-status:${job.userId}`,
|
||||
payload: updatedJob,
|
||||
});
|
||||
|
||||
return updatedJob;
|
||||
} catch (error) {
|
||||
console.error("Error updating mirror job progress:", error);
|
||||
throw new Error("Error updating mirror job progress");
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Finds interrupted jobs that need to be resumed
|
||||
*/
|
||||
export async function findInterruptedJobs() {
|
||||
try {
|
||||
// Find jobs that are marked as in-progress but haven't been updated recently
|
||||
const cutoffTime = new Date();
|
||||
cutoffTime.setMinutes(cutoffTime.getMinutes() - 10); // Consider jobs inactive after 10 minutes without updates
|
||||
|
||||
const interruptedJobs = await db
|
||||
.select()
|
||||
.from(mirrorJobs)
|
||||
.where(
|
||||
mirrorJobs.inProgress === true &&
|
||||
(mirrorJobs.lastCheckpoint === null ||
|
||||
mirrorJobs.lastCheckpoint < cutoffTime)
|
||||
);
|
||||
|
||||
return interruptedJobs;
|
||||
} catch (error) {
|
||||
console.error("Error finding interrupted jobs:", error);
|
||||
return [];
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Resumes an interrupted job
|
||||
*/
|
||||
export async function resumeInterruptedJob(job: any) {
|
||||
try {
|
||||
console.log(`Resuming interrupted job: ${job.id}`);
|
||||
|
||||
// Skip if job doesn't have the necessary data to resume
|
||||
if (!job.itemIds || !job.completedItemIds) {
|
||||
console.log(`Cannot resume job ${job.id}: missing item data`);
|
||||
|
||||
// Mark the job as failed
|
||||
await updateMirrorJobProgress({
|
||||
jobId: job.id,
|
||||
status: "failed",
|
||||
message: "Job interrupted and could not be resumed",
|
||||
details: "The job was interrupted and did not have enough information to resume",
|
||||
inProgress: false,
|
||||
isCompleted: true,
|
||||
});
|
||||
|
||||
return null;
|
||||
}
|
||||
|
||||
// Calculate remaining items
|
||||
const remainingItemIds = job.itemIds.filter(
|
||||
(id: string) => !job.completedItemIds.includes(id)
|
||||
);
|
||||
|
||||
if (remainingItemIds.length === 0) {
|
||||
console.log(`Job ${job.id} has no remaining items, marking as completed`);
|
||||
|
||||
// Mark the job as completed
|
||||
await updateMirrorJobProgress({
|
||||
jobId: job.id,
|
||||
status: "mirrored",
|
||||
message: "Job completed after resuming",
|
||||
inProgress: false,
|
||||
isCompleted: true,
|
||||
});
|
||||
|
||||
return null;
|
||||
}
|
||||
|
||||
// Update the job to show it's being resumed
|
||||
await updateMirrorJobProgress({
|
||||
jobId: job.id,
|
||||
message: `Resuming job with ${remainingItemIds.length} remaining items`,
|
||||
details: `Job was interrupted and is being resumed. ${job.completedItemIds.length} of ${job.itemIds.length} items were already processed.`,
|
||||
inProgress: true,
|
||||
});
|
||||
|
||||
return {
|
||||
job,
|
||||
remainingItemIds,
|
||||
};
|
||||
} catch (error) {
|
||||
console.error(`Error resuming job ${job.id}:`, error);
|
||||
return null;
|
||||
}
|
||||
}
|
||||
|
||||
224
src/lib/recovery.ts
Normal file
224
src/lib/recovery.ts
Normal file
@@ -0,0 +1,224 @@
|
||||
/**
|
||||
* Recovery mechanism for interrupted jobs
|
||||
* This module handles detecting and resuming jobs that were interrupted by container restarts
|
||||
*/
|
||||
|
||||
import { findInterruptedJobs, resumeInterruptedJob } from './helpers';
|
||||
import { db, repositories, organizations } from './db';
|
||||
import { eq } from 'drizzle-orm';
|
||||
import { mirrorGithubRepoToGitea, mirrorGitHubOrgRepoToGiteaOrg, syncGiteaRepo } from './gitea';
|
||||
import { createGitHubClient } from './github';
|
||||
import { processWithResilience } from './utils/concurrency';
|
||||
import { repositoryVisibilityEnum, repoStatusEnum } from '@/types/Repository';
|
||||
import type { Repository } from './db/schema';
|
||||
|
||||
/**
|
||||
* Initialize the recovery system
|
||||
* This should be called when the application starts
|
||||
*/
|
||||
export async function initializeRecovery() {
|
||||
console.log('Initializing recovery system...');
|
||||
|
||||
try {
|
||||
// Find interrupted jobs
|
||||
const interruptedJobs = await findInterruptedJobs();
|
||||
|
||||
if (interruptedJobs.length === 0) {
|
||||
console.log('No interrupted jobs found.');
|
||||
return;
|
||||
}
|
||||
|
||||
console.log(`Found ${interruptedJobs.length} interrupted jobs. Starting recovery...`);
|
||||
|
||||
// Process each interrupted job
|
||||
for (const job of interruptedJobs) {
|
||||
const resumeData = await resumeInterruptedJob(job);
|
||||
|
||||
if (!resumeData) {
|
||||
console.log(`Job ${job.id} could not be resumed.`);
|
||||
continue;
|
||||
}
|
||||
|
||||
const { job: updatedJob, remainingItemIds } = resumeData;
|
||||
|
||||
// Handle different job types
|
||||
switch (updatedJob.jobType) {
|
||||
case 'mirror':
|
||||
await recoverMirrorJob(updatedJob, remainingItemIds);
|
||||
break;
|
||||
case 'sync':
|
||||
await recoverSyncJob(updatedJob, remainingItemIds);
|
||||
break;
|
||||
case 'retry':
|
||||
await recoverRetryJob(updatedJob, remainingItemIds);
|
||||
break;
|
||||
default:
|
||||
console.log(`Unknown job type: ${updatedJob.jobType}`);
|
||||
}
|
||||
}
|
||||
|
||||
console.log('Recovery process completed.');
|
||||
} catch (error) {
|
||||
console.error('Error during recovery process:', error);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Recover a mirror job
|
||||
*/
|
||||
async function recoverMirrorJob(job: any, remainingItemIds: string[]) {
|
||||
console.log(`Recovering mirror job ${job.id} with ${remainingItemIds.length} remaining items`);
|
||||
|
||||
try {
|
||||
// Get the config for this user
|
||||
const [config] = await db
|
||||
.select()
|
||||
.from(repositories)
|
||||
.where(eq(repositories.userId, job.userId))
|
||||
.limit(1);
|
||||
|
||||
if (!config || !config.configId) {
|
||||
throw new Error('Config not found for user');
|
||||
}
|
||||
|
||||
// Get repositories to process
|
||||
const repos = await db
|
||||
.select()
|
||||
.from(repositories)
|
||||
.where(eq(repositories.id, remainingItemIds));
|
||||
|
||||
if (repos.length === 0) {
|
||||
throw new Error('No repositories found for the remaining item IDs');
|
||||
}
|
||||
|
||||
// Create GitHub client
|
||||
const octokit = createGitHubClient(config.githubConfig.token);
|
||||
|
||||
// Process repositories with resilience
|
||||
await processWithResilience(
|
||||
repos,
|
||||
async (repo) => {
|
||||
// Prepare repository data
|
||||
const repoData = {
|
||||
...repo,
|
||||
status: repoStatusEnum.parse("imported"),
|
||||
organization: repo.organization ?? undefined,
|
||||
lastMirrored: repo.lastMirrored ?? undefined,
|
||||
errorMessage: repo.errorMessage ?? undefined,
|
||||
forkedFrom: repo.forkedFrom ?? undefined,
|
||||
visibility: repositoryVisibilityEnum.parse(repo.visibility),
|
||||
mirroredLocation: repo.mirroredLocation || "",
|
||||
};
|
||||
|
||||
// Mirror the repository based on whether it's in an organization
|
||||
if (repo.organization && config.githubConfig.preserveOrgStructure) {
|
||||
await mirrorGitHubOrgRepoToGiteaOrg({
|
||||
config,
|
||||
octokit,
|
||||
orgName: repo.organization,
|
||||
repository: repoData,
|
||||
});
|
||||
} else {
|
||||
await mirrorGithubRepoToGitea({
|
||||
octokit,
|
||||
repository: repoData,
|
||||
config,
|
||||
});
|
||||
}
|
||||
|
||||
return repo;
|
||||
},
|
||||
{
|
||||
userId: job.userId,
|
||||
jobType: 'mirror',
|
||||
getItemId: (repo) => repo.id,
|
||||
getItemName: (repo) => repo.name,
|
||||
resumeFromJobId: job.id,
|
||||
concurrencyLimit: 3,
|
||||
maxRetries: 2,
|
||||
retryDelay: 2000,
|
||||
}
|
||||
);
|
||||
} catch (error) {
|
||||
console.error(`Error recovering mirror job ${job.id}:`, error);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Recover a sync job
|
||||
*/
|
||||
async function recoverSyncJob(job: any, remainingItemIds: string[]) {
|
||||
// Implementation similar to recoverMirrorJob but for sync operations
|
||||
console.log(`Recovering sync job ${job.id} with ${remainingItemIds.length} remaining items`);
|
||||
|
||||
try {
|
||||
// Get the config for this user
|
||||
const [config] = await db
|
||||
.select()
|
||||
.from(repositories)
|
||||
.where(eq(repositories.userId, job.userId))
|
||||
.limit(1);
|
||||
|
||||
if (!config || !config.configId) {
|
||||
throw new Error('Config not found for user');
|
||||
}
|
||||
|
||||
// Get repositories to process
|
||||
const repos = await db
|
||||
.select()
|
||||
.from(repositories)
|
||||
.where(eq(repositories.id, remainingItemIds));
|
||||
|
||||
if (repos.length === 0) {
|
||||
throw new Error('No repositories found for the remaining item IDs');
|
||||
}
|
||||
|
||||
// Process repositories with resilience
|
||||
await processWithResilience(
|
||||
repos,
|
||||
async (repo) => {
|
||||
// Prepare repository data
|
||||
const repoData = {
|
||||
...repo,
|
||||
status: repoStatusEnum.parse(repo.status),
|
||||
organization: repo.organization ?? undefined,
|
||||
lastMirrored: repo.lastMirrored ?? undefined,
|
||||
errorMessage: repo.errorMessage ?? undefined,
|
||||
forkedFrom: repo.forkedFrom ?? undefined,
|
||||
visibility: repositoryVisibilityEnum.parse(repo.visibility),
|
||||
};
|
||||
|
||||
// Sync the repository
|
||||
await syncGiteaRepo({
|
||||
config,
|
||||
repository: repoData,
|
||||
});
|
||||
|
||||
return repo;
|
||||
},
|
||||
{
|
||||
userId: job.userId,
|
||||
jobType: 'sync',
|
||||
getItemId: (repo) => repo.id,
|
||||
getItemName: (repo) => repo.name,
|
||||
resumeFromJobId: job.id,
|
||||
concurrencyLimit: 5,
|
||||
maxRetries: 2,
|
||||
retryDelay: 2000,
|
||||
}
|
||||
);
|
||||
} catch (error) {
|
||||
console.error(`Error recovering sync job ${job.id}:`, error);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Recover a retry job
|
||||
*/
|
||||
async function recoverRetryJob(job: any, remainingItemIds: string[]) {
|
||||
// Implementation similar to recoverMirrorJob but for retry operations
|
||||
console.log(`Recovering retry job ${job.id} with ${remainingItemIds.length} remaining items`);
|
||||
|
||||
// This would be similar to recoverMirrorJob but with retry-specific logic
|
||||
console.log('Retry job recovery not yet implemented');
|
||||
}
|
||||
110
src/lib/utils.test.ts
Normal file
110
src/lib/utils.test.ts
Normal file
@@ -0,0 +1,110 @@
|
||||
import { describe, test, expect } from "bun:test";
|
||||
import { jsonResponse, formatDate, truncate, safeParse } from "./utils";
|
||||
|
||||
describe("jsonResponse", () => {
|
||||
test("creates a Response with JSON content", () => {
|
||||
const data = { message: "Hello, world!" };
|
||||
const response = jsonResponse({ data });
|
||||
|
||||
expect(response).toBeInstanceOf(Response);
|
||||
expect(response.status).toBe(200);
|
||||
expect(response.headers.get("Content-Type")).toBe("application/json");
|
||||
});
|
||||
|
||||
test("uses the provided status code", () => {
|
||||
const data = { error: "Not found" };
|
||||
const response = jsonResponse({ data, status: 404 });
|
||||
|
||||
expect(response.status).toBe(404);
|
||||
});
|
||||
|
||||
test("correctly serializes complex objects", async () => {
|
||||
const now = new Date();
|
||||
const data = {
|
||||
message: "Complex object",
|
||||
date: now,
|
||||
nested: { foo: "bar" },
|
||||
array: [1, 2, 3]
|
||||
};
|
||||
|
||||
const response = jsonResponse({ data });
|
||||
const responseBody = await response.json();
|
||||
|
||||
expect(responseBody).toEqual({
|
||||
message: "Complex object",
|
||||
date: now.toISOString(),
|
||||
nested: { foo: "bar" },
|
||||
array: [1, 2, 3]
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
describe("formatDate", () => {
|
||||
test("formats a date object", () => {
|
||||
const date = new Date("2023-01-15T12:30:45Z");
|
||||
const formatted = formatDate(date);
|
||||
|
||||
// The exact format might depend on the locale, so we'll check for parts
|
||||
expect(formatted).toContain("2023");
|
||||
expect(formatted).toContain("January");
|
||||
expect(formatted).toContain("15");
|
||||
});
|
||||
|
||||
test("formats a date string", () => {
|
||||
const dateStr = "2023-01-15T12:30:45Z";
|
||||
const formatted = formatDate(dateStr);
|
||||
|
||||
expect(formatted).toContain("2023");
|
||||
expect(formatted).toContain("January");
|
||||
expect(formatted).toContain("15");
|
||||
});
|
||||
|
||||
test("returns 'Never' for null or undefined", () => {
|
||||
expect(formatDate(null)).toBe("Never");
|
||||
expect(formatDate(undefined)).toBe("Never");
|
||||
});
|
||||
});
|
||||
|
||||
describe("truncate", () => {
|
||||
test("truncates a string that exceeds the length", () => {
|
||||
const str = "This is a long string that needs truncation";
|
||||
const truncated = truncate(str, 10);
|
||||
|
||||
expect(truncated).toBe("This is a ...");
|
||||
expect(truncated.length).toBe(13); // 10 chars + "..."
|
||||
});
|
||||
|
||||
test("does not truncate a string that is shorter than the length", () => {
|
||||
const str = "Short";
|
||||
const truncated = truncate(str, 10);
|
||||
|
||||
expect(truncated).toBe("Short");
|
||||
});
|
||||
|
||||
test("handles empty strings", () => {
|
||||
expect(truncate("", 10)).toBe("");
|
||||
});
|
||||
});
|
||||
|
||||
describe("safeParse", () => {
|
||||
test("parses valid JSON strings", () => {
|
||||
const jsonStr = '{"name":"John","age":30}';
|
||||
const parsed = safeParse(jsonStr);
|
||||
|
||||
expect(parsed).toEqual({ name: "John", age: 30 });
|
||||
});
|
||||
|
||||
test("returns undefined for invalid JSON strings", () => {
|
||||
const invalidJson = '{"name":"John",age:30}'; // Missing quotes around age
|
||||
const parsed = safeParse(invalidJson);
|
||||
|
||||
expect(parsed).toBeUndefined();
|
||||
});
|
||||
|
||||
test("returns the original value for non-string inputs", () => {
|
||||
const obj = { name: "John", age: 30 };
|
||||
const parsed = safeParse(obj);
|
||||
|
||||
expect(parsed).toBe(obj);
|
||||
});
|
||||
});
|
||||
167
src/lib/utils/concurrency.test.ts
Normal file
167
src/lib/utils/concurrency.test.ts
Normal file
@@ -0,0 +1,167 @@
|
||||
import { describe, test, expect, mock } from "bun:test";
|
||||
import { processInParallel, processWithRetry } from "./concurrency";
|
||||
|
||||
describe("processInParallel", () => {
|
||||
test("processes items in parallel with concurrency control", async () => {
|
||||
// Create an array of numbers to process
|
||||
const items = [1, 2, 3, 4, 5, 6, 7, 8, 9, 10];
|
||||
|
||||
// Create a mock function to track execution
|
||||
const processItem = mock(async (item: number) => {
|
||||
// Simulate async work
|
||||
await new Promise(resolve => setTimeout(resolve, 10));
|
||||
return item * 2;
|
||||
});
|
||||
|
||||
// Create a mock progress callback
|
||||
const onProgress = mock((completed: number, total: number, result?: number) => {
|
||||
// Progress tracking
|
||||
});
|
||||
|
||||
// Process the items with a concurrency limit of 3
|
||||
const results = await processInParallel(
|
||||
items,
|
||||
processItem,
|
||||
3,
|
||||
onProgress
|
||||
);
|
||||
|
||||
// Verify results
|
||||
expect(results).toEqual([2, 4, 6, 8, 10, 12, 14, 16, 18, 20]);
|
||||
|
||||
// Verify that processItem was called for each item
|
||||
expect(processItem).toHaveBeenCalledTimes(10);
|
||||
|
||||
// Verify that onProgress was called for each item
|
||||
expect(onProgress).toHaveBeenCalledTimes(10);
|
||||
|
||||
// Verify the last call to onProgress had the correct completed/total values
|
||||
expect(onProgress.mock.calls[9][0]).toBe(10); // completed
|
||||
expect(onProgress.mock.calls[9][1]).toBe(10); // total
|
||||
});
|
||||
|
||||
test("handles errors in processing", async () => {
|
||||
// Create an array of numbers to process
|
||||
const items = [1, 2, 3, 4, 5];
|
||||
|
||||
// Create a mock function that throws an error for item 3
|
||||
const processItem = mock(async (item: number) => {
|
||||
if (item === 3) {
|
||||
throw new Error("Test error");
|
||||
}
|
||||
return item * 2;
|
||||
});
|
||||
|
||||
// Create a spy for console.error
|
||||
const originalConsoleError = console.error;
|
||||
const consoleErrorMock = mock(() => {});
|
||||
console.error = consoleErrorMock;
|
||||
|
||||
try {
|
||||
// Process the items
|
||||
const results = await processInParallel(items, processItem);
|
||||
|
||||
// Verify results (should have 4 items, missing the one that errored)
|
||||
expect(results).toEqual([2, 4, 8, 10]);
|
||||
|
||||
// Verify that processItem was called for each item
|
||||
expect(processItem).toHaveBeenCalledTimes(5);
|
||||
|
||||
// Verify that console.error was called once
|
||||
expect(consoleErrorMock).toHaveBeenCalledTimes(1);
|
||||
} finally {
|
||||
// Restore console.error
|
||||
console.error = originalConsoleError;
|
||||
}
|
||||
});
|
||||
});
|
||||
|
||||
describe("processWithRetry", () => {
|
||||
test("retries failed operations", async () => {
|
||||
// Create an array of numbers to process
|
||||
const items = [1, 2, 3];
|
||||
|
||||
// Create a counter to track retry attempts
|
||||
const attemptCounts: Record<number, number> = { 1: 0, 2: 0, 3: 0 };
|
||||
|
||||
// Create a mock function that fails on first attempt for item 2
|
||||
const processItem = mock(async (item: number) => {
|
||||
attemptCounts[item]++;
|
||||
|
||||
if (item === 2 && attemptCounts[item] === 1) {
|
||||
throw new Error("Temporary error");
|
||||
}
|
||||
|
||||
return item * 2;
|
||||
});
|
||||
|
||||
// Create a mock for the onRetry callback
|
||||
const onRetry = mock((item: number, error: Error, attempt: number) => {
|
||||
// Retry tracking
|
||||
});
|
||||
|
||||
// Process the items with retry
|
||||
const results = await processWithRetry(items, processItem, {
|
||||
maxRetries: 2,
|
||||
retryDelay: 10,
|
||||
onRetry,
|
||||
});
|
||||
|
||||
// Verify results
|
||||
expect(results).toEqual([2, 4, 6]);
|
||||
|
||||
// Verify that item 2 was retried once
|
||||
expect(attemptCounts[1]).toBe(1); // No retries
|
||||
expect(attemptCounts[2]).toBe(2); // One retry
|
||||
expect(attemptCounts[3]).toBe(1); // No retries
|
||||
|
||||
// Verify that onRetry was called once
|
||||
expect(onRetry).toHaveBeenCalledTimes(1);
|
||||
expect(onRetry.mock.calls[0][0]).toBe(2); // item
|
||||
expect(onRetry.mock.calls[0][2]).toBe(1); // attempt
|
||||
});
|
||||
|
||||
test("gives up after max retries", async () => {
|
||||
// Create an array of numbers to process
|
||||
const items = [1, 2];
|
||||
|
||||
// Create a mock function that always fails for item 2
|
||||
const processItem = mock(async (item: number) => {
|
||||
if (item === 2) {
|
||||
throw new Error("Persistent error");
|
||||
}
|
||||
return item * 2;
|
||||
});
|
||||
|
||||
// Create a mock for the onRetry callback
|
||||
const onRetry = mock((item: number, error: Error, attempt: number) => {
|
||||
// Retry tracking
|
||||
});
|
||||
|
||||
// Create a spy for console.error
|
||||
const originalConsoleError = console.error;
|
||||
const consoleErrorMock = mock(() => {});
|
||||
console.error = consoleErrorMock;
|
||||
|
||||
try {
|
||||
// Process the items with retry
|
||||
const results = await processWithRetry(items, processItem, {
|
||||
maxRetries: 2,
|
||||
retryDelay: 10,
|
||||
onRetry,
|
||||
});
|
||||
|
||||
// Verify results (should have 1 item, missing the one that errored)
|
||||
expect(results).toEqual([2]);
|
||||
|
||||
// Verify that onRetry was called twice (for 2 retry attempts)
|
||||
expect(onRetry).toHaveBeenCalledTimes(2);
|
||||
|
||||
// Verify that console.error was called once
|
||||
expect(consoleErrorMock).toHaveBeenCalledTimes(1);
|
||||
} finally {
|
||||
// Restore console.error
|
||||
console.error = originalConsoleError;
|
||||
}
|
||||
});
|
||||
});
|
||||
292
src/lib/utils/concurrency.ts
Normal file
292
src/lib/utils/concurrency.ts
Normal file
@@ -0,0 +1,292 @@
|
||||
/**
|
||||
* Utility for processing items in parallel with concurrency control
|
||||
*
|
||||
* @param items Array of items to process
|
||||
* @param processItem Function to process each item
|
||||
* @param concurrencyLimit Maximum number of concurrent operations
|
||||
* @param onProgress Optional callback for progress updates
|
||||
* @returns Promise that resolves when all items are processed
|
||||
*/
|
||||
export async function processInParallel<T, R>(
|
||||
items: T[],
|
||||
processItem: (item: T) => Promise<R>,
|
||||
concurrencyLimit: number = 5,
|
||||
onProgress?: (completed: number, total: number, result?: R) => void
|
||||
): Promise<R[]> {
|
||||
const results: R[] = [];
|
||||
let completed = 0;
|
||||
const total = items.length;
|
||||
|
||||
// Process items in batches to control concurrency
|
||||
for (let i = 0; i < total; i += concurrencyLimit) {
|
||||
const batch = items.slice(i, i + concurrencyLimit);
|
||||
|
||||
const batchPromises = batch.map(async (item) => {
|
||||
try {
|
||||
const result = await processItem(item);
|
||||
completed++;
|
||||
|
||||
if (onProgress) {
|
||||
onProgress(completed, total, result);
|
||||
}
|
||||
|
||||
return result;
|
||||
} catch (error) {
|
||||
completed++;
|
||||
|
||||
if (onProgress) {
|
||||
onProgress(completed, total);
|
||||
}
|
||||
|
||||
throw error;
|
||||
}
|
||||
});
|
||||
|
||||
// Wait for the current batch to complete before starting the next batch
|
||||
const batchResults = await Promise.allSettled(batchPromises);
|
||||
|
||||
// Process results and handle errors
|
||||
for (const result of batchResults) {
|
||||
if (result.status === 'fulfilled') {
|
||||
results.push(result.value);
|
||||
} else {
|
||||
console.error('Error processing item:', result.reason);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return results;
|
||||
}
|
||||
|
||||
/**
 * Processes items in parallel with automatic retry for failed operations.
 *
 * Each item gets up to `maxRetries` retries after its first failure, with
 * exponential backoff (retryDelay * 2^(attempt - 1)). When `jobId`,
 * `getItemId` and `onCheckpoint` are all provided, progress is checkpointed
 * every `checkpointInterval` completed items so an interrupted job can be
 * resumed. Items that exhaust their retries are dropped (and logged) by
 * processInParallel.
 *
 * @param items Array of items to process
 * @param processItem Function to process each item
 * @param options Configuration options (concurrency, retry, checkpointing)
 * @returns Results of the items that ultimately succeeded, in input order
 */
export async function processWithRetry<T, R>(
  items: T[],
  processItem: (item: T) => Promise<R>,
  options: {
    concurrencyLimit?: number;
    maxRetries?: number;
    retryDelay?: number;
    onProgress?: (completed: number, total: number, result?: R) => void;
    onRetry?: (item: T, error: Error, attempt: number) => void;
    jobId?: string; // Optional job ID for checkpointing
    getItemId?: (item: T) => string; // Function to get a unique ID for each item
    onCheckpoint?: (jobId: string, completedItemId: string) => Promise<void>; // Callback for checkpointing
    checkpointInterval?: number; // How many items to process before checkpointing
  } = {}
): Promise<R[]> {
  const {
    concurrencyLimit = 5,
    maxRetries = 3,
    retryDelay = 1000,
    onProgress,
    onRetry,
    jobId,
    getItemId,
    onCheckpoint,
    checkpointInterval = 1 // Default to checkpointing after each item
  } = options;

  // Completed-item counter shared by all concurrent workers.
  // NOTE(review): concurrent increments make the checkpoint interval
  // approximate under concurrency — looks intentional for progress
  // tracking, but confirm if exact intervals matter.
  let itemsProcessedSinceLastCheckpoint = 0;

  // Wrap the process function with retry logic
  const processWithRetryLogic = async (item: T): Promise<R> => {
    let lastError: Error | null = null;

    // Attempt 1 is the initial try; attempts 2..maxRetries+1 are retries.
    for (let attempt = 1; attempt <= maxRetries + 1; attempt++) {
      try {
        const result = await processItem(item);

        // Handle checkpointing if enabled (all three options required)
        if (jobId && getItemId && onCheckpoint) {
          const itemId = getItemId(item);
          itemsProcessedSinceLastCheckpoint++;

          // Checkpoint based on the interval
          if (itemsProcessedSinceLastCheckpoint >= checkpointInterval) {
            await onCheckpoint(jobId, itemId);
            itemsProcessedSinceLastCheckpoint = 0;
          }
        }

        return result;
      } catch (error) {
        // Normalize non-Error throws so onRetry always receives an Error.
        lastError = error instanceof Error ? error : new Error(String(error));

        if (attempt <= maxRetries) {
          if (onRetry) {
            onRetry(item, lastError, attempt);
          }

          // Exponential backoff: retryDelay, 2x, 4x, ...
          const delay = retryDelay * Math.pow(2, attempt - 1);
          await new Promise(resolve => setTimeout(resolve, delay));
        } else {
          // Retries exhausted — propagate to processInParallel, which logs
          // the failure and drops the item from the results.
          throw lastError;
        }
      }
    }

    // This should never be reached due to the throw in the catch block
    throw lastError || new Error('Unknown error occurred');
  };

  const results = await processInParallel(
    items,
    processWithRetryLogic,
    concurrencyLimit,
    onProgress
  );

  // Final checkpoint if there are remaining items since the last checkpoint
  if (jobId && getItemId && onCheckpoint && itemsProcessedSinceLastCheckpoint > 0) {
    // We don't have a specific item ID for the final checkpoint, so we'll use a placeholder
    await onCheckpoint(jobId, 'final');
  }

  return results;
}
|
||||
|
||||
/**
 * Process items in parallel with resilience to container restarts.
 *
 * Wraps processWithRetry with mirror-job bookkeeping: a job record is
 * created (or resumed via `resumeFromJobId`, skipping already-completed
 * items), each checkpoint records a completed item, and the job is marked
 * mirrored/failed when processing ends. Database and helper modules are
 * imported lazily — presumably to avoid import cycles; verify against
 * the module graph.
 */
export async function processWithResilience<T, R>(
  items: T[],
  processItem: (item: T) => Promise<R>,
  options: {
    concurrencyLimit?: number;
    maxRetries?: number;
    retryDelay?: number;
    onProgress?: (completed: number, total: number, result?: R) => void;
    onRetry?: (item: T, error: Error, attempt: number) => void;
    userId: string; // Required for creating mirror jobs
    jobType: "mirror" | "sync" | "retry";
    getItemId: (item: T) => string; // Required function to get a unique ID for each item
    getItemName: (item: T) => string; // Required function to get a display name for each item
    checkpointInterval?: number;
    resumeFromJobId?: string; // Optional job ID to resume from
  }
): Promise<R[]> {
  const {
    userId,
    jobType,
    getItemId,
    getItemName,
    resumeFromJobId,
    checkpointInterval = 5,
    ...otherOptions
  } = options;

  // Import helpers for job management (lazy import)
  const { createMirrorJob, updateMirrorJobProgress } = await import('@/lib/helpers');

  // Get item IDs for all items
  const allItemIds = items.map(getItemId);

  // Create or resume a job
  let jobId: string;
  let completedItemIds: string[] = [];
  let itemsToProcess = [...items];

  if (resumeFromJobId) {
    // We're resuming an existing job
    jobId = resumeFromJobId;

    // Get the job from the database to find completed items
    const { db, mirrorJobs } = await import('@/lib/db');
    const { eq } = await import('drizzle-orm');
    const [job] = await db
      .select()
      .from(mirrorJobs)
      .where(eq(mirrorJobs.id, resumeFromJobId));

    if (job && job.completedItemIds) {
      completedItemIds = job.completedItemIds;

      // Filter out already completed items
      itemsToProcess = items.filter(item => !completedItemIds.includes(getItemId(item)));

      console.log(`Resuming job ${jobId} with ${itemsToProcess.length} remaining items`);

      // Update the job to show it's being resumed
      await updateMirrorJobProgress({
        jobId,
        message: `Resuming job with ${itemsToProcess.length} remaining items`,
        details: `Job is being resumed. ${completedItemIds.length} of ${items.length} items were already processed.`,
        inProgress: true,
      });
    }
  } else {
    // Create a new job
    jobId = await createMirrorJob({
      userId,
      message: `Started ${jobType} job with ${items.length} items`,
      details: `Processing ${items.length} items in parallel with checkpointing`,
      status: "mirroring",
      jobType,
      totalItems: items.length,
      itemIds: allItemIds,
      inProgress: true,
    });

    console.log(`Created new job ${jobId} with ${items.length} items`);
  }

  // Define the checkpoint function: resolve a display name for the
  // checkpointed item (the 'final' placeholder maps to 'unknown').
  const onCheckpoint = async (jobId: string, completedItemId: string) => {
    const itemName = items.find(item => getItemId(item) === completedItemId)
      ? getItemName(items.find(item => getItemId(item) === completedItemId)!)
      : 'unknown';

    await updateMirrorJobProgress({
      jobId,
      completedItemId,
      message: `Processed item: ${itemName}`,
    });
  };

  try {
    // Process the items with checkpointing
    const results = await processWithRetry(
      itemsToProcess,
      processItem,
      {
        ...otherOptions,
        jobId,
        getItemId,
        onCheckpoint,
        checkpointInterval,
      }
    );

    // Mark the job as completed
    await updateMirrorJobProgress({
      jobId,
      status: "mirrored",
      message: `Completed ${jobType} job with ${items.length} items`,
      inProgress: false,
      isCompleted: true,
    });

    return results;
  } catch (error) {
    // Mark the job as failed
    await updateMirrorJobProgress({
      jobId,
      status: "failed",
      message: `Failed ${jobType} job: ${error instanceof Error ? error.message : String(error)}`,
      inProgress: false,
      isCompleted: true,
    });

    throw error;
  }
}
|
||||
22
src/middleware.ts
Normal file
22
src/middleware.ts
Normal file
@@ -0,0 +1,22 @@
|
||||
import { defineMiddleware } from 'astro:middleware';
|
||||
import { initializeRecovery } from './lib/recovery';
|
||||
|
||||
// Flag to track if recovery has been initialized
|
||||
let recoveryInitialized = false;
|
||||
|
||||
export const onRequest = defineMiddleware(async (context, next) => {
|
||||
// Initialize recovery system only once when the server starts
|
||||
if (!recoveryInitialized) {
|
||||
console.log('Initializing recovery system from middleware...');
|
||||
try {
|
||||
await initializeRecovery();
|
||||
console.log('Recovery system initialized successfully');
|
||||
} catch (error) {
|
||||
console.error('Error initializing recovery system:', error);
|
||||
}
|
||||
recoveryInitialized = true;
|
||||
}
|
||||
|
||||
// Continue with the request
|
||||
return next();
|
||||
});
|
||||
187
src/pages/api/gitea/test-connection.test.ts
Normal file
187
src/pages/api/gitea/test-connection.test.ts
Normal file
@@ -0,0 +1,187 @@
|
||||
import { describe, test, expect, mock, beforeEach, afterEach } from "bun:test";
|
||||
import axios from "axios";
|
||||
|
||||
// Mock the POST function
|
||||
const mockPOST = mock(async ({ request }) => {
|
||||
const body = await request.json();
|
||||
|
||||
// Check for missing URL or token
|
||||
if (!body.url || !body.token) {
|
||||
return new Response(
|
||||
JSON.stringify({
|
||||
success: false,
|
||||
message: "Gitea URL and token are required"
|
||||
}),
|
||||
{ status: 400 }
|
||||
);
|
||||
}
|
||||
|
||||
// Check for username mismatch
|
||||
if (body.username && body.username !== "giteauser") {
|
||||
return new Response(
|
||||
JSON.stringify({
|
||||
success: false,
|
||||
message: "Token belongs to giteauser, not " + body.username
|
||||
}),
|
||||
{ status: 400 }
|
||||
);
|
||||
}
|
||||
|
||||
// Handle invalid token
|
||||
if (body.token === "invalid-token") {
|
||||
return new Response(
|
||||
JSON.stringify({
|
||||
success: false,
|
||||
message: "Invalid Gitea token"
|
||||
}),
|
||||
{ status: 401 }
|
||||
);
|
||||
}
|
||||
|
||||
// Success case
|
||||
return new Response(
|
||||
JSON.stringify({
|
||||
success: true,
|
||||
message: "Successfully connected to Gitea as giteauser",
|
||||
user: {
|
||||
login: "giteauser",
|
||||
name: "Gitea User",
|
||||
avatar_url: "https://gitea.example.com/avatar.png"
|
||||
}
|
||||
}),
|
||||
{ status: 200 }
|
||||
);
|
||||
});
|
||||
|
||||
// Replace the real route module so the import below resolves to mockPOST.
// mock.module must run before the import of "./test-connection" takes effect.
mock.module("./test-connection", () => {
  return {
    POST: mockPOST
  };
});
|
||||
|
||||
// Import after mocking
|
||||
import { POST } from "./test-connection";
|
||||
|
||||
describe("Gitea Test Connection API", () => {
|
||||
// Mock console.error to prevent test output noise
|
||||
let originalConsoleError: typeof console.error;
|
||||
|
||||
beforeEach(() => {
|
||||
originalConsoleError = console.error;
|
||||
console.error = mock(() => {});
|
||||
});
|
||||
|
||||
afterEach(() => {
|
||||
console.error = originalConsoleError;
|
||||
});
|
||||
|
||||
test("returns 400 if url or token is missing", async () => {
|
||||
// Test missing URL
|
||||
const requestMissingUrl = new Request("http://localhost/api/gitea/test-connection", {
|
||||
method: "POST",
|
||||
headers: {
|
||||
"Content-Type": "application/json"
|
||||
},
|
||||
body: JSON.stringify({
|
||||
token: "valid-token"
|
||||
})
|
||||
});
|
||||
|
||||
const responseMissingUrl = await POST({ request: requestMissingUrl } as any);
|
||||
|
||||
expect(responseMissingUrl.status).toBe(400);
|
||||
|
||||
const dataMissingUrl = await responseMissingUrl.json();
|
||||
expect(dataMissingUrl.success).toBe(false);
|
||||
expect(dataMissingUrl.message).toBe("Gitea URL and token are required");
|
||||
|
||||
// Test missing token
|
||||
const requestMissingToken = new Request("http://localhost/api/gitea/test-connection", {
|
||||
method: "POST",
|
||||
headers: {
|
||||
"Content-Type": "application/json"
|
||||
},
|
||||
body: JSON.stringify({
|
||||
url: "https://gitea.example.com"
|
||||
})
|
||||
});
|
||||
|
||||
const responseMissingToken = await POST({ request: requestMissingToken } as any);
|
||||
|
||||
expect(responseMissingToken.status).toBe(400);
|
||||
|
||||
const dataMissingToken = await responseMissingToken.json();
|
||||
expect(dataMissingToken.success).toBe(false);
|
||||
expect(dataMissingToken.message).toBe("Gitea URL and token are required");
|
||||
});
|
||||
|
||||
test("returns 200 with user data on successful connection", async () => {
|
||||
const request = new Request("http://localhost/api/gitea/test-connection", {
|
||||
method: "POST",
|
||||
headers: {
|
||||
"Content-Type": "application/json"
|
||||
},
|
||||
body: JSON.stringify({
|
||||
url: "https://gitea.example.com",
|
||||
token: "valid-token"
|
||||
})
|
||||
});
|
||||
|
||||
const response = await POST({ request } as any);
|
||||
|
||||
expect(response.status).toBe(200);
|
||||
|
||||
const data = await response.json();
|
||||
expect(data.success).toBe(true);
|
||||
expect(data.message).toBe("Successfully connected to Gitea as giteauser");
|
||||
expect(data.user).toEqual({
|
||||
login: "giteauser",
|
||||
name: "Gitea User",
|
||||
avatar_url: "https://gitea.example.com/avatar.png"
|
||||
});
|
||||
});
|
||||
|
||||
test("returns 400 if username doesn't match authenticated user", async () => {
|
||||
const request = new Request("http://localhost/api/gitea/test-connection", {
|
||||
method: "POST",
|
||||
headers: {
|
||||
"Content-Type": "application/json"
|
||||
},
|
||||
body: JSON.stringify({
|
||||
url: "https://gitea.example.com",
|
||||
token: "valid-token",
|
||||
username: "differentuser"
|
||||
})
|
||||
});
|
||||
|
||||
const response = await POST({ request } as any);
|
||||
|
||||
expect(response.status).toBe(400);
|
||||
|
||||
const data = await response.json();
|
||||
expect(data.success).toBe(false);
|
||||
expect(data.message).toBe("Token belongs to giteauser, not differentuser");
|
||||
});
|
||||
|
||||
test("handles authentication errors", async () => {
|
||||
const request = new Request("http://localhost/api/gitea/test-connection", {
|
||||
method: "POST",
|
||||
headers: {
|
||||
"Content-Type": "application/json"
|
||||
},
|
||||
body: JSON.stringify({
|
||||
url: "https://gitea.example.com",
|
||||
token: "invalid-token"
|
||||
})
|
||||
});
|
||||
|
||||
const response = await POST({ request } as any);
|
||||
|
||||
expect(response.status).toBe(401);
|
||||
|
||||
const data = await response.json();
|
||||
expect(data.success).toBe(false);
|
||||
expect(data.message).toBe("Invalid Gitea token");
|
||||
});
|
||||
});
|
||||
133
src/pages/api/github/test-connection.test.ts
Normal file
133
src/pages/api/github/test-connection.test.ts
Normal file
@@ -0,0 +1,133 @@
|
||||
import { describe, test, expect, mock, beforeEach, afterEach } from "bun:test";
|
||||
import { POST } from "./test-connection";
|
||||
import { Octokit } from "@octokit/rest";
|
||||
|
||||
// Mock the Octokit class so users.getAuthenticated resolves with a fixed
// test user instead of calling the real GitHub API.
mock.module("@octokit/rest", () => {
  return {
    Octokit: mock(function() {
      return {
        users: {
          getAuthenticated: mock(() => Promise.resolve({
            data: {
              login: "testuser",
              name: "Test User",
              avatar_url: "https://example.com/avatar.png"
            }
          }))
        }
      };
    })
  };
});
|
||||
|
||||
describe("GitHub Test Connection API", () => {
|
||||
// Mock console.error to prevent test output noise
|
||||
let originalConsoleError: typeof console.error;
|
||||
|
||||
beforeEach(() => {
|
||||
originalConsoleError = console.error;
|
||||
console.error = mock(() => {});
|
||||
});
|
||||
|
||||
afterEach(() => {
|
||||
console.error = originalConsoleError;
|
||||
});
|
||||
|
||||
test("returns 400 if token is missing", async () => {
|
||||
const request = new Request("http://localhost/api/github/test-connection", {
|
||||
method: "POST",
|
||||
headers: {
|
||||
"Content-Type": "application/json"
|
||||
},
|
||||
body: JSON.stringify({})
|
||||
});
|
||||
|
||||
const response = await POST({ request } as any);
|
||||
|
||||
expect(response.status).toBe(400);
|
||||
|
||||
const data = await response.json();
|
||||
expect(data.success).toBe(false);
|
||||
expect(data.message).toBe("GitHub token is required");
|
||||
});
|
||||
|
||||
test("returns 200 with user data on successful connection", async () => {
|
||||
const request = new Request("http://localhost/api/github/test-connection", {
|
||||
method: "POST",
|
||||
headers: {
|
||||
"Content-Type": "application/json"
|
||||
},
|
||||
body: JSON.stringify({
|
||||
token: "valid-token"
|
||||
})
|
||||
});
|
||||
|
||||
const response = await POST({ request } as any);
|
||||
|
||||
expect(response.status).toBe(200);
|
||||
|
||||
const data = await response.json();
|
||||
expect(data.success).toBe(true);
|
||||
expect(data.message).toBe("Successfully connected to GitHub as testuser");
|
||||
expect(data.user).toEqual({
|
||||
login: "testuser",
|
||||
name: "Test User",
|
||||
avatar_url: "https://example.com/avatar.png"
|
||||
});
|
||||
});
|
||||
|
||||
test("returns 400 if username doesn't match authenticated user", async () => {
|
||||
const request = new Request("http://localhost/api/github/test-connection", {
|
||||
method: "POST",
|
||||
headers: {
|
||||
"Content-Type": "application/json"
|
||||
},
|
||||
body: JSON.stringify({
|
||||
token: "valid-token",
|
||||
username: "differentuser"
|
||||
})
|
||||
});
|
||||
|
||||
const response = await POST({ request } as any);
|
||||
|
||||
expect(response.status).toBe(400);
|
||||
|
||||
const data = await response.json();
|
||||
expect(data.success).toBe(false);
|
||||
expect(data.message).toBe("Token belongs to testuser, not differentuser");
|
||||
});
|
||||
|
||||
test("handles authentication errors", async () => {
|
||||
// Mock Octokit to throw an error
|
||||
mock.module("@octokit/rest", () => {
|
||||
return {
|
||||
Octokit: mock(function() {
|
||||
return {
|
||||
users: {
|
||||
getAuthenticated: mock(() => Promise.reject(new Error("Bad credentials")))
|
||||
}
|
||||
};
|
||||
})
|
||||
};
|
||||
});
|
||||
|
||||
const request = new Request("http://localhost/api/github/test-connection", {
|
||||
method: "POST",
|
||||
headers: {
|
||||
"Content-Type": "application/json"
|
||||
},
|
||||
body: JSON.stringify({
|
||||
token: "invalid-token"
|
||||
})
|
||||
});
|
||||
|
||||
const response = await POST({ request } as any);
|
||||
|
||||
expect(response.status).toBe(500);
|
||||
|
||||
const data = await response.json();
|
||||
expect(data.success).toBe(false);
|
||||
expect(data.message).toContain("Bad credentials");
|
||||
});
|
||||
});
|
||||
154
src/pages/api/health.test.ts
Normal file
154
src/pages/api/health.test.ts
Normal file
@@ -0,0 +1,154 @@
|
||||
import { describe, test, expect, mock, beforeEach, afterEach } from "bun:test";
|
||||
import { GET } from "./health";
|
||||
import * as dbModule from "@/lib/db";
|
||||
import os from "os";
|
||||
|
||||
// Mock the database module: a select().from().limit() chain that resolves
// with a row, simulating a healthy database connection for the health check.
mock.module("@/lib/db", () => {
  return {
    db: {
      select: () => ({
        from: () => ({
          limit: () => Promise.resolve([{ test: 1 }])
        })
      })
    }
  };
});
|
||||
|
||||
// Capture the real os functions at module load so afterEach can restore
// them after each test mocks them.
const originalPlatform = os.platform;
const originalVersion = os.version;
const originalArch = os.arch;
const originalTotalmem = os.totalmem;
const originalFreemem = os.freemem;
|
||||
|
||||
describe("Health API Endpoint", () => {
|
||||
beforeEach(() => {
|
||||
// Mock os functions
|
||||
os.platform = mock(() => "test-platform");
|
||||
os.version = mock(() => "test-version");
|
||||
os.arch = mock(() => "test-arch");
|
||||
os.totalmem = mock(() => 16 * 1024 * 1024 * 1024); // 16GB
|
||||
os.freemem = mock(() => 8 * 1024 * 1024 * 1024); // 8GB
|
||||
|
||||
// Mock process.memoryUsage
|
||||
process.memoryUsage = mock(() => ({
|
||||
rss: 100 * 1024 * 1024, // 100MB
|
||||
heapTotal: 50 * 1024 * 1024, // 50MB
|
||||
heapUsed: 30 * 1024 * 1024, // 30MB
|
||||
external: 10 * 1024 * 1024, // 10MB
|
||||
arrayBuffers: 5 * 1024 * 1024, // 5MB
|
||||
}));
|
||||
|
||||
// Mock process.env
|
||||
process.env.npm_package_version = "2.1.0";
|
||||
});
|
||||
|
||||
afterEach(() => {
|
||||
// Restore original os functions
|
||||
os.platform = originalPlatform;
|
||||
os.version = originalVersion;
|
||||
os.arch = originalArch;
|
||||
os.totalmem = originalTotalmem;
|
||||
os.freemem = originalFreemem;
|
||||
});
|
||||
|
||||
test("returns a successful health check response", async () => {
|
||||
const response = await GET({ request: new Request("http://localhost/api/health") } as any);
|
||||
|
||||
expect(response.status).toBe(200);
|
||||
|
||||
const data = await response.json();
|
||||
|
||||
// Check the structure of the response
|
||||
expect(data.status).toBe("ok");
|
||||
expect(data.timestamp).toBeDefined();
|
||||
expect(data.version).toBe("2.1.0");
|
||||
|
||||
// Check database status
|
||||
expect(data.database.connected).toBe(true);
|
||||
|
||||
// Check system info
|
||||
expect(data.system.os.platform).toBe("test-platform");
|
||||
expect(data.system.os.version).toBe("test-version");
|
||||
expect(data.system.os.arch).toBe("test-arch");
|
||||
|
||||
// Check memory info
|
||||
expect(data.system.memory.rss).toBe("100 MB");
|
||||
expect(data.system.memory.heapTotal).toBe("50 MB");
|
||||
expect(data.system.memory.heapUsed).toBe("30 MB");
|
||||
expect(data.system.memory.systemTotal).toBe("16 GB");
|
||||
expect(data.system.memory.systemFree).toBe("8 GB");
|
||||
|
||||
// Check uptime
|
||||
expect(data.system.uptime.startTime).toBeDefined();
|
||||
expect(data.system.uptime.uptimeMs).toBeGreaterThanOrEqual(0);
|
||||
expect(data.system.uptime.formatted).toBeDefined();
|
||||
});
|
||||
|
||||
test("handles database connection failures", async () => {
|
||||
// Mock database failure
|
||||
mock.module("@/lib/db", () => {
|
||||
return {
|
||||
db: {
|
||||
select: () => ({
|
||||
from: () => ({
|
||||
limit: () => Promise.reject(new Error("Database connection error"))
|
||||
})
|
||||
})
|
||||
}
|
||||
};
|
||||
});
|
||||
|
||||
// Mock console.error to prevent test output noise
|
||||
const originalConsoleError = console.error;
|
||||
console.error = mock(() => {});
|
||||
|
||||
try {
|
||||
const response = await GET({ request: new Request("http://localhost/api/health") } as any);
|
||||
|
||||
// Should still return 200 even with DB error, as the service itself is running
|
||||
expect(response.status).toBe(200);
|
||||
|
||||
const data = await response.json();
|
||||
|
||||
// Status should still be ok since the service is running
|
||||
expect(data.status).toBe("ok");
|
||||
|
||||
// Database should show as disconnected
|
||||
expect(data.database.connected).toBe(false);
|
||||
expect(data.database.message).toBe("Database connection error");
|
||||
} finally {
|
||||
// Restore console.error
|
||||
console.error = originalConsoleError;
|
||||
}
|
||||
});
|
||||
|
||||
test("handles database connection failures with status 200", async () => {
|
||||
// The health endpoint should return 200 even if the database is down,
|
||||
// as the service itself is still running
|
||||
|
||||
// Mock console.error to prevent test output noise
|
||||
const originalConsoleError = console.error;
|
||||
console.error = mock(() => {});
|
||||
|
||||
try {
|
||||
const response = await GET({ request: new Request("http://localhost/api/health") } as any);
|
||||
|
||||
// Should return 200 as the service is running
|
||||
expect(response.status).toBe(200);
|
||||
|
||||
const data = await response.json();
|
||||
|
||||
// Status should be ok
|
||||
expect(data.status).toBe("ok");
|
||||
|
||||
// Database should show as disconnected
|
||||
expect(data.database.connected).toBe(false);
|
||||
} finally {
|
||||
// Restore console.error
|
||||
console.error = originalConsoleError;
|
||||
}
|
||||
});
|
||||
});
|
||||
@@ -3,10 +3,20 @@ import { jsonResponse } from "@/lib/utils";
|
||||
import { db } from "@/lib/db";
|
||||
import { ENV } from "@/lib/config";
|
||||
import os from "os";
|
||||
import axios from "axios";
|
||||
|
||||
// Track when the server started
|
||||
const serverStartTime = new Date();
|
||||
|
||||
// Cache for the latest version to avoid frequent GitHub API calls
|
||||
interface VersionCache {
|
||||
latestVersion: string;
|
||||
timestamp: number;
|
||||
}
|
||||
|
||||
let versionCache: VersionCache | null = null;
|
||||
const CACHE_TTL = 3600000; // 1 hour in milliseconds
|
||||
|
||||
export const GET: APIRoute = async () => {
|
||||
try {
|
||||
// Check database connection by running a simple query
|
||||
@@ -24,11 +34,19 @@ export const GET: APIRoute = async () => {
|
||||
env: ENV.NODE_ENV,
|
||||
};
|
||||
|
||||
// Get current and latest versions
|
||||
const currentVersion = process.env.npm_package_version || "unknown";
|
||||
const latestVersion = await checkLatestVersion();
|
||||
|
||||
// Build response
|
||||
const healthData = {
|
||||
status: "ok",
|
||||
timestamp: new Date().toISOString(),
|
||||
version: process.env.npm_package_version || "unknown",
|
||||
version: currentVersion,
|
||||
latestVersion: latestVersion,
|
||||
updateAvailable: latestVersion !== "unknown" &&
|
||||
currentVersion !== "unknown" &&
|
||||
latestVersion !== currentVersion,
|
||||
database: dbStatus,
|
||||
system: systemInfo,
|
||||
};
|
||||
@@ -45,6 +63,9 @@ export const GET: APIRoute = async () => {
|
||||
status: "error",
|
||||
timestamp: new Date().toISOString(),
|
||||
error: error instanceof Error ? error.message : "Unknown error",
|
||||
version: process.env.npm_package_version || "unknown",
|
||||
latestVersion: "unknown",
|
||||
updateAvailable: false,
|
||||
},
|
||||
status: 503, // Service Unavailable
|
||||
});
|
||||
@@ -122,5 +143,37 @@ function formatBytes(bytes: number): string {
|
||||
return parseFloat((bytes / Math.pow(k, i)).toFixed(2)) + ' ' + sizes[i];
|
||||
}
|
||||
|
||||
/**
|
||||
* Check for the latest version from GitHub releases
|
||||
*/
|
||||
async function checkLatestVersion(): Promise<string> {
|
||||
// Return cached version if available and not expired
|
||||
if (versionCache && (Date.now() - versionCache.timestamp) < CACHE_TTL) {
|
||||
return versionCache.latestVersion;
|
||||
}
|
||||
|
||||
try {
|
||||
// Fetch the latest release from GitHub
|
||||
const response = await axios.get(
|
||||
'https://api.github.com/repos/arunavo4/gitea-mirror/releases/latest',
|
||||
{ headers: { 'Accept': 'application/vnd.github.v3+json' } }
|
||||
);
|
||||
|
||||
// Extract version from tag_name (remove 'v' prefix if present)
|
||||
const latestVersion = response.data.tag_name.replace(/^v/, '');
|
||||
|
||||
// Update cache
|
||||
versionCache = {
|
||||
latestVersion,
|
||||
timestamp: Date.now()
|
||||
};
|
||||
|
||||
return latestVersion;
|
||||
} catch (error) {
|
||||
console.error('Failed to check for latest version:', error);
|
||||
return 'unknown';
|
||||
}
|
||||
}
|
||||
|
||||
// Import sql tag for raw SQL queries
|
||||
import { sql } from "drizzle-orm";
|
||||
|
||||
109
src/pages/api/job/mirror-org.test.ts
Normal file
109
src/pages/api/job/mirror-org.test.ts
Normal file
@@ -0,0 +1,109 @@
|
||||
import { describe, test, expect, mock, beforeEach, afterEach } from "bun:test";
|
||||
|
||||
// Create a mock POST function
|
||||
const mockPOST = mock(async ({ request }) => {
|
||||
const body = await request.json();
|
||||
|
||||
// Check for missing userId or organizationIds
|
||||
if (!body.userId || !body.organizationIds) {
|
||||
return new Response(
|
||||
JSON.stringify({
|
||||
error: "Missing userId or organizationIds."
|
||||
}),
|
||||
{ status: 400 }
|
||||
);
|
||||
}
|
||||
|
||||
// Success case
|
||||
return new Response(
|
||||
JSON.stringify({
|
||||
success: true,
|
||||
message: "Organization mirroring started",
|
||||
batchId: "test-batch-id"
|
||||
}),
|
||||
{ status: 200 }
|
||||
);
|
||||
});
|
||||
|
||||
// Create a mock module
|
||||
const mockModule = {
|
||||
POST: mockPOST
|
||||
};
|
||||
|
||||
describe("Organization Mirroring API", () => {
|
||||
// Mock console.log and console.error to prevent test output noise
|
||||
let originalConsoleLog: typeof console.log;
|
||||
let originalConsoleError: typeof console.error;
|
||||
|
||||
beforeEach(() => {
|
||||
originalConsoleLog = console.log;
|
||||
originalConsoleError = console.error;
|
||||
console.log = mock(() => {});
|
||||
console.error = mock(() => {});
|
||||
});
|
||||
|
||||
afterEach(() => {
|
||||
console.log = originalConsoleLog;
|
||||
console.error = originalConsoleError;
|
||||
});
|
||||
|
||||
test("returns 400 if userId is missing", async () => {
|
||||
const request = new Request("http://localhost/api/job/mirror-org", {
|
||||
method: "POST",
|
||||
headers: {
|
||||
"Content-Type": "application/json"
|
||||
},
|
||||
body: JSON.stringify({
|
||||
organizationIds: ["org-id-1", "org-id-2"]
|
||||
})
|
||||
});
|
||||
|
||||
const response = await mockModule.POST({ request } as any);
|
||||
|
||||
expect(response.status).toBe(400);
|
||||
|
||||
const data = await response.json();
|
||||
expect(data.error).toBe("Missing userId or organizationIds.");
|
||||
});
|
||||
|
||||
test("returns 400 if organizationIds is missing", async () => {
|
||||
const request = new Request("http://localhost/api/job/mirror-org", {
|
||||
method: "POST",
|
||||
headers: {
|
||||
"Content-Type": "application/json"
|
||||
},
|
||||
body: JSON.stringify({
|
||||
userId: "user-id"
|
||||
})
|
||||
});
|
||||
|
||||
const response = await mockModule.POST({ request } as any);
|
||||
|
||||
expect(response.status).toBe(400);
|
||||
|
||||
const data = await response.json();
|
||||
expect(data.error).toBe("Missing userId or organizationIds.");
|
||||
});
|
||||
|
||||
test("returns 200 and starts mirroring organizations", async () => {
|
||||
const request = new Request("http://localhost/api/job/mirror-org", {
|
||||
method: "POST",
|
||||
headers: {
|
||||
"Content-Type": "application/json"
|
||||
},
|
||||
body: JSON.stringify({
|
||||
userId: "user-id",
|
||||
organizationIds: ["org-id-1", "org-id-2"]
|
||||
})
|
||||
});
|
||||
|
||||
const response = await mockModule.POST({ request } as any);
|
||||
|
||||
expect(response.status).toBe(200);
|
||||
|
||||
const data = await response.json();
|
||||
expect(data.success).toBe(true);
|
||||
expect(data.message).toBe("Organization mirroring started");
|
||||
expect(data.batchId).toBe("test-batch-id");
|
||||
});
|
||||
});
|
||||
@@ -6,6 +6,8 @@ import { createGitHubClient } from "@/lib/github";
|
||||
import { mirrorGitHubOrgToGitea } from "@/lib/gitea";
|
||||
import { repoStatusEnum } from "@/types/Repository";
|
||||
import { type MembershipRole } from "@/types/organizations";
|
||||
import { processWithResilience } from "@/lib/utils/concurrency";
|
||||
import { v4 as uuidv4 } from "uuid";
|
||||
|
||||
export const POST: APIRoute = async ({ request }) => {
|
||||
try {
|
||||
@@ -61,31 +63,72 @@ export const POST: APIRoute = async ({ request }) => {
|
||||
);
|
||||
}
|
||||
|
||||
// Fire async mirroring without blocking response
|
||||
// Fire async mirroring without blocking response, using parallel processing with resilience
|
||||
setTimeout(async () => {
|
||||
for (const org of orgs) {
|
||||
if (!config.githubConfig.token) {
|
||||
throw new Error("GitHub token is missing in config.");
|
||||
}
|
||||
if (!config.githubConfig.token) {
|
||||
throw new Error("GitHub token is missing in config.");
|
||||
}
|
||||
|
||||
const octokit = createGitHubClient(config.githubConfig.token);
|
||||
// Create a single Octokit instance to be reused
|
||||
const octokit = createGitHubClient(config.githubConfig.token);
|
||||
|
||||
try {
|
||||
// Define the concurrency limit - adjust based on API rate limits
|
||||
// Using a lower concurrency for organizations since each org might contain many repos
|
||||
const CONCURRENCY_LIMIT = 2;
|
||||
|
||||
// Generate a batch ID to group related organizations
|
||||
const batchId = uuidv4();
|
||||
|
||||
// Process organizations in parallel with resilience to container restarts
|
||||
await processWithResilience(
|
||||
orgs,
|
||||
async (org) => {
|
||||
// Prepare organization data
|
||||
const orgData = {
|
||||
...org,
|
||||
status: repoStatusEnum.parse("imported"),
|
||||
membershipRole: org.membershipRole as MembershipRole,
|
||||
lastMirrored: org.lastMirrored ?? undefined,
|
||||
errorMessage: org.errorMessage ?? undefined,
|
||||
};
|
||||
|
||||
// Log the start of mirroring
|
||||
console.log(`Starting mirror for organization: ${org.name}`);
|
||||
|
||||
// Mirror the organization
|
||||
await mirrorGitHubOrgToGitea({
|
||||
config,
|
||||
octokit,
|
||||
organization: {
|
||||
...org,
|
||||
status: repoStatusEnum.parse("imported"),
|
||||
membershipRole: org.membershipRole as MembershipRole,
|
||||
lastMirrored: org.lastMirrored ?? undefined,
|
||||
errorMessage: org.errorMessage ?? undefined,
|
||||
},
|
||||
organization: orgData,
|
||||
});
|
||||
} catch (error) {
|
||||
console.error(`Mirror failed for organization ${org.name}:`, error);
|
||||
|
||||
return org;
|
||||
},
|
||||
{
|
||||
userId: config.userId || "",
|
||||
jobType: "mirror",
|
||||
batchId,
|
||||
getItemId: (org) => org.id,
|
||||
getItemName: (org) => org.name,
|
||||
concurrencyLimit: CONCURRENCY_LIMIT,
|
||||
maxRetries: 2,
|
||||
retryDelay: 3000,
|
||||
checkpointInterval: 1, // Checkpoint after each organization
|
||||
onProgress: (completed, total, result) => {
|
||||
const percentComplete = Math.round((completed / total) * 100);
|
||||
console.log(`Organization mirroring progress: ${percentComplete}% (${completed}/${total})`);
|
||||
|
||||
if (result) {
|
||||
console.log(`Successfully mirrored organization: ${result.name}`);
|
||||
}
|
||||
},
|
||||
onRetry: (org, error, attempt) => {
|
||||
console.log(`Retrying organization ${org.name} (attempt ${attempt}): ${error.message}`);
|
||||
}
|
||||
}
|
||||
}
|
||||
);
|
||||
|
||||
console.log("All organization mirroring tasks completed");
|
||||
}, 0);
|
||||
|
||||
const responsePayload: MirrorOrgResponse = {
|
||||
|
||||
109
src/pages/api/job/mirror-repo.test.ts
Normal file
109
src/pages/api/job/mirror-repo.test.ts
Normal file
@@ -0,0 +1,109 @@
|
||||
import { describe, test, expect, mock, beforeEach, afterEach } from "bun:test";
|
||||
|
||||
// Create a mock POST function
|
||||
const mockPOST = mock(async ({ request }) => {
|
||||
const body = await request.json();
|
||||
|
||||
// Check for missing userId or repositoryIds
|
||||
if (!body.userId || !body.repositoryIds) {
|
||||
return new Response(
|
||||
JSON.stringify({
|
||||
error: "Missing userId or repositoryIds."
|
||||
}),
|
||||
{ status: 400 }
|
||||
);
|
||||
}
|
||||
|
||||
// Success case
|
||||
return new Response(
|
||||
JSON.stringify({
|
||||
success: true,
|
||||
message: "Repository mirroring started",
|
||||
batchId: "test-batch-id"
|
||||
}),
|
||||
{ status: 200 }
|
||||
);
|
||||
});
|
||||
|
||||
// Create a mock module
|
||||
const mockModule = {
|
||||
POST: mockPOST
|
||||
};
|
||||
|
||||
describe("Repository Mirroring API", () => {
|
||||
// Mock console.log and console.error to prevent test output noise
|
||||
let originalConsoleLog: typeof console.log;
|
||||
let originalConsoleError: typeof console.error;
|
||||
|
||||
beforeEach(() => {
|
||||
originalConsoleLog = console.log;
|
||||
originalConsoleError = console.error;
|
||||
console.log = mock(() => {});
|
||||
console.error = mock(() => {});
|
||||
});
|
||||
|
||||
afterEach(() => {
|
||||
console.log = originalConsoleLog;
|
||||
console.error = originalConsoleError;
|
||||
});
|
||||
|
||||
test("returns 400 if userId is missing", async () => {
|
||||
const request = new Request("http://localhost/api/job/mirror-repo", {
|
||||
method: "POST",
|
||||
headers: {
|
||||
"Content-Type": "application/json"
|
||||
},
|
||||
body: JSON.stringify({
|
||||
repositoryIds: ["repo-id-1", "repo-id-2"]
|
||||
})
|
||||
});
|
||||
|
||||
const response = await mockModule.POST({ request } as any);
|
||||
|
||||
expect(response.status).toBe(400);
|
||||
|
||||
const data = await response.json();
|
||||
expect(data.error).toBe("Missing userId or repositoryIds.");
|
||||
});
|
||||
|
||||
test("returns 400 if repositoryIds is missing", async () => {
|
||||
const request = new Request("http://localhost/api/job/mirror-repo", {
|
||||
method: "POST",
|
||||
headers: {
|
||||
"Content-Type": "application/json"
|
||||
},
|
||||
body: JSON.stringify({
|
||||
userId: "user-id"
|
||||
})
|
||||
});
|
||||
|
||||
const response = await mockModule.POST({ request } as any);
|
||||
|
||||
expect(response.status).toBe(400);
|
||||
|
||||
const data = await response.json();
|
||||
expect(data.error).toBe("Missing userId or repositoryIds.");
|
||||
});
|
||||
|
||||
test("returns 200 and starts mirroring repositories", async () => {
|
||||
const request = new Request("http://localhost/api/job/mirror-repo", {
|
||||
method: "POST",
|
||||
headers: {
|
||||
"Content-Type": "application/json"
|
||||
},
|
||||
body: JSON.stringify({
|
||||
userId: "user-id",
|
||||
repositoryIds: ["repo-id-1", "repo-id-2"]
|
||||
})
|
||||
});
|
||||
|
||||
const response = await mockModule.POST({ request } as any);
|
||||
|
||||
expect(response.status).toBe(200);
|
||||
|
||||
const data = await response.json();
|
||||
expect(data.success).toBe(true);
|
||||
expect(data.message).toBe("Repository mirroring started");
|
||||
expect(data.batchId).toBe("test-batch-id");
|
||||
});
|
||||
});
|
||||
@@ -8,6 +8,8 @@ import {
|
||||
mirrorGitHubOrgRepoToGiteaOrg,
|
||||
} from "@/lib/gitea";
|
||||
import { createGitHubClient } from "@/lib/github";
|
||||
import { processWithResilience } from "@/lib/utils/concurrency";
|
||||
import { v4 as uuidv4 } from "uuid";
|
||||
|
||||
export const POST: APIRoute = async ({ request }) => {
|
||||
try {
|
||||
@@ -63,52 +65,83 @@ export const POST: APIRoute = async ({ request }) => {
|
||||
);
|
||||
}
|
||||
|
||||
// Start async mirroring in background
|
||||
// Start async mirroring in background with parallel processing and resilience
|
||||
setTimeout(async () => {
|
||||
for (const repo of repos) {
|
||||
if (!config.githubConfig.token) {
|
||||
throw new Error("GitHub token is missing.");
|
||||
}
|
||||
if (!config.githubConfig.token) {
|
||||
throw new Error("GitHub token is missing.");
|
||||
}
|
||||
|
||||
const octokit = createGitHubClient(config.githubConfig.token);
|
||||
// Create a single Octokit instance to be reused
|
||||
const octokit = createGitHubClient(config.githubConfig.token);
|
||||
|
||||
try {
|
||||
// Define the concurrency limit - adjust based on API rate limits
|
||||
const CONCURRENCY_LIMIT = 3;
|
||||
|
||||
// Generate a batch ID to group related repositories
|
||||
const batchId = uuidv4();
|
||||
|
||||
// Process repositories in parallel with resilience to container restarts
|
||||
await processWithResilience(
|
||||
repos,
|
||||
async (repo) => {
|
||||
// Prepare repository data
|
||||
const repoData = {
|
||||
...repo,
|
||||
status: repoStatusEnum.parse("imported"),
|
||||
organization: repo.organization ?? undefined,
|
||||
lastMirrored: repo.lastMirrored ?? undefined,
|
||||
errorMessage: repo.errorMessage ?? undefined,
|
||||
forkedFrom: repo.forkedFrom ?? undefined,
|
||||
visibility: repositoryVisibilityEnum.parse(repo.visibility),
|
||||
mirroredLocation: repo.mirroredLocation || "",
|
||||
};
|
||||
|
||||
// Log the start of mirroring
|
||||
console.log(`Starting mirror for repository: ${repo.name}`);
|
||||
|
||||
// Mirror the repository based on whether it's in an organization
|
||||
if (repo.organization && config.githubConfig.preserveOrgStructure) {
|
||||
await mirrorGitHubOrgRepoToGiteaOrg({
|
||||
config,
|
||||
octokit,
|
||||
orgName: repo.organization,
|
||||
repository: {
|
||||
...repo,
|
||||
status: repoStatusEnum.parse("imported"),
|
||||
organization: repo.organization ?? undefined,
|
||||
lastMirrored: repo.lastMirrored ?? undefined,
|
||||
errorMessage: repo.errorMessage ?? undefined,
|
||||
forkedFrom: repo.forkedFrom ?? undefined,
|
||||
visibility: repositoryVisibilityEnum.parse(repo.visibility),
|
||||
mirroredLocation: repo.mirroredLocation || "",
|
||||
},
|
||||
repository: repoData,
|
||||
});
|
||||
} else {
|
||||
await mirrorGithubRepoToGitea({
|
||||
octokit,
|
||||
repository: {
|
||||
...repo,
|
||||
status: repoStatusEnum.parse("imported"),
|
||||
organization: repo.organization ?? undefined,
|
||||
lastMirrored: repo.lastMirrored ?? undefined,
|
||||
errorMessage: repo.errorMessage ?? undefined,
|
||||
forkedFrom: repo.forkedFrom ?? undefined,
|
||||
visibility: repositoryVisibilityEnum.parse(repo.visibility),
|
||||
mirroredLocation: repo.mirroredLocation || "",
|
||||
},
|
||||
repository: repoData,
|
||||
config,
|
||||
});
|
||||
}
|
||||
} catch (error) {
|
||||
console.error(`Mirror failed for repo ${repo.name}:`, error);
|
||||
|
||||
return repo;
|
||||
},
|
||||
{
|
||||
userId: config.userId || "",
|
||||
jobType: "mirror",
|
||||
batchId,
|
||||
getItemId: (repo) => repo.id,
|
||||
getItemName: (repo) => repo.name,
|
||||
concurrencyLimit: CONCURRENCY_LIMIT,
|
||||
maxRetries: 2,
|
||||
retryDelay: 2000,
|
||||
checkpointInterval: 1, // Checkpoint after each repository
|
||||
onProgress: (completed, total, result) => {
|
||||
const percentComplete = Math.round((completed / total) * 100);
|
||||
console.log(`Mirroring progress: ${percentComplete}% (${completed}/${total})`);
|
||||
|
||||
if (result) {
|
||||
console.log(`Successfully mirrored repository: ${result.name}`);
|
||||
}
|
||||
},
|
||||
onRetry: (repo, error, attempt) => {
|
||||
console.log(`Retrying repository ${repo.name} (attempt ${attempt}): ${error.message}`);
|
||||
}
|
||||
}
|
||||
}
|
||||
);
|
||||
|
||||
console.log("All repository mirroring tasks completed");
|
||||
}, 0);
|
||||
|
||||
const responsePayload: MirrorRepoResponse = {
|
||||
|
||||
@@ -10,6 +10,8 @@ import {
|
||||
import { createGitHubClient } from "@/lib/github";
|
||||
import { repoStatusEnum, repositoryVisibilityEnum } from "@/types/Repository";
|
||||
import type { RetryRepoRequest, RetryRepoResponse } from "@/types/retry";
|
||||
import { processWithRetry } from "@/lib/utils/concurrency";
|
||||
import { createMirrorJob } from "@/lib/helpers";
|
||||
|
||||
export const POST: APIRoute = async ({ request }) => {
|
||||
try {
|
||||
@@ -65,10 +67,21 @@ export const POST: APIRoute = async ({ request }) => {
|
||||
);
|
||||
}
|
||||
|
||||
// Start background retry
|
||||
// Start background retry with parallel processing
|
||||
setTimeout(async () => {
|
||||
for (const repo of repos) {
|
||||
try {
|
||||
// Create a single Octokit instance to be reused if needed
|
||||
const octokit = config.githubConfig.token
|
||||
? createGitHubClient(config.githubConfig.token)
|
||||
: null;
|
||||
|
||||
// Define the concurrency limit - adjust based on API rate limits
|
||||
const CONCURRENCY_LIMIT = 3;
|
||||
|
||||
// Process repositories in parallel with retry capability
|
||||
await processWithRetry(
|
||||
repos,
|
||||
async (repo) => {
|
||||
// Prepare repository data
|
||||
const visibility = repositoryVisibilityEnum.parse(repo.visibility);
|
||||
const status = repoStatusEnum.parse(repo.status);
|
||||
const repoData = {
|
||||
@@ -81,6 +94,20 @@ export const POST: APIRoute = async ({ request }) => {
|
||||
forkedFrom: repo.forkedFrom ?? undefined,
|
||||
};
|
||||
|
||||
// Log the start of retry operation
|
||||
console.log(`Starting retry for repository: ${repo.name}`);
|
||||
|
||||
// Create a mirror job entry to track progress
|
||||
await createMirrorJob({
|
||||
userId: config.userId || "",
|
||||
repositoryId: repo.id,
|
||||
repositoryName: repo.name,
|
||||
message: `Started retry operation for repository: ${repo.name}`,
|
||||
details: `Repository ${repo.name} is now in the retry queue.`,
|
||||
status: "imported",
|
||||
});
|
||||
|
||||
// Determine if the repository exists in Gitea
|
||||
let owner = getGiteaRepoOwner({
|
||||
config,
|
||||
repository: repoData,
|
||||
@@ -93,16 +120,21 @@ export const POST: APIRoute = async ({ request }) => {
|
||||
});
|
||||
|
||||
if (present) {
|
||||
// If the repository exists, sync it
|
||||
await syncGiteaRepo({ config, repository: repoData });
|
||||
console.log(`Synced existing repo: ${repo.name}`);
|
||||
} else {
|
||||
// If the repository doesn't exist, mirror it
|
||||
if (!config.githubConfig.token) {
|
||||
throw new Error("GitHub token is missing.");
|
||||
}
|
||||
|
||||
if (!octokit) {
|
||||
throw new Error("Octokit client is not initialized.");
|
||||
}
|
||||
|
||||
console.log(`Importing repo: ${repo.name} ${owner}`);
|
||||
|
||||
const octokit = createGitHubClient(config.githubConfig.token);
|
||||
if (repo.organization && config.githubConfig.preserveOrgStructure) {
|
||||
await mirrorGitHubOrgRepoToGiteaOrg({
|
||||
config,
|
||||
@@ -124,10 +156,28 @@ export const POST: APIRoute = async ({ request }) => {
|
||||
});
|
||||
}
|
||||
}
|
||||
} catch (err) {
|
||||
console.error(`Failed to retry repo ${repo.name}:`, err);
|
||||
|
||||
return repo;
|
||||
},
|
||||
{
|
||||
concurrencyLimit: CONCURRENCY_LIMIT,
|
||||
maxRetries: 2,
|
||||
retryDelay: 2000,
|
||||
onProgress: (completed, total, result) => {
|
||||
const percentComplete = Math.round((completed / total) * 100);
|
||||
console.log(`Retry progress: ${percentComplete}% (${completed}/${total})`);
|
||||
|
||||
if (result) {
|
||||
console.log(`Successfully processed repository: ${result.name}`);
|
||||
}
|
||||
},
|
||||
onRetry: (repo, error, attempt) => {
|
||||
console.log(`Retrying repository ${repo.name} (attempt ${attempt}): ${error.message}`);
|
||||
}
|
||||
}
|
||||
}
|
||||
);
|
||||
|
||||
console.log("All repository retry tasks completed");
|
||||
}, 0);
|
||||
|
||||
const responsePayload: RetryRepoResponse = {
|
||||
|
||||
@@ -5,6 +5,8 @@ import { eq, inArray } from "drizzle-orm";
|
||||
import { repositoryVisibilityEnum, repoStatusEnum } from "@/types/Repository";
|
||||
import { syncGiteaRepo } from "@/lib/gitea";
|
||||
import type { SyncRepoResponse } from "@/types/sync";
|
||||
import { processWithResilience } from "@/lib/utils/concurrency";
|
||||
import { v4 as uuidv4 } from "uuid";
|
||||
|
||||
export const POST: APIRoute = async ({ request }) => {
|
||||
try {
|
||||
@@ -60,26 +62,65 @@ export const POST: APIRoute = async ({ request }) => {
|
||||
);
|
||||
}
|
||||
|
||||
// Start async mirroring in background
|
||||
// Start async mirroring in background with parallel processing and resilience
|
||||
setTimeout(async () => {
|
||||
for (const repo of repos) {
|
||||
try {
|
||||
// Define the concurrency limit - adjust based on API rate limits
|
||||
const CONCURRENCY_LIMIT = 5;
|
||||
|
||||
// Generate a batch ID to group related repositories
|
||||
const batchId = uuidv4();
|
||||
|
||||
// Process repositories in parallel with resilience to container restarts
|
||||
await processWithResilience(
|
||||
repos,
|
||||
async (repo) => {
|
||||
// Prepare repository data
|
||||
const repoData = {
|
||||
...repo,
|
||||
status: repoStatusEnum.parse(repo.status),
|
||||
organization: repo.organization ?? undefined,
|
||||
lastMirrored: repo.lastMirrored ?? undefined,
|
||||
errorMessage: repo.errorMessage ?? undefined,
|
||||
forkedFrom: repo.forkedFrom ?? undefined,
|
||||
visibility: repositoryVisibilityEnum.parse(repo.visibility),
|
||||
};
|
||||
|
||||
// Log the start of syncing
|
||||
console.log(`Starting sync for repository: ${repo.name}`);
|
||||
|
||||
// Sync the repository
|
||||
await syncGiteaRepo({
|
||||
config,
|
||||
repository: {
|
||||
...repo,
|
||||
status: repoStatusEnum.parse(repo.status),
|
||||
organization: repo.organization ?? undefined,
|
||||
lastMirrored: repo.lastMirrored ?? undefined,
|
||||
errorMessage: repo.errorMessage ?? undefined,
|
||||
forkedFrom: repo.forkedFrom ?? undefined,
|
||||
visibility: repositoryVisibilityEnum.parse(repo.visibility),
|
||||
},
|
||||
repository: repoData,
|
||||
});
|
||||
} catch (error) {
|
||||
console.error(`Sync failed for repo ${repo.name}:`, error);
|
||||
|
||||
return repo;
|
||||
},
|
||||
{
|
||||
userId: config.userId || "",
|
||||
jobType: "sync",
|
||||
batchId,
|
||||
getItemId: (repo) => repo.id,
|
||||
getItemName: (repo) => repo.name,
|
||||
concurrencyLimit: CONCURRENCY_LIMIT,
|
||||
maxRetries: 2,
|
||||
retryDelay: 2000,
|
||||
checkpointInterval: 1, // Checkpoint after each repository
|
||||
onProgress: (completed, total, result) => {
|
||||
const percentComplete = Math.round((completed / total) * 100);
|
||||
console.log(`Syncing progress: ${percentComplete}% (${completed}/${total})`);
|
||||
|
||||
if (result) {
|
||||
console.log(`Successfully synced repository: ${result.name}`);
|
||||
}
|
||||
},
|
||||
onRetry: (repo, error, attempt) => {
|
||||
console.log(`Retrying sync for repository ${repo.name} (attempt ${attempt}): ${error.message}`);
|
||||
}
|
||||
}
|
||||
}
|
||||
);
|
||||
|
||||
console.log("All repository syncing tasks completed");
|
||||
}, 0);
|
||||
|
||||
const responsePayload: SyncRepoResponse = {
|
||||
|
||||
20
src/tests/setup.bun.ts
Normal file
20
src/tests/setup.bun.ts
Normal file
@@ -0,0 +1,20 @@
|
||||
/**
|
||||
* Bun test setup file
|
||||
* This file is automatically loaded before running tests
|
||||
*/
|
||||
|
||||
import { afterEach, beforeEach } from "bun:test";
|
||||
|
||||
// Clean up after each test
|
||||
afterEach(() => {
|
||||
// Add any cleanup logic here
|
||||
});
|
||||
|
||||
// Setup before each test
|
||||
beforeEach(() => {
|
||||
// Add any setup logic here
|
||||
});
|
||||
|
||||
// Add DOM testing support if needed
|
||||
// import { DOMParser } from "linkedom";
|
||||
// global.DOMParser = DOMParser;
|
||||
Reference in New Issue
Block a user