Mirror of https://github.com/RayLabsHQ/gitea-mirror.git (synced 2025-12-06 11:36:44 +03:00)

# Compare commits

55 commits
| SHA1 |
| --- |
| 3bb85a4cdb |
| 30182544ba |
| fb73f33aeb |
| 48f63bdfc8 |
| e2506a874e |
| b67473ec7e |
| 4ca4356ad1 |
| 3136a2120d |
| 615ebd5079 |
| 6e48d3f86c |
| c5de7e616d |
| 309f8c4341 |
| 0c596ac241 |
| 894be88a28 |
| 6ab7f0a5a0 |
| abe3113755 |
| f4bc28e6c2 |
| aaf8dc6fe4 |
| cda78bc0f5 |
| 9ccd656734 |
| 8b5c5d8ed2 |
| 1ab642c9e7 |
| 1eae725535 |
| 5bf52c806f |
| a15178d2cd |
| 32ef9124a7 |
| 161685b966 |
| 0cf95b2a0e |
| c896194aeb |
| 315d892cf4 |
| b7eaa94ca2 |
| 52dbe6a2d9 |
| e423d78cf9 |
| f6b51414a0 |
| 8a35c0368f |
| 6f64838b55 |
| f37867ea0c |
| 4aa7e665ac |
| 4b570f555a |
| 97676f3b04 |
| 04e8b817d3 |
| 6d13ff29ca |
| c179953649 |
| eb2d76a4b7 |
| 145bee8d96 |
| cad72da016 |
| 4a01a351f0 |
| 98973adfe5 |
| f6b5df472a |
| b09cabd154 |
| f9c77bbee0 |
| e95f1d99b5 |
| d5b0102080 |
| 94aff30dda |
| 38206e7d3d |
**.dockerignore** (filename inferred from content) · diff

````diff
@@ -5,10 +5,10 @@
 # Node.js
 node_modules
+# We don't exclude bun.lock* as it's needed for the build
 npm-debug.log
 yarn-debug.log
 yarn-error.log
 pnpm-debug.log

 # Build outputs
 dist
@@ -62,4 +62,3 @@ logs
 # Cache
 .cache
 .npm
-.pnpm-store
````
**.github/assets/logo.png** · BIN, vendored, new file

Binary file not shown. (After: 1.6 MiB)
**.github/workflows/README.md** · 3 changes (vendored)

````diff
@@ -24,8 +24,7 @@ This workflow runs on all branches and pull requests. It:
 - On push to any branch (except changes to README.md and docs)
 - On pull requests to any branch (except changes to README.md and docs)

 **Key features:**
-- Uses pnpm for faster dependency installation
+- Uses Bun for dependency installation
 - Caches dependencies to speed up builds
 - Uploads build artifacts for 7 days
````
**.github/workflows/astro-build-test.yml** · 31 changes (vendored)

````diff
@@ -16,31 +16,32 @@ jobs:
   build-and-test:
     name: Build and Test Astro Project
     runs-on: ubuntu-latest

     steps:
       - name: Checkout repository
         uses: actions/checkout@v4

-      - name: Install pnpm
-        uses: pnpm/action-setup@v3
+      - name: Setup Bun
+        uses: oven-sh/setup-bun@v1
         with:
-          version: 10
-          run_install: false
+          bun-version: '1.2.9'

-      - name: Setup Node.js
-        uses: actions/setup-node@v4
-        with:
-          node-version: 'lts/*'
-          cache: 'pnpm'
-
-      - name: Install dependencies
-        run: pnpm install
+      - name: Check lockfile and install dependencies
+        run: |
+          # Check if bun.lock exists, if not check for bun.lockb
+          if [ -f "bun.lock" ]; then
+            echo "Using existing bun.lock file"
+          elif [ -f "bun.lockb" ]; then
+            echo "Found bun.lockb, creating symlink to bun.lock"
+            ln -s bun.lockb bun.lock
+          fi
+          bun install

       - name: Run tests
-        run: pnpm test
+        run: bun test --coverage

       - name: Build Astro project
-        run: pnpm build
+        run: bunx --bun astro build

       - name: Upload build artifacts
         uses: actions/upload-artifact@v4
````
**.github/workflows/docker-build.yml** · 5 changes (vendored)

````diff
@@ -18,11 +18,6 @@ jobs:
       contents: write
       packages: write

-    services:
-      redis:
-        image: redis:7-alpine
-        ports: ['6379:6379']
-
     steps:
       - uses: actions/checkout@v4
````
**.github/workflows/docker-scan.yml** · 4 changes (vendored)

````diff
@@ -7,14 +7,14 @@ on:
       - 'Dockerfile'
       - '.dockerignore'
       - 'package.json'
-      - 'pnpm-lock.yaml'
+      - 'bun.lock*'
   pull_request:
     branches: [ main ]
     paths:
       - 'Dockerfile'
       - '.dockerignore'
       - 'package.json'
-      - 'pnpm-lock.yaml'
+      - 'bun.lock*'
   schedule:
     - cron: '0 0 * * 0' # Run weekly on Sunday at midnight
````
**CHANGELOG.md** · 42 additions (new file)

# Changelog

All notable changes to the Gitea Mirror project will be documented in this file.

The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/),
and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html).

## [2.5.3] - 2025-05-22

### Added
- Enhanced JWT_SECRET handling with auto-generation and persistence for improved security
- Updated Proxmox LXC deployment instructions and replaced deprecated script

## [2.5.2] - 2024-11-22

### Fixed
- Fixed version information in health API for Docker deployments by setting npm_package_version environment variable in entrypoint script

## [2.5.1] - 2024-10-01

### Fixed
- Fixed Docker entrypoint script to prevent unnecessary `bun install` on container startup
- Removed redundant dependency installation in Docker containers for pre-built images
- Fixed "PathAlreadyExists" errors during container initialization

### Changed
- Improved database initialization in Docker entrypoint script
- Added additional checks for TypeScript versions of database management scripts

## [2.5.0] - 2024-09-15

Initial public release with core functionality:

### Added
- GitHub to Gitea repository mirroring
- User authentication and management
- Dashboard with mirroring statistics
- Configuration management for mirroring settings
- Support for organization mirroring
- Automated mirroring with configurable schedules
- Docker multi-architecture support (amd64, arm64)
- LXC container deployment scripts
**Dockerfile** · 81 changes

````diff
@@ -1,82 +1,47 @@
-# syntax=docker/dockerfile:1.4
-
-FROM node:lts-alpine AS base
-ENV PNPM_HOME=/usr/local/bin
-ENV PATH=$PNPM_HOME:$PATH
-RUN apk add --no-cache libc6-compat
+FROM oven/bun:1.2.9-alpine AS base
+WORKDIR /app
+RUN apk add --no-cache libc6-compat python3 make g++ gcc wget sqlite openssl

-# -----------------------------------
+# ----------------------------
 FROM base AS deps
-WORKDIR /app
-RUN apk add --no-cache python3 make g++ gcc
-
-RUN --mount=type=cache,target=/root/.npm \
-    corepack enable && corepack prepare pnpm@latest --activate
-
-COPY package.json pnpm-lock.yaml* ./
-
-# Full dev install
-RUN --mount=type=cache,target=/root/.local/share/pnpm/store \
-    pnpm install --frozen-lockfile
+COPY package.json ./
+COPY bun.lock* ./
+RUN bun install --frozen-lockfile

-# -----------------------------------
-FROM base AS builder
-WORKDIR /app
-RUN apk add --no-cache python3 make g++ gcc
-
-RUN --mount=type=cache,target=/root/.npm \
-    corepack enable && corepack prepare pnpm@latest --activate
-
-COPY --from=deps /app/node_modules ./node_modules
+# ----------------------------
+FROM deps AS builder
 COPY . .

-RUN pnpm build
+RUN bun run build
 # Compile TypeScript scripts to JavaScript
 RUN mkdir -p dist/scripts && \
     for script in scripts/*.ts; do \
-      node_modules/.bin/tsc --outDir dist/scripts --module commonjs --target es2020 --esModuleInterop $script || true; \
+      bun build "$script" --target=bun --outfile=dist/scripts/$(basename "${script%.ts}.js"); \
     done

-# -----------------------------------
+# ----------------------------
 FROM deps AS pruner
-WORKDIR /app
-
-# Prune dev dependencies and just keep the production bits
-RUN --mount=type=cache,target=/root/.local/share/pnpm/store \
-    pnpm prune --prod
+RUN bun install --production --frozen-lockfile

-# -----------------------------------
+# ----------------------------
 FROM base AS runner
 WORKDIR /app

-# Only copy production node_modules and built output
-COPY --from=pruner /app/node_modules ./node_modules
-COPY --from=builder /app/dist ./dist
-COPY --from=builder /app/package.json ./package.json
-COPY --from=builder /app/docker-entrypoint.sh ./docker-entrypoint.sh
-COPY --from=builder /app/scripts ./scripts
-COPY --from=builder /app/data ./data
-
 ENV NODE_ENV=production
 ENV HOST=0.0.0.0
 ENV PORT=4321
 ENV DATABASE_URL=file:data/gitea-mirror.db

-# Make entrypoint executable
-RUN chmod +x /app/docker-entrypoint.sh
-
-ENTRYPOINT ["/app/docker-entrypoint.sh"]
-
-RUN apk add --no-cache wget sqlite && \
-    mkdir -p /app/data && \
-    addgroup --system --gid 1001 nodejs && \
-    adduser --system --uid 1001 gitea-mirror && \
-    chown -R gitea-mirror:nodejs /app/data
-
+COPY --from=builder --chown=gitea-mirror:nodejs /app/dist ./dist
+COPY --from=pruner --chown=gitea-mirror:nodejs /app/node_modules ./node_modules
+COPY --from=builder --chown=gitea-mirror:nodejs /app/package.json ./package.json
+COPY --from=builder --chown=gitea-mirror:nodejs /app/scripts ./scripts
+RUN chmod +x ./docker-entrypoint.sh && \
+    mkdir -p /app/data && \
+    addgroup --system --gid 1001 nodejs && \
+    adduser --system --uid 1001 gitea-mirror && \
+    chown -R gitea-mirror:nodejs /app/data

 USER gitea-mirror

@@ -84,10 +49,6 @@ VOLUME /app/data
 EXPOSE 4321

 HEALTHCHECK --interval=30s --timeout=5s --start-period=5s --retries=3 \
-  CMD wget --no-verbose --tries=1 --spider http://localhost:4321/ || exit 1
+  CMD wget --no-verbose --tries=1 --spider http://localhost:4321/api/health || exit 1

-# Create a startup script that initializes the database before starting the application
-COPY --from=builder --chown=gitea-mirror:nodejs /app/docker-entrypoint.sh ./docker-entrypoint.sh
-RUN chmod +x ./docker-entrypoint.sh
-
-CMD ["./docker-entrypoint.sh"]
+ENTRYPOINT ["./docker-entrypoint.sh"]
````
**README.md** · 264 changes

````diff
@@ -1,11 +1,34 @@
-# Gitea Mirror
-
 <p align="center">
-  <i>A modern web application for automatically mirroring repositories from GitHub to your self-hosted Gitea instance.</i><br>
-  <sub>Designed for developers, teams, and organizations who want to retain full control of their code while still collaborating on GitHub.</sub>
+  <img src=".github/assets/logo.png" alt="Gitea Mirror Logo" width="120" />
+  <h1>Gitea Mirror</h1>
+  <p><i>A modern web app for automatically mirroring repositories from GitHub to your self-hosted Gitea.</i></p>
+  <p align="center">
   <a href="https://github.com/arunavo4/gitea-mirror/releases/latest"><img src="https://img.shields.io/github/v/tag/arunavo4/gitea-mirror?label=release" alt="release"/></a>
   <a href="https://github.com/arunavo4/gitea-mirror/actions/workflows/astro-build-test.yml"><img src="https://img.shields.io/github/actions/workflow/status/arunavo4/gitea-mirror/astro-build-test.yml?branch=main" alt="build"/></a>
   <a href="https://github.com/arunavo4/gitea-mirror/pkgs/container/gitea-mirror"><img src="https://img.shields.io/badge/ghcr.io-container-blue?logo=github" alt="container"/></a>
   <a href="https://github.com/arunavo4/gitea-mirror/blob/main/LICENSE"><img src="https://img.shields.io/github/license/arunavo4/gitea-mirror" alt="license"/></a>
+  </p>
 </p>

+## 🚀 Quick Start
+
+```bash
+# Using Docker (recommended)
+docker compose --profile production up -d
+
+# Using Bun
+bun run setup && bun run dev
+
+# Using LXC Containers
+# For Proxmox VE (online) - Community script by Tobias ([CrazyWolf13](https://github.com/CrazyWolf13))
+curl -fsSL https://raw.githubusercontent.com/community-scripts/ProxmoxVED/main/install/gitea-mirror-install.sh | bash
+
+# For local testing (offline-friendly)
+sudo LOCAL_REPO_DIR=~/Development/gitea-mirror ./scripts/gitea-mirror-lxc-local.sh
+```
+
+See the [LXC Container Deployment Guide](scripts/README-lxc.md).
+
+<p align="center">
+  <img src=".github/assets/dashboard.png" alt="Dashboard" width="80%"/>
+</p>
````
````diff
@@ -50,7 +73,7 @@ See the [Quick Start Guide](docs/quickstart.md) for detailed instructions on get
 ### Prerequisites

-- Node.js 22 or later
+- Bun 1.2.9 or later
 - A GitHub account with a personal access token
 - A Gitea instance with an access token
````
````diff
@@ -77,7 +100,7 @@ Before running the application in production mode for the first time, you need t
 ```bash
 # Initialize the database for production mode
-pnpm setup
+bun run setup
 ```

 This will create the necessary tables. On first launch, you'll be guided through creating your admin account with a secure password.
````
````diff
@@ -95,13 +118,13 @@ Gitea Mirror provides multi-architecture Docker images that work on both ARM64 (
 docker compose --profile production up -d

 # For development mode (requires configuration)
-# Ensure you have run pnpm setup first
+# Ensure you have run bun run setup first
 docker compose -f docker-compose.dev.yml up -d
 ```

 > [!IMPORTANT]
-> **Docker Compose is the recommended method for running Gitea Mirror** as it automatically sets up the required Redis sidecar service that the application depends on.
+> **Docker Compose is the recommended method for running Gitea Mirror** as it provides a consistent environment with proper volume management for the SQLite database.

 > [!NOTE]
@@ -109,19 +132,15 @@ docker compose -f docker-compose.dev.yml up -d

 ##### Using Pre-built Images from GitHub Container Registry

-If you want to run the container directly without Docker Compose, you'll need to set up a Redis instance separately:
+If you want to run the container directly without Docker Compose:

 ```bash
-# First, start a Redis container
-docker run -d --name gitea-mirror-redis redis:alpine
-
 # Pull the latest multi-architecture image
 docker pull ghcr.io/arunavo4/gitea-mirror:latest

-# Run the application with a link to the Redis container
-# Note: The REDIS_URL environment variable is required and must point to the Redis container
-docker run -d -p 4321:4321 --link gitea-mirror-redis:redis \
-  -e REDIS_URL=redis://redis:6379 \
+# Run the application with a volume for persistent data
+docker run -d -p 4321:4321 \
+  -v gitea-mirror-data:/app/data \
   ghcr.io/arunavo4/gitea-mirror:latest
 ```
````
````diff
@@ -148,6 +167,41 @@ docker compose --profile production up -d

 See [Docker build documentation](./scripts/README-docker.md) for more details.

+##### Using LXC Containers
+
+Gitea Mirror offers two deployment options for LXC containers:
+
+**1. Proxmox VE (online, recommended for production)**
+
+```bash
+# One-command installation on Proxmox VE
+# Uses the community-maintained script by Tobias ([CrazyWolf13](https://github.com/CrazyWolf13))
+# at [community-scripts/ProxmoxVED](https://github.com/community-scripts/ProxmoxVED)
+curl -fsSL https://raw.githubusercontent.com/community-scripts/ProxmoxVED/main/install/gitea-mirror-install.sh | bash
+```
+
+**2. Local testing (offline-friendly, works on developer laptops)**
+
+```bash
+# Download the script
+curl -fsSL https://raw.githubusercontent.com/arunavo4/gitea-mirror/main/scripts/gitea-mirror-lxc-local.sh -o gitea-mirror-lxc-local.sh
+chmod +x gitea-mirror-lxc-local.sh
+
+# Run with your local repo directory
+sudo LOCAL_REPO_DIR=~/Development/gitea-mirror ./gitea-mirror-lxc-local.sh
+```
+
+Both scripts:
+- Set up a privileged Ubuntu 22.04 LXC container
+- Install Bun runtime environment
+- Build the application
+- Configure a systemd service
+- Start the service automatically
+
+The application includes a health check endpoint at `/api/health` for monitoring.
````
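For a quick smoke test of that endpoint once a container or LXC instance is up (these are the same probes the LXC guide later in this comparison uses):

```bash
# Basic check: returns 200 OK when the service is healthy
curl -I http://localhost:4321/api/health

# Detailed health information as JSON
curl http://localhost:4321/api/health
```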
````diff
+See the [LXC Container Deployment Guide](scripts/README-lxc.md) for detailed instructions.
+
 ##### Building Your Own Image

 For manual Docker builds (without the helper script):
````
````diff
@@ -179,9 +233,10 @@ The Docker container can be configured with the following environment variables:
 - `DATABASE_URL`: SQLite database URL (default: `file:data/gitea-mirror.db`)
 - `HOST`: Host to bind to (default: `0.0.0.0`)
 - `PORT`: Port to listen on (default: `4321`)
-- `JWT_SECRET`: Secret key for JWT token generation (important for security)
-- `REDIS_URL`: URL for Redis connection (required, default: none). When using Docker Compose, this should be set to `redis://redis:6379` to connect to the Redis container.
+- `JWT_SECRET`: Secret key for JWT token generation (auto-generated if not provided)

+> [!TIP]
+> For security, Gitea Mirror will automatically generate a secure random JWT secret on first run if one isn't provided or if the default value is used. This generated secret is stored in the data directory for persistence across container restarts.
````
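If you prefer to pin the secret yourself rather than rely on auto-generation, you can pre-seed the same file the entrypoint script (shown later in this comparison) reads, or pass the variable explicitly. A small sketch:

```bash
# Pre-seed the persisted secret file the entrypoint looks for (data/.jwt_secret)
openssl rand -hex 32 > data/.jwt_secret
chmod 600 data/.jwt_secret

# Or supply JWT_SECRET directly at run time
docker run -d -p 4321:4321 \
  -e JWT_SECRET="$(openssl rand -hex 32)" \
  -v gitea-mirror-data:/app/data \
  ghcr.io/arunavo4/gitea-mirror:latest
```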
````diff
 #### Manual Installation

@@ -191,40 +246,40 @@ git clone https://github.com/arunavo4/gitea-mirror.git
 cd gitea-mirror

 # Quick setup (installs dependencies and initializes the database)
-pnpm setup
+bun run setup

 # Development Mode Options

 # Run in development mode
-pnpm dev
+bun run dev

 # Run in development mode with clean database (removes existing DB first)
-pnpm dev:clean
+bun run dev:clean

 # Production Mode Options

 # Build the application
-pnpm build
+bun run build

 # Preview the production build
-pnpm preview
+bun run preview

 # Start the production server (default)
-pnpm start
+bun run start

 # Start the production server with a clean setup
-pnpm start:fresh
+bun run start:fresh

 # Database Management

 # Initialize the database
-pnpm init-db
+bun run init-db

 # Reset users for testing first-time signup
-pnpm reset-users
+bun run reset-users

 # Check database status
-pnpm check-db
+bun run check-db
 ```

 ### Configuration
````
````diff
@@ -239,7 +294,7 @@ Key configuration options include:
 - Scheduling options for automatic mirroring

 > [!IMPORTANT]
-> **Redis is a required component for Gitea Mirror** as it's used for job queuing and caching.
+> **SQLite is the only database required for Gitea Mirror**, handling both data storage and real-time event notifications.
````
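How a single SQLite file can stand in for Redis here is easiest to see against the `events` table that the entrypoint script in this comparison creates. A minimal publish/poll sketch in Bun; the function names are illustrative, not the project's actual `src/lib/events` API:

```typescript
import { Database } from "bun:sqlite";

// The same SQLite file the app points DATABASE_URL at
const db = new Database("data/gitea-mirror.db");

// Publish: append a row to the events table (schema from docker-entrypoint.sh)
function publish(userId: string, channel: string, payload: unknown) {
  db.query(
    "INSERT INTO events (id, user_id, channel, payload) VALUES (?, ?, ?, ?)"
  ).run(crypto.randomUUID(), userId, channel, JSON.stringify(payload));
}

// Subscribe: poll unread rows for a channel, hand them off, mark them read
function drain(userId: string, channel: string) {
  const rows = db
    .query("SELECT id, payload FROM events WHERE user_id = ? AND channel = ? AND read = 0")
    .all(userId, channel) as { id: string; payload: string }[];
  for (const row of rows) {
    console.log("event:", JSON.parse(row.payload));
    db.query("UPDATE events SET read = 1 WHERE id = ?").run(row.id);
  }
}
```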
````diff
 ## 🚀 Development

@@ -247,10 +302,10 @@ Key configuration options include:
 ```bash
 # Install dependencies
-pnpm setup
+bun run setup

 # Start the development server
-pnpm dev
+bun run dev
 ```
````
````diff
@@ -330,12 +385,12 @@ docker compose -f docker-compose.dev.yml up -d
 > [!TIP]
 > You can also create a `.env` file with your GitHub and Gitea credentials:
 >
 > ```env
 > # GitHub credentials
 > GITHUB_TOKEN=your-github-token
 > GITHUB_USERNAME=your-github-username
 >
 > # Gitea credentials (will be set up after you create a user in the local Gitea instance)
 > GITEA_TOKEN=your-local-gitea-token
 > GITEA_USERNAME=your-local-gitea-username
````
````diff
@@ -344,10 +399,10 @@ docker compose -f docker-compose.dev.yml up -d
 ## Technologies Used

 - **Frontend**: Astro, React, Shadcn UI, Tailwind CSS v4
-- **Backend**: Node.js
-- **Database**: SQLite (default) or PostgreSQL
-- **Caching/Queue**: Redis
+- **Backend**: Bun
+- **Database**: SQLite (handles both data storage and event notifications)
 - **API Integration**: GitHub API (Octokit), Gitea API
 - **Deployment Options**: Docker containers, LXC containers (Proxmox VE and local testing)

 ## Contributing
````
````diff
@@ -357,27 +412,6 @@ Contributions are welcome! Please feel free to submit a Pull Request.

 This project is licensed under the MIT License - see the [LICENSE](LICENSE) file for details.

-## Project Status
-
-This project is now complete and ready for production use with version 1.0.0. All planned features have been implemented, thoroughly tested, and optimized for performance:
-
-- ✅ User-friendly dashboard with status overview
-- ✅ Repository management interface
-- ✅ Organization management interface
-- ✅ Configuration management for GitHub and Gitea
-- ✅ Scheduling and automation
-- ✅ Activity logging and monitoring
-- ✅ Responsive design for all screen sizes
-- ✅ Modern toast notifications for better user feedback
-- ✅ First-time user signup experience
-- ✅ Better error handling and user guidance
-- ✅ Comprehensive error handling
-- ✅ Unit tests for components and API
-- ✅ Direct GitHub to Gitea mirroring (no external dependencies)
-- ✅ Docker and docker-compose support for easy deployment
-- ✅ Multi-architecture support (ARM64 and x86_64)
-- ✅ Light/dark mode toggle
-- ✅ Persistent configuration storage

 ## Troubleshooting
````
````diff
@@ -385,14 +419,14 @@ This project is now complete and ready for production use with version 1.0.0. Al
 > [!WARNING]
 > If you encounter network-related warnings or errors when running Docker Compose, such as:
 >
 > ```
 > WARN[0095] a network with name gitea-network exists but was not created by compose.
 > Set `external: true` to use an existing network
 > ```
 >
 > or
 >
 > ```
 > network gitea-network was found but has incorrect label com.docker.compose.network set to "" (expected: "gitea-network")
 > ```
````
````diff
@@ -416,7 +450,7 @@ Try the following steps:
 > [!TIP]
 > If you need to share the network with other Docker Compose projects, you can modify the `docker-compose.dev.yml` file to mark the network as external:
 >
 > ```yaml
 > networks:
 >   gitea-network:
````
````diff
@@ -424,62 +458,60 @@
 >     external: true
 > ```

-### Redis Connection Issues
-
-> [!CAUTION]
-> If the application fails to connect to Redis with errors like `ECONNREFUSED 127.0.0.1:6379`, ensure:
->
-> 1. The Redis container is running:
->    ```bash
->    docker ps | grep redis
->    ```
-> 2. The `REDIS_URL` environment variable is correctly set to `redis://redis:6379` in your Docker Compose file.
-> 3. Both the application and Redis containers are on the same Docker network.
-> 4. If running without Docker Compose, ensure you've started a Redis container and linked it properly:
->    ```bash
->    # Start Redis container
->    docker run -d --name gitea-mirror-redis redis:alpine
->    # Run application with link to Redis
->    docker run -d -p 4321:4321 --link gitea-mirror-redis:redis \
->      -e REDIS_URL=redis://redis:6379 \
->      ghcr.io/arunavo4/gitea-mirror:latest
->    ```
-
-#### Improving Redis Connection Resilience
+### Database Persistence

 > [!TIP]
-> For better Redis connection handling, you can modify the `src/lib/redis.ts` file to include retry logic and better error handling:
+> The application uses SQLite for all data storage and event notifications. Make sure the database file is properly mounted when using Docker:
+>
+> ```bash
+> # Run with a volume for persistent data storage
+> docker run -d -p 4321:4321 \
+>   -v gitea-mirror-data:/app/data \
+>   ghcr.io/arunavo4/gitea-mirror:latest
+> ```
+>
+> For homelab/self-hosted setups, you can use the provided Docker Compose file with automatic event cleanup:
+>
+> ```bash
+> # Clone the repository
+> git clone https://github.com/arunavo4/gitea-mirror.git
+> cd gitea-mirror
+>
+> # Start the application with Docker Compose
+> docker-compose -f docker-compose.homelab.yml up -d
+> ```
+>
+> This setup includes a cron job that runs daily to clean up old events and prevent the database from growing too large.

-```typescript
-import Redis from "ioredis";
-
-// Connect to Redis using REDIS_URL environment variable or default to redis://redis:6379
-const redisUrl = process.env.REDIS_URL ?? 'redis://redis:6379';
-
-console.log(`Connecting to Redis at: ${redisUrl}`);
-
-// Configure Redis client with connection options
-const redisOptions = {
-  retryStrategy: (times) => {
-    // Retry with exponential backoff up to 30 seconds
-    const delay = Math.min(times * 100, 3000);
-    console.log(`Redis connection attempt ${times} failed. Retrying in ${delay}ms...`);
-    return delay;
-  },
-  maxRetriesPerRequest: 5,
-  enableReadyCheck: true,
-  connectTimeout: 10000,
-};
-
-export const redis = new Redis(redisUrl, redisOptions);
-export const redisPublisher = new Redis(redisUrl, redisOptions);
-export const redisSubscriber = new Redis(redisUrl, redisOptions);
-
-// Log connection events
-redis.on('connect', () => console.log('Redis client connected'));
-redis.on('error', (err) => console.error('Redis client error:', err));
-```
+#### Database Maintenance
+
+> [!TIP]
+> For database maintenance, you can use the provided scripts:
+>
+> ```bash
+> # Check database integrity
+> bun run check-db
+>
+> # Fix database issues
+> bun run fix-db
+>
+> # Reset user accounts (for development)
+> bun run reset-users
+>
+> # Clean up old events (keeps last 7 days by default)
+> bun run cleanup-events
+>
+> # Clean up old events with custom retention period (e.g., 30 days)
+> bun run cleanup-events 30
+> ```
+>
+> For automated maintenance, consider setting up a cron job to run the cleanup script periodically:
+>
+> ```bash
+> # Add this to your crontab (runs daily at 2 AM)
+> 0 2 * * * cd /path/to/gitea-mirror && bun run cleanup-events
+> ```

 > [!NOTE]
@@ -494,13 +526,13 @@ redis.on('error', (err) => console.error('Redis client error:', err));
 > [!TIP]
 > If containers are not starting properly, check their health status:
 >
 > ```bash
 > docker ps --format "{{.Names}}: {{.Status}}"
 > ```
 >
 > For more detailed logs:
 >
 > ```bash
 > docker logs gitea-mirror-dev
 > ```
````
**astro.config.mjs** (filename inferred from content) · diff

````diff
@@ -11,7 +11,12 @@ export default defineConfig({
     mode: 'standalone',
   }),
   vite: {
-    plugins: [tailwindcss()]
+    plugins: [tailwindcss()],
+    build: {
+      rollupOptions: {
+        external: ['bun']
+      }
+    }
   },
   integrations: [react()]
 });
````
**crontab** · 4 additions (new file)

```
# Run event cleanup daily at 2 AM
0 2 * * * cd /app && bun run cleanup-events 30 >> /app/data/cleanup-events.log 2>&1

# Empty line at the end is required for cron to work properly
```
**docker-compose.dev.yml** (filename inferred from content) · diff

````diff
@@ -28,7 +28,7 @@ services:
     networks:
       - gitea-network
     healthcheck:
-      test: ["CMD", "wget", "--no-verbose", "--tries=1", "--spider", "http://localhost:3000/"]
+      test: ["CMD", "wget", "--no-verbose", "--tries=1", "--spider", "http://localhost:3000/api/healthz"]
       interval: 30s
       timeout: 5s
       retries: 3
@@ -51,7 +51,6 @@ services:
       - gitea-mirror-data:/app/data
     depends_on:
       - gitea
-      - redis
     environment:
       - NODE_ENV=development
       - DATABASE_URL=file:data/gitea-mirror.db
@@ -75,9 +74,8 @@ services:
       - GITEA_ORGANIZATION=${GITEA_ORGANIZATION:-github-mirrors}
       - GITEA_ORG_VISIBILITY=${GITEA_ORG_VISIBILITY:-public}
       - DELAY=${DELAY:-3600}
-      - REDIS_URL=redis://redis:6379
     healthcheck:
-      test: ["CMD", "wget", "--no-verbose", "--tries=1", "--spider", "http://localhost:4321/"]
+      test: ["CMD", "wget", "--no-verbose", "--tries=1", "--spider", "http://localhost:4321/api/health"]
       interval: 30s
       timeout: 5s
       retries: 3
@@ -85,16 +83,7 @@ services:
     networks:
       - gitea-network

-  redis:
-    image: redis:7-alpine
-    container_name: redis
-    restart: unless-stopped
-    ports:
-      - "6379:6379"
-    volumes:
-      - redis-data:/data
-    networks:
-      - gitea-network

 # Define named volumes for data persistence
 volumes:
@@ -102,8 +91,6 @@ volumes:
   gitea-config: # Gitea config volume
   gitea-mirror-data: # Gitea Mirror database volume
-
-  redis-data:

 # Define networks
 networks:
   gitea-network:
````
**docker-compose.homelab.yml** · 38 additions (new file)

```yaml
version: '3.8'

services:
  gitea-mirror:
    image: ghcr.io/arunavo4/gitea-mirror:latest
    container_name: gitea-mirror
    restart: unless-stopped
    ports:
      - "4321:4321"
    volumes:
      - gitea-mirror-data:/app/data
      # Mount the crontab file
      - ./crontab:/etc/cron.d/gitea-mirror-cron
    environment:
      - NODE_ENV=production
      - HOST=0.0.0.0
      - PORT=4321
      - DATABASE_URL=sqlite://data/gitea-mirror.db
      - DELAY=${DELAY:-3600}
    healthcheck:
      test: ["CMD", "curl", "-f", "http://localhost:4321/api/health"]
      interval: 1m
      timeout: 10s
      retries: 3
      start_period: 30s
    # Install cron in the container and set up the cron job
    command: >
      sh -c "
        apt-get update && apt-get install -y cron curl &&
        chmod 0644 /etc/cron.d/gitea-mirror-cron &&
        crontab /etc/cron.d/gitea-mirror-cron &&
        service cron start &&
        bun dist/server/entry.mjs
      "

# Define named volumes for database persistence
volumes:
  gitea-mirror-data: # Database volume
```
**docker-compose.yml** (filename inferred from content) · diff

````diff
@@ -19,8 +19,6 @@ services:
       - "4321:4321"
     volumes:
       - gitea-mirror-data:/app/data
-    depends_on:
-      - redis
     environment:
       - NODE_ENV=production
       - DATABASE_URL=file:data/gitea-mirror.db
@@ -44,25 +42,14 @@ services:
       - GITEA_ORGANIZATION=${GITEA_ORGANIZATION:-github-mirrors}
       - GITEA_ORG_VISIBILITY=${GITEA_ORG_VISIBILITY:-public}
       - DELAY=${DELAY:-3600}
-      - REDIS_URL=redis://redis:6379
     healthcheck:
-      test: ["CMD", "wget", "--no-verbose", "--tries=3", "--spider", "http://localhost:4321/"]
+      test: ["CMD", "wget", "--no-verbose", "--tries=3", "--spider", "http://localhost:4321/api/health"]
       interval: 30s
       timeout: 10s
       retries: 5
       start_period: 15s
     profiles: ["production"]

-  redis:
-    image: redis:7-alpine
-    container_name: redis
-    restart: unless-stopped
-    ports:
-      - "6379:6379"
-    volumes:
-      - redis-data:/data

 # Define named volumes for database persistence
 volumes:
   gitea-mirror-data: # Database volume
-  redis-data:
````
**docker-entrypoint.sh** (filename inferred from content) · diff

````diff
@@ -5,19 +5,43 @@ set -e
 # Ensure data directory exists
 mkdir -p /app/data

-# If pnpm is available, run setup (for dev images), else run node init directly
-if command -v pnpm >/dev/null 2>&1; then
-  echo "Running pnpm setup (if needed)..."
-  pnpm setup || true
+# Generate a secure JWT secret if one isn't provided or is using the default value
+JWT_SECRET_FILE="/app/data/.jwt_secret"
+if [ "$JWT_SECRET" = "your-secret-key-change-this-in-production" ] || [ -z "$JWT_SECRET" ]; then
+  # Check if we have a previously generated secret
+  if [ -f "$JWT_SECRET_FILE" ]; then
+    echo "Using previously generated JWT secret"
+    export JWT_SECRET=$(cat "$JWT_SECRET_FILE")
+  else
+    echo "Generating a secure random JWT secret"
+    # Try to generate a secure random string using OpenSSL
+    if command -v openssl >/dev/null 2>&1; then
+      GENERATED_SECRET=$(openssl rand -hex 32)
+    else
+      # Fallback to using /dev/urandom if openssl is not available
+      echo "OpenSSL not found, using fallback method for random generation"
+      GENERATED_SECRET=$(head -c 32 /dev/urandom | sha256sum | cut -d' ' -f1)
+    fi
+    export JWT_SECRET="$GENERATED_SECRET"
+    # Save the secret to a file for persistence across container restarts
+    echo "$GENERATED_SECRET" > "$JWT_SECRET_FILE"
+    chmod 600 "$JWT_SECRET_FILE"
+  fi
+  echo "JWT_SECRET has been set to a secure random value"
+fi
+
+# Skip dependency installation entirely for pre-built images
+# Dependencies are already installed during the Docker build process

 # Initialize the database if it doesn't exist
 if [ ! -f "/app/data/gitea-mirror.db" ]; then
   echo "Initializing database..."
   if [ -f "dist/scripts/init-db.js" ]; then
-    node dist/scripts/init-db.js
+    bun dist/scripts/init-db.js
   elif [ -f "dist/scripts/manage-db.js" ]; then
-    node dist/scripts/manage-db.js init
+    bun dist/scripts/manage-db.js init
+  elif [ -f "scripts/manage-db.ts" ]; then
+    bun scripts/manage-db.ts init
   else
     echo "Warning: Could not find database initialization scripts in dist/scripts."
     echo "Creating and initializing database manually..."
@@ -111,23 +135,75 @@ if [ ! -f "/app/data/gitea-mirror.db" ]; then
       status TEXT NOT NULL DEFAULT 'imported',
       message TEXT NOT NULL,
       timestamp TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP,
+
+      -- New fields for job resilience
+      job_type TEXT NOT NULL DEFAULT 'mirror',
+      batch_id TEXT,
+      total_items INTEGER,
+      completed_items INTEGER DEFAULT 0,
+      item_ids TEXT, -- JSON array as text
+      completed_item_ids TEXT DEFAULT '[]', -- JSON array as text
+      in_progress INTEGER NOT NULL DEFAULT 0, -- Boolean as integer
+      started_at TIMESTAMP,
+      completed_at TIMESTAMP,
+      last_checkpoint TIMESTAMP,

       FOREIGN KEY (user_id) REFERENCES users(id)
     );

     CREATE INDEX IF NOT EXISTS idx_mirror_jobs_user_id ON mirror_jobs(user_id);
+    CREATE INDEX IF NOT EXISTS idx_mirror_jobs_batch_id ON mirror_jobs(batch_id);
+    CREATE INDEX IF NOT EXISTS idx_mirror_jobs_in_progress ON mirror_jobs(in_progress);
+    CREATE INDEX IF NOT EXISTS idx_mirror_jobs_job_type ON mirror_jobs(job_type);
+    CREATE INDEX IF NOT EXISTS idx_mirror_jobs_timestamp ON mirror_jobs(timestamp);
+
+    CREATE TABLE IF NOT EXISTS events (
+      id TEXT PRIMARY KEY,
+      user_id TEXT NOT NULL,
+      channel TEXT NOT NULL,
+      payload TEXT NOT NULL,
+      read INTEGER NOT NULL DEFAULT 0,
+      created_at INTEGER NOT NULL DEFAULT (strftime('%s','now')),
+      FOREIGN KEY (user_id) REFERENCES users(id)
+    );
+
+    CREATE INDEX IF NOT EXISTS idx_events_user_channel ON events(user_id, channel);
+    CREATE INDEX IF NOT EXISTS idx_events_created_at ON events(created_at);
+    CREATE INDEX IF NOT EXISTS idx_events_read ON events(read);
 EOF
     echo "Database initialized with required tables."
   fi
 else
   echo "Database already exists, checking for issues..."
   if [ -f "dist/scripts/fix-db-issues.js" ]; then
-    node dist/scripts/fix-db-issues.js
+    bun dist/scripts/fix-db-issues.js
   elif [ -f "dist/scripts/manage-db.js" ]; then
-    node dist/scripts/manage-db.js fix
+    bun dist/scripts/manage-db.js fix
+  elif [ -f "scripts/manage-db.ts" ]; then
+    bun scripts/manage-db.ts fix
   fi

-  # Since the application is not used by anyone yet, we've removed the schema updates and migrations
-  echo "Database already exists, no migrations needed."
+  # Run database migrations
+  echo "Running database migrations..."
+
+  # Update mirror_jobs table with new columns for resilience
+  if [ -f "dist/scripts/update-mirror-jobs-table.js" ]; then
+    echo "Updating mirror_jobs table..."
+    bun dist/scripts/update-mirror-jobs-table.js
+  elif [ -f "scripts/update-mirror-jobs-table.ts" ]; then
+    echo "Updating mirror_jobs table using TypeScript script..."
+    bun scripts/update-mirror-jobs-table.ts
+  else
+    echo "Warning: Could not find mirror_jobs table update script."
+  fi
 fi

 # Extract version from package.json and set as environment variable
 if [ -f "package.json" ]; then
   export npm_package_version=$(grep -o '"version": *"[^"]*"' package.json | cut -d'"' -f4)
   echo "Setting application version: $npm_package_version"
 fi

 # Start the application
 echo "Starting Gitea Mirror..."
-exec node ./dist/server/entry.mjs
+exec bun ./dist/server/entry.mjs
````
**docs/testing.md** · 127 additions (new file)

# Testing in Gitea Mirror

This document provides guidance on testing in the Gitea Mirror project.

## Current Status

The project now uses Bun's built-in test runner, which is Jest-compatible and provides a fast, reliable testing experience. We've migrated away from Vitest due to compatibility issues with Bun.

## Running Tests

To run tests, use the following commands:

```bash
# Run all tests
bun test

# Run tests in watch mode (automatically re-run when files change)
bun test --watch

# Run tests with coverage reporting
bun test --coverage
```

## Test File Naming Conventions

Bun's test runner automatically discovers test files that match the following patterns:

- `*.test.{js|jsx|ts|tsx}`
- `*_test.{js|jsx|ts|tsx}`
- `*.spec.{js|jsx|ts|tsx}`
- `*_spec.{js|jsx|ts|tsx}`

## Writing Tests

The project uses Bun's test runner with a Jest-compatible API. Here's an example test:

```typescript
// example.test.ts
import { describe, test, expect } from "bun:test";

describe("Example Test", () => {
  test("should pass", () => {
    expect(true).toBe(true);
  });
});
```

### Testing React Components

For testing React components, we use React Testing Library:

```typescript
// component.test.tsx
import { describe, test, expect } from "bun:test";
import { render, screen } from "@testing-library/react";
import MyComponent from "../components/MyComponent";

describe("MyComponent", () => {
  test("renders correctly", () => {
    render(<MyComponent />);
    expect(screen.getByText("Hello World")).toBeInTheDocument();
  });
});
```

## Test Setup

The test setup is defined in `src/tests/setup.bun.ts` and includes:

- Automatic cleanup after each test
- Setup for any global test environment needs
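The setup file itself isn't included in this comparison; a minimal sketch of what such a `setup.bun.ts` could look like, assuming it simply wires React Testing Library's `cleanup` into Bun's `afterEach` hook:

```typescript
// src/tests/setup.bun.ts (illustrative sketch, not the repository's actual file)
import { afterEach } from "bun:test";
import { cleanup } from "@testing-library/react";

// Unmount rendered React trees after every test so cases stay isolated
afterEach(() => {
  cleanup();
});
```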
## Mocking

Bun's test runner provides built-in mocking capabilities:

```typescript
import { test, expect, mock } from "bun:test";

// Create a mock function
const mockFn = mock(() => "mocked value");

test("mock function", () => {
  const result = mockFn();
  expect(result).toBe("mocked value");
  expect(mockFn).toHaveBeenCalled();
});

// Mock a module
mock.module("./some-module", () => {
  return {
    someFunction: () => "mocked module function"
  };
});
```

## CI Integration

The CI workflow has been updated to use Bun's test runner. Tests are automatically run as part of the CI pipeline.

## Test Coverage

To generate test coverage reports, run:

```bash
bun test --coverage
```

This will generate a coverage report in the `coverage` directory.

## Types of Tests

The project includes several types of tests:

1. **Unit Tests**: Testing individual functions and utilities
2. **API Tests**: Testing API endpoints
3. **Component Tests**: Testing React components
4. **Integration Tests**: Testing how components work together

## Future Improvements

When expanding the test suite, consider:

1. Adding more comprehensive API endpoint tests
2. Increasing component test coverage
3. Setting up end-to-end tests with a tool like Playwright
4. Adding performance tests for critical paths
**package.json** · 87 changes

````diff
@@ -1,87 +1,86 @@
 {
   "name": "gitea-mirror",
   "type": "module",
-  "version": "1.0.0",
+  "version": "2.5.4",
   "engines": {
-    "node": ">=22.0.0"
+    "bun": ">=1.2.9"
   },
   "scripts": {
-    "setup": "pnpm install && pnpm manage-db init",
-    "dev": "astro dev",
-    "dev:clean": "pnpm cleanup-db && pnpm manage-db init && astro dev",
-    "build": "astro build",
+    "setup": "bun install && bun run manage-db init && bun run update-db",
+    "dev": "bunx --bun astro dev",
+    "dev:clean": "bun run cleanup-db && bun run manage-db init && bun run update-db && bunx --bun astro dev",
+    "build": "bunx --bun astro build",
     "cleanup-db": "rm -f gitea-mirror.db data/gitea-mirror.db",
-    "manage-db": "tsx scripts/manage-db.ts",
-    "init-db": "tsx scripts/manage-db.ts init",
-    "check-db": "tsx scripts/manage-db.ts check",
-    "fix-db": "tsx scripts/manage-db.ts fix",
-    "reset-users": "tsx scripts/manage-db.ts reset-users",
-    "preview": "astro preview",
-    "start": "node dist/server/entry.mjs",
-    "start:fresh": "pnpm cleanup-db && pnpm manage-db init && node dist/server/entry.mjs",
-    "test": "vitest run",
-    "test:watch": "vitest",
-    "astro": "astro"
+    "manage-db": "bun scripts/manage-db.ts",
+    "init-db": "bun scripts/manage-db.ts init",
+    "update-db": "bun scripts/update-mirror-jobs-table.ts",
+    "check-db": "bun scripts/manage-db.ts check",
+    "fix-db": "bun scripts/manage-db.ts fix",
+    "reset-users": "bun scripts/manage-db.ts reset-users",
+    "cleanup-events": "bun scripts/cleanup-events.ts",
+    "preview": "bunx --bun astro preview",
+    "start": "bun dist/server/entry.mjs",
+    "start:fresh": "bun run cleanup-db && bun run manage-db init && bun run update-db && bun dist/server/entry.mjs",
+    "test": "bun test",
+    "test:watch": "bun test --watch",
+    "test:coverage": "bun test --coverage",
+    "astro": "bunx --bun astro"
   },
   "dependencies": {
     "@astrojs/mdx": "^4.2.6",
     "@astrojs/node": "^9.2.1",
     "@astrojs/react": "^4.2.7",
     "@libsql/client": "^0.15.4",
     "@octokit/rest": "^21.1.1",
-    "@radix-ui/react-avatar": "^1.1.4",
-    "@radix-ui/react-checkbox": "^1.1.5",
-    "@radix-ui/react-dialog": "^1.1.7",
-    "@radix-ui/react-dropdown-menu": "^2.1.7",
+    "@radix-ui/react-avatar": "^1.1.9",
+    "@radix-ui/react-checkbox": "^1.3.1",
+    "@radix-ui/react-dialog": "^1.1.13",
+    "@radix-ui/react-dropdown-menu": "^2.1.14",
     "@radix-ui/react-label": "^2.1.6",
     "@radix-ui/react-popover": "^1.1.13",
     "@radix-ui/react-radio-group": "^1.3.6",
-    "@radix-ui/react-select": "^2.1.7",
-    "@radix-ui/react-slot": "^1.2.0",
-    "@radix-ui/react-tabs": "^1.1.4",
+    "@radix-ui/react-select": "^2.2.4",
+    "@radix-ui/react-slot": "^1.2.2",
+    "@radix-ui/react-tabs": "^1.1.11",
     "@radix-ui/react-tooltip": "^1.2.6",
-    "@tailwindcss/vite": "^4.1.3",
+    "@tailwindcss/vite": "^4.1.7",
     "@tanstack/react-virtual": "^3.13.8",
     "@types/canvas-confetti": "^1.9.0",
-    "@types/react": "^19.1.2",
-    "@types/react-dom": "^19.1.2",
-    "astro": "^5.7.10",
-    "axios": "^1.8.4",
+    "@types/react": "^19.1.4",
+    "@types/react-dom": "^19.1.5",
+    "astro": "^5.7.13",
+    "axios": "^1.9.0",
     "bcryptjs": "^3.0.2",
     "canvas-confetti": "^1.9.3",
     "class-variance-authority": "^0.7.1",
     "clsx": "^2.1.1",
     "cmdk": "^1.1.1",
-    "drizzle-orm": "^0.41.0",
+    "drizzle-orm": "^0.43.1",
     "fuse.js": "^7.1.0",
-    "ioredis": "^5.6.1",
     "jsonwebtoken": "^9.0.2",
-    "lucide-react": "^0.488.0",
+    "lucide-react": "^0.511.0",
     "next-themes": "^0.4.6",
     "react": "^19.1.0",
     "react-dom": "^19.1.0",
     "react-icons": "^5.5.0",
     "sonner": "^2.0.3",
-    "superagent": "^10.2.0",
-    "tailwind-merge": "^3.2.0",
-    "tailwindcss": "^4.1.3",
-    "tw-animate-css": "^1.2.5",
+    "superagent": "^10.2.1",
+    "tailwind-merge": "^3.3.0",
+    "tailwindcss": "^4.1.7",
+    "tw-animate-css": "^1.3.0",
     "uuid": "^11.1.0",
-    "zod": "^3.24.2"
+    "zod": "^3.25.7"
   },
   "devDependencies": {
     "@testing-library/jest-dom": "^6.6.3",
     "@testing-library/react": "^16.3.0",
     "@types/bcryptjs": "^3.0.0",
-    "@types/better-sqlite3": "^7.6.13",
     "@types/jsonwebtoken": "^9.0.9",
     "@types/superagent": "^8.1.9",
     "@types/uuid": "^10.0.0",
-    "@vitejs/plugin-react": "^4.4.0",
-    "better-sqlite3": "^9.6.0",
+    "@vitejs/plugin-react": "^4.4.1",
     "jsdom": "^26.1.0",
-    "tsx": "^4.19.3",
-    "vitest": "^3.1.1"
+    "tsx": "^4.19.4",
+    "vitest": "^3.1.4"
   },
-  "packageManager": "pnpm@10.10.0"
+  "packageManager": "bun@1.2.9"
 }
````
**pnpm-lock.yaml** · 7713 changes (generated)

File diff suppressed because it is too large.
**scripts/README-docker.md** (filename inferred from content) · diff

````diff
@@ -43,7 +43,7 @@ The script uses environment variables from the `.env` file in the project root:
 3. Using with docker-compose:
    ```bash
    # Ensure dependencies are installed and database is initialized
-   pnpm setup
+   bun run setup

    # First build the image
    ./scripts/build-docker.sh --load
````
**scripts/README-lxc.md** · 132 additions (new file)

# LXC Container Deployment Guide

## Overview
Run **Gitea Mirror** in an isolated LXC container, either:

1. **Online, on a Proxmox VE host** – script pulls everything from GitHub
2. **Offline / LAN-only, on a developer laptop** – script pushes your local checkout + Bun ZIP

---

## 1. Proxmox VE (online, recommended for prod)

### Prerequisites
* Proxmox VE node with the default `vmbr0` bridge
* Root shell on the node
* Ubuntu 22.04 LXC template present (`pveam update && pveam download ...`)

### One-command install

```bash
# Community-maintained script for Proxmox VE by Tobias ([CrazyWolf13](https://github.com/CrazyWolf13))
# at [community-scripts/ProxmoxVED](https://github.com/community-scripts/ProxmoxVED)
sudo bash -c "$(curl -fsSL https://raw.githubusercontent.com/community-scripts/ProxmoxVED/main/install/gitea-mirror-install.sh)"
```

What it does:

* Uses the community-maintained script from ProxmoxVED
* Installs dependencies and Bun runtime
* Clones & builds `arunavo4/gitea-mirror`
* Creates a systemd service and starts it
* Sets up a random `JWT_SECRET` for security

Browse to:

```
http://<container-ip>:4321
```

---

## 2. Local testing (LXD on a workstation, works offline)

### Prerequisites

* `lxd` installed (`sudo apt install lxd`; `lxd init --auto`)
* Your repo cloned locally – e.g. `~/Development/gitea-mirror`
* Bun ZIP downloaded once:
  `https://github.com/oven-sh/bun/releases/latest/download/bun-linux-x64.zip`

### Offline installer script

```bash
git clone https://github.com/arunavo4/gitea-mirror.git   # if not already
curl -fsSL https://raw.githubusercontent.com/arunavo4/gitea-mirror/main/scripts/gitea-mirror-lxc-local.sh -o gitea-mirror-lxc-local.sh
chmod +x gitea-mirror-lxc-local.sh

sudo LOCAL_REPO_DIR=~/Development/gitea-mirror \
     ./gitea-mirror-lxc-local.sh
```

What it does:

* Launches privileged LXC `gitea-test` (`lxc launch ubuntu:22.04 ...`)
* Pushes **Bun ZIP** + tarred **local repo** into `/opt`
* Unpacks, builds, initializes DB
* Symlinks both `bun` and `bunx` → `/usr/local/bin`
* Creates a root systemd unit and starts it

Access from host:

```
http://$(lxc exec gitea-test -- hostname -I | awk '{print $1}'):4321
```

(Optional) forward to host localhost:

```bash
sudo lxc config device add gitea-test mirror proxy \
  listen=tcp:0.0.0.0:4321 connect=tcp:127.0.0.1:4321
```

---

## Health-check endpoint

Gitea Mirror includes a built-in health check endpoint at `/api/health` that provides:

- System status and uptime
- Database connectivity check
- Memory usage statistics
- Environment information

You can use this endpoint for monitoring your deployment:

```bash
# Basic check (returns 200 OK if healthy)
curl -I http://<container-ip>:4321/api/health

# Detailed health information (JSON)
curl http://<container-ip>:4321/api/health
```
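A response body might look roughly like this; the field names below are illustrative guesses based on the list above, not the endpoint's documented schema:

```json
{
  "status": "ok",
  "uptime": 86400,
  "database": { "connected": true },
  "memory": { "rssBytes": 123456789 },
  "environment": "production",
  "version": "2.5.4"
}
```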
---

## Troubleshooting

| Check          | Command                                               |
| -------------- | ----------------------------------------------------- |
| Service status | `systemctl status gitea-mirror`                       |
| Live logs      | `journalctl -u gitea-mirror -f`                       |
| Verify Bun     | `bun --version && bunx --version`                     |
| DB perms       | `chown -R root:root /opt/gitea-mirror/data` (Proxmox) |

---

## Connecting LXC and Docker Containers

If you need your LXC container to communicate with Docker containers:

1. On your host machine, create a bridge network:
   ```bash
   docker network create gitea-network
   ```

2. Find the bridge interface created by Docker:
   ```bash
   ip a | grep docker
   # Look for something like docker0 or br-xxxxxxxx
   ```

3. In Proxmox, edit the LXC container's network configuration to use this bridge.
**scripts/README.md** (filename inferred from content) · diff

````diff
@@ -1,12 +1,14 @@
 # Scripts Directory

-This folder contains utility scripts for database management.
+This folder contains utility scripts for database management, event management, Docker builds, and LXC container deployment.

-## Database Management Tool (manage-db.ts)
+## Database Management
+
+### Database Management Tool (manage-db.ts)

 This is a consolidated database management tool that handles all database-related operations. It combines the functionality of the previous separate scripts into a single, more intelligent script that can check, fix, and initialize the database as needed.

-### Features
+#### Features

 - **Check Mode**: Validates the existence and integrity of the database
 - **Init Mode**: Creates the database only if it doesn't already exist
@@ -14,45 +16,108 @@ This is a consolidated database management tool that handles all database-relate
 - **Reset Users Mode**: Removes all users and their data
 - **Auto Mode**: Automatically checks, fixes, and initializes the database if needed

-## Running the Database Management Tool
+#### Running the Database Management Tool

 You can execute the database management tool using your package manager with various commands:

 ```bash
-# Checks database status (default action if no command is specified, equivalent to 'pnpm check-db')
-pnpm manage-db
+# Checks database status (default action if no command is specified)
+bun run manage-db

 # Check database status
-pnpm check-db
+bun run check-db

 # Initialize the database (only if it doesn't exist)
-pnpm init-db
+bun run init-db

 # Fix database location issues
-pnpm fix-db
+bun run fix-db

 # Automatic check, fix, and initialize if needed
-pnpm db-auto
+bun run db-auto

 # Reset all users (for testing signup flow)
-pnpm reset-users
-
-# Update the database schema to the latest version
-pnpm update-schema
+bun run reset-users

 # Remove database files completely
-pnpm cleanup-db
+bun run cleanup-db

 # Complete setup (install dependencies and initialize database)
-pnpm setup
+bun run setup

 # Start development server with a fresh database
-pnpm dev:clean
+bun run dev:clean

 # Start production server with a fresh database
-pnpm start:fresh
+bun run start:fresh
 ```

-## Database File Location
+#### Database File Location

 The database file should be located in the `./data/gitea-mirror.db` directory. If the file is found in the root directory, the fix mode will move it to the correct location.

+## Event Management
+
+The following scripts help manage events in the SQLite database:
+
+### Event Inspection (check-events.ts)
+
+Displays all events currently stored in the database.
+
+```bash
+bun scripts/check-events.ts
+```
+
+### Event Cleanup (cleanup-events.ts)
+
+Removes old events from the database to prevent it from growing too large.
+
+```bash
+# Remove events older than 7 days (default)
+bun scripts/cleanup-events.ts
+
+# Remove events older than X days
+bun scripts/cleanup-events.ts 14
+```
+
+This script can be scheduled to run periodically (e.g., daily) using cron or another scheduler.
````
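`cleanupOldEvents` itself lives in `src/lib/events`, which this comparison doesn't include; given that the `events` table stores `created_at` as epoch seconds (see the schema in `docker-entrypoint.sh` above), its core plausibly reduces to a single `DELETE`. A sketch:

```typescript
import { Database } from "bun:sqlite";

// Illustrative sketch only, not the actual src/lib/events implementation
export function cleanupOldEvents(db: Database, daysToKeep: number): number {
  const cutoff = Math.floor(Date.now() / 1000) - daysToKeep * 86_400;
  const result = db.query("DELETE FROM events WHERE created_at < ?").run(cutoff);
  // bun:sqlite reports the number of affected rows on `changes`
  return Number(result.changes);
}
```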
````diff
+### Mark Events as Read (mark-events-read.ts)
+
+Marks all unread events as read.
+
+```bash
+bun scripts/mark-events-read.ts
+```
````
### Make Events Appear Older (make-events-old.ts)
|
||||
|
||||
For testing purposes, this script modifies event timestamps to make them appear older.
|
||||
|
||||
```bash
|
||||
bun scripts/make-events-old.ts
|
||||
```
|
||||
|
||||
## Deployment Scripts
|
||||
|
||||
### Docker Deployment
|
||||
|
||||
- **build-docker.sh**: Builds the Docker image for the application
|
||||
- **docker-diagnostics.sh**: Provides diagnostic information for Docker deployments
|
||||
|
||||
### LXC Container Deployment
|
||||
|
||||
Two deployment options are available for LXC containers:
|
||||
|
||||
1. **Proxmox VE (online)**: Using the community-maintained script by Tobias ([CrazyWolf13](https://github.com/CrazyWolf13))
|
||||
- Author: Tobias ([CrazyWolf13](https://github.com/CrazyWolf13))
|
||||
- Available at: [community-scripts/ProxmoxVED](https://github.com/community-scripts/ProxmoxVED/blob/main/install/gitea-mirror-install.sh)
|
||||
- Pulls everything from GitHub
|
||||
- Creates a privileged container with the application
|
||||
- Sets up systemd service
|
||||
|
||||
2. **gitea-mirror-lxc-local.sh**: For offline/LAN-only deployment on a developer laptop
|
||||
- Pushes your local checkout + Bun ZIP to the container
|
||||
- Useful for testing without internet access
|
||||
|
||||
For detailed instructions on LXC deployment, see [README-lxc.md](./README-lxc.md).
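
A typical invocation of the local script, assuming your checkout lives somewhere other than the default `./gitea-mirror`, might look like:

```bash
# LOCAL_REPO_DIR defaults to ./gitea-mirror if unset
LOCAL_REPO_DIR=~/code/gitea-mirror ./scripts/gitea-mirror-lxc-local.sh
```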

scripts/check-events.ts (new file, 38 lines)
@@ -0,0 +1,38 @@
#!/usr/bin/env bun
/**
 * Script to check events in the database
 */

import { Database } from "bun:sqlite";
import path from "path";
import fs from "fs";

// Define the database path
const dataDir = path.join(process.cwd(), "data");
if (!fs.existsSync(dataDir)) {
  console.error("Data directory not found:", dataDir);
  process.exit(1);
}

const dbPath = path.join(dataDir, "gitea-mirror.db");
if (!fs.existsSync(dbPath)) {
  console.error("Database file not found:", dbPath);
  process.exit(1);
}

// Open the database
const db = new Database(dbPath);

// Check if the events table exists
const tableExists = db.query("SELECT name FROM sqlite_master WHERE type='table' AND name='events'").get();

if (!tableExists) {
  console.error("Events table does not exist");
  process.exit(1);
}

// Get all events
const events = db.query("SELECT * FROM events").all();

console.log("Events in the database:");
console.log(JSON.stringify(events, null, 2));

scripts/cleanup-events.ts (new file, 43 lines)
@@ -0,0 +1,43 @@
#!/usr/bin/env bun
/**
 * Script to clean up old events from the database.
 * This script should be run periodically (e.g., daily) to prevent the events table from growing too large.
 *
 * Usage:
 *   bun scripts/cleanup-events.ts [days]
 *
 * Where [days] is the number of days to keep events (default: 7)
 */

import { cleanupOldEvents } from "../src/lib/events";

// Parse command line arguments
const args = process.argv.slice(2);
const daysToKeep = args.length > 0 ? parseInt(args[0], 10) : 7;

if (isNaN(daysToKeep) || daysToKeep < 1) {
  console.error("Error: Days to keep must be a positive number");
  process.exit(1);
}

async function runCleanup() {
  try {
    console.log(`Starting event cleanup (retention: ${daysToKeep} days)...`);

    // Call the cleanupOldEvents function from the events module
    const result = await cleanupOldEvents(daysToKeep);

    console.log(`Cleanup summary:`);
    console.log(`- Read events deleted: ${result.readEventsDeleted}`);
    console.log(`- Unread events deleted: ${result.unreadEventsDeleted}`);
    console.log(`- Total events deleted: ${result.readEventsDeleted + result.unreadEventsDeleted}`);

    console.log("Event cleanup completed successfully");
  } catch (error) {
    console.error("Error running event cleanup:", error);
    process.exit(1);
  }
}

// Run the cleanup
runCleanup();
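
The deletion policy itself lives in `src/lib/events`, which is not part of this diff; judging by the summary fields above, it boils down to something like the following (illustrative only, using the `events` schema created elsewhere in this changeset):

```bash
# Equivalent hand-run cleanup: created_at is stored as unix seconds
sqlite3 data/gitea-mirror.db \
  "DELETE FROM events WHERE created_at < strftime('%s','now') - 7*86400;"
```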

scripts/docker-diagnostics.sh (mode changed: executable file → normal file)
@@ -105,12 +105,12 @@ echo -e "${BLUE} Recommendations ${NC}"
echo -e "${BLUE}=====================================================${NC}"

echo -e "\n${YELLOW}For local development:${NC}"
echo -e "1. ${GREEN}bun run setup${NC} (initialize database and install dependencies)"
echo -e "2. ${GREEN}./scripts/build-docker.sh --load${NC} (build and load into Docker)"
echo -e "3. ${GREEN}docker-compose -f docker-compose.dev.yml up -d${NC} (start the development container)"

echo -e "\n${YELLOW}For production deployment (using Docker Compose):${NC}"
echo -e "1. ${GREEN}bun run setup${NC} (if not already done, to ensure database schema is ready)"
echo -e "2. ${GREEN}docker-compose --profile production up -d${NC} (start the production container)"

echo -e "\n${YELLOW}For CI/CD builds:${NC}"

scripts/gitea-mirror-lxc-local.sh (new executable file, 86 lines)
@@ -0,0 +1,86 @@
#!/usr/bin/env bash
# gitea-mirror-lxc-local.sh (offline, local repo, verbose)

set -euo pipefail

CONTAINER="gitea-test"
IMAGE="ubuntu:22.04"
INSTALL_DIR="/opt/gitea-mirror"
PORT=4321
JWT_SECRET="$(openssl rand -hex 32)"

BUN_ZIP="/tmp/bun-linux-x64.zip"
BUN_URL="https://github.com/oven-sh/bun/releases/latest/download/bun-linux-x64.zip"

LOCAL_REPO_DIR="${LOCAL_REPO_DIR:-./gitea-mirror}"
REPO_TAR="/tmp/gitea-mirror-local.tar.gz"

need() { command -v "$1" >/dev/null || { echo "Missing $1"; exit 1; }; }
need curl; need lxc; need tar; need unzip

# ── build host artefacts ────────────────────────────────────────────────
[[ -d $LOCAL_REPO_DIR ]] || { echo "❌ LOCAL_REPO_DIR not found"; exit 1; }
[[ -f $LOCAL_REPO_DIR/package.json ]] || { echo "❌ package.json missing"; exit 1; }
[[ -f $BUN_ZIP ]] || curl -L --retry 5 --retry-delay 5 -o "$BUN_ZIP" "$BUN_URL"
tar -czf "$REPO_TAR" -C "$(dirname "$LOCAL_REPO_DIR")" "$(basename "$LOCAL_REPO_DIR")"

# ── ensure container exists ─────────────────────────────────────────────
lxd init --auto >/dev/null 2>&1 || true
lxc info "$CONTAINER" >/dev/null 2>&1 || lxc launch "$IMAGE" "$CONTAINER"

echo "🔧 installing base packages…"
sudo lxc exec "$CONTAINER" -- bash -c 'set -ex; apt update; apt install -y unzip tar openssl sqlite3'

echo "⬆️ pushing artefacts…"
sudo lxc file push "$BUN_ZIP" "$CONTAINER/opt/"
sudo lxc file push "$REPO_TAR" "$CONTAINER/opt/"

echo "📦 unpacking Bun + repo…"
sudo lxc exec "$CONTAINER" -- bash -ex <<'IN'
cd /opt
# Bun
unzip -oq bun-linux-x64.zip -d bun
BIN=$(find /opt/bun -type f -name bun -perm -111 | head -n1)
ln -sf "$BIN" /usr/local/bin/bun   # bun
ln -sf "$BIN" /usr/local/bin/bunx  # bunx shim
# Repo
rm -rf /opt/gitea-mirror
mkdir -p /opt/gitea-mirror
tar -xzf gitea-mirror-local.tar.gz --strip-components=1 -C /opt/gitea-mirror
IN

echo "🏗️ bun install / build…"
sudo lxc exec "$CONTAINER" -- bash -ex <<'IN'
cd /opt/gitea-mirror
bun install
bun run build
bun run manage-db init
IN

echo "📝 systemd unit…"
sudo lxc exec "$CONTAINER" -- bash -ex <<IN
cat >/etc/systemd/system/gitea-mirror.service <<SERVICE
[Unit]
Description=Gitea Mirror
After=network.target
[Service]
Type=simple
WorkingDirectory=$INSTALL_DIR
ExecStart=/usr/local/bin/bun dist/server/entry.mjs
Restart=on-failure
RestartSec=10
Environment=NODE_ENV=production
Environment=HOST=0.0.0.0
Environment=PORT=$PORT
Environment=DATABASE_URL=file:data/gitea-mirror.db
Environment=JWT_SECRET=$JWT_SECRET
[Install]
WantedBy=multi-user.target
SERVICE
systemctl daemon-reload
systemctl enable gitea-mirror
systemctl restart gitea-mirror
IN

echo -e "\n✅ finished; service status:"
sudo lxc exec "$CONTAINER" -- systemctl status gitea-mirror --no-pager
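
Once the script finishes, you can find the container's address and check that the app answers on port 4321 as configured above:

```bash
lxc list gitea-test                 # note the container's IPv4 address
curl http://<container-ip>:4321/    # the app should respond here
```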

scripts/make-events-old.ts (new file, 29 lines)
@@ -0,0 +1,29 @@
#!/usr/bin/env bun
/**
 * Script to make events appear older for testing cleanup
 */

import { db, events } from "../src/lib/db";

async function makeEventsOld() {
  try {
    console.log("Making events appear older...");

    // Calculate a timestamp from 2 days ago
    const oldDate = new Date();
    oldDate.setDate(oldDate.getDate() - 2);

    // Update all events to have an older timestamp
    const result = await db
      .update(events)
      .set({ createdAt: oldDate });

    console.log(`Updated ${result.changes || 0} events to appear older`);
  } catch (error) {
    console.error("Error updating event timestamps:", error);
    process.exit(1);
  }
}

// Run the function
makeEventsOld();
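
A typical test sequence pairs this with the cleanup script (assuming the events table is already populated):

```bash
bun scripts/make-events-old.ts     # backdate every event by two days
bun scripts/cleanup-events.ts 1    # then purge anything older than one day
```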

scripts/manage-db.ts
@@ -1,7 +1,6 @@
import fs from "fs";
import path from "path";
import { Database } from "bun:sqlite";
import { v4 as uuidv4 } from "uuid";

// Command line arguments
@@ -21,61 +20,66 @@ const dataDbFile = path.join(dataDir, "gitea-mirror.db");
const dataDevDbFile = path.join(dataDir, "gitea-mirror-dev.db");

// Database path - ensure we use an absolute path
const dbPath = path.join(dataDir, "gitea-mirror.db");

/**
 * Ensure all required tables exist
 */
async function ensureTablesExist() {
  // Create or open the database
  const db = new Database(dbPath);

  const requiredTables = [
    "users",
    "configs",
    "repositories",
    "organizations",
    "mirror_jobs",
    "events",
  ];

  for (const table of requiredTables) {
    try {
      // Check if the table exists
      const result = db.query(`SELECT name FROM sqlite_master WHERE type='table' AND name='${table}'`).get();

      if (!result) {
        console.warn(`⚠️ Table '${table}' is missing. Creating it now...`);

        switch (table) {
          case "users":
            db.exec(`
              CREATE TABLE users (
                id TEXT PRIMARY KEY,
                username TEXT NOT NULL,
                password TEXT NOT NULL,
                email TEXT NOT NULL,
                created_at INTEGER NOT NULL,
                updated_at INTEGER NOT NULL
              )
            `);
            break;
          case "configs":
            db.exec(`
              CREATE TABLE configs (
                id TEXT PRIMARY KEY,
                user_id TEXT NOT NULL,
                name TEXT NOT NULL,
                is_active INTEGER NOT NULL DEFAULT 1,
                github_config TEXT NOT NULL,
                gitea_config TEXT NOT NULL,
                include TEXT NOT NULL DEFAULT '["*"]',
                exclude TEXT NOT NULL DEFAULT '[]',
                schedule_config TEXT NOT NULL,
                created_at INTEGER NOT NULL DEFAULT (strftime('%s','now')),
                updated_at INTEGER NOT NULL DEFAULT (strftime('%s','now')),
                FOREIGN KEY (user_id) REFERENCES users(id)
              )
            `);
            break;
          case "repositories":
            db.exec(`
              CREATE TABLE repositories (
                id TEXT PRIMARY KEY,
                user_id TEXT NOT NULL,
                config_id TEXT NOT NULL,
@@ -104,12 +108,12 @@ async function ensureTablesExist() {
                updated_at INTEGER NOT NULL DEFAULT (strftime('%s','now')),
                FOREIGN KEY (user_id) REFERENCES users(id),
                FOREIGN KEY (config_id) REFERENCES configs(id)
              )
            `);
            break;
          case "organizations":
            db.exec(`
              CREATE TABLE organizations (
                id TEXT PRIMARY KEY,
                user_id TEXT NOT NULL,
                config_id TEXT NOT NULL,
@@ -125,12 +129,12 @@ async function ensureTablesExist() {
                updated_at INTEGER NOT NULL DEFAULT (strftime('%s','now')),
                FOREIGN KEY (user_id) REFERENCES users(id),
                FOREIGN KEY (config_id) REFERENCES configs(id)
              )
            `);
            break;
          case "mirror_jobs":
            db.exec(`
              CREATE TABLE mirror_jobs (
                id TEXT PRIMARY KEY,
                user_id TEXT NOT NULL,
                repository_id TEXT,
@@ -141,16 +145,56 @@ async function ensureTablesExist() {
                status TEXT NOT NULL DEFAULT 'imported',
                message TEXT NOT NULL,
                timestamp TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP,

                -- New fields for job resilience
                job_type TEXT NOT NULL DEFAULT 'mirror',
                batch_id TEXT,
                total_items INTEGER,
                completed_items INTEGER DEFAULT 0,
                item_ids TEXT, -- JSON array as text
                completed_item_ids TEXT DEFAULT '[]', -- JSON array as text
                in_progress INTEGER NOT NULL DEFAULT 0, -- Boolean as integer
                started_at TIMESTAMP,
                completed_at TIMESTAMP,
                last_checkpoint TIMESTAMP,

                FOREIGN KEY (user_id) REFERENCES users(id)
              )
            `);

            // Create indexes for better performance
            db.exec(`
              CREATE INDEX IF NOT EXISTS idx_mirror_jobs_user_id ON mirror_jobs(user_id);
              CREATE INDEX IF NOT EXISTS idx_mirror_jobs_batch_id ON mirror_jobs(batch_id);
              CREATE INDEX IF NOT EXISTS idx_mirror_jobs_in_progress ON mirror_jobs(in_progress);
              CREATE INDEX IF NOT EXISTS idx_mirror_jobs_job_type ON mirror_jobs(job_type);
              CREATE INDEX IF NOT EXISTS idx_mirror_jobs_timestamp ON mirror_jobs(timestamp);
            `);
            break;
          case "events":
            db.exec(`
              CREATE TABLE events (
                id TEXT PRIMARY KEY,
                user_id TEXT NOT NULL,
                channel TEXT NOT NULL,
                payload TEXT NOT NULL,
                read INTEGER NOT NULL DEFAULT 0,
                created_at INTEGER NOT NULL DEFAULT (strftime('%s','now')),
                FOREIGN KEY (user_id) REFERENCES users(id)
              )
            `);
            db.exec(`
              CREATE INDEX idx_events_user_channel ON events(user_id, channel);
              CREATE INDEX idx_events_created_at ON events(created_at);
              CREATE INDEX idx_events_read ON events(read);
            `);
            break;
        }
        console.log(`✅ Table '${table}' created successfully.`);
      }
    } catch (error) {
      console.error(`❌ Error checking table '${table}':`, error);
      process.exit(1);
    }
  }
}
@@ -168,7 +212,7 @@ async function checkDatabase() {
    );
    console.warn("This file should be in the data directory.");
    console.warn(
      'Run "bun run manage-db fix" to fix this issue or "bun run cleanup-db" to remove it.'
    );
  }
@@ -180,10 +224,11 @@ async function checkDatabase() {

    // Check for users
    try {
      const db = new Database(dbPath);

      // Check for users
      const userCountResult = db.query(`SELECT COUNT(*) as count FROM users`).get();
      const userCount = userCountResult?.count || 0;

      if (userCount === 0) {
        console.log("ℹ️ No users found in the database.");
@@ -197,10 +242,8 @@ async function checkDatabase() {
      }

      // Check for configurations
      const configCountResult = db.query(`SELECT COUNT(*) as count FROM configs`).get();
      const configCount = configCountResult?.count || 0;

      if (configCount === 0) {
        console.log("ℹ️ No configurations found in the database.");
@@ -215,12 +258,12 @@ async function checkDatabase() {
    } catch (error) {
      console.error("❌ Error connecting to the database:", error);
      console.warn(
        'The database file might be corrupted. Consider running "bun run manage-db init" to recreate it.'
      );
    }
  } else {
    console.warn("⚠️ WARNING: Database file not found in data directory.");
    console.warn('Run "bun run manage-db init" to create it.');
  }
}
@@ -235,15 +278,16 @@ async function initializeDatabase() {
  if (fs.existsSync(dataDbFile)) {
    console.log("⚠️ Database already exists at data/gitea-mirror.db");
    console.log(
      'If you want to recreate the database, run "bun run cleanup-db" first.'
    );
    console.log(
      'Or use "bun run manage-db reset-users" to just remove users without recreating tables.'
    );

    // Check if we can connect to it
    try {
      const db = new Database(dbPath);
      db.query(`SELECT COUNT(*) as count FROM users`).get();
      console.log("✅ Database is valid and accessible.");
      return;
    } catch (error) {
@@ -257,135 +301,136 @@ async function initializeDatabase() {
  console.log(`Initializing database at ${dbPath}...`);

  try {
    const db = new Database(dbPath);

    // Create tables if they don't exist
    db.exec(`
      CREATE TABLE IF NOT EXISTS users (
        id TEXT PRIMARY KEY,
        username TEXT NOT NULL,
        password TEXT NOT NULL,
        email TEXT NOT NULL,
        created_at INTEGER NOT NULL,
        updated_at INTEGER NOT NULL
      )
    `);

    // NOTE: We no longer create a default admin user - user will create one via signup page

    db.exec(`
      CREATE TABLE IF NOT EXISTS configs (
        id TEXT PRIMARY KEY,
        user_id TEXT NOT NULL,
        name TEXT NOT NULL,
        is_active INTEGER NOT NULL DEFAULT 1,
        github_config TEXT NOT NULL,
        gitea_config TEXT NOT NULL,
        include TEXT NOT NULL DEFAULT '["*"]',
        exclude TEXT NOT NULL DEFAULT '[]',
        schedule_config TEXT NOT NULL,
        created_at INTEGER NOT NULL DEFAULT (strftime('%s','now')),
        updated_at INTEGER NOT NULL DEFAULT (strftime('%s','now')),
        FOREIGN KEY (user_id) REFERENCES users(id)
      )
    `);

    db.exec(`
      CREATE TABLE IF NOT EXISTS repositories (
        id TEXT PRIMARY KEY,
        user_id TEXT NOT NULL,
        config_id TEXT NOT NULL,
        name TEXT NOT NULL,
        full_name TEXT NOT NULL,
        url TEXT NOT NULL,
        clone_url TEXT NOT NULL,
        owner TEXT NOT NULL,
        organization TEXT,
        mirrored_location TEXT DEFAULT '',
        is_private INTEGER NOT NULL DEFAULT 0,
        is_fork INTEGER NOT NULL DEFAULT 0,
        forked_from TEXT,
        has_issues INTEGER NOT NULL DEFAULT 0,
        is_starred INTEGER NOT NULL DEFAULT 0,
        is_archived INTEGER NOT NULL DEFAULT 0,
        size INTEGER NOT NULL DEFAULT 0,
        has_lfs INTEGER NOT NULL DEFAULT 0,
        has_submodules INTEGER NOT NULL DEFAULT 0,
        default_branch TEXT NOT NULL,
        visibility TEXT NOT NULL DEFAULT 'public',
        status TEXT NOT NULL DEFAULT 'imported',
        last_mirrored INTEGER,
        error_message TEXT,
        created_at INTEGER NOT NULL DEFAULT (strftime('%s','now')),
        updated_at INTEGER NOT NULL DEFAULT (strftime('%s','now')),
        FOREIGN KEY (user_id) REFERENCES users(id),
        FOREIGN KEY (config_id) REFERENCES configs(id)
      )
    `);

    db.exec(`
      CREATE TABLE IF NOT EXISTS organizations (
        id TEXT PRIMARY KEY,
        user_id TEXT NOT NULL,
        config_id TEXT NOT NULL,
        name TEXT NOT NULL,
        avatar_url TEXT NOT NULL,
        membership_role TEXT NOT NULL DEFAULT 'member',
        is_included INTEGER NOT NULL DEFAULT 1,
        status TEXT NOT NULL DEFAULT 'imported',
        last_mirrored INTEGER,
        error_message TEXT,
        repository_count INTEGER NOT NULL DEFAULT 0,
        created_at INTEGER NOT NULL DEFAULT (strftime('%s','now')),
        updated_at INTEGER NOT NULL DEFAULT (strftime('%s','now')),
        FOREIGN KEY (user_id) REFERENCES users(id),
        FOREIGN KEY (config_id) REFERENCES configs(id)
      )
    `);

    db.exec(`
      CREATE TABLE IF NOT EXISTS mirror_jobs (
        id TEXT PRIMARY KEY,
        user_id TEXT NOT NULL,
        repository_id TEXT,
        repository_name TEXT,
        organization_id TEXT,
        organization_name TEXT,
        details TEXT,
        status TEXT NOT NULL DEFAULT 'imported',
        message TEXT NOT NULL,
        timestamp TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP,
        FOREIGN KEY (user_id) REFERENCES users(id)
      )
    `);

    db.exec(`
      CREATE TABLE IF NOT EXISTS events (
        id TEXT PRIMARY KEY,
        user_id TEXT NOT NULL,
        channel TEXT NOT NULL,
        payload TEXT NOT NULL,
        read INTEGER NOT NULL DEFAULT 0,
        created_at INTEGER NOT NULL DEFAULT (strftime('%s','now')),
        FOREIGN KEY (user_id) REFERENCES users(id)
      )
    `);

    db.exec(`
      CREATE INDEX IF NOT EXISTS idx_events_user_channel ON events(user_id, channel);
      CREATE INDEX IF NOT EXISTS idx_events_created_at ON events(created_at);
      CREATE INDEX IF NOT EXISTS idx_events_read ON events(read);
    `);

    // Insert default config if none exists
    const configCountResult = db.query(`SELECT COUNT(*) as count FROM configs`).get();
    const configCount = configCountResult?.count || 0;

    if (configCount === 0) {
      // Get the first user
      const firstUserResult = db.query(`SELECT id FROM users LIMIT 1`).get();

      if (firstUserResult) {
        const userId = firstUserResult.id;
        const configId = uuidv4();
        const githubConfig = JSON.stringify({
          username: process.env.GITHUB_USERNAME || "",
@@ -415,24 +460,23 @@ async function initializeDatabase() {
          nextRun: null,
        });

        const stmt = db.prepare(`
          INSERT INTO configs (id, user_id, name, is_active, github_config, gitea_config, include, exclude, schedule_config, created_at, updated_at)
          VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
        `);

        stmt.run(
          configId,
          userId,
          "Default Configuration",
          1,
          githubConfig,
          giteaConfig,
          include,
          exclude,
          scheduleConfig,
          Date.now(),
          Date.now()
        );
      }
    }
@@ -452,21 +496,20 @@ async function resetUsers() {

  try {
    // Check if the database exists
    const doesDbExist = fs.existsSync(dbPath);

    if (!doesDbExist) {
      console.log(
        "❌ Database file doesn't exist. Run 'bun run manage-db init' first to create it."
      );
      return;
    }

    const db = new Database(dbPath);

    // Count existing users
    const userCountResult = db.query(`SELECT COUNT(*) as count FROM users`).get();
    const userCount = userCountResult?.count || 0;

    if (userCount === 0) {
      console.log("ℹ️ No users found in the database. Nothing to reset.");
@@ -474,63 +517,43 @@ async function resetUsers() {
    }

    // Delete all users
    db.exec(`DELETE FROM users`);
    console.log(`✅ Deleted ${userCount} users from the database.`);

    // Check dependent configurations that need to be removed
    const configCountResult = db.query(`SELECT COUNT(*) as count FROM configs`).get();
    const configCount = configCountResult?.count || 0;

    if (configCount > 0) {
      db.exec(`DELETE FROM configs`);
      console.log(`✅ Deleted ${configCount} configurations.`);
    }

    // Check for dependent repositories
    const repoCountResult = db.query(`SELECT COUNT(*) as count FROM repositories`).get();
    const repoCount = repoCountResult?.count || 0;

    if (repoCount > 0) {
      db.exec(`DELETE FROM repositories`);
      console.log(`✅ Deleted ${repoCount} repositories.`);
    }

    // Check for dependent organizations
    const orgCountResult = db.query(`SELECT COUNT(*) as count FROM organizations`).get();
    const orgCount = orgCountResult?.count || 0;

    if (orgCount > 0) {
      db.exec(`DELETE FROM organizations`);
      console.log(`✅ Deleted ${orgCount} organizations.`);
    }

    // Check for dependent mirror jobs
    const jobCountResult = db.query(`SELECT COUNT(*) as count FROM mirror_jobs`).get();
    const jobCount = jobCountResult?.count || 0;

    if (jobCount > 0) {
      db.exec(`DELETE FROM mirror_jobs`);
      console.log(`✅ Deleted ${jobCount} mirror jobs.`);
    }

    console.log(
@@ -629,19 +652,20 @@ async function fixDatabaseIssues() {
    console.warn(
      "⚠️ WARNING: Production database file not found in data directory."
    );
    console.warn('Run "bun run manage-db init" to create it.');
  } else {
    console.log("✅ Production database file found in data directory.");

    // Check if we can connect to the database
    try {
      const db = new Database(dbPath);
      db.query(`SELECT 1 FROM sqlite_master LIMIT 1`).get();
      console.log(`✅ Successfully connected to the database.`);
    } catch (error) {
      console.error("❌ Error connecting to the database:", error);
      console.warn(
        'The database file might be corrupted. Consider running "bun run manage-db init" to recreate it.'
      );
    }
  }
@@ -692,7 +716,7 @@ Available commands:
  reset-users - Remove all users and their data
  auto        - Automatic mode: check, fix, and initialize if needed

Usage: bun run manage-db [command]
`);
  }
}

scripts/mark-events-read.ts (new file, 27 lines)
@@ -0,0 +1,27 @@
#!/usr/bin/env bun
/**
 * Script to mark all events as read
 */

import { db, events } from "../src/lib/db";
import { eq } from "drizzle-orm";

async function markEventsAsRead() {
  try {
    console.log("Marking all events as read...");

    // Update all events to mark them as read
    const result = await db
      .update(events)
      .set({ read: true })
      .where(eq(events.read, false));

    console.log(`Marked ${result.changes || 0} events as read`);
  } catch (error) {
    console.error("Error marking events as read:", error);
    process.exit(1);
  }
}

// Run the function
markEventsAsRead();

scripts/update-mirror-jobs-table.ts (new file, 133 lines)
@@ -0,0 +1,133 @@
#!/usr/bin/env bun
/**
 * Script to update the mirror_jobs table with new columns for resilience
 */

import { Database } from "bun:sqlite";
import fs from "fs";
import path from "path";

// Define the database paths
const dataDir = path.join(process.cwd(), "data");
const dbPath = path.join(dataDir, "gitea-mirror.db");

// Ensure data directory exists
if (!fs.existsSync(dataDir)) {
  fs.mkdirSync(dataDir, { recursive: true });
  console.log(`Created data directory at ${dataDir}`);
}

// Check if database exists
if (!fs.existsSync(dbPath)) {
  console.error(`Database file not found at ${dbPath}`);
  console.error("Please run 'bun run init-db' first to create the database.");
  process.exit(1);
}

// Connect to the database
const db = new Database(dbPath);

// Enable foreign keys
db.exec("PRAGMA foreign_keys = ON;");

// Function to check if a column exists in a table
function columnExists(tableName: string, columnName: string): boolean {
  const result = db.query(
    `PRAGMA table_info(${tableName})`
  ).all() as { name: string }[];

  return result.some(column => column.name === columnName);
}

// Main function to update the mirror_jobs table
async function updateMirrorJobsTable() {
  console.log("Checking mirror_jobs table for missing columns...");

  // Start a transaction
  db.exec("BEGIN TRANSACTION;");

  try {
    // Check and add each new column if it doesn't exist
    const columnsToAdd = [
      { name: "job_type", definition: "TEXT NOT NULL DEFAULT 'mirror'" },
      { name: "batch_id", definition: "TEXT" },
      { name: "total_items", definition: "INTEGER" },
      { name: "completed_items", definition: "INTEGER DEFAULT 0" },
      { name: "item_ids", definition: "TEXT" }, // JSON array as text
      { name: "completed_item_ids", definition: "TEXT DEFAULT '[]'" }, // JSON array as text
      { name: "in_progress", definition: "INTEGER NOT NULL DEFAULT 0" }, // Boolean as integer
      { name: "started_at", definition: "TIMESTAMP" },
      { name: "completed_at", definition: "TIMESTAMP" },
      { name: "last_checkpoint", definition: "TIMESTAMP" }
    ];

    let columnsAdded = 0;

    for (const column of columnsToAdd) {
      if (!columnExists("mirror_jobs", column.name)) {
        console.log(`Adding column '${column.name}' to mirror_jobs table...`);
        db.exec(`ALTER TABLE mirror_jobs ADD COLUMN ${column.name} ${column.definition};`);
        columnsAdded++;
      }
    }

    // Commit the transaction
    db.exec("COMMIT;");

    if (columnsAdded > 0) {
      console.log(`✅ Added ${columnsAdded} new columns to mirror_jobs table.`);
    } else {
      console.log("✅ All required columns already exist in mirror_jobs table.");
    }

    // Create indexes for better performance
    console.log("Creating indexes for mirror_jobs table...");

    // Only create indexes if they don't exist
    const indexesResult = db.query(
      `SELECT name FROM sqlite_master WHERE type='index' AND tbl_name='mirror_jobs'`
    ).all() as { name: string }[];

    const existingIndexes = indexesResult.map(idx => idx.name);

    const indexesToCreate = [
      { name: "idx_mirror_jobs_user_id", columns: "user_id" },
      { name: "idx_mirror_jobs_batch_id", columns: "batch_id" },
      { name: "idx_mirror_jobs_in_progress", columns: "in_progress" },
      { name: "idx_mirror_jobs_job_type", columns: "job_type" },
      { name: "idx_mirror_jobs_timestamp", columns: "timestamp" }
    ];

    let indexesCreated = 0;

    for (const index of indexesToCreate) {
      if (!existingIndexes.includes(index.name)) {
        console.log(`Creating index '${index.name}'...`);
        db.exec(`CREATE INDEX ${index.name} ON mirror_jobs(${index.columns});`);
        indexesCreated++;
      }
    }

    if (indexesCreated > 0) {
      console.log(`✅ Created ${indexesCreated} new indexes for mirror_jobs table.`);
    } else {
      console.log("✅ All required indexes already exist for mirror_jobs table.");
    }

    console.log("Mirror jobs table update completed successfully.");
  } catch (error) {
    // Rollback the transaction in case of error
    db.exec("ROLLBACK;");
    console.error("❌ Error updating mirror_jobs table:", error);
    process.exit(1);
  } finally {
    // Close the database connection
    db.close();
  }
}

// Run the update function
updateMirrorJobsTable().catch(error => {
  console.error("Unhandled error:", error);
  process.exit(1);
});
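
To apply the migration to an existing database, run the script once from the repository root:

```bash
bun scripts/update-mirror-jobs-table.ts
```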

ActivityList.tsx
@@ -1,16 +1,18 @@
import { useEffect, useMemo, useRef, useState } from 'react';
import { useVirtualizer } from '@tanstack/react-virtual';
import type { MirrorJob } from '@/lib/db/schema';
import Fuse from 'fuse.js';
import { Button } from '../ui/button';
import { RefreshCw } from 'lucide-react';
import { Card } from '../ui/card';
import { formatDate, getStatusColor } from '@/lib/utils';
import { Skeleton } from '../ui/skeleton';
import type { FilterParams } from '@/types/filter';

type MirrorJobWithKey = MirrorJob & { _rowKey: string };

interface ActivityListProps {
  activities: MirrorJobWithKey[];
  isLoading: boolean;
  filter: FilterParams;
  setFilter: (filter: FilterParams) => void;
@@ -22,38 +24,44 @@ export default function ActivityList({
  filter,
  setFilter,
}: ActivityListProps) {
  const [expandedItems, setExpandedItems] = useState<Set<string>>(
    () => new Set(),
  );

  const parentRef = useRef<HTMLDivElement>(null);
  // We keep the ref only for possible future scroll-to-row logic.
  const rowRefs = useRef<Map<string, HTMLDivElement | null>>(new Map()); // eslint-disable-line @typescript-eslint/no-unused-vars

  const filteredActivities = useMemo(() => {
    let result = activities;

    if (filter.status) {
      result = result.filter((a) => a.status === filter.status);
    }

    if (filter.type) {
      result =
        filter.type === 'repository'
          ? result.filter((a) => !!a.repositoryId)
          : filter.type === 'organization'
            ? result.filter((a) => !!a.organizationId)
            : result;
    }

    if (filter.name) {
      result = result.filter(
        (a) =>
          a.repositoryName === filter.name ||
          a.organizationName === filter.name,
      );
    }

    if (filter.searchTerm) {
      const fuse = new Fuse(result, {
        keys: ['message', 'details', 'organizationName', 'repositoryName'],
        threshold: 0.3,
      });
      result = fuse.search(filter.searchTerm).map((r) => r.item);
    }

    return result;
@@ -62,10 +70,8 @@ export default function ActivityList({
  const virtualizer = useVirtualizer({
    count: filteredActivities.length,
    getScrollElement: () => parentRef.current,
    estimateSize: (idx) =>
      expandedItems.has(filteredActivities[idx]._rowKey) ? 217 : 120,
    overscan: 5,
    measureElement: (el) => el.getBoundingClientRect().height + 8,
  });
@@ -74,118 +80,132 @@ export default function ActivityList({
    virtualizer.measure();
  }, [expandedItems, virtualizer]);

  /* ------------------------------ render ------------------------------ */

  if (isLoading) {
    return (
      <div className='flex flex-col gap-y-4'>
        {Array.from({ length: 5 }, (_, i) => (
          <Skeleton key={i} className='h-28 w-full rounded-md' />
        ))}
      </div>
    );
  }

  if (filteredActivities.length === 0) {
    const hasFilter =
      filter.searchTerm || filter.status || filter.type || filter.name;

    return (
      <div className='flex flex-col items-center justify-center py-12 text-center'>
        <RefreshCw className='mb-4 h-12 w-12 text-muted-foreground' />
        <h3 className='text-lg font-medium'>No activities found</h3>
        <p className='mt-1 mb-4 max-w-md text-sm text-muted-foreground'>
          {hasFilter
            ? 'Try adjusting your search or filter criteria.'
            : 'No mirroring activities have been recorded yet.'}
        </p>
        {hasFilter ? (
          <Button
            variant='outline'
            onClick={() =>
              setFilter({ searchTerm: '', status: '', type: '', name: '' })
            }
          >
            Clear Filters
          </Button>
        ) : (
          <Button>
            <RefreshCw className='mr-2 h-4 w-4' />
            Refresh
          </Button>
        )}
      </div>
    );
  }

  return (
    <Card
      ref={parentRef}
      className='relative max-h-[calc(100dvh-191px)] overflow-y-auto rounded-md border'
    >
      <div
        style={{
          height: virtualizer.getTotalSize(),
          position: 'relative',
          width: '100%',
        }}
      >
        {virtualizer.getVirtualItems().map((vRow) => {
          const activity = filteredActivities[vRow.index];
          const isExpanded = expandedItems.has(activity._rowKey);

          return (
            <div
              key={activity._rowKey}
              ref={(node) => {
                rowRefs.current.set(activity._rowKey, node);
                if (node) virtualizer.measureElement(node);
              }}
              style={{
                position: 'absolute',
                top: 0,
                left: 0,
                width: '100%',
                transform: `translateY(${vRow.start}px)`,
                paddingBottom: '8px',
              }}
              className='border-b px-4 pt-4'
            >
              <div className='flex items-start gap-4'>
                <div className='relative mt-2'>
                  <div
                    className={`h-2 w-2 rounded-full ${getStatusColor(
                      activity.status,
                    )}`}
                  />
                </div>

                <div className='flex-1'>
                  <div className='mb-1 flex flex-col sm:flex-row sm:items-center sm:justify-between'>
                    <p className='font-medium'>{activity.message}</p>
                    <p className='text-sm text-muted-foreground'>
                      {formatDate(activity.timestamp)}
                    </p>
                  </div>

                  {activity.repositoryName && (
                    <p className='mb-2 text-sm text-muted-foreground'>
                      Repository: {activity.repositoryName}
                    </p>
                  )}

                  {activity.organizationName && (
                    <p className='mb-2 text-sm text-muted-foreground'>
                      Organization: {activity.organizationName}
                    </p>
                  )}

                  {activity.details && (
                    <div className='mt-2'>
                      <Button
                        variant='ghost'
                        className='h-7 px-2 text-xs'
                        onClick={() =>
                          setExpandedItems((prev) => {
                            const next = new Set(prev);
                            next.has(activity._rowKey)
                              ? next.delete(activity._rowKey)
                              : next.add(activity._rowKey);
                            return next;
                          })
                        }
                      >
                        {isExpanded ? 'Hide Details' : 'Show Details'}
                      </Button>

                      {isExpanded && (
                        <pre className='mt-2 min-h-[100px] whitespace-pre-wrap overflow-auto rounded-md bg-muted p-3 text-xs'>
                          {activity.details}
                        </pre>
                      )}

ActivityLog.tsx
@@ -1,76 +1,97 @@
import { useCallback, useEffect, useState } from 'react';
import { Button } from '@/components/ui/button';
import { ChevronDown, Download, RefreshCw, Search } from 'lucide-react';
import {
  DropdownMenu,
  DropdownMenuContent,
  DropdownMenuItem,
  DropdownMenuTrigger,
} from '../ui/dropdown-menu';
import { apiRequest, formatDate } from '@/lib/utils';
import { useAuth } from '@/hooks/useAuth';
import type { MirrorJob } from '@/lib/db/schema';
import type { ActivityApiResponse } from '@/types/activities';
import {
  Select,
  SelectContent,
  SelectItem,
  SelectTrigger,
  SelectValue,
} from '../ui/select';
import { repoStatusEnum, type RepoStatus } from '@/types/Repository';
import ActivityList from './ActivityList';
import { ActivityNameCombobox } from './ActivityNameCombobox';
import { useSSE } from '@/hooks/useSEE';
import { useFilterParams } from '@/hooks/useFilterParams';
import { toast } from 'sonner';

type MirrorJobWithKey = MirrorJob & { _rowKey: string };

function genKey(job: MirrorJob): string {
  return `${
    job.id ?? (typeof crypto !== 'undefined'
      ? crypto.randomUUID()
      : Math.random().toString(36).slice(2))
  }-${job.timestamp}`;
}

export function ActivityLog() {
  const { user } = useAuth();

  const [activities, setActivities] = useState<MirrorJobWithKey[]>([]);
  const [isLoading, setIsLoading] = useState(false);

  const { filter, setFilter } = useFilterParams({
    searchTerm: '',
    status: '',
    type: '',
    name: '',
  });

  /* ----------------------------- SSE hook ----------------------------- */

  const handleNewMessage = useCallback((data: MirrorJob) => {
    const withKey: MirrorJobWithKey = {
      ...structuredClone(data),
      _rowKey: genKey(data),
    };

    setActivities((prev) => [withKey, ...prev]);
  }, []);

  // Use the SSE hook
  const { connected } = useSSE({
    userId: user?.id,
    onMessage: handleNewMessage,
  });

  /* ------------------------- initial fetch --------------------------- */

  const fetchActivities = useCallback(async () => {
    if (!user) return false;

    try {
      setIsLoading(true);

      const res = await apiRequest<ActivityApiResponse>(
        `/activities?userId=${user.id}`,
        { method: 'GET' },
      );

      if (!res.success) {
        toast.error(res.message ?? 'Failed to fetch activities.');
        return false;
      }

      const data: MirrorJobWithKey[] = res.activities.map((a) => ({
        ...structuredClone(a),
        _rowKey: genKey(a),
      }));

      setActivities(data);
      return true;
    } catch (err) {
      toast.error(
        err instanceof Error ? err.message : 'Failed to fetch activities.',
      );
      return false;
    } finally {
@@ -82,208 +103,167 @@ export function ActivityLog() {
|
||||
fetchActivities();
|
||||
}, [fetchActivities]);
|
||||
|
||||
const handleRefreshActivities = async () => {
|
||||
const success = await fetchActivities();
|
||||
if (success) {
|
||||
toast.success("Activities refreshed successfully.");
|
||||
}
|
||||
};
|
||||
/* ---------------------- filtering + exporting ---------------------- */

// Get the currently filtered activities
const getFilteredActivities = () => {
return activities.filter(activity => {
let isIncluded = true;
const applyLightFilter = (list: MirrorJobWithKey[]) => {
return list.filter((a) => {
if (filter.status && a.status !== filter.status) return false;

if (filter.status) {
isIncluded = isIncluded && activity.status === filter.status;
if (filter.type === 'repository' && !a.repositoryId) return false;
if (filter.type === 'organization' && !a.organizationId) return false;

if (
filter.name &&
a.repositoryName !== filter.name &&
a.organizationName !== filter.name
) {
return false;
}

if (filter.type) {
if (filter.type === 'repository') {
isIncluded = isIncluded && !!activity.repositoryId;
} else if (filter.type === 'organization') {
isIncluded = isIncluded && !!activity.organizationId;
}
}

if (filter.name) {
isIncluded = isIncluded && (
activity.repositoryName === filter.name ||
activity.organizationName === filter.name
);
}

// Note: We're not applying the search term filter here as that would require
// re-implementing the Fuse.js search logic

return isIncluded;
return true;
});
};

// Function to export activities as CSV
const exportAsCSV = () => {
const filteredActivities = getFilteredActivities();
const rows = applyLightFilter(activities);
if (!rows.length) return toast.error('No activities to export.');

if (filteredActivities.length === 0) {
toast.error("No activities to export.");
return;
}

// Create CSV content
const headers = ["Timestamp", "Message", "Status", "Repository", "Organization", "Details"];
const csvRows = [
headers.join(","),
...filteredActivities.map(activity => {
const formattedDate = formatDate(activity.timestamp);
// Escape fields that might contain commas or quotes
const escapeCsvField = (field: string | null | undefined) => {
if (!field) return '';
if (field.includes(',') || field.includes('"') || field.includes('\n')) {
return `"${field.replace(/"/g, '""')}"`;
}
return field;
};

return [
formattedDate,
escapeCsvField(activity.message),
activity.status,
escapeCsvField(activity.repositoryName || ''),
escapeCsvField(activity.organizationName || ''),
escapeCsvField(activity.details || '')
].join(',');
})
const headers = [
'Timestamp',
'Message',
'Status',
'Repository',
'Organization',
'Details',
];

const csvContent = csvRows.join('\n');
const escape = (v: string | null | undefined) =>
v && /[,\"\n]/.test(v) ? `"${v.replace(/"/g, '""')}"` : v ?? '';

// Download the CSV file
downloadFile(csvContent, 'text/csv;charset=utf-8;', 'activity_log_export.csv');
const csv = [
headers.join(','),
...rows.map((a) =>
[
formatDate(a.timestamp),
escape(a.message),
a.status,
escape(a.repositoryName),
escape(a.organizationName),
escape(a.details),
].join(','),
),
].join('\n');

toast.success("Activity log exported as CSV successfully.");
downloadFile(csv, 'text/csv;charset=utf-8;', 'activity_log_export.csv');
toast.success('CSV exported.');
};
// Function to export activities as JSON
const exportAsJSON = () => {
const filteredActivities = getFilteredActivities();
const rows = applyLightFilter(activities);
if (!rows.length) return toast.error('No activities to export.');

if (filteredActivities.length === 0) {
toast.error("No activities to export.");
return;
}
const json = JSON.stringify(
rows.map((a) => ({
...a,
formattedTime: formatDate(a.timestamp),
})),
null,
2,
);

// Format the activities for export (removing any sensitive or unnecessary fields if needed)
const activitiesForExport = filteredActivities.map(activity => ({
id: activity.id,
timestamp: activity.timestamp,
formattedTime: formatDate(activity.timestamp),
message: activity.message,
status: activity.status,
repositoryId: activity.repositoryId,
repositoryName: activity.repositoryName,
organizationId: activity.organizationId,
organizationName: activity.organizationName,
details: activity.details
}));

const jsonContent = JSON.stringify(activitiesForExport, null, 2);

// Download the JSON file
downloadFile(jsonContent, 'application/json', 'activity_log_export.json');

toast.success("Activity log exported as JSON successfully.");
downloadFile(json, 'application/json', 'activity_log_export.json');
toast.success('JSON exported.');
};

// Generic function to download a file
const downloadFile = (content: string, mimeType: string, filename: string) => {
// Add date to filename
const date = new Date();
const dateStr = `${date.getFullYear()}-${String(date.getMonth() + 1).padStart(2, '0')}-${String(date.getDate()).padStart(2, '0')}`;
const filenameWithDate = filename.replace('.', `_${dateStr}.`);

// Create a download link
const blob = new Blob([content], { type: mimeType });
const url = URL.createObjectURL(blob);
const downloadFile = (
content: string,
mime: string,
filename: string,
): void => {
const date = new Date().toISOString().slice(0, 10); // yyyy-mm-dd
const link = document.createElement('a');

link.href = url;
link.setAttribute('download', filenameWithDate);
document.body.appendChild(link);
link.href = URL.createObjectURL(new Blob([content], { type: mime }));
link.download = filename.replace('.', `_${date}.`);
link.click();
document.body.removeChild(link);
};

/* ------------------------------ UI ------------------------------ */
return (
<div className="flex flex-col gap-y-8">
<div className="flex flex-row items-center gap-4 w-full">
<div className="relative flex-1">
<Search className="absolute left-2 top-2.5 h-4 w-4 text-muted-foreground" />
<div className='flex flex-col gap-y-8'>
<div className='flex w-full flex-row items-center gap-4'>
{/* search input */}
<div className='relative flex-1'>
<Search className='absolute left-2 top-2.5 h-4 w-4 text-muted-foreground' />
<input
type="text"
placeholder="Search activities..."
className="pl-8 h-9 w-full rounded-md border border-input bg-background px-3 py-1 text-sm shadow-sm transition-colors placeholder:text-muted-foreground focus-visible:outline-none focus-visible:ring-1 focus-visible:ring-ring"
type='text'
placeholder='Search activities...'
className='h-9 w-full rounded-md border border-input bg-background px-3 py-1 pl-8 text-sm shadow-sm transition-colors placeholder:text-muted-foreground focus-visible:outline-none focus-visible:ring-1 focus-visible:ring-ring'
value={filter.searchTerm}
onChange={(e) =>
setFilter((prev) => ({ ...prev, searchTerm: e.target.value }))
setFilter((prev) => ({
...prev,
searchTerm: e.target.value,
}))
}
/>
</div>

{/* status select */}
<Select
value={filter.status || "all"}
onValueChange={(value) =>
setFilter((prev) => ({
...prev,
status: value === "all" ? "" : (value as RepoStatus),
value={filter.status || 'all'}
onValueChange={(v) =>
setFilter((p) => ({
...p,
status: v === 'all' ? '' : (v as RepoStatus),
}))
}
>
<SelectTrigger className="w-[140px] h-9 max-h-9">
<SelectValue placeholder="All Status" />
<SelectTrigger className='h-9 w-[140px] max-h-9'>
<SelectValue placeholder='All Status' />
</SelectTrigger>
<SelectContent>
{["all", ...repoStatusEnum.options].map((status) => (
<SelectItem key={status} value={status}>
{status === "all"
? "All Status"
: status.charAt(0).toUpperCase() + status.slice(1)}
{['all', ...repoStatusEnum.options].map((s) => (
<SelectItem key={s} value={s}>
{s === 'all' ? 'All Status' : s[0].toUpperCase() + s.slice(1)}
</SelectItem>
))}
</SelectContent>
</Select>
{/* Repository/Organization Name Combobox */}
{/* repo/org name combobox */}
<ActivityNameCombobox
activities={activities}
value={filter.name || ""}
onChange={(name: string) => setFilter((prev) => ({ ...prev, name }))}
value={filter.name || ''}
onChange={(name) => setFilter((p) => ({ ...p, name }))}
/>
{/* Filter by type: repository/org/all */}

{/* type select */}
<Select
value={filter.type || "all"}
onValueChange={(value) =>
setFilter((prev) => ({
...prev,
type: value === "all" ? "" : value,
}))
value={filter.type || 'all'}
onValueChange={(v) =>
setFilter((p) => ({ ...p, type: v === 'all' ? '' : v }))
}
>
<SelectTrigger className="w-[140px] h-9 max-h-9">
<SelectValue placeholder="All Types" />
<SelectTrigger className='h-9 w-[140px] max-h-9'>
<SelectValue placeholder='All Types' />
</SelectTrigger>
<SelectContent>
{['all', 'repository', 'organization'].map((type) => (
<SelectItem key={type} value={type}>
{type === 'all' ? 'All Types' : type.charAt(0).toUpperCase() + type.slice(1)}
{['all', 'repository', 'organization'].map((t) => (
<SelectItem key={t} value={t}>
{t === 'all' ? 'All Types' : t[0].toUpperCase() + t.slice(1)}
</SelectItem>
))}
</SelectContent>
</Select>

{/* export dropdown */}
<DropdownMenu>
<DropdownMenuTrigger asChild>
<Button variant="outline" className="flex items-center gap-1">
<Download className="h-4 w-4 mr-1" />
<Button variant='outline' className='flex items-center gap-1'>
<Download className='mr-1 h-4 w-4' />
Export
<ChevronDown className="h-4 w-4 ml-1" />
<ChevronDown className='ml-1 h-4 w-4' />
</Button>
</DropdownMenuTrigger>
<DropdownMenuContent>
@@ -295,19 +275,21 @@ export function ActivityLog() {
</DropdownMenuItem>
</DropdownMenuContent>
</DropdownMenu>
<Button onClick={handleRefreshActivities}>
<RefreshCw className="h-4 w-4 mr-2" />

{/* refresh */}
<Button onClick={() => fetchActivities()}>
<RefreshCw className='mr-2 h-4 w-4' />
Refresh
</Button>
</div>
<div className="flex flex-col gap-y-6">
<ActivityList
activities={activities}
isLoading={isLoading || !connected}
filter={filter}
setFilter={setFilter}
/>
</div>

{/* activity list */}
<ActivityList
activities={applyLightFilter(activities)}
isLoading={isLoading || !connected}
filter={filter}
setFilter={setFilter}
/>
</div>
);
}
@@ -1,14 +1,14 @@
import { useEffect, useState } from "react";
import { useEffect, useState } from 'react';
import {
Card,
CardContent,
CardDescription,
CardHeader,
CardTitle,
} from "@/components/ui/card";
import { GitHubConfigForm } from "./GitHubConfigForm";
import { GiteaConfigForm } from "./GiteaConfigForm";
import { ScheduleConfigForm } from "./ScheduleConfigForm";
} from '@/components/ui/card';
import { GitHubConfigForm } from './GitHubConfigForm';
import { GiteaConfigForm } from './GiteaConfigForm';
import { ScheduleConfigForm } from './ScheduleConfigForm';
import type {
ConfigApiResponse,
GiteaConfig,
@@ -16,12 +16,13 @@ import type {
SaveConfigApiRequest,
SaveConfigApiResponse,
ScheduleConfig,
} from "@/types/config";
import { Button } from "../ui/button";
import { useAuth } from "@/hooks/useAuth";
import { apiRequest } from "@/lib/utils";
import { Copy, CopyCheck, RefreshCw } from "lucide-react";
import { toast } from "sonner";
} from '@/types/config';
import { Button } from '../ui/button';
import { useAuth } from '@/hooks/useAuth';
import { apiRequest } from '@/lib/utils';
import { Copy, CopyCheck, RefreshCw } from 'lucide-react';
import { toast } from 'sonner';
import { Skeleton } from '@/components/ui/skeleton';

type ConfigState = {
githubConfig: GitHubConfig;
@@ -32,8 +33,8 @@ type ConfigState = {
export function ConfigTabs() {
const [config, setConfig] = useState<ConfigState>({
githubConfig: {
username: "",
token: "",
username: '',
token: '',
skipForks: false,
privateRepositories: false,
mirrorIssues: false,
@@ -41,16 +42,14 @@ export function ConfigTabs() {
preserveOrgStructure: false,
skipStarredIssues: false,
},

giteaConfig: {
url: "",
username: "",
token: "",
organization: "github-mirrors",
visibility: "public",
starredReposOrg: "github",
url: '',
username: '',
token: '',
organization: 'github-mirrors',
visibility: 'public',
starredReposOrg: 'github',
},

scheduleConfig: {
enabled: false,
interval: 3600,
@@ -58,27 +57,21 @@ export function ConfigTabs() {
});
const { user, refreshUser } = useAuth();
const [isLoading, setIsLoading] = useState(true);
const [dockerCode, setDockerCode] = useState<string>("");
const [dockerCode, setDockerCode] = useState<string>('');
const [isCopied, setIsCopied] = useState<boolean>(false);
const [isSyncing, setIsSyncing] = useState<boolean>(false);
const [isConfigSaved, setIsConfigSaved] = useState<boolean>(false);

// Check if all required fields are filled to enable the Save Configuration button
const isConfigFormValid = (): boolean => {
const { githubConfig, giteaConfig } = config;

// Check GitHub required fields
const isGitHubValid = !!(
githubConfig.username?.trim() && githubConfig.token?.trim()
githubConfig.username.trim() && githubConfig.token.trim()
);

// Check Gitea required fields
const isGiteaValid = !!(
giteaConfig.url?.trim() &&
giteaConfig.username?.trim() &&
giteaConfig.token?.trim()
giteaConfig.url.trim() &&
giteaConfig.username.trim() &&
giteaConfig.token.trim()
);

return isGitHubValid && isGiteaValid;
};
@@ -86,11 +79,12 @@ export function ConfigTabs() {
const updateLastAndNextRun = () => {
const lastRun = config.scheduleConfig.lastRun
? new Date(config.scheduleConfig.lastRun)
: new Date(); // fallback to now if lastRun is null
: new Date();
const intervalInSeconds = config.scheduleConfig.interval;
const nextRun = new Date(lastRun.getTime() + intervalInSeconds * 1000);

setConfig((prev) => ({
const nextRun = new Date(
lastRun.getTime() + intervalInSeconds * 1000,
);
setConfig(prev => ({
...prev,
scheduleConfig: {
...prev.scheduleConfig,
@@ -99,37 +93,31 @@ export function ConfigTabs() {
},
}));
};

updateLastAndNextRun();
}, [config.scheduleConfig.interval]);

const handleImportGitHubData = async () => {
if (!user?.id) return;
setIsSyncing(true);
try {
if (!user?.id) return;

setIsSyncing(true);

const result = await apiRequest<{ success: boolean; message?: string }>(
`/sync?userId=${user.id}`,
{
method: "POST",
}
{ method: 'POST' },
);

if (result.success) {
toast.success(
"GitHub data imported successfully! Head to the Dashboard to start mirroring repositories."
);
} else {
toast.error(
`Failed to import GitHub data: ${result.message || "Unknown error"}`
);
}
result.success
? toast.success(
'GitHub data imported successfully! Head to the Dashboard to start mirroring repositories.',
)
: toast.error(
`Failed to import GitHub data: ${
result.message || 'Unknown error'
}`,
);
} catch (error) {
toast.error(
`Error importing GitHub data: ${
error instanceof Error ? error.message : String(error)
}`
}`,
);
} finally {
setIsSyncing(false);
@@ -137,94 +125,76 @@ export function ConfigTabs() {
};
const handleSaveConfig = async () => {
if (!user?.id) return;
const reqPayload: SaveConfigApiRequest = {
userId: user.id,
githubConfig: config.githubConfig,
giteaConfig: config.giteaConfig,
scheduleConfig: config.scheduleConfig,
};
try {
if (!user || !user.id) {
return;
}

const reqPyload: SaveConfigApiRequest = {
userId: user.id,
githubConfig: config.githubConfig,
giteaConfig: config.giteaConfig,
scheduleConfig: config.scheduleConfig,
};
const response = await fetch("/api/config", {
method: "POST",
headers: {
"Content-Type": "application/json",
},
body: JSON.stringify(reqPyload),
const response = await fetch('/api/config', {
method: 'POST',
headers: { 'Content-Type': 'application/json' },
body: JSON.stringify(reqPayload),
});

const result: SaveConfigApiResponse = await response.json();

if (result.success) {
await refreshUser();
setIsConfigSaved(true);

toast.success(
"Configuration saved successfully! Now import your GitHub data to begin."
'Configuration saved successfully! Now import your GitHub data to begin.',
);
} else {
toast.error(
`Failed to save configuration: ${result.message || "Unknown error"}`
`Failed to save configuration: ${result.message || 'Unknown error'}`,
);
}
} catch (error) {
toast.error(
`An error occurred while saving the configuration: ${
error instanceof Error ? error.message : String(error)
}`
}`,
);
}
};
useEffect(() => {
if (!user) return;

const fetchConfig = async () => {
setIsLoading(true);
try {
if (!user) {
return;
}

setIsLoading(true);

const response = await apiRequest<ConfigApiResponse>(
`/config?userId=${user.id}`,
{
method: "GET",
}
{ method: 'GET' },
);

// Check if we have a valid config response
if (response && !response.error) {
setConfig({
githubConfig: response.githubConfig || config.githubConfig,
giteaConfig: response.giteaConfig || config.giteaConfig,
scheduleConfig: response.scheduleConfig || config.scheduleConfig,
githubConfig:
response.githubConfig || config.githubConfig,
giteaConfig:
response.giteaConfig || config.giteaConfig,
scheduleConfig:
response.scheduleConfig || config.scheduleConfig,
});

// If we got a valid config from the server, it means it was previously saved
if (response.id) {
setIsConfigSaved(true);
}
if (response.id) setIsConfigSaved(true);
}
// If there's an error, we'll just use the default config defined in state

setIsLoading(false);
} catch (error) {
// Don't show error for first-time users, just use the default config
console.warn("Could not fetch configuration, using defaults:", error);
} finally {
setIsLoading(false);
console.warn(
'Could not fetch configuration, using defaults:',
error,
);
}
setIsLoading(false);
};

fetchConfig();
}, [user]);

useEffect(() => {
const generateDockerCode = () => {
return `services:
const generateDockerCode = () => `
services:
gitea-mirror:
image: arunavo4/gitea-mirror:latest
restart: unless-stopped
@@ -243,27 +213,93 @@ export function ConfigTabs() {
- GITEA_ORGANIZATION=${config.giteaConfig.organization}
- GITEA_ORG_VISIBILITY=${config.giteaConfig.visibility}
- DELAY=${config.scheduleConfig.interval}`;
};

const code = generateDockerCode();
setDockerCode(code);
setDockerCode(generateDockerCode());
}, [config]);
const handleCopyToClipboard = (text: string) => {
navigator.clipboard.writeText(text).then(
() => {
setIsCopied(true);
toast.success("Docker configuration copied to clipboard!");
toast.success('Docker configuration copied to clipboard!');
setTimeout(() => setIsCopied(false), 2000);
},
(err) => {
toast.error("Could not copy text to clipboard.");
}
() => toast.error('Could not copy text to clipboard.'),
);
};

function ConfigCardSkeleton() {
return (
<Card>
<CardHeader className="flex-row justify-between">
<div className="flex flex-col gap-y-1.5 m-0">
<Skeleton className="h-6 w-48" />
<Skeleton className="h-4 w-72" />
</div>
<div className="flex gap-x-4">
<Skeleton className="h-10 w-36" />
<Skeleton className="h-10 w-36" />
</div>
</CardHeader>
<CardContent>
<div className="flex flex-col gap-y-4">
<div className="flex gap-x-4">
<div className="w-1/2 border rounded-lg p-4">
<div className="flex justify-between items-center mb-4">
<Skeleton className="h-6 w-40" />
<Skeleton className="h-9 w-32" />
</div>
<div className="space-y-4">
<Skeleton className="h-20 w-full" />
<Skeleton className="h-20 w-full" />
<Skeleton className="h-32 w-full" />
</div>
</div>
<div className="w-1/2 border rounded-lg p-4">
<div className="flex justify-between items-center mb-4">
<Skeleton className="h-6 w-40" />
<Skeleton className="h-9 w-32" />
</div>
<div className="space-y-4">
<Skeleton className="h-20 w-full" />
<Skeleton className="h-20 w-full" />
<Skeleton className="h-20 w-full" />
<Skeleton className="h-20 w-full" />
</div>
</div>
</div>
<div className="border rounded-lg p-4">
<div className="space-y-4">
<Skeleton className="h-8 w-48" />
<Skeleton className="h-16 w-full" />
<Skeleton className="h-8 w-32" />
</div>
</div>
</div>
</CardContent>
</Card>
);
}

function DockerConfigSkeleton() {
return (
<Card>
<CardHeader>
<Skeleton className="h-6 w-40" />
<Skeleton className="h-4 w-64" />
</CardHeader>
<CardContent className="relative">
<Skeleton className="h-8 w-8 absolute top-4 right-10 rounded-md" />
<Skeleton className="h-48 w-full rounded-md" />
</CardContent>
</Card>
);
}
return isLoading ? (
<div>loading...</div>
<div className="flex flex-col gap-y-6">
<ConfigCardSkeleton />
<DockerConfigSkeleton />
</div>
) : (
<div className="flex flex-col gap-y-6">
<Card>
@@ -275,17 +311,16 @@ export function ConfigTabs() {
mirroring.
</CardDescription>
</div>

<div className="flex gap-x-4">
<Button
onClick={handleImportGitHubData}
disabled={isSyncing || !isConfigSaved}
title={
!isConfigSaved
? "Save configuration first"
? 'Save configuration first'
: isSyncing
? "Import in progress"
: "Import GitHub Data"
? 'Import in progress'
: 'Import GitHub Data'
}
>
{isSyncing ? (
@@ -305,66 +340,57 @@ export function ConfigTabs() {
disabled={!isConfigFormValid()}
title={
!isConfigFormValid()
? "Please fill all required fields"
: "Save Configuration"
? 'Please fill all required fields'
: 'Save Configuration'
}
>
Save Configuration
</Button>
</div>
</CardHeader>

<CardContent>
<div className="flex flex-col gap-y-4">
<div className="flex gap-x-4">
<GitHubConfigForm
config={config.githubConfig}
setConfig={(update) =>
setConfig((prev) => ({
setConfig={update =>
setConfig(prev => ({
...prev,
githubConfig:
typeof update === "function"
typeof update === 'function'
? update(prev.githubConfig)
: update,
}))
}
/>

<GiteaConfigForm
config={config?.giteaConfig ?? ({} as GiteaConfig)}
setConfig={(update) =>
setConfig((prev) => ({
config={config.giteaConfig}
setConfig={update =>
setConfig(prev => ({
...prev,
giteaConfig:
typeof update === "function"
typeof update === 'function'
? update(prev.giteaConfig)
: update,
githubConfig: prev?.githubConfig ?? ({} as GitHubConfig),
scheduleConfig:
prev?.scheduleConfig ?? ({} as ScheduleConfig),
}))
}
/>
</div>

<ScheduleConfigForm
config={config?.scheduleConfig ?? ({} as ScheduleConfig)}
setConfig={(update) =>
setConfig((prev) => ({
config={config.scheduleConfig}
setConfig={update =>
setConfig(prev => ({
...prev,
scheduleConfig:
typeof update === "function"
typeof update === 'function'
? update(prev.scheduleConfig)
: update,
githubConfig: prev?.githubConfig ?? ({} as GitHubConfig),
giteaConfig: prev?.giteaConfig ?? ({} as GiteaConfig),
}))
}
/>
</div>
</CardContent>
</Card>
<Card>
<CardHeader>
<CardTitle>Docker Configuration</CardTitle>
@@ -372,7 +398,6 @@ export function ConfigTabs() {
Equivalent Docker configuration for your current settings.
</CardDescription>
</CardHeader>

<CardContent className="relative">
<Button
variant="outline"
@@ -386,7 +411,6 @@ export function ConfigTabs() {
<Copy className="text-muted-foreground" />
)}
</Button>

<pre className="bg-muted p-4 rounded-md overflow-auto text-sm">
{dockerCode}
</pre>
@@ -9,6 +9,8 @@ import { apiRequest } from "@/lib/utils";
import type { DashboardApiResponse } from "@/types/dashboard";
import { useSSE } from "@/hooks/useSEE";
import { toast } from "sonner";
import { Skeleton } from "@/components/ui/skeleton";
import { Card, CardContent, CardHeader, CardTitle } from "@/components/ui/card";

export function Dashboard() {
const { user } = useAuth();
@@ -59,8 +61,6 @@ export function Dashboard() {
return;
}

setIsLoading(false);

const response = await apiRequest<DashboardApiResponse>(
`/dashboard?userId=${user.id}`,
{
@@ -93,8 +93,61 @@ export function Dashboard() {
fetchDashboardData();
}, [user]);

// Status Card Skeleton component
function StatusCardSkeleton() {
return (
<Card className="overflow-hidden">
<CardHeader className="flex flex-row items-center justify-between pb-2 space-y-0">
<CardTitle className="text-sm font-medium">
<Skeleton className="h-4 w-24" />
</CardTitle>
<Skeleton className="h-4 w-4 rounded-full" />
</CardHeader>
<CardContent>
<Skeleton className="h-8 w-16 mb-1" />
<Skeleton className="h-3 w-32" />
</CardContent>
</Card>
);
}

return isLoading || !connected ? (
<div>loading...</div>
<div className="flex flex-col gap-y-6">
<div className="grid grid-cols-1 md:grid-cols-2 lg:grid-cols-4 gap-6">
<StatusCardSkeleton />
<StatusCardSkeleton />
<StatusCardSkeleton />
<StatusCardSkeleton />
</div>

<div className="flex gap-x-6 items-start">
{/* Repository List Skeleton */}
<div className="w-1/2 border rounded-lg p-4">
<div className="flex justify-between items-center mb-4">
<Skeleton className="h-6 w-32" />
<Skeleton className="h-9 w-24" />
</div>
<div className="space-y-3">
{Array.from({ length: 3 }).map((_, i) => (
<Skeleton key={i} className="h-16 w-full" />
))}
</div>
</div>

{/* Recent Activity Skeleton */}
<div className="w-1/2 border rounded-lg p-4">
<div className="flex justify-between items-center mb-4">
<Skeleton className="h-6 w-32" />
<Skeleton className="h-9 w-24" />
</div>
<div className="space-y-3">
{Array.from({ length: 3 }).map((_, i) => (
<Skeleton key={i} className="h-16 w-full" />
))}
</div>
</div>
</div>
</div>
) : (
<div className="flex flex-col gap-y-6">
<div className="grid grid-cols-1 md:grid-cols-2 lg:grid-cols-4 gap-6">
@@ -4,9 +4,10 @@ import { SiGitea } from "react-icons/si";
import { ModeToggle } from "@/components/theme/ModeToggle";
import { Avatar, AvatarFallback, AvatarImage } from "../ui/avatar";
import { toast } from "sonner";
import { Skeleton } from "@/components/ui/skeleton";

export function Header() {
const { user, logout } = useAuth();
const { user, logout, isLoading } = useAuth();

const handleLogout = async () => {
toast.success("Logged out successfully");
@@ -15,6 +16,16 @@ export function Header() {
logout();
};

// Auth buttons skeleton loader
function AuthButtonsSkeleton() {
return (
<>
<Skeleton className="h-10 w-10 rounded-full" /> {/* Avatar placeholder */}
<Skeleton className="h-10 w-24" /> {/* Button placeholder */}
</>
);
}

return (
<header className="border-b bg-background">
<div className="flex h-[4.5rem] items-center justify-between px-6">
@@ -25,7 +36,10 @@ export function Header() {

<div className="flex items-center gap-4">
<ModeToggle />
{user ? (

{isLoading ? (
<AuthButtonsSkeleton />
) : user ? (
<>
<Avatar>
<AvatarImage src="" alt="@shadcn" />
@@ -2,6 +2,7 @@ import { useEffect, useState } from "react";
import { cn } from "@/lib/utils";
import { ExternalLink } from "lucide-react";
import { links } from "@/data/Sidebar";
import { VersionInfo } from "./VersionInfo";

interface SidebarProps {
className?: string;
@@ -19,7 +20,7 @@ export function Sidebar({ className }: SidebarProps) {

return (
<aside className={cn("w-64 border-r bg-background", className)}>
<div className="flex flex-col h-full py-4">
<div className="flex flex-col h-full pt-4">
<nav className="flex flex-col gap-y-1 pl-2 pr-3">
{links.map((link, index) => {
const isActive = currentPath === link.href;
@@ -59,6 +60,7 @@ export function Sidebar({ className }: SidebarProps) {
<ExternalLink className="h-3 w-3" />
</a>
</div>
<VersionInfo />
</div>
</div>
</aside>
src/components/layout/VersionInfo.tsx (new file, 49 lines)
@@ -0,0 +1,49 @@
import { useEffect, useState } from "react";
import { healthApi } from "@/lib/api";

export function VersionInfo() {
const [versionInfo, setVersionInfo] = useState<{
current: string;
latest: string;
updateAvailable: boolean;
}>({
current: "loading...",
latest: "",
updateAvailable: false
});

useEffect(() => {
const fetchVersion = async () => {
try {
const healthData = await healthApi.check();
setVersionInfo({
current: healthData.version || "unknown",
latest: healthData.latestVersion || "unknown",
updateAvailable: healthData.updateAvailable || false
});
} catch (error) {
console.error("Failed to fetch version:", error);
setVersionInfo({
current: "unknown",
latest: "",
updateAvailable: false
});
}
};

fetchVersion();
}, []);

return (
<div className="text-xs text-muted-foreground text-center pt-2 pb-3 border-t border-border mt-2">
{versionInfo.updateAvailable ? (
<div className="flex flex-col">
<span>v{versionInfo.current}</span>
<span className="text-primary">v{versionInfo.latest} available</span>
</div>
) : (
<span>v{versionInfo.current}</span>
)}
</div>
);
}
@@ -2,7 +2,7 @@
title: "Architecture"
description: "Comprehensive overview of the Gitea Mirror application architecture."
order: 1
updatedDate: 2023-10-15
updatedDate: 2025-05-22
---

<div class="mb-6">
@@ -21,17 +21,18 @@ The application is built using:
- <span class="font-semibold text-foreground">Astro</span>: Web framework for the frontend
- <span class="font-semibold text-foreground">React</span>: Component library for interactive UI elements
- <span class="font-semibold text-foreground">Shadcn UI</span>: UI component library built on Tailwind CSS
- <span class="font-semibold text-foreground">SQLite</span>: Database for storing configuration and state
- <span class="font-semibold text-foreground">Node.js</span>: Runtime environment for the backend
- <span class="font-semibold text-foreground">SQLite</span>: Database for storing configuration, state, and events
- <span class="font-semibold text-foreground">Bun</span>: Runtime environment for the backend
- <span class="font-semibold text-foreground">Drizzle ORM</span>: Type-safe ORM for database interactions

## Architecture Diagram

```mermaid
graph TD
subgraph "Gitea Mirror"
Frontend["Frontend<br/>(Astro)"]
Backend["Backend<br/>(Node.js)"]
Database["Database<br/>(SQLite)"]
Frontend["Frontend<br/>(Astro + React)"]
Backend["Backend<br/>(Bun)"]
Database["Database<br/>(SQLite + Drizzle)"]

Frontend <--> Backend
Backend <--> Database
@@ -60,9 +61,9 @@ Key frontend components:
- **Configuration**: Settings for GitHub and Gitea connections
- **Activity Log**: Detailed log of mirroring operations

### Backend (Node.js)
### Backend (Bun)

The backend is built with Node.js and provides API endpoints for the frontend to interact with. It handles:
The backend is built with Bun and provides API endpoints for the frontend to interact with. It handles:

- Authentication and user management
- GitHub API integration
@@ -70,14 +71,15 @@ The backend is built with Node.js and provides API endpoints for the frontend to
- Mirroring operations
- Database interactions
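To make the endpoint pattern concrete, here is a minimal sketch of an Astro API route in the style this backend uses. It is an illustration only: the route path, the `userId` query parameter, and the imported names are assumptions for this sketch, not the project's actual handler.

```ts
// Hypothetical example of an Astro API route served by the Bun backend.
import type { APIRoute } from "astro";
import { db, mirrorJobs } from "@/lib/db"; // assumed exports
import { eq } from "drizzle-orm";

export const GET: APIRoute = async ({ url }) => {
  const userId = url.searchParams.get("userId");
  if (!userId) {
    return new Response(JSON.stringify({ error: "userId is required" }), { status: 400 });
  }

  // Type-safe query through Drizzle ORM
  const jobs = await db.select().from(mirrorJobs).where(eq(mirrorJobs.userId, userId));
  return new Response(JSON.stringify({ success: true, jobs }), {
    headers: { "Content-Type": "application/json" },
  });
};
```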
### Database (SQLite)
### Database (SQLite + Drizzle ORM)

SQLite is used for data persistence, storing:
SQLite with Bun's native SQLite driver is used for data persistence, with Drizzle ORM providing type-safe database interactions. The database stores:

- User accounts and authentication data
- GitHub and Gitea configuration
- Repository and organization information
- Mirroring job history and status
- Event notifications and their read status
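As a concrete illustration of that Drizzle layer, the sketch below reads a user's unread event notifications. The `db` and `events` exports match the schema shown later in this diff; the helper function itself is hypothetical.

```ts
// Hypothetical helper: fetch unread event notifications for a user.
import { db, events } from "@/lib/db"; // exports defined in src/lib/db
import { and, eq } from "drizzle-orm";

export async function getUnreadEvents(userId: string) {
  // Generates: SELECT * FROM events WHERE user_id = ? AND read = 0
  return db
    .select()
    .from(events)
    .where(and(eq(events.userId, userId), eq(events.read, false)));
}
```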
## Data Flow

@@ -93,11 +95,29 @@ SQLite is used for data persistence, storing:
gitea-mirror/
├── src/ # Source code
│ ├── components/ # React components
│ ├── content/ # Documentation and content
│ ├── layouts/ # Astro layout components
│ ├── lib/ # Utility functions and database
│ ├── pages/ # Astro pages and API routes
│ └── styles/ # CSS and Tailwind styles
├── public/ # Static assets
├── data/ # Database and persistent data
└── docker/ # Docker configuration
├── docker/ # Docker configuration
└── scripts/ # Utility scripts for deployment and maintenance
├── gitea-mirror-lxc-local.sh # Local LXC deployment script
└── manage-db.ts # Database management tool
```

## Deployment Options

Gitea Mirror supports multiple deployment options:

1. **Docker**: Run as a containerized application using Docker and docker-compose
2. **LXC Containers**: Deploy in Linux Containers (LXC) on Proxmox VE (using community script by [Tobias/CrazyWolf13](https://github.com/CrazyWolf13)) or local workstations
3. **Native**: Run directly on the host system using Bun runtime

Each deployment method has its own advantages:

- **Docker**: Isolation, easy updates, consistent environment
- **LXC**: Lightweight virtualization, better performance than Docker, system-level isolation
- **Native**: Best performance, direct access to system resources

@@ -2,7 +2,7 @@
title: "Configuration"
description: "Guide to configuring Gitea Mirror for your environment."
order: 2
updatedDate: 2023-10-15
updatedDate: 2025-05-22
---

<div class="mb-6">
@@ -23,15 +23,17 @@ The following environment variables can be used to configure Gitea Mirror:

| Variable | Description | Default Value | Example |
|----------|-------------|---------------|---------|
| `NODE_ENV` | Node environment (development, production, test) | `development` | `production` |
| `DATABASE_URL` | SQLite database URL | `sqlite://data/gitea-mirror.db` | `sqlite://path/to/your/database.db` |
| `JWT_SECRET` | Secret key for JWT authentication | `your-secret-key-change-this-in-production` | `your-secure-random-string` |
| `NODE_ENV` | Runtime environment (development, production, test) | `development` | `production` |
| `DATABASE_URL` | SQLite database URL | `file:data/gitea-mirror.db` | `file:path/to/your/database.db` |
| `JWT_SECRET` | Secret key for JWT authentication | Auto-generated secure random string | `your-secure-random-string` |
| `HOST` | Server host | `localhost` | `0.0.0.0` |
| `PORT` | Server port | `3000` | `8080` |
| `PORT` | Server port | `4321` | `8080` |

### Important Security Note

In production environments, you should always set a strong, unique `JWT_SECRET` to ensure secure authentication.
The application will automatically generate a secure random `JWT_SECRET` on first run if one isn't provided or if the default value is used. This generated secret is stored in the data directory for persistence across container restarts.

While this auto-generation feature provides good security by default, you can still explicitly set your own `JWT_SECRET` for complete control over your deployment.
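As an example, a production deployment might set these variables explicitly before starting the server; all values below are placeholders drawn from the table above:

```bash
# Example production environment (placeholder values)
export NODE_ENV=production
export DATABASE_URL=file:data/gitea-mirror.db
export JWT_SECRET="$(openssl rand -hex 32)"  # any strong random string works
export HOST=0.0.0.0
export PORT=4321
```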
## Web UI Configuration

@@ -118,3 +120,58 @@ Example patterns:
- `*` - All repositories
- `org-name/*` - All repositories in a specific organization
- `username/repo-name` - A specific repository
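The semantics of these patterns can be pictured with a small glob-style matcher. This is an illustration only, not the application's actual implementation:

```ts
// Hypothetical glob-style matcher for "owner/repo" include patterns.
function matchesPattern(fullName: string, pattern: string): boolean {
  // Escape regex metacharacters in the literal parts, then map "*" to ".*"
  const escaped = pattern
    .split("*")
    .map((part) => part.replace(/[.+?^${}()|[\]\\]/g, "\\$&"))
    .join(".*");
  return new RegExp(`^${escaped}$`).test(fullName);
}

matchesPattern("org-name/api", "org-name/*"); // true
matchesPattern("someone/other-repo", "username/repo-name"); // false
```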
### Database Management

Gitea Mirror includes several database management tools that can be run from the command line:

```bash
# Initialize the database (only if it doesn't exist)
bun run init-db

# Check database status
bun run check-db

# Fix database location issues
bun run fix-db

# Reset all users (for testing signup flow)
bun run reset-users

# Remove database files completely
bun run cleanup-db
```

### Event Management

Events in Gitea Mirror (such as repository mirroring operations) are stored in the SQLite database. You can manage these events using the following scripts:

```bash
# View all events in the database
bun scripts/check-events.ts

# Clean up old events (default: older than 7 days)
bun scripts/cleanup-events.ts

# Mark all events as read
bun scripts/mark-events-read.ts
```

### Health Check Endpoint

Gitea Mirror includes a built-in health check endpoint at `/api/health` that provides:

- System status and uptime
- Database connectivity check
- Memory usage statistics
- Environment information

You can use this endpoint for monitoring your deployment:

```bash
# Basic check (returns 200 OK if healthy)
curl -I http://your-server:port/api/health

# Detailed health information (JSON)
curl http://your-server:port/api/health
```
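For scripted monitoring, the JSON response can be filtered with standard tools. The field below matches the `HealthResponse` shape defined in `src/lib/api.ts` (shown later in this diff); adapt host and port to your setup:

```bash
# Exit non-zero when the database is unreachable (requires jq)
curl -s http://your-server:4321/api/health | jq -e '.database.connected' >/dev/null \
  || echo "gitea-mirror health check failed"
```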
@@ -2,7 +2,7 @@
title: "Quick Start Guide"
description: "Get started with Gitea Mirror quickly."
order: 3
updatedDate: 2023-10-15
updatedDate: 2025-05-22
---

<div class="mb-6">
@@ -16,13 +16,16 @@ Before you begin, make sure you have:

1. <span class="font-semibold text-foreground">A GitHub account with a personal access token</span>
2. <span class="font-semibold text-foreground">A Gitea instance with an access token</span>
3. <span class="font-semibold text-foreground">Docker and docker-compose (recommended) or Node.js 18+ installed</span>
3. <span class="font-semibold text-foreground">One of the following:</span>
- Docker and docker-compose (for Docker deployment)
- Bun 1.2.9+ (for native deployment)
- Proxmox VE or LXD (for LXC container deployment)

## Installation Options

Choose the installation method that works best for your environment.

### Using Docker (Recommended)
### Using Docker (Recommended for most users)

Docker provides the easiest way to get started with minimal configuration.

@@ -39,7 +42,7 @@ Docker provides the easiest way to get started with minimal configuration.

3. Access the application at [http://localhost:4321](http://localhost:4321)

### Manual Installation
### Using Bun (Native Installation)

If you prefer to run the application directly on your system:

@@ -51,7 +54,7 @@ If you prefer to run the application directly on your system:

2. Run the quick setup script:
```bash
pnpm setup
bun run setup
```
This installs dependencies and initializes the database.

@@ -59,17 +62,60 @@ If you prefer to run the application directly on your system:

**Development Mode:**
```bash
pnpm dev
bun run dev
```

Note: For Bun-specific features, use:
```bash
bunx --bun astro dev
```

**Production Mode:**
```bash
pnpm build
pnpm start
bun run build
bun run start
```

4. Access the application at [http://localhost:4321](http://localhost:4321)

### Using LXC Containers (Recommended for server deployments)

#### Proxmox VE (Online Installation)

For deploying on a Proxmox VE host with internet access:

```bash
# Optional env overrides: CTID HOSTNAME STORAGE DISK_SIZE CORES MEMORY BRIDGE IP_CONF
sudo bash -c "$(curl -fsSL https://raw.githubusercontent.com/arunavo4/gitea-mirror/main/scripts/gitea-mirror-lxc-proxmox.sh)"
```

This script:
- Creates a privileged LXC container
- Installs Bun and dependencies
- Clones and builds the application
- Sets up a systemd service

#### Local LXD (Offline-friendly Installation)

For testing on a local workstation or in environments without internet access:

1. Clone the repository locally:
```bash
git clone https://github.com/arunavo4/gitea-mirror.git
```

2. Download the Bun installer once:
```bash
curl -L -o /tmp/bun-linux-x64.zip https://github.com/oven-sh/bun/releases/latest/download/bun-linux-x64.zip
```

3. Run the local LXC installer:
```bash
sudo LOCAL_REPO_DIR=~/path/to/gitea-mirror ./gitea-mirror/scripts/gitea-mirror-lxc-local.sh
```

For more details on LXC deployment, see the [LXC Container Deployment Guide](https://github.com/arunavo4/gitea-mirror/blob/main/scripts/README-lxc.md).

## Initial Configuration

Follow these steps to configure Gitea Mirror for first use:
@@ -116,7 +162,12 @@ If you encounter any issues:
- Check the Activity Log for detailed error messages
- Verify your GitHub and Gitea tokens have the correct permissions
- Ensure your Gitea instance is accessible from the machine running Gitea Mirror
- For Docker installations, check container logs with `docker logs gitea-mirror`
- Check logs based on your deployment method:
- Docker: `docker logs gitea-mirror`
- Native: Check the terminal output or system logs
- LXC: `systemctl status gitea-mirror` or `journalctl -u gitea-mirror -f`
- Use the health check endpoint to verify system status: `curl http://your-server:4321/api/health`
- For database issues, try the database management tools: `bun run check-db` or `bun run fix-db`

## Next Steps

@@ -125,3 +176,7 @@ After your initial setup:
- Explore the dashboard for an overview of your mirroring status
- Set up automatic mirroring schedules for hands-off operation
- Configure organization mirroring for team repositories
- Check out the [Configuration Guide](/configuration) for advanced settings
- Review the [Architecture Documentation](/architecture) to understand the system
- For server deployments, set up monitoring using the health check endpoint
- Consider setting up a cron job to clean up old events: `bun scripts/cleanup-events.ts`
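For that cron job, an entry along these lines works; the install path below is a placeholder:

```bash
# Run the event cleanup nightly at 03:00 (adjust the path to your install)
0 3 * * * cd /opt/gitea-mirror && bun scripts/cleanup-events.ts
```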
@@ -1,34 +1,61 @@
import { useEffect, useState, useRef } from "react";
import { useEffect, useState, useRef, useCallback } from "react";
import type { MirrorJob } from "@/lib/db/schema";

interface UseSSEOptions {
userId?: string;
onMessage: (data: MirrorJob) => void;
maxReconnectAttempts?: number;
reconnectDelay?: number;
}

export const useSSE = ({ userId, onMessage }: UseSSEOptions) => {
export const useSSE = ({
userId,
onMessage,
maxReconnectAttempts = 5,
reconnectDelay = 3000
}: UseSSEOptions) => {
const [connected, setConnected] = useState<boolean>(false);
const [reconnectCount, setReconnectCount] = useState<number>(0);
const onMessageRef = useRef(onMessage);
const eventSourceRef = useRef<EventSource | null>(null);
const reconnectTimeoutRef = useRef<number | null>(null);

// Update the ref when onMessage changes
useEffect(() => {
onMessageRef.current = onMessage;
}, [onMessage]);

useEffect(() => {
// Create a stable connect function that can be called for reconnection
const connect = useCallback(() => {
if (!userId) return;

// Clean up any existing connection
if (eventSourceRef.current) {
eventSourceRef.current.close();
}

// Clear any pending reconnect timeout
if (reconnectTimeoutRef.current) {
window.clearTimeout(reconnectTimeoutRef.current);
reconnectTimeoutRef.current = null;
}

// Create new EventSource connection
const eventSource = new EventSource(`/api/sse?userId=${userId}`);
eventSourceRef.current = eventSource;

const handleMessage = (event: MessageEvent) => {
try {
// Check if this is an error message from our server
if (event.data.startsWith('{"error":')) {
console.warn("SSE server error:", event.data);
return;
}

const parsedMessage: MirrorJob = JSON.parse(event.data);

// console.log("Received new log:", parsedMessage);

onMessageRef.current(parsedMessage); // Use ref instead of prop directly
onMessageRef.current(parsedMessage);
} catch (error) {
console.error("Error parsing message:", error);
console.error("Error parsing SSE message:", error);
}
};

@@ -36,19 +63,50 @@ export const useSSE = ({ userId, onMessage }: UseSSEOptions) => {

eventSource.onopen = () => {
setConnected(true);
setReconnectCount(0); // Reset reconnect counter on successful connection
console.log(`Connected to SSE for user: ${userId}`);
};

eventSource.onerror = () => {
console.error("SSE connection error");
eventSource.onerror = (error) => {
console.error("SSE connection error:", error);
setConnected(false);
eventSource.close();
};
eventSourceRef.current = null;

return () => {
eventSource.close();
// Attempt to reconnect if we haven't exceeded max attempts
if (reconnectCount < maxReconnectAttempts) {
const nextReconnectDelay = Math.min(reconnectDelay * Math.pow(1.5, reconnectCount), 30000);
console.log(`Attempting to reconnect in ${nextReconnectDelay}ms (attempt ${reconnectCount + 1}/${maxReconnectAttempts})`);

reconnectTimeoutRef.current = window.setTimeout(() => {
setReconnectCount(prev => prev + 1);
connect();
}, nextReconnectDelay);
} else {
console.error(`Failed to reconnect after ${maxReconnectAttempts} attempts`);
}
};
}, [userId]); // Only depends on userId now
}, [userId, maxReconnectAttempts, reconnectDelay, reconnectCount]);

// Set up the connection
useEffect(() => {
if (!userId) return;

connect();

// Cleanup function
return () => {
if (eventSourceRef.current) {
eventSourceRef.current.close();
eventSourceRef.current = null;
}

if (reconnectTimeoutRef.current) {
window.clearTimeout(reconnectTimeoutRef.current);
reconnectTimeoutRef.current = null;
}
};
}, [userId, connect]);

return { connected };
};
@@ -88,3 +88,84 @@ export const giteaApi = {
body: JSON.stringify({ url, token }),
}),
};

// Health API
export interface HealthResponse {
status: "ok" | "error";
timestamp: string;
version: string;
latestVersion: string;
updateAvailable: boolean;
database: {
connected: boolean;
message: string;
};
system: {
uptime: {
startTime: string;
uptimeMs: number;
formatted: string;
};
memory: {
rss: string;
heapTotal: string;
heapUsed: string;
external: string;
systemTotal: string;
systemFree: string;
};
os: {
platform: string;
version: string;
arch: string;
};
env: string;
};
error?: string;
}

export const healthApi = {
check: async (): Promise<HealthResponse> => {
try {
const response = await fetch(`${API_BASE}/health`);

if (!response.ok) {
const errorData = await response.json().catch(() => ({
status: "error",
error: "Failed to parse error response",
}));

return {
...errorData,
status: "error",
timestamp: new Date().toISOString(),
} as HealthResponse;
}

return await response.json();
} catch (error) {
return {
status: "error",
timestamp: new Date().toISOString(),
error: error instanceof Error ? error.message : "Unknown error checking health",
version: "unknown",
latestVersion: "unknown",
updateAvailable: false,
database: { connected: false, message: "Failed to connect to API" },
system: {
uptime: { startTime: "", uptimeMs: 0, formatted: "N/A" },
memory: {
rss: "N/A",
heapTotal: "N/A",
heapUsed: "N/A",
external: "N/A",
systemTotal: "N/A",
systemFree: "N/A",
},
os: { platform: "", version: "", arch: "" },
env: "",
},
};
}
},
};
@@ -4,7 +4,7 @@

// Environment variables
export const ENV = {
// Node environment (development, production, test)
// Runtime environment (development, production, test)
NODE_ENV: process.env.NODE_ENV || "development",

// Database URL - use SQLite by default
src/lib/db/index.test.ts (new file, 42 lines)
@@ -0,0 +1,42 @@
import { describe, test, expect, mock, beforeAll, afterAll } from "bun:test";
import { drizzle } from "drizzle-orm/bun-sqlite";

// Silence console logs during tests
let originalConsoleLog: typeof console.log;

beforeAll(() => {
// Save original console.log
originalConsoleLog = console.log;
// Replace with no-op function
console.log = () => {};
});

afterAll(() => {
// Restore original console.log
console.log = originalConsoleLog;
});

// Mock the database module
mock.module("bun:sqlite", () => {
return {
Database: mock(function() {
return {
query: mock(() => ({
all: mock(() => []),
run: mock(() => ({}))
}))
};
})
};
});

// Mock the database tables
describe("Database Schema", () => {
test("database connection can be created", async () => {
// Import the db from the module
const { db } = await import("./index");

// Check that db is defined
expect(db).toBeDefined();
});
});
@@ -1,21 +1,56 @@
import { z } from "zod";
import { createClient } from "@libsql/client";
import { drizzle } from "drizzle-orm/libsql";
import { sqliteTable, text, integer } from "drizzle-orm/sqlite-core";

import { Database } from "bun:sqlite";
import { drizzle } from "drizzle-orm/bun-sqlite";
import fs from "fs";
import path from "path";
import { configSchema } from "./schema";

// Define the database URL - for development we'll use a local SQLite file
const dataDir = path.join(process.cwd(), "data");
const dbUrl =
  process.env.DATABASE_URL || `file:${path.join(dataDir, "gitea-mirror.db")}`;
// Ensure data directory exists
if (!fs.existsSync(dataDir)) {
  fs.mkdirSync(dataDir, { recursive: true });
}

// Create a client connection to the database
export const client = createClient({ url: dbUrl });
const dbPath = path.join(dataDir, "gitea-mirror.db");

// Create a drizzle instance
export const db = drizzle(client);
// Create an empty database file if it doesn't exist
if (!fs.existsSync(dbPath)) {
  fs.writeFileSync(dbPath, "");
}

// Create SQLite database instance using Bun's native driver
let sqlite: Database;
try {
  sqlite = new Database(dbPath);
  console.log("Successfully connected to SQLite database using Bun's native driver");
} catch (error) {
  console.error("Error opening database:", error);
  throw error;
}

// Create drizzle instance with the SQLite client
export const db = drizzle({ client: sqlite });

// Simple async wrapper around SQLite API for compatibility
// This maintains backward compatibility with existing code
export const client = {
  async execute(sql: string, params?: any[]) {
    try {
      const stmt = sqlite.query(sql);
      if (/^\s*select/i.test(sql)) {
        const rows = stmt.all(params ?? []);
        return { rows } as { rows: any[] };
      }
      stmt.run(params ?? []);
      return { rows: [] } as { rows: any[] };
    } catch (error) {
      console.error(`Error executing SQL: ${sql}`, error);
      throw error;
    }
  },
};
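The exported client above is no longer the libsql client, only an async wrapper that keeps its `execute` shape on top of Bun's driver. A minimal usage sketch, with an illustrative table and values that are not part of this diff:

import { client } from "@/lib/db";

// SELECT statements resolve to { rows }, matching the old libsql client shape.
const { rows } = await client.execute(
  "SELECT id, name FROM repositories WHERE user_id = ?",
  ["user-id"], // illustrative ID
);

// Non-SELECT statements resolve with an empty rows array.
await client.execute("UPDATE repositories SET status = ? WHERE id = ?", [
  "mirrored",
  rows[0]?.id,
]);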
// Define the tables
export const users = sqliteTable("users", {
@@ -31,6 +66,18 @@ export const users = sqliteTable("users", {
    .default(new Date()),
});

// New table for event notifications (replacing Redis pub/sub)
export const events = sqliteTable("events", {
  id: text("id").primaryKey(),
  userId: text("user_id").notNull().references(() => users.id),
  channel: text("channel").notNull(),
  payload: text("payload", { mode: "json" }).notNull(),
  read: integer("read", { mode: "boolean" }).notNull().default(false),
  createdAt: integer("created_at", { mode: "timestamp" })
    .notNull()
    .default(new Date()),
});

const githubSchema = configSchema.shape.githubConfig;
const giteaSchema = configSchema.shape.giteaConfig;
const scheduleSchema = configSchema.shape.scheduleConfig;
@@ -142,6 +189,18 @@ export const mirrorJobs = sqliteTable("mirror_jobs", {
  timestamp: integer("timestamp", { mode: "timestamp" })
    .notNull()
    .default(new Date()),

  // New fields for job resilience
  jobType: text("job_type").notNull().default("mirror"),
  batchId: text("batch_id"),
  totalItems: integer("total_items"),
  completedItems: integer("completed_items").default(0),
  itemIds: text("item_ids", { mode: "json" }).$type<string[]>(),
  completedItemIds: text("completed_item_ids", { mode: "json" }).$type<string[]>().default([]),
  inProgress: integer("in_progress", { mode: "boolean" }).notNull().default(false),
  startedAt: integer("started_at", { mode: "timestamp" }),
  completedAt: integer("completed_at", { mode: "timestamp" }),
  lastCheckpoint: integer("last_checkpoint", { mode: "timestamp" }),
});

export const organizations = sqliteTable("organizations", {
@@ -111,6 +111,18 @@ export const mirrorJobSchema = z.object({
  status: repoStatusEnum.default("imported"),
  message: z.string(),
  timestamp: z.date().default(() => new Date()),

  // New fields for job resilience
  jobType: z.enum(["mirror", "sync", "retry"]).default("mirror"),
  batchId: z.string().uuid().optional(), // Group related jobs together
  totalItems: z.number().optional(), // Total number of items to process
  completedItems: z.number().optional(), // Number of items completed
  itemIds: z.array(z.string()).optional(), // IDs of items to process
  completedItemIds: z.array(z.string()).optional(), // IDs of completed items
  inProgress: z.boolean().default(false), // Whether the job is currently running
  startedAt: z.date().optional(), // When the job started
  completedAt: z.date().optional(), // When the job completed
  lastCheckpoint: z.date().optional(), // Last time progress was saved
});

export type MirrorJob = z.infer<typeof mirrorJobSchema>;
@@ -140,3 +152,15 @@ export const organizationSchema = z.object({
});

export type Organization = z.infer<typeof organizationSchema>;

// Event schema (for SQLite-based pub/sub)
export const eventSchema = z.object({
  id: z.string().uuid().optional(),
  userId: z.string().uuid(),
  channel: z.string().min(1),
  payload: z.any(),
  read: z.boolean().default(false),
  createdAt: z.date().default(() => new Date()),
});

export type Event = z.infer<typeof eventSchema>;
161
src/lib/events.ts
Normal file
@@ -0,0 +1,161 @@
import { v4 as uuidv4 } from "uuid";
import { db, events } from "./db";
import { eq, and, gt, lt } from "drizzle-orm";

/**
 * Publishes an event to a specific channel for a user
 * This replaces Redis pub/sub with SQLite storage
 */
export async function publishEvent({
  userId,
  channel,
  payload,
}: {
  userId: string;
  channel: string;
  payload: any;
}): Promise<string> {
  try {
    const eventId = uuidv4();
    console.log(`Publishing event to channel ${channel} for user ${userId}`);

    // Insert the event into the SQLite database
    await db.insert(events).values({
      id: eventId,
      userId,
      channel,
      payload: JSON.stringify(payload),
      createdAt: new Date(),
    });

    console.log(`Event published successfully with ID ${eventId}`);
    return eventId;
  } catch (error) {
    console.error("Error publishing event:", error);
    throw new Error("Failed to publish event");
  }
}

/**
 * Gets new events for a specific user and channel
 * This replaces Redis subscribe with SQLite polling
 */
export async function getNewEvents({
  userId,
  channel,
  lastEventTime,
}: {
  userId: string;
  channel: string;
  lastEventTime?: Date;
}): Promise<any[]> {
  try {
    console.log(`Getting new events for user ${userId} in channel ${channel}`);
    if (lastEventTime) {
      console.log(`Looking for events after ${lastEventTime.toISOString()}`);
    }
    // Build the filter conditions up front; a drizzle query cannot be
    // extended with a second .where() after .orderBy(), so the optional
    // time filter is added to the condition list before the query is built
    const conditions = [
      eq(events.userId, userId),
      eq(events.channel, channel),
      eq(events.read, false),
    ];

    // Add time filter if provided
    if (lastEventTime) {
      conditions.push(gt(events.createdAt, lastEventTime));
    }

    // Execute the query
    const newEvents = await db
      .select()
      .from(events)
      .where(and(...conditions))
      .orderBy(events.createdAt);
    console.log(`Found ${newEvents.length} new events`);

    // Mark events as read
    if (newEvents.length > 0) {
      console.log(`Marking ${newEvents.length} events as read`);
      await db
        .update(events)
        .set({ read: true })
        .where(
          and(
            eq(events.userId, userId),
            eq(events.channel, channel),
            eq(events.read, false)
          )
        );
    }

    // Parse the payloads
    return newEvents.map(event => ({
      ...event,
      payload: JSON.parse(event.payload as string),
    }));
  } catch (error) {
    console.error("Error getting new events:", error);
    return [];
  }
}

/**
 * Cleans up old events to prevent the database from growing too large
 * Should be called periodically (e.g., daily via a cron job)
 *
 * @param maxAgeInDays Number of days to keep events (default: 7)
 * @param cleanupUnreadAfterDays Number of days after which to clean up unread events (default: 2x maxAgeInDays)
 * @returns Object containing the number of read and unread events deleted
 */
export async function cleanupOldEvents(
  maxAgeInDays: number = 7,
  cleanupUnreadAfterDays?: number
): Promise<{ readEventsDeleted: number; unreadEventsDeleted: number }> {
  try {
    console.log(`Cleaning up events older than ${maxAgeInDays} days...`);

    // Calculate the cutoff date for read events
    const cutoffDate = new Date();
    cutoffDate.setDate(cutoffDate.getDate() - maxAgeInDays);

    // Delete read events older than the cutoff date
    const readResult = await db
      .delete(events)
      .where(
        and(
          eq(events.read, true),
          lt(events.createdAt, cutoffDate)
        )
      );

    const readEventsDeleted = readResult.changes || 0;
    console.log(`Deleted ${readEventsDeleted} read events`);

    // Calculate the cutoff date for unread events (default to 2x the retention period)
    const unreadCutoffDate = new Date();
    const unreadMaxAge = cleanupUnreadAfterDays || (maxAgeInDays * 2);
    unreadCutoffDate.setDate(unreadCutoffDate.getDate() - unreadMaxAge);

    // Delete unread events that are significantly older
    const unreadResult = await db
      .delete(events)
      .where(
        and(
          eq(events.read, false),
          lt(events.createdAt, unreadCutoffDate)
        )
      );

    const unreadEventsDeleted = unreadResult.changes || 0;
    console.log(`Deleted ${unreadEventsDeleted} unread events`);

    return { readEventsDeleted, unreadEventsDeleted };
  } catch (error) {
    console.error("Error cleaning up old events:", error);
    return { readEventsDeleted: 0, unreadEventsDeleted: 0 };
  }
}
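Together these functions replace the Redis publish/subscribe pair with an insert-then-poll cycle against the events table. A minimal consumer sketch, assuming an illustrative user ID, channel name, and poll interval that are not part of this diff:

import { publishEvent, getNewEvents, cleanupOldEvents } from "@/lib/events";

// Producer: write one event row.
await publishEvent({
  userId: "user-id", // illustrative
  channel: "mirror-status:user-id",
  payload: { status: "mirroring" },
});

// Consumer: poll for unread events; getNewEvents marks them read.
let lastEventTime: Date | undefined;
setInterval(async () => {
  const fresh = await getNewEvents({
    userId: "user-id",
    channel: "mirror-status:user-id",
    lastEventTime,
  });
  for (const event of fresh) {
    console.log("received:", event.payload);
  }
  if (fresh.length > 0) {
    lastEventTime = fresh[fresh.length - 1].createdAt;
  }
}, 2000);

// Housekeeping: prune old rows roughly once a day.
setInterval(() => cleanupOldEvents(7), 24 * 60 * 60 * 1000);

Since events are marked read on first fetch, the lastEventTime argument is an optimization here rather than the correctness mechanism.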
120
src/lib/gitea.test.ts
Normal file
@@ -0,0 +1,120 @@
import { describe, test, expect, mock, beforeEach, afterEach } from "bun:test";
import { Octokit } from "@octokit/rest";
import { repoStatusEnum } from "@/types/Repository";

// Mock the isRepoPresentInGitea function
const mockIsRepoPresentInGitea = mock(() => Promise.resolve(false));

// Mock the database module
mock.module("@/lib/db", () => {
  return {
    db: {
      update: () => ({
        set: () => ({
          where: () => Promise.resolve()
        })
      })
    },
    repositories: {},
    organizations: {}
  };
});

// Mock the helpers module
mock.module("@/lib/helpers", () => {
  return {
    createMirrorJob: mock(() => Promise.resolve("job-id"))
  };
});

// Mock superagent
mock.module("superagent", () => {
  const mockPost = mock(() => ({
    set: () => ({
      set: () => ({
        send: () => Promise.resolve({ body: { id: 123 } })
      })
    })
  }));

  const mockGet = mock(() => ({
    set: () => Promise.resolve({ body: [] })
  }));

  return {
    post: mockPost,
    get: mockGet
  };
});

// Mock the gitea module itself
mock.module("./gitea", () => {
  return {
    isRepoPresentInGitea: mockIsRepoPresentInGitea,
    mirrorGithubRepoToGitea: mock(async () => {}),
    mirrorGitHubOrgRepoToGiteaOrg: mock(async () => {})
  };
});

describe("Gitea Repository Mirroring", () => {
  // Mock console.log and console.error to prevent test output noise
  let originalConsoleLog: typeof console.log;
  let originalConsoleError: typeof console.error;

  beforeEach(() => {
    originalConsoleLog = console.log;
    originalConsoleError = console.error;
    console.log = mock(() => {});
    console.error = mock(() => {});
  });

  afterEach(() => {
    console.log = originalConsoleLog;
    console.error = originalConsoleError;
  });

  test("mirrorGithubRepoToGitea handles private repositories correctly", async () => {
    // Import the mocked function
    const { mirrorGithubRepoToGitea } = await import("./gitea");

    // Create mock Octokit instance
    const octokit = {} as Octokit;

    // Create mock repository (private)
    const repository = {
      id: "repo-id",
      name: "test-repo",
      fullName: "testuser/test-repo",
      url: "https://github.com/testuser/test-repo",
      cloneUrl: "https://github.com/testuser/test-repo.git",
      owner: "testuser",
      isPrivate: true,
      status: repoStatusEnum.parse("imported")
    };

    // Create mock config
    const config = {
      id: "config-id",
      userId: "user-id",
      githubConfig: {
        token: "github-token",
        mirrorIssues: false
      },
      giteaConfig: {
        url: "https://gitea.example.com",
        token: "gitea-token",
        username: "giteauser"
      }
    };

    // Call the function
    await mirrorGithubRepoToGitea({
      octokit,
      repository: repository as any,
      config
    });

    // Check that the function was called
    expect(mirrorGithubRepoToGitea).toHaveBeenCalled();
  });
});
229
src/lib/gitea.ts
@@ -601,11 +601,22 @@ export async function mirrorGitHubOrgToGitea({
    .from(repositories)
    .where(eq(repositories.organization, organization.name));

  for (const repo of orgRepos) {
    await mirrorGitHubRepoToGiteaOrg({
      octokit,
      config,
      repository: {
  if (orgRepos.length === 0) {
    console.log(`No repositories found for organization ${organization.name}`);
    return;
  }

  console.log(`Mirroring ${orgRepos.length} repositories for organization ${organization.name}`);

  // Import the processWithRetry function
  const { processWithRetry } = await import("@/lib/utils/concurrency");

  // Process repositories in parallel with concurrency control
  await processWithRetry(
    orgRepos,
    async (repo) => {
      // Prepare repository data
      const repoData = {
        ...repo,
        status: repo.status as RepoStatus,
        visibility: repo.visibility as RepositoryVisibility,
@@ -614,11 +625,37 @@ export async function mirrorGitHubOrgToGitea({
        organization: repo.organization ?? undefined,
        forkedFrom: repo.forkedFrom ?? undefined,
        mirroredLocation: repo.mirroredLocation || "",
      };

      // Log the start of mirroring
      console.log(`Starting mirror for repository: ${repo.name} in organization ${organization.name}`);

      // Mirror the repository
      await mirrorGitHubRepoToGiteaOrg({
        octokit,
        config,
        repository: repoData,
        giteaOrgId,
        orgName: organization.name,
      });

      return repo;
    },
    {
      concurrencyLimit: 3, // Process 3 repositories at a time
      maxRetries: 2,
      retryDelay: 2000,
      onProgress: (completed, total, result) => {
        const percentComplete = Math.round((completed / total) * 100);
        if (result) {
          console.log(`Mirrored repository "${result.name}" in organization ${organization.name} (${completed}/${total}, ${percentComplete}%)`);
        }
      },
      giteaOrgId,
      orgName: organization.name,
    });
  }
      onRetry: (repo, error, attempt) => {
        console.log(`Retrying repository ${repo.name} in organization ${organization.name} (attempt ${attempt}): ${error.message}`);
      }
    }
  );

  console.log(`Organization ${organization.name} mirrored successfully`);
@@ -837,7 +874,15 @@ export const mirrorGitRepoIssuesToGitea = async ({
    (res) => res.data
  );

  console.log(`Mirroring ${issues.length} issues from ${repository.fullName}`);
  // Filter out pull requests
  const filteredIssues = issues.filter(issue => !(issue as any).pull_request);

  console.log(`Mirroring ${filteredIssues.length} issues from ${repository.fullName}`);

  if (filteredIssues.length === 0) {
    console.log(`No issues to mirror for ${repository.fullName}`);
    return;
  }

  // Get existing labels from Gitea
  const giteaLabelsRes = await superagent
@@ -851,58 +896,60 @@ export const mirrorGitRepoIssuesToGitea = async ({
    giteaLabels.map((label: any) => [label.name, label.id])
  );

  for (const issue of issues) {
    if ((issue as any).pull_request) {
      continue;
    }
  // Import the processWithRetry function
  const { processWithRetry } = await import("@/lib/utils/concurrency");

    const githubLabelNames =
      issue.labels
        ?.map((l) => (typeof l === "string" ? l : l.name))
        .filter((l): l is string => !!l) || [];
  // Process issues in parallel with concurrency control
  await processWithRetry(
    filteredIssues,
    async (issue) => {
      const githubLabelNames =
        issue.labels
          ?.map((l) => (typeof l === "string" ? l : l.name))
          .filter((l): l is string => !!l) || [];

    const giteaLabelIds: number[] = [];
      const giteaLabelIds: number[] = [];

    // Resolve or create labels in Gitea
    for (const name of githubLabelNames) {
      if (labelMap.has(name)) {
        giteaLabelIds.push(labelMap.get(name)!);
      } else {
        try {
          const created = await superagent
            .post(
              `${config.giteaConfig.url}/api/v1/repos/${repoOrigin}/${repository.name}/labels`
            )
            .set("Authorization", `token ${config.giteaConfig.token}`)
            .send({ name, color: "#ededed" }); // Default color
      // Resolve or create labels in Gitea
      for (const name of githubLabelNames) {
        if (labelMap.has(name)) {
          giteaLabelIds.push(labelMap.get(name)!);
        } else {
          try {
            const created = await superagent
              .post(
                `${config.giteaConfig.url}/api/v1/repos/${repoOrigin}/${repository.name}/labels`
              )
              .set("Authorization", `token ${config.giteaConfig.token}`)
              .send({ name, color: "#ededed" }); // Default color

          labelMap.set(name, created.body.id);
          giteaLabelIds.push(created.body.id);
        } catch (labelErr) {
          console.error(
            `Failed to create label "${name}" in Gitea: ${labelErr}`
          );
            labelMap.set(name, created.body.id);
            giteaLabelIds.push(created.body.id);
          } catch (labelErr) {
            console.error(
              `Failed to create label "${name}" in Gitea: ${labelErr}`
            );
          }
        }
      }

    const originalAssignees =
      issue.assignees && issue.assignees.length > 0
        ? `\n\nOriginally assigned to: ${issue.assignees
            .map((a) => `@${a.login}`)
            .join(", ")} on GitHub.`
        : "";
      const originalAssignees =
        issue.assignees && issue.assignees.length > 0
          ? `\n\nOriginally assigned to: ${issue.assignees
              .map((a) => `@${a.login}`)
              .join(", ")} on GitHub.`
          : "";

    const issuePayload: any = {
      title: issue.title,
      body: `Originally created by @${
        issue.user?.login
      } on GitHub.${originalAssignees}\n\n${issue.body || ""}`,
      closed: issue.state === "closed",
      labels: giteaLabelIds,
    };
      const issuePayload: any = {
        title: issue.title,
        body: `Originally created by @${
          issue.user?.login
        } on GitHub.${originalAssignees}\n\n${issue.body || ""}`,
        closed: issue.state === "closed",
        labels: giteaLabelIds,
      };

      try {
        // Create the issue in Gitea
        const createdIssue = await superagent
          .post(
            `${config.giteaConfig.url}/api/v1/repos/${repoOrigin}/${repository.name}/issues`
@@ -922,41 +969,49 @@ export const mirrorGitRepoIssuesToGitea = async ({
        (res) => res.data
      );

      for (const comment of comments) {
        try {
          await superagent
            .post(
              `${config.giteaConfig.url}/api/v1/repos/${repoOrigin}/${repository.name}/issues/${createdIssue.body.number}/comments`
            )
            .set("Authorization", `token ${config.giteaConfig.token}`)
            .send({
              body: `@${comment.user?.login} commented on GitHub:\n\n${comment.body}`,
            });
        } catch (commentErr) {
          console.error(
            `Failed to copy comment to Gitea for issue "${issue.title}": ${
              commentErr instanceof Error
                ? commentErr.message
                : String(commentErr)
            }`
          );
        }
      // Process comments in parallel with concurrency control
      if (comments.length > 0) {
        await processWithRetry(
          comments,
          async (comment) => {
            await superagent
              .post(
                `${config.giteaConfig.url}/api/v1/repos/${repoOrigin}/${repository.name}/issues/${createdIssue.body.number}/comments`
              )
              .set("Authorization", `token ${config.giteaConfig.token}`)
              .send({
                body: `@${comment.user?.login} commented on GitHub:\n\n${comment.body}`,
              });
            return comment;
          },
          {
            concurrencyLimit: 5,
            maxRetries: 2,
            retryDelay: 1000,
            onRetry: (comment, error, attempt) => {
              console.log(`Retrying comment (attempt ${attempt}): ${error.message}`);
            }
          }
        );
      }
    } catch (err) {
      if (err instanceof Error && (err as any).response) {
        console.error(
          `Failed to create issue "${issue.title}" in Gitea: ${err.message}`
        );
        console.error(
          `Response body: ${JSON.stringify((err as any).response.body)}`
        );
      } else {
        console.error(
          `Failed to create issue "${issue.title}" in Gitea: ${
            err instanceof Error ? err.message : String(err)
          }`
        );

      return issue;
    },
    {
      concurrencyLimit: 3, // Process 3 issues at a time
      maxRetries: 2,
      retryDelay: 2000,
      onProgress: (completed, total, result) => {
        const percentComplete = Math.round((completed / total) * 100);
        if (result) {
          console.log(`Mirrored issue "${result.title}" (${completed}/${total}, ${percentComplete}%)`);
        }
      },
      onRetry: (issue, error, attempt) => {
        console.log(`Retrying issue "${issue.title}" (attempt ${attempt}): ${error.message}`);
      }
    }
  }
  );

  console.log(`Completed mirroring ${filteredIssues.length} issues for ${repository.fullName}`);
};
@@ -1,7 +1,7 @@
import type { RepoStatus } from "@/types/Repository";
import { db, mirrorJobs } from "./db";
import { eq, and, or, lt, isNull } from "drizzle-orm";
import { v4 as uuidv4 } from "uuid";
import { redisPublisher } from "./redis";
import { publishEvent } from "./events";

export async function createMirrorJob({
  userId,
@@ -12,6 +12,11 @@ export async function createMirrorJob({
  message,
  status,
  details,
  jobType,
  batchId,
  totalItems,
  itemIds,
  inProgress,
}: {
  userId: string;
  organizationId?: string;
@@ -21,6 +26,11 @@ export async function createMirrorJob({
  details?: string;
  message: string;
  status: RepoStatus;
  jobType?: "mirror" | "sync" | "retry";
  batchId?: string;
  totalItems?: number;
  itemIds?: string[];
  inProgress?: boolean;
}) {
  const jobId = uuidv4();
  const currentTimestamp = new Date();
@@ -32,18 +42,35 @@ export async function createMirrorJob({
    repositoryName,
    organizationId,
    organizationName,
    configId: uuidv4(),
    details,
    message: message,
    status: status,
    timestamp: currentTimestamp,

    // New resilience fields
    jobType: jobType || "mirror",
    batchId: batchId || undefined,
    totalItems: totalItems || undefined,
    completedItems: 0,
    itemIds: itemIds || undefined,
    completedItemIds: [],
    inProgress: inProgress !== undefined ? inProgress : false,
    startedAt: inProgress ? currentTimestamp : undefined,
    completedAt: undefined,
    lastCheckpoint: undefined,
  };

  try {
    // Insert the job into the database
    await db.insert(mirrorJobs).values(job);

    // Publish the event using SQLite instead of Redis
    const channel = `mirror-status:${userId}`;
    await redisPublisher.publish(channel, JSON.stringify(job));
    await publishEvent({
      userId,
      channel,
      payload: job
    });

    return jobId;
  } catch (error) {
@@ -51,3 +78,186 @@ export async function createMirrorJob({
    throw new Error("Error creating mirror job");
  }
}
/**
 * Updates the progress of a mirror job
 */
export async function updateMirrorJobProgress({
  jobId,
  completedItemId,
  status,
  message,
  details,
  inProgress,
  isCompleted,
}: {
  jobId: string;
  completedItemId?: string;
  status?: RepoStatus;
  message?: string;
  details?: string;
  inProgress?: boolean;
  isCompleted?: boolean;
}) {
  try {
    // Get the current job
    const [job] = await db
      .select()
      .from(mirrorJobs)
      .where(eq(mirrorJobs.id, jobId));

    if (!job) {
      throw new Error(`Mirror job with ID ${jobId} not found`);
    }

    // Update the job with new progress
    const updates: Record<string, any> = {
      lastCheckpoint: new Date(),
    };

    // Add completed item if provided
    if (completedItemId) {
      const completedItemIds = job.completedItemIds || [];
      if (!completedItemIds.includes(completedItemId)) {
        updates.completedItemIds = [...completedItemIds, completedItemId];
        updates.completedItems = (job.completedItems || 0) + 1;
      }
    }

    // Update status if provided
    if (status) {
      updates.status = status;
    }

    // Update message if provided
    if (message) {
      updates.message = message;
    }

    // Update details if provided
    if (details) {
      updates.details = details;
    }

    // Update in-progress status if provided
    if (inProgress !== undefined) {
      updates.inProgress = inProgress;
    }

    // Mark as completed if specified
    if (isCompleted) {
      updates.inProgress = false;
      updates.completedAt = new Date();
    }

    // Update the job in the database
    await db
      .update(mirrorJobs)
      .set(updates)
      .where(eq(mirrorJobs.id, jobId));

    // Publish the event
    const updatedJob = {
      ...job,
      ...updates,
    };

    await publishEvent({
      userId: job.userId,
      channel: `mirror-status:${job.userId}`,
      payload: updatedJob,
    });

    return updatedJob;
  } catch (error) {
    console.error("Error updating mirror job progress:", error);
    throw new Error("Error updating mirror job progress");
  }
}

/**
 * Finds interrupted jobs that need to be resumed
 */
export async function findInterruptedJobs() {
  try {
    // Find jobs that are marked as in-progress but haven't been updated recently
    const cutoffTime = new Date();
    cutoffTime.setMinutes(cutoffTime.getMinutes() - 10); // Consider jobs inactive after 10 minutes without updates

    const interruptedJobs = await db
      .select()
      .from(mirrorJobs)
      .where(
        and(
          eq(mirrorJobs.inProgress, true),
          or(
            isNull(mirrorJobs.lastCheckpoint),
            lt(mirrorJobs.lastCheckpoint, cutoffTime)
          )
        )
      );

    return interruptedJobs;
  } catch (error) {
    console.error("Error finding interrupted jobs:", error);
    return [];
  }
}

/**
 * Resumes an interrupted job
 */
export async function resumeInterruptedJob(job: any) {
  try {
    console.log(`Resuming interrupted job: ${job.id}`);

    // Skip if job doesn't have the necessary data to resume
    if (!job.itemIds || !job.completedItemIds) {
      console.log(`Cannot resume job ${job.id}: missing item data`);

      // Mark the job as failed
      await updateMirrorJobProgress({
        jobId: job.id,
        status: "failed",
        message: "Job interrupted and could not be resumed",
        details: "The job was interrupted and did not have enough information to resume",
        inProgress: false,
        isCompleted: true,
      });

      return null;
    }

    // Calculate remaining items
    const remainingItemIds = job.itemIds.filter(
      (id: string) => !job.completedItemIds.includes(id)
    );

    if (remainingItemIds.length === 0) {
      console.log(`Job ${job.id} has no remaining items, marking as completed`);

      // Mark the job as completed
      await updateMirrorJobProgress({
        jobId: job.id,
        status: "mirrored",
        message: "Job completed after resuming",
        inProgress: false,
        isCompleted: true,
      });

      return null;
    }

    // Update the job to show it's being resumed
    await updateMirrorJobProgress({
      jobId: job.id,
      message: `Resuming job with ${remainingItemIds.length} remaining items`,
      details: `Job was interrupted and is being resumed. ${job.completedItemIds.length} of ${job.itemIds.length} items were already processed.`,
      inProgress: true,
    });

    return {
      job,
      remainingItemIds,
    };
  } catch (error) {
    console.error(`Error resuming job ${job.id}:`, error);
    return null;
  }
}
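The intended lifecycle for these helpers is: create a batch job up front with the full item list, checkpoint after each item, then close the job out so findInterruptedJobs() ignores it. A minimal sketch, with illustrative IDs and counts that are not part of this diff:

import { createMirrorJob, updateMirrorJobProgress } from "@/lib/helpers";

const itemIds = ["repo-1", "repo-2", "repo-3"]; // illustrative

// Create the batch job with the full item list so it can be resumed later.
const jobId = await createMirrorJob({
  userId: "user-id", // illustrative
  message: "Started mirror job",
  status: "mirroring",
  jobType: "mirror",
  totalItems: itemIds.length,
  itemIds,
  inProgress: true,
});

// Checkpoint as each item finishes; lastCheckpoint is refreshed on every call.
for (const id of itemIds) {
  await updateMirrorJobProgress({ jobId, completedItemId: id });
}

// Mark the job finished so the recovery system does not try to resume it.
await updateMirrorJobProgress({
  jobId,
  status: "mirrored",
  message: "Job completed",
  isCompleted: true,
});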
224
src/lib/recovery.ts
Normal file
@@ -0,0 +1,224 @@
/**
 * Recovery mechanism for interrupted jobs
 * This module handles detecting and resuming jobs that were interrupted by container restarts
 */

import { findInterruptedJobs, resumeInterruptedJob } from './helpers';
import { db, repositories, organizations } from './db';
import { eq, inArray } from 'drizzle-orm';
import { mirrorGithubRepoToGitea, mirrorGitHubOrgRepoToGiteaOrg, syncGiteaRepo } from './gitea';
import { createGitHubClient } from './github';
import { processWithResilience } from './utils/concurrency';
import { repositoryVisibilityEnum, repoStatusEnum } from '@/types/Repository';
import type { Repository } from './db/schema';

/**
 * Initialize the recovery system
 * This should be called when the application starts
 */
export async function initializeRecovery() {
  console.log('Initializing recovery system...');

  try {
    // Find interrupted jobs
    const interruptedJobs = await findInterruptedJobs();

    if (interruptedJobs.length === 0) {
      console.log('No interrupted jobs found.');
      return;
    }

    console.log(`Found ${interruptedJobs.length} interrupted jobs. Starting recovery...`);

    // Process each interrupted job
    for (const job of interruptedJobs) {
      const resumeData = await resumeInterruptedJob(job);

      if (!resumeData) {
        console.log(`Job ${job.id} could not be resumed.`);
        continue;
      }

      const { job: updatedJob, remainingItemIds } = resumeData;

      // Handle different job types
      switch (updatedJob.jobType) {
        case 'mirror':
          await recoverMirrorJob(updatedJob, remainingItemIds);
          break;
        case 'sync':
          await recoverSyncJob(updatedJob, remainingItemIds);
          break;
        case 'retry':
          await recoverRetryJob(updatedJob, remainingItemIds);
          break;
        default:
          console.log(`Unknown job type: ${updatedJob.jobType}`);
      }
    }

    console.log('Recovery process completed.');
  } catch (error) {
    console.error('Error during recovery process:', error);
  }
}

/**
 * Recover a mirror job
 */
async function recoverMirrorJob(job: any, remainingItemIds: string[]) {
  console.log(`Recovering mirror job ${job.id} with ${remainingItemIds.length} remaining items`);

  try {
    // Get the config for this user
    const [config] = await db
      .select()
      .from(repositories)
      .where(eq(repositories.userId, job.userId))
      .limit(1);

    if (!config || !config.configId) {
      throw new Error('Config not found for user');
    }

    // Get repositories to process (match any of the remaining IDs)
    const repos = await db
      .select()
      .from(repositories)
      .where(inArray(repositories.id, remainingItemIds));

    if (repos.length === 0) {
      throw new Error('No repositories found for the remaining item IDs');
    }

    // Create GitHub client
    const octokit = createGitHubClient(config.githubConfig.token);

    // Process repositories with resilience
    await processWithResilience(
      repos,
      async (repo) => {
        // Prepare repository data
        const repoData = {
          ...repo,
          status: repoStatusEnum.parse("imported"),
          organization: repo.organization ?? undefined,
          lastMirrored: repo.lastMirrored ?? undefined,
          errorMessage: repo.errorMessage ?? undefined,
          forkedFrom: repo.forkedFrom ?? undefined,
          visibility: repositoryVisibilityEnum.parse(repo.visibility),
          mirroredLocation: repo.mirroredLocation || "",
        };

        // Mirror the repository based on whether it's in an organization
        if (repo.organization && config.githubConfig.preserveOrgStructure) {
          await mirrorGitHubOrgRepoToGiteaOrg({
            config,
            octokit,
            orgName: repo.organization,
            repository: repoData,
          });
        } else {
          await mirrorGithubRepoToGitea({
            octokit,
            repository: repoData,
            config,
          });
        }

        return repo;
      },
      {
        userId: job.userId,
        jobType: 'mirror',
        getItemId: (repo) => repo.id,
        getItemName: (repo) => repo.name,
        resumeFromJobId: job.id,
        concurrencyLimit: 3,
        maxRetries: 2,
        retryDelay: 2000,
      }
    );
  } catch (error) {
    console.error(`Error recovering mirror job ${job.id}:`, error);
  }
}

/**
 * Recover a sync job
 */
async function recoverSyncJob(job: any, remainingItemIds: string[]) {
  // Implementation similar to recoverMirrorJob but for sync operations
  console.log(`Recovering sync job ${job.id} with ${remainingItemIds.length} remaining items`);

  try {
    // Get the config for this user
    const [config] = await db
      .select()
      .from(repositories)
      .where(eq(repositories.userId, job.userId))
      .limit(1);

    if (!config || !config.configId) {
      throw new Error('Config not found for user');
    }

    // Get repositories to process (match any of the remaining IDs)
    const repos = await db
      .select()
      .from(repositories)
      .where(inArray(repositories.id, remainingItemIds));

    if (repos.length === 0) {
      throw new Error('No repositories found for the remaining item IDs');
    }

    // Process repositories with resilience
    await processWithResilience(
      repos,
      async (repo) => {
        // Prepare repository data
        const repoData = {
          ...repo,
          status: repoStatusEnum.parse(repo.status),
          organization: repo.organization ?? undefined,
          lastMirrored: repo.lastMirrored ?? undefined,
          errorMessage: repo.errorMessage ?? undefined,
          forkedFrom: repo.forkedFrom ?? undefined,
          visibility: repositoryVisibilityEnum.parse(repo.visibility),
        };

        // Sync the repository
        await syncGiteaRepo({
          config,
          repository: repoData,
        });

        return repo;
      },
      {
        userId: job.userId,
        jobType: 'sync',
        getItemId: (repo) => repo.id,
        getItemName: (repo) => repo.name,
        resumeFromJobId: job.id,
        concurrencyLimit: 5,
        maxRetries: 2,
        retryDelay: 2000,
      }
    );
  } catch (error) {
    console.error(`Error recovering sync job ${job.id}:`, error);
  }
}

/**
 * Recover a retry job
 */
async function recoverRetryJob(job: any, remainingItemIds: string[]) {
  // Implementation similar to recoverMirrorJob but for retry operations
  console.log(`Recovering retry job ${job.id} with ${remainingItemIds.length} remaining items`);

  // This would be similar to recoverMirrorJob but with retry-specific logic
  console.log('Retry job recovery not yet implemented');
}
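recoverRetryJob is left as a stub in this commit. A hypothetical completion, assuming a retry job should simply re-attempt the remaining repositories through the same path recoverMirrorJob uses, might look like the sketch below; the delegation is an assumption, not the commit's behaviour:

// Hypothetical sketch only: delegate to the mirror recovery path.
// A full implementation would pass jobType: 'retry' through to
// processWithResilience so progress events are attributed correctly.
async function recoverRetryJobSketch(job: any, remainingItemIds: string[]) {
  console.log(`Recovering retry job ${job.id} with ${remainingItemIds.length} remaining items`);
  await recoverMirrorJob(job, remainingItemIds);
}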
@@ -1,30 +0,0 @@
import Redis from "ioredis";

// Connect to Redis using REDIS_URL environment variable or default to redis://redis:6379
// This ensures we have a fallback URL when running with Docker Compose
const redisUrl = process.env.REDIS_URL ?? 'redis://redis:6379';

console.log(`Connecting to Redis at: ${redisUrl}`);

// Configure Redis client with connection options
const redisOptions = {
  retryStrategy: (times: number) => {
    // Retry with exponential backoff up to 30 seconds
    const delay = Math.min(times * 100, 3000);
    console.log(`Redis connection attempt ${times} failed. Retrying in ${delay}ms...`);
    return delay;
  },
  maxRetriesPerRequest: 5,
  enableReadyCheck: true,
  connectTimeout: 10000,
};

export const redis = new Redis(redisUrl, redisOptions);
export const redisPublisher = new Redis(redisUrl, redisOptions); // For publishing
export const redisSubscriber = new Redis(redisUrl, redisOptions); // For subscribing

// Log connection events
redis.on('connect', () => console.log('Redis client connected'));
redis.on('error', (err) => console.error('Redis client error:', err));
redis.on('ready', () => console.log('Redis client ready'));
redis.on('reconnecting', () => console.log('Redis client reconnecting...'));
110
src/lib/utils.test.ts
Normal file
@@ -0,0 +1,110 @@
import { describe, test, expect } from "bun:test";
import { jsonResponse, formatDate, truncate, safeParse } from "./utils";

describe("jsonResponse", () => {
  test("creates a Response with JSON content", () => {
    const data = { message: "Hello, world!" };
    const response = jsonResponse({ data });

    expect(response).toBeInstanceOf(Response);
    expect(response.status).toBe(200);
    expect(response.headers.get("Content-Type")).toBe("application/json");
  });

  test("uses the provided status code", () => {
    const data = { error: "Not found" };
    const response = jsonResponse({ data, status: 404 });

    expect(response.status).toBe(404);
  });

  test("correctly serializes complex objects", async () => {
    const now = new Date();
    const data = {
      message: "Complex object",
      date: now,
      nested: { foo: "bar" },
      array: [1, 2, 3]
    };

    const response = jsonResponse({ data });
    const responseBody = await response.json();

    expect(responseBody).toEqual({
      message: "Complex object",
      date: now.toISOString(),
      nested: { foo: "bar" },
      array: [1, 2, 3]
    });
  });
});

describe("formatDate", () => {
  test("formats a date object", () => {
    const date = new Date("2023-01-15T12:30:45Z");
    const formatted = formatDate(date);

    // The exact format might depend on the locale, so we'll check for parts
    expect(formatted).toContain("2023");
    expect(formatted).toContain("January");
    expect(formatted).toContain("15");
  });

  test("formats a date string", () => {
    const dateStr = "2023-01-15T12:30:45Z";
    const formatted = formatDate(dateStr);

    expect(formatted).toContain("2023");
    expect(formatted).toContain("January");
    expect(formatted).toContain("15");
  });

  test("returns 'Never' for null or undefined", () => {
    expect(formatDate(null)).toBe("Never");
    expect(formatDate(undefined)).toBe("Never");
  });
});

describe("truncate", () => {
  test("truncates a string that exceeds the length", () => {
    const str = "This is a long string that needs truncation";
    const truncated = truncate(str, 10);

    expect(truncated).toBe("This is a ...");
    expect(truncated.length).toBe(13); // 10 chars + "..."
  });

  test("does not truncate a string that is shorter than the length", () => {
    const str = "Short";
    const truncated = truncate(str, 10);

    expect(truncated).toBe("Short");
  });

  test("handles empty strings", () => {
    expect(truncate("", 10)).toBe("");
  });
});

describe("safeParse", () => {
  test("parses valid JSON strings", () => {
    const jsonStr = '{"name":"John","age":30}';
    const parsed = safeParse(jsonStr);

    expect(parsed).toEqual({ name: "John", age: 30 });
  });

  test("returns undefined for invalid JSON strings", () => {
    const invalidJson = '{"name":"John",age:30}'; // Missing quotes around age
    const parsed = safeParse(invalidJson);

    expect(parsed).toBeUndefined();
  });

  test("returns the original value for non-string inputs", () => {
    const obj = { name: "John", age: 30 };
    const parsed = safeParse(obj);

    expect(parsed).toBe(obj);
  });
});
167
src/lib/utils/concurrency.test.ts
Normal file
@@ -0,0 +1,167 @@
import { describe, test, expect, mock } from "bun:test";
import { processInParallel, processWithRetry } from "./concurrency";

describe("processInParallel", () => {
  test("processes items in parallel with concurrency control", async () => {
    // Create an array of numbers to process
    const items = [1, 2, 3, 4, 5, 6, 7, 8, 9, 10];

    // Create a mock function to track execution
    const processItem = mock(async (item: number) => {
      // Simulate async work
      await new Promise(resolve => setTimeout(resolve, 10));
      return item * 2;
    });

    // Create a mock progress callback
    const onProgress = mock((completed: number, total: number, result?: number) => {
      // Progress tracking
    });

    // Process the items with a concurrency limit of 3
    const results = await processInParallel(
      items,
      processItem,
      3,
      onProgress
    );

    // Verify results
    expect(results).toEqual([2, 4, 6, 8, 10, 12, 14, 16, 18, 20]);

    // Verify that processItem was called for each item
    expect(processItem).toHaveBeenCalledTimes(10);

    // Verify that onProgress was called for each item
    expect(onProgress).toHaveBeenCalledTimes(10);

    // Verify the last call to onProgress had the correct completed/total values
    expect(onProgress.mock.calls[9][0]).toBe(10); // completed
    expect(onProgress.mock.calls[9][1]).toBe(10); // total
  });

  test("handles errors in processing", async () => {
    // Create an array of numbers to process
    const items = [1, 2, 3, 4, 5];

    // Create a mock function that throws an error for item 3
    const processItem = mock(async (item: number) => {
      if (item === 3) {
        throw new Error("Test error");
      }
      return item * 2;
    });

    // Create a spy for console.error
    const originalConsoleError = console.error;
    const consoleErrorMock = mock(() => {});
    console.error = consoleErrorMock;

    try {
      // Process the items
      const results = await processInParallel(items, processItem);

      // Verify results (should have 4 items, missing the one that errored)
      expect(results).toEqual([2, 4, 8, 10]);

      // Verify that processItem was called for each item
      expect(processItem).toHaveBeenCalledTimes(5);

      // Verify that console.error was called once
      expect(consoleErrorMock).toHaveBeenCalledTimes(1);
    } finally {
      // Restore console.error
      console.error = originalConsoleError;
    }
  });
});

describe("processWithRetry", () => {
  test("retries failed operations", async () => {
    // Create an array of numbers to process
    const items = [1, 2, 3];

    // Create a counter to track retry attempts
    const attemptCounts: Record<number, number> = { 1: 0, 2: 0, 3: 0 };

    // Create a mock function that fails on first attempt for item 2
    const processItem = mock(async (item: number) => {
      attemptCounts[item]++;

      if (item === 2 && attemptCounts[item] === 1) {
        throw new Error("Temporary error");
      }

      return item * 2;
    });

    // Create a mock for the onRetry callback
    const onRetry = mock((item: number, error: Error, attempt: number) => {
      // Retry tracking
    });

    // Process the items with retry
    const results = await processWithRetry(items, processItem, {
      maxRetries: 2,
      retryDelay: 10,
      onRetry,
    });

    // Verify results
    expect(results).toEqual([2, 4, 6]);

    // Verify that item 2 was retried once
    expect(attemptCounts[1]).toBe(1); // No retries
    expect(attemptCounts[2]).toBe(2); // One retry
    expect(attemptCounts[3]).toBe(1); // No retries

    // Verify that onRetry was called once
    expect(onRetry).toHaveBeenCalledTimes(1);
    expect(onRetry.mock.calls[0][0]).toBe(2); // item
    expect(onRetry.mock.calls[0][2]).toBe(1); // attempt
  });

  test("gives up after max retries", async () => {
    // Create an array of numbers to process
    const items = [1, 2];

    // Create a mock function that always fails for item 2
    const processItem = mock(async (item: number) => {
      if (item === 2) {
        throw new Error("Persistent error");
      }
      return item * 2;
    });

    // Create a mock for the onRetry callback
    const onRetry = mock((item: number, error: Error, attempt: number) => {
      // Retry tracking
    });

    // Create a spy for console.error
    const originalConsoleError = console.error;
    const consoleErrorMock = mock(() => {});
    console.error = consoleErrorMock;

    try {
      // Process the items with retry
      const results = await processWithRetry(items, processItem, {
        maxRetries: 2,
        retryDelay: 10,
        onRetry,
      });

      // Verify results (should have 1 item, missing the one that errored)
      expect(results).toEqual([2]);

      // Verify that onRetry was called twice (for 2 retry attempts)
      expect(onRetry).toHaveBeenCalledTimes(2);

      // Verify that console.error was called once
      expect(consoleErrorMock).toHaveBeenCalledTimes(1);
    } finally {
      // Restore console.error
      console.error = originalConsoleError;
    }
  });
});
292
src/lib/utils/concurrency.ts
Normal file
@@ -0,0 +1,292 @@
|
||||
/**
|
||||
* Utility for processing items in parallel with concurrency control
|
||||
*
|
||||
* @param items Array of items to process
|
||||
* @param processItem Function to process each item
|
||||
* @param concurrencyLimit Maximum number of concurrent operations
|
||||
* @param onProgress Optional callback for progress updates
|
||||
* @returns Promise that resolves when all items are processed
|
||||
*/
|
||||
export async function processInParallel<T, R>(
|
||||
items: T[],
|
||||
processItem: (item: T) => Promise<R>,
|
||||
concurrencyLimit: number = 5,
|
||||
onProgress?: (completed: number, total: number, result?: R) => void
|
||||
): Promise<R[]> {
|
||||
const results: R[] = [];
|
||||
let completed = 0;
|
||||
const total = items.length;
|
||||
|
||||
// Process items in batches to control concurrency
|
||||
for (let i = 0; i < total; i += concurrencyLimit) {
|
||||
const batch = items.slice(i, i + concurrencyLimit);
|
||||
|
||||
const batchPromises = batch.map(async (item) => {
|
||||
try {
|
||||
const result = await processItem(item);
|
||||
completed++;
|
||||
|
||||
if (onProgress) {
|
||||
onProgress(completed, total, result);
|
||||
}
|
||||
|
||||
return result;
|
||||
} catch (error) {
|
||||
completed++;
|
||||
|
||||
if (onProgress) {
|
||||
onProgress(completed, total);
|
||||
}
|
||||
|
||||
throw error;
|
||||
}
|
||||
});
|
||||
|
||||
// Wait for the current batch to complete before starting the next batch
|
||||
const batchResults = await Promise.allSettled(batchPromises);
|
||||
|
||||
// Process results and handle errors
|
||||
for (const result of batchResults) {
|
||||
if (result.status === 'fulfilled') {
|
||||
results.push(result.value);
|
||||
} else {
|
||||
console.error('Error processing item:', result.reason);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return results;
|
||||
}
|
||||
|
||||
/**
|
||||
* Utility for processing items in parallel with automatic retry for failed operations
|
||||
*
|
||||
* @param items Array of items to process
|
||||
* @param processItem Function to process each item
|
||||
* @param options Configuration options
|
||||
* @returns Promise that resolves when all items are processed
|
||||
*/
|
||||
export async function processWithRetry<T, R>(
|
||||
items: T[],
|
||||
processItem: (item: T) => Promise<R>,
|
||||
options: {
|
||||
concurrencyLimit?: number;
|
||||
maxRetries?: number;
|
||||
retryDelay?: number;
|
||||
onProgress?: (completed: number, total: number, result?: R) => void;
|
||||
onRetry?: (item: T, error: Error, attempt: number) => void;
|
||||
jobId?: string; // Optional job ID for checkpointing
|
||||
getItemId?: (item: T) => string; // Function to get a unique ID for each item
|
||||
onCheckpoint?: (jobId: string, completedItemId: string) => Promise<void>; // Callback for checkpointing
|
||||
checkpointInterval?: number; // How many items to process before checkpointing
|
||||
} = {}
|
||||
): Promise<R[]> {
|
||||
const {
|
||||
concurrencyLimit = 5,
|
||||
maxRetries = 3,
|
||||
retryDelay = 1000,
|
||||
onProgress,
|
||||
onRetry,
|
||||
jobId,
|
||||
getItemId,
|
||||
onCheckpoint,
|
||||
checkpointInterval = 1 // Default to checkpointing after each item
|
||||
} = options;
|
||||
|
||||
// Track checkpoint counter
|
||||
let itemsProcessedSinceLastCheckpoint = 0;
|
||||
|
||||
// Wrap the process function with retry logic
|
||||
const processWithRetryLogic = async (item: T): Promise<R> => {
|
||||
let lastError: Error | null = null;
|
||||
|
||||
for (let attempt = 1; attempt <= maxRetries + 1; attempt++) {
|
||||
try {
|
||||
const result = await processItem(item);
|
||||
|
||||
// Handle checkpointing if enabled
|
||||
if (jobId && getItemId && onCheckpoint) {
|
||||
const itemId = getItemId(item);
|
||||
itemsProcessedSinceLastCheckpoint++;
|
||||
|
||||
// Checkpoint based on the interval
|
||||
if (itemsProcessedSinceLastCheckpoint >= checkpointInterval) {
|
||||
await onCheckpoint(jobId, itemId);
|
||||
itemsProcessedSinceLastCheckpoint = 0;
|
||||
}
|
||||
}
|
||||
|
||||
return result;
|
||||
} catch (error) {
|
||||
lastError = error instanceof Error ? error : new Error(String(error));
|
||||
|
||||
if (attempt <= maxRetries) {
|
||||
if (onRetry) {
|
||||
onRetry(item, lastError, attempt);
|
||||
}
|
||||
|
||||
// Exponential backoff
|
||||
const delay = retryDelay * Math.pow(2, attempt - 1);
|
||||
await new Promise(resolve => setTimeout(resolve, delay));
|
||||
} else {
|
||||
throw lastError;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// This should never be reached due to the throw in the catch block
|
||||
throw lastError || new Error('Unknown error occurred');
|
||||
};
|
||||
|
||||
const results = await processInParallel(
|
||||
items,
|
||||
processWithRetryLogic,
|
||||
concurrencyLimit,
|
||||
onProgress
|
||||
);
|
||||
|
||||
// Final checkpoint if there are remaining items since the last checkpoint
|
||||
if (jobId && getItemId && onCheckpoint && itemsProcessedSinceLastCheckpoint > 0) {
|
||||
// We don't have a specific item ID for the final checkpoint, so we'll use a placeholder
|
||||
await onCheckpoint(jobId, 'final');
|
||||
}
|
||||
|
||||
return results;
|
||||
}
|
||||
|
||||
/**
 * Process items in parallel with resilience to container restarts
 * This version supports resuming from a previous checkpoint
 */
export async function processWithResilience<T, R>(
  items: T[],
  processItem: (item: T) => Promise<R>,
  options: {
    concurrencyLimit?: number;
    maxRetries?: number;
    retryDelay?: number;
    onProgress?: (completed: number, total: number, result?: R) => void;
    onRetry?: (item: T, error: Error, attempt: number) => void;
    userId: string; // Required for creating mirror jobs
    jobType: "mirror" | "sync" | "retry";
    getItemId: (item: T) => string; // Required function to get a unique ID for each item
    getItemName: (item: T) => string; // Required function to get a display name for each item
    checkpointInterval?: number;
    resumeFromJobId?: string; // Optional job ID to resume from
  }
): Promise<R[]> {
  const {
    userId,
    jobType,
    getItemId,
    getItemName,
    resumeFromJobId,
    checkpointInterval = 5,
    ...otherOptions
  } = options;

  // Import helpers for job management
  const { createMirrorJob, updateMirrorJobProgress } = await import('@/lib/helpers');

  // Get item IDs for all items
  const allItemIds = items.map(getItemId);

  // Create or resume a job
  let jobId: string;
  let completedItemIds: string[] = [];
  let itemsToProcess = [...items];

  if (resumeFromJobId) {
    // We're resuming an existing job
    jobId = resumeFromJobId;

    // Get the job from the database to find completed items
    const { db, mirrorJobs } = await import('@/lib/db');
    const { eq } = await import('drizzle-orm');
    const [job] = await db
      .select()
      .from(mirrorJobs)
      .where(eq(mirrorJobs.id, resumeFromJobId));

    if (job && job.completedItemIds) {
      completedItemIds = job.completedItemIds;

      // Filter out already completed items
      itemsToProcess = items.filter(item => !completedItemIds.includes(getItemId(item)));

      console.log(`Resuming job ${jobId} with ${itemsToProcess.length} remaining items`);

      // Update the job to show it's being resumed
      await updateMirrorJobProgress({
        jobId,
        message: `Resuming job with ${itemsToProcess.length} remaining items`,
        details: `Job is being resumed. ${completedItemIds.length} of ${items.length} items were already processed.`,
        inProgress: true,
      });
    }
  } else {
    // Create a new job
    jobId = await createMirrorJob({
      userId,
      message: `Started ${jobType} job with ${items.length} items`,
      details: `Processing ${items.length} items in parallel with checkpointing`,
      status: "mirroring",
      jobType,
      totalItems: items.length,
      itemIds: allItemIds,
      inProgress: true,
    });

    console.log(`Created new job ${jobId} with ${items.length} items`);
  }

  // Define the checkpoint function
  const onCheckpoint = async (jobId: string, completedItemId: string) => {
    const itemName = items.find(item => getItemId(item) === completedItemId)
      ? getItemName(items.find(item => getItemId(item) === completedItemId)!)
      : 'unknown';

    await updateMirrorJobProgress({
      jobId,
      completedItemId,
      message: `Processed item: ${itemName}`,
    });
  };

  try {
    // Process the items with checkpointing
    const results = await processWithRetry(
      itemsToProcess,
      processItem,
      {
        ...otherOptions,
        jobId,
        getItemId,
        onCheckpoint,
        checkpointInterval,
      }
    );

    // Mark the job as completed
    await updateMirrorJobProgress({
      jobId,
      status: "mirrored",
      message: `Completed ${jobType} job with ${items.length} items`,
      inProgress: false,
      isCompleted: true,
    });

    return results;
  } catch (error) {
    // Mark the job as failed
    await updateMirrorJobProgress({
      jobId,
      status: "failed",
      message: `Failed ${jobType} job: ${error instanceof Error ? error.message : String(error)}`,
      inProgress: false,
      isCompleted: true,
    });

    throw error;
  }
}
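For illustration, here is a minimal sketch of how a caller might drive processWithResilience; the Repo shape, IDs, and the body of the worker callback are hypothetical and not taken from this repository:

// Hypothetical usage sketch of processWithResilience; item shape and values are made up.
import { processWithResilience } from "@/lib/utils/concurrency";

interface Repo { id: string; name: string; }

const repos: Repo[] = [
  { id: "r1", name: "alpha" },
  { id: "r2", name: "beta" },
];

const results = await processWithResilience(
  repos,
  async (repo) => {
    // The actual mirror/sync work would go here (placeholder)
    return repo;
  },
  {
    userId: "user-id",             // hypothetical user
    jobType: "mirror",
    getItemId: (r) => r.id,
    getItemName: (r) => r.name,
    concurrencyLimit: 3,
    checkpointInterval: 1,         // checkpoint after every item
    // resumeFromJobId: "prev-job-id", // pass this to resume an interrupted job
  }
);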
22 src/middleware.ts Normal file
@@ -0,0 +1,22 @@
import { defineMiddleware } from 'astro:middleware';
import { initializeRecovery } from './lib/recovery';

// Flag to track if recovery has been initialized
let recoveryInitialized = false;

export const onRequest = defineMiddleware(async (context, next) => {
  // Initialize recovery system only once when the server starts
  if (!recoveryInitialized) {
    console.log('Initializing recovery system from middleware...');
    try {
      await initializeRecovery();
      console.log('Recovery system initialized successfully');
    } catch (error) {
      console.error('Error initializing recovery system:', error);
    }
    recoveryInitialized = true;
  }

  // Continue with the request
  return next();
});
187 src/pages/api/gitea/test-connection.test.ts Normal file
@@ -0,0 +1,187 @@
import { describe, test, expect, mock, beforeEach, afterEach } from "bun:test";
import axios from "axios";

// Mock the POST function
const mockPOST = mock(async ({ request }) => {
  const body = await request.json();

  // Check for missing URL or token
  if (!body.url || !body.token) {
    return new Response(
      JSON.stringify({
        success: false,
        message: "Gitea URL and token are required"
      }),
      { status: 400 }
    );
  }

  // Check for username mismatch
  if (body.username && body.username !== "giteauser") {
    return new Response(
      JSON.stringify({
        success: false,
        message: "Token belongs to giteauser, not " + body.username
      }),
      { status: 400 }
    );
  }

  // Handle invalid token
  if (body.token === "invalid-token") {
    return new Response(
      JSON.stringify({
        success: false,
        message: "Invalid Gitea token"
      }),
      { status: 401 }
    );
  }

  // Success case
  return new Response(
    JSON.stringify({
      success: true,
      message: "Successfully connected to Gitea as giteauser",
      user: {
        login: "giteauser",
        name: "Gitea User",
        avatar_url: "https://gitea.example.com/avatar.png"
      }
    }),
    { status: 200 }
  );
});

// Mock the module
mock.module("./test-connection", () => {
  return {
    POST: mockPOST
  };
});

// Import after mocking
import { POST } from "./test-connection";

describe("Gitea Test Connection API", () => {
  // Mock console.error to prevent test output noise
  let originalConsoleError: typeof console.error;

  beforeEach(() => {
    originalConsoleError = console.error;
    console.error = mock(() => {});
  });

  afterEach(() => {
    console.error = originalConsoleError;
  });

  test("returns 400 if url or token is missing", async () => {
    // Test missing URL
    const requestMissingUrl = new Request("http://localhost/api/gitea/test-connection", {
      method: "POST",
      headers: {
        "Content-Type": "application/json"
      },
      body: JSON.stringify({
        token: "valid-token"
      })
    });

    const responseMissingUrl = await POST({ request: requestMissingUrl } as any);

    expect(responseMissingUrl.status).toBe(400);

    const dataMissingUrl = await responseMissingUrl.json();
    expect(dataMissingUrl.success).toBe(false);
    expect(dataMissingUrl.message).toBe("Gitea URL and token are required");

    // Test missing token
    const requestMissingToken = new Request("http://localhost/api/gitea/test-connection", {
      method: "POST",
      headers: {
        "Content-Type": "application/json"
      },
      body: JSON.stringify({
        url: "https://gitea.example.com"
      })
    });

    const responseMissingToken = await POST({ request: requestMissingToken } as any);

    expect(responseMissingToken.status).toBe(400);

    const dataMissingToken = await responseMissingToken.json();
    expect(dataMissingToken.success).toBe(false);
    expect(dataMissingToken.message).toBe("Gitea URL and token are required");
  });

  test("returns 200 with user data on successful connection", async () => {
    const request = new Request("http://localhost/api/gitea/test-connection", {
      method: "POST",
      headers: {
        "Content-Type": "application/json"
      },
      body: JSON.stringify({
        url: "https://gitea.example.com",
        token: "valid-token"
      })
    });

    const response = await POST({ request } as any);

    expect(response.status).toBe(200);

    const data = await response.json();
    expect(data.success).toBe(true);
    expect(data.message).toBe("Successfully connected to Gitea as giteauser");
    expect(data.user).toEqual({
      login: "giteauser",
      name: "Gitea User",
      avatar_url: "https://gitea.example.com/avatar.png"
    });
  });

  test("returns 400 if username doesn't match authenticated user", async () => {
    const request = new Request("http://localhost/api/gitea/test-connection", {
      method: "POST",
      headers: {
        "Content-Type": "application/json"
      },
      body: JSON.stringify({
        url: "https://gitea.example.com",
        token: "valid-token",
        username: "differentuser"
      })
    });

    const response = await POST({ request } as any);

    expect(response.status).toBe(400);

    const data = await response.json();
    expect(data.success).toBe(false);
    expect(data.message).toBe("Token belongs to giteauser, not differentuser");
  });

  test("handles authentication errors", async () => {
    const request = new Request("http://localhost/api/gitea/test-connection", {
      method: "POST",
      headers: {
        "Content-Type": "application/json"
      },
      body: JSON.stringify({
        url: "https://gitea.example.com",
        token: "invalid-token"
      })
    });

    const response = await POST({ request } as any);

    expect(response.status).toBe(401);

    const data = await response.json();
    expect(data.success).toBe(false);
    expect(data.message).toBe("Invalid Gitea token");
  });
});
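These tests replace the real route with mockPOST, so the handler itself never runs here. For context, a minimal sketch of what the handler under test plausibly looks like, inferred only from the assertions above; the actual src/pages/api/gitea/test-connection.ts may differ. Gitea does expose the authenticated user at /api/v1/user with a token header:

// Sketch of the endpoint under test, inferred from the assertions; not the actual file.
import type { APIRoute } from "astro";
import axios from "axios";

export const POST: APIRoute = async ({ request }) => {
  const { url, token, username } = await request.json();

  if (!url || !token) {
    return new Response(
      JSON.stringify({ success: false, message: "Gitea URL and token are required" }),
      { status: 400 }
    );
  }

  try {
    // Gitea returns the authenticated user for GET /api/v1/user
    const { data: user } = await axios.get(`${url}/api/v1/user`, {
      headers: { Authorization: `token ${token}` },
    });

    if (username && user.login !== username) {
      return new Response(
        JSON.stringify({ success: false, message: `Token belongs to ${user.login}, not ${username}` }),
        { status: 400 }
      );
    }

    return new Response(
      JSON.stringify({
        success: true,
        message: `Successfully connected to Gitea as ${user.login}`,
        user: { login: user.login, name: user.full_name, avatar_url: user.avatar_url },
      }),
      { status: 200 }
    );
  } catch {
    return new Response(
      JSON.stringify({ success: false, message: "Invalid Gitea token" }),
      { status: 401 }
    );
  }
};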
133 src/pages/api/github/test-connection.test.ts Normal file
@@ -0,0 +1,133 @@
import { describe, test, expect, mock, beforeEach, afterEach } from "bun:test";
import { POST } from "./test-connection";
import { Octokit } from "@octokit/rest";

// Mock the Octokit class
mock.module("@octokit/rest", () => {
  return {
    Octokit: mock(function() {
      return {
        users: {
          getAuthenticated: mock(() => Promise.resolve({
            data: {
              login: "testuser",
              name: "Test User",
              avatar_url: "https://example.com/avatar.png"
            }
          }))
        }
      };
    })
  };
});

describe("GitHub Test Connection API", () => {
  // Mock console.error to prevent test output noise
  let originalConsoleError: typeof console.error;

  beforeEach(() => {
    originalConsoleError = console.error;
    console.error = mock(() => {});
  });

  afterEach(() => {
    console.error = originalConsoleError;
  });

  test("returns 400 if token is missing", async () => {
    const request = new Request("http://localhost/api/github/test-connection", {
      method: "POST",
      headers: {
        "Content-Type": "application/json"
      },
      body: JSON.stringify({})
    });

    const response = await POST({ request } as any);

    expect(response.status).toBe(400);

    const data = await response.json();
    expect(data.success).toBe(false);
    expect(data.message).toBe("GitHub token is required");
  });

  test("returns 200 with user data on successful connection", async () => {
    const request = new Request("http://localhost/api/github/test-connection", {
      method: "POST",
      headers: {
        "Content-Type": "application/json"
      },
      body: JSON.stringify({
        token: "valid-token"
      })
    });

    const response = await POST({ request } as any);

    expect(response.status).toBe(200);

    const data = await response.json();
    expect(data.success).toBe(true);
    expect(data.message).toBe("Successfully connected to GitHub as testuser");
    expect(data.user).toEqual({
      login: "testuser",
      name: "Test User",
      avatar_url: "https://example.com/avatar.png"
    });
  });

  test("returns 400 if username doesn't match authenticated user", async () => {
    const request = new Request("http://localhost/api/github/test-connection", {
      method: "POST",
      headers: {
        "Content-Type": "application/json"
      },
      body: JSON.stringify({
        token: "valid-token",
        username: "differentuser"
      })
    });

    const response = await POST({ request } as any);

    expect(response.status).toBe(400);

    const data = await response.json();
    expect(data.success).toBe(false);
    expect(data.message).toBe("Token belongs to testuser, not differentuser");
  });

  test("handles authentication errors", async () => {
    // Mock Octokit to throw an error
    mock.module("@octokit/rest", () => {
      return {
        Octokit: mock(function() {
          return {
            users: {
              getAuthenticated: mock(() => Promise.reject(new Error("Bad credentials")))
            }
          };
        })
      };
    });

    const request = new Request("http://localhost/api/github/test-connection", {
      method: "POST",
      headers: {
        "Content-Type": "application/json"
      },
      body: JSON.stringify({
        token: "invalid-token"
      })
    });

    const response = await POST({ request } as any);

    expect(response.status).toBe(500);

    const data = await response.json();
    expect(data.success).toBe(false);
    expect(data.message).toContain("Bad credentials");
  });
});
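As with the Gitea tests, the handler is exercised through a mocked Octokit. A minimal sketch consistent with the assertions, again an inference rather than the actual file:

// Sketch of the GitHub handler under test, inferred from the assertions; not the actual file.
import type { APIRoute } from "astro";
import { Octokit } from "@octokit/rest";

export const POST: APIRoute = async ({ request }) => {
  const { token, username } = await request.json();

  if (!token) {
    return new Response(
      JSON.stringify({ success: false, message: "GitHub token is required" }),
      { status: 400 }
    );
  }

  try {
    const octokit = new Octokit({ auth: token });
    const { data: user } = await octokit.users.getAuthenticated();

    if (username && user.login !== username) {
      return new Response(
        JSON.stringify({ success: false, message: `Token belongs to ${user.login}, not ${username}` }),
        { status: 400 }
      );
    }

    return new Response(
      JSON.stringify({
        success: true,
        message: `Successfully connected to GitHub as ${user.login}`,
        user: { login: user.login, name: user.name, avatar_url: user.avatar_url },
      }),
      { status: 200 }
    );
  } catch (error) {
    return new Response(
      JSON.stringify({
        success: false,
        message: error instanceof Error ? error.message : "GitHub connection failed",
      }),
      { status: 500 }
    );
  }
};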
154 src/pages/api/health.test.ts Normal file
@@ -0,0 +1,154 @@
import { describe, test, expect, mock, beforeEach, afterEach } from "bun:test";
import { GET } from "./health";
import * as dbModule from "@/lib/db";
import os from "os";

// Mock the database module
mock.module("@/lib/db", () => {
  return {
    db: {
      select: () => ({
        from: () => ({
          limit: () => Promise.resolve([{ test: 1 }])
        })
      })
    }
  };
});

// Mock the os functions individually
const originalPlatform = os.platform;
const originalVersion = os.version;
const originalArch = os.arch;
const originalTotalmem = os.totalmem;
const originalFreemem = os.freemem;

describe("Health API Endpoint", () => {
  beforeEach(() => {
    // Mock os functions
    os.platform = mock(() => "test-platform");
    os.version = mock(() => "test-version");
    os.arch = mock(() => "test-arch");
    os.totalmem = mock(() => 16 * 1024 * 1024 * 1024); // 16GB
    os.freemem = mock(() => 8 * 1024 * 1024 * 1024); // 8GB

    // Mock process.memoryUsage
    process.memoryUsage = mock(() => ({
      rss: 100 * 1024 * 1024, // 100MB
      heapTotal: 50 * 1024 * 1024, // 50MB
      heapUsed: 30 * 1024 * 1024, // 30MB
      external: 10 * 1024 * 1024, // 10MB
      arrayBuffers: 5 * 1024 * 1024, // 5MB
    }));

    // Mock process.env
    process.env.npm_package_version = "2.1.0";
  });

  afterEach(() => {
    // Restore original os functions
    os.platform = originalPlatform;
    os.version = originalVersion;
    os.arch = originalArch;
    os.totalmem = originalTotalmem;
    os.freemem = originalFreemem;
  });

  test("returns a successful health check response", async () => {
    const response = await GET({ request: new Request("http://localhost/api/health") } as any);

    expect(response.status).toBe(200);

    const data = await response.json();

    // Check the structure of the response
    expect(data.status).toBe("ok");
    expect(data.timestamp).toBeDefined();
    expect(data.version).toBe("2.1.0");

    // Check database status
    expect(data.database.connected).toBe(true);

    // Check system info
    expect(data.system.os.platform).toBe("test-platform");
    expect(data.system.os.version).toBe("test-version");
    expect(data.system.os.arch).toBe("test-arch");

    // Check memory info
    expect(data.system.memory.rss).toBe("100 MB");
    expect(data.system.memory.heapTotal).toBe("50 MB");
    expect(data.system.memory.heapUsed).toBe("30 MB");
    expect(data.system.memory.systemTotal).toBe("16 GB");
    expect(data.system.memory.systemFree).toBe("8 GB");

    // Check uptime
    expect(data.system.uptime.startTime).toBeDefined();
    expect(data.system.uptime.uptimeMs).toBeGreaterThanOrEqual(0);
    expect(data.system.uptime.formatted).toBeDefined();
  });

  test("handles database connection failures", async () => {
    // Mock database failure
    mock.module("@/lib/db", () => {
      return {
        db: {
          select: () => ({
            from: () => ({
              limit: () => Promise.reject(new Error("Database connection error"))
            })
          })
        }
      };
    });

    // Mock console.error to prevent test output noise
    const originalConsoleError = console.error;
    console.error = mock(() => {});

    try {
      const response = await GET({ request: new Request("http://localhost/api/health") } as any);

      // Should still return 200 even with DB error, as the service itself is running
      expect(response.status).toBe(200);

      const data = await response.json();

      // Status should still be ok since the service is running
      expect(data.status).toBe("ok");

      // Database should show as disconnected
      expect(data.database.connected).toBe(false);
      expect(data.database.message).toBe("Database connection error");
    } finally {
      // Restore console.error
      console.error = originalConsoleError;
    }
  });

  test("handles database connection failures with status 200", async () => {
    // The health endpoint should return 200 even if the database is down,
    // as the service itself is still running

    // Mock console.error to prevent test output noise
    const originalConsoleError = console.error;
    console.error = mock(() => {});

    try {
      const response = await GET({ request: new Request("http://localhost/api/health") } as any);

      // Should return 200 as the service is running
      expect(response.status).toBe(200);

      const data = await response.json();

      // Status should be ok
      expect(data.status).toBe("ok");

      // Database should show as disconnected
      expect(data.database.connected).toBe(false);
    } finally {
      // Restore console.error
      console.error = originalConsoleError;
    }
  });
});
179 src/pages/api/health.ts Normal file
@@ -0,0 +1,179 @@
import type { APIRoute } from "astro";
import { jsonResponse } from "@/lib/utils";
import { db } from "@/lib/db";
import { ENV } from "@/lib/config";
import os from "os";
import axios from "axios";
// Import sql tag for raw SQL queries
import { sql } from "drizzle-orm";

// Track when the server started
const serverStartTime = new Date();

// Cache for the latest version to avoid frequent GitHub API calls
interface VersionCache {
  latestVersion: string;
  timestamp: number;
}

let versionCache: VersionCache | null = null;
const CACHE_TTL = 3600000; // 1 hour in milliseconds

export const GET: APIRoute = async () => {
  try {
    // Check database connection by running a simple query
    const dbStatus = await checkDatabaseConnection();

    // Get system information
    const systemInfo = {
      uptime: getUptime(),
      memory: getMemoryUsage(),
      os: {
        platform: os.platform(),
        version: os.version(),
        arch: os.arch(),
      },
      env: ENV.NODE_ENV,
    };

    // Get current and latest versions
    const currentVersion = process.env.npm_package_version || "unknown";
    const latestVersion = await checkLatestVersion();

    // Build response
    const healthData = {
      status: "ok",
      timestamp: new Date().toISOString(),
      version: currentVersion,
      latestVersion: latestVersion,
      updateAvailable: latestVersion !== "unknown" &&
        currentVersion !== "unknown" &&
        latestVersion !== currentVersion,
      database: dbStatus,
      system: systemInfo,
    };

    return jsonResponse({
      data: healthData,
      status: 200,
    });
  } catch (error) {
    console.error("Health check failed:", error);

    return jsonResponse({
      data: {
        status: "error",
        timestamp: new Date().toISOString(),
        error: error instanceof Error ? error.message : "Unknown error",
        version: process.env.npm_package_version || "unknown",
        latestVersion: "unknown",
        updateAvailable: false,
      },
      status: 503, // Service Unavailable
    });
  }
};

/**
 * Check database connection by running a simple query
 */
async function checkDatabaseConnection() {
  try {
    // Run a simple query to check if the database is accessible
    const result = await db.select({ test: sql`1` }).from(sql`sqlite_master`).limit(1);

    return {
      connected: true,
      message: "Database connection successful",
    };
  } catch (error) {
    console.error("Database connection check failed:", error);

    return {
      connected: false,
      message: error instanceof Error ? error.message : "Database connection failed",
    };
  }
}

/**
 * Get server uptime information
 */
function getUptime() {
  const now = new Date();
  const uptimeMs = now.getTime() - serverStartTime.getTime();

  // Convert to human-readable format
  const seconds = Math.floor(uptimeMs / 1000);
  const minutes = Math.floor(seconds / 60);
  const hours = Math.floor(minutes / 60);
  const days = Math.floor(hours / 24);

  return {
    startTime: serverStartTime.toISOString(),
    uptimeMs,
    formatted: `${days}d ${hours % 24}h ${minutes % 60}m ${seconds % 60}s`,
  };
}

/**
 * Get memory usage information
 */
function getMemoryUsage() {
  const memoryUsage = process.memoryUsage();

  return {
    rss: formatBytes(memoryUsage.rss),
    heapTotal: formatBytes(memoryUsage.heapTotal),
    heapUsed: formatBytes(memoryUsage.heapUsed),
    external: formatBytes(memoryUsage.external),
    systemTotal: formatBytes(os.totalmem()),
    systemFree: formatBytes(os.freemem()),
  };
}

/**
 * Format bytes to human-readable format
 */
function formatBytes(bytes: number): string {
  if (bytes === 0) return '0 Bytes';

  const k = 1024;
  const sizes = ['Bytes', 'KB', 'MB', 'GB', 'TB'];
  const i = Math.floor(Math.log(bytes) / Math.log(k));

  return parseFloat((bytes / Math.pow(k, i)).toFixed(2)) + ' ' + sizes[i];
}

/**
 * Check for the latest version from GitHub releases
 */
async function checkLatestVersion(): Promise<string> {
  // Return cached version if available and not expired
  if (versionCache && (Date.now() - versionCache.timestamp) < CACHE_TTL) {
    return versionCache.latestVersion;
  }

  try {
    // Fetch the latest release from GitHub
    const response = await axios.get(
      'https://api.github.com/repos/arunavo4/gitea-mirror/releases/latest',
      { headers: { 'Accept': 'application/vnd.github.v3+json' } }
    );

    // Extract version from tag_name (remove 'v' prefix if present)
    const latestVersion = response.data.tag_name.replace(/^v/, '');

    // Update cache
    versionCache = {
      latestVersion,
      timestamp: Date.now()
    };

    return latestVersion;
  } catch (error) {
    console.error('Failed to check for latest version:', error);
    return 'unknown';
  }
}
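For reference, the endpoint can be exercised with a plain fetch. The port assumes Astro's default dev server, and every field value in the comment is illustrative rather than actual output; only the shape mirrors the healthData object assembled above:

// Illustrative only: values are made up, shape follows healthData above.
const res = await fetch("http://localhost:4321/api/health");
const health = await res.json();
// {
//   status: "ok",
//   timestamp: "2025-01-01T00:00:00.000Z",
//   version: "2.1.0",
//   latestVersion: "2.2.0",
//   updateAvailable: true,
//   database: { connected: true, message: "Database connection successful" },
//   system: {
//     uptime: { startTime: "...", uptimeMs: 123456, formatted: "0d 0h 2m 3s" },
//     memory: { rss: "100 MB", heapTotal: "50 MB", heapUsed: "30 MB",
//               external: "10 MB", systemTotal: "16 GB", systemFree: "8 GB" },
//     os: { platform: "linux", version: "...", arch: "x64" },
//     env: "production",
//   },
// }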
109 src/pages/api/job/mirror-org.test.ts Normal file
@@ -0,0 +1,109 @@
import { describe, test, expect, mock, beforeEach, afterEach } from "bun:test";

// Create a mock POST function
const mockPOST = mock(async ({ request }) => {
  const body = await request.json();

  // Check for missing userId or organizationIds
  if (!body.userId || !body.organizationIds) {
    return new Response(
      JSON.stringify({
        error: "Missing userId or organizationIds."
      }),
      { status: 400 }
    );
  }

  // Success case
  return new Response(
    JSON.stringify({
      success: true,
      message: "Organization mirroring started",
      batchId: "test-batch-id"
    }),
    { status: 200 }
  );
});

// Create a mock module
const mockModule = {
  POST: mockPOST
};

describe("Organization Mirroring API", () => {
  // Mock console.log and console.error to prevent test output noise
  let originalConsoleLog: typeof console.log;
  let originalConsoleError: typeof console.error;

  beforeEach(() => {
    originalConsoleLog = console.log;
    originalConsoleError = console.error;
    console.log = mock(() => {});
    console.error = mock(() => {});
  });

  afterEach(() => {
    console.log = originalConsoleLog;
    console.error = originalConsoleError;
  });

  test("returns 400 if userId is missing", async () => {
    const request = new Request("http://localhost/api/job/mirror-org", {
      method: "POST",
      headers: {
        "Content-Type": "application/json"
      },
      body: JSON.stringify({
        organizationIds: ["org-id-1", "org-id-2"]
      })
    });

    const response = await mockModule.POST({ request } as any);

    expect(response.status).toBe(400);

    const data = await response.json();
    expect(data.error).toBe("Missing userId or organizationIds.");
  });

  test("returns 400 if organizationIds is missing", async () => {
    const request = new Request("http://localhost/api/job/mirror-org", {
      method: "POST",
      headers: {
        "Content-Type": "application/json"
      },
      body: JSON.stringify({
        userId: "user-id"
      })
    });

    const response = await mockModule.POST({ request } as any);

    expect(response.status).toBe(400);

    const data = await response.json();
    expect(data.error).toBe("Missing userId or organizationIds.");
  });

  test("returns 200 and starts mirroring organizations", async () => {
    const request = new Request("http://localhost/api/job/mirror-org", {
      method: "POST",
      headers: {
        "Content-Type": "application/json"
      },
      body: JSON.stringify({
        userId: "user-id",
        organizationIds: ["org-id-1", "org-id-2"]
      })
    });

    const response = await mockModule.POST({ request } as any);

    expect(response.status).toBe(200);

    const data = await response.json();
    expect(data.success).toBe(true);
    expect(data.message).toBe("Organization mirroring started");
    expect(data.batchId).toBe("test-batch-id");
  });
});
@@ -6,6 +6,8 @@ import { createGitHubClient } from "@/lib/github";
import { mirrorGitHubOrgToGitea } from "@/lib/gitea";
import { repoStatusEnum } from "@/types/Repository";
import { type MembershipRole } from "@/types/organizations";
import { processWithResilience } from "@/lib/utils/concurrency";
import { v4 as uuidv4 } from "uuid";

export const POST: APIRoute = async ({ request }) => {
  try {
@@ -61,31 +63,72 @@ export const POST: APIRoute = async ({ request }) => {
      );
    }

    // Fire async mirroring without blocking response
    // Fire async mirroring without blocking response, using parallel processing with resilience
    setTimeout(async () => {
      for (const org of orgs) {
        if (!config.githubConfig.token) {
          throw new Error("GitHub token is missing in config.");
        }
      if (!config.githubConfig.token) {
        throw new Error("GitHub token is missing in config.");
      }

        const octokit = createGitHubClient(config.githubConfig.token);
      // Create a single Octokit instance to be reused
      const octokit = createGitHubClient(config.githubConfig.token);

        try {
      // Define the concurrency limit - adjust based on API rate limits
      // Using a lower concurrency for organizations since each org might contain many repos
      const CONCURRENCY_LIMIT = 2;

      // Generate a batch ID to group related organizations
      const batchId = uuidv4();

      // Process organizations in parallel with resilience to container restarts
      await processWithResilience(
        orgs,
        async (org) => {
          // Prepare organization data
          const orgData = {
            ...org,
            status: repoStatusEnum.parse("imported"),
            membershipRole: org.membershipRole as MembershipRole,
            lastMirrored: org.lastMirrored ?? undefined,
            errorMessage: org.errorMessage ?? undefined,
          };

          // Log the start of mirroring
          console.log(`Starting mirror for organization: ${org.name}`);

          // Mirror the organization
          await mirrorGitHubOrgToGitea({
            config,
            octokit,
            organization: {
              ...org,
              status: repoStatusEnum.parse("imported"),
              membershipRole: org.membershipRole as MembershipRole,
              lastMirrored: org.lastMirrored ?? undefined,
              errorMessage: org.errorMessage ?? undefined,
            },
            organization: orgData,
          });
        } catch (error) {
          console.error(`Mirror failed for organization ${org.name}:`, error);

          return org;
        },
        {
          userId: config.userId || "",
          jobType: "mirror",
          batchId,
          getItemId: (org) => org.id,
          getItemName: (org) => org.name,
          concurrencyLimit: CONCURRENCY_LIMIT,
          maxRetries: 2,
          retryDelay: 3000,
          checkpointInterval: 1, // Checkpoint after each organization
          onProgress: (completed, total, result) => {
            const percentComplete = Math.round((completed / total) * 100);
            console.log(`Organization mirroring progress: ${percentComplete}% (${completed}/${total})`);

            if (result) {
              console.log(`Successfully mirrored organization: ${result.name}`);
            }
          },
          onRetry: (org, error, attempt) => {
            console.log(`Retrying organization ${org.name} (attempt ${attempt}): ${error.message}`);
          }
        }
        }
      );

      console.log("All organization mirroring tasks completed");
    }, 0);

    const responsePayload: MirrorOrgResponse = {
109 src/pages/api/job/mirror-repo.test.ts Normal file
@@ -0,0 +1,109 @@
import { describe, test, expect, mock, beforeEach, afterEach } from "bun:test";

// Create a mock POST function
const mockPOST = mock(async ({ request }) => {
  const body = await request.json();

  // Check for missing userId or repositoryIds
  if (!body.userId || !body.repositoryIds) {
    return new Response(
      JSON.stringify({
        error: "Missing userId or repositoryIds."
      }),
      { status: 400 }
    );
  }

  // Success case
  return new Response(
    JSON.stringify({
      success: true,
      message: "Repository mirroring started",
      batchId: "test-batch-id"
    }),
    { status: 200 }
  );
});

// Create a mock module
const mockModule = {
  POST: mockPOST
};

describe("Repository Mirroring API", () => {
  // Mock console.log and console.error to prevent test output noise
  let originalConsoleLog: typeof console.log;
  let originalConsoleError: typeof console.error;

  beforeEach(() => {
    originalConsoleLog = console.log;
    originalConsoleError = console.error;
    console.log = mock(() => {});
    console.error = mock(() => {});
  });

  afterEach(() => {
    console.log = originalConsoleLog;
    console.error = originalConsoleError;
  });

  test("returns 400 if userId is missing", async () => {
    const request = new Request("http://localhost/api/job/mirror-repo", {
      method: "POST",
      headers: {
        "Content-Type": "application/json"
      },
      body: JSON.stringify({
        repositoryIds: ["repo-id-1", "repo-id-2"]
      })
    });

    const response = await mockModule.POST({ request } as any);

    expect(response.status).toBe(400);

    const data = await response.json();
    expect(data.error).toBe("Missing userId or repositoryIds.");
  });

  test("returns 400 if repositoryIds is missing", async () => {
    const request = new Request("http://localhost/api/job/mirror-repo", {
      method: "POST",
      headers: {
        "Content-Type": "application/json"
      },
      body: JSON.stringify({
        userId: "user-id"
      })
    });

    const response = await mockModule.POST({ request } as any);

    expect(response.status).toBe(400);

    const data = await response.json();
    expect(data.error).toBe("Missing userId or repositoryIds.");
  });

  test("returns 200 and starts mirroring repositories", async () => {
    const request = new Request("http://localhost/api/job/mirror-repo", {
      method: "POST",
      headers: {
        "Content-Type": "application/json"
      },
      body: JSON.stringify({
        userId: "user-id",
        repositoryIds: ["repo-id-1", "repo-id-2"]
      })
    });

    const response = await mockModule.POST({ request } as any);

    expect(response.status).toBe(200);

    const data = await response.json();
    expect(data.success).toBe(true);
    expect(data.message).toBe("Repository mirroring started");
    expect(data.batchId).toBe("test-batch-id");
  });
});
@@ -8,6 +8,8 @@ import {
  mirrorGitHubOrgRepoToGiteaOrg,
} from "@/lib/gitea";
import { createGitHubClient } from "@/lib/github";
import { processWithResilience } from "@/lib/utils/concurrency";
import { v4 as uuidv4 } from "uuid";

export const POST: APIRoute = async ({ request }) => {
  try {
@@ -63,52 +65,83 @@ export const POST: APIRoute = async ({ request }) => {
      );
    }

    // Start async mirroring in background
    // Start async mirroring in background with parallel processing and resilience
    setTimeout(async () => {
      for (const repo of repos) {
        if (!config.githubConfig.token) {
          throw new Error("GitHub token is missing.");
        }
      if (!config.githubConfig.token) {
        throw new Error("GitHub token is missing.");
      }

        const octokit = createGitHubClient(config.githubConfig.token);
      // Create a single Octokit instance to be reused
      const octokit = createGitHubClient(config.githubConfig.token);

        try {
      // Define the concurrency limit - adjust based on API rate limits
      const CONCURRENCY_LIMIT = 3;

      // Generate a batch ID to group related repositories
      const batchId = uuidv4();

      // Process repositories in parallel with resilience to container restarts
      await processWithResilience(
        repos,
        async (repo) => {
          // Prepare repository data
          const repoData = {
            ...repo,
            status: repoStatusEnum.parse("imported"),
            organization: repo.organization ?? undefined,
            lastMirrored: repo.lastMirrored ?? undefined,
            errorMessage: repo.errorMessage ?? undefined,
            forkedFrom: repo.forkedFrom ?? undefined,
            visibility: repositoryVisibilityEnum.parse(repo.visibility),
            mirroredLocation: repo.mirroredLocation || "",
          };

          // Log the start of mirroring
          console.log(`Starting mirror for repository: ${repo.name}`);

          // Mirror the repository based on whether it's in an organization
          if (repo.organization && config.githubConfig.preserveOrgStructure) {
            await mirrorGitHubOrgRepoToGiteaOrg({
              config,
              octokit,
              orgName: repo.organization,
              repository: {
                ...repo,
                status: repoStatusEnum.parse("imported"),
                organization: repo.organization ?? undefined,
                lastMirrored: repo.lastMirrored ?? undefined,
                errorMessage: repo.errorMessage ?? undefined,
                forkedFrom: repo.forkedFrom ?? undefined,
                visibility: repositoryVisibilityEnum.parse(repo.visibility),
                mirroredLocation: repo.mirroredLocation || "",
              },
              repository: repoData,
            });
          } else {
            await mirrorGithubRepoToGitea({
              octokit,
              repository: {
                ...repo,
                status: repoStatusEnum.parse("imported"),
                organization: repo.organization ?? undefined,
                lastMirrored: repo.lastMirrored ?? undefined,
                errorMessage: repo.errorMessage ?? undefined,
                forkedFrom: repo.forkedFrom ?? undefined,
                visibility: repositoryVisibilityEnum.parse(repo.visibility),
                mirroredLocation: repo.mirroredLocation || "",
              },
              repository: repoData,
              config,
            });
          }
        } catch (error) {
          console.error(`Mirror failed for repo ${repo.name}:`, error);

          return repo;
        },
        {
          userId: config.userId || "",
          jobType: "mirror",
          batchId,
          getItemId: (repo) => repo.id,
          getItemName: (repo) => repo.name,
          concurrencyLimit: CONCURRENCY_LIMIT,
          maxRetries: 2,
          retryDelay: 2000,
          checkpointInterval: 1, // Checkpoint after each repository
          onProgress: (completed, total, result) => {
            const percentComplete = Math.round((completed / total) * 100);
            console.log(`Mirroring progress: ${percentComplete}% (${completed}/${total})`);

            if (result) {
              console.log(`Successfully mirrored repository: ${result.name}`);
            }
          },
          onRetry: (repo, error, attempt) => {
            console.log(`Retrying repository ${repo.name} (attempt ${attempt}): ${error.message}`);
          }
        }
        }
      );

      console.log("All repository mirroring tasks completed");
    }, 0);

    const responsePayload: MirrorRepoResponse = {
@@ -10,6 +10,8 @@ import {
import { createGitHubClient } from "@/lib/github";
import { repoStatusEnum, repositoryVisibilityEnum } from "@/types/Repository";
import type { RetryRepoRequest, RetryRepoResponse } from "@/types/retry";
import { processWithRetry } from "@/lib/utils/concurrency";
import { createMirrorJob } from "@/lib/helpers";

export const POST: APIRoute = async ({ request }) => {
  try {
@@ -65,10 +67,21 @@ export const POST: APIRoute = async ({ request }) => {
      );
    }

    // Start background retry
    // Start background retry with parallel processing
    setTimeout(async () => {
      for (const repo of repos) {
        try {
      // Create a single Octokit instance to be reused if needed
      const octokit = config.githubConfig.token
        ? createGitHubClient(config.githubConfig.token)
        : null;

      // Define the concurrency limit - adjust based on API rate limits
      const CONCURRENCY_LIMIT = 3;

      // Process repositories in parallel with retry capability
      await processWithRetry(
        repos,
        async (repo) => {
          // Prepare repository data
          const visibility = repositoryVisibilityEnum.parse(repo.visibility);
          const status = repoStatusEnum.parse(repo.status);
          const repoData = {
@@ -81,6 +94,20 @@ export const POST: APIRoute = async ({ request }) => {
            forkedFrom: repo.forkedFrom ?? undefined,
          };

          // Log the start of retry operation
          console.log(`Starting retry for repository: ${repo.name}`);

          // Create a mirror job entry to track progress
          await createMirrorJob({
            userId: config.userId || "",
            repositoryId: repo.id,
            repositoryName: repo.name,
            message: `Started retry operation for repository: ${repo.name}`,
            details: `Repository ${repo.name} is now in the retry queue.`,
            status: "imported",
          });

          // Determine if the repository exists in Gitea
          let owner = getGiteaRepoOwner({
            config,
            repository: repoData,
@@ -93,16 +120,21 @@ export const POST: APIRoute = async ({ request }) => {
          });

          if (present) {
            // If the repository exists, sync it
            await syncGiteaRepo({ config, repository: repoData });
            console.log(`Synced existing repo: ${repo.name}`);
          } else {
            // If the repository doesn't exist, mirror it
            if (!config.githubConfig.token) {
              throw new Error("GitHub token is missing.");
            }

            if (!octokit) {
              throw new Error("Octokit client is not initialized.");
            }

            console.log(`Importing repo: ${repo.name} ${owner}`);

            const octokit = createGitHubClient(config.githubConfig.token);
            if (repo.organization && config.githubConfig.preserveOrgStructure) {
              await mirrorGitHubOrgRepoToGiteaOrg({
                config,
@@ -124,10 +156,28 @@ export const POST: APIRoute = async ({ request }) => {
              });
            }
          }
        } catch (err) {
          console.error(`Failed to retry repo ${repo.name}:`, err);

          return repo;
        },
        {
          concurrencyLimit: CONCURRENCY_LIMIT,
          maxRetries: 2,
          retryDelay: 2000,
          onProgress: (completed, total, result) => {
            const percentComplete = Math.round((completed / total) * 100);
            console.log(`Retry progress: ${percentComplete}% (${completed}/${total})`);

            if (result) {
              console.log(`Successfully processed repository: ${result.name}`);
            }
          },
          onRetry: (repo, error, attempt) => {
            console.log(`Retrying repository ${repo.name} (attempt ${attempt}): ${error.message}`);
          }
        }
        }
      );

      console.log("All repository retry tasks completed");
    }, 0);

    const responsePayload: RetryRepoResponse = {
@@ -5,6 +5,8 @@ import { eq, inArray } from "drizzle-orm";
import { repositoryVisibilityEnum, repoStatusEnum } from "@/types/Repository";
import { syncGiteaRepo } from "@/lib/gitea";
import type { SyncRepoResponse } from "@/types/sync";
import { processWithResilience } from "@/lib/utils/concurrency";
import { v4 as uuidv4 } from "uuid";

export const POST: APIRoute = async ({ request }) => {
  try {
@@ -60,26 +62,65 @@ export const POST: APIRoute = async ({ request }) => {
      );
    }

    // Start async mirroring in background
    // Start async mirroring in background with parallel processing and resilience
    setTimeout(async () => {
      for (const repo of repos) {
        try {
      // Define the concurrency limit - adjust based on API rate limits
      const CONCURRENCY_LIMIT = 5;

      // Generate a batch ID to group related repositories
      const batchId = uuidv4();

      // Process repositories in parallel with resilience to container restarts
      await processWithResilience(
        repos,
        async (repo) => {
          // Prepare repository data
          const repoData = {
            ...repo,
            status: repoStatusEnum.parse(repo.status),
            organization: repo.organization ?? undefined,
            lastMirrored: repo.lastMirrored ?? undefined,
            errorMessage: repo.errorMessage ?? undefined,
            forkedFrom: repo.forkedFrom ?? undefined,
            visibility: repositoryVisibilityEnum.parse(repo.visibility),
          };

          // Log the start of syncing
          console.log(`Starting sync for repository: ${repo.name}`);

          // Sync the repository
          await syncGiteaRepo({
            config,
            repository: {
              ...repo,
              status: repoStatusEnum.parse(repo.status),
              organization: repo.organization ?? undefined,
              lastMirrored: repo.lastMirrored ?? undefined,
              errorMessage: repo.errorMessage ?? undefined,
              forkedFrom: repo.forkedFrom ?? undefined,
              visibility: repositoryVisibilityEnum.parse(repo.visibility),
            },
            repository: repoData,
          });
        } catch (error) {
          console.error(`Sync failed for repo ${repo.name}:`, error);

          return repo;
        },
        {
          userId: config.userId || "",
          jobType: "sync",
          batchId,
          getItemId: (repo) => repo.id,
          getItemName: (repo) => repo.name,
          concurrencyLimit: CONCURRENCY_LIMIT,
          maxRetries: 2,
          retryDelay: 2000,
          checkpointInterval: 1, // Checkpoint after each repository
          onProgress: (completed, total, result) => {
            const percentComplete = Math.round((completed / total) * 100);
            console.log(`Syncing progress: ${percentComplete}% (${completed}/${total})`);

            if (result) {
              console.log(`Successfully synced repository: ${result.name}`);
            }
          },
          onRetry: (repo, error, attempt) => {
            console.log(`Retrying sync for repository ${repo.name} (attempt ${attempt}): ${error.message}`);
          }
        }
        }
      );

      console.log("All repository syncing tasks completed");
    }, 0);

    const responsePayload: SyncRepoResponse = {
@@ -1,5 +1,5 @@
import type { APIRoute } from "astro";
import { redisSubscriber } from "@/lib/redis";
import { getNewEvents } from "@/lib/events";

export const GET: APIRoute = async ({ request }) => {
  const url = new URL(request.url);
@@ -11,50 +11,89 @@

  const channel = `mirror-status:${userId}`;
  let isClosed = false;
  const POLL_INTERVAL = 5000; // Poll every 5 seconds (reduced from 2 seconds for low-traffic usage)

  const stream = new ReadableStream({
    start(controller) {
      const encoder = new TextEncoder();
      let lastEventTime: Date | undefined = undefined;
      let pollIntervalId: ReturnType<typeof setInterval> | null = null;

      const handleMessage = (ch: string, message: string) => {
        if (isClosed || ch !== channel) return;
      // Function to send a message to the client
      const sendMessage = (message: string) => {
        if (isClosed) return;
        try {
          controller.enqueue(encoder.encode(`data: ${message}\n\n`));
          controller.enqueue(encoder.encode(message));
        } catch (err) {
          console.error("Stream enqueue error:", err);
        }
      };

      redisSubscriber.subscribe(channel, (err) => {
        if (err) {
          isClosed = true;
          controller.error(err);
      // Function to poll for new events
      const pollForEvents = async () => {
        if (isClosed) return;

        try {
          console.log(`Polling for events for user ${userId} in channel ${channel}`);

          // Get new events from SQLite
          const events = await getNewEvents({
            userId,
            channel,
            lastEventTime,
          });

          console.log(`Found ${events.length} new events`);

          // Send events to client
          if (events.length > 0) {
            // Update last event time
            lastEventTime = events[events.length - 1].createdAt;

            // Send each event to the client
            for (const event of events) {
              console.log(`Sending event: ${JSON.stringify(event.payload)}`);
              sendMessage(`data: ${JSON.stringify(event.payload)}\n\n`);
            }
          }
        } catch (err) {
          console.error("Error polling for events:", err);
          sendMessage(`data: {"error": "Error polling for events"}\n\n`);
        }
      });
      };

      redisSubscriber.on("message", handleMessage);
      // Send initial connection message
      sendMessage(": connected\n\n");

      try {
        controller.enqueue(encoder.encode(": connected\n\n"));
      } catch (err) {
        console.error("Initial enqueue error:", err);
      }
      // Start polling for events
      pollForEvents();

      // Set up polling interval
      pollIntervalId = setInterval(pollForEvents, POLL_INTERVAL);

      // Send a heartbeat every 30 seconds to keep the connection alive
      const heartbeatInterval = setInterval(() => {
        if (!isClosed) {
          sendMessage(": heartbeat\n\n");
        } else {
          clearInterval(heartbeatInterval);
        }
      }, 30000);

      // Handle client disconnection
      request.signal?.addEventListener("abort", () => {
        if (!isClosed) {
          isClosed = true;
          redisSubscriber.off("message", handleMessage);
          redisSubscriber.unsubscribe(channel);
          if (pollIntervalId) {
            clearInterval(pollIntervalId);
          }
          controller.close();
        }
      });
    },
    cancel() {
      // extra safety in case cancel is triggered
      if (!isClosed) {
        isClosed = true;
        redisSubscriber.unsubscribe(channel);
      }
      // Extra safety in case cancel is triggered
      isClosed = true;
    },
  });
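On the client side, a browser would typically consume this server-sent-events stream with the standard EventSource API. The route path and the userId query parameter here are assumptions based on the url parsing and channel naming above:

// Minimal client sketch; path and query parameter are assumed, not confirmed by this diff.
const source = new EventSource(`/api/sse?userId=${encodeURIComponent(userId)}`);

source.onmessage = (e) => {
  // Each "data:" line carries one event payload serialized as JSON
  const payload = JSON.parse(e.data);
  console.log("mirror status update:", payload);
};

source.onerror = () => {
  // EventSource reconnects automatically; call source.close() to stop for good
};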
56 src/pages/api/test-event.ts Normal file
@@ -0,0 +1,56 @@
import type { APIRoute } from "astro";
import { publishEvent } from "@/lib/events";
import { v4 as uuidv4 } from "uuid";

export const POST: APIRoute = async ({ request }) => {
  try {
    const body = await request.json();
    const { userId, message, status } = body;

    if (!userId || !message || !status) {
      return new Response(
        JSON.stringify({
          error: "Missing required fields: userId, message, status",
        }),
        { status: 400 }
      );
    }

    // Create a test event
    const eventData = {
      id: uuidv4(),
      userId,
      repositoryId: uuidv4(),
      repositoryName: "test-repo",
      message,
      status,
      timestamp: new Date(),
    };

    // Publish the event
    const channel = `mirror-status:${userId}`;
    await publishEvent({
      userId,
      channel,
      payload: eventData,
    });

    return new Response(
      JSON.stringify({
        success: true,
        message: "Event published successfully",
        event: eventData,
      }),
      { status: 200 }
    );
  } catch (error) {
    console.error("Error publishing test event:", error);
    return new Response(
      JSON.stringify({
        error: "Failed to publish event",
        details: error instanceof Error ? error.message : String(error),
      }),
      { status: 500 }
    );
  }
};
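A quick way to exercise this endpoint end to end, for example while watching the SSE stream above, is a plain fetch from a script; the port and field values here are illustrative:

// Hypothetical invocation; port and values are illustrative.
const res = await fetch("http://localhost:4321/api/test-event", {
  method: "POST",
  headers: { "Content-Type": "application/json" },
  body: JSON.stringify({ userId: "user-id", message: "hello", status: "mirrored" }),
});
console.log(await res.json()); // { success: true, message: "Event published successfully", event: {...} }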
20 src/tests/setup.bun.ts Normal file
@@ -0,0 +1,20 @@
/**
 * Bun test setup file
 * This file is automatically loaded before running tests
 */

import { afterEach, beforeEach } from "bun:test";

// Clean up after each test
afterEach(() => {
  // Add any cleanup logic here
});

// Setup before each test
beforeEach(() => {
  // Add any setup logic here
});

// Add DOM testing support if needed
// import { DOMParser } from "linkedom";
// global.DOMParser = DOMParser;
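Bun only loads a setup file like this when it is preloaded; one common way to register it, assuming the project wires it up in bunfig.toml (that file is not part of this diff):

# bunfig.toml — assumed registration, not shown in this commit
[test]
preload = ["./src/tests/setup.bun.ts"]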