Mirror of https://github.com/RayLabsHQ/gitea-mirror.git (synced 2025-12-07 03:56:46 +03:00)

Compare commits (49 commits)
4ca4356ad1
3136a2120d
615ebd5079
6e48d3f86c
c5de7e616d
309f8c4341
0c596ac241
894be88a28
6ab7f0a5a0
abe3113755
f4bc28e6c2
aaf8dc6fe4
cda78bc0f5
9ccd656734
8b5c5d8ed2
1ab642c9e7
1eae725535
5bf52c806f
a15178d2cd
32ef9124a7
161685b966
0cf95b2a0e
c896194aeb
315d892cf4
b7eaa94ca2
52dbe6a2d9
e423d78cf9
f6b51414a0
8a35c0368f
6f64838b55
f37867ea0c
4aa7e665ac
4b570f555a
97676f3b04
04e8b817d3
6d13ff29ca
c179953649
eb2d76a4b7
145bee8d96
cad72da016
4a01a351f0
98973adfe5
f6b5df472a
b09cabd154
f9c77bbee0
e95f1d99b5
d5b0102080
94aff30dda
38206e7d3d
@@ -5,10 +5,10 @@
 # Node.js
 node_modules
+# We don't exclude bun.lock* as it's needed for the build
 npm-debug.log
 yarn-debug.log
 yarn-error.log
-pnpm-debug.log
 
 # Build outputs
 dist
@@ -62,4 +62,3 @@ logs
 # Cache
 .cache
 .npm
-.pnpm-store
.github/assets/logo.png | BIN (vendored, new file, binary file not shown) | Size: 1.6 MiB
.github/workflows/README.md | 3 changes (vendored)
@@ -24,8 +24,7 @@ This workflow runs on all branches and pull requests. It:
 - On push to any branch (except changes to README.md and docs)
 - On pull requests to any branch (except changes to README.md and docs)
 
-**Key features:**
-- Uses pnpm for faster dependency installation
+- Uses Bun for dependency installation
 - Caches dependencies to speed up builds
 - Uploads build artifacts for 7 days
 
.github/workflows/astro-build-test.yml | 31 changes (vendored)
@@ -16,31 +16,32 @@ jobs:
   build-and-test:
     name: Build and Test Astro Project
     runs-on: ubuntu-latest
 
     steps:
       - name: Checkout repository
         uses: actions/checkout@v4
 
-      - name: Install pnpm
-        uses: pnpm/action-setup@v3
-        with:
-          version: 10
-          run_install: false
+      - name: Setup Bun
+        uses: oven-sh/setup-bun@v1
+        with:
+          bun-version: '1.2.9'
 
-      - name: Setup Node.js
-        uses: actions/setup-node@v4
-        with:
-          node-version: 'lts/*'
-          cache: 'pnpm'
-
-      - name: Install dependencies
-        run: pnpm install
+      - name: Check lockfile and install dependencies
+        run: |
+          # Check if bun.lock exists, if not check for bun.lockb
+          if [ -f "bun.lock" ]; then
+            echo "Using existing bun.lock file"
+          elif [ -f "bun.lockb" ]; then
+            echo "Found bun.lockb, creating symlink to bun.lock"
+            ln -s bun.lockb bun.lock
+          fi
+          bun install
 
       - name: Run tests
-        run: pnpm test
+        run: bun test --coverage
 
       - name: Build Astro project
-        run: pnpm build
+        run: bunx --bun astro build
 
       - name: Upload build artifacts
         uses: actions/upload-artifact@v4
 
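The new workflow's lockfile handling and build steps can be reproduced outside CI; a minimal local sketch, assuming Bun >= 1.2.9 is on PATH and you are in the repository root:

```bash
# Prefer the text lockfile; fall back to the binary one, mirroring the CI step
if [ ! -f bun.lock ] && [ -f bun.lockb ]; then
  ln -s bun.lockb bun.lock
fi
bun install
bun test --coverage
bunx --bun astro build
```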
.github/workflows/docker-build.yml | 5 changes (vendored)
@@ -18,11 +18,6 @@ jobs:
       contents: write
       packages: write
 
-    services:
-      redis:
-        image: redis:7-alpine
-        ports: ['6379:6379']
-
     steps:
       - uses: actions/checkout@v4
 
.github/workflows/docker-scan.yml | 4 changes (vendored)
@@ -7,14 +7,14 @@ on:
       - 'Dockerfile'
       - '.dockerignore'
       - 'package.json'
-      - 'pnpm-lock.yaml'
+      - 'bun.lock*'
   pull_request:
     branches: [ main ]
     paths:
       - 'Dockerfile'
       - '.dockerignore'
       - 'package.json'
-      - 'pnpm-lock.yaml'
+      - 'bun.lock*'
   schedule:
     - cron: '0 0 * * 0' # Run weekly on Sunday at midnight
 
CHANGELOG.md | 36 additions (new file)
@@ -0,0 +1,36 @@
+# Changelog
+
+All notable changes to the Gitea Mirror project will be documented in this file.
+
+The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/),
+and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html).
+
+## [2.5.2] - 2024-11-22
+
+### Fixed
+- Fixed version information in health API for Docker deployments by setting npm_package_version environment variable in entrypoint script
+
+## [2.5.1] - 2024-10-01
+
+### Fixed
+- Fixed Docker entrypoint script to prevent unnecessary `bun install` on container startup
+- Removed redundant dependency installation in Docker containers for pre-built images
+- Fixed "PathAlreadyExists" errors during container initialization
+
+### Changed
+- Improved database initialization in Docker entrypoint script
+- Added additional checks for TypeScript versions of database management scripts
+
+## [2.5.0] - 2024-09-15
+
+Initial public release with core functionality:
+
+### Added
+- GitHub to Gitea repository mirroring
+- User authentication and management
+- Dashboard with mirroring statistics
+- Configuration management for mirroring settings
+- Support for organization mirroring
+- Automated mirroring with configurable schedules
+- Docker multi-architecture support (amd64, arm64)
+- LXC container deployment scripts
Dockerfile | 81 changes
@@ -1,82 +1,47 @@
 # syntax=docker/dockerfile:1.4
 
-FROM node:lts-alpine AS base
-ENV PNPM_HOME=/usr/local/bin
-ENV PATH=$PNPM_HOME:$PATH
-RUN apk add --no-cache libc6-compat
+FROM oven/bun:1.2.9-alpine AS base
+WORKDIR /app
+RUN apk add --no-cache libc6-compat python3 make g++ gcc wget sqlite
 
-# -----------------------------------
+# ----------------------------
 FROM base AS deps
-WORKDIR /app
-RUN apk add --no-cache python3 make g++ gcc
-
-RUN --mount=type=cache,target=/root/.npm \
-    corepack enable && corepack prepare pnpm@latest --activate
-
-COPY package.json pnpm-lock.yaml* ./
-
-# Full dev install
-RUN --mount=type=cache,target=/root/.local/share/pnpm/store \
-    pnpm install --frozen-lockfile
-
-# -----------------------------------
-FROM base AS builder
-WORKDIR /app
-RUN apk add --no-cache python3 make g++ gcc
-
-RUN --mount=type=cache,target=/root/.npm \
-    corepack enable && corepack prepare pnpm@latest --activate
-
-COPY --from=deps /app/node_modules ./node_modules
+COPY package.json ./
+COPY bun.lock* ./
+RUN bun install --frozen-lockfile
+
+# ----------------------------
+FROM deps AS builder
 COPY . .
-RUN pnpm build
-# Compile TypeScript scripts to JavaScript
+RUN bun run build
 RUN mkdir -p dist/scripts && \
     for script in scripts/*.ts; do \
-      node_modules/.bin/tsc --outDir dist/scripts --module commonjs --target es2020 --esModuleInterop $script || true; \
+      bun build "$script" --target=bun --outfile=dist/scripts/$(basename "${script%.ts}.js"); \
     done
 
-# -----------------------------------
+# ----------------------------
 FROM deps AS pruner
-WORKDIR /app
-
-# Prune dev dependencies and just keep the production bits
-RUN --mount=type=cache,target=/root/.local/share/pnpm/store \
-    pnpm prune --prod
+RUN bun install --production --frozen-lockfile
 
-# -----------------------------------
+# ----------------------------
 FROM base AS runner
 WORKDIR /app
 
-# Only copy production node_modules and built output
 COPY --from=pruner /app/node_modules ./node_modules
 COPY --from=builder /app/dist ./dist
 COPY --from=builder /app/package.json ./package.json
 COPY --from=builder /app/docker-entrypoint.sh ./docker-entrypoint.sh
 COPY --from=builder /app/scripts ./scripts
-COPY --from=builder /app/data ./data
 
 ENV NODE_ENV=production
 ENV HOST=0.0.0.0
 ENV PORT=4321
 ENV DATABASE_URL=file:data/gitea-mirror.db
 
-# Make entrypoint executable
-RUN chmod +x /app/docker-entrypoint.sh
-
-ENTRYPOINT ["/app/docker-entrypoint.sh"]
-
-RUN apk add --no-cache wget sqlite && \
-    mkdir -p /app/data && \
-    addgroup --system --gid 1001 nodejs && \
-    adduser --system --uid 1001 gitea-mirror && \
-    chown -R gitea-mirror:nodejs /app/data
-
-COPY --from=builder --chown=gitea-mirror:nodejs /app/dist ./dist
-COPY --from=pruner --chown=gitea-mirror:nodejs /app/node_modules ./node_modules
-COPY --from=builder --chown=gitea-mirror:nodejs /app/package.json ./package.json
-COPY --from=builder --chown=gitea-mirror:nodejs /app/scripts ./scripts
+RUN chmod +x ./docker-entrypoint.sh && \
+    mkdir -p /app/data && \
+    addgroup --system --gid 1001 nodejs && \
+    adduser --system --uid 1001 gitea-mirror && \
+    chown -R gitea-mirror:nodejs /app/data
 
 USER gitea-mirror
 
@@ -84,10 +49,6 @@ VOLUME /app/data
 EXPOSE 4321
 
 HEALTHCHECK --interval=30s --timeout=5s --start-period=5s --retries=3 \
-  CMD wget --no-verbose --tries=1 --spider http://localhost:4321/ || exit 1
+  CMD wget --no-verbose --tries=1 --spider http://localhost:4321/api/health || exit 1
 
-# Create a startup script that initializes the database before starting the application
-COPY --from=builder --chown=gitea-mirror:nodejs /app/docker-entrypoint.sh ./docker-entrypoint.sh
-RUN chmod +x ./docker-entrypoint.sh
-
-CMD ["./docker-entrypoint.sh"]
+ENTRYPOINT ["./docker-entrypoint.sh"]
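The reworked multi-stage build can be smoke-tested locally; a sketch, with an assumed local image tag:

```bash
# Build the Bun-based image and run it with a persistent data volume
docker build -t gitea-mirror:local .
docker run -d -p 4321:4321 -v gitea-mirror-data:/app/data gitea-mirror:local

# Probe the same URL the HEALTHCHECK now targets
wget --no-verbose --tries=1 --spider http://localhost:4321/api/health
```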
README.md | 259 changes
@@ -1,11 +1,34 @@
-# Gitea Mirror
-
 <p align="center">
-  <i>A modern web application for automatically mirroring repositories from GitHub to your self-hosted Gitea instance.</i><br>
-  <sub>Designed for developers, teams, and organizations who want to retain full control of their code while still collaborating on GitHub.</sub>
+  <img src=".github/assets/logo.png" alt="Gitea Mirror Logo" width="120" />
+  <h1>Gitea Mirror</h1>
+  <p><i>A modern web app for automatically mirroring repositories from GitHub to your self-hosted Gitea.</i></p>
+  <p align="center">
+    <a href="https://github.com/arunavo4/gitea-mirror/releases/latest"><img src="https://img.shields.io/github/v/tag/arunavo4/gitea-mirror?label=release" alt="release"/></a>
+    <a href="https://github.com/arunavo4/gitea-mirror/actions/workflows/astro-build-test.yml"><img src="https://img.shields.io/github/actions/workflow/status/arunavo4/gitea-mirror/astro-build-test.yml?branch=main" alt="build"/></a>
+    <a href="https://github.com/arunavo4/gitea-mirror/pkgs/container/gitea-mirror"><img src="https://img.shields.io/badge/ghcr.io-container-blue?logo=github" alt="container"/></a>
+    <a href="https://github.com/arunavo4/gitea-mirror/blob/main/LICENSE"><img src="https://img.shields.io/github/license/arunavo4/gitea-mirror" alt="license"/></a>
+  </p>
 </p>
+
+## 🚀 Quick Start
+
+```bash
+# Using Docker (recommended)
+docker compose --profile production up -d
+
+# Using Bun
+bun run setup && bun run dev
+
+# Using LXC Containers
+# For Proxmox VE (online)
+curl -fsSL https://raw.githubusercontent.com/arunavo4/gitea-mirror/main/scripts/gitea-mirror-lxc-proxmox.sh | bash
+
+# For local testing (offline-friendly)
+sudo LOCAL_REPO_DIR=~/Development/gitea-mirror ./scripts/gitea-mirror-lxc-local.sh
+```
+
+See the [LXC Container Deployment Guide](scripts/README-lxc.md).
+
 <p align="center">
   <img src=".github/assets/dashboard.png" alt="Dashboard" width="80%"/>
 </p>
@@ -50,7 +73,7 @@ See the [Quick Start Guide](docs/quickstart.md) for detailed instructions on get
 
 ### Prerequisites
 
-- Node.js 22 or later
+- Bun 1.2.9 or later
 - A GitHub account with a personal access token
 - A Gitea instance with an access token
 
@@ -77,7 +100,7 @@ Before running the application in production mode for the first time, you need t
 
 ```bash
 # Initialize the database for production mode
-pnpm setup
+bun run setup
 ```
 
 This will create the necessary tables. On first launch, you'll be guided through creating your admin account with a secure password.
@@ -95,13 +118,13 @@ Gitea Mirror provides multi-architecture Docker images that work on both ARM64 (
 docker compose --profile production up -d
 
 # For development mode (requires configuration)
-# Ensure you have run pnpm setup first
+# Ensure you have run bun run setup first
 docker compose -f docker-compose.dev.yml up -d
 ```
 
 > [!IMPORTANT]
-> **Docker Compose is the recommended method for running Gitea Mirror** as it automatically sets up the required Redis sidecar service that the application depends on.
+> **Docker Compose is the recommended method for running Gitea Mirror** as it provides a consistent environment with proper volume management for the SQLite database.
 
 > [!NOTE]
@@ -109,19 +132,15 @@ docker compose -f docker-compose.dev.yml up -d
 
 ##### Using Pre-built Images from GitHub Container Registry
 
-If you want to run the container directly without Docker Compose, you'll need to set up a Redis instance separately:
+If you want to run the container directly without Docker Compose:
 
 ```bash
-# First, start a Redis container
-docker run -d --name gitea-mirror-redis redis:alpine
-
 # Pull the latest multi-architecture image
 docker pull ghcr.io/arunavo4/gitea-mirror:latest
 
-# Run the application with a link to the Redis container
-# Note: The REDIS_URL environment variable is required and must point to the Redis container
-docker run -d -p 4321:4321 --link gitea-mirror-redis:redis \
-  -e REDIS_URL=redis://redis:6379 \
+# Run the application with a volume for persistent data
+docker run -d -p 4321:4321 \
+  -v gitea-mirror-data:/app/data \
   ghcr.io/arunavo4/gitea-mirror:latest
 ```
 
@@ -148,6 +167,40 @@ docker compose --profile production up -d
 
 See [Docker build documentation](./scripts/README-docker.md) for more details.
 
+##### Using LXC Containers
+
+Gitea Mirror offers two deployment options for LXC containers:
+
+**1. Proxmox VE (online, recommended for production)**
+
+```bash
+# One-command installation on Proxmox VE
+# Optional env overrides: CTID HOSTNAME STORAGE DISK_SIZE CORES MEMORY BRIDGE IP_CONF
+curl -fsSL https://raw.githubusercontent.com/arunavo4/gitea-mirror/main/scripts/gitea-mirror-lxc-proxmox.sh | bash
+```
+
+**2. Local testing (offline-friendly, works on developer laptops)**
+
+```bash
+# Download the script
+curl -fsSL https://raw.githubusercontent.com/arunavo4/gitea-mirror/main/scripts/gitea-mirror-lxc-local.sh -o gitea-mirror-lxc-local.sh
+chmod +x gitea-mirror-lxc-local.sh
+
+# Run with your local repo directory
+sudo LOCAL_REPO_DIR=~/Development/gitea-mirror ./gitea-mirror-lxc-local.sh
+```
+
+Both scripts:
+- Set up a privileged Ubuntu 22.04 LXC container
+- Install Bun runtime environment
+- Build the application
+- Configure a systemd service
+- Start the service automatically
+
+The application includes a health check endpoint at `/api/health` for monitoring.
+
+See the [LXC Container Deployment Guide](scripts/README-lxc.md) for detailed instructions.
+
 ##### Building Your Own Image
 
 For manual Docker builds (without the helper script):
@@ -180,7 +233,6 @@ The Docker container can be configured with the following environment variables:
 - `HOST`: Host to bind to (default: `0.0.0.0`)
 - `PORT`: Port to listen on (default: `4321`)
 - `JWT_SECRET`: Secret key for JWT token generation (important for security)
-- `REDIS_URL`: URL for Redis connection (required, default: none). When using Docker Compose, this should be set to `redis://redis:6379` to connect to the Redis container.
 
 #### Manual Installation
@@ -191,40 +243,40 @@ git clone https://github.com/arunavo4/gitea-mirror.git
 cd gitea-mirror
 
 # Quick setup (installs dependencies and initializes the database)
-pnpm setup
+bun run setup
 
 # Development Mode Options
 
 # Run in development mode
-pnpm dev
+bun run dev
 
 # Run in development mode with clean database (removes existing DB first)
-pnpm dev:clean
+bun run dev:clean
 
 # Production Mode Options
 
 # Build the application
-pnpm build
+bun run build
 
 # Preview the production build
-pnpm preview
+bun run preview
 
 # Start the production server (default)
-pnpm start
+bun run start
 
 # Start the production server with a clean setup
-pnpm start:fresh
+bun run start:fresh
 
 # Database Management
 
 # Initialize the database
-pnpm init-db
+bun run init-db
 
 # Reset users for testing first-time signup
-pnpm reset-users
+bun run reset-users
 
 # Check database status
-pnpm check-db
+bun run check-db
 ```
 
 ### Configuration
@@ -239,7 +291,7 @@ Key configuration options include:
 - Scheduling options for automatic mirroring
 
 > [!IMPORTANT]
-> **Redis is a required component for Gitea Mirror** as it's used for job queuing and caching.
+> **SQLite is the only database required for Gitea Mirror**, handling both data storage and real-time event notifications.
 
 ## 🚀 Development
 
@@ -247,10 +299,10 @@ Key configuration options include:
 
 ```bash
 # Install dependencies
-pnpm setup
+bun run setup
 
 # Start the development server
-pnpm dev
+bun run dev
 ```
 
@@ -330,12 +382,12 @@ docker compose -f docker-compose.dev.yml up -d
 
 > [!TIP]
 > You can also create a `.env` file with your GitHub and Gitea credentials:
 >
 > ```env
 > # GitHub credentials
 > GITHUB_TOKEN=your-github-token
 > GITHUB_USERNAME=your-github-username
 >
 > # Gitea credentials (will be set up after you create a user in the local Gitea instance)
 > GITEA_TOKEN=your-local-gitea-token
 > GITEA_USERNAME=your-local-gitea-username
@@ -344,10 +396,10 @@ docker compose -f docker-compose.dev.yml up -d
 ## Technologies Used
 
 - **Frontend**: Astro, React, Shadcn UI, Tailwind CSS v4
-- **Backend**: Node.js
-- **Database**: SQLite (default) or PostgreSQL
-- **Caching/Queue**: Redis
+- **Backend**: Bun
+- **Database**: SQLite (handles both data storage and event notifications)
 - **API Integration**: GitHub API (Octokit), Gitea API
+- **Deployment Options**: Docker containers, LXC containers (Proxmox VE and local testing)
 
 ## Contributing
 
@@ -357,27 +409,6 @@ Contributions are welcome! Please feel free to submit a Pull Request.
 
 This project is licensed under the MIT License - see the [LICENSE](LICENSE) file for details.
 
-## Project Status
-
-This project is now complete and ready for production use with version 1.0.0. All planned features have been implemented, thoroughly tested, and optimized for performance:
-
-- ✅ User-friendly dashboard with status overview
-- ✅ Repository management interface
-- ✅ Organization management interface
-- ✅ Configuration management for GitHub and Gitea
-- ✅ Scheduling and automation
-- ✅ Activity logging and monitoring
-- ✅ Responsive design for all screen sizes
-- ✅ Modern toast notifications for better user feedback
-- ✅ First-time user signup experience
-- ✅ Better error handling and user guidance
-- ✅ Comprehensive error handling
-- ✅ Unit tests for components and API
-- ✅ Direct GitHub to Gitea mirroring (no external dependencies)
-- ✅ Docker and docker-compose support for easy deployment
-- ✅ Multi-architecture support (ARM64 and x86_64)
-- ✅ Light/dark mode toggle
-- ✅ Persistent configuration storage
-
 ## Troubleshooting
 
@@ -385,14 +416,14 @@ This project is now complete and ready for production use with version 1.0.0. Al
 
 > [!WARNING]
 > If you encounter network-related warnings or errors when running Docker Compose, such as:
 >
 > ```
 > WARN[0095] a network with name gitea-network exists but was not created by compose.
 > Set `external: true` to use an existing network
 > ```
 >
 > or
 >
 > ```
 > network gitea-network was found but has incorrect label com.docker.compose.network set to "" (expected: "gitea-network")
 > ```
@@ -416,7 +447,7 @@ Try the following steps:
 
 > [!TIP]
 > If you need to share the network with other Docker Compose projects, you can modify the `docker-compose.dev.yml` file to mark the network as external:
 >
 > ```yaml
 > networks:
 >   gitea-network:
@@ -424,62 +455,60 @@ Try the following steps:
 >     external: true
 > ```
 
-### Redis Connection Issues
+### Database Persistence
 
-> [!CAUTION]
-> If the application fails to connect to Redis with errors like `ECONNREFUSED 127.0.0.1:6379`, ensure:
->
-> 1. The Redis container is running:
->    ```bash
->    docker ps | grep redis
->    ```
-> 2. The `REDIS_URL` environment variable is correctly set to `redis://redis:6379` in your Docker Compose file.
-> 3. Both the application and Redis containers are on the same Docker network.
-> 4. If running without Docker Compose, ensure you've started a Redis container and linked it properly:
->    ```bash
->    # Start Redis container
->    docker run -d --name gitea-mirror-redis redis:alpine
->    # Run application with link to Redis
->    docker run -d -p 4321:4321 --link gitea-mirror-redis:redis \
->      -e REDIS_URL=redis://redis:6379 \
->      ghcr.io/arunavo4/gitea-mirror:latest
->    ```
-
-#### Improving Redis Connection Resilience
-
 > [!TIP]
-> For better Redis connection handling, you can modify the `src/lib/redis.ts` file to include retry logic and better error handling:
-
-```typescript
-import Redis from "ioredis";
-
-// Connect to Redis using REDIS_URL environment variable or default to redis://redis:6379
-const redisUrl = process.env.REDIS_URL ?? 'redis://redis:6379';
-
-console.log(`Connecting to Redis at: ${redisUrl}`);
-
-// Configure Redis client with connection options
-const redisOptions = {
-  retryStrategy: (times) => {
-    // Retry with exponential backoff up to 30 seconds
-    const delay = Math.min(times * 100, 3000);
-    console.log(`Redis connection attempt ${times} failed. Retrying in ${delay}ms...`);
-    return delay;
-  },
-  maxRetriesPerRequest: 5,
-  enableReadyCheck: true,
-  connectTimeout: 10000,
-};
-
-export const redis = new Redis(redisUrl, redisOptions);
-export const redisPublisher = new Redis(redisUrl, redisOptions);
-export const redisSubscriber = new Redis(redisUrl, redisOptions);
-
-// Log connection events
-redis.on('connect', () => console.log('Redis client connected'));
-redis.on('error', (err) => console.error('Redis client error:', err));
-```
+> The application uses SQLite for all data storage and event notifications. Make sure the database file is properly mounted when using Docker:
+>
+> ```bash
+> # Run with a volume for persistent data storage
+> docker run -d -p 4321:4321 \
+>   -v gitea-mirror-data:/app/data \
+>   ghcr.io/arunavo4/gitea-mirror:latest
+> ```
+>
+> For homelab/self-hosted setups, you can use the provided Docker Compose file with automatic event cleanup:
+>
+> ```bash
+> # Clone the repository
+> git clone https://github.com/arunavo4/gitea-mirror.git
+> cd gitea-mirror
+>
+> # Start the application with Docker Compose
+> docker-compose -f docker-compose.homelab.yml up -d
+> ```
+>
+> This setup includes a cron job that runs daily to clean up old events and prevent the database from growing too large.
+
+#### Database Maintenance
+
+> [!TIP]
+> For database maintenance, you can use the provided scripts:
+>
+> ```bash
+> # Check database integrity
+> bun run check-db
+>
+> # Fix database issues
+> bun run fix-db
+>
+> # Reset user accounts (for development)
+> bun run reset-users
+>
+> # Clean up old events (keeps last 7 days by default)
+> bun run cleanup-events
+>
+> # Clean up old events with custom retention period (e.g., 30 days)
+> bun run cleanup-events 30
+> ```
+>
+> For automated maintenance, consider setting up a cron job to run the cleanup script periodically:
+>
+> ```bash
+> # Add this to your crontab (runs daily at 2 AM)
+> 0 2 * * * cd /path/to/gitea-mirror && bun run cleanup-events
+> ```
 
 > [!NOTE]
@@ -494,13 +523,13 @@ redis.on('error', (err) => console.error('Redis client error:', err));
 
 > [!TIP]
 > If containers are not starting properly, check their health status:
 >
 > ```bash
 > docker ps --format "{{.Names}}: {{.Status}}"
 > ```
 >
 > For more detailed logs:
 >
 > ```bash
 > docker logs gitea-mirror-dev
 > ```
@@ -11,7 +11,12 @@ export default defineConfig({
     mode: 'standalone',
   }),
   vite: {
-    plugins: [tailwindcss()]
+    plugins: [tailwindcss()],
+    build: {
+      rollupOptions: {
+        external: ['bun']
+      }
+    }
   },
   integrations: [react()]
 });
crontab | 4 additions (new file)
@@ -0,0 +1,4 @@
+# Run event cleanup daily at 2 AM
+0 2 * * * cd /app && bun run cleanup-events 30 >> /app/data/cleanup-events.log 2>&1
+
+# Empty line at the end is required for cron to work properly
@@ -51,7 +51,6 @@ services:
       - gitea-mirror-data:/app/data
     depends_on:
       - gitea
-      - redis
     environment:
       - NODE_ENV=development
       - DATABASE_URL=file:data/gitea-mirror.db
@@ -75,7 +74,6 @@ services:
       - GITEA_ORGANIZATION=${GITEA_ORGANIZATION:-github-mirrors}
      - GITEA_ORG_VISIBILITY=${GITEA_ORG_VISIBILITY:-public}
      - DELAY=${DELAY:-3600}
-      - REDIS_URL=redis://redis:6379
     healthcheck:
       test: ["CMD", "wget", "--no-verbose", "--tries=1", "--spider", "http://localhost:4321/"]
       interval: 30s
@@ -85,16 +83,7 @@ services:
     networks:
       - gitea-network
 
-  redis:
-    image: redis:7-alpine
-    container_name: redis
-    restart: unless-stopped
-    ports:
-      - "6379:6379"
-    volumes:
-      - redis-data:/data
-    networks:
-      - gitea-network
-
 # Define named volumes for data persistence
 volumes:
@@ -102,8 +91,6 @@ volumes:
   gitea-config: # Gitea config volume
   gitea-mirror-data: # Gitea Mirror database volume
 
-  redis-data:
-
 # Define networks
 networks:
   gitea-network:
docker-compose.homelab.yml | 38 additions (new file)
@@ -0,0 +1,38 @@
+version: '3.8'
+
+services:
+  gitea-mirror:
+    image: ghcr.io/arunavo4/gitea-mirror:latest
+    container_name: gitea-mirror
+    restart: unless-stopped
+    ports:
+      - "4321:4321"
+    volumes:
+      - gitea-mirror-data:/app/data
+      # Mount the crontab file
+      - ./crontab:/etc/cron.d/gitea-mirror-cron
+    environment:
+      - NODE_ENV=production
+      - HOST=0.0.0.0
+      - PORT=4321
+      - DATABASE_URL=sqlite://data/gitea-mirror.db
+      - DELAY=${DELAY:-3600}
+    healthcheck:
+      test: ["CMD", "curl", "-f", "http://localhost:4321/api/health"]
+      interval: 1m
+      timeout: 10s
+      retries: 3
+      start_period: 30s
+    # Install cron in the container and set up the cron job
+    command: >
+      sh -c "
+      apt-get update && apt-get install -y cron curl &&
+      chmod 0644 /etc/cron.d/gitea-mirror-cron &&
+      crontab /etc/cron.d/gitea-mirror-cron &&
+      service cron start &&
+      bun dist/server/entry.mjs
+      "
+
+# Define named volumes for database persistence
+volumes:
+  gitea-mirror-data: # Database volume
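Once the homelab stack is up, the cron wiring can be checked from the host; a sketch, assuming the container name set in the compose file above:

```bash
docker compose -f docker-compose.homelab.yml up -d

# Confirm the cleanup job was installed and watch its log
docker exec gitea-mirror crontab -l
docker exec gitea-mirror tail /app/data/cleanup-events.log
```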
@@ -19,8 +19,6 @@ services:
       - "4321:4321"
     volumes:
       - gitea-mirror-data:/app/data
-    depends_on:
-      - redis
     environment:
       - NODE_ENV=production
       - DATABASE_URL=file:data/gitea-mirror.db
@@ -44,7 +42,6 @@ services:
       - GITEA_ORGANIZATION=${GITEA_ORGANIZATION:-github-mirrors}
       - GITEA_ORG_VISIBILITY=${GITEA_ORG_VISIBILITY:-public}
       - DELAY=${DELAY:-3600}
-      - REDIS_URL=redis://redis:6379
     healthcheck:
       test: ["CMD", "wget", "--no-verbose", "--tries=3", "--spider", "http://localhost:4321/"]
       interval: 30s
@@ -53,16 +50,6 @@ services:
       start_period: 15s
     profiles: ["production"]
 
-  redis:
-    image: redis:7-alpine
-    container_name: redis
-    restart: unless-stopped
-    ports:
-      - "6379:6379"
-    volumes:
-      - redis-data:/data
-
 # Define named volumes for database persistence
 volumes:
   gitea-mirror-data: # Database volume
-  redis-data:
@@ -5,19 +5,18 @@ set -e
 # Ensure data directory exists
 mkdir -p /app/data
 
-# If pnpm is available, run setup (for dev images), else run node init directly
-if command -v pnpm >/dev/null 2>&1; then
-  echo "Running pnpm setup (if needed)..."
-  pnpm setup || true
-fi
+# Skip dependency installation entirely for pre-built images
+# Dependencies are already installed during the Docker build process
 
 # Initialize the database if it doesn't exist
 if [ ! -f "/app/data/gitea-mirror.db" ]; then
   echo "Initializing database..."
   if [ -f "dist/scripts/init-db.js" ]; then
-    node dist/scripts/init-db.js
+    bun dist/scripts/init-db.js
   elif [ -f "dist/scripts/manage-db.js" ]; then
-    node dist/scripts/manage-db.js init
+    bun dist/scripts/manage-db.js init
+  elif [ -f "scripts/manage-db.ts" ]; then
+    bun scripts/manage-db.ts init
   else
     echo "Warning: Could not find database initialization scripts in dist/scripts."
     echo "Creating and initializing database manually..."
@@ -111,23 +110,75 @@ if [ ! -f "/app/data/gitea-mirror.db" ]; then
       status TEXT NOT NULL DEFAULT 'imported',
       message TEXT NOT NULL,
       timestamp TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP,
+
+      -- New fields for job resilience
+      job_type TEXT NOT NULL DEFAULT 'mirror',
+      batch_id TEXT,
+      total_items INTEGER,
+      completed_items INTEGER DEFAULT 0,
+      item_ids TEXT, -- JSON array as text
+      completed_item_ids TEXT DEFAULT '[]', -- JSON array as text
+      in_progress INTEGER NOT NULL DEFAULT 0, -- Boolean as integer
+      started_at TIMESTAMP,
+      completed_at TIMESTAMP,
+      last_checkpoint TIMESTAMP,
+
       FOREIGN KEY (user_id) REFERENCES users(id)
     );
+
+    CREATE INDEX IF NOT EXISTS idx_mirror_jobs_user_id ON mirror_jobs(user_id);
+    CREATE INDEX IF NOT EXISTS idx_mirror_jobs_batch_id ON mirror_jobs(batch_id);
+    CREATE INDEX IF NOT EXISTS idx_mirror_jobs_in_progress ON mirror_jobs(in_progress);
+    CREATE INDEX IF NOT EXISTS idx_mirror_jobs_job_type ON mirror_jobs(job_type);
+    CREATE INDEX IF NOT EXISTS idx_mirror_jobs_timestamp ON mirror_jobs(timestamp);
+
+    CREATE TABLE IF NOT EXISTS events (
+      id TEXT PRIMARY KEY,
+      user_id TEXT NOT NULL,
+      channel TEXT NOT NULL,
+      payload TEXT NOT NULL,
+      read INTEGER NOT NULL DEFAULT 0,
+      created_at INTEGER NOT NULL DEFAULT (strftime('%s','now')),
+      FOREIGN KEY (user_id) REFERENCES users(id)
+    );
+
+    CREATE INDEX IF NOT EXISTS idx_events_user_channel ON events(user_id, channel);
+    CREATE INDEX IF NOT EXISTS idx_events_created_at ON events(created_at);
+    CREATE INDEX IF NOT EXISTS idx_events_read ON events(read);
 EOF
     echo "Database initialized with required tables."
   fi
 else
   echo "Database already exists, checking for issues..."
   if [ -f "dist/scripts/fix-db-issues.js" ]; then
-    node dist/scripts/fix-db-issues.js
+    bun dist/scripts/fix-db-issues.js
   elif [ -f "dist/scripts/manage-db.js" ]; then
-    node dist/scripts/manage-db.js fix
+    bun dist/scripts/manage-db.js fix
+  elif [ -f "scripts/manage-db.ts" ]; then
+    bun scripts/manage-db.ts fix
   fi
 
-  # Since the application is not used by anyone yet, we've removed the schema updates and migrations
-  echo "Database already exists, no migrations needed."
+  # Run database migrations
+  echo "Running database migrations..."
+
+  # Update mirror_jobs table with new columns for resilience
+  if [ -f "dist/scripts/update-mirror-jobs-table.js" ]; then
+    echo "Updating mirror_jobs table..."
+    bun dist/scripts/update-mirror-jobs-table.js
+  elif [ -f "scripts/update-mirror-jobs-table.ts" ]; then
+    echo "Updating mirror_jobs table using TypeScript script..."
+    bun scripts/update-mirror-jobs-table.ts
+  else
+    echo "Warning: Could not find mirror_jobs table update script."
+  fi
+fi
+
+# Extract version from package.json and set as environment variable
+if [ -f "package.json" ]; then
+  export npm_package_version=$(grep -o '"version": *"[^"]*"' package.json | cut -d'"' -f4)
+  echo "Setting application version: $npm_package_version"
 fi
 
 # Start the application
 echo "Starting Gitea Mirror..."
-exec node ./dist/server/entry.mjs
+exec bun ./dist/server/entry.mjs
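The tables the entrypoint creates can be inspected directly, since the runtime image installs the sqlite CLI; a sketch against the default database path:

```bash
# List the tables, then count unread rows in the new events table
sqlite3 /app/data/gitea-mirror.db ".tables"
sqlite3 /app/data/gitea-mirror.db "SELECT COUNT(*) FROM events WHERE read = 0;"
```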
docs/testing.md | 127 additions (new file)
@@ -0,0 +1,127 @@
+# Testing in Gitea Mirror
+
+This document provides guidance on testing in the Gitea Mirror project.
+
+## Current Status
+
+The project now uses Bun's built-in test runner, which is Jest-compatible and provides a fast, reliable testing experience. We've migrated away from Vitest due to compatibility issues with Bun.
+
+## Running Tests
+
+To run tests, use the following commands:
+
+```bash
+# Run all tests
+bun test
+
+# Run tests in watch mode (automatically re-run when files change)
+bun test --watch
+
+# Run tests with coverage reporting
+bun test --coverage
+```
+
+## Test File Naming Conventions
+
+Bun's test runner automatically discovers test files that match the following patterns:
+
+- `*.test.{js|jsx|ts|tsx}`
+- `*_test.{js|jsx|ts|tsx}`
+- `*.spec.{js|jsx|ts|tsx}`
+- `*_spec.{js|jsx|ts|tsx}`
+
+## Writing Tests
+
+The project uses Bun's test runner with a Jest-compatible API. Here's an example test:
+
+```typescript
+// example.test.ts
+import { describe, test, expect } from "bun:test";
+
+describe("Example Test", () => {
+  test("should pass", () => {
+    expect(true).toBe(true);
+  });
+});
+```
+
+### Testing React Components
+
+For testing React components, we use React Testing Library:
+
+```typescript
+// component.test.tsx
+import { describe, test, expect } from "bun:test";
+import { render, screen } from "@testing-library/react";
+import MyComponent from "../components/MyComponent";
+
+describe("MyComponent", () => {
+  test("renders correctly", () => {
+    render(<MyComponent />);
+    expect(screen.getByText("Hello World")).toBeInTheDocument();
+  });
+});
+```
+
+## Test Setup
+
+The test setup is defined in `src/tests/setup.bun.ts` and includes:
+
+- Automatic cleanup after each test
+- Setup for any global test environment needs
+
+## Mocking
+
+Bun's test runner provides built-in mocking capabilities:
+
+```typescript
+import { test, expect, mock } from "bun:test";
+
+// Create a mock function
+const mockFn = mock(() => "mocked value");
+
+test("mock function", () => {
+  const result = mockFn();
+  expect(result).toBe("mocked value");
+  expect(mockFn).toHaveBeenCalled();
+});
+
+// Mock a module
+mock.module("./some-module", () => {
+  return {
+    someFunction: () => "mocked module function"
+  };
+});
+```
+
+## CI Integration
+
+The CI workflow has been updated to use Bun's test runner. Tests are automatically run as part of the CI pipeline.
+
+## Test Coverage
+
+To generate test coverage reports, run:
+
+```bash
+bun test --coverage
+```
+
+This will generate a coverage report in the `coverage` directory.
+
+## Types of Tests
+
+The project includes several types of tests:
+
+1. **Unit Tests**: Testing individual functions and utilities
+2. **API Tests**: Testing API endpoints
+3. **Component Tests**: Testing React components
+4. **Integration Tests**: Testing how components work together
+
+## Future Improvements
+
+When expanding the test suite, consider:
+
+1. Adding more comprehensive API endpoint tests
+2. Increasing component test coverage
+3. Setting up end-to-end tests with a tool like Playwright
+4. Adding performance tests for critical paths
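Beyond the commands documented above, Bun's runner can target a subset of the suite; a sketch with a hypothetical test path:

```bash
# Run a single test file
bun test src/tests/example.test.ts

# Run only tests whose names match a pattern
bun test --test-name-pattern "renders correctly"
```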
package.json | 87 changes
@@ -1,87 +1,86 @@
 {
   "name": "gitea-mirror",
   "type": "module",
-  "version": "1.0.0",
+  "version": "2.5.2",
   "engines": {
-    "node": ">=22.0.0"
+    "bun": ">=1.2.9"
   },
   "scripts": {
-    "setup": "pnpm install && pnpm manage-db init",
-    "dev": "astro dev",
-    "dev:clean": "pnpm cleanup-db && pnpm manage-db init && astro dev",
-    "build": "astro build",
+    "setup": "bun install && bun run manage-db init && bun run update-db",
+    "dev": "bunx --bun astro dev",
+    "dev:clean": "bun run cleanup-db && bun run manage-db init && bun run update-db && bunx --bun astro dev",
+    "build": "bunx --bun astro build",
     "cleanup-db": "rm -f gitea-mirror.db data/gitea-mirror.db",
-    "manage-db": "tsx scripts/manage-db.ts",
-    "init-db": "tsx scripts/manage-db.ts init",
-    "check-db": "tsx scripts/manage-db.ts check",
-    "fix-db": "tsx scripts/manage-db.ts fix",
-    "reset-users": "tsx scripts/manage-db.ts reset-users",
-    "preview": "astro preview",
-    "start": "node dist/server/entry.mjs",
-    "start:fresh": "pnpm cleanup-db && pnpm manage-db init && node dist/server/entry.mjs",
-    "test": "vitest run",
-    "test:watch": "vitest",
-    "astro": "astro"
+    "manage-db": "bun scripts/manage-db.ts",
+    "init-db": "bun scripts/manage-db.ts init",
+    "update-db": "bun scripts/update-mirror-jobs-table.ts",
+    "check-db": "bun scripts/manage-db.ts check",
+    "fix-db": "bun scripts/manage-db.ts fix",
+    "reset-users": "bun scripts/manage-db.ts reset-users",
+    "cleanup-events": "bun scripts/cleanup-events.ts",
+    "preview": "bunx --bun astro preview",
+    "start": "bun dist/server/entry.mjs",
+    "start:fresh": "bun run cleanup-db && bun run manage-db init && bun run update-db && bun dist/server/entry.mjs",
+    "test": "bun test",
+    "test:watch": "bun test --watch",
+    "test:coverage": "bun test --coverage",
+    "astro": "bunx --bun astro"
   },
   "dependencies": {
     "@astrojs/mdx": "^4.2.6",
     "@astrojs/node": "^9.2.1",
     "@astrojs/react": "^4.2.7",
-    "@libsql/client": "^0.15.4",
     "@octokit/rest": "^21.1.1",
-    "@radix-ui/react-avatar": "^1.1.4",
-    "@radix-ui/react-checkbox": "^1.1.5",
-    "@radix-ui/react-dialog": "^1.1.7",
-    "@radix-ui/react-dropdown-menu": "^2.1.7",
+    "@radix-ui/react-avatar": "^1.1.9",
+    "@radix-ui/react-checkbox": "^1.3.1",
+    "@radix-ui/react-dialog": "^1.1.13",
+    "@radix-ui/react-dropdown-menu": "^2.1.14",
     "@radix-ui/react-label": "^2.1.6",
     "@radix-ui/react-popover": "^1.1.13",
     "@radix-ui/react-radio-group": "^1.3.6",
-    "@radix-ui/react-select": "^2.1.7",
-    "@radix-ui/react-slot": "^1.2.0",
-    "@radix-ui/react-tabs": "^1.1.4",
+    "@radix-ui/react-select": "^2.2.4",
+    "@radix-ui/react-slot": "^1.2.2",
+    "@radix-ui/react-tabs": "^1.1.11",
     "@radix-ui/react-tooltip": "^1.2.6",
-    "@tailwindcss/vite": "^4.1.3",
+    "@tailwindcss/vite": "^4.1.7",
     "@tanstack/react-virtual": "^3.13.8",
     "@types/canvas-confetti": "^1.9.0",
-    "@types/react": "^19.1.2",
-    "@types/react-dom": "^19.1.2",
-    "astro": "^5.7.10",
-    "axios": "^1.8.4",
+    "@types/react": "^19.1.4",
+    "@types/react-dom": "^19.1.5",
+    "astro": "^5.7.13",
+    "axios": "^1.9.0",
     "bcryptjs": "^3.0.2",
     "canvas-confetti": "^1.9.3",
     "class-variance-authority": "^0.7.1",
     "clsx": "^2.1.1",
     "cmdk": "^1.1.1",
-    "drizzle-orm": "^0.41.0",
+    "drizzle-orm": "^0.43.1",
     "fuse.js": "^7.1.0",
-    "ioredis": "^5.6.1",
     "jsonwebtoken": "^9.0.2",
-    "lucide-react": "^0.488.0",
+    "lucide-react": "^0.511.0",
     "next-themes": "^0.4.6",
     "react": "^19.1.0",
     "react-dom": "^19.1.0",
     "react-icons": "^5.5.0",
     "sonner": "^2.0.3",
-    "superagent": "^10.2.0",
-    "tailwind-merge": "^3.2.0",
-    "tailwindcss": "^4.1.3",
-    "tw-animate-css": "^1.2.5",
+    "superagent": "^10.2.1",
+    "tailwind-merge": "^3.3.0",
+    "tailwindcss": "^4.1.7",
+    "tw-animate-css": "^1.3.0",
     "uuid": "^11.1.0",
-    "zod": "^3.24.2"
+    "zod": "^3.25.7"
   },
   "devDependencies": {
     "@testing-library/jest-dom": "^6.6.3",
     "@testing-library/react": "^16.3.0",
     "@types/bcryptjs": "^3.0.0",
-    "@types/better-sqlite3": "^7.6.13",
     "@types/jsonwebtoken": "^9.0.9",
     "@types/superagent": "^8.1.9",
     "@types/uuid": "^10.0.0",
-    "@vitejs/plugin-react": "^4.4.0",
-    "better-sqlite3": "^9.6.0",
+    "@vitejs/plugin-react": "^4.4.1",
     "jsdom": "^26.1.0",
-    "tsx": "^4.19.3",
-    "vitest": "^3.1.1"
+    "tsx": "^4.19.4",
+    "vitest": "^3.1.4"
   },
-  "packageManager": "pnpm@10.10.0"
+  "packageManager": "bun@1.2.9"
 }
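With `"packageManager": "bun@1.2.9"`, every script above is invoked through Bun; a few representative invocations, taken from the new "scripts" block:

```bash
bun run setup               # bun install, init the DB, apply mirror_jobs updates
bun run dev                 # bunx --bun astro dev
bun run test:coverage       # bun test --coverage
bun run cleanup-events 30   # prune events older than 30 days
```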
pnpm-lock.yaml | 7713 changes (generated; file diff suppressed because it is too large)
@@ -43,7 +43,7 @@ The script uses environment variables from the `.env` file in the project root:
 3. Using with docker-compose:
 ```bash
 # Ensure dependencies are installed and database is initialized
-pnpm setup
+bun run setup
 
 # First build the image
 ./scripts/build-docker.sh --load
131
scripts/README-lxc.md
Normal file
131
scripts/README-lxc.md
Normal file
@@ -0,0 +1,131 @@
# LXC Container Deployment Guide

## Overview

Run **Gitea Mirror** in an isolated LXC container, either:

1. **Online, on a Proxmox VE host** – script pulls everything from GitHub
2. **Offline / LAN-only, on a developer laptop** – script pushes your local checkout + Bun ZIP

---

## 1. Proxmox VE (online, recommended for prod)

### Prerequisites

* Proxmox VE node with the default `vmbr0` bridge
* Root shell on the node
* Ubuntu 22.04 LXC template present (`pveam update && pveam download ...`)

### One-command install

```bash
# optional env overrides: CTID HOSTNAME STORAGE DISK_SIZE CORES MEMORY BRIDGE IP_CONF
sudo bash -c "$(curl -fsSL https://raw.githubusercontent.com/arunavo4/gitea-mirror/main/scripts/gitea-mirror-lxc-proxmox.sh)"
```

What it does:

* Creates **privileged** CT `$CTID` with nesting enabled
* Installs curl / git / Bun (official installer)
* Clones & builds `arunavo4/gitea-mirror`
* Writes a root-run systemd service and starts it
* Prints the container IP + random `JWT_SECRET`

Browse to:

```
http://<container-ip>:4321
```

---

## 2. Local testing (LXD on a workstation, works offline)

### Prerequisites

* `lxd` installed (`sudo apt install lxd`; `lxd init --auto`)
* Your repo cloned locally – e.g. `~/Development/gitea-mirror`
* Bun ZIP downloaded once:
  `https://github.com/oven-sh/bun/releases/latest/download/bun-linux-x64.zip`

### Offline installer script

```bash
git clone https://github.com/arunavo4/gitea-mirror.git   # if not already
curl -fsSL https://raw.githubusercontent.com/arunavo4/gitea-mirror/main/scripts/gitea-mirror-lxc-local.sh -o gitea-mirror-lxc-local.sh
chmod +x gitea-mirror-lxc-local.sh

sudo LOCAL_REPO_DIR=~/Development/gitea-mirror \
  ./gitea-mirror-lxc-local.sh
```

What it does:

* Launches privileged LXC `gitea-test` (`lxc launch ubuntu:22.04 ...`)
* Pushes **Bun ZIP** + tarred **local repo** into `/opt`
* Unpacks, builds, initializes DB
* Symlinks both `bun` and `bunx` → `/usr/local/bin`
* Creates a root systemd unit and starts it

Access from host:

```
http://$(lxc exec gitea-test -- hostname -I | awk '{print $1}'):4321
```

(Optional) forward to host localhost:

```bash
sudo lxc config device add gitea-test mirror proxy \
  listen=tcp:0.0.0.0:4321 connect=tcp:127.0.0.1:4321
```

---

## Health-check endpoint

Gitea Mirror includes a built-in health check endpoint at `/api/health` that provides:

- System status and uptime
- Database connectivity check
- Memory usage statistics
- Environment information

You can use this endpoint for monitoring your deployment:

```bash
# Basic check (returns 200 OK if healthy)
curl -I http://<container-ip>:4321/api/health

# Detailed health information (JSON)
curl http://<container-ip>:4321/api/health
```
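The health endpoint above also lends itself to a simple watchdog. A minimal sketch, assuming the service runs inside the container under the `gitea-mirror` systemd unit that both installer scripts create; the watchdog script itself is illustrative and not part of the repository:

```bash
#!/usr/bin/env bash
# health-watchdog.sh (sketch): restart the service when /api/health stops answering
set -euo pipefail

URL="http://127.0.0.1:4321/api/health"

# -f turns HTTP errors into a non-zero exit; --max-time bounds a hung server
if ! curl -fsS --max-time 10 "$URL" >/dev/null; then
  echo "$(date -Is) health check failed; restarting gitea-mirror" >&2
  systemctl restart gitea-mirror
fi
```

Run it from the container's root crontab (e.g. every 5 minutes) for unattended recovery.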
---

## Troubleshooting

| Check          | Command                                                |
| -------------- | ------------------------------------------------------ |
| Service status | `systemctl status gitea-mirror`                        |
| Live logs      | `journalctl -u gitea-mirror -f`                        |
| Verify Bun     | `bun --version && bunx --version`                      |
| DB perms       | `chown -R root:root /opt/gitea-mirror/data` (Proxmox)  |

---

## Connecting LXC and Docker Containers

If you need your LXC container to communicate with Docker containers:

1. On your host machine, create a bridge network:
   ```bash
   docker network create gitea-network
   ```

2. Find the bridge interface created by Docker:
   ```bash
   ip a | grep docker
   # Look for something like docker0 or br-xxxxxxxx
   ```

3. In Proxmox, edit the LXC container's network configuration to use this bridge.
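For step 3, a minimal sketch from the Proxmox shell, assuming CT 106 and a Docker bridge named `br-1a2b3c4d` (both are placeholders; take the real bridge name from step 2 and a free address from `docker network inspect gitea-network`):

```bash
# Attach a second NIC in the container to the bridge Docker created
pct set 106 --net1 name=eth1,bridge=br-1a2b3c4d,ip=172.18.0.50/16
pct reboot 106
```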
@@ -1,12 +1,14 @@
 # Scripts Directory
 
-This folder contains utility scripts for database management.
+This folder contains utility scripts for database management, event management, Docker builds, and LXC container deployment.
 
-## Database Management Tool (manage-db.ts)
+## Database Management
+
+### Database Management Tool (manage-db.ts)
 
 This is a consolidated database management tool that handles all database-related operations. It combines the functionality of the previous separate scripts into a single, more intelligent script that can check, fix, and initialize the database as needed.
 
-### Features
+#### Features
 
 - **Check Mode**: Validates the existence and integrity of the database
 - **Init Mode**: Creates the database only if it doesn't already exist
@@ -14,45 +16,106 @@ This is a consolidated database management tool that handles all database-relate
 - **Reset Users Mode**: Removes all users and their data
 - **Auto Mode**: Automatically checks, fixes, and initializes the database if needed
 
-## Running the Database Management Tool
+#### Running the Database Management Tool
 
 You can execute the database management tool using your package manager with various commands:
 
 ```bash
-# Checks database status (default action if no command is specified, equivalent to 'pnpm check-db')
-pnpm manage-db
+# Checks database status (default action if no command is specified)
+bun run manage-db
 
 # Check database status
-pnpm check-db
+bun run check-db
 
 # Initialize the database (only if it doesn't exist)
-pnpm init-db
+bun run init-db
 
 # Fix database location issues
-pnpm fix-db
+bun run fix-db
 
 # Automatic check, fix, and initialize if needed
-pnpm db-auto
+bun run db-auto
 
 # Reset all users (for testing signup flow)
-pnpm reset-users
+bun run reset-users
 
-# Update the database schema to the latest version
-pnpm update-schema
-
 # Remove database files completely
-pnpm cleanup-db
+bun run cleanup-db
 
 # Complete setup (install dependencies and initialize database)
-pnpm setup
+bun run setup
 
 # Start development server with a fresh database
-pnpm dev:clean
+bun run dev:clean
 
 # Start production server with a fresh database
-pnpm start:fresh
+bun run start:fresh
 ```
 
-## Database File Location
+#### Database File Location
 
 The database file should be located in the `./data/gitea-mirror.db` directory. If the file is found in the root directory, the fix mode will move it to the correct location.
 
+## Event Management
+
+The following scripts help manage events in the SQLite database:
+
+### Event Inspection (check-events.ts)
+
+Displays all events currently stored in the database.
+
+```bash
+bun scripts/check-events.ts
+```
+
+### Event Cleanup (cleanup-events.ts)
+
+Removes old events from the database to prevent it from growing too large.
+
+```bash
+# Remove events older than 7 days (default)
+bun scripts/cleanup-events.ts
+
+# Remove events older than X days
+bun scripts/cleanup-events.ts 14
+```
+
+This script can be scheduled to run periodically (e.g., daily) using cron or another scheduler.
+
+### Mark Events as Read (mark-events-read.ts)
+
+Marks all unread events as read.
+
+```bash
+bun scripts/mark-events-read.ts
+```
+
+### Make Events Appear Older (make-events-old.ts)
+
+For testing purposes, this script modifies event timestamps to make them appear older.
+
+```bash
+bun scripts/make-events-old.ts
+```
+
+## Deployment Scripts
+
+### Docker Deployment
+
+- **build-docker.sh**: Builds the Docker image for the application
+- **docker-diagnostics.sh**: Provides diagnostic information for Docker deployments
+
+### LXC Container Deployment
+
+Two scripts are provided for deploying Gitea Mirror in LXC containers:
+
+1. **gitea-mirror-lxc-proxmox.sh**: For online deployment on a Proxmox VE host
+   - Pulls everything from GitHub
+   - Creates a privileged container with the application
+   - Sets up systemd service
+
+2. **gitea-mirror-lxc-local.sh**: For offline/LAN-only deployment on a developer laptop
+   - Pushes your local checkout + Bun ZIP to the container
+   - Useful for testing without internet access
+
+For detailed instructions on LXC deployment, see [README-lxc.md](./README-lxc.md).
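The Event Cleanup section added above says the script is meant to be cron-scheduled. A minimal crontab sketch, assuming the checkout lives at `/opt/gitea-mirror` and `bun` is symlinked into `/usr/local/bin` as the LXC installer scripts do; both paths are assumptions:

```bash
# root crontab (crontab -e): every day at 03:15, keep 7 days of events
15 3 * * * cd /opt/gitea-mirror && /usr/local/bin/bun scripts/cleanup-events.ts 7 >> /var/log/gitea-mirror-cleanup.log 2>&1
```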
scripts/check-events.ts (new file, 38 lines)
@@ -0,0 +1,38 @@
#!/usr/bin/env bun
/**
 * Script to check events in the database
 */

import { Database } from "bun:sqlite";
import path from "path";
import fs from "fs";

// Define the database path
const dataDir = path.join(process.cwd(), "data");
if (!fs.existsSync(dataDir)) {
  console.error("Data directory not found:", dataDir);
  process.exit(1);
}

const dbPath = path.join(dataDir, "gitea-mirror.db");
if (!fs.existsSync(dbPath)) {
  console.error("Database file not found:", dbPath);
  process.exit(1);
}

// Open the database
const db = new Database(dbPath);

// Check if the events table exists
const tableExists = db.query("SELECT name FROM sqlite_master WHERE type='table' AND name='events'").get();

if (!tableExists) {
  console.error("Events table does not exist");
  process.exit(1);
}

// Get all events
const events = db.query("SELECT * FROM events").all();

console.log("Events in the database:");
console.log(JSON.stringify(events, null, 2));
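check-events.ts dumps every row as JSON; for a quick count or per-channel breakdown the same table can also be read with the sqlite3 CLI, which the LXC installers put in the container. A sketch against the default database path:

```bash
# Total events, then unread events grouped by channel
sqlite3 data/gitea-mirror.db "SELECT COUNT(*) FROM events;"
sqlite3 data/gitea-mirror.db "SELECT channel, COUNT(*) FROM events WHERE read = 0 GROUP BY channel;"
```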
scripts/cleanup-events.ts (new file, 43 lines)
@@ -0,0 +1,43 @@
#!/usr/bin/env bun
/**
 * Script to clean up old events from the database
 * This script should be run periodically (e.g., daily) to prevent the events table from growing too large
 *
 * Usage:
 *   bun scripts/cleanup-events.ts [days]
 *
 * Where [days] is the number of days to keep events (default: 7)
 */

import { cleanupOldEvents } from "../src/lib/events";

// Parse command line arguments
const args = process.argv.slice(2);
const daysToKeep = args.length > 0 ? parseInt(args[0], 10) : 7;

if (isNaN(daysToKeep) || daysToKeep < 1) {
  console.error("Error: Days to keep must be a positive number");
  process.exit(1);
}

async function runCleanup() {
  try {
    console.log(`Starting event cleanup (retention: ${daysToKeep} days)...`);

    // Call the cleanupOldEvents function from the events module
    const result = await cleanupOldEvents(daysToKeep);

    console.log(`Cleanup summary:`);
    console.log(`- Read events deleted: ${result.readEventsDeleted}`);
    console.log(`- Unread events deleted: ${result.unreadEventsDeleted}`);
    console.log(`- Total events deleted: ${result.readEventsDeleted + result.unreadEventsDeleted}`);

    console.log("Event cleanup completed successfully");
  } catch (error) {
    console.error("Error running event cleanup:", error);
    process.exit(1);
  }
}

// Run the cleanup
runCleanup();
scripts/docker-diagnostics.sh (4 changed lines, Executable file → Normal file)
@@ -105,12 +105,12 @@ echo -e "${BLUE} Recommendations ${NC}"
echo -e "${BLUE}=====================================================${NC}"
|
echo -e "${BLUE}=====================================================${NC}"
|
||||||
|
|
||||||
echo -e "\n${YELLOW}For local development:${NC}"
|
echo -e "\n${YELLOW}For local development:${NC}"
|
||||||
echo -e "1. ${GREEN}pnpm setup${NC} (initialize database and install dependencies)"
|
echo -e "1. ${GREEN}bun run setup${NC} (initialize database and install dependencies)"
|
||||||
echo -e "2. ${GREEN}./scripts/build-docker.sh --load${NC} (build and load into Docker)"
|
echo -e "2. ${GREEN}./scripts/build-docker.sh --load${NC} (build and load into Docker)"
|
||||||
echo -e "3. ${GREEN}docker-compose -f docker-compose.dev.yml up -d${NC} (start the development container)"
|
echo -e "3. ${GREEN}docker-compose -f docker-compose.dev.yml up -d${NC} (start the development container)"
|
||||||
|
|
||||||
echo -e "\n${YELLOW}For production deployment (using Docker Compose):${NC}"
|
echo -e "\n${YELLOW}For production deployment (using Docker Compose):${NC}"
|
||||||
echo -e "1. ${GREEN}pnpm setup${NC} (if not already done, to ensure database schema is ready)"
|
echo -e "1. ${GREEN}bun run setup${NC} (if not already done, to ensure database schema is ready)"
|
||||||
echo -e "2. ${GREEN}docker-compose --profile production up -d${NC} (start the production container)"
|
echo -e "2. ${GREEN}docker-compose --profile production up -d${NC} (start the production container)"
|
||||||
|
|
||||||
echo -e "\n${YELLOW}For CI/CD builds:${NC}"
|
echo -e "\n${YELLOW}For CI/CD builds:${NC}"
|
||||||
|
|||||||
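Collected as a copy-paste block, the local-development commands the diagnostics script now recommends (the file names are the ones the script itself prints):

```bash
bun run setup                                      # install dependencies and initialize the database
./scripts/build-docker.sh --load                   # build the image and load it into Docker
docker-compose -f docker-compose.dev.yml up -d     # start the development container
```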
scripts/gitea-mirror-lxc-local.sh (new file, 86 lines, Executable file)
@@ -0,0 +1,86 @@
#!/usr/bin/env bash
# gitea-mirror-lxc-local.sh  (offline, local repo, verbose)

set -euo pipefail

CONTAINER="gitea-test"
IMAGE="ubuntu:22.04"
INSTALL_DIR="/opt/gitea-mirror"
PORT=4321
JWT_SECRET="$(openssl rand -hex 32)"

BUN_ZIP="/tmp/bun-linux-x64.zip"
BUN_URL="https://github.com/oven-sh/bun/releases/latest/download/bun-linux-x64.zip"

LOCAL_REPO_DIR="${LOCAL_REPO_DIR:-./gitea-mirror}"
REPO_TAR="/tmp/gitea-mirror-local.tar.gz"

need() { command -v "$1" >/dev/null || { echo "Missing $1"; exit 1; }; }
need curl; need lxc; need tar; need unzip

# ── build host artefacts ────────────────────────────────────────────────
[[ -d $LOCAL_REPO_DIR ]] || { echo "❌ LOCAL_REPO_DIR not found"; exit 1; }
[[ -f $LOCAL_REPO_DIR/package.json ]] || { echo "❌ package.json missing"; exit 1; }
[[ -f $BUN_ZIP ]] || curl -L --retry 5 --retry-delay 5 -o "$BUN_ZIP" "$BUN_URL"
tar -czf "$REPO_TAR" -C "$(dirname "$LOCAL_REPO_DIR")" "$(basename "$LOCAL_REPO_DIR")"

# ── ensure container exists ─────────────────────────────────────────────
lxd init --auto >/dev/null 2>&1 || true
lxc info "$CONTAINER" >/dev/null 2>&1 || lxc launch "$IMAGE" "$CONTAINER"

echo "🔧 installing base packages…"
sudo lxc exec "$CONTAINER" -- bash -c 'set -ex; apt update; apt install -y unzip tar openssl sqlite3'

echo "⬆️ pushing artefacts…"
sudo lxc file push "$BUN_ZIP" "$CONTAINER/opt/"
sudo lxc file push "$REPO_TAR" "$CONTAINER/opt/"

echo "📦 unpacking Bun + repo…"
sudo lxc exec "$CONTAINER" -- bash -ex <<'IN'
cd /opt
# Bun
unzip -oq bun-linux-x64.zip -d bun
BIN=$(find /opt/bun -type f -name bun -perm -111 | head -n1)
ln -sf "$BIN" /usr/local/bin/bun    # bun
ln -sf "$BIN" /usr/local/bin/bunx   # bunx shim
# Repo
rm -rf /opt/gitea-mirror
mkdir -p /opt/gitea-mirror
tar -xzf gitea-mirror-local.tar.gz --strip-components=1 -C /opt/gitea-mirror
IN

echo "🏗️ bun install / build…"
sudo lxc exec "$CONTAINER" -- bash -ex <<'IN'
cd /opt/gitea-mirror
bun install
bun run build
bun run manage-db init
IN

echo "📝 systemd unit…"
sudo lxc exec "$CONTAINER" -- bash -ex <<IN
cat >/etc/systemd/system/gitea-mirror.service <<SERVICE
[Unit]
Description=Gitea Mirror
After=network.target
[Service]
Type=simple
WorkingDirectory=$INSTALL_DIR
ExecStart=/usr/local/bin/bun dist/server/entry.mjs
Restart=on-failure
RestartSec=10
Environment=NODE_ENV=production
Environment=HOST=0.0.0.0
Environment=PORT=$PORT
Environment=DATABASE_URL=file:data/gitea-mirror.db
Environment=JWT_SECRET=$JWT_SECRET
[Install]
WantedBy=multi-user.target
SERVICE
systemctl daemon-reload
systemctl enable gitea-mirror
systemctl restart gitea-mirror
IN

echo -e "\n✅ finished; service status:"
sudo lxc exec "$CONTAINER" -- systemctl status gitea-mirror --no-pager
scripts/gitea-mirror-lxc-proxmox.sh (new file, 97 lines, Executable file)
@@ -0,0 +1,97 @@
#!/usr/bin/env bash
# gitea-mirror-lxc-proxmox.sh
# Fully online installer for a Proxmox LXC guest running Gitea Mirror + Bun.

set -euo pipefail

# ────── adjustable defaults ──────────────────────────────────────────────
CTID=${CTID:-106}                 # container ID
HOSTNAME=${HOSTNAME:-gitea-mirror}
STORAGE=${STORAGE:-local-lvm}     # where rootfs lives
DISK_SIZE=${DISK_SIZE:-8G}
CORES=${CORES:-2}
MEMORY=${MEMORY:-2048}            # MiB
BRIDGE=${BRIDGE:-vmbr0}
IP_CONF=${IP_CONF:-dhcp}          # or "192.168.1.240/24,gw=192.168.1.1"

PORT=4321
JWT_SECRET=$(openssl rand -hex 32)

REPO="https://github.com/arunavo4/gitea-mirror.git"
# ─────────────────────────────────────────────────────────────────────────

TEMPLATE='ubuntu-22.04-standard_22.04-1_amd64.tar.zst'
TEMPLATE_PATH="/var/lib/vz/template/cache/${TEMPLATE}"

echo "▶️ Ensuring template exists…"
if [[ ! -f $TEMPLATE_PATH ]]; then
  pveam update >/dev/null
  pveam download "$STORAGE" "$TEMPLATE"
fi

echo "▶️ Creating container $CTID (if missing)…"
if ! pct status "$CTID" &>/dev/null; then
  pct create "$CTID" "$TEMPLATE_PATH" \
    --rootfs "$STORAGE:$DISK_SIZE" \
    --hostname "$HOSTNAME" \
    --cores "$CORES" --memory "$MEMORY" \
    --net0 "name=eth0,bridge=$BRIDGE,ip=$IP_CONF" \
    --features nesting=1 \
    --unprivileged 0
fi

pct start "$CTID"

echo "▶️ Installing base packages inside CT $CTID…"
pct exec "$CTID" -- bash -c 'apt update && apt install -y curl git build-essential openssl sqlite3 unzip'

echo "▶️ Installing Bun runtime…"
pct exec "$CTID" -- bash -c '
  export BUN_INSTALL=/opt/bun
  curl -fsSL https://bun.sh/install | bash -s -- --yes
  ln -sf /opt/bun/bin/bun /usr/local/bin/bun
  ln -sf /opt/bun/bin/bun /usr/local/bin/bunx
  bun --version
'

echo "▶️ Cloning & building Gitea Mirror…"
pct exec "$CTID" -- bash -c "
  git clone --depth=1 '$REPO' /opt/gitea-mirror || (cd /opt/gitea-mirror && git pull)
  cd /opt/gitea-mirror
  bun install
  bun run build
  bun run manage-db init
"

echo "▶️ Creating systemd service…"
pct exec "$CTID" -- bash -c "
cat >/etc/systemd/system/gitea-mirror.service <<SERVICE
[Unit]
Description=Gitea Mirror
After=network.target
[Service]
Type=simple
WorkingDirectory=/opt/gitea-mirror
ExecStart=/usr/local/bin/bun dist/server/entry.mjs
Restart=on-failure
RestartSec=10
Environment=NODE_ENV=production
Environment=HOST=0.0.0.0
Environment=PORT=$PORT
Environment=DATABASE_URL=file:data/gitea-mirror.db
Environment=JWT_SECRET=$JWT_SECRET
[Install]
WantedBy=multi-user.target
SERVICE
systemctl daemon-reload
systemctl enable gitea-mirror
systemctl restart gitea-mirror
"

echo -e "\n🔍 Service status:"
pct exec "$CTID" -- systemctl status gitea-mirror --no-pager | head -n15

GUEST_IP=$(pct exec "$CTID" -- hostname -I | awk '{print $1}')
echo -e "\n🌐 Browse to: http://$GUEST_IP:$PORT\n"
echo "🗝️ JWT_SECRET = $JWT_SECRET"
echo -e "\n✅ Done – Gitea Mirror is running in CT $CTID."
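Each `VAR=${VAR:-default}` at the top of the script can be overridden from the environment, as the README's "optional env overrides" comment notes. A sketch of a static-IP install (all values are examples):

```bash
# Create CT 120 with a fixed address instead of DHCP
sudo CTID=120 HOSTNAME=mirror-prod \
  IP_CONF="192.168.1.240/24,gw=192.168.1.1" \
  DISK_SIZE=16G MEMORY=4096 \
  bash scripts/gitea-mirror-lxc-proxmox.sh
```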
scripts/make-events-old.ts (new file, 29 lines)
@@ -0,0 +1,29 @@
#!/usr/bin/env bun
/**
 * Script to make events appear older for testing cleanup
 */

import { db, events } from "../src/lib/db";

async function makeEventsOld() {
  try {
    console.log("Making events appear older...");

    // Calculate a timestamp from 2 days ago
    const oldDate = new Date();
    oldDate.setDate(oldDate.getDate() - 2);

    // Update all events to have an older timestamp
    const result = await db
      .update(events)
      .set({ createdAt: oldDate });

    console.log(`Updated ${result.changes || 0} events to appear older`);
  } catch (error) {
    console.error("Error updating event timestamps:", error);
    process.exit(1);
  }
}

// Run the function
makeEventsOld();
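make-events-old.ts pairs naturally with cleanup-events.ts for an end-to-end retention test: age every event by two days, clean with a one-day cutoff, and the table should come back empty. A sketch:

```bash
bun scripts/make-events-old.ts      # timestamps moved 2 days into the past
bun scripts/cleanup-events.ts 1     # delete anything older than 1 day
bun scripts/check-events.ts         # should now print an empty list
```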
scripts/manage-db.ts
@@ -1,7 +1,6 @@
 import fs from "fs";
 import path from "path";
-import { client, db } from "../src/lib/db";
-import { configs } from "../src/lib/db";
+import { Database } from "bun:sqlite";
 import { v4 as uuidv4 } from "uuid";
 
 // Command line arguments
@@ -21,61 +20,66 @@ const dataDbFile = path.join(dataDir, "gitea-mirror.db");
 const dataDevDbFile = path.join(dataDir, "gitea-mirror-dev.db");
 
 // Database path - ensure we use absolute path
-const dbPath =
-  process.env.DATABASE_URL || `file:${path.join(dataDir, "gitea-mirror.db")}`;
+const dbPath = path.join(dataDir, "gitea-mirror.db");
 
 /**
  * Ensure all required tables exist
 */
 async function ensureTablesExist() {
+  // Create or open the database
+  const db = new Database(dbPath);
+
   const requiredTables = [
     "users",
     "configs",
     "repositories",
     "organizations",
     "mirror_jobs",
+    "events",
   ];
 
   for (const table of requiredTables) {
     try {
-      await client.execute(`SELECT 1 FROM ${table} LIMIT 1`);
-    } catch (error) {
-      if (error instanceof Error && error.message.includes("SQLITE_ERROR")) {
+      // Check if table exists
+      const result = db.query(`SELECT name FROM sqlite_master WHERE type='table' AND name='${table}'`).get();
+
+      if (!result) {
         console.warn(`⚠️ Table '${table}' is missing. Creating it now...`);
 
         switch (table) {
           case "users":
-            await client.execute(
-              `CREATE TABLE users (
+            db.exec(`
+              CREATE TABLE users (
                 id TEXT PRIMARY KEY,
                 username TEXT NOT NULL,
                 password TEXT NOT NULL,
                 email TEXT NOT NULL,
                 created_at INTEGER NOT NULL,
                 updated_at INTEGER NOT NULL
-              )`
-            );
+              )
+            `);
             break;
           case "configs":
-            await client.execute(
-              `CREATE TABLE configs (
+            db.exec(`
+              CREATE TABLE configs (
                 id TEXT PRIMARY KEY,
                 user_id TEXT NOT NULL,
                 name TEXT NOT NULL,
                 is_active INTEGER NOT NULL DEFAULT 1,
                 github_config TEXT NOT NULL,
                 gitea_config TEXT NOT NULL,
-                include TEXT NOT NULL DEFAULT '[]',
+                include TEXT NOT NULL DEFAULT '["*"]',
                 exclude TEXT NOT NULL DEFAULT '[]',
                 schedule_config TEXT NOT NULL,
                 created_at INTEGER NOT NULL DEFAULT (strftime('%s','now')),
                 updated_at INTEGER NOT NULL DEFAULT (strftime('%s','now')),
                 FOREIGN KEY (user_id) REFERENCES users(id)
-              )`
-            );
+              )
+            `);
             break;
           case "repositories":
-            await client.execute(
-              `CREATE TABLE repositories (
+            db.exec(`
+              CREATE TABLE repositories (
                 id TEXT PRIMARY KEY,
                 user_id TEXT NOT NULL,
                 config_id TEXT NOT NULL,
@@ -104,12 +108,12 @@ async function ensureTablesExist() {
                 updated_at INTEGER NOT NULL DEFAULT (strftime('%s','now')),
                 FOREIGN KEY (user_id) REFERENCES users(id),
                 FOREIGN KEY (config_id) REFERENCES configs(id)
-              )`
-            );
+              )
+            `);
             break;
           case "organizations":
-            await client.execute(
-              `CREATE TABLE organizations (
+            db.exec(`
+              CREATE TABLE organizations (
                 id TEXT PRIMARY KEY,
                 user_id TEXT NOT NULL,
                 config_id TEXT NOT NULL,
@@ -125,12 +129,12 @@ async function ensureTablesExist() {
                 updated_at INTEGER NOT NULL DEFAULT (strftime('%s','now')),
                 FOREIGN KEY (user_id) REFERENCES users(id),
                 FOREIGN KEY (config_id) REFERENCES configs(id)
-              )`
-            );
+              )
+            `);
             break;
           case "mirror_jobs":
-            await client.execute(
-              `CREATE TABLE mirror_jobs (
+            db.exec(`
+              CREATE TABLE mirror_jobs (
                 id TEXT PRIMARY KEY,
                 user_id TEXT NOT NULL,
                 repository_id TEXT,
@@ -141,16 +145,56 @@ async function ensureTablesExist() {
                 status TEXT NOT NULL DEFAULT 'imported',
                 message TEXT NOT NULL,
                 timestamp TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP,
+
+                -- New fields for job resilience
+                job_type TEXT NOT NULL DEFAULT 'mirror',
+                batch_id TEXT,
+                total_items INTEGER,
+                completed_items INTEGER DEFAULT 0,
+                item_ids TEXT, -- JSON array as text
+                completed_item_ids TEXT DEFAULT '[]', -- JSON array as text
+                in_progress INTEGER NOT NULL DEFAULT 0, -- Boolean as integer
+                started_at TIMESTAMP,
+                completed_at TIMESTAMP,
+                last_checkpoint TIMESTAMP,
+
                 FOREIGN KEY (user_id) REFERENCES users(id)
-              )`
-            );
+              )
+            `);
+
+            // Create indexes for better performance
+            db.exec(`
+              CREATE INDEX IF NOT EXISTS idx_mirror_jobs_user_id ON mirror_jobs(user_id);
+              CREATE INDEX IF NOT EXISTS idx_mirror_jobs_batch_id ON mirror_jobs(batch_id);
+              CREATE INDEX IF NOT EXISTS idx_mirror_jobs_in_progress ON mirror_jobs(in_progress);
+              CREATE INDEX IF NOT EXISTS idx_mirror_jobs_job_type ON mirror_jobs(job_type);
+              CREATE INDEX IF NOT EXISTS idx_mirror_jobs_timestamp ON mirror_jobs(timestamp);
+            `);
+            break;
+          case "events":
+            db.exec(`
+              CREATE TABLE events (
+                id TEXT PRIMARY KEY,
+                user_id TEXT NOT NULL,
+                channel TEXT NOT NULL,
+                payload TEXT NOT NULL,
+                read INTEGER NOT NULL DEFAULT 0,
+                created_at INTEGER NOT NULL DEFAULT (strftime('%s','now')),
+                FOREIGN KEY (user_id) REFERENCES users(id)
+              )
+            `);
+            db.exec(`
+              CREATE INDEX idx_events_user_channel ON events(user_id, channel);
+              CREATE INDEX idx_events_created_at ON events(created_at);
+              CREATE INDEX idx_events_read ON events(read);
+            `);
             break;
         }
         console.log(`✅ Table '${table}' created successfully.`);
-      } else {
-        console.error(`❌ Error checking table '${table}':`, error);
-        process.exit(1);
       }
+    } catch (error) {
+      console.error(`❌ Error checking table '${table}':`, error);
+      process.exit(1);
     }
   }
 }
@@ -168,7 +212,7 @@ async function checkDatabase() {
     );
     console.warn("This file should be in the data directory.");
     console.warn(
-      'Run "pnpm manage-db fix" to fix this issue or "pnpm cleanup-db" to remove it.'
+      'Run "bun run manage-db fix" to fix this issue or "bun run cleanup-db" to remove it.'
     );
   }
 
@@ -180,10 +224,11 @@ async function checkDatabase() {
 
   // Check for users
   try {
-    const userCountResult = await client.execute(
-      `SELECT COUNT(*) as count FROM users`
-    );
-    const userCount = userCountResult.rows[0].count;
+    const db = new Database(dbPath);
+
+    // Check for users
+    const userCountResult = db.query(`SELECT COUNT(*) as count FROM users`).get();
+    const userCount = userCountResult?.count || 0;
 
     if (userCount === 0) {
       console.log("ℹ️ No users found in the database.");
@@ -197,10 +242,8 @@ async function checkDatabase() {
     }
 
     // Check for configurations
-    const configCountResult = await client.execute(
-      `SELECT COUNT(*) as count FROM configs`
-    );
-    const configCount = configCountResult.rows[0].count;
+    const configCountResult = db.query(`SELECT COUNT(*) as count FROM configs`).get();
+    const configCount = configCountResult?.count || 0;
 
     if (configCount === 0) {
       console.log("ℹ️ No configurations found in the database.");
@@ -215,12 +258,12 @@ async function checkDatabase() {
   } catch (error) {
     console.error("❌ Error connecting to the database:", error);
     console.warn(
-      'The database file might be corrupted. Consider running "pnpm manage-db init" to recreate it.'
+      'The database file might be corrupted. Consider running "bun run manage-db init" to recreate it.'
     );
   }
 } else {
   console.warn("⚠️ WARNING: Database file not found in data directory.");
-  console.warn('Run "pnpm manage-db init" to create it.');
+  console.warn('Run "bun run manage-db init" to create it.');
 }
 }
 
@@ -235,15 +278,16 @@ async function initializeDatabase() {
   if (fs.existsSync(dataDbFile)) {
     console.log("⚠️ Database already exists at data/gitea-mirror.db");
     console.log(
-      'If you want to recreate the database, run "pnpm cleanup-db" first.'
+      'If you want to recreate the database, run "bun run cleanup-db" first.'
     );
     console.log(
-      'Or use "pnpm manage-db reset-users" to just remove users without recreating tables.'
+      'Or use "bun run manage-db reset-users" to just remove users without recreating tables.'
     );
 
     // Check if we can connect to it
     try {
-      await client.execute(`SELECT COUNT(*) as count FROM users`);
+      const db = new Database(dbPath);
+      db.query(`SELECT COUNT(*) as count FROM users`).get();
       console.log("✅ Database is valid and accessible.");
       return;
     } catch (error) {
@@ -257,135 +301,136 @@ async function initializeDatabase() {
   console.log(`Initializing database at ${dbPath}...`);
 
   try {
+    const db = new Database(dbPath);
+
     // Create tables if they don't exist
-    await client.execute(
-      `CREATE TABLE IF NOT EXISTS users (
+    db.exec(`
+      CREATE TABLE IF NOT EXISTS users (
         id TEXT PRIMARY KEY,
         username TEXT NOT NULL,
         password TEXT NOT NULL,
         email TEXT NOT NULL,
         created_at INTEGER NOT NULL,
         updated_at INTEGER NOT NULL
-      )`
-    );
+      )
+    `);
 
     // NOTE: We no longer create a default admin user - user will create one via signup page
 
-    await client.execute(
-      `CREATE TABLE IF NOT EXISTS configs (
+    db.exec(`
+      CREATE TABLE IF NOT EXISTS configs (
         id TEXT PRIMARY KEY,
         user_id TEXT NOT NULL,
         name TEXT NOT NULL,
         is_active INTEGER NOT NULL DEFAULT 1,
         github_config TEXT NOT NULL,
         gitea_config TEXT NOT NULL,
         include TEXT NOT NULL DEFAULT '["*"]',
         exclude TEXT NOT NULL DEFAULT '[]',
         schedule_config TEXT NOT NULL,
         created_at INTEGER NOT NULL DEFAULT (strftime('%s','now')),
         updated_at INTEGER NOT NULL DEFAULT (strftime('%s','now')),
         FOREIGN KEY (user_id) REFERENCES users(id)
-      );
-      `
-    );
+      )
+    `);
 
-    await client.execute(
-      `CREATE TABLE IF NOT EXISTS repositories (
+    db.exec(`
+      CREATE TABLE IF NOT EXISTS repositories (
         id TEXT PRIMARY KEY,
         user_id TEXT NOT NULL,
         config_id TEXT NOT NULL,
         name TEXT NOT NULL,
         full_name TEXT NOT NULL,
         url TEXT NOT NULL,
         clone_url TEXT NOT NULL,
         owner TEXT NOT NULL,
         organization TEXT,
         mirrored_location TEXT DEFAULT '',
         is_private INTEGER NOT NULL DEFAULT 0,
         is_fork INTEGER NOT NULL DEFAULT 0,
         forked_from TEXT,
         has_issues INTEGER NOT NULL DEFAULT 0,
         is_starred INTEGER NOT NULL DEFAULT 0,
         is_archived INTEGER NOT NULL DEFAULT 0,
         size INTEGER NOT NULL DEFAULT 0,
         has_lfs INTEGER NOT NULL DEFAULT 0,
         has_submodules INTEGER NOT NULL DEFAULT 0,
         default_branch TEXT NOT NULL,
         visibility TEXT NOT NULL DEFAULT 'public',
         status TEXT NOT NULL DEFAULT 'imported',
         last_mirrored INTEGER,
         error_message TEXT,
         created_at INTEGER NOT NULL DEFAULT (strftime('%s','now')),
         updated_at INTEGER NOT NULL DEFAULT (strftime('%s','now')),
         FOREIGN KEY (user_id) REFERENCES users(id),
         FOREIGN KEY (config_id) REFERENCES configs(id)
-      );
-      `
-    );
+      )
+    `);
 
-    await client.execute(
-      `CREATE TABLE IF NOT EXISTS organizations (
+    db.exec(`
+      CREATE TABLE IF NOT EXISTS organizations (
         id TEXT PRIMARY KEY,
         user_id TEXT NOT NULL,
         config_id TEXT NOT NULL,
         name TEXT NOT NULL,
         avatar_url TEXT NOT NULL,
         membership_role TEXT NOT NULL DEFAULT 'member',
         is_included INTEGER NOT NULL DEFAULT 1,
         status TEXT NOT NULL DEFAULT 'imported',
         last_mirrored INTEGER,
         error_message TEXT,
         repository_count INTEGER NOT NULL DEFAULT 0,
         created_at INTEGER NOT NULL DEFAULT (strftime('%s','now')),
         updated_at INTEGER NOT NULL DEFAULT (strftime('%s','now')),
         FOREIGN KEY (user_id) REFERENCES users(id),
         FOREIGN KEY (config_id) REFERENCES configs(id)
-      );
-      `
-    );
+      )
+    `);
 
-    await client.execute(
-      `CREATE TABLE IF NOT EXISTS mirror_jobs (
+    db.exec(`
+      CREATE TABLE IF NOT EXISTS mirror_jobs (
         id TEXT PRIMARY KEY,
         user_id TEXT NOT NULL,
         repository_id TEXT,
         repository_name TEXT,
         organization_id TEXT,
         organization_name TEXT,
         details TEXT,
         status TEXT NOT NULL DEFAULT 'imported',
         message TEXT NOT NULL,
         timestamp TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP,
         FOREIGN KEY (user_id) REFERENCES users(id)
-      );
-      `
-    );
+      )
+    `);
+
+    db.exec(`
+      CREATE TABLE IF NOT EXISTS events (
+        id TEXT PRIMARY KEY,
+        user_id TEXT NOT NULL,
+        channel TEXT NOT NULL,
+        payload TEXT NOT NULL,
+        read INTEGER NOT NULL DEFAULT 0,
+        created_at INTEGER NOT NULL DEFAULT (strftime('%s','now')),
+        FOREIGN KEY (user_id) REFERENCES users(id)
+      )
+    `);
+
+    db.exec(`
+      CREATE INDEX IF NOT EXISTS idx_events_user_channel ON events(user_id, channel);
+      CREATE INDEX IF NOT EXISTS idx_events_created_at ON events(created_at);
+      CREATE INDEX IF NOT EXISTS idx_events_read ON events(read);
+    `);
 
     // Insert default config if none exists
-    const configCountResult = await client.execute(
-      `SELECT COUNT(*) as count FROM configs`
-    );
-    const configCount = configCountResult.rows[0].count;
+    const configCountResult = db.query(`SELECT COUNT(*) as count FROM configs`).get();
+    const configCount = configCountResult?.count || 0;
     if (configCount === 0) {
       // Get the first user
-      const firstUserResult = await client.execute(
-        `SELECT id FROM users LIMIT 1`
-      );
-      if (firstUserResult.rows.length > 0) {
-        const userId = firstUserResult.rows[0].id;
+      const firstUserResult = db.query(`SELECT id FROM users LIMIT 1`).get();
+
+      if (firstUserResult) {
+        const userId = firstUserResult.id;
         const configId = uuidv4();
         const githubConfig = JSON.stringify({
           username: process.env.GITHUB_USERNAME || "",
@@ -415,24 +460,23 @@ async function initializeDatabase() {
           nextRun: null,
         });
 
-        await client.execute(
-          `
+        const stmt = db.prepare(`
           INSERT INTO configs (id, user_id, name, is_active, github_config, gitea_config, include, exclude, schedule_config, created_at, updated_at)
           VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
-        `,
-          [
-            configId,
-            userId,
-            "Default Configuration",
-            1,
-            githubConfig,
-            giteaConfig,
-            include,
-            exclude,
-            scheduleConfig,
-            Date.now(),
-            Date.now(),
-          ]
+        `);
+
+        stmt.run(
+          configId,
+          userId,
+          "Default Configuration",
+          1,
+          githubConfig,
+          giteaConfig,
+          include,
+          exclude,
+          scheduleConfig,
+          Date.now(),
+          Date.now()
         );
       }
     }
@@ -452,21 +496,20 @@ async function resetUsers() {
 
   try {
     // Check if the database exists
-    const dbFilePath = dbPath.replace("file:", "");
-    const doesDbExist = fs.existsSync(dbFilePath);
+    const doesDbExist = fs.existsSync(dbPath);
 
     if (!doesDbExist) {
       console.log(
-        "❌ Database file doesn't exist. Run 'pnpm manage-db init' first to create it."
+        "❌ Database file doesn't exist. Run 'bun run manage-db init' first to create it."
      );
       return;
     }
 
+    const db = new Database(dbPath);
+
     // Count existing users
-    const userCountResult = await client.execute(
-      `SELECT COUNT(*) as count FROM users`
-    );
-    const userCount = userCountResult.rows[0].count;
+    const userCountResult = db.query(`SELECT COUNT(*) as count FROM users`).get();
+    const userCount = userCountResult?.count || 0;
 
     if (userCount === 0) {
       console.log("ℹ️ No users found in the database. Nothing to reset.");
@@ -474,63 +517,43 @@ async function resetUsers() {
     }
 
     // Delete all users
-    await client.execute(`DELETE FROM users`);
+    db.exec(`DELETE FROM users`);
     console.log(`✅ Deleted ${userCount} users from the database.`);
 
     // Check dependent configurations that need to be removed
-    const configCount = await client.execute(
-      `SELECT COUNT(*) as count FROM configs`
-    );
-
-    if (
-      configCount.rows &&
-      configCount.rows[0] &&
-      Number(configCount.rows[0].count) > 0
-    ) {
-      await client.execute(`DELETE FROM configs`);
-      console.log(`✅ Deleted ${configCount.rows[0].count} configurations.`);
+    const configCountResult = db.query(`SELECT COUNT(*) as count FROM configs`).get();
+    const configCount = configCountResult?.count || 0;
+
+    if (configCount > 0) {
+      db.exec(`DELETE FROM configs`);
+      console.log(`✅ Deleted ${configCount} configurations.`);
     }
 
     // Check for dependent repositories
-    const repoCount = await client.execute(
-      `SELECT COUNT(*) as count FROM repositories`
-    );
-
-    if (
-      repoCount.rows &&
-      repoCount.rows[0] &&
-      Number(repoCount.rows[0].count) > 0
-    ) {
-      await client.execute(`DELETE FROM repositories`);
-      console.log(`✅ Deleted ${repoCount.rows[0].count} repositories.`);
+    const repoCountResult = db.query(`SELECT COUNT(*) as count FROM repositories`).get();
+    const repoCount = repoCountResult?.count || 0;
+
+    if (repoCount > 0) {
+      db.exec(`DELETE FROM repositories`);
+      console.log(`✅ Deleted ${repoCount} repositories.`);
     }
 
     // Check for dependent organizations
-    const orgCount = await client.execute(
-      `SELECT COUNT(*) as count FROM organizations`
-    );
-
-    if (
-      orgCount.rows &&
-      orgCount.rows[0] &&
-      Number(orgCount.rows[0].count) > 0
-    ) {
-      await client.execute(`DELETE FROM organizations`);
-      console.log(`✅ Deleted ${orgCount.rows[0].count} organizations.`);
+    const orgCountResult = db.query(`SELECT COUNT(*) as count FROM organizations`).get();
+    const orgCount = orgCountResult?.count || 0;
+
+    if (orgCount > 0) {
+      db.exec(`DELETE FROM organizations`);
+      console.log(`✅ Deleted ${orgCount} organizations.`);
     }
 
     // Check for dependent mirror jobs
-    const jobCount = await client.execute(
-      `SELECT COUNT(*) as count FROM mirror_jobs`
-    );
-
-    if (
-      jobCount.rows &&
-      jobCount.rows[0] &&
-      Number(jobCount.rows[0].count) > 0
-    ) {
-      await client.execute(`DELETE FROM mirror_jobs`);
-      console.log(`✅ Deleted ${jobCount.rows[0].count} mirror jobs.`);
+    const jobCountResult = db.query(`SELECT COUNT(*) as count FROM mirror_jobs`).get();
+    const jobCount = jobCountResult?.count || 0;
+
+    if (jobCount > 0) {
+      db.exec(`DELETE FROM mirror_jobs`);
+      console.log(`✅ Deleted ${jobCount} mirror jobs.`);
     }
 
     console.log(
@@ -629,19 +652,20 @@ async function fixDatabaseIssues() {
     console.warn(
       "⚠️ WARNING: Production database file not found in data directory."
     );
-    console.warn('Run "pnpm manage-db init" to create it.');
+    console.warn('Run "bun run manage-db init" to create it.');
   } else {
     console.log("✅ Production database file found in data directory.");
 
     // Check if we can connect to the database
     try {
       // Try to query the database
-      await db.select().from(configs).limit(1);
+      const db = new Database(dbPath);
+      db.query(`SELECT 1 FROM sqlite_master LIMIT 1`).get();
       console.log(`✅ Successfully connected to the database.`);
     } catch (error) {
       console.error("❌ Error connecting to the database:", error);
       console.warn(
-        'The database file might be corrupted. Consider running "pnpm manage-db init" to recreate it.'
+        'The database file might be corrupted. Consider running "bun run manage-db init" to recreate it.'
       );
     }
   }
@@ -692,7 +716,7 @@ Available commands:
   reset-users - Remove all users and their data
   auto        - Automatic mode: check, fix, and initialize if needed
 
-Usage: pnpm manage-db [command]
+Usage: bun run manage-db [command]
 `);
   }
 }
scripts/mark-events-read.ts (new file, 27 lines)
@@ -0,0 +1,27 @@
#!/usr/bin/env bun
/**
 * Script to mark all events as read
 */

import { db, events } from "../src/lib/db";
import { eq } from "drizzle-orm";

async function markEventsAsRead() {
  try {
    console.log("Marking all events as read...");

    // Update all events to mark them as read
    const result = await db
      .update(events)
      .set({ read: true })
      .where(eq(events.read, false));

    console.log(`Marked ${result.changes || 0} events as read`);
  } catch (error) {
    console.error("Error marking events as read:", error);
    process.exit(1);
  }
}

// Run the function
markEventsAsRead();
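A quick sanity check for the script, assuming the sqlite3 CLI is available on the host: flip the flags, then confirm no unread rows remain:

```bash
bun scripts/mark-events-read.ts
sqlite3 data/gitea-mirror.db "SELECT COUNT(*) FROM events WHERE read = 0;"   # expect 0
```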
scripts/update-mirror-jobs-table.ts (new file, 133 lines)
@@ -0,0 +1,133 @@
#!/usr/bin/env bun
/**
 * Script to update the mirror_jobs table with new columns for resilience
 */

import { Database } from "bun:sqlite";
import fs from "fs";
import path from "path";

// Define the database paths
const dataDir = path.join(process.cwd(), "data");
const dbPath = path.join(dataDir, "gitea-mirror.db");

// Ensure data directory exists
if (!fs.existsSync(dataDir)) {
  fs.mkdirSync(dataDir, { recursive: true });
  console.log(`Created data directory at ${dataDir}`);
}

// Check if database exists
if (!fs.existsSync(dbPath)) {
  console.error(`Database file not found at ${dbPath}`);
  console.error("Please run 'bun run init-db' first to create the database.");
  process.exit(1);
}

// Connect to the database
const db = new Database(dbPath);

// Enable foreign keys
db.exec("PRAGMA foreign_keys = ON;");

// Function to check if a column exists in a table
function columnExists(tableName: string, columnName: string): boolean {
  const result = db.query(
    `PRAGMA table_info(${tableName})`
  ).all() as { name: string }[];

  return result.some(column => column.name === columnName);
}

// Main function to update the mirror_jobs table
async function updateMirrorJobsTable() {
  console.log("Checking mirror_jobs table for missing columns...");

  // Start a transaction
  db.exec("BEGIN TRANSACTION;");

  try {
    // Check and add each new column if it doesn't exist
    const columnsToAdd = [
      { name: "job_type", definition: "TEXT NOT NULL DEFAULT 'mirror'" },
      { name: "batch_id", definition: "TEXT" },
      { name: "total_items", definition: "INTEGER" },
      { name: "completed_items", definition: "INTEGER DEFAULT 0" },
      { name: "item_ids", definition: "TEXT" }, // JSON array as text
      { name: "completed_item_ids", definition: "TEXT DEFAULT '[]'" }, // JSON array as text
      { name: "in_progress", definition: "INTEGER NOT NULL DEFAULT 0" }, // Boolean as integer
      { name: "started_at", definition: "TIMESTAMP" },
      { name: "completed_at", definition: "TIMESTAMP" },
      { name: "last_checkpoint", definition: "TIMESTAMP" }
    ];

    let columnsAdded = 0;

    for (const column of columnsToAdd) {
      if (!columnExists("mirror_jobs", column.name)) {
        console.log(`Adding column '${column.name}' to mirror_jobs table...`);
        db.exec(`ALTER TABLE mirror_jobs ADD COLUMN ${column.name} ${column.definition};`);
        columnsAdded++;
      }
    }

    // Commit the transaction
    db.exec("COMMIT;");

    if (columnsAdded > 0) {
      console.log(`✅ Added ${columnsAdded} new columns to mirror_jobs table.`);
    } else {
      console.log("✅ All required columns already exist in mirror_jobs table.");
    }

    // Create indexes for better performance
    console.log("Creating indexes for mirror_jobs table...");

    // Only create indexes if they don't exist
    const indexesResult = db.query(
      `SELECT name FROM sqlite_master WHERE type='index' AND tbl_name='mirror_jobs'`
    ).all() as { name: string }[];

    const existingIndexes = indexesResult.map(idx => idx.name);

    const indexesToCreate = [
      { name: "idx_mirror_jobs_user_id", columns: "user_id" },
      { name: "idx_mirror_jobs_batch_id", columns: "batch_id" },
      { name: "idx_mirror_jobs_in_progress", columns: "in_progress" },
      { name: "idx_mirror_jobs_job_type", columns: "job_type" },
      { name: "idx_mirror_jobs_timestamp", columns: "timestamp" }
|
||||||
|
];
|
||||||
|
|
||||||
|
let indexesCreated = 0;
|
||||||
|
|
||||||
|
for (const index of indexesToCreate) {
|
||||||
|
if (!existingIndexes.includes(index.name)) {
|
||||||
|
console.log(`Creating index '${index.name}'...`);
|
||||||
|
db.exec(`CREATE INDEX ${index.name} ON mirror_jobs(${index.columns});`);
|
||||||
|
indexesCreated++;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
if (indexesCreated > 0) {
|
||||||
|
console.log(`✅ Created ${indexesCreated} new indexes for mirror_jobs table.`);
|
||||||
|
} else {
|
||||||
|
console.log("✅ All required indexes already exist for mirror_jobs table.");
|
||||||
|
}
|
||||||
|
|
||||||
|
console.log("Mirror jobs table update completed successfully.");
|
||||||
|
} catch (error) {
|
||||||
|
// Rollback the transaction in case of error
|
||||||
|
db.exec("ROLLBACK;");
|
||||||
|
console.error("❌ Error updating mirror_jobs table:", error);
|
||||||
|
process.exit(1);
|
||||||
|
} finally {
|
||||||
|
// Close the database connection
|
||||||
|
db.close();
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Run the update function
|
||||||
|
updateMirrorJobsTable().catch(error => {
|
||||||
|
console.error("Unhandled error:", error);
|
||||||
|
process.exit(1);
|
||||||
|
});
|
||||||
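The checkpoint columns exist so an interrupted batch can be resumed instead of restarted. A hypothetical consumer sketch (the resume loop and the `id` column are assumptions for illustration, not code from this change; the column names mirror the migration above):

```ts
import { Database } from "bun:sqlite";

const db = new Database("data/gitea-mirror.db");

// Find the most recently checkpointed job that never finished.
// NOTE: assumes mirror_jobs has an `id` primary key; not shown in this migration.
const job = db
  .query(
    `SELECT id, item_ids, completed_item_ids FROM mirror_jobs
     WHERE in_progress = 1 ORDER BY last_checkpoint DESC LIMIT 1`
  )
  .get() as { id: string; item_ids: string; completed_item_ids: string } | null;

if (job) {
  // item_ids / completed_item_ids are JSON arrays stored as TEXT (see above).
  const done = new Set<string>(JSON.parse(job.completed_item_ids));
  const remaining = (JSON.parse(job.item_ids) as string[]).filter(id => !done.has(id));
  console.log(`Job ${job.id}: ${remaining.length} items left to mirror`);
}
```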
@@ -1,14 +1,14 @@
-import { useEffect, useState } from "react";
+import { useEffect, useState } from 'react';
 import {
   Card,
   CardContent,
   CardDescription,
   CardHeader,
   CardTitle,
-} from "@/components/ui/card";
+} from '@/components/ui/card';
-import { GitHubConfigForm } from "./GitHubConfigForm";
+import { GitHubConfigForm } from './GitHubConfigForm';
-import { GiteaConfigForm } from "./GiteaConfigForm";
+import { GiteaConfigForm } from './GiteaConfigForm';
-import { ScheduleConfigForm } from "./ScheduleConfigForm";
+import { ScheduleConfigForm } from './ScheduleConfigForm';
 import type {
   ConfigApiResponse,
   GiteaConfig,
@@ -16,12 +16,13 @@ import type {
   SaveConfigApiRequest,
   SaveConfigApiResponse,
   ScheduleConfig,
-} from "@/types/config";
+} from '@/types/config';
-import { Button } from "../ui/button";
+import { Button } from '../ui/button';
-import { useAuth } from "@/hooks/useAuth";
+import { useAuth } from '@/hooks/useAuth';
-import { apiRequest } from "@/lib/utils";
+import { apiRequest } from '@/lib/utils';
-import { Copy, CopyCheck, RefreshCw } from "lucide-react";
+import { Copy, CopyCheck, RefreshCw } from 'lucide-react';
-import { toast } from "sonner";
+import { toast } from 'sonner';
+import { Skeleton } from '@/components/ui/skeleton';

 type ConfigState = {
   githubConfig: GitHubConfig;
@@ -32,8 +33,8 @@ type ConfigState = {
 export function ConfigTabs() {
   const [config, setConfig] = useState<ConfigState>({
     githubConfig: {
-      username: "",
+      username: '',
-      token: "",
+      token: '',
       skipForks: false,
       privateRepositories: false,
       mirrorIssues: false,
@@ -41,16 +42,14 @@ export function ConfigTabs() {
       preserveOrgStructure: false,
       skipStarredIssues: false,
     },
-
     giteaConfig: {
-      url: "",
+      url: '',
-      username: "",
+      username: '',
-      token: "",
+      token: '',
-      organization: "github-mirrors",
+      organization: 'github-mirrors',
-      visibility: "public",
+      visibility: 'public',
-      starredReposOrg: "github",
+      starredReposOrg: 'github',
     },
-
     scheduleConfig: {
       enabled: false,
       interval: 3600,
@@ -58,27 +57,21 @@ export function ConfigTabs() {
   });
   const { user, refreshUser } = useAuth();
   const [isLoading, setIsLoading] = useState(true);
-  const [dockerCode, setDockerCode] = useState<string>("");
+  const [dockerCode, setDockerCode] = useState<string>('');
   const [isCopied, setIsCopied] = useState<boolean>(false);
   const [isSyncing, setIsSyncing] = useState<boolean>(false);
   const [isConfigSaved, setIsConfigSaved] = useState<boolean>(false);

-  // Check if all required fields are filled to enable the Save Configuration button
   const isConfigFormValid = (): boolean => {
     const { githubConfig, giteaConfig } = config;
-
-    // Check GitHub required fields
     const isGitHubValid = !!(
-      githubConfig.username?.trim() && githubConfig.token?.trim()
+      githubConfig.username.trim() && githubConfig.token.trim()
    );
-
-    // Check Gitea required fields
     const isGiteaValid = !!(
-      giteaConfig.url?.trim() &&
+      giteaConfig.url.trim() &&
-      giteaConfig.username?.trim() &&
+      giteaConfig.username.trim() &&
-      giteaConfig.token?.trim()
+      giteaConfig.token.trim()
    );
-
     return isGitHubValid && isGiteaValid;
   };

@@ -86,11 +79,12 @@ export function ConfigTabs() {
     const updateLastAndNextRun = () => {
       const lastRun = config.scheduleConfig.lastRun
         ? new Date(config.scheduleConfig.lastRun)
-        : new Date(); // fallback to now if lastRun is null
+        : new Date();
       const intervalInSeconds = config.scheduleConfig.interval;
-      const nextRun = new Date(lastRun.getTime() + intervalInSeconds * 1000);
-      setConfig((prev) => ({
+      const nextRun = new Date(
+        lastRun.getTime() + intervalInSeconds * 1000,
+      );
+      setConfig(prev => ({
         ...prev,
         scheduleConfig: {
@@ -99,37 +93,31 @@ export function ConfigTabs() {
         },
       }));
     };

     updateLastAndNextRun();
   }, [config.scheduleConfig.interval]);

   const handleImportGitHubData = async () => {
+    if (!user?.id) return;
+    setIsSyncing(true);
     try {
-      if (!user?.id) return;
-
-      setIsSyncing(true);
-
       const result = await apiRequest<{ success: boolean; message?: string }>(
         `/sync?userId=${user.id}`,
-        {
-          method: "POST",
-        }
+        { method: 'POST' },
       );
-      if (result.success) {
-        toast.success(
-          "GitHub data imported successfully! Head to the Dashboard to start mirroring repositories."
-        );
-      } else {
-        toast.error(
-          `Failed to import GitHub data: ${result.message || "Unknown error"}`
-        );
-      }
+      result.success
+        ? toast.success(
+            'GitHub data imported successfully! Head to the Dashboard to start mirroring repositories.',
+          )
+        : toast.error(
+            `Failed to import GitHub data: ${
+              result.message || 'Unknown error'
+            }`,
+          );
     } catch (error) {
       toast.error(
         `Error importing GitHub data: ${
           error instanceof Error ? error.message : String(error)
-        }`
+        }`,
       );
     } finally {
       setIsSyncing(false);
@@ -137,94 +125,76 @@ export function ConfigTabs() {
   };

   const handleSaveConfig = async () => {
+    if (!user?.id) return;
+    const reqPayload: SaveConfigApiRequest = {
+      userId: user.id,
+      githubConfig: config.githubConfig,
+      giteaConfig: config.giteaConfig,
+      scheduleConfig: config.scheduleConfig,
+    };
     try {
-      if (!user || !user.id) {
-        return;
-      }
-
-      const reqPyload: SaveConfigApiRequest = {
-        userId: user.id,
-        githubConfig: config.githubConfig,
-        giteaConfig: config.giteaConfig,
-        scheduleConfig: config.scheduleConfig,
-      };
-
-      const response = await fetch("/api/config", {
-        method: "POST",
-        headers: {
-          "Content-Type": "application/json",
-        },
-        body: JSON.stringify(reqPyload),
+      const response = await fetch('/api/config', {
+        method: 'POST',
+        headers: { 'Content-Type': 'application/json' },
+        body: JSON.stringify(reqPayload),
       });

       const result: SaveConfigApiResponse = await response.json();

       if (result.success) {
         await refreshUser();
         setIsConfigSaved(true);

         toast.success(
-          "Configuration saved successfully! Now import your GitHub data to begin."
+          'Configuration saved successfully! Now import your GitHub data to begin.',
         );
       } else {
         toast.error(
-          `Failed to save configuration: ${result.message || "Unknown error"}`
+          `Failed to save configuration: ${result.message || 'Unknown error'}`,
         );
       }
     } catch (error) {
       toast.error(
         `An error occurred while saving the configuration: ${
           error instanceof Error ? error.message : String(error)
-        }`
+        }`,
       );
     }
   };

   useEffect(() => {
+    if (!user) return;
+
     const fetchConfig = async () => {
+      setIsLoading(true);
       try {
-        if (!user) {
-          return;
-        }
-
-        setIsLoading(true);
-
         const response = await apiRequest<ConfigApiResponse>(
           `/config?userId=${user.id}`,
-          {
-            method: "GET",
-          }
+          { method: 'GET' },
         );

-        // Check if we have a valid config response
         if (response && !response.error) {
           setConfig({
-            githubConfig: response.githubConfig || config.githubConfig,
-            giteaConfig: response.giteaConfig || config.giteaConfig,
-            scheduleConfig: response.scheduleConfig || config.scheduleConfig,
+            githubConfig:
+              response.githubConfig || config.githubConfig,
+            giteaConfig:
+              response.giteaConfig || config.giteaConfig,
+            scheduleConfig:
+              response.scheduleConfig || config.scheduleConfig,
           });
+          if (response.id) setIsConfigSaved(true);
-
-          // If we got a valid config from the server, it means it was previously saved
-          if (response.id) {
-            setIsConfigSaved(true);
-          }
         }
-        // If there's an error, we'll just use the default config defined in state
-
-        setIsLoading(false);
       } catch (error) {
-        // Don't show error for first-time users, just use the default config
-        console.warn("Could not fetch configuration, using defaults:", error);
-      } finally {
-        setIsLoading(false);
+        console.warn(
+          'Could not fetch configuration, using defaults:',
+          error,
+        );
       }
+      setIsLoading(false);
     };

     fetchConfig();
   }, [user]);

   useEffect(() => {
-    const generateDockerCode = () => {
-      return `services:
+    const generateDockerCode = () => `
+services:
   gitea-mirror:
     image: arunavo4/gitea-mirror:latest
     restart: unless-stopped
@@ -243,27 +213,93 @@
       - GITEA_ORGANIZATION=${config.giteaConfig.organization}
       - GITEA_ORG_VISIBILITY=${config.giteaConfig.visibility}
       - DELAY=${config.scheduleConfig.interval}`;
-    };
-
-    const code = generateDockerCode();
-    setDockerCode(code);
+    setDockerCode(generateDockerCode());
   }, [config]);

   const handleCopyToClipboard = (text: string) => {
     navigator.clipboard.writeText(text).then(
       () => {
         setIsCopied(true);
-        toast.success("Docker configuration copied to clipboard!");
+        toast.success('Docker configuration copied to clipboard!');
         setTimeout(() => setIsCopied(false), 2000);
       },
-      (err) => {
-        toast.error("Could not copy text to clipboard.");
-      }
+      () => toast.error('Could not copy text to clipboard.'),
     );
   };

+  function ConfigCardSkeleton() {
+    return (
+      <Card>
+        <CardHeader className="flex-row justify-between">
+          <div className="flex flex-col gap-y-1.5 m-0">
+            <Skeleton className="h-6 w-48" />
+            <Skeleton className="h-4 w-72" />
+          </div>
+          <div className="flex gap-x-4">
+            <Skeleton className="h-10 w-36" />
+            <Skeleton className="h-10 w-36" />
+          </div>
+        </CardHeader>
+        <CardContent>
+          <div className="flex flex-col gap-y-4">
+            <div className="flex gap-x-4">
+              <div className="w-1/2 border rounded-lg p-4">
+                <div className="flex justify-between items-center mb-4">
+                  <Skeleton className="h-6 w-40" />
+                  <Skeleton className="h-9 w-32" />
+                </div>
+                <div className="space-y-4">
+                  <Skeleton className="h-20 w-full" />
+                  <Skeleton className="h-20 w-full" />
+                  <Skeleton className="h-32 w-full" />
+                </div>
+              </div>
+              <div className="w-1/2 border rounded-lg p-4">
+                <div className="flex justify-between items-center mb-4">
+                  <Skeleton className="h-6 w-40" />
+                  <Skeleton className="h-9 w-32" />
+                </div>
+                <div className="space-y-4">
+                  <Skeleton className="h-20 w-full" />
+                  <Skeleton className="h-20 w-full" />
+                  <Skeleton className="h-20 w-full" />
+                  <Skeleton className="h-20 w-full" />
+                </div>
+              </div>
+            </div>
+            <div className="border rounded-lg p-4">
+              <div className="space-y-4">
+                <Skeleton className="h-8 w-48" />
+                <Skeleton className="h-16 w-full" />
+                <Skeleton className="h-8 w-32" />
+              </div>
+            </div>
+          </div>
+        </CardContent>
+      </Card>
+    );
+  }
+
+  function DockerConfigSkeleton() {
+    return (
+      <Card>
+        <CardHeader>
+          <Skeleton className="h-6 w-40" />
+          <Skeleton className="h-4 w-64" />
+        </CardHeader>
+        <CardContent className="relative">
+          <Skeleton className="h-8 w-8 absolute top-4 right-10 rounded-md" />
+          <Skeleton className="h-48 w-full rounded-md" />
+        </CardContent>
+      </Card>
+    );
+  }
+
   return isLoading ? (
-    <div>loading...</div>
+    <div className="flex flex-col gap-y-6">
+      <ConfigCardSkeleton />
+      <DockerConfigSkeleton />
+    </div>
   ) : (
     <div className="flex flex-col gap-y-6">
       <Card>
@@ -275,17 +311,16 @@ export function ConfigTabs() {
             mirroring.
           </CardDescription>
         </div>
-
         <div className="flex gap-x-4">
           <Button
             onClick={handleImportGitHubData}
             disabled={isSyncing || !isConfigSaved}
             title={
               !isConfigSaved
-                ? "Save configuration first"
+                ? 'Save configuration first'
                 : isSyncing
-                ? "Import in progress"
+                ? 'Import in progress'
-                : "Import GitHub Data"
+                : 'Import GitHub Data'
             }
           >
             {isSyncing ? (
@@ -305,66 +340,57 @@ export function ConfigTabs() {
             disabled={!isConfigFormValid()}
             title={
               !isConfigFormValid()
-                ? "Please fill all required fields"
+                ? 'Please fill all required fields'
-                : "Save Configuration"
+                : 'Save Configuration'
             }
           >
             Save Configuration
           </Button>
         </div>
       </CardHeader>
-
       <CardContent>
         <div className="flex flex-col gap-y-4">
           <div className="flex gap-x-4">
             <GitHubConfigForm
               config={config.githubConfig}
-              setConfig={(update) =>
+              setConfig={update =>
-                setConfig((prev) => ({
+                setConfig(prev => ({
                   ...prev,
                   githubConfig:
-                    typeof update === "function"
+                    typeof update === 'function'
                       ? update(prev.githubConfig)
                       : update,
                 }))
               }
             />
-
             <GiteaConfigForm
-              config={config?.giteaConfig ?? ({} as GiteaConfig)}
+              config={config.giteaConfig}
-              setConfig={(update) =>
+              setConfig={update =>
-                setConfig((prev) => ({
+                setConfig(prev => ({
                   ...prev,
                   giteaConfig:
-                    typeof update === "function"
+                    typeof update === 'function'
                       ? update(prev.giteaConfig)
                       : update,
-                  githubConfig: prev?.githubConfig ?? ({} as GitHubConfig),
-                  scheduleConfig:
-                    prev?.scheduleConfig ?? ({} as ScheduleConfig),
                 }))
               }
             />
           </div>
-
           <ScheduleConfigForm
-            config={config?.scheduleConfig ?? ({} as ScheduleConfig)}
+            config={config.scheduleConfig}
-            setConfig={(update) =>
+            setConfig={update =>
-              setConfig((prev) => ({
+              setConfig(prev => ({
                 ...prev,
                 scheduleConfig:
-                  typeof update === "function"
+                  typeof update === 'function'
                     ? update(prev.scheduleConfig)
                     : update,
-                githubConfig: prev?.githubConfig ?? ({} as GitHubConfig),
-                giteaConfig: prev?.giteaConfig ?? ({} as GiteaConfig),
              }))
            }
          />
        </div>
      </CardContent>
    </Card>
-
    <Card>
      <CardHeader>
        <CardTitle>Docker Configuration</CardTitle>
@@ -372,7 +398,6 @@ export function ConfigTabs() {
          Equivalent Docker configuration for your current settings.
        </CardDescription>
      </CardHeader>
-
      <CardContent className="relative">
        <Button
          variant="outline"
@@ -386,7 +411,6 @@ export function ConfigTabs() {
          <Copy className="text-muted-foreground" />
        )}
      </Button>
-
      <pre className="bg-muted p-4 rounded-md overflow-auto text-sm">
        {dockerCode}
      </pre>
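The `setConfig` wrappers passed to each form in the ConfigTabs diff above all repeat one pattern: accept either a new value or an updater function, mirroring React's `useState` contract. Reduced to its essentials (a sketch, not code from this change):

```ts
type Updater<T> = T | ((prev: T) => T);

function applyUpdate<T>(prev: T, update: Updater<T>): T {
  // Same check as in the component: call functions with the previous slice of
  // state, use plain values as the replacement.
  return typeof update === "function" ? (update as (prev: T) => T)(prev) : update;
}
```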
@@ -9,6 +9,8 @@ import { apiRequest } from "@/lib/utils";
 import type { DashboardApiResponse } from "@/types/dashboard";
 import { useSSE } from "@/hooks/useSEE";
 import { toast } from "sonner";
+import { Skeleton } from "@/components/ui/skeleton";
+import { Card, CardContent, CardHeader, CardTitle } from "@/components/ui/card";

 export function Dashboard() {
   const { user } = useAuth();
@@ -59,8 +61,6 @@ export function Dashboard() {
         return;
       }

-      setIsLoading(false);
-
       const response = await apiRequest<DashboardApiResponse>(
         `/dashboard?userId=${user.id}`,
         {
@@ -93,8 +93,61 @@ export function Dashboard() {
     fetchDashboardData();
   }, [user]);

+  // Status Card Skeleton component
+  function StatusCardSkeleton() {
+    return (
+      <Card className="overflow-hidden">
+        <CardHeader className="flex flex-row items-center justify-between pb-2 space-y-0">
+          <CardTitle className="text-sm font-medium">
+            <Skeleton className="h-4 w-24" />
+          </CardTitle>
+          <Skeleton className="h-4 w-4 rounded-full" />
+        </CardHeader>
+        <CardContent>
+          <Skeleton className="h-8 w-16 mb-1" />
+          <Skeleton className="h-3 w-32" />
+        </CardContent>
+      </Card>
+    );
+  }
+
   return isLoading || !connected ? (
-    <div>loading...</div>
+    <div className="flex flex-col gap-y-6">
+      <div className="grid grid-cols-1 md:grid-cols-2 lg:grid-cols-4 gap-6">
+        <StatusCardSkeleton />
+        <StatusCardSkeleton />
+        <StatusCardSkeleton />
+        <StatusCardSkeleton />
+      </div>
+
+      <div className="flex gap-x-6 items-start">
+        {/* Repository List Skeleton */}
+        <div className="w-1/2 border rounded-lg p-4">
+          <div className="flex justify-between items-center mb-4">
+            <Skeleton className="h-6 w-32" />
+            <Skeleton className="h-9 w-24" />
+          </div>
+          <div className="space-y-3">
+            {Array.from({ length: 3 }).map((_, i) => (
+              <Skeleton key={i} className="h-16 w-full" />
+            ))}
+          </div>
+        </div>
+
+        {/* Recent Activity Skeleton */}
+        <div className="w-1/2 border rounded-lg p-4">
+          <div className="flex justify-between items-center mb-4">
+            <Skeleton className="h-6 w-32" />
+            <Skeleton className="h-9 w-24" />
+          </div>
+          <div className="space-y-3">
+            {Array.from({ length: 3 }).map((_, i) => (
+              <Skeleton key={i} className="h-16 w-full" />
+            ))}
+          </div>
+        </div>
+      </div>
+    </div>
   ) : (
     <div className="flex flex-col gap-y-6">
       <div className="grid grid-cols-1 md:grid-cols-2 lg:grid-cols-4 gap-6">
@@ -4,9 +4,10 @@ import { SiGitea } from "react-icons/si";
 import { ModeToggle } from "@/components/theme/ModeToggle";
 import { Avatar, AvatarFallback, AvatarImage } from "../ui/avatar";
 import { toast } from "sonner";
+import { Skeleton } from "@/components/ui/skeleton";

 export function Header() {
-  const { user, logout } = useAuth();
+  const { user, logout, isLoading } = useAuth();

   const handleLogout = async () => {
     toast.success("Logged out successfully");
@@ -15,6 +16,16 @@ export function Header() {
     logout();
   };

+  // Auth buttons skeleton loader
+  function AuthButtonsSkeleton() {
+    return (
+      <>
+        <Skeleton className="h-10 w-10 rounded-full" /> {/* Avatar placeholder */}
+        <Skeleton className="h-10 w-24" /> {/* Button placeholder */}
+      </>
+    );
+  }
+
   return (
     <header className="border-b bg-background">
       <div className="flex h-[4.5rem] items-center justify-between px-6">
@@ -25,7 +36,10 @@ export function Header() {

         <div className="flex items-center gap-4">
           <ModeToggle />
-          {user ? (
+
+          {isLoading ? (
+            <AuthButtonsSkeleton />
+          ) : user ? (
             <>
               <Avatar>
                 <AvatarImage src="" alt="@shadcn" />
@@ -2,6 +2,7 @@ import { useEffect, useState } from "react";
 import { cn } from "@/lib/utils";
 import { ExternalLink } from "lucide-react";
 import { links } from "@/data/Sidebar";
+import { VersionInfo } from "./VersionInfo";

 interface SidebarProps {
   className?: string;
@@ -19,7 +20,7 @@ export function Sidebar({ className }: SidebarProps) {

   return (
     <aside className={cn("w-64 border-r bg-background", className)}>
-      <div className="flex flex-col h-full py-4">
+      <div className="flex flex-col h-full pt-4">
         <nav className="flex flex-col gap-y-1 pl-2 pr-3">
           {links.map((link, index) => {
             const isActive = currentPath === link.href;
@@ -59,6 +60,7 @@ export function Sidebar({ className }: SidebarProps) {
             <ExternalLink className="h-3 w-3" />
           </a>
         </div>
+        <VersionInfo />
       </div>
     </div>
   </aside>
src/components/layout/VersionInfo.tsx (new file, 49 lines)
@@ -0,0 +1,49 @@
+import { useEffect, useState } from "react";
+import { healthApi } from "@/lib/api";
+
+export function VersionInfo() {
+  const [versionInfo, setVersionInfo] = useState<{
+    current: string;
+    latest: string;
+    updateAvailable: boolean;
+  }>({
+    current: "loading...",
+    latest: "",
+    updateAvailable: false
+  });
+
+  useEffect(() => {
+    const fetchVersion = async () => {
+      try {
+        const healthData = await healthApi.check();
+        setVersionInfo({
+          current: healthData.version || "unknown",
+          latest: healthData.latestVersion || "unknown",
+          updateAvailable: healthData.updateAvailable || false
+        });
+      } catch (error) {
+        console.error("Failed to fetch version:", error);
+        setVersionInfo({
+          current: "unknown",
+          latest: "",
+          updateAvailable: false
+        });
+      }
+    };
+
+    fetchVersion();
+  }, []);
+
+  return (
+    <div className="text-xs text-muted-foreground text-center pt-2 pb-3 border-t border-border mt-2">
+      {versionInfo.updateAvailable ? (
+        <div className="flex flex-col">
+          <span>v{versionInfo.current}</span>
+          <span className="text-primary">v{versionInfo.latest} available</span>
+        </div>
+      ) : (
+        <span>v{versionInfo.current}</span>
+      )}
+    </div>
+  );
+}
@@ -2,7 +2,7 @@
 title: "Architecture"
 description: "Comprehensive overview of the Gitea Mirror application architecture."
 order: 1
-updatedDate: 2023-10-15
+updatedDate: 2025-05-22
 ---

 <div class="mb-6">
@@ -21,17 +21,18 @@ The application is built using:
 - <span class="font-semibold text-foreground">Astro</span>: Web framework for the frontend
 - <span class="font-semibold text-foreground">React</span>: Component library for interactive UI elements
 - <span class="font-semibold text-foreground">Shadcn UI</span>: UI component library built on Tailwind CSS
-- <span class="font-semibold text-foreground">SQLite</span>: Database for storing configuration and state
+- <span class="font-semibold text-foreground">SQLite</span>: Database for storing configuration, state, and events
-- <span class="font-semibold text-foreground">Node.js</span>: Runtime environment for the backend
+- <span class="font-semibold text-foreground">Bun</span>: Runtime environment for the backend
+- <span class="font-semibold text-foreground">Drizzle ORM</span>: Type-safe ORM for database interactions

 ## Architecture Diagram

 ```mermaid
 graph TD
     subgraph "Gitea Mirror"
-        Frontend["Frontend<br/>(Astro)"]
+        Frontend["Frontend<br/>(Astro + React)"]
-        Backend["Backend<br/>(Node.js)"]
+        Backend["Backend<br/>(Bun)"]
-        Database["Database<br/>(SQLite)"]
+        Database["Database<br/>(SQLite + Drizzle)"]

         Frontend <--> Backend
         Backend <--> Database
@@ -60,9 +61,9 @@ Key frontend components:
 - **Configuration**: Settings for GitHub and Gitea connections
 - **Activity Log**: Detailed log of mirroring operations

-### Backend (Node.js)
+### Backend (Bun)

-The backend is built with Node.js and provides API endpoints for the frontend to interact with. It handles:
+The backend is built with Bun and provides API endpoints for the frontend to interact with. It handles:

 - Authentication and user management
 - GitHub API integration
@@ -70,14 +71,15 @@ The backend is built with Node.js and provides API endpoints for the frontend to
 - Mirroring operations
 - Database interactions

-### Database (SQLite)
+### Database (SQLite + Drizzle ORM)

-SQLite is used for data persistence, storing:
+SQLite with Bun's native SQLite driver is used for data persistence, with Drizzle ORM providing type-safe database interactions. The database stores:

 - User accounts and authentication data
 - GitHub and Gitea configuration
 - Repository and organization information
 - Mirroring job history and status
+- Event notifications and their read status

 ## Data Flow

@@ -93,11 +95,30 @@ SQLite is used for data persistence, storing:
 gitea-mirror/
 ├── src/                # Source code
 │   ├── components/     # React components
+│   ├── content/        # Documentation and content
 │   ├── layouts/        # Astro layout components
 │   ├── lib/            # Utility functions and database
 │   ├── pages/          # Astro pages and API routes
 │   └── styles/         # CSS and Tailwind styles
 ├── public/             # Static assets
 ├── data/               # Database and persistent data
-└── docker/             # Docker configuration
+├── docker/             # Docker configuration
+└── scripts/            # Utility scripts for deployment and maintenance
+    ├── gitea-mirror-lxc-proxmox.sh   # Proxmox LXC deployment script
+    ├── gitea-mirror-lxc-local.sh     # Local LXC deployment script
+    └── manage-db.ts                  # Database management tool
 ```

+## Deployment Options
+
+Gitea Mirror supports multiple deployment options:
+
+1. **Docker**: Run as a containerized application using Docker and docker-compose
+2. **LXC Containers**: Deploy in Linux Containers (LXC) on Proxmox VE or local workstations
+3. **Native**: Run directly on the host system using Bun runtime
+
+Each deployment method has its own advantages:
+
+- **Docker**: Isolation, easy updates, consistent environment
+- **LXC**: Lightweight virtualization, better performance than Docker, system-level isolation
+- **Native**: Best performance, direct access to system resources
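For readers new to Drizzle, "type-safe ORM" here means queries are checked against a schema declared in TypeScript. A minimal sketch with the bun:sqlite driver (the table and columns below are illustrative, not copied from the project's schema):

```ts
import { Database } from "bun:sqlite";
import { drizzle } from "drizzle-orm/bun-sqlite";
import { sqliteTable, text, integer } from "drizzle-orm/sqlite-core";

// Illustrative table definition; misspelling a column name below fails at compile time.
const repositories = sqliteTable("repositories", {
  id: text("id").primaryKey(),
  name: text("name").notNull(),
  mirrored: integer("mirrored", { mode: "boolean" }).default(false),
});

const db = drizzle(new Database("data/gitea-mirror.db"));
const repos = await db.select().from(repositories); // typed as the inferred row shape
```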
@@ -2,7 +2,7 @@
 title: "Configuration"
 description: "Guide to configuring Gitea Mirror for your environment."
 order: 2
-updatedDate: 2023-10-15
+updatedDate: 2025-05-22
 ---

 <div class="mb-6">
@@ -23,11 +23,11 @@ The following environment variables can be used to configure Gitea Mirror:

 | Variable | Description | Default Value | Example |
 |----------|-------------|---------------|---------|
-| `NODE_ENV` | Node environment (development, production, test) | `development` | `production` |
+| `NODE_ENV` | Runtime environment (development, production, test) | `development` | `production` |
-| `DATABASE_URL` | SQLite database URL | `sqlite://data/gitea-mirror.db` | `sqlite://path/to/your/database.db` |
+| `DATABASE_URL` | SQLite database URL | `file:data/gitea-mirror.db` | `file:path/to/your/database.db` |
 | `JWT_SECRET` | Secret key for JWT authentication | `your-secret-key-change-this-in-production` | `your-secure-random-string` |
 | `HOST` | Server host | `localhost` | `0.0.0.0` |
-| `PORT` | Server port | `3000` | `8080` |
+| `PORT` | Server port | `4321` | `8080` |

 ### Important Security Note

@@ -118,3 +118,58 @@ Example patterns:
 - `*` - All repositories
 - `org-name/*` - All repositories in a specific organization
 - `username/repo-name` - A specific repository
+
+### Database Management
+
+Gitea Mirror includes several database management tools that can be run from the command line:
+
+```bash
+# Initialize the database (only if it doesn't exist)
+bun run init-db
+
+# Check database status
+bun run check-db
+
+# Fix database location issues
+bun run fix-db
+
+# Reset all users (for testing signup flow)
+bun run reset-users
+
+# Remove database files completely
+bun run cleanup-db
+```
+
+### Event Management
+
+Events in Gitea Mirror (such as repository mirroring operations) are stored in the SQLite database. You can manage these events using the following scripts:
+
+```bash
+# View all events in the database
+bun scripts/check-events.ts
+
+# Clean up old events (default: older than 7 days)
+bun scripts/cleanup-events.ts
+
+# Mark all events as read
+bun scripts/mark-events-read.ts
+```
+
+### Health Check Endpoint
+
+Gitea Mirror includes a built-in health check endpoint at `/api/health` that provides:
+
+- System status and uptime
+- Database connectivity check
+- Memory usage statistics
+- Environment information
+
+You can use this endpoint for monitoring your deployment:
+
+```bash
+# Basic check (returns 200 OK if healthy)
+curl -I http://your-server:port/api/health
+
+# Detailed health information (JSON)
+curl http://your-server:port/api/health
+```
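Beyond curl, the endpoint is easy to poll from a small script. A sketch of a Bun-based monitor against the fields documented above (host and port are placeholders):

```ts
const res = await fetch("http://your-server:4321/api/health");
if (!res.ok) {
  console.error(`Health check failed: HTTP ${res.status}`);
  process.exit(1);
}

// Shape matches the HealthResponse returned by /api/health.
const health = await res.json();
console.log(
  `status=${health.status} db=${health.database?.connected ? "up" : "down"} uptime=${health.system?.uptime?.formatted}`
);
```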
@@ -2,7 +2,7 @@
 title: "Quick Start Guide"
 description: "Get started with Gitea Mirror quickly."
 order: 3
-updatedDate: 2023-10-15
+updatedDate: 2025-05-22
 ---

 <div class="mb-6">
@@ -16,13 +16,16 @@ Before you begin, make sure you have:

 1. <span class="font-semibold text-foreground">A GitHub account with a personal access token</span>
 2. <span class="font-semibold text-foreground">A Gitea instance with an access token</span>
-3. <span class="font-semibold text-foreground">Docker and docker-compose (recommended) or Node.js 18+ installed</span>
+3. <span class="font-semibold text-foreground">One of the following:</span>
+   - Docker and docker-compose (for Docker deployment)
+   - Bun 1.2.9+ (for native deployment)
+   - Proxmox VE or LXD (for LXC container deployment)

 ## Installation Options

 Choose the installation method that works best for your environment.

-### Using Docker (Recommended)
+### Using Docker (Recommended for most users)

 Docker provides the easiest way to get started with minimal configuration.

@@ -39,7 +42,7 @@ Docker provides the easiest way to get started with minimal configuration.

 3. Access the application at [http://localhost:4321](http://localhost:4321)

-### Manual Installation
+### Using Bun (Native Installation)

 If you prefer to run the application directly on your system:

@@ -51,7 +54,7 @@ If you prefer to run the application directly on your system:

 2. Run the quick setup script:
    ```bash
-   pnpm setup
+   bun run setup
    ```
    This installs dependencies and initializes the database.

@@ -59,17 +62,60 @@ If you prefer to run the application directly on your system:

    **Development Mode:**
    ```bash
-   pnpm dev
+   bun run dev
+   ```
+
+   Note: For Bun-specific features, use:
+   ```bash
+   bunx --bun astro dev
    ```

    **Production Mode:**
    ```bash
-   pnpm build
+   bun run build
-   pnpm start
+   bun run start
    ```

 4. Access the application at [http://localhost:4321](http://localhost:4321)

+### Using LXC Containers (Recommended for server deployments)
+
+#### Proxmox VE (Online Installation)
+
+For deploying on a Proxmox VE host with internet access:
+
+```bash
+# Optional env overrides: CTID HOSTNAME STORAGE DISK_SIZE CORES MEMORY BRIDGE IP_CONF
+sudo bash -c "$(curl -fsSL https://raw.githubusercontent.com/arunavo4/gitea-mirror/main/scripts/gitea-mirror-lxc-proxmox.sh)"
+```
+
+This script:
+- Creates a privileged LXC container
+- Installs Bun and dependencies
+- Clones and builds the application
+- Sets up a systemd service
+
+#### Local LXD (Offline-friendly Installation)
+
+For testing on a local workstation or in environments without internet access:
+
+1. Clone the repository locally:
+   ```bash
+   git clone https://github.com/arunavo4/gitea-mirror.git
+   ```
+
+2. Download the Bun installer once:
+   ```bash
+   curl -L -o /tmp/bun-linux-x64.zip https://github.com/oven-sh/bun/releases/latest/download/bun-linux-x64.zip
+   ```
+
+3. Run the local LXC installer:
+   ```bash
+   sudo LOCAL_REPO_DIR=~/path/to/gitea-mirror ./gitea-mirror/scripts/gitea-mirror-lxc-local.sh
+   ```
+
+For more details on LXC deployment, see the [LXC Container Deployment Guide](https://github.com/arunavo4/gitea-mirror/blob/main/scripts/README-lxc.md).
+
 ## Initial Configuration

 Follow these steps to configure Gitea Mirror for first use:
@@ -116,7 +162,12 @@ If you encounter any issues:
 - Check the Activity Log for detailed error messages
 - Verify your GitHub and Gitea tokens have the correct permissions
 - Ensure your Gitea instance is accessible from the machine running Gitea Mirror
-- For Docker installations, check container logs with `docker logs gitea-mirror`
+- Check logs based on your deployment method:
+  - Docker: `docker logs gitea-mirror`
+  - Native: Check the terminal output or system logs
+  - LXC: `systemctl status gitea-mirror` or `journalctl -u gitea-mirror -f`
+- Use the health check endpoint to verify system status: `curl http://your-server:4321/api/health`
+- For database issues, try the database management tools: `bun run check-db` or `bun run fix-db`

 ## Next Steps

@@ -125,3 +176,7 @@ After your initial setup:
 - Explore the dashboard for an overview of your mirroring status
 - Set up automatic mirroring schedules for hands-off operation
 - Configure organization mirroring for team repositories
+- Check out the [Configuration Guide](/configuration) for advanced settings
+- Review the [Architecture Documentation](/architecture) to understand the system
+- For server deployments, set up monitoring using the health check endpoint
+- Consider setting up a cron job to clean up old events: `bun scripts/cleanup-events.ts`
@@ -1,34 +1,61 @@
|
|||||||
import { useEffect, useState, useRef } from "react";
|
import { useEffect, useState, useRef, useCallback } from "react";
|
||||||
import type { MirrorJob } from "@/lib/db/schema";
|
import type { MirrorJob } from "@/lib/db/schema";
|
||||||
|
|
||||||
interface UseSSEOptions {
|
interface UseSSEOptions {
|
||||||
userId?: string;
|
userId?: string;
|
||||||
onMessage: (data: MirrorJob) => void;
|
onMessage: (data: MirrorJob) => void;
|
||||||
|
maxReconnectAttempts?: number;
|
||||||
|
reconnectDelay?: number;
|
||||||
}
|
}
|
||||||
|
|
||||||
export const useSSE = ({ userId, onMessage }: UseSSEOptions) => {
|
export const useSSE = ({
|
||||||
|
userId,
|
||||||
|
onMessage,
|
||||||
|
maxReconnectAttempts = 5,
|
||||||
|
reconnectDelay = 3000
|
||||||
|
}: UseSSEOptions) => {
|
||||||
const [connected, setConnected] = useState<boolean>(false);
|
const [connected, setConnected] = useState<boolean>(false);
|
||||||
|
const [reconnectCount, setReconnectCount] = useState<number>(0);
|
||||||
const onMessageRef = useRef(onMessage);
|
const onMessageRef = useRef(onMessage);
|
||||||
|
const eventSourceRef = useRef<EventSource | null>(null);
|
||||||
|
const reconnectTimeoutRef = useRef<number | null>(null);
|
||||||
|
|
||||||
// Update the ref when onMessage changes
|
 // Update the ref when onMessage changes
 useEffect(() => {
   onMessageRef.current = onMessage;
 }, [onMessage]);

-useEffect(() => {
+// Create a stable connect function that can be called for reconnection
+const connect = useCallback(() => {
   if (!userId) return;

+  // Clean up any existing connection
+  if (eventSourceRef.current) {
+    eventSourceRef.current.close();
+  }
+
+  // Clear any pending reconnect timeout
+  if (reconnectTimeoutRef.current) {
+    window.clearTimeout(reconnectTimeoutRef.current);
+    reconnectTimeoutRef.current = null;
+  }
+
+  // Create new EventSource connection
   const eventSource = new EventSource(`/api/sse?userId=${userId}`);
+  eventSourceRef.current = eventSource;

   const handleMessage = (event: MessageEvent) => {
     try {
+      // Check if this is an error message from our server
+      if (event.data.startsWith('{"error":')) {
+        console.warn("SSE server error:", event.data);
+        return;
+      }

       const parsedMessage: MirrorJob = JSON.parse(event.data);
-      // console.log("Received new log:", parsedMessage);
-      onMessageRef.current(parsedMessage); // Use ref instead of prop directly
+      onMessageRef.current(parsedMessage);
     } catch (error) {
-      console.error("Error parsing message:", error);
+      console.error("Error parsing SSE message:", error);
     }
   };

@@ -36,19 +63,50 @@ export const useSSE = ({ userId, onMessage }: UseSSEOptions) => {

   eventSource.onopen = () => {
     setConnected(true);
+    setReconnectCount(0); // Reset reconnect counter on successful connection
     console.log(`Connected to SSE for user: ${userId}`);
   };

-  eventSource.onerror = () => {
-    console.error("SSE connection error");
+  eventSource.onerror = (error) => {
+    console.error("SSE connection error:", error);
     setConnected(false);
     eventSource.close();
+    eventSourceRef.current = null;

-    return () => {
-      eventSource.close();
-    };
+    // Attempt to reconnect if we haven't exceeded max attempts
+    if (reconnectCount < maxReconnectAttempts) {
+      const nextReconnectDelay = Math.min(reconnectDelay * Math.pow(1.5, reconnectCount), 30000);
+      console.log(`Attempting to reconnect in ${nextReconnectDelay}ms (attempt ${reconnectCount + 1}/${maxReconnectAttempts})`);
+
+      reconnectTimeoutRef.current = window.setTimeout(() => {
+        setReconnectCount(prev => prev + 1);
+        connect();
+      }, nextReconnectDelay);
+    } else {
+      console.error(`Failed to reconnect after ${maxReconnectAttempts} attempts`);
+    }
   };
-}, [userId]); // Only depends on userId now
+}, [userId, maxReconnectAttempts, reconnectDelay, reconnectCount]);

+// Set up the connection
+useEffect(() => {
+  if (!userId) return;
+
+  connect();
+
+  // Cleanup function
+  return () => {
+    if (eventSourceRef.current) {
+      eventSourceRef.current.close();
+      eventSourceRef.current = null;
+    }
+
+    if (reconnectTimeoutRef.current) {
+      window.clearTimeout(reconnectTimeoutRef.current);
+      reconnectTimeoutRef.current = null;
+    }
+  };
+}, [userId, connect]);

 return { connected };
};
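A quick sketch of the backoff schedule the reworked `onerror` handler above produces. The formula is taken verbatim from the diff; the base delay and attempt cap are assumed values here, since their actual defaults are defined elsewhere in the hook and do not appear in this change.

const reconnectDelay = 1000;     // base delay in ms (assumption)
const maxReconnectAttempts = 5;  // attempt cap (assumption)

for (let attempt = 0; attempt < maxReconnectAttempts; attempt++) {
  // Same formula as the hook: 1.5x growth per attempt, capped at 30s
  const delay = Math.min(reconnectDelay * Math.pow(1.5, attempt), 30000);
  console.log(`attempt ${attempt + 1}: wait ${delay}ms`);
}
// With these assumed values: 1000, 1500, 2250, 3375, 5062.5 ms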
@@ -88,3 +88,84 @@ export const giteaApi = {
       body: JSON.stringify({ url, token }),
     }),
 };
+
+// Health API
+export interface HealthResponse {
+  status: "ok" | "error";
+  timestamp: string;
+  version: string;
+  latestVersion: string;
+  updateAvailable: boolean;
+  database: {
+    connected: boolean;
+    message: string;
+  };
+  system: {
+    uptime: {
+      startTime: string;
+      uptimeMs: number;
+      formatted: string;
+    };
+    memory: {
+      rss: string;
+      heapTotal: string;
+      heapUsed: string;
+      external: string;
+      systemTotal: string;
+      systemFree: string;
+    };
+    os: {
+      platform: string;
+      version: string;
+      arch: string;
+    };
+    env: string;
+  };
+  error?: string;
+}
+
+export const healthApi = {
+  check: async (): Promise<HealthResponse> => {
+    try {
+      const response = await fetch(`${API_BASE}/health`);
+
+      if (!response.ok) {
+        const errorData = await response.json().catch(() => ({
+          status: "error",
+          error: "Failed to parse error response",
+        }));
+
+        return {
+          ...errorData,
+          status: "error",
+          timestamp: new Date().toISOString(),
+        } as HealthResponse;
+      }
+
+      return await response.json();
+    } catch (error) {
+      return {
+        status: "error",
+        timestamp: new Date().toISOString(),
+        error: error instanceof Error ? error.message : "Unknown error checking health",
+        version: "unknown",
+        latestVersion: "unknown",
+        updateAvailable: false,
+        database: { connected: false, message: "Failed to connect to API" },
+        system: {
+          uptime: { startTime: "", uptimeMs: 0, formatted: "N/A" },
+          memory: {
+            rss: "N/A",
+            heapTotal: "N/A",
+            heapUsed: "N/A",
+            external: "N/A",
+            systemTotal: "N/A",
+            systemFree: "N/A",
+          },
+          os: { platform: "", version: "", arch: "" },
+          env: "",
+        },
+      };
+    }
+  },
+};
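A minimal usage sketch for the health API added above; the import path and the caller are illustrative, not taken from this change.

import { healthApi, type HealthResponse } from "@/lib/api"; // path assumed

async function logHealth(): Promise<void> {
  const health: HealthResponse = await healthApi.check();
  if (health.status === "ok") {
    console.log(`v${health.version}, up ${health.system.uptime.formatted}`);
    if (health.updateAvailable) {
      console.log(`Update available: ${health.latestVersion}`);
    }
  } else {
    console.error(`Health check failed: ${health.error}`);
  }
}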
@@ -4,7 +4,7 @@

 // Environment variables
 export const ENV = {
-  // Node environment (development, production, test)
+  // Runtime environment (development, production, test)
   NODE_ENV: process.env.NODE_ENV || "development",

   // Database URL - use SQLite by default
src/lib/db/index.test.ts (new file, 42 lines)
@@ -0,0 +1,42 @@
+import { describe, test, expect, mock, beforeAll, afterAll } from "bun:test";
+import { drizzle } from "drizzle-orm/bun-sqlite";
+
+// Silence console logs during tests
+let originalConsoleLog: typeof console.log;
+
+beforeAll(() => {
+  // Save original console.log
+  originalConsoleLog = console.log;
+  // Replace with no-op function
+  console.log = () => {};
+});
+
+afterAll(() => {
+  // Restore original console.log
+  console.log = originalConsoleLog;
+});
+
+// Mock the database module
+mock.module("bun:sqlite", () => {
+  return {
+    Database: mock(function() {
+      return {
+        query: mock(() => ({
+          all: mock(() => []),
+          run: mock(() => ({}))
+        }))
+      };
+    })
+  };
+});
+
+// Mock the database tables
+describe("Database Schema", () => {
+  test("database connection can be created", async () => {
+    // Import the db from the module
+    const { db } = await import("./index");
+
+    // Check that db is defined
+    expect(db).toBeDefined();
+  });
+});
src/lib/db/index.ts
@@ -1,21 +1,56 @@
 import { z } from "zod";
-import { createClient } from "@libsql/client";
-import { drizzle } from "drizzle-orm/libsql";
 import { sqliteTable, text, integer } from "drizzle-orm/sqlite-core";
+import { Database } from "bun:sqlite";
+import { drizzle } from "drizzle-orm/bun-sqlite";
+import fs from "fs";
 import path from "path";
 import { configSchema } from "./schema";

 // Define the database URL - for development we'll use a local SQLite file
 const dataDir = path.join(process.cwd(), "data");
-const dbUrl =
-  process.env.DATABASE_URL || `file:${path.join(dataDir, "gitea-mirror.db")}`;
+// Ensure data directory exists
+if (!fs.existsSync(dataDir)) {
+  fs.mkdirSync(dataDir, { recursive: true });
+}

-// Create a client connection to the database
-export const client = createClient({ url: dbUrl });
+const dbPath = path.join(dataDir, "gitea-mirror.db");

-// Create a drizzle instance
-export const db = drizzle(client);
+// Create an empty database file if it doesn't exist
+if (!fs.existsSync(dbPath)) {
+  fs.writeFileSync(dbPath, "");
+}
+
+// Create SQLite database instance using Bun's native driver
+let sqlite: Database;
+try {
+  sqlite = new Database(dbPath);
+  console.log("Successfully connected to SQLite database using Bun's native driver");
+} catch (error) {
+  console.error("Error opening database:", error);
+  throw error;
+}
+
+// Create drizzle instance with the SQLite client
+export const db = drizzle({ client: sqlite });
+
+// Simple async wrapper around SQLite API for compatibility
+// This maintains backward compatibility with existing code
+export const client = {
+  async execute(sql: string, params?: any[]) {
+    try {
+      const stmt = sqlite.query(sql);
+      if (/^\s*select/i.test(sql)) {
+        const rows = stmt.all(params ?? []);
+        return { rows } as { rows: any[] };
+      }
+      stmt.run(params ?? []);
+      return { rows: [] } as { rows: any[] };
+    } catch (error) {
+      console.error(`Error executing SQL: ${sql}`, error);
+      throw error;
+    }
+  },
+};
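A small sketch of the compatibility wrapper above in use, matching the `execute` signature it exposes; the query itself is illustrative.

async function listRepositoryNames(userId: string) {
  const { rows } = await client.execute(
    "SELECT id, name FROM repositories WHERE user_id = ?",
    [userId] // positional parameters forwarded to the prepared statement
  );
  return rows.map((row: any) => row.name);
}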
 // Define the tables
 export const users = sqliteTable("users", {
@@ -31,6 +66,18 @@ export const users = sqliteTable("users", {
     .default(new Date()),
 });

+// New table for event notifications (replacing Redis pub/sub)
+export const events = sqliteTable("events", {
+  id: text("id").primaryKey(),
+  userId: text("user_id").notNull().references(() => users.id),
+  channel: text("channel").notNull(),
+  payload: text("payload", { mode: "json" }).notNull(),
+  read: integer("read", { mode: "boolean" }).notNull().default(false),
+  createdAt: integer("created_at", { mode: "timestamp" })
+    .notNull()
+    .default(new Date()),
+});
+
 const githubSchema = configSchema.shape.githubConfig;
 const giteaSchema = configSchema.shape.giteaConfig;
 const scheduleSchema = configSchema.shape.scheduleConfig;
@@ -142,6 +189,18 @@ export const mirrorJobs = sqliteTable("mirror_jobs", {
   timestamp: integer("timestamp", { mode: "timestamp" })
     .notNull()
     .default(new Date()),
+
+  // New fields for job resilience
+  jobType: text("job_type").notNull().default("mirror"),
+  batchId: text("batch_id"),
+  totalItems: integer("total_items"),
+  completedItems: integer("completed_items").default(0),
+  itemIds: text("item_ids", { mode: "json" }).$type<string[]>(),
+  completedItemIds: text("completed_item_ids", { mode: "json" }).$type<string[]>().default([]),
+  inProgress: integer("in_progress", { mode: "boolean" }).notNull().default(false),
+  startedAt: integer("started_at", { mode: "timestamp" }),
+  completedAt: integer("completed_at", { mode: "timestamp" }),
+  lastCheckpoint: integer("last_checkpoint", { mode: "timestamp" }),
 });

 export const organizations = sqliteTable("organizations", {
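A small sketch of how the new resilience columns describe a job's progress; `job` stands for a row read from `mirrorJobs`, and the arithmetic mirrors the resume logic added later in this change.

// Percentage complete, guarding against jobs with no item count
const percentComplete = job.totalItems
  ? Math.round(((job.completedItems ?? 0) / job.totalItems) * 100)
  : 0;

// Items that still need processing after an interruption
const remainingItemIds = (job.itemIds ?? []).filter(
  (id: string) => !(job.completedItemIds ?? []).includes(id)
);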
@@ -111,6 +111,18 @@ export const mirrorJobSchema = z.object({
   status: repoStatusEnum.default("imported"),
   message: z.string(),
   timestamp: z.date().default(() => new Date()),
+
+  // New fields for job resilience
+  jobType: z.enum(["mirror", "sync", "retry"]).default("mirror"),
+  batchId: z.string().uuid().optional(), // Group related jobs together
+  totalItems: z.number().optional(), // Total number of items to process
+  completedItems: z.number().optional(), // Number of items completed
+  itemIds: z.array(z.string()).optional(), // IDs of items to process
+  completedItemIds: z.array(z.string()).optional(), // IDs of completed items
+  inProgress: z.boolean().default(false), // Whether the job is currently running
+  startedAt: z.date().optional(), // When the job started
+  completedAt: z.date().optional(), // When the job completed
+  lastCheckpoint: z.date().optional(), // Last time progress was saved
 });

 export type MirrorJob = z.infer<typeof mirrorJobSchema>;
@@ -140,3 +152,15 @@ export const organizationSchema = z.object({
 });

 export type Organization = z.infer<typeof organizationSchema>;
+
+// Event schema (for SQLite-based pub/sub)
+export const eventSchema = z.object({
+  id: z.string().uuid().optional(),
+  userId: z.string().uuid(),
+  channel: z.string().min(1),
+  payload: z.any(),
+  read: z.boolean().default(false),
+  createdAt: z.date().default(() => new Date()),
+});
+
+export type Event = z.infer<typeof eventSchema>;
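A short sketch of the new event schema in use; the payload is illustrative.

import { randomUUID } from "crypto";

const userId = randomUUID();
const event = eventSchema.parse({
  userId,
  channel: `mirror-status:${userId}`, // the channel format used by createMirrorJob
  payload: { message: "Repository mirrored" }, // payload is z.any()
});
// id is optional, read defaults to false, createdAt defaults to now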
src/lib/events.ts (new file, 161 lines)
@@ -0,0 +1,161 @@
+import { v4 as uuidv4 } from "uuid";
+import { db, events } from "./db";
+import { eq, and, gt, lt } from "drizzle-orm";
+
+/**
+ * Publishes an event to a specific channel for a user
+ * This replaces Redis pub/sub with SQLite storage
+ */
+export async function publishEvent({
+  userId,
+  channel,
+  payload,
+}: {
+  userId: string;
+  channel: string;
+  payload: any;
+}): Promise<string> {
+  try {
+    const eventId = uuidv4();
+    console.log(`Publishing event to channel ${channel} for user ${userId}`);
+
+    // Insert the event into the SQLite database
+    await db.insert(events).values({
+      id: eventId,
+      userId,
+      channel,
+      payload: JSON.stringify(payload),
+      createdAt: new Date(),
+    });
+
+    console.log(`Event published successfully with ID ${eventId}`);
+    return eventId;
+  } catch (error) {
+    console.error("Error publishing event:", error);
+    throw new Error("Failed to publish event");
+  }
+}
+
+/**
+ * Gets new events for a specific user and channel
+ * This replaces Redis subscribe with SQLite polling
+ */
+export async function getNewEvents({
+  userId,
+  channel,
+  lastEventTime,
+}: {
+  userId: string;
+  channel: string;
+  lastEventTime?: Date;
+}): Promise<any[]> {
+  try {
+    console.log(`Getting new events for user ${userId} in channel ${channel}`);
+    if (lastEventTime) {
+      console.log(`Looking for events after ${lastEventTime.toISOString()}`);
+    }
+
+    // Build the filter conditions, including the time filter when provided
+    const conditions = [
+      eq(events.userId, userId),
+      eq(events.channel, channel),
+      eq(events.read, false),
+    ];
+    if (lastEventTime) {
+      conditions.push(gt(events.createdAt, lastEventTime));
+    }
+
+    // Execute the query
+    const newEvents = await db
+      .select()
+      .from(events)
+      .where(and(...conditions))
+      .orderBy(events.createdAt);
+    console.log(`Found ${newEvents.length} new events`);
+
+    // Mark events as read
+    if (newEvents.length > 0) {
+      console.log(`Marking ${newEvents.length} events as read`);
+      await db
+        .update(events)
+        .set({ read: true })
+        .where(
+          and(
+            eq(events.userId, userId),
+            eq(events.channel, channel),
+            eq(events.read, false)
+          )
+        );
+    }
+
+    // Parse the payloads
+    return newEvents.map(event => ({
+      ...event,
+      payload: JSON.parse(event.payload as string),
+    }));
+  } catch (error) {
+    console.error("Error getting new events:", error);
+    return [];
+  }
+}
+
+/**
+ * Cleans up old events to prevent the database from growing too large
+ * Should be called periodically (e.g., daily via a cron job)
+ *
+ * @param maxAgeInDays Number of days to keep events (default: 7)
+ * @param cleanupUnreadAfterDays Number of days after which to clean up unread events (default: 2x maxAgeInDays)
+ * @returns Object containing the number of read and unread events deleted
+ */
+export async function cleanupOldEvents(
+  maxAgeInDays: number = 7,
+  cleanupUnreadAfterDays?: number
+): Promise<{ readEventsDeleted: number; unreadEventsDeleted: number }> {
+  try {
+    console.log(`Cleaning up events older than ${maxAgeInDays} days...`);
+
+    // Calculate the cutoff date for read events
+    const cutoffDate = new Date();
+    cutoffDate.setDate(cutoffDate.getDate() - maxAgeInDays);
+
+    // Delete read events older than the cutoff date
+    const readResult = await db
+      .delete(events)
+      .where(
+        and(
+          eq(events.read, true),
+          lt(events.createdAt, cutoffDate)
+        )
+      );
+
+    const readEventsDeleted = readResult.changes || 0;
+    console.log(`Deleted ${readEventsDeleted} read events`);
+
+    // Calculate the cutoff date for unread events (default to 2x the retention period)
+    const unreadCutoffDate = new Date();
+    const unreadMaxAge = cleanupUnreadAfterDays || (maxAgeInDays * 2);
+    unreadCutoffDate.setDate(unreadCutoffDate.getDate() - unreadMaxAge);
+
+    // Delete unread events that are significantly older
+    const unreadResult = await db
+      .delete(events)
+      .where(
+        and(
+          eq(events.read, false),
+          lt(events.createdAt, unreadCutoffDate)
+        )
+      );
+
+    const unreadEventsDeleted = unreadResult.changes || 0;
+    console.log(`Deleted ${unreadEventsDeleted} unread events`);
+
+    return { readEventsDeleted, unreadEventsDeleted };
+  } catch (error) {
+    console.error("Error cleaning up old events:", error);
+    return { readEventsDeleted: 0, unreadEventsDeleted: 0 };
+  }
+}
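A rough sketch of how a server-sent-events endpoint could consume these helpers by polling; the loop shape, the `send` callback, and the 2-second interval are assumptions, not part of this change.

import { getNewEvents } from "@/lib/events"; // path assumed

async function streamEvents(userId: string, send: (data: string) => void) {
  let lastEventTime: Date | undefined;
  for (;;) {
    const fresh = await getNewEvents({
      userId,
      channel: `mirror-status:${userId}`, // channel format used by createMirrorJob
      lastEventTime,
    });
    for (const event of fresh) {
      send(JSON.stringify(event.payload));
      lastEventTime = event.createdAt;
    }
    await new Promise((resolve) => setTimeout(resolve, 2000)); // assumed poll interval
  }
}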
src/lib/gitea.test.ts (new file, 120 lines)
@@ -0,0 +1,120 @@
+import { describe, test, expect, mock, beforeEach, afterEach } from "bun:test";
+import { Octokit } from "@octokit/rest";
+import { repoStatusEnum } from "@/types/Repository";
+
+// Mock the isRepoPresentInGitea function
+const mockIsRepoPresentInGitea = mock(() => Promise.resolve(false));
+
+// Mock the database module
+mock.module("@/lib/db", () => {
+  return {
+    db: {
+      update: () => ({
+        set: () => ({
+          where: () => Promise.resolve()
+        })
+      })
+    },
+    repositories: {},
+    organizations: {}
+  };
+});
+
+// Mock the helpers module
+mock.module("@/lib/helpers", () => {
+  return {
+    createMirrorJob: mock(() => Promise.resolve("job-id"))
+  };
+});
+
+// Mock superagent
+mock.module("superagent", () => {
+  const mockPost = mock(() => ({
+    set: () => ({
+      set: () => ({
+        send: () => Promise.resolve({ body: { id: 123 } })
+      })
+    })
+  }));
+
+  const mockGet = mock(() => ({
+    set: () => Promise.resolve({ body: [] })
+  }));
+
+  return {
+    post: mockPost,
+    get: mockGet
+  };
+});
+
+// Mock the gitea module itself
+mock.module("./gitea", () => {
+  return {
+    isRepoPresentInGitea: mockIsRepoPresentInGitea,
+    mirrorGithubRepoToGitea: mock(async () => {}),
+    mirrorGitHubOrgRepoToGiteaOrg: mock(async () => {})
+  };
+});
+
+describe("Gitea Repository Mirroring", () => {
+  // Mock console.log and console.error to prevent test output noise
+  let originalConsoleLog: typeof console.log;
+  let originalConsoleError: typeof console.error;
+
+  beforeEach(() => {
+    originalConsoleLog = console.log;
+    originalConsoleError = console.error;
+    console.log = mock(() => {});
+    console.error = mock(() => {});
+  });
+
+  afterEach(() => {
+    console.log = originalConsoleLog;
+    console.error = originalConsoleError;
+  });
+
+  test("mirrorGithubRepoToGitea handles private repositories correctly", async () => {
+    // Import the mocked function
+    const { mirrorGithubRepoToGitea } = await import("./gitea");
+
+    // Create mock Octokit instance
+    const octokit = {} as Octokit;
+
+    // Create mock repository (private)
+    const repository = {
+      id: "repo-id",
+      name: "test-repo",
+      fullName: "testuser/test-repo",
+      url: "https://github.com/testuser/test-repo",
+      cloneUrl: "https://github.com/testuser/test-repo.git",
+      owner: "testuser",
+      isPrivate: true,
+      status: repoStatusEnum.parse("imported")
+    };
+
+    // Create mock config
+    const config = {
+      id: "config-id",
+      userId: "user-id",
+      githubConfig: {
+        token: "github-token",
+        mirrorIssues: false
+      },
+      giteaConfig: {
+        url: "https://gitea.example.com",
+        token: "gitea-token",
+        username: "giteauser"
+      }
+    };
+
+    // Call the function
+    await mirrorGithubRepoToGitea({
+      octokit,
+      repository: repository as any,
+      config
+    });
+
+    // Check that the function was called
+    expect(mirrorGithubRepoToGitea).toHaveBeenCalled();
+  });
+});
src/lib/gitea.ts
@@ -601,11 +601,22 @@ export async function mirrorGitHubOrgToGitea({
     .from(repositories)
     .where(eq(repositories.organization, organization.name));

-  for (const repo of orgRepos) {
-    await mirrorGitHubRepoToGiteaOrg({
-      octokit,
-      config,
-      repository: {
+  if (orgRepos.length === 0) {
+    console.log(`No repositories found for organization ${organization.name}`);
+    return;
+  }
+
+  console.log(`Mirroring ${orgRepos.length} repositories for organization ${organization.name}`);
+
+  // Import the processWithRetry function
+  const { processWithRetry } = await import("@/lib/utils/concurrency");
+
+  // Process repositories in parallel with concurrency control
+  await processWithRetry(
+    orgRepos,
+    async (repo) => {
+      // Prepare repository data
+      const repoData = {
         ...repo,
         status: repo.status as RepoStatus,
         visibility: repo.visibility as RepositoryVisibility,
@@ -614,11 +625,37 @@ export async function mirrorGitHubOrgToGitea({
         organization: repo.organization ?? undefined,
         forkedFrom: repo.forkedFrom ?? undefined,
         mirroredLocation: repo.mirroredLocation || "",
+      };
+
+      // Log the start of mirroring
+      console.log(`Starting mirror for repository: ${repo.name} in organization ${organization.name}`);
+
+      // Mirror the repository
+      await mirrorGitHubRepoToGiteaOrg({
+        octokit,
+        config,
+        repository: repoData,
+        giteaOrgId,
+        orgName: organization.name,
+      });
+
+      return repo;
+    },
+    {
+      concurrencyLimit: 3, // Process 3 repositories at a time
+      maxRetries: 2,
+      retryDelay: 2000,
+      onProgress: (completed, total, result) => {
+        const percentComplete = Math.round((completed / total) * 100);
+        if (result) {
+          console.log(`Mirrored repository "${result.name}" in organization ${organization.name} (${completed}/${total}, ${percentComplete}%)`);
+        }
       },
-      giteaOrgId,
-      orgName: organization.name,
-    });
-  }
+      onRetry: (repo, error, attempt) => {
+        console.log(`Retrying repository ${repo.name} in organization ${organization.name} (attempt ${attempt}): ${error.message}`);
+      }
+    }
+  );

   console.log(`Organization ${organization.name} mirrored successfully`);

@@ -837,7 +874,15 @@ export const mirrorGitRepoIssuesToGitea = async ({
     (res) => res.data
   );

-  console.log(`Mirroring ${issues.length} issues from ${repository.fullName}`);
+  // Filter out pull requests
+  const filteredIssues = issues.filter(issue => !(issue as any).pull_request);
+
+  console.log(`Mirroring ${filteredIssues.length} issues from ${repository.fullName}`);
+
+  if (filteredIssues.length === 0) {
+    console.log(`No issues to mirror for ${repository.fullName}`);
+    return;
+  }

   // Get existing labels from Gitea
   const giteaLabelsRes = await superagent
@@ -851,58 +896,60 @@ export const mirrorGitRepoIssuesToGitea = async ({
     giteaLabels.map((label: any) => [label.name, label.id])
   );

-  for (const issue of issues) {
-    if ((issue as any).pull_request) {
-      continue;
-    }
-
-    const githubLabelNames =
-      issue.labels
-        ?.map((l) => (typeof l === "string" ? l : l.name))
-        .filter((l): l is string => !!l) || [];
+  // Import the processWithRetry function
+  const { processWithRetry } = await import("@/lib/utils/concurrency");
+
+  // Process issues in parallel with concurrency control
+  await processWithRetry(
+    filteredIssues,
+    async (issue) => {
+      const githubLabelNames =
+        issue.labels
+          ?.map((l) => (typeof l === "string" ? l : l.name))
+          .filter((l): l is string => !!l) || [];

       const giteaLabelIds: number[] = [];

       // Resolve or create labels in Gitea
       for (const name of githubLabelNames) {
         if (labelMap.has(name)) {
           giteaLabelIds.push(labelMap.get(name)!);
         } else {
           try {
             const created = await superagent
               .post(
                 `${config.giteaConfig.url}/api/v1/repos/${repoOrigin}/${repository.name}/labels`
               )
               .set("Authorization", `token ${config.giteaConfig.token}`)
               .send({ name, color: "#ededed" }); // Default color

             labelMap.set(name, created.body.id);
             giteaLabelIds.push(created.body.id);
           } catch (labelErr) {
             console.error(
               `Failed to create label "${name}" in Gitea: ${labelErr}`
             );
+          }
         }
       }
-    }

       const originalAssignees =
         issue.assignees && issue.assignees.length > 0
           ? `\n\nOriginally assigned to: ${issue.assignees
               .map((a) => `@${a.login}`)
               .join(", ")} on GitHub.`
           : "";

       const issuePayload: any = {
         title: issue.title,
         body: `Originally created by @${
           issue.user?.login
         } on GitHub.${originalAssignees}\n\n${issue.body || ""}`,
         closed: issue.state === "closed",
         labels: giteaLabelIds,
       };

-    try {
+      // Create the issue in Gitea
       const createdIssue = await superagent
         .post(
           `${config.giteaConfig.url}/api/v1/repos/${repoOrigin}/${repository.name}/issues`
@@ -922,41 +969,49 @@ export const mirrorGitRepoIssuesToGitea = async ({
         (res) => res.data
       );

-      for (const comment of comments) {
-        try {
-          await superagent
-            .post(
-              `${config.giteaConfig.url}/api/v1/repos/${repoOrigin}/${repository.name}/issues/${createdIssue.body.number}/comments`
-            )
-            .set("Authorization", `token ${config.giteaConfig.token}`)
-            .send({
-              body: `@${comment.user?.login} commented on GitHub:\n\n${comment.body}`,
-            });
-        } catch (commentErr) {
-          console.error(
-            `Failed to copy comment to Gitea for issue "${issue.title}": ${
-              commentErr instanceof Error
-                ? commentErr.message
-                : String(commentErr)
-            }`
-          );
-        }
+      // Process comments in parallel with concurrency control
+      if (comments.length > 0) {
+        await processWithRetry(
+          comments,
+          async (comment) => {
+            await superagent
+              .post(
+                `${config.giteaConfig.url}/api/v1/repos/${repoOrigin}/${repository.name}/issues/${createdIssue.body.number}/comments`
+              )
+              .set("Authorization", `token ${config.giteaConfig.token}`)
+              .send({
+                body: `@${comment.user?.login} commented on GitHub:\n\n${comment.body}`,
+              });
+            return comment;
+          },
+          {
+            concurrencyLimit: 5,
+            maxRetries: 2,
+            retryDelay: 1000,
+            onRetry: (comment, error, attempt) => {
+              console.log(`Retrying comment (attempt ${attempt}): ${error.message}`);
+            }
+          }
+        );
       }
-    } catch (err) {
-      if (err instanceof Error && (err as any).response) {
-        console.error(
-          `Failed to create issue "${issue.title}" in Gitea: ${err.message}`
-        );
-        console.error(
-          `Response body: ${JSON.stringify((err as any).response.body)}`
-        );
-      } else {
-        console.error(
-          `Failed to create issue "${issue.title}" in Gitea: ${
-            err instanceof Error ? err.message : String(err)
-          }`
-        );
-      }
-    }
-  }
+
+      return issue;
+    },
+    {
+      concurrencyLimit: 3, // Process 3 issues at a time
+      maxRetries: 2,
+      retryDelay: 2000,
+      onProgress: (completed, total, result) => {
+        const percentComplete = Math.round((completed / total) * 100);
+        if (result) {
+          console.log(`Mirrored issue "${result.title}" (${completed}/${total}, ${percentComplete}%)`);
+        }
      },
+      onRetry: (issue, error, attempt) => {
+        console.log(`Retrying issue "${issue.title}" (attempt ${attempt}): ${error.message}`);
+      }
+    }
+  );
+
+  console.log(`Completed mirroring ${filteredIssues.length} issues for ${repository.fullName}`);
 };
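For reference, a rough sketch of the contract the call sites above and the tests at the end of this change imply for the concurrency helpers; the real implementations live in src/lib/utils/concurrency.ts, which this diff does not show.

interface ProcessWithRetryOptions<T, R> {
  concurrencyLimit?: number; // parallel workers (call sites use 3 or 5)
  maxRetries?: number;
  retryDelay?: number; // ms between attempts
  onProgress?: (completed: number, total: number, result?: R) => void;
  onRetry?: (item: T, error: Error, attempt: number) => void;
}

declare function processWithRetry<T, R>(
  items: T[],
  processItem: (item: T) => Promise<R>,
  options?: ProcessWithRetryOptions<T, R>
): Promise<R[]>;

declare function processInParallel<T, R>(
  items: T[],
  processItem: (item: T) => Promise<R>,
  concurrencyLimit?: number,
  onProgress?: (completed: number, total: number, result?: R) => void
): Promise<R[]>;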
src/lib/helpers.ts
@@ -1,7 +1,7 @@
 import type { RepoStatus } from "@/types/Repository";
 import { db, mirrorJobs } from "./db";
+import { eq, and, or, isNull, lt } from "drizzle-orm"; // query helpers used by the job functions below
 import { v4 as uuidv4 } from "uuid";
-import { redisPublisher } from "./redis";
+import { publishEvent } from "./events";

 export async function createMirrorJob({
   userId,
@@ -12,6 +12,11 @@ export async function createMirrorJob({
   message,
   status,
   details,
+  jobType,
+  batchId,
+  totalItems,
+  itemIds,
+  inProgress,
 }: {
   userId: string;
   organizationId?: string;
@@ -21,6 +26,11 @@ export async function createMirrorJob({
   details?: string;
   message: string;
   status: RepoStatus;
+  jobType?: "mirror" | "sync" | "retry";
+  batchId?: string;
+  totalItems?: number;
+  itemIds?: string[];
+  inProgress?: boolean;
 }) {
   const jobId = uuidv4();
   const currentTimestamp = new Date();
@@ -32,18 +42,35 @@ export async function createMirrorJob({
     repositoryName,
     organizationId,
     organizationName,
-    configId: uuidv4(),
     details,
     message: message,
     status: status,
     timestamp: currentTimestamp,
+
+    // New resilience fields
+    jobType: jobType || "mirror",
+    batchId: batchId || undefined,
+    totalItems: totalItems || undefined,
+    completedItems: 0,
+    itemIds: itemIds || undefined,
+    completedItemIds: [],
+    inProgress: inProgress !== undefined ? inProgress : false,
+    startedAt: inProgress ? currentTimestamp : undefined,
+    completedAt: undefined,
+    lastCheckpoint: undefined,
   };

   try {
+    // Insert the job into the database
     await db.insert(mirrorJobs).values(job);
+
+    // Publish the event using SQLite instead of Redis
     const channel = `mirror-status:${userId}`;
-    await redisPublisher.publish(channel, JSON.stringify(job));
+    await publishEvent({
+      userId,
+      channel,
+      payload: job
+    });
+
     return jobId;
   } catch (error) {
@@ -51,3 +78,186 @@ export async function createMirrorJob({
     throw new Error("Error creating mirror job");
   }
 }
+
+/**
+ * Updates the progress of a mirror job
+ */
+export async function updateMirrorJobProgress({
+  jobId,
+  completedItemId,
+  status,
+  message,
+  details,
+  inProgress,
+  isCompleted,
+}: {
+  jobId: string;
+  completedItemId?: string;
+  status?: RepoStatus;
+  message?: string;
+  details?: string;
+  inProgress?: boolean;
+  isCompleted?: boolean;
+}) {
+  try {
+    // Get the current job
+    const [job] = await db
+      .select()
+      .from(mirrorJobs)
+      .where(eq(mirrorJobs.id, jobId));
+
+    if (!job) {
+      throw new Error(`Mirror job with ID ${jobId} not found`);
+    }
+
+    // Update the job with new progress
+    const updates: Record<string, any> = {
+      lastCheckpoint: new Date(),
+    };
+
+    // Add completed item if provided
+    if (completedItemId) {
+      const completedItemIds = job.completedItemIds || [];
+      if (!completedItemIds.includes(completedItemId)) {
+        updates.completedItemIds = [...completedItemIds, completedItemId];
+        updates.completedItems = (job.completedItems || 0) + 1;
+      }
+    }
+
+    // Update status if provided
+    if (status) {
+      updates.status = status;
+    }
+
+    // Update message if provided
+    if (message) {
+      updates.message = message;
+    }
+
+    // Update details if provided
+    if (details) {
+      updates.details = details;
+    }
+
+    // Update in-progress status if provided
+    if (inProgress !== undefined) {
+      updates.inProgress = inProgress;
+    }
+
+    // Mark as completed if specified
+    if (isCompleted) {
+      updates.inProgress = false;
+      updates.completedAt = new Date();
+    }
+
+    // Update the job in the database
+    await db
+      .update(mirrorJobs)
+      .set(updates)
+      .where(eq(mirrorJobs.id, jobId));
+
+    // Publish the event
+    const updatedJob = {
+      ...job,
+      ...updates,
+    };
+
+    await publishEvent({
+      userId: job.userId,
+      channel: `mirror-status:${job.userId}`,
+      payload: updatedJob,
+    });
+
+    return updatedJob;
+  } catch (error) {
+    console.error("Error updating mirror job progress:", error);
+    throw new Error("Error updating mirror job progress");
+  }
+}
+
+/**
+ * Finds interrupted jobs that need to be resumed
+ */
+export async function findInterruptedJobs() {
+  try {
+    // Find jobs that are marked as in-progress but haven't been updated recently
+    const cutoffTime = new Date();
+    cutoffTime.setMinutes(cutoffTime.getMinutes() - 10); // Consider jobs inactive after 10 minutes without updates
+
+    const interruptedJobs = await db
+      .select()
+      .from(mirrorJobs)
+      .where(
+        and(
+          eq(mirrorJobs.inProgress, true),
+          or(
+            isNull(mirrorJobs.lastCheckpoint),
+            lt(mirrorJobs.lastCheckpoint, cutoffTime)
+          )
+        )
+      );
+
+    return interruptedJobs;
+  } catch (error) {
+    console.error("Error finding interrupted jobs:", error);
+    return [];
+  }
+}
+
+/**
+ * Resumes an interrupted job
+ */
+export async function resumeInterruptedJob(job: any) {
+  try {
+    console.log(`Resuming interrupted job: ${job.id}`);
+
+    // Skip if job doesn't have the necessary data to resume
+    if (!job.itemIds || !job.completedItemIds) {
+      console.log(`Cannot resume job ${job.id}: missing item data`);
+
+      // Mark the job as failed
+      await updateMirrorJobProgress({
+        jobId: job.id,
+        status: "failed",
+        message: "Job interrupted and could not be resumed",
+        details: "The job was interrupted and did not have enough information to resume",
+        inProgress: false,
+        isCompleted: true,
+      });
+
+      return null;
+    }
+
+    // Calculate remaining items
+    const remainingItemIds = job.itemIds.filter(
+      (id: string) => !job.completedItemIds.includes(id)
+    );
+
+    if (remainingItemIds.length === 0) {
+      console.log(`Job ${job.id} has no remaining items, marking as completed`);
+
+      // Mark the job as completed
+      await updateMirrorJobProgress({
+        jobId: job.id,
+        status: "mirrored",
+        message: "Job completed after resuming",
+        inProgress: false,
+        isCompleted: true,
+      });
+
+      return null;
+    }
+
+    // Update the job to show it's being resumed
+    await updateMirrorJobProgress({
+      jobId: job.id,
+      message: `Resuming job with ${remainingItemIds.length} remaining items`,
+      details: `Job was interrupted and is being resumed. ${job.completedItemIds.length} of ${job.itemIds.length} items were already processed.`,
+      inProgress: true,
+    });
+
+    return {
+      job,
+      remainingItemIds,
+    };
+  } catch (error) {
+    console.error(`Error resuming job ${job.id}:`, error);
+    return null;
+  }
+}

src/lib/recovery.ts (new file, 224 lines)
@@ -0,0 +1,224 @@
+/**
+ * Recovery mechanism for interrupted jobs
+ * This module handles detecting and resuming jobs that were interrupted by container restarts
+ */
+
+import { findInterruptedJobs, resumeInterruptedJob } from './helpers';
+import { db, repositories, organizations } from './db';
+import { eq, inArray } from 'drizzle-orm'; // inArray matches a column against a list of IDs
+import { mirrorGithubRepoToGitea, mirrorGitHubOrgRepoToGiteaOrg, syncGiteaRepo } from './gitea';
+import { createGitHubClient } from './github';
+import { processWithResilience } from './utils/concurrency';
+import { repositoryVisibilityEnum, repoStatusEnum } from '@/types/Repository';
+import type { Repository } from './db/schema';
+
+/**
+ * Initialize the recovery system
+ * This should be called when the application starts
+ */
+export async function initializeRecovery() {
+  console.log('Initializing recovery system...');
+
+  try {
+    // Find interrupted jobs
+    const interruptedJobs = await findInterruptedJobs();
+
+    if (interruptedJobs.length === 0) {
+      console.log('No interrupted jobs found.');
+      return;
+    }
+
+    console.log(`Found ${interruptedJobs.length} interrupted jobs. Starting recovery...`);
+
+    // Process each interrupted job
+    for (const job of interruptedJobs) {
+      const resumeData = await resumeInterruptedJob(job);
+
+      if (!resumeData) {
+        console.log(`Job ${job.id} could not be resumed.`);
+        continue;
+      }
+
+      const { job: updatedJob, remainingItemIds } = resumeData;
+
+      // Handle different job types
+      switch (updatedJob.jobType) {
+        case 'mirror':
+          await recoverMirrorJob(updatedJob, remainingItemIds);
+          break;
+        case 'sync':
+          await recoverSyncJob(updatedJob, remainingItemIds);
+          break;
+        case 'retry':
+          await recoverRetryJob(updatedJob, remainingItemIds);
+          break;
+        default:
+          console.log(`Unknown job type: ${updatedJob.jobType}`);
+      }
+    }
+
+    console.log('Recovery process completed.');
+  } catch (error) {
+    console.error('Error during recovery process:', error);
+  }
+}
+
+/**
+ * Recover a mirror job
+ */
+async function recoverMirrorJob(job: any, remainingItemIds: string[]) {
+  console.log(`Recovering mirror job ${job.id} with ${remainingItemIds.length} remaining items`);
+
+  try {
+    // Get the config for this user
+    const [config] = await db
+      .select()
+      .from(repositories)
+      .where(eq(repositories.userId, job.userId))
+      .limit(1);
+
+    if (!config || !config.configId) {
+      throw new Error('Config not found for user');
+    }
+
+    // Get repositories to process
+    const repos = await db
+      .select()
+      .from(repositories)
+      .where(inArray(repositories.id, remainingItemIds));
+
+    if (repos.length === 0) {
+      throw new Error('No repositories found for the remaining item IDs');
+    }
+
+    // Create GitHub client
+    const octokit = createGitHubClient(config.githubConfig.token);
+
+    // Process repositories with resilience
+    await processWithResilience(
+      repos,
+      async (repo) => {
+        // Prepare repository data
+        const repoData = {
+          ...repo,
+          status: repoStatusEnum.parse("imported"),
+          organization: repo.organization ?? undefined,
+          lastMirrored: repo.lastMirrored ?? undefined,
+          errorMessage: repo.errorMessage ?? undefined,
+          forkedFrom: repo.forkedFrom ?? undefined,
+          visibility: repositoryVisibilityEnum.parse(repo.visibility),
+          mirroredLocation: repo.mirroredLocation || "",
+        };
+
+        // Mirror the repository based on whether it's in an organization
+        if (repo.organization && config.githubConfig.preserveOrgStructure) {
+          await mirrorGitHubOrgRepoToGiteaOrg({
+            config,
+            octokit,
+            orgName: repo.organization,
+            repository: repoData,
+          });
+        } else {
+          await mirrorGithubRepoToGitea({
+            octokit,
+            repository: repoData,
+            config,
+          });
+        }
+
+        return repo;
+      },
+      {
+        userId: job.userId,
+        jobType: 'mirror',
+        getItemId: (repo) => repo.id,
+        getItemName: (repo) => repo.name,
+        resumeFromJobId: job.id,
+        concurrencyLimit: 3,
+        maxRetries: 2,
+        retryDelay: 2000,
+      }
+    );
+  } catch (error) {
+    console.error(`Error recovering mirror job ${job.id}:`, error);
+  }
+}
+
+/**
+ * Recover a sync job
+ */
+async function recoverSyncJob(job: any, remainingItemIds: string[]) {
+  // Implementation similar to recoverMirrorJob but for sync operations
+  console.log(`Recovering sync job ${job.id} with ${remainingItemIds.length} remaining items`);
+
+  try {
+    // Get the config for this user
+    const [config] = await db
+      .select()
+      .from(repositories)
+      .where(eq(repositories.userId, job.userId))
+      .limit(1);
+
+    if (!config || !config.configId) {
+      throw new Error('Config not found for user');
+    }
+
+    // Get repositories to process
+    const repos = await db
+      .select()
+      .from(repositories)
+      .where(inArray(repositories.id, remainingItemIds));
+
+    if (repos.length === 0) {
+      throw new Error('No repositories found for the remaining item IDs');
+    }
+
+    // Process repositories with resilience
+    await processWithResilience(
+      repos,
+      async (repo) => {
+        // Prepare repository data
+        const repoData = {
+          ...repo,
+          status: repoStatusEnum.parse(repo.status),
+          organization: repo.organization ?? undefined,
+          lastMirrored: repo.lastMirrored ?? undefined,
+          errorMessage: repo.errorMessage ?? undefined,
+          forkedFrom: repo.forkedFrom ?? undefined,
+          visibility: repositoryVisibilityEnum.parse(repo.visibility),
+        };
+
+        // Sync the repository
+        await syncGiteaRepo({
+          config,
+          repository: repoData,
+        });
+
+        return repo;
+      },
+      {
+        userId: job.userId,
+        jobType: 'sync',
+        getItemId: (repo) => repo.id,
+        getItemName: (repo) => repo.name,
+        resumeFromJobId: job.id,
+        concurrencyLimit: 5,
+        maxRetries: 2,
+        retryDelay: 2000,
+      }
+    );
+  } catch (error) {
+    console.error(`Error recovering sync job ${job.id}:`, error);
+  }
+}
+
+/**
+ * Recover a retry job
+ */
+async function recoverRetryJob(job: any, remainingItemIds: string[]) {
+  // Implementation similar to recoverMirrorJob but for retry operations
+  console.log(`Recovering retry job ${job.id} with ${remainingItemIds.length} remaining items`);
+
+  // This would be similar to recoverMirrorJob but with retry-specific logic
+  console.log('Retry job recovery not yet implemented');
+}
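A sketch of how the recovery system and the checkpointing helpers fit together at runtime; the entry-point wiring and the `mirrorOneRepository` worker are hypothetical, not part of this change.

import { initializeRecovery } from "@/lib/recovery";
import { updateMirrorJobProgress } from "@/lib/helpers";

declare function mirrorOneRepository(id: string): Promise<void>; // hypothetical per-item work

// On application startup, resume anything a container restart interrupted
initializeRecovery().catch((error) => {
  console.error("Recovery failed at startup:", error);
});

// Inside a worker, checkpoint after each item so a later restart can resume
async function runJob(jobId: string, itemIds: string[]) {
  for (const itemId of itemIds) {
    await mirrorOneRepository(itemId);
    await updateMirrorJobProgress({ jobId, completedItemId: itemId });
  }
  await updateMirrorJobProgress({ jobId, status: "mirrored", isCompleted: true });
}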
src/lib/redis.ts (deleted, 30 lines)
@@ -1,30 +0,0 @@
-import Redis from "ioredis";
-
-// Connect to Redis using REDIS_URL environment variable or default to redis://redis:6379
-// This ensures we have a fallback URL when running with Docker Compose
-const redisUrl = process.env.REDIS_URL ?? 'redis://redis:6379';
-
-console.log(`Connecting to Redis at: ${redisUrl}`);
-
-// Configure Redis client with connection options
-const redisOptions = {
-  retryStrategy: (times: number) => {
-    // Retry with exponential backoff up to 30 seconds
-    const delay = Math.min(times * 100, 3000);
-    console.log(`Redis connection attempt ${times} failed. Retrying in ${delay}ms...`);
-    return delay;
-  },
-  maxRetriesPerRequest: 5,
-  enableReadyCheck: true,
-  connectTimeout: 10000,
-};
-
-export const redis = new Redis(redisUrl, redisOptions);
-export const redisPublisher = new Redis(redisUrl, redisOptions); // For publishing
-export const redisSubscriber = new Redis(redisUrl, redisOptions); // For subscribing
-
-// Log connection events
-redis.on('connect', () => console.log('Redis client connected'));
-redis.on('error', (err) => console.error('Redis client error:', err));
-redis.on('ready', () => console.log('Redis client ready'));
-redis.on('reconnecting', () => console.log('Redis client reconnecting...'));
src/lib/utils.test.ts (new file, 110 lines)
@@ -0,0 +1,110 @@
+import { describe, test, expect } from "bun:test";
+import { jsonResponse, formatDate, truncate, safeParse } from "./utils";
+
+describe("jsonResponse", () => {
+  test("creates a Response with JSON content", () => {
+    const data = { message: "Hello, world!" };
+    const response = jsonResponse({ data });
+
+    expect(response).toBeInstanceOf(Response);
+    expect(response.status).toBe(200);
+    expect(response.headers.get("Content-Type")).toBe("application/json");
+  });
+
+  test("uses the provided status code", () => {
+    const data = { error: "Not found" };
+    const response = jsonResponse({ data, status: 404 });
+
+    expect(response.status).toBe(404);
+  });
+
+  test("correctly serializes complex objects", async () => {
+    const now = new Date();
+    const data = {
+      message: "Complex object",
+      date: now,
+      nested: { foo: "bar" },
+      array: [1, 2, 3]
+    };
+
+    const response = jsonResponse({ data });
+    const responseBody = await response.json();
+
+    expect(responseBody).toEqual({
+      message: "Complex object",
+      date: now.toISOString(),
+      nested: { foo: "bar" },
+      array: [1, 2, 3]
+    });
+  });
+});
+
+describe("formatDate", () => {
+  test("formats a date object", () => {
+    const date = new Date("2023-01-15T12:30:45Z");
+    const formatted = formatDate(date);
+
+    // The exact format might depend on the locale, so we'll check for parts
+    expect(formatted).toContain("2023");
+    expect(formatted).toContain("January");
+    expect(formatted).toContain("15");
+  });
+
+  test("formats a date string", () => {
+    const dateStr = "2023-01-15T12:30:45Z";
+    const formatted = formatDate(dateStr);
+
+    expect(formatted).toContain("2023");
+    expect(formatted).toContain("January");
+    expect(formatted).toContain("15");
+  });
+
+  test("returns 'Never' for null or undefined", () => {
+    expect(formatDate(null)).toBe("Never");
+    expect(formatDate(undefined)).toBe("Never");
+  });
+});
+
+describe("truncate", () => {
+  test("truncates a string that exceeds the length", () => {
+    const str = "This is a long string that needs truncation";
+    const truncated = truncate(str, 10);
+
+    expect(truncated).toBe("This is a ...");
+    expect(truncated.length).toBe(13); // 10 chars + "..."
+  });
+
+  test("does not truncate a string that is shorter than the length", () => {
+    const str = "Short";
+    const truncated = truncate(str, 10);
+
+    expect(truncated).toBe("Short");
+  });
+
+  test("handles empty strings", () => {
+    expect(truncate("", 10)).toBe("");
+  });
+});
+
+describe("safeParse", () => {
+  test("parses valid JSON strings", () => {
+    const jsonStr = '{"name":"John","age":30}';
+    const parsed = safeParse(jsonStr);
+
+    expect(parsed).toEqual({ name: "John", age: 30 });
+  });
+
+  test("returns undefined for invalid JSON strings", () => {
+    const invalidJson = '{"name":"John",age:30}'; // Missing quotes around age
+    const parsed = safeParse(invalidJson);
+
+    expect(parsed).toBeUndefined();
+  });
+
+  test("returns the original value for non-string inputs", () => {
+    const obj = { name: "John", age: 30 };
+    const parsed = safeParse(obj);
+
+    expect(parsed).toBe(obj);
+  });
+});
167
src/lib/utils/concurrency.test.ts
Normal file
167
src/lib/utils/concurrency.test.ts
Normal file
@@ -0,0 +1,167 @@
import { describe, test, expect, mock } from "bun:test";
import { processInParallel, processWithRetry } from "./concurrency";

describe("processInParallel", () => {
  test("processes items in parallel with concurrency control", async () => {
    // Create an array of numbers to process
    const items = [1, 2, 3, 4, 5, 6, 7, 8, 9, 10];

    // Create a mock function to track execution
    const processItem = mock(async (item: number) => {
      // Simulate async work
      await new Promise(resolve => setTimeout(resolve, 10));
      return item * 2;
    });

    // Create a mock progress callback
    const onProgress = mock((completed: number, total: number, result?: number) => {
      // Progress tracking
    });

    // Process the items with a concurrency limit of 3
    const results = await processInParallel(
      items,
      processItem,
      3,
      onProgress
    );

    // Verify results
    expect(results).toEqual([2, 4, 6, 8, 10, 12, 14, 16, 18, 20]);

    // Verify that processItem was called for each item
    expect(processItem).toHaveBeenCalledTimes(10);

    // Verify that onProgress was called for each item
    expect(onProgress).toHaveBeenCalledTimes(10);

    // Verify the last call to onProgress had the correct completed/total values
    expect(onProgress.mock.calls[9][0]).toBe(10); // completed
    expect(onProgress.mock.calls[9][1]).toBe(10); // total
  });

  test("handles errors in processing", async () => {
    // Create an array of numbers to process
    const items = [1, 2, 3, 4, 5];

    // Create a mock function that throws an error for item 3
    const processItem = mock(async (item: number) => {
      if (item === 3) {
        throw new Error("Test error");
      }
      return item * 2;
    });

    // Create a spy for console.error
    const originalConsoleError = console.error;
    const consoleErrorMock = mock(() => {});
    console.error = consoleErrorMock;

    try {
      // Process the items
      const results = await processInParallel(items, processItem);

      // Verify results (should have 4 items, missing the one that errored)
      expect(results).toEqual([2, 4, 8, 10]);

      // Verify that processItem was called for each item
      expect(processItem).toHaveBeenCalledTimes(5);

      // Verify that console.error was called once
      expect(consoleErrorMock).toHaveBeenCalledTimes(1);
    } finally {
      // Restore console.error
      console.error = originalConsoleError;
    }
  });
});

describe("processWithRetry", () => {
  test("retries failed operations", async () => {
    // Create an array of numbers to process
    const items = [1, 2, 3];

    // Create a counter to track retry attempts
    const attemptCounts: Record<number, number> = { 1: 0, 2: 0, 3: 0 };

    // Create a mock function that fails on first attempt for item 2
    const processItem = mock(async (item: number) => {
      attemptCounts[item]++;

      if (item === 2 && attemptCounts[item] === 1) {
        throw new Error("Temporary error");
      }

      return item * 2;
    });

    // Create a mock for the onRetry callback
    const onRetry = mock((item: number, error: Error, attempt: number) => {
      // Retry tracking
    });

    // Process the items with retry
    const results = await processWithRetry(items, processItem, {
      maxRetries: 2,
      retryDelay: 10,
      onRetry,
    });

    // Verify results
    expect(results).toEqual([2, 4, 6]);

    // Verify that item 2 was retried once
    expect(attemptCounts[1]).toBe(1); // No retries
    expect(attemptCounts[2]).toBe(2); // One retry
    expect(attemptCounts[3]).toBe(1); // No retries

    // Verify that onRetry was called once
    expect(onRetry).toHaveBeenCalledTimes(1);
    expect(onRetry.mock.calls[0][0]).toBe(2); // item
    expect(onRetry.mock.calls[0][2]).toBe(1); // attempt
  });

  test("gives up after max retries", async () => {
    // Create an array of numbers to process
    const items = [1, 2];

    // Create a mock function that always fails for item 2
    const processItem = mock(async (item: number) => {
      if (item === 2) {
        throw new Error("Persistent error");
      }
      return item * 2;
    });

    // Create a mock for the onRetry callback
    const onRetry = mock((item: number, error: Error, attempt: number) => {
      // Retry tracking
    });

    // Create a spy for console.error
    const originalConsoleError = console.error;
    const consoleErrorMock = mock(() => {});
    console.error = consoleErrorMock;

    try {
      // Process the items with retry
      const results = await processWithRetry(items, processItem, {
        maxRetries: 2,
        retryDelay: 10,
        onRetry,
      });

      // Verify results (should have 1 item, missing the one that errored)
      expect(results).toEqual([2]);

      // Verify that onRetry was called twice (for 2 retry attempts)
      expect(onRetry).toHaveBeenCalledTimes(2);

      // Verify that console.error was called once
      expect(consoleErrorMock).toHaveBeenCalledTimes(1);
    } finally {
      // Restore console.error
      console.error = originalConsoleError;
    }
  });
});
292 src/lib/utils/concurrency.ts Normal file
@@ -0,0 +1,292 @@
/**
 * Utility for processing items in parallel with concurrency control
 *
 * @param items Array of items to process
 * @param processItem Function to process each item
 * @param concurrencyLimit Maximum number of concurrent operations
 * @param onProgress Optional callback for progress updates
 * @returns Promise that resolves when all items are processed
 */
export async function processInParallel<T, R>(
  items: T[],
  processItem: (item: T) => Promise<R>,
  concurrencyLimit: number = 5,
  onProgress?: (completed: number, total: number, result?: R) => void
): Promise<R[]> {
  const results: R[] = [];
  let completed = 0;
  const total = items.length;

  // Process items in batches to control concurrency
  for (let i = 0; i < total; i += concurrencyLimit) {
    const batch = items.slice(i, i + concurrencyLimit);

    const batchPromises = batch.map(async (item) => {
      try {
        const result = await processItem(item);
        completed++;

        if (onProgress) {
          onProgress(completed, total, result);
        }

        return result;
      } catch (error) {
        completed++;

        if (onProgress) {
          onProgress(completed, total);
        }

        throw error;
      }
    });

    // Wait for the current batch to complete before starting the next batch
    const batchResults = await Promise.allSettled(batchPromises);

    // Process results and handle errors
    for (const result of batchResults) {
      if (result.status === 'fulfilled') {
        results.push(result.value);
      } else {
        console.error('Error processing item:', result.reason);
      }
    }
  }

  return results;
}

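A minimal usage sketch for processInParallel (illustrative only, not part of this commit; mirrorOne is a hypothetical worker):

import { processInParallel } from "@/lib/utils/concurrency";

// Hypothetical per-item worker used only for this sketch.
async function mirrorOne(name: string): Promise<string> {
  return `mirrored:${name}`;
}

// Mirror up to 3 repositories at a time, logging progress as items finish.
const results = await processInParallel(
  ["repo-a", "repo-b", "repo-c", "repo-d"],
  mirrorOne,
  3, // concurrency limit: at most 3 items in flight per batch
  (completed, total) => console.log(`progress: ${completed}/${total}`)
);
// Failed items are logged and skipped, so results.length can be
// smaller than the input length.
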
/**
 * Utility for processing items in parallel with automatic retry for failed operations
 *
 * @param items Array of items to process
 * @param processItem Function to process each item
 * @param options Configuration options
 * @returns Promise that resolves when all items are processed
 */
export async function processWithRetry<T, R>(
  items: T[],
  processItem: (item: T) => Promise<R>,
  options: {
    concurrencyLimit?: number;
    maxRetries?: number;
    retryDelay?: number;
    onProgress?: (completed: number, total: number, result?: R) => void;
    onRetry?: (item: T, error: Error, attempt: number) => void;
    jobId?: string; // Optional job ID for checkpointing
    getItemId?: (item: T) => string; // Function to get a unique ID for each item
    onCheckpoint?: (jobId: string, completedItemId: string) => Promise<void>; // Callback for checkpointing
    checkpointInterval?: number; // How many items to process before checkpointing
  } = {}
): Promise<R[]> {
  const {
    concurrencyLimit = 5,
    maxRetries = 3,
    retryDelay = 1000,
    onProgress,
    onRetry,
    jobId,
    getItemId,
    onCheckpoint,
    checkpointInterval = 1 // Default to checkpointing after each item
  } = options;

  // Track checkpoint counter
  let itemsProcessedSinceLastCheckpoint = 0;

  // Wrap the process function with retry logic
  const processWithRetryLogic = async (item: T): Promise<R> => {
    let lastError: Error | null = null;

    for (let attempt = 1; attempt <= maxRetries + 1; attempt++) {
      try {
        const result = await processItem(item);

        // Handle checkpointing if enabled
        if (jobId && getItemId && onCheckpoint) {
          const itemId = getItemId(item);
          itemsProcessedSinceLastCheckpoint++;

          // Checkpoint based on the interval
          if (itemsProcessedSinceLastCheckpoint >= checkpointInterval) {
            await onCheckpoint(jobId, itemId);
            itemsProcessedSinceLastCheckpoint = 0;
          }
        }

        return result;
      } catch (error) {
        lastError = error instanceof Error ? error : new Error(String(error));

        if (attempt <= maxRetries) {
          if (onRetry) {
            onRetry(item, lastError, attempt);
          }

          // Exponential backoff
          const delay = retryDelay * Math.pow(2, attempt - 1);
          await new Promise(resolve => setTimeout(resolve, delay));
        } else {
          throw lastError;
        }
      }
    }

    // This should never be reached due to the throw in the catch block
    throw lastError || new Error('Unknown error occurred');
  };

  const results = await processInParallel(
    items,
    processWithRetryLogic,
    concurrencyLimit,
    onProgress
  );

  // Final checkpoint if there are remaining items since the last checkpoint
  if (jobId && getItemId && onCheckpoint && itemsProcessedSinceLastCheckpoint > 0) {
    // We don't have a specific item ID for the final checkpoint, so we'll use a placeholder
    await onCheckpoint(jobId, 'final');
  }

  return results;
}

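Worth making the backoff arithmetic explicit: with retryDelay = 1000 and maxRetries = 3, the waits between attempts are 1000 ms, 2000 ms, then 4000 ms, since delay = retryDelay * 2^(attempt - 1). A small usage sketch (the flaky worker is hypothetical, not part of this commit):

import { processWithRetry } from "@/lib/utils/concurrency";

// Hypothetical flaky worker: fails the first time it sees each item.
const seen = new Set<number>();
async function flaky(n: number): Promise<number> {
  if (!seen.has(n)) {
    seen.add(n);
    throw new Error(`transient failure for ${n}`);
  }
  return n * 10;
}

const out = await processWithRetry([1, 2, 3], flaky, {
  maxRetries: 2,   // up to 2 retries per item
  retryDelay: 100, // backoff: 100 ms, then 200 ms
  onRetry: (n, err, attempt) =>
    console.log(`retry #${attempt} for ${n}: ${err.message}`),
});
// out === [10, 20, 30]
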
/**
 * Process items in parallel with resilience to container restarts
 * This version supports resuming from a previous checkpoint
 */
export async function processWithResilience<T, R>(
  items: T[],
  processItem: (item: T) => Promise<R>,
  options: {
    concurrencyLimit?: number;
    maxRetries?: number;
    retryDelay?: number;
    onProgress?: (completed: number, total: number, result?: R) => void;
    onRetry?: (item: T, error: Error, attempt: number) => void;
    userId: string; // Required for creating mirror jobs
    jobType: "mirror" | "sync" | "retry";
    getItemId: (item: T) => string; // Required function to get a unique ID for each item
    getItemName: (item: T) => string; // Required function to get a display name for each item
    checkpointInterval?: number;
    resumeFromJobId?: string; // Optional job ID to resume from
  }
): Promise<R[]> {
  const {
    userId,
    jobType,
    getItemId,
    getItemName,
    resumeFromJobId,
    checkpointInterval = 5,
    ...otherOptions
  } = options;

  // Import helpers for job management
  const { createMirrorJob, updateMirrorJobProgress } = await import('@/lib/helpers');

  // Get item IDs for all items
  const allItemIds = items.map(getItemId);

  // Create or resume a job
  let jobId: string;
  let completedItemIds: string[] = [];
  let itemsToProcess = [...items];

  if (resumeFromJobId) {
    // We're resuming an existing job
    jobId = resumeFromJobId;

    // Get the job from the database to find completed items
    const { db, mirrorJobs } = await import('@/lib/db');
    const { eq } = await import('drizzle-orm');
    const [job] = await db
      .select()
      .from(mirrorJobs)
      .where(eq(mirrorJobs.id, resumeFromJobId));

    if (job && job.completedItemIds) {
      completedItemIds = job.completedItemIds;

      // Filter out already completed items
      itemsToProcess = items.filter(item => !completedItemIds.includes(getItemId(item)));

      console.log(`Resuming job ${jobId} with ${itemsToProcess.length} remaining items`);

      // Update the job to show it's being resumed
      await updateMirrorJobProgress({
        jobId,
        message: `Resuming job with ${itemsToProcess.length} remaining items`,
        details: `Job is being resumed. ${completedItemIds.length} of ${items.length} items were already processed.`,
        inProgress: true,
      });
    }
  } else {
    // Create a new job
    jobId = await createMirrorJob({
      userId,
      message: `Started ${jobType} job with ${items.length} items`,
      details: `Processing ${items.length} items in parallel with checkpointing`,
      status: "mirroring",
      jobType,
      totalItems: items.length,
      itemIds: allItemIds,
      inProgress: true,
    });

    console.log(`Created new job ${jobId} with ${items.length} items`);
  }

  // Define the checkpoint function
  const onCheckpoint = async (jobId: string, completedItemId: string) => {
    const itemName = items.find(item => getItemId(item) === completedItemId)
      ? getItemName(items.find(item => getItemId(item) === completedItemId)!)
      : 'unknown';

    await updateMirrorJobProgress({
      jobId,
      completedItemId,
      message: `Processed item: ${itemName}`,
    });
  };

  try {
    // Process the items with checkpointing
    const results = await processWithRetry(
      itemsToProcess,
      processItem,
      {
        ...otherOptions,
        jobId,
        getItemId,
        onCheckpoint,
        checkpointInterval,
      }
    );

    // Mark the job as completed
    await updateMirrorJobProgress({
      jobId,
      status: "mirrored",
      message: `Completed ${jobType} job with ${items.length} items`,
      inProgress: false,
      isCompleted: true,
    });

    return results;
  } catch (error) {
    // Mark the job as failed
    await updateMirrorJobProgress({
      jobId,
      status: "failed",
      message: `Failed ${jobType} job: ${error instanceof Error ? error.message : String(error)}`,
      inProgress: false,
      isCompleted: true,
    });

    throw error;
  }
}
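A sketch of resuming an interrupted job after a container restart (illustrative only; the previous job ID would come from whatever recovery scan finds jobs still marked inProgress, and the placeholder worker stands in for real mirroring):

import { processWithResilience } from "@/lib/utils/concurrency";

// Hypothetical: re-run a mirror job, skipping items already recorded
// in the job's completedItemIds.
async function resumeMirror(
  userId: string,
  repos: { id: string; name: string }[],
  previousJobId: string
) {
  return processWithResilience(
    repos,
    async (repo) => repo, // placeholder worker for this sketch
    {
      userId,
      jobType: "mirror",
      getItemId: (r) => r.id,
      getItemName: (r) => r.name,
      resumeFromJobId: previousJobId,
    }
  );
}
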
22 src/middleware.ts Normal file
@@ -0,0 +1,22 @@
import { defineMiddleware } from 'astro:middleware';
import { initializeRecovery } from './lib/recovery';

// Flag to track if recovery has been initialized
let recoveryInitialized = false;

export const onRequest = defineMiddleware(async (context, next) => {
  // Initialize recovery system only once when the server starts
  if (!recoveryInitialized) {
    console.log('Initializing recovery system from middleware...');
    try {
      await initializeRecovery();
      console.log('Recovery system initialized successfully');
    } catch (error) {
      console.error('Error initializing recovery system:', error);
    }
    recoveryInitialized = true;
  }

  // Continue with the request
  return next();
});
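One caveat: a plain boolean flag does not serialize concurrent first requests, so two requests racing in before initializeRecovery resolves could both run it. A promise-memoizing variant (a sketch, not what this file does) closes that gap:

import { initializeRecovery } from './lib/recovery';

// Sketch: memoize the init promise so concurrent first requests share one call.
let recoveryPromise: Promise<void> | null = null;

function ensureRecoveryInitialized(): Promise<void> {
  if (!recoveryPromise) {
    recoveryPromise = initializeRecovery().catch((error) => {
      console.error('Error initializing recovery system:', error);
    });
  }
  return recoveryPromise;
}
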
187 src/pages/api/gitea/test-connection.test.ts Normal file
@@ -0,0 +1,187 @@
import { describe, test, expect, mock, beforeEach, afterEach } from "bun:test";
import axios from "axios";

// Mock the POST function
const mockPOST = mock(async ({ request }) => {
  const body = await request.json();

  // Check for missing URL or token
  if (!body.url || !body.token) {
    return new Response(
      JSON.stringify({
        success: false,
        message: "Gitea URL and token are required"
      }),
      { status: 400 }
    );
  }

  // Check for username mismatch
  if (body.username && body.username !== "giteauser") {
    return new Response(
      JSON.stringify({
        success: false,
        message: "Token belongs to giteauser, not " + body.username
      }),
      { status: 400 }
    );
  }

  // Handle invalid token
  if (body.token === "invalid-token") {
    return new Response(
      JSON.stringify({
        success: false,
        message: "Invalid Gitea token"
      }),
      { status: 401 }
    );
  }

  // Success case
  return new Response(
    JSON.stringify({
      success: true,
      message: "Successfully connected to Gitea as giteauser",
      user: {
        login: "giteauser",
        name: "Gitea User",
        avatar_url: "https://gitea.example.com/avatar.png"
      }
    }),
    { status: 200 }
  );
});

// Mock the module
mock.module("./test-connection", () => {
  return {
    POST: mockPOST
  };
});

// Import after mocking
import { POST } from "./test-connection";

describe("Gitea Test Connection API", () => {
  // Mock console.error to prevent test output noise
  let originalConsoleError: typeof console.error;

  beforeEach(() => {
    originalConsoleError = console.error;
    console.error = mock(() => {});
  });

  afterEach(() => {
    console.error = originalConsoleError;
  });

  test("returns 400 if url or token is missing", async () => {
    // Test missing URL
    const requestMissingUrl = new Request("http://localhost/api/gitea/test-connection", {
      method: "POST",
      headers: {
        "Content-Type": "application/json"
      },
      body: JSON.stringify({
        token: "valid-token"
      })
    });

    const responseMissingUrl = await POST({ request: requestMissingUrl } as any);

    expect(responseMissingUrl.status).toBe(400);

    const dataMissingUrl = await responseMissingUrl.json();
    expect(dataMissingUrl.success).toBe(false);
    expect(dataMissingUrl.message).toBe("Gitea URL and token are required");

    // Test missing token
    const requestMissingToken = new Request("http://localhost/api/gitea/test-connection", {
      method: "POST",
      headers: {
        "Content-Type": "application/json"
      },
      body: JSON.stringify({
        url: "https://gitea.example.com"
      })
    });

    const responseMissingToken = await POST({ request: requestMissingToken } as any);

    expect(responseMissingToken.status).toBe(400);

    const dataMissingToken = await responseMissingToken.json();
    expect(dataMissingToken.success).toBe(false);
    expect(dataMissingToken.message).toBe("Gitea URL and token are required");
  });

  test("returns 200 with user data on successful connection", async () => {
    const request = new Request("http://localhost/api/gitea/test-connection", {
      method: "POST",
      headers: {
        "Content-Type": "application/json"
      },
      body: JSON.stringify({
        url: "https://gitea.example.com",
        token: "valid-token"
      })
    });

    const response = await POST({ request } as any);

    expect(response.status).toBe(200);

    const data = await response.json();
    expect(data.success).toBe(true);
    expect(data.message).toBe("Successfully connected to Gitea as giteauser");
    expect(data.user).toEqual({
      login: "giteauser",
      name: "Gitea User",
      avatar_url: "https://gitea.example.com/avatar.png"
    });
  });

  test("returns 400 if username doesn't match authenticated user", async () => {
    const request = new Request("http://localhost/api/gitea/test-connection", {
      method: "POST",
      headers: {
        "Content-Type": "application/json"
      },
      body: JSON.stringify({
        url: "https://gitea.example.com",
        token: "valid-token",
        username: "differentuser"
      })
    });

    const response = await POST({ request } as any);

    expect(response.status).toBe(400);

    const data = await response.json();
    expect(data.success).toBe(false);
    expect(data.message).toBe("Token belongs to giteauser, not differentuser");
  });

  test("handles authentication errors", async () => {
    const request = new Request("http://localhost/api/gitea/test-connection", {
      method: "POST",
      headers: {
        "Content-Type": "application/json"
      },
      body: JSON.stringify({
        url: "https://gitea.example.com",
        token: "invalid-token"
      })
    });

    const response = await POST({ request } as any);

    expect(response.status).toBe(401);

    const data = await response.json();
    expect(data.success).toBe(false);
    expect(data.message).toBe("Invalid Gitea token");
  });
});
133 src/pages/api/github/test-connection.test.ts Normal file
@@ -0,0 +1,133 @@
import { describe, test, expect, mock, beforeEach, afterEach } from "bun:test";
import { POST } from "./test-connection";
import { Octokit } from "@octokit/rest";

// Mock the Octokit class
mock.module("@octokit/rest", () => {
  return {
    Octokit: mock(function() {
      return {
        users: {
          getAuthenticated: mock(() => Promise.resolve({
            data: {
              login: "testuser",
              name: "Test User",
              avatar_url: "https://example.com/avatar.png"
            }
          }))
        }
      };
    })
  };
});

describe("GitHub Test Connection API", () => {
  // Mock console.error to prevent test output noise
  let originalConsoleError: typeof console.error;

  beforeEach(() => {
    originalConsoleError = console.error;
    console.error = mock(() => {});
  });

  afterEach(() => {
    console.error = originalConsoleError;
  });

  test("returns 400 if token is missing", async () => {
    const request = new Request("http://localhost/api/github/test-connection", {
      method: "POST",
      headers: {
        "Content-Type": "application/json"
      },
      body: JSON.stringify({})
    });

    const response = await POST({ request } as any);

    expect(response.status).toBe(400);

    const data = await response.json();
    expect(data.success).toBe(false);
    expect(data.message).toBe("GitHub token is required");
  });

  test("returns 200 with user data on successful connection", async () => {
    const request = new Request("http://localhost/api/github/test-connection", {
      method: "POST",
      headers: {
        "Content-Type": "application/json"
      },
      body: JSON.stringify({
        token: "valid-token"
      })
    });

    const response = await POST({ request } as any);

    expect(response.status).toBe(200);

    const data = await response.json();
    expect(data.success).toBe(true);
    expect(data.message).toBe("Successfully connected to GitHub as testuser");
    expect(data.user).toEqual({
      login: "testuser",
      name: "Test User",
      avatar_url: "https://example.com/avatar.png"
    });
  });

  test("returns 400 if username doesn't match authenticated user", async () => {
    const request = new Request("http://localhost/api/github/test-connection", {
      method: "POST",
      headers: {
        "Content-Type": "application/json"
      },
      body: JSON.stringify({
        token: "valid-token",
        username: "differentuser"
      })
    });

    const response = await POST({ request } as any);

    expect(response.status).toBe(400);

    const data = await response.json();
    expect(data.success).toBe(false);
    expect(data.message).toBe("Token belongs to testuser, not differentuser");
  });

  test("handles authentication errors", async () => {
    // Mock Octokit to throw an error
    mock.module("@octokit/rest", () => {
      return {
        Octokit: mock(function() {
          return {
            users: {
              getAuthenticated: mock(() => Promise.reject(new Error("Bad credentials")))
            }
          };
        })
      };
    });

    const request = new Request("http://localhost/api/github/test-connection", {
      method: "POST",
      headers: {
        "Content-Type": "application/json"
      },
      body: JSON.stringify({
        token: "invalid-token"
      })
    });

    const response = await POST({ request } as any);

    expect(response.status).toBe(500);

    const data = await response.json();
    expect(data.success).toBe(false);
    expect(data.message).toContain("Bad credentials");
  });
});
154 src/pages/api/health.test.ts Normal file
@@ -0,0 +1,154 @@
import { describe, test, expect, mock, beforeEach, afterEach } from "bun:test";
import { GET } from "./health";
import * as dbModule from "@/lib/db";
import os from "os";

// Mock the database module
mock.module("@/lib/db", () => {
  return {
    db: {
      select: () => ({
        from: () => ({
          limit: () => Promise.resolve([{ test: 1 }])
        })
      })
    }
  };
});

// Mock the os functions individually
const originalPlatform = os.platform;
const originalVersion = os.version;
const originalArch = os.arch;
const originalTotalmem = os.totalmem;
const originalFreemem = os.freemem;

describe("Health API Endpoint", () => {
  beforeEach(() => {
    // Mock os functions
    os.platform = mock(() => "test-platform");
    os.version = mock(() => "test-version");
    os.arch = mock(() => "test-arch");
    os.totalmem = mock(() => 16 * 1024 * 1024 * 1024); // 16GB
    os.freemem = mock(() => 8 * 1024 * 1024 * 1024); // 8GB

    // Mock process.memoryUsage
    process.memoryUsage = mock(() => ({
      rss: 100 * 1024 * 1024, // 100MB
      heapTotal: 50 * 1024 * 1024, // 50MB
      heapUsed: 30 * 1024 * 1024, // 30MB
      external: 10 * 1024 * 1024, // 10MB
      arrayBuffers: 5 * 1024 * 1024, // 5MB
    }));

    // Mock process.env
    process.env.npm_package_version = "2.1.0";
  });

  afterEach(() => {
    // Restore original os functions
    os.platform = originalPlatform;
    os.version = originalVersion;
    os.arch = originalArch;
    os.totalmem = originalTotalmem;
    os.freemem = originalFreemem;
  });

  test("returns a successful health check response", async () => {
    const response = await GET({ request: new Request("http://localhost/api/health") } as any);

    expect(response.status).toBe(200);

    const data = await response.json();

    // Check the structure of the response
    expect(data.status).toBe("ok");
    expect(data.timestamp).toBeDefined();
    expect(data.version).toBe("2.1.0");

    // Check database status
    expect(data.database.connected).toBe(true);

    // Check system info
    expect(data.system.os.platform).toBe("test-platform");
    expect(data.system.os.version).toBe("test-version");
    expect(data.system.os.arch).toBe("test-arch");

    // Check memory info
    expect(data.system.memory.rss).toBe("100 MB");
    expect(data.system.memory.heapTotal).toBe("50 MB");
    expect(data.system.memory.heapUsed).toBe("30 MB");
    expect(data.system.memory.systemTotal).toBe("16 GB");
    expect(data.system.memory.systemFree).toBe("8 GB");

    // Check uptime
    expect(data.system.uptime.startTime).toBeDefined();
    expect(data.system.uptime.uptimeMs).toBeGreaterThanOrEqual(0);
    expect(data.system.uptime.formatted).toBeDefined();
  });

  test("handles database connection failures", async () => {
    // Mock database failure
    mock.module("@/lib/db", () => {
      return {
        db: {
          select: () => ({
            from: () => ({
              limit: () => Promise.reject(new Error("Database connection error"))
            })
          })
        }
      };
    });

    // Mock console.error to prevent test output noise
    const originalConsoleError = console.error;
    console.error = mock(() => {});

    try {
      const response = await GET({ request: new Request("http://localhost/api/health") } as any);

      // Should still return 200 even with DB error, as the service itself is running
      expect(response.status).toBe(200);

      const data = await response.json();

      // Status should still be ok since the service is running
      expect(data.status).toBe("ok");

      // Database should show as disconnected
      expect(data.database.connected).toBe(false);
      expect(data.database.message).toBe("Database connection error");
    } finally {
      // Restore console.error
      console.error = originalConsoleError;
    }
  });

  test("handles database connection failures with status 200", async () => {
    // The health endpoint should return 200 even if the database is down,
    // as the service itself is still running

    // Mock console.error to prevent test output noise
    const originalConsoleError = console.error;
    console.error = mock(() => {});

    try {
      const response = await GET({ request: new Request("http://localhost/api/health") } as any);

      // Should return 200 as the service is running
      expect(response.status).toBe(200);

      const data = await response.json();

      // Status should be ok
      expect(data.status).toBe("ok");

      // Database should show as disconnected
      expect(data.database.connected).toBe(false);
    } finally {
      // Restore console.error
      console.error = originalConsoleError;
    }
  });
});
179 src/pages/api/health.ts Normal file
@@ -0,0 +1,179 @@
import type { APIRoute } from "astro";
import { jsonResponse } from "@/lib/utils";
import { db } from "@/lib/db";
import { ENV } from "@/lib/config";
import { sql } from "drizzle-orm"; // sql tag for raw SQL queries
import os from "os";
import axios from "axios";

// Track when the server started
const serverStartTime = new Date();

// Cache for the latest version to avoid frequent GitHub API calls
interface VersionCache {
  latestVersion: string;
  timestamp: number;
}

let versionCache: VersionCache | null = null;
const CACHE_TTL = 3600000; // 1 hour in milliseconds

export const GET: APIRoute = async () => {
  try {
    // Check database connection by running a simple query
    const dbStatus = await checkDatabaseConnection();

    // Get system information
    const systemInfo = {
      uptime: getUptime(),
      memory: getMemoryUsage(),
      os: {
        platform: os.platform(),
        version: os.version(),
        arch: os.arch(),
      },
      env: ENV.NODE_ENV,
    };

    // Get current and latest versions
    const currentVersion = process.env.npm_package_version || "unknown";
    const latestVersion = await checkLatestVersion();

    // Build response
    const healthData = {
      status: "ok",
      timestamp: new Date().toISOString(),
      version: currentVersion,
      latestVersion: latestVersion,
      updateAvailable: latestVersion !== "unknown" &&
        currentVersion !== "unknown" &&
        latestVersion !== currentVersion,
      database: dbStatus,
      system: systemInfo,
    };

    return jsonResponse({
      data: healthData,
      status: 200,
    });
  } catch (error) {
    console.error("Health check failed:", error);

    return jsonResponse({
      data: {
        status: "error",
        timestamp: new Date().toISOString(),
        error: error instanceof Error ? error.message : "Unknown error",
        version: process.env.npm_package_version || "unknown",
        latestVersion: "unknown",
        updateAvailable: false,
      },
      status: 503, // Service Unavailable
    });
  }
};

/**
 * Check database connection by running a simple query
 */
async function checkDatabaseConnection() {
  try {
    // Run a simple query to check if the database is accessible
    const result = await db.select({ test: sql`1` }).from(sql`sqlite_master`).limit(1);

    return {
      connected: true,
      message: "Database connection successful",
    };
  } catch (error) {
    console.error("Database connection check failed:", error);

    return {
      connected: false,
      message: error instanceof Error ? error.message : "Database connection failed",
    };
  }
}

/**
 * Get server uptime information
 */
function getUptime() {
  const now = new Date();
  const uptimeMs = now.getTime() - serverStartTime.getTime();

  // Convert to human-readable format
  const seconds = Math.floor(uptimeMs / 1000);
  const minutes = Math.floor(seconds / 60);
  const hours = Math.floor(minutes / 60);
  const days = Math.floor(hours / 24);

  return {
    startTime: serverStartTime.toISOString(),
    uptimeMs,
    formatted: `${days}d ${hours % 24}h ${minutes % 60}m ${seconds % 60}s`,
  };
}

/**
 * Get memory usage information
 */
function getMemoryUsage() {
  const memoryUsage = process.memoryUsage();

  return {
    rss: formatBytes(memoryUsage.rss),
    heapTotal: formatBytes(memoryUsage.heapTotal),
    heapUsed: formatBytes(memoryUsage.heapUsed),
    external: formatBytes(memoryUsage.external),
    systemTotal: formatBytes(os.totalmem()),
    systemFree: formatBytes(os.freemem()),
  };
}

/**
 * Format bytes to human-readable format
 */
function formatBytes(bytes: number): string {
  if (bytes === 0) return '0 Bytes';

  const k = 1024;
  const sizes = ['Bytes', 'KB', 'MB', 'GB', 'TB'];
  const i = Math.floor(Math.log(bytes) / Math.log(k));

  return parseFloat((bytes / Math.pow(k, i)).toFixed(2)) + ' ' + sizes[i];
}

/**
 * Check for the latest version from GitHub releases
 */
async function checkLatestVersion(): Promise<string> {
  // Return cached version if available and not expired
  if (versionCache && (Date.now() - versionCache.timestamp) < CACHE_TTL) {
    return versionCache.latestVersion;
  }

  try {
    // Fetch the latest release from GitHub
    const response = await axios.get(
      'https://api.github.com/repos/arunavo4/gitea-mirror/releases/latest',
      { headers: { 'Accept': 'application/vnd.github.v3+json' } }
    );

    // Extract version from tag_name (remove 'v' prefix if present)
    const latestVersion = response.data.tag_name.replace(/^v/, '');

    // Update cache
    versionCache = {
      latestVersion,
      timestamp: Date.now()
    };

    return latestVersion;
  } catch (error) {
    console.error('Failed to check for latest version:', error);
    return 'unknown';
  }
}
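A quick client sketch for consuming this endpoint (the port is Astro's default dev port and only an assumption; field names follow the healthData object above):

// Sketch: poll /api/health and surface update availability.
const res = await fetch("http://localhost:4321/api/health");
const health = await res.json();

console.log(health.status);             // "ok" or "error"
console.log(health.database.connected); // true/false
if (health.updateAvailable) {
  console.log(`Update available: ${health.version} -> ${health.latestVersion}`);
}
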
109 src/pages/api/job/mirror-org.test.ts Normal file
@@ -0,0 +1,109 @@
import { describe, test, expect, mock, beforeEach, afterEach } from "bun:test";

// Create a mock POST function
const mockPOST = mock(async ({ request }) => {
  const body = await request.json();

  // Check for missing userId or organizationIds
  if (!body.userId || !body.organizationIds) {
    return new Response(
      JSON.stringify({
        error: "Missing userId or organizationIds."
      }),
      { status: 400 }
    );
  }

  // Success case
  return new Response(
    JSON.stringify({
      success: true,
      message: "Organization mirroring started",
      batchId: "test-batch-id"
    }),
    { status: 200 }
  );
});

// Create a mock module
const mockModule = {
  POST: mockPOST
};

describe("Organization Mirroring API", () => {
  // Mock console.log and console.error to prevent test output noise
  let originalConsoleLog: typeof console.log;
  let originalConsoleError: typeof console.error;

  beforeEach(() => {
    originalConsoleLog = console.log;
    originalConsoleError = console.error;
    console.log = mock(() => {});
    console.error = mock(() => {});
  });

  afterEach(() => {
    console.log = originalConsoleLog;
    console.error = originalConsoleError;
  });

  test("returns 400 if userId is missing", async () => {
    const request = new Request("http://localhost/api/job/mirror-org", {
      method: "POST",
      headers: {
        "Content-Type": "application/json"
      },
      body: JSON.stringify({
        organizationIds: ["org-id-1", "org-id-2"]
      })
    });

    const response = await mockModule.POST({ request } as any);

    expect(response.status).toBe(400);

    const data = await response.json();
    expect(data.error).toBe("Missing userId or organizationIds.");
  });

  test("returns 400 if organizationIds is missing", async () => {
    const request = new Request("http://localhost/api/job/mirror-org", {
      method: "POST",
      headers: {
        "Content-Type": "application/json"
      },
      body: JSON.stringify({
        userId: "user-id"
      })
    });

    const response = await mockModule.POST({ request } as any);

    expect(response.status).toBe(400);

    const data = await response.json();
    expect(data.error).toBe("Missing userId or organizationIds.");
  });

  test("returns 200 and starts mirroring organizations", async () => {
    const request = new Request("http://localhost/api/job/mirror-org", {
      method: "POST",
      headers: {
        "Content-Type": "application/json"
      },
      body: JSON.stringify({
        userId: "user-id",
        organizationIds: ["org-id-1", "org-id-2"]
      })
    });

    const response = await mockModule.POST({ request } as any);

    expect(response.status).toBe(200);

    const data = await response.json();
    expect(data.success).toBe(true);
    expect(data.message).toBe("Organization mirroring started");
    expect(data.batchId).toBe("test-batch-id");
  });
});
@@ -6,6 +6,8 @@ import { createGitHubClient } from "@/lib/github";
 import { mirrorGitHubOrgToGitea } from "@/lib/gitea";
 import { repoStatusEnum } from "@/types/Repository";
 import { type MembershipRole } from "@/types/organizations";
+import { processWithResilience } from "@/lib/utils/concurrency";
+import { v4 as uuidv4 } from "uuid";
 
 export const POST: APIRoute = async ({ request }) => {
   try {
@@ -61,31 +63,72 @@ export const POST: APIRoute = async ({ request }) => {
       );
     }
 
-    // Fire async mirroring without blocking response
+    // Fire async mirroring without blocking response, using parallel processing with resilience
     setTimeout(async () => {
-      for (const org of orgs) {
-        if (!config.githubConfig.token) {
-          throw new Error("GitHub token is missing in config.");
-        }
+      if (!config.githubConfig.token) {
+        throw new Error("GitHub token is missing in config.");
+      }
 
-        const octokit = createGitHubClient(config.githubConfig.token);
+      // Create a single Octokit instance to be reused
+      const octokit = createGitHubClient(config.githubConfig.token);
 
-        try {
+      // Define the concurrency limit - adjust based on API rate limits
+      // Using a lower concurrency for organizations since each org might contain many repos
+      const CONCURRENCY_LIMIT = 2;
+
+      // Generate a batch ID to group related organizations
+      const batchId = uuidv4();
+
+      // Process organizations in parallel with resilience to container restarts
+      await processWithResilience(
+        orgs,
+        async (org) => {
+          // Prepare organization data
+          const orgData = {
+            ...org,
+            status: repoStatusEnum.parse("imported"),
+            membershipRole: org.membershipRole as MembershipRole,
+            lastMirrored: org.lastMirrored ?? undefined,
+            errorMessage: org.errorMessage ?? undefined,
+          };
+
+          // Log the start of mirroring
+          console.log(`Starting mirror for organization: ${org.name}`);
+
+          // Mirror the organization
           await mirrorGitHubOrgToGitea({
             config,
             octokit,
-            organization: {
-              ...org,
-              status: repoStatusEnum.parse("imported"),
-              membershipRole: org.membershipRole as MembershipRole,
-              lastMirrored: org.lastMirrored ?? undefined,
-              errorMessage: org.errorMessage ?? undefined,
-            },
+            organization: orgData,
           });
-        } catch (error) {
-          console.error(`Mirror failed for organization ${org.name}:`, error);
+
+          return org;
+        },
+        {
+          userId: config.userId || "",
+          jobType: "mirror",
+          batchId,
+          getItemId: (org) => org.id,
+          getItemName: (org) => org.name,
+          concurrencyLimit: CONCURRENCY_LIMIT,
+          maxRetries: 2,
+          retryDelay: 3000,
+          checkpointInterval: 1, // Checkpoint after each organization
+          onProgress: (completed, total, result) => {
+            const percentComplete = Math.round((completed / total) * 100);
+            console.log(`Organization mirroring progress: ${percentComplete}% (${completed}/${total})`);
+
+            if (result) {
+              console.log(`Successfully mirrored organization: ${result.name}`);
+            }
+          },
+          onRetry: (org, error, attempt) => {
+            console.log(`Retrying organization ${org.name} (attempt ${attempt}): ${error.message}`);
+          }
         }
-      }
+      );
+
+      console.log("All organization mirroring tasks completed");
     }, 0);
 
     const responsePayload: MirrorOrgResponse = {
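One detail worth flagging in this hunk: setTimeout(..., 0) keeps the HTTP response fast, but a throw inside the callback (such as the missing-token check) surfaces as an unhandled rejection rather than an API error. A defensive wrapper (a sketch, not what the route does) would contain it:

// Sketch: contain background failures instead of letting them escape setTimeout.
setTimeout(() => {
  void (async () => {
    // ... same mirroring logic as above ...
  })().catch((error) => {
    console.error("Background mirroring failed:", error);
  });
}, 0);
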
109 src/pages/api/job/mirror-repo.test.ts Normal file
@@ -0,0 +1,109 @@
import { describe, test, expect, mock, beforeEach, afterEach } from "bun:test";

// Create a mock POST function
const mockPOST = mock(async ({ request }) => {
  const body = await request.json();

  // Check for missing userId or repositoryIds
  if (!body.userId || !body.repositoryIds) {
    return new Response(
      JSON.stringify({
        error: "Missing userId or repositoryIds."
      }),
      { status: 400 }
    );
  }

  // Success case
  return new Response(
    JSON.stringify({
      success: true,
      message: "Repository mirroring started",
      batchId: "test-batch-id"
    }),
    { status: 200 }
  );
});

// Create a mock module
const mockModule = {
  POST: mockPOST
};

describe("Repository Mirroring API", () => {
  // Mock console.log and console.error to prevent test output noise
  let originalConsoleLog: typeof console.log;
  let originalConsoleError: typeof console.error;

  beforeEach(() => {
    originalConsoleLog = console.log;
    originalConsoleError = console.error;
    console.log = mock(() => {});
    console.error = mock(() => {});
  });

  afterEach(() => {
    console.log = originalConsoleLog;
    console.error = originalConsoleError;
  });

  test("returns 400 if userId is missing", async () => {
    const request = new Request("http://localhost/api/job/mirror-repo", {
      method: "POST",
      headers: {
        "Content-Type": "application/json"
      },
      body: JSON.stringify({
        repositoryIds: ["repo-id-1", "repo-id-2"]
      })
    });

    const response = await mockModule.POST({ request } as any);

    expect(response.status).toBe(400);

    const data = await response.json();
    expect(data.error).toBe("Missing userId or repositoryIds.");
  });

  test("returns 400 if repositoryIds is missing", async () => {
    const request = new Request("http://localhost/api/job/mirror-repo", {
      method: "POST",
      headers: {
        "Content-Type": "application/json"
      },
      body: JSON.stringify({
        userId: "user-id"
      })
    });

    const response = await mockModule.POST({ request } as any);

    expect(response.status).toBe(400);

    const data = await response.json();
    expect(data.error).toBe("Missing userId or repositoryIds.");
  });

  test("returns 200 and starts mirroring repositories", async () => {
    const request = new Request("http://localhost/api/job/mirror-repo", {
      method: "POST",
      headers: {
        "Content-Type": "application/json"
      },
      body: JSON.stringify({
        userId: "user-id",
        repositoryIds: ["repo-id-1", "repo-id-2"]
      })
    });

    const response = await mockModule.POST({ request } as any);

    expect(response.status).toBe(200);

    const data = await response.json();
    expect(data.success).toBe(true);
    expect(data.message).toBe("Repository mirroring started");
    expect(data.batchId).toBe("test-batch-id");
  });
});
@@ -8,6 +8,8 @@ import {
|
|||||||
mirrorGitHubOrgRepoToGiteaOrg,
|
mirrorGitHubOrgRepoToGiteaOrg,
|
||||||
} from "@/lib/gitea";
|
} from "@/lib/gitea";
|
||||||
import { createGitHubClient } from "@/lib/github";
|
import { createGitHubClient } from "@/lib/github";
|
||||||
|
import { processWithResilience } from "@/lib/utils/concurrency";
|
||||||
|
import { v4 as uuidv4 } from "uuid";
|
||||||
|
|
||||||
export const POST: APIRoute = async ({ request }) => {
|
export const POST: APIRoute = async ({ request }) => {
|
||||||
try {
|
try {
|
@@ -63,52 +65,83 @@ export const POST: APIRoute = async ({ request }) => {
       );
     }
 
-    // Start async mirroring in background
+    // Start async mirroring in background with parallel processing and resilience
     setTimeout(async () => {
-      for (const repo of repos) {
-        if (!config.githubConfig.token) {
-          throw new Error("GitHub token is missing.");
-        }
-
-        const octokit = createGitHubClient(config.githubConfig.token);
-
-        try {
+      if (!config.githubConfig.token) {
+        throw new Error("GitHub token is missing.");
+      }
+
+      // Create a single Octokit instance to be reused
+      const octokit = createGitHubClient(config.githubConfig.token);
+
+      // Define the concurrency limit - adjust based on API rate limits
+      const CONCURRENCY_LIMIT = 3;
+
+      // Generate a batch ID to group related repositories
+      const batchId = uuidv4();
+
+      // Process repositories in parallel with resilience to container restarts
+      await processWithResilience(
+        repos,
+        async (repo) => {
+          // Prepare repository data
+          const repoData = {
+            ...repo,
+            status: repoStatusEnum.parse("imported"),
+            organization: repo.organization ?? undefined,
+            lastMirrored: repo.lastMirrored ?? undefined,
+            errorMessage: repo.errorMessage ?? undefined,
+            forkedFrom: repo.forkedFrom ?? undefined,
+            visibility: repositoryVisibilityEnum.parse(repo.visibility),
+            mirroredLocation: repo.mirroredLocation || "",
+          };
+
+          // Log the start of mirroring
+          console.log(`Starting mirror for repository: ${repo.name}`);
+
+          // Mirror the repository based on whether it's in an organization
           if (repo.organization && config.githubConfig.preserveOrgStructure) {
             await mirrorGitHubOrgRepoToGiteaOrg({
               config,
               octokit,
               orgName: repo.organization,
-              repository: {
-                ...repo,
-                status: repoStatusEnum.parse("imported"),
-                organization: repo.organization ?? undefined,
-                lastMirrored: repo.lastMirrored ?? undefined,
-                errorMessage: repo.errorMessage ?? undefined,
-                forkedFrom: repo.forkedFrom ?? undefined,
-                visibility: repositoryVisibilityEnum.parse(repo.visibility),
-                mirroredLocation: repo.mirroredLocation || "",
-              },
+              repository: repoData,
             });
           } else {
             await mirrorGithubRepoToGitea({
               octokit,
-              repository: {
-                ...repo,
-                status: repoStatusEnum.parse("imported"),
-                organization: repo.organization ?? undefined,
-                lastMirrored: repo.lastMirrored ?? undefined,
-                errorMessage: repo.errorMessage ?? undefined,
-                forkedFrom: repo.forkedFrom ?? undefined,
-                visibility: repositoryVisibilityEnum.parse(repo.visibility),
-                mirroredLocation: repo.mirroredLocation || "",
-              },
+              repository: repoData,
               config,
             });
           }
-        } catch (error) {
-          console.error(`Mirror failed for repo ${repo.name}:`, error);
-        }
-      }
+
+          return repo;
+        },
+        {
+          userId: config.userId || "",
+          jobType: "mirror",
+          batchId,
+          getItemId: (repo) => repo.id,
+          getItemName: (repo) => repo.name,
+          concurrencyLimit: CONCURRENCY_LIMIT,
+          maxRetries: 2,
+          retryDelay: 2000,
+          checkpointInterval: 1, // Checkpoint after each repository
+          onProgress: (completed, total, result) => {
+            const percentComplete = Math.round((completed / total) * 100);
+            console.log(`Mirroring progress: ${percentComplete}% (${completed}/${total})`);
+
+            if (result) {
+              console.log(`Successfully mirrored repository: ${result.name}`);
+            }
+          },
+          onRetry: (repo, error, attempt) => {
+            console.log(`Retrying repository ${repo.name} (attempt ${attempt}): ${error.message}`);
+          }
+        }
+      );
+
+      console.log("All repository mirroring tasks completed");
     }, 0);
 
     const responsePayload: MirrorRepoResponse = {
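Note: processWithResilience is imported from @/lib/utils/concurrency, whose implementation is not included in this diff. Inferring purely from the call site above, the helper appears to take a list of items, an async worker, and an options bag roughly shaped like the following type-only sketch (names and semantics are assumptions, not the library source):

// Type-only sketch inferred from the call site above; the real helper lives
// in the project's concurrency module and is not shown in this diff.
interface ResilienceOptions<T, R> {
  userId: string;               // job owner, presumably keyed to checkpoint records
  jobType: string;              // "mirror" here, "sync" in the sync route below
  batchId: string;              // groups related items (the uuidv4 above)
  getItemId: (item: T) => string;
  getItemName: (item: T) => string;
  concurrencyLimit: number;     // max tasks in flight
  maxRetries: number;           // retry attempts per item
  retryDelay: number;           // delay between attempts, in ms
  checkpointInterval: number;   // persist progress every N completed items
  onProgress?: (completed: number, total: number, result?: R) => void;
  onRetry?: (item: T, error: Error, attempt: number) => void;
}

declare function processWithResilience<T, R>(
  items: T[],
  worker: (item: T) => Promise<R>,
  options: ResilienceOptions<T, R>
): Promise<R[]>;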
Retry route (file path not shown in this excerpt):

@@ -10,6 +10,8 @@ import {
 import { createGitHubClient } from "@/lib/github";
 import { repoStatusEnum, repositoryVisibilityEnum } from "@/types/Repository";
 import type { RetryRepoRequest, RetryRepoResponse } from "@/types/retry";
+import { processWithRetry } from "@/lib/utils/concurrency";
+import { createMirrorJob } from "@/lib/helpers";
 
 export const POST: APIRoute = async ({ request }) => {
   try {
@@ -65,10 +67,21 @@ export const POST: APIRoute = async ({ request }) => {
       );
     }
 
-    // Start background retry
+    // Start background retry with parallel processing
     setTimeout(async () => {
-      for (const repo of repos) {
-        try {
+      // Create a single Octokit instance to be reused if needed
+      const octokit = config.githubConfig.token
+        ? createGitHubClient(config.githubConfig.token)
+        : null;
+
+      // Define the concurrency limit - adjust based on API rate limits
+      const CONCURRENCY_LIMIT = 3;
+
+      // Process repositories in parallel with retry capability
+      await processWithRetry(
+        repos,
+        async (repo) => {
+          // Prepare repository data
           const visibility = repositoryVisibilityEnum.parse(repo.visibility);
           const status = repoStatusEnum.parse(repo.status);
           const repoData = {
@@ -81,6 +94,20 @@ export const POST: APIRoute = async ({ request }) => {
             forkedFrom: repo.forkedFrom ?? undefined,
           };
 
+          // Log the start of retry operation
+          console.log(`Starting retry for repository: ${repo.name}`);
+
+          // Create a mirror job entry to track progress
+          await createMirrorJob({
+            userId: config.userId || "",
+            repositoryId: repo.id,
+            repositoryName: repo.name,
+            message: `Started retry operation for repository: ${repo.name}`,
+            details: `Repository ${repo.name} is now in the retry queue.`,
+            status: "imported",
+          });
+
+          // Determine if the repository exists in Gitea
           let owner = getGiteaRepoOwner({
             config,
             repository: repoData,
@@ -93,16 +120,21 @@ export const POST: APIRoute = async ({ request }) => {
           });
 
           if (present) {
+            // If the repository exists, sync it
             await syncGiteaRepo({ config, repository: repoData });
             console.log(`Synced existing repo: ${repo.name}`);
           } else {
+            // If the repository doesn't exist, mirror it
             if (!config.githubConfig.token) {
               throw new Error("GitHub token is missing.");
             }
 
+            if (!octokit) {
+              throw new Error("Octokit client is not initialized.");
+            }
+
             console.log(`Importing repo: ${repo.name} ${owner}`);
 
-            const octokit = createGitHubClient(config.githubConfig.token);
             if (repo.organization && config.githubConfig.preserveOrgStructure) {
               await mirrorGitHubOrgRepoToGiteaOrg({
                 config,
@@ -124,10 +156,28 @@ export const POST: APIRoute = async ({ request }) => {
               });
             }
           }
-        } catch (err) {
-          console.error(`Failed to retry repo ${repo.name}:`, err);
-        }
-      }
+
+          return repo;
+        },
+        {
+          concurrencyLimit: CONCURRENCY_LIMIT,
+          maxRetries: 2,
+          retryDelay: 2000,
+          onProgress: (completed, total, result) => {
+            const percentComplete = Math.round((completed / total) * 100);
+            console.log(`Retry progress: ${percentComplete}% (${completed}/${total})`);
+
+            if (result) {
+              console.log(`Successfully processed repository: ${result.name}`);
+            }
+          },
+          onRetry: (repo, error, attempt) => {
+            console.log(`Retrying repository ${repo.name} (attempt ${attempt}): ${error.message}`);
+          }
+        }
+      );
+
+      console.log("All repository retry tasks completed");
     }, 0);
 
     const responsePayload: RetryRepoResponse = {
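processWithRetry is likewise imported from @/lib/utils/concurrency without its implementation appearing in this diff. The underlying pattern, a fixed pool of workers draining a shared cursor and retrying failed items after a delay, can be sketched in a few lines (illustrative only; the project's actual helper may differ in signature and error handling):

async function processWithRetrySketch<T, R>(
  items: T[],
  worker: (item: T) => Promise<R>,
  opts: { concurrencyLimit: number; maxRetries: number; retryDelay: number }
): Promise<R[]> {
  const results: R[] = [];
  let next = 0;

  async function drain(): Promise<void> {
    while (next < items.length) {
      const item = items[next++];
      for (let attempt = 1; ; attempt++) {
        try {
          results.push(await worker(item));
          break;
        } catch (err) {
          // Give up once the initial attempt plus maxRetries retries all failed.
          if (attempt > opts.maxRetries) throw err;
          await new Promise((resolve) => setTimeout(resolve, opts.retryDelay));
        }
      }
    }
  }

  // Spawn at most concurrencyLimit workers; each pulls the next unclaimed item.
  await Promise.all(
    Array.from({ length: Math.min(opts.concurrencyLimit, items.length) }, () => drain())
  );
  return results;
}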
Sync route:

@@ -5,6 +5,8 @@ import { eq, inArray } from "drizzle-orm";
 import { repositoryVisibilityEnum, repoStatusEnum } from "@/types/Repository";
 import { syncGiteaRepo } from "@/lib/gitea";
 import type { SyncRepoResponse } from "@/types/sync";
+import { processWithResilience } from "@/lib/utils/concurrency";
+import { v4 as uuidv4 } from "uuid";
 
 export const POST: APIRoute = async ({ request }) => {
   try {
@@ -60,26 +62,65 @@ export const POST: APIRoute = async ({ request }) => {
       );
     }
 
-    // Start async mirroring in background
+    // Start async mirroring in background with parallel processing and resilience
     setTimeout(async () => {
-      for (const repo of repos) {
-        try {
-          await syncGiteaRepo({
-            config,
-            repository: {
-              ...repo,
-              status: repoStatusEnum.parse(repo.status),
-              organization: repo.organization ?? undefined,
-              lastMirrored: repo.lastMirrored ?? undefined,
-              errorMessage: repo.errorMessage ?? undefined,
-              forkedFrom: repo.forkedFrom ?? undefined,
-              visibility: repositoryVisibilityEnum.parse(repo.visibility),
-            },
-          });
-        } catch (error) {
-          console.error(`Sync failed for repo ${repo.name}:`, error);
-        }
-      }
+      // Define the concurrency limit - adjust based on API rate limits
+      const CONCURRENCY_LIMIT = 5;
+
+      // Generate a batch ID to group related repositories
+      const batchId = uuidv4();
+
+      // Process repositories in parallel with resilience to container restarts
+      await processWithResilience(
+        repos,
+        async (repo) => {
+          // Prepare repository data
+          const repoData = {
+            ...repo,
+            status: repoStatusEnum.parse(repo.status),
+            organization: repo.organization ?? undefined,
+            lastMirrored: repo.lastMirrored ?? undefined,
+            errorMessage: repo.errorMessage ?? undefined,
+            forkedFrom: repo.forkedFrom ?? undefined,
+            visibility: repositoryVisibilityEnum.parse(repo.visibility),
+          };
+
+          // Log the start of syncing
+          console.log(`Starting sync for repository: ${repo.name}`);
+
+          // Sync the repository
+          await syncGiteaRepo({
+            config,
+            repository: repoData,
+          });
+
+          return repo;
+        },
+        {
+          userId: config.userId || "",
+          jobType: "sync",
+          batchId,
+          getItemId: (repo) => repo.id,
+          getItemName: (repo) => repo.name,
+          concurrencyLimit: CONCURRENCY_LIMIT,
+          maxRetries: 2,
+          retryDelay: 2000,
+          checkpointInterval: 1, // Checkpoint after each repository
+          onProgress: (completed, total, result) => {
+            const percentComplete = Math.round((completed / total) * 100);
+            console.log(`Syncing progress: ${percentComplete}% (${completed}/${total})`);
+
+            if (result) {
+              console.log(`Successfully synced repository: ${result.name}`);
+            }
+          },
+          onRetry: (repo, error, attempt) => {
+            console.log(`Retrying sync for repository ${repo.name} (attempt ${attempt}): ${error.message}`);
+          }
+        }
+      );
+
+      console.log("All repository syncing tasks completed");
    }, 0);
 
     const responsePayload: SyncRepoResponse = {
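The sync route drives the same processWithResilience contract sketched after the mirror-route hunk above. The notable differences are the higher concurrency limit (5 instead of 3), the "sync" job type, and that repoData preserves each repository's existing status rather than forcing "imported".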
SSE events route:

@@ -1,5 +1,5 @@
 import type { APIRoute } from "astro";
-import { redisSubscriber } from "@/lib/redis";
+import { getNewEvents } from "@/lib/events";
 
 export const GET: APIRoute = async ({ request }) => {
   const url = new URL(request.url);
@@ -11,50 +11,89 @@ export const GET: APIRoute = async ({ request }) => {
 
   const channel = `mirror-status:${userId}`;
   let isClosed = false;
+  const POLL_INTERVAL = 5000; // Poll every 5 seconds (reduced from 2 seconds for low-traffic usage)
 
   const stream = new ReadableStream({
     start(controller) {
       const encoder = new TextEncoder();
+      let lastEventTime: Date | undefined = undefined;
+      let pollIntervalId: ReturnType<typeof setInterval> | null = null;
 
-      const handleMessage = (ch: string, message: string) => {
-        if (isClosed || ch !== channel) return;
+      // Function to send a message to the client
+      const sendMessage = (message: string) => {
+        if (isClosed) return;
         try {
-          controller.enqueue(encoder.encode(`data: ${message}\n\n`));
+          controller.enqueue(encoder.encode(message));
         } catch (err) {
          console.error("Stream enqueue error:", err);
         }
       };
 
-      redisSubscriber.subscribe(channel, (err) => {
-        if (err) {
-          isClosed = true;
-          controller.error(err);
-        }
-      });
-
-      redisSubscriber.on("message", handleMessage);
-
-      try {
-        controller.enqueue(encoder.encode(": connected\n\n"));
-      } catch (err) {
-        console.error("Initial enqueue error:", err);
-      }
+      // Function to poll for new events
+      const pollForEvents = async () => {
+        if (isClosed) return;
+
+        try {
+          console.log(`Polling for events for user ${userId} in channel ${channel}`);
+
+          // Get new events from SQLite
+          const events = await getNewEvents({
+            userId,
+            channel,
+            lastEventTime,
+          });
+
+          console.log(`Found ${events.length} new events`);
+
+          // Send events to client
+          if (events.length > 0) {
+            // Update last event time
+            lastEventTime = events[events.length - 1].createdAt;
+
+            // Send each event to the client
+            for (const event of events) {
+              console.log(`Sending event: ${JSON.stringify(event.payload)}`);
+              sendMessage(`data: ${JSON.stringify(event.payload)}\n\n`);
+            }
+          }
+        } catch (err) {
+          console.error("Error polling for events:", err);
+          sendMessage(`data: {"error": "Error polling for events"}\n\n`);
+        }
+      };
+
+      // Send initial connection message
+      sendMessage(": connected\n\n");
+
+      // Start polling for events
+      pollForEvents();
+
+      // Set up polling interval
+      pollIntervalId = setInterval(pollForEvents, POLL_INTERVAL);
+
+      // Send a heartbeat every 30 seconds to keep the connection alive
+      const heartbeatInterval = setInterval(() => {
+        if (!isClosed) {
+          sendMessage(": heartbeat\n\n");
+        } else {
+          clearInterval(heartbeatInterval);
+        }
+      }, 30000);
 
+      // Handle client disconnection
       request.signal?.addEventListener("abort", () => {
         if (!isClosed) {
           isClosed = true;
-          redisSubscriber.off("message", handleMessage);
-          redisSubscriber.unsubscribe(channel);
+          if (pollIntervalId) {
+            clearInterval(pollIntervalId);
+          }
           controller.close();
         }
       });
     },
     cancel() {
-      // extra safety in case cancel is triggered
-      if (!isClosed) {
-        isClosed = true;
-        redisSubscriber.unsubscribe(channel);
-      }
+      // Extra safety in case cancel is triggered
+      isClosed = true;
     },
   });
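The rewrite keeps the SSE wire format intact: comment frames (": connected", ": heartbeat") keep the connection alive, and JSON payloads arrive as "data:" frames. A browser client can therefore consume the stream with a plain EventSource. A minimal sketch follows; the endpoint path and query parameter are assumptions, since this excerpt shows only the handler body:

// Browser-side consumption sketch (assumed route path).
function subscribeToMirrorStatus(userId: string): EventSource {
  const source = new EventSource(`/api/events?userId=${encodeURIComponent(userId)}`);

  // Comment frames (": connected", ": heartbeat") never reach onmessage;
  // EventSource surfaces only "data:" frames.
  source.onmessage = (e) => {
    const payload = JSON.parse(e.data);
    console.log("mirror status update:", payload);
  };

  return source;
}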
56
src/pages/api/test-event.ts
Normal file
56
src/pages/api/test-event.ts
Normal file
@@ -0,0 +1,56 @@
|
|||||||
|
import type { APIRoute } from "astro";
|
||||||
|
import { publishEvent } from "@/lib/events";
|
||||||
|
import { v4 as uuidv4 } from "uuid";
|
||||||
|
|
||||||
|
export const POST: APIRoute = async ({ request }) => {
|
||||||
|
try {
|
||||||
|
const body = await request.json();
|
||||||
|
const { userId, message, status } = body;
|
||||||
|
|
||||||
|
if (!userId || !message || !status) {
|
||||||
|
return new Response(
|
||||||
|
JSON.stringify({
|
||||||
|
error: "Missing required fields: userId, message, status",
|
||||||
|
}),
|
||||||
|
{ status: 400 }
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
// Create a test event
|
||||||
|
const eventData = {
|
||||||
|
id: uuidv4(),
|
||||||
|
userId,
|
||||||
|
repositoryId: uuidv4(),
|
||||||
|
repositoryName: "test-repo",
|
||||||
|
message,
|
||||||
|
status,
|
||||||
|
timestamp: new Date(),
|
||||||
|
};
|
||||||
|
|
||||||
|
// Publish the event
|
||||||
|
const channel = `mirror-status:${userId}`;
|
||||||
|
await publishEvent({
|
||||||
|
userId,
|
||||||
|
channel,
|
||||||
|
payload: eventData,
|
||||||
|
});
|
||||||
|
|
||||||
|
return new Response(
|
||||||
|
JSON.stringify({
|
||||||
|
success: true,
|
||||||
|
message: "Event published successfully",
|
||||||
|
event: eventData,
|
||||||
|
}),
|
||||||
|
{ status: 200 }
|
||||||
|
);
|
||||||
|
} catch (error) {
|
||||||
|
console.error("Error publishing test event:", error);
|
||||||
|
return new Response(
|
||||||
|
JSON.stringify({
|
||||||
|
error: "Failed to publish event",
|
||||||
|
details: error instanceof Error ? error.message : String(error),
|
||||||
|
}),
|
||||||
|
{ status: 500 }
|
||||||
|
);
|
||||||
|
}
|
||||||
|
};
|
||||||
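Under Astro's file-based routing, src/pages/api/test-event.ts is served at /api/test-event, so the endpoint can be exercised with a simple fetch (field values below are placeholders):

// Exercising the new endpoint; values are placeholders.
const res = await fetch("/api/test-event", {
  method: "POST",
  headers: { "Content-Type": "application/json" },
  body: JSON.stringify({
    userId: "some-user-id",
    message: "Hello from the test endpoint",
    status: "imported",
  }),
});
console.log(await res.json()); // => { success: true, message: "Event published successfully", event: {...} }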
src/tests/setup.bun.ts (new file, 20 lines):

@@ -0,0 +1,20 @@
+/**
+ * Bun test setup file
+ * This file is automatically loaded before running tests
+ */
+
+import { afterEach, beforeEach } from "bun:test";
+
+// Clean up after each test
+afterEach(() => {
+  // Add any cleanup logic here
+});
+
+// Setup before each test
+beforeEach(() => {
+  // Add any setup logic here
+});
+
+// Add DOM testing support if needed
+// import { DOMParser } from "linkedom";
+// global.DOMParser = DOMParser;
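Despite the header comment, Bun only runs a setup file before tests when it is registered as a preload script, typically via preload = ["./src/tests/setup.bun.ts"] under [test] in bunfig.toml (that configuration is assumed here; it is not part of this diff). With the hooks preloaded, an ordinary bun:test spec runs between the shared beforeEach and afterEach:

// Illustrative bun:test spec (not part of the diff); the global
// beforeEach/afterEach from setup.bun.ts wrap each test once preloaded.
import { describe, expect, it } from "bun:test";

describe("setup hooks", () => {
  it("executes with the shared setup in place", () => {
    expect(1 + 1).toBe(2);
  });
});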