mirror of
https://github.com/RayLabsHQ/gitea-mirror.git
synced 2026-03-18 03:46:08 +03:00
Compare commits
71 Commits
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
d0693206c3 | ||
|
|
b079070c30 | ||
|
|
e68e9c38a8 | ||
|
|
534150ecf9 | ||
|
|
98da7065e0 | ||
|
|
58e0194aa6 | ||
|
|
7864c46279 | ||
|
|
e3970e53e1 | ||
|
|
be46cfdffa | ||
|
|
2e00a610cb | ||
|
|
61841dd7a5 | ||
|
|
5aa0f3260d | ||
|
|
d0efa200d9 | ||
|
|
c26b5574e0 | ||
|
|
89a6372565 | ||
|
|
f40cad4713 | ||
|
|
855906d990 | ||
|
|
08da526ddd | ||
|
|
2395e14382 | ||
|
|
91c1703bb5 | ||
|
|
6a548e3dac | ||
|
|
f28ac8fa09 | ||
|
|
5e86670a5b | ||
|
|
62d43df2ad | ||
|
|
cb7510f79d | ||
|
|
08c6302bf6 | ||
|
|
6e6c3fa124 | ||
|
|
85b1867490 | ||
|
|
545a575e1a | ||
|
|
ef13fefb69 | ||
|
|
ed59849392 | ||
|
|
5eb160861d | ||
|
|
6829bcff91 | ||
|
|
b1ca8c46bf | ||
|
|
888089b2d5 | ||
|
|
fb60449dc2 | ||
|
|
25854b04f9 | ||
|
|
c34056555f | ||
|
|
f4074a37ad | ||
|
|
6146d41197 | ||
|
|
4cce5b7cfe | ||
|
|
bc89b17a4c | ||
|
|
d023b255a7 | ||
|
|
71cc961f5c | ||
|
|
9bc7bbe33f | ||
|
|
6cc03364fb | ||
|
|
d623d81a44 | ||
|
|
5cc4dcfb29 | ||
|
|
893fae27d3 | ||
|
|
29051f3503 | ||
|
|
0a3ad4e7f5 | ||
|
|
f4d391b240 | ||
|
|
8280c6b337 | ||
|
|
bebbda9465 | ||
|
|
2496d6f6e0 | ||
|
|
179083aec4 | ||
|
|
aa74984fb0 | ||
|
|
18ab4cd53a | ||
|
|
e94bb86b61 | ||
|
|
3993d679e6 | ||
|
|
83cae16319 | ||
|
|
99ebe1a400 | ||
|
|
204d803937 | ||
|
|
2a08ae0b21 | ||
|
|
8dc7ae8bfc | ||
|
|
a4dbb49006 | ||
|
|
6531a9325d | ||
|
|
9968775210 | ||
|
|
0d63fd4dae | ||
|
|
109958342d | ||
|
|
491546a97c |
@@ -15,6 +15,7 @@ dist
|
||||
build
|
||||
.next
|
||||
out
|
||||
www
|
||||
|
||||
# Environment variables
|
||||
.env
|
||||
|
||||
@@ -47,6 +47,7 @@ DOCKER_TAG=latest
|
||||
# SKIP_FORKS=false
|
||||
# MIRROR_STARRED=false
|
||||
# STARRED_REPOS_ORG=starred # Organization name for starred repos
|
||||
# STARRED_REPOS_MODE=dedicated-org # dedicated-org | preserve-owner
|
||||
|
||||
# Organization Settings
|
||||
# MIRROR_ORGANIZATIONS=false
|
||||
@@ -66,6 +67,7 @@ DOCKER_TAG=latest
|
||||
|
||||
# Basic Gitea Settings
|
||||
# GITEA_URL=http://gitea:3000
|
||||
# GITEA_EXTERNAL_URL=https://gitea.example.com # Optional: used only for UI links
|
||||
# GITEA_TOKEN=your-local-gitea-token
|
||||
# GITEA_USERNAME=your-local-gitea-username
|
||||
# GITEA_ORGANIZATION=github-mirrors # Default organization for single-org strategy
|
||||
@@ -183,4 +185,4 @@ DOCKER_TAG=latest
|
||||
# ===========================================
|
||||
|
||||
# TLS/SSL Configuration
|
||||
# GITEA_SKIP_TLS_VERIFY=false # WARNING: Only use for testing
|
||||
# GITEA_SKIP_TLS_VERIFY=false # WARNING: Only use for testing
|
||||
|
||||
BIN
.github/screenshots/backup-strategy-ui.png
vendored
Normal file
BIN
.github/screenshots/backup-strategy-ui.png
vendored
Normal file
Binary file not shown.
|
After Width: | Height: | Size: 34 KiB |
6
.github/workflows/README.md
vendored
6
.github/workflows/README.md
vendored
@@ -30,15 +30,17 @@ This workflow runs on all branches and pull requests. It:
|
||||
|
||||
### Docker Build and Push (`docker-build.yml`)
|
||||
|
||||
This workflow builds and pushes Docker images to GitHub Container Registry (ghcr.io), but only when changes are merged to the main branch.
|
||||
This workflow builds Docker images on pushes and pull requests, and pushes to GitHub Container Registry (ghcr.io) when permissions allow (main/tags and same-repo PRs).
|
||||
|
||||
**When it runs:**
|
||||
- On push to the main branch
|
||||
- On tag creation (v*)
|
||||
- On pull requests (build + scan; push only for same-repo PRs)
|
||||
|
||||
**Key features:**
|
||||
- Builds multi-architecture images (amd64 and arm64)
|
||||
- Pushes images only on main branch, not for PRs
|
||||
- Pushes images for main/tags and same-repo PRs
|
||||
- Skips registry push for fork PRs (avoids package write permission failures)
|
||||
- Uses build caching to speed up builds
|
||||
- Creates multiple tags for each image (latest, semver, sha)
|
||||
|
||||
|
||||
7
.github/workflows/astro-build-test.yml
vendored
7
.github/workflows/astro-build-test.yml
vendored
@@ -6,11 +6,15 @@ on:
|
||||
paths-ignore:
|
||||
- 'README.md'
|
||||
- 'docs/**'
|
||||
- 'www/**'
|
||||
- 'helm/**'
|
||||
pull_request:
|
||||
branches: [ '*' ]
|
||||
paths-ignore:
|
||||
- 'README.md'
|
||||
- 'docs/**'
|
||||
- 'www/**'
|
||||
- 'helm/**'
|
||||
|
||||
permissions:
|
||||
contents: read
|
||||
@@ -20,6 +24,7 @@ jobs:
|
||||
build-and-test:
|
||||
name: Build and Test Astro Project
|
||||
runs-on: ubuntu-latest
|
||||
timeout-minutes: 10
|
||||
|
||||
steps:
|
||||
- name: Checkout repository
|
||||
@@ -28,7 +33,7 @@ jobs:
|
||||
- name: Setup Bun
|
||||
uses: oven-sh/setup-bun@v1
|
||||
with:
|
||||
bun-version: '1.2.16'
|
||||
bun-version: '1.3.6'
|
||||
|
||||
- name: Check lockfile and install dependencies
|
||||
run: |
|
||||
|
||||
7
.github/workflows/docker-build.yml
vendored
7
.github/workflows/docker-build.yml
vendored
@@ -36,6 +36,7 @@ env:
|
||||
jobs:
|
||||
docker:
|
||||
runs-on: ubuntu-latest
|
||||
timeout-minutes: 10
|
||||
|
||||
permissions:
|
||||
contents: write
|
||||
@@ -55,6 +56,7 @@ jobs:
|
||||
driver-opts: network=host
|
||||
|
||||
- name: Log into registry
|
||||
if: github.event_name != 'pull_request' || github.event.pull_request.head.repo.full_name == github.repository
|
||||
uses: docker/login-action@v3
|
||||
with:
|
||||
registry: ${{ env.REGISTRY }}
|
||||
@@ -105,7 +107,7 @@ jobs:
|
||||
with:
|
||||
context: .
|
||||
platforms: linux/amd64,linux/arm64
|
||||
push: true
|
||||
push: ${{ github.event_name != 'pull_request' || github.event.pull_request.head.repo.full_name == github.repository }}
|
||||
tags: ${{ steps.meta.outputs.tags }}
|
||||
labels: ${{ steps.meta.outputs.labels }}
|
||||
cache-from: type=gha
|
||||
@@ -128,13 +130,14 @@ jobs:
|
||||
|
||||
# Wait for image to be available in registry
|
||||
- name: Wait for image availability
|
||||
if: github.event_name != 'pull_request' || github.event.pull_request.head.repo.full_name == github.repository
|
||||
run: |
|
||||
echo "Waiting for image to be available in registry..."
|
||||
sleep 5
|
||||
|
||||
# Add comment to PR with image details
|
||||
- name: Comment PR with image tag
|
||||
if: github.event_name == 'pull_request'
|
||||
if: github.event_name == 'pull_request' && github.event.pull_request.head.repo.full_name == github.repository
|
||||
uses: actions/github-script@v7
|
||||
with:
|
||||
github-token: ${{ secrets.GITHUB_TOKEN }}
|
||||
|
||||
285
.github/workflows/e2e-tests.yml
vendored
Normal file
285
.github/workflows/e2e-tests.yml
vendored
Normal file
@@ -0,0 +1,285 @@
|
||||
name: E2E Integration Tests
|
||||
|
||||
on:
|
||||
push:
|
||||
branches: ["*"]
|
||||
paths-ignore:
|
||||
- "README.md"
|
||||
- "docs/**"
|
||||
- "CHANGELOG.md"
|
||||
- "LICENSE"
|
||||
- "www/**"
|
||||
- "helm/**"
|
||||
pull_request:
|
||||
branches: ["*"]
|
||||
paths-ignore:
|
||||
- "README.md"
|
||||
- "docs/**"
|
||||
- "CHANGELOG.md"
|
||||
- "LICENSE"
|
||||
- "www/**"
|
||||
- "helm/**"
|
||||
workflow_dispatch:
|
||||
inputs:
|
||||
debug_enabled:
|
||||
description: "Enable debug logging"
|
||||
required: false
|
||||
default: "false"
|
||||
type: boolean
|
||||
|
||||
permissions:
|
||||
contents: read
|
||||
actions: read
|
||||
|
||||
concurrency:
|
||||
group: e2e-${{ github.ref }}
|
||||
cancel-in-progress: true
|
||||
|
||||
env:
|
||||
GITEA_PORT: 3333
|
||||
FAKE_GITHUB_PORT: 4580
|
||||
GIT_SERVER_PORT: 4590
|
||||
APP_PORT: 4321
|
||||
BUN_VERSION: "1.3.6"
|
||||
|
||||
jobs:
|
||||
e2e-tests:
|
||||
name: E2E Integration Tests
|
||||
runs-on: ubuntu-latest
|
||||
timeout-minutes: 10
|
||||
|
||||
steps:
|
||||
- name: Checkout repository
|
||||
uses: actions/checkout@v4
|
||||
|
||||
- name: Setup Bun
|
||||
uses: oven-sh/setup-bun@v1
|
||||
with:
|
||||
bun-version: ${{ env.BUN_VERSION }}
|
||||
|
||||
- name: Setup Node.js
|
||||
uses: actions/setup-node@v4
|
||||
with:
|
||||
node-version: "22"
|
||||
|
||||
- name: Install dependencies
|
||||
run: |
|
||||
bun install
|
||||
echo "✓ Dependencies installed"
|
||||
|
||||
- name: Install Playwright
|
||||
run: |
|
||||
npx playwright install chromium
|
||||
npx playwright install-deps chromium
|
||||
echo "✓ Playwright ready"
|
||||
|
||||
- name: Create test git repositories
|
||||
run: |
|
||||
echo "Creating bare git repos for E2E testing..."
|
||||
bun run tests/e2e/create-test-repos.ts --output-dir tests/e2e/git-repos
|
||||
|
||||
if [ ! -f tests/e2e/git-repos/manifest.json ]; then
|
||||
echo "ERROR: Test git repos were not created (manifest.json missing)"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
echo "✓ Test repos created:"
|
||||
cat tests/e2e/git-repos/manifest.json | jq -r '.repos[] | " • \(.owner)/\(.name) — \(.description)"'
|
||||
|
||||
- name: Start Gitea and git-server containers
|
||||
run: |
|
||||
echo "Starting containers via docker compose..."
|
||||
docker compose -f tests/e2e/docker-compose.e2e.yml up -d
|
||||
|
||||
# Wait for git-server
|
||||
echo "Waiting for git HTTP server..."
|
||||
for i in $(seq 1 30); do
|
||||
if curl -sf http://localhost:${{ env.GIT_SERVER_PORT }}/manifest.json > /dev/null 2>&1; then
|
||||
echo "✓ Git HTTP server is ready"
|
||||
break
|
||||
fi
|
||||
if [ $i -eq 30 ]; then
|
||||
echo "ERROR: Git HTTP server did not start"
|
||||
docker compose -f tests/e2e/docker-compose.e2e.yml logs git-server
|
||||
exit 1
|
||||
fi
|
||||
sleep 1
|
||||
done
|
||||
|
||||
# Wait for Gitea
|
||||
echo "Waiting for Gitea to be ready..."
|
||||
for i in $(seq 1 60); do
|
||||
if curl -sf http://localhost:${{ env.GITEA_PORT }}/api/v1/version > /dev/null 2>&1; then
|
||||
version=$(curl -sf http://localhost:${{ env.GITEA_PORT }}/api/v1/version | jq -r '.version // "unknown"')
|
||||
echo "✓ Gitea is ready (version: $version)"
|
||||
break
|
||||
fi
|
||||
if [ $i -eq 60 ]; then
|
||||
echo "ERROR: Gitea did not become healthy within 120s"
|
||||
docker compose -f tests/e2e/docker-compose.e2e.yml logs gitea-e2e --tail=30
|
||||
exit 1
|
||||
fi
|
||||
sleep 2
|
||||
done
|
||||
|
||||
- name: Initialize database
|
||||
run: |
|
||||
bun run manage-db init
|
||||
echo "✓ Database initialized"
|
||||
|
||||
- name: Build application
|
||||
env:
|
||||
GH_API_URL: http://localhost:4580
|
||||
BETTER_AUTH_SECRET: e2e-test-secret
|
||||
run: |
|
||||
bun run build
|
||||
echo "✓ Build complete"
|
||||
|
||||
- name: Start fake GitHub API server
|
||||
run: |
|
||||
# Start with GIT_SERVER_URL pointing to the git-server container name
|
||||
# (Gitea will resolve it via Docker networking)
|
||||
PORT=${{ env.FAKE_GITHUB_PORT }} GIT_SERVER_URL="http://git-server" \
|
||||
npx tsx tests/e2e/fake-github-server.ts &
|
||||
echo $! > /tmp/fake-github.pid
|
||||
|
||||
echo "Waiting for fake GitHub API..."
|
||||
for i in $(seq 1 30); do
|
||||
if curl -sf http://localhost:${{ env.FAKE_GITHUB_PORT }}/___mgmt/health > /dev/null 2>&1; then
|
||||
echo "✓ Fake GitHub API is ready"
|
||||
break
|
||||
fi
|
||||
if [ $i -eq 30 ]; then
|
||||
echo "ERROR: Fake GitHub API did not start"
|
||||
exit 1
|
||||
fi
|
||||
sleep 1
|
||||
done
|
||||
|
||||
# Ensure clone URLs are set for the git-server container
|
||||
curl -sf -X POST http://localhost:${{ env.FAKE_GITHUB_PORT }}/___mgmt/set-clone-url \
|
||||
-H "Content-Type: application/json" \
|
||||
-d '{"url": "http://git-server"}' || true
|
||||
echo "✓ Clone URLs configured for git-server container"
|
||||
|
||||
- name: Start gitea-mirror application
|
||||
env:
|
||||
GH_API_URL: http://localhost:4580
|
||||
BETTER_AUTH_SECRET: e2e-test-secret
|
||||
BETTER_AUTH_URL: http://localhost:4321
|
||||
DATABASE_URL: file:data/gitea-mirror.db
|
||||
HOST: 0.0.0.0
|
||||
PORT: ${{ env.APP_PORT }}
|
||||
NODE_ENV: production
|
||||
PRE_SYNC_BACKUP_ENABLED: "false"
|
||||
ENCRYPTION_SECRET: "e2e-encryption-secret-32char!!"
|
||||
run: |
|
||||
# Re-init DB in case build step cleared it
|
||||
bun run manage-db init 2>/dev/null || true
|
||||
|
||||
bun run start &
|
||||
echo $! > /tmp/app.pid
|
||||
|
||||
echo "Waiting for gitea-mirror app..."
|
||||
for i in $(seq 1 90); do
|
||||
if curl -sf http://localhost:${{ env.APP_PORT }}/api/health > /dev/null 2>&1 || \
|
||||
curl -sf -o /dev/null -w "%{http_code}" http://localhost:${{ env.APP_PORT }}/ 2>/dev/null | grep -q "^[23]"; then
|
||||
echo "✓ gitea-mirror app is ready"
|
||||
break
|
||||
fi
|
||||
if ! kill -0 $(cat /tmp/app.pid) 2>/dev/null; then
|
||||
echo "ERROR: App process died"
|
||||
exit 1
|
||||
fi
|
||||
if [ $i -eq 90 ]; then
|
||||
echo "ERROR: gitea-mirror app did not start within 180s"
|
||||
exit 1
|
||||
fi
|
||||
sleep 2
|
||||
done
|
||||
|
||||
- name: Run E2E tests
|
||||
env:
|
||||
APP_URL: http://localhost:${{ env.APP_PORT }}
|
||||
GITEA_URL: http://localhost:${{ env.GITEA_PORT }}
|
||||
FAKE_GITHUB_URL: http://localhost:${{ env.FAKE_GITHUB_PORT }}
|
||||
GIT_SERVER_URL: http://localhost:${{ env.GIT_SERVER_PORT }}
|
||||
CI: true
|
||||
run: |
|
||||
mkdir -p tests/e2e/test-results
|
||||
npx playwright test \
|
||||
--config tests/e2e/playwright.config.ts \
|
||||
--reporter=github,html
|
||||
|
||||
- name: Diagnostic info on failure
|
||||
if: failure()
|
||||
run: |
|
||||
echo "═══════════════════════════════════════════════════════════"
|
||||
echo " Diagnostic Information"
|
||||
echo "═══════════════════════════════════════════════════════════"
|
||||
|
||||
echo ""
|
||||
echo "── Git server status ──"
|
||||
curl -sf http://localhost:${{ env.GIT_SERVER_PORT }}/manifest.json 2>/dev/null | jq . || echo "(unreachable)"
|
||||
|
||||
echo ""
|
||||
echo "── Gitea status ──"
|
||||
curl -sf http://localhost:${{ env.GITEA_PORT }}/api/v1/version 2>/dev/null || echo "(unreachable)"
|
||||
|
||||
echo ""
|
||||
echo "── Fake GitHub status ──"
|
||||
curl -sf http://localhost:${{ env.FAKE_GITHUB_PORT }}/___mgmt/health 2>/dev/null | jq . || echo "(unreachable)"
|
||||
|
||||
echo ""
|
||||
echo "── App status ──"
|
||||
curl -sf http://localhost:${{ env.APP_PORT }}/api/health 2>/dev/null || echo "(unreachable)"
|
||||
|
||||
echo ""
|
||||
echo "── Docker containers ──"
|
||||
docker compose -f tests/e2e/docker-compose.e2e.yml ps 2>/dev/null || true
|
||||
|
||||
echo ""
|
||||
echo "── Gitea container logs (last 50 lines) ──"
|
||||
docker compose -f tests/e2e/docker-compose.e2e.yml logs gitea-e2e --tail=50 2>/dev/null || echo "(no container)"
|
||||
|
||||
echo ""
|
||||
echo "── Git server logs (last 20 lines) ──"
|
||||
docker compose -f tests/e2e/docker-compose.e2e.yml logs git-server --tail=20 2>/dev/null || echo "(no container)"
|
||||
|
||||
echo ""
|
||||
echo "── Running processes ──"
|
||||
ps aux | grep -E "(fake-github|astro|bun|node)" | grep -v grep || true
|
||||
|
||||
- name: Upload Playwright report
|
||||
uses: actions/upload-artifact@v4
|
||||
if: always()
|
||||
with:
|
||||
name: e2e-playwright-report
|
||||
path: tests/e2e/playwright-report/
|
||||
retention-days: 14
|
||||
|
||||
- name: Upload test results
|
||||
uses: actions/upload-artifact@v4
|
||||
if: always()
|
||||
with:
|
||||
name: e2e-test-results
|
||||
path: tests/e2e/test-results/
|
||||
retention-days: 14
|
||||
|
||||
- name: Cleanup
|
||||
if: always()
|
||||
run: |
|
||||
# Stop background processes
|
||||
if [ -f /tmp/fake-github.pid ]; then
|
||||
kill $(cat /tmp/fake-github.pid) 2>/dev/null || true
|
||||
rm -f /tmp/fake-github.pid
|
||||
fi
|
||||
if [ -f /tmp/app.pid ]; then
|
||||
kill $(cat /tmp/app.pid) 2>/dev/null || true
|
||||
rm -f /tmp/app.pid
|
||||
fi
|
||||
|
||||
# Stop containers
|
||||
docker compose -f tests/e2e/docker-compose.e2e.yml down --volumes --remove-orphans 2>/dev/null || true
|
||||
|
||||
echo "✓ Cleanup complete"
|
||||
2
.github/workflows/helm-test.yml
vendored
2
.github/workflows/helm-test.yml
vendored
@@ -21,6 +21,7 @@ jobs:
|
||||
yamllint:
|
||||
name: Lint YAML
|
||||
runs-on: ubuntu-latest
|
||||
timeout-minutes: 10
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
- uses: actions/setup-python@v5
|
||||
@@ -35,6 +36,7 @@ jobs:
|
||||
helm-template:
|
||||
name: Helm lint & template
|
||||
runs-on: ubuntu-latest
|
||||
timeout-minutes: 10
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
- name: Setup Helm
|
||||
|
||||
46
.github/workflows/nix-build.yml
vendored
Normal file
46
.github/workflows/nix-build.yml
vendored
Normal file
@@ -0,0 +1,46 @@
|
||||
name: Nix Flake Check
|
||||
|
||||
on:
|
||||
push:
|
||||
branches: [main, nix]
|
||||
tags:
|
||||
- 'v*'
|
||||
paths-ignore:
|
||||
- 'README.md'
|
||||
- 'docs/**'
|
||||
- 'www/**'
|
||||
- 'helm/**'
|
||||
pull_request:
|
||||
branches: [main]
|
||||
paths-ignore:
|
||||
- 'README.md'
|
||||
- 'docs/**'
|
||||
- 'www/**'
|
||||
- 'helm/**'
|
||||
|
||||
permissions:
|
||||
contents: read
|
||||
|
||||
jobs:
|
||||
check:
|
||||
runs-on: ubuntu-latest
|
||||
timeout-minutes: 10
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
|
||||
- name: Install Nix
|
||||
uses: DeterminateSystems/nix-installer-action@main
|
||||
|
||||
- name: Setup Nix Cache
|
||||
uses: DeterminateSystems/magic-nix-cache-action@main
|
||||
|
||||
- name: Check flake
|
||||
run: nix flake check
|
||||
|
||||
- name: Show flake info
|
||||
run: nix flake show
|
||||
|
||||
- name: Build package
|
||||
if: github.ref == 'refs/heads/main' || startsWith(github.ref, 'refs/tags/v')
|
||||
run: nix build --print-build-logs
|
||||
20
.gitignore
vendored
20
.gitignore
vendored
@@ -32,3 +32,23 @@ certs/*.pem
|
||||
certs/*.cer
|
||||
!certs/README.md
|
||||
|
||||
# Nix build artifacts
|
||||
result
|
||||
result-*
|
||||
.direnv/
|
||||
|
||||
# E2E test artifacts
|
||||
tests/e2e/test-results/
|
||||
tests/e2e/playwright-report/
|
||||
tests/e2e/.auth/
|
||||
tests/e2e/e2e-storage-state.json
|
||||
tests/e2e/.fake-github.pid
|
||||
tests/e2e/.app.pid
|
||||
tests/e2e/git-repos/
|
||||
|
||||
# Playwright
|
||||
/test-results/
|
||||
/playwright-report/
|
||||
/blob-report/
|
||||
/playwright/.cache/
|
||||
/playwright/.auth/
|
||||
|
||||
32
Dockerfile
32
Dockerfile
@@ -1,17 +1,17 @@
|
||||
# syntax=docker/dockerfile:1.4
|
||||
|
||||
FROM oven/bun:1.3.1-alpine AS base
|
||||
FROM oven/bun:1.3.9-debian AS base
|
||||
WORKDIR /app
|
||||
RUN apk add --no-cache libc6-compat python3 make g++ gcc wget sqlite openssl ca-certificates
|
||||
RUN apt-get update && apt-get install -y --no-install-recommends \
|
||||
python3 make g++ gcc wget sqlite3 openssl ca-certificates \
|
||||
&& rm -rf /var/lib/apt/lists/*
|
||||
|
||||
# ----------------------------
|
||||
FROM base AS deps
|
||||
FROM base AS builder
|
||||
COPY package.json ./
|
||||
COPY bun.lock* ./
|
||||
RUN bun install --frozen-lockfile
|
||||
|
||||
# ----------------------------
|
||||
FROM deps AS builder
|
||||
COPY . .
|
||||
RUN bun run build
|
||||
RUN mkdir -p dist/scripts && \
|
||||
@@ -20,17 +20,22 @@ RUN mkdir -p dist/scripts && \
|
||||
done
|
||||
|
||||
# ----------------------------
|
||||
FROM deps AS pruner
|
||||
RUN bun install --production --frozen-lockfile
|
||||
FROM base AS pruner
|
||||
COPY package.json ./
|
||||
COPY bun.lock* ./
|
||||
RUN bun install --production --omit=peer --frozen-lockfile
|
||||
|
||||
# ----------------------------
|
||||
FROM base AS runner
|
||||
FROM oven/bun:1.3.9-debian AS runner
|
||||
WORKDIR /app
|
||||
RUN apt-get update && apt-get install -y --no-install-recommends \
|
||||
git git-lfs wget sqlite3 openssl ca-certificates \
|
||||
&& git lfs install \
|
||||
&& rm -rf /var/lib/apt/lists/*
|
||||
COPY --from=pruner /app/node_modules ./node_modules
|
||||
COPY --from=builder /app/dist ./dist
|
||||
COPY --from=builder /app/package.json ./package.json
|
||||
COPY --from=builder /app/docker-entrypoint.sh ./docker-entrypoint.sh
|
||||
COPY --from=builder /app/scripts ./scripts
|
||||
COPY --from=builder /app/drizzle ./drizzle
|
||||
|
||||
ENV NODE_ENV=production
|
||||
@@ -42,10 +47,11 @@ ENV DATABASE_URL=file:data/gitea-mirror.db
|
||||
RUN mkdir -p /app/certs && \
|
||||
chmod +x ./docker-entrypoint.sh && \
|
||||
mkdir -p /app/data && \
|
||||
addgroup --system --gid 1001 nodejs && \
|
||||
adduser --system --uid 1001 gitea-mirror && \
|
||||
groupadd --system --gid 1001 nodejs && \
|
||||
useradd --system --uid 1001 --gid 1001 --create-home --home-dir /home/gitea-mirror gitea-mirror && \
|
||||
chown -R gitea-mirror:nodejs /app/data && \
|
||||
chown -R gitea-mirror:nodejs /app/certs
|
||||
chown -R gitea-mirror:nodejs /app/certs && \
|
||||
chown -R gitea-mirror:nodejs /home/gitea-mirror
|
||||
|
||||
USER gitea-mirror
|
||||
|
||||
@@ -55,4 +61,4 @@ EXPOSE 4321
|
||||
HEALTHCHECK --interval=30s --timeout=5s --start-period=5s --retries=3 \
|
||||
CMD wget --no-verbose --tries=1 --spider http://localhost:4321/api/health || exit 1
|
||||
|
||||
ENTRYPOINT ["./docker-entrypoint.sh"]
|
||||
ENTRYPOINT ["./docker-entrypoint.sh"]
|
||||
|
||||
189
NIX.md
Normal file
189
NIX.md
Normal file
@@ -0,0 +1,189 @@
|
||||
# Nix Deployment Quick Reference
|
||||
|
||||
## TL;DR
|
||||
|
||||
```bash
|
||||
# From GitHub (no clone needed!)
|
||||
nix run --extra-experimental-features 'nix-command flakes' github:RayLabsHQ/gitea-mirror
|
||||
|
||||
# Or from local clone
|
||||
nix run --extra-experimental-features 'nix-command flakes' .#gitea-mirror
|
||||
```
|
||||
|
||||
Secrets auto-generate, database auto-initializes, and the web UI starts at http://localhost:4321.
|
||||
|
||||
**Note:** If you have flakes enabled in your nix config, you can omit `--extra-experimental-features 'nix-command flakes'`
|
||||
|
||||
---
|
||||
|
||||
## Installation Options
|
||||
|
||||
### 1. Run Without Installing (from GitHub)
|
||||
```bash
|
||||
# Latest version from main branch
|
||||
nix run --extra-experimental-features 'nix-command flakes' github:RayLabsHQ/gitea-mirror
|
||||
|
||||
# Pin to specific version
|
||||
nix run github:RayLabsHQ/gitea-mirror/vX.Y.Z
|
||||
```
|
||||
|
||||
### 2. Install to Profile
|
||||
```bash
|
||||
# Install from GitHub
|
||||
nix profile install --extra-experimental-features 'nix-command flakes' github:RayLabsHQ/gitea-mirror
|
||||
|
||||
# Run the installed binary
|
||||
gitea-mirror
|
||||
```
|
||||
|
||||
### 3. Use Local Clone
|
||||
```bash
|
||||
# Clone and run
|
||||
git clone https://github.com/RayLabsHQ/gitea-mirror.git
|
||||
cd gitea-mirror
|
||||
nix run --extra-experimental-features 'nix-command flakes' .#gitea-mirror
|
||||
```
|
||||
|
||||
### 4. NixOS System Service
|
||||
```nix
|
||||
# configuration.nix
|
||||
{
|
||||
inputs.gitea-mirror.url = "github:RayLabsHQ/gitea-mirror";
|
||||
|
||||
services.gitea-mirror = {
|
||||
enable = true;
|
||||
betterAuthUrl = "https://mirror.example.com"; # For production
|
||||
openFirewall = true;
|
||||
};
|
||||
}
|
||||
```
|
||||
|
||||
### 5. Development (Local Clone)
|
||||
```bash
|
||||
nix develop --extra-experimental-features 'nix-command flakes'
|
||||
# or
|
||||
direnv allow # Handles experimental features automatically
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## Enable Flakes Permanently (Recommended)
|
||||
|
||||
To avoid typing `--extra-experimental-features` every time, add to `~/.config/nix/nix.conf`:
|
||||
```
|
||||
experimental-features = nix-command flakes
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## What Gets Auto-Generated?
|
||||
|
||||
On first run, the wrapper automatically:
|
||||
|
||||
1. Creates `~/.local/share/gitea-mirror/` (or `$DATA_DIR`)
|
||||
2. Generates `BETTER_AUTH_SECRET` → `.better_auth_secret`
|
||||
3. Generates `ENCRYPTION_SECRET` → `.encryption_secret`
|
||||
4. Initializes SQLite database
|
||||
5. Runs startup recovery and repair scripts
|
||||
6. Starts the application
|
||||
|
||||
---
|
||||
|
||||
## Key Commands
|
||||
|
||||
```bash
|
||||
# Database management
|
||||
gitea-mirror-db init # Initialize database
|
||||
gitea-mirror-db check # Health check
|
||||
gitea-mirror-db fix # Fix issues
|
||||
|
||||
# Development (add --extra-experimental-features 'nix-command flakes' if needed)
|
||||
nix develop # Enter dev shell
|
||||
nix build # Build package
|
||||
nix flake check # Validate flake
|
||||
nix flake update # Update dependencies
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## Environment Variables
|
||||
|
||||
All vars from `docker-compose.alt.yml` are supported:
|
||||
|
||||
```bash
|
||||
DATA_DIR="$HOME/.local/share/gitea-mirror"
|
||||
PORT=4321
|
||||
HOST="0.0.0.0"
|
||||
BETTER_AUTH_URL="http://localhost:4321"
|
||||
|
||||
# Secrets (auto-generated if not set)
|
||||
BETTER_AUTH_SECRET=auto-generated
|
||||
ENCRYPTION_SECRET=auto-generated
|
||||
|
||||
# Concurrency (for perfect ordering, set both to 1)
|
||||
MIRROR_ISSUE_CONCURRENCY=3
|
||||
MIRROR_PULL_REQUEST_CONCURRENCY=5
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## NixOS Module Options
|
||||
|
||||
```nix
|
||||
services.gitea-mirror = {
|
||||
enable = true;
|
||||
package = ...; # Override package
|
||||
dataDir = "/var/lib/gitea-mirror"; # Data location
|
||||
user = "gitea-mirror"; # Service user
|
||||
group = "gitea-mirror"; # Service group
|
||||
host = "0.0.0.0"; # Bind address
|
||||
port = 4321; # Listen port
|
||||
betterAuthUrl = "http://..."; # External URL
|
||||
betterAuthTrustedOrigins = "..."; # CORS origins
|
||||
mirrorIssueConcurrency = 3; # Concurrency
|
||||
mirrorPullRequestConcurrency = 5; # Concurrency
|
||||
environmentFile = null; # Optional secrets file
|
||||
openFirewall = true; # Open firewall
|
||||
};
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## Comparison: Docker vs Nix
|
||||
|
||||
| Feature | Docker | Nix |
|
||||
|---------|--------|-----|
|
||||
| **Config Required** | BETTER_AUTH_SECRET | None (auto-generated) |
|
||||
| **Startup** | `docker-compose up` | `nix run .#gitea-mirror` |
|
||||
| **Service** | Docker daemon | systemd (NixOS) |
|
||||
| **Updates** | `docker pull` | `nix flake update` |
|
||||
| **Reproducible** | Image-based | Hash-based |
|
||||
|
||||
---
|
||||
|
||||
## Full Documentation
|
||||
|
||||
- **[docs/NIX_DEPLOYMENT.md](docs/NIX_DEPLOYMENT.md)** - Complete deployment guide
|
||||
- NixOS module configuration
|
||||
- Home Manager integration
|
||||
- Production deployment examples
|
||||
- Migration from Docker
|
||||
- Troubleshooting guide
|
||||
|
||||
- **[docs/NIX_DISTRIBUTION.md](docs/NIX_DISTRIBUTION.md)** - Distribution guide for maintainers
|
||||
- How users consume the package
|
||||
- CI build caching
|
||||
- Releasing new versions
|
||||
- Submitting to nixpkgs
|
||||
|
||||
---
|
||||
|
||||
## Key Features
|
||||
|
||||
- **Zero-config deployment** - Runs immediately without setup
|
||||
- **Auto-secret generation** - Secure secrets created and persisted
|
||||
- **Startup recovery** - Handles interrupted jobs automatically
|
||||
- **Graceful shutdown** - Proper signal handling
|
||||
- **Health checks** - Built-in monitoring support
|
||||
- **Security hardening** - NixOS module includes systemd protections
|
||||
- **Docker parity** - Same behavior as `docker-compose.alt.yml`
|
||||
92
README.md
92
README.md
@@ -40,6 +40,7 @@ First user signup becomes admin. Configure GitHub and Gitea through the web inte
|
||||
- 🔄 **Auto-discovery** - Automatically import new GitHub repositories (v3.4.0+)
|
||||
- 🧹 **Repository cleanup** - Auto-remove repos deleted from GitHub (v3.4.0+)
|
||||
- 🎯 **Proper mirror intervals** - Respects configured sync intervals (v3.4.0+)
|
||||
- 🛡️ **[Force-push protection](docs/FORCE_PUSH_PROTECTION.md)** - Smart detection with backup-on-demand or block-and-approve modes (Beta)
|
||||
- 🗑️ Automatic database cleanup with configurable retention
|
||||
- 🐳 Dockerized with multi-arch support (AMD64/ARM64)
|
||||
|
||||
@@ -112,7 +113,7 @@ docker compose up -d
|
||||
#### Using Pre-built Image Directly
|
||||
|
||||
```bash
|
||||
docker pull ghcr.io/raylabshq/gitea-mirror:v3.1.1
|
||||
docker pull ghcr.io/raylabshq/gitea-mirror:latest
|
||||
```
|
||||
|
||||
### Configuration Options
|
||||
@@ -150,6 +151,38 @@ bash -c "$(curl -fsSL https://raw.githubusercontent.com/community-scripts/Proxmo
|
||||
|
||||
See the [Proxmox VE Community Scripts](https://community-scripts.github.io/ProxmoxVE/scripts?id=gitea-mirror) for more details.
|
||||
|
||||
### Nix/NixOS
|
||||
|
||||
Zero-configuration deployment with Nix:
|
||||
|
||||
```bash
|
||||
# Run immediately - no setup needed!
|
||||
nix run --extra-experimental-features 'nix-command flakes' github:RayLabsHQ/gitea-mirror
|
||||
|
||||
# Or build and run locally
|
||||
nix build --extra-experimental-features 'nix-command flakes'
|
||||
./result/bin/gitea-mirror
|
||||
|
||||
# Or install to profile
|
||||
nix profile install --extra-experimental-features 'nix-command flakes' github:RayLabsHQ/gitea-mirror
|
||||
gitea-mirror
|
||||
```
|
||||
|
||||
**NixOS users** - add to your configuration:
|
||||
```nix
|
||||
{
|
||||
inputs.gitea-mirror.url = "github:RayLabsHQ/gitea-mirror";
|
||||
|
||||
services.gitea-mirror = {
|
||||
enable = true;
|
||||
betterAuthUrl = "https://mirror.example.com";
|
||||
openFirewall = true;
|
||||
};
|
||||
}
|
||||
```
|
||||
|
||||
Secrets auto-generate, database auto-initializes. See [NIX.md](NIX.md) for quick reference or [docs/NIX_DEPLOYMENT.md](docs/NIX_DEPLOYMENT.md) for full documentation.
|
||||
|
||||
### Manual Installation
|
||||
|
||||
```bash
|
||||
@@ -177,7 +210,7 @@ bun run dev
|
||||
3. **Customization**
|
||||
- Click edit buttons on organization cards to set custom destinations
|
||||
- Override individual repository destinations in the table view
|
||||
- Starred repositories automatically go to a dedicated organization
|
||||
- Starred repositories can go to a dedicated org or preserve source owner/org paths
|
||||
|
||||
## Advanced Features
|
||||
|
||||
@@ -250,6 +283,8 @@ CLEANUP_DRY_RUN=false # Set to true to test without changes
|
||||
**Important Notes**:
|
||||
- **Auto-Start**: When `SCHEDULE_ENABLED=true` or `GITEA_MIRROR_INTERVAL` is set, the service automatically imports all GitHub repositories and mirrors them on startup. No manual "Import" or "Mirror" button clicks required!
|
||||
- The scheduler checks every minute for tasks to run. The `GITEA_MIRROR_INTERVAL` determines how often each repository is actually synced. For example, with `8h`, each repo syncs every 8 hours from its last successful sync.
|
||||
- **Large repo bootstrap**: For first-time mirroring of large repositories (especially with metadata/LFS), avoid very short intervals (for example `5m`). Start with a longer interval (`1h` to `8h`) or temporarily disable scheduling during the initial import/mirror run, then enable your regular interval after the first pass completes.
|
||||
- **Why this matters**: If your Gitea instance takes a long time to complete migrations/imports, aggressive schedules can cause repeated retries and duplicate-looking mirror attempts.
|
||||
|
||||
**🛡️ Backup Protection Features**:
|
||||
- **No Accidental Deletions**: Repository cleanup is automatically skipped if GitHub is inaccessible (account deleted, banned, or API errors)
|
||||
@@ -267,6 +302,40 @@ CLEANUP_DRY_RUN=false # Set to true to test without changes
|
||||
|
||||
If using a reverse proxy (e.g., nginx proxy manager) and experiencing issues with JavaScript files not loading properly, try enabling HTTP/2 support in your proxy configuration. While not required by the application, some proxy configurations may have better compatibility with HTTP/2 enabled. See [issue #43](https://github.com/RayLabsHQ/gitea-mirror/issues/43) for reference.
|
||||
|
||||
### Mirror Token Rotation (GitHub Token Changed)
|
||||
|
||||
For existing pull-mirror repositories, changing the GitHub token in Gitea Mirror does not always update stored mirror credentials in Gitea/Forgejo for already-created repositories.
|
||||
|
||||
If sync logs show authentication failures (for example `terminal prompts disabled`), do one of the following:
|
||||
|
||||
1. In Gitea/Forgejo, open repository **Settings → Mirror Settings** and update the mirror authorization password/token.
|
||||
2. Or delete and re-mirror the repository from Gitea Mirror so it is recreated with current credentials.
|
||||
|
||||
### Re-sync Metadata After Changing Mirror Options
|
||||
|
||||
If you enable metadata options (issues/PRs/labels/milestones/releases) after repositories were already mirrored:
|
||||
|
||||
1. Go to **Repositories**, select the repositories, and click **Sync** to run a fresh sync pass.
|
||||
2. For a full metadata refresh, use **Re-run Metadata** on selected repositories. This clears metadata sync state for those repos and immediately starts Sync.
|
||||
3. If some repositories still miss metadata, reset metadata sync state in SQLite and sync again:
|
||||
|
||||
```bash
|
||||
sqlite3 data/gitea-mirror.db "UPDATE repositories SET metadata = NULL;"
|
||||
```
|
||||
|
||||
This clears per-repository metadata completion flags so the next sync can re-run metadata import steps.
|
||||
|
||||
### Mirror Interval vs Gitea/Forgejo `MIN_INTERVAL`
|
||||
|
||||
Gitea Mirror treats the interval configured in **Configuration** (or `GITEA_MIRROR_INTERVAL`) as the source of truth and applies it to mirrored repositories during sync.
|
||||
|
||||
If your Gitea/Forgejo server has `mirror.MIN_INTERVAL` set to a higher value (for example `24h`) and Gitea Mirror is set lower (for example `8h`), sync/mirror operations can fail when updating mirror settings.
|
||||
|
||||
To avoid this:
|
||||
|
||||
1. Set Gitea Mirror interval to a value greater than or equal to your server `MIN_INTERVAL`.
|
||||
2. Do not rely on manual per-repository mirror interval edits in Gitea/Forgejo, because Gitea Mirror will overwrite them on sync.
|
||||
|
||||
## Development
|
||||
|
||||
```bash
|
||||
@@ -303,6 +372,20 @@ bun run build
|
||||
- Never stored in plaintext
|
||||
- Secure cookie-based session management
|
||||
|
||||
### Admin Password Recovery (CLI)
|
||||
If email delivery is not configured, an admin with server access can reset a user password from the command line:
|
||||
|
||||
```bash
|
||||
bun run reset-password -- --email=user@example.com --new-password='new-secure-password'
|
||||
```
|
||||
|
||||
What this does:
|
||||
- Updates the credential password hash for the matching user
|
||||
- Creates a credential account if one does not already exist
|
||||
- Invalidates all active sessions for that user (forces re-login)
|
||||
|
||||
Use this only from trusted server/admin environments.
|
||||
|
||||
## Authentication
|
||||
|
||||
Gitea Mirror supports multiple authentication methods. **Email/password authentication is the default and always enabled.**
|
||||
@@ -401,7 +484,7 @@ Contributions are welcome! Please read our [Contributing Guidelines](CONTRIBUTIN
|
||||
|
||||
## License
|
||||
|
||||
GNU General Public License v3.0 - see [LICENSE](LICENSE) file for details.
|
||||
GNU Affero General Public License v3.0 (AGPL-3.0) - see [LICENSE](LICENSE) file for details.
|
||||
|
||||
## Star History
|
||||
|
||||
@@ -416,7 +499,8 @@ GNU General Public License v3.0 - see [LICENSE](LICENSE) file for details.
|
||||
## Support
|
||||
|
||||
- 📖 [Documentation](https://github.com/RayLabsHQ/gitea-mirror/tree/main/docs)
|
||||
- 🔐 [Custom CA Certificates](docs/CA_CERTIFICATES.md)
|
||||
- 🔐 [Environment Variables](docs/ENVIRONMENT_VARIABLES.md)
|
||||
- 🛡️ [Force-Push Protection](docs/FORCE_PUSH_PROTECTION.md)
|
||||
- 🐛 [Report Issues](https://github.com/RayLabsHQ/gitea-mirror/issues)
|
||||
- 💬 [Discussions](https://github.com/RayLabsHQ/gitea-mirror/discussions)
|
||||
- 🔧 [Proxmox VE Script](https://community-scripts.github.io/ProxmoxVE/scripts?id=gitea-mirror)
|
||||
|
||||
@@ -3,4 +3,7 @@
|
||||
timeout = 5000
|
||||
|
||||
# Preload the setup file
|
||||
preload = ["./src/tests/setup.bun.ts"]
|
||||
preload = ["./src/tests/setup.bun.ts"]
|
||||
|
||||
# Only run tests in src/ directory (excludes tests/e2e/ which are Playwright tests)
|
||||
root = "./src/"
|
||||
@@ -325,8 +325,8 @@ bun test
|
||||
|
||||
4. **Create release**:
|
||||
```bash
|
||||
git tag v2.23.0
|
||||
git push origin v2.23.0
|
||||
git tag vX.Y.Z
|
||||
git push origin vX.Y.Z
|
||||
```
|
||||
|
||||
5. **Create GitHub release**
|
||||
@@ -349,6 +349,6 @@ git push origin v2.23.0
|
||||
|
||||
## Getting Help
|
||||
|
||||
- Check existing [issues](https://github.com/yourusername/gitea-mirror/issues)
|
||||
- Join [discussions](https://github.com/yourusername/gitea-mirror/discussions)
|
||||
- Read the [FAQ](./FAQ.md)
|
||||
- Check existing [issues](https://github.com/RayLabsHQ/gitea-mirror/issues)
|
||||
- Join [discussions](https://github.com/RayLabsHQ/gitea-mirror/discussions)
|
||||
- Review project docs in [docs/README.md](./README.md)
|
||||
|
||||
@@ -62,6 +62,7 @@ Settings for connecting to and configuring GitHub repository sources.
|
||||
| `SKIP_FORKS` | Skip forked repositories | `false` | `true`, `false` |
|
||||
| `MIRROR_STARRED` | Mirror starred repositories | `false` | `true`, `false` |
|
||||
| `STARRED_REPOS_ORG` | Organization name for starred repos | `starred` | Any string |
|
||||
| `STARRED_REPOS_MODE` | How starred repos are mirrored | `dedicated-org` | `dedicated-org`, `preserve-owner` |
|
||||
|
||||
### Organization Settings
|
||||
|
||||
@@ -77,6 +78,7 @@ Settings for connecting to and configuring GitHub repository sources.
|
||||
| Variable | Description | Default | Options |
|
||||
|----------|-------------|---------|---------|
|
||||
| `SKIP_STARRED_ISSUES` | Enable lightweight mode for starred repos (skip issues) | `false` | `true`, `false` |
|
||||
| `AUTO_MIRROR_STARRED` | Automatically mirror starred repos during scheduled syncs and "Mirror All". When `false`, starred repos are imported for browsing but must be mirrored individually. | `false` | `true`, `false` |
|
||||
|
||||
## Gitea Configuration
|
||||
|
||||
@@ -87,6 +89,7 @@ Settings for the destination Gitea instance.
|
||||
| Variable | Description | Default | Options |
|
||||
|----------|-------------|---------|---------|
|
||||
| `GITEA_URL` | Gitea instance URL | - | Valid URL |
|
||||
| `GITEA_EXTERNAL_URL` | Optional external/browser URL used for dashboard links. API and mirroring still use `GITEA_URL`. | - | Valid URL |
|
||||
| `GITEA_TOKEN` | Gitea access token | - | - |
|
||||
| `GITEA_USERNAME` | Gitea username | - | - |
|
||||
| `GITEA_ORGANIZATION` | Default organization for single-org strategy | `github-mirrors` | Any string |
|
||||
|
||||
179
docs/FORCE_PUSH_PROTECTION.md
Normal file
179
docs/FORCE_PUSH_PROTECTION.md
Normal file
@@ -0,0 +1,179 @@
|
||||
# Force-Push Protection
|
||||
|
||||
This document describes the smart force-push protection system introduced in gitea-mirror v3.11.0+.
|
||||
|
||||
## The Problem
|
||||
|
||||
GitHub repositories can be force-pushed at any time — rewriting history, deleting branches, or replacing commits entirely. When gitea-mirror syncs a force-pushed repo, the old history in Gitea is silently overwritten. Files, commits, and branches disappear with no way to recover them.
|
||||
|
||||
The original workaround (`backupBeforeSync: true`) created a full git bundle backup before **every** sync. This doesn't scale — a user with 100+ GiB of mirrors would need up to 2 TB of backup storage with default retention settings, even though force-pushes are rare.
|
||||
|
||||
## Solution: Smart Detection
|
||||
|
||||
Instead of backing up everything every time, the system detects force-pushes **before** they happen and only acts when needed.
|
||||
|
||||
### How Detection Works
|
||||
|
||||
Before each sync, the app compares branch SHAs between Gitea (the mirror) and GitHub (the source):
|
||||
|
||||
1. **Fetch branches from both sides** — lightweight API calls to get branch names and their latest commit SHAs
|
||||
2. **Compare each branch**:
|
||||
- SHAs match → nothing changed, no action needed
|
||||
- SHAs differ → check if the change is a normal push or a force-push
|
||||
3. **Ancestry check** — for branches with different SHAs, call GitHub's compare API to determine if the new SHA is a descendant of the old one:
|
||||
- **Fast-forward** (new SHA descends from old) → normal push, safe to sync
|
||||
- **Diverged** (histories split) → force-push detected
|
||||
- **404** (old SHA doesn't exist on GitHub anymore) → history was rewritten, force-push detected
|
||||
- **Branch deleted on GitHub** → flagged as destructive change
|
||||
|
||||
### What Happens on Detection
|
||||
|
||||
Depends on the configured strategy (see below):
|
||||
- **Backup strategies** (`always`, `on-force-push`): create a git bundle snapshot, then sync
|
||||
- **Block strategy** (`block-on-force-push`): halt the sync, mark the repo as `pending-approval`, wait for user action
|
||||
|
||||
### Fail-Open Design
|
||||
|
||||
If detection itself fails (GitHub rate limits, network errors, API outages), sync proceeds normally. Detection never blocks a sync due to its own failure. Individual branch check failures are skipped — one flaky branch doesn't affect the others.
|
||||
|
||||
## Backup Strategies
|
||||
|
||||
Configure via **Settings → GitHub Configuration → Destructive Update Protection**.
|
||||
|
||||
| Strategy | What It Does | Storage Cost | Best For |
|
||||
|---|---|---|---|
|
||||
| **Disabled** | No detection, no backups | Zero | Repos you don't care about losing |
|
||||
| **Always Backup** | Snapshot before every sync (original behavior) | High | Small mirror sets, maximum safety |
|
||||
| **Smart** (default) | Detect force-pushes, backup only when found | Near-zero normally | Most users — efficient protection |
|
||||
| **Block & Approve** | Detect force-pushes, block sync until approved | Zero | Critical repos needing manual review |
|
||||
|
||||
### Strategy Details
|
||||
|
||||
#### Disabled
|
||||
|
||||
Syncs proceed without any detection or backup. If a force-push happens on GitHub, the mirror silently overwrites.
|
||||
|
||||
#### Always Backup
|
||||
|
||||
Creates a git bundle snapshot before every sync regardless of whether a force-push occurred. This is the legacy behavior (equivalent to the old `backupBeforeSync: true`). Safe but expensive for large mirror sets.
|
||||
|
||||
#### Smart (`on-force-push`) — Recommended
|
||||
|
||||
Runs the force-push detection before each sync. On normal days (no force-pushes), syncs proceed without any backup overhead. When a force-push is detected, a snapshot is created before the sync runs.
|
||||
|
||||
This gives you protection when it matters with near-zero cost when it doesn't.
|
||||
|
||||
#### Block & Approve (`block-on-force-push`)
|
||||
|
||||
Runs detection and, when a force-push is found, **blocks the sync entirely**. The repository is marked as `pending-approval` and excluded from future scheduled syncs until you take action:
|
||||
|
||||
- **Approve**: creates a backup first, then syncs (safe)
|
||||
- **Dismiss**: clears the flag and resumes normal syncing (no backup)
|
||||
|
||||
Use this for repos where you want manual control over destructive changes.
|
||||
|
||||
## Additional Settings
|
||||
|
||||
These appear when any non-disabled strategy is selected:
|
||||
|
||||
### Snapshot Retention Count
|
||||
|
||||
How many backup snapshots to keep per repository. Oldest snapshots are deleted when this limit is exceeded. Default: **20**.
|
||||
|
||||
### Snapshot Directory
|
||||
|
||||
Where git bundle backups are stored. Default: **`data/repo-backups`**. Bundles are organized as `<directory>/<owner>/<repo>/<timestamp>.bundle`.
|
||||
|
||||
### Block Sync on Snapshot Failure
|
||||
|
||||
Available for **Always Backup** and **Smart** strategies. When enabled, if the snapshot creation fails (disk full, permissions error, etc.), the sync is also blocked. When disabled, sync continues even if the snapshot couldn't be created.
|
||||
|
||||
Recommended: **enabled** if you rely on backups for recovery.
|
||||
|
||||
## Backward Compatibility
|
||||
|
||||
The old `backupBeforeSync` boolean is still recognized:
|
||||
|
||||
| Old Setting | New Equivalent |
|
||||
|---|---|
|
||||
| `backupBeforeSync: true` | `backupStrategy: "always"` |
|
||||
| `backupBeforeSync: false` | `backupStrategy: "disabled"` |
|
||||
| Neither set | `backupStrategy: "on-force-push"` (new default) |
|
||||
|
||||
Existing configurations are automatically mapped. The old field is deprecated but will continue to work.
|
||||
|
||||
## Environment Variables
|
||||
|
||||
No new environment variables are required. The backup strategy is configured through the web UI and stored in the database alongside other config.
|
||||
|
||||
## API
|
||||
|
||||
### Approve/Dismiss Blocked Repos
|
||||
|
||||
When using the `block-on-force-push` strategy, repos that are blocked can be managed via the API:
|
||||
|
||||
```bash
|
||||
# Approve sync (creates backup first, then syncs)
|
||||
curl -X POST http://localhost:4321/api/job/approve-sync \
|
||||
-H "Content-Type: application/json" \
|
||||
-H "Cookie: <session>" \
|
||||
-d '{"repositoryIds": ["<id>"], "action": "approve"}'
|
||||
|
||||
# Dismiss (clear the block, resume normal syncing)
|
||||
curl -X POST http://localhost:4321/api/job/approve-sync \
|
||||
-H "Content-Type: application/json" \
|
||||
-H "Cookie: <session>" \
|
||||
-d '{"repositoryIds": ["<id>"], "action": "dismiss"}'
|
||||
```
|
||||
|
||||
Blocked repos also show an **Approve** / **Dismiss** button in the repository table UI.
|
||||
|
||||
## Architecture
|
||||
|
||||
### Key Files
|
||||
|
||||
| File | Purpose |
|
||||
|---|---|
|
||||
| `src/lib/utils/force-push-detection.ts` | Core detection: fetch branches, compare SHAs, check ancestry |
|
||||
| `src/lib/repo-backup.ts` | Strategy resolver, backup decision logic, bundle creation |
|
||||
| `src/lib/gitea-enhanced.ts` | Sync flow integration (calls detection + backup before mirror-sync) |
|
||||
| `src/pages/api/job/approve-sync.ts` | Approve/dismiss API endpoint |
|
||||
| `src/components/config/GitHubConfigForm.tsx` | Strategy selector UI |
|
||||
| `src/components/repositories/RepositoryTable.tsx` | Pending-approval badge + action buttons |
|
||||
|
||||
### Detection Flow
|
||||
|
||||
```
|
||||
syncGiteaRepoEnhanced()
|
||||
│
|
||||
├─ Resolve backup strategy (config → backupStrategy → backupBeforeSync → default)
|
||||
│
|
||||
├─ If strategy needs detection ("on-force-push" or "block-on-force-push"):
|
||||
│ │
|
||||
│ ├─ fetchGiteaBranches() — GET /api/v1/repos/{owner}/{repo}/branches
|
||||
│ ├─ fetchGitHubBranches() — octokit.paginate(repos.listBranches)
|
||||
│ │
|
||||
│ └─ For each Gitea branch where SHA differs:
|
||||
│ └─ checkAncestry() — octokit.repos.compareCommits()
|
||||
│ ├─ "ahead" or "identical" → fast-forward (safe)
|
||||
│ ├─ "diverged" or "behind" → force-push detected
|
||||
│ └─ 404/422 → old SHA gone → force-push detected
|
||||
│
|
||||
├─ If "block-on-force-push" + detected:
|
||||
│ └─ Set repo status to "pending-approval", return early
|
||||
│
|
||||
├─ If backup needed (always, or on-force-push + detected):
|
||||
│ └─ Create git bundle snapshot
|
||||
│
|
||||
└─ Proceed to mirror-sync
|
||||
```
|
||||
|
||||
## Troubleshooting
|
||||
|
||||
**Repos stuck in "pending-approval"**: Use the Approve or Dismiss buttons in the repository table, or call the approve-sync API endpoint.
|
||||
|
||||
**Detection always skipped**: Check the activity log for skip reasons. Common causes: Gitea repo not yet mirrored (first sync), GitHub API rate limits, network errors. All are fail-open by design.
|
||||
|
||||
**Backups consuming too much space**: Lower the retention count, or switch from "Always Backup" to "Smart" which only creates backups on actual force-pushes.
|
||||
|
||||
**False positives**: The detection compares branch-by-branch. A rebase (which is a force-push) will correctly trigger detection. If you routinely rebase branches, consider using "Smart" instead of "Block & Approve" to avoid constant approval prompts.
|
||||
486
docs/NIX_DEPLOYMENT.md
Normal file
486
docs/NIX_DEPLOYMENT.md
Normal file
@@ -0,0 +1,486 @@
|
||||
# Nix Deployment Guide
|
||||
|
||||
This guide covers deploying Gitea Mirror using Nix flakes. The Nix deployment follows the same minimal configuration philosophy as `docker-compose.alt.yml` - secrets are auto-generated, and everything else can be configured via the web UI.
|
||||
|
||||
## Prerequisites
|
||||
|
||||
- Nix 2.4+ installed
|
||||
- For NixOS module: NixOS 23.05+
|
||||
|
||||
### Enable Flakes (Recommended)
|
||||
|
||||
To enable flakes permanently and avoid typing flags, add to `/etc/nix/nix.conf` or `~/.config/nix/nix.conf`:
|
||||
```
|
||||
experimental-features = nix-command flakes
|
||||
```
|
||||
|
||||
**Note:** If you don't enable flakes globally, add `--extra-experimental-features 'nix-command flakes'` to all nix commands shown below.
|
||||
|
||||
## Quick Start (Zero Configuration!)
|
||||
|
||||
### Run Immediately - No Setup Required
|
||||
|
||||
```bash
|
||||
# Run directly from the flake (local)
|
||||
nix run --extra-experimental-features 'nix-command flakes' .#gitea-mirror
|
||||
|
||||
# Or from GitHub (once published)
|
||||
nix run --extra-experimental-features 'nix-command flakes' github:RayLabsHQ/gitea-mirror
|
||||
|
||||
# If you have flakes enabled globally, simply:
|
||||
nix run .#gitea-mirror
|
||||
```
|
||||
|
||||
That's it! On first run:
|
||||
- Secrets (`BETTER_AUTH_SECRET` and `ENCRYPTION_SECRET`) are auto-generated
|
||||
- Database is automatically created and initialized
|
||||
- Startup recovery and repair scripts run automatically
|
||||
- Access the web UI at http://localhost:4321
|
||||
|
||||
Everything else (GitHub credentials, Gitea settings, mirror options) is configured through the web interface after signup.
|
||||
|
||||
### Development Environment
|
||||
|
||||
```bash
|
||||
# Enter development shell with all dependencies
|
||||
nix develop --extra-experimental-features 'nix-command flakes'
|
||||
|
||||
# Or use direnv for automatic environment loading (handles flags automatically)
|
||||
echo "use flake" > .envrc
|
||||
direnv allow
|
||||
```
|
||||
|
||||
### Build and Install
|
||||
|
||||
```bash
|
||||
# Build the package
|
||||
nix build --extra-experimental-features 'nix-command flakes'
|
||||
|
||||
# Run the built package
|
||||
./result/bin/gitea-mirror
|
||||
|
||||
# Install to your profile
|
||||
nix profile install --extra-experimental-features 'nix-command flakes' .#gitea-mirror
|
||||
```
|
||||
|
||||
## What Happens on First Run?
|
||||
|
||||
Following the same pattern as the Docker deployment, the Nix package automatically:
|
||||
|
||||
1. **Creates data directory**: `~/.local/share/gitea-mirror` (or `$DATA_DIR`)
|
||||
2. **Generates secrets** (stored securely in data directory):
|
||||
- `BETTER_AUTH_SECRET` - Session authentication (32-char hex)
|
||||
- `ENCRYPTION_SECRET` - Token encryption (48-char base64)
|
||||
3. **Initializes database**: SQLite database with Drizzle migrations
|
||||
4. **Runs startup scripts**:
|
||||
- Environment configuration loader
|
||||
- Crash recovery for interrupted jobs
|
||||
- Repository status repair
|
||||
5. **Starts the application** with graceful shutdown handling
|
||||
|
||||
## NixOS Module - Minimal Deployment
|
||||
|
||||
### Simplest Possible Configuration
|
||||
|
||||
Add to your NixOS configuration (`/etc/nixos/configuration.nix`):
|
||||
|
||||
```nix
|
||||
{
|
||||
inputs = {
|
||||
nixpkgs.url = "github:NixOS/nixpkgs/nixos-unstable";
|
||||
gitea-mirror.url = "github:RayLabsHQ/gitea-mirror";
|
||||
};
|
||||
|
||||
outputs = { nixpkgs, gitea-mirror, ... }: {
|
||||
nixosConfigurations.your-hostname = nixpkgs.lib.nixosSystem {
|
||||
system = "x86_64-linux";
|
||||
modules = [
|
||||
gitea-mirror.nixosModules.default
|
||||
{
|
||||
# That's it! Just enable the service
|
||||
services.gitea-mirror.enable = true;
|
||||
}
|
||||
];
|
||||
};
|
||||
};
|
||||
}
|
||||
```
|
||||
|
||||
Apply with:
|
||||
```bash
|
||||
sudo nixos-rebuild switch
|
||||
```
|
||||
|
||||
Access at http://localhost:4321, sign up (first user is admin), and configure everything via the web UI.
|
||||
|
||||
### Production Configuration
|
||||
|
||||
For production with custom domain and firewall:
|
||||
|
||||
```nix
|
||||
{
|
||||
services.gitea-mirror = {
|
||||
enable = true;
|
||||
host = "0.0.0.0";
|
||||
port = 4321;
|
||||
betterAuthUrl = "https://mirror.example.com";
|
||||
betterAuthTrustedOrigins = "https://mirror.example.com";
|
||||
openFirewall = true;
|
||||
};
|
||||
|
||||
# Optional: Use with nginx reverse proxy
|
||||
services.nginx = {
|
||||
enable = true;
|
||||
virtualHosts."mirror.example.com" = {
|
||||
locations."/" = {
|
||||
proxyPass = "http://127.0.0.1:4321";
|
||||
proxyWebsockets = true;
|
||||
};
|
||||
enableACME = true;
|
||||
forceSSL = true;
|
||||
};
|
||||
};
|
||||
}
|
||||
```
|
||||
|
||||
### Advanced: Manual Secret Management
|
||||
|
||||
If you prefer to manage secrets manually (e.g., with sops-nix or agenix):
|
||||
|
||||
1. Create a secrets file:
|
||||
```bash
|
||||
# /var/lib/gitea-mirror/secrets.env
|
||||
BETTER_AUTH_SECRET=your-32-character-minimum-secret-key-here
|
||||
ENCRYPTION_SECRET=your-encryption-secret-here
|
||||
```
|
||||
|
||||
2. Reference it in your configuration:
|
||||
```nix
|
||||
{
|
||||
services.gitea-mirror = {
|
||||
enable = true;
|
||||
environmentFile = "/var/lib/gitea-mirror/secrets.env";
|
||||
};
|
||||
}
|
||||
```
|
||||
|
||||
### Full Configuration Options
|
||||
|
||||
```nix
|
||||
{
|
||||
services.gitea-mirror = {
|
||||
enable = true;
|
||||
package = gitea-mirror.packages.x86_64-linux.default; # Override package
|
||||
dataDir = "/var/lib/gitea-mirror";
|
||||
user = "gitea-mirror";
|
||||
group = "gitea-mirror";
|
||||
host = "0.0.0.0";
|
||||
port = 4321;
|
||||
betterAuthUrl = "https://mirror.example.com";
|
||||
betterAuthTrustedOrigins = "https://mirror.example.com";
|
||||
|
||||
# Concurrency controls (match docker-compose.alt.yml)
|
||||
mirrorIssueConcurrency = 3; # Set to 1 for perfect chronological order
|
||||
mirrorPullRequestConcurrency = 5; # Set to 1 for perfect chronological order
|
||||
|
||||
environmentFile = null; # Optional secrets file
|
||||
openFirewall = true;
|
||||
};
|
||||
}
|
||||
```
|
||||
|
||||
## Service Management (NixOS)
|
||||
|
||||
```bash
|
||||
# Start the service
|
||||
sudo systemctl start gitea-mirror
|
||||
|
||||
# Stop the service
|
||||
sudo systemctl stop gitea-mirror
|
||||
|
||||
# Restart the service
|
||||
sudo systemctl restart gitea-mirror
|
||||
|
||||
# Check status
|
||||
sudo systemctl status gitea-mirror
|
||||
|
||||
# View logs
|
||||
sudo journalctl -u gitea-mirror -f
|
||||
|
||||
# Health check
|
||||
curl http://localhost:4321/api/health
|
||||
```
|
||||
|
||||
## Environment Variables
|
||||
|
||||
All variables from `docker-compose.alt.yml` are supported:
|
||||
|
||||
```bash
|
||||
# === AUTO-GENERATED (Don't set unless you want specific values) ===
|
||||
BETTER_AUTH_SECRET # Auto-generated, stored in data dir
|
||||
ENCRYPTION_SECRET # Auto-generated, stored in data dir
|
||||
|
||||
# === CORE SETTINGS (Have good defaults) ===
|
||||
DATA_DIR="$HOME/.local/share/gitea-mirror"
|
||||
DATABASE_URL="file:$DATA_DIR/gitea-mirror.db"
|
||||
HOST="0.0.0.0"
|
||||
PORT="4321"
|
||||
NODE_ENV="production"
|
||||
|
||||
# === BETTER AUTH (Override for custom domains) ===
|
||||
BETTER_AUTH_URL="http://localhost:4321"
|
||||
BETTER_AUTH_TRUSTED_ORIGINS="http://localhost:4321"
|
||||
PUBLIC_BETTER_AUTH_URL="http://localhost:4321"
|
||||
|
||||
# === CONCURRENCY CONTROLS ===
|
||||
MIRROR_ISSUE_CONCURRENCY=3 # Default: 3 (set to 1 for perfect order)
|
||||
MIRROR_PULL_REQUEST_CONCURRENCY=5 # Default: 5 (set to 1 for perfect order)
|
||||
|
||||
# === CONFIGURE VIA WEB UI (Not needed at startup) ===
|
||||
# GitHub credentials, Gitea settings, mirror options, scheduling, etc.
|
||||
# All configured after signup through the web interface
|
||||
```
|
||||
|
||||
## Database Management
|
||||
|
||||
The Nix package includes a database management helper:
|
||||
|
||||
```bash
|
||||
# Initialize database (done automatically on first run)
|
||||
gitea-mirror-db init
|
||||
|
||||
# Check database health
|
||||
gitea-mirror-db check
|
||||
|
||||
# Fix database issues
|
||||
gitea-mirror-db fix
|
||||
|
||||
# Reset users
|
||||
gitea-mirror-db reset-users
|
||||
```
|
||||
|
||||
## Home Manager Integration
|
||||
|
||||
For single-user deployments:
|
||||
|
||||
```nix
|
||||
{ config, pkgs, ... }:
|
||||
let
|
||||
gitea-mirror = (import (fetchTarball "https://github.com/RayLabsHQ/gitea-mirror/archive/main.tar.gz")).packages.${pkgs.system}.default;
|
||||
in {
|
||||
home.packages = [ gitea-mirror ];
|
||||
|
||||
# Optional: Run as user service
|
||||
systemd.user.services.gitea-mirror = {
|
||||
Unit = {
|
||||
Description = "Gitea Mirror Service";
|
||||
After = [ "network.target" ];
|
||||
};
|
||||
|
||||
Service = {
|
||||
Type = "simple";
|
||||
ExecStart = "${gitea-mirror}/bin/gitea-mirror";
|
||||
Restart = "always";
|
||||
Environment = [
|
||||
"DATA_DIR=%h/.local/share/gitea-mirror"
|
||||
"HOST=127.0.0.1"
|
||||
"PORT=4321"
|
||||
];
|
||||
};
|
||||
|
||||
Install = {
|
||||
WantedBy = [ "default.target" ];
|
||||
};
|
||||
};
|
||||
}
|
||||
```
|
||||
|
||||
## Docker Image from Nix (Optional)
|
||||
|
||||
You can also use Nix to create a Docker image:
|
||||
|
||||
```nix
|
||||
# Add to flake.nix packages section
|
||||
dockerImage = pkgs.dockerTools.buildLayeredImage {
|
||||
name = "gitea-mirror";
|
||||
tag = "latest";
|
||||
contents = [ self.packages.${system}.default pkgs.cacert pkgs.openssl ];
|
||||
config = {
|
||||
Cmd = [ "${self.packages.${system}.default}/bin/gitea-mirror" ];
|
||||
ExposedPorts = { "4321/tcp" = {}; };
|
||||
Env = [
|
||||
"DATA_DIR=/data"
|
||||
"DATABASE_URL=file:/data/gitea-mirror.db"
|
||||
];
|
||||
Volumes = { "/data" = {}; };
|
||||
};
|
||||
};
|
||||
```
|
||||
|
||||
Build and load:
|
||||
```bash
|
||||
nix build --extra-experimental-features 'nix-command flakes' .#dockerImage
|
||||
docker load < result
|
||||
docker run -p 4321:4321 -v gitea-mirror-data:/data gitea-mirror:latest
|
||||
```
|
||||
|
||||
## Comparison: Docker vs Nix
|
||||
|
||||
Both deployment methods follow the same philosophy:
|
||||
|
||||
| Feature | Docker Compose | Nix |
|
||||
|---------|---------------|-----|
|
||||
| **Configuration** | Minimal (only BETTER_AUTH_SECRET) | Zero config (auto-generated) |
|
||||
| **Secret Generation** | Auto-generated & persisted | Auto-generated & persisted |
|
||||
| **Database Init** | Automatic on first run | Automatic on first run |
|
||||
| **Startup Scripts** | Runs recovery/repair/env-config | Runs recovery/repair/env-config |
|
||||
| **Graceful Shutdown** | Signal handling in entrypoint | Signal handling in wrapper |
|
||||
| **Health Check** | Docker healthcheck | systemd timer (optional) |
|
||||
| **Updates** | `docker pull` | `nix flake update && nixos-rebuild` |
|
||||
|
||||
## Troubleshooting
|
||||
|
||||
### Check Auto-Generated Secrets
|
||||
```bash
|
||||
# For standalone
|
||||
cat ~/.local/share/gitea-mirror/.better_auth_secret
|
||||
cat ~/.local/share/gitea-mirror/.encryption_secret
|
||||
|
||||
# For NixOS service
|
||||
sudo cat /var/lib/gitea-mirror/.better_auth_secret
|
||||
sudo cat /var/lib/gitea-mirror/.encryption_secret
|
||||
```
|
||||
|
||||
### Database Issues
|
||||
```bash
|
||||
# Check if database exists
|
||||
ls -la ~/.local/share/gitea-mirror/gitea-mirror.db
|
||||
|
||||
# Reinitialize (deletes all data!)
|
||||
rm ~/.local/share/gitea-mirror/gitea-mirror.db
|
||||
gitea-mirror-db init
|
||||
```
|
||||
|
||||
### Permission Issues (NixOS)
|
||||
```bash
|
||||
sudo chown -R gitea-mirror:gitea-mirror /var/lib/gitea-mirror
|
||||
sudo chmod 700 /var/lib/gitea-mirror
|
||||
```
|
||||
|
||||
### Port Already in Use
|
||||
```bash
|
||||
# Change port
|
||||
export PORT=8080
|
||||
gitea-mirror
|
||||
|
||||
# Or in NixOS config
|
||||
services.gitea-mirror.port = 8080;
|
||||
```
|
||||
|
||||
### View Startup Logs
|
||||
```bash
|
||||
# Standalone (verbose output on console)
|
||||
gitea-mirror
|
||||
|
||||
# NixOS service
|
||||
sudo journalctl -u gitea-mirror -f --since "5 minutes ago"
|
||||
```
|
||||
|
||||
## Updating
|
||||
|
||||
### Standalone Installation
|
||||
```bash
|
||||
# Update flake lock
|
||||
nix flake update --extra-experimental-features 'nix-command flakes'
|
||||
|
||||
# Rebuild
|
||||
nix build --extra-experimental-features 'nix-command flakes'
|
||||
|
||||
# Or update profile
|
||||
nix profile upgrade --extra-experimental-features 'nix-command flakes' gitea-mirror
|
||||
```
|
||||
|
||||
### NixOS
|
||||
```bash
|
||||
# Update input
|
||||
sudo nix flake lock --update-input gitea-mirror --extra-experimental-features 'nix-command flakes'
|
||||
|
||||
# Rebuild system
|
||||
sudo nixos-rebuild switch --flake .#your-hostname
|
||||
```
|
||||
|
||||
## Migration from Docker
|
||||
|
||||
To migrate from Docker to Nix while keeping your data:
|
||||
|
||||
1. **Stop Docker container:**
|
||||
```bash
|
||||
docker-compose -f docker-compose.alt.yml down
|
||||
```
|
||||
|
||||
2. **Copy data directory:**
|
||||
```bash
|
||||
# For standalone
|
||||
cp -r ./data ~/.local/share/gitea-mirror
|
||||
|
||||
# For NixOS
|
||||
sudo cp -r ./data /var/lib/gitea-mirror
|
||||
sudo chown -R gitea-mirror:gitea-mirror /var/lib/gitea-mirror
|
||||
```
|
||||
|
||||
3. **Copy secrets (if you want to keep them):**
|
||||
```bash
|
||||
# Extract from Docker volume
|
||||
docker run --rm -v gitea-mirror_data:/data alpine \
|
||||
cat /data/.better_auth_secret > better_auth_secret
|
||||
docker run --rm -v gitea-mirror_data:/data alpine \
|
||||
cat /data/.encryption_secret > encryption_secret
|
||||
|
||||
# Copy to new location
|
||||
cp better_auth_secret ~/.local/share/gitea-mirror/.better_auth_secret
|
||||
cp encryption_secret ~/.local/share/gitea-mirror/.encryption_secret
|
||||
chmod 600 ~/.local/share/gitea-mirror/.*_secret
|
||||
```
|
||||
|
||||
4. **Start Nix version:**
|
||||
```bash
|
||||
gitea-mirror
|
||||
```
|
||||
|
||||
## CI/CD Integration
|
||||
|
||||
Example GitHub Actions workflow (see `.github/workflows/nix-build.yml`):
|
||||
|
||||
```yaml
|
||||
name: Nix Build
|
||||
|
||||
on: [push, pull_request]
|
||||
|
||||
permissions:
|
||||
contents: read
|
||||
|
||||
jobs:
|
||||
build:
|
||||
strategy:
|
||||
matrix:
|
||||
os: [ubuntu-latest, macos-latest]
|
||||
runs-on: ${{ matrix.os }}
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
- uses: DeterminateSystems/nix-installer-action@main
|
||||
- uses: DeterminateSystems/magic-nix-cache-action@main
|
||||
- run: nix flake check
|
||||
- run: nix build --print-build-logs
|
||||
```
|
||||
|
||||
This uses:
|
||||
- **Determinate Nix Installer** - Fast, reliable Nix installation with flakes enabled by default
|
||||
- **Magic Nix Cache** - Free caching using GitHub Actions cache (no account needed)
|
||||
|
||||
## Resources
|
||||
|
||||
- [Nix Manual](https://nixos.org/manual/nix/stable/)
|
||||
- [NixOS Options Search](https://search.nixos.org/options)
|
||||
- [Nix Pills Tutorial](https://nixos.org/guides/nix-pills/)
|
||||
- [Project Documentation](../README.md)
|
||||
- [Docker Deployment](../docker-compose.alt.yml) - Equivalent minimal config
|
||||
322
docs/NIX_DISTRIBUTION.md
Normal file
322
docs/NIX_DISTRIBUTION.md
Normal file
@@ -0,0 +1,322 @@
|
||||
# Nix Package Distribution Guide
|
||||
|
||||
This guide explains how Gitea Mirror is distributed via Nix and how users can consume it.
|
||||
|
||||
## Distribution Methods
|
||||
|
||||
### Method 1: Direct GitHub Usage (Zero Infrastructure)
|
||||
|
||||
**No CI, releases, or setup needed!** Users can consume directly from GitHub:
|
||||
|
||||
```bash
|
||||
# Latest from main branch
|
||||
nix run --extra-experimental-features 'nix-command flakes' github:RayLabsHQ/gitea-mirror
|
||||
|
||||
# Pin to specific commit
|
||||
nix run github:RayLabsHQ/gitea-mirror/abc123def
|
||||
|
||||
# Pin to git tag
|
||||
nix run github:RayLabsHQ/gitea-mirror/vX.Y.Z
|
||||
```
|
||||
|
||||
**How it works:**
|
||||
1. Nix fetches the repository from GitHub
|
||||
2. Nix reads `flake.nix` and `flake.lock`
|
||||
3. Nix builds the package locally on the user's machine
|
||||
4. Package is cached in `/nix/store` for reuse
|
||||
|
||||
**Pros:**
|
||||
- Zero infrastructure needed
|
||||
- Works immediately after pushing code
|
||||
- Users always get reproducible builds
|
||||
|
||||
**Cons:**
|
||||
- Users must build from source (slower first time)
|
||||
- Requires build dependencies (Bun, etc.)
|
||||
|
||||
---
|
||||
|
||||
### Method 2: CI Build Caching
|
||||
|
||||
The GitHub Actions workflow uses **Magic Nix Cache** (by Determinate Systems) to cache builds:
|
||||
|
||||
- **Zero configuration required** - no accounts or tokens needed
|
||||
- **Automatic** - CI workflow handles everything
|
||||
- **Uses GitHub Actions cache** - fast, reliable, free
|
||||
|
||||
#### How It Works:
|
||||
|
||||
1. GitHub Actions builds the package on each push/PR
|
||||
2. Build artifacts are cached in GitHub Actions cache
|
||||
3. Subsequent builds reuse cached dependencies (faster CI)
|
||||
|
||||
Note: This caches CI builds. Users still build locally, but the flake.lock ensures reproducibility.
|
||||
|
||||
---
|
||||
|
||||
### Method 3: nixpkgs Submission (Official Distribution)
|
||||
|
||||
Submit to the official Nix package repository for maximum visibility.
|
||||
|
||||
#### Process:
|
||||
|
||||
1. **Prepare package** (already done with `flake.nix`)
|
||||
2. **Test thoroughly**
|
||||
3. **Submit PR to nixpkgs:** https://github.com/NixOS/nixpkgs
|
||||
|
||||
#### User Experience:
|
||||
|
||||
```bash
|
||||
# After acceptance into nixpkgs
|
||||
nix run nixpkgs#gitea-mirror
|
||||
|
||||
# NixOS configuration
|
||||
environment.systemPackages = [ pkgs.gitea-mirror ];
|
||||
```
|
||||
|
||||
**Pros:**
|
||||
- Maximum discoverability (official repo)
|
||||
- Trusted by Nix community
|
||||
- Included in NixOS search
|
||||
- Binary caching by cache.nixos.org
|
||||
|
||||
**Cons:**
|
||||
- Submission/review process
|
||||
- Must follow nixpkgs guidelines
|
||||
- Updates require PRs
|
||||
|
||||
---
|
||||
|
||||
## Current Distribution Strategy
|
||||
|
||||
### Phase 1: Direct GitHub (Immediate) ✅
|
||||
|
||||
Already working! Users can:
|
||||
|
||||
```bash
|
||||
nix run github:RayLabsHQ/gitea-mirror
|
||||
```
|
||||
|
||||
### Phase 2: CI Build Validation ✅
|
||||
|
||||
GitHub Actions workflow validates builds on every push/PR:
|
||||
|
||||
- Uses Magic Nix Cache for fast CI builds
|
||||
- Tests on both Linux and macOS
|
||||
- No setup required - works automatically
|
||||
|
||||
### Phase 3: Version Releases (Optional)
|
||||
|
||||
Tag releases for version pinning:
|
||||
|
||||
```bash
|
||||
git tag vX.Y.Z
|
||||
git push origin vX.Y.Z
|
||||
|
||||
# Users can then pin:
|
||||
nix run github:RayLabsHQ/gitea-mirror/vX.Y.Z
|
||||
```
|
||||
|
||||
### Phase 4: nixpkgs Submission (Long Term)
|
||||
|
||||
Once package is stable and well-tested, submit to nixpkgs.
|
||||
|
||||
---
|
||||
|
||||
## User Documentation
|
||||
|
||||
### For Users: How to Install
|
||||
|
||||
Add this to your `docs/NIX_DEPLOYMENT.md`:
|
||||
|
||||
#### Option 1: Direct Install (No Configuration)
|
||||
|
||||
```bash
|
||||
# Run immediately
|
||||
nix run --extra-experimental-features 'nix-command flakes' github:RayLabsHQ/gitea-mirror
|
||||
|
||||
# Install to profile
|
||||
nix profile install --extra-experimental-features 'nix-command flakes' github:RayLabsHQ/gitea-mirror
|
||||
```
|
||||
|
||||
#### Option 2: Pin to Specific Version
|
||||
|
||||
```bash
|
||||
# Pin to git tag
|
||||
nix run github:RayLabsHQ/gitea-mirror/vX.Y.Z
|
||||
|
||||
# Pin to commit
|
||||
nix run github:RayLabsHQ/gitea-mirror/abc123def
|
||||
|
||||
# Lock in flake.nix
|
||||
inputs.gitea-mirror.url = "github:RayLabsHQ/gitea-mirror/vX.Y.Z";
|
||||
```
|
||||
|
||||
#### Option 3: NixOS Configuration
|
||||
|
||||
```nix
|
||||
{
|
||||
inputs = {
|
||||
nixpkgs.url = "github:NixOS/nixpkgs/nixos-unstable";
|
||||
gitea-mirror.url = "github:RayLabsHQ/gitea-mirror";
|
||||
# Or pin to version:
|
||||
# gitea-mirror.url = "github:RayLabsHQ/gitea-mirror/vX.Y.Z";
|
||||
};
|
||||
|
||||
outputs = { nixpkgs, gitea-mirror, ... }: {
|
||||
nixosConfigurations.your-host = nixpkgs.lib.nixosSystem {
|
||||
modules = [
|
||||
gitea-mirror.nixosModules.default
|
||||
{
|
||||
services.gitea-mirror = {
|
||||
enable = true;
|
||||
betterAuthUrl = "https://mirror.example.com";
|
||||
openFirewall = true;
|
||||
};
|
||||
}
|
||||
];
|
||||
};
|
||||
};
|
||||
}
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## Maintaining the Distribution
|
||||
|
||||
### Releasing New Versions
|
||||
|
||||
```bash
|
||||
# 1. Update version in package.json
|
||||
vim package.json # Update version field
|
||||
|
||||
# 2. Update flake.nix version (line 17)
|
||||
vim flake.nix # Update version = "X.Y.Z";
|
||||
|
||||
# 3. Commit changes
|
||||
git add package.json flake.nix
|
||||
git commit -m "chore: bump version to vX.Y.Z"
|
||||
|
||||
# 4. Create git tag
|
||||
git tag vX.Y.Z
|
||||
git push origin main
|
||||
git push origin vX.Y.Z
|
||||
|
||||
# 5. GitHub Actions builds and caches automatically
|
||||
```
|
||||
|
||||
Users can then pin to the new version:
|
||||
```bash
|
||||
nix run github:RayLabsHQ/gitea-mirror/vX.Y.Z
|
||||
```
|
||||
|
||||
### Updating Flake Lock
|
||||
|
||||
The `flake.lock` file pins all dependencies. Update it periodically:
|
||||
|
||||
```bash
|
||||
# Update all inputs
|
||||
nix flake update
|
||||
|
||||
# Update specific input
|
||||
nix flake lock --update-input nixpkgs
|
||||
|
||||
# Test after update
|
||||
nix build
|
||||
nix flake check
|
||||
|
||||
# Commit the updated lock file
|
||||
git add flake.lock
|
||||
git commit -m "chore: update flake dependencies"
|
||||
git push
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## Troubleshooting Distribution Issues
|
||||
|
||||
### Users Report Build Failures
|
||||
|
||||
1. **Check GitHub Actions:** Ensure CI is passing
|
||||
2. **Test locally:** `nix flake check`
|
||||
3. **Check flake.lock:** May need update if dependencies changed
|
||||
|
||||
### CI Cache Not Working
|
||||
|
||||
1. **Check workflow logs:** Review GitHub Actions for errors
|
||||
2. **Clear cache:** GitHub Actions → Caches → Delete relevant cache
|
||||
3. **Verify flake.lock:** May need `nix flake update` if dependencies changed
|
||||
|
||||
### Version Pinning Not Working
|
||||
|
||||
```bash
|
||||
# Verify tag exists
|
||||
git tag -l
|
||||
|
||||
# Ensure tag is pushed
|
||||
git ls-remote --tags origin
|
||||
|
||||
# Test specific tag
|
||||
nix run github:RayLabsHQ/gitea-mirror/vX.Y.Z
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## Advanced: Custom Binary Cache
|
||||
|
||||
If you prefer self-hosting instead of Cachix:
|
||||
|
||||
### Option 1: S3-Compatible Storage
|
||||
|
||||
```nix
|
||||
# Generate signing key
|
||||
nix-store --generate-binary-cache-key cache.example.com cache-priv-key.pem cache-pub-key.pem
|
||||
|
||||
# Push to S3
|
||||
nix copy --to s3://my-nix-cache?region=us-east-1 $(nix-build)
|
||||
```
|
||||
|
||||
Users configure:
|
||||
```nix
|
||||
substituters = https://my-bucket.s3.amazonaws.com/nix-cache
|
||||
trusted-public-keys = cache.example.com:BASE64_PUBLIC_KEY
|
||||
```
|
||||
|
||||
### Option 2: Self-Hosted Nix Store
|
||||
|
||||
Run `nix-serve` on your server:
|
||||
|
||||
```bash
|
||||
# On server
|
||||
nix-serve -p 8080
|
||||
|
||||
# Behind nginx/caddy
|
||||
proxy_pass http://localhost:8080;
|
||||
```
|
||||
|
||||
Users configure:
|
||||
```nix
|
||||
substituters = https://cache.example.com
|
||||
trusted-public-keys = YOUR_KEY
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## Comparison: Distribution Methods
|
||||
|
||||
| Method | Setup Time | User Speed | Cost | Discoverability |
|
||||
|--------|-----------|------------|------|-----------------|
|
||||
| Direct GitHub | 0 min | Slow (build) | Free | Low |
|
||||
| nixpkgs | Hours/days | Fast (binary) | Free | High |
|
||||
| Self-hosted cache | 30+ min | Fast (binary) | Server cost | Low |
|
||||
|
||||
**Current approach:** Direct GitHub consumption with CI validation using Magic Nix Cache. Users build locally (reproducible via flake.lock). Consider **nixpkgs** submission for maximum reach once the package is mature.
|
||||
|
||||
---
|
||||
|
||||
## Resources
|
||||
|
||||
- [Nix Flakes Documentation](https://nixos.wiki/wiki/Flakes)
|
||||
- [Magic Nix Cache](https://github.com/DeterminateSystems/magic-nix-cache-action)
|
||||
- [nixpkgs Contributing Guide](https://github.com/NixOS/nixpkgs/blob/master/CONTRIBUTING.md)
|
||||
- [Nix Binary Cache Setup](https://nixos.org/manual/nix/stable/package-management/binary-cache-substituter.html)
|
||||
@@ -7,6 +7,8 @@ This folder contains engineering and operations references for the open-source G
|
||||
### Core workflow
|
||||
- **[DEVELOPMENT_WORKFLOW.md](./DEVELOPMENT_WORKFLOW.md)** – Set up a local environment, run scripts, and understand the repo layout (app + marketing site).
|
||||
- **[ENVIRONMENT_VARIABLES.md](./ENVIRONMENT_VARIABLES.md)** – Complete reference for every configuration flag supported by the app and Docker images.
|
||||
- **[NIX_DEPLOYMENT.md](./NIX_DEPLOYMENT.md)** – User-facing deployment guide for Nix and NixOS.
|
||||
- **[NIX_DISTRIBUTION.md](./NIX_DISTRIBUTION.md)** – Maintainer notes for packaging, releases, and distribution strategy.
|
||||
|
||||
### Reliability & recovery
|
||||
- **[GRACEFUL_SHUTDOWN.md](./GRACEFUL_SHUTDOWN.md)** – How signal handling, shutdown coordination, and job persistence work in v3.
|
||||
@@ -32,8 +34,6 @@ The first user you create locally becomes the administrator. All other configura
|
||||
## Contributing & support
|
||||
|
||||
- 🎯 Contribution guide: [../CONTRIBUTING.md](../CONTRIBUTING.md)
|
||||
- 📘 Code of conduct: [../CODE_OF_CONDUCT.md](../CODE_OF_CONDUCT.md)
|
||||
- 🐞 Issues & feature requests: <https://github.com/RayLabsHQ/gitea-mirror/issues>
|
||||
- 💬 Discussions: <https://github.com/RayLabsHQ/gitea-mirror/discussions>
|
||||
|
||||
Security disclosures should follow the process in [../SECURITY.md](../SECURITY.md).
|
||||
- 🔐 Security policy & advisories: <https://github.com/RayLabsHQ/gitea-mirror/security>
|
||||
|
||||
BIN
docs/images/add-repo-target-org.png
Normal file
BIN
docs/images/add-repo-target-org.png
Normal file
Binary file not shown.
|
After Width: | Height: | Size: 22 KiB |
170
flake.lock
generated
Normal file
170
flake.lock
generated
Normal file
@@ -0,0 +1,170 @@
|
||||
{
|
||||
"nodes": {
|
||||
"bun2nix": {
|
||||
"inputs": {
|
||||
"flake-parts": "flake-parts",
|
||||
"import-tree": "import-tree",
|
||||
"nixpkgs": [
|
||||
"nixpkgs"
|
||||
],
|
||||
"systems": "systems",
|
||||
"treefmt-nix": "treefmt-nix"
|
||||
},
|
||||
"locked": {
|
||||
"lastModified": 1770895533,
|
||||
"narHash": "sha256-v3QaK9ugy9bN9RXDnjw0i2OifKmz2NnKM82agtqm/UY=",
|
||||
"owner": "nix-community",
|
||||
"repo": "bun2nix",
|
||||
"rev": "c843f477b15f51151f8c6bcc886954699440a6e1",
|
||||
"type": "github"
|
||||
},
|
||||
"original": {
|
||||
"owner": "nix-community",
|
||||
"repo": "bun2nix",
|
||||
"type": "github"
|
||||
}
|
||||
},
|
||||
"flake-parts": {
|
||||
"inputs": {
|
||||
"nixpkgs-lib": "nixpkgs-lib"
|
||||
},
|
||||
"locked": {
|
||||
"lastModified": 1769996383,
|
||||
"narHash": "sha256-AnYjnFWgS49RlqX7LrC4uA+sCCDBj0Ry/WOJ5XWAsa0=",
|
||||
"owner": "hercules-ci",
|
||||
"repo": "flake-parts",
|
||||
"rev": "57928607ea566b5db3ad13af0e57e921e6b12381",
|
||||
"type": "github"
|
||||
},
|
||||
"original": {
|
||||
"owner": "hercules-ci",
|
||||
"repo": "flake-parts",
|
||||
"type": "github"
|
||||
}
|
||||
},
|
||||
"flake-utils": {
|
||||
"inputs": {
|
||||
"systems": "systems_2"
|
||||
},
|
||||
"locked": {
|
||||
"lastModified": 1731533236,
|
||||
"narHash": "sha256-l0KFg5HjrsfsO/JpG+r7fRrqm12kzFHyUHqHCVpMMbI=",
|
||||
"owner": "numtide",
|
||||
"repo": "flake-utils",
|
||||
"rev": "11707dc2f618dd54ca8739b309ec4fc024de578b",
|
||||
"type": "github"
|
||||
},
|
||||
"original": {
|
||||
"owner": "numtide",
|
||||
"repo": "flake-utils",
|
||||
"type": "github"
|
||||
}
|
||||
},
|
||||
"import-tree": {
|
||||
"locked": {
|
||||
"lastModified": 1763762820,
|
||||
"narHash": "sha256-ZvYKbFib3AEwiNMLsejb/CWs/OL/srFQ8AogkebEPF0=",
|
||||
"owner": "vic",
|
||||
"repo": "import-tree",
|
||||
"rev": "3c23749d8013ec6daa1d7255057590e9ca726646",
|
||||
"type": "github"
|
||||
},
|
||||
"original": {
|
||||
"owner": "vic",
|
||||
"repo": "import-tree",
|
||||
"type": "github"
|
||||
}
|
||||
},
|
||||
"nixpkgs": {
|
||||
"locked": {
|
||||
"lastModified": 1761672384,
|
||||
"narHash": "sha256-o9KF3DJL7g7iYMZq9SWgfS1BFlNbsm6xplRjVlOCkXI=",
|
||||
"owner": "NixOS",
|
||||
"repo": "nixpkgs",
|
||||
"rev": "08dacfca559e1d7da38f3cf05f1f45ee9bfd213c",
|
||||
"type": "github"
|
||||
},
|
||||
"original": {
|
||||
"owner": "NixOS",
|
||||
"ref": "nixos-unstable",
|
||||
"repo": "nixpkgs",
|
||||
"type": "github"
|
||||
}
|
||||
},
|
||||
"nixpkgs-lib": {
|
||||
"locked": {
|
||||
"lastModified": 1769909678,
|
||||
"narHash": "sha256-cBEymOf4/o3FD5AZnzC3J9hLbiZ+QDT/KDuyHXVJOpM=",
|
||||
"owner": "nix-community",
|
||||
"repo": "nixpkgs.lib",
|
||||
"rev": "72716169fe93074c333e8d0173151350670b824c",
|
||||
"type": "github"
|
||||
},
|
||||
"original": {
|
||||
"owner": "nix-community",
|
||||
"repo": "nixpkgs.lib",
|
||||
"type": "github"
|
||||
}
|
||||
},
|
||||
"root": {
|
||||
"inputs": {
|
||||
"bun2nix": "bun2nix",
|
||||
"flake-utils": "flake-utils",
|
||||
"nixpkgs": "nixpkgs"
|
||||
}
|
||||
},
|
||||
"systems": {
|
||||
"locked": {
|
||||
"lastModified": 1681028828,
|
||||
"narHash": "sha256-Vy1rq5AaRuLzOxct8nz4T6wlgyUR7zLU309k9mBC768=",
|
||||
"owner": "nix-systems",
|
||||
"repo": "default",
|
||||
"rev": "da67096a3b9bf56a91d16901293e51ba5b49a27e",
|
||||
"type": "github"
|
||||
},
|
||||
"original": {
|
||||
"owner": "nix-systems",
|
||||
"repo": "default",
|
||||
"type": "github"
|
||||
}
|
||||
},
|
||||
"systems_2": {
|
||||
"locked": {
|
||||
"lastModified": 1681028828,
|
||||
"narHash": "sha256-Vy1rq5AaRuLzOxct8nz4T6wlgyUR7zLU309k9mBC768=",
|
||||
"owner": "nix-systems",
|
||||
"repo": "default",
|
||||
"rev": "da67096a3b9bf56a91d16901293e51ba5b49a27e",
|
||||
"type": "github"
|
||||
},
|
||||
"original": {
|
||||
"owner": "nix-systems",
|
||||
"repo": "default",
|
||||
"type": "github"
|
||||
}
|
||||
},
|
||||
"treefmt-nix": {
|
||||
"inputs": {
|
||||
"nixpkgs": [
|
||||
"bun2nix",
|
||||
"nixpkgs"
|
||||
]
|
||||
},
|
||||
"locked": {
|
||||
"lastModified": 1770228511,
|
||||
"narHash": "sha256-wQ6NJSuFqAEmIg2VMnLdCnUc0b7vslUohqqGGD+Fyxk=",
|
||||
"owner": "numtide",
|
||||
"repo": "treefmt-nix",
|
||||
"rev": "337a4fe074be1042a35086f15481d763b8ddc0e7",
|
||||
"type": "github"
|
||||
},
|
||||
"original": {
|
||||
"owner": "numtide",
|
||||
"repo": "treefmt-nix",
|
||||
"type": "github"
|
||||
}
|
||||
}
|
||||
},
|
||||
"root": "root",
|
||||
"version": 7
|
||||
}
|
||||
454
flake.nix
Normal file
454
flake.nix
Normal file
@@ -0,0 +1,454 @@
|
||||
{
|
||||
description = "Gitea Mirror - Self-hosted GitHub to Gitea mirroring service";
|
||||
|
||||
nixConfig = {
|
||||
extra-substituters = [
|
||||
"https://nix-community.cachix.org"
|
||||
];
|
||||
extra-trusted-public-keys = [
|
||||
"nix-community.cachix.org-1:mB9FSh9qf2dCimDSUo8Zy7bkq5CX+/rkCWyvRCYg3Fs="
|
||||
];
|
||||
};
|
||||
|
||||
inputs = {
|
||||
nixpkgs.url = "github:NixOS/nixpkgs/nixos-unstable";
|
||||
flake-utils.url = "github:numtide/flake-utils";
|
||||
bun2nix = {
|
||||
url = "github:nix-community/bun2nix";
|
||||
inputs.nixpkgs.follows = "nixpkgs";
|
||||
};
|
||||
};
|
||||
|
||||
outputs = { self, nixpkgs, flake-utils, bun2nix }:
|
||||
let
|
||||
forEachSystem = flake-utils.lib.eachDefaultSystem;
|
||||
in
|
||||
(forEachSystem (system:
|
||||
let
|
||||
pkgs = nixpkgs.legacyPackages.${system};
|
||||
b2n = bun2nix.packages.${system}.default;
|
||||
|
||||
# Build the application
|
||||
gitea-mirror = pkgs.stdenv.mkDerivation {
|
||||
pname = "gitea-mirror";
|
||||
version = "3.9.6";
|
||||
|
||||
src = ./.;
|
||||
|
||||
nativeBuildInputs = [
|
||||
pkgs.bun
|
||||
b2n.hook
|
||||
];
|
||||
|
||||
buildInputs = with pkgs; [
|
||||
sqlite
|
||||
openssl
|
||||
];
|
||||
|
||||
bunDeps = b2n.fetchBunDeps {
|
||||
bunNix = ./bun.nix;
|
||||
};
|
||||
|
||||
# Let the bun2nix hook handle dependency installation via the
|
||||
# pre-fetched cache, but skip its default build/check/install
|
||||
# phases since we have custom ones.
|
||||
dontUseBunBuild = true;
|
||||
dontUseBunCheck = true;
|
||||
dontUseBunInstall = true;
|
||||
|
||||
buildPhase = ''
|
||||
runHook preBuild
|
||||
export HOME=$TMPDIR
|
||||
|
||||
# The bun2nix cache is in the read-only Nix store, but bunx/astro
|
||||
# may try to write to it at build time. Copy the cache to a
|
||||
# writable location.
|
||||
if [ -n "$BUN_INSTALL_CACHE_DIR" ] && [ -d "$BUN_INSTALL_CACHE_DIR" ]; then
|
||||
WRITABLE_CACHE="$TMPDIR/bun-cache"
|
||||
cp -rL "$BUN_INSTALL_CACHE_DIR" "$WRITABLE_CACHE" 2>/dev/null || true
|
||||
chmod -R u+w "$WRITABLE_CACHE" 2>/dev/null || true
|
||||
export BUN_INSTALL_CACHE_DIR="$WRITABLE_CACHE"
|
||||
fi
|
||||
|
||||
# Build the Astro application
|
||||
bun run build
|
||||
|
||||
runHook postBuild
|
||||
'';
|
||||
|
||||
installPhase = ''
|
||||
runHook preInstall
|
||||
|
||||
mkdir -p $out/lib/gitea-mirror
|
||||
mkdir -p $out/bin
|
||||
|
||||
# Copy the built application
|
||||
cp -r dist $out/lib/gitea-mirror/
|
||||
cp -r node_modules $out/lib/gitea-mirror/
|
||||
cp -r scripts $out/lib/gitea-mirror/
|
||||
cp -r src $out/lib/gitea-mirror/
|
||||
cp -r drizzle $out/lib/gitea-mirror/
|
||||
cp package.json $out/lib/gitea-mirror/
|
||||
cp tsconfig.json $out/lib/gitea-mirror/
|
||||
|
||||
# Create entrypoint script that matches Docker behavior
|
||||
cat > $out/bin/gitea-mirror <<'EOF'
|
||||
#!${pkgs.bash}/bin/bash
|
||||
set -e
|
||||
|
||||
# === DEFAULT CONFIGURATION ===
|
||||
# These match docker-compose.alt.yml defaults
|
||||
export DATA_DIR=''${DATA_DIR:-"$HOME/.local/share/gitea-mirror"}
|
||||
export DATABASE_URL=''${DATABASE_URL:-"file:$DATA_DIR/gitea-mirror.db"}
|
||||
export HOST=''${HOST:-"0.0.0.0"}
|
||||
export PORT=''${PORT:-"4321"}
|
||||
export NODE_ENV=''${NODE_ENV:-"production"}
|
||||
|
||||
# Better Auth configuration
|
||||
export BETTER_AUTH_URL=''${BETTER_AUTH_URL:-"http://localhost:4321"}
|
||||
export BETTER_AUTH_TRUSTED_ORIGINS=''${BETTER_AUTH_TRUSTED_ORIGINS:-"http://localhost:4321"}
|
||||
export PUBLIC_BETTER_AUTH_URL=''${PUBLIC_BETTER_AUTH_URL:-"http://localhost:4321"}
|
||||
|
||||
# Concurrency settings (match docker-compose.alt.yml)
|
||||
export MIRROR_ISSUE_CONCURRENCY=''${MIRROR_ISSUE_CONCURRENCY:-3}
|
||||
export MIRROR_PULL_REQUEST_CONCURRENCY=''${MIRROR_PULL_REQUEST_CONCURRENCY:-5}
|
||||
|
||||
# Create data directory
|
||||
mkdir -p "$DATA_DIR"
|
||||
SCRIPT_DIR="$(cd "$(dirname "$0")" && pwd)"
|
||||
APP_DIR="$SCRIPT_DIR/../lib/gitea-mirror"
|
||||
|
||||
# The app uses process.cwd()/data for the database, but the Nix store
|
||||
# is read-only. Create a writable working directory with symlinks to
|
||||
# the app files and a real data directory.
|
||||
WORK_DIR="$DATA_DIR/.workdir"
|
||||
mkdir -p "$WORK_DIR"
|
||||
for item in dist node_modules scripts src drizzle package.json tsconfig.json; do
|
||||
ln -sfn "$APP_DIR/$item" "$WORK_DIR/$item"
|
||||
done
|
||||
ln -sfn "$DATA_DIR" "$WORK_DIR/data"
|
||||
cd "$WORK_DIR"
|
||||
|
||||
# === AUTO-GENERATE SECRETS ===
|
||||
BETTER_AUTH_SECRET_FILE="$DATA_DIR/.better_auth_secret"
|
||||
ENCRYPTION_SECRET_FILE="$DATA_DIR/.encryption_secret"
|
||||
|
||||
# Generate BETTER_AUTH_SECRET if not provided
|
||||
if [ -z "$BETTER_AUTH_SECRET" ]; then
|
||||
if [ -f "$BETTER_AUTH_SECRET_FILE" ]; then
|
||||
echo "Using previously generated BETTER_AUTH_SECRET"
|
||||
export BETTER_AUTH_SECRET=$(cat "$BETTER_AUTH_SECRET_FILE")
|
||||
else
|
||||
echo "Generating a secure random BETTER_AUTH_SECRET"
|
||||
GENERATED_SECRET=$(${pkgs.openssl}/bin/openssl rand -hex 32)
|
||||
export BETTER_AUTH_SECRET="$GENERATED_SECRET"
|
||||
echo "$GENERATED_SECRET" > "$BETTER_AUTH_SECRET_FILE"
|
||||
chmod 600 "$BETTER_AUTH_SECRET_FILE"
|
||||
echo "✅ BETTER_AUTH_SECRET generated and saved to $BETTER_AUTH_SECRET_FILE"
|
||||
fi
|
||||
fi
|
||||
|
||||
# Generate ENCRYPTION_SECRET if not provided
|
||||
if [ -z "$ENCRYPTION_SECRET" ]; then
|
||||
if [ -f "$ENCRYPTION_SECRET_FILE" ]; then
|
||||
echo "Using previously generated ENCRYPTION_SECRET"
|
||||
export ENCRYPTION_SECRET=$(cat "$ENCRYPTION_SECRET_FILE")
|
||||
else
|
||||
echo "Generating a secure random ENCRYPTION_SECRET"
|
||||
GENERATED_ENCRYPTION_SECRET=$(${pkgs.openssl}/bin/openssl rand -base64 36)
|
||||
export ENCRYPTION_SECRET="$GENERATED_ENCRYPTION_SECRET"
|
||||
echo "$GENERATED_ENCRYPTION_SECRET" > "$ENCRYPTION_SECRET_FILE"
|
||||
chmod 600 "$ENCRYPTION_SECRET_FILE"
|
||||
echo "✅ ENCRYPTION_SECRET generated and saved to $ENCRYPTION_SECRET_FILE"
|
||||
fi
|
||||
fi
|
||||
|
||||
# === DATABASE INITIALIZATION ===
|
||||
DB_PATH=$(echo "$DATABASE_URL" | ${pkgs.gnused}/bin/sed 's|^file:||')
|
||||
if [ ! -f "$DB_PATH" ]; then
|
||||
echo "Database not found. It will be created and initialized via Drizzle migrations on first app startup..."
|
||||
touch "$DB_PATH"
|
||||
else
|
||||
echo "Database already exists, Drizzle will check for pending migrations on startup..."
|
||||
fi
|
||||
|
||||
# === STARTUP SCRIPTS ===
|
||||
# Initialize configuration from environment variables
|
||||
echo "Checking for environment configuration..."
|
||||
if [ -f "scripts/startup-env-config.ts" ]; then
|
||||
echo "Loading configuration from environment variables..."
|
||||
${pkgs.bun}/bin/bun scripts/startup-env-config.ts && \
|
||||
echo "✅ Environment configuration loaded successfully" || \
|
||||
echo "⚠️ Environment configuration loading completed with warnings"
|
||||
fi
|
||||
|
||||
# Run startup recovery
|
||||
echo "Running startup recovery..."
|
||||
if [ -f "scripts/startup-recovery.ts" ]; then
|
||||
${pkgs.bun}/bin/bun scripts/startup-recovery.ts --timeout=30000 && \
|
||||
echo "✅ Startup recovery completed successfully" || \
|
||||
echo "⚠️ Startup recovery completed with warnings"
|
||||
fi
|
||||
|
||||
# Run repository status repair
|
||||
echo "Running repository status repair..."
|
||||
if [ -f "scripts/repair-mirrored-repos.ts" ]; then
|
||||
${pkgs.bun}/bin/bun scripts/repair-mirrored-repos.ts --startup && \
|
||||
echo "✅ Repository status repair completed successfully" || \
|
||||
echo "⚠️ Repository status repair completed with warnings"
|
||||
fi
|
||||
|
||||
# === SIGNAL HANDLING ===
|
||||
shutdown_handler() {
|
||||
echo "🛑 Received shutdown signal, forwarding to application..."
|
||||
if [ ! -z "$APP_PID" ]; then
|
||||
kill -TERM "$APP_PID" 2>/dev/null || true
|
||||
wait "$APP_PID" 2>/dev/null || true
|
||||
fi
|
||||
exit 0
|
||||
}
|
||||
|
||||
trap 'shutdown_handler' TERM INT HUP
|
||||
|
||||
# === START APPLICATION ===
|
||||
echo "Starting Gitea Mirror..."
|
||||
echo "Access the web interface at $BETTER_AUTH_URL"
|
||||
${pkgs.bun}/bin/bun dist/server/entry.mjs &
|
||||
APP_PID=$!
|
||||
|
||||
wait "$APP_PID"
|
||||
EOF
|
||||
chmod +x $out/bin/gitea-mirror
|
||||
|
||||
# Create database management helper
|
||||
cat > $out/bin/gitea-mirror-db <<'EOF'
|
||||
#!${pkgs.bash}/bin/bash
|
||||
export DATA_DIR=''${DATA_DIR:-"$HOME/.local/share/gitea-mirror"}
|
||||
mkdir -p "$DATA_DIR"
|
||||
SCRIPT_DIR="$(cd "$(dirname "$0")" && pwd)"
|
||||
cd "$SCRIPT_DIR/../lib/gitea-mirror"
|
||||
exec ${pkgs.bun}/bin/bun scripts/manage-db.ts "$@"
|
||||
EOF
|
||||
chmod +x $out/bin/gitea-mirror-db
|
||||
|
||||
runHook postInstall
|
||||
'';
|
||||
|
||||
meta = with pkgs.lib; {
|
||||
description = "Self-hosted GitHub to Gitea mirroring service";
|
||||
homepage = "https://github.com/RayLabsHQ/gitea-mirror";
|
||||
license = licenses.mit;
|
||||
maintainers = [ ];
|
||||
platforms = platforms.linux ++ platforms.darwin;
|
||||
};
|
||||
};
|
||||
|
||||
in
|
||||
{
|
||||
packages = {
|
||||
default = gitea-mirror;
|
||||
gitea-mirror = gitea-mirror;
|
||||
};
|
||||
|
||||
# Development shell
|
||||
devShells.default = pkgs.mkShell {
|
||||
buildInputs = with pkgs; [
|
||||
bun
|
||||
sqlite
|
||||
openssl
|
||||
b2n
|
||||
];
|
||||
|
||||
shellHook = ''
|
||||
echo "🚀 Gitea Mirror development environment"
|
||||
echo ""
|
||||
echo "Quick start:"
|
||||
echo " bun install # Install dependencies"
|
||||
echo " bun run dev # Start development server"
|
||||
echo " bun run build # Build for production"
|
||||
echo ""
|
||||
echo "Nix packaging:"
|
||||
echo " bun2nix -o bun.nix # Regenerate bun.nix after dependency changes"
|
||||
echo " nix build # Build the package"
|
||||
echo ""
|
||||
echo "Database:"
|
||||
echo " bun run manage-db init # Initialize database"
|
||||
echo " bun run db:studio # Open Drizzle Studio"
|
||||
'';
|
||||
};
|
||||
|
||||
}
|
||||
)) // {
|
||||
nixosModules.default = { config, lib, pkgs, ... }:
|
||||
with lib;
|
||||
let
|
||||
cfg = config.services.gitea-mirror;
|
||||
in {
|
||||
options.services.gitea-mirror = {
|
||||
enable = mkEnableOption "Gitea Mirror service";
|
||||
|
||||
package = mkOption {
|
||||
type = types.package;
|
||||
default = self.packages.${pkgs.system}.default;
|
||||
description = "The Gitea Mirror package to use";
|
||||
};
|
||||
|
||||
dataDir = mkOption {
|
||||
type = types.path;
|
||||
default = "/var/lib/gitea-mirror";
|
||||
description = "Directory to store data and database";
|
||||
};
|
||||
|
||||
user = mkOption {
|
||||
type = types.str;
|
||||
default = "gitea-mirror";
|
||||
description = "User account under which Gitea Mirror runs";
|
||||
};
|
||||
|
||||
group = mkOption {
|
||||
type = types.str;
|
||||
default = "gitea-mirror";
|
||||
description = "Group under which Gitea Mirror runs";
|
||||
};
|
||||
|
||||
host = mkOption {
|
||||
type = types.str;
|
||||
default = "0.0.0.0";
|
||||
description = "Host to bind to";
|
||||
};
|
||||
|
||||
port = mkOption {
|
||||
type = types.port;
|
||||
default = 4321;
|
||||
description = "Port to listen on";
|
||||
};
|
||||
|
||||
betterAuthUrl = mkOption {
|
||||
type = types.str;
|
||||
default = "http://localhost:4321";
|
||||
description = "Better Auth URL (external URL of the service)";
|
||||
};
|
||||
|
||||
betterAuthTrustedOrigins = mkOption {
|
||||
type = types.str;
|
||||
default = "http://localhost:4321";
|
||||
description = "Comma-separated list of trusted origins for Better Auth";
|
||||
};
|
||||
|
||||
mirrorIssueConcurrency = mkOption {
|
||||
type = types.int;
|
||||
default = 3;
|
||||
description = "Number of concurrent issue mirror operations (set to 1 for perfect ordering)";
|
||||
};
|
||||
|
||||
mirrorPullRequestConcurrency = mkOption {
|
||||
type = types.int;
|
||||
default = 5;
|
||||
description = "Number of concurrent PR mirror operations (set to 1 for perfect ordering)";
|
||||
};
|
||||
|
||||
environmentFile = mkOption {
|
||||
type = types.nullOr types.path;
|
||||
default = null;
|
||||
description = ''
|
||||
Path to file containing environment variables.
|
||||
Only needed if you want to set BETTER_AUTH_SECRET or ENCRYPTION_SECRET manually.
|
||||
Otherwise, secrets will be auto-generated and stored in the data directory.
|
||||
|
||||
Example:
|
||||
BETTER_AUTH_SECRET=your-32-character-secret-here
|
||||
ENCRYPTION_SECRET=your-encryption-secret-here
|
||||
'';
|
||||
};
|
||||
|
||||
openFirewall = mkOption {
|
||||
type = types.bool;
|
||||
default = false;
|
||||
description = "Open the firewall for the specified port";
|
||||
};
|
||||
};
|
||||
|
||||
config = mkIf cfg.enable {
|
||||
users.users.${cfg.user} = {
|
||||
isSystemUser = true;
|
||||
group = cfg.group;
|
||||
home = cfg.dataDir;
|
||||
createHome = true;
|
||||
};
|
||||
|
||||
users.groups.${cfg.group} = {};
|
||||
|
||||
systemd.services.gitea-mirror = {
|
||||
description = "Gitea Mirror - GitHub to Gitea mirroring service";
|
||||
after = [ "network.target" ];
|
||||
wantedBy = [ "multi-user.target" ];
|
||||
|
||||
environment = {
|
||||
DATA_DIR = cfg.dataDir;
|
||||
DATABASE_URL = "file:${cfg.dataDir}/gitea-mirror.db";
|
||||
HOST = cfg.host;
|
||||
PORT = toString cfg.port;
|
||||
NODE_ENV = "production";
|
||||
BETTER_AUTH_URL = cfg.betterAuthUrl;
|
||||
BETTER_AUTH_TRUSTED_ORIGINS = cfg.betterAuthTrustedOrigins;
|
||||
PUBLIC_BETTER_AUTH_URL = cfg.betterAuthUrl;
|
||||
MIRROR_ISSUE_CONCURRENCY = toString cfg.mirrorIssueConcurrency;
|
||||
MIRROR_PULL_REQUEST_CONCURRENCY = toString cfg.mirrorPullRequestConcurrency;
|
||||
};
|
||||
|
||||
serviceConfig = {
|
||||
Type = "simple";
|
||||
User = cfg.user;
|
||||
Group = cfg.group;
|
||||
ExecStart = "${cfg.package}/bin/gitea-mirror";
|
||||
Restart = "always";
|
||||
RestartSec = "10s";
|
||||
|
||||
# Security hardening
|
||||
NoNewPrivileges = true;
|
||||
PrivateTmp = true;
|
||||
ProtectSystem = "strict";
|
||||
ProtectHome = true;
|
||||
ReadWritePaths = [ cfg.dataDir ];
|
||||
|
||||
# Graceful shutdown
|
||||
TimeoutStopSec = "30s";
|
||||
KillMode = "mixed";
|
||||
KillSignal = "SIGTERM";
|
||||
} // optionalAttrs (cfg.environmentFile != null) {
|
||||
EnvironmentFile = cfg.environmentFile;
|
||||
};
|
||||
};
|
||||
|
||||
# Health check timer (optional monitoring)
|
||||
systemd.timers.gitea-mirror-healthcheck = {
|
||||
description = "Gitea Mirror health check timer";
|
||||
wantedBy = [ "timers.target" ];
|
||||
timerConfig = {
|
||||
OnBootSec = "5min";
|
||||
OnUnitActiveSec = "5min";
|
||||
};
|
||||
};
|
||||
|
||||
systemd.services.gitea-mirror-healthcheck = {
|
||||
description = "Gitea Mirror health check";
|
||||
after = [ "gitea-mirror.service" ];
|
||||
serviceConfig = {
|
||||
Type = "oneshot";
|
||||
ExecStart = "${pkgs.bash}/bin/bash -c '${pkgs.curl}/bin/curl -f http://127.0.0.1:${toString cfg.port}/api/health || true'";
|
||||
User = "nobody";
|
||||
};
|
||||
};
|
||||
|
||||
networking.firewall = mkIf cfg.openFirewall {
|
||||
allowedTCPPorts = [ cfg.port ];
|
||||
};
|
||||
};
|
||||
};
|
||||
|
||||
# Overlay for adding to nixpkgs
|
||||
overlays.default = final: prev: {
|
||||
gitea-mirror = self.packages.${final.system}.default;
|
||||
};
|
||||
};
|
||||
}
|
||||
72
package.json
72
package.json
@@ -1,7 +1,7 @@
|
||||
{
|
||||
"name": "gitea-mirror",
|
||||
"type": "module",
|
||||
"version": "3.9.2",
|
||||
"version": "3.10.1",
|
||||
"engines": {
|
||||
"bun": ">=1.2.9"
|
||||
},
|
||||
@@ -16,6 +16,7 @@
|
||||
"check-db": "bun scripts/manage-db.ts check",
|
||||
"fix-db": "bun scripts/manage-db.ts fix",
|
||||
"reset-users": "bun scripts/manage-db.ts reset-users",
|
||||
"reset-password": "bun scripts/manage-db.ts reset-password",
|
||||
"db:generate": "bun drizzle-kit generate",
|
||||
"db:migrate": "bun drizzle-kit migrate",
|
||||
"db:push": "bun drizzle-kit push",
|
||||
@@ -35,18 +36,22 @@
|
||||
"test": "bun test",
|
||||
"test:watch": "bun test --watch",
|
||||
"test:coverage": "bun test --coverage",
|
||||
"test:e2e": "bash tests/e2e/run-e2e.sh",
|
||||
"test:e2e:ci": "bash tests/e2e/run-e2e.sh --ci",
|
||||
"test:e2e:keep": "bash tests/e2e/run-e2e.sh --keep",
|
||||
"test:e2e:cleanup": "bash tests/e2e/cleanup.sh",
|
||||
"astro": "bunx --bun astro"
|
||||
},
|
||||
"overrides": {
|
||||
"@esbuild-kit/esm-loader": "npm:tsx@^4.20.6",
|
||||
"devalue": "^5.4.2"
|
||||
"@esbuild-kit/esm-loader": "npm:tsx@^4.21.0",
|
||||
"devalue": "^5.5.0"
|
||||
},
|
||||
"dependencies": {
|
||||
"@astrojs/check": "^0.9.5",
|
||||
"@astrojs/mdx": "4.3.10",
|
||||
"@astrojs/node": "9.5.0",
|
||||
"@astrojs/check": "^0.9.6",
|
||||
"@astrojs/mdx": "4.3.13",
|
||||
"@astrojs/node": "9.5.4",
|
||||
"@astrojs/react": "^4.4.2",
|
||||
"@better-auth/sso": "1.4.0-beta.12",
|
||||
"@better-auth/sso": "1.4.19",
|
||||
"@octokit/plugin-throttling": "^11.0.3",
|
||||
"@octokit/rest": "^22.0.1",
|
||||
"@radix-ui/react-accordion": "^1.2.12",
|
||||
@@ -67,49 +72,52 @@
|
||||
"@radix-ui/react-switch": "^1.2.6",
|
||||
"@radix-ui/react-tabs": "^1.1.13",
|
||||
"@radix-ui/react-tooltip": "^1.2.8",
|
||||
"@tailwindcss/vite": "^4.1.17",
|
||||
"@tanstack/react-virtual": "^3.13.12",
|
||||
"@tailwindcss/vite": "^4.2.1",
|
||||
"@tanstack/react-virtual": "^3.13.19",
|
||||
"@types/canvas-confetti": "^1.9.0",
|
||||
"@types/react": "^19.2.2",
|
||||
"@types/react-dom": "^19.2.2",
|
||||
"astro": "^5.15.4",
|
||||
"@types/react": "^19.2.14",
|
||||
"@types/react-dom": "^19.2.3",
|
||||
"astro": "^5.18.0",
|
||||
"bcryptjs": "^3.0.3",
|
||||
"better-auth": "1.4.19",
|
||||
"buffer": "^6.0.3",
|
||||
"better-auth": "1.4.0-beta.13",
|
||||
"canvas-confetti": "^1.9.3",
|
||||
"canvas-confetti": "^1.9.4",
|
||||
"class-variance-authority": "^0.7.1",
|
||||
"clsx": "^2.1.1",
|
||||
"cmdk": "^1.1.1",
|
||||
"dotenv": "^17.2.3",
|
||||
"drizzle-orm": "^0.44.7",
|
||||
"dotenv": "^17.3.1",
|
||||
"drizzle-orm": "^0.45.1",
|
||||
"fuse.js": "^7.1.0",
|
||||
"jsonwebtoken": "^9.0.2",
|
||||
"lucide-react": "^0.553.0",
|
||||
"jsonwebtoken": "^9.0.3",
|
||||
"lucide-react": "^0.575.0",
|
||||
"nanoid": "^3.3.11",
|
||||
"next-themes": "^0.4.6",
|
||||
"react": "^19.2.0",
|
||||
"react-dom": "^19.2.0",
|
||||
"react": "^19.2.4",
|
||||
"react-dom": "^19.2.4",
|
||||
"react-icons": "^5.5.0",
|
||||
"sonner": "^2.0.7",
|
||||
"tailwind-merge": "^3.3.1",
|
||||
"tailwindcss": "^4.1.17",
|
||||
"tailwind-merge": "^3.5.0",
|
||||
"tailwindcss": "^4.2.1",
|
||||
"tw-animate-css": "^1.4.0",
|
||||
"typescript": "^5.9.3",
|
||||
"uuid": "^13.0.0",
|
||||
"vaul": "^1.1.2",
|
||||
"zod": "^4.1.12"
|
||||
"zod": "^4.3.6"
|
||||
},
|
||||
"devDependencies": {
|
||||
"@playwright/test": "^1.58.2",
|
||||
"@testing-library/jest-dom": "^6.9.1",
|
||||
"@testing-library/react": "^16.3.0",
|
||||
"@testing-library/react": "^16.3.2",
|
||||
"@types/bcryptjs": "^3.0.0",
|
||||
"@types/bun": "^1.3.1",
|
||||
"@types/bun": "^1.3.9",
|
||||
"@types/jsonwebtoken": "^9.0.10",
|
||||
"@types/uuid": "^10.0.0",
|
||||
"@vitejs/plugin-react": "^5.1.0",
|
||||
"drizzle-kit": "^0.31.6",
|
||||
"jsdom": "^26.1.0",
|
||||
"tsx": "^4.20.6",
|
||||
"vitest": "^3.2.4"
|
||||
"@types/node": "^25.3.2",
|
||||
"@types/uuid": "^11.0.0",
|
||||
"@vitejs/plugin-react": "^5.1.4",
|
||||
"drizzle-kit": "^0.31.9",
|
||||
"jsdom": "^28.1.0",
|
||||
"tsx": "^4.21.0",
|
||||
"vitest": "^4.0.18"
|
||||
},
|
||||
"packageManager": "bun@1.3.1"
|
||||
"packageManager": "bun@1.3.3"
|
||||
}
|
||||
|
||||
@@ -4,9 +4,9 @@ import { Database } from "bun:sqlite";
|
||||
import { drizzle } from "drizzle-orm/bun-sqlite";
|
||||
import { migrate } from "drizzle-orm/bun-sqlite/migrator";
|
||||
import { v4 as uuidv4 } from "uuid";
|
||||
import { users, configs, repositories, organizations, mirrorJobs, events } from "../src/lib/db/schema";
|
||||
import bcrypt from "bcryptjs";
|
||||
import { eq } from "drizzle-orm";
|
||||
import { users, configs, repositories, organizations, mirrorJobs, events, accounts, sessions } from "../src/lib/db/schema";
|
||||
import { and, eq } from "drizzle-orm";
|
||||
import { hashPassword } from "better-auth/crypto";
|
||||
|
||||
// Command line arguments
|
||||
const args = process.argv.slice(2);
|
||||
@@ -194,6 +194,92 @@ async function fixDatabase() {
|
||||
console.log("✅ Database location fixed");
|
||||
}
|
||||
|
||||
/**
|
||||
* Reset a single user's password (admin recovery flow)
|
||||
*/
|
||||
async function resetPassword() {
|
||||
const emailArg = args.find((arg) => arg.startsWith("--email="));
|
||||
const passwordArg = args.find((arg) => arg.startsWith("--new-password="));
|
||||
const email = emailArg?.split("=")[1]?.trim().toLowerCase();
|
||||
const newPassword = passwordArg?.split("=")[1];
|
||||
|
||||
if (!email || !newPassword) {
|
||||
console.log("❌ Missing required arguments");
|
||||
console.log("Usage:");
|
||||
console.log(" bun run manage-db reset-password --email=user@example.com --new-password='new-secure-password'");
|
||||
process.exit(1);
|
||||
}
|
||||
|
||||
if (newPassword.length < 8) {
|
||||
console.log("❌ Password must be at least 8 characters");
|
||||
process.exit(1);
|
||||
}
|
||||
|
||||
if (!fs.existsSync(dbPath)) {
|
||||
console.log("❌ Database does not exist");
|
||||
process.exit(1);
|
||||
}
|
||||
|
||||
const sqlite = new Database(dbPath);
|
||||
const db = drizzle({ client: sqlite });
|
||||
|
||||
try {
|
||||
const user = await db.query.users.findFirst({
|
||||
where: eq(users.email, email),
|
||||
});
|
||||
|
||||
if (!user) {
|
||||
console.log(`❌ No user found for email: ${email}`);
|
||||
sqlite.close();
|
||||
process.exit(1);
|
||||
}
|
||||
|
||||
const hashedPassword = await hashPassword(newPassword);
|
||||
const now = new Date();
|
||||
|
||||
const credentialAccount = await db.query.accounts.findFirst({
|
||||
where: and(
|
||||
eq(accounts.userId, user.id),
|
||||
eq(accounts.providerId, "credential"),
|
||||
),
|
||||
});
|
||||
|
||||
if (credentialAccount) {
|
||||
await db
|
||||
.update(accounts)
|
||||
.set({
|
||||
password: hashedPassword,
|
||||
updatedAt: now,
|
||||
})
|
||||
.where(eq(accounts.id, credentialAccount.id));
|
||||
} else {
|
||||
await db.insert(accounts).values({
|
||||
id: uuidv4(),
|
||||
accountId: user.id,
|
||||
userId: user.id,
|
||||
providerId: "credential",
|
||||
password: hashedPassword,
|
||||
createdAt: now,
|
||||
updatedAt: now,
|
||||
});
|
||||
}
|
||||
|
||||
const deletedSessions = await db
|
||||
.delete(sessions)
|
||||
.where(eq(sessions.userId, user.id))
|
||||
.returning({ id: sessions.id });
|
||||
|
||||
console.log(`✅ Password reset for ${email}`);
|
||||
console.log(`🔒 Cleared ${deletedSessions.length} active session(s)`);
|
||||
|
||||
sqlite.close();
|
||||
} catch (error) {
|
||||
console.error("❌ Error resetting password:", error);
|
||||
sqlite.close();
|
||||
process.exit(1);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Auto mode - check and initialize if needed
|
||||
*/
|
||||
@@ -224,6 +310,9 @@ switch (command) {
|
||||
case "cleanup":
|
||||
await cleanupDatabase();
|
||||
break;
|
||||
case "reset-password":
|
||||
await resetPassword();
|
||||
break;
|
||||
case "auto":
|
||||
await autoMode();
|
||||
break;
|
||||
@@ -233,7 +322,8 @@ switch (command) {
|
||||
console.log(" check - Check database status");
|
||||
console.log(" fix - Fix database location issues");
|
||||
console.log(" reset-users - Remove all users and related data");
|
||||
console.log(" reset-password - Reset one user's password and clear sessions");
|
||||
console.log(" cleanup - Remove all database files");
|
||||
console.log(" auto - Auto initialize if needed");
|
||||
process.exit(1);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -42,12 +42,18 @@ export function ConfigTabs() {
|
||||
},
|
||||
giteaConfig: {
|
||||
url: '',
|
||||
externalUrl: '',
|
||||
username: '',
|
||||
token: '',
|
||||
organization: 'github-mirrors',
|
||||
visibility: 'public',
|
||||
starredReposOrg: 'starred',
|
||||
starredReposMode: 'dedicated-org',
|
||||
preserveOrgStructure: false,
|
||||
backupStrategy: "on-force-push",
|
||||
backupRetentionCount: 20,
|
||||
backupDirectory: 'data/repo-backups',
|
||||
blockSyncOnBackupFailure: true,
|
||||
},
|
||||
scheduleConfig: {
|
||||
enabled: false, // Don't set defaults here - will be loaded from API
|
||||
@@ -77,6 +83,7 @@ export function ConfigTabs() {
|
||||
advancedOptions: {
|
||||
skipForks: false,
|
||||
starredCodeOnly: false,
|
||||
autoMirrorStarred: false,
|
||||
},
|
||||
});
|
||||
const { user } = useAuth();
|
||||
@@ -654,9 +661,20 @@ export function ConfigTabs() {
|
||||
: update,
|
||||
}))
|
||||
}
|
||||
giteaConfig={config.giteaConfig}
|
||||
setGiteaConfig={update =>
|
||||
setConfig(prev => ({
|
||||
...prev,
|
||||
giteaConfig:
|
||||
typeof update === 'function'
|
||||
? update(prev.giteaConfig)
|
||||
: update,
|
||||
}))
|
||||
}
|
||||
onAutoSave={autoSaveGitHubConfig}
|
||||
onMirrorOptionsAutoSave={autoSaveMirrorOptions}
|
||||
onAdvancedOptionsAutoSave={autoSaveAdvancedOptions}
|
||||
onGiteaAutoSave={autoSaveGiteaConfig}
|
||||
isAutoSaving={isAutoSavingGitHub}
|
||||
/>
|
||||
<GiteaConfigForm
|
||||
|
||||
@@ -7,10 +7,11 @@ import {
|
||||
CardTitle,
|
||||
} from "@/components/ui/card";
|
||||
import { githubApi } from "@/lib/api";
|
||||
import type { GitHubConfig, MirrorOptions, AdvancedOptions } from "@/types/config";
|
||||
import type { GitHubConfig, MirrorOptions, AdvancedOptions, GiteaConfig, BackupStrategy } from "@/types/config";
|
||||
import { Input } from "../ui/input";
|
||||
import { toast } from "sonner";
|
||||
import { Info } from "lucide-react";
|
||||
import { Info, ShieldAlert } from "lucide-react";
|
||||
import { Badge } from "@/components/ui/badge";
|
||||
import { GitHubMirrorSettings } from "./GitHubMirrorSettings";
|
||||
import { Separator } from "../ui/separator";
|
||||
import {
|
||||
@@ -26,23 +27,29 @@ interface GitHubConfigFormProps {
|
||||
setMirrorOptions: React.Dispatch<React.SetStateAction<MirrorOptions>>;
|
||||
advancedOptions: AdvancedOptions;
|
||||
setAdvancedOptions: React.Dispatch<React.SetStateAction<AdvancedOptions>>;
|
||||
giteaConfig?: GiteaConfig;
|
||||
setGiteaConfig?: React.Dispatch<React.SetStateAction<GiteaConfig>>;
|
||||
onAutoSave?: (githubConfig: GitHubConfig) => Promise<void>;
|
||||
onMirrorOptionsAutoSave?: (mirrorOptions: MirrorOptions) => Promise<void>;
|
||||
onAdvancedOptionsAutoSave?: (advancedOptions: AdvancedOptions) => Promise<void>;
|
||||
onGiteaAutoSave?: (giteaConfig: GiteaConfig) => Promise<void>;
|
||||
isAutoSaving?: boolean;
|
||||
}
|
||||
|
||||
export function GitHubConfigForm({
|
||||
config,
|
||||
setConfig,
|
||||
config,
|
||||
setConfig,
|
||||
mirrorOptions,
|
||||
setMirrorOptions,
|
||||
advancedOptions,
|
||||
setAdvancedOptions,
|
||||
onAutoSave,
|
||||
giteaConfig,
|
||||
setGiteaConfig,
|
||||
onAutoSave,
|
||||
onMirrorOptionsAutoSave,
|
||||
onAdvancedOptionsAutoSave,
|
||||
isAutoSaving
|
||||
onGiteaAutoSave,
|
||||
isAutoSaving
|
||||
}: GitHubConfigFormProps) {
|
||||
const [isLoading, setIsLoading] = useState(false);
|
||||
|
||||
@@ -202,7 +209,139 @@ export function GitHubConfigForm({
|
||||
if (onAdvancedOptionsAutoSave) onAdvancedOptionsAutoSave(newOptions);
|
||||
}}
|
||||
/>
|
||||
|
||||
|
||||
{giteaConfig && setGiteaConfig && (
|
||||
<>
|
||||
<Separator />
|
||||
|
||||
<div className="space-y-4">
|
||||
<h3 className="text-sm font-medium flex items-center gap-2">
|
||||
<ShieldAlert className="h-4 w-4 text-primary" />
|
||||
Destructive Update Protection
|
||||
<Badge variant="secondary" className="ml-2 text-[10px] px-1.5 py-0">BETA</Badge>
|
||||
</h3>
|
||||
<p className="text-xs text-muted-foreground">
|
||||
Choose how to handle force-pushes or rewritten upstream history on GitHub.
|
||||
</p>
|
||||
|
||||
<div className="grid grid-cols-2 md:grid-cols-4 gap-2">
|
||||
{([
|
||||
{
|
||||
value: "disabled",
|
||||
label: "Disabled",
|
||||
desc: "No detection or backups",
|
||||
},
|
||||
{
|
||||
value: "always",
|
||||
label: "Always Backup",
|
||||
desc: "Snapshot before every sync",
|
||||
},
|
||||
{
|
||||
value: "on-force-push",
|
||||
label: "Smart",
|
||||
desc: "Backup only on force-push",
|
||||
},
|
||||
{
|
||||
value: "block-on-force-push",
|
||||
label: "Block & Approve",
|
||||
desc: "Require approval on force-push",
|
||||
},
|
||||
] as const).map((opt) => {
|
||||
const isSelected = (giteaConfig.backupStrategy ?? "on-force-push") === opt.value;
|
||||
return (
|
||||
<button
|
||||
key={opt.value}
|
||||
type="button"
|
||||
onClick={() => {
|
||||
const newConfig = { ...giteaConfig, backupStrategy: opt.value as BackupStrategy };
|
||||
setGiteaConfig(newConfig);
|
||||
if (onGiteaAutoSave) onGiteaAutoSave(newConfig);
|
||||
}}
|
||||
className={`flex flex-col items-start gap-1 rounded-lg border p-3 text-left text-sm transition-colors ${
|
||||
isSelected
|
||||
? "border-primary bg-primary/5 ring-1 ring-primary"
|
||||
: "border-input hover:bg-accent hover:text-accent-foreground"
|
||||
}`}
|
||||
>
|
||||
<span className="font-medium">{opt.label}</span>
|
||||
<span className="text-xs text-muted-foreground">{opt.desc}</span>
|
||||
</button>
|
||||
);
|
||||
})}
|
||||
</div>
|
||||
|
||||
{(giteaConfig.backupStrategy ?? "on-force-push") !== "disabled" && (
|
||||
<>
|
||||
<div className="grid grid-cols-1 md:grid-cols-2 gap-4">
|
||||
<div>
|
||||
<label htmlFor="backup-retention" className="block text-sm font-medium mb-1.5">
|
||||
Snapshot retention count
|
||||
</label>
|
||||
<input
|
||||
id="backup-retention"
|
||||
name="backupRetentionCount"
|
||||
type="number"
|
||||
min={1}
|
||||
value={giteaConfig.backupRetentionCount ?? 20}
|
||||
onChange={(e) => {
|
||||
const newConfig = {
|
||||
...giteaConfig,
|
||||
backupRetentionCount: Math.max(1, Number.parseInt(e.target.value, 10) || 20),
|
||||
};
|
||||
setGiteaConfig(newConfig);
|
||||
if (onGiteaAutoSave) onGiteaAutoSave(newConfig);
|
||||
}}
|
||||
className="w-full rounded-md border border-input bg-background px-3 py-2 text-sm shadow-sm transition-colors placeholder:text-muted-foreground focus-visible:outline-none focus-visible:ring-1 focus-visible:ring-ring"
|
||||
/>
|
||||
</div>
|
||||
<div>
|
||||
<label htmlFor="backup-directory" className="block text-sm font-medium mb-1.5">
|
||||
Snapshot directory
|
||||
</label>
|
||||
<input
|
||||
id="backup-directory"
|
||||
name="backupDirectory"
|
||||
type="text"
|
||||
value={giteaConfig.backupDirectory || "data/repo-backups"}
|
||||
onChange={(e) => {
|
||||
const newConfig = { ...giteaConfig, backupDirectory: e.target.value };
|
||||
setGiteaConfig(newConfig);
|
||||
if (onGiteaAutoSave) onGiteaAutoSave(newConfig);
|
||||
}}
|
||||
className="w-full rounded-md border border-input bg-background px-3 py-2 text-sm shadow-sm transition-colors placeholder:text-muted-foreground focus-visible:outline-none focus-visible:ring-1 focus-visible:ring-ring"
|
||||
placeholder="data/repo-backups"
|
||||
/>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
{((giteaConfig.backupStrategy ?? "on-force-push") === "always" ||
|
||||
(giteaConfig.backupStrategy ?? "on-force-push") === "on-force-push") && (
|
||||
<label className="flex items-start gap-3 text-sm">
|
||||
<input
|
||||
name="blockSyncOnBackupFailure"
|
||||
type="checkbox"
|
||||
checked={Boolean(giteaConfig.blockSyncOnBackupFailure)}
|
||||
onChange={(e) => {
|
||||
const newConfig = { ...giteaConfig, blockSyncOnBackupFailure: e.target.checked };
|
||||
setGiteaConfig(newConfig);
|
||||
if (onGiteaAutoSave) onGiteaAutoSave(newConfig);
|
||||
}}
|
||||
className="mt-0.5 rounded border-input"
|
||||
/>
|
||||
<span>
|
||||
Block sync when snapshot fails
|
||||
<p className="text-xs text-muted-foreground">
|
||||
Recommended for backup-first behavior. If disabled, sync continues even when snapshot creation fails.
|
||||
</p>
|
||||
</span>
|
||||
</label>
|
||||
)}
|
||||
</>
|
||||
)}
|
||||
</div>
|
||||
</>
|
||||
)}
|
||||
|
||||
{/* Mobile: Show button at bottom */}
|
||||
<Button
|
||||
type="button"
|
||||
|
||||
@@ -287,6 +287,31 @@ export function GitHubMirrorSettings({
|
||||
</div>
|
||||
</div>
|
||||
|
||||
{/* Auto-mirror starred repos toggle */}
|
||||
{githubConfig.mirrorStarred && (
|
||||
<div className="mt-4">
|
||||
<div className="flex items-start space-x-3">
|
||||
<Checkbox
|
||||
id="auto-mirror-starred"
|
||||
checked={advancedOptions.autoMirrorStarred ?? false}
|
||||
onCheckedChange={(checked) => handleAdvancedChange('autoMirrorStarred', !!checked)}
|
||||
/>
|
||||
<div className="space-y-0.5 flex-1">
|
||||
<Label
|
||||
htmlFor="auto-mirror-starred"
|
||||
className="text-sm font-normal cursor-pointer flex items-center gap-2"
|
||||
>
|
||||
<Star className="h-3.5 w-3.5" />
|
||||
Auto-mirror new starred repositories
|
||||
</Label>
|
||||
<p className="text-xs text-muted-foreground">
|
||||
When disabled, starred repos are imported for browsing but not automatically mirrored. You can still mirror individual repos manually.
|
||||
</p>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
)}
|
||||
|
||||
{/* Duplicate name handling for starred repos */}
|
||||
{githubConfig.mirrorStarred && (
|
||||
<div className="mt-4 space-y-2">
|
||||
@@ -377,14 +402,13 @@ export function GitHubMirrorSettings({
|
||||
id="release-limit"
|
||||
type="number"
|
||||
min="1"
|
||||
max="100"
|
||||
value={mirrorOptions.releaseLimit || 10}
|
||||
onChange={(e) => {
|
||||
const value = parseInt(e.target.value) || 10;
|
||||
const clampedValue = Math.min(100, Math.max(1, value));
|
||||
const clampedValue = Math.max(1, value);
|
||||
handleMirrorChange('releaseLimit', clampedValue);
|
||||
}}
|
||||
className="w-16 px-2 py-1 text-xs border border-input rounded bg-background text-foreground"
|
||||
className="w-20 px-2 py-1 text-xs border border-input rounded bg-background text-foreground"
|
||||
/>
|
||||
<span className="text-xs text-muted-foreground">releases</span>
|
||||
</div>
|
||||
|
||||
@@ -100,9 +100,14 @@ export function GiteaConfigForm({ config, setConfig, onAutoSave, isAutoSaving, g
|
||||
);
|
||||
}
|
||||
|
||||
const normalizedValue =
|
||||
type === "checkbox"
|
||||
? checked
|
||||
: value;
|
||||
|
||||
const newConfig = {
|
||||
...config,
|
||||
[name]: type === "checkbox" ? checked : value,
|
||||
[name]: normalizedValue,
|
||||
};
|
||||
setConfig(newConfig);
|
||||
|
||||
@@ -195,6 +200,27 @@ export function GiteaConfigForm({ config, setConfig, onAutoSave, isAutoSaving, g
|
||||
/>
|
||||
</div>
|
||||
|
||||
<div>
|
||||
<label
|
||||
htmlFor="gitea-external-url"
|
||||
className="block text-sm font-medium mb-1.5"
|
||||
>
|
||||
Gitea External URL (optional)
|
||||
</label>
|
||||
<input
|
||||
id="gitea-external-url"
|
||||
name="externalUrl"
|
||||
type="url"
|
||||
value={config.externalUrl || ""}
|
||||
onChange={handleChange}
|
||||
className="w-full rounded-md border border-input bg-background px-3 py-2 text-sm shadow-sm transition-colors placeholder:text-muted-foreground focus-visible:outline-none focus-visible:ring-1 focus-visible:ring-ring"
|
||||
placeholder="https://gitea.example.com"
|
||||
/>
|
||||
<p className="text-xs text-muted-foreground mt-1">
|
||||
Used only for dashboard links. API sync still uses Gitea URL.
|
||||
</p>
|
||||
</div>
|
||||
|
||||
<div>
|
||||
<label
|
||||
htmlFor="gitea-token"
|
||||
@@ -224,6 +250,7 @@ export function GiteaConfigForm({ config, setConfig, onAutoSave, isAutoSaving, g
|
||||
strategy={mirrorStrategy}
|
||||
destinationOrg={config.organization}
|
||||
starredReposOrg={config.starredReposOrg}
|
||||
starredReposMode={config.starredReposMode}
|
||||
onStrategyChange={setMirrorStrategy}
|
||||
githubUsername={githubUsername}
|
||||
giteaUsername={config.username}
|
||||
@@ -235,6 +262,7 @@ export function GiteaConfigForm({ config, setConfig, onAutoSave, isAutoSaving, g
|
||||
strategy={mirrorStrategy}
|
||||
destinationOrg={config.organization}
|
||||
starredReposOrg={config.starredReposOrg}
|
||||
starredReposMode={config.starredReposMode}
|
||||
personalReposOrg={config.personalReposOrg}
|
||||
visibility={config.visibility}
|
||||
onDestinationOrgChange={(org) => {
|
||||
@@ -247,6 +275,11 @@ export function GiteaConfigForm({ config, setConfig, onAutoSave, isAutoSaving, g
|
||||
setConfig(newConfig);
|
||||
if (onAutoSave) onAutoSave(newConfig);
|
||||
}}
|
||||
onStarredReposModeChange={(mode) => {
|
||||
const newConfig = { ...config, starredReposMode: mode };
|
||||
setConfig(newConfig);
|
||||
if (onAutoSave) onAutoSave(newConfig);
|
||||
}}
|
||||
onPersonalReposOrgChange={(org) => {
|
||||
const newConfig = { ...config, personalReposOrg: org };
|
||||
setConfig(newConfig);
|
||||
@@ -258,7 +291,7 @@ export function GiteaConfigForm({ config, setConfig, onAutoSave, isAutoSaving, g
|
||||
if (onAutoSave) onAutoSave(newConfig);
|
||||
}}
|
||||
/>
|
||||
|
||||
|
||||
{/* Mobile: Show button at bottom */}
|
||||
<Button
|
||||
type="button"
|
||||
|
||||
@@ -9,16 +9,18 @@ import {
|
||||
TooltipTrigger,
|
||||
} from "@/components/ui/tooltip";
|
||||
import { cn } from "@/lib/utils";
|
||||
import type { MirrorStrategy, GiteaOrgVisibility } from "@/types/config";
|
||||
import type { MirrorStrategy, GiteaOrgVisibility, StarredReposMode } from "@/types/config";
|
||||
|
||||
interface OrganizationConfigurationProps {
|
||||
strategy: MirrorStrategy;
|
||||
destinationOrg?: string;
|
||||
starredReposOrg?: string;
|
||||
starredReposMode?: StarredReposMode;
|
||||
personalReposOrg?: string;
|
||||
visibility: GiteaOrgVisibility;
|
||||
onDestinationOrgChange: (org: string) => void;
|
||||
onStarredReposOrgChange: (org: string) => void;
|
||||
onStarredReposModeChange: (mode: StarredReposMode) => void;
|
||||
onPersonalReposOrgChange: (org: string) => void;
|
||||
onVisibilityChange: (visibility: GiteaOrgVisibility) => void;
|
||||
}
|
||||
@@ -33,13 +35,19 @@ export const OrganizationConfiguration: React.FC<OrganizationConfigurationProps>
|
||||
strategy,
|
||||
destinationOrg,
|
||||
starredReposOrg,
|
||||
starredReposMode,
|
||||
personalReposOrg,
|
||||
visibility,
|
||||
onDestinationOrgChange,
|
||||
onStarredReposOrgChange,
|
||||
onStarredReposModeChange,
|
||||
onPersonalReposOrgChange,
|
||||
onVisibilityChange,
|
||||
}) => {
|
||||
const activeStarredMode = starredReposMode || "dedicated-org";
|
||||
const showStarredReposOrgInput = activeStarredMode === "dedicated-org";
|
||||
const showDestinationOrgInput = strategy === "single-org" || strategy === "mixed";
|
||||
|
||||
return (
|
||||
<div className="space-y-4">
|
||||
<div>
|
||||
@@ -49,38 +57,94 @@ export const OrganizationConfiguration: React.FC<OrganizationConfigurationProps>
|
||||
</h4>
|
||||
</div>
|
||||
|
||||
{/* First row - Organization inputs with consistent layout */}
|
||||
<div className="grid grid-cols-1 md:grid-cols-2 gap-4">
|
||||
{/* Left column - always shows starred repos org */}
|
||||
<div className="space-y-1">
|
||||
<Label htmlFor="starredReposOrg" className="text-sm font-normal flex items-center gap-2">
|
||||
<Star className="h-3.5 w-3.5" />
|
||||
Starred Repos Organization
|
||||
<TooltipProvider>
|
||||
<Tooltip>
|
||||
<TooltipTrigger>
|
||||
<Info className="h-3.5 w-3.5 text-muted-foreground" />
|
||||
</TooltipTrigger>
|
||||
<TooltipContent>
|
||||
<p>Starred repositories will be organized separately in this organization</p>
|
||||
</TooltipContent>
|
||||
</Tooltip>
|
||||
</TooltipProvider>
|
||||
</Label>
|
||||
<Input
|
||||
id="starredReposOrg"
|
||||
value={starredReposOrg || ""}
|
||||
onChange={(e) => onStarredReposOrgChange(e.target.value)}
|
||||
placeholder="starred"
|
||||
className=""
|
||||
/>
|
||||
<p className="text-xs text-muted-foreground mt-1">
|
||||
Keep starred repos organized separately
|
||||
<div className="space-y-2">
|
||||
<Label className="text-sm font-normal flex items-center gap-2">
|
||||
Starred Repository Destination
|
||||
<TooltipProvider>
|
||||
<Tooltip>
|
||||
<TooltipTrigger>
|
||||
<Info className="h-3.5 w-3.5 text-muted-foreground" />
|
||||
</TooltipTrigger>
|
||||
<TooltipContent>
|
||||
<p>Choose whether starred repos use one org or keep their source Owner/Org paths</p>
|
||||
</TooltipContent>
|
||||
</Tooltip>
|
||||
</TooltipProvider>
|
||||
</Label>
|
||||
<div className="rounded-lg border bg-muted/20 p-2">
|
||||
<div className="grid grid-cols-1 sm:grid-cols-2 gap-2">
|
||||
<button
|
||||
type="button"
|
||||
onClick={() => onStarredReposModeChange("dedicated-org")}
|
||||
aria-pressed={activeStarredMode === "dedicated-org"}
|
||||
className={cn(
|
||||
"text-left px-3 py-2 rounded-md border text-sm transition-all",
|
||||
activeStarredMode === "dedicated-org"
|
||||
? "bg-accent border-accent-foreground/30 ring-1 ring-accent-foreground/20 font-medium shadow-sm"
|
||||
: "bg-background hover:bg-accent/50 border-input"
|
||||
)}
|
||||
>
|
||||
Dedicated Organization
|
||||
</button>
|
||||
<button
|
||||
type="button"
|
||||
onClick={() => onStarredReposModeChange("preserve-owner")}
|
||||
aria-pressed={activeStarredMode === "preserve-owner"}
|
||||
className={cn(
|
||||
"text-left px-3 py-2 rounded-md border text-sm transition-all",
|
||||
activeStarredMode === "preserve-owner"
|
||||
? "bg-accent border-accent-foreground/30 ring-1 ring-accent-foreground/20 font-medium shadow-sm"
|
||||
: "bg-background hover:bg-accent/50 border-input"
|
||||
)}
|
||||
>
|
||||
Preserve Source Owner/Org
|
||||
</button>
|
||||
</div>
|
||||
<p className="mt-2 px-1 text-xs text-muted-foreground">
|
||||
{
|
||||
activeStarredMode === "dedicated-org"
|
||||
? "All starred repositories go to a single destination organization."
|
||||
: "Starred repositories keep their original GitHub Owner/Org destination."
|
||||
}
|
||||
</p>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
{/* Right column - shows destination org for single-org/mixed, personal repos org for preserve, empty div for others */}
|
||||
{strategy === "single-org" || strategy === "mixed" ? (
|
||||
{/* First row - Organization inputs */}
|
||||
{(showStarredReposOrgInput || showDestinationOrgInput) && (
|
||||
<div className="grid grid-cols-1 md:grid-cols-2 gap-4">
|
||||
{showStarredReposOrgInput ? (
|
||||
<div className="space-y-1">
|
||||
<Label htmlFor="starredReposOrg" className="text-sm font-normal flex items-center gap-2">
|
||||
<Star className="h-3.5 w-3.5" />
|
||||
Starred Repos Organization
|
||||
<TooltipProvider>
|
||||
<Tooltip>
|
||||
<TooltipTrigger>
|
||||
<Info className="h-3.5 w-3.5 text-muted-foreground" />
|
||||
</TooltipTrigger>
|
||||
<TooltipContent>
|
||||
<p>Starred repositories will be organized separately in this organization</p>
|
||||
</TooltipContent>
|
||||
</Tooltip>
|
||||
</TooltipProvider>
|
||||
</Label>
|
||||
<Input
|
||||
id="starredReposOrg"
|
||||
value={starredReposOrg || ""}
|
||||
onChange={(e) => onStarredReposOrgChange(e.target.value)}
|
||||
placeholder="starred"
|
||||
className=""
|
||||
/>
|
||||
<p className="text-xs text-muted-foreground mt-1">
|
||||
Keep starred repos organized separately
|
||||
</p>
|
||||
</div>
|
||||
) : (
|
||||
<div className="hidden md:block" />
|
||||
)}
|
||||
|
||||
{showDestinationOrgInput ? (
|
||||
<div className="space-y-1">
|
||||
<Label htmlFor="destinationOrg" className="text-sm font-normal flex items-center gap-2">
|
||||
{strategy === "mixed" ? "Personal Repos Organization" : "Destination Organization"}
|
||||
@@ -114,10 +178,11 @@ export const OrganizationConfiguration: React.FC<OrganizationConfigurationProps>
|
||||
}
|
||||
</p>
|
||||
</div>
|
||||
) : (
|
||||
<div className="hidden md:block" />
|
||||
)}
|
||||
</div>
|
||||
) : (
|
||||
<div className="hidden md:block" />
|
||||
)}
|
||||
</div>
|
||||
)}
|
||||
|
||||
{/* Second row - Organization Visibility (always shown) */}
|
||||
<div className="space-y-2">
|
||||
@@ -172,4 +237,3 @@ export const OrganizationConfiguration: React.FC<OrganizationConfigurationProps>
|
||||
</div>
|
||||
);
|
||||
};
|
||||
|
||||
|
||||
@@ -8,6 +8,7 @@ import {
|
||||
HoverCardTrigger,
|
||||
} from "@/components/ui/hover-card";
|
||||
import { cn } from "@/lib/utils";
|
||||
import type { StarredReposMode } from "@/types/config";
|
||||
|
||||
export type MirrorStrategy = "preserve" | "single-org" | "flat-user" | "mixed";
|
||||
|
||||
@@ -15,6 +16,7 @@ interface OrganizationStrategyProps {
|
||||
strategy: MirrorStrategy;
|
||||
destinationOrg?: string;
|
||||
starredReposOrg?: string;
|
||||
starredReposMode?: StarredReposMode;
|
||||
onStrategyChange: (strategy: MirrorStrategy) => void;
|
||||
githubUsername?: string;
|
||||
giteaUsername?: string;
|
||||
@@ -76,13 +78,18 @@ const MappingPreview: React.FC<{
|
||||
config: typeof strategyConfig.preserve;
|
||||
destinationOrg?: string;
|
||||
starredReposOrg?: string;
|
||||
starredReposMode?: StarredReposMode;
|
||||
githubUsername?: string;
|
||||
giteaUsername?: string;
|
||||
}> = ({ strategy, config, destinationOrg, starredReposOrg, githubUsername, giteaUsername }) => {
|
||||
}> = ({ strategy, config, destinationOrg, starredReposOrg, starredReposMode, githubUsername, giteaUsername }) => {
|
||||
const displayGithubUsername = githubUsername || "<username>";
|
||||
const displayGiteaUsername = giteaUsername || "<username>";
|
||||
const isGithubPlaceholder = !githubUsername;
|
||||
const isGiteaPlaceholder = !giteaUsername;
|
||||
const starredDestination =
|
||||
(starredReposMode || "dedicated-org") === "preserve-owner"
|
||||
? "awesome/starred-repo"
|
||||
: `${starredReposOrg || "starred"}/starred-repo`;
|
||||
|
||||
if (strategy === "preserve") {
|
||||
return (
|
||||
@@ -122,7 +129,7 @@ const MappingPreview: React.FC<{
|
||||
</div>
|
||||
<div className={cn("flex items-center gap-2 p-1.5 rounded text-xs", config.repoColors.bg)}>
|
||||
<Building2 className={cn("h-3 w-3", config.repoColors.icon)} />
|
||||
<span>{starredReposOrg || "starred"}/starred-repo</span>
|
||||
<span>{starredDestination}</span>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
@@ -168,7 +175,7 @@ const MappingPreview: React.FC<{
|
||||
</div>
|
||||
<div className={cn("flex items-center gap-2 p-1.5 rounded text-xs", config.repoColors.bg)}>
|
||||
<Building2 className={cn("h-3 w-3", config.repoColors.icon)} />
|
||||
<span>{starredReposOrg || "starred"}/starred-repo</span>
|
||||
<span>{starredDestination}</span>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
@@ -214,7 +221,7 @@ const MappingPreview: React.FC<{
|
||||
</div>
|
||||
<div className={cn("flex items-center gap-2 p-1.5 rounded text-xs", config.repoColors.bg)}>
|
||||
<Building2 className={cn("h-3 w-3", config.repoColors.icon)} />
|
||||
<span>{starredReposOrg || "starred"}/starred-repo</span>
|
||||
<span>{starredDestination}</span>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
@@ -260,7 +267,7 @@ const MappingPreview: React.FC<{
|
||||
</div>
|
||||
<div className={cn("flex items-center gap-2 p-1.5 rounded text-xs", config.repoColors.bg)}>
|
||||
<Building2 className={cn("h-3 w-3", config.repoColors.icon)} />
|
||||
<span>{starredReposOrg || "starred"}/starred-repo</span>
|
||||
<span>{starredDestination}</span>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
@@ -275,6 +282,7 @@ export const OrganizationStrategy: React.FC<OrganizationStrategyProps> = ({
|
||||
strategy,
|
||||
destinationOrg,
|
||||
starredReposOrg,
|
||||
starredReposMode,
|
||||
onStrategyChange,
|
||||
githubUsername,
|
||||
giteaUsername,
|
||||
@@ -339,7 +347,7 @@ export const OrganizationStrategy: React.FC<OrganizationStrategyProps> = ({
|
||||
<span className="text-xs font-medium">Starred Repositories</span>
|
||||
</div>
|
||||
<p className="text-xs text-muted-foreground pl-5">
|
||||
Always go to the configured starred repos organization and cannot be overridden.
|
||||
Follow your starred-repo mode and cannot be overridden per repository.
|
||||
</p>
|
||||
</div>
|
||||
</div>
|
||||
@@ -415,6 +423,7 @@ export const OrganizationStrategy: React.FC<OrganizationStrategyProps> = ({
|
||||
config={config}
|
||||
destinationOrg={destinationOrg}
|
||||
starredReposOrg={starredReposOrg}
|
||||
starredReposMode={starredReposMode}
|
||||
githubUsername={githubUsername}
|
||||
giteaUsername={giteaUsername}
|
||||
/>
|
||||
@@ -434,4 +443,4 @@ export const OrganizationStrategy: React.FC<OrganizationStrategyProps> = ({
|
||||
</RadioGroup>
|
||||
</div>
|
||||
);
|
||||
};
|
||||
};
|
||||
|
||||
@@ -15,7 +15,8 @@ export function RepositoryList({ repositories }: RepositoryListProps) {
|
||||
|
||||
// Helper function to construct Gitea repository URL
|
||||
const getGiteaRepoUrl = (repository: Repository): string | null => {
|
||||
if (!giteaConfig?.url) {
|
||||
const rawBaseUrl = giteaConfig?.externalUrl || giteaConfig?.url;
|
||||
if (!rawBaseUrl) {
|
||||
return null;
|
||||
}
|
||||
|
||||
@@ -38,9 +39,9 @@ export function RepositoryList({ repositories }: RepositoryListProps) {
|
||||
}
|
||||
|
||||
// Ensure the base URL doesn't have a trailing slash
|
||||
const baseUrl = giteaConfig.url.endsWith('/')
|
||||
? giteaConfig.url.slice(0, -1)
|
||||
: giteaConfig.url;
|
||||
const baseUrl = rawBaseUrl.endsWith("/")
|
||||
? rawBaseUrl.slice(0, -1)
|
||||
: rawBaseUrl;
|
||||
|
||||
return `${baseUrl}/${repoPath}`;
|
||||
};
|
||||
|
||||
@@ -159,7 +159,7 @@ function AppWithProviders({ page: initialPage }: AppProps) {
|
||||
{currentPage === "activity-log" && <ActivityLog />}
|
||||
</section>
|
||||
</div>
|
||||
<Toaster />
|
||||
<Toaster position="top-center" />
|
||||
</main>
|
||||
</NavigationContext.Provider>
|
||||
);
|
||||
|
||||
@@ -67,7 +67,8 @@ export function OrganizationList({
|
||||
|
||||
// Helper function to construct Gitea organization URL
|
||||
const getGiteaOrgUrl = (organization: Organization): string | null => {
|
||||
if (!giteaConfig?.url) {
|
||||
const rawBaseUrl = giteaConfig?.externalUrl || giteaConfig?.url;
|
||||
if (!rawBaseUrl) {
|
||||
return null;
|
||||
}
|
||||
|
||||
@@ -84,9 +85,9 @@ export function OrganizationList({
|
||||
}
|
||||
|
||||
// Ensure the base URL doesn't have a trailing slash
|
||||
const baseUrl = giteaConfig.url.endsWith('/')
|
||||
? giteaConfig.url.slice(0, -1)
|
||||
: giteaConfig.url;
|
||||
const baseUrl = rawBaseUrl.endsWith("/")
|
||||
? rawBaseUrl.slice(0, -1)
|
||||
: rawBaseUrl;
|
||||
|
||||
return `${baseUrl}/${orgName}`;
|
||||
};
|
||||
|
||||
@@ -18,10 +18,12 @@ interface AddRepositoryDialogProps {
|
||||
repo,
|
||||
owner,
|
||||
force,
|
||||
destinationOrg,
|
||||
}: {
|
||||
repo: string;
|
||||
owner: string;
|
||||
force?: boolean;
|
||||
destinationOrg?: string;
|
||||
}) => Promise<void>;
|
||||
}
|
||||
|
||||
@@ -32,6 +34,7 @@ export default function AddRepositoryDialog({
|
||||
}: AddRepositoryDialogProps) {
|
||||
const [repo, setRepo] = useState<string>("");
|
||||
const [owner, setOwner] = useState<string>("");
|
||||
const [destinationOrg, setDestinationOrg] = useState<string>("");
|
||||
const [isLoading, setIsLoading] = useState<boolean>(false);
|
||||
const [error, setError] = useState<string>("");
|
||||
|
||||
@@ -40,6 +43,7 @@ export default function AddRepositoryDialog({
|
||||
setError("");
|
||||
setRepo("");
|
||||
setOwner("");
|
||||
setDestinationOrg("");
|
||||
}
|
||||
}, [isDialogOpen]);
|
||||
|
||||
@@ -54,11 +58,16 @@ export default function AddRepositoryDialog({
|
||||
try {
|
||||
setIsLoading(true);
|
||||
|
||||
await onAddRepository({ repo, owner });
|
||||
await onAddRepository({
|
||||
repo,
|
||||
owner,
|
||||
destinationOrg: destinationOrg.trim() || undefined,
|
||||
});
|
||||
|
||||
setError("");
|
||||
setRepo("");
|
||||
setOwner("");
|
||||
setDestinationOrg("");
|
||||
setIsDialogOpen(false);
|
||||
} catch (err: any) {
|
||||
setError(err?.message || "Failed to add repository.");
|
||||
@@ -124,6 +133,27 @@ export default function AddRepositoryDialog({
|
||||
/>
|
||||
</div>
|
||||
|
||||
<div>
|
||||
<label
|
||||
htmlFor="destinationOrg"
|
||||
className="block text-sm font-medium mb-1.5"
|
||||
>
|
||||
Target Organization{" "}
|
||||
<span className="text-muted-foreground font-normal">
|
||||
(optional)
|
||||
</span>
|
||||
</label>
|
||||
<input
|
||||
id="destinationOrg"
|
||||
type="text"
|
||||
value={destinationOrg}
|
||||
onChange={(e) => setDestinationOrg(e.target.value)}
|
||||
className="w-full rounded-md border border-input bg-background px-3 py-2 text-sm shadow-sm transition-colors placeholder:text-muted-foreground focus-visible:outline-none focus-visible:ring-1 focus-visible:ring-ring"
|
||||
placeholder="Gitea org or user (uses default strategy if empty)"
|
||||
autoComplete="off"
|
||||
/>
|
||||
</div>
|
||||
|
||||
{error && <p className="text-sm text-red-500 mt-1">{error}</p>}
|
||||
</div>
|
||||
|
||||
|
||||
@@ -28,9 +28,16 @@ export function InlineDestinationEditor({
|
||||
|
||||
// Determine the default destination based on repository properties and config
|
||||
const getDefaultDestination = () => {
|
||||
// Starred repos always go to the configured starredReposOrg
|
||||
if (repository.isStarred && giteaConfig?.starredReposOrg) {
|
||||
return giteaConfig.starredReposOrg;
|
||||
// Starred repos can use either dedicated org or preserved source owner
|
||||
if (repository.isStarred) {
|
||||
const starredReposMode = giteaConfig?.starredReposMode || "dedicated-org";
|
||||
if (starredReposMode === "preserve-owner") {
|
||||
return repository.organization || repository.owner;
|
||||
}
|
||||
if (giteaConfig?.starredReposOrg) {
|
||||
return giteaConfig.starredReposOrg;
|
||||
}
|
||||
return "starred";
|
||||
}
|
||||
|
||||
// Check mirror strategy
|
||||
@@ -60,7 +67,7 @@ export function InlineDestinationEditor({
|
||||
const defaultDestination = getDefaultDestination();
|
||||
const currentDestination = repository.destinationOrg || defaultDestination;
|
||||
const hasOverride = repository.destinationOrg && repository.destinationOrg !== defaultDestination;
|
||||
const isStarredRepo = repository.isStarred && giteaConfig?.starredReposOrg;
|
||||
const isStarredRepo = repository.isStarred;
|
||||
|
||||
useEffect(() => {
|
||||
if (isEditing && inputRef.current) {
|
||||
@@ -184,4 +191,4 @@ export function InlineDestinationEditor({
|
||||
</div>
|
||||
</div>
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -44,6 +44,7 @@ import { toast } from "sonner";
|
||||
import type { SyncRepoRequest, SyncRepoResponse } from "@/types/sync";
|
||||
import { OwnerCombobox, OrganizationCombobox } from "./RepositoryComboboxes";
|
||||
import type { RetryRepoRequest, RetryRepoResponse } from "@/types/retry";
|
||||
import type { ResetMetadataRequest, ResetMetadataResponse } from "@/types/reset-metadata";
|
||||
import AddRepositoryDialog from "./AddRepositoryDialog";
|
||||
|
||||
import { useLiveRefresh } from "@/hooks/useLiveRefresh";
|
||||
@@ -55,7 +56,7 @@ export default function Repository() {
|
||||
const [isInitialLoading, setIsInitialLoading] = useState(true);
|
||||
const { user } = useAuth();
|
||||
const { registerRefreshCallback, isLiveEnabled } = useLiveRefresh();
|
||||
const { isGitHubConfigured, isFullyConfigured } = useConfigStatus();
|
||||
const { isGitHubConfigured, isFullyConfigured, autoMirrorStarred, githubOwner } = useConfigStatus();
|
||||
const { navigationKey } = useNavigation();
|
||||
const { filter, setFilter } = useFilterParams({
|
||||
searchTerm: "",
|
||||
@@ -232,10 +233,12 @@ export default function Repository() {
|
||||
// Filter out repositories that are already mirroring, mirrored, or ignored
|
||||
const eligibleRepos = repositories.filter(
|
||||
(repo) =>
|
||||
repo.status !== "mirroring" &&
|
||||
repo.status !== "mirrored" &&
|
||||
repo.status !== "mirroring" &&
|
||||
repo.status !== "mirrored" &&
|
||||
repo.status !== "ignored" && // Skip ignored repositories
|
||||
repo.id
|
||||
repo.id &&
|
||||
// Skip starred repos from other owners when autoMirrorStarred is disabled
|
||||
!(repo.isStarred && !autoMirrorStarred && repo.owner !== githubOwner)
|
||||
);
|
||||
|
||||
if (eligibleRepos.length === 0) {
|
||||
@@ -291,7 +294,7 @@ export default function Repository() {
|
||||
|
||||
const selectedRepos = repositories.filter(repo => repo.id && selectedRepoIds.has(repo.id));
|
||||
const eligibleRepos = selectedRepos.filter(
|
||||
repo => repo.status === "imported" || repo.status === "failed"
|
||||
repo => repo.status === "imported" || repo.status === "failed" || repo.status === "pending-approval"
|
||||
);
|
||||
|
||||
if (eligibleRepos.length === 0) {
|
||||
@@ -300,7 +303,7 @@ export default function Repository() {
|
||||
}
|
||||
|
||||
const repoIds = eligibleRepos.map(repo => repo.id as string);
|
||||
|
||||
|
||||
setLoadingRepoIds(prev => {
|
||||
const newSet = new Set(prev);
|
||||
repoIds.forEach(id => newSet.add(id));
|
||||
@@ -378,6 +381,67 @@ export default function Repository() {
|
||||
}
|
||||
};
|
||||
|
||||
const handleBulkRerunMetadata = async () => {
|
||||
if (selectedRepoIds.size === 0) return;
|
||||
|
||||
const selectedRepos = repositories.filter(repo => repo.id && selectedRepoIds.has(repo.id));
|
||||
const eligibleRepos = selectedRepos.filter(
|
||||
repo => ["mirrored", "synced", "archived"].includes(repo.status)
|
||||
);
|
||||
|
||||
if (eligibleRepos.length === 0) {
|
||||
toast.info("No eligible repositories to re-run metadata in selection");
|
||||
return;
|
||||
}
|
||||
|
||||
const repoIds = eligibleRepos.map(repo => repo.id as string);
|
||||
|
||||
setLoadingRepoIds(prev => {
|
||||
const newSet = new Set(prev);
|
||||
repoIds.forEach(id => newSet.add(id));
|
||||
return newSet;
|
||||
});
|
||||
|
||||
try {
|
||||
const resetPayload: ResetMetadataRequest = {
|
||||
userId: user?.id || "",
|
||||
repositoryIds: repoIds,
|
||||
};
|
||||
|
||||
const resetResponse = await apiRequest<ResetMetadataResponse>("/job/reset-metadata", {
|
||||
method: "POST",
|
||||
data: resetPayload,
|
||||
});
|
||||
|
||||
if (!resetResponse.success) {
|
||||
showErrorToast(resetResponse.error || "Failed to reset metadata state", toast);
|
||||
return;
|
||||
}
|
||||
|
||||
const syncResponse = await apiRequest<SyncRepoResponse>("/job/sync-repo", {
|
||||
method: "POST",
|
||||
data: { userId: user?.id, repositoryIds: repoIds },
|
||||
});
|
||||
|
||||
if (syncResponse.success) {
|
||||
toast.success(`Re-running metadata for ${repoIds.length} repositories`);
|
||||
setRepositories(prevRepos =>
|
||||
prevRepos.map(repo => {
|
||||
const updated = syncResponse.repositories.find(r => r.id === repo.id);
|
||||
return updated ? updated : repo;
|
||||
})
|
||||
);
|
||||
setSelectedRepoIds(new Set());
|
||||
} else {
|
||||
showErrorToast(syncResponse.error || "Error starting metadata re-sync", toast);
|
||||
}
|
||||
} catch (error) {
|
||||
showErrorToast(error, toast);
|
||||
} finally {
|
||||
setLoadingRepoIds(new Set());
|
||||
}
|
||||
};
|
||||
|
||||
const handleBulkRetry = async () => {
|
||||
if (selectedRepoIds.size === 0) return;
|
||||
|
||||
@@ -632,14 +696,90 @@ export default function Repository() {
|
||||
}
|
||||
};
|
||||
|
||||
const handleApproveSyncAction = async ({ repoId }: { repoId: string }) => {
|
||||
try {
|
||||
if (!user || !user.id) return;
|
||||
setLoadingRepoIds((prev) => new Set(prev).add(repoId));
|
||||
|
||||
const response = await apiRequest<{
|
||||
success: boolean;
|
||||
message?: string;
|
||||
error?: string;
|
||||
repositories: Repository[];
|
||||
}>("/job/approve-sync", {
|
||||
method: "POST",
|
||||
data: { repositoryIds: [repoId], action: "approve" },
|
||||
});
|
||||
|
||||
if (response.success) {
|
||||
toast.success("Sync approved — backup + sync started");
|
||||
setRepositories((prevRepos) =>
|
||||
prevRepos.map((repo) => {
|
||||
const updated = response.repositories.find((r) => r.id === repo.id);
|
||||
return updated ? updated : repo;
|
||||
}),
|
||||
);
|
||||
} else {
|
||||
showErrorToast(response.error || "Error approving sync", toast);
|
||||
}
|
||||
} catch (error) {
|
||||
showErrorToast(error, toast);
|
||||
} finally {
|
||||
setLoadingRepoIds((prev) => {
|
||||
const newSet = new Set(prev);
|
||||
newSet.delete(repoId);
|
||||
return newSet;
|
||||
});
|
||||
}
|
||||
};
|
||||
|
||||
const handleDismissSyncAction = async ({ repoId }: { repoId: string }) => {
|
||||
try {
|
||||
if (!user || !user.id) return;
|
||||
setLoadingRepoIds((prev) => new Set(prev).add(repoId));
|
||||
|
||||
const response = await apiRequest<{
|
||||
success: boolean;
|
||||
message?: string;
|
||||
error?: string;
|
||||
repositories: Repository[];
|
||||
}>("/job/approve-sync", {
|
||||
method: "POST",
|
||||
data: { repositoryIds: [repoId], action: "dismiss" },
|
||||
});
|
||||
|
||||
if (response.success) {
|
||||
toast.success("Force-push alert dismissed");
|
||||
setRepositories((prevRepos) =>
|
||||
prevRepos.map((repo) => {
|
||||
const updated = response.repositories.find((r) => r.id === repo.id);
|
||||
return updated ? updated : repo;
|
||||
}),
|
||||
);
|
||||
} else {
|
||||
showErrorToast(response.error || "Error dismissing alert", toast);
|
||||
}
|
||||
} catch (error) {
|
||||
showErrorToast(error, toast);
|
||||
} finally {
|
||||
setLoadingRepoIds((prev) => {
|
||||
const newSet = new Set(prev);
|
||||
newSet.delete(repoId);
|
||||
return newSet;
|
||||
});
|
||||
}
|
||||
};
|
||||
|
||||
const handleAddRepository = async ({
|
||||
repo,
|
||||
owner,
|
||||
force = false,
|
||||
destinationOrg,
|
||||
}: {
|
||||
repo: string;
|
||||
owner: string;
|
||||
force?: boolean;
|
||||
destinationOrg?: string;
|
||||
}) => {
|
||||
if (!user || !user.id) {
|
||||
return;
|
||||
@@ -674,6 +814,7 @@ export default function Repository() {
|
||||
repo: trimmedRepo,
|
||||
owner: trimmedOwner,
|
||||
force,
|
||||
...(destinationOrg ? { destinationOrg } : {}),
|
||||
};
|
||||
|
||||
const response = await apiRequest<AddRepositoriesApiResponse>(
|
||||
@@ -798,7 +939,7 @@ export default function Repository() {
|
||||
const actions = [];
|
||||
|
||||
// Check if any selected repos can be mirrored
|
||||
if (selectedRepos.some(repo => repo.status === "imported" || repo.status === "failed")) {
|
||||
if (selectedRepos.some(repo => repo.status === "imported" || repo.status === "failed" || repo.status === "pending-approval")) {
|
||||
actions.push('mirror');
|
||||
}
|
||||
|
||||
@@ -806,6 +947,10 @@ export default function Repository() {
|
||||
if (selectedRepos.some(repo => repo.status === "mirrored" || repo.status === "synced")) {
|
||||
actions.push('sync');
|
||||
}
|
||||
|
||||
if (selectedRepos.some(repo => ["mirrored", "synced", "archived"].includes(repo.status))) {
|
||||
actions.push('rerun-metadata');
|
||||
}
|
||||
|
||||
// Check if any selected repos are failed
|
||||
if (selectedRepos.some(repo => repo.status === "failed")) {
|
||||
@@ -832,8 +977,9 @@ export default function Repository() {
|
||||
const selectedRepos = repositories.filter(repo => repo.id && selectedRepoIds.has(repo.id));
|
||||
|
||||
return {
|
||||
mirror: selectedRepos.filter(repo => repo.status === "imported" || repo.status === "failed").length,
|
||||
mirror: selectedRepos.filter(repo => repo.status === "imported" || repo.status === "failed" || repo.status === "pending-approval").length,
|
||||
sync: selectedRepos.filter(repo => repo.status === "mirrored" || repo.status === "synced").length,
|
||||
rerunMetadata: selectedRepos.filter(repo => ["mirrored", "synced", "archived"].includes(repo.status)).length,
|
||||
retry: selectedRepos.filter(repo => repo.status === "failed").length,
|
||||
ignore: selectedRepos.filter(repo => repo.status !== "ignored").length,
|
||||
include: selectedRepos.filter(repo => repo.status === "ignored").length,
|
||||
@@ -1157,6 +1303,18 @@ export default function Repository() {
|
||||
Sync ({actionCounts.sync})
|
||||
</Button>
|
||||
)}
|
||||
|
||||
{availableActions.includes('rerun-metadata') && (
|
||||
<Button
|
||||
variant="outline"
|
||||
size="default"
|
||||
onClick={handleBulkRerunMetadata}
|
||||
disabled={loadingRepoIds.size > 0}
|
||||
>
|
||||
<RefreshCw className="h-4 w-4 mr-2" />
|
||||
Re-run Metadata ({actionCounts.rerunMetadata})
|
||||
</Button>
|
||||
)}
|
||||
|
||||
{availableActions.includes('retry') && (
|
||||
<Button
|
||||
@@ -1240,6 +1398,18 @@ export default function Repository() {
|
||||
<span className="hidden sm:inline">Sync </span>({actionCounts.sync})
|
||||
</Button>
|
||||
)}
|
||||
|
||||
{availableActions.includes('rerun-metadata') && (
|
||||
<Button
|
||||
variant="outline"
|
||||
size="sm"
|
||||
onClick={handleBulkRerunMetadata}
|
||||
disabled={loadingRepoIds.size > 0}
|
||||
>
|
||||
<RefreshCw className="h-4 w-4 mr-2" />
|
||||
Re-run Metadata ({actionCounts.rerunMetadata})
|
||||
</Button>
|
||||
)}
|
||||
|
||||
{availableActions.includes('retry') && (
|
||||
<Button
|
||||
@@ -1315,6 +1485,8 @@ export default function Repository() {
|
||||
await fetchRepositories(false);
|
||||
}}
|
||||
onDelete={handleRequestDeleteRepository}
|
||||
onApproveSync={handleApproveSyncAction}
|
||||
onDismissSync={handleDismissSyncAction}
|
||||
/>
|
||||
)}
|
||||
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
import { useMemo, useRef } from "react";
|
||||
import Fuse from "fuse.js";
|
||||
import { useVirtualizer } from "@tanstack/react-virtual";
|
||||
import { FlipHorizontal, GitFork, RefreshCw, RotateCcw, Star, Lock, Ban, Check, ChevronDown, Trash2 } from "lucide-react";
|
||||
import { FlipHorizontal, GitFork, RefreshCw, RotateCcw, Star, Lock, Ban, Check, ChevronDown, Trash2, X } from "lucide-react";
|
||||
import { SiGithub, SiGitea } from "react-icons/si";
|
||||
import type { Repository } from "@/lib/db/schema";
|
||||
import { Button } from "@/components/ui/button";
|
||||
@@ -42,6 +42,8 @@ interface RepositoryTableProps {
|
||||
onSelectionChange: (selectedIds: Set<string>) => void;
|
||||
onRefresh?: () => Promise<void>;
|
||||
onDelete?: (repoId: string) => void;
|
||||
onApproveSync?: ({ repoId }: { repoId: string }) => Promise<void>;
|
||||
onDismissSync?: ({ repoId }: { repoId: string }) => Promise<void>;
|
||||
}
|
||||
|
||||
export default function RepositoryTable({
|
||||
@@ -59,6 +61,8 @@ export default function RepositoryTable({
|
||||
onSelectionChange,
|
||||
onRefresh,
|
||||
onDelete,
|
||||
onApproveSync,
|
||||
onDismissSync,
|
||||
}: RepositoryTableProps) {
|
||||
const tableParentRef = useRef<HTMLDivElement>(null);
|
||||
const { giteaConfig } = useGiteaConfig();
|
||||
@@ -239,6 +243,7 @@ export default function RepositoryTable({
|
||||
repo.status === 'failed' ? 'bg-red-500/10 text-red-600 hover:bg-red-500/20 dark:text-red-400' :
|
||||
repo.status === 'ignored' ? 'bg-gray-500/10 text-gray-600 hover:bg-gray-500/20 dark:text-gray-400' :
|
||||
repo.status === 'skipped' ? 'bg-orange-500/10 text-orange-600 hover:bg-orange-500/20 dark:text-orange-400' :
|
||||
repo.status === 'pending-approval' ? 'bg-amber-500/10 text-amber-600 hover:bg-amber-500/20 dark:text-amber-400' :
|
||||
'bg-muted hover:bg-muted/80'}`}
|
||||
variant="secondary"
|
||||
>
|
||||
@@ -316,7 +321,40 @@ export default function RepositoryTable({
|
||||
)}
|
||||
</Button>
|
||||
)}
|
||||
|
||||
{repo.status === "pending-approval" && (
|
||||
<div className="flex gap-2 w-full">
|
||||
<Button
|
||||
size="default"
|
||||
variant="default"
|
||||
onClick={() => repo.id && onApproveSync?.({ repoId: repo.id })}
|
||||
disabled={isLoading}
|
||||
className="flex-1 h-10"
|
||||
>
|
||||
{isLoading ? (
|
||||
<>
|
||||
<Check className="h-4 w-4 mr-2 animate-spin" />
|
||||
Approving...
|
||||
</>
|
||||
) : (
|
||||
<>
|
||||
<Check className="h-4 w-4 mr-2" />
|
||||
Approve Sync
|
||||
</>
|
||||
)}
|
||||
</Button>
|
||||
<Button
|
||||
size="default"
|
||||
variant="outline"
|
||||
onClick={() => repo.id && onDismissSync?.({ repoId: repo.id })}
|
||||
disabled={isLoading}
|
||||
className="flex-1 h-10"
|
||||
>
|
||||
<X className="h-4 w-4 mr-2" />
|
||||
Dismiss
|
||||
</Button>
|
||||
</div>
|
||||
)}
|
||||
|
||||
{/* Ignore/Include button */}
|
||||
{repo.status === "ignored" ? (
|
||||
<Button
|
||||
@@ -663,6 +701,7 @@ export default function RepositoryTable({
|
||||
repo.status === 'failed' ? 'bg-red-500/10 text-red-600 hover:bg-red-500/20 dark:text-red-400' :
|
||||
repo.status === 'ignored' ? 'bg-gray-500/10 text-gray-600 hover:bg-gray-500/20 dark:text-gray-400' :
|
||||
repo.status === 'skipped' ? 'bg-orange-500/10 text-orange-600 hover:bg-orange-500/20 dark:text-orange-400' :
|
||||
repo.status === 'pending-approval' ? 'bg-amber-500/10 text-amber-600 hover:bg-amber-500/20 dark:text-amber-400' :
|
||||
'bg-muted hover:bg-muted/80'}`}
|
||||
variant="secondary"
|
||||
>
|
||||
@@ -680,6 +719,8 @@ export default function RepositoryTable({
|
||||
onRetry={() => onRetry({ repoId: repo.id ?? "" })}
|
||||
onSkip={(skip) => onSkip({ repoId: repo.id ?? "", skip })}
|
||||
onDelete={onDelete && repo.id ? () => onDelete(repo.id as string) : undefined}
|
||||
onApproveSync={onApproveSync ? () => onApproveSync({ repoId: repo.id ?? "" }) : undefined}
|
||||
onDismissSync={onDismissSync ? () => onDismissSync({ repoId: repo.id ?? "" }) : undefined}
|
||||
/>
|
||||
</div>
|
||||
{/* Links */}
|
||||
@@ -791,6 +832,8 @@ function RepoActionButton({
|
||||
onRetry,
|
||||
onSkip,
|
||||
onDelete,
|
||||
onApproveSync,
|
||||
onDismissSync,
|
||||
}: {
|
||||
repo: { id: string; status: string };
|
||||
isLoading: boolean;
|
||||
@@ -799,7 +842,36 @@ function RepoActionButton({
|
||||
onRetry: () => void;
|
||||
onSkip: (skip: boolean) => void;
|
||||
onDelete?: () => void;
|
||||
onApproveSync?: () => void;
|
||||
onDismissSync?: () => void;
|
||||
}) {
|
||||
// For pending-approval repos, show approve/dismiss actions
|
||||
if (repo.status === "pending-approval") {
|
||||
return (
|
||||
<div className="flex gap-1">
|
||||
<Button
|
||||
variant="default"
|
||||
size="sm"
|
||||
disabled={isLoading}
|
||||
onClick={onApproveSync}
|
||||
className="min-w-[70px]"
|
||||
>
|
||||
<Check className="h-4 w-4 mr-1" />
|
||||
Approve
|
||||
</Button>
|
||||
<Button
|
||||
variant="outline"
|
||||
size="sm"
|
||||
disabled={isLoading}
|
||||
onClick={onDismissSync}
|
||||
>
|
||||
<X className="h-4 w-4 mr-1" />
|
||||
Dismiss
|
||||
</Button>
|
||||
</div>
|
||||
);
|
||||
}
|
||||
|
||||
// For ignored repos, show an "Include" action
|
||||
if (repo.status === "ignored") {
|
||||
return (
|
||||
|
||||
@@ -9,6 +9,8 @@ interface ConfigStatus {
|
||||
isFullyConfigured: boolean;
|
||||
isLoading: boolean;
|
||||
error: string | null;
|
||||
autoMirrorStarred: boolean;
|
||||
githubOwner: string;
|
||||
}
|
||||
|
||||
// Cache to prevent duplicate API calls across components
|
||||
@@ -33,6 +35,8 @@ export function useConfigStatus(): ConfigStatus {
|
||||
isFullyConfigured: false,
|
||||
isLoading: true,
|
||||
error: null,
|
||||
autoMirrorStarred: false,
|
||||
githubOwner: '',
|
||||
});
|
||||
|
||||
// Track if this hook has already checked config to prevent multiple calls
|
||||
@@ -46,6 +50,8 @@ export function useConfigStatus(): ConfigStatus {
|
||||
isFullyConfigured: false,
|
||||
isLoading: false,
|
||||
error: 'No user found',
|
||||
autoMirrorStarred: false,
|
||||
githubOwner: '',
|
||||
});
|
||||
return;
|
||||
}
|
||||
@@ -78,6 +84,8 @@ export function useConfigStatus(): ConfigStatus {
|
||||
isFullyConfigured,
|
||||
isLoading: false,
|
||||
error: null,
|
||||
autoMirrorStarred: configResponse?.advancedOptions?.autoMirrorStarred ?? false,
|
||||
githubOwner: configResponse?.githubConfig?.username ?? '',
|
||||
});
|
||||
return;
|
||||
}
|
||||
@@ -119,6 +127,8 @@ export function useConfigStatus(): ConfigStatus {
|
||||
isFullyConfigured,
|
||||
isLoading: false,
|
||||
error: null,
|
||||
autoMirrorStarred: configResponse?.advancedOptions?.autoMirrorStarred ?? false,
|
||||
githubOwner: configResponse?.githubConfig?.username ?? '',
|
||||
});
|
||||
|
||||
hasCheckedRef.current = true;
|
||||
@@ -129,6 +139,8 @@ export function useConfigStatus(): ConfigStatus {
|
||||
isFullyConfigured: false,
|
||||
isLoading: false,
|
||||
error: error instanceof Error ? error.message : 'Failed to check configuration',
|
||||
autoMirrorStarred: false,
|
||||
githubOwner: '',
|
||||
});
|
||||
hasCheckedRef.current = true;
|
||||
}
|
||||
|
||||
66
src/lib/auth-guards.test.ts
Normal file
66
src/lib/auth-guards.test.ts
Normal file
@@ -0,0 +1,66 @@
|
||||
import { describe, expect, mock, test } from "bun:test";
|
||||
|
||||
const getSessionMock = mock(async () => null);
|
||||
|
||||
mock.module("@/lib/auth", () => ({
|
||||
auth: {
|
||||
api: {
|
||||
getSession: getSessionMock,
|
||||
},
|
||||
},
|
||||
}));
|
||||
|
||||
import { requireAuthenticatedUserId } from "./auth-guards";
|
||||
|
||||
describe("requireAuthenticatedUserId", () => {
|
||||
test("returns user id from locals session without calling auth api", async () => {
|
||||
getSessionMock.mockImplementation(async () => {
|
||||
throw new Error("should not be called");
|
||||
});
|
||||
|
||||
const result = await requireAuthenticatedUserId({
|
||||
request: new Request("http://localhost/test"),
|
||||
locals: {
|
||||
session: { userId: "local-user-id" },
|
||||
} as any,
|
||||
});
|
||||
|
||||
expect("userId" in result).toBe(true);
|
||||
if ("userId" in result) {
|
||||
expect(result.userId).toBe("local-user-id");
|
||||
}
|
||||
});
|
||||
|
||||
test("returns user id from auth session when locals are empty", async () => {
|
||||
getSessionMock.mockImplementation(async () => ({
|
||||
user: { id: "session-user-id" },
|
||||
session: { id: "session-id" },
|
||||
}));
|
||||
|
||||
const result = await requireAuthenticatedUserId({
|
||||
request: new Request("http://localhost/test"),
|
||||
locals: {} as any,
|
||||
});
|
||||
|
||||
expect("userId" in result).toBe(true);
|
||||
if ("userId" in result) {
|
||||
expect(result.userId).toBe("session-user-id");
|
||||
}
|
||||
});
|
||||
|
||||
test("returns unauthorized response when auth lookup throws", async () => {
|
||||
getSessionMock.mockImplementation(async () => {
|
||||
throw new Error("session provider unavailable");
|
||||
});
|
||||
|
||||
const result = await requireAuthenticatedUserId({
|
||||
request: new Request("http://localhost/test"),
|
||||
locals: {} as any,
|
||||
});
|
||||
|
||||
expect("response" in result).toBe(true);
|
||||
if ("response" in result) {
|
||||
expect(result.response.status).toBe(401);
|
||||
}
|
||||
});
|
||||
});
|
||||
45
src/lib/auth-guards.ts
Normal file
45
src/lib/auth-guards.ts
Normal file
@@ -0,0 +1,45 @@
|
||||
import type { APIContext } from "astro";
|
||||
import { auth } from "@/lib/auth";
|
||||
|
||||
function unauthorizedResponse() {
|
||||
return new Response(
|
||||
JSON.stringify({
|
||||
success: false,
|
||||
error: "Unauthorized",
|
||||
}),
|
||||
{
|
||||
status: 401,
|
||||
headers: { "Content-Type": "application/json" },
|
||||
}
|
||||
);
|
||||
}
|
||||
|
||||
/**
|
||||
* Ensures request is authenticated and returns the authenticated user ID.
|
||||
* Never trust client-provided userId for authorization decisions.
|
||||
*/
|
||||
export async function requireAuthenticatedUserId(
|
||||
context: Pick<APIContext, "request" | "locals">
|
||||
): Promise<{ userId: string } | { response: Response }> {
|
||||
const localUserId =
|
||||
context.locals?.session?.userId || context.locals?.user?.id;
|
||||
|
||||
if (localUserId) {
|
||||
return { userId: localUserId };
|
||||
}
|
||||
|
||||
let session: Awaited<ReturnType<typeof auth.api.getSession>> | null = null;
|
||||
try {
|
||||
session = await auth.api.getSession({
|
||||
headers: context.request.headers,
|
||||
});
|
||||
} catch {
|
||||
return { response: unauthorizedResponse() };
|
||||
}
|
||||
|
||||
if (!session?.user?.id) {
|
||||
return { response: unauthorizedResponse() };
|
||||
}
|
||||
|
||||
return { userId: session.user.id };
|
||||
}
|
||||
@@ -25,15 +25,25 @@ export const githubConfigSchema = z.object({
|
||||
includePublic: z.boolean().default(true),
|
||||
includeOrganizations: z.array(z.string()).default([]),
|
||||
starredReposOrg: z.string().optional(),
|
||||
starredReposMode: z.enum(["dedicated-org", "preserve-owner"]).default("dedicated-org"),
|
||||
mirrorStrategy: z.enum(["preserve", "single-org", "flat-user", "mixed"]).default("preserve"),
|
||||
defaultOrg: z.string().optional(),
|
||||
starredCodeOnly: z.boolean().default(false),
|
||||
autoMirrorStarred: z.boolean().default(false),
|
||||
skipStarredIssues: z.boolean().optional(), // Deprecated: kept for backward compatibility, use starredCodeOnly instead
|
||||
starredDuplicateStrategy: z.enum(["suffix", "prefix", "owner-org"]).default("suffix").optional(),
|
||||
});
|
||||
|
||||
export const backupStrategyEnum = z.enum([
|
||||
"disabled",
|
||||
"always",
|
||||
"on-force-push",
|
||||
"block-on-force-push",
|
||||
]);
|
||||
|
||||
export const giteaConfigSchema = z.object({
|
||||
url: z.url(),
|
||||
externalUrl: z.url().optional(),
|
||||
token: z.string(),
|
||||
defaultOwner: z.string(),
|
||||
organization: z.string().optional(),
|
||||
@@ -63,6 +73,11 @@ export const giteaConfigSchema = z.object({
|
||||
mirrorPullRequests: z.boolean().default(false),
|
||||
mirrorLabels: z.boolean().default(false),
|
||||
mirrorMilestones: z.boolean().default(false),
|
||||
backupStrategy: backupStrategyEnum.default("on-force-push"),
|
||||
backupBeforeSync: z.boolean().default(true), // Deprecated: kept for backward compat, use backupStrategy
|
||||
backupRetentionCount: z.number().int().min(1).default(20),
|
||||
backupDirectory: z.string().optional(),
|
||||
blockSyncOnBackupFailure: z.boolean().default(true),
|
||||
});
|
||||
|
||||
export const scheduleConfigSchema = z.object({
|
||||
@@ -159,6 +174,7 @@ export const repositorySchema = z.object({
|
||||
"syncing",
|
||||
"synced",
|
||||
"archived",
|
||||
"pending-approval", // Blocked by force-push detection, needs manual approval
|
||||
])
|
||||
.default("imported"),
|
||||
lastMirrored: z.coerce.date().optional().nullable(),
|
||||
@@ -190,6 +206,7 @@ export const mirrorJobSchema = z.object({
|
||||
"syncing",
|
||||
"synced",
|
||||
"archived",
|
||||
"pending-approval",
|
||||
])
|
||||
.default("imported"),
|
||||
message: z.string(),
|
||||
|
||||
@@ -22,11 +22,14 @@ interface EnvConfig {
|
||||
preserveOrgStructure?: boolean;
|
||||
onlyMirrorOrgs?: boolean;
|
||||
starredCodeOnly?: boolean;
|
||||
autoMirrorStarred?: boolean;
|
||||
starredReposOrg?: string;
|
||||
starredReposMode?: 'dedicated-org' | 'preserve-owner';
|
||||
mirrorStrategy?: 'preserve' | 'single-org' | 'flat-user' | 'mixed';
|
||||
};
|
||||
gitea: {
|
||||
url?: string;
|
||||
externalUrl?: string;
|
||||
username?: string;
|
||||
token?: string;
|
||||
organization?: string;
|
||||
@@ -111,11 +114,14 @@ function parseEnvConfig(): EnvConfig {
|
||||
preserveOrgStructure: process.env.PRESERVE_ORG_STRUCTURE === 'true',
|
||||
onlyMirrorOrgs: process.env.ONLY_MIRROR_ORGS === 'true',
|
||||
starredCodeOnly: process.env.SKIP_STARRED_ISSUES === 'true',
|
||||
autoMirrorStarred: process.env.AUTO_MIRROR_STARRED === 'true',
|
||||
starredReposOrg: process.env.STARRED_REPOS_ORG,
|
||||
starredReposMode: process.env.STARRED_REPOS_MODE as 'dedicated-org' | 'preserve-owner',
|
||||
mirrorStrategy: process.env.MIRROR_STRATEGY as 'preserve' | 'single-org' | 'flat-user' | 'mixed',
|
||||
},
|
||||
gitea: {
|
||||
url: process.env.GITEA_URL,
|
||||
externalUrl: process.env.GITEA_EXTERNAL_URL,
|
||||
username: process.env.GITEA_USERNAME,
|
||||
token: process.env.GITEA_TOKEN,
|
||||
organization: process.env.GITEA_ORGANIZATION,
|
||||
@@ -256,14 +262,17 @@ export async function initializeConfigFromEnv(): Promise<void> {
|
||||
includePublic: envConfig.github.publicRepositories ?? existingConfig?.[0]?.githubConfig?.includePublic ?? true,
|
||||
includeOrganizations: envConfig.github.mirrorOrganizations ? [] : (existingConfig?.[0]?.githubConfig?.includeOrganizations ?? []),
|
||||
starredReposOrg: envConfig.github.starredReposOrg || existingConfig?.[0]?.githubConfig?.starredReposOrg || 'starred',
|
||||
starredReposMode: envConfig.github.starredReposMode || existingConfig?.[0]?.githubConfig?.starredReposMode || 'dedicated-org',
|
||||
mirrorStrategy,
|
||||
defaultOrg: envConfig.gitea.organization || existingConfig?.[0]?.githubConfig?.defaultOrg || 'github-mirrors',
|
||||
starredCodeOnly: envConfig.github.starredCodeOnly ?? existingConfig?.[0]?.githubConfig?.starredCodeOnly ?? false,
|
||||
autoMirrorStarred: envConfig.github.autoMirrorStarred ?? existingConfig?.[0]?.githubConfig?.autoMirrorStarred ?? false,
|
||||
};
|
||||
|
||||
// Build Gitea config
|
||||
const giteaConfig = {
|
||||
url: envConfig.gitea.url || existingConfig?.[0]?.giteaConfig?.url || '',
|
||||
externalUrl: envConfig.gitea.externalUrl || existingConfig?.[0]?.giteaConfig?.externalUrl || undefined,
|
||||
token: envConfig.gitea.token ? encrypt(envConfig.gitea.token) : existingConfig?.[0]?.giteaConfig?.token || '',
|
||||
defaultOwner: envConfig.gitea.username || existingConfig?.[0]?.giteaConfig?.defaultOwner || '',
|
||||
organization: envConfig.gitea.organization || existingConfig?.[0]?.giteaConfig?.organization || undefined,
|
||||
|
||||
@@ -13,6 +13,11 @@ const mockMirrorGitRepoPullRequestsToGitea = mock(() => Promise.resolve());
|
||||
const mockMirrorGitRepoLabelsToGitea = mock(() => Promise.resolve());
|
||||
const mockMirrorGitRepoMilestonesToGitea = mock(() => Promise.resolve());
|
||||
const mockGetGiteaRepoOwnerAsync = mock(() => Promise.resolve("starred"));
|
||||
const mockCreatePreSyncBundleBackup = mock(() =>
|
||||
Promise.resolve({ bundlePath: "/tmp/mock.bundle" })
|
||||
);
|
||||
let mockShouldCreatePreSyncBackup = false;
|
||||
let mockShouldBlockSyncOnBackupFailure = true;
|
||||
|
||||
// Mock the database module
|
||||
const mockDb = {
|
||||
@@ -28,8 +33,14 @@ const mockDb = {
|
||||
|
||||
mock.module("@/lib/db", () => ({
|
||||
db: mockDb,
|
||||
users: {},
|
||||
configs: {},
|
||||
organizations: {},
|
||||
mirrorJobs: {},
|
||||
repositories: {}
|
||||
repositories: {},
|
||||
events: {},
|
||||
accounts: {},
|
||||
sessions: {},
|
||||
}));
|
||||
|
||||
// Mock config encryption
|
||||
@@ -235,6 +246,12 @@ mock.module("@/lib/http-client", () => ({
|
||||
HttpError: MockHttpError
|
||||
}));
|
||||
|
||||
mock.module("@/lib/repo-backup", () => ({
|
||||
createPreSyncBundleBackup: mockCreatePreSyncBundleBackup,
|
||||
shouldCreatePreSyncBackup: () => mockShouldCreatePreSyncBackup,
|
||||
shouldBlockSyncOnBackupFailure: () => mockShouldBlockSyncOnBackupFailure,
|
||||
}));
|
||||
|
||||
// Now import the modules we're testing
|
||||
import {
|
||||
getGiteaRepoInfo,
|
||||
@@ -264,6 +281,15 @@ describe("Enhanced Gitea Operations", () => {
|
||||
mockMirrorGitRepoMilestonesToGitea.mockClear();
|
||||
mockGetGiteaRepoOwnerAsync.mockClear();
|
||||
mockGetGiteaRepoOwnerAsync.mockImplementation(() => Promise.resolve("starred"));
|
||||
mockHttpGet.mockClear();
|
||||
mockHttpPost.mockClear();
|
||||
mockHttpDelete.mockClear();
|
||||
mockCreatePreSyncBundleBackup.mockClear();
|
||||
mockCreatePreSyncBundleBackup.mockImplementation(() =>
|
||||
Promise.resolve({ bundlePath: "/tmp/mock.bundle" })
|
||||
);
|
||||
mockShouldCreatePreSyncBackup = false;
|
||||
mockShouldBlockSyncOnBackupFailure = true;
|
||||
// Reset tracking variables
|
||||
orgCheckCount = 0;
|
||||
orgTestContext = "";
|
||||
@@ -529,6 +555,125 @@ describe("Enhanced Gitea Operations", () => {
|
||||
expect(releaseCall.octokit).toBeDefined();
|
||||
});
|
||||
|
||||
test("blocks sync when pre-sync snapshot fails and blocking is enabled", async () => {
|
||||
mockShouldCreatePreSyncBackup = true;
|
||||
mockShouldBlockSyncOnBackupFailure = true;
|
||||
mockCreatePreSyncBundleBackup.mockImplementation(() =>
|
||||
Promise.reject(new Error("simulated backup failure"))
|
||||
);
|
||||
|
||||
const config: Partial<Config> = {
|
||||
userId: "user123",
|
||||
githubConfig: {
|
||||
username: "testuser",
|
||||
token: "github-token",
|
||||
privateRepositories: false,
|
||||
mirrorStarred: true,
|
||||
},
|
||||
giteaConfig: {
|
||||
url: "https://gitea.example.com",
|
||||
token: "encrypted-token",
|
||||
defaultOwner: "testuser",
|
||||
mirrorReleases: false,
|
||||
backupBeforeSync: true,
|
||||
blockSyncOnBackupFailure: true,
|
||||
},
|
||||
};
|
||||
|
||||
const repository: Repository = {
|
||||
id: "repo456",
|
||||
name: "mirror-repo",
|
||||
fullName: "user/mirror-repo",
|
||||
owner: "user",
|
||||
cloneUrl: "https://github.com/user/mirror-repo.git",
|
||||
isPrivate: false,
|
||||
isStarred: true,
|
||||
status: repoStatusEnum.parse("mirrored"),
|
||||
visibility: "public",
|
||||
userId: "user123",
|
||||
createdAt: new Date(),
|
||||
updatedAt: new Date(),
|
||||
};
|
||||
|
||||
await expect(
|
||||
syncGiteaRepoEnhanced(
|
||||
{ config, repository },
|
||||
{
|
||||
getGiteaRepoOwnerAsync: mockGetGiteaRepoOwnerAsync,
|
||||
mirrorGitHubReleasesToGitea: mockMirrorGitHubReleasesToGitea,
|
||||
mirrorGitRepoIssuesToGitea: mockMirrorGitRepoIssuesToGitea,
|
||||
mirrorGitRepoPullRequestsToGitea: mockMirrorGitRepoPullRequestsToGitea,
|
||||
mirrorGitRepoLabelsToGitea: mockMirrorGitRepoLabelsToGitea,
|
||||
mirrorGitRepoMilestonesToGitea: mockMirrorGitRepoMilestonesToGitea,
|
||||
}
|
||||
)
|
||||
).rejects.toThrow("Snapshot failed; sync blocked to protect history.");
|
||||
|
||||
const mirrorSyncCalls = mockHttpPost.mock.calls.filter((call) =>
|
||||
String(call[0]).includes("/mirror-sync")
|
||||
);
|
||||
expect(mirrorSyncCalls.length).toBe(0);
|
||||
});
|
||||
|
||||
test("continues sync when pre-sync snapshot fails and blocking is disabled", async () => {
|
||||
mockShouldCreatePreSyncBackup = true;
|
||||
mockShouldBlockSyncOnBackupFailure = false;
|
||||
mockCreatePreSyncBundleBackup.mockImplementation(() =>
|
||||
Promise.reject(new Error("simulated backup failure"))
|
||||
);
|
||||
|
||||
const config: Partial<Config> = {
|
||||
userId: "user123",
|
||||
githubConfig: {
|
||||
username: "testuser",
|
||||
token: "github-token",
|
||||
privateRepositories: false,
|
||||
mirrorStarred: true,
|
||||
},
|
||||
giteaConfig: {
|
||||
url: "https://gitea.example.com",
|
||||
token: "encrypted-token",
|
||||
defaultOwner: "testuser",
|
||||
mirrorReleases: false,
|
||||
backupBeforeSync: true,
|
||||
blockSyncOnBackupFailure: false,
|
||||
},
|
||||
};
|
||||
|
||||
const repository: Repository = {
|
||||
id: "repo457",
|
||||
name: "mirror-repo",
|
||||
fullName: "user/mirror-repo",
|
||||
owner: "user",
|
||||
cloneUrl: "https://github.com/user/mirror-repo.git",
|
||||
isPrivate: false,
|
||||
isStarred: true,
|
||||
status: repoStatusEnum.parse("mirrored"),
|
||||
visibility: "public",
|
||||
userId: "user123",
|
||||
createdAt: new Date(),
|
||||
updatedAt: new Date(),
|
||||
};
|
||||
|
||||
const result = await syncGiteaRepoEnhanced(
|
||||
{ config, repository },
|
||||
{
|
||||
getGiteaRepoOwnerAsync: mockGetGiteaRepoOwnerAsync,
|
||||
mirrorGitHubReleasesToGitea: mockMirrorGitHubReleasesToGitea,
|
||||
mirrorGitRepoIssuesToGitea: mockMirrorGitRepoIssuesToGitea,
|
||||
mirrorGitRepoPullRequestsToGitea: mockMirrorGitRepoPullRequestsToGitea,
|
||||
mirrorGitRepoLabelsToGitea: mockMirrorGitRepoLabelsToGitea,
|
||||
mirrorGitRepoMilestonesToGitea: mockMirrorGitRepoMilestonesToGitea,
|
||||
}
|
||||
);
|
||||
|
||||
expect(result).toEqual({ success: true });
|
||||
const mirrorSyncCalls = mockHttpPost.mock.calls.filter((call) =>
|
||||
String(call[0]).includes("/mirror-sync")
|
||||
);
|
||||
expect(mirrorSyncCalls.length).toBe(1);
|
||||
});
|
||||
|
||||
test("mirrors metadata components when enabled and not previously synced", async () => {
|
||||
const config: Partial<Config> = {
|
||||
userId: "user123",
|
||||
@@ -587,7 +732,7 @@ describe("Enhanced Gitea Operations", () => {
|
||||
expect(mockMirrorGitRepoLabelsToGitea).not.toHaveBeenCalled();
|
||||
});
|
||||
|
||||
test("skips metadata mirroring when components already synced", async () => {
|
||||
test("continues incremental issue and PR syncing when metadata was previously synced", async () => {
|
||||
const config: Partial<Config> = {
|
||||
userId: "user123",
|
||||
githubConfig: {
|
||||
@@ -647,8 +792,8 @@ describe("Enhanced Gitea Operations", () => {
|
||||
);
|
||||
|
||||
expect(mockMirrorGitHubReleasesToGitea).not.toHaveBeenCalled();
|
||||
expect(mockMirrorGitRepoIssuesToGitea).not.toHaveBeenCalled();
|
||||
expect(mockMirrorGitRepoPullRequestsToGitea).not.toHaveBeenCalled();
|
||||
expect(mockMirrorGitRepoIssuesToGitea).toHaveBeenCalledTimes(1);
|
||||
expect(mockMirrorGitRepoPullRequestsToGitea).toHaveBeenCalledTimes(1);
|
||||
expect(mockMirrorGitRepoLabelsToGitea).not.toHaveBeenCalled();
|
||||
expect(mockMirrorGitRepoMilestonesToGitea).not.toHaveBeenCalled();
|
||||
});
|
||||
|
||||
@@ -15,6 +15,16 @@ import { httpPost, httpGet, httpPatch, HttpError } from "./http-client";
|
||||
import { db, repositories } from "./db";
|
||||
import { eq } from "drizzle-orm";
|
||||
import { repoStatusEnum } from "@/types/Repository";
|
||||
import {
|
||||
createPreSyncBundleBackup,
|
||||
shouldCreatePreSyncBackup,
|
||||
shouldBlockSyncOnBackupFailure,
|
||||
resolveBackupStrategy,
|
||||
shouldBackupForStrategy,
|
||||
shouldBlockSyncForStrategy,
|
||||
strategyNeedsDetection,
|
||||
} from "./repo-backup";
|
||||
import { detectForcePush } from "./utils/force-push-detection";
|
||||
import {
|
||||
parseRepositoryMetadataState,
|
||||
serializeRepositoryMetadataState,
|
||||
@@ -250,9 +260,12 @@ export async function getOrCreateGiteaOrgEnhanced({
|
||||
export async function syncGiteaRepoEnhanced({
|
||||
config,
|
||||
repository,
|
||||
skipForcePushDetection,
|
||||
}: {
|
||||
config: Partial<Config>;
|
||||
repository: Repository;
|
||||
/** When true, skip force-push detection and blocking (used by approve-sync). */
|
||||
skipForcePushDetection?: boolean;
|
||||
}, deps?: SyncDependencies): Promise<any> {
|
||||
try {
|
||||
if (!config.userId || !config.giteaConfig?.url || !config.giteaConfig?.token) {
|
||||
@@ -313,6 +326,141 @@ export async function syncGiteaRepoEnhanced({
|
||||
throw new Error(`Repository ${repository.name} is not a mirror. Cannot sync.`);
|
||||
}
|
||||
|
||||
// ---- Smart backup strategy with force-push detection ----
|
||||
const backupStrategy = resolveBackupStrategy(config);
|
||||
let forcePushDetected = false;
|
||||
|
||||
if (backupStrategy !== "disabled") {
|
||||
// Run force-push detection if the strategy requires it
|
||||
// (skip when called from approve-sync to avoid re-blocking)
|
||||
if (strategyNeedsDetection(backupStrategy) && !skipForcePushDetection) {
|
||||
try {
|
||||
const decryptedGithubToken = decryptedConfig.githubConfig?.token;
|
||||
if (decryptedGithubToken) {
|
||||
const fpOctokit = new Octokit({ auth: decryptedGithubToken });
|
||||
const detectionResult = await detectForcePush({
|
||||
giteaUrl: config.giteaConfig.url,
|
||||
giteaToken: decryptedConfig.giteaConfig.token,
|
||||
giteaOwner: repoOwner,
|
||||
giteaRepo: repository.name,
|
||||
octokit: fpOctokit,
|
||||
githubOwner: repository.owner,
|
||||
githubRepo: repository.name,
|
||||
});
|
||||
|
||||
forcePushDetected = detectionResult.detected;
|
||||
|
||||
if (detectionResult.skipped) {
|
||||
console.log(
|
||||
`[Sync] Force-push detection skipped for ${repository.name}: ${detectionResult.skipReason}`,
|
||||
);
|
||||
} else if (forcePushDetected) {
|
||||
const branchNames = detectionResult.affectedBranches
|
||||
.map((b) => `${b.name} (${b.reason})`)
|
||||
.join(", ");
|
||||
console.warn(
|
||||
`[Sync] Force-push detected on ${repository.name}: ${branchNames}`,
|
||||
);
|
||||
}
|
||||
} else {
|
||||
console.log(
|
||||
`[Sync] Skipping force-push detection for ${repository.name}: no GitHub token`,
|
||||
);
|
||||
}
|
||||
} catch (detectionError) {
|
||||
// Fail-open: detection errors should never block sync
|
||||
console.warn(
|
||||
`[Sync] Force-push detection failed for ${repository.name}, proceeding with sync: ${
|
||||
detectionError instanceof Error ? detectionError.message : String(detectionError)
|
||||
}`,
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
// Check if sync should be blocked (block-on-force-push mode)
|
||||
if (shouldBlockSyncForStrategy(backupStrategy, forcePushDetected)) {
|
||||
const branchInfo = `Force-push detected; sync blocked for manual approval.`;
|
||||
|
||||
await db
|
||||
.update(repositories)
|
||||
.set({
|
||||
status: "pending-approval",
|
||||
updatedAt: new Date(),
|
||||
errorMessage: branchInfo,
|
||||
})
|
||||
.where(eq(repositories.id, repository.id!));
|
||||
|
||||
await createMirrorJob({
|
||||
userId: config.userId,
|
||||
repositoryId: repository.id,
|
||||
repositoryName: repository.name,
|
||||
message: `Sync blocked for ${repository.name}: force-push detected`,
|
||||
details: branchInfo,
|
||||
status: "pending-approval",
|
||||
});
|
||||
|
||||
console.warn(`[Sync] Sync blocked for ${repository.name}: pending manual approval`);
|
||||
return { blocked: true, reason: branchInfo };
|
||||
}
|
||||
|
||||
// Create backup if strategy says so
|
||||
if (shouldBackupForStrategy(backupStrategy, forcePushDetected)) {
|
||||
const cloneUrl =
|
||||
repoInfo.clone_url ||
|
||||
`${config.giteaConfig.url.replace(/\/$/, "")}/${repoOwner}/${repository.name}.git`;
|
||||
|
||||
try {
|
||||
const backupResult = await createPreSyncBundleBackup({
|
||||
config,
|
||||
owner: repoOwner,
|
||||
repoName: repository.name,
|
||||
cloneUrl,
|
||||
force: true, // Strategy already decided to backup; skip legacy gate
|
||||
});
|
||||
|
||||
await createMirrorJob({
|
||||
userId: config.userId,
|
||||
repositoryId: repository.id,
|
||||
repositoryName: repository.name,
|
||||
message: `Snapshot created for ${repository.name}`,
|
||||
details: `Pre-sync snapshot created at ${backupResult.bundlePath}.`,
|
||||
status: "syncing",
|
||||
});
|
||||
} catch (backupError) {
|
||||
const errorMessage =
|
||||
backupError instanceof Error ? backupError.message : String(backupError);
|
||||
|
||||
await createMirrorJob({
|
||||
userId: config.userId,
|
||||
repositoryId: repository.id,
|
||||
repositoryName: repository.name,
|
||||
message: `Snapshot failed for ${repository.name}`,
|
||||
details: `Pre-sync snapshot failed: ${errorMessage}`,
|
||||
status: "failed",
|
||||
});
|
||||
|
||||
if (shouldBlockSyncOnBackupFailure(config)) {
|
||||
await db
|
||||
.update(repositories)
|
||||
.set({
|
||||
status: repoStatusEnum.parse("failed"),
|
||||
updatedAt: new Date(),
|
||||
errorMessage: `Snapshot failed; sync blocked to protect history. ${errorMessage}`,
|
||||
})
|
||||
.where(eq(repositories.id, repository.id!));
|
||||
|
||||
throw new Error(
|
||||
`Snapshot failed; sync blocked to protect history. ${errorMessage}`,
|
||||
);
|
||||
}
|
||||
|
||||
console.warn(
|
||||
`[Sync] Snapshot failed for ${repository.name}, continuing because blockSyncOnBackupFailure=false: ${errorMessage}`,
|
||||
);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Update mirror interval if needed
|
||||
if (config.giteaConfig?.mirrorInterval) {
|
||||
try {
|
||||
@@ -361,12 +509,10 @@ export async function syncGiteaRepoEnhanced({
|
||||
!!config.giteaConfig?.mirrorReleases && !skipMetadataForStarred;
|
||||
const shouldMirrorIssuesThisRun =
|
||||
!!config.giteaConfig?.mirrorIssues &&
|
||||
!skipMetadataForStarred &&
|
||||
!metadataState.components.issues;
|
||||
!skipMetadataForStarred;
|
||||
const shouldMirrorPullRequests =
|
||||
!!config.giteaConfig?.mirrorPullRequests &&
|
||||
!skipMetadataForStarred &&
|
||||
!metadataState.components.pullRequests;
|
||||
!skipMetadataForStarred;
|
||||
const shouldMirrorLabels =
|
||||
!!config.giteaConfig?.mirrorLabels &&
|
||||
!skipMetadataForStarred &&
|
||||
@@ -440,13 +586,6 @@ export async function syncGiteaRepoEnhanced({
|
||||
);
|
||||
}
|
||||
}
|
||||
} else if (
|
||||
config.giteaConfig?.mirrorIssues &&
|
||||
metadataState.components.issues
|
||||
) {
|
||||
console.log(
|
||||
`[Sync] Issues already mirrored for ${repository.name}; skipping`
|
||||
);
|
||||
}
|
||||
|
||||
if (shouldMirrorPullRequests) {
|
||||
@@ -477,13 +616,6 @@ export async function syncGiteaRepoEnhanced({
|
||||
);
|
||||
}
|
||||
}
|
||||
} else if (
|
||||
config.giteaConfig?.mirrorPullRequests &&
|
||||
metadataState.components.pullRequests
|
||||
) {
|
||||
console.log(
|
||||
`[Sync] Pull requests already mirrored for ${repository.name}; skipping`
|
||||
);
|
||||
}
|
||||
|
||||
if (shouldMirrorLabels) {
|
||||
@@ -587,12 +719,12 @@ export async function syncGiteaRepoEnhanced({
|
||||
userId: config.userId,
|
||||
repositoryId: repository.id,
|
||||
repositoryName: repository.name,
|
||||
message: `Successfully synced repository: ${repository.name}`,
|
||||
details: `Repository ${repository.name} was synced with Gitea.`,
|
||||
message: `Sync requested for repository: ${repository.name}`,
|
||||
details: `Mirror sync was requested for ${repository.name}. Gitea/Forgejo performs the actual pull asynchronously; check remote logs for pull errors.`,
|
||||
status: "synced",
|
||||
});
|
||||
|
||||
console.log(`[Sync] Repository ${repository.name} synced successfully`);
|
||||
console.log(`[Sync] Mirror sync requested for repository ${repository.name}`);
|
||||
return response.data;
|
||||
} catch (syncError) {
|
||||
if (syncError instanceof HttpError && syncError.status === 400) {
|
||||
|
||||
@@ -24,9 +24,14 @@ mock.module("@/lib/db", () => {
|
||||
values: mock(() => Promise.resolve())
|
||||
}))
|
||||
},
|
||||
users: {},
|
||||
configs: {},
|
||||
repositories: {},
|
||||
organizations: {},
|
||||
events: {}
|
||||
events: {},
|
||||
mirrorJobs: {},
|
||||
accounts: {},
|
||||
sessions: {},
|
||||
};
|
||||
});
|
||||
|
||||
@@ -59,10 +64,16 @@ const mockGetOrCreateGiteaOrg = mock(async ({ orgName, config }: any) => {
|
||||
|
||||
const mockMirrorGitHubOrgRepoToGiteaOrg = mock(async () => {});
|
||||
const mockIsRepoPresentInGitea = mock(async () => false);
|
||||
const mockMirrorGithubRepoToGitea = mock(async () => {});
|
||||
const mockGetGiteaRepoOwnerAsync = mock(async () => "starred");
|
||||
const mockGetGiteaRepoOwner = mock(() => "starred");
|
||||
|
||||
mock.module("./gitea", () => ({
|
||||
getOrCreateGiteaOrg: mockGetOrCreateGiteaOrg,
|
||||
mirrorGitHubOrgRepoToGiteaOrg: mockMirrorGitHubOrgRepoToGiteaOrg,
|
||||
mirrorGithubRepoToGitea: mockMirrorGithubRepoToGitea,
|
||||
getGiteaRepoOwner: mockGetGiteaRepoOwner,
|
||||
getGiteaRepoOwnerAsync: mockGetGiteaRepoOwnerAsync,
|
||||
isRepoPresentInGitea: mockIsRepoPresentInGitea
|
||||
}));
|
||||
|
||||
@@ -226,4 +237,4 @@ describe("Starred Repository Error Handling", () => {
|
||||
});
|
||||
});
|
||||
|
||||
});
|
||||
});
|
||||
|
||||
@@ -27,8 +27,14 @@ mock.module("@/lib/db", () => {
|
||||
})
|
||||
})
|
||||
},
|
||||
users: {},
|
||||
configs: {},
|
||||
repositories: {},
|
||||
organizations: {}
|
||||
organizations: {},
|
||||
mirrorJobs: {},
|
||||
events: {},
|
||||
accounts: {},
|
||||
sessions: {},
|
||||
};
|
||||
});
|
||||
|
||||
@@ -55,8 +61,50 @@ mock.module("@/lib/http-client", () => {
|
||||
|
||||
// Mock the gitea module itself
|
||||
mock.module("./gitea", () => {
|
||||
const mockGetGiteaRepoOwner = mock(({ config, repository }: any) => {
|
||||
if (repository?.isStarred && config?.githubConfig?.starredReposMode === "preserve-owner") {
|
||||
return repository.organization || repository.owner;
|
||||
}
|
||||
if (repository?.isStarred) {
|
||||
return config?.githubConfig?.starredReposOrg || "starred";
|
||||
}
|
||||
|
||||
const mirrorStrategy =
|
||||
config?.githubConfig?.mirrorStrategy ||
|
||||
(config?.giteaConfig?.preserveOrgStructure ? "preserve" : "flat-user");
|
||||
|
||||
switch (mirrorStrategy) {
|
||||
case "preserve":
|
||||
return repository?.organization || config?.giteaConfig?.defaultOwner || "giteauser";
|
||||
case "single-org":
|
||||
return config?.giteaConfig?.organization || config?.giteaConfig?.defaultOwner || "giteauser";
|
||||
case "mixed":
|
||||
if (repository?.organization) return repository.organization;
|
||||
return config?.giteaConfig?.organization || config?.giteaConfig?.defaultOwner || "giteauser";
|
||||
case "flat-user":
|
||||
default:
|
||||
return config?.giteaConfig?.defaultOwner || "giteauser";
|
||||
}
|
||||
});
|
||||
const mockGetGiteaRepoOwnerAsync = mock(async ({ config, repository }: any) => {
|
||||
if (repository?.isStarred && config?.githubConfig?.starredReposMode === "preserve-owner") {
|
||||
return repository.organization || repository.owner;
|
||||
}
|
||||
|
||||
if (repository?.destinationOrg) {
|
||||
return repository.destinationOrg;
|
||||
}
|
||||
|
||||
if (repository?.organization && mockDbSelectResult[0]?.destinationOrg) {
|
||||
return mockDbSelectResult[0].destinationOrg;
|
||||
}
|
||||
|
||||
return config?.giteaConfig?.defaultOwner || "giteauser";
|
||||
});
|
||||
return {
|
||||
isRepoPresentInGitea: mockIsRepoPresentInGitea,
|
||||
getGiteaRepoOwner: mockGetGiteaRepoOwner,
|
||||
getGiteaRepoOwnerAsync: mockGetGiteaRepoOwnerAsync,
|
||||
mirrorGithubRepoToGitea: mock(async () => {}),
|
||||
mirrorGitHubOrgRepoToGiteaOrg: mock(async () => {})
|
||||
};
|
||||
@@ -342,6 +390,8 @@ describe("getGiteaRepoOwner - Organization Override Tests", () => {
|
||||
mirrorPublicOrgs: false,
|
||||
publicOrgs: [],
|
||||
starredCodeOnly: false,
|
||||
starredReposOrg: "starred",
|
||||
starredReposMode: "dedicated-org",
|
||||
mirrorStrategy: "preserve"
|
||||
},
|
||||
giteaConfig: {
|
||||
@@ -350,7 +400,6 @@ describe("getGiteaRepoOwner - Organization Override Tests", () => {
|
||||
token: "gitea-token",
|
||||
organization: "github-mirrors",
|
||||
visibility: "public",
|
||||
starredReposOrg: "starred",
|
||||
preserveVisibility: false
|
||||
}
|
||||
};
|
||||
@@ -390,8 +439,8 @@ describe("getGiteaRepoOwner - Organization Override Tests", () => {
|
||||
const repo = { ...baseRepo, isStarred: true };
|
||||
const configWithoutStarredOrg = {
|
||||
...baseConfig,
|
||||
giteaConfig: {
|
||||
...baseConfig.giteaConfig,
|
||||
githubConfig: {
|
||||
...baseConfig.githubConfig,
|
||||
starredReposOrg: undefined
|
||||
}
|
||||
};
|
||||
@@ -399,6 +448,34 @@ describe("getGiteaRepoOwner - Organization Override Tests", () => {
|
||||
expect(result).toBe("starred");
|
||||
});
|
||||
|
||||
test("starred repos preserve owner/org when starredReposMode is preserve-owner", () => {
|
||||
const repo = { ...baseRepo, isStarred: true, owner: "FOO", organization: "FOO", fullName: "FOO/BAR" };
|
||||
const configWithPreserveStarred = {
|
||||
...baseConfig,
|
||||
githubConfig: {
|
||||
...baseConfig.githubConfig!,
|
||||
starredReposMode: "preserve-owner" as const,
|
||||
},
|
||||
};
|
||||
|
||||
const result = getGiteaRepoOwner({ config: configWithPreserveStarred, repository: repo });
|
||||
expect(result).toBe("FOO");
|
||||
});
|
||||
|
||||
test("starred personal repos preserve owner when starredReposMode is preserve-owner", () => {
|
||||
const repo = { ...baseRepo, isStarred: true, owner: "alice", organization: undefined, fullName: "alice/demo" };
|
||||
const configWithPreserveStarred = {
|
||||
...baseConfig,
|
||||
githubConfig: {
|
||||
...baseConfig.githubConfig!,
|
||||
starredReposMode: "preserve-owner" as const,
|
||||
},
|
||||
};
|
||||
|
||||
const result = getGiteaRepoOwner({ config: configWithPreserveStarred, repository: repo });
|
||||
expect(result).toBe("alice");
|
||||
});
|
||||
|
||||
// Removed test for personalReposOrg as this field no longer exists
|
||||
|
||||
test("preserve strategy: personal repos fallback to username when no override", () => {
|
||||
@@ -492,4 +569,24 @@ describe("getGiteaRepoOwner - Organization Override Tests", () => {
|
||||
|
||||
expect(result).toBe("custom-org");
|
||||
});
|
||||
|
||||
test("getGiteaRepoOwnerAsync preserves starred owner when preserve-owner mode is enabled", async () => {
|
||||
const configWithUser: Partial<Config> = {
|
||||
...baseConfig,
|
||||
userId: "user-id",
|
||||
githubConfig: {
|
||||
...baseConfig.githubConfig!,
|
||||
starredReposMode: "preserve-owner",
|
||||
},
|
||||
};
|
||||
|
||||
const repo = { ...baseRepo, isStarred: true, owner: "FOO", organization: "FOO", fullName: "FOO/BAR" };
|
||||
|
||||
const result = await getGiteaRepoOwnerAsync({
|
||||
config: configWithUser,
|
||||
repository: repo,
|
||||
});
|
||||
|
||||
expect(result).toBe("FOO");
|
||||
});
|
||||
});
|
||||
|
||||
379
src/lib/gitea.ts
379
src/lib/gitea.ts
@@ -77,8 +77,12 @@ export const getGiteaRepoOwnerAsync = async ({
|
||||
throw new Error("User ID is required for organization overrides.");
|
||||
}
|
||||
|
||||
// Check if repository is starred - starred repos always go to starredReposOrg (highest priority)
|
||||
// Check if repository is starred
|
||||
if (repository.isStarred) {
|
||||
const starredReposMode = config.githubConfig.starredReposMode || "dedicated-org";
|
||||
if (starredReposMode === "preserve-owner") {
|
||||
return repository.organization || repository.owner;
|
||||
}
|
||||
return config.githubConfig.starredReposOrg || "starred";
|
||||
}
|
||||
|
||||
@@ -122,8 +126,12 @@ export const getGiteaRepoOwner = ({
|
||||
throw new Error("Gitea username is required.");
|
||||
}
|
||||
|
||||
// Check if repository is starred - starred repos always go to starredReposOrg
|
||||
// Check if repository is starred
|
||||
if (repository.isStarred) {
|
||||
const starredReposMode = config.githubConfig.starredReposMode || "dedicated-org";
|
||||
if (starredReposMode === "preserve-owner") {
|
||||
return repository.organization || repository.owner;
|
||||
}
|
||||
return config.githubConfig.starredReposOrg || "starred";
|
||||
}
|
||||
|
||||
@@ -372,7 +380,11 @@ export const mirrorGithubRepoToGitea = async ({
|
||||
// Determine the actual repository name to use (handle duplicates for starred repos)
|
||||
let targetRepoName = repository.name;
|
||||
|
||||
if (repository.isStarred && config.githubConfig) {
|
||||
if (
|
||||
repository.isStarred &&
|
||||
config.githubConfig &&
|
||||
(config.githubConfig.starredReposMode || "dedicated-org") === "dedicated-org"
|
||||
) {
|
||||
// Extract GitHub owner from full_name (format: owner/repo)
|
||||
const githubOwner = repository.fullName.split('/')[0];
|
||||
|
||||
@@ -567,7 +579,7 @@ export const mirrorGithubRepoToGitea = async ({
|
||||
lfs: config.giteaConfig?.lfs || false,
|
||||
private: repository.isPrivate,
|
||||
repo_owner: repoOwner,
|
||||
description: "",
|
||||
description: repository.description?.trim() || "",
|
||||
service: "git",
|
||||
};
|
||||
|
||||
@@ -990,7 +1002,11 @@ export async function mirrorGitHubRepoToGiteaOrg({
|
||||
// Determine the actual repository name to use (handle duplicates for starred repos)
|
||||
let targetRepoName = repository.name;
|
||||
|
||||
if (repository.isStarred && config.githubConfig) {
|
||||
if (
|
||||
repository.isStarred &&
|
||||
config.githubConfig &&
|
||||
(config.githubConfig.starredReposMode || "dedicated-org") === "dedicated-org"
|
||||
) {
|
||||
// Extract GitHub owner from full_name (format: owner/repo)
|
||||
const githubOwner = repository.fullName.split('/')[0];
|
||||
|
||||
@@ -1757,6 +1773,53 @@ export const mirrorGitRepoIssuesToGitea = async ({
|
||||
return;
|
||||
}
|
||||
|
||||
const ghIssueMarkerRegex = /\[GH-ISSUE #(\d+)\]/i;
|
||||
const extractGitHubIssueNumber = (value: string | null | undefined): number | null => {
|
||||
if (!value) return null;
|
||||
const match = value.match(ghIssueMarkerRegex);
|
||||
if (!match?.[1]) return null;
|
||||
const parsed = Number.parseInt(match[1], 10);
|
||||
return Number.isFinite(parsed) ? parsed : null;
|
||||
};
|
||||
|
||||
const existingGiteaIssues: any[] = [];
|
||||
const titleFallbackMap = new Map<string, any[]>();
|
||||
const giteaIssueByGitHubNumber = new Map<number, any>();
|
||||
let issuesPage = 1;
|
||||
const issuesPerPage = 100;
|
||||
|
||||
while (true) {
|
||||
const existingIssuesRes = await httpGet(
|
||||
`${config.giteaConfig.url}/api/v1/repos/${giteaOwner}/${repoName}/issues?state=all&page=${issuesPage}&limit=${issuesPerPage}`,
|
||||
{
|
||||
Authorization: `token ${decryptedConfig.giteaConfig.token}`,
|
||||
}
|
||||
);
|
||||
|
||||
const pageIssues = Array.isArray(existingIssuesRes.data)
|
||||
? existingIssuesRes.data
|
||||
: [];
|
||||
if (!pageIssues.length) break;
|
||||
|
||||
existingGiteaIssues.push(...pageIssues);
|
||||
if (pageIssues.length < issuesPerPage) break;
|
||||
issuesPage += 1;
|
||||
}
|
||||
|
||||
for (const giteaIssue of existingGiteaIssues) {
|
||||
const mappedNumber = extractGitHubIssueNumber(giteaIssue.title);
|
||||
if (mappedNumber !== null) {
|
||||
giteaIssueByGitHubNumber.set(mappedNumber, giteaIssue);
|
||||
continue;
|
||||
}
|
||||
|
||||
const title = (giteaIssue.title || "").trim();
|
||||
if (!title) continue;
|
||||
const existing = titleFallbackMap.get(title) || [];
|
||||
existing.push(giteaIssue);
|
||||
titleFallbackMap.set(title, existing);
|
||||
}
|
||||
|
||||
// Get existing labels from Gitea
|
||||
const giteaLabelsRes = await httpGet(
|
||||
`${config.giteaConfig.url}/api/v1/repos/${giteaOwner}/${repoName}/labels`,
|
||||
@@ -1832,22 +1895,83 @@ export const mirrorGitRepoIssuesToGitea = async ({
|
||||
const issueOriginHeader = `Originally created by @${issueAuthor} on GitHub${
|
||||
issueCreatedOn ? ` (${issueCreatedOn})` : ""
|
||||
}.`;
|
||||
const issueMarker = `[GH-ISSUE #${issue.number}]`;
|
||||
const mirroredTitle = `${issueMarker} ${issue.title}`;
|
||||
const issueBody = `${issueOriginHeader}\nOriginal GitHub issue: ${issue.html_url}${originalAssignees}\n\n${issue.body ?? ""}`;
|
||||
|
||||
const issuePayload: any = {
|
||||
title: issue.title,
|
||||
body: `${issueOriginHeader}${originalAssignees}\n\n${issue.body ?? ""}`,
|
||||
title: mirroredTitle,
|
||||
body: issueBody,
|
||||
closed: issue.state === "closed",
|
||||
labels: giteaLabelIds,
|
||||
};
|
||||
|
||||
// Create the issue in Gitea
|
||||
const createdIssue = await httpPost(
|
||||
`${config.giteaConfig!.url}/api/v1/repos/${giteaOwner}/${repoName}/issues`,
|
||||
issuePayload,
|
||||
{
|
||||
Authorization: `token ${decryptedConfig.giteaConfig!.token}`,
|
||||
let existingIssue = giteaIssueByGitHubNumber.get(issue.number);
|
||||
if (!existingIssue) {
|
||||
const titleFallbackCandidates = titleFallbackMap.get(issue.title.trim()) || [];
|
||||
if (titleFallbackCandidates.length === 1) {
|
||||
existingIssue = titleFallbackCandidates[0];
|
||||
giteaIssueByGitHubNumber.set(issue.number, existingIssue);
|
||||
console.log(
|
||||
`[Issues] Matched legacy issue by title for #${issue.number}; converting to marker-based title`
|
||||
);
|
||||
} else if (titleFallbackCandidates.length > 1) {
|
||||
const filtered = titleFallbackCandidates.filter((candidate) =>
|
||||
String(candidate.body || "").startsWith(issueOriginHeader)
|
||||
);
|
||||
if (filtered.length === 1) {
|
||||
existingIssue = filtered[0];
|
||||
giteaIssueByGitHubNumber.set(issue.number, existingIssue);
|
||||
console.log(
|
||||
`[Issues] Matched legacy issue by body prefix for #${issue.number}; converting to marker-based title`
|
||||
);
|
||||
}
|
||||
}
|
||||
);
|
||||
}
|
||||
|
||||
let targetIssueNumber: number;
|
||||
if (existingIssue) {
|
||||
targetIssueNumber = existingIssue.number;
|
||||
await httpPatch(
|
||||
`${config.giteaConfig!.url}/api/v1/repos/${giteaOwner}/${repoName}/issues/${targetIssueNumber}`,
|
||||
{
|
||||
title: issuePayload.title,
|
||||
body: issuePayload.body,
|
||||
state: issue.state === "closed" ? "closed" : "open",
|
||||
labels: issuePayload.labels,
|
||||
},
|
||||
{
|
||||
Authorization: `token ${decryptedConfig.giteaConfig!.token}`,
|
||||
}
|
||||
);
|
||||
} else {
|
||||
const createdIssue = await httpPost(
|
||||
`${config.giteaConfig!.url}/api/v1/repos/${giteaOwner}/${repoName}/issues`,
|
||||
issuePayload,
|
||||
{
|
||||
Authorization: `token ${decryptedConfig.giteaConfig!.token}`,
|
||||
}
|
||||
);
|
||||
targetIssueNumber = createdIssue.data.number;
|
||||
|
||||
if (issue.state === "closed" && createdIssue.data.state !== "closed") {
|
||||
try {
|
||||
await httpPatch(
|
||||
`${config.giteaConfig!.url}/api/v1/repos/${giteaOwner}/${repoName}/issues/${targetIssueNumber}`,
|
||||
{ state: "closed" },
|
||||
{
|
||||
Authorization: `token ${decryptedConfig.giteaConfig!.token}`,
|
||||
}
|
||||
);
|
||||
} catch (closeError) {
|
||||
console.error(
|
||||
`[Issues] Failed to close issue #${targetIssueNumber}: ${
|
||||
closeError instanceof Error ? closeError.message : String(closeError)
|
||||
}`
|
||||
);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Clone comments
|
||||
const comments = await octokit.paginate(
|
||||
@@ -1872,19 +1996,59 @@ export const mirrorGitRepoIssuesToGitea = async ({
|
||||
|
||||
// Process comments sequentially to preserve historical ordering
|
||||
if (sortedComments.length > 0) {
|
||||
const existingComments: any[] = [];
|
||||
let commentsPage = 1;
|
||||
const commentsPerPage = 100;
|
||||
while (true) {
|
||||
const existingCommentsRes = await httpGet(
|
||||
`${config.giteaConfig!.url}/api/v1/repos/${giteaOwner}/${repoName}/issues/${targetIssueNumber}/comments?page=${commentsPage}&limit=${commentsPerPage}`,
|
||||
{
|
||||
Authorization: `token ${decryptedConfig.giteaConfig!.token}`,
|
||||
}
|
||||
);
|
||||
const pageComments = Array.isArray(existingCommentsRes.data)
|
||||
? existingCommentsRes.data
|
||||
: [];
|
||||
if (!pageComments.length) break;
|
||||
existingComments.push(...pageComments);
|
||||
if (pageComments.length < commentsPerPage) break;
|
||||
commentsPage += 1;
|
||||
}
|
||||
const mirroredCommentIds = new Set<number>();
|
||||
const existingCommentBodies = new Set<string>();
|
||||
for (const existingComment of existingComments) {
|
||||
const body = String(existingComment.body || "");
|
||||
if (body) existingCommentBodies.add(body);
|
||||
const marker = String(existingComment.body || "").match(
|
||||
/<!--\s*gh-comment-id:(\d+)\s*-->/i
|
||||
);
|
||||
if (marker?.[1]) {
|
||||
const parsed = Number.parseInt(marker[1], 10);
|
||||
if (Number.isFinite(parsed)) mirroredCommentIds.add(parsed);
|
||||
}
|
||||
}
|
||||
|
||||
await processWithRetry(
|
||||
sortedComments,
|
||||
async (comment) => {
|
||||
if (mirroredCommentIds.has(comment.id)) {
|
||||
return comment;
|
||||
}
|
||||
const commenter = comment.user?.login ?? "unknown";
|
||||
const commentDate = formatDateShort(comment.created_at);
|
||||
const commentHeader = `@${commenter} commented on GitHub${
|
||||
commentDate ? ` (${commentDate})` : ""
|
||||
}:`;
|
||||
const legacyBody = `${commentHeader}\n\n${comment.body ?? ""}`;
|
||||
const markedBody = `<!-- gh-comment-id:${comment.id} -->\n${legacyBody}`;
|
||||
if (existingCommentBodies.has(legacyBody) || existingCommentBodies.has(markedBody)) {
|
||||
return comment;
|
||||
}
|
||||
|
||||
await httpPost(
|
||||
`${config.giteaConfig!.url}/api/v1/repos/${giteaOwner}/${repoName}/issues/${createdIssue.data.number}/comments`,
|
||||
`${config.giteaConfig!.url}/api/v1/repos/${giteaOwner}/${repoName}/issues/${targetIssueNumber}/comments`,
|
||||
{
|
||||
body: `${commentHeader}\n\n${comment.body ?? ""}`,
|
||||
body: markedBody,
|
||||
},
|
||||
{
|
||||
Authorization: `token ${decryptedConfig.giteaConfig!.token}`,
|
||||
@@ -1974,17 +2138,43 @@ export async function mirrorGitHubReleasesToGitea({
|
||||
}
|
||||
|
||||
// Get release limit from config (default to 10)
|
||||
const releaseLimit = config.giteaConfig?.releaseLimit || 10;
|
||||
|
||||
const releases = await octokit.rest.repos.listReleases({
|
||||
owner: repository.owner,
|
||||
repo: repository.name,
|
||||
per_page: releaseLimit, // Only fetch the latest N releases
|
||||
});
|
||||
const releaseLimit = Math.max(1, Math.floor(config.giteaConfig?.releaseLimit || 10));
|
||||
|
||||
console.log(`[Releases] Found ${releases.data.length} releases (limited to latest ${releaseLimit}) to mirror for ${repository.fullName}`);
|
||||
// GitHub API max per page is 100; paginate until we reach the configured limit.
|
||||
const releases: Awaited<
|
||||
ReturnType<typeof octokit.rest.repos.listReleases>
|
||||
>["data"] = [];
|
||||
let page = 1;
|
||||
const perPage = Math.min(100, releaseLimit);
|
||||
|
||||
if (releases.data.length === 0) {
|
||||
while (releases.length < releaseLimit) {
|
||||
const response = await octokit.rest.repos.listReleases({
|
||||
owner: repository.owner,
|
||||
repo: repository.name,
|
||||
per_page: perPage,
|
||||
page,
|
||||
});
|
||||
|
||||
if (response.data.length === 0) {
|
||||
break;
|
||||
}
|
||||
|
||||
releases.push(...response.data);
|
||||
|
||||
if (response.data.length < perPage) {
|
||||
break;
|
||||
}
|
||||
|
||||
page++;
|
||||
}
|
||||
|
||||
const limitedReleases = releases.slice(0, releaseLimit);
|
||||
|
||||
console.log(
|
||||
`[Releases] Found ${limitedReleases.length} releases (limited to latest ${releaseLimit}) to mirror for ${repository.fullName}`
|
||||
);
|
||||
|
||||
if (limitedReleases.length === 0) {
|
||||
console.log(`[Releases] No releases to mirror for ${repository.fullName}`);
|
||||
return;
|
||||
}
|
||||
@@ -1992,7 +2182,7 @@ export async function mirrorGitHubReleasesToGitea({
|
||||
let mirroredCount = 0;
|
||||
let skippedCount = 0;
|
||||
|
||||
const getReleaseTimestamp = (release: typeof releases.data[number]) => {
|
||||
const getReleaseTimestamp = (release: (typeof limitedReleases)[number]) => {
|
||||
// Use published_at first (when the release was published on GitHub)
|
||||
// Fall back to created_at (when the git tag was created) only if published_at is missing
|
||||
// This matches GitHub's sorting behavior and handles cases where multiple tags
|
||||
@@ -2003,10 +2193,9 @@ export async function mirrorGitHubReleasesToGitea({
|
||||
};
|
||||
|
||||
// Capture the latest releases, then process them oldest-to-newest so Gitea mirrors keep chronological order
|
||||
const releasesToProcess = releases.data
|
||||
const releasesToProcess = limitedReleases
|
||||
.slice()
|
||||
.sort((a, b) => getReleaseTimestamp(b) - getReleaseTimestamp(a))
|
||||
.slice(0, releaseLimit)
|
||||
.sort((a, b) => getReleaseTimestamp(a) - getReleaseTimestamp(b));
|
||||
|
||||
console.log(`[Releases] Processing ${releasesToProcess.length} releases in chronological order (oldest to newest by published date)`);
|
||||
@@ -2361,6 +2550,34 @@ export async function mirrorGitRepoPullRequestsToGitea({
|
||||
}
|
||||
}
|
||||
|
||||
const existingPrIssuesByNumber = new Map<number, any>();
|
||||
let prIssuesPage = 1;
|
||||
const prIssuesPerPage = 100;
|
||||
while (true) {
|
||||
const existingIssuesRes = await httpGet(
|
||||
`${config.giteaConfig.url}/api/v1/repos/${giteaOwner}/${repoName}/issues?state=all&page=${prIssuesPage}&limit=${prIssuesPerPage}`,
|
||||
{
|
||||
Authorization: `token ${decryptedConfig.giteaConfig.token}`,
|
||||
}
|
||||
);
|
||||
const pageIssues = Array.isArray(existingIssuesRes.data)
|
||||
? existingIssuesRes.data
|
||||
: [];
|
||||
if (!pageIssues.length) break;
|
||||
|
||||
for (const issue of pageIssues) {
|
||||
const match = String(issue.title || "").match(/\[PR #(\d+)\]/i);
|
||||
if (!match?.[1]) continue;
|
||||
const prNumber = Number.parseInt(match[1], 10);
|
||||
if (Number.isFinite(prNumber)) {
|
||||
existingPrIssuesByNumber.set(prNumber, issue);
|
||||
}
|
||||
}
|
||||
|
||||
if (pageIssues.length < prIssuesPerPage) break;
|
||||
prIssuesPage += 1;
|
||||
}
|
||||
|
||||
const { processWithRetry } = await import("@/lib/utils/concurrency");
|
||||
|
||||
const rawPullConcurrency = config.giteaConfig?.pullRequestConcurrency ?? 5;
|
||||
@@ -2458,14 +2675,54 @@ export async function mirrorGitRepoPullRequestsToGitea({
|
||||
closed: pr.state === "closed" || pr.merged_at !== null,
|
||||
};
|
||||
|
||||
console.log(`[Pull Requests] Creating enriched issue for PR #${pr.number}: ${pr.title}`);
|
||||
await httpPost(
|
||||
`${config.giteaConfig!.url}/api/v1/repos/${giteaOwner}/${repoName}/issues`,
|
||||
issueData,
|
||||
{
|
||||
Authorization: `token ${decryptedConfig.giteaConfig!.token}`,
|
||||
const existingPrIssue = existingPrIssuesByNumber.get(pr.number);
|
||||
if (existingPrIssue) {
|
||||
await httpPatch(
|
||||
`${config.giteaConfig!.url}/api/v1/repos/${giteaOwner}/${repoName}/issues/${existingPrIssue.number}`,
|
||||
{
|
||||
title: issueData.title,
|
||||
body: issueData.body,
|
||||
state: issueData.closed ? "closed" : "open",
|
||||
labels: issueData.labels,
|
||||
},
|
||||
{
|
||||
Authorization: `token ${decryptedConfig.giteaConfig!.token}`,
|
||||
}
|
||||
);
|
||||
} else {
|
||||
console.log(`[Pull Requests] Creating enriched issue for PR #${pr.number}: ${pr.title}`);
|
||||
const createdPrIssue = await httpPost(
|
||||
`${config.giteaConfig!.url}/api/v1/repos/${giteaOwner}/${repoName}/issues`,
|
||||
issueData,
|
||||
{
|
||||
Authorization: `token ${decryptedConfig.giteaConfig!.token}`,
|
||||
}
|
||||
);
|
||||
existingPrIssuesByNumber.set(pr.number, createdPrIssue.data);
|
||||
|
||||
// Verify and explicitly close if the PR issue should be closed but wasn't
|
||||
const prShouldBeClosed = pr.state === "closed" || pr.merged_at !== null;
|
||||
const prIsActuallyClosed = createdPrIssue.data.state === "closed";
|
||||
|
||||
if (prShouldBeClosed && !prIsActuallyClosed) {
|
||||
try {
|
||||
await httpPatch(
|
||||
`${config.giteaConfig!.url}/api/v1/repos/${giteaOwner}/${repoName}/issues/${createdPrIssue.data.number}`,
|
||||
{ state: "closed" },
|
||||
{
|
||||
Authorization: `token ${decryptedConfig.giteaConfig!.token}`,
|
||||
}
|
||||
);
|
||||
} catch (closeError) {
|
||||
console.error(
|
||||
`[Pull Requests] Failed to close issue for PR #${pr.number}: ${
|
||||
closeError instanceof Error ? closeError.message : String(closeError)
|
||||
}`
|
||||
);
|
||||
}
|
||||
}
|
||||
);
|
||||
}
|
||||
|
||||
successCount++;
|
||||
console.log(`[Pull Requests] ✅ Successfully created issue for PR #${pr.number}`);
|
||||
} catch (apiError) {
|
||||
@@ -2479,13 +2736,53 @@ export async function mirrorGitRepoPullRequestsToGitea({
|
||||
};
|
||||
|
||||
try {
|
||||
await httpPost(
|
||||
`${config.giteaConfig!.url}/api/v1/repos/${giteaOwner}/${repoName}/issues`,
|
||||
basicIssueData,
|
||||
{
|
||||
Authorization: `token ${decryptedConfig.giteaConfig!.token}`,
|
||||
const existingPrIssue = existingPrIssuesByNumber.get(pr.number);
|
||||
if (existingPrIssue) {
|
||||
await httpPatch(
|
||||
`${config.giteaConfig!.url}/api/v1/repos/${giteaOwner}/${repoName}/issues/${existingPrIssue.number}`,
|
||||
{
|
||||
title: basicIssueData.title,
|
||||
body: basicIssueData.body,
|
||||
state: basicIssueData.closed ? "closed" : "open",
|
||||
labels: basicIssueData.labels,
|
||||
},
|
||||
{
|
||||
Authorization: `token ${decryptedConfig.giteaConfig!.token}`,
|
||||
}
|
||||
);
|
||||
} else {
|
||||
const createdBasicPrIssue = await httpPost(
|
||||
`${config.giteaConfig!.url}/api/v1/repos/${giteaOwner}/${repoName}/issues`,
|
||||
basicIssueData,
|
||||
{
|
||||
Authorization: `token ${decryptedConfig.giteaConfig!.token}`,
|
||||
}
|
||||
);
|
||||
existingPrIssuesByNumber.set(pr.number, createdBasicPrIssue.data);
|
||||
|
||||
// Verify and explicitly close if needed
|
||||
const basicPrShouldBeClosed = pr.state === "closed" || pr.merged_at !== null;
|
||||
const basicPrIsActuallyClosed = createdBasicPrIssue.data.state === "closed";
|
||||
|
||||
if (basicPrShouldBeClosed && !basicPrIsActuallyClosed) {
|
||||
try {
|
||||
await httpPatch(
|
||||
`${config.giteaConfig!.url}/api/v1/repos/${giteaOwner}/${repoName}/issues/${createdBasicPrIssue.data.number}`,
|
||||
{ state: "closed" },
|
||||
{
|
||||
Authorization: `token ${decryptedConfig.giteaConfig!.token}`,
|
||||
}
|
||||
);
|
||||
} catch (closeError) {
|
||||
console.error(
|
||||
`[Pull Requests] Failed to close basic issue for PR #${pr.number}: ${
|
||||
closeError instanceof Error ? closeError.message : String(closeError)
|
||||
}`
|
||||
);
|
||||
}
|
||||
}
|
||||
);
|
||||
}
|
||||
|
||||
successCount++;
|
||||
console.log(`[Pull Requests] ✅ Created basic issue for PR #${pr.number}`);
|
||||
} catch (error) {
|
||||
|
||||
@@ -22,22 +22,30 @@ if (process.env.NODE_ENV !== "test") {
|
||||
// Fallback to base Octokit if .plugin is not present
|
||||
const MyOctokit: any = (Octokit as any)?.plugin?.call
|
||||
? (Octokit as any).plugin(throttling)
|
||||
: Octokit as any;
|
||||
: (Octokit as any);
|
||||
|
||||
/**
|
||||
* Creates an authenticated Octokit instance with rate limit tracking and throttling
|
||||
*/
|
||||
export function createGitHubClient(token: string, userId?: string, username?: string): Octokit {
|
||||
export function createGitHubClient(
|
||||
token: string,
|
||||
userId?: string,
|
||||
username?: string,
|
||||
): Octokit {
|
||||
// Create a proper User-Agent to identify our application
|
||||
// This helps GitHub understand our traffic patterns and can provide better rate limits
|
||||
const userAgent = username
|
||||
? `gitea-mirror/3.5.4 (user:${username})`
|
||||
const userAgent = username
|
||||
? `gitea-mirror/3.5.4 (user:${username})`
|
||||
: "gitea-mirror/3.5.4";
|
||||
|
||||
|
||||
// Support GH_API_URL (preferred) or GITHUB_API_URL (may conflict with GitHub Actions)
|
||||
// GitHub Actions sets GITHUB_API_URL to https://api.github.com by default
|
||||
const baseUrl = process.env.GH_API_URL || process.env.GITHUB_API_URL || "https://api.github.com";
|
||||
|
||||
const octokit = new MyOctokit({
|
||||
auth: token, // Always use token for authentication (5000 req/hr vs 60 for unauthenticated)
|
||||
userAgent, // Identify our application and user
|
||||
baseUrl: "https://api.github.com", // Explicitly set the API endpoint
|
||||
baseUrl, // Configurable for E2E testing
|
||||
log: {
|
||||
debug: () => {},
|
||||
info: console.log,
|
||||
@@ -52,14 +60,19 @@ export function createGitHubClient(token: string, userId?: string, username?: st
|
||||
},
|
||||
},
|
||||
throttle: {
|
||||
onRateLimit: async (retryAfter: number, options: any, octokit: any, retryCount: number) => {
|
||||
onRateLimit: async (
|
||||
retryAfter: number,
|
||||
options: any,
|
||||
octokit: any,
|
||||
retryCount: number,
|
||||
) => {
|
||||
const isSearch = options.url.includes("/search/");
|
||||
const maxRetries = isSearch ? 5 : 3; // Search endpoints get more retries
|
||||
|
||||
|
||||
console.warn(
|
||||
`[GitHub] Rate limit hit for ${options.method} ${options.url}. Retry ${retryCount + 1}/${maxRetries}`
|
||||
`[GitHub] Rate limit hit for ${options.method} ${options.url}. Retry ${retryCount + 1}/${maxRetries}`,
|
||||
);
|
||||
|
||||
|
||||
// Update rate limit status and notify UI (if available)
|
||||
if (userId && RateLimitManager) {
|
||||
await RateLimitManager.updateFromResponse(userId, {
|
||||
@@ -68,7 +81,7 @@ export function createGitHubClient(token: string, userId?: string, username?: st
|
||||
"x-ratelimit-reset": (Date.now() / 1000 + retryAfter).toString(),
|
||||
});
|
||||
}
|
||||
|
||||
|
||||
if (userId && publishEvent) {
|
||||
await publishEvent({
|
||||
userId,
|
||||
@@ -83,22 +96,29 @@ export function createGitHubClient(token: string, userId?: string, username?: st
|
||||
},
|
||||
});
|
||||
}
|
||||
|
||||
|
||||
// Retry with exponential backoff
|
||||
if (retryCount < maxRetries) {
|
||||
console.log(`[GitHub] Waiting ${retryAfter}s before retry...`);
|
||||
return true;
|
||||
}
|
||||
|
||||
|
||||
// Max retries reached
|
||||
console.error(`[GitHub] Max retries (${maxRetries}) reached for ${options.url}`);
|
||||
console.error(
|
||||
`[GitHub] Max retries (${maxRetries}) reached for ${options.url}`,
|
||||
);
|
||||
return false;
|
||||
},
|
||||
onSecondaryRateLimit: async (retryAfter: number, options: any, octokit: any, retryCount: number) => {
|
||||
onSecondaryRateLimit: async (
|
||||
retryAfter: number,
|
||||
options: any,
|
||||
octokit: any,
|
||||
retryCount: number,
|
||||
) => {
|
||||
console.warn(
|
||||
`[GitHub] Secondary rate limit hit for ${options.method} ${options.url}`
|
||||
`[GitHub] Secondary rate limit hit for ${options.method} ${options.url}`,
|
||||
);
|
||||
|
||||
|
||||
// Update status and notify UI (if available)
|
||||
if (userId && publishEvent) {
|
||||
await publishEvent({
|
||||
@@ -114,13 +134,15 @@ export function createGitHubClient(token: string, userId?: string, username?: st
|
||||
},
|
||||
});
|
||||
}
|
||||
|
||||
|
||||
// Retry up to 2 times for secondary rate limits
|
||||
if (retryCount < 2) {
|
||||
console.log(`[GitHub] Waiting ${retryAfter}s for secondary rate limit...`);
|
||||
console.log(
|
||||
`[GitHub] Waiting ${retryAfter}s for secondary rate limit...`,
|
||||
);
|
||||
return true;
|
||||
}
|
||||
|
||||
|
||||
return false;
|
||||
},
|
||||
// Throttle options to prevent hitting limits
|
||||
@@ -129,50 +151,57 @@ export function createGitHubClient(token: string, userId?: string, username?: st
|
||||
retryAfterBaseValue: 1000, // Base retry in ms
|
||||
},
|
||||
});
|
||||
|
||||
// Add additional rate limit tracking if userId is provided and RateLimitManager is available
|
||||
|
||||
// Add rate limit tracking hooks if userId is provided and RateLimitManager is available
|
||||
if (userId && RateLimitManager) {
|
||||
octokit.hook.after("request", async (response: any, options: any) => {
|
||||
// Update rate limit from response headers
|
||||
octokit.hook.after("request", async (response: any, _options: any) => {
|
||||
if (response.headers) {
|
||||
await RateLimitManager.updateFromResponse(userId, response.headers);
|
||||
}
|
||||
});
|
||||
|
||||
|
||||
octokit.hook.error("request", async (error: any, options: any) => {
|
||||
// Handle rate limit errors
|
||||
if (error.status === 403 || error.status === 429) {
|
||||
const message = error.message || "";
|
||||
|
||||
if (message.includes("rate limit") || message.includes("API rate limit")) {
|
||||
console.error(`[GitHub] Rate limit error for user ${userId}: ${message}`);
|
||||
|
||||
|
||||
if (
|
||||
message.includes("rate limit") ||
|
||||
message.includes("API rate limit")
|
||||
) {
|
||||
console.error(
|
||||
`[GitHub] Rate limit error for user ${userId}: ${message}`,
|
||||
);
|
||||
|
||||
// Update rate limit status from error response (if available)
|
||||
if (error.response?.headers && RateLimitManager) {
|
||||
await RateLimitManager.updateFromResponse(userId, error.response.headers);
|
||||
await RateLimitManager.updateFromResponse(
|
||||
userId,
|
||||
error.response.headers,
|
||||
);
|
||||
}
|
||||
|
||||
|
||||
// Create error event for UI (if available)
|
||||
if (publishEvent) {
|
||||
await publishEvent({
|
||||
userId,
|
||||
channel: "rate-limit",
|
||||
payload: {
|
||||
type: "error",
|
||||
provider: "github",
|
||||
error: message,
|
||||
endpoint: options.url,
|
||||
message: `Rate limit exceeded: ${message}`,
|
||||
},
|
||||
});
|
||||
channel: "rate-limit",
|
||||
payload: {
|
||||
type: "error",
|
||||
provider: "github",
|
||||
error: message,
|
||||
endpoint: options.url,
|
||||
message: `Rate limit exceeded: ${message}`,
|
||||
},
|
||||
});
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
throw error;
|
||||
});
|
||||
}
|
||||
|
||||
|
||||
return octokit;
|
||||
}
|
||||
|
||||
@@ -213,7 +242,7 @@ export async function getGithubRepositories({
|
||||
try {
|
||||
const repos = await octokit.paginate(
|
||||
octokit.repos.listForAuthenticatedUser,
|
||||
{ per_page: 100 }
|
||||
{ per_page: 100 },
|
||||
);
|
||||
|
||||
const skipForks = config.githubConfig?.skipForks ?? false;
|
||||
@@ -254,6 +283,7 @@ export async function getGithubRepositories({
|
||||
visibility: (repo.visibility ?? "public") as GitRepo["visibility"],
|
||||
|
||||
status: "imported",
|
||||
isDisabled: repo.disabled ?? false,
|
||||
lastMirrored: undefined,
|
||||
errorMessage: undefined,
|
||||
|
||||
@@ -264,7 +294,7 @@ export async function getGithubRepositories({
|
||||
throw new Error(
|
||||
`Error fetching repositories: ${
|
||||
error instanceof Error ? error.message : String(error)
|
||||
}`
|
||||
}`,
|
||||
);
|
||||
}
|
||||
}
|
||||
@@ -275,13 +305,13 @@ export async function getGithubStarredRepositories({
|
||||
}: {
|
||||
octokit: Octokit;
|
||||
config: Partial<Config>;
|
||||
}) {
|
||||
}): Promise<GitRepo[]> {
|
||||
try {
|
||||
const starredRepos = await octokit.paginate(
|
||||
octokit.activity.listReposStarredByAuthenticatedUser,
|
||||
{
|
||||
per_page: 100,
|
||||
}
|
||||
},
|
||||
);
|
||||
|
||||
return starredRepos.map((repo) => ({
|
||||
@@ -314,6 +344,7 @@ export async function getGithubStarredRepositories({
|
||||
visibility: (repo.visibility ?? "public") as GitRepo["visibility"],
|
||||
|
||||
status: "imported",
|
||||
isDisabled: repo.disabled ?? false,
|
||||
lastMirrored: undefined,
|
||||
errorMessage: undefined,
|
||||
|
||||
@@ -324,7 +355,7 @@ export async function getGithubStarredRepositories({
|
||||
throw new Error(
|
||||
`Error fetching starred repositories: ${
|
||||
error instanceof Error ? error.message : String(error)
|
||||
}`
|
||||
}`,
|
||||
);
|
||||
}
|
||||
}
|
||||
@@ -347,13 +378,15 @@ export async function getGithubOrganizations({
|
||||
// Get excluded organizations from environment variable
|
||||
const excludedOrgsEnv = process.env.GITHUB_EXCLUDED_ORGS;
|
||||
const excludedOrgs = excludedOrgsEnv
|
||||
? excludedOrgsEnv.split(',').map(org => org.trim().toLowerCase())
|
||||
? excludedOrgsEnv.split(",").map((org) => org.trim().toLowerCase())
|
||||
: [];
|
||||
|
||||
// Filter out excluded organizations
|
||||
const filteredOrgs = orgs.filter(org => {
|
||||
const filteredOrgs = orgs.filter((org) => {
|
||||
if (excludedOrgs.includes(org.login.toLowerCase())) {
|
||||
console.log(`Skipping organization ${org.login} - excluded via GITHUB_EXCLUDED_ORGS environment variable`);
|
||||
console.log(
|
||||
`Skipping organization ${org.login} - excluded via GITHUB_EXCLUDED_ORGS environment variable`,
|
||||
);
|
||||
return false;
|
||||
}
|
||||
return true;
|
||||
@@ -379,7 +412,7 @@ export async function getGithubOrganizations({
|
||||
createdAt: new Date(),
|
||||
updatedAt: new Date(),
|
||||
};
|
||||
})
|
||||
}),
|
||||
);
|
||||
|
||||
return organizations;
|
||||
@@ -387,7 +420,7 @@ export async function getGithubOrganizations({
|
||||
throw new Error(
|
||||
`Error fetching organizations: ${
|
||||
error instanceof Error ? error.message : String(error)
|
||||
}`
|
||||
}`,
|
||||
);
|
||||
}
|
||||
}
|
||||
@@ -438,6 +471,7 @@ export async function getGithubOrganizationRepositories({
|
||||
visibility: (repo.visibility ?? "public") as GitRepo["visibility"],
|
||||
|
||||
status: "imported",
|
||||
isDisabled: repo.disabled ?? false,
|
||||
lastMirrored: undefined,
|
||||
errorMessage: undefined,
|
||||
|
||||
@@ -448,7 +482,7 @@ export async function getGithubOrganizationRepositories({
|
||||
throw new Error(
|
||||
`Error fetching organization repositories: ${
|
||||
error instanceof Error ? error.message : String(error)
|
||||
}`
|
||||
}`,
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
248
src/lib/repo-backup.test.ts
Normal file
248
src/lib/repo-backup.test.ts
Normal file
@@ -0,0 +1,248 @@
|
||||
import path from "node:path";
|
||||
import { afterEach, beforeEach, describe, expect, test } from "bun:test";
|
||||
import type { Config } from "@/types/config";
|
||||
import {
|
||||
resolveBackupPaths,
|
||||
resolveBackupStrategy,
|
||||
shouldBackupForStrategy,
|
||||
shouldBlockSyncForStrategy,
|
||||
strategyNeedsDetection,
|
||||
} from "@/lib/repo-backup";
|
||||
|
||||
describe("resolveBackupPaths", () => {
|
||||
let originalBackupDirEnv: string | undefined;
|
||||
|
||||
beforeEach(() => {
|
||||
originalBackupDirEnv = process.env.PRE_SYNC_BACKUP_DIR;
|
||||
delete process.env.PRE_SYNC_BACKUP_DIR;
|
||||
});
|
||||
|
||||
afterEach(() => {
|
||||
if (originalBackupDirEnv === undefined) {
|
||||
delete process.env.PRE_SYNC_BACKUP_DIR;
|
||||
} else {
|
||||
process.env.PRE_SYNC_BACKUP_DIR = originalBackupDirEnv;
|
||||
}
|
||||
});
|
||||
|
||||
test("returns absolute paths when backupDirectory is relative", () => {
|
||||
const config: Partial<Config> = {
|
||||
userId: "user-123",
|
||||
giteaConfig: {
|
||||
backupDirectory: "data/repo-backups",
|
||||
} as Config["giteaConfig"],
|
||||
};
|
||||
|
||||
const { backupRoot, repoBackupDir } = resolveBackupPaths({
|
||||
config,
|
||||
owner: "RayLabsHQ",
|
||||
repoName: "gitea-mirror",
|
||||
});
|
||||
|
||||
expect(path.isAbsolute(backupRoot)).toBe(true);
|
||||
expect(path.isAbsolute(repoBackupDir)).toBe(true);
|
||||
expect(repoBackupDir).toBe(
|
||||
path.join(backupRoot, "user-123", "RayLabsHQ", "gitea-mirror")
|
||||
);
|
||||
});
|
||||
|
||||
test("returns absolute paths when backupDirectory is already absolute", () => {
|
||||
const config: Partial<Config> = {
|
||||
userId: "user-123",
|
||||
giteaConfig: {
|
||||
backupDirectory: "/data/repo-backups",
|
||||
} as Config["giteaConfig"],
|
||||
};
|
||||
|
||||
const { backupRoot, repoBackupDir } = resolveBackupPaths({
|
||||
config,
|
||||
owner: "owner",
|
||||
repoName: "repo",
|
||||
});
|
||||
|
||||
expect(backupRoot).toBe("/data/repo-backups");
|
||||
expect(path.isAbsolute(repoBackupDir)).toBe(true);
|
||||
});
|
||||
|
||||
test("falls back to cwd-based path when no backupDirectory is set", () => {
|
||||
const config: Partial<Config> = {
|
||||
userId: "user-123",
|
||||
giteaConfig: {} as Config["giteaConfig"],
|
||||
};
|
||||
|
||||
const { backupRoot } = resolveBackupPaths({
|
||||
config,
|
||||
owner: "owner",
|
||||
repoName: "repo",
|
||||
});
|
||||
|
||||
expect(path.isAbsolute(backupRoot)).toBe(true);
|
||||
expect(backupRoot).toBe(
|
||||
path.resolve(process.cwd(), "data", "repo-backups")
|
||||
);
|
||||
});
|
||||
|
||||
test("uses PRE_SYNC_BACKUP_DIR env var when config has no backupDirectory", () => {
|
||||
process.env.PRE_SYNC_BACKUP_DIR = "custom/backup/path";
|
||||
|
||||
const config: Partial<Config> = {
|
||||
userId: "user-123",
|
||||
giteaConfig: {} as Config["giteaConfig"],
|
||||
};
|
||||
|
||||
const { backupRoot } = resolveBackupPaths({
|
||||
config,
|
||||
owner: "owner",
|
||||
repoName: "repo",
|
||||
});
|
||||
|
||||
expect(path.isAbsolute(backupRoot)).toBe(true);
|
||||
expect(backupRoot).toBe(path.resolve("custom/backup/path"));
|
||||
});
|
||||
|
||||
test("sanitizes owner and repoName in path segments", () => {
|
||||
const config: Partial<Config> = {
|
||||
userId: "user-123",
|
||||
giteaConfig: {
|
||||
backupDirectory: "/backups",
|
||||
} as Config["giteaConfig"],
|
||||
};
|
||||
|
||||
const { repoBackupDir } = resolveBackupPaths({
|
||||
config,
|
||||
owner: "org/with-slash",
|
||||
repoName: "repo name!",
|
||||
});
|
||||
|
||||
expect(repoBackupDir).toBe(
|
||||
path.join("/backups", "user-123", "org_with-slash", "repo_name_")
|
||||
);
|
||||
});
|
||||
});
|
||||
|
||||
// ---- Backup strategy resolver tests ----
|
||||
|
||||
function makeConfig(overrides: Record<string, any> = {}): Partial<Config> {
|
||||
return {
|
||||
giteaConfig: {
|
||||
url: "https://gitea.example.com",
|
||||
token: "tok",
|
||||
...overrides,
|
||||
},
|
||||
} as Partial<Config>;
|
||||
}
|
||||
|
||||
const envKeysToClean = ["PRE_SYNC_BACKUP_STRATEGY", "PRE_SYNC_BACKUP_ENABLED"];
|
||||
|
||||
describe("resolveBackupStrategy", () => {
|
||||
let savedEnv: Record<string, string | undefined> = {};
|
||||
|
||||
beforeEach(() => {
|
||||
savedEnv = {};
|
||||
for (const key of envKeysToClean) {
|
||||
savedEnv[key] = process.env[key];
|
||||
delete process.env[key];
|
||||
}
|
||||
});
|
||||
|
||||
afterEach(() => {
|
||||
for (const [key, value] of Object.entries(savedEnv)) {
|
||||
if (value === undefined) {
|
||||
delete process.env[key];
|
||||
} else {
|
||||
process.env[key] = value;
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
test("returns explicit backupStrategy when set", () => {
|
||||
expect(resolveBackupStrategy(makeConfig({ backupStrategy: "always" }))).toBe("always");
|
||||
expect(resolveBackupStrategy(makeConfig({ backupStrategy: "disabled" }))).toBe("disabled");
|
||||
expect(resolveBackupStrategy(makeConfig({ backupStrategy: "on-force-push" }))).toBe("on-force-push");
|
||||
expect(resolveBackupStrategy(makeConfig({ backupStrategy: "block-on-force-push" }))).toBe("block-on-force-push");
|
||||
});
|
||||
|
||||
test("maps backupBeforeSync: true → 'always' (backward compat)", () => {
|
||||
expect(resolveBackupStrategy(makeConfig({ backupBeforeSync: true }))).toBe("always");
|
||||
});
|
||||
|
||||
test("maps backupBeforeSync: false → 'disabled' (backward compat)", () => {
|
||||
expect(resolveBackupStrategy(makeConfig({ backupBeforeSync: false }))).toBe("disabled");
|
||||
});
|
||||
|
||||
test("prefers explicit backupStrategy over backupBeforeSync", () => {
|
||||
expect(
|
||||
resolveBackupStrategy(
|
||||
makeConfig({ backupStrategy: "on-force-push", backupBeforeSync: true }),
|
||||
),
|
||||
).toBe("on-force-push");
|
||||
});
|
||||
|
||||
test("falls back to PRE_SYNC_BACKUP_STRATEGY env var", () => {
|
||||
process.env.PRE_SYNC_BACKUP_STRATEGY = "block-on-force-push";
|
||||
expect(resolveBackupStrategy(makeConfig({}))).toBe("block-on-force-push");
|
||||
});
|
||||
|
||||
test("falls back to PRE_SYNC_BACKUP_ENABLED env var (legacy)", () => {
|
||||
process.env.PRE_SYNC_BACKUP_ENABLED = "false";
|
||||
expect(resolveBackupStrategy(makeConfig({}))).toBe("disabled");
|
||||
});
|
||||
|
||||
test("defaults to 'on-force-push' when nothing is configured", () => {
|
||||
expect(resolveBackupStrategy(makeConfig({}))).toBe("on-force-push");
|
||||
});
|
||||
|
||||
test("handles empty giteaConfig gracefully", () => {
|
||||
expect(resolveBackupStrategy({})).toBe("on-force-push");
|
||||
});
|
||||
});
|
||||
|
||||
describe("shouldBackupForStrategy", () => {
|
||||
test("disabled → never backup", () => {
|
||||
expect(shouldBackupForStrategy("disabled", false)).toBe(false);
|
||||
expect(shouldBackupForStrategy("disabled", true)).toBe(false);
|
||||
});
|
||||
|
||||
test("always → always backup", () => {
|
||||
expect(shouldBackupForStrategy("always", false)).toBe(true);
|
||||
expect(shouldBackupForStrategy("always", true)).toBe(true);
|
||||
});
|
||||
|
||||
test("on-force-push → backup only when detected", () => {
|
||||
expect(shouldBackupForStrategy("on-force-push", false)).toBe(false);
|
||||
expect(shouldBackupForStrategy("on-force-push", true)).toBe(true);
|
||||
});
|
||||
|
||||
test("block-on-force-push → backup only when detected", () => {
|
||||
expect(shouldBackupForStrategy("block-on-force-push", false)).toBe(false);
|
||||
expect(shouldBackupForStrategy("block-on-force-push", true)).toBe(true);
|
||||
});
|
||||
});
|
||||
|
||||
describe("shouldBlockSyncForStrategy", () => {
|
||||
test("only block-on-force-push + detected returns true", () => {
|
||||
expect(shouldBlockSyncForStrategy("block-on-force-push", true)).toBe(true);
|
||||
});
|
||||
|
||||
test("block-on-force-push without detection does not block", () => {
|
||||
expect(shouldBlockSyncForStrategy("block-on-force-push", false)).toBe(false);
|
||||
});
|
||||
|
||||
test("other strategies never block", () => {
|
||||
expect(shouldBlockSyncForStrategy("disabled", true)).toBe(false);
|
||||
expect(shouldBlockSyncForStrategy("always", true)).toBe(false);
|
||||
expect(shouldBlockSyncForStrategy("on-force-push", true)).toBe(false);
|
||||
});
|
||||
});
|
||||
|
||||
describe("strategyNeedsDetection", () => {
|
||||
test("returns true for detection-based strategies", () => {
|
||||
expect(strategyNeedsDetection("on-force-push")).toBe(true);
|
||||
expect(strategyNeedsDetection("block-on-force-push")).toBe(true);
|
||||
});
|
||||
|
||||
test("returns false for non-detection strategies", () => {
|
||||
expect(strategyNeedsDetection("disabled")).toBe(false);
|
||||
expect(strategyNeedsDetection("always")).toBe(false);
|
||||
});
|
||||
});
|
||||
276
src/lib/repo-backup.ts
Normal file
276
src/lib/repo-backup.ts
Normal file
@@ -0,0 +1,276 @@
|
||||
import { mkdir, mkdtemp, readdir, rm, stat } from "node:fs/promises";
|
||||
import os from "node:os";
|
||||
import path from "node:path";
|
||||
import type { Config, BackupStrategy } from "@/types/config";
|
||||
import { decryptConfigTokens } from "./utils/config-encryption";
|
||||
|
||||
const TRUE_VALUES = new Set(["1", "true", "yes", "on"]);
|
||||
|
||||
function parseBoolean(value: string | undefined, fallback: boolean): boolean {
|
||||
if (value === undefined) return fallback;
|
||||
return TRUE_VALUES.has(value.trim().toLowerCase());
|
||||
}
|
||||
|
||||
function parsePositiveInt(value: string | undefined, fallback: number): number {
|
||||
if (!value) return fallback;
|
||||
const parsed = Number.parseInt(value, 10);
|
||||
if (!Number.isFinite(parsed) || parsed <= 0) {
|
||||
return fallback;
|
||||
}
|
||||
return parsed;
|
||||
}
|
||||
|
||||
function sanitizePathSegment(input: string): string {
|
||||
return input.replace(/[^a-zA-Z0-9._-]/g, "_");
|
||||
}
|
||||
|
||||
function buildTimestamp(): string {
|
||||
// Example: 2026-02-25T18-34-22-123Z
|
||||
return new Date().toISOString().replace(/[:.]/g, "-");
|
||||
}
|
||||
|
||||
function buildAuthenticatedCloneUrl(cloneUrl: string, token: string): string {
|
||||
const parsed = new URL(cloneUrl);
|
||||
if (parsed.protocol !== "http:" && parsed.protocol !== "https:") {
|
||||
return cloneUrl;
|
||||
}
|
||||
|
||||
parsed.username = process.env.PRE_SYNC_BACKUP_GIT_USERNAME || "oauth2";
|
||||
parsed.password = token;
|
||||
return parsed.toString();
|
||||
}
|
||||
|
||||
function maskToken(text: string, token: string): string {
|
||||
if (!token) return text;
|
||||
return text.split(token).join("***");
|
||||
}
|
||||
|
||||
async function runGit(args: string[], tokenToMask: string): Promise<void> {
|
||||
const proc = Bun.spawn({
|
||||
cmd: ["git", ...args],
|
||||
stdout: "pipe",
|
||||
stderr: "pipe",
|
||||
});
|
||||
|
||||
const [stdout, stderr, exitCode] = await Promise.all([
|
||||
new Response(proc.stdout).text(),
|
||||
new Response(proc.stderr).text(),
|
||||
proc.exited,
|
||||
]);
|
||||
|
||||
if (exitCode !== 0) {
|
||||
const details = [stdout, stderr].filter(Boolean).join("\n").trim();
|
||||
const safeDetails = maskToken(details, tokenToMask);
|
||||
throw new Error(`git command failed: ${safeDetails || "unknown git error"}`);
|
||||
}
|
||||
}
|
||||
|
||||
async function enforceRetention(repoBackupDir: string, keepCount: number): Promise<void> {
|
||||
const entries = await readdir(repoBackupDir);
|
||||
const bundleFiles = entries
|
||||
.filter((name) => name.endsWith(".bundle"))
|
||||
.map((name) => path.join(repoBackupDir, name));
|
||||
|
||||
if (bundleFiles.length <= keepCount) return;
|
||||
|
||||
const filesWithMtime = await Promise.all(
|
||||
bundleFiles.map(async (filePath) => ({
|
||||
filePath,
|
||||
mtimeMs: (await stat(filePath)).mtimeMs,
|
||||
}))
|
||||
);
|
||||
|
||||
filesWithMtime.sort((a, b) => b.mtimeMs - a.mtimeMs);
|
||||
const toDelete = filesWithMtime.slice(keepCount);
|
||||
|
||||
await Promise.all(toDelete.map((entry) => rm(entry.filePath, { force: true })));
|
||||
}
|
||||
|
||||
export function isPreSyncBackupEnabled(): boolean {
|
||||
return parseBoolean(process.env.PRE_SYNC_BACKUP_ENABLED, true);
|
||||
}
|
||||
|
||||
export function shouldCreatePreSyncBackup(config: Partial<Config>): boolean {
|
||||
const configSetting = config.giteaConfig?.backupBeforeSync;
|
||||
const fallback = isPreSyncBackupEnabled();
|
||||
return configSetting === undefined ? fallback : Boolean(configSetting);
|
||||
}
|
||||
|
||||
export function shouldBlockSyncOnBackupFailure(config: Partial<Config>): boolean {
|
||||
const configSetting = config.giteaConfig?.blockSyncOnBackupFailure;
|
||||
return configSetting === undefined ? true : Boolean(configSetting);
|
||||
}
|
||||
|
||||
// ---- Backup strategy resolver ----
|
||||
|
||||
const VALID_STRATEGIES = new Set<BackupStrategy>([
|
||||
"disabled",
|
||||
"always",
|
||||
"on-force-push",
|
||||
"block-on-force-push",
|
||||
]);
|
||||
|
||||
/**
|
||||
* Resolve the effective backup strategy from config, falling back through:
|
||||
* 1. `backupStrategy` field (new)
|
||||
* 2. `backupBeforeSync` boolean (deprecated, backward compat)
|
||||
* 3. `PRE_SYNC_BACKUP_STRATEGY` env var
|
||||
* 4. `PRE_SYNC_BACKUP_ENABLED` env var (legacy)
|
||||
* 5. Default: `"on-force-push"`
|
||||
*/
|
||||
export function resolveBackupStrategy(config: Partial<Config>): BackupStrategy {
|
||||
// 1. Explicit backupStrategy field
|
||||
const explicit = config.giteaConfig?.backupStrategy;
|
||||
if (explicit && VALID_STRATEGIES.has(explicit as BackupStrategy)) {
|
||||
return explicit as BackupStrategy;
|
||||
}
|
||||
|
||||
// 2. Legacy backupBeforeSync boolean → map to strategy
|
||||
const legacy = config.giteaConfig?.backupBeforeSync;
|
||||
if (legacy !== undefined) {
|
||||
return legacy ? "always" : "disabled";
|
||||
}
|
||||
|
||||
// 3. Env var (new)
|
||||
const envStrategy = process.env.PRE_SYNC_BACKUP_STRATEGY?.trim().toLowerCase();
|
||||
if (envStrategy && VALID_STRATEGIES.has(envStrategy as BackupStrategy)) {
|
||||
return envStrategy as BackupStrategy;
|
||||
}
|
||||
|
||||
// 4. Env var (legacy)
|
||||
const envEnabled = process.env.PRE_SYNC_BACKUP_ENABLED;
|
||||
if (envEnabled !== undefined) {
|
||||
return parseBoolean(envEnabled, true) ? "always" : "disabled";
|
||||
}
|
||||
|
||||
// 5. Default
|
||||
return "on-force-push";
|
||||
}
|
||||
|
||||
/**
|
||||
* Determine whether a backup should be created for the given strategy and
|
||||
* force-push detection result.
|
||||
*/
|
||||
export function shouldBackupForStrategy(
|
||||
strategy: BackupStrategy,
|
||||
forcePushDetected: boolean,
|
||||
): boolean {
|
||||
switch (strategy) {
|
||||
case "disabled":
|
||||
return false;
|
||||
case "always":
|
||||
return true;
|
||||
case "on-force-push":
|
||||
case "block-on-force-push":
|
||||
return forcePushDetected;
|
||||
default:
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Determine whether sync should be blocked (requires manual approval).
|
||||
* Only `block-on-force-push` with an actual detection blocks sync.
|
||||
*/
|
||||
export function shouldBlockSyncForStrategy(
|
||||
strategy: BackupStrategy,
|
||||
forcePushDetected: boolean,
|
||||
): boolean {
|
||||
return strategy === "block-on-force-push" && forcePushDetected;
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns true when the strategy requires running force-push detection
|
||||
* before deciding on backup / block behavior.
|
||||
*/
|
||||
export function strategyNeedsDetection(strategy: BackupStrategy): boolean {
|
||||
return strategy === "on-force-push" || strategy === "block-on-force-push";
|
||||
}
|
||||
|
||||
export function resolveBackupPaths({
|
||||
config,
|
||||
owner,
|
||||
repoName,
|
||||
}: {
|
||||
config: Partial<Config>;
|
||||
owner: string;
|
||||
repoName: string;
|
||||
}): { backupRoot: string; repoBackupDir: string } {
|
||||
let backupRoot =
|
||||
config.giteaConfig?.backupDirectory?.trim() ||
|
||||
process.env.PRE_SYNC_BACKUP_DIR?.trim() ||
|
||||
path.join(process.cwd(), "data", "repo-backups");
|
||||
|
||||
// Ensure backupRoot is absolute - relative paths break git bundle creation
|
||||
// because git runs with -C mirrorClonePath and interprets relative paths from there.
|
||||
// Always use path.resolve() which guarantees an absolute path, rather than a
|
||||
// conditional check that can miss edge cases (e.g., NixOS systemd services).
|
||||
backupRoot = path.resolve(backupRoot);
|
||||
|
||||
const repoBackupDir = path.join(
|
||||
backupRoot,
|
||||
sanitizePathSegment(config.userId || "unknown-user"),
|
||||
sanitizePathSegment(owner),
|
||||
sanitizePathSegment(repoName)
|
||||
);
|
||||
|
||||
return { backupRoot, repoBackupDir };
|
||||
}
|
||||
|
||||
export async function createPreSyncBundleBackup({
|
||||
config,
|
||||
owner,
|
||||
repoName,
|
||||
cloneUrl,
|
||||
force,
|
||||
}: {
|
||||
config: Partial<Config>;
|
||||
owner: string;
|
||||
repoName: string;
|
||||
cloneUrl: string;
|
||||
/** When true, skip the legacy shouldCreatePreSyncBackup check.
|
||||
* Used by the strategy-driven path which has already decided to backup. */
|
||||
force?: boolean;
|
||||
}): Promise<{ bundlePath: string }> {
|
||||
if (!force && !shouldCreatePreSyncBackup(config)) {
|
||||
throw new Error("Pre-sync backup is disabled.");
|
||||
}
|
||||
|
||||
if (!config.giteaConfig?.token) {
|
||||
throw new Error("Gitea token is required for pre-sync backup.");
|
||||
}
|
||||
|
||||
const decryptedConfig = decryptConfigTokens(config as Config);
|
||||
const giteaToken = decryptedConfig.giteaConfig?.token;
|
||||
if (!giteaToken) {
|
||||
throw new Error("Decrypted Gitea token is required for pre-sync backup.");
|
||||
}
|
||||
|
||||
const { repoBackupDir } = resolveBackupPaths({ config, owner, repoName });
|
||||
const retention = Math.max(
|
||||
1,
|
||||
Number.isFinite(config.giteaConfig?.backupRetentionCount)
|
||||
? Number(config.giteaConfig?.backupRetentionCount)
|
||||
: parsePositiveInt(process.env.PRE_SYNC_BACKUP_KEEP_COUNT, 20)
|
||||
);
|
||||
|
||||
await mkdir(repoBackupDir, { recursive: true });
|
||||
|
||||
const tmpDir = await mkdtemp(path.join(os.tmpdir(), "gitea-mirror-backup-"));
|
||||
const mirrorClonePath = path.join(tmpDir, "repo.git");
|
||||
// path.resolve guarantees an absolute path, critical because git -C changes
|
||||
// the working directory and would misinterpret a relative bundlePath
|
||||
const bundlePath = path.resolve(repoBackupDir, `${buildTimestamp()}.bundle`);
|
||||
|
||||
try {
|
||||
const authCloneUrl = buildAuthenticatedCloneUrl(cloneUrl, giteaToken);
|
||||
|
||||
await runGit(["clone", "--mirror", authCloneUrl, mirrorClonePath], giteaToken);
|
||||
await runGit(["-C", mirrorClonePath, "bundle", "create", bundlePath, "--all"], giteaToken);
|
||||
|
||||
await enforceRetention(repoBackupDir, retention);
|
||||
return { bundlePath };
|
||||
} finally {
|
||||
await rm(tmpDir, { recursive: true, force: true });
|
||||
}
|
||||
}
|
||||
17
src/lib/repo-eligibility.test.ts
Normal file
17
src/lib/repo-eligibility.test.ts
Normal file
@@ -0,0 +1,17 @@
|
||||
import { describe, expect, it } from "bun:test";
|
||||
import { isMirrorableGitHubRepo } from "@/lib/repo-eligibility";
|
||||
|
||||
describe("isMirrorableGitHubRepo", () => {
|
||||
it("returns false for disabled repos", () => {
|
||||
expect(isMirrorableGitHubRepo({ isDisabled: true })).toBe(false);
|
||||
});
|
||||
|
||||
it("returns true for enabled repos", () => {
|
||||
expect(isMirrorableGitHubRepo({ isDisabled: false })).toBe(true);
|
||||
});
|
||||
|
||||
it("returns true when disabled flag is absent", () => {
|
||||
expect(isMirrorableGitHubRepo({})).toBe(true);
|
||||
});
|
||||
});
|
||||
|
||||
6
src/lib/repo-eligibility.ts
Normal file
6
src/lib/repo-eligibility.ts
Normal file
@@ -0,0 +1,6 @@
|
||||
import type { GitRepo } from "@/types/Repository";
|
||||
|
||||
export function isMirrorableGitHubRepo(repo: Pick<GitRepo, "isDisabled">): boolean {
|
||||
return repo.isDisabled !== true;
|
||||
}
|
||||
|
||||
@@ -10,6 +10,7 @@ import { createGitHubClient, getGithubRepositories, getGithubStarredRepositories
|
||||
import { createGiteaClient, deleteGiteaRepo, archiveGiteaRepo, getGiteaRepoOwnerAsync, checkRepoLocation } from '@/lib/gitea';
|
||||
import { getDecryptedGitHubToken, getDecryptedGiteaToken } from '@/lib/utils/config-encryption';
|
||||
import { publishEvent } from '@/lib/events';
|
||||
import { isMirrorableGitHubRepo } from '@/lib/repo-eligibility';
|
||||
|
||||
let cleanupInterval: NodeJS.Timeout | null = null;
|
||||
let isCleanupRunning = false;
|
||||
@@ -59,7 +60,9 @@ async function identifyOrphanedRepositories(config: any): Promise<any[]> {
|
||||
return [];
|
||||
}
|
||||
|
||||
const githubRepoFullNames = new Set(allGithubRepos.map(repo => repo.fullName));
|
||||
const githubReposByFullName = new Map(
|
||||
allGithubRepos.map((repo) => [repo.fullName, repo] as const)
|
||||
);
|
||||
|
||||
// Get all repositories from our database
|
||||
const dbRepos = await db
|
||||
@@ -70,18 +73,30 @@ async function identifyOrphanedRepositories(config: any): Promise<any[]> {
|
||||
// Only identify repositories as orphaned if we successfully accessed GitHub
|
||||
// This prevents false positives when GitHub is down or account is inaccessible
|
||||
const orphanedRepos = dbRepos.filter(repo => {
|
||||
const isOrphaned = !githubRepoFullNames.has(repo.fullName);
|
||||
if (!isOrphaned) {
|
||||
return false;
|
||||
}
|
||||
|
||||
// Skip repositories we've already archived/preserved
|
||||
if (repo.status === 'archived' || repo.isArchived) {
|
||||
console.log(`[Repository Cleanup] Skipping ${repo.fullName} - already archived`);
|
||||
return false;
|
||||
}
|
||||
|
||||
return true;
|
||||
// If starred repos are not being fetched from GitHub, we can't determine
|
||||
// if a starred repo is orphaned - skip it to prevent data loss
|
||||
if (repo.isStarred && !config.githubConfig?.includeStarred) {
|
||||
console.log(`[Repository Cleanup] Skipping starred repo ${repo.fullName} - starred repos not being fetched from GitHub`);
|
||||
return false;
|
||||
}
|
||||
|
||||
const githubRepo = githubReposByFullName.get(repo.fullName);
|
||||
if (!githubRepo) {
|
||||
return true;
|
||||
}
|
||||
|
||||
if (!isMirrorableGitHubRepo(githubRepo)) {
|
||||
console.log(`[Repository Cleanup] Preserving ${repo.fullName} - repository is disabled on GitHub`);
|
||||
return false;
|
||||
}
|
||||
|
||||
return false;
|
||||
});
|
||||
|
||||
if (orphanedRepos.length > 0) {
|
||||
|
||||
@@ -12,6 +12,8 @@ import { parseInterval, formatDuration } from '@/lib/utils/duration-parser';
|
||||
import type { Repository } from '@/lib/db/schema';
|
||||
import { repoStatusEnum, repositoryVisibilityEnum } from '@/types/Repository';
|
||||
import { mergeGitReposPreferStarred, normalizeGitRepoToInsert, calcBatchSizeForInsert } from '@/lib/repo-utils';
|
||||
import { isMirrorableGitHubRepo } from '@/lib/repo-eligibility';
|
||||
import { createMirrorJob } from '@/lib/helpers';
|
||||
|
||||
let schedulerInterval: NodeJS.Timeout | null = null;
|
||||
let isSchedulerRunning = false;
|
||||
@@ -96,6 +98,7 @@ async function runScheduledSync(config: any): Promise<void> {
|
||||
: Promise.resolve([]),
|
||||
]);
|
||||
const allGithubRepos = mergeGitReposPreferStarred(basicAndForkedRepos, starredRepos);
|
||||
const mirrorableGithubRepos = allGithubRepos.filter(isMirrorableGitHubRepo);
|
||||
|
||||
// Check for new repositories
|
||||
const existingRepos = await db
|
||||
@@ -104,7 +107,7 @@ async function runScheduledSync(config: any): Promise<void> {
|
||||
.where(eq(repositories.userId, userId));
|
||||
|
||||
const existingRepoNames = new Set(existingRepos.map(r => r.normalizedFullName));
|
||||
const newRepos = allGithubRepos.filter(r => !existingRepoNames.has(r.fullName.toLowerCase()));
|
||||
const newRepos = mirrorableGithubRepos.filter(r => !existingRepoNames.has(r.fullName.toLowerCase()));
|
||||
|
||||
if (newRepos.length > 0) {
|
||||
console.log(`[Scheduler] Found ${newRepos.length} new repositories for user ${userId}`);
|
||||
@@ -126,9 +129,26 @@ async function runScheduledSync(config: any): Promise<void> {
|
||||
.onConflictDoNothing({ target: [repositories.userId, repositories.normalizedFullName] });
|
||||
}
|
||||
console.log(`[Scheduler] Successfully imported ${newRepos.length} new repositories for user ${userId}`);
|
||||
|
||||
// Log activity for each newly imported repo
|
||||
for (const repo of newRepos) {
|
||||
const sourceLabel = repo.isStarred ? 'starred' : 'owned';
|
||||
await createMirrorJob({
|
||||
userId,
|
||||
repositoryName: repo.fullName,
|
||||
message: `Auto-imported ${sourceLabel} repository: ${repo.fullName}`,
|
||||
details: `Repository ${repo.fullName} was discovered and imported during scheduled sync.`,
|
||||
status: 'imported',
|
||||
skipDuplicateEvent: true,
|
||||
});
|
||||
}
|
||||
} else {
|
||||
console.log(`[Scheduler] No new repositories found for user ${userId}`);
|
||||
}
|
||||
const skippedDisabledCount = allGithubRepos.length - mirrorableGithubRepos.length;
|
||||
if (skippedDisabledCount > 0) {
|
||||
console.log(`[Scheduler] Skipped ${skippedDisabledCount} disabled GitHub repositories for user ${userId}`);
|
||||
}
|
||||
} catch (error) {
|
||||
console.error(`[Scheduler] Failed to auto-import repositories for user ${userId}:`, error);
|
||||
}
|
||||
@@ -170,7 +190,7 @@ async function runScheduledSync(config: any): Promise<void> {
|
||||
if (scheduleConfig.autoMirror) {
|
||||
try {
|
||||
console.log(`[Scheduler] Auto-mirror enabled - checking for repositories to mirror for user ${userId}...`);
|
||||
const reposNeedingMirror = await db
|
||||
let reposNeedingMirror = await db
|
||||
.select()
|
||||
.from(repositories)
|
||||
.where(
|
||||
@@ -184,6 +204,19 @@ async function runScheduledSync(config: any): Promise<void> {
|
||||
)
|
||||
);
|
||||
|
||||
// Filter out starred repos from auto-mirror when autoMirrorStarred is disabled
|
||||
if (!config.githubConfig?.autoMirrorStarred) {
|
||||
const githubOwner = config.githubConfig?.owner || '';
|
||||
const beforeCount = reposNeedingMirror.length;
|
||||
reposNeedingMirror = reposNeedingMirror.filter(
|
||||
repo => !repo.isStarred || repo.owner === githubOwner
|
||||
);
|
||||
const skippedCount = beforeCount - reposNeedingMirror.length;
|
||||
if (skippedCount > 0) {
|
||||
console.log(`[Scheduler] Skipped ${skippedCount} starred repositories from auto-mirror (autoMirrorStarred is disabled)`);
|
||||
}
|
||||
}
|
||||
|
||||
if (reposNeedingMirror.length > 0) {
|
||||
console.log(`[Scheduler] Found ${reposNeedingMirror.length} repositories that need initial mirroring`);
|
||||
|
||||
@@ -274,11 +307,29 @@ async function runScheduledSync(config: any): Promise<void> {
|
||||
});
|
||||
}
|
||||
|
||||
// Log pending-approval repos that are excluded from sync
|
||||
try {
|
||||
const pendingApprovalRepos = await db
|
||||
.select({ id: repositories.id })
|
||||
.from(repositories)
|
||||
.where(
|
||||
and(
|
||||
eq(repositories.userId, userId),
|
||||
eq(repositories.status, 'pending-approval')
|
||||
)
|
||||
);
|
||||
if (pendingApprovalRepos.length > 0) {
|
||||
console.log(`[Scheduler] ${pendingApprovalRepos.length} repositories pending approval (force-push detected) for user ${userId} — skipping sync for those`);
|
||||
}
|
||||
} catch {
|
||||
// Non-critical logging, ignore errors
|
||||
}
|
||||
|
||||
if (reposToSync.length === 0) {
|
||||
console.log(`[Scheduler] No repositories to sync for user ${userId}`);
|
||||
return;
|
||||
}
|
||||
|
||||
|
||||
console.log(`[Scheduler] Syncing ${reposToSync.length} repositories for user ${userId}`);
|
||||
|
||||
// Process repositories in batches
|
||||
@@ -429,6 +480,7 @@ async function performInitialAutoStart(): Promise<void> {
|
||||
: Promise.resolve([]),
|
||||
]);
|
||||
const allGithubRepos = mergeGitReposPreferStarred(basicAndForkedRepos, starredRepos);
|
||||
const mirrorableGithubRepos = allGithubRepos.filter(isMirrorableGitHubRepo);
|
||||
|
||||
// Check for new repositories
|
||||
const existingRepos = await db
|
||||
@@ -437,7 +489,7 @@ async function performInitialAutoStart(): Promise<void> {
|
||||
.where(eq(repositories.userId, config.userId));
|
||||
|
||||
const existingRepoNames = new Set(existingRepos.map(r => r.normalizedFullName));
|
||||
const reposToImport = allGithubRepos.filter(r => !existingRepoNames.has(r.fullName.toLowerCase()));
|
||||
const reposToImport = mirrorableGithubRepos.filter(r => !existingRepoNames.has(r.fullName.toLowerCase()));
|
||||
|
||||
if (reposToImport.length > 0) {
|
||||
console.log(`[Scheduler] Importing ${reposToImport.length} repositories for user ${config.userId}...`);
|
||||
@@ -459,10 +511,27 @@ async function performInitialAutoStart(): Promise<void> {
|
||||
.onConflictDoNothing({ target: [repositories.userId, repositories.normalizedFullName] });
|
||||
}
|
||||
console.log(`[Scheduler] Successfully imported ${reposToImport.length} repositories`);
|
||||
|
||||
// Log activity for each newly imported repo
|
||||
for (const repo of reposToImport) {
|
||||
const sourceLabel = repo.isStarred ? 'starred' : 'owned';
|
||||
await createMirrorJob({
|
||||
userId: config.userId,
|
||||
repositoryName: repo.fullName,
|
||||
message: `Auto-imported ${sourceLabel} repository: ${repo.fullName}`,
|
||||
details: `Repository ${repo.fullName} was discovered and imported during auto-start.`,
|
||||
status: 'imported',
|
||||
skipDuplicateEvent: true,
|
||||
});
|
||||
}
|
||||
} else {
|
||||
console.log(`[Scheduler] No new repositories to import for user ${config.userId}`);
|
||||
}
|
||||
|
||||
const skippedDisabledCount = allGithubRepos.length - mirrorableGithubRepos.length;
|
||||
if (skippedDisabledCount > 0) {
|
||||
console.log(`[Scheduler] Skipped ${skippedDisabledCount} disabled GitHub repositories for user ${config.userId}`);
|
||||
}
|
||||
|
||||
// Check if we already have mirrored repositories (indicating this isn't first run)
|
||||
const mirroredRepos = await db
|
||||
.select()
|
||||
@@ -505,8 +574,34 @@ async function performInitialAutoStart(): Promise<void> {
|
||||
}
|
||||
|
||||
// Step 2: Trigger mirror for all repositories that need mirroring
|
||||
// Only auto-mirror if autoMirror is enabled in schedule config
|
||||
if (!config.scheduleConfig?.autoMirror) {
|
||||
console.log(`[Scheduler] Step 2: Skipping initial mirror - autoMirror is disabled for user ${config.userId}`);
|
||||
|
||||
// Still update schedule config timestamps
|
||||
const currentTime2 = new Date();
|
||||
const intervalSource2 = config.scheduleConfig?.interval ||
|
||||
config.giteaConfig?.mirrorInterval ||
|
||||
'8h';
|
||||
const interval2 = parseScheduleInterval(intervalSource2);
|
||||
const nextRun2 = new Date(currentTime2.getTime() + interval2);
|
||||
|
||||
await db.update(configs).set({
|
||||
scheduleConfig: {
|
||||
...config.scheduleConfig,
|
||||
enabled: true,
|
||||
lastRun: currentTime2,
|
||||
nextRun: nextRun2,
|
||||
},
|
||||
updatedAt: currentTime2,
|
||||
}).where(eq(configs.id, config.id));
|
||||
|
||||
console.log(`[Scheduler] Scheduling enabled for user ${config.userId}, next sync at ${nextRun2.toISOString()}`);
|
||||
continue;
|
||||
}
|
||||
|
||||
console.log(`[Scheduler] Step 2: Triggering mirror for repositories that need mirroring...`);
|
||||
const reposNeedingMirror = await db
|
||||
let reposNeedingMirror = await db
|
||||
.select()
|
||||
.from(repositories)
|
||||
.where(
|
||||
@@ -519,7 +614,20 @@ async function performInitialAutoStart(): Promise<void> {
|
||||
)
|
||||
)
|
||||
);
|
||||
|
||||
|
||||
// Filter out starred repos from auto-mirror when autoMirrorStarred is disabled
|
||||
if (!config.githubConfig?.autoMirrorStarred) {
|
||||
const githubOwner = config.githubConfig?.owner || '';
|
||||
const beforeCount = reposNeedingMirror.length;
|
||||
reposNeedingMirror = reposNeedingMirror.filter(
|
||||
repo => !repo.isStarred || repo.owner === githubOwner
|
||||
);
|
||||
const skippedCount = beforeCount - reposNeedingMirror.length;
|
||||
if (skippedCount > 0) {
|
||||
console.log(`[Scheduler] Skipped ${skippedCount} starred repositories from initial auto-mirror (autoMirrorStarred is disabled)`);
|
||||
}
|
||||
}
|
||||
|
||||
if (reposNeedingMirror.length > 0) {
|
||||
console.log(`[Scheduler] Found ${reposNeedingMirror.length} repositories that need mirroring`);
|
||||
|
||||
|
||||
@@ -92,8 +92,13 @@ async function preCreateOrganizations({
|
||||
// Get unique organization names
|
||||
const orgNames = new Set<string>();
|
||||
|
||||
// Add starred repos org
|
||||
if (config.githubConfig?.starredReposOrg) {
|
||||
const starredReposMode = config.githubConfig?.starredReposMode || "dedicated-org";
|
||||
|
||||
if (starredReposMode === "preserve-owner") {
|
||||
for (const repo of repositories) {
|
||||
orgNames.add(repo.organization || repo.owner);
|
||||
}
|
||||
} else if (config.githubConfig?.starredReposOrg) {
|
||||
orgNames.add(config.githubConfig.starredReposOrg);
|
||||
} else {
|
||||
orgNames.add("starred");
|
||||
@@ -129,7 +134,11 @@ async function processStarredRepository({
|
||||
octokit: Octokit;
|
||||
strategyConfig: ReturnType<typeof getMirrorStrategyConfig>;
|
||||
}): Promise<void> {
|
||||
const starredOrg = config.githubConfig?.starredReposOrg || "starred";
|
||||
const starredReposMode = config.githubConfig?.starredReposMode || "dedicated-org";
|
||||
const starredOrg =
|
||||
starredReposMode === "preserve-owner"
|
||||
? repository.organization || repository.owner
|
||||
: config.githubConfig?.starredReposOrg || "starred";
|
||||
|
||||
// Check if repository exists in Gitea
|
||||
const existingRepo = await getGiteaRepoInfo({
|
||||
@@ -257,7 +266,11 @@ export async function syncStarredRepositories({
|
||||
if (error instanceof Error && error.message.includes("not a mirror")) {
|
||||
console.warn(`Repository ${repository.name} is not a mirror, handling...`);
|
||||
|
||||
const starredOrg = config.githubConfig?.starredReposOrg || "starred";
|
||||
const starredReposMode = config.githubConfig?.starredReposMode || "dedicated-org";
|
||||
const starredOrg =
|
||||
starredReposMode === "preserve-owner"
|
||||
? repository.organization || repository.owner
|
||||
: config.githubConfig?.starredReposOrg || "starred";
|
||||
const repoInfo = await getGiteaRepoInfo({
|
||||
config,
|
||||
owner: starredOrg,
|
||||
@@ -287,4 +300,4 @@ export async function syncStarredRepositories({
|
||||
},
|
||||
}
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -169,4 +169,31 @@ describe("parseErrorMessage", () => {
|
||||
expect(result.description).toBeUndefined();
|
||||
expect(result.isStructured).toBe(false);
|
||||
});
|
||||
|
||||
test("adds trusted origins guidance for invalid origin errors", () => {
|
||||
const errorMessage = "Invalid Origin: https://mirror.example.com";
|
||||
|
||||
const result = parseErrorMessage(errorMessage);
|
||||
|
||||
expect(result.title).toBe("Invalid Origin");
|
||||
expect(result.description).toContain("BETTER_AUTH_TRUSTED_ORIGINS");
|
||||
expect(result.description).toContain("https://mirror.example.com");
|
||||
expect(result.isStructured).toBe(true);
|
||||
});
|
||||
});
|
||||
|
||||
describe("showErrorToast", () => {
|
||||
test("shows invalid origin guidance in toast description", () => {
|
||||
const calls: any[] = [];
|
||||
const toast = {
|
||||
error: (...args: any[]) => calls.push(args),
|
||||
};
|
||||
|
||||
showErrorToast("Invalid Origin: http://10.10.20.45:4321", toast);
|
||||
|
||||
expect(calls).toHaveLength(1);
|
||||
expect(calls[0][0]).toBe("Invalid Origin");
|
||||
expect(calls[0][1].description).toContain("BETTER_AUTH_TRUSTED_ORIGINS");
|
||||
expect(calls[0][1].description).toContain("http://10.10.20.45:4321");
|
||||
});
|
||||
});
|
||||
|
||||
@@ -86,6 +86,30 @@ export interface ParsedErrorMessage {
|
||||
isStructured: boolean;
|
||||
}
|
||||
|
||||
function getInvalidOriginGuidance(title: string, description?: string): ParsedErrorMessage | null {
|
||||
const fullMessage = `${title} ${description ?? ""}`.trim();
|
||||
if (!/invalid origin/i.test(fullMessage)) {
|
||||
return null;
|
||||
}
|
||||
|
||||
const urlMatch = fullMessage.match(/https?:\/\/[^\s'")]+/i);
|
||||
let originHint = "this URL";
|
||||
|
||||
if (urlMatch) {
|
||||
try {
|
||||
originHint = new URL(urlMatch[0]).origin;
|
||||
} catch {
|
||||
originHint = urlMatch[0];
|
||||
}
|
||||
}
|
||||
|
||||
return {
|
||||
title: "Invalid Origin",
|
||||
description: `Add ${originHint} to BETTER_AUTH_TRUSTED_ORIGINS and restart the app.`,
|
||||
isStructured: true,
|
||||
};
|
||||
}
|
||||
|
||||
export function parseErrorMessage(error: unknown): ParsedErrorMessage {
|
||||
// Handle Error objects
|
||||
if (error instanceof Error) {
|
||||
@@ -102,29 +126,32 @@ export function parseErrorMessage(error: unknown): ParsedErrorMessage {
|
||||
if (typeof parsed === "object" && parsed !== null) {
|
||||
// Format 1: { error: "message", errorType: "type", troubleshooting: "info" }
|
||||
if (parsed.error) {
|
||||
return {
|
||||
const formatted = {
|
||||
title: parsed.error,
|
||||
description: parsed.troubleshooting || parsed.errorType || undefined,
|
||||
isStructured: true,
|
||||
};
|
||||
return getInvalidOriginGuidance(formatted.title, formatted.description) || formatted;
|
||||
}
|
||||
|
||||
// Format 2: { title: "title", description: "desc" }
|
||||
if (parsed.title) {
|
||||
return {
|
||||
const formatted = {
|
||||
title: parsed.title,
|
||||
description: parsed.description || undefined,
|
||||
isStructured: true,
|
||||
};
|
||||
return getInvalidOriginGuidance(formatted.title, formatted.description) || formatted;
|
||||
}
|
||||
|
||||
// Format 3: { message: "msg", details: "details" }
|
||||
if (parsed.message) {
|
||||
return {
|
||||
const formatted = {
|
||||
title: parsed.message,
|
||||
description: parsed.details || undefined,
|
||||
isStructured: true,
|
||||
};
|
||||
return getInvalidOriginGuidance(formatted.title, formatted.description) || formatted;
|
||||
}
|
||||
}
|
||||
} catch {
|
||||
@@ -132,11 +159,12 @@ export function parseErrorMessage(error: unknown): ParsedErrorMessage {
|
||||
}
|
||||
|
||||
// Plain string message
|
||||
return {
|
||||
const formatted = {
|
||||
title: error,
|
||||
description: undefined,
|
||||
isStructured: false,
|
||||
};
|
||||
return getInvalidOriginGuidance(formatted.title, formatted.description) || formatted;
|
||||
}
|
||||
|
||||
// Handle objects directly
|
||||
@@ -144,36 +172,40 @@ export function parseErrorMessage(error: unknown): ParsedErrorMessage {
|
||||
const errorObj = error as any;
|
||||
|
||||
if (errorObj.error) {
|
||||
return {
|
||||
const formatted = {
|
||||
title: errorObj.error,
|
||||
description: errorObj.troubleshooting || errorObj.errorType || undefined,
|
||||
isStructured: true,
|
||||
};
|
||||
return getInvalidOriginGuidance(formatted.title, formatted.description) || formatted;
|
||||
}
|
||||
|
||||
if (errorObj.title) {
|
||||
return {
|
||||
const formatted = {
|
||||
title: errorObj.title,
|
||||
description: errorObj.description || undefined,
|
||||
isStructured: true,
|
||||
};
|
||||
return getInvalidOriginGuidance(formatted.title, formatted.description) || formatted;
|
||||
}
|
||||
|
||||
if (errorObj.message) {
|
||||
return {
|
||||
const formatted = {
|
||||
title: errorObj.message,
|
||||
description: errorObj.details || undefined,
|
||||
isStructured: true,
|
||||
};
|
||||
return getInvalidOriginGuidance(formatted.title, formatted.description) || formatted;
|
||||
}
|
||||
}
|
||||
|
||||
// Fallback for unknown types
|
||||
return {
|
||||
const fallback = {
|
||||
title: String(error),
|
||||
description: undefined,
|
||||
isStructured: false,
|
||||
};
|
||||
return getInvalidOriginGuidance(fallback.title, fallback.description) || fallback;
|
||||
}
|
||||
|
||||
// Enhanced toast helper that parses structured error messages
|
||||
@@ -248,6 +280,8 @@ export const getStatusColor = (status: string): string => {
|
||||
return "bg-orange-500"; // Deleting
|
||||
case "deleted":
|
||||
return "bg-gray-600"; // Deleted
|
||||
case "pending-approval":
|
||||
return "bg-amber-500"; // Needs manual approval
|
||||
default:
|
||||
return "bg-gray-400"; // Unknown/neutral
|
||||
}
|
||||
|
||||
@@ -9,6 +9,7 @@ export interface DefaultConfigOptions {
|
||||
githubToken?: string;
|
||||
githubUsername?: string;
|
||||
giteaUrl?: string;
|
||||
giteaExternalUrl?: string;
|
||||
giteaToken?: string;
|
||||
giteaUsername?: string;
|
||||
scheduleEnabled?: boolean;
|
||||
@@ -38,6 +39,8 @@ export async function createDefaultConfig({ userId, envOverrides = {} }: Default
|
||||
const githubToken = envOverrides.githubToken || process.env.GITHUB_TOKEN || "";
|
||||
const githubUsername = envOverrides.githubUsername || process.env.GITHUB_USERNAME || "";
|
||||
const giteaUrl = envOverrides.giteaUrl || process.env.GITEA_URL || "";
|
||||
const giteaExternalUrl =
|
||||
envOverrides.giteaExternalUrl || process.env.GITEA_EXTERNAL_URL || "";
|
||||
const giteaToken = envOverrides.giteaToken || process.env.GITEA_TOKEN || "";
|
||||
const giteaUsername = envOverrides.giteaUsername || process.env.GITEA_USERNAME || "";
|
||||
|
||||
@@ -71,11 +74,13 @@ export async function createDefaultConfig({ userId, envOverrides = {} }: Default
|
||||
includePublic: true,
|
||||
includeOrganizations: [],
|
||||
starredReposOrg: "starred",
|
||||
starredReposMode: "dedicated-org",
|
||||
mirrorStrategy: "preserve",
|
||||
defaultOrg: "github-mirrors",
|
||||
},
|
||||
giteaConfig: {
|
||||
url: giteaUrl,
|
||||
externalUrl: giteaExternalUrl || undefined,
|
||||
token: giteaToken ? encrypt(giteaToken) : "",
|
||||
defaultOwner: giteaUsername,
|
||||
mirrorInterval: "8h",
|
||||
@@ -88,6 +93,11 @@ export async function createDefaultConfig({ userId, envOverrides = {} }: Default
|
||||
forkStrategy: "reference",
|
||||
issueConcurrency: 3,
|
||||
pullRequestConcurrency: 5,
|
||||
backupStrategy: "on-force-push",
|
||||
backupBeforeSync: true, // Deprecated: kept for backward compat
|
||||
backupRetentionCount: 20,
|
||||
backupDirectory: "data/repo-backups",
|
||||
blockSyncOnBackupFailure: true,
|
||||
},
|
||||
include: [],
|
||||
exclude: [],
|
||||
|
||||
@@ -48,6 +48,7 @@ export function mapUiToDbConfig(
|
||||
|
||||
// Starred repos organization
|
||||
starredReposOrg: giteaConfig.starredReposOrg,
|
||||
starredReposMode: giteaConfig.starredReposMode || "dedicated-org",
|
||||
|
||||
// Mirror strategy
|
||||
mirrorStrategy: giteaConfig.mirrorStrategy || "preserve",
|
||||
@@ -55,11 +56,13 @@ export function mapUiToDbConfig(
|
||||
|
||||
// Advanced options
|
||||
starredCodeOnly: advancedOptions.starredCodeOnly,
|
||||
autoMirrorStarred: advancedOptions.autoMirrorStarred ?? false,
|
||||
};
|
||||
|
||||
// Map Gitea config to match database schema
|
||||
const dbGiteaConfig: DbGiteaConfig = {
|
||||
url: giteaConfig.url,
|
||||
externalUrl: giteaConfig.externalUrl?.trim() || undefined,
|
||||
token: giteaConfig.token,
|
||||
defaultOwner: giteaConfig.username, // Map username to defaultOwner
|
||||
organization: giteaConfig.organization, // Add organization field
|
||||
@@ -98,6 +101,11 @@ export function mapUiToDbConfig(
|
||||
mirrorPullRequests: mirrorOptions.mirrorMetadata && mirrorOptions.metadataComponents.pullRequests,
|
||||
mirrorLabels: mirrorOptions.mirrorMetadata && mirrorOptions.metadataComponents.labels,
|
||||
mirrorMilestones: mirrorOptions.mirrorMetadata && mirrorOptions.metadataComponents.milestones,
|
||||
backupStrategy: giteaConfig.backupStrategy,
|
||||
backupBeforeSync: giteaConfig.backupBeforeSync ?? true,
|
||||
backupRetentionCount: giteaConfig.backupRetentionCount ?? 20,
|
||||
backupDirectory: giteaConfig.backupDirectory?.trim() || undefined,
|
||||
blockSyncOnBackupFailure: giteaConfig.blockSyncOnBackupFailure ?? true,
|
||||
};
|
||||
|
||||
return {
|
||||
@@ -126,16 +134,23 @@ export function mapDbToUiConfig(dbConfig: any): {
|
||||
// Map from database Gitea config to UI fields
|
||||
const giteaConfig: GiteaConfig = {
|
||||
url: dbConfig.giteaConfig?.url || "",
|
||||
externalUrl: dbConfig.giteaConfig?.externalUrl || "",
|
||||
username: dbConfig.giteaConfig?.defaultOwner || "", // Map defaultOwner to username
|
||||
token: dbConfig.giteaConfig?.token || "",
|
||||
organization: dbConfig.githubConfig?.defaultOrg || "github-mirrors", // Get from GitHub config
|
||||
visibility: dbConfig.giteaConfig?.visibility === "default" ? "public" : dbConfig.giteaConfig?.visibility || "public",
|
||||
starredReposOrg: dbConfig.githubConfig?.starredReposOrg || "starred", // Get from GitHub config
|
||||
starredReposMode: dbConfig.githubConfig?.starredReposMode || "dedicated-org", // Get from GitHub config
|
||||
preserveOrgStructure: dbConfig.giteaConfig?.preserveVisibility || false, // Map preserveVisibility
|
||||
mirrorStrategy: dbConfig.githubConfig?.mirrorStrategy || "preserve", // Get from GitHub config
|
||||
personalReposOrg: undefined, // Not stored in current schema
|
||||
issueConcurrency: dbConfig.giteaConfig?.issueConcurrency ?? 3,
|
||||
pullRequestConcurrency: dbConfig.giteaConfig?.pullRequestConcurrency ?? 5,
|
||||
backupStrategy: dbConfig.giteaConfig?.backupStrategy || undefined,
|
||||
backupBeforeSync: dbConfig.giteaConfig?.backupBeforeSync ?? true,
|
||||
backupRetentionCount: dbConfig.giteaConfig?.backupRetentionCount ?? 20,
|
||||
backupDirectory: dbConfig.giteaConfig?.backupDirectory || "data/repo-backups",
|
||||
blockSyncOnBackupFailure: dbConfig.giteaConfig?.blockSyncOnBackupFailure ?? true,
|
||||
};
|
||||
|
||||
// Map mirror options from various database fields
|
||||
@@ -158,6 +173,7 @@ export function mapDbToUiConfig(dbConfig: any): {
|
||||
skipForks: !(dbConfig.githubConfig?.includeForks ?? true), // Invert includeForks to get skipForks
|
||||
// Support both old (skipStarredIssues) and new (starredCodeOnly) field names for backward compatibility
|
||||
starredCodeOnly: dbConfig.githubConfig?.starredCodeOnly ?? (dbConfig.githubConfig as any)?.skipStarredIssues ?? false,
|
||||
autoMirrorStarred: dbConfig.githubConfig?.autoMirrorStarred ?? false,
|
||||
};
|
||||
|
||||
return {
|
||||
|
||||
319
src/lib/utils/force-push-detection.test.ts
Normal file
319
src/lib/utils/force-push-detection.test.ts
Normal file
@@ -0,0 +1,319 @@
|
||||
import { describe, expect, it, mock } from "bun:test";
|
||||
import {
|
||||
detectForcePush,
|
||||
fetchGitHubBranches,
|
||||
checkAncestry,
|
||||
type BranchInfo,
|
||||
} from "./force-push-detection";
|
||||
|
||||
// ---- Helpers ----
|
||||
|
||||
function makeOctokit(overrides: Record<string, any> = {}) {
|
||||
return {
|
||||
repos: {
|
||||
listBranches: mock(() => Promise.resolve({ data: [] })),
|
||||
compareCommits: mock(() =>
|
||||
Promise.resolve({ data: { status: "ahead" } }),
|
||||
),
|
||||
...overrides.repos,
|
||||
},
|
||||
paginate: mock(async (_method: any, params: any) => {
|
||||
// Default: return whatever the test wired into _githubBranches
|
||||
return overrides._githubBranches ?? [];
|
||||
}),
|
||||
...overrides,
|
||||
} as any;
|
||||
}
|
||||
|
||||
// ---- fetchGitHubBranches ----
|
||||
|
||||
describe("fetchGitHubBranches", () => {
|
||||
it("maps Octokit paginated response to BranchInfo[]", async () => {
|
||||
const octokit = makeOctokit({
|
||||
_githubBranches: [
|
||||
{ name: "main", commit: { sha: "aaa" } },
|
||||
{ name: "dev", commit: { sha: "bbb" } },
|
||||
],
|
||||
});
|
||||
|
||||
const result = await fetchGitHubBranches({
|
||||
octokit,
|
||||
owner: "user",
|
||||
repo: "repo",
|
||||
});
|
||||
|
||||
expect(result).toEqual([
|
||||
{ name: "main", sha: "aaa" },
|
||||
{ name: "dev", sha: "bbb" },
|
||||
]);
|
||||
});
|
||||
});
|
||||
|
||||
// ---- checkAncestry ----
|
||||
|
||||
describe("checkAncestry", () => {
|
||||
it("returns true for fast-forward (ahead)", async () => {
|
||||
const octokit = makeOctokit({
|
||||
repos: {
|
||||
compareCommits: mock(() =>
|
||||
Promise.resolve({ data: { status: "ahead" } }),
|
||||
),
|
||||
},
|
||||
});
|
||||
|
||||
const result = await checkAncestry({
|
||||
octokit,
|
||||
owner: "user",
|
||||
repo: "repo",
|
||||
baseSha: "old",
|
||||
headSha: "new",
|
||||
});
|
||||
|
||||
expect(result).toBe(true);
|
||||
});
|
||||
|
||||
it("returns true for identical", async () => {
|
||||
const octokit = makeOctokit({
|
||||
repos: {
|
||||
compareCommits: mock(() =>
|
||||
Promise.resolve({ data: { status: "identical" } }),
|
||||
),
|
||||
},
|
||||
});
|
||||
|
||||
const result = await checkAncestry({
|
||||
octokit,
|
||||
owner: "user",
|
||||
repo: "repo",
|
||||
baseSha: "same",
|
||||
headSha: "same",
|
||||
});
|
||||
|
||||
expect(result).toBe(true);
|
||||
});
|
||||
|
||||
it("returns false for diverged", async () => {
|
||||
const octokit = makeOctokit({
|
||||
repos: {
|
||||
compareCommits: mock(() =>
|
||||
Promise.resolve({ data: { status: "diverged" } }),
|
||||
),
|
||||
},
|
||||
});
|
||||
|
||||
const result = await checkAncestry({
|
||||
octokit,
|
||||
owner: "user",
|
||||
repo: "repo",
|
||||
baseSha: "old",
|
||||
headSha: "new",
|
||||
});
|
||||
|
||||
expect(result).toBe(false);
|
||||
});
|
||||
|
||||
it("returns false when API returns 404 (old SHA gone)", async () => {
|
||||
const error404 = Object.assign(new Error("Not Found"), { status: 404 });
|
||||
const octokit = makeOctokit({
|
||||
repos: {
|
||||
compareCommits: mock(() => Promise.reject(error404)),
|
||||
},
|
||||
});
|
||||
|
||||
const result = await checkAncestry({
|
||||
octokit,
|
||||
owner: "user",
|
||||
repo: "repo",
|
||||
baseSha: "gone",
|
||||
headSha: "new",
|
||||
});
|
||||
|
||||
expect(result).toBe(false);
|
||||
});
|
||||
|
||||
it("throws on transient errors (fail-open for caller)", async () => {
|
||||
const error500 = Object.assign(new Error("Internal Server Error"), { status: 500 });
|
||||
const octokit = makeOctokit({
|
||||
repos: {
|
||||
compareCommits: mock(() => Promise.reject(error500)),
|
||||
},
|
||||
});
|
||||
|
||||
expect(
|
||||
checkAncestry({
|
||||
octokit,
|
||||
owner: "user",
|
||||
repo: "repo",
|
||||
baseSha: "old",
|
||||
headSha: "new",
|
||||
}),
|
||||
).rejects.toThrow("Internal Server Error");
|
||||
});
|
||||
});
|
||||
|
||||
// ---- detectForcePush ----
|
||||
// Uses _deps injection to avoid fragile global fetch mocking.
|
||||
|
||||
describe("detectForcePush", () => {
|
||||
const baseArgs = {
|
||||
giteaUrl: "https://gitea.example.com",
|
||||
giteaToken: "tok",
|
||||
giteaOwner: "org",
|
||||
giteaRepo: "repo",
|
||||
githubOwner: "user",
|
||||
githubRepo: "repo",
|
||||
};
|
||||
|
||||
function makeDeps(overrides: {
|
||||
giteaBranches?: BranchInfo[] | Error;
|
||||
githubBranches?: BranchInfo[] | Error;
|
||||
ancestryResult?: boolean;
|
||||
} = {}) {
|
||||
return {
|
||||
fetchGiteaBranches: mock(async () => {
|
||||
if (overrides.giteaBranches instanceof Error) throw overrides.giteaBranches;
|
||||
return overrides.giteaBranches ?? [];
|
||||
}) as any,
|
||||
fetchGitHubBranches: mock(async () => {
|
||||
if (overrides.githubBranches instanceof Error) throw overrides.githubBranches;
|
||||
return overrides.githubBranches ?? [];
|
||||
}) as any,
|
||||
checkAncestry: mock(async () => overrides.ancestryResult ?? true) as any,
|
||||
};
|
||||
}
|
||||
|
||||
const dummyOctokit = {} as any;
|
||||
|
||||
it("skips when Gitea has no branches (first mirror)", async () => {
|
||||
const deps = makeDeps({ giteaBranches: [] });
|
||||
const result = await detectForcePush({ ...baseArgs, octokit: dummyOctokit, _deps: deps });
|
||||
|
||||
expect(result.detected).toBe(false);
|
||||
expect(result.skipped).toBe(true);
|
||||
expect(result.skipReason).toContain("No Gitea branches");
|
||||
});
|
||||
|
||||
it("returns no detection when all SHAs match", async () => {
|
||||
const deps = makeDeps({
|
||||
giteaBranches: [
|
||||
{ name: "main", sha: "aaa" },
|
||||
{ name: "dev", sha: "bbb" },
|
||||
],
|
||||
githubBranches: [
|
||||
{ name: "main", sha: "aaa" },
|
||||
{ name: "dev", sha: "bbb" },
|
||||
],
|
||||
});
|
||||
|
||||
const result = await detectForcePush({ ...baseArgs, octokit: dummyOctokit, _deps: deps });
|
||||
|
||||
expect(result.detected).toBe(false);
|
||||
expect(result.skipped).toBe(false);
|
||||
expect(result.affectedBranches).toHaveLength(0);
|
||||
});
|
||||
|
||||
it("detects deleted branch", async () => {
|
||||
const deps = makeDeps({
|
||||
giteaBranches: [
|
||||
{ name: "main", sha: "aaa" },
|
||||
{ name: "old-branch", sha: "ccc" },
|
||||
],
|
||||
githubBranches: [{ name: "main", sha: "aaa" }],
|
||||
});
|
||||
|
||||
const result = await detectForcePush({ ...baseArgs, octokit: dummyOctokit, _deps: deps });
|
||||
|
||||
expect(result.detected).toBe(true);
|
||||
expect(result.affectedBranches).toHaveLength(1);
|
||||
expect(result.affectedBranches[0]).toEqual({
|
||||
name: "old-branch",
|
||||
reason: "deleted",
|
||||
giteaSha: "ccc",
|
||||
githubSha: null,
|
||||
});
|
||||
});
|
||||
|
||||
it("returns no detection for fast-forward", async () => {
|
||||
const deps = makeDeps({
|
||||
giteaBranches: [{ name: "main", sha: "old-sha" }],
|
||||
githubBranches: [{ name: "main", sha: "new-sha" }],
|
||||
ancestryResult: true, // fast-forward
|
||||
});
|
||||
|
||||
const result = await detectForcePush({ ...baseArgs, octokit: dummyOctokit, _deps: deps });
|
||||
|
||||
expect(result.detected).toBe(false);
|
||||
expect(result.affectedBranches).toHaveLength(0);
|
||||
});
|
||||
|
||||
it("detects diverged branch", async () => {
|
||||
const deps = makeDeps({
|
||||
giteaBranches: [{ name: "main", sha: "old-sha" }],
|
||||
githubBranches: [{ name: "main", sha: "rewritten-sha" }],
|
||||
ancestryResult: false, // diverged
|
||||
});
|
||||
|
||||
const result = await detectForcePush({ ...baseArgs, octokit: dummyOctokit, _deps: deps });
|
||||
|
||||
expect(result.detected).toBe(true);
|
||||
expect(result.affectedBranches).toHaveLength(1);
|
||||
expect(result.affectedBranches[0]).toEqual({
|
||||
name: "main",
|
||||
reason: "diverged",
|
||||
giteaSha: "old-sha",
|
||||
githubSha: "rewritten-sha",
|
||||
});
|
||||
});
|
||||
|
||||
it("detects force-push when ancestry check fails (old SHA gone)", async () => {
|
||||
const deps = makeDeps({
|
||||
giteaBranches: [{ name: "main", sha: "old-sha" }],
|
||||
githubBranches: [{ name: "main", sha: "new-sha" }],
|
||||
ancestryResult: false, // checkAncestry returns false on error
|
||||
});
|
||||
|
||||
const result = await detectForcePush({ ...baseArgs, octokit: dummyOctokit, _deps: deps });
|
||||
|
||||
expect(result.detected).toBe(true);
|
||||
expect(result.affectedBranches).toHaveLength(1);
|
||||
expect(result.affectedBranches[0].reason).toBe("diverged");
|
||||
});
|
||||
|
||||
it("skips when Gitea API returns 404", async () => {
|
||||
const { HttpError } = await import("@/lib/http-client");
|
||||
const deps = makeDeps({
|
||||
giteaBranches: new HttpError("not found", 404, "Not Found"),
|
||||
});
|
||||
|
||||
const result = await detectForcePush({ ...baseArgs, octokit: dummyOctokit, _deps: deps });
|
||||
|
||||
expect(result.detected).toBe(false);
|
||||
expect(result.skipped).toBe(true);
|
||||
expect(result.skipReason).toContain("not found");
|
||||
});
|
||||
|
||||
it("skips when Gitea API returns server error", async () => {
|
||||
const deps = makeDeps({
|
||||
giteaBranches: new Error("HTTP 500: internal error"),
|
||||
});
|
||||
|
||||
const result = await detectForcePush({ ...baseArgs, octokit: dummyOctokit, _deps: deps });
|
||||
|
||||
expect(result.detected).toBe(false);
|
||||
expect(result.skipped).toBe(true);
|
||||
expect(result.skipReason).toContain("Failed to fetch Gitea branches");
|
||||
});
|
||||
|
||||
it("skips when GitHub API fails", async () => {
|
||||
const deps = makeDeps({
|
||||
giteaBranches: [{ name: "main", sha: "aaa" }],
|
||||
githubBranches: new Error("rate limited"),
|
||||
});
|
||||
|
||||
const result = await detectForcePush({ ...baseArgs, octokit: dummyOctokit, _deps: deps });
|
||||
|
||||
expect(result.detected).toBe(false);
|
||||
expect(result.skipped).toBe(true);
|
||||
expect(result.skipReason).toContain("Failed to fetch GitHub branches");
|
||||
});
|
||||
});
|
||||
286
src/lib/utils/force-push-detection.ts
Normal file
286
src/lib/utils/force-push-detection.ts
Normal file
@@ -0,0 +1,286 @@
|
||||
/**
|
||||
* Force-push detection module.
|
||||
*
|
||||
* Compares branch SHAs between a Gitea mirror and GitHub source to detect
|
||||
* branches that were deleted, rewritten, or force-pushed.
|
||||
*
|
||||
* **Fail-open**: If detection itself fails (API errors, rate limits, etc.),
|
||||
* the result indicates no force-push so sync proceeds normally. Detection
|
||||
* should never block sync due to its own failure.
|
||||
*/
|
||||
|
||||
import type { Octokit } from "@octokit/rest";
|
||||
import { httpGet, HttpError } from "@/lib/http-client";
|
||||
|
||||
// ---- Types ----
|
||||
|
||||
export interface BranchInfo {
|
||||
name: string;
|
||||
sha: string;
|
||||
}
|
||||
|
||||
export type ForcePushReason = "deleted" | "diverged" | "non-fast-forward";
|
||||
|
||||
export interface AffectedBranch {
|
||||
name: string;
|
||||
reason: ForcePushReason;
|
||||
giteaSha: string;
|
||||
githubSha: string | null; // null when branch was deleted
|
||||
}
|
||||
|
||||
export interface ForcePushDetectionResult {
|
||||
detected: boolean;
|
||||
affectedBranches: AffectedBranch[];
|
||||
/** True when detection could not run (API error, etc.) */
|
||||
skipped: boolean;
|
||||
skipReason?: string;
|
||||
}
|
||||
|
||||
const NO_FORCE_PUSH: ForcePushDetectionResult = {
|
||||
detected: false,
|
||||
affectedBranches: [],
|
||||
skipped: false,
|
||||
};
|
||||
|
||||
function skippedResult(reason: string): ForcePushDetectionResult {
|
||||
return {
|
||||
detected: false,
|
||||
affectedBranches: [],
|
||||
skipped: true,
|
||||
skipReason: reason,
|
||||
};
|
||||
}
|
||||
|
||||
// ---- Branch fetching ----
|
||||
|
||||
/**
|
||||
* Fetch all branches from a Gitea repository (paginated).
|
||||
*/
|
||||
export async function fetchGiteaBranches({
|
||||
giteaUrl,
|
||||
giteaToken,
|
||||
owner,
|
||||
repo,
|
||||
}: {
|
||||
giteaUrl: string;
|
||||
giteaToken: string;
|
||||
owner: string;
|
||||
repo: string;
|
||||
}): Promise<BranchInfo[]> {
|
||||
const branches: BranchInfo[] = [];
|
||||
let page = 1;
|
||||
const perPage = 50;
|
||||
|
||||
while (true) {
|
||||
const url = `${giteaUrl}/api/v1/repos/${owner}/${repo}/branches?page=${page}&limit=${perPage}`;
|
||||
const response = await httpGet<Array<{ name: string; commit: { id: string } }>>(
|
||||
url,
|
||||
{ Authorization: `token ${giteaToken}` },
|
||||
);
|
||||
|
||||
if (!Array.isArray(response.data) || response.data.length === 0) break;
|
||||
|
||||
for (const b of response.data) {
|
||||
branches.push({ name: b.name, sha: b.commit.id });
|
||||
}
|
||||
|
||||
if (response.data.length < perPage) break;
|
||||
page++;
|
||||
}
|
||||
|
||||
return branches;
|
||||
}
|
||||
|
||||
/**
|
||||
* Fetch all branches from a GitHub repository (paginated via Octokit).
|
||||
*/
|
||||
export async function fetchGitHubBranches({
|
||||
octokit,
|
||||
owner,
|
||||
repo,
|
||||
}: {
|
||||
octokit: Octokit;
|
||||
owner: string;
|
||||
repo: string;
|
||||
}): Promise<BranchInfo[]> {
|
||||
const data = await octokit.paginate(octokit.repos.listBranches, {
|
||||
owner,
|
||||
repo,
|
||||
per_page: 100,
|
||||
});
|
||||
|
||||
return data.map((b) => ({ name: b.name, sha: b.commit.sha }));
|
||||
}
|
||||
|
||||
/**
|
||||
* Check whether the transition from `baseSha` to `headSha` on the same branch
|
||||
* is a fast-forward (i.e. `baseSha` is an ancestor of `headSha`).
|
||||
*
|
||||
* Returns `true` when the change is safe (fast-forward) and `false` when it
|
||||
* is a confirmed force-push (404 = old SHA garbage-collected from GitHub).
|
||||
*
|
||||
* Throws on transient errors (rate limits, network issues) so the caller
|
||||
* can decide how to handle them (fail-open: skip that branch).
|
||||
*/
|
||||
export async function checkAncestry({
|
||||
octokit,
|
||||
owner,
|
||||
repo,
|
||||
baseSha,
|
||||
headSha,
|
||||
}: {
|
||||
octokit: Octokit;
|
||||
owner: string;
|
||||
repo: string;
|
||||
baseSha: string;
|
||||
headSha: string;
|
||||
}): Promise<boolean> {
|
||||
try {
|
||||
const { data } = await octokit.repos.compareCommits({
|
||||
owner,
|
||||
repo,
|
||||
base: baseSha,
|
||||
head: headSha,
|
||||
});
|
||||
// "ahead" means headSha is strictly ahead of baseSha → fast-forward.
|
||||
// "behind" or "diverged" means the branch was rewritten.
|
||||
return data.status === "ahead" || data.status === "identical";
|
||||
} catch (error: any) {
|
||||
// 404 / 422 = old SHA no longer exists on GitHub → confirmed force-push.
|
||||
if (error?.status === 404 || error?.status === 422) {
|
||||
return false;
|
||||
}
|
||||
// Any other error (rate limit, network) → rethrow so caller can
|
||||
// handle it as fail-open (skip branch) rather than false-positive.
|
||||
throw error;
|
||||
}
|
||||
}
|
||||
|
||||
// ---- Main detection ----
|
||||
|
||||
/**
|
||||
* Compare branch SHAs between Gitea and GitHub to detect force-pushes.
|
||||
*
|
||||
* The function is intentionally fail-open: any error during detection returns
|
||||
* a "skipped" result so that sync can proceed normally.
|
||||
*/
|
||||
export async function detectForcePush({
|
||||
giteaUrl,
|
||||
giteaToken,
|
||||
giteaOwner,
|
||||
giteaRepo,
|
||||
octokit,
|
||||
githubOwner,
|
||||
githubRepo,
|
||||
_deps,
|
||||
}: {
|
||||
giteaUrl: string;
|
||||
giteaToken: string;
|
||||
giteaOwner: string;
|
||||
giteaRepo: string;
|
||||
octokit: Octokit;
|
||||
githubOwner: string;
|
||||
githubRepo: string;
|
||||
/** @internal — test-only dependency injection */
|
||||
_deps?: {
|
||||
fetchGiteaBranches: typeof fetchGiteaBranches;
|
||||
fetchGitHubBranches: typeof fetchGitHubBranches;
|
||||
checkAncestry: typeof checkAncestry;
|
||||
};
|
||||
}): Promise<ForcePushDetectionResult> {
|
||||
const deps = _deps ?? { fetchGiteaBranches, fetchGitHubBranches, checkAncestry };
|
||||
|
||||
// 1. Fetch Gitea branches
|
||||
let giteaBranches: BranchInfo[];
|
||||
try {
|
||||
giteaBranches = await deps.fetchGiteaBranches({
|
||||
giteaUrl,
|
||||
giteaToken,
|
||||
owner: giteaOwner,
|
||||
repo: giteaRepo,
|
||||
});
|
||||
} catch (error) {
|
||||
// Gitea 404 = repo not yet mirrored, skip detection
|
||||
if (error instanceof HttpError && error.status === 404) {
|
||||
return skippedResult("Gitea repository not found (first mirror?)");
|
||||
}
|
||||
return skippedResult(
|
||||
`Failed to fetch Gitea branches: ${error instanceof Error ? error.message : String(error)}`,
|
||||
);
|
||||
}
|
||||
|
||||
// First-time mirror: no Gitea branches → nothing to compare
|
||||
if (giteaBranches.length === 0) {
|
||||
return skippedResult("No Gitea branches found (first mirror?)");
|
||||
}
|
||||
|
||||
// 2. Fetch GitHub branches
|
||||
let githubBranches: BranchInfo[];
|
||||
try {
|
||||
githubBranches = await deps.fetchGitHubBranches({
|
||||
octokit,
|
||||
owner: githubOwner,
|
||||
repo: githubRepo,
|
||||
});
|
||||
} catch (error) {
|
||||
return skippedResult(
|
||||
`Failed to fetch GitHub branches: ${error instanceof Error ? error.message : String(error)}`,
|
||||
);
|
||||
}
|
||||
|
||||
const githubBranchMap = new Map(githubBranches.map((b) => [b.name, b.sha]));
|
||||
|
||||
// 3. Compare each Gitea branch against GitHub
|
||||
const affected: AffectedBranch[] = [];
|
||||
|
||||
for (const giteaBranch of giteaBranches) {
|
||||
const githubSha = githubBranchMap.get(giteaBranch.name);
|
||||
|
||||
if (githubSha === undefined) {
|
||||
// Branch was deleted on GitHub
|
||||
affected.push({
|
||||
name: giteaBranch.name,
|
||||
reason: "deleted",
|
||||
giteaSha: giteaBranch.sha,
|
||||
githubSha: null,
|
||||
});
|
||||
continue;
|
||||
}
|
||||
|
||||
// Same SHA → no change
|
||||
if (githubSha === giteaBranch.sha) continue;
|
||||
|
||||
// SHAs differ → check if it's a fast-forward
|
||||
try {
|
||||
const isFastForward = await deps.checkAncestry({
|
||||
octokit,
|
||||
owner: githubOwner,
|
||||
repo: githubRepo,
|
||||
baseSha: giteaBranch.sha,
|
||||
headSha: githubSha,
|
||||
});
|
||||
|
||||
if (!isFastForward) {
|
||||
affected.push({
|
||||
name: giteaBranch.name,
|
||||
reason: "diverged",
|
||||
giteaSha: giteaBranch.sha,
|
||||
githubSha,
|
||||
});
|
||||
}
|
||||
} catch {
|
||||
// Individual branch check failure → skip that branch (fail-open)
|
||||
continue;
|
||||
}
|
||||
}
|
||||
|
||||
if (affected.length === 0) {
|
||||
return NO_FORCE_PUSH;
|
||||
}
|
||||
|
||||
return {
|
||||
detected: true,
|
||||
affectedBranches: affected,
|
||||
skipped: false,
|
||||
};
|
||||
}
|
||||
@@ -2,28 +2,13 @@ import type { APIRoute } from "astro";
|
||||
import { db, mirrorJobs, events } from "@/lib/db";
|
||||
import { eq, count } from "drizzle-orm";
|
||||
import { createSecureErrorResponse } from "@/lib/utils";
|
||||
import { requireAuthenticatedUserId } from "@/lib/auth-guards";
|
||||
|
||||
export const POST: APIRoute = async ({ request }) => {
|
||||
export const POST: APIRoute = async ({ request, locals }) => {
|
||||
try {
|
||||
let body;
|
||||
try {
|
||||
body = await request.json();
|
||||
} catch (jsonError) {
|
||||
console.error("Invalid JSON in request body:", jsonError);
|
||||
return new Response(
|
||||
JSON.stringify({ error: "Invalid JSON in request body." }),
|
||||
{ status: 400, headers: { "Content-Type": "application/json" } }
|
||||
);
|
||||
}
|
||||
|
||||
const { userId } = body || {};
|
||||
|
||||
if (!userId) {
|
||||
return new Response(
|
||||
JSON.stringify({ error: "Missing 'userId' in request body." }),
|
||||
{ status: 400, headers: { "Content-Type": "application/json" } }
|
||||
);
|
||||
}
|
||||
const authResult = await requireAuthenticatedUserId({ request, locals });
|
||||
if ("response" in authResult) return authResult.response;
|
||||
const userId = authResult.userId;
|
||||
|
||||
// Start a transaction to ensure all operations succeed or fail together
|
||||
const result = await db.transaction(async (tx) => {
|
||||
|
||||
@@ -1,21 +1,16 @@
|
||||
import type { APIRoute } from "astro";
|
||||
import { db, mirrorJobs, configs } from "@/lib/db";
|
||||
import { db, mirrorJobs } from "@/lib/db";
|
||||
import { eq, sql } from "drizzle-orm";
|
||||
import { createSecureErrorResponse } from "@/lib/utils";
|
||||
import type { MirrorJob } from "@/lib/db/schema";
|
||||
import { repoStatusEnum } from "@/types/Repository";
|
||||
import { requireAuthenticatedUserId } from "@/lib/auth-guards";
|
||||
|
||||
export const GET: APIRoute = async ({ url }) => {
|
||||
export const GET: APIRoute = async ({ request, locals }) => {
|
||||
try {
|
||||
const searchParams = new URL(url).searchParams;
|
||||
const userId = searchParams.get("userId");
|
||||
|
||||
if (!userId) {
|
||||
return new Response(
|
||||
JSON.stringify({ error: "Missing 'userId' in query parameters." }),
|
||||
{ status: 400, headers: { "Content-Type": "application/json" } }
|
||||
);
|
||||
}
|
||||
const authResult = await requireAuthenticatedUserId({ request, locals });
|
||||
if ("response" in authResult) return authResult.response;
|
||||
const userId = authResult.userId;
|
||||
|
||||
// Fetch mirror jobs associated with the user
|
||||
const jobs = await db
|
||||
|
||||
@@ -2,7 +2,6 @@ import type { APIRoute } from "astro";
|
||||
import { db, configs, users } from "@/lib/db";
|
||||
import { v4 as uuidv4 } from "uuid";
|
||||
import { eq } from "drizzle-orm";
|
||||
import { calculateCleanupInterval } from "@/lib/cleanup-service";
|
||||
import { createSecureErrorResponse } from "@/lib/utils";
|
||||
import {
|
||||
mapUiToDbConfig,
|
||||
@@ -12,20 +11,25 @@ import {
|
||||
mapDbScheduleToUi,
|
||||
mapDbCleanupToUi
|
||||
} from "@/lib/utils/config-mapper";
|
||||
import { encrypt, decrypt, migrateToken } from "@/lib/utils/encryption";
|
||||
import { encrypt, decrypt } from "@/lib/utils/encryption";
|
||||
import { createDefaultConfig } from "@/lib/utils/config-defaults";
|
||||
import { requireAuthenticatedUserId } from "@/lib/auth-guards";
|
||||
|
||||
export const POST: APIRoute = async ({ request }) => {
|
||||
export const POST: APIRoute = async ({ request, locals }) => {
|
||||
try {
|
||||
const body = await request.json();
|
||||
const { userId, githubConfig, giteaConfig, scheduleConfig, cleanupConfig, mirrorOptions, advancedOptions } = body;
|
||||
const authResult = await requireAuthenticatedUserId({ request, locals });
|
||||
if ("response" in authResult) return authResult.response;
|
||||
const userId = authResult.userId;
|
||||
|
||||
if (!userId || !githubConfig || !giteaConfig || !scheduleConfig || !cleanupConfig || !mirrorOptions || !advancedOptions) {
|
||||
const body = await request.json();
|
||||
const { githubConfig, giteaConfig, scheduleConfig, cleanupConfig, mirrorOptions, advancedOptions } = body;
|
||||
|
||||
if (!githubConfig || !giteaConfig || !scheduleConfig || !cleanupConfig || !mirrorOptions || !advancedOptions) {
|
||||
return new Response(
|
||||
JSON.stringify({
|
||||
success: false,
|
||||
message:
|
||||
"userId, githubConfig, giteaConfig, scheduleConfig, cleanupConfig, mirrorOptions, and advancedOptions are required.",
|
||||
"githubConfig, giteaConfig, scheduleConfig, cleanupConfig, mirrorOptions, and advancedOptions are required.",
|
||||
}),
|
||||
{
|
||||
status: 400,
|
||||
@@ -172,17 +176,11 @@ export const POST: APIRoute = async ({ request }) => {
|
||||
}
|
||||
};
|
||||
|
||||
export const GET: APIRoute = async ({ request }) => {
|
||||
export const GET: APIRoute = async ({ request, locals }) => {
|
||||
try {
|
||||
const url = new URL(request.url);
|
||||
const userId = url.searchParams.get("userId");
|
||||
|
||||
if (!userId) {
|
||||
return new Response(JSON.stringify({ error: "User ID is required" }), {
|
||||
status: 400,
|
||||
headers: { "Content-Type": "application/json" },
|
||||
});
|
||||
}
|
||||
const authResult = await requireAuthenticatedUserId({ request, locals });
|
||||
if ("response" in authResult) return authResult.response;
|
||||
const userId = authResult.userId;
|
||||
|
||||
// Fetch the configuration for the user
|
||||
const config = await db
|
||||
|
||||
@@ -3,24 +3,14 @@ import { db, repositories, organizations, mirrorJobs, configs } from "@/lib/db";
|
||||
import { eq, count, and, sql, or } from "drizzle-orm";
|
||||
import { jsonResponse, createSecureErrorResponse } from "@/lib/utils";
|
||||
import type { DashboardApiResponse } from "@/types/dashboard";
|
||||
import { repositoryVisibilityEnum, repoStatusEnum } from "@/types/Repository";
|
||||
import { membershipRoleEnum } from "@/types/organizations";
|
||||
|
||||
export const GET: APIRoute = async ({ request }) => {
|
||||
const url = new URL(request.url);
|
||||
const userId = url.searchParams.get("userId");
|
||||
|
||||
if (!userId) {
|
||||
return jsonResponse({
|
||||
data: {
|
||||
success: false,
|
||||
error: "Missing userId",
|
||||
},
|
||||
status: 400,
|
||||
});
|
||||
}
|
||||
import { requireAuthenticatedUserId } from "@/lib/auth-guards";
|
||||
|
||||
export const GET: APIRoute = async ({ request, locals }) => {
|
||||
try {
|
||||
const authResult = await requireAuthenticatedUserId({ request, locals });
|
||||
if ("response" in authResult) return authResult.response;
|
||||
const userId = authResult.userId;
|
||||
|
||||
const [
|
||||
userRepos,
|
||||
userOrgs,
|
||||
|
||||
@@ -1,13 +1,11 @@
|
||||
import type { APIRoute } from "astro";
|
||||
import { getNewEvents } from "@/lib/events";
|
||||
import { requireAuthenticatedUserId } from "@/lib/auth-guards";
|
||||
|
||||
export const GET: APIRoute = async ({ request }) => {
|
||||
const url = new URL(request.url);
|
||||
const userId = url.searchParams.get("userId");
|
||||
|
||||
if (!userId) {
|
||||
return new Response("Missing userId", { status: 400 });
|
||||
}
|
||||
export const GET: APIRoute = async ({ request, locals }) => {
|
||||
const authResult = await requireAuthenticatedUserId({ request, locals });
|
||||
if ("response" in authResult) return authResult.response;
|
||||
const userId = authResult.userId;
|
||||
|
||||
// Create a new ReadableStream for SSE
|
||||
const stream = new ReadableStream({
|
||||
@@ -66,4 +64,4 @@ export const GET: APIRoute = async ({ request }) => {
|
||||
"X-Accel-Buffering": "no", // Disable nginx buffering
|
||||
},
|
||||
});
|
||||
};
|
||||
};
|
||||
|
||||
@@ -9,22 +9,14 @@ import {
|
||||
import type { Organization } from "@/lib/db/schema";
|
||||
import { repoStatusEnum } from "@/types/Repository";
|
||||
import { jsonResponse, createSecureErrorResponse } from "@/lib/utils";
|
||||
import { requireAuthenticatedUserId } from "@/lib/auth-guards";
|
||||
|
||||
export const GET: APIRoute = async ({ request }) => {
|
||||
const url = new URL(request.url);
|
||||
const userId = url.searchParams.get("userId");
|
||||
|
||||
if (!userId) {
|
||||
return jsonResponse({
|
||||
data: {
|
||||
success: false,
|
||||
error: "Missing userId",
|
||||
},
|
||||
status: 400,
|
||||
});
|
||||
}
|
||||
|
||||
export const GET: APIRoute = async ({ request, locals }) => {
|
||||
try {
|
||||
const authResult = await requireAuthenticatedUserId({ request, locals });
|
||||
if ("response" in authResult) return authResult.response;
|
||||
const userId = authResult.userId;
|
||||
|
||||
// Fetch the user's active configuration to respect filtering settings
|
||||
const [config] = await db
|
||||
.select()
|
||||
|
||||
@@ -7,19 +7,14 @@ import {
|
||||
type RepositoryApiResponse,
|
||||
} from "@/types/Repository";
|
||||
import { jsonResponse, createSecureErrorResponse } from "@/lib/utils";
|
||||
import { requireAuthenticatedUserId } from "@/lib/auth-guards";
|
||||
|
||||
export const GET: APIRoute = async ({ request }) => {
|
||||
const url = new URL(request.url);
|
||||
const userId = url.searchParams.get("userId");
|
||||
|
||||
if (!userId) {
|
||||
return jsonResponse({
|
||||
data: { success: false, error: "Missing userId" },
|
||||
status: 400,
|
||||
});
|
||||
}
|
||||
|
||||
export const GET: APIRoute = async ({ request, locals }) => {
|
||||
try {
|
||||
const authResult = await requireAuthenticatedUserId({ request, locals });
|
||||
if ("response" in authResult) return authResult.response;
|
||||
const userId = authResult.userId;
|
||||
|
||||
// Fetch the user's active configuration
|
||||
const [config] = await db
|
||||
.select()
|
||||
|
||||
202
src/pages/api/job/approve-sync.ts
Normal file
202
src/pages/api/job/approve-sync.ts
Normal file
@@ -0,0 +1,202 @@
|
||||
import type { APIRoute } from "astro";
|
||||
import { db, configs, repositories } from "@/lib/db";
|
||||
import { and, eq, inArray } from "drizzle-orm";
|
||||
import { repositoryVisibilityEnum, repoStatusEnum } from "@/types/Repository";
|
||||
import { syncGiteaRepoEnhanced } from "@/lib/gitea-enhanced";
|
||||
import { createSecureErrorResponse } from "@/lib/utils";
|
||||
import { requireAuthenticatedUserId } from "@/lib/auth-guards";
|
||||
import { createPreSyncBundleBackup } from "@/lib/repo-backup";
|
||||
import { decryptConfigTokens } from "@/lib/utils/config-encryption";
|
||||
import type { Config } from "@/types/config";
|
||||
import { createMirrorJob } from "@/lib/helpers";
|
||||
|
||||
interface ApproveSyncRequest {
|
||||
repositoryIds: string[];
|
||||
action: "approve" | "dismiss";
|
||||
}
|
||||
|
||||
export const POST: APIRoute = async ({ request, locals }) => {
|
||||
try {
|
||||
const authResult = await requireAuthenticatedUserId({ request, locals });
|
||||
if ("response" in authResult) return authResult.response;
|
||||
const userId = authResult.userId;
|
||||
|
||||
const body: ApproveSyncRequest = await request.json();
|
||||
const { repositoryIds, action } = body;
|
||||
|
||||
if (!repositoryIds || !Array.isArray(repositoryIds) || repositoryIds.length === 0) {
|
||||
return new Response(
|
||||
JSON.stringify({ success: false, message: "repositoryIds are required." }),
|
||||
{ status: 400, headers: { "Content-Type": "application/json" } },
|
||||
);
|
||||
}
|
||||
|
||||
if (action !== "approve" && action !== "dismiss") {
|
||||
return new Response(
|
||||
JSON.stringify({ success: false, message: "action must be 'approve' or 'dismiss'." }),
|
||||
{ status: 400, headers: { "Content-Type": "application/json" } },
|
||||
);
|
||||
}
|
||||
|
||||
// Fetch config
|
||||
const configResult = await db
|
||||
.select()
|
||||
.from(configs)
|
||||
.where(eq(configs.userId, userId))
|
||||
.limit(1);
|
||||
|
||||
const config = configResult[0];
|
||||
if (!config) {
|
||||
return new Response(
|
||||
JSON.stringify({ success: false, message: "No configuration found." }),
|
||||
{ status: 400, headers: { "Content-Type": "application/json" } },
|
||||
);
|
||||
}
|
||||
|
||||
// Fetch repos — only those in pending-approval status
|
||||
const repos = await db
|
||||
.select()
|
||||
.from(repositories)
|
||||
.where(
|
||||
and(
|
||||
eq(repositories.userId, userId),
|
||||
eq(repositories.status, "pending-approval"),
|
||||
inArray(repositories.id, repositoryIds),
|
||||
),
|
||||
);
|
||||
|
||||
if (!repos.length) {
|
||||
return new Response(
|
||||
JSON.stringify({ success: false, message: "No pending-approval repositories found for the given IDs." }),
|
||||
{ status: 404, headers: { "Content-Type": "application/json" } },
|
||||
);
|
||||
}
|
||||
|
||||
if (action === "dismiss") {
|
||||
// Reset status to "synced" so repos resume normal schedule
|
||||
for (const repo of repos) {
|
||||
await db
|
||||
.update(repositories)
|
||||
.set({
|
||||
status: "synced",
|
||||
errorMessage: null,
|
||||
updatedAt: new Date(),
|
||||
})
|
||||
.where(eq(repositories.id, repo.id));
|
||||
|
||||
await createMirrorJob({
|
||||
userId,
|
||||
repositoryId: repo.id,
|
||||
repositoryName: repo.name,
|
||||
message: `Force-push alert dismissed for ${repo.name}`,
|
||||
details: "User dismissed the force-push alert. Repository will resume normal sync schedule.",
|
||||
status: "synced",
|
||||
});
|
||||
}
|
||||
|
||||
return new Response(
|
||||
JSON.stringify({
|
||||
success: true,
|
||||
message: `Dismissed ${repos.length} repository alert(s).`,
|
||||
repositories: repos.map((repo) => ({
|
||||
...repo,
|
||||
status: "synced",
|
||||
errorMessage: null,
|
||||
})),
|
||||
}),
|
||||
{ status: 200, headers: { "Content-Type": "application/json" } },
|
||||
);
|
||||
}
|
||||
|
||||
// action === "approve": create backup first (safety), then trigger sync
|
||||
const decryptedConfig = decryptConfigTokens(config as unknown as Config);
|
||||
|
||||
// Process in background
|
||||
setTimeout(async () => {
|
||||
for (const repo of repos) {
|
||||
try {
|
||||
const { getGiteaRepoOwnerAsync } = await import("@/lib/gitea");
|
||||
const repoOwner = await getGiteaRepoOwnerAsync({ config, repository: repo });
|
||||
|
||||
// Always create a backup before approved sync for safety
|
||||
const cloneUrl = `${config.giteaConfig.url.replace(/\/$/, "")}/${repoOwner}/${repo.name}.git`;
|
||||
try {
|
||||
const backupResult = await createPreSyncBundleBackup({
|
||||
config,
|
||||
owner: repoOwner,
|
||||
repoName: repo.name,
|
||||
cloneUrl,
|
||||
force: true, // Bypass legacy gate — approval implies backup
|
||||
});
|
||||
|
||||
await createMirrorJob({
|
||||
userId,
|
||||
repositoryId: repo.id,
|
||||
repositoryName: repo.name,
|
||||
message: `Safety snapshot created for ${repo.name}`,
|
||||
details: `Pre-approval snapshot at ${backupResult.bundlePath}.`,
|
||||
status: "syncing",
|
||||
});
|
||||
} catch (backupError) {
|
||||
console.warn(
|
||||
`[ApproveSync] Backup failed for ${repo.name}, proceeding with sync: ${
|
||||
backupError instanceof Error ? backupError.message : String(backupError)
|
||||
}`,
|
||||
);
|
||||
}
|
||||
|
||||
// Trigger sync — skip detection to avoid re-blocking
|
||||
const repoData = {
|
||||
...repo,
|
||||
status: repoStatusEnum.parse("syncing"),
|
||||
organization: repo.organization ?? undefined,
|
||||
lastMirrored: repo.lastMirrored ?? undefined,
|
||||
errorMessage: repo.errorMessage ?? undefined,
|
||||
forkedFrom: repo.forkedFrom ?? undefined,
|
||||
visibility: repositoryVisibilityEnum.parse(repo.visibility),
|
||||
mirroredLocation: repo.mirroredLocation || "",
|
||||
};
|
||||
|
||||
await syncGiteaRepoEnhanced({
|
||||
config,
|
||||
repository: repoData,
|
||||
skipForcePushDetection: true,
|
||||
});
|
||||
console.log(`[ApproveSync] Sync completed for approved repository: ${repo.name}`);
|
||||
} catch (error) {
|
||||
console.error(
|
||||
`[ApproveSync] Failed to sync approved repository ${repo.name}:`,
|
||||
error,
|
||||
);
|
||||
}
|
||||
}
|
||||
}, 0);
|
||||
|
||||
// Immediately update status to syncing for responsiveness
|
||||
for (const repo of repos) {
|
||||
await db
|
||||
.update(repositories)
|
||||
.set({
|
||||
status: "syncing",
|
||||
errorMessage: null,
|
||||
updatedAt: new Date(),
|
||||
})
|
||||
.where(eq(repositories.id, repo.id));
|
||||
}
|
||||
|
||||
return new Response(
|
||||
JSON.stringify({
|
||||
success: true,
|
||||
message: `Approved sync for ${repos.length} repository(ies). Backup + sync started.`,
|
||||
repositories: repos.map((repo) => ({
|
||||
...repo,
|
||||
status: "syncing",
|
||||
errorMessage: null,
|
||||
})),
|
||||
}),
|
||||
{ status: 200, headers: { "Content-Type": "application/json" } },
|
||||
);
|
||||
} catch (error) {
|
||||
return createSecureErrorResponse(error, "approve-sync", 500);
|
||||
}
|
||||
};
|
||||
@@ -1,7 +1,7 @@
|
||||
import type { APIRoute } from "astro";
|
||||
import type { MirrorOrgRequest, MirrorOrgResponse } from "@/types/mirror";
|
||||
import { db, configs, organizations } from "@/lib/db";
|
||||
import { eq, inArray } from "drizzle-orm";
|
||||
import { and, eq, inArray } from "drizzle-orm";
|
||||
import { createGitHubClient } from "@/lib/github";
|
||||
import { mirrorGitHubOrgToGitea } from "@/lib/gitea";
|
||||
import { repoStatusEnum } from "@/types/Repository";
|
||||
@@ -10,17 +10,22 @@ import { createSecureErrorResponse } from "@/lib/utils";
|
||||
import { processWithResilience } from "@/lib/utils/concurrency";
|
||||
import { v4 as uuidv4 } from "uuid";
|
||||
import { getDecryptedGitHubToken } from "@/lib/utils/config-encryption";
|
||||
import { requireAuthenticatedUserId } from "@/lib/auth-guards";
|
||||
|
||||
export const POST: APIRoute = async ({ request }) => {
|
||||
export const POST: APIRoute = async ({ request, locals }) => {
|
||||
try {
|
||||
const body: MirrorOrgRequest = await request.json();
|
||||
const { userId, organizationIds } = body;
|
||||
const authResult = await requireAuthenticatedUserId({ request, locals });
|
||||
if ("response" in authResult) return authResult.response;
|
||||
const userId = authResult.userId;
|
||||
|
||||
if (!userId || !organizationIds || !Array.isArray(organizationIds)) {
|
||||
const body: MirrorOrgRequest = await request.json();
|
||||
const { organizationIds } = body;
|
||||
|
||||
if (!organizationIds || !Array.isArray(organizationIds)) {
|
||||
return new Response(
|
||||
JSON.stringify({
|
||||
success: false,
|
||||
message: "userId and organizationIds are required.",
|
||||
message: "organizationIds are required.",
|
||||
}),
|
||||
{ status: 400, headers: { "Content-Type": "application/json" } }
|
||||
);
|
||||
@@ -56,7 +61,12 @@ export const POST: APIRoute = async ({ request }) => {
|
||||
const orgs = await db
|
||||
.select()
|
||||
.from(organizations)
|
||||
.where(inArray(organizations.id, organizationIds));
|
||||
.where(
|
||||
and(
|
||||
eq(organizations.userId, userId),
|
||||
inArray(organizations.id, organizationIds)
|
||||
)
|
||||
);
|
||||
|
||||
if (!orgs.length) {
|
||||
return new Response(
|
||||
|
||||
@@ -62,7 +62,13 @@ const mockRepositories = {};
|
||||
mock.module("@/lib/db", () => ({
|
||||
db: mockDb,
|
||||
configs: mockConfigs,
|
||||
repositories: mockRepositories
|
||||
repositories: mockRepositories,
|
||||
users: {},
|
||||
organizations: {},
|
||||
mirrorJobs: {},
|
||||
events: {},
|
||||
accounts: {},
|
||||
sessions: {}
|
||||
}));
|
||||
|
||||
// Mock the gitea module
|
||||
@@ -71,7 +77,10 @@ const mockMirrorGitHubOrgRepoToGiteaOrg = mock(() => Promise.resolve());
|
||||
|
||||
mock.module("@/lib/gitea", () => ({
|
||||
mirrorGithubRepoToGitea: mockMirrorGithubRepoToGitea,
|
||||
mirrorGitHubOrgRepoToGiteaOrg: mockMirrorGitHubOrgRepoToGiteaOrg
|
||||
mirrorGitHubOrgRepoToGiteaOrg: mockMirrorGitHubOrgRepoToGiteaOrg,
|
||||
getGiteaRepoOwnerAsync: mock(() => Promise.resolve("test-owner")),
|
||||
isRepoPresentInGitea: mock(() => Promise.resolve(true)),
|
||||
syncGiteaRepo: mock(() => Promise.resolve({ success: true })),
|
||||
}));
|
||||
|
||||
// Mock the github module
|
||||
@@ -90,6 +99,7 @@ mock.module("@/lib/utils/concurrency", () => ({
|
||||
|
||||
// Mock drizzle-orm
|
||||
mock.module("drizzle-orm", () => ({
|
||||
and: mock(() => ({})),
|
||||
eq: mock(() => ({})),
|
||||
inArray: mock(() => ({}))
|
||||
}));
|
||||
@@ -121,7 +131,7 @@ describe("Repository Mirroring API", () => {
|
||||
console.error = originalConsoleError;
|
||||
});
|
||||
|
||||
test("returns 400 if userId is missing", async () => {
|
||||
test("returns 401 when request is unauthenticated", async () => {
|
||||
const request = new Request("http://localhost/api/job/mirror-repo", {
|
||||
method: "POST",
|
||||
headers: {
|
||||
@@ -134,11 +144,11 @@ describe("Repository Mirroring API", () => {
|
||||
|
||||
const response = await POST({ request } as any);
|
||||
|
||||
expect(response.status).toBe(400);
|
||||
expect(response.status).toBe(401);
|
||||
|
||||
const data = await response.json();
|
||||
expect(data.success).toBe(false);
|
||||
expect(data.message).toBe("userId and repositoryIds are required.");
|
||||
expect(data.error).toBe("Unauthorized");
|
||||
});
|
||||
|
||||
test("returns 400 if repositoryIds is missing", async () => {
|
||||
@@ -152,13 +162,18 @@ describe("Repository Mirroring API", () => {
|
||||
})
|
||||
});
|
||||
|
||||
const response = await POST({ request } as any);
|
||||
const response = await POST({
|
||||
request,
|
||||
locals: {
|
||||
session: { userId: "user-id" },
|
||||
},
|
||||
} as any);
|
||||
|
||||
expect(response.status).toBe(400);
|
||||
|
||||
const data = await response.json();
|
||||
expect(data.success).toBe(false);
|
||||
expect(data.message).toBe("userId and repositoryIds are required.");
|
||||
expect(data.message).toBe("repositoryIds are required.");
|
||||
});
|
||||
|
||||
test("returns 200 and starts mirroring repositories", async () => {
|
||||
@@ -173,7 +188,12 @@ describe("Repository Mirroring API", () => {
|
||||
})
|
||||
});
|
||||
|
||||
const response = await POST({ request } as any);
|
||||
const response = await POST({
|
||||
request,
|
||||
locals: {
|
||||
session: { userId: "user-id" },
|
||||
},
|
||||
} as any);
|
||||
|
||||
expect(response.status).toBe(200);
|
||||
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
import type { APIRoute } from "astro";
|
||||
import type { MirrorRepoRequest, MirrorRepoResponse } from "@/types/mirror";
|
||||
import { db, configs, repositories } from "@/lib/db";
|
||||
import { eq, inArray } from "drizzle-orm";
|
||||
import { and, eq, inArray } from "drizzle-orm";
|
||||
import { repositoryVisibilityEnum, repoStatusEnum } from "@/types/Repository";
|
||||
import {
|
||||
mirrorGithubRepoToGitea,
|
||||
@@ -12,17 +12,22 @@ import { createGitHubClient } from "@/lib/github";
|
||||
import { getDecryptedGitHubToken } from "@/lib/utils/config-encryption";
|
||||
import { processWithResilience } from "@/lib/utils/concurrency";
|
||||
import { createSecureErrorResponse } from "@/lib/utils";
|
||||
import { requireAuthenticatedUserId } from "@/lib/auth-guards";
|
||||
|
||||
export const POST: APIRoute = async ({ request }) => {
|
||||
export const POST: APIRoute = async ({ request, locals }) => {
|
||||
try {
|
||||
const body: MirrorRepoRequest = await request.json();
|
||||
const { userId, repositoryIds } = body;
|
||||
const authResult = await requireAuthenticatedUserId({ request, locals });
|
||||
if ("response" in authResult) return authResult.response;
|
||||
const userId = authResult.userId;
|
||||
|
||||
if (!userId || !repositoryIds || !Array.isArray(repositoryIds)) {
|
||||
const body: MirrorRepoRequest = await request.json();
|
||||
const { repositoryIds } = body;
|
||||
|
||||
if (!repositoryIds || !Array.isArray(repositoryIds)) {
|
||||
return new Response(
|
||||
JSON.stringify({
|
||||
success: false,
|
||||
message: "userId and repositoryIds are required.",
|
||||
message: "repositoryIds are required.",
|
||||
}),
|
||||
{ status: 400, headers: { "Content-Type": "application/json" } }
|
||||
);
|
||||
@@ -58,7 +63,12 @@ export const POST: APIRoute = async ({ request }) => {
|
||||
const repos = await db
|
||||
.select()
|
||||
.from(repositories)
|
||||
.where(inArray(repositories.id, repositoryIds));
|
||||
.where(
|
||||
and(
|
||||
eq(repositories.userId, userId),
|
||||
inArray(repositories.id, repositoryIds)
|
||||
)
|
||||
);
|
||||
|
||||
if (!repos.length) {
|
||||
return new Response(
|
||||
@@ -108,15 +118,14 @@ export const POST: APIRoute = async ({ request }) => {
|
||||
|
||||
console.log(`Repository ${repo.name} will be mirrored to owner: ${owner}`);
|
||||
|
||||
// For single-org and starred repos strategies, or when mirroring to an org,
|
||||
// always use the org mirroring function to ensure proper organization handling
|
||||
// For single-org strategy, or when mirroring to an org,
|
||||
// use the org mirroring function to ensure proper organization handling
|
||||
const mirrorStrategy = config.githubConfig?.mirrorStrategy ||
|
||||
(config.githubConfig?.preserveOrgStructure ? "preserve" : "flat-user");
|
||||
(config.giteaConfig?.preserveOrgStructure ? "preserve" : "flat-user");
|
||||
|
||||
const shouldUseOrgMirror =
|
||||
owner !== config.giteaConfig?.defaultOwner || // Different owner means org
|
||||
mirrorStrategy === "single-org" || // Single-org strategy always uses org
|
||||
repoData.isStarred; // Starred repos always go to org
|
||||
mirrorStrategy === "single-org"; // Single-org strategy always uses org
|
||||
|
||||
if (shouldUseOrgMirror) {
|
||||
await mirrorGitHubOrgRepoToGiteaOrg({
|
||||
@@ -222,4 +231,4 @@ export const POST: APIRoute = async ({ request }) => {
|
||||
|
||||
return createSecureErrorResponse(error, "mirror-repo API", 500);
|
||||
}
|
||||
};
|
||||
};
|
||||
|
||||
121
src/pages/api/job/reset-metadata.ts
Normal file
121
src/pages/api/job/reset-metadata.ts
Normal file
@@ -0,0 +1,121 @@
|
||||
import type { APIRoute } from "astro";
|
||||
import { and, eq, inArray } from "drizzle-orm";
|
||||
import { db, configs, repositories } from "@/lib/db";
|
||||
import { repositoryVisibilityEnum, repoStatusEnum } from "@/types/Repository";
|
||||
import type { ResetMetadataRequest, ResetMetadataResponse } from "@/types/reset-metadata";
|
||||
import { createSecureErrorResponse } from "@/lib/utils";
|
||||
import { requireAuthenticatedUserId } from "@/lib/auth-guards";
|
||||
|
||||
export const POST: APIRoute = async ({ request, locals }) => {
|
||||
try {
|
||||
const authResult = await requireAuthenticatedUserId({ request, locals });
|
||||
if ("response" in authResult) return authResult.response;
|
||||
const userId = authResult.userId;
|
||||
|
||||
const body: ResetMetadataRequest = await request.json();
|
||||
const { repositoryIds } = body;
|
||||
|
||||
if (!repositoryIds || !Array.isArray(repositoryIds)) {
|
||||
return new Response(
|
||||
JSON.stringify({
|
||||
success: false,
|
||||
message: "repositoryIds are required.",
|
||||
}),
|
||||
{ status: 400, headers: { "Content-Type": "application/json" } }
|
||||
);
|
||||
}
|
||||
|
||||
if (repositoryIds.length === 0) {
|
||||
return new Response(
|
||||
JSON.stringify({
|
||||
success: false,
|
||||
message: "No repository IDs provided.",
|
||||
}),
|
||||
{ status: 400, headers: { "Content-Type": "application/json" } }
|
||||
);
|
||||
}
|
||||
|
||||
const configResult = await db
|
||||
.select()
|
||||
.from(configs)
|
||||
.where(eq(configs.userId, userId))
|
||||
.limit(1);
|
||||
|
||||
const config = configResult[0];
|
||||
|
||||
if (!config || !config.githubConfig.token || !config.giteaConfig?.token) {
|
||||
return new Response(
|
||||
JSON.stringify({
|
||||
success: false,
|
||||
error: "Missing GitHub or Gitea configuration.",
|
||||
}),
|
||||
{ status: 400, headers: { "Content-Type": "application/json" } }
|
||||
);
|
||||
}
|
||||
|
||||
const repos = await db
|
||||
.select()
|
||||
.from(repositories)
|
||||
.where(
|
||||
and(
|
||||
eq(repositories.userId, userId),
|
||||
inArray(repositories.id, repositoryIds)
|
||||
)
|
||||
);
|
||||
|
||||
if (!repos.length) {
|
||||
return new Response(
|
||||
JSON.stringify({
|
||||
success: false,
|
||||
error: "No repositories found for the given IDs.",
|
||||
}),
|
||||
{ status: 404, headers: { "Content-Type": "application/json" } }
|
||||
);
|
||||
}
|
||||
|
||||
await db
|
||||
.update(repositories)
|
||||
.set({
|
||||
metadata: null,
|
||||
updatedAt: new Date(),
|
||||
})
|
||||
.where(
|
||||
and(
|
||||
eq(repositories.userId, userId),
|
||||
inArray(repositories.id, repositoryIds)
|
||||
)
|
||||
);
|
||||
|
||||
const updatedRepos = await db
|
||||
.select()
|
||||
.from(repositories)
|
||||
.where(
|
||||
and(
|
||||
eq(repositories.userId, userId),
|
||||
inArray(repositories.id, repositoryIds)
|
||||
)
|
||||
);
|
||||
|
||||
const responsePayload: ResetMetadataResponse = {
|
||||
success: true,
|
||||
message: "Metadata state reset. Trigger sync to re-run metadata import.",
|
||||
repositories: updatedRepos.map((repo) => ({
|
||||
...repo,
|
||||
status: repoStatusEnum.parse(repo.status),
|
||||
organization: repo.organization ?? undefined,
|
||||
lastMirrored: repo.lastMirrored ?? undefined,
|
||||
errorMessage: repo.errorMessage ?? undefined,
|
||||
forkedFrom: repo.forkedFrom ?? undefined,
|
||||
visibility: repositoryVisibilityEnum.parse(repo.visibility),
|
||||
mirroredLocation: repo.mirroredLocation || "",
|
||||
})),
|
||||
};
|
||||
|
||||
return new Response(JSON.stringify(responsePayload), {
|
||||
status: 200,
|
||||
headers: { "Content-Type": "application/json" },
|
||||
});
|
||||
} catch (error) {
|
||||
return createSecureErrorResponse(error, "metadata reset", 500);
|
||||
}
|
||||
};
|
||||
@@ -1,6 +1,6 @@
|
||||
import type { APIRoute } from "astro";
|
||||
import { db, configs, repositories } from "@/lib/db";
|
||||
import { eq, inArray } from "drizzle-orm";
|
||||
import { and, eq, inArray } from "drizzle-orm";
|
||||
import { getGiteaRepoOwnerAsync, isRepoPresentInGitea } from "@/lib/gitea";
|
||||
import {
|
||||
mirrorGithubRepoToGitea,
|
||||
@@ -14,17 +14,22 @@ import { processWithRetry } from "@/lib/utils/concurrency";
|
||||
import { createMirrorJob } from "@/lib/helpers";
|
||||
import { createSecureErrorResponse } from "@/lib/utils";
|
||||
import { getDecryptedGitHubToken } from "@/lib/utils/config-encryption";
|
||||
import { requireAuthenticatedUserId } from "@/lib/auth-guards";
|
||||
|
||||
export const POST: APIRoute = async ({ request }) => {
|
||||
export const POST: APIRoute = async ({ request, locals }) => {
|
||||
try {
|
||||
const body: RetryRepoRequest = await request.json();
|
||||
const { userId, repositoryIds } = body;
|
||||
const authResult = await requireAuthenticatedUserId({ request, locals });
|
||||
if ("response" in authResult) return authResult.response;
|
||||
const userId = authResult.userId;
|
||||
|
||||
if (!userId || !repositoryIds || !Array.isArray(repositoryIds)) {
|
||||
const body: RetryRepoRequest = await request.json();
|
||||
const { repositoryIds } = body;
|
||||
|
||||
if (!repositoryIds || !Array.isArray(repositoryIds)) {
|
||||
return new Response(
|
||||
JSON.stringify({
|
||||
success: false,
|
||||
message: "userId and repositoryIds are required.",
|
||||
message: "repositoryIds are required.",
|
||||
}),
|
||||
{ status: 400, headers: { "Content-Type": "application/json" } }
|
||||
);
|
||||
@@ -60,7 +65,12 @@ export const POST: APIRoute = async ({ request }) => {
|
||||
const repos = await db
|
||||
.select()
|
||||
.from(repositories)
|
||||
.where(inArray(repositories.id, repositoryIds));
|
||||
.where(
|
||||
and(
|
||||
eq(repositories.userId, userId),
|
||||
inArray(repositories.id, repositoryIds)
|
||||
)
|
||||
);
|
||||
|
||||
if (!repos.length) {
|
||||
return new Response(
|
||||
@@ -142,15 +152,14 @@ export const POST: APIRoute = async ({ request }) => {
|
||||
|
||||
console.log(`Importing repo: ${repo.name} to owner: ${owner}`);
|
||||
|
||||
// For single-org and starred repos strategies, or when mirroring to an org,
|
||||
// always use the org mirroring function to ensure proper organization handling
|
||||
// For single-org strategy, or when mirroring to an org,
|
||||
// use the org mirroring function to ensure proper organization handling
|
||||
const mirrorStrategy = config.githubConfig?.mirrorStrategy ||
|
||||
(config.githubConfig?.preserveOrgStructure ? "preserve" : "flat-user");
|
||||
(config.giteaConfig?.preserveOrgStructure ? "preserve" : "flat-user");
|
||||
|
||||
const shouldUseOrgMirror =
|
||||
owner !== config.giteaConfig?.defaultOwner || // Different owner means org
|
||||
mirrorStrategy === "single-org" || // Single-org strategy always uses org
|
||||
repoData.isStarred; // Starred repos always go to org
|
||||
mirrorStrategy === "single-org"; // Single-org strategy always uses org
|
||||
|
||||
if (shouldUseOrgMirror) {
|
||||
await mirrorGitHubOrgRepoToGiteaOrg({
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
import type { APIRoute } from "astro";
|
||||
import { db, configs, repositories } from "@/lib/db";
|
||||
import { eq, or } from "drizzle-orm";
|
||||
import { and, eq, or } from "drizzle-orm";
|
||||
import { repoStatusEnum, repositoryVisibilityEnum } from "@/types/Repository";
|
||||
import { isRepoPresentInGitea, syncGiteaRepo } from "@/lib/gitea";
|
||||
import type {
|
||||
@@ -9,22 +9,15 @@ import type {
|
||||
} from "@/types/sync";
|
||||
import { createSecureErrorResponse } from "@/lib/utils";
|
||||
import { parseInterval } from "@/lib/utils/duration-parser";
|
||||
import { requireAuthenticatedUserId } from "@/lib/auth-guards";
|
||||
|
||||
export const POST: APIRoute = async ({ request }) => {
|
||||
export const POST: APIRoute = async ({ request, locals }) => {
|
||||
try {
|
||||
const body: ScheduleSyncRepoRequest = await request.json();
|
||||
const { userId } = body;
|
||||
const authResult = await requireAuthenticatedUserId({ request, locals });
|
||||
if ("response" in authResult) return authResult.response;
|
||||
const userId = authResult.userId;
|
||||
|
||||
if (!userId) {
|
||||
return new Response(
|
||||
JSON.stringify({
|
||||
success: false,
|
||||
error: "Missing userId in request body.",
|
||||
repositories: [],
|
||||
} satisfies ScheduleSyncRepoResponse),
|
||||
{ status: 400, headers: { "Content-Type": "application/json" } }
|
||||
);
|
||||
}
|
||||
await request.json().catch(() => ({} as ScheduleSyncRepoRequest));
|
||||
|
||||
// Fetch config for the user
|
||||
const configResult = await db
|
||||
@@ -51,12 +44,14 @@ export const POST: APIRoute = async ({ request }) => {
|
||||
.select()
|
||||
.from(repositories)
|
||||
.where(
|
||||
eq(repositories.userId, userId) &&
|
||||
and(
|
||||
eq(repositories.userId, userId),
|
||||
or(
|
||||
eq(repositories.status, "mirrored"),
|
||||
eq(repositories.status, "synced"),
|
||||
eq(repositories.status, "failed")
|
||||
)
|
||||
)
|
||||
);
|
||||
|
||||
if (!repos.length) {
|
||||
|
||||
@@ -1,23 +1,28 @@
|
||||
import type { APIRoute } from "astro";
|
||||
import type { MirrorRepoRequest } from "@/types/mirror";
|
||||
import { db, configs, repositories } from "@/lib/db";
|
||||
import { eq, inArray } from "drizzle-orm";
|
||||
import { and, eq, inArray } from "drizzle-orm";
|
||||
import { repositoryVisibilityEnum, repoStatusEnum } from "@/types/Repository";
|
||||
import { syncGiteaRepo } from "@/lib/gitea";
|
||||
import type { SyncRepoResponse } from "@/types/sync";
|
||||
import { processWithResilience } from "@/lib/utils/concurrency";
|
||||
import { createSecureErrorResponse } from "@/lib/utils";
|
||||
import { requireAuthenticatedUserId } from "@/lib/auth-guards";
|
||||
|
||||
export const POST: APIRoute = async ({ request }) => {
|
||||
export const POST: APIRoute = async ({ request, locals }) => {
|
||||
try {
|
||||
const body: MirrorRepoRequest = await request.json();
|
||||
const { userId, repositoryIds } = body;
|
||||
const authResult = await requireAuthenticatedUserId({ request, locals });
|
||||
if ("response" in authResult) return authResult.response;
|
||||
const userId = authResult.userId;
|
||||
|
||||
if (!userId || !repositoryIds || !Array.isArray(repositoryIds)) {
|
||||
const body: MirrorRepoRequest = await request.json();
|
||||
const { repositoryIds } = body;
|
||||
|
||||
if (!repositoryIds || !Array.isArray(repositoryIds)) {
|
||||
return new Response(
|
||||
JSON.stringify({
|
||||
success: false,
|
||||
message: "userId and repositoryIds are required.",
|
||||
message: "repositoryIds are required.",
|
||||
}),
|
||||
{ status: 400, headers: { "Content-Type": "application/json" } }
|
||||
);
|
||||
@@ -53,7 +58,12 @@ export const POST: APIRoute = async ({ request }) => {
|
||||
const repos = await db
|
||||
.select()
|
||||
.from(repositories)
|
||||
.where(inArray(repositories.id, repositoryIds));
|
||||
.where(
|
||||
and(
|
||||
eq(repositories.userId, userId),
|
||||
inArray(repositories.id, repositoryIds)
|
||||
)
|
||||
);
|
||||
|
||||
if (!repos.length) {
|
||||
return new Response(
|
||||
|
||||
@@ -2,18 +2,23 @@ import type { APIContext } from "astro";
|
||||
import { db, organizations } from "@/lib/db";
|
||||
import { eq, and } from "drizzle-orm";
|
||||
import { createSecureErrorResponse } from "@/lib/utils";
|
||||
import { requireAuthenticatedUserId } from "@/lib/auth-guards";
|
||||
|
||||
export async function PATCH({ params, request }: APIContext) {
|
||||
export async function PATCH({ params, request, locals }: APIContext) {
|
||||
try {
|
||||
const authResult = await requireAuthenticatedUserId({ request, locals });
|
||||
if ("response" in authResult) return authResult.response;
|
||||
const userId = authResult.userId;
|
||||
|
||||
const { id } = params;
|
||||
const body = await request.json();
|
||||
const { status, userId } = body;
|
||||
const { status } = body;
|
||||
|
||||
if (!id || !userId) {
|
||||
if (!id) {
|
||||
return new Response(
|
||||
JSON.stringify({
|
||||
success: false,
|
||||
error: "Organization ID and User ID are required",
|
||||
error: "Organization ID is required",
|
||||
}),
|
||||
{
|
||||
status: 400,
|
||||
@@ -78,4 +83,4 @@ export async function PATCH({ params, request }: APIContext) {
|
||||
} catch (error) {
|
||||
return createSecureErrorResponse(error);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -6,19 +6,16 @@ import { RateLimitManager } from "@/lib/rate-limit-manager";
|
||||
import { createGitHubClient } from "@/lib/github";
|
||||
import { getDecryptedGitHubToken } from "@/lib/utils/config-encryption";
|
||||
import { configs } from "@/lib/db";
|
||||
import { requireAuthenticatedUserId } from "@/lib/auth-guards";
|
||||
|
||||
export const GET: APIRoute = async ({ request, locals }) => {
|
||||
const authResult = await requireAuthenticatedUserId({ request, locals });
|
||||
if ("response" in authResult) return authResult.response;
|
||||
const userId = authResult.userId;
|
||||
|
||||
export const GET: APIRoute = async ({ request }) => {
|
||||
const url = new URL(request.url);
|
||||
const userId = url.searchParams.get("userId");
|
||||
const refresh = url.searchParams.get("refresh") === "true";
|
||||
|
||||
if (!userId) {
|
||||
return jsonResponse({
|
||||
data: { error: "Missing userId" },
|
||||
status: 400,
|
||||
});
|
||||
}
|
||||
|
||||
try {
|
||||
// If refresh is requested, fetch current rate limit from GitHub
|
||||
if (refresh) {
|
||||
@@ -101,4 +98,4 @@ export const GET: APIRoute = async ({ request }) => {
|
||||
} catch (error) {
|
||||
return createSecureErrorResponse(error, "rate limit check", 500);
|
||||
}
|
||||
};
|
||||
};
|
||||
|
||||
@@ -3,18 +3,23 @@ import { db, repositories } from "@/lib/db";
|
||||
import { eq, and } from "drizzle-orm";
|
||||
import { createSecureErrorResponse } from "@/lib/utils";
|
||||
import { repoStatusEnum } from "@/types/Repository";
|
||||
import { requireAuthenticatedUserId } from "@/lib/auth-guards";
|
||||
|
||||
export async function PATCH({ params, request }: APIContext) {
|
||||
export async function PATCH({ params, request, locals }: APIContext) {
|
||||
try {
|
||||
const authResult = await requireAuthenticatedUserId({ request, locals });
|
||||
if ("response" in authResult) return authResult.response;
|
||||
const userId = authResult.userId;
|
||||
|
||||
const { id } = params;
|
||||
const body = await request.json();
|
||||
const { status, userId } = body;
|
||||
const { status } = body;
|
||||
|
||||
if (!id || !userId) {
|
||||
if (!id) {
|
||||
return new Response(
|
||||
JSON.stringify({
|
||||
success: false,
|
||||
error: "Repository ID and User ID are required",
|
||||
error: "Repository ID is required",
|
||||
}),
|
||||
{
|
||||
status: 400,
|
||||
@@ -79,4 +84,4 @@ export async function PATCH({ params, request }: APIContext) {
|
||||
} catch (error) {
|
||||
return createSecureErrorResponse(error);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,13 +1,11 @@
|
||||
import type { APIRoute } from "astro";
|
||||
import { getNewEvents } from "@/lib/events";
|
||||
import { requireAuthenticatedUserId } from "@/lib/auth-guards";
|
||||
|
||||
export const GET: APIRoute = async ({ request }) => {
|
||||
const url = new URL(request.url);
|
||||
const userId = url.searchParams.get("userId");
|
||||
|
||||
if (!userId) {
|
||||
return new Response("Missing userId", { status: 400 });
|
||||
}
|
||||
export const GET: APIRoute = async ({ request, locals }) => {
|
||||
const authResult = await requireAuthenticatedUserId({ request, locals });
|
||||
if ("response" in authResult) return authResult.response;
|
||||
const userId = authResult.userId;
|
||||
|
||||
const channel = `mirror-status:${userId}`;
|
||||
let isClosed = false;
|
||||
|
||||
@@ -12,14 +12,13 @@ import {
|
||||
import { jsonResponse, createSecureErrorResponse } from "@/lib/utils";
|
||||
import { mergeGitReposPreferStarred, calcBatchSizeForInsert } from "@/lib/repo-utils";
|
||||
import { getDecryptedGitHubToken } from "@/lib/utils/config-encryption";
|
||||
import { requireAuthenticatedUserId } from "@/lib/auth-guards";
|
||||
import { isMirrorableGitHubRepo } from "@/lib/repo-eligibility";
|
||||
|
||||
export const POST: APIRoute = async ({ request }) => {
|
||||
const url = new URL(request.url);
|
||||
const userId = url.searchParams.get("userId");
|
||||
|
||||
if (!userId) {
|
||||
return jsonResponse({ data: { error: "Missing userId" }, status: 400 });
|
||||
}
|
||||
export const POST: APIRoute = async ({ request, locals }) => {
|
||||
const authResult = await requireAuthenticatedUserId({ request, locals });
|
||||
if ("response" in authResult) return authResult.response;
|
||||
const userId = authResult.userId;
|
||||
|
||||
try {
|
||||
const [config] = await db
|
||||
@@ -58,9 +57,10 @@ export const POST: APIRoute = async ({ request }) => {
|
||||
|
||||
// Merge and de-duplicate by fullName, preferring starred variant when duplicated
|
||||
const allGithubRepos = mergeGitReposPreferStarred(basicAndForkedRepos, starredRepos);
|
||||
const mirrorableGithubRepos = allGithubRepos.filter(isMirrorableGitHubRepo);
|
||||
|
||||
// Prepare full list of repos and orgs
|
||||
const newRepos = allGithubRepos.map((repo) => ({
|
||||
const newRepos = mirrorableGithubRepos.map((repo) => ({
|
||||
id: uuidv4(),
|
||||
userId,
|
||||
configId: config.id,
|
||||
@@ -188,6 +188,7 @@ export const POST: APIRoute = async ({ request }) => {
|
||||
message: "Repositories and organizations synced successfully",
|
||||
newRepositories: insertedRepos.length,
|
||||
newOrganizations: insertedOrgs.length,
|
||||
skippedDisabledRepositories: allGithubRepos.length - mirrorableGithubRepos.length,
|
||||
},
|
||||
});
|
||||
} catch (error) {
|
||||
|
||||
@@ -10,15 +10,20 @@ import type { RepositoryVisibility, RepoStatus } from "@/types/Repository";
|
||||
import { v4 as uuidv4 } from "uuid";
|
||||
import { decryptConfigTokens } from "@/lib/utils/config-encryption";
|
||||
import { createGitHubClient } from "@/lib/github";
|
||||
import { requireAuthenticatedUserId } from "@/lib/auth-guards";
|
||||
|
||||
export const POST: APIRoute = async ({ request }) => {
|
||||
export const POST: APIRoute = async ({ request, locals }) => {
|
||||
try {
|
||||
const body: AddOrganizationApiRequest = await request.json();
|
||||
const { role, org, userId, force = false } = body;
|
||||
const authResult = await requireAuthenticatedUserId({ request, locals });
|
||||
if ("response" in authResult) return authResult.response;
|
||||
const userId = authResult.userId;
|
||||
|
||||
if (!org || !userId || !role) {
|
||||
const body: AddOrganizationApiRequest = await request.json();
|
||||
const { role, org, force = false } = body;
|
||||
|
||||
if (!org || !role) {
|
||||
return jsonResponse({
|
||||
data: { success: false, error: "Missing org, role or userId" },
|
||||
data: { success: false, error: "Missing org or role" },
|
||||
status: 400,
|
||||
});
|
||||
}
|
||||
@@ -145,9 +150,10 @@ export const POST: APIRoute = async ({ request }) => {
|
||||
const existingIds = new Set(allRepos.map(r => r.id));
|
||||
const uniqueMemberRepos = memberRepos.filter(r => !existingIds.has(r.id));
|
||||
allRepos.push(...uniqueMemberRepos);
|
||||
const mirrorableRepos = allRepos.filter((repo) => !repo.disabled);
|
||||
|
||||
// Insert repositories
|
||||
const repoRecords = allRepos.map((repo) => {
|
||||
const repoRecords = mirrorableRepos.map((repo) => {
|
||||
const normalizedOwner = repo.owner.login.trim().toLowerCase();
|
||||
const normalizedRepoName = repo.name.trim().toLowerCase();
|
||||
|
||||
|
||||
@@ -11,17 +11,22 @@ import type {
|
||||
RepositoryVisibility,
|
||||
} from "@/types/Repository";
|
||||
import { createMirrorJob } from "@/lib/helpers";
|
||||
import { requireAuthenticatedUserId } from "@/lib/auth-guards";
|
||||
|
||||
export const POST: APIRoute = async ({ request }) => {
|
||||
export const POST: APIRoute = async ({ request, locals }) => {
|
||||
try {
|
||||
const body: AddRepositoriesApiRequest = await request.json();
|
||||
const { owner, repo, userId, force = false } = body;
|
||||
const authResult = await requireAuthenticatedUserId({ request, locals });
|
||||
if ("response" in authResult) return authResult.response;
|
||||
const userId = authResult.userId;
|
||||
|
||||
if (!owner || !repo || !userId) {
|
||||
const body: AddRepositoriesApiRequest = await request.json();
|
||||
const { owner, repo, force = false, destinationOrg } = body;
|
||||
|
||||
if (!owner || !repo) {
|
||||
return new Response(
|
||||
JSON.stringify({
|
||||
success: false,
|
||||
error: "Missing owner, repo, or userId",
|
||||
error: "Missing owner or repo",
|
||||
}),
|
||||
{ status: 400 }
|
||||
);
|
||||
@@ -34,7 +39,7 @@ export const POST: APIRoute = async ({ request }) => {
|
||||
return jsonResponse({
|
||||
data: {
|
||||
success: false,
|
||||
error: "Missing owner, repo, or userId",
|
||||
error: "Missing owner or repo",
|
||||
},
|
||||
status: 400,
|
||||
});
|
||||
@@ -117,7 +122,7 @@ export const POST: APIRoute = async ({ request }) => {
|
||||
lastMirrored: existingRepo?.lastMirrored ?? null,
|
||||
errorMessage: existingRepo?.errorMessage ?? null,
|
||||
mirroredLocation: existingRepo?.mirroredLocation ?? "",
|
||||
destinationOrg: existingRepo?.destinationOrg ?? null,
|
||||
destinationOrg: destinationOrg?.trim() || existingRepo?.destinationOrg || null,
|
||||
updatedAt: repoData.updated_at
|
||||
? new Date(repoData.updated_at)
|
||||
: new Date(),
|
||||
|
||||
@@ -2,16 +2,21 @@ import type { APIRoute } from "astro";
|
||||
import { publishEvent } from "@/lib/events";
|
||||
import { v4 as uuidv4 } from "uuid";
|
||||
import { createSecureErrorResponse } from "@/lib/utils";
|
||||
import { requireAuthenticatedUserId } from "@/lib/auth-guards";
|
||||
|
||||
export const POST: APIRoute = async ({ request }) => {
|
||||
export const POST: APIRoute = async ({ request, locals }) => {
|
||||
try {
|
||||
const body = await request.json();
|
||||
const { userId, message, status } = body;
|
||||
const authResult = await requireAuthenticatedUserId({ request, locals });
|
||||
if ("response" in authResult) return authResult.response;
|
||||
const userId = authResult.userId;
|
||||
|
||||
if (!userId || !message || !status) {
|
||||
const body = await request.json();
|
||||
const { message, status } = body;
|
||||
|
||||
if (!message || !status) {
|
||||
return new Response(
|
||||
JSON.stringify({
|
||||
error: "Missing required fields: userId, message, status",
|
||||
error: "Missing required fields: message, status",
|
||||
}),
|
||||
{ status: 400 }
|
||||
);
|
||||
|
||||
@@ -13,6 +13,7 @@ export const repoStatusEnum = z.enum([
|
||||
"syncing",
|
||||
"synced",
|
||||
"archived",
|
||||
"pending-approval", // Blocked by force-push detection, needs manual approval
|
||||
]);
|
||||
|
||||
export type RepoStatus = z.infer<typeof repoStatusEnum>;
|
||||
@@ -70,6 +71,7 @@ export interface GitRepo {
|
||||
visibility: RepositoryVisibility;
|
||||
|
||||
status: RepoStatus;
|
||||
isDisabled?: boolean;
|
||||
lastMirrored?: Date;
|
||||
errorMessage?: string;
|
||||
|
||||
@@ -82,6 +84,7 @@ export interface AddRepositoriesApiRequest {
|
||||
repo: string;
|
||||
owner: string;
|
||||
force?: boolean;
|
||||
destinationOrg?: string;
|
||||
}
|
||||
|
||||
export interface AddRepositoriesApiResponse {
|
||||
|
||||
@@ -2,19 +2,28 @@ import { type Config as ConfigType } from "@/lib/db/schema";
|
||||
|
||||
export type GiteaOrgVisibility = "public" | "private" | "limited";
|
||||
export type MirrorStrategy = "preserve" | "single-org" | "flat-user" | "mixed";
|
||||
export type StarredReposMode = "dedicated-org" | "preserve-owner";
|
||||
export type BackupStrategy = "disabled" | "always" | "on-force-push" | "block-on-force-push";
|
||||
|
||||
export interface GiteaConfig {
|
||||
url: string;
|
||||
externalUrl?: string;
|
||||
username: string;
|
||||
token: string;
|
||||
organization: string;
|
||||
visibility: GiteaOrgVisibility;
|
||||
starredReposOrg: string;
|
||||
starredReposMode?: StarredReposMode;
|
||||
preserveOrgStructure: boolean;
|
||||
mirrorStrategy?: MirrorStrategy; // New field for the strategy
|
||||
personalReposOrg?: string; // Override destination for personal repos
|
||||
issueConcurrency?: number;
|
||||
pullRequestConcurrency?: number;
|
||||
backupStrategy?: BackupStrategy;
|
||||
backupBeforeSync?: boolean; // Deprecated: kept for backward compat, use backupStrategy
|
||||
backupRetentionCount?: number;
|
||||
backupDirectory?: string;
|
||||
blockSyncOnBackupFailure?: boolean;
|
||||
}
|
||||
|
||||
export interface ScheduleConfig {
|
||||
@@ -46,6 +55,7 @@ export interface GitHubConfig {
|
||||
privateRepositories: boolean;
|
||||
mirrorStarred: boolean;
|
||||
starredDuplicateStrategy?: DuplicateNameStrategy;
|
||||
starredReposMode?: StarredReposMode;
|
||||
}
|
||||
|
||||
export interface MirrorOptions {
|
||||
@@ -65,6 +75,7 @@ export interface MirrorOptions {
|
||||
export interface AdvancedOptions {
|
||||
skipForks: boolean;
|
||||
starredCodeOnly: boolean;
|
||||
autoMirrorStarred?: boolean;
|
||||
}
|
||||
|
||||
export interface SaveConfigApiRequest {
|
||||
|
||||
13
src/types/reset-metadata.ts
Normal file
13
src/types/reset-metadata.ts
Normal file
@@ -0,0 +1,13 @@
|
||||
import type { Repository } from "@/lib/db/schema";
|
||||
|
||||
export interface ResetMetadataRequest {
|
||||
userId: string;
|
||||
repositoryIds: string[];
|
||||
}
|
||||
|
||||
export interface ResetMetadataResponse {
|
||||
success: boolean;
|
||||
message?: string;
|
||||
error?: string;
|
||||
repositories: Repository[];
|
||||
}
|
||||
77
tests/e2e/01-health.spec.ts
Normal file
77
tests/e2e/01-health.spec.ts
Normal file
@@ -0,0 +1,77 @@
|
||||
/**
|
||||
* 01 – Service health checks.
|
||||
*
|
||||
* Quick smoke tests that confirm every service required by the E2E suite is
|
||||
* reachable before the heavier workflow tests run.
|
||||
*/
|
||||
|
||||
import { test, expect } from "@playwright/test";
|
||||
import {
|
||||
APP_URL,
|
||||
GITEA_URL,
|
||||
FAKE_GITHUB_URL,
|
||||
GIT_SERVER_URL,
|
||||
waitFor,
|
||||
} from "./helpers";
|
||||
|
||||
test.describe("E2E: Service health checks", () => {
|
||||
test("Fake GitHub API is running", async ({ request }) => {
|
||||
const resp = await request.get(`${FAKE_GITHUB_URL}/___mgmt/health`);
|
||||
expect(resp.ok()).toBeTruthy();
|
||||
const data = await resp.json();
|
||||
expect(data.status).toBe("ok");
|
||||
expect(data.repos).toBeGreaterThan(0);
|
||||
console.log(
|
||||
`[Health] Fake GitHub: ${data.repos} repos, ${data.orgs} orgs, clone base: ${data.gitCloneBaseUrl ?? "default"}`,
|
||||
);
|
||||
});
|
||||
|
||||
test("Git HTTP server is running (serves test repos)", async ({
|
||||
request,
|
||||
}) => {
|
||||
const resp = await request.get(`${GIT_SERVER_URL}/manifest.json`, {
|
||||
failOnStatusCode: false,
|
||||
});
|
||||
expect(resp.ok(), "Git server should serve manifest.json").toBeTruthy();
|
||||
const manifest = await resp.json();
|
||||
expect(manifest.repos).toBeDefined();
|
||||
expect(manifest.repos.length).toBeGreaterThan(0);
|
||||
console.log(`[Health] Git server: serving ${manifest.repos.length} repos`);
|
||||
for (const r of manifest.repos) {
|
||||
console.log(`[Health] • ${r.owner}/${r.name} — ${r.description}`);
|
||||
}
|
||||
});
|
||||
|
||||
test("Gitea instance is running", async ({ request }) => {
|
||||
await waitFor(
|
||||
async () => {
|
||||
const resp = await request.get(`${GITEA_URL}/api/v1/version`, {
|
||||
failOnStatusCode: false,
|
||||
});
|
||||
return resp.ok();
|
||||
},
|
||||
{ timeout: 30_000, interval: 2_000, label: "Gitea healthy" },
|
||||
);
|
||||
const resp = await request.get(`${GITEA_URL}/api/v1/version`);
|
||||
const data = await resp.json();
|
||||
console.log(`[Health] Gitea version: ${data.version}`);
|
||||
expect(data.version).toBeTruthy();
|
||||
});
|
||||
|
||||
test("gitea-mirror app is running", async ({ request }) => {
|
||||
await waitFor(
|
||||
async () => {
|
||||
const resp = await request.get(`${APP_URL}/`, {
|
||||
failOnStatusCode: false,
|
||||
});
|
||||
return resp.status() < 500;
|
||||
},
|
||||
{ timeout: 60_000, interval: 2_000, label: "App healthy" },
|
||||
);
|
||||
const resp = await request.get(`${APP_URL}/`, {
|
||||
failOnStatusCode: false,
|
||||
});
|
||||
console.log(`[Health] App status: ${resp.status()}`);
|
||||
expect(resp.status()).toBeLessThan(500);
|
||||
});
|
||||
});
|
||||
344
tests/e2e/02-mirror-workflow.spec.ts
Normal file
344
tests/e2e/02-mirror-workflow.spec.ts
Normal file
@@ -0,0 +1,344 @@
|
||||
/**
|
||||
* 02 – Main mirror workflow.
|
||||
*
|
||||
* Walks through the full first-time user journey:
|
||||
* 1. Create Gitea admin user + API token
|
||||
* 2. Create the mirror target organization
|
||||
* 3. Register / sign-in to the gitea-mirror app
|
||||
* 4. Save GitHub + Gitea configuration
|
||||
* 5. Trigger a GitHub data sync (pull repo list from fake GitHub)
|
||||
* 6. Trigger mirror jobs (push repos into Gitea)
|
||||
* 7. Verify repos actually appeared in Gitea with real content
|
||||
* 8. Verify mirror job activity and app state
|
||||
*/
|
||||
|
||||
import { test, expect } from "@playwright/test";
|
||||
import {
|
||||
APP_URL,
|
||||
GITEA_URL,
|
||||
GITEA_MIRROR_ORG,
|
||||
GiteaAPI,
|
||||
getAppSessionCookies,
|
||||
saveConfig,
|
||||
waitFor,
|
||||
getRepositoryIds,
|
||||
triggerMirrorJobs,
|
||||
} from "./helpers";
|
||||
|
||||
test.describe("E2E: Mirror workflow", () => {
|
||||
let giteaApi: GiteaAPI;
|
||||
let appCookies = "";
|
||||
|
||||
test.beforeAll(async () => {
|
||||
giteaApi = new GiteaAPI(GITEA_URL);
|
||||
});
|
||||
|
||||
test.afterAll(async () => {
|
||||
await giteaApi.dispose();
|
||||
});
|
||||
|
||||
test("Step 1: Setup Gitea admin user and token", async () => {
|
||||
await giteaApi.ensureAdminUser();
|
||||
const token = await giteaApi.createToken();
|
||||
expect(token).toBeTruthy();
|
||||
expect(token.length).toBeGreaterThan(10);
|
||||
console.log(`[Setup] Gitea token acquired (length: ${token.length})`);
|
||||
});
|
||||
|
||||
test("Step 2: Create mirror organization in Gitea", async () => {
|
||||
await giteaApi.ensureOrg(GITEA_MIRROR_ORG);
|
||||
|
||||
const repos = await giteaApi.listOrgRepos(GITEA_MIRROR_ORG);
|
||||
expect(Array.isArray(repos)).toBeTruthy();
|
||||
console.log(
|
||||
`[Setup] Org ${GITEA_MIRROR_ORG} exists with ${repos.length} repos`,
|
||||
);
|
||||
});
|
||||
|
||||
test("Step 3: Register and sign in to gitea-mirror app", async ({
|
||||
request,
|
||||
}) => {
|
||||
appCookies = await getAppSessionCookies(request);
|
||||
expect(appCookies).toBeTruthy();
|
||||
console.log(
|
||||
`[Auth] Session cookies acquired (length: ${appCookies.length})`,
|
||||
);
|
||||
|
||||
const whoami = await request.get(`${APP_URL}/api/config`, {
|
||||
headers: { Cookie: appCookies },
|
||||
failOnStatusCode: false,
|
||||
});
|
||||
expect(
|
||||
whoami.status(),
|
||||
`Auth check returned ${whoami.status()} – cookies may be invalid`,
|
||||
).not.toBe(401);
|
||||
console.log(`[Auth] Auth check status: ${whoami.status()}`);
|
||||
});
|
||||
|
||||
test("Step 4: Configure mirrors via API (backup disabled)", async ({
|
||||
request,
|
||||
}) => {
|
||||
if (!appCookies) {
|
||||
appCookies = await getAppSessionCookies(request);
|
||||
}
|
||||
|
||||
const giteaToken = giteaApi.getTokenValue();
|
||||
expect(giteaToken, "Gitea token should be set from Step 1").toBeTruthy();
|
||||
|
||||
await saveConfig(request, giteaToken, appCookies, {
|
||||
giteaConfig: {
|
||||
backupBeforeSync: false,
|
||||
blockSyncOnBackupFailure: false,
|
||||
},
|
||||
});
|
||||
console.log("[Config] Configuration saved (backup disabled)");
|
||||
});
|
||||
|
||||
test("Step 5: Trigger GitHub data sync (fetch repos from fake GitHub)", async ({
|
||||
request,
|
||||
}) => {
|
||||
if (!appCookies) {
|
||||
appCookies = await getAppSessionCookies(request);
|
||||
}
|
||||
|
||||
const syncResp = await request.post(`${APP_URL}/api/sync`, {
|
||||
headers: {
|
||||
"Content-Type": "application/json",
|
||||
Cookie: appCookies,
|
||||
},
|
||||
failOnStatusCode: false,
|
||||
});
|
||||
|
||||
const status = syncResp.status();
|
||||
console.log(`[Sync] GitHub sync response: ${status}`);
|
||||
|
||||
if (status >= 400) {
|
||||
const body = await syncResp.text();
|
||||
console.log(`[Sync] Error body: ${body}`);
|
||||
}
|
||||
|
||||
expect(status, "Sync should not be unauthorized").not.toBe(401);
|
||||
expect(status, "Sync should not return server error").toBeLessThan(500);
|
||||
|
||||
if (syncResp.ok()) {
|
||||
const data = await syncResp.json();
|
||||
console.log(
|
||||
`[Sync] New repos: ${data.newRepositories ?? "?"}, new orgs: ${data.newOrganizations ?? "?"}`,
|
||||
);
|
||||
}
|
||||
});
|
||||
|
||||
test("Step 6: Trigger mirror jobs (push repos to Gitea)", async ({
|
||||
request,
|
||||
}) => {
|
||||
if (!appCookies) {
|
||||
appCookies = await getAppSessionCookies(request);
|
||||
}
|
||||
|
||||
// Fetch repository IDs from the dashboard API
|
||||
const { ids: repositoryIds, repos } = await getRepositoryIds(
|
||||
request,
|
||||
appCookies,
|
||||
);
|
||||
console.log(
|
||||
`[Mirror] Found ${repositoryIds.length} repos to mirror: ${repos.map((r: any) => r.name).join(", ")}`,
|
||||
);
|
||||
|
||||
if (repositoryIds.length === 0) {
|
||||
// Fallback: try the github/repositories endpoint
|
||||
const repoResp = await request.get(
|
||||
`${APP_URL}/api/github/repositories`,
|
||||
{
|
||||
headers: { Cookie: appCookies },
|
||||
failOnStatusCode: false,
|
||||
},
|
||||
);
|
||||
if (repoResp.ok()) {
|
||||
const repoData = await repoResp.json();
|
||||
const fallbackRepos: any[] = Array.isArray(repoData)
|
||||
? repoData
|
||||
: (repoData.repositories ?? []);
|
||||
repositoryIds.push(...fallbackRepos.map((r: any) => r.id));
|
||||
console.log(
|
||||
`[Mirror] Fallback: found ${repositoryIds.length} repos`,
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
expect(
|
||||
repositoryIds.length,
|
||||
"Should have at least one repository to mirror",
|
||||
).toBeGreaterThan(0);
|
||||
|
||||
const status = await triggerMirrorJobs(
|
||||
request,
|
||||
appCookies,
|
||||
repositoryIds,
|
||||
30_000,
|
||||
);
|
||||
console.log(`[Mirror] Mirror job response: ${status}`);
|
||||
|
||||
expect(status, "Mirror job should not be unauthorized").not.toBe(401);
|
||||
expect(status, "Mirror job should not return server error").toBeLessThan(
|
||||
500,
|
||||
);
|
||||
});
|
||||
|
||||
test("Step 7: Verify repos were actually mirrored to Gitea", async ({
|
||||
request,
|
||||
}) => {
|
||||
if (!appCookies) {
|
||||
appCookies = await getAppSessionCookies(request);
|
||||
}
|
||||
|
||||
// Wait for mirror jobs to finish processing
|
||||
await waitFor(
|
||||
async () => {
|
||||
const orgRepos = await giteaApi.listOrgRepos(GITEA_MIRROR_ORG);
|
||||
console.log(
|
||||
`[Verify] Gitea org repos so far: ${orgRepos.length} (${orgRepos.map((r: any) => r.name).join(", ")})`,
|
||||
);
|
||||
// We expect at least 3 repos (my-project, dotfiles, notes)
|
||||
return orgRepos.length >= 3;
|
||||
},
|
||||
{
|
||||
timeout: 90_000,
|
||||
interval: 5_000,
|
||||
label: "repos appear in Gitea",
|
||||
},
|
||||
);
|
||||
|
||||
const orgRepos = await giteaApi.listOrgRepos(GITEA_MIRROR_ORG);
|
||||
const orgRepoNames = orgRepos.map((r: any) => r.name);
|
||||
console.log(
|
||||
`[Verify] Gitea org repos: ${orgRepoNames.join(", ")} (total: ${orgRepos.length})`,
|
||||
);
|
||||
|
||||
// Check that at least the 3 personal repos are mirrored
|
||||
for (const repoName of ["my-project", "dotfiles", "notes"]) {
|
||||
expect(
|
||||
orgRepoNames,
|
||||
`Expected repo "${repoName}" to be mirrored into org ${GITEA_MIRROR_ORG}`,
|
||||
).toContain(repoName);
|
||||
}
|
||||
|
||||
// Verify my-project has actual content (branches, commits)
|
||||
const myProjectBranches = await giteaApi.listBranches(
|
||||
GITEA_MIRROR_ORG,
|
||||
"my-project",
|
||||
);
|
||||
const branchNames = myProjectBranches.map((b: any) => b.name);
|
||||
console.log(`[Verify] my-project branches: ${branchNames.join(", ")}`);
|
||||
expect(branchNames, "main branch should exist").toContain("main");
|
||||
|
||||
// Verify we can read actual file content
|
||||
const readmeContent = await giteaApi.getFileContent(
|
||||
GITEA_MIRROR_ORG,
|
||||
"my-project",
|
||||
"README.md",
|
||||
);
|
||||
expect(readmeContent, "README.md should have content").toBeTruthy();
|
||||
expect(readmeContent).toContain("My Project");
|
||||
console.log(
|
||||
`[Verify] my-project README.md starts with: ${readmeContent?.substring(0, 50)}...`,
|
||||
);
|
||||
|
||||
// Verify tags were mirrored
|
||||
const tags = await giteaApi.listTags(GITEA_MIRROR_ORG, "my-project");
|
||||
const tagNames = tags.map((t: any) => t.name);
|
||||
console.log(`[Verify] my-project tags: ${tagNames.join(", ")}`);
|
||||
if (tagNames.length > 0) {
|
||||
expect(tagNames).toContain("v1.0.0");
|
||||
}
|
||||
|
||||
// Verify commits exist
|
||||
const commits = await giteaApi.listCommits(
|
||||
GITEA_MIRROR_ORG,
|
||||
"my-project",
|
||||
);
|
||||
console.log(`[Verify] my-project commits: ${commits.length}`);
|
||||
expect(commits.length, "Should have multiple commits").toBeGreaterThan(0);
|
||||
|
||||
// Verify dotfiles repo has content
|
||||
const bashrc = await giteaApi.getFileContent(
|
||||
GITEA_MIRROR_ORG,
|
||||
"dotfiles",
|
||||
".bashrc",
|
||||
);
|
||||
expect(bashrc, "dotfiles should contain .bashrc").toBeTruthy();
|
||||
console.log("[Verify] dotfiles .bashrc verified");
|
||||
});
|
||||
|
||||
test("Step 8: Verify mirror jobs and app state", async ({ request }) => {
|
||||
if (!appCookies) {
|
||||
appCookies = await getAppSessionCookies(request);
|
||||
}
|
||||
|
||||
// Check activity log
|
||||
const activitiesResp = await request.get(`${APP_URL}/api/activities`, {
|
||||
headers: { Cookie: appCookies },
|
||||
failOnStatusCode: false,
|
||||
});
|
||||
|
||||
if (activitiesResp.ok()) {
|
||||
const activities = await activitiesResp.json();
|
||||
const jobs: any[] = Array.isArray(activities)
|
||||
? activities
|
||||
: (activities.jobs ?? activities.activities ?? []);
|
||||
console.log(`[State] Activity/job records: ${jobs.length}`);
|
||||
|
||||
const mirrorJobs = jobs.filter(
|
||||
(j: any) =>
|
||||
j.status === "mirroring" ||
|
||||
j.status === "failed" ||
|
||||
j.status === "success" ||
|
||||
j.status === "mirrored" ||
|
||||
j.message?.includes("mirror") ||
|
||||
j.message?.includes("Mirror"),
|
||||
);
|
||||
console.log(`[State] Mirror-related jobs: ${mirrorJobs.length}`);
|
||||
for (const j of mirrorJobs.slice(0, 5)) {
|
||||
console.log(
|
||||
`[State] • ${j.repositoryName ?? "?"}: ${j.status} — ${j.message ?? ""}`,
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
// Check dashboard repos
|
||||
const dashResp = await request.get(`${APP_URL}/api/dashboard`, {
|
||||
headers: { Cookie: appCookies },
|
||||
failOnStatusCode: false,
|
||||
});
|
||||
|
||||
if (dashResp.ok()) {
|
||||
const dashData = await dashResp.json();
|
||||
const repos: any[] = dashData.repositories ?? [];
|
||||
console.log(`[State] Dashboard repos: ${repos.length}`);
|
||||
|
||||
for (const r of repos) {
|
||||
console.log(
|
||||
`[State] • ${r.name}: status=${r.status}, mirrored=${r.mirroredLocation ?? "none"}`,
|
||||
);
|
||||
}
|
||||
|
||||
expect(repos.length, "Repos should exist in DB").toBeGreaterThan(0);
|
||||
|
||||
const succeeded = repos.filter(
|
||||
(r: any) => r.status === "mirrored" || r.status === "success",
|
||||
);
|
||||
console.log(
|
||||
`[State] Successfully mirrored repos: ${succeeded.length}/${repos.length}`,
|
||||
);
|
||||
}
|
||||
|
||||
// App should still be running
|
||||
const healthResp = await request.get(`${APP_URL}/`, {
|
||||
failOnStatusCode: false,
|
||||
});
|
||||
expect(
|
||||
healthResp.status(),
|
||||
"App should still be running after mirror attempts",
|
||||
).toBeLessThan(500);
|
||||
console.log(`[State] App health: ${healthResp.status()}`);
|
||||
});
|
||||
});
|
||||
305
tests/e2e/03-backup.spec.ts
Normal file
305
tests/e2e/03-backup.spec.ts
Normal file
@@ -0,0 +1,305 @@
|
||||
/**
|
||||
* 03 – Backup configuration tests.
|
||||
*
|
||||
* Exercises the pre-sync backup system by toggling config flags through
|
||||
* the app API and triggering re-syncs on repos that were already mirrored
|
||||
* by the 02-mirror-workflow suite.
|
||||
*
|
||||
* What is tested:
|
||||
* B1. Enable backupStrategy: "always" in config
|
||||
* B2. Confirm mirrored repos exist in Gitea (precondition)
|
||||
* B3. Trigger a re-sync with backup enabled — verify the backup code path
|
||||
* runs (snapshot activity entries appear in the activity log)
|
||||
* B4. Inspect activity log for snapshot-related entries
|
||||
* B5. Enable blockSyncOnBackupFailure and verify the flag is persisted
|
||||
* B6. Disable backup (backupStrategy: "disabled") and verify config resets cleanly
|
||||
*/
|
||||
|
||||
import { test, expect } from "@playwright/test";
|
||||
import {
|
||||
APP_URL,
|
||||
GITEA_URL,
|
||||
GITEA_MIRROR_ORG,
|
||||
GiteaAPI,
|
||||
getAppSessionCookies,
|
||||
saveConfig,
|
||||
getRepositoryIds,
|
||||
triggerSyncRepo,
|
||||
} from "./helpers";
|
||||
|
||||
test.describe("E2E: Backup configuration", () => {
|
||||
let giteaApi: GiteaAPI;
|
||||
let appCookies = "";
|
||||
|
||||
test.beforeAll(async () => {
|
||||
giteaApi = new GiteaAPI(GITEA_URL);
|
||||
try {
|
||||
await giteaApi.createToken();
|
||||
} catch {
|
||||
console.log(
|
||||
"[Backup] Could not create Gitea token; tests may be limited",
|
||||
);
|
||||
}
|
||||
});
|
||||
|
||||
test.afterAll(async () => {
|
||||
await giteaApi.dispose();
|
||||
});
|
||||
|
||||
// ── B1 ─────────────────────────────────────────────────────────────────────
|
||||
|
||||
test("Step B1: Enable backup in config", async ({ request }) => {
|
||||
appCookies = await getAppSessionCookies(request);
|
||||
|
||||
const giteaToken = giteaApi.getTokenValue();
|
||||
expect(giteaToken, "Gitea token required").toBeTruthy();
|
||||
|
||||
// Save config with backup strategy set to "always"
|
||||
await saveConfig(request, giteaToken, appCookies, {
|
||||
giteaConfig: {
|
||||
backupStrategy: "always",
|
||||
blockSyncOnBackupFailure: false,
|
||||
backupRetentionCount: 5,
|
||||
backupDirectory: "data/repo-backups",
|
||||
},
|
||||
});
|
||||
|
||||
// Verify config was saved
|
||||
const configResp = await request.get(`${APP_URL}/api/config`, {
|
||||
headers: { Cookie: appCookies },
|
||||
failOnStatusCode: false,
|
||||
});
|
||||
expect(configResp.status()).toBeLessThan(500);
|
||||
|
||||
if (configResp.ok()) {
|
||||
const configData = await configResp.json();
|
||||
const giteaCfg = configData.giteaConfig ?? configData.gitea ?? {};
|
||||
console.log(
|
||||
`[Backup] Config saved: backupStrategy=${giteaCfg.backupStrategy}, blockOnFailure=${giteaCfg.blockSyncOnBackupFailure}`,
|
||||
);
|
||||
}
|
||||
});
|
||||
|
||||
// ── B2 ─────────────────────────────────────────────────────────────────────
|
||||
|
||||
test("Step B2: Verify mirrored repos exist in Gitea before backup test", async () => {
|
||||
// We need repos to already be mirrored from the 02-mirror-workflow suite
|
||||
const orgRepos = await giteaApi.listOrgRepos(GITEA_MIRROR_ORG);
|
||||
console.log(
|
||||
`[Backup] Repos in ${GITEA_MIRROR_ORG}: ${orgRepos.length} (${orgRepos.map((r: any) => r.name).join(", ")})`,
|
||||
);
|
||||
|
||||
if (orgRepos.length === 0) {
|
||||
console.log(
|
||||
"[Backup] WARNING: No repos in Gitea yet. Backup test will verify " +
|
||||
"job creation but not bundle creation.",
|
||||
);
|
||||
}
|
||||
});
|
||||
|
||||
// ── B3 ─────────────────────────────────────────────────────────────────────
|
||||
|
||||
test("Step B3: Trigger re-sync with backup enabled", async ({ request }) => {
|
||||
if (!appCookies) {
|
||||
appCookies = await getAppSessionCookies(request);
|
||||
}
|
||||
|
||||
// Fetch mirrored repository IDs (sync-repo requires them)
|
||||
const { ids: repositoryIds, repos } = await getRepositoryIds(
|
||||
request,
|
||||
appCookies,
|
||||
{ status: "mirrored" },
|
||||
);
|
||||
|
||||
// Also include repos with "success" status
|
||||
if (repositoryIds.length === 0) {
|
||||
const { ids: successIds } = await getRepositoryIds(
|
||||
request,
|
||||
appCookies,
|
||||
{ status: "success" },
|
||||
);
|
||||
repositoryIds.push(...successIds);
|
||||
}
|
||||
|
||||
// Fall back to all repos if no mirrored/success repos
|
||||
if (repositoryIds.length === 0) {
|
||||
const { ids: allIds } = await getRepositoryIds(request, appCookies);
|
||||
repositoryIds.push(...allIds);
|
||||
}
|
||||
|
||||
console.log(
|
||||
`[Backup] Found ${repositoryIds.length} repos to re-sync: ` +
|
||||
repos.map((r: any) => r.name).join(", "),
|
||||
);
|
||||
|
||||
expect(
|
||||
repositoryIds.length,
|
||||
"Need at least one repo to test backup",
|
||||
).toBeGreaterThan(0);
|
||||
|
||||
// Trigger sync-repo — this calls syncGiteaRepoEnhanced which checks
|
||||
// shouldCreatePreSyncBackup and creates bundles before syncing
|
||||
const status = await triggerSyncRepo(
|
||||
request,
|
||||
appCookies,
|
||||
repositoryIds,
|
||||
25_000,
|
||||
);
|
||||
console.log(`[Backup] Sync-repo response: ${status}`);
|
||||
expect(status, "Sync-repo should accept request").toBeLessThan(500);
|
||||
});
|
||||
|
||||
// ── B4 ─────────────────────────────────────────────────────────────────────
|
||||
|
||||
test("Step B4: Verify backup-related activity in logs", async ({
|
||||
request,
|
||||
}) => {
|
||||
if (!appCookies) {
|
||||
appCookies = await getAppSessionCookies(request);
|
||||
}
|
||||
|
||||
const activitiesResp = await request.get(`${APP_URL}/api/activities`, {
|
||||
headers: { Cookie: appCookies },
|
||||
failOnStatusCode: false,
|
||||
});
|
||||
|
||||
if (!activitiesResp.ok()) {
|
||||
console.log(
|
||||
`[Backup] Could not fetch activities: ${activitiesResp.status()}`,
|
||||
);
|
||||
return;
|
||||
}
|
||||
|
||||
const activities = await activitiesResp.json();
|
||||
const jobs: any[] = Array.isArray(activities)
|
||||
? activities
|
||||
: (activities.jobs ?? activities.activities ?? []);
|
||||
|
||||
// Look for backup / snapshot related messages
|
||||
const backupJobs = jobs.filter(
|
||||
(j: any) =>
|
||||
j.message?.toLowerCase().includes("snapshot") ||
|
||||
j.message?.toLowerCase().includes("backup") ||
|
||||
j.details?.toLowerCase().includes("snapshot") ||
|
||||
j.details?.toLowerCase().includes("backup") ||
|
||||
j.details?.toLowerCase().includes("bundle"),
|
||||
);
|
||||
|
||||
console.log(
|
||||
`[Backup] Backup-related activity entries: ${backupJobs.length}`,
|
||||
);
|
||||
for (const j of backupJobs.slice(0, 10)) {
|
||||
console.log(
|
||||
`[Backup] • ${j.repositoryName ?? "?"}: ${j.status} — ${j.message ?? ""} | ${(j.details ?? "").substring(0, 120)}`,
|
||||
);
|
||||
}
|
||||
|
||||
// We expect at least some backup-related entries if repos were mirrored
|
||||
const orgRepos = await giteaApi.listOrgRepos(GITEA_MIRROR_ORG);
|
||||
if (orgRepos.length > 0) {
|
||||
// With repos in Gitea, the backup system should have tried to create
|
||||
// snapshots. All snapshots should succeed.
|
||||
expect(
|
||||
backupJobs.length,
|
||||
"Expected at least one backup/snapshot activity entry when " +
|
||||
"backupStrategy is 'always' and repos exist in Gitea",
|
||||
).toBeGreaterThan(0);
|
||||
|
||||
// Check for any failed backups
|
||||
const failedBackups = backupJobs.filter(
|
||||
(j: any) =>
|
||||
j.status === "failed" &&
|
||||
(j.message?.toLowerCase().includes("snapshot") ||
|
||||
j.details?.toLowerCase().includes("snapshot")),
|
||||
);
|
||||
expect(
|
||||
failedBackups.length,
|
||||
`Expected all backups to succeed, but ${failedBackups.length} backup(s) failed. ` +
|
||||
`Failed: ${failedBackups.map((j: any) => `${j.repositoryName}: ${j.details?.substring(0, 100)}`).join("; ")}`,
|
||||
).toBe(0);
|
||||
|
||||
console.log(
|
||||
`[Backup] Confirmed: backup system was invoked for ${backupJobs.length} repos`,
|
||||
);
|
||||
}
|
||||
|
||||
// Dump all recent jobs for debugging visibility
|
||||
console.log(`[Backup] All recent jobs (last 20):`);
|
||||
for (const j of jobs.slice(0, 20)) {
|
||||
console.log(
|
||||
`[Backup] - [${j.status}] ${j.repositoryName ?? "?"}: ${j.message ?? ""} ` +
|
||||
`${j.details ? `(${j.details.substring(0, 80)})` : ""}`,
|
||||
);
|
||||
}
|
||||
});
|
||||
|
||||
// ── B5 ─────────────────────────────────────────────────────────────────────
|
||||
|
||||
test("Step B5: Enable blockSyncOnBackupFailure and verify behavior", async ({
|
||||
request,
|
||||
}) => {
|
||||
if (!appCookies) {
|
||||
appCookies = await getAppSessionCookies(request);
|
||||
}
|
||||
|
||||
const giteaToken = giteaApi.getTokenValue();
|
||||
|
||||
// Update config to block sync on backup failure
|
||||
await saveConfig(request, giteaToken, appCookies, {
|
||||
giteaConfig: {
|
||||
backupStrategy: "always",
|
||||
blockSyncOnBackupFailure: true,
|
||||
backupRetentionCount: 5,
|
||||
backupDirectory: "data/repo-backups",
|
||||
},
|
||||
});
|
||||
console.log("[Backup] Config updated: blockSyncOnBackupFailure=true");
|
||||
|
||||
// Verify the flag persisted
|
||||
const configResp = await request.get(`${APP_URL}/api/config`, {
|
||||
headers: { Cookie: appCookies },
|
||||
failOnStatusCode: false,
|
||||
});
|
||||
if (configResp.ok()) {
|
||||
const configData = await configResp.json();
|
||||
const giteaCfg = configData.giteaConfig ?? configData.gitea ?? {};
|
||||
expect(giteaCfg.blockSyncOnBackupFailure).toBe(true);
|
||||
console.log(
|
||||
`[Backup] Verified: blockSyncOnBackupFailure=${giteaCfg.blockSyncOnBackupFailure}`,
|
||||
);
|
||||
}
|
||||
});
|
||||
|
||||
// ── B6 ─────────────────────────────────────────────────────────────────────
|
||||
|
||||
test("Step B6: Disable backup and verify config resets", async ({
|
||||
request,
|
||||
}) => {
|
||||
if (!appCookies) {
|
||||
appCookies = await getAppSessionCookies(request);
|
||||
}
|
||||
|
||||
const giteaToken = giteaApi.getTokenValue();
|
||||
|
||||
// Disable backup
|
||||
await saveConfig(request, giteaToken, appCookies, {
|
||||
giteaConfig: {
|
||||
backupStrategy: "disabled",
|
||||
blockSyncOnBackupFailure: false,
|
||||
},
|
||||
});
|
||||
|
||||
const configResp = await request.get(`${APP_URL}/api/config`, {
|
||||
headers: { Cookie: appCookies },
|
||||
failOnStatusCode: false,
|
||||
});
|
||||
if (configResp.ok()) {
|
||||
const configData = await configResp.json();
|
||||
const giteaCfg = configData.giteaConfig ?? configData.gitea ?? {};
|
||||
console.log(
|
||||
`[Backup] After disable: backupStrategy=${giteaCfg.backupStrategy}`,
|
||||
);
|
||||
}
|
||||
console.log("[Backup] Backup configuration test complete");
|
||||
});
|
||||
});
|
||||
864
tests/e2e/04-force-push.spec.ts
Normal file
864
tests/e2e/04-force-push.spec.ts
Normal file
@@ -0,0 +1,864 @@
|
||||
/**
|
||||
* 04 – Force-push simulation and backup verification.
|
||||
*
|
||||
* This is the critical test that proves data loss can happen from a
|
||||
* force-push on the source repo, and verifies that the backup system
|
||||
* (when enabled) preserves the old state.
|
||||
*
|
||||
* Scenario:
|
||||
* 1. Confirm my-project is already mirrored with known commits / content
|
||||
* 2. Record the pre-force-push state (branch SHAs, commit messages, file content)
|
||||
* 3. Rewrite history in the source bare repo (simulate a force-push)
|
||||
* 4. Trigger Gitea mirror-sync WITHOUT backup
|
||||
* 5. Verify Gitea now reflects the rewritten history — old commits are GONE
|
||||
* 6. Restore the source repo, re-mirror, then enable backup
|
||||
* 7. Force-push again and sync WITH backup enabled
|
||||
* 8. Verify backup activity was recorded (snapshot attempted before sync)
|
||||
*
|
||||
* The source bare repos live on the host filesystem at
|
||||
* tests/e2e/git-repos/<owner>/<name>.git and are served read-only into the
|
||||
* git-server container. Because the bind-mount is :ro in docker-compose,
|
||||
* we modify the repos on the host and Gitea's dumb-HTTP clone picks up
|
||||
* the changes on the next fetch.
|
||||
*
|
||||
* Prerequisites: 02-mirror-workflow.spec.ts must have run first so that
|
||||
* my-project is already mirrored into Gitea.
|
||||
*/
|
||||
|
||||
import { execSync } from "node:child_process";
|
||||
import { existsSync, mkdirSync, rmSync, writeFileSync } from "node:fs";
|
||||
import { join, resolve, dirname } from "node:path";
|
||||
import { fileURLToPath } from "node:url";
|
||||
import { test, expect } from "@playwright/test";
|
||||
import {
|
||||
APP_URL,
|
||||
GITEA_URL,
|
||||
GITEA_MIRROR_ORG,
|
||||
GiteaAPI,
|
||||
getAppSessionCookies,
|
||||
saveConfig,
|
||||
waitFor,
|
||||
getRepositoryIds,
|
||||
triggerSyncRepo,
|
||||
} from "./helpers";
|
||||
|
||||
// ─── Paths ───────────────────────────────────────────────────────────────────
|
||||
|
||||
const E2E_DIR = resolve(dirname(fileURLToPath(import.meta.url)));
|
||||
const GIT_REPOS_DIR = join(E2E_DIR, "git-repos");
|
||||
const MY_PROJECT_BARE = join(GIT_REPOS_DIR, "e2e-test-user", "my-project.git");
|
||||
|
||||
// ─── Git helpers ─────────────────────────────────────────────────────────────
|
||||
|
||||
/** Run a git command in a given directory. */
|
||||
function git(args: string, cwd: string): string {
|
||||
try {
|
||||
return execSync(`git ${args}`, {
|
||||
cwd,
|
||||
encoding: "utf-8",
|
||||
stdio: ["pipe", "pipe", "pipe"],
|
||||
env: {
|
||||
...process.env,
|
||||
GIT_AUTHOR_NAME: "Force Push Bot",
|
||||
GIT_AUTHOR_EMAIL: "force-push@test.local",
|
||||
GIT_COMMITTER_NAME: "Force Push Bot",
|
||||
GIT_COMMITTER_EMAIL: "force-push@test.local",
|
||||
},
|
||||
}).trim();
|
||||
} catch (err: any) {
|
||||
const stderr = err.stderr?.toString() ?? "";
|
||||
const stdout = err.stdout?.toString() ?? "";
|
||||
throw new Error(
|
||||
`git ${args} failed in ${cwd}:\n${stderr || stdout || err.message}`,
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Get the SHA of a ref in a bare repository.
|
||||
* Uses `git rev-parse` so it works for branches and tags.
|
||||
*/
|
||||
function getRefSha(bareRepo: string, ref: string): string {
|
||||
return git(`rev-parse ${ref}`, bareRepo);
|
||||
}
|
||||
|
||||
/**
|
||||
* Clone the bare repo to a temporary working copy, execute a callback that
|
||||
* mutates the working copy, then force-push back to the bare repo and
|
||||
* update server-info for dumb-HTTP serving.
|
||||
*/
|
||||
function mutateSourceRepo(
|
||||
bareRepo: string,
|
||||
tmpName: string,
|
||||
mutate: (workDir: string) => void,
|
||||
): void {
|
||||
const tmpDir = join(GIT_REPOS_DIR, ".work-force-push", tmpName);
|
||||
rmSync(tmpDir, { recursive: true, force: true });
|
||||
mkdirSync(join(GIT_REPOS_DIR, ".work-force-push"), { recursive: true });
|
||||
|
||||
try {
|
||||
// Clone from the bare repo
|
||||
git(`clone "${bareRepo}" "${tmpDir}"`, GIT_REPOS_DIR);
|
||||
git("config user.name 'Force Push Bot'", tmpDir);
|
||||
git("config user.email 'force-push@test.local'", tmpDir);
|
||||
|
||||
// Let the caller rewrite history
|
||||
mutate(tmpDir);
|
||||
|
||||
// Force-push all refs back to the bare repo
|
||||
git(`push --force --all "${bareRepo}"`, tmpDir);
|
||||
git(`push --force --tags "${bareRepo}"`, tmpDir);
|
||||
|
||||
// Update server-info so the dumb-HTTP server picks up the new refs
|
||||
git("update-server-info", bareRepo);
|
||||
} finally {
|
||||
rmSync(tmpDir, { recursive: true, force: true });
|
||||
}
|
||||
}
|
||||
|
||||
/** Helper to clean up the temporary working directory. */
|
||||
function cleanupWorkDir(): void {
|
||||
const workDir = join(GIT_REPOS_DIR, ".work-force-push");
|
||||
rmSync(workDir, { recursive: true, force: true });
|
||||
}
|
||||
|
||||
// ─── Tests ───────────────────────────────────────────────────────────────────
|
||||
|
||||
test.describe("E2E: Force-push simulation", () => {
|
||||
let giteaApi: GiteaAPI;
|
||||
let appCookies = "";
|
||||
|
||||
/** SHA of the main branch BEFORE we force-push. */
|
||||
let originalMainSha = "";
|
||||
/** The commit message of the HEAD commit before force-push. */
|
||||
let originalHeadMessage = "";
|
||||
/** Content of README.md before force-push. */
|
||||
let originalReadmeContent = "";
|
||||
/** Number of commits on main before force-push. */
|
||||
let originalCommitCount = 0;
|
||||
|
||||
test.beforeAll(async () => {
|
||||
giteaApi = new GiteaAPI(GITEA_URL);
|
||||
try {
|
||||
await giteaApi.createToken();
|
||||
} catch {
|
||||
console.log("[ForcePush] Could not create Gitea token");
|
||||
}
|
||||
});
|
||||
|
||||
test.afterAll(async () => {
|
||||
cleanupWorkDir();
|
||||
await giteaApi.dispose();
|
||||
});
|
||||
|
||||
// ── F0: Preconditions ────────────────────────────────────────────────────
|
||||
|
||||
test("F0: Confirm my-project is mirrored and record its state", async ({
|
||||
request,
|
||||
}) => {
|
||||
// Verify the source bare repo exists on the host
|
||||
expect(
|
||||
existsSync(MY_PROJECT_BARE),
|
||||
`Bare repo should exist at ${MY_PROJECT_BARE}`,
|
||||
).toBeTruthy();
|
||||
|
||||
// Verify it is mirrored in Gitea
|
||||
const repo = await giteaApi.getRepo(GITEA_MIRROR_ORG, "my-project");
|
||||
expect(repo, "my-project should exist in Gitea").toBeTruthy();
|
||||
console.log(
|
||||
`[ForcePush] my-project in Gitea: mirror=${repo.mirror}, ` +
|
||||
`default_branch=${repo.default_branch}`,
|
||||
);
|
||||
|
||||
// Record the current state of main in Gitea
|
||||
const mainBranch = await giteaApi.getBranch(
|
||||
GITEA_MIRROR_ORG,
|
||||
"my-project",
|
||||
"main",
|
||||
);
|
||||
expect(mainBranch, "main branch should exist").toBeTruthy();
|
||||
originalMainSha = mainBranch.commit.id;
|
||||
originalHeadMessage =
|
||||
mainBranch.commit.message?.trim() ?? "(unknown message)";
|
||||
console.log(
|
||||
`[ForcePush] Original main HEAD: ${originalMainSha.substring(0, 12)} ` +
|
||||
`"${originalHeadMessage}"`,
|
||||
);
|
||||
|
||||
// Record commit count
|
||||
const commits = await giteaApi.listCommits(GITEA_MIRROR_ORG, "my-project", {
|
||||
limit: 50,
|
||||
});
|
||||
originalCommitCount = commits.length;
|
||||
console.log(
|
||||
`[ForcePush] Original commit count on main: ${originalCommitCount}`,
|
||||
);
|
||||
|
||||
// Record README content
|
||||
const readme = await giteaApi.getFileContent(
|
||||
GITEA_MIRROR_ORG,
|
||||
"my-project",
|
||||
"README.md",
|
||||
);
|
||||
originalReadmeContent = readme ?? "";
|
||||
expect(originalReadmeContent).toContain("My Project");
|
||||
console.log(
|
||||
`[ForcePush] Original README length: ${originalReadmeContent.length} chars`,
|
||||
);
|
||||
|
||||
// Also verify the source bare repo matches
|
||||
const sourceSha = getRefSha(MY_PROJECT_BARE, "refs/heads/main");
|
||||
console.log(
|
||||
`[ForcePush] Source bare main SHA: ${sourceSha.substring(0, 12)}`,
|
||||
);
|
||||
// They may differ slightly if Gitea hasn't synced the very latest, but
|
||||
// the important thing is that both exist.
|
||||
});
|
||||
|
||||
// ── F1: Rewrite history on the source repo ───────────────────────────────
|
||||
|
||||
test("F1: Force-push rewritten history to source repo", async () => {
|
||||
const shaBeforeRewrite = getRefSha(MY_PROJECT_BARE, "refs/heads/main");
|
||||
console.log(
|
||||
`[ForcePush] Source main before rewrite: ${shaBeforeRewrite.substring(0, 12)}`,
|
||||
);
|
||||
|
||||
mutateSourceRepo(MY_PROJECT_BARE, "my-project-rewrite", (workDir) => {
|
||||
// We're on the main branch.
|
||||
// Rewrite history: remove the last commit (the LICENSE commit) via
|
||||
// reset --hard HEAD~1, then add a completely different commit.
|
||||
git("checkout main", workDir);
|
||||
|
||||
// Record what HEAD is for logging
|
||||
const headBefore = git("log --oneline -1", workDir);
|
||||
console.log(`[ForcePush] Working copy HEAD before reset: ${headBefore}`);
|
||||
|
||||
// Hard reset to remove the last commit (this drops "Add MIT license")
|
||||
git("reset --hard HEAD~1", workDir);
|
||||
|
||||
const headAfterReset = git("log --oneline -1", workDir);
|
||||
console.log(`[ForcePush] After reset HEAD~1: ${headAfterReset}`);
|
||||
|
||||
// Write a replacement commit with different content (simulates someone
|
||||
// rewriting history with different changes)
|
||||
writeFileSync(
|
||||
join(workDir, "README.md"),
|
||||
"# My Project\n\nThis README was FORCE-PUSHED.\n\nOriginal history has been rewritten.\n",
|
||||
);
|
||||
writeFileSync(
|
||||
join(workDir, "FORCE_PUSH_MARKER.txt"),
|
||||
`Force-pushed at ${new Date().toISOString()}\n`,
|
||||
);
|
||||
git("add -A", workDir);
|
||||
|
||||
execSync('git commit -m "FORCE PUSH: Rewritten history"', {
|
||||
cwd: workDir,
|
||||
encoding: "utf-8",
|
||||
stdio: ["pipe", "pipe", "pipe"],
|
||||
env: {
|
||||
...process.env,
|
||||
GIT_AUTHOR_NAME: "Force Push Bot",
|
||||
GIT_AUTHOR_EMAIL: "force-push@test.local",
|
||||
GIT_AUTHOR_DATE: "2024-06-15T12:00:00+00:00",
|
||||
GIT_COMMITTER_NAME: "Force Push Bot",
|
||||
GIT_COMMITTER_EMAIL: "force-push@test.local",
|
||||
GIT_COMMITTER_DATE: "2024-06-15T12:00:00+00:00",
|
||||
},
|
||||
});
|
||||
|
||||
const headAfterRewrite = git("log --oneline -3", workDir);
|
||||
console.log(`[ForcePush] After rewrite (last 3):\n${headAfterRewrite}`);
|
||||
});
|
||||
|
||||
const shaAfterRewrite = getRefSha(MY_PROJECT_BARE, "refs/heads/main");
|
||||
console.log(
|
||||
`[ForcePush] Source main after rewrite: ${shaAfterRewrite.substring(0, 12)}`,
|
||||
);
|
||||
|
||||
// The SHA must have changed — this proves the force-push happened
|
||||
expect(
|
||||
shaAfterRewrite,
|
||||
"Source repo main SHA should change after force-push",
|
||||
).not.toBe(originalMainSha);
|
||||
|
||||
// Verify the old SHA is no longer reachable on main
|
||||
const logOutput = git("log --oneline main", MY_PROJECT_BARE);
|
||||
expect(
|
||||
logOutput,
|
||||
"Rewritten history should NOT contain the old head commit",
|
||||
).toContain("FORCE PUSH");
|
||||
});
|
||||
|
||||
// ── F2: Sync to Gitea WITHOUT backup ─────────────────────────────────────
|
||||
|
||||
test("F2: Disable backup and sync force-pushed repo to Gitea", async ({
|
||||
request,
|
||||
}) => {
|
||||
appCookies = await getAppSessionCookies(request);
|
||||
|
||||
const giteaToken = giteaApi.getTokenValue();
|
||||
expect(giteaToken).toBeTruthy();
|
||||
|
||||
// Ensure backup is disabled for this test
|
||||
await saveConfig(request, giteaToken, appCookies, {
|
||||
giteaConfig: {
|
||||
backupStrategy: "disabled",
|
||||
blockSyncOnBackupFailure: false,
|
||||
},
|
||||
});
|
||||
console.log("[ForcePush] Backup disabled for unprotected sync test");
|
||||
|
||||
// Trigger Gitea's mirror-sync directly via the Gitea API.
|
||||
// This is more reliable than going through the app for this test because
|
||||
// the app's sync-repo endpoint involves extra processing. We want to test
|
||||
// the raw effect of Gitea pulling the rewritten refs.
|
||||
const synced = await giteaApi.triggerMirrorSync(
|
||||
GITEA_MIRROR_ORG,
|
||||
"my-project",
|
||||
);
|
||||
console.log(`[ForcePush] Gitea mirror-sync triggered: ${synced}`);
|
||||
|
||||
// Wait for Gitea to pull the new refs from the git-server
|
||||
console.log("[ForcePush] Waiting for Gitea to pull rewritten refs...");
|
||||
await new Promise((r) => setTimeout(r, 15_000));
|
||||
});
|
||||
|
||||
// ── F3: Verify Gitea reflects the rewritten history ──────────────────────
|
||||
|
||||
test("F3: Verify Gitea has the force-pushed content (old history GONE)", async () => {
|
||||
// Poll until Gitea picks up the new HEAD
|
||||
await waitFor(
|
||||
async () => {
|
||||
const branch = await giteaApi.getBranch(
|
||||
GITEA_MIRROR_ORG,
|
||||
"my-project",
|
||||
"main",
|
||||
);
|
||||
if (!branch) return false;
|
||||
return branch.commit.id !== originalMainSha;
|
||||
},
|
||||
{
|
||||
timeout: 60_000,
|
||||
interval: 5_000,
|
||||
label: "Gitea main branch updates to new SHA",
|
||||
},
|
||||
);
|
||||
|
||||
// Read the new state
|
||||
const newMainBranch = await giteaApi.getBranch(
|
||||
GITEA_MIRROR_ORG,
|
||||
"my-project",
|
||||
"main",
|
||||
);
|
||||
expect(newMainBranch).toBeTruthy();
|
||||
const newSha = newMainBranch.commit.id;
|
||||
const newMsg = newMainBranch.commit.message?.trim() ?? "";
|
||||
console.log(
|
||||
`[ForcePush] New main HEAD: ${newSha.substring(0, 12)} "${newMsg}"`,
|
||||
);
|
||||
|
||||
// The SHA MUST be different from the original
|
||||
expect(
|
||||
newSha,
|
||||
"Gitea main SHA should have changed after force-push sync",
|
||||
).not.toBe(originalMainSha);
|
||||
|
||||
// The new commit message should be the force-pushed one
|
||||
expect(newMsg).toContain("FORCE PUSH");
|
||||
|
||||
// Verify the force-push marker file now exists in Gitea
|
||||
const markerContent = await giteaApi.getFileContent(
|
||||
GITEA_MIRROR_ORG,
|
||||
"my-project",
|
||||
"FORCE_PUSH_MARKER.txt",
|
||||
);
|
||||
expect(
|
||||
markerContent,
|
||||
"FORCE_PUSH_MARKER.txt should appear after sync",
|
||||
).toBeTruthy();
|
||||
console.log(
|
||||
`[ForcePush] Marker file present: ${markerContent?.substring(0, 40)}...`,
|
||||
);
|
||||
|
||||
// Verify the README was overwritten
|
||||
const newReadme = await giteaApi.getFileContent(
|
||||
GITEA_MIRROR_ORG,
|
||||
"my-project",
|
||||
"README.md",
|
||||
);
|
||||
expect(newReadme).toContain("FORCE-PUSHED");
|
||||
expect(newReadme).not.toBe(originalReadmeContent);
|
||||
console.log("[ForcePush] README.md confirms overwritten content");
|
||||
|
||||
// Verify the LICENSE file is GONE (it was in the dropped commit)
|
||||
const licenseContent = await giteaApi.getFileContent(
|
||||
GITEA_MIRROR_ORG,
|
||||
"my-project",
|
||||
"LICENSE",
|
||||
);
|
||||
expect(
|
||||
licenseContent,
|
||||
"LICENSE should be GONE after force-push removed that commit",
|
||||
).toBeNull();
|
||||
console.log("[ForcePush] ✗ LICENSE file is GONE — data loss confirmed");
|
||||
|
||||
// Verify the old commit SHA is no longer accessible
|
||||
const oldCommit = await giteaApi.getCommit(
|
||||
GITEA_MIRROR_ORG,
|
||||
"my-project",
|
||||
originalMainSha,
|
||||
);
|
||||
// Gitea may or may not GC the unreachable commit immediately, so this
|
||||
// is informational rather than a hard assertion.
|
||||
if (oldCommit) {
|
||||
console.log(
|
||||
`[ForcePush] Old commit ${originalMainSha.substring(0, 12)} is ` +
|
||||
`still in Gitea's object store (not yet GC'd)`,
|
||||
);
|
||||
} else {
|
||||
console.log(
|
||||
`[ForcePush] Old commit ${originalMainSha.substring(0, 12)} is ` +
|
||||
`no longer accessible — data loss complete`,
|
||||
);
|
||||
}
|
||||
|
||||
// Check commit count changed
|
||||
const newCommits = await giteaApi.listCommits(
|
||||
GITEA_MIRROR_ORG,
|
||||
"my-project",
|
||||
{ limit: 50 },
|
||||
);
|
||||
console.log(
|
||||
`[ForcePush] Commit count: was ${originalCommitCount}, now ${newCommits.length}`,
|
||||
);
|
||||
// The rewrite dropped one commit and added one, so the count should differ
|
||||
// or at minimum the commit list should not contain the old head message.
|
||||
const commitMessages = newCommits.map(
|
||||
(c: any) => c.commit?.message?.trim() ?? "",
|
||||
);
|
||||
expect(
|
||||
commitMessages.some((m: string) => m.includes("FORCE PUSH")),
|
||||
"New commit list should contain the force-pushed commit",
|
||||
).toBeTruthy();
|
||||
|
||||
console.log(
|
||||
"\n[ForcePush] ════════════════════════════════════════════════════",
|
||||
);
|
||||
console.log(
|
||||
"[ForcePush] CONFIRMED: Force-push without backup = DATA LOSS",
|
||||
);
|
||||
console.log(
|
||||
"[ForcePush] The LICENSE file and original HEAD commit are gone.",
|
||||
);
|
||||
console.log(
|
||||
"[ForcePush] ════════════════════════════════════════════════════\n",
|
||||
);
|
||||
});
|
||||
|
||||
// ── F4: Restore source, re-mirror, then test WITH backup ─────────────────
|
||||
|
||||
test("F4: Restore source repo to a good state and re-mirror", async ({
|
||||
request,
|
||||
}) => {
|
||||
// To test the backup path we need a clean slate. Re-create the original
|
||||
// my-project content in the source repo so it has known good history.
|
||||
mutateSourceRepo(MY_PROJECT_BARE, "my-project-restore", (workDir) => {
|
||||
git("checkout main", workDir);
|
||||
|
||||
// Remove the force-push marker
|
||||
try {
|
||||
execSync("rm -f FORCE_PUSH_MARKER.txt", { cwd: workDir });
|
||||
} catch {
|
||||
// may not exist
|
||||
}
|
||||
|
||||
// Restore README
|
||||
writeFileSync(
|
||||
join(workDir, "README.md"),
|
||||
"# My Project\n\nA sample project for E2E testing.\n\n" +
|
||||
"## Features\n- Greeting module\n- Math utilities\n",
|
||||
);
|
||||
|
||||
// Restore LICENSE
|
||||
writeFileSync(
|
||||
join(workDir, "LICENSE"),
|
||||
"MIT License\n\nCopyright (c) 2024 E2E Test\n",
|
||||
);
|
||||
|
||||
git("add -A", workDir);
|
||||
execSync(
|
||||
'git commit -m "Restore original content after force-push test"',
|
||||
{
|
||||
cwd: workDir,
|
||||
encoding: "utf-8",
|
||||
stdio: ["pipe", "pipe", "pipe"],
|
||||
env: {
|
||||
...process.env,
|
||||
GIT_AUTHOR_NAME: "E2E Test Bot",
|
||||
GIT_AUTHOR_EMAIL: "e2e-bot@test.local",
|
||||
GIT_COMMITTER_NAME: "E2E Test Bot",
|
||||
GIT_COMMITTER_EMAIL: "e2e-bot@test.local",
|
||||
},
|
||||
},
|
||||
);
|
||||
|
||||
const newHead = git("log --oneline -1", workDir);
|
||||
console.log(`[ForcePush] Restored source HEAD: ${newHead}`);
|
||||
});
|
||||
|
||||
// Sync Gitea to pick up the restored state
|
||||
const synced = await giteaApi.triggerMirrorSync(
|
||||
GITEA_MIRROR_ORG,
|
||||
"my-project",
|
||||
);
|
||||
console.log(`[ForcePush] Gitea mirror-sync for restore: ${synced}`);
|
||||
await new Promise((r) => setTimeout(r, 15_000));
|
||||
|
||||
// Verify Gitea has the restored content
|
||||
await waitFor(
|
||||
async () => {
|
||||
const readme = await giteaApi.getFileContent(
|
||||
GITEA_MIRROR_ORG,
|
||||
"my-project",
|
||||
"README.md",
|
||||
);
|
||||
return readme !== null && readme.includes("Features");
|
||||
},
|
||||
{
|
||||
timeout: 60_000,
|
||||
interval: 5_000,
|
||||
label: "Gitea picks up restored content",
|
||||
},
|
||||
);
|
||||
|
||||
const license = await giteaApi.getFileContent(
|
||||
GITEA_MIRROR_ORG,
|
||||
"my-project",
|
||||
"LICENSE",
|
||||
);
|
||||
expect(license, "LICENSE should be restored").toBeTruthy();
|
||||
console.log("[ForcePush] Gitea restored to good state");
|
||||
|
||||
// Record the new "good" SHA for the next force-push test
|
||||
const restoredBranch = await giteaApi.getBranch(
|
||||
GITEA_MIRROR_ORG,
|
||||
"my-project",
|
||||
"main",
|
||||
);
|
||||
originalMainSha = restoredBranch.commit.id;
|
||||
console.log(
|
||||
`[ForcePush] Restored main SHA: ${originalMainSha.substring(0, 12)}`,
|
||||
);
|
||||
});
|
||||
|
||||
// ── F5: Force-push AGAIN, this time with backup enabled ──────────────────
|
||||
|
||||
test("F5: Enable backup, force-push, and sync", async ({ request }) => {
|
||||
if (!appCookies) {
|
||||
appCookies = await getAppSessionCookies(request);
|
||||
}
|
||||
|
||||
const giteaToken = giteaApi.getTokenValue();
|
||||
|
||||
// Enable backup with "always" strategy
|
||||
await saveConfig(request, giteaToken, appCookies, {
|
||||
giteaConfig: {
|
||||
backupStrategy: "always",
|
||||
blockSyncOnBackupFailure: false, // don't block — we want to see both backup + sync happen
|
||||
backupRetentionCount: 5,
|
||||
backupDirectory: "data/repo-backups",
|
||||
},
|
||||
});
|
||||
console.log("[ForcePush] Backup enabled (strategy=always) for protected sync test");
|
||||
|
||||
// Force-push again
|
||||
mutateSourceRepo(MY_PROJECT_BARE, "my-project-rewrite2", (workDir) => {
|
||||
git("checkout main", workDir);
|
||||
|
||||
writeFileSync(
|
||||
join(workDir, "README.md"),
|
||||
"# My Project\n\nSECOND FORCE-PUSH — backup should have preserved old state.\n",
|
||||
);
|
||||
writeFileSync(
|
||||
join(workDir, "SECOND_FORCE_PUSH.txt"),
|
||||
`Second force-push at ${new Date().toISOString()}\n`,
|
||||
);
|
||||
// Remove LICENSE again to simulate destructive rewrite
|
||||
try {
|
||||
execSync("rm -f LICENSE", { cwd: workDir });
|
||||
} catch {
|
||||
// may not exist
|
||||
}
|
||||
git("add -A", workDir);
|
||||
execSync('git commit -m "SECOND FORCE PUSH: backup should catch this"', {
|
||||
cwd: workDir,
|
||||
encoding: "utf-8",
|
||||
stdio: ["pipe", "pipe", "pipe"],
|
||||
env: {
|
||||
...process.env,
|
||||
GIT_AUTHOR_NAME: "Force Push Bot",
|
||||
GIT_AUTHOR_EMAIL: "force-push@test.local",
|
||||
GIT_COMMITTER_NAME: "Force Push Bot",
|
||||
GIT_COMMITTER_EMAIL: "force-push@test.local",
|
||||
},
|
||||
});
|
||||
});
|
||||
|
||||
console.log("[ForcePush] Second force-push applied to source repo");
|
||||
|
||||
// Use the app's sync-repo to trigger the sync (this goes through
|
||||
// syncGiteaRepoEnhanced which runs the backup code path)
|
||||
const { ids: repoIds } = await getRepositoryIds(request, appCookies);
|
||||
// Find the my-project repo ID
|
||||
const dashResp = await request.get(`${APP_URL}/api/dashboard`, {
|
||||
headers: { Cookie: appCookies },
|
||||
failOnStatusCode: false,
|
||||
});
|
||||
let myProjectId = "";
|
||||
if (dashResp.ok()) {
|
||||
const data = await dashResp.json();
|
||||
const repos: any[] = data.repositories ?? [];
|
||||
const myProj = repos.find((r: any) => r.name === "my-project");
|
||||
if (myProj) myProjectId = myProj.id;
|
||||
}
|
||||
|
||||
if (myProjectId) {
|
||||
console.log(
|
||||
`[ForcePush] Triggering app sync-repo for my-project (${myProjectId})`,
|
||||
);
|
||||
const status = await triggerSyncRepo(
|
||||
request,
|
||||
appCookies,
|
||||
[myProjectId],
|
||||
25_000,
|
||||
);
|
||||
console.log(`[ForcePush] App sync-repo response: ${status}`);
|
||||
} else {
|
||||
// Fallback: trigger via Gitea API directly
|
||||
console.log(
|
||||
"[ForcePush] Could not find my-project ID, using Gitea API directly",
|
||||
);
|
||||
await giteaApi.triggerMirrorSync(GITEA_MIRROR_ORG, "my-project");
|
||||
await new Promise((r) => setTimeout(r, 15_000));
|
||||
}
|
||||
});
|
||||
|
||||
// ── F6: Verify Gitea picked up the second force-push ─────────────────────
|
||||
|
||||
test("F6: Verify Gitea reflects second force-push", async () => {
|
||||
await waitFor(
|
||||
async () => {
|
||||
const branch = await giteaApi.getBranch(
|
||||
GITEA_MIRROR_ORG,
|
||||
"my-project",
|
||||
"main",
|
||||
);
|
||||
if (!branch) return false;
|
||||
return branch.commit.id !== originalMainSha;
|
||||
},
|
||||
{
|
||||
timeout: 60_000,
|
||||
interval: 5_000,
|
||||
label: "Gitea main branch updates after second force-push",
|
||||
},
|
||||
);
|
||||
|
||||
const newBranch = await giteaApi.getBranch(
|
||||
GITEA_MIRROR_ORG,
|
||||
"my-project",
|
||||
"main",
|
||||
);
|
||||
const newSha = newBranch.commit.id;
|
||||
console.log(
|
||||
`[ForcePush] After 2nd force-push: main=${newSha.substring(0, 12)}, ` +
|
||||
`msg="${newBranch.commit.message?.trim()}"`,
|
||||
);
|
||||
expect(newSha).not.toBe(originalMainSha);
|
||||
|
||||
// Verify the second force-push marker
|
||||
const marker = await giteaApi.getFileContent(
|
||||
GITEA_MIRROR_ORG,
|
||||
"my-project",
|
||||
"SECOND_FORCE_PUSH.txt",
|
||||
);
|
||||
expect(marker, "Second force-push marker should exist").toBeTruthy();
|
||||
|
||||
// LICENSE should be gone again
|
||||
const license = await giteaApi.getFileContent(
|
||||
GITEA_MIRROR_ORG,
|
||||
"my-project",
|
||||
"LICENSE",
|
||||
);
|
||||
expect(license, "LICENSE gone again after 2nd force-push").toBeNull();
|
||||
console.log("[ForcePush] Second force-push verified in Gitea");
|
||||
});
|
||||
|
||||
// ── F7: Verify backup activity was logged for the second force-push ──────
|
||||
|
||||
test("F7: Verify backup activity was recorded for protected sync", async ({
|
||||
request,
|
||||
}) => {
|
||||
if (!appCookies) {
|
||||
appCookies = await getAppSessionCookies(request);
|
||||
}
|
||||
|
||||
const activitiesResp = await request.get(`${APP_URL}/api/activities`, {
|
||||
headers: { Cookie: appCookies },
|
||||
failOnStatusCode: false,
|
||||
});
|
||||
|
||||
if (!activitiesResp.ok()) {
|
||||
console.log(
|
||||
`[ForcePush] Could not fetch activities: ${activitiesResp.status()}`,
|
||||
);
|
||||
return;
|
||||
}
|
||||
|
||||
const activities = await activitiesResp.json();
|
||||
const jobs: any[] = Array.isArray(activities)
|
||||
? activities
|
||||
: (activities.jobs ?? activities.activities ?? []);
|
||||
|
||||
// Filter to backup/snapshot entries for my-project
|
||||
const backupJobs = jobs.filter(
|
||||
(j: any) =>
|
||||
(j.repositoryName === "my-project" ||
|
||||
j.repositoryName === "my-project") &&
|
||||
(j.message?.toLowerCase().includes("snapshot") ||
|
||||
j.message?.toLowerCase().includes("backup") ||
|
||||
j.details?.toLowerCase().includes("snapshot") ||
|
||||
j.details?.toLowerCase().includes("backup") ||
|
||||
j.details?.toLowerCase().includes("bundle")),
|
||||
);
|
||||
|
||||
console.log(
|
||||
`[ForcePush] Backup activity for my-project: ${backupJobs.length} entries`,
|
||||
);
|
||||
for (const j of backupJobs) {
|
||||
console.log(
|
||||
`[ForcePush] • [${j.status}] ${j.message ?? ""} | ${(j.details ?? "").substring(0, 100)}`,
|
||||
);
|
||||
}
|
||||
|
||||
// The backup system should have been invoked and must succeed.
|
||||
expect(
|
||||
backupJobs.length,
|
||||
"At least one backup/snapshot activity should exist for my-project " +
|
||||
"when backupStrategy is 'always'",
|
||||
).toBeGreaterThan(0);
|
||||
|
||||
// Check whether any backups actually succeeded
|
||||
const successfulBackups = backupJobs.filter(
|
||||
(j: any) =>
|
||||
j.status === "syncing" ||
|
||||
j.message?.includes("Snapshot created") ||
|
||||
j.details?.includes("Pre-sync snapshot created"),
|
||||
);
|
||||
const failedBackups = backupJobs.filter(
|
||||
(j: any) =>
|
||||
j.status === "failed" &&
|
||||
(j.message?.includes("Snapshot failed") ||
|
||||
j.details?.includes("snapshot failed")),
|
||||
);
|
||||
|
||||
if (successfulBackups.length > 0) {
|
||||
console.log(
|
||||
`[ForcePush] ✓ ${successfulBackups.length} backup(s) SUCCEEDED — ` +
|
||||
`old state was preserved in bundle`,
|
||||
);
|
||||
}
|
||||
if (failedBackups.length > 0) {
|
||||
console.log(
|
||||
`[ForcePush] ⚠ ${failedBackups.length} backup(s) FAILED`,
|
||||
);
|
||||
// Extract and log the first failure reason for visibility
|
||||
const firstFailure = failedBackups[0];
|
||||
console.log(
|
||||
`[ForcePush] Failure reason: ${firstFailure.details?.substring(0, 200)}`,
|
||||
);
|
||||
}
|
||||
|
||||
console.log(
|
||||
"[ForcePush] ════════════════════════════════════════════════════",
|
||||
);
|
||||
if (successfulBackups.length > 0) {
|
||||
console.log(
|
||||
"[ForcePush] RESULT: Backup system PROTECTED against force-push",
|
||||
);
|
||||
} else {
|
||||
console.log("[ForcePush] RESULT: Backup system was INVOKED but FAILED.");
|
||||
}
|
||||
console.log(
|
||||
"[ForcePush] ════════════════════════════════════════════════════\n",
|
||||
);
|
||||
|
||||
// Fail the test if any backups failed
|
||||
expect(
|
||||
failedBackups.length,
|
||||
`Expected all backups to succeed, but ${failedBackups.length} backup(s) failed. ` +
|
||||
`First failure: ${failedBackups[0]?.details || "unknown error"}`,
|
||||
).toBe(0);
|
||||
});
|
||||
|
||||
// ── F8: Restore source repo for subsequent test suites ───────────────────
|
||||
|
||||
  // Restores the source repo to its pre-force-push content so that suites
  // running after this one see the expected files again.
  test("F8: Restore source repo to clean state for other tests", async () => {
    mutateSourceRepo(MY_PROJECT_BARE, "my-project-final-restore", (workDir) => {
      git("checkout main", workDir);

      // Remove force-push artifacts
      try {
        execSync("rm -f FORCE_PUSH_MARKER.txt SECOND_FORCE_PUSH.txt", {
          cwd: workDir,
        });
      } catch {
        // ignore — the marker files may already be absent
      }

      // Restore content
      writeFileSync(
        join(workDir, "README.md"),
        "# My Project\n\nA sample project for E2E testing.\n\n" +
          "## Features\n- Greeting module\n- Math utilities\n",
      );
      writeFileSync(
        join(workDir, "LICENSE"),
        "MIT License\n\nCopyright (c) 2024 E2E Test\n",
      );
      git("add -A", workDir);
      // --allow-empty: commit even if the restore produced no diff, so the
      // mirror always has a fresh commit to pull.
      execSync(
        'git commit --allow-empty -m "Final restore after force-push tests"',
        {
          cwd: workDir,
          encoding: "utf-8",
          stdio: ["pipe", "pipe", "pipe"],
          env: {
            ...process.env,
            GIT_AUTHOR_NAME: "E2E Test Bot",
            GIT_AUTHOR_EMAIL: "e2e-bot@test.local",
            GIT_COMMITTER_NAME: "E2E Test Bot",
            GIT_COMMITTER_EMAIL: "e2e-bot@test.local",
          },
        },
      );
    });

    // Sync Gitea
    await giteaApi.triggerMirrorSync(GITEA_MIRROR_ORG, "my-project");
    // Fixed wait for Gitea's asynchronous mirror pull to complete.
    await new Promise((r) => setTimeout(r, 10_000));

    // Verify restoration
    const license = await giteaApi.getFileContent(
      GITEA_MIRROR_ORG,
      "my-project",
      "LICENSE",
    );
    // Best-effort check only: Gitea's pull is async, so absence here is a
    // warning, not a failure.
    if (license) {
      console.log("[ForcePush] Source repo restored for subsequent tests");
    } else {
      console.log(
        "[ForcePush] Warning: restoration may not have synced yet (Gitea async)",
      );
    }
  });
|
||||
});
|
||||
342
tests/e2e/05-sync-verification.spec.ts
Normal file
342
tests/e2e/05-sync-verification.spec.ts
Normal file
@@ -0,0 +1,342 @@
|
||||
/**
|
||||
* 05 – Sync verification and cleanup.
|
||||
*
|
||||
* Exercises the dynamic aspects of the sync pipeline:
|
||||
* • Adding a repo to the fake GitHub at runtime and verifying the app
|
||||
* discovers it on the next sync
|
||||
* • Deep content-integrity checks on repos mirrored during earlier suites
|
||||
* • Resetting the fake GitHub store to its defaults
|
||||
*
|
||||
* Prerequisites: 02-mirror-workflow.spec.ts must have run so that repos
|
||||
* already exist in Gitea.
|
||||
*/
|
||||
|
||||
import { test, expect } from "@playwright/test";
|
||||
import {
|
||||
APP_URL,
|
||||
GITEA_URL,
|
||||
FAKE_GITHUB_URL,
|
||||
GITEA_MIRROR_ORG,
|
||||
GiteaAPI,
|
||||
getAppSessionCookies,
|
||||
} from "./helpers";
|
||||
|
||||
// Verifies that repos mirrored by earlier suites arrived in Gitea with their
// content, branches, and tags intact, and that the app discovers repos added
// to the fake GitHub at runtime.
test.describe("E2E: Sync verification", () => {
  let giteaApi: GiteaAPI;
  let appCookies = "";

  test.beforeAll(async () => {
    giteaApi = new GiteaAPI(GITEA_URL);
    try {
      await giteaApi.createToken();
    } catch {
      // Token creation is best-effort; later calls degrade gracefully.
      console.log("[SyncVerify] Could not create Gitea token; tests may skip");
    }
  });

  test.afterAll(async () => {
    await giteaApi.dispose();
  });

  // ── Dynamic repo addition ────────────────────────────────────────────────

  test("Verify fake GitHub management API can add repos dynamically", async ({
    request,
  }) => {
    const addResp = await request.post(`${FAKE_GITHUB_URL}/___mgmt/add-repo`, {
      data: {
        name: "dynamic-repo",
        owner_login: "e2e-test-user",
        description: "Dynamically added for E2E testing",
        language: "Rust",
      },
    });
    expect(addResp.ok()).toBeTruthy();

    // Read the repo back through the regular (GitHub-shaped) API surface.
    const repoResp = await request.get(
      `${FAKE_GITHUB_URL}/repos/e2e-test-user/dynamic-repo`,
    );
    expect(repoResp.ok()).toBeTruthy();
    const repo = await repoResp.json();
    expect(repo.name).toBe("dynamic-repo");
    expect(repo.language).toBe("Rust");
    console.log("[DynamicRepo] Successfully added and verified dynamic repo");
  });

  test("Newly added fake GitHub repo gets picked up by sync", async ({
    request,
  }) => {
    appCookies = await getAppSessionCookies(request);

    const syncResp = await request.post(`${APP_URL}/api/sync`, {
      headers: {
        "Content-Type": "application/json",
        Cookie: appCookies,
      },
      failOnStatusCode: false,
    });

    // Any non-5xx response is acceptable here; a sync may legitimately be
    // rejected (e.g. already running) without indicating a server fault.
    const status = syncResp.status();
    console.log(`[DynamicSync] Sync response: ${status}`);
    expect(status).toBeLessThan(500);

    if (syncResp.ok()) {
      const data = await syncResp.json();
      console.log(
        `[DynamicSync] New repos discovered: ${data.newRepositories ?? "?"}`,
      );
      if (data.newRepositories !== undefined) {
        expect(data.newRepositories).toBeGreaterThanOrEqual(0);
      }
    }
  });

  // ── Content integrity ────────────────────────────────────────────────────

  test("Verify repo content integrity after mirror", async () => {
    // Check repos in the mirror org
    const orgRepos = await giteaApi.listOrgRepos(GITEA_MIRROR_ORG);
    const orgRepoNames = orgRepos.map((r: any) => r.name);
    console.log(
      `[Integrity] Repos in ${GITEA_MIRROR_ORG}: ${orgRepoNames.join(", ")}`,
    );

    // Check github-stars org for starred repos
    const starsRepos = await giteaApi.listOrgRepos("github-stars");
    const starsRepoNames = starsRepos.map((r: any) => r.name);
    console.log(
      `[Integrity] Repos in github-stars: ${starsRepoNames.join(", ")}`,
    );

    // ── notes repo (minimal single-commit repo) ──────────────────────────

    // File checks below are conditional: a missing file is logged rather
    // than asserted, since mirroring timing varies between runs.
    if (orgRepoNames.includes("notes")) {
      const notesReadme = await giteaApi.getFileContent(
        GITEA_MIRROR_ORG,
        "notes",
        "README.md",
      );
      if (notesReadme) {
        expect(notesReadme).toContain("Notes");
        console.log("[Integrity] notes/README.md verified");
      }

      const ideas = await giteaApi.getFileContent(
        GITEA_MIRROR_ORG,
        "notes",
        "ideas.md",
      );
      if (ideas) {
        expect(ideas).toContain("Ideas");
        console.log("[Integrity] notes/ideas.md verified");
      }

      const todo = await giteaApi.getFileContent(
        GITEA_MIRROR_ORG,
        "notes",
        "todo.md",
      );
      if (todo) {
        expect(todo).toContain("TODO");
        console.log("[Integrity] notes/todo.md verified");
      }
    }

    // ── dotfiles repo ────────────────────────────────────────────────────

    if (orgRepoNames.includes("dotfiles")) {
      const vimrc = await giteaApi.getFileContent(
        GITEA_MIRROR_ORG,
        "dotfiles",
        ".vimrc",
      );
      if (vimrc) {
        expect(vimrc).toContain("set number");
        console.log("[Integrity] dotfiles/.vimrc verified");
      }

      const gitconfig = await giteaApi.getFileContent(
        GITEA_MIRROR_ORG,
        "dotfiles",
        ".gitconfig",
      );
      if (gitconfig) {
        expect(gitconfig).toContain("[user]");
        console.log("[Integrity] dotfiles/.gitconfig verified");
      }

      // Verify commit count (dotfiles has 2 commits)
      const commits = await giteaApi.listCommits(
        GITEA_MIRROR_ORG,
        "dotfiles",
      );
      console.log(`[Integrity] dotfiles commit count: ${commits.length}`);
      expect(
        commits.length,
        "dotfiles should have at least 2 commits",
      ).toBeGreaterThanOrEqual(2);
    }

    // ── popular-lib (starred repo from other-user) ───────────────────────

    // In single-org strategy it goes to the starredReposOrg ("github-stars")
    if (starsRepoNames.includes("popular-lib")) {
      const readme = await giteaApi.getFileContent(
        "github-stars",
        "popular-lib",
        "README.md",
      );
      if (readme) {
        expect(readme).toContain("Popular Lib");
        console.log("[Integrity] popular-lib/README.md verified");
      }

      const pkg = await giteaApi.getFileContent(
        "github-stars",
        "popular-lib",
        "package.json",
      );
      if (pkg) {
        const parsed = JSON.parse(pkg);
        expect(parsed.name).toBe("popular-lib");
        expect(parsed.version).toBe("2.5.0");
        console.log("[Integrity] popular-lib/package.json verified");
      }

      const tags = await giteaApi.listTags("github-stars", "popular-lib");
      const tagNames = tags.map((t: any) => t.name);
      console.log(
        `[Integrity] popular-lib tags: ${tagNames.join(", ") || "(none)"}`,
      );
      if (tagNames.length > 0) {
        expect(tagNames).toContain("v2.5.0");
      }
    } else {
      console.log(
        "[Integrity] popular-lib not found in github-stars " +
          "(may be in mirror org or not yet mirrored)",
      );
    }

    // ── org-tool (organization repo) ─────────────────────────────────────

    // org-tool may be in the mirror org or a separate org depending on
    // the mirror strategy — check several possible locations.
    const orgToolOwners = [GITEA_MIRROR_ORG, "test-org"];
    let foundOrgTool = false;
    for (const owner of orgToolOwners) {
      const repo = await giteaApi.getRepo(owner, "org-tool");
      if (repo) {
        foundOrgTool = true;
        console.log(`[Integrity] org-tool found in ${owner}`);

        const readme = await giteaApi.getFileContent(
          owner,
          "org-tool",
          "README.md",
        );
        if (readme) {
          expect(readme).toContain("Org Tool");
          console.log("[Integrity] org-tool/README.md verified");
        }

        const mainGo = await giteaApi.getFileContent(
          owner,
          "org-tool",
          "main.go",
        );
        if (mainGo) {
          expect(mainGo).toContain("package main");
          console.log("[Integrity] org-tool/main.go verified");
        }

        // Check branches
        const branches = await giteaApi.listBranches(owner, "org-tool");
        const branchNames = branches.map((b: any) => b.name);
        console.log(
          `[Integrity] org-tool branches: ${branchNames.join(", ")}`,
        );
        if (branchNames.length > 0) {
          expect(branchNames).toContain("main");
        }

        // Check tags
        const tags = await giteaApi.listTags(owner, "org-tool");
        const tagNames = tags.map((t: any) => t.name);
        console.log(
          `[Integrity] org-tool tags: ${tagNames.join(", ") || "(none)"}`,
        );

        // Stop at the first owner that has the repo.
        break;
      }
    }
    if (!foundOrgTool) {
      console.log(
        "[Integrity] org-tool not found in Gitea " +
          "(may not have been mirrored in single-org strategy)",
      );
    }
  });

  // ── my-project deep check ────────────────────────────────────────────────

  test("Verify my-project branch and tag structure", async () => {
    const branches = await giteaApi.listBranches(
      GITEA_MIRROR_ORG,
      "my-project",
    );
    const branchNames = branches.map((b: any) => b.name);
    console.log(
      `[Integrity] my-project branches: ${branchNames.join(", ")}`,
    );

    // The source repo had main, develop, and feature/add-tests
    expect(branchNames, "main branch should exist").toContain("main");
    // develop and feature/add-tests may or may not survive force-push tests
    // depending on test ordering, so just log them
    for (const expected of ["develop", "feature/add-tests"]) {
      if (branchNames.includes(expected)) {
        console.log(`[Integrity] ✓ Branch "${expected}" present`);
      } else {
        console.log(`[Integrity] ⊘ Branch "${expected}" not present (may have been affected by force-push tests)`);
      }
    }

    const tags = await giteaApi.listTags(GITEA_MIRROR_ORG, "my-project");
    const tagNames = tags.map((t: any) => t.name);
    console.log(
      `[Integrity] my-project tags: ${tagNames.join(", ") || "(none)"}`,
    );

    // Verify package.json exists and is valid JSON
    const pkg = await giteaApi.getFileContent(
      GITEA_MIRROR_ORG,
      "my-project",
      "package.json",
    );
    if (pkg) {
      const parsed = JSON.parse(pkg);
      expect(parsed.name).toBe("my-project");
      console.log("[Integrity] my-project/package.json verified");
    }
  });
});
|
||||
|
||||
// ─── Fake GitHub reset ───────────────────────────────────────────────────────
|
||||
|
||||
test.describe("E2E: Fake GitHub reset", () => {
|
||||
test("Can reset fake GitHub to default state", async ({ request }) => {
|
||||
const resp = await request.post(`${FAKE_GITHUB_URL}/___mgmt/reset`);
|
||||
expect(resp.ok()).toBeTruthy();
|
||||
const data = await resp.json();
|
||||
expect(data.message).toContain("reset");
|
||||
console.log("[Reset] Fake GitHub reset to defaults");
|
||||
|
||||
const health = await request.get(`${FAKE_GITHUB_URL}/___mgmt/health`);
|
||||
const healthData = await health.json();
|
||||
expect(healthData.repos).toBeGreaterThan(0);
|
||||
console.log(
|
||||
`[Reset] After reset: ${healthData.repos} repos, ${healthData.orgs} orgs`,
|
||||
);
|
||||
});
|
||||
});
|
||||
141
tests/e2e/cleanup.sh
Executable file
141
tests/e2e/cleanup.sh
Executable file
@@ -0,0 +1,141 @@
|
||||
#!/usr/bin/env bash
# ────────────────────────────────────────────────────────────────────────────────
# E2E Cleanup Script
# Removes all temporary data from previous E2E test runs.
#
# Usage:
#   ./tests/e2e/cleanup.sh          # cleanup everything
#   ./tests/e2e/cleanup.sh --soft   # keep container images, only remove volumes/data
# ────────────────────────────────────────────────────────────────────────────────
set -euo pipefail

SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
PROJECT_ROOT="$(cd "$SCRIPT_DIR/../.." && pwd)"
COMPOSE_FILE="$SCRIPT_DIR/docker-compose.e2e.yml"

SOFT_CLEAN=false
if [[ "${1:-}" == "--soft" ]]; then
  SOFT_CLEAN=true
fi

# Detect container runtime (podman or docker)
# FIX: `&>/dev/null` already redirects both stdout and stderr; the original's
# trailing `2>&1` after it was redundant and has been dropped (here and in
# the volume-inspect check below).
if command -v podman-compose &>/dev/null; then
  COMPOSE_CMD="podman-compose"
  CONTAINER_CMD="podman"
elif command -v docker-compose &>/dev/null; then
  COMPOSE_CMD="docker-compose"
  CONTAINER_CMD="docker"
elif command -v docker &>/dev/null && docker compose version &>/dev/null; then
  COMPOSE_CMD="docker compose"
  CONTAINER_CMD="docker"
else
  echo "[cleanup] WARNING: No container compose tool found. Skipping container cleanup."
  COMPOSE_CMD=""
  CONTAINER_CMD=""
fi

echo "╔══════════════════════════════════════════════════════════════╗"
echo "║ E2E Test Cleanup ║"
echo "╚══════════════════════════════════════════════════════════════╝"
echo ""

# ── 1. Stop and remove containers ─────────────────────────────────────────────
if [[ -n "$COMPOSE_CMD" ]] && [[ -f "$COMPOSE_FILE" ]]; then
  echo "[cleanup] Stopping E2E containers..."
  # $COMPOSE_CMD is intentionally unquoted: it may be the two words
  # "docker compose".
  $COMPOSE_CMD -f "$COMPOSE_FILE" down --volumes --remove-orphans 2>/dev/null || true
  echo "[cleanup] ✓ Containers stopped and removed"
else
  echo "[cleanup] ⊘ No compose file or runtime found, skipping container teardown"
fi

# ── 2. Remove named volumes created by E2E compose ───────────────────────────
if [[ -n "$CONTAINER_CMD" ]]; then
  for vol in e2e-gitea-data; do
    full_vol_name="e2e_${vol}"
    # Try both with and without the project prefix
    for candidate in "$vol" "$full_vol_name" "tests_e2e_${vol}"; do
      if $CONTAINER_CMD volume inspect "$candidate" &>/dev/null; then
        echo "[cleanup] Removing volume: $candidate"
        $CONTAINER_CMD volume rm -f "$candidate" 2>/dev/null || true
      fi
    done
  done
  echo "[cleanup] ✓ Named volumes cleaned"
fi

# ── 3. Kill leftover background processes from previous runs ──────────────────
echo "[cleanup] Checking for leftover processes..."

# Kill fake GitHub server
if pgrep -f "fake-github-server" &>/dev/null; then
  echo "[cleanup] Killing leftover fake-github-server process(es)..."
  pkill -f "fake-github-server" 2>/dev/null || true
fi

# Kill any stray node/tsx processes on our E2E ports (including git-server on 4590)
for port in 4580 4590 4321 3333; do
  pid=$(lsof -ti :"$port" 2>/dev/null || true)
  if [[ -n "$pid" ]]; then
    echo "[cleanup] Killing process on port $port (PID: $pid)..."
    # $pid is deliberately unquoted: lsof may emit several PIDs, one per
    # line, and word splitting passes each as a separate argument.
    kill -9 $pid 2>/dev/null || true
  fi
done

echo "[cleanup] ✓ Leftover processes cleaned"

# ── 4. Remove E2E database and data files ─────────────────────────────────────
echo "[cleanup] Removing E2E data files..."

# Remove test databases
rm -f "$PROJECT_ROOT/gitea-mirror.db" 2>/dev/null || true
rm -f "$PROJECT_ROOT/data/gitea-mirror.db" 2>/dev/null || true
rm -f "$PROJECT_ROOT/e2e-gitea-mirror.db" 2>/dev/null || true

# Remove test backup data (glob intentionally outside the quotes)
rm -rf "$PROJECT_ROOT/data/repo-backups"* 2>/dev/null || true

# Remove programmatically created test git repositories
if [[ -d "$SCRIPT_DIR/git-repos" ]]; then
  echo "[cleanup] Removing test git repos..."
  rm -rf "$SCRIPT_DIR/git-repos" 2>/dev/null || true
  echo "[cleanup] ✓ Test git repos removed"
fi

# Remove Playwright state/artifacts from previous runs
rm -rf "$SCRIPT_DIR/test-results" 2>/dev/null || true
rm -rf "$SCRIPT_DIR/playwright-report" 2>/dev/null || true
rm -rf "$SCRIPT_DIR/.auth" 2>/dev/null || true
rm -f "$SCRIPT_DIR/e2e-storage-state.json" 2>/dev/null || true

# Remove any PID files we might have created
rm -f "$SCRIPT_DIR/.fake-github.pid" 2>/dev/null || true
rm -f "$SCRIPT_DIR/.app.pid" 2>/dev/null || true

echo "[cleanup] ✓ Data files cleaned"

# ── 5. Remove temp directories ────────────────────────────────────────────────
echo "[cleanup] Removing temp directories..."
rm -rf /tmp/gitea-mirror-backup-* 2>/dev/null || true
rm -rf /tmp/e2e-gitea-mirror-* 2>/dev/null || true
echo "[cleanup] ✓ Temp directories cleaned"

# ── 6. Optionally remove container images ─────────────────────────────────────
if [[ "$SOFT_CLEAN" == false ]] && [[ -n "$CONTAINER_CMD" ]]; then
  echo "[cleanup] Pruning dangling images..."
  $CONTAINER_CMD image prune -f 2>/dev/null || true
  echo "[cleanup] ✓ Dangling images pruned"
else
  echo "[cleanup] ⊘ Skipping image cleanup (soft mode)"
fi

# ── 7. Remove node_modules/.cache artifacts from E2E ──────────────────────────
if [[ -d "$PROJECT_ROOT/node_modules/.cache/playwright" ]]; then
  echo "[cleanup] Removing Playwright cache..."
  rm -rf "$PROJECT_ROOT/node_modules/.cache/playwright" 2>/dev/null || true
  echo "[cleanup] ✓ Playwright cache removed"
fi

echo ""
echo "═══════════════════════════════════════════════════════════════"
echo " ✅ E2E cleanup complete"
echo "═══════════════════════════════════════════════════════════════"
|
||||
522
tests/e2e/create-test-repos.ts
Normal file
522
tests/e2e/create-test-repos.ts
Normal file
@@ -0,0 +1,522 @@
|
||||
#!/usr/bin/env bun
|
||||
/**
|
||||
* create-test-repos.ts
|
||||
*
|
||||
* Programmatically creates bare git repositories with real commits, branches,
|
||||
* and tags so that Gitea can actually clone them during E2E testing.
|
||||
*
|
||||
* Repos are created under <outputDir>/<owner>/<name>.git as bare repositories.
|
||||
* After creation, `git update-server-info` is run on each so they can be served
|
||||
* via the "dumb HTTP" protocol by any static file server (nginx, darkhttpd, etc.).
|
||||
*
|
||||
* Usage:
|
||||
* bun run tests/e2e/create-test-repos.ts [--output-dir tests/e2e/git-repos]
|
||||
*
|
||||
* The script creates the following repositories matching the fake GitHub server's
|
||||
* default store:
|
||||
*
|
||||
* e2e-test-user/my-project.git – repo with commits, branches, tags, README
|
||||
* e2e-test-user/dotfiles.git – simple repo with a few config files
|
||||
* e2e-test-user/notes.git – minimal repo with one commit
|
||||
* other-user/popular-lib.git – starred repo from another user
|
||||
* test-org/org-tool.git – organization repository
|
||||
*/
|
||||
|
||||
import { execSync } from "node:child_process";
import { existsSync, mkdirSync, rmSync, writeFileSync } from "node:fs";
import { dirname, join, resolve } from "node:path";
|
||||
|
||||
// ─── Configuration ───────────────────────────────────────────────────────────
|
||||
|
||||
// Default location for the generated bare repos (next to this script).
// NOTE(review): `import.meta.dir` is Bun-specific, matching the `bun` shebang.
const DEFAULT_OUTPUT_DIR = join(import.meta.dir, "git-repos");

// Resolve the output directory once at startup: honor a `--output-dir <path>`
// CLI flag when present, otherwise fall back to DEFAULT_OUTPUT_DIR.
const outputDir = (() => {
  const idx = process.argv.indexOf("--output-dir");
  if (idx !== -1 && process.argv[idx + 1]) {
    return resolve(process.argv[idx + 1]);
  }
  return DEFAULT_OUTPUT_DIR;
})();
|
||||
|
||||
// ─── Helpers ─────────────────────────────────────────────────────────────────
|
||||
|
||||
function git(args: string, cwd: string): string {
|
||||
try {
|
||||
return execSync(`git ${args}`, {
|
||||
cwd,
|
||||
encoding: "utf-8",
|
||||
stdio: ["pipe", "pipe", "pipe"],
|
||||
env: {
|
||||
...process.env,
|
||||
// Deterministic committer for reproducible repos
|
||||
GIT_AUTHOR_NAME: "E2E Test Bot",
|
||||
GIT_AUTHOR_EMAIL: "e2e-bot@test.local",
|
||||
GIT_AUTHOR_DATE: "2024-01-15T10:00:00+00:00",
|
||||
GIT_COMMITTER_NAME: "E2E Test Bot",
|
||||
GIT_COMMITTER_EMAIL: "e2e-bot@test.local",
|
||||
GIT_COMMITTER_DATE: "2024-01-15T10:00:00+00:00",
|
||||
},
|
||||
}).trim();
|
||||
} catch (err: any) {
|
||||
const stderr = err.stderr?.toString() ?? "";
|
||||
const stdout = err.stdout?.toString() ?? "";
|
||||
throw new Error(
|
||||
`git ${args} failed in ${cwd}:\n${stderr || stdout || err.message}`,
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
/** Increment the fake date for each commit so they have unique timestamps */
|
||||
let commitCounter = 0;
|
||||
function gitCommit(msg: string, cwd: string): void {
|
||||
commitCounter++;
|
||||
const date = `2024-01-15T${String(10 + Math.floor(commitCounter / 60)).padStart(2, "0")}:${String(commitCounter % 60).padStart(2, "0")}:00+00:00`;
|
||||
execSync(`git commit -m "${msg}"`, {
|
||||
cwd,
|
||||
encoding: "utf-8",
|
||||
stdio: ["pipe", "pipe", "pipe"],
|
||||
env: {
|
||||
...process.env,
|
||||
GIT_AUTHOR_NAME: "E2E Test Bot",
|
||||
GIT_AUTHOR_EMAIL: "e2e-bot@test.local",
|
||||
GIT_AUTHOR_DATE: date,
|
||||
GIT_COMMITTER_NAME: "E2E Test Bot",
|
||||
GIT_COMMITTER_EMAIL: "e2e-bot@test.local",
|
||||
GIT_COMMITTER_DATE: date,
|
||||
},
|
||||
});
|
||||
}
|
||||
|
||||
function writeFile(repoDir: string, relPath: string, content: string): void {
|
||||
const fullPath = join(repoDir, relPath);
|
||||
const dir = fullPath.substring(0, fullPath.lastIndexOf("/"));
|
||||
if (dir && !existsSync(dir)) {
|
||||
mkdirSync(dir, { recursive: true });
|
||||
}
|
||||
writeFileSync(fullPath, content, "utf-8");
|
||||
}
|
||||
|
||||
/**
 * Declarative description of one test repository to generate.
 */
interface RepoSpec {
  // GitHub-style owner login; becomes the parent directory of the bare repo.
  owner: string;
  // Repository name; the on-disk bare repo is `<name>.git`.
  name: string;
  // Human-readable description (mirrors the fake GitHub store entry).
  description: string;
  /** Function that populates the working repo with commits/branches/tags */
  populate: (workDir: string) => void;
}
|
||||
|
||||
/**
|
||||
* Creates a bare repo at <outputDir>/<owner>/<name>.git
|
||||
* by first building a working repo, then cloning it as bare.
|
||||
*/
|
||||
function createBareRepo(spec: RepoSpec): string {
|
||||
const barePath = join(outputDir, spec.owner, `${spec.name}.git`);
|
||||
const workPath = join(outputDir, ".work", spec.owner, spec.name);
|
||||
|
||||
// Clean previous
|
||||
rmSync(barePath, { recursive: true, force: true });
|
||||
rmSync(workPath, { recursive: true, force: true });
|
||||
|
||||
// Create working repo
|
||||
mkdirSync(workPath, { recursive: true });
|
||||
git("init -b main", workPath);
|
||||
git("config user.name 'E2E Test Bot'", workPath);
|
||||
git("config user.email 'e2e-bot@test.local'", workPath);
|
||||
|
||||
// Populate with content
|
||||
spec.populate(workPath);
|
||||
|
||||
// Clone as bare
|
||||
mkdirSync(join(outputDir, spec.owner), { recursive: true });
|
||||
git(`clone --bare "${workPath}" "${barePath}"`, outputDir);
|
||||
|
||||
// Enable dumb HTTP protocol support
|
||||
git("update-server-info", barePath);
|
||||
|
||||
// Also enable the post-update hook so update-server-info runs on push
|
||||
const hookPath = join(barePath, "hooks", "post-update");
|
||||
mkdirSync(join(barePath, "hooks"), { recursive: true });
|
||||
writeFileSync(hookPath, "#!/bin/sh\nexec git update-server-info\n", {
|
||||
mode: 0o755,
|
||||
});
|
||||
|
||||
return barePath;
|
||||
}
|
||||
|
||||
// ─── Repository Definitions ──────────────────────────────────────────────────
|
||||
|
||||
const repos: RepoSpec[] = [
|
||||
// ── my-project: feature-rich repo ────────────────────────────────────────
|
||||
{
|
||||
owner: "e2e-test-user",
|
||||
name: "my-project",
|
||||
description: "A test project with branches, tags, and multiple commits",
|
||||
populate(dir) {
|
||||
// Initial commit
|
||||
writeFile(
|
||||
dir,
|
||||
"README.md",
|
||||
"# My Project\n\nA sample project for E2E testing.\n",
|
||||
);
|
||||
writeFile(
|
||||
dir,
|
||||
"package.json",
|
||||
JSON.stringify(
|
||||
{
|
||||
name: "my-project",
|
||||
version: "1.0.0",
|
||||
description: "E2E test project",
|
||||
main: "index.js",
|
||||
},
|
||||
null,
|
||||
2,
|
||||
) + "\n",
|
||||
);
|
||||
writeFile(
|
||||
dir,
|
||||
"index.js",
|
||||
'// Main entry point\nconsole.log("Hello from my-project");\n',
|
||||
);
|
||||
writeFile(dir, ".gitignore", "node_modules/\ndist/\n.env\n");
|
||||
git("add -A", dir);
|
||||
gitCommit("Initial commit", dir);
|
||||
|
||||
// Second commit
|
||||
writeFile(
|
||||
dir,
|
||||
"src/lib.js",
|
||||
"export function greet(name) {\n return `Hello, ${name}!`;\n}\n",
|
||||
);
|
||||
writeFile(
|
||||
dir,
|
||||
"src/utils.js",
|
||||
"export function sum(a, b) {\n return a + b;\n}\n",
|
||||
);
|
||||
git("add -A", dir);
|
||||
gitCommit("Add library modules", dir);
|
||||
|
||||
// Tag v1.0.0
|
||||
git("tag -a v1.0.0 -m 'Initial release'", dir);
|
||||
|
||||
// Create develop branch
|
||||
git("checkout -b develop", dir);
|
||||
writeFile(
|
||||
dir,
|
||||
"src/feature.js",
|
||||
"export function newFeature() {\n return 'coming soon';\n}\n",
|
||||
);
|
||||
git("add -A", dir);
|
||||
gitCommit("Add new feature placeholder", dir);
|
||||
|
||||
// Create feature branch from develop
|
||||
git("checkout -b feature/add-tests", dir);
|
||||
writeFile(
|
||||
dir,
|
||||
"tests/lib.test.js",
|
||||
`import { greet } from '../src/lib.js';
|
||||
import { sum } from '../src/utils.js';
|
||||
|
||||
console.assert(greet('World') === 'Hello, World!');
|
||||
console.assert(sum(2, 3) === 5);
|
||||
console.log('All tests passed');
|
||||
`,
|
||||
);
|
||||
git("add -A", dir);
|
||||
gitCommit("Add unit tests", dir);
|
||||
|
||||
// Go back to main and add another commit
|
||||
git("checkout main", dir);
|
||||
writeFile(
|
||||
dir,
|
||||
"README.md",
|
||||
"# My Project\n\nA sample project for E2E testing.\n\n## Features\n- Greeting module\n- Math utilities\n",
|
||||
);
|
||||
git("add -A", dir);
|
||||
gitCommit("Update README with features list", dir);
|
||||
|
||||
// Tag v1.1.0
|
||||
git("tag -a v1.1.0 -m 'Feature update'", dir);
|
||||
|
||||
// Third commit on main for more history
|
||||
writeFile(dir, "LICENSE", "MIT License\n\nCopyright (c) 2024 E2E Test\n");
|
||||
git("add -A", dir);
|
||||
gitCommit("Add MIT license", dir);
|
||||
},
|
||||
},
|
||||
|
||||
// ── dotfiles: simple config repo ─────────────────────────────────────────
|
||||
{
|
||||
owner: "e2e-test-user",
|
||||
name: "dotfiles",
|
||||
description: "Personal configuration files",
|
||||
populate(dir) {
|
||||
writeFile(
|
||||
dir,
|
||||
".bashrc",
|
||||
"# Bash configuration\nalias ll='ls -la'\nalias gs='git status'\nexport EDITOR=vim\n",
|
||||
);
|
||||
writeFile(
|
||||
dir,
|
||||
".vimrc",
|
||||
'" Vim configuration\nset number\nset tabstop=2\nset shiftwidth=2\nset expandtab\nsyntax on\n',
|
||||
);
|
||||
writeFile(
|
||||
dir,
|
||||
".gitconfig",
|
||||
"[user]\n name = E2E Test User\n email = e2e@test.local\n[alias]\n co = checkout\n br = branch\n st = status\n",
|
||||
);
|
||||
git("add -A", dir);
|
||||
gitCommit("Add dotfiles", dir);
|
||||
|
||||
writeFile(
|
||||
dir,
|
||||
".tmux.conf",
|
||||
"# Tmux configuration\nset -g mouse on\nset -g default-terminal 'screen-256color'\n",
|
||||
);
|
||||
writeFile(
|
||||
dir,
|
||||
"install.sh",
|
||||
'#!/bin/bash\n# Symlink dotfiles to home\nfor f in .bashrc .vimrc .gitconfig .tmux.conf; do\n ln -sf "$(pwd)/$f" "$HOME/$f"\ndone\necho \'Dotfiles installed!\'\n',
|
||||
);
|
||||
git("add -A", dir);
|
||||
gitCommit("Add tmux config and install script", dir);
|
||||
},
|
||||
},
|
||||
|
||||
// ── notes: minimal single-commit repo ────────────────────────────────────
|
||||
{
|
||||
owner: "e2e-test-user",
|
||||
name: "notes",
|
||||
description: "Personal notes and documentation",
|
||||
populate(dir) {
|
||||
writeFile(
|
||||
dir,
|
||||
"README.md",
|
||||
"# Notes\n\nA collection of personal notes.\n",
|
||||
);
|
||||
writeFile(
|
||||
dir,
|
||||
"ideas.md",
|
||||
"# Ideas\n\n- Build a mirror tool\n- Automate backups\n- Learn Rust\n",
|
||||
);
|
||||
writeFile(
|
||||
dir,
|
||||
"todo.md",
|
||||
"# TODO\n\n- [x] Set up repository\n- [ ] Add more notes\n- [ ] Organize by topic\n",
|
||||
);
|
||||
git("add -A", dir);
|
||||
gitCommit("Initial notes", dir);
|
||||
},
|
||||
},
|
||||
|
||||
// ── popular-lib: starred repo from another user ──────────────────────────
|
||||
{
|
||||
owner: "other-user",
|
||||
name: "popular-lib",
|
||||
description: "A popular library that we starred",
|
||||
populate(dir) {
|
||||
writeFile(
|
||||
dir,
|
||||
"README.md",
|
||||
"# Popular Lib\n\nA widely-used utility library.\n\n## Installation\n\n```bash\nnpm install popular-lib\n```\n",
|
||||
);
|
||||
writeFile(
|
||||
dir,
|
||||
"package.json",
|
||||
JSON.stringify(
|
||||
{
|
||||
name: "popular-lib",
|
||||
version: "2.5.0",
|
||||
description: "A widely-used utility library",
|
||||
main: "dist/index.js",
|
||||
license: "Apache-2.0",
|
||||
},
|
||||
null,
|
||||
2,
|
||||
) + "\n",
|
||||
);
|
||||
writeFile(
|
||||
dir,
|
||||
"src/index.ts",
|
||||
`/**
|
||||
* Popular Lib - utility functions
|
||||
*/
|
||||
export function capitalize(str: string): string {
|
||||
return str.charAt(0).toUpperCase() + str.slice(1);
|
||||
}
|
||||
|
||||
export function slugify(str: string): string {
|
||||
return str.toLowerCase().replace(/\\s+/g, '-').replace(/[^a-z0-9-]/g, '');
|
||||
}
|
||||
|
||||
export function truncate(str: string, len: number): string {
|
||||
if (str.length <= len) return str;
|
||||
return str.slice(0, len) + '...';
|
||||
}
|
||||
`,
|
||||
);
|
||||
git("add -A", dir);
|
||||
gitCommit("Initial release of popular-lib", dir);
|
||||
|
||||
git("tag -a v2.5.0 -m 'Stable release 2.5.0'", dir);
|
||||
|
||||
// Add a second commit
|
||||
writeFile(
|
||||
dir,
|
||||
"CHANGELOG.md",
|
||||
"# Changelog\n\n## 2.5.0\n- Added capitalize, slugify, truncate\n\n## 2.4.0\n- Bug fixes\n",
|
||||
);
|
||||
git("add -A", dir);
|
||||
gitCommit("Add changelog", dir);
|
||||
},
|
||||
},
|
||||
|
||||
// ── org-tool: organization repo ──────────────────────────────────────────
|
||||
{
|
||||
owner: "test-org",
|
||||
name: "org-tool",
|
||||
description: "Internal organization tooling",
|
||||
populate(dir) {
|
||||
writeFile(
|
||||
dir,
|
||||
"README.md",
|
||||
"# Org Tool\n\nInternal tooling for test-org.\n\n## Usage\n\n```bash\norg-tool run <command>\n```\n",
|
||||
);
|
||||
writeFile(
|
||||
dir,
|
||||
"main.go",
|
||||
`package main
|
||||
|
||||
import "fmt"
|
||||
|
||||
func main() {
|
||||
\tfmt.Println("org-tool v0.1.0")
|
||||
}
|
||||
`,
|
||||
);
|
||||
writeFile(
|
||||
dir,
|
||||
"go.mod",
|
||||
"module github.com/test-org/org-tool\n\ngo 1.21\n",
|
||||
);
|
||||
writeFile(
|
||||
dir,
|
||||
"Makefile",
|
||||
"build:\n\tgo build -o org-tool .\n\ntest:\n\tgo test ./...\n\nclean:\n\trm -f org-tool\n",
|
||||
);
|
||||
git("add -A", dir);
|
||||
gitCommit("Initial org tool", dir);
|
||||
|
||||
// Add a release branch
|
||||
git("checkout -b release/v0.1", dir);
|
||||
writeFile(dir, "VERSION", "0.1.0\n");
|
||||
git("add -A", dir);
|
||||
gitCommit("Pin version for release", dir);
|
||||
git("tag -a v0.1.0 -m 'Release v0.1.0'", dir);
|
||||
|
||||
// Back to main with more work
|
||||
git("checkout main", dir);
|
||||
writeFile(
|
||||
dir,
|
||||
"cmd/serve.go",
|
||||
`package cmd
|
||||
|
||||
import "fmt"
|
||||
|
||||
func Serve() {
|
||||
\tfmt.Println("Starting server on :8080")
|
||||
}
|
||||
`,
|
||||
);
|
||||
git("add -A", dir);
|
||||
gitCommit("Add serve command", dir);
|
||||
},
|
||||
},
|
||||
];
|
||||
|
||||
// ─── Main ────────────────────────────────────────────────────────────────────
|
||||
|
||||
function main() {
|
||||
console.log(
|
||||
"╔══════════════════════════════════════════════════════════════╗",
|
||||
);
|
||||
console.log(
|
||||
"║ Create E2E Test Git Repositories ║",
|
||||
);
|
||||
console.log(
|
||||
"╠══════════════════════════════════════════════════════════════╣",
|
||||
);
|
||||
console.log(`║ Output directory: ${outputDir}`);
|
||||
console.log(`║ Repositories: ${repos.length}`);
|
||||
console.log(
|
||||
"╚══════════════════════════════════════════════════════════════╝",
|
||||
);
|
||||
console.log("");
|
||||
|
||||
// Verify git is available
|
||||
try {
|
||||
const version = execSync("git --version", { encoding: "utf-8" }).trim();
|
||||
console.log(`[setup] Git version: ${version}`);
|
||||
} catch {
|
||||
console.error("ERROR: git is not installed or not in PATH");
|
||||
process.exit(1);
|
||||
}
|
||||
|
||||
// Clean output directory (preserve the directory itself)
|
||||
if (existsSync(outputDir)) {
|
||||
console.log("[setup] Cleaning previous repos...");
|
||||
rmSync(outputDir, { recursive: true, force: true });
|
||||
}
|
||||
mkdirSync(outputDir, { recursive: true });
|
||||
|
||||
// Create each repository
|
||||
const created: string[] = [];
|
||||
for (const spec of repos) {
|
||||
const label = `${spec.owner}/${spec.name}`;
|
||||
console.log(`\n[repo] Creating ${label} ...`);
|
||||
try {
|
||||
const barePath = createBareRepo(spec);
|
||||
console.log(`[repo] ✓ ${label} → ${barePath}`);
|
||||
created.push(label);
|
||||
} catch (err) {
|
||||
console.error(`[repo] ✗ ${label} FAILED:`, err);
|
||||
process.exit(1);
|
||||
}
|
||||
}
|
||||
|
||||
// Cleanup working directories
|
||||
const workDir = join(outputDir, ".work");
|
||||
if (existsSync(workDir)) {
|
||||
rmSync(workDir, { recursive: true, force: true });
|
||||
}
|
||||
|
||||
// Write a manifest file so other scripts know what repos exist
|
||||
const manifest = {
|
||||
createdAt: new Date().toISOString(),
|
||||
outputDir,
|
||||
repos: repos.map((r) => ({
|
||||
owner: r.owner,
|
||||
name: r.name,
|
||||
description: r.description,
|
||||
barePath: `${r.owner}/${r.name}.git`,
|
||||
})),
|
||||
};
|
||||
writeFileSync(
|
||||
join(outputDir, "manifest.json"),
|
||||
JSON.stringify(manifest, null, 2) + "\n",
|
||||
"utf-8",
|
||||
);
|
||||
|
||||
console.log(
|
||||
"\n═══════════════════════════════════════════════════════════════",
|
||||
);
|
||||
console.log(` ✅ Created ${created.length} bare repositories:`);
|
||||
for (const name of created) {
|
||||
console.log(` • ${name}.git`);
|
||||
}
|
||||
console.log(`\n Manifest: ${join(outputDir, "manifest.json")}`);
|
||||
console.log(
|
||||
"═══════════════════════════════════════════════════════════════",
|
||||
);
|
||||
}
|
||||
|
||||
main();
|
||||
105
tests/e2e/docker-compose.e2e.yml
Normal file
105
tests/e2e/docker-compose.e2e.yml
Normal file
@@ -0,0 +1,105 @@
|
||||
# E2E testing environment
# Spins up a Gitea instance and a git HTTP server for integration testing.
#
# The git-server container serves bare git repositories created by
# create-test-repos.ts via the "dumb HTTP" protocol so that Gitea can
# actually clone them during mirror operations.
#
# Usage: podman-compose -f tests/e2e/docker-compose.e2e.yml up -d

services:
  gitea-e2e:
    image: docker.io/gitea/gitea:1.22
    container_name: gitea-e2e
    environment:
      - USER_UID=1000
      - USER_GID=1000
      - GITEA__database__DB_TYPE=sqlite3
      - GITEA__database__PATH=/data/gitea/gitea.db
      - GITEA__server__DOMAIN=localhost
      - GITEA__server__ROOT_URL=http://localhost:3333/
      - GITEA__server__HTTP_PORT=3000
      - GITEA__server__SSH_DOMAIN=localhost
      - GITEA__server__START_SSH_SERVER=false
      - GITEA__security__INSTALL_LOCK=true
      - GITEA__service__DISABLE_REGISTRATION=false
      - GITEA__service__REQUIRE_SIGNIN_VIEW=false
      - GITEA__api__ENABLE_SWAGGER=false
      - GITEA__log__MODE=console
      - GITEA__log__LEVEL=Warn
      - GITEA__mirror__ENABLED=true
      - GITEA__mirror__DEFAULT_INTERVAL=1m
      - GITEA__mirror__MIN_INTERVAL=1m
      # Allow migrations from any domain including the git-server container
      - GITEA__migrations__ALLOWED_DOMAINS=*
      - GITEA__migrations__ALLOW_LOCAL_NETWORKS=true
      - GITEA__migrations__SKIP_TLS_VERIFY=true
    ports:
      - "3333:3000"
    volumes:
      - e2e-gitea-data:/data
    depends_on:
      git-server:
        # Wait for git-server's healthcheck (manifest.json reachable) rather
        # than mere process start, so Gitea never races an empty /repos mount
        # when it begins mirroring.
        condition: service_healthy
    healthcheck:
      test:
        [
          "CMD",
          "wget",
          "--no-verbose",
          "--tries=1",
          "--spider",
          "http://localhost:3000/",
        ]
      interval: 5s
      timeout: 5s
      retries: 30
      start_period: 10s
    tmpfs:
      - /tmp
    networks:
      - e2e-net

  # Lightweight HTTP server that serves bare git repositories.
  # Repos are created on the host by create-test-repos.ts and bind-mounted
  # into this container. Gitea clones from http://git-server/<owner>/<name>.git
  # using the "dumb HTTP" protocol (repos have git update-server-info run).
  git-server:
    image: docker.io/alpine:3.19
    container_name: git-server
    command:
      - sh
      - -c
      - |
        apk add --no-cache darkhttpd >/dev/null 2>&1
        echo "[git-server] Serving repos from /repos on port 80"
        ls -la /repos/ 2>/dev/null || echo "[git-server] WARNING: /repos is empty"
        exec darkhttpd /repos --port 80 --no-listing --log /dev/stdout
    volumes:
      - ./git-repos:/repos:ro
    ports:
      - "4590:80"
    healthcheck:
      # manifest.json is written last by create-test-repos.ts, so its
      # presence implies all repos are in place.
      test:
        [
          "CMD",
          "wget",
          "--no-verbose",
          "--tries=1",
          "--spider",
          "http://localhost:80/manifest.json",
        ]
      interval: 3s
      timeout: 3s
      retries: 15
      start_period: 5s
    networks:
      - e2e-net

networks:
  e2e-net:
    driver: bridge

volumes:
  e2e-gitea-data:
    driver: local
||||
1027
tests/e2e/fake-github-server.ts
Normal file
1027
tests/e2e/fake-github-server.ts
Normal file
File diff suppressed because it is too large
Load Diff
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user